+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.zjCvduAmFp --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-release-asan
Configuring dependencies for platform tools
[2 ymakes processing] [7920/7920 modules configured] [2474/4374 modules rendered]
[2 ymakes processing] [7920/7920 modules configured] [4272/4374 modules rendered]
[2 ymakes processing] [7920/7920 modules configured] [4374/4374 modules rendered]
Configuring dependencies for platform test_tool_tc1-global
[0 ymakes processing] [7926/7926 modules configured] [4374/4374 modules rendered]
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES | 3.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut | 6.9%| [AR] {BAZEL_DOWNLOAD} $(B)/certs/libcerts.global.a | 6.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_rb_tree/libcpp-containers-intrusive_rb_tree.a | 6.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxrt/liblibs-cxxsupp-libcxxrt.a | 6.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a | 6.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a | 7.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain64/liblibs-base64-plain64.a | 7.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a | 7.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/diff/liblibrary-cpp-diff.a | 7.5%| PREPARE $(VCS) | 8.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a | 9.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a | 9.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a | 9.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |11.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4_cpp_runtime/libcontrib-libs-antlr4_cpp_runtime.a |11.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |12.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc/liblibs-apache-orc.a |12.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |13.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |14.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |14.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |14.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |14.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |14.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/liblibrary-cpp-json.a |14.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libxml/libcontrib-libs-libxml.a |14.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |15.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/libcore-external_sources-object_storage.a |15.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |15.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |15.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/discovery/libydb-core-discovery.a |16.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |16.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.a |16.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/filestore/core/libcore-filestore-core.a |16.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |16.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |16.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |16.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |16.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |16.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |16.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |17.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |17.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/common/libformats-arrow-common.a |17.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |17.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |17.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/erasure/libydb-core-erasure.a |17.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/abstract/libarrow-accessor-abstract.a |17.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |17.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/object_storage/inference/libexternal_sources-object_storage-inference.a |17.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |17.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |17.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |17.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a |17.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |17.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/expr_nodes/libproviders-clickhouse-expr_nodes.a |17.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |17.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |17.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |17.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |18.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.a |18.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a |18.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |18.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/libydb-core-external_sources.a |18.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |18.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |18.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |18.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/six/py3/libpy3python-six-py3.global.a |19.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |19.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |18.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |19.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/skiff_ext/libyt-library-skiff_ext.a |19.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |19.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |19.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1/py3/libpy3python-pyasn1-py3.global.a |20.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |20.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.global.a |19.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytest/py3/libpy3python-pytest-py3.global.a |19.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyrsistent/py3/libpy3python-pyrsistent-py3.global.a |19.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libessentials-public-types.a |20.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyasn1-modules/py3/libpy3python-pyasn1-modules-py3.global.a |20.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |20.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |20.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |20.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |20.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.global.a |20.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |20.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |20.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/expr_nodes/libproviders-pg-expr_nodes.a |20.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |21.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |21.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml.clib/py3/libpy3python-ruamel.yaml.clib-py3.a |20.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/formats/libyt-client-formats.a |20.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ruamel.yaml/py3/libpy3python-ruamel.yaml-py3.global.a |20.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/structured_token/libproviders-common-structured_token.a |20.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |20.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/skiff/libcommon-schema-skiff.a |20.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/providers/common/schema/libproviders-common-schema.a |21.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |21.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |21.3%| PREPARE $(YMAKE_PYTHON3-4256832079) |21.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |22.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |22.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a |22.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |22.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |22.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |22.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tenacity/py3/libpy3python-tenacity-py3.global.a |22.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a |22.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |22.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a |22.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/job/libproviders-yt-job.a |22.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |22.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.global.a |23.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |23.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |23.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |24.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |24.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/websocket-client/libpy3contrib-python-websocket-client.global.a |24.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |24.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |24.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |24.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql/libproviders-common-mkql.a |24.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests-oauthlib/libpy3contrib-python-requests-oauthlib.global.a |24.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |24.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |24.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |24.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/rsa/py3/libpy3python-rsa-py3.global.a |24.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |25.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |25.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4/libv1-lexer-antlr4.a |25.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |25.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |25.6%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3_ansi/libv1-lexer-antlr3_ansi.a |25.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a |25.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |26.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libcore-issue-protos.a |25.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a |26.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |26.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libclient-yc_public-common.a |26.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/libsql-v1-lexer.a |26.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_ansi/libv1-lexer-antlr4_ansi.a |26.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |26.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libclient-yc_public-iam.a |26.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3_ansi/libv1-proto_parser-antlr3_ansi.a |26.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr3/libv1-proto_parser-antlr3.a |26.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/find_root/libpy3library-python-find_root.global.a |26.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |26.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |27.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |27.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |27.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |27.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/api/service/protos/libpy3api-service-protos.global.a |27.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |27.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |27.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |27.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/proto/libpy3providers-s3-proto.global.a |27.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |27.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |27.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/walle/libpy3tools-cfg-walle.global.a |27.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |27.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |27.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/libpy3ydb-tools-cfg.global.a |27.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |28.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |28.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |28.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/iam/libclient-yc_private-iam.a |28.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |28.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |28.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |28.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |28.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |28.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |28.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |29.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |29.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |29.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |29.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |29.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libpy3scheme-defaults-protos.global.a |29.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |29.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |29.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |29.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.global.a |29.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/certifi/libpy3library-python-certifi.global.a |29.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |29.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |29.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |29.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |29.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |30.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/cores/libpy3library-python-cores.global.a |30.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/filelock/libpy3library-python-filelock.global.a |30.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/draft/libsrc-client-draft.a |30.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |30.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/binary_json/libessentials-types-binary_json.a |30.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |30.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_aggrcount.cpp |30.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_addmember.cpp |30.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |31.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |31.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |31.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |31.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |31.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |31.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |31.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpy3cpython-symbols-python.global.a |31.1%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |31.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |31.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |31.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |31.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |31.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/fs/libpy3library-python-fs.global.a |31.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |31.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |31.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |31.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |31.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |31.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.global.a |31.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |31.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |32.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |32.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |32.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |32.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |32.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4/libv1-proto_parser-antlr4.a |32.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |32.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |32.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |32.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |32.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi/libproto_ast-gen-v1_ansi.a |33.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/files/libydb_cli-dump-files.a |33.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/libsql-v1-proto_parser.a |33.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/proto_parser/antlr4_ansi/libv1-proto_parser-antlr4_ansi.a |33.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |33.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/params/libsrc-client-params.a |33.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |33.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |33.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/common/libcpp-mapreduce-common.a 
|33.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/util/libydb_cli-dump-util.a |33.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/logger/libimpl-ydb_internal-logger.a |33.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |33.8%| PREPARE $(LLD_ROOT-3808007503) |33.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |33.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |34.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |33.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/db_driver_state/libimpl-ydb_internal-db_driver_state.a |33.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/gateway/libproviders-solomon-gateway.a |33.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |34.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/iam/libsrc-client-iam.a |34.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extension_common/libsrc-client-extension_common.a |34.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/extensions/solomon_stats/libclient-extensions-solomon_stats.a |34.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |34.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |34.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/out/libapi-protos-out.a |34.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/expr_nodes/libproviders-s3-expr_nodes.a |34.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |34.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |34.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/types/uuid/libessentials-types-uuid.a |34.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/failure_injector/libessentials-utils-failure_injector.a |35.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actors/libyql-utils-actors.a |35.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/utils/actor_log/libyql-utils-actor_log.a |35.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/public/ydb_issue/libyql-public-ydb_issue.a |35.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/nc_private/accessservice/libclient-nc_private-accessservice.a |35.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/accessservice/libclient-yc_private-accessservice.a |35.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/expr_nodes/libproviders-ydb-expr_nodes.a |35.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/statistics/libproviders-s3-statistics.a |35.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/range_helpers/libproviders-s3-range_helpers.a |35.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/serializations/libproviders-s3-serializations.a |35.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |35.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |35.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/native/libpq-gateway-native.a |35.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/types/dynumber/libessentials-types-dynumber.a |35.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |36.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |35.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/task_meta/libproviders-pq-task_meta.a |36.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |36.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/config/libproviders-dq-config.a |36.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/proto/libproviders-generic-proto.a |36.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/task_runner/libdq-actors-task_runner.a |36.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/db_id_async_resolver/libproviders-common-db_id_async_resolver.a |36.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/arrow/libproviders-common-arrow.a |36.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |37.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/helper/libproviders-dq-helper.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/backtrace/libessentials-utils-backtrace.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/task_runner_actor/libproviders-dq-task_runner_actor.a |37.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.a |37.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |37.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/worker_manager/interface/libdq-worker_manager-interface.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/planner/libproviders-dq-planner.a |37.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/input_transforms/libdq-actors-input_transforms.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/http_gateway/libproviders-common-http_gateway.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/state/libyql-dq-state.a |37.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |37.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/cache/liblibrary-login-cache.a |37.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/libyql-essentials-ast.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |38.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a |38.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libdq-actors-protos.a |38.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |38.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |38.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |38.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/helpers/liblibrary-actors-helpers.a |38.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a |38.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |38.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/opt/libproviders-dq-opt.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |38.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/s3/actors/libproviders-s3-actors.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |39.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a |39.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.global.a |39.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |39.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnsresolver/liblibrary-actors-dnsresolver.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |40.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/provider/libproviders-pq-provider.a |40.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |40.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |40.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator/libcore-tx-tx_allocator.a |40.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/libessentials-sql-v1.a |40.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |40.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |41.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |41.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |41.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |41.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/service/libtx-conveyor-service.a |41.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/s3/provider/libproviders-s3-provider.a |41.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |41.7%| PREPARE $(PYTHON) |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/abstract/libsession-selector-abstract.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/s3/libsession-storage-s3.global.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/abstract/libsbuckets-logic-abstract.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/one_head/libsbuckets-logic-one_head.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/common/liboptimizer-sbuckets-common.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_allocator_client/libcore-tx-tx_allocator_client.a |42.6%| 
[AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/service/libtx-limiter-service.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/constructor/libreader-sys_view-constructor.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |43.1%| PREPARE $(CLANG_FORMAT-2212207123) |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/usage/libtx-limiter-usage.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |41.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |41.5%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |41.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tx_proxy/libcore-tx-tx_proxy.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |41.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |41.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/bs/libcolumnshard-blobs_action-bs.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |42.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/service/libcore-statistics-service.a |42.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_cache/libcore-tx-scheme_cache.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool_classifier/libgateway-behaviour-resource_pool_classifier.global.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |42.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/utils/libkqp-gateway-utils.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/libtx-replication-ydb_proxy.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/service/libtx-replication-service.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__data_erasure_manager.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/user_attributes.cpp |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |44.2%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_element.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/libcore-kqp-proxy_service.a |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_effective_acl.cpp |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cansel_build_index.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |46.9%| PREPARE $(FLAKE8_PY3-715603131) |47.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_data_erasure_manager.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp 
|49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_billing_helpers.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |49.5%| PREPARE $(TEST_TOOL_HOST-sbr:8330113388) |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log_fragment.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__root_data_erasure_manager.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_identificators.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_scheme_uploader.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_getter.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_query_executor.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_types.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/schemeshard/schemeshard_types.h_serialized.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_utils.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/libcore-control-lib.a |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |50.7%| [CP] {default-linux-x86_64, release, asan} $(B)/common_test.context |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/libcore-tx-sequenceproxy.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/MSF/liblib-DebugInfo-MSF.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/public/libtx-sequenceshard-public.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Net/liblibs-poco-Net.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |50.6%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |50.6%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/resharding/libalter-in_store-resharding.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/libcontrib-libs-opentelemetry-proto.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libunwind/libcontrib-libs-libunwind.a |50.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon64/liblibs-base64-neon64.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/double-conversion/libcontrib-libs-double-conversion.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan_cxx/libclang_rt.asan_cxx-x86_64.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fastlz/libcontrib-libs-fastlz.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan_static/libclang_rt.asan_static-x86_64.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/builtins/liblibs-cxxsupp-builtins.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxxabi-parts/liblibs-cxxsupp-libcxxabi-parts.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/libllvm16-lib-MC.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/common/liblibs-brotli-common.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/neon32/liblibs-base64-neon32.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/plain32/liblibs-base64-plain32.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/avx2/liblibs-base64-avx2.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/libcontrib-libs-cctz.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/backtrace/libcontrib-libs-backtrace.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cctz/tzdata/liblibs-cctz-tzdata.global.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/dec/liblibs-brotli-dec.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/expat/libcontrib-libs-expat.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/base64/ssse3/liblibs-base64-ssse3.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/crcutil/libcontrib-libs-crcutil.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/address_sorting/libgrpc-third_party-address_sorting.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/c-ares/libcontrib-libs-c-ares.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_thread/liblibs-libevent-event_thread.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/orc-format/liblibs-apache-orc-format.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/brotli/enc/liblibs-brotli-enc.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_extra/liblibs-libevent-event_extra.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |51.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_core2/liblibs-hyperscan-runtime_core2.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/PerfJITEvents/liblib-ExecutionEngine-PerfJITEvents.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libiconv/static/liblibs-libiconv-static.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clang18-rt/lib/asan/libclang_rt.asan-x86_64.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr3_cpp_runtime/libcontrib-libs-antlr3_cpp_runtime.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libaio/static/liblibs-libaio-static.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libfyaml/libcontrib-libs-libfyaml.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/libcontrib-libs-linuxvdso.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRPrinter/libllvm16-lib-IRPrinter.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/liburing/libcontrib-libs-liburing.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_core/liblibs-libevent-event_core.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/curl/libcontrib-libs-curl.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/MCJIT/liblib-ExecutionEngine-MCJIT.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/CodeView/liblib-DebugInfo-CodeView.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitstream/Reader/liblib-Bitstream-Reader.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/avro/liblibs-apache-avro.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_avx2/liblibs-hyperscan-runtime_avx2.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/AsmParser/libllvm16-lib-AsmParser.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |51.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/TargetInfo/libTarget-X86-TargetInfo.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-core/liblibs-aws-sdk-cpp-aws-cpp-sdk-core.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libcontrib-libs-googleapis-common-protos.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Passes/libllvm16-lib-Passes.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/actors/libgrpc-server-actors.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nayuki_md5/libcontrib-libs-nayuki_md5.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/conclusion/libydb-library-conclusion.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lz4/libcontrib-libs-lz4.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |51.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tcmalloc/malloc_extension/liblibs-tcmalloc-malloc_extension.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/snappy/libcontrib-libs-snappy.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yajl/libcontrib-libs-yajl.a |51.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/hash/libabseil-cpp-tstring-y_absl-hash.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/profiling/libabseil-cpp-tstring-y_absl-profiling.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/NetSSL_OpenSSL/liblibs-poco-NetSSL_OpenSSL.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/container/libabseil-cpp-tstring-y_absl-container.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/status/libabseil-cpp-tstring-y_absl-status.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/random/libabseil-cpp-tstring-y_absl-random.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/base/libabseil-cpp-absl-base.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/synchronization/libabseil-cpp-tstring-y_absl-synchronization.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/container/libabseil-cpp-absl-container.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/types/libabseil-cpp-absl-types.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/status/libabseil-cpp-absl-status.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/synchronization/libabseil-cpp-absl-synchronization.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/debugging/libabseil-cpp-absl-debugging.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/time/libabseil-cpp-absl-time.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-sdkutils/librestricted-aws-aws-c-sdkutils.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-checksums/librestricted-aws-aws-checksums.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/re2/libcontrib-libs-re2.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/fcontext_impl/libboost-context-fcontext_impl.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/impl_common/libboost-context-impl_common.a |51.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/context/ucontext_impl/libboost-context-ucontext_impl.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/coroutine/librestricted-boost-coroutine.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-s3/librestricted-aws-aws-c-s3.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a 
|51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/exception/librestricted-boost-exception.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Instrumentation/liblib-Transforms-Instrumentation.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/random/librestricted-boost-random.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/iostreams/librestricted-boost-iostreams.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/libydb-library-schlab.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/counter_time_keeper/liblibrary-persqueue-counter_time_keeper.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-s3/liblibs-aws-sdk-cpp-aws-cpp-sdk-s3.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/http-parser/libcontrib-restricted-http-parser.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/graph/librestricted-boost-graph.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/googletest/googlemock/librestricted-googletest-googlemock.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/thread/librestricted-boost-thread.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/binsaver/liblibrary-cpp-binsaver.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/fastlz/libblockcodecs-codecs-fastlz.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/bzip/libblockcodecs-codecs-bzip.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/legacy_zstd06/libblockcodecs-codecs-legacy_zstd06.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Vectorize/liblib-Transforms-Vectorize.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/serialization/librestricted-boost-serialization.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lz4/libblockcodecs-codecs-lz4.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/lzma/libblockcodecs-codecs-lzma.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zlib/libblockcodecs-codecs-zlib.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/snappy/libblockcodecs-codecs-snappy.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/zstd/libblockcodecs-codecs-zstd.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/locale/librestricted-boost-locale.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IR/libllvm16-lib-IR.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/liblibrary-cpp-blockcodecs.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/colorizer/liblibrary-cpp-colorizer.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/atomizer/libcpp-containers-atomizer.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/config/liblibrary-cpp-config.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a 
|51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/disjoint_interval_tree/libcpp-containers-disjoint_interval_tree.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/compact_vector/libcpp-containers-compact_vector.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/ring_buffer/libcpp-containers-ring_buffer.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/paged_vector/libcpp-containers-paged_vector.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/intrusive_avl_tree/libcpp-containers-intrusive_avl_tree.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/comptrie/libcpp-containers-comptrie.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/core/libcpp-blockcodecs-core.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_vector/libcpp-containers-stack_vector.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/str_map/libcpp-containers-str_map.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cache/liblibrary-cpp-cache.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/engine/libcpp-coroutine-engine.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/coroutine/listener/libcpp-coroutine-listener.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cpuid_check/liblibrary-cpp-cpuid_check.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dbg_output/liblibrary-cpp-dbg_output.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/accessors/libcpp-deprecated-accessors.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/greedy_dict/libcpp-codecs-greedy_dict.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schemu/liblibrary-schlab-schemu.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42/libfarmhash-arch-sse42.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/lower_case/libcpp-digest-lower_case.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/libcontrib-libs-farmhash.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |51.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/html/escape/libcpp-html-escape.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lua/liblibrary-cpp-lua.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/system/libsystem_allocator.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hdr_histogram/libcontrib-libs-hdr_histogram.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/grpc/third_party/upb/libgrpc-third_party-upb.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libbz2/libcontrib-libs-libbz2.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/flatbuffers/libcontrib-libs-flatbuffers.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libc_compat/libcontrib-libs-libc_compat.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signal_backtrace/libydb-library-signal_backtrace.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |52.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/libs/libidn/static/liblibs-libidn-static.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/ut_common/libtx-datashard-ut_common.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse42_aesni/libfarmhash-arch-sse42_aesni.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/linuxvdso/original/liblibs-linuxvdso-original.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/fmt/libcontrib-libs-fmt.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/runtime_corei7/liblibs-hyperscan-runtime_corei7.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Scalar/liblib-Transforms-Scalar.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/hyperscan/libcpp-regex-hyperscan.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/hyperscan/libcontrib-libs-hyperscan.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest_main/libcpp-testing-unittest_main.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/hot_swap/libcpp-threading-hot_swap.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/skip_list/libcpp-threading-skip_list.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/type_info/liblibrary-cpp-type_info.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/global/libcpp-yt-global.a |52.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/BinaryFormat/libllvm16-lib-BinaryFormat.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/error/libcpp-yt-error.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ytalloc/api/libcpp-ytalloc-api.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/version/libversion_definition.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/proto/libbackup-common-proto.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/controller/libcore-backup-controller.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/liblibrary-cpp-yson.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/impl/libcore-backup-impl.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Writer/liblib-Bitcode-Writer.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Bitcode/Reader/liblib-Bitcode-Reader.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/util/draft/libutil-draft.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/anubis_osiris/libblobstorage-vdisk-anubis_osiris.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |52.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/common/libblobstorage-vdisk-common.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/proto/libsrc-client-proto.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/json_value/libpublic-lib-json_value.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/defrag/libblobstorage-vdisk-defrag.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/kicli/liblib-deprecated-kicli.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/operation/libsrc-client-operation.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/icu/libcontrib-libs-icu.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/AsmPrinter/liblib-CodeGen-AsmPrinter.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/parser/libcommon-schema-parser.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libevent/event_openssl/liblibs-libevent-event_openssl.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/Shared/libExecutionEngine-Orc-Shared.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/libllvm16-lib-ExecutionEngine.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libproviders-common-proto.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/Symbolize/liblib-DebugInfo-Symbolize.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/mkql/libcommon-schema-mkql.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |52.4%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/GlobalISel/liblib-CodeGen-GlobalISel.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/IRReader/libllvm16-lib-IRReader.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/provider/libproviders-common-provider.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/transform/libproviders-common-transform.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/common/libcpp-testing-common.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/PDB/liblib-DebugInfo-PDB.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/udf_resolve/libproviders-common-udf_resolve.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/pg/provider/libproviders-pg-provider.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |52.6%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |52.6%| 
[AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/decimal/libessentials-public-decimal.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/absl_flat_hash/libcpp-containers-absl_flat_hash.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/libyql-dq-actors.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/result_format/libessentials-public-result_format.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/exception_policy/libudf-service-exception_policy.global.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/cityhash-1.0.2/libcontrib-restricted-cityhash-1.0.2.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/ObjCARC/liblib-Transforms-ObjCARC.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/SelectionDAG/liblib-CodeGen-SelectionDAG.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/libessentials-public-udf.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/atomic/librestricted-boost-atomic.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/spilling/libdq-actors-spilling.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/chrono/librestricted-boost-chrono.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Analysis/libllvm16-lib-Analysis.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/random/libabseil-cpp-absl-random.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzma/libcontrib-libs-lzma.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sasl/libcontrib-libs-sasl.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/compute/libdq-actors-compute.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/profiling/libabseil-cpp-absl-profiling.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lua/libcontrib-libs-lua.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |52.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/abseil-cpp-tstring/y_absl/debugging/libabseil-cpp-tstring-y_absl-debugging.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Support/libllvm16-lib-Support.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/common/libyql-dq-common.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/config/libydb-services-config.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/type_ann/libyql-dq-type_ann.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/comp_nodes/libyql-dq-comp_nodes.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/base/libabseil-cpp-tstring-y_absl-base.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/utf8proc/libcontrib-libs-utf8proc.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/time/libabseil-cpp-tstring-y_absl-time.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/flags/libabseil-cpp-tstring-y_absl-flags.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v0/lexer/libsql-v0-lexer.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/transform/libyql-dq-transform.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/tasks/libyql-dq-tasks.a |52.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libyql-dq-proto.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/InstCombine/liblib-Transforms-InstCombine.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/libcontrib-libs-pcre.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/clickhouse/proto/libproviders-clickhouse-proto.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/validators/libcms-console-validators.a |53.0%| [AR] {BAZEL_DOWNLOAD} 
[… several hundred intermediate |NN.N%| [AR]/[CC] {BAZEL_DOWNLOAD} progress entries (cache downloads of YDB, YQL, contrib, and ClickHouse-client-UDF libraries and sources) elided; the {FAILED} and PREPARE entries from this span are kept below, in order of appearance …]
|53.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut
|51.7%| [CC] {BAZEL_DOWNLOAD, FAILED} $(S)/ydb/core/kqp/opt/kqp_query_plan.cpp
|50.1%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a
|50.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut
|50.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a
|52.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring
|52.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace
|52.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut
|53.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots
|53.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator
|53.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer
|53.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer
|53.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut
|53.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test
|53.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut
|53.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query
|53.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure
|53.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut
|53.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut
|53.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots
|53.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large
|53.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication
|53.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut
|53.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan
|50.1%| PREPARE $(JDK_DEFAULT-472926544)
|49.0%| PREPARE $(JDK17-472926544)
|48.7%| PREPARE $(CLANG-1922233694)
|48.4%| PREPARE $(WITH_JDK17-sbr:7832760150)
[… further progress entries elided; the excerpt ends mid-entry …]
{BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/io/libcpp-mapreduce-io.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/solomon/actors/libproviders-solomon-actors.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/value/libpublic-lib-value.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/fq/libpublic-lib-fq.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/events/libclient-yc_public-events.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/command_base/libydb_cli_command_base.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/scheme_types/libpublic-lib-scheme_types.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sdk_core_access/libydb_sdk_core_access.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/ydb_discovery/libydb_cli_command_ydb_discovery.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.a |47.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/yson_value/libpublic-lib-yson_value.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/experimental/libpublic-lib-experimental.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/libydb-core-keyvalue.a |47.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/adapters/issue/libcpp-adapters-issue.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/include/ydb-cpp-sdk/client/topic/libydb-cpp-sdk-client-topic.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/libsrc-client-common_client.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/common_client/impl/libclient-common_client-impl.a |47.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/ydb/provider/libproviders-ydb-provider.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libapi-grpc.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/codegen/llvm16/libcodec-codegen-llvm16.global.a |47.5%| PREPARE $(WITH_JDK-sbr:7832760150) |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/common/libcommon.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/thread_pool/libimpl-ydb_internal-thread_pool.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/backup/common/libcore-backup-common.a |47.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/client/iam_private/libsrc-client-iam_private.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_endpoints/libclient-impl-ydb_endpoints.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/driver/libsrc-client-driver.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/impl/libclient-persqueue_public-impl.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/common/libimpl-ydb_internal-common.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/export/libsrc-client-export.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/discovery/libsrc-client-discovery.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/metrics/libproviders-common-metrics.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/kqp_session_common/libimpl-ydb_internal-kqp_session_common.a |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/coordination/libsrc-client-coordination.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/expr_nodes/libproviders-yt-expr_nodes.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/session_pool/libimpl-ydb_internal-session_pool.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/make_request/libimpl-ydb_internal-make_request.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/plain_status/libimpl-ydb_internal-plain_status.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/retry/libimpl-ydb_internal-retry.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateways_utils/libproviders-common-gateways_utils.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/operation/libclient-yc_private-operation.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/datastreams/libsrc-client-datastreams.a |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/impl/ydb_internal/grpc_connections/libimpl-ydb_internal-grpc_connections.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zlib/libcontrib-libs-zlib.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/schema/expr/libcommon-schema-expr.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/llvm16/libyt-comp_nodes-llvm16.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/qplayer/libyt-gateway-qplayer.a |47.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/local/libcolumnshard-blobs_action-local.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/events/libdata_sharing-modification-events.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/codec/libproviders-yt-codec.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/import/libsrc-client-import.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/malloc/libcpp-yt-malloc.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/lib/libyt-gateway-lib.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.global.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |47.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/actorlib_impl/libydb-core-actorlib_impl.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/libfq-libs-config.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |47.1%| PREPARE $(GDB) |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/impl/libclient-query-impl.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/node/libcpp-yson-node.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/common/libproviders-yt-common.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/manager/libcolumnshard-data_sharing-manager.a |46.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/comp_nodes/dq/llvm16/libcomp_nodes-dq-llvm16.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/config_clusters/libyt-lib-config_clusters.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.a |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cancellation/libcpp-threading-cancellation.a |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/atomic/libcpp-threading-atomic.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/config/libessentials-providers-config.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/terminate_handler/liblibrary-cpp-terminate_handler.global.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libapi-grpc-draft.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/liblibs-config-protos.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |46.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1/libproto_ast-gen-v1.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |46.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sighandler/liblibrary-cpp-sighandler.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/transaction/libengines-reader-transaction.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a |46.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/libproviders-result-expr_nodes.a |46.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/counters/libstorage-actualizer-counters.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/abstract/libstorage-optimizer-abstract.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |45.7%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/native/libyt-gateway-native.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/constructor/liboptimizer-sbuckets-constructor.global.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/counters/liboptimizer-sbuckets-counters.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/index/liboptimizer-sbuckets-index.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/logic/slices/libsbuckets-logic-slices.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/events/libcolumnshard-export-events.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/sbuckets/optimizer/liboptimizer-sbuckets-optimizer.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/abstract/libsession-storage-abstract.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/selector/backup/libsession-selector-backup.global.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.a |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/storage/tier/libsession-storage-tier.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipv6_address/liblibrary-cpp-ipv6_address.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/cgiparam/liblibrary-cpp-cgiparam.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/insert_table/libcolumnshard-normalizer-insert_table.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |45.5%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpublic-issue-protos.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/blockcodecs/codecs/brotli/libblockcodecs-codecs-brotli.global.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/types/libabseil-cpp-tstring-y_absl-types.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/arrow/libpublic-udf-arrow.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/uriparser/libcontrib-restricted-uriparser.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/liblibrary-cpp-getopt.global.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bit_io/liblibrary-cpp-bit_io.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/case_insensitive_string/liblibrary-cpp-case_insensitive_string.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/charset/liblibrary-cpp-charset.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/tz/libpublic-udf-tz.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/libessentials-public-issue.a |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/analytics/liblwtrace-mon-analytics.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/support/libpublic-udf-support.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/provider/libproviders-result-provider.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/numeric/libabseil-cpp-tstring-y_absl-numeric.a |45.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/numeric/libabseil-cpp-absl-numeric.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-crt-cpp/librestricted-aws-aws-crt-cpp.a |45.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/restricted/thrift/libcontrib-restricted-thrift.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-cal/librestricted-aws-aws-c-cal.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/hash/libabseil-cpp-absl-hash.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg_dummy/libessentials-sql-pg_dummy.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/flags/libabseil-cpp-absl-flags.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp-tstring/y_absl/strings/libabseil-cpp-tstring-y_absl-strings.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-common/librestricted-aws-aws-c-common.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd/libcontrib-libs-zstd.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/abseil-cpp/absl/log/libabseil-cpp-absl-log.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/xxhash/libcontrib-libs-xxhash.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/settings/libessentials-sql-settings.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/servicecontrol/libclient-yc_private-servicecontrol.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/signer/libfq-libs-signer.a |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/repl/libblobstorage-vdisk-repl.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/nodes/libcore-sys_view-nodes.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/zstd06/libcontrib-libs-zstd06.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |45.0%| PREPARE $(CLANG-2518231432) |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/scheme_cache_lib/libcore-client-scheme_cache_lib.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |44.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/quota_manager/libfq-libs-quota_manager.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr3/libv1-lexer-antlr3.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/libydb-core-control.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/libsql-v1-format.global.a |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/events/liblibs-rate_limiter-events.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |44.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/control_plane_service/liblibs-rate_limiter-control_plane_service.a |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/interface/liblibs-shared_resources-interface.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/result_formatter/libfq-libs-result_formatter.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyJWT/py3/libpy3python-PyJWT-py3.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/cpp/mapreduce/library/user_job_statistics/libmapreduce-library-user_job_statistics.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |43.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/events/liblibs-control_plane_proxy-events.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/compilation/libkqp-common-compilation.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiosignal/libpy3contrib-python-aiosignal.global.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/libfq-libs-row_dispatcher.a |44.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/asttokens/libpy3contrib-python-asttokens.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |44.1%| [BI] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/buildinfo_data.h |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/attrs/py3/libpy3python-attrs-py3.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libcore-scheme-protos.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cachetools/py3/libpy3python-cachetools-py3.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/codecs/libcore-persqueue-codecs.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/events/libcore-persqueue-events.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/charset-normalizer/libpy3contrib-python-charset-normalizer.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/decorator/py3/libpy3python-decorator-py3.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cffi/py3/libpy3python-cffi-py3.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |44.2%| PREPARE $(CLANG18-3363451693) |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/cryptography/py3/libpy3python-cryptography-py3.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/executing/libpy3contrib-python-executing.global.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/libydb-core-scheme.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/auth/libcore-sys_view-auth.a |43.3%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/mkql/libproviders-dq-mkql.a |43.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/fbs/libclient-arrow-fbs.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/google-auth/py3/libpy3python-google-auth-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split/libproto_ast-gen-v1_proto_split.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/metrics/libfq-libs-metrics.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/provider/exec/libdq-provider-exec.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |43.4%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svn_interface.c |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/session/libcolumnshard-bg_tasks-session.a |43.4%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/host/libcore-kqp-host.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/arrow/libyt-client-arrow.a |43.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |43.5%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svnversion.cpp |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/libtx-columnshard-blobs_action.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/abstract/libcolumnshard-data_accessor-abstract.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/common/libcolumnshard-counters-common.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.collections/libpy3contrib-python-jaraco.collections.global.a |43.6%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.context/libpy3contrib-python-jaraco.context.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/status/libdata_sharing-initiator-status.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/tasks/libdata_sharing-modification-tasks.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |43.7%| [CC] {default-linux-x86_64, 
release, asan} $(S)/library/cpp/build_info/build_info_static.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/spec_patch.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/column_converters/libyt-library-column_converters.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/libydb-core-mind.a |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/private.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp 
|43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/libydb-core-testlib.a |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |43.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp 
|43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/libydb-core-persqueue.a |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |42.9%| PREPARE $(CLANG16-1380963495) |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |42.7%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |42.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/ytree/virtual.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |42.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |42.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |42.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |42.6%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |42.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_partition_reader.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/queue_rowset.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/table_client/adapters.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |42.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |41.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |41.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/crypto/crypto.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |42.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/signal_registry.cpp |42.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |42.2%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |42.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.functools/py3/libpy3python-jaraco.functools-py3.global.a |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/cfg/k8s_api/libpy3tools-cfg-k8s_api.global.a |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |42.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/ydbd_slice/libpy3ydbd_slice.global.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.global.a |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/matplotlib-inline/libpy3contrib-python-matplotlib-inline.global.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/optimizer/libreader-sys_view-optimizer.global.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/portions/libreader-sys_view-portions.global.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a 
|42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |42.4%| RESOURCE $(sbr:4966407557) |42.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/insert_table/libcolumnshard-engines-insert_table.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jsonschema/py3/libpy3python-jsonschema-py3.global.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jedi/py3/libpy3python-jedi-py3.global.a |42.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |42.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |42.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |42.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/granules/libreader-sys_view-granules.global.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.a |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/libtx-columnshard-engines.a |42.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |42.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/ut/ydb-services-config-ut |42.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/chunks/libreader-sys_view-chunks.global.a |42.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |42.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/sys_view/abstract/libreader-sys_view-abstract.a |42.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |42.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |42.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |42.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |42.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |42.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |42.2%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |42.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb |42.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |42.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |42.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |42.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |42.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |42.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |42.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |42.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |42.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/portions/libcolumnshard-engines-portions.a |42.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |42.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |42.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |42.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |42.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |42.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |42.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |42.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/oauthlib/libpy3contrib-python-oauthlib.global.a |42.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |42.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |42.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/packaging/py3/libpy3python-packaging-py3.global.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |42.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/parso/py3/libpy3python-parso-py3.global.a |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |42.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |42.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/base_utils/libbase_utils.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |42.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pluggy/py3/libpy3python-pluggy-py3.global.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/abstract/libcolumnshard-normalizer-abstract.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/docapi/libydb-core-docapi.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/column/libengines-scheme-column.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tablet/libtx-columnshard-tablet.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |42.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |43.0%| [SB] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |43.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/events/liblibs-control_plane_config-events.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_data_erasure/ut_data_erasure.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_config/libfq-libs-control_plane_config.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_base/libcli_base.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |43.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/kubernetes/libpy3contrib-python-kubernetes.global.a |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |43.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.global.a |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |43.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.global.a |43.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/objcopy_c55121179eeb3b5753498290c4.o |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |43.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |43.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_6b37760fb6a28054d0feafd61d.o |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |43.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_854d6cc7a0cc5cdd793cfc1e6d.o |43.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_ec3163328cb5ab8f222e66dd41.o |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/config/bsconfig_ut.cpp |43.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7eade8c49389813f8c36b72b5b.o |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |43.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_d1ba757d227a70ff4910717854.o |43.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_5923b362516b6632b9769a5db2.o |43.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_1b4bf9f1f46a6111d16337dee0.o |43.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_340b457b8174f6293d5748588e.o |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/formats/libyt-library-formats.a |43.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_a40d299361b06d7622f78b2238.o |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |43.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_f3c323ef80ada193284f036d44.o |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |43.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_87b299e07b15c86f4f50f458ef.o |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.global.a |43.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_5831cbd77ecc92a241b6cf1ea2.o |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |43.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_445797246443360525d31550d1.o |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |43.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_3ea8aa67e7c24c4f0e3b0406b9.o |43.3%| PREPARE $(FLAKE8_LINTER-sbr:6561765464) |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/libpy3stress-olap_workload-workload.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/boto3/py3/libpy3python-boto3-py3.global.a |43.3%| [UN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |43.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8e19d47784789c55156c57f816.o |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql_simple_file/libproviders-common-mkql_simple_file.a |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |43.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |43.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |43.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |43.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reshuffle_kmeans.cpp |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/patched/replxx/librestricted-patched-replxx.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |43.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |43.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |43.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |43.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |43.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |43.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/ydb-tests-sql |43.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |43.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_e7477203b27fa0321cf18fd7ee.o |43.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |43.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |43.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |43.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |43.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_bf578b7161cc94bf18488d04ca.o |43.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_55f2556d6eafcd77ebc4c517d4.o |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.a |43.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/0dff0b13f2d02975a4a973a1e8_raw.auxcpp |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.global.a |43.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_f928a40774b17a9d6cd7cabd2c.o |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |43.6%| PREPARE $(BLACK_LINTER-sbr:8107723363) |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |43.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8649dacdf340abe7c53df69638.o |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/common/libpy3tests-stress-common.global.a |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |43.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |43.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |43.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_a5c82b9ecb3bf738ea9e628123.o |43.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_3d6916930a438b51675ef6dda7.o |43.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_9be8b6745d0fa150928bab4206.o |43.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |43.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_93dc3386250916dfae1ecb9b13.o |43.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_local_kmeans.cpp |43.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |43.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |43.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/libcommands-interactive-highlight.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/complete/libcommands-interactive-complete.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/config/libsrc-client-config.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/attributes_md5_ut.cpp |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure/libv1-lexer-antlr4_pure.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/infly_ut.cpp |43.7%| [ld] {default-linux-x86_64, release, asan} $(B)/tools/flake8_linter/flake8_linter |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/message_delay_stats_ut.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/libsql-v1-complete.a |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/metering_ut.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4-c3/libcontrib-libs-antlr4-c3.a |43.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_5b5c3367c789898aa5a6cae866.o |43.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/ut/sha256_ut.cpp |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure_ansi/libv1-lexer-antlr4_pure_ansi.a |43.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/objcopy_774cbd1f10ee287899289ecb3f.o |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |43.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_8e57113197bb359e3999b04aab.o |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.global.a |43.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |43.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_9818d2b70aad7db98a0f9c044c.o |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a |43.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_2f7ac0f750374152d13c6bfbcf.o |43.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_023a23fcfdf79043d814bb8aab.o |43.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_45b6981aed17dda33d43217f52.o |43.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_4f92526e13553482736b942b2c.o |43.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.global.a |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |43.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/log/tests/objcopy_4f9d76a39d2f7ba2b9f198f28c.o |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.global.a |43.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_3209cda00462f2963f3cbbc912.o |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |43.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_1a397c908c9859dc40a771ddf1.o |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |43.8%| [ld] {default-linux-x86_64, release, asan} $(B)/tools/black_linter/black_linter |43.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_antlr4/libantlr_ast-gen-v1_antlr4.a |43.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_ansi_antlr4/libantlr_ast-gen-v1_ansi_antlr4.a |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_f8b2cbafb1fed0e25bf9683c2d.o |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/actors/liblibs-control_plane_proxy-actors.a |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_5294a064c14cf5a49516321590.o |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_71d73932c95681fccfc7215041.o |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_c7c229be41e9b028572ad1aab3.o |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_488333b1ebd4c1d6d8ec5bcb8f.o |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_1555e67a3dd43a3e7f09bf8eee.o |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_0665be2c60952715f39eb25568.o |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/responses/py3/libpy3python-responses-py3.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.global.a |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |43.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/math/lib/libcommon-math-lib.a |43.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/export.cpp |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |43.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_cca8dcd66462c9ca3c57fcb78e.o |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.global.a |43.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_b34c6a8a5501db208eebc5d8e4.o |43.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytz/py3/libpy3python-pytz-py3.global.a |44.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_3e8bf44ed681ff82ae143aaec3.o |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.global.a |44.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/df691ac52d0b755cb039db39b5_raw.auxcpp |44.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_9a3dabea847c21e0b4fa4cda26.o |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |43.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.global.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |44.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |44.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/client/ydb_topic/include/libclient-ydb_topic-include.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |44.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a >> test_commit.py::flake8 [GOOD] >> test_timeout.py::flake8 [GOOD] |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp >> test_compatibility.py::flake8 [GOOD] >> test_followers.py::flake8 [GOOD] >> test_stress.py::flake8 [GOOD] >> test_cms_erasure.py::flake8 [GOOD] >> test_cms_restart.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_cms_state_storage.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> test_config_with_metadata.py::flake8 [GOOD] >> test_generate_dynamic_config.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/avx2/libhighwayhash-arch-avx2.a |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/py3/libpy3python-moto-py3.global.a |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |44.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/example/ydb-tests-example |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |44.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/port_discovery_ut.cpp |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/libcontrib-libs-highwayhash.a >> base.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_alter_compression.py::flake8 [GOOD] >> test_alter_tiering.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_read_update_write_load.py::flake8 [GOOD] >> test_scheme_load.py::flake8 [GOOD] >> test_simple.py::flake8 [GOOD] >> data_correctness.py::flake8 [GOOD] >> data_migration_when_alter_ttl.py::flake8 [GOOD] >> ttl_delete_s3.py::flake8 [GOOD] >> 
ttl_unavailable_s3.py::flake8 [GOOD] >> unstable_connection.py::flake8 [GOOD] |44.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |44.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/highwayhash/arch/sse41/libhighwayhash-arch-sse41.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/lib/libcommon-unicode_base-lib.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a >> test_leader_start_inflight.py::flake8 [GOOD] >> conftest.py::black [GOOD] >> conftest.py::black [GOOD] >> test_join.py::black [GOOD] >> test_clickhouse.py::black [GOOD] >> test_greenplum.py::black [GOOD] >> test_join.py::black [GOOD] >> test_postgresql.py::black [GOOD] >> test_ydb.py::black [GOOD] |44.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |44.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_f738234258cd034cd5383f92ad.o >> test_workload.py::flake8 [GOOD] |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |44.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_2f0e0ac8198858b9ec9901778e.o |44.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_bf6c9c02784d65e20a01685ce8.o |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/lib/libpy3tests-sql-lib.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/test_meta/libpy3tests-library-test_meta.global.a >> test.py::flake8 [GOOD] >> test_ttl.py::flake8 [GOOD] |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |44.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/datetime/libdatetime_udf.so |44.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |44.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so >> test.py::flake8 [GOOD] >> test_cp_ic.py::flake8 [GOOD] >> test_dispatch.py::flake8 [GOOD] >> test_retry.py::flake8 [GOOD] >> test_retry_high_rate.py::flake8 [GOOD] |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |44.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/objcopy_1406195445f45d950dda89fcd8.o |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |44.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/flake8 >> test_timeout.py::flake8 [GOOD] |44.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 >> test.py::flake8 [GOOD] |44.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 >> test.py::flake8 [GOOD] |44.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 >> test.py::flake8 [GOOD] >> test_schemeshard_limits.py::flake8 [GOOD] >> 
test_postgres.py::flake8 [GOOD] |44.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/compatibility/flake8 >> test_stress.py::flake8 [GOOD] |44.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 >> test.py::flake8 [GOOD] |44.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/flake8 >> utils.py::flake8 [GOOD] |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |44.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/config/flake8 >> test_generate_dynamic_config.py::flake8 [GOOD] |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/trace_ut.cpp >> test.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> scenario.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_case.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_http_api.py::flake8 [GOOD] |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.global.a |44.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 >> test.py::flake8 [GOOD] |44.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut >> test.py::flake8 [GOOD] >> base.py::flake8 [GOOD] >> test_tpch_import.py::flake8 [GOOD] >> alter_compression.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> column_table_helper.py::flake8 [GOOD] >> range_allocator.py::flake8 [GOOD] >> s3_client.py::flake8 [GOOD] >> thread_helper.py::flake8 [GOOD] >> time_histogram.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_client.py::flake8 [GOOD] >> test_kqprun_recipe.py::flake8 [GOOD] |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |44.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |44.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/flake8 >> unstable_connection.py::flake8 [GOOD] |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot >> base.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_update_script_tables.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |44.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |44.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_e31620202d3ba8df14ff2a18e1.o |44.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/flake8 >> test_simple.py::flake8 [GOOD] |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |44.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |44.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/analytics/black >> test_ydb.py::black [GOOD] |44.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/large/flake8 >> test_leader_start_inflight.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> parser.py::flake8 [GOOD] >> test_query.py::flake8 [GOOD] >> test_s3.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> docker_wrapper_test.py::flake8 [GOOD] >> test_alloc_default.py::flake8 [GOOD] |44.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/streaming/black >> test_join.py::black [GOOD] |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a >> test_dc_local.py::flake8 [GOOD] >> test_result_limits.py::flake8 [GOOD] >> test_scheduling.py::flake8 [GOOD] >> test_actorsystem.py::flake8 [GOOD] |44.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |44.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/kv/tests/flake8 >> test_workload.py::flake8 [GOOD] |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |44.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/workload/libpy3stress-simple_queue-workload.global.a |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_3df021aac8504049c53286aea0.o |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2492aafb6862566a2398c9f27e.o |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2de2accab39327e9b10680901f.o |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots >> test_disk.py::flake8 [GOOD] >> test_tablet.py::flake8 [GOOD] >> test_encryption.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |44.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |44.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/flake8 >> test_ttl.py::flake8 [GOOD] |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a >> test.py::flake8 [GOOD] >> allure_utils.py::flake8 [GOOD] >> results_processor.py::flake8 [GOOD] >> utils.py::flake8 [GOOD] >> ydb_cli.py::flake8 [GOOD] >> ydb_cluster.py::flake8 [GOOD] |44.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 >> test.py::flake8 [GOOD] |44.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |44.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/flake8 >> 
test_retry_high_rate.py::flake8 [GOOD] |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows >> test.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> conftest.py::flake8 [GOOD] |44.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 >> test.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts >> select_positive.py::flake8 [GOOD] >> select_positive_with_schema.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_bulkupserts_tpch.py::flake8 [GOOD] >> test_insert_delete_duplicate_records.py::flake8 [GOOD] >> test_insertinto_selectfrom.py::flake8 [GOOD] >> test_tiering.py::flake8 [GOOD] >> test_workload_manager.py::flake8 [GOOD] |44.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |44.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |44.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_12d01741952bd4afa836364d84.o |44.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_15e284a8ecb30c90903e842e70.o |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/mem_alloc/objcopy_19dadf8afeb30502d735b660ce.o >> test_quoting.py::flake8 [GOOD] |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.global.a |44.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_242486256e1af973cd1d5376d1.o |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |44.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/flake8 >> test_schemeshard_limits.py::flake8 [GOOD] |44.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 >> test.py::flake8 [GOOD] |44.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/flake8 >> test_postgres.py::flake8 [GOOD] |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_4ea639aebd19c36ee3cdb4479d.o |44.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |44.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 >> test.py::flake8 [GOOD] |44.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |44.5%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/join/flake8 >> test_case.py::flake8 [GOOD] |44.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.global.a |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_2efdf95387a81f55cf9c81071a.o |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_c623700776b43ee95ec93c56f9.o |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |44.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_5525925030ba2866c1b1040841.o |44.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_2b682e146a665bfa19210b0fd9.o |44.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 >> test.py::flake8 [GOOD] |44.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 >> test.py::flake8 [GOOD] |44.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |44.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut >> test_clickbench.py::flake8 [GOOD] >> test_account_actions.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] >> test_acl.py::flake8 [GOOD] |44.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/flake8 >> test_http_api.py::flake8 [GOOD] >> test_counters.py::flake8 [GOOD] |44.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/s3_import/flake8 >> test_tpch_import.py::flake8 [GOOD] >> test_format_without_version.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/flake8 >> test_kqprun_recipe.py::flake8 [GOOD] >> test_garbage_collection.py::flake8 [GOOD] |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/flake8 >> test.py::flake8 [GOOD] >> test_multiplexing_tables_format.py::flake8 [GOOD] |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/flake8 >> base.py::flake8 [GOOD] >> test_ping.py::flake8 [GOOD] |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp >> test_queue_attributes_validation.py::flake8 [GOOD] >> test_queue_counters.py::flake8 [GOOD] >> test_queue_tags.py::flake8 [GOOD] >> test_queues_managing.py::flake8 [GOOD] >> test_throttling.py::flake8 [GOOD] >> test_restarts.py::flake8 [GOOD] |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_prefix_kmeans.cpp |44.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |44.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/common/flake8 >> ydb_client.py::flake8 [GOOD] |44.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_build_index.cpp |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/docs/generator/flake8 >> parser.py::flake8 [GOOD] |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/flake8 >> test_update_script_tables.py::flake8 [GOOD] |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/solomon/flake8 >> 
test.py::flake8 [GOOD] |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/lib/flake8 >> test_s3.py::flake8 [GOOD] |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/flake8 >> test_scheduling.py::flake8 [GOOD] |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/postgres_integrations/go-libpq/flake8 >> docker_wrapper_test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] |44.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 >> test.py::flake8 [GOOD] |44.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/autoconfig/flake8 >> test_actorsystem.py::flake8 [GOOD] |44.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_compress.cpp |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> common.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_rename.py::flake8 [GOOD] |44.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |44.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |44.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |44.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 >> test.py::flake8 [GOOD] |44.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/flake8 >> test_encryption.py::flake8 [GOOD] |44.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |44.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 >> test.py::flake8 [GOOD] |44.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/flake8 >> test_tablet.py::flake8 [GOOD] |44.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |44.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/lib/flake8 >> ydb_cluster.py::flake8 [GOOD] >> test_example.py::flake8 [GOOD] |44.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 >> test.py::flake8 [GOOD] |44.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |44.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp >> tablet_scheme_tests.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_serverless.py::flake8 [GOOD] >> test_crud.py::flake8 [GOOD] >> test_discovery.py::flake8 [GOOD] >> test_execute_scheme.py::flake8 [GOOD] >> test_indexes.py::flake8 [GOOD] >> test_insert.py::flake8 [GOOD] >> test_isolation.py::flake8 [GOOD] >> test_public_api.py::flake8 [GOOD] >> test_read_table.py::flake8 [GOOD] >> test_session_grace_shutdown.py::flake8 [GOOD] >> test_session_pool.py::flake8 [GOOD] |44.9%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/large/flake8 >> test_workload_manager.py::flake8 [GOOD] |44.9%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 >> test.py::flake8 [GOOD] |44.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_data_cleanup.cpp |44.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |45.0%| 
[TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/flake8 >> test_quoting.py::flake8 [GOOD] |45.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} >> test_crud.py::flake8 [GOOD] >> test_inserts.py::flake8 [GOOD] >> test_kv.py::flake8 [GOOD] |45.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_ensure.cpp |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |45.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a >> test.py::flake8 [GOOD] >> test_pdisk_format_info.py::flake8 [GOOD] >> test_replication.py::flake8 [GOOD] >> test_self_heal.py::flake8 [GOOD] >> test_tablet_channel_migration.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_ydb_backup.py::flake8 [GOOD] >> test_ydb_flame_graph.py::flake8 [GOOD] >> test_ydb_impex.py::flake8 [GOOD] >> test_ydb_scheme.py::flake8 [GOOD] >> test_ydb_scripting.py::flake8 [GOOD] >> test_ydb_sql.py::flake8 [GOOD] >> test_ydb_table.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |45.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/common/flake8 >> test_throttling.py::flake8 [GOOD] |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |45.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/flake8 >> test_tpch.py::flake8 [GOOD] |45.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 >> test.py::flake8 [GOOD] |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp >> test_sql.py::flake8 [GOOD] |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |45.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 >> test.py::flake8 [GOOD] |45.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/oltp_workload/tests/flake8 >> test_workload.py::flake8 [GOOD] |45.0%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/flake8 >> test_restarts.py::flake8 [GOOD] |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |45.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/552c373b422666221556a5a9bd_raw.auxcpp >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime_with_service_name.py::flake8 [GOOD] >> select_positive_with_service_name.py::flake8 [GOOD] >> test.py::flake8 [GOOD] |45.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_a0bee0ed11edab150a8172af5c.o |45.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |45.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_53073eb93c76466fca8f474c5f.o |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/fixtures/libpy3tests-library-fixtures.global.a |45.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_643fa2679e88d9b2d33558b050.o |45.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... 
grpc.pb.h} >> conftest.py::flake8 [GOOD] >> helpers.py::flake8 [GOOD] >> test_ctas.py::flake8 [GOOD] >> test_yt_reading.py::flake8 [GOOD] >> test_clickbench.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_tpch.py::flake8 [GOOD] |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |45.1%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 >> test.py::flake8 [GOOD] |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp >> test_common.py::flake8 [GOOD] >> test_yandex_cloud_mode.py::flake8 [GOOD] >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |45.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/flake8 >> test_rename.py::flake8 [GOOD] >> test_tpcds.py::flake8 [GOOD] >> test_tpch_spilling.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |45.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/example/flake8 >> test_example.py::flake8 [GOOD] |45.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/flake8 >> test_serverless.py::flake8 [GOOD] |45.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_0359848ae21601186c5b0d9873.o |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |45.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_88a0e187d0d0f8235a5e3f2fff.o |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp >> test_workload.py::flake8 [GOOD] |45.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.a |45.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_277b7e8f79021687bec95be8db.o |45.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/b9d4e191a9fd03221b46c5af49_raw.auxcpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |45.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_e2acb41e7099c0db4fe54a1587.o |45.1%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/flake8 >> tablet_scheme_tests.py::flake8 [GOOD] |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |45.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_7c81cbfa6b5ce112674cb0a849.o |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_alter_ops.py::flake8 [GOOD] >> test_insert_restarts.py::flake8 [GOOD] >> test_copy_ops.py::flake8 [GOOD] >> test_scheme_shard_operations.py::flake8 [GOOD] 
|45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |45.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_0aefef587c181350d3a25f70e0.o |45.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_3e7b0e88092417daa72b89bfde.o |45.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |45.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/api/flake8 >> test_session_pool.py::flake8 [GOOD] |45.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_79fff48e52404c1611400b8a2c.o |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |45.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp >> test.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_stats_mode.py::flake8 [GOOD] |45.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.global.a |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |45.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_c068ee86eb127df13256bfbe45.o >> test_liveness_wardens.py::flake8 [GOOD] |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |45.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/sql/flake8 >> test_kv.py::flake8 [GOOD] >> test_quota_exhaustion.py::flake8 [GOOD] |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-python-commons/libpy3contrib-python-allure-python-commons.global.a |45.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_3bdea7737a87c43bfaa0aaf4c3.o |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libpy3contrib-libs-googleapis-common-protos.global.a |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection >> conftest.py::flake8 [GOOD] >> test_query_cache.py::flake8 [GOOD] >> test_clickhouse.py::flake8 [GOOD] >> test_greenplum.py::flake8 [GOOD] >> test_join.py::flake8 [GOOD] >> test_postgresql.py::flake8 [GOOD] >> test_ydb.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_auditlog.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_unknown_data_source.py::flake8 [GOOD] |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |45.3%| [TS] {asan, default-linux-x86_64, release} 
ydb/tests/fq/yt/kqp_yt_file/part14/flake8 >> test.py::flake8 [GOOD] |45.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/ydb_cli/flake8 >> test_ydb_table.py::flake8 [GOOD] |45.3%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/flake8 >> test_tablet_channel_migration.py::flake8 [GOOD] |45.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/flake8 >> test.py::flake8 [GOOD] >> collection.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> select_datetime.py::flake8 [GOOD] >> select_positive.py::flake8 [GOOD] >> test.py::flake8 [GOOD] >> test_multinode_cluster.py::flake8 [GOOD] >> test_recompiles_requests.py::flake8 [GOOD] >> test_workload.py::flake8 [GOOD] >> __main__.py::flake8 [GOOD] |45.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |45.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |45.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libpy3client-yc_public-common.global.a |45.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut >> test_workload.py::flake8 [GOOD] >> hive_matchers.py::flake8 [GOOD] >> test_create_tablets.py::flake8 [GOOD] >> test_drain.py::flake8 [GOOD] >> test_kill_tablets.py::flake8 [GOOD] |45.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/helpers/libpy3olap-scenario-helpers.global.a >> test.py::flake8 [GOOD] |45.2%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/canonical/flake8 >> test_sql.py::flake8 [GOOD] |45.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |45.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libf2c/libcontrib-libs-libf2c.a |45.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cblas/libcontrib-libs-cblas.a |45.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-pytest/libpy3contrib-python-allure-pytest.global.a |45.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |45.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |45.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |45.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |45.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |45.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |45.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |45.3%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 >> test.py::flake8 [GOOD] |45.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |45.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |45.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |45.4%| [LD] 
{BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |45.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |45.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |45.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_import/flake8 >> test_yt_reading.py::flake8 [GOOD] |45.4%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/load/flake8 >> test_tpch.py::flake8 [GOOD] |45.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_f42b1add98328abd34a53e4aef.o >> conftest.py::flake8 [GOOD] >> test_2_selects_limit.py::flake8 [GOOD] >> test_3_selects.py::flake8 [GOOD] >> test_bad_syntax.py::flake8 [GOOD] >> test_base.py::flake8 [GOOD] >> test_big_state.py::flake8 [GOOD] >> test_continue_mode.py::flake8 [GOOD] >> test_cpu_quota.py::flake8 [GOOD] >> test_delete_read_rules_after_abort_by_system.py::flake8 [GOOD] >> test_eval.py::flake8 [GOOD] >> test_invalid_consumer.py::flake8 [GOOD] >> test_kill_pq_bill.py::flake8 [GOOD] >> test_mem_alloc.py::flake8 [GOOD] >> test_metrics_cleanup.py::flake8 [GOOD] >> test_pq_read_write.py::flake8 [GOOD] >> test_public_metrics.py::flake8 [GOOD] >> test_read_rules_deletion.py::flake8 [GOOD] >> test_recovery.py::flake8 [GOOD] >> test_recovery_match_recognize.py::flake8 [GOOD] >> test_recovery_mz.py::flake8 [GOOD] >> test_restart_query.py::flake8 [GOOD] >> test_row_dispatcher.py::flake8 [GOOD] >> test_select_1.py::flake8 [GOOD] >> test_select_limit.py::flake8 [GOOD] >> test_select_limit_db_id.py::flake8 [GOOD] >> test_select_timings.py::flake8 [GOOD] >> test_stop.py::flake8 [GOOD] >> test_watermarks.py::flake8 [GOOD] >> test_yds_bindings.py::flake8 [GOOD] >> test_yq_streaming.py::flake8 [GOOD] |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.global.a |45.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/flake8 >> test_yandex_cloud_queue_counters.py::flake8 [GOOD] |45.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 >> test.py::flake8 [GOOD] |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |45.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/streaming/flake8 >> test_join.py::flake8 [GOOD] |45.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 >> test.py::flake8 [GOOD] |45.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/large/flake8 >> test_tpch_spilling.py::flake8 [GOOD] >> test_fifo_messaging.py::flake8 [GOOD] >> conftest.py::flake8 [GOOD] >> test_auth_system_views.py::flake8 [GOOD] >> test_create_users.py::flake8 [GOOD] >> test_create_users_strict_acl_checks.py::flake8 [GOOD] >> test_db_counters.py::flake8 [GOOD] >> test_dynamic_tenants.py::flake8 [GOOD] >> test_publish_into_schemeboard_with_common_ssring.py::flake8 [GOOD] >> test_storage_config.py::flake8 [GOOD] >> test_system_views.py::flake8 [GOOD] >> test_tenants.py::flake8 [GOOD] >> test_user_administration.py::flake8 [GOOD] >> test_generic_messaging.py::flake8 [GOOD] >> test_polling.py::flake8 [GOOD] >> test_users_groups_with_acl.py::flake8 [GOOD] |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |45.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 >> test.py::flake8 [GOOD] |45.5%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/cms/cms_ut.cpp |45.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_1ab2a5a6dd84a6c9ff5d5c50b0.o |45.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/log/tests/flake8 >> test_workload.py::flake8 [GOOD] |45.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |45.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/common/libpy3tests-olap-common.global.a |45.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/flake8 >> test_insert_restarts.py::flake8 [GOOD] |45.5%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/flake8 >> test_scheme_shard_operations.py::flake8 [GOOD] |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |45.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 >> test.py::flake8 [GOOD] |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.global.a |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libpy3client-yc_public-iam.global.a |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |45.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 >> test.py::flake8 [GOOD] |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/lib/libpy3tests-olap-lib.global.a |45.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/plans/flake8 >> test_stats_mode.py::flake8 [GOOD] |45.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/flake8 >> test_liveness_wardens.py::flake8 [GOOD] |45.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/olap/flake8 >> test_quota_exhaustion.py::flake8 [GOOD] |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |45.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_zip.cpp |45.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part1/liblibs-clapack-part1.a |45.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/generic/analytics/flake8 >> test_ydb.py::flake8 [GOOD] |45.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |45.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/flake8 >> test_query_cache.py::flake8 [GOOD] |45.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_time_order_recover.cpp |45.6%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/flake8 >> test_auditlog.py::flake8 [GOOD] |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/common/flake8 >> test_unknown_data_source.py::flake8 [GOOD] |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 >> test.py::flake8 [GOOD] |45.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_guess.cpp |45.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.a |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/recipe/flake8 >> __main__.py::flake8 [GOOD] |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/flake8 >> 
test_workload.py::flake8 [GOOD] |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/multinode/flake8 >> test_recompiles_requests.py::flake8 [GOOD] |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/stress/simple_queue/tests/flake8 >> test_workload.py::flake8 [GOOD] |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/flake8 >> test_kill_tablets.py::flake8 [GOOD] |45.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |45.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 >> test.py::flake8 [GOOD] |45.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/yds/flake8 >> test_yq_streaming.py::flake8 [GOOD] |45.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |45.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/flake8 >> test_polling.py::flake8 [GOOD] |45.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |45.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/7179c606fb7373cb8f04d9971a_raw.auxcpp |45.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_exists.cpp |45.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |45.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/functional/tenants/flake8 >> test_users_groups_with_acl.py::flake8 [GOOD] |45.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |45.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |45.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_6a5c78aa9f679a0920be5264fe.o |45.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_chain_map.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |45.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |45.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_d0255dda539959b69d421868a2.o |45.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_909bbfbd36bf4d7cf0544f0406.o |45.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_173de88696c8239b22567e7ece.o |45.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_decimal.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |45.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_253d734e8c901d319d84fcc6e9.o |45.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_e2a089b95d9316f6e26025d3e3.o |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |45.8%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |45.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/plans/objcopy_96b8686cd075e874d95d4aa5c5.o |45.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |45.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |45.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part2/liblibs-clapack-part2.a |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_insert_table.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |45.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |45.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/network/libessentials-utils-network.a |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |45.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_7897d1b03fc78e49620c18f81a.o |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |45.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_logs_engine.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |45.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/objcopy_5fddfa8f171a3216cad65e02ab.o |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |45.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |45.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |46.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_27c0687ceeb7ce4ff5e4cea90a.o |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |46.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_7eab954373d77ffb1fab95ca0d.o |46.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |46.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/large/objcopy_b0a88dfa3c67850033b8c21ce7.o |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |46.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |46.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/docs/generator/generator |46.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |46.1%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_skiptake.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_dynamic_variant.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fold1.cpp |46.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |46.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |46.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_element.cpp |46.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |46.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_enumerate.cpp |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |46.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chain1_map.cpp |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |46.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_listfromrange.cpp |46.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |46.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |46.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |46.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_lazy_list.cpp |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ydb-tests-olap |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |46.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |46.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.a |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |46.2%| [LD] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |46.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_199ab4be3deaff025e1ab92143.o |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |46.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_422ca1effff14e5a08952658d0.o |46.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/large/objcopy_8ac5034640eee44b1cd5fa5253.o |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/spilling/kqp_scan_spilling_ut.cpp |46.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |46.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |46.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_measure_arg.cpp |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |46.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |46.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/udf_resolver.pb.{h, cc} |46.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... 
grpc.pb.h} |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |46.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_list.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_removemember.cpp |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_rh_hash.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp
>> conftest.py::flake8 [GOOD]
>> s3_helpers.py::flake8 [GOOD]
>> test_bindings_0.py::flake8 [GOOD]
>> test_bindings_1.py::flake8 [GOOD]
>> test_compressions.py::flake8 [GOOD]
>> test_early_finish.py::flake8 [GOOD]
>> test_empty.py::flake8 [GOOD]
>> test_explicit_partitioning_0.py::flake8 [GOOD]
>> test_explicit_partitioning_1.py::flake8 [GOOD]
>> test_format_setting.py::flake8 [GOOD]
>> test_formats.py::flake8 [GOOD]
>> test_inflight.py::flake8 [GOOD]
>> test_insert.py::flake8 [GOOD]
>> test_public_metrics.py::flake8 [GOOD]
>> test_push_down.py::flake8 [GOOD]
>> test_s3_0.py::flake8 [GOOD]
>> test_s3_1.py::flake8 [GOOD]
>> test_size_limit.py::flake8 [GOOD]
>> test_statistics.py::flake8 [GOOD]
>> test_streaming_join.py::flake8 [GOOD]
>> test_test_connection.py::flake8 [GOOD]
>> test_ydb_over_fq.py::flake8 [GOOD]
>> test_yq_v2.py::flake8 [GOOD]
|46.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |46.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_heap.cpp |46.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |46.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_multimap.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_next_value.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_queue.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_sort.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_safe_circular_buffer.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_replicate.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_chopper.cpp |46.4%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/yql_kikimr_expr_nodes.{gen.h ... defs.inl.h} |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_some.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tobytes.cpp |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... 
grpc.pb.h} |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... grpc.pb.h} |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} |46.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_way.cpp |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_visitall.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_source.cpp |46.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_while.cpp |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_flow.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_withcontext.cpp |46.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |46.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_todict.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_coalesce.cpp |46.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_collect.cpp |46.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/libpy3olap-docs-generator.global.a |46.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/docs/generator/objcopy_ac8dbe7f54a2cb7efb6636f75f.o |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |46.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... 
grpc.pb.h} |46.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_getelem.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_grace_join_imp.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_hasitems.cpp |46.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_length.cpp |46.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_iterable.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |46.5%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... grpc.pb.h} |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |46.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |46.5%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... grpc.pb.h} |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |46.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_coalesce.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_logical.cpp |46.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |46.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... grpc.pb.h} |46.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |46.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_sum.cpp |46.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |46.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_7cbdf366fff58ab43b08c0aaa3.o |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/utils/libpy3fq-generic-utils.global.a |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/types/libpy3tests-utils-types.global.a |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |46.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_5309010d16487b3f4dcf314c15.o |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/tests/utils/libpy3connector-tests-utils.global.a |46.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |46.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/objcopy_1007df29dec27b0b7a1587d49f.o |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/simplejson/py3/libpy3python-simplejson-py3.global.a |46.6%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/expr_nodes/dq_expr_nodes.{gen.h ... 
defs.inl.h} |46.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.global.a |46.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |46.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |46.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_2cc418e8604751e5b8f9029a81.o |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |46.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_50f818df501fa237b5369d0e33.o |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |46.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/objcopy_5aa2f431b8ed27f6c5b5d8a131.o |46.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |46.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |46.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... grpc.pb.h} |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/olap/high_load/read_update_write.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/prctl/libpy3library-python-prctl.a |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/type_info/libpy3python-yt-type_info.global.a |46.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/transfer/ydb-tests-functional-transfer |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/yson/libpy3python-yt-yson.global.a |46.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/analytics/c52bef66453eb652f14989b79d_raw.auxcpp |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/python/yt/libpy3yt-python-yt.global.a |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |46.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |46.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_1a867878d783e80bc2d70bd8d0.o |46.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_7dbead413d0eb2c0f2ebe75a93.o |46.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_1a637ae81b754dfa4e06b949b8.o |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |46.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/32049c3ef1f885f0e34984b3bf_raw.auxcpp |46.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator_grpc/solomon_recipe_grpc |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.global.a |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |46.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |46.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |46.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |46.8%| [TS] {asan, default-linux-x86_64, release} ydb/tests/fq/s3/flake8 >> test_yq_v2.py::flake8 [GOOD] |46.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_eff0a6b0f75ccb9a2cb742007c.o |46.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_7e7e709046fe8acad91d924675.o |46.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |46.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_6cfba3dbee97ec121b2f346459.o |46.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_ce073e3cc612363936bdd04210.o |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |46.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/s3/objcopy_999b0e05144f29a542dbe4b3f5.o |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |46.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_exists.cpp |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |46.8%| 
[LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_combine.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |46.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/arrow/python/libpy3src-arrow-python.a |46.9%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/yql_expr_nodes.{gen.h ... defs.inl.h} |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |46.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/yql_mount.pb.{h, cc} |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |46.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |46.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_prepend.cpp |46.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_now.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_pickle.cpp |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |47.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_reverse.cpp |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |47.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_take.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_reduce.cpp |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... 
grpc.pb.h} |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |47.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |47.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_map_join.cpp |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |47.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... grpc.pb.h} |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/pipe_tracker_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |47.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.a |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... grpc.pb.h} |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_grace_join.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_hopping.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_filter.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/locks_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... grpc.pb.h} |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... 
grpc.pb.h} |47.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_combine.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_factory.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |47.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_seq.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... grpc.pb.h} |47.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |47.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/flat_ut.cpp |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |47.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_frombytes.cpp |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... 
grpc.pb.h} |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_map_join.cpp |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |47.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... grpc.pb.h} |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_0a29eb8c456ab5b998f2d12ba1.o |47.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |47.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_9ea5b1fb7a4f8e1b0b8d7cf345.o |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a5874452d3dbd6f6e49cd08be6.o |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_34efc91ed920a8b27d971c44a6.o |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_b9fd5c62781ec3b78d111a0ba7.o |47.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/objcopy_4246ee6b3505ab22753eb44ce7.o |47.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/objcopy_69ec8108bd4bdc059abab5b374.o |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/compatibility/objcopy_083605b223ce507d0fef919d0d.o |47.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |47.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... grpc.pb.h} |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |47.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... 
grpc.pb.h} |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/objcopy_dcbdf62672440a626e79a64e14.o |47.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/transfer_writer_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_login_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |47.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_f364ff47dd846bb94c3e83f2a8.o |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |47.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_a3fc9153ce93c876df4c755b36.o |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |47.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/config/objcopy_ae5b9f6e7a00f305f01a3dde87.o |47.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_51000f45ee1f1ab0908a7e71c9.o |47.5%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/script_execution/objcopy_1aeeb50f676472f975830c135d.o |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |47.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... grpc.pb.h} |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_f05ead59375a9db120b95dd730.o |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_1515671fe2dfb16894dfbe901e.o |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_790c6ea4aad5e761d21421b25d.o |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_1c0f807c059fe226699115f242.o |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_div.cpp |47.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp 
|47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tostring.cpp |47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_top_sort.cpp |46.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_mod.cpp |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |46.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_condense1.cpp |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |46.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |46.8%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |46.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |46.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |46.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_e4166f3d104a6751b45e7e712f.o |46.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_816e2dba53f55d924139cdb3c5.o |46.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.global.a |46.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_93197284f82f9ae9fc0256ee95.o |46.9%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/expr_nodes/kqp_expr_nodes.{gen.h ... 
defs.inl.h} |46.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |46.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/objcopy_484246668d943fbae3b476ec7d.o |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |46.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_b306c2955ce13e6db6cae73363.o |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |47.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_1c18035bb4b3759d5e029db746.o |47.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/multinode/objcopy_10b0cfa01297f7d7392eb4d9e4.o |47.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/45be6e48ea8f2ac38577085d0d_raw.auxcpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_9c56ea1b7d34c7d8f6329bfcfd.o |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5865a174a6c25ca1a2d6386702.o |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_bfa810e70cd1de18c5d4a18a62.o |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |47.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_3efa41af97c0510be1d2e99f05.o |47.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |47.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5db899a01c2ec6f53648af6840.o |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |47.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_4cf502b19212965f14d6660a20.o |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_065e9244d685c2b8f0ab66e414.o |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |47.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |47.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_common.pb.{h, cc} |47.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/canonical/objcopy_461999da7ba13deab5689c18ec.o |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ut_data_erasure_reboots.cpp |47.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |47.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_5a4a401f33f46c70417a65f584.o |47.3%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/dq/expr_nodes/dqs_expr_nodes.{gen.h ... defs.inl.h} |47.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/flavours/libpy3tests-library-flavours.global.a |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/4129dc9878c2058404494fb088_raw.auxcpp |47.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_c84c8d511807425dc18073129b.o |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |47.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_0c4ce75555cfd5c0dd63e9dfbd.o |47.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tenants/objcopy_86ad37399122e504f3e6d8378d.o |47.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_compute_scheduler_ut.cpp |47.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask-Cors/py3/libpy3python-Flask-Cors-py3.global.a |47.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.global.a |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.global.a |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_8e9f839326d1a9224e4b2e15e2.o |47.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_36807918bd7a86c1ea37310c9c.o |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_656baae3c1e24959f5bcc457d7.o |47.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.global.a |47.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_0ab925f82bbba07bf3b749dc3c.o |47.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.global.a |47.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/01e1cebcd98e239de10ed70b94_raw.auxcpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fromyson.cpp |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_map.cpp |47.6%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |47.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_condense.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |47.6%| [PB] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... grpc.pb.h} |47.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |47.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |47.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_toindexdict.cpp |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |47.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |47.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_dictitems.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_random.cpp |47.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_just.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_unwrap.cpp |47.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |47.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_squeeze_to_list.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_size.cpp |47.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_d1da8f48b4e80ef5678b1197a3.o |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_49bad8251d240ad7c49d384b91.o |47.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_0664e2ab2eb37ae9f02538e483.o |48.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_round.cpp |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_791e2f78c18891d943ecce5e41.o |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_1a75593bdb000d1e31dd6e96d5.o |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_6f577a0a3d7a659599df51626e.o |48.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... grpc.pb.h} |48.1%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/e294827eb799173498fe26d398_raw.auxcpp |48.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |48.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_7211c23d9494c46f0f60063e9e.o |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/ttl_tiering/objcopy_52647c3535f2451207dfa29a87.o |48.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/generic/streaming/objcopy_181bdcd1743e9a1a78fafe4b60.o |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... grpc.pb.h} |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... 
grpc.pb.h} |48.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_sample_k.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_join.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |48.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... grpc.pb.h} |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |48.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.{pb.h ... 
grpc.pb.h} |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |48.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_context.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/ssl_helpers.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |48.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/actions/invoker_util.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/indexes_ut.cpp |48.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/concurrency/fiber.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |48.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |48.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/compression.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |48.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hedging_manager.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/misc/blob_output.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |48.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_compression.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp 
|48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |48.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |48.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/signal_registry.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |48.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |48.8%| 
[LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |48.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |49.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |48.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |48.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |48.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |48.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_check_args.cpp |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |48.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |49.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_null.cpp |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |49.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |49.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_ifpresent.cpp |49.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |49.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_nop.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_squeeze_state.cpp |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |49.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |49.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_callable.cpp |49.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/objcopy_2e1dd9c9bc385e6efd22b78136.o |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_c8e04cf4d110f8c670988beb0f.o |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/medium/objcopy_e5d897582dc0fbda7c578cb53f.o |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/lib/libpy3olap-load-lib.global.a |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_0b6bc206b470900b0b94249ade.o |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_e3640190fc6b98b359c2a9e990.o |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/objcopy_a99732b1d02edd62e674483ffe.o |49.2%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/tpc/medium/objcopy_ccde7a40b2fd2886f22cd46a85.o |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_6e0da74b1512d0ffe19c5dc500.o |49.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/load/objcopy_323a17e94d8d570989807d19d3.o |49.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |49.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_top.cpp |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/lib/libpy3functional-tpc-lib.global.a |49.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |49.2%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |49.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... grpc.pb.h} |49.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |49.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_apply.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/ut/ut_utils.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_func.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/topic_data_ut.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_if.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tooptional.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |49.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |49.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_extend.cpp |49.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |49.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fold.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_switch.cpp |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |49.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_iterator.cpp |49.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_group.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_container.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_mul.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... 
grpc.pb.h} |49.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_factory.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_discard.cpp |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_v1.pb.{h, cc} |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_invoke.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_blocks.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_timezone.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_938861be99a6cedecb22904193.o |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_ac7eeedcbf7038a60a7673762a.o |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_9caa7583d1e4955730dbd6f3fd.o |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_d52256d4fa9895f38df6030445.o |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_flatmap.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_cluster_discovery.pb.{h, cc} |49.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_5051832ffa0b6b13cebe014eb1.o |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_c65a9d5efe13dc05c1466090ba.o |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_b8aa61f402be805d2e3e9e75a2.o |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/multi_plane/objcopy_f580ed931409135de17b6aff8b.o |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |49.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_504b845d57f1a23561e970de61.o |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |49.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |49.4%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/sqs/with_quotas/objcopy_31d605682329607481eb568ed0.o |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_245adf3e28f56e6467e034d9f2.o |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/e01aded916ad04e888f13223cf_raw.auxcpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_fab8b7643e4f24e45a3680af85.o |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_f1ad243e6909bb2fe522538c38.o |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... grpc.pb.h} |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_c77713875cf17988efd8fc0fb3.o |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |49.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ydb_cli/objcopy_3505d99c4c5dcee86804fd8d27.o |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_41295709119857c2e0f1a41f31.o |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.global.a |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_b06d27009e49b9ba3df883a226.o |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_8120ef49e7e653ed0601604313.o |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_c98e5b95c64b8486a12f10d408.o |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_tx_scheme.{pb.h ... 
grpc.pb.h} |49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... grpc.pb.h} |49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_board.{pb.h ... grpc.pb.h} |49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/sdk_sessions_pool_ut/sdk_sessions_pool_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/external_sources.{pb.h ... grpc.pb.h} |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |49.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.{pb.h ... grpc.pb.h} |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |49.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_d1dee10c0c00d50989b086bd3f.o |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |49.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |49.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_d3af02c7d57ea2cbbe5d381baa.o |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/locks/range_treap_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_ab26d720c654ba47c2acacaa33.o |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |49.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_0553360a969b2c9633badb428d.o |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_703c8e1d9a9a2b271b8b995a29.o |49.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_switchable_ut.cpp |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |49.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/tpc/large/objcopy_a4b303e939cc32858d35564cac.o |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |49.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |49.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_452efd8b0828678a61ff4e0569.o |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |49.7%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/objcopy_1574e8a5a6c530c7bfd6378c4d.o |49.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_6e536fb2c379a4ebe79c499de8.o |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |49.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/s3_import/objcopy_2d296dfaf373f7f15e6312517a.o |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |49.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_4c839b0fc6ee60e0bb4adc7079.o |49.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_14c03c6aecffbe39cb01ddf2ed.o |49.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_7c328c2741f9dd7697a2e0e8b1.o |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |49.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/autoconfig/objcopy_994fcbd53c4e2174c302bdb5ab.o |49.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |49.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/objcopy_9f29b589555ed64086e5eadccf.o |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_clock_pro_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_contains.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_udf.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_lookup.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_filter.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_condense.cpp |49.8%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |49.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_data_cleanup.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_mapnext.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_map.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fromstring.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_multihopping.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_append.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_rows_formatter.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |49.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_if.cpp |49.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |49.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_skip.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_count.cpp |49.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_join_dict.cpp |49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |49.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |49.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_minmax.cpp |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |49.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |49.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_logical.cpp |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |49.9%| [LD] {BAZEL_DOWNLOAD, 
FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |49.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |49.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |49.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |50.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |50.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_large.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |49.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_178e64ce5db822fc6aa8b3e608.o |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_53273ad3976098fa8cbd55f5a9.o |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/main.cpp |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_5333c1912ecbac0f64ff97551f.o |50.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_48884f6b745ced4d3e78997cb1.o |50.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |50.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_scalar_apply.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |50.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |50.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... grpc.pb.h} |50.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_8f2fbd9f79880fbfa3c1838d80.o |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |50.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_0e928e66807fd553d7fcaa58a3.o |50.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |50.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_5dc9c76fd90ae0562084321e87.o |50.1%| [PR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/result/expr_nodes/yql_res_expr_nodes.{gen.h ... defs.inl.h} |50.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... grpc.pb.h} |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |50.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |50.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_52476c20dac0af4f59edc2917e.o |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |50.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |50.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_0a1f127d9343562caddfbacf79.o |50.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_b08299d456f3448b368e814cb8.o |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a |50.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |50.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_f363a941fa24746cadffc60594.o |50.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_696078ddd4c2d0788472b3ebfe.o |50.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_6b8c453743f8fd2c5380af70c6.o |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |50.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... 
grpc.pb.h} |50.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/objcopy_496e4638abf3c5ef12eafab52c.o |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |50.3%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yds/e1ff312a3308444783623a7c6e_raw.auxcpp |50.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_79d897640a3a634a87f173e2f4.o |50.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/common/objcopy_a54664d42025a3be375f961b82.o |50.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_range.cpp |50.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chopper.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_varitem.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_weakmember.cpp |50.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chain_map.cpp |50.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/libsrc-client-federated_topic.a |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/cache_eviction_ut.cpp |50.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |50.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |50.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... grpc.pb.h} |50.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_3ddbad334a37a829b3772ddb05.o |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/type_codecs_ut.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/microseconds_sliding_window_ut.cpp |50.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_2b60b599fc27771d93e79090fc.o |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/library/libpy3tests-postgres_integrations-library.global.a |50.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/7fdc9492198d5f306aa05e0de1_raw.auxcpp |50.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/postgres_integrations/go-libpq/objcopy_4352b8b3e3cf61532c865b371b.o |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |50.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/objcopy_1d0482d354dc270d18e7123281.o |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/libpy3stress-oltp_workload-workload.global.a |50.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_895e78a038dc7069fda56c2e82.o |50.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_49a1ca9559288648fba9cf7b65.o |50.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... 
grpc.pb.h} |50.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_0446f521b26a2e8128f94ac50f.o |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/docker/libpy3contrib-python-docker.global.a |50.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/fetch_request_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/quota_tracker_ut.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/list_all_topics_ut.cpp |50.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |50.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |50.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/impl/libclient-federated_topic-impl.a |51.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |49.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |49.7%| COMPACTING CACHE 311.7MiB |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |49.7%| [TS] {RESULT} ydb/tests/fq/solomon/flake8 |49.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |49.7%| [TS] {RESULT} ydb/tests/fq/mem_alloc/flake8 |49.7%| [TS] {RESULT} ydb/tests/functional/restarts/flake8 |49.7%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/flake8 |49.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part8/flake8 |49.7%| [TS] {RESULT} ydb/tests/olap/docs/generator/flake8 |49.7%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part18/flake8 |49.7%| [TS] {RESULT} ydb/tests/fq/multi_plane/flake8 |49.8%| [TS] {RESULT} ydb/tests/functional/tpc/medium/flake8 |49.8%| [TS] {RESULT} 
ydb/library/yql/providers/generic/connector/tests/datasource/mysql/flake8 |49.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part1/flake8 |49.8%| [TS] {RESULT} ydb/tests/fq/http_api/flake8 |49.8%| [TS] {RESULT} ydb/tests/functional/sqs/large/flake8 |49.8%| [TS] {RESULT} ydb/tests/functional/audit/flake8 |49.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part6/flake8 |49.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part16/flake8 |49.8%| [TS] {RESULT} ydb/tests/example/flake8 |49.8%| [TS] {RESULT} ydb/tests/functional/sqs/with_quotas/flake8 |49.8%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/join/flake8 |49.8%| [TS] {RESULT} ydb/tests/olap/common/flake8 |49.8%| [TS] {RESULT} ydb/tests/functional/canonical/flake8 |49.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part2/flake8 |49.8%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part10/flake8 |49.8%| [TS] {RESULT} ydb/tests/functional/wardens/flake8 |49.8%| [TS] {RESULT} ydb/tests/fq/yds/flake8 |49.8%| [TS] {RESULT} ydb/tests/fq/common/flake8 |49.8%| [TS] {RESULT} ydb/tests/tools/kqprun/tests/flake8 |49.8%| [TS] {RESULT} ydb/tests/stress/olap_workload/tests/flake8 |49.9%| [TS] {RESULT} ydb/tests/functional/sqs/common/flake8 |49.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part3/flake8 |49.9%| [TS] {RESULT} ydb/tests/functional/script_execution/flake8 |49.9%| [TS] {RESULT} ydb/tests/stress/oltp_workload/tests/flake8 |49.9%| [TS] {RESULT} ydb/core/viewer/tests/flake8 |49.9%| [TS] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/fifo/flake8 |49.9%| [TS] {RESULT} ydb/tests/functional/api/flake8 |49.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part5/flake8 |49.9%| [TS] {RESULT} ydb/tests/functional/limits/flake8 |49.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part9/flake8 |49.9%| [TS] {RESULT} ydb/tests/functional/scheme_tests/flake8 |49.9%| [TS] {RESULT} ydb/tests/stress/simple_queue/tests/flake8 |49.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part7/flake8 |49.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part12/flake8 |49.9%| [TS] {RESULT} ydb/tests/tools/kqprun/recipe/flake8 |49.9%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_import/flake8 |49.9%| [TS] {RESULT} ydb/tests/postgres_integrations/go-libpq/flake8 |49.9%| [TS] {RESULT} ydb/tests/fq/generic/analytics/flake8 |49.9%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/clickhouse/flake8 |50.0%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/oracle/flake8 |50.0%| [TS] {RESULT} ydb/tests/olap/load/flake8 |50.0%| [TS] {RESULT} ydb/tests/functional/scheme_shard/flake8 |50.0%| [TS] {RESULT} ydb/tests/functional/hive/flake8 |50.0%| [TS] {RESULT} ydb/tests/sql/flake8 |50.0%| [TS] {RESULT} ydb/tests/functional/encryption/flake8 |50.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |50.0%| [TS] {RESULT} ydb/tests/functional/blobstorage/flake8 |50.0%| [TS] {RESULT} ydb/tests/functional/serializable/flake8 |50.0%| [TS] {RESULT} ydb/tests/functional/postgresql/flake8 |50.0%| [TS] {RESULT} ydb/tests/tools/nemesis/ut/flake8 |50.0%| [TS] {RESULT} ydb/tests/olap/lib/flake8 |50.0%| [TS] {RESULT} ydb/tests/olap/s3_import/flake8 |50.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part11/flake8 |50.0%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part15/flake8 |50.0%| [TS] {RESULT} ydb/tests/olap/scenario/flake8 |50.0%| [TS] {RESULT} ydb/tests/functional/config/flake8 |50.0%| [TS] {RESULT} ydb/tests/tools/pq_read/test/flake8 |50.0%| [TS] {RESULT} ydb/tests/fq/s3/flake8 |50.0%| [TS] {RESULT} ydb/tests/sql/lib/flake8 
|50.1%| [TS] {RESULT} ydb/tests/fq/generic/streaming/black |50.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part19/flake8 |50.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part0/flake8 |50.1%| [TS] {RESULT} ydb/tests/olap/flake8 |50.1%| [TS] {RESULT} ydb/tests/sql/large/flake8 |50.1%| [TS] {RESULT} ydb/tests/fq/restarts/flake8 |50.1%| [TS] {RESULT} ydb/tests/functional/cms/flake8 |50.1%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ydb/flake8 |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |50.1%| [TS] {RESULT} ydb/tests/fq/plans/flake8 |50.1%| [TS] {RESULT} ydb/tests/functional/sqs/multinode/flake8 |50.1%| [TS] {RESULT} ydb/tests/functional/query_cache/flake8 |50.1%| [TS] {RESULT} ydb/tests/functional/compatibility/flake8 |50.1%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/postgresql/flake8 |50.1%| [TS] {RESULT} ydb/tests/olap/ttl_tiering/flake8 |50.1%| [TS] {RESULT} ydb/tests/fq/generic/streaming/flake8 |50.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part17/flake8 |50.1%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part14/flake8 |50.1%| [TS] {RESULT} ydb/tests/functional/rename/flake8 |50.2%| [TS] {RESULT} ydb/tests/functional/autoconfig/flake8 |50.2%| [TS] {RESULT} ydb/tests/functional/sqs/cloud/flake8 |50.2%| [TS] {RESULT} ydb/tests/functional/tenants/flake8 |50.2%| [TS] {RESULT} ydb/tests/fq/generic/analytics/black |50.2%| [TS] {RESULT} ydb/tests/functional/tpc/large/flake8 |50.2%| [TS] {RESULT} ydb/tests/functional/serverless/flake8 |50.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part13/flake8 |50.3%| [TS] {RESULT} ydb/tests/functional/ttl/flake8 |50.3%| [TS] {RESULT} ydb/tests/stress/log/tests/flake8 |50.3%| [TS] {RESULT} ydb/tests/functional/ydb_cli/flake8 |50.3%| [TS] {RESULT} ydb/tests/fq/yt/kqp_yt_file/part4/flake8 |50.4%| [TS] {RESULT} ydb/library/yql/providers/generic/connector/tests/datasource/ms_sql_server/flake8 |50.4%| [TS] {RESULT} ydb/tests/functional/sqs/messaging/flake8 |50.5%| [TS] {RESULT} ydb/tests/olap/column_family/compression/flake8 |50.5%| [TS] {RESULT} ydb/tests/stress/kv/tests/flake8 |55.7%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |55.8%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |57.2%| [AR] {default-linux-x86_64, release, asan} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |57.2%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |57.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |58.7%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/core/libyt-yt-core.a |58.7%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |58.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |58.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |58.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |58.8%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |59.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |59.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |59.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |59.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |59.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |59.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |59.1%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |59.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |59.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |59.1%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |59.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |59.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |59.1%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |59.2%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |59.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |59.2%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |59.2%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |59.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |59.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |59.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |59.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |59.4%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |59.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |59.4%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |59.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |59.4%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |59.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |59.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |59.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |59.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |60.7%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |60.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |60.7%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |62.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |62.0%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |62.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |62.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |62.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |62.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |62.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |62.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |62.1%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |62.1%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |62.1%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |62.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |62.2%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |62.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |62.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |62.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |62.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |62.3%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |62.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |62.3%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |62.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |62.3%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |62.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |62.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |62.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |62.4%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |62.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |62.4%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |62.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |63.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydb/ydb |63.8%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |65.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |65.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |65.5%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |66.4%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |66.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |66.4%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |66.4%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/client/libyt-yt-client.a |66.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |66.4%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |66.5%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |66.5%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |66.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |66.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |66.5%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |66.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |66.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |66.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |66.6%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |67.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |67.3%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |67.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |68.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |69.0%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |69.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |69.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |69.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |69.2%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |69.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |69.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |69.2%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |69.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/ydb-tests-sql |69.3%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql |69.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql |69.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |69.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |69.7%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |70.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |70.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |70.1%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |70.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |70.8%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |70.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |70.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |70.8%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |70.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |71.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |71.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |71.6%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |71.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |71.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |71.8%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |71.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |71.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |71.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |71.9%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |71.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |72.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |72.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |72.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |72.0%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |72.0%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |72.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |72.0%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |72.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |72.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |72.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |72.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |72.1%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |72.1%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |72.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/example/ydb-tests-example |72.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example |72.6%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |72.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |72.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |72.7%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |72.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |72.7%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |72.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |72.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |72.7%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |72.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |72.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |72.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |72.8%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |72.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/docs/generator/generator |72.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/docs/generator/generator |72.8%| [LD] {RESULT} $(B)/ydb/tests/olap/docs/generator/generator |72.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |72.8%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |72.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |72.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/ydb-tests-olap |72.8%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap |72.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap |72.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |72.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |72.9%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |72.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |72.9%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |72.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |72.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |72.9%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |72.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |72.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |72.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |72.9%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |73.0%| [AR] {default-linux-x86_64, release, asan, pic} $(B)/yt/yt/core/libyt-yt-core.a |73.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |73.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |73.0%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |73.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |73.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |73.0%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |73.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |73.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |73.1%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |73.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |73.2%| [LD] {RESULT} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/olap/load/ydb-tests-olap-load |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |73.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |73.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |73.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |73.2%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |73.3%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |73.3%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |73.3%| [CC] {default-linux-x86_64, release, asan} $(S)/ydb/core/kqp/opt/kqp_query_plan.cpp |73.3%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |73.3%| [AR] {RESULT} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |73.3%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/kqp/opt/kqp_query_plan.cpp |73.3%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut
|73.3%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut
|73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut
|73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut
|73.3%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut
|73.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring
|73.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring
|73.3%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring
|73.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> GenericProviderLookupActor::Lookup [GOOD] >> GenericProviderLookupActor::LookupWithErrors >> GenericProviderLookupActor::LookupWithErrors [GOOD]
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest
|73.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest
|73.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest
|73.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest
|73.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TMonitoringTests::InvalidActorId
|73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest
|73.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest
------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/actors/ut/unittest >> GenericProviderLookupActor::LookupWithErrors [GOOD]
Test command err:
2025-03-28 08:58:47.099 INFO ydb-library-yql-providers-generic-actors-ut(pid=9454, tid=0x00007FD97D674B40) [generic] yql_generic_lookup_actor.cpp:151: New generic provider lookup source actor(ActorId=[1:4:2051]) for kind=YDB, endpoint=host: "some_host" port: 2135, database=some_db, use_tls=1, protocol=NATIVE, table=lookup_test
2025-03-28 08:58:47.110 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=9454, tid=0x00007FD97D674B40) [generic] yql_generic_lookup_actor.cpp:288: ActorId=[1:4:2051] Got LookupRequest for 3 keys
Call ListSplits. 
selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: 
"optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { ... tion: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { 
operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 ListSplits result. GRpcStatusCode: 0 2025-03-28 08:58:47.301 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=9454, tid=0x00007FD97A014640) [generic] yql_generic_lookup_actor.cpp:319: ActorId=[2:7486782240385393773:2051] Got TListSplitsStreamIterator 2025-03-28 08:58:47.301 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=9454, tid=0x00007FD97A014640) [generic] yql_generic_lookup_actor.cpp:196: ActorId=[2:7486782240385393773:2051] Got TListSplitsResponse from Connector Call ReadSplits. data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY CRAB Expected: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY CRAB Actual: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "TEST_TOKEN" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY ReadSplits result. 
GRpcStatusCode: 0 2025-03-28 08:58:47.302 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=9454, tid=0x00007FD97A014640) [generic] yql_generic_lookup_actor.cpp:229: ActorId=[2:7486782240385393773:2051] Got ReadSplitsStreamIterator from Connector 2025-03-28 08:58:47.302 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=9454, tid=0x00007FD97A014640) [generic] yql_generic_lookup_actor.cpp:341: ActorId=[2:7486782240385393773:2051] Got DataChunk 2025-03-28 08:58:47.306 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=9454, tid=0x00007FD97A014640) [generic] yql_generic_lookup_actor.cpp:352: ActorId=[2:7486782240385393773:2051] Got EOF 2025-03-28 08:58:47.307 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=9454, tid=0x00007FD97A014640) [generic] yql_generic_lookup_actor.cpp:402: Sending lookup results for 3 keys |73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |73.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TS3WrapperTests::HeadObject |73.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] >> TS3WrapperTests::CopyPartUpload >> TMonitoringTests::InvalidActorId [GOOD] |73.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |73.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart |73.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer >> TS3WrapperTests::MultipartUpload |73.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |73.4%| [TS] {RESULT} ydb/library/yql/providers/generic/actors/ut/unittest |73.5%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer >> TS3WrapperTests::HeadObject [GOOD] >> TS3WrapperTests::AbortUnknownUpload >> TS3WrapperTests::GetUnknownObject >> TS3WrapperTests::HeadUnknownObject >> TS3WrapperTests::CompleteUnknownUpload |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] >> TS3WrapperTests::GetObject >> TS3WrapperTests::UploadUnknownPart [GOOD] >> TS3WrapperTests::AbortUnknownUpload [GOOD] >> TS3WrapperTests::CopyPartUpload [GOOD] >> TS3WrapperTests::CompleteUnknownUpload [GOOD] >> TS3WrapperTests::GetUnknownObject [GOOD] >> TS3WrapperTests::MultipartUpload [GOOD] |73.5%| [TA] $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject [GOOD] Test command err: 2025-03-28T08:58:48.208704Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 9A274AE8-DFCE-480C-B49C-9BA7C3A4C264, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:5466 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A745C552-D8B5-4CA1-9D93-2223E70F5C68 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-03-28T08:58:48.222943Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 9A274AE8-DFCE-480C-B49C-9BA7C3A4C264, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-03-28T08:58:48.223935Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 2A295623-5348-4E55-9C42-39F99B311A96, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:5466 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8A6EC650-F457-4BEF-B236-DA474724493D amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-03-28T08:58:48.251114Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 2A295623-5348-4E55-9C42-39F99B311A96, response# HeadObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc ContentLength: 4 } >> TS3WrapperTests::GetObject [GOOD] >> TS3WrapperTests::HeadUnknownObject [GOOD] >> TS3WrapperTests::PutObject >> TS3WrapperTests::AbortMultipartUpload ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart [GOOD] Test command err: 2025-03-28T08:58:48.448501Z node 1 :S3_WRAPPER NOTICE: Request: uuid# AB78CF28-E146-4529-95CA-D7EE3C5B3D7C, request# UploadPart { Bucket: TEST Key: key UploadId: uploadId PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:21945 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B9540F3B-8230-4801-8E4E-05F4A252BC66 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=uploadId / 4 2025-03-28T08:58:48.455961Z node 1 :S3_WRAPPER NOTICE: Response: uuid# AB78CF28-E146-4529-95CA-D7EE3C5B3D7C, response# ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] Test command err: 2025-03-28T08:58:48.557437Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 32F5CD7C-63FA-44A6-A085-5AB27CFB26CD, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:12402 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F30EEA4D-F2E1-4334-9CEC-2293241DB170 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic 
x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-03-28T08:58:48.562667Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 32F5CD7C-63FA-44A6-A085-5AB27CFB26CD, response# No response body. |73.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |73.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |73.5%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |73.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload [GOOD] Test command err: 2025-03-28T08:58:48.519037Z node 1 :S3_WRAPPER NOTICE: Request: uuid# A97B90D4-2DA0-4EEB-A64A-306378121CF8, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:32410 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E2AEAB99-4816-43BE-A69E-2D5E874ECE7B amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2025-03-28T08:58:48.524460Z node 1 :S3_WRAPPER NOTICE: Response: uuid# A97B90D4-2DA0-4EEB-A64A-306378121CF8, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2025-03-28T08:58:48.524781Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 272473F0-B694-4DC6-94E5-6DAEFCD78285, request# UploadPart { Bucket: TEST Key: key UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:32410 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2A04297B-147B-4234-81EB-7A26BB24D1EE amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=1 / 4 2025-03-28T08:58:48.527979Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 272473F0-B694-4DC6-94E5-6DAEFCD78285, response# UploadPartResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-03-28T08:58:48.528308Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 302E88C1-A891-445D-BBB4-148995BB1104, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: 1 MultipartUpload: { Parts: [841a2d689ad86bd1611447453c22c6fc] } } REQUEST: POST /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:32410 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1BDCE5DF-6AFB-4840-B5E1-D3B73E8890A6 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=1 2025-03-28T08:58:48.532649Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 302E88C1-A891-445D-BBB4-148995BB1104, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-03-28T08:58:48.533220Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 
F79F75CA-BDC2-4348-B869-16E3593845B8, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:32410 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2BDC4219-0C75-4224-8D21-5687298BB6C6 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-03-28T08:58:48.540498Z node 1 :S3_WRAPPER NOTICE: Response: uuid# F79F75CA-BDC2-4348-B869-16E3593845B8, response# GetObjectResult { } ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload [GOOD] Test command err: 2025-03-28T08:58:48.499415Z node 1 :S3_WRAPPER NOTICE: Request: uuid# D8751000-DD70-4CFF-ABF7-104B4B4E725B, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: uploadId } REQUEST: DELETE /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:17562 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7C4DD714-87FC-46B9-BE44-B495CFF3558B amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=uploadId 2025-03-28T08:58:48.505175Z node 1 :S3_WRAPPER NOTICE: Response: uuid# D8751000-DD70-4CFF-ABF7-104B4B4E725B, response# ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject [GOOD] Test command err: 2025-03-28T08:58:48.582529Z node 1 :S3_WRAPPER NOTICE: Request: uuid# D3B5A6F1-3B84-4DE3-9C71-E7C62B6F61EE, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:16855 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B571DE9C-3B01-4DE2-9F8B-D10AF09A1E87 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-03-28T08:58:48.586240Z node 1 :S3_WRAPPER NOTICE: Response: uuid# D3B5A6F1-3B84-4DE3-9C71-E7C62B6F61EE, response# No response body. 
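The TS3WrapperTests::MultipartUpload trace above walks the standard S3 multipart sequence against the built-in S3_MOCK: CreateMultipartUpload (POST ?uploads), UploadPart (PUT ?partNumber=N&uploadId=ID), CompleteMultipartUpload (POST ?uploadId=ID carrying the collected part ETags), then a ranged GetObject. A minimal sketch of the same request sequence, assuming an S3-compatible mock listening locally and accepting dummy credentials; the endpoint port, bucket TEST, and key key only mirror the trace and are not part of the test suite itself:

    import boto3

    # Assumptions (not from the log): a local S3-compatible mock at ENDPOINT
    # that accepts any credentials.
    ENDPOINT = "http://localhost:32410"  # port copied from the trace; any mock works

    s3 = boto3.client(
        "s3",
        endpoint_url=ENDPOINT,
        aws_access_key_id="dummy",
        aws_secret_access_key="dummy",
        region_name="us-east-1",
    )

    # POST /TEST/key?uploads  -> returns an UploadId
    upload = s3.create_multipart_upload(Bucket="TEST", Key="key")
    upload_id = upload["UploadId"]

    # PUT /TEST/key?partNumber=1&uploadId=...  -> returns the part's ETag
    part = s3.upload_part(
        Bucket="TEST", Key="key", PartNumber=1, UploadId=upload_id, Body=b"test"
    )

    # POST /TEST/key?uploadId=...  with the ETag list finalizes the object
    s3.complete_multipart_upload(
        Bucket="TEST",
        Key="key",
        UploadId=upload_id,
        MultipartUpload={"Parts": [{"ETag": part["ETag"], "PartNumber": 1}]},
    )

    # GET /TEST/key with a byte range, as in the final step of the trace
    obj = s3.get_object(Bucket="TEST", Key="key", Range="bytes=0-3")
    print(obj["Body"].read())

Note how the headers in the trace fit together: content-md5: hBotaJrYa9FhFEdFPCLG/A== is the base64-encoded MD5 of the 4-byte body, and the same digest rendered as hex is the ETag 841a2d689ad86bd1611447453c22c6fc returned by every successful PutObject/UploadPart in this run.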
>> TS3WrapperTests::AbortMultipartUpload [GOOD] >> TS3WrapperTests::PutObject [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload [GOOD] Test command err: 2025-03-28T08:58:48.473517Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 4E63A8DB-9D7B-469D-BD18-C3FC2F726417, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:4532 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 489A1270-19F7-44DA-8626-072171261246 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-03-28T08:58:48.495753Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 4E63A8DB-9D7B-469D-BD18-C3FC2F726417, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-03-28T08:58:48.498937Z node 1 :S3_WRAPPER NOTICE: Request: uuid# D5C7C015-948E-4904-8A9D-8D6990CFC9C9, request# CreateMultipartUpload { Bucket: TEST Key: key1 } REQUEST: POST /TEST/key1?uploads HTTP/1.1 HEADERS: Host: localhost:4532 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1E890704-6C4D-42C7-AEB9-4750F5857F5D amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploads= 2025-03-28T08:58:48.503346Z node 1 :S3_WRAPPER NOTICE: Response: uuid# D5C7C015-948E-4904-8A9D-8D6990CFC9C9, response# CreateMultipartUploadResult { Bucket: Key: TEST/key1 UploadId: 1 } 2025-03-28T08:58:48.503917Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 6581A30D-047E-41F6-8204-FFEE77490716, request# UploadPartCopy { Bucket: TEST Key: key1 UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key1?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:4532 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 870DDED7-664E-49C1-B332-409448AAA924 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-copy-source: /TEST/key x-amz-copy-source-range: bytes=1-2 S3_MOCK::HttpServeWrite: /TEST/key1 / partNumber=1&uploadId=1 / 0 2025-03-28T08:58:48.514309Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 6581A30D-047E-41F6-8204-FFEE77490716, response# UploadPartCopyResult { } 2025-03-28T08:58:48.514751Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 0769BB76-303F-4077-9FC6-14F9AE81BDC0, request# CompleteMultipartUpload { Bucket: TEST Key: key1 UploadId: 1 MultipartUpload: { Parts: [afc7e8a98f75755e513d9d5ead888e1d] } } REQUEST: POST /TEST/key1?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:4532 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 063B2FBD-E3E1-49CB-915F-90FAA24066D3 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploadId=1 2025-03-28T08:58:48.518563Z node 1 :S3_WRAPPER 
NOTICE: Response: uuid# 0769BB76-303F-4077-9FC6-14F9AE81BDC0, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key1 ETag: afc7e8a98f75755e513d9d5ead888e1d } 2025-03-28T08:58:48.519097Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 054CAE8B-D806-40E4-B237-F908C8547ABE, request# GetObject { Bucket: TEST Key: key1 Range: bytes=0-1 } REQUEST: GET /TEST/key1 HTTP/1.1 HEADERS: Host: localhost:4532 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A03C39F2-1200-4D5C-BF36-ED6846614C36 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-1 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key1 / 2 2025-03-28T08:58:48.522197Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 054CAE8B-D806-40E4-B237-F908C8547ABE, response# GetObjectResult { } ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload [GOOD] Test command err: 2025-03-28T08:58:48.541492Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 56F91B26-1C18-4931-BB5C-8C2545418EBC, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: uploadId MultipartUpload: { Parts: [ETag] } } REQUEST: POST /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:20568 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9DB64A4E-58F2-481A-A455-5DE8A3116DD5 amz-sdk-request: attempt=1 content-length: 207 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=uploadId 2025-03-28T08:58:48.552322Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 56F91B26-1C18-4931-BB5C-8C2545418EBC, response# ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetObject [GOOD] Test command err: 2025-03-28T08:58:48.625460Z node 1 :S3_WRAPPER NOTICE: Request: uuid# B1C46F08-87CC-4A88-8C89-96B4441E1F9B, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:13080 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DA684E5C-B9B8-4E98-AE0E-D1AB6C4F902D amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-03-28T08:58:48.629516Z node 1 :S3_WRAPPER NOTICE: Response: uuid# B1C46F08-87CC-4A88-8C89-96B4441E1F9B, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2025-03-28T08:58:48.629765Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 004BAFC7-B421-4250-A62A-93EEBE2F302F, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:13080 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4CC732F6-B240-48BC-B2F9-91058CF018C6 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2025-03-28T08:58:48.632310Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 
004BAFC7-B421-4250-A62A-93EEBE2F302F, response# GetObjectResult { } ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject [GOOD] Test command err: 2025-03-28T08:58:49.019767Z node 1 :S3_WRAPPER NOTICE: Request: uuid# BB7BD898-A652-434F-89E9-C93A48C3CBF8, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:21838 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FD2AB2BF-590F-4196-8F30-5C55DDB14EDE amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2025-03-28T08:58:49.025108Z node 1 :S3_WRAPPER NOTICE: Response: uuid# BB7BD898-A652-434F-89E9-C93A48C3CBF8, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] Test command err: 2025-03-28T08:58:48.978378Z node 1 :S3_WRAPPER NOTICE: Request: uuid# 7355ECAE-5CCD-4B6C-BE66-03FB6AA996D7, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:13061 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 419D5CB4-BE39-430D-8451-7E4C0BA906B0 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2025-03-28T08:58:48.982597Z node 1 :S3_WRAPPER NOTICE: Response: uuid# 7355ECAE-5CCD-4B6C-BE66-03FB6AA996D7, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2025-03-28T08:58:48.982936Z node 1 :S3_WRAPPER NOTICE: Request: uuid# F0B7E6EE-8EC5-4B47-BDB9-C770F45AC17E, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: 1 } REQUEST: DELETE /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:13061 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D8FDC883-0B3B-46C5-A4AE-D479E69A1DC3 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=1 2025-03-28T08:58:48.984801Z node 1 :S3_WRAPPER NOTICE: Response: uuid# F0B7E6EE-8EC5-4B47-BDB9-C770F45AC17E, response# AbortMultipartUploadResult { } 2025-03-28T08:58:48.984989Z node 1 :S3_WRAPPER NOTICE: Request: uuid# FE39A1ED-61D5-4963-838D-BAAE823C002B, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:13061 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 37EB68E1-70DE-4403-80DB-D19A295C0BD8 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 2025-03-28T08:58:48.986804Z node 1 :S3_WRAPPER NOTICE: Response: uuid# FE39A1ED-61D5-4963-838D-BAAE823C002B, response# No response body. 
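Taken together, the CopyPartUpload, PutObject, and AbortMultipartUpload traces above walk the full multipart protocol the wrapper drives: PUT the source object, POST ?uploads to open an upload, PUT ?partNumber=1&uploadId=1 with x-amz-copy-source(-range) to copy bytes 1-2 in as part 1, POST ?uploadId=1 to complete, and DELETE ?uploadId=... to abort. The sketch below condenses that sequence with the same aws-sdk-cpp API. It assumes a client configured as in the previous sketch and mirrors the bucket and key names from the trace, but it is illustrative only: MultipartRoundTrip is not a function from the test sources, and outcome checks are elided for brevity.

```cpp
// Sketch of the multipart sequence from the traces: put a source object,
// open an upload, copy bytes 1-2 of the source in as part 1, complete,
// then show the create-and-abort path from the AbortMultipartUpload test.
// Names mirror the trace; nothing here is from the TS3WrapperTests sources.
#include <aws/s3/S3Client.h>
#include <aws/s3/model/PutObjectRequest.h>
#include <aws/s3/model/CreateMultipartUploadRequest.h>
#include <aws/s3/model/UploadPartCopyRequest.h>
#include <aws/s3/model/CompleteMultipartUploadRequest.h>
#include <aws/s3/model/AbortMultipartUploadRequest.h>
#include <aws/core/utils/memory/stl/AWSStringStream.h>

void MultipartRoundTrip(Aws::S3::S3Client& client) {
    // PutObject { Bucket: TEST Key: key } -- the 4-byte source object.
    Aws::S3::Model::PutObjectRequest put;
    put.SetBucket("TEST");
    put.SetKey("key");
    auto body = Aws::MakeShared<Aws::StringStream>("sketch");
    *body << "data";
    put.SetBody(body);
    client.PutObject(put);

    // CreateMultipartUpload { Bucket: TEST Key: key1 } -> UploadId.
    Aws::S3::Model::CreateMultipartUploadRequest create;
    create.SetBucket("TEST");
    create.SetKey("key1");
    auto created = client.CreateMultipartUpload(create);
    const Aws::String uploadId = created.GetResult().GetUploadId();

    // UploadPartCopy: bytes 1-2 of TEST/key become part 1 of key1
    // (the x-amz-copy-source / x-amz-copy-source-range headers in the log).
    Aws::S3::Model::UploadPartCopyRequest copy;
    copy.SetBucket("TEST");
    copy.SetKey("key1");
    copy.SetUploadId(uploadId);
    copy.SetPartNumber(1);
    copy.SetCopySource("TEST/key");
    copy.SetCopySourceRange("bytes=1-2");
    auto copied = client.UploadPartCopy(copy);
    const Aws::String etag = copied.GetResult().GetCopyPartResult().GetETag();

    // CompleteMultipartUpload with the single copied part.
    Aws::S3::Model::CompletedMultipartUpload parts;
    parts.AddParts(Aws::S3::Model::CompletedPart()
                       .WithPartNumber(1)
                       .WithETag(etag));
    Aws::S3::Model::CompleteMultipartUploadRequest complete;
    complete.SetBucket("TEST");
    complete.SetKey("key1");
    complete.SetUploadId(uploadId);
    complete.SetMultipartUpload(parts);
    client.CompleteMultipartUpload(complete);

    // Abort path, as in the AbortMultipartUpload trace: open a fresh
    // upload for TEST/key and DELETE it via ?uploadId=...
    Aws::S3::Model::CreateMultipartUploadRequest create2;
    create2.SetBucket("TEST");
    create2.SetKey("key");
    auto created2 = client.CreateMultipartUpload(create2);

    Aws::S3::Model::AbortMultipartUploadRequest abortReq;
    abortReq.SetBucket("TEST");
    abortReq.SetKey("key");
    abortReq.SetUploadId(created2.GetResult().GetUploadId());
    client.AbortMultipartUpload(abortReq);
}
```

Note how the part ETag (afc7e8a98f75755e513d9d5ead888e1d in the trace) travels from the copy step into the completion request and comes back as the ETag of the assembled object in CompleteMultipartUploadResult.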
|73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |73.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Backup [GOOD] |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |73.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |73.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |73.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |73.6%| [TA] $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TableWriter::Restore [GOOD] |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Backup [GOOD] |73.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |73.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |73.6%| [TA] {RESULT} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore [GOOD] |73.6%| [TA] $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} >> TestDataErasure::DataErasureManualLaunch3Cycles >> TestDataErasure::DataErasureManualLaunch |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureRun3Cycles |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest |73.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest |73.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::SimpleDataErasureTest >> TDataShardTrace::TestTraceWriteImmediateOnShard >> TDataShardTrace::TestTraceDistributedSelect >> TDataShardTrace::TestTraceDistributedUpsert+UseSink >> TDataShardTrace::TestTraceDistributedUpsert-UseSink >> TDataShardTrace::TestTraceDistributedSelectViaReadActors |73.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |73.7%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} |73.7%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |73.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |73.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query >> THealthCheckTest::Issues100Groups100VCardListing >> THealthCheckTest::Basic >> THealthCheckTest::StorageLimit95 >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus >> THealthCheckTest::Issues100GroupsListing >> THealthCheckTest::StaticGroupIssue >> THealthCheckTest::ServerlessBadTablets >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus >> KqpQuery::QueryResultsTruncated >> THealthCheckTest::OneIssueListing >> KqpParams::RowsList >> KqpLimits::OutOfSpaceBulkUpsertFail >> KqpLimits::BigParameter >> KqpQuery::YqlSyntaxV0 >> KqpQuery::PreparedQueryInvalidate >> KqpQuery::Now >> THealthCheckTest::SpecificServerless >> KqpTypes::UnsafeTimestampCastV0 >> KqpStats::DataQueryWithEffects+UseSink >> KqpQuery::UdfTerminate >> KqpQuery::QueryClientTimeout >> KqpParams::CheckQueryCacheForPreparedQuery >> KqpQuery::RewriteIfPresentToMap |73.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |73.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink |73.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |73.7%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |73.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |73.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer >> KqpAnalyze::AnalyzeTable+ColumnStore >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink >> KqpStats::JoinNoStatsYql >> KqpStats::MultiTxStatsFullExpYql >> KqpLimits::LargeParametersAndMkqlFailure >> KqpLimits::KqpMkqlMemoryLimitException >> KqpQuery::SelectWhereInSubquery >> KqpStats::StreamLookupStats+StreamLookupJoin >> KqpQuery::QueryTimeout >> KqpQuery::DdlInDataQuery >> TestDataErasure::DataErasureManualLaunch [GOOD] >> KqpQuery::ExecuteDataQueryCollectMeta >> KqpQuery::OlapCreateAsSelect_Simple >> KqpStats::JoinNoStatsScan >> KqpExplain::Explain >> KqpStats::OneShardLocalExec-UseSink >> KqpQuery::QueryCacheTtl >> KqpParams::Decimal-QueryService-UseSink >> KqpLimits::WaitCAsStateOnAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureManualLaunch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: 
[1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T08:58:57.075186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T08:58:57.075289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T08:58:57.075327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T08:58:57.075370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T08:58:57.075411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T08:58:57.075436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T08:58:57.075503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T08:58:57.075571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T08:58:57.075892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T08:58:57.159802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T08:58:57.159857Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:58:57.191181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T08:58:57.191465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T08:58:57.191598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T08:58:57.231536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T08:58:57.231715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T08:58:57.232301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T08:58:57.232517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T08:58:57.239341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T08:58:57.240630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T08:58:57.240689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T08:58:57.240833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T08:58:57.240874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T08:58:57.240909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T08:58:57.240993Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.254284Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T08:58:57.394129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T08:58:57.394374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.394559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T08:58:57.394824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T08:58:57.394877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.397115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T08:58:57.397251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T08:58:57.397414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.397482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T08:58:57.397533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T08:58:57.397561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T08:58:57.399497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.399553Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T08:58:57.399586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T08:58:57.401217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.401271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.401305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T08:58:57.401357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T08:58:57.404776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T08:58:57.408021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T08:58:57.408195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T08:58:57.409108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T08:58:57.409236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T08:58:57.409304Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T08:58:57.409550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T08:58:57.409600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T08:58:57.409751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T08:58:57.409818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T08:58:57.413294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T08:58:57.413350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T08:58:57.413532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T08:58:57.413591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T08:58:57.413965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.414006Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T08:58:57.414113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T08:58:57.414152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T08:58:57.414188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T08:58:57.414214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T08:58:57.414244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T08:58:57.414281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T08:58:57.414319Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T08:58:57.414349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T08:58:57.414414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T08:58:57.414458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T08:58:57.414488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T08:58:57.416194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T08:58:57.416298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T08:58:57.416339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 08:59:01.227861Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-03-28T08:59:01.227945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-03-28T08:59:01.228031Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 3] in# 58 ms, next wakeup# 599.942000s, rate# 0, in queue# 0 tenants, running# 1 tenants at schemeshard 72057594046678944 2025-03-28T08:59:01.229844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# false 2025-03-28T08:59:01.263949Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:632:2550], Recipient [1:453:2406]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 1 TabletId: 72075186233409549 Status: OK 2025-03-28T08:59:01.264011Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-03-28T08:59:01.264084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemestard: 72075186233409546 2025-03-28T08:59:01.264151Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409549, shardIdx# 72075186233409546:4 in# 60 ms, next wakeup in# 14.940000s, rate# 1, in queue# 0 shards, running# 1 shards at schemeshard 72075186233409546 2025-03-28T08:59:01.264414Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:636:2551], Recipient [1:453:2406]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 1 TabletId: 72075186233409550 Status: OK 2025-03-28T08:59:01.264454Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-03-28T08:59:01.264491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemestard: 72075186233409546 2025-03-28T08:59:01.264537Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409550, shardIdx# 
72075186233409546:5 in# 60 ms, next wakeup in# 14.940000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409546 2025-03-28T08:59:01.264581Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Data erasure in shards is completed. Send response to root schemeshard 2025-03-28T08:59:01.264609Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Complete: Generation# 1 2025-03-28T08:59:01.267401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemestard: 72075186233409546, NeedResponseComplete# false 2025-03-28T08:59:01.267576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemestard: 72075186233409546, NeedResponseComplete# true 2025-03-28T08:59:01.267964Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1901:3570], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T08:59:01.268011Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T08:59:01.268055Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T08:59:01.268172Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1900:3569], Recipient [1:453:2406]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:1901:3570] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-03-28T08:59:01.268249Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T08:59:01.268282Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409546 2025-03-28T08:59:01.268407Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:453:2406], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 2 } Generation: 1 Status: COMPLETED 2025-03-28T08:59:01.268442Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-03-28T08:59:01.268498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-03-28T08:59:01.268566Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2] in# 62 ms, next wakeup# 599.938000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-03-28T08:59:01.268636Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. 
Send request to BS controller 2025-03-28T08:59:01.270268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-03-28T08:59:01.270322Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-03-28T08:59:01.270782Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1905:3574], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:1906:3575] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T08:59:01.270840Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T08:59:01.270878Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-03-28T08:59:01.271014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-03-28T08:59:01.271053Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-28T08:59:01.271093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-28T08:59:01.271163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-28T08:59:01.271201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-03-28T08:59:01.271255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-28T08:59:01.271309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-03-28T08:59:02.285507Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-28T08:59:02.285593Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-28T08:59:02.285646Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-03-28T08:59:02.285848Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:02.285881Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:02.286039Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-03-28T08:59:02.286068Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-28T08:59:02.286111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-28T08:59:02.286177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-28T08:59:02.286206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 
2025-03-28T08:59:02.286257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-28T08:59:02.286296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-03-28T08:59:02.716622Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:02.716688Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:02.716975Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-28T08:59:02.717096Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-28T08:59:02.717122Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-03-28T08:59:02.717322Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:02.717351Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:02.717554Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-03-28T08:59:02.717579Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-28T08:59:02.717602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-28T08:59:02.717653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-28T08:59:02.717677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-03-28T08:59:02.718021Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 2 s 2025-03-28T08:59:02.725129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-03-28T08:59:02.729695Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1985:3654], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T08:59:02.729891Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T08:59:02.730071Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T08:59:02.730802Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:272:2263], Recipient [1:290:2274]: NKikimrScheme.TEvDataErasureInfoRequest 2025-03-28T08:59:02.730839Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-03-28T08:59:02.731195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> TestDataErasure::SimpleDataErasureTest [GOOD] >> KqpStats::SysViewClientLost ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::SimpleDataErasureTest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T08:58:59.164264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T08:58:59.165239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T08:58:59.165417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T08:58:59.165902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T08:58:59.166073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T08:58:59.166248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T08:58:59.166296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T08:58:59.166632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T08:58:59.170485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T08:58:59.590486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T08:58:59.590627Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:58:59.639106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T08:58:59.640215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T08:58:59.640864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T08:58:59.668120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T08:58:59.670289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T08:58:59.673481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T08:58:59.674076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T08:58:59.682397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T08:58:59.688250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T08:58:59.688847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T08:58:59.689405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T08:58:59.689566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T08:58:59.689858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T08:58:59.690480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T08:58:59.722641Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T08:59:00.130314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T08:59:00.131646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:59:00.132843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T08:59:00.134506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T08:59:00.134990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:59:00.140452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T08:59:00.140591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T08:59:00.140752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:59:00.140809Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T08:59:00.140836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T08:59:00.140858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T08:59:00.149768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:59:00.150148Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T08:59:00.150339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T08:59:00.158118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:59:00.158162Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:59:00.158633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T08:59:00.158976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T08:59:00.172667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T08:59:00.180842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T08:59:00.181962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T08:59:00.188274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T08:59:00.188853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T08:59:00.189043Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T08:59:00.190884Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T08:59:00.191080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T08:59:00.192299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T08:59:00.193142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T08:59:00.202880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T08:59:00.203253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T08:59:00.204671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T08:59:00.204864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T08:59:00.206475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:59:00.206979Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T08:59:00.207634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T08:59:00.207870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T08:59:00.207901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T08:59:00.207925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-03-28T08:59:00.208085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T08:59:00.208424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T08:59:00.208574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T08:59:00.208757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T08:59:00.209114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T08:59:00.209631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T08:59:00.209992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T08:59:00.219465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T08:59:00.219607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T08:59:00.219651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T08:59:05.946114Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T08:59:05.946214Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1978:3649], Recipient [1:832:2715]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:1979:3650] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-03-28T08:59:05.946236Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T08:59:05.946259Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409551 2025-03-28T08:59:05.946338Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:832:2715], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 3 } Generation: 1 Status: COMPLETED 2025-03-28T08:59:05.946361Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-03-28T08:59:05.946587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-03-28T08:59:05.946625Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 3] in# 65 ms, next wakeup# 599.935000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-03-28T08:59:05.947004Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. 
Send request to BS controller 2025-03-28T08:59:05.960164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-03-28T08:59:05.960372Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-03-28T08:59:05.961864Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:1983:3654], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:1984:3655] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T08:59:05.961894Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T08:59:05.961919Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2025-03-28T08:59:05.962414Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2025-03-28T08:59:05.962443Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-28T08:59:05.967525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-28T08:59:05.968016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-28T08:59:05.968242Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-03-28T08:59:05.968475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-28T08:59:05.968695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-03-28T08:59:06.503001Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:455:2408]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.503275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.503346Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:832:2715]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.503555Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.504014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.504048Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.504107Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:832:2715], Recipient [1:832:2715]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.504131Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.504185Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.504203Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.504246Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:455:2408], Recipient [1:455:2408]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.504268Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.551201Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-28T08:59:06.551442Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-28T08:59:06.551498Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-03-28T08:59:06.551724Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2025-03-28T08:59:06.551753Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-28T08:59:06.551777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-28T08:59:06.551830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-28T08:59:06.551856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-03-28T08:59:06.551906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-28T08:59:06.551945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-03-28T08:59:06.987103Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:832:2715]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.987172Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.987400Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.987422Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.987665Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:455:2408]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.987685Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.987737Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.987771Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.987826Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, received event# 271124999, Sender [1:455:2408], Recipient [1:455:2408]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.987845Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.987886Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:832:2715], Recipient [1:832:2715]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.988086Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:07.034947Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-28T08:59:07.035174Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-28T08:59:07.035389Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 1 2025-03-28T08:59:07.035959Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2025-03-28T08:59:07.035986Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-28T08:59:07.036011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-28T08:59:07.036065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-28T08:59:07.036265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-03-28T08:59:07.036625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 0.934000s 2025-03-28T08:59:07.037027Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 1, duration# 2 s 2025-03-28T08:59:07.046802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-03-28T08:59:07.049683Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:2005:3676], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T08:59:07.050117Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T08:59:07.055170Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T08:59:07.055902Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:272:2263], Recipient [1:290:2274]: NKikimrScheme.TEvDataErasureInfoRequest 2025-03-28T08:59:07.056118Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-03-28T08:59:07.056341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 >> TestDataErasure::DataErasureManualLaunch3Cycles [GOOD] >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] |73.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |73.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |73.7%| [LD] {RESULT} 
$(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] Test command err: 2025-03-28T08:59:04.567234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:04.569664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:04.570245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00131e/r3tmp/tmpiNk7sp/pdisk_1.dat 2025-03-28T08:59:06.713917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T08:59:06.811250Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:06.885499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:06.886347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:06.904014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:07.039594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] Test command err: 2025-03-28T08:59:03.668973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:03.670652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:03.671092Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012f6/r3tmp/tmp7e0lLp/pdisk_1.dat 2025-03-28T08:59:06.211149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T08:59:06.345036Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:06.415477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:06.416100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:06.434436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:06.553825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T08:59:10.079672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:10.079770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:945:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:10.080106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:10.105649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T08:59:10.184998Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-28T08:59:10.474063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T08:59:10.762038Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:12.119141Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqdzm0vwab9a6tjdzw2ranc1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjk4YWJmMTEtZTc4OTYxN2YtNzM3ZTY0MGEtNTgxMmQzYjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (LiteralExecuter) , (DataExecuter -> [(WaitForTableResolve) , (RunTasks) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)]) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)])])]) >> THealthCheckTest::Basic [GOOD] >> THealthCheckTest::BasicNodeCheckRequest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] >> KqpParams::ImplicitParameterTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] Test command err: 2025-03-28T08:59:05.090080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:05.091904Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:05.092311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001309/r3tmp/tmp1t8opL/pdisk_1.dat 2025-03-28T08:59:07.310451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T08:59:07.401216Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:07.481027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:07.481692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:07.499432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:07.621880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T08:59:11.392596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:11.393621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:945:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:11.396413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:11.423765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T08:59:11.543656Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-28T08:59:11.833936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T08:59:11.974054Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:12.350102Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqdzm24n5rcy7zr5vyfb12pg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmVlZWJlYTgtOTllYmUwZjMtZmI0ZDk4ZTYtY2M4OTgyZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T08:59:12.526309Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqdzm35nd7sqzadjz8h3sn0k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTc2MDRiYTYtNjFlNzQ0MjktNDZlMDA0MWUtNGFkYzJhNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T08:59:15.175867Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqdzm3mbfzgk34h0eyywjzw6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTMwNDhjYjktZTkxYTg0ZDktMzRmNmRmMWEtN2Q3OWExYjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] Test command err: 2025-03-28T08:59:04.096810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:04.106748Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:04.107403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00131b/r3tmp/tmpqGcf80/pdisk_1.dat 2025-03-28T08:59:09.648338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T08:59:09.991293Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:10.092920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:10.093781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:10.110357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:10.338253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T08:59:14.674375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:14.677204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:945:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:14.679283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:14.687734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T08:59:14.732098Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-28T08:59:15.051188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T08:59:15.277919Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:19.173210Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqdzm5axapa86n8rdqrgqat9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjNjMGE1NjYtZjdkMmI4Mi1jYjk2NzI1Yi03OTkxYjhlYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (DataExecuter -> [(WaitForTableResolve) , (ComputeActor -> [(TKqpForwardWriteActor)]) , (RunTasks) , (Commit -> [(Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)]) , (Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)])])])]) >> KqpQuery::QueryClientTimeoutPrecompiled |73.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |73.8%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] Test command err: 2025-03-28T08:59:08.167103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:08.168955Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:08.169532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012e4/r3tmp/tmpz3ObR2/pdisk_1.dat 2025-03-28T08:59:10.780879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T08:59:10.871393Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:10.960672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:10.961351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:10.981694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:11.119436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T08:59:15.030532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2776], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:15.031486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:945:2781], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:15.034218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:15.060827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T08:59:15.165347Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-28T08:59:15.560157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:948:2784], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T08:59:15.951091Z node 1 :TX_PROXY ERROR: Actor# [1:1017:2833] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:19.714211Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqdzm5pabhwycyrvapg7a663, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTFmNDFlNTYtMmYyMzdmY2UtNTZiOWMxMjktNmE5YzhhZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T08:59:20.832977Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqdzmad65b0225tcxrnws611, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M5OGE1NDQtNGVlZWJhY2YtNDhmYzAyZTYtYzgzZDM0OTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T08:59:22.186394Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqdzmbfv4qk94w3p56h7v4n7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjdmYjU4YWQtZTIwZjBkMTQtMjFlNjlmZDAtNDUzYmI2YWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> THealthCheckTest::ServerlessBadTablets [GOOD] >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes |73.8%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> THealthCheckTest::BasicNodeCheckRequest [GOOD] >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink >> THealthCheckTest::SpecificServerless [GOOD] >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureRun3Cycles 2025-03-28 08:59:11,868 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-28 08:59:11,967 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 20 secs timeout. 
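An aside before the data-erasure timeout details below: the TDataShardTrace tests above print each query's trace as a nested span tree, e.g. (DataExecuter -> [(WaitForTableResolve) , (RunTasks)]) — a parent span followed by its children in brackets, separated by " , ". The following is a minimal sketch of a parser for that notation; the grammar is inferred from the log output itself, and the type and function names are mine, not YDB's.

// Recursive-descent parser for the span-tree notation printed by the
// ut_trace tests above. Grammar (inferred from the log, not from YDB code):
//   node := '(' NAME [' -> [' node { ' , ' node } ']'] ')'
#include <cctype>
#include <iostream>
#include <stdexcept>
#include <string>
#include <vector>

struct TSpan {
    std::string Name;
    std::vector<TSpan> Children;
};

static void SkipWs(const std::string& s, size_t& pos) {
    while (pos < s.size() && std::isspace(static_cast<unsigned char>(s[pos]))) {
        ++pos;
    }
}

static TSpan ParseSpan(const std::string& s, size_t& pos) {
    SkipWs(s, pos);
    if (pos >= s.size() || s[pos] != '(') {
        throw std::runtime_error("expected '('");
    }
    ++pos; // consume '('
    TSpan span;
    // NAME is a run of identifier characters and dots, e.g. "Tablet.WriteLog.LogEntry"
    while (pos < s.size() && (std::isalnum(static_cast<unsigned char>(s[pos])) ||
                              s[pos] == '.' || s[pos] == '_')) {
        span.Name += s[pos++];
    }
    SkipWs(s, pos);
    if (s.compare(pos, 2, "->") == 0) { // "-> [child , child , ...]"
        pos += 2;
        SkipWs(s, pos);
        if (s[pos] != '[') throw std::runtime_error("expected '['");
        ++pos;
        for (;;) {
            span.Children.push_back(ParseSpan(s, pos));
            SkipWs(s, pos);
            if (s[pos] == ',') { ++pos; continue; }
            if (s[pos] == ']') { ++pos; break; }
            throw std::runtime_error("expected ',' or ']'");
        }
        SkipWs(s, pos);
    }
    if (s[pos] != ')') throw std::runtime_error("expected ')'");
    ++pos;
    return span;
}

static void Print(const TSpan& s, int depth = 0) {
    std::cout << std::string(depth * 2, ' ') << s.Name << '\n';
    for (const auto& c : s.Children) Print(c, depth + 1);
}

int main() {
    std::string trace =
        "(Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) "
        ", (LiteralExecuter) , (DataExecuter -> [(WaitForTableResolve) , (RunTasks)])])";
    size_t pos = 0;
    Print(ParseSpan(trace, pos)); // prints the tree with two-space indentation
}

Something along these lines is presumably what the test asserts against when it checks that an upsert passed through the expected datashard units; the indentation-printing main is only there to make the structure visible.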
Process tree before termination: pid rss ref pdirt 10940 46.2M 46.4M 23.5M test_tool run_ut @/home/runner/.ya/build/build_root/2909/0010c8/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/testing_out_stuff/chunk2/testing_out_stuff/test_to 11027 628M 629M 328M └─ ydb-core-tx-schemeshard-ut_data_erasure --trace-path-append /home/runner/.ya/build/build_root/2909/0010c8/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/te Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T08:58:57.350170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T08:58:57.350275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T08:58:57.350308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T08:58:57.350355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T08:58:57.350396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T08:58:57.350422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T08:58:57.350477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T08:58:57.350545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T08:58:57.350882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T08:58:57.429946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T08:58:57.430002Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:58:57.445253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T08:58:57.445491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T08:58:57.445627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T08:58:57.468587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T08:58:57.470020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T08:58:57.473471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T08:58:57.474102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T08:58:57.492865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T08:58:57.501704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, 
at schemeshard: 72057594046678944 2025-03-28T08:58:57.501943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T08:58:57.502806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T08:58:57.503024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T08:58:57.503566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T08:58:57.504253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.554941Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T08:58:57.971076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T08:58:57.972055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.973157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T08:58:57.974532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T08:58:57.975050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.986269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T08:58:57.987252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T08:58:57.988387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.988699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T08:58:57.988851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T08:58:57.989025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T08:58:58.002036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:58.002407Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T08:58:58.002577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T08:58:58.017269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:58.017316Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:58.017711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T08:58:58.017886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T08:58:58.035363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T08:58:58.046146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T08:58:58.048698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T08:58:58.064870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T08:58:58.065610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T08:58:58.065963Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T08:58:58.067915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T08:58:58.068106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T08:58:58.069405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T08:58:58.070082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T08:58:58.079179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T08:58:58.079533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T08:58:58.080729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T08:58:58.080944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T08:58:58.082823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:58.083094Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T08:58:58.083731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-03-28T08:58:58.084071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T08:58:58.084249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T08:58:58.084460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T08:58:58.084653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T08:58:58.085007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T08:58:58.085196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T08:58:58.085599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T08:58:58.085979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T08:58:58.086514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T08:58:58.086893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T08:58:58.095874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T08:58:58.096582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T08:58:58.097116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:10.813141Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125513, Sender [1:290:2274], Recipient [1:453:2406]: NKikimrScheme.TEvTenantDataErasureRequest Generation: 3 2025-03-28T08:59:10.813165Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureRequest 2025-03-28T08:59:10.813185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvTenantDataErasureRequest, at schemeshard: 72075186233409546 2025-03-28T08:59:10.813218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxRunTenantDataErasure Execute at schemeshard: 72075186233409546 2025-03-28T08:59:10.813237Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [TenantDataErasureManager] Clear operation queue and active pipes 2025-03-28T08:59:10.813448Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [TenantDataErasureManager] Clear WaitingDataErasureShards: Size# 2 2025-03-28T08:59:10.813503Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Start] Data erasure for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409549, next wakeup# 9.000000s, rate# 1, in queue# 1 shards, running# 0 shards at schemeshard 72075186233409546 2025-03-28T08:59:10.813537Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [TenantDataErasureManager] [Enqueue] Enqueued shard# 72075186233409546:4 at schemeshard 72075186233409546 2025-03-28T08:59:10.813573Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Start] Data erasure for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409550, next wakeup# 9.000000s, rate# 1, in queue# 1 shards, running# 1 shards at schemeshard 72075186233409546 2025-03-28T08:59:10.813597Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [TenantDataErasureManager] [Enqueue] Enqueued shard# 72075186233409546:5 at schemeshard 72075186233409546 2025-03-28T08:59:10.813630Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [TenantDataErasureManager] Run: Queue.Size# 0, WaitingDataErasureShards.size# 2, Status# 2 2025-03-28T08:59:10.838137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxRunDataErasure Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# false 2025-03-28T08:59:10.959559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxRunTenantDataErasure Complete at schemeshard: 72075186233409546, NeedResponseComplete# false 2025-03-28T08:59:10.959657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxRunTenantDataErasure Complete at schemeshard: 72075186233409551, NeedResponseComplete# false 2025-03-28T08:59:11.792835Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:1011:2858], Recipient [1:825:2711]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 3 TabletId: 72075186233409554 Status: OK 2025-03-28T08:59:11.792889Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-03-28T08:59:11.792940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemeshard: 72075186233409551 2025-03-28T08:59:11.793007Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409551, LocalPathId: 2], datashard# 72075186233409554, shardIdx# 72075186233409551:4 in# 60 ms, next wakeup in# 8.940000s, rate# 1, in queue# 0 shards, running# 1 shards at schemeshard 72075186233409551 2025-03-28T08:59:11.795533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemeshard: 72075186233409551,
NeedResponseComplete# false 2025-03-28T08:59:11.812272Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:1012:2859], Recipient [1:825:2711]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 3 TabletId: 72075186233409555 Status: OK 2025-03-28T08:59:11.812317Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-03-28T08:59:11.812363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemeshard: 72075186233409551 2025-03-28T08:59:11.812433Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409551, LocalPathId: 2], datashard# 72075186233409555, shardIdx# 72075186233409551:5 in# 62 ms, next wakeup in# 8.938000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409551 2025-03-28T08:59:11.812495Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Data erasure in shards is completed. Send response to root schemeshard 2025-03-28T08:59:11.812520Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Complete: Generation# 3 2025-03-28T08:59:11.813895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemeshard: 72075186233409551, NeedResponseComplete# true 2025-03-28T08:59:11.814084Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:825:2711], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 3 } Generation: 3 Status: COMPLETED 2025-03-28T08:59:11.814113Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-03-28T08:59:11.814158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-03-28T08:59:11.814216Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 3] in# 63 ms, next wakeup# 593.937000s, rate# 0, in queue# 0 tenants, running# 1 tenants at schemeshard 72057594046678944 2025-03-28T08:59:11.816099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# false 2025-03-28T08:59:11.830666Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:632:2550], Recipient [1:453:2406]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 3 TabletId: 72075186233409549 Status: OK 2025-03-28T08:59:11.830726Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-03-28T08:59:11.830773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemeshard: 72075186233409546 2025-03-28T08:59:11.830831Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409549, shardIdx# 72075186233409546:4 in# 65 ms, next wakeup in# 8.935000s, rate# 1, in queue# 0 shards, running# 1 shards at schemeshard 72075186233409546 2025-03-28T08:59:11.832421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemeshard: 72075186233409546, NeedResponseComplete# false 2025-03-28T08:59:11.844112Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553241, Sender [1:636:2551], Recipient [1:453:2406]:
NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 3 TabletId: 72075186233409550 Status: OK 2025-03-28T08:59:11.844154Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-03-28T08:59:11.844198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemeshard: 72075186233409546 2025-03-28T08:59:11.844247Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 2], datashard# 72075186233409550, shardIdx# 72075186233409546:5 in# 67 ms, next wakeup in# 8.933000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409546 2025-03-28T08:59:11.844301Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Data erasure in shards is completed. Send response to root schemeshard 2025-03-28T08:59:11.844341Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Complete: Generation# 3 2025-03-28T08:59:11.847100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemeshard: 72075186233409546, NeedResponseComplete# true 2025-03-28T08:59:11.847250Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:453:2406], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 2 } Generation: 3 Status: COMPLETED 2025-03-28T08:59:11.847283Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-03-28T08:59:11.847329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-03-28T08:59:11.847375Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2] in# 68 ms, next wakeup# 593.932000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-03-28T08:59:11.847429Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed.
Send request to BS controller 2025-03-28T08:59:11.852583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-03-28T08:59:11.852615Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-03-28T08:59:11.852772Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2025-03-28T08:59:11.852803Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-28T08:59:11.852829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-28T08:59:11.852866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-28T08:59:11.852890Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-03-28T08:59:11.852927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-28T08:59:11.852961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 20 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2909/0010c8/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/testing_out_stuff/chunk2/testing_out_stuff/test_tool.args']' stopped by 20 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("20 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2909/0010c8/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/testing_out_stuff/chunk2/testing_out_stuff/test_tool.args']' stopped by 20 seconds timeout",), {}) >> KqpParams::MissingParameter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureManualLaunch3Cycles [GOOD] 2025-03-28 08:59:11,479 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-28 08:59:11,619 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 20 secs timeout. 
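Stepping back from the timeout reports for a moment (the process tree for this run follows below): every data-erasure run in this log ends with the same handshake against the BlobStorage controller. The root manager sends a request for the current generation, receives TEvControllerShredResponse with Progress10k scaled to 0..10000 (so 5000 is 50%), and re-schedules itself one second later until Completed is true. Below is an illustrative sketch of that loop; only the response field names (CurrentGeneration, Completed, Progress10k) and the 1-second interval come from the log itself — the types and helpers are hypothetical stand-ins, not YDB code.

// Toy polling loop mirroring the "SendRequestToBSC" / "ScheduleRequestToBSC"
// records above. Assumption: sendRequestToBsc is a caller-supplied stand-in
// for the real pipe to the BS controller.
#include <chrono>
#include <cstdint>
#include <functional>
#include <iostream>
#include <thread>

struct TShredResponse {
    uint64_t CurrentGeneration = 0; // generation the controller reports on
    bool Completed = false;         // true once the shred cycle has finished
    uint32_t Progress10k = 0;       // progress scaled to 0..10000 (5000 == 50%)
};

void PollShredUntilComplete(uint64_t generation,
                            const std::function<TShredResponse(uint64_t)>& sendRequestToBsc) {
    using namespace std::chrono_literals;
    for (;;) {
        // "SendRequestToBSC: Generation# N"
        const TShredResponse r = sendRequestToBsc(generation);
        if (r.CurrentGeneration != generation) {
            continue; // answer for a stale generation; ask again
        }
        if (r.Completed) {
            std::cout << "Data shred in BSC is completed\n";
            return;
        }
        std::cout << "Progress data shred in BSC " << r.Progress10k << "\n";
        // "ScheduleRequestToBSC: Interval# 1.000000s"
        std::this_thread::sleep_for(1s);
    }
}

int main() {
    uint32_t progress = 0;
    PollShredUntilComplete(1, [&](uint64_t) {
        TShredResponse r;
        r.CurrentGeneration = 1;
        r.Completed = (progress >= 10000);
        r.Progress10k = progress;
        progress += 5000; // fake controller: 0 -> 5000 -> 10000, as in the log
        return r;
    });
}

The generation check is presumably what keeps the multi-cycle tests honest: a late response for an older shred cycle must not be allowed to complete the current one.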
Process tree before termination: pid rss ref pdirt 10901 46.2M 46.4M 23.4M test_tool run_ut @/home/runner/.ya/build/build_root/2909/0010f3/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_to 11020 618M 618M 314M └─ ydb-core-tx-schemeshard-ut_data_erasure --trace-path-append /home/runner/.ya/build/build_root/2909/0010f3/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/te Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T08:58:57.015600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T08:58:57.015708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T08:58:57.015745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T08:58:57.015785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T08:58:57.015827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T08:58:57.015855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T08:58:57.015912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T08:58:57.015996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T08:58:57.016316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T08:58:57.140968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T08:58:57.141029Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:58:57.187179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T08:58:57.187454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T08:58:57.187593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T08:58:57.223588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T08:58:57.223801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T08:58:57.224377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T08:58:57.224605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T08:58:57.231335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T08:58:57.232642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, 
at schemeshard: 72057594046678944 2025-03-28T08:58:57.232704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T08:58:57.232846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T08:58:57.232887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T08:58:57.232923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T08:58:57.232990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.246243Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T08:58:57.355811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T08:58:57.356050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.356235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T08:58:57.356433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T08:58:57.356486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.360358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T08:58:57.360500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T08:58:57.360701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.360770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T08:58:57.360810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T08:58:57.360838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T08:58:57.362987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.363045Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T08:58:57.363078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T08:58:57.364827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.364874Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.364913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T08:58:57.364965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T08:58:57.368526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T08:58:57.374450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T08:58:57.374640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T08:58:57.375634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T08:58:57.375767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T08:58:57.375825Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T08:58:57.376064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T08:58:57.376130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T08:58:57.376291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T08:58:57.376362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T08:58:57.378357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T08:58:57.378419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T08:58:57.378601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T08:58:57.378641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T08:58:57.379027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T08:58:57.379074Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T08:58:57.379162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
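The trace above walks a single ESchemeOpAlterSubDomain operation through its sub-operation states (2 -> 3 -> 128 -> 240) with a timestamp on every transition. When a schemeshard suite such as ut_data_erasure times out, pulling those transitions out of the captured log is usually faster than reading the raw stream. A minimal sketch, assuming only the line format shown above; the helper is hypothetical and not part of the YDB tooling:

```python
import re

# Matches lines like:
#   2025-03-28T08:58:57.360838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
TRANSITION = re.compile(r"Change state for txid (\d+:\d+) (\d+) -> (\d+)")
TIMESTAMP = re.compile(r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)")

def state_transitions(log_text: str):
    """Yield (timestamp, operation_id, from_state, to_state) per transition line."""
    for line in log_text.splitlines():
        m = TRANSITION.search(line)
        if not m:
            continue
        ts = TIMESTAMP.search(line)
        yield (ts.group(1) if ts else "?",
               m.group(1), int(m.group(2)), int(m.group(3)))

if __name__ == "__main__":
    sample = ("2025-03-28T08:58:57.360838Z node 1 :FLAT_TX_SCHEMESHARD INFO: "
              "Change state for txid 1:0 2 -> 3")
    print(list(state_transitions(sample)))
    # [('2025-03-28T08:58:57.360838Z', '1:0', 2, 3)]
```

Feeding the whole chunk's testing_out_stuff log through this shows at a glance which operation stalled and in which state.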
2025-03-28T08:58:57.379193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T08:58:57.379228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T08:58:57.379258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T08:58:57.379291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T08:58:57.379337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T08:58:57.379372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T08:58:57.379398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T08:58:57.379464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T08:58:57.379513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T08:58:57.379542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T08:58:57.381162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T08:58:57.381263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T08:58:57.381299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ARD DEBUG: TTxCompleteDataErasureShard Complete at schemestard: 72075186233409551, NeedResponseComplete# true 2025-03-28T08:59:06.116144Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:832:2715], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 3 } Generation: 3 Status: COMPLETED 2025-03-28T08:59:06.116173Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-03-28T08:59:06.116214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-03-28T08:59:06.116253Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 3] in# 62 ms, next wakeup# 595.803000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-03-28T08:59:06.116301Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. 
Send request to BS controller 2025-03-28T08:59:06.123538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-03-28T08:59:06.123593Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-03-28T08:59:06.124228Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2025-03-28T08:59:06.124270Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-28T08:59:06.124510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-28T08:59:06.124918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-28T08:59:06.125129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-03-28T08:59:06.125374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-28T08:59:06.125610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-03-28T08:59:06.687276Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.687343Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.687638Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.687672Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.699102Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:455:2408]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.699151Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.699200Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:832:2715]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.699222Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:06.699265Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:455:2408], Recipient [1:455:2408]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.699286Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.699336Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:832:2715], Recipient [1:832:2715]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.699353Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:06.747020Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-28T08:59:06.747231Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-28T08:59:06.747440Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-03-28T08:59:06.747851Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-03-28T08:59:06.747876Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-28T08:59:06.747900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-28T08:59:06.747952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-28T08:59:06.747977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-03-28T08:59:06.748023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-28T08:59:06.748052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-03-28T08:59:07.156587Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:07.156651Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:07.156726Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:07.156749Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:07.167417Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:455:2408]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:07.167472Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:07.167522Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:832:2715]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:07.167542Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T08:59:07.167589Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:455:2408], Recipient [1:455:2408]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:07.167610Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:07.167665Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:832:2715], Recipient [1:832:2715]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:07.167683Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T08:59:07.214237Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-28T08:59:07.214295Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-28T08:59:07.214324Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-03-28T08:59:07.214524Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-03-28T08:59:07.214548Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-28T08:59:07.214570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-28T08:59:07.214625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-28T08:59:07.214654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-03-28T08:59:07.214709Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 3, duration# 2 s 2025-03-28T08:59:07.218292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-03-28T08:59:07.218830Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:3553:4912], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T08:59:07.218869Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T08:59:07.218898Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T08:59:07.218968Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:2772:4287], Recipient [1:290:2274]: NKikimrScheme.TEvDataErasureInfoRequest 2025-03-28T08:59:07.218991Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-03-28T08:59:07.219016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 20 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2909/0010f3/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args']' stopped by 20 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("20 second(s) wait timeout has 
expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2909/0010f3/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/testing_out_stuff/chunk1/testing_out_stuff/test_tool.args']' stopped by 20 seconds timeout",), {}) >> KqpLimits::StreamWrite+Allowed >> KqpTypes::UnsafeTimestampCastV0 [GOOD] >> KqpTypes::UnsafeTimestampCastV1 |73.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpStats::StreamLookupStats+StreamLookupJoin [GOOD] >> KqpStats::StreamLookupStats-StreamLookupJoin >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood >> KqpQuery::Now [GOOD] >> KqpQuery::NoEvaluate >> KqpExplain::Explain [GOOD] >> KqpExplain::CompoundKeyRange >> KqpParams::CheckQueryCacheForPreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery |73.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |73.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |73.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> TargetDiscoverer::Dirs |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> KqpParams::RowsList [GOOD] >> KqpQuery::CurrentUtcTimestamp |73.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/{meta.json ... results_accumulator.log} |73.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut >> KqpQuery::PreparedQueryInvalidate [GOOD] >> KqpQuery::QueryCache >> KqpQuery::QueryTimeout [GOOD] >> KqpQuery::RandomNumber >> KqpQuery::ExecuteDataQueryCollectMeta [GOOD] >> KqpQuery::GenericQueryNoRowsLimit |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> KqpQuery::YqlSyntaxV0 [GOOD] >> KqpQuery::YqlTableSample >> KqpQuery::SelectWhereInSubquery [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink >> KqpQuery::QueryClientTimeout [GOOD] >> KqpQuery::QueryCancelWrite >> KqpQuery::QueryResultsTruncated [GOOD] >> KqpQuery::QueryStats+UseSink |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> KqpStats::MultiTxStatsFullExpYql [GOOD] >> KqpStats::MultiTxStatsFullExpScan >> KqpQuery::UdfTerminate [GOOD] >> KqpQuery::UdfMemoryLimit >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes >> KqpStats::DataQueryWithEffects+UseSink [GOOD] >> KqpStats::DataQueryWithEffects-UseSink >> KqpQuery::RewriteIfPresentToMap [GOOD] >> KqpQuery::ReadOverloaded+StreamLookup >> KqpQuery::DdlInDataQuery [GOOD] >> KqpQuery::DeleteWhereInSubquery >> KqpLimits::LargeParametersAndMkqlFailure [GOOD] >> KqpLimits::ManyPartitions >> THealthCheckTest::StorageLimit95 [GOOD] >> THealthCheckTest::StorageNoQuota |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> THealthCheckTest::OneIssueListing [GOOD] >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus >> KqpStats::JoinNoStatsYql [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin >> KqpQuery::QueryCacheTtl [GOOD] >> KqpQuery::QueryCacheInvalidate |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> 
KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink [GOOD] >> KqpLimits::ComputeNodeMemoryLimit >> KqpQuery::OlapCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple >> THealthCheckTest::StaticGroupIssue [GOOD] >> THealthCheckTest::StorageLimit87 >> TSubscriberTest::NotifyDelete >> KqpStats::OneShardLocalExec-UseSink [GOOD] >> KqpStats::OneShardNonLocalExec+UseSink |73.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |73.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index >> TSubscriberTest::NotifyDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyDelete [GOOD] Test command err: 2025-03-28T08:59:49.964189Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T08:59:49.966912Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-03-28T08:59:49.967032Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2025-03-28T08:59:49.967083Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2025-03-28T08:59:49.967144Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2066] 2025-03-28T08:59:49.967191Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2025-03-28T08:59:49.967233Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T08:59:49.967332Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2066] 2025-03-28T08:59:49.967390Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T08:59:49.967637Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:9:2056] 2025-03-28T08:59:49.967747Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:3:2050] 2025-03-28T08:59:49.967814Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:6:2053] 2025-03-28T08:59:49.967859Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:38:2066] 2025-03-28T08:59:49.967938Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Path was updated to new version: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T08:59:49.968024Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:36:2066] 2025-03-28T08:59:49.968073Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T08:59:49.968115Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:37:2066] 2025-03-28T08:59:49.968162Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } >> TSubscriberCombinationsTest::CombinationsRootDomain >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus [GOOD] >> THealthCheckTest::RedGroupIssueOnRedSpace >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus [GOOD] >> THealthCheckTest::YellowGroupIssueOnYellowSpace >> KqpParams::Decimal-QueryService-UseSink [GOOD] >> KqpParams::Decimal+QueryService-UseSink |73.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |73.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes >> THealthCheckTest::Issues100Groups100VCardListing [GOOD] >> THealthCheckTest::GreenStatusWhenInitPending >> KqpTypes::UnsafeTimestampCastV1 [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64 >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink [GOOD] >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink >> KqpStats::StreamLookupStats-StreamLookupJoin [GOOD] >> KqpStats::SysViewCancelled >> THealthCheckTest::Issues100GroupsListing [GOOD] >> THealthCheckTest::Issues100VCardListing >> 
KqpExplain::CompoundKeyRange [GOOD] >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes [GOOD] >> THealthCheckTest::ShardsLimit999 >> KqpExplain::ExplainDataQuery >> KqpParams::ImplicitParameterTypes [GOOD] >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck >> KqpQuery::NoEvaluate [GOOD] >> KqpQuery::OlapCreateAsSelect_Complex |73.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |73.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql >> TSubscriberTest::StrongNotificationAfterCommit >> KqpLimits::BigParameter [GOOD] >> KqpLimits::CancelAfterRoTx >> KqpStats::JoinNoStatsScan [GOOD] >> KqpStats::DeferredEffects+UseSink >> TVPatchTests::PatchPartGetError >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> KqpQuery::QueryCache [GOOD] >> KqpQuery::Pure >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] >> KqpQuery::RandomNumber [GOOD] >> KqpQuery::RandomUuid >> KqpQuery::GenericQueryNoRowsLimit [GOOD] >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows >> KqpQuery::YqlTableSample [GOOD] >> KqpQuery::UpdateWhereInSubquery >> KqpQuery::QueryCancelWrite [GOOD] >> KqpQuery::QueryCancelWriteImmediate >> TVPatchTests::PatchPartGetError [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] Test command err: 2025-03-28T08:59:56.104490Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T08:59:56.106882Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-03-28T08:59:56.106976Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-03-28T08:59:56.107028Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-03-28T08:59:56.107074Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-03-28T08:59:56.107149Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-03-28T08:59:56.107187Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T08:59:56.107240Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-03-28T08:59:56.107273Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T08:59:56.107579Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-03-28T08:59:56.107660Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-03-28T08:59:56.107707Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Update to strong state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T08:59:56.107872Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-03-28T08:59:56.107921Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-03-28T08:59:56.107955Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ShardsLimit905 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartGetError [GOOD] Test command err: Recv 65537 2025-03-28T08:59:57.936937Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-28T08:59:57.940185Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-03-28T08:59:57.950752Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-03-28T08:59:57.952743Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-03-28T08:59:57.953183Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-28T08:59:57.963003Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VGetResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-03-28T08:59:57.963741Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> KqpLimits::StreamWrite+Allowed [GOOD] >> 
KqpLimits::StreamWrite-Allowed >> KqpQuery::QueryClientTimeoutPrecompiled [GOOD] >> KqpQuery::QueryExplain >> KqpQuery::CurrentUtcTimestamp [GOOD] >> KqpQuery::CreateAsSelect_BadCases >> KqpQuery::QueryCacheInvalidate [GOOD] >> KqpQuery::QueryCachePermissionsLoss >> TargetDiscoverer::Dirs [GOOD] >> KqpQuery::QueryStats+UseSink [GOOD] >> KqpQuery::QueryStats-UseSink >> KqpStats::MultiTxStatsFullExpScan [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin >> KqpLimits::WaitCAsStateOnAbort [GOOD] >> KqpLimits::WaitCAsTimeout >> KqpStats::DataQueryWithEffects-UseSink [GOOD] >> KqpStats::DataQueryMulti >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks [GOOD] >> THealthCheckTest::GreenStatusWhenCreatingGroup >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicScan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Dirs [GOOD] Test command err: 2025-03-28T08:59:42.208252Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782464537759961:2264];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:42.209806Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018f1/r3tmp/tmpuNEkl0/pdisk_1.dat 2025-03-28T08:59:47.334256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:47.343262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:47.365648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:48.567086Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782464537759961:2264];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:48.767886Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:48.781765Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:48.781905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:17155 TServer::EnableGrpc on GrpcPort 26749, node 1 2025-03-28T08:59:50.494411Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:50.494437Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:50.494443Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:50.512579Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17155 WaitRootIsUp 'Root'... 
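In the TargetDiscoverer output that follows, discovery describes the source tree under /Root and registers one replication target per table, rewriting the source path onto the replicated root (srcPath# /Root/Dir/Table becomes dstPath# /Root/Replicated/Dir/Table). A minimal sketch of that rewrite as it appears in the trace; the function name and defaults are assumptions for illustration, not YDB API:

```python
def replicated_dst_path(src_path: str,
                        src_root: str = "/Root",
                        dst_root: str = "/Root/Replicated") -> str:
    """Re-root a discovered source path under the replication destination."""
    if not src_path.startswith(src_root):
        raise ValueError(f"{src_path!r} is outside {src_root!r}")
    return dst_root + src_path[len(src_root):]

# Matches the "Add target" line in the discovery trace below.
assert replicated_dst_path("/Root/Dir/Table") == "/Root/Replicated/Dir/Table"
```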
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:53.164083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:53.181851Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:53.197939Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T08:59:53.201506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:53.499575Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743152393227, tx_id: 1 } } } 2025-03-28T08:59:53.499606Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-03-28T08:59:53.539430Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Dir, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743152393241, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-03-28T08:59:53.539470Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-03-28T08:59:53.555936Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743152393346, tx_id: 281474976710659 } }] } } 2025-03-28T08:59:53.555962Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root/Dir 2025-03-28T08:59:58.399665Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743152393346, tx_id: 281474976710659 } } } 2025-03-28T08:59:58.399693Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# 
/Root/Dir/Table 2025-03-28T08:59:58.399724Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Dir/Table, dstPath# /Root/Replicated/Dir/Table, kind# Table >> KqpQuery::OltpCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Disable >> KqpQuery::DeleteWhereInSubquery [GOOD] >> KqpQuery::DictJoin >> KqpQuery::UdfMemoryLimit [GOOD] >> KqpQuery::TryToUpdateNonExistentColumn >> TVPatchTests::FindingPartsWhenPartsAreDontExist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64 [GOOD] Test command err: Trying to start YDB, gRPC: 32174, MsgBus: 63292 2025-03-28T08:59:05.063441Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782312839044590:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:05.163933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014f8/r3tmp/tmpwR8oOf/pdisk_1.dat 2025-03-28T08:59:10.091074Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782312839044590:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:10.091502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:11.460572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:11.995959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.007638Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:12.009886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.010001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:12.029245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32174, node 1 2025-03-28T08:59:12.231025Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:12.231049Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:12.231054Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:12.231166Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63292 TClient is connected to server localhost:63292 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:17.425136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:17.721376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:19.564432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:21.938590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.499494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:24.138804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782398738392189:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:24.138894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:25.092054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.131040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.251996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.363051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.511830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.649605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.023616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782407328327330:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.023677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.026189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782407328327335:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.083404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:26.202651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782407328327337:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:26.281213Z node 1 :TX_PROXY ERROR: Actor# [1:7486782407328327398:3511] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:26.995014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:26.995041Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:29.573233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
: Warning: Optimization, code: 1070
:3:29: Warning: Unsafe conversion integral value to Timestamp, consider using date types, code: 1102 Trying to start YDB, gRPC: 17844, MsgBus: 20739 2025-03-28T08:59:34.146485Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782438679757518:2056];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:34.146527Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014f8/r3tmp/tmpZxkAYB/pdisk_1.dat 2025-03-28T08:59:34.614333Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:34.627558Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:34.627655Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:34.632059Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17844, node 2 2025-03-28T08:59:34.859232Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:34.859257Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:34.859263Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:34.859377Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20739 2025-03-28T08:59:39.148290Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782438679757518:2056];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:39.149586Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:20739 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:39.598652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
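The issues these KQP tests print share one textual shape: `:<row>:<col>: <severity>: <message>[, code: <n>]`. Earlier in this test's output, code 1102 (unsafe integral-to-Timestamp conversion) is only a warning under the V0 cast test, while the V1 variant further below fails compilation outright with a type-annotation error (code 1030). A minimal sketch for classifying such issue lines, assuming only the format visible in this log:

```python
import re

# Codes seen in this run: 1070 (optimization warning),
# 1102 (unsafe timestamp conversion), 1030 (type annotation error).
ISSUE = re.compile(r":(\d+):(\d+): (Warning|Error): (.*?)(?:, code: (\d+))?$")

def parse_issue(line: str):
    """Return position, severity, message and optional issue code, or None."""
    m = ISSUE.search(line)
    if not m:
        return None
    row, col, severity, message, code = m.groups()
    return {"pos": (int(row), int(col)), "severity": severity,
            "message": message, "code": int(code) if code else None}

print(parse_issue(":3:29: Warning: Unsafe conversion integral value to "
                  "Timestamp, consider using date types, code: 1102"))
# {'pos': (3, 29), 'severity': 'Warning', 'message': 'Unsafe conversion
#  integral value to Timestamp, consider using date types', 'code': 1102}
```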
2025-03-28T08:59:39.932465Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T08:59:39.975011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:40.887120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:42.377913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:43.005012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:47.866923Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782494514334172:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:47.867392Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:47.954603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:48.006158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:48.176046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:48.262717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:48.312716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:48.389983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:48.507007Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782498809301996:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:48.507091Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:48.507440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782498809302001:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:48.511058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:48.531500Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-28T08:59:48.532524Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782498809302003:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T08:59:48.590544Z node 2 :TX_PROXY ERROR: Actor# [2:7486782498809302061:3475] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:49.643278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:49.643298Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:50.685255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T08:59:50.768808Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486782507399237016:2514], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. 2025-03-28T08:59:50.770259Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjZiMjFiMWQtMzkxMjFiMWYtYjJiYzE0ZjMtMzUzMDg3MDI=, ActorId: [2:7486782507399236923:2501], ActorState: ExecuteState, TraceId: 01jqdzn8jr93pan10vbren8tgp, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:25: Error: At function: AsList
:6:46: Error: At function: AsStruct
:3:29: Error: At function: Just, At function: UnsafeTimestampCast
:3:29: Error: Unsafe timestamp cast restricted from SQL v1. Trying to start YDB, gRPC: 13274, MsgBus: 30543 2025-03-28T08:59:52.974091Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486782519804583971:2219];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:52.974530Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014f8/r3tmp/tmpGeSv6C/pdisk_1.dat 2025-03-28T08:59:53.249317Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:53.264176Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:53.264269Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:53.267963Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13274, node 3 2025-03-28T08:59:53.454178Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:53.454205Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:53.454213Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:53.454353Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30543 TClient is connected to server localhost:30543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T08:59:54.553175Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T08:59:57.966444Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486782519804583971:2219];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:57.966507Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:03.436765Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782567049224758:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:03.436825Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:03.538513Z node 3 :TX_PROXY ERROR: Actor# [3:7486782567049224779:2329] txid# 281474976715658, issues: { message: "Type \'Datetime64\' specified for column \'DatetimePK\', but support for new date/time 64 types is disabled (EnableTableDatetime64 feature flag is off)" severity: 1 } >> TVPatchTests::FindingPartsWhenPartsAreDontExist [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists >> TVPatchTests::FullPatchTest [GOOD] >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] >> KqpParams::MissingParameter [GOOD] >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] >> KqpLimits::ManyPartitions [GOOD] >> KqpLimits::ManyPartitionsSorting |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] Test command err: Recv 65537 2025-03-28T09:00:10.052238Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-28T09:00:10.059211Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-03-28T09:00:10.059295Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-03-28T09:00:10.059422Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2025-03-28T09:00:10.149226Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-28T09:00:10.150563Z node 2 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-03-28T09:00:10.150628Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-03-28T09:00:10.150862Z node 2 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2025-03-28T09:00:10.150927Z node 2 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-03-28T09:00:10.151003Z node 2 :BS_VDISK_PATCH DEBUG: 
{BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts |73.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |73.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base >> KqpLimits::KqpMkqlMemoryLimitException [GOOD] >> KqpLimits::DatashardProgramSize+useSink >> THealthCheckTest::RedGroupIssueOnRedSpace [GOOD] >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues >> KqpQuery::RandomUuid [GOOD] |73.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |73.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |74.0%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus [GOOD] >> THealthCheckTest::NoStoragePools |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpQuery::Pure [GOOD] >> KqpLimits::StreamWrite-Allowed [GOOD] >> KqpLimits::TooBigColumn+useSink >> TOosLogicTests::RenderHtml [GOOD] >> TVPatchTests::FindingPartsWhenError >> TVPatchTests::PatchPartFastXorDiffDisorder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::RandomUuid [GOOD] Test command err: Trying to start YDB, gRPC: 7657, MsgBus: 8779 2025-03-28T08:59:08.660260Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782326668181203:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:08.660618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b4/r3tmp/tmpr0nG5Y/pdisk_1.dat 2025-03-28T08:59:11.960256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.165509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.165599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:12.170746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7657, node 1 2025-03-28T08:59:12.858361Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:12.858388Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:12.858398Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:12.874783Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:12.890888Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:14.541158Z 
node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782326668181203:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:14.691384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:8779 TClient is connected to server localhost:8779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:22.857314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.921694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.498422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.663751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.753983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:27.155086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:27.155103Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:29.399153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782408272561635:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:29.755898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:29.901960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.042061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.158937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.613709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.796764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.928373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:31.208687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782425452431399:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:31.208768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:31.215157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782425452431404:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:31.248883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:31.356045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782425452431406:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:31.438085Z node 1 :TX_PROXY ERROR: Actor# [1:7486782425452431466:3503] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:34.059018Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWU1YmY0MmEtZGI5MDZmNjMtMmQwYjEwOGMtNDRjZWZmODA=, ActorId: [1:7486782434042366330:2509], ActorState: ExecuteState, TraceId: 01jqdzmr7p6r85njm656h6h1xj, Create QueryResponse for error on request, msg:
: Error: Request timeout 50ms exceeded
: Error: Cancelling after 51ms during compilation Trying to start YDB, gRPC: 24259, MsgBus: 6105 2025-03-28T08:59:39.526139Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782461852984964:2189];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:39.958550Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b4/r3tmp/tmpgQoUOu/pdisk_1.dat 2025-03-28T08:59:41.444265Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:41.623237Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:41.718182Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:41.718494Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:41.737901Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24259, node 2 2025-03-28T08:59:42.693727Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:42.693747Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:42.693754Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:42.694435Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:44.247070Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782461852984964:2189];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:44.247126Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:6105 TClient is connected to server localhost:6105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 7 ... E WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782513392594180:2422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:51.900519Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:51.991346Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.072162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.140611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.246492Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.339321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.518097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.827696Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782517687562022:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:52.827788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:52.834831Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782517687562028:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:52.841715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:52.888797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782517687562030:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T08:59:52.993223Z node 2 :TX_PROXY ERROR: Actor# [2:7486782517687562086:3488] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 3227, MsgBus: 28749 2025-03-28T08:59:57.835730Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486782541048988986:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:57.836206Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b4/r3tmp/tmphvFWFc/pdisk_1.dat 2025-03-28T08:59:59.499936Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:59.879101Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:00.044180Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:00.044270Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:00.063107Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3227, node 3 2025-03-28T09:00:01.093444Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:01.093466Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:01.093472Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:01.093796Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:02.850997Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486782541048988986:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:02.851065Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:28749 TClient is connected to server localhost:28749 WaitRootIsUp 'Root'... 
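The repeated "Resource pool default not found or you don't have access permissions" warnings above are the KQP workload service probing for the built-in pool before it exists: the first queries race with its lazy creation, which the log then shows completing (ESchemeOpCreateResourcePool, the "doublechecking" retry, and the benign "path exist, request accepts it" TX_PROXY error once the pool is already in place). Pools can also be declared explicitly; a hedged YQL sketch, assuming a YDB build with workload-management DDL enabled, using a hypothetical pool name and illustrative limits:

    -- Hypothetical pool; parameter names follow the workload-manager DDL.
    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- at most 10 queries running at once
        QUEUE_SIZE = 100              -- up to 100 queries queued beyond that
    );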
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:05.972450Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:00:06.036573Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:06.485259Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:00:08.163173Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:00:08.481152Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:00:10.689448Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782596883565629:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:10.689524Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:10.739006Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:10.839091Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:10.930202Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:10.991385Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.066970Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.129477Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.221127Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782601178533447:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:11.221202Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:11.221566Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782601178533452:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:11.225864Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:11.244608Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782601178533454:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:11.342340Z node 3 :TX_PROXY ERROR: Actor# [3:7486782601178533513:3473] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:14.099095Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:00:14.099122Z node 3 :IMPORT WARN: Table profiles were not loaded >> TVPatchTests::FindingPartsWhenError [GOOD] >> KqpParams::Decimal+QueryService-UseSink [GOOD] >> KqpParams::Decimal-QueryService+UseSink >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink [GOOD] >> KqpLimits::QueryExecTimeout >> KqpQuery::UpdateWhereInSubquery [GOOD] >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] >> KqpExplain::ExplainDataQuery [GOOD] >> KqpExplain::ExplainDataQueryWithParams >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck >> KqpQuery::QueryCancelWriteImmediate [GOOD] >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenError [GOOD] Test command err: Recv 65537 2025-03-28T09:00:16.143929Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-28T09:00:16.144709Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# ERROR ResultSize# 1 2025-03-28T09:00:16.144787Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2025-03-28T09:00:16.144962Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob [GOOD] >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] Test command err: Recv 65537 2025-03-28T09:00:16.569404Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-28T09:00:16.572059Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-03-28T09:00:16.572200Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-03-28T09:00:16.573094Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: 
received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2025-03-28T09:00:16.573320Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-03-28T09:00:16.573870Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# [XorDiff from datapart] the start of the diff at index 0 righter than the start of the diff at index 1; PrevDiffStart# 2 DiffStart# 0 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 14096, MsgBus: 22466 2025-03-28T08:59:02.630959Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782304334850022:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:02.631046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001484/r3tmp/tmpOamUWw/pdisk_1.dat 2025-03-28T08:59:05.742616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:06.803236Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:07.677723Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782304334850022:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:07.678196Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:08.497695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:08.497788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:08.498355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:08.522997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:08.760266Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14096, node 1 2025-03-28T08:59:10.688233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:10.720716Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-28T08:59:10.720735Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:10.721154Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:10.721896Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22466 TClient is connected to server localhost:22466 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:17.505840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:17.830264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:19.442235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:20.360404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:20.977099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.755614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:23.755641Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:23.948299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782394529164935:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:23.948418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:24.683248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:24.941819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.131765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.315637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.474921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.727808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.150552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782407414067384:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.150784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.152592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782407414067389:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.167018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:26.264299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782407414067391:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:26.352931Z node 1 :TX_PROXY ERROR: Actor# [1:7486782407414067450:3518] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:34.256791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14997, MsgBus: 26772 2025-03-28T08:59:40.143187Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782465154239334:2187];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:40.771055Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001484/r3tmp/tmpPD2TKp/pdisk_1.dat 2025-03-28T08:59:41.849753Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:41.999690Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:42.504941Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:42.505035Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:42.549696Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14997, node 2 2025-03-28T08:59:43.365507Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:43.374743Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:43.375038Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:43.375155Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:45.886804Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782465154239334:2187];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:45.886859Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:26772 TClient is connected to server localhost:26772 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Ver ... e, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T08:59:51.623251Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782512398881281:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:51.623314Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:51.666980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:51.781080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:51.886200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.160737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.255223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.371020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.667718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782516693849104:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:52.667781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:52.667974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782516693849109:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:52.678938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:52.719978Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782516693849111:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T08:59:52.791390Z node 2 :TX_PROXY ERROR: Actor# [2:7486782516693849168:3479] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15153, MsgBus: 3387 2025-03-28T08:59:58.538048Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486782544021660510:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:58.625780Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001484/r3tmp/tmpc3YLaI/pdisk_1.dat 2025-03-28T09:00:00.202863Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:00.299975Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:00.652100Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:00.652168Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:00.697434Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15153, node 3 2025-03-28T09:00:02.157795Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:02.157815Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:02.157822Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:02.157930Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:03.482796Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486782544021660510:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:03.482857Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:3387 TClient is connected to server localhost:3387 WaitRootIsUp 'Root'... 
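Earlier in this section, node 3 of the first test block logged: Type 'Datetime64' specified for column 'DatetimePK', but support for new date/time 64 types is disabled (EnableTableDatetime64 feature flag is off). That rejection corresponds to DDL of roughly the following shape; the schema here is a hypothetical reconstruction (only the column name DatetimePK is taken from the log), and such tables are accepted only on clusters running with the EnableTableDatetime64 feature flag turned on:

    -- Rejected while EnableTableDatetime64 is off (hypothetical schema):
    CREATE TABLE `/Root/Datetime64Test` (
        DatetimePK Datetime64,
        Value Utf8,
        PRIMARY KEY (DatetimePK)
    );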
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:07.499177Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:07.609019Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:08.307765Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:08.663377Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:09.140327Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:11.437499Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782599856237123:2422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:11.437606Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:11.503051Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.582868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.678324Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.826966Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.981382Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:12.236236Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:12.343658Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782604151204951:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:12.343725Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:12.344003Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782604151204956:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:12.347831Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:12.390958Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782604151204958:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:12.481197Z node 3 :TX_PROXY ERROR: Actor# [3:7486782604151205019:3499] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> THealthCheckTest::YellowGroupIssueOnYellowSpace [GOOD] >> THealthCheckTest::YellowIssueReadyVDisksOnFaultyPDisks
>> THealthCheckTest::Issues100VCardListing [GOOD] >> THealthCheckTest::Issues100GroupsMerging
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD]
Test command err: Recv 65537 2025-03-28T09:00:17.164055Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-28T09:00:17.165599Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-03-28T09:00:17.165669Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-03-28T09:00:17.165897Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2025-03-28T09:00:17.165986Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-03-28T09:00:17.166139Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# The diff at index 0 went beyound the blob part; DiffStart# 100 DiffEnd# 96 BlobPartSize# 32 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm
>> KqpQuery::ReadOverloaded+StreamLookup [GOOD] >> KqpQuery::ReadOverloaded-StreamLookup
>> THealthCheckTest::StorageLimit87 [GOOD] >> THealthCheckTest::StorageLimit80
>> KqpStats::DataQueryMulti [GOOD]
>> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute
>> KqpQuery::OlapCreateAsSelect_Complex [GOOD]
>> THealthCheckTest::ShardsLimit999 [GOOD] >> THealthCheckTest::ShardsLimit995
>> THealthCheckTest::StorageNoQuota [GOOD] >> THealthCheckTest::TestBootingTabletIsNotDead
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateWhereInSubquery [GOOD]
Test command err: Trying to start YDB, gRPC: 7860, MsgBus: 24560 2025-03-28T08:59:02.608600Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782300850429549:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:02.609066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014d8/r3tmp/tmp88iRmm/pdisk_1.dat 2025-03-28T08:59:07.994091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:07.994532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:08.010660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7860, node 1 2025-03-28T08:59:09.579289Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782300850429549:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:09.580055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:10.727711Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T08:59:11.060576Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:11.105254Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T08:59:11.121033Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:11.134383Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:11.134402Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:11.135141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:11.135351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:11.136204Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24560 TClient is connected to server localhost:24560 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T08:59:19.968537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:20.555880Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:20.807325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:21.944045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.861406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.099514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:25.063014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:25.063041Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:28.712391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782412519580975:2422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:28.712556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:29.622774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:29.704025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:29.813894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:29.975198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.293502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.704760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:31.095180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782425404483419:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:31.095260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:31.095720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782425404483424:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:31.105776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:31.210905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782425404483426:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:31.307236Z node 1 :TX_PROXY ERROR: Actor# [1:7486782425404483482:3523] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:34.767822Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486782438289385701:2543], status: GENERIC_ERROR, issues:
:3:26: Error: mismatched input '[' expecting {'*', '(', '@', '$', ABORT, ACTION, ADD, AFTER, ALL, ALTER, ANALYZE, AND, ANSI, ANY, ARRAY, AS, ASC, ASSUME, ASYMMETRIC, ASYNC, AT, ATTACH, ATTRIBUTES, AUTOINCREMENT, BACKUP, BATCH, COLLECTION, BEFORE, BEGIN, BERNOULLI, BETWEEN, BITCAST, BY, CALLABLE, CASCADE, CASE, CAST, CHANGEFEED, CHECK, CLASSIFIER, COLLATE, COLUMN, COLUMNS, COMMIT, COMPACT, CONDITIONAL, CONFLICT, CONNECT, CONSTRAINT, CONSUMER, COVER, CREATE, CROSS, CUBE, CURRENT, CURRENT_DATE, CURRENT_TIME, CURRENT_TIMESTAMP, DATA, DATABASE, DECIMAL, DECLARE, DEFAULT, DEFERRABLE, DEFERRED, DEFINE, DELETE, DESC, DESCRIBE, DETACH, DICT, DIRECTORY, DISABLE, DISCARD, DISTINCT, DO, DROP, EACH, ELSE, EMPTY, EMPTY_ACTION, ENCRYPTED, END, ENUM, ERASE, ERROR, ESCAPE, EVALUATE, EXCEPT, EXCLUDE, EXCLUSION, EXCLUSIVE, EXISTS, EXPLAIN, EXPORT, EXTERNAL, FAIL, FAMILY, FILTER, FIRST, FLATTEN, FLOW, FOLLOWING, FOR, FOREIGN, FROM, FULL, FUNCTION, GLOB, GLOBAL, GRANT, GROUP, GROUPING, GROUPS, HASH, HAVING, HOP, IF, IGNORE, ILIKE, IMMEDIATE, IMPORT, IN, INCREMENT, INCREMENTAL, INDEX, INDEXED, INHERITS, INITIAL, INITIALLY, INNER, INSERT, INSTEAD, INTERSECT, INTO, IS, ISNULL, JOIN, JSON_EXISTS, JSON_QUERY, JSON_VALUE, KEY, LAST, LEFT, LEGACY, LIKE, LIMIT, LIST, LOCAL, LOGIN, MANAGE, MATCH, MATCHES, MATCH_RECOGNIZE, MEASURES, MICROSECONDS, MILLISECONDS, MODIFY, NANOSECONDS, NATURAL, NEXT, NO, NOLOGIN, NOT, NOTNULL, NULL, NULLS, OBJECT, OF, OFFSET, OMIT, ON, ONE, ONLY, OPTION, OPTIONAL, OR, ORDER, OTHERS, OUTER, OVER, OWNER, PARALLEL, PARTITION, PASSING, PASSWORD, PAST, PATTERN, PER, PERMUTE, PLAN, POOL, PRAGMA, PRECEDING, PRESORT, PRIMARY, PRIVILEGES, PROCESS, QUERY, QUEUE, RAISE, RANGE, REDUCE, REFERENCES, REGEXP, REINDEX, RELEASE, REMOVE, RENAME, REPLACE, REPLICATION, RESET, RESOURCE, RESPECT, RESTART, RESTORE, RESTRICT, RESULT, RETURN, RETURNING, REVERT, REVOKE, RIGHT, RLIKE, ROLLBACK, ROLLUP, ROW, ROWS, SAMPLE, SAVEPOINT, SCHEMA, SECONDS, SEEK, SELECT, SEMI, SET, SETS, SHOW, TSKIP, SEQUENCE, SOURCE, START, STREAM, STRUCT, SUBQUERY, SUBSET, SYMBOLS, SYMMETRIC, SYNC, SYSTEM, TABLE, TABLES, TABLESAMPLE, TABLESTORE, TAGGED, TEMP, TEMPORARY, THEN, TIES, TO, TOPIC, TRANSACTION, TRANSFER, TRIGGER, TUPLE, TYPE, UNBOUNDED, UNCONDITIONAL, UNION, UNIQUE, UNKNOWN, UNMATCHED, UPDATE, UPSERT, USE, USER, USING, VACUUM, VALUES, VARIANT, VIEW, VIRTUAL, WHEN, WHERE, WINDOW, WITH, WITHOUT, WRAPPER, XOR, STRING_VALUE, ID_PLAIN, ID_QUOTED} 2025-03-28T08:59:34.768316Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjI5MjA0MzItNmE4NDI0YTMtZTgzNDdjYTAtYmUzZjhhYjI=, ActorId: [1:7486782438289385665:2534], ActorState: ExecuteState, TraceId: 01jqdzmrywdyg3h5t0gbhgy1c3, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:3:26: Error: mismatched input '[' expecting {'*', '(', '@', '$', ABORT, ACTION, ADD, AFTER, ALL, ALTER, ANALYZE, AND, ANSI, ANY, ARRAY, AS, ... SHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.724634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.797299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.868807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.941058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.989825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:53.107000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782523340435540:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:53.107131Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:53.107436Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782523340435546:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:53.111487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:53.148693Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782523340435548:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T08:59:53.205765Z node 2 :TX_PROXY ERROR: Actor# [2:7486782523340435603:3471] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:55.093632Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486782527635403214:2512], status: UNSUPPORTED, issues:
: Error: Default error
:1:15: Error: ATOM evaluation is not supported in YDB queries., code: 2030 2025-03-28T08:59:55.094936Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTExNTczMmUtZGJkNmZlNmQtNDY1ZjRkM2YtNWI5YTM5Yw==, ActorId: [2:7486782527635403205:2507], ActorState: ExecuteState, TraceId: 01jqdzncqrd43fhev16p8n09pg, ReplyQueryCompileError, status UNSUPPORTED remove tx with tx_id: Trying to start YDB, gRPC: 6534, MsgBus: 3150 2025-03-28T08:59:59.876927Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486782546157678441:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:59.877571Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014d8/r3tmp/tmpZ0ZMsW/pdisk_1.dat 2025-03-28T09:00:02.114848Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:02.708738Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:02.783656Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:02.783755Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:02.798431Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6534, node 3 2025-03-28T09:00:03.643812Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:03.643831Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:03.643837Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:03.643936Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:04.880696Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486782546157678441:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:04.880754Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:3150 TClient is connected to server localhost:3150 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:00:08.512600Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:00:08.540336Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:00:08.550412Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:08.818612Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:09.667000Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:09.765723Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:11.423864Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782597697287790:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:11.423924Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:11.470178Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.511935Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.563048Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.646544Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.742745Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.879754Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:12.556694Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782601992255639:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:12.556839Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:12.558506Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782601992255646:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:12.565931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:12.621878Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782601992255648:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:12.719578Z node 3 :TX_PROXY ERROR: Actor# [3:7486782601992255703:3503] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> TUniqueIndexTests::CreateTable
>> KqpQuery::QueryExplain [GOOD] >> KqpQuery::QueryFromSqs
>> THealthCheckTest::GreenStatusWhenInitPending [GOOD] >> THealthCheckTest::IgnoreOtherGenerations
>> KqpQuery::QueryStats-UseSink [GOOD]
>> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCancelWriteImmediate [GOOD]
Test command err: Trying to start YDB, gRPC: 12562, MsgBus: 11504 2025-03-28T08:59:05.388953Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782317947563189:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:05.388998Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001510/r3tmp/tmpxSU4Ye/pdisk_1.dat 2025-03-28T08:59:10.391907Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782317947563189:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:10.392192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:12.102918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.153308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.202477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.202584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:12.214792Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:12.221540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12562, node 1 2025-03-28T08:59:12.462035Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:12.462062Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:12.462074Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:12.462176Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11504
TClient is connected to server localhost:11504
WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:17.515103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:17.862126Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:17.929082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:19.764082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:21.295529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.078550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:24.681770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782399551943643:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:24.681880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:25.892644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.089408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.210175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.314753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.693805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:27.021420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:27.155318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:27.155338Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:27.732250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782412436846105:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:27.732322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:27.732723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782412436846110:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:27.759801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:27.838961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782412436846112:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:27.916091Z node 1 :TX_PROXY ERROR: Actor# [1:7486782412436846179:3518] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:33.139212Z node 1 :GRPC_SERVER DEBUG: [0x51b00032f180] finished request Name# CreateSession ok# true peer# ipv6:%5B::1%5D:45684 2025-03-28T08:59:33.211327Z node 1 :GRPC_SERVER DEBUG: [0x51b000260280] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=OTZhY2UwODItOGRkZTkyMTQtOTdlMGUzNC05MGE1NjFjNA==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:45698 2025-03-28T08:59:33.211774Z node 1 :GRPC_SERVER DEBUG: [0x51b000261080] created request Name# ExecuteDataQuery 2025-03-28T08:59:33.213212Z node 1 :GRPC_SERVER DEBUG: [0x51b000260280] received request without user token Name# ExecuteDataQuery data# session_id: "ydb://session/3?node_id=1&id=OTZhY2UwODItOGRkZTkyMTQtOTdlMGUzNC05MGE1NjFjNA==" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " } query_cache_policy { } operation_params { } peer# ipv6:%5B::1%5D:45698 database# /Root 2025-03-28T08:59:33.215272Z node 1 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jqdzmqex5mz32yhdw6sdc7e8, sdkBuildInfo# ydb-cpp-sdk/3.2.2, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:45698, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 2.927440s
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:12562 2025-03-28T08:59:36.181850Z node 1 :GRPC_SERVER DEBUG: [0x51b000260280] issuing response Name# ExecuteDataQuery data# operation { ready: true status: INTERNAL_ERROR issues { message: "Closing Grpc request, client should not see this message." severity: 1 } } peer# ipv6:%5B::1%5D:45698 2025-03-28T08:59:36.183021Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486782438206650331:2538] TxId: 281474976710671. Ctx: { TraceId: 01jqdzmqex5mz32yhdw6sdc7e8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTZhY2UwODItOGRkZTkyMTQtOTdlMGUzNC05MGE1NjFjNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-28T08:59:36.185126Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486782438206650340:2547], TxId: 281474976710671, task: 1. Ctx: { TraceId : 01jqdzmqex5mz32yhdw6sdc7e8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTZhY2UwODItOGRkZTkyMTQtOTdlMGUzNC05MGE1NjFjNA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486782438206650331:2538], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T08:59:36.186245Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486782438206650341:2548], TxId: 281474976710671, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=OTZhY2UwODItOGRkZTkyMTQtOTdlMGUzNC05MGE1NjFjNA==. TraceId : 01jqdzmqex5mz32yhdw6sdc7e8. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486782438206650331:2538], status: ABORTED, reason: { : Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:53.480853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:53.575844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:53.626480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:53.668496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:53.704384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:53.747075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:53.807914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:53.930840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782523107914201:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:53.930953Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:53.933001Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782523107914207:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:53.940832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:53.981709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782523107914209:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T08:59:54.088962Z node 2 :TX_PROXY ERROR: Actor# [2:7486782527402881563:3465] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9217, MsgBus: 25304 2025-03-28T08:59:59.839953Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486782548648414517:2150];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:00.419052Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001510/r3tmp/tmp4kHxeu/pdisk_1.dat 2025-03-28T09:00:02.050840Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:02.078059Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:02.078999Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:02.079055Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:02.079981Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9217, node 3 2025-03-28T09:00:02.831086Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:02.831105Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:02.831111Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:02.831222Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25304 2025-03-28T09:00:04.839323Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486782548648414517:2150];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:05.455536Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:25304 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:08.469914Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:08.486103Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:00:08.496200Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:08.602008Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:09.627661Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:00:09.813930Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:00:12.395005Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782604482991039:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:12.395553Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:12.694244Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:12.757984Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:12.819139Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:12.868330Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:12.950183Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:13.097524Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:13.259067Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782608777958865:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:13.259157Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:13.259519Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782608777958870:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:13.263061Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:13.284526Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782608777958873:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:13.377484Z node 3 :TX_PROXY ERROR: Actor# [3:7486782608777958929:3478] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
|74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector
|74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector
|74.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector
|74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication
|74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication
|74.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication
>> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries [GOOD] >> KqpParams::CheckCacheWithRecompilationQuery
>> KqpYql::EvaluateExpr1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryMulti [GOOD]
Test command err: Trying to start YDB, gRPC: 24113, MsgBus: 25871 2025-03-28T08:59:05.507179Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782313956992736:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:05.531159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b2/r3tmp/tmpj4ZsJW/pdisk_1.dat 2025-03-28T08:59:10.047565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:10.475143Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782313956992736:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:10.475183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:12.138990Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.152184Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:13.161332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:13.704804Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:13.880913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:13.881339Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:13.949357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24113, node 1 2025-03-28T08:59:15.302418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:16.989158Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:16.989177Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:16.989182Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:16.989661Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25871 TClient is connected to server localhost:25871 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:24.580002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:24.603922Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:24.618274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:25.748715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:27.510740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:28.119067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:28.119087Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:28.362009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T08:59:32.992444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782429921111577:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:33.255035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.273222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.510056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.553921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.636395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.696058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.762478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.836276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782438511046708:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.836366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.836578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782438511046713:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.840527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:34.863771Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T08:59:34.864516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782438511046715:2483], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:34.951944Z node 1 :TX_PROXY ERROR: Actor# [1:7486782438511046771:3500] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19112, MsgBus: 11045 2025-03-28T08:59:48.247542Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782501670520632:2214];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b2/r3tmp/tmphwBDTM/pdisk_1.dat 2025-03-28T08:59:48.403081Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T08:59:48.449682Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:48.474296Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:48.474379Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:48.483778Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19112, node 2 2025-03-28T08:59:48.779021Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:48.779041Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:48.779049Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:48.779153Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11045 TClient is connected to server localhost:11045 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: ... aseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:55.468046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:55.606672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:55.691609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:55.825408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.029427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.118214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.273814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.464109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782536030261175:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:56.464212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:56.464555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782536030261180:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:56.468039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:56.483137Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T08:59:56.483375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782536030261182:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:56.591019Z node 2 :TX_PROXY ERROR: Actor# [2:7486782536030261234:3469] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13423, MsgBus: 11524 2025-03-28T09:00:06.921894Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486782577249277555:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:06.938721Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b2/r3tmp/tmp7ezCw7/pdisk_1.dat 2025-03-28T09:00:08.461479Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:08.461570Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:08.463758Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13423, node 3 2025-03-28T09:00:08.500103Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:08.597256Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:08.597284Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:08.597292Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:08.597418Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11524 TClient is connected to server localhost:11524 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:10.418263Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:00:10.427555Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:00:10.451030Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:00:10.607438Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:10.858805Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:00:10.955391Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:11.914864Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486782577249277555:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:11.914913Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:13.848924Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782607314050404:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:13.849335Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:13.928562Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:14.073930Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:14.197680Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:14.312510Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:14.369027Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:14.471598Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:14.677614Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782611609018228:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:14.677703Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:14.677935Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782611609018233:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:14.683138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:14.701607Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-28T09:00:14.702113Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782611609018235:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:00:14.773454Z node 3 :TX_PROXY ERROR: Actor# [3:7486782611609018289:3464] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] Test command err: Trying to start YDB, gRPC: 7708, MsgBus: 27568 2025-03-28T08:59:02.701992Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782304585057713:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:02.702043Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014fd/r3tmp/tmpaiQzsb/pdisk_1.dat 2025-03-28T08:59:05.732138Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:07.702963Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782304585057713:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:07.703418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:08.347371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:09.223116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:09.431302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:10.411324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:10.483004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:11.055604Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:11.224052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:11.224142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:11.293499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7708, node 1 2025-03-28T08:59:12.416655Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-28T08:59:12.416674Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:12.416679Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:12.416810Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27568 TClient is connected to server localhost:27568 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:19.405386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:19.461061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:20.527730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.318890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.813804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:24.021513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782399074339831:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:24.021606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:25.092101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.334978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.411446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:25.411465Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:25.525539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.721286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.857813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.162495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.691369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782407664274976:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.691613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.693722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782407664274981:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.746974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:26.850778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782407664274983:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:26.994118Z node 1 :TX_PROXY ERROR: Actor# [1:7486782407664275051:3518] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 27820, MsgBus: 21985 2025-03-28T08:59:34.851704Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782439623921700:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:34.851751Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014fd/r3tmp/tmp9Eh6e7/pdisk_1.dat 2025-03-28T08:59:35.667666Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:35.722230Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:35.722299Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:35.738380Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27820, node 2 2025-03-28T08:59:36.424295Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:36.424497Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:36.424506Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:36.424618Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21985 2025-03-28T08:59:39.855173Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782439623921700:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:39.855696Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:21985 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version ... 
p;id=8; 2025-03-28T09:00:12.213648Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:00:12.213683Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:00:12.214196Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:00:12.223848Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:00:12.223902Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:00:12.224116Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:00:12.224143Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:00:12.224247Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:00:12.224275Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:00:12.224423Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:00:12.224449Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:00:12.224541Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:00:12.224562Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037908;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:00:12.345069Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7486782597400173648:2581];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:00:12.357813Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7486782597400173648:2581];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:00:12.358391Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037909;self_id=[3:7486782597400173648:2581];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:00:12.359029Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7486782597400173648:2581];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:00:12.359180Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7486782597400173648:2581];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:00:12.359546Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7486782597400173648:2581];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:00:12.359654Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7486782597400173648:2581];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:00:12.359745Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7486782597400173648:2581];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:00:12.360096Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7486782597400173648:2581];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:00:12.360209Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7486782597400173648:2581];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:00:12.360301Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7486782597400173648:2581];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:00:12.360408Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;self_id=[3:7486782597400173648:2581];tablet_id=72075186224037909;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:00:12.518326Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:00:12.518354Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:12.690174Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:00:12.690232Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:00:12.690345Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:00:12.690394Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:00:12.690564Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:00:12.690603Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:00:12.724520Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:00:12.724591Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:00:12.724689Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:00:12.724721Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:00:12.724760Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:00:12.724789Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:00:12.725490Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:00:12.725551Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:00:12.725765Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:00:12.725796Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:00:12.725957Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:00:12.725989Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:00:12.726176Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 
2025-03-28T09:00:12.726203Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:00:12.726373Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:00:12.726410Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037909;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:00:12.784323Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715666; 2025-03-28T09:00:12.800220Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715666; 2025-03-28T09:00:12.804433Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715666; 2025-03-28T09:00:12.807781Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715666;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715666; >> KqpLimits::ManyPartitionsSorting [GOOD] >> KqpLimits::ManyPartitionsSortingLimit >> KqpYql::TableUseBeforeCreate |74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |74.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryStats-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12880, MsgBus: 12572 2025-03-28T08:59:02.473535Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782302116917104:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:02.482328Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001469/r3tmp/tmp0GhorW/pdisk_1.dat 2025-03-28T08:59:05.495434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:05.495779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:05.660291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12880, node 1 2025-03-28T08:59:09.792887Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782302116917104:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:09.810493Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:10.591428Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:10.611058Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:10.611276Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:10.620240Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:10.633080Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12572 TClient is connected to server localhost:12572 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:19.522474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:19.835562Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:19.975627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:21.217253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.241507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.671720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:22.671741Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:22.746817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:26.873485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782405196133917:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.873597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:28.094712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:28.190749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:28.416767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:28.768558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:28.901991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:29.182523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:29.394482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782418081036367:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:29.394546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:29.394902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782418081036372:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:29.399054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:29.421340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782418081036374:2477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:29.522559Z node 1 :TX_PROXY ERROR: Actor# [1:7486782418081036430:3512] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:34.448560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28695, MsgBus: 12818 2025-03-28T08:59:43.583651Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782479252997372:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:43.583929Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001469/r3tmp/tmpjBcziz/pdisk_1.dat 2025-03-28T08:59:45.575957Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:45.739878Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:45.900795Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:45.900871Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:45.913247Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28695, node 2 2025-03-28T08:59:47.217996Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:47.223345Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:47.223358Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:47.223678Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:48.564890Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782479252997372:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:48.564940Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:12818 TClient is connected to server localhost:12818 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:50.787925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, ... propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:55.491918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:55.530436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:55.573357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:55.619996Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:55.722483Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782530792607250:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:55.722784Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:55.724434Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782530792607255:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:55.736113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:55.752316Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782530792607257:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:55.815863Z node 2 :TX_PROXY ERROR: Actor# [2:7486782530792607311:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } query_phases { duration_us: 64738 table_access { name: "/Root/EightShard" updates { rows: 3 bytes: 47 } partitions_count: 1 } table_access { name: "/Root/TwoShard" reads { rows: 3 bytes: 35 } partitions_count: 1 } cpu_time_us: 24385 affected_shards: 2 } compilation { duration_us: 639429 cpu_time_us: 616186 } process_cpu_time_us: 582 total_duration_us: 706709 total_cpu_time_us: 641153 Trying to start YDB, gRPC: 28329, MsgBus: 2298 2025-03-28T09:00:04.288076Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486782570218352183:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:04.288587Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001469/r3tmp/tmpN9IXvO/pdisk_1.dat 2025-03-28T09:00:06.925468Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:06.992045Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:06.992127Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:07.007178Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:00:07.040754Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 28329, node 3 2025-03-28T09:00:08.024704Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:08.024726Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:08.024733Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:08.024835Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2298 2025-03-28T09:00:09.276576Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486782570218352183:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:09.276656Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:2298 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:09.836796Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:09.870101Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:09.967303Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:10.209933Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:10.313048Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:14.095642Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782608873059628:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:14.095716Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:14.123826Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:14.210450Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:14.255728Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:14.336934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:14.463572Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:14.771828Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:14.905150Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782613168027459:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:14.905236Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:14.906312Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782613168027464:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:14.914596Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:14.948661Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782613168027466:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:15.050786Z node 3 :TX_PROXY ERROR: Actor# [3:7486782617462994823:3470] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } query_phases { duration_us: 17941 table_access { name: "/Root/TwoShard" reads { rows: 3 bytes: 35 } partitions_count: 1 } cpu_time_us: 3401 affected_shards: 1 } query_phases { duration_us: 6805 table_access { name: "/Root/EightShard" updates { rows: 3 bytes: 47 } partitions_count: 1 } cpu_time_us: 2310 affected_shards: 2 } compilation { duration_us: 304520 cpu_time_us: 291104 } process_cpu_time_us: 680 total_duration_us: 338956 total_cpu_time_us: 297495 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD] Test command err: Trying to start YDB, gRPC: 2449, MsgBus: 4461 2025-03-28T08:59:07.278220Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782322879467102:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:07.283542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c5/r3tmp/tmp41yZqR/pdisk_1.dat 2025-03-28T08:59:12.099968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.165116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.165194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:12.173357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:12.210431Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2449, node 1 2025-03-28T08:59:12.267653Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:12.267670Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:12.267676Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:12.267775Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:12.278776Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782322879467102:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:12.278828Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:4461 TClient is connected to server localhost:4461 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:17.547239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:18.459810Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:18.591581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:20.186007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.156116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.439890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:24.672690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782395893912958:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:24.680262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:25.788580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.917466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.049619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.156163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.273750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.510478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.917270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782404483848101:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.917335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.919089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782404483848106:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.923100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:26.982197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782404483848108:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:27.046209Z node 1 :TX_PROXY ERROR: Actor# [1:7486782408778815470:3500] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:27.147710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:27.147722Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:33.322296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7503, MsgBus: 6335 2025-03-28T08:59:40.039939Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782467595008364:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:40.039978Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c5/r3tmp/tmpRuzCg6/pdisk_1.dat 2025-03-28T08:59:42.122767Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:42.122869Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:42.161319Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7503, node 2 2025-03-28T08:59:43.379783Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:43.470123Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:43.470139Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:43.470145Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:43.470879Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:45.027134Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782467595008364:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:45.027181Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:6335 TClient is connected to server localhost:6335 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:48.420808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propos ... 74976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:48.989717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:49.212759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:52.096124Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782519134617739:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:52.096201Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:52.331340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.539165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.600454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.692343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.785426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.865822Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.990787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782519134618275:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:52.990848Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:52.994812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782519134618280:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:52.998989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:53.015679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782519134618282:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:53.079520Z node 2 :TX_PROXY ERROR: Actor# [2:7486782523429585633:3476] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19009, MsgBus: 6819 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c5/r3tmp/tmpZHPda0/pdisk_1.dat 2025-03-28T09:00:01.419461Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:01.528229Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:01.686885Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:01.686976Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:01.713838Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19009, node 3 2025-03-28T09:00:02.579075Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:02.579342Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:02.579350Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:02.579697Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6819 TClient is connected to server localhost:6819 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:08.395386Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:08.418931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:08.546083Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting...
2025-03-28T09:00:09.465755Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:00:09.653209Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:10.973519Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782593747331920:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:10.973624Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:11.057546Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.134214Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.179726Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.219175Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.263717Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.301212Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:11.404389Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782598042299730:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:11.404474Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:11.405855Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782598042299735:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:11.414518Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:11.439416Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782598042299737:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:00:11.507312Z node 3 :TX_PROXY ERROR: Actor# [3:7486782598042299793:3466] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:13.572968Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:16.024446Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:00:16.024477Z node 3 :IMPORT WARN: Table profiles were not loaded >> KqpScripting::LimitOnShard >> KqpScripting::ScriptValidate >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] >> KqpStats::DeferredEffects+UseSink [GOOD] >> KqpStats::DeferredEffects-UseSink >> KqpScripting::StreamScanQuery >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] >> TUniqueIndexTests::CreateTable [GOOD] >> THealthCheckTest::NoStoragePools [GOOD] >> THealthCheckTest::NoBscResponse |74.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |74.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |74.0%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 64004, MsgBus: 9416 2025-03-28T08:59:07.954779Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782323216464846:2265];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:07.978871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c2/r3tmp/tmpezgVnW/pdisk_1.dat 2025-03-28T08:59:12.494438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.644007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.647239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:12.753197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64004, node 1 2025-03-28T08:59:12.827621Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:12.883642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:12.883688Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:12.883695Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:12.883811Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:12.952591Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782323216464846:2265];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:12.952848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:9416 TClient is connected to server localhost:9416 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:22.805958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.834751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.185648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.505773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.584917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:27.815927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:27.815948Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:28.477145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782413410779675:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:28.516740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:30.073210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.340543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.500673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.797207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:31.079093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:31.276903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:31.690443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782426295682137:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:31.690502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:31.692535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782426295682142:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:31.717059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:31.792614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782426295682144:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:31.880231Z node 1 :TX_PROXY ERROR: Actor# [1:7486782426295682201:3509] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:38.868957Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152378128, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 12888, MsgBus: 3112 2025-03-28T08:59:46.049421Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782493329984398:2122];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:46.091837Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c2/r3tmp/tmpNUvaOV/pdisk_1.dat 2025-03-28T08:59:47.367435Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:48.206092Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:48.207806Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:48.207876Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:48.215686Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12888, node 2 2025-03-28T08:59:48.412185Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:48.412205Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:48.412212Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:48.412304Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3112 TClient is connected to server localhost:3112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T08:59:49.864529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:49.872073Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:49.896776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part prop ... main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:54.660930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:54.700509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:54.777549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:54.881046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:55.034364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:55.159660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:55.597186Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:55.878026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782531984692346:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:55.878097Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:55.879742Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782531984692352:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:55.891321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:55.924409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782531984692354:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:56.009711Z node 2 :TX_PROXY ERROR: Actor# [2:7486782536279659705:3475] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21851, MsgBus: 20876 2025-03-28T09:00:03.901970Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486782566807737286:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:04.055195Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c2/r3tmp/tmpkHorF4/pdisk_1.dat 2025-03-28T09:00:06.436372Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:06.436471Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:06.438499Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:00:06.489551Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 21851, node 3 2025-03-28T09:00:06.554614Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:07.676802Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:07.676824Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:07.676832Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:07.676939Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20876 2025-03-28T09:00:08.920495Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486782566807737286:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:08.920556Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:20876 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:09.848713Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:09.859137Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:00:09.868936Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:10.288918Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:11.556177Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:13.080388Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:15.876339Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782618347346494:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:15.876442Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:15.914015Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:15.993000Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:16.046097Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:16.087119Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:16.130336Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:16.198275Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:16.415763Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782622642314317:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:16.415839Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:16.416048Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782622642314322:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:16.421224Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:16.444021Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782622642314324:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:16.503549Z node 3 :TX_PROXY ERROR: Actor# [3:7486782622642314377:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TUniqueIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:00:19.796001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:00:19.796110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:19.796167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:00:19.796216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:00:19.796255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:00:19.796293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:00:19.796362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:19.796457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:00:19.796766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:00:19.907973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:00:19.908031Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:19.923007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:00:19.923282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:00:19.923427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:00:19.928803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:00:19.929007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:00:19.929659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:19.929904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:00:19.931804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 
72057594046678944 2025-03-28T09:00:19.933025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:19.933080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:19.933239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:00:19.933282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:19.933339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:00:19.933447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:00:19.940473Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:00:20.123483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:00:20.123881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:20.124188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:00:20.125858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:00:20.126055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:20.141161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:20.141990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:00:20.151550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:20.151624Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:00:20.151660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:00:20.151696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:00:20.167697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:20.168001Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:00:20.168253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:00:20.180168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:20.180229Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:20.180419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:20.180610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:00:20.201657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:00:20.219572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:00:20.219795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:00:20.220844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:20.221013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:20.221064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:20.221345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:00:20.221399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:20.221575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:00:20.221674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:20.227797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:20.227893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:20.228131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:20.228190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:00:20.228645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:20.228690Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 
ProgressState 2025-03-28T09:00:20.228783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:20.228817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:20.228854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:20.228887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:20.228922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:00:20.228962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:20.229012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:00:20.229055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:00:20.229132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:00:20.229164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:00:20.229283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:00:20.231206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:20.231311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:20.231349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:21.067743Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:00:21.067774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:00:21.067806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-03-28T09:00:21.068602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:00:21.068638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-03-28T09:00:21.068714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 327 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:00:21.068743Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:00:21.068790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 327 RawX2: 4294969605 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:00:21.068826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:21.068857Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:00:21.068880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:00:21.068910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-28T09:00:21.070195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:00:21.070264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:00:21.081036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:00:21.081306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:00:21.081393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:00:21.081633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:00:21.089485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:00:21.089878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:00:21.089929Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-03-28T09:00:21.090044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-03-28T09:00:21.090077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-28T09:00:21.090131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-03-28T09:00:21.090171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-28T09:00:21.090208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-03-28T09:00:21.090427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:00:21.090770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:00:21.090806Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:00:21.090874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-03-28T09:00:21.090903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-28T09:00:21.090932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-03-28T09:00:21.090965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-28T09:00:21.090992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-03-28T09:00:21.091070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:375:2343] message: TxId: 101 2025-03-28T09:00:21.091123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-28T09:00:21.091228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:00:21.091264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:00:21.091394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:00:21.091438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-03-28T09:00:21.091470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-03-28T09:00:21.091500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:00:21.091524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-03-28T09:00:21.091540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-03-28T09:00:21.091576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:00:21.093912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:00:21.093958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:376:2344] TestWaitNotification: OK eventTxId 101 2025-03-28T09:00:21.094475Z node 
1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:00:21.094738Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 270us result status StatusSuccess 2025-03-28T09:00:21.095527Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 
UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> THealthCheckTest::ShardsLimit905 [GOOD] >> THealthCheckTest::ShardsLimit800 |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] Test command err: Trying to start YDB, gRPC: 24042, MsgBus: 16397 2025-03-28T08:59:07.953914Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782324549266401:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:08.018798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014bd/r3tmp/tmpneuCFN/pdisk_1.dat 2025-03-28T08:59:12.039397Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.782497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.782617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:12.801777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:12.904964Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:13.003382Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782324549266401:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:13.003645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:13.112302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 24042, node 1 2025-03-28T08:59:14.617964Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:14.617980Z node 1 :NET_CLASSIFIER 
WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:14.617984Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:14.618235Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16397 TClient is connected to server localhost:16397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:23.430374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:27.264596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782410448613004:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:27.265117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:27.271257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782410448613031:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:27.308712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T08:59:27.565039Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T08:59:27.566594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782410448613033:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T08:59:27.685237Z node 1 :TX_PROXY ERROR: Actor# [1:7486782410448613085:2369] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:27.879292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:27.879313Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:29.247008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T08:59:29.787646Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7486782419038547853:2359];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T08:59:29.862065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7486782419038547853:2359];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T08:59:29.862242Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037891 2025-03-28T08:59:30.093056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486782419038547853:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T08:59:30.093647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486782419038547853:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T08:59:30.102936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486782419038547853:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T08:59:30.103380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486782419038547853:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T08:59:30.103765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486782419038547853:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T08:59:30.103869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486782419038547853:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T08:59:30.104166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486782419038547853:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T08:59:30.104257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486782419038547853:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T08:59:30.104572Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7486782419038547853:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T08:59:30.104665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486782419038547853:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T08:59:30.104948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486782419038547853:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T08:59:30.105228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486782419038547853:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T08:59:30.144811Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;self_id=[1:7486782419038547878:2361];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T08:59:30.464579Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7486782419038547888:2364];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T08:59:30.468114Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;self_id=[1:7486782419038547878:2361];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T08:59:30.468232Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037893 2025-03-28T08:59:30.513542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486782419038547878:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T08:59:30.513605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486782419038547878:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T08:59:30.518959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486782419038547878:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T08:59:30.519151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486782419038547878:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T08:59:30.519236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486782419038547878:2361];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T08:59:30.519463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486782419038547878:2361];tablet_id= ... ], ActorState: ExecuteState, TraceId: 01jqdzn6n9bz53svbge1q5vye8, Create QueryResponse for error on request, msg: CTAS statement can be executed only in NoTx mode. 
2025-03-28T08:59:48.845278Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037900;parent=[1:7486782440513384952:2506];fline=actor.cpp:33;event=skip_flush_writing; Trying to start YDB, gRPC: 63244, MsgBus: 21781 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014bd/r3tmp/tmp2pwKP6/pdisk_1.dat 2025-03-28T08:59:50.490837Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:50.538956Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:50.539036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:50.542989Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:50.602331Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63244, node 2 2025-03-28T08:59:50.751190Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:50.751211Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:50.751218Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:50.751320Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21781 TClient is connected to server localhost:21781 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:52.469338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:52.566539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.820135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T08:59:58.744319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782541640325690:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:58.744412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:58.751536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782541640325717:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:58.766586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-28T08:59:58.852980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782541640325719:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-28T08:59:58.960096Z node 2 :TX_PROXY ERROR: Actor# [2:7486782541640325772:2364] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:59.036652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:00.606825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21199, MsgBus: 25782 2025-03-28T09:00:09.938733Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486782588823280640:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014bd/r3tmp/tmpcNk1iG/pdisk_1.dat 2025-03-28T09:00:10.246629Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:00:10.642961Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:10.659620Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:10.659715Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:10.671897Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21199, node 3 2025-03-28T09:00:11.411858Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:11.411896Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:11.411911Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:11.412095Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25782 2025-03-28T09:00:14.326737Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486782588823280640:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:15.330764Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:25782 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:15.996156Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:16.002118Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:00:19.636857Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782631772954109:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:19.636962Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782631772954125:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:19.637008Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:19.644726Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:00:19.668860Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:00:19.669325Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782631772954128:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:00:19.761775Z node 3 :TX_PROXY ERROR: Actor# [3:7486782631772954182:2351] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:19.803024Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:00:20.167286Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486782636067921615:2362], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:5:49: Error: Creating table with data is not supported. 2025-03-28T09:00:20.169062Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTg4MzY5ZDctMjgwNjE2ZDAtNzljY2RiY2EtOWUyMWZmYTE=, ActorId: [3:7486782636067921613:2361], ActorState: ExecuteState, TraceId: 01jqdzp583f2x1xtsbr36rsjc1, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> KqpStats::OneShardNonLocalExec+UseSink [GOOD] >> KqpStats::OneShardNonLocalExec-UseSink >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad >> KqpQuery::DictJoin [GOOD] >> TColumnShardTestSchema::HotTiersTtl >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] >> KqpLimits::WaitCAsTimeout [GOOD] >> KqpParams::BadParameterType >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::ParameterTypes >> KqpQuery::QueryCachePermissionsLoss [GOOD] |74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |74.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> KqpStats::JoinStatsBasicScan [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DictJoin [GOOD] Test command err: Trying to start YDB, gRPC: 25568, MsgBus: 19386 2025-03-28T08:59:08.178719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782329402114829:2135];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:08.359095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014d7/r3tmp/tmpZLupSj/pdisk_1.dat 2025-03-28T08:59:12.183237Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:13.814283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:13.826761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:13.949860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25568, node 1 2025-03-28T08:59:15.200746Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782329402114829:2135];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:15.201008Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:15.795062Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:16.031151Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:16.031168Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-03-28T08:59:16.031173Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:16.035084Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19386 TClient is connected to server localhost:19386 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:24.610312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:24.690324Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:24.744124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:26.569178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:27.869942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:28.005818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:28.453596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:28.453615Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:33.674788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782436776299058:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:33.675074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.964509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:35.077983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:35.215233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:35.395722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:35.512514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:35.644519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:36.435217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782449661201506:2500], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:36.435487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:36.437464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782449661201512:2503], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:36.455719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:36.518297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782449661201514:2504], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:36.593852Z node 1 :TX_PROXY ERROR: Actor# [1:7486782449661201565:3535] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:42.738721Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486782475431005701:2568], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2025-03-28T08:59:42.739698Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTQ5ZmVjODItMzllZjcwZjItMjRiMzliZWUtNTA0NTkyZGE=, ActorId: [1:7486782475431005675:2561], ActorState: ExecuteState, TraceId: 01jqdzn0q72jwgax6hgxrmahfk, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2025-03-28T08:59:42.906654Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486782475431005727:2571], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2025-03-28T08:59:42.907705Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTQ5ZmVjODItMzllZjcwZjItMjRiMzliZWUtNTA0NTkyZGE=, ActorId: [1:7486782475431005675:2561], ActorState: ExecuteState, TraceId: 01jqdzn0x85cjecrfk07spyqde, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2025-03-28T08:59:43.157298Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486782479725973049:2575], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 2025-03-28T08:59:43.162984Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTQ5ZmVjODItMzllZjcwZjItMjRiMzliZWUtNTA0NTkyZGE=, ActorId: [1:7486782475431005675:2561], ActorState: ExecuteState, TraceId: 01jqdzn134apq7yxg1czp3wvxp, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 Trying to start YDB, gRPC: 24447, MsgBus: 12773 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014d7/r3tmp/tmpcqh8Mi/pdisk_1.dat 2025-03-28T08:59:49.795218Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:50.134883Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:50.197261Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:50.197430Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:50.205321Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24447, node 2 2025-03-28T08:59:50.589256Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:50.589277Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:50.589283Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:50.589386Z node 2 :NET_CLASSIFIER ERROR: got b ... ng... 2025-03-28T08:59:58.047138Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782544070386718:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:58.047619Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:58.352485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:58.623620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:58.780048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:58.847170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:58.903603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:59.037323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:59.406070Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782548365354569:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:59.406151Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:59.408152Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782548365354575:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:59.413450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:59.458788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782548365354577:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:59.532105Z node 2 :TX_PROXY ERROR: Actor# [2:7486782548365354632:3470] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:05.103422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:00:05.103438Z node 2 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 21605, MsgBus: 26035 2025-03-28T09:00:09.824884Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486782588761749848:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:10.334556Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014d7/r3tmp/tmpBmDuwC/pdisk_1.dat 2025-03-28T09:00:11.163630Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:11.181916Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:11.181984Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:11.187859Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21605, node 3 2025-03-28T09:00:12.275948Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:12.276174Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:12.276180Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:12.276493Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26035 2025-03-28T09:00:14.747461Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486782588761749848:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:14.747524Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:26035 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:15.908033Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:15.913232Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:00:15.924729Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:16.017384Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:16.435802Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:16.649707Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:19.483928Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782631711424442:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:19.484006Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:19.557002Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:19.609697Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:19.659668Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:19.731855Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:19.795455Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:19.873281Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:19.976336Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782631711424964:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:19.976441Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:19.982880Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782631711424969:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:19.989747Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:20.026419Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782631711424971:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:00:20.082633Z node 3 :TX_PROXY ERROR: Actor# [3:7486782636006392321:3470] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TColumnShardTestSchema::OneTier ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCachePermissionsLoss [GOOD] Test command err: Trying to start YDB, gRPC: 6166, MsgBus: 16127 2025-03-28T08:59:08.487682Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782329617404900:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:08.487901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c4/r3tmp/tmpFFF15u/pdisk_1.dat 2025-03-28T08:59:12.742816Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:13.478852Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782329617404900:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:13.478893Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:13.819910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:13.848056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:13.848328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:13.885543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:14.676842Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:14.678856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 6166, node 1 2025-03-28T08:59:16.049375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:16.049393Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:16.049397Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:16.049820Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16127 TClient is connected to server localhost:16127 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:23.754731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T08:59:23.782226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T08:59:24.441040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:25.605878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:25.936953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:29.421235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:29.421269Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:31.445426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782424106687261:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:31.677790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:32.760667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.042317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.133449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.301413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.388578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.525841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.657141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782436991589697:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:33.657203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:33.658258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782436991589702:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:33.672344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:33.705400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782436991589704:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:33.794358Z node 1 :TX_PROXY ERROR: Actor# [1:7486782436991589765:3513] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 12184, MsgBus: 29117 2025-03-28T08:59:48.663693Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782501477111834:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c4/r3tmp/tmpr1ET56/pdisk_1.dat 2025-03-28T08:59:48.737110Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T08:59:48.862753Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:48.876900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:48.876972Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:48.882200Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12184, node 2 2025-03-28T08:59:49.188947Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:49.188970Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:49.188978Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:49.189090Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29117 TClient is connected to server localhost:29117 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:50.299678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:50.311261Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:50.325386Z nod ... 
for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 4 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-28T09:00:21.229665Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MzkxZjQ0MDMtMzhkNWQyMzQtZjYyMzc3ZmItYWRmMGEzZWI=, ActorId: [3:7486782637789269198:2613], ActorState: ExecuteState, TraceId: 01jqdzp64re3twys1hcrn9rm7c, Create QueryResponse for error on request, msg: 2025-03-28T09:00:21.230517Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jqdzp64re3twys1hcrn9rm7c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzkxZjQ0MDMtMzhkNWQyMzQtZjYyMzc3ZmItYWRmMGEzZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:00:21.275612Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-03-28T09:00:21.699637Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486782642084236561:3940], for# user0@builtin, access# SelectRow 2025-03-28T09:00:21.699892Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715686. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-28T09:00:21.700105Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YmE4MWY5NzItOWZmMjc5YWUtNDY1MGJkZGQtODY5ZWVkODQ=, ActorId: [3:7486782642084236540:2626], ActorState: ExecuteState, TraceId: 01jqdzp6h86ctga7nxjxcybh4g, Create QueryResponse for error on request, msg: 2025-03-28T09:00:21.700818Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715687. Ctx: { TraceId: 01jqdzp6h86ctga7nxjxcybh4g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmE4MWY5NzItOWZmMjc5YWUtNDY1MGJkZGQtODY5ZWVkODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:00:22.370502Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715688. Ctx: { TraceId: 01jqdzp6y36j7h69nqnj1wygrp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZmQzMWFkOTAtNjE1OWUxYTgtOGNiYmI0ZDUtMTI5YzViYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:00:22.379586Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715689. Ctx: { TraceId: 01jqdzp6y36j7h69nqnj1wygrp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZmQzMWFkOTAtNjE1OWUxYTgtOGNiYmI0ZDUtMTI5YzViYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:00:22.382933Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486782646379203918:3963], for# user0@builtin, access# UpdateRow 2025-03-28T09:00:22.383156Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715690. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 2 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-28T09:00:22.383341Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZmQzMWFkOTAtNjE1OWUxYTgtOGNiYmI0ZDUtMTI5YzViYQ==, ActorId: [3:7486782642084236574:2636], ActorState: ExecuteState, TraceId: 01jqdzp6y36j7h69nqnj1wygrp, Create QueryResponse for error on request, msg: 2025-03-28T09:00:22.384122Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715691. Ctx: { TraceId: 01jqdzp6y36j7h69nqnj1wygrp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZmQzMWFkOTAtNjE1OWUxYTgtOGNiYmI0ZDUtMTI5YzViYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:00:22.606322Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486782646379203946:3973], for# user0@builtin, access# EraseRow 2025-03-28T09:00:22.606465Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715692. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 4 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-28T09:00:22.606716Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=N2RjM2UyYzQtNGJiM2ZkMzgtNTk5MmFiZTQtYmIzNTliNmU=, ActorId: [3:7486782646379203931:2655], ActorState: ExecuteState, TraceId: 01jqdzp7js6kgh3a09yazpfzmg, Create QueryResponse for error on request, msg: 2025-03-28T09:00:22.607343Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715693. Ctx: { TraceId: 01jqdzp7js6kgh3a09yazpfzmg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=N2RjM2UyYzQtNGJiM2ZkMzgtNTk5MmFiZTQtYmIzNTliNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:00:22.633149Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-03-28T09:00:22.923417Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486782646379203978:3988], for# user0@builtin, access# DescribeSchema 2025-03-28T09:00:22.923445Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486782646379203978:3988], for# user0@builtin, access# DescribeSchema 2025-03-28T09:00:22.929507Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486782646379203974:2668], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:00:22.934936Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDU5NWIyMDQtZTY5NzZjN2QtYjZmYmFhZDItNzFhNWVhODc=, ActorId: [3:7486782646379203969:2666], ActorState: ExecuteState, TraceId: 01jqdzp7yz4p44tmy96nw8559t, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:00:23.156714Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486782650674171298:3997], for# user0@builtin, access# DescribeSchema 2025-03-28T09:00:23.156745Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486782650674171298:3997], for# user0@builtin, access# DescribeSchema 2025-03-28T09:00:23.178739Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486782650674171294:2679], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:8:25: Error: At function: KiWriteTable!
:8:25: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:00:23.180390Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTdmOTNkYTAtZjNlN2E2NjEtZTgyYTRhNmEtMTZmMzhkZjI=, ActorId: [3:7486782646379203989:2675], ActorState: ExecuteState, TraceId: 01jqdzp8620gyvn8pepwb20ex0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:00:23.603132Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486782650674171321:4006], for# user0@builtin, access# DescribeSchema 2025-03-28T09:00:23.603157Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486782650674171321:4006], for# user0@builtin, access# DescribeSchema 2025-03-28T09:00:23.610446Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486782650674171318:2689], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:00:23.611329Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGIxMTYzZDQtOTllYzkyODgtODI5MDc3YjYtNjZjZjc1ZWE=, ActorId: [3:7486782650674171312:2687], ActorState: ExecuteState, TraceId: 01jqdzp8ka2ndrkzcyrjmngw7b, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:00:23.803848Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-03-28T09:00:24.338094Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486782654969138650:4020], for# user0@builtin, access# DescribeSchema 2025-03-28T09:00:24.338120Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486782654969138650:4020], for# user0@builtin, access# DescribeSchema 2025-03-28T09:00:24.357656Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486782654969138645:2701], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:00:24.359231Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OGQyMGYwZjEtMzVmYWQzMjctMjYxODQwMDgtZWRhYjI4NzM=, ActorId: [3:7486782654969138640:2699], ActorState: ExecuteState, TraceId: 01jqdzp9ak18m78kn2ezej83f5, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:00:25.187963Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486782659264105971:4030], for# user0@builtin, access# DescribeSchema 2025-03-28T09:00:25.187991Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486782659264105971:4030], for# user0@builtin, access# DescribeSchema 2025-03-28T09:00:25.190554Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486782659264105968:2713], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:8:25: Error: At function: KiWriteTable!
:8:25: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:00:25.191044Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWE2N2Q1MGMtMjgyYWMzYWMtZjE2MDAwNmUtYmZhODQyNWI=, ActorId: [3:7486782654969138660:2709], ActorState: ExecuteState, TraceId: 01jqdzpa4ka2vmqp73n4dwsknv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:00:25.336991Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486782659264105998:4043], for# user0@builtin, access# DescribeSchema 2025-03-28T09:00:25.337022Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486782659264105998:4043], for# user0@builtin, access# DescribeSchema 2025-03-28T09:00:25.342587Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486782659264105993:2722], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiWriteTable!
:12:30: Error: Cannot find table 'db.[/Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:00:25.343440Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDRkMTZlYTItYjE2NzNiMTgtZmQ0NGY1ZmItYmNhN2FmYmM=, ActorId: [3:7486782659264105989:2720], ActorState: ExecuteState, TraceId: 01jqdzpaascsn159m2se1r92db, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] Test command err: Trying to start YDB, gRPC: 63109, MsgBus: 1203 2025-03-28T08:59:05.066374Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782314815285464:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:05.066584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001509/r3tmp/tmpquvzdr/pdisk_1.dat 2025-03-28T08:59:08.691531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:09.860120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:10.067722Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782314815285464:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:10.067768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:11.167770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:11.490596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.066049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.066143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:12.082525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63109, node 1 2025-03-28T08:59:12.222362Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T08:59:12.222974Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T08:59:12.251871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:13.158004Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:13.160881Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:13.164941Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:13.164952Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:13.165225Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1203 TClient is connected to server localhost:1203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:23.730350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.868716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:24.136301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:25.469950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:26.133202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:27.628478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:27.628495Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:30.647148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782422189469749:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:30.732037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:32.275112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.085245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.214838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.440949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.597799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.838047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.322453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782439369339510:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.322568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.322767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782439369339518:2499], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.356227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:34.472010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782439369339520:2500], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:34.534870Z node 1 :TX_PROXY ERROR: Actor# [1:7486782439369339578:3544] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:41.940919Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486782469434111083:2567], TxId: 281474976710671, task: 1. Ctx: { TraceId : 01jqdzmxh04skbergxzzkd3fy8. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NWY1NzE0MWMtMTM2Y2QxZWUtODkzYjAxNS02MTZiYmM2YQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(17): Bad filter value. }. 2025-03-28T08:59:41.967528Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486782469434111084:2568], TxId: 281474976710671, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NWY1NzE0MWMtMTM2Y2QxZWUtODkzYjAxNS02MTZiYmM2YQ==. TraceId : 01jqdzmxh04skbergxzzkd3fy8. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486782469434111079:2555], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T08:59:42.005038Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWY1NzE0MWMtMTM2Y2QxZWUtODkzYjAxNS02MTZiYmM2YQ==, ActorId: [1:7486782460844176378:2555], ActorState: ExecuteState, TraceId: 01jqdzmxh04skbergxzzkd3fy8, Create QueryResponse for error on request, msg:
: Error: Terminate was called, reason(17): Bad filter value. Trying to start YDB, gRPC: 12078, MsgBus: 4869 2025-03-28T08:59:46.953706Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782493179603194:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:47.017277Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001509/r3tmp/tmp7wB1MD/pdisk_1.dat 2025-03-28T08:59:48.393745Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:48.400293Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:48.451389Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:48.451453Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:48.503365Z node 2 :HIVE WAR ... ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.186535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.288866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.359739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.519389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.726333Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782536129278391:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:56.726415Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:56.727930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782536129278396:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:56.758912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:56.848042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782536129278398:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:56.923857Z node 2 :TX_PROXY ERROR: Actor# [2:7486782536129278462:3476] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:03.282763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:00:03.282793Z node 2 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 24613, MsgBus: 7268 2025-03-28T09:00:10.739786Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486782596742667793:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:10.739831Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001509/r3tmp/tmpvCC1mH/pdisk_1.dat 2025-03-28T09:00:12.414835Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:12.837725Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:12.854986Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:12.855090Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:12.859973Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24613, node 3 2025-03-28T09:00:13.353524Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:13.353543Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:13.353549Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:13.353864Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7268 2025-03-28T09:00:15.743556Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486782596742667793:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:15.743606Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:7268 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:00:16.027874Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:00:16.074135Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:00:16.100368Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:16.261386Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:16.580068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:16.671451Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:19.522836Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782635397375239:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:19.522963Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:19.640908Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:19.706013Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:19.810245Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:19.865717Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:19.923678Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:20.007422Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:20.205006Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782639692343054:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:20.205173Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:20.206180Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782639692343059:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:20.211193Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:20.239662Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782639692343061:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:20.304989Z node 3 :TX_PROXY ERROR: Actor# [3:7486782639692343118:3471] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:22.575811Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486782648282278006:2507], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:3:84: Error: At function: KiUpdateTable!
:3:84: Error: Column 'NonExistentColumn' does not exist in table '/Root/KeyValue'., code: 2017 2025-03-28T09:00:22.577457Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDNjYmU4MzItZmZmMDlmYi01MTIzNDA2YS00MDdiM2ZjNA==, ActorId: [3:7486782648282277998:2502], ActorState: ExecuteState, TraceId: 01jqdzp7m5fy2ntnw7qesevf04, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: >> TColumnShardTestSchema::ExternalTTL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15430, MsgBus: 8835 2025-03-28T08:59:07.633127Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782324469578682:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:07.633162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b6/r3tmp/tmpV51Y2N/pdisk_1.dat 2025-03-28T08:59:10.587798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:11.781546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.092933Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:12.114364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.114469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:12.115952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15430, node 1 2025-03-28T08:59:12.242714Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:12.242736Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:12.242743Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:12.242849Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:12.638973Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782324469578682:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:12.639246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:8835 TClient is connected to server localhost:8835 WaitRootIsUp 'Root'... 
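Note on the KQP_COMPILE_ACTOR BAD_REQUEST a few entries above: it is the expected outcome of a negative test, not a regression. The statement references a column that is not part of the /Root/KeyValue schema, so the type-annotation phase (KiUpdateTable) rejects it with code 2017 before any transaction starts, and the session answers with ReplyQueryCompileError. A minimal YQL sketch of the kind of statement that produces this error, assuming the usual KeyValue test table with columns Key and Value (the exact query text is not shown in this log):

    UPDATE `/Root/KeyValue` SET NonExistentColumn = "value" WHERE Key = 1u;

Because compilation fails, nothing is executed, which is consistent with the empty "remove tx with tx_id:" entry in the log above.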
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:15.987840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:19.455163Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:19.464439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:21.069073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.175384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T08:59:22.762087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.369045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782401778991821:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:25.369146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.196376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.389021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.610785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.811016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.943760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:27.095263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:27.095281Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:27.224065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:28.098788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782414663894274:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:28.098875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:28.099530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782414663894279:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:28.130492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:28.271344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782414663894281:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:28.372278Z node 1 :TX_PROXY ERROR: Actor# [1:7486782414663894338:3507] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19581, MsgBus: 23616 2025-03-28T08:59:42.112211Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782474928528428:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:42.112775Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b6/r3tmp/tmpti7WAS/pdisk_1.dat 2025-03-28T08:59:43.128541Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782479223496124:2294];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:43.129159Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T08:59:45.255316Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:45.358563Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:45.360350Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:45.380314Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19581, node 2 2025-03-28T08:59:46.379697Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:46.566583Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:46.575081Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:46.594814Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:46.596854Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:47.115109Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782474928528428:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:47.115366Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:23616 2025-03-28T08:59:48.135630Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782479223496124:2294];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:48.135944Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:23616 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:48.520379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:48.529554Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:52.960180Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782517878202080:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:52.960266Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:53.016773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T08:59:53.364220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T08:59:53.647020Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782522173170740:2453], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:53.647146Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:53.648509Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782522173170745:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:53.652994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-28T08:59:53.674865Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782522173170747:2457], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-28T08:59:53.773681Z node 2 :TX_PROXY ERROR: Actor# [2:7486782522173170830:3279] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:59.938987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:59.939017Z node 2 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 19156, MsgBus: 17396 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b6/r3tmp/tmpzgPLAV/pdisk_1.dat 2025-03-28T09:00:08.639862Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:08.871577Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:08.926021Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:08.926108Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:08.932442Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19156, node 3 2025-03-28T09:00:09.765744Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:09.765895Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:09.765902Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:09.766159Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17396 TClient is connected to server localhost:17396 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:14.310273Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
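The recurring triplet in these logs — "Resource pool default not found" (NOT_FOUND), "Scheduled retry for error: Transaction ... completed, doublechecking", then the TX_PROXY "path exist, request accepts it" — is the normal first-query race around the default workload-manager pool rather than a test failure: TPoolFetcherActor finds no pool at /Root/.metadata/workload_manager/pools/default, TPoolCreatorActor creates it via ESchemeOpCreateResourcePool, and a concurrent creation attempt then observes the path already present, which the scheme shard accepts. A hedged sketch of the equivalent explicit YQL, assuming the CREATE RESOURCE POOL syntax of current YDB releases (the parameter values below are illustrative and do not come from this log):

    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- illustrative value, not from this log
        QUEUE_SIZE = 100              -- illustrative value, not from this log
    );

In these tests the pool is created implicitly on first query, so the WARN/ERROR severity of those entries is expected startup noise.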
2025-03-28T09:00:14.328974Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:00:18.794846Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782628784966802:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:18.794940Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:18.828625Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:00:19.155367Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:00:19.543044Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782633079935465:2447], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:19.543154Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:19.546193Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782633079935470:2450], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:19.551143Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-28T09:00:19.565771Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782633079935472:2451], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-28T09:00:19.646430Z node 3 :TX_PROXY ERROR: Actor# [3:7486782633079935537:3268] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:23.594049Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:00:23.594076Z node 3 :IMPORT WARN: Table profiles were not loaded |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicScan [GOOD] Test command err: Trying to start YDB, gRPC: 30347, MsgBus: 8834 2025-03-28T08:59:07.066148Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782326364775982:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:07.066188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014e9/r3tmp/tmpqQEVJN/pdisk_1.dat 2025-03-28T08:59:12.100104Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782326364775982:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:12.100399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:12.450782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.837228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.837357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:12.850017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30347, node 1 2025-03-28T08:59:14.869964Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:19.973407Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:19.973425Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:19.973620Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:19.973882Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8834 TClient is connected to server localhost:8834 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:24.338138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:24.413103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:25.640356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:26.741114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:27.081851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:28.296923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:28.297129Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:32.859560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782433738960230:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:32.859839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.370345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.473852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.530397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.568351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.666840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.746449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.825365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782442328895366:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.825442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.825654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782442328895371:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.837711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:34.865100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782442328895373:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:34.956282Z node 1 :TX_PROXY ERROR: Actor# [1:7486782442328895428:3501] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21994, MsgBus: 13606 2025-03-28T08:59:48.659834Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782500257960496:2078];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014e9/r3tmp/tmpqdlta1/pdisk_1.dat 2025-03-28T08:59:48.839551Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T08:59:48.966531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:48.966624Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:48.970740Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:48.985298Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21994, node 2 2025-03-28T08:59:49.429057Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:49.429079Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:49.429086Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:49.429199Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13606 TClient is connected to server localhost:13606 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:50.443818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T08:59:50.454363Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:50.471956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:50.637464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025 ... 5-03-28T08:59:53.663094Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:56.135694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782530322733324:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:56.136696Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:56.339891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.474065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.586908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.669988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.821883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.978722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:57.515039Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782538912668478:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:57.515326Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:57.517946Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782538912668483:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:57.525390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:57.604050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782538912668485:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:57.691065Z node 2 :TX_PROXY ERROR: Actor# [2:7486782538912668544:3490] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 20856, MsgBus: 23772 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014e9/r3tmp/tmpVvq2D6/pdisk_1.dat 2025-03-28T09:00:08.587586Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:08.950185Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:08.951788Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:08.960134Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:08.966055Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20856, node 3 2025-03-28T09:00:09.555358Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:09.555393Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:09.555407Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:09.555536Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23772 TClient is connected to server localhost:23772 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:10.562406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:10.593147Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:10.726344Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:00:11.006468Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:11.131597Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:15.087574Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782610505477801:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:15.089382Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:15.201809Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:15.353593Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:15.529529Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:15.763673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:15.859786Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:15.948511Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:16.064068Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782619095412929:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:16.064168Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:16.064394Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782619095412934:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:16.068566Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:16.083904Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782619095412936:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:16.186631Z node 3 :TX_PROXY ERROR: Actor# [3:7486782619095412993:3466] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:23.957996Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:00:23.958028Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:25.747832Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486782631980315345:2517];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:25.780882Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:25.965211Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152419064, txId: 281474976710671] shutting down >> TColumnShardTestSchema::ExportAfterFail >> TReplicationTests::Create >> TColumnShardTestSchema::HotTiersTtlWithStat |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |74.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |74.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |74.1%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut >> TReplicationTests::CreateSequential >> KqpExplain::ExplainDataQueryWithParams [GOOD] >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] >> KqpParams::DefaultParameterValue >> KqpParams::Decimal-QueryService+UseSink [GOOD] >> KqpParams::Decimal+QueryService+UseSink >> TSyncNeighborsTests::SerDes3 [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks1 [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] >> KqpQuery::QueryFromSqs [GOOD] |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes3 [GOOD] |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] >> TProxyActorTest::TestCreateSemaphore >> KqpLimits::DatashardProgramSize+useSink [GOOD] >> KqpLimits::DatashardProgramSize-useSink >> TReplicationTests::Create [GOOD] >> TReplicationTests::ConsistencyLevel >> KqpLimits::ManyPartitionsSortingLimit [GOOD] |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |74.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks >> THealthCheckTest::GreenStatusWhenCreatingGroup [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ExplainDataQueryWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 31052, MsgBus: 15957 2025-03-28T08:59:07.810110Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782325599676222:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:07.810531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b3/r3tmp/tmp6hyER6/pdisk_1.dat 2025-03-28T08:59:12.049021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.096326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.096444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:12.099176Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:12.104681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31052, node 1 2025-03-28T08:59:12.274639Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:12.274663Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:12.274669Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:12.274779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:12.786804Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782325599676222:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:12.786871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:15957 TClient is connected to server localhost:15957 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:16.913767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T08:59:17.543562Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:17.625103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:19.949015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.386246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.739371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:24.429959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782398614122097:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:24.430338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:25.620076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.749038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.971537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.184065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.333081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.583451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.990156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782407204057238:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.990225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.999384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782407204057243:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:27.022993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:27.117130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:27.117144Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:27.315363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782407204057245:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:27.387374Z node 1 :TX_PROXY ERROR: Actor# [1:7486782411499024618:3516] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":4}],"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Final"},{"Inputs":[{"InternalOperatorId":2}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate-Limit-Aggregate"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No 
estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimat ... lt_binding_0_3","ReadLimit":"1001","Name":"TableRangeScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"1","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":"4"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1_1","PlanNodeType":"ResultSet"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":15,"Plans":[{"PlanNodeId":16,"Plans":[{"PlanNodeId":17,"Plans":[{"PlanNodeId":19,"Plans":[{"PlanNodeId":20,"Plans":[{"PlanNodeId":21,"Plans":[{"PlanNodeId":22,"Plans":[{"PlanNodeId":23,"Operators":[{"Scan":"Parallel","ReadRange":["Key (20, 120]"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"Value\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_2","PlanNodeType":"ResultSet"},{"PlanNodeId":24,"Plans":[{"PlanNodeId":28,"Plans":[{"PlanNodeId":29,"Plans":[{"PlanNodeId":30,"Plans":[{"PlanNodeId":32,"Plans":[{"PlanNodeId":33,"Plans":[{"PlanNodeId":34,"Plans":[{"PlanNodeId":35,"Plans":[{"PlanNodeId":36,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"Value\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_3","PlanNodeType":"ResultSet"},{"PlanNodeId":37,"Plans":[{"PlanNodeId":41,"Plans":[{"PlanNodeId":42,"Plans":[{"PlanNodeId":43,"Plans":[{"PlanNodeId":45,"Plans":[{"PlanNodeId":46,"Plans":[{"PlanNodeId":47,"Plans":[{"PlanNodeId":48,"Plans":[{"PlanNodeId":49,"Operators":[{"Scan":"Parallel","ReadRange":["Key [10, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/KeyValue","E-Rows":"No 
estimate","Table":"KeyValue","ReadColumns":["Value"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"GroupBy":"item.Value","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"Value\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1_4","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 9669, MsgBus: 7148 2025-03-28T09:00:17.007362Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486782620230948715:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:17.008397Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b3/r3tmp/tmpJiYh24/pdisk_1.dat 2025-03-28T09:00:17.337055Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:17.337152Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:17.348964Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:17.363418Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9669, node 4 2025-03-28T09:00:17.430736Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:17.430760Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:17.430770Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:17.430913Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7148 TClient is connected to server localhost:7148 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-28T09:00:18.348187Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:00:18.382044Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:18.544479Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:19.150422Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:19.311436Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:21.999798Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486782620230948715:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:21.999862Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:26.316824Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782663180623333:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:26.316931Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:26.389380Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:26.494070Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:26.610216Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:26.840385Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:26.899977Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:27.009771Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:27.330243Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782667475591168:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:27.330341Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:27.330580Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782667475591173:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:27.336091Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:27.388495Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486782667475591175:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:27.441744Z node 4 :TX_PROXY ERROR: Actor# [4:7486782667475591228:3481] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] |74.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |74.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |74.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues >> TProxyActorTest::TestAttachSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryFromSqs [GOOD] Test command err: Trying to start YDB, gRPC: 16196, MsgBus: 7313 2025-03-28T08:59:25.518987Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782402178353456:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:25.570710Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b1/r3tmp/tmp7pZbGO/pdisk_1.dat 2025-03-28T08:59:30.491677Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782402178353456:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:30.491723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:30.881380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:32.580535Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:32.588559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:32.588576Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:32.613662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:32.613950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:32.663432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16196, node 1 2025-03-28T08:59:34.272533Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:34.272552Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:34.272558Z node 1 :NET_CLASSIFIER WARN: failed 
to initialize from file: (empty maybe) 2025-03-28T08:59:34.272654Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7313 TClient is connected to server localhost:7313 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:42.389976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:42.437072Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:42.477124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:43.992154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:45.863910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:46.228524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:47.086569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:47.086588Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:49.956738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782505257570403:2422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:49.956838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:50.738480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:50.789029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:50.823994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:50.853761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:50.893051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:50.982616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:51.356269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782513847505540:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:51.356548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:51.356842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782513847505545:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:51.363011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:51.400755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782513847505547:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:51.481197Z node 1 :TX_PROXY ERROR: Actor# [1:7486782513847505604:3513] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:53.713529Z node 1 :GRPC_SERVER DEBUG: [0x51b0000f7380] received request Name# PrepareDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=N2M1YzgyNTItMzg4OWQ5YmEtYzg0Mzc4ZmYtN2Y1ZTcyMWE=" yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " operation_params { } peer# ipv6:%5B::1%5D:55080 2025-03-28T08:59:53.713611Z node 1 :GRPC_SERVER DEBUG: [0x51b000261080] created request Name# PrepareDataQuery 2025-03-28T08:59:53.713893Z node 1 :GRPC_SERVER DEBUG: [0x51b0000f7380] received request without user token Name# PrepareDataQuery data# session_id: "ydb://session/3?node_id=1&id=N2M1YzgyNTItMzg4OWQ5YmEtYzg0Mzc4ZmYtN2Y1ZTcyMWE=" yql_text: "\n SELECT * FROM `/Root/TwoShard`;\n " operation_params { } peer# ipv6:%5B::1%5D:55080 database# /Root 2025-03-28T08:59:53.714354Z node 1 :GRPC_SERVER DEBUG: Got grpc request# PrepareDataQueryRequest, traceId# 01jqdznbfheeqed83hkenw2bxp, sdkBuildInfo# ydb-cpp-sdk/3.2.2, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:55080, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T08:59:53.973306Z node 1 :GRPC_SERVER DEBUG: [0x51b0000f7380] issuing response Name# PrepareDataQuery data# operation { ready: true status: SUCCESS result { type_url: "type.googleapis.com/Ydb.Table.PrepareQueryResult" value: "\n=ydb://preparedqueryid/4?id=d364618f-191a2cea-4eb6e5bf-ca5d607" } } peer# ipv6:%5B::1%5D:55080 2025-03-28T08:59:53.976198Z node 1 :GRPC_SERVER DEBUG: [0x51b0000f7380] finished request Name# PrepareDataQuery ok# true peer# ipv6:%5B::1%5D:55080 2025-03-28T08:59:54.124353Z node 1 :GRPC_SERVER DEBUG: [0x51b000260280] received request Name# ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=N2M1YzgyNTItMzg4OWQ5YmEtYzg0Mzc4ZmYtN2Y1ZTcyMWE=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { id: "ydb://preparedqueryid/4?id=d364618f-191a2cea-4eb6e5bf-ca5d607" } query_cache_policy { keep_in_cache: true } operation_params { } peer# ipv6:%5B::1%5D:55082 2025-03-28T08:59:54.124410Z node 1 :GRPC_SERVER DEBUG: [0x51b000261780] created request Name# ExecuteDataQuery 2025-03-28T08:59:54.125330Z node 1 :GRPC_SERVER DEBUG: [0x51b000260280] received request without user token Name# ExecuteDataQuery data# session_id: "ydb://session/3?node_id=1&id=N2M1YzgyNTItMzg4OWQ5YmEtYzg0Mzc4ZmYtN2Y1ZTcyMWE=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { id: "ydb://preparedqueryid/4?id=d364618f-191a2cea-4eb6e5bf-ca5d607" } query_cache_policy { keep_in_cache: true } operation_params { } peer# ipv6:%5B::1%5D:55082 database# /Root 2025-03-28T08:59:54.131102Z node 1 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jqdznbwd68z0mt0g4w7fe2qb, sdkBuildInfo# ydb-cpp-sdk/3.2.2, state# AS_NOT_PERFORMED, database# /Root, peer# ipv6:[::1]:55082, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 2.902614s
: Error: GRpc error: (4): Deadline Exc ... NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:15.176170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:15.248694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782616746593024:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:15.317307Z node 2 :TX_PROXY ERROR: Actor# [2:7486782616746593088:3475] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } AST: ( (let $1 (KqpTable '"/Root/Test" '"72057594046644480:9" '"" '1)) (let $2 '('"Amount" '"Comment" '"Group" '"Name")) (let $3 (Uint64 '"1001")) (let $4 (Uint32 '1)) (let $5 (KqpRowsSourceSettings $1 $2 '('('"ItemsLimit" $3) '('"Sequential" '1)) '((KqlKeyExc $4 (String '"Name")) (KqlKeyInc $4)))) (let $6 (OptionalType (DataType 'String))) (let $7 (StructType '('"Amount" (OptionalType (DataType 'Uint64))) '('"Comment" $6) '('"Group" (OptionalType (DataType 'Uint32))) '('"Name" $6))) (let $8 '('('"_logical_id" '710) '('"_id" '"25c4c620-556ec26-24fe8668-9c7bbef2") '('"_wide_channels" $7))) (let $9 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $5)) (lambda '($13) (block '( (let $14 (lambda '($15) (Member $15 '"Amount") (Member $15 '"Comment") (Member $15 '"Group") (Member $15 '"Name"))) (return (FromFlow (ExpandMap (Take (ToFlow $13) $3) $14))) ))) $8)) (let $10 (DqCnUnionAll (TDqOutput $9 '"0"))) (let $11 (DqPhyStage '($10) (lambda '($16) (FromFlow (NarrowMap (Take (ToFlow $16) $3) (lambda '($17 $18 $19 $20) (AsStruct '('"Amount" $17) '('"Comment" $18) '('"Group" $19) '('"Name" $20)))))) '('('"_logical_id" '723) '('"_id" '"78261f22-e2f14808-8323fb1d-6171b886")))) (let $12 (DqCnResult (TDqOutput $11 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($9 $11) '($12) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $7) '"0" '"0")) '('('"type" '"data_query")))) ) Plan: {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"No estimate","ReadLimit":"1001","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/Test","E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Test","reads":[{"lookup_by":["Group (1)"],"columns":["Amount","Comment","Group","Name"],"scan_by":["Name (Name, +∞)"],"limit":"1001","type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"No estimate","ReadLimit":"1001","Name":"TableRangeScan","Path":"\/Root\/Test","E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node 
Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 27244, MsgBus: 6187 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b1/r3tmp/tmpqFNgXo/pdisk_1.dat 2025-03-28T09:00:18.974938Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:19.036731Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:19.059882Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:19.059974Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:19.062379Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27244, node 3 2025-03-28T09:00:19.175288Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:19.175320Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:19.175329Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:19.175463Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6187 TClient is connected to server localhost:6187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:19.824432Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:19.847023Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:00:19.865321Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:19.999249Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:00:20.262642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:20.362799Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:27.029093Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782664049807001:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:27.029206Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:27.160579Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:27.333227Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:27.523791Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:27.601593Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:27.695620Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:27.993764Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:28.393870Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782672639742133:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:28.393942Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:28.394364Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782672639742138:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:28.404207Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:28.419957Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782672639742140:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:00:28.497011Z node 3 :TX_PROXY ERROR: Actor# [3:7486782672639742194:3478] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:29.972396Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel >> KqpYql::EvaluateExpr1 [GOOD] >> KqpYql::Discard >> KqpYql::TableUseBeforeCreate [GOOD] >> TProxyActorTest::TestCreateSemaphore [GOOD] >> THealthCheckTest::StorageLimit80 [GOOD] >> THealthCheckTest::StorageLimit50 >> TProxyActorTest::TestDisconnectWhileAttaching >> THealthCheckTest::ShardsLimit995 [GOOD] >> THealthCheckTest::ShardsNoLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitionsSortingLimit [GOOD] Test command err: Trying to start YDB, gRPC: 15501, MsgBus: 10959 2025-03-28T08:59:07.423054Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782326317787627:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:07.424322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c6/r3tmp/tmpMFgOSk/pdisk_1.dat 2025-03-28T08:59:12.107868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.741679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.741846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:12.752797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:12.807812Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782326317787627:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:12.807846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:12.918935Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15501, node 1 2025-03-28T08:59:13.234989Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T08:59:13.235890Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T08:59:14.388554Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:14.388767Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:14.388997Z 
node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:14.389640Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10959 TClient is connected to server localhost:10959 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:25.439893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:25.597618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:26.667737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:27.779159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:27.859140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:27.859159Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:28.241502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:33.247864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782433691971875:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:33.255760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.393895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.563598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.636410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.710238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.863714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:35.081077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:35.368915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782446576874327:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:35.369007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:35.375332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782446576874332:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:35.397708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:35.494580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782446576874334:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:35.561531Z node 1 :TX_PROXY ERROR: Actor# [1:7486782446576874393:3526] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:40.674741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:42.982455Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDgyZTg2ZmQtNTc0NTBlMS1mYTA5MzYwYi01ZmIxZjU1OQ==, ActorId: [1:7486782472346678901:2564], ActorState: ExecuteState, TraceId: 01jqdzn0dr9wj7nd847qjk40sp, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:999: Memory limit exception at ExecuteState, current limit is 1024 bytes.
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:999: Memory limit exception at ExecuteState, current limit is 1024 bytes. Trying to start YDB, gRPC: 16820, MsgBus: 21170 2025-03-28T08:59:48.575532Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782502535728819:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:48.837474Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c6/r3tmp/tmp70FTIM/pdisk_1.dat 2025-03-28T08:59:49.910799Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:49.970237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:49.970439Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:49.981643Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:49.983620Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16820, node 2 2025-03-28T08:59:50.512217Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:50.512236Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:50.512241Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:50.512434Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21170 TClient is connected to server localhost:21170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 P ... 
stMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"Bytes\":{\"Count\":1,\"Sum\":7689,\"Max\":7689,\"Min\":7689,\"History\":[22,356,46,356,70,680,92,840,114,1243,139,1735,190,7689]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":181000,\"Max\":181000,\"Min\":181000}},\"Name\":\"RESULT\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":189,\"Max\":189,\"Min\":189},\"Chunks\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":155,\"Max\":155,\"Min\":155},\"FirstMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"ActiveMessageMs\":{\"Count\":1,\"Max\":189,\"Min\":8},\"PauseMessageMs\":{\"Count\":1,\"Sum\":5,\"Max\":5,\"Min\":5},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":181000,\"Max\":181000,\"Min\":181000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":127303,\"Max\":127303,\"Min\":127303,\"History\":[22,19437,46,44099,70,60633,92,81579,114,97661,139,109595,190,127303]},\"WaitPeriods\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20},\"WaitMessageMs\":{\"Count\":1,\"Max\":155,\"Min\":5}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[2,1048576,190,1048576]},\"DurationUs\":{\"Count\":1,\"Sum\":186000,\"Max\":186000,\"Min\":186000},\"InputBytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"ResultRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"Tasks\":1,\"ResultBytes\":{\"Count\":1,\"Sum\":7689,\"Max\":7689,\"Min\":7689},\"OutputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"FinishedTasks\":1,\"InputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"PhysicalStageId\":1,\"StageDurationUs\":186000,\"BaseTimeMs\":1743152418518,\"OutputBytes\":{\"Count\":1,\"Sum\":7689,\"Max\":7689,\"Min\":7689},\"CpuTimeUs\":{\"Count\":1,\"Sum\":61973,\"Max\":61973,\"Min\":61973,\"History\":[2,331,22,1713,46,1767,70,8762,92,9782,114,15034,139,27662,190,61973]},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":27,\"Max\":27,\"Min\":27},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":188,\"Max\":188,\"Min\":188},\"ActiveMessageMs\":{\"Count\":1,\"Max\":188,\"Min\":4},\"FirstMessageMs\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168,\"History\":[22,694,46,694,70,1018,92,1184,114,1599,139,2097,190,8168]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":184000,\"Max\":184000,\"Min\":184000}},\"Name\":\"2\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":155,\"Max\":155,\"Min\":155},\"Chunks\":{\"Count\":1,\"Sum\":100,\"Max\":100,\"Min\":100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":155,\"Max\":155,\"Min\":155},\"FirstMessageMs\":{\"Count\":1,\"Sum\":4,\"Max\":4,\"Min\":4},\"ActiveMessageMs\":{\"Count\":1,\"Max\":155,\"Min\":4},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168,\"History\":[22,2437,46,4842,70,5736,92,6450,114,7179,139,7919,190,8168]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":151000,\"Max\":151000,\"Min\":151000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":31351,\"Max\":31351,\"Min\":31351,\"History\":[22,4611,46,10776,70,14878,92,20042,114,24015,139,26937,190,31351]},\"WaitPeriods\":{\"Count\":1,\"Sum\":22,\"Max\":22,\"Min\":22},\"WaitMessageMs\":{\"Count\":1,\"Max\":155,\"Min\":1}}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node 
Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":185111,\"CpuTimeUs\":181690},\"ProcessCpuTimeUs\":267,\"TotalDurationUs\":560593,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":167529},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/ManyShardsTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Data\",\"Key\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/ManyShardsTable\" \'\"72057594046644480:2\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Data\" \'\"Key\") \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $3 (StructType \'(\'\"Data\" (OptionalType (DataType \'Int32))) \'(\'\"Key\" (OptionalType (DataType \'Uint32)))))\n(let $4 \'(\'(\'\"_logical_id\" \'367) \'(\'\"_id\" \'\"2a285ff7-4733502c-2ec571db-477245cf\") \'(\'\"_wide_channels\" $3)))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($9) (block \'(\n (let $10 (lambda \'($11) (Member $11 \'\"Data\") (Member $11 \'\"Key\")))\n (return (FromFlow (ExpandMap (ToFlow $9) $10)))\n))) $4))\n(let $6 (DqCnMerge (TDqOutput $5 \'\"0\") \'(\'(\'1 \'\"Asc\"))))\n(let $7 (DqPhyStage \'($6) (lambda \'($12) (FromFlow (NarrowMap (ToFlow $12) (lambda \'($13 $14) (AsStruct \'(\'\"Data\" $13) \'(\'\"Key\" $14)))))) \'(\'(\'\"_logical_id\" \'379) \'(\'\"_id\" \'\"eafd431b-3eb0d045-f33e63e1-aec89838\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'(\'\"Key\" \'\"Data\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType $3) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 560593 total_cpu_time_us: 338648 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/ManyShardsTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":2},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Data\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1743152418\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"c4aedeaa-1adce276-fc490160-cc85d025\",\"version\":\"1.0\"}" Trying to start YDB, gRPC: 16820, MsgBus: 21172 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c6/r3tmp/tmprSiFWV/pdisk_1.dat 2025-03-28T09:00:21.097657Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:21.170473Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:21.226267Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:21.228377Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:21.239627Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16820, node 4 2025-03-28T09:00:21.619313Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:21.619339Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:21.619345Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:21.619471Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21172 TClient is connected to server localhost:21172 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:24.978227Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:25.007984Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:00:25.032928Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:30.789223Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782682924360288:2640], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:30.789222Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782682924360269:2637], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:30.789308Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:30.792947Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:00:30.811987Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486782682924360293:2641], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:00:30.886146Z node 4 :TX_PROXY ERROR: Actor# [4:7486782682924360406:5492] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] Test command err: Trying to start YDB, gRPC: 4069, MsgBus: 61846 2025-03-28T08:59:04.183795Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782312307681422:2144];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:04.338220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014f5/r3tmp/tmpbEkZAN/pdisk_1.dat 2025-03-28T08:59:09.078347Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782312307681422:2144];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:09.078404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:09.922926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.016735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.016815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:12.050626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4069, node 1 2025-03-28T08:59:12.364352Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:12.518593Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:12.518625Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:12.518632Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:12.518762Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61846 TClient is connected to server localhost:61846 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:19.543115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:20.304844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:21.961752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T08:59:22.807548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T08:59:22.890577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:25.542710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782402501996353:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:25.543023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.635662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.759736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.874425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.975316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:26.975336Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:27.639212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:27.691468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:27.992491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:28.683317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782415386898827:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:28.683426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:28.684861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782415386898834:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:28.720246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:28.791631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782415386898836:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:28.884238Z node 1 :TX_PROXY ERROR: Actor# [1:7486782415386898890:3513] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 30423, MsgBus: 20811 2025-03-28T08:59:39.328334Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782461571014277:2204];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014f5/r3tmp/tmprdsxaR/pdisk_1.dat 2025-03-28T08:59:40.111264Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T08:59:41.576046Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:41.615272Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:41.726398Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:41.726881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:41.740665Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30423, node 2 2025-03-28T08:59:42.879300Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:42.879320Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:42.879326Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:42.879427Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:44.151062Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782461571014277:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:44.151335Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:20811 TClient is connected to server localhost:20811 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:48.787204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:5 ... false data# peer# 2025-03-28T09:00:31.613175Z node 4 :GRPC_SERVER DEBUG: [0x51b000363280] received request Name# Coordination/CreateNode ok# false data# peer# 2025-03-28T09:00:31.613343Z node 4 :GRPC_SERVER DEBUG: [0x51b000363980] received request Name# Coordination/AlterNode ok# false data# peer# 2025-03-28T09:00:31.613522Z node 4 :GRPC_SERVER DEBUG: [0x51b000364780] received request Name# Coordination/DropNode ok# false data# peer# 2025-03-28T09:00:31.613678Z node 4 :GRPC_SERVER DEBUG: [0x51b0000b1a80] received request Name# Coordination/DescribeNode ok# false data# peer# 2025-03-28T09:00:31.613854Z node 4 :GRPC_SERVER DEBUG: [0x51b0000b2880] received request Name# CreateDatabase ok# false data# peer# 2025-03-28T09:00:31.614028Z node 4 :GRPC_SERVER DEBUG: [0x51b000259280] received request Name# GetDatabaseStatus ok# false data# peer# 2025-03-28T09:00:31.614194Z node 4 :GRPC_SERVER DEBUG: [0x51b0000b2f80] received request Name# AlterDatabase ok# false data# peer# 2025-03-28T09:00:31.614351Z node 4 :GRPC_SERVER DEBUG: [0x51b00010e680] received request Name# ListDatabases ok# false data# peer# 2025-03-28T09:00:31.614554Z node 4 :GRPC_SERVER DEBUG: [0x51b00010df80] received request Name# RemoveDatabase ok# false data# peer# 2025-03-28T09:00:31.614731Z node 4 :GRPC_SERVER DEBUG: [0x51b00010d880] received request Name# DescribeDatabaseOptions ok# false data# peer# 2025-03-28T09:00:31.614934Z node 4 :GRPC_SERVER DEBUG: [0x51b00010d180] received request Name# GetScaleRecommendation ok# false data# peer# 2025-03-28T09:00:31.615131Z node 4 :GRPC_SERVER DEBUG: [0x51b0000b1380] received request Name# ListEndpoints ok# false data# peer# 2025-03-28T09:00:31.615230Z node 4 :GRPC_SERVER DEBUG: [0x51b00010ca80] received request Name# WhoAmI ok# false data# peer# 2025-03-28T09:00:31.615410Z node 4 :GRPC_SERVER DEBUG: [0x51b00010bc80] received request Name# NodeRegistration ok# false data# peer# 2025-03-28T09:00:31.615591Z node 4 :GRPC_SERVER DEBUG: [0x51b00010b580] received request Name# Scan ok# false data# peer# 2025-03-28T09:00:31.615756Z node 4 :GRPC_SERVER DEBUG: [0x51b000103080] received request Name# GetShardLocations ok# false data# peer# 2025-03-28T09:00:31.615912Z node 4 :GRPC_SERVER DEBUG: [0x51b000102980] received request Name# DescribeTable ok# false data# peer# 2025-03-28T09:00:31.616077Z node 4 :GRPC_SERVER DEBUG: [0x51b000113380] 
received request Name# CreateSnapshot ok# false data# peer# 2025-03-28T09:00:31.616241Z node 4 :GRPC_SERVER DEBUG: [0x51b0002aa180] received request Name# RefreshSnapshot ok# false data# peer# 2025-03-28T09:00:31.616392Z node 4 :GRPC_SERVER DEBUG: [0x51b0002acb80] received request Name# DiscardSnapshot ok# false data# peer# 2025-03-28T09:00:31.616558Z node 4 :GRPC_SERVER DEBUG: [0x51b00027fa80] received request Name# List ok# false data# peer# 2025-03-28T09:00:31.616764Z node 4 :GRPC_SERVER DEBUG: [0x51b000283980] received request Name# RateLimiter/CreateResource ok# false data# peer# 2025-03-28T09:00:31.616948Z node 4 :GRPC_SERVER DEBUG: [0x51b000173780] received request Name# RateLimiter/AlterResource ok# false data# peer# 2025-03-28T09:00:31.617117Z node 4 :GRPC_SERVER DEBUG: [0x51b000174580] received request Name# RateLimiter/DropResource ok# false data# peer# 2025-03-28T09:00:31.617271Z node 4 :GRPC_SERVER DEBUG: [0x51b00032c080] received request Name# RateLimiter/ListResources ok# false data# peer# 2025-03-28T09:00:31.617415Z node 4 :GRPC_SERVER DEBUG: [0x51b00032ce80] received request Name# RateLimiter/DescribeResource ok# false data# peer# 2025-03-28T09:00:31.617570Z node 4 :GRPC_SERVER DEBUG: [0x51b00032e380] received request Name# RateLimiter/AcquireResource ok# false data# peer# 2025-03-28T09:00:31.617737Z node 4 :GRPC_SERVER DEBUG: [0x51b00038ee80] received request Name# CreateStream ok# false data# peer# 2025-03-28T09:00:31.617934Z node 4 :GRPC_SERVER DEBUG: [0x51b000390380] received request Name# ListStreams ok# false data# peer# 2025-03-28T09:00:31.618088Z node 4 :GRPC_SERVER DEBUG: [0x51b00038f580] received request Name# DeleteStream ok# false data# peer# 2025-03-28T09:00:31.618269Z node 4 :GRPC_SERVER DEBUG: [0x51b00032dc80] received request Name# DescribeStream ok# false data# peer# 2025-03-28T09:00:31.618452Z node 4 :GRPC_SERVER DEBUG: [0x51b0000d8980] received request Name# ListShards ok# false data# peer# 2025-03-28T09:00:31.618620Z node 4 :GRPC_SERVER DEBUG: [0x51b000113a80] received request Name# SetWriteQuota ok# false data# peer# 2025-03-28T09:00:31.619836Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c5980] received request Name# UpdateStream ok# false data# peer# 2025-03-28T09:00:31.620050Z node 4 :GRPC_SERVER DEBUG: [0x51b000237780] received request Name# PutRecord ok# false data# peer# 2025-03-28T09:00:31.620227Z node 4 :GRPC_SERVER DEBUG: [0x51b000237e80] received request Name# PutRecords ok# false data# peer# 2025-03-28T09:00:31.620420Z node 4 :GRPC_SERVER DEBUG: [0x51b000238c80] received request Name# GetRecords ok# false data# peer# 2025-03-28T09:00:31.620586Z node 4 :GRPC_SERVER DEBUG: [0x51b00023a180] received request Name# GetShardIterator ok# false data# peer# 2025-03-28T09:00:31.620750Z node 4 :GRPC_SERVER DEBUG: [0x51b000239380] received request Name# SubscribeToShard ok# false data# peer# 2025-03-28T09:00:31.620924Z node 4 :GRPC_SERVER DEBUG: [0x51b000245780] received request Name# DescribeLimits ok# false data# peer# 2025-03-28T09:00:31.621088Z node 4 :GRPC_SERVER DEBUG: [0x51b000247a80] received request Name# DescribeStreamSummary ok# false data# peer# 2025-03-28T09:00:31.621249Z node 4 :GRPC_SERVER DEBUG: [0x51b000247380] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer# 2025-03-28T09:00:31.621404Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d6380] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer# 2025-03-28T09:00:31.621557Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d5c80] received request Name# UpdateShardCount ok# 
false data# peer# 2025-03-28T09:00:31.621730Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d5580] received request Name# UpdateStreamMode ok# false data# peer# 2025-03-28T09:00:31.621891Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d4e80] received request Name# RegisterStreamConsumer ok# false data# peer# 2025-03-28T09:00:31.622063Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d4780] received request Name# DeregisterStreamConsumer ok# false data# peer# 2025-03-28T09:00:31.622248Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d4080] received request Name# DescribeStreamConsumer ok# false data# peer# 2025-03-28T09:00:31.622426Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d3980] received request Name# ListStreamConsumers ok# false data# peer# 2025-03-28T09:00:31.622633Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d2480] received request Name# AddTagsToStream ok# false data# peer# 2025-03-28T09:00:31.622855Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d1d80] received request Name# DisableEnhancedMonitoring ok# false data# peer# 2025-03-28T09:00:31.623030Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d1680] received request Name# EnableEnhancedMonitoring ok# false data# peer# 2025-03-28T09:00:31.623212Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d0f80] received request Name# ListTagsForStream ok# false data# peer# 2025-03-28T09:00:31.623395Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d0880] received request Name# MergeShards ok# false data# peer# 2025-03-28T09:00:31.623576Z node 4 :GRPC_SERVER DEBUG: [0x51b0003d0180] received request Name# RemoveTagsFromStream ok# false data# peer# 2025-03-28T09:00:31.623747Z node 4 :GRPC_SERVER DEBUG: [0x51b0001ec380] received request Name# SplitShard ok# false data# peer# 2025-03-28T09:00:31.623914Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c2180] received request Name# StartStreamEncryption ok# false data# peer# 2025-03-28T09:00:31.624074Z node 4 :GRPC_SERVER DEBUG: [0x51b0003c3d80] received request Name# StopStreamEncryption ok# false data# peer# 2025-03-28T09:00:31.624244Z node 4 :GRPC_SERVER DEBUG: [0x51b000114180] received request Name# SelfCheck ok# false data# peer# 2025-03-28T09:00:31.624416Z node 4 :GRPC_SERVER DEBUG: [0x51b000114880] received request Name# NodeCheck ok# false data# peer# 2025-03-28T09:00:31.624583Z node 4 :GRPC_SERVER DEBUG: [0x51b000037980] received request Name# CreateSession ok# false data# peer# 2025-03-28T09:00:31.624749Z node 4 :GRPC_SERVER DEBUG: [0x51b000038080] received request Name# DeleteSession ok# false data# peer# 2025-03-28T09:00:31.624926Z node 4 :GRPC_SERVER DEBUG: [0x51b000116b80] received request Name# AttachSession ok# false data# peer# 2025-03-28T09:00:31.625091Z node 4 :GRPC_SERVER DEBUG: [0x51b00012de80] received request Name# BeginTransaction ok# false data# peer# 2025-03-28T09:00:31.625274Z node 4 :GRPC_SERVER DEBUG: [0x51b000135c80] received request Name# CommitTransaction ok# false data# peer# 2025-03-28T09:00:31.625450Z node 4 :GRPC_SERVER DEBUG: [0x51b000136380] received request Name# RollbackTransaction ok# false data# peer# 2025-03-28T09:00:31.625618Z node 4 :GRPC_SERVER DEBUG: [0x51b000114f80] received request Name# ExecuteQuery ok# false data# peer# 2025-03-28T09:00:31.625796Z node 4 :GRPC_SERVER DEBUG: [0x51b000115d80] received request Name# ExecuteScript ok# false data# peer# 2025-03-28T09:00:31.625964Z node 4 :GRPC_SERVER DEBUG: [0x51b000116480] received request Name# FetchScriptResults ok# false data# peer# 2025-03-28T09:00:31.626158Z node 4 :GRPC_SERVER DEBUG: [0x51b000137180] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2025-03-28T09:00:31.626374Z 
node 4 :GRPC_SERVER DEBUG: [0x51b00014f980] received request Name# ChangeTabletSchema ok# false data# peer# 2025-03-28T09:00:31.626643Z node 4 :GRPC_SERVER DEBUG: [0x51b000152380] received request Name# RestartTablet ok# false data# peer# 2025-03-28T09:00:31.627890Z node 4 :GRPC_SERVER DEBUG: [0x51b000153880] received request Name# CreateLogStore ok# false data# peer# 2025-03-28T09:00:31.628101Z node 4 :GRPC_SERVER DEBUG: [0x51b000153180] received request Name# DescribeLogStore ok# false data# peer# 2025-03-28T09:00:31.628280Z node 4 :GRPC_SERVER DEBUG: [0x51b000158c80] received request Name# DropLogStore ok# false data# peer# 2025-03-28T09:00:31.628477Z node 4 :GRPC_SERVER DEBUG: [0x51b000159380] received request Name# AlterLogStore ok# false data# peer# 2025-03-28T09:00:31.628649Z node 4 :GRPC_SERVER DEBUG: [0x51b0000e4680] received request Name# CreateLogTable ok# false data# peer# 2025-03-28T09:00:31.628831Z node 4 :GRPC_SERVER DEBUG: [0x51b00003bf80] received request Name# DescribeLogTable ok# false data# peer# 2025-03-28T09:00:31.629017Z node 4 :GRPC_SERVER DEBUG: [0x51b0000e7e80] received request Name# DropLogTable ok# false data# peer# 2025-03-28T09:00:31.629179Z node 4 :GRPC_SERVER DEBUG: [0x51b00003c680] received request Name# AlterLogTable ok# false data# peer# 2025-03-28T09:00:31.629356Z node 4 :GRPC_SERVER DEBUG: [0x51b0000e8c80] received request Name# Login ok# false data# peer# 2025-03-28T09:00:31.629537Z node 4 :GRPC_SERVER DEBUG: [0x51b00003cd80] received request Name# DescribeReplication ok# false data# peer# 2025-03-28T09:00:31.629731Z node 4 :GRPC_SERVER DEBUG: [0x51b0000e5480] received request Name# DescribeView ok# false data# peer# >> TReplicationTests::ConsistencyLevel [GOOD] >> TReplicationTests::Alter >> TProxyActorTest::TestAttachSession [GOOD] |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore [GOOD] >> THealthCheckTest::IgnoreOtherGenerations [GOOD] >> THealthCheckTest::IgnoreServerlessWhenNotSpecific >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableUseBeforeCreate [GOOD] Test command err: Trying to start YDB, gRPC: 62432, MsgBus: 15462 2025-03-28T09:00:20.405652Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782637779940710:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:20.421444Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b00/r3tmp/tmpBnTYlE/pdisk_1.dat 2025-03-28T09:00:21.237509Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:21.245249Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:21.254625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:21.258992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62432, node 1 2025-03-28T09:00:21.832911Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-28T09:00:21.833179Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:21.833185Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:21.833492Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15462 2025-03-28T09:00:25.408892Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782637779940710:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:25.409513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:15462 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:29.148988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:29.162117Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:00:29.170743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:29.347458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:00:29.517711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:00:29.598668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:30.984624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782680729615322:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:30.984718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:31.287882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.350621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.438017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.505833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.570850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.661028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.750351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782685024583145:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:31.750433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:31.750655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782685024583150:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:31.755047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:31.770431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782685024583152:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:31.844758Z node 1 :TX_PROXY ERROR: Actor# [1:7486782685024583207:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:3:13: Error: At function: KiReadTable!
:3:13: Error: Cannot find table 'db.[/Root/NewTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> KqpScripting::LimitOnShard [GOOD] >> KqpScripting::NoAstSizeLimit |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> KqpScripting::ScriptValidate [GOOD] >> KqpScripting::ScriptStats >> KqpScripting::StreamScanQuery [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries >> THealthCheckTest::YellowIssueReadyVDisksOnFaultyPDisks [GOOD] >> THealthCheckTest::TestTabletIsDead >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] >> TReplicationTests::CreateInParallel [GOOD] >> TReplicationTests::CreateDropRecreate >> THealthCheckTest::Issues100GroupsMerging [GOOD] >> THealthCheckTest::Issues100VCardMerging >> TReplicationTests::Alter [GOOD] >> TReplicationTests::CannotAddReplicationConfig >> KqpStats::DeferredEffects-UseSink [GOOD] |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] Test command err: ... waiting for blocked registrations ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR ... waiting for blocked registrations (done) 2025-03-28T09:00:34.855971Z node 1 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 2 ... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] >> THealthCheckTest::TestReBootingTabletIsDead >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks >> KqpParams::ParameterTypes [GOOD] >> KqpParams::InvalidJson ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DeferredEffects-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13433, MsgBus: 28539 2025-03-28T08:59:08.429749Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782327672698653:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:08.430524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014e5/r3tmp/tmp10X6ct/pdisk_1.dat 2025-03-28T08:59:12.207008Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.793911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.794117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:12.828084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:12.897900Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-28T08:59:13.427987Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782327672698653:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:13.447024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:13.501853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 13433, node 1 2025-03-28T08:59:14.707166Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:14.707184Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:14.707197Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:14.707284Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28539 TClient is connected to server localhost:28539 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:24.662692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:25.016644Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:25.109784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:25.726112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:26.996837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:27.415170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T08:59:27.875235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:27.875252Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:33.616907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782435046882951:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:33.617019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.652453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.738947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.790761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.840888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.898951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.985532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:35.159412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782443636818070:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:35.159648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:35.160626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782443636818075:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:35.188122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:35.231872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782443636818077:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:35.306938Z node 1 :TX_PROXY ERROR: Actor# [1:7486782443636818135:3526] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:54.315479Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqdzmzy53wayb2y2mbnhxjsk", SessionId: ydb://session/3?node_id=1&id=NWRhMzk2M2EtYmQ3MTg1ZjAtYjAyYjQ4ZTItYjExNmRjNWQ=, Slow query, duration: 12.401425s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT count(*) FROM `/Root/EightShard` AS t JOIN `/Root/KeyValue` AS kv ON t.Data = kv.Key;\n ", parameters: 0b 2025-03-28T08:59:54.315800Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152385870, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 3531, MsgBus: 8940 2025-03-28T08:59:55.893242Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782529995626695:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:55.893637Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014e5/r3tmp/tmp4eZdWJ/pdisk_1.dat 2025-03-28T08:59:56.448666Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:56.480802Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:56.486826Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:56.495350Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3531, node 2 2025-03-28T08:59:57.156631Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:57.156653Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:57.156659Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:57.156776Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8940 2025-03-28T09:00:00.875811Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782529995626695:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:00.875858Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:8940 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDo ... 2025-03-28T09:00:14.795003Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:14.925570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:15.037251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:15.121361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:15.275547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:15.453307Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:15.834723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:15.920465Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782615894974872:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:15.920569Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:15.920869Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782615894974877:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:15.925601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:15.947609Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782615894974879:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:16.020002Z node 2 :TX_PROXY ERROR: Actor# [2:7486782620189942234:3509] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Warning: Type annotation, code: 1030
:3:46: Warning: At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 4985, MsgBus: 30421 2025-03-28T09:00:21.186056Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486782644271384032:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014e5/r3tmp/tmpi3mwqo/pdisk_1.dat 2025-03-28T09:00:21.360500Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:00:21.872698Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:21.988679Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:21.988763Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:22.000749Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4985, node 3 2025-03-28T09:00:22.644282Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:22.644309Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:22.644316Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:22.644457Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30421 2025-03-28T09:00:26.170766Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486782644271384032:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:26.170821Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:30421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:28.144348Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
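Warning 1108 above is YQL flagging IN applied to nullable arguments, and it names the fix itself. A minimal sketch of acting on that suggestion follows; the table and column are hypothetical placeholders, not objects from this test:

    -- With the pragma enabled, IN uses standard SQL three-valued logic:
    -- 1 IN (2, 3, NULL) evaluates to NULL instead of false.
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    SELECT *
    FROM example_table                   -- hypothetical table
    WHERE maybe_null_col IN (1, 2, 3);   -- nullable column; warning 1108 should no longer fire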
2025-03-28T09:00:28.154211Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:28.855433Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:29.530897Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:29.625312Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:31.674896Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782687221058634:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:31.675018Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:31.727404Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.799838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.849524Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.891176Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.943955Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.049529Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.132408Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782691516026453:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:32.132495Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:32.132932Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782691516026458:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:32.137842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:32.157417Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782691516026460:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:32.233662Z node 3 :TX_PROXY ERROR: Actor# [3:7486782691516026516:3473] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Warning: Type annotation, code: 1030
:3:46: Warning: At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 >> KqpParams::BadParameterType [GOOD] >> KqpParams::CheckCacheByAst >> THealthCheckTest::ShardsLimit800 [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] >> TReplicationTests::CannotAddReplicationConfig [GOOD] >> TReplicationTests::CannotSetAsyncReplicaAttribute >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks >> TDataShardLocksTest::MvccTestOooTxDoesntBreakPrecedingReadersLocks [GOOD] >> TDataShardLocksTest::MvccTestOutdatedLocksRemove [GOOD] >> TDataShardLocksTest::MvccTestBreakEdge [GOOD] >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> TKeyValueTest::TestRenameWorks >> KqpQuery::CreateAsSelect_BadCases [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks >> KqpParams::DefaultParameterValue [GOOD] |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::CreateWithoutCredentials |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] Test command err: 2025-03-28T08:59:35.729049Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782446522569346:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:35.799082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00157a/r3tmp/tmpeVV76r/pdisk_1.dat 2025-03-28T08:59:40.637166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:40.655948Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782446522569346:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:40.656001Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:43.060598Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:43.091052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:45.071938Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:45.550959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:46.340070Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:46.638440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:46.669319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:46.687325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:46.687389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:46.692239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6496, node 1 2025-03-28T08:59:48.097747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:48.097766Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:48.097773Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:48.098114Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:49.266732Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T08:59:49.290701Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T08:59:49.290738Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T08:59:49.291840Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21622, port: 21622 2025-03-28T08:59:49.291899Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T08:59:49.503216Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T08:59:49.607454Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T08:59:49.667061Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T08:59:49.670100Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T08:59:49.670294Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T08:59:49.731525Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T08:59:49.779280Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T08:59:49.786447Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****Ak2w 
(B332EE1B) () has now valid token of ldapuser@ldap 2025-03-28T08:59:57.793418Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782540722887690:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:57.793668Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00157a/r3tmp/tmpH5Nmbp/pdisk_1.dat 2025-03-28T08:59:59.675945Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:00.117912Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:00.174752Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:00.174832Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:00.180782Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3992, node 2 2025-03-28T09:00:01.076391Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:01.076419Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:01.076426Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:01.076546Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:02.699136Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:00:02.720536Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:00:02.720562Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:00:02.736984Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:8184, port: 8184 2025-03-28T09:00:02.737825Z node 2 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:00:02.981565Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:00:03.056446Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:00:03.105035Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782540722887690:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:03.105128Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:03.106514Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****8nRw (782F8400) () has now valid token of ldapuser@ldap 2025-03-28T09:00:11.201623Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486782601370720947:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:11.263675Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2909/00157a/r3tmp/tmph8e6Pm/pdisk_1.dat 2025-03-28T09:00:11.369040Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:11.391072Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:11.391145Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:11.392879Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19114, node 3 2025-03-28T09:00:11.504271Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:11.504290Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:11.504297Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:11.504404Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:11.999120Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:00:12.036240Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:00:12.036271Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:00:12.038316Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://qqq:27037 ldap://localhost:27037 ldap://localhost:11111, port: 27037 2025-03-28T09:00:12.038400Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:00:12.225139Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:00:12.297959Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:00:12.367537Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:00:12.415347Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:00:12.415413Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:00:12.512343Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:00:12.588029Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:00:12.611176Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****8gxw (80C36A55) () has now valid token of ldapuser@ldap 2025-03-28T09:00:18.625228Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486782627434709974:2107];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:18.627466Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00157a/r3tmp/tmpcWvSXt/pdisk_1.dat 2025-03-28T09:00:18.961335Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:18.981977Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:18.982060Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:18.987828Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11012, node 4 2025-03-28T09:00:19.102807Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:19.102830Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:19.102837Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:19.102981Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:19.274822Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:00:19.278117Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:00:19.278154Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:00:19.278951Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10511, port: 10511 2025-03-28T09:00:19.279039Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:00:19.291179Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:00:19.337137Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-03-28T09:00:19.383605Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****motw (9DF67771) () has now valid token of ldapuser@ldap 2025-03-28T09:00:28.022934Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486782668616580056:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:28.455975Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00157a/r3tmp/tmptW9s1Y/pdisk_1.dat 2025-03-28T09:00:29.139761Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:29.149757Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:29.149856Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:29.152685Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3017, node 5 2025-03-28T09:00:29.239586Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:29.239617Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:29.239625Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:29.239777Z node 5 :NET_CLASSIFIER ERROR: got bad 
distributable configuration 2025-03-28T09:00:29.474623Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:00:29.474981Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:00:29.475002Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:00:29.475784Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:16621, port: 16621 2025-03-28T09:00:29.475866Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:00:29.514548Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-03-28T09:00:29.559319Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:16621. Invalid credentials 2025-03-28T09:00:29.559888Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****kK5w (45B579FE) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:16621. Invalid credentials)' 2025-03-28T09:00:33.625679Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486782692755444739:2212];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00157a/r3tmp/tmpFbIXub/pdisk_1.dat 2025-03-28T09:00:33.713873Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:00:33.799116Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:33.831821Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:33.831915Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:33.840378Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17300, node 6 2025-03-28T09:00:33.897492Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:33.897515Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:33.897526Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:33.897659Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:34.122846Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:00:34.124844Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:00:34.124872Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:00:34.125673Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:25512, port: 25512 2025-03-28T09:00:34.125751Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:00:34.143776Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:00:34.187244Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:25512. 
Invalid credentials 2025-03-28T09:00:34.187659Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****laeA (97E11A61) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:25512. Invalid credentials)' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsLimit800 [GOOD] Test command err: 2025-03-28T08:59:15.329242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:15.330530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:15.330782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dc9/r3tmp/tmpykUaIu/pdisk_1.dat 2025-03-28T08:59:20.530267Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23130, node 1 TClient is connected to server localhost:10783 2025-03-28T08:59:23.639066Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:23.639125Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:23.639159Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:23.639668Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:36.232668Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:36.233541Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:36.234285Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dc9/r3tmp/tmpnG7xqp/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18478, node 3 TClient is connected to server localhost:28288 2025-03-28T08:59:53.810063Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:453:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:53.810507Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:53.810572Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dc9/r3tmp/tmpVsPady/pdisk_1.dat 2025-03-28T08:59:54.445891Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13647, node 4 TClient is connected to server localhost:23335 2025-03-28T08:59:55.632123Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:55.632207Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:55.632260Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:55.632488Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:19.032741Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:491:2410], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:19.033193Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:19.033378Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:19.034762Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:486:2155], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:19.035233Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:19.035380Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dc9/r3tmp/tmp0yQUNp/pdisk_1.dat 2025-03-28T09:00:19.503325Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21412, node 6 TClient is connected to server localhost:26315 2025-03-28T09:00:20.508551Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:20.508625Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:20.508665Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:20.509299Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:35.020831Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:699:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:35.021476Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:35.021599Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:35.022706Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:696:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:35.023060Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:35.023253Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dc9/r3tmp/tmphghrKu/pdisk_1.dat 2025-03-28T09:00:35.699077Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24476, node 8 TClient is connected to server localhost:3847 2025-03-28T09:00:36.792360Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:36.792407Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:36.792431Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:36.793354Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration >> THealthCheckTest::NoBscResponse [GOOD] >> THealthCheckTest::LayoutIncorrect >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] >> THealthCheckTest::HealthCheckConfigUpdate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 18537, MsgBus: 18880 2025-03-28T08:59:04.990177Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782313430220415:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:04.990233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014f1/r3tmp/tmp6kOnYu/pdisk_1.dat 2025-03-28T08:59:09.067233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:11.332849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:11.343980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:11.415340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:11.692933Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782313430220415:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:11.701796Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TServer::EnableGrpc on GrpcPort 18537, node 1 2025-03-28T08:59:12.464966Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.503450Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:12.503537Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:12.503543Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:12.503900Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:12.552545Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18880 TClient is connected to server localhost:18880 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:23.226572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.316995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.751402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:24.786734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:25.245572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:27.019315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:27.019333Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:30.027479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782425099371974:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:30.028040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:31.734747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:31.808703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:31.915561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:32.088576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:32.228606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:32.703668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.163685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782437984274432:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:33.163745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:33.167159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782437984274437:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:33.215217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:33.311487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782437984274439:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:33.423742Z node 1 :TX_PROXY ERROR: Actor# [1:7486782437984274498:3540] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 25108, MsgBus: 12539 2025-03-28T08:59:54.087204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:54.088693Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:54.088995Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014f1/r3tmp/tmp42gslw/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25108, node 2 2025-03-28T08:59:55.431710Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:55.433698Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:55.433755Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:55.433790Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:55.434105Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:55.476092Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:55.476221Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:55.487705Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12539 TClient is connected to server localhost:12539 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:56.483559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:56.673357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo uns ... 17.088195Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:3268:4565], TxId: 281474976715674, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZDNlYmVkMmQtZmJkY2EyOTMtM2I3OTk4MS01MzM0YjFk. CustomerSuppliedId : . TraceId : 01jqdznzt3dbx16szf2yak05n3. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:3261:4104], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-03-28T09:00:17.088656Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:3269:4566], TxId: 281474976715674, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jqdznzt3dbx16szf2yak05n3. SessionId : ydb://session/3?node_id=2&id=ZDNlYmVkMmQtZmJkY2EyOTMtM2I3OTk4MS01MzM0YjFk. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:3261:4104], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-03-28T09:00:17.089222Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:3270:4567], TxId: 281474976715674, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZDNlYmVkMmQtZmJkY2EyOTMtM2I3OTk4MS01MzM0YjFk. TraceId : 01jqdznzt3dbx16szf2yak05n3. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:3261:4104], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-03-28T09:00:17.105276Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDNlYmVkMmQtZmJkY2EyOTMtM2I3OTk4MS01MzM0YjFk, ActorId: [2:2691:4104], ActorState: ExecuteState, TraceId: 01jqdznzt3dbx16szf2yak05n3, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 13926, MsgBus: 10056 2025-03-28T09:00:25.586248Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:25.586470Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:25.586568Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014f1/r3tmp/tmp8ImoDB/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13926, node 3 2025-03-28T09:00:27.996253Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:28.007316Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:28.007382Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:28.007691Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:28.009349Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:28.080024Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:28.080621Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:28.097358Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10056 TClient is connected to server localhost:10056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:29.418091Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:29.442311Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:29.621279Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:00:30.175534Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:30.503565Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:31.277506Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1859:3456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:31.277800Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:31.300943Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.581924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.830198Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.147420Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.488093Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.928074Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:33.473165Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2413:3875], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:33.473294Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:33.473596Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2418:3880], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:33.480619Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:33.714658Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2420:3882], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:00:33.770839Z node 3 :TX_PROXY ERROR: Actor# [3:2490:3933] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:35.278140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:00:35.916974Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:00:36.762220Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:00:39.719860Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:00:39.720037Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:39.981853Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3284:4584], TxId: 281474976715674, task: 1. Ctx: { TraceId : 01jqdzppa1fj8dfc40qcew0qjh. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=NzMwY2EyODQtOTIyMmMxNDctYTljNmM1YTMtYTRlZWIwMTk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Source[0] fatal error: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. } 2025-03-28T09:00:39.982497Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3284:4584], TxId: 281474976715674, task: 1. Ctx: { TraceId : 01jqdzppa1fj8dfc40qcew0qjh. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=NzMwY2EyODQtOTIyMmMxNDctYTljNmM1YTMtYTRlZWIwMTk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED DEFAULT_ERROR: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. }. 2025-03-28T09:00:39.986628Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:3285:4585], TxId: 281474976715674, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=NzMwY2EyODQtOTIyMmMxNDctYTljNmM1YTMtYTRlZWIwMTk=. TraceId : 01jqdzppa1fj8dfc40qcew0qjh. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:3278:4123], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-03-28T09:00:39.994677Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzMwY2EyODQtOTIyMmMxNDctYTljNmM1YTMtYTRlZWIwMTk=, ActorId: [3:2708:4123], ActorState: ExecuteState, TraceId: 01jqdzppa1fj8dfc40qcew0qjh, Create QueryResponse for error on request, msg: >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelect_BadCases [GOOD] Test command err: Trying to start YDB, gRPC: 9049, MsgBus: 18919 2025-03-28T08:59:02.484725Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782302028011291:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:02.485707Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014d2/r3tmp/tmphNYUOl/pdisk_1.dat 2025-03-28T08:59:06.040627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:06.041101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:06.073524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9049, node 1 2025-03-28T08:59:09.510210Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782302028011291:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:09.519809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:09.562566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:09.751260Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:09.751281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:09.751286Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:09.752846Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:09.804932Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:18919 TClient is connected to server localhost:18919 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:12.851935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:12.879467Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:12.888939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:13.724788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:16.168401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:16.581160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.246334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:23.246353Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:23.915798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782392222326357:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:23.915950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:24.860716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.023866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.090208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.279327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.403942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.526234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.997381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782400812261513:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:25.997651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:25.998988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782400812261518:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.030631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:26.109296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782400812261520:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:26.218888Z node 1 :TX_PROXY ERROR: Actor# [1:7486782405107228876:3529] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 8918, MsgBus: 27916 2025-03-28T08:59:38.623799Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782458779839377:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:38.624052Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014d2/r3tmp/tmpxYJygT/pdisk_1.dat 2025-03-28T08:59:41.205039Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:41.460208Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:41.490538Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:41.499331Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:41.578971Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8918, node 2 2025-03-28T08:59:42.507324Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:42.507347Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:42.507352Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:42.507446Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:43.635076Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782458779839377:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:43.635351Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:27916 TClient is connected to server localhost:27916 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:48.920850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pro ... 3, TabletId: 72075186224038009 not found 2025-03-28T09:00:34.692392Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037981 not found 2025-03-28T09:00:34.692407Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037975 not found 2025-03-28T09:00:34.692423Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038013 not found 2025-03-28T09:00:34.692438Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037974 not found 2025-03-28T09:00:34.692455Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037992 not found 2025-03-28T09:00:34.692468Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038023 not found 2025-03-28T09:00:34.692483Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038019 not found 2025-03-28T09:00:34.692496Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037984 not found 2025-03-28T09:00:34.692509Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037982 not found 2025-03-28T09:00:34.692523Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038002 not found 2025-03-28T09:00:34.692537Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037969 not found 2025-03-28T09:00:34.692550Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037971 not found 2025-03-28T09:00:34.692564Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037977 not found 2025-03-28T09:00:34.692581Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037966 not found 2025-03-28T09:00:34.692596Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038000 not found 2025-03-28T09:00:34.692609Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037967 not found 2025-03-28T09:00:34.692623Z 
node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224038004 not found 2025-03-28T09:00:34.692637Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037976 not found 2025-03-28T09:00:34.692653Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037990 not found 2025-03-28T09:00:34.692669Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037986 not found 2025-03-28T09:00:34.700494Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037996;self_id=[3:7486782680152737032:3374];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.703868Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;self_id=[3:7486782680152737050:3383];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.712173Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[3:7486782680152736772:3352];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.719759Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037985;self_id=[3:7486782680152737038:3377];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.722098Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[3:7486782680152736592:3332];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.729487Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[3:7486782680152736753:3346];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.737167Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;self_id=[3:7486782684447704597:3425];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.739943Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037973;self_id=[3:7486782680152736721:3341];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.745265Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037988;self_id=[3:7486782680152737082:3400];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.746134Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[3:7486782680152737034:3375];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.748651Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;self_id=[3:7486782680152736723:3342];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.749603Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[3:7486782680152736641:3336];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.752272Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037975;self_id=[3:7486782684447704505:3420];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.756129Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037974;self_id=[3:7486782680152737168:3408];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.757224Z node 3 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037971;self_id=[3:7486782680152737127:3404];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.760193Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[3:7486782680152736582:3328];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.763789Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037979;self_id=[3:7486782680152737016:3365];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.764508Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037977;self_id=[3:7486782684447704487:3419];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.767267Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037984;self_id=[3:7486782680152737018:3366];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.768235Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037966;self_id=[3:7486782680152736643:3337];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.770723Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037976;self_id=[3:7486782680152737176:3409];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.774090Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[3:7486782680152736774:3353];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.777441Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[3:7486782680152736768:3350];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.779894Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[3:7486782680152736981:3358];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.780583Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037967;self_id=[3:7486782680152736580:3327];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.783783Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037992;self_id=[3:7486782680152737052:3384];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.787387Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[3:7486782680152736587:3330];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.790550Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037990;self_id=[3:7486782680152737080:3399];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.794380Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[3:7486782680152736972:3357];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.799810Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037986;self_id=[3:7486782680152737066:3391];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:00:34.908100Z node 3 :TX_PROXY ERROR: Actor# [3:7486782697332609589:7560] txid# 281474976710687, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 
72057594046644480, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:34.919018Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:1, at schemeshard: 72057594046644480 2025-03-28T09:00:35.132855Z node 3 :TX_PROXY ERROR: Actor# [3:7486782701627577022:7645] txid# 281474976710691, issues: { message: "Check failed: path: \'/Root/RowSrc\', error: path exist, request doesn\'t accept it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:35.133267Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDQwM2IyMzQtOGM0YWZiZWYtMzYyMjlhMzItNTI5MDVlODc=, ActorId: [3:7486782697332609564:4173], ActorState: ExecuteState, TraceId: 01jqdzpkn67y6z2ww9gb2sb773, Create QueryResponse for error on request, msg: 2025-03-28T09:00:35.471362Z node 3 :TX_PROXY ERROR: Actor# [3:7486782701627577093:7676] txid# 281474976710693, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:35.483582Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710694, at schemeshard: 72057594046644480 2025-03-28T09:00:35.510720Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:1, at schemeshard: 72057594046644480 2025-03-28T09:00:38.642089Z node 3 :TX_PROXY ERROR: Actor# [3:7486782714512479740:7909] txid# 281474976710699, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:38.668766Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710700, at schemeshard: 72057594046644480 2025-03-28T09:00:38.674382Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:1, at schemeshard: 72057594046644480 >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |74.3%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut >> KqpLimits::TooBigColumn-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::DefaultParameterValue [GOOD] Test command err: Trying to start YDB, gRPC: 23734, MsgBus: 23241 2025-03-28T08:59:23.607554Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782393967802341:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:23.608198Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b0/r3tmp/tmpetHUse/pdisk_1.dat 2025-03-28T08:59:26.840873Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:28.359292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:28.359375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:28.422357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23734, node 1 2025-03-28T08:59:29.066070Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782393967802341:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:32.497415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:33.099531Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:33.204762Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:33.204980Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:33.204988Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:33.205267Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23241 TClient is connected to server localhost:23241 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:37.616590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:38.289938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:41.078944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:43.327632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T08:59:44.581513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:44.581532Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:44.686184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:48.724549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782501341986459:2422], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:48.724642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:49.154675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:49.367110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:49.678435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:49.725731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:49.771954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:49.836885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:49.913122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782505636954284:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:49.913202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:49.913459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782505636954289:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:49.917230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:49.930166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782505636954291:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:50.027218Z node 1 :TX_PROXY ERROR: Actor# [1:7486782509931921642:3502] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 29337, MsgBus: 16267 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b0/r3tmp/tmpJUUeZb/pdisk_1.dat 2025-03-28T08:59:54.872704Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:55.221308Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:55.221376Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:55.225564Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:55.238332Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29337, node 2 2025-03-28T08:59:55.524466Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:55.524491Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:55.524500Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:55.524604Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16267 TClient is connected to server localhost:16267 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T08:59:57.921478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:58.028393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T08:59:58.589034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T08:59:59.428461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:00.162486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, sub ... 55340Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:25.194064Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782659103171966:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:25.194135Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:25.301638Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:25.473475Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:25.545365Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:25.616327Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:25.740627Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:25.929966Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:26.314348Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782663398139796:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:26.314454Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:26.314537Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782663398139801:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:26.321781Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:26.393694Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782663398139803:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:26.453455Z node 3 :TX_PROXY ERROR: Actor# [3:7486782663398139862:3480] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 26589, MsgBus: 28490 2025-03-28T09:00:31.352360Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486782687229834024:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:31.352419Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b0/r3tmp/tmpISLQJg/pdisk_1.dat 2025-03-28T09:00:31.597281Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:31.613016Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:31.613111Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:31.615750Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26589, node 4 2025-03-28T09:00:31.719321Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:31.719348Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:31.719356Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:31.719506Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28490 TClient is connected to server localhost:28490 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:32.518787Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:00:32.531971Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:00:32.545284Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:32.733347Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:33.135124Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:00:33.367306Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:00:36.352743Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486782687229834024:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:36.352800Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:37.068186Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782708704672216:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:37.068720Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:37.150247Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:37.258577Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:37.336776Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:38.085857Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:38.172947Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:38.278649Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:38.420386Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782717294607353:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:38.420467Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:38.427390Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782717294607358:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:38.483063Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:38.536192Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486782717294607361:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:38.633406Z node 4 :TX_PROXY ERROR: Actor# [4:7486782717294607421:3476] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TReplicationTests::CreateWithoutCredentials [GOOD] >> TReplicationTests::Describe >> KqpYql::Discard [GOOD] >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable >> KqpParams::Decimal+QueryService+UseSink [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable >> KqpScripting::NoAstSizeLimit [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] |74.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |74.3%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |74.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Discard [GOOD] Test command err: Trying to start YDB, gRPC: 19494, MsgBus: 6175 2025-03-28T09:00:19.941313Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782632235234748:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:19.941745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b07/r3tmp/tmpRmH554/pdisk_1.dat 2025-03-28T09:00:20.714977Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:20.716850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:20.716923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:20.723995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19494, node 1 2025-03-28T09:00:21.057793Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:21.057813Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:21.057820Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:21.057943Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6175 TClient is connected to server localhost:6175 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:22.870165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:00:22.982273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:00:24.926787Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782632235234748:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:24.938864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:25.477425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:28.881701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:00:29.058963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:00:30.668448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782679479876680:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:30.668541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:30.974274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.072077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.168946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.202602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.283089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.381868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.459652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782683774844507:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:31.459765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:31.462820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782683774844512:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:31.467661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:31.481082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782683774844514:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:31.542291Z node 1 :TX_PROXY ERROR: Actor# [1:7486782683774844567:3480] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 63890, MsgBus: 64990 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b07/r3tmp/tmpmqSLuB/pdisk_1.dat 2025-03-28T09:00:34.290556Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:34.293820Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:34.320671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:34.320842Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:34.323877Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63890, node 2 2025-03-28T09:00:34.462241Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:34.462272Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:34.462290Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:34.462436Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64990 TClient is connected to server localhost:64990 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:35.078214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:35.086738Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:00:35.097501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
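The KQP_WORKLOAD_SERVICE warnings and the TX_PROXY "path exist, request accepts it" error earlier in this test's output are the usual first-use bootstrap of the default resource pool: the initial fetch returns NOT_FOUND, TPoolCreatorActor schedules a retry once the ESchemeOpCreateResourcePool transaction completes ("doublechecking"), and a concurrent creation attempt then races against the already-created /Root/.metadata/workload_manager/pools/default, which the request tolerates. The same kind of pool can also be created explicitly; a minimal sketch, assuming the workload manager's SQL surface and hypothetical pool name/settings not taken from this log:

    CREATE RESOURCE POOL my_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed setting names, for illustration only
        QUEUE_SIZE = 100
    );
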
2025-03-28T09:00:35.185134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:35.374108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:35.652757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:41.456945Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782726719092885:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:41.457230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:41.544390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:41.652706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:41.702115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:41.740438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:41.778716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:41.914918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:42.013584Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782731014060715:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:42.013663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:42.013843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782731014060720:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:42.016985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:42.026809Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782731014060722:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:00:42.134525Z node 2 :TX_PROXY ERROR: Actor# [2:7486782731014060779:3463] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:43.906430Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486782735309028341:2499], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:13: Error: DISCARD not supported in YDB queries, code: 2008 2025-03-28T09:00:43.906985Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzMxMmZjYWYtZDVlOTQyMWEtMTJhMDdkZWYtODFiYTk4Mzg=, ActorId: [2:7486782735309028333:2495], ActorState: ExecuteState, TraceId: 01jqdzpway1scr2k8hgh7ejhaq, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: [1:108:2140] 2025-03-28T09:00:47.584738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:00:47.584859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:47.584896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:00:47.584935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:00:47.584978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:00:47.585029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:00:47.585095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:47.585225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:00:47.585619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:00:47.683953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:00:47.684014Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:47.704391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:00:47.704605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:00:47.704775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:00:47.711132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:00:47.711503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:00:47.712127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:47.712389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:47.716911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
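The KqpYql::Discard result above is an expected negative case: YQL's DISCARD keyword (compute a result but do not return it) is rejected at compile time by YDB data queries with code 2008, which is exactly what the KQP_COMPILE_ACTOR error shows. A minimal sketch of a script that reproduces it (hypothetical table path, not taken from the test; the second statement matches the :2:13 position in the error):

    SELECT 1;
    DISCARD SELECT * FROM `/Root/KeyValue`;  -- fails: DISCARD not supported in YDB queries, code: 2008
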
2025-03-28T09:00:47.718289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:47.718366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:47.718496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:00:47.718547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:47.718580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:00:47.718793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:00:47.725519Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-03-28T09:00:47.884570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:00:47.884925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:47.885148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:00:47.885383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:00:47.885472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:47.888053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:47.888213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:00:47.888457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:47.888523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:00:47.888570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:00:47.888614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:00:47.890879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:47.890952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:00:47.890992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:00:47.892919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-03-28T09:00:47.892976Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:47.893031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:47.893083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:00:47.897280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:00:47.899519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:00:47.899719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:00:47.900743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:47.900887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:47.900948Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:47.901249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:00:47.901318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:47.901617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:00:47.901717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:47.903956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:47.904013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:47.904192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:47.904252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:00:47.904693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:47.904740Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 
2025-03-28T09:00:47.904847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:47.904887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:47.904923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:47.904954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:47.904985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:00:47.905020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:47.905051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:00:47.905095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:00:47.905167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:00:47.905202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:00:47.905240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:00:47.907161Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:47.907302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:47.907348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:00:47.907385Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:00:47.907421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:00:47.907509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:00:47.911301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:00:47.911882Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1743152447.913282 29150 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2025-03-28T09:00:47.913779Z node 1 :TX_PROXY DEBUG: actor# [1:270:2261] Bootstrap 2025-03-28T09:00:47.933652Z node 1 :TX_PROXY DEBUG: actor# [1:270:2261] Become StateWork (SchemeCache [1:275:2266]) 2025-03-28T09:00:47.938252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: 
"Uint64" NotNull: true } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:00:47.938698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:00:47.939098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2025-03-28T09:00:47.939983Z node 1 :TX_PROXY DEBUG: actor# [1:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:00:47.947612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:47.947817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-03-28T09:00:47.948551Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 W0000 00:00:1743152447.948995 29150 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2025-03-28T09:00:47.957063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:00:47.957460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:00:47.957726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, at schemeshard: 72057594046678944 2025-03-28T09:00:47.963568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Type \'DyNumber\' specified for column \'modified_at\' is not supported" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:47.963743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::NoAstSizeLimit [GOOD] Test command err: Trying to start YDB, gRPC: 12582, MsgBus: 7990 2025-03-28T09:00:20.913519Z node 1 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782636099845842:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:20.914024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000aee/r3tmp/tmp61IH3p/pdisk_1.dat 2025-03-28T09:00:22.225064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:22.534217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:22.534315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:22.543001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:00:22.673674Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12582, node 1 2025-03-28T09:00:24.057606Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:24.057630Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:24.057636Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:24.058438Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:25.899193Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782636099845842:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:25.899478Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:7990 TClient is connected to server localhost:7990 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:29.313202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
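The TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType output above shows schemeshard rejecting TTL on non-temporal columns of a column table twice: a String TTL column fails with StatusSchemeError "Unsupported column type", and DyNumber fails with an explicit "not supported" message. A rough YQL equivalent of the first rejected schema (a sketch reconstructed from the proto in the log, assuming standard TTL syntax; ExpireAfterSeconds: 3600 corresponds to a one-hour interval):

    CREATE TABLE `/MyRoot/TTLEnabledTable` (
        key Uint64 NOT NULL,
        modified_at String,  -- TTL requires a date/time column type, so this schema is rejected
        PRIMARY KEY (key)
    ) WITH (
        STORE = COLUMN,
        TTL = Interval("PT1H") ON modified_at
    );
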
2025-03-28T09:00:29.345665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:00:29.536078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:00:29.764021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:29.857505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:31.709558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782683344487774:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:31.709663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:32.059834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.099885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.140776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.191817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.238645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.318596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.426824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782687639455590:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:32.426922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:32.427098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782687639455595:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:32.435618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:32.449203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782687639455597:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:32.545755Z node 1 :TX_PROXY ERROR: Actor# [1:7486782687639455653:3475] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:34.447070Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152434471, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 17655, MsgBus: 3399 2025-03-28T09:00:36.471427Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782706161510999:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:36.472388Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000aee/r3tmp/tmpgu5Q5C/pdisk_1.dat 2025-03-28T09:00:37.717097Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:37.755047Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:37.763355Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:37.763427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:37.767397Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17655, node 2 2025-03-28T09:00:38.327330Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:38.327352Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:38.327360Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:38.327472Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3399 TClient is connected to server localhost:3399 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:00:40.233567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:41.476988Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782706161510999:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:41.477050Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:45.411503Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782744816217171:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:45.411599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:45.707070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:00:46.692296Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782749111184599:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:46.692698Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:47.251079Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782753406151917:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:47.251229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:47.259780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782753406151922:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:47.307662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:00:47.366022Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782753406151924:2363], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:00:47.425281Z node 2 :TX_PROXY ERROR: Actor# [2:7486782753406151975:2432] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> TReplicationTests::CopyReplicatedTable >> TSchemeShardTTLTests::CheckCounters |74.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 61856, MsgBus: 29387 2025-03-28T08:59:09.071430Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782332801016050:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:09.071494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c1/r3tmp/tmpfnlH54/pdisk_1.dat 2025-03-28T08:59:12.142804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:13.259539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:14.073022Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782332801016050:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:14.073071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:14.547261Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:14.809999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:14.810417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:14.878898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:16.666972Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:16.761103Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:16.771109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 61856, node 1 2025-03-28T08:59:17.904645Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:17.904662Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:17.904667Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:17.905477Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29387 TClient is connected to server localhost:29387 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:26.230258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:26.395864Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:26.418853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:27.187918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:28.363762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:28.755190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:31.111155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:31.111173Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:34.606483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782440175200339:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.606581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:35.558325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:35.874291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:35.998502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:36.132674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:36.389983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:36.701993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:37.054966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782453060102778:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:37.055010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:37.055169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782453060102783:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:37.091557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:37.188817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782453060102785:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:37.307062Z node 1 :TX_PROXY ERROR: Actor# [1:7486782453060102842:3535] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:42.688878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T08:59:47.428968Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486782496009776472:2608], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2025-03-28T08:59:47.430303Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzY1NWU2ODAtNThhZDFmMzgtMmM4MzA0YzUtMjliZGZjNzQ=, ActorId: [1:7486782474534939662:2551], ActorState: ExecuteState, TraceId: 01jqdzn57q0z1h84jjk0zvjwxb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T08:59:47.983967Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzY1NWU2ODAtNThhZDFmMzgtMmM4MzA0YzUtMjliZGZjNzQ=, ActorId: [1:7486782474534939662:2551], ActorState: ExecuteState, TraceId: 01jqdzn5gqcqy1bnwhbs0kmd1a, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1294: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2025-03-28T08:59:48.190274Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486782500304743794:2617], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:4:25: Error: Implicit decimal cast would lose precision
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-28T08:59:48.196303Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzY1NWU2ODAtNThhZDFmMzgtMmM4MzA0YzUtMjliZGZjNzQ=, ActorId: [1:7486782474534939662:2551], ActorState: ExecuteState, TraceId: 01jq ... types, code: 2031 2025-03-28T09:00:29.483205Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YmM2MzI2ZTktMWEwMGI5OTgtMzJjMzVhYzgtNjM4ZjkyMDE=, ActorId: [3:7486782665216246624:2507], ActorState: ExecuteState, TraceId: 01jqdzpecbek9mfqcqefxdvaqa, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 1548, MsgBus: 9478 2025-03-28T09:00:31.384499Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486782683599644016:2146];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c1/r3tmp/tmp8R6qN4/pdisk_1.dat 2025-03-28T09:00:31.557376Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:00:31.797084Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:31.798906Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:31.799002Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:31.801129Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1548, node 4 2025-03-28T09:00:31.877480Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:31.877510Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:31.877520Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:31.877682Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9478 TClient is connected to server localhost:9478 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:32.982885Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
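The KqpParams::Decimal errors above fall into three groups: a DECLARE with precision 99, which is outside the supported Decimal range; a $value22 parameter bound as Decimal(35,10) where the prepared query expects Decimal(22,9); and a write whose implicit Decimal(35,10) -> Decimal(22,9) narrowing the compiler refuses because it would lose precision (code 2031). A sketch of the first and last cases against a table shaped like the one in the Struct conversion error, with hypothetical names:

    DECLARE $bad AS Decimal(99, 9);           -- rejected: Invalid decimal precision: 99

    DECLARE $value22 AS Decimal(35, 10);
    UPSERT INTO `/Root/DecimalTest` (Key, Value22)
    VALUES (1, $value22);                     -- rejected: the implicit cast to the column's
                                              -- Decimal(22,9) would lose precision, code 2031
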
2025-03-28T09:00:33.043154Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:33.251557Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:33.671229Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:33.841132Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:36.386859Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486782683599644016:2146];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:36.386933Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:37.822828Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782709369449489:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:37.822919Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:37.892887Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:38.049840Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:38.256680Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:38.325734Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:38.404344Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:38.496938Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:38.634625Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782713664417323:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:38.634737Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:38.635335Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782713664417328:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:38.639524Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:38.691798Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486782713664417330:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:38.776795Z node 4 :TX_PROXY ERROR: Actor# [4:7486782713664417391:3472] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:40.469705Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:00:42.254646Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7486782730844287220:2559], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2025-03-28T09:00:42.256127Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YTNkODA4ZDMtMTZkNzA5ZDYtYTI5YTlmYTQtM2VkZjhiODc=, ActorId: [4:7486782730844287218:2558], ActorState: ExecuteState, TraceId: 01jqdzptvm4d9jwkg6zbsyhy99, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:00:42.456762Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjY3Yzg2YTYtZTI3YTc2MjAtOTNjZmI4YzctZmQ5ZTIxMDE=, ActorId: [4:7486782730844287224:2561], ActorState: ExecuteState, TraceId: 01jqdzptx79vcf21fnzb94habx, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1294: ydb/core/kqp/query_data/kqp_query_data.cpp:271: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 2025-03-28T09:00:42.627401Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7486782730844287243:2568], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:4:25: Error: Implicit decimal cast would lose precision
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-28T09:00:42.640026Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWExODJlZC02NDhhM2VkNi0yNWQ2ZmQzNy0yZDEzZDYzMQ==, ActorId: [4:7486782730844287241:2567], ActorState: ExecuteState, TraceId: 01jqdzpv4t2s70h8a72bez40q9, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:00:42.869328Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7486782730844287260:2575], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:0:14: Error: Implicit decimal cast would lose precision
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-28T09:00:42.870913Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTliYjc2YWItNzk2NmEyODItNzI0NmM0MGYtYzgxNGRiMTc=, ActorId: [4:7486782730844287258:2574], ActorState: ExecuteState, TraceId: 01jqdzpvdrd0h95m3zaf5vydsd, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] >> TSchemeShardTTLTestsWithReboots::CreateTable |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] Test command err: 2025-03-28T08:59:01.877300Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782297848880095:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:01.940734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d93/r3tmp/tmp7JNtPA/pdisk_1.dat 2025-03-28T08:59:02.372868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:02.373099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:02.400773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:02.447780Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27916, node 1 2025-03-28T08:59:02.647419Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:02.647438Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:02.647444Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:02.647548Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23329 WaitRootIsUp 'Root'... 
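The decimal compile errors in the unittest output above all come from one pattern: the test declares parameters wider than the destination columns (Struct<'Value22':Decimal(35,10)> written into a 'Value22':Decimal(22,9)? column). A minimal YQL sketch of that shape, reconstructed from the error messages themselves (the table path is inferred, not the literal test source):

    -- Hedged reconstruction from the compile errors above, not the test's actual query.
    -- Assumes a table with columns Key Int32, Value22 Decimal(22,9), Value35 Decimal(35,10).
    DECLARE $value22 AS Decimal(35,10);  -- wider than the Value22 column's Decimal(22,9)
    DECLARE $value35 AS Decimal(35,10);  -- matches the Value35 column, accepted
    UPSERT INTO `/Root/DecimalTable` (Key, Value22, Value35)
    VALUES (1, $value22, $value35);
    -- Rejected at compile time with "Implicit decimal cast would lose precision":
    -- Decimal(35,10) cannot be narrowed implicitly to Decimal(22,9).
    -- The separate "Invalid decimal precision: 99" error corresponds to a
    -- DECLARE $p AS Decimal(99,9); precision 99 is rejected while 35 is accepted.

The runtime variant of the same mismatch appears above as "Parameter $value22 type mismatch, expected: ... Precision: 22 Scale: 9 ..., actual: ... Decimal(35,10)" when the query is executed with a prepared parameter instead of failing in compilation.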
TClient::Ls request: Root TClient::Ls response: 2025-03-28T08:59:07.164807Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782297848880095:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:07.165614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:07.827981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:09.084564Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:18.986860Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782372622397496:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:19.415734Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d93/r3tmp/tmpNOdk16/pdisk_1.dat 2025-03-28T08:59:21.371671Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:21.376924Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:21.451674Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:21.452116Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:21.479537Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22100, node 2 2025-03-28T08:59:22.025379Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:22.025396Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:22.025401Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:22.025489Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9705 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:22.836749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.850159Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T08:59:54.877032Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:54.879018Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:54.879831Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T08:59:54.906480Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:54.909211Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:54.909457Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d93/r3tmp/tmpafojQ3/pdisk_1.dat 2025-03-28T08:59:55.717168Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2219, node 3 TClient is connected to server localhost:26819 2025-03-28T08:59:58.506463Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:58.506525Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:58.506561Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:58.506837Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:28.965161Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:501:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:28.965436Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:28.965649Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:28.970410Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:498:2160], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:28.970828Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:28.970945Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d93/r3tmp/tmpDB9RuC/pdisk_1.dat 2025-03-28T09:00:29.869313Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27360, node 5 TClient is connected to server localhost:28813 2025-03-28T09:00:30.641881Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:30.641965Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:30.642012Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:30.642307Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-5" reason: "YELLOW-e9e2-1231c6b1-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-03-28T09:00:39.925148Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:526:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:39.926469Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:39.926818Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d93/r3tmp/tmp6M7nkF/pdisk_1.dat 2025-03-28T09:00:41.264089Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13651, node 7 TClient is connected to server localhost:26569 2025-03-28T09:00:42.938393Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:42.938492Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:42.938536Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:42.938937Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TReplicationTests::CopyReplicatedTable [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] Test command err: Trying to start YDB, gRPC: 23703, MsgBus: 13334 2025-03-28T09:00:21.156159Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782640333981262:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:21.163130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000aec/r3tmp/tmpRjCuZq/pdisk_1.dat 2025-03-28T09:00:22.544514Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:23.806846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:24.633029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:24.633135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:24.718797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:00:25.185442Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.157496s 2025-03-28T09:00:25.185796Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.157907s TServer::EnableGrpc on GrpcPort 23703, node 1 2025-03-28T09:00:25.312732Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:25.313162Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:25.404640Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:00:25.404783Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:00:27.742636Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782640333981262:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:27.742916Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:27.767380Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:27.767620Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:27.767627Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:27.768275Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13334 TClient is connected to server localhost:13334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:29.554893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:29.571878Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:00:29.588255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:29.781447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:30.003609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:30.094625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:00:31.840905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782683283655879:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:31.841019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:32.116679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.168833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.207202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.311920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.419462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.507467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.610325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782687578623697:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:32.610413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:32.610623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782687578623702:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:32.615408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:32.642953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782687578623704:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:32.739248Z node 1 :TX_PROXY ERROR: Actor# [1:7486782687578623759:3470] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:34.783375Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152434779, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 7803, MsgBus: 12394 2025-03-28T09:00:37.556734Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782710635886488:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:37.581118Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000aec/r3tmp/tmptMQ4pu/pdisk_1.dat 2025-03-28T09:00:38.344252Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:38.371390Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:38.371476Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:38.375504Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7803, node 2 2025-03-28T09:00:39.292866Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:39.292894Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:39.292902Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:39.293021Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12394 TClient is connected to server localhost:12394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:41.684836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
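The recurring "Resource pool default not found or you don't have access permissions" warnings above are the workload service bootstrapping itself rather than a test failure: the first query's pool lookup returns NOT_FOUND, which triggers creation of /Root/.metadata/workload_manager/pools/default; the "Transaction ... completed, doublechecking" retry then races with that create and resolves as "path exist, request accepts it", after which the warnings stop for that node (the same cycle repeats per test node, as with node 2 below). For comparison, creating a pool explicitly looks roughly like the following YQL sketch (pool name and limit values are illustrative, not taken from this log):

    -- Hedged sketch of explicit resource pool creation; settings are assumptions.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- cap on simultaneously executing queries
        QUEUE_SIZE = 100              -- requests allowed to wait beyond the limit
    );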
2025-03-28T09:00:41.757166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:41.921946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:42.108690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:42.473554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:42.918787Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782710635886488:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:42.986917Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:47.640341Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782753585561229:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:47.640422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:47.678910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:47.715532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:47.771322Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:47.811742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:47.855181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:47.959963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:48.142857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782757880529060:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:48.142943Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:48.152148Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782757880529065:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:48.158207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:48.225169Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:00:48.225441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782757880529067:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:48.311462Z node 2 :TX_PROXY ERROR: Actor# [2:7486782757880529127:3476] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative >> KqpLimits::DatashardProgramSize-useSink [GOOD] >> KqpLimits::DatashardReplySize >> TSchemeShardTTLTests::TtlTiersValidation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:00:49.895026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:00:49.895124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:49.895159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:00:49.895190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:00:49.895227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:00:49.895255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:00:49.895323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:49.895396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:00:49.895710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:00:50.418285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:00:50.418702Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:50.547333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:00:50.547726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:00:50.547902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:00:50.588558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:00:50.588987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:00:50.589624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:50.589852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot 
DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:50.600506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:50.606159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:50.606249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:50.606431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:00:50.606496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:50.606557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:00:50.606673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:00:50.613730Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:00:50.734536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:00:50.734792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:50.734999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:00:50.735254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:00:50.735323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:50.739354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:50.739478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:00:50.739637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:50.739685Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:00:50.739743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:00:50.739772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:00:50.741499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:50.741555Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 
72057594046678944 2025-03-28T09:00:50.741591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:00:50.743509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:50.743557Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:50.743592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:50.743635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:00:50.747605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:00:50.750118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:00:50.750305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:00:50.751340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:50.751480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:50.751539Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:50.751837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:00:50.751887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:50.752034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:00:50.752119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:50.754312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:50.754357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:50.754544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:50.754581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:00:50.755028Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:50.755073Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:00:50.755169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:50.755202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:50.755239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:50.755269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:50.755302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:00:50.755371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:50.755407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:00:50.755434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:00:50.755502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:00:50.755539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:00:50.755569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:00:50.757440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:50.757545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:50.757579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
:51.109609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:00:51.109670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/3, is published: true 2025-03-28T09:00:51.110524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:00:51.110579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2025-03-28T09:00:51.119047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:00:51.119151Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:00:51.119270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:00:51.119341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:51.119378Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:00:51.119433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:00:51.119476Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-03-28T09:00:51.120378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:00:51.120423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-03-28T09:00:51.120521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:00:51.120557Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:00:51.120631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:00:51.120684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, 
txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:51.120726Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.120759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:00:51.120788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-28T09:00:51.139261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:00:51.139828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:00:51.145754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:00:51.146073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:00:51.146409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.150879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:00:51.151254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:00:51.151699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:00:51.151743Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-03-28T09:00:51.151882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-03-28T09:00:51.151918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-28T09:00:51.151952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-03-28T09:00:51.151983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-28T09:00:51.152019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-03-28T09:00:51.152514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.152920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.152970Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:00:51.153039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-03-28T09:00:51.153078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-28T09:00:51.153119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-03-28T09:00:51.153147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-28T09:00:51.153179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-03-28T09:00:51.153246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:375:2343] 
message: TxId: 101 2025-03-28T09:00:51.153291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-28T09:00:51.153347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:00:51.153388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:00:51.153502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:00:51.153554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-03-28T09:00:51.153574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-03-28T09:00:51.153603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:00:51.153624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-03-28T09:00:51.153650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-03-28T09:00:51.153705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:00:51.160950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:00:51.161029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:376:2344] TestWaitNotification: OK eventTxId 101 2025-03-28T09:00:51.161564Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:00:51.161821Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 261us result status StatusSuccess 2025-03-28T09:00:51.162296Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [GOOD]
>> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardNonLocalExec-UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 5059, MsgBus: 22057
2025-03-28T08:59:08.639935Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782327745687270:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T08:59:08.640344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ce/r3tmp/tmprtImJX/pdisk_1.dat
2025-03-28T08:59:13.611667Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782327745687270:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T08:59:13.611718Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T08:59:13.668660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T08:59:16.731278Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T08:59:16.748611Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T08:59:16.771033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T08:59:16.810176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T08:59:16.810472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T08:59:17.008028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 5059, node 1
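For orientation, the /MyRoot/TTLEnabledTable describe result earlier in this block — key Uint64, modified_at Timestamp, an async index UserDefinedIndexByExpireAt, and TTLSettings with ExpireAfterSeconds: 3600 plus a delete tier — is the kind of table the following YQL sketch would declare. The test itself drives schemeshard through proto transactions rather than YQL, so this is only an assumed-equivalent illustration using the names taken from the log:

    -- Sketch, not the test's actual setup code: an assumed-equivalent
    -- YQL declaration of the table described in the scheme dump above.
    CREATE TABLE TTLEnabledTable (
        key Uint64,
        modified_at Timestamp,
        PRIMARY KEY (key),
        INDEX UserDefinedIndexByExpireAt GLOBAL ASYNC ON (modified_at)
    )
    WITH (
        TTL = Interval("PT1H") ON modified_at  -- ExpireAfterSeconds: 3600 in the dump
    );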
2025-03-28T08:59:19.069664Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:19.069693Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:19.078799Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:19.078929Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22057 TClient is connected to server localhost:22057 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:27.480804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:27.850592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:28.743845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:29.421929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:29.696787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:30.907284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:30.907305Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:34.764931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782439414838830:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.765061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:35.588920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:35.784255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:36.001948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:36.184050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:36.475051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:36.674696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:36.947148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782448004773982:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:36.955889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:36.956181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782448004773989:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:36.963025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:37.029945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782448004773991:2482], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:37.105863Z node 1 :TX_PROXY ERROR: Actor# [1:7486782452299741344:3520] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 14452, MsgBus: 8753 2025-03-28T08:59:50.905768Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782509169881605:2086];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:51.215444Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ce/r3tmp/tmpPW2cMN/pdisk_1.dat 2025-03-28T08:59:51.521183Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486782509836254335:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:51.688747Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T08:59:52.242109Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:52.960266Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:53.092557Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:53.173178Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:53.173253Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:53.175898Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:53.175964Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:53.178801Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T08:59:53.178960Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:53.180550Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14452, node 2 2025-03-28T08:59:53.459981Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:53.460003Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:53.460027Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:53.460158Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8753 2025-03-28T08:59:55.884091Z node 2 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782509169881605:2086];send_to=[0:7307199536658146131:7762515]; 202 ... ], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:14.542954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:14.684551Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782612249099911:2445], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:00:14.764643Z node 2 :TX_PROXY ERROR: Actor# [2:7486782612249100001:4385] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:19.542713Z node 2 :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [72075186224037889:2:11:0:0:41:1] status# {Status# BLOCKED} Marker# BSVS03 2025-03-28T09:00:19.551116Z node 3 :BS_PROXY_PUT ERROR: [0b821c7800547bec] Result# TEvPutResult {Id# [72075186224037889:2:11:0:0:41:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000000:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 Trying to start YDB, gRPC: 24900, MsgBus: 27502 2025-03-28T09:00:29.126816Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486782676080789812:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:29.126986Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:00:29.185138Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486782677910347587:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:29.185242Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ce/r3tmp/tmpOc7xex/pdisk_1.dat 2025-03-28T09:00:29.610506Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:29.686700Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:29.686823Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:29.687310Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:29.687377Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:29.703068Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-03-28T09:00:29.703242Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:00:29.711299Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24900, node 4 2025-03-28T09:00:30.056480Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:30.056508Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:30.056515Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:30.056663Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27502 TClient is connected to server localhost:27502 WaitRootIsUp 'Root'... 
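One sequence that recurs throughout these logs deserves a gloss: KQP_WORKLOAD_SERVICE first reports NOT_FOUND for the pool ("Resource pool default not found or you don't have access permissions"), TPoolCreatorActor then creates it and retries ("Transaction ... completed, doublechecking"), and a concurrent creator loses the race, producing TX_PROXY's "path exist, request accepts it". The object being bootstrapped is /Root/.metadata/workload_manager/pools/default. A rough YQL equivalent of that pool is sketched below; the parameter names and values are assumptions for illustration, not taken from the log:

    -- Assumed illustration of the auto-created default resource pool.
    -- The CONCURRENT_QUERY_LIMIT / QUEUE_SIZE values here are guesses,
    -- not the actual internal defaults.
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = -1,  -- -1: no limit
        QUEUE_SIZE = -1               -- -1: unbounded queue
    );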
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:00:30.620824Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:00:30.651473Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:30.799002Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:31.163385Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:31.326182Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:34.110908Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486782676080789812:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:34.110967Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:34.178815Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486782677910347587:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:34.178900Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:35.155956Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782701850595676:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:35.156067Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:35.186644Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:35.297938Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:35.409492Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:35.621223Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:35.756431Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:35.951366Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:36.277074Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782706145563727:2421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:36.277172Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:36.277543Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782706145563732:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:36.282332Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:36.316394Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486782706145563734:2425], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:00:36.418388Z node 4 :TX_PROXY ERROR: Actor# [4:7486782706145563803:4295] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:00:39.523578Z node 4 :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVMultiPut: failed to pass the Hull check; id# [72075186224037895:1:17:0:0:41:1] status# {Status# BLOCKED} Marker# BSVS03
2025-03-28T09:00:39.523621Z node 4 :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVMultiPut: failed to pass the Hull check; id# [72075186224037897:1:17:0:0:41:1] status# {Status# BLOCKED} Marker# BSVS03
2025-03-28T09:00:39.523634Z node 4 :BS_VDISK_PUT ERROR: VDISK[82000000:_:0:0:0]: TEvVMultiPut: failed to pass the Hull check; id# [72075186224037911:1:17:0:0:41:1] status# {Status# BLOCKED} Marker# BSVS03
2025-03-28T09:00:39.525887Z node 4 :BS_PROXY_PUT ERROR: [0a49b8c328f31443] Result# TEvPutResult {Id# [72075186224037895:1:17:0:0:41:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VMultiPutResult itemStatus# BLOCKED from VDiskId# [82000000:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12
2025-03-28T09:00:44.578989Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T09:00:44.579012Z node 4 :IMPORT WARN: Table profiles were not loaded
|74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
>> TSchemeShardTTLTestsWithReboots::AlterTable
>> THealthCheckTest::LayoutIncorrect [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-28T09:00:30.930049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-28T09:00:30.930168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:00:30.930220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-28T09:00:30.930261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-28T09:00:30.930309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-28T09:00:30.930339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-28T09:00:30.930396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:00:30.930497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s,
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:00:30.930894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:00:31.032038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:00:31.032103Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:31.048020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:00:31.048357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:00:31.048543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:00:31.054992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:00:31.055254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:00:31.055991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:31.056246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:31.058971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:31.060273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:31.060340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:31.060548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:00:31.060598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:31.060637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:00:31.060723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:00:31.067971Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:00:31.234292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:00:31.234571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:31.234869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:00:31.235163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:00:31.235227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:31.238374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:31.238542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:00:31.238786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:31.238846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:00:31.238884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:00:31.238918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:00:31.241319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:31.241398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:00:31.241440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:00:31.243769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:31.243827Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:31.243870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:31.243923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:00:31.248334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:00:31.250620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:00:31.250886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:00:31.251995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:31.252150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:31.252214Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:31.252506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:00:31.252565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-03-28T09:00:31.252803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:00:31.252893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:31.255547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:31.255623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:31.255850Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:31.255891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:00:31.256277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:31.256328Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:00:31.256422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:31.256458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:31.256496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:31.256531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:31.256567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:00:31.256607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:31.256641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:00:31.256676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:00:31.256770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:00:31.256827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:00:31.256865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:00:31.259382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:31.259511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:31.259558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ersion: 3 2025-03-28T09:00:51.542871Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:00:51.542943Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-28T09:00:51.554973Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:00:51.566852Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:00:51.576273Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1607 } } 2025-03-28T09:00:51.576327Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-28T09:00:51.576468Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1607 } } 2025-03-28T09:00:51.576578Z node 8 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1607 } } 2025-03-28T09:00:51.577142Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 408 RawX2: 34359740745 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:00:51.577194Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-28T09:00:51.577355Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 408 RawX2: 34359740745 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:00:51.577416Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:00:51.577524Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 408 RawX2: 34359740745 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:00:51.577593Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:51.577641Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-03-28T09:00:51.595283Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.595785Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.615736Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 34359740661 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:00:51.615796Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:00:51.615922Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 34359740661 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:00:51.615962Z node 8 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:00:51.616028Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 306 RawX2: 34359740661 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:00:51.616074Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:51.616119Z node 8 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.616162Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:00:51.616202Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:00:51.616229Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:00:51.630599Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.631069Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.631133Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-03-28T09:00:51.631193Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-03-28T09:00:51.631239Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-03-28T09:00:51.631348Z node 8 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-03-28T09:00:51.631394Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state 
for txid 102:0 240 -> 240 2025-03-28T09:00:51.639617Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.639701Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:00:51.639856Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:00:51.639900Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:00:51.639954Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:00:51.639998Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:00:51.640041Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:00:51.640140Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:334:2313] message: TxId: 102 2025-03-28T09:00:51.640203Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:00:51.640251Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:00:51.640291Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:00:51.640476Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:00:51.640520Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:00:51.647597Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:00:51.647676Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:436:2397] TestWaitNotification: OK eventTxId 102 2025-03-28T09:00:51.648317Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:00:51.648584Z node 8 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/CopyTable" took 296us result status StatusSuccess 2025-03-28T09:00:51.649014Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 
ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> THealthCheckTest::StorageLimit50 [GOOD]
>> THealthCheckTest::SpecificServerlessWithExclusiveNodes
|74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
>> TReplicationTests::CreateReplicatedTable [GOOD]
>> TReplicationTests::DropReplicationWithInvalidCredentials
|74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan
|74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan
|74.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan
>> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD]
>> THealthCheckTest::ShardsNoLimit [GOOD]
>> TSchemeShardTTLTestsWithReboots::MoveTable
>> KqpParams::InvalidJson [GOOD]
|74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
>> TSchemeShardTTLTests::TtlTiersValidation [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-28T09:00:51.508752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-28T09:00:51.509075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:00:51.509221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-28T09:00:51.509370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-28T09:00:51.509622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
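The TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn case whose bootstrap begins above verifies that schemeshard rejects a CreateTable whose TTL spec references a column the table does not define. A hedged YQL rendering of the invalid shape follows; the test submits the proto equivalent, and the column name created_at is invented for the example:

    -- Expected to fail: the TTL clause references a column the table
    -- does not define ('created_at' is a made-up name for illustration).
    CREATE TABLE TTLEnabledTable (
        key Uint64,
        PRIMARY KEY (key)
    )
    WITH (
        TTL = Interval("PT1H") ON created_at
    );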
2025-03-28T09:00:51.509646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:00:51.509927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:51.510209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:00:51.511966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:00:51.923144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:00:51.923197Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:51.943436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:00:51.943704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:00:51.943822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:00:51.948863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:00:51.949043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:00:51.949552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:51.949706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:51.951557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:51.952544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:51.952591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:51.952724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:00:51.952759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:51.952797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:00:51.952872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.958653Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:00:53.473120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:00:53.474532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:53.487578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2025-03-28T09:00:53.502967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:00:53.503916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:53.547778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:53.547891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:00:53.548872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:53.549820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:00:53.555190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:00:53.555528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:00:53.601141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:53.601809Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:00:53.602113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:00:53.647037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:53.647098Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:53.647423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:53.653381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:00:53.786356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:00:53.839923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:00:53.842991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:00:53.892507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:53.893828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:53.894460Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:53.907927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:00:53.908295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:53.910522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:00:53.921457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:53.958446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:53.958507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:53.962592Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:53.963021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:00:53.979125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:53.979720Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:00:53.980948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:53.981242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:53.987588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:53.987919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:53.988510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:00:53.989132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:53.989466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:00:53.989774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:00:53.990114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:00:53.999544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:00:53.999911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:00:54.044355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:54.045071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:54.045408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:00:54.046302Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:00:54.046646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:00:54.068033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:00:54.130886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:00:54.145547Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:00:54.195359Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-28T09:00:55.027762Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-28T09:00:55.055151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:00:55.066942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:00:55.067637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } }, at schemeshard: 72057594046678944 2025-03-28T09:00:55.073499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', at schemeshard: 72057594046678944 2025-03-28T09:00:55.089404Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:00:55.124053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Cannot enable TTL on unknown column: \'created_at\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:55.125636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-03-28T09:00:55.127640Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] >> AutoConfig::GetServicePoolsWith1CPU [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:00:55.292554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:00:55.293155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:55.293701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:00:55.295579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:00:55.295888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:00:55.296156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:00:55.297012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:55.297919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:00:55.302500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:00:55.919079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:00:55.919403Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:56.022198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:00:56.023159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:00:56.024191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:00:56.071299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:00:56.071519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:00:56.072057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:56.072264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:56.084171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:56.092972Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:56.093217Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:56.093392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:00:56.093661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: 
unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:56.093877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:00:56.094888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:00:56.148234Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:00:56.419290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:00:56.420498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:56.421887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:00:56.425589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:00:56.426327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:56.454111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:56.454321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:00:56.454578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:56.454668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:00:56.454752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:00:56.454786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:00:56.458421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:56.458501Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:00:56.458550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:00:56.467376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:56.467592Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:56.468040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:56.468524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2025-03-28T09:00:56.474175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:00:56.476298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:00:56.476494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:00:56.477383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:56.477510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:56.477560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:56.477853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:00:56.477913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:56.478088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:00:56.478186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:56.480176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:56.480222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:56.480407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:56.480452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:00:56.480846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:56.480892Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:00:56.480978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:56.481058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:56.481092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:56.481118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:56.481154Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:00:56.481193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:56.481226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:00:56.481258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:00:56.481334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:00:56.481386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:00:56.481429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:00:56.483222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:56.483316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:56.483355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 89773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:00:57.090613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:57.090705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:00:57.090770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:00:57.091331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.091393Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:00:57.099513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:00:57.100673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:00:57.100965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:00:57.101272Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:00:57.102024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:00:57.108322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:00:57.108719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:00:57.108749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:00:57.108776Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:00:57.108803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:00:57.108874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:00:57.110945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 12661 } } 2025-03-28T09:00:57.110992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-28T09:00:57.111109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 12661 } } 2025-03-28T09:00:57.111197Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 12661 } } 2025-03-28T09:00:57.112425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:00:57.112464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-28T09:00:57.112563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:00:57.112617Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:00:57.112704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:00:57.112757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:57.112802Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.112840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:00:57.112874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-28T09:00:57.113701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:00:57.119247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:00:57.119381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.119470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.119780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.119819Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:00:57.119913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:00:57.119944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:00:57.119977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:00:57.120012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:00:57.120051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-28T09:00:57.120123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 101 2025-03-28T09:00:57.120168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:00:57.120204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:00:57.120242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:00:57.120342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:00:57.125656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:00:57.125706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:336:2315] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-28T09:00:57.128716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 Delete { } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , 
at schemeshard: 72057594046678944 2025-03-28T09:00:57.129000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.129296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, at schemeshard: 72057594046678944 2025-03-28T09:00:57.135594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Tier 0: only the last tier in TTL settings can have Delete action" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:57.135756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2025-03-28T09:00:57.138192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 EvictToExternalStorage { Storage: "/Root/abc" } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:00:57.138388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.139061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, at schemeshard: 72057594046678944 2025-03-28T09:00:57.151291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Only DELETE via TTL is allowed for row-oriented tables" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:57.152707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 103, wait until txId: 103 |74.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |74.4%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |74.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader >> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret >> AutoConfig::GetASPoolsith1CPU [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::InvalidJson [GOOD] Test command err: Trying to start YDB, gRPC: 27544, MsgBus: 23492 2025-03-28T08:59:34.343245Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782439155188017:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:34.429636Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ae/r3tmp/tmpNIrcHW/pdisk_1.dat 2025-03-28T08:59:36.239369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:37.814813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:37.875694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:37.876023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:37.932057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27544, node 1 2025-03-28T08:59:38.685604Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T08:59:38.824817Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T08:59:38.829703Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:39.315101Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782439155188017:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:39.315156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:39.801654Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:39.801674Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:39.801681Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:39.801995Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:40.618625Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:40.707038Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:40.731326Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:23492 TClient is connected to server localhost:23492 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:49.340765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:49.760267Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:49.810192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:50.583583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:51.230125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:51.671914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:54.571288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:54.571326Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:54.761758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782525054535773:2427], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:54.761852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:56.451089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.549140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.697463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.804859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.976404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:57.190092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:57.726926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782537939438237:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:57.727392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:57.732017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782537939438248:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:57.803317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:57.884310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782537939438250:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:57.993047Z node 1 :TX_PROXY ERROR: Actor# [1:7486782537939438308:3536] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:06.000685Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDRkOGE3NzAtODk1NzA0ZmEtYmEwZTFhNTMtY2U5NjcwM2E=, ActorId: [1:7486782568004209724:2546], ActorState: ExecuteState, TraceId: 01jqdznp2ecbze5tjtdw4kv0hc, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1294: ydb/core/kqp/query_data/kqp_query_data.cpp:266: Missing value for parameter: $group Trying to start YDB, gRPC: 16672, MsgBus: 10553 2025-03-28T09:00:10.550812Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782596225372323:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:10.550870Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ae/r3tmp/tmpvobLKZ/pdisk_1.dat 2025-03-28T09:00:11.176458Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:11.176761Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:11.182996Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16672, node 2 2025-03-28T09:00:11.487735Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:11.507749Z node 2 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:00:11.507963Z node 2 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:00:12.624875Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:12.624897Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:12.624905Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:12.625025Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10553 2025-03-28T09:00:15.555382Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782596225372323:2060];send_to=[0:7307199536658146131:7762515]; 20 ... 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:33.820239Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:33.868825Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:33.947128Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:34.023948Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:34.158161Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782699522021312:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:34.158241Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:34.158501Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782699522021317:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:34.163401Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:34.183567Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:00:34.183852Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782699522021319:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:34.269971Z node 3 :TX_PROXY ERROR: Actor# [3:7486782699522021375:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 30075, MsgBus: 28194 2025-03-28T09:00:40.324489Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486782722223238245:2137];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ae/r3tmp/tmpzSp0Pc/pdisk_1.dat 2025-03-28T09:00:40.993337Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:00:41.494037Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:41.599243Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:41.599341Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:41.615523Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30075, node 4 2025-03-28T09:00:41.810669Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:41.810724Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:41.810735Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:41.810874Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28194 TClient is connected to server localhost:28194 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:43.169206Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:00:43.176959Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:00:43.204607Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:43.438470Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:43.946120Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:44.127874Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:45.535129Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486782722223238245:2137];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:46.753654Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:50.205388Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782765172912908:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:50.205802Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:50.459387Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:50.532882Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:50.602079Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:50.700917Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:50.745146Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:50.830581Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:50.949244Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782765172913436:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:50.949342Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:50.949534Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782765172913441:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:50.954055Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:50.973801Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-28T09:00:50.974181Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486782765172913443:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:00:51.037717Z node 4 :TX_PROXY ERROR: Actor# [4:7486782769467880792:3463] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:55.211011Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:00:55.545358Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MmUxMDc0OGEtOThkNzlmZDEtN2YxZTBhZGYtZTg0ZGViMg==, ActorId: [4:7486782786647750315:2519], ActorState: ExecuteState, TraceId: 01jqdzq7mv8ytyr2rdzcgeq4x4, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: Invalid Json value
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: Invalid Json value ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsNoLimit [GOOD] Test command err: 2025-03-28T08:59:23.679615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:23.679862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:23.679910Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001db3/r3tmp/tmplXAwjl/pdisk_1.dat 2025-03-28T08:59:24.101004Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11940, node 1 TClient is connected to server localhost:18729 2025-03-28T08:59:26.652055Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:26.652102Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:26.652332Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:26.655351Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:50.165795Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:446:2410], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:50.166461Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:50.166634Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001db3/r3tmp/tmp6yaj3B/pdisk_1.dat 2025-03-28T08:59:50.808589Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6825, node 3 TClient is connected to server localhost:25625 2025-03-28T08:59:52.858649Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:52.858752Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:52.858786Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:52.859027Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:13.487716Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:501:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:13.487952Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:13.488133Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:13.490244Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:498:2160], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:13.490559Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:13.490660Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001db3/r3tmp/tmpFsMOdH/pdisk_1.dat 2025-03-28T09:00:15.544892Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18645, node 5 TClient is connected to server localhost:23586 2025-03-28T09:00:16.325201Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:16.325276Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:16.325320Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:16.325583Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:31.465736Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:31.466190Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:31.466397Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:31.468903Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:31.469376Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:31.469482Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001db3/r3tmp/tmpx7oCrt/pdisk_1.dat 2025-03-28T09:00:31.885625Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29459, node 7 TClient is connected to server localhost:4190 2025-03-28T09:00:32.420762Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:32.420828Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:32.420855Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:32.421425Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:50.693024Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:50.693461Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:50.693702Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:50.694091Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:50.694290Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:50.694407Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001db3/r3tmp/tmpfLpr0X/pdisk_1.dat 2025-03-28T09:00:51.084067Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63441, node 9 TClient is connected to server localhost:22663 2025-03-28T09:00:51.511981Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:51.512028Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:51.512054Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:51.512905Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration |74.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce |74.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsith1CPU [GOOD] >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:00:51.808465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:00:51.808939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:51.809098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:00:51.809301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:00:51.809576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:00:51.809719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:00:51.810130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:51.810432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:00:51.812678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:00:52.326911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:00:52.327171Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:52.447212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:00:52.449316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:00:52.449866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:00:52.488615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:00:52.489716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:00:52.493859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:52.494531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:52.507253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:52.509957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:52.510184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:52.510847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:00:52.511144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:52.511328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:00:52.511927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:00:52.603949Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:00:55.289380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:00:55.290590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:55.304362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:00:55.321059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:00:55.322292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:55.396847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:55.411295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:00:55.413735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:55.427179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:00:55.428156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:00:55.428571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:00:55.484210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:55.484533Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:00:55.485148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:00:55.539335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:55.539386Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:55.540011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:55.540340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:00:55.643051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:00:55.687276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:00:55.689741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:00:55.723196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:55.723314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:00:55.723361Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:55.723595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:00:55.723634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:55.723767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:00:55.723831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:55.731195Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:55.731242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:55.731364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:55.731393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:00:55.731707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:55.731745Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:00:55.731818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:55.731845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:55.731885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:55.731910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:55.731942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:00:55.731975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:55.732003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:00:55.732029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:00:55.732080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:00:55.732110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:00:55.732137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:00:55.733613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:55.733694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-03-28T09:00:55.733720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ntStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2025-03-28T09:00:57.850679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:57.850842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:57.850900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:00:57.851121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-28T09:00:57.851238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-28T09:00:57.859479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:57.859527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:00:57.859795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:57.859844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:00:57.860238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.860309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:00:57.860877Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:00:57.860960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:00:57.860990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:00:57.861034Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-28T09:00:57.861074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:00:57.861145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-28T09:00:57.871535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:00:57.897555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1008 } } 2025-03-28T09:00:57.897952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-28T09:00:57.898362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1008 } } 2025-03-28T09:00:57.898734Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1008 } } FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:00:57.912581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:00:57.912803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-28T09:00:57.912979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:00:57.913021Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:00:57.913092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:00:57.913150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:57.913185Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.913220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:00:57.913278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:00:57.924419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.924891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.925158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.925204Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:00:57.925311Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:00:57.925343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:00:57.925407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:00:57.925443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:00:57.925487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:00:57.925543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:375:2343] message: TxId: 102 2025-03-28T09:00:57.925597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:00:57.925631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:00:57.925657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:00:57.926404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:00:57.941669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:00:57.941723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:505:2430] TestWaitNotification: OK eventTxId 102 2025-03-28T09:00:57.942159Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:00:57.942375Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 227us result status StatusSuccess 2025-03-28T09:00:57.946944Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> AutoConfig::GetASPoolsWith2CPUs [GOOD]
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:40.587935Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:40.595385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T08:59:40.604479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:40.619600Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:40.620582Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dac/r3tmp/tmp6u4TcX/pdisk_1.dat 2025-03-28T08:59:45.516329Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28903, node 1 TClient is connected to server localhost:15459 2025-03-28T08:59:48.803538Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:48.803595Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:48.803632Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:48.803842Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:15.106965Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:15.109347Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:15.110251Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:15.119643Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:15.122399Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:15.122486Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dac/r3tmp/tmp7vaGhK/pdisk_1.dat 2025-03-28T09:00:16.111784Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21060, node 3 TClient is connected to server localhost:13212 2025-03-28T09:00:17.026824Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:17.026911Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:17.026958Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:17.027199Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-3" reason: "YELLOW-e9e2-1231c6b1-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 3 host: "::1" port: 12001 } 2025-03-28T09:00:31.596868Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:501:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:31.597241Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:31.597450Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:31.599966Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:498:2160], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:31.600305Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:31.600415Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dac/r3tmp/tmp2zSQI4/pdisk_1.dat 2025-03-28T09:00:31.989972Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62879, node 5 TClient is connected to server localhost:30671 2025-03-28T09:00:33.376205Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:33.376276Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:33.376319Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:33.376634Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-5" reason: "YELLOW-e9e2-1231c6b1-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-03-28T09:00:39.320382Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:39.321029Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:39.321356Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dac/r3tmp/tmpMNcm9C/pdisk_1.dat 2025-03-28T09:00:40.170207Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7782, node 7 TClient is connected to server localhost:29529 2025-03-28T09:00:41.972806Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:41.972877Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:41.972921Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:41.973498Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:51.922897Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:446:2410], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:51.923230Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:51.923400Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dac/r3tmp/tmp7tq1Zz/pdisk_1.dat 2025-03-28T09:00:52.557760Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62122, node 9 TClient is connected to server localhost:21341 2025-03-28T09:00:56.160197Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:56.160266Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:56.160311Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:56.169613Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> Sharding::XXUsage >> Sharding::XXUsage [GOOD] |74.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] |74.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest |74.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest |74.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith2CPUs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] Test command err: 12035792738368783173 17771036748850650410 8746113190159771306 10465798179140015335 3293960894214210701 17617712360449861841 13382107884858899410 803150523013103951 10952310014434288463 5588307076712779029 185819218382225139 5674272865544663873 4410232808537573675 5870384556268556533 10366057040494093640 4018230729350911846 13347198740101264413 100694744361882843 14775147424720980057 6517937259768748694 15816748729620057094 532611606528064230 15316024259512041464 12197308189377428953 7421401974539601257 5540067026748168575 12296360172336534290 10215271357940309596 15393155668930059180 2739300680044673428 284872488598251182 6118494846997197607 14979375538042058633 6535528656649552333 1938005555620877442 16068612512126945812 4208093445685559682 1906388994182081157 1944639053025007819 10854696304841874776 6710314935782834531 15320490735642003557 12446012578148308820 9802007882020056190 17727498475601578362 198126531229796066 11842614950954033729 4900587228369817335 9250887930150150731 9326346395820539045 541352606174998649 1474072684249079992 16977686337806625953 2978830758788315759 16677295758133277291 6742613458955200916 3053494678379480380 15076858237764677944 12889171389734392539 5805668274223544955 8278356707735932879 10474369667808195627 17033149847302821703 3267868523595683594 2325074194031973474 7886595755912294833 1316589829137368023 5962350102227834426 1089517348252516654 10970625778247921577 7884081726600927225 15419122418055258576 9676364746096977407 3118911361737564019 6262867909161386166 16548777583365009988 3662296191619910529 4159586674024544471 9312305542983326930 91220912575288266 1395027393782670275 8420225995080129084 
111540861285881626 10085544972735844879 6912612691893266793 16740450022659508540 9677875453497344811 15053283579680459561 17384811378418997350 13302387650626421288 |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |74.5%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut >> AutoConfig::GetASPoolsWith3CPUs [GOOD] >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] |74.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD] >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] |74.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] >> KqpScripting::ScriptStats [GOOD] >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] |74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:01:02.137489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:01:02.137589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:01:02.137632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:01:02.137679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:01:02.137724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:01:02.137791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:01:02.137879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:01:02.137959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:01:02.138300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:01:02.754162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:01:02.754220Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:02.769524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:01:02.769776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:01:02.769899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:01:02.792434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:01:02.792687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:01:02.793341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:01:02.793539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:01:02.795375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:01:02.796629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:01:02.796687Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:01:02.796845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:01:02.796890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:01:02.796965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:01:02.797045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:01:02.804063Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:01:02.952672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:01:02.952902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:02.953100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:01:02.953334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:01:02.953418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:02.955702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:01:02.955830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:01:02.956009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:02.956073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:01:02.956150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:01:02.956191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:01:02.958564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:02.958619Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:01:02.958650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:01:02.960532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:02.960580Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:02.960622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:01:02.960668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:01:02.964354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:01:02.966267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:01:02.966487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:01:02.968018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:01:02.968172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:01:02.968239Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:01:02.968520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:01:02.968572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:01:02.968760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:01:02.968849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:01:02.970995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:01:02.971044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:01:02.971210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:01:02.971250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:01:02.971700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:02.971746Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:01:02.971842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:01:02.971874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:01:02.971908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:01:02.971940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:01:02.971975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:01:02.972017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:01:02.972051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:01:02.972084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:01:02.972158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:01:02.972205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:01:02.972237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:01:02.974107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:01:02.974232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:01:02.974271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
msg type: 269090816 2025-03-28T09:01:03.813041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-28T09:01:03.813569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:01:03.813667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:01:03.813730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:01:03.814007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-28T09:01:03.814147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:01:03.817072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:01:03.817119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:01:03.817322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:01:03.817363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-28T09:01:03.817759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:01:03.817803Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:01:03.818216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:01:03.818305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:01:03.818349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:01:03.818387Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-28T09:01:03.818425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:01:03.818506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 
2025-03-28T09:01:03.831977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:01:03.851574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 936 } } 2025-03-28T09:01:03.851637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:01:03.851792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 936 } } 2025-03-28T09:01:03.851887Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 936 } } FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:01:03.852874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:01:03.852930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:01:03.853051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:01:03.853092Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:01:03.853162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:01:03.853224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:01:03.853260Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:01:03.853294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:01:03.853330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:01:03.855040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at 
schemeshard: 72057594046678944 2025-03-28T09:01:03.857176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:01:03.857435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:01:03.857478Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:01:03.857564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:01:03.857596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:01:03.857643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:01:03.857693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:01:03.857755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:01:03.857806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-03-28T09:01:03.857847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:01:03.857884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:01:03.857915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:01:03.858029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:01:03.863376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:01:03.863439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:392:2364] TestWaitNotification: OK eventTxId 102 2025-03-28T09:01:03.863955Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:01:03.864145Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 208us result status StatusSuccess 2025-03-28T09:01:03.864592Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { 
DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-28T09:00:31.708863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-28T09:00:31.708992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:00:31.709055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-28T09:00:31.709102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-28T09:00:31.709147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-28T09:00:31.709177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-28T09:00:31.709240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:00:31.709332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-28T09:00:31.709754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:00:31.813789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:00:31.813866Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:00:31.842329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:00:31.842657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:00:31.842857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:00:31.849573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:00:31.849788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:00:31.850498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:31.850773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:31.859818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:31.861199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:31.861285Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:31.861497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:00:31.861547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:31.861586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:00:31.861663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:00:31.872365Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:00:32.034212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:00:32.034482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:32.034718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:00:32.034990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:00:32.035043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:32.039980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:32.040142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:00:32.040344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:32.040399Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:00:32.040431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:00:32.040473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:00:32.043194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:32.043271Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:00:32.043322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:00:32.047537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:32.047599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:32.047643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:32.047697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:00:32.059773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:00:32.071721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:00:32.071998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:00:32.073171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:32.073344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:32.073398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:32.073689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:00:32.073743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:32.073976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:00:32.074099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:32.081610Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:32.081700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:32.081937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:32.081979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:00:32.082363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:32.082415Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:00:32.082556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:32.082589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:32.082627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:32.082660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:32.082710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:00:32.082754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:32.082789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:00:32.082823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:00:32.082922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:00:32.082969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:00:32.083016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:00:32.085175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:32.085296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:32.085347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
E: StateWork, received event# 2146435072, Sender [9:125:2151], Recipient [9:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:01:04.328542Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:01:04.328623Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:01:04.328653Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:01:04.328759Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:01:04.328883Z node 9 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:01:04.328908Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [9:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-28T09:01:04.328938Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [9:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:01:04.329215Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:01:04.329250Z node 9 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:01:04.329358Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:01:04.329391Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:01:04.329430Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:01:04.329470Z node 9 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:01:04.329505Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:01:04.329550Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-28T09:01:04.329593Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:01:04.329632Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:01:04.329670Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:01:04.329807Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:01:04.329857Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-28T09:01:04.329895Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-28T09:01:04.329932Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-28T09:01:04.330621Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [9:207:2209], Recipient [9:125:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 } 2025-03-28T09:01:04.330655Z node 
9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-03-28T09:01:04.330734Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:01:04.330795Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:01:04.330827Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:01:04.330868Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:01:04.330911Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:01:04.330990Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:01:04.331479Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [9:207:2209], Recipient [9:125:2151]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 } 2025-03-28T09:01:04.331509Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-03-28T09:01:04.331565Z node 9 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:01:04.331621Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:01:04.331645Z node 9 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:01:04.331669Z node 9 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:01:04.331693Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:01:04.331754Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-28T09:01:04.331787Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:01:04.332286Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435083, Sender [9:125:2151], Recipient [9:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-03-28T09:01:04.332318Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-03-28T09:01:04.332370Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:01:04.332414Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for 
PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:01:04.332477Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:01:04.335140Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:01:04.335304Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:01:04.335326Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:01:04.336570Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:01:04.336602Z node 9 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:01:04.336682Z node 9 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:01:04.336873Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:01:04.336913Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:01:04.337289Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [9:449:2404], Recipient [9:125:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:04.337338Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:04.337373Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T09:01:04.337447Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [9:365:2344], Recipient [9:125:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2025-03-28T09:01:04.337476Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-28T09:01:04.337536Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:01:04.337621Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:01:04.337658Z node 9 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [9:447:2402] 2025-03-28T09:01:04.337820Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [9:449:2404], Recipient [9:125:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:01:04.337849Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:01:04.337884Z node 9 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-03-28T09:01:04.338189Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [9:450:2405], Recipient [9:125:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-28T09:01:04.338272Z node 9 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing 
event TEvSchemeShard::TEvDescribeScheme
2025-03-28T09:01:04.338351Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-28T09:01:04.338495Z node 9 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 137us result status StatusPathDoesNotExist
2025-03-28T09:01:04.338603Z node 9 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
|74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb
|74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb
|74.5%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb
|74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest
|74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest
|74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest
|74.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD]
|74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest
|74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest
|74.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log}
|74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest
|74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut
|74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut
|74.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log}
|74.6%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut
>> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD]
>> IncrementalRestoreScan::ChangeSenderSimple
|74.6%| [TA] $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptStats [GOOD]
Test command err:
Trying to start YDB, gRPC: 10875, MsgBus: 28283
2025-03-28T09:00:20.846490Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782639989703012:2196];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:00:20.875064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000af7/r3tmp/tmpAtYUve/pdisk_1.dat
2025-03-28T09:00:22.504094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:00:22.735009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:00:22.735092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:00:22.748556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 10875, node 1
2025-03-28T09:00:23.918698Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:00:25.481050Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:00:25.481069Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:00:25.481076Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:00:25.481380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:00:25.843026Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782639989703012:2196];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:00:25.843072Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
TClient is connected to server localhost:28283
TClient is connected to server localhost:28283
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:00:29.417350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:29.438892Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:00:29.454973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:29.667953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:29.893861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:29.966863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:31.595567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782687234344926:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:31.595688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:31.880789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.924858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:31.969670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.009774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.099829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.188762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.262179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782691529312752:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:32.262260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:32.262583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782691529312757:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:32.266678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:32.291005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782691529312759:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:32.358858Z node 1 :TX_PROXY ERROR: Actor# [1:7486782691529312814:3477] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 26000, MsgBus: 64823 2025-03-28T09:00:36.830956Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782704604534148:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:36.857040Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000af7/r3tmp/tmpPNTkGg/pdisk_1.dat 2025-03-28T09:00:38.234962Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:38.251650Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:38.257601Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:38.258110Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:38.275359Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26000, node 2 2025-03-28T09:00:38.547214Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:38.547235Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:38.547242Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:38.547355Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64823 TClient is connected to server localhost:64823 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-03-28T09:00:41.779756Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782704604534148:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:41.779826Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:41.807969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:41.819489Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:00:41.891390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:42.039164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:42.979460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:43.084989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:51.587107Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782769029045440:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:51.588478Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:51.668386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:51.808748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:51.905687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:51.992276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:52.057572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:52.161950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:52.350067Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782773324013255:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:52.350138Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:52.350474Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782773324013260:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:52.365276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:52.454041Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782773324013262:2485], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:00:52.522827Z node 2 :TX_PROXY ERROR: Actor# [2:7486782773324013321:3497] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:00:53.216908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T09:00:53.216930Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:00:57.626986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:01:00.142663Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152458747, txId: 281474976710674] shutting down
2025-03-28T09:01:01.855067Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152461862, txId: 281474976710678] shutting down
2025-03-28T09:01:02.780171Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152462762, txId: 281474976710682] shutting down
------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::LayoutIncorrect [FAIL]
Test command err:
2025-03-28T08:59:34.659854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:34.660150Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:34.660314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T08:59:34.660944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:34.661261Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:34.661467Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dd4/r3tmp/tmpdOZ8MU/pdisk_1.dat 2025-03-28T08:59:36.850301Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5046, node 1 TClient is connected to server localhost:22422 2025-03-28T08:59:43.325441Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:43.325724Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:43.325963Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:43.335419Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:07.086596Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:07.087732Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:07.088127Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:07.103943Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:07.105226Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:07.105302Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dd4/r3tmp/tmpX6y4IK/pdisk_1.dat 2025-03-28T09:00:09.737448Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32132, node 3 TClient is connected to server localhost:1933 2025-03-28T09:00:12.997326Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:12.997391Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:12.997598Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:12.998544Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:19.649219Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:19.649483Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:19.649700Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dd4/r3tmp/tmpVS83bd/pdisk_1.dat 2025-03-28T09:00:20.203033Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12651, node 5 TClient is connected to server localhost:23360 2025-03-28T09:00:20.843721Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:20.843788Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:20.843825Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:20.844511Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:32.085969Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:32.086211Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:32.086373Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dd4/r3tmp/tmpuTnqPd/pdisk_1.dat 2025-03-28T09:00:32.535947Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10469, node 7 TClient is connected to server localhost:12191 2025-03-28T09:00:33.353673Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:33.353738Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:33.353782Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:33.354060Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:33.440314Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:33.440489Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:33.466463Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:00:49.195538Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:446:2410], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:49.196888Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:49.198234Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dd4/r3tmp/tmp8zhEJz/pdisk_1.dat 2025-03-28T09:00:50.197365Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2685, node 9 TClient is connected to server localhost:16517 2025-03-28T09:00:51.067562Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:51.067634Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:51.067687Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:51.068531Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration assertion failed at ydb/core/health_check/health_check_ut.cpp:2275, virtual void NKikimr::NTestSuiteTHealthCheckTest::TTestCaseLayoutIncorrect::Execute_(NUnitTest::TTestContext &): (issue_log.message() == "Database has storage issues") failed: ("Database has multiple issues" != Database has storage issues) , with diff: ("|)Database has (mul|s)t(ipl|orag)e issues("|) TBackTrace::Capture()+28 (0x18D0A09C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x191C7660) NKikimr::NTestSuiteTHealthCheckTest::TTestCaseLayoutIncorrect::Execute_(NUnitTest::TTestContext&)+45740 (0x1880E3EC) std::__y1::__function::__func, void ()>::operator()()+280 (0x18938388) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x191FE686) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x191CE1D9) NKikimr::NTestSuiteTHealthCheckTest::TCurrentTest::Execute()+1204 (0x18937334) NUnitTest::TTestFactory::Execute()+2438 (0x191CFAA6) NUnitTest::RunMain(int, char**)+5213 (0x191F8BFD) ??+0 (0x7F57416ECD90) __libc_start_main+128 (0x7F57416ECE40) _start+41 (0x160EE029) >> THealthCheckTest::Issues100VCardMerging [GOOD] >> THealthCheckTest::Issues100Groups100VCardMerging >> LocalPartitionReader::Booting |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |74.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] Test command err: 2025-03-28T09:00:47.442957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:47.443492Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:47.443559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000c7e/r3tmp/tmpEJYQrk/pdisk_1.dat 2025-03-28T09:00:47.937323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:00:47.959893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:00:47.961597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:00:47.962575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:00:47.969015Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-03-28T09:00:47.969077Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-03-28T09:00:48.012317Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:48.038274Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2025-03-28T09:00:48.141487Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:336:2375] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-03-28T09:00:48.141681Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-03-28T09:00:48.141822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:48.141883Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-28T09:00:48.141921Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-28T09:00:48.141981Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-28T09:00:48.142015Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-28T09:00:48.142136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-03-28T09:00:48.142412Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-03-28T09:00:48.142485Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-03-28T09:00:48.142561Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-03-28T09:00:48.142616Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-03-28T09:00:48.142830Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-03-28T09:00:48.155315Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-03-28T09:00:48.155431Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:336:2375]) 2025-03-28T09:00:48.155530Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-03-28T09:00:48.156048Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-03-28T09:00:48.156136Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:336:2375])::Execute 2025-03-28T09:00:48.156180Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-28T09:00:48.156252Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:336:2375])::Complete 2025-03-28T09:00:48.156429Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 270443356160 } 2025-03-28T09:00:48.156514Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-03-28T09:00:48.156577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:00:48.156751Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-03-28T09:00:48.156824Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-28T09:00:48.156862Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-28T09:00:48.157012Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-03-28T09:00:48.157058Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-03-28T09:00:48.157090Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-03-28T09:00:48.157149Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-03-28T09:00:48.171287Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-03-28T09:00:48.171366Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-03-28T09:00:48.243149Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-03-28T09:00:48.243256Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-03-28T09:00:48.243625Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-03-28T09:00:48.244103Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-03-28T09:00:48.244173Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:409:2404] Proxy 2025-03-28T09:00:48.245045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
2025-03-28T09:00:48.246434Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-03-28T09:00:48.246544Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-03-28T09:00:48.246581Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-03-28T09:00:48.246617Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-03-28T09:00:48.248479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:00:48.248809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-03-28T09:00:48.266951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-03-28T09:00:48.270217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:00:48.275702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:00:48.275780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:00:48.276774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-03-28T09:00:48.285147Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-03-28T09:00:48.320179Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-28T09:00:48.320288Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-03-28T09:00:48.320522Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-03-28T09:00:48.320593Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2025-03-28T09:00:48.320658Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-03-28T09:00:48.320842Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-03-28T09:00:48.321463Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-03-28T09:00:48.321615Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
2025-03-28T09:00:48.322226Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-03-28T09:00:48.322530Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2025-03-28T09:00:48.322639Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048995872}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-03-28T09:00:48.322736Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048995872}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-03-28T09:00:48.322952Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048995872}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-03-28T09:00:48.323044Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048995872}: tablet 72075186224037888 channel 2 assigned to group 21810380 ... TxBlockStorageResult::Execute(72075186224037888 OK) 2025-03-28T09:01:05.710219Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2025-03-28T09:01:05.710540Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-28T09:01:05.710630Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-03-28T09:01:05.710680Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-03-28T09:01:05.711939Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2025-03-28T09:01:05.712355Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2025-03-28T09:01:05.724613Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:01:05.726572Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 HANDLE EvProposeTransaction marker# C0 2025-03-28T09:01:05.726645Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 step# 3500 Status# 16 SEND to# [2:409:2404] Proxy marker# C1 2025-03-28T09:01:05.738138Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2025-03-28T09:01:05.818253Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715666 has been planned 2025-03-28T09:01:05.818369Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 2025-03-28T09:01:05.818608Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 2025-03-28T09:01:05.818891Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4000 in 0.500000s at 3.950000s 2025-03-28T09:01:05.819400Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 3500, txid# 281474976715666 marker# C2 2025-03-28T09:01:05.819489Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715666 stepId# 3500 Status# 17 SEND EvProposeTransactionStatus to# [2:409:2404] Proxy
2025-03-28T09:01:05.820195Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715666 at step 3500 at tablet 72075186224037889 { Transactions { TxId: 281474976715666 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-28T09:01:05.820256Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:01:05.820457Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 3500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:01:05.822862Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:01:05.822938Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:01:05.822996Z node 2 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2025-03-28T09:01:05.823247Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-03-28T09:01:05.823394Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:01:05.823741Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:01:05.823829Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2025-03-28T09:01:05.824272Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:01:05.825849Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-03-28T09:01:05.825932Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:01:05.826876Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:01:05.826959Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:01:05.827028Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715666 state PreOffline TxInFly 0 2025-03-28T09:01:05.827135Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:01:05.827256Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-03-28T09:01:05.827388Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-03-28T09:01:05.827442Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-03-28T09:01:05.827479Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 for mediator 72057594046382081 acknowledged 2025-03-28T09:01:05.827520Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:25] persistent tx 281474976715666 acknowledged 2025-03-28T09:01:05.828510Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715666, done: 0, blocked: 1 2025-03-28T09:01:05.831277Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715666 datashard 72075186224037889 state PreOffline 2025-03-28T09:01:05.831370Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-28T09:01:05.831999Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715666:0 2025-03-28T09:01:05.832106Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715666, publications: 2, subscribers: 1 2025-03-28T09:01:05.832817Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 1 2025-03-28T09:01:05.833409Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:01:05.834421Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OTI4OWZkZjctMTBhNzkxNGItNDFiNzZhMDktZDk5MmI0OGM= 2025-03-28 09:01:05.834 INFO ydb-core-tx-datashard-ut_minstep(pid=27004, tid=0x00007F584CCCCD00) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-03-28T09:01:05.834565Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OTI4OWZkZjctMTBhNzkxNGItNDFiNzZhMDktZDk5MmI0OGM= 2025-03-28 09:01:05.834 INFO ydb-core-tx-datashard-ut_minstep(pid=27004, tid=0x00007F584CCCCD00) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-03-28T09:01:05.834835Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OTI4OWZkZjctMTBhNzkxNGItNDFiNzZhMDktZDk5MmI0OGM= 2025-03-28 09:01:05.834 INFO ydb-core-tx-datashard-ut_minstep(pid=27004, tid=0x00007F584CCCCD00) [core exec] yql_execution.cpp:59: Begin, root #43 2025-03-28T09:01:05.834909Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OTI4OWZkZjctMTBhNzkxNGItNDFiNzZhMDktZDk5MmI0OGM= 2025-03-28 09:01:05.834 INFO ydb-core-tx-datashard-ut_minstep(pid=27004, tid=0x00007F584CCCCD00) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-03-28T09:01:05.835013Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=OTI4OWZkZjctMTBhNzkxNGItNDFiNzZhMDktZDk5MmI0OGM= 2025-03-28 09:01:05.834 TRACE ydb-core-tx-datashard-ut_minstep(pid=27004, tid=0x00007F584CCCCD00) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-03-28T09:01:05.835169Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OTI4OWZkZjctMTBhNzkxNGItNDFiNzZhMDktZDk5MmI0OGM= 2025-03-28 09:01:05.835 INFO ydb-core-tx-datashard-ut_minstep(pid=27004, tid=0x00007F584CCCCD00) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-03-28T09:01:05.835256Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OTI4OWZkZjctMTBhNzkxNGItNDFiNzZhMDktZDk5MmI0OGM= 2025-03-28 09:01:05.835 INFO ydb-core-tx-datashard-ut_minstep(pid=27004, tid=0x00007F584CCCCD00) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-03-28T09:01:05.835399Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OTI4OWZkZjctMTBhNzkxNGItNDFiNzZhMDktZDk5MmI0OGM= 2025-03-28 09:01:05.835 INFO ydb-core-tx-datashard-ut_minstep(pid=27004, tid=0x00007F584CCCCD00) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-03-28T09:01:05.835504Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=OTI4OWZkZjctMTBhNzkxNGItNDFiNzZhMDktZDk5MmI0OGM= 2025-03-28 09:01:05.835 INFO ydb-core-tx-datashard-ut_minstep(pid=27004, tid=0x00007F584CCCCD00) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-03-28T09:01:05.835698Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=OTI4OWZkZjctMTBhNzkxNGItNDFiNzZhMDktZDk5MmI0OGM= 2025-03-28 09:01:05.835 NOTE ydb-core-tx-datashard-ut_minstep(pid=27004, tid=0x00007F584CCCCD00) [common provider] yql_provider_gateway.cpp:21:
: Info: Execution, code: 1060 2025-03-28T09:01:05.835776Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=OTI4OWZkZjctMTBhNzkxNGItNDFiNzZhMDktZDk5MmI0OGM= 2025-03-28 09:01:05.835 NOTE ydb-core-tx-datashard-ut_minstep(pid=27004, tid=0x00007F584CCCCD00) [common provider] yql_provider_gateway.cpp:21:
:1:12: Info: Executing DROP TABLE 2025-03-28T09:01:05.835866Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=OTI4OWZkZjctMTBhNzkxNGItNDFiNzZhMDktZDk5MmI0OGM= 2025-03-28 09:01:05.835 NOTE ydb-core-tx-datashard-ut_minstep(pid=27004, tid=0x00007F584CCCCD00) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4 2025-03-28T09:01:05.849190Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:01:05.849378Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-03-28T09:01:05.851447Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:01:05.852595Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-03-28T09:01:05.853069Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889 2025-03-28T09:01:05.853131Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2025-03-28T09:01:05.853246Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2025-03-28T09:01:05.853386Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2025-03-28T09:01:05.853517Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 >> LocalPartitionReader::Booting [GOOD] |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |74.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] >> THealthCheckTest::TestTabletIsDead [GOOD] >> LocalPartitionReader::FeedSlowly |74.6%| [TA] $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting [GOOD] |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp >> LocalPartitionReader::Simple [GOOD] |74.7%| [TA] {RESULT} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |74.7%| [TA] {RESULT} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |74.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::FeedSlowly [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration >> TPQTest::TestDirectReadHappyWay >> TPartitionTests::CorrectRange_Commit >> TPartitionTests::ConflictingActsInSeveralBatches |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |74.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects >> IncrementalRestoreScan::Empty
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:39.500930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:39.502005Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T08:59:39.505689Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:39.515638Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:39.516682Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001de6/r3tmp/tmprSz2oV/pdisk_1.dat 2025-03-28T08:59:43.730370Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5276, node 1 TClient is connected to server localhost:15910 2025-03-28T08:59:48.138637Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:48.142825Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:48.143075Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:48.143929Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:12.580994Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:12.581453Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:12.581574Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:12.594469Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:12.599320Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:12.599432Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001de6/r3tmp/tmpK4T7sO/pdisk_1.dat 2025-03-28T09:00:14.149566Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30505, node 3 TClient is connected to server localhost:27335 2025-03-28T09:00:16.079841Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:16.079962Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:16.080070Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:16.080643Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-70fb-1231c6b1" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" reason: "YELLOW-5321-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-3" reason: "YELLOW-e9e2-1231c6b1-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-5321-1231c6b1" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-595f-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-595f-1231c6b1-f7549920" status: YELLOW message: "Pool degraded" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "YELLOW-ef3e-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "YELLOW-99d2-1231c6b1-3-2147483648-3-55-0-55" status: YELLOW message: "VDisks have space issue" location { storage { node { id: 3 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483648-3-55-0-55" id: "2147483648-3-56-0-56" id: "2147483648-3-57-0-57" } } } } database { name: "/Root" } } reason: "YELLOW-e463-3-3-42" reason: "YELLOW-e463-3-3-43" reason: "YELLOW-e463-3-3-44" type: "VDISK" level: 5 listed: 3 count: 3 } issue_log { id: "YELLOW-e463-3-3-42" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/2909/001de6/r3tmp/tmpK4T7sO/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-e463-3-3-43" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-43" path: "/home/runner/.ya/build/build_root/2909/001de6/r3tmp/tmpK4T7sO/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-e463-3-3-44" status: YELLOW message: "Available size is less than 12%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/2909/001de6/r3tmp/tmpK4T7sO/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-ef3e-1231c6b1-2147483648" status: YELLOW message: "Group degraded" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "YELLOW-99d2-1231c6b1-3-2147483648-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 3 host: "::1" port: 12001 } 2025-03-28T09:00:33.108964Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:500:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:33.109518Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:33.109719Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:33.112007Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:496:2159], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:31.200167Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:31.200218Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001de6/r3tmp/tmpWd9KV8/pdisk_1.dat 2025-03-28T09:00:31.602874Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25320, node 5 TClient is connected to server localhost:12571 2025-03-28T09:00:32.054459Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:32.054518Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:32.054543Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:32.054818Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-70fb-1231c6b1" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" reason: "YELLOW-5321-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-5" reason: "YELLOW-e9e2-1231c6b1-6" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-5321-1231c6b1" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-595f-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-595f-1231c6b1-f7549920" status: YELLOW message: "Pool degraded" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "YELLOW-ef3e-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-a594-5-5-42" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-42" path: "/home/runner/.ya/build/build_root/2909/001de6/r3tmp/tmpWd9KV8/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a594-5-5-43" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-43" path: "/home/runner/.ya/build/build_root/2909/001de6/r3tmp/tmpWd9KV8/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a594-5-5-44" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 5 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "5-44" path: "/home/runner/.ya/build/build_root/2909/001de6/r3tmp/tmpWd9KV8/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-ef3e-1231c6b1-2147483648" status: YELLOW message: "Group degraded" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } type: "STORAGE_GROUP" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-03-28T09:00:52.092404Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:773:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:52.092930Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:52.093306Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:52.093864Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:770:2356], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:52.094134Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:52.094298Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001de6/r3tmp/tmpW1cCPX/pdisk_1.dat 2025-03-28T09:00:52.772445Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10205, node 7 TClient is connected to server localhost:63850 2025-03-28T09:01:06.009150Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:06.009221Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:06.009268Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:06.010677Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:01:06.028484Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:06.028648Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:06.068447Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-03-28T09:01:06.069199Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:06.319767Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-03-28T09:01:06.320853Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected self_check_result: EMERGENCY issue_log { id: "RED-f489-1231c6b1" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-6fa7-1231c6b1" reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "RED-6fa7-1231c6b1" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-e5e3-1231c6b1-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-7" reason: "YELLOW-e9e2-1231c6b1-8" reason: "YELLOW-e9e2-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-e5e3-1231c6b1-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" level: 4 } location { id: 7 host: "::1" port: 12001 } |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] >> TSubscriberCombinationsTest::CombinationsRootDomain [GOOD] >> TSubscriberCombinationsTest::CombinationsMigratedPath >> TPartitionTests::CorrectRange_Commit [GOOD] >> ReadLoad::ShouldReadKqp >> UpsertLoad::ShouldWriteDataBulkUpsertBatch >> TPartitionTests::ConflictingTxIsAborted >> ReadLoad::ShouldReadIterate >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql >> UpsertLoad::ShouldWriteKqpUpsert2 >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom >> IncrementalRestoreScan::ChangeSenderEmpty |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::SpecificServerlessWithExclusiveNodes [GOOD] Test command err: 2025-03-28T08:59:35.502849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:35.503604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:35.504361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T08:59:35.507489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:35.508689Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:35.509388Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dbf/r3tmp/tmp4Fix7J/pdisk_1.dat 2025-03-28T08:59:38.972641Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61631, node 1 TClient is connected to server localhost:22545 2025-03-28T08:59:45.369065Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:45.369381Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:45.369625Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:45.382963Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-be81-1231c6b1" status: YELLOW message: "Database has storage issues" location { database { name: "/Root" } } reason: "YELLOW-5321-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-5321-1231c6b1" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-595f-1231c6b1-80c02825" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-595f-1231c6b1-80c02825" status: YELLOW message: "Pool degraded" location { storage { pool { name: "static" } } database { name: "/Root" } } reason: "YELLOW-ef3e-1231c6b1-0" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-4847-1231c6b1-1-0-3-55-0-55" status: RED message: "VDisk is not available" location { storage { node { id: 1 host: "::1" port: 12001 } pool { name: "static" group { vdisk { id: "0-3-55-0-55" } } } } database { name: "/Root" } } type: "VDISK" level: 5 } issue_log { id: "YELLOW-ef3e-1231c6b1-0" status: YELLOW message: "Group degraded" location { storage { pool { name: "static" group { id: "0" } } } database { name: "/Root" } } reason: "RED-4847-1231c6b1-1-0-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 1 host: "::1" port: 12001 } 2025-03-28T09:00:13.730945Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:13.732854Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:13.733243Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:13.765684Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:13.767766Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:13.767866Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dbf/r3tmp/tmpinpql2/pdisk_1.dat 2025-03-28T09:00:14.936031Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17897, node 3 TClient is connected to server localhost:4026 2025-03-28T09:00:16.569956Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:16.570028Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:16.570061Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:16.570278Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:31.197759Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:500:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:31.198105Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:31.198174Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:31.199719Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:496:2159], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:31.200167Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:31.200218Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dbf/r3tmp/tmp5El2I7/pdisk_1.dat 2025-03-28T09:00:31.602874Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25320, node 5 TClient is connected to server localhost:12571 2025-03-28T09:00:32.054459Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:32.054518Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:32.054543Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:32.054818Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:48.041265Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:48.042248Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:48.042358Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:48.053659Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:48.053713Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:48.054162Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dbf/r3tmp/tmp1tuxfT/pdisk_1.dat 2025-03-28T09:00:49.204725Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2082, node 7 TClient is connected to server localhost:10215 2025-03-28T09:00:50.383719Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:50.383768Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:50.383789Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:50.384502Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:01:06.839808Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:526:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:06.839944Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:06.840006Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dbf/r3tmp/tmppNddTX/pdisk_1.dat 2025-03-28T09:01:07.423884Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27627, node 9 TClient is connected to server localhost:21448 2025-03-28T09:01:07.939634Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:07.939707Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:07.939754Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:07.940537Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration [GOOD] >> TPartitionTests::ConflictingTxIsAborted [GOOD] |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |74.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write >> TPartitionTests::ConflictingTxProceedAfterRollback |74.7%| [TA] $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> TPQTest::TestDirectReadHappyWay [GOOD] >> TPQTest::TestDescribeBalancer |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] >> TKeyValueTest::TestRenameWorks [GOOD] >> TKeyValueTest::TestRenameWorksNewApi >> KqpLimits::TooBigColumn-useSink [GOOD] >> TQuotaTracker::TestSmallMessages [GOOD] >> TQuotaTracker::TestBigMessages [GOOD] >> TSourceIdTests::ExpensiveCleanup |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> TPartitionTests::ConflictingActsInSeveralBatches [GOOD] >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:00:57.395605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:00:57.395692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:57.395732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:00:57.395764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:00:57.395818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:00:57.395848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:00:57.395919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:57.395995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:00:57.396298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:00:57.580315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:00:57.580372Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:57.616483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:00:57.618643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2025-03-28T09:00:57.623148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:00:57.679823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:00:57.680057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:00:57.680628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:57.680854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:57.687421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:57.688831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:57.688906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:57.689072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:00:57.689135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:57.689182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:00:57.689272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.703640Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:00:57.898588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:00:57.898827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.899073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:00:57.899359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:00:57.899423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.901902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:57.902029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:00:57.902261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.902332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:00:57.902407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:00:57.902441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:00:57.904917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.904968Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:00:57.905005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:00:57.907015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.907103Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.907146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:57.907194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:00:57.924194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:00:57.926799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:00:57.927027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:00:57.928171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:57.928323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:57.928384Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:57.928712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:00:57.928778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:57.928937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:00:57.929038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:57.931444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-03-28T09:00:57.931490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:57.931696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:57.931754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:00:57.932199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.932246Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:00:57.932341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:57.932373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:57.932410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:57.932440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:57.932475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:00:57.932544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:57.932590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:00:57.932619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:00:57.932687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:00:57.932726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:00:57.932758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:00:57.935052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:57.935197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:57.935237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
lete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.960173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.960306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.960407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.960518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.960587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.960665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.960789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.960856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.960920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.960982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.964920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.965056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.965345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.965436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.965508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.965571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.965686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.965727Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:01:10.965838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:01:10.965875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:01:10.965925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:01:10.965957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:01:10.966013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-28T09:01:10.966106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2816:4081] message: TxId: 101 2025-03-28T09:01:10.966154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:01:10.966211Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:01:10.966258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:01:10.967525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-03-28T09:01:10.972005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:01:10.972067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:2817:4082] TestWaitNotification: OK eventTxId 101 2025-03-28T09:01:10.972608Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:01:10.972837Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 266us result status StatusSuccess 2025-03-28T09:01:10.973445Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "str" Type: "String" TypeId: 4097 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "key" NextColumnId: 4 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 
72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "key" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1743152470.974243 30361 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TAlterColumnTable: 6:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2025-03-28T09:01:10.976807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterTtlSettings { Enabled { ColumnName: "str" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:01:10.977009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:01:10.977400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: ttl update error: Unsupported column type. 
in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2025-03-28T09:01:10.979908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:01:10.980039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 28448, MsgBus: 28909 2025-03-28T08:59:34.528221Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782440413057971:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:34.528252Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ab/r3tmp/tmpHshrj7/pdisk_1.dat 2025-03-28T08:59:38.471557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:39.531178Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782440413057971:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:39.531221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:39.870891Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:41.358880Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:41.403480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:41.403504Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:41.808951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:41.809755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:41.844830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:42.290484Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.106484s 2025-03-28T08:59:42.290555Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} 
StateWork event processing took too much time Type# 2146435078 Duration# 0.106564s TServer::EnableGrpc on GrpcPort 28448, node 1 2025-03-28T08:59:43.521058Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:43.521076Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:43.521083Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:43.521170Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28909 TClient is connected to server localhost:28909 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:50.519424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:50.540023Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:53.757707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782522017437249:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:53.758318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782522017437238:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:53.758391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:53.761339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T08:59:53.776370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782522017437252:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T08:59:53.851941Z node 1 :TX_PROXY ERROR: Actor# [1:7486782522017437303:2366] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:55.026325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T08:59:56.191023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:56.191045Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 21307, MsgBus: 18728 2025-03-28T09:00:03.135539Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782566507272967:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:03.135574Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ab/r3tmp/tmpUcKSXo/pdisk_1.dat 2025-03-28T09:00:05.044514Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:05.546850Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:05.547304Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:05.592155Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:05.748995Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21307, node 2 2025-03-28T09:00:06.821075Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:06.821349Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:06.821359Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:06.821461Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:08.139248Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782566507272967:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:08.139611Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:18728 TClient is connected to server localhost:18728 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:09.866730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:09.879423Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:00:13.805501Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782609456946613:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:13.805547Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782609456946590:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:13.806165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:13.811407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:00:13.957626Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782609456946626:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transact ... ::NKqp::AssertSuccessResult(const NYdb::TStatus &): (result.IsSuccess())
: Error: Stream write queries aren't allowed., code: 2029 library/cpp/testing/unittest/registar.cpp:37 RaiseError(): requirement UnittestThread failed 2025-03-28T09:00:31.548097Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:00:31.548131Z node 3 :IMPORT WARN: Table profiles were not loaded 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x194DF2A8 1. /-S/util/system/yassert.cpp:55: Panic @ 0x194CD53A 2. /tmp//-S/library/cpp/testing/unittest/registar.cpp:37: RaiseError @ 0x1995C771 3. /-S/ydb/core/kqp/ut/common/kqp_ut_common.h:372: AssertSuccessResult @ 0x18CCDD36 4. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:348: CreateSampleTables @ 0x48DE6916 5. /tmp//-S/ydb/core/kqp/ut/common/kqp_ut_common.cpp:544: operator() @ 0x48E25C63 6. /-S/library/cpp/threading/future/core/future-inl.h:493: SetValue @ 0x48E25C63 7. /-S/library/cpp/threading/future/async.h:24: operator() @ 0x48E25C63 8. /-S/util/thread/pool.h:71: Process @ 0x48E25C63 9. /-S/util/thread/pool.cpp:405: DoExecute @ 0x194F23A5 10. /-S/util/thread/factory.h:15: Execute @ 0x194EEF5C 11. /-S/util/thread/factory.cpp:36: ThreadProc @ 0x194EEF5C 12. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x194E36F4 13. /tmp//-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239: asan_thread_start @ 0x19194F88 14. ??:0: ?? @ 0x7F88BE43AAC2 15. ??:0: ?? @ 0x7F88BE4CC84F Trying to start YDB, gRPC: 10225, MsgBus: 61545 2025-03-28T09:00:49.591133Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782762467752483:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:49.591760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ab/r3tmp/tmpKhNBDc/pdisk_1.dat 2025-03-28T09:00:52.503259Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:54.520809Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:54.587424Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782762467752483:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:54.588165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:55.206827Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:55.212007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:55.212101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:55.241538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:00:55.882730Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.101923s 2025-03-28T09:00:55.883078Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too 
much time Type# 2146435078 Duration# 0.106132s TServer::EnableGrpc on GrpcPort 10225, node 1 2025-03-28T09:00:56.781482Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:56.781506Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:56.781512Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:56.790979Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61545 TClient is connected to server localhost:61545 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:01:01.857465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:01.911564Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:01:01.924902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:02.750658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:03.182480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:03.445328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:07.235904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782839777165486:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:07.236097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:07.576896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:01:07.618417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:01:07.663567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:01:07.699375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:01:07.757224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:01:07.832496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:01:07.875499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782839777166003:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:07.875567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:07.875639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782839777166008:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:07.878955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:01:07.891386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782839777166010:2484], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:01:07.962148Z node 1 :TX_PROXY ERROR: Actor# [1:7486782839777166063:3480] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:01:09.247620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:01:09.247645Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:11.297853Z node 1 :TX_DATASHARD ERROR: Transaction write column value of 20971522 bytes is larger than the allowed threshold 2025-03-28T09:01:11.303247Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710671 at tablet 72075186224037911 status: EXEC_ERROR errors: BAD_ARGUMENT (Transaction write column value of 20971522 bytes is larger than the allowed threshold) | 2025-03-28T09:01:11.306899Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486782848367101007:2513] TxId: 281474976710671. Ctx: { TraceId: 01jqdzqn9n5wfj4n8pdg46vb53, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjczZjRlMDUtZGY3MWI3MzctNmQ0MTY5ODctNmRiMDY4YzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold; 2025-03-28T09:01:11.364207Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjczZjRlMDUtZGY3MWI3MzctNmQ0MTY5ODctNmRiMDY4YzM=, ActorId: [1:7486782848367100936:2513], ActorState: ExecuteState, TraceId: 01jqdzqn9n5wfj4n8pdg46vb53, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold >> TSourceIdTests::ExpensiveCleanup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] Test command err: 2025-03-28T08:59:37.004333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:37.004629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:37.004769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T08:59:37.005287Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:37.005623Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:37.005830Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dbc/r3tmp/tmpx6l4WY/pdisk_1.dat 2025-03-28T08:59:43.515098Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16132, node 1 TClient is connected to server localhost:10799 2025-03-28T08:59:47.721996Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:47.722223Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:47.722502Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:47.723759Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:10.602061Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:10.602499Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:10.602661Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:10.605630Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:10.606201Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:10.606291Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dbc/r3tmp/tmphymMCK/pdisk_1.dat 2025-03-28T09:00:11.281002Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9114, node 3 TClient is connected to server localhost:63274 2025-03-28T09:00:13.030653Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:13.046883Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:13.047064Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:13.047988Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-70fb-1231c6b1" status: RED message: "Database has multiple issues" location { database { name: "/Root" } } reason: "RED-d6d1-1231c6b1" reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-3" reason: "YELLOW-e9e2-1231c6b1-4" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-d6d1-1231c6b1" status: RED message: "Storage failed" location { database { name: "/Root" } } reason: "RED-258e-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "RED-258e-1231c6b1-f7549920" status: RED message: "Pool failed" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "RED-819b-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-99d2-1231c6b1-3-2147483648-3-55-0-55" status: RED message: "VDisks have space issue" location { storage { node { id: 3 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483648-3-55-0-55" id: "2147483648-3-56-0-56" id: "2147483648-3-57-0-57" } } } } database { name: "/Root" } } reason: "RED-8ac8-3-3-42" reason: "RED-8ac8-3-3-43" reason: "RED-8ac8-3-3-44" type: "VDISK" level: 5 listed: 3 count: 3 } issue_log { id: "RED-8ac8-3-3-42" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-42" path: "/home/runner/.ya/build/build_root/2909/001dbc/r3tmp/tmphymMCK/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-8ac8-3-3-43" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-43" path: 
"/home/runner/.ya/build/build_root/2909/001dbc/r3tmp/tmphymMCK/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-8ac8-3-3-44" status: RED message: "Available size is less than 6%" location { storage { node { id: 3 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "3-44" path: "/home/runner/.ya/build/build_root/2909/001dbc/r3tmp/tmphymMCK/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-819b-1231c6b1-2147483648" status: RED message: "Group failed" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-99d2-1231c6b1-3-2147483648-3-55-0-55" type: "STORAGE_GROUP" level: 4 } location { id: 3 host: "::1" port: 12001 } 2025-03-28T09:00:30.292644Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:500:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:30.293162Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:30.293236Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:30.294888Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:496:2159], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:30.295326Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:30.295378Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dbc/r3tmp/tmpeGlWHb/pdisk_1.dat 2025-03-28T09:00:30.740035Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15361, node 5 TClient is connected to server localhost:3060 2025-03-28T09:00:31.274226Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:31.274305Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:31.274347Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:31.274667Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:47.762210Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:47.766798Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:47.767166Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:47.769409Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:47.769467Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:47.769671Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dbc/r3tmp/tmpiatGql/pdisk_1.dat 2025-03-28T09:00:48.421310Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23786, node 7 TClient is connected to server localhost:8857 2025-03-28T09:00:50.006218Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:50.006573Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:50.009867Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:50.027428Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:01:08.266209Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:08.266444Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:08.266778Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:01:08.267895Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:08.268406Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:08.268471Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dbc/r3tmp/tmpbGxWhA/pdisk_1.dat 2025-03-28T09:01:08.691360Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20777, node 9 TClient is connected to server localhost:62834 2025-03-28T09:01:10.054990Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:10.055169Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:10.055318Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:10.056030Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TPartitionTests::ConflictingTxProceedAfterRollback [GOOD] |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::ExpensiveCleanup [GOOD] Test command err: 2025-03-28T09:01:10.283239Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:01:10.283345Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:01:10.308742Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:01:10.309160Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:01:10.309547Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:177:2192] 2025-03-28T09:01:10.310482Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Initializing completed. 2025-03-28T09:01:10.310547Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:177:2192] 2025-03-28T09:01:10.310602Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:01:10.311111Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. 
Count 0 2025-03-28T09:01:10.311364Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:01:10.311407Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-28T09:01:10.311461Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-28T09:01:10.311499Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-28T09:01:10.311563Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cclient-1 2025-03-28T09:01:10.311593Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uclient-1 2025-03-28T09:01:10.311612Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] _config_1 2025-03-28T09:01:10.311644Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-28T09:01:10.311678Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-28T09:01:10.311749Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:01:10.311942Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2025-03-28T09:01:10.312033Z node 1 :PERSQUEUE INFO: new Cookie owner1|eebfec86-402f35b-71e864e1-18d5189_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send disk status response with cookie: 0 2025-03-28T09:01:10.312490Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:01:10.312612Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 1 2025-03-28T09:01:10.312872Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 1 2025-03-28T09:01:10.312950Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 1 2025-03-28T09:01:10.313167Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 0 partNo 0 2025-03-28T09:01:10.314054Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2025-03-28T09:01:10.314719Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 d0000000001_00000000000000000100_00000_0000000001_00000| size 104 WTime 127 2025-03-28T09:01:10.314857Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:01:10.314893Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-28T09:01:10.314930Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] [x0000000001, x0000000002) 2025-03-28T09:01:10.314962Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-28T09:01:10.315021Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001pSourceId 2025-03-28T09:01:10.315063Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] d0000000001_00000000000000000100_00000_0000000001_00000| 2025-03-28T09:01:10.315087Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-28T09:01:10.315116Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-28T09:01:10.315150Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-28T09:01:10.366955Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:01:10.367058Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. Partition: 1 2025-03-28T09:01:10.367137Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 0, partNo: 0, Offset: 100 is stored on disk 2025-03-28T09:01:10.367309Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 100 ReadingTimestamp 0 rrg 0 2025-03-28T09:01:10.710976Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2025-03-28T09:01:10.733346Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 2 2025-03-28T09:01:10.733465Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Got quota. 
Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1: Cookie: 2 2025-03-28T09:01:10.733616Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 1 partNo 0 2025-03-28T09:01:10.734028Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob sourceId 'SourceId' seqNo 1 partNo 0 result is x0000000001_00000000000000000100_00000_0000000001_00000 size 104 2025-03-28T09:01:10.734102Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 old key x0000000001_00000000000000000100_00000_0000000001_00000 new key d0000000001_00000000000000000100_00000_0000000001_00000 size 104 WTime 1328 2025-03-28T09:01:10.734936Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 1 partNo 0 FormedBlobsCount 1 NewHead: Offset 200 PartNo 0 PackedSize 118 count 1 nextOffset 201 batches 1 2025-03-28T09:01:10.735524Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 200,1 HeadOffset 100 endOffset 101 curOffset 201 d0000000001_00000000000000000200_00000_0000000001_00000| size 105 WTime 1328 2025-03-28T09:01:10.735659Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:01:10.735693Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-28T09:01:10.735725Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] [x0000000001, x0000000002) 2025-03-28T09:01:10.735761Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-28T09:01:10.735796Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] d0000000001_00000000000000000100_00000_0000000001_00000 2025-03-28T09:01:10.735833Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001pSourceId 2025-03-28T09:01:10.735866Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] d0000000001_00000000000000000200_00000_0000000001_00000| 2025-03-28T09:01:10.735888Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-28T09:01:10.735919Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-28T09:01:10.735950Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-28T09:01:10.757272Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:01:10.757381Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::ReplyWrite. 
Partition: 1 2025-03-28T09:01:10.757452Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 1, partNo: 0, Offset: 200 is stored on disk 2025-03-28T09:01:10.757711Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:01:10.757757Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-28T09:01:10.757802Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] [d0000000001_00000000000000000100_00000_0000000001_00000|, d0000000001_00000000000000000100_00000_0000000001_00000|] 2025-03-28T09:01:10.757839Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-28T09:01:10.757873Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-28T09:01:10.757908Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-28T09:01: ... 2 Iteration 143 Iteration 144 Iteration 145 Iteration 146 Iteration 147 Iteration 148 Iteration 149 Iteration 150 Iteration 151 Iteration 152 Iteration 153 Iteration 154 Iteration 155 Iteration 156 Iteration 157 Iteration 158 Iteration 159 Iteration 160 Iteration 161 Iteration 162 Iteration 163 Iteration 164 Iteration 165 Iteration 166 Iteration 167 Iteration 168 Iteration 169 Iteration 170 Iteration 171 Iteration 172 Iteration 173 Iteration 174 Iteration 175 Iteration 176 Iteration 177 Iteration 178 Iteration 179 Iteration 180 Iteration 181 Iteration 182 Iteration 183 Iteration 184 Iteration 185 Iteration 186 Iteration 187 Iteration 188 Iteration 189 Iteration 190 Iteration 191 Iteration 192 Iteration 193 Iteration 194 Iteration 195 Iteration 196 Iteration 197 Iteration 198 Iteration 199 Iteration 200 Iteration 201 Iteration 202 Iteration 203 Iteration 204 Iteration 205 Iteration 206 Iteration 207 Iteration 208 Iteration 209 Iteration 210 Iteration 211 Iteration 212 Iteration 213 Iteration 214 Iteration 215 Iteration 216 Iteration 217 Iteration 218 Iteration 219 Iteration 220 Iteration 221 Iteration 222 Iteration 223 Iteration 224 Iteration 225 Iteration 226 Iteration 227 Iteration 228 Iteration 229 Iteration 230 Iteration 231 Iteration 232 Iteration 233 Iteration 234 Iteration 235 Iteration 236 Iteration 237 Iteration 238 Iteration 239 Iteration 240 Iteration 241 Iteration 242 Iteration 243 Iteration 244 Iteration 245 Iteration 246 Iteration 247 Iteration 248 Iteration 249 Iteration 250 Iteration 251 Iteration 252 Iteration 253 Iteration 254 Iteration 255 Iteration 256 Iteration 257 Iteration 258 Iteration 259 Iteration 260 Iteration 261 Iteration 262 Iteration 263 Iteration 264 Iteration 265 Iteration 266 Iteration 267 Iteration 268 Iteration 269 Iteration 270 Iteration 271 Iteration 272 Iteration 273 Iteration 274 Iteration 275 Iteration 276 Iteration 277 Iteration 278 Iteration 279 Iteration 280 Iteration 281 Iteration 282 Iteration 283 Iteration 284 Iteration 285 Iteration 286 Iteration 287 Iteration 288 Iteration 289 Iteration 290 Iteration 291 Iteration 292 Iteration 293 Iteration 294 Iteration 295 Iteration 296 Iteration 297 Iteration 298 Iteration 299 Iteration 300 Iteration 301 Iteration 302 Iteration 303 Iteration 304 Iteration 305 Iteration 306 Iteration 307 Iteration 308 Iteration 309 Iteration 310 Iteration 
311 Iteration 312 ... Iteration 816 Iteration 817 Iteration
818 Iteration 819 Iteration 820 Iteration 821 Iteration 822 Iteration 823 Iteration 824 Iteration 825 Iteration 826 Iteration 827 Iteration 828 Iteration 829 Iteration 830 Iteration 831 Iteration 832 Iteration 833 Iteration 834 Iteration 835 Iteration 836 Iteration 837 Iteration 838 Iteration 839 Iteration 840 Iteration 841 Iteration 842 Iteration 843 Iteration 844 Iteration 845 Iteration 846 Iteration 847 Iteration 848 Iteration 849 Iteration 850 Iteration 851 Iteration 852 Iteration 853 Iteration 854 Iteration 855 Iteration 856 Iteration 857 Iteration 858 Iteration 859 Iteration 860 Iteration 861 Iteration 862 Iteration 863 Iteration 864 Iteration 865 Iteration 866 Iteration 867 Iteration 868 Iteration 869 Iteration 870 Iteration 871 Iteration 872 Iteration 873 Iteration 874 Iteration 875 Iteration 876 Iteration 877 Iteration 878 Iteration 879 Iteration 880 Iteration 881 Iteration 882 Iteration 883 Iteration 884 Iteration 885 Iteration 886 Iteration 887 Iteration 888 Iteration 889 Iteration 890 Iteration 891 Iteration 892 Iteration 893 Iteration 894 Iteration 895 Iteration 896 Iteration 897 Iteration 898 Iteration 899 Iteration 900 Iteration 901 Iteration 902 Iteration 903 Iteration 904 Iteration 905 Iteration 906 Iteration 907 Iteration 908 Iteration 909 Iteration 910 Iteration 911 Iteration 912 Iteration 913 Iteration 914 Iteration 915 Iteration 916 Iteration 917 Iteration 918 Iteration 919 Iteration 920 Iteration 921 Iteration 922 Iteration 923 Iteration 924 Iteration 925 Iteration 926 Iteration 927 Iteration 928 Iteration 929 Iteration 930 Iteration 931 Iteration 932 Iteration 933 Iteration 934 Iteration 935 Iteration 936 Iteration 937 Iteration 938 Iteration 939 Iteration 940 Iteration 941 Iteration 942 Iteration 943 Iteration 944 Iteration 945 Iteration 946 Iteration 947 Iteration 948 Iteration 949 Iteration 950 Iteration 951 Iteration 952 Iteration 953 Iteration 954 Iteration 955 Iteration 956 Iteration 957 Iteration 958 Iteration 959 Iteration 960 Iteration 961 Iteration 962 Iteration 963 Iteration 964 Iteration 965 Iteration 966 Iteration 967 Iteration 968 Iteration 969 Iteration 970 Iteration 971 Iteration 972 Iteration 973 Iteration 974 Iteration 975 Iteration 976 Iteration 977 Iteration 978 Iteration 979 Iteration 980 Iteration 981 Iteration 982 Iteration 983 Iteration 984 Iteration 985 Iteration 986 Iteration 987 Iteration 988 Iteration 989 Iteration 990 Iteration 991 Iteration 992 Iteration 993 Iteration 994 Iteration 995 Iteration 996 Iteration 997 Iteration 998 Iteration 999 |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |74.8%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} |74.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order >> QueryStats::Ranges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] Test command err: 2025-03-28T09:00:49.002587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:49.003078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:49.003143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000cc0/r3tmp/tmpjAvG38/pdisk_1.dat 2025-03-28T09:00:50.933890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:00:50.952563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:00:50.957001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:00:50.957924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:00:50.962645Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-03-28T09:00:50.963028Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-03-28T09:00:51.003978Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:51.009159Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Merged config: { } 2025-03-28T09:00:51.060832Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [1:336:2375] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-03-28T09:00:51.060998Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Execute 2025-03-28T09:00:51.061246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:51.061304Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-28T09:00:51.061346Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-28T09:00:51.061393Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-28T09:00:51.061430Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-28T09:00:51.061533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-03-28T09:00:51.061798Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-03-28T09:00:51.061849Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-03-28T09:00:51.061908Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-03-28T09:00:51.061969Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-03-28T09:00:51.062123Z node 1 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 1 Location DataCenter: "1" Module: "1" Rack: "1" Unit: "1" 2025-03-28T09:00:51.073925Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(1)::Complete 2025-03-28T09:00:51.074008Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Node(1) Ping([1:336:2375]) 2025-03-28T09:00:51.074128Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-03-28T09:00:51.074712Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvSyncTablets 2025-03-28T09:00:51.074805Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:336:2375])::Execute 2025-03-28T09:00:51.074846Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-28T09:00:51.074916Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxSyncTablets([1:336:2375])::Complete 2025-03-28T09:00:51.075081Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 1: Status: 0 StartTime: 0 ResourceMaximum { Memory: 270443356160 } 2025-03-28T09:00:51.075169Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Execute 2025-03-28T09:00:51.075222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:00:51.075401Z node 1 :HIVE DEBUG: HIVE#72057594037968897 AddRegisteredDataCentersNode(1, 1) 2025-03-28T09:00:51.075465Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-28T09:00:51.075501Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-28T09:00:51.075643Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-03-28T09:00:51.075677Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-03-28T09:00:51.075716Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessWaitQueue (size: 0) 2025-03-28T09:00:51.075762Z node 1 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-03-28T09:00:51.087895Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(1)::Complete 2025-03-28T09:00:51.087990Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-03-28T09:00:51.157127Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-03-28T09:00:51.157620Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-03-28T09:00:51.159229Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-03-28T09:00:51.164902Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-03-28T09:00:51.165214Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:409:2404] Proxy 2025-03-28T09:00:51.170403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 
2025-03-28T09:00:51.177133Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-03-28T09:00:51.177230Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-03-28T09:00:51.177263Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-03-28T09:00:51.177298Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-03-28T09:00:51.178711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:00:51.178790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-03-28T09:00:51.179622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-03-28T09:00:51.182477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:00:51.190024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:00:51.190189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:00:51.191452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2025-03-28T09:00:51.205948Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2025-03-28T09:00:51.259690Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-28T09:00:51.260218Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-03-28T09:00:51.261452Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2025-03-28T09:00:51.261907Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,2): {} 2025-03-28T09:00:51.262780Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-03-28T09:00:51.263964Z node 1 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2025-03-28T09:00:51.271736Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-03-28T09:00:51.272576Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 
2025-03-28T09:00:51.274451Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2025-03-28T09:00:51.292124Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false } } 2025-03-28T09:00:51.292696Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048995872}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2025-03-28T09:00:51.293230Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048995872}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2025-03-28T09:00:51.294025Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048995872}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2025-03-28T09:00:51.294304Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923048995872}: tablet 72075186224037888 channel 2 assigned to group 21810380 ... 037889 2025-03-28T09:01:16.184750Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715667 ssId 72057594046644480 seqNo 2:4 2025-03-28T09:01:16.185143Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715667 at tablet 72075186224037889 2025-03-28T09:01:16.186531Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:01:16.195741Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:01:16.195809Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:01:16.196163Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:01:16.196495Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2025-03-28T09:01:16.207467Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:01:16.207828Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:01:16.221329Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 HANDLE EvProposeTransaction marker# C0 2025-03-28T09:01:16.222349Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 step# 33000 Status# 16 SEND to# [2:409:2404] Proxy marker# C1 2025-03-28T09:01:16.307166Z node 2 :TX_COORDINATOR DEBUG: Transaction 281474976715667 has been planned 2025-03-28T09:01:16.307268Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715667 for mediator 72057594046382081 tablet 72057594046644480 2025-03-28T09:01:16.307309Z node 2 :TX_COORDINATOR DEBUG: Planned transaction 281474976715667 for mediator 72057594046382081 tablet 72075186224037889 2025-03-28T09:01:16.307547Z node 2 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 33500 in 0.500000s at 33.450000s 2025-03-28T09:01:16.307900Z node 2 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 33000, txid# 281474976715667 marker# C2 2025-03-28T09:01:16.307975Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715667 stepId# 33000 Status# 
17 SEND EvProposeTransactionStatus to# [2:409:2404] Proxy 2025-03-28T09:01:16.308928Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715667 at step 33000 at tablet 72075186224037889 { Transactions { TxId: 281474976715667 AckTo { RawX1: 0 RawX2: 0 } } Step: 33000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-28T09:01:16.309108Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:01:16.309505Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 33000, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:01:16.310600Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:01:16.310796Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:01:16.310934Z node 2 :TX_DATASHARD DEBUG: Found ready operation [33000:281474976715667] in PlanQueue unit at 72075186224037889 2025-03-28T09:01:16.311541Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 33000:281474976715667 keys extracted: 0 2025-03-28T09:01:16.311741Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:01:16.312184Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:01:16.312368Z node 2 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037889 2025-03-28T09:01:16.314074Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:01:16.321953Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 33000} 2025-03-28T09:01:16.322259Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:01:16.328155Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:01:16.328462Z node 2 :TX_DATASHARD DEBUG: Complete [33000 : 281474976715667] from 72075186224037889 at tablet 72075186224037889 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:01:16.328801Z node 2 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715667 state PreOffline TxInFly 0 2025-03-28T09:01:16.330134Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:01:16.330786Z node 2 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-03-28T09:01:16.331119Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-03-28T09:01:16.331448Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2025-03-28T09:01:16.331474Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 for mediator 72057594046382081 acknowledged 2025-03-28T09:01:16.331753Z node 2 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:54] persistent tx 281474976715667 acknowledged 2025-03-28T09:01:16.334590Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715667, done: 0, blocked: 1 2025-03-28T09:01:16.347089Z node 2 
:TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715667 datashard 72075186224037889 state PreOffline 2025-03-28T09:01:16.347963Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-28T09:01:16.350629Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:0 2025-03-28T09:01:16.351103Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715667, publications: 1, subscribers: 1 2025-03-28T09:01:16.352155Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715667, subscribers: 1 2025-03-28T09:01:16.353889Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:01:16.357166Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MmJmMTNhZjItYjEzNjc0ZmUtZTMxMTlhZDEtMzQ5MjYwZjU= 2025-03-28 09:01:16.357 INFO ydb-core-tx-datashard-ut_minstep(pid=27237, tid=0x00007F4C7B384D00) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-03-28T09:01:16.358286Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MmJmMTNhZjItYjEzNjc0ZmUtZTMxMTlhZDEtMzQ5MjYwZjU= 2025-03-28 09:01:16.358 INFO ydb-core-tx-datashard-ut_minstep(pid=27237, tid=0x00007F4C7B384D00) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-03-28T09:01:16.360467Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MmJmMTNhZjItYjEzNjc0ZmUtZTMxMTlhZDEtMzQ5MjYwZjU= 2025-03-28 09:01:16.360 INFO ydb-core-tx-datashard-ut_minstep(pid=27237, tid=0x00007F4C7B384D00) [core exec] yql_execution.cpp:59: Begin, root #43 2025-03-28T09:01:16.361006Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MmJmMTNhZjItYjEzNjc0ZmUtZTMxMTlhZDEtMzQ5MjYwZjU= 2025-03-28 09:01:16.360 INFO ydb-core-tx-datashard-ut_minstep(pid=27237, tid=0x00007F4C7B384D00) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-03-28T09:01:16.361736Z node 2 :KQP_YQL TRACE: SessionId: ydb://session/3?node_id=2&id=MmJmMTNhZjItYjEzNjc0ZmUtZTMxMTlhZDEtMzQ5MjYwZjU= 2025-03-28 09:01:16.361 TRACE ydb-core-tx-datashard-ut_minstep(pid=27237, tid=0x00007F4C7B384D00) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-03-28T09:01:16.362237Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MmJmMTNhZjItYjEzNjc0ZmUtZTMxMTlhZDEtMzQ5MjYwZjU= 2025-03-28 09:01:16.362 INFO ydb-core-tx-datashard-ut_minstep(pid=27237, tid=0x00007F4C7B384D00) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-03-28T09:01:16.363262Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MmJmMTNhZjItYjEzNjc0ZmUtZTMxMTlhZDEtMzQ5MjYwZjU= 2025-03-28 09:01:16.363 INFO ydb-core-tx-datashard-ut_minstep(pid=27237, tid=0x00007F4C7B384D00) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-03-28T09:01:16.363824Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MmJmMTNhZjItYjEzNjc0ZmUtZTMxMTlhZDEtMzQ5MjYwZjU= 2025-03-28 09:01:16.363 INFO ydb-core-tx-datashard-ut_minstep(pid=27237, tid=0x00007F4C7B384D00) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-03-28T09:01:16.364596Z node 2 :KQP_YQL INFO: SessionId: ydb://session/3?node_id=2&id=MmJmMTNhZjItYjEzNjc0ZmUtZTMxMTlhZDEtMzQ5MjYwZjU= 2025-03-28 09:01:16.364 INFO ydb-core-tx-datashard-ut_minstep(pid=27237, 
tid=0x00007F4C7B384D00) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-03-28T09:01:16.365783Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=MmJmMTNhZjItYjEzNjc0ZmUtZTMxMTlhZDEtMzQ5MjYwZjU= 2025-03-28 09:01:16.365 NOTE ydb-core-tx-datashard-ut_minstep(pid=27237, tid=0x00007F4C7B384D00) [common provider] yql_provider_gateway.cpp:21:
: Info: Execution, code: 1060 2025-03-28T09:01:16.366396Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=MmJmMTNhZjItYjEzNjc0ZmUtZTMxMTlhZDEtMzQ5MjYwZjU= 2025-03-28 09:01:16.366 NOTE ydb-core-tx-datashard-ut_minstep(pid=27237, tid=0x00007F4C7B384D00) [common provider] yql_provider_gateway.cpp:21:
:1:12: Info: Executing DROP TABLE 2025-03-28T09:01:16.367121Z node 2 :KQP_YQL NOTICE: SessionId: ydb://session/3?node_id=2&id=MmJmMTNhZjItYjEzNjc0ZmUtZTMxMTlhZDEtMzQ5MjYwZjU= 2025-03-28 09:01:16.367 NOTE ydb-core-tx-datashard-ut_minstep(pid=27237, tid=0x00007F4C7B384D00) [common provider] yql_provider_gateway.cpp:21:
: Info: Success, code: 4 2025-03-28T09:01:16.393239Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:01:16.394066Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-03-28T09:01:16.402045Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:01:16.405980Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-03-28T09:01:16.408303Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186224037889 2025-03-28T09:01:16.408550Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2025-03-28T09:01:16.409024Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2025-03-28T09:01:16.409855Z node 2 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2025-03-28T09:01:16.410183Z node 2 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches [GOOD] >> IncrementalRestoreScan::Empty [GOOD] >> TPartitionTests::ConflictingCommitsInSeveralBatches ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] Test command err: 2025-03-28T09:01:11.275587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:11.276044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:11.276109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cb9/r3tmp/tmpjdqN2B/pdisk_1.dat 2025-03-28T09:01:14.550001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-03-28T09:01:14.550398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:14.559575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:01:14.561208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:01:14.562044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:14.566037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:01:14.574892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:01:14.575325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:14.575780Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:01:14.575945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:01:14.576184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:01:14.582904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:14.583332Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:01:14.583475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:01:14.584690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:14.584729Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:14.584772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-03-28T09:01:14.584842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-03-28T09:01:14.602866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:01:14.626963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:01:14.627271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:01:14.628352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-03-28T09:01:14.628409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-03-28T09:01:14.628467Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-03-28T09:01:14.862003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-03-28T09:01:14.872902Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:14.917048Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:01:14.918319Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:01:14.918572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:14.918707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:14.937787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:15.027265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:01:15.027452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:01:15.027526Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-03-28T09:01:15.027815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:01:15.027876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-03-28T09:01:15.028057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:01:15.028143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:01:15.029216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:01:15.029279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:01:15.029457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:01:15.029498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:572:2499], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-03-28T09:01:15.029866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:15.029931Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2025-03-28T09:01:15.030031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:01:15.030069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:01:15.030420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:01:15.030471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:01:15.030525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:01:15.030576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:01:15.030633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:01:15.030667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:01:15.030907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-28T09:01:15.030950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-03-28T09:01:15.030989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-03-28T09:01:15.033514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-03-28T09:01:15.033658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-03-28T09:01:15.033698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2025-03-28T09:01:15.033758Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-03-28T09:01:15.033811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:01:15.035746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-03-28T09:01:15.035811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:593:2518] 2025-03-28T09:01:15.036697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2025-03-28T09:01:15.037226Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle 
TEvProposeTransaction 2025-03-28T09:01:15.037277Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:01:15.037458Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:01:15.045751Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:01:15.045859Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:01:15.046540Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:01:15.046645Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavi ... 1/1 2025-03-28T09:01:16.102818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-03-28T09:01:16.103129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-03-28T09:01:16.103423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-03-28T09:01:16.103786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 1/1, is published: true 2025-03-28T09:01:16.104462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:593:2518] message: TxId: 281474976715658 2025-03-28T09:01:16.104810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-03-28T09:01:16.105483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-03-28T09:01:16.105818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715658:0 2025-03-28T09:01:16.106854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-28T09:01:16.108856Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037889 state Ready 2025-03-28T09:01:16.109199Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-28T09:01:16.113933Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvNavigate describe path /Root/IncrBackupTable 2025-03-28T09:01:16.114950Z node 1 :TX_PROXY DEBUG: Actor# [1:818:2676] HANDLE EvNavigateScheme /Root/IncrBackupTable 2025-03-28T09:01:16.133064Z node 1 :TX_PROXY DEBUG: Actor# [1:818:2676] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:01:16.134294Z node 1 :TX_PROXY DEBUG: Actor# [1:818:2676] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" Options { ShowPrivateTable: true } 2025-03-28T09:01:16.140013Z node 1 :TX_PROXY DEBUG: Actor# [1:818:2676] Handle TEvDescribeSchemeResult Forward to# [1:593:2518] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-28T09:01:16.155590Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:828:2680], serverId# [1:829:2681], sessionId# [0:0:0] 2025-03-28T09:01:16.164800Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:01:16.167159Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:01:16.170716Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-28T09:01:16.173387Z node 1 :CHANGE_EXCHANGE DEBUG: 
[IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] BodySize: 18 }] } 2025-03-28T09:01:16.174332Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:01:16.176448Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvGetProxyServicesRequest 2025-03-28T09:01:16.177375Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:834:2682] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-28T09:01:16.180011Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:835:2686], serverId# [1:836:2687], sessionId# [0:0:0] 2025-03-28T09:01:16.238400Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:834:2682] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-28T09:01:16.239599Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:01:16.240829Z node 1 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][0:0][72075186224037888][1:834:2682] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 3] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:01:16.240894Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:01:16.242179Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:830:2682] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData |74.9%| [TA] $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQTest::TestDescribeBalancer [GOOD] >> TPQTest::TestCheckACL |74.9%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpLimits::QueryExecTimeout [GOOD] >> KqpLimits::QSReplySize-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::Empty [GOOD] Test command err: 2025-03-28T09:01:17.728735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:17.729320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:17.729405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001caf/r3tmp/tmpdLO1jM/pdisk_1.dat 2025-03-28T09:01:18.881928Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:597:2521] Exhausted 2025-03-28T09:01:18.882075Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:597:2521] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2025-03-28T09:01:18.882136Z node 1 :CHANGE_EXCHANGE DEBUG: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:597:2521] Finish 0 >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches [GOOD] >> KqpStreamLookup::ReadTableDuringSplit >> TPartitionTests::ConflictingSrcIdForTxWithHead >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] >> KqpParams::CheckCacheByAst [GOOD] >> KqpEffects::InsertAbort_Literal_Success >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] >> KqpEffects::InsertAbort_Select_Success >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] >> KqpWrite::Insert >> DataShardWrite::UpsertImmediate >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi >> UpsertLoad::ShouldWriteDataBulkUpsertBatch [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] Test command err: 2025-03-28T08:59:33.159656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:33.159918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:33.160060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T08:59:33.160551Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:33.160813Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:33.160967Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001da8/r3tmp/tmp1td6Sd/pdisk_1.dat 2025-03-28T08:59:35.264304Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15109, node 1 TClient is connected to server localhost:63865 2025-03-28T08:59:39.996000Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:39.996053Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:39.996084Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:39.996274Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:15.675525Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:15.676436Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:15.676565Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:15.693421Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:15.711050Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:15.711174Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001da8/r3tmp/tmpgUUk7Q/pdisk_1.dat 2025-03-28T09:00:16.182168Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61841, node 3 TClient is connected to server localhost:1066 2025-03-28T09:00:16.792607Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:16.792675Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:16.792705Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:16.792955Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:30.267186Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:488:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:30.267619Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:30.267760Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:30.268813Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:778:2357], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:30.269138Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:30.269319Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001da8/r3tmp/tmpyfB7cV/pdisk_1.dat 2025-03-28T09:00:30.670208Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2525, node 5 TClient is connected to server localhost:9227 2025-03-28T09:00:35.265490Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:35.265561Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:35.265607Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:35.266294Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:35.303453Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:35.303600Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:35.389528Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 7 Cookie 7 2025-03-28T09:00:35.399020Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-5" reason: "YELLOW-e9e2-1231c6b1-6" reason: "YELLOW-e9e2-1231c6b1-7" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-6" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 6 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 5 host: "::1" port: 12001 } 2025-03-28T09:01:01.951697Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:853:2415], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:01.952243Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:01.952621Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:01:01.955699Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:850:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:01.956237Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:01.956293Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001da8/r3tmp/tmps7bTe9/pdisk_1.dat 2025-03-28T09:01:03.018136Z node 8 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15152, node 8 TClient is connected to server localhost:7456 2025-03-28T09:01:10.051034Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:10.051107Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:10.051152Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:10.052442Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:01:10.057210Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([8:1340:2703]) [8:1605:2708] 2025-03-28T09:01:10.058094Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(PersQueue(72057594046578946,0)) 2025-03-28T09:01:10.080573Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046578946 OwnerIdx: 0 TabletType: PersQueue BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } 2025-03-28T09:01:10.080728Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2025-03-28T09:01:10.081066Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type PersQueue: {} 2025-03-28T09:01:10.081169Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2025-03-28T09:01:10.081399Z node 8 :HIVE DEBUG: HIVE#72057594037968897 CreateTabletFollowers Tablet PersQueue.72075186224037888.Leader.0 2025-03-28T09:01:10.081477Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2025-03-28T09:01:10.081653Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2025-03-28T09:01:10.083501Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 ... 
VE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected nodes count 1 2025-03-28T09:01:10.187483Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected max priority nodes count 1 2025-03-28T09:01:10.187532Z node 8 :HIVE TRACE: HIVE#72057594037968897 [FBN] Tablet PersQueue.72075186224037888.Leader.0 selected node 10 2025-03-28T09:01:10.187604Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 10) 2025-03-28T09:01:10.187661Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,0,0,0)->(0,1048576,0,0)) 2025-03-28T09:01:10.187768Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2025-03-28T09:01:10.188254Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2025-03-28T09:01:10.188558Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-03-28T09:01:10.188815Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2025-03-28T09:01:10.189481Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Execute, Sending TEvBootTablet(PersQueue.72075186224037888.Leader.1) to node 10 storage {Version# 1 TabletID# 72075186224037888 TabletType# PersQueue Channels# {0:{Channel# 0 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.067536Z}}, 1:{Channel# 1 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.067536Z}}, 2:{Channel# 2 Type# none StoragePool# /Root:test History# {0:{FromGeneration# 0 GroupID# 2181038080 Timestamp# 1970-01-01T00:00:00.067536Z}}} Tenant: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:01:10.211512Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Complete 2025-03-28T09:01:10.211607Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-03-28T09:01:10.211894Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStartTablet::Complete Tablet (72075186224037888,0) SideEffects: {Notifications: 0x10080002 [10:1585:2366] NKikimrLocal.TEvBootTablet Info { TabletID: 72075186224037888 Channels { Channel: 0 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038080 } StoragePool: "/Root:test" } Channels { Channel: 1 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038080 } StoragePool: "/Root:test" } Channels { Channel: 2 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038080 } StoragePool: "/Root:test" } TabletType: PersQueue Version: 1 TenantIdOwner: 72057594046644480 TenantIdLocalId: 1 } SuggestedGeneration: 1 BootMode: BOOT_MODE_LEADER FollowerId: 0} 2025-03-28T09:01:10.212428Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected (duplicate), NodeId 10 Cookie 72075186224037888 2025-03-28T09:01:10.634993Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2025-03-28T09:01:10.635140Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Execute for tablet PersQueue.72075186224037888.Leader.1 status 0 generation 1 follower 0 from local [10:1585:2366] 2025-03-28T09:01:10.635225Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Starting -> Running (Node 10) 
2025-03-28T09:01:10.635294Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,1048576,0,0)->(0,0,0,0)) 2025-03-28T09:01:10.635421Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2025-03-28T09:01:10.635506Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2025-03-28T09:01:10.635571Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,0,0,0)->(0,1048576,0,0)) 2025-03-28T09:01:10.635652Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {} -> {Memory: 1048576} 2025-03-28T09:01:10.635733Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {} -> {Memory: 1048576} 2025-03-28T09:01:10.635857Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (0) 2025-03-28T09:01:10.635899Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2025-03-28T09:01:10.636120Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - executing 2025-03-28T09:01:10.636183Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2025-03-28T09:01:10.636244Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle ProcessBootQueue (size: 0) 2025-03-28T09:01:10.636294Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2025-03-28T09:01:10.683120Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxUpdateTabletStatus::Complete TabletId: 72075186224037888 SideEffects: {Notifications: 0x10040207 [8:1339:2702] {EvTabletCreationResult Status: OK TabletID: 72075186224037888}} 2025-03-28T09:01:10.683341Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Complete 2025-03-28T09:01:17.760769Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 10: Status: 2 2025-03-28T09:01:17.760898Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Execute 2025-03-28T09:01:17.760965Z node 8 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2025-03-28T09:01:17.761101Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxStatus(10)::Complete 2025-03-28T09:01:17.761641Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRestartTablet(PersQueue.72075186224037888.Leader.1)::Execute 2025-03-28T09:01:17.761777Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Running -> Stopped (Node 10) 2025-03-28T09:01:17.761854Z node 8 :HIVE TRACE: HIVE#72057594037968897 Node(10, (0,1048576,0,0)->(0,0,0,0)) 2025-03-28T09:01:17.761988Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: ObjectId (72057594046578946,0): {Memory: 1048576} -> {} 2025-03-28T09:01:17.762093Z node 8 :HIVE TRACE: HIVE#72057594037968897 UpdateTotalResources: Type PersQueue: {Memory: 1048576} -> {} 2025-03-28T09:01:17.762178Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(PersQueue.72075186224037888.Leader.1 gen 1) to node 10 2025-03-28T09:01:17.762263Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Tablet(PersQueue.72075186224037888.Leader.1) VolatileState: Stopped -> Booting 2025-03-28T09:01:17.762321Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-03-28T09:01:17.762371Z node 8 :HIVE TRACE: HIVE#72057594037968897 ProcessBootQueue - sending 2025-03-28T09:01:17.762707Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxKillNode(10)::Execute 2025-03-28T09:01:17.762817Z node 8 :HIVE 
WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:01:17.762870Z node 8 :HIVE TRACE: Node(10) DeregisterInDomains (72057594046644480:1) : 1 -> 0 2025-03-28T09:01:17.762933Z node 8 :HIVE DEBUG: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(3, 10) 2025-03-28T09:01:17.762992Z node 8 :HIVE TRACE: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server [8:1647:2713] 2025-03-28T09:01:17.763050Z node 8 :HIVE DEBUG: HIVE#72057594037968897 TryToDeleteNode(10): waiting 3600.000000s 2025-03-28T09:01:17.764745Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([10:1586:2366]) [8:1647:2713] 2025-03-28T09:01:17.769020Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([8:2030:2735]) [8:2031:2740] 2025-03-28T09:01:17.776114Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([8:2030:2735]) [8:2031:2740] 2025-03-28T09:01:17.778502Z node 8 :HIVE TRACE: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([11:2008:2365]) [8:2065:2743] 2025-03-28T09:01:17.797284Z node 8 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvRegisterNode from [11:2007:2365] HiveId: 72057594037968897 ServicedDomains { SchemeShard: 72057594046644480 PathId: 1 } TabletAvailability { Type: Mediator Priority: 0 } TabletAvailability { Type: Dummy Priority: 0 } TabletAvailability { Type: KeyValue Priority: 0 } TabletAvailability { Type: Coordinator Priority: 0 } TabletAvailability { Type: Hive Priority: 0 } TabletAvailability { Type: SchemeShard Priority: 0 } TabletAvailability { Type: DataShard Priority: 0 } TabletAvailability { Type: PersQueue Priority: 0 } TabletAvailability { Type: PersQueueReadBalancer Priority: 0 } TabletAvailability { Type: Kesus Priority: 0 } TabletAvailability { Type: SysViewProcessor Priority: 0 } TabletAvailability { Type: ColumnShard Priority: 0 } TabletAvailability { Type: SequenceShard Priority: 0 } TabletAvailability { Type: ReplicationController Priority: 0 } TabletAvailability { Type: StatisticsAggregator Priority: 0 } 2025-03-28T09:01:17.797399Z node 8 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxRegisterNode(11)::Execute 2025-03-28T09:01:17.797520Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:17.797569Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-28T09:01:17.797602Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-03-28T09:01:17.797638Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessWaitQueue (0) 2025-03-28T09:01:17.797677Z node 8 :HIVE DEBUG: HIVE#72057594037968897 ProcessBootQueue (1) 2025-03-28T09:01:17.797750Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:17.799438Z node 8 :HIVE DEBUG: HIVE#72057594037968897 TEvInterconnect::TEvNodeInfo NodeId 11 Location DataCenter: "4" Module: "4" Rack: "4" Unit: "4" self_check_result: EMERGENCY issue_log { id: "RED-f489-1231c6b1" status: RED message: "Database has compute issues" location { database { name: "/Root" } } reason: "RED-6fa7-1231c6b1" reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "RED-6fa7-1231c6b1" status: RED message: "Compute has issues with tablets" location { database { name: "/Root" } } reason: "RED-e5e3-1231c6b1-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is 
overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-10" reason: "YELLOW-e9e2-1231c6b1-11" reason: "YELLOW-e9e2-1231c6b1-8" reason: "YELLOW-e9e2-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-11" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 11 host: "::1" port: 12004 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "RED-e5e3-1231c6b1-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" level: 4 } location { id: 8 host: "::1" port: 12001 } >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheByAst [GOOD] Test command err: Trying to start YDB, gRPC: 65521, MsgBus: 8352 2025-03-28T08:59:33.535080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:33.536961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:33.537314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c8/r3tmp/tmpf5Fp2E/pdisk_1.dat TServer::EnableGrpc on GrpcPort 65521, node 1 2025-03-28T08:59:36.597391Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:36.675922Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:36.676007Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:36.676038Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:36.676423Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:36.758239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:36.770820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:36.795401Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8352 TClient is connected to server localhost:8352 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-03-28T08:59:43.926493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:43.926759Z node 1 :IMPORT WARN: Table profiles were not loaded Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:44.034473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:44.105945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:46.379298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T08:59:48.432940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:49.199992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:52.047901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1921:3517], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:52.048005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:52.148252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:52.689858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:53.010221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:53.309886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:53.641252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:54.135835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:54.752753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2486:3947], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:54.752859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:54.753118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2491:3952], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:54.778540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:55.051080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2493:3954], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T08:59:55.102101Z node 1 :TX_PROXY ERROR: Actor# [1:2561:4003] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:00.101850Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:2851:4231] TxId: 281474976715671. Ctx: { TraceId: 01jqdznhph8p13d90axs0syjqv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTI5ODIzMGQtMTI5M2M0N2EtMjg1NDFmZjctODc1MTc4NDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. STATUS_CODE_UNSPECIFIED: 2025-03-28T09:00:00.137980Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2859:4270], TxId: 281474976715671, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=YTI5ODIzMGQtMTI5M2M0N2EtMjg1NDFmZjctODc1MTc4NDM=. CustomerSuppliedId : . TraceId : 01jqdznhph8p13d90axs0syjqv. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:2851:4231], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-03-28T09:00:00.149773Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2857:4268], TxId: 281474976715671, task: 1. Ctx: { TraceId : 01jqdznhph8p13d90axs0syjqv. SessionId : ydb://session/3?node_id=1&id=YTI5ODIzMGQtMTI5M2M0N2EtMjg1NDFmZjctODc1MTc4NDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:2851:4231], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-03-28T09:00:00.158808Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:2858:4269], TxId: 281474976715671, task: 2. Ctx: { TraceId : 01jqdznhph8p13d90axs0syjqv. SessionId : ydb://session/3?node_id=1&id=YTI5ODIzMGQtMTI5M2M0N2EtMjg1NDFmZjctODc1MTc4NDM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:2851:4231], status: UNSPECIFIED, reason: {
: Error: Terminate execution } 2025-03-28T09:00:00.159505Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTI5ODIzMGQtMTI5M2M0N2EtMjg1NDFmZjctODc1MTc4NDM=, ActorId: [1:2817:4231], ActorState: ExecuteState, TraceId: 01jqdznhph8p13d90axs0syjqv, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 2791, MsgBus: 19791 2025-03-28T09:00:14.284079Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:14.284357Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:14.284411Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c8/r3tmp/tmpIdB5LE/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2791, node 2 2025-03-28T09:00:15.719946Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:15.723087Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:15.723471Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:15.723766Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:15.724715Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:15.780966Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:15.781085Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:15.793759Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19791 TClient is connected to server localhost:19791 WaitRootIsUp 'Root'... TClient::Ls request: ... .803902Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:01:06.550839Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486782834168133228:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:06.550942Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:06.608036Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:01:06.653201Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:01:06.698329Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:01:06.741731Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:01:06.786376Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:01:06.873239Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:01:07.006836Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486782838463101057:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:07.006927Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:07.007262Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486782838463101062:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:07.011436Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:01:07.030614Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486782838463101064:2470], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:01:07.111969Z node 5 :TX_PROXY ERROR: Actor# [5:7486782838463101119:3470] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 8055, MsgBus: 21416 2025-03-28T09:01:10.270521Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486782851304826238:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:10.271683Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c8/r3tmp/tmp7FBTR1/pdisk_1.dat 2025-03-28T09:01:10.414396Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:10.458712Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:10.458820Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:10.460974Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8055, node 6 2025-03-28T09:01:10.539000Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:10.539026Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:10.539033Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:10.539180Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21416 TClient is connected to server localhost:21416 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:01:12.276691Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:01:12.366594Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:01:12.403759Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:12.925112Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:13.649708Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:13.841081Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:15.152819Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486782851304826238:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:15.195238Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:01:16.306250Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486782877074631649:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:16.306319Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:16.389182Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:01:16.431545Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:01:16.521621Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:01:16.572812Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:01:16.681950Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:01:16.768735Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:01:16.990556Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486782877074632175:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:16.990642Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:16.990846Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486782877074632180:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:16.994515Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:01:17.015906Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486782877074632182:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:01:17.086428Z node 6 :TX_PROXY ERROR: Actor# [6:7486782881369599533:3467] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite >> KqpImmediateEffects::ImmediateUpdate >> KqpImmediateEffects::ConflictingKeyR1WR2 >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] Test command err: 2025-03-28T09:01:15.544653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:15.545159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:15.545240Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cc7/r3tmp/tmphjvEp8/pdisk_1.dat 2025-03-28T09:01:18.271700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-03-28T09:01:18.272612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:18.272841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:01:18.273097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:01:18.273169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:18.273900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:01:18.274031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:01:18.274210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:18.274272Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:01:18.274320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:01:18.274352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:01:18.285434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:18.286339Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:01:18.290759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:01:18.302790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:18.303127Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:18.303726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-03-28T09:01:18.304618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , 
TxId: 1 ready parts: 1/1 2025-03-28T09:01:18.366776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:01:18.378597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:01:18.381412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:01:18.390442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-03-28T09:01:18.391698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-03-28T09:01:18.392363Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-03-28T09:01:18.567246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-03-28T09:01:18.567968Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:18.698207Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:01:18.712168Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:01:18.714774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:18.715461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:18.727795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:18.836484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:01:18.837208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:01:18.838236Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-03-28T09:01:18.841090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:01:18.841409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2025-03-28T09:01:18.857334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:01:18.857930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:01:18.871314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:01:18.871390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:01:18.871577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:01:18.871617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:572:2499], at schemeshard: 72057594046644480, txId: 1, path id: 1 2025-03-28T09:01:18.872056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:18.872101Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 1:0 ProgressState 2025-03-28T09:01:18.872211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:01:18.872242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:01:18.872282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:01:18.872313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:01:18.872354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:01:18.872395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:01:18.872444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:01:18.872472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:01:18.872530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-28T09:01:18.872562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2025-03-28T09:01:18.872722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-03-28T09:01:18.887853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-03-28T09:01:18.888277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 1 2025-03-28T09:01:18.888489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 1 2025-03-28T09:01:18.888693Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-03-28T09:01:18.888869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:01:18.889122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2025-03-28T09:01:18.889206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:593:2518] 2025-03-28T09:01:18.895215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 1 2025-03-28T09:01:18.897848Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle 
TEvProposeTransaction 2025-03-28T09:01:18.898068Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:01:18.907345Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:01:18.970658Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:01:18.991207Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:01:19.011995Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:01:19.012966Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavi ... 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 
DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-03-28T09:01:22.559579Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvNavigate describe path /Root/IncrBackupTable 2025-03-28T09:01:22.560549Z node 1 :TX_PROXY DEBUG: Actor# [1:830:2682] HANDLE EvNavigateScheme /Root/IncrBackupTable 2025-03-28T09:01:22.560878Z node 1 :TX_PROXY DEBUG: Actor# [1:830:2682] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:01:22.560938Z node 1 :TX_PROXY DEBUG: Actor# [1:830:2682] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" 2025-03-28T09:01:22.562422Z node 1 :TX_PROXY DEBUG: Actor# [1:830:2682] Handle TEvDescribeSchemeResult Forward to# [1:593:2518] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 
InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-28T09:01:22.577216Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 
72057594046644480, LocalPathId: 2]][1:832:2684] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:01:22.577416Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:832:2684] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:01:22.578550Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:832:2684] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-28T09:01:22.578989Z node 1 :CHANGE_EXCHANGE DEBUG: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 2]][1:832:2684] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] >> DataShardWrite::UpsertPrepared+Volatile >> DataShardWrite::WriteImmediateBadRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] Test command err: 2025-03-28T08:59:44.590464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:44.623724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:44.624841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T08:59:44.643475Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T08:59:44.645408Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:44.646266Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ddb/r3tmp/tmpkXJ51L/pdisk_1.dat 2025-03-28T08:59:48.692526Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5211, node 1 TClient is connected to server localhost:6235 2025-03-28T08:59:50.635503Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:50.635643Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:50.635675Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:50.636325Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:11.129336Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:11.129761Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:11.129931Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:11.141667Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:11.142322Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:11.142431Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ddb/r3tmp/tmpDPl628/pdisk_1.dat 2025-03-28T09:00:14.074829Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18527, node 3 TClient is connected to server localhost:17198 2025-03-28T09:00:16.151695Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:16.151807Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:16.151909Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:16.152400Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:00:32.981774Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:500:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:32.982890Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:32.983000Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:32.984865Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:496:2159], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:00:32.985483Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:00:32.985521Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ddb/r3tmp/tmpM6QtH8/pdisk_1.dat 2025-03-28T09:00:33.691670Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27818, node 5 TClient is connected to server localhost:62526 2025-03-28T09:00:34.395228Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:34.395301Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:34.395354Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:34.395658Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:01:01.927127Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:01.927643Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:01.927853Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:01:01.930279Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:01.930340Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:01:01.930570Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ddb/r3tmp/tmp34f57F/pdisk_1.dat 2025-03-28T09:01:03.126569Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19088, node 7 TClient is connected to server localhost:21413 2025-03-28T09:01:05.931399Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:05.931555Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:05.931688Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:05.932584Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:01:18.113672Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:18.113961Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:18.114267Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:01:18.115676Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:18.116298Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:18.116395Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ddb/r3tmp/tmp65d4XV/pdisk_1.dat 2025-03-28T09:01:18.966329Z node 9 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20088, node 9 TClient is connected to server localhost:27301 2025-03-28T09:01:21.853295Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:21.853634Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:21.853931Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:21.855306Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 >> TPartitionTests::ConflictingCommitsInSeveralBatches [GOOD] >> TPartitionTests::ConflictingCommitFails ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: 2025-03-28T09:01:17.744167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:17.744629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:17.744679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001de3/r3tmp/tmp0VUP0b/pdisk_1.dat 2025-03-28T09:01:18.858225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:18.901166Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:18.986681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:19.003086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:19.028725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:19.240784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:01:20.856714Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 KeyFrom: 12345 } 2025-03-28T09:01:20.857668Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2025-03-28T09:01:20.926298Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} started# 5 actors each with inflight# 4 2025-03-28T09:01:20.926748Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-03-28T09:01:20.927432Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-03-28T09:01:20.927462Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-03-28T09:01:20.927482Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-03-28T09:01:20.928121Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2025-03-28T09:01:20.959385Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} session: ydb://session/3?node_id=1&id=NDQ3Mzc1NDktNGFkNDk5MDgtYjY2ZmRkOGUtYWI5NzM5YWU= 2025-03-28T09:01:20.968701Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} session: ydb://session/3?node_id=1&id=NWQ4ZWM0Yi1kNzYyNDcyMC1kNTViMGNiYi03ZWUwMWY2Mg== 2025-03-28T09:01:20.988471Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} session: ydb://session/3?node_id=1&id=YTFmZGE5Y2YtMWFhNmNkNWMtODM5OWUyMWItOWM2ZDZhZDQ= 2025-03-28T09:01:20.988528Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} session: ydb://session/3?node_id=1&id=ZmQxMGU4OWQtN2IxMDA2MmItZjcxMTA5YjMtNDVkOWQ5ZTk= 2025-03-28T09:01:20.998509Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} session: ydb://session/3?node_id=1&id=NDhlZDk4MjUtNWE5N2FmZjktYzM4MGJkNzgtYjNmMWY3N2I= 2025-03-28T09:01:21.087405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2634], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:21.087868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:21.088238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:21.088297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:21.089042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:782:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:21.097085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:783:2659], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:21.104292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:21.192865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:01:21.358246Z node 1 :TX_PROXY ERROR: Actor# [1:799:2675] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:01:21.359222Z node 1 :TX_PROXY ERROR: Actor# [1:800:2676] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:01:21.359979Z node 1 :TX_PROXY ERROR: Actor# [1:801:2677] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:01:21.360404Z node 1 :TX_PROXY ERROR: Actor# [1:811:2678] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:01:21.807052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:795:2671], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:01:21.808074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:790:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:01:21.808766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:01:21.809127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:01:21.809456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:793:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:01:21.860117Z node 1 :TX_PROXY ERROR: Actor# [1:900:2741] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:01:25.661849Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} finished in 1743152485.661799s, errors=0 2025-03-28T09:01:25.662159Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1743152485661 OperationsOK: 4 OperationsError: 0 } 2025-03-28T09:01:25.683845Z node 1 :TX_PROXY ERROR: Actor# [1:953:2779] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:01:25.777880Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} finished in 1743152485.777842s, errors=0 2025-03-28T09:01:25.778032Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1743152485777 OperationsOK: 4 OperationsError: 0 } 2025-03-28T09:01:25.791828Z node 1 :TX_PROXY ERROR: Actor# [1:984:2801] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:01:25.854425Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} finished in 1743152485.854388s, errors=0 2025-03-28T09:01:25.854774Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1743152485854 OperationsOK: 4 OperationsError: 0 } 2025-03-28T09:01:25.868477Z node 1 :TX_PROXY ERROR: Actor# [1:1015:2823] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:01:25.936365Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} finished in 1743152485.936323s, errors=0 2025-03-28T09:01:25.936503Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1743152485936 OperationsOK: 4 OperationsError: 0 } 2025-03-28T09:01:25.949861Z node 1 :TX_PROXY ERROR: Actor# [1:1046:2845] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:01:26.020028Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} finished in 1743152486.019990s, errors=0 2025-03-28T09:01:26.020317Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1743152486019 OperationsOK: 4 OperationsError: 0 } 2025-03-28T09:01:26.020418Z node 
1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} finished in 5.096013s, oks# 20, errors# 0 2025-03-28T09:01:26.020558Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 |74.9%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} >> ReadLoad::ShouldReadIterate [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |74.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |74.9%| [TA] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |74.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |74.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |74.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test >> TPartitionTests::ConflictingSrcIdForTxWithHead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] Test command err: 2025-03-28T09:01:17.931410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:17.931812Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:17.931861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dec/r3tmp/tmpPinzsR/pdisk_1.dat 2025-03-28T09:01:18.917537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:19.060490Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:19.182003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:19.191042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:19.211421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:19.439950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:01:20.753795Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "JustTable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-03-28T09:01:20.767375Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-03-28T09:01:20.957738Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} started# 5 actors each with inflight# 4 2025-03-28T09:01:20.957818Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-28T09:01:20.957870Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-28T09:01:20.957901Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-28T09:01:20.957924Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-28T09:01:20.957957Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-28T09:01:20.995548Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} session: ydb://session/3?node_id=1&id=YWZkNmY3ZTAtNzNhMDI2OS1kODRlZjcwYy01MWI5MTJlYg== 2025-03-28T09:01:21.024541Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} session: ydb://session/3?node_id=1&id=ODgzMzI5ZGItYWIwZDgyYWUtNzZjNjUwM2ItMzRlMjM5ZWU= 2025-03-28T09:01:21.078448Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} session: 
ydb://session/3?node_id=1&id=ZTZlYjVlNy01NDUzNDMzZS1kY2Q0OWUzZS1kYTAwOGIxMw== 2025-03-28T09:01:21.078521Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} session: ydb://session/3?node_id=1&id=MWU3ZDU3MzMtOTdhODlmODctZmJlZDg5ZDAtOWFjOTlkZDc= 2025-03-28T09:01:21.086587Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} session: ydb://session/3?node_id=1&id=NzA4NmY2Zi04ZThlNDNiMS00OWUwOWIyMC02NTUwYWZjZQ== 2025-03-28T09:01:21.152536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2634], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:21.153739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:21.154108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:21.163141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:21.163553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:782:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:21.164750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:783:2659], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:21.165126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:21.190661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:01:21.315442Z node 1 :TX_PROXY ERROR: Actor# [1:799:2675] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:01:21.316409Z node 1 :TX_PROXY ERROR: Actor# [1:800:2676] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:01:21.317139Z node 1 :TX_PROXY ERROR: Actor# [1:801:2677] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:01:21.317558Z node 1 :TX_PROXY ERROR: Actor# [1:811:2678] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:01:21.583008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:795:2671], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:01:21.583792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:790:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:01:21.584654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:01:21.585002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:01:21.585367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:793:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:01:21.666099Z node 1 :TX_PROXY ERROR: Actor# [1:900:2741] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:01:25.650822Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} finished in 1743152485.650774s, errors=0 2025-03-28T09:01:25.651112Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1743152485650 OperationsOK: 4 OperationsError: 0 } 2025-03-28T09:01:25.667605Z node 1 :TX_PROXY ERROR: Actor# [1:953:2779] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:01:25.764375Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} finished in 1743152485.764337s, errors=0 2025-03-28T09:01:25.764569Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1743152485764 OperationsOK: 4 OperationsError: 0 } 2025-03-28T09:01:25.778168Z node 1 :TX_PROXY ERROR: Actor# [1:984:2801] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:01:25.833799Z node 1 :TX_PROXY ERROR: Actor# [1:1008:2818] txid# 281474976715677, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:01:25.892743Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} finished in 1743152485.892708s, errors=0 2025-03-28T09:01:25.893022Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1743152485892 OperationsOK: 4 OperationsError: 0 } 2025-03-28T09:01:25.930960Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} finished in 1743152485.930915s, errors=0 2025-03-28T09:01:25.931213Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1743152485930 OperationsOK: 4 OperationsError: 0 } 2025-03-28T09:01:25.944721Z node 1 :TX_PROXY ERROR: Actor# [1:1044:2843] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:01:26.035661Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} finished in 1743152486.035619s, errors=0 2025-03-28T09:01:26.035923Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1743152486035 OperationsOK: 4 OperationsError: 0 } 2025-03-28T09:01:26.035991Z node 
1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} finished in 5.078455s, oks# 20, errors# 0 2025-03-28T09:01:26.036124Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 >> KqpImmediateEffects::InsertExistingKey+UseSink |74.9%| [TA] $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] Test command err: 2025-03-28T09:01:25.579802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:25.580291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:25.580347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dd0/r3tmp/tmpxYseiJ/pdisk_1.dat 2025-03-28T09:01:26.036725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:26.135540Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:26.184651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:26.184795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:26.196295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:26.286082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:01:26.764476Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-03-28T09:01:26.764669Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-03-28T09:01:26.923430Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor finished in 0.158118s, errors=0 2025-03-28T09:01:26.923520Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 >> TSchemeShardTTLTests::CheckCounters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingSrcIdForTxWithHead [GOOD] Test command err: 2025-03-28T09:01:09.680503Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:01:09.680631Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:01:09.697958Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:177:2192] 2025-03-28T09:01:09.699874Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-03-28T09:01:09.000000Z 2025-03-28T09:01:09.699937Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:177:2192] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\210\250\357\337\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\210\250\357\337\3352" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-03-28T09:01:10.789641Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:01:10.789711Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:01:12.420182Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:01:12.420242Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:01:12.446300Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-28T09:01:12.447802Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:01:12.450073Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:178:2193] 2025-03-28T09:01:12.457592Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-28T09:01:12.459177Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-28T09:01:12.460538Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-28T09:01:12.462490Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-28T09:01:12.467909Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-03-28T09:01:12.468870Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-28T09:01:12.468905Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:01:12.468945Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:01:12.000000Z 2025-03-28T09:01:12.468975Z node 3 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 
2025-03-28T09:01:12.469303Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [3:178:2193] 2025-03-28T09:01:12.469905Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-03-28T09:01:12.470898Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create immediate tx with id = 4 and act no: 5 2025-03-28T09:01:13.971455Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-03-28T09:01:13.971537Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-03-28T09:01:15.487277Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:01:15.487428Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:01:15.487478Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Wait batch completion Got batch complete: 2 Wait batch completion Wait kv request 2025-03-28T09:01:15.792789Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 2 2025-03-28T09:01:15.793139Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2 2025-03-28T09:01:15.793799Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-28T09:01:15.794474Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-03-28T09:01:15.799143Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-28T09:01:15.799454Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-28T09:01:15.800019Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-03-28T09:01:15.932916Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-28T09:01:15.934233Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:01:15.934284Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:01:15.934325Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] ---
write ----------------- 2025-03-28T09:01:15.950760Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc2 2025-03-28T09:01:15.951070Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-03-28T09:01:15.951327Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:01:15.951355Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:01:15.951376Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:01:15.951406Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:01:15.951948Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait tx committed for tx 2 2025-03-28T09:01:15.991652Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 Wait immediate tx complete 4 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 4 2025-03-28T09:01:19.013309Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:01:19.013373Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:01:19.065077Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-28T09:01:19.073813Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:01:19.076670Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:178:2193] 2025-03-28T09:01:19.081593Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-28T09:01:19.082550Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request Got KV request 2025-03-28T09:01:19.092794Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-28T09:01:19.096013Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-28T09:01:19.108274Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-03-28T09:01:19.109518Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-28T09:01:19.109818Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:01:19.110461Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:01:19.000000Z 2025-03-28T09:01:19.110809Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 
2025-03-28T09:01:19.111402Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:178:2193] 2025-03-28T09:01:19.111752Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-03-28T09:01:19.112463Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:01:19.576361Z node 4 :PERSQUEUE INFO: new Cookie src1|a2859835-7ca3dbff-ac84abff-86875966_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-03-28T09:01:19.577149Z node 4 : ... Q/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-28T09:01:25.978338Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-28T09:01:25.978569Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-28T09:01:25.981303Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-03-28T09:01:25.981421Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-28T09:01:25.981471Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:01:25.981526Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:01:25.000000Z 2025-03-28T09:01:25.981575Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-28T09:01:25.981635Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:178:2193] 2025-03-28T09:01:25.981750Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-03-28T09:01:25.981816Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:01:26.339245Z node 5 :PERSQUEUE INFO: new Cookie src1|86f3962c-1bf6e334-9313c89-e14237b7_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-03-28T09:01:26.339386Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 2025-03-28T09:01:27.439693Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-03-28T09:01:28.943920Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Wait batch completion Wait 1st KV request Wait kv request 2025-03-28T09:01:28.944763Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-03-28T09:01:28.945008Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-03-28T09:01:28.945499Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=1 2025-03-28T09:01:28.945945Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 2025-03-28T09:01:28.946896Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 10 partNo 0 2025-03-28T09:01:28.995066Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 85 count 1 nextOffset 2 batches 1 2025-03-28T09:01:28.995758Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 1,1 HeadOffset 1 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 71 WTime 10139 2025-03-28T09:01:28.995972Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:01:28.996013Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:01:28.996052Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:01:28.996092Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:01:28.996147Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-03-28T09:01:28.996645Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000001_00000_0000000001_00000| 2025-03-28T09:01:28.996675Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:01:28.996700Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:01:28.996723Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:01:28.996753Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:01:28.997166Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait tx committed for tx 0 
2025-03-28T09:01:29.008802Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2025-03-28T09:01:29.009046Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 8. Committed seqNo: 10. Writing seqNo: (NULL). EndOffset: 2. CurOffset: 2. Offset: 20 2025-03-28T09:01:29.009100Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 10. Committed seqNo: 10. Writing seqNo: (NULL). EndOffset: 2. CurOffset: 2. Offset: 30 2025-03-28T09:01:29.009192Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 11 partNo 0 2025-03-28T09:01:29.009592Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob sourceId 'src1' seqNo 11 partNo 0 result is x0000000000_00000000000000000001_00000_0000000001_00000 size 71 2025-03-28T09:01:29.009668Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 old key x0000000000_00000000000000000001_00000_0000000001_00000 new key d0000000000_00000000000000000001_00000_0000000001_00000 size 71 WTime 10239 2025-03-28T09:01:29.118035Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 11 partNo 0 FormedBlobsCount 1 NewHead: Offset 40 PartNo 0 PackedSize 84 count 1 nextOffset 41 batches 1 2025-03-28T09:01:29.118676Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 40,1 HeadOffset 1 endOffset 2 curOffset 41 d0000000000_00000000000000000040_00000_0000000001_00000| size 70 WTime 10239 2025-03-28T09:01:29.118859Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:01:29.118903Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:01:29.118942Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:01:29.118980Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:01:29.119018Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000001_00000_0000000001_00000 2025-03-28T09:01:29.119046Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-03-28T09:01:29.119071Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000040_00000_0000000001_00000| 2025-03-28T09:01:29.119096Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:01:29.119128Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:01:29.119165Z node 5 
:PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 3 Got KV request Got KV request Wait batch completion Wait 2nd KV request Wait kv request 2025-03-28T09:01:29.141113Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 17 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:01:29.141208Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:01:29.141295Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 2 is already written 2025-03-28T09:01:29.141357Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:01:29.141402Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 2 is already written 2025-03-28T09:01:29.141454Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:01:29.141496Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 11, partNo: 0, Offset: 40 is stored on disk 2025-03-28T09:01:29.141776Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:01:29.142254Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:01:29.142628Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000001_00000_0000000001_00000|, d0000000000_00000000000000000001_00000_0000000001_00000|] 2025-03-28T09:01:29.142995Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:01:29.143412Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:01:29.143786Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:01:29.144001Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CheckCounters [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:00:51.178726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:00:51.178830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:51.178880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:00:51.178923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:00:51.178970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:00:51.178995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:00:51.179073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:51.179152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:00:51.179539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:00:51.285498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:00:51.285562Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:51.301732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:00:51.302112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:00:51.302281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:00:51.310542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:00:51.310843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:00:51.311493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:51.311855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:51.313940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:51.315324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:51.315385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:51.315569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:00:51.315624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:51.315667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:00:51.315855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.322982Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:00:51.453568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } 
StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:00:51.453838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.454037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:00:51.454269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:00:51.454348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.456949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:51.457093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:00:51.457345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.457424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:00:51.457472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:00:51.457505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:00:51.459618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.459699Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:00:51.459734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:00:51.464105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.464172Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.464218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:51.464275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:00:51.468405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:00:51.470823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:00:51.471026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:00:51.472071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:51.472257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:51.472318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:51.472610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:00:51.472662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:51.472818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:00:51.472902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:00:51.475199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:51.475245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:51.475460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:51.475527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:00:51.475922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:51.475965Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:00:51.476060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:51.476092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:51.476131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:51.476161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:51.476197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:00:51.476238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:51.476270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:00:51.476303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:00:51.476373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:00:51.476409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:00:51.476463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:00:51.478305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:51.478411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:00:51.478446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... adyToDone TxId: 107 ready parts: 1/1 2025-03-28T09:01:29.167359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-03-28T09:01:29.167416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:343:2322] message: TxId: 107 2025-03-28T09:01:29.167458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-28T09:01:29.167495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-03-28T09:01:29.167523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-03-28T09:01:29.167632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:01:29.167671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:01:29.167959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:01:29.167999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:01:29.168052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:01:29.176052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-03-28T09:01:29.176098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1355:3256] 2025-03-28T09:01:29.176521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2025-03-28T09:01:29.315185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-03-28T09:01:29.323682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-03-28T09:01:29.325481Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2025-03-28T09:01:29.325552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2025-03-28T09:01:29.325594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-03-28T09:01:29.325903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2025-03-28T09:01:29.325960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2025-03-28T09:01:29.325990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2025-03-28T09:01:29.500557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-03-28T09:01:29.500927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-28T09:01:29.501936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-28T09:01:29.507112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1743165741661620 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-28T09:01:29.507927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1743165741661620 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-28T09:01:29.523420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-03-28T09:01:29.524497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-03-28T09:01:29.525387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-28T09:01:29.525704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-03-28T09:01:29.534929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-28T09:01:29.534984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-03-28T09:01:29.575704Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-28T09:01:29.575898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-28T09:01:29.576370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2025-03-28T13:42:21.661620Z, at schemeshard: 72057594046678944 2025-03-28T09:01:29.576465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-28T09:01:29.576855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-28T09:01:29.577212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-28T09:01:29.578209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2025-03-28T13:42:21.661620Z, at schemeshard: 72057594046678944 2025-03-28T09:01:29.578273Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-28T09:01:29.619429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:01:29.705181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-03-28T09:01:29.706187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-03-28T09:01:29.706265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0 2025-03-28T09:01:29.706636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2025-03-28T09:01:29.715035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-03-28T09:01:29.715568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0 2025-03-28T09:01:29.715623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0 2025-03-28T09:01:29.715655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2025-03-28T09:01:29.798236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:01:29.897029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 
72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0012
2025-03-28T09:01:29.897367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0002
2025-03-28T09:01:29.897659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 0 row count 0
2025-03-28T09:01:29.923304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0
2025-03-28T09:01:29.923698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 0, DataSize 0
2025-03-28T09:01:29.925041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:4 data size 0 row count 0
2025-03-28T09:01:29.925090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTableMoved, is column=0, is olap=0
2025-03-28T09:01:29.925113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 4: RowCount 0, DataSize 0
Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0
>> KqpWrite::ProjectReplace+UseSink
|74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/ut/ydb-core-client-ut
|75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut
|75.0%| [TA] {RESULT} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|75.0%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut
>> KqpWrite::UpsertNullKey
>> DataShardTxOrder::RandomPointsAndRanges
>> KqpImmediateEffects::ConflictingKeyRW1RR2
>> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD]
>> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD]
>> DataShardOutOfOrder::TestReadTableImmediateWriteBlock
>> TPartitionTests::ConflictingCommitFails [GOOD]
>> TPartitionTests::ConflictingCommitProccesAfterRollback
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095]
Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095]
Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061]
!Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110]
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061]
Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110]
!Reboot 72057594037927937 (actor [2:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed!
new actor is[2:81:2111]
Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061]
!Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest !
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061]
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110]
Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110]
!Reboot 72057594037927937 (actor [3:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed!
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:76:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:79:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:78:2110] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:82:2057] recipient: [13:78:2110] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! 
new actor is[13:81:2111] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:135:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:76:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:78:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:79:2110] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:82:2057] recipient: [14:79:2110] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:81:2111] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:135:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:77:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:80:2057] recipient: [15:79:2110] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:83:2057] recipient: [15:79:2110] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:82:2111] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:136:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:82:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:83:2113] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:86:2057] recipient: [16:83:2113] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! 
new actor is[16:85:2114] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:139:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:83:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:82:2113] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:86:2057] recipient: [17:82:2113] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:85:2114] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:139:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:84:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:83:2113] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:87:2057] recipient: [18:83:2113] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:86:2114] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:104:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:86:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:87:2057] recipient: [19:85:2115] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:89:2057] recipient: [19:85:2115] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! 
new actor is[19:88:2116] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:142:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:86:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:87:2057] recipient: [20:85:2115] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:89:2057] recipient: [20:85:2115] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:88:2116] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:142:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:87:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:88:2057] recipient: [21:86:2115] Leader for TabletID 72057594037927937 is [21:89:2116] sender: [21:90:2057] recipient: [21:86:2115] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! 
new actor is[21:89:2116]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095]
Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095]
Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD]
Test command err:
Trying to start YDB, gRPC: 13559, MsgBus: 19502
2025-03-28T09:00:22.332442Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782647090581304:2130];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:00:22.332558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000afe/r3tmp/tmpmeKyPF/pdisk_1.dat
2025-03-28T09:00:23.815281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:00:24.882792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:00:25.167562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:00:25.167663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:00:25.242035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:00:25.242301Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 13559, node 1
2025-03-28T09:00:25.459133Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:00:25.459161Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:00:25.459172Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:00:25.459571Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:00:27.334555Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782647090581304:2130];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:00:27.335084Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
TClient is connected to server localhost:19502
TClient is connected to server localhost:19502
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:00:29.445454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:29.467465Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:00:29.486674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:29.856675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:30.149990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:30.298467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:32.100355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782690040255986:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:32.100439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:32.600011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.674768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.755955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.844229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:32.897059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:33.009759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:33.132512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782694335223806:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:33.132580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:33.138937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782694335223811:2473], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:33.155128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:33.193963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782694335223813:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:33.259058Z node 1 :TX_PROXY ERROR: Actor# [1:7486782694335223871:3477] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:36.356201Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152435276, txId: 281474976710671] shutting down 2025-03-28T09:00:36.372352Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152435276, txId: 281474976710672] shutting down 2025-03-28T09:00:37.242407Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152436816, txId: 281474976710676] shutting down 2025-03-28T09:00:37.258313Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152436816, txId: 281474976710675] shutting down 2025-03-28T09:00:37.416975Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152437264, txId: 281474976710679] shutting down 2025-03-28T09:00:38.562078Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152438524, txId: 281474976710681] shutting down 2025-03-28T09:00:38.581116Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152438524, txId: 281474976710682] shutting down 2025-03-28T09:00:39.713948Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152439329, txId: 281474976710685] shutting down 2025-03-28T09:00:40.102754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:00:40.110998Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:40.873861Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152440274, txId: 281474976710687] shutting down 2025-03-28T09:00:40.902444Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152440295, txId: 281474976710688] shutting down 2025-03-28T09:00:41.404481Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152441156, txId: 281474976710691] shutting down 2025-03-28T09:00:41.689798Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152441688, txId: 281474976710694] shutting down 2025-03-28T09:00:41.689886Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152441688, txId: 281474976710693] shutting down 2025-03-28T09:00:41.913007Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152441849, txId: 281474976710697] shutting down 2025-03-28T09:00:42.122441Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152442136, txId: 281474976710700] shutting down 2025-03-28T09:00:42.199617Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152442143, txId: 281474976710699] shutting down 2025-03-28T09:00:42.845771Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: 
[step: 1743152442822, txId: 281474976710703] shutting down 2025-03-28T09:00:43.034246Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152443025, txId: 281474976710705] shutting down 2025-03-28T09:00:43.037627Z node 1 :KQP_RESO ... 281474976710672] shutting down 2025-03-28T09:01:13.581760Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152473097, txId: 281474976710676] shutting down 2025-03-28T09:01:13.585608Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152473097, txId: 281474976710675] shutting down 2025-03-28T09:01:14.283324Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152474273, txId: 281474976710679] shutting down 2025-03-28T09:01:14.690228Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152474623, txId: 281474976710681] shutting down 2025-03-28T09:01:14.836584Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152474623, txId: 281474976710682] shutting down 2025-03-28T09:01:15.222444Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152475155, txId: 281474976710685] shutting down 2025-03-28T09:01:15.235431Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152475155, txId: 281474976710686] shutting down 2025-03-28T09:01:15.269923Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152475155, txId: 281474976710687] shutting down 2025-03-28T09:01:15.481348Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152475470, txId: 281474976710691] shutting down 2025-03-28T09:01:15.583442Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152475470, txId: 281474976710692] shutting down 2025-03-28T09:01:15.674552Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152475638, txId: 281474976710695] shutting down 2025-03-28T09:01:16.489790Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152475967, txId: 281474976710697] shutting down 2025-03-28T09:01:16.585009Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152475967, txId: 281474976710698] shutting down 2025-03-28T09:01:16.692528Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152476646, txId: 281474976710701] shutting down 2025-03-28T09:01:16.759062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:01:16.759090Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:17.482909Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152477073, txId: 281474976710703] shutting down 2025-03-28T09:01:17.512441Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152477073, txId: 281474976710704] shutting down 2025-03-28T09:01:17.664562Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152477339, txId: 281474976710707] shutting down 2025-03-28T09:01:17.991427Z node 2 :KQP_RESOURCE_MANAGER 
WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152477794, txId: 281474976710709] shutting down 2025-03-28T09:01:18.009920Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152477794, txId: 281474976710711] shutting down 2025-03-28T09:01:18.014604Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152477794, txId: 281474976710710] shutting down 2025-03-28T09:01:18.262785Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152478172, txId: 281474976710715] shutting down 2025-03-28T09:01:18.265118Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152478172, txId: 281474976710716] shutting down 2025-03-28T09:01:18.990628Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152478641, txId: 281474976710719] shutting down 2025-03-28T09:01:19.016135Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152478641, txId: 281474976710720] shutting down 2025-03-28T09:01:19.044070Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152478893, txId: 281474976710723] shutting down 2025-03-28T09:01:19.739382Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152479264, txId: 281474976710726] shutting down 2025-03-28T09:01:19.740139Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152479264, txId: 281474976710725] shutting down 2025-03-28T09:01:20.524933Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152480111, txId: 281474976710729] shutting down 2025-03-28T09:01:20.556550Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152480111, txId: 281474976710730] shutting down 2025-03-28T09:01:20.643993Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152480615, txId: 281474976710733] shutting down 2025-03-28T09:01:20.904564Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152480846, txId: 281474976710735] shutting down 2025-03-28T09:01:21.422965Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152481140, txId: 281474976710737] shutting down 2025-03-28T09:01:21.509181Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152481476, txId: 281474976710739] shutting down 2025-03-28T09:01:21.521730Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152481476, txId: 281474976710740] shutting down 2025-03-28T09:01:22.240446Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152481966, txId: 281474976710743] shutting down 2025-03-28T09:01:22.363161Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152482267, txId: 281474976710745] shutting down 2025-03-28T09:01:22.612431Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=N2M5YzdjZC0xZjVjZjU0My05ZDcwM2NjMi02YTY5ZmM1Nw==, ActorId: [2:7486782906020529573:3317], ActorState: ExecuteState, TraceId: 01jqdzr21w0x1akrrbb3k2e7k4, 
Create QueryResponse for error on request, msg: 2025-03-28T09:01:22.861150Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152482617, txId: 281474976710748] shutting down 2025-03-28T09:01:22.861880Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152482624, txId: 281474976710747] shutting down 2025-03-28T09:01:23.719377Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152483373, txId: 281474976710751] shutting down 2025-03-28T09:01:25.557998Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152483786, txId: 281474976710754] shutting down 2025-03-28T09:01:25.571702Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152483786, txId: 281474976710753] shutting down 2025-03-28T09:01:25.604549Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152484213, txId: 281474976710757] shutting down 2025-03-28T09:01:25.648279Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152484213, txId: 281474976710758] shutting down 2025-03-28T09:01:25.757319Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152484892, txId: 281474976710761] shutting down 2025-03-28T09:01:25.843439Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152485487, txId: 281474976710763] shutting down 2025-03-28T09:01:25.855246Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152485487, txId: 281474976710764] shutting down 2025-03-28T09:01:25.911045Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152485921, txId: 281474976710768] shutting down 2025-03-28T09:01:25.917778Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152485921, txId: 281474976710767] shutting down 2025-03-28T09:01:26.137386Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152486124, txId: 281474976710771] shutting down 2025-03-28T09:01:26.365808Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152486383, txId: 281474976710773] shutting down 2025-03-28T09:01:26.718096Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152486656, txId: 281474976710775] shutting down 2025-03-28T09:01:26.950483Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152486957, txId: 281474976710777] shutting down 2025-03-28T09:01:27.189380Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152487209, txId: 281474976710779] shutting down 2025-03-28T09:01:27.479390Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152487475, txId: 281474976710781] shutting down 2025-03-28T09:01:28.050937Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152488042, txId: 281474976710783] shutting down 2025-03-28T09:01:28.283999Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152488287, 
txId: 281474976710785] shutting down 2025-03-28T09:01:28.890340Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152488896, txId: 281474976710787] shutting down 2025-03-28T09:01:29.123202Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152489141, txId: 281474976710789] shutting down 2025-03-28T09:01:29.892186Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152489393, txId: 281474976710791] shutting down 2025-03-28T09:01:29.973531Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152489939, txId: 281474976710793] shutting down 2025-03-28T09:01:30.187136Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152490184, txId: 281474976710795] shutting down 2025-03-28T09:01:30.427344Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152490429, txId: 281474976710797] shutting down 2025-03-28T09:01:30.714142Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152490730, txId: 281474976710799] shutting down 2025-03-28T09:01:31.064208Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152491080, txId: 281474976710801] shutting down 2025-03-28T09:01:31.349827Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152491353, txId: 281474976710803] shutting down
>> KqpStats::SysViewCancelled [GOOD]
>> DataShardTxOrder::ImmediateBetweenOnline
>> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW
>> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD]
>> TPQTest::TestCheckACL [GOOD]
>> TPQTest::TestLowWatermark
>> KqpStats::SysViewClientLost [FAIL]
>> KqpTypes::DyNumberCompare
>> TColumnShardTestSchema::OneTier [GOOD]
>> TKeyValueTest::TestRenameWorksNewApi [GOOD]
>> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD]
>> ReadLoad::ShouldReadIterateMoreThanRows [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewCancelled [GOOD]
Test command err: Trying to start YDB, gRPC: 2898, MsgBus: 8600 2025-03-28T08:59:07.221906Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782324577710764:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:07.221961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c0/r3tmp/tmpF2lrJd/pdisk_1.dat 2025-03-28T08:59:10.239567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:11.375021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.108805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.108929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected ->
Connecting 2025-03-28T08:59:12.119786Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:12.140225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2898, node 1 2025-03-28T08:59:12.210917Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782324577710764:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:12.210967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:12.249098Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:12.249127Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:12.249137Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:12.249245Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8600 TClient is connected to server localhost:8600 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:18.219384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:19.378862Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:19.446802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:20.433691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.816590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.873922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T08:59:24.017989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782397592156586:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:24.018408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:24.510954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.125590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.299054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.419028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.537095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:25.723801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:26.102248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782406182091752:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.102322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.106614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782406182091757:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:26.156172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:26.201152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782406182091759:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:26.283408Z node 1 :TX_PROXY ERROR: Actor# [1:7486782406182091818:3498] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:27.117465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:27.117484Z node 1 :IMPORT WARN: Table profiles were not loaded {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"E-Size":"No estimate","PlanNodeId":5,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/TwoShard","Columns":["Key","Value1","Value2"],"E-Rows":"No estimate","Table":"TwoShard","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":13,"Max":13,"Min":13},"FirstMessageMs":{"Count":1,"Sum":13,"Max":13,"Min":13},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5,"History":[19,5]}},"Name":"4","Push":{"LastMessageMs":{"Count":1,"Sum":13,"Max":13,"Min":13},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"FirstMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"ActiveMessageMs":{"Count":1,"Max":13,"Min":10},"ActiveTimeUs":{"Count":1,"Sum":3000,"Max":3000,"Min":3000},"WaitTimeUs":{"Count":1,"Sum":9329,"Max":9329,"Min":9329,"History":[19,9329]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"DurationUs":{"Count":1,"Sum":6000,"Max":6000,"Min":6000},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[19,1048576]},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"FinishedTasks":1,"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":6000,"Table":[{"Path":"\/Root\/KeyValue","ReadRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"ReadBytes":{"Count":1,"Sum":16,"Max":16,"Min":16}}],"BaseTimeMs":1743152372085,"OutputBytes":{"Count":1,"Sum":5,"Max":5,"Min":5},"CpuTimeUs":{"Count":1,"Sum":3667,"Max":3667,"Min":3667,"History":[18,3667]},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"FirstMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32,"History":[19,32]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"FirstMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Bytes":{"Count":1,"Sum":32,"Max":32,"Min":32,"History":[19,32]},"WaitTimeUs":{"Count":1,"Sum":9339,"Max":9339,"Min":9339,"History":[19,9339]},"WaitPeriods":{"Count
":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"HashShuffle","KeyColumns":["Key"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"PartitionByKey","Input":"NarrowMap"}],"Node Type":"Aggregate","Stats":{"UseLlvm":"undefined","Output":[{"Pop" ... 1,"Sum":10,"Max":10,"Min":10},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5,"History":[17,5]}},"Name":"2","Push":{"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"FirstMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Bytes":{"Count":1,"Sum":5,"Max":5,"Min":5,"History":[17,5]},"WaitTimeUs":{"Count":1,"Sum":9503,"Max":9503,"Min":9503,"History":[17,9503]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":704312,"CpuTimeUs":700242},"ProcessCpuTimeUs":767,"TotalDurationUs":790786,"ResourcePoolId":"default","QueuedTimeUs":575},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Operators":[{"E-Rows":"No estimate","Columns":["Key","Value1","Value2"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"A-Rows":2,"A-SelfCpu":9.139,"A-Cpu":9.139,"A-Size":18,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":2,"A-SelfCpu":6.495,"A-Cpu":15.634,"A-Size":18,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start YDB, gRPC: 12505, MsgBus: 16457 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014c0/r3tmp/tmpaj74P4/pdisk_1.dat 2025-03-28T08:59:53.826948Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:54.261325Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:54.276758Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:54.276838Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:54.288044Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12505, node 3 2025-03-28T08:59:54.754718Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:54.754737Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:54.754745Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:54.754868Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16457 TClient is connected to server localhost:16457 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:58.903038Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:59.117946Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:00.151751Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:01.553382Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:02.191304Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:05.775334Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782575481502232:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:05.775639Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:05.791845Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:00:05.922389Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:00:06.028634Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:00:06.154158Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:00:06.328672Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:00:06.454470Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:00:06.824508Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782579776470071:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:06.824570Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:06.824728Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782579776470076:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:06.828155Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:06.843015Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-28T09:00:06.844154Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782579776470078:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:00:06.914904Z node 3 :TX_PROXY ERROR: Actor# [3:7486782579776470131:3489] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:09.047168Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:00:09.047190Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:09.624073Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:30.869104Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152490855, txId: 281474976715672] shutting down 2025-03-28T09:01:31.716519Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7486782944848692485:2827] TxId: 281474976715674. Ctx: { TraceId: 01jqdzrb2n30gs1wbj2brverf3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzBhZTA4ZGYtZWM0M2ZlMDUtOTI1YTg1MTQtNjc0ODgzOWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 100ms } {
: Error: Cancelling after 110ms during execution } ] 2025-03-28T09:01:31.717257Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486782944848692513:2855], TxId: 281474976715674, task: 7. Ctx: { SessionId : ydb://session/3?node_id=3&id=NzBhZTA4ZGYtZWM0M2ZlMDUtOTI1YTg1MTQtNjc0ODgzOWQ=. CustomerSuppliedId : . TraceId : 01jqdzrb2n30gs1wbj2brverf3. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7486782944848692485:2827], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:01:31.717640Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486782944848692515:2857], TxId: 281474976715674, task: 9. Ctx: { TraceId : 01jqdzrb2n30gs1wbj2brverf3. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=NzBhZTA4ZGYtZWM0M2ZlMDUtOTI1YTg1MTQtNjc0ODgzOWQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7486782944848692485:2827], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:01:31.719332Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzBhZTA4ZGYtZWM0M2ZlMDUtOTI1YTg1MTQtNjc0ODgzOWQ=, ActorId: [3:7486782940553725127:2827], ActorState: ExecuteState, TraceId: 01jqdzrb2n30gs1wbj2brverf3, Create QueryResponse for error on request, msg:
: Error: Request canceled after 100ms
: Error: Cancelling after 110ms during execution 2025-03-28T09:01:32.179930Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152492131, txId: 281474976715676] shutting down
>> KqpAnalyze::AnalyzeTable+ColumnStore [GOOD]
>> KqpAnalyze::AnalyzeTable-ColumnStore
>> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite
------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD]
Test command err: 2025-03-28T09:01:21.304714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:21.305177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:21.305232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001df3/r3tmp/tmpQBuwvH/pdisk_1.dat 2025-03-28T09:01:23.662557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:23.938245Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:24.072784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:24.080461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:24.103339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:24.433975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:01:25.796215Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 100 Inflight: 3 BatchSize: 7 } 2025-03-28T09:01:25.796415Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 BatchSize: 7 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-03-28T09:01:25.893428Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor finished in 0.096525s, errors=0 2025-03-28T09:01:25.893527Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 2025-03-28T09:01:37.671731Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:37.672014Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:37.672060Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001df3/r3tmp/tmp8IrwsV/pdisk_1.dat 2025-03-28T09:01:39.002724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:39.101066Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:39.209774Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:39.219246Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:39.253718Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:39.467356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:01:41.027826Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2025-03-28T09:01:41.028829Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-03-28T09:01:41.175424Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.142858s, errors=0 2025-03-28T09:01:41.175509Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameWorksNewApi [GOOD]
Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! 
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:84:2114] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:88:2057] recipient: [8:84:2114] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:87:2115] Leader for TabletID 72057594037927937 is [8:87:2115] sender: [8:107:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:108:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:145:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:88:2118] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:209 ... recipient: [15:78:2110] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:81:2111] Leader for TabletID 72057594037927937 is [15:81:2111] sender: [15:135:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:76:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:79:2057] recipient: [16:78:2110] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:82:2057] recipient: [16:78:2110] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:77:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:81:2057] recipient: [17:79:2110] Leader for TabletID 72057594037927937 is [17:82:2111] sender: [17:83:2057] recipient: [17:79:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! 
new actor is[17:82:2111] Leader for TabletID 72057594037927937 is [17:82:2111] sender: [17:136:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:83:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:84:2057] recipient: [18:82:2113] Leader for TabletID 72057594037927937 is [18:85:2114] sender: [18:86:2057] recipient: [18:82:2113] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:85:2114] Leader for TabletID 72057594037927937 is [18:85:2114] sender: [18:139:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:82:2113] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:82:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:81:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:85:2057] recipient: [20:84:2113] Leader for TabletID 72057594037927937 is [20:86:2114] sender: [20:87:2057] recipient: [20:84:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! 
new actor is[20:86:2114] Leader for TabletID 72057594037927937 is [20:86:2114] sender: [20:140:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:82:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:86:2057] recipient: [21:84:2114] Leader for TabletID 72057594037927937 is [21:87:2115] sender: [21:88:2057] recipient: [21:84:2114] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:87:2115] Leader for TabletID 72057594037927937 is [21:87:2115] sender: [21:107:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:83:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:86:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:85:2115] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:89:2057] recipient: [22:85:2115] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:88:2116] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:108:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:86:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:89:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:90:2057] recipient: [23:88:2118] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:92:2057] recipient: [23:88:2118] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! 
new actor is[23:91:2119] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:145:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:86:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:89:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:90:2057] recipient: [24:88:2118] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:92:2057] recipient: [24:88:2118] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:91:2119] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:145:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:90:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:91:2057] recipient: [25:89:2118] Leader for TabletID 72057594037927937 is [25:92:2119] sender: [25:93:2057] recipient: [25:89:2118] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! new actor is[25:92:2119] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061]
>> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD]
Test command err: 2025-03-28T09:01:21.347166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:21.347635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:21.347687Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dce/r3tmp/tmpWwDZ8z/pdisk_1.dat 2025-03-28T09:01:24.598758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:25.057911Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:25.211723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:25.213134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:25.236376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:25.448243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:01:25.872087Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 1000 2025-03-28T09:01:25.873952Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 1} TUpsertActor Bootstrap called: RowCount: 1000 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-03-28T09:01:25.964402Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 1} TUpsertActor finished in 0.090123s, errors=0 2025-03-28T09:01:25.964968Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2025-03-28T09:01:25.965094Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [1:747:2629] with id# {Tag: 0, parent: [1:738:2620], subTag: 3} Bootstrap called: RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-03-28T09:01:25.966178Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-03-28T09:01:25.966306Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:750:2632] 2025-03-28T09:01:25.966394Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Bootstrap called, sample# 0 2025-03-28T09:01:25.966468Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Connect to# 72075186224037888 called 2025-03-28T09:01:25.968233Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Handle TEvClientConnected 
called, Status# OK 2025-03-28T09:01:25.975142Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} finished in 0.006832s, read# 1000 2025-03-28T09:01:25.975744Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:750:2632] with chunkSize# 0 finished: 0 { DurationMs: 6 OperationsOK: 1000 OperationsError: 0 } 2025-03-28T09:01:25.975905Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:753:2635] 2025-03-28T09:01:25.975959Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 2} Bootstrap called, sample# 0 2025-03-28T09:01:25.975997Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 2} Connect to# 72075186224037888 called 2025-03-28T09:01:25.976247Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-03-28T09:01:26.443032Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 2} finished in 0.466736s, read# 1000 2025-03-28T09:01:26.443186Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:753:2635] with chunkSize# 1 finished: 0 { DurationMs: 466 OperationsOK: 1000 OperationsError: 0 } 2025-03-28T09:01:26.443298Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:756:2638] 2025-03-28T09:01:26.443344Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 3} Bootstrap called, sample# 0 2025-03-28T09:01:26.443376Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 3} Connect to# 72075186224037888 called 2025-03-28T09:01:26.443642Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-03-28T09:01:26.575256Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 3} finished in 0.131555s, read# 1000 2025-03-28T09:01:26.575413Z node 1 :DS_LOAD_TEST NOTICE: fullscan actor# [1:756:2638] with chunkSize# 10 finished: 0 { DurationMs: 131 OperationsOK: 1000 OperationsError: 0 } 2025-03-28T09:01:26.575561Z node 1 :DS_LOAD_TEST INFO: started fullscan actor# [1:759:2641] 2025-03-28T09:01:26.575615Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 4} Bootstrap called, sample# 1000 2025-03-28T09:01:26.575651Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 4} Connect to# 72075186224037888 called 2025-03-28T09:01:26.575903Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-03-28T09:01:26.587379Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 4} finished in 0.002429s, sampled# 1000, iter finished# 1, oks# 1000 2025-03-28T09:01:26.587615Z node 1 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} received keyCount# 1000 2025-03-28T09:01:26.587780Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} started read actor with id# [1:762:2644] 2025-03-28T09:01:26.587851Z node 1 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [1:747:2629], subTag: 5} Bootstrap called, will read keys# 1000 2025-03-28T09:01:27.159620Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} received point times# 1000, Inflight left# 0 2025-03-28T09:01:27.159827Z node 1 :DS_LOAD_TEST INFO: headread with inflight# 1 
finished: 0 { DurationMs: 572 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 7\n99.9%: 28\n" } 2025-03-28T09:01:27.160000Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [1:738:2620], subTag: 3} finished in 1.194739s with report: { DurationMs: 6 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 466 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 131 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 572 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 7\n99.9%: 28\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-03-28T09:01:27.160437Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:747:2629] with tag# 3 2025-03-28T09:01:37.248349Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:37.248636Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:37.248686Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dce/r3tmp/tmpMDy3Ia/pdisk_1.dat 2025-03-28T09:01:37.778292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:37.864679Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:37.948234Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:37.948355Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:37.961696Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:38.205594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:01:39.556836Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 10 2025-03-28T09:01:39.570718Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-03-28T09:01:39.646975Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 1} TUpsertActor finished in 0.057279s, errors=0 2025-03-28T09:01:39.733049Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2025-03-28T09:01:39.733767Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# [2:747:2629] with id# {Tag: 0, parent: [2:738:2620], subTag: 3} Bootstrap called: RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2025-03-28T09:01:39.757395Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-03-28T09:01:39.758623Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:750:2632] 2025-03-28T09:01:39.761979Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 1} Bootstrap called, sample# 0 2025-03-28T09:01:39.762321Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 1} Connect to# 72075186224037888 called 2025-03-28T09:01:39.764608Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 1} Handle 
TEvClientConnected called, Status# OK 2025-03-28T09:01:39.786540Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 1} finished in 0.021254s, read# 10 2025-03-28T09:01:39.788014Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:750:2632] with chunkSize# 0 finished: 0 { DurationMs: 21 OperationsOK: 10 OperationsError: 0 } 2025-03-28T09:01:39.789326Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:753:2635] 2025-03-28T09:01:39.789937Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 2} Bootstrap called, sample# 0 2025-03-28T09:01:39.790299Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 2} Connect to# 72075186224037888 called 2025-03-28T09:01:39.791203Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 2} Handle TEvClientConnected called, Status# OK 2025-03-28T09:01:39.865127Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 2} finished in 0.073877s, read# 10 2025-03-28T09:01:39.865260Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:753:2635] with chunkSize# 1 finished: 0 { DurationMs: 73 OperationsOK: 10 OperationsError: 0 } 2025-03-28T09:01:39.865349Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:756:2638] 2025-03-28T09:01:39.865382Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 3} Bootstrap called, sample# 0 2025-03-28T09:01:39.865407Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 3} Connect to# 72075186224037888 called 2025-03-28T09:01:39.867832Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 3} Handle TEvClientConnected called, Status# OK 2025-03-28T09:01:39.892935Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 3} finished in 0.025060s, read# 10 2025-03-28T09:01:39.893372Z node 2 :DS_LOAD_TEST NOTICE: fullscan actor# [2:756:2638] with chunkSize# 10 finished: 0 { DurationMs: 25 OperationsOK: 10 OperationsError: 0 } 2025-03-28T09:01:39.894130Z node 2 :DS_LOAD_TEST INFO: started fullscan actor# [2:759:2641] 2025-03-28T09:01:39.894166Z node 2 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 4} Bootstrap called, sample# 10 2025-03-28T09:01:39.894585Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 4} Connect to# 72075186224037888 called 2025-03-28T09:01:39.895107Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 4} Handle TEvClientConnected called, Status# OK 2025-03-28T09:01:39.916967Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [2:747:2629], subTag: 4} finished in 0.021498s, sampled# 10, iter finished# 1, oks# 10 2025-03-28T09:01:39.924362Z node 2 :DS_LOAD_TEST INFO: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} received keyCount# 10 2025-03-28T09:01:39.925787Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} started read actor with id# [2:762:2644] 2025-03-28T09:01:39.926212Z node 2 :DS_LOAD_TEST NOTICE: TReadIteratorPoints# {Tag: 0, parent: [2:747:2629], subTag: 5} Bootstrap called, will read keys# 10 2025-03-28T09:01:42.170922Z node 2 :DS_LOAD_TEST DEBUG: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} received point times# 1000, Inflight left# 0 2025-03-28T09:01:42.171133Z node 2 :DS_LOAD_TEST INFO: headread with inflight# 1 finished: 0 
{ DurationMs: 2245 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 9\n99%: 18\n99.9%: 44\n" } 2025-03-28T09:01:42.171307Z node 2 :DS_LOAD_TEST NOTICE: ReadIteratorLoadScenario# {Tag: 0, parent: [2:738:2620], subTag: 3} finished in 2.436662s with report: { DurationMs: 21 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 73 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 25 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 2245 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 9\n99%: 18\n99.9%: 44\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2025-03-28T09:01:42.171410Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:747:2629] with tag# 3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] Test command err: 2025-03-28T09:01:23.837594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:23.838306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:23.838713Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001de2/r3tmp/tmp18jgeG/pdisk_1.dat 2025-03-28T09:01:25.943462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:26.007036Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:26.049473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:26.049670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:26.067013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:26.167250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:01:26.661470Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-03-28T09:01:26.661633Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-03-28T09:01:26.908501Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor finished in 0.246436s, errors=0 2025-03-28T09:01:26.908610Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 2025-03-28T09:01:37.499322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:37.500171Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:37.500344Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001de2/r3tmp/tmpeCIhrs/pdisk_1.dat 2025-03-28T09:01:39.540453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:39.714640Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:39.989551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:39.990540Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:40.020743Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:40.414173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:01:41.970332Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2025-03-28T09:01:41.970458Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2025-03-28T09:01:42.097611Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.126841s, errors=0 2025-03-28T09:01:42.097950Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2 >> DataShardTxOrder::RandomPoints_DelayRS >> DataShardWrite::UpsertImmediate [GOOD] >> DataShardWrite::UpsertImmediateManyColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneTier [GOOD] Test command err: 2025-03-28T09:00:30.042678Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:00:30.157223Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:00:30.162335Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:00:30.162790Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:00:30.188513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:00:30.188827Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 
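The entries that follow first register a fixed chain of normalizers during TTxInitSchema (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, ...), then TTxUpdateSchema runs each one to completion in order ("0 chunks found", normalizer_finished) before switching to the next. A minimal sketch of this sequential init pattern, using hypothetical names rather than the actual NKikimr normalizer interface:

#include <cstdio>
#include <memory>
#include <string>
#include <vector>

struct INormalizer {
    virtual ~INormalizer() = default;
    virtual std::string Name() const = 0;
    virtual size_t Run() = 0;  // returns the number of chunks it fixed
};

struct NoopNormalizer : INormalizer {
    std::string name;
    explicit NoopNormalizer(std::string n) : name(std::move(n)) {}
    std::string Name() const override { return name; }
    size_t Run() override { return 0; }  // corresponds to "0 chunks found"
};

int main() {
    // Registration phase: the chain is built in a fixed order.
    std::vector<std::unique_ptr<INormalizer>> chain;
    for (const char* n : {"Granules", "Chunks", "TablesCleaner",
                          "CleanGranuleId", "CleanInsertionDedup"})
        chain.emplace_back(std::make_unique<NoopNormalizer>(n));

    // Execution phase: run each normalizer to completion, in order.
    for (size_t i = 0; i < chain.size(); ++i) {
        std::printf("normalizer_init;type=%s\n", chain[i]->Name().c_str());
        std::printf("message=%zu chunks found\n", chain[i]->Run());
        std::printf("normalizer_finished;id=%zu\n", i + 1);
    }
}

The OneTier test output continues below.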
2025-03-28T09:00:30.197716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:00:30.197942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:00:30.198159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:00:30.198269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:00:30.198373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:00:30.198523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:00:30.198630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:00:30.198813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:00:30.198967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:00:30.199087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:00:30.199215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:00:30.199337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:00:30.226031Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:00:30.230710Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:00:30.231027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:00:30.231078Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:00:30.231263Z node 1 :TX_COLUMNSHARD 
INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:00:30.231427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:00:30.231505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:00:30.231564Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:00:30.231665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:00:30.231728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:00:30.231778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:00:30.231814Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:00:30.231969Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:00:30.232032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:00:30.232080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:00:30.232117Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:00:30.232235Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:00:30.232306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:00:30.232351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:00:30.232379Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:00:30.232449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:00:30.232483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:00:30.232509Z 
node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:00:30.232569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:00:30.232617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:00:30.232646Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:00:30.233048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-03-28T09:00:30.233126Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-03-28T09:00:30.233222Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-03-28T09:00:30.233317Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=51; 2025-03-28T09:00:30.233491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:00:30.233558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:00:30.233594Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:00:30.233839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:00:30.233884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:00:30.233924Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:00:30.234074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:00:30.234115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:00:30.234148Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:00:30.234336Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:00:30.234387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:00:30.234430Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T0 ... n_ids=1;column_names=timestamp;);;;); 2025-03-28T09:01:40.413147Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:01:40.413173Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:01:40.413204Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:01:40.413284Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:01:40.413361Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:01:40.413385Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:01:40.413439Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=73; 2025-03-28T09:01:40.413468Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:245;stage=data_format;batch_size=584;num_rows=73;batch_columns=timestamp; 2025-03-28T09:01:40.413576Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:673:2689];bytes=584;rows=73;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-03-28T09:01:40.413641Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:01:40.413712Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:01:40.413804Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:01:40.413893Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:01:40.413954Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:01:40.414017Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:01:40.414045Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:680:2696] finished for tablet 9437184 2025-03-28T09:01:40.415366Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:673:2689];stats={"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["f_ProduceResults"],"t":1.293},{"events":["l_bootstrap"],"t":1.618},{"events":["f_processing","f_task_result"],"t":1.71},{"events":["l_task_result"],"t":23.353},{"events":["f_ack"],"t":23.485},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":27.514}],"full":{"a":1743152472899525,"name":"_full_task","f":1743152472899525,"d_finished":0,"c":0,"l":1743152500414096,"d":27514571},"events":[{"name":"bootstrap","f":1743152472900835,"d_finished":1617574,"c":1,"l":1743152474518409,"d":1617574},{"a":1743152500413874,"name":"ack","f":1743152496385051,"d_finished":3923365,"c":904,"l":1743152500413829,"d":3923587},{"a":1743152500413866,"name":"processing","f":1743152474609665,"d_finished":13832910,"c":4520,"l":1743152500413831,"d":13833140},{"name":"ProduceResults","f":1743152474192868,"d_finished":6645996,"c":5426,"l":1743152500414032,"d":6645996},{"a":1743152500414035,"name":"Finish","f":1743152500414035,"d_finished":0,"c":0,"l":1743152500414096,"d":61},{"name":"task_result","f":1743152474609700,"d_finished":9720099,"c":3616,"l":1743152496252740,"d":9720099}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:01:40.415449Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:673:2689];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:01:40.415908Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:673:2689];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.001},{"events":["f_ProduceResults"],"t":1.293},{"events":["l_bootstrap"],"t":1.618},{"events":["f_processing","f_task_result"],"t":1.71},{"events":["l_task_result"],"t":23.353},{"events":["f_ack"],"t":23.485},{"events":["l_ProduceResults","f_Finish"],"t":27.514},{"events":["l_ack","l_processing","l_Finish"],"t":27.515}],"full":{"a":1743152472899525,"name":"_full_task","f":1743152472899525,"d_finished":0,"c":0,"l":1743152500415496,"d":27515971},"events":[{"name":"bootstrap","f":1743152472900835,"d_finished":1617574,"c":1,"l":1743152474518409,"d":1617574},{"a":1743152500413874,"name":"ack","f":1743152496385051,"d_finished":3923365,"c":904,"l":1743152500413829,"d":3924987},{"a":1743152500413866,"name":"processing","f":1743152474609665,"d_finished":13832910,"c":4520,"l":1743152500413831,"d":13834540},{"name":"ProduceResults","f":1743152474192868,"d_finished":6645996,"c":5426,"l":1743152500414032,"d":6645996},{"a":1743152500414035,"name":"Finish","f":1743152500414035,"d_finished":0,"c":0,"l":1743152500415496,"d":1461},{"name":"task_result","f":1743152474609700,"d_finished":9720099,"c":3616,"l":1743152496252740,"d":9720099}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:01:40.415974Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:01:12.641346Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=904;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7049848;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7049848;selected_rows=0; 2025-03-28T09:01:40.416012Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:01:40.416201Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> 
DataShardWrite::WriteImmediateBadRequest [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] Test command err: 2025-03-28T09:01:09.978246Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:01:09.978359Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:01:10.026486Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-28T09:01:10.026765Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:01:10.027185Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:176:2191] 2025-03-28T09:01:10.028135Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-28T09:01:10.028350Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-28T09:01:10.028548Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-28T09:01:10.028778Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-28T09:01:10.029195Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-03-28T09:01:10.029304Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-28T09:01:10.029347Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:01:10.029395Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:01:10.000000Z 2025-03-28T09:01:10.029445Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-28T09:01:10.029523Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:176:2191] 2025-03-28T09:01:10.029584Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-03-28T09:01:10.029664Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:01:10.393115Z node 1 :PERSQUEUE INFO: new Cookie src1|5b6b616f-74e6cef9-b6b7f2b5-27dbff6e_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-03-28T09:01:10.393272Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 Got batch complete: 1 2025-03-28T09:01:10.393449Z node 1 :PERSQUEUE INFO: new Cookie src4|743afd93-c1ad7565-c4357d09-dfca283f_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src4 2025-03-28T09:01:10.393513Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 Create distr tx with id = 2 and act no: 3 Create distr tx with id = 4 and act no: 5 Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 11 and act no: 12 2025-03-28T09:01:11.593554Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-03-28T09:01:11.594198Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-03-28T09:01:11.594570Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-03-28T09:01:11.607730Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-03-28T09:01:13.298829Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:01:13.298944Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:01:13.298993Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:01:13.299042Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:01:13.299066Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 2 Wait batch completion 2025-03-28T09:01:13.299236Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-03-28T09:01:13.299289Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-03-28T09:01:13.299360Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-28T09:01:13.299440Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 Got batch complete: 1 Wait batch completion Wait batch completion 2025-03-28T09:01:13.589507Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 4 2025-03-28T09:01:13.589583Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 4 2025-03-28T09:01:13.589641Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-28T09:01:13.589691Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 2025-03-28T09:01:13.589862Z node 1 
:PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 7 partNo 0 2025-03-28T09:01:13.638332Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 7 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 84 count 1 nextOffset 61 batches 1 2025-03-28T09:01:13.641609Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 8 partNo 0 2025-03-28T09:01:13.641679Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 8 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 136 count 2 nextOffset 62 batches 1 2025-03-28T09:01:13.641715Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 9 partNo 0 2025-03-28T09:01:13.641742Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 188 count 3 nextOffset 63 batches 1 2025-03-28T09:01:13.641768Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 10 partNo 0 2025-03-28T09:01:13.641816Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 240 count 4 nextOffset 64 batches 1 2025-03-28T09:01:13.641851Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 11 partNo 0 2025-03-28T09:01:13.641885Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 11 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 292 count 5 nextOffset 65 batches 1 2025-03-28T09:01:13.641917Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 12 partNo 0 2025-03-28T09:01:13.641941Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 12 partNo 0 FormedBlobsCount 0 NewHead: Offset 60 PartNo 0 PackedSize 344 count 6 nextOffset 66 batches 1 2025-03-28T09:01:13.641968Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 1 partNo 0 2025-03-28T09:01:13.651758Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob sourceId 'src4' seqNo 1 partNo 0 result is 
x0000000000_00000000000000000060_00000_0000000006_00000 size 211 2025-03-28T09:01:13.651871Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 old key x0000000000_00000000000000000060_00000_0000000006_00000 new key d0000000000_00000000000000000060_00000_0000000006_00000 size 211 WTime 11139 2025-03-28T09:01:13.695011Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 1 partNo 0 FormedBlobsCount 1 NewHead: Offset 70 PartNo 0 PackedSize 84 count 1 nextOffset 71 batches 1 2025-03-28T09:01:13.695160Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 2 partNo 0 2025-03-28T09:01:13.695229Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 70 PartNo 0 PackedSize 136 count 2 nextOffset 72 batches 1 2025-03-28T09:01:13.695812Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 70,2 HeadOffset 1 endOffset 1 curOffset 72 d0000000000_00000000000000000070_00000_0000000002_00000| size 121 WTime 11139 2025-03-28T09:01:13.695994Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:01:13.696033Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition ... c' partition 0 user client-1 offset is set to 3 (startOffset 0) session session-client-1 2025-03-28T09:01:35.714571Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:01:35.715207Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:01:35.715493Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:01:35.716327Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:01:35.716975Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:01:35.716997Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-03-28T09:01:35.717016Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-03-28T09:01:35.717041Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:01:35.717594Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait batch completion Wait kv request 2025-03-28T09:01:36.028709Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:01:36.029851Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Partition 0 Consumer 'client-1' Bad request (gap) Offset 3 Begin 0 Got batch complete: 1 Wait batch completion Wait kv request 2025-03-28T09:01:36.034075Z 
node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:01:36.034519Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:01:36.035146Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:01:36.035500Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:01:36.035823Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:01:36.036203Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:01:36.037225Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Create distr tx with id = 8 and act no: 9 2025-03-28T09:01:36.052319Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-03-28T09:01:36.087586Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:01:37.384106Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 3 Wait kv request 2025-03-28T09:01:37.384358Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-28T09:01:37.384409Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-28T09:01:37.384487Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=incorrect offset range (gap) 2025-03-28T09:01:37.384682Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:01:37.384730Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:01:37.384795Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:01:37.384841Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:01:37.384890Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:01:37.384915Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-03-28T09:01:37.384948Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-03-28T09:01:37.384984Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:01:37.385023Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait immediate tx complete 10 2025-03-28T09:01:37.411386Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 10 Wait
immediate tx complete 11 Got propose result: Origin: 72057594037927937 Status: ABORTED TxId: 11 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } 2025-03-28T09:01:42.577081Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:01:42.577154Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:01:42.590940Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-28T09:01:42.591228Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:01:42.591513Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:178:2193] 2025-03-28T09:01:42.592431Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-28T09:01:42.592596Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-28T09:01:42.592741Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-28T09:01:42.593432Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-28T09:01:42.593864Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-03-28T09:01:42.593985Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-28T09:01:42.594028Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:01:42.594077Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:01:42.000000Z 2025-03-28T09:01:42.594120Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-28T09:01:42.594165Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:178:2193] 2025-03-28T09:01:42.594226Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-03-28T09:01:42.594274Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events.
Count 0 2025-03-28T09:01:42.594343Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:01:42.594389Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-28T09:01:42.594442Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-28T09:01:42.594741Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-03-28T09:01:42.594902Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-03-28T09:01:42.594949Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request. Created Tx with id 0 as act# 0 Created Tx with id 1 as act# 1 2025-03-28T09:01:44.084699Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-03-28T09:01:44.085105Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 1 Got batch complete: 1 Wait batch completion Got batch complete: 1 Wait batch completion Wait kv request 2025-03-28T09:01:44.345751Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 1 2025-03-28T09:01:44.346109Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 1 2025-03-28T09:01:44.346931Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:01:44.347222Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:01:44.347511Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:01:44.347785Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:01:44.347819Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:01:44.347844Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-03-28T09:01:44.347869Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-03-28T09:01:44.348026Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:01:44.348290Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait tx committed for tx 1 2025-03-28T09:01:44.375210Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] 
TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait for no tx committed >> DataShardWrite::UpsertPrepared+Volatile [GOOD] >> DataShardWrite::UpsertPrepared-Volatile >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite >> ReadLoad::ShouldReadKqp [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows >> DataShardOutOfOrder::TestReadTableWriteConflict >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |75.0%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut >> KqpEffects::InsertAbort_Literal_Success [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |75.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 >> KqpEffects::InsertAbort_Select_Success [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates-UseSink >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:82:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:83:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:83:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:87:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:87:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... bletID 72057594037927937 is [13:56:2097] sender: [13:87:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:90:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:91:2057] recipient: [13:89:2117] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:93:2057] recipient: [13:89:2117] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:92:2118] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:146:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:76:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:79:2057] recipient: [16:78:2110] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:82:2057] recipient: [16:78:2110] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! 
new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:76:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:79:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:78:2110] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:82:2057] recipient: [17:78:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:81:2111] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:135:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:77:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:79:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:80:2110] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:83:2057] recipient: [18:80:2110] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:82:2111] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:136:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:82:2113] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:82:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! 
new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:80:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:82:2113] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:86:2057] recipient: [20:82:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:85:2114] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:81:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:83:2113] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:87:2057] recipient: [21:83:2113] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:86:2114] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:104:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:83:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:86:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:85:2115] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:89:2057] recipient: [22:85:2115] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! 
new actor is[22:88:2116] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:142:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:83:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:85:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:87:2057] recipient: [23:86:2115] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:89:2057] recipient: [23:86:2115] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:88:2116] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:142:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:87:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:88:2057] recipient: [24:86:2115] Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:90:2057] recipient: [24:86:2115] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! 
new actor is[24:89:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] >> YdbIndexTable::MultiShardTableOneUniqIndex >> YdbIndexTable::OnlineBuild ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:86:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:86:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:85:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:86:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:86:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:76:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:79:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:78:2110] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:82:2057] recipient: [13:78:2110] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! 
new actor is[13:81:2111] Leader for TabletID 72057594037927937 is [13:81:2111] sender: [13:135:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:76:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:78:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:79:2110] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:82:2057] recipient: [14:79:2110] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:81:2111] Leader for TabletID 72057594037927937 is [14:81:2111] sender: [14:135:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:77:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:79:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:80:2110] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:83:2057] recipient: [15:80:2110] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:82:2111] Leader for TabletID 72057594037927937 is [15:82:2111] sender: [15:136:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:83:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:82:2113] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:86:2057] recipient: [16:82:2113] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! 
new actor is[16:85:2114] Leader for TabletID 72057594037927937 is [16:85:2114] sender: [16:139:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:83:2057] recipient: [17:82:2113] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:86:2057] recipient: [17:82:2113] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:85:2114] Leader for TabletID 72057594037927937 is [17:85:2114] sender: [17:139:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:84:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:83:2113] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:87:2057] recipient: [18:83:2113] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:86:2114] Leader for TabletID 72057594037927937 is [18:86:2114] sender: [18:104:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:86:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:87:2057] recipient: [19:85:2115] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:89:2057] recipient: [19:85:2115] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! 
new actor is[19:88:2116] Leader for TabletID 72057594037927937 is [19:88:2116] sender: [19:142:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:85:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:87:2057] recipient: [20:86:2115] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:89:2057] recipient: [20:86:2115] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:88:2116] Leader for TabletID 72057594037927937 is [20:88:2116] sender: [20:142:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:87:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:88:2057] recipient: [21:86:2115] Leader for TabletID 72057594037927937 is [21:89:2116] sender: [21:90:2057] recipient: [21:86:2115] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! 
new actor is[21:89:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] >> DataShardTxOrder::RandomDotRanges_DelayRS >> KqpWrite::Insert [GOOD] >> KqpWrite::InsertRevert >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn >> KqpImmediateEffects::InsertExistingKey+UseSink [GOOD] >> KqpImmediateEffects::InsertExistingKey-UseSink >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink >> TColumnShardTestSchema::ExternalTTL [GOOD] >> KqpImmediateEffects::ImmediateUpdate [GOOD] >> KqpImmediateEffects::ImmediateUpdateSelect >> TFlatTest::SelectRangeBytesLimit >> KqpLimits::ComputeNodeMemoryLimit [FAIL] >> KqpLimits::DataShardReplySizeExceeded >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile >> KqpImmediateEffects::ConflictingKeyR1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WRR2 >> KqpTypes::DyNumberCompare [GOOD] >> KqpTypes::MultipleCurrentUtcTimestamp >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> DataShardWrite::ReplaceImmediate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExternalTTL [GOOD] Test command err: 2025-03-28T09:00:30.160019Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:00:30.389133Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:00:30.398057Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:00:30.398483Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:00:30.429909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:00:30.430309Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:00:30.439659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:00:30.439919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:00:30.440171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:00:30.440298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:00:30.440407Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:00:30.440535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:00:30.440671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:00:30.440777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:00:30.440933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:00:30.441058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:00:30.441185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:00:30.441303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:00:30.468624Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:00:30.473524Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:00:30.473902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:00:30.473975Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:00:30.474184Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:00:30.474359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:00:30.474469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:00:30.474526Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:00:30.474620Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:00:30.474680Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:00:30.474748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:00:30.474783Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:00:30.474965Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:00:30.475049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:00:30.475103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:00:30.475136Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:00:30.475239Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:00:30.475312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:00:30.475363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:00:30.475394Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:00:30.475466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:00:30.475512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:00:30.475557Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:00:30.475617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:00:30.475654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:00:30.475684Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:00:30.476155Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=96; 2025-03-28T09:00:30.476237Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-03-28T09:00:30.476313Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-03-28T09:00:30.476393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-03-28T09:00:30.476571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:00:30.476639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:00:30.476672Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:00:30.476877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:00:30.476920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:00:30.476962Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:00:30.477152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:00:30.477198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:00:30.477242Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:00:30.477437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:00:30.477492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:00:30.477522Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T0 ... 
column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:01:57.802891Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:01:57.802916Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:01:57.802940Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:01:57.803011Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:01:57.803771Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:01:57.803798Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:01:57.804163Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=71; 2025-03-28T09:01:57.805298Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=568;num_rows=71;batch_columns=saved_at; 2025-03-28T09:01:57.806051Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:684:2700];bytes=568;rows=71;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-03-28T09:01:57.806116Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:01:57.806180Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:01:57.808202Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:01:57.809018Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:01:57.809351Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:01:57.809413Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:01:57.810001Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:691:2707] finished for tablet 9437184 2025-03-28T09:01:57.822168Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:684:2700];stats={"p":[{"events":["f_bootstrap"],"t":0.07},{"events":["f_ProduceResults"],"t":0.595},{"events":["l_bootstrap"],"t":0.904},{"events":["f_processing","f_task_result"],"t":0.94},{"events":["l_task_result"],"t":26.36},{"events":["f_ack"],"t":26.433},{"events":["l_ProduceResults","f_Finish"],"t":31.497},{"events":["l_ack","l_processing","l_Finish"],"t":31.498}],"full":{"a":1743152486311769,"name":"_full_task","f":1743152486311769,"d_finished":0,"c":0,"l":1743152517810335,"d":31498566},"events":[{"name":"bootstrap","f":1743152486382076,"d_finished":834086,"c":1,"l":1743152487216162,"d":834086},{"a":1743152517809003,"name":"ack","f":1743152512745104,"d_finished":4873269,"c":903,"l":1743152517808943,"d":4874601},{"a":1743152517808992,"name":"processing","f":1743152487252267,"d_finished":16203630,"c":4515,"l":1743152517808945,"d":16204973},{"name":"ProduceResults","f":1743152486907474,"d_finished":8097013,"c":5420,"l":1743152517809697,"d":8097013},{"a":1743152517809699,"name":"Finish","f":1743152517809699,"d_finished":0,"c":0,"l":1743152517810335,"d":636},{"name":"task_result","f":1743152487252295,"d_finished":11045279,"c":3612,"l":1743152512671958,"d":11045279}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:01:57.823209Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:684:2700];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:01:57.832737Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:684:2700];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.07},{"events":["f_ProduceResults"],"t":0.595},{"events":["l_bootstrap"],"t":0.904},{"events":["f_processing","f_task_result"],"t":0.94},{"events":["l_task_result"],"t":26.36},{"events":["f_ack"],"t":26.433},{"events":["l_ProduceResults","f_Finish"],"t":31.497},{"events":["l_ack","l_processing","l_Finish"],"t":31.512}],"full":{"a":1743152486311769,"name":"_full_task","f":1743152486311769,"d_finished":0,"c":0,"l":1743152517823907,"d":31512138},"events":[{"name":"bootstrap","f":1743152486382076,"d_finished":834086,"c":1,"l":1743152487216162,"d":834086},{"a":1743152517809003,"name":"ack","f":1743152512745104,"d_finished":4873269,"c":903,"l":1743152517808943,"d":4888173},{"a":1743152517808992,"name":"processing","f":1743152487252267,"d_finished":16203630,"c":4515,"l":1743152517808945,"d":16218545},{"name":"ProduceResults","f":1743152486907474,"d_finished":8097013,"c":5420,"l":1743152517809697,"d":8097013},{"a":1743152517809699,"name":"Finish","f":1743152517809699,"d_finished":0,"c":0,"l":1743152517823907,"d":14208},{"name":"task_result","f":1743152487252295,"d_finished":11045279,"c":3612,"l":1743152512671958,"d":11045279}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:01:57.833871Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:01:26.249886Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=903;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7037528;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7037528;selected_rows=0; 2025-03-28T09:01:57.834240Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:01:57.856390Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> 
DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD] >> DataShardWrite::DeleteImmediate >> KqpWrite::UpsertNullKey [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RWR2 >> TFlatTest::PathSorting >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] >> TFlatTest::ShardFreezeUnfreezeAlreadySet >> KqpWrite::ProjectReplace+UseSink [GOOD] >> KqpWrite::ProjectReplace-UseSink >> KqpAnalyze::AnalyzeTable-ColumnStore [GOOD] >> KqpExplain::AggGroupLimit >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] |75.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |75.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::UpsertNullKey [GOOD] Test command err: Trying to start YDB, gRPC: 10907, MsgBus: 21221 2025-03-28T09:01:37.483043Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782969257224262:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:37.483135Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013c1/r3tmp/tmpLO5GIn/pdisk_1.dat 2025-03-28T09:01:42.126884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:43.112458Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782969257224262:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:43.112676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:01:43.183695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:43.210914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:43.211017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:43.214962Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:43.216696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10907, node 1 2025-03-28T09:01:44.203109Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:44.203130Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:44.203135Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:44.203237Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:21221 TClient is connected to server localhost:21221 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:01:47.874420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:47.966625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:49.085078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:49.676616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:51.082554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:58.211393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:01:58.211414Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:04.227602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783072336441021:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:04.230446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:04.636797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:05.167180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:05.292397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:05.399640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:05.505849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:05.645195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:06.199680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783093811278080:2501], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:06.199759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:06.200061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783093811278085:2504], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:06.233758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:06.334197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783093811278087:2505], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:02:06.444011Z node 1 :TX_PROXY ERROR: Actor# [1:7486783093811278153:3569] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite >> KqpStreamLookup::ReadTableDuringSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] Test command err: 2025-03-28T09:01:44.373703Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:01:44.386876Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:01:44.387438Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:01:44.387696Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:01:44.549743Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:01:45.126238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:01:45.126609Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:45.206815Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:01:45.227090Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:01:45.260989Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:01:45.261662Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:01:45.262577Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:01:45.268463Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:01:45.268891Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:01:45.272483Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:01:45.667233Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:01:45.925791Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:01:45.925982Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:01:45.926095Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:01:45.926130Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:01:45.926161Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:01:45.926211Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:01:45.926466Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:45.926524Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:45.926787Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:01:45.926864Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:01:45.927167Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:01:45.927245Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:01:45.927278Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:01:45.927308Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:01:45.927335Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:01:45.927369Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:01:45.927415Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:01:45.927528Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:45.927567Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:45.927613Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:01:45.930076Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:01:45.930136Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:01:45.930211Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:01:45.931231Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:01:45.932294Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:01:45.932679Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:01:45.933748Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:01:45.934466Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:01:45.934873Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:01:45.935252Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:01:45.939684Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:01:45.939723Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:01:45.940400Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:01:45.940427Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:01:45.941128Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:01:45.941476Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:01:45.941856Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:01:45.942512Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:01:45.943006Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:01:45.993172Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:01:45.993573Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:01:45.994316Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:01:45.994348Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:01:45.995418Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:01:46.003934Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:46.004928Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:46.005278Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:01:46.012058Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:01:46.012702Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:01:46.013876Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:01:46.014623Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:01:46.022890Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:01:46.023362Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:01:46.028670Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:01:46.028724Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:01:46.028940Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:46.028972Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:46.029032Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:01:46.029068Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:01:46.029096Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 
2025-03-28T09:01:46.029131Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:01:46.029159Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:01:46.029199Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:01:46.029245Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:01:46.029279Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:01:46.029316Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:01:46.029446Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:01:46.029470Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:01:46.029486Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:01:46.029504Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:01:46.029522Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:01:46.029579Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:01:46.029600Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:01:46.029623Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:01:46.029649Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:01:46.029709Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:01:46.029735Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:01:46.029762Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:01:46.029795Z node 1 :TX_DATA ... 
lt to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:02:16.896877Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:02:16.896964Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:02:16.896986Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-03-28T09:02:16.897016Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:02:16.897038Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:02:16.897121Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:02:16.897142Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-03-28T09:02:16.897171Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:02:16.897195Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:02:16.897284Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:02:16.897328Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-03-28T09:02:16.897363Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:02:16.897399Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:02:16.897491Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:02:16.897513Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-03-28T09:02:16.897543Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:02:16.897567Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:02:16.897672Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:02:16.897701Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-28T09:02:16.897913Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 104 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 34} 2025-03-28T09:02:16.897950Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.897987Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 104 2025-03-28T09:02:16.898132Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 107 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 35} 2025-03-28T09:02:16.898162Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.898187Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 107 2025-03-28T09:02:16.898274Z 
node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 110 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 36} 2025-03-28T09:02:16.898316Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.898347Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 110 2025-03-28T09:02:16.898428Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 113 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 37} 2025-03-28T09:02:16.898454Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.898480Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 113 2025-03-28T09:02:16.898574Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-03-28T09:02:16.898612Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.898637Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-03-28T09:02:16.903034Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-03-28T09:02:16.903105Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.903144Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-03-28T09:02:16.903303Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-03-28T09:02:16.903328Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.903350Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-03-28T09:02:16.903421Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-03-28T09:02:16.903444Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.903467Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-03-28T09:02:16.903529Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-03-28T09:02:16.903567Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.903595Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 
consumer 9437186 txId 128 2025-03-28T09:02:16.903669Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-03-28T09:02:16.903700Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.903725Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-03-28T09:02:16.903809Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-03-28T09:02:16.903833Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.903859Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-03-28T09:02:16.903940Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-28T09:02:16.903965Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.904003Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-03-28T09:02:16.904067Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-03-28T09:02:16.904089Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.904111Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-03-28T09:02:16.904215Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-03-28T09:02:16.904250Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.904273Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-03-28T09:02:16.904352Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-28T09:02:16.904374Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.904397Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-03-28T09:02:16.904462Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-03-28T09:02:16.904489Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.904512Z node 1 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-03-28T09:02:16.931758Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:02:16.931855Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-03-28T09:02:16.931936Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-28T09:02:16.932031Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-28T09:02:16.932088Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:02:16.932401Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-28T09:02:16.932440Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.932479Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |75.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink [GOOD] >> TCancelTx::CrossShardReadOnly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] Test command err: 2025-03-28T09:01:54.558134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:54.558558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:54.558611Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016bf/r3tmp/tmpbL0fWj/pdisk_1.dat 2025-03-28T09:01:59.949982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:02:00.004934Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:00.048278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:00.048389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:00.071802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:00.418379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:01.022963Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:02:01.050391Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:02:01.054819Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:02:01.055051Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:02.095168Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:02:02.109789Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:02.118711Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:02.225861Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:02:02.234968Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:02:02.235701Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:02:02.244995Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:02.245526Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:02.245906Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:02:02.263321Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:02.864495Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:02:02.865707Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:02.866912Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:02:02.867242Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:02.867648Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:02:02.868741Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:02.877914Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:02.878622Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:02.883253Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:02:02.883326Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:02:02.883678Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:02.883713Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:02.883742Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:02:02.883766Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:02:02.883791Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:02:02.883817Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:02:02.883851Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:02.883933Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:02.883970Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:02.884067Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:02:02.884124Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:02:02.884162Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:02:02.884235Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:02.884388Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:02:02.884423Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:02:02.884483Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:02:02.884522Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:02:02.884555Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:02:02.884587Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
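The trace above walks a scheme transaction through a chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan), where each unit's Execute returns a status — Executed, DelayComplete, or "is not ready" — that decides whether the operation advances to the next unit, defers its completion work, or parks until an external event such as TEvPlanStep arrives. Below is a minimal C++ sketch of that pipeline pattern only; the type and unit names are illustrative stand-ins, not YDB's actual classes:

    #include <deque>
    #include <iostream>
    #include <memory>
    #include <string>
    #include <vector>

    // Sketch of an execution-unit pipeline; names are hypothetical.
    enum class EStatus { Executed, DelayComplete, NotReady };

    struct IExecutionUnit {
        virtual ~IExecutionUnit() = default;
        virtual std::string Name() const = 0;
        virtual EStatus Execute() = 0;   // "Trying to execute ... on unit <Name>"
        virtual void Complete() {}       // "Complete execution for ... on unit <Name>"
    };

    struct TOperation {
        std::deque<std::shared_ptr<IExecutionUnit>> Plan;     // remaining units
        std::vector<std::shared_ptr<IExecutionUnit>> Delayed; // DelayComplete units

        // Advance until a unit is NotReady (parked, like WaitForPlan waiting
        // for a plan step) or the plan is exhausted.
        bool Advance() {
            while (!Plan.empty()) {
                auto unit = Plan.front();
                std::cout << "Trying to execute on unit " << unit->Name() << "\n";
                EStatus st = unit->Execute();
                if (st == EStatus::NotReady)
                    return false;              // wait for an external event
                if (st == EStatus::DelayComplete)
                    Delayed.push_back(unit);   // Complete() runs after commit
                Plan.pop_front();
            }
            for (auto& unit : Delayed)
                unit->Complete();
            Delayed.clear();
            return true;
        }
    };

    struct TStoreSchemeTx : IExecutionUnit {
        std::string Name() const override { return "StoreSchemeTx"; }
        EStatus Execute() override { return EStatus::DelayComplete; }
        void Complete() override { std::cout << "Complete on StoreSchemeTx\n"; }
    };

    int main() {
        TOperation op;
        op.Plan.push_back(std::make_shared<TStoreSchemeTx>());
        op.Advance();
    }

The Delayed bucket mirrors the paired "Execution status ... is DelayComplete" / "Complete execution for ... on unit ..." lines in the trace: a unit's effects are made durable first, and its completion callbacks run only afterwards.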
2025-03-28T09:02:02.884611Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:02:02.884825Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:02:02.884850Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:02:02.884877Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:02:02.884906Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:02:02.884954Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:02:02.884986Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:02:02.885057Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:02:02.885107Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:02:02.885521Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:02:02.940449Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:02:02.940833Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:02.957465Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:02.958274Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:02:02.958671Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:02:02.959840Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:02:02.960243Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:02:03.243840Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:03.243890Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:03.243921Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:02:03.264218Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:02:03.264589Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:02:03.266440Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:02:03.273159Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:02:03.274319Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:02:03.275046Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:02:03.396564Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:02:03.396631Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:03.397215Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:03.397247Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:03.397284Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:0 ... de 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:16.788448Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:756:2634]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 1 Status: STATUS_NOT_FOUND 2025-03-28T09:02:16.788847Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:666:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715663 LockNode: 1 Status: STATUS_NOT_FOUND 2025-03-28T09:02:16.803385Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-03-28T09:02:16.803554Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:666:2570], Recipient [1:756:2634]: {TEvReadSet step# 3013 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-03-28T09:02:16.803598Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.803635Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715664 2025-03-28T09:02:16.803703Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-03-28T09:02:16.803780Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:756:2634], Recipient [1:666:2570]: {TEvReadSet step# 3013 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-03-28T09:02:16.803803Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:16.803826Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 ... performing the first select 2025-03-28T09:02:17.497872Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqdzsqgv3ytqtvvxey0c73rn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDA2NjJiNDktNjIzNTFiNjktZjFhNTAwYTYtOTlmZjFkMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:02:17.501774Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1090:2877], Recipient [1:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2025-03-28T09:02:17.501983Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:02:17.502045Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-03-28T09:02:17.502121Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-28T09:02:17.502157Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:02:17.502189Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:17.502220Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:02:17.502278Z node 1 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-03-28T09:02:17.502336Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-28T09:02:17.502360Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:17.502397Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:02:17.502420Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:02:17.502529Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-03-28T09:02:17.502813Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:02:17.502862Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-03-28T09:02:17.502903Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:1090:2877], 0} after executionsCount# 1 2025-03-28T09:02:17.502947Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1090:2877], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:02:17.503022Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1090:2877], 0} finished in read 2025-03-28T09:02:17.503087Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-28T09:02:17.503110Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:02:17.503133Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:02:17.503155Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:02:17.503218Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-28T09:02:17.503249Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:02:17.503274Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-03-28T09:02:17.503308Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:02:17.503391Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-28T09:02:17.503555Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1092:2878], Recipient [1:756:2634]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2025-03-28T09:02:17.503738Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:666:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-03-28T09:02:17.503777Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-03-28T09:02:17.503830Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2025-03-28T09:02:17.503879Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-03-28T09:02:17.503902Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2025-03-28T09:02:17.503933Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:17.503977Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-28T09:02:17.504012Z node 1 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037889 2025-03-28T09:02:17.504039Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-03-28T09:02:17.504058Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:17.504091Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2025-03-28T09:02:17.504113Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2025-03-28T09:02:17.504196Z node 1 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-03-28T09:02:17.504378Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 3] 2025-03-28T09:02:17.504411Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to 
v4000/18446744073709551615 2025-03-28T09:02:17.504440Z node 1 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[1:1092:2878], 0} after executionsCount# 1 2025-03-28T09:02:17.504480Z node 1 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[1:1092:2878], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:02:17.504531Z node 1 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[1:1092:2878], 0} finished in read 2025-03-28T09:02:17.504590Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-03-28T09:02:17.504614Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2025-03-28T09:02:17.504638Z node 1 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2025-03-28T09:02:17.504660Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2025-03-28T09:02:17.504692Z node 1 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037889 is Executed 2025-03-28T09:02:17.504711Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2025-03-28T09:02:17.504731Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037889 has finished 2025-03-28T09:02:17.504764Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-03-28T09:02:17.504827Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-03-28T09:02:17.505009Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:756:2634]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-03-28T09:02:17.506056Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1090:2877], Recipient [1:666:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:02:17.506106Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-03-28T09:02:17.535639Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1092:2878], Recipient [1:756:2634]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:02:17.535712Z node 1 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |75.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |75.1%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |75.1%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut >> TFlatTest::SelectRangeBytesLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableDuringSplit [GOOD] Test command err: 2025-03-28T09:01:36.443680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:36.444079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:36.444141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00175f/r3tmp/tmp6Dgbb1/pdisk_1.dat 2025-03-28T09:01:40.150909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:40.426780Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:40.596213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:40.597386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:40.633567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:40.865378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:01:42.589551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:739:2621], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:42.589675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2626], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:42.589742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:42.594613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:01:43.062523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:753:2629], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:01:43.228077Z node 1 :TX_PROXY ERROR: Actor# [1:827:2672] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:17.340193Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqdzrnsr4pskf7g530pz5kks, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjRjMzEyM2YtYWJlMjc3ODQtMWZkYWNhNzktNjU5N2NlNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:17.395026Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqdzrnsr4pskf7g530pz5kks", SessionId: ydb://session/3?node_id=1&id=MjRjMzEyM2YtYWJlMjc3ODQtMWZkYWNhNzktNjU5N2NlNDY=, Slow query, duration: 34.809935s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "UPSERT INTO `/Root/TestTable` (key, value) VALUES (0, 00), (1, 11), (2, 22), (3, 33), (4, 44), (5, 55), (6, 66), (7, 77), (8, 88), (9, 99), (10, 1010), (11, 1111), (12, 1212), (13, 1313), (14, 1414), (15, 1515), (16, 1616), (17, 1717), (18, 1818), (19, 1919), (20, 2020), (21, 2121), (22, 2222), (23, 2323), (24, 2424), (25, 2525), (26, 2626), (27, 2727), (28, 2828), (29, 2929), (30, 3030), (31, 3131), (32, 3232), (33, 3333), (34, 3434), (35, 3535), (36, 3636), (37, 3737), (38, 3838), (39, 3939), (40, 4040), (41, 4141), (42, 4242), (43, 4343), (44, 4444), (45, 4545), (46, 4646), (47, 4747), (48, 4848), (49, 4949), (50, 5050), (51, 5151), (52, 5252), (53, 5353), (54, 5454), (55, 5555), (56, 5656), (57, 5757), (58, 5858), (59, 5959), (60, 6060), (61, 6161), (62, 6262), (63, 6363), (64, 6464), (65, 6565), (66, 6666), (67, 6767), (68, 6868), (69, 6969), (70, 7070), (71, 7171), (72, 7272), (73, 7373), (74, 7474), (75, 7575), (76, 7676), (77, 7777), (78, 7878), (79, 7979), (80, 8080), (81, 8181), (82, 8282), (83, 8383), (84, 8484), (85, 8585), (86, 8686), (87, 8787), (88, 8888), (89, 8989), (90, 9090), (91, 9191), (92, 9292), (93, 9393), (94, 9494), (95, 9595), (96, 9696), (97, 9797), (98, 9898), (99, 9999), (100, 100100), (101, 101101), (102, 102102), (103, 103103), (104, 104104), (105, 105105), (106, 106106), (107, 107107), (108, 108108), (109, 109109), (110, 110110), (111, 111111), (112, 112112), (113, 113113), (114, 114114), (115, 115115), (116, 116116), (117, 117117), (118, 118118), (119, 119119), (120, 120120), (121, 121121), (122, 122122), (123, 123123), (124, 124124), (125, 125125), (126, 126126), (127, 127127), (128, 128128), (129, 129129), (130, 130130), (131, 131131), (132, 132132), (133, 133133), (134, 134134), (135, 135135), (136, 136136), (137, 137137), (138, 138138), (139, 139139), (140, 140140), (141, 141141), (142, 142142), (143, 143143), (144, 144144), (145, 145145), (146, 146146), (147, 147147), (148, 148148), (149, 149149), (150, 150150), (151, 151151), (152, 152152), (153, 153153), (154, 154154), (155, 155155), (156, 156156), (157, 157157), (158, 158158), (159, 159159), (160, 160160), (161, 161161), (162, 162162), (163, 163163), (164, 164164), (165, 165165), (166, 166166), (167, 167167), (168, 168168), (169, 169169), (170, 170170), (171, 171171), (172, 172172), (173, 173173), (174, 174174), (175, 175175), (176, 176176), (177, 177177), (178, 178178), (179, 179179), (180, 180180), (181, 181181), (182, 182182), (183, 183183), (184, 184184), (185, 185185), 
(186, 186186), (187, 187187), (188, 188188), (189, 189189), (190, 190190), (191, 191191), (192, 192192), (193, 193193), (194, 194194), (195, 195195), (196, 196196), (197, 197197), (198, 198198), (199, 199199), (200, 200200), (201, 201201), (202, 202202), (203, 203203), (204, 204204), (205, 205205), (206, 206206), (207, 207207), (208, 208208), (209, 209209), (210, 210210), (211, 211211), (212, 212212), (213, 213213), (214, 214214), (215, 215215), (216, 216216), (217, 217217), (218, 218218), (219, 219219), (220, 220220), (221, 221221), (222, 222222), (223, 223223), (224, 224224), (225, 225225), (226, 226226), (227, 227227), (228, 228228), (229, 229229), (230, 230230), (231, 231231), (232, 232232), (233, 233233), (234, 234234), (235, 235235), (236, 236236), (237, 237237), (238, 238238), (239, 239239), (240, 240240), (241, 241241), (242, 242242), (243, 243243), (244, 244244), (245, 245245), (246, 246246), (247, 247247), (248, 248248), (249, 249249), (250, 250250), (251, 251251), (252, 252252), (253, 253253), (254, 254254), (255, 255255), (256, 256256), (257, 257257), (258, 258258), (259, 259259), (260, 260260), (261, 261261), (262, 262262), (263, 263263), (264, 264264), (265, 265265), (266, 266266), (267, 267267), (268, 268268), (269, 269269), (270, 270270), (271, 271271), (272, 272272), (273, 273273), (274, 274274), (275, 275275), (276, 276276), (277, 277277), (278, 278278), (279, 279279), (280, 280280), (281, 281281), (282, 282282), (283, 283283), (284, 284284), (285, 285285), (286, 286286), (287, 287287), (288, 288288), (289, 289289), (290, 290290), (291, 291291), (292, 292292), (293, 293293), (294, 294294), (295, 295295), (296, 296296), (297, 297297), (298, 298298), (299, 299299), (300, 300300), (301, 301301), (302, 302302), (303, 303303), (304, 304304), (305, 305305), (306, 306306), (307, 307307), (308, 308308), (309, 309309), (310, 310310), (311, 311311), (312, 312312), (313, 313313), (314, 314314), (315, 315315), (316, 316316), (317, 317317), (318, 318318), (319, 319319), (320, 320320), (321, 321321), (322, 322322), (323, 323323), (324, 324324), (325, 325325), (326, 326326), (327, 327327), (328, 328328), (329, 329329), (330, 330330), (331, 331331), (332, 332332), (333, 333333), (334, 334334), (335, 335335), (336, 336336), (337, 337337), (338, 338338), (339, 339339), (340, 340340), (341, 341341), (342, 342342), (343, 343343), (344, 344344), (345, 345345), (346, 346346), (347, 347347), (348, 348348), (349, 349349), (350, 350350), (351, 351351), (352, 352352), (353, 353353), (354, 354354), (355, 355355), (356, 356356), (357, 357357), (358, 358358), (359, 359359), (360, 360360), (361, 361361), (362, 362362), (363, 363363), (364, 364364), (365, 365365), (366, 366366), (367, 367367), (368, 368368), (369, 369369), (370, 370370), (371, 371371), (372, 372372), (373, 373373), (374, 374374), (375, 375375), (376, 376376), (377, 377377), (378, 378378), (379, 379379), (380, 380380), (381, 381381), (382, 382382), (383, 383383), (384, 384384), (385, 385385), (386, 386386), (387, 387387), (388, 388388), (389, 389389), (390, 390390), (391, 391391), (392, 392392), (393, 393393), (394, 394394), (395, 395395), (396, 396396), (397, 397397), (398, 398398), (399, 399399), (400, 400400), (401, 401401), (402, 402402), (403, 403403), (404, 404404), (405, 405405), (406, 406406), (407, 407407), (408, 408408), (409, 409409), (410, 410410), (411, 411411), (412, 412412), (413, 413413), (414, 414414), (415, 415415), (416, 416416), (417, 417417), (418, 418418), (419, 419419), (420, 420420), (421, 421421), (422, 422422), 
(423, 423423), (424, 424424), (425, 425425), (426, 426426), (427, 427427), (428, 428428), (429, 429429), (430, 430430), (431, 431431), (432, 432432), (433, 433433), (434, 434434), (435, 435435), (436, 436436), (437, 437437), (438, 438438), (439, 439439), (440, 440440), (441, 441441), (442, 442442), (443, 443443), (444, 444444), (445, 445445), (446, 446446), (447, 447447), (448, 448448), (449, 449449), (450, 450450), (451, 451451), (452, 452452), (453, 453453), (454, 454454), (455, 455455), (456, 456456), (457, 457457), (458, 458458), (459, 459459), (460, 460460), (461, 461461), (462, 462462), (463, 463463), (464, 464464), (465, 465465), (466, 466466), (467, 467467), (468, 468468), (469, 469469), (470, 470470), (471, 471471), (472, 472472), (473, 473473), (474, 474474), (475, 475475), (476, 476476), (477, 477477), (478, 478478), (479, 479479), (480, 480480), (481, 481481), (482, 482482), (483, 483483), (484, 484484), (485, 485485), (486, 486486), (487, 487487), (488, 488488), (489, 489489), (490, 490490), (491, 491491), (492, 492492), (493, 493493), (494, 494494), (495, 495495), (496, 496496), (497, 497497), (498, 498498), (499, 499499), (500, 500500), (501, 501501), (502, 502502), (503, 503503), (504, 504504), (505, 505505), (506, 506506), (507, 507507), (508, 508508), (509, 509509), (510, 510510), (511, 511511), (512, 512512), (513, 513513), (514, 514514), (515, 515515), (516, 516516), (517, 517517), (518, 518518), (519, 519519), (520, 520520), (521, 521521), (522, 522522), (523, 523523), (524, 524524), (525, 525525), (526, 526526), (527, 527527), (528, 528528), (529, 529529), (530, 530530), (531, 531531), (532, 532532), (533, 533533), (534, 534534), (535, 535535), (536, 536536), (537, 537537), (538, 538538), (539, 539539), (540, 540540), (541, 541541), (542, 542542), (543, 543543), (544, 544544), (545, 545545), (546, 546546), (547, 547547), (548, 548548), (549, 549549), (550, 550550), (551, 551551), (552, 552552), (553, 553553), (554, 554554), (555, 555555), (556, 556556), (557, 557557), (558, 558558), (559, 559559), (560, 560560), (561, 561561), (562, 562562), (563, 563563), (564, 564564), (565, 565565), (566, 566566), (567, 567567), (568, 568568), (569, 569569), (570, 570570), (571, 571571), (572, 572572), (573, 573573), (574, 574574), (575, 575575), (576, 576576), (577, 577577), (578, 578578), (579, 579579), (580, 580580), (581, 581581), (582, 582582), (583, 583583), (584, 584584), (585, 585585), (586, 586586), (587, 587587), (588, 588588), (589, 589589), (590, 590590), (591, 591591), (592, 592592), (593, 593593), (594, 594594), (595, 595595), (596, 596596), (597, 597597), (598, 598598), (599, 599599), (600, 600600), (601, 601601), (602, 602602), (603, 603603), (604, 604604), (605, 605605), (606, 606606), (607, 607607), (608, 608608), (609, 609609), (610, 610610), (611, 611611), (612, 612612), (613, 613613), (614, 614614), (615, 615615), (616, 616616), (617, 617617), (618, 618618), (619, 619619), (620, 620620), (621, 621621), (622, 622622), (623, 623623), (624, 624624), (625, 625625), (626, 626626), (627, 627627), (628, 628628), (629, 629629), (630, 630630), (631, 631631), (632, 632632), (633, 633633), (634, 634634), (635, 635635), (636, 636636), (637, 637637), (638, 638638), (639, 639639), (640, 640640), (641, 641641), (642, 642642), (643, 643643), (644, 644644), (645, 645645), (646, 646646), (647, 647647), (648, 648648), (649, 649649), (650, 650650), (651, 651651), (652, 652652), (653, 653653), (654, 654654), (655, 655655), (656, 656656), (657, 657657), (658, 658658), (659, 659659), 
(660, 660660), (661, 661661), (662, 662662), (663, 663663), (664, 664664), (665, 665665), (666, 666666), (667, 667667), (668, 668668), (669, 669669), (670, 670670), (671, 671671), (672, 672672), (673, 673673), (674, 674674), (675, 675675), (676, 676676), (677, 677677), (678, 678678), (679, 679679), (680, 680680), (681, 681681), (682, 682682), (683, 683683), (684, 684684), (685, 685685), (686, 686686), (687, 687687), (688, 688688), (689, 689689), (690, 690690), (691, 691691), (692, 692692), (693, 693693), (694, 694694), (695, 695695), (696, 696696), (697, 697697), (698, 698698), (699, 699699), (700, 700700), (701, 701701), (702, 702702), (703, 703703), (704, 704704), (705, 705705), (706, 706706), (707, 707707), (708, 708708), (709, 709709), (710, 710710), (711, 711711), (712, 712712), (713, 713713), (714, 714714), (715, 715715), (716, 716716), (717, 717717), (718, 718718), (719, 719719), (720, 720720), (721, 721721), (722, 722722), (723, 723723), (724, 724724), (725, 725725), (726, 726726), (727, 727727), (728, 728728), (729, 729729), (730, 730730), (731, 731731), (732, 732732), (733, 733733), (734, 734734), (735, 735735), (736, 736736), (737, 737737), (738, 738738), (739, 739739), (740, 740740), (741, 741741), (742, 742742), (743, 743743), (744, 744744), (745, 745745), (746, 746746), (747, 747747), (748, 748748), (749, 749749), (750, 750750), (751, 751751), (752, 752752), (753, 753753), (754, 754754), (755, 755755), (756, 756756), (757, 757757), (758, 758758), (759, 759759), (760, 760760), (761, 761761), (762, 762762), (763, 763763), (764, 764764), (765, 765765), (766, 766766), (767, 767767), (768, 768768), (769, 769769), (770, 770770), (771, 771771), (772, 772772), (773, 773773), (774, 774774), (775, 775775), (776, 776776), (777, 777777), (778, 778778), (779, 779779), (780, 780780), (781, 781781), (782, 782782), (783, 783783), (784, 784784), (785, 785785), (786, 786786), (787, 787787), (788, 788788), (789, 789789), (790, 790790), (791, 791791), (792, 792792), (793, 793793), (794, 794794), (795, 795795), (796, 796796), (797, 797797), (798, 798798), (799, 799799), (800, 800800), (801, 801801), (802, 802802), (803, 803803), (804, 804804), (805, 805805), (806, 806806), (807, 807807), (808, 808808), (809, 809809), (810, 810810), (811, 811811), (812, 812812), (813, 813813), (814, 814814), (815, 815815), (816, 816816), (817, 817817), (818, 818818), (819, 819819), (820, 820820), (821, 821821), (822, 822822), (823, 823823), (824, 824824), (825, 825825), (826, 826826), (827, 827827), (828, 828828), (829, 829829), (830, 830830), (831, 831831), (832, 832832), (833, 833833), (834, 834834), (835, 835835), (836, 836836), (837, 837837), (838, 838838), (839, 839839), (840, 840840), (841, 841841), (842, 842842), (843, 843843), (844, 844844), (845, 845845), (846, 846846), (847, 847847), (848, 848848), (849, 849849), (850, 850850), (851, 851851), (852, 852852), (853, 853853), (854, 854854), (855, 855855), (856, 856856), (857, 857857), (858, 858858), (859, 859859), (860, 860860), (861, 861861), (862, 862862), (863, 863863), (864, 864864), (865, 865865), (866, 866866), (867, 867867), (868, 868868), (869, 869869), (870, 870870), (871, 871871), (872, 872872), (873, 873873), (874, 874874), (875, 875875), (876, 876876), (877, 877877), (878, 878878), (879, 879879), (880, 880880), (881, 881881), (882, 882882), (883, 883883), (884, 884884), (885, 885885), (886, 886886), (887, 887887), (888, 888888), (889, 889889), (890, 890890), (891, 891891), (892, 892892), (893, 893893), (894, 894894), (895, 895895), (896, 896896), 
(897, 897897), (898, 898898), (899, 899899), (900, 900900), (901, 901901), (902, 902902), (903, 903903), (904, 904904), (905, 905905), (906, 906906), (907, 907907), (908, 908908), (909, 909909), (910, 910910), (911, 911911), (912, 912912), (913, 913913), (914, 914914), (915, 915915), (916, 916916), (917, 917917), (918, 918918), (919, 919919), (920, 920920), (921, 921921), (922, 922922), (923, 923923), (924, 924924), (925, 925925), (926, 926926), (927, 927927), (928, 928928), (929, 929929), (930, 930930), (931, 931931), (932, 932932), (933, 933933), (934, 934934), (935, 935935), (936, 936936), (937, 937937), (938, 938938), (939, 939939), (940, 940940), (941, 941941), (942, 942942), (943, 943943), (944, 944944), (945, 945945), (946, 946946), (947, 947947), (948, 948948), (949, 949949), (950, 950950), (951, 951951), (952, 952952), (953, 953953), (954, 954954), (955, 955955), (956, 956956), (957, 957957), (958, 958958), (959, 959959), (960, 960960), (961, 961961), (962, 962962), (963, 963963), (964, 964964), (965, 965965), (966, 966966), (967, 967967), (968, 968968), (969, 969969), (970, 970970), (971, 971971), (972, 972972), (973, 973973), (974, 974974), (975, 975975), (976, 976976), (977, 977977), (978, 978978), (979, 979979), (980, 980980), (981, 981981), (982, 982982), (983, 983983), (984, 984984), (985, 985985), (986, 986986), (987, 987987), (988, 988988), (989, 989989), (990, 990990), (991, 991991), (992, 992992), (993, 993993), (994, 994994), (995, 995995), (996, 996996), (997, 997997), (998, 998998), (999, 999999), (10000, 10000);", parameters: 0b 2025-03-28T09:02:17.812474Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqdzsqss6heeryq4pcapg9cg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTY3NDNjNDMtNGI2MmJmMmYtNDRkM2U0MjItZDYxOGU1Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR 2025-03-28T09:02:17.823775Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqdzsqss6heeryq4pcapg9cg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTY3NDNjNDMtNGI2MmJmMmYtNDRkM2U0MjItZDYxOGU1Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR --- split started --- --- split finished --- Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |75.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29762, MsgBus: 65385 2025-03-28T09:01:25.947686Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782918709824335:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:25.948213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013d6/r3tmp/tmpUqbYWo/pdisk_1.dat 2025-03-28T09:01:26.637379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:26.640000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:26.644417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:26.654889Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29762, node 1 2025-03-28T09:01:26.895217Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:26.895241Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:26.895247Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:26.895349Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65385 2025-03-28T09:01:30.941939Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782918709824335:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:30.942004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:65385 WaitRootIsUp 'Root'... 
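Note the slow-query warning above ends with "parameters: 0b": the test upserts its thousand rows as inline literals rather than bound parameters, which is exactly what makes the statement text so enormous. A hedged sketch of how such a statement could be generated follows — the helper name is hypothetical and the value-equals-key-printed-twice pattern (0 -> 00, 10 -> 1010, 185 -> 185185) is inferred from the log, not taken from the test source:

    #include <iostream>
    #include <sstream>
    #include <string>

    // Hypothetical helper: builds a literal multi-row UPSERT like the one in
    // the slow-query log (no bound parameters, every row inlined).
    std::string BuildBulkUpsert(const std::string& table, int rows) {
        std::ostringstream query;
        query << "UPSERT INTO `" << table << "` (key, value) VALUES ";
        for (int i = 0; i < rows; ++i) {
            if (i) query << ", ";
            // Printing i twice yields the log's pattern: (42, 4242).
            query << "(" << i << ", " << i << i << ")";
        }
        query << ";";
        return query.str();
    }

    int main() {
        std::cout << BuildBulkUpsert("/Root/TestTable", 5) << "\n";
        // UPSERT INTO `/Root/TestTable` (key, value) VALUES (0, 00), (1, 11), ...
    }

Binding the rows as parameters instead of literals would keep the query text constant-size; the point of this test, though, appears to be reading that table back through a stream lookup while the shard splits, so the literal form is incidental.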
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:01:31.265313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:31.305719Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:01:31.367799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:31.581040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:32.010141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:32.755423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:41.650860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:01:41.650879Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:43.318972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782991724270100:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:43.319150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:43.852255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:01:43.895096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:01:43.971162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:01:44.040653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:01:44.145690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:01:44.268566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:01:44.550035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783000314205233:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:44.551138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:44.554259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783000314205239:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:44.612048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:01:44.756823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783000314205241:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:01:44.852607Z node 1 :TX_PROXY ERROR: Actor# [1:7486783000314205305:3528] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19488, MsgBus: 22705 2025-03-28T09:01:51.007026Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783026175116003:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:51.007940Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013d6/r3tmp/tmpj14T3z/pdisk_1.dat 2025-03-28T09:01:52.796758Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:52.887831Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:52.901948Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:52.918816Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:52.927443Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19488, node 2 2025-03-28T09:01:54.236814Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:54.236836Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:54.236844Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:54.237601Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:01:56.011569Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486783026175116003:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:56.011618Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:22705 TClient is connected to server localhost:22705 WaitRootIsUp 'Root'... 
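A pattern that recurs throughout these test logs is resource creation racing with itself: the pool creator schedules a retry on "Transaction ... completed, doublechecking", and a concurrent create that finds the path already present is accepted ("path exist, request accepts it"). A minimal sketch of that create-then-doublecheck loop, with stand-in names (EnsureExists, ECreate) that are assumptions and not YDB APIs:

    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <thread>

    enum class ECreate { Ok, AlreadyExists, Retryable };

    // Retry the create; treat "already exists" as success, since a concurrent
    // creator reaching the goal state first is fine.
    bool EnsureExists(std::function<ECreate()> tryCreate, int attempts) {
        for (int i = 0; i < attempts; ++i) {
            switch (tryCreate()) {
                case ECreate::Ok:            return true;  // we created it
                case ECreate::AlreadyExists: return true;  // someone else did
                case ECreate::Retryable:
                    // e.g. "Transaction ... completed, doublechecking":
                    // re-check on the next pass, with backoff.
                    std::this_thread::sleep_for(
                        std::chrono::milliseconds(100 << i));
                    break;
            }
        }
        return false;
    }

    int main() {
        int calls = 0;
        bool ok = EnsureExists([&] {
            return ++calls < 2 ? ECreate::Retryable : ECreate::AlreadyExists;
        }, 5);
        std::cout << (ok ? "pool ready" : "gave up")
                  << " after " << calls << " tries\n";
    }

This is why the TX_PROXY "Check failed: path ... exist, request accepts it" lines below are logged at ERROR yet the tests still pass: the existing path satisfies the create's intent.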
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:04.374152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:04.638132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:05.794075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:07.570760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:02:07.570784Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:08.157066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:09.196272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:15.288177Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783133549300253:2426], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:15.288404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:15.389800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:15.453029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:15.516411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:15.598988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:15.679233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:15.774985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:15.876294Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783133549300773:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:15.876387Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:15.876851Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783133549300778:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:15.880939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:15.897893Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486783133549300780:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:02:15.978420Z node 2 :TX_PROXY ERROR: Actor# [2:7486783133549300836:3511] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:17.834588Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486783142139235773:2530], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YWYxOGQ4NTQtODJhNWEwMDgtNjI1YzI4MzctOGU5MzEyNjA=. TraceId : 01jqdzsqyp1vv6c3sj6tsy0pe4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-03-28T09:02:17.835007Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486783142139235775:2531], TxId: 281474976715672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqdzsqyp1vv6c3sj6tsy0pe4. SessionId : ydb://session/3?node_id=2&id=YWYxOGQ4NTQtODJhNWEwMDgtNjI1YzI4MzctOGU5MzEyNjA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7486783142139235770:2512], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:02:17.835512Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWYxOGQ4NTQtODJhNWEwMDgtNjI1YzI4MzctOGU5MzEyNjA=, ActorId: [2:7486783142139235701:2512], ActorState: ExecuteState, TraceId: 01jqdzsqyp1vv6c3sj6tsy0pe4, Create QueryResponse for error on request, msg: >> DataShardOutOfOrder::TestReadTableWriteConflict [GOOD] >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite >> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD] >> DataShardWrite::UpsertPreparedManyTables-Volatile >> DataShardWrite::DeleteImmediate [GOOD] >> DataShardWrite::CancelImmediate >> TFlatTest::ReadOnlyMode >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18075, MsgBus: 7541 2025-03-28T09:01:26.007849Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782919594785445:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:26.007905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013d3/r3tmp/tmpsp3EcC/pdisk_1.dat 2025-03-28T09:01:26.735356Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:26.735909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:26.746807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:26.764044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18075, node 1 2025-03-28T09:01:27.099173Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:27.099192Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:27.099203Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:27.099309Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7541 TClient is connected to server localhost:7541 WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-03-28T09:01:31.014786Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782919594785445:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:31.014844Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:01:31.220609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:31.248232Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:01:31.262603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:31.474207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:31.954004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:32.677490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:41.727782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:01:41.727805Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:42.458838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782988314264056:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:42.458944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:44.013435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:01:44.099989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:01:44.186023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:01:44.234428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:01:44.310171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:01:44.371878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:01:44.576483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782996904199193:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:44.576859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:44.577526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782996904199198:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:44.606058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:01:44.681537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782996904199200:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:01:44.776962Z node 1 :TX_PROXY ERROR: Actor# [1:7486782996904199259:3520] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:01:48.118224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64603, MsgBus: 4816 2025-03-28T09:01:55.024154Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783041544978797:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013d3/r3tmp/tmp2EXWG4/pdisk_1.dat 2025-03-28T09:01:55.440397Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:01:55.795629Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:55.795705Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:55.807704Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64603, node 2 2025-03-28T09:01:57.821415Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:58.086065Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:58.086085Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:58.086091Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:58.086593Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:01:59.591186Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486783041544978797:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:59.591239Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:4816 TClient is connected to server localhost:4816 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:04.753723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:05.348281Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:05.392782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:05.916390Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:08.267596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:09.125266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:10.647573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:02:10.647602Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:16.304524Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783136034260988:2423], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:16.304620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:16.336159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:16.382598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:16.421080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:16.461237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:16.496816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:16.568722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:16.643427Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783136034261514:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:16.643504Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:16.643708Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783136034261519:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:16.647167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:16.660414Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486783136034261521:2478], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:02:16.761899Z node 2 :TX_PROXY ERROR: Actor# [2:7486783136034261577:3502] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:18.164839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:02:18.753084Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486783144624196618:2543], TxId: 281474976710675, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqdzsrr3fmfne7m50vn1fr0d. SessionId : ydb://session/3?node_id=2&id=OWI0NmFlZjctZjBjODBhOGEtOTFkYzY5NDYtYjMyYzNjNTM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-03-28T09:02:18.753653Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486783144624196619:2544], TxId: 281474976710675, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqdzsrr3fmfne7m50vn1fr0d. SessionId : ydb://session/3?node_id=2&id=OWI0NmFlZjctZjBjODBhOGEtOTFkYzY5NDYtYjMyYzNjNTM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7486783144624196615:2514], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:02:18.755858Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWI0NmFlZjctZjBjODBhOGEtOTFkYzY5NDYtYjMyYzNjNTM=, ActorId: [2:7486783144624196444:2514], ActorState: ExecuteState, TraceId: 01jqdzsrr3fmfne7m50vn1fr0d, Create QueryResponse for error on request, msg: >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test >> DataShardWrite::ReplaceImmediate [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue >> DataShardWrite::UpsertNoLocksArbiter [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] >> TFlatTest::PathSorting [GOOD] >> TFlatTest::PartBloomFilter >> TFlatTest::ShardFreezeUnfreezeAlreadySet [GOOD] >> TFlatTest::ShardFreezeUnfreeze >> TFlatTest::SelectRangeForbidNullArgs2 >> TLocksTest::SetLockFail >> KqpLimits::DatashardReplySize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] Test command err: 2025-03-28T09:01:57.470999Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:01:58.162093Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:01:58.162525Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:01:58.162791Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:01:58.810954Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:01:59.936251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:01:59.936319Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:59.976271Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:01:59.994897Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:01:59.996751Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:01:59.996830Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:01:59.996900Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:01:59.997235Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:01:59.997312Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:01:59.997386Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:02:00.650559Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:00.946002Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:02:00.951632Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:00.952356Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:02:00.952703Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:02:00.953449Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 
9437184, state: WaitScheme 2025-03-28T09:02:00.954131Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:02:00.967253Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:00.968334Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:00.969849Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:02:00.970967Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:02:00.971705Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:02:00.971764Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:00.972159Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:02:00.972962Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:02:00.973685Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:02:00.974393Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:02:00.978917Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:02:00.980448Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:00.980828Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:00.987642Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:02:01.029405Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:02:01.030143Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:02:01.031827Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:02:01.033046Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:02:01.034098Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:02:01.034486Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:02:01.035212Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:02:01.036284Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:02:01.036623Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:02:01.036967Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:02:01.041779Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 
2025-03-28T09:02:01.042197Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:02:01.042541Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:02:01.042574Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:02:01.042625Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:02:01.043504Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:02:01.043855Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:02:01.045007Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:02:01.045038Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:02:01.091231Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:02:01.091609Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:02:01.092355Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:02:01.092719Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:02:01.093456Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:02:01.112365Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:01.113128Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:01.113551Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:02:01.119875Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-03-28T09:02:01.120262Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:02:01.121867Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-03-28T09:02:01.124334Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-28T09:02:01.125118Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:02:01.125519Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:02:01.211134Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:02:01.211520Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:02:01.214420Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:01.218821Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:01.219270Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Execute at 9437184 2025-03-28T09:02:01.219638Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:02:01.219670Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:02:01.219721Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-03-28T09:02:01.219754Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-03-28T09:02:01.221087Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-28T09:02:01.221458Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:02:01.222162Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:02:01.224739Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:02:01.226537Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-03-28T09:02:01.227236Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-28T09:02:01.227262Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:02:01.227285Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:02:01.227307Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:02:01.227701Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:02:01.228039Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:02:01.228389Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:01.228739Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:02:01.229136Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-03-28T09:02:01.230291Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically incomplete end at 9437184 2025-03-28T09:02:01.238987Z node 1 :TX_DATASHARD TRACE: Activated operation [2:1] at 9437184 2025-03-28T09:02:01.239732Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-28T09:02:01.239758Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1 ... 
-03-28T09:02:21.114418Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:02:21.114543Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-28T09:02:21.114567Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-28T09:02:21.114587Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-28T09:02:21.114609Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-28T09:02:21.114660Z node 1 :TX_DATASHARD DEBUG: Send RS Ack at 9437186 {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 49 Flags# 0} 2025-03-28T09:02:21.114780Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:02:21.114808Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:131] at 9437186 on unit CompleteOperation 2025-03-28T09:02:21.114845Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 131] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-03-28T09:02:21.114886Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-03-28T09:02:21.114921Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:02:21.115032Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-28T09:02:21.115071Z node 1 :TX_DATASHARD DEBUG: Send RS Ack at 9437186 {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-03-28T09:02:21.115105Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:02:21.115130Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:134] at 9437186 on unit CompleteOperation 2025-03-28T09:02:21.115165Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 134] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-03-28T09:02:21.115205Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-03-28T09:02:21.115230Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:02:21.115336Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:02:21.115370Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:137] at 9437186 on unit CompleteOperation 2025-03-28T09:02:21.115425Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 137] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-03-28T09:02:21.115486Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-28T09:02:21.115537Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:02:21.115639Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:02:21.115663Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:140] at 9437186 on unit CompleteOperation 2025-03-28T09:02:21.115696Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 
ms, propose latency: 3 ms 2025-03-28T09:02:21.115734Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-03-28T09:02:21.115760Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:02:21.115848Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:02:21.115870Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:143] at 9437186 on unit CompleteOperation 2025-03-28T09:02:21.115898Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-03-28T09:02:21.115934Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-03-28T09:02:21.115966Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:02:21.116098Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:02:21.116124Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:146] at 9437186 on unit CompleteOperation 2025-03-28T09:02:21.116173Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:02:21.116219Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-28T09:02:21.116246Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:02:21.116363Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:02:21.116389Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:149] at 9437186 on unit CompleteOperation 2025-03-28T09:02:21.116421Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:02:21.116449Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:02:21.116541Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:02:21.116565Z node 1 :TX_DATASHARD TRACE: Complete execution for [6:152] at 9437186 on unit CompleteOperation 2025-03-28T09:02:21.116595Z node 1 :TX_DATASHARD DEBUG: Complete [6 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:02:21.116619Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:02:21.117007Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-03-28T09:02:21.117045Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:21.117076Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-03-28T09:02:21.117143Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 125 TabletSource# 9437184 
TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-03-28T09:02:21.117178Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:21.117210Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-03-28T09:02:21.117307Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-03-28T09:02:21.117336Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:21.117362Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-03-28T09:02:21.117437Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-03-28T09:02:21.117476Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:21.117501Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-03-28T09:02:21.117606Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-03-28T09:02:21.117630Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:21.117653Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-03-28T09:02:21.117722Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-28T09:02:21.117746Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:21.117767Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-03-28T09:02:21.117814Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-03-28T09:02:21.117855Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:21.117892Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-03-28T09:02:21.117990Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-28T09:02:21.118015Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:21.118036Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-03-28T09:02:21.118106Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 140 TabletSource# 
9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-03-28T09:02:21.118135Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:21.118156Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-03-28T09:02:21.118219Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-03-28T09:02:21.118239Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:21.118261Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-03-28T09:02:21.118350Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:457:2399], Recipient [1:235:2228]: {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-28T09:02:21.118371Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:21.118410Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] Test command err: 2025-03-28T09:01:54.618216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:54.618646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:54.618808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016a7/r3tmp/tmpnTnd5S/pdisk_1.dat 2025-03-28T09:01:58.320847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:58.783440Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:58.921308Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:01:58.933033Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:01:58.943782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:58.945293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:58.970472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:59.178550Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:01:59.194949Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:01:59.195112Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:02:00.746971Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:02:00.747054Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:02:00.747538Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:02:00.747663Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:02:00.747946Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:02:00.748437Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:02:00.748541Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:02:00.748790Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:02:00.750004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:00.754963Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:02:00.755030Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:02:01.176651Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:02:01.215586Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:02:01.244912Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:02:01.258007Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:672:2573] 2025-03-28T09:02:01.275996Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:02.140359Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:02:02.148369Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:02:02.149084Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:674:2575] 2025-03-28T09:02:02.157709Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:02.488356Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:02:02.502510Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:02.511884Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:02.561589Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:02:02.565323Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:02:02.566767Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:02:02.571343Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:02.572366Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:02.573194Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:705:2573] in generation 1 2025-03-28T09:02:02.579441Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:02.579515Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:02.617794Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:02:02.618181Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 
72075186224037889 2025-03-28T09:02:02.618215Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:02:02.641432Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:02.641916Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:02.641973Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:706:2575] in generation 1 2025-03-28T09:02:02.657581Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:03.047631Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:02:03.048544Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:03.049999Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:709:2594] 2025-03-28T09:02:03.062706Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:03.063078Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:02:03.063470Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:03.078790Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:672:2573], Recipient [1:672:2573]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:03.079506Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:03.080177Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:03.080210Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-28T09:02:03.080635Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:03.081361Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:710:2595] 2025-03-28T09:02:03.081768Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:02:03.081791Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-28T09:02:03.081811Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:02:03.095922Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:02:03.096027Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:02:03.096852Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:674:2575], Recipient [1:674:2575]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:03.096883Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:03.097367Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:03.097682Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:03.098301Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:02:03.098641Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no 
attached operations 2025-03-28T09:02:03.114808Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:02:03.115536Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:02:03.116384Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:03.138543Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:687:2582], Recipient [1:672:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:03.138592Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:03.138870Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:665:2569], serverId# [1:687:2582], sessionId# [0:0:0] 2025-03-28T09:02:03.139553Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-28T09:02:03.140002Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-28T09:02:03.144968Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404 ... 25-03-28T09:02:20.970389Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:02:20.970429Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:02:20.970482Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715660] at 72075186224037888 2025-03-28T09:02:20.970515Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is Executed 2025-03-28T09:02:20.970537Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:20.970556Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-03-28T09:02:20.970583Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit ExecuteKqpDataTx 2025-03-28T09:02:20.970618Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:02:20.970670Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715660] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191926 2025-03-28T09:02:20.980404Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-28T09:02:20.980511Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:02:20.980559Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-03-28T09:02:20.980611Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:02:20.980647Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit FinishPropose 2025-03-28T09:02:20.980743Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:02:20.980774Z node 2 :TX_DATASHARD TRACE: Advance execution plan for 
[0:281474976715660] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:02:20.980804Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:02:20.980836Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:02:20.980876Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is Executed 2025-03-28T09:02:20.980900Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:02:20.980934Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715660] at 72075186224037888 has finished 2025-03-28T09:02:20.994643Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:20.994747Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715660] at 72075186224037888 on unit FinishPropose 2025-03-28T09:02:20.994808Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715660 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-28T09:02:20.994957Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:20.996837Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvProposeTransaction 2025-03-28T09:02:20.996895Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 281474976715661 ProcessProposeTransaction 2025-03-28T09:02:20.996969Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [2:857:2692] DataReq marker# P0 2025-03-28T09:02:20.997092Z node 2 :TX_PROXY DEBUG: Actor# [2:857:2692] Cookie# 0 txid# 281474976715661 HANDLE TDataReq marker# P1 2025-03-28T09:02:20.997299Z node 2 :TX_PROXY DEBUG: Actor# [2:857:2692] txid# 281474976715661 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2025-03-28T09:02:20.997535Z node 2 :TX_PROXY DEBUG: Actor# [2:857:2692] txid# 281474976715661 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-03-28T09:02:20.997628Z node 2 :TX_PROXY DEBUG: Actor# [2:857:2692] txid# 281474976715661 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 1 followers disallowed marker# P4b 2025-03-28T09:02:20.997916Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:857:2692], Recipient [2:666:2570]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 857 RawX2: 8589937284 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\tY\003\000\000\000\000\000\000\021\204\n\000\000\002\000\000\000" TxId: 281474976715661 ExecLevel: 0 Flags: 8 2025-03-28T09:02:20.997971Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:02:20.998077Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:20.998249Z node 2 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2025-03-28T09:02:20.998345Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2025-03-28T09:02:20.998395Z node 2 :TX_DATASHARD TRACE: 
Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-03-28T09:02:20.998432Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2025-03-28T09:02:20.998467Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:20.998498Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:02:20.998539Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-03-28T09:02:20.998592Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715661] at 72075186224037888 2025-03-28T09:02:20.998629Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-03-28T09:02:20.998663Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:21.003778Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit MakeScanSnapshot 2025-03-28T09:02:21.003842Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit MakeScanSnapshot 2025-03-28T09:02:21.003886Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-03-28T09:02:21.003916Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit MakeScanSnapshot 2025-03-28T09:02:21.003941Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit WaitForStreamClearance 2025-03-28T09:02:21.003970Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2025-03-28T09:02:21.004027Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:857:2692] for [0:281474976715661] at 72075186224037888 2025-03-28T09:02:21.004064Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2025-03-28T09:02:21.004121Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:21.004226Z node 2 :TX_PROXY DEBUG: Got clearance request, shard: 72075186224037888, txid: 281474976715661 2025-03-28T09:02:21.004308Z node 2 :TX_PROXY DEBUG: Collected all clerance requests, txid: 281474976715661 2025-03-28T09:02:21.004347Z node 2 :TX_PROXY DEBUG: Send stream clearance, shard: 72075186224037888, txid: 281474976715661, cleared: 1 2025-03-28T09:02:21.004496Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:857:2692], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715661 2025-03-28T09:02:21.004542Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-03-28T09:02:21.004654Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:857:2692], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2025-03-28T09:02:21.004692Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-03-28T09:02:21.004775Z node 2 :TX_DATASHARD TRACE: StateWork, received 
event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:21.004809Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:21.004878Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:21.004921Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:02:21.004969Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-03-28T09:02:21.005003Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2025-03-28T09:02:21.005048Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715661] at 72075186224037888 2025-03-28T09:02:21.005088Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-03-28T09:02:21.005135Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit WaitForStreamClearance 2025-03-28T09:02:21.005169Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit ReadTableScan 2025-03-28T09:02:21.005204Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit ReadTableScan 2025-03-28T09:02:21.005482Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2025-03-28T09:02:21.005519Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:02:21.005559Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-28T09:02:21.005594Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:02:21.005625Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:02:21.005692Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:21.006248Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:863:2697], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-28T09:02:21.006310Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] >> KqpWrite::InsertRevert [GOOD] >> TLocksTest::Range_CorrectNullDot >> TColumnShardTestSchema::ExportAfterFail [GOOD] >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] >> TFlatTest::SplitEmptyAndWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14171, MsgBus: 5267 2025-03-28T09:01:30.574323Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782938471535174:2226];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:30.575501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2909/0013c7/r3tmp/tmpNh76r5/pdisk_1.dat 2025-03-28T09:01:31.167506Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:31.168242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:31.168383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:31.174023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14171, node 1 2025-03-28T09:01:31.443290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:31.443323Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:31.443339Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:31.443469Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5267 2025-03-28T09:01:35.574110Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782938471535174:2226];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:35.574192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:5267 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:01:37.756483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:37.972620Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:01:38.087207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:41.503396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:01:42.486597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:43.204926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:46.173034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:01:46.173644Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:47.634717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783011485980863:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:01:47.634820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:01:47.977445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:01:48.090229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:01:48.224473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:01:48.525198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:01:48.808963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:01:48.898572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:01:49.049066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783020075915992:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:01:49.049209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:01:49.049479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783020075915997:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:01:49.053284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:01:49.071752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783020075915999:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:01:49.185349Z node 1 :TX_PROXY ERROR: Actor# [1:7486783020075916069:3502] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:01:52.732872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:01:55.166081Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=4;
2025-03-28T09:01:55.196840Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037919 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 }
2025-03-28T09:01:55.197012Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037919 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 }
2025-03-28T09:01:55.197203Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486783045845720445:2526], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [1:7486783032960818269:2526]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[1:7486783045845720445:2526].{
: Error: Duplicate keys have been found., code: 2012 }
2025-03-28T09:01:55.197695Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486783045845720425:2526], SessionActorId: [1:7486783032960818269:2526], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7486783032960818269:2526]. isRollback=0
2025-03-28T09:01:55.197895Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmMzYTQyZGYtNzM3MmFkMTktYzg0NTcwNzgtYmQyYjhkZmY=, ActorId: [1:7486783032960818269:2526], ActorState: ExecuteState, TraceId: 01jqdzs04y18d6psnnp45wtqmw, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7486783045845720439:2526] from: [1:7486783045845720425:2526]
2025-03-28T09:01:55.197957Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486783045845720439:2526] TxId: 281474976710674. Ctx: { TraceId: 01jqdzs04y18d6psnnp45wtqmw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmMzYTQyZGYtNzM3MmFkMTktYzg0NTcwNzgtYmQyYjhkZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } }
2025-03-28T09:01:55.198667Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmMzYTQyZGYtNzM3MmFkMTktYzg0NTcwNzgtYmQyYjhkZmY=, ActorId: [1:7486783032960818269:2526], ActorState: ExecuteState, TraceId: 01jqdzs04y18d6psnnp45wtqmw, Create QueryResponse for error on request, msg:
Trying to start YDB, gRPC: 11176, MsgBus: 8232
2025-03-28T09:02:05.442750Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783089490023587:2063];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:02:05.444877Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013c7/r3tmp/tmpwsNlTv/pdisk_1.dat
2025-03-28T09:02:10.260223Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:02:10.339874Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:02:10.505324Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486783089490023587:2063];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:02:10.506662Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:02:10.582493Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:02:10.582575Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:02:10.617948Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 11176, node 2
2025-03-28T09:02:15.272307Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:02:15.272338Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:02:15.272355Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:02:15.272578Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:8232
TClient is connected to server localhost:8232
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:02:16.274161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:02:16.283302Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:02:16.308008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:02:16.473566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:02:16.694151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:02:16.790853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:02:19.169502Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783149619567503:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:02:19.169604Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:02:19.221042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:02:19.267131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:02:19.319728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:02:19.368285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:02:19.445207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:02:19.531123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:02:19.616070Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783149619568021:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:02:19.616158Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:02:19.622885Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783149619568026:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:02:19.629060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:02:19.644853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486783149619568028:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:02:19.701184Z node 2 :TX_PROXY ERROR: Actor# [2:7486783149619568083:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:02:21.072395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:02:22.079280Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486783162504470519:2524], TxId: 281474976710676, task: 1. Ctx: { TraceId : 01jqdzsvx73rxc0deq700xnw61. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OTA3NTAwYTYtZjc1MzU5MTUtMzZjYjdlMmEtNzljYThjMjA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }.
2025-03-28T09:02:22.079903Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486783162504470520:2525], TxId: 281474976710676, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=OTA3NTAwYTYtZjc1MzU5MTUtMzZjYjdlMmEtNzljYThjMjA=. CustomerSuppliedId : . TraceId : 01jqdzsvx73rxc0deq700xnw61. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7486783162504470516:2492], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution }
2025-03-28T09:02:22.080266Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTA3NTAwYTYtZjc1MzU5MTUtMzZjYjdlMmEtNzljYThjMjA=, ActorId: [2:7486783158209502931:2492], ActorState: ExecuteState, TraceId: 01jqdzsvx73rxc0deq700xnw61, Create QueryResponse for error on request, msg:
>> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 4958, MsgBus: 16225
2025-03-28T09:01:27.320020Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782925734260506:2067];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:01:27.320895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ca/r3tmp/tmpwTCQuG/pdisk_1.dat
2025-03-28T09:01:30.666915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:01:31.058481Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:01:31.100732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:01:31.100819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:01:31.111934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 4958, node 1
2025-03-28T09:01:31.307289Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:01:31.307316Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:01:31.307323Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:01:31.307461Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:16225
2025-03-28T09:01:32.327404Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782925734260506:2067];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:01:32.328358Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
TClient is connected to server localhost:16225
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:01:37.329651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:01:37.375984Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:01:37.393116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:01:38.450876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:01:42.006227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-28T09:01:42.360953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-03-28T09:01:46.047819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T09:01:46.047849Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:01:47.297677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783011633608249:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:01:47.298211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:01:48.468021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:01:48.631951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:01:48.710954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:01:48.757570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:01:48.800279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:01:48.903456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:01:49.046068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783020223543389:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:01:49.046142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:01:49.046438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783020223543394:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:01:49.049799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:01:49.072024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783020223543396:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:01:49.149602Z node 1 :TX_PROXY ERROR: Actor# [1:7486783020223543450:3492] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:01:51.135921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 2054, MsgBus: 19026
2025-03-28T09:02:03.952505Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783079968699988:2064];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:02:03.952560Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ca/r3tmp/tmpWIrII8/pdisk_1.dat
2025-03-28T09:02:05.166401Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:02:06.216898Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:02:06.217385Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:02:06.484000Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:02:06.484068Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:02:06.626845Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 2054, node 2
2025-03-28T09:02:09.292550Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486783079968699988:2064];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:02:09.292826Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:02:10.087096Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:02:10.087112Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:02:10.087118Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:02:10.087194Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19026
TClient is connected to server localhost:19026
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
2025-03-28T09:02:15.862170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
2025-03-28T09:02:15.896004Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:02:15.904513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:02:16.074989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:02:16.327317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:02:16.456802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:02:19.488056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783148688178523:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:02:19.488166Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:02:19.553145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:02:19.588541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:02:19.642860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:02:19.692590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:02:19.764833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:02:19.824819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:02:19.896034Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783148688179035:2469], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:02:19.896101Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:02:19.896414Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783148688179040:2472], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:02:19.900351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:02:19.916988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486783148688179042:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:02:19.982738Z node 2 :TX_PROXY ERROR: Actor# [2:7486783148688179096:3468] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:02:21.162819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T09:02:21.162849Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:02:21.348367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DatashardReplySize [GOOD]
Test command err:
Trying to start YDB, gRPC: 7387, MsgBus: 10394
2025-03-28T08:59:07.023299Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782324499671960:2060];send_to=[0:7307199536658146131:7762515];
2025-03-28T08:59:07.023355Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ba/r3tmp/tmpdr5fZC/pdisk_1.dat
2025-03-28T08:59:12.040726Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782324499671960:2060];send_to=[0:7307199536658146131:7762515];
2025-03-28T08:59:12.051402Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T08:59:12.309850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T08:59:12.438920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T08:59:12.439083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T08:59:12.453504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T08:59:12.642981Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.104571s
2025-03-28T08:59:12.643375Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.104808s
2025-03-28T08:59:12.644228Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 7387, node 1
2025-03-28T08:59:13.565400Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T08:59:13.565632Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T08:59:13.565639Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T08:59:13.565903Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:10394
TClient is connected to server localhost:10394
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T08:59:23.976145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T08:59:24.008224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T08:59:25.014419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T08:59:26.073094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-28T08:59:26.451654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-03-28T08:59:27.599319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T08:59:27.599344Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T08:59:32.996524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782431873856197:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T08:59:32.996661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T08:59:33.750735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T08:59:33.988895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T08:59:34.178326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T08:59:34.284380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T08:59:34.423173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T08:59:34.587055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T08:59:34.677026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782440463791338:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.677098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.677402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782440463791343:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.682082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:34.716367Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T08:59:34.717475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782440463791345:2480], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:34.819572Z node 1 :TX_PROXY ERROR: Actor# [1:7486782440463791401:3503] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:40.570326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:10.444533Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486782595082615571:2715] TxId: 281474976710672. Ctx: { TraceId: 01jqdznvmabhfs5qfb7e41fec8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFmMDRlMmMtMzg3YzI2ODUtZGFjN2E5OWUtMzNiZTdlOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception at WaitResolveState, current limit is 1024 bytes. } 2025-03-28T09:00:10.444719Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmFmMDRlMmMtMzg3YzI2ODUtZGFjN2E5OWUtMzNiZTdlOTE=, ActorId: [1:7486782595082615554:2715], ActorState: ExecuteState, TraceId: 01jqdznvmabhfs5qfb7e41fec8, Create QueryResponse for error on request, msg:
: Error: Memory limit exception at WaitResolveState, current limit is 1024 bytes. Trying to start YDB, gRPC: 62279, MsgBus: 9591 2025-03-28T09:00:14.705902Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782613200672070:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:14.705950Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ba/r3tmp/tmpzkbHcw/pdisk_1.dat 2025-03-28T09:00:15.654303Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:15.846917Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:15.846987Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:15.848438Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:00:15.849682Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62279, node 2 2025-03-28T09:00:15.996294Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:15.996321Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:15.996329Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:15.996463Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9591 TClient is connected to server localhost:9591 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057 ... ER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486782689321001431:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:38.010879Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:00:42.989253Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782732270675482:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:42.989355Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:42.989721Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782732270675494:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:42.993842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:00:43.012291Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782732270675496:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:00:43.089502Z node 3 :TX_PROXY ERROR: Actor# [3:7486782736565642846:2630] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:48.479091Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:00:48.479113Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:50.026790Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7486782736565642881:2357] TxId: 281474976715661. Ctx: { TraceId: 01jqdzpvk275jzxw4pxcam6vvm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGVjOGYxN2QtZGRlMjk4Yy1iODQyOGY5Ni1mYjg5NTkzMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Abort execution. Task #1 size is too big: 100442499 > 50331648 2025-03-28T09:00:50.027325Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGVjOGYxN2QtZGRlMjk4Yy1iODQyOGY5Ni1mYjg5NTkzMg==, ActorId: [3:7486782723680740842:2357], ActorState: ExecuteState, TraceId: 01jqdzpvk275jzxw4pxcam6vvm, Create QueryResponse for error on request, msg:
: Error: Datashard program size limit exceeded (100442499 > 50331648), code: 200509 Trying to start YDB, gRPC: 6114, MsgBus: 13845 2025-03-28T09:00:56.167515Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486782792908365451:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:56.167566Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ba/r3tmp/tmp2w4V50/pdisk_1.dat 2025-03-28T09:00:56.943459Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:00:57.013725Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:00:57.013827Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:00:57.015725Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6114, node 4 2025-03-28T09:00:57.780111Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:00:57.780137Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:00:57.780145Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:00:57.780250Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13845 2025-03-28T09:01:01.171347Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486782792908365451:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:01.171411Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:13845 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:01:01.584990Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
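The failures above all come from the same KqpLimits::DatashardReplySize run: first a memory limit exception at WaitResolveState after the per-query limit is forced down to 1024 bytes, then a datashard program size overflow (100442499 > 50331648, code 200509), i.e. a single transaction whose program exceeds the ~48 MiB cap. The standard workaround for the program-size cap is to split one oversized write into several smaller transactions. A minimal sketch of that batching, assuming a hypothetical run_upsert(batch) callable rather than any API from the test harness:

    # Split rows into batches so no single transaction's program approaches
    # the 50331648-byte (48 MiB) datashard limit seen in the log above.
    # run_upsert and the repr()-based size estimate are illustrative stand-ins.
    def chunked(rows, max_bytes=48 * 1024 * 1024, row_size=lambda r: len(repr(r))):
        batch, size = [], 0
        for row in rows:
            s = row_size(row)
            if batch and size + s > max_bytes:
                yield batch
                batch, size = [], 0
            batch.append(row)
            size += s
        if batch:
            yield batch

    def upsert_all(rows, run_upsert):
        for batch in chunked(rows):
            run_upsert(batch)  # one transaction per batch keeps each program small

The repr()-based estimate is only a placeholder; a real client would budget against the serialized parameter size and leave headroom below the limit.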
2025-03-28T09:01:01.603765Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:01:01.621849Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:01.742557Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:02.081908Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:03.029674Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:07.769026Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782840153007530:2419], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:07.769128Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:07.851994Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:01:07.886400Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:01:07.928365Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:01:07.986021Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:01:08.026457Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:01:08.081929Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:01:08.163083Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782844447975349:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:08.163211Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:08.163270Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782844447975354:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:08.168060Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:01:08.181761Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486782844447975356:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:01:08.244999Z node 4 :TX_PROXY ERROR: Actor# [4:7486782844447975411:3470] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:01:10.159794Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:11.923158Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:01:11.923181Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:21.640562Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZThmZTU3ZTEtZTI5YmVlOTQtZDQ3NTY2MzgtYjZjYTk4YmQ=, ActorId: [4:7486783145095688665:2902], ActorState: ExecuteState, TraceId: 01jqdzssay0kdzg5kymfpz7c2k, Create QueryResponse for error on request, msg:
: Error: Query result size limit exceeded. (200003947 > 50331648), code: 2013 >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile >> TCancelTx::CrossShardReadOnly [GOOD] >> TCancelTx::CrossShardReadOnlyWithReadSets |75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |75.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::InsertRevert [GOOD] Test command err: Trying to start YDB, gRPC: 27110, MsgBus: 23227 2025-03-28T09:01:26.306784Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782923289042733:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:26.307268Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013d9/r3tmp/tmp5WeHyg/pdisk_1.dat 2025-03-28T09:01:27.060821Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:27.064379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:27.064472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:27.072022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27110, node 1 2025-03-28T09:01:27.340110Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:27.340131Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:27.340141Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:27.340267Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23227 TClient is connected to server localhost:23227 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
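Code 2013 above is the read-side counterpart of the previous limit: the query tried to return 200003947 bytes in one response against the same 50331648-byte default cap. Large result sets are normally consumed in pages rather than in a single response. A minimal keyset-pagination sketch; run_query, the table path, and the integer Key column are illustrative assumptions, not the interface this test uses:

    # Read a large table in LIMIT-ed pages so each response stays far below
    # the per-query result size cap. Assumes a non-negative integer key.
    PAGE = """
    DECLARE $last_key AS Int64;
    SELECT Key, Value FROM `/Root/KeyValue`
    WHERE Key > $last_key
    ORDER BY Key
    LIMIT 1000;
    """

    def read_all(run_query):
        last_key, rows = -1, []
        while True:
            page = run_query(PAGE, {"$last_key": last_key})
            if not page:
                return rows
            rows.extend(page)
            last_key = page[-1]["Key"]  # resume strictly after the last row seen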
2025-03-28T09:01:31.055007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:31.074216Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:01:31.085381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:31.244984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:31.390959Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782923289042733:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:31.391360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:01:31.473932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:31.569435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:42.040885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782992008521160:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:42.041022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:42.041161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:01:42.041178Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:43.151438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:01:43.239231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:01:43.333796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:01:43.394891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:01:43.508296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:01:43.676588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:01:44.019049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783000598456324:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:44.019146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:44.019395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783000598456329:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:44.023356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:01:44.050228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783000598456331:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:01:44.140008Z node 1 :TX_PROXY ERROR: Actor# [1:7486783000598456389:3507] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:01:47.930825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:01:49.021677Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486783022073293368:2552], TxId: 281474976710673, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqdzrve021fcdm57yhr9j76v. SessionId : ydb://session/3?node_id=1&id=MzE1MTAxYS04ZWFiMzBhNy00YTM2YjFiMy1lNTk0ZjczZg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:01:49.022193Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486783022073293369:2553], TxId: 281474976710673, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqdzrve021fcdm57yhr9j76v. SessionId : ydb://session/3?node_id=1&id=MzE1MTAxYS04ZWFiMzBhNy00YTM2YjFiMy1lNTk0ZjczZg==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486783022073293365:2530], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:01:49.022626Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzE1MTAxYS04ZWFiMzBhNy00YTM2YjFiMy1lNTk0ZjczZg==, ActorId: [1:7486783013483358573:2530], ActorState: ExecuteState, TraceId: 01jqdzrve021fcdm57yhr9j76v, Create QueryResponse for error on request, msg:
: Error: Conflict with existing key., code: 2012 2025-03-28T09:01:49.447656Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486783022073293424:2564], TxId: 281474976710676, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=MzE1MTAxYS04ZWFiMzBhNy00YTM2YjFiMy1lNTk0ZjczZg==. CustomerSuppliedId : . TraceId : 01jqdzrw3x7k08rt1701c6958h. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-03-28T09:01:49.447961Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486783022073293425:2565], TxId: 281474976710676, task: 2. Ctx: { TraceId : 01jqdzrw3x7k08rt1701c6958h. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MzE1MTAxYS04ZWFiMzBhNy00YTM2YjFiMy1lNTk0ZjczZg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486783022073293421:2530], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:01:49.448239Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzE1MTAxYS04ZWFiMzBhNy00YTM2YjFiMy1lNTk0ZjczZg==, ActorId: [1:7486783013483358573:2530], ActorState: ExecuteState, TraceId: 01jqdzrw3x7k08rt1701c6958h, Create QueryResponse for error on request, msg:
: Error: Duplicated keys found., code: 2012 Trying to start YDB, gRPC: 22384, MsgBus: 26994 2025-03-28T09:01:59.974916Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783063507859554:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:59.974964Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013d9/r3tmp/tmp5sJWrN/pdisk_1.dat 2025-03-28T09:02:03.216314Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:03.422046Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:03.437166Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:03.437240Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:03.498954Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22384, node 2 2025-03-28T09:02:04.979496Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486783063507859554:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:04.979546Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:05.197013Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:05.197033Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:05.197040Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:05.197144Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26994 TClient is connected to server localhost:26994 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:15.509637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
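Both KIKIMR_CONSTRAINT_VIOLATION failures above report code 2012 but have distinct causes: "Conflict with existing key." is an INSERT that hit a row already present in the table, while "Duplicated keys found." means the same primary key occurred more than once within a single statement. In YQL, INSERT INTO fails on an existing key whereas UPSERT INTO overwrites it, so the fix is either to deduplicate the batch or to switch statements when overwriting is intended. A small illustrative sketch (the row shape and "Key" field are assumptions, not taken from the test):

    # Drop repeated primary keys inside one batch; keeping the first occurrence
    # avoids the "Duplicated keys found." (code 2012) error within a statement.
    def dedupe_by_key(rows, key=lambda r: r["Key"]):
        seen, out = set(), []
        for row in rows:
            k = key(row)
            if k in seen:
                continue
            seen.add(k)
            out.append(row)
        return out

Whether a remaining "Conflict with existing key." should then be handled by UPSERT or surfaced as an application error depends on the intended write semantics.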
2025-03-28T09:02:15.526711Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:15.538033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:15.720123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:16.154430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:16.306207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.419112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:02:17.419139Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:17.819680Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783140817272678:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:17.819764Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:17.913120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:17.989198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:18.058805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:18.152334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:18.273484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:18.367553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:18.441922Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783145112240492:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:18.442026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:18.442279Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783145112240497:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:18.445874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:18.464024Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:02:18.464529Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486783145112240499:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:02:18.544136Z node 2 :TX_PROXY ERROR: Actor# [2:7486783145112240554:3477] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:20.153016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] >> TLocksFatTest::RangeSetBreak >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false |75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |75.2%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |75.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143153030.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153030.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123151830.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-03-28T09:00:33.386490Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:00:33.601884Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:00:33.640191Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:00:33.640473Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:00:33.656639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:00:33.656900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:00:33.657233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:00:33.657379Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:00:33.657507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:00:33.657626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:00:33.657739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:00:33.657880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:00:33.658011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:00:33.658127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:00:33.658260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:00:33.658377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:00:33.701811Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:00:33.702171Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:00:33.702245Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:00:33.702419Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:00:33.702603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:00:33.702706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:00:33.702757Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:00:33.702851Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-03-28T09:00:33.702931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:00:33.702982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:00:33.703016Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:00:33.703269Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:00:33.703380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:00:33.703450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:00:33.703501Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:00:33.703597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:00:33.703659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:00:33.703714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:00:33.703748Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:00:33.703821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:00:33.703863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:00:33.703900Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:00:33.703956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:00:33.704003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:00:33.704037Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:00:33.704489Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=74; 2025-03-28T09:00:33.704593Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-03-28T09:00:33.704668Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-03-28T09:00:33.704752Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-03-28T09:00:33.704917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:00:33.704967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:00:33.705000Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:00:33.705197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:00:33.705245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:00:33.705273Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:00:33.705416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:00:33.705456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:00:33.705484Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:00:33.705645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... 
25-03-28T09:02:23.668849Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:23.668882Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:02:23.668916Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:02:23.669026Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:02:23.669116Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:23.669150Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:02:23.669227Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=14867; 2025-03-28T09:02:23.669270Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=118936;num_rows=14867;batch_columns=timestamp; 2025-03-28T09:02:23.669394Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1270:3279];bytes=118936;rows=14867;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:1271:3280]->[1:1270:3279] 
2025-03-28T09:02:23.669503Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:23.669592Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:23.669677Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:23.669788Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:02:23.669863Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:23.669936Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:23.669970Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1271:3280] finished for tablet 9437184 2025-03-28T09:02:23.670412Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1270:3279];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.006},{"events":["l_bootstrap"],"t":0.013},{"events":["f_processing","f_task_result"],"t":0.015},{"events":["f_ack","l_task_result"],"t":1.164},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":1.18}],"full":{"a":1743152542489211,"name":"_full_task","f":1743152542489211,"d_finished":0,"c":0,"l":1743152543670019,"d":1180808},"events":[{"name":"bootstrap","f":1743152542489399,"d_finished":13547,"c":1,"l":1743152542502946,"d":13547},{"a":1743152543669772,"name":"ack","f":1743152543654090,"d_finished":14976,"c":7,"l":1743152543669699,"d":15223},{"a":1743152543669759,"name":"processing","f":1743152542504797,"d_finished":628430,"c":56,"l":1743152543669701,"d":628690},{"name":"ProduceResults","f":1743152542495838,"d_finished":28601,"c":65,"l":1743152543669955,"d":28601},{"a":1743152543669958,"name":"Finish","f":1743152543669958,"d_finished":0,"c":0,"l":1743152543670019,"d":61},{"name":"task_result","f":1743152542504819,"d_finished":612250,"c":49,"l":1743152543653788,"d":612250}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:23.670480Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1270:3279];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:02:23.671405Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1270:3279];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.006},{"events":["l_bootstrap"],"t":0.013},{"events":["f_processing","f_task_result"],"t":0.015},{"events":["f_ack","l_task_result"],"t":1.164},{"events":["l_ProduceResults","f_Finish"],"t":1.18},{"events":["l_ack","l_processing","l_Finish"],"t":1.181}],"full":{"a":1743152542489211,"name":"_full_task","f":1743152542489211,"d_finished":0,"c":0,"l":1743152543670518,"d":1181307},"events":[{"name":"bootstrap","f":1743152542489399,"d_finished":13547,"c":1,"l":1743152542502946,"d":13547},{"a":1743152543669772,"name":"ack","f":1743152543654090,"d_finished":14976,"c":7,"l":1743152543669699,"d":15722},{"a":1743152543669759,"name":"processing","f":1743152542504797,"d_finished":628430,"c":56,"l":1743152543669701,"d":629189},{"name":"ProduceResults","f":1743152542495838,"d_finished":28601,"c":65,"l":1743152543669955,"d":28601},{"a":1743152543669958,"name":"Finish","f":1743152543669958,"d_finished":0,"c":0,"l":1743152543670518,"d":560},{"name":"task_result","f":1743152542504819,"d_finished":612250,"c":49,"l":1743152543653788,"d":612250}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1271:3280]->[1:1270:3279] 2025-03-28T09:02:23.671499Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:02:22.488770Z;index_granules=0;index_portions=7;index_batches=1260;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=10402524;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10402524;selected_rows=0; 2025-03-28T09:02:23.671535Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:02:23.671798Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1271:3280];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 160000/10402332 160000/10402524 |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] Test command 
err: 2025-03-28T09:01:26.045025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:26.045513Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:26.045572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e00/r3tmp/tmpUJ5S0T/pdisk_1.dat 2025-03-28T09:01:26.423728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:26.466660Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:26.506763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:26.506879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:26.518263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:26.615045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:01:27.124551Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# usertable in dir# /Root with rows# 100 2025-03-28T09:01:27.132036Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 1} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" 2025-03-28T09:01:27.171919Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 1} TUpsertActor finished in 0.039525s, errors=0 2025-03-28T09:01:27.172189Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 100 Inflights: 10 } 2025-03-28T09:01:27.172274Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} Bootstrap called: RowCount: 100 Inflights: 10 2025-03-28T09:01:27.173260Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 2 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2025-03-28T09:01:27.173388Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} started fullscan actor# [1:750:2632] 2025-03-28T09:01:27.173481Z node 1 :DS_LOAD_TEST INFO: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Bootstrap called, sample# 100 2025-03-28T09:01:27.173530Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Connect to# 72075186224037888 called 2025-03-28T09:01:27.184536Z node 1 :DS_LOAD_TEST DEBUG: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} Handle TEvClientConnected called, Status# OK 
2025-03-28T09:01:27.185693Z node 1 :DS_LOAD_TEST NOTICE: ReadIteratorScan# {Tag: 0, parent: [1:747:2629], subTag: 1} finished in 0.000941s, sampled# 100, iter finished# 1, oks# 100 2025-03-28T09:01:27.185890Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} received keyCount# 100 2025-03-28T09:01:27.186150Z node 1 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 3} started# 10 actors each with inflight# 1 2025-03-28T09:01:27.186238Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 2} Bootstrap called 2025-03-28T09:01:27.186280Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 2} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-28T09:01:27.186322Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 3} Bootstrap called 2025-03-28T09:01:27.186345Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 3} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-28T09:01:27.186374Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 4} Bootstrap called 2025-03-28T09:01:27.186396Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 4} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-28T09:01:27.186419Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 5} Bootstrap called 2025-03-28T09:01:27.186464Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 5} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-28T09:01:27.186506Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 6} Bootstrap called 2025-03-28T09:01:27.186529Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 6} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-28T09:01:27.186562Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 7} Bootstrap called 2025-03-28T09:01:27.186590Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 7} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-28T09:01:27.186626Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 8} Bootstrap called 2025-03-28T09:01:27.186651Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 8} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-28T09:01:27.186674Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 9} Bootstrap called 2025-03-28T09:01:27.186727Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 9} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-28T09:01:27.186764Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 10} Bootstrap called 2025-03-28T09:01:27.186779Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 10} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-28T09:01:27.186801Z node 1 :DS_LOAD_TEST INFO: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 11} Bootstrap called 2025-03-28T09:01:27.186822Z node 1 :DS_LOAD_TEST DEBUG: 
TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 11} sends event for session creation to proxy: [1:8678280833929343339:121] 2025-03-28T09:01:27.187924Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 2} session: ydb://session/3?node_id=1&id=YWMwMjY4N2EtNDVjOTU5MDAtZTA4ZWIxZGEtMjdjOTA1MQ== 2025-03-28T09:01:27.189746Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 3} session: ydb://session/3?node_id=1&id=ZTgxOTlkMWEtMTU3NDI4ZjMtOGU3NDllZWUtOGIwYWQyZGY= 2025-03-28T09:01:27.189837Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 4} session: ydb://session/3?node_id=1&id=NzAzMjI1OGYtMjBhZTcyLTMxNzQwMWE2LWM1NDk3ZjI2 2025-03-28T09:01:27.190712Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 5} session: ydb://session/3?node_id=1&id=NzE1MTJjYTYtMjU4YmFmOTMtN2NhYzlhLTU0OGRkMzA1 2025-03-28T09:01:27.192010Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 6} session: ydb://session/3?node_id=1&id=NTRkYjJmOTAtNzM5Zjg4MTQtNzZlMGQ3Ny1iYWQ5YTFmZg== 2025-03-28T09:01:27.193308Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 7} session: ydb://session/3?node_id=1&id=Nzc4NjdmNGEtZWE1NDM1NjEtMTE1OTY3YzctNzIzNWU2OTQ= 2025-03-28T09:01:27.196002Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 8} session: ydb://session/3?node_id=1&id=OTNjMzQyZmMtMTEyMjViMzctYTljOWQxMTItY2RjZjY5MTM= 2025-03-28T09:01:27.196101Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 9} session: ydb://session/3?node_id=1&id=ZDcxY2VhZTQtOTc5MmRiMTYtNWYzYzlkYzAtZmFiZDM2OWE= 2025-03-28T09:01:27.197530Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 10} session: ydb://session/3?node_id=1&id=M2QyYTc5OGQtYmM3MWUwZGMtZjg5NmJhNjQtNmMwOTg5YTc= 2025-03-28T09:01:27.198981Z node 1 :DS_LOAD_TEST DEBUG: TKqpSelectActor# {Tag: 0, parent: [1:747:2629], subTag: 11} session: ydb://session/3?node_id=1&id=ZWY1OGJmOTctZmVjOGNmOWUtZjNmYjkzNmQtZTk3ZGQ5NTU= 2025-03-28T09:01:27.203177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:774:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:27.203298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:803:2679], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:27.203364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:804:2680], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:27.203411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:805:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:27.203458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:806:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:27.203525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:808:2684], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:27.203585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:810:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:27.203632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:812:2688], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:27.204898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:822:2698], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resou ... 2025-03-28T09:02:16.252774Z node 2 :TX_PROXY ERROR: Actor# [2:852:2725] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:02:16.253146Z node 2 :TX_PROXY ERROR: Actor# [2:853:2726] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:02:16.253804Z node 2 :TX_PROXY ERROR: Actor# [2:861:2727] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:02:16.254228Z node 2 :TX_PROXY ERROR: Actor# [2:863:2728] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:02:16.255430Z node 2 :TX_PROXY ERROR: Actor# [2:866:2729] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:02:16.256335Z node 2 :TX_PROXY ERROR: Actor# [2:869:2730] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:02:16.421323Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:829:2705], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:16.421441Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:830:2706], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:16.421499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:831:2707], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:16.421549Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:832:2708], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:16.421607Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:833:2709], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:16.421664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:834:2710], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:16.421735Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:835:2711], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:16.421793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:838:2714], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:16.421862Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:845:2721], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:16.421939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:847:2723], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:16.460710Z node 2 :TX_PROXY ERROR: Actor# [2:986:2822] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:16.965503Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 4} finished in 0.778457s, errors=0 2025-03-28T09:02:16.965750Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 4 { Tag: 4 DurationMs: 778 OperationsOK: 100 OperationsError: 0 } 2025-03-28T09:02:16.979664Z node 2 :TX_PROXY ERROR: Actor# [2:1915:3144] txid# 281474976715769, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:17.344525Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 3} finished in 1.163144s, errors=0 2025-03-28T09:02:17.344823Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 3 { Tag: 3 DurationMs: 1163 OperationsOK: 100 OperationsError: 0 } 2025-03-28T09:02:17.359726Z node 2 :TX_PROXY ERROR: Actor# [2:2822:3450] txid# 281474976715870, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:17.840431Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 5} finished in 1.651671s, errors=0 2025-03-28T09:02:17.840606Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 5 { Tag: 5 DurationMs: 1651 OperationsOK: 100 OperationsError: 0 } 2025-03-28T09:02:17.855487Z node 2 :TX_PROXY ERROR: Actor# [2:3729:3756] txid# 281474976715971, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:18.355449Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 11} finished in 2.140281s, errors=0 2025-03-28T09:02:18.355773Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 11 { Tag: 11 DurationMs: 2140 OperationsOK: 100 OperationsError: 0 } 2025-03-28T09:02:18.379524Z node 2 :TX_PROXY ERROR: Actor# [2:4636:4062] txid# 281474976716072, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:18.905148Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 7} finished in 2.698580s, errors=0 2025-03-28T09:02:18.905458Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 7 { Tag: 7 DurationMs: 2698 OperationsOK: 100 
OperationsError: 0 } 2025-03-28T09:02:18.924875Z node 2 :TX_PROXY ERROR: Actor# [2:5543:4368] txid# 281474976716173, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:19.688581Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 9} finished in 3.479042s, errors=0 2025-03-28T09:02:19.688804Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 9 { Tag: 9 DurationMs: 3479 OperationsOK: 100 OperationsError: 0 } 2025-03-28T09:02:19.708800Z node 2 :TX_PROXY ERROR: Actor# [2:6450:4674] txid# 281474976716274, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:20.364407Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 10} finished in 4.150695s, errors=0 2025-03-28T09:02:20.364771Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 10 { Tag: 10 DurationMs: 4150 OperationsOK: 100 OperationsError: 0 } 2025-03-28T09:02:20.383936Z node 2 :TX_PROXY ERROR: Actor# [2:7357:4980] txid# 281474976716375, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:21.781268Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 8} finished in 5.573199s, errors=0 2025-03-28T09:02:21.781508Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 8 { Tag: 8 DurationMs: 5573 OperationsOK: 100 OperationsError: 0 } 2025-03-28T09:02:21.802439Z node 2 :TX_PROXY ERROR: Actor# [2:8264:5286] txid# 281474976716476, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:22.932553Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 6} finished in 6.727567s, errors=0 2025-03-28T09:02:22.932958Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 6 { Tag: 6 DurationMs: 6727 OperationsOK: 100 OperationsError: 0 } 2025-03-28T09:02:22.961731Z node 2 :TX_PROXY ERROR: Actor# [2:9171:5592] txid# 281474976716577, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:23.697755Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActor# {Tag: 0, parent: [2:747:2629], subTag: 2} finished in 7.518045s, errors=0 2025-03-28T09:02:23.698338Z node 2 :DS_LOAD_TEST DEBUG: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished: 2 { Tag: 2 DurationMs: 7518 OperationsOK: 100 OperationsError: 0 } 
2025-03-28T09:02:23.698407Z node 2 :DS_LOAD_TEST NOTICE: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:738:2620], subTag: 3} finished in 7.541589s, oks# 1000, errors# 0 2025-03-28T09:02:23.698780Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:747:2629] with tag# 3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] Test command err: Trying to start YDB, gRPC: 14000, MsgBus: 30454 2025-03-28T09:01:27.208173Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782924390132403:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:27.208213Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013cd/r3tmp/tmpnAu5Eo/pdisk_1.dat 2025-03-28T09:01:29.480691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:30.602877Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:30.980782Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:30.987142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:30.987559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:30.997258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14000, node 1 2025-03-28T09:01:31.165539Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:31.165565Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:31.165578Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:31.165714Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30454 2025-03-28T09:01:32.211708Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782924390132403:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:32.211756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:30454 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:01:37.245156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:37.343174Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:01:37.360868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:37.942055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:41.664298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:42.028259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:45.951753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:01:45.951770Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:47.336249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783010289480189:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:47.336333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:48.142049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:01:48.320376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:01:48.468659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:01:48.622185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:01:48.665928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:01:48.757994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:01:48.850566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783014584448029:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:48.850672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:48.850987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783014584448034:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:48.855080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:01:48.869706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783014584448036:2483], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:01:48.948973Z node 1 :TX_PROXY ERROR: Actor# [1:7486783014584448089:3511] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:01:54.189652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 12979, MsgBus: 14363 2025-03-28T09:02:10.783318Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783108957058379:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:10.783650Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013cd/r3tmp/tmpE7HTKO/pdisk_1.dat 2025-03-28T09:02:15.368081Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:15.368164Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:15.391603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12979, node 2 2025-03-28T09:02:15.714467Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:15.724111Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:15.724138Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:15.724147Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:15.724269Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:02:15.783385Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486783108957058379:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:15.783440Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:14363 TClient is connected to server localhost:14363 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:16.585147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:16.596195Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:16.605667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:16.750133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.193100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.316372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:19.730840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783147611765832:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:19.730944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:19.805503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.868005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.921405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:20.015547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:20.099971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:20.175661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:20.265043Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783151906733659:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:20.265135Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:20.265366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783151906733664:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:20.269587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:20.294489Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486783151906733666:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:02:20.388592Z node 2 :TX_PROXY ERROR: Actor# [2:7486783151906733721:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:22.069442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpWrite::ProjectReplace-UseSink [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] |75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |75.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |75.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 21264, MsgBus: 20226 2025-03-28T09:01:28.886042Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782929605998656:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:28.886174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013d0/r3tmp/tmpPVpSjI/pdisk_1.dat 2025-03-28T09:01:31.255168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:31.338290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:31.338391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:31.340582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:31.343214Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21264, node 1 2025-03-28T09:01:31.547239Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:31.547263Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:31.547270Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:31.547396Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20226 2025-03-28T09:01:33.888547Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782929605998656:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:33.888595Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:20226 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:01:39.892050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:40.900021Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:01:41.081264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:42.897656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:44.020799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:44.365842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:46.345375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:01:46.345410Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:49.781351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783019800313603:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:49.781506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:51.056813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:01:51.193232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:01:51.455886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:01:52.360962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:01:52.527657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:01:52.633307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:01:52.875908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783032685216054:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:52.875997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:52.876281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783032685216060:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:52.887944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:01:52.907933Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:01:52.908206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783032685216062:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:01:53.008439Z node 1 :TX_PROXY ERROR: Actor# [1:7486783032685216114:3523] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:00.091915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 62747, MsgBus: 26538 2025-03-28T09:02:15.338594Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783128731245216:2207];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013d0/r3tmp/tmpuP3TyT/pdisk_1.dat 2025-03-28T09:02:15.523080Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:02:15.732776Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:15.754517Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:15.754600Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:15.763918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62747, node 2 2025-03-28T09:02:16.054102Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:16.054129Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:16.054137Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:16.054243Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26538 TClient is connected to server localhost:26538 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:17.102754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:17.122799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.193551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.461079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:02:17.538753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:02:20.238795Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486783128731245216:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:20.238863Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:20.465512Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783154501050603:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:20.465606Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:20.507666Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:20.567268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:20.664111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:20.721775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:20.770771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:20.853652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:20.994155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783154501051122:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:20.994243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:20.999153Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783154501051128:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:21.004239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:21.029844Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486783154501051130:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:02:21.131348Z node 2 :TX_PROXY ERROR: Actor# [2:7486783158796018484:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:22.861451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] >> TFlatTest::PartBloomFilter [GOOD] >> TFlatTest::ShardFreezeUnfreeze [GOOD] >> KqpExplain::AggGroupLimit [GOOD] >> KqpExplain::ComplexJoin >> TFlatTest::ReadOnlyMode [GOOD] >> TFlatTest::RejectByIncomingReadSetSize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10164, MsgBus: 1468 2025-03-28T09:01:35.407638Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782961680268454:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:35.476485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013c4/r3tmp/tmpALsuIX/pdisk_1.dat 2025-03-28T09:01:42.607389Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782961680268454:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:42.607555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:01:43.237520Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:43.401497Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:43.439559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:43.439624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:43.500333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10164, node 1 2025-03-28T09:01:44.313286Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:44.313304Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:44.313309Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:44.314073Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1468 TClient is connected to server localhost:1468 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:01:49.679960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:49.962198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:52.815808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:54.884062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:55.667207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:58.067202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:01:58.067227Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:05.200434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783090529289209:2435], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:05.200534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:07.532405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:07.741461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:07.912079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:08.321433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:08.558941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:09.280977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:10.085676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783112004126291:2506], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:10.085744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:10.089127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783112004126296:2509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:10.153113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:10.358380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783112004126298:2510], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:02:10.484433Z node 1 :TX_PROXY ERROR: Actor# [1:7486783112004126353:3575] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 30859, MsgBus: 29334 2025-03-28T09:02:18.086029Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783144956545937:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:18.086179Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013c4/r3tmp/tmp2kKdke/pdisk_1.dat 2025-03-28T09:02:18.327017Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:18.336037Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:18.336119Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:18.343910Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30859, node 2 2025-03-28T09:02:18.547204Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:18.547226Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:18.547234Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:18.547345Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29334 TClient is connected to server localhost:29334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:19.344767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:19.360310Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:19.365668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:19.513407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:19.690272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:19.793343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:22.695720Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783162136416817:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:22.695796Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:22.748486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:22.814334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:22.857992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:22.892699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:22.937124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:23.003272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:23.091007Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486783144956545937:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:23.091059Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:23.134860Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783166431384628:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:23.134971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:23.137444Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783166431384633:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:23.140976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:23.166627Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:02:23.166894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486783166431384636:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:02:23.245237Z node 2 :TX_PROXY ERROR: Actor# [2:7486783166431384692:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart >> TExternalTableTest::DropTableTwice |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 18844, MsgBus: 25773 2025-03-28T09:01:37.568774Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782968574488078:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:37.569217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013be/r3tmp/tmpadUBVX/pdisk_1.dat 2025-03-28T09:01:42.565555Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782968574488078:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:42.565595Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:01:43.244289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:43.786361Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:43.923591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:43.923682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:43.928306Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18844, node 1 2025-03-28T09:01:44.024964Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:45.034117Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:45.047976Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:45.048000Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:45.048110Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25773 TClient is connected to server localhost:25773 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:01:49.268865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:49.280479Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:01:49.305107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:49.441963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:49.782093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:50.031813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:58.975186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:01:58.975203Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:59.829116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783063063770316:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:59.829578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:02.869921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:03.287555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:03.417465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:03.630950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:03.707726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:04.286196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:04.620973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783084538607394:2509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:04.621041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:04.623293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783084538607399:2512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:04.663047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:04.750817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783084538607401:2513], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:02:04.853775Z node 1 :TX_PROXY ERROR: Actor# [1:7486783084538607466:3571] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:15.242620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 61257, MsgBus: 10212 2025-03-28T09:02:17.506062Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783140813773956:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:17.506102Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013be/r3tmp/tmplKQtRh/pdisk_1.dat 2025-03-28T09:02:17.666714Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:17.691048Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:17.691128Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:17.693844Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61257, node 2 2025-03-28T09:02:17.750937Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:17.750959Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:17.750966Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:17.751064Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10212 TClient is connected to server localhost:10212 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:18.294872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:18.304847Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:02:18.316306Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:18.412327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:18.591820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:18.703065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:21.447305Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783157993644899:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:21.447400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:21.545531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:21.633803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:21.712473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:21.764259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:21.842871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:21.934677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:22.061969Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783162288612718:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:22.062113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:22.062881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783162288612723:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:22.072012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:22.106253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486783162288612726:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:02:22.209961Z node 2 :TX_PROXY ERROR: Actor# [2:7486783162288612784:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:22.506741Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486783140813773956:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:22.506815Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:24.149711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:02:25.229308Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmY0MTEzZGMtYjk5ZDExMzEtNWZiODk1ODktYTg3ZDUwMGM=, ActorId: [2:7486783170878547662:2494], ActorState: ExecuteState, TraceId: 01jqdzsza8a9x845jm9s3vjfsg, Create QueryResponse for error on request, msg: >> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] Test command err: 2025-03-28T09:02:15.418563Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783132489530378:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:15.443364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f25/r3tmp/tmpbLw6RE/pdisk_1.dat 2025-03-28T09:02:16.406399Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:16.492274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:16.492376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:16.493921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:16.503340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6967 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:16.944876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:02:16.978746Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:02:16.993542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:20.490457Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783154910578915:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f25/r3tmp/tmpncg5fe/pdisk_1.dat 2025-03-28T09:02:20.574188Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:02:20.682777Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:20.730366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:20.730438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:20.736629Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61247 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:02:21.059948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:02:21.090111Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:02:21.098240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> TTransferTests::Create >> TFlatTest::SelectRangeForbidNullArgs2 [GOOD] >> TFlatTest::SelectRangeForbidNullArgs3 >> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD] >> DataShardWrite::UpdateImmediate >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] Test command err: 2025-03-28T09:02:05.811632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:02:05.843017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:05.843105Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001680/r3tmp/tmpccXHwG/pdisk_1.dat 2025-03-28T09:02:11.934252Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.111370s 2025-03-28T09:02:11.943179Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.120620s 2025-03-28T09:02:12.069431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:02:12.599051Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:12.777537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:12.777663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:12.796718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:13.079635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:13.639915Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:02:13.686642Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:02:13.696229Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:02:13.706939Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:15.179161Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:02:15.180050Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:15.180209Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:15.188693Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:02:15.188785Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:02:15.188833Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:02:15.189166Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:15.189297Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:15.189378Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:02:15.200455Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 
2025-03-28T09:02:15.364678Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:02:15.364895Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:15.365026Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:02:15.365085Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:15.365127Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:02:15.365165Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:15.365421Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:15.365479Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:15.365824Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:02:15.365921Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:02:15.366433Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:15.366482Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:15.366525Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:02:15.366559Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:02:15.366590Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:02:15.366633Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:02:15.390858Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:15.391161Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:15.391216Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:15.391271Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:02:15.391374Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:02:15.391417Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:02:15.391554Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:15.391871Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:02:15.391946Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:02:15.392074Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:02:15.392155Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is 
ExecutedNoMoreRestarts 2025-03-28T09:02:15.392207Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:02:15.392262Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:02:15.392302Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:02:15.392662Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:02:15.392729Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:02:15.392791Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:02:15.392827Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:02:15.392917Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:02:15.392958Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:02:15.392994Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:02:15.393029Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:02:15.393113Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:02:15.399383Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:02:15.399459Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:15.412819Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:15.413362Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:02:15.413625Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:02:15.414231Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:02:15.414591Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:02:15.596694Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:15.596751Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:15.596790Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:02:15.597174Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 
2025-03-28T09:02:15.597217Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:02:15.597337Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:02:15.597389Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:02:15.597426Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:02:15.597464Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:02:15.616609Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:02:15.616700Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:15.617558Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipi ... 025-03-28T09:02:26.205735Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2825] TxId: 281474976715665. Ctx: { TraceId: 01jqdzszpm4q6njtrfsztpbv8m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZiZWUxNTMtOTA5MGIxNTItYmVlYzNkOTEtZjhlYjc2NjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1051:2847], CA [2:1052:2848], 2025-03-28T09:02:26.205916Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2825] TxId: 281474976715665. Ctx: { TraceId: 01jqdzszpm4q6njtrfsztpbv8m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZiZWUxNTMtOTA5MGIxNTItYmVlYzNkOTEtZjhlYjc2NjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1051:2847], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 422 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 180 FinishTimeMs: 1743152546205 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 115 BuildCpuTimeUs: 65 HostName: "ghrun-mdsjyjltvm" NodeId: 2 CreateTimeMs: 1743152546192 } MaxMemoryUsage: 1048576 } 2025-03-28T09:02:26.205961Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jqdzszpm4q6njtrfsztpbv8m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZiZWUxNTMtOTA5MGIxNTItYmVlYzNkOTEtZjhlYjc2NjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1051:2847] 2025-03-28T09:02:26.206028Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2825] TxId: 281474976715665. Ctx: { TraceId: 01jqdzszpm4q6njtrfsztpbv8m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZiZWUxNTMtOTA5MGIxNTItYmVlYzNkOTEtZjhlYjc2NjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1052:2848], 2025-03-28T09:02:26.206059Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2825] TxId: 281474976715665. Ctx: { TraceId: 01jqdzszpm4q6njtrfsztpbv8m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZiZWUxNTMtOTA5MGIxNTItYmVlYzNkOTEtZjhlYjc2NjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1052:2848], 2025-03-28T09:02:26.206379Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2825] TxId: 281474976715665. Ctx: { TraceId: 01jqdzszpm4q6njtrfsztpbv8m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZiZWUxNTMtOTA5MGIxNTItYmVlYzNkOTEtZjhlYjc2NjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1052:2848], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 395 DurationUs: 4000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 157 FinishTimeMs: 1743152546206 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 115 BuildCpuTimeUs: 42 HostName: "ghrun-mdsjyjltvm" NodeId: 2 StartTimeMs: 1743152546202 CreateTimeMs: 1743152546192 } MaxMemoryUsage: 1048576 } 2025-03-28T09:02:26.206433Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715665. Ctx: { TraceId: 01jqdzszpm4q6njtrfsztpbv8m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZiZWUxNTMtOTA5MGIxNTItYmVlYzNkOTEtZjhlYjc2NjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1052:2848] 2025-03-28T09:02:26.209751Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1040:2825] TxId: 281474976715665. Ctx: { TraceId: 01jqdzszpm4q6njtrfsztpbv8m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZiZWUxNTMtOTA5MGIxNTItYmVlYzNkOTEtZjhlYjc2NjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 13072 DurationUs: 1743152544199301 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } ExecuterCpuTimeUs: 7885 StartTimeMs: 2007 FinishTimeMs: 1743152546206 Stages { StageId: 5 StageGuid: "91007561-b7b7b7e3-f7e7c66d-ecf9ab5d" Program: "(\n(return (lambda \'($1) (FromFlow (Take (ToFlow $1) (Uint64 \'\"1001\")))))\n)\n" ComputeActors { CpuTimeUs: 422 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 180 FinishTimeMs: 1743152546205 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 115 BuildCpuTimeUs: 65 HostName: "ghrun-mdsjyjltvm" NodeId: 2 CreateTimeMs: 1743152546192 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743152546198 } Stages { StageGuid: "5776016c-b951f001-c45ad1be-9cd16c0c" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($3) (Member $3 \'\"key\") (Member $3 \'\"value\")))\n (return (FromFlow (ExpandMap (Take (ToFlow $1) (Uint64 \'\"1001\")) $2)))\n))))\n)\n" BaseTimeMs: 1743152546198 } Stages { StageId: 3 StageGuid: "a7e121ef-97f4dcaa-618fc38f-8c63b31c" Program: "(\n(return (lambda \'($1) (FromFlow (WideTop (ToFlow $1) (Uint64 \'\"1001\") \'(\'(\'0 (Bool \'true)))))))\n)\n" BaseTimeMs: 1743152546198 } Stages { StageId: 2 StageGuid: "e06052a0-5570e354-30b63906-71dce532" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($3) (Member $3 \'\"key\") (Member $3 \'\"value\")))\n (return (FromFlow (ExpandMap (Take (ToFlow $1) (Uint64 \'\"1001\")) $2)))\n))))\n)\n" BaseTimeMs: 1743152546198 } Stages { StageId: 4 StageGuid: "1eb893b2-90030479-e451830e-85e7fffb" Program: "(\n(return (lambda \'($1 $2) (block \'(\n (let $3 (lambda \'($6 $7) (AsStruct \'(\'\"key\" $6) \'(\'\"value\" $7))))\n (let $4 (Sort (Extend (NarrowMap (ToFlow $1) $3) (NarrowMap (ToFlow $2) $3)) (Bool \'true) (lambda \'($8) (Member $8 \'\"key\"))))\n (let $5 (lambda \'($9) 
(Member $9 \'\"key\") (Member $9 \'\"value\")))\n (return (FromFlow (ExpandMap $4 $5)))\n))))\n)\n" BaseTimeMs: 1743152546198 } Stages { StageId: 6 StageGuid: "919ba64d-eb73ee7b-e2ebc67f-abbab391" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'\"1001\")) (lambda \'($2 $3) (AsStruct \'(\'\"key\" $2) \'(\'\"value\" $3)))))))\n)\n" BaseTimeMs: 1743152546198 } Stages { StageId: 1 StageGuid: "7ba8bb4b-89f1cf4f-d1b521ca-2bc26ad0" Program: "(\n(return (lambda \'($1) (FromFlow (WideTop (ToFlow $1) (Uint64 \'\"1001\") \'(\'(\'0 (Bool \'true)))))))\n)\n" BaseTimeMs: 1743152546198 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":17,\"Plans\":[{\"Node Type\":\"ResultSet\",\"PlanNodeId\":16,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":14}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":15,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":14,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":12}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":13,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":12,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Sort-Union\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Sort\",\"SortBy\":\"row.key\"},{\"Inputs\":[{\"ExternalPlanNodeId\":10},{\"ExternalPlanNodeId\":5}],\"Name\":\"Union\"}],\"PlanNodeId\":11,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":10,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Top\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":8}],\"Limit\":\"1001\",\"Name\":\"Top\",\"TopBy\":\"row.key\"}],\"PlanNodeId\":9,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":8,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":6}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":7,\"Plans\":[{\"Node Type\":\"TablePointLookup\",\"Operators\":[{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TablePointLookup\",\"Path\":\"\\/Root\\/table-1\",\"ReadColumns\":[\"key\",\"value\"],\"ReadLimit\":\"1001\",\"ReadRange\":[\"key (3)\"],\"Scan\":\"Sequential\",\"Table\":\"table-1\"}],\"PlanNodeId\":6,\"StageGuid\":\"\",\"Tables\":[\"table-1\"]}],\"StageGuid\":\"5776016c-b951f001-c45ad1be-9cd16c0c\",\"Stats\":{\"BaseTimeMs\":1743152546198,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"7ba8bb4b-89f1cf4f-d1b521ca-2bc26ad0\",\"Stats\":{\"BaseTimeMs\":1743152546198,\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"},{\"Node Type\":\"UnionAll\",\"PlanNodeId\":5,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Top\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Limit\":\"1001\",\"Name\":\"Top\",\"TopBy\":\"row.key\"}],\"PlanNodeId\":4,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":3,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"TablePointLookup\",\"Operators\":[{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No 
estimate\",\"Inputs\":[],\"Name\":\"TablePointLookup\",\"Path\":\"\\/Root\\/table-2\",\"ReadColumns\":[\"key\",\"value\"],\"ReadLimit\":\"1001\",\"ReadRange\":[\"key (4)\"],\"Scan\":\"Sequential\",\"Table\":\"table-2\"}],\"PlanNodeId\":1,\"StageGuid\":\"\",\"Tables\":[\"table-2\"]}],\"StageGuid\":\"e06052a0-5570e354-30b63906-71dce532\",\"Stats\":{\"BaseTimeMs\":1743152546198,\"FinishedTasks\":0,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"a7e121ef-97f4dcaa-618fc38f-8c63b31c\",\"Stats\":{\"BaseTimeMs\":1743152546198,\"FinishedTasks\":0,\"PhysicalStageId\":3,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"1eb893b2-90030479-e451830e-85e7fffb\",\"Stats\":{\"BaseTimeMs\":1743152546198,\"FinishedTasks\":0,\"PhysicalStageId\":4,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"SortColumns\":[\"key (Asc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"91007561-b7b7b7e3-f7e7c66d-ecf9ab5d\",\"Stats\":{\"BaseTimeMs\":1743152546198,\"ComputeNodes\":[{\"CpuTimeUs\":422,\"Tasks\":[{\"ComputeTimeUs\":115,\"FinishTimeMs\":1743152546205,\"Host\":\"ghrun-mdsjyjltvm\",\"InputBytes\":7,\"InputRows\":2,\"NodeId\":2,\"OutputBytes\":7,\"OutputRows\":2,\"TaskId\":6}]}],\"FinishedTasks\":0,\"PhysicalStageId\":5,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"919ba64d-eb73ee7b-e2ebc67f-abbab391\",\"Stats\":{\"BaseTimeMs\":1743152546198,\"FinishedTasks\":0,\"PhysicalStageId\":6,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 3894 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\002\022\013\010\213\003\020\376\017\030\303( \007" } } 2025-03-28T09:02:26.209873Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2825] TxId: 281474976715665. Ctx: { TraceId: 01jqdzszpm4q6njtrfsztpbv8m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZiZWUxNTMtOTA5MGIxNTItYmVlYzNkOTEtZjhlYjc2NjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:02:26.209941Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1040:2825] TxId: 281474976715665. Ctx: { TraceId: 01jqdzszpm4q6njtrfsztpbv8m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGZiZWUxNTMtOTA5MGIxNTItYmVlYzNkOTEtZjhlYjc2NjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.005187s ReadRows: 2 ReadBytes: 16 ru: 3 rate limiter was not found force flag: 1 { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } >> DataShardWrite::CancelImmediate [GOOD] >> DataShardWrite::DeletePrepared+Volatile >> TLocksTest::SetLockFail [GOOD] >> TLocksTest::SetEraseSet >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction >> ExternalBlobsMultipleChannels::SingleChannel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] Test command err: Trying to start YDB, gRPC: 24393, MsgBus: 11518 2025-03-28T08:59:12.102600Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782344229855314:2228];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:12.102777Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014e7/r3tmp/tmpi58UVu/pdisk_1.dat 2025-03-28T08:59:16.378593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:16.383121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:16.694674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24393, node 1 2025-03-28T08:59:19.380648Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782344229855314:2228];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:19.442988Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:19.444464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:19.464006Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:19.483917Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:19.483937Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:19.483949Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:19.484056Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11518 TClient is connected to server localhost:11518 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:23.597047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.607088Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:23.620687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.828343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T08:59:24.014853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T08:59:24.107331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:31.398139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782425834235623:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:31.398327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:33.008148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.131190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.271204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:33.271222Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:33.425741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.552132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.699593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:33.821583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:34.388305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782438719138097:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.388385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.390172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782438719138102:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:34.428295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:34.484948Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T08:59:34.487856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782438719138104:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T08:59:34.590881Z node 1 :TX_PROXY ERROR: Actor# [1:7486782438719138160:3527] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:39.329534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:11.067573Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152471016, txId: 281474976710672] shutting down 2025-03-28T09:01:11.329959Z node 1 :RPC_REQUEST WARN: Client lost 2025-03-28T09:01:13.178109Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152472462, txId: 281474976710674] shutting down 2025-03-28T09:01:14.705869Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152474628, txId: 281474976710676] shutting down 2025-03-28T09:01:16.318380Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152476311, txId: 281474976710678] shutting down 2025-03-28T09:01:17.691752Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152477674, txId: 281474976710680] shutting down 2025-03-28T09:01:19.040738Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152479022, txId: 281474976710682] shutting down 2025-03-28T09:01:20.619218Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152480446, txId: 281474976710684] shutting down 2025-03-28T09:01:22.148520Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152482125, txId: 281474976710686] shutting down 2025-03-28T09:01:23.686475Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152483662, txId: 281474976710688] shutting down 2025-03-28T09:01:25.565495Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152485538, txId: 281474976710690] shutting down 2025-03-28T09:01:26.910082Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152486886, txId: 281474976710692] shutting down assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1949767B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1995C4FF 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1903CBE8 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x1904FBF7 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1904FBF7 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1904FBF7 6. 
/-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x1904FBF7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x1904FBF7 8. /-S/contrib/libs/cxxsup ... rror, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:5:13: Error: At function: SqlProjectItem
:6:22: Error: At function: ==
:6:22: Error: Uncompatible types in compare: Optional '==' Int32
:5:13: Error: At function: SqlProjectItem
:7:22: Error: At function: !=
:7:22: Error: Uncompatible types in compare: Optional '!=' Int32
:5:13: Error: At function: SqlProjectItem
:8:22: Error: At function: >
:8:22: Error: Uncompatible types in compare: Optional '>' Int32
:5:13: Error: At function: SqlProjectItem
:9:22: Error: At function: <=
:9:22: Error: Uncompatible types in compare: Optional '<=' Int32 2025-03-28T09:02:03.894134Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486783080021758068:2543], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional 2025-03-28T09:02:03.904619Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2NjYTc1ZGYtOGFkMTJjNjEtNGZlODZjMTUtOTdjZWU0OTY=, ActorId: [2:7486783071431823341:2527], ActorState: ExecuteState, TraceId: 01jqdzsafaeaqdxf7dqn5s98e5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:6:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject
:6:13: Error: At function: SqlProjectItem
:7:22: Error: At function: ==
:7:22: Error: Uncompatible types in compare: Optional '==' Optional
:6:13: Error: At function: SqlProjectItem
:8:22: Error: At function: !=
:8:22: Error: Uncompatible types in compare: Optional '!=' Optional
:6:13: Error: At function: SqlProjectItem
:9:22: Error: At function: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional Trying to start YDB, gRPC: 3401, MsgBus: 17716 2025-03-28T09:02:14.381933Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486783126041036273:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:14.386732Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014e7/r3tmp/tmpGLfl9s/pdisk_1.dat 2025-03-28T09:02:15.487364Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:15.740645Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:15.859113Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:15.859237Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:15.872015Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3401, node 3 2025-03-28T09:02:16.075306Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:16.075336Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:16.075343Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:16.075463Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17716 TClient is connected to server localhost:17716 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:17.148791Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.182449Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:17.196282Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:17.310857Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.818871Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:02:17.928216Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.382813Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486783126041036273:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:19.394926Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:20.803817Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783151810841840:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:20.803908Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:20.861204Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:20.910442Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:20.988531Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:21.071262Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:21.153424Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:21.321625Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:21.460717Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783156105809663:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:21.460799Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:21.461210Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783156105809668:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:21.465838Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:21.487149Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486783156105809670:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:02:21.582287Z node 3 :TX_PROXY ERROR: Actor# [3:7486783156105809726:3468] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> KqpLimits::QSReplySize-useSink [GOOD] >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] Test command err: 2025-03-28T09:02:15.815645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:02:15.816065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:15.816118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001676/r3tmp/tmpgYeeei/pdisk_1.dat 2025-03-28T09:02:16.410489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:02:16.531687Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:16.591046Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:02:16.592012Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:02:16.592248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:16.592360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:16.603802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:16.696567Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:02:16.696643Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:02:16.696832Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:02:16.893835Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:02:16.893939Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:02:16.894580Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:02:16.894713Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:02:16.895047Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:02:16.895234Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:02:16.895328Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:02:16.895622Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:02:16.897058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:16.898065Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:02:16.898148Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:02:16.955564Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:02:16.956766Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:02:16.957576Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:02:16.958012Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:672:2573] 2025-03-28T09:02:16.958281Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:17.008443Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:672:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:02:17.008824Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:02:17.009204Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:674:2575] 2025-03-28T09:02:17.009393Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:17.018077Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:674:2575]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:02:17.018745Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:17.018863Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:17.020513Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:02:17.020642Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:02:17.020709Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:02:17.021188Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:17.021427Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:17.021528Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:705:2573] in generation 1 2025-03-28T09:02:17.021924Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:17.021994Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:17.023343Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:02:17.023411Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 
72075186224037889 2025-03-28T09:02:17.023451Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:02:17.023763Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:17.023867Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:17.023927Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:706:2575] in generation 1 2025-03-28T09:02:17.035340Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:17.070418Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:02:17.070620Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:17.070779Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:709:2594] 2025-03-28T09:02:17.070839Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:17.070880Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:02:17.070914Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:17.071168Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:672:2573], Recipient [1:672:2573]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:17.071214Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:17.071505Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:17.071539Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-28T09:02:17.071591Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:17.071641Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:710:2595] 2025-03-28T09:02:17.071681Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:02:17.071705Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-28T09:02:17.071728Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:02:17.071891Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:02:17.071984Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:02:17.072094Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:674:2575], Recipient [1:674:2575]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:17.072124Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:17.072288Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:17.072329Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:17.072373Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:02:17.072404Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no 
attached operations 2025-03-28T09:02:17.072438Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:02:17.072471Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:02:17.072540Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:17.073009Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:687:2582], Recipient [1:672:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:17.073057Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:17.073098Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:665:2569], serverId# [1:687:2582], sessionId# [0:0:0] 2025-03-28T09:02:17.073149Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-28T09:02:17.073215Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-28T09:02:17.073343Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404 ... "ydb://session/3?node_id=2&id=M2E4ODY0MDAtZTg4YmEwOTktOGJiMWQxNGMtYzY4Yzc4OTE=" } RequestContext { key: "TraceId" value: "01jqdzt145e74mx1e5k2my0q4t" } EnableSpilling: false DisableMetering: true 2025-03-28T09:02:27.113793Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:969:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqdzt145e74mx1e5k2my0q4t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2E4ODY0MDAtZTg4YmEwOTktOGJiMWQxNGMtYzY4Yzc4OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037888, tasks: [1], lockTxId: (empty maybe), locks: , immediate: 1 2025-03-28T09:02:27.113899Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:969:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqdzt145e74mx1e5k2my0q4t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2E4ODY0MDAtZTg4YmEwOTktOGJiMWQxNGMtYzY4Yzc4OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ExecuteDatashardTransaction traceId.verbosity: 0 2025-03-28T09:02:27.114011Z node 2 :KQP_EXECUTER INFO: ActorId: [2:969:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqdzt145e74mx1e5k2my0q4t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2E4ODY0MDAtZTg4YmEwOTktOGJiMWQxNGMtYzY4Yzc4OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-28T09:02:27.114063Z node 2 :KQP_EXECUTER TRACE: ActorId: [2:969:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqdzt145e74mx1e5k2my0q4t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2E4ODY0MDAtZTg4YmEwOTktOGJiMWQxNGMtYzY4Yzc4OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-03-28T09:02:27.114110Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:969:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqdzt145e74mx1e5k2my0q4t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2E4ODY0MDAtZTg4YmEwOTktOGJiMWQxNGMtYzY4Yzc4OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2025-03-28T09:02:27.114177Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:969:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqdzt145e74mx1e5k2my0q4t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2E4ODY0MDAtZTg4YmEwOTktOGJiMWQxNGMtYzY4Yzc4OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-03-28T09:02:27.114232Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:969:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqdzt145e74mx1e5k2my0q4t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2E4ODY0MDAtZTg4YmEwOTktOGJiMWQxNGMtYzY4Yzc4OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-03-28T09:02:27.114651Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:973:2782], Recipient [2:927:2748]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:27.114728Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:27.114785Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:972:2781], serverId# [2:973:2782], sessionId# [0:0:0] 2025-03-28T09:02:27.115057Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:969:2765], Recipient [2:927:2748]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 969 RawX2: 8589937357 } TxBody: " \0018\000`\200\200\200\005j\246\006\010\001\022\225\006\010\001\022\024\n\022\t\311\003\000\000\000\000\000\000\021\315\n\000\000\002\000\000\000\032\256\002\010\240\215\006\022\207\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? 
\004\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\0 2025-03-28T09:02:27.115101Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:02:27.115232Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [2:927:2748], Recipient [2:927:2748]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-03-28T09:02:27.115270Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-03-28T09:02:27.115344Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:27.115741Z node 2 :TX_DATASHARD TRACE: TxId: 281474976715662, shard 72075186224037888, task: 1, meta: Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\004\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } 2025-03-28T09:02:27.115817Z node 2 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, task: 1, write point (Uint32 : 4) 2025-03-28T09:02:27.115875Z node 2 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-03-28T09:02:27.116192Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CheckDataTx 2025-03-28T09:02:27.116263Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-03-28T09:02:27.116308Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CheckDataTx 2025-03-28T09:02:27.116350Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:27.116382Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:02:27.116426Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-03-28T09:02:27.116484Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715662] at 72075186224037888 2025-03-28T09:02:27.116526Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-03-28T09:02:27.116552Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:27.116575Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-03-28T09:02:27.116601Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ExecuteKqpDataTx 2025-03-28T09:02:27.116647Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715661 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# 
v1500/18446744073709551615 2025-03-28T09:02:27.116706Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715662] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191926 2025-03-28T09:02:27.116908Z node 2 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-28T09:02:27.116984Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:02:27.117019Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-03-28T09:02:27.117055Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:02:27.117096Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-03-28T09:02:27.117165Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:02:27.117202Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:02:27.117238Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:02:27.117275Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:02:27.117321Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-03-28T09:02:27.117345Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:02:27.117376Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2025-03-28T09:02:27.128441Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:27.128516Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-03-28T09:02:27.128572Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-28T09:02:27.128667Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:27.128999Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:969:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqdzt145e74mx1e5k2my0q4t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2E4ODY0MDAtZTg4YmEwOTktOGJiMWQxNGMtYzY4Yzc4OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-03-28T09:02:27.129162Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:969:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqdzt145e74mx1e5k2my0q4t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2E4ODY0MDAtZTg4YmEwOTktOGJiMWQxNGMtYzY4Yzc4OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:02:27.129214Z node 2 :KQP_EXECUTER TRACE: ActorId: [2:969:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqdzt145e74mx1e5k2my0q4t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2E4ODY0MDAtZTg4YmEwOTktOGJiMWQxNGMtYzY4Yzc4OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Terminate, become ZombieState 2025-03-28T09:02:27.129299Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:969:2765] TxId: 281474976715662. Ctx: { TraceId: 01jqdzt145e74mx1e5k2my0q4t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2E4ODY0MDAtZTg4YmEwOTktOGJiMWQxNGMtYzY4Yzc4OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreeze [GOOD] Test command err: 2025-03-28T09:02:17.844602Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783140635126740:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:17.845007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f3c/r3tmp/tmp8Ag2j5/pdisk_1.dat 2025-03-28T09:02:18.593131Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:18.598920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:18.598994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:18.607512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3467 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:19.046547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:19.063382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:19.226305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:19.255179Z node 1 :TX_PROXY ERROR: Actor# [1:7486783149225061932:2398] txid# 281474976710660, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-03-28T09:02:19.260724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:19.289114Z node 1 :TX_PROXY ERROR: Actor# [1:7486783149225061972:2432] txid# 281474976710662, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-03-28T09:02:22.612631Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783162870063000:2147];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:22.622014Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f3c/r3tmp/tmp1sxoco/pdisk_1.dat 2025-03-28T09:02:22.945164Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:22.955720Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:22.955799Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:22.963675Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23078 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:23.238241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:23.259487Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:23.266074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:02:23.406546Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:02:23.447048Z node 2 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710660: 2025-03-28T09:02:23.447336Z node 2 :TX_PROXY ERROR: Actor# [2:7486783167165030926:2393] txid# 281474976710660 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-03-28T09:02:23.447421Z node 2 :TX_PROXY ERROR: Actor# [2:7486783167165030926:2393] txid# 281474976710660 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-03-28T09:02:23.447446Z node 2 :TX_PROXY ERROR: Actor# [2:7486783167165030926:2393] txid# 281474976710660 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-03-28T09:02:23.452780Z node 2 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710661: 2025-03-28T09:02:23.453001Z node 2 :TX_PROXY ERROR: Actor# [2:7486783167165030934:2398] txid# 281474976710661 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-03-28T09:02:23.453061Z node 2 :TX_PROXY ERROR: Actor# [2:7486783167165030934:2398] txid# 281474976710661 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-03-28T09:02:23.453076Z node 2 :TX_PROXY ERROR: Actor# [2:7486783167165030934:2398] txid# 281474976710661 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2025-03-28T09:02:23.466320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::PartBloomFilter [GOOD] Test command err: 2025-03-28T09:02:17.798611Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783140504006437:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:17.799135Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f3e/r3tmp/tmpQNp6Kj/pdisk_1.dat 2025-03-28T09:02:18.468343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:18.468439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:18.474711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:18.484952Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8660 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:18.981127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:18.999244Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-28T09:02:19.042429Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152539037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "A" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710663 CreateStep: 1743152539121 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "B" PathId: 4 Sche... 
(TRUNCATED) 2025-03-28T09:02:19.082903Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-03-28T09:02:22.232632Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783162612120740:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:22.232666Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f3e/r3tmp/tmpUavxxN/pdisk_1.dat 2025-03-28T09:02:22.663389Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:22.676780Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:22.676876Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:22.684039Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15323 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:23.064304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:23.076225Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:23.097419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:23.920597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710719:0, at schemeshard: 72057594046644480 waiting... 
>> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] >> ExternalBlobsMultipleChannels::Simple >> TExternalTableTest::DropTableTwice [GOOD] >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] >> TFlatTest::SplitEmptyAndWrite [GOOD] >> TFlatTest::SplitBoundaryRead >> TTransferTests::Create [GOOD] >> TTransferTests::CreateSequential |75.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut/ydb-core-mind-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:02:28.510881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:02:28.510972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:28.511009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:02:28.511043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:02:28.511087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:02:28.511121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:02:28.511181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:28.511297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:02:28.511616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:28.671807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:02:28.671883Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:28.681301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:28.681768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:02:28.681929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2025-03-28T09:02:28.710307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:02:28.710549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:02:28.711316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:28.711522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:28.721418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:28.724429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:28.724506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:28.724869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:02:28.724928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:28.724988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:02:28.725686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:02:28.733299Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 2025-03-28T09:02:28.882128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:28.882443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:28.886751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:02:28.887083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:02:28.887163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:28.895755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:28.895923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:02:28.896124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:28.896186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:02:28.896221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2025-03-28T09:02:28.896257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:02:28.903680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:28.903753Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:02:28.903790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:02:28.911839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:28.911908Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:28.911986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:28.912070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:02:28.915886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:02:28.927525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:02:28.927737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:02:28.928742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:28.928900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:28.928954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:28.929256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:02:28.929307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:28.929457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:28.929527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:02:28.931977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:28.932043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:28.932261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:28.932307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:02:28.932383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:28.932431Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:02:28.932526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:28.932561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:28.932597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:28.932624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:28.932661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:02:28.932722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:28.932756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:02:28.932789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:02:28.932857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:28.932932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:02:28.932966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:02:28.935776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
LocalPathId: 3] was 2 2025-03-28T09:02:29.007766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:02:29.008179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:02:29.009905Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:29.009942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:29.010100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:02:29.010200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:02:29.010317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:29.010348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-28T09:02:29.010381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-03-28T09:02:29.010404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-03-28T09:02:29.010896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.010941Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:02:29.011040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:02:29.011088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:02:29.011140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:02:29.011175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:02:29.011218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-28T09:02:29.011264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:02:29.011326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:02:29.011365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:02:29.011437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:02:29.011476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-28T09:02:29.011541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-03-28T09:02:29.011584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-28T09:02:29.012282Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:02:29.012353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:02:29.012379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:02:29.012435Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-03-28T09:02:29.012473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:02:29.013808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:02:29.013884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:02:29.013910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:02:29.013938Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-28T09:02:29.013963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:02:29.014043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-28T09:02:29.015944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:02:29.017170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:02:29.017350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:02:29.017383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:02:29.017856Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:02:29.017943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:02:29.017991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:331:2322] TestWaitNotification: OK eventTxId 102 2025-03-28T09:02:29.018509Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:02:29.018746Z 
node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 244us result status StatusSuccess 2025-03-28T09:02:29.019037Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-03-28T09:02:29.022278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:29.022611Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-03-28T09:02:29.022787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalTable Propose: opId# 103:0, path# /MyRoot/UniqueName, ReplaceIfExists:1 2025-03-28T09:02:29.022989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, at schemeshard: 72057594046678944 2025-03-28T09:02:29.025505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable" TxId: 103 SchemeshardId: 
72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-03-28T09:02:29.025664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, operation: CREATE EXTERNAL TABLE, path: /MyRoot/UniqueName TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-28T09:02:29.025990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:02:29.026041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:02:29.026512Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:02:29.026616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:02:29.026651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:339:2330] TestWaitNotification: OK eventTxId 103 |75.3%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |75.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut >> Cdc::KeysOnlyLog[PqRunner] |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> TKesusTest::TestReleaseLockFailure >> ExternalBlobsMultipleChannels::WithCompaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::DropTableTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:02:28.832671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:02:28.832755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:28.832790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:02:28.832828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:02:28.832868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:02:28.832907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:02:28.832977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:28.833051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:02:28.833349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:29.019096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:02:29.019167Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:29.031103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:29.031507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:02:29.031672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:02:29.064149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:02:29.064397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:02:29.065032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:29.065220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:29.081951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:29.091464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:29.091567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:29.092013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:02:29.092081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:29.092125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:02:29.092809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.100104Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 2025-03-28T09:02:29.228170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:29.228494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.228712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:02:29.228928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:02:29.228989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.232130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:29.232259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:02:29.232467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.232542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:02:29.232577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:02:29.232617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:02:29.234710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.234776Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:02:29.234809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:02:29.236797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.236845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.236886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:29.236943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:02:29.240674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:02:29.242857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:02:29.243025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:02:29.244043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:29.244182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2025-03-28T09:02:29.244246Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:29.244552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:02:29.244626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:29.244794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:29.244873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:02:29.247109Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:29.247161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:29.247356Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:29.247409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:02:29.247495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.247541Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:02:29.247647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:29.247690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:29.247745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:29.247779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:29.247814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:02:29.247850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:29.247883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:02:29.247942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:02:29.248010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:29.248046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:02:29.248076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:02:29.251327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-03-28T09:02:29.389705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:29.389839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:29.389898Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalTable TPropose opId# 103:0 HandleReply TEvOperationPlan: step# 5000004 2025-03-28T09:02:29.390118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:02:29.390257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-03-28T09:02:29.390432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:29.390494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:02:29.390529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:02:29.398618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:02:29.399352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 FAKE_COORDINATOR: Erasing txId 103 2025-03-28T09:02:29.402986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:29.403037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:29.403212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:02:29.403322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:02:29.403545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:29.403601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-03-28T09:02:29.403725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-03-28T09:02:29.403757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-28T09:02:29.404101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at 
schemeshard: 72057594046678944 2025-03-28T09:02:29.404150Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-28T09:02:29.404279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:02:29.404317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:02:29.404380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:02:29.404430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:02:29.404468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-03-28T09:02:29.404531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:02:29.404571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:02:29.404609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:02:29.404721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:02:29.404759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:02:29.404798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2025-03-28T09:02:29.404834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-03-28T09:02:29.404879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-28T09:02:29.404903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-03-28T09:02:29.411585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:02:29.411740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:02:29.411787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:02:29.411829Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-28T09:02:29.411869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:02:29.412566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:02:29.412615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:02:29.412683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 
3 2025-03-28T09:02:29.413232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:02:29.413311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:02:29.413341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:02:29.413371Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-28T09:02:29.413472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:29.419889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:02:29.420035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:02:29.420068Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:02:29.420101Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-28T09:02:29.420151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:02:29.420253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-03-28T09:02:29.425108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:02:29.431682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:02:29.431852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:02:29.432528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-28T09:02:29.432837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:02:29.432889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:02:29.433348Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:02:29.433450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:02:29.433486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:368:2359] 
TestWaitNotification: OK eventTxId 103 2025-03-28T09:02:29.434026Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:02:29.434266Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 224us result status StatusPathDoesNotExist 2025-03-28T09:02:29.434457Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:01:02.875774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:01:02.875855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:01:02.875887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:01:02.875917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:01:02.875954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:01:02.875976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:01:02.876033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:01:02.876116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:01:02.876392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:01:03.024462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:01:03.024511Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-03-28T09:01:03.111393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:01:03.111749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:01:03.111890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:01:03.154825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:01:03.155791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:01:03.157963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:01:03.158392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:01:03.175360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:01:03.176428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:01:03.176487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:01:03.176633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:01:03.176676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:01:03.176708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:01:03.176774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:01:03.195503Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:01:03.791248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:01:03.791460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:03.791669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:01:03.791925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:01:03.791987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:03.800595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:01:03.800722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:01:03.800921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:03.800988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:01:03.801049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:01:03.801086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:01:03.807525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:03.807596Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:01:03.807634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:01:03.813166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:03.813220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:03.813256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:01:03.813296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:01:03.833771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:01:03.855367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:01:03.855564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:01:03.856544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:01:03.856672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:01:03.856723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:01:03.856998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:01:03.857051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:01:03.857186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:01:03.857266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2025-03-28T09:01:03.863515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:01:03.863572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:01:03.863733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:01:03.863815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:01:03.864198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:03.864251Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:01:03.864338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:01:03.864372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:01:03.864407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:01:03.864439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:01:03.864473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:01:03.864512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:01:03.864544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:01:03.864583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:01:03.864662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:01:03.864705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:01:03.864735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:01:03.866607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:01:03.866755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:01:03.866790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:29.469198Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:02:29.469363Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.521928Z node 27 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [27:125:2151] sender: [27:239:2058] recipient: [27:15:2062] 2025-03-28T09:02:29.548595Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:29.548911Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.549183Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:02:29.549480Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:02:29.549565Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.560486Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:29.560662Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:02:29.560976Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.561083Z node 27 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:02:29.561169Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:02:29.561247Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:02:29.563716Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.563802Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:02:29.563885Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:02:29.575611Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.575688Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.575782Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:29.575899Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 
1 ready parts: 1/1 2025-03-28T09:02:29.576148Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:02:29.578362Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:02:29.578722Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:02:29.579950Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:29.580148Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 115964119147 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:29.580248Z node 27 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:29.580651Z node 27 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:02:29.580772Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:29.581132Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:29.581272Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:02:29.592457Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:29.592556Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:29.592852Z node 27 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:29.592946Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [27:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:02:29.593085Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.593167Z node 27 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:02:29.593412Z node 27 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:29.593490Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:29.593574Z node 27 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:29.593638Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-03-28T09:02:29.593718Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:02:29.593797Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:29.593870Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:02:29.593937Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:02:29.594047Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:29.594123Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:02:29.594191Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:02:29.600156Z node 27 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:29.600400Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:29.600476Z node 27 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:02:29.600556Z node 27 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:02:29.600635Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:29.600806Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:02:29.612048Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:02:29.612869Z node 27 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:02:29.614300Z node 27 :TX_PROXY DEBUG: actor# [27:269:2260] Bootstrap 2025-03-28T09:02:29.734109Z node 27 :TX_PROXY DEBUG: actor# [27:269:2260] Become StateWork (SchemeCache [27:274:2265]) 2025-03-28T09:02:29.742255Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:29.744130Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.744360Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: 
"modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2025-03-28T09:02:29.745122Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2025-03-28T09:02:29.751683Z node 27 :TX_PROXY DEBUG: actor# [27:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:02:29.759607Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:29.759971Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-03-28T09:02:29.760903Z node 27 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:02:25.991756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:02:25.991872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:25.991922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:02:25.991971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:02:25.992034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:02:25.992068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:02:25.992133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:25.992215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:02:25.992599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:26.106778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:02:26.106844Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-03-28T09:02:26.126994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:26.127308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:02:26.127465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:02:26.142711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:02:26.142954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:02:26.143684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:26.143936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:26.146228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:26.147611Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:26.147678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:26.147857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:02:26.147927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:26.147971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:02:26.148065Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:02:26.175876Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:02:26.311512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:26.311743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:26.311960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:02:26.312202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:02:26.312264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:26.314982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:26.315144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:02:26.315392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:26.315447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:02:26.315483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:02:26.315514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:02:26.317716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:26.317774Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:02:26.317847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:02:26.319938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:26.319992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:26.320047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:26.320100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:02:26.329362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:02:26.331744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:02:26.331945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:02:26.333000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:26.333152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:26.333199Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:26.333489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:02:26.333545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:26.333713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:26.333836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2025-03-28T09:02:26.336175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:26.336223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:26.336437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:26.336492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:02:26.336867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:26.336911Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:02:26.337002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:26.337035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:26.337074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:26.337103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:26.337136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:02:26.337182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:26.337216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:02:26.337247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:02:26.337316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:26.337348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:02:26.337392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:02:26.339329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:26.339432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:26.339471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
94046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:02:29.359260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-03-28T09:02:29.359294Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2025-03-28T09:02:29.359325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:29.360330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:02:29.360402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:02:29.360425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-03-28T09:02:29.360449Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 2025-03-28T09:02:29.360479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-03-28T09:02:29.360566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-03-28T09:02:29.363059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2025-03-28T09:02:29.363168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2025-03-28T09:02:29.364462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:29.364576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:29.364620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2025-03-28T09:02:29.364703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:29.364729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-03-28T09:02:29.364761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 134 2025-03-28T09:02:29.366194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, 
at schemeshard: 72057594046678944, cookie: 175 2025-03-28T09:02:29.366577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-03-28T09:02:29.367716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.367760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:02:29.367864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 134 -> 135 2025-03-28T09:02:29.368009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:29.368090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 FAKE_COORDINATOR: Erasing txId 175 2025-03-28T09:02:29.370230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:29.370300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:29.370448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-03-28T09:02:29.370545Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:29.370574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-03-28T09:02:29.370615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-03-28T09:02:29.370930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.370968Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-03-28T09:02:29.370994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 135 -> 240 2025-03-28T09:02:29.371934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:02:29.372029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:02:29.372069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-03-28T09:02:29.372106Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-03-28T09:02:29.372137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:29.373683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 
18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:02:29.373785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:02:29.373821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-03-28T09:02:29.373851Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-03-28T09:02:29.373882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-03-28T09:02:29.373959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-03-28T09:02:29.376146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.376202Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 175:0 ProgressState 2025-03-28T09:02:29.376292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-03-28T09:02:29.376323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-28T09:02:29.376354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-03-28T09:02:29.376395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-28T09:02:29.376427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2025-03-28T09:02:29.376465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-28T09:02:29.376494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-03-28T09:02:29.376524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-03-28T09:02:29.376590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-03-28T09:02:29.377035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:02:29.377071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-03-28T09:02:29.377122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-03-28T09:02:29.377260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:02:29.377292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-03-28T09:02:29.377352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:29.378508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-03-28T09:02:29.388465Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-03-28T09:02:29.394752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:02:29.394891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-03-28T09:02:29.396027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-03-28T09:02:29.396068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-03-28T09:02:29.397457Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-03-28T09:02:29.397567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-03-28T09:02:29.397597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2596:4587] TestWaitNotification: OK eventTxId 175 >> KqpLimits::DataShardReplySizeExceeded [GOOD] |75.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ut/ydb-core-security-ut |75.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut |75.4%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 9875, MsgBus: 15340 2025-03-28T08:59:05.758324Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782313953331968:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:05.758604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b8/r3tmp/tmpT8NcEV/pdisk_1.dat 2025-03-28T08:59:12.067546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.067724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:12.085632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9875, node 1 2025-03-28T08:59:12.259137Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782313953331968:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:12.259376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:12.259479Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.383043Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T08:59:13.430879Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-03-28T08:59:13.481495Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T08:59:13.507089Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:13.507106Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:13.507112Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:13.507726Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15340 TClient is connected to server localhost:15340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:23.601948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.675959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:24.161881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:25.400984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:25.957535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:27.927266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:27.927742Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:31.731937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782425622483513:2424], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T08:59:31.732005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T08:59:33.502048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T08:59:34.246371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T08:59:34.321751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T08:59:34.391861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T08:59:34.532446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T08:59:34.615810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T08:59:34.756746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782438507385951:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T08:59:34.756836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T08:59:34.757242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782438507385956:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T08:59:34.761171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T08:59:34.781200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782438507385958:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T08:59:34.865358Z node 1 :TX_PROXY ERROR: Actor# [1:7486782438507386015:3516] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T08:59:38.771456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T08:59:50.767813Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486782507226863796:2618], SessionActorId: [1:7486782507226863777:2618], statusCode=PRECONDITION_FAILED. Issue=
: Error: Memory limit exception, current limit is 1024 bytes., code: 2029 . sessionActorId=[1:7486782507226863777:2618]. isRollback=0
2025-03-28T08:59:50.778352Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzBiOWEyMmItZDg4MjViMjktNDZiNjQwNDUtZDY1YjdjNTU=, ActorId: [1:7486782507226863777:2618], ActorState: ExecuteState, TraceId: 01jqdzn7zs5w2b81m4hbvv5m4j, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7486782507226863797:2618] from: [1:7486782507226863796:2618]
2025-03-28T08:59:50.778454Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486782507226863797:2618] TxId: 281474976710672. Ctx: { TraceId: 01jqdzn7zs5w2b81m4hbvv5m4j, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzBiOWEyMmItZDg4MjViMjktNDZiNjQwNDUtZDY1YjdjNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Memory limit exception, current limit is 1024 bytes., code: 2029 }
2025-03-28T08:59:50.778600Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486782507226863805:2631], TxId: 281474976710672, task: 5. Ctx: { CustomerSuppliedId : . TraceId : 01jqdzn7zs5w2b81m4hbvv5m4j. SessionId : ydb://session/3?node_id=1&id=YzBiOWEyMmItZDg4MjViMjktNDZiNjQwNDUtZDY1YjdjNTU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486782507226863797:2618], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution }
2025-03-28T08:59:50.778979Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzBiOWEyMmItZDg4MjViMjktNDZiNjQwNDUtZDY1YjdjNTU=, ActorId: [1:7486782507226863777:2618], ActorState: ExecuteState, TraceId: 01jqdzn7zs5w2b81m4hbvv5m4j, Create QueryResponse for error on request, msg:
: Error: Memory limit exception, current limit is 1024 bytes., code: 2029
Trying to start YDB, gRPC: 63261, MsgBus: 26837
2025-03-28T08:59:53.354449Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782520341645166:2211];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b8/r3tmp/tmpXulwwB/pdisk_1.dat
2025-03-28T08:59:53.416595Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-28T08:59:53.660555Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T08:59:53.715543Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T08:59:53.715632Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T08:59:53.725845Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 63261, node 2
2025-03-28T08:59:54.386888Z node 2 :NET_CLASSIFIER WARN: distribu ... 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:00:25.940533Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:00:26.116165Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:00:26.341699Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:00:26.648735Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782662065435564:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:00:26.648818Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:00:26.649437Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486782662065435569:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:00:26.660477Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:00:26.737128Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782662065435572:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:00:26.816969Z node 3 :TX_PROXY ERROR: Actor# [3:7486782662065435628:3477] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:00:32.327009Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T09:00:32.327036Z node 3 :IMPORT WARN: Table profiles were not loaded
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=3&id=ZDZkMjFjMC1iNTNiZTQ0ZS00MDkwNWU1Zi1lYmMyZDk5MQ== Trying to start YDB, gRPC: 18077, MsgBus: 30834 2025-03-28T09:01:24.349494Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486782914134762212:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:24.376813Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014b8/r3tmp/tmphawbpz/pdisk_1.dat 2025-03-28T09:01:25.877318Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:25.897226Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:25.922567Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:25.922918Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:25.928149Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18077, node 4 2025-03-28T09:01:26.180094Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:01:26.180121Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:01:26.180129Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:01:26.180291Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30834 TClient is connected to server localhost:30834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:01:27.548096Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:01:27.768831Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:01:28.005690Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:01:29.081792Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:01:29.845082Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486782914134762212:2064];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:01:29.845146Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:01:30.011126Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:01:34.974538Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782957084436984:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:01:35.004386Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:01:35.140036Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-28T09:01:35.347257Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-28T09:01:35.559559Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-28T09:01:35.913976Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-28T09:01:36.169655Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-28T09:01:36.354396Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-28T09:01:36.916015Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782965674372145:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:01:36.916096Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:01:36.918740Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486782965674372150:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:01:36.928432Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-28T09:01:36.983828Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486782965674372152:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-28T09:01:37.074750Z node 4 :TX_PROXY ERROR: Actor# [4:7486782969969339506:3501] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:01:40.855070Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T09:01:40.855095Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:01:41.951874Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:02:28.030280Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZDNkZDViZDgtODA3Y2RiOTEtMmI3Y2UyZWYtMWNmMmQ3ZDQ=, ActorId: [4:7486783176127771283:2734], ActorState: ExecuteState, TraceId: 01jqdzsze183hx4emq0vhpcnrr, Create QueryResponse for error on request, msg:
: Error: Intermediate data materialization exceeded size limit (88240925 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data., code: 2013
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD]
Test command err:
2025-03-28T09:02:16.019682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:02:16.020209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:16.020270Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00167e/r3tmp/tmpNYp7rt/pdisk_1.dat 2025-03-28T09:02:17.089359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:02:17.128067Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:17.173084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:17.173232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:17.184754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:17.288720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:17.369927Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:02:17.371222Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:02:17.371758Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:02:17.372028Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:17.427571Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:02:17.428386Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:17.428558Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:17.430412Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:02:17.430506Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:02:17.430563Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:02:17.431052Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:17.431190Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:17.431289Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:02:17.443312Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:17.491143Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:02:17.491346Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:17.491506Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:02:17.491551Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:17.491607Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:02:17.491666Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:17.491870Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:17.491932Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:17.492221Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:02:17.492304Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:02:17.492715Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:17.492759Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:17.492826Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:02:17.492865Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:02:17.492899Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:02:17.492931Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:02:17.492972Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:17.493104Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:17.493153Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:17.493203Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:02:17.493268Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:02:17.493307Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:02:17.493394Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:17.493619Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:02:17.493685Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:02:17.493773Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:02:17.493821Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:02:17.493874Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:02:17.493913Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-28T09:02:17.493950Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:02:17.494254Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:02:17.494316Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:02:17.494373Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:02:17.494408Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:02:17.494457Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:02:17.494502Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:02:17.494536Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:02:17.494570Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:02:17.494593Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:02:17.496757Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:02:17.496837Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:17.507812Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:17.507886Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:02:17.507938Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:02:17.507998Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:02:17.508089Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:02:17.684951Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:17.685011Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:17.685049Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:02:17.685553Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:02:17.685622Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:02:17.685764Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:02:17.685825Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:02:17.685886Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:02:17.685926Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:02:17.696256Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:02:17.696406Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:17.697326Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:17.697374Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:17.697438Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:1 ... 89:2881], 2025-03-28T09:02:29.164756Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jqdzt2tn54j4j31ewzayh8ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmUyYjU4Y2QtYzkwNTkzYmQtZWQ1MjljMjItNzgyYzg0MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1090:2882], CA [2:1089:2881], 2025-03-28T09:02:29.164916Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jqdzt2tn54j4j31ewzayh8ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmUyYjU4Y2QtYzkwNTkzYmQtZWQ1MjljMjItNzgyYzg0MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1089:2881], task: 6, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 488 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 220 FinishTimeMs: 1743152549164 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 146 BuildCpuTimeUs: 74 HostName: "ghrun-mdsjyjltvm" NodeId: 2 CreateTimeMs: 1743152549151 } MaxMemoryUsage: 1048576 } 2025-03-28T09:02:29.164961Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jqdzt2tn54j4j31ewzayh8ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmUyYjU4Y2QtYzkwNTkzYmQtZWQ1MjljMjItNzgyYzg0MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1089:2881] 2025-03-28T09:02:29.165034Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jqdzt2tn54j4j31ewzayh8ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmUyYjU4Y2QtYzkwNTkzYmQtZWQ1MjljMjItNzgyYzg0MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1090:2882], 2025-03-28T09:02:29.165069Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jqdzt2tn54j4j31ewzayh8ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmUyYjU4Y2QtYzkwNTkzYmQtZWQ1MjljMjItNzgyYzg0MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1090:2882], 2025-03-28T09:02:29.165340Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jqdzt2tn54j4j31ewzayh8ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmUyYjU4Y2QtYzkwNTkzYmQtZWQ1MjljMjItNzgyYzg0MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1090:2882], task: 7, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 405 DurationUs: 1000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 171 FinishTimeMs: 1743152549165 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 130 BuildCpuTimeUs: 41 HostName: "ghrun-mdsjyjltvm" NodeId: 2 StartTimeMs: 1743152549164 CreateTimeMs: 1743152549152 } MaxMemoryUsage: 1048576 } 2025-03-28T09:02:29.165419Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jqdzt2tn54j4j31ewzayh8ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmUyYjU4Y2QtYzkwNTkzYmQtZWQ1MjljMjItNzgyYzg0MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1090:2882] 2025-03-28T09:02:29.168749Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jqdzt2tn54j4j31ewzayh8ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmUyYjU4Y2QtYzkwNTkzYmQtZWQ1MjljMjItNzgyYzg0MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 14213 DurationUs: 1743152547144753 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } Tables { TablePath: "/Root/table-2" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } ExecuterCpuTimeUs: 8912 StartTimeMs: 2020 FinishTimeMs: 1743152549165 Stages { StageId: 5 StageGuid: "8e166d0c-b6e912f3-ec601846-2e6082c3" Program: "(\n(return (lambda \'($1) (FromFlow (Take (ToFlow $1) (Uint64 \'\"1001\")))))\n)\n" ComputeActors { CpuTimeUs: 488 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 220 FinishTimeMs: 1743152549164 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 146 BuildCpuTimeUs: 74 HostName: "ghrun-mdsjyjltvm" NodeId: 2 CreateTimeMs: 1743152549151 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743152549159 } Stages { StageGuid: "1259eaad-878e8af2-8719192f-2390f0e6" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($3) (Member $3 \'\"key\") (Member $3 \'\"value\")))\n (return (FromFlow (ExpandMap (Take (ToFlow $1) (Uint64 \'\"1001\")) $2)))\n))))\n)\n" BaseTimeMs: 1743152549159 } Stages { StageId: 3 StageGuid: "bf527b49-4a9724ff-51abd7a5-f0b028ff" Program: "(\n(return (lambda \'($1) (FromFlow (WideTop (ToFlow $1) (Uint64 \'\"1001\") \'(\'(\'0 (Bool \'true)))))))\n)\n" BaseTimeMs: 1743152549159 } Stages { StageId: 2 StageGuid: "84daebef-24209b88-e87ca6f7-10fe4b94" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($3) (Member $3 \'\"key\") (Member $3 \'\"value\")))\n (return (FromFlow (ExpandMap (Take (ToFlow $1) (Uint64 \'\"1001\")) $2)))\n))))\n)\n" BaseTimeMs: 1743152549159 } Stages { StageId: 4 StageGuid: "a12c9eac-e14d70a-2c6c5e25-50cf9bbf" Program: "(\n(return (lambda \'($1 $2) (block \'(\n (let $3 (lambda \'($6 $7) (AsStruct \'(\'\"key\" $6) \'(\'\"value\" $7))))\n (let $4 (Sort (Extend (NarrowMap (ToFlow $1) $3) (NarrowMap (ToFlow $2) $3)) (Bool \'true) (lambda \'($8) (Member $8 \'\"key\"))))\n (let $5 (lambda \'($9) 
(Member $9 \'\"key\") (Member $9 \'\"value\")))\n (return (FromFlow (ExpandMap $4 $5)))\n))))\n)\n" BaseTimeMs: 1743152549159 } Stages { StageId: 6 StageGuid: "a34e0bb5-56d9a298-bb5a1985-6cb767bc" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'\"1001\")) (lambda \'($2 $3) (AsStruct \'(\'\"key\" $2) \'(\'\"value\" $3)))))))\n)\n" BaseTimeMs: 1743152549159 } Stages { StageId: 1 StageGuid: "bf30013-c964b1-53720834-67622b7" Program: "(\n(return (lambda \'($1) (FromFlow (WideTop (ToFlow $1) (Uint64 \'\"1001\") \'(\'(\'0 (Bool \'true)))))))\n)\n" BaseTimeMs: 1743152549159 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":17,\"Plans\":[{\"Node Type\":\"ResultSet\",\"PlanNodeId\":16,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":14}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":15,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":14,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":12}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":13,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":12,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Sort-Union\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Sort\",\"SortBy\":\"row.key\"},{\"Inputs\":[{\"ExternalPlanNodeId\":10},{\"ExternalPlanNodeId\":5}],\"Name\":\"Union\"}],\"PlanNodeId\":11,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":10,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Top\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":8}],\"Limit\":\"1001\",\"Name\":\"Top\",\"TopBy\":\"row.key\"}],\"PlanNodeId\":9,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":8,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":6}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":7,\"Plans\":[{\"Node Type\":\"TablePointLookup\",\"Operators\":[{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TablePointLookup\",\"Path\":\"\\/Root\\/table-1\",\"ReadColumns\":[\"key\",\"value\"],\"ReadLimit\":\"1001\",\"ReadRange\":[\"key (3)\"],\"Scan\":\"Sequential\",\"Table\":\"table-1\"}],\"PlanNodeId\":6,\"StageGuid\":\"\",\"Tables\":[\"table-1\"]}],\"StageGuid\":\"1259eaad-878e8af2-8719192f-2390f0e6\",\"Stats\":{\"BaseTimeMs\":1743152549159,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"bf30013-c964b1-53720834-67622b7\",\"Stats\":{\"BaseTimeMs\":1743152549159,\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"},{\"Node Type\":\"UnionAll\",\"PlanNodeId\":5,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Top\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Limit\":\"1001\",\"Name\":\"Top\",\"TopBy\":\"row.key\"}],\"PlanNodeId\":4,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":3,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"TablePointLookup\",\"Operators\":[{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No 
estimate\",\"Inputs\":[],\"Name\":\"TablePointLookup\",\"Path\":\"\\/Root\\/table-2\",\"ReadColumns\":[\"key\",\"value\"],\"ReadLimit\":\"1001\",\"ReadRange\":[\"key (4)\"],\"Scan\":\"Sequential\",\"Table\":\"table-2\"}],\"PlanNodeId\":1,\"StageGuid\":\"\",\"Tables\":[\"table-2\"]}],\"StageGuid\":\"84daebef-24209b88-e87ca6f7-10fe4b94\",\"Stats\":{\"BaseTimeMs\":1743152549159,\"FinishedTasks\":0,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"bf527b49-4a9724ff-51abd7a5-f0b028ff\",\"Stats\":{\"BaseTimeMs\":1743152549159,\"FinishedTasks\":0,\"PhysicalStageId\":3,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"a12c9eac-e14d70a-2c6c5e25-50cf9bbf\",\"Stats\":{\"BaseTimeMs\":1743152549159,\"FinishedTasks\":0,\"PhysicalStageId\":4,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"SortColumns\":[\"key (Asc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"8e166d0c-b6e912f3-ec601846-2e6082c3\",\"Stats\":{\"BaseTimeMs\":1743152549159,\"ComputeNodes\":[{\"CpuTimeUs\":488,\"Tasks\":[{\"ComputeTimeUs\":146,\"FinishTimeMs\":1743152549164,\"Host\":\"ghrun-mdsjyjltvm\",\"InputBytes\":7,\"InputRows\":2,\"NodeId\":2,\"OutputBytes\":7,\"OutputRows\":2,\"TaskId\":6}]}],\"FinishedTasks\":0,\"PhysicalStageId\":5,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"a34e0bb5-56d9a298-bb5a1985-6cb767bc\",\"Stats\":{\"BaseTimeMs\":1743152549159,\"FinishedTasks\":0,\"PhysicalStageId\":6,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 3889 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\002\022\013\010\225\003\020\313\n\030\265) \007" } } 2025-03-28T09:02:29.168864Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jqdzt2tn54j4j31ewzayh8ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmUyYjU4Y2QtYzkwNTkzYmQtZWQ1MjljMjItNzgyYzg0MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:02:29.168945Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1078:2859] TxId: 281474976715667. Ctx: { TraceId: 01jqdzt2tn54j4j31ewzayh8ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmUyYjU4Y2QtYzkwNTkzYmQtZWQ1MjljMjItNzgyYzg0MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.005301s ReadRows: 2 ReadBytes: 16 ru: 3 rate limiter was not found force flag: 1
{ items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } }
>> TKesusTest::TestKesusConfig
>> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD]
>> TKesusTest::TestQuoterResourceDescribe
>> KqpLimits::CancelAfterRoTx [GOOD]
>> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup
>> TKesusTest::TestReleaseLockFailure [GOOD]
>> TKesusTest::TestReleaseSemaphore
>> TTxDataShardUploadRows::TestUploadShadowRows
>> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow
|75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD]
>> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD]
>> TKesusTest::TestReleaseSemaphore [GOOD]
>> TKesusTest::TestSemaphoreData
>> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD]
>> DataShardWrite::InsertImmediate
>> TKesusTest::TestKesusConfig [GOOD]
>> TKesusTest::TestLockNotFound
>> TTransferTests::CreateSequential [GOOD]
>> TTransferTests::CreateInParallel
>> TKesusTest::TestQuoterResourceDescribe [GOOD]
>> TKesusTest::TestQuoterResourceCreation
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit
>> TKesusTest::TestSessionTimeoutAfterDetach
>> TKesusTest::TestLockNotFound [GOOD]
>> TKesusTest::TestDeleteSemaphore
>> TTxDataShardUploadRows::TestUploadRows
>> TFlatTest::SelectRangeForbidNullArgs3 [GOOD]
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish
>> TKesusTest::TestSemaphoreData [GOOD]
>> TKesusTest::TestSemaphoreReleaseReacquire
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD]
Test command err:
=========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800
=========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800
2025-03-28T08:59:51.926152Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:34:2065]
2025-03-28T08:59:51.926219Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful handshake: owner# 800, generation# 1
2025-03-28T08:59:51.930819Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:34:2065]
2025-03-28T08:59:51.930989Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1
2025-03-28T08:59:51.934636Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:35:2066]
2025-03-28T08:59:51.934809Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1
2025-03-28T08:59:51.935297Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle
NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:35:2066] 2025-03-28T08:59:51.935352Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1 2025-03-28T08:59:51.935452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T08:59:51.935922Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:41:2068] 2025-03-28T08:59:51.935971Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/tenant 2025-03-28T08:59:51.936079Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:41:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-28T08:59:51.936228Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:42:2068] 2025-03-28T08:59:51.936253Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/tenant 2025-03-28T08:59:51.936287Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:42:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-28T08:59:51.936424Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:43:2068] 2025-03-28T08:59:51.936447Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/tenant 2025-03-28T08:59:51.936476Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:43:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-28T08:59:51.936538Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:3:2050] 2025-03-28T08:59:51.936610Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2068] 2025-03-28T08:59:51.936653Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:6:2053] 2025-03-28T08:59:51.936703Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:42:2068] 2025-03-28T08:59:51.936751Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:43:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:9:2056] 2025-03-28T08:59:51.936798Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2068] 2025-03-28T08:59:51.936870Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:38:2068] 2025-03-28T08:59:51.936930Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:39:2068] 2025-03-28T08:59:51.936969Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/tenant] Set up state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T08:59:51.937094Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify 
{ Path: /root/tenant Version: 0 }: sender# [1:40:2068] 2025-03-28T08:59:51.937144Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:37:2068][/root/tenant] Ignore empty state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2025-03-28T08:59:51.937349Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:34:2065], cookie# 0, event size# 103 2025-03-28T08:59:51.937391Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2025-03-28T08:59:51.937473Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /root/tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2025-03-28T08:59:51.937628Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:3:2050] 2025-03-28T08:59:51.937686Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:41:2068] 2025-03-28T08:59:51.937738Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:38:2068] 2025-03-28T08:59:51.937789Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/tenant] Update to strong state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2025-03-28T08:59:52.800374Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:34:2065] 2025-03-28T08:59:52.800429Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:3:2050] Successful handshake: owner# 800, generation# 1 2025-03-28T08:59:52.800561Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:35:2066] 2025-03-28T08:59:52.800601Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 900, generation# 1 2025-03-28T08:59:52.800655Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:34:2065] 2025-03-28T08:59:52.800682Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:3:2050] Commit generation: owner# 800, generation# 1 2025-03-28T08:59:52.800872Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] 
Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:35:2066] 2025-03-28T08:59:52.800910Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 900, generation# 1 2025-03-28T08:59:52.800985Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/root/tenant] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T08:59:52.801360Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:41:2068] 2025-03-28T08:59:52.801391Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:3:2050] Upsert description: path# /root/tenant 2025-03-28T08:59:52.801463Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:3:2050] Subscribe: subscriber# [3:41:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-28T08:59:52.801595Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:42:2068] 2025-03-28T08:59:52.801615Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/tenant 2025-03-28T08:59:52.801654Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:42:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-28T08:59:52.801755Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:43:2068] 2025-03-28T08:59:52.801771Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:9:2056] Upsert description: path# /root/tenant 2025-03-28T08:59:52.801796Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:9:2056] Subscribe: subscriber# [3:43:2068], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-28T08:59:52.801850Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:41:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:3:2050] 2025-03-28T08:59:52.801894Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:41:2068] 2025-03-28T08:59:52.801935Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:42:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:6:2053] 2025-03-28T08:59:52.801969Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:42:2068] 2025-03-28T08:59:52.802012Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:43:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:9:2056] 2025-03-28T08:59:52.802039Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:43:2068] 2025-03-28T08:59:52.802107Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:38:2068] 2025-03-28T08:59:52.802156Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:39:2068] 2025-03-28T08:59:52.802235Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:37:2068][/root/tenant] Set up state: owner# [3:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T08:59:52.802293Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:37:2068][/ ... 
omainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2025-03-28T09:02:31.497087Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:34:2065] 2025-03-28T09:02:31.497173Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:3:2050] Successful handshake: owner# 910, generation# 1 2025-03-28T09:02:31.497364Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:34:2065] 2025-03-28T09:02:31.497398Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:3:2050] Commit generation: owner# 910, generation# 1 2025-03-28T09:02:31.497449Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [397:35:2066] 2025-03-28T09:02:31.497481Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:6:2053] Successful handshake: owner# 910, generation# 1 2025-03-28T09:02:31.497694Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [397:35:2066] 2025-03-28T09:02:31.497725Z node 397 :SCHEME_BOARD_REPLICA NOTICE: [397:6:2053] Commit generation: owner# 910, generation# 1 2025-03-28T09:02:31.497812Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:02:31.498255Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:41:2068] 2025-03-28T09:02:31.498296Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:3:2050] Upsert description: path# /Root/Tenant/table_inside 2025-03-28T09:02:31.498367Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:3:2050] Subscribe: subscriber# [397:41:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-28T09:02:31.498506Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:42:2068] 2025-03-28T09:02:31.498529Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:6:2053] Upsert description: path# /Root/Tenant/table_inside 2025-03-28T09:02:31.498568Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:6:2053] Subscribe: subscriber# [397:42:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-28T09:02:31.498721Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [397:43:2068] 2025-03-28T09:02:31.498747Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:9:2056] Upsert description: path# /Root/Tenant/table_inside 2025-03-28T09:02:31.498791Z node 397 :SCHEME_BOARD_REPLICA INFO: [397:9:2056] Subscribe: subscriber# [397:43:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: 
true 2025-03-28T09:02:31.498857Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:41:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:3:2050] 2025-03-28T09:02:31.498907Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:41:2068] 2025-03-28T09:02:31.498972Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:42:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:6:2053] 2025-03-28T09:02:31.499025Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:42:2068] 2025-03-28T09:02:31.499067Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][397:43:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:9:2056] 2025-03-28T09:02:31.499108Z node 397 :SCHEME_BOARD_REPLICA DEBUG: [397:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [397:43:2068] 2025-03-28T09:02:31.499192Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:38:2068] 2025-03-28T09:02:31.499255Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:39:2068] 2025-03-28T09:02:31.499300Z node 397 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][397:37:2068][/Root/Tenant/table_inside] Set up state: owner# [397:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:02:31.499360Z node 397 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][397:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [397:40:2068] 2025-03-28T09:02:31.499396Z node 397 :SCHEME_BOARD_SUBSCRIBER INFO: [main][397:37:2068][/Root/Tenant/table_inside] Ignore empty state: owner# [397:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2025-03-28T09:02:32.079348Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:34:2065] 2025-03-28T09:02:32.079405Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:3:2050] Successful handshake: owner# 910, generation# 1 2025-03-28T09:02:32.079525Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:34:2065] 2025-03-28T09:02:32.079554Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:3:2050] Commit generation: owner# 910, generation# 1 2025-03-28T09:02:32.079599Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] 
Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:35:2066] 2025-03-28T09:02:32.079628Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:6:2053] Successful handshake: owner# 910, generation# 1 2025-03-28T09:02:32.079796Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:35:2066] 2025-03-28T09:02:32.079825Z node 399 :SCHEME_BOARD_REPLICA NOTICE: [399:6:2053] Commit generation: owner# 910, generation# 1 2025-03-28T09:02:32.079901Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:02:32.080291Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:41:2068] 2025-03-28T09:02:32.080324Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:3:2050] Upsert description: path# /Root/Tenant/table_inside 2025-03-28T09:02:32.080391Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:3:2050] Subscribe: subscriber# [399:41:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-28T09:02:32.080517Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:42:2068] 2025-03-28T09:02:32.080541Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:6:2053] Upsert description: path# /Root/Tenant/table_inside 2025-03-28T09:02:32.080575Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:6:2053] Subscribe: subscriber# [399:42:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-28T09:02:32.080704Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:43:2068] 2025-03-28T09:02:32.080726Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:9:2056] Upsert description: path# /Root/Tenant/table_inside 2025-03-28T09:02:32.080763Z node 399 :SCHEME_BOARD_REPLICA INFO: [399:9:2056] Subscribe: subscriber# [399:43:2068], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-28T09:02:32.080823Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:41:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:3:2050] 2025-03-28T09:02:32.080870Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:41:2068] 2025-03-28T09:02:32.080908Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:42:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:6:2053] 2025-03-28T09:02:32.080945Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:42:2068] 2025-03-28T09:02:32.080984Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][399:43:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:9:2056] 2025-03-28T09:02:32.081020Z node 399 :SCHEME_BOARD_REPLICA DEBUG: [399:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:43:2068] 2025-03-28T09:02:32.081099Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle 
NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:38:2068] 2025-03-28T09:02:32.081157Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:39:2068] 2025-03-28T09:02:32.081201Z node 399 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][399:37:2068][/Root/Tenant/table_inside] Set up state: owner# [399:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:02:32.081259Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][399:37:2068][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:40:2068] 2025-03-28T09:02:32.081296Z node 399 :SCHEME_BOARD_SUBSCRIBER INFO: [main][399:37:2068][/Root/Tenant/table_inside] Ignore empty state: owner# [399:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() >> TKesusTest::TestAcquireSemaphoreTimeout >> TKesusTest::TestRegisterProxy >> TKesusTest::TestQuoterResourceCreation [GOOD] >> TKesusTest::TestQuoterResourceModification >> TKesusTest::TestAcquireWaiterDowngrade >> TFlatTest::RejectByIncomingReadSetSize [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestRegisterProxy [GOOD] >> TKesusTest::TestRegisterProxyBadGeneration >> DataShardWrite::UpdateImmediate [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow >> TKesusTest::TestSemaphoreReleaseReacquire [GOOD] >> TKesusTest::TestSemaphoreSessionFailures |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestQuoterResourceModification [GOOD] >> TKesusTest::TestQuoterResourceDeletion >> TKesusTest::TestAcquireWaiterDowngrade [GOOD] >> TKesusTest::TestAcquireWaiterUpgrade >> TFlatTest::SplitBoundaryRead [GOOD] >> TKesusTest::TestRegisterProxyBadGeneration [GOOD] >> TKesusTest::TestRegisterProxyFromDeadActor >> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD] >> DataShardWrite::UpsertLostPrepareArbiterRestart >> DataShardWrite::DeletePrepared+Volatile [GOOD] >> DataShardWrite::DeletePrepared-Volatile >> TKesusTest::TestSemaphoreSessionFailures [GOOD] >> TKesusTest::TestSessionDetach ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] Test command err: 2025-03-28T09:02:22.766142Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783163862427319:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:22.766603Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f36/r3tmp/tmpfNaVKt/pdisk_1.dat 2025-03-28T09:02:23.705591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:23.705688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:23.708391Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:23.729754Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:23.746891Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:16483 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:24.181984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:24.227515Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:24.247853Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:02:24.253631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f36/r3tmp/tmpyIVSwE/pdisk_1.dat 2025-03-28T09:02:28.670896Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:28.690717Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:28.718644Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:28.726841Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:28.734200Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1169 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:29.052921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:29.062844Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:29.071348Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:02:29.075580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
>> TKesusTest::TestAcquireWaiterUpgrade [GOOD] >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByIncomingReadSetSize [GOOD] Test command err: 2025-03-28T09:02:21.857580Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783157299003514:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:21.861175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f38/r3tmp/tmpBS20M2/pdisk_1.dat 2025-03-28T09:02:22.690841Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:22.704999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:22.705098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:22.720024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16826 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:23.584164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:23.606359Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:23.742788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpMkDir MkDir { Name: "Dir1" } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:02:23.743005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /dc-1/Dir1, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:02:23.743115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: dc-1, child name: Dir1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-28T09:02:23.743146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-03-28T09:02:23.743199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:02:23.743405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:02:23.743454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 waiting... 2025-03-28T09:02:23.745554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-03-28T09:02:23.745681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /dc-1/Dir1 2025-03-28T09:02:23.745842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:02:23.745858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:02:23.745952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:02:23.746027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:02:23.746040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486783161593971350:2383], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-03-28T09:02:23.746052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486783161593971350:2383], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-03-28T09:02:23.746100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:02:23.746120Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:02:23.746176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 2025-03-28T09:02:23.750821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:02:23.752477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-28T09:02:23.752591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-28T09:02:23.752612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-03-28T09:02:23.752630Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-03-28T09:02:23.752696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-28T09:02:23.752933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-28T09:02:23.752970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-28T09:02:23.752977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-03-28T09:02:23.752986Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-03-28T09:02:23.752996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:02:23.753037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-03-28T09:02:23.753146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:02:23.753157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-03-28T09:02:23.753179Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:02:23.757593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2025-03-28T09:02:23.757717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:02:23.757814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-03-28T09:02:23.757847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-03-28T09:02:23.760885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743152543804, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:02:23.761007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743152543804 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:02:23.761039Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1743152543804, at schemeshard: 72057594046644480 2025-03-28T09:02:23.761144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-03-28T09:02:23.761294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:02:23.761368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:02:23.763416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:02:23.763439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:02:23.763543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:02:23.763596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:02:23.763607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486783161593971350:2383], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-03-28T09:02:23.763618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486783161593971350:2383], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-03-28T09:02:23.763649Z node 1 ... 
Kikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:02:24.027839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-03-28T09:02:24.027903Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-03-28T09:02:24.028053Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:02:24.028095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-03-28T09:02:24.028109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710661 2025-03-28T09:02:24.028121Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 9 2025-03-28T09:02:24.028204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 4 2025-03-28T09:02:24.028286Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{37, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-03-28T09:02:24.028317Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:02:24.028342Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:21} Tx{21, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-03-28T09:02:24.028375Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:21} Tx{21, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:02:24.028405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-03-28T09:02:24.028421Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:21} Tx{21, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:02:24.028461Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:21} Tx{21, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:02:24.028489Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2025-03-28T09:02:24.028504Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, 
Memory{4194304 dyn 0} 2025-03-28T09:02:24.028539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710661 2025-03-28T09:02:24.028545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710661 2025-03-28T09:02:24.028555Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 3 2025-03-28T09:02:24.028562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-03-28T09:02:24.028570Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [1:7486783161593971330:2362] 2025-03-28T09:02:24.028579Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [1:7486783161593971330:2362] 2025-03-28T09:02:24.028595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710661, subscribers: 1 2025-03-28T09:02:24.028604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7486783170183906324:2304] 2025-03-28T09:02:24.028646Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{38, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2025-03-28T09:02:24.028685Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:02:24.028775Z node 1 :PIPE_SERVER DEBUG: [72057594046382081] HandleSend Sender# [1:7486783161593971326:2362] EventType# 269156352 2025-03-28T09:02:24.030450Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:16:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:02:24.030496Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:16:1:24576:121:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:02:24.030531Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:17:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:02:24.030546Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:17:1:24576:118:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:02:24.030565Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:18:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:02:24.030578Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:4:18:1:24576:131:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:02:24.030721Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 16 2025-03-28T09:02:24.030812Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 17 2025-03-28T09:02:24.030838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 
2025-03-28T09:02:24.030860Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 18 2025-03-28T09:02:24.030871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2025-03-28T09:02:24.031064Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:20:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:02:24.031080Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:20:1:24576:109:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:02:24.031119Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:21} commited cookie 1 for step 20 2025-03-28T09:02:24.031280Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] received poison pill [1:7486783170183906325:2304] 2025-03-28T09:02:24.031309Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594046644480] notify reset [1:7486783170183906325:2304] 2025-03-28T09:02:24.031344Z node 1 :PIPE_SERVER DEBUG: [72057594046644480] Got PeerClosed from# [1:7486783170183906325:2304] 2025-03-28T09:02:27.645924Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783181684309693:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:27.645962Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f38/r3tmp/tmpKxkQ6Z/pdisk_1.dat 2025-03-28T09:02:27.974222Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:27.986896Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:28.009628Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:28.011597Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6577 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:28.246450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:28.260249Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:28.284448Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:02:28.306112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:32.702194Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486783181684309693:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:32.722988Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:33.179476Z node 2 :TX_PROXY ERROR: Actor# [2:7486783207454114480:2621] txid# 281474976710700 FailProposedRequest: Transaction incoming read set size 1000086 for tablet 72075186224037889 exceeded limit 1000 Status# ExecError 2025-03-28T09:02:33.179575Z node 2 :TX_PROXY ERROR: Actor# [2:7486783207454114480:2621] txid# 281474976710700 RESPONSE Status# ExecError marker# P13c >> TKesusTest::TestRegisterProxyFromDeadActor [GOOD] >> TKesusTest::TestRegisterProxyLinkFailure >> TKesusTest::TestQuoterResourceDeletion [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource >> TTransferTests::CreateInParallel [GOOD] >> TTransferTests::CreateDropRecreate |75.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |75.4%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |75.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TKesusTest::TestGetQuoterResourceCounters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] Test command err: 2025-03-28T09:02:31.365704Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:31.365814Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:31.399878Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:31.400003Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:31.433664Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:31.434275Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=5951434419626299842, session=0, seqNo=0) 2025-03-28T09:02:31.434456Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:02:31.448399Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=5951434419626299842, session=1) 2025-03-28T09:02:31.448710Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=6233758095231334583, session=0, seqNo=0) 2025-03-28T09:02:31.448824Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:02:31.461669Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=6233758095231334583, session=2) 2025-03-28T09:02:31.462028Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:130:2156], cookie=111, name="Lock1") 2025-03-28T09:02:31.474068Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:130:2156], cookie=111) 2025-03-28T09:02:31.474386Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-28T09:02:31.474559Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-28T09:02:31.474720Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:02:31.488790Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2025-03-28T09:02:31.489107Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:130:2156], cookie=333, name="Lock1") 2025-03-28T09:02:31.502776Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:130:2156], cookie=333) 2025-03-28T09:02:32.193223Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:32.193344Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:32.241779Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:32.241984Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:32.281602Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:32.282208Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=8035981929629706001, session=0, seqNo=0) 2025-03-28T09:02:32.282396Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:02:32.303617Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=8035981929629706001, session=1) 2025-03-28T09:02:32.304072Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=1831040664210481835, session=0, seqNo=0) 2025-03-28T09:02:32.304229Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:02:32.327607Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=1831040664210481835, session=2) 2025-03-28T09:02:32.328308Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:143:2167], cookie=7212551258024461550, name="Sem1", limit=1) 2025-03-28T09:02:32.328477Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-03-28T09:02:32.346835Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:143:2167], cookie=7212551258024461550) 2025-03-28T09:02:32.347225Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=111, session=1, semaphore="Sem1" count=1) 2025-03-28T09:02:32.347390Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-03-28T09:02:32.347566Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=222, session=2, semaphore="Sem1" count=1) 2025-03-28T09:02:32.371696Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=111) 2025-03-28T09:02:32.371821Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=222) 2025-03-28T09:02:32.372527Z 
node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:151:2175], cookie=450528540936876167, name="Sem1") 2025-03-28T09:02:32.372647Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:151:2175], cookie=450528540936876167) 2025-03-28T09:02:32.373126Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:154:2178], cookie=10286752163126612006, name="Sem1") 2025-03-28T09:02:32.373201Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:154:2178], cookie=10286752163126612006) 2025-03-28T09:02:32.373479Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:132:2158], cookie=333, name="Sem1") 2025-03-28T09:02:32.373600Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link 2025-03-28T09:02:32.388297Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:132:2158], cookie=333) 2025-03-28T09:02:32.389082Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:159:2183], cookie=10159937779574114876, name="Sem1") 2025-03-28T09:02:32.389185Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:159:2183], cookie=10159937779574114876) 2025-03-28T09:02:32.389707Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:162:2186], cookie=8921967094126669196, name="Sem1") 2025-03-28T09:02:32.389800Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:162:2186], cookie=8921967094126669196) 2025-03-28T09:02:32.390081Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:132:2158], cookie=444, name="Sem1") 2025-03-28T09:02:32.390209Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-03-28T09:02:32.408825Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:132:2158], cookie=444) 2025-03-28T09:02:32.409631Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:167:2191], cookie=7726709476519374100, name="Sem1") 2025-03-28T09:02:32.409733Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:167:2191], cookie=7726709476519374100) 2025-03-28T09:02:32.410314Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:170:2194], cookie=351553177862734722, name="Sem1") 2025-03-28T09:02:32.410390Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:170:2194], cookie=351553177862734722) 2025-03-28T09:02:33.033457Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:33.033567Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:33.084131Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:33.084269Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:33.103308Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:33.103631Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:132:2158], cookie=13430120410342678106, name="Sem1", limit=1) 2025-03-28T09:02:33.103818Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 
2025-03-28T09:02:33.135595Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:132:2158], cookie=13430120410342678106) 2025-03-28T09:02:33.136206Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:141:2165], cookie=1455428382752705721, name="Sem2", limit=1) 2025-03-28T09:02:33.136350Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2025-03-28T09:02:33.151365Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:141:2165], cookie=1455428382752705721) 2025-03-28T09:02:33.151925Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:146:2170], cookie=12102495291273092741, name="Sem1") 2025-03-28T09:02:33.152013Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:146:2170], cookie=12102495291273092741) 2025-03-28T09:02:33.152439Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:149:2173], cookie=7767487941231958885, name="Sem2") 2025-03-28T09:02:33.152501Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:149:2173], cookie=7767487941231958885) 2025-03-28T09:02:33.177730Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:33.177842Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:33.178403Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:33.183075Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:33.224608Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:33.224923Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:189:2203], cookie=376975673188608849, name="Sem1") 2025-03-28T09:02:33.224995Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:189:2203], cookie=376975673188608849) 2025-03-28T09:02:33.225530Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:195:2208], cookie=7902126582851354935, name="Sem2") 2025-03-28T09:02:33.225597Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:195:2208], cookie=7902126582851354935) 2025-03-28T09:02:33.226079Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:198:2211], cookie=12405301948584166008, name="Sem1", limit=1) 2025-03-28T09:02:33.252541Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:198:2211], cookie=12405301948584166008) 2025-03-28T09:02:33.253169Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:203:2216], cookie=3272106802988546293, name="Sem2", limit=1) 2025-03-28T09:02:33.268480Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:203:2216], cookie=3272106802988546293) 2025-03-28T09:02:33.269139Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:208:2221], cookie=17367673678482769745, name="Sem1") 2025-03-28T09:02:33.269222Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:208:2221], cookie=1736767367848276 ... 
:KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 9 "Sem1" 2025-03-28T09:02:34.357844Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:224:2247], cookie=15207553039592764659) 2025-03-28T09:02:34.358520Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:229:2252], cookie=7537070841879358547, name="Sem1", force=0) 2025-03-28T09:02:34.358601Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 9 "Sem1" 2025-03-28T09:02:34.370461Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:229:2252], cookie=7537070841879358547) 2025-03-28T09:02:34.371153Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:234:2257], cookie=16264547792289658145, name="Sem1", limit=1) 2025-03-28T09:02:34.371274Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 10 "Sem1" 2025-03-28T09:02:34.383683Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:234:2257], cookie=16264547792289658145) 2025-03-28T09:02:34.384131Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[4:239:2262], cookie=4171954933687747968, name="Sem1", force=0) 2025-03-28T09:02:34.384200Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 10 "Sem1" 2025-03-28T09:02:34.397285Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[4:239:2262], cookie=4171954933687747968) 2025-03-28T09:02:34.397979Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:244:2267], cookie=10316939013601932265, name="Sem1", limit=1) 2025-03-28T09:02:34.398147Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 11 "Sem1" 2025-03-28T09:02:34.410409Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:244:2267], cookie=10316939013601932265) 2025-03-28T09:02:34.410967Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=111, session=1, semaphore="Sem1" count=1) 2025-03-28T09:02:34.411139Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1 2025-03-28T09:02:34.423395Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=111) 2025-03-28T09:02:34.424020Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=222, session=2, semaphore="Sem1" count=1) 2025-03-28T09:02:34.448876Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=222) 2025-03-28T09:02:34.449630Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:132:2158], cookie=333, name="Sem1") 2025-03-28T09:02:34.449753Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 11 "Sem1" waiter link 2025-03-28T09:02:34.470326Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:132:2158], cookie=333) 2025-03-28T09:02:34.471088Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=444, session=2, semaphore="Sem1" count=1) 2025-03-28T09:02:34.483633Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=444) 2025-03-28T09:02:34.484250Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:132:2158], cookie=555, name="Sem1") 2025-03-28T09:02:34.484360Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link 2025-03-28T09:02:34.484426Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2 2025-03-28T09:02:34.496513Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:132:2158], cookie=555) 2025-03-28T09:02:35.056088Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:35.056212Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:35.079725Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:35.080344Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:35.105168Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:35.105738Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=9663943191511500827, session=0, seqNo=0) 2025-03-28T09:02:35.105901Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:02:35.118402Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=9663943191511500827, session=1) 2025-03-28T09:02:35.118849Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=112, name="Sem1", limit=5) 2025-03-28T09:02:35.119055Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-03-28T09:02:35.137342Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=112) 2025-03-28T09:02:35.137787Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=113, name="Sem1") 2025-03-28T09:02:35.153130Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=113) 2025-03-28T09:02:35.153533Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=114, name="Sem1", force=0) 2025-03-28T09:02:35.153678Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-03-28T09:02:35.168939Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=114) 2025-03-28T09:02:35.169268Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[5:132:2158], cookie=13414370501455562644 2025-03-28T09:02:35.169623Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=115, name="Sem1", limit=5) 2025-03-28T09:02:35.187280Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=115) 2025-03-28T09:02:35.187715Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=116, name="Sem1") 2025-03-28T09:02:35.203909Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=116) 2025-03-28T09:02:35.204338Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=117, name="Sem1", force=0) 2025-03-28T09:02:35.217265Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], 
cookie=117) 2025-03-28T09:02:35.217661Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=118, session=1, semaphore="Sem1" count=1) 2025-03-28T09:02:35.233062Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=118) 2025-03-28T09:02:35.233487Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=119, name="Sem1") 2025-03-28T09:02:35.248431Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=119) 2025-03-28T09:02:35.248832Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:132:2158], cookie=120, name="Sem1") 2025-03-28T09:02:35.248949Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:132:2158], cookie=120) 2025-03-28T09:02:35.249231Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[5:132:2158], cookie=13173877927119677559, session=1) 2025-03-28T09:02:35.249342Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-03-28T09:02:35.265002Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[5:132:2158], cookie=13173877927119677559) 2025-03-28T09:02:35.265388Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=121, name="Sem1", limit=5) 2025-03-28T09:02:35.280876Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=121) 2025-03-28T09:02:35.281304Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], cookie=122, name="Sem1") 2025-03-28T09:02:35.297416Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=122) 2025-03-28T09:02:35.297809Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=123, name="Sem1", force=0) 2025-03-28T09:02:35.312903Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=123) 2025-03-28T09:02:35.313310Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=124, session=1, semaphore="Sem1" count=1) 2025-03-28T09:02:35.335537Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=124) 2025-03-28T09:02:35.335961Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=125, name="Sem1") 2025-03-28T09:02:35.367744Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=125) 2025-03-28T09:02:35.368294Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:132:2158], cookie=126, name="Sem1") 2025-03-28T09:02:35.368405Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:132:2158], cookie=126) 2025-03-28T09:02:35.369180Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:132:2158], cookie=127, name="Sem1", limit=5) 2025-03-28T09:02:35.369275Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:132:2158], cookie=127) 2025-03-28T09:02:35.369590Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:132:2158], 
cookie=128, name="Sem1") 2025-03-28T09:02:35.369667Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:132:2158], cookie=128) 2025-03-28T09:02:35.369937Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:132:2158], cookie=129, name="Sem1", force=0) 2025-03-28T09:02:35.370018Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:132:2158], cookie=129) 2025-03-28T09:02:35.370300Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=130, session=1, semaphore="Sem1" count=1) 2025-03-28T09:02:35.370381Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=130) 2025-03-28T09:02:35.370633Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=131, name="Sem1") 2025-03-28T09:02:35.370766Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=131) 2025-03-28T09:02:35.371038Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:132:2158], cookie=132, name="Sem1") 2025-03-28T09:02:35.371109Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:132:2158], cookie=132) >> TCancelTx::CrossShardReadOnlyWithReadSets [GOOD] >> TCancelTx::ImmediateReadOnly >> TKesusTest::TestSessionDetach [GOOD] >> TKesusTest::TestSessionDetachFutureId >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD] >> TKesusTest::TestAcquireWaiterRelease >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig >> TLocksFatTest::RangeSetBreak [GOOD] >> TLocksFatTest::RangeSetNotBreak >> TKesusTest::TestRegisterProxyLinkFailure [GOOD] >> TKesusTest::TestRegisterProxyLinkFailureRace >> TKesusTest::TestGetQuoterResourceCounters [GOOD] >> TKesusTest::TestSessionDetachFutureId [GOOD] >> TKesusTest::TestSessionDestroy >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] Test command err: 2025-03-28T09:02:32.264643Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:32.264783Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:32.282387Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:32.282476Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:32.312557Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:32.334528Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:130:2156], cookie=1142586767211545822, path="/Root", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-03-28T09:02:32.334893Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-28T09:02:32.351735Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:130:2156], cookie=1142586767211545822) 2025-03-28T09:02:32.352392Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:139:2163], cookie=1740939308261957788, 
path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-03-28T09:02:32.352598Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Folder" 2025-03-28T09:02:32.372582Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:139:2163], cookie=1740939308261957788) 2025-03-28T09:02:32.373348Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:144:2168], cookie=13255681550576431603, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-03-28T09:02:32.373565Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Q1" 2025-03-28T09:02:32.399315Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:144:2168], cookie=13255681550576431603) 2025-03-28T09:02:32.399944Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:149:2173], cookie=1117974086622402223, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-03-28T09:02:32.400162Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-03-28T09:02:32.415653Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:149:2173], cookie=1117974086622402223) 2025-03-28T09:02:32.416424Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:154:2178], cookie=17051797180588133409, path="/Root/Folder/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-03-28T09:02:32.416649Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2" 2025-03-28T09:02:32.428944Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:154:2178], cookie=17051797180588133409) 2025-03-28T09:02:32.429595Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:159:2183], cookie=15836087985823803897, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 }) 2025-03-28T09:02:32.429819Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3" 2025-03-28T09:02:32.441830Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:159:2183], cookie=15836087985823803897) 2025-03-28T09:02:32.442550Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:164:2188], cookie=15926213886928088796, path="/Root2", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2025-03-28T09:02:32.442753Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 7 "Root2" 2025-03-28T09:02:32.456473Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:164:2188], cookie=15926213886928088796) 2025-03-28T09:02:32.457224Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:169:2193], cookie=11466923471671106291, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 }) 2025-03-28T09:02:32.457427Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 8 "Root2/Q" 2025-03-28T09:02:32.475525Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:169:2193], cookie=11466923471671106291) 2025-03-28T09:02:32.476296Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:174:2198], cookie=15624905410714898142, ids=[100], paths=[], recursive=0) 2025-03-28T09:02:32.476391Z node 
1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:174:2198], cookie=15624905410714898142) 2025-03-28T09:02:32.476959Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:177:2201], cookie=7336749004310199296, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-03-28T09:02:32.477043Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:177:2201], cookie=7336749004310199296) 2025-03-28T09:02:32.477590Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:180:2204], cookie=16105469500274625851, ids=[], paths=[/Root, ], recursive=0) 2025-03-28T09:02:32.477676Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:180:2204], cookie=16105469500274625851) 2025-03-28T09:02:32.478278Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:183:2207], cookie=10277630327159049725, ids=[1, 1], paths=[], recursive=0) 2025-03-28T09:02:32.478339Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:183:2207], cookie=10277630327159049725) 2025-03-28T09:02:32.478918Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:186:2210], cookie=14942540643550934637, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2025-03-28T09:02:32.479021Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:186:2210], cookie=14942540643550934637) 2025-03-28T09:02:32.479626Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:189:2213], cookie=12128151430442220013, ids=[], paths=[], recursive=1) 2025-03-28T09:02:32.479791Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:189:2213], cookie=12128151430442220013) 2025-03-28T09:02:32.480586Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:192:2216], cookie=9433288188193733834, ids=[], paths=[], recursive=0) 2025-03-28T09:02:32.480656Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:192:2216], cookie=9433288188193733834) 2025-03-28T09:02:32.481264Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:195:2219], cookie=14845044572181412043, ids=[3, 2], paths=[], recursive=1) 2025-03-28T09:02:32.481331Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:195:2219], cookie=14845044572181412043) 2025-03-28T09:02:32.482008Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:198:2222], cookie=373962504230930709, ids=[3, 2], paths=[], recursive=0) 2025-03-28T09:02:32.482073Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:198:2222], cookie=373962504230930709) 2025-03-28T09:02:32.483876Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:201:2225], cookie=14605274137887084408, ids=[], paths=[Root2/], recursive=1) 2025-03-28T09:02:32.483972Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:201:2225], cookie=14605274137887084408) 2025-03-28T09:02:32.484600Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:204:2228], cookie=3818165138114641574, ids=[], 
paths=[Root2/], recursive=0) 2025-03-28T09:02:32.484662Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:204:2228], cookie=3818165138114641574) 2025-03-28T09:02:32.499031Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:32.499142Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:32.499694Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:32.500302Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:32.555374Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:32.555764Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:244:2258], cookie=13927207551161529625, ids=[100], paths=[], recursive=0) 2025-03-28T09:02:32.555863Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:244:2258], cookie=13927207551161529625) 2025-03-28T09:02:32.556623Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:250:2263], cookie=12967861752838030573, ids=[], paths=[Nonexistent/Path], recursive=0) 2025-03-28T09:02:32.556720Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:250:2263], cookie=12967861752838030573) 2025-03-28T09:02:32.557433Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:253:2266], cookie=17483975247759534279, ids=[], paths=[/Root, ], recursive=0) 2025-03-28T09:02:32.557539Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:253:2266], cookie=17483975247759534279) 2025-03-28T09:02:32.558282Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:256:2269], cookie=3010532988646643315, ids=[1, 1], paths=[], recursive=0) 2025-03-28T09:02:32.558344Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:256:2269], cookie=3010532988646643315) 2025-03-28T09:02:32.559343Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:259:2272], cookie=12236088048801349387, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2025-03-28T09:02:32.559437Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:259:2272], cookie=12236088048801349387) 2025-03-28T09:02:32.560160Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:262:2275], cookie=1995965375769088566, ids=[], paths=[], recursive=1) 2025-03-28T09:02:32.560259Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:262:2275], cookie=1995965375769088566) 2025-03-28T09:02:32.561237Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:265:2278], cookie=15235267528590414809, ids=[], paths=[], recursive=0) 2025-03-28T09:02:32.561317Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:265:2278], cookie=15235267528590414809) 2025-03-28T09:02:32.562146Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:268:2281], cookie=15621813560115904730, ids=[3, 2], paths=[], recursive=1) 2025-03-28T09:02:32.562259Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:268:2281], 
cookie=15621813560115904730) 2025-03-28T09:02:32.570087Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:271:2284], cookie=1517789414444709785, ids=[3, 2], paths=[], ... _TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:35.484900Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:35.485298Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:132:2158], cookie=5190403574521991811, path="/Root", config={ MaxUnitsPerSecond: 1 }) 2025-03-28T09:02:35.485513Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-28T09:02:35.503515Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:132:2158], cookie=5190403574521991811) 2025-03-28T09:02:35.504248Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:141:2165], cookie=16802688443748450781, path="/Root/Q", config={ }) 2025-03-28T09:02:35.504506Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Q" 2025-03-28T09:02:35.518291Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:141:2165], cookie=16802688443748450781) 2025-03-28T09:02:35.518969Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:146:2170], cookie=12709848692082288930, path="/Root/Folder", config={ }) 2025-03-28T09:02:35.519194Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Folder" 2025-03-28T09:02:35.536171Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:146:2170], cookie=12709848692082288930) 2025-03-28T09:02:35.537026Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:151:2175], cookie=3346020474758135398, path="/Root/Folder/Q1", config={ }) 2025-03-28T09:02:35.537264Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2025-03-28T09:02:35.552721Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:151:2175], cookie=3346020474758135398) 2025-03-28T09:02:35.553368Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:156:2180], cookie=1813471971387734174, ids=[], paths=[], recursive=1) 2025-03-28T09:02:35.553468Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:156:2180], cookie=1813471971387734174) 2025-03-28T09:02:35.554424Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:162:2186], cookie=5494908139065039469, ids=[], paths=[], recursive=1) 2025-03-28T09:02:35.554512Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:162:2186], cookie=5494908139065039469) 2025-03-28T09:02:35.555464Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:168:2192], cookie=8691808076295847582, ids=[], paths=[], recursive=1) 2025-03-28T09:02:35.555545Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:168:2192], cookie=8691808076295847582) 2025-03-28T09:02:35.556050Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:171:2195], cookie=17264667803959288915, id=0, path="/Root/Folder/NonexistingRes") 2025-03-28T09:02:35.556146Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:171:2195], cookie=17264667803959288915) 2025-03-28T09:02:35.556765Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:174:2198], cookie=6921381899852083576, ids=[], paths=[], recursive=1) 2025-03-28T09:02:35.556844Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:174:2198], cookie=6921381899852083576) 2025-03-28T09:02:35.557356Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:177:2201], cookie=2438121051029772449, id=100, path="") 2025-03-28T09:02:35.557433Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:177:2201], cookie=2438121051029772449) 2025-03-28T09:02:35.557954Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:180:2204], cookie=5775549324271962474, ids=[], paths=[], recursive=1) 2025-03-28T09:02:35.558022Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:180:2204], cookie=5775549324271962474) 2025-03-28T09:02:35.558564Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:183:2207], cookie=11622519605047542606, id=3, path="") 2025-03-28T09:02:35.558634Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:183:2207], cookie=11622519605047542606) 2025-03-28T09:02:35.559192Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:186:2210], cookie=11214484667062164964, ids=[], paths=[], recursive=1) 2025-03-28T09:02:35.559269Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:186:2210], cookie=11214484667062164964) 2025-03-28T09:02:35.559811Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:189:2213], cookie=161107069466523065, id=0, path="/Root/Folder/Q1") 2025-03-28T09:02:35.559969Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1" 2025-03-28T09:02:35.572299Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:189:2213], cookie=161107069466523065) 2025-03-28T09:02:35.573121Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:194:2218], cookie=4352552600865593339, ids=[], paths=[], recursive=1) 2025-03-28T09:02:35.573226Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:194:2218], cookie=4352552600865593339) 2025-03-28T09:02:35.599293Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:35.599385Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:35.599955Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:35.600276Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:35.652515Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:35.652826Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:234:2248], cookie=9196151986063018689, ids=[], paths=[], recursive=1) 2025-03-28T09:02:35.652918Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:234:2248], cookie=9196151986063018689) 2025-03-28T09:02:35.653594Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:240:2253], cookie=18014932714978501415, id=3, path="") 2025-03-28T09:02:35.653722Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleted quoter resource 3 "Root/Folder" 2025-03-28T09:02:35.683941Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:240:2253], cookie=18014932714978501415) 2025-03-28T09:02:35.684655Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:245:2258], cookie=15353474884467190206, ids=[], paths=[], recursive=1) 2025-03-28T09:02:35.684742Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:245:2258], cookie=15353474884467190206) 2025-03-28T09:02:35.702421Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:35.702523Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:35.708231Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:35.708994Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:35.755459Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:35.755884Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:285:2288], cookie=18044629441550384008, ids=[], paths=[], recursive=1) 2025-03-28T09:02:35.755994Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:285:2288], cookie=18044629441550384008) 2025-03-28T09:02:36.722416Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:36.722520Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:36.743108Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:36.743628Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:36.774321Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:36.774732Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=6973129644326680219, path="/Q1", config={ MaxUnitsPerSecond: 10 }) 2025-03-28T09:02:36.774963Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Q1" 2025-03-28T09:02:36.788465Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=6973129644326680219) 2025-03-28T09:02:36.789103Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=5098001234821636400, path="/Q2", config={ MaxUnitsPerSecond: 10 }) 2025-03-28T09:02:36.789291Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Q2" 2025-03-28T09:02:36.804164Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=5098001234821636400) 2025-03-28T09:02:36.805934Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 3866720108702110141. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-28T09:02:36.806015Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=3866720108702110141) 2025-03-28T09:02:36.809147Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 14298197941087119505. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." } } } ProtocolVersion: 1 } 2025-03-28T09:02:36.809221Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=14298197941087119505) >> TKesusTest::TestAcquireWaiterRelease [GOOD] >> TKesusTest::TestAllocatesResources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2025-03-28T09:02:32.127155Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:32.127295Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:32.163730Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:32.163883Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:32.199604Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:32.199963Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:130:2156], cookie=16627613152299337571, path="/foo/bar/baz") 2025-03-28T09:02:32.213352Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:130:2156], cookie=16627613152299337571, status=SUCCESS) 2025-03-28T09:02:32.214011Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:139:2163], cookie=11966165017593346893) 2025-03-28T09:02:32.226535Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:139:2163], cookie=11966165017593346893) 2025-03-28T09:02:32.227204Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:144:2168], cookie=11723106065688279607, path="/foo/bar/baz") 2025-03-28T09:02:32.240605Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxConfigSet::Complete (sender=[1:144:2168], cookie=11723106065688279607, status=SUCCESS) 2025-03-28T09:02:32.241164Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:149:2173], cookie=17263192149841958993) 2025-03-28T09:02:32.254381Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:149:2173], cookie=17263192149841958993) 2025-03-28T09:02:32.281460Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:32.281570Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:32.282160Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:32.287061Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:32.359851Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:32.360265Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Execute (sender=[1:191:2205], cookie=4185063666313880943) 2025-03-28T09:02:32.383737Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigGet::Complete (sender=[1:191:2205], cookie=4185063666313880943) 2025-03-28T09:02:32.384437Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:199:2212], cookie=14226330794856512846, path="/foo/bar/baz") 2025-03-28T09:02:32.398425Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:199:2212], cookie=14226330794856512846, status=SUCCESS) 2025-03-28T09:02:32.399231Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[1:204:2217], cookie=6130647539583969608, path="/foo/bar/baz") 2025-03-28T09:02:32.399329Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[1:204:2217], cookie=6130647539583969608, status=PRECONDITION_FAILED) 2025-03-28T09:02:33.163551Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:33.163648Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:33.183145Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:33.183330Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:33.211450Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:33.211774Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:132:2158], cookie=4255450343093615111, name="Lock1") 2025-03-28T09:02:33.211865Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:132:2158], cookie=4255450343093615111) 2025-03-28T09:02:33.766302Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:33.766396Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:33.792749Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:33.792851Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:33.808712Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:33.809167Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=17971384879155716119, session=0, seqNo=0) 2025-03-28T09:02:33.809304Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:02:33.835394Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], 
cookie=17971384879155716119, session=1) 2025-03-28T09:02:33.835652Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-28T09:02:33.835776Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-28T09:02:33.835850Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:02:33.851569Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:132:2158], cookie=111) 2025-03-28T09:02:33.852161Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:143:2167], cookie=4933908064275216035, name="Lock1", force=0) 2025-03-28T09:02:33.869394Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:143:2167], cookie=4933908064275216035) 2025-03-28T09:02:33.869936Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:148:2172], cookie=11189625230331512625, name="Sem1", force=0) 2025-03-28T09:02:33.881953Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:148:2172], cookie=11189625230331512625) 2025-03-28T09:02:33.882554Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:153:2177], cookie=18413344930367976096, name="Sem1", limit=42) 2025-03-28T09:02:33.882731Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2025-03-28T09:02:33.899280Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:153:2177], cookie=18413344930367976096) 2025-03-28T09:02:33.899853Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:158:2182], cookie=6713888063629238067, name="Sem1", force=0) 2025-03-28T09:02:33.899951Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 2 "Sem1" 2025-03-28T09:02:33.916496Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:158:2182], cookie=6713888063629238067) 2025-03-28T09:02:33.917040Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:163:2187], cookie=11556006209703189497, name="Sem1", force=0) 2025-03-28T09:02:33.934230Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:163:2187], cookie=11556006209703189497) 2025-03-28T09:02:34.393767Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:34.393879Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:34.410571Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:34.411237Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:34.440755Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:34.441226Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:132:2158], cookie=9836569499616118809, session=0, seqNo=0) 2025-03-28T09:02:34.441363Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:02:34.453651Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:132:2158], cookie=9836569499616118809, session=1) 2025-03-28T09:02:34.453949Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:132:2158], 
cookie=11448011241298025020, session=0, seqNo=0) 2025-03-28T09:02:34.454078Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:02:34.470495Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:132:2158], cookie=11448011241298025020, session=2) 2025-03-28T09:02:34.470810Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=2 from sender=[4:132:2158], cookie=98465197613914476 2025-03-28T09:02:34.471326Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:144:2168], cookie=719315465196357842, name="Sem1", limit=3) 2025-03-28T09:02:34.471460Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-03-28T09:02:34.486997Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:144:2168], cookie=719315465196357842) 2025-03-28T09:02:34.487295Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:132:2158], cookie=112, name="Sem1") 2025-03-28T09:02:34.487378Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:132:2158], cookie=112) 2025-03-28T09:02:34.487610Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:132:2158], cookie=113, name="Sem1") 2025-03-28T09:02:34.487668Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:132:2158], cookie=113) 2025-03-28T09:02:34.487916Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:132:2158], cookie=3040438016621986807, session=2, seqNo=0) 2025-03-28T09:02:34.503902Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:132:2158], cookie=3040438016621986807, session=2) 2025-03-28T09:02:34.504211Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:132:2158], cookie=114, name="Sem1") 2025-03-28T09:02:34.504293Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:132:2158], cookie=114) 2025-03-28T09:02:34.504519Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:132:2158], cookie=115, name="Sem1") 2025-03-28T09:02:34.504577Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:132:2158], cookie=115) 2025-03-28T09:02:34.505041Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[4:151:2175], cookie=12597182900082904880, name="Sem1") 2025-03-28T09:02:34.522182Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[4:151:2175], cookie=12597182900082904880) 2025-03-28T09:02:34.522649Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=116, session=1, semaphore="Sem1" count=1) 2025-03-28T09:02:34.522816Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-03-28T09:02:34.538244Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=116) 2025-03-28T09:02:34.538631Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=117, session=2, semaphore="Sem1" count=2) 2025-03-28T09:02:34.538801Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-03-28T09:02:34.553011Z node 4 :KESUS_TABLET 
DEBUG: [72057594 ... 640132Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem2" 2025-03-28T09:02:35.652510Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:202:2220], cookie=1164655364991494870) 2025-03-28T09:02:35.652815Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=126, session=1, semaphore="Sem2" count=3) 2025-03-28T09:02:35.652947Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Sem2" queue: next order #5 session 1 2025-03-28T09:02:35.666648Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=126) 2025-03-28T09:02:35.667099Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:132:2158], cookie=127, name="Sem2") 2025-03-28T09:02:35.667223Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:132:2158], cookie=127) 2025-03-28T09:02:35.667528Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=128, session=1, semaphore="Sem2" count=3) 2025-03-28T09:02:35.682052Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=128) 2025-03-28T09:02:36.087030Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:36.107370Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:36.119174Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=129, session=1, semaphore="Sem2" count=2) 2025-03-28T09:02:36.132132Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=129) 2025-03-28T09:02:36.132585Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:132:2158], cookie=130, name="Sem2") 2025-03-28T09:02:36.132679Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:132:2158], cookie=130) 2025-03-28T09:02:36.132958Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=131, session=1, semaphore="Sem2" count=1) 2025-03-28T09:02:36.145498Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=131) 2025-03-28T09:02:36.145948Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:132:2158], cookie=132, name="Sem2") 2025-03-28T09:02:36.146059Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:132:2158], cookie=132) 2025-03-28T09:02:36.146376Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:132:2158], cookie=133, name="Sem2") 2025-03-28T09:02:36.146497Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:132:2158], cookie=133) 2025-03-28T09:02:36.838078Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:36.838185Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:36.859814Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:36.860306Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:36.896905Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:36.908962Z node 5 
:KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=17765003893648392386, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2025-03-28T09:02:36.909238Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root1" 2025-03-28T09:02:36.921392Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=17765003893648392386) 2025-03-28T09:02:36.921971Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=12984716065318759095, path="/Root1/Res", config={ }) 2025-03-28T09:02:37.059925Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root1/Res" 2025-03-28T09:02:37.072408Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=12984716065318759095) 2025-03-28T09:02:37.073059Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:146:2170], cookie=8987997124473110031, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2025-03-28T09:02:37.073229Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root2" 2025-03-28T09:02:37.087182Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:146:2170], cookie=8987997124473110031) 2025-03-28T09:02:37.087736Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:151:2175], cookie=1715551821048275059, path="/Root2/Res", config={ }) 2025-03-28T09:02:37.087941Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2025-03-28T09:02:37.099993Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:151:2175], cookie=1715551821048275059) 2025-03-28T09:02:37.100614Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:156:2180], cookie=795097279596353224, path="/Root2/Res/Subres", config={ }) 2025-03-28T09:02:37.101261Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2025-03-28T09:02:37.115511Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:156:2180], cookie=795097279596353224) 2025-03-28T09:02:37.116790Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:161:2185]. Cookie: 10395189958462789119. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-28T09:02:37.116859Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:161:2185], cookie=10395189958462789119) 2025-03-28T09:02:37.161310Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-03-28T09:02:37.205273Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-03-28T09:02:37.238466Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-03-28T09:02:37.239130Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:168:2189]. Cookie: 17244702560175985751. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-03-28T09:02:37.239939Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:171:2192]. Cookie: 14343398974874279065. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-28T09:02:37.239993Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:171:2192], cookie=14343398974874279065) 2025-03-28T09:02:37.273404Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:171:2192]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-03-28T09:02:37.319051Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:171:2192]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2025-03-28T09:02:37.319782Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:176:2196]. Cookie: 5608467134987053125. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2025-03-28T09:02:37.320589Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:161:2185]. Cookie: 13273125997704141927. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-28T09:02:37.320652Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:161:2185], cookie=13273125997704141927) 2025-03-28T09:02:37.321366Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:171:2192]. Cookie: 453660846055023998. 
Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-28T09:02:37.321423Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:171:2192], cookie=453660846055023998) 2025-03-28T09:02:37.354603Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:161:2185]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2025-03-28T09:02:37.354722Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:171:2192]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2025-03-28T09:02:37.355457Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:183:2203]. Cookie: 6902515397205673761. Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitBoundaryRead [GOOD] Test command err: 2025-03-28T09:02:24.797148Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783171554177184:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:24.798453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f2f/r3tmp/tmpfM35Vi/pdisk_1.dat 2025-03-28T09:02:25.605987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:25.618887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:25.621297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:25.682570Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:7334 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:26.166023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:02:26.193164Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:02:26.202349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:26.512517Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.003s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-28T09:02:26.529200Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-28T09:02:26.589689Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.008s,wait=0.004s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-28T09:02:26.602780Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.006s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152546345 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
2025-03-28T09:02:26.885305Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.23, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-28T09:02:26.885662Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-28T09:02:26.885869Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-28T09:02:26.886163Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-28T09:02:26.891416Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.27, eph 3} end=0, 4 blobs 2r (max 2), put Spent{time=0.005s,wait=0.004s,interrupts=1} Part{ 2 pk, lobs 0 +0, (1907 1533 0)b }, ecr=1.000 2025-03-28T09:02:26.901077Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.34, eph 3} end=0, 4 blobs 8r (max 8), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152546345 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-03-28T09:02:27.203327Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-28T09:02:27.267914Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-03-28T09:02:27.267944Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-03-28T09:02:27.267958Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-03-28T09:02:27.276841Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f2f/r3tmp/tmpssY0uh/pdisk_1.dat 2025-03-28T09:02:30.684939Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:30.693078Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:30.749603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:30.749678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:30.754383Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5712 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-28T09:02:31.103089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:31.128688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:31.271411Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.003s,wait=0.002s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-28T09:02:31.293290Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.010s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-28T09:02:31.313821Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-28T09:02:31.344517Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.021s,wait=0.003s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-03-28T09:02:31.394593Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 9r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3307 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152551245 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSu ... 4046644480 2025-03-28T09:02:31.555539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037891 splitOp: 281474976710678:0 alterVersion: 1 at tablet: 72057594046644480 2025-03-28T09:02:31.555941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710678:0 from tablet: 72057594046644480 to tablet: 72075186224037890 cookie: 72057594046644480:3 msg type: 269553152 2025-03-28T09:02:31.556064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710678:0 from tablet: 72057594046644480 to tablet: 72075186224037891 cookie: 72057594046644480:4 msg type: 269553152 2025-03-28T09:02:31.556139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710678, partId: 0, tablet: 72075186224037890 2025-03-28T09:02:31.556149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710678, partId: 0, tablet: 72075186224037891 waiting... 
2025-03-28T09:02:31.610869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037890 2025-03-28T09:02:31.610944Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976710678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710678 TabletId: 72075186224037890 2025-03-28T09:02:31.611225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037891 2025-03-28T09:02:31.611266Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976710678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976710678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976710678 TabletId: 72075186224037891 2025-03-28T09:02:31.611290Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710678:0 3 -> 131 2025-03-28T09:02:31.611695Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:02:31.611779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:02:31.611834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:02:31.611855Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976710678:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:02:31.611886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TSplitMerge TTransferData operationId# 281474976710678:0 Starting split on src datashard 72075186224037888 splitOpId# 281474976710678:0 at tablet 72057594046644480 2025-03-28T09:02:31.612196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553154 2025-03-28T09:02:31.612287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710678, partId: 0, tablet: 72075186224037888 2025-03-28T09:02:31.615082Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-28T09:02:31.615372Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.27, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-28T09:02:31.615565Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.28, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-28T09:02:31.615746Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.29, eph 1} end=0, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-28T09:02:31.615961Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.30, eph -9223372036854775808} end=0, 0 blobs 0r (max 0), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2025-03-28T09:02:31.619122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710678, at schemeshard: 72057594046644480 2025-03-28T09:02:31.619148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 
281474976710678, ready parts: 0/1, is published: true 2025-03-28T09:02:31.619174Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710678, at schemeshard: 72057594046644480 2025-03-28T09:02:31.632950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037888 2025-03-28T09:02:31.633016Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976710678:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037888 2025-03-28T09:02:31.633664Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710678:0 131 -> 132 2025-03-28T09:02:31.633763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-03-28T09:02:31.634249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:02:31.634380Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:02:31.634397Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710678, path id: [OwnerId: 72057594046644480, LocalPathId: 3] 2025-03-28T09:02:31.634611Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:02:31.634626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:7486783195675509008:2242], at schemeshard: 72057594046644480, txId: 281474976710678, path id: 3 2025-03-28T09:02:31.634665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:02:31.634712Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976710678:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:02:31.634735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Notify src datashard 72075186224037888 on partitioning changed splitOp# 281474976710678 at tablet 72057594046644480 2025-03-28T09:02:31.635839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553158 2025-03-28T09:02:31.637491Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710678 2025-03-28T09:02:31.637575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710678 2025-03-28T09:02:31.637592Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710678 2025-03-28T09:02:31.637610Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710678, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 4 2025-03-28T09:02:31.637627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 
3] was 7 2025-03-28T09:02:31.637691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710678, ready parts: 0/1, is published: true 2025-03-28T09:02:31.637980Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710678 2025-03-28T09:02:31.647060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976710678 TabletId: 72075186224037888 2025-03-28T09:02:31.647110Z node 2 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976710678:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-03-28T09:02:31.647182Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710678:0 progress is 1/1 2025-03-28T09:02:31.647195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710678 ready parts: 1/1 2025-03-28T09:02:31.647216Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710678:0 progress is 1/1 2025-03-28T09:02:31.647231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710678 ready parts: 1/1 2025-03-28T09:02:31.647249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710678, ready parts: 1/1, is published: true 2025-03-28T09:02:31.647289Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7486783199970476783:2363] message: TxId: 281474976710678 2025-03-28T09:02:31.647326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710678 ready parts: 1/1 2025-03-28T09:02:31.647345Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710678:0 2025-03-28T09:02:31.647354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710678:0 2025-03-28T09:02:31.647514Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-03-28T09:02:31.648011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:02:31.648023Z node 2 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976710678:0 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152551245 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) >> Cdc::KeysOnlyLog[PqRunner] [GOOD] >> Cdc::KeysOnlyLog[YdsRunner] |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] >> TTransferTests::CreateDropRecreate [GOOD] >> TTransferTests::ConsistencyLevel >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData >> TLocksTest::SetEraseSet [GOOD] >> TKesusTest::TestSessionDestroy [GOOD] >> TKesusTest::TestSessionStealing ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DataShardReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 61420, MsgBus: 25143 2025-03-28T08:59:06.801858Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782319752694204:2262];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:06.802434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001520/r3tmp/tmphOJiGw/pdisk_1.dat 2025-03-28T08:59:10.042277Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.009230Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782319752694204:2262];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:12.009310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.009329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:12.249171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.249282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:12.251903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:12.292558Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61420, node 1 2025-03-28T08:59:12.483360Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:12.483388Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:12.483395Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:12.483528Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25143 TClient is connected to server localhost:25143 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:17.905290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:18.432888Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T08:59:18.770986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:24.311526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782397062106411:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:24.311630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:24.318815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782397062106438:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:24.322952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T08:59:24.340106Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T08:59:24.340886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782397062106440:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T08:59:24.442729Z node 1 :TX_PROXY ERROR: Actor# [1:7486782397062106491:2634] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:26.205672Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=4;memory=1048576; 2025-03-28T08:59:26.205705Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 4. [Mem] memory 1048576 NOT granted 2025-03-28T08:59:26.214217Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486782405652041148:2379], TxId: 281474976710661, task: 4. Ctx: { TraceId : 01jqdzmczba3nbwx798cg8cjzw. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTJlZDM1ODMtNzRmMTc1OTktMjVlNjgxNjktMTc4YWVkODQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 4: 10, host: ghrun-mdsjyjltvm, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 50B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 5, started at: 2025-03-28T08:59:26.188607Z }, code: 2029 }. 2025-03-28T08:59:26.218263Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=5;memory=1048576; 2025-03-28T08:59:26.218283Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 5. [Mem] memory 1048576 NOT granted 2025-03-28T08:59:26.218635Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486782405652041149:2380], TxId: 281474976710661, task: 5. Ctx: { TraceId : 01jqdzmczba3nbwx798cg8cjzw. SessionId : ydb://session/3?node_id=1&id=ZTJlZDM1ODMtNzRmMTc1OTktMjVlNjgxNjktMTc4YWVkODQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 5: 10, host: ghrun-mdsjyjltvm, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 40B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 4, started at: 2025-03-28T08:59:26.188607Z }, code: 2029 }. 2025-03-28T08:59:26.219372Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486782405652041145:2376], TxId: 281474976710661, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTJlZDM1ODMtNzRmMTc1OTktMjVlNjgxNjktMTc4YWVkODQ=. TraceId : 01jqdzmczba3nbwx798cg8cjzw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486782405652041120:2361], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-03-28T08:59:26.219515Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=2;memory=1048576; 2025-03-28T08:59:26.219526Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 2. [Mem] memory 1048576 NOT granted 2025-03-28T08:59:26.219807Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486782405652041146:2377], TxId: 281474976710661, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=ZTJlZDM1ODMtNzRmMTc1OTktMjVlNjgxNjktMTc4YWVkODQ=. TraceId : 01jqdzmczba3nbwx798cg8cjzw. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-mdsjyjltvm, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 0B, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-03-28T08:59:26.188607Z }, code: 2029 }. 2025-03-28T08:59:26.220128Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486782405652041147:2378], TxId: 281474976710661, task: 3. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZTJlZDM1ODMtNzRmMTc1OTktMjVlNjgxNjktMTc4YWVkODQ=. TraceId : 01jqdzmczba3nbwx798cg8cjzw. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486782405652041120:2361], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-03-28T08:59:26.244462Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTJlZDM1ODMtNzRmMTc1OTktMjVlNjgxNjktMTc4YWVkODQ=, ActorId: [1:7486782397062106409:2361], ActorState: ExecuteState, TraceId: 01jqdzmczba3nbwx798cg8cjzw, Create QueryResponse for error on request, msg:
: Error: Mkql memory limit exceeded, allocated by task 4: 10, host: ghrun-mdsjyjltvm, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 50B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 5, started at: 2025-03-28T08:59:26.188607Z } , code: 2029 query_phases { duration_us: 62608 table_access { name: "/Root/LargeTable" partitions_count: 2 } cpu_time_us: 68661 affected_shards: 8 } compilation { duration_us: 1702432 cpu_time_us: 1653848 } process_cpu_time_us: 1775 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tabl ... efault, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:00:02.433825Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:00:02.573591Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486782562678066816:2474], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:00:02.679486Z node 3 :TX_PROXY ERROR: Actor# [3:7486782562678066877:3508] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:00:06.175181Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:00:06.175203Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:08.429928Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTAwODcxOGEtMzVhYjg4N2MtNGY0M2NjODMtZjZhYjM1MGY=, ActorId: [3:7486782588447871005:2533], ActorState: ExecuteState, TraceId: 01jqdznsv6c38m0zdsz5gxtkv6, ReplyQueryCompileError, status TIMEOUT remove tx with tx_id:
: Error: Query compilation timed out. assertion failed at ydb/core/kqp/ut/query/kqp_limits_ut.cpp:787, virtual void NKikimr::NKqp::NTestSuiteKqpLimits::TTestCaseComputeNodeMemoryLimit::Execute_(NUnitTest::TTestContext &): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (TIMEOUT != PRECONDITION_FAILED) , with diff: (TIM|PR)E(|C)O(U|NDI)T(|ION_FAILED)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1949767B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1995C4FF
2. /tmp//-S/ydb/core/kqp/ut/query/kqp_limits_ut.cpp:787: Execute_ @ 0x18D19A8E
3. /tmp//-S/ydb/core/kqp/ut/query/kqp_limits_ut.cpp:56: operator() @ 0x18D71E37
4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_limits_ut.cpp:56:1) &> @ 0x18D71E37
5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_limits_ut.cpp:56:1) &> @ 0x18D71E37
6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D71E37
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D71E37
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19993525
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19993525
10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19993525
11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19963078
12. /tmp//-S/ydb/core/kqp/ut/query/kqp_limits_ut.cpp:56: Execute @ 0x18D71003
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19964945
14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1998DA9C
15. ??:0: ?? @ 0x7FDFF4104D8F
16. ??:0: ?? @ 0x7FDFF4104E3F
17. ??:0: ?? @ 0x1640D028
Trying to start YDB, gRPC: 28149, MsgBus: 19349 2025-03-28T09:02:15.349566Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783133534893332:2251];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:15.434399Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001520/r3tmp/tmpJuG7WP/pdisk_1.dat 2025-03-28T09:02:15.917342Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:15.954499Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:15.962931Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:15.988528Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28149, node 4 2025-03-28T09:02:16.311313Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:16.311345Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:16.311360Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:16.311568Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19349 TClient is connected to server localhost:19349 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:17.191731Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.241365Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.375742Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.858589Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.962707Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:20.182883Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486783133534893332:2251];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:20.182978Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:21.936338Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783159304698675:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:21.936469Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:22.047502Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:22.136763Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:22.261839Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:22.362938Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:22.415034Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:22.496054Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:22.580078Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783163599666494:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:22.580171Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:22.580560Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783163599666499:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:22.589592Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:22.606142Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486783163599666501:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:02:22.679395Z node 4 :TX_PROXY ERROR: Actor# [4:7486783163599666555:3458] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:24.127351Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:02:29.862256Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjI0MzgyZDktYTY5MmEyNzUtYWFkZmRmNjEtYzVmOTgzOGM=, ActorId: [4:7486783172189601430:2497], ActorState: ExecuteState, TraceId: 01jqdzt39542x7zvjw9xq7sazg, Create QueryResponse for error on request, msg: >> TKesusTest::TestAllocatesResources [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadline >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] >> DataShardWrite::InsertImmediate [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] Test command err: 2025-03-28T09:02:34.165113Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:34.165260Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:34.183846Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:34.183930Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:34.208374Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:34.877885Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:34.877978Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:34.896967Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:34.897153Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:34.924541Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:35.649417Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:35.649529Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:35.688744Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:35.688865Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:35.708463Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:36.743669Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:36.743761Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:36.770655Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:36.770902Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:36.801933Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:36.806822Z node 4 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 5 2025-03-28T09:02:36.807394Z 
node 4 :KESUS_TABLET TRACE: Got TEvServerDisconnected([4:185:2157]) 2025-03-28T09:02:37.535583Z node 6 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:37.535688Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:37.556281Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:37.556423Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute ... waiting for register request 2025-03-28T09:02:37.581831Z node 6 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR ... waiting for register request (done) ... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR 2025-03-28T09:02:37.582713Z node 6 :PIPE_SERVER ERROR: [72057594037927937] NodeDisconnected NodeId# 7 2025-03-28T09:02:37.583229Z node 6 :KESUS_TABLET TRACE: Got TEvServerDisconnected([6:187:2159]) >> TTxDataShardUploadRows::TestUploadRows [GOOD] >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TKesusTest::TestSessionStealing [GOOD] >> TKesusTest::TestSessionStealingAnyKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] Test command err: 2025-03-28T09:02:38.312345Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:02:38.312868Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/00113a/r3tmp/tmpUM2Au5/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:02:38.313468Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/00113a/r3tmp/tmpUM2Au5/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/00113a/r3tmp/tmpUM2Au5/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6936764258273241420 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:02:38.329132Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:02:38.329686Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/00113a/r3tmp/tmpUM2Au5/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:02:38.329894Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/00113a/r3tmp/tmpUM2Au5/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/00113a/r3tmp/tmpUM2Au5/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12497158277315202459 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD] Test command err: 2025-03-28T09:02:34.517953Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:34.518094Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:34.550020Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:34.550129Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:34.587663Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:34.588212Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=4954609912433725275, session=0, seqNo=0) 2025-03-28T09:02:34.588417Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:02:34.607613Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=4954609912433725275, session=1) 2025-03-28T09:02:34.607984Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=7396599087681333096, session=0, seqNo=0) 2025-03-28T09:02:34.608121Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:02:34.620532Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=7396599087681333096, session=2) 2025-03-28T09:02:34.620865Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=1) 2025-03-28T09:02:34.621020Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-28T09:02:34.621151Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:02:34.633402Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-03-28T09:02:34.633748Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=2, semaphore="Lock1" count=18446744073709551615) 2025-03-28T09:02:34.634096Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=333, session=2, semaphore="Lock1" count=1) 2025-03-28T09:02:34.634250Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #2 session 2 2025-03-28T09:02:34.655474Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2025-03-28T09:02:34.655595Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=333) 2025-03-28T09:02:34.656222Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:146:2170], cookie=10060156658668752891, name="Lock1") 2025-03-28T09:02:34.656334Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:146:2170], cookie=10060156658668752891) 2025-03-28T09:02:35.405381Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:35.405529Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:35.440392Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:35.440703Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:35.475621Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:35.476221Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=9681733910003701704, session=0, seqNo=0) 2025-03-28T09:02:35.476408Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:02:35.495487Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=9681733910003701704, session=1) 2025-03-28T09:02:35.495856Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=6454617813027223641, session=0, seqNo=0) 2025-03-28T09:02:35.495996Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:02:35.515406Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=6454617813027223641, session=2) 2025-03-28T09:02:35.515767Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-28T09:02:35.515934Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-28T09:02:35.516033Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:02:35.528652Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=111) 2025-03-28T09:02:35.529025Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=222, session=2, semaphore="Lock1" count=1) 2025-03-28T09:02:35.529402Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615) 2025-03-28T09:02:35.543366Z node 2 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=222) 2025-03-28T09:02:35.543453Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=333) 2025-03-28T09:02:35.544025Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:148:2172], cookie=6512151268879020457, name="Lock1") 2025-03-28T09:02:35.544132Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:148:2172], cookie=6512151268879020457) 2025-03-28T09:02:35.544639Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:151:2175], cookie=6860873887926112696, name="Lock1") 2025-03-28T09:02:35.544724Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:151:2175], cookie=6860873887926112696) 2025-03-28T09:02:36.566761Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:36.566905Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:36.590395Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:36.590502Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:36.615348Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:36.615848Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=9779369217252840281, session=0, seqNo=0) 2025-03-28T09:02:36.615999Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:02:36.647388Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=9779369217252840281, session=1) 2025-03-28T09:02:36.647750Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=822324492950037438, session=0, seqNo=0) 2025-03-28T09:02:36.647890Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:02:36.667353Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=822324492950037438, session=2) 2025-03-28T09:02:36.668104Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-28T09:02:36.668281Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-28T09:02:36.668390Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:02:36.680540Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:132:2158], cookie=111) 2025-03-28T09:02:36.680867Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:132:2158], cookie=222, session=2, semaphore="Lock1" count=1) 2025-03-28T09:02:36.681199Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:132:2158], cookie=333, session=2, semaphore="Lock1" count=1) 2025-03-28T09:02:36.681274Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-03-28T09:02:36.696145Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:132:2158], cookie=222) 2025-03-28T09:02:36.696231Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreAcquire::Complete (sender=[3:132:2158], cookie=333) 2025-03-28T09:02:36.696753Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:151:2175], cookie=2905040401078602418, name="Lock1") 2025-03-28T09:02:36.696853Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:151:2175], cookie=2905040401078602418) 2025-03-28T09:02:36.697242Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:154:2178], cookie=3448645937107373269, name="Lock1") 2025-03-28T09:02:36.697298Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:154:2178], cookie=3448645937107373269) 2025-03-28T09:02:36.732166Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:36.732292Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:36.732941Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:36.733674Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:36.809472Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:36.809657Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:02:36.810059Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:194:2208], cookie=15499359490722048550, name="Lock1") 2025-03-28T09:02:36.810152Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:194:2208], cookie=15499359490722048550) 2025-03-28T09:02:36.810795Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:202:2215], cookie=4895685913821106583, name="Lock1") 2025-03-28T09:02:36.810874Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:202:2215], cookie=4895685913821106583) 2025-03-28T09:02:37.613649Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:37.613768Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:37.679836Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:37.687203Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:37.727482Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:37.728028Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:132:2158], cookie=6850508299927272002, session=0, seqNo=0) 2025-03-28T09:02:37.728196Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:02:37.740697Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:132:2158], cookie=6850508299927272002, session=1) 2025-03-28T09:02:37.741022Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:132:2158], cookie=17753601779336740416, session=0, seqNo=0) 2025-03-28T09:02:37.741161Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:02:37.763421Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:132:2158], cookie=17753601779336740416, session=2) 2025-03-28T09:02:37.763800Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 
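Aside on the semaphore traffic above: in these TKesusTest runs a session acquiring "Lock1" with count=18446744073709551615 (UINT64_MAX) takes every unit of the semaphore and so acts as an exclusive lock, while count=1 is a shared grab that queues behind it until the owner releases. A minimal sketch of that counting-semaphore-with-FIFO-queue semantics follows — an illustration of the logged behavior only, not YDB's Kesus implementation; all names are invented.

```cpp
#include <cstdint>
#include <deque>
#include <iostream>
#include <limits>

// Toy model: an ephemeral semaphore has Limit units; acquiring
// count == UINT64_MAX therefore behaves as an exclusive lock, while
// count == 1 is a shared grab that waits behind it.
struct TSemaphoreModel {
    std::uint64_t Limit = std::numeric_limits<std::uint64_t>::max();
    std::uint64_t Used = 0;
    std::uint64_t NextOrder = 1;
    struct TWaiter { std::uint64_t Session; std::uint64_t Count; };
    std::deque<TWaiter> Queue;

    void Acquire(std::uint64_t session, std::uint64_t count) {
        Queue.push_back({session, count});
        Process();
    }

    void Release(std::uint64_t count) {
        Used -= count;  // owner returns its units; waiters may now fit
        Process();
    }

    // Grant waiters strictly in FIFO order while the head fits, mirroring
    // the 'Processing semaphore 1 "Lock1" queue: next order #N session M'
    // records in the log above.
    void Process() {
        while (!Queue.empty() && Queue.front().Count <= Limit - Used) {
            Used += Queue.front().Count;
            std::cout << "next order #" << NextOrder++
                      << " session " << Queue.front().Session << "\n";
            Queue.pop_front();
        }
    }
};

int main() {
    TSemaphoreModel lock1;
    lock1.Acquire(1, std::numeric_limits<std::uint64_t>::max()); // order #1 (exclusive)
    lock1.Acquire(2, 1);  // queued: no units left until session 1 releases
    lock1.Release(std::numeric_limits<std::uint64_t>::max());    // order #2 (session 2)
}
```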
2025-03-28T09:02:37.763972Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-28T09:02:37.764070Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:02:37.779741Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=111) 2025-03-28T09:02:37.780120Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:132:2158], cookie=222, session=2, semaphore="Lock1" count=1) 2025-03-28T09:02:37.780511Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:132:2158], cookie=333, name="Lock1") 2025-03-28T09:02:37.780601Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-03-28T09:02:37.796847Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:132:2158], cookie=222) 2025-03-28T09:02:37.796942Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:132:2158], cookie=333) 2025-03-28T09:02:38.461832Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:38.461939Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:38.522531Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:38.527124Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:38.567509Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:38.590416Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=11970195885521848407, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-03-28T09:02:38.590671Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-28T09:02:38.607440Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=11970195885521848407) 2025-03-28T09:02:38.608124Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=6692158288906790045, path="/Root/Res", config={ }) 2025-03-28T09:02:38.608404Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-03-28T09:02:38.622796Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=6692158288906790045) 2025-03-28T09:02:38.631387Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 6527111630051458572. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-28T09:02:38.631500Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=6527111630051458572) 2025-03-28T09:02:38.632231Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:146:2170]. Cookie: 11448415676681090640. 
Data: { } 2025-03-28T09:02:38.632287Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:146:2170], cookie=11448415676681090640) 2025-03-28T09:02:38.682223Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-03-28T09:02:38.731107Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-03-28T09:02:38.767107Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-03-28T09:02:38.801793Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2025-03-28T09:02:38.851291Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } >> TDynamicNameserverTest::CacheMissDifferentDeadline [GOOD] >> TTicketParserTest::LoginBad >> KqpExplain::ComplexJoin [GOOD] >> TTicketParserTest::NebiusAuthenticationUnavailable >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |75.5%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut >> TTransferTests::ConsistencyLevel [GOOD] >> TTransferTests::Alter >> TTicketParserTest::AccessServiceAuthenticationOk >> TKesusTest::TestSessionStealingAnyKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetEraseSet [GOOD] Test command err: 2025-03-28T09:02:23.431751Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783164697257395:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:23.432372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f33/r3tmp/tmpJbS1lW/pdisk_1.dat 2025-03-28T09:02:24.219607Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:24.261137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:24.261230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:24.271220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20082 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:24.674079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:24.699323Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:24.718415Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:02:24.723867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:24.999948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:25.102894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
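On the quoter test a little above (resource "Root/Res" with MaxUnitsPerSecond: 100): after the subscribe, the tablet streams periodic TEvResourcesAllocated records with Amount: 10 to the consumer. Below is a token-bucket sketch of that allocation pattern, assuming refill at MaxUnitsPerSecond with up to one second of burst — an assumption made for illustration; the real scheduler is the hierarchical DRR configured in the log, not a plain token bucket.

```cpp
#include <algorithm>
#include <cstdio>

// Token-bucket sketch of the allocation stream (illustrative only).
struct TTokenBucket {
    double Rate;    // units per second (MaxUnitsPerSecond: 100)
    double Burst;   // bucket capacity
    double Tokens;  // current fill
    double Now = 0; // model time, seconds

    void Advance(double dt) {
        Now += dt;
        Tokens = std::min(Burst, Tokens + Rate * dt);
    }
    double TakeUpTo(double want) {
        double got = std::min(want, Tokens);
        Tokens -= got;
        return got;
    }
};

int main() {
    TTokenBucket bucket{/*Rate*/ 100.0, /*Burst*/ 100.0, /*Tokens*/ 100.0};
    // A subscriber draining 10 units every ~50 ms, like the repeated
    // "TEvResourcesAllocated ... Amount: 10" records in the log above.
    for (int i = 0; i < 5; ++i) {
        bucket.Advance(0.050);
        std::printf("t=%.2fs allocated %.0f units\n",
                    bucket.Now, bucket.TakeUpTo(10.0));
    }
}
```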
2025-03-28T09:02:25.184827Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710662: Validate (783): Key validation status: 3 2025-03-28T09:02:25.186821Z node 1 :TX_PROXY ERROR: Actor# [1:7486783173287192737:2503] txid# 281474976710662 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-03-28T09:02:25.186925Z node 1 :TX_PROXY ERROR: Actor# [1:7486783173287192737:2503] txid# 281474976710662 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-03-28T09:02:25.186962Z node 1 :TX_PROXY ERROR: Actor# [1:7486783173287192737:2503] txid# 281474976710662 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-03-28T09:02:25.196590Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710663: Validate (783): Key validation status: 3 2025-03-28T09:02:25.196932Z node 1 :TX_PROXY ERROR: Actor# [1:7486783173287192759:2510] txid# 281474976710663 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-03-28T09:02:25.196994Z node 1 :TX_PROXY ERROR: Actor# [1:7486783173287192759:2510] txid# 281474976710663 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-03-28T09:02:25.197035Z node 1 :TX_PROXY ERROR: Actor# [1:7486783173287192759:2510] txid# 281474976710663 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-03-28T09:02:25.200564Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710664: Validate (783): Key validation status: 3 2025-03-28T09:02:25.200860Z node 1 :TX_PROXY ERROR: Actor# [1:7486783173287192766:2514] txid# 281474976710664 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-03-28T09:02:25.200942Z node 1 :TX_PROXY ERROR: Actor# [1:7486783173287192766:2514] txid# 281474976710664 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-03-28T09:02:25.200979Z node 1 :TX_PROXY ERROR: Actor# [1:7486783173287192766:2514] txid# 281474976710664 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-03-28T09:02:25.203991Z node 1 :TX_DATASHARD ERROR: Shard 72075186224037888 cannot parse tx 281474976710665: Validate (783): Key validation status: 3 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2025-03-28T09:02:25.204239Z node 1 :TX_PROXY ERROR: Actor# [1:7486783173287192772:2517] txid# 281474976710665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2025-03-28T09:02:25.204298Z node 1 :TX_PROXY ERROR: Actor# [1:7486783173287192772:2517] txid# 281474976710665 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2025-03-28T09:02:25.204330Z node 1 :TX_PROXY ERROR: Actor# [1:7486783173287192772:2517] txid# 281474976710665 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2025-03-28T09:02:28.787146Z node 2 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783189781010747:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f33/r3tmp/tmpmN99KD/pdisk_1.dat 2025-03-28T09:02:28.861227Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:02:29.016534Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:29.017632Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:29.017728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:29.043299Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27511 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-28T09:02:29.368333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:29.406156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:29.497442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:29.573150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
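In the TLocksTest output above, a datashard Prepare failure ([SCHEME_ERROR] Validate (783): Key validation status: 3) surfaces at the proxy as error code ProxyShardNotAvailable together with "invalidateDistCache: 1", i.e. the proxy drops its shard-resolution cache before any retry. A hedged sketch of the client-side policy this implies — an illustrative three-bucket classification, not the YDB SDK's actual retry table:

```cpp
#include <string>

enum class ERetryAction {
    RefreshAndRetry,  // stale shard resolution: refetch metadata, retry
    BackoffAndRetry,  // transient overload/unavailability
    Fail,             // the request itself is wrong
};

// Map a proxy-level error code to a retry action (sketch only).
ERetryAction Classify(const std::string& proxyCode) {
    if (proxyCode == "ProxyShardNotAvailable")
        return ERetryAction::RefreshAndRetry;  // matches "invalidateDistCache: 1"
    if (proxyCode == "OVERLOADED" || proxyCode == "UNAVAILABLE")
        return ERetryAction::BackoffAndRetry;
    return ERetryAction::Fail;                 // e.g. a genuine SCHEME_ERROR
}
```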
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f33/r3tmp/tmpF9ctmu/pdisk_1.dat 2025-03-28T09:02:33.714827Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:33.819326Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:33.887701Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:33.887779Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:33.889484Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10495 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:34.204278Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:34.213043Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:34.229412Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:02:34.234351Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:02:34.321384Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:02:34.381973Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... >> DataShardWrite::RejectOnChangeQueueOverflow [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadline [GOOD] Test command err: 2025-03-28T09:02:40.119555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:02:40.119637Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... 
blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationGood >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout >> TTicketParserTest::BulkAuthorizationRetryError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ComplexJoin [GOOD] Test command err: 2025-03-28T08:59:10.327452Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782337101480687:2278];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:10.327906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014a4/r3tmp/tmpsQw9jI/pdisk_1.dat 2025-03-28T08:59:12.931982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:14.806918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:15.312539Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782337101480687:2278];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:15.312596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:17.135186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:17.135210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:18.455309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:18.455532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:18.459680Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:18.666243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:18.666474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:18.775847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3676, node 1 2025-03-28T08:59:20.645458Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
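The TDynamicNameserverTest lines above ("... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ...") show the actor-test technique of holding back matching in-flight events to force a cache miss. A minimal model of such an interceptor is sketched below; the shape is assumed for illustration — the real NActors test runtime exposes its own API for this.

```cpp
#include <deque>
#include <functional>

// Minimal event-interceptor model (assumed shape, not the NActors API).
template <class TEvent>
struct TEventInterceptor {
    std::function<bool(const TEvent&)> Match;  // e.g. "TEvResolveNode to NODE_BROKER_ACTOR"
    std::deque<TEvent> Blocked;

    // Called on every in-flight event; true means "captured, not delivered".
    bool MaybeBlock(const TEvent& ev) {
        if (!Match || !Match(ev)) {
            return false;
        }
        Blocked.push_back(ev);  // the "... blocking ..." lines above
        return true;
    }

    // The later "unblock" step of such tests: deliver everything held back.
    template <class TDeliver>
    void ReleaseAll(TDeliver deliver) {
        for (auto& ev : Blocked) {
            deliver(ev);
        }
        Blocked.clear();
    }
};
```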
2025-03-28T08:59:20.645480Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:20.645486Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:20.645785Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T08:59:22.589806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:19690 2025-03-28T08:59:24.000033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T08:59:24.074919Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782397456332286:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:24.074969Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Database/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T08:59:24.157582Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T08:59:24.556322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:24.556382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:24.595665Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T08:59:24.616675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:25.590030Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:25.591468Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:25.599873Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T08:59:25.602252Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T08:59:25.612994Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T08:59:25.613655Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T08:59:25.614303Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T08:59:25.615151Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T08:59:25.615989Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T08:59:25.616610Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T08:59:25.616650Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T08:59:25.645117Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:27.773353Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T08:59:27.773407Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 
2025-03-28T08:59:27.801459Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:28.949050Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T08:59:28.949101Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T08:59:28.966953Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T08:59:28.967393Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T08:59:28.967635Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T08:59:28.967882Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T08:59:28.968100Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T08:59:28.981726Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T08:59:28.987731Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T08:59:28.994559Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7486782410341234717:2285] 2025-03-28T08:59:29.022079Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T08:59:29.079885Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782397456332286:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:29.079949Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Database/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:29.110157Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T08:59:29.110182Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T08:59:29.110418Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T08:59:29.251432Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T08:59:29.361316Z node 2 :TX_PROXY ERROR: Actor# [2:7486782418931169503:2448] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2025-03-28T08:59:29.669983Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-03-28T08:59:29.712676Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T08:59:29.712916Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:7486782418931169572:2331], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T08:59:29.800113Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T08:59:29.861800Z node 2 :TX_PROXY ERROR: Actor# [2:7486782418931169581:2492] txid# 281474976720658, issues: { message: "Schemeshard not available" severity: 1 } 2025-03-28T08:59:29.868595Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2025-03-28T08:59:29.873342Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7486782418931169603:2507] 2025-03-28T08:59:29.874028Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7486782418931169603:2507], schemeshard id = 72075186224037897 2025-03-28T08:59:30.091304Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T08:59:30.564304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:1, at schemeshard: 72075186224037897 2025-03-28T08:59:30.615295Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720659 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T08:59:30.624979Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720659 2025-03-28T08:59:31.779516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:31.779715Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:33.007200Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720659. Doublechecking... 2025-03-28T08:59:33.268846Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T08:59:41.536321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782465950500966:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:41.536501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions ... :23.746954Z node 5 :TX_PROXY ERROR: Actor# [5:7486783164545283508:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Logs"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1},{"InternalOperatorId":1}],"GroupBy":"item.App","Aggregation":"{MAX(item.Message),MIN(item.Message)}","Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2}],"E-Rows":"No estimate","Predicate":"item.Ts \u003E 1 AND item.Ts \u003C= 4 OR item.App == \"ydb\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Logs","E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"Aggregate-Filter-TableFullScan"}],"Node Type":"HashShuffle","KeyColumns":["App"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Logs","reads":[{"columns":["App","Message","Ts"],"scan_by":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["App (-∞, +∞)","Ts (-∞, +∞)","Host (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/Logs","E-Rows":"No estimate","Table":"Logs","ReadColumns":["App","Message","Ts"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.Ts \u003E 1 AND item.Ts \u003C= 4 OR item.App == \"ydb\"","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Operators":[{"GroupBy":"item.App","Aggregation":"{MAX(item.Message),MIN(item.Message)}","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"App\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 20475, MsgBus: 65340 2025-03-28T09:02:27.447346Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486783183471426129:2221];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014a4/r3tmp/tmpQoqoAW/pdisk_1.dat 2025-03-28T09:02:27.574007Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:02:27.918158Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:27.921207Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:27.921296Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:27.926377Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20475, node 6 2025-03-28T09:02:28.143352Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:28.143381Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:28.143390Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:28.143544Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65340 TClient is connected to server localhost:65340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:29.223950Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:29.235156Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:29.266900Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:29.468558Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:29.898567Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:30.057037Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
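For reference, the explain JSON in the KqpExplain output above describes a TableFullScan of /Root/Logs feeding a Filter with predicate item.Ts > 1 AND item.Ts <= 4 OR item.App == "ydb", a HashShuffle on App, and per-App MIN/MAX(item.Message) aggregation. The same pipeline restated as plain C++ — a literal reading of the plan nodes, not the test's YQL source:

```cpp
#include <algorithm>
#include <map>
#include <string>
#include <vector>

struct TRow { std::string App; std::string Message; long Ts; };
struct TMinMax { std::string Min, Max; bool Init = false; };

// One-to-one restatement of the plan nodes above.
std::map<std::string, TMinMax> RunPlan(const std::vector<TRow>& logs) {
    std::map<std::string, TMinMax> byApp;          // HashShuffle(App) + final Aggregate
    for (const TRow& row : logs) {                 // TableFullScan of /Root/Logs
        if (!((row.Ts > 1 && row.Ts <= 4) || row.App == "ydb")) {  // Filter
            continue;
        }
        TMinMax& agg = byApp[row.App];             // intermediate Aggregate per App
        if (!agg.Init) {
            agg.Min = agg.Max = row.Message;
            agg.Init = true;
        } else {
            agg.Min = std::min(agg.Min, row.Message);  // MIN(item.Message)
            agg.Max = std::max(agg.Max, row.Message);  // MAX(item.Message)
        }
    }
    return byApp;
}
```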
2025-03-28T09:02:32.422597Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486783183471426129:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:32.458281Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:32.926870Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486783204946264204:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:32.926997Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:33.033980Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:33.137809Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:33.258215Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:33.345672Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:33.476932Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:33.620920Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:33.715757Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486783209241232026:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:33.715861Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:33.716255Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486783209241232031:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:33.721799Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:33.743077Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:02:33.743353Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486783209241232033:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:02:33.830949Z node 6 :TX_PROXY ERROR: Actor# [6:7486783209241232091:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:35.584195Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:36.181504Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:02:36.273095Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> TTransferTests::Alter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD] Test command err: 2025-03-28T09:02:36.285329Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:36.285475Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:36.320439Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:36.320545Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:36.352911Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:36.353502Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=6811416586630383133, session=0, seqNo=0) 2025-03-28T09:02:36.353688Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:02:36.366802Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=6811416586630383133, session=1) 2025-03-28T09:02:36.368799Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[1:130:2156], cookie=12437192385589849994, session=2) 2025-03-28T09:02:36.368902Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:130:2156], cookie=12437192385589849994) 2025-03-28T09:02:36.369968Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:130:2156], cookie=9454514853675441201 2025-03-28T09:02:36.383672Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=13372000618794928981, session=1, seqNo=0) 2025-03-28T09:02:36.399064Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=13372000618794928981, session=1) 2025-03-28T09:02:36.399499Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-28T09:02:36.399695Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-28T09:02:36.399802Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:02:36.400032Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSessionDetach::Execute (sender=[1:130:2156], cookie=10848171718136421746, session=1) 2025-03-28T09:02:36.410386Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-03-28T09:02:36.410471Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-03-28T09:02:36.410535Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-03-28T09:02:36.425094Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-03-28T09:02:36.425192Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[1:130:2156], cookie=10848171718136421746) 2025-03-28T09:02:36.425250Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-03-28T09:02:37.369767Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:37.370933Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:37.420082Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:37.420350Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:37.451524Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:37.451888Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[2:132:2158], cookie=15201603100893661732, path="") 2025-03-28T09:02:37.471562Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[2:132:2158], cookie=15201603100893661732, status=SUCCESS) 2025-03-28T09:02:37.472444Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:141:2165], cookie=111, session=0, seqNo=0) 2025-03-28T09:02:37.472590Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:02:37.472792Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Execute (sender=[2:141:2165], cookie=17322873591718856301, session=1) 2025-03-28T09:02:37.485072Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-03-28T09:02:37.485169Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-03-28T09:02:37.500893Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:141:2165], cookie=111, session=1) 2025-03-28T09:02:37.500986Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDetach::Complete (sender=[2:141:2165], cookie=17322873591718856301) 2025-03-28T09:02:37.501100Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-03-28T09:02:38.461820Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:38.461939Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:38.507515Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:38.507644Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:38.528716Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:38.529237Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=4310202002949278491, session=0, seqNo=0) 2025-03-28T09:02:38.529424Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:02:38.555532Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete 
(sender=[3:132:2158], cookie=4310202002949278491, session=1) 2025-03-28T09:02:38.556359Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:132:2158], cookie=879083045472155087, session=1) 2025-03-28T09:02:38.556479Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-03-28T09:02:38.568665Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:132:2158], cookie=879083045472155087) 2025-03-28T09:02:38.569638Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:149:2173], cookie=2425847043940307218) 2025-03-28T09:02:38.569721Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:149:2173], cookie=2425847043940307218) 2025-03-28T09:02:38.570525Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:152:2176], cookie=5464771328141456010, session=0, seqNo=0) 2025-03-28T09:02:38.570672Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:02:38.584796Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:152:2176], cookie=5464771328141456010, session=2) 2025-03-28T09:02:38.586234Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:132:2158], cookie=7218252929851470999, session=2) 2025-03-28T09:02:38.586352Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 2025-03-28T09:02:38.600798Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:132:2158], cookie=7218252929851470999) 2025-03-28T09:02:39.809814Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:39.809938Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:39.859898Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:39.860665Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:39.891519Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:39.892490Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:132:2158], cookie=12345, session=0, seqNo=0) 2025-03-28T09:02:39.892667Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:02:39.921970Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:132:2158], cookie=12345, session=1) 2025-03-28T09:02:39.922824Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:139:2163], cookie=23456, session=1, seqNo=0) 2025-03-28T09:02:39.940945Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:139:2163], cookie=23456, session=1) 2025-03-28T09:02:40.958026Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:40.958173Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:41.001651Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:41.002399Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:41.028788Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:41.029764Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=12345, session=0, seqNo=0) 2025-03-28T09:02:41.029933Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new 
session 1 2025-03-28T09:02:41.044696Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=12345, session=1) 2025-03-28T09:02:41.045606Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:139:2163], cookie=23456, session=1, seqNo=0) 2025-03-28T09:02:41.061119Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:139:2163], cookie=23456, session=1)
>> TTicketParserTest::LoginRefreshGroupsWithError
|75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut
|75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut
|75.5%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest
>> TTicketParserTest::LoginGood
>> TCancelTx::ImmediateReadOnly [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::Alter [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:02:28.887701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:02:28.887773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:28.887810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:02:28.887835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:02:28.887873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:02:28.887897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:02:28.887951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:28.888023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:02:28.888283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:28.990180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:02:28.990252Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:29.020806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:29.021151Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:02:29.021440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:02:29.027924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:02:29.028108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:02:29.028731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:29.028935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:29.030855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:29.031975Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:29.032026Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:29.032149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:02:29.032182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:29.032207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:02:29.032263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.037876Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:02:29.192533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:29.192784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.193008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:02:29.193284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:02:29.193350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.195703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:29.195834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:02:29.196021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.196076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:02:29.196129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:02:29.196164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:02:29.198154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.198205Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:02:29.198257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:02:29.199983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.200043Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.200084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:29.200129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:02:29.203834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:02:29.206106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:02:29.206288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:02:29.207305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:29.207427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:29.207473Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:29.207732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:02:29.207783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:29.207937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:29.208011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:29.209990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-03-28T09:02:29.210051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:29.210222Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:29.210277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:02:29.210624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:29.210671Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:02:29.210779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:29.210823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:29.210865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:29.210894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:29.210924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:02:29.210961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:29.210994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:02:29.211026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:02:29.211097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:29.211133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:02:29.211160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:02:29.212932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:29.213037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:29.213069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
RD TRACE: StateWork, received event# 274137603, Sender [6:207:2209], Recipient [6:123:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 } 2025-03-28T09:02:42.159128Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-03-28T09:02:42.159211Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:02:42.159302Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:02:42.159346Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:02:42.159397Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:02:42.159451Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:42.159568Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:02:42.160652Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:207:2209], Recipient [6:123:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 } 2025-03-28T09:02:42.160694Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-03-28T09:02:42.160757Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:02:42.160831Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:02:42.160860Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:02:42.160890Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-28T09:02:42.160921Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:02:42.160993Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:02:42.161039Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:02:42.163772Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:02:42.164606Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:02:42.164643Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 
2025-03-28T09:02:42.165648Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:02:42.165684Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:02:42.165927Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:02:42.165982Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:02:42.166431Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:405:2360], Recipient [6:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:42.166486Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:42.166534Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T09:02:42.166655Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [6:402:2357], Recipient [6:123:2149]: NKikimrScheme.TEvNotifyTxCompletion TxId: 101 2025-03-28T09:02:42.166708Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-28T09:02:42.166790Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:02:42.166893Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:02:42.166937Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [6:403:2358] 2025-03-28T09:02:42.167116Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:405:2360], Recipient [6:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:02:42.167153Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:02:42.167196Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-03-28T09:02:42.167576Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [6:406:2361], Recipient [6:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-28T09:02:42.167632Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-28T09:02:42.167735Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:02:42.168010Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Transfer" took 281us result status StatusSuccess 2025-03-28T09:02:42.168347Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Transfer" PathDescription { Self { Name: "Transfer" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTransfer CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ReplicationDescription { Name: "Transfer" Config { SrcConnectionParams { StaticCredentials { User: "user" } } ConsistencySettings { Row { } } TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } } } PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 ControllerId: 72075186233409546 State { StandBy { } } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-03-28T09:02:42.169025Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [6:407:2362], Recipient [6:123:2149]: {TEvModifySchemeTransaction txid# 102 TabletId# 72057594046678944} 2025-03-28T09:02:42.169084Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:02:42.171906Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTransfer AlterReplication { Name: "Transfer" State { Paused { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:42.172117Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterReplication Propose: opId# 102:0, path# /MyRoot/Transfer, pathId# 2025-03-28T09:02:42.172212Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Cannot switch state, at schemeshard: 72057594046678944 2025-03-28T09:02:42.195034Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:02:42.203893Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Cannot switch state" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:42.204081Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Cannot switch state, operation: ALTER TRANSFER, no path 2025-03-28T09:02:42.204137Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2025-03-28T09:02:42.204851Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [6:411:2366], Recipient [6:123:2149]: {TEvModifySchemeTransaction txid# 103 TabletId# 72057594046678944} 2025-03-28T09:02:42.204917Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:02:42.207752Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTransfer AlterReplication { Name: "Transfer" State { StandBy { } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:42.207989Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterReplication Propose: opId# 103:0, path# /MyRoot/Transfer, pathId# 2025-03-28T09:02:42.208099Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Cannot switch state, at schemeshard: 72057594046678944 2025-03-28T09:02:42.208331Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:02:42.212533Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Cannot switch state" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:42.212730Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Cannot switch state, operation: ALTER TRANSFER, no path 2025-03-28T09:02:42.212794Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103
>> DataShardWrite::DeletePrepared-Volatile [GOOD]
>> DataShardWrite::DelayedVolatileTxAndEvWrite
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish [GOOD]
>> TTxDataShardUploadRows::UploadRowsToReplicatedTable
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD]
Test command err: 2025-03-28T09:01:43.218303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:43.218634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:43.218707Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011db/r3tmp/tmpxnQd9M/pdisk_1.dat 2025-03-28T09:01:44.949084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:45.314346Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:45.523906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:45.526097Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:45.549517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:45.752554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:01:46.351589Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:01:46.352785Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:01:46.353242Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:01:46.353527Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:01:46.779218Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:01:46.803377Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:01:46.804612Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:01:46.851345Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:01:46.852720Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:01:46.853780Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:01:46.859744Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:01:46.860557Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:01:46.861824Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:01:46.877068Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:01:47.121476Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:01:47.121704Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:01:47.121868Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:01:47.121906Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:01:47.121944Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:01:47.121996Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:01:47.122242Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:47.122296Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:47.122656Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:01:47.122778Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:01:47.123219Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:01:47.123270Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:01:47.123323Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:01:47.123361Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:01:47.123403Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:01:47.123433Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:01:47.123492Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:01:47.123614Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:47.123649Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:47.123713Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:01:47.123783Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:01:47.123833Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:01:47.123962Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:01:47.124208Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:01:47.124264Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:01:47.124365Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:01:47.124422Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:01:47.124470Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:01:47.124510Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-28T09:01:47.124544Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:01:47.124862Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:01:47.124901Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:01:47.124998Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:01:47.125032Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:01:47.125105Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:01:47.125148Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:01:47.125191Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:01:47.125225Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:01:47.125250Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:01:47.126703Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:01:47.126761Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:01:47.139471Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:01:47.139559Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:01:47.139597Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:01:47.139688Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:01:47.139764Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:01:47.309183Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:47.309242Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:47.309280Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:01:47.309705Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:01:47.309755Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:01:47.309887Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:01:47.309932Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:01:47.309988Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:01:47.310027Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:01:47.314820Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:01:47.314904Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:01:47.315721Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:47.315766Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:47.317940Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:01:4 ... 4248Z node 6 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2025-03-28T09:02:40.114298Z node 6 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-03-28T09:02:40.114355Z node 6 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 1500:100 keys extracted: 3 2025-03-28T09:02:40.114402Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-03-28T09:02:40.114431Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadWriteDetails 2025-03-28T09:02:40.114462Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:40.114491Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:02:40.114562Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically complete end at 72075186224037888 2025-03-28T09:02:40.114602Z node 6 :TX_DATASHARD TRACE: Operation [1500:100] is the new logically incomplete end at 72075186224037888 2025-03-28T09:02:40.114645Z node 6 :TX_DATASHARD TRACE: Activated operation [1500:100] at 72075186224037888 2025-03-28T09:02:40.118966Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-03-28T09:02:40.119046Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:40.119081Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit BuildWriteOutRS 2025-03-28T09:02:40.119116Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit BuildWriteOutRS 2025-03-28T09:02:40.119181Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-03-28T09:02:40.119203Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit BuildWriteOutRS 2025-03-28T09:02:40.119224Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2025-03-28T09:02:40.119248Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit 
StoreAndSendWriteOutRS 2025-03-28T09:02:40.119271Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-03-28T09:02:40.119317Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2025-03-28T09:02:40.119342Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-03-28T09:02:40.119367Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit PrepareWriteTxInRS 2025-03-28T09:02:40.119393Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-03-28T09:02:40.119414Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-03-28T09:02:40.119435Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-03-28T09:02:40.119457Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit LoadAndWaitInRS 2025-03-28T09:02:40.119480Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-03-28T09:02:40.122721Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit LoadAndWaitInRS 2025-03-28T09:02:40.122821Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit ExecuteWrite 2025-03-28T09:02:40.122858Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit ExecuteWrite 2025-03-28T09:02:40.122903Z node 6 :TX_DATASHARD DEBUG: Executing write operation for [1500:100] at 72075186224037888 2025-03-28T09:02:40.123095Z node 6 :TX_DATASHARD DEBUG: Executed write operation for [1500:100] at 72075186224037888, row count=3 2025-03-28T09:02:40.123155Z node 6 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-28T09:02:40.123249Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:02:40.123306Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit ExecuteWrite 2025-03-28T09:02:40.123364Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompleteWrite 2025-03-28T09:02:40.123420Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompleteWrite 2025-03-28T09:02:40.123654Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is DelayComplete 2025-03-28T09:02:40.123695Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompleteWrite 2025-03-28T09:02:40.123735Z node 6 :TX_DATASHARD TRACE: Add [1500:100] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:02:40.123771Z node 6 :TX_DATASHARD TRACE: Trying to execute [1500:100] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:02:40.123806Z node 6 :TX_DATASHARD TRACE: Execution status for [1500:100] at 72075186224037888 is Executed 2025-03-28T09:02:40.123835Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1500:100] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:02:40.123870Z node 6 :TX_DATASHARD TRACE: Execution plan for [1500:100] at 72075186224037888 has finished 2025-03-28T09:02:40.123918Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-28T09:02:40.123955Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-28T09:02:40.124004Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:02:40.124047Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:02:40.138456Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-03-28T09:02:40.138565Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:40.138620Z node 6 :TX_DATASHARD TRACE: Complete execution for [1500:100] at 72075186224037888 on unit CompleteWrite 2025-03-28T09:02:40.138756Z node 6 :TX_DATASHARD DEBUG: Complete write [1500 : 100] from 72075186224037888 at tablet 72075186224037888 send result to client [6:593:2518] 2025-03-28T09:02:40.138820Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:40.143601Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:767:2637], Recipient [6:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:40.143679Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:40.143770Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:766:2636], serverId# [6:767:2637], sessionId# [0:0:0] 2025-03-28T09:02:40.143977Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:765:2635], Recipient [6:666:2570]: NKikimrTxDataShard.TEvGetInfoRequest 2025-03-28T09:02:40.145274Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:770:2640], Recipient [6:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:40.145396Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:40.145456Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:769:2639], serverId# [6:770:2640], sessionId# [0:0:0] 2025-03-28T09:02:40.145713Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:768:2638], Recipient [6:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-03-28T09:02:40.145842Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:02:40.145896Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1500/100 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:02:40.145946Z node 6 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1500/18446744073709551615 2025-03-28T09:02:40.146034Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-03-28T09:02:40.146149Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-28T09:02:40.146212Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:02:40.146263Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:40.146305Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
BuildAndWaitDependencies 2025-03-28T09:02:40.146355Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-03-28T09:02:40.146409Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-28T09:02:40.146448Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:40.146486Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:02:40.146540Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:02:40.146667Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-03-28T09:02:40.155106Z node 6 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[6:768:2638], 1000} after executionsCount# 1 2025-03-28T09:02:40.155203Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:768:2638], 1000} sends rowCount# 3, bytes# 96, quota rows left# 18446744073709551612, quota bytes left# 18446744073709551519, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:02:40.155304Z node 6 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[6:768:2638], 1000} finished in read 2025-03-28T09:02:40.155387Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-28T09:02:40.155420Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:02:40.155452Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:02:40.155480Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:02:40.155537Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-28T09:02:40.155558Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:02:40.155584Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-03-28T09:02:40.155746Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:02:40.155890Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic
|75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber
|75.5%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber
|75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber
|75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest
|75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut
|75.6%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD]
>> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD]
>> TTicketParserTest::LoginBad [GOOD]
|75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning
|75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning
|75.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning
>> TTicketParserTest::BulkAuthorizationWithRequiredPermissions
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD]
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD]
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest
>> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD]
>> Cdc::KeysOnlyLog[YdsRunner] [GOOD]
>> Cdc::KeysOnlyLog[TopicRunner]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst
>> ExternalBlobsMultipleChannels::Simple [GOOD]
>> TTicketParserTest::NebiusAuthenticationUnavailable [GOOD]
>> TTicketParserTest::NebiusAuthorizationRetryError
>> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TCancelTx::ImmediateReadOnly [GOOD]
Test command err: 2025-03-28T09:02:19.659210Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783150796472265:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:19.659685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f3a/r3tmp/tmpgvrhVG/pdisk_1.dat 2025-03-28T09:02:20.456304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:20.456395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:20.467418Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:20.470095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26063 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:21.018049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:21.077184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting...
TClient is connected to server localhost:26063 2025-03-28T09:02:21.963288Z node 1 :TX_PROXY ERROR: Actor# [1:7486783159386407477:2398] txid# 281474976710660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-03-28T09:02:21.963384Z node 1 :TX_PROXY ERROR: Actor# [1:7486783159386407477:2398] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2025-03-28T09:02:22.002420Z node 1 :TX_PROXY ERROR: Actor# [1:7486783159386407491:2409] txid# 281474976710661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-03-28T09:02:22.002488Z node 1 :TX_PROXY ERROR: Actor# [1:7486783159386407491:2409] txid# 281474976710661 RESPONSE Status# ExecCancelled marker# P13c 2025-03-28T09:02:22.032566Z node 1 :TX_PROXY ERROR: Actor# [1:7486783163681374800:2419] txid# 281474976710662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-03-28T09:02:22.032649Z node 1 :TX_PROXY ERROR: Actor# [1:7486783163681374800:2419] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c 2025-03-28T09:02:22.083311Z node 1 :TX_PROXY ERROR: Actor# [1:7486783163681374826:2439] txid# 281474976710664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-03-28T09:02:22.083393Z node 1 :TX_PROXY ERROR: Actor# [1:7486783163681374826:2439] txid# 281474976710664 RESPONSE Status# ExecCancelled marker# P13c 2025-03-28T09:02:22.109032Z node 1 :TX_PROXY ERROR: Actor# [1:7486783163681374839:2449] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-03-28T09:02:22.109088Z node 1 :TX_PROXY ERROR: Actor# [1:7486783163681374839:2449] txid# 281474976710665 RESPONSE Status# ExecCancelled marker# P13c 2025-03-28T09:02:22.132758Z node 1 :TX_PROXY ERROR: Actor# [1:7486783163681374854:2461] txid# 281474976710666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-03-28T09:02:22.132829Z node 1 :TX_PROXY ERROR: Actor# [1:7486783163681374854:2461] txid# 281474976710666 RESPONSE Status# ExecCancelled marker# P13c 2025-03-28T09:02:25.472485Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783173125132338:2214];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f3a/r3tmp/tmpvi39RH/pdisk_1.dat 2025-03-28T09:02:25.559227Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:02:25.671853Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:25.719878Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:25.719951Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:25.726062Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23443 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:26.072023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:26.091312Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:26.101096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:23443 2025-03-28T09:02:30.588120Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486783196090203695:2220];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:30.627098Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f3a/r3tmp/tmpmtm7b5/pdisk_1.dat 2025-03-28T09:02:30.755097Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:30.755199Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:30.755831Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:30.767878Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17411 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:31.066544Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:31.087506Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:02:31.091781Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:17411 2025-03-28T09:02:31.538252Z node 3 :TX_PROXY ERROR: Actor# [3:7486783200385171529:2384] txid# 281474976715660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-03-28T09:02:31.538337Z node 3 :TX_PROXY ERROR: Actor# [3:7486783200385171529:2384] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2025-03-28T09:02:31.568085Z node 3 :TX_PROXY ERROR: Actor# [3:7486783200385171546:2398] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-03-28T09:02:31.568157Z node 3 :TX_PROXY ERROR: Actor# [3:7486783200385171546:2398] txid# 281474976715661 RESPONSE Status# ExecCancelled marker# P13c 2025-03-28T09:02:31.594359Z node 3 :TX_PROXY ERROR: Actor# [3:7486783200385171560:2409] txid# 281474976715662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2025-03-28T09:02:31.594435Z node 3 :TX_PROXY ERROR: Actor# [3:7486783200385171560:2409] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c 2025-03-28T09:02:31.647622Z node 3 :TX_PROXY ERROR: Actor# [3:7486783200385171588:2431] txid# 281474976715664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-03-28T09:02:31.647698Z node 3 :TX_PROXY ERROR: Actor# [3:7486783200385171588:2431] txid# 281474976715664 RESPONSE Status# ExecCancelled marker# P13c 2025-03-28T09:02:31.672489Z node 3 :TX_PROXY ERROR: Actor# [3:7486783200385171611:2444] txid# 281474976715665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-03-28T09:02:31.672554Z node 3 :TX_PROXY ERROR: Actor# [3:7486783200385171611:2444] txid# 281474976715665 RESPONSE Status# ExecCancelled marker# P13c 2025-03-28T09:02:31.695739Z node 3 :TX_PROXY ERROR: Actor# [3:7486783200385171625:2455] txid# 281474976715666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2025-03-28T09:02:31.695810Z node 3 :TX_PROXY ERROR: Actor# [3:7486783200385171625:2455] txid# 281474976715666 RESPONSE Status# ExecCancelled marker# P13c 2025-03-28T09:02:36.902224Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783222300233463:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:36.902266Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f3a/r3tmp/tmp9D9y8A/pdisk_1.dat 2025-03-28T09:02:37.293350Z node 4 :IMPORT WARN: Table profiles 
were not loaded 2025-03-28T09:02:37.300862Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:37.300961Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:37.301981Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30512 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:37.740046Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:37.751612Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:02:37.829813Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:30512 2025-03-28T09:02:38.388838Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715660 at tablet 72075186224037888 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-03-28T09:02:38.390520Z node 4 :TX_PROXY ERROR: Actor# [4:7486783230890168794:2389] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2025-03-28T09:02:38.412187Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715662 at tablet 72075186224037889 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2025-03-28T09:02:38.413518Z node 4 :TX_PROXY ERROR: Actor# [4:7486783230890168809:2398] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TTicketParserTest::AccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly |75.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |75.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] Test command err: 2025-03-28T09:02:36.024209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:02:36.024722Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:36.024785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00171f/r3tmp/tmpHwL4oU/pdisk_1.dat 2025-03-28T09:02:36.703232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:02:36.766121Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:36.814612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:36.814754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:36.826646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:37.058827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:37.097950Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:02:37.098229Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:37.144828Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:37.144962Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:37.146455Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:02:37.146522Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:02:37.146557Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:02:37.146881Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:37.147061Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:37.147143Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:02:37.157955Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:37.188133Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:02:37.188380Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:37.188524Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:02:37.188585Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:37.188624Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:02:37.188658Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:37.189110Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:02:37.189197Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:02:37.189559Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:37.189592Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:37.189620Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:02:37.189649Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:37.189709Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:02:37.189815Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:37.190007Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:02:37.190089Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:02:37.191619Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:37.202648Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:37.202786Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:02:37.359984Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:02:37.364601Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:02:37.364683Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:37.365401Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:37.365444Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:02:37.365488Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:02:37.365687Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:02:37.365812Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:02:37.366618Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:37.366714Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:02:37.367139Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:02:37.367545Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-28T09:02:37.368845Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:02:37.368880Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:37.369504Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:02:37.369558Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:37.370559Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:37.370609Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:37.370662Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:02:37.370754Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:02:37.370808Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:02:37.370923Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:37.374478Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:37.376855Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:02:37.376932Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:02:37.377178Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:02:37.385532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:37.385672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:37.385758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:37.390589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:02:37.396413Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:37.567539Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:37.569942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:37.635032Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:38.037580Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqdztba75qvpt38ycwxr8bbd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2MzNjk2YmEtM2NhNTNhNTMtOTE0MGJhNWQtOWY0NTA3MzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:38.043926Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:852:2688], serverId# [1:853:2689], sessionId# [0:0:0] 2025-03-28T09:02:38.044187Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:38.059247Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09 ... aChangedResult 2025-03-28T09:02:44.081507Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037888 state Ready 2025-03-28T09:02:44.081566Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:02:44.087130Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:941:2767], Recipient [2:666:2570]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 941 RawX2: 8589937359 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020\002\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t\255\003\000\000\000\000\000\000\021\317\n\000\000\002\000\000\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2025-03-28T09:02:44.087215Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:02:44.087335Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:44.087543Z node 2 :TX_DATASHARD TRACE: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:2:0] 2025-03-28T09:02:44.087626Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2025-03-28T09:02:44.087686Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-28T09:02:44.087728Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2025-03-28T09:02:44.087787Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:44.087820Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:02:44.087861Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-03-28T09:02:44.087916Z node 2 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2025-03-28T09:02:44.087952Z node 2 
:TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-28T09:02:44.087975Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:44.087996Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit MakeScanSnapshot 2025-03-28T09:02:44.088019Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit MakeScanSnapshot 2025-03-28T09:02:44.088044Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-28T09:02:44.088066Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit MakeScanSnapshot 2025-03-28T09:02:44.088086Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit WaitForStreamClearance 2025-03-28T09:02:44.088104Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance 2025-03-28T09:02:44.088151Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:941:2767] for [0:281474976715665] at 72075186224037888 2025-03-28T09:02:44.088182Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue 2025-03-28T09:02:44.088397Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:941:2767], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715665 2025-03-28T09:02:44.088434Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-03-28T09:02:44.088531Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:941:2767], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715665 Cleared: true 2025-03-28T09:02:44.088562Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-03-28T09:02:44.088660Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:44.088688Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:44.088748Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:44.088790Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:02:44.088839Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance 2025-03-28T09:02:44.088875Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit WaitForStreamClearance 2025-03-28T09:02:44.088922Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715665] at 72075186224037888 2025-03-28T09:02:44.088956Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-28T09:02:44.088994Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit WaitForStreamClearance 2025-03-28T09:02:44.089027Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ReadTableScan 2025-03-28T09:02:44.089060Z node 2 :TX_DATASHARD TRACE: Trying to 
execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan 2025-03-28T09:02:44.089325Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Continue 2025-03-28T09:02:44.089355Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:02:44.089389Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-28T09:02:44.089429Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:02:44.089459Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:02:44.090002Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:947:2772], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-28T09:02:44.090042Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-28T09:02:44.090272Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:44.090320Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:44.090561Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1 2025-03-28T09:02:44.095241Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:934:2760], Recipient [2:666:2570]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [2:934:2760] ServerId: [2:936:2762] } 2025-03-28T09:02:44.095311Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-28T09:02:44.095656Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:02:44.095836Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0 2025-03-28T09:02:44.095882Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0 2025-03-28T09:02:44.097899Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:02:44.097954Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715665, at: 72075186224037888 2025-03-28T09:02:44.098135Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:44.098186Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:44.098247Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:44.098279Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:02:44.098316Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan 2025-03-28T09:02:44.098345Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ReadTableScan 2025-03-28T09:02:44.098380Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715665] at 72075186224037888 error: , IsFatalError: 0 2025-03-28T09:02:44.098426Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 
72075186224037888 is Executed 2025-03-28T09:02:44.098456Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ReadTableScan 2025-03-28T09:02:44.098482Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:02:44.098509Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-03-28T09:02:44.098555Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-28T09:02:44.098625Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2025-03-28T09:02:44.098654Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:02:44.102831Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:02:44.102899Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:02:44.102972Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-28T09:02:44.103004Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:02:44.103033Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-03-28T09:02:44.103075Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:44.103102Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-28T09:02:44.103132Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:02:44.103158Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:02:44.103233Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:44.103274Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-03-28T09:02:44.103318Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-03-28T09:02:43.853557Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:43.856302Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:43.856518Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-28T09:02:43.856556Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:43.856591Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-28T09:02:43.856629Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:43.856683Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:43.856748Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-28T09:02:43.857213Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:262:2254], now have 1 active actors on pipe 2025-03-28T09:02:43.857279Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:43.874303Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:43.877074Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:43.877289Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:43.878150Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:43.878316Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:43.878855Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:43.879207Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:270:2260] 2025-03-28T09:02:43.881740Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-03-28T09:02:43.881806Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:270:2260] 2025-03-28T09:02:43.881863Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:43.881909Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:43.882875Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:273:2262], now have 1 active actors on pipe 2025-03-28T09:02:43.954463Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:43.958808Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:43.959130Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-28T09:02:43.959174Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:43.959225Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-03-28T09:02:43.959265Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:43.959310Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:43.959360Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-28T09:02:43.959988Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:407:2362], now have 1 active actors on pipe 2025-03-28T09:02:43.960103Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:43.960261Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:43.962531Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:43.962664Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:43.963503Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:43.963610Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:43.963976Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:43.964169Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:415:2368] 2025-03-28T09:02:43.966109Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-28T09:02:43.966192Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:415:2368] 2025-03-28T09:02:43.966261Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:43.966304Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:43.967135Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:418:2370], now have 1 active actors on pipe 2025-03-28T09:02:43.968642Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:426:2373], now have 1 active actors on pipe 2025-03-28T09:02:43.968950Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:428:2374], now have 1 active actors on pipe 2025-03-28T09:02:43.969105Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:426:2373] destroyed 2025-03-28T09:02:43.969465Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:428:2374] destroyed 2025-03-28T09:02:44.476987Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:44.480242Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:44.480537Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-28T09:02:44.480584Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:44.480642Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-28T09:02:44.480685Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:44.480741Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:44.480802Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-28T09:02:44.481461Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:261:2253], now have 1 active actors on pipe 2025-03-28T09:02:44.481535Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:44.481711Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 3(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:44.484435Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:44.484570Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:44.485322Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 3 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:44.485440Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:44.485790Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:44.485988Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [3:269:2259] 2025-03-28T09:02:44.488562Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing c ... 
xId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:44.564967Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-03-28T09:02:44.565011Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:44.565055Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:44.565128Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-03-28T09:02:44.565783Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:466:2406], now have 1 active actors on pipe 2025-03-28T09:02:44.566151Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:44.566354Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 5(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:44.568851Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:44.568972Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:44.569759Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 5 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 5 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:44.569875Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:44.570290Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:44.570509Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:474:2412] 2025-03-28T09:02:44.572444Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-03-28T09:02:44.572507Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:474:2412] 2025-03-28T09:02:44.572563Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:44.572608Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. 
Count 0 2025-03-28T09:02:44.573385Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:477:2414], now have 1 active actors on pipe 2025-03-28T09:02:44.589546Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:44.593356Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:44.593667Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-28T09:02:44.593724Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:44.593788Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-03-28T09:02:44.593849Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:44.593896Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:44.593953Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-28T09:02:44.594648Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:526:2451], now have 1 active actors on pipe 2025-03-28T09:02:44.594862Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:44.595044Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 6(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:44.597502Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:44.597678Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:44.598479Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 6 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:44.598605Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:44.598996Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:44.599198Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:534:2457] 2025-03-28T09:02:44.601254Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-03-28T09:02:44.601327Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:534:2457] 2025-03-28T09:02:44.601387Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:44.601444Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:44.602370Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:537:2459], now have 1 active actors on pipe 2025-03-28T09:02:44.604038Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:545:2462], now have 1 active actors on pipe 2025-03-28T09:02:44.604150Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:546:2463], now have 1 active actors on pipe 2025-03-28T09:02:44.604240Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:547:2463], now have 1 active actors on pipe 2025-03-28T09:02:44.615227Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:552:2467], now have 1 active actors on pipe 2025-03-28T09:02:44.646112Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:44.647985Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:44.648327Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-28T09:02:44.648390Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:44.648538Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:44.649178Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:44.649224Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-28T09:02:44.649340Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:44.649593Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:44.649777Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:609:2512] 2025-03-28T09:02:44.651445Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-03-28T09:02:44.652473Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-03-28T09:02:44.652736Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-03-28T09:02:44.653095Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-03-28T09:02:44.653387Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-03-28T09:02:44.653438Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:02:44.653474Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-28T09:02:44.653508Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-28T09:02:44.653566Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:609:2512] 2025-03-28T09:02:44.653625Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:44.653666Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:44.654370Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:546:2463] destroyed 2025-03-28T09:02:44.654444Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:545:2462] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 3 ErrorCode: OK } PartitionLocation { Partition: 2 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } >> TTicketParserTest::TicketFromCertificateWithValidationGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] Test command err: 2025-03-28T09:01:43.218514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:43.220167Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:43.220396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011ed/r3tmp/tmpuaKnKn/pdisk_1.dat 2025-03-28T09:01:45.965272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:46.326051Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:46.468808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:46.469965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:46.516732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:46.900358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:01:47.132323Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:01:47.133519Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:01:47.134024Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:01:47.134298Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:01:47.193244Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:01:47.193962Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:01:47.194096Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:01:47.195817Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:01:47.195899Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:01:47.195955Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:01:47.196358Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:01:47.196494Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:01:47.196582Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:01:47.211244Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:01:47.251615Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:01:47.251869Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:01:47.252010Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:01:47.252049Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:01:47.252089Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:01:47.252142Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:01:47.252405Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:47.252469Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:47.252794Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:01:47.252893Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:01:47.253334Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:01:47.253393Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:01:47.253468Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:01:47.253505Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:01:47.253540Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:01:47.253572Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:01:47.253624Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:01:47.253753Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:47.253790Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:47.253857Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:01:47.253934Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:01:47.253983Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:01:47.254101Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:01:47.254362Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:01:47.254437Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:01:47.254537Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:01:47.254587Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:01:47.254643Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:01:47.254753Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-28T09:01:47.254799Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:01:47.255132Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:01:47.255178Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:01:47.255215Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:01:47.255252Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:01:47.255312Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:01:47.255352Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:01:47.255392Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:01:47.255425Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:01:47.255451Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:01:47.256939Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:01:47.256992Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:01:47.268429Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:01:47.268504Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:01:47.268543Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:01:47.268620Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:01:47.268688Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:01:47.447170Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:47.447232Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:47.447269Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:01:47.447736Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:01:47.447778Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:01:47.447895Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:01:47.447935Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:01:47.447993Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:01:47.448028Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:01:47.460980Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:01:47.461097Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:01:47.461984Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:47.462029Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:47.462091Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:01:4 ... chemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-03-28T09:02:44.093858Z node 6 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[6:1002:2819], 1001} after executionsCount# 1 2025-03-28T09:02:44.093898Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:1002:2819], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:02:44.093981Z node 6 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[6:1002:2819], 1001} finished in read 2025-03-28T09:02:44.094027Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-03-28T09:02:44.094053Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-03-28T09:02:44.094076Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-03-28T09:02:44.094101Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-03-28T09:02:44.094142Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-03-28T09:02:44.094179Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-03-28T09:02:44.094206Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-03-28T09:02:44.094234Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-03-28T09:02:44.094305Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-03-28T09:02:44.095082Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1008:2825], Recipient [6:717:2597]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:44.095126Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:44.095163Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:1007:2824], serverId# [6:1008:2825], sessionId# [0:0:0] 2025-03-28T09:02:44.095276Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender 
[6:1006:2823], Recipient [6:717:2597]: NKikimrTxDataShard.TEvGetInfoRequest 2025-03-28T09:02:44.095979Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1011:2828], Recipient [6:717:2597]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:44.096018Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:44.096053Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [6:1010:2827], serverId# [6:1011:2828], sessionId# [0:0:0] 2025-03-28T09:02:44.096163Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:1009:2826], Recipient [6:717:2597]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-03-28T09:02:44.096262Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-03-28T09:02:44.096298Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:02:44.096332Z node 6 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-03-28T09:02:44.096377Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2025-03-28T09:02:44.096437Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-03-28T09:02:44.096462Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2025-03-28T09:02:44.096489Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:44.096515Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2025-03-28T09:02:44.096556Z node 6 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037890 2025-03-28T09:02:44.096587Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-03-28T09:02:44.096613Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:44.096634Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2025-03-28T09:02:44.096658Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2025-03-28T09:02:44.096728Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-03-28T09:02:44.096865Z node 6 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[6:1009:2826], 1002} after executionsCount# 1 2025-03-28T09:02:44.096906Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:1009:2826], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:02:44.096994Z node 6 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[6:1009:2826], 1002} finished in read 2025-03-28T09:02:44.097040Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-03-28T09:02:44.097066Z node 6 :TX_DATASHARD 
TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2025-03-28T09:02:44.097095Z node 6 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2025-03-28T09:02:44.097118Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2025-03-28T09:02:44.097156Z node 6 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037890 is Executed 2025-03-28T09:02:44.097177Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2025-03-28T09:02:44.097201Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037890 has finished 2025-03-28T09:02:44.097229Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-03-28T09:02:44.097305Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-03-28T09:02:44.098058Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1015:2832], Recipient [6:714:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:44.098098Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:44.098131Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:1014:2831], serverId# [6:1015:2832], sessionId# [0:0:0] 2025-03-28T09:02:44.098216Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [6:1013:2830], Recipient [6:714:2595]: NKikimrTxDataShard.TEvGetInfoRequest 2025-03-28T09:02:44.099099Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1018:2835], Recipient [6:714:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:44.099151Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:44.099201Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [6:1017:2834], serverId# [6:1018:2835], sessionId# [0:0:0] 2025-03-28T09:02:44.099348Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [6:1016:2833], Recipient [6:714:2595]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-03-28T09:02:44.099462Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-03-28T09:02:44.099515Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:02:44.099559Z node 6 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v4000/18446744073709551615 2025-03-28T09:02:44.099615Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2025-03-28T09:02:44.099698Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-03-28T09:02:44.099743Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2025-03-28T09:02:44.099784Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:44.099830Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit 
BuildAndWaitDependencies 2025-03-28T09:02:44.099901Z node 6 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037891 2025-03-28T09:02:44.099944Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-03-28T09:02:44.099970Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:44.099994Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2025-03-28T09:02:44.100017Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit ExecuteRead 2025-03-28T09:02:44.100088Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-03-28T09:02:44.100189Z node 6 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[6:1016:2833], 1003} after executionsCount# 1 2025-03-28T09:02:44.100230Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:1016:2833], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:02:44.100280Z node 6 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[6:1016:2833], 1003} finished in read 2025-03-28T09:02:44.100321Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-03-28T09:02:44.100345Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2025-03-28T09:02:44.100366Z node 6 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2025-03-28T09:02:44.100389Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2025-03-28T09:02:44.100425Z node 6 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037891 is Executed 2025-03-28T09:02:44.100451Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2025-03-28T09:02:44.100473Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037891 has finished 2025-03-28T09:02:44.100501Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-03-28T09:02:44.100569Z node 6 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry [GOOD] >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> 
TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName |75.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetTopicMetadata request" ErrorCode: BAD_REQUEST } |75.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |75.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple [GOOD] Test command err: 2025-03-28T09:02:32.903658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:02:32.904218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:32.904305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000bf9/r3tmp/tmp41MqPU/pdisk_1.dat 2025-03-28T09:02:33.621613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:02:33.665826Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:33.712618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:33.712779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:33.728037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:33.822595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:34.297092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:34.297221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:34.297291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:34.304046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:02:34.474710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:34.563960Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:35.053896Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqdzt89pa5ahxah1b2scjve5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMwNmZiOTItM2YxNTU4NGEtNmI0YTM0YzktZDFiNGU4OWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.137615Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqdzt91vbk64agy750d2wm88, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWExYmRiY2YtYjFiMGIwNzktM2I5YjE3MDctZmU5NGFjZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.213517Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqdzt9473w0hmfayk32teq04, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Q0ODExNC04NzgxMDY1Ni0zODlkYzZlMi00Y2Q3ZWJmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.289130Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqdzt96jdrpc0gj3cgrba6c4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ1NDA1ZWMtY2M4NWRjZjAtN2I2YTcwOWEtOThhMTZkMDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.364138Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqdzt98y4znpgc60t5gj6015, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTY0NzY3YTgtNTM2ZGIwYTMtYWZjOTQzYTMtNGFhYmZlNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.451277Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqdzt9b99chwx5kgvts5tvm1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTNkYjI0ODAtNzdjNmMxMjItZTdhZmYyZWItZDQzYTY0NzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.526842Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqdzt9e0fr4ehazvkzqnzs0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzAzNGExMDktOTE5YTdhYzItMzEwMjkxYmMtMzA0NThlNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.600120Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqdzt9gc0s69250m9sww0cnk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTlhYjBlYzMtZWNjN2MyNjItYmMwOGU2YmEtNDc3MDliYzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.673184Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqdzt9jn5n3v7a56xp4v96am, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzY0MjAwYWYtNzc5NTc5N2QtMzFhODI2ODEtNWM5NTg5MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.743456Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jqdzt9myexggrkcfd4tb45jq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmZmMGIxZGItZjllZTE1NDgtMzdiMzRkZTgtMjQwMGU3MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.814647Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqdzt9q4b00vj9fs77bf89sk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDE0NGE1OGUtMTZlZDg1My1hOTcyMWY0YS00ZjdhYWI5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.889175Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqdzt9sbd9txyrzgypcpy4hc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzk0NzgzN2QtNmU0N2RkZDUtYzk3ZGY0NjMtMmNmMWRjOTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.963472Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqdzt9vp75222f96xt58927a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDViMDNkMTUtYWRiZDZmMDItM2RmYTFlNy01ODU5ZWNmYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.034537Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jqdzt9y0169v96jfcmgmp8k8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjgwMDljNmYtNDZiMTM4NWEtYmY4ZmViZS1hOTE4ZmFmOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.113238Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jqdzta0731f3n8hh035hwdbr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIxZjQ5OTctMmVlNTE3N2QtZjNjNGEwMzktNWVkMmE0YzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.207402Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jqdzta2n6234q247vfn3q3fk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjBlNDc4ZWItZDMyMDMxNDAtNTBmNTQ1ZGQtZTgxZjU0NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.280266Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jqdzta5m0qq898yq0fmd3hv9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjMxMzI5NzItN2I1ODg1ZjgtYzY5ZGRjYzYtMTczNWQzN2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.349296Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqdzta7xfejhabw102p3r9xw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWNhNTQ4NGMtOGJmNTYxNGYtZTA3MDYzZDAtNTIwMDI0MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.415478Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqdztaa1an3e65qc7cyfme52, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE5ZDhiOTgtMzU1ZDg2OGUtNTY2YzY5ZTgtNzkzNWUzMGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.484853Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jqdztac3c2ezd5ez9430mz0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjdiYTA0ZDUtYjk0OGExZTQtM2EyZTJlZjMtZWM3Y2JiNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.644085Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jqdztae9b4j5s7h3yg4e51j9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjE3MDU4ZGEtMmE4NzA1NGMtM2M5ZWIyYTYtNTMzODFiYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.713862Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqdztak9f8h4ts3ra2r9a4qr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGZjN2UwODktNTVkNmQwNjItOTg5NDIzMjgtOWU2MDYyODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.833424Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jqdztane12q4n69j6kmfqmqt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MThkMzEyOS1hMmQzNmQyMS1hOTY1ZTg2Yi0yZTg5ZmM0Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.994458Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jqdztasm317r028fymh8yfk9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmNkMDc5ZjctYjA3NjU2NDktZDRmMzgwOWEtMWM2MjIyZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.052294Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jqdztay62mwfwm2hp9v8v4v7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjkwYWY1NGQtYTI2NDM1YWUtY2E1NTlhNGMtOTI5MjM4NDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.111825Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jqdztb00drmm48j4a9baekkx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWE5ZWUxZmEtYzNmNzY3MTAtMjgzM2ZiMDctMzliM2Fl ... 13Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jqdztepa2k18apgst686rvay, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWI4ZDNiNzUtZmI4NWUxNmItNTRhOTc1ZDUtNzdlOTE0MzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.028807Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jqdztet9453n0b2b2qv8vv59, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTM1NjY2MGQtOGJjM2NhYmItZTFiNmVlMWEtYzhjYWFlNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.091450Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jqdztew9485psker7vzsw7za, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDg2MzhkNWEtNDJiNDMzMTktYjkxNDQzMTgtMTE2MTNhN2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.154787Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jqdztey7c8crhv0012575xwp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWQ5MTEzMTctNWRhM2IxMDAtNGZmZjc2NzMtM2Y0YzEzNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:02:41.218583Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jqdztf076d3yxt1gcrwqwq3w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmNlODAwYTAtY2NmYjA4NjktMjEyZDM3ZmEtMzVhOWQwMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.283604Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jqdztf272k1915bhcqs5nh1m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDI1ZGE5ZS0yMDE2YjI2MS0zY2JkNGRhMS1iNTY3MmMyZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.346860Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jqdztf489a3zcdxgrs9k43jg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTU5OTZiMDgtYWM3YjkzMmMtNDY3NmY5NTAtOGI3NTJmODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.412695Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jqdztf679gvy4vv5b830zngj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjczYWY1YjUtMzVjMTBjYWMtYjRmMWM5N2EtMzFiYjYwMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.477014Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jqdztf898kwqrmmw9na15fsg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA1OTFhYTAtY2I2Yjk0NzktNmNmNjA3OTctOGFhYjE3MDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.543549Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jqdztfa9cvj5vrrr5jcxq4xq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzY2Mzg3MjctN2ZhMmRhNTMtMjY0NDkzODMtMmYwOWU3MWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.628091Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jqdztfcccwadsp9xqzwrramf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjIyNjE4YjUtM2Q5ZmY3MjktMzMyYTgwYTItN2I1NDUxNmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.712497Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jqdztff057rzwzqhxde7srn5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTEwNjNiNDAtNTk4Yjk0YjQtM2Y0YmRhZTktOGJkNmJkMmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.789292Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jqdztfhqdqrdpwhnaavnqkzc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RjNDI5ZWEtNTFiZjY1MWMtYmI3NzJhZTItYzE1ZDdiOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.869396Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jqdztfm231m6jtkm3a7xsjzv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODZkOWZkNzAtZmMwYmM3OTUtZDE2YzI3ODAtZTBiOGQyOTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.948385Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jqdztfpj8pm8byv1p3w3fcxj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODBjMDk0ODktODc2NzJlM2QtYzY0OGVjOTEtZmI5MTdlMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.020332Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jqdztfs2640wvpbwgtkb04n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGVhYmU5ZDAtYjExZjE5Zi01ZTAwYmIzMy1kNjY5ZmU3Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.094837Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jqdztfv9800m77wrqj6q79jx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTE4ZGM1ODktOWRmY2U0YjUtMWFiY2NmMzMtYmViM2E5NDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.190599Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jqdztfxk8ckbmm6ke2zn5075, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTNlMTgzYzItYTdmNjBkN2QtNzFkNWQ4NTMtMTYyNGU4MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.265420Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jqdztg0k9rvmbq8mv84nsnge, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI5ODM0YjEtZDU5MTM2ODktYThjNDlhNDktODIzZDM3Yzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.361755Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jqdztg2yevp0nwqdtzb3m4wb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZlNjQ1ZjQtNzcwN2MxOTctMmFiYmVjNDQtYzZhODcxN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.423081Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jqdztg5y6stssj0cpmvhy4h0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTc4NzA2NWMtMWU1NTdiNGItYWQ5NjkxNDAtYmIzM2M3NmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.489100Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jqdztg7w2r105kksrtxm91kj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFjOTRkZjEtNDYxMjhiMjMtNzU4MmRjYzQtMmE2ZGRiOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.553264Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jqdztg9x1qvn9c5qaak31vpf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGYyM2ZjMDgtZTczZjcxMTAtYjQ1NTFhNjMtOGNmNTdhMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.619220Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jqdztgbybam6vx1msppz793k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTk3MGJiMmQtN2U4MmEyZGMtOTE2YmRkZDUtMmQwYjI1ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.681813Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jqdztge04r214q8adqb082as, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTVlYWIwM2UtYjdhNzMxMTgtZjVmYjU4YjktNDg4OTk0YWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.876341Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jqdztgfy6df6gqg8ccxfmtqd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjIzNzE0ZGEtYmRlYzI3NzctYjk4MDc2YzUtMmQxMDU5ODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.010802Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jqdztgpad1qxwkyts3n9mk4q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Y4YjllYTAtNGFkZjU1NTctMzM4ODkzYTktNThmZTRmNmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.085232Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jqdztgt75sc76n6ccgejtwb5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGNmY2RiODAtM2Y0YTFlMTMtOWRmOWVlNzctYzAwMTZlZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.213402Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jqdztgwjb4y7veg3r1sen9w8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWU1YWIwNDAtZWExMTg4OGUtMjg4YTk1NTQtNTcxNzlkZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.297340Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jqdzth0j3mp4fkc8rdnrwnk8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2E5N2JjMDAtN2NmNTc3Y2UtNWE2YjJhZWItMzk3NDQ3Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.369530Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jqdzth366pe7cq5fqc58hery, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTgyZGYyZjItYWQwYjUyOGYtNzBhMzViZjEtYWQ5MjE3OTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.513617Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jqdzth5de7t2wygq1a2xp7tz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDIwZTYyYS0yN2EzNGVlYi00MDM1MjEwNC0yN2RlNjhjMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.653648Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jqdzth9ycwnz22064qtwqgvp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE0YjU3YTUtMjIzMzNiMWMtMjQxODZjMDktYzkyZGViNzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.799088Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jqdzthem2yvmgssz2t6yq4mf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTI1M2RkNTItYWI3YzMyMTQtNDcyMjMzMzYtYzRlMDc2MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.865721Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jqdzthjwf2mm2xyvf8h4h4pc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI3MWMwOGQtZjQ3YTg5ZTUtMjkwOGZlMTItNjIyMDZjMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.919373Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jqdzthyq907vx2nggy5q7nx1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDJmYjJjZTYtZGUyMDU3YWMtNDEyNTdkMDItYjMxN2FmOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] Test command err: 2025-03-28T09:02:33.052779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:02:33.053335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:33.053403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000cb5/r3tmp/tmptdrObx/pdisk_1.dat 2025-03-28T09:02:33.858832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:02:33.908061Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:33.958561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:33.958824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:33.970950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:34.089870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:34.467534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:34.467678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:34.467768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:34.473941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:02:34.652882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:34.727372Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:35.186843Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqdzt8f076vgeehw7c4r1q6t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjExOTI3MzAtYWQ2MDc5YzEtMzVhMGJiZTAtNjUxMDVkYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.364142Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqdzt97gdt83g5ywef06qd4r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRjNmRhYTEtZWM4OTE1MTYtZmZhYzlkYzctMjU2ZDhhMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.451290Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqdzt9b94pxjgx1d5g514ek0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTUzOWZmNTgtN2I2YmIzOTQtYzUyNjA2MWQtNWY2YWQ5ZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.605405Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqdzt9e1dq2x4wm8phyycpcc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmUwOWQ3M2QtN2U3YTNiMDAtMzYxNWZmYzktNjgwNDhjYmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.796839Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqdzt9k097ceky13w4nqs8h8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTk5YThiYmEtY2Y1YjcxZDItZjViZGRmMDItY2JhMDhlNzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.930627Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqdzt9s59yb0kkbrg0z8t8yk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2VmOTU4MDctN2EwMjQyMi03NDc2MDNkMi0xZmNjYjdkNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.014895Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqdzt9wzbxj1f7260pm76mdy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmNhODE4OWMtNzJmYmMxYzMtN2M3MDYyZjMtMjVlYmY0ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.155683Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqdzt9zk2fqvdh8ge3w61h93, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWU2ZGFjN2QtMTg2NzIyMTUtZjE3NjEwZTQtOGYyMThhNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.248094Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqdzta43az67dva0zkkn9j9t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTQwNzA1ODYtOTY1NTBlMWItOTExYWY0ZGMtZDE4ZmMxNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.409061Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jqdzta6x1bwa719xjxk8h9h9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTM5NDI2MjktMWQ3MTM0YzAtZTM5NjNmYWMtZWU4ODQxODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.612239Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqdztabyfdv78v83p9kkc8bk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjgwOTg0ZDUtYzY4ZjE3NmYtODIxNDljMGItNDhiMDgwNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.749731Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqdztaj9d1vtpanqjjk784cw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2Y5OGE0MGUtY2E2Yzc2MDYtYzMyODkxYy02OTMxZTUw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.915229Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqdztapj7172vbgwj8re2fpd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTk5OWYxYjUtZjlmNDEzMGUtNGM1NGRmYTktM2Q1NmQwOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.035664Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jqdztaw28dsk6ad8rbjnab7m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2UxYjkwOC02NTljNGRiNS00NDIyM2JjMS02NmI2MGFkZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.108537Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jqdztazg15czmygje7v82v1d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjk0ODMwZTgtZmMzNDlkN2MtYmJiZTMxYWQtNjg0OTI1Y2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.180096Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jqdztb1s322t3vds0kpy27be, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmYyM2JmOGItN2RiYmIxOWMtYzA5OTczY2YtZDJlNDU2OTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.253693Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jqdztb406c0kbfk2bnrjg9r6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTVkOTllMzktZjAxZWQ2NzEtYzhhYmUyYTUtOTQ4YjYzY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.325377Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqdztb6aeg70wkzj2gmmzj5k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2I2NzYxM2UtZGMwNmEyZmEtODc2OGE0YzktYTM1MjAwMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.400960Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqdztb8j3b4p6exrzv1f46wx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGFiYWE5NmQtMzU1MjE2NGMtN2NjMDE2MDUtN2ViNzIyYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.480856Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jqdztbayfch1y2ahj9vwhpzj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWFlNmE0MWMtN2FkMjdmZGQtM2FiNGVjNjYtYzdkYTQzODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.557386Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jqdztbde1yzx8wp57wsa4gst, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjU2M2YxMi01Njg3YTcyMy01M2QxODRjZC1iZDQ3YWM3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.636274Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqdztbft8f4a9gaekzrsjtrb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Q2ZGYxZDQtODQzN2IzMjYtMTVhOWZmN2YtYzYyNTVkMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.733304Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jqdztbj9cmxvk5ahgzmtt4sa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTg3YmJmMjktMzQ2ZGViNmQtNWRkNjE4Zi1hOTcyYTAzMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.811535Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jqdztbnb3bkev07pkeeqesq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzYxMGExM2ItNDQ5MmViMi0xNjU0NmQyZi1hN2U1M2E0Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.890853Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jqdztbqrcvdj6pvr263rrynv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWVjYTczOWMtZTYwYzg4ZDgtOWVkN2QwYTMtYWZkYWI5ZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.982247Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jqdztbt980vwrgakkrcwktdx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzVlZjY5MGUtZDVmZTRhYjctNThiMTdhMzMtYjYwYzExYmY= ... 465298Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jqdztf7f5v122bp8tf34eb7s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmUwNzlhMTYtODBlZTFlYmEtZjdiOGVlYjMtYWFlOGI4ZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.541565Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jqdztf9z9san8b5xz842nvmv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDA5ZTZkNTQtODE3ZDY4NDAtNDUyZmZlNjEtNzBkZWU5YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.622710Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jqdztfcbfpd7291zjg9wcscr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzYyYTMzYS1kYzA4ZTNjMS02OTYxNzBjNy1hZjEyODJhZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.689856Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jqdztfew9rgfe80dy9fwzmd7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjFkM2QxYTAtM2QzNzc0MjktYzk1Y2MxNWMtNjg2Nzk1ZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:02:41.760982Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jqdztfgy770vg6asgekmmnhk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGNiYWEwZi1iZGIxZjIwMi02NzZkMmEzZS02N2NmODhjNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.861471Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jqdztfk668kdxxc2y1t56apc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2NmMWNhNTMtODY4MDY0Ni04ZTVkZTQ1ZC02ZGQzMjM5Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.939775Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jqdztfpa7e42fghnt1sww8dr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQ3ZjUwNmEtNGIyZTE3MTYtY2Q2OTUyMTAtM2Q0MGJmYzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.046170Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jqdztfrsa634hf0e7fk6v5as, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM1Njg1YzgtOWNiMDViYTYtYmRlZWQ4ODYtZjFlMGY1ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.254221Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jqdztfw8a6kvsjv281znebjm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRjMmViMDctNGZiYWM3YzYtOTMyMmZjM2MtZjIyYmRjODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.425686Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jqdztg2wevmeq787vytbj1p5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGE0Yjc3N2YtMjljMjQ5NmEtMzgwZTQwMzQtYTdhNzIzOWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.666944Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jqdztg8k3tgnrpb8jaddwj5h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2VkZDQyODYtODkzMTE4M2YtZTZhNTVjNWItOWY0NTQ5Y2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.860046Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jqdztgfgff7sja711qb2k329, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWQ3NWY3NzMtOGJhMmIxN2EtMzIxNDZjOTUtMmRiYmM1NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.106230Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jqdztgnq9xxc3tgws1jeyftk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTFkODA1MWYtMmQ3MGVkYjYtMTY5ZWFlNWEtM2FmN2Q2Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.287022Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jqdztgxs1c685tcp6a1es2jq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzg0N2E3ODYtMTYxYjBjZjktOTU2NzkyOGItZTZiNDc3MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.368429Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jqdzth2ybhk1eqk4eyy3vcfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGQ2MWZmYi04ODY0OTU1MS1kNTgxOWE3NC04ZjE4ZjEwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.435353Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jqdzth5cc8n9kmwb30dgh7q9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2EwZDhkYWEtMTQyMmExZTktODQ4MDNhYTEtOGJlMGY4MTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.527585Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jqdzth7g0483vggvqfmys589, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWYyODFmZDItZTEwMmY2ZTEtZjQyMTI3ZTUtODZkM2ZkZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.605265Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jqdzthad0x7jz395vp6bamny, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTVkNzIwZTItZmU5MDU4YjYtYTdiNzkzMjItNjBiOGRkYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.684178Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jqdzthct2a6mqcrw7htwt77b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjY4NzQ1NDgtYTBlNDZjOTYtNzZkM2JlYjItYjk1ZWE4NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.773722Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jqdzthf939e6pfkhgf51sets, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGM5ZWI3NzctYjdjZjY3YzctNjQ1NjRjMDMtM2YzNWFkNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.851734Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jqdzthj34c7ks491bfta9bxe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjAwMmE2NjktNmY5M2RiZC0xNDc0ODZlZi03ZDFmMjZi, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.918861Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jqdzthmh82d3zt1kh81tdjnj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjhlZGU3YzctN2ZjMjA1MzUtMjg4MDFkMTYtODI2NjcwY2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.048288Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jqdzthr92aj6yrc6scpapbpw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTQ3NzczM2ItZjA2ZDUzODMtMTBlNmE2OTMtMmFlYjE1NmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.124021Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jqdzthtn7dg5hpjs1gjbs8z4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTlmNDEzY2EtNmUzNTU2MDgtOGI2NDlmODAtYTNjYzY3YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.192824Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jqdzthx19m5wvk892xz5ej04, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQ2ZTZkYTUtNjI4YWRkNDItNzUwMDExMTctYzUxNjA4Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.268570Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jqdzthz56bde8ycrrfnjh0w9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODQzYTg3MTAtOTMzZjI0YzUtNjg3NTViNTAtZDJlMjg1MDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.350157Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jqdztj1j21303nmh8gb61d5n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzgwODNmMmEtMjYwYzlkYjctMWIwMGYxMTQtYWRhNmE4M2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.434930Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jqdztj49d3w9phkxdagdddkd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGJlZDY3My05NmE0ODFlNi0xZjgyMWY3ZC1hMzU1Mzk2ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.513199Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jqdztj6r5d66eatpjr80ns5t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2NkNjhlZWQtYjRkY2Q5OTMtMTIzOWNhNmYtYjFjMmNhY2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.591626Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jqdztj963cpkq1fjed12nkfm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzM2MDljMjUtZjEyZjEwYTMtZjc0OWYwYmYtYTQ2MDczNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.700072Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jqdztjbnf3ms2rss24sgjp92, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTQyMzYxZmUtNjRhNGJmNzktNjJhY2NkMTItNGRjMmQ4ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.774314Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jqdztjf1c788pppmbtfzkwan, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWIzZTBkZWQtYmQ3N2JmZGMtMjk0NzUwNC0xYTRkMjI2Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.847560Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jqdztjhbesnv9k6fhzrn1c9k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzcxNWQ4Y2YtYTYxMzdjNzMtZWFkOGM0OTgtY2JhZDQ1ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.921018Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jqdztjkm5xvmzqwk2ytv23w9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2YxYzgwMmUtMTNlNTY3OGEtZmZhNjJlNWQtOTVlYjIzMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.993572Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jqdztjny0swbakbyxgt1t0w4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI5OThmMTYtNTg2ZjgyOTItZWY4NWFmNGYtY2I1NzYxMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:45.145565Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jqdztjrc8j502pmky4rtxv3c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmNhMjUzYTAtYTU0MTVhMTAtY2RjNzViNjItMTBkZGNlOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] Test command err: 2025-03-28T09:02:33.104018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:02:33.104484Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:33.104557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000cf9/r3tmp/tmpVvcdlY/pdisk_1.dat 2025-03-28T09:02:33.706791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:02:33.766889Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:33.818205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:33.818359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:33.833769Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:33.935148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:34.416866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2025-03-28T09:02:34.726105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:817:2675], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:34.726256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:826:2680], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:34.728770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:34.737451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:02:34.916371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:831:2683], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:34.981443Z node 1 :TX_PROXY ERROR: Actor# [1:890:2723] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:35.469381Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqdzt8q2fhn2h7chba2n3n44, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjVkMzdiYzUtYzY3OTE3N2MtNjc5M2JkNjktYjI3N2VkMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.585635Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqdzt9fd4835es1zxwh6bst8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ4YTFhOTEtZjNiZTdiNDAtYjZkNDZlNzAtYmFlMWRhYzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.743456Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqdzt9j6at9fk236qv065tvj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTk4NGIyNTYtYzQ2NjAyOWQtNzRiM2JlMjYtYTQyNGVmNDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.838256Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqdzt9q412qdhxzjqa8va36x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmMyNmYxMmMtZGNkMGVjMmItOTdhMGYxZGUtMTY2YjQ3NGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.954049Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqdzt9tc8vpsw865prneyc0s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzczODBlOWUtNGZjZWUwNzAtYmVmYTc3YTYtZDdmMzk3Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.097680Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqdzt9xw6h0d4yae0q39rpk8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGRlYzlhMjYtZDYzM2Q3YTAtNTQ3YTBkYjMtMzczNDNlNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.252567Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqdzta2g9hfjyqk6x110sxnd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDc3YjdlNGMtNjNjYjM1ZWMtOGZiMTI1ZmMtMzEyOTY1Zjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.413485Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqdzta729h1d89je6eh4cg9r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTExN2Y0ZmQtZTIxMGNmMGMtNGE2MDE3YmItOTcyMTFhOWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.529600Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqdztac352jjxwsee14zwqka, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWIxODBlYzktMTI3MWJkMTktZGQ3ZGQ0YTItMTJjMGQ2YTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.686951Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jqdztafv4axp23fay8jtyqpc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGMyZGJlNWEtMTYxZDlkMzgtZTg1MTdmOTktN2E0Nzg4NjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.765981Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqdztamm4bvjvqxgmzdnbhqm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzVjMmY0ZWItYThjYWFhOGYtZTE3NDlhM2YtODBiYzQwZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.853114Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqdztaq304mhs7hd4q840fpv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTc1MzE0OTUtY2EyNzFjODAtOTJiNjMwYjQtYjIwNGY4OTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.099946Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqdztaswd4kw0ravmw9r18rq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjcwM2UyZTctYTE5MzMyZTYtZTM2MTA4NTgtY2I5MDY5NzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.175488Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jqdztb1h75bsms0fyfbvy6h9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTJlYzlmMTYtZGZiYTg1NjEtYTRlMTBhMC0xMjM3YzA0Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.249784Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jqdztb3wc688x35pa8sdj1j6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjM0NTIxNWItNThlY2MzMjctZmVkOWYyZmQtMjc5YzBmN2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.321939Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jqdztb66fmjcqjprmchxchj6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzExMGRjMDUtOGQ2ZDhiYzItNjk2NmE5MzQtMjcxNzU2NTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.407683Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jqdztb8f84xhavzb9jevpfn6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDkwMjM1ODMtYjE1YjBjN2MtODFhZWRmZjYtYzU0M2QyZDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.496446Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqdztbb52v87wm73smq4stq5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFkNzEzYmYtODk1MDI5ZmUtMzQyZTQzZTQtMmU4MmQ0ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.568304Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqdztbdy312v7ff6s95xxyjg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTRiZmNlOGUtNTc3ZGFiZjgtOWUxOTA5NjQtZjFmZDk0NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.645615Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jqdztbg52nkzat48em0wbawz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDdiN2FlNzctZWU1ZGYxNi04MmU3NzY5Ny0yYzMzYzAy, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.722125Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jqdztbjkbq3e3n5jt03dap7e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQxZDgxMWYtM2U1YmE2OWMtNmQ4NTIxN2EtMzNkMGQ3OTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.801290Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqdztbmz1qwkg9ymdejk3pd7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmY1YjVkN2QtYTM4ZGZjNzMtMjBjZWFjYmItYzk4MWI1MTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.878801Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jqdztbqe7sax5qf3jb52ebry, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzQ2NmIyZjktYzFkN2RhNjYtYWIzZjAxZWEtNzk3NmJjMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.956734Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jqdztbsw7a3eyme655b23pf3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRlMzgyMGYtZTRiNDBjOGMtZmIzZjYxZmYtZDQ2Y2JmNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:38.037017Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jqdztbw93wb22an874tt36p4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTJhZTAzODAtYjU2NzMwODMtYmI5ZTk4OTAtNjY2ZDcxZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:38.118969Z n ... 564511Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jqdztfb4et8myjs74tw8cha7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDdmMjBlMTItNDMxNWU4ODUtZDFhZDZmMzQtNTllOTA2Yzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.671103Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jqdztfd1bhx28j163qb8mtem, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzRiMWQwNGQtZmQ2MzUxMjgtMjA0OTljOWYtYmI0ZGE5Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.734306Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jqdztfgc9ah0wnp47cspx233, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ2NWJhZjktNzY2ZjUxNGUtYjFhM2RlMDUtMTIwNjQyMTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.803213Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jqdztfjaaw3jp7145amfkcha, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjFlYWRhMzQtODc4OWE0NzItYTI2NDc2ZDUtMTBjZDc3ODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.880675Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. 
Ctx: { TraceId: 01jqdztfmgd5y4f5b42zy04eck, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmJmMTdiNGUtM2M5Y2ZmMTgtYTM4MjMxY2YtNTE5MzdiOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.960726Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jqdztfpy9tn7vev0ar7fm3fq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjJlY2UxYmYtNjc4MTY4MTItZTEwZDk3M2ItYjQzMzcxOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.033856Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jqdztfsdd1t4ktyxq0j7w5t8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWFlMmIxMWEtNmUwOGI1YjUtZTEyMzY1YzAtMTczMDI5NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.108265Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jqdztfvq29e0462v6fkzcs43, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmQ5ZTAwNzQtMTFmMDViYzktNWZkZjZkZWItNGRlNWVlOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.163699Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jqdztfy094cp4scd7bg85488, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmZiOWUzOWQtNjcwMjdhYmEtZTgzMWE1ZjgtOGM1MGRlNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.224277Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jqdztfzq3w30ecc8qyj1dyh8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzAwYzA0ZWMtOTVkMDIxMmQtODVjNWNkZWEtOTFmNDA2ZjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.299282Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jqdztg1p16v46qq4svffc7da, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWRiZTEzNy1kMzk4NTRkMy0zNTNlMmFlZC01NGZlYTlkNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.360118Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jqdztg3zfr2z7zksvwv76hsg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWVmOGJmNmQtZjUyYjRlYTAtNWUxODJiNzUtMjM5YzEyZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.431132Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jqdztg5wf9v7dpxnvqsn2yj6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWI2ZGNkNGUtYTBkNTY3MGItNzk2NTY0YmYtNWUyZTBiMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.516069Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jqdztg84f5pejwjpkqr71b1k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2YxM2U1NGItNmNhZTM0MzgtMjY3ZjNjYjUtYjk5NGRiNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.599428Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jqdztgasc6pp6bkjwmhvmztt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2NhM2JjNGMtNTcwMDIzOTctYTY5N2QyOGItMjhjMjNmYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.682710Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jqdztgdd2mdbajj97xy7181f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjFmZDEwZTMtN2ExZWUzNy02NThkNmU2Yi1jYmIwYzlmNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.802539Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jqdztgfz251epe2azak0zxw7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWQ4N2MzYi1lMDg0OWQyLTE4NTNkMWI0LWVmZTNmZjll, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.892604Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jqdztgkt2jdf53bx7pvn3k65, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmE2YWM4NjYtZmM4NjRlMzUtYjM4MDNlMWYtMzA5YTU2YjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:42.981746Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jqdztgpj75wqy4p8w7z0nhps, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTJiNDY4MDItMjVlZDg5MDMtZTllOTNhNTItYmVhZjE3MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.081044Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jqdztgsb39d6xwnb7d4s0fxn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODc2NWI1MTctMmVjY2Y1ZC03MjllZjJjNS1kZjE1MDRhMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.157028Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jqdztgwh2cv7w96htqe7hcy2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI0YzBmMzktNDg1YzIyMzMtNzY1NTNhNGQtMzVhMDFmMzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.223636Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jqdztgysa46h7hy0tzx9vvwm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTlhNjFiMGUtMjg2YzkzNy0zMzFlYjU3NC1mMDM0NjM0OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.294324Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jqdzth0w6vna70gjbj1estpe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjc2NmNlOTAtOWY1ZTgwY2ItNjgwN2VmZjAtM2FiMGMxOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.361972Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jqdzth33an1zedgkrgbzs2q8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRkYWE2NWMtYTA0OGZkNTktZjVjOTQ0N2MtNmQyMjRiNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.438620Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jqdzth56f5xxa9jv3x50rjbt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzZkNWQzNmUtZWM0M2Y2ZjItYjk4NWNmODMtN2QyZmY1MWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.534232Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jqdzth7pdrqmhzjkm0gs4qs4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTcyYTUwZTUtZjAzNGM4MWItOTY5MjY5Mi0xODg2MjM0OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.693902Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jqdzthakecwsmey2n5zt6r0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA2YTk0MjItMTYwM2Y2NTMtNjQ3YTZkMTYtNjc2ZmFmZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.817803Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jqdzthfqdvbdhmqrts7wpstb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjU4NjFmZDAtZGVjMWZmNTMtZDNkMmY3MDgtNmNmNGE4YmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:43.968095Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jqdzthkj29v21hj34a77z256, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjRmNzQyMTktZWRjOTRjNmItYTY2NmU4MjMtN2I1NmQ0OWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.133854Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jqdzthr64019wjvvfdebxvef, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODEyNTQ3NTctMjc1ZDBhNGEtZjc4NDA1MTUtYjFiMDYzM2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.229731Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jqdzthxm9hbxmy1qdhymr5j8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmMzZmRiMjgtYWY2N2QxNjctZmI2M2JiZTktZmY0ZGM5NDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.294022Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jqdztj0a5c9k3xb75d6zseb3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzU4ODFmMGYtZTk0YjhiMGUtZGY4OGQ2YTMtY2JiNmFlYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.362085Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jqdztj2a6xy3gqv9hvvwtcvx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWVjOWVhNjktZDI0N2I1MzktZjcxYmIzNjMtZjdmODgwOWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.429797Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jqdztj4f5ntq9yxze59vxt9z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGRlMmI2NC04MTA3MzQ1Ny01MDU1OWNmZS02MzEzZTFhNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.494868Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jqdztj6j3n6syswv10616y2j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmFkZjQ4ZTAtNWY4ZTZlZjctMzZiMGE5ODQtNzBlODM0MDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.919228Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jqdztje30zjfm0tfj7a18pta, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjBhNDE1NjktY2ZmMzA4YmQtMWQ1Y2M3YmQtZWUzNGFmOWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest |75.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |75.7%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TTicketParserTest::LoginGood [GOOD] >> TTicketParserTest::LoginGoodWithGroups >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-03-28T09:02:46.211478Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:46.215882Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:46.216332Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-28T09:02:46.216405Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:46.216456Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-28T09:02:46.216512Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:46.216583Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:46.216676Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-28T09:02:46.217456Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:261:2253], now have 1 active actors on pipe 2025-03-28T09:02:46.217617Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:46.253618Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:46.258882Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:46.259056Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:46.260104Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:46.260310Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:46.260816Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:46.261215Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:269:2259] 2025-03-28T09:02:46.264002Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-03-28T09:02:46.264093Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2259] 2025-03-28T09:02:46.264154Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:46.264224Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:46.265236Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:272:2261], now have 1 active actors on pipe 2025-03-28T09:02:46.357328Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:46.363566Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:46.363949Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-03-28T09:02:46.364010Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:46.364062Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-03-28T09:02:46.364106Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:46.364168Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:46.364238Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-03-28T09:02:46.364982Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:405:2360], now have 1 active actors on pipe 2025-03-28T09:02:46.365149Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:46.365385Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:46.368544Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:46.368682Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:46.369550Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:46.369684Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:46.370060Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:46.370315Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [2:413:2366] 2025-03-28T09:02:46.372952Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-03-28T09:02:46.373036Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:413:2366] 2025-03-28T09:02:46.373114Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:46.373166Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:46.374016Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:416:2368], now have 1 active actors on pipe 2025-03-28T09:02:46.406901Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:46.411800Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:46.412172Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-03-28T09:02:46.412225Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:46.412267Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-03-28T09:02:46.412309Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:46.412366Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:46.412452Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-03-28T09:02:46.413259Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:465:2405], now have 1 active actors on pipe 2025-03-28T09:02:46.413319Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:46.413510Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:46.416850Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:46.417030Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:46.417927Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config 
applied version 3 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:46.418066Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:46.418532Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:46.418815Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:473:2411] 2025-03-28T09:02:46.420977Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-03-28T09:02:46.421061Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:473:2411] 2025-03-28T09:02:46.421129Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:46.421183Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Proc ... edTxs.size=0 2025-03-28T09:02:47.271632Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:47.271706Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-28T09:02:47.272501Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:524:2449], now have 1 active actors on pipe 2025-03-28T09:02:47.272628Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:47.272850Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 8(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:47.276084Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:47.276288Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:47.277404Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 8 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:47.277563Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] 
Start initializing step TInitConfigStep 2025-03-28T09:02:47.278040Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:47.278327Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:532:2455] 2025-03-28T09:02:47.280795Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-28T09:02:47.280888Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:532:2455] 2025-03-28T09:02:47.280958Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:47.281014Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:47.282046Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:535:2457], now have 1 active actors on pipe 2025-03-28T09:02:47.283553Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:541:2460], now have 1 active actors on pipe 2025-03-28T09:02:47.283865Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:02:47.284028Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:542:2461], now have 1 active actors on pipe 2025-03-28T09:02:47.284329Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:02:47.284535Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:543:2461], now have 1 active actors on pipe 2025-03-28T09:02:47.284669Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:02:47.295803Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:551:2468], now have 1 active actors on pipe 2025-03-28T09:02:47.324322Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:47.327853Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:47.328278Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-28T09:02:47.328338Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:47.328511Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:47.329510Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:47.329574Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-28T09:02:47.329711Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:47.330099Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:47.330391Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:608:2513] 2025-03-28T09:02:47.332811Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-03-28T09:02:47.334353Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-03-28T09:02:47.334710Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-03-28T09:02:47.335117Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-03-28T09:02:47.335461Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-03-28T09:02:47.335517Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:02:47.335565Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:02:47.335616Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-28T09:02:47.335685Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:608:2513] 2025-03-28T09:02:47.335755Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:47.335826Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-03-28T09:02:47.336927Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:542:2461] destroyed 2025-03-28T09:02:47.337044Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:541:2460] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 79 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 79 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_UNKNOWN } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 39 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 39 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } }
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD]
>> BackupRestoreS3::RestoreViewQueryText
>> TTicketParserTest::BulkAuthorizationWithRequiredPermissions [GOOD]
>> TTicketParserTest::BulkAuthorizationWithUserAccount
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeView
>> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD]
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest
>> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD]
Test command err: 2025-03-28T09:02:36.861125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:02:36.861604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:36.861673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001714/r3tmp/tmpvm0JSr/pdisk_1.dat 2025-03-28T09:02:37.506285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:02:37.558071Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:37.602487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:37.602619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:37.619140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:37.704578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:37.753842Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:02:37.755178Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:02:37.755698Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:02:37.755988Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:37.804959Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:02:37.805721Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:37.805840Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:37.807678Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:02:37.807752Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:02:37.807811Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:02:37.808194Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:37.808355Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:37.808463Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:02:37.819414Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:37.845687Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:02:37.845971Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:37.846117Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:02:37.846170Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:37.846224Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:02:37.846264Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:37.846508Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:37.846574Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:37.846925Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:02:37.847044Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:02:37.847519Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:37.847565Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:37.847644Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:02:37.847692Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:02:37.847728Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:02:37.847763Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:02:37.847817Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:37.847973Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:37.848012Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:37.848055Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:02:37.848136Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:02:37.848170Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:02:37.848293Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:37.848525Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:02:37.848607Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:02:37.848731Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:02:37.848793Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:02:37.848834Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:02:37.848869Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-28T09:02:37.848903Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:02:37.849129Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:02:37.849152Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:02:37.849193Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:02:37.849260Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:02:37.849308Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:02:37.849331Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:02:37.849356Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:02:37.849379Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:02:37.849395Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:02:37.850524Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:02:37.850582Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:37.863309Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:37.863392Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:02:37.863425Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:02:37.863462Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:02:37.863511Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:02:38.030221Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:38.030284Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:38.030344Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:02:38.030819Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:02:38.030878Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:02:38.030993Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:02:38.031040Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:02:38.031080Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:02:38.031128Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:02:38.042241Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:02:38.042338Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:38.043232Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:38.043278Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:38.043329Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:3 ... 47.297450Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037889, TxId: 281474976715668, MessageQuota: 0 2025-03-28T09:02:47.411384Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-03-28T09:02:47.411467Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037889 2025-03-28T09:02:47.411811Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:958:2780], Recipient [2:958:2780]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:47.411883Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:47.412009Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:02:47.412052Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:02:47.412095Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037889 for ReadTableScan 2025-03-28T09:02:47.412129Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit ReadTableScan 2025-03-28T09:02:47.412179Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037889 error: , IsFatalError: 0 2025-03-28T09:02:47.412241Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2025-03-28T09:02:47.412281Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit ReadTableScan 2025-03-28T09:02:47.412311Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit CompleteOperation 2025-03-28T09:02:47.412344Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2025-03-28T09:02:47.412565Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is DelayComplete 2025-03-28T09:02:47.412607Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompleteOperation 2025-03-28T09:02:47.412663Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037889 to execution unit 
CompletedOperations 2025-03-28T09:02:47.412717Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037889 on unit CompletedOperations 2025-03-28T09:02:47.412759Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037889 is Executed 2025-03-28T09:02:47.412788Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037889 executing on unit CompletedOperations 2025-03-28T09:02:47.412823Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037889 has finished 2025-03-28T09:02:47.412864Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:47.412894Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-28T09:02:47.412942Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-03-28T09:02:47.413002Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-03-28T09:02:47.427564Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:02:47.427647Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:02:47.427688Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037889 on unit CompleteOperation 2025-03-28T09:02:47.427767Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037889 at tablet 72075186224037889 send result to client [2:1150:2943], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:02:47.427844Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:02:47.428101Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 3500} 2025-03-28T09:02:47.428174Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:02:47.428216Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:02:47.428494Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:1150:2943], Recipient [2:963:2782]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715668 Cleared: true 2025-03-28T09:02:47.428543Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-03-28T09:02:47.428692Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:963:2782], Recipient [2:963:2782]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:47.428725Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:47.428791Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:02:47.428830Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:02:47.428872Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for WaitForStreamClearance 2025-03-28T09:02:47.428906Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit WaitForStreamClearance 2025-03-28T09:02:47.428951Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [3500:281474976715668] at 72075186224037890 2025-03-28T09:02:47.428993Z node 2 :TX_DATASHARD TRACE: 
Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-03-28T09:02:47.429027Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit WaitForStreamClearance 2025-03-28T09:02:47.429057Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit ReadTableScan 2025-03-28T09:02:47.429087Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2025-03-28T09:02:47.429332Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Continue 2025-03-28T09:02:47.429366Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:02:47.429394Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-03-28T09:02:47.429424Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 out-of-order limits exceeded 2025-03-28T09:02:47.429454Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-28T09:02:47.430422Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1170:2961], Recipient [2:963:2782]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-28T09:02:47.430468Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-28T09:02:47.430668Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2025-03-28T09:02:47.431893Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:02:47.441040Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2025-03-28T09:02:47.441124Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2025-03-28T09:02:47.542857Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-03-28T09:02:47.542932Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037890 2025-03-28T09:02:47.543189Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:963:2782], Recipient [2:963:2782]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:47.543237Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:47.543331Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:02:47.543373Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:02:47.543416Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [3500:281474976715668] at 72075186224037890 for ReadTableScan 2025-03-28T09:02:47.543448Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit ReadTableScan 2025-03-28T09:02:47.543486Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [3500:281474976715668] at 72075186224037890 error: , IsFatalError: 0 2025-03-28T09:02:47.543532Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-03-28T09:02:47.543568Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 
72075186224037890 executing on unit ReadTableScan 2025-03-28T09:02:47.543601Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompleteOperation 2025-03-28T09:02:47.543632Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2025-03-28T09:02:47.543829Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is DelayComplete 2025-03-28T09:02:47.543866Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompleteOperation 2025-03-28T09:02:47.543917Z node 2 :TX_DATASHARD TRACE: Add [3500:281474976715668] at 72075186224037890 to execution unit CompletedOperations 2025-03-28T09:02:47.543947Z node 2 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715668] at 72075186224037890 on unit CompletedOperations 2025-03-28T09:02:47.543981Z node 2 :TX_DATASHARD TRACE: Execution status for [3500:281474976715668] at 72075186224037890 is Executed 2025-03-28T09:02:47.544008Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715668] at 72075186224037890 executing on unit CompletedOperations 2025-03-28T09:02:47.544034Z node 2 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715668] at 72075186224037890 has finished 2025-03-28T09:02:47.544066Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:47.544096Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-03-28T09:02:47.544125Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-03-28T09:02:47.544154Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-28T09:02:47.555066Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:02:47.555140Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:02:47.555179Z node 2 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715668] at 72075186224037890 on unit CompleteOperation 2025-03-28T09:02:47.555239Z node 2 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715668] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1150:2943], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:02:47.555288Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD]
Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR }
Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST }
Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST }
Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST }
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD]
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry
>> TTicketParserTest::AccessServiceAuthenticationApiKeyOk [GOOD]
>> TTicketParserTest::AuthenticationUnavailable
>> TColumnShardTestSchema::HotTiersTtl [GOOD]
>> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest
>> DataShardWrite::UpsertBrokenLockArbiter [GOOD]
>> TLocksFatTest::RangeSetNotBreak [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest
>> ExternalBlobsMultipleChannels::SingleChannel [GOOD]
Test command err: 2025-03-28T09:02:33.341771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:02:33.342201Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:33.342282Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000c78/r3tmp/tmpXAwcNf/pdisk_1.dat 2025-03-28T09:02:34.052910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:02:34.102639Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:34.146968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:34.147115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:34.160379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:34.255896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:34.639053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:34.639234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:34.639330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:34.645815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:02:34.839741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:34.919039Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:35.404625Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqdzt8mc1sp4bc4807s630nc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODk2MDQyZTctZmVhMTI5NzgtOGMxNTk3ZjEtYjQ5NDJhNWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.509681Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqdzt9d8988ypc0889hd3b41, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWYzMWE0ZmQtYmQyZWQxZGMtYjZlNTNmYzAtNmY4ZmU3YTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.585638Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqdzt9fv9rzzv1t7v22d0g14, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWMxN2Y2ZDktNjFiNDE5MGUtNTc5ZjQzOTMtMzBlMWY3MDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.684437Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqdzt9j61xgkx81cpzvd50dd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2MyMjc5OWEtZGY3NmFmMWYtZmJjMjkwZGYtN2E0NTc2NGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.802702Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqdzt9n9ety9sawta1qhqs42, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTFhM2E3MDgtYzMxYjE5OTMtZGYyYzIxOGEtOWVlODdlMTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:35.884696Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqdzt9rz5p7cprtc8djhzqpc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWIxNTYxZmItZTQwYjQ0NWYtZDVmMDNiNGEtMjgwZmU2NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.017638Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqdzt9w8fwycgm14ymjjhncx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWYyYjAxMmEtZjk3MWYwMDQtYmJjY2UwNGEtOWQyOGRmNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.165457Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqdzt9zp4yyrk3dc693bp51q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTAxN2E0M2YtOGFiNDA4NjgtN2I0YzQ3MmQtZWQwZDk3Mzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.281087Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqdzta4e59wa7kc7c9mcqx9j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTQzNzRmODQtYzBjMGM1Mi00NmYwODJkOS1lNGUwMjdmYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.489685Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jqdzta7yfpdrrkb3wswwge86, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTExYmU0YzctY2MyOGZhOS00YjkyZWY2Ny04NmMzZTQwMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.748571Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqdztaep38vtr8whw8rwzve0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjJjMzBkMTYtNTBmNWY0MWItNTQ3Njg0NzktOWRlN2ZiNTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:36.914654Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqdztapp1dtnnb5n99chmrxe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzY1OWJkZmQtNDI4Y2ZjNjktNTVmMGM3ZWYtZWNkNGMyOTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.143801Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqdztawb9jtfk32f982k3gsk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDE1NjM3YWQtMzU3N2MyZGEtMWI1MmY0ZWItODVjMWJmZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.335821Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jqdztb356drznnvf1tk1nafz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzMyYzM5OWYtMTk3ZDNmNGQtNDgwMTgxYzAtMjM3YmIwMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.480401Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jqdztb8wezxz4a8y5szkh39y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmY2NmU4MjctNWJhY2Y4ZDMtMmM5YzUyZWItOTY3ZGFiYzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.602017Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jqdztbdd1byhy9sewhn90rbs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2YwM2RkYTctZTEwNmNkYS1jODYwZDkxZC00YTkyNjg0Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.718351Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jqdztbhbfef5kgzgd6b3w4x3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjE2NjY1NjEtZjc2Mjc5NWEtOGZmZDNkYWEtMzFjMjk0NzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.846028Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqdztbmv0hv25wjcchmvc7ja, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTg5YWM5NjgtOTU3MWI4OGItNDc2YzYzMjEtOTg3MWE0NTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.938155Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqdztbrv7qm7dw3s65ttrw1d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI5MDVhNzktY2M3YTY2M2EtYTkwNzVlYzItM2NlZjdiZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:38.017674Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jqdztbvq2t8y1nbxfsz75th6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTRlM2FiNDYtMjhmNGZkYmEtOWJlZTYzYjYtYTdlNTdmZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:38.093859Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jqdztby66r6q0c1ghrrg2sfy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRkZjY0ODYtNzA1OGY5MzgtZDkzMTNiYTktZWExMmNkNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:38.168422Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqdztc0k0hk5d4h80mmbbmg8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmMwZTkwZWEtOTg4YTY5YTctNmYwY2I3ZmYtY2NiYjAzNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:38.260609Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jqdztc2x0zcv1mfdxwrttkbn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTIwYTA2ZjEtOGZiNWU2ZDEtMzM0ZjhhODMtN2IxZjY0MjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:38.385833Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jqdztc5w7j929qgp1459whgq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjI5ZGVlY2UtMzQ1ZWQ3MGQtN2I0Mzg2NjItMjk4NTE0NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:38.460800Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jqdztc9qf233f827g5xqpxbj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI1N2MxNzItM2Y2YTczZmUtMTU0ZGI5ZTAtNWNjMzliNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:38.546910Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jqdztcc17h9b3f3xjm8tcmj5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODlmOTIzZWYtYzBlNTZmMzktNzI0OTEyNzItOTQ1MjFi ... 9:02:44.616890Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715725. Ctx: { TraceId: 01jqdztja7dfvqyqxwv7j159m6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmE2MjQyZTYtNzMxYWUwYjItYWM3YTQ4NzQtMzVmYmZkMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.701937Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jqdztjcd6nn73cxg8fp1cqa4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTVjMmNkMGMtNmIzNDMxZGEtYTA3Y2MzODMtMzJkMmExOGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.786122Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jqdztjf38y1rhq47bsq72nbg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JiNzFjNDYtMThhMjVhM2ItYzlmNDZkMDMtMWEwMDAyYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:44.855636Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jqdztjhqbcfv198jvzcq0fm4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTc1MmI2NGQtNTQ2MjAxNGItY2Y4ZjViNDQtZDM4MTM3Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:02:44.930207Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. Ctx: { TraceId: 01jqdztjkxcdq37yyd1hr5en4f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZThhYTlhZS05NTJkYTg4My0xZmIxYmI2My05MTE5NzYwMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:45.019591Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jqdztjphf8thp1kxzk4qszr4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2IzZjQ0MmUtNWJjNmI3ODItNTFkNjc4MDQtMjExMWQ2Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:45.097994Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jqdztjs1391w1f9ntpde0zjp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWYyOTlmODYtYTc4YzUwODEtYjQ2YzhmMDgtZmVlOTFhYjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:45.200031Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jqdztjvk492hk6ez2jv3rvz7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmI3MmIyMS05ZjY2NjZlNy04MTQ5Y2Y5Ni01NGIyMmQxMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:45.273049Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jqdztjyn4yf2520e3f0p4qhp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWU0ZWY4MTYtMWQwOTE2NWEtYWVlNTRhMGUtN2FiM2RhNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:45.372078Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jqdztk0y6cky7770tzh666tb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTM3OWYxMGItNTBiOTdlOWYtMzQzNTlmNGUtYWJlMjc2YWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:45.473288Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jqdztk426dnrq7ckqrswvsv7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVhMTkwYWMtNmFkYjFkNGMtZjU4NGJmZTQtYWRmZTc4ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:45.559902Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jqdztk769fttdjtgtytdj6hg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE0YzU2Y2EtMWNiNTZkZWUtZjM4ZDU3M2EtNDY3ZWNmYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:45.639865Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jqdztk9x4j5fbe6bj01ht8za, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmU4ZGNlNjktMTg1NzE0NTYtNDA0YTJhODYtZjg3Y2QwNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:45.727943Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jqdztkcddbya01jkzf1a9s3z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGU1YWMyNzItY2E3MzI3YWEtN2E5OGIzMy02Y2U2ZDdhZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:45.808186Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jqdztkf64bgy0fp3nanc5bmk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmQwODMxYzItOWY1YjM1YzEtOTBiNDg3OWQtYWFjM2VkNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:45.888084Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jqdztkhna7c2nmg93t2bwafe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDc0ODljNTEtNzI3YTczYjctMzdhMDg4OWUtNThiNWFkZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:45.992162Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jqdztkm5dt5rwkr7tqtmp1rm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM0MGE0MTUtODMwMWI2ZmYtYjUwMTcwMjYtMTBjZWNhMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:46.062280Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jqdztkqd7e15vc7bfp14pz6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDA1NGNmMDQtYjlmZWFiZGQtMThmYTMyMDctNDgzMWQxMzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:46.133927Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jqdztksk0yx2qp0zw47xzfj9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjU0ZmRjZjAtNDljNGVmMGMtMTVjMGUxNzEtZGZiZGY1OTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:46.205955Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jqdztkvv8n1yp37f520ftfhc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2MyMmVhZDctMWYyZTM5NTMtYzZmMTMwOTItYzlkYWJhNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:46.277271Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jqdztky2f6zeqahahqanvsmb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWU2NjA2NGUtMTQ2ZjA2NDgtYjMyMjg4Mi1lN2RhMmM5Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:46.348804Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jqdztm0a80e0sb1s18f6e3x3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGY1NDQzZWMtYjZjOGEyNGMtNTRhZDI5Ni00NTNlODMyMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:46.420860Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jqdztm2h1z9bjk5rf8sz2zfh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjBhZWEzNjctMTYwOThkMjktMTUzMDJmM2ItZmI5YTAyYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:46.497050Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jqdztm4s2cs4v3gbd9z01xxf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTVlYzRkZjQtN2Y5ZjNlLTJlODUyNTIzLTNhYzdiNGE2, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:46.573490Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jqdztm760mknmnjtefre1tcf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWYxNGMwOWUtZDJhYzYwNWQtNzE0ZjVkZTYtOGQwMDcxNDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:46.676923Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jqdztm9jarrvjhyq0678m0ek, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2EyZGU2NGQtYzg2ODNiZGUtYmRiNjI5Ny0xMDA1OWZjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:46.753315Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jqdztmct3391abm12aa2e2ev, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDY1Y2E4LTU5YWJlZmQyLTVhY2U3YWEzLTVkNmQyMGU4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:46.829373Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jqdztmf6d5x52382rmw74cy4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWFmMDFmN2MtM2Y2MWFmYWYtYmFiZjNiZC1lNDc1NTFjMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:46.904232Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jqdztmhj71k6nrpex9fzgh6y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWY4MzZkZTYtM2I5OWY5OTEtZGQyOGEzNTAtYzFiYjBlYWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:46.980266Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jqdztmkxbqmvfxx8rffsdw0f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTY4MTcwNGItOTBmMWQ5NTItYWY3ZjQwNTItZmVmYmE5ZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:47.055878Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jqdztmp9b56f381321dj12rr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzdlNjllZTItZGIxZTM3M2MtMTliZWFjY2ItMjNmMWM0ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:47.135100Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jqdztmrnbydwtfrrwf7xr3r4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjdmM2U4ZmUtOGRkNzMwLTFjODMzZTE5LWY5MGFmZjk5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:47.213521Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jqdztmv49c7r1skdyja34417, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjAyYzU2ODktMWFkOGVkOGItOGZmNmEzZmItYjcxZDA4MTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:47.294919Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jqdztmxnbjbq044ck6w5g81p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjc1Y2U3ZTUtNGVmNjNlNDgtNjhhNGEzN2EtYWM4YTE0Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:47.398193Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jqdztn047v6azj6zvx0cx6re, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWQ5YTE5ZjYtZGQyMjdhNjAtNGJkYTAyNjEtY2Q1YWEyNWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:47.578444Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jqdztn3r2p0xz1er2pkh718n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzFjNjEzMDEtM2FmNTg1ZmQtODg4NDc3MTYtNDRmYzlmNDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: 2025-03-28T09:02:46.469139Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:46.480613Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:46.480989Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-28T09:02:46.481061Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:46.481119Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-28T09:02:46.481184Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:46.481254Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:46.481331Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-28T09:02:46.482104Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [1:262:2254], now have 1 active actors on pipe 2025-03-28T09:02:46.482240Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:46.520629Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:46.527763Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:46.527951Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:46.528913Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { 
PartitionId: 0 } 2025-03-28T09:02:46.529083Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:46.529526Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:46.529934Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [1:270:2260] 2025-03-28T09:02:46.536729Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-03-28T09:02:46.536824Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [1:270:2260] 2025-03-28T09:02:46.536886Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:46.536936Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:46.537877Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [1:273:2262], now have 1 active actors on pipe 2025-03-28T09:02:46.655799Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:46.659550Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:46.659911Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-03-28T09:02:46.659967Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:46.660004Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-03-28T09:02:46.660046Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:46.660090Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:46.660144Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-03-28T09:02:46.660886Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [1:408:2363], now have 1 active actors on pipe 2025-03-28T09:02:46.660955Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:46.661149Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:46.668737Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:46.668872Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 
2025-03-28T09:02:46.669822Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:46.669984Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:46.670403Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:46.670621Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [1:416:2369] 2025-03-28T09:02:46.673104Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-03-28T09:02:46.673181Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [1:416:2369] 2025-03-28T09:02:46.673236Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:46.673294Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:46.674178Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [1:419:2371], now have 1 active actors on pipe 2025-03-28T09:02:46.695027Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:46.713865Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:46.714276Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-03-28T09:02:46.714327Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:46.714367Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-03-28T09:02:46.714419Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:46.714465Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:46.714523Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-03-28T09:02:46.715265Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [1:468:2408], now have 1 active actors on pipe 2025-03-28T09:02:46.715370Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:46.715547Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:46.722671Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:46.722838Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:46.723684Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 3 actor [1:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:46.723828Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:46.724204Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:46.724437Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [1:476:2414] 2025-03-28T09:02:46.726323Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 
2025-03-28T09:02:46.726385Z node 1 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [1:476:2414] 2025-03-28T09:02:46.726433Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:46.726535Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:46.727437Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [1:479:2416], ... SQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:49.263223Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 11(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:49.265505Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:49.265630Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:49.266261Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 11 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:49.266392Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:49.266715Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:49.266912Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:475:2413] 2025-03-28T09:02:49.268917Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-03-28T09:02:49.268985Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:475:2413] 2025-03-28T09:02:49.269040Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:49.269091Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. 
Count 0 2025-03-28T09:02:49.269794Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:478:2415], now have 1 active actors on pipe 2025-03-28T09:02:49.287847Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:49.291636Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:49.291942Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-28T09:02:49.291993Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:49.292035Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-03-28T09:02:49.292096Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:49.292147Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:49.292204Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-28T09:02:49.292829Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:527:2452], now have 1 active actors on pipe 2025-03-28T09:02:49.292937Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:49.293122Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 12(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:49.295607Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:49.295728Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:49.296278Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 12 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:49.296389Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:49.296698Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:49.296879Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:535:2458] 2025-03-28T09:02:49.299006Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
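
The repeated "Config update version N (current M)" / "Apply new config" / "Config applied version N" triplets above show the PersQueue tablet receiving, persisting, and acknowledging a TPQTabletConfig update for each topic partition before the partition initializer steps run; the "(current M)" annotation suggests the incoming version is compared against the stored one before the update is applied. As a minimal sketch only — assuming the NKikimrPQ::TPQTabletConfig protobuf exposes Arcadia-style accessors (SetX/MutableX/AddX) for exactly the fields printed in the log, which is not verified here — the version-1 config for rt3.dc1--topic1 could be assembled like this:

    #include <ydb/core/protos/pqconfig.pb.h>  // assumed header for TPQTabletConfig

    // Sketch: rebuild the version-1 config for rt3.dc1--topic1 as printed above.
    // All values are copied from the log; accessor names are assumptions based
    // on standard Arcadia protobuf codegen.
    NKikimrPQ::TPQTabletConfig MakeTopic1Config() {
        NKikimrPQ::TPQTabletConfig config;
        config.SetCacheSize(10485760);                    // 10 MiB cache
        auto* partition = config.MutablePartitionConfig();
        partition->SetMaxCountInPartition(20000000);
        partition->SetMaxSizeInPartition(104857600);      // 100 MiB
        partition->SetLifetimeSeconds(0);
        partition->SetLowWatermark(6291456);              // 6 MiB
        partition->SetSourceIdLifetimeSeconds(3600);
        partition->SetMaxWriteInflightSize(90000000);
        config.AddPartitionIds(0);
        config.SetTopicName("rt3.dc1--topic1");
        config.SetVersion(1);
        config.AddPartitions()->SetPartitionId(0);
        config.AddAllPartitions()->SetPartitionId(0);
        return config;
    }
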
2025-03-28T09:02:49.299077Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:535:2458] 2025-03-28T09:02:49.299136Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:49.299185Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:49.299906Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:538:2460], now have 1 active actors on pipe 2025-03-28T09:02:49.301490Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:547:2463], now have 1 active actors on pipe 2025-03-28T09:02:49.301946Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:549:2464], now have 1 active actors on pipe 2025-03-28T09:02:49.302057Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:550:2464], now have 1 active actors on pipe 2025-03-28T09:02:49.302240Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:551:2464], now have 1 active actors on pipe 2025-03-28T09:02:49.302872Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:564:2475], now have 1 active actors on pipe 2025-03-28T09:02:49.325949Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:49.328280Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:49.328584Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-28T09:02:49.328631Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:49.328785Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:49.329248Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:49.329295Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-28T09:02:49.329396Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:49.329714Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:49.329899Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:621:2520] 2025-03-28T09:02:49.332101Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-03-28T09:02:49.333285Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-03-28T09:02:49.333574Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-03-28T09:02:49.333860Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-03-28T09:02:49.334069Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-03-28T09:02:49.334111Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:02:49.334172Z node 3 :PERSQUEUE INFO: 
[rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:02:49.334209Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-28T09:02:49.334266Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:621:2520] 2025-03-28T09:02:49.334322Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:49.334373Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:49.339499Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:547:2463] destroyed 2025-03-28T09:02:49.339586Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server disconnected, pipe [3:549:2464] destroyed 2025-03-28T09:02:49.339662Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:550:2464] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } ErrorCode: OK } } } |75.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |75.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut >> BackupRestore::RestoreTablePartitioningSettings >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] Test command err: 2025-03-28T09:02:38.064804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:02:38.065379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:38.065443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00170b/r3tmp/tmpDXvTkc/pdisk_1.dat 2025-03-28T09:02:38.681159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:02:38.739815Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:38.804324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:38.804456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:38.820237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:38.918166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:39.030575Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:02:39.035681Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:02:39.036217Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:02:39.036493Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:39.164482Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:02:39.165228Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:39.165336Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:39.177265Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:02:39.177362Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:02:39.177422Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:02:39.177815Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:39.177996Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:39.178101Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:02:39.189861Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:39.249824Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:02:39.330639Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:39.330820Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:02:39.330885Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:39.330937Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:02:39.330979Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:39.331199Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:39.331246Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:39.331543Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:02:39.331666Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:02:39.332144Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:39.332189Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:39.332244Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:02:39.332282Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:02:39.332315Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:02:39.332346Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:02:39.332407Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:39.332533Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:39.332564Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:39.332615Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:02:39.332689Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:02:39.332722Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:02:39.332822Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:39.333013Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:02:39.333082Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:02:39.333183Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:02:39.333236Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:02:39.333310Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:02:39.333349Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-28T09:02:39.333379Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:02:39.333641Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:02:39.333694Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:02:39.333740Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:02:39.333772Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:02:39.333868Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:02:39.333905Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:02:39.333939Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:02:39.333973Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:02:39.334005Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:02:39.342557Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:02:39.345961Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:39.356960Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:39.357029Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:02:39.357062Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:02:39.357101Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:02:39.357163Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:02:39.528410Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:39.528474Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:39.528523Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:02:39.528944Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:02:39.528984Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:02:39.529098Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:02:39.529162Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:02:39.529206Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:02:39.529239Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:02:39.554024Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:02:39.554121Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:39.555201Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:39.555250Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:39.555307Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:3 ... t 72075186224037890 to execution unit CompleteOperation 2025-03-28T09:02:43.348147Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompleteOperation 2025-03-28T09:02:43.348311Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is DelayComplete 2025-03-28T09:02:43.348343Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompleteOperation 2025-03-28T09:02:43.348368Z node 1 :TX_DATASHARD TRACE: Add [3000:281474976715667] at 72075186224037890 to execution unit CompletedOperations 2025-03-28T09:02:43.348391Z node 1 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715667] at 72075186224037890 on unit CompletedOperations 2025-03-28T09:02:43.348422Z node 1 :TX_DATASHARD TRACE: Execution status for [3000:281474976715667] at 72075186224037890 is Executed 2025-03-28T09:02:43.348445Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715667] at 72075186224037890 executing on unit CompletedOperations 2025-03-28T09:02:43.348469Z node 1 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715667] at 72075186224037890 has finished 2025-03-28T09:02:43.348498Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:43.348520Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-03-28T09:02:43.348546Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-03-28T09:02:43.348582Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-28T09:02:43.359376Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:02:43.359439Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:02:43.359471Z node 1 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715667] at 72075186224037890 on unit CompleteOperation 2025-03-28T09:02:43.359522Z node 1 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715667] from 72075186224037890 at tablet 72075186224037890 send result to client [1:1116:2910], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:02:43.359568Z node 1 
:TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:02:47.588574Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:02:47.588894Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:47.588949Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00170b/r3tmp/tmpL6agGz/pdisk_1.dat 2025-03-28T09:02:48.168039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:02:48.201196Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:48.399935Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:48.400066Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:48.416177Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:48.506273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:48.527507Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:666:2570] 2025-03-28T09:02:48.527726Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:48.585487Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:48.585641Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:48.589267Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:02:48.589359Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:02:48.589417Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:02:48.589765Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:48.589919Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:48.590017Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:682:2570] in generation 1 2025-03-28T09:02:48.603228Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:48.603308Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:02:48.603420Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:48.603505Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:684:2580] 2025-03-28T09:02:48.603542Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:48.603581Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:02:48.603619Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:48.603938Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:02:48.604052Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:02:48.604148Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:48.604190Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:48.604236Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:02:48.604285Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:48.604350Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:662:2567], serverId# [2:673:2574], sessionId# [0:0:0] 2025-03-28T09:02:48.604488Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:48.604724Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:02:48.604826Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:02:48.606776Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:48.619250Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:48.619366Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:02:48.783821Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:704:2594], serverId# [2:705:2595], sessionId# [0:0:0] 2025-03-28T09:02:48.784952Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:02:48.785013Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:48.785564Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:48.785619Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:02:48.785662Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:02:48.785882Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:02:48.786009Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:02:48.786257Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:48.786313Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:02:48.789795Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:02:48.790219Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-28T09:02:48.792247Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:02:48.792296Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:48.793155Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:02:48.793228Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:48.794178Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:48.794228Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:48.794286Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:02:48.794348Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:02:48.794400Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:02:48.794482Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:48.807826Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:48.809658Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:02:48.809730Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:02:48.810572Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:02:48.820700Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-03-28T09:02:48.820867Z node 2 :TX_DATASHARD NOTICE: Rejecting bulk upsert request on datashard: tablet# 72075186224037888, error# Can't execute bulk upsert at replicated table |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> Cdc::KeysOnlyLog[TopicRunner] [GOOD] >> Cdc::KeysOnlyLogDebezium |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic 
\'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-03-28T09:02:49.419916Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:49.423978Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:49.424358Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-28T09:02:49.424423Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:49.424468Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-28T09:02:49.424519Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:49.424582Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:49.424659Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-28T09:02:49.425436Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:261:2253], now have 1 active actors on pipe 2025-03-28T09:02:49.425556Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:49.443739Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:49.447076Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:49.447308Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:49.448232Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:49.448423Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:49.448847Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 
2025-03-28T09:02:49.449188Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:269:2259] 2025-03-28T09:02:49.451859Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2025-03-28T09:02:49.451927Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:269:2259] 2025-03-28T09:02:49.451997Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:49.452073Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:49.452959Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:272:2261], now have 1 active actors on pipe 2025-03-28T09:02:49.510424Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:49.515216Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:49.515714Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-28T09:02:49.515768Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:49.515810Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-03-28T09:02:49.515855Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:49.515912Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:49.515972Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-28T09:02:49.516840Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:403:2358], now have 1 active actors on pipe 2025-03-28T09:02:49.516946Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:49.517323Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:49.520059Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:49.520228Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:49.521376Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 
SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:49.521513Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:49.521878Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:49.522079Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:411:2364] 2025-03-28T09:02:49.524714Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-28T09:02:49.524806Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:411:2364] 2025-03-28T09:02:49.524865Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:49.524913Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:49.525774Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:414:2366], now have 1 active actors on pipe 2025-03-28T09:02:49.527052Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:420:2369], now have 1 active actors on pipe 2025-03-28T09:02:49.527453Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:422:2370], now have 1 active actors on pipe 2025-03-28T09:02:49.527946Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:420:2369] destroyed 2025-03-28T09:02:49.528273Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:422:2370] destroyed Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } |75.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |75.7%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; 
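The FALLBACK_ACTOR_LOGGING records above are flat key=value pairs joined by semicolons. A sketch of a tolerant parser for that shape — an assumption based purely on the lines shown here, not on any documented format:

def parse_fallback_record(line):
    """Split a FALLBACK_ACTOR_LOGGING record into its key=value fields.

    Assumes the semicolon-delimited shape visible in this log, e.g.
    'FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;...'
    """
    fields = {}
    for part in line.split(";"):
        if "=" in part:
            key, _, value = part.partition("=")
            fields[key.strip()] = value.strip()
    return fields

rec = parse_fallback_record(
    "FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;"
    "fline=native.cpp:110;event=serialize;size=9934960;columns=10;"
)
assert rec["event"] == "serialize" and rec["size"] == "9934960"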
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143153032.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143153032.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143153032.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143153032.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143153032.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123153032.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=143153032.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143153032.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123151832.000000s;Name=;Codec=}; 2025-03-28T09:00:33.304862Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:00:33.590216Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:00:33.633851Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:00:33.634204Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:00:33.642899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:00:33.643167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:00:33.643471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:00:33.643596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:00:33.643708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:00:33.643844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:00:33.643960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:00:33.644095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:00:33.644223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:00:33.644338Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:00:33.644481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:00:33.644617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:00:33.686763Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:00:33.687240Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:00:33.687416Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:00:33.687631Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:00:33.687842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:00:33.687943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:00:33.687997Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:00:33.688102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:00:33.688167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:00:33.688212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:00:33.688248Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:00:33.688539Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:00:33.688643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:00:33.688694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:00:33.688726Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 
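The TTxUpdateSchema trace above steps through a chain of normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, ...), each announced with normalizer_init/normalizer_finished/normalizer_switched events. A sketch that reconstructs the chain order from the seq_id/type fields — again assuming only the field names visible in this excerpt:

import re

INIT_RE = re.compile(r"event=normalizer_init;last=\d+;seq_id=(\d+);type=(\w+);")

def normalizer_chain(log_text):
    """Return normalizer (seq_id, type) pairs in initialization order."""
    return [(int(s), t) for s, t in INIT_RE.findall(log_text)]

# Against the excerpt above this yields something like:
# [(1, 'Granules'), (2, 'Chunks'), (4, 'TablesCleaner'), (6, 'CleanGranuleId'), ...]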
2025-03-28T09:00:33.688814Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:00:33.688870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:00:33.688914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:00:33.688947Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:00:33.689027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:00:33.689067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:00:33.689102Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:00:33.689154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:00:33.689196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:00:33.689235Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:00:33.691853Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=81; 2025-03-28T09:00:33.692007Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=55; 2025-03-28T09:00:33.692097Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-03-28T09:00:33.692196Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-03-28T09:00:33.692454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:00:33.692530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:00:33.692576Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:00:33.692805Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:00:33.692859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:00:33.692894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:00:33.693059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... ST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-28T09:02:49.520396Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-03-28T09:02:49.520455Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:02:49.520520Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:02:49.520576Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:02:49.520687Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:02:49.520928Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000008:max} readable: {1000000008:max} at tablet 9437184 2025-03-28T09:02:49.521049Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:02:49.521210Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:02:49.521275Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:02:49.521732Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-28T09:02:49.521831Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:02:49.522368Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:2013:4022];trace_detailed=; 2025-03-28T09:02:49.522884Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-28T09:02:49.523144Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-28T09:02:49.523348Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:49.523493Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:49.523915Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:02:49.524044Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
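When the scan actor finishes (scan_finish/scan_finished records just below), it embeds a per-stage timing payload as JSON: a "full" envelope plus an "events" list whose entries carry f(irst)/l(ast) microsecond timestamps, a d_finished duration, and a call count c. A sketch that turns one such payload into a stage-to-duration map; the payload structure here is inferred from this log alone and simplified, not a documented YDB interface:

import json

def stage_durations(stats_json):
    """Map each scan stage name to its d_finished duration (microseconds).

    Assumes the {"full": {...}, "events": [...]} shape embedded in the
    scan_finish records below; field names taken from this excerpt.
    """
    stats = json.loads(stats_json)
    return {ev["name"]: ev.get("d_finished", 0) for ev in stats["events"]}

payload = '''{"full":{"a":1743152569522288,"name":"_full_task",
"f":1743152569522288,"d_finished":0,"c":0,"l":1743152569524303,"d":2015},
"events":[{"name":"bootstrap","f":1743152569522523,"d_finished":1005,"c":1,
"l":1743152569523528,"d":1005}]}'''
print(stage_durations(payload))  # {'bootstrap': 1005}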
2025-03-28T09:02:49.524184Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:49.524231Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:2013:4022] finished for tablet 9437184 2025-03-28T09:02:49.524739Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:2012:4021];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743152569522288,"name":"_full_task","f":1743152569522288,"d_finished":0,"c":0,"l":1743152569524303,"d":2015},"events":[{"name":"bootstrap","f":1743152569522523,"d_finished":1005,"c":1,"l":1743152569523528,"d":1005},{"a":1743152569523888,"name":"ack","f":1743152569523888,"d_finished":0,"c":0,"l":1743152569524303,"d":415},{"a":1743152569523863,"name":"processing","f":1743152569523863,"d_finished":0,"c":0,"l":1743152569524303,"d":440},{"name":"ProduceResults","f":1743152569523259,"d_finished":533,"c":2,"l":1743152569524214,"d":533},{"a":1743152569524217,"name":"Finish","f":1743152569524217,"d_finished":0,"c":0,"l":1743152569524303,"d":86}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:49.524840Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:2012:4021];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:02:49.525317Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:2012:4021];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743152569522288,"name":"_full_task","f":1743152569522288,"d_finished":0,"c":0,"l":1743152569524898,"d":2610},"events":[{"name":"bootstrap","f":1743152569522523,"d_finished":1005,"c":1,"l":1743152569523528,"d":1005},{"a":1743152569523888,"name":"ack","f":1743152569523888,"d_finished":0,"c":0,"l":1743152569524898,"d":1010},{"a":1743152569523863,"name":"processing","f":1743152569523863,"d_finished":0,"c":0,"l":1743152569524898,"d":1035},{"name":"ProduceResults","f":1743152569523259,"d_finished":533,"c":2,"l":1743152569524214,"d":533},{"a":1743152569524217,"name":"Finish","f":1743152569524217,"d_finished":0,"c":0,"l":1743152569524898,"d":681}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:2013:4022]->[1:2012:4021] 2025-03-28T09:02:49.525424Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:02:49.521801Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:02:49.525476Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:02:49.525597Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2013:4022];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0 >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad |75.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |75.8%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |75.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] Test command err: 2025-03-28T09:01:40.896415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:40.896759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:40.896821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00122f/r3tmp/tmpxMhvAf/pdisk_1.dat 2025-03-28T09:01:43.064372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:43.186113Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:43.285763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:43.286569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:43.303966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:43.670704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:01:43.850501Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:01:43.851759Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:01:43.852256Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:01:43.852531Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:01:44.014192Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:01:44.014906Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:01:44.015033Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:01:44.016654Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:01:44.016729Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:01:44.016785Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:01:44.017152Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:01:44.017282Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:01:44.017352Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:01:44.031823Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:01:44.086749Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:01:44.086918Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:01:44.087022Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:01:44.087052Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:01:44.087084Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:01:44.087129Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:01:44.087326Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:44.087371Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:44.087606Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:01:44.087679Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:01:44.088033Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:01:44.088066Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:01:44.088096Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:01:44.088127Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:01:44.088171Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:01:44.088198Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:01:44.088234Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:01:44.088345Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:44.088376Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:44.088419Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:01:44.088477Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:01:44.088506Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:01:44.088604Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:01:44.088802Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:01:44.088842Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:01:44.088924Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:01:44.088968Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:01:44.089015Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:01:44.089049Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
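The TX_DATASHARD trace around this point shows a proposed transaction stepping through execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, and later PlanQueue), each unit either completing or parking the operation. A sketch reconstructing that unit sequence per tablet from the "on unit <Name>" trace lines; the line format is taken from this log, not a stable contract:

import re

UNIT_RE = re.compile(r"Trying to execute \[[\d:]+\] at (\d+) on unit (\w+)")

def unit_pipeline(log_text):
    """Group the execution units each tablet's operation passed through, in order."""
    pipelines = {}
    for tablet, unit in UNIT_RE.findall(log_text):
        units = pipelines.setdefault(tablet, [])
        if not units or units[-1] != unit:
            units.append(unit)
    return pipelines

# Fed the trace above, this yields e.g.
# {'72075186224037888': ['CheckSchemeTx', 'StoreSchemeTx', 'FinishPropose', 'WaitForPlan', ...]}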
2025-03-28T09:01:44.089077Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:01:44.089352Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:01:44.089387Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:01:44.089415Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:01:44.089442Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:01:44.089496Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:01:44.089537Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:01:44.089574Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:01:44.089602Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:01:44.089629Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:01:44.090993Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:01:44.091046Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:01:44.102222Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:01:44.102298Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:01:44.102336Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:01:44.102390Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:01:44.102463Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:01:44.305283Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:44.305349Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:44.305385Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:01:44.305828Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:01:44.305874Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:01:44.306001Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:01:44.306046Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:01:44.306096Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:01:44.306131Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:01:44.310466Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:01:44.310549Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:01:44.324749Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:44.324826Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:44.324896Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:01:4 ... d: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-03-28T09:02:49.492014Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[7:934:2775], 1001} after executionsCount# 1 2025-03-28T09:02:49.492057Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:934:2775], 1001} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:02:49.492099Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:934:2775], 1001} finished in read 2025-03-28T09:02:49.492142Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-03-28T09:02:49.492164Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2025-03-28T09:02:49.492189Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2025-03-28T09:02:49.492225Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2025-03-28T09:02:49.492267Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-03-28T09:02:49.492289Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2025-03-28T09:02:49.492312Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2025-03-28T09:02:49.492338Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-03-28T09:02:49.492415Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-03-28T09:02:49.492814Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:939:2780], Recipient [7:719:2597]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:49.492839Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:49.492863Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [7:938:2779], serverId# [7:939:2780], sessionId# [0:0:0] 2025-03-28T09:02:49.492944Z node 7 :TX_DATASHARD TRACE: StateWork, received 
event# 269553169, Sender [7:937:2778], Recipient [7:719:2597]: NKikimrTxDataShard.TEvGetInfoRequest 2025-03-28T09:02:49.493534Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:942:2783], Recipient [7:719:2597]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:49.493567Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:49.493598Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [7:941:2782], serverId# [7:942:2783], sessionId# [0:0:0] 2025-03-28T09:02:49.493733Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:940:2781], Recipient [7:719:2597]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-03-28T09:02:49.493838Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-03-28T09:02:49.493878Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037890 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:02:49.493917Z node 7 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to non-repeatable v1004/18446744073709551615 2025-03-28T09:02:49.493956Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CheckRead 2025-03-28T09:02:49.494005Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-03-28T09:02:49.494027Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CheckRead 2025-03-28T09:02:49.494056Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:49.494081Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit BuildAndWaitDependencies 2025-03-28T09:02:49.494114Z node 7 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037890 2025-03-28T09:02:49.494153Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-03-28T09:02:49.494169Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:49.494182Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit ExecuteRead 2025-03-28T09:02:49.494196Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit ExecuteRead 2025-03-28T09:02:49.494244Z node 7 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-03-28T09:02:49.494359Z node 7 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[7:940:2781], 1002} after executionsCount# 1 2025-03-28T09:02:49.494397Z node 7 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[7:940:2781], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:02:49.494457Z node 7 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[7:940:2781], 1002} finished in read 2025-03-28T09:02:49.494487Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-03-28T09:02:49.494507Z 
node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit ExecuteRead 2025-03-28T09:02:49.494528Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037890 to execution unit CompletedOperations 2025-03-28T09:02:49.494543Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037890 on unit CompletedOperations 2025-03-28T09:02:49.494568Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037890 is Executed 2025-03-28T09:02:49.494581Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037890 executing on unit CompletedOperations 2025-03-28T09:02:49.494602Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037890 has finished 2025-03-28T09:02:49.494622Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-03-28T09:02:49.494670Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-03-28T09:02:49.495228Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:945:2786], Recipient [7:714:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:49.495258Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:49.495289Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [7:944:2785], serverId# [7:945:2786], sessionId# [0:0:0] 2025-03-28T09:02:49.495377Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [7:943:2784], Recipient [7:714:2595]: NKikimrTxDataShard.TEvGetInfoRequest 2025-03-28T09:02:49.496020Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [7:948:2789], Recipient [7:714:2595]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:49.496057Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:49.496082Z node 7 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [7:947:2788], serverId# [7:948:2789], sessionId# [0:0:0] 2025-03-28T09:02:49.496237Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:946:2787], Recipient [7:714:2595]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-03-28T09:02:49.496329Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-03-28T09:02:49.496360Z node 7 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v1004/1000004 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:02:49.496385Z node 7 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v1004/18446744073709551615 2025-03-28T09:02:49.496426Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CheckRead 2025-03-28T09:02:49.496469Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-03-28T09:02:49.496485Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CheckRead 2025-03-28T09:02:49.496500Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:49.496516Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit 
BuildAndWaitDependencies 2025-03-28T09:02:49.496543Z node 7 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037891 2025-03-28T09:02:49.496562Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-03-28T09:02:49.496576Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:49.496588Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit ExecuteRead 2025-03-28T09:02:49.496602Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit ExecuteRead 2025-03-28T09:02:49.496657Z node 7 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2025-03-28T09:02:49.496817Z node 7 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[7:946:2787], 1003} after executionsCount# 1 2025-03-28T09:02:49.496856Z node 7 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[7:946:2787], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:02:49.496895Z node 7 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[7:946:2787], 1003} finished in read 2025-03-28T09:02:49.496923Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-03-28T09:02:49.496937Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit ExecuteRead 2025-03-28T09:02:49.496950Z node 7 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037891 to execution unit CompletedOperations 2025-03-28T09:02:49.496963Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037891 on unit CompletedOperations 2025-03-28T09:02:49.496988Z node 7 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037891 is Executed 2025-03-28T09:02:49.497000Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037891 executing on unit CompletedOperations 2025-03-28T09:02:49.497013Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037891 has finished 2025-03-28T09:02:49.497028Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2025-03-28T09:02:49.497084Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-03-28T09:02:49.201685Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:49.205343Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:49.205659Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-28T09:02:49.205727Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:49.205767Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-28T09:02:49.205816Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:49.205865Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:49.205937Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-28T09:02:49.206559Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:262:2254], now have 1 active actors on pipe 2025-03-28T09:02:49.206658Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:49.222067Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:49.224968Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:49.225096Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:49.225941Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:49.226083Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:49.226473Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:49.226802Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:270:2260] 2025-03-28T09:02:49.229018Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-03-28T09:02:49.229088Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:270:2260] 2025-03-28T09:02:49.229136Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:49.229182Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:49.230103Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:273:2262], now have 1 active actors on pipe 2025-03-28T09:02:49.300545Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:49.307002Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:49.307345Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-03-28T09:02:49.307394Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:49.307450Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-03-28T09:02:49.307490Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:49.307539Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:49.307594Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-03-28T09:02:49.308265Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:407:2362], now have 1 active actors on pipe 2025-03-28T09:02:49.308376Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:49.308543Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:49.312175Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:49.312320Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:49.313132Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:02:49.313258Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:49.315956Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:49.316234Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [2:415:2368] 2025-03-28T09:02:49.321361Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-03-28T09:02:49.321461Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:415:2368] 2025-03-28T09:02:49.321530Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:49.321580Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:49.322427Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [2:418:2370], now have 1 active actors on pipe 2025-03-28T09:02:49.354335Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:49.360057Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:49.360389Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-03-28T09:02:49.360436Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:49.360499Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-03-28T09:02:49.360536Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:49.360579Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:49.360659Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-03-28T09:02:49.361426Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:467:2407], now have 1 active actors on pipe 2025-03-28T09:02:49.361483Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:49.361645Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:49.369050Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:49.369238Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:49.369992Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config 
applied version 3 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:49.370121Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:49.370560Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:49.370822Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:475:2413] 2025-03-28T09:02:49.372811Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-03-28T09:02:49.372879Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:475:2413] 2025-03-28T09:02:49.372929Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:49.372977Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Proc ... EBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:50.844021Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-03-28T09:02:50.844072Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:50.844122Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:50.844182Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-03-28T09:02:50.844903Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:462:2402], now have 1 active actors on pipe 2025-03-28T09:02:50.845011Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:50.845276Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 11(current 0) received from actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:50.848183Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:50.848332Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:50.848947Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 11 actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 
LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 11 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:02:50.849067Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:50.849423Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:50.849611Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [4:470:2408] 2025-03-28T09:02:50.851941Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-03-28T09:02:50.852019Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:470:2408] 2025-03-28T09:02:50.852082Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:50.852138Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:50.853158Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:473:2410], now have 1 active actors on pipe 2025-03-28T09:02:50.877618Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:50.880903Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:50.881311Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-28T09:02:50.881469Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:50.881589Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-03-28T09:02:50.881670Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:50.881718Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:50.881785Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-28T09:02:50.882631Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:522:2447], now have 1 active actors on pipe 2025-03-28T09:02:50.882782Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:02:50.882958Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 12(current 0) received from actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:50.885231Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { 
PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:50.885350Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:50.885955Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 12 actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:02:50.886090Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:50.886442Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:50.886624Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:530:2453] 2025-03-28T09:02:50.888445Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-28T09:02:50.888503Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:530:2453] 2025-03-28T09:02:50.888554Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:50.888597Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-28T09:02:50.889247Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:533:2455], now have 1 active actors on pipe 2025-03-28T09:02:50.890247Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:540:2459], now have 1 active actors on pipe 2025-03-28T09:02:50.890330Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [4:539:2458], now have 1 active actors on pipe 2025-03-28T09:02:50.890615Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:541:2459], now have 1 active actors on pipe 2025-03-28T09:02:50.903780Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:549:2466], now have 1 active actors on pipe 2025-03-28T09:02:50.946849Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:50.950968Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:50.951318Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-28T09:02:50.951377Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:50.951562Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:50.952025Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:50.952069Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-28T09:02:50.952234Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:02:50.952575Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:02:50.952765Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:606:2511] 2025-03-28T09:02:50.954852Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-03-28T09:02:50.956082Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-03-28T09:02:50.956376Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-03-28T09:02:50.956668Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-03-28T09:02:50.956894Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-03-28T09:02:50.956949Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:02:50.956990Z node 4 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:02:50.957031Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-28T09:02:50.957087Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:606:2511] 2025-03-28T09:02:50.957143Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:02:50.957188Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-03-28T09:02:50.957960Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [4:540:2459] destroyed 2025-03-28T09:02:50.958017Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [4:539:2458] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionOffsetsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "partition is not ready yet" } ErrorCode: OK } } } >> ColumnBuildTest::AlreadyExists >> TLocksTest::Range_CorrectNullDot [GOOD] >> TLocksTest::Range_EmptyKey |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::RangeSetNotBreak [GOOD] Test command err: 2025-03-28T09:02:25.643032Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783174471110870:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:25.646858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f2d/r3tmp/tmpRf8oeA/pdisk_1.dat 2025-03-28T09:02:26.103020Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:26.105487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:26.105579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:26.108223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11067 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:26.401059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:26.414011Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
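
The "NotifyTxCompletion, unknown transaction" warnings followed by "waiting..." just above show a client subscribing for completion of a transaction the schemeshard has not registered yet; the subscription is parked and answered once the transaction lands. A minimal sketch of that subscribe-before-known pattern, under assumed names (TCompletionRegistry) rather than the actual schemeshard code:

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <set>
    #include <vector>

    // Illustrative completion registry: subscribers may arrive before the
    // transaction itself is known, which the log reports as a warning.
    class TCompletionRegistry {
    public:
        void Subscribe(std::uint64_t txId, int subscriber) {
            if (!Known_.count(txId)) {
                std::cout << "WARN: NotifyTxCompletion, unknown transaction, txId: "
                          << txId << "\n";
            }
            Waiters_[txId].push_back(subscriber); // park the waiter either way
        }
        void RegisterTx(std::uint64_t txId) { Known_.insert(txId); }
        void Complete(std::uint64_t txId) {
            for (int s : Waiters_[txId]) {
                std::cout << "notify subscriber " << s << " txId " << txId << "\n";
            }
            Waiters_.erase(txId);
            Known_.erase(txId);
        }
    private:
        std::set<std::uint64_t> Known_;
        std::map<std::uint64_t, std::vector<int>> Waiters_;
    };

    int main() {
        TCompletionRegistry reg;
        reg.Subscribe(281474976710657ULL, 1); // warning: tx not registered yet
        reg.RegisterTx(281474976710657ULL);
        reg.Complete(281474976710657ULL);     // parked waiter is answered
    }
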
2025-03-28T09:02:26.431490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:26.572754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:26.645542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:30.651775Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783174471110870:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:30.653130Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:37.514934Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783225844125189:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:37.538170Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f2d/r3tmp/tmpoLwc8w/pdisk_1.dat 2025-03-28T09:02:37.915144Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:37.946404Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:37.950914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:37.960792Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26255 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:38.393094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:38.403486Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
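
The METADATA_PROVIDER lines around here trace a bounded-retry probe: first "Scheduled retry for error ... undelivered", and once the deadline passes, "event=timeout". As a sketch of that retry-until-deadline control flow — invented names (ProbeWithRetries, EProbe), only an assumption about how such an actor behaves, not the real TTableExistsActor:

    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <thread>

    // Illustrative outcome codes for a path-existence probe.
    enum class EProbe { Ok, Undelivered };

    // Retry an unreliable probe until it succeeds or the deadline passes,
    // mirroring "Scheduled retry ..." followed by "event=timeout" in the log.
    bool ProbeWithRetries(const std::function<EProbe()>& probe,
                          std::chrono::milliseconds deadline,
                          std::chrono::milliseconds backoff) {
        auto until = std::chrono::steady_clock::now() + deadline;
        while (std::chrono::steady_clock::now() < until) {
            if (probe() == EProbe::Ok) {
                return true;
            }
            std::cout << "Scheduled retry for undelivered probe\n";
            std::this_thread::sleep_for(backoff);
        }
        std::cout << "event=timeout\n"; // deadline reached, report timeout
        return false;
    }

    int main() {
        // A probe that stays undelivered, so the deadline fires, as in the log.
        auto flaky = [] { return EProbe::Undelivered; };
        ProbeWithRetries(flaky, std::chrono::milliseconds(50),
                         std::chrono::milliseconds(10));
    }
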
2025-03-28T09:02:38.419463Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:02:38.424122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:38.597473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:02:38.741777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:02:42.491083Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486783225844125189:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:42.491152Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test [GOOD] >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test >> TTicketParserTest::LoginGoodWithGroups [GOOD] >> TTicketParserTest::LoginRefreshGroupsGood >> TTicketParserTest::BulkAuthorizationWithUserAccount [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] Test command err: 2025-03-28T09:02:34.999091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:02:34.999592Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:34.999671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b56/r3tmp/tmpzw5AeB/pdisk_1.dat 2025-03-28T09:02:35.449502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:02:35.539095Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:35.596572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:35.596706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:35.612104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:35.700527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:36.162036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:742:2623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:36.162147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:36.162229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:36.169759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:02:36.363231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:756:2631], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:36.481189Z node 1 :TX_PROXY ERROR: Actor# [1:830:2674] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:37.169397Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqdzta3z27tb03j46978sjet, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBiZDcwOWYtYmJmMzFkZGEtNGMyMDJkYjMtN2VkNjg1ZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.294319Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqdztb4bbfg05t34pt7bweh4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDI3ZDQxNjEtZGMyNTMwZDctYTg3NGU2NC01NTg3MWMxMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.404215Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqdztb85dvw9fkhggzb9xg0a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM0ZGQ2YjUtNGUzMDQ4ZmEtYmNiMWY3ZTQtNzhjNDExZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.537101Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqdztbbe9621sxsef4yxq9x3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGZhZGI0ZTMtY2IxMzQzMzQtZjFkZDc2Y2QtMTAyNzliMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.659624Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqdztbfgbf746dzn47yd7yhz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTFjM2UwMjctNzIyNTZmOTEtNzdjYThkZWItMmNkYTQ5NjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.766256Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqdztbkcdhqrdhzv1ee7d7ka, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzkxYzcyYTUtNTI5MDEzZTQtYzEwMDRjZTEtNzY3YzJmMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.858960Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqdztbpnbhwes1g4d9meb7b1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MThlYWNkOTAtZmMwYmRmOTYtZTYyNjNiYzMtZDU3MDU5ODU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:37.945425Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqdztbsk52f58et55stzg78q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDZjMWIyNDMtZmM0YjhjZGEtMjIzNGQ2YWYtYmM0ZDk2NDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:38.058432Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqdztbwadtr13wf2vgycac67, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTcyM2RiZjgtZTMyOTkwNTYtY2NmMDc4NTQtZGRmY2RjNWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:38.195731Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jqdztc00amz7fx1mvd77k8p4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2FiNTI1MjEtNDg1NmVjYzAtNTA3YzI5ZTUtZmRhNzUzZDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:38.295996Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqdztc431nqxe8a6kyt720jd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWRlMzhlMzYtZTdmMDkxNTgtNjZkMzBmN2MtODJkMmRmMTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:38.501091Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqdztc7a3k4y3nyr8gbjek9x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTBkNDA4NmItMWJhYmVjNjYtNWYwYjk2OTYtYzYzZjU5ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:38.662391Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqdztcdp8d1sg3kwtd4h2pkd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDExN2U2NzgtM2E0OTNiZGYtMjFlNGFhMWEtOTI1NTcyMGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:38.843861Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jqdztcjr32av8ef5w2n0brgy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjAzMmY1YzQtMjdkOWZkZDYtM2MxZjczZDYtYzFhMzRhMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:39.009459Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jqdztcrf48adkbbde82ems6b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzBkOTZjMDMtOTZkM2I3OTYtN2I3MDFiNjUtY2Q4MTU1MmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:39.203897Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jqdztcxq7bpb92ydnz7fnffy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDUwYzllMDMtZWVhYTBjMDItZjc3YTc0NzAtNTRiMWYxMGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:39.523489Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jqdztd3m7t277bzfz2a1000v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWU5OWNmMS0yZDU5ZGM3MS1mODU3ZWJmZS1mMzVkODhmZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:39.650013Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqdztddm22bh3hwetzemqwnp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg0NWJkMjgtYWNmMDllMzMtNzhiYTVhZGUtZjgyODI3NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:39.793135Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqdztdhj9y073xtp9tq54gfh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVkYTY4NGQtZTc3ZmM4MmQtZTVjNDYzMzYtNTdmZjdjYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:40.037480Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jqdztdp755fh497bn987revt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDhhMzVmZjktNDVmZDZhMjItYjMxYzUwYmQtNTdkMWM4MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:40.380022Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jqdztdy25fzw4rxwh4gc6vh8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWViN2ZiYTctNjEzNjM2MTctOTAyNDU4YTYtYWRkMzQwMDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:40.648985Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqdzte8merx37gds4gq8c2pz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWVhZDhjNmMtNTZjOGJjOC02OWE3YjJkNy1hYzhiOGIzMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:40.970375Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jqdztegt5nmf1ws57zba7b9z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWRkNjJmYzMtODUzMjFjZmUtOGZkYzVlZjQtNGU3Y2IxNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.176652Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jqdztevadbsn9095nf1q4rxx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWI4ZTIwMGUtMjljZWFiNy1jOGYwZWYxOC04YzQ3NWZlZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.261375Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jqdztf1acd825tjx9w94jed8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjdmYjVmMjgtMjE4Y2QwYzAtZTc2MDgwZjMtOWZjOTRhNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:41.352724Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jqdztf3yekgnpj8d5f9z8kgp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTUyYTYxYmQtYTdlMWU5MTQtN2U2NjJjYTAtNzcxNDM0 ... 0MTRhZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:46.799951Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715726. Ctx: { TraceId: 01jqdztmdh28xanb66gekx1wd8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM0NDZiNjEtYjJlMzUwMy0yZTNhZjhjNC01NmM1NWM0Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:46.891585Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715727. Ctx: { TraceId: 01jqdztmh32f8200p26qqz162h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGY0YTQwODctYmYyNjkwOTMtMmYzMjhkOWItMTA5NDkzMDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:46.988529Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jqdztmkvffyk8afwccxqsdej, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTNkYjk1ODktNDRmYjQzY2YtZjY1OWE5Y2EtNzlmMjExODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:47.073072Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715729. 
Ctx: { TraceId: 01jqdztmpwann2xvb229s73f8x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQwYjUwZmQtNWI3ZDM2MS01ODczYmRjZS05MWNkMDBhOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:47.164440Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715730. Ctx: { TraceId: 01jqdztmsmbzmk3vb81ds257cd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQyYzc5ODctYTg0MmM4ZWMtNGQyZmIwNGUtN2EyOTU0NGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:47.281035Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715731. Ctx: { TraceId: 01jqdztmwc2tzwzsyndcq31dh7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2UxMjliMDctYmE2MTI1NzItY2NiNTViZTMtZGVhMDIyYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:47.441312Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jqdztn01cxwc95we915rzfcs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGI5MjBlOTUtNzVhNmRmZjAtZDQ1NzI3YjUtYTQwZGFjNzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:47.578804Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715733. Ctx: { TraceId: 01jqdztn539acvw75y3b2cqskn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjNlZjRmN2EtZmRjMWRiNi0xMGM3ZGNiZS1iZWZmYzE1Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:47.680053Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715734. Ctx: { TraceId: 01jqdztn9cec4dqsmr0ttbsj5b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDAyYmFlODgtZTJiYmU5ZDktMmNkMjU0MWItMTUxMzdjMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:47.795180Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715735. Ctx: { TraceId: 01jqdztnck1vhnz4zxv6r6qrr1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTI2MTBjYTUtYzhhZWE0NGUtOWZmNDJmMTctNDBiMjM0MjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:47.898680Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715736. Ctx: { TraceId: 01jqdztng75fpahxc5yzz932yz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjczYjBkYzItYTMzMjRhOTgtOTFhZTZjNzUtZjAzZDNlN2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:47.997222Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715737. Ctx: { TraceId: 01jqdztnkbdgb7mzyeayst49hp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTMwMjliODItNzJkYWYzMDEtZTg0MWIyZmUtZGE3NDg0MWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:48.098570Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715738. Ctx: { TraceId: 01jqdztnpdfk3cmw4vxx9reevx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmEyMWM4MDItYjZiNzhiYTktNDc2NzQ2MDctNmIwZjQzODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:48.199103Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715739. 
Ctx: { TraceId: 01jqdztnskbb08vkffq9qv5tm9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRmYzE2OC1jYThjMTE2OC0zZDYwOTkyOS05NWVmNjA4ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:48.379869Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715740. Ctx: { TraceId: 01jqdztnwvda5mn7zzcxrz0fpe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQ1MGM3YTQtMmQ1MmFhYjMtZmUxZmRmMDAtZWE4NzFlMjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:48.586444Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715741. Ctx: { TraceId: 01jqdztp2h3yeyh1b1kxm9sgms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWExNzAzMzEtODU3YzFhNGItZTg1ZjAzZjAtZDhjYmEwMjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:48.684507Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715742. Ctx: { TraceId: 01jqdztp9436m0b3p3b6jra396, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2IxM2FmNjgtNzAzNmU0YjctOWEzZGJkNDUtZTc4NDk3OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:48.776052Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715743. Ctx: { TraceId: 01jqdztpc073nb0c7x2365kdmm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDNlYzAxOTYtNDVkMTVjYTItNzA0NmJiNzYtNDZhYjg1MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:48.871903Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715744. Ctx: { TraceId: 01jqdztpew8sxdap0bxawgcqjz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTU4ZGJiMjUtOTMzMDczZTgtZDE0Y2E4ZWEtMTA4NTNjNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:48.967012Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715745. Ctx: { TraceId: 01jqdztphw6z9pep2dhw5n5f3t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTQ5ZjczZmItY2RlZjY0ZWUtOTBjNTNjNmYtZDBlMDI3MGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:49.050134Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715746. Ctx: { TraceId: 01jqdztpmqavasxkxgz5svyavt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTZiZmYzNTUtMmIzODA3NjktN2M0MTdmMGQtNmY2Y2Q4ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:49.131189Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715747. Ctx: { TraceId: 01jqdztpqae3gnff0x2rv5v999, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTRlYWNlOGItZjk2NmRkYS1hM2U1OGNhYi02NjZjOGU1Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:49.212517Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715748. Ctx: { TraceId: 01jqdztpswbkkaqbbymt5bhsj9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmMyNjgxNjAtODRjNWY3YmYtMjZlMTg3YTAtNjU0MzllNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:49.288914Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715749. 
Ctx: { TraceId: 01jqdztpwc8vkeb95hscjqjytm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDVlYjdmYTItMjhhOWFkMC00ZTVjZTAzLTJjMzFiMGIw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:49.415997Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715750. Ctx: { TraceId: 01jqdztpys4nykwq5y6seshete, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQzYzc5NDEtODZiZGZlOTYtZjY0Mjg3YTQtMzBjNDljNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:49.499434Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715751. Ctx: { TraceId: 01jqdztq2sf2e52zcpya3fbtba, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTRiYzE4ZjItMWM1MDAwNWItYTJhZjdhOGQtMzI4Yzc5OWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:49.622596Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715752. Ctx: { TraceId: 01jqdztq5d1t22bvtrd2x8gvn2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQyNmFkZTItZmE1YmQ0MjMtZWY1NTM5MjktMzU5MmQwMzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:49.719902Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715753. Ctx: { TraceId: 01jqdztq9a68thpq1kdzq73an5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWUwNTExOGUtZTkzNjU3NTUtMWM2ZWI1YTItZmYwOTk5MGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:49.822266Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jqdztqc9e57vwzcn1ryvbfy5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTczZjNhMjAtNDViOTQ5NjItYjJhOWUxMTYtMWE4N2QwMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:49.942314Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715755. Ctx: { TraceId: 01jqdztqfn6cnhg65dv5gy95wy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc4YzZiNWMtNTI5NzZkNTktNWQ3Njc3NjQtNjEyZThjZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:50.027251Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715756. Ctx: { TraceId: 01jqdztqk97w61m8qc0rneh8me, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M1MjFlMmItNzFjZDExNTAtYTI1MzFlYTAtYTBiY2E4Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:50.121128Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715757. Ctx: { TraceId: 01jqdztqnvc96202jt17p8p3ve, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmY1MDhlYmYtMWVkMzExMjUtYTgyNWZmZDYtYjI2OTE1N2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:50.196947Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715758. Ctx: { TraceId: 01jqdztqrs29pzseqmg7sgbwgr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDExMWQyNzEtNWEyZWQwM2UtZmVhNTlmZWMtNzI5OWM2MWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:50.310504Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715759. 
Ctx: { TraceId: 01jqdztqv48rrrk4h0hp0t3pdt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2RmMWJmYTUtZGU1ZTIyN2MtMmM3Y2YwYmQtYjVjZDBlMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:50.328828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480 2025-03-28T09:02:50.754059Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715760. Ctx: { TraceId: 01jqdztr74fga67fsq8h0a6e32, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGQyYzljODUtNjllOTU4NzAtYmJlMDk0MWItZmQxOTY4OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TSchemeShardViewTest::EmptyQueryText >> TSchemeShardViewTest::CreateView >> TSchemeShardViewTest::AsyncDropSameView >> YdbSdkSessionsPool::StressTestSync10 >> YdbSdkSessionsPool::Get1Session >> TTicketParserTest::AuthenticationUnavailable [GOOD] >> TTicketParserTest::AuthenticationRetryError >> ResultFormatter::Primitive [GOOD] >> ResultFormatter::Struct [GOOD] >> YdbSdkSessionsPool::StressTestSync1 >> YdbSdkSessionsPool::WaitQueue10 >> TColumnShardTestSchema::HotTiersTtlWithStat [GOOD] >> ResultFormatter::Utf8WithQuotes [GOOD] >> ResultFormatter::VariantStruct [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] Test command err: 2025-03-28T09:01:42.235916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:42.236406Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:42.236468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001204/r3tmp/tmpsfClCB/pdisk_1.dat 2025-03-28T09:01:44.306477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:44.369508Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:44.410673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:44.410863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:44.427873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:44.573802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:01:44.984166Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:01:45.005241Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:01:45.012598Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:01:45.017544Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:01:45.483162Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:01:45.491475Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:01:45.492728Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:01:45.528411Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:01:45.529235Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:01:45.530307Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:01:45.532932Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:01:45.533086Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:01:45.533186Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:01:45.548991Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:01:45.744066Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:01:45.745247Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:01:45.747824Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:01:45.748475Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:01:45.749462Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:01:45.750203Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:01:45.753073Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:45.753764Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:45.755419Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:01:45.756818Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:01:45.761906Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:01:45.762532Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:01:45.763429Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:01:45.763745Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:01:45.764104Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:01:45.764471Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:01:45.765154Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:01:45.767273Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:45.767313Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:45.768319Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:01:45.769990Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:01:45.770706Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:01:45.772449Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:01:45.774365Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:01:45.787685Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:01:45.788091Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:01:45.788723Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:01:45.789730Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:01:45.790059Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-28T09:01:45.805489Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:01:45.818813Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:01:45.818876Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:01:45.819222Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:01:45.822626Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:01:45.823119Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:01:45.823467Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:01:45.824284Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:01:45.824317Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:01:45.824668Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:01:45.855841Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:01:45.856646Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:01:45.873363Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:01:45.874034Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:01:45.874385Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:01:45.875430Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:01:45.876141Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:01:46.117652Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:46.118055Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:46.118474Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:01:46.126514Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:01:46.137594Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:01:46.139766Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:01:46.140211Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:01:46.147153Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:01:46.147561Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:01:46.265844Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:01:46.265939Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:01:46.266906Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:46.266955Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:46.267038Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:01:4 ... 0 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-28T09:02:50.401926Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:02:50.401955Z node 7 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715660 2025-03-28T09:02:50.402000Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 1526 txid# 281474976715660 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-28T09:02:50.402083Z node 7 :TX_DATASHARD DEBUG: Complete [1526 : 281474976715660] from 72075186224037888 at tablet 72075186224037888 send result to client [7:910:2664], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:02:50.402554Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:50.402859Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:02:50.403511Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:50.404586Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:50.405061Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-03-28T09:02:50.405473Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [7:699:2587], Recipient [7:696:2585]: {TEvReadSet step# 1526 txid# 281474976715660 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-03-28T09:02:50.405517Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:50.405583Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715660 2025-03-28T09:02:50.405971Z node 7 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-03-28T09:02:50.406053Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender 
[7:696:2585], Recipient [7:699:2587]: {TEvReadSet step# 1526 txid# 281474976715660 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-03-28T09:02:50.406083Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:02:50.406114Z node 7 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715660 ... validating table 2025-03-28T09:02:51.919639Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqdztr1qdkdrs6g3nmxt6m7e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=N2RhMTY4YzItYTg1ZTI3YzMtNGY2ZDg5ZS00MTljNjUwNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:52.003475Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:965:2783], Recipient [7:696:2585]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1526 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-03-28T09:02:52.003678Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:02:52.003772Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-03-28T09:02:52.003893Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-28T09:02:52.003951Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:02:52.004002Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:52.004044Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:02:52.004092Z node 7 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-03-28T09:02:52.004139Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-28T09:02:52.004163Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:52.004186Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:02:52.004209Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:02:52.004351Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1526 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-03-28T09:02:52.004646Z node 7 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v1526/18446744073709551615 2025-03-28T09:02:52.004719Z node 7 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[7:965:2783], 0} after executionsCount# 1 2025-03-28T09:02:52.004782Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:965:2783], 0} sends rowCount# 1, bytes# 64, quota rows left# 1000, quota bytes left# 5242816, 
hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:02:52.004875Z node 7 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[7:965:2783], 0} finished in read 2025-03-28T09:02:52.004953Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-28T09:02:52.004982Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:02:52.005009Z node 7 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:02:52.005036Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:02:52.005080Z node 7 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-28T09:02:52.005102Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:02:52.005130Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-03-28T09:02:52.005178Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:02:52.005299Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-28T09:02:52.006473Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:965:2783], Recipient [7:696:2585]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:02:52.006537Z node 7 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-03-28T09:02:52.006962Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [7:965:2783], Recipient [7:699:2587]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1526 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2025-03-28T09:02:52.007088Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-03-28T09:02:52.007157Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CheckRead 2025-03-28T09:02:52.007224Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-03-28T09:02:52.007251Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CheckRead 2025-03-28T09:02:52.007292Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:52.007324Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-28T09:02:52.007378Z node 7 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037889 2025-03-28T09:02:52.007416Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-03-28T09:02:52.007440Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:52.007461Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit ExecuteRead 2025-03-28T09:02:52.007484Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit ExecuteRead 2025-03-28T09:02:52.007597Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { 
OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 2 Columns: 3 Columns: 4 Columns: 1 Snapshot { Step: 1526 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2025-03-28T09:02:52.007806Z node 7 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v1526/18446744073709551615 2025-03-28T09:02:52.007843Z node 7 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[7:965:2783], 1} after executionsCount# 1 2025-03-28T09:02:52.007877Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:965:2783], 1} sends rowCount# 1, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:02:52.007982Z node 7 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[7:965:2783], 1} finished in read 2025-03-28T09:02:52.008031Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-03-28T09:02:52.008059Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit ExecuteRead 2025-03-28T09:02:52.008099Z node 7 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037889 to execution unit CompletedOperations 2025-03-28T09:02:52.008122Z node 7 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037889 on unit CompletedOperations 2025-03-28T09:02:52.008160Z node 7 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037889 is Executed 2025-03-28T09:02:52.008180Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037889 executing on unit CompletedOperations 2025-03-28T09:02:52.008201Z node 7 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037889 has finished 2025-03-28T09:02:52.008228Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-03-28T09:02:52.008300Z node 7 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-03-28T09:02:52.009101Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [7:965:2783], Recipient [7:699:2587]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-03-28T09:02:52.009150Z node 7 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 2 } items { int32_value: 3 } items { int32_value: 4 } }, { items { int32_value: 11 } items { int32_value: 12 } items { int32_value: 12 } items { int32_value: 12 } } >> TSchemeShardViewTest::EmptyQueryText [GOOD] >> ResultFormatter::FormatEmptySchema [GOOD] >> ResultFormatter::FormatNonEmptySchema [GOOD] >> YdbSdkSessionsPool::RunSmallPlan |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Struct [GOOD] |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantStruct [GOOD] >> TSchemeShardViewTest::CreateView [GOOD] |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::FormatNonEmptySchema [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader 
for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:02:53.946418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:02:53.946545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:53.946586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:02:53.946620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:02:53.946677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:02:53.946793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:02:53.946859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:53.946936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:02:53.947242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:54.033832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:02:54.033891Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:54.048802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:54.049064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:02:54.049220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:02:54.054938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:02:54.055154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:02:54.055820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:54.056031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:54.058032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:54.059460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:54.059526Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:54.059697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:02:54.059742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:54.059783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:02:54.059862Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.066481Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:02:54.204854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:54.205094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.205291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:02:54.205527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:02:54.205593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.211434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:54.211596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:02:54.211837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.211901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:02:54.211935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:02:54.211972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:02:54.220266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.220353Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:02:54.220390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:02:54.227616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.227688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.227746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:54.227815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:02:54.231966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } 
CoordinatorID: 72057594046316545 2025-03-28T09:02:54.235674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:02:54.235936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:02:54.237189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:54.237348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:54.237407Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:54.237656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:02:54.237702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:54.237850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:54.237929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:54.249126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:54.249187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:54.249385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:54.249447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:02:54.249800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.249859Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:02:54.249963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:54.249999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:54.250031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:54.250059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:54.250092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:02:54.250145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:54.250179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and 
all the parts is done, operation id: 1:0 2025-03-28T09:02:54.250202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:02:54.250282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:54.250314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:02:54.250358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:02:54.252173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:54.252286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:54.252325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:02:54.252359Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:02:54.252400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:54.252525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:02:54.266919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:02:54.267474Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:02:54.268080Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-28T09:02:54.289545Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-28T09:02:54.292456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:54.292710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2025-03-28T09:02:54.292806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2025-03-28T09:02:54.292949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:02:54.293050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-28T09:02:54.293119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:02:54.293971Z node 1 
:TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:02:54.307496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-03-28T09:02:54.307681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-03-28T09:02:54.308177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.308246Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2025-03-28T09:02:54.308308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-03-28T09:02:54.308435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:02:54.308933Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:02:54.310549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-03-28T09:02:54.310671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-03-28T09:02:54.311030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:54.311170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:54.311221Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-03-28T09:02:54.311393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-28T09:02:54.311571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:54.311634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:02:54.315802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:54.315884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:54.316070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:02:54.316172Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:54.316213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:02:54.316255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:02:54.316579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.316637Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:02:54.316736Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:02:54.316768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:02:54.316820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:02:54.316858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:02:54.316891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:02:54.316938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:02:54.317000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:02:54.317032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:02:54.317097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:02:54.317135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-28T09:02:54.317166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-03-28T09:02:54.317193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-28T09:02:54.317908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:02:54.318025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:02:54.318072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:02:54.318107Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-03-28T09:02:54.318162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:54.319253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 
72057594046678944, cookie: 101 2025-03-28T09:02:54.319373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:02:54.319405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:02:54.319442Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-28T09:02:54.319473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:02:54.319536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:02:54.325981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:02:54.326108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardViewTest::AsyncDropSameView [GOOD] >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:02:54.362322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:02:54.362429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:54.362471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:02:54.362505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:02:54.362573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:02:54.362606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:02:54.362670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:54.362761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:02:54.363103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:54.445727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:02:54.445787Z node 1 :IMPORT 
WARN: Table profiles were not loaded 2025-03-28T09:02:54.467314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:54.467656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:02:54.467833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:02:54.474317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:02:54.474560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:02:54.475281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:54.475528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:54.477745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:54.479155Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:54.479220Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:54.479396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:02:54.479447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:54.479490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:02:54.479606Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.486828Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:02:54.631354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:54.631596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.631808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:02:54.632058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:02:54.632116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.635451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:54.635674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:02:54.635897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.635964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:02:54.636002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:02:54.636033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:02:54.638340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.638391Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:02:54.638419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:02:54.640511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.640564Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.640632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:54.640697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:02:54.649978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:02:54.653722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:02:54.653910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:02:54.655069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:54.655229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:54.655285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:54.655596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:02:54.655656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:54.655817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:54.655886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:54.658443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:54.658486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:54.658673Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:54.658748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:02:54.659062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.659105Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:02:54.659185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:54.659216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:54.659243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:54.659266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:54.659295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:02:54.659326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:54.659351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:02:54.659382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:02:54.659445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:54.659470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:02:54.659506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:02:54.661046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:54.661148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:54.661179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0 2025-03-28T09:02:54.688961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 100:0, viewDescription: Name: "MyView" QueryText: "Some query" 2025-03-28T09:02:54.689098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:02:54.689183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-28T09:02:54.689238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:02:54.690177Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:02:54.693821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusAccepted TxId: 100 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-03-28T09:02:54.694004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2025-03-28T09:02:54.694460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.694511Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 100:0 ProgressState 2025-03-28T09:02:54.694560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-03-28T09:02:54.694705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:02:54.695210Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:02:54.697053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-03-28T09:02:54.697210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-03-28T09:02:54.697543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:54.697679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:54.697733Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] 
TCreateView::TPropose, opId: 100:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-03-28T09:02:54.697935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-03-28T09:02:54.698160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:54.698231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 100 2025-03-28T09:02:54.700501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:54.700553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:54.700749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:02:54.700863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:54.700901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-03-28T09:02:54.700953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-03-28T09:02:54.701294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.701338Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-03-28T09:02:54.701427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-28T09:02:54.701462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:02:54.701500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-28T09:02:54.701532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:02:54.701565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-03-28T09:02:54.701613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:02:54.701661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-03-28T09:02:54.701702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-03-28T09:02:54.701790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:02:54.701835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-03-28T09:02:54.701867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-03-28T09:02:54.701908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-28T09:02:54.702648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 
PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:02:54.702779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:02:54.702827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-03-28T09:02:54.702877Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-03-28T09:02:54.702916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:54.703975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:02:54.704064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:02:54.704093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-03-28T09:02:54.704119Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-28T09:02:54.704160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:02:54.704233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-03-28T09:02:54.708505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-28T09:02:54.708631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 101 2025-03-28T09:02:54.708863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:02:54.708908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:02:54.709318Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:02:54.709426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:02:54.709462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:295:2286] TestWaitNotification: OK eventTxId 101 2025-03-28T09:02:54.709968Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:02:54.710178Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 226us result status StatusSuccess 2025-03-28T09:02:54.710533Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTicketParserTest::LoginRefreshGroupsWithError [GOOD] >> TTicketParserTest::NebiusAccessServiceAuthenticationOk >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtlWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143153036.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143153036.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143153036.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143153036.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143153036.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123153036.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=143153036.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143153036.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123151836.000000s;Name=;Codec=}; 2025-03-28T09:00:39.491651Z node 1 
:BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:00:39.835350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:00:39.895128Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:00:39.896254Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:00:40.068457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:00:40.070530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:00:40.075818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:00:40.075990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:00:40.076656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:00:40.077022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:00:40.077396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:00:40.078047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:00:40.078398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:00:40.078801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:00:40.079406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:00:40.079776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:00:40.196982Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:00:40.198978Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:00:40.199297Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:00:40.200647Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:00:40.202029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:00:40.202101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:00:40.202142Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:00:40.202241Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:00:40.202298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:00:40.202334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:00:40.202358Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:00:40.202496Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:00:40.202545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:00:40.202575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:00:40.202600Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:00:40.202666Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:00:40.214985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:00:40.215308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:00:40.215609Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:00:40.215688Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:00:40.215720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:00:40.215746Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:00:40.215782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:00:40.216064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:00:40.216443Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:00:40.228000Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=651; 2025-03-28T09:00:40.229150Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=532; 2025-03-28T09:00:40.229900Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=480; 2025-03-28T09:00:40.238938Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=335; 2025-03-28T09:00:40.240558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:00:40.241129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:00:40.241172Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:00:40.247096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:00:40.247158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:00:40.247194Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:00:40.248477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateS ... 
EST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-28T09:02:54.203867Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-03-28T09:02:54.203931Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:02:54.204005Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:02:54.204066Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:02:54.204209Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:02:54.204566Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000008:max} readable: {1000000008:max} at tablet 9437184 2025-03-28T09:02:54.204750Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:02:54.204980Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:02:54.205053Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:02:54.205635Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-28T09:02:54.205753Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:02:54.206415Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000008:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:2012:4021];trace_detailed=; 2025-03-28T09:02:54.206995Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-28T09:02:54.207271Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-28T09:02:54.207498Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:54.207655Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:54.208144Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2012:4021];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:02:54.208287Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2012:4021];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:54.208438Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2012:4021];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:54.208494Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:2012:4021] finished for tablet 9437184 2025-03-28T09:02:54.208977Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2012:4021];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:2011:4020];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743152574206323,"name":"_full_task","f":1743152574206323,"d_finished":0,"c":0,"l":1743152574208573,"d":2250},"events":[{"name":"bootstrap","f":1743152574206600,"d_finished":1092,"c":1,"l":1743152574207692,"d":1092},{"a":1743152574208110,"name":"ack","f":1743152574208110,"d_finished":0,"c":0,"l":1743152574208573,"d":463},{"a":1743152574208083,"name":"processing","f":1743152574208083,"d_finished":0,"c":0,"l":1743152574208573,"d":490},{"name":"ProduceResults","f":1743152574207404,"d_finished":577,"c":2,"l":1743152574208473,"d":577},{"a":1743152574208477,"name":"Finish","f":1743152574208477,"d_finished":0,"c":0,"l":1743152574208573,"d":96}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:02:54.209053Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2012:4021];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:2011:4020];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:02:54.209463Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2012:4021];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:2011:4020];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743152574206323,"name":"_full_task","f":1743152574206323,"d_finished":0,"c":0,"l":1743152574209099,"d":2776},"events":[{"name":"bootstrap","f":1743152574206600,"d_finished":1092,"c":1,"l":1743152574207692,"d":1092},{"a":1743152574208110,"name":"ack","f":1743152574208110,"d_finished":0,"c":0,"l":1743152574209099,"d":989},{"a":1743152574208083,"name":"processing","f":1743152574208083,"d_finished":0,"c":0,"l":1743152574209099,"d":1016},{"name":"ProduceResults","f":1743152574207404,"d_finished":577,"c":2,"l":1743152574208473,"d":577},{"a":1743152574208477,"name":"Finish","f":1743152574208477,"d_finished":0,"c":0,"l":1743152574209099,"d":622}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:2012:4021]->[1:2011:4020] 2025-03-28T09:02:54.209580Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=3;SelfId=[1:2012:4021];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:02:54.205712Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:02:54.209638Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:2012:4021];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:02:54.209776Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:2012:4021];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:02:54.597408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:02:54.597533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:54.597580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:02:54.597619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:02:54.597684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:02:54.597713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:02:54.597777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:54.597868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-03-28T09:02:54.598244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:54.690584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:02:54.690641Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:54.712735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:54.713010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:02:54.713242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:02:54.719411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:02:54.719664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:02:54.720413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:54.720691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:54.723024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:54.724608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:54.724678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:54.724882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:02:54.724939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:54.724989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:02:54.725087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.733895Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:02:54.918669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:54.919127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.919348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:02:54.919647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:02:54.919719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.922513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:54.922664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:02:54.922908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.923000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:02:54.923042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:02:54.923079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:02:54.925659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.925740Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:02:54.925796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:02:54.928307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.928366Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.928420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:54.928485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:02:54.932717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:02:54.936895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:02:54.937161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:02:54.938598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:54.938817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:54.938932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:54.939256Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:02:54.939319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:54.939507Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:54.939588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:54.942098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:54.942172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:54.942427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:54.942496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:02:54.942960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:54.943007Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:02:54.943124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:54.943168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:54.943223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:54.943266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:54.943306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:02:54.943350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:54.943386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:02:54.943417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:02:54.943497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:54.943535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:02:54.943581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:02:54.946196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:54.946373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:54.946422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Id: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2025-03-28T09:02:55.030660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), operation: DROP VIEW, path: /MyRoot/MyView 2025-03-28T09:02:55.031710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/MyView\', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 2 PathDropTxId: 102, at schemeshard: 72057594046678944 2025-03-28T09:02:55.031857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path is being deleted right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateDrop), operation: DROP VIEW, path: /MyRoot/MyView 2025-03-28T09:02:55.032612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-28T09:02:55.032753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-03-28T09:02:55.033161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:55.033272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:55.033327Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2025-03-28T09:02:55.033485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-03-28T09:02:55.033692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:55.033778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:02:55.036411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:55.036458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:55.036652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:02:55.036799Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, 
at schemeshard: 72057594046678944 2025-03-28T09:02:55.036837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-28T09:02:55.036892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:02:55.037358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:02:55.037409Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:02:55.037536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:02:55.037572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:02:55.037615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:02:55.037645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:02:55.037690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-28T09:02:55.037744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:02:55.037785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:02:55.037819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:02:55.037898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:02:55.037948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-28T09:02:55.037988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-28T09:02:55.038022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-28T09:02:55.038883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:02:55.038992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:02:55.039030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:02:55.039080Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:02:55.039145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:55.040147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:02:55.040234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: 
Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:02:55.040263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:02:55.040293Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:02:55.040324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:02:55.040409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-28T09:02:55.041405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:02:55.041460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:02:55.041530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:55.044989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:02:55.045363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:02:55.047256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-03-28T09:02:55.047639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:02:55.047702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2025-03-28T09:02:55.047796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-28T09:02:55.047816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-28T09:02:55.048338Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:02:55.048467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:02:55.048508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:325:2316] 2025-03-28T09:02:55.048698Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-28T09:02:55.048772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:02:55.048795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:325:2316] 
TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 2025-03-28T09:02:55.049381Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:02:55.049601Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 252us result status StatusPathDoesNotExist 2025-03-28T09:02:55.049779Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks [GOOD] >> BackupRestoreS3::RestoreViewQueryText [GOOD] >> BackupRestoreS3::RestoreViewReferenceTable >> ResultFormatter::Tuple [GOOD] >> ResultFormatter::Tagged [GOOD] >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::SomeRejectProbability >> ResultFormatter::StructWithNoFields [GOOD] >> ResultFormatter::StructTypeNameAsString [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular >> TTicketParserTest::BulkAuthorizationRetryError [GOOD] >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 >> TFlatTableExecutor_RejectProbability::SomeRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability >> Cdc::KeysOnlyLogDebezium [GOOD] >> Cdc::NewAndOldImagesLog[PqRunner] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular >> ColumnBuildTest::AlreadyExists [GOOD] |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Tagged [GOOD] >> GroupWriteTest::ByTableName >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::StructTypeNameAsString [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 >> BackupRestore::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD] >> 
BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus >> TTicketParserTest::BulkAuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::BulkAuthorizationUnavailable >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeView [GOOD] >> BackupRestoreS3::RestoreTablePartitioningSettings >> TSchemeshardCompactionQueueTest::UpdateBelowThreshold [GOOD] >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] Test command err: 2025-03-28T09:01:43.113952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:01:43.114577Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:43.114630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001214/r3tmp/tmp84hLtK/pdisk_1.dat 2025-03-28T09:01:44.647312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:01:44.940517Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:45.042244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:45.048492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:45.079143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:45.286428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:01:45.917974Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:01:45.943490Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:01:45.952966Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:01:45.958088Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:01:46.247108Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:01:46.247866Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:01:46.247988Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:01:46.319412Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:01:46.320147Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:01:46.320866Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:01:46.326519Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:01:46.326996Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:01:46.328046Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:01:46.342162Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:01:46.393777Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:01:46.393942Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:01:46.394050Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:01:46.394081Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:01:46.394108Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:01:46.394143Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:01:46.394322Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:46.394362Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:46.394599Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:01:46.394676Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:01:46.395056Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:01:46.395100Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:01:46.395131Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:01:46.395159Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:01:46.395184Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:01:46.395212Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:01:46.395248Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:01:46.395352Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:46.395376Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:46.395421Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:01:46.395494Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:01:46.395522Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:01:46.395623Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:01:46.395798Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:01:46.395835Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:01:46.395902Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:01:46.395939Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:01:46.395975Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:01:46.396012Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-28T09:01:46.396037Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:01:46.396245Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:01:46.396270Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:01:46.396333Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:01:46.396359Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:01:46.396402Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:01:46.396433Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:01:46.396463Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:01:46.396492Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:01:46.396511Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:01:46.397597Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:01:46.397634Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:01:46.408555Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:01:46.408608Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:01:46.408635Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:01:46.408680Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:01:46.408736Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:01:46.618847Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:46.619569Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:46.619608Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:01:46.624109Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:01:46.624464Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:01:46.625499Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:01:46.625536Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:01:46.625582Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:01:46.625959Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:01:46.701970Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:01:46.702348Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:01:46.714709Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:46.714758Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:46.714889Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:01:4 ... 75186224037888 is Executed 2025-03-28T09:02:54.959169Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:02:54.959223Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:02:54.959448Z node 7 :TX_DATASHARD DEBUG: Planned transaction txId 1234567890011 at step 3500 at tablet 72075186224037888 { Transactions { TxId: 1234567890011 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:02:54.959502Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:54.959935Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [7:810:2667], Recipient [7:810:2667]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:54.959985Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:54.960066Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:54.960126Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:02:54.960182Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-28T09:02:54.960241Z node 7 :TX_DATASHARD DEBUG: Found ready operation [3500:1234567890011] in PlanQueue unit at 72075186224037888 2025-03-28T09:02:54.960291Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PlanQueue 2025-03-28T09:02:54.960343Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-03-28T09:02:54.960404Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PlanQueue 2025-03-28T09:02:54.960460Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadWriteDetails 2025-03-28T09:02:54.960509Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadTxDetails 2025-03-28T09:02:54.960892Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 
72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-03-28T09:02:54.960994Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-03-28T09:02:54.961065Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-03-28T09:02:54.961172Z node 7 :TX_DATASHARD DEBUG: LoadWriteDetails at 72075186224037888 loaded writeOp from db 3500:1234567890011 keys extracted: 1 2025-03-28T09:02:54.961234Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-03-28T09:02:54.961264Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadWriteDetails 2025-03-28T09:02:54.961295Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:54.961326Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:02:54.961397Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically complete end at 72075186224037888 2025-03-28T09:02:54.961449Z node 7 :TX_DATASHARD TRACE: Operation [3500:1234567890011] is the new logically incomplete end at 72075186224037888 2025-03-28T09:02:54.961502Z node 7 :TX_DATASHARD TRACE: Activated operation [3500:1234567890011] at 72075186224037888 2025-03-28T09:02:54.961555Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-03-28T09:02:54.961580Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:54.961607Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit BuildWriteOutRS 2025-03-28T09:02:54.961635Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit BuildWriteOutRS 2025-03-28T09:02:54.961683Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-03-28T09:02:54.961708Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit BuildWriteOutRS 2025-03-28T09:02:54.961731Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit StoreAndSendWriteOutRS 2025-03-28T09:02:54.961758Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit StoreAndSendWriteOutRS 2025-03-28T09:02:54.961785Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-03-28T09:02:54.961809Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit StoreAndSendWriteOutRS 2025-03-28T09:02:54.961832Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit PrepareWriteTxInRS 2025-03-28T09:02:54.961856Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit PrepareWriteTxInRS 2025-03-28T09:02:54.961883Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-03-28T09:02:54.961910Z node 7 
:TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit PrepareWriteTxInRS 2025-03-28T09:02:54.961934Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit LoadAndWaitInRS 2025-03-28T09:02:54.961957Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit LoadAndWaitInRS 2025-03-28T09:02:54.961984Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-03-28T09:02:54.962008Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit LoadAndWaitInRS 2025-03-28T09:02:54.962032Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit ExecuteWrite 2025-03-28T09:02:54.962056Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-03-28T09:02:54.962096Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-03-28T09:02:54.962482Z node 7 :TX_DATASHARD TRACE: Tablet 72075186224037888 is not ready for [3500:1234567890011] execution 2025-03-28T09:02:54.962579Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 released its data 2025-03-28T09:02:54.962639Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Restart 2025-03-28T09:02:54.962678Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:02:54.963883Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-28T09:02:54.963947Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:02:54.964013Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:02:54.964563Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:54.964636Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit ExecuteWrite 2025-03-28T09:02:54.964684Z node 7 :TX_DATASHARD DEBUG: Executing write operation for [3500:1234567890011] at 72075186224037888 2025-03-28T09:02:54.965103Z node 7 :TX_DATASHARD TRACE: Parsing write transaction for 1234567890011 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 1234567890011 TxMode: MODE_PREPARE Locks { Op: Commit } 2025-03-28T09:02:54.965208Z node 7 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, write point (Int32 : 1) 2025-03-28T09:02:54.965290Z node 7 :TX_DATASHARD TRACE: -- AddWriteRange: (Int32 : 1) table: [72057594046644480:2:1] 2025-03-28T09:02:54.965405Z node 7 :TX_DATASHARD DEBUG: tx 1234567890011 at 72075186224037888 restored its data 2025-03-28T09:02:54.965641Z node 7 :TX_DATASHARD DEBUG: Executed write operation for [3500:1234567890011] at 72075186224037888, row count=1 2025-03-28T09:02:54.965716Z node 7 :TX_DATASHARD TRACE: Lock 1234567890001 marked broken at v{min} 2025-03-28T09:02:54.965835Z node 7 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-28T09:02:54.965923Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:02:54.965984Z node 7 
:TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit ExecuteWrite 2025-03-28T09:02:54.966038Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompleteWrite 2025-03-28T09:02:54.966090Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-03-28T09:02:54.966837Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is DelayComplete 2025-03-28T09:02:54.966902Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompleteWrite 2025-03-28T09:02:54.966951Z node 7 :TX_DATASHARD TRACE: Add [3500:1234567890011] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:02:54.967001Z node 7 :TX_DATASHARD TRACE: Trying to execute [3500:1234567890011] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:02:54.967048Z node 7 :TX_DATASHARD TRACE: Execution status for [3500:1234567890011] at 72075186224037888 is Executed 2025-03-28T09:02:54.967086Z node 7 :TX_DATASHARD TRACE: Advance execution plan for [3500:1234567890011] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:02:54.967137Z node 7 :TX_DATASHARD TRACE: Execution plan for [3500:1234567890011] at 72075186224037888 has finished 2025-03-28T09:02:54.967192Z node 7 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:54.967265Z node 7 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-28T09:02:54.967320Z node 7 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:02:54.967366Z node 7 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:02:54.968306Z node 7 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-03-28T09:02:54.968735Z node 7 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:54.968800Z node 7 :TX_DATASHARD TRACE: Complete execution for [3500:1234567890011] at 72075186224037888 on unit CompleteWrite 2025-03-28T09:02:54.968891Z node 7 :TX_DATASHARD DEBUG: Complete write [3500 : 1234567890011] from 72075186224037888 at tablet 72075186224037888 send result to client [7:803:2661] 2025-03-28T09:02:54.968960Z node 7 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> BackupRestore::RestoreTablePartitioningSettings [GOOD] >> BackupRestore::RestoreIndexTablePartitioningSettings >> TSchemeshardCompactionQueueTest::EnqueueBelowSearchHeightThreshold [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueBelowRowDeletesThreshold [GOOD] >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] Test command err: 2025-03-28T09:02:37.591057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:02:37.591622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:37.591698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00171a/r3tmp/tmpduoEzi/pdisk_1.dat 2025-03-28T09:02:38.028922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:02:38.079429Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:38.124655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:38.124824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:38.137514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:38.228087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:38.278072Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-03-28T09:02:38.278464Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:38.330030Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:38.330250Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:38.332511Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:02:38.332633Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:02:38.332697Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:02:38.333129Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:38.333512Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:38.333601Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:703:2578] in generation 1 2025-03-28T09:02:38.335628Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:681:2580] 2025-03-28T09:02:38.335880Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:38.344777Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:38.344957Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:38.346737Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:02:38.346828Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-28T09:02:38.346887Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:02:38.347225Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:38.347374Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:38.347447Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:711:2580] in generation 1 2025-03-28T09:02:38.358832Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:38.395839Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:02:38.396205Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:38.396564Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:714:2599] 2025-03-28T09:02:38.396611Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:38.396651Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:02:38.396713Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:38.397080Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:38.397125Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-28T09:02:38.397190Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:38.397283Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:715:2600] 2025-03-28T09:02:38.397323Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:02:38.397349Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-28T09:02:38.397374Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:02:38.397908Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:02:38.398034Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:02:38.398135Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:38.398180Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:38.398416Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:02:38.398475Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:38.398552Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-28T09:02:38.398616Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-28T09:02:38.399201Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:686:2582], sessionId# [0:0:0] 2025-03-28T09:02:38.399268Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:02:38.399300Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:38.399332Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-28T09:02:38.399368Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:02:38.399536Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-03-28T09:02:38.399789Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:02:38.399926Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:02:38.400441Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:696:2590], sessionId# [0:0:0] 2025-03-28T09:02:38.400644Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:02:38.400821Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:02:38.400885Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-28T09:02:38.402890Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:38.402988Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:02:38.414102Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:38.414313Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:02:38.414506Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:02:38.414550Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-28T09:02:38.585476Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-03-28T09:02:38.585665Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:740:2619], serverId# [1:743:2622], sessionId# [0:0:0] 2025-03-28T09:02:38.590417Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-28T09:02:38.590507Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:02:38.599349Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:02:38.599431Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:38.599622Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:02:38.599671Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:02:38.599743Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-03-28T09:02:38.600112Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:02:38.600272Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-03-28T09:02:38.600563Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:38.600598Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:02:38.600632Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:02:38.600794Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:02:38.600889Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:02:38.600978Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:02:38.601058Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:02:38.614754Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T ... n plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:54.558289Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit DirectOp 2025-03-28T09:02:54.558327Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit DirectOp 2025-03-28T09:02:54.558380Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v3000/18446744073709551615 2025-03-28T09:02:54.558556Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:02:54.558590Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit DirectOp 2025-03-28T09:02:54.558638Z node 3 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:02:54.558673Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:02:54.558760Z node 3 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-28T09:02:54.558800Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:02:54.558830Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished 2025-03-28T09:02:54.569966Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-03-28T09:02:54.570050Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:5] at 72075186224037888 on unit DirectOp 2025-03-28T09:02:54.570127Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... bulk upsert finished with status SCHEME_ERROR 2025-03-28T09:02:55.360403Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] Handle TEvExecuteKqpTransaction 2025-03-28T09:02:55.360477Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] TxId# 281474976715662 ProcessProposeKqpTransaction 2025-03-28T09:02:55.361266Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. 
Ctx: { TraceId: 01jqdztw3e3cwgpbx9z4gdq0tc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDE1ZWExOTMtOGFlMzM4MTgtYTJlNjFhYmYtZmIxMzE0YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:55.411430Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1110:2909], Recipient [3:670:2574]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-03-28T09:02:55.411674Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:02:55.411737Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v8000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v8000/18446744073709551615 ImmediateWriteEdgeReplied# v8000/18446744073709551615 2025-03-28T09:02:55.411782Z node 3 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v8000/18446744073709551615 2025-03-28T09:02:55.411845Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CheckRead 2025-03-28T09:02:55.411942Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-28T09:02:55.411983Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:02:55.412021Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:55.412059Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:02:55.412116Z node 3 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037888 2025-03-28T09:02:55.412175Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-28T09:02:55.412201Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:55.412225Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:02:55.412247Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:02:55.412378Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-03-28T09:02:55.419448Z node 3 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[3:1110:2909], 0} after executionsCount# 1 2025-03-28T09:02:55.419544Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1110:2909], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:02:55.419681Z node 3 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[3:1110:2909], 0} finished in read 2025-03-28T09:02:55.419752Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-28T09:02:55.419798Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:02:55.419830Z node 3 :TX_DATASHARD TRACE: Add [0:6] at 
72075186224037888 to execution unit CompletedOperations 2025-03-28T09:02:55.419863Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:02:55.419908Z node 3 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-28T09:02:55.419929Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:02:55.419959Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-03-28T09:02:55.419998Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:02:55.420096Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-28T09:02:55.448642Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1110:2909], Recipient [3:670:2574]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:02:55.448740Z node 3 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } 2025-03-28T09:02:55.671274Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] Handle TEvExecuteKqpTransaction 2025-03-28T09:02:55.671341Z node 3 :TX_PROXY DEBUG: actor# [3:59:2106] TxId# 281474976715663 ProcessProposeKqpTransaction 2025-03-28T09:02:55.672045Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqdztwyycsfh9kz470m59658, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NWE1Zjk5YjAtZTE1ZmI3NS0zMWExMzc5Yi02ZGE4YTY5, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:55.707948Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [3:1141:2934], Recipient [3:906:2739]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 1 2025-03-28T09:02:55.708122Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-03-28T09:02:55.708181Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037889 CompleteEdge# v6000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v5000/18446744073709551615 ImmediateWriteEdgeReplied# v5000/18446744073709551615 2025-03-28T09:02:55.708223Z node 3 :TX_DATASHARD TRACE: 72075186224037889 changed HEAD read to non-repeatable v8000/18446744073709551615 2025-03-28T09:02:55.708281Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-03-28T09:02:55.708365Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-03-28T09:02:55.708404Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-03-28T09:02:55.708441Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:55.708477Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-28T09:02:55.708520Z node 3 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2025-03-28T09:02:55.708559Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 
2025-03-28T09:02:55.708581Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-28T09:02:55.708634Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-03-28T09:02:55.708665Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-03-28T09:02:55.708766Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 8 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-03-28T09:02:55.709098Z node 3 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[3:1141:2934], 0} after executionsCount# 1 2025-03-28T09:02:55.709154Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1141:2934], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:02:55.709273Z node 3 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[3:1141:2934], 0} finished in read 2025-03-28T09:02:55.709333Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-03-28T09:02:55.709357Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-03-28T09:02:55.709381Z node 3 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-03-28T09:02:55.709404Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-03-28T09:02:55.709445Z node 3 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-03-28T09:02:55.709464Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-03-28T09:02:55.709493Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2025-03-28T09:02:55.709531Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-03-28T09:02:55.709618Z node 3 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-03-28T09:02:55.748202Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [3:1141:2934], Recipient [3:906:2739]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:02:55.748295Z node 3 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadRowsLocks [GOOD] Test command err: 2025-03-28T09:02:37.917540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:02:37.917892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:37.917947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001711/r3tmp/tmpzLmt0Y/pdisk_1.dat 2025-03-28T09:02:38.281140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:02:38.331036Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:38.372982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:38.373103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:38.387988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:38.471962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:38.525683Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:682:2579] 2025-03-28T09:02:38.525961Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:38.580857Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:38.581063Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:38.591201Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:02:38.591344Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:02:38.591432Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:02:38.591873Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:38.592247Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:38.592314Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:716:2579] in generation 1 2025-03-28T09:02:38.594003Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:686:2581] 2025-03-28T09:02:38.594210Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:38.610853Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:38.611289Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:689:2583] 2025-03-28T09:02:38.611492Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:38.619915Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:38.621456Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:02:38.621535Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-28T09:02:38.621588Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:02:38.621869Z node 1 
:TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:38.622982Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:38.623049Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:741:2581] in generation 1 2025-03-28T09:02:38.623761Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:38.623872Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:38.625215Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-03-28T09:02:38.625275Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-03-28T09:02:38.625317Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-03-28T09:02:38.625636Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:38.625907Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:38.625954Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037891 persisting started state actor id [1:745:2583] in generation 1 2025-03-28T09:02:38.627385Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:695:2585] 2025-03-28T09:02:38.627572Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:38.637317Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:38.637404Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:38.638773Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-03-28T09:02:38.638833Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-03-28T09:02:38.638880Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-03-28T09:02:38.639172Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:38.639344Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:38.639407Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:753:2585] in generation 1 2025-03-28T09:02:38.655617Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:38.709023Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:02:38.709257Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:38.709388Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:758:2621] 2025-03-28T09:02:38.709432Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:38.709469Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:02:38.709506Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:38.709819Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:38.709856Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-28T09:02:38.709911Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:38.709962Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:759:2622] 
2025-03-28T09:02:38.709985Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:02:38.710025Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-28T09:02:38.710068Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:02:38.710525Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:02:38.710639Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:02:38.720077Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:38.720188Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-03-28T09:02:38.720319Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:38.720411Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [1:760:2623] 2025-03-28T09:02:38.720450Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-03-28T09:02:38.720477Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-03-28T09:02:38.720507Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-28T09:02:38.720939Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:38.720993Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:38.721035Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:02:38.721106Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:38.721160Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-28T09:02:38.721244Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-28T09:02:38.721335Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:38.721378Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-03-28T09:02:38.721453Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:38.721522Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:761:2624] 2025-03-28T09:02:38.721547Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-28T09:02:38.721570Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-03-28T09:02:38.721595Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:02:38.722364Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:671:2573], serverId# [1:712:2596], sessionId# [0:0:0] 2025-03-28T09:02:38.722451Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:02:38.722489Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:38.722531Z node 1 :TX_DATASHARD 
INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-28T09:02:38.722582Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:02:38.722932Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:38.723184Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:02:38.723316Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:02:38.723907Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:672:2574], serverId# [1:718:2599], sessionId# [0:0:0] 2025-03-28T09:02:38.723963Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037891 2025-03-28T09:02:38.724054Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037891 2025-03-28T09:02:38.724281Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:02:38.724454Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 720751862240 ... 463Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:53.314576Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:684:2580] 2025-03-28T09:02:53.314621Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:53.314665Z node 3 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:02:53.314733Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:53.315192Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:02:53.315344Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:02:53.315459Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:53.315513Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:53.315580Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:02:53.315646Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:53.316108Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-03-28T09:02:53.316265Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:53.316502Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:02:53.316600Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:02:53.335572Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:53.346516Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:53.346665Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request 
in state WaitScheme 2025-03-28T09:02:53.520063Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-03-28T09:02:53.520542Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:02:53.520625Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:53.521636Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:53.521704Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:02:53.521781Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:02:53.522097Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:02:53.522278Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:02:53.523014Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:53.523091Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:02:53.523579Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:02:53.524042Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:53.525698Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:02:53.525757Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:53.526596Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:02:53.526855Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:53.527892Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:53.527938Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:53.527998Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:02:53.528076Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:02:53.528144Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:02:53.528258Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:53.529687Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:53.532561Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 
state Ready 2025-03-28T09:02:53.532641Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:02:53.532846Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:02:53.543785Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:53.543902Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:53.543991Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:53.551246Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:02:53.558268Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:53.728022Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:53.731758Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:02:53.816084Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:54.300382Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqdztv3583bt9b27c1sw1kem, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NjNhZGMwYy03OTQ1Mjg5LTQ2MGE3NGY1LTliNjUyNjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:54.307343Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:852:2688], serverId# [3:853:2689], sessionId# [0:0:0] 2025-03-28T09:02:54.307591Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:54.323524Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:54.323680Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:55.318928Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqdztvx88s526xc7zfvnczye, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzExOGIyNjctZDUwN2QzMC00YTY5ZTc0NC0zZTVkMjA5NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:55.418512Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint32_value: 300 } } 2025-03-28T09:02:55.476256Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-03-28T09:02:55.487705Z node 3 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-03-28T09:02:55.487783Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:55.487846Z node 3 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-03-28T09:02:55.488427Z node 3 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-03-28T09:02:55.488492Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:55.557895Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqdztx019wps02g90ehfpnqc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzExOGIyNjctZDUwN2QzMC00YTY5ZTc0NC0zZTVkMjA5NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:55.558552Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:55.570557Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:55.570718Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:55.598432Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzExOGIyNjctZDUwN2QzMC00YTY5ZTc0NC0zZTVkMjA5NA==, ActorId: [3:859:2694], ActorState: ExecuteState, TraceId: 01jqdztx019wps02g90ehfpnqc, Create QueryResponse for error on request, msg: 2025-03-28T09:02:55.599505Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663.
Ctx: { TraceId: 01jqdztx019wps02g90ehfpnqc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YzExOGIyNjctZDUwN2QzMC00YTY5ZTc0NC0zZTVkMjA5NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:55.599897Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:55.600288Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:55.600349Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD] >> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:02:53.103832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:02:53.103943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:53.103982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:02:53.104014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:02:53.104066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:02:53.104100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:02:53.104189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:53.104267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:02:53.104566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:53.215925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:02:53.215978Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:53.231051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:53.231327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:02:53.231470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:02:53.236961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:02:53.237166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:02:53.237785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, 
state: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:53.237977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:53.239867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:53.241087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:53.241145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:53.241291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:02:53.241335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:53.241375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:02:53.241451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:02:53.267659Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:02:53.433235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:53.433451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:53.433652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:02:53.433855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:02:53.433908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:53.435955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:53.436081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:02:53.436248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:53.436313Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:02:53.436347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:02:53.436395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:02:53.438251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-03-28T09:02:53.438306Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:02:53.438342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:02:53.439837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:53.439878Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:53.439929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:53.439963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:02:53.442444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:02:53.448171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:02:53.448377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:02:53.449169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:53.449286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:53.449322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:53.449509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:02:53.449542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:53.449911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:53.450058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:53.452210Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:53.452261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:53.452429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:53.452504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:02:53.452851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:53.452910Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:02:53.453001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:53.453031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:53.453077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:53.453113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:53.453147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:02:53.453183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:53.453219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:02:53.453262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:02:53.453330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:53.453365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:02:53.453410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:02:53.455465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:53.455571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:53.455612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
atisfy waiter [1:781:2662] TestWaitNotification: OK eventTxId 105 2025-03-28T09:02:56.758488Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2025-03-28T09:02:56.758932Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 493us result status StatusSuccess 2025-03-28T09:02:56.759410Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 200 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 2 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2025-03-28T09:02:56.761927Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 106 DatabaseName: "/MyRoot/ServerLessDB" Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_shards_in_flight: 2 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } 2025-03-28T09:02:56.780551Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-03-28T09:02:56.780695Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1146:3017], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:02:56.780860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 106, at schemeshard: 72075186233409549 2025-03-28T09:02:56.780952Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 106, txId# 281474976725757 2025-03-28T09:02:56.781032Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1146:3017], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:02:56.787808Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-03-28T09:02:56.787905Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1146:3017], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:02:56.790489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerLessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" Columns { Name: "value" Type: "Uint64" 
DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: true } } Internal: true } TxId: 281474976725757 TabletId: 72075186233409549 FailOnExist: true , at schemeshard: 72075186233409549 2025-03-28T09:02:56.792560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/ServerLessDB/Table, pathId: , opId: 281474976725757:0, at schemeshard: 72075186233409549 2025-03-28T09:02:56.792929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976725757:1, propose status:StatusInvalidParameter, reason: Cannot alter type for column 'value', at schemeshard: 72075186233409549 2025-03-28T09:02:56.800417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976725757, response: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2025-03-28T09:02:56.800609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976725757, database: /MyRoot/ServerLessDB, subject: , status: StatusInvalidParameter, reason: Cannot alter type for column 'value', operation: ALTER TABLE, path: /MyRoot/ServerLessDB/Table 2025-03-28T09:02:56.800839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976725757, status# StatusInvalidParameter 2025-03-28T09:02:56.800921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2025-03-28T09:02:56.801033Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 106, cookie: 106, txId: 281474976725757, status: StatusInvalidParameter 2025-03-28T09:02:56.801170Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: AlterMainTable, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:1146:3017], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusInvalidParameter Reason: "Cannot alter type for column \'value\'" TxId: 281474976725757 SchemeshardId: 72075186233409549 2025-03-28T09:02:56.801827Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuilder::TTxReply: ReplyOnCreation, BuildIndexId: 106, status: BAD_REQUEST, error: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column 'value', replyTo: [1:1146:3017] 2025-03-28T09:02:56.802145Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Message: TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, 
reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 106 Status: BAD_REQUEST Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } IndexBuild { Id: 106 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Cannot alter type for column \'value\'" severity: 1 } State: STATE_PREPARING Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } >> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache [GOOD] >> TFlatTableExecutor_Cold::ColdBorrowScan >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables [GOOD] >> TFlatTableExecutor_Reschedule::TestExecuteReschedule [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorSetResourceProfile |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] >> TFlatTableExecutor_Cold::ColdBorrowScan [GOOD] >> TFlatTableExecutor_ColumnGroups::TestManyRows >> TFlatTableExecutor_ResourceProfile::TestExecutorSetResourceProfile [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits >> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit >> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless |75.9%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded >> TTicketParserTest::NebiusAuthorizationRetryError [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately >> TKesusTest::TestSessionTimeoutAfterDetach [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot |75.9%| [TA] $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded >> TSchemeshardCompactionQueueTest::EnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataGC [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxPartialDataHold [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |75.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldOnRelease [GOOD] >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig [GOOD] >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueSinglePartedShardWithMemData [GOOD] >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] |75.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |75.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} |75.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut >> TKesusTest::TestAcquireSemaphoreTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] >> YdbSdkSessionsPool::Get1Session [GOOD] >> YdbSdkSessionsPool::PeriodicTask1 |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled >> YdbSdkSessionsPool::RunSmallPlan [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart >> TTicketParserTest::NebiusAccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryError >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite >> GroupWriteTest::WithRead >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] >> GroupWriteTest::TwoTables >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::BulkAuthorization >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::RunSmallPlan [GOOD] Test command err: 2025-03-28T09:02:55.331158Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783304090390698:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:55.331230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013da/r3tmp/tmphm4hh0/pdisk_1.dat 2025-03-28T09:02:55.987675Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:55.998221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:55.998341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:56.010365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25480, node 1 2025-03-28T09:02:56.271525Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:56.271551Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:56.271557Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:56.271698Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14038 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:56.942344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... >> YdbSdkSessionsPool::WaitQueue10 [GOOD] >> GroupWriteTest::Simple |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> TFlatTableExecutor_ColumnGroups::TestManyRows [GOOD] >> TFlatTableExecutor_CompactionScan::TestCompactionScan |75.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |76.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move >> TFlatTableExecutor_CompactionScan::TestCompactionScan [GOOD] >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows >> GroupWriteTest::WriteHardRateDispatcher >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BuildColumnDoesnotRestoreDeletedRows [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:02:52.397676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:02:52.397779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:52.397816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:02:52.397851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:02:52.397903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:02:52.397930Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:02:52.397986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:52.398064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:02:52.398402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:52.486304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:02:52.486368Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:52.527273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:52.527555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:02:52.527696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:02:52.550190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:02:52.550399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:02:52.550989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:52.551196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:52.559706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:52.560998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:52.561058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:52.561211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:02:52.561253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:52.561291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:02:52.561375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:02:52.575556Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:02:52.864323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:52.864555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:52.864751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:02:52.865006Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:02:52.865061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:52.871603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:52.871747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:02:52.871963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:52.872027Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:02:52.872059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:02:52.872089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:02:52.874400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:52.874457Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:02:52.874489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:02:52.876351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:52.876406Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:52.876448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:52.876489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:02:52.880112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:02:52.882079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:02:52.882263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:02:52.883247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:52.883375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:52.883436Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:52.883698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:02:52.883750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:52.883910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:52.884002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:52.886010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:52.886054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:52.886244Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:52.886309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:02:52.886664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:52.886729Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:02:52.886866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:52.886909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:52.886952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:52.886980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:52.887017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:02:52.887054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:52.887094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:02:52.887143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:02:52.887219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:52.887254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:02:52.887316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:02:52.889341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:52.889444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 
Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:52.889480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... MKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'27))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.145101Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2056:3919], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'28))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.169167Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2057:3920], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'29))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.185862Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2058:3921], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'30))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.206591Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2059:3922], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'31))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.247288Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2060:3923], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'32))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.263083Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2061:3924], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'33))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.283604Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2062:3925], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'34))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.299975Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2063:3926], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'35))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.316422Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2064:3927], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'36))))\n (let select \'(\'key))\n 
(return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.326577Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2065:3928], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'37))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.334181Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2066:3929], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'38))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.344102Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2067:3930], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'39))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.356508Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2068:3931], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'40))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.368208Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2069:3932], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'41))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.375939Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2070:3933], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'42))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.384789Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2071:3934], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'43))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.397948Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2072:3935], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'44))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.414465Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2073:3936], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'45))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.429467Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2074:3937], Recipient [1:753:2642]: 
NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'46))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.600129Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2075:3938], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'47))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.607731Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2076:3939], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'48))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.616952Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2077:3940], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'49))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } 2025-03-28T09:03:01.634466Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268830210, Sender [1:2078:3941], Recipient [1:753:2642]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "\n (\n (let key \'(\'(\'key (Uint64 \'50))))\n (let select \'(\'key))\n (return (AsList\n (SetResult \'Result (SelectRow \'__user__Table key select))\n ))\n )\n " } } |76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] |76.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |76.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan [GOOD] >> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue10 [GOOD] Test command err: 2025-03-28T09:02:55.089117Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783305175634070:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:55.089269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ee/r3tmp/tmpmzT5f2/pdisk_1.dat 2025-03-28T09:02:55.833475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:55.833573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:55.837242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:55.840284Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17082, node 1 2025-03-28T09:02:56.205940Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:56.205961Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:56.205967Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:56.206050Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3458 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:57.169407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:00.090546Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783305175634070:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:00.094903Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad >> TFlatTableExecutor_StickyPages::TestSticky >> TFlatTableExecutor_StickyPages::TestSticky [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex >> Cdc::NewAndOldImagesLog[PqRunner] [GOOD] >> Cdc::NewAndOldImagesLog[YdsRunner] |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyMain >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue >> TFlatTableExecutor_StickyPages::TestStickyMain [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue [GOOD] >> TFlatTableExecutor_ExecutorTxLimit::TestExecutorTxLimit [GOOD] >> TFlatTableExecutor_Follower::BasicFollowerRead 
[GOOD] >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles >> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot >> ResultFormatter::Void [GOOD] >> ResultFormatter::VariantTuple [GOOD] >> ResultFormatter::EmptyResultSet [GOOD] >> ResultFormatter::EmptyList [GOOD] >> ResultFormatter::EmptyTuple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:131:2155] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:110:2142] 2025-03-28T09:00:57.092541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:00:57.092639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:57.092678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:00:57.092717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:00:57.092784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:00:57.092810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:00:57.092869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:57.092951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:00:57.093313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:00:57.327042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: 
"Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:00:57.327087Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:131:2155] sender: [1:168:2058] recipient: [1:15:2062] 2025-03-28T09:00:57.332856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:00:57.333060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:00:57.333219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:00:57.350531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:00:57.350728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:00:57.351354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:57.351566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:57.359496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:57.365626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:57.365787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:57.366124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:00:57.366181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:57.366225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:00:57.369398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:216:2058] recipient: [1:214:2215] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:216:2058] recipient: [1:214:2215] Leader for TabletID 72057594037968897 is [1:220:2219] sender: [1:221:2058] recipient: [1:214:2215] 2025-03-28T09:00:57.376760Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:241:2058] recipient: [1:15:2062] 2025-03-28T09:00:57.493502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:00:57.493728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.493929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:00:57.494160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:00:57.494233Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.496709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:57.496836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:00:57.497008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.497056Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:00:57.497099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:00:57.497147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:00:57.499359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.499411Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:00:57.499442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:00:57.501376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.501418Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.501469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:57.501522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:00:57.505021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:00:57.507409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:00:57.507579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:256:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:00:57.508483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:57.508603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:57.508651Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:57.508947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:00:57.508996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:57.509151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:00:57.509221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:00:57.511591Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:57.511634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:57.511815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:57.511854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:00:57.512259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:57.512300Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:00:57.512402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:57.512432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:57.512464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025 ... 
51 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:03.562834Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1003 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 219043334251 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:03.562870Z node 51 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 1003:0 HandleReply TEvOperationPlan, operationId: 1003:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-03-28T09:03:03.563073Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 128 -> 129 2025-03-28T09:03:03.563177Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:03:03.569642Z node 51 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:03.569706Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:03:03.570023Z node 51 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:03.570079Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [51:206:2208], at schemeshard: 72057594046678944, txId: 1003, path id: 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-03-28T09:03:03.570601Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:03:03.570649Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:03:03.571280Z node 51 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-28T09:03:03.571369Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-28T09:03:03.571402Z node 51 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-03-28T09:03:03.571436Z node 51 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-03-28T09:03:03.571471Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:03:03.571551Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 1003 2025-03-28T09:03:03.575093Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 947 } } 2025-03-28T09:03:03.575133Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-03-28T09:03:03.575237Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 947 } } 2025-03-28T09:03:03.575310Z node 51 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 947 } } 2025-03-28T09:03:03.576153Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 332 RawX2: 219043334415 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-03-28T09:03:03.576181Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-03-28T09:03:03.576265Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 332 RawX2: 219043334415 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-03-28T09:03:03.576301Z node 51 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:03:03.576353Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 332 RawX2: 219043334415 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-03-28T09:03:03.576390Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:03.576413Z node 51 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:03:03.576436Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:03:03.576475Z node 51 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2025-03-28T09:03:03.576890Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:03:03.578269Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:03:03.578368Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:03:03.578569Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:03:03.578600Z node 51 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 
2025-03-28T09:03:03.578662Z node 51 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-03-28T09:03:03.578701Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-28T09:03:03.578737Z node 51 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-03-28T09:03:03.578763Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-28T09:03:03.578798Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-03-28T09:03:03.578844Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-28T09:03:03.578880Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-03-28T09:03:03.578908Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-03-28T09:03:03.579003Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-03-28T09:03:03.581811Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-03-28T09:03:03.581841Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-03-28T09:03:03.582163Z node 51 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-03-28T09:03:03.582225Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-03-28T09:03:03.582247Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:455:2428] TestWaitNotification: OK eventTxId 1003 2025-03-28T09:03:03.582587Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:03.582783Z node 51 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 221us result status StatusSuccess 2025-03-28T09:03:03.583180Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 
0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAll >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication >> ResultFormatter::EmptyDict [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAll [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky >> ResultFormatter::Dict [GOOD] >> ResultFormatter::Decimal [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAuthorization >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachAfterLoan [GOOD] >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky [GOOD] >> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs >> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRows |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::EmptyTuple [GOOD] |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantTuple [GOOD] |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Decimal [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] Test command err: 2025-03-28T09:02:41.070560Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783242774085203:2216];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fdf/r3tmp/tmptnp7ii/pdisk_1.dat 2025-03-28T09:02:41.389391Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot 
detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:02:41.616773Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:41.656398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:41.656493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:41.667381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18715, node 1 2025-03-28T09:02:41.752663Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:41.752690Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:41.752697Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:41.752805Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2752 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:42.095684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:42.116107Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:42.174663Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:02:42.174973Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:02:42.175006Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:02:42.175518Z node 1 :TICKET_PARSER DEBUG: Ticket **** (5DAB89DE) () has now permanent error message 'Token is not in correct format' 2025-03-28T09:02:42.175534Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Token is not in correct format 2025-03-28T09:02:42.175562Z node 1 :TICKET_PARSER ERROR: Ticket **** (5DAB89DE): Token is not in correct format 2025-03-28T09:02:44.928634Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783257133568663:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:44.931930Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fdf/r3tmp/tmpHgEgaC/pdisk_1.dat 2025-03-28T09:02:45.160979Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:45.174261Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:45.174339Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:45.176571Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24616, node 2 2025-03-28T09:02:45.223903Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:45.223924Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:45.223933Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:45.224033Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25941 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:02:45.581016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:45.588975Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:02:45.599145Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:02:45.599184Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:02:45.599193Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:02:45.599308Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-03-28T09:02:45.599387Z node 2 :GRPC_CLIENT DEBUG: [517000031488] Connect to grpc://localhost:19454 2025-03-28T09:02:45.604554Z node 2 :GRPC_CLIENT DEBUG: [517000031488] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-03-28T09:02:45.629320Z node 2 :GRPC_CLIENT DEBUG: [517000031488] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2025-03-28T09:02:45.629761Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-03-28T09:02:45.629874Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-28T09:02:45.632713Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:02:45.632745Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:02:45.632753Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:02:45.632820Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-03-28T09:02:45.633055Z node 2 :GRPC_CLIENT DEBUG: [517000031488] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-03-28T09:02:45.635868Z node 2 :GRPC_CLIENT DEBUG: [517000031488] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2025-03-28T09:02:45.636093Z node 2 :TICKET_PARSER TRACE: Ticket **** 
(8E120919) permission something.write access denied for subject "user1@as" 2025-03-28T09:02:45.636154Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'something.write for folder_id aaaa1234 - Access Denied' test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fdf/r3tmp/tmp3CzMy7/pdisk_1.dat 2025-03-28T09:02:49.254828Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:49.331107Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:49.343147Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:49.343233Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:49.348413Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8852, node 3 2025-03-28T09:02:49.437948Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:49.437969Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:49.437977Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:49.438110Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7364 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmp ... 
node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-03-28T09:02:53.140755Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783295132866915:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:53.140792Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fdf/r3tmp/tmpASTEuG/pdisk_1.dat 2025-03-28T09:02:53.509320Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:53.571736Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:53.571822Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:53.576561Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64008, node 4 2025-03-28T09:02:53.691226Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:53.691254Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:53.691263Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:53.691400Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16326 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:53.973842Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:53.988713Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:53.995019Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:02:53.995062Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:02:53.995072Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:02:53.995234Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read somewhere.sleep something.list something.write something.eat) 2025-03-28T09:02:53.995294Z node 4 :GRPC_CLIENT DEBUG: [517000118b88] Connect to grpc://localhost:26273 2025-03-28T09:02:53.996507Z node 4 :GRPC_CLIENT DEBUG: [517000118b88] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "somewhere.sleep" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.list" ...(truncated) } 2025-03-28T09:02:54.007013Z node 4 :GRPC_CLIENT DEBUG: [517000118b88] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "something.list" r...(truncated) } 2025-03-28T09:02:54.007347Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read access denied for subject "user1@as" 2025-03-28T09:02:54.007368Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission somewhere.sleep access denied for subject "user1@as" 2025-03-28T09:02:54.007380Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.list access denied for subject "user1@as" 2025-03-28T09:02:54.007394Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.eat access denied for subject "user1@as" 2025-03-28T09:02:54.007412Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-03-28T09:02:54.007580Z node 4 :GRPC_CLIENT DEBUG: [517000118f08] Connect to grpc://localhost:14285 2025-03-28T09:02:54.008336Z node 4 :GRPC_CLIENT DEBUG: [517000118f08] Request GetUserAccountRequest { user_account_id: "user1" } 2025-03-28T09:02:54.017871Z node 4 :GRPC_CLIENT DEBUG: [517000118f08] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-03-28T09:02:54.018190Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-03-28T09:02:58.250573Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486783318559806607:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:58.271479Z node 5 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fdf/r3tmp/tmpTYfyJP/pdisk_1.dat 2025-03-28T09:02:58.529826Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:58.633773Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:58.633881Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:58.640103Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31368, node 5 2025-03-28T09:02:58.795499Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:58.795521Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:58.795529Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:58.795667Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18186 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:59.126133Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:59.138424Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:02:59.138461Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:02:59.138472Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:02:59.138570Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-03-28T09:02:59.138624Z node 5 :GRPC_CLIENT DEBUG: [5170000b6808] Connect to grpc://localhost:65375 2025-03-28T09:02:59.139676Z node 5 :GRPC_CLIENT DEBUG: [5170000b6808] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-03-28T09:02:59.152394Z node 5 :GRPC_CLIENT DEBUG: [5170000b6808] Status 14 Service Unavailable 2025-03-28T09:02:59.152900Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-28T09:02:59.152926Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-03-28T09:02:59.152958Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-28T09:02:59.153059Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-03-28T09:02:59.153357Z node 5 :GRPC_CLIENT DEBUG: [5170000b6808] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-03-28T09:02:59.156633Z node 5 :GRPC_CLIENT DEBUG: [5170000b6808] Status 14 Service Unavailable 2025-03-28T09:02:59.158757Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-28T09:02:59.158783Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-03-28T09:02:59.158814Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' >> ResultFormatter::List [GOOD] >> ResultFormatter::Null [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRows [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex >> TTicketParserTest::BulkAuthorization [GOOD] |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Null [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount2 |76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |76.0%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg
|76.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg
>> ResultFormatter::Optional [GOOD]
>> ResultFormatter::Pg
>> ResultFormatter::Pg [GOOD]
>> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex [GOOD]
>> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex
>> BackupRestore::RestoreIndexTablePartitioningSettings [GOOD]
>> BackupRestore::RestoreTableSplitBoundaries
>> BackupRestoreS3::RestoreTablePartitioningSettings [GOOD]
>> BackupRestoreS3::RestoreIndexTablePartitioningSettings
|76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Pg [GOOD]
>> YdbSdkSessionsPool::PeriodicTask10
>> BackupRestoreS3::RestoreViewReferenceTable [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD]
>> BackupRestoreS3::TestAllIndexTypes-EIndexTypeInvalid [GOOD]
>> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal
>> YdbSdkSessionsPool::StressTestAsync1
>> YdbSdkSessionsPool::StressTestSync10 [GOOD]
>> YdbSdkSessionsPool::StressTestAsync10
|76.0%| [TA] $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> YdbSdkSessionsPool::WaitQueue1
>> TTicketParserTest::AuthenticationRetryError [GOOD]
>> TTicketParserTest::AuthenticationRetryErrorImmediately
>> YdbSdkSessionsPool::StressTestSync1 [GOOD]
>> YdbSdkSessionsPool::CustomPlan
>> TSchemeShardViewTest::EmptyName
|76.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut
|76.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut
|76.1%| [TA] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ...
results_accumulator.log} |76.1%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync10 [GOOD] Test command err: 2025-03-28T09:02:54.651280Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783301003876119:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:54.651319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013f3/r3tmp/tmpmn4jEy/pdisk_1.dat 2025-03-28T09:02:55.380485Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:55.391566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:55.391660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:55.412105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9433, node 1 2025-03-28T09:02:55.779249Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:55.779270Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:55.779285Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:55.781741Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22149 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:56.283315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:59.654395Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783301003876119:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:59.654468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestSync1 [GOOD] Test command err: 2025-03-28T09:02:55.225263Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783303104118519:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:55.225359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013e8/r3tmp/tmpIlnkTs/pdisk_1.dat 2025-03-28T09:02:55.985670Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:55.993023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:55.993128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:56.017684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7518, node 1 2025-03-28T09:02:56.267339Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:56.267367Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:56.267374Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:56.267530Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:56.833714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:03:00.210849Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783303104118519:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:00.210906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardViewTest::EmptyName [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] >> TSchemeShardMoveTest::Boot >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:08.949696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:08.949845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:08.949887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:08.950027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:08.950103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:08.950136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:08.950206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:08.950307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:08.950640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:09.037060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:09.037125Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:09.057771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:09.058373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:09.058612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:09.064468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:09.064706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:09.065439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2025-03-28T09:03:09.065670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:09.067572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:09.069735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:09.069810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:09.070003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:09.070121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:09.070177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:09.070273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:09.077469Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:09.216277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:09.216534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:09.216764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:09.217033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:09.217112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:09.220537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:09.220704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:09.220911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:09.221013Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:09.221059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:09.221091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:09.227623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:09.227704Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:09.227752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:09.231541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:09.231604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:09.231659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:09.231732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:09.242662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:09.245092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:09.245315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:09.246479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:09.246635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:09.246724Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:09.247001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:09.247055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:09.247213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:09.247316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:09.249661Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:09.249718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:09.249946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:09.250014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 
72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:09.250427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:09.250528Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:09.250643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:09.250680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:09.250741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:09.250775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:09.250809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:09.250850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:09.250882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:09.250915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:09.251006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:09.251041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:09.251098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:09.253004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:09.253135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:09.253180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:03:09.253220Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:03:09.253261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:09.253389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:03:09.259816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:03:09.260430Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:03:09.261096Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-28T09:03:09.284817Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-28T09:03:09.287966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some 
query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:09.288197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0 2025-03-28T09:03:09.288322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, viewDescription: Name: "" QueryText: "Some query" 2025-03-28T09:03:09.288427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2025-03-28T09:03:09.289446Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:03:09.296513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:09.296792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE VIEW, path: /MyRoot/ 2025-03-28T09:03:09.297406Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TTicketParserTest::NebiusAuthorization [GOOD] >> TTicketParserTest::NebiusAuthorizationModify >> TSchemeShardMoveTest::Boot [GOOD] >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly >> TTicketParserTest::AuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::BulkAuthorizationModify >> Cdc::NewAndOldImagesLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[TopicRunner] >> BindQueue::Basic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] Test command err: 2025-03-28T09:02:42.034938Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783246076970370:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:42.034984Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ec6/r3tmp/tmpD88YqN/pdisk_1.dat 2025-03-28T09:02:42.556866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:42.556981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:42.564928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:42.569271Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17334, node 1 2025-03-28T09:02:42.759520Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:42.759542Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:42.759549Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:42.759657Z 
node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10804 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:43.298106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:43.340027Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:43.352212Z node 1 :TICKET_PARSER DEBUG: Ticket D6C2D38C6371437808B54B866E3B1B637E6BE87929573581707E696AE49F75CA () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-28T09:02:46.847859Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783262936601951:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:46.854816Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ec6/r3tmp/tmp3M5RZk/pdisk_1.dat 2025-03-28T09:02:47.051694Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:47.102146Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:47.102239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:47.105249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3358, node 2 2025-03-28T09:02:47.323601Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:47.323630Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:47.323638Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:47.323760Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11017 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:47.991940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:48.011142Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:02:48.021920Z node 2 :TICKET_PARSER DEBUG: Ticket 0FE09F17E78D7CF03AECB4269A4A94C1226424AB75D908C6BA0D462FFD35C4BC () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-28T09:02:52.477493Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486783291705115083:2134];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:52.478388Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ec6/r3tmp/tmpWnokjp/pdisk_1.dat 2025-03-28T09:02:52.857508Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:52.868476Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:52.874999Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:52.884270Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6501, node 3 2025-03-28T09:02:53.039577Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:53.039604Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:53.039620Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:53.039744Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1050 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:53.427069Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:53.434528Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:53.437675Z node 3 :TICKET_PARSER DEBUG: Ticket 373E518F0FFF100A02D0BBDB6A8D142124C556E3847BDB56CDBCFD775B34EE59 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-03-28T09:02:53.438329Z node 3 :TICKET_PARSER ERROR: Ticket 373E518F0FFF100A02D0BBDB6A8D142124C556E3847BDB56CDBCFD775B34EE59: Cannot create token from certificate. Client certificate failed verification 2025-03-28T09:02:58.703301Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783316063803754:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:58.704309Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ec6/r3tmp/tmpi2GuI4/pdisk_1.dat 2025-03-28T09:02:59.064148Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:59.093031Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:59.093178Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:59.094440Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27642, node 4 2025-03-28T09:02:59.285041Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:59.285073Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:59.285082Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:59.285218Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22186 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:59.792074Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:59.803632Z node 4 :TICKET_PARSER DEBUG: Ticket 28E60785411B878EC4494AE7E98C1E4939E0E02BF7C6BCD1F5AAA77840672DD2 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-28T09:03:05.283790Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486783345327069195:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:05.284400Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ec6/r3tmp/tmpig1IDT/pdisk_1.dat 2025-03-28T09:03:05.469210Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:05.485819Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:05.485926Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:05.488894Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5646, node 5 2025-03-28T09:03:05.607549Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:05.607573Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:05.607582Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:05.607718Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6859 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:03:05.984107Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:03:06.001033Z node 5 :TICKET_PARSER DEBUG: Ticket E3B9D3F43AFA7D2D7905C85760002052AB518CED72489F01A1E205CDDDE4501A () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-03-28T09:03:06.001566Z node 5 :TICKET_PARSER ERROR: Ticket E3B9D3F43AFA7D2D7905C85760002052AB518CED72489F01A1E205CDDDE4501A: Cannot create token from certificate. Client certificate failed verification >> TBlobStorageWardenTest::TestDeleteStoragePool >> GroupWriteTest::WithRead [GOOD] >> TBlobStorageWardenTest::TestSendUsefulMonitoring ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WithRead [GOOD] Test command err: RandomSeed# 6262607997999946305 2025-03-28T09:03:03.674422Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 3 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-03-28T09:03:03.698467Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-03-28T09:03:03.698539Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 going to send TEvBlock {TabletId# 3 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-03-28T09:03:03.701170Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-03-28T09:03:03.714800Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:03:03.717348Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-03-28T09:03:11.524122Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-28T09:03:11.524240Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 
12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:03:11.524306Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-28T09:03:11.524352Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:03:11.624523Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} 2025-03-28T09:03:11.624642Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK} >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] >> TBlobStorageWardenTest::TestSendToInvalidGroupId >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless >> TBlobStorageWardenTest::TestDeleteStoragePool [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:10.412649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:10.412753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:10.412793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:10.412834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:10.412880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:10.412917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:10.413013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:10.413090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:10.413417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:10.525452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:10.525521Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-03-28T09:03:10.551280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:10.551672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:10.551838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:10.565483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:10.565767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:10.566411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:10.566639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:10.568792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:10.570073Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:10.570134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:10.570316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:10.570372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:10.570413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:10.570536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:10.579074Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:10.728315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:10.728545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:10.728776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:10.729066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:10.729155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:10.731786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:10.731923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:10.732138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:10.732200Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:10.732261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:10.732298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:10.735428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:10.735489Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:10.735528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:10.738637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:10.738746Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:10.738823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:10.738882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:10.742622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:10.744970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:10.745218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:10.746354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:10.746480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:10.746529Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:10.746880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:10.746953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:10.747128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:10.747230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:10.751677Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:10.751735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:10.751951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:10.751991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:10.752433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:10.752528Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:10.752639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:10.752672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:10.752733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:10.752769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:10.752809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:10.752860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:10.752896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:10.752927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:10.753004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:10.753046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:10.753081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:10.754844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:10.754969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:10.755008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
eply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 956 } } 2025-03-28T09:03:12.232280Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 103 Step: 5000004 OrderId: 103 ExecLatency: 0 ProposeLatency: 10 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 956 } } FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 FAKE_COORDINATOR: Erasing txId 103 2025-03-28T09:03:12.233398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 8589936901 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-28T09:03:12.233437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 2 2025-03-28T09:03:12.233552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:2, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 8589936901 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-28T09:03:12.233601Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:03:12.233677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 325 RawX2: 8589936901 } Origin: 72075186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-28T09:03:12.233736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:12.233771Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:2, at schemeshard: 72057594046678944 2025-03-28T09:03:12.233807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:03:12.233845Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:2 129 -> 240 2025-03-28T09:03:12.235024Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 8589936904 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-28T09:03:12.235060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409547, partId: 0 2025-03-28T09:03:12.235153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 8589936904 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-28T09:03:12.235186Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply 
TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:03:12.235238Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 329 RawX2: 8589936904 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-28T09:03:12.235276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:12.235302Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:03:12.235326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:03:12.235350Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-03-28T09:03:12.242842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-03-28T09:03:12.246672Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:03:12.247930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2025-03-28T09:03:12.248520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2025-03-28T09:03:12.248564Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:12.248608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-03-28T09:03:12.248705Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 2/3 2025-03-28T09:03:12.248737Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-03-28T09:03:12.248769Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 2/3 2025-03-28T09:03:12.248797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2025-03-28T09:03:12.248833Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 2/3, is published: true 2025-03-28T09:03:12.249806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:03:12.250093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:03:12.250130Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:12.250161Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 103:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-03-28T09:03:12.250218Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 3/3 2025-03-28T09:03:12.250242Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-03-28T09:03:12.250270Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 3/3 2025-03-28T09:03:12.250291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-03-28T09:03:12.250316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/3, is published: true 2025-03-28T09:03:12.250346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2025-03-28T09:03:12.250389Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:03:12.250424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:03:12.250529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-03-28T09:03:12.250567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:03:12.250601Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-03-28T09:03:12.250621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-03-28T09:03:12.250645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-03-28T09:03:12.250666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:03:12.250706Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-03-28T09:03:12.250725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-03-28T09:03:12.250780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-03-28T09:03:12.250810Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-28T09:03:12.251188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:03:12.251229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-28T09:03:12.251283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:03:12.251325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:03:12.251351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:03:12.251376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:03:12.251401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:12.258225Z 
node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:03:12.258944Z node 2 :TX_PROXY DEBUG: actor# [2:270:2261] Handle TEvGetProxyServicesRequest TestWaitNotification wait txId: 103 2025-03-28T09:03:12.315389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:03:12.315449Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:03:12.315889Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:03:12.315991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:03:12.316026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:677:2562] TestWaitNotification: OK eventTxId 103 >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] >> BackupRestore::RestoreTableSplitBoundaries [GOOD] >> BackupRestore::ImportDataShouldHandleErrors >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> BackupRestoreS3::RestoreIndexTablePartitioningSettings [GOOD] >> BackupRestoreS3::RestoreTableSplitBoundaries >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] Test command err: 2025-03-28T09:03:12.613877Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:12.616894Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:12.619007Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:12.619493Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:12.623545Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:12.623662Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001657/r3tmp/tmpN69JnA/pdisk_1.dat 2025-03-28T09:03:13.217619Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] bootstrap ActorId# [1:544:2462] Group# 33554432 BlobCount# 1 BlobIDs# 
[[72057594037932033:2:8:0:0:1291:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:03:13.217801Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.217854Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.217884Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.217915Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.217945Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.217974Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.218038Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] restore Id# [72057594037932033:2:8:0:0:1291:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:03:13.218115Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1291:1] Marker# BPG33 2025-03-28T09:03:13.218170Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1291:1] Marker# BPG32 2025-03-28T09:03:13.218219Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1291:2] Marker# BPG33 2025-03-28T09:03:13.218246Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1291:2] Marker# BPG32 2025-03-28T09:03:13.218279Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1291:3] Marker# BPG33 2025-03-28T09:03:13.218306Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1291:3] Marker# BPG32 2025-03-28T09:03:13.218513Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:65:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:3] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:13.218594Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:2] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:13.218641Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:1] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:13.225912Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 
MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-28T09:03:13.226258Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-03-28T09:03:13.226387Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-03-28T09:03:13.226479Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1291:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-03-28T09:03:13.226562Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1291:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:03:13.226800Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.231 sample PartId# [72057594037932033:2:8:0:0:1291:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.232 sample PartId# [72057594037932033:2:8:0:0:1291:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.232 sample PartId# [72057594037932033:2:8:0:0:1291:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 8.56 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 8.867 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 8.984 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-03-28T09:03:13.321114Z node 1 :BS_PROXY_PUT INFO: [8d27cf9df52bfb78] bootstrap ActorId# [1:589:2499] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:229:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:03:13.321259Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.321300Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.321328Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.321355Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.321380Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.321407Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# 
[72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.321443Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] restore Id# [72057594037932033:2:9:0:0:229:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:03:13.321508Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG33 2025-03-28T09:03:13.321554Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG32 2025-03-28T09:03:13.321595Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG33 2025-03-28T09:03:13.321621Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG32 2025-03-28T09:03:13.321653Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG33 2025-03-28T09:03:13.321682Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG32 2025-03-28T09:03:13.321838Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:3] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:13.321913Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:2] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:13.321965Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:72:2098] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:1] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:13.327079Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-28T09:03:13.327284Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-03-28T09:03:13.327371Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000 ... 
Create Queue# [1:595:2504] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:13.384048Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:590:2500] Create Queue# [1:596:2505] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:13.384163Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:590:2500] Create Queue# [1:597:2506] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:13.384280Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:590:2500] Create Queue# [1:598:2507] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:13.384391Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:590:2500] Create Queue# [1:599:2508] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:13.384508Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:590:2500] Create Queue# [1:600:2509] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:13.384538Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:03:13.385176Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:13.385345Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:13.385446Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:13.385523Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:13.385578Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:13.385630Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:13.385677Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} 
Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:13.385703Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-03-28T09:03:13.385732Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-03-28T09:03:13.385771Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2025-03-28T09:03:13.386548Z node 1 :BS_PROXY_PUT INFO: [1a43693427d0a82b] bootstrap ActorId# [1:601:2510] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-03-28T09:03:13.386658Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.387151Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:03:13.387215Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-03-28T09:03:13.387246Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-03-28T09:03:13.387364Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:594:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:13.480908Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-03-28T09:03:13.481061Z node 1 :BS_PROXY_PUT DEBUG: [1a43693427d0a82b] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-03-28T09:03:13.481124Z node 1 :BS_PROXY_PUT INFO: [1a43693427d0a82b] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:03:13.481257Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 89.067 sample PartId# [1234:2:0:0:0:5:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 94.534 VDiskId# [82000002:1:0:0:0] NodeId# 1 Status# OK } ] } 2025-03-28T09:03:13.481868Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:03:13.481917Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-03-28T09:03:13.482081Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Marker# 
DSP17 2025-03-28T09:03:13.482723Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/2909/001657/r3tmp/tmpN69JnA//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-03-28T09:03:13.483568Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-03-28T09:03:13.483615Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:03:13.485513Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:605:2106] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:13.485667Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:606:2107] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:13.485779Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:607:2108] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:13.485889Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:608:2109] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:13.486019Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:609:2110] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:13.486135Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:610:2111] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:13.486247Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:603:2105] Create Queue# [2:611:2112] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:13.486279Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:03:13.487639Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:13.487939Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:13.488021Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:13.488191Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:13.488279Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 
127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:13.488409Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:13.488476Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:13.488508Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-03-28T09:03:13.488549Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-03-28T09:03:13.488772Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:605:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> GroupWriteTest::TwoTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] Test command err: 2025-03-28T09:03:13.206939Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:13.209013Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:13.209452Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:13.210565Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:13.211544Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:13.211640Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:13.211696Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001650/r3tmp/tmpFUYVKV/pdisk_1.dat 2025-03-28T09:03:13.839410Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] bootstrap ActorId# [1:480:2462] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1291:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:03:13.839561Z node 1 :BS_PROXY_PUT DEBUG: 
[084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.839611Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.839641Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.839668Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.839697Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.839723Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1291:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:13.839763Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] restore Id# [72057594037932033:2:8:0:0:1291:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:03:13.839827Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1291:1] Marker# BPG33 2025-03-28T09:03:13.839868Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1291:1] Marker# BPG32 2025-03-28T09:03:13.839910Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1291:2] Marker# BPG33 2025-03-28T09:03:13.839937Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1291:2] Marker# BPG32 2025-03-28T09:03:13.839964Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1291:3] Marker# BPG33 2025-03-28T09:03:13.839991Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1291:3] Marker# BPG32 2025-03-28T09:03:13.840165Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:46:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:3] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:13.840240Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:2] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:13.840287Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1291:1] FDS# 1291 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:13.851454Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-28T09:03:13.851643Z node 1 :BS_PROXY_PUT DEBUG: 
[084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-03-28T09:03:13.851733Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1291:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90165 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-03-28T09:03:13.851802Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1291:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-03-28T09:03:13.851859Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1291:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:03:13.852051Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.054 sample PartId# [72057594037932033:2:8:0:0:1291:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.055 sample PartId# [72057594037932033:2:8:0:0:1291:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.055 sample PartId# [72057594037932033:2:8:0:0:1291:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 12.269 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 12.414 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 12.5 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-03-28T09:03:13.907946Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 4294967295 IsLimitedKeyless# 0 fullIfPossible# 1 Marker# DSP58 2025-03-28T09:03:13.910416Z node 1 :BS_PROXY CRIT: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvBlock {TabletId# 1234 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} Response# TEvBlockResult {Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 Sending TEvPut 2025-03-28T09:03:13.910809Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvPut {Id# [1234:1:0:0:0:5:0] Size# 5 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as unconfigured in error state (DSPE11). 
It happens when the request was sent for an invalid groupID" ApproximateFreeSpaceShare# 0} Marker# DSP31 2025-03-28T09:03:13.910983Z node 1 :BS_PROXY DEBUG: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Response# TEvCollectGarbageResult {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] Test command err: 2025-03-28T09:02:41.562561Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783244142542633:2219];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:41.562811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f6a/r3tmp/tmp7MvtjD/pdisk_1.dat 2025-03-28T09:02:42.171099Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:42.172481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:42.172577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:42.176640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23805, node 1 2025-03-28T09:02:42.262515Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:42.262536Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:42.262550Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:42.262744Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6737 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
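The node 1 exchange that follows is the ticket parser's happy path: it sends AuthenticateRequest { iam_token } to the access service over gRPC, gets back AuthenticateResponse { subject { user_account { id: "user1" } } }, and records a valid token of the form "<id>@as". Below is a minimal C++ sketch of that round trip; the types are self-contained stand-ins for the generated proto stubs, not the actual YDB API.

```cpp
#include <functional>
#include <optional>
#include <string>

// Stand-in for the AuthenticateResponse proto; only the field visible in the
// log (subject.user_account.id) is modeled here.
struct TAuthenticateResponse {
    std::string UserAccountId; // e.g. "user1"
};

// Pluggable transport so the sketch is self-contained; returns nullopt on a
// transport-level failure (Service Unavailable, CANCELLED), which the parser
// treats as retryable rather than fatal.
using TAuthenticateFn =
    std::function<std::optional<TAuthenticateResponse>(const std::string& iamToken)>;

std::optional<std::string> AuthenticateOnce(const TAuthenticateFn& call,
                                            const std::string& iamToken) {
    if (auto response = call(iamToken)) {
        return response->UserAccountId + "@as"; // "user1@as" in the entries below
    }
    return std::nullopt; // caller schedules a retry
}
```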
2025-03-28T09:02:42.556613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:42.583843Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:42.586989Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-28T09:02:42.587086Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:30343 2025-03-28T09:02:42.590073Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-03-28T09:02:42.640021Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:02:42.640305Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-28T09:02:46.111904Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783263777321585:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:46.111952Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f6a/r3tmp/tmppK5mce/pdisk_1.dat 2025-03-28T09:02:46.333158Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:46.399572Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:46.399796Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:46.400935Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22308, node 2 2025-03-28T09:02:46.493839Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:46.493858Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:46.493865Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:46.493969Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22366 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
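Note how secrets are rendered throughout these entries: "**** (8E120919)" above, and "ApiK****alid (AB5B5EA8)" and "AKIA****MPLE (B3EDC139)" further below. The middle of the ticket is blanked and a short hash in parentheses keeps distinct tickets distinguishable across log lines. A self-contained sketch of that masking scheme follows; the hash function (FNV-1a here) and the rule for when edge characters survive (the IAM token above is fully masked, while the API keys keep their first and last four characters) are assumptions for illustration.

```cpp
#include <cstdint>
#include <cstdio>
#include <string>

// FNV-1a stands in for whatever short hash the real formatter uses.
static uint32_t Fnv1a(const std::string& s) {
    uint32_t h = 2166136261u;
    for (unsigned char c : s) {
        h ^= c;
        h *= 16777619u;
    }
    return h;
}

// Produces strings like "ApiK****alid (AB5B5EA8)"; fully masks the ticket
// when keepEdges is false or the ticket is too short to expose safely.
std::string MaskTicket(const std::string& ticket, bool keepEdges) {
    char hash[16];
    std::snprintf(hash, sizeof(hash), "%08X", Fnv1a(ticket));
    std::string masked = (keepEdges && ticket.size() > 8)
        ? ticket.substr(0, 4) + "****" + ticket.substr(ticket.size() - 4)
        : "****";
    return masked + " (" + hash + ")";
}
```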
2025-03-28T09:02:46.747155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:46.755168Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:02:46.761187Z node 2 :TICKET_PARSER TRACE: Ticket ApiK****alid (AB5B5EA8) asking for AccessServiceAuthentication 2025-03-28T09:02:46.761256Z node 2 :GRPC_CLIENT DEBUG: [517000031488] Connect to grpc://localhost:13041 2025-03-28T09:02:46.762271Z node 2 :GRPC_CLIENT DEBUG: [517000031488] Request AuthenticateRequest { api_key: "ApiK****alid (AB5B5EA8)" } 2025-03-28T09:02:46.784529Z node 2 :GRPC_CLIENT DEBUG: [517000031488] Response AuthenticateResponse { subject { user_account { id: "ApiKey-value-valid" } } } 2025-03-28T09:02:46.784923Z node 2 :TICKET_PARSER DEBUG: Ticket ApiK****alid (AB5B5EA8) () has now valid token of ApiKey-value-valid@as 2025-03-28T09:02:49.921312Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486783277774244522:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:49.921347Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f6a/r3tmp/tmpxQ2UgI/pdisk_1.dat 2025-03-28T09:02:50.099849Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26845, node 3 2025-03-28T09:02:50.126804Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:50.126900Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:50.130375Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:50.253910Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:50.253934Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:50.253942Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:50.254085Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13893 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
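The node 3 run that follows is the heart of AuthenticationRetryErrorImmediately: gRPC status 14 (Service Unavailable) and then status 1 (CANCELLED) each become a "retryable error message" rather than a terminal rejection, so the parser re-asks the access service instead of caching a failure. A sketch of that classification using the standard grpc::StatusCode values; treating DEADLINE_EXCEEDED as retryable too is an assumption, and the set YDB actually uses may be wider.

```cpp
#include <grpcpp/support/status.h>

// Maps a gRPC status to the parser's retryable/terminal decision. The two
// cases observed in the log below are UNAVAILABLE (14) and CANCELLED (1);
// anything like an explicit permission error falls through to "terminal".
bool IsRetryableAuthError(const grpc::Status& status) {
    switch (status.error_code()) {
        case grpc::StatusCode::UNAVAILABLE:       // "Status 14 Service Unavailable"
        case grpc::StatusCode::CANCELLED:         // "Status 1 CANCELLED"
        case grpc::StatusCode::DEADLINE_EXCEEDED: // assumed retryable as well
            return true;
        default:
            return false;
    }
}
```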
2025-03-28T09:02:50.543624Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:50.552928Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:02:50.552960Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:02:50.552968Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:02:50.553001Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-28T09:02:50.553047Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Connect to grpc://localhost:18243 2025-03-28T09:02:50.553888Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-03-28T09:02:50.567638Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Status 14 Service Unavailable 2025-03-28T09:02:50.568089Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-28T09:02:50.568119Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-28T09:02:50.568259Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-03-28T09:02:50.578818Z node 3 :GRPC_CLIENT DEBUG: [5170000f0088] Status 1 CANCELLED 2025-03-28T09:02:50.580835Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2025-03-28T09:02:54.424046Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783299292233664:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:54.424104Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f6a/r3tmp/tmp1qQRNF/pdisk_1.dat 2025-03-28T09:02:54.648299Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:54.664104Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:54.664199Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:54.666024Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19283, node 4 2025-03-28T09:02:54.762845Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:54.762879Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:54.762889Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:54.763055Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8761 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:55.114973Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:55.123478Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:55.130835Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-03-28T09:02:55.130939Z node 4 :GRPC_CLIENT DEBUG: [517000012a88] Connect to grpc://localhost:16988 2025-03-28T09:02:55.138717Z node 4 :GRPC_CLIENT DEBUG: [517000012a88] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-03-28T09:02:55.163170Z node 4 :GRPC_CLIENT DEBUG: [517000012a88] Status 14 Service Unavailable 2025-03-28T09:02:55.166829Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-28T09:02:55.166875Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-03-28T09:02:55.167109Z node 4 :GRPC_CLIENT DEBUG: [517000012a88] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-03-28T09:02:55.169091Z node 4 :GRPC_CLIENT DEBUG: [517000012a88] Status 14 Service Unavailable 2025-03-28T09:02:55.169375Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-28T09:02:56.462861Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-03-28T09:02:56.462934Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-03-28T09:02:56.463162Z node 4 :GRPC_CLIENT DEBUG: [517000012a88] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-03-28T09:02:56.469326Z node 4 :GRPC_CLIENT DEBUG: [517000012a88] Status 14 Service Unavailable 2025-03-28T09:02:56.469689Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-28T09:02:57.462938Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-03-28T09:02:57.462985Z node 4 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-03-28T09:02:57.463178Z node 4 :GRPC_CLIENT DEBUG: [517000012a88] Request AuthenticateRequest { signature { access_key_id: 
"AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-03-28T09:02:57.476352Z node 4 :GRPC_CLIENT DEBUG: [517000012a88] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:02:57.476871Z node 4 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-03-28T09:02:59.430885Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486783299292233664:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:59.430978Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:03:08.192187Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486783360379775729:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:08.192257Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f6a/r3tmp/tmpbEWw7W/pdisk_1.dat 2025-03-28T09:03:08.553140Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:08.616878Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:08.617003Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:08.620623Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2611, node 5 2025-03-28T09:03:08.850150Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:08.850180Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:08.850191Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:08.850345Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:09.207409Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:03:09.220066Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:03:09.230930Z node 5 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-03-28T09:03:09.231024Z node 5 :GRPC_CLIENT DEBUG: [5170000db408] Connect to grpc://localhost:61353 2025-03-28T09:03:09.232094Z node 5 :GRPC_CLIENT DEBUG: [5170000db408] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-03-28T09:03:09.257455Z node 5 :GRPC_CLIENT DEBUG: [5170000db408] Status 14 Service Unavailable 2025-03-28T09:03:09.258260Z node 5 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-28T09:03:09.258287Z node 5 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2025-03-28T09:03:09.258535Z node 5 :GRPC_CLIENT DEBUG: [5170000db408] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2025-03-28T09:03:09.267579Z node 5 :GRPC_CLIENT DEBUG: [5170000db408] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:03:09.267953Z node 5 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as >> TTicketParserTest::NebiusAuthorizationModify [GOOD] >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> YdbSdkSessionsPool::WaitQueue1 [GOOD] >> TTicketParserTest::LoginRefreshGroupsGood [GOOD] >> TTicketParserTest::LoginCheckRemovedUser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] Test command err: 2025-03-28T09:03:11.623381Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:11.625340Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:11.626245Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:11.626679Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:11.629139Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:11.629240Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00165b/r3tmp/tmp1Tc7aL/pdisk_1.dat 2025-03-28T09:03:12.271417Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] 
bootstrap ActorId# [1:478:2460] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1294:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:03:12.271555Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:12.271586Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:12.271604Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:12.271619Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:12.271633Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:12.271646Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:12.271673Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] restore Id# [72057594037932033:2:8:0:0:1294:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:03:12.271730Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1294:1] Marker# BPG33 2025-03-28T09:03:12.271767Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1294:1] Marker# BPG32 2025-03-28T09:03:12.271793Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1294:2] Marker# BPG33 2025-03-28T09:03:12.271811Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1294:2] Marker# BPG32 2025-03-28T09:03:12.271828Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1294:3] Marker# BPG33 2025-03-28T09:03:12.271844Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1294:3] Marker# BPG32 2025-03-28T09:03:12.271981Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:46:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1294:3] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:12.272031Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1294:2] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:12.272077Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1294:1] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:12.274208Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1294:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90188 ExtQueueId# PutTabletLog 
IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-28T09:03:12.274427Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1294:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-03-28T09:03:12.274587Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1294:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-03-28T09:03:12.274655Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1294:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-03-28T09:03:12.274724Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1294:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:03:12.274905Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.812 sample PartId# [72057594037932033:2:8:0:0:1294:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.812 sample PartId# [72057594037932033:2:8:0:0:1294:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.812 sample PartId# [72057594037932033:2:8:0:0:1294:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 2.983 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.18 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.329 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-03-28T09:03:12.352669Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [1:523:2497] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:03:12.352826Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:12.352865Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:12.352890Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:12.352916Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:12.352940Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:12.352967Z node 1 
:BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:12.353003Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:03:12.353069Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG33 2025-03-28T09:03:12.353114Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2025-03-28T09:03:12.353156Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2025-03-28T09:03:12.353182Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2025-03-28T09:03:12.353232Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2025-03-28T09:03:12.353255Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2025-03-28T09:03:12.353409Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:12.353475Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:12.353518Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:53:2097] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:1] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:12.356078Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-28T09:03:12.356293Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-03-28T09:03:12.356401Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000 ... 
situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:14.035602Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:03:14.035652Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-03-28T09:03:14.035689Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-03-28T09:03:14.035793Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:594:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:14.038674Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-03-28T09:03:14.038802Z node 2 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-03-28T09:03:14.038866Z node 2 :BS_PROXY_PUT INFO: [f913878b3da83702] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:03:14.039024Z node 2 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.467 sample PartId# [1234:2:0:0:0:5:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 3.393 VDiskId# [82000002:1:0:0:0] NodeId# 2 Status# OK } ] } 2025-03-28T09:03:14.039505Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:03:14.039564Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-03-28T09:03:14.039674Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2025-03-28T09:03:14.040215Z node 3 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/2909/00165b/r3tmp/tmpK6qDHa//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-03-28T09:03:14.041064Z node 3 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-03-28T09:03:14.041102Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:03:14.042906Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:607:2106] targetNodeId# 2 Marker# DSP01 2025-03-28T09:03:14.043027Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:608:2107] targetNodeId# 2 Marker# DSP01 2025-03-28T09:03:14.043134Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:609:2108] targetNodeId# 2 Marker# DSP01 
2025-03-28T09:03:14.043235Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:610:2109] targetNodeId# 2 Marker# DSP01 2025-03-28T09:03:14.043332Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:611:2110] targetNodeId# 2 Marker# DSP01 2025-03-28T09:03:14.043429Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:612:2111] targetNodeId# 2 Marker# DSP01 2025-03-28T09:03:14.043529Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Actor# [3:605:2105] Create Queue# [3:613:2112] targetNodeId# 2 Marker# DSP01 2025-03-28T09:03:14.043553Z node 3 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:03:14.044553Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:14.044712Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:14.044751Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:14.044853Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:14.044924Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:14.044996Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:14.045056Z node 3 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:14.045075Z node 3 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-03-28T09:03:14.045096Z node 3 :BS_PROXY INFO: Group# 2181038082 
SetStateWork Marker# DSP15 2025-03-28T09:03:14.045224Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] bootstrap ActorId# [3:614:2113] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-03-28T09:03:14.045275Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 2 Marker# DSPB03 2025-03-28T09:03:14.045452Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:607:2106] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 9183983658928693373 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-03-28T09:03:14.046642Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 2 Marker# DSPB01 2025-03-28T09:03:14.046730Z node 3 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2025-03-28T09:03:14.047038Z node 3 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-03-28T09:03:14.047207Z node 3 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-03-28T09:03:14.047499Z node 2 :BS_PROXY_PUT INFO: [91379e686f748e92] bootstrap ActorId# [2:615:2513] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-03-28T09:03:14.047654Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:14.047694Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:03:14.047755Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2025-03-28T09:03:14.047794Z node 2 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2025-03-28T09:03:14.047927Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:594:2503] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:14.048129Z node 2 :BS_VDISK_PUT ERROR: VDISK[82000002:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2025-03-28T09:03:14.048393Z node 2 :BS_PROXY_PUT INFO: [91379e686f748e92] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-03-28T09:03:14.048471Z node 2 :BS_PROXY_PUT ERROR: [91379e686f748e92] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2025-03-28T09:03:14.048524Z node 2 :BS_PROXY_PUT NOTICE: [91379e686f748e92] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:03:14.048625Z node 2 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.581 sample PartId# [1234:2:0:0:0:11:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 2 } ] } 2025-03-28T09:03:14.048966Z node 3 :BS_PROXY DEBUG: Send to queueActorId# [3:607:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::TwoTables [GOOD] Test command err: RandomSeed# 2859059898082994772 2025-03-28T09:03:03.924358Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058679074007041 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-03-28T09:03:03.924446Z 1 00h01m00.010512s 
:BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058502699329537 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-03-28T09:03:03.949316Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-03-28T09:03:03.949397Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 going to send TEvBlock {TabletId# 72058679074007041 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-03-28T09:03:03.949495Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-03-28T09:03:03.949518Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 going to send TEvBlock {TabletId# 72058502699329537 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-03-28T09:03:03.954044Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-03-28T09:03:03.954162Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-03-28T09:03:03.972916Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:03:03.972990Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:03:03.975994Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-03-28T09:03:03.976061Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-03-28T09:03:14.408051Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-28T09:03:14.408161Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:03:14.408235Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:03:14.408280Z 1 00h01m20.010512s 
:BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-28T09:03:14.408322Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:03:14.408373Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:03:14.408410Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-28T09:03:14.408451Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:03:14.408498Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:03:14.500925Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2025-03-28T09:03:14.501026Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2025-03-28T09:03:14.501076Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2025-03-28T09:03:14.501123Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 23 Channel# 0 Status# OK} 2025-03-28T09:03:14.501171Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} 2025-03-28T09:03:14.501220Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 24 Channel# 0 Status# OK} |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> TTicketParserTest::BulkAuthorizationModify [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryError [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately ------- [TM] {asan, default-linux-x86_64, release} 
ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::WaitQueue1 [GOOD] Test command err: 2025-03-28T09:03:08.691350Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783358429106412:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:08.691399Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013b9/r3tmp/tmp7wTijd/pdisk_1.dat 2025-03-28T09:03:09.436796Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16796, node 1 2025-03-28T09:03:09.610016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:09.610193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:09.669457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:03:09.691556Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:09.691579Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:09.691585Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:09.691734Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16794 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:10.111769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
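YdbSdkSessionsPool::WaitQueue1 above exercises a session pool whose callers queue when the pool is exhausted. The SDK's actual implementation is not shown in this log; the following is only a generic sketch of the wait-queue idea the test name suggests, using a condition variable and an assumed fixed session limit:

#include <condition_variable>
#include <mutex>

// Not the YDB SDK: a minimal pool where Acquire() parks the caller in a
// wait queue until Release() frees a slot.
class TSessionPoolSketch {
public:
    explicit TSessionPoolSketch(int limit) : Free_(limit) {}

    void Acquire() {  // blocks while the pool is exhausted
        std::unique_lock<std::mutex> lock(Mutex_);
        WaitQueue_.wait(lock, [this] { return Free_ > 0; });
        --Free_;
    }

    void Release() {  // wakes one parked waiter
        {
            std::lock_guard<std::mutex> lock(Mutex_);
            ++Free_;
        }
        WaitQueue_.notify_one();
    }

private:
    std::mutex Mutex_;
    std::condition_variable WaitQueue_;
    int Free_;
};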
2025-03-28T09:03:13.691942Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783358429106412:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:13.692014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationModify [GOOD] Test command err: 2025-03-28T09:02:41.137771Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783245409916117:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:41.142979Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fc0/r3tmp/tmpFwBUxJ/pdisk_1.dat 2025-03-28T09:02:41.611117Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:41.616979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:41.617125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:41.620441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8472, node 1 2025-03-28T09:02:41.775591Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:41.775625Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:41.775633Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:41.775791Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24125 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:42.143016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:42.167361Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:02:42.167414Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:02:42.167428Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:02:42.167848Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-28T09:02:42.167930Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:23627 2025-03-28T09:02:42.170328Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-03-28T09:02:42.182342Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-03-28T09:02:42.182897Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-28T09:02:42.182926Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-28T09:02:42.183097Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-03-28T09:02:42.184805Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-03-28T09:02:42.184909Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-28T09:02:45.808584Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783260863307770:2087];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fc0/r3tmp/tmpxzb86W/pdisk_1.dat 2025-03-28T09:02:45.917488Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:02:46.051278Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:46.057856Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:46.058052Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:46.061150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12522, node 2 2025-03-28T09:02:46.250922Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:46.250942Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:46.250953Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:46.251061Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12925 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:46.696571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:46.715552Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:02:46.718257Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:02:46.718291Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:02:46.718303Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:02:46.718404Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-03-28T09:02:46.718475Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Connect to grpc://localhost:26987 2025-03-28T09:02:46.719930Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-03-28T09:02:46.731886Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Status 14 Service Unavailable 2025-03-28T09:02:46.734977Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-28T09:02:46.735048Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-28T09:02:46.735122Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-03-28T09:02:46.735434Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-03-28T09:02:46.738642Z node 2 :GRPC_CLIENT DEBUG: [517000031108] 
Status 14 Service Unavailable 2025-03-28T09:02:46.738953Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-28T09:02:46.739148Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-28T09:02:47.818829Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-03-28T09:02:47.818935Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-03-28T09:02:47.831162Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2025-03-28T09:02:47.837342Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Status 14 Service Unavailable 2025-03-28T09:02:47.837715Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-28T09:02:47.837750Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-28T09:02:48.823423Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-03-28T09:02:48.823509Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-03-28T09:02:48.823784Z node 2 :GRPC_CLIENT DEBUG: [517000031108] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessServic ... 
25-03-28T09:03:06.718109Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:03:06.718158Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:03:06.718167Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:03:06.718251Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) asking for AccessServiceAuthorization( something.read) 2025-03-28T09:03:06.718522Z node 4 :GRPC_CLIENT DEBUG: [51700002ab88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (BE2EA0D0)" } } } 2025-03-28T09:03:06.721582Z node 4 :GRPC_CLIENT DEBUG: [51700002ab88] Response AuthorizeResponse { results { key: 0 value { resultCode: PERMISSION_DENIED } } } 2025-03-28T09:03:06.721800Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) permission something.read access denied for subject "" 2025-03-28T09:03:06.721850Z node 4 :TICKET_PARSER DEBUG: Ticket **** (BE2EA0D0) () has now permanent error message 'Access Denied' 2025-03-28T09:03:06.722373Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:03:06.722402Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:03:06.722409Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:03:06.722455Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-03-28T09:03:06.722656Z node 4 :GRPC_CLIENT DEBUG: [51700002ab88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "XXXXXXXX" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "XXXXXXXX" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-03-28T09:03:06.725207Z node 4 :GRPC_CLIENT DEBUG: [51700002ab88] Response AuthorizeResponse { results { key: 0 value { resultCode: PERMISSION_DENIED } } } 2025-03-28T09:03:06.725360Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read access denied for subject "" 2025-03-28T09:03:06.725391Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-03-28T09:03:06.725957Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:03:06.725984Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:03:06.725995Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:03:06.726056Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-03-28T09:03:06.726261Z node 4 :GRPC_CLIENT DEBUG: [51700002ab88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "XXXXXXXX" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "XXXXXXXX" } } 
iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-03-28T09:03:06.728215Z node 4 :GRPC_CLIENT DEBUG: [51700002ab88] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-03-28T09:03:06.729057Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-28T09:03:06.729577Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:03:06.729602Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:03:06.729610Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:03:06.729661Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-03-28T09:03:06.729890Z node 4 :GRPC_CLIENT DEBUG: [51700002ab88] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "XXXXXXXX" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-03-28T09:03:06.732137Z node 4 :GRPC_CLIENT DEBUG: [51700002ab88] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-03-28T09:03:06.732439Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-28T09:03:10.680282Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486783366290846173:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:10.680375Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fc0/r3tmp/tmp0LML2w/pdisk_1.dat 2025-03-28T09:03:10.867118Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:10.892443Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:10.892553Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:10.894282Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2577, node 5 2025-03-28T09:03:10.936286Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:10.936311Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:10.936322Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:10.936493Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21216 WaitRootIsUp 'Root'... 
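The node 4 trace above exercises the three per-check outcomes the ticket parser distinguishes: a gRPC status 14 (UNAVAILABLE) becomes a retryable 'Service Unavailable' error, a PERMISSION_DENIED result becomes a permanent 'Access Denied', and a user_account result yields a valid subject of the form <id>@as. Below is a minimal, self-contained C++ sketch of that decision rule; the type and function names are hypothetical stand-ins, not the actual ticket-parser or NebiusAccessService interfaces.

#include <iostream>
#include <string>

// Hypothetical stand-ins for the per-check result seen in the log; the real
// code parses Nebius AccessService protobufs inside the ticket parser actor.
enum class ECheckStatus { Ok, PermissionDenied, Unavailable };

struct TCheckResult {
    ECheckStatus Status;
    std::string UserAccountId; // filled when Status == Ok
};

struct TParsedOutcome {
    bool Retryable = false;
    std::string Error;   // empty on success
    std::string Subject; // e.g. "user1@as" on success
};

// Mirrors the behavior visible in the trace: UNAVAILABLE is retryable,
// PERMISSION_DENIED is permanent, and a user_account id becomes "<id>@as".
TParsedOutcome InterpretCheck(const TCheckResult& result) {
    switch (result.Status) {
        case ECheckStatus::Unavailable:
            return {true, "Service Unavailable", ""};
        case ECheckStatus::PermissionDenied:
            return {false, "Access Denied", ""};
        case ECheckStatus::Ok:
            return {false, "", result.UserAccountId + "@as"};
    }
    return {false, "Unknown", ""};
}

int main() {
    std::cout << InterpretCheck({ECheckStatus::Ok, "user1"}).Subject << "\n";        // user1@as
    std::cout << InterpretCheck({ECheckStatus::PermissionDenied, ""}).Error << "\n"; // Access Denied
    std::cout << InterpretCheck({ECheckStatus::Unavailable, ""}).Retryable << "\n";  // 1
}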
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:11.235767Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:11.250267Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:03:11.250324Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:03:11.250332Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:03:11.250398Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2025-03-28T09:03:11.250441Z node 5 :GRPC_CLIENT DEBUG: [5170000c8008] Connect to grpc://localhost:32339 2025-03-28T09:03:11.251490Z node 5 :GRPC_CLIENT DEBUG: [5170000c8008] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-03-28T09:03:11.266352Z node 5 :GRPC_CLIENT DEBUG: [5170000c8008] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } } 2025-03-28T09:03:11.266592Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-28T09:03:11.269824Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:03:11.269853Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:03:11.269862Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:03:11.269935Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-03-28T09:03:11.270378Z node 5 :GRPC_CLIENT DEBUG: [5170000c8008] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize 
request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { account { user_account { id: "user1" } } } } 0: "OK" 2025-03-28T09:03:11.282332Z node 5 :GRPC_CLIENT DEBUG: [5170000c8008] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { account { user_account { id: "user1" } } } } } 2025-03-28T09:03:11.282810Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as
>> TBlobStorageWardenTest::TestFilterBadSerials [GOOD]
>> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError
>> Cdc::NewAndOldImagesLog[TopicRunner] [GOOD]
>> Cdc::NewAndOldImagesLogDebezium
>> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD]
>> KqpPg::ReadPgArray
>> KqpPg::ReadPgArray [GOOD]
>> KqpPg::TableArrayInsert+useSink
>> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag
>> ResourcePoolsDdl::TestPoolSwitchToLimitedState
------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationModify [GOOD]
Test command err:
2025-03-28T09:02:42.591490Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783248107432743:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:42.624988Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000eb1/r3tmp/tmp7f0KLd/pdisk_1.dat 2025-03-28T09:02:43.713892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:43.713997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:43.714763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:43.721282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:43.727295Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29087, node 1 2025-03-28T09:02:43.943049Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:43.943074Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:43.943080Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:43.943198Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64866 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:44.310099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:44.332374Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:44.335814Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-03-28T09:02:44.336686Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Connect to grpc://localhost:64131 2025-03-28T09:02:44.342586Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-03-28T09:02:44.364871Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Status 14 Service Unavailable 2025-03-28T09:02:44.365270Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-28T09:02:44.365329Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-28T09:02:44.365427Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-03-28T09:02:44.365742Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-03-28T09:02:44.368819Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Status 14 Service Unavailable 2025-03-28T09:02:44.368958Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-28T09:02:44.368994Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-28T09:02:45.614830Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-03-28T09:02:45.614995Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( 
something.read) 2025-03-28T09:02:45.636748Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-03-28T09:02:45.645001Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Status 14 Service Unavailable 2025-03-28T09:02:45.646884Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-28T09:02:45.646929Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-28T09:02:46.622243Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-03-28T09:02:46.622315Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-03-28T09:02:46.653569Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-03-28T09:02:46.663525Z node 1 :GRPC_CLIENT DEBUG: [517000010408] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:02:46.669216Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-03-28T09:02:47.578486Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783248107432743:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:47.578576Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000eb1/r3tmp/tmpcjinYJ/pdisk_1.dat 2025-03-28T09:02:57.799234Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:57.803445Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:57.840067Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:57.840153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:57.848233Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15796, node 2 2025-03-28T09:02:58.039043Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:58.039062Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:58.039069Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:58.039188Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20885 WaitRootIsUp 'Root'... 
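In the run above the parser re-sends an identical BulkAuthorizeRequest roughly once per second ('Refreshing ticket ...') while the stub keeps answering with status 14, and stops as soon as a subject comes back. A minimal sketch of such a retry-on-retryable loop, assuming a hypothetical authorize callable and an illustrative fixed delay and attempt cap; the real scheduling lives inside the ticket-parser actor and is not a simple sleep loop.

#include <chrono>
#include <functional>
#include <string>
#include <thread>

// Hypothetical status type: 'Retryable' corresponds to gRPC status 14
// (UNAVAILABLE) in the trace; 'Ok' to a BulkAuthorizeResponse with a subject.
struct TAuthStatus {
    bool Ok = false;
    bool Retryable = false;
    std::string Subject;
};

// Re-issue the request on retryable failures, as the parser does when it
// logs "has now retryable error message 'Service Unavailable'".
TAuthStatus AuthorizeWithRetry(const std::function<TAuthStatus()>& authorize,
                               int maxAttempts = 5,
                               std::chrono::milliseconds delay = std::chrono::milliseconds(1000)) {
    TAuthStatus last;
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        last = authorize();
        if (last.Ok || !last.Retryable) {
            return last; // success, or a permanent error such as PERMISSION_DENIED
        }
        std::this_thread::sleep_for(delay); // the trace shows ~1s between refreshes
    }
    return last;
}

int main() {
    int calls = 0;
    TAuthStatus s = AuthorizeWithRetry([&calls] {
        ++calls;
        // Fail twice with a retryable error, then succeed, mimicking the trace.
        if (calls < 3) {
            return TAuthStatus{false, true, ""};
        }
        return TAuthStatus{true, false, "user1@as"};
    }, 5, std::chrono::milliseconds(10));
    return s.Ok ? 0 : 1;
}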
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:58.454152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:58.467720Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:02:58.483742Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-03-28T09:02:58.483793Z node 2 :GRPC_CLIENT DEBUG: [5170000b2908] Connect to grpc://localhost:3643 2025-03-28T09:02:58.484718Z node 2 :GRPC_CLIENT DEBUG: [5170000b2908] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-03-28T09:02:58.524244Z node 2 :GRPC_CLIENT DEBUG: [5170000b2908] Status 14 Service Unavailable 2025-03-28T09:02:58.524502Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-28T09:02:58.524542Z node 2 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-28T09:02:58.524627Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2025-03-28T09:02:58.524971Z node 2 :GRPC_CLIENT DEBUG: [5170000b2908] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something. ... ode(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29615, node 4 2025-03-28T09:03:06.875393Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:06.875413Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:06.875420Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:06.875552Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15985 WaitRootIsUp 'Root'... 
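'WaitRootIsUp' in these traces is a readiness poll: the test client repeats TClient::Ls on the root path until the response reports SUCCESS, then prints WaitRootIsUp 'Root' success. A sketch of that pattern, assuming a hypothetical ls callable; the timeout and poll interval are illustrative, not the framework's actual values.

#include <chrono>
#include <functional>
#include <string>
#include <thread>

// Hypothetical slice of the Ls response: 'Success' corresponds to
// "StatusCode: SUCCESS" in the dumps above.
struct TLsResponse {
    bool Success = false;
    std::string PathType; // e.g. "EPathTypeDir" for the root entry
};

// Poll until the scheme entry is visible, as WaitRootIsUp does before the
// test proceeds.
bool WaitRootIsUp(const std::function<TLsResponse(const std::string&)>& ls,
                  const std::string& path = "/Root",
                  std::chrono::seconds timeout = std::chrono::seconds(10)) {
    const auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        if (ls(path).Success) {
            return true; // the log then prints "WaitRootIsUp 'Root' success."
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }
    return false;
}

int main() {
    int attempts = 0;
    bool up = WaitRootIsUp([&attempts](const std::string&) {
        // Become visible on the second poll, as a stand-in for server startup.
        return TLsResponse{++attempts >= 2, "EPathTypeDir"};
    });
    return up ? 0 : 1;
}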
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:07.169984Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:07.179340Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:03:07.182624Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:03:07.182656Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:03:07.182664Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:03:07.182719Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:03:07.182780Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(somewhere.sleep) 2025-03-28T09:03:07.182809Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.list) 2025-03-28T09:03:07.182837Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-03-28T09:03:07.182884Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.eat) 2025-03-28T09:03:07.182949Z node 4 :GRPC_CLIENT DEBUG: [517000044188] Connect to grpc://localhost:10032 2025-03-28T09:03:07.186273Z node 4 :GRPC_CLIENT DEBUG: [517000044188] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:03:07.187550Z node 4 :GRPC_CLIENT DEBUG: [517000044188] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:03:07.187772Z node 4 :GRPC_CLIENT DEBUG: [517000044188] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.list" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:03:07.187870Z node 4 :GRPC_CLIENT DEBUG: [517000044188] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 
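Unlike the bulk API seen earlier, this test fans out one AuthorizeRequest per permission (something.read, somewhere.sleep, something.list, something.write, something.eat) and records each outcome independently; in this particular run only something.write is granted, yet the ticket still ends up with a valid token. A sketch of that fan-out with hypothetical types; the stubbed grant rule below reproduces this trace only and is not a general claim about the ticket parser.

#include <functional>
#include <iostream>
#include <map>
#include <string>
#include <vector>

// Hypothetical per-permission outcome; the real code parses
// AuthorizeResponse protobufs from the access service.
struct TPermissionOutcome {
    bool Granted = false;
    std::string SubjectId; // e.g. "user1" when granted
};

using TAuthorizeFn = std::function<TPermissionOutcome(const std::string& permission)>;

// One RPC per permission, as in the five Request AuthorizeRequest records above.
std::map<std::string, TPermissionOutcome>
AuthorizeEach(const std::vector<std::string>& permissions, const TAuthorizeFn& authorize) {
    std::map<std::string, TPermissionOutcome> outcomes;
    for (const auto& permission : permissions) {
        outcomes[permission] = authorize(permission);
    }
    return outcomes;
}

int main() {
    auto outcomes = AuthorizeEach(
        {"something.read", "somewhere.sleep", "something.list",
         "something.write", "something.eat"},
        [](const std::string& permission) {
            // In this trace only something.write is granted for the ticket.
            return permission == "something.write"
                ? TPermissionOutcome{true, "user1"}
                : TPermissionOutcome{false, ""};
        });
    for (const auto& [permission, outcome] : outcomes) {
        std::cout << permission << ": "
                  << (outcome.Granted ? outcome.SubjectId + "@as" : "Access Denied")
                  << "\n";
    }
}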
2025-03-28T09:03:07.188019Z node 4 :GRPC_CLIENT DEBUG: [517000044188] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.eat" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:03:07.206880Z node 4 :GRPC_CLIENT DEBUG: [517000044188] Status 16 Access Denied 2025-03-28T09:03:07.207383Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission somewhere.sleep now has a permanent error "Access Denied" retryable:0 2025-03-28T09:03:07.222783Z node 4 :GRPC_CLIENT DEBUG: [517000044188] Status 16 Access Denied 2025-03-28T09:03:07.222938Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.eat now has a permanent error "Access Denied" retryable:0 2025-03-28T09:03:07.228290Z node 4 :GRPC_CLIENT DEBUG: [517000044188] Status 16 Access Denied 2025-03-28T09:03:07.228610Z node 4 :GRPC_CLIENT DEBUG: [517000044188] Status 16 Access Denied 2025-03-28T09:03:07.229296Z node 4 :GRPC_CLIENT DEBUG: [517000044188] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:03:07.229479Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2025-03-28T09:03:07.229523Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.list now has a permanent error "Access Denied" retryable:0 2025-03-28T09:03:07.229570Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-03-28T09:03:07.229601Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-03-28T09:03:07.254117Z node 4 :GRPC_CLIENT DEBUG: [5170000dfd88] Connect to grpc://localhost:1323 2025-03-28T09:03:07.255277Z node 4 :GRPC_CLIENT DEBUG: [5170000dfd88] Request GetUserAccountRequest { user_account_id: "user1" } 2025-03-28T09:03:07.276372Z node 4 :GRPC_CLIENT DEBUG: [5170000dfd88] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-03-28T09:03:07.279209Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-03-28T09:03:11.159062Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486783373289586887:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:11.171777Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000eb1/r3tmp/tmp3X0gEw/pdisk_1.dat 2025-03-28T09:03:11.344045Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:11.378002Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:11.378119Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:11.382065Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5322, node 5 2025-03-28T09:03:11.509218Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:11.509247Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:11.509257Z node 5 :NET_CLASSIFIER WARN: failed to initialize from 
file: (empty maybe) 2025-03-28T09:03:11.509412Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:11.972724Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:11.979652Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:03:11.984992Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:03:11.985027Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:03:11.985037Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:03:11.985139Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2025-03-28T09:03:11.985198Z node 5 :GRPC_CLIENT DEBUG: [5170000b8788] Connect to grpc://localhost:17003 2025-03-28T09:03:11.986407Z node 5 :GRPC_CLIENT DEBUG: [5170000b8788] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2025-03-28T09:03:11.997249Z node 5 :GRPC_CLIENT DEBUG: [5170000b8788] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:03:12.001787Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-28T09:03:12.006661Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:03:12.006719Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:03:12.006729Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:03:12.006843Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2025-03-28T09:03:12.007134Z node 5 :GRPC_CLIENT DEBUG: [5170000b8788] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } 
resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2025-03-28T09:03:12.010608Z node 5 :GRPC_CLIENT DEBUG: [5170000b8788] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:03:12.010882Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as
>> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits
>> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop
>> BindQueue::Basic [GOOD]
>> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD]
>> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD]
Test command err:
==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:131:2155] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:110:2142] 2025-03-28T09:00:58.118459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:00:58.118579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:58.118625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:00:58.118665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:00:58.118766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:00:58.118803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:00:58.118867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:58.118998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:00:58.119403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:00:58.292397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse"
AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:00:58.292469Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:131:2155] sender: [1:168:2058] recipient: [1:15:2062] 2025-03-28T09:00:58.314110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:00:58.314325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:00:58.314519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:00:58.330518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:00:58.330765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:00:58.331423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:58.331691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:58.336605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:58.338944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:58.339010Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:58.339256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:00:58.339308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:58.339352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:00:58.341813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:216:2058] recipient: [1:214:2215] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:216:2058] recipient: [1:214:2215] Leader for TabletID 72057594037968897 is [1:220:2219] sender: [1:221:2058] recipient: [1:214:2215] 2025-03-28T09:00:58.348171Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:241:2058] recipient: [1:15:2062] 2025-03-28T09:01:00.126388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:01:00.133836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:00.135258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:01:00.137816Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:01:00.138514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:00.207904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:01:00.208070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:01:00.208269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:00.208329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:01:00.208378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:01:00.208435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:01:00.219696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:00.219762Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:01:00.219801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:01:00.227609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:00.227675Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:00.227727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:01:00.227770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:01:00.231075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:01:00.249415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:01:00.250123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:256:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:01:00.252292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:01:00.252437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:01:00.252496Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:01:00.252794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:01:00.252852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:01:00.253023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:01:00.253149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:01:00.262739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:01:00.263086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:01:00.264067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:01:00.264305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:01:00.277700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:01:00.278298Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:01:00.282520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:01:00.282863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:01:00.283478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025 ... 
LAT_TX_SCHEMESHARD INFO: TMoveTable TDeleteTableBarrier operationId: 1003:0 HandleReply TEvPrivate:TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-03-28T09:03:16.160867Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:03:16.161019Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 137 -> 129 2025-03-28T09:03:16.161103Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:03:16.161156Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:03:16.161431Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:03:16.164280Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:03:16.165002Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:03:16.165287Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:03:16.165709Z node 62 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:16.165749Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:16.165913Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1003, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:03:16.166059Z node 62 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:16.166096Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:203:2205], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2025-03-28T09:03:16.166136Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [62:203:2205], at schemeshard: 72057594046678944, txId: 1003, path id: 3 2025-03-28T09:03:16.166372Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:03:16.166420Z node 62 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:03:16.166500Z node 62 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:03:16.166533Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:03:16.166569Z node 62 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2025-03-28T09:03:16.167533Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-28T09:03:16.167650Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-28T09:03:16.167682Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-03-28T09:03:16.167719Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-03-28T09:03:16.167758Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-28T09:03:16.168675Z node 62 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-28T09:03:16.168764Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-28T09:03:16.168793Z node 62 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-03-28T09:03:16.168822Z node 62 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-28T09:03:16.168858Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:03:16.168927Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2025-03-28T09:03:16.172544Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:03:16.172592Z node 62 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:16.172637Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 1003:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-28T09:03:16.172705Z node 62 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-03-28T09:03:16.172729Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-28T09:03:16.172760Z node 62 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-03-28T09:03:16.172783Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-28T09:03:16.172815Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-03-28T09:03:16.172857Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-28T09:03:16.172893Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-03-28T09:03:16.172917Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-03-28T09:03:16.173009Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 
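The ack exchange above is a publish-and-wait pattern: the transaction records an expected schemeboard version for every affected path, each TEvUpdateAck retires the matching pending entry once the acknowledged version reaches it, and the operation counts as published when nothing remains pending. Dropped paths, like the moved-away source table here, are published at version 18446744073709551615, i.e. the uint64 maximum. A self-contained sketch of that bookkeeping with hypothetical names:

#include <cstdint>
#include <iostream>
#include <map>

// Hypothetical bookkeeping for "TTxAckPublishToSchemeBoard": the real
// schemeshard keys pending publications by owner, local path id, and txId.
class TPendingPublications {
public:
    void Expect(uint64_t pathId, uint64_t version) {
        Pending_[pathId] = version;
    }

    // Mirrors an incoming TEvUpdateAck: the entry is retired once the
    // acknowledged version reaches the expected one.
    void Ack(uint64_t pathId, uint64_t version) {
        auto it = Pending_.find(pathId);
        if (it != Pending_.end() && version >= it->second) {
            Pending_.erase(it); // "remove publishing for pathId ..."
        }
    }

    bool IsPublished() const { return Pending_.empty(); }

private:
    std::map<uint64_t, uint64_t> Pending_; // pathId -> expected version
};

int main() {
    constexpr uint64_t DroppedPathVersion = 18446744073709551615ull; // uint64 max
    TPendingPublications tx1003;
    tx1003.Expect(1, 11);                 // root path, "Version: 11" in the log
    tx1003.Expect(3, DroppedPathVersion); // dropped source table of the move
    tx1003.Ack(1, 11);
    tx1003.Ack(3, DroppedPathVersion);
    std::cout << "is published: " << std::boolalpha << tx1003.IsPublished() << "\n";
}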
2025-03-28T09:03:16.173037Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:03:16.173509Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:03:16.173540Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:03:16.173583Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:03:16.174046Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:03:16.175188Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:03:16.176957Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-03-28T09:03:16.177279Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-03-28T09:03:16.177320Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-03-28T09:03:16.177619Z node 62 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-03-28T09:03:16.177711Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-03-28T09:03:16.177746Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [62:471:2444] TestWaitNotification: OK eventTxId 1003 2025-03-28T09:03:16.178237Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableMoved" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:16.178468Z node 62 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableMoved" took 247us result status StatusSuccess 2025-03-28T09:03:16.178935Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableMoved" PathDescription { Self { Name: "TTLEnabledTableMoved" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TTLEnabledTableMoved" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 
3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpWorkloadService::TestQueueSizeSimple >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] Test command err: 2025-03-28T09:03:16.456831Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:16.458733Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:16.459202Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:16.460253Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:16.461186Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:16.461277Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:16.461324Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001630/r3tmp/tmps14aZ6/pdisk_1.dat 2025-03-28T09:03:17.040113Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] bootstrap ActorId# [1:480:2462] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1298:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:03:17.040320Z node 1 
:BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1298:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.040382Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1298:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.040422Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1298:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.040452Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1298:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.040479Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1298:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.040504Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Id# [72057594037932033:2:8:0:0:1298:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.040541Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] restore Id# [72057594037932033:2:8:0:0:1298:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:03:17.040623Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1298:1] Marker# BPG33 2025-03-28T09:03:17.040671Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1298:1] Marker# BPG32 2025-03-28T09:03:17.040716Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1298:2] Marker# BPG33 2025-03-28T09:03:17.040745Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1298:2] Marker# BPG32 2025-03-28T09:03:17.040776Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1298:3] Marker# BPG33 2025-03-28T09:03:17.040803Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1298:3] Marker# BPG32 2025-03-28T09:03:17.041009Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:46:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1298:3] FDS# 1298 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:17.041084Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1298:2] FDS# 1298 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:17.041133Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1298:1] FDS# 1298 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:17.055728Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1298:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90220 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-28T09:03:17.055942Z node 1 
:BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1298:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90220 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-03-28T09:03:17.056038Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1298:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90220 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-03-28T09:03:17.056107Z node 1 :BS_PROXY_PUT DEBUG: [084d0c3a19bee089] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1298:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-03-28T09:03:17.056173Z node 1 :BS_PROXY_PUT INFO: [084d0c3a19bee089] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1298:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:03:17.056357Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.264 sample PartId# [72057594037932033:2:8:0:0:1298:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.265 sample PartId# [72057594037932033:2:8:0:0:1298:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.266 sample PartId# [72057594037932033:2:8:0:0:1298:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 15.907 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 16.076 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 16.167 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-03-28T09:03:17.115492Z node 1 :BS_PROXY_PUT INFO: [b6b2c6548553d7a5] bootstrap ActorId# [1:525:2499] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:03:17.115680Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.115730Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.115762Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.115794Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.115823Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.115854Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.115899Z node 1 :BS_PROXY_PUT DEBUG: 
[b6b2c6548553d7a5] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:03:17.115977Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG33 2025-03-28T09:03:17.116028Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2025-03-28T09:03:17.116076Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2025-03-28T09:03:17.116104Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2025-03-28T09:03:17.116139Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2025-03-28T09:03:17.116166Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2025-03-28T09:03:17.116346Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:39:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:17.116418Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:17.116469Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:53:2097] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:1] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:17.126738Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-28T09:03:17.127030Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81763 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-03-28T09:03:17.127127Z node 1 :BS_PROXY_PUT DEBUG: [b6b2c6548553d7a5] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:224:2] {M ... 
XY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:10:0:0:238:1] Marker# BPG32 2025-03-28T09:03:17.163823Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:10:0:0:238:2] Marker# BPG33 2025-03-28T09:03:17.163855Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:10:0:0:238:2] Marker# BPG32 2025-03-28T09:03:17.163886Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:10:0:0:238:3] Marker# BPG33 2025-03-28T09:03:17.163915Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:10:0:0:238:3] Marker# BPG32 2025-03-28T09:03:17.164084Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:60:2104] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:10:0:0:238:3] FDS# 238 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:17.164154Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:53:2097] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:10:0:0:238:2] FDS# 238 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:17.164233Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:46:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:10:0:0:238:1] FDS# 238 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:17.166625Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:10:0:0:238:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 11 } Cost# 81874 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 12 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-03-28T09:03:17.166851Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:10:0:0:238:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81874 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-03-28T09:03:17.166939Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:10:0:0:238:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81874 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-03-28T09:03:17.167048Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Result# TEvPutResult {Id# [72057594037932033:2:10:0:0:238:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-03-28T09:03:17.167115Z node 1 :BS_PROXY_PUT INFO: [8d27cf9df52bfb78] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:10:0:0:238:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:03:17.167295Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ 
TEvVPut{ TimestampMs# 1.216 sample PartId# [72057594037932033:2:10:0:0:238:3] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.217 sample PartId# [72057594037932033:2:10:0:0:238:2] QueryCount# 1 VDiskId# [2000000:1:0:2:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.217 sample PartId# [72057594037932033:2:10:0:0:238:1] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 3.675 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.835 VDiskId# [2000000:1:0:2:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 3.938 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-03-28T09:03:17.168604Z node 1 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:03:17.168651Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:03:17.170466Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:530:2503] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:17.170599Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:531:2504] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:17.172024Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:532:2505] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:17.172182Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:533:2506] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:17.172291Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:534:2507] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:17.172392Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:535:2508] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:17.172499Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Actor# [1:526:2500] Create Queue# [1:536:2509] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:17.172524Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:03:17.173273Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:17.173379Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:17.173451Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:17.173524Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 
MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:17.173580Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:17.173629Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:17.173678Z node 1 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:17.173703Z node 1 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-03-28T09:03:17.173738Z node 1 :BS_PROXY INFO: Group# 2181038082 SetStateWork Marker# DSP15 2025-03-28T09:03:17.173863Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] bootstrap ActorId# [1:537:2510] Group# 2181038082 TabletId# 1234 Generation# 1 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-03-28T09:03:17.173909Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] Sending TEvVBlock Tablet# 1234 Generation# 1 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-03-28T09:03:17.174141Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:530:2503] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 1 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 14728872648759112291 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-03-28T09:03:17.175341Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-03-28T09:03:17.175412Z node 1 :BS_PROXY_BLOCK DEBUG: [a55b41de52eb2a08] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2025-03-28T09:03:17.175742Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:530:2503] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 2025-03-28T09:03:17.179630Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2025-03-28T09:03:17.180320Z node 1 :BS_PROXY_BLOCK DEBUG: [bba3bffd2e286f4b] bootstrap ActorId# [1:539:2512] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-03-28T09:03:17.180377Z node 1 :BS_PROXY_BLOCK DEBUG: [bba3bffd2e286f4b] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-03-28T09:03:17.180533Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:530:2503] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 12253879319599393603 MsgQoS { 
ExtQueueId: PutTabletLog } cookie# 0 2025-03-28T09:03:17.181424Z node 1 :BS_PROXY_BLOCK DEBUG: [bba3bffd2e286f4b] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-03-28T09:03:17.181494Z node 1 :BS_PROXY_BLOCK DEBUG: [bba3bffd2e286f4b] Result# TEvBlockResult {Status# OK} Marker# DSPB04 2025-03-28T09:03:17.181859Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] bootstrap ActorId# [1:540:2513] Group# 2181038082 TabletId# 1234 Generation# 4 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-03-28T09:03:17.181909Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] Sending TEvVBlock Tablet# 1234 Generation# 4 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-03-28T09:03:17.182061Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:530:2503] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 4 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 15003649632470115375 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-03-28T09:03:17.182866Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-03-28T09:03:17.182915Z node 1 :BS_PROXY_BLOCK DEBUG: [f913878b3da83702] Result# TEvBlockResult {Status# OK} Marker# DSPB04 >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] Test command err: Delete nodeId# 82 Disable nodeId# 90 Enable nodeId# 90 Add nodeId# 101 Delete nodeId# 81 Disable nodeId# 90 Add nodeId# 102 Pick Enable nodeId# 90 Delete nodeId# 36 Add nodeId# 103 Pick Delete nodeId# 56 Add nodeId# 104 Pick Disable nodeId# 2 Delete nodeId# 1 Delete nodeId# 32 Delete nodeId# 23 Pick Add nodeId# 105 Enable nodeId# 2 Pick Delete nodeId# 7 Add nodeId# 106 Add nodeId# 107 Disable nodeId# 95 Enable nodeId# 95 Delete nodeId# 15 Disable nodeId# 14 Pick Delete nodeId# 25 Add nodeId# 108 Disable nodeId# 34 Add nodeId# 109 Enable nodeId# 34 Disable nodeId# 6 Delete nodeId# 9 Delete nodeId# 24 Disable nodeId# 94 Disable nodeId# 44 Delete nodeId# 101 Delete nodeId# 4 Enable nodeId# 6 Add nodeId# 110 Disable nodeId# 75 Disable nodeId# 16 Add nodeId# 111 Add nodeId# 112 Delete nodeId# 62 Enable nodeId# 16 Pick Delete nodeId# 93 Pick Disable nodeId# 80 Pick Pick Disable nodeId# 31 Add nodeId# 113 Delete nodeId# 53 Add nodeId# 114 Add nodeId# 115 Add nodeId# 116 Disable nodeId# 70 Add nodeId# 117 Pick Disable nodeId# 104 Disable nodeId# 102 Delete nodeId# 46 Enable nodeId# 14 Enable nodeId# 80 Delete nodeId# 35 Pick Delete nodeId# 100 Delete nodeId# 34 Add nodeId# 118 Delete nodeId# 88 Pick Pick Disable nodeId# 8 Enable nodeId# 70 Delete nodeId# 76 Pick Add nodeId# 119 Disable nodeId# 2 Enable nodeId# 44 Add nodeId# 120 Pick Disable nodeId# 109 Pick Pick Enable nodeId# 31 Delete nodeId# 51 Pick Add nodeId# 121 Pick Add nodeId# 122 Pick Delete nodeId# 89 Enable nodeId# 75 Disable nodeId# 98 Disable nodeId# 87 Delete nodeId# 72 Pick Delete nodeId# 75 Pick Delete nodeId# 118 Enable nodeId# 8 Enable nodeId# 104 Disable nodeId# 59 Add nodeId# 123 Pick Enable nodeId# 87 Pick Enable nodeId# 109 Pick Disable 
nodeId# 43 Delete nodeId# 71 Add nodeId# 124 Add nodeId# 125 Delete nodeId# 20 Enable nodeId# 43 Disable nodeId# 19 Pick Pick Disable nodeId# 125 Delete nodeId# 99 Disable nodeId# 6 Disable nodeId# 55 Enable nodeId# 6 Add nodeId# 126 Enable nodeId# 2 Pick Disable nodeId# 124 Pick Enable nodeId# 19 Delete nodeId# 40 Add nodeId# 127 Pick Delete nodeId# 117 Add nodeId# 128 Enable nodeId# 102 Enable nodeId# 124 Enable nodeId# 125 Disable nodeId# 44 Disable nodeId# 73 Enable nodeId# 59 Add nodeId# 129 Add nodeId# 130 Disable nodeId# 128 Pick Disable nodeId# 42 Delete nodeId# 21 Add nodeId# 131 Add nodeId# 132 Add nodeId# 133 Pick Disable nodeId# 17 Disable nodeId# 112 Pick Enable nodeId# 17 Pick Disable nodeId# 110 Enable nodeId# 128 Disable nodeId# 8 Add nodeId# 134 Enable nodeId# 42 Enable nodeId# 94 Pick Pick Delete nodeId# 102 Disable nodeId# 13 Enable nodeId# 55 Pick Enable nodeId# 8 Pick Disable nodeId# 74 Disable nodeId# 59 Enable nodeId# 13 Delete nodeId# 120 Pick Delete nodeId# 124 Enable nodeId# 98 Delete nodeId# 109 Delete nodeId# 14 Pick Disable nodeId# 85 Disable nodeId# 65 Enable nodeId# 112 Pick Pick Delete nodeId# 105 Add nodeId# 135 Delete nodeId# 126 Enable nodeId# 59 Pick Add nodeId# 136 Delete nodeId# 79 Pick Enable nodeId# 74 Enable nodeId# 85 Enable nodeId# 110 Delete nodeId# 30 Enable nodeId# 44 Add nodeId# 137 Disable nodeId# 91 Enable nodeId# 73 Pick Delete nodeId# 115 Delete nodeId# 128 Delete nodeId# 3 Delete nodeId# 129 Delete nodeId# 113 Enable nodeId# 91 Pick Enable nodeId# 65 Add nodeId# 138 Disable nodeId# 26 Add nodeId# 139 Disable nodeId# 85 Enable nodeId# 85 Delete nodeId# 37 Add nodeId# 140 Add nodeId# 141 Add nodeId# 142 Pick Delete nodeId# 131 Add nodeId# 143 Disable nodeId# 47 Pick Add nodeId# 144 Pick Add nodeId# 145 Disable nodeId# 133 Pick Enable nodeId# 26 Delete nodeId# 19 Pick Disable nodeId# 140 Delete nodeId# 139 Delete nodeId# 38 Delete nodeId# 86 Add nodeId# 146 Add nodeId# 147 Enable nodeId# 47 Delete nodeId# 94 Add nodeId# 148 Disable nodeId# 64 Disable nodeId# 91 Disable nodeId# 47 Add nodeId# 149 Enable nodeId# 140 Add nodeId# 150 Pick Delete nodeId# 41 Add nodeId# 151 Disable nodeId# 59 Disable nodeId# 143 Enable nodeId# 47 Pick Disable nodeId# 80 Disable nodeId# 11 Add nodeId# 152 Delete nodeId# 66 Delete nodeId# 151 Enable nodeId# 64 Enable nodeId# 59 Enable nodeId# 80 Pick Pick Disable nodeId# 65 Enable nodeId# 65 Delete nodeId# 22 Pick Delete nodeId# 130 Disable nodeId# 50 Disable nodeId# 73 Delete nodeId# 146 Disable nodeId# 144 Pick Delete nodeId# 77 Disable nodeId# 47 Delete nodeId# 58 Delete nodeId# 132 Delete nodeId# 92 Disable nodeId# 122 Add nodeId# 153 Disable nodeId# 6 Disable nodeId# 61 Delete nodeId# 43 Delete nodeId# 16 Disable nodeId# 59 Delete nodeId# 61 Add nodeId# 154 Delete nodeId# 122 Enable nodeId# 59 Disable nodeId# 28 Add nodeId# 155 Add nodeId# 156 Enable nodeId# 28 Enable nodeId# 6 Disable nodeId# 64 Enable nodeId# 73 Pick Delete nodeId# 18 Disable nodeId# 149 Delete nodeId# 8 Disable nodeId# 6 Enable nodeId# 6 Enable nodeId# 64 Disable nodeId# 33 Delete nodeId# 133 Pick Enable nodeId# 50 Enable nodeId# 33 Enable nodeId# 47 Add nodeId# 157 Add nodeId# 158 Pick Add nodeId# 159 Add nodeId# 160 Enable nodeId# 144 Disable nodeId# 2 Add nodeId# 161 Pick Delete nodeId# 31 Add nodeId# 162 Disable nodeId# 48 Add nodeId# 163 Delete nodeId# 13 Delete nodeId# 103 Add nodeId# 164 Delete nodeId# 143 Disable nodeId# 68 Add nodeId# 165 Pick Disable nodeId# 152 Delete nodeId# 153 Pick Pick Pick Disable nodeId# 135 Disable nodeId# 
59 Add nodeId# 166 Enable nodeId# 59 Delete nodeId# 57 Enable nodeId# 48 Enable nodeId# 152 Disable nodeId# 39 Pick Delete nodeId# 140 Delete nodeId# 119 Delete nodeId# 145 Enable nodeId# 135 Enable nodeId# 11 Enable nodeId# 91 Add nodeId# 167 Add nodeId# 168 Enable nodeId# 2 Add nodeId# 169 Add nodeId# 170 Disable nodeId# 136 Add nodeId# 171 Delete nodeId# 64 Disable nodeId# 70 Enable nodeId# 136 Enable nodeId# 149 Disable nodeId# 2 Disable nodeId# 12 Disable nodeId# 134 Disable nodeId# 50 Delete nodeId# 52 Pick Delete nodeId# 78 Enable nodeId# 134 Enable nodeId# 39 Disable nodeId# 125 Delete nodeId# 45 Pick Pick Add nodeId# 172 Add nodeId# 173 Disable nodeId# 49 Delete nodeId# 69 Disable nodeId# 111 Disable nodeId# 127 Pick Add nodeId# 174 Disable nodeId# 154 Disable nodeId# 134 Delete nodeId# 39 Enable nodeId# 125 Enable nodeId# 154 Pick Enable nodeId# 70 Delete nodeId# 148 Enable nodeId# 2 Enable nodeId# 127 Pick Disable nodeId# 127 Pick Enable nodeId# 134 Disable nodeId# 158 Enable nodeId# 68 Add nodeId# 175 Pick Enable nodeId# 127 Disable nodeId# 98 Delete nodeId# 116 Delete nodeId# 5 Disable nodeId# 152 Delete nodeId# 174 Pick Add nodeId# 176 Pick Pick Pick Add nodeId# 177 Add nodeId# 178 Disable nodeId# 60 Enable nodeId# 12 Enable nodeId# 50 Disable nodeId# 95 Disable nodeId# 156 Delete nodeId# 112 Pick Add nodeId# 179 Enable nodeId# 152 Delete nodeId# 158 Disable nodeId# 147 Add nodeId# 180 Disable nodeId# 65 Add nodeId# 181 Delete nodeId# 83 Delete nodeId# 177 Pick Disable nodeId# 67 Pick Pick Disable nodeId# 110 Delete nodeId# 156 Delete nodeId# 150 Pick Add nodeId# 182 Pick Enable nodeId# 110 Enable nodeId# 65 Disable nodeId# 12 Enable nodeId# 147 Enable nodeId# 98 Pick Disable nodeId# 2 Delete nodeId# 111 Add nodeId# 183 Delete nodeId# 167 Delete nodeId# 137 Delete nodeId# 42 Enable nodeId# 60 Add nodeId# 184 Pick Disable nodeId# 165 Delete nodeId# 183 Enable nodeId# 165 Delete nodeId# 67 Delete nodeId# 6 Add nodeId# 185 Pick Pick Delete nodeId# 138 Delete nodeId# 28 Delete nodeId# 185 Enable nodeId# 49 Pick Disable nodeId# 161 Add nodeId# 186 Add nodeId# 187 Add nodeId# 188 Enable nodeId# 2 Enable nodeId# 95 Pick Pick Pick Enable nodeId# 161 Pick Disable nodeId# 176 Add nodeId# 189 Add nodeId# 190 Delete nodeId# 157 Enable nodeId# 176 Add nodeId# 191 Pick Disable nodeId# 87 Disable nodeId# 135 Enable nodeId# 12 Pick Pick Enable nodeId# 135 Pick Enable nodeId# 87 Delete nodeId# 190 Disable nodeId# 47 Pick Disable nodeId# 186 Add nodeId# 192 Enable nodeId# 186 Disable nodeId# 181 Delete nodeId# 168 Add nodeId# 193 Pick Delete nodeId# 134 Add nodeId# 194 Pick Enable nodeId# 181 Pick Delete nodeId# 55 Delete nodeId# 125 Delete nodeId# 96 Delete nodeId# 181 Add nodeId# 195 Disable nodeId# 178 Add nodeId# 196 Enable nodeId# 178 Delete nodeId# 65 Add nodeId# 197 Enable nodeId# 47 Add nodeId# 198 Add nodeId# 199 Delete nodeId# 91 Add nodeId# 200 Add nodeId# 201 Pick Add nodeId# 202 Disable nodeId# 149 Add nodeId# 203 Enable nodeId# 149 Add nodeId# 204 Add nodeId# 205 Pick Disable nodeId# 48 Enable nodeId# 48 Pick Add nodeId# 206 Pick Disable nodeId# 184 Add nodeId# 207 Enable nodeId# 184 Pick Add nodeId# 208 Disable nodeId# 162 Pick Delete nodeId# 27 Pick Add nodeId# 209 Delete nodeId# 159 Pick Delete nodeId# 29 Pick Disable nodeId# 152 Pick Delete nodeId# 149 Delete nodeId# 163 Delete nodeId# 10 Disable nodeId# 178 Delete nodeId# 152 Delete nodeId# 165 Add nodeId# 210 Pick Pick Disable nodeId# 198 Enable nodeId# 162 Delete nodeId# 17 Enable nodeId# 178 Pick Enable nodeId# 198 Add 
nodeId# 211 Pick Delete nodeId# 127 Delete nodeId# 178 Delete nodeId# 162 Add nodeId# 212 Pick Add nodeId# 213 Add nodeId# 214 Disable nodeId# 206 Delete nodeId# 155 Delete nodeId# 208 Add nodeId# 215 Enable nodeId# 206 Pick Add nodeId# 216 Add nodeId# 217 Add nodeId# 218 Add nodeId# 219 Pick Pick Pick Add nodeId# 220 Pick Pick Disable nodeId# 169 Add nodeId# 221 Add nodeId# 222 Pick Disable nodeId# 164 Enable nodeId# 169 Add nodeId# 223 Delete nodeId# 207 Pick Delete nodeId# 26 Enable nodeId# 164 Add nodeId# 224 Delete nodeId# 213 Delete nodeId# 218 Delete nodeId# 50 Delete nodeId# 221 Pick Pick Pick Disable nodeId# 104 Enable nodeId# 104 Delete nodeId# 135 Delete nodeId# 192 Pick Delete nodeId# 44 Add nodeId# 225 Pick Add nodeId# 226 Delete nodeId# 107 Pick Pick Delete nodeId# 123 Add nodeId# 227 Disable nodeId# 211 Disable nodeId# 184 Enable nodeId# 184 Enable nodeId# 211 Add nodeId# 228 Delete nodeId# 193 Pick Delete nodeId# 217 Pick Add nodeId# 229 Disable nodeId# 98 Disable nodeId# 74 Pick Add nodeId# 230 Pick Delete nodeId# 229 Add nodeId# 231 Enable nodeId# 98 Add nodeId# 232 Disable nodeId# 85 Delete nodeId# 182 Pick Add nodeId# 233 Add nodeId# 234 Pick Add nodeId# 235 Add nodeId# 236 Enable nodeId# 74 Enable nodeId# 85 Pick Delete nodeId# 186 Delete nodeId# 142 Delete nodeId# 164 Delete nodeId# 232 Add nodeId# 237 Delete nodeId# 226 Disable nodeId# 173 Add nodeId# 238 Delete nodeId# 49 Pick Disable nodeId# 231 Enable nodeId# 173 Disable nodeId# 201 Disable nodeId# 188 Add nodeId# 239 Delete nodeId# 197 Pick Disable nodeId# 209 Delete nodeId# 147 Delete nodeId# 90 Delete nodeId# 12 Add nodeId# 240 Enable nodeId# 188 Pick Disable nodeId# 54 Pick Disable nodeId# 60 Enable nodeId# 201 Add nodeId# 241 Add nodeId# 242 Delete nodeId# 170 Add nodeId# 243 Add nodeId# 244 Pick Delete nodeId# 235 Pick Disable nodeId# 194 Enable nodeId# 60 Delete nodeId# 74 Delete nodeId# 97 Pick Enable nodeId# 209 Add nodeId# 245 Disable nodeId# 238 Disable nodeId# 211 Delete nodeId# 200 Disable nodeId# 173 Enable nodeId# 54 Delete nodeId# 173 Pick Pick Disable nodeId# 198 Disable nodeId# 189 Delete nodeId# 206 Pick Add nodeId# 246 Disable nodeId# 240 Add nodeId# 247 Add nodeId# 248 Add nodeId# 249 Add nodeId# 250 Disable nodeId# 70 Disable nodeId# 179 Disable nodeId# 202 Add nodeId# 251 Add nodeId# 252 Disable nodeId# 237 Enable nodeId# 202 Delete nodeId# 84 Disable nodeId# 220 Pick Add nodeId# 253 Disable nodeId# 242 Disable nodeId# 224 Disable nodeId# 180 Disable nodeId# 246 Enable nodeId# 211 Add nodeId# 254 Pick Pick Enable nodeId# 180 Add nodeId# 255 Disable nodeId# 239 Disable nodeId# 209 Pick Enable nodeId# 231 Add nodeId# 256 Enable nodeId# 242 Add nodeId# 257 Delete nodeId# 244 Delete nodeId# 223 Delete nodeId# 257 Disable nodeId# 225 Delete nodeId# 254 Enable nodeId# 246 Enable nodeId# 179 Disable nodeId# 212 Delete nodeId# 68 Add nodeId# 258 Add nodeId# 259 Add nodeId# 260 Pick Delete nodeId# 246 Add nodeId# 261 Add nodeId# 262 Pick Add nodeId# 263 Add nodeId# 264 Delete nodeId# 211 Pick Pick Enable nodeId# 239 Delete nodeId# 242 Pick Disable nodeId# 85 Disable nodeId# 108 Add nodeId# 265 Delete nodeId# 141 Enab ... 
elete nodeId# 20229 Pick Pick Delete nodeId# 20330 Delete nodeId# 20372 Delete nodeId# 20319 Pick Enable nodeId# 20314 Add nodeId# 20387 Pick Disable nodeId# 20333 Disable nodeId# 20367 Delete nodeId# 20332 Pick Enable nodeId# 20251 Pick Delete nodeId# 20382 Add nodeId# 20388 Pick Pick Disable nodeId# 20329 Pick Add nodeId# 20389 Enable nodeId# 20329 Pick Enable nodeId# 20249 Pick Disable nodeId# 20315 Add nodeId# 20390 Delete nodeId# 20205 Disable nodeId# 20308 Add nodeId# 20391 Pick Enable nodeId# 20333 Add nodeId# 20392 Pick Add nodeId# 20393 Delete nodeId# 20227 Enable nodeId# 20367 Enable nodeId# 20358 Delete nodeId# 20314 Delete nodeId# 20352 Disable nodeId# 20377 Enable nodeId# 20315 Disable nodeId# 20389 Disable nodeId# 20251 Delete nodeId# 20377 Pick Disable nodeId# 20386 Enable nodeId# 20357 Enable nodeId# 20308 Add nodeId# 20394 Enable nodeId# 20386 Delete nodeId# 20351 Pick Disable nodeId# 20347 Pick Enable nodeId# 20389 Pick Disable nodeId# 20147 Delete nodeId# 20393 Enable nodeId# 20147 Delete nodeId# 20308 Enable nodeId# 20251 Add nodeId# 20395 Disable nodeId# 20384 Enable nodeId# 20347 Disable nodeId# 20381 Pick Add nodeId# 20396 Pick Delete nodeId# 20257 Enable nodeId# 20384 Pick Delete nodeId# 20395 Delete nodeId# 20389 Add nodeId# 20397 Pick Enable nodeId# 20381 Disable nodeId# 20347 Pick Pick Enable nodeId# 20347 Add nodeId# 20398 Disable nodeId# 20388 Delete nodeId# 20300 Disable nodeId# 20362 Add nodeId# 20399 Disable nodeId# 20356 Delete nodeId# 20346 Disable nodeId# 20366 Delete nodeId# 20397 Pick Enable nodeId# 20366 Pick Pick Enable nodeId# 20388 Delete nodeId# 20315 Disable nodeId# 20115 Disable nodeId# 20256 Add nodeId# 20400 Enable nodeId# 20362 Enable nodeId# 20356 Enable nodeId# 20256 Pick Pick Enable nodeId# 20115 Add nodeId# 20401 Add nodeId# 20402 Delete nodeId# 20165 Pick Delete nodeId# 20133 Add nodeId# 20403 Add nodeId# 20404 Pick Pick Delete nodeId# 20391 Disable nodeId# 20235 Disable nodeId# 20282 Enable nodeId# 20282 Pick Delete nodeId# 20355 Disable nodeId# 20380 Disable nodeId# 20304 Add nodeId# 20405 Disable nodeId# 20293 Delete nodeId# 20310 Disable nodeId# 20359 Add nodeId# 20406 Delete nodeId# 20235 Delete nodeId# 20249 Pick Pick Add nodeId# 20407 Enable nodeId# 20359 Pick Delete nodeId# 20304 Pick Add nodeId# 20408 Pick Enable nodeId# 20380 Pick Add nodeId# 20409 Delete nodeId# 20381 Pick Add nodeId# 20410 Enable nodeId# 20293 Disable nodeId# 20403 Pick Delete nodeId# 20379 Add nodeId# 20411 Disable nodeId# 20376 Pick Enable nodeId# 20376 Delete nodeId# 20282 Enable nodeId# 20403 Pick Delete nodeId# 20388 Delete nodeId# 20369 Pick Pick Delete nodeId# 20238 Pick Pick Pick Delete nodeId# 20194 Pick Delete nodeId# 20293 Add nodeId# 20412 Add nodeId# 20413 Disable nodeId# 20312 Disable nodeId# 20401 Add nodeId# 20414 Add nodeId# 20415 Pick Pick Delete nodeId# 20333 Add nodeId# 20416 Delete nodeId# 20370 Delete nodeId# 20386 Delete nodeId# 20343 Enable nodeId# 20401 Disable nodeId# 20399 Pick Pick Delete nodeId# 20384 Delete nodeId# 20267 Enable nodeId# 20312 Delete nodeId# 20411 Add nodeId# 20417 Disable nodeId# 20396 Disable nodeId# 20405 Pick Add nodeId# 20418 Pick Disable nodeId# 20375 Enable nodeId# 20405 Delete nodeId# 20415 Enable nodeId# 20399 Add nodeId# 20419 Enable nodeId# 20375 Disable nodeId# 20360 Add nodeId# 20420 Enable nodeId# 20396 Enable nodeId# 20360 Add nodeId# 20421 Pick Disable nodeId# 20353 Add nodeId# 20422 Add nodeId# 20423 Enable nodeId# 20353 Pick Disable nodeId# 20367 Add nodeId# 20424 Enable nodeId# 20367 Add nodeId# 
20425 Delete nodeId# 20367 Delete nodeId# 20107 Delete nodeId# 20260 Pick Disable nodeId# 20358 Pick Disable nodeId# 20364 Delete nodeId# 20378 Disable nodeId# 20380 Delete nodeId# 20376 Pick Pick Add nodeId# 20426 Delete nodeId# 20424 Disable nodeId# 20399 Add nodeId# 20427 Enable nodeId# 20358 Pick Disable nodeId# 20366 Add nodeId# 20428 Add nodeId# 20429 Add nodeId# 20430 Enable nodeId# 20364 Disable nodeId# 20164 Delete nodeId# 20366 Enable nodeId# 20380 Pick Disable nodeId# 20147 Delete nodeId# 20164 Pick Delete nodeId# 20383 Delete nodeId# 20421 Disable nodeId# 20410 Delete nodeId# 20269 Disable nodeId# 20420 Pick Pick Disable nodeId# 20403 Enable nodeId# 20399 Delete nodeId# 20380 Pick Disable nodeId# 20338 Enable nodeId# 20420 Add nodeId# 20431 Delete nodeId# 20405 Add nodeId# 20432 Pick Delete nodeId# 20431 Add nodeId# 20433 Add nodeId# 20434 Disable nodeId# 20426 Delete nodeId# 20398 Add nodeId# 20435 Delete nodeId# 20434 Add nodeId# 20436 Add nodeId# 20437 Disable nodeId# 20345 Pick Disable nodeId# 20423 Delete nodeId# 20433 Delete nodeId# 20436 Disable nodeId# 20364 Delete nodeId# 20256 Pick Disable nodeId# 20360 Add nodeId# 20438 Add nodeId# 20439 Pick Disable nodeId# 20435 Add nodeId# 20440 Disable nodeId# 20437 Add nodeId# 20441 Pick Add nodeId# 20442 Delete nodeId# 20251 Disable nodeId# 20114 Add nodeId# 20443 Delete nodeId# 20412 Delete nodeId# 20350 Disable nodeId# 20104 Enable nodeId# 20435 Pick Enable nodeId# 20364 Pick Pick Delete nodeId# 20423 Add nodeId# 20444 Delete nodeId# 20432 Disable nodeId# 20358 Delete nodeId# 20338 Add nodeId# 20445 Pick Add nodeId# 20446 Enable nodeId# 20426 Pick Add nodeId# 20447 Add nodeId# 20448 Delete nodeId# 20363 Delete nodeId# 20418 Pick Pick Disable nodeId# 20115 Pick Disable nodeId# 20399 Add nodeId# 20449 Add nodeId# 20450 Add nodeId# 20451 Delete nodeId# 20130 Pick Add nodeId# 20452 Disable nodeId# 20409 Disable nodeId# 20452 Enable nodeId# 20452 Delete nodeId# 20430 Add nodeId# 20453 Enable nodeId# 20358 Add nodeId# 20454 Delete nodeId# 20450 Delete nodeId# 20349 Enable nodeId# 20399 Delete nodeId# 20446 Enable nodeId# 20115 Add nodeId# 20455 Delete nodeId# 20327 Add nodeId# 20456 Enable nodeId# 20437 Add nodeId# 20457 Pick Delete nodeId# 20323 Pick Disable nodeId# 20404 Pick Pick Delete nodeId# 20443 Enable nodeId# 20360 Delete nodeId# 20440 Add nodeId# 20458 Delete nodeId# 20344 Disable nodeId# 20420 Disable nodeId# 20413 Disable nodeId# 20335 Delete nodeId# 20286 Disable nodeId# 20408 Disable nodeId# 20438 Enable nodeId# 20420 Add nodeId# 20459 Delete nodeId# 20454 Disable nodeId# 20459 Add nodeId# 20460 Disable nodeId# 20425 Add nodeId# 20461 Disable nodeId# 20329 Add nodeId# 20462 Enable nodeId# 20147 Enable nodeId# 20114 Delete nodeId# 20456 Disable nodeId# 20462 Delete nodeId# 20320 Add nodeId# 20463 Disable nodeId# 20444 Delete nodeId# 20462 Add nodeId# 20464 Delete nodeId# 20348 Enable nodeId# 20438 Enable nodeId# 20404 Pick Delete nodeId# 20435 Pick Disable nodeId# 20353 Enable nodeId# 20409 Delete nodeId# 20401 Pick Pick Disable nodeId# 20447 Pick Enable nodeId# 20104 Delete nodeId# 20399 Delete nodeId# 20387 Add nodeId# 20465 Delete nodeId# 20410 Enable nodeId# 20408 Delete nodeId# 20437 Pick Delete nodeId# 20322 Disable nodeId# 20429 Delete nodeId# 20359 Pick Pick Enable nodeId# 20335 Pick Enable nodeId# 20329 Enable nodeId# 20403 Delete nodeId# 20414 Add nodeId# 20466 Enable nodeId# 20447 Disable nodeId# 20438 Pick Pick Disable nodeId# 20403 Add nodeId# 20467 Enable nodeId# 20353 Enable nodeId# 20413 Delete nodeId# 
20439 Enable nodeId# 20403 Disable nodeId# 20406 Pick Disable nodeId# 20428 Disable nodeId# 20464 Enable nodeId# 20428 Disable nodeId# 20461 Add nodeId# 20468 Enable nodeId# 20459 Enable nodeId# 20444 Pick Delete nodeId# 20407 Delete nodeId# 20464 Pick Enable nodeId# 20438 Disable nodeId# 20396 Disable nodeId# 20466 Add nodeId# 20469 Delete nodeId# 20336 Disable nodeId# 20409 Pick Enable nodeId# 20345 Disable nodeId# 20463 Enable nodeId# 20429 Disable nodeId# 20146 Pick Add nodeId# 20470 Delete nodeId# 20460 Enable nodeId# 20146 Pick Add nodeId# 20471 Disable nodeId# 20402 Delete nodeId# 20422 Disable nodeId# 20442 Enable nodeId# 20425 Add nodeId# 20472 Enable nodeId# 20463 Enable nodeId# 20461 Enable nodeId# 20402 Add nodeId# 20473 Delete nodeId# 20469 Pick Disable nodeId# 20445 Delete nodeId# 20451 Delete nodeId# 20428 Add nodeId# 20474 Pick Enable nodeId# 20466 Pick Pick Enable nodeId# 20406 Enable nodeId# 20409 Enable nodeId# 20396 Disable nodeId# 20357 Pick Pick Pick Enable nodeId# 20445 Disable nodeId# 20448 Enable nodeId# 20448 Add nodeId# 20475 Disable nodeId# 20408 Pick Add nodeId# 20476 Pick Add nodeId# 20477 Enable nodeId# 20357 Delete nodeId# 20474 Delete nodeId# 20357 Pick Pick Add nodeId# 20478 Delete nodeId# 20426 Add nodeId# 20479 Delete nodeId# 20375 Disable nodeId# 20250 Pick Disable nodeId# 20362 Pick Disable nodeId# 20364 Enable nodeId# 20362 Delete nodeId# 20467 Disable nodeId# 20329 Add nodeId# 20480 Disable nodeId# 20404 Add nodeId# 20481 Add nodeId# 20482 Pick Delete nodeId# 20354 Delete nodeId# 20480 Add nodeId# 20483 Disable nodeId# 20419 Disable nodeId# 20420 Disable nodeId# 20171 Disable nodeId# 20394 Add nodeId# 20484 Add nodeId# 20485 Enable nodeId# 20420 Delete nodeId# 20335 Disable nodeId# 20461 Enable nodeId# 20250 Enable nodeId# 20408 Delete nodeId# 20459 Add nodeId# 20486 Pick Enable nodeId# 20171 Enable nodeId# 20364 Delete nodeId# 20371 Delete nodeId# 20171 Delete nodeId# 20186 Disable nodeId# 20481 Enable nodeId# 20394 Enable nodeId# 20442 Disable nodeId# 20104 Disable nodeId# 20427 Disable nodeId# 20448 Enable nodeId# 20448 Delete nodeId# 20358 Disable nodeId# 20403 Disable nodeId# 20362 Delete nodeId# 20425 Add nodeId# 20487 Delete nodeId# 20250 Disable nodeId# 20115 Add nodeId# 20488 Delete nodeId# 20487 Add nodeId# 20489 Pick Delete nodeId# 20362 Disable nodeId# 20312 Enable nodeId# 20312 Disable nodeId# 20466 Add nodeId# 20490 Enable nodeId# 20419 Enable nodeId# 20403 Disable nodeId# 20452 Pick Pick Pick Enable nodeId# 20115 Delete nodeId# 20463 Enable nodeId# 20481 Enable nodeId# 20329 Disable nodeId# 20447 Enable nodeId# 20466 Delete nodeId# 20483 Delete nodeId# 20420 Enable nodeId# 20427 Disable nodeId# 20264 Pick Enable nodeId# 20461 Disable nodeId# 20490 Disable nodeId# 20262 Pick Disable nodeId# 20468 Delete nodeId# 20408 Delete nodeId# 20444 Add nodeId# 20491 Add nodeId# 20492 Add nodeId# 20493 Add nodeId# 20494 Pick Pick Disable nodeId# 20364 Disable nodeId# 20478 Enable nodeId# 20104 Add nodeId# 20495 Enable nodeId# 20447 Delete nodeId# 20466 Delete nodeId# 20364 Delete nodeId# 20470 Pick Disable nodeId# 20492 Enable nodeId# 20404 Delete nodeId# 20457 Enable nodeId# 20452 Pick Delete nodeId# 20442 Pick Pick Add nodeId# 20496 Enable nodeId# 20262 Delete nodeId# 20445 Enable nodeId# 20264 Disable nodeId# 20441 Add nodeId# 20497 Disable nodeId# 20482 Delete nodeId# 20390 Add nodeId# 20498 Pick Pick Enable nodeId# 20468 Pick Enable nodeId# 20490 Add nodeId# 20499 Pick Add nodeId# 20500 Disable nodeId# 20402 Enable nodeId# 20402 Enable nodeId# 
20492 Enable nodeId# 20482 Enable nodeId# 20441 Add nodeId# 20501 Disable nodeId# 20447 Delete nodeId# 20496 Disable nodeId# 20385 Pick Enable nodeId# 20447 Enable nodeId# 20385 Pick Enable nodeId# 20478 Add nodeId# 20502 Pick Pick Delete nodeId# 20417 Add nodeId# 20503 Pick Delete nodeId# 20147 Add nodeId# 20504 Add nodeId# 20505 Delete nodeId# 20505 Pick Pick Pick Pick Pick Delete nodeId# 20492 Add nodeId# 20506 Add nodeId# 20507 Disable nodeId# 20400 Disable nodeId# 20471 Delete nodeId# 20499 Disable nodeId# 20501 Add nodeId# 20508 Disable nodeId# 20385 Disable nodeId# 20465 Delete nodeId# 20507 Enable nodeId# 20400 Add nodeId# 20509 Delete nodeId# 20262 Pick Pick Disable nodeId# 20347 Enable nodeId# 20471 Delete nodeId# 20484 Enable nodeId# 20347 Pick Add nodeId# 20510 Enable nodeId# 20465 Delete nodeId# 20356 Delete nodeId# 20353 Add nodeId# 20511 Enable nodeId# 20501 Add nodeId# 20512 Disable nodeId# 20427 Delete nodeId# 20476 Delete nodeId# 20488 Pick Pick Enable nodeId# 20427 Delete nodeId# 20453 Enable nodeId# 20385 Disable nodeId# 20503 Disable nodeId# 20461 Add nodeId# 20513 Delete nodeId# 20392 Pick Delete nodeId# 20312 Disable nodeId# 20506 Disable nodeId# 20495 Enable nodeId# 20495 Enable nodeId# 20506 Add nodeId# 20514 Disable nodeId# 20471 Delete nodeId# 20485 Pick Add nodeId# 20515 Add nodeId# 20516 >> KqpWorkloadServiceDistributed::TestDistributedQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] Test command err: 2025-03-28T09:03:16.683238Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:16.685507Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:16.687391Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:16.687545Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:16.691064Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:16.691945Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00162f/r3tmp/tmpEyYK8e/pdisk_1.dat Formatting pdisk Creating PDisk Creating pdisk Verify that PDisk returns ERROR 2025-03-28T09:03:17.366299Z node 1 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/00162f/r3tmp/tmpQGbW5u//new_pdisk.dat": no such file. 
PDiskId# 1001 2025-03-28T09:03:17.367153Z node 1 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1001 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/00162f/r3tmp/tmpQGbW5u//new_pdisk.dat": no such file. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/00162f/r3tmp/tmpQGbW5u//new_pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 504174861655196556 PDiskId# 1001 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 HashedMainKey[0]# 0x221976E60BD392C7 StartOwnerRound# 10 SectorMap# false EnableSectorEncryption # 1 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# Enable WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1001 2025-03-28T09:03:17.402832Z node 1 :BS_PROXY_PUT INFO: [e2e5f1b9c917f854] bootstrap ActorId# [1:543:2461] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:349:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:03:17.403016Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.403059Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.403088Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.403118Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.403150Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.403178Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:349:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:17.403221Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] restore Id# [72057594037932033:2:8:0:0:349:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:03:17.403309Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# 
[72057594037932033:2:8:0:0:349:1] Marker# BPG33 2025-03-28T09:03:17.403360Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:349:1] Marker# BPG32 2025-03-28T09:03:17.403408Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:349:2] Marker# BPG33 2025-03-28T09:03:17.403437Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:349:2] Marker# BPG32 2025-03-28T09:03:17.403473Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:349:3] Marker# BPG33 2025-03-28T09:03:17.403503Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:349:3] Marker# BPG32 2025-03-28T09:03:17.403673Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:65:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:349:3] FDS# 349 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:17.403743Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:349:2] FDS# 349 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:17.403793Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:349:1] FDS# 349 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:17.407338Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:349:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 82748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-28T09:03:17.407560Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:349:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 82748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-03-28T09:03:17.407647Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:349:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 82748 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-03-28T09:03:17.407726Z node 1 :BS_PROXY_PUT DEBUG: [e2e5f1b9c917f854] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:349:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-03-28T09:03:17.407794Z node 1 :BS_PROXY_PUT INFO: [e2e5f1b9c917f854] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:349:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:03:17.407991Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 
HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.182 sample PartId# [72057594037932033:2:8:0:0:349:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.183 sample PartId# [72057594037932033:2:8:0:0:349:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.183 sample PartId# [72057594037932033:2:8:0:0:349:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 4.779 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.952 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 5.036 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] }
>> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD]
>> TKesusTest::TestAcquireSemaphoreViaDecrease
>> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately [GOOD]
>> TTicketParserTest::NebiusAccessKeySignatureUnsupported
>> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean [GOOD]
>> KqpWorkloadServiceActors::TestPoolFetcher
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot
>> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD]
>> ResourcePoolsDdl::TestCreateResourcePool
>> KqpPg::JoinWithQueryService+StreamLookup
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD]
Test command err: 2025-03-28T09:02:34.130537Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:34.130648Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:34.155811Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:34.155925Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:34.187006Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:34.187485Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=9891445798939599684, session=0, seqNo=0) 2025-03-28T09:02:34.187659Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:02:34.199166Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=9891445798939599684, session=1) 2025-03-28T09:02:34.199461Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=14335619644736637577, session=0, seqNo=0) 2025-03-28T09:02:34.199585Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:02:34.213348Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=14335619644736637577, session=2) 2025-03-28T09:02:34.213950Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[1:141:2165], cookie=5668396275831374030, name="Sem1", limit=1) 2025-03-28T09:02:34.214080Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-03-28T09:02:34.227482Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[1:141:2165], cookie=5668396275831374030) 2025-03-28T09:02:34.227825Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Sem1" count=1) 2025-03-28T09:02:34.228038Z node 1 :KESUS_TABLET
DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-03-28T09:02:34.228233Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=2, semaphore="Sem1" count=1) 2025-03-28T09:02:34.243444Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-03-28T09:02:34.243520Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2025-03-28T09:02:34.244094Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:149:2173], cookie=9392298952995159981, name="Sem1") 2025-03-28T09:02:34.244188Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:149:2173], cookie=9392298952995159981) 2025-03-28T09:02:34.244699Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:152:2176], cookie=6805341193068306970, name="Sem1") 2025-03-28T09:02:34.244768Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:152:2176], cookie=6805341193068306970) 2025-03-28T09:02:34.675535Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:34.695272Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:35.117438Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:35.139611Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:35.539095Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:35.555474Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:35.950539Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:35.973304Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:36.388753Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:36.403363Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:36.826535Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:36.847674Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:37.223090Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:37.237386Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:37.648688Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:37.671443Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:38.059491Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:38.087441Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:38.547118Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:38.571621Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:38.987053Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:39.003615Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:39.423836Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:39.444394Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-03-28T09:02:39.855067Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:39.875446Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:40.319119Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:40.339483Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:40.787104Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:40.807353Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:41.223021Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:41.239475Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:41.655059Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:41.675395Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:42.069577Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:42.091558Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:42.491267Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:42.508403Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:42.975983Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:43.007488Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:43.415183Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:43.435481Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:43.835110Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:43.848667Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:44.248225Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:44.271962Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:44.677541Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:44.699439Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:45.088383Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:45.107495Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:45.499034Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:45.514729Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:45.929710Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:45.946954Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:46.345406Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:46.371534Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:46.787088Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:46.807722Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:47.283192Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:47.305918Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Complete 2025-03-28T09:02:47.705448Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:47.723528Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:48.151187Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:48.164996Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:48.575977Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:48.596949Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:49.011883Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:49.034636Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:49.451304Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:49.471399Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:49.878059Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:49.895686Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:50.311044Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:50.329097Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:50.694989Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:50.711487Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:51.100907Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:51.123610Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:51.607068Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:51.631470Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:52.054397Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:52.074141Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:52.489926Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:52.508516Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:52.931142Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:52.949671Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:53.367371Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:53.387442Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:53.837240Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:53.853015 ... 
2057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:10.303737Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:10.687964Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:10.700739Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:11.095985Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:11.111792Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:11.518212Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:11.533465Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:11.899881Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:11.919291Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:12.286457Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:12.305446Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:12.673645Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:12.691865Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:13.044322Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:13.061732Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:13.443166Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:13.457219Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:13.827034Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:13.840761Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:14.196415Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:14.208695Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:14.578827Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:14.591503Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:14.950017Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:14.963462Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:15.380387Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:15.394191Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:15.792853Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:15.805615Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:16.174937Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:16.187297Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:16.553576Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:16.565899Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:16.955250Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:16.968632Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:17.326451Z node 4 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:17.339480Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:17.710488Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:17.722895Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:18.100194Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:18.114197Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:18.474131Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:18.486531Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:18.839809Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:18.856276Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:19.206566Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-03-28T09:03:19.206663Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-03-28T09:03:19.206744Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-03-28T09:03:19.220300Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-03-28T09:03:19.231400Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:411:2411], cookie=3875510764457309641, name="Sem1") 2025-03-28T09:03:19.231526Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:411:2411], cookie=3875510764457309641) 2025-03-28T09:03:19.902863Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:03:19.902963Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:03:19.934504Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:03:19.940041Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:03:19.972894Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:03:19.973423Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=8535144266008997881, session=0, seqNo=0) 2025-03-28T09:03:19.973623Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:03:19.989835Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=8535144266008997881, session=1) 2025-03-28T09:03:19.990232Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=17501087431580838716, session=0, seqNo=0) 2025-03-28T09:03:19.990401Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:03:20.003815Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=17501087431580838716, session=2) 2025-03-28T09:03:20.004229Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=3137374702999920369, session=0, seqNo=0) 2025-03-28T09:03:20.004399Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3 2025-03-28T09:03:20.023885Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=3137374702999920369, session=3) 2025-03-28T09:03:20.024603Z node 5 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2169], cookie=15714014620826897867, name="Sem1", limit=3) 2025-03-28T09:03:20.024793Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-03-28T09:03:20.042171Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2169], cookie=15714014620826897867) 2025-03-28T09:03:20.042620Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Sem1" count=2) 2025-03-28T09:03:20.042840Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-03-28T09:03:20.043101Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=222, session=2, semaphore="Sem1" count=1) 2025-03-28T09:03:20.043210Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-03-28T09:03:20.043327Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=3, semaphore="Sem1" count=1) 2025-03-28T09:03:20.067472Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-03-28T09:03:20.067578Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=222) 2025-03-28T09:03:20.067617Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 2025-03-28T09:03:20.068367Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:153:2177], cookie=1921351321744830914, name="Sem1") 2025-03-28T09:03:20.068500Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:153:2177], cookie=1921351321744830914) 2025-03-28T09:03:20.069033Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:156:2180], cookie=15448069682456348859, name="Sem1") 2025-03-28T09:03:20.069122Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:156:2180], cookie=15448069682456348859) 2025-03-28T09:03:20.069448Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=444, session=1, semaphore="Sem1" count=1) 2025-03-28T09:03:20.069588Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-03-28T09:03:20.091670Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=444) 2025-03-28T09:03:20.092460Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:161:2185], cookie=7202473185410116272, name="Sem1") 2025-03-28T09:03:20.092584Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:161:2185], cookie=7202473185410116272) 2025-03-28T09:03:20.093169Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:164:2188], cookie=13026024921417194666, name="Sem1") 2025-03-28T09:03:20.093262Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:164:2188], cookie=13026024921417194666) 2025-03-28T09:03:20.124776Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:03:20.124900Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxInitSchema::Execute 2025-03-28T09:03:20.125530Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:03:20.126315Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:03:20.185262Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:03:20.185479Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-03-28T09:03:20.185536Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-03-28T09:03:20.185568Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-03-28T09:03:20.185932Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:204:2218], cookie=8627650794553957701, name="Sem1") 2025-03-28T09:03:20.186056Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:204:2218], cookie=8627650794553957701) 2025-03-28T09:03:20.186750Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:213:2226], cookie=2132059423059279738, name="Sem1") 2025-03-28T09:03:20.186852Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:213:2226], cookie=2132059423059279738)
>> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag [GOOD]
>> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless
>> BackupRestore::ImportDataShouldHandleErrors [GOOD]
>> BackupRestore::RestoreKesusResources
>> YdbSdkSessionsPool::CustomPlan [GOOD]
>> YdbSdkSessionsPool::FailTest
>> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs [GOOD]
>> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs
>> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD]
>> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD]
>> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree
>> TLocksTest::Range_EmptyKey [GOOD]
>> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop [GOOD]
>> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables
>> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD]
>> BackupRestoreS3::RestoreTableSplitBoundaries [GOOD]
>> BackupRestoreS3::RestoreIndexTableSplitBoundaries
>> Cdc::NewAndOldImagesLogDebezium [GOOD]
>> Cdc::OldImageLogDebezium
>> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex [GOOD]
>> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex
>> KqpPg::TypeCoercionBulkUpsert
>> GroupWriteTest::Simple [GOOD]
>> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn [GOOD]
>> YdbIndexTable::MultiShardTableOneIndexPkOverlap
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD]
Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient:
[1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:00:52.220933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:00:52.221030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:52.221069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:00:52.221106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:00:52.221160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:00:52.221202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:00:52.221288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:00:52.221369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:00:52.221707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:00:52.361159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:00:52.361233Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:00:52.389212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:00:52.389324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:00:52.389451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:00:52.397839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:00:52.398038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:00:52.398769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:52.398973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:00:52.400975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
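The schemeshard bootstrap records above, like the PDisk config dump at the top of this section, print settings as flat `Name# Value` tokens (for example `ChunkSize# 134217728` or `InflightLimit# 10`). Below is a minimal, hedged C++ sketch of a parser for that convention; it is an illustration only (not a YDB helper), and it assumes every key is a single whitespace-delimited token ending in `#` followed by a single value token, so nested `{ ... }` sub-configs and comma-suffixed values are deliberately ignored.

```cpp
#include <iostream>
#include <map>
#include <sstream>
#include <string>

// Hypothetical helper, not part of YDB: splits a flat "Name# Value" dump
// such as "PDiskId# 1001 ChunkSize# 134217728" into a map.
// Assumption: each key token ends with '#' and is followed by exactly one
// value token; nested "{ ... }" blocks are skipped by this sketch.
std::map<std::string, std::string> ParseKeyValueDump(const std::string& line) {
    std::map<std::string, std::string> result;
    std::istringstream in(line);
    std::string token;
    std::string pendingKey;
    while (in >> token) {
        if (!pendingKey.empty()) {
            result[pendingKey] = token;  // value for the preceding "Name#"
            pendingKey.clear();
        } else if (token.size() > 1 && token.back() == '#') {
            pendingKey = token.substr(0, token.size() - 1);
        }
        // Anything else (braces, bare words) is ignored.
    }
    return result;
}

int main() {
    // Values taken from the PDisk config dump earlier in this log.
    const auto cfg = ParseKeyValueDump(
        "PDiskId# 1001 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000");
    for (const auto& [key, value] : cfg) {
        std::cout << key << " = " << value << "\n";
    }
    return 0;
}
```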
2025-03-28T09:00:52.402315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:52.402376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:52.402525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:00:52.402577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:52.402623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:00:52.402798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:00:52.410065Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:00:52.675560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:00:52.677456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:52.682648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:00:52.694966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:00:52.695362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:52.726153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:52.737960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:00:52.740816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:52.746744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:00:52.746819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:00:52.746852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:00:52.748839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:52.748890Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:00:52.748919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:00:52.750391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:52.750423Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:52.750458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:52.750507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:00:52.784189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:00:52.797160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:00:52.798668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:00:52.803161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:00:52.803298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:00:52.803356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:52.803628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:00:52.803687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:00:52.803850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:00:52.803923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:00:52.824039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:00:52.824088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:00:52.838767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:00:52.839089Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:00:52.841824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:00:52.842579Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:00:52.848477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:00:52.848834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:00:52.849465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025 ... ocalPathId: 3] 2025-03-28T09:03:21.381505Z node 72 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:21.381536Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:203:2205], at schemeshard: 72057594046678944, txId: 1002, path id: 1 2025-03-28T09:03:21.381577Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [72:203:2205], at schemeshard: 72057594046678944, txId: 1002, path id: 3 2025-03-28T09:03:21.382549Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-03-28T09:03:21.382598Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:03:21.383575Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-03-28T09:03:21.383655Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 1002 2025-03-28T09:03:21.383685Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-03-28T09:03:21.383720Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:03:21.383753Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:03:21.384343Z node 72 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-03-28T09:03:21.384413Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2025-03-28T09:03:21.384440Z node 72 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2025-03-28T09:03:21.384468Z node 72 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-28T09:03:21.384499Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:03:21.384558Z node 72 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 1002 2025-03-28T09:03:21.388818Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 956 } } 2025-03-28T09:03:21.388866Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-03-28T09:03:21.388994Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 956 } } 2025-03-28T09:03:21.389081Z node 72 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000003 OrderId: 1002 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 956 } } 2025-03-28T09:03:21.390153Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 331 RawX2: 309237647630 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-03-28T09:03:21.390197Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2025-03-28T09:03:21.390298Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 331 RawX2: 309237647630 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-03-28T09:03:21.390337Z node 72 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:03:21.390404Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 331 RawX2: 309237647630 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2025-03-28T09:03:21.390451Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:21.390482Z node 72 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2025-03-28T09:03:21.390515Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:03:21.390549Z node 72 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1002:0 129 -> 240 2025-03-28T09:03:21.390973Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 1002 2025-03-28T09:03:21.395716Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2025-03-28T09:03:21.397796Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-03-28T09:03:21.397923Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2025-03-28T09:03:21.398402Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2025-03-28T09:03:21.398448Z node 72 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1002:0 ProgressState 2025-03-28T09:03:21.398542Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-03-28T09:03:21.398571Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-03-28T09:03:21.398606Z node 72 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1002:0 progress is 1/1 2025-03-28T09:03:21.398634Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-03-28T09:03:21.398665Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2025-03-28T09:03:21.398730Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2025-03-28T09:03:21.398765Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1002:0 2025-03-28T09:03:21.398791Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1002:0 2025-03-28T09:03:21.398899Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2025-03-28T09:03:21.402202Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2025-03-28T09:03:21.402247Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2025-03-28T09:03:21.402573Z node 72 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2025-03-28T09:03:21.402649Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2025-03-28T09:03:21.402678Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:408:2381] TestWaitNotification: OK eventTxId 1002 2025-03-28T09:03:21.403068Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:21.403245Z node 72 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 214us result status StatusSuccess 2025-03-28T09:03:21.403653Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::Simple [GOOD] Test command err: RandomSeed# 18219663192521747841 2025-03-28T09:03:04.231296Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-03-28T09:03:04.251231Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-03-28T09:03:04.251298Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-03-28T09:03:04.253928Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-03-28T09:03:04.269945Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:03:04.272310Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-03-28T09:03:22.836468Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 
2025-03-28T09:03:22.836564Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:03:22.836614Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-28T09:03:22.836652Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:03:22.888053Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} 2025-03-28T09:03:22.888129Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK}
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry [GOOD]
>> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_EmptyKey [GOOD]
Test command err: 2025-03-28T09:02:23.899195Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783165910151541:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:23.900100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f31/r3tmp/tmpJqEuqF/pdisk_1.dat 2025-03-28T09:02:24.662373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:24.662467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:24.662672Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:24.684897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63205 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success.
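Blob identifiers throughout this section (for example `[72057594037932033:2:8:0:0:349:0]` in the BS_PROXY_PUT trace near the top, with parts ending `:1` through `:3`) pack several fields into one bracketed tuple. The field order used in the hedged sketch below — tablet id, generation, step, channel, cookie, blob size, part id — is inferred from the values seen in this log rather than taken from YDB headers, so treat it as an assumption:

```cpp
#include <cstdint>
#include <iostream>
#include <optional>
#include <sstream>
#include <string>

// Hypothetical parser for the bracketed blob ids seen in this log, e.g.
// "[72057594037932033:2:8:0:0:349:3]". Field order is an assumption inferred
// from the log: TabletId:Generation:Step:Channel:Cookie:BlobSize:PartId.
struct TParsedBlobId {
    uint64_t TabletId = 0;
    uint32_t Generation = 0, Step = 0, Channel = 0, Cookie = 0, BlobSize = 0, PartId = 0;
};

std::optional<TParsedBlobId> ParseBlobId(const std::string& text) {
    if (text.size() < 2 || text.front() != '[' || text.back() != ']')
        return std::nullopt;
    std::istringstream in(text.substr(1, text.size() - 2));
    TParsedBlobId id;
    char sep = 0;  // consumes the ':' separators
    if (in >> id.TabletId >> sep >> id.Generation >> sep >> id.Step >> sep
           >> id.Channel >> sep >> id.Cookie >> sep >> id.BlobSize >> sep >> id.PartId)
        return id;
    return std::nullopt;
}

int main() {
    if (auto id = ParseBlobId("[72057594037932033:2:8:0:0:349:3]")) {
        std::cout << "tablet " << id->TabletId << ", size " << id->BlobSize
                  << ", part " << id->PartId << "\n";
    }
    return 0;
}
```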
2025-03-28T09:02:25.359625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:25.383861Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:25.394987Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:25.407426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:02:25.684721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:25.776179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:29.384202Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783192424737794:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:29.384248Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f31/r3tmp/tmpXeHMcd/pdisk_1.dat 2025-03-28T09:02:29.689256Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:29.716592Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:29.716770Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:29.720392Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27990 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:30.084089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:30.095233Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:30.111847Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:02:30.127032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:30.255295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:30.366031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:35.068293Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486783217749600445:2214];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:35.086011Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f31/r3tmp/tmpjiuQnp/pdisk_1.dat 2025-03-28T09:02:35.289149Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:35.299053Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:35.299148Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:35.300908Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9354 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:35.580021Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:35.599482Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:35.634268Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:02:35.641959Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:35.741122Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:35.834931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f31/r3tmp/tmpekw6lu/pdisk_1.dat 2025-03-28T09:02:41.629441Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:41.632228Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:41.725781Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:41.725912Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:41.732688Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7392 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: ... ished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:02:59.904548Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:02:59.935582Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:02:59.941332Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:00.167142Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:00.316908Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:05.073198Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486783344749547544:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:05.102486Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f31/r3tmp/tmpZv8nrv/pdisk_1.dat 2025-03-28T09:03:05.529080Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:05.572803Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:05.572905Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:05.584538Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61698 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:03:06.092406Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:06.110000Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:03:06.135103Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:03:06.144344Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:06.257429Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:06.347056Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:10.886407Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486783367281301818:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:10.886492Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f31/r3tmp/tmpxJ9CJb/pdisk_1.dat 2025-03-28T09:03:11.111304Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:11.149206Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:11.149316Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:11.151170Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64553 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:03:11.442363Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:11.449127Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:11.472077Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:03:11.561648Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:03:11.651046Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:16.503119Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486783392858710765:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:16.503388Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f31/r3tmp/tmpSh91p6/pdisk_1.dat 2025-03-28T09:03:16.657610Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:16.683234Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:16.683331Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:16.684644Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3217 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:03:16.998029Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:17.004729Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:17.027769Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:17.130134Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:03:17.207142Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... >> KqpWorkloadServiceActors::TestPoolFetcher [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable >> ResourcePoolsDdl::TestPoolSwitchToLimitedState [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState >> KqpPg::InsertNoTargetColumns_Simple+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] Test command err: 2025-03-28T09:02:43.301812Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783251904049657:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:43.302250Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ed4/r3tmp/tmpX5Cgbt/pdisk_1.dat 2025-03-28T09:02:43.935740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:43.935854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:43.959600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:44.031900Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63774, node 1 2025-03-28T09:02:44.092183Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:02:44.092214Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:02:44.186524Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:44.186547Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:44.186556Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:44.186709Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15764 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:44.699544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:44.740041Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:45.041125Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 2025-03-28T09:02:45.050053Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:02:45.050090Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:02:45.050839Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****9SoA (C1EAF185) () has now retryable error message 'Security state is empty' 2025-03-28T09:02:45.051048Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:02:45.051063Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:02:45.051291Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****9SoA (C1EAF185) () has now retryable error message 'Security state is empty' 2025-03-28T09:02:45.051301Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-03-28T09:02:45.051314Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-03-28T09:02:45.051338Z node 1 :TICKET_PARSER ERROR: Ticket eyJh****9SoA (C1EAF185): Security state is empty 2025-03-28T09:02:46.266868Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****9SoA (C1EAF185) 2025-03-28T09:02:46.267145Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:02:46.267161Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:02:46.267414Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****9SoA (C1EAF185) () has now retryable error message 'Security state is empty' 2025-03-28T09:02:46.267425Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A2 error Security state is empty 2025-03-28T09:02:48.056522Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:02:48.283093Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783251904049657:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:48.283148Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:50.283696Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****9SoA (C1EAF185) 2025-03-28T09:02:50.283875Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:02:50.283890Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:02:50.284805Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****9SoA (C1EAF185) () has now valid token of user1 2025-03-28T09:02:50.284830Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-03-28T09:02:55.302871Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****9SoA (C1EAF185) 2025-03-28T09:02:55.303309Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****9SoA (C1EAF185) () has now valid token of user1 2025-03-28T09:02:56.052131Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783306482566776:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:56.052996Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ed4/r3tmp/tmpI9SHAN/pdisk_1.dat 2025-03-28T09:02:56.342921Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:56.411355Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:56.411436Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:56.420259Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21396, node 2 2025-03-28T09:02:56.559279Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:56.559301Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:56.559309Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:56.559410Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:02:56.984356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:56.991431Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:56.993454Z node 2 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-28T09:02:56.993517Z node 2 :GRPC_CLIENT DEBUG: [5170000a4908] Connect to grpc://localhost:1448 2025-03-28T09:02:56.996112Z node 2 :GRPC_CLIENT DEBUG: [5170000a4908] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-03-28T09:02:57.054981Z node 2 :GRPC_CLIENT DEBUG: [5170000a4908] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-03-28T09:02:57.055192Z node 2 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-28T09:03:02.104179Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486783329257074871:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:02.104708Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ed4/r3tmp/tmpQ0iDf8/pdisk_1.dat 2025-03-28T09:03:02.334206Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:02.377237Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:02.377866Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:02.383875Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20326, node 3 2025-03-28T09:03:02.569688Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:02.569711Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:02.569718Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:02.569834Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22530 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: ... lVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:03:02.924121Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:02.938993Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:03:02.944862Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:03:02.944900Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:03:02.944928Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:03:02.944964Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-28T09:03:02.945011Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Connect to grpc://localhost:20546 2025-03-28T09:03:02.945933Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2025-03-28T09:03:02.966406Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Status 14 Service Unavailable 2025-03-28T09:03:02.966585Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-28T09:03:02.967112Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-28T09:03:02.967302Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2025-03-28T09:03:02.969969Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Status 14 Service Unavailable 2025-03-28T09:03:02.970396Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-28T09:03:03.986837Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-03-28T09:03:03.986877Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-28T09:03:03.987009Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2025-03-28T09:03:03.992108Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Status 14 Service Unavailable 2025-03-28T09:03:03.992456Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-28T09:03:04.990995Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket **** (8E120919) 2025-03-28T09:03:04.991042Z node 3 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-28T09:03:05.061735Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-03-28T09:03:05.066386Z node 3 :GRPC_CLIENT DEBUG: [517000100388] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-03-28T09:03:05.066654Z node 3 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-28T09:03:06.976252Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486783329257074871:2274];send_to=[0:7307199536658146131:7762515]; 
2025-03-28T09:03:06.976337Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:03:15.812733Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783387641492331:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:15.812818Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ed4/r3tmp/tmpXAnhUP/pdisk_1.dat 2025-03-28T09:03:15.979924Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:15.991238Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:15.991342Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:15.992963Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5522, node 4 2025-03-28T09:03:16.062425Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:16.062446Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:16.062454Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:16.062595Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24327 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:16.358131Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:03:16.368175Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:03:16.368220Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:03:16.368240Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:03:16.368276Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-28T09:03:16.368325Z node 4 :GRPC_CLIENT DEBUG: [5170000e0f08] Connect to grpc://localhost:22512 2025-03-28T09:03:16.369430Z node 4 :GRPC_CLIENT DEBUG: [5170000e0f08] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2025-03-28T09:03:16.378916Z node 4 :GRPC_CLIENT DEBUG: [5170000e0f08] Status 14 Service Unavailable 2025-03-28T09:03:16.379111Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-28T09:03:16.379164Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-28T09:03:16.379395Z node 4 :GRPC_CLIENT DEBUG: [5170000e0f08] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2025-03-28T09:03:16.382878Z node 4 :GRPC_CLIENT DEBUG: [5170000e0f08] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2025-03-28T09:03:16.383102Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-28T09:03:19.887075Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486783408655530479:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:19.887126Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ed4/r3tmp/tmpevvdxl/pdisk_1.dat 2025-03-28T09:03:20.027401Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:20.041604Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:20.041688Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:20.043613Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24337, node 5 2025-03-28T09:03:20.148975Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:20.148997Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:20.149010Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:20.149147Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27164 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:03:20.419742Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:03:20.432415Z node 5 :TICKET_PARSER ERROR: Ticket AKIA****MPLE (B3EDC139): Access key signature is not supported >> KqpWorkloadService::TestQueueSizeSimple [GOOD] >> KqpWorkloadService::TestQueueSizeManyQueries >> KqpPg::InsertFromSelect_Simple+useSink >> KqpPg::CreateTableSerialColumns+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] Test command err: 2025-03-28T09:01:15.076129Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782874902612725:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:15.076280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:01:15.161768Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782875385223682:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:15.161804Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:01:15.663702Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019a8/r3tmp/tmphtIsw1/pdisk_1.dat 2025-03-28T09:01:15.852441Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:01:16.589548Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:16.664868Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:17.671288Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:18.067410Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:18.850412Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:19.196615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:19.871677Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:20.046816Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782874902612725:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:20.046892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:01:20.163283Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782875385223682:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:20.163338Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:01:20.354817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:20.892242Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:21.175544Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:21.174889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:22.122729Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:22.186995Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:22.824028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:22.919508Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:23.135164Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:23.208831Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:23.578940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:23.842124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:24.154872Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:24.206913Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:24.481796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:24.482578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:24.497699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:01:24.497753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:01:24.627118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:01:24.632244Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:01:24.633255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:01:24.775753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12766, node 1 2025-03-28T09:01:25.895247Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/0019a8/r3tmp/yandexHe8y9v.tmp 2025-03-28T09:01:25.895277Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/0019a8/r3tmp/yandexHe8y9v.tmp 2025-03-28T09:01:25.895443Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/0019a8/r3tmp/yandexHe8y9v.tmp 2025-03-28T09:01:25.895560Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:01:25.984558Z INFO: TTestServer started on Port 30158 GrpcPort 12766 TClient is connected to server localhost:30158 PQClient connected to localhost:12766 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:01:26.618838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:01:26.815467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:01:37.299424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782969874504673:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:37.299525Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486782969874504647:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:37.299658Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:01:37.360525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-28T09:01:37.410786Z node 1 :KQP_PROXY ERROR: TraceId: "01jqdzr8a3e5w2y2bbp3b4z0ev", Request deadline has expired for 3.838041s seconds 2025-03-28T09:01:37.522881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486782969874504677:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-28T09:01:37.635482Z node 2 :TX_PROXY ERROR: Actor# [2:7486782969874504704:2209] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:01:37.672400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot ... T09:03:10.523440Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486783367250377643:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:10.523519Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:03:10.594146Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486783368937449431:2275];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:10.594373Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019a8/r3tmp/tmpsd59FI/pdisk_1.dat 2025-03-28T09:03:10.678167Z node 9 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:03:10.695753Z node 10 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:03:10.854821Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:10.943307Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:10.943424Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:10.946641Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:10.946740Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:10.950421Z node 9 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2025-03-28T09:03:10.950585Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:03:10.957850Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1295, node 9 2025-03-28T09:03:11.102800Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/0019a8/r3tmp/yandexGJfo8k.tmp 2025-03-28T09:03:11.102828Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/0019a8/r3tmp/yandexGJfo8k.tmp 2025-03-28T09:03:11.102982Z node 9 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/0019a8/r3tmp/yandexGJfo8k.tmp 2025-03-28T09:03:11.103141Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:03:11.181605Z INFO: TTestServer started on Port 21157 GrpcPort 1295 TClient is connected to server localhost:21157 PQClient connected to localhost:1295 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:11.488266Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:03:11.557363Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:03:11.818938Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-03-28T09:03:15.527156Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7486783367250377643:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:15.527224Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:03:15.573664Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7486783368937449431:2275];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:15.573737Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:03:15.575488Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7486783388725215212:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:15.575625Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7486783388725215228:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:15.575672Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:15.579433Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-03-28T09:03:15.611891Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715662, at schemeshard: 72057594046644480 2025-03-28T09:03:15.623886Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7486783388725215239:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-03-28T09:03:15.627838Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:03:15.726423Z node 9 :TX_PROXY ERROR: Actor# [9:7486783388725215451:2836] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:03:15.739957Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:03:15.763276Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7486783388725215494:2360], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:03:15.763461Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=YzYxMzIzZTUtZGJlMmQzZDktOWI2M2ZjOGYtM2IxYjdkMzg=, ActorId: [9:7486783388725215207:2339], ActorState: ExecuteState, TraceId: 01jqdzvgkn38e3ess0q6e97zqj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:03:15.763829Z node 9 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:03:15.781509Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7486783390412286117:2320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:03:15.781955Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=Y2MwMGYzMWEtMTNhOWU1MTItOThhZDAwZWYtNzRlNDU1YzE=, ActorId: [10:7486783390412286093:2314], ActorState: ExecuteState, TraceId: 01jqdzvgs1fkdgw49esbm05eg4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:03:15.782417Z node 10 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:03:15.880604Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:03:16.366079Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqdzvh4a1n1s9c301kfqrp5f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=Y2IxNDRhMDItNmUyNTEzNjItY2I2MjVjYzUtYmQxMDhiNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [9:7486783393020183085:3104] === CheckClustersList. 
Ok Received TEvChooseError: Bad SourceId 2025-03-28T09:03:21.859047Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7486783414495020052:3378] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2025-03-28T09:03:21.859091Z node 9 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [9:7486783414495020052:3378] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId >> KqpPg::JoinWithQueryService+StreamLookup [GOOD] >> KqpPg::Insert_Serial+useSink >> YdbSdkSessionsPool::FailTest [GOOD] >> TTicketParserTest::LoginCheckRemovedUser [GOOD] >> TTicketParserTest::LoginEmptyTicketBad >> BackupRestore::RestoreKesusResources [GOOD] >> BackupRestore::RestoreReplicationWithoutSecret >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks >> KqpPg::EmptyQuery+useSink >> ResourcePoolsDdl::TestCreateResourcePool [GOOD] >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::FailTest [GOOD] Test command err: 2025-03-28T09:03:08.663430Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783360791915720:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:08.664453Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013b5/r3tmp/tmpSH0Crz/pdisk_1.dat 2025-03-28T09:03:09.212537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:09.212650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:09.224413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:03:09.249436Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1562, node 1 2025-03-28T09:03:09.636474Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:03:09.643397Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:03:09.643612Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:09.643622Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:09.643629Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:09.643761Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:03:09.754861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:6324 WaitRootIsUp 'Root'... 
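Editor's note: the recurring chain in these runs — `Resource pool default not found` → `Scheduled retry` → `Transaction ... completed, doublechecking` → `path exist, request accepts it` — is the workload manager lazily bootstrapping the built-in `default` pool on first query and tolerating a lost creation race (which is why the "path exist" outcome is accepted rather than treated as a failure). Pools can also be declared explicitly. A hedged sketch using the C++ SDK, assuming this build exposes the `CREATE RESOURCE POOL` DDL through a scheme query; the pool name and limits are illustrative, though the property names mirror the `ESchemeOpCreateResourcePool` proposals in this log:

```cpp
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

#include <util/generic/yexception.h>

int main() {
    auto driver = NYdb::TDriver(NYdb::TDriverConfig()
        .SetEndpoint("localhost:2135")  // placeholder
        .SetDatabase("/Root"));
    NYdb::NTable::TTableClient client(driver);
    auto session = client.GetSession().GetValueSync().GetSession();

    // Knobs corresponding to the properties carried by the proposals above
    // (concurrent_query_limit, queue_size, database_load_cpu_threshold, ...).
    auto status = session.ExecuteSchemeQuery(R"(
        CREATE RESOURCE POOL analytics WITH (
            CONCURRENT_QUERY_LIMIT = 10,
            QUEUE_SIZE = 100,
            DATABASE_LOAD_CPU_THRESHOLD = 80
        );
    )").GetValueSync();
    Y_ENSURE(status.IsSuccess(), status.GetIssues().ToString());

    driver.Stop(true);
}
```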
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:10.195267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:13.663648Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783360791915720:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:13.663704Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:03:21.559536Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783417120095003:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:21.562884Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013b5/r3tmp/tmpF7Mfbz/pdisk_1.dat 2025-03-28T09:03:21.785557Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29913, node 4 2025-03-28T09:03:21.873657Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:21.873682Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:21.873688Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:21.873828Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:03:21.887487Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:21.887599Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:21.905877Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6391 WaitRootIsUp 'Root'... 
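Editor's note: `YdbSdkSessionsPool::FailTest` and `PeriodicTask1` in this block exercise the SDK's client-side session pool rather than the server. A minimal sketch of the behavior under test, using only the public `TTableClient` session-pool settings; the cap of 2 is arbitrary, chosen so that pool-exhaustion paths are easy to provoke:

```cpp
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

#include <util/stream/output.h>

int main() {
    auto driver = NYdb::TDriver(NYdb::TDriverConfig()
        .SetEndpoint("localhost:2135")  // placeholder
        .SetDatabase("/Root"));

    // Cap the client-side pool; further GetSession calls queue or fail fast.
    auto settings = NYdb::NTable::TClientSettings()
        .SessionPoolSettings(NYdb::NTable::TSessionPoolSettings()
            .MaxActiveSessions(2));
    NYdb::NTable::TTableClient client(driver, settings);

    auto sessionResult = client.GetSession().GetValueSync();
    if (sessionResult.IsSuccess()) {
        // The session handle returns to the pool when it goes out of scope.
        auto session = sessionResult.GetSession();
        auto queryResult = session.ExecuteDataQuery(
            "SELECT 1;",
            NYdb::NTable::TTxControl::BeginTx().CommitTx()).GetValueSync();
        Cout << (queryResult.IsSuccess() ? "ok" : "failed") << Endl;
    }
    driver.Stop(true);
}
```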
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:22.216639Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalTable >> KqpPg::CreateTableBulkUpsertAndRead ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask1 [GOOD] Test command err: 2025-03-28T09:02:54.903170Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783299381408801:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:54.903225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013e3/r3tmp/tmpYEsVG9/pdisk_1.dat 2025-03-28T09:02:55.558950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:55.559075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:55.566349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:55.601583Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19633, node 1 2025-03-28T09:02:55.638923Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:02:55.638959Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:02:55.781127Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:55.781151Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:55.781180Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:55.781303Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27904 WaitRootIsUp 'Root'... 
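Editor's note: the repeated `HIVE WARN ... VolatileState: Unknown -> Disconnected -> Connecting -> Connected` lines in these bring-ups trace Hive's per-node connectivity state: each node starts `Unknown`, is first marked `Disconnected`, moves to `Connecting`, and is promoted to `Connected` once `TEvInterconnect::TEvNodeConnected` is handled (all visible in the records above). An illustrative reduction of that progression — state names are from the log, but the transition table is a simplification for reading the trace, not YDB's implementation:

```cpp
#include <cassert>

// Node connectivity states as printed by the HIVE#... VolatileState lines.
enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

// Simplified: only the forward path observed in this log.
EVolatileState Next(EVolatileState s) {
    switch (s) {
        case EVolatileState::Unknown:      return EVolatileState::Disconnected;
        case EVolatileState::Disconnected: return EVolatileState::Connecting;
        case EVolatileState::Connecting:   return EVolatileState::Connected; // on TEvNodeConnected
        case EVolatileState::Connected:    return EVolatileState::Connected;
    }
    return s;
}

int main() {
    auto s = EVolatileState::Unknown;
    s = Next(s);  // Unknown -> Disconnected
    s = Next(s);  // Disconnected -> Connecting
    s = Next(s);  // Connecting -> Connected
    assert(s == EVolatileState::Connected);
}
```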
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:56.297901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:02.008162Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783332709727775:2083];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:02.008973Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013e3/r3tmp/tmpWy1fjp/pdisk_1.dat 2025-03-28T09:03:02.539837Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:02.662288Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:02.662369Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:02.668226Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2647, node 4 2025-03-28T09:03:02.967194Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:02.967220Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:02.967228Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:02.967392Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11330 WaitRootIsUp 'Root'... 
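Editor's note: the `TKesusTest::TestSessionStealing*` traces further below show the Kesus tablet's session-ownership rule: a first `TTxSessionAttach` (cookie=12345) creates session 1, and a later attach from a different actor (cookie=23456) re-binds, i.e. "steals", the same session. Kesus tablets back YDB coordination nodes; a rough client-side sketch with the C++ SDK coordination client is below. The node path is invented for illustration, and the SDK hides the raw attach/steal handshake seen in the tablet trace:

```cpp
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_coordination/coordination.h>

int main() {
    auto driver = NYdb::TDriver(NYdb::TDriverConfig()
        .SetEndpoint("localhost:2135")  // placeholder
        .SetDatabase("/Root"));
    NYdb::NCoordination::TClient coordination(driver);

    // A coordination node is served by a Kesus tablet like the one traced below.
    coordination.CreateNode("/Root/my-coordination-node").GetValueSync();

    // Each StartSession corresponds to a Kesus session attach; re-attaching to
    // an existing session id is the "stealing" path the tests exercise.
    auto result = coordination.StartSession("/Root/my-coordination-node").GetValueSync();
    if (result.IsSuccess()) {
        auto session = result.GetResult();
        session.Close().GetValueSync();
    }
    driver.Stop(true);
}
```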
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:03.442716Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:07.012747Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486783332709727775:2083];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:07.012843Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:03:17.538031Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:03:17.538080Z node 4 :IMPORT WARN: Table profiles were not loaded >> TKesusTest::TestSessionTimeoutAfterReboot [GOOD] >> TKesusTest::TestSessionStealingSameKey >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool >> TKesusTest::TestSessionStealingSameKey [GOOD] >> TKesusTest::TestSessionStealingDifferentKey >> Cdc::OldImageLogDebezium [GOOD] >> Cdc::NewImageLogDebezium >> TKesusTest::TestSessionStealingDifferentKey [GOOD] >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] >> KqpPg::NoTableQuery+useSink |76.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |76.2%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] Test command err: 2025-03-28T09:02:33.507739Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:02:33.507883Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:02:33.529229Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:02:33.529363Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:02:33.557731Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:02:33.558329Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=3520953393722297399, session=0, seqNo=0) 2025-03-28T09:02:33.558557Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:02:33.573116Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSessionAttach::Complete (sender=[1:130:2156], cookie=3520953393722297399, session=1) 2025-03-28T09:02:33.573850Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path detach session=1 from sender=[1:130:2156], cookie=2315725116708715512 2025-03-28T09:02:33.574273Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:143:2167], cookie=15300941343206897371) 2025-03-28T09:02:33.574347Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:143:2167], cookie=15300941343206897371) 2025-03-28T09:02:34.039062Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:34.056545Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:34.447083Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:34.463543Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:34.865958Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:34.887645Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:35.258994Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:35.272825Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:35.683686Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:35.703681Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:36.114954Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:36.135399Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:36.526453Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:36.547173Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:36.967050Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:36.987369Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:37.398148Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:37.413954Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:37.887107Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:37.903758Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:38.297325Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:38.318601Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:38.743167Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:38.767382Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:39.171089Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:39.187535Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:39.615066Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:39.629058Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:40.091042Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:40.109589Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:40.519023Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:40.535474Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:40.979028Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:40.995514Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:41.391083Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:41.403550Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:41.807076Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:41.824405Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:42.259024Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:42.279812Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:42.701248Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:42.731239Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:43.107054Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:43.124497Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:43.547115Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:43.563508Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:43.979074Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:44.000913Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:44.413019Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:44.425682Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:44.821319Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:44.835368Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:45.201076Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:45.215445Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:45.599265Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:45.617217Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:45.989142Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:46.002491Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:46.445123Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:46.459823Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:46.838743Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:46.853773Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:47.245884Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:47.260113Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:47.641244Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:47.659317Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:48.034191Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:48.046598Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:48.467083Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:48.488942Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:48.888311Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:48.903689Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:49.314701Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:49.327386Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:49.738989Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:49.751927Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:50.155036Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:50.171965Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:50.615562Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:50.627930Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:51.011113Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:51.028421Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:51.441796Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:51.459582Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:51.863294Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:51.883436Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:52.295356Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:52.310657Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:52.761011Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:52.776824Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:53.183166Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:53.207740Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:53.627498Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:53.647549Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:54.041063Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:54.057549Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:54.468665Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:54.491420Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:54.942203Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:54.960641Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:55.334377Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:55.347283Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:55.740497Z node 1 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:55.757708Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:56.152389Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:56.172969Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:02:56.567364Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:02:56.581022Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTx ... k::Execute 2025-03-28T09:03:10.023352Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:10.407199Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:10.419528Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:10.777226Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:10.792537Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:11.172695Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:11.186860Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:11.585279Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:11.603294Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:11.960825Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:11.974942Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:12.410387Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:12.424240Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:12.780301Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:12.795687Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:13.152408Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:13.165897Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:13.526403Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:13.538476Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:13.927136Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:13.944648Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:14.317715Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:14.333695Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:14.698628Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:14.711118Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:15.070977Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:15.084728Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:15.459566Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:15.475545Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:15.825895Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:15.842822Z node 2 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:16.250740Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:16.262996Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:16.643105Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:16.659946Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:17.029210Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:17.052267Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:17.419868Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:17.434879Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:17.803195Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:17.815895Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:18.241535Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:18.259844Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:18.622876Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:18.636653Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:19.021135Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:19.035455Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:19.412399Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:19.426466Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:19.796966Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:19.810328Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:20.215069Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:20.231637Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:20.601422Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:20.618210Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:20.983973Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:20.996655Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:21.357352Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:21.372770Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:21.749885Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:21.765949Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:22.143124Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:22.162207Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:22.563095Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:22.576659Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:22.925139Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:22.941146Z node 2 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:23.322459Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:23.340297Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:23.709153Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:23.730180Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:24.249490Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:24.267652Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:24.649645Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:24.663622Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:25.013986Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:25.031590Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:25.431160Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:25.449133Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:25.815726Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:25.831505Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:26.199039Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:26.212782Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:26.602876Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:26.617106Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:26.990317Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:27.003140Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:27.373643Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:27.389156Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:27.734159Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:03:27.746563Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:03:28.087488Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-03-28T09:03:28.087577Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-03-28T09:03:28.103531Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-03-28T09:03:28.115231Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[2:572:2566], cookie=18071330041598306440) 2025-03-28T09:03:28.115333Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[2:572:2566], cookie=18071330041598306440) 2025-03-28T09:03:28.479011Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:03:28.479121Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:03:28.498408Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:03:28.498528Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:03:28.512784Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 
2025-03-28T09:03:28.513647Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=12345, session=0, seqNo=0) 2025-03-28T09:03:28.513800Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:03:28.537369Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=12345, session=1) 2025-03-28T09:03:28.538166Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:138:2163], cookie=23456, session=1, seqNo=0) 2025-03-28T09:03:28.555592Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:138:2163], cookie=23456, session=1) 2025-03-28T09:03:28.930624Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:03:28.930760Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:03:28.956317Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:03:28.958320Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:03:28.995525Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:03:28.996488Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:132:2158], cookie=12345, session=0, seqNo=0) 2025-03-28T09:03:28.996654Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:03:29.011663Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:132:2158], cookie=12345, session=1) 2025-03-28T09:03:29.012516Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:139:2163], cookie=23456, session=1, seqNo=0) 2025-03-28T09:03:29.025270Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:139:2163], cookie=23456, session=1) >> KqpPg::TypeCoercionInsert-useSink >> KqpPg::InsertNoTargetColumns_Simple+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Simple-useSink >> BackupRestoreS3::RestoreIndexTableSplitBoundaries [GOOD] >> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed >> KqpPg::Insert_Serial+useSink [GOOD] >> KqpPg::Insert_Serial-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] Test command err: 2025-03-28T09:02:49.335226Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783277172136440:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:49.335274Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001788/r3tmp/tmptls1p4/pdisk_1.dat 2025-03-28T09:02:50.034420Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15198, node 1 2025-03-28T09:02:50.096000Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:02:50.096030Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:02:50.203223Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
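Editor's note: this block (`BackupRestoreS3::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree`) backs up and restores a table whose `value_idx` is described at the end of this output as `Type: EIndexTypeGlobalVectorKmeansTree`. For context, such an index is declared through YQL DDL. A hedged sketch follows: the `GLOBAL USING vector_kmeans_tree` syntax and its `WITH` parameters come from YDB's vector-index feature and may differ across versions, and the distance, vector type, and dimension values here are invented rather than taken from this test:

```cpp
#include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

#include <util/generic/yexception.h>

int main() {
    auto driver = NYdb::TDriver(NYdb::TDriverConfig()
        .SetEndpoint("localhost:2135")  // placeholder
        .SetDatabase("/Root"));
    NYdb::NTable::TTableClient client(driver);
    auto session = client.GetSession().GetValueSync().GetSession();

    // Illustrative only: adds a global vector index comparable to value_idx below.
    auto status = session.ExecuteSchemeQuery(R"(
        ALTER TABLE `/Root/table` ADD INDEX value_idx
        GLOBAL USING vector_kmeans_tree
        ON (Value)
        WITH (distance = cosine, vector_type = "uint8", vector_dimension = 16);
    )").GetValueSync();
    Y_ENSURE(status.IsSuccess(), status.GetIssues().ToString());

    driver.Stop(true);
}
```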
2025-03-28T09:02:50.203249Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:50.203272Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:50.203357Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:02:50.206487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:50.206568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:50.229205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9488 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:50.549809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:53.556459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783294352006708:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:53.556612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:53.559200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783294352006720:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:53.559678Z node 1 :TX_PROXY DEBUG: actor# [1:7486783277172136677:2117] Handle TEvProposeTransaction 2025-03-28T09:02:53.559719Z node 1 :TX_PROXY DEBUG: actor# [1:7486783277172136677:2117] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T09:02:53.559766Z node 1 :TX_PROXY DEBUG: actor# [1:7486783277172136677:2117] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486783294352006723:2627] 2025-03-28T09:02:53.643859Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006723:2627] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-03-28T09:02:53.643945Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006723:2627] txid# 281474976710658 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:02:53.643965Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006723:2627] txid# 281474976710658 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-03-28T09:02:53.646265Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006723:2627] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:02:53.646358Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006723:2627] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:02:53.646532Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006723:2627] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:02:53.646970Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006723:2627] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:02:53.647033Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006723:2627] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:02:53.647186Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006723:2627] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T09:02:53.648717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:02:53.653532Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006723:2627] txid# 
281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-03-28T09:02:53.653594Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006723:2627] txid# 281474976710658 SEND to# [1:7486783294352006722:2345] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-03-28T09:02:53.673617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783294352006722:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:02:53.769990Z node 1 :TX_PROXY DEBUG: actor# [1:7486783277172136677:2117] Handle TEvProposeTransaction 2025-03-28T09:02:53.770026Z node 1 :TX_PROXY DEBUG: actor# [1:7486783277172136677:2117] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T09:02:53.770081Z node 1 :TX_PROXY DEBUG: actor# [1:7486783277172136677:2117] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7486783294352006795:2676] 2025-03-28T09:02:53.772557Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006795:2676] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-03-28T09:02:53.772614Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006795:2676] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:02:53.772627Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006795:2676] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-03-28T09:02:53.773254Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006795:2676] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:02:53.773317Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006795:2676] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:02:53.773477Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006795:2676] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:02:53.773600Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006795:2676] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:02:53.773638Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006795:2676] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-03-28T09:02:53.773752Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006795:2676] txid# 281474976710659 HANDLE EvClientConnected 2025-03-28T09:02:53.784663Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783294352006795:2676] txid# 281474976710659 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710659 Reason# Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts 
it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-03-28T09:02:53.784901Z node 1 :TX_PROXY ERROR: Actor# [1:7486783294352006795:2676] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-0 ... Z node 13 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715762 2025-03-28T09:03:27.524005Z node 13 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-28T09:03:27.982868Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7486783440846682000:2426] [0] Resolve database: name# /Root 2025-03-28T09:03:27.983294Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7486783440846682000:2426] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:03:27.983316Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7486783440846682000:2426] [0] Send request: schemeShardId# 72057594046644480 2025-03-28T09:03:27.984147Z node 13 :TX_PROXY DEBUG: [GetImport] [13:7486783440846682000:2426] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710661 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:3323" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } StartTime { seconds: 1743152606 } EndTime { seconds: 1743152607 } } 2025-03-28T09:03:27.994903Z node 13 :TX_PROXY DEBUG: actor# [13:7486783415076874852:2116] Handle TEvNavigate describe path /Root/table 2025-03-28T09:03:27.994950Z node 13 :TX_PROXY DEBUG: Actor# [13:7486783440846682006:4623] HANDLE EvNavigateScheme /Root/table 2025-03-28T09:03:27.995196Z node 13 :TX_PROXY DEBUG: Actor# [13:7486783440846682006:4623] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:03:27.995271Z node 13 :TX_PROXY DEBUG: Actor# [13:7486783440846682006:4623] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false } 2025-03-28T09:03:27.997106Z node 13 :TX_PROXY DEBUG: Actor# [13:7486783440846682006:4623] Handle TEvDescribeSchemeResult Forward to# [13:7486783440846682004:2427] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715760 CreateStep: 1743152606923 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } 
ChildrenExist: true } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Group" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableIndexes { Name: "value_idx" LocalPathId: 9 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "Value" SchemaVersion: 2 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: SIMILARITY_INNER_PRODUCT vector_type: VECTOR_TYPE_FLOAT vector_dimension: 768 } clusters: 80 levels: 2 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046644480 >> KqpPg::CreateTableSerialColumns+useSink [GOOD] >> KqpPg::CreateTableSerialColumns-useSink >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath >> TTicketParserTest::LoginEmptyTicketBad [GOOD] >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring >> KqpPg::EmptyQuery+useSink [GOOD] >> KqpPg::EmptyQuery-useSink >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions >> GroupWriteTest::ByTableName [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState [GOOD] >> ResourcePoolsDdl::TestResourcePoolAcl >> KqpWorkloadServiceDistributed::TestDistributedQueue [GOOD] >> KqpWorkloadServiceDistributed::TestNodeDisconnect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::LoginEmptyTicketBad [GOOD] Test command err: 2025-03-28T09:02:43.825197Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783250791468336:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:43.825673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ebc/r3tmp/tmpyeX8Ps/pdisk_1.dat 2025-03-28T09:02:44.591744Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:44.593764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-03-28T09:02:44.593863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:44.599829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61797, node 1 2025-03-28T09:02:44.801699Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:44.801735Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:44.801746Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:44.801896Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22091 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:45.123998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:45.155748Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:45.278888Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:02:45.294944Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:02:45.294986Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:02:45.296490Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****uqLg (EC06D552) () has now valid token of user1 2025-03-28T09:02:45.296512Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ebc/r3tmp/tmpEvffvw/pdisk_1.dat 2025-03-28T09:02:49.014812Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:49.040408Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:49.052947Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:49.053019Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:49.055023Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10507, node 2 2025-03-28T09:02:49.139256Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:49.139277Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:49.139284Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:49.139382Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18421 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:49.540094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:49.546299Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:49.630831Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:02:49.641075Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:02:49.641272Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:02:49.641998Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****M1FQ (C3EC7CD8) () has now valid token of user1 2025-03-28T09:02:49.642015Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-03-28T09:02:53.096699Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486783294964836023:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:53.097120Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ebc/r3tmp/tmpFzYbUV/pdisk_1.dat 2025-03-28T09:02:53.437386Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:53.461549Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:53.462610Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:53.465458Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26003, node 3 2025-03-28T09:02:53.636744Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:53.636763Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:53.636771Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:53.636888Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:53.997048Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:54.011492Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:54.442927Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:02:54.451162Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:02:54.451194Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:02:54.451852Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****M-IQ (BDD743BF) () has now valid token of user1 2025-03-28T09:02:54.451863Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-03-28T09:02:54.452342Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:02:58.099037Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486783294964836023:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:58.099099Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:58.139145Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****M-IQ (BDD743BF) 2025-03-28T09:02:58.139583Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****M-IQ (BDD743BF) () has now valid token of user1 2025-03-28T09:03:03.151593Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****M-IQ (BDD743BF) 2025-03-28T09:03:03.151954Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****M-IQ (BDD743BF) () has now valid token of user1 2025-03-28T09:03:04.454255Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:03:08.159653Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****M-IQ (BDD743BF) 2025-03-28T09:03:08.159998Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****M-IQ (BDD743BF) () has now valid token of user1 2025-03-28T09:03:08.405791Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:03:08.405832Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:13.171507Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****M-IQ (BDD743BF) 2025-03-28T09:03:13.171864Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****M-IQ (BDD743BF) () has now valid token of user1 2025-03-28T09:03:15.257952Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783389306482943:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:15.258149Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ebc/r3tmp/tmpIHoyO8/pdisk_1.dat 2025-03-28T09:03:15.531614Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:15.563693Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:15.563831Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:15.567521Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9617, node 4 2025-03-28T09:03:15.697959Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:15.697986Z node 
4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:15.698011Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:15.698120Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:16.051836Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:16.205592Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:03:16.215080Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:03:16.215118Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:03:16.215977Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****SgHA (0E946861) () has now valid token of user1 2025-03-28T09:03:16.216003Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A4 success 2025-03-28T09:03:16.216442Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:03:20.258434Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486783389306482943:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:20.258517Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:03:20.273721Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****SgHA (0E946861) 2025-03-28T09:03:20.274070Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****SgHA (0E946861) () has now permanent error message 'User not found' 2025-03-28T09:03:24.282794Z node 4 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****SgHA (0E946861) 2025-03-28T09:03:27.036546Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486783439707876770:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:27.036755Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ebc/r3tmp/tmpLTGHd4/pdisk_1.dat 2025-03-28T09:03:27.207944Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:27.242612Z node 5 :HIVE WARN: 
HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:27.243189Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:27.245997Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8244, node 5 2025-03-28T09:03:27.321062Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:27.321080Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:27.321086Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:27.321179Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:27.673461Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:03:27.684370Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:03:27.781824Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:03:27.812903Z node 5 :TICKET_PARSER ERROR: Ticket **** (00000000): Ticket is empty >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] >> TBlobStorageWardenTest::TestHttpMonPage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::ByTableName [GOOD] Test command err: RandomSeed# 3334857348475294642 2025-03-28T09:02:59.564339Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058428954028033 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-03-28T09:02:59.593473Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-03-28T09:02:59.593548Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 going to send TEvBlock {TabletId# 72058428954028033 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-03-28T09:02:59.596956Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-03-28T09:02:59.613162Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:02:59.616091Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-03-28T09:03:12.739406Z 4 00h01m11.510512s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:3:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 1106 2025-03-28T09:03:12.787330Z 5 00h01m11.610512s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:4:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 1100 2025-03-28T09:03:25.852132Z 2 00h01m23.910512s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:1:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 2644 2025-03-28T09:03:32.441490Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-28T09:03:32.441610Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:03:32.441670Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-28T09:03:32.441715Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 
2025-03-28T09:03:32.517238Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} 2025-03-28T09:03:32.517357Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 33 Channel# 0 Status# OK} >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits [GOOD] >> KqpWorkloadService::TestLargeConcurrentQueryLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::PeriodicTask10 [GOOD] Test command err: 2025-03-28T09:03:07.912678Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783353647131786:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:07.912775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013d4/r3tmp/tmpPMTHEw/pdisk_1.dat 2025-03-28T09:03:08.489476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:08.489595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:08.491702Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:08.502933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4482, node 1 2025-03-28T09:03:08.728156Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:08.728184Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:08.728197Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:08.728308Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9260 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:03:09.092755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:12.913058Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783353647131786:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:12.913129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:03:23.495162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:03:23.495192Z node 1 :IMPORT WARN: Table profiles were not loaded >> KqpPg::NoTableQuery+useSink [GOOD] >> KqpPg::NoTableQuery-useSink >> KqpWorkloadService::TestQueueSizeManyQueries [GOOD] >> KqpWorkloadService::TestZeroQueueSize >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] Test command err: 2025-03-28T09:03:32.217838Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:32.219898Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:32.222027Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:32.222206Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:32.227289Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:32.227408Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001621/r3tmp/tmpou0Isp/pdisk_1.dat 2025-03-28T09:03:32.949521Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] bootstrap ActorId# [1:545:2463] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1294:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:03:32.949695Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:32.949761Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:32.949789Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:32.949813Z node 1 :BS_PROXY_PUT DEBUG: 
[5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:32.949837Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:32.949862Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Id# [72057594037932033:2:8:0:0:1294:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:32.949915Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] restore Id# [72057594037932033:2:8:0:0:1294:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:03:32.949988Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1294:1] Marker# BPG33 2025-03-28T09:03:32.950031Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1294:1] Marker# BPG32 2025-03-28T09:03:32.950066Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1294:2] Marker# BPG33 2025-03-28T09:03:32.950089Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1294:2] Marker# BPG32 2025-03-28T09:03:32.950117Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1294:3] Marker# BPG33 2025-03-28T09:03:32.950143Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1294:3] Marker# BPG32 2025-03-28T09:03:32.950335Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:65:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1294:3] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:32.950395Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1294:2] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:32.950436Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1294:1] FDS# 1294 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:32.959905Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1294:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-28T09:03:32.960133Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1294:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90188 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2025-03-28T09:03:32.960243Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1294:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90188 
ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2025-03-28T09:03:32.960330Z node 1 :BS_PROXY_PUT DEBUG: [5a9a1d6240d04444] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1294:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2025-03-28T09:03:32.960411Z node 1 :BS_PROXY_PUT INFO: [5a9a1d6240d04444] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1294:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:03:32.960605Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.099 sample PartId# [72057594037932033:2:8:0:0:1294:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.099 sample PartId# [72057594037932033:2:8:0:0:1294:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.099 sample PartId# [72057594037932033:2:8:0:0:1294:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 10.627 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 10.801 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 10.914 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2025-03-28T09:03:33.023417Z node 1 :BS_PROXY_PUT INFO: [8d27cf9df52bfb78] bootstrap ActorId# [1:590:2500] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:229:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:03:33.023577Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:33.023616Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:33.023642Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:33.023668Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:33.023694Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:33.023720Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:229:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:33.023760Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] restore Id# [72057594037932033:2:9:0:0:229:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:03:33.023854Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG33 2025-03-28T09:03:33.023896Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:229:1] Marker# BPG32 2025-03-28T09:03:33.023938Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record 
partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG33 2025-03-28T09:03:33.023964Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:229:2] Marker# BPG32 2025-03-28T09:03:33.023993Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG33 2025-03-28T09:03:33.024019Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:229:3] Marker# BPG32 2025-03-28T09:03:33.024172Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:3] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:33.024230Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:2] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:33.024277Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:72:2098] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:229:1] FDS# 229 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:33.033436Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2025-03-28T09:03:33.033724Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:2:0] Marker# BPP01 2025-03-28T09:03:33.033845Z node 1 :BS_PROXY_PUT DEBUG: [8d27cf9df52bfb78] received {EvVPutResult Status# OK ID# [72057594037932033:2:9:0:0:229:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 81803 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [ ... 
ituation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:33.093923Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:03:33.093976Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2025-03-28T09:03:33.094024Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2025-03-28T09:03:33.094145Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:595:2504] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:33.105241Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-03-28T09:03:33.105395Z node 1 :BS_PROXY_PUT DEBUG: [f913878b3da83702] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2025-03-28T09:03:33.105465Z node 1 :BS_PROXY_PUT INFO: [f913878b3da83702] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:03:33.105595Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.547 sample PartId# [1234:2:0:0:0:5:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 11.692 VDiskId# [82000002:1:0:0:0] NodeId# 1 Status# OK } ] } 2025-03-28T09:03:33.106222Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:03:33.106270Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2025-03-28T09:03:33.106380Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2025-03-28T09:03:33.107012Z node 2 :BS_NODE ERROR: {NW19@node_warden_group.cpp:211} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/2909/001621/r3tmp/tmpou0Isp//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2025-03-28T09:03:33.107867Z node 2 :BS_PROXY INFO: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# true Marker# DSP02 2025-03-28T09:03:33.107904Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:03:33.109700Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:606:2105] Create Queue# [2:608:2106] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:33.109832Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:606:2105] Create Queue# [2:609:2107] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:33.109957Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:606:2105] Create Queue# [2:610:2108] targetNodeId# 1 Marker# DSP01 
2025-03-28T09:03:33.110089Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:606:2105] Create Queue# [2:611:2109] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:33.110189Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:606:2105] Create Queue# [2:612:2110] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:33.110289Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:606:2105] Create Queue# [2:613:2111] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:33.110387Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Actor# [2:606:2105] Create Queue# [2:614:2112] targetNodeId# 1 Marker# DSP01 2025-03-28T09:03:33.110409Z node 2 :BS_PROXY INFO: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:03:33.120097Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:33.120306Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:33.120405Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:33.120572Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:33.120656Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:33.120730Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:33.120782Z node 2 :BS_PROXY DEBUG: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:1:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257 GType# none}} Duration# 0.000000s Marker# DSP04 2025-03-28T09:03:33.120810Z node 2 :BS_PROXY INFO: Group# 2181038082 -> StateWork Marker# DSP11 2025-03-28T09:03:33.120847Z node 2 :BS_PROXY INFO: Group# 2181038082 
SetStateWork Marker# DSP15 2025-03-28T09:03:33.121015Z node 2 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] bootstrap ActorId# [2:615:2113] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2025-03-28T09:03:33.121067Z node 2 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:1:0:0:0] node# 1 Marker# DSPB03 2025-03-28T09:03:33.121259Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:608:2106] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 2263221123408805363 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2025-03-28T09:03:33.127312Z node 2 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Handle TEvVBlockResult status# OK From# [82000002:1:0:0:0] NodeId# 1 Marker# DSPB01 2025-03-28T09:03:33.127407Z node 2 :BS_PROXY_BLOCK DEBUG: [d70ef3c23a1a2346] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2025-03-28T09:03:33.127775Z node 2 :BS_PROXY INFO: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-03-28T09:03:33.127973Z node 2 :BS_PROXY DEBUG: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2025-03-28T09:03:33.128305Z node 1 :BS_PROXY_PUT INFO: [91379e686f748e92] bootstrap ActorId# [1:616:2514] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2025-03-28T09:03:33.128450Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:03:33.128500Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:03:33.128555Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2025-03-28T09:03:33.128601Z node 1 :BS_PROXY_PUT DEBUG: [91379e686f748e92] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2025-03-28T09:03:33.128728Z node 1 :BS_PROXY DEBUG: Send to queueActorId# [1:595:2504] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:03:33.128958Z node 1 :BS_VDISK_PUT ERROR: VDISK[82000002:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2025-03-28T09:03:33.129225Z node 1 :BS_PROXY_PUT INFO: [91379e686f748e92] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:1:0:0:0] Marker# BPP01 2025-03-28T09:03:33.129311Z node 1 :BS_PROXY_PUT ERROR: [91379e686f748e92] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2025-03-28T09:03:33.129364Z node 1 :BS_PROXY_PUT NOTICE: [91379e686f748e92] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:1:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:03:33.129467Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 2181038082 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.593 sample PartId# [1234:2:0:0:0:11:1] QueryCount# 1 VDiskId# [82000002:1:0:0:0] NodeId# 1 } ] } 2025-03-28T09:03:33.129824Z node 2 :BS_PROXY DEBUG: Send to queueActorId# [2:608:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> TSchemeShardMoveTest::MoveIndex >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource >> KqpPg::InsertNoTargetColumns_Simple-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial-useSink >> TSchemeShardMoveTest::MoveIndexSameDst >> Cdc::NewImageLogDebezium [GOOD] >> Cdc::NaN[PqRunner] >> 
TBlobStorageWardenTest::TestHttpMonPage [GOOD] >> KqpPg::Insert_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText+useSink >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] Test command err: 2025-03-28T09:02:29.347386Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783192503131606:2157];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001584/r3tmp/tmpn7sNmQ/pdisk_1.dat 2025-03-28T09:02:29.715436Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:02:29.744178Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:02:29.867937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:29.907247Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:02:30.673274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:30.673426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:30.679296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:30.679389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:30.692642Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:02:30.692789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:30.699459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:30.712568Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14567, node 1 2025-03-28T09:02:31.125154Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001584/r3tmp/yandexbs6jZ0.tmp 2025-03-28T09:02:31.125182Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001584/r3tmp/yandexbs6jZ0.tmp 2025-03-28T09:02:31.125308Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001584/r3tmp/yandexbs6jZ0.tmp 2025-03-28T09:02:31.125429Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:02:31.249555Z INFO: TTestServer started on Port 5847 GrpcPort 14567 TClient is connected to server localhost:5847 PQClient connected to localhost:14567 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:31.777695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:02:31.889961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:02:32.287289Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:34.326100Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486783192503131606:2157];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:34.326196Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:36.027337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783216333593480:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:36.027574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:36.028109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783220628560805:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:36.032394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:02:36.077230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783220628560808:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:02:36.774902Z node 1 :TX_PROXY ERROR: Actor# [1:7486783220628560896:2776] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:36.808782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:37.156644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:37.214336Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486783220628560906:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:02:37.216162Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTljMDNkN2ItNzA0Mjk0NDgtYTlkNjA2ZmQtY2JmNzU5NjY=, ActorId: [1:7486783216333593467:2337], ActorState: ExecuteState, TraceId: 01jqdzt9yzf303tvpz7ejcmkq0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:02:37.224599Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:02:37.368389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:02:37.753258Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqdztbf562n29yvdvvhdrkm4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWFhOTI5YmQtNWYzODI2YzktMjE0YzQyN2UtODU1YzBiZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486783224923528658:3117] === CheckClustersList. 
Ok 2025-03-28T09:02:44.207914Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486783190563788826:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:02:44.208153Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486783190563788826:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-03-28T09:02:44.208247Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486783190563788826:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486783190563788895:2152] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743152551868 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:02:44.208330Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486783190563788826:2127], cacheItem# { Subscriber: { Subscriber: [1:7486783190563788895:2152] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743152551868 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 14 IsSync: true Partial: 0 } 2025-03-28T09:02:44.208486Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486783254988300186:3409], recipient# [1:7486783254988300185:3408], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 7205759 ... 
sion v1 cookie: 1 sessionId: 123|4c8b389a-b9cf9d7a-d4b7543f-635aeeb2_0 grpc read done: success: 0 data: 2025-03-28T09:03:32.395046Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|4c8b389a-b9cf9d7a-d4b7543f-635aeeb2_0 grpc read failed 2025-03-28T09:03:32.395079Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|4c8b389a-b9cf9d7a-d4b7543f-635aeeb2_0 grpc closed 2025-03-28T09:03:32.395091Z node 7 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 123|4c8b389a-b9cf9d7a-d4b7543f-635aeeb2_0 is DEAD 2025-03-28T09:03:32.395559Z node 7 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:03:32.395641Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_11084424359011136232_v1 2025-03-28T09:03:32.395658Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_11084424359011136232_v1 2025-03-28T09:03:32.395674Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: some@random@consumer_7_1_11084424359011136232_v1 2025-03-28T09:03:32.395801Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [7:7486783463259193332:2521] destroyed 2025-03-28T09:03:32.395847Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-28T09:03:32.486000Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7486783411719582882:2127], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:03:32.486151Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7486783411719582882:2127], cacheItem# { Subscriber: { Subscriber: [7:7486783416014550811:2557] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:03:32.486232Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7486783463259193404:4272], recipient# [7:7486783463259193403:2555], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:03:32.508683Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7486783411898467167:2101], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false 
SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:03:32.508856Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [8:7486783411898467167:2101], cacheItem# { Subscriber: { Subscriber: [8:7486783416193434573:2149] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:03:32.508957Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7486783463438075259:2431], recipient# [8:7486783463438075258:2356], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:03:32.640798Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7486783411898467167:2101], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:03:32.640968Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [8:7486783411898467167:2101], cacheItem# { Subscriber: { Subscriber: [8:7486783416193434573:2149] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:03:32.641078Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7486783463438075261:2432], recipient# [8:7486783463438075260:2357], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:03:32.725045Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [7:7486783411719582882:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 
DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-28T09:03:32.725143Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [7:7486783411719582882:2127], cacheItem# { Subscriber: { Subscriber: [7:7486783433194420524:2903] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743152605782 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:03:32.725188Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [7:7486783411719582882:2127], cacheItem# { Subscriber: { Subscriber: [7:7486783433194420326:2768] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 16 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743152605481 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:03:32.725433Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7486783463259193414:4277], recipient# [7:7486783463259193413:2519], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-28T09:03:32.726407Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7486783411719582882:2127], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:03:32.726508Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7486783411719582882:2127], cacheItem# { Subscriber: { Subscriber: [7:7486783416014550677:2449] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 27 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743152601379 
PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:03:32.726659Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7486783463259193417:4278], recipient# [7:7486783463259193416:2556], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 [GOOD] >> TGenCompaction::OverloadFactorDuringForceCompaction >> KqpPg::EmptyQuery-useSink [GOOD] >> KqpPg::DuplicatedColumns+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] Test command err: 2025-03-28T09:03:33.344542Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:33.346630Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:33.347196Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:33.348291Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:33.349158Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:33.349226Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:33.349259Z node 1 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00160e/r3tmp/tmpWrvDPV/pdisk_1.dat 2025-03-28T09:03:34.645565Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 
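The BS_SYNCLOG warnings above show a VDisk noticing that a peer's sync-log GUID differs from the one it remembered and downgrading to full recovery, resetting the stored sync state to [0 0]. Below is a minimal sketch of that decision; all names and types are hypothetical stand-ins, not YDB's actual sync-log API, which carries far more state:

```cpp
#include <cstdint>
#include <iostream>

// Hypothetical model of the per-peer state printed as "oldSyncState# [0 0]":
// a (guid, last-synced-lsn) pair.
struct TSyncState {
    uint64_t Guid = 0;  // identity of the peer's sync-log incarnation
    uint64_t Lsn = 0;   // how far we have synced from that peer
};

enum class EReadResult { Incremental, FullRecover };

// If the peer presents a guid different from the remembered one, its sync
// log was recreated; incremental sync is unsafe, so reset state and do a
// full recovery ("FULL_RECOVER(unequal guid)" in the log).
EReadResult HandleSyncLogRead(TSyncState& remembered, uint64_t peerGuid) {
    if (remembered.Guid != peerGuid) {
        remembered = TSyncState{peerGuid, 0};  // back to the "[0 0]" state
        return EReadResult::FullRecover;
    }
    return EReadResult::Incremental;
}

int main() {
    TSyncState st{42, 1000};
    auto r = HandleSyncLogRead(st, 43);  // guid changed -> full recovery
    std::cout << (r == EReadResult::FullRecover ? "FULL_RECOVER" : "INCREMENTAL")
              << " lsn=" << st.Lsn << "\n";  // prints: FULL_RECOVER lsn=0
}
```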
2025-03-28T09:03:34.645707Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:34.645803Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:34.647149Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:34.650162Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:34.651310Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:3:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:03:34.651922Z node 2 :BS_SYNCLOG WARN: VDISK[2000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00160e/r3tmp/tmpjmW2R9/pdisk_1.dat >> TSchemeShardMoveTest::MoveTableForBackup >> TSchemeShardMoveTest::TwoTables >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath [GOOD] >> KqpWorkloadService::TestZeroQueueSizeManyQueries >> KqpPg::TypeCoercionBulkUpsert [GOOD] >> KqpPg::TypeCoercionInsert+useSink >> KqpPg::CreateTableSerialColumns-useSink [GOOD] >> KqpPg::DropIndex >> TSchemeShardMoveTest::MoveIndexSameDst [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex >> TSchemeShardMoveTest::MoveIndex [GOOD] >> TSchemeShardMoveTest::MoveIndexDoesNonExisted >> TGenCompaction::OverloadFactorDuringForceCompaction [GOOD] >> TGenCompaction::ForcedCompactionNoGenerations >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions >> TGenCompaction::ForcedCompactionNoGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithFinalParts [GOOD] >> TGenCompaction::ForcedCompactionByDeletedRows [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccData [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataRestart [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataBorrowed [GOOD] >> TIterator::Basics >> TIterator::Basics [GOOD] >> TIterator::External [GOOD] >> TSchemeShardMoveTest::MoveTableForBackup [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] >> TSchemeShardMoveTest::Chain >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables [GOOD] >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp >> TSchemeShardMoveTest::TwoTables [GOOD] >> TSchemeShardMoveTest::MoveMigratedTable >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: 
[1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:35.248364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:35.248457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:35.248493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:35.248529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:35.248592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:35.248620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:35.248687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:35.248757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:35.249077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:35.334591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:35.334641Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:35.363252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:35.363602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:35.363769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:35.391297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:35.391573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:35.392230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:35.392443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:35.394652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:35.395942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:35.396002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:35.396169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:35.396227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:35.396269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
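The TTxServerlessStorageBilling records just above show the billing transaction executing and immediately bailing out because the domain is not a serverless database. A hedged sketch of what such a guard can look like — the field names echo the DomainKey/ResourcesDomainKey pairs printed elsewhere in this log, but the check itself is an assumption for illustration, not schemeshard's actual code:

```cpp
#include <cstdint>
#include <iostream>
#include <optional>
#include <string>

// Hypothetical shape of the check behind "unable to make a bill, domain is
// not a serverless db": a serverless database points at a *separate*
// resources domain; a plain domain does not, so billing is skipped.
struct TDomainInfo {
    uint64_t DomainKey;
    std::optional<uint64_t> ResourcesDomainKey;
};

bool TryMakeBill(const TDomainInfo& d, std::string& why) {
    if (!d.ResourcesDomainKey || *d.ResourcesDomainKey == d.DomainKey) {
        why = "domain is not a serverless db";  // matches the NOTICE above
        return false;
    }
    return true;  // a real implementation would meter and emit the bill here
}

int main() {
    TDomainInfo root{1, 1};  // ResourcesDomainKey == DomainKey -> not serverless
    std::string why;
    if (!TryMakeBill(root, why))
        std::cout << "unable to make a bill, " << why << "\n";
}
```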
2025-03-28T09:03:35.396365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:35.405437Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:35.554717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:35.554970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:35.555263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:35.555563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:35.555626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:35.561799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:35.561980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:35.562213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:35.562284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:35.562334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:35.562367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:35.567675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:35.567749Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:35.567791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:35.570364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:35.570448Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:35.570523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:35.570574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:35.574438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:35.579914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:35.580166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:35.581183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:35.581344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:35.581396Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:35.581681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:35.581743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:35.581922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:35.582006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:35.584248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:35.584307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:35.584498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:35.584540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:35.584925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:35.584988Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:35.585081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:35.585113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:35.585147Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:35.585176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:35.585215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:35.585256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
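The "Change state for txid 1:0" lines above trace a single schemeshard operation through the states 2 -> 3 -> 128 -> 240, with the neighbouring messages naming the phases (TCreateParts, TConfigureParts, TPropose, TDone). The toy driver below reproduces that progression; the numeric codes are copied from the log, while the mapping and the loop are illustrative, not schemeshard's real progress machinery:

```cpp
#include <cstdio>

// Codes taken from the "Change state for txid" lines; phase names inferred
// from the surrounding log messages. The real state machine has more states.
enum ETxState : int {
    CreateParts    = 2,    // create shards ("no shards to create" here)
    ConfigureParts = 3,    // push configuration to the shards
    Propose        = 128,  // ask the coordinator to plan the transaction
    Done           = 240,  // all parts finished
};

ETxState Next(ETxState s) {
    switch (s) {
        case CreateParts:    return ConfigureParts;  // "do next state"
        case ConfigureParts: return Propose;
        case Propose:        return Done;  // after TEvOperationPlan arrives
        default:             return Done;
    }
}

int main() {
    for (ETxState s = CreateParts; s != Done;) {
        ETxState n = Next(s);
        std::printf("Change state for txid 1:0 %d -> %d\n", s, n);
        s = n;
    }
}
```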
2025-03-28T09:03:35.585287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:35.585313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:35.585379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:35.585411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:35.585439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:35.588794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:35.588953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:35.588991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... t: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:37.674269Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:37.674418Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 173us result status StatusSuccess 2025-03-28T09:03:37.674815Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:37.675329Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Sync" Options { 
ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:03:37.675554Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Sync" took 242us result status StatusSuccess 2025-03-28T09:03:37.676145Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Sync" PathDescription { Self { Name: "Sync" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:37.676687Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:03:37.676877Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Async" took 191us result status StatusSuccess 2025-03-28T09:03:37.677551Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Async" PathDescription { Self { Name: "Async" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 5 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 5 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" 
TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpPg::NoTableQuery-useSink [GOOD] >> KqpPg::PgCreateTable |76.3%| [TA] $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::TwoTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:36.949209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:36.949313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:36.949369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:36.949414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:36.949474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:36.949505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:36.949583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:36.949707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:36.950022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:37.058175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:37.058233Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:37.095127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:37.095476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:37.095640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:37.103370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:37.103656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:37.104334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:37.104569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:37.106760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:37.108191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:37.108292Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:37.108515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 
2025-03-28T09:03:37.108586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:37.108632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:37.108739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:37.117128Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:37.274981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:37.275203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:37.275424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:37.275705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:37.275780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:37.287829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:37.287963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:37.288176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:37.288234Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:37.288275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:37.288324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:37.294697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:37.294781Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:37.294827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:37.298072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:37.298121Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:37.298185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-03-28T09:03:37.298238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:37.301768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:37.303805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:37.304033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:37.305088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:37.305227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:37.305274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:37.305549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:37.305597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:37.305778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:37.305894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:37.308586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:37.308634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:37.308810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:37.308848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:37.309247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:37.309302Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:37.309402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:37.309445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:37.309486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:37.309512Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:37.309560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:37.309596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:37.309626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:37.309668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:37.309732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:37.309765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:37.309798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:37.311622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:37.311724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:37.311755Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... and all the parts is done, operation id: 103:1 2025-03-28T09:03:37.875828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-03-28T09:03:37.875884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-28T09:03:37.875908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:03:37.876230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:03:37.876278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:03:37.876339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-28T09:03:37.876376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:03:37.876414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:03:37.881178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:03:37.881249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:502:2462] 2025-03-28T09:03:37.881337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-03-28T09:03:37.881939Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:37.882171Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 235us result status StatusPathDoesNotExist 2025-03-28T09:03:37.882333Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:03:37.882867Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:37.883062Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove1" took 202us result status StatusSuccess 2025-03-28T09:03:37.883426Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove1" PathDescription { Self { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 
DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:37.884127Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:37.884279Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table2" took 157us result status StatusPathDoesNotExist 2025-03-28T09:03:37.884408Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:03:37.884820Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:37.885071Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove2" took 251us result status StatusSuccess 2025-03-28T09:03:37.885396Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove2" PathDescription { Self { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { 
Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:37.886094Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:37.886290Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 174us result status StatusSuccess 2025-03-28T09:03:37.886662Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TIterator::External [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:02:55.644280Z 00000.007 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.010 II| FAKE_ENV: Starting storage for BS group 0 00000.011 II| FAKE_ENV: Starting storage for BS group 1 00000.011 II| FAKE_ENV: Starting storage for BS group 2 00000.011 II| FAKE_ENV: Starting 
storage for BS group 3 00000.016 II| TABLET_FLATBOOT: Leader{1:2:-} booting Deps{0:0 entries 0} {nil} 00000.016 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 1, has 1 jobs, Boot{ 2 que, 2 refs } 00000.016 II| TABLET_FLATBOOT: Leader{1:2:-} loading { Alter 0, Turns 0, Loans 0, GCExt 0 } 00000.016 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 2, has 4 jobs, Boot{ 5 que, 2 refs } 00000.016 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 3, has 1 jobs, Boot{ 2 que, 2 refs } 00000.016 II| TABLET_FLATBOOT: Leader{1:2:-} redo log has 0 records, last before 0:0 00000.016 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 4, has 1 jobs, Boot{ 2 que, 2 refs } 00000.016 II| TABLET_FLATBOOT: Leader{1:2:-} result: db change {1 -> 1} snap on 0 00000.016 DD| TABLET_FLATBOOT: Leader{1:2:-} fired stage 5, has 0 jobs, Boot{ 1 que, 2 refs } 00000.017 II| TABLET_FLATBOOT: Leader{1:2:-} booting completed, took 0.000s 00000.086 II| TABLET_FLATBOOT: Leader{1:3:-} booting Deps{2:1 entries 252} {nil} 00000.086 DD| TABLET_FLATBOOT: Leader{1:3:-} fired stage 1, has 1 jobs, Boot{ 2 que, 2 refs } 00000.086 DD| TABLET_FLATBOOT: Leader{1:3:-} snap in deps on 2:1, TLargeGlobId{[1:2:1:1:28672:35:0] ~35b, grp 1} 00000.086 DD| TABLET_FLATBOOT: Leader{1:3:-} process snap gc entry, + [ ], - [ ] 00000.086 DD| TABLET_FLATBOOT: Leader{1:3:-} Loading TLargeGlobId{[1:2:1:1:28672:35:0] ~35b, grp 1} 00000.086 II| TABLET_FLATBOOT: Leader{1:3:-} snap on 2:1 change 1, 25b, ABI 28 of [1, 28], GC{ +0 -0 } 00000.086 DD| TABLET_FLATBOOT: Leader{1:3:-} process gc snapshot, + [ ], - [ ] 00000.086 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:2:1:8192:209:0] ], - [ ] 00000.086 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:3:1:24576:74:0] ], - [ ] 00000.086 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:4:1:24576:79:0] ], - [ ] 00000.086 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:5:1:24576:81:0] ], - [ ] 00000.086 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:6:1:24576:81:0] ], - [ ] 00000.086 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:7:1:24576:81:0] ], - [ ] 00000.088 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:8:1:24576:79:0] ], - [ ] 00000.088 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:9:1:24576:79:0] ], - [ ] 00000.088 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:10:1:24576:81:0] ], - [ ] 00000.088 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:11:1:24576:81:0] ], - [ ] 00000.088 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:12:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:13:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:14:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:15:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:16:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:17:1:24576:79:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:18:1:24576:79:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:19:1:24576:79:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:20:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, 
+ [ [1:2:21:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:22:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:23:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:24:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:25:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:26:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:27:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:28:1:24576:79:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:29:1:24576:79:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:30:1:24576:79:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:31:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:32:1:24576:79:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:33:1:24576:83:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:34:1:24576:82:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:35:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:36:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:37:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:38:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:39:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:40:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:41:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:42:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:43:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:44:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:45:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:46:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:47:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:48:1:24576:79:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:49:1:24576:79:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:50:1:24576:82:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:51:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:52:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:53:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:54:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:55:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} 
process log gc entry, + [ [1:2:56:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:57:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:58:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:59:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:60:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:61:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:62:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:63:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:64:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:65:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:66:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:67:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:68:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:69:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:70:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:71:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:72:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:73:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:74:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:75:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:76:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:77:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:78:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:79:1:24576:81:0] ], - [ ] 00000.089 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:80:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:81:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:82:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:83:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:84:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:85:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:86:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:87:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:88:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:89:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:90:1:24576:81:0] ], - [ ] 00000.090 DD| 
TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:91:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:92:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:93:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:94:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:95:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:96:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:97:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:98:1:24576:81:0] ], - [ ] 00000.090 DD| TABLET_FLATBOOT: Leader{1:3:-} process log gc entry, + [ [1:2:99:1:24576:81:0] ], - ... 24576:100:0], [1:2:19:1:24576:97:0], [1:2:20:1:24576:96:0], [1:2:21:1:24576:97:0], [1:2:22:1:24576:97:0], [1:2:23:1:24576:97:0], [1:2:24:1:24576:97:0], [1:2:25:1:24576:97:0], [1:2:26:1:24576:97:0], [1:2:27:1:24576:97:0], [1:2:28:1:24576:96:0], [1:2:29:1:24576:100:0], [1:2:30:1:24576:97:0], [1:2:31:1:24576:96:0], [1:2:32:1:24576:96:0], [1:2:33:1:24576:104:0], [1:2:34:1:24576:97:0], [1:2:35:1:24576:99:0], [1:2:36:1:24576:97:0], [1:2:37:1:24576:97:0], [1:2:38:1:24576:97:0], [1:2:39:1:24576:97:0], [1:2:40:1:24576:97:0], [1:2:41:1:24576:97:0], [1:2:42:1:24576:97:0], [1:2:43:1:24576:97:0], [1:2:44:1:24576:97:0], [1:2:45:1:24576:97:0], [1:2:46:1:24576:97:0], [1:2:47:1:24576:97:0], [1:2:48:1:24576:97:0], [1:2:49:1:24576:97:0], [1:2:50:1:24576:97:0], [1:2:51:1:24576:97:0], [1:2:52:1:24576:97:0], [1:2:53:1:24576:97:0], [1:2:54:1:24576:97:0], [1:2:55:1:24576:97:0], [1:2:56:1:24576:97:0], [1:2:57:1:24576:97:0], [1:2:58:1:24576:97:0], [1:2:59:1:24576:97:0], [1:2:60:1:24576:97:0], [1:2:61:1:24576:97:0], [1:2:62:1:24576:97:0], [1:2:63:1:24576:97:0], [1:2:64:1:24576:97:0], [1:2:65:1:24576:97:0], [1:2:66:1:24576:97:0], [1:2:67:1:24576:97:0], [1:2:68:1:24576:97:0], [1:2:69:1:24576:97:0], [1:2:70:1:24576:97:0], [1:2:71:1:24576:97:0], [1:2:72:1:24576:97:0], [1:2:73:1:24576:101:0], [1:2:74:1:24576:102:0], [1:2:75:1:24576:101:0], [1:2:76:1:24576:102:0], [1:2:77:1:24576:104:0], [1:2:78:1:24576:104:0], [1:2:79:1:24576:104:0], [1:2:80:1:24576:104:0], [1:2:81:1:24576:103:0], [1:2:82:1:24576:101:0], [1:2:83:1:24576:104:0], [1:2:84:1:24576:104:0], [1:2:85:1:24576:104:0], [1:2:86:1:24576:104:0], [1:2:87:1:24576:104:0], [1:2:88:1:24576:104:0], [1:2:89:1:24576:104:0], [1:2:90:1:24576:101:0], [1:2:91:1:24576:104:0], [1:2:92:1:24576:104:0], [1:2:93:1:24576:98:0], [1:2:94:1:24576:104:0], [1:2:95:1:24576:104:0], [1:2:96:1:24576:104:0], [1:2:97:1:24576:104:0], [1:2:98:1:24576:104:0], [1:2:99:1:24576:104:0], [1:2:100:1:24576:104:0], [1:2:101:1:24576:97:0], [1:2:102:1:24576:100:0], [1:2:103:1:24576:104:0], [1:2:104:1:24576:104:0], [1:2:105:1:24576:104:0], [1:2:106:1:24576:104:0], [1:2:107:1:24576:104:0], [1:2:108:1:24576:104:0], [1:2:109:1:24576:104:0], [1:2:110:1:24576:104:0], [1:2:111:1:24576:104:0], [1:2:112:1:24576:104:0], [1:2:113:1:24576:104:0], [1:2:114:1:24576:104:0], [1:2:115:1:24576:104:0], [1:2:116:1:24576:104:0], [1:2:117:1:24576:104:0], [1:2:118:1:24576:104:0], [1:2:119:1:24576:104:0], [1:2:120:1:24576:104:0], [1:2:121:1:24576:104:0], [1:2:122:1:24576:104:0], [1:2:123:1:24576:104:0], [1:2:124:1:24576:104:0], [1:2:125:1:24576:104:0], [1:2:126:1:24576:104:0], [1:2:127:1:24576:104:0], [1:2:128:1:24576:104:0], 
[1:2:129:1:24576:104:0], [1:2:130:1:24576:104:0], [1:2:131:1:24576:104:0], [1:2:132:1:24576:104:0], [1:2:133:1:24576:104:0], [1:2:134:1:24576:104:0], [1:2:135:1:24576:104:0], [1:2:136:1:24576:104:0], [1:2:137:1:24576:104:0], [1:2:138:1:24576:104:0], [1:2:139:1:24576:104:0], [1:2:140:1:24576:104:0], [1:2:141:1:24576:104:0], [1:2:142:1:24576:104:0], [1:2:145:1:24576:60:0], [1:2:146:1:24576:60:0] } 00000.182 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] owner [35:212:2237] class Online from cache [ ] already requested [ ] to request [ 22 23 24 25 ] 00000.182 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:143:1:12288:758:0] status OK pages [ 22 23 24 25 ] 00000.183 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.183 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 1880b, wait} done, Waste{2:0, 141856b +(140, 14018b), 146 trc} 00000.184 DD| TABLET_SAUSAGECACHE: Attach page collection [1:2:143:1:12288:758:0] owner [35:212:2237] 00000.185 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] owner [35:212:2237] class AsyncLoad from cache [ ] already requested [ 22 23 24 25 ] to request [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 ] 00000.185 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:143:1:12288:758:0] async queue pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 ] 00000.185 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 00000.185 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:143:1:12288:758:0] owner [35:212:2237] pages [ 22 23 24 25 ] 00000.186 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:143:1:12288:758:0] status OK pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 ] 00000.186 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{26 pages [1:2:143:1:12288:758:0] ok OK}, category 2 00000.186 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:143:1:12288:758:0] owner [35:212:2237] pages [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 ] 00000.187 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan 00000.187 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.188 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} hope 1 -> done Change{145, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.188 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_StickyPages::TTxFullScan} release 4194304b of static, Memory{0 dyn 0} 00000.188 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.189 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 141856b +(0, 0b), 1 trc, -14018b acc} 00000.189 DD| TABLET_SAUSAGECACHE: Unregister owner [35:212:2237] 00000.189 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {6 1077b} miss {50 281387b} 00000.189 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.190 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {14354b, 149} 00000.190 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.190 II| FAKE_ENV: DS.1 gone, left {143736b, 8}, put {157893b, 150} 00000.190 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.190 II| FAKE_ENV: All BS storage groups are 
stopped 00000.190 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.190 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 782}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:03:05.780204Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.047 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.048 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {3 512b} miss {0 0b} 00000.048 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.048 II| FAKE_ENV: DS.1 gone, left {6814b, 23}, put {6814b, 23} 00000.048 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.048 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.048 II| FAKE_ENV: DS.0 gone, left {1356b, 12}, put {1376b, 13} 00000.048 II| FAKE_ENV: All BS storage groups are stopped 00000.048 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.048 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:03:05.838342Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.278 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.278 NN| TABLET_SAUSAGECACHE: Poison cache serviced 10 reqs hit {860 5551893b} miss {0 0b} 00000.278 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.278 II| FAKE_ENV: DS.1 gone, left {6751256b, 17}, put {6751256b, 17} 00000.281 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.281 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.281 II| FAKE_ENV: DS.0 gone, left {1201b, 13}, put {1221b, 14} 00000.282 II| FAKE_ENV: All BS storage groups are stopped 00000.282 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.282 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:03:06.133683Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00015.134 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00015.134 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4109 reqs hit {2091 2366986b} miss {6144 6340608b} 00015.134 II| FAKE_ENV: Shut order, stopping 4 BS groups 00015.135 II| FAKE_ENV: DS.0 gone, left {1761b, 14}, put {1781b, 15} 00015.135 II| FAKE_ENV: DS.1 gone, left {6927727b, 27}, put {6927727b, 27} 00015.137 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00015.137 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00015.137 II| FAKE_ENV: All BS storage groups are stopped 00015.137 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00015.137 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 
II| FAKE_ENV: Born at 2025-03-28T09:03:21.287605Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00014.446 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00014.446 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4106 reqs hit {43 253450b} miss {4096 4227072b} 00014.447 II| FAKE_ENV: Shut order, stopping 4 BS groups 00014.447 II| FAKE_ENV: DS.0 gone, left {44744b, 2}, put {164747b, 16} 00014.447 II| FAKE_ENV: DS.1 gone, left {2764621b, 2068}, put {2764621b, 2068} 00014.467 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00014.467 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00014.467 II| FAKE_ENV: All BS storage groups are stopped 00014.467 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00014.467 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:03:35.784197Z 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:03:35.825044Z 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:03:35.876423Z 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:03:35.924495Z 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:35.761170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:35.761270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:35.761307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:35.761340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:35.761393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:35.761424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:35.761510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:35.761581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:35.761923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:35.836634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:35.836691Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:35.855263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:35.855610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:35.855768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:35.869726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:35.869925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:35.870377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:35.870547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:35.876805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:35.878067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:35.878139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:35.878304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:35.878360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:35.878414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:35.878515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:35.891086Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:36.017372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:36.017574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:36.017804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:36.018081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:36.018168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:36.022470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:36.022609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:36.022861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:36.022935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:36.023018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:36.023060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:36.025557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:36.025626Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:36.025662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:36.027603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:36.027667Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:36.027735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:36.027784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:36.031404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:36.033231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:36.033474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:36.034489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:36.034614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:36.034662Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:36.034971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:36.035040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:36.035208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:36.035298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:36.037178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:36.037241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:36.037412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:36.037452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:36.037832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:36.037924Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:36.038007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:36.038040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:36.038078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:36.038106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:36.038144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:36.038185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:36.038215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:36.038241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:36.038301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:36.038338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:36.038368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:36.040208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:36.040311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:36.040343Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... thId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:450:2411], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:03:38.090465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-03-28T09:03:38.090602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-03-28T09:03:38.090919Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-03-28T09:03:38.090955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-03-28T09:03:38.091015Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 2025-03-28T09:03:38.091167Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:38.091271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 8589936749 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:38.091316Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-03-28T09:03:38.091362Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-03-28T09:03:38.093399Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.093449Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-03-28T09:03:38.093537Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-28T09:03:38.093567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-28T09:03:38.093603Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation 
is done id#281474976710760:0 progress is 1/1 2025-03-28T09:03:38.093645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-28T09:03:38.093677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-03-28T09:03:38.093738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:122:2148] message: TxId: 281474976710760 2025-03-28T09:03:38.093775Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-28T09:03:38.093807Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-03-28T09:03:38.093832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-03-28T09:03:38.093912Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-03-28T09:03:38.095737Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-03-28T09:03:38.095830Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-03-28T09:03:38.095902Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-03-28T09:03:38.095982Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:450:2411], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:03:38.097679Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-28T09:03:38.097764Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:450:2411], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, 
UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:03:38.097823Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-28T09:03:38.099600Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-28T09:03:38.099681Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:450:2411], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:03:38.099717Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-03-28T09:03:38.099831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:03:38.099883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:631:2580] TestWaitNotification: OK eventTxId 102 2025-03-28T09:03:38.100533Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:38.100781Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 289us result status StatusSuccess 2025-03-28T09:03:38.101259Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "SomeIndex" LocalPathId: 3 Type: EIndexTypeGlobal State: 
EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 5 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:36.670249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:36.670342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:36.670379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:36.670413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:36.670459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:36.670487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:36.670563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:36.670639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:36.670997Z
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:36.758573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:36.758635Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:36.777633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:36.777958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:36.778125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:36.789964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:36.790206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:36.790912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:36.791132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:36.793201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:36.794423Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:36.794477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:36.794641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:36.794710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:36.794752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:36.794842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:36.801620Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:36.938370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:36.938609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:36.938819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:36.939042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:36.939100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:36.943693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 
2025-03-28T09:03:36.943825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:36.944038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:36.944096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:36.944153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:36.944188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:36.947732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:36.947795Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:36.947830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:36.949776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:36.949841Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:36.949916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:36.949965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:36.959570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:36.968982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:36.969236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:36.970273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:36.970441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:36.970496Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:36.970816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:36.970881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:36.971044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason 
publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:36.971139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:36.981616Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:36.981681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:36.981885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:36.981928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:36.982339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:36.982412Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:36.982523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:36.982553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:36.982588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:36.982618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:36.982668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:36.982738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:36.982770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:36.982799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:36.982868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:36.982904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:36.982934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:36.984861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:36.984977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:36.985010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
SHARD DEBUG: TMoveSequence TDropParts HandleReply TEvDropSequenceResult shardId# 72075186233409546 status# SUCCESS operationId# 102:1 at tablet 72057594046678944 2025-03-28T09:03:38.606845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:03:38.606892Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 4 -> 240 2025-03-28T09:03:38.608851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-28T09:03:38.609020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-28T09:03:38.609072Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveSequence TDone, operationId: 102:1 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:38.609129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveSequence TDone, operationId: 102:1 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 3], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-03-28T09:03:38.609233Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-03-28T09:03:38.609261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-03-28T09:03:38.609294Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-03-28T09:03:38.609318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-03-28T09:03:38.609344Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2025-03-28T09:03:38.609415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:373:2342] message: TxId: 102 2025-03-28T09:03:38.609479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-03-28T09:03:38.609527Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:03:38.609561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:03:38.609688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:03:38.609726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:03:38.609765Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-03-28T09:03:38.609784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-03-28T09:03:38.609822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-03-28T09:03:38.609843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:03:38.610278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:03:38.610325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:03:38.610384Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:03:38.610664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:03:38.610723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:03:38.613348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:03:38.613396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:472:2428] 2025-03-28T09:03:38.613881Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-03-28T09:03:38.617522Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:38.617763Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/myseq" took 243us result status StatusPathDoesNotExist 2025-03-28T09:03:38.617942Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/myseq\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table/myseq" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:03:38.618349Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:38.618492Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 158us result status StatusPathDoesNotExist 2025-03-28T09:03:38.618617Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
2025-03-28T09:03:38.618952Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:38.619139Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove" took 194us result status StatusSuccess 2025-03-28T09:03:38.619513Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove" PathDescription { Self { Name: "TableMove" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "TableMove" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 DefaultFromSequence: "myseq" NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false Sequences { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:38.620031Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:03:38.620204Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/myseq" took 187us result status StatusSuccess 2025-03-28T09:03:38.620467Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/myseq" PathDescription { Self { Name: "myseq" PathId: 5 SchemeshardId: 
72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> BackupRestoreS3::RestoreIndexTableDecimalSplitBoundaries [GOOD]
>> BackupRestoreS3::RestoreViewDependentOnAnotherView
>> KqpPg::DuplicatedColumns+useSink [GOOD]
>> KqpPg::DuplicatedColumns-useSink
>> TSchemeShardMoveTest::Reject
>> TSchemeShardMoveTest::MoveMigratedTable [GOOD]
>> TSchemeShardMoveTest::MoveOldTableWithIndex
>> KqpPg::InsertNoTargetColumns_Serial-useSink [GOOD]
>> KqpPg::InsertValuesFromTableWithDefault+useSink
>> TKeyValueTracingTest::WriteHuge
>> ResourcePoolsDdl::TestResourcePoolAcl [GOOD]
>> ResourcePoolsSysView::TestResourcePoolsSysViewOnServerless
>> TKeyValueTracingTest::ReadSmall
>> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex [GOOD]
>> TFlatTableExecutor_IndexLoading::Scan_FlatIndex
>> TKeyValueTracingTest::WriteSmall
>> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables [GOOD]
>> KqpWorkloadServiceTables::TestPoolStateFetcherActor
>> ResourcePoolsDdl::TestCreateResourcePoolOnServerless [GOOD]
>> ResourcePoolsDdl::TestDefaultPoolRestrictions
>> KqpPg::DropIndex [GOOD]
>> KqpPg::CreateUniqPgColumn+useSink
|76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest
>> KqpPg::InsertValuesFromTableWithDefaultText+useSink [GOOD]
>> KqpPg::InsertValuesFromTableWithDefaultText-useSink
>> KqpWorkloadService::TestZeroQueueSize [GOOD]
>> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool
>> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD]
>> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD]
>> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD]
>> TFlatTableExecutor_IndexLoading::Scan_FlatIndex [GOOD]
>> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:38.677645Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:38.677858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:38.677926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:38.677962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:38.678011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:38.678037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:38.678108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:38.678179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:38.678494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:38.768593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:38.768652Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:38.795325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:38.795691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:38.795849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:38.810301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:38.810552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:38.811237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:38.811464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:38.817733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:38.819651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:38.819719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:38.819888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:38.819940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:38.819994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:38.820096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.835240Z node 1 :HIVE INFO: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:38.955842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:38.956071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.956307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:38.956534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:38.956593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.960006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:38.960150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:38.960380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.960453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:38.960513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:38.960548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:38.962914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.962971Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:38.963010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:38.965057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.965134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.965183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:38.965230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:38.968793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:38.970907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:38.971182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:38.972271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:38.972399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:38.972453Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:38.972747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:38.972814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:38.972973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:38.973062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:38.975263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:38.975312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:38.975476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:38.975514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:38.975907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.975962Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:38.976053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:38.976089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:38.976125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:38.976155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:38.976190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:38.976228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:38.976259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:38.976287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 
1:0 2025-03-28T09:03:38.976356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:38.976392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:38.976421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:38.978291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:38.978422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:38.978461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 12 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 825 } } 2025-03-28T09:03:41.340311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 2 2025-03-28T09:03:41.340407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:2, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 12 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 825 } } 2025-03-28T09:03:41.340472Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 12 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 825 } } FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:03:41.340940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 8589936904 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:03:41.340982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-28T09:03:41.341093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 8589936904 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:03:41.341142Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:03:41.341223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 329 RawX2: 8589936904 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:03:41.341281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 
72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:41.341317Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:03:41.341348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:03:41.341385Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:03:41.342086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 8589936901 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:03:41.342124Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 2 2025-03-28T09:03:41.342231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:2, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 8589936901 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:03:41.342266Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:03:41.342321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 325 RawX2: 8589936901 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:03:41.342360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:41.342387Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:2, at schemeshard: 72057594046678944 2025-03-28T09:03:41.342411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:03:41.342434Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:2 129 -> 240 2025-03-28T09:03:41.344009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:03:41.346857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-03-28T09:03:41.347507Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:03:41.347898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:03:41.347942Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:41.347994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 2], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-03-28T09:03:41.348093Z node 2 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 2/3 2025-03-28T09:03:41.348129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-03-28T09:03:41.348168Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 2/3 2025-03-28T09:03:41.348197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2025-03-28T09:03:41.348231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/3, is published: true 2025-03-28T09:03:41.348801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2025-03-28T09:03:41.349004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-03-28T09:03:41.349038Z node 2 :FLAT_TX_SCHEMESHARD INFO: TMoveTable TDone, operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:41.349070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TMoveTable TDone, operationId: 102:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 4], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-03-28T09:03:41.349129Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/3 2025-03-28T09:03:41.349152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-03-28T09:03:41.349183Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/3 2025-03-28T09:03:41.349203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-03-28T09:03:41.349226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/3, is published: true 2025-03-28T09:03:41.349291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:378:2346] message: TxId: 102 2025-03-28T09:03:41.349335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2025-03-28T09:03:41.349388Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:03:41.349422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:03:41.349524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-03-28T09:03:41.349557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:03:41.349594Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-03-28T09:03:41.349615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-03-28T09:03:41.349642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-03-28T09:03:41.349664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:03:41.349684Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-03-28T09:03:41.349702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-03-28T09:03:41.349741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3
2025-03-28T09:03:41.349764Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1
2025-03-28T09:03:41.350126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-28T09:03:41.350169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944
2025-03-28T09:03:41.350236Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1
2025-03-28T09:03:41.350275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944
2025-03-28T09:03:41.350310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-03-28T09:03:41.350336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-03-28T09:03:41.350363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-28T09:03:41.361118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-03-28T09:03:41.361188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:476:2437]
2025-03-28T09:03:41.361389Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestWaitNotification: OK eventTxId 102
>> TKeyValueTracingTest::WriteHuge [FAIL]
>> TKeyValueTracingTest::WriteSmall [FAIL]
>> TSchemeShardMoveTest::Reject [GOOD]
>> TSchemeShardMoveTest::OneTable
>> TKeyValueTracingTest::ReadSmall [FAIL]
>> TKeyValueTracingTest::ReadHuge
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD]
Test command err:
2025-03-28T09:02:50.147320Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783284256611726:2076];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:02:50.147370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpAp5omq/pdisk_1.dat
2025-03-28T09:02:50.774206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:02:50.774313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:02:50.778681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:02:50.837388Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 18395, node 1
2025-03-28T09:02:50.899702Z node
1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:02:50.899742Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:02:51.103383Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:51.103407Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:51.103416Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:51.103550Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3470 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:51.841924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:54.911801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783301436481960:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:54.911917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783301436481968:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:54.911967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:54.916366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:02:54.960347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783301436481974:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:02:55.029453Z node 1 :TX_PROXY ERROR: Actor# [1:7486783305731449346:2693] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:55.154806Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783284256611726:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:55.154897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Backup "/Root" to "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpfjDK4p/"Create temporary directory "/Root/~backup_20250328T090255" in databaseProcess "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpfjDK4p/topic"Backup topic "/Root/topic" to "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpfjDK4p/topic"Write topic into "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpfjDK4p/topic/create_topic.pb"Write ACL into "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpfjDK4p/topic/permissions.pb"Remove temporary directory "/Root/~backup_20250328T090255" in database2025-03-28T09:02:55.809615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710662:0, at schemeshard: 72057594046644480 Backup completed successfully2025-03-28T09:02:55.863848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:55.904830Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-28T09:02:55.904864Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-28T09:02:55.904948Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-03-28T09:02:55.930446Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-03-28T09:02:55.930511Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found Restore "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpfjDK4p/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpfjDK4p/" to "/Root"Process "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpfjDK4p/topic"Restore topic "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpfjDK4p/topic" to "/Root/topic"Read topic from "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpfjDK4p/topic/create_topic.pb"Created "/Root/topic"Restore ACL "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpfjDK4p/topic" to "/Root/topic"Read ACL from "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpfjDK4p/topic/permissions.pb"2025-03-28T09:02:56.124436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 
72057594046644480 Restore completed successfully 2025-03-28T09:02:58.765471Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783315713497486:2186];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:58.855907Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpgpC1ri/pdisk_1.dat 2025-03-28T09:02:59.203874Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:59.232577Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:59.232663Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:59.244614Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20307, node 4 2025-03-28T09:02:59.529803Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:59.533953Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:59.541495Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:59.541655Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63268 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:59.880335Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:59.978735Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpsNnpgY/"Create temporary directory "/Root/~backup_20250328T090300" in databaseProcess "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpsNnpgY/kesus"Backup coordination node "/Root/kesus" to "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpsNnpgY/kesus"Write coordination node into "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpsNnpgY/kesus/create_coordination_node.pb"Write ACL into "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpsNnpgY/kesus/permissions.pb"Remove temporary directory "/Root/~backup_20250328T090300" in database2025-03-28T09:03:00.285356Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715660:0, at schemeshard: 72057594046644480 Backup completed successfully2025-03-28T09:03:00.344717Z node 4 :FL ... 0 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/"Create temporary directory "/Root/~backup_20250328T090332" in databaseProcess "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalTable"Backup external table "/Root/externalTable" to "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalTable"Write external table into "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalTable/create_external_table.sql"Write ACL into "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalTable/permissions.pb"Process "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalDataSource"Backup external data source "/Root/externalDataSource" to "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalDataSource"Write external data source into "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalDataSource/create_external_data_source.sql"Write ACL into "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalDataSource/permissions.pb"Remove temporary directory "/Root/~backup_20250328T090332" in database2025-03-28T09:03:33.144064Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715663:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/" to "/Root"Process "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalDataSource"Restore external data source "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalDataSource" to "/Root/externalDataSource"Read external data source from "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalDataSource/create_external_data_source.sql"2025-03-28T09:03:33.366004Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715666:0, at schemeshard: 
72057594046644480 2025-03-28T09:03:33.377695Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7486783443670468580:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:33.377799Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Created "/Root/externalDataSource"Restore ACL "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalDataSource" to "/Root/externalDataSource"Read ACL from "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalDataSource/permissions.pb"2025-03-28T09:03:33.408614Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715667:0, at schemeshard: 72057594046644480 Process "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalTable"Restore external table "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalTable" to "/Root/externalTable"Read external table from "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalTable/create_external_table.sql"2025-03-28T09:03:33.458796Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 Created "/Root/externalTable"Restore ACL "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalTable" to "/Root/externalTable"Read ACL from "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpYd9mq1/externalTable/permissions.pb"2025-03-28T09:03:33.507068Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-03-28T09:03:35.367464Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486783473881764399:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:35.367556Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpS5D7mO/pdisk_1.dat 2025-03-28T09:03:35.634398Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7367, node 13 2025-03-28T09:03:35.727053Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:35.727163Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:35.791760Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:03:35.863364Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:35.863392Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:35.863405Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:35.863587Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6128 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:36.356630Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:39.857179Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486783491061634671:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:39.857321Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:39.857649Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486783491061634683:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:39.862601Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:03:39.904972Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7486783491061634685:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-03-28T09:03:39.982809Z node 13 :TX_PROXY ERROR: Actor# [13:7486783491061634774:2692] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:03:40.008009Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710660:0, at schemeshard: 72057594046644480
Backup "/Root" to "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpi3lrPY/"
Create temporary directory "/Root/~backup_20250328T090340" in database
Process "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpi3lrPY/externalDataSource"
Backup external data source "/Root/externalDataSource" to "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpi3lrPY/externalDataSource"
Write external data source into "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpi3lrPY/externalDataSource/create_external_data_source.sql"
Write ACL into "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpi3lrPY/externalDataSource/permissions.pb"
Remove temporary directory "/Root/~backup_20250328T090340" in database
2025-03-28T09:03:40.172401Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710662:0, at schemeshard: 72057594046644480
Backup completed successfully
Restore "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpi3lrPY/" to "/Root"
Resolved db base path: "/Root"
Restore folder "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpi3lrPY/" to "/Root"
Process "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpi3lrPY/externalDataSource"
Restore external data source "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpi3lrPY/externalDataSource" to "/Root/externalDataSource"
Read external data source from "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpi3lrPY/externalDataSource/create_external_data_source.sql"
2025-03-28T09:03:40.338122Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710664:0, at schemeshard: 72057594046644480
Created "/Root/externalDataSource"
Restore ACL "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpi3lrPY/externalDataSource" to "/Root/externalDataSource"
Read ACL from "/home/runner/.ya/build/build_root/2909/00177b/r3tmp/tmpi3lrPY/externalDataSource/permissions.pb"
2025-03-28T09:03:40.369133Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7486783473881764399:2074];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:03:40.369220Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:03:40.377006Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480
Restore completed successfully
>> TSchemeShardMoveTest::Chain [GOOD]
>> TSchemeShardMoveTest::Index
>> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex [GOOD]
>> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex
>> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks [GOOD]
>> ResourcePoolClassifiersDdl::TestExplicitPoolId
>> TSchemeShardMoveTest::Replace
>> KqpPg::TableArrayInsert+useSink [GOOD]
>> KqpPg::TableArrayInsert-useSink
>> TSchemeShardMoveTest::OneTable [GOOD]
>> KqpWorkloadService::TestZeroQueueSizeManyQueries [GOOD]
>> KqpWorkloadServiceActors::TestCreateDefaultPool
>> TKeyValueTracingTest::ReadHuge [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::OneTable [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-28T09:03:40.930448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-28T09:03:40.930586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:03:40.930640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-28T09:03:40.930679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-28T09:03:40.930746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-28T09:03:40.930794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-28T09:03:40.930876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:03:40.930958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-28T09:03:40.931265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:03:41.021232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:03:41.021288Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:03:41.040559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:03:41.040902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-28T09:03:41.041062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-28T09:03:41.047290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-28T09:03:41.047555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-28T09:03:41.048226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-28T09:03:41.048449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-28T09:03:41.050812Z node 1 :FLAT_TX_SCHEMESHARD
NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:41.052075Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:41.052140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:41.052324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:41.052379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:41.052419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:41.052536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:41.059886Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:41.205239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:41.205451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:41.205653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:41.205950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:41.206004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:41.208485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:41.208623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:41.208813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:41.208889Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:41.208947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:41.208982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:41.211009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:41.211068Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:41.211113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:41.212810Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:41.212873Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:41.212923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:41.212992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:41.216755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:41.218726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:41.218964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:41.220047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:41.220177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:41.220233Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:41.220541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:41.220612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:41.220780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:41.220875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:41.222797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:41.222844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:41.222986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:41.223027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:41.223457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:41.223520Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:41.223634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:41.223671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:41.223722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:41.223758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:41.223806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:41.223846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:41.223880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:41.223910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:41.223968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:41.224004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:41.224035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:41.226019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:41.226128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:41.226165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
8T09:03:44.333004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 108:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-28T09:03:44.333052Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 108, done: 0, blocked: 1 2025-03-28T09:03:44.333147Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 108:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 108 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-03-28T09:03:44.333297Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 137 -> 129 2025-03-28T09:03:44.333452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:44.333524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:03:44.334985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:03:44.337388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:03:44.337667Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:44.337720Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:44.337906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:03:44.338062Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:44.338110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-03-28T09:03:44.338158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 108, path id: 4 2025-03-28T09:03:44.338522Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:03:44.338581Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:03:44.338726Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:03:44.338772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:03:44.338819Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 2025-03-28T09:03:44.339947Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2025-03-28T09:03:44.340075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 
2025-03-28T09:03:44.340115Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-03-28T09:03:44.340163Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 23 2025-03-28T09:03:44.340217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:44.341138Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2025-03-28T09:03:44.341226Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2025-03-28T09:03:44.341257Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-03-28T09:03:44.341290Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-28T09:03:44.341324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:03:44.341404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-03-28T09:03:44.344881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:03:44.344941Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 108:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:44.345195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:03:44.345302Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-03-28T09:03:44.345346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-28T09:03:44.345382Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-03-28T09:03:44.345411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-28T09:03:44.345446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-03-28T09:03:44.345518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:336:2315] message: TxId: 108 2025-03-28T09:03:44.345570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-28T09:03:44.345616Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-03-28T09:03:44.345654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-03-28T09:03:44.345754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:03:44.347746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 
2025-03-28T09:03:44.348192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-03-28T09:03:44.349987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-03-28T09:03:44.350046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:830:2788] TestWaitNotification: OK eventTxId 108 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-03-28T09:03:44.350880Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-03-28T09:03:44.350933Z node 2 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409547 2025-03-28T09:03:44.369413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 8589936885 } TabletId: 72075186233409546 State: 4 2025-03-28T09:03:44.369545Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-03-28T09:03:44.372765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:03:44.373194Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-28T09:03:44.373351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:44.373570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409546 2025-03-28T09:03:44.373917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:03:44.373956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-28T09:03:44.374025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:44.381336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:03:44.381417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:03:44.381979Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 2025-03-28T09:03:44.382805Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:44.383013Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 298us result status StatusSuccess 2025-03-28T09:03:44.383290Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess 
Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 21 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardMoveTest::ResetCachedPath >> KqpPg::DuplicatedColumns-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture()+28 (0x1011113C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105CCFD0) TestOneRead(TBasicString>, TBasicString>)+4828 (0xFD595EC) NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+318 (0xFD5FDCE) std::__y1::__function::__func, void ()>::operator()()+280 (0xFD73998) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x105FAF36) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105D3B49) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD72844) NUnitTest::TTestFactory::Execute()+2438 (0x105D5416) NUnitTest::RunMain(int, char**)+5213 (0x105F54AD) ??+0 (0x7F2B7DD0CD90) __libc_start_main+128 (0x7F2B7DD0CE40) _start+41 (0xD6F6029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture()+28 (0x1011113C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105CCFD0) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFD53C1D) NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+216 (0xFD5F748) std::__y1::__function::__func, void ()>::operator()()+280 (0xFD73998) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x105FAF36) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105D3B49) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD72844) NUnitTest::TTestFactory::Execute()+2438 
(0x105D5416) NUnitTest::RunMain(int, char**)+5213 (0x105F54AD) ??+0 (0x7F32F87E1D90) __libc_start_main+128 (0x7F32F87E1E40) _start+41 (0xD6F6029) >> KqpPg::InsertValuesFromTableWithDefault+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault-useSink >> KqpPg::InsertValuesFromTableWithDefaultText-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture()+28 (0x1011113C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105CCFD0) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFD53C1D) NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+216 (0xFD5FA58) std::__y1::__function::__func, void ()>::operator()()+280 (0xFD73998) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x105FAF36) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105D3B49) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD72844) NUnitTest::TTestFactory::Execute()+2438 (0x105D5416) NUnitTest::RunMain(int, char**)+5213 (0x105F54AD) ??+0 (0x7FAE0285BD90) __libc_start_main+128 (0x7FAE0285BE40) _start+41 (0xD6F6029) |76.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink >> ResourcePoolsDdl::TestDefaultPoolRestrictions [GOOD] >> ResourcePoolsDdl::TestAlterResourcePool |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |76.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |76.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |76.3%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |76.3%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |76.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardViewTest::AsyncCreateSameView >> TSchemeShardMoveTest::Index [GOOD] >> TSchemeShardMoveTest::Replace [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex >> TSchemeShardViewTest::AsyncCreateDifferentViews |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture()+28 (0x1011113C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105CCFD0) TestOneRead(TBasicString>, TBasicString>)+4828 (0xFD595EC) NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+318 (0xFD601BE) std::__y1::__function::__func, void ()>::operator()()+280 (0xFD73998) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x105FAF36) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105D3B49) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD72844) NUnitTest::TTestFactory::Execute()+2438 (0x105D5416) NUnitTest::RunMain(int, char**)+5213 (0x105F54AD) ??+0 (0x7F7940F00D90) __libc_start_main+128 (0x7F7940F00E40) _start+41 (0xD6F6029) >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] >> TSchemeShardMoveTest::ResetCachedPath [GOOD] >> TSchemeShardViewTest::DropView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Index [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:38.472095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:38.472192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:38.472252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:38.472291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:38.472337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:38.472368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:38.472455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:38.472535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-03-28T09:03:38.472902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:38.566564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:38.566628Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:38.582656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:38.583022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:38.583244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:38.590719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:38.590984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:38.591654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:38.591897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:38.594130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:38.595533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:38.595595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:38.595791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:38.595855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:38.595906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:38.596018Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.618838Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:38.787979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:38.788201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.788453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:38.788765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:38.788837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.793270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:38.793428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:38.793674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.793744Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:38.793805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:38.793844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:38.796396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.796477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:38.796519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:38.798668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.798767Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.798851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:38.798914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:38.803826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:38.806347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:38.806556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:38.807890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:38.808054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:38.808118Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:38.808436Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:38.808505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:38.808723Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:38.808835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:38.811081Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:38.811153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:38.811351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:38.811397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:38.811835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:38.811917Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:38.812030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:38.812082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:38.812128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:38.812162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:38.812208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:38.812257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:38.812300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:38.812335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:38.812412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:38.812456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:38.812491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:38.814711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:38.814860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:38.814905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:47.069617Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:47.069838Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 239us result status StatusSuccess 2025-03-28T09:03:47.070190Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 10 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:47.070645Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:03:47.070923Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Sync" took 281us result status StatusSuccess 2025-03-28T09:03:47.071700Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Sync" PathDescription { Self { Name: "Sync" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2025-03-28T09:03:47.072404Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:03:47.172408Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Async" took 100ms result status StatusSuccess 2025-03-28T09:03:47.173268Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Async" PathDescription { Self { Name: "Async" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 8 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Async" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> Cdc::NaN[PqRunner] [GOOD]
>> Cdc::NaN[YdsRunner]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Replace [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:45.022209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:45.022335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:45.022403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:45.022453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:45.022503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:45.022541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:45.022641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:45.022761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:45.023113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:45.114646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:03:45.114727Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:45.138452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:45.138723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:45.138854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:45.144391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:45.144600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:45.145295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:45.145509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:45.148138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:45.149316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:45.149406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:45.149615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:45.149675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:45.149719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:45.149870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:45.159345Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:45.309678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:45.309907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:45.310119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:45.310354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:45.310474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:45.314179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:45.314342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-03-28T09:03:45.314579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:45.314640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:45.314713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:45.314772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:45.317192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:45.317275Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:45.317334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:45.320659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:45.320738Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:45.320814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:45.320882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:45.332851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:45.335135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:45.335401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:45.336356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:45.336525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:45.336591Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:45.336890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:45.336950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:45.337146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:45.337242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:45.339800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:45.339849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:45.340022Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:45.340070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:45.340478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:45.340531Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:45.340649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:45.340702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:45.340744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:45.340776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:45.340818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:45.340865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:45.340905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:45.340939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:45.341030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:45.341070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:45.341105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:45.343362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:45.343483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:45.343521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
d: 72057594046678944, LocalPathId: 12] was 3 2025-03-28T09:03:47.106480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Transaction 105 reset current state at schemeshard 72057594046678944 because pipe to tablet 72075186233409547 disconnected Forgetting tablet 72075186233409548 2025-03-28T09:03:47.108607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Transaction 105 reset current state at schemeshard 72057594046678944 because pipe to tablet 72075186233409548 disconnected 2025-03-28T09:03:47.109680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-28T09:03:47.109729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:1414:3176] 2025-03-28T09:03:47.109924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 5 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-03-28T09:03:47.110066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:03:47.110196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:03:47.110241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2025-03-28T09:03:47.110319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 1 2025-03-28T09:03:47.110363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 15], at schemeshard: 72057594046678944 2025-03-28T09:03:47.110417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 2 2025-03-28T09:03:47.110676Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-03-28T09:03:47.113123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:03:47.113358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 1 2025-03-28T09:03:47.123943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:03:47.124042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:03:47.124245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:03:47.124273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409548 2025-03-28T09:03:47.129322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-03-28T09:03:47.129497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:03:47.129538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, 
LocalPathId: 14], at schemeshard: 72057594046678944 2025-03-28T09:03:47.129620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-03-28T09:03:47.129659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2025-03-28T09:03:47.129692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 1 2025-03-28T09:03:47.129721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 12], at schemeshard: 72057594046678944 2025-03-28T09:03:47.129769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:47.130062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:03:47.130116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409546 2025-03-28T09:03:47.131895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2025-03-28T09:03:47.132424Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-03-28T09:03:47.132556Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-03-28T09:03:47.132604Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2025-03-28T09:03:47.133228Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Src" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:47.133418Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Src" took 224us result status StatusPathDoesNotExist 2025-03-28T09:03:47.133606Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Src\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Src" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:03:47.134204Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-03-28T09:03:47.134426Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dst" took 241us result status StatusSuccess 2025-03-28T09:03:47.135192Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dst" PathDescription { Self { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Async" LocalPathId: 23 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 25 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 22 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:47.136162Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:47.136318Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 175us result status StatusSuccess 2025-03-28T09:03:47.136694Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 28 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 28 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 26 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 22 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardViewTest::AsyncCreateSameView [GOOD]
>> BackupRestoreS3::RestoreViewDependentOnAnotherView [GOOD]
>> BackupRestoreS3::PrefixedVectorIndex
|76.4%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ResetCachedPath [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: [1:108:2140] 2025-03-28T09:03:46.358652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:46.358785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:46.358832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:46.358878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:46.358942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:46.358985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:46.359082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:46.359182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-28T09:03:46.359581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:46.453251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:46.453319Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:46.471482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:46.471712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:46.471906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:46.479489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:46.479902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:46.480584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:46.480894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:46.486370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:46.487783Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:46.487940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:46.488103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:46.488166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:46.488211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:46.488426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:46.496146Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-03-28T09:03:46.659082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:46.659328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:46.659547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:46.659764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:46.659823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:46.665784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-03-28T09:03:46.665952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:46.666177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:46.666253Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:46.666293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:46.666326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:46.682961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:46.683033Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:46.683073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:46.694085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:46.694150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:46.694209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:46.694251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:46.703620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:46.707601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:46.707822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:46.708946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:46.709093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:46.709145Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:46.709425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:46.709483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:46.709643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:46.709743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:46.715321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:46.715409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:46.715607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:46.715662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:46.716165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:46.716216Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:46.716328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:46.716368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:46.716426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:46.716474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:46.716521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:46.716560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:46.716596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:46.716629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:46.716708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:46.716753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:46.716792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:46.718754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:46.718888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:46.718934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
RelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-03-28T09:03:47.723962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 154500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 220 } } 2025-03-28T09:03:47.724100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId# 105:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: PREPARED TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 PrepareArriveTime: 154500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 220 } } 2025-03-28T09:03:47.724142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-03-28T09:03:47.724245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409549, shardIdx: 72057594046678944:4, operationId: 105:0, left await: 0, at schemeshard: 72057594046678944 2025-03-28T09:03:47.724287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 3 -> 128 2025-03-28T09:03:47.726631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:03:47.726858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:03:47.726908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 105:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:47.726991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-03-28T09:03:47.727156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:47.729255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2025-03-28T09:03:47.729416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 105 at step: 5000004 2025-03-28T09:03:47.729938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:47.730073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-03-28T09:03:47.730140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 105:0 HandleReply TEvOperationPlan, operationId: 105:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-03-28T09:03:47.730510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 129 2025-03-28T09:03:47.730649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-03-28T09:03:47.738026Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:47.738091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:03:47.738348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:47.738408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-03-28T09:03:47.739404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:03:47.739468Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 105 2025-03-28T09:03:47.740141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:03:47.740266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:03:47.740300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:03:47.740354Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 8 2025-03-28T09:03:47.740397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:03:47.740497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-03-28T09:03:47.741429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1399 } } 2025-03-28T09:03:47.741474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-03-28T09:03:47.741614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 
CpuTimeUsec: 1399 } } 2025-03-28T09:03:47.741712Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000004 OrderId: 105 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1399 } } 2025-03-28T09:03:47.742290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 670 RawX2: 4294969906 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-03-28T09:03:47.742361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2025-03-28T09:03:47.742487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Source { RawX1: 670 RawX2: 4294969906 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-03-28T09:03:47.742534Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:03:47.742610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 670 RawX2: 4294969906 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2025-03-28T09:03:47.742666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 105:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:47.742727Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:03:47.742767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-03-28T09:03:47.742805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240 2025-03-28T09:03:47.746114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:03:47.747706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:03:47.747851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:03:47.748179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:03:47.748226Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-03-28T09:03:47.748352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-28T09:03:47.748390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:03:47.748431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-28T09:03:47.748463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:03:47.748512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-03-28T09:03:47.748609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:334:2313] message: TxId: 105 2025-03-28T09:03:47.748673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:03:47.748732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-28T09:03:47.748767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-28T09:03:47.748880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:03:47.750809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-28T09:03:47.750865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:835:2756] TestWaitNotification: OK eventTxId 105
>> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:47.850409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:47.850516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:47.850556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:47.850586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:47.850648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:47.850679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:47.850764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:47.850838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:47.851187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:47.931114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:47.931180Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:47.947491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:47.947789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:47.947995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:47.957561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:47.957827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:47.958519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:47.958815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:47.961193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:47.962575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:47.962640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:47.962842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:47.962890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:47.962937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:47.963031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:47.975042Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:48.098388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:48.098581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:48.098789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:48.099022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:48.099069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:48.101579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:48.101735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:48.101970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:48.102033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:48.102087Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:48.102123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:48.105972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:48.106038Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:48.106076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:48.107987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:48.108048Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:48.108087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:48.108148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:48.111362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:48.113273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:48.113436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:48.114425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:48.114573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:48.114635Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:48.114960Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:48.115017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:48.115184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:48.115255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:48.117469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:48.117516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:48.117723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:48.117800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:48.118150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:48.118188Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:48.118277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:48.118307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:48.118336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:48.118364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:48.118399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:48.118457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:48.118490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:48.118518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:48.118573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:48.118597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:48.118630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:48.120260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:48.120369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:48.120432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
athStateCreate)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-03-28T09:03:48.175801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/MyView', error: path exists but creating right now (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateCreate), operation: CREATE VIEW, path: /MyRoot/MyView 2025-03-28T09:03:48.183212Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:03:48.184056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-03-28T09:03:48.184264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-03-28T09:03:48.184705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:48.184866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:48.184922Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-03-28T09:03:48.185074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-28T09:03:48.185274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:48.185346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:03:48.190482Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:48.190550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:48.194900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:03:48.195117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:48.195168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:03:48.195231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:03:48.195621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 
2025-03-28T09:03:48.195682Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:03:48.195806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:03:48.195864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:03:48.195910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:03:48.195947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:03:48.196000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:03:48.196059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:03:48.196099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:03:48.196133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:03:48.196222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:03:48.196266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-28T09:03:48.196304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-03-28T09:03:48.196341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-28T09:03:48.197230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:03:48.197333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:03:48.197369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:03:48.197408Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-03-28T09:03:48.197448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:48.198452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:03:48.198551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:03:48.198589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:03:48.198618Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-28T09:03:48.198647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:03:48.200177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:03:48.209196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:03:48.209562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-03-28T09:03:48.210057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:03:48.210110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-03-28T09:03:48.210216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:03:48.210237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-03-28T09:03:48.210287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:03:48.210313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:03:48.210967Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:03:48.211148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:03:48.211186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:303:2294] 2025-03-28T09:03:48.211375Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:03:48.211454Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:03:48.211511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:03:48.211537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:303:2294] 2025-03-28T09:03:48.211679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:03:48.211713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:303:2294] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-03-28T09:03:48.212216Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:48.212408Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 214us result status StatusSuccess 2025-03-28T09:03:48.212758Z node 
1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> BackupRestore::RestoreReplicationWithoutSecret [GOOD]
>> BackupRestore::RestoreExternalDataSourceWithoutSecret
>> TSchemeShardViewTest::DropView [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:48.524895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:48.525026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:48.525068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:48.525106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:48.525159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:48.525194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:48.525253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:48.525322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:48.525646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:03:48.609880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:48.609928Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:48.627401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:48.627703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:48.627892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:48.634147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:48.634367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:48.635049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:48.635285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:48.637314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:48.638652Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:48.638736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:48.638922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:48.638972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:48.639011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:48.639102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:48.646293Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:48.766846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:48.767039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:48.767185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:48.767365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:48.767410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:48.769851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:48.769986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:48.770175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:48.770238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:48.770267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:48.770298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:48.772960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:48.773023Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:48.773064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:48.775015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:48.775073Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:48.775110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:48.775176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:48.778162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:48.780664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:48.780807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:48.784279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:48.784470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:48.784545Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:48.784855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:48.784917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:48.785083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-03-28T09:03:48.785168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:48.787597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:48.787652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:48.787877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:48.787939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:48.788346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:48.788391Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:48.788494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:48.788534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:48.788571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:48.788607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:48.788640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:48.788696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:48.788731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:48.788761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:48.788840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:48.788874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:48.788917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:48.791255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:48.791374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:48.791413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
48.913319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-28T09:03:48.913345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-28T09:03:48.915132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:03:48.915903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:03:48.915954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:48.915982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:03:48.916015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-03-28T09:03:48.916818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:03:48.916898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:03:48.916926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:03:48.916976Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-28T09:03:48.917009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:03:48.917727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:03:48.917823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:03:48.917849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:03:48.917875Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-28T09:03:48.917915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:03:48.917975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-28T09:03:48.923409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:03:48.923711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait 
txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2025-03-28T09:03:48.923993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:03:48.924032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-03-28T09:03:48.924127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:03:48.924166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-03-28T09:03:48.924216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:03:48.924238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:03:48.924776Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:03:48.924947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:03:48.924983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:334:2325] 2025-03-28T09:03:48.925163Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:03:48.925269Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:03:48.925344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:03:48.925368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:334:2325] 2025-03-28T09:03:48.925457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:03:48.925476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:334:2325] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-03-28T09:03:48.926038Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:48.926216Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 229us result status StatusSuccess 2025-03-28T09:03:48.926660Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "FirstView" PathId: 3 
SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:48.927219Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:48.927402Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 184us result status StatusSuccess 2025-03-28T09:03:48.927691Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:48.928204Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:48.928372Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 185us result status StatusSuccess 2025-03-28T09:03:48.928594Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { Self { Name: "SecondView" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:48.699605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:48.699740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:48.699785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:48.699823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:48.699888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:48.699928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:48.700001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:48.700081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:48.700419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:48.803083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:48.803147Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-28T09:03:48.823633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:48.823965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:48.824163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:48.833361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:48.833599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:48.834291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:48.834548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:48.837024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:48.838406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:48.838468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:48.838641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:48.838723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:48.838771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:48.838878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:48.848260Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:49.003223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:49.003472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:49.003701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:49.003937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:49.003992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:49.006407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:49.006538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:49.006729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:49.006812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:49.006848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:49.006872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:49.009090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:49.009154Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:49.009191Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:49.011288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:49.011347Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:49.011403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:49.011453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:49.020934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:49.023254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:49.023499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:49.024690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:49.024899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:49.024961Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:49.025222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:49.025278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:49.025478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:49.025581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:03:49.028059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:49.028111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:49.028348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:49.028408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:49.028816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:49.028876Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:49.028983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:49.029013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:49.029066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:49.029103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:49.029151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:49.029193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:49.029226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:49.029252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:49.029336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:49.029371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:49.029415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:49.031387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:49.031523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:49.031574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
dReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-03-28T09:03:49.077940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropView Drop { Name: "MyView" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:49.078102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropView Propose, opId: 102:0, path: /MyRoot/MyView 2025-03-28T09:03:49.078228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-28T09:03:49.078279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:49.083848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAccepted TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-03-28T09:03:49.084049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP VIEW, path: /MyRoot/MyView 2025-03-28T09:03:49.084274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:03:49.084343Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 ProgressState 2025-03-28T09:03:49.084410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-28T09:03:49.084522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:49.086890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-28T09:03:49.087039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-03-28T09:03:49.087412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:49.087540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:49.087603Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000003 2025-03-28T09:03:49.087779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-03-28T09:03:49.087963Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:49.088040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:03:49.090484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:49.090546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:49.090731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:03:49.090864Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:49.090916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-28T09:03:49.090959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:03:49.091331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:03:49.091378Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:03:49.091483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:03:49.091518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:03:49.091556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:03:49.091584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:03:49.091620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-28T09:03:49.091677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:03:49.091716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:03:49.091745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:03:49.091829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:03:49.091863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-28T09:03:49.091902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-28T09:03:49.091932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-28T09:03:49.092744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:03:49.092853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:03:49.092892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:03:49.092934Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:03:49.092980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:49.093835Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:03:49.093924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:03:49.093956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:03:49.093982Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:03:49.094005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:03:49.094070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-28T09:03:49.095361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:03:49.095422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:03:49.095486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:49.100805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:03:49.101499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:03:49.101608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:03:49.101867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:03:49.101921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:03:49.102371Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:03:49.102477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:03:49.102517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: 
satisfy waiter [1:319:2310] TestWaitNotification: OK eventTxId 102 2025-03-28T09:03:49.103028Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:49.103208Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 213us result status StatusPathDoesNotExist 2025-03-28T09:03:49.103388Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardViewTest::ReadOnlyMode |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> KqpPg::CreateUniqPgColumn+useSink [GOOD] >> KqpPg::CreateUniqPgColumn-useSink >> ColumnBuildTest::BaseCase >> ColumnBuildTest::CancelBuild >> KqpWorkloadServiceActors::TestCreateDefaultPool [GOOD] >> KqpWorkloadServiceActors::TestCpuLoadActor >> KqpPg::InsertFromSelect_NoReorder+useSink [GOOD] >> KqpPg::DropTablePg >> ColumnBuildTest::ValidDefaultValue >> KqpWorkloadServiceTables::TestPoolStateFetcherActor [GOOD] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] Test command err: 2025-03-28T09:01:47.651102Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:01:47.658287Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:01:47.658824Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:01:47.659073Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:01:47.700478Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:01:47.783790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:01:47.783832Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:47.796038Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:01:47.797340Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:01:47.799154Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:01:47.799244Z node 1 
:TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:01:47.799309Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:01:47.799690Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:01:47.799783Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:01:47.799886Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:01:47.891488Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:01:47.991521Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:01:47.992045Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:01:47.992599Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:01:47.992742Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:01:47.992879Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:01:47.993000Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:01:47.994705Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:47.995115Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:47.996101Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:01:47.996644Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:01:47.997071Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:01:47.997898Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:01:47.998329Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:01:47.998548Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:01:48.001173Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:01:48.001736Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:01:48.002036Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:01:48.003047Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:48.003084Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:48.003535Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:01:48.012419Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:01:48.012476Z 
node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:01:48.012555Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:01:48.012701Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:01:48.016713Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:01:48.017015Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:01:48.017608Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:01:48.018173Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:01:48.018510Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:01:48.018881Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:01:48.023451Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:01:48.023492Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:01:48.023760Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:01:48.024275Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:01:48.024632Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:01:48.024654Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:01:48.024941Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:01:48.025885Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:01:48.026185Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:01:48.064091Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:01:48.064449Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:01:48.065535Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:01:48.065573Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:01:48.066602Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:01:48.082521Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:48.083469Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:48.083790Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:01:48.085953Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:01:48.086270Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:01:48.090732Z node 1 
:TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:01:48.090806Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:01:48.090857Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:01:48.090901Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:01:48.094479Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:01:48.094537Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:01:48.094821Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:48.094868Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:48.094930Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:01:48.094964Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:01:48.094995Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:01:48.095035Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:01:48.095086Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:01:48.095136Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:01:48.095191Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:01:48.095231Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:01:48.095280Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:01:48.095460Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:01:48.095506Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:01:48.095534Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:01:48.095557Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:01:48.095582Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:01:48.095654Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:01:48.095679Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:01:48.095711Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:01:48.095746Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:01:48.095814Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:01:48.095850Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 
2025-03-28T09:01:48.095893Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:01:48.095939Z node 1 :TX_DATA ... ropose latency: 1 ms 2025-03-28T09:03:47.692349Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:47.692501Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:47.692530Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-03-28T09:03:47.692568Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:47.692596Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:47.692772Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:47.692801Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-03-28T09:03:47.692840Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:47.692871Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:47.693028Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:47.693055Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-03-28T09:03:47.693090Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:47.693118Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:47.693293Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:47.693324Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-03-28T09:03:47.693367Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:47.693397Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:47.693594Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:47.693622Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-03-28T09:03:47.693657Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:47.693684Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:47.693876Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:47.693905Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-03-28T09:03:47.693942Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:47.693974Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:47.694163Z node 32 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:47.694191Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-03-28T09:03:47.694229Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:47.694259Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:47.694446Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:47.694474Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-03-28T09:03:47.694514Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:47.694555Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:47.694789Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:47.694820Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-03-28T09:03:47.694859Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:47.694891Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:47.695104Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:47.695132Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-03-28T09:03:47.695172Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:47.695203Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:47.695385Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:47.695423Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-03-28T09:03:47.695480Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:47.695512Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:47.695667Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:47.695696Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-03-28T09:03:47.695735Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:47.695769Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:47.695935Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:47.695963Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-03-28T09:03:47.696002Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, 
propose latency: 1 ms 2025-03-28T09:03:47.696032Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:47.696313Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-03-28T09:03:47.696358Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:47.696399Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2025-03-28T09:03:47.696598Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-03-28T09:03:47.696630Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:47.696667Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-03-28T09:03:47.696807Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-03-28T09:03:47.696839Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:47.696866Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-03-28T09:03:47.696944Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-03-28T09:03:47.696971Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:47.696998Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-03-28T09:03:47.697226Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-03-28T09:03:47.697262Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:47.697291Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-03-28T09:03:47.697777Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-28T09:03:47.697842Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:47.697875Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-03-28T09:03:47.698504Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-28T09:03:47.698558Z node 32 :TX_DATASHARD TRACE: 
StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:47.698587Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-03-28T09:03:47.698796Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-28T09:03:47.698828Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:47.698856Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-03-28T09:03:47.699069Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-03-28T09:03:47.699101Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:47.699128Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 31 31 29 23 31 31 31 26 31 31 21 27 28 23 29 22 23 23 25 29 - 28 3 26 29 28 3 - 25 - - - actual 31 31 29 23 31 31 31 26 31 31 21 27 28 23 29 22 23 23 25 29 - 28 3 26 29 28 3 - 25 - - - interm 4 3 4 5 4 5 6 6 3 1 6 3 1 6 6 6 1 - 1 1 - - 3 - - 3 3 - - - - - >> PartitionStats::CollectorOverload [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop >> KqpWorkloadService::TestLargeConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestLessConcurrentQueryLimit >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] |76.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpPg::InsertValuesFromTableWithDefault-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] >> TSchemeShardViewTest::ReadOnlyMode [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] Leader for TabletID 72057594046678944 is [1:235:2153] sender: [1:236:2060] recipient: [1:218:2142] 2025-03-28T09:02:50.494665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:02:50.494802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:50.494838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:02:50.494872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:02:50.494911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:02:50.494938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:02:50.494996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:50.495126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:02:50.495457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:50.587279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:02:50.587336Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:50.601890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:50.602323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:02:50.602479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:02:50.609691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:02:50.611495Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:02:50.612100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:50.612305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:50.615208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:50.616355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:50.616416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:50.616585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:02:50.616629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:50.616668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:02:50.616811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:02:50.623092Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:235:2153] sender: [1:348:2060] recipient: [1:17:2064] 2025-03-28T09:02:50.736465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:50.736685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:50.736899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:02:50.737121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:02:50.737180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:50.743567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:50.743703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:02:50.743948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:50.744019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:02:50.744059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:02:50.744094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:02:50.747112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:50.747196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 
2025-03-28T09:02:50.747235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:02:50.749493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:50.749562Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:50.749605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:50.749655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:02:50.752894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:02:50.754600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:02:50.754786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:02:50.755708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:50.755841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 243 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:50.755895Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:50.756180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:02:50.756239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:50.756401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:50.756468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:02:50.758558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:50.758600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:50.758791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:50.758841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:315:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:02:50.759192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:50.759242Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:02:50.759336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:50.759372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:50.759409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:50.759443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:50.759488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:02:50.759540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:50.759627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:02:50.759660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:02:50.759729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:50.759761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:02:50.759795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:02:50.761691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:50.761795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:50.761840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
hemaChanged> complete, operationId: 104:2, at schemeshard: 72057594046678944 2025-03-28T09:03:51.417093Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:03:51.417141Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:2 2025-03-28T09:03:51.417264Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:974:2743] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2025-03-28T09:03:51.417397Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:235:2153], Recipient [7:974:2743]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2025-03-28T09:03:51.417434Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-03-28T09:03:51.417478Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409550 state Ready 2025-03-28T09:03:51.417567Z node 7 :TX_DATASHARD DEBUG: 72075186233409550 Got TEvSchemaChangedResult from SS at 72075186233409550 2025-03-28T09:03:51.417889Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:03:51.417939Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:03:51.417993Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2025-03-28T09:03:51.418033Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:2 ProgressState 2025-03-28T09:03:51.418163Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:03:51.418208Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 2/3 2025-03-28T09:03:51.418246Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-03-28T09:03:51.418283Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 2/3 2025-03-28T09:03:51.418322Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-03-28T09:03:51.418361Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-03-28T09:03:51.418681Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.418757Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:03:51.418787Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 104:0 2025-03-28T09:03:51.418848Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:978:2746] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2025-03-28T09:03:51.418942Z node 7 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [7:235:2153], Recipient [7:978:2746]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2025-03-28T09:03:51.418969Z node 7 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-03-28T09:03:51.418996Z node 7 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 104 datashard 72075186233409551 state Ready 2025-03-28T09:03:51.419054Z node 7 :TX_DATASHARD DEBUG: 72075186233409551 Got TEvSchemaChangedResult from SS at 72075186233409551 2025-03-28T09:03:51.419265Z node 7 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, received event# 2146435072, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:03:51.419296Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:03:51.419347Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.419380Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-28T09:03:51.419437Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:03:51.419463Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-03-28T09:03:51.419500Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-28T09:03:51.419531Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-03-28T09:03:51.419559Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-28T09:03:51.419584Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-03-28T09:03:51.419642Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:573:2399] message: TxId: 104 2025-03-28T09:03:51.419689Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-28T09:03:51.419759Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:03:51.419826Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:03:51.419965Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2025-03-28T09:03:51.420011Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-03-28T09:03:51.420031Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-03-28T09:03:51.420069Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 3 2025-03-28T09:03:51.420093Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-03-28T09:03:51.420111Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-03-28T09:03:51.420158Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 3 2025-03-28T09:03:51.422936Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:03:51.423079Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:03:51.423185Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:573:2399] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 104 at schemeshard: 72057594046678944 2025-03-28T09:03:51.423386Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:03:51.423429Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:1031:2784] 2025-03-28T09:03:51.423649Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1033:2786], Recipient 
[7:235:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:03:51.423683Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:03:51.423716Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-03-28T09:03:51.424700Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:551:2102], Recipient [7:235:2153] 2025-03-28T09:03:51.424752Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:03:51.427509Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/tmp" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "NotTempTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Utf8" } KeyColumnNames: "key" } IndexDescription { Name: "ValueIndex" KeyColumnNames: "value" } } AllowCreateInTempDir: false } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:51.428069Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-03-28T09:03:51.428133Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-03-28T09:03:51.454832Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:03:51.457884Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/tmp\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:51.458135Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE TABLE WITH INDEXES, path: /MyRoot/tmp/NotTempTable 2025-03-28T09:03:51.458218Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-28T09:03:51.458751Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-28T09:03:51.458808Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-28T09:03:51.459293Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1103:2856], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:03:51.459356Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:03:51.459399Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T09:03:51.459548Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:573:2399], Recipient [7:235:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 105 2025-03-28T09:03:51.459586Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-28T09:03:51.459665Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-03-28T09:03:51.459824Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-28T09:03:51.459870Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:1101:2854] 2025-03-28T09:03:51.460076Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1103:2856], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:03:51.460116Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:03:51.460163Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 >> KqpPg::InsertFromSelect_Simple+useSink [GOOD] >> KqpPg::InsertFromSelect_Simple-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:50.716888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:50.717030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:50.717076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:50.717120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:50.717178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:50.717212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:50.717274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:50.717371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:50.717684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:50.831539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:50.831599Z node 1 :IMPORT 
WARN: Table profiles were not loaded 2025-03-28T09:03:50.851372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:50.851660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:50.851834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:50.859415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:50.859643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:50.860344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:50.860562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:50.863148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:50.864363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:50.864429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:50.864591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:50.864635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:50.864675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:50.864756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:50.872451Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:51.017015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:51.017258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.017476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:51.017744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:51.017839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.021556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:51.021705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:51.021928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.021989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:51.022027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:51.022062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:51.024324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.024399Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:51.024438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:51.027249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.027307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.027359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:51.027432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:51.038759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:51.041948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:51.042153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:51.043229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:51.043383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:51.043443Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:51.043760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:51.043818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:51.043973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:51.044047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:51.046254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:51.046304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:51.046500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:51.046560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:51.046950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.047014Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:51.047117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:51.047152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:51.047204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:51.047257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:51.047292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:51.047335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:51.047376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:51.047406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:51.047481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:51.047527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:51.047575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:51.049482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:51.049611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:51.049659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:03:52.373271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:03:52.373439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:03:52.373545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:03:52.373634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:03:52.373871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:03:52.373984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:03:52.374132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:03:52.374349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-28T09:03:52.374550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:03:52.374621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:03:52.374675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:03:52.384397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:52.384508Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:52.384768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:52.384838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:52.384896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:52.385093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 Leader for TabletID 72057594046678944 is [1:378:2347] sender: [1:433:2058] recipient: [1:15:2062] 2025-03-28T09:03:52.425263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: "Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:52.425491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0 2025-03-28T09:03:52.425573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query" 2025-03-28T09:03:52.425744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 
2025-03-28T09:03:52.425837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-03-28T09:03:52.425892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:52.433566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2025-03-28T09:03:52.433782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView 2025-03-28T09:03:52.433996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:03:52.434045Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState 2025-03-28T09:03:52.434101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-03-28T09:03:52.434225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:52.437730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-03-28T09:03:52.437971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000003 2025-03-28T09:03:52.438727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:52.438847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:52.438902Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003 2025-03-28T09:03:52.439047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-03-28T09:03:52.439221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:52.439289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 FAKE_COORDINATOR: Erasing txId 103 2025-03-28T09:03:52.442250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:52.442314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:52.442494Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:03:52.442593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:52.442634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:427:2385], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-03-28T09:03:52.442680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:427:2385], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-03-28T09:03:52.443165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:03:52.443220Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-28T09:03:52.443345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:03:52.443383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:03:52.443421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:03:52.443469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:03:52.443506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-03-28T09:03:52.443564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:03:52.443604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:03:52.443654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:03:52.443751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:03:52.443805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-03-28T09:03:52.443837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-28T09:03:52.443865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-28T09:03:52.444580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:03:52.445530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:03:52.445589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:03:52.445634Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:03:52.445676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:03:52.446575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:03:52.446922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:03:52.446959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:03:52.447001Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-28T09:03:52.447032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:03:52.447119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-03-28T09:03:52.468424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:03:52.468550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |76.5%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} >> ResourcePoolsSysView::TestResourcePoolsSysViewOnServerless [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters |76.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |76.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |76.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |76.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... 
results_accumulator.log}
|76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest
|76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest
>> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex [GOOD]
>> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty
>> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty [GOOD]
>> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers [GOOD]
>> TFlatTableExecutor_LongTx::MemTableLongTx
>> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD]
>> TFlatTableExecutor_LongTx::MemTableLongTx [GOOD]
>> TFlatTableExecutor_LongTx::CompactUncommittedLongTx [GOOD]
>> TFlatTableExecutor_LongTx::CompactCommittedLongTx
>> TFlatTableExecutor_LongTx::CompactCommittedLongTx [GOOD]
>> TFlatTableExecutor_LongTx::CompactedLongTxRestart [GOOD]
>> TFlatTableExecutor_LongTx::CompactMultipleChanges
>> TFlatTableExecutor_LongTx::CompactMultipleChanges [GOOD]
>> TFlatTableExecutor_LongTx::LongTxBorrow
>> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless [GOOD]
>> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier
>> TFlatTableExecutor_LongTx::LongTxBorrow [GOOD]
>> TFlatTableExecutor_LongTx::MemTableLongTxRead [GOOD]
>> TFlatTableExecutor_LongTx::CompactedTxIdReuse
|76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest
>> TFlatTableExecutor_LongTx::CompactedTxIdReuse [GOOD]
>> TFlatTableExecutor_LongTx::MergeSkewedCommitted
>> TFlatTableExecutor_LongTx::MergeSkewedCommitted [GOOD]
>> TFlatTableExecutor_LongTxAndBlobs::SmallValues
>> ColumnBuildTest::CancelBuild [GOOD]
>> TFlatTableExecutor_LongTxAndBlobs::SmallValues [GOOD]
>> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues
>> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues [GOOD]
>> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues
|76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest
>> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues [GOOD]
>> TFlatTableExecutor_LowPriorityTxs::TestEnqueueCancel [GOOD]
>> TFlatTableExecutor_LowPriorityTxs::TestLowPriority [GOOD]
>> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityCancel [GOOD]
>> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel
>> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel [GOOD]
>> TFlatTableExecutor_MoveTableData::TestMoveSnapshot
>> TFlatTableExecutor_MoveTableData::TestMoveSnapshot [GOOD]
>> TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower [GOOD]
>> TFlatTableExecutor_PostponedScan::TestPostponedScan
|76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login
|76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login
|76.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login
|76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest
|76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan
|76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan
>> TFlatTableExecutor_PostponedScan::TestPostponedScan [GOOD]
>> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan
|76.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan
|76.5%| [LD]
{default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |76.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |76.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:51.257795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:51.257906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:51.257942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:51.257973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:51.258022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:51.258047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:51.258126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:51.258195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:51.258499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:51.341572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:51.341633Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:51.367439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:51.367715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:51.367867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:51.373875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:51.374081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:51.374713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:51.374923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:51.377513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:51.379006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:51.379070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:51.379239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:51.379291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:51.379340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:51.379428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.386895Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:51.506794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:51.507046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.507288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:51.507541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:51.507608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.509892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:51.509988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:51.510130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.510185Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:51.510210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:51.510231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:51.512031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.512087Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:51.512122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:51.513622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.513656Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.513693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:51.513738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:51.516412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:51.518084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:51.518220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:51.519195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:51.519305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:51.519348Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:51.519539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:51.519581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:51.519704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:51.519756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:51.521439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:51.521477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:51.521646Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:51.521686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:51.521972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.522013Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:51.522091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 
1/1 2025-03-28T09:03:51.522116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:51.522151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:51.522172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:51.522195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:51.522235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:51.522262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:51.522285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:51.522338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:51.522377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:51.522410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:51.523799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:51.523875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:51.523902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:03:55.173835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-03-28T09:03:55.173966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000007 2025-03-28T09:03:55.174309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-03-28T09:03:55.174344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-03-28T09:03:55.174392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-03-28T09:03:55.174538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:55.174636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:55.174716Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710761:0 HandleReply TEvOperationPlan: step# 5000007 2025-03-28T09:03:55.174759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710761:0 128 -> 240 2025-03-28T09:03:55.176896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710761:0, at schemeshard: 72057594046678944 2025-03-28T09:03:55.176944Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710761:0 ProgressState 2025-03-28T09:03:55.177028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-03-28T09:03:55.177072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-03-28T09:03:55.177112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-03-28T09:03:55.177134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-03-28T09:03:55.177175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: true 2025-03-28T09:03:55.177242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:123:2149] message: TxId: 281474976710761 2025-03-28T09:03:55.177298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-03-28T09:03:55.177328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-03-28T09:03:55.177353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-03-28T09:03:55.177417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 2] was 12 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-03-28T09:03:55.179498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-03-28T09:03:55.179573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-03-28T09:03:55.179652Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfoId: 102 2025-03-28T09:03:55.179736Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710761, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1177:3030], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:03:55.181556Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-28T09:03:55.181645Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1177:3030], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:03:55.181711Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-03-28T09:03:55.183561Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-28T09:03:55.183670Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [1:1177:3030], AlterMainTableTxId: 281474976710757, AlterMainTableTxStatus: StatusAccepted, 
AlterMainTableTxDone: 1, LockTxId: 281474976710758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:03:55.183709Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-03-28T09:03:55.183849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:03:55.183893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1201:3054] TestWaitNotification: OK eventTxId 102 2025-03-28T09:03:55.186319Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-03-28T09:03:55.186591Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 0 } 2025-03-28T09:03:55.189212Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:03:55.189474Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 293us result status StatusSuccess 2025-03-28T09:03:55.189900Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "DefaultValue" Type: "Uint64" TypeId: 4 Id: 4 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } 
value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool [GOOD] >> KqpWorkloadService::TestStartQueryAfterCancel >> PartitionStats::Collector [GOOD] >> KqpPg::CreateTableBulkUpsertAndRead [GOOD] >> KqpPg::CopyTableSerialColumns+useSink >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] Test command err: 2025-03-28T09:02:06.443714Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:02:06.810974Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:02:06.812729Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:02:06.838445Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:07.696172Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:02:09.798199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:02:09.798564Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:10.052261Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:10.099165Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:10.139867Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:02:10.140729Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:02:10.141141Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:02:10.157705Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 
2025-03-28T09:02:10.158205Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:10.171335Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:02:11.359734Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:12.011531Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:02:12.011719Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:12.011814Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:02:12.012768Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:02:12.013470Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:02:12.014173Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:02:12.024936Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:12.026025Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:12.035331Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:02:12.035423Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:02:12.036489Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:02:12.037510Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:12.037862Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:02:12.038193Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:02:12.051040Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:02:12.051727Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:02:12.052103Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:02:12.053617Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:12.053654Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:12.054023Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:02:12.124090Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:02:12.124506Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:02:12.125623Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:02:12.146025Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:02:12.146891Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:02:12.147295Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:02:12.148038Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:02:12.148784Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:02:12.149509Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:02:12.149894Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:02:12.183737Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:02:12.184227Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:02:12.184635Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:02:12.185040Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:02:12.185108Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:02:12.186344Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:02:12.198373Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:02:12.199146Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:02:12.199197Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:02:12.248555Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:02:12.248975Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:02:12.249663Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:02:12.250044Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:02:12.251193Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:02:12.269291Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:12.270046Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:02:12.270421Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:02:12.272159Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:02:12.272189Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:02:12.272673Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:02:12.283025Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:02:12.283732Z node 1 :TX_DATASHARD TRACE: 
Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:02:12.284095Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:02:12.385697Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:02:12.386407Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:02:12.394837Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:12.394888Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:02:12.395570Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:02:12.395923Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:02:12.396565Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:02:12.397297Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:02:12.397976Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:02:12.398374Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:02:12.410585Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:02:12.411362Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:02:12.411696Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:02:12.413543Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:02:12.413892Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:02:12.413917Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:02:12.413937Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:02:12.413955Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:02:12.414016Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:02:12.414357Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:02:12.423093Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:02:12.423827Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:02:12.424237Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:02:12.424904Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:02:12.425619Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:02:12.426281Z node 1 :TX_DATA ... 
y: 1 ms 2025-03-28T09:03:54.463445Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:54.463617Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:54.463649Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-03-28T09:03:54.463693Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:54.463726Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:54.463937Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:54.463968Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-03-28T09:03:54.464010Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:54.464041Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:54.464202Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:54.464233Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-03-28T09:03:54.464278Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:54.464307Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:54.464465Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:54.464497Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-03-28T09:03:54.464539Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:54.464572Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:54.464723Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:54.464753Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-03-28T09:03:54.464794Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:54.464823Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:54.465030Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:54.465061Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-03-28T09:03:54.465102Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:54.465132Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:54.465296Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:54.465328Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit 
CompleteOperation 2025-03-28T09:03:54.465367Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:54.465397Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:54.465593Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:54.465624Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-03-28T09:03:54.465665Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:54.465696Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:54.465916Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:54.465949Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-03-28T09:03:54.465990Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:54.466022Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:54.466231Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:54.466266Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-03-28T09:03:54.466309Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:54.466342Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:54.466515Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:54.466545Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-03-28T09:03:54.466587Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:54.466618Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:54.466834Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:54.466868Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-03-28T09:03:54.466908Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:54.466941Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:03:54.467090Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:03:54.467120Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-03-28T09:03:54.467161Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:03:54.467192Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2025-03-28T09:03:54.467573Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-03-28T09:03:54.467625Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:54.467669Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 2025-03-28T09:03:54.467797Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-03-28T09:03:54.467832Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:54.467864Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-03-28T09:03:54.468225Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-03-28T09:03:54.468271Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:54.468304Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-03-28T09:03:54.468667Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-03-28T09:03:54.468733Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:54.468765Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-03-28T09:03:54.469052Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-28T09:03:54.469089Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:54.469119Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-03-28T09:03:54.469222Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-28T09:03:54.469253Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:54.469283Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-03-28T09:03:54.471632Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-28T09:03:54.471686Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:54.471828Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 
9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-03-28T09:03:54.472112Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-03-28T09:03:54.472151Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:54.472186Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 2025-03-28T09:03:54.472475Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:234:2227], Recipient [32:346:2313]: {TEvReadSet step# 1000004 txid# 14 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 10} 2025-03-28T09:03:54.472514Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:03:54.472547Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 14 expect 27 30 26 27 28 29 30 31 30 30 24 20 19 28 28 27 28 27 27 26 28 12 20 28 26 24 13 13 13 14 - - actual 27 30 26 27 28 29 30 31 30 30 24 20 19 28 28 27 28 27 27 26 28 12 20 28 26 24 13 13 13 14 - - interm 5 1 6 - 6 - 5 2 - - - 5 - 5 - 6 6 1 - 6 6 - - - 6 - - - - - - - |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] >> ColumnBuildTest::BaseCase [GOOD] >> ColumnBuildTest::ValidDefaultValue [GOOD] >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] >> ResourcePoolsDdl::TestDropResourcePool >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:50.961074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:50.961186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:50.961225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:50.961259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:50.961314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:50.961343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:50.961396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:50.961472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:50.961798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:51.054161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:51.054208Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:51.087764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:51.088066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:51.088227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:51.114670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:51.114954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:51.115638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:51.115873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:51.121380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:51.122814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:51.123003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:51.123187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:51.123238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:51.123281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:51.123375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.131137Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:51.284914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:51.285143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.285360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:51.285622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:51.285685Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.291802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:51.291951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:51.292135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.292193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:51.292231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:51.292261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:51.294567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.294624Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:51.294656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:51.296547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.296595Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.296648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:51.296695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:51.308925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:51.311147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:51.311316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:51.312319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:51.312464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:51.312512Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:51.312796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:51.312851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:51.313001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:51.313079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:51.315551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:51.315598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:51.315760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:51.315810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:51.316139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.316192Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:51.316292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:51.316326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:51.316359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:51.316386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:51.316421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:51.316469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:51.316508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:51.316537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:51.316601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:51.316634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:51.316681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:51.318535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:51.318642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:51.318680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1146:3017], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2025-03-28T09:03:56.638719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-03-28T09:03:56.638770Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2025-03-28T09:03:56.638808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2025-03-28T09:03:56.638905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2025-03-28T09:03:56.642813Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-03-28T09:03:56.642929Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1146:3017], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-03-28T09:03:56.643100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2025-03-28T09:03:56.643208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2025-03-28T09:03:56.643400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2025-03-28T09:03:56.643435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2025-03-28T09:03:56.643468Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2025-03-28T09:03:56.657299Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1817:3680], Recipient [1:753:2642]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409549 ClientId: [1:1817:3680] ServerId: [1:1820:3683] } 2025-03-28T09:03:56.657359Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-28T09:03:56.734432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2025-03-28T09:03:56.734569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 0 RawX2: 0 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2025-03-28T09:03:56.734673Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2025-03-28T09:03:56.734743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2025-03-28T09:03:56.747549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-03-28T09:03:56.747616Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-03-28T09:03:56.747696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-03-28T09:03:56.747726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-03-28T09:03:56.747760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-03-28T09:03:56.747783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-03-28T09:03:56.747822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-03-28T09:03:56.747882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:564:2502] message: TxId: 281474976725761 2025-03-28T09:03:56.747923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-03-28T09:03:56.747950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976725761:0 2025-03-28T09:03:56.747992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2025-03-28T09:03:56.748065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-03-28T09:03:56.765335Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-03-28T09:03:56.765418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2025-03-28T09:03:56.765522Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2025-03-28T09:03:56.765613Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], 
TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1146:3017], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-03-28T09:03:56.770444Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-03-28T09:03:56.770563Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1146:3017], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-03-28T09:03:56.770624Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-28T09:03:56.779746Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-03-28T09:03:56.779892Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1146:3017], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-03-28T09:03:56.779930Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 
2025-03-28T09:03:56.780113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-28T09:03:56.780154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1164:3035] TestWaitNotification: OK eventTxId 106 2025-03-28T09:03:56.782667Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-03-28T09:03:56.783025Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "DefaultValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } } Progress: 100 } |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::ValidDefaultValue [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:51.553743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:51.553874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:51.553931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:51.553964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:51.554024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:51.554053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:51.554139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:51.554215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:51.554524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-03-28T09:03:51.639057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:51.639119Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:51.658766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:51.659059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:51.659264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:51.665292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:51.665517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:51.666209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:51.666467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:51.668461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:51.669751Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:51.669828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:51.669990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:51.670039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:51.670084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:51.670176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.676997Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:51.822185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:51.822439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.822710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:51.822975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:51.823034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.825589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:51.825746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:51.825994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.826077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:51.826134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:51.826166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:51.828402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.828468Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:51.828504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:51.830540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.830590Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.830632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:51.830703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:51.834329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:51.836688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:51.836886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:51.837950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:51.838100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:51.838152Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:51.838415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:51.838462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:51.838649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-03-28T09:03:51.838771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:51.840854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:51.840905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:51.841118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:51.841199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:51.841563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:51.841615Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:51.841731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:51.841792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:51.841831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:51.841883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:51.841918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:51.841958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:51.841990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:51.842019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:51.842088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:51.842126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:51.842178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:51.844054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:51.844166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:51.844207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1146:3017], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }}, record: Status: StatusAccepted TxId: 281474976725761 SchemeshardId: 72075186233409549 PathId: 2 2025-03-28T09:03:56.819253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-03-28T09:03:56.819305Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 ProgressState 2025-03-28T09:03:56.819356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976725761 ready parts: 1/1 2025-03-28T09:03:56.819465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409550 message:Transaction { AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 281474976725761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2025-03-28T09:03:56.829505Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-03-28T09:03:56.829626Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1146:3017], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-03-28T09:03:56.829864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976725761:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725761 msg type: 269090816 2025-03-28T09:03:56.829968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976725761, partId: 4294967295, tablet: 72075186233409550 2025-03-28T09:03:56.830129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976725761, at schemeshard: 72075186233409549 2025-03-28T09:03:56.830161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 0/1, is published: true 2025-03-28T09:03:56.830195Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976725761, at schemeshard: 72075186233409549 2025-03-28T09:03:56.847811Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:1817:3680], Recipient [1:753:2642]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409549 ClientId: [1:1817:3680] ServerId: [1:1819:3682] } 2025-03-28T09:03:56.847887Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-28T09:03:56.916418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 650, transactions count in step: 1, at schemeshard: 72075186233409549 2025-03-28T09:03:56.916581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725761 AckTo { RawX1: 0 RawX2: 0 } } Step: 650 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2025-03-28T09:03:56.916647Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDropLock TPropose opId# 281474976725761:0 HandleReply TEvOperationPlan: step# 650 2025-03-28T09:03:56.916688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976725761:0 128 -> 240 2025-03-28T09:03:56.921151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976725761:0, at schemeshard: 72075186233409549 2025-03-28T09:03:56.921225Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409549] TDone opId# 281474976725761:0 ProgressState 2025-03-28T09:03:56.921345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-03-28T09:03:56.921376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-03-28T09:03:56.921410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976725761:0 progress is 1/1 2025-03-28T09:03:56.921437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-03-28T09:03:56.921468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2025-03-28T09:03:56.921537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:564:2502] message: TxId: 281474976725761 2025-03-28T09:03:56.921577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2025-03-28T09:03:56.921606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976725761:0 2025-03-28T09:03:56.921650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976725761:0 2025-03-28T09:03:56.921755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-03-28T09:03:56.926770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2025-03-28T09:03:56.926870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976725761 2025-03-28T09:03:56.926962Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfoId: 106 2025-03-28T09:03:56.927050Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976725761, buildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], 
TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1146:3017], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-03-28T09:03:56.929306Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-03-28T09:03:56.929444Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1146:3017], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-03-28T09:03:56.929512Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-28T09:03:56.932909Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 106 2025-03-28T09:03:56.933034Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 2], IndexType: EIndexTypeInvalid, IndexName: , State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:1146:3017], AlterMainTableTxId: 281474976725757, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725758, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }, Billed: { upload rows: 101, upload bytes: 2424, read rows: 101, read bytes: 2424 }} 2025-03-28T09:03:56.933082Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 
2025-03-28T09:03:56.933266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-28T09:03:56.933311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1164:3035] TestWaitNotification: OK eventTxId 106 2025-03-28T09:03:56.935961Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2025-03-28T09:03:56.936272Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 column_build_operation { column { ColumnName: "ColumnValue" default_from_literal { type { type_id: UINT64 } value { uint64_value: 1111 } } } } } Progress: 100 } |76.6%| [TA] $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] >> ResourcePoolClassifiersDdl::TestExplicitPoolId [GOOD] >> ResourcePoolClassifiersDdl::TestMultiGroupClassification |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |76.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |76.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |76.6%| [TA] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |76.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |76.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... 
results_accumulator.log} |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:02:57.046713Z 00000.009 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.017 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.018 II| FAKE_ENV: Starting storage for BS group 0 00000.018 II| FAKE_ENV: Starting storage for BS group 1 00000.018 II| FAKE_ENV: Starting storage for BS group 2 00000.018 II| FAKE_ENV: Starting storage for BS group 3 00000.039 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [1:30:2062]) priority=200 resources={1, 0} 00000.040 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [1:30:2062]) to queue queue_background_compaction 00000.040 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [1:30:2062]) from queue queue_background_compaction 00000.040 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [1:30:2062]) to queue queue_background_compaction 00000.040 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 50.000000 (insert task gen0-table-101-tablet-1 (1 by [1:30:2062])) 00000.050 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [1:30:2062]) (release resources {1, 0}) 00000.050 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 50.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [1:30:2062])) 00000.060 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.060 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.061 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.061 II| FAKE_ENV: DS.0 gone, left {771b, 9}, put {791b, 10} 00000.061 II| FAKE_ENV: DS.1 gone, left {1347b, 10}, put {1347b, 10} 00000.061 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.061 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.061 II| FAKE_ENV: All BS storage groups are stopped 00000.061 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.061 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 23}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:02:57.160583Z 00000.010 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.016 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.017 II| FAKE_ENV: Starting storage for BS group 0 00000.017 II| FAKE_ENV: Starting storage for BS group 1 00000.017 II| FAKE_ENV: Starting storage for BS group 2 00000.017 II| FAKE_ENV: Starting storage for BS group 3 00000.029 DD| RESOURCE_BROKER: Submitted new background_compaction task bckg-block (987987987987 by [2:8:2055]) priority=0 resources={1, 0} 00000.029 DD| RESOURCE_BROKER: Assigning waiting task bckg-block (987987987987 by [2:8:2055]) to queue queue_background_compaction 00000.029 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987987 by [2:8:2055]) from queue queue_background_compaction 00000.029 DD| 
RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987987 by [2:8:2055]) to queue queue_background_compaction 00000.029 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 300.000000 (insert task bckg-block (987987987987 by [2:8:2055])) 00000.031 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [2:30:2062]) priority=200 resources={1, 0} 00000.032 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_background_compaction 00000.032 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.033 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (1 by [2:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.033 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_compaction_gen0 00000.033 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [2:30:2062]) from queue queue_compaction_gen0 00000.034 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [2:30:2062]) to queue queue_compaction_gen0 00000.034 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 5.000000 (insert task gen0-table-101-tablet-1 (1 by [2:30:2062])) 00000.041 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [2:30:2062]) (release resources {1, 0}) 00000.041 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 5.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [2:30:2062])) 00000.048 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.048 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.048 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.048 II| FAKE_ENV: DS.0 gone, left {1262b, 14}, put {1282b, 15} 00000.048 II| FAKE_ENV: DS.1 gone, left {1890b, 15}, put {1890b, 15} 00000.048 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.048 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.048 II| FAKE_ENV: All BS storage groups are stopped 00000.048 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.048 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 31}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:02:57.224237Z 00000.011 DD| RESOURCE_BROKER: TResourceBrokerActor bootstrap 00000.013 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.013 II| FAKE_ENV: Starting storage for BS group 0 00000.014 II| FAKE_ENV: Starting storage for BS group 1 00000.014 II| FAKE_ENV: Starting storage for BS group 2 00000.014 II| FAKE_ENV: Starting storage for BS group 3 00000.026 DD| RESOURCE_BROKER: Submitted new background_compaction task bckg-block (987987987987 by [3:8:2055]) priority=0 resources={1, 0} 00000.026 DD| RESOURCE_BROKER: Assigning waiting task bckg-block (987987987987 by [3:8:2055]) to queue queue_background_compaction 00000.026 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987987 by [3:8:2055]) from queue queue_background_compaction 00000.026 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987987 by [3:8:2055]) to queue queue_background_compaction 00000.026 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 0.000000 to 300.000000 (insert task 
bckg-block (987987987987 by [3:8:2055])) 00000.032 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (1 by [3:30:2062]) priority=200 resources={1, 0} 00000.033 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_background_compaction 00000.033 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.034 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (1 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.034 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_compaction_gen0 00000.034 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (1 by [3:30:2062]) from queue queue_compaction_gen0 00000.034 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [3:30:2062]) to queue queue_compaction_gen0 00000.034 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 5.000000 (insert task gen0-table-101-tablet-1 (1 by [3:30:2062])) 00000.042 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [3:30:2062]) (release resources {1, 0}) 00000.042 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 5.000000 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [3:30:2062])) 00000.048 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (2 by [3:30:2062]) priority=200 resources={1, 0} 00000.048 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_background_compaction 00000.049 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.050 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (2 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.050 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_compaction_gen0 00000.050 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (2 by [3:30:2062]) from queue queue_compaction_gen0 00000.050 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (2 by [3:30:2062]) to queue queue_compaction_gen0 00000.050 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.750000 (insert task gen0-table-101-tablet-1 (2 by [3:30:2062])) 00000.090 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (2 by [3:30:2062]) (release resources {1, 0}) 00000.090 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 4.750000 to 0.000000 (remove task gen0-table-101-tablet-1 (2 by [3:30:2062])) 00000.096 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (3 by [3:30:2062]) priority=200 resources={1, 0} 00000.096 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_background_compaction 00000.096 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.097 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (3 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.097 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_compaction_gen0 00000.098 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task 
gen0-table-101-tablet-1 (3 by [3:30:2062]) from queue queue_compaction_gen0 00000.098 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (3 by [3:30:2062]) to queue queue_compaction_gen0 00000.098 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.500000 (insert task gen0-table-101-tablet-1 (3 by [3:30:2062])) 00000.105 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (3 by [3:30:2062]) (release resources {1, 0}) 00000.105 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 4.500000 to 0.000000 (remove task gen0-table-101-tablet-1 (3 by [3:30:2062])) 00000.106 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (4 by [3:30:2062]) priority=200 resources={1, 0} 00000.106 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_background_compaction 00000.106 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.112 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (4 by [3:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.112 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_compaction_gen0 00000.112 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (4 by [3:30:2062]) from queue queue_compaction_gen0 00000.112 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (4 by [3:30:2062]) to queue queue_compaction_gen0 00000.113 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 4.250000 (insert task gen0-table-101-tablet-1 (4 by [3:30:2062])) 00000.124 DD| R ... 
eader{1:2:97} starting compaction 00000.072 II| TABLET_EXECUTOR: Leader{1:2:98} starting Scan{15 on 101, Compact{1.2.97, eph 8}} 00000.072 II| TABLET_EXECUTOR: Leader{1:2:98} started compaction 15 00000.072 II| TABLET_OPS_HOST: Scan{15 on 101, Compact{1.2.97, eph 8}} begin on TSubset{head 9, 1m 1p 0c} 00000.074 II| TABLET_OPS_HOST: Scan{15 on 101, Compact{1.2.97, eph 8}} end=0, 80r seen, TFwd{fetch=1.93KiB,saved=1.93KiB,usage=1.93KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.075 II| TABLET_EXECUTOR: Leader{1:2:99} Compact 15 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 97, product {1 parts epoch 9} done 00000.078 II| TABLET_EXECUTOR: Leader{1:2:109} starting compaction 00000.078 II| TABLET_EXECUTOR: Leader{1:2:110} starting Scan{17 on 101, Compact{1.2.109, eph 9}} 00000.078 II| TABLET_EXECUTOR: Leader{1:2:110} started compaction 17 00000.078 II| TABLET_OPS_HOST: Scan{17 on 101, Compact{1.2.109, eph 9}} begin on TSubset{head 10, 1m 1p 0c} 00000.080 II| TABLET_OPS_HOST: Scan{17 on 101, Compact{1.2.109, eph 9}} end=0, 90r seen, TFwd{fetch=2.21KiB,saved=2.21KiB,usage=2.21KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.082 II| TABLET_EXECUTOR: Leader{1:2:111} Compact 17 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 109, product {1 parts epoch 10} done 00000.085 II| TABLET_EXECUTOR: Leader{1:2:121} starting compaction 00000.086 II| TABLET_EXECUTOR: Leader{1:2:122} starting Scan{19 on 101, Compact{1.2.121, eph 10}} 00000.086 II| TABLET_EXECUTOR: Leader{1:2:122} started compaction 19 00000.086 II| TABLET_OPS_HOST: Scan{19 on 101, Compact{1.2.121, eph 10}} begin on TSubset{head 11, 1m 1p 0c} 00000.088 II| TABLET_OPS_HOST: Scan{19 on 101, Compact{1.2.121, eph 10}} end=0, 100r seen, TFwd{fetch=2.48KiB,saved=2.48KiB,usage=2.48KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.092 II| TABLET_EXECUTOR: Leader{1:2:122} Compact 19 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 121, product {1 parts epoch 11} done 00000.096 II| TABLET_EXECUTOR: Leader{1:2:133} starting compaction 00000.096 II| TABLET_EXECUTOR: Leader{1:2:134} starting Scan{21 on 101, Compact{1.2.133, eph 11}} 00000.097 II| TABLET_EXECUTOR: Leader{1:2:134} started compaction 21 00000.097 II| TABLET_OPS_HOST: Scan{21 on 101, Compact{1.2.133, eph 11}} begin on TSubset{head 12, 1m 1p 0c} 00000.098 II| TABLET_OPS_HOST: Scan{21 on 101, Compact{1.2.133, eph 11}} end=0, 110r seen, TFwd{fetch=2.75KiB,saved=2.75KiB,usage=2.75KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.100 II| TABLET_EXECUTOR: Leader{1:2:135} Compact 21 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 133, product {1 parts epoch 12} done 00000.103 II| TABLET_EXECUTOR: Leader{1:2:137} starting Scan{24 on 101, DummyScan} 00000.104 II| TABLET_OPS_HOST: Scan{24 on 101, DummyScan} begin on TSubset{head 12, 1m 1p 0c} 00000.113 II| TABLET_EXECUTOR: Leader{1:2:146} starting compaction 00000.113 II| TABLET_EXECUTOR: Leader{1:2:147} starting Scan{25 on 101, Compact{1.2.146, eph 12}} 00000.113 II| TABLET_EXECUTOR: Leader{1:2:147} started compaction 25 00000.113 II| TABLET_OPS_HOST: Scan{25 on 101, Compact{1.2.146, eph 12}} begin on TSubset{head 13, 1m 1p 0c} 00000.115 II| TABLET_OPS_HOST: Scan{25 on 101, Compact{1.2.146, eph 12}} end=0, 120r seen, TFwd{fetch=3.03KiB,saved=3.03KiB,usage=3.03KiB,after=0B,before=0B}, bio 
Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.116 II| TABLET_EXECUTOR: Leader{1:2:147} Compact 25 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 146, product {1 parts epoch 13} done 00000.120 II| TABLET_EXECUTOR: Leader{1:2:158} starting compaction 00000.120 II| TABLET_EXECUTOR: Leader{1:2:159} starting Scan{27 on 101, Compact{1.2.158, eph 13}} 00000.120 II| TABLET_EXECUTOR: Leader{1:2:159} started compaction 27 00000.120 II| TABLET_OPS_HOST: Scan{27 on 101, Compact{1.2.158, eph 13}} begin on TSubset{head 14, 1m 1p 0c} 00000.122 II| TABLET_OPS_HOST: Scan{27 on 101, Compact{1.2.158, eph 13}} end=0, 130r seen, TFwd{fetch=3.44KiB,saved=3.44KiB,usage=3.44KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.123 II| TABLET_EXECUTOR: Leader{1:2:160} Compact 27 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 158, product {1 parts epoch 14} done 00000.126 II| TABLET_EXECUTOR: Leader{1:2:170} starting compaction 00000.126 II| TABLET_EXECUTOR: Leader{1:2:171} starting Scan{29 on 101, Compact{1.2.170, eph 14}} 00000.126 II| TABLET_EXECUTOR: Leader{1:2:171} started compaction 29 00000.126 II| TABLET_OPS_HOST: Scan{29 on 101, Compact{1.2.170, eph 14}} begin on TSubset{head 15, 1m 1p 0c} 00000.128 II| TABLET_OPS_HOST: Scan{29 on 101, Compact{1.2.170, eph 14}} end=0, 140r seen, TFwd{fetch=3.87KiB,saved=3.87KiB,usage=3.87KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.132 II| TABLET_EXECUTOR: Leader{1:2:172} Compact 29 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 170, product {1 parts epoch 15} done 00000.135 II| TABLET_EXECUTOR: Leader{1:2:182} starting compaction 00000.135 II| TABLET_EXECUTOR: Leader{1:2:183} starting Scan{31 on 101, Compact{1.2.182, eph 15}} 00000.135 II| TABLET_EXECUTOR: Leader{1:2:183} started compaction 31 00000.135 II| TABLET_OPS_HOST: Scan{31 on 101, Compact{1.2.182, eph 15}} begin on TSubset{head 16, 1m 1p 0c} 00000.137 II| TABLET_OPS_HOST: Scan{31 on 101, Compact{1.2.182, eph 15}} end=0, 150r seen, TFwd{fetch=4.3KiB,saved=4.3KiB,usage=4.3KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.139 II| TABLET_EXECUTOR: Leader{1:2:183} Compact 31 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 182, product {1 parts epoch 16} done 00000.142 II| TABLET_EXECUTOR: Leader{1:2:194} starting compaction 00000.142 II| TABLET_EXECUTOR: Leader{1:2:195} starting Scan{33 on 101, Compact{1.2.194, eph 16}} 00000.142 II| TABLET_EXECUTOR: Leader{1:2:195} started compaction 33 00000.142 II| TABLET_OPS_HOST: Scan{33 on 101, Compact{1.2.194, eph 16}} begin on TSubset{head 17, 1m 1p 0c} 00000.144 II| TABLET_OPS_HOST: Scan{33 on 101, Compact{1.2.194, eph 16}} end=0, 160r seen, TFwd{fetch=4.73KiB,saved=4.73KiB,usage=4.73KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.147 II| TABLET_EXECUTOR: Leader{1:2:195} Compact 33 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 194, product {1 parts epoch 17} done 00000.150 II| TABLET_EXECUTOR: Leader{1:2:206} starting compaction 00000.150 II| TABLET_EXECUTOR: Leader{1:2:207} starting Scan{35 on 101, Compact{1.2.206, eph 17}} 00000.151 II| TABLET_EXECUTOR: Leader{1:2:207} started compaction 35 00000.151 II| TABLET_OPS_HOST: Scan{35 on 101, Compact{1.2.206, eph 17}} begin on TSubset{head 18, 1m 1p 0c} 00000.152 II| TABLET_OPS_HOST: Scan{35 on 101, Compact{1.2.206, eph 17}} end=0, 170r seen, 
TFwd{fetch=5.16KiB,saved=5.16KiB,usage=5.16KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.154 II| TABLET_EXECUTOR: Leader{1:2:207} Compact 35 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 206, product {1 parts epoch 18} done 00000.157 II| TABLET_EXECUTOR: Leader{1:2:218} starting compaction 00000.157 II| TABLET_EXECUTOR: Leader{1:2:219} starting Scan{37 on 101, Compact{1.2.218, eph 18}} 00000.157 II| TABLET_EXECUTOR: Leader{1:2:219} started compaction 37 00000.157 II| TABLET_OPS_HOST: Scan{37 on 101, Compact{1.2.218, eph 18}} begin on TSubset{head 19, 1m 1p 0c} 00000.160 II| TABLET_OPS_HOST: Scan{37 on 101, Compact{1.2.218, eph 18}} end=0, 180r seen, TFwd{fetch=5.59KiB,saved=5.59KiB,usage=5.59KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.167 II| TABLET_EXECUTOR: Leader{1:2:220} Compact 37 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 218, product {1 parts epoch 19} done 00000.169 II| TABLET_EXECUTOR: Leader{1:2:230} starting compaction 00000.169 II| TABLET_EXECUTOR: Leader{1:2:231} starting Scan{39 on 101, Compact{1.2.230, eph 19}} 00000.169 II| TABLET_EXECUTOR: Leader{1:2:231} started compaction 39 00000.169 II| TABLET_OPS_HOST: Scan{39 on 101, Compact{1.2.230, eph 19}} begin on TSubset{head 20, 1m 1p 0c} 00000.171 II| TABLET_OPS_HOST: Scan{39 on 101, Compact{1.2.230, eph 19}} end=0, 190r seen, TFwd{fetch=6.02KiB,saved=6.02KiB,usage=6.02KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.173 II| TABLET_EXECUTOR: Leader{1:2:231} Compact 39 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 230, product {1 parts epoch 20} done 00000.175 II| TABLET_EXECUTOR: Leader{1:2:242} starting compaction 00000.175 II| TABLET_EXECUTOR: Leader{1:2:243} starting Scan{41 on 101, Compact{1.2.242, eph 20}} 00000.175 II| TABLET_EXECUTOR: Leader{1:2:243} started compaction 41 00000.175 II| TABLET_OPS_HOST: Scan{41 on 101, Compact{1.2.242, eph 20}} begin on TSubset{head 21, 1m 1p 0c} 00000.177 II| TABLET_OPS_HOST: Scan{41 on 101, Compact{1.2.242, eph 20}} end=0, 200r seen, TFwd{fetch=6.45KiB,saved=6.45KiB,usage=6.45KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.179 II| TABLET_EXECUTOR: Leader{1:2:244} Compact 41 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 242, product {1 parts epoch 21} done 00000.182 II| TABLET_EXECUTOR: Leader{1:2:254} starting compaction 00000.182 II| TABLET_EXECUTOR: Leader{1:2:255} starting Scan{43 on 101, Compact{1.2.254, eph 21}} 00000.182 II| TABLET_EXECUTOR: Leader{1:2:255} started compaction 43 00000.183 II| TABLET_OPS_HOST: Scan{43 on 101, Compact{1.2.254, eph 21}} begin on TSubset{head 22, 1m 1p 0c} 00000.185 II| TABLET_OPS_HOST: Scan{43 on 101, Compact{1.2.254, eph 21}} end=0, 210r seen, TFwd{fetch=6.88KiB,saved=6.88KiB,usage=6.88KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.186 II| TABLET_EXECUTOR: Leader{1:2:255} Compact 43 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 254, product {1 parts epoch 22} done 00000.190 II| TABLET_EXECUTOR: Leader{1:2:266} starting compaction 00000.191 II| TABLET_EXECUTOR: Leader{1:2:267} starting Scan{45 on 101, Compact{1.2.266, eph 22}} 00000.191 II| TABLET_EXECUTOR: Leader{1:2:267} started compaction 45 00000.191 II| TABLET_OPS_HOST: Scan{45 on 101, Compact{1.2.266, eph 22}} begin on TSubset{head 23, 1m 1p 0c} 00000.194 II| 
TABLET_OPS_HOST: Scan{45 on 101, Compact{1.2.266, eph 22}} end=0, 220r seen, TFwd{fetch=7.43KiB,saved=7.43KiB,usage=7.43KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.198 II| TABLET_EXECUTOR: Leader{1:2:268} Compact 45 on TGenCompactionParams{101: gen 0 epoch +inf, 1 parts} step 266, product {1 parts epoch 23} done 00000.201 II| TABLET_OPS_HOST: Scan{24 on 101, DummyScan} end=0, 111r seen, TFwd{fetch=3.03KiB,saved=3.03KiB,usage=3.03KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1} 00000.201 II| TABLET_EXECUTOR: Leader{1:2:270} suiciding, Waste{2:0, 8879b +(262, 99769b), 269 trc, -99769b acc} 00000.204 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.204 NN| TABLET_SAUSAGECACHE: Poison cache serviced 23 reqs hit {24 76962b} miss {0 0b} 00000.204 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.204 II| FAKE_ENV: DS.0 gone, left {27013b, 269}, put {27033b, 270} 00000.205 II| FAKE_ENV: DS.1 gone, left {111147b, 290}, put {111147b, 290} 00000.206 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.206 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.206 II| FAKE_ENV: All BS storage groups are stopped 00000.206 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.206 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 153}, stopped |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |76.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |76.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant >> KqpPg::DropTablePg [GOOD] |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpPg::DropTablePgMultiple >> KqpSystemView::NodesRange1 |76.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... 
results_accumulator.log} |76.7%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant >> KqpSysColV1::InnerJoinSelect |76.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |76.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |76.7%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut >> TWebLoginService::AuditLogLoginSuccess >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink >> KqpSysColV1::InnerJoinTables >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink >> KqpPg::CreateUniqPgColumn-useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn+useSink >> KqpSysColV1::SelectRowById >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink >> DataShardSnapshots::VolatileSnapshotSplit >> DataShardSnapshots::MvccSnapshotTailCleanup |76.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |76.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |76.7%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |76.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |76.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |76.7%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink >> TWebLoginService::AuditLogLoginSuccess [GOOD] >> TWebLoginService::AuditLogLoginBadPassword >> DataShardSnapshots::MvccSnapshotAndSplit |76.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |76.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |76.7%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut >> TWebLoginService::AuditLogLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginSuccess >> Cdc::NaN[YdsRunner] [GOOD] >> Cdc::NaN[TopicRunner] |76.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |76.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |76.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut >> KqpPg::TypeCoercionInsert+useSink [GOOD] >> KqpPg::TableSelect+useSink >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] >> TWebLoginService::AuditLogLogout >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart [GOOD] >> KqpWorkloadServiceTables::TestLeaseExpiration >> TWebLoginService::AuditLogLogout [GOOD] >> KqpPg::CopyTableSerialColumns+useSink [GOOD] >> KqpPg::CopyTableSerialColumns-useSink >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLogout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] 
recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:04:02.156536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:04:02.156643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:02.156690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:04:02.156719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:04:02.156760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:04:02.156784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:04:02.156836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:02.156935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:04:02.157288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:02.251398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:04:02.251474Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:02.266384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:02.266639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:04:02.266826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:04:02.280759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:04:02.281077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:04:02.281830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:02.285756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:02.292000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:02.293738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:02.293833Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:02.294399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:04:02.294452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:02.294490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:04:02.294675Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:04:02.302413Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:04:02.481309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:02.481548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:02.481776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:04:02.482011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:04:02.482062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:02.491969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:02.492216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:04:02.492536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:02.492623Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:04:02.492667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:04:02.492714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:04:02.495222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:02.495301Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:04:02.495346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:04:02.498592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:02.498672Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:02.498742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:02.498797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:04:02.503348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:04:02.505822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:04:02.506047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:04:02.507273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:02.507439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:02.507489Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:02.507791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:04:02.507849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:02.508057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:02.508139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:04:02.513223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:02.513300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:02.513512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:02.513579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:04:02.513664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:02.513733Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:04:02.513853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:02.513890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:02.513945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:02.513984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:02.514023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:04:02.514068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:02.514122Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:04:02.514170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:04:02.514253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:02.514314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:04:02.514351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:04:02.516424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:04:02.516530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:04:02.516563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 94046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:04:05.367759Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:04:05.367811Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:04:05.367863Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:05.367981Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:04:05.372181Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:04:05.372802Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:04:05.373520Z node 4 :TX_PROXY DEBUG: actor# [4:268:2259] Bootstrap 2025-03-28T09:04:05.399714Z node 4 :TX_PROXY DEBUG: actor# [4:268:2259] Become StateWork (SchemeCache [4:274:2265]) 2025-03-28T09:04:05.403183Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:05.408910Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-03-28T09:04:05.409102Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:04:05.409160Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:04:05.409212Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:04:05.409259Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:04:05.409339Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 0 2025-03-28T09:04:05.409412Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:04:05.409462Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:04:05.409508Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:04:05.409552Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-03-28T09:04:05.409599Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-03-28T09:04:05.411157Z node 4 :TX_PROXY DEBUG: actor# [4:268:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:04:05.414894Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:05.415048Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-03-28T09:04:05.415924Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:05.415994Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:05.416224Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:05.416274Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:04:05.417329Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:05.417460Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:05.417511Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:04:05.417560Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-03-28T09:04:05.417613Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:05.417771Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:04:05.418381Z node 4 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:04:05.420238Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-03-28T09:04:05.420779Z node 4 :HTTP WARN: 127.0.0.1:0 POST /login 2025-03-28T09:04:05.423036Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 
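The TTxLogin record that follows prints both the full JWT issued to user1 and a SanitizedToken in which the trailing signature segment is masked as "**", so audit entries keep the claims readable without leaking a usable credential. The surviving header and payload segments are plain base64url; a minimal standalone sketch (Python; the payload string is copied verbatim from the token below) of how the visible claims decode:

    import base64
    import json

    # Payload segment of the JWT printed by TTxLogin below; the audit log keeps
    # this segment and replaces only the signature with "**".
    payload = "eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk1ODQ1LCJpYXQiOjE3NDMxNTI2NDUsInN1YiI6InVzZXIxIn0"
    # JWTs strip base64 padding, so restore it before decoding.
    padded = payload + "=" * (-len(payload) % 4)
    print(json.loads(base64.urlsafe_b64decode(padded)))
    # {'aud': ['/MyRoot'], 'exp': 1743195845, 'iat': 1743152645, 'sub': 'user1'}

The exp - iat gap is 43200 seconds, i.e. the token issued here is valid for 12 hours.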
2025-03-28T09:04:05.423098Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-03-28T09:04:05.497723Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk1ODQ1LCJpYXQiOjE3NDMxNTI2NDUsInN1YiI6InVzZXIxIn0.qx4WiPF1NvyXA53sik6bMyGPnuxa-iJKqMByKv8RIgIQ-5Fyhpo3PpYyNVbIa16zwdU1FeJc2vfRSKrnCAp5hsyg3jKoTsiAIC8XdRvJXGjrVmROBjWIqf9KGJXc_tpie2jodkoCnCrw24SzO2wHiP1oL1M0VisCGYIalo7EOR4RU0n0JEEbiD0-Ob_CNLROMnnoJ46RnzxCu_EL1NA9FbzVBKcaZKLibHV5TE1UhpK4yFo13Hud5xL5iz_4F09cTUAnFcngAD3o3Q28uqBEWduJ-NORFgVIIBvhZNxtqUuU1JpDhHxoQQ_voKN1RffI4m4xaGnHer4BOIREQm9p1g" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk1ODQ1LCJpYXQiOjE3NDMxNTI2NDUsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-03-28T09:04:05.497845Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:05.497884Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:05.498096Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:05.498138Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:205:2207], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-03-28T09:04:05.499579Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2025-03-28T09:04:05.500387Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:04:05.500612Z node 4 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 267us result status StatusSuccess 2025-03-28T09:04:05.501096Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuo4fnohjSI5k1PCBqKfr\nIHJlVALinp8g51oWkWSZJiDqXtJJWltSqeu7XClUMuI7IXcRLGKRStXqoSozGglA\n3gGj3mOT4fIzg2ArWPxZkRiX0FjmCbYggQ5d3/S3Bx0ZyqlbceJzSg3BQoex16/B\nP//sdKu9BhlxS/+pbU84L2oVop17LECplGjc+jDGq3VgB//MkeOUtGcRyFZQIcoo\np7ZbQGym6s9/wF+HiEs9g01Oay0Uz81epxwudWqHmNVdFjQj2yG93b4ll+28npBe\npSAGv22i7ym5/cLPXQ6ev3Ifaato3E4WxukmT5koDXz+TQhPjEvJdHSNhtxe6C06\n9wIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743239045487 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:05.501597Z node 4 :HTTP WARN: 127.0.0.1:0 POST /logout 2025-03-28T09:04:05.501665Z node 4 :HTTP ERROR: Logout: No ydb_session_id cookie 2025-03-28T09:04:05.502264Z node 4 :HTTP WARN: 127.0.0.1:0 POST /logout 2025-03-28T09:04:05.503066Z node 4 :TICKET_PARSER ERROR: Ticket **** (589A015B): Token is not in correct format 2025-03-28T09:04:05.503159Z node 4 :HTTP ERROR: Logout: Token is not in correct format 2025-03-28T09:04:05.503539Z node 4 :HTTP WARN: 127.0.0.1:0 POST /logout AUDIT LOG buffer(4): 2025-03-28T09:04:05.352362Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-03-28T09:04:05.408707Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2025-03-28T09:04:05.498556Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk1ODQ1LCJpYXQiOjE3NDMxNTI2NDUsInN1YiI6InVzZXIxIn0.**, login_user_level=admin 2025-03-28T09:04:05.504819Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk1ODQ1LCJpYXQiOjE3NDMxNTI2NDUsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS AUDIT LOG checked line: 2025-03-28T09:04:05.504819Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk1ODQ1LCJpYXQiOjE3NDMxNTI2NDUsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS >> DataShardSnapshots::VolatileSnapshotSplit [GOOD] >> DataShardSnapshots::VolatileSnapshotMerge >> KqpScan::RemoteShardScan >> ResourcePoolsDdl::TestDropResourcePool [GOOD] |76.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |76.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |76.8%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBasic >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink >> KqpPg::DropTablePgMultiple [GOOD] >> KqpPg::DropTableIfExists >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> 
ResourcePoolsDdl::TestDropResourcePool [GOOD] Test command err: 2025-03-28T09:03:20.732855Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783410032390251:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:20.733052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cad/r3tmp/tmpe3A4jt/pdisk_1.dat 2025-03-28T09:03:21.120776Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:21.123663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:21.123775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:21.127813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7473, node 1 2025-03-28T09:03:21.246553Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:21.246584Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:21.246591Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:21.246809Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27697 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:21.605085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
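The trace that follows covers the whole pool lifecycle: sample_pool_id is created during bootstrap, the test then issues CREATE RESOURCE POOL my_pool WITH (CONCURRENT_QUERY_LIMIT=1, QUEUE_SIZE=0), tags a SELECT 42 request with pool id my_pool, and after the drop every lookup of my_pool ends in NOT_FOUND. A rough sketch of the same sequence against a standalone cluster (Python; the endpoint and the ydb SDK's QuerySessionPool API are assumptions for illustration, not taken from this log, and the sketch does not show how a client pins a request to a specific pool):

    import ydb

    # Hypothetical connection settings; the test runs an in-process cluster instead.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)

    with ydb.QuerySessionPool(driver) as pool:
        # The same DDL the test sends (see the CREATE RESOURCE POOL record below).
        pool.execute_with_retries(
            "CREATE RESOURCE POOL my_pool WITH (CONCURRENT_QUERY_LIMIT=1, QUEUE_SIZE=0);"
        )
        # QUEUE_SIZE=0 means a second concurrent request is rejected rather than
        # queued once one query already occupies the pool.
        pool.execute_with_retries("SELECT 42;")
        # After the drop, any request still routed to my_pool fails with the
        # "Resource pool my_pool not found" error repeated later in this trace.
        pool.execute_with_retries("DROP RESOURCE POOL my_pool;")

    driver.stop()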
2025-03-28T09:03:24.175521Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-28T09:03:24.175711Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-28T09:03:24.175729Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools were enabled 2025-03-28T09:03:24.175775Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783427212260075:2329], Start check tables existence, number paths: 2 2025-03-28T09:03:24.184220Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmIxYzk3ZTMtNTYxZGMxZDQtZjUzZmRmZGEtYWRkMWY5NDU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZmIxYzk3ZTMtNTYxZGMxZDQtZjUzZmRmZGEtYWRkMWY5NDU= 2025-03-28T09:03:24.184744Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmIxYzk3ZTMtNTYxZGMxZDQtZjUzZmRmZGEtYWRkMWY5NDU=, ActorId: [1:7486783427212260091:2330], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:24.184905Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, node count: 1 2025-03-28T09:03:24.184963Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783427212260075:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-28T09:03:24.185017Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783427212260075:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-28T09:03:24.185047Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783427212260075:2329], Successfully finished 2025-03-28T09:03:24.196933Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-28T09:03:24.202713Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783427212260093:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:24.206382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:03:24.208514Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783427212260093:2302], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-28T09:03:24.208696Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783427212260093:2302], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-28T09:03:24.219249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783427212260093:2302], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:03:24.285778Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783427212260093:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:24.290092Z node 1 :TX_PROXY ERROR: Actor# [1:7486783427212260145:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:03:24.290255Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783427212260093:2302], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-28T09:03:24.301374Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2025-03-28T09:03:24.301410Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-28T09:03:24.301475Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783427212260154:2332], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-03-28T09:03:24.301662Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmIxYzk3ZTMtNTYxZGMxZDQtZjUzZmRmZGEtYWRkMWY5NDU=, ActorId: [1:7486783427212260091:2330], ActorState: ReadyState, TraceId: 01jqdzvs4c6qa54y371j0wfqmg, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE RESOURCE POOL my_pool WITH ( CONCURRENT_QUERY_LIMIT=1, QUEUE_SIZE=0 ); rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-03-28T09:03:24.303667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783427212260154:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:24.303758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:24.659039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:03:24.665774Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZmIxYzk3ZTMtNTYxZGMxZDQtZjUzZmRmZGEtYWRkMWY5NDU=, ActorId: [1:7486783427212260091:2330], ActorState: ExecuteState, TraceId: 01jqdzvs4c6qa54y371j0wfqmg, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7486783427212260163:2330] WorkloadServiceCleanup: 0 2025-03-28T09:03:24.667259Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmIxYzk3ZTMtNTYxZGMxZDQtZjUzZmRmZGEtYWRkMWY5NDU=, ActorId: [1:7486783427212260091:2330], ActorState: CleanupState, TraceId: 01jqdzvs4c6qa54y371j0wfqmg, EndCleanup, isFinal: 0 2025-03-28T09:03:24.667359Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmIxYzk3ZTMtNTYxZGMxZDQtZjUzZmRmZGEtYWRkMWY5NDU=, ActorId: [1:7486783427212260091:2330], ActorState: CleanupState, TraceId: 01jqdzvs4c6qa54y371j0wfqmg, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7486783410032390486:2276] 2025-03-28T09:03:24.684852Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZTRmNTMxOTctYjlkMjUyNjMtOWNjYzNiNjYtODczOTI0YWQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZTRmNTMxOTctYjlkMjUyNjMtOWNjYzNiNjYtODczOTI0YWQ= 2025-03-28T09:03:24.685328Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: my_pool 2025-03-28T09:03:24.685377Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZTRmNTMxOTctYjlkMjUyNjMtOWNjYzNiNjYtODczOTI0YWQ=, ActorId: [1:7486783427212260185:2333], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:24.685492Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZTRmNTMxOTctYjlkMjUyNjMtOWNjYzNiNjYtODczOTI0YWQ=, ActorId: [1:7486783427212260185:2333], ActorState: ReadyState, TraceId: 01jqdzvsgd203d81kdgtg7m09b, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7486783427212260184:2362] database: Root databaseId: /Root pool id: my_pool 2025-03-28T09:03:24.685544Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Received new request from [1:7486783427212260185:2333], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=1&id=ZTRmNTMxOTctYjlkMjUyNjMtOWNjYzNiNjYtODczOTI0YWQ= 2025-03-28T09:03:24.685598Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783427212260187:2334], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-03-28T09:03:24.685666Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7486783427212260188:2335], Database: /Root, Start database fetching 2025-03-28T09:03:24.686339Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7486783427212260188:2335], Database: /Root, Database info successfully fetched, serverless: 0 2025-03-28T09:03:24.686384Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-03-28T09:03:24.686433Z node 1 :KQP_WORKLOAD_SERVICE DEBUG:
[WorkloadService] [TPoolResolverActor] ActorId: [1:7486783427212260195:2336], DatabaseId: /Root, PoolId: my_pool, SessionId: ... FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [7:7486783615545888522:2458] database: /Root databaseId: /Root pool id: default 2025-03-28T09:04:08.514024Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: ReadyState, TraceId: 01jqdzx4a1drpaxkg3fygmgbf9, request placed into pool from cache: default 2025-03-28T09:04:08.516071Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486783615545888521:2457], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-03-28T09:04:08.516160Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486783615545888524:2460], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-03-28T09:04:08.516416Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: ExecuteState, TraceId: 01jqdzx4a1drpaxkg3fygmgbf9, ExecutePhyTx, tx: 0x000050C00036E5D8 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-03-28T09:04:08.516500Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: ExecuteState, TraceId: 01jqdzx4a1drpaxkg3fygmgbf9, Sending to Executer TraceId: 0 8 2025-03-28T09:04:08.516593Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: ExecuteState, TraceId: 01jqdzx4a1drpaxkg3fygmgbf9, Created new KQP executer: [7:7486783615545888529:2447] isRollback: 0 2025-03-28T09:04:08.516706Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-03-28T09:04:08.516819Z node 7 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [7:7486783615545888523:2459], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=7&id=MTFkYWQ4Y2YtZTVkNzZjZDktODcyNjQ4ZC1lNDc0ZmY0, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-03-28T09:04:08.516928Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [7:7486783615545888523:2459], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=7&id=MTFkYWQ4Y2YtZTVkNzZjZDktODcyNjQ4ZC1lNDc0ZmY0, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } } 2025-03-28T09:04:08.517052Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [7:7486783615545888519:2456]: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } } 2025-03-28T09:04:08.517155Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MTFkYWQ4Y2YtZTVkNzZjZDktODcyNjQ4ZC1lNDc0ZmY0, ActorId: [7:7486783615545888519:2456], ActorState: ExecuteState, TraceId: 01jqdzx4a11s39bk6kne1ghezs, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2025-03-28T09:04:08.517297Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=MTFkYWQ4Y2YtZTVkNzZjZDktODcyNjQ4ZC1lNDc0ZmY0, ActorId: [7:7486783615545888519:2456], ActorState: ExecuteState, TraceId: 01jqdzx4a11s39bk6kne1ghezs, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-03-28T09:04:08.517439Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Finished request with worker actor [7:7486783615545888519:2456], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=7&id=MTFkYWQ4Y2YtZTVkNzZjZDktODcyNjQ4ZC1lNDc0ZmY0 2025-03-28T09:04:08.517497Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MTFkYWQ4Y2YtZTVkNzZjZDktODcyNjQ4ZC1lNDc0ZmY0, ActorId: [7:7486783615545888519:2456], ActorState: CleanupState, TraceId: 01jqdzx4a11s39bk6kne1ghezs, EndCleanup, isFinal: 1 2025-03-28T09:04:08.517604Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MTFkYWQ4Y2YtZTVkNzZjZDktODcyNjQ4ZC1lNDc0ZmY0, ActorId: [7:7486783615545888519:2456], ActorState: CleanupState, TraceId: 01jqdzx4a11s39bk6kne1ghezs, Sent query response back to proxy, proxyRequestId: 18, proxyId: [7:7486783568301246966:2073] 2025-03-28T09:04:08.517634Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MTFkYWQ4Y2YtZTVkNzZjZDktODcyNjQ4ZC1lNDc0ZmY0, ActorId: [7:7486783615545888519:2456], ActorState: unknown state, TraceId: 01jqdzx4a11s39bk6kne1ghezs, Cleanup temp tables: 0 2025-03-28T09:04:08.517764Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MTFkYWQ4Y2YtZTVkNzZjZDktODcyNjQ4ZC1lNDc0ZmY0, ActorId: [7:7486783615545888519:2456], ActorState: unknown state, TraceId: 01jqdzx4a11s39bk6kne1ghezs, Session actor destroyed 2025-03-28T09:04:08.529642Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: ExecuteState, TraceId: 01jqdzx4a1drpaxkg3fygmgbf9, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-03-28T09:04:08.529809Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: ExecuteState, TraceId: 01jqdzx4a1drpaxkg3fygmgbf9, ExecutePhyTx, tx: 0x000050C00036E698 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-03-28T09:04:08.535604Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: ExecuteState, TraceId: 01jqdzx4a1drpaxkg3fygmgbf9, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-03-28T09:04:08.535934Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: ExecuteState, TraceId: 01jqdzx4a1drpaxkg3fygmgbf9, txInfo Status: Committed Kind: ReadOnly TotalDuration: 19.605 ServerDuration: 19.47 QueriesCount: 2 2025-03-28T09:04:08.536628Z node 7 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: ExecuteState, TraceId: 01jqdzx4a1drpaxkg3fygmgbf9, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-28T09:04:08.536781Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: ExecuteState, TraceId: 01jqdzx4a1drpaxkg3fygmgbf9, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:04:08.536837Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: ExecuteState, TraceId: 01jqdzx4a1drpaxkg3fygmgbf9, EndCleanup, isFinal: 0 2025-03-28T09:04:08.536921Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: ExecuteState, TraceId: 01jqdzx4a1drpaxkg3fygmgbf9, Sent query response back to proxy, proxyRequestId: 19, proxyId: [7:7486783568301246966:2073] 2025-03-28T09:04:08.537524Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, TxId: 2025-03-28T09:04:08.537704Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: my_pool, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, TxId: 2025-03-28T09:04:08.537908Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7486783594071051487:2340], DatabaseId: /Root, PoolId: my_pool, successfully refreshed pool state, in flight: 0, delayed: 0 2025-03-28T09:04:08.538883Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:04:08.538922Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:04:08.538955Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:04:08.538995Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:04:08.539094Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=MjdlYjVkMzctNTI2NjliZDQtOTc4YjcxOWQtOGQ3ZjBlMmE=, ActorId: [7:7486783615545888463:2447], ActorState: unknown state, Session actor destroyed 2025-03-28T09:04:08.543865Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ODMxNWI1NmItYmZiZDg0MTAtMzc5YWY4M2UtMzU5NTNkNQ==, ActorId: [7:7486783594071051356:2331],
ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:04:08.543912Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=ODMxNWI1NmItYmZiZDg0MTAtMzc5YWY4M2UtMzU5NTNkNQ==, ActorId: [7:7486783594071051356:2331], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:04:08.543942Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ODMxNWI1NmItYmZiZDg0MTAtMzc5YWY4M2UtMzU5NTNkNQ==, ActorId: [7:7486783594071051356:2331], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:04:08.543968Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ODMxNWI1NmItYmZiZDg0MTAtMzc5YWY4M2UtMzU5NTNkNQ==, ActorId: [7:7486783594071051356:2331], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:04:08.544037Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=ODMxNWI1NmItYmZiZDg0MTAtMzc5YWY4M2UtMzU5NTNkNQ==, ActorId: [7:7486783594071051356:2331], ActorState: unknown state, Session actor destroyed >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink >> KqpSysColV1::InnerJoinTables [GOOD] >> KqpSysColV1::InnerJoinSelect [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot >> DataShardSnapshots::MvccSnapshotTailCleanup [GOOD] >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink >> KqpSysColV1::SelectRowById [GOOD] >> KqpScan::ScanRetryRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 12708, MsgBus: 64950 2025-03-28T09:04:01.451372Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783588964384685:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:01.452289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fb1/r3tmp/tmpKrCqXa/pdisk_1.dat 2025-03-28T09:04:02.097335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:02.097457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:02.104394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:02.109631Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12708, node 1 2025-03-28T09:04:02.359254Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:02.359275Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:02.359281Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:02.359393Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64950 TClient is connected to server 
localhost:64950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:03.400843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:03.458313Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:04:03.477359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:03.649836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:03.950461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:04.107704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:06.457032Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783588964384685:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:06.471457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:06.656799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783610439222938:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:06.656929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:07.109027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:04:07.200630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:04:07.244624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:04:07.336972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:04:07.389743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:04:07.464906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:04:07.621753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783614734190769:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:07.621830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:07.622201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783614734190774:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:07.626229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:04:07.647487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783614734190776:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:04:07.751184Z node 1 :TX_PROXY ERROR: Actor# [1:7486783614734190832:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> DataShardSnapshots::MvccSnapshotAndSplit [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 24228, MsgBus: 28414 2025-03-28T09:04:00.870546Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783582819505482:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:00.871571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fa5/r3tmp/tmpLx2ETn/pdisk_1.dat 2025-03-28T09:04:01.451564Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:01.463256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:01.463345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:01.466654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24228, node 1 2025-03-28T09:04:01.692801Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:01.692829Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:01.692842Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:01.692962Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28414 TClient is connected to server localhost:28414 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:02.625630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
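The KqpSysColV1 suites in this block run their joins and point reads against system entities under the hidden .sys entry that shows up in the TClient::Ls output above. For reference, a read of one documented system view from outside the test harness looks roughly like this (a sketch; the endpoint is illustrative and partition_stats is a known YDB system view, not something taken from this log):

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # illustrative
    driver.wait(timeout=5)

    with ydb.QuerySessionPool(driver) as pool:
        # System views are addressed as children of the .sys directory.
        result_sets = pool.execute_with_retries(
            "SELECT Path, TabletId, RowCount FROM `/Root/.sys/partition_stats` LIMIT 10;"
        )
        for row in result_sets[0].rows:
            print(row.Path, row.TabletId, row.RowCount)

    driver.stop()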
2025-03-28T09:04:02.647555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:02.865909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:03.130293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:03.305640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:05.851856Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783582819505482:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:05.851930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:06.140478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783608589310882:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:06.140575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:06.597541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:04:06.679292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:04:06.744015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:04:06.798417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:04:06.866633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:04:06.997407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:04:07.070410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783612884278700:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:07.070565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:07.074543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783612884278706:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:07.083685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:04:07.105000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783612884278710:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:04:07.167641Z node 1 :TX_PROXY ERROR: Actor# [1:7486783612884278766:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] Test command err: 2025-03-28T09:03:17.029588Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783396995370948:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:17.029753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d29/r3tmp/tmp45ZRm8/pdisk_1.dat 2025-03-28T09:03:17.437097Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:17.441697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:17.441842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:17.444954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14921, node 1 2025-03-28T09:03:17.523154Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:17.523172Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:17.523178Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:17.523282Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23128 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:17.778816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:03:20.060292Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-28T09:03:20.060327Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was disabled 2025-03-28T09:03:20.063583Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NWNmYmU0OTctYmZlYmVkOTgtZGM4NjYxMWUtZDlmMGUzMg==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NWNmYmU0OTctYmZlYmVkOTgtZGM4NjYxMWUtZDlmMGUzMg== 2025-03-28T09:03:20.064058Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NWNmYmU0OTctYmZlYmVkOTgtZGM4NjYxMWUtZDlmMGUzMg==, ActorId: [1:7486783409880273444:2327], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:20.088782Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU= 2025-03-28T09:03:20.088862Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU=, ActorId: [1:7486783409880273446:2328], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:20.089208Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU=, ActorId: [1:7486783409880273446:2328], ActorState: ReadyState, TraceId: 01jqdzvn0s0mdvcatkrk6ewqkg, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7486783409880273445:2296] database: Root databaseId: /Root pool id: 2025-03-28T09:03:20.089340Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU=, ActorId: [1:7486783409880273446:2328], ActorState: ExecuteState, TraceId: 01jqdzvn0s0mdvcatkrk6ewqkg, Sending CompileQuery request 2025-03-28T09:03:20.367204Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU=, ActorId: [1:7486783409880273446:2328], ActorState: ExecuteState, TraceId: 01jqdzvn0s0mdvcatkrk6ewqkg, ExecutePhyTx, tx: 0x000050C000246898 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-03-28T09:03:20.367254Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU=, ActorId: [1:7486783409880273446:2328], ActorState: ExecuteState, TraceId: 01jqdzvn0s0mdvcatkrk6ewqkg, Sending to Executer TraceId: 0 8 2025-03-28T09:03:20.367388Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU=, ActorId: [1:7486783409880273446:2328], ActorState: ExecuteState, TraceId: 01jqdzvn0s0mdvcatkrk6ewqkg, Created new KQP executer: [1:7486783409880273450:2328] isRollback: 0 2025-03-28T09:03:20.418708Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU=, ActorId: [1:7486783409880273446:2328], ActorState: ExecuteState, TraceId: 01jqdzvn0s0mdvcatkrk6ewqkg, Forwarded TEvStreamData to [1:7486783409880273445:2296] 2025-03-28T09:03:20.432397Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU=, ActorId: [1:7486783409880273446:2328], ActorState: ExecuteState, TraceId: 01jqdzvn0s0mdvcatkrk6ewqkg, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 
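Note: the trace above is one complete KQP session lifecycle for a generic query — session actor bootstrapped, request received in ReadyState, compile, ExecutePhyTx with commit: 1, TEvTxResponse, then final cleanup and session destruction. The statement driving it, quoted verbatim from the "received request" line (QUERY_ACTION_EXECUTE, QUERY_TYPE_SQL_GENERIC_QUERY):

    SELECT 42;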
2025-03-28T09:03:20.432591Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU=, ActorId: [1:7486783409880273446:2328], ActorState: ExecuteState, TraceId: 01jqdzvn0s0mdvcatkrk6ewqkg, txInfo Status: Committed Kind: Pure TotalDuration: 65.566 ServerDuration: 65.464 QueriesCount: 2 2025-03-28T09:03:20.432663Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU=, ActorId: [1:7486783409880273446:2328], ActorState: ExecuteState, TraceId: 01jqdzvn0s0mdvcatkrk6ewqkg, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-28T09:03:20.432821Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU=, ActorId: [1:7486783409880273446:2328], ActorState: ExecuteState, TraceId: 01jqdzvn0s0mdvcatkrk6ewqkg, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:03:20.432854Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU=, ActorId: [1:7486783409880273446:2328], ActorState: ExecuteState, TraceId: 01jqdzvn0s0mdvcatkrk6ewqkg, EndCleanup, isFinal: 1 2025-03-28T09:03:20.432895Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU=, ActorId: [1:7486783409880273446:2328], ActorState: ExecuteState, TraceId: 01jqdzvn0s0mdvcatkrk6ewqkg, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7486783396995371170:2277] 2025-03-28T09:03:20.432915Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU=, ActorId: [1:7486783409880273446:2328], ActorState: unknown state, TraceId: 01jqdzvn0s0mdvcatkrk6ewqkg, Cleanup temp tables: 0 2025-03-28T09:03:20.433310Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Yzg2MzkwNjgtNGRlOWJkMWYtYTFjMDRmZGItNTA1NmVmOGU=, ActorId: [1:7486783409880273446:2328], ActorState: unknown state, TraceId: 01jqdzvn0s0mdvcatkrk6ewqkg, Session actor destroyed 2025-03-28T09:03:20.442505Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NWNmYmU0OTctYmZlYmVkOTgtZGM4NjYxMWUtZDlmMGUzMg==, ActorId: [1:7486783409880273444:2327], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:03:20.442563Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NWNmYmU0OTctYmZlYmVkOTgtZGM4NjYxMWUtZDlmMGUzMg==, ActorId: [1:7486783409880273444:2327], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:03:20.442585Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NWNmYmU0OTctYmZlYmVkOTgtZGM4NjYxMWUtZDlmMGUzMg==, ActorId: [1:7486783409880273444:2327], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:03:20.442606Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NWNmYmU0OTctYmZlYmVkOTgtZGM4NjYxMWUtZDlmMGUzMg==, ActorId: [1:7486783409880273444:2327], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:03:20.442667Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NWNmYmU0OTctYmZlYmVkOTgtZGM4NjYxMWUtZDlmMGUzMg==, ActorId: [1:7486783409880273444:2327], ActorState: unknown state, Session actor destroyed 2025-03-28T09:03:21.552813Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783414833903094:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:21.559371Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d29/r3tmp/tmpHRAdXQ/pdisk_1.dat 2025-03-28T09:03:21.953304Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:22.032967Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:22.033056Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 18151, node 2 2025-03-28T09:03:22.044179Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:03:22.115682Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:22.115704Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:22.115712Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:22.115841Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29570 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVe ... 
:KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] RunDataQuery: -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; 2025-03-28T09:04:05.667156Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-03-28T09:04:05.667331Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: ReadyState, TraceId: 01jqdzx1h3ck4kb9h3xw9prttw, received request, proxyRequestId: 6 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; rpcActor: [8:7486783602545822750:2375] database: /Root databaseId: /Root pool id: default 2025-03-28T09:04:05.667376Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [8:7486783602545822749:2374], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ= 2025-03-28T09:04:05.667419Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7486783602545822752:2376], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-03-28T09:04:05.667518Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7486783602545822753:2377], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, Start pool fetching 2025-03-28T09:04:05.667539Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7486783602545822754:2378], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-03-28T09:04:05.671252Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7486783602545822752:2376], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2025-03-28T09:04:05.671375Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7486783602545822754:2378], DatabaseId: /Root, PoolId: default, Pool info successfully fetched 2025-03-28T09:04:05.671424Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root 2025-03-28T09:04:05.671490Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [8:7486783602545822753:2377], DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, Pool info successfully resolved 2025-03-28T09:04:05.671565Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool default, DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ= 2025-03-28T09:04:05.671656Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7486783598250855406:2361], DatabaseId: /Root, PoolId: default, Received new request, worker id: [8:7486783602545822749:2374], session id: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ= 2025-03-28T09:04:05.671697Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7486783598250855406:2361], DatabaseId: /Root, PoolId: default, Reply 
continue success to [8:7486783602545822749:2374], session id: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, local in flight: 1 2025-03-28T09:04:05.671725Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: default, SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ= 2025-03-28T09:04:05.671775Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: ExecuteState, TraceId: 01jqdzx1h3ck4kb9h3xw9prttw, continue request, pool id: default 2025-03-28T09:04:05.673970Z node 8 :KQP_SESSION INFO: Scheme error, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], status: PathNotTable 2025-03-28T09:04:06.321513Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: ExecuteState, TraceId: 01jqdzx1h3ck4kb9h3xw9prttw, ExecutePhyTx, tx: 0x000050C00054CD18 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-03-28T09:04:06.321587Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: ExecuteState, TraceId: 01jqdzx1h3ck4kb9h3xw9prttw, Sending to Executer TraceId: 0 8 2025-03-28T09:04:06.321684Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: ExecuteState, TraceId: 01jqdzx1h3ck4kb9h3xw9prttw, Created new KQP executer: [8:7486783606840790073:2374] isRollback: 0 2025-03-28T09:04:06.341435Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: ExecuteState, TraceId: 01jqdzx1h3ck4kb9h3xw9prttw, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-03-28T09:04:06.341512Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: ExecuteState, TraceId: 01jqdzx1h3ck4kb9h3xw9prttw, ExecutePhyTx, tx: 0x000050C0004C0558 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-03-28T09:04:06.342086Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: ExecuteState, TraceId: 01jqdzx1h3ck4kb9h3xw9prttw, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-03-28T09:04:06.342206Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: ExecuteState, TraceId: 01jqdzx1h3ck4kb9h3xw9prttw, txInfo Status: Committed Kind: ReadOnly TotalDuration: 20.802 ServerDuration: 20.727 QueriesCount: 2 2025-03-28T09:04:06.342283Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: ExecuteState, TraceId: 01jqdzx1h3ck4kb9h3xw9prttw, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-28T09:04:06.342341Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], 
ActorState: ExecuteState, TraceId: 01jqdzx1h3ck4kb9h3xw9prttw, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-03-28T09:04:06.343131Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7486783598250855406:2361], DatabaseId: /Root, PoolId: default, Received cleanup request, worker id: [8:7486783602545822749:2374], session id: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, duration: 0.674913s, cpu consumed: 0.001688s 2025-03-28T09:04:06.343178Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [8:7486783598250855406:2361], DatabaseId: /Root, PoolId: default, Reply cleanup success to [8:7486783602545822749:2374], session id: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, local in flight: 0 2025-03-28T09:04:06.343228Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: CleanupState, TraceId: 01jqdzx1h3ck4kb9h3xw9prttw, EndCleanup, isFinal: 0 2025-03-28T09:04:06.343292Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: CleanupState, TraceId: 01jqdzx1h3ck4kb9h3xw9prttw, Sent query response back to proxy, proxyRequestId: 6, proxyId: [8:7486783542416279599:2279] 2025-03-28T09:04:06.343648Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request finished in pool, DatabaseId: /Root, PoolId: default, Duration: 0.674913s, CpuConsumed: 0.001688s, AdjustCpuQuota: 0 2025-03-28T09:04:06.343902Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, TxId: 2025-03-28T09:04:06.343992Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TCpuLoadFetcherActor] Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, TxId: 2025-03-28T09:04:06.344324Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:04:06.344356Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:04:06.344388Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:04:06.344412Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:04:06.344497Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MWI2Y2VjZTEtOGZlNmMzMWItNTdlNjY2NzEtNjQ5YjY1YmQ=, ActorId: [8:7486783602545822749:2374], ActorState: unknown state, Session actor destroyed 2025-03-28T09:04:06.407382Z node 8 :KQP_SESSION INFO: SessionId: 
ydb://session/3?node_id=8&id=YzMyM2I2ODUtMzQxMDg1OTctYzA5MWYxOWEtZTg0ZGNiMDQ=, ActorId: [8:7486783598250855244:2350], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:04:06.407459Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YzMyM2I2ODUtMzQxMDg1OTctYzA5MWYxOWEtZTg0ZGNiMDQ=, ActorId: [8:7486783598250855244:2350], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:04:06.407516Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YzMyM2I2ODUtMzQxMDg1OTctYzA5MWYxOWEtZTg0ZGNiMDQ=, ActorId: [8:7486783598250855244:2350], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:04:06.407565Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YzMyM2I2ODUtMzQxMDg1OTctYzA5MWYxOWEtZTg0ZGNiMDQ=, ActorId: [8:7486783598250855244:2350], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:04:06.407686Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YzMyM2I2ODUtMzQxMDg1OTctYzA5MWYxOWEtZTg0ZGNiMDQ=, ActorId: [8:7486783598250855244:2350], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 13360, MsgBus: 10149 2025-03-28T09:04:01.654168Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783585771925547:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:01.661933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fbd/r3tmp/tmpzzjg33/pdisk_1.dat 2025-03-28T09:04:02.311601Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:02.349073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:02.350256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:02.354295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13360, node 1 2025-03-28T09:04:02.581924Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:02.581949Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:02.581957Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:02.582117Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10149 TClient is connected to server localhost:10149 WaitRootIsUp 'Root'... 
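Note: the TCpuLoadFetcherActor traced above derives cluster load from the `.sys/nodes` system view; its query, quoted verbatim from the RunDataQuery line but unflattened here for readability:

    -- TCpuLoadFetcherActor::OnRunQuery
    SELECT
        SUM(CpuThreads) AS ThreadsCount,
        SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad
    FROM `.sys/nodes`;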
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:03.818590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:03.850380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:04.081648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:04.414613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:04.517598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:06.642829Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783585771925547:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:06.642931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:06.864620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783607246763681:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:06.864738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:07.568215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:04:07.615415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:04:07.720212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:04:07.821947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:04:07.886181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:04:07.932350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:04:08.010920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783615836698798:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:08.011091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:08.014894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783615836698803:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:08.028141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:04:08.063538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783615836698805:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:04:08.129144Z node 1 :TX_PROXY ERROR: Actor# [1:7486783615836698860:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |76.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |76.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |76.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows >> KqpPg::CreateUniqComplexPgColumn+useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn-useSink >> KqpWorkloadService::TestStartQueryAfterCancel [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters [GOOD] >> KqpPg::TypeCoercionInsert-useSink [GOOD] >> KqpPg::V1CreateTable >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink >> DataShardSnapshots::VolatileSnapshotMerge [GOOD] >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate >> TSchemeShardLoginTest::UserLogin >> KqpScan::ScanDuringSplit10 |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters [GOOD] Test command err: 2025-03-28T09:03:17.212978Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783400152318875:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:17.213129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d0a/r3tmp/tmpzsWH6b/pdisk_1.dat 2025-03-28T09:03:17.579206Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61802, node 1 2025-03-28T09:03:17.643693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:17.644079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:17.651520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:03:17.673375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:17.673400Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:17.673409Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:17.673553Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31443 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:18.011740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:20.396372Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-28T09:03:20.396505Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783413037221381:2329], Start check tables existence, number paths: 2 2025-03-28T09:03:20.397074Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-28T09:03:20.397104Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-28T09:03:20.402037Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MzE2ZDEwYjgtYTFhYzc1NTEtOTIyZjBhNDctMWZiYmE5N2Y=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MzE2ZDEwYjgtYTFhYzc1NTEtOTIyZjBhNDctMWZiYmE5N2Y= 2025-03-28T09:03:20.403020Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783413037221381:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-28T09:03:20.403094Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783413037221381:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-28T09:03:20.403123Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783413037221381:2329], Successfully finished 2025-03-28T09:03:20.412982Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MzE2ZDEwYjgtYTFhYzc1NTEtOTIyZjBhNDctMWZiYmE5N2Y=, ActorId: [1:7486783413037221397:2330], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:20.413180Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-03-28T09:03:20.413221Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-28T09:03:20.430915Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783413037221399:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:20.438046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 
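Note: the ESchemeOpCreateResourcePool suboperation above is the schemeshard side of a CREATE RESOURCE POOL statement issued by the test. A sketch of the corresponding YQL, with the pool name taken from the log; the WITH parameters are illustrative assumptions, not the values this test actually used:

    CREATE RESOURCE POOL sample_pool_id WITH (
        CONCURRENT_QUERY_LIMIT = 1, -- assumed value; not visible in the log
        QUEUE_SIZE = 10             -- assumed value; not visible in the log
    );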
2025-03-28T09:03:20.441668Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783413037221399:2302], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-28T09:03:20.448489Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783413037221399:2302], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-28T09:03:20.454006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783413037221399:2302], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:03:20.546567Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783413037221399:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:20.550887Z node 1 :TX_PROXY ERROR: Actor# [1:7486783413037221450:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:03:20.551215Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783413037221399:2302], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-28T09:03:20.553875Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmU1YjdmNzktN2I3MDhmMGMtZjQ3Yjc1YTgtNWU5MTJjYjM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZmU1YjdmNzktN2I3MDhmMGMtZjQ3Yjc1YTgtNWU5MTJjYjM= 2025-03-28T09:03:20.554174Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-28T09:03:20.554195Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-28T09:03:20.554244Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmU1YjdmNzktN2I3MDhmMGMtZjQ3Yjc1YTgtNWU5MTJjYjM=, ActorId: [1:7486783413037221457:2331], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:20.554472Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmU1YjdmNzktN2I3MDhmMGMtZjQ3Yjc1YTgtNWU5MTJjYjM=, ActorId: [1:7486783413037221457:2331], ActorState: ReadyState, TraceId: 01jqdzvnfa5d2r500x3rn11d45, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7486783413037221456:2339] database: Root databaseId: /Root pool id: sample_pool_id 2025-03-28T09:03:20.554520Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7486783413037221457:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZmU1YjdmNzktN2I3MDhmMGMtZjQ3Yjc1YTgtNWU5MTJjYjM= 2025-03-28T09:03:20.554583Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783413037221459:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-28T09:03:20.554663Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7486783413037221460:2333], Database: /Root, Start database fetching 2025-03-28T09:03:20.555748Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7486783413037221460:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-03-28T09:03:20.555850Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783413037221459:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-28T09:03:20.555880Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-03-28T09:03:20.555928Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 
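Note: after the pool is resolved and the request is placed into it, ResourcePoolsSysView::TestResourcePoolsSysViewFilters exercises range filters over the same metadata; the filtered reads it issues (echoed verbatim a few lines below in this block) are:

    SELECT * FROM `.sys/resource_pools` WHERE "a" < Name AND Name < "c";
    SELECT * FROM `.sys/resource_pools` WHERE Name >= "default";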
2025-03-28T09:03:20.555962Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-03-28T09:03:20.556212Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7486783413037221471:2335], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-03-28T09:03:20.556286Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7486783413037221470:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZmU1YjdmNzktN2I3MDhmMGMtZjQ3Yjc1YTgtNWU5MTJjYjM=, Start pool fetching 2025-03-28T09:03:20.556352Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783413037221473:2336], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-28T09:03:20.556572Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783413037221473:2336], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-28T09:03:20.556624Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7486783413037221470:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZmU1YjdmNzktN2I3MDhmMGMtZjQ3Yjc1YTgtNWU5MTJjYjM=, Pool info successfully resolved 2025-03-28T09:03:20.556680Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmU1YjdmNzktN2I3MDhmMGMtZjQ3Yjc1YTgtNWU5MTJjYjM= 2025-03-28T09:03:20.556743Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7486783413037221471:2335], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7486783413037221457:2331], session id: ydb://session/3?node_id=1&id=ZmU1YjdmNzktN2I3MDhmMGMtZjQ3Yjc1YTgtNWU5MTJjYjM= 2025-03-28T09:03:20.556796Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7486783413037221471:2335], DatabaseId: /Root, PoolId: sample_pool_id, Reply continue success to [1:7486783413037221457:2331], session id: ydb://session/3?node_id=1&id=ZmU1YjdmNzktN2I3MDhmMGMtZjQ3Yjc1YTgtNWU5MTJjYjM=, local in flight: 1 2025-03-28T09:03:20.556831Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, ... 
eceived request, proxyRequestId: 5 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT * FROM `.sys/resource_pools` WHERE "a" < Name AND Name < "c" rpcActor: [7:7486783628592574039:3420] database: /Root/test-dedicated databaseId: /Root/test-dedicated pool id: default 2025-03-28T09:04:11.404205Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTg1NjJmYTgtM2Y3ZmEyNTAtNjQ4YzNhNTAtYzc5NTU3NjA=, ActorId: [8:7486783632098489464:2413], ActorState: ReadyState, TraceId: 01jqdzx74c7z6hd5yqc9tzqpw3, request placed into pool from cache: default 2025-03-28T09:04:11.404307Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTg1NjJmYTgtM2Y3ZmEyNTAtNjQ4YzNhNTAtYzc5NTU3NjA=, ActorId: [8:7486783632098489464:2413], ActorState: ExecuteState, TraceId: 01jqdzx74c7z6hd5yqc9tzqpw3, Sending CompileQuery request 2025-03-28T09:04:11.655075Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTg1NjJmYTgtM2Y3ZmEyNTAtNjQ4YzNhNTAtYzc5NTU3NjA=, ActorId: [8:7486783632098489464:2413], ActorState: ExecuteState, TraceId: 01jqdzx74c7z6hd5yqc9tzqpw3, ExecutePhyTx, tx: 0x000050C0001AC758 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-03-28T09:04:11.655139Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTg1NjJmYTgtM2Y3ZmEyNTAtNjQ4YzNhNTAtYzc5NTU3NjA=, ActorId: [8:7486783632098489464:2413], ActorState: ExecuteState, TraceId: 01jqdzx74c7z6hd5yqc9tzqpw3, Sending to Executer TraceId: 0 8 2025-03-28T09:04:11.655225Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTg1NjJmYTgtM2Y3ZmEyNTAtNjQ4YzNhNTAtYzc5NTU3NjA=, ActorId: [8:7486783632098489464:2413], ActorState: ExecuteState, TraceId: 01jqdzx74c7z6hd5yqc9tzqpw3, Created new KQP executer: [8:7486783632098489476:2413] isRollback: 0 2025-03-28T09:04:11.667811Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTg1NjJmYTgtM2Y3ZmEyNTAtNjQ4YzNhNTAtYzc5NTU3NjA=, ActorId: [8:7486783632098489464:2413], ActorState: ExecuteState, TraceId: 01jqdzx74c7z6hd5yqc9tzqpw3, Forwarded TEvStreamData to [7:7486783628592574039:3420] 2025-03-28T09:04:11.670249Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTg1NjJmYTgtM2Y3ZmEyNTAtNjQ4YzNhNTAtYzc5NTU3NjA=, ActorId: [8:7486783632098489464:2413], ActorState: ExecuteState, TraceId: 01jqdzx74c7z6hd5yqc9tzqpw3, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-03-28T09:04:11.670404Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YTg1NjJmYTgtM2Y3ZmEyNTAtNjQ4YzNhNTAtYzc5NTU3NjA=, ActorId: [8:7486783632098489464:2413], ActorState: ExecuteState, TraceId: 01jqdzx74c7z6hd5yqc9tzqpw3, txInfo Status: Committed Kind: ReadOnly TotalDuration: 15.445 ServerDuration: 15.349 QueriesCount: 2 2025-03-28T09:04:11.670471Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTg1NjJmYTgtM2Y3ZmEyNTAtNjQ4YzNhNTAtYzc5NTU3NjA=, ActorId: [8:7486783632098489464:2413], ActorState: ExecuteState, TraceId: 01jqdzx74c7z6hd5yqc9tzqpw3, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-28T09:04:11.670928Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=YTg1NjJmYTgtM2Y3ZmEyNTAtNjQ4YzNhNTAtYzc5NTU3NjA=, ActorId: [8:7486783632098489464:2413], ActorState: ExecuteState, TraceId: 01jqdzx74c7z6hd5yqc9tzqpw3, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:04:11.670960Z node 8 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=8&id=YTg1NjJmYTgtM2Y3ZmEyNTAtNjQ4YzNhNTAtYzc5NTU3NjA=, ActorId: [8:7486783632098489464:2413], ActorState: ExecuteState, TraceId: 01jqdzx74c7z6hd5yqc9tzqpw3, EndCleanup, isFinal: 1 2025-03-28T09:04:11.671014Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTg1NjJmYTgtM2Y3ZmEyNTAtNjQ4YzNhNTAtYzc5NTU3NjA=, ActorId: [8:7486783632098489464:2413], ActorState: ExecuteState, TraceId: 01jqdzx74c7z6hd5yqc9tzqpw3, Sent query response back to proxy, proxyRequestId: 5, proxyId: [8:7486783589148815383:2277] 2025-03-28T09:04:11.671034Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTg1NjJmYTgtM2Y3ZmEyNTAtNjQ4YzNhNTAtYzc5NTU3NjA=, ActorId: [8:7486783632098489464:2413], ActorState: unknown state, TraceId: 01jqdzx74c7z6hd5yqc9tzqpw3, Cleanup temp tables: 0 2025-03-28T09:04:11.672606Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=YTg1NjJmYTgtM2Y3ZmEyNTAtNjQ4YzNhNTAtYzc5NTU3NjA=, ActorId: [8:7486783632098489464:2413], ActorState: unknown state, TraceId: 01jqdzx74c7z6hd5yqc9tzqpw3, Session actor destroyed 2025-03-28T09:04:11.684091Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ== 2025-03-28T09:04:11.684332Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [8:7486783632098489490:2422], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:04:11.684607Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [8:7486783632098489490:2422], ActorState: ReadyState, TraceId: 01jqdzx7d441z9fxj8matryvbe, received request, proxyRequestId: 6 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT * FROM `.sys/resource_pools` WHERE Name >= "default" rpcActor: [7:7486783628592574043:3421] database: /Root/test-dedicated databaseId: /Root/test-dedicated pool id: default 2025-03-28T09:04:11.684637Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [8:7486783632098489490:2422], ActorState: ReadyState, TraceId: 01jqdzx7d441z9fxj8matryvbe, request placed into pool from cache: default 2025-03-28T09:04:11.684745Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [8:7486783632098489490:2422], ActorState: ExecuteState, TraceId: 01jqdzx7d441z9fxj8matryvbe, Sending CompileQuery request 2025-03-28T09:04:11.812156Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [8:7486783632098489490:2422], ActorState: ExecuteState, TraceId: 01jqdzx7d441z9fxj8matryvbe, ExecutePhyTx, tx: 0x000050C00037E7D8 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-03-28T09:04:11.812218Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [8:7486783632098489490:2422], ActorState: ExecuteState, TraceId: 01jqdzx7d441z9fxj8matryvbe, Sending to Executer TraceId: 0 8 2025-03-28T09:04:11.812320Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [8:7486783632098489490:2422], ActorState: ExecuteState, TraceId: 
01jqdzx7d441z9fxj8matryvbe, Created new KQP executer: [8:7486783632098489500:2422] isRollback: 0 2025-03-28T09:04:11.823965Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [8:7486783632098489490:2422], ActorState: ExecuteState, TraceId: 01jqdzx7d441z9fxj8matryvbe, Forwarded TEvStreamData to [7:7486783628592574043:3421] 2025-03-28T09:04:11.826765Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [8:7486783632098489490:2422], ActorState: ExecuteState, TraceId: 01jqdzx7d441z9fxj8matryvbe, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-03-28T09:04:11.826924Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [8:7486783632098489490:2422], ActorState: ExecuteState, TraceId: 01jqdzx7d441z9fxj8matryvbe, txInfo Status: Committed Kind: ReadOnly TotalDuration: 14.886 ServerDuration: 14.786 QueriesCount: 2 2025-03-28T09:04:11.826991Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [8:7486783632098489490:2422], ActorState: ExecuteState, TraceId: 01jqdzx7d441z9fxj8matryvbe, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-28T09:04:11.827396Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [8:7486783632098489490:2422], ActorState: ExecuteState, TraceId: 01jqdzx7d441z9fxj8matryvbe, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:04:11.827437Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [8:7486783632098489490:2422], ActorState: ExecuteState, TraceId: 01jqdzx7d441z9fxj8matryvbe, EndCleanup, isFinal: 1 2025-03-28T09:04:11.827488Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [8:7486783632098489490:2422], ActorState: ExecuteState, TraceId: 01jqdzx7d441z9fxj8matryvbe, Sent query response back to proxy, proxyRequestId: 6, proxyId: [8:7486783589148815383:2277] 2025-03-28T09:04:11.827513Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [8:7486783632098489490:2422], ActorState: unknown state, TraceId: 01jqdzx7d441z9fxj8matryvbe, Cleanup temp tables: 0 2025-03-28T09:04:11.828001Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZDA2NDdhYjktMjljZTI3MTAtMTYzM2Y2MjgtNTc4YTkzYQ==, ActorId: [8:7486783632098489490:2422], ActorState: unknown state, TraceId: 01jqdzx7d441z9fxj8matryvbe, Session actor destroyed 2025-03-28T09:04:11.833840Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-03-28T09:04:11.834187Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:04:11.844058Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-03-28T09:04:11.844457Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:04:11.859844Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NzE4ODUyNjgtOWViNWQ5YmMtMTc5YjhmZmUtOGZlNGNiNWU=, 
ActorId: [7:7486783581347932547:2335], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:04:11.859888Z node 7 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=7&id=NzE4ODUyNjgtOWViNWQ5YmMtMTc5YjhmZmUtOGZlNGNiNWU=, ActorId: [7:7486783581347932547:2335], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:04:11.859919Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzE4ODUyNjgtOWViNWQ5YmMtMTc5YjhmZmUtOGZlNGNiNWU=, ActorId: [7:7486783581347932547:2335], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:04:11.859943Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzE4ODUyNjgtOWViNWQ5YmMtMTc5YjhmZmUtOGZlNGNiNWU=, ActorId: [7:7486783581347932547:2335], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:04:11.860009Z node 7 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=7&id=NzE4ODUyNjgtOWViNWQ5YmMtMTc5YjhmZmUtOGZlNGNiNWU=, ActorId: [7:7486783581347932547:2335], ActorState: unknown state, Session actor destroyed 2025-03-28T09:04:13.095669Z node 8 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [8:7486783589148815307:2206], processor id# 72075186224037891, database# /Root/test-dedicated >> KqpPg::CopyTableSerialColumns-useSink [GOOD] >> KqpPg::CreateIndex |76.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |76.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |76.8%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut >> KqpWorkloadServiceDistributed::TestNodeDisconnect [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit |76.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |76.8%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |76.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] >> BackupRestore::RestoreExternalDataSourceWithoutSecret [GOOD] >> BackupRestore::PrefixedVectorIndex >> DataShardSnapshots::ShardRestartWholeShardLockBasic [GOOD] >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert >> KqpPg::DropTableIfExists [GOOD] >> KqpPg::DropTableIfExists_GenericQuery >> TSchemeShardLoginTest::UserLogin [GOOD] >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync1 [GOOD] Test command err: 2025-03-28T09:03:08.200025Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783358133976865:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:08.200137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013c5/r3tmp/tmpGHf4mn/pdisk_1.dat 2025-03-28T09:03:08.700445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:08.700590Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:08.707464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:03:08.717398Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13848, node 1 2025-03-28T09:03:08.995363Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:08.995392Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:08.995403Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:09.002359Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:09.419872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:03:13.202836Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783358133976865:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:13.202922Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:03:23.714714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:03:23.714742Z node 1 :IMPORT WARN: Table profiles were not loaded >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::TestExternalLogin >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] >> KqpLimits::OutOfSpaceBulkUpsertFail [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail+useSink >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |76.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |76.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |76.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain [GOOD] >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword >> TSchemeShardLoginTest::TestExternalLogin [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:00.180538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:00.180661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:00.180761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:00.180801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:00.180857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:00.180913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:00.180981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:00.181064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:00.181382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:00.308866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:00.308925Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:00.356362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:00.356715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:00.356914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:00.365402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:00.365685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:00.366432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:00.366728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:00.369330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:00.370825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:00.370909Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:00.371115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:00.371166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:00.371210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:00.371298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:00.380148Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:00.489676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:00.489905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:00.490066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:00.490409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:00.490466Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:00.492752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:00.492879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:00.493042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:00.493106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:00.493141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:00.493179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:00.495261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:00.495328Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:00.495389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:00.497281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:00.497322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:00.497366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:00.497440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:00.530461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:00.536307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:00.536537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:00.537888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:00.538039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:00.538123Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:00.538431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:00.538497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:00.542877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:00.543056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:00.545779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:00.545853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:00.546055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:00.546115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:00.546458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:00.546514Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:00.546604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:00.546640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:00.546675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:00.546728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:00.546770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:00.546810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:00.546845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:00.546899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:00.546983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:00.547035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:00.547067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:00.549013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:00.549465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:00.549506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-28T09:04:18.958242Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-28T09:04:19.004275Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:354:2332]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:19.004359Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:19.004466Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:354:2332], Recipient [3:354:2332]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:19.004503Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:19.036921Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [3:354:2332]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-03-28T09:04:19.037019Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-03-28T09:04:19.037168Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:354:2332]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-03-28T09:04:19.037203Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-03-28T09:04:19.037235Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-03-28T09:04:19.037319Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-03-28T09:04:19.037408Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-03-28T09:04:19.141023Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:763:2648]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-03-28T09:04:19.141102Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-03-28T09:04:19.141203Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409552 outdated step 200 last cleanup 0 2025-03-28T09:04:19.141275Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409552 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:19.141315Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409552 2025-03-28T09:04:19.141366Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409552 has no attached operations 2025-03-28T09:04:19.141407Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409552 2025-03-28T09:04:19.141523Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:763:2648]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:04:19.141665Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409552, FollowerId 0, tableId 2 2025-03-28T09:04:19.141972Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:763:2648], Recipient [3:893:2750]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409552 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 
ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 30 Memory: 119352 } ShardState: 2 UserTablePartOwners: 72075186233409552 NodeId: 3 StartTime: 118 TableOwnerId: 72075186233409549 FollowerId: 0 2025-03-28T09:04:19.142006Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-28T09:04:19.142046Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.003 2025-03-28T09:04:19.142143Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-28T09:04:19.142178Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-28T09:04:19.153726Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:765:2649]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-03-28T09:04:19.153823Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-03-28T09:04:19.153914Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409553 outdated step 200 last cleanup 0 2025-03-28T09:04:19.153996Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409553 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:19.154033Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409553 2025-03-28T09:04:19.154068Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409553 has no attached operations 2025-03-28T09:04:19.154101Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409553 2025-03-28T09:04:19.154251Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:765:2649]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:04:19.154411Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409553, FollowerId 0, tableId 2 2025-03-28T09:04:19.154804Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:765:2649], Recipient [3:893:2750]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409553 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 
0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 28 Memory: 119352 } ShardState: 2 UserTablePartOwners: 72075186233409553 NodeId: 3 StartTime: 118 TableOwnerId: 72075186233409549 FollowerId: 0 2025-03-28T09:04:19.154853Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-28T09:04:19.154909Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0028 2025-03-28T09:04:19.155014Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 2] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-28T09:04:19.166480Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:893:2750]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:19.166563Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:19.166674Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:893:2750], Recipient [3:893:2750]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:19.166730Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:19.182901Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435095, Sender [0:0:0], Recipient [3:893:2750]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2025-03-28T09:04:19.182993Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-03-28T09:04:19.183268Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:893:2750]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-03-28T09:04:19.183305Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-03-28T09:04:19.183338Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-03-28T09:04:19.183430Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-03-28T09:04:19.183503Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-03-28T09:04:19.183695Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269746180, Sender [3:2044:3861], Recipient [3:893:2750]: NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-03-28T09:04:19.183740Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProxySchemeCache::TEvNavigateKeySetResult 2025-03-28T09:04:19.207674Z node 3 :TX_DATASHARD TRACE: StateWork, received 
event# 269877761, Sender [3:2047:3864], Recipient [3:763:2648]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:19.207758Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:19.207819Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409552, clientId# [3:2046:3863], serverId# [3:2047:3864], sessionId# [0:0:0] 2025-03-28T09:04:19.208033Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:2045:3862], Recipient [3:763:2648]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } 2025-03-28T09:04:19.208597Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:2050:3867], Recipient [3:765:2649]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:19.208624Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:19.208649Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409553, clientId# [3:2049:3866], serverId# [3:2050:3867], sessionId# [0:0:0] 2025-03-28T09:04:19.208708Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:2048:3865], Recipient [3:765:2649]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 2 } |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |76.9%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword [GOOD] >> TWebLoginService::AuditLogAdminLoginSuccess >> YdbProxy::CopyTable >> TStorageTenantTest::DeclareAndDefine >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |76.9%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> Cdc::NaN[TopicRunner] [GOOD] >> Cdc::RacyRebootAndSplitWithTxInflight >> TWebLoginService::AuditLogAdminLoginSuccess [GOOD] >> TPQCachingProxyTest::TestWrongSessionOrGeneration >> BackupRestoreS3::PrefixedVectorIndex [GOOD] >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [GOOD] >> DataShardSnapshots::VolatileSnapshotReadTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogAdminLoginSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:04:17.344301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single 
parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:04:17.344409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:17.344461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:04:17.344496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:04:17.344534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:04:17.344577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:04:17.344631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:17.344729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:04:17.345024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:17.448824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:04:17.448881Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:17.472118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:17.472348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:04:17.472513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:04:17.478233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:04:17.478419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:04:17.479104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:17.479313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:17.481348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:17.482584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:17.482641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:17.482829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:04:17.482891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:17.482932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:04:17.483055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:04:17.489870Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: 
[1:237:2058] recipient: [1:15:2062] 2025-03-28T09:04:17.661533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:17.661809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:17.662046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:04:17.662291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:04:17.662356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:17.683698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:17.683864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:04:17.684061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:17.684111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:04:17.684146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:04:17.684180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:04:17.691521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:17.691586Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:04:17.691643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:04:17.699508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:17.699584Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:17.699622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:17.699663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:04:17.726076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:04:17.747492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-03-28T09:04:17.747747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:04:17.748894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:17.749071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:17.749122Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:17.749400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:04:17.749460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:17.749659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:17.749738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:17.760362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:17.760476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:17.760692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:17.760746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:04:17.761133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:17.761184Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:04:17.761305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:17.761338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:17.761385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:17.761414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:17.761448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:04:17.761488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:17.761545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:04:17.761580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:04:17.761685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:17.761729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:04:17.761763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:04:17.776165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:04:17.776321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:04:17.776363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:21.771914Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:21.772012Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:04:21.780137Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:21.780218Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:21.780472Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:21.780530Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:04:21.780960Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:21.781021Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:04:21.781159Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:21.781206Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:21.781265Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:21.781307Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:21.781355Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:04:21.781408Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:21.781450Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:04:21.781487Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:04:21.781605Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:21.781655Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:04:21.781697Z 
node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:04:21.782397Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:04:21.782516Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:04:21.782563Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:04:21.782610Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:04:21.782658Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:21.786875Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:04:21.791085Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:04:21.791657Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:04:21.792201Z node 4 :TX_PROXY DEBUG: actor# [4:268:2259] Bootstrap 2025-03-28T09:04:21.830334Z node 4 :TX_PROXY DEBUG: actor# [4:268:2259] Become StateWork (SchemeCache [4:273:2264]) 2025-03-28T09:04:21.833768Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "user1" Password: "password1" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:21.838971Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-03-28T09:04:21.839148Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:04:21.839193Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:04:21.839250Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:04:21.839290Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:04:21.839365Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:04:21.839431Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:04:21.839481Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:04:21.839522Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:04:21.839566Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-03-28T09:04:21.839607Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-03-28T09:04:21.840701Z 
node 4 :TX_PROXY DEBUG: actor# [4:268:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:04:21.855993Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:21.856167Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-03-28T09:04:21.856688Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:21.856757Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:21.856992Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:21.857046Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:04:21.857915Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:21.858051Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:21.858101Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:04:21.858154Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-03-28T09:04:21.858208Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:21.858342Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:04:21.858597Z node 4 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:04:21.867005Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-03-28T09:04:21.867668Z node 4 :HTTP WARN: 127.0.0.1:0 POST /login 2025-03-28T09:04:21.875554Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-28T09:04:21.875687Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-03-28T09:04:21.975172Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk1ODYxLCJpYXQiOjE3NDMxNTI2NjEsInN1YiI6InVzZXIxIn0.nvzHN74d-K80vS7FoFLL8gAAuDyoyq3fmRQoMBhtq1RDSGkler-TLW3OFeZS0DaQxMpmJ6GM0rDE9-Rz6GgnuO-F5sVnELONudNy4NH9ospfKSvd8E9xj78_XksrqMLjnWS97CJbFTrfgnBDD1bXo3XfTuZc_i30SZzVRvJgHSlxB4_CGzCCmQOsuAJoemiUXY3xSAZ5-3IKLpAWrMf2oZd4XQDpY5NQA90zc4PB6-L0c9GL9lOXzND6vmt6PMk3-3L3sY_kkBoiHB4hWMVVhVhQPkEFHOVVyxAbWMInal18ev4g5scEDGXsXLmjq41u3ipb7TJ-1tVsrec5ofYFiw" SanitizedToken: 
"eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk1ODYxLCJpYXQiOjE3NDMxNTI2NjEsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-03-28T09:04:21.975758Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:21.975813Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:21.976054Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:21.976103Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:205:2207], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-03-28T09:04:21.977664Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 AUDIT LOG buffer(3): 2025-03-28T09:04:21.757015Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-03-28T09:04:21.838794Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2025-03-28T09:04:21.975513Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk1ODYxLCJpYXQiOjE3NDMxNTI2NjEsInN1YiI6InVzZXIxIn0.**, login_user_level=admin AUDIT LOG checked line: 2025-03-28T09:04:21.975513Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk1ODYxLCJpYXQiOjE3NDMxNTI2NjEsInN1YiI6InVzZXIxIn0.**, login_user_level=admin >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] Test command err: 2025-03-28T09:04:23.012594Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:04:23.012726Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:04:23.028204Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:04:23.028313Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2 2025-03-28T09:04:23.028385Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-03-28T09:04:23.028421Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 2 2025-03-28T09:04:23.028484Z node 1 :PQ_READ_PROXY INFO: Direct read cache: attempted to register 
server session: session1:1 with stale generation 1, ignored 2025-03-28T09:04:23.028532Z node 1 :PQ_READ_PROXY ALERT: Direct read cache: tried to stage direct read for session session1 with generation 1, previously had this session with generation 2. Data ignored 2025-03-28T09:04:23.028570Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-03-28T09:04:23.028640Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |76.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink >> TPQCachingProxyTest::MultipleSessions >> KqpPg::CreateIndex [GOOD] >> KqpPg::CreateNotNullPgColumn >> ResourcePoolClassifiersDdl::TestMultiGroupClassification [GOOD] >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] >> TPQCachingProxyTest::MultipleSessions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] Test command err: 2025-03-28T09:03:18.254819Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783401667336519:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:18.254921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cd9/r3tmp/tmpCFs9jj/pdisk_1.dat 2025-03-28T09:03:18.610464Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:18.665144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:18.665248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 24592, node 1 2025-03-28T09:03:18.686596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:03:18.731051Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:18.731078Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:18.731086Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:18.731233Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:29148 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:19.016743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:21.230377Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-28T09:03:21.233080Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjEyY2ViNjgtMWZlNGJiNjEtZmRiOWIxNGEtZmZkZDc5MmY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZjEyY2ViNjgtMWZlNGJiNjEtZmRiOWIxNGEtZmZkZDc5MmY= 2025-03-28T09:03:21.233687Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-03-28T09:03:21.233716Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-28T09:03:21.233727Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-28T09:03:21.245110Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783414552239052:2328], Start check tables existence, number paths: 2 2025-03-28T09:03:21.245216Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjEyY2ViNjgtMWZlNGJiNjEtZmRiOWIxNGEtZmZkZDc5MmY=, ActorId: [1:7486783414552239057:2329], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:21.247149Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783414552239052:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-28T09:03:21.247206Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783414552239052:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-28T09:03:21.247234Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783414552239052:2328], Successfully finished 2025-03-28T09:03:21.247289Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-28T09:03:21.261210Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783414552239074:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:21.265414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 
281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:03:21.266851Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783414552239074:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2025-03-28T09:03:21.267043Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783414552239074:2301], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-28T09:03:21.279069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783414552239074:2301], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:03:21.360786Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783414552239074:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:21.364759Z node 1 :TX_PROXY ERROR: Actor# [1:7486783414552239127:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:03:21.364872Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783414552239074:2301], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-28T09:03:21.367519Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmZlNGUzNDAtNmVmODEyODYtYzlmNGYyOGQtOWQ4MGEzMDg=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MmZlNGUzNDAtNmVmODEyODYtYzlmNGYyOGQtOWQ4MGEzMDg= 2025-03-28T09:03:21.367841Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-28T09:03:21.367864Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-28T09:03:21.367919Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmZlNGUzNDAtNmVmODEyODYtYzlmNGYyOGQtOWQ4MGEzMDg=, ActorId: [1:7486783414552239135:2331], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:21.368093Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MmZlNGUzNDAtNmVmODEyODYtYzlmNGYyOGQtOWQ4MGEzMDg=, ActorId: [1:7486783414552239135:2331], ActorState: ReadyState, TraceId: 01jqdzvp8r71n51stehvf5ygj6, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7486783414552239134:2340] database: Root databaseId: /Root pool id: sample_pool_id 2025-03-28T09:03:21.368143Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7486783414552239135:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MmZlNGUzNDAtNmVmODEyODYtYzlmNGYyOGQtOWQ4MGEzMDg= 2025-03-28T09:03:21.368217Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783414552239137:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-28T09:03:21.368293Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7486783414552239138:2333], Database: /Root, Start database fetching 2025-03-28T09:03:21.369585Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7486783414552239138:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-03-28T09:03:21.369724Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783414552239137:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-28T09:03:21.369776Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-03-28T09:03:21.369813Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 
2025-03-28T09:03:21.369829Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-03-28T09:03:21.370166Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7486783414552239149:2335], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-03-28T09:03:21.370254Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7486783414552239148:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MmZlNGUzNDAtNmVmODEyODYtYzlmNGYyOGQtOWQ4MGEzMDg=, Start pool fetching 2025-03-28T09:03:21.370310Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783414552239150:2336], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-28T09:03:21.371582Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783414552239150:2336], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-28T09:03:21.371645Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7486783414552239149:2335], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2025-03-28T09:03:21.371740Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7486783414552239148:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MmZlNGUzNDAtNmVmODEyODYtYzlmNGYyOGQtOWQ4MGEzMDg=, Pool info successfully resolved 2025-03-28T09:03:21.371824Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmZlNGUzNDAtNmVmODEyODYtYzlmNGYyOGQtOWQ4MGEzMDg= 2025-03-28T09:03:21.371895Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7486783414552239149:2335], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7486783414552239135:2331], session id: ydb://session/3?node_id=1&id=MmZlNGUzNDAtNmVmODEyODYtYzlmNGYyOGQtOWQ4MGEzMDg= 2025-03-28T09:03:21.371948Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MmZlNGUzNDAtNmVmODEyODYtYzlmNGYyOGQtOWQ4MGEzMDg= 2025-03-28T09:03:21.372006Z no ... 
essionId: ydb://session/3?node_id=6&id=ZDVjZjNkNmUtMzBhYmUyMmQtNTllZjJmNDctNDYyN2FiZGE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZDVjZjNkNmUtMzBhYmUyMmQtNTllZjJmNDctNDYyN2FiZGE= 2025-03-28T09:04:22.808301Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7486783677755972217:2332], Start check tables existence, number paths: 2 2025-03-28T09:04:22.808451Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDVjZjNkNmUtMzBhYmUyMmQtNTllZjJmNDctNDYyN2FiZGE=, ActorId: [6:7486783677755972218:2333], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:04:22.808561Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-28T09:04:22.808588Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-28T09:04:22.808631Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-03-28T09:04:22.821350Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7486783677755972217:2332], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-28T09:04:22.821434Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7486783677755972217:2332], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-28T09:04:22.821477Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7486783677755972217:2332], Successfully finished 2025-03-28T09:04:22.821606Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-28T09:04:22.822528Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486783677755972245:2313], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:04:22.832450Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:04:22.838515Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486783677755972245:2313], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-28T09:04:22.839915Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486783677755972245:2313], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-28T09:04:22.879835Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486783677755972245:2313], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:04:22.978842Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486783677755972245:2313], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:04:22.982424Z node 6 :TX_PROXY ERROR: Actor# [6:7486783677755972298:2346] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:22.982583Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486783677755972245:2313], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-28T09:04:22.990005Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjlkZjdmMTItNmM5NzliMzQtYzM0NDAxMmQtNzgwMzM2MDg=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MjlkZjdmMTItNmM5NzliMzQtYzM0NDAxMmQtNzgwMzM2MDg= 2025-03-28T09:04:22.990422Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjlkZjdmMTItNmM5NzliMzQtYzM0NDAxMmQtNzgwMzM2MDg=, ActorId: [6:7486783677755972305:2337], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:04:22.990624Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjlkZjdmMTItNmM5NzliMzQtYzM0NDAxMmQtNzgwMzM2MDg=, ActorId: [6:7486783677755972305:2337], ActorState: ReadyState, TraceId: 01jqdzxjee2wabe2gnsqjpe94x, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [6:7486783677755972304:2351] database: Root databaseId: /Root pool id: sample_pool_id 2025-03-28T09:04:22.990665Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-28T09:04:22.994716Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-28T09:04:22.994873Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [6:7486783677755972305:2337], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=MjlkZjdmMTItNmM5NzliMzQtYzM0NDAxMmQtNzgwMzM2MDg= 2025-03-28T09:04:22.994959Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486783677755972307:2338], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-28T09:04:22.995066Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7486783677755972308:2339], Database: /Root, Start database fetching 2025-03-28T09:04:22.996923Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [6:7486783677755972308:2339], Database: /Root, Database info successfully fetched, serverless: 0 2025-03-28T09:04:22.997079Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486783677755972307:2338], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-28T09:04:22.997409Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-03-28T09:04:22.997453Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 
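Editor's note: KqpWorkloadService::TestZeroConcurrentQueryLimit ends just below with "Query failed during adding/waiting in workload pool sample_pool_id" — with a concurrent-query limit of zero, the request is placed into the pool and rejected immediately. A toy admission check under that assumption; TPoolState and TryAdmit are hypothetical stand-ins, not YDB classes.

```cpp
#include <iostream>
#include <optional>
#include <string>

// Illustrative pool state: a concurrent-query limit and a running counter.
struct TPoolState {
    int ConcurrentQueryLimit = 0;  // 0 => no query may ever be admitted
    int Running = 0;
};

// Try to place a request into the pool; returns an error text on rejection,
// std::nullopt on successful admission.
std::optional<std::string> TryAdmit(TPoolState& pool, const std::string& sessionId) {
    if (pool.Running >= pool.ConcurrentQueryLimit) {
        return "Query failed during adding/waiting in workload pool";
    }
    ++pool.Running;
    std::cout << "admitted " << sessionId << "\n";
    return std::nullopt;
}

int main() {
    TPoolState samplePool;  // limit 0, as in the test
    if (auto err = TryAdmit(samplePool, "session-1")) {
        std::cout << "rejected: " << *err << "\n";  // always taken with limit 0
    }
    return 0;
}
```

With the limit at zero the admission check can never pass, so the session actor's only possible outcome is the error response the test asserts on.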
2025-03-28T09:04:22.997476Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-03-28T09:04:22.997753Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7486783677755972319:2341], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-03-28T09:04:22.997818Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7486783677755972318:2340], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=MjlkZjdmMTItNmM5NzliMzQtYzM0NDAxMmQtNzgwMzM2MDg=, Start pool fetching 2025-03-28T09:04:22.997858Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486783677755972320:2342], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-28T09:04:23.002921Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486783677755972320:2342], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-28T09:04:23.003022Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7486783677755972319:2341], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2025-03-28T09:04:23.003166Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [6:7486783677755972318:2340], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=MjlkZjdmMTItNmM5NzliMzQtYzM0NDAxMmQtNzgwMzM2MDg=, Pool info successfully resolved 2025-03-28T09:04:23.003262Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=MjlkZjdmMTItNmM5NzliMzQtYzM0NDAxMmQtNzgwMzM2MDg= 2025-03-28T09:04:23.003358Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=MjlkZjdmMTItNmM5NzliMzQtYzM0NDAxMmQtNzgwMzM2MDg= 2025-03-28T09:04:23.003467Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=MjlkZjdmMTItNmM5NzliMzQtYzM0NDAxMmQtNzgwMzM2MDg=, ActorId: [6:7486783677755972305:2337], ActorState: ExecuteState, TraceId: 01jqdzxjee2wabe2gnsqjpe94x, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool sample_pool_id 2025-03-28T09:04:23.003623Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=MjlkZjdmMTItNmM5NzliMzQtYzM0NDAxMmQtNzgwMzM2MDg=, ActorId: [6:7486783677755972305:2337], ActorState: ExecuteState, TraceId: 01jqdzxjee2wabe2gnsqjpe94x, Cleanup start, isFinal: 1 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 1 2025-03-28T09:04:23.003848Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Finished request with worker actor [6:7486783677755972305:2337], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=6&id=MjlkZjdmMTItNmM5NzliMzQtYzM0NDAxMmQtNzgwMzM2MDg= 2025-03-28T09:04:23.003899Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjlkZjdmMTItNmM5NzliMzQtYzM0NDAxMmQtNzgwMzM2MDg=, ActorId: [6:7486783677755972305:2337], ActorState: CleanupState, TraceId: 01jqdzxjee2wabe2gnsqjpe94x, EndCleanup, isFinal: 1 2025-03-28T09:04:23.004015Z node 6 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=6&id=MjlkZjdmMTItNmM5NzliMzQtYzM0NDAxMmQtNzgwMzM2MDg=, ActorId: [6:7486783677755972305:2337], ActorState: CleanupState, TraceId: 01jqdzxjee2wabe2gnsqjpe94x, Sent query response back to proxy, proxyRequestId: 3, proxyId: [6:7486783643396233242:2082] 2025-03-28T09:04:23.004047Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjlkZjdmMTItNmM5NzliMzQtYzM0NDAxMmQtNzgwMzM2MDg=, ActorId: [6:7486783677755972305:2337], ActorState: unknown state, TraceId: 01jqdzxjee2wabe2gnsqjpe94x, Cleanup temp tables: 0 2025-03-28T09:04:23.004167Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=MjlkZjdmMTItNmM5NzliMzQtYzM0NDAxMmQtNzgwMzM2MDg=, ActorId: [6:7486783677755972305:2337], ActorState: unknown state, TraceId: 01jqdzxjee2wabe2gnsqjpe94x, Session actor destroyed 2025-03-28T09:04:23.029225Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZDVjZjNkNmUtMzBhYmUyMmQtNTllZjJmNDctNDYyN2FiZGE=, ActorId: [6:7486783677755972218:2333], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:04:23.029283Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZDVjZjNkNmUtMzBhYmUyMmQtNTllZjJmNDctNDYyN2FiZGE=, ActorId: [6:7486783677755972218:2333], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:04:23.029320Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDVjZjNkNmUtMzBhYmUyMmQtNTllZjJmNDctNDYyN2FiZGE=, ActorId: [6:7486783677755972218:2333], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:04:23.029346Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDVjZjNkNmUtMzBhYmUyMmQtNTllZjJmNDctNDYyN2FiZGE=, ActorId: [6:7486783677755972218:2333], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:04:23.029432Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZDVjZjNkNmUtMzBhYmUyMmQtNTllZjJmNDctNDYyN2FiZGE=, ActorId: [6:7486783677755972218:2333], ActorState: unknown state, Session actor destroyed >> TSchemeShardLoginTest::ResetFailedAttemptCount [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByConflict ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::PrefixedVectorIndex [GOOD] Test command err: 2025-03-28T09:02:49.993238Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783276196427522:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:49.993303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001794/r3tmp/tmpJWti6X/pdisk_1.dat 2025-03-28T09:02:50.669170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:50.669262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:50.676739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:50.679945Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12079, node 1 2025-03-28T09:02:50.878257Z 
node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:50.880880Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:50.880890Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:50.885196Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:51.392996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:54.373946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783297671264858:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:54.374232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:54.374703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783297671264870:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:54.379827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:02:54.416359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783297671264872:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:02:54.506664Z node 1 :TX_PROXY ERROR: Actor# [1:7486783297671264941:2690] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:54.996504Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783276196427522:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:54.996550Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:55.264149Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqdztwht9mwex718dyxp97zk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFkZWE2MDgtZGRmMDJiNDItYTIyZjgyM2QtNjcxN2ZkMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/2909/001794/r3tmp/tmpOifRlo/"Create temporary directory "/Root/~backup_20250328T090255" in databaseProcess "/home/runner/.ya/build/build_root/2909/001794/r3tmp/tmpOifRlo/view"Backup view "/Root/view" to "/home/runner/.ya/build/build_root/2909/001794/r3tmp/tmpOifRlo/view"Write view into "/home/runner/.ya/build/build_root/2909/001794/r3tmp/tmpOifRlo/view/create_view.sql"Write ACL into "/home/runner/.ya/build/build_root/2909/001794/r3tmp/tmpOifRlo/view/permissions.pb"Remove temporary directory "/Root/~backup_20250328T090255" in database2025-03-28T09:02:55.440151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710663:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/2909/001794/r3tmp/tmpOifRlo/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2909/001794/r3tmp/tmpOifRlo/" to "/Root"Process "/home/runner/.ya/build/build_root/2909/001794/r3tmp/tmpOifRlo/view"Restore view "/home/runner/.ya/build/build_root/2909/001794/r3tmp/tmpOifRlo/view" to "/Root/view"Read view from "/home/runner/.ya/build/build_root/2909/001794/r3tmp/tmpOifRlo/view/create_view.sql"Created "/Root/view"Restore ACL "/home/runner/.ya/build/build_root/2909/001794/r3tmp/tmpOifRlo/view" to "/Root/view"Read ACL from "/home/runner/.ya/build/build_root/2909/001794/r3tmp/tmpOifRlo/view/permissions.pb"2025-03-28T09:02:55.660755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710666:0, at schemeshard: 72057594046644480 Restore completed successfully2025-03-28T09:02:55.856681Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqdztx5w12ked1r6vgby6ybk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFkZWE2MDgtZGRmMDJiNDItYTIyZjgyM2QtNjcxN2ZkMGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:02:58.737352Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783317310118605:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:58.737415Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001794/r3tmp/tmpa3FmfM/pdisk_1.dat 2025-03-28T09:02:59.392235Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:59.488215Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:59.494036Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:59.504802Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29838, node 4 2025-03-28T09:02:59.870458Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:59.870484Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:59.870494Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:59.870610Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:03:00.118913Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:03:03.738788Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486783317310118605:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:03.738896Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:03:04.145967Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783343079923501:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:04.146075Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:04.184736Z node 4 :TX_PROXY DEBUG: actor# [4:7486783317310118677:2124] Handle TEvProposeTransaction 2025-03-28T09:03:04.184794Z node 4 :TX_PROXY DEBUG: actor# [4:7486783317310118677:2124] TxId# 281474976715658 ProcessProposeTransaction 2025-03-28T09:03:04.184854Z node 4 :TX_PROXY DEBUG: actor# [4:7486783317310118677:2124] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [4:7486783343079923522:2648] 2025-03-28T09:03:04.257908Z node 4 :TX_PROXY DEBUG: Actor# [4:7486783343079923522:2648] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: ... : false } 2025-03-28T09:04:21.064774Z node 22 :TX_PROXY DEBUG: Actor# [22:7486783671854127577:11510] Handle TEvDescribeSchemeResult Forward to# [22:7486783671854127575:2517] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710760 CreateStep: 1743152635658 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Group" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableIndexes { Name: "value_idx" LocalPathId: 10 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "Group" KeyColumnNames: "Value" SchemaVersion: 2 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 
CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: SIMILARITY_INNER_PRODUCT vector_type: VECTOR_TYPE_FLOAT vector_dimension: 768 } clusters: 80 levels: 2 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1450 Memory: 128568 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046644480 >> YdbProxy::CopyTable [GOOD] >> YdbProxy::CopyTables >> KqpSystemView::NodesRange1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 2025-03-28T09:04:25.204517Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:04:25.204604Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:04:25.219526Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:04:25.219623Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-03-28T09:04:25.219716Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-03-28T09:04:25.219785Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 2 for session: session1 2025-03-28T09:04:25.219820Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-03-28T09:04:25.219878Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 2 for session session1, Generation: 1 2025-03-28T09:04:25.219934Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 2 2025-03-28T09:04:25.220023Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: 
staged direct read id 3 for session: session2 2025-03-28T09:04:25.220056Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 3 for session session2, Generation: 2 |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:02.637019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:02.637139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:02.637178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:02.637213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:02.637256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:02.637288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:02.637342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:02.637434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:02.637755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:02.736207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:02.736262Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:02.756615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:02.756871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:02.757032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:02.763347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:02.763607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:02.764284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:02.764506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:02.766520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:02.767838Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:02.767901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:02.768073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:02.768146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:02.768193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:02.768284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:02.778420Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:02.945317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:02.945557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:02.945795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:02.946018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:02.946098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:02.948680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:02.948824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:02.949052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:02.949122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:02.949177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:02.949214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:02.951366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:02.951421Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:02.951457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:02.954844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:02.954898Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:02.954954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:02.955005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:02.963989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:02.972226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:02.972424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:02.973542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:02.973682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:02.973774Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:02.974041Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:02.974126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:02.974308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:02.974449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:02.976678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:02.976735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:02.976904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:02.976985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:02.977380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:02.977422Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:02.977524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is 
done id#1:0 progress is 1/1 2025-03-28T09:03:02.977560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:02.977611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:02.977642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:02.977683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:02.977719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:02.977759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:02.977796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:02.977858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:02.977895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:02.977924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:02.988913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:02.989070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:02.989110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
81701Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2025-03-28T09:04:24.581855Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2025-03-28T09:04:24.581892Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2025-03-28T09:04:24.581942Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-03-28T09:04:24.582040Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-28T09:04:24.582141Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-28T09:04:24.638729Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:325:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-03-28T09:04:24.638836Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-03-28T09:04:24.638938Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409546 outdated step 5000002 last cleanup 0 2025-03-28T09:04:24.639017Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:24.639051Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409546 2025-03-28T09:04:24.639087Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409546 has no attached operations 2025-03-28T09:04:24.639118Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409546 2025-03-28T09:04:24.639322Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:325:2307]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:04:24.639490Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-03-28T09:04:24.639948Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:325:2307], Recipient [3:124:2150]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 31 Memory: 124232 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 43 TableOwnerId: 72057594046678944 FollowerId: 0 2025-03-28T09:04:24.640004Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-28T09:04:24.640061Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0031 2025-03-28T09:04:24.640192Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 
72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-28T09:04:24.640236Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-28T09:04:24.651460Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:326:2308]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-03-28T09:04:24.651545Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-03-28T09:04:24.651652Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409547 outdated step 5000002 last cleanup 0 2025-03-28T09:04:24.651728Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409547 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:24.651771Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409547 2025-03-28T09:04:24.651804Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409547 has no attached operations 2025-03-28T09:04:24.651833Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409547 2025-03-28T09:04:24.651978Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:326:2308]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:04:24.652147Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 2 2025-03-28T09:04:24.652556Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:326:2308], Recipient [3:124:2150]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 2 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 26 Memory: 119352 } ShardState: 2 UserTablePartOwners: 72075186233409547 NodeId: 3 StartTime: 43 TableOwnerId: 72057594046678944 FollowerId: 0 2025-03-28T09:04:24.652601Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-28T09:04:24.652662Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0026 2025-03-28T09:04:24.652763Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 0 
RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-28T09:04:24.701927Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T09:04:24.701993Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T09:04:24.702019Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-03-28T09:04:24.702073Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2025-03-28T09:04:24.702101Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-03-28T09:04:24.702177Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-03-28T09:04:24.702239Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-03-28T09:04:24.702287Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-03-28T09:04:24.702359Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:29.000000Z at schemeshard 72057594046678944 2025-03-28T09:04:24.702440Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-28T09:04:24.702485Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-03-28T09:04:24.702508Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-28T09:04:24.702534Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:2 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2025-03-28T09:04:24.702589Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:04:24.715085Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T09:04:24.715184Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T09:04:24.715223Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:04:24.750835Z node 3 :TX_DATASHARD TRACE: StateWork, received 
event# 269877761, Sender [3:1338:3258], Recipient [3:325:2307]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:24.750940Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:24.751006Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409546, clientId# [3:1337:3257], serverId# [3:1338:3258], sessionId# [0:0:0] 2025-03-28T09:04:24.751247Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:1336:3256], Recipient [3:325:2307]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } 2025-03-28T09:04:24.753631Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:1341:3261], Recipient [3:326:2308]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:24.753700Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:24.753740Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186233409547, clientId# [3:1340:3260], serverId# [3:1341:3261], sessionId# [0:0:0] 2025-03-28T09:04:24.753947Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553213, Sender [3:1339:3259], Recipient [3:326:2308]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 2 } >> EraseRowsTests::EraseRowsShouldSuccess >> KqpPg::DropTableIfExists_GenericQuery [GOOD] >> KqpPg::EquiJoin+useSink >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] >> ReadAttributesUtils::AttributesGatheringFilter [GOOD] >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> KqpPg::CreateUniqComplexPgColumn-useSink [GOOD] >> KqpPg::CreateTempTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 27200, MsgBus: 4359 2025-03-28T09:04:00.832486Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783580830894136:2185];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:00.832868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:04:00.919068Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783581321656598:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:00.919118Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:04:01.083729Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486783586834243653:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:01.083840Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:04:01.137157Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783587147964856:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:01.266884Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486783588046857785:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:01.267155Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:04:01.286829Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fcf/r3tmp/tmpa3uk3M/pdisk_1.dat 2025-03-28T09:04:02.593920Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:02.679510Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:02.778402Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:02.783672Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:02.794864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:03.632662Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:03.711661Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:03.805513Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:03.816901Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:03.994826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:04.229934Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:04.284961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:04.285149Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:04.294400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:04.294478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:04.316246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:04.322812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:04.364570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:04.371656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:04.401907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:04.402005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:04.416203Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-03-28T09:04:04.416251Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-03-28T09:04:04.416415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:04.579739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:04.580008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:04.602101Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:04:04.602139Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T09:04:04.651022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:04.713129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27200, node 1 2025-03-28T09:04:05.484320Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:05.484351Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:05.484360Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:05.484493Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:04:05.821021Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783580830894136:2185];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:05.822482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:05.931571Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486783581321656598:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:05.932962Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:06.071029Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486783586834243653:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:06.071129Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:06.126848Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486783588046857785:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:06.126910Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:06.126975Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486783587147964856:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:06.127025Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:4359 TClient is connected to server localhost:4359 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:08.994861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:09.262150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:10.242795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:12.700037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:14.787125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:04:18.343757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783658140307421:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:18.343929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:18.873160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:04:18.873205Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:19.585322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:04:19.861023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:04:20.009840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:04:20.132937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:04:20.280070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:04:20.503411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:04:20.650927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783666730242701:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:20.651003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:20.651376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783666730242706:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:20.655195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:04:20.676913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783666730242708:2412], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:04:20.768369Z node 1 :TX_PROXY ERROR: Actor# [1:7486783666730242785:4130] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:22.460948Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152662419, txId: 281474976710671] shutting down 2025-03-28T09:04:22.634771Z node 4 :BS_PROXY_PUT ERROR: [08efb2af61d12202] Result# TEvPutResult {Id# [72075186224037915:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037915:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-28T09:04:22.600235Z node 3 :BS_PROXY_PUT ERROR: [e1e7467400deef43] Result# TEvPutResult {Id# [72075186224037888:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037888:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-28T09:04:22.750093Z node 2 :BS_PROXY_PUT ERROR: [5ddd6e7c4299826f] Result# TEvPutResult {Id# [72075186224037911:1:19:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037911:1:19:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-28T09:04:22.752334Z node 5 :BS_PROXY_PUT ERROR: [0bd02c888283cd31] Result# TEvPutResult {Id# [72075186224037899:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037899:1:17:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata >> DistributedEraseTests::ConditionalEraseRowsShouldErase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:04:18.304412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact 
single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:04:18.304545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:18.304618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:04:18.304657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:04:18.304700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:04:18.304730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:04:18.304788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:18.304890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:04:18.305208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:18.482089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:04:18.482160Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:18.505741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:18.505983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:04:18.506150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:04:18.512447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:04:18.512652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:04:18.513296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:18.513508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:18.515495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:18.516723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:18.516793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:18.516943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:04:18.517013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:18.517053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:04:18.517150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:04:18.524129Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] 
sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:04:18.806161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:18.806441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:18.807621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:04:18.807932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:04:18.807996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:18.811736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:18.811903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:04:18.812190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:18.812241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:04:18.812276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:04:18.812314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:04:18.821328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:18.821395Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:04:18.821433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:04:18.827265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:18.827330Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:18.827387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:18.827435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:04:18.835303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:04:18.843567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 
269090816 2025-03-28T09:04:18.843776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:04:18.844786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:18.844936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:18.844998Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:18.845366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:04:18.845421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:18.845599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:18.845674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:18.851539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:18.851625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:18.851837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:18.851885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:04:18.852285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:18.852350Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:04:18.852489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:18.852527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:18.852576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:18.852627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:18.852670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:04:18.852714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:18.852749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:04:18.852786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:04:18.852877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove 
txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:18.852925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:04:18.852963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:04:18.868987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:04:18.869179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:04:18.869229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... DbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:04:26.761464Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-03-28T09:04:26.761512Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:04:26.761572Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:04:26.761608Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-03-28T09:04:26.761667Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2025-03-28T09:04:26.764183Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSuccess TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:26.764298Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2025-03-28T09:04:26.764536Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:26.764584Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:26.764746Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:26.764806Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:357:2333], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-03-28T09:04:26.765340Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:04:26.765459Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:04:26.765510Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:04:26.765616Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-03-28T09:04:26.765660Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:26.765759Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-03-28T09:04:26.768155Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:04:26.769395Z node 4 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [4:311:2298] sender: [4:406:2058] recipient: [4:102:2137] Leader for TabletID 72057594046678944 is [4:311:2298] sender: [4:409:2058] recipient: [4:15:2062] Leader for TabletID 72057594046678944 is [4:311:2298] sender: [4:410:2058] recipient: [4:408:2379] Leader for TabletID 72057594046678944 is [4:411:2380] sender: [4:412:2058] recipient: [4:408:2379] 2025-03-28T09:04:26.835496Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:04:26.835624Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:26.835686Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:04:26.835750Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:04:26.835793Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:04:26.835828Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:04:26.835892Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:26.835983Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:04:26.846386Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:26.866581Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:26.868082Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:04:26.868285Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:04:26.868504Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:04:26.868545Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:26.868699Z node 4 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:04:26.869482Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:26.869622Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.869705Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.870174Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.870270Z node 4 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-28T09:04:26.870465Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.870558Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.870659Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.870801Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.870886Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.871061Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.871376Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.871513Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.871902Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.871979Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.872146Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.872260Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.872351Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.872636Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.872744Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.872886Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.873117Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.873302Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.873364Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.873419Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:04:26.883481Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:26.883568Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:26.883824Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:04:26.883887Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:26.883939Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:04:26.884059Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [4:411:2380] sender: [4:466:2058] recipient: [4:15:2062] 2025-03-28T09:04:26.918978Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-28T09:04:26.919036Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-03-28T09:04:27.159980Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk1ODY3LCJpYXQiOjE3NDMxNTI2NjcsInN1YiI6InVzZXIxIn0.C_oZs5XJLuL_tzu78XLX1TnVJjID4X8ZSvMWnPza3OAuvRWEKT6yX-DLkPio39WIv7Zk3LRz6f7l92RyzSoxsCNFrZT46cv1n9cphcovwQGp0QnG226rYGB_NhNA0JE4OZDR7EyV-yY12JHDzNiXICQ5eKygp0zbBf90dgSOYT5zcFN8zvKKIbkAuxgqTHjNI8w57EQVdyT4dUCo4OfL5d-xU1rjKQkwcjyjQWk3ggoDb4Q5rum4PwvOfSGfFoAt8luYedI1fbhxgjTu6H97foV99q-q9BmmEjY0qw84xJGJVHDLq2IMX7Ukcx5c9csVwLv9_ZlKlrm03A1DSknRRQ" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk1ODY3LCJpYXQiOjE3NDMxNTI2NjcsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-03-28T09:04:27.160153Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:27.160197Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:27.160406Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:27.160460Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:460:2418], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-03-28T09:04:27.161066Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 >> KikimrIcGateway::TestCreateExternalTable >> TStorageTenantTest::DeclareAndDefine [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink >> KqpScan::RemoteShardScan [GOOD] >> KqpScan::ScanDuringSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::DeclareAndDefine [GOOD] Test command err: 2025-03-28T09:04:21.905320Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783673168475825:2188];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:21.905371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001421/r3tmp/tmpadSJ3C/pdisk_1.dat 2025-03-28T09:04:22.580120Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:22.592841Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:22.592924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:22.616956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8851 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:04:23.019326Z node 1 :TX_PROXY DEBUG: actor# [1:7486783673168475962:2116] Handle TEvNavigate describe path dc-1 2025-03-28T09:04:23.019382Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783681758411050:2446] HANDLE EvNavigateScheme dc-1 2025-03-28T09:04:23.019497Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486783677463443281:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:04:23.019577Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486783677463443731:2435][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486783677463443281:2129], cookie# 1 2025-03-28T09:04:23.021010Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486783677463443735:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486783677463443732:2435], cookie# 1 2025-03-28T09:04:23.021061Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486783677463443736:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486783677463443733:2435], cookie# 1 2025-03-28T09:04:23.021077Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486783677463443737:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486783677463443734:2435], cookie# 1 2025-03-28T09:04:23.021110Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486783673168475642:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486783677463443735:2435], cookie# 1 2025-03-28T09:04:23.021139Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486783673168475645:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486783677463443736:2435], cookie# 1 2025-03-28T09:04:23.021158Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486783673168475648:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486783677463443737:2435], cookie# 1 2025-03-28T09:04:23.021192Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486783677463443735:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486783673168475642:2050], cookie# 1 2025-03-28T09:04:23.021207Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486783677463443736:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486783673168475645:2053], cookie# 1 2025-03-28T09:04:23.021220Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486783677463443737:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486783673168475648:2056], cookie# 1 2025-03-28T09:04:23.021259Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486783677463443731:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7486783677463443732:2435], cookie# 1 2025-03-28T09:04:23.021282Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486783677463443731:2435][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:04:23.021296Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486783677463443731:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486783677463443733:2435], cookie# 1 2025-03-28T09:04:23.021320Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486783677463443731:2435][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:04:23.021344Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486783677463443731:2435][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486783677463443734:2435], cookie# 1 2025-03-28T09:04:23.021366Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486783677463443731:2435][/dc-1] Unexpected sync response: sender# [1:7486783677463443734:2435], cookie# 1 2025-03-28T09:04:23.021415Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486783677463443281:2129], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:04:23.035294Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486783677463443281:2129], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486783677463443731:2435] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:04:23.035421Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486783677463443281:2129], cacheItem# { Subscriber: { Subscriber: [1:7486783677463443731:2435] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-28T09:04:23.037418Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486783681758411051:2447], recipient# [1:7486783681758411050:2446], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:04:23.037479Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783681758411050:2446] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 
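[editor's note] The `Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1` / `Sync is done ... successes# 2` / `Unexpected sync response` records just above follow a strict-majority rule over the three scheme-board replica replies. Below is a minimal Python sketch of that quorum bookkeeping; all names are hypothetical (the real logic is the C++ scheme-board subscriber), and the failure/`partial` path is ignored for brevity.

```python
# Illustrative quorum tracker for a 3-replica scheme-board sync,
# mirroring the "size# 3, half# 1" bookkeeping in the log above.
class SyncQuorum:
    def __init__(self, size: int):
        self.size = size          # number of replicas polled (size# 3)
        self.half = size // 2     # half# 1 in the log
        self.successes = 0
        self.done = False

    def on_response(self) -> None:
        if self.done:
            # Matches "Unexpected sync response" for replies after quorum.
            print("unexpected sync response (already done)")
            return
        self.successes += 1
        # "Sync is done" once a strict majority has answered successfully.
        if self.successes > self.half:
            self.done = True
            print(f"sync done: successes# {self.successes}")
        else:
            print(f"sync in progress: successes# {self.successes}")

q = SyncQuorum(size=3)
q.on_response()   # replica 1 -> in progress (1 is not > 1)
q.on_response()   # replica 2 -> done (2 > 1)
q.on_response()   # replica 3 -> unexpected, quorum already reached
```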
2025-03-28T09:04:23.095516Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783681758411050:2446] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:04:23.104303Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783681758411050:2446] Handle TEvDescribeSchemeResult Forward to# [1:7486783681758411049:2445] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
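[editor's note] The `TTxLogin Complete` record earlier in this section prints both the full login token and a `SanitizedToken` whose signature segment is masked with `**`. The claims the schemeshard logs are recoverable from the first two base64url segments alone, with no signature needed; a small standard-library Python sketch:

```python
import base64
import json

def b64url_decode(segment: str) -> bytes:
    # JWT segments are base64url without padding; restore it first.
    return base64.urlsafe_b64decode(segment + "=" * (-len(segment) % 4))

# Sanitized token from the TTxLogin record above: header.payload.**
sanitized = ("eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ."
             "eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk1ODY3LCJpYXQiOjE3"
             "NDMxNTI2NjcsInN1YiI6InVzZXIxIn0.**")

header_b64, payload_b64, signature = sanitized.split(".")
print(json.loads(b64url_decode(header_b64)))   # {'alg': 'PS256', 'kid': '3'}
print(json.loads(b64url_decode(payload_b64)))  # aud=['/MyRoot'], exp, iat, sub='user1'
print(signature)                               # '**' -- masked, cannot be verified
```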
2025-03-28T09:04:23.131108Z node 1 :TX_PROXY DEBUG: actor# [1:7486783673168475962:2116] Handle TEvProposeTransaction 2025-03-28T09:04:23.131137Z node 1 :TX_PROXY DEBUG: actor# [1:7486783673168475962:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:04:23.131244Z node 1 :TX_PROXY DEBUG: actor# [1:7486783673168475962:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486783681758411058:2453] 2025-03-28T09:04:23.236386Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783681758411058:2453] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-28T09:04:23.236438Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783681758411058:2453] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:04:23.236494Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783681758411058:2453] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:04:23.236625Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486783677463443281:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615 ... :0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:04:26.082907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486783694643313718:3038][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:04:26.088211Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486783673168475642:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [1:7486783694643313729:3038] 2025-03-28T09:04:26.088243Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486783673168475642:2050] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2025-03-28T09:04:26.088307Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486783673168475642:2050] Subscribe: subscriber# [1:7486783694643313729:3038], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:04:26.088349Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486783673168475645:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [1:7486783694643313730:3038] 2025-03-28T09:04:26.088369Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486783673168475645:2053] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2025-03-28T09:04:26.088395Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486783673168475645:2053] Subscribe: subscriber# [1:7486783694643313730:3038], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:04:26.088426Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486783673168475648:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: 
sender# [1:7486783694643313731:3038] 2025-03-28T09:04:26.088434Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486783673168475648:2056] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2025-03-28T09:04:26.088453Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486783673168475648:2056] Subscribe: subscriber# [1:7486783694643313731:3038], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:04:26.088496Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486783694643313729:3038][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [1:7486783673168475642:2050] 2025-03-28T09:04:26.088518Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486783694643313730:3038][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [1:7486783673168475645:2053] 2025-03-28T09:04:26.088539Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486783694643313731:3038][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [1:7486783673168475648:2056] 2025-03-28T09:04:26.088599Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486783694643313718:3038][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [1:7486783694643313726:3038] 2025-03-28T09:04:26.088645Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486783694643313718:3038][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [1:7486783694643313727:3038] 2025-03-28T09:04:26.088681Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7486783694643313718:3038][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7486783677463443281:2129], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:04:26.088703Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486783694643313718:3038][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [1:7486783694643313728:3038] 2025-03-28T09:04:26.088727Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7486783694643313718:3038][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [1:7486783677463443281:2129], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:04:26.088766Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486783673168475642:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7486783694643313729:3038] 2025-03-28T09:04:26.088791Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486783673168475645:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7486783694643313730:3038] 2025-03-28T09:04:26.088805Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486783673168475648:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7486783694643313731:3038] 
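[editor's note] Records in these traces share a fixed prefix — ISO-8601 timestamp, `node N`, `:COMPONENT LEVEL:` — which makes a failed run easy to triage mechanically. A minimal parser sketch follows; the regex is an assumption about the layout shown in this log, not a documented format, and the severity order is likewise inferred from the levels that appear here (DEBUG, INFO, NOTICE, WARN, ERROR).

```python
import re

# Matches the record prefix used throughout this log, e.g.:
#   2025-03-28T09:04:26.088880Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: <message>
RECORD = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
    r"node (?P<node>\d+) "
    r":(?P<component>[A-Z0-9_]+) (?P<level>[A-Z]+): (?P<msg>.*)"
)

def filter_records(lines, min_level="WARN"):
    # Severity order as observed in this log; an assumption, since the
    # real enum lives in the YDB sources.
    order = ["DEBUG", "INFO", "NOTICE", "WARN", "ERROR"]
    threshold = order.index(min_level)
    for line in lines:
        m = RECORD.match(line)
        if m and m.group("level") in order and order.index(m.group("level")) >= threshold:
            yield m.group("ts"), m.group("component"), m.group("msg")

sample = ("2025-03-28T09:04:26.868504Z node 4 :FLAT_TX_SCHEMESHARD WARN: "
          "Cannot subscribe to console configs")
print(list(filter_records([sample])))
```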
2025-03-28T09:04:26.088880Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486783677463443281:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-03-28T09:04:26.088970Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486783677463443281:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7486783694643313718:3038] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:04:26.089061Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486783677463443281:2129], cacheItem# { Subscriber: { Subscriber: [1:7486783694643313718:3038] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:04:26.089161Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486783694643313732:3040], recipient# [1:7486783694643313717:2336], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:04:26.906977Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783673168475825:2188];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:26.907084Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:26.939053Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486783677463443281:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:04:26.939174Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486783677463443281:2129], cacheItem# { Subscriber: { Subscriber: [1:7486783677463443740:2438] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 
IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:04:26.939274Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486783694643313749:3041], recipient# [1:7486783694643313748:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:04:27.058829Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486783677463443281:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:04:27.058969Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486783677463443281:2129], cacheItem# { Subscriber: { Subscriber: [1:7486783694643313699:3036] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:04:27.059078Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486783698938281052:3047], recipient# [1:7486783698938281051:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TExportToS3Tests::DropCopiesBeforeTransferring1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 15308, MsgBus: 14996 2025-03-28T09:02:00.043882Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783062192395459:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:00.043923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018ac/r3tmp/tmpvGRU2E/pdisk_1.dat 2025-03-28T09:02:06.093257Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783062192395459:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:06.093566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:10.007534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:10.007706Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:14.060165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:14.167773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:15.210624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:15.290815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:15.526332Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:15.545541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:15.545642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:15.587698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15308, node 1 2025-03-28T09:02:15.979259Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:15.979281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:15.979290Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:15.979399Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14996 TClient is connected to server localhost:14996 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:16.920920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:16.941332Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:16.956751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.294099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.635711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.786511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:19.355732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783148091743109:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:19.355847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:19.641122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.707134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.745821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.788432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.858798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.940775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:20.026378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783152386710924:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:20.026474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:20.026812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783152386710929:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:20.031119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:20.043247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783152386710931:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:02:20.120334Z node 1 :TX_PROXY ERROR: Actor# [1:7486783152386710986:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:21.535437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:02:22.933019Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jqdzsx5z6n4kv77tedq6rae9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVkZWM2NzctZmRjOTVhYzAtODQxZjVhMDEtNjlhZjUxNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:22.943936Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqdzsx67an7fkscwsgec7yg2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODA2MjAxZS1kMzNmNGMwMi01MDQ3MzgxNi02YTQxY2ExMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:22.967519Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jqdzsx6d3df6y49nphe431b8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGE2YTYxMzgtNmIwM2NlZmUtMTkyODIyYWItODBlZmI4MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:22.980337Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqdzsx6a8zjggter7fgtnesc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmE3YWJkMWItYTMzZjA0ZC1lY2I4MWU1Ny0xOTkxYTYwNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:22.989846Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqdzsx5z6n4kv77tedq6rae9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVkZWM2NzctZmRjOTVhYzAtODQxZjVhMDEtNjlhZjUxNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:23.003151Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jqdzsx6hceahkfxs1djtjxbx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGI3YWViNTEtNjFkYTNiZGMtNThiMDExYTUtNjBlOGU0ZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:23.004105Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jqdzsx6mag97qct5r3b1aaev, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2JkYzg1MDgtMzE0YWZiYjQtZWMwNGU5MjItY2YxZWFkMDY=, CurrentExecutionId: , CustomerSup ... sion/3?node_id=2&id=OTg4ZmMzMDQtNGFjYzNlYi1iOTIyY2ZhMC01MjQ4Zjg1Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.660822Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721635. Ctx: { TraceId: 01jqdzxj373jv091apvfv4gg2k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDI1MDA5ZDktOThjYzlmNWUtN2EyNTI1YmItMjE1OGZiMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.665945Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721636. 
Ctx: { TraceId: 01jqdzxj283s6jc4e1hh549ftz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjNiMzAzN2QtZjU5ZjNjOTQtMmZkYjI3NTYtODlkMmFmZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.671760Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721638. Ctx: { TraceId: 01jqdzxj283s6jc4e1hh549ftz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjNiMzAzN2QtZjU5ZjNjOTQtMmZkYjI3NTYtODlkMmFmZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.672880Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721637. Ctx: { TraceId: 01jqdzxj27azcg0zzcej3m54ca, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTA4NGRiNmUtZWUyOTg1MS01ZmYyZDYzMS05MjkwNzA5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.685452Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721639. Ctx: { TraceId: 01jqdzxj4g43m3h9v4aas9x43x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDkzYzdjNi1jMzk3NmRkNS1kMWY3MDRhNi04OTkxODA5Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.698082Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721640. Ctx: { TraceId: 01jqdzxj4g43m3h9v4aas9x43x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDkzYzdjNi1jMzk3NmRkNS1kMWY3MDRhNi04OTkxODA5Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.706167Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721641. Ctx: { TraceId: 01jqdzxj4g43m3h9v4aas9x43x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDkzYzdjNi1jMzk3NmRkNS1kMWY3MDRhNi04OTkxODA5Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.709261Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721642. Ctx: { TraceId: 01jqdzxj527r921j2x28pdz18k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODI1MGI1MTItNjgwZDAzNjEtZGE5MTRlY2UtOWE3NjNkMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.710153Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721643. Ctx: { TraceId: 01jqdzxj573jdyk10vrxx5kx45, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDI1MDA5ZDktOThjYzlmNWUtN2EyNTI1YmItMjE1OGZiMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.722827Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721644. Ctx: { TraceId: 01jqdzxj4g43m3h9v4aas9x43x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDkzYzdjNi1jMzk3NmRkNS1kMWY3MDRhNi04OTkxODA5Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.749794Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721645. Ctx: { TraceId: 01jqdzxj5m3n53h6sh349t9tvd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGMwMTBiYjYtYzE1NGI2OGItNTc4NmY0OS01ZTQ1MTQ2MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.760195Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721646. 
Ctx: { TraceId: 01jqdzxj573jdyk10vrxx5kx45, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDI1MDA5ZDktOThjYzlmNWUtN2EyNTI1YmItMjE1OGZiMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.763069Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721647. Ctx: { TraceId: 01jqdzxj527r921j2x28pdz18k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODI1MGI1MTItNjgwZDAzNjEtZGE5MTRlY2UtOWE3NjNkMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.780793Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721648. Ctx: { TraceId: 01jqdzxj573jdyk10vrxx5kx45, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDI1MDA5ZDktOThjYzlmNWUtN2EyNTI1YmItMjE1OGZiMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.781778Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721649. Ctx: { TraceId: 01jqdzxj5m3n53h6sh349t9tvd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGMwMTBiYjYtYzE1NGI2OGItNTc4NmY0OS01ZTQ1MTQ2MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.782554Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721650. Ctx: { TraceId: 01jqdzxj527r921j2x28pdz18k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODI1MGI1MTItNjgwZDAzNjEtZGE5MTRlY2UtOWE3NjNkMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.793730Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721651. Ctx: { TraceId: 01jqdzxj527r921j2x28pdz18k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ODI1MGI1MTItNjgwZDAzNjEtZGE5MTRlY2UtOWE3NjNkMTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.794080Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721652. Ctx: { TraceId: 01jqdzxj573jdyk10vrxx5kx45, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDI1MDA5ZDktOThjYzlmNWUtN2EyNTI1YmItMjE1OGZiMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.804420Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721653. Ctx: { TraceId: 01jqdzxj7a6y6ecngkm74jjyp1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjNiMzAzN2QtZjU5ZjNjOTQtMmZkYjI3NTYtODlkMmFmZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.804639Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721654. Ctx: { TraceId: 01jqdzxj7w177bqp9wv3xaprq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTA4NGRiNmUtZWUyOTg1MS01ZmYyZDYzMS05MjkwNzA5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.813930Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721655. Ctx: { TraceId: 01jqdzxj8kbcvw8n7dvdd709w4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDkzYzdjNi1jMzk3NmRkNS1kMWY3MDRhNi04OTkxODA5Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.821867Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721656. 
Ctx: { TraceId: 01jqdzxj7w177bqp9wv3xaprq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTA4NGRiNmUtZWUyOTg1MS01ZmYyZDYzMS05MjkwNzA5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.823824Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721657. Ctx: { TraceId: 01jqdzxj8kbcvw8n7dvdd709w4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDkzYzdjNi1jMzk3NmRkNS1kMWY3MDRhNi04OTkxODA5Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.824063Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721658. Ctx: { TraceId: 01jqdzxj7a6y6ecngkm74jjyp1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjNiMzAzN2QtZjU5ZjNjOTQtMmZkYjI3NTYtODlkMmFmZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.831869Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721659. Ctx: { TraceId: 01jqdzxj7w177bqp9wv3xaprq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTA4NGRiNmUtZWUyOTg1MS01ZmYyZDYzMS05MjkwNzA5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-03-28T09:04:22.835555Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721660. Ctx: { TraceId: 01jqdzxj7a6y6ecngkm74jjyp1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjNiMzAzN2QtZjU5ZjNjOTQtMmZkYjI3NTYtODlkMmFmZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.846365Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721661. Ctx: { TraceId: 01jqdzxj7w177bqp9wv3xaprq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTA4NGRiNmUtZWUyOTg1MS01ZmYyZDYzMS05MjkwNzA5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2025-03-28T09:04:22.858913Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721662. Ctx: { TraceId: 01jqdzxja22v4tcckqwvxcqef7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGMwMTBiYjYtYzE1NGI2OGItNTc4NmY0OS01ZTQ1MTQ2MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.866147Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721663. Ctx: { TraceId: 01jqdzxja22v4tcckqwvxcqef7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGMwMTBiYjYtYzE1NGI2OGItNTc4NmY0OS01ZTQ1MTQ2MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.870828Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721664. Ctx: { TraceId: 01jqdzxja57s12qa1f23jve7fb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDI1MDA5ZDktOThjYzlmNWUtN2EyNTI1YmItMjE1OGZiMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.873146Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721665. Ctx: { TraceId: 01jqdzxja22v4tcckqwvxcqef7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGMwMTBiYjYtYzE1NGI2OGItNTc4NmY0OS01ZTQ1MTQ2MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.882461Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721666. 
Ctx: { TraceId: 01jqdzxja22v4tcckqwvxcqef7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OGMwMTBiYjYtYzE1NGI2OGItNTc4NmY0OS01ZTQ1MTQ2MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.883892Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721667. Ctx: { TraceId: 01jqdzxja57s12qa1f23jve7fb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDI1MDA5ZDktOThjYzlmNWUtN2EyNTI1YmItMjE1OGZiMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:22.889846Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721668. Ctx: { TraceId: 01jqdzxja57s12qa1f23jve7fb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDI1MDA5ZDktOThjYzlmNWUtN2EyNTI1YmItMjE1OGZiMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-03-28T09:04:22.899510Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721669. Ctx: { TraceId: 01jqdzxja57s12qa1f23jve7fb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDI1MDA5ZDktOThjYzlmNWUtN2EyNTI1YmItMjE1OGZiMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS |77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |77.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow >> KqpScan::ScanRetryRead [GOOD] >> KqpScan::ScanRetryReadRanges |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] >> YdbProxy::CopyTables [GOOD] >> YdbProxy::AlterTopic >> KqpWorkloadService::TestLessConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestCpuLoadThreshold >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink >> DataShardSnapshots::VolatileSnapshotReadTable [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard >> KqpPg::CreateNotNullPgColumn [GOOD] >> KqpPg::CreateSequence |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] >> TExportToS3Tests::CorruptedDyNumber >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists >> DataShardSnapshots::ShardRestartLockBrokenByConflict [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert >> KikimrIcGateway::TestCreateExternalTable [GOOD] >> KikimrIcGateway::TestCreateResourcePool >> TExternalDataSourceTest::CreateExternalDataSource >> EraseRowsTests::EraseRowsShouldSuccess [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink [GOOD] >> 
KqpPg::InsertNoTargetColumns_SerialNotNull-useSink >> KqpScan::ScanDuringSplit10 [GOOD] >> KqpScan::ScanDuringSplitThenMerge >> TExternalDataSourceTest::RemovingReferencesFromDataSources >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> Cdc::RacyRebootAndSplitWithTxInflight [GOOD] >> Cdc::RacyActivateAndEnqueue >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] >> TExportToS3Tests::CorruptedDyNumber [GOOD] >> TExportToS3Tests::CompletedExportEndTime >> TExternalDataSourceTest::SchemeErrors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:04:32.139564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:04:32.139687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:32.139738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:04:32.139773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:04:32.139816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:04:32.139873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:04:32.139928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:32.140010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:04:32.140378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:32.215844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:04:32.215908Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:32.231228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:32.231528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:04:32.231717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:04:32.237876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:04:32.238078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:04:32.238735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:32.238970Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:32.240974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:32.242380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:32.242440Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:32.242618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:04:32.242723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:32.242772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:04:32.242868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:04:32.251558Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:04:32.377772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:32.378025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:32.378245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:04:32.378458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:04:32.378513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:32.384224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:32.384377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:04:32.384589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:32.384641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:04:32.384686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:04:32.384717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:04:32.386896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:32.386954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 
ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:04:32.387009Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:04:32.389001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:32.389054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:32.389089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:32.389141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:04:32.392770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:04:32.399450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:04:32.399726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:04:32.400732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:32.400883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:32.400940Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:32.401209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:04:32.401257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:32.401420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:32.401518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:32.409514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:32.409614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:32.409821Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:32.409871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 
2025-03-28T09:04:32.410264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:32.410304Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:04:32.410389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:32.410419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:32.410453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:32.410480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:32.410522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:04:32.410562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:32.410590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:04:32.410634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:04:32.416596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:32.416683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:04:32.416718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:04:32.418757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:04:32.418899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:04:32.418933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
32.456370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:32.456496Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002 2025-03-28T09:04:32.456645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-28T09:04:32.456824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:32.456884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:04:32.458911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:32.458978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:32.459157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:04:32.459276Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:32.459312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:04:32.459353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:04:32.459685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:04:32.459731Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:04:32.459819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:04:32.459850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:04:32.459901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:04:32.459932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:04:32.459972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:04:32.460043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:04:32.460086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:04:32.460116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:04:32.460185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:04:32.460218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-28T09:04:32.460248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 
72057594046678944, LocalPathId: 1], 4 2025-03-28T09:04:32.460274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-28T09:04:32.460953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:32.461063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:32.461097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:04:32.461130Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-03-28T09:04:32.461166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:32.461917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:32.462001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:32.462029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:04:32.462066Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-28T09:04:32.462091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:04:32.462147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:04:32.475963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:04:32.476092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:04:32.476336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:04:32.476375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:04:32.476768Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:04:32.476869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:04:32.476901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:295:2286] TestWaitNotification: OK eventTxId 101 2025-03-28T09:04:32.477436Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" 
Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:04:32.477653Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/UniqueName" took 242us result status StatusSuccess 2025-03-28T09:04:32.478031Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-03-28T09:04:32.486011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:32.486360Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-03-28T09:04:32.486439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TAlterExternalDataSource Propose: opId# 102:0, path# /MyRoot/UniqueName 2025-03-28T09:04:32.486600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, at schemeshard: 72057594046678944 2025-03-28T09:04:32.489375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource" TxId: 102 
SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-03-28T09:04:32.489565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:04:32.489895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:04:32.489933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:04:32.490327Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:04:32.490428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:04:32.490480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:303:2294] TestWaitNotification: OK eventTxId 102
|77.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut
|77.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut
|77.0%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut
>> TExternalDataSourceTest::CreateExternalDataSource [GOOD]
>> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:04:32.522999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:04:32.523101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:32.523137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:04:32.523172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:04:32.523214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:04:32.523243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:04:32.523302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:32.523376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
[RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:04:32.523727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:32.624285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:04:32.624344Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:32.634030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:32.634420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:04:32.634653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:04:32.664216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:04:32.664439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:04:32.665125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:32.665352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:32.684811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:32.687472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:32.687552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:32.687873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:04:32.687924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:32.687963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:04:32.688610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:04:32.695302Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 2025-03-28T09:04:33.070607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:33.079126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:33.079407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:04:33.079648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:04:33.079715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:33.087589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:33.087761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:04:33.087962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:33.088030Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:04:33.088075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:04:33.088121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:04:33.095815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:33.095912Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:04:33.095958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:04:33.102769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:33.102841Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:33.102898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:33.102960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:04:33.106556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:04:33.112409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:04:33.112634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:04:33.113800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:33.113979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:33.114045Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:33.114366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:04:33.114412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:33.114601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:33.114678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:04:33.126449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:33.126512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:33.126718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:33.126796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:04:33.126907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:33.126954Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:04:33.127052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:33.127089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:33.127146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:33.127185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:33.127224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:04:33.127266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:33.127300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:04:33.127336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:04:33.127413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:33.127455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:04:33.127490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:04:33.139013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
SHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:04:33.232899Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-03-28T09:04:33.232937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:33.233823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:04:33.233898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:04:33.233921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:04:33.233950Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:04:33.233996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:04:33.234084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-28T09:04:33.242838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-28T09:04:33.243035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-03-28T09:04:33.243530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:33.243671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:33.243744Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalDataSource TPropose, operationId: 102:0HandleReply TEvOperationPlan: step# 5000003 2025-03-28T09:04:33.243874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-03-28T09:04:33.244106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:33.244195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:04:33.245241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:04:33.245358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 
2025-03-28T09:04:33.247422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:33.247482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:33.247661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:04:33.247749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:04:33.247826Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:33.247857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-28T09:04:33.247904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:04:33.247937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:04:33.248345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:04:33.248391Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:04:33.248487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:04:33.248529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:04:33.248568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:04:33.248622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:04:33.248675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-28T09:04:33.248718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:04:33.248753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:04:33.248789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:04:33.248864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:04:33.248901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-28T09:04:33.248954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-28T09:04:33.248984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-28T09:04:33.249652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:04:33.249757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:04:33.249794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:04:33.249827Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:04:33.249882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:33.251387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:04:33.251468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:04:33.251498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:04:33.251526Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:04:33.251555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:04:33.251615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-28T09:04:33.253843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:04:33.255136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:04:33.255348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:04:33.255385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:04:33.255864Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:04:33.255972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:04:33.256007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:333:2324] TestWaitNotification: OK eventTxId 102 2025-03-28T09:04:33.256471Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:04:33.256726Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 241us result status StatusSuccess 2025-03-28T09:04:33.257060Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: 
"MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> YdbIndexTable::MultiShardTableOneUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] >> YdbProxy::AlterTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:04:33.089440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:04:33.089555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:33.089593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:04:33.089630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:04:33.089675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:04:33.089718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:04:33.089786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:33.098255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 
604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:04:33.098818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:33.229221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:04:33.229280Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:33.245998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:33.246411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:04:33.246603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:04:33.283022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:04:33.283234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:04:33.283981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:33.284206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:33.291545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:33.294045Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:33.294139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:33.294465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:04:33.294521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:33.294565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:04:33.295248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:04:33.302431Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 2025-03-28T09:04:33.451395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:33.451755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:33.452021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:04:33.452300Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:04:33.452374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:33.456389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:33.456554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:04:33.456821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:33.456893Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:04:33.456947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:04:33.456990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:04:33.459710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:33.459779Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:04:33.459823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:04:33.462955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:33.463030Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:33.463107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:33.463168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:04:33.471301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:04:33.473991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:04:33.474207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:04:33.475287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:33.475468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:33.475546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:33.475861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:04:33.475914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:33.476084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:33.476238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:04:33.478737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:33.478800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:33.479012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:33.479078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:04:33.479182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:33.479230Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:04:33.479354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:33.479399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:33.479454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:33.479491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:33.479529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:04:33.479579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:33.479622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:04:33.479659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:04:33.479726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:33.479767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:04:33.479818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:04:33.483292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
opId# 101:0 ProgressState 2025-03-28T09:04:34.283559Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:04:34.283602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:04:34.283644Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:04:34.283681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:04:34.283729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:04:34.283771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:04:34.283805Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:04:34.283837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:04:34.283921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:04:34.283967Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-28T09:04:34.284004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-28T09:04:34.284043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-28T09:04:34.285098Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:34.285186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:34.285223Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:04:34.285265Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:04:34.285310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:34.286928Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:34.287014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:34.287044Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:04:34.287074Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-28T09:04:34.287106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:04:34.287188Z node 2 :FLAT_TX_SCHEMESHARD 
NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:04:34.290163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:04:34.290498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:04:34.290752Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:04:34.290805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:04:34.291277Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:04:34.291384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:04:34.291425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:305:2296] TestWaitNotification: OK eventTxId 101 2025-03-28T09:04:34.291883Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:04:34.292103Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 270us result status StatusSuccess 2025-03-28T09:04:34.292418Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-03-28T09:04:34.295856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" 
SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:34.296190Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } 2025-03-28T09:04:34.296271Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 102:0, path# /MyRoot/MyExternalDataSource 2025-03-28T09:04:34.296416Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-03-28T09:04:34.299115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2 PathCreateTxId: 101, at schemeshard: 72057594046678944 2025-03-28T09:04:34.299294Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:04:34.299644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:04:34.299704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:04:34.300119Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:04:34.300220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:04:34.300261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:313:2304] TestWaitNotification: OK eventTxId 102 2025-03-28T09:04:34.300845Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:04:34.301032Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 217us result status StatusSuccess 2025-03-28T09:04:34.301336Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: 
"MyExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpPg::CreateTempTable [GOOD] >> KqpPg::CreateTempTableSerial >> TExportToS3Tests::CompletedExportEndTime [GOOD] >> TExportToS3Tests::ChecksumsWithCompression >> TExternalDataSourceTest::SchemeErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:04:33.886523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:04:33.886755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:33.886798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:04:33.886849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:04:33.886901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:04:33.886932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:04:33.887008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:33.887095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-03-28T09:04:33.887470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:34.007232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:04:34.007315Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:34.031009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:34.031406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:04:34.031579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:04:34.065851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:04:34.066040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:04:34.066779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:34.066990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:34.074489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:34.076977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:34.077054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:34.077352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:04:34.077403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:34.077448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:04:34.078094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.085251Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 2025-03-28T09:04:34.413703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:34.413976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.414188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:04:34.414404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2025-03-28T09:04:34.414460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.431344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:34.431514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:04:34.431694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.431907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:04:34.431954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:04:34.431990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:04:34.438528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.438599Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:04:34.438635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:04:34.443157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.443222Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.443298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:34.443376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:04:34.448131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:04:34.450264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:04:34.450427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:04:34.451426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:34.451563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:34.451615Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:34.451875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:04:34.451924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:34.452073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:34.452147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:04:34.456269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:34.456324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:34.456504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:34.456570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:04:34.456662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.456707Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:04:34.456798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:34.456832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:34.456882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:34.456914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:34.456948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:04:34.456985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:34.457017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:04:34.457049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:04:34.457114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:34.457149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:04:34.457181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:04:34.459982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
thId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:04:34.597872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:04:34.597911Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-03-28T09:04:34.597945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:34.598798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:04:34.598893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:04:34.598936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:04:34.598967Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-28T09:04:34.598999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:04:34.599077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-28T09:04:34.604550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-03-28T09:04:34.604709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-03-28T09:04:34.605760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:34.605878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:34.605931Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000005 2025-03-28T09:04:34.606048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:04:34.606123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-03-28T09:04:34.607828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:34.607930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:04:34.609223Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:04:34.610949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2025-03-28T09:04:34.612509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:34.612547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:34.612682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:04:34.612807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:34.612870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-03-28T09:04:34.612904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-03-28T09:04:34.612962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.612996Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-28T09:04:34.613086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:04:34.613116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:04:34.613154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:04:34.613186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:04:34.613219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-28T09:04:34.613253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:04:34.613288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:04:34.613333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:04:34.613399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:04:34.613432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-03-28T09:04:34.613500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-03-28T09:04:34.613532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-28T09:04:34.614218Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:04:34.614319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 
Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:04:34.614351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:04:34.614389Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:04:34.614434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:04:34.614863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:04:34.614912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:04:34.614983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:34.615293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:04:34.615356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:04:34.615387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:04:34.615421Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-03-28T09:04:34.615450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:34.615510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-03-28T09:04:34.621486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:04:34.622039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:04:34.622399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-28T09:04:34.622635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-28T09:04:34.622676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-28T09:04:34.623145Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-28T09:04:34.623230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:04:34.623265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:392:2383] 
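The txId 104 sequence above is the teardown half of RemovingReferencesFromDataSources: TDropExternalDataSource plans at step 5000005, the dropped path's version is published as 18446744073709551615 (uint64 max, used as a tombstone), and TTxCleanDroppedPaths persists the removal, after which the describe below resolves no further than /MyRoot. A hedged YQL equivalent of the operation under test (assumed syntax; path taken from the transcript):

    -- Drops the source; a subsequent describe of the path should return
    -- StatusPathDoesNotExist, matching the result below.
    DROP EXTERNAL DATA SOURCE `/MyRoot/ExternalDataSource`;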
TestWaitNotification: OK eventTxId 104 2025-03-28T09:04:34.623791Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:04:34.623983Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 185us result status StatusPathDoesNotExist 2025-03-28T09:04:34.624149Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink >> TPQTest::TestLowWatermark [GOOD] >> TPQTest::TestGetTimestamps >> DistributedEraseTests::ConditionalEraseRowsShouldErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsCheckLimits >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:04:34.503699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:04:34.503788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:34.503829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:04:34.503869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:04:34.503916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:04:34.503946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:04:34.504006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:34.504088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:04:34.504434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:34.608729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:04:34.608797Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:34.617713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:34.618120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:04:34.618297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:04:34.634557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:04:34.634774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:04:34.635452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:34.635634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:34.642319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:34.644688Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:34.644767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:34.645080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:04:34.645155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:34.645203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:04:34.645843Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.652378Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 2025-03-28T09:04:34.869077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:34.869398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.869660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] 
was 0 2025-03-28T09:04:34.869979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:04:34.870048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.876881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:34.877063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:04:34.877270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.877365Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:04:34.877429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:04:34.877488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:04:34.883581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.883666Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:04:34.883733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:04:34.891676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.891748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.891817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:34.891881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:04:34.895936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:04:34.903903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:04:34.904126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:04:34.905230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:34.905388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 
5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:34.905453Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:34.905762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:04:34.905819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:34.905981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:34.906086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:04:34.919298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:34.919372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:34.919581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:34.919656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:04:34.919748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:34.919795Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:04:34.919898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:34.919934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:34.919973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:34.920007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:34.920053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:04:34.920113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:34.920168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:04:34.920202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:04:34.920276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:34.920318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:04:34.920350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:04:34.928336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
e DataStream was not found" TxId: 125 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:35.170680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 125, database: /MyRoot, subject: , status: StatusSchemeError, reason: (NKikimr::NExternalSource::TExternalSourceException) External source with type DataStream was not found, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 2025-03-28T09:04:35.173997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:35.174405Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } 2025-03-28T09:04:35.174499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 126:0, path# /MyRoot/DirA/MyExternalDataSource 2025-03-28T09:04:35.174646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Authorization method isn't specified, at schemeshard: 72057594046678944 2025-03-28T09:04:35.181504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Authorization method isn\'t specified" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:35.181805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Authorization method isn't specified, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-03-28T09:04:35.185129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:35.185534Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-03-28T09:04:35.185625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 127:0, path# /MyRoot/DirA/MyExternalDataSource 2025-03-28T09:04:35.185788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-03-28T09:04:35.193756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Maximum length of location must be less or equal equal to 1000 but got 1001" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:35.193998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got 
TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-03-28T09:04:35.197314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:35.197722Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2025-03-28T09:04:35.274911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 128:0, path# /MyRoot/DirA/MyExternalDataSource 2025-03-28T09:04:35.275155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2025-03-28T09:04:35.283658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Maximum length of installation must be less or equal equal to 1000 but got 1001" TxId: 128 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:35.283891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-03-28T09:04:35.287142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:35.287395Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } 2025-03-28T09:04:35.287496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalDataSource Propose: opId# 129:0, path# /MyRoot/DirA/ 2025-03-28T09:04:35.287613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2025-03-28T09:04:35.295853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/DirA/\', error: path part shouldn\'t be empty" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:35.296106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/ TestModificationResult got TxId: 129, wait until txId: 129 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTopic [GOOD] Test command err: 2025-03-28T09:04:21.575520Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783673972516681:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:21.587006Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e47/r3tmp/tmpXZAhR5/pdisk_1.dat 2025-03-28T09:04:22.127136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:22.127228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:22.129254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:22.156958Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8320 TServer::EnableGrpc on GrpcPort 21682, node 1 2025-03-28T09:04:22.465729Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:22.465757Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:22.465773Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:22.465910Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8320 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:23.219253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:25.323504Z node 1 :TX_PROXY ERROR: Actor# [1:7486783691152386391:2308] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-03-28T09:04:25.343907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:04:26.063418Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783695916163108:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:26.063457Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e47/r3tmp/tmpQIcpV3/pdisk_1.dat 2025-03-28T09:04:26.278194Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:26.315422Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:26.315504Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:26.319772Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31348 TServer::EnableGrpc on GrpcPort 8110, node 2 2025-03-28T09:04:26.519189Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:26.519207Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:26.519217Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:26.519346Z node 2 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:31348 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:26.816170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:30.204321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:04:30.258019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:04:31.120076Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486783717801655157:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:31.172086Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e47/r3tmp/tmpYGCz1p/pdisk_1.dat 2025-03-28T09:04:31.259696Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:31.297010Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:31.297111Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:31.298829Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61244 TServer::EnableGrpc on GrpcPort 15316, node 3 2025-03-28T09:04:31.510520Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:31.510544Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:31.510552Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:31.510674Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61244 WaitRootIsUp 'Root'... 
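The two StatusSchemeError rejections earlier in this section (txId 128 and txId 129) come from schemeshard-side validation of external data sources: the Installation string is capped at 1000 characters, and the proposed path must not contain an empty component. The test drives the schemeshard with raw proto transactions; assuming the YQL front end were used instead, a minimal sketch of a statement that reaches the same checks looks like this (the path and bucket URL are the test's own values):

    CREATE EXTERNAL DATA SOURCE `/MyRoot/DirA/MyExternalDataSource` WITH (
        SOURCE_TYPE = "ObjectStorage",                -- SourceType in the proto above
        LOCATION = "https://s3.cloud.net/my_bucket",  -- the txId 129 proposal's Location
        AUTH_METHOD = "NONE"                          -- Auth { None { } } in the proto
    );

A proposal whose path ends in a trailing slash (/MyRoot/DirA/) fails the "path part shouldn't be empty" check before any field-length validation runs, which matches the txId 129 trace above.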
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:31.845955Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:31.852199Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:04:32.002265Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:04:32.035035Z node 3 :TX_PROXY ERROR: Actor# [3:7486783722096623090:2395] txid# 281474976715660, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] >> KikimrIcGateway::TestCreateResourcePool [GOOD] >> KikimrIcGateway::TestALterResourcePool >> TExternalDataSourceTest::ReadOnlyMode |77.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |77.1%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |77.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut >> TExportToS3Tests::ChecksumsWithCompression [GOOD] >> KqpPg::EquiJoin+useSink [GOOD] >> KqpPg::EquiJoin-useSink >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource [GOOD] >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties >> KqpPg::CreateSequence [GOOD] >> KqpPg::AlterSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 
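The YdbProxy::AlterTopic run above ends with the proxy rejecting a retention period of 31536000s (365 days) because the allowed range is 1s to 2678400s (31 days). Assuming the YQL DDL path rather than the raw proxy call the test uses, a sketch of an alter that would trip the same limit (the topic path is hypothetical; only the interval comes from the log):

    ALTER TOPIC `/Root/my_topic` SET (
        -- 365 days = 31536000s, above the 2678400s (31-day) maximum reported above
        retention_period = Interval("P365D")
    );

Keeping the interval at or below Interval("P31D") stays inside the range the proxy accepts.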
2025-03-28T09:04:37.112239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:04:37.112363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:37.112433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:04:37.112484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:04:37.112532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:04:37.112569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:04:37.112649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:37.112795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:04:37.113150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:37.216990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:04:37.217054Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:37.224251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:37.224663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:04:37.224829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:04:37.242955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:04:37.243164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:04:37.243899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:37.244088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:37.259696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:37.262310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:37.262390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:37.262741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:04:37.262801Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:37.262853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:04:37.263561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:04:37.270408Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 2025-03-28T09:04:37.440357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:37.440702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:37.440927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:04:37.441209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:04:37.441263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:37.443663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:37.443808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:04:37.444001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:37.444079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:04:37.444125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:04:37.444168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:04:37.446253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:37.446312Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:04:37.446348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:04:37.448226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:37.448275Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:37.448319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:37.448376Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:04:37.452367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:04:37.454532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:04:37.454743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:04:37.455827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:37.455961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:37.456079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:37.456366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:04:37.456420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:37.456583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:37.456697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:04:37.458926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:37.458997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:37.459221Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:37.459265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:04:37.459342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:37.459387Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:04:37.459483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:37.459526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:37.459568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:37.459601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:37.459635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:04:37.459675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:37.459710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:04:37.459747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:04:37.459816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:37.459899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:04:37.459938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:04:37.469585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:04:37.469777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:04:37.469831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:04:37.469890Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:04:37.469941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:37.470057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:04:37.472924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:04:37.473530Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:04:37.474627Z node 1 :TX_PROXY DEBUG: actor# [1:272:2263] Bootstrap 2025-03-28T09:04:37.497948Z node 1 :TX_PROXY DEBUG: actor# [1:272:2263] Become StateWork (SchemeCache [1:277:2268]) 2025-03-28T09:04:37.500681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:37.501048Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalDataSource, opId 101:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2025-03-28T09:04:37.501144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TReject Propose, opId: 101:0, explain: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-03-28T09:04:37.501204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-03-28T09:04:37.502829Z node 1 :TX_PROXY DEBUG: actor# [1:272:2263] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:04:37.505629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:37.505843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource 2025-03-28T09:04:37.507127Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:04:37.507353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:04:37.507397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:04:37.507844Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:04:37.507968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:04:37.508021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:287:2278] TestWaitNotification: OK eventTxId 101 2025-03-28T09:04:37.508590Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:04:37.508781Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 213us result status StatusPathDoesNotExist 2025-03-28T09:04:37.509003Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: 
EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> KqpScanArrowInChanels::AggregateNoColumn >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] >> TExternalDataSourceTest::ReadOnlyMode [GOOD] >> TExternalDataSourceTest::DropTableTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] Test command err: 2025-03-28T09:03:20.205996Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783412703370037:2184];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:20.206079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cb6/r3tmp/tmpkAHiEG/pdisk_1.dat 2025-03-28T09:03:20.606194Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64453, node 1 2025-03-28T09:03:20.646207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:20.646288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:20.674103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:03:20.731097Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:20.731127Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:20.731139Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:20.731248Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:21.001589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
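The ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet trace above shows the schemeshard turning ReplaceIfExists: true into StatusPreconditionFailed while the EnableReplaceIfExistsForExternalEntities feature flag is off (logged as "feature flag ... 0"). In YQL terms the proto's ReplaceIfExists corresponds to CREATE OR REPLACE, so a hedged sketch of the statement gated by that flag is:

    CREATE OR REPLACE EXTERNAL DATA SOURCE `/MyRoot/MyExternalDataSource` WITH (
        SOURCE_TYPE = "ObjectStorage",
        LOCATION = "https://s3.cloud.net/my_bucket",
        AUTH_METHOD = "NONE"
    );

With the flag off the operation is rejected at propose time, before any path is created, which is why the follow-up describe of /MyRoot/MyExternalDataSource returns StatusPathDoesNotExist.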
2025-03-28T09:03:21.014181Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:03:23.290313Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-28T09:03:23.300553Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDFjMWM3N2UtODE2NzNjODEtOWNmZTU4NzUtYTUyNzVjYTA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZDFjMWM3N2UtODE2NzNjODEtOWNmZTU4NzUtYTUyNzVjYTA= 2025-03-28T09:03:23.301268Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783425588272439:2327], Start check tables existence, number paths: 2 2025-03-28T09:03:23.301365Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDFjMWM3N2UtODE2NzNjODEtOWNmZTU4NzUtYTUyNzVjYTA=, ActorId: [1:7486783425588272440:2328], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:23.303296Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-03-28T09:03:23.303437Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-28T09:03:23.304357Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-28T09:03:23.319127Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783425588272439:2327], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-28T09:03:23.319192Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783425588272439:2327], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-28T09:03:23.319219Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783425588272439:2327], Successfully finished 2025-03-28T09:03:23.319388Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-28T09:03:23.327104Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783425588272467:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:23.331212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:03:23.334978Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783425588272467:2302], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-28T09:03:23.337500Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783425588272467:2302], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-28T09:03:23.344267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783425588272467:2302], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:03:23.432185Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783425588272467:2302], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:23.436466Z node 1 :TX_PROXY ERROR: Actor# [1:7486783425588272519:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:03:23.436636Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783425588272467:2302], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-28T09:03:23.437015Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783425588272526:2341], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-03-28T09:03:23.438186Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783425588272526:2341], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-28T09:03:23.452296Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZDFjMWM3N2UtODE2NzNjODEtOWNmZTU4NzUtYTUyNzVjYTA=, ActorId: [1:7486783425588272440:2328], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:03:23.452379Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=ZDFjMWM3N2UtODE2NzNjODEtOWNmZTU4NzUtYTUyNzVjYTA=, ActorId: [1:7486783425588272440:2328], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:03:23.452403Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDFjMWM3N2UtODE2NzNjODEtOWNmZTU4NzUtYTUyNzVjYTA=, ActorId: [1:7486783425588272440:2328], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:03:23.452427Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDFjMWM3N2UtODE2NzNjODEtOWNmZTU4NzUtYTUyNzVjYTA=, ActorId: [1:7486783425588272440:2328], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:03:23.452510Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDFjMWM3N2UtODE2NzNjODEtOWNmZTU4NzUtYTUyNzVjYTA=, ActorId: [1:7486783425588272440:2328], ActorState: unknown state, Session actor destroyed 2025-03-28T09:03:24.150214Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783426951820458:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:24.169576Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cb6/r3tmp/tmpoWxx3J/pdisk_1.dat 2025-03-28T09:03:24.284790Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:24.305468Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:24.305549Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:24.307040Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc 
on GrpcPort 21335, node 2 2025-03-28T09:03:24.469193Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:24.469216Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:24.469227Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:24.469363Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2347 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:24.704635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:27.499226Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-28T09:03:27.501687Z node 2 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=2&id=MzUxMDM3ZS00MDRiNjljYS0zNWIwMThiMi1jNDYwYzM5Yg==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MzUxMDM3ZS00MDRiNjljYS0zNWIwMThiMi1jNDYwYzM5Yg== 2025-03-28T09:03:27.502545Z node 2 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [S ... 
rState: ReadyState, Session closed due to explicit close event 2025-03-28T09:04:34.157191Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=ZmFjOWUwMzctZWVhYzVhMTAtMjVjNmQ5YmMtYmYwMzk1ZWY=, ActorId: [8:7486783729995487281:5195], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:04:34.157216Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZmFjOWUwMzctZWVhYzVhMTAtMjVjNmQ5YmMtYmYwMzk1ZWY=, ActorId: [8:7486783729995487281:5195], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:04:34.157243Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZmFjOWUwMzctZWVhYzVhMTAtMjVjNmQ5YmMtYmYwMzk1ZWY=, ActorId: [8:7486783729995487281:5195], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:04:34.157305Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=ZmFjOWUwMzctZWVhYzVhMTAtMjVjNmQ5YmMtYmYwMzk1ZWY=, ActorId: [8:7486783729995487281:5195], ActorState: unknown state, Session actor destroyed 2025-03-28T09:04:34.184324Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxx9fezny509p91yxrsff, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-03-28T09:04:34.184487Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxx9fezny509p91yxrsff, txInfo Status: Committed Kind: ReadWrite TotalDuration: 87.54 ServerDuration: 87.407 QueriesCount: 2 2025-03-28T09:04:34.184572Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxx9fezny509p91yxrsff, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-28T09:04:34.184626Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxx9fezny509p91yxrsff, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:04:34.184654Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxx9fezny509p91yxrsff, EndCleanup, isFinal: 0 2025-03-28T09:04:34.184707Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxx9fezny509p91yxrsff, Sent query response back to proxy, proxyRequestId: 546, proxyId: [6:7486783506058309416:2279] 2025-03-28T09:04:34.185272Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, TxId: 2025-03-28T09:04:34.185379Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, 
RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2025-03-28T09:04:34.185780Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ReadyState, TraceId: 01jqdzxxc99xq4sqnjqjp4avm2, received request, proxyRequestId: 547 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [6:7486783729396619647:4949] database: /Root databaseId: /Root pool id: default 2025-03-28T09:04:34.185805Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ReadyState, TraceId: 01jqdzxxc99xq4sqnjqjp4avm2, request placed into pool from cache: default 2025-03-28T09:04:34.187006Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxxc99xq4sqnjqjp4avm2, ExecutePhyTx, tx: 0x000050C0003F6658 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-03-28T09:04:34.187069Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxxc99xq4sqnjqjp4avm2, Sending to Executer TraceId: 0 8 2025-03-28T09:04:34.187158Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxxc99xq4sqnjqjp4avm2, Created new KQP executer: [6:7486783729396619650:4941] isRollback: 0 2025-03-28T09:04:34.220004Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxxc99xq4sqnjqjp4avm2, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-03-28T09:04:34.220097Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxxc99xq4sqnjqjp4avm2, ExecutePhyTx, tx: 0x000050C0003F6598 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-03-28T09:04:34.221072Z node 6 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxxc99xq4sqnjqjp4avm2, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-03-28T09:04:34.221213Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxxc99xq4sqnjqjp4avm2, txInfo Status: Committed Kind: ReadOnly TotalDuration: 34.325 ServerDuration: 34.215 QueriesCount: 2 2025-03-28T09:04:34.221331Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxxc99xq4sqnjqjp4avm2, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-28T09:04:34.221389Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxxc99xq4sqnjqjp4avm2, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:04:34.221412Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxxc99xq4sqnjqjp4avm2, EndCleanup, isFinal: 0 2025-03-28T09:04:34.221461Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ExecuteState, TraceId: 01jqdzxxc99xq4sqnjqjp4avm2, Sent query response back to proxy, proxyRequestId: 547, proxyId: [6:7486783506058309416:2279] 2025-03-28T09:04:34.222388Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, TxId: 2025-03-28T09:04:34.222466Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, TxId: 2025-03-28T09:04:34.222651Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7486783523238179551:2344], DatabaseId: /Root, PoolId: sample_pool_id, succefully refreshed pool state, in flight: 0, delayed: 0 2025-03-28T09:04:34.223461Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:04:34.223521Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:04:34.223553Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: ReadyState, 
EndCleanup, isFinal: 1 2025-03-28T09:04:34.223580Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:04:34.223688Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZTNkOGRjYzctZjUwYzg0OWUtN2M2MWZiNjEtOGVhMzJlOTk=, ActorId: [6:7486783729396619606:4941], ActorState: unknown state, Session actor destroyed 2025-03-28T09:04:34.367694Z node 8 :BS_PROXY_PUT ERROR: [65a918aa5f7b7d86] Result# TEvPutResult {Id# [72075186224037889:1:902:0:0:42:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037889:1:902:0:0:42:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 6 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ChecksumsWithCompression [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:04:30.517100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:04:30.517213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:30.517252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:04:30.517290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:04:30.517334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:04:30.517368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:04:30.517425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:30.517540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:04:30.517921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:30.606787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:04:30.606856Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:30.626931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:30.627180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:04:30.627322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 
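The KqpWorkloadServiceDistributed test above exercises the workload manager end to end: a resource pool named sample_pool_id is created, sessions are admitted against its limits, and every lease refresh runs the two COUNT(*) queries over the .metadata/workload_manager tables quoted in the log. A sketch of the pool DDL behind the TPoolCreatorActor records, with assumed limit values since the test's actual settings are not visible in this excerpt (only the pool name comes from the log):

    CREATE RESOURCE POOL sample_pool_id WITH (
        CONCURRENT_QUERY_LIMIT = 5,  -- assumed; bounds the running_requests count
        QUEUE_SIZE = 100             -- assumed; bounds the delayed_requests count
    );

Requests over the concurrent limit are parked in .metadata/workload_manager/delayed_requests and moved to running_requests while their lease_deadline holds, which is what the TRefreshPoolStateQuery shown above counts.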
2025-03-28T09:04:30.633917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:04:30.634199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:04:30.634968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:30.635193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:30.637180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:30.638428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:30.638488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:30.638644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:04:30.638717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:30.638758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:04:30.638874Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:04:30.645573Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:04:30.777642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:30.778093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:30.778423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:04:30.780872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:04:30.781025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:30.784839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:30.785038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:04:30.785347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:30.785436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:04:30.785509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState 
no shards to create, do next state 2025-03-28T09:04:30.785560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:04:30.790876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:30.790958Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:04:30.791005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:04:30.793934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:30.793997Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:30.794037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:30.794096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:04:30.798319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:04:30.800704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:04:30.800998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:04:30.802114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:30.802362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:30.802443Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:30.802821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:04:30.802906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:30.803086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:30.803175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:30.806006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:30.806061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-03-28T09:04:30.806281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:30.806327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:04:30.806834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:30.806929Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:04:30.807025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:30.807057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:30.807098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:30.807128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:30.807164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:04:30.807226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:30.807284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:04:30.807327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:04:30.807407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:30.807452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:04:30.807483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:04:30.809365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:04:30.809520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:04:30.809564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
el: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:04:36.727212Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-03-28T09:04:36.727332Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2025-03-28T09:04:36.727924Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:36.728038Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 17179871338 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:36.728093Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-03-28T09:04:36.728222Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-03-28T09:04:36.728394Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:12438 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 0A4E6392-0BF9-4D28-9464-7B96A5524133 amz-sdk-request: attempt=1 content-length: 73 content-md5: a9Su4FHJt26Hhw4HV0+Ocg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-03-28T09:04:36.852639Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:36.852689Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:04:36.852923Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:36.852959Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 2025-03-28T09:04:36.853911Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:04:36.853971Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 REQUEST: PUT /metadata.json.sha256 HTTP/1.1 HEADERS: Host: localhost:12438 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
A68A478E-A5EC-4DF8-8DDA-BB06CA775B68 amz-sdk-request: attempt=1 content-length: 78 content-md5: 5v+lOCwt7SV92xRPjSiuqQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json.sha256 / / 78 2025-03-28T09:04:36.860852Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-03-28T09:04:36.860976Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-03-28T09:04:36.861007Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-03-28T09:04:36.861044Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-03-28T09:04:36.861082Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:04:36.861181Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710759 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:12438 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 01291E0D-A1C5-4229-9355-760F665D6EA2 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 2025-03-28T09:04:36.869323Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /scheme.pb.sha256 HTTP/1.1 HEADERS: Host: localhost:12438 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 64F250E0-84FD-4EF9-A161-8920550AD5CC amz-sdk-request: attempt=1 content-length: 74 content-md5: NWNhlq1fHKxcSj+x5Xq9NQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb.sha256 / / 74 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:12438 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A75AE83D-69BB-49A3-A183-01740E300C46 amz-sdk-request: attempt=1 content-length: 27 content-md5: CTqKvdXJPw0OgRdlsoR71Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 27 REQUEST: PUT /data_00.csv.sha256 HTTP/1.1 HEADERS: Host: localhost:12438 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FF1D0236-114B-41C6-80AA-C1F80398DF49 amz-sdk-request: attempt=1 content-length: 76 content-md5: gmOXObjloPe2DGxtDsgfpg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.sha256 / 
/ 76 2025-03-28T09:04:36.902640Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 446 RawX2: 17179871599 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-03-28T09:04:36.902722Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-03-28T09:04:36.902888Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 446 RawX2: 17179871599 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-03-28T09:04:36.903018Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 446 RawX2: 17179871599 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 8 RowsProcessed: 1 } 2025-03-28T09:04:36.903098Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:36.903144Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:04:36.903186Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:04:36.903235Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-03-28T09:04:36.903466Z node 4 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:36.905868Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:04:36.906221Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:04:36.906312Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-03-28T09:04:36.906431Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-03-28T09:04:36.906460Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-28T09:04:36.906493Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-03-28T09:04:36.906519Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-28T09:04:36.906549Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 
2025-03-28T09:04:36.906621Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:133:2156] message: TxId: 281474976710759 2025-03-28T09:04:36.906660Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-28T09:04:36.906711Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-03-28T09:04:36.906739Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-03-28T09:04:36.906850Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:04:36.909148Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-03-28T09:04:36.909220Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-03-28T09:04:36.911755Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:04:36.911820Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:476:2437]
TestWaitNotification: OK eventTxId 102
>> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD]
>> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD]
>> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink [GOOD]
>> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink
>> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink [GOOD]
>> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink
>> TExternalDataSourceTest::CreateExternalDataSourceWithProperties [GOOD]
>> TExternalDataSourceTest::DropExternalDataSource
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReadOnlyMode [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141]
2025-03-28T09:04:37.873634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:04:37.873728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:37.873765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:04:37.873798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:04:37.873841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:04:37.873869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:04:37.873928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:37.874001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created:
Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:04:37.874384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:37.962216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:04:37.962276Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:37.970530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:37.970943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:04:37.971144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:04:37.985721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:04:37.985903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:04:37.986491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:37.986700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:37.992986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:37.995288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:37.995363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:37.995672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:04:37.995729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:37.995769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:04:37.996442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:04:38.003187Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 2025-03-28T09:04:38.140382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:38.140686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:38.140900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:04:38.141133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:04:38.141188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:38.144158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:38.144305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:04:38.144517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:38.144609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:04:38.144655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:04:38.144703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:04:38.148723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:38.148799Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:04:38.148870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:04:38.151307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:38.151365Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:38.151403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:38.151464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:04:38.155234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:04:38.158842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:04:38.159094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:04:38.160127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:38.160277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 
134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:38.160347Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:38.160634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:04:38.160686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:38.160853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:38.160924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:04:38.164360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:38.164418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:38.164604Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:38.164669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:04:38.164761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:38.164807Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:04:38.164894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:38.164926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:38.164967Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:38.164998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:38.165032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:04:38.165087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:38.165126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:04:38.165155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:04:38.165226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:38.165261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:04:38.165291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:04:38.168292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
T_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:04:38.511321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:04:38.511389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-28T09:04:38.513976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAccepted TxId: 128 SchemeshardId: 72057594046678944 PathId: 4, at schemeshard: 72057594046678944 2025-03-28T09:04:38.514111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2025-03-28T09:04:38.514372Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:38.514421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:38.514581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:04:38.514708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:38.514753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:455:2413], at schemeshard: 72057594046678944, txId: 128, path id: 1 2025-03-28T09:04:38.514823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:455:2413], at schemeshard: 72057594046678944, txId: 128, path id: 4 2025-03-28T09:04:38.515063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2025-03-28T09:04:38.515114Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 128:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:04:38.515170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 128 ready parts: 1/1 2025-03-28T09:04:38.515323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 128 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:04:38.516145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2025-03-28T09:04:38.516242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 128 2025-03-28T09:04:38.516280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2025-03-28T09:04:38.516317Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-03-28T09:04:38.516356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-28T09:04:38.517300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2025-03-28T09:04:38.517377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 2 PathOwnerId: 72057594046678944, cookie: 128 2025-03-28T09:04:38.517401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 128 2025-03-28T09:04:38.517428Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 2 2025-03-28T09:04:38.517472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:04:38.517533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 128, ready parts: 0/1, is published: true 2025-03-28T09:04:38.519970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 128:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:128 msg type: 269090816 2025-03-28T09:04:38.520156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 128, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 128 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 128 at step: 5000004 2025-03-28T09:04:38.521585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2025-03-28T09:04:38.523033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:38.523186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 128 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:38.523242Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 128:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2025-03-28T09:04:38.523400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 128:0 128 -> 240 2025-03-28T09:04:38.523594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:04:38.523685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-28T09:04:38.524339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2025-03-28T09:04:38.526037Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:38.526079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:38.526244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 128, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:04:38.526439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:38.526486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:455:2413], at schemeshard: 72057594046678944, txId: 128, path id: 1 2025-03-28T09:04:38.526523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:455:2413], at schemeshard: 72057594046678944, txId: 128, path id: 4 FAKE_COORDINATOR: Erasing txId 128 2025-03-28T09:04:38.527019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 128:0, at schemeshard: 72057594046678944 2025-03-28T09:04:38.527071Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 128:0 ProgressState 2025-03-28T09:04:38.527169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#128:0 progress is 1/1 2025-03-28T09:04:38.527206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-03-28T09:04:38.527252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#128:0 progress is 1/1 2025-03-28T09:04:38.527287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-03-28T09:04:38.527326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 128, ready parts: 1/1, is published: false 2025-03-28T09:04:38.527391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 128 ready parts: 1/1 2025-03-28T09:04:38.527428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 128:0 2025-03-28T09:04:38.527488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 128:0 2025-03-28T09:04:38.527559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:04:38.527601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 128, publications: 2, subscribers: 0 2025-03-28T09:04:38.527640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 128, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-03-28T09:04:38.527674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 128, [OwnerId: 72057594046678944, LocalPathId: 4], 3 2025-03-28T09:04:38.528221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128 2025-03-28T09:04:38.528305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 128 2025-03-28T09:04:38.528347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 128 2025-03-28T09:04:38.528398Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-28T09:04:38.528440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-28T09:04:38.529247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 
LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128 2025-03-28T09:04:38.529322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 128 2025-03-28T09:04:38.529354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 128 2025-03-28T09:04:38.529383Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 128, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-03-28T09:04:38.529426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-28T09:04:38.529514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 128, subscribers: 0 2025-03-28T09:04:38.535526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128 2025-03-28T09:04:38.537057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 128
TestModificationResult got TxId: 128, wait until txId: 128
>> TPQTest::TestGetTimestamps [GOOD]
>> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141]
2025-03-28T09:04:37.166821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:04:37.166923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:37.166999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:04:37.167040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:04:37.167083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:04:37.167111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:04:37.167163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:37.167245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:04:37.167567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:37.338476Z node 1
:FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:04:37.338539Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:37.368752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:37.368883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:04:37.369051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:04:37.392040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:04:37.393054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:04:37.393757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:37.394584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:37.401412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:37.403001Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:37.403067Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:37.403314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:04:37.403397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:37.403454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:04:37.403682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:04:37.425015Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:04:37.615113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:37.615321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:37.615538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:04:37.615802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:04:37.615875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:37.619549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:37.619688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:04:37.619992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:37.620050Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:04:37.620093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:04:37.620130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:04:37.622298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:37.622356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:04:37.622393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:04:37.624372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:37.624428Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:37.624472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:37.624527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:04:37.628328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:04:37.630850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:04:37.631067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:04:37.632234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:37.632402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:37.632475Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:37.632744Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:04:37.632798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:37.632983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:37.633072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:04:37.635236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:37.635285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:37.635487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:37.635543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:203:2205], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:04:37.635740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:37.635782Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:04:37.635875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:37.635907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:37.635946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:37.635975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:37.636011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:04:37.636061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:37.636113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:04:37.636138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:04:37.636208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:37.636250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:04:37.636303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:04:37.638291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
ation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:04:38.787759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:04:38.787812Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:04:38.787846Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:04:38.787910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:04:38.787954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:04:38.787986Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2025-03-28T09:04:38.788014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-28T09:04:38.788044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-28T09:04:38.788071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-28T09:04:38.789064Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:38.789143Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:38.789177Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:04:38.789212Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:04:38.789249Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:04:38.790994Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:38.791068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:38.791109Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:04:38.791136Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-28T09:04:38.791164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:04:38.791971Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:38.792039Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:04:38.792068Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:04:38.792093Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-28T09:04:38.792117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:04:38.792172Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:04:38.800353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:04:38.805561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:04:38.806824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:04:38.807025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:04:38.807065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:04:38.807463Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:04:38.807551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:04:38.807584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:337:2328] TestWaitNotification: OK eventTxId 101 2025-03-28T09:04:38.807993Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:04:38.808155Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 198us result status StatusSuccess 2025-03-28T09:04:38.808444Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-03-28T09:04:38.811805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropExternalDataSource Drop { Name: "ExternalDataSource" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:38.811943Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TDropExternalDataSource Propose: opId# 103:0, path# /MyRoot/ExternalDataSource 2025-03-28T09:04:38.812028Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, at schemeshard: 72057594046678944 2025-03-28T09:04:38.816793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:38.816969Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, operation: DROP EXTERNAL DATA SOURCE, path: /MyRoot/ExternalDataSource TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-28T09:04:38.817230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:04:38.817268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:04:38.817669Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:04:38.817760Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:04:38.817811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:345:2336] TestWaitNotification: OK eventTxId 103 2025-03-28T09:04:38.818270Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:04:38.818428Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 178us result status StatusSuccess 2025-03-28T09:04:38.818718Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: 
"ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2025-03-28T09:04:29.992965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:29.993553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:29.993620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001663/r3tmp/tmpS6F7IP/pdisk_1.dat 2025-03-28T09:04:30.466727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:30.529604Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:30.570573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:30.570746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:30.583353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:30.673602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:30.732849Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:04:30.733125Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:30.784634Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:30.784789Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:30.786533Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:04:30.786613Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:04:30.786677Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:04:30.787104Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:30.787244Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:30.787332Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:04:30.799733Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:04:30.843797Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:04:30.843999Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:04:30.844159Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:04:30.844202Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:04:30.844237Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:04:30.844276Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:30.844798Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:04:30.844937Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:04:30.845427Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:04:30.845505Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:30.845549Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:04:30.845600Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:04:30.845735Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:04:30.845890Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:04:30.846137Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:04:30.846245Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:04:30.848155Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:04:30.859041Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:04:30.859186Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:04:31.033080Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:04:31.038042Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:04:31.038130Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:31.038820Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:04:31.038864Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:04:31.038911Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:04:31.039136Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:04:31.039263Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:04:31.039910Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:04:31.039964Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:04:31.041333Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:04:31.041655Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
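For orientation: the ut_erase_rows suite in this section drives the datashard's direct erase-rows transactions (the TTxDirectBase(48) Execute/Complete pairs visible further down), the same mechanism that row TTL uses for background deletion. A minimal YQL sketch of a table with TTL enabled; the table and column names here are illustrative and do not come from the test:

    -- Hypothetical table: once TTL is set, expired rows are removed by the
    -- datashard through the same erase-rows path this test exercises directly.
    CREATE TABLE `/Root/events` (
        id Uint64,
        created_at Timestamp,
        PRIMARY KEY (id)
    );

    ALTER TABLE `/Root/events` SET (
        TTL = Interval("PT1H") ON created_at
    );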
2025-03-28T09:04:31.042878Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:04:31.042930Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:31.043801Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:04:31.043878Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:04:31.044876Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:04:31.044922Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:04:31.044991Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:04:31.045062Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:04:31.045116Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:04:31.045202Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:31.048930Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:04:31.051638Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:04:31.051705Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:04:31.051955Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:04:31.061234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:31.061421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:31.061523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:31.067460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:04:31.074245Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:04:31.229195Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:04:31.233153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:04:31.299005Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:31.766505Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqdzxtaj1trnwsb3bkn5kgkn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmRjYjI4LTM4ODI3ZDg2LTU0NzFkOTgwLWU3Zjk4ZWYy, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:31.772991Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:852:2688], serverId# [1:853:2689], sessionId# [0:0:0] 2025-03-28T09:04:31.773216Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:04:31.785494Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:04: ... 37968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:37.563412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:37.592587Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:666:2570] 2025-03-28T09:04:37.592860Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:37.678501Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:37.678665Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:37.680604Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:04:37.680693Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:04:37.680770Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:04:37.681138Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:37.681280Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:37.681384Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:682:2570] in generation 1 2025-03-28T09:04:37.695260Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:04:37.695347Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:04:37.695451Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:04:37.695543Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:684:2580] 2025-03-28T09:04:37.695587Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:04:37.695634Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:04:37.695705Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:37.696053Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:04:37.696158Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:04:37.696285Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:04:37.696336Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:37.696380Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:04:37.696432Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:04:37.696508Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:662:2567], serverId# [2:673:2574], sessionId# [0:0:0] 2025-03-28T09:04:37.696663Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:04:37.696865Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:04:37.696980Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:04:37.702916Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:04:37.715302Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:04:37.715423Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:04:37.887466Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:704:2594], serverId# [2:705:2595], sessionId# [0:0:0] 2025-03-28T09:04:37.888590Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:04:37.888648Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:37.889306Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:04:37.889360Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:04:37.889409Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:04:37.889680Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:04:37.889812Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:04:37.890097Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:04:37.890159Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:04:37.890592Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:04:37.893165Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:37.894998Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 
72075186224037888 time 0 2025-03-28T09:04:37.895062Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:37.895955Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:04:37.896032Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:04:37.896990Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:04:37.897045Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:04:37.897118Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:04:37.897186Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:04:37.897241Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:04:37.897348Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:37.898325Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:04:37.908782Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:04:37.908893Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:04:37.909990Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:04:37.925401Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-03-28T09:04:37.925615Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-28T09:04:37.947787Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-28T09:04:37.947870Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:37.948215Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-03-28T09:04:37.950319Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:745:2627], serverId# [2:746:2628], sessionId# [0:0:0] 2025-03-28T09:04:37.950476Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-28T09:04:37.950712Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-28T09:04:37.950802Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:37.951032Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:745:2627], serverId# [2:746:2628], sessionId# [0:0:0] 2025-03-28T09:04:37.952886Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:750:2632], serverId# [2:751:2633], sessionId# [0:0:0] 2025-03-28T09:04:37.953041Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 
2025-03-28T09:04:37.953204Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-28T09:04:37.953269Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:37.953501Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:750:2632], serverId# [2:751:2633], sessionId# [0:0:0] 2025-03-28T09:04:37.959601Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:755:2637], serverId# [2:756:2638], sessionId# [0:0:0] 2025-03-28T09:04:37.959787Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-28T09:04:37.959992Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-28T09:04:37.960055Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:37.960293Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:755:2637], serverId# [2:756:2638], sessionId# [0:0:0] 2025-03-28T09:04:37.967510Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:760:2642], serverId# [2:761:2643], sessionId# [0:0:0] 2025-03-28T09:04:37.967731Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-28T09:04:37.968151Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-28T09:04:37.968210Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:37.968485Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:760:2642], serverId# [2:761:2643], sessionId# [0:0:0] 2025-03-28T09:04:37.970600Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:765:2647], serverId# [2:766:2648], sessionId# [0:0:0] 2025-03-28T09:04:37.970791Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-28T09:04:37.970988Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-28T09:04:37.971041Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:37.971276Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:765:2647], serverId# [2:766:2648], sessionId# [0:0:0] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink >> TExternalDataSourceTest::DropExternalDataSource [GOOD] >> TExternalDataSourceTest::DropTableTwice [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge >> DataShardSnapshots::VolatileSnapshotRefreshDiscard [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout >> DataShardTxOrder::RandomPointsAndRanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestGetTimestamps [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is 
[1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-28T09:01:09.769688Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:01:09.778419Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:01:09.778851Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-28T09:01:09.778909Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:01:09.778947Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-28T09:01:09.778995Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:01:09.779037Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:01:09.779098Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:180:2057] recipient: [1:14:2061] 2025-03-28T09:01:09.818551Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:179:2194], now have 1 active actors on pipe 2025-03-28T09:01:09.819100Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:01:09.838270Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-03-28T09:01:09.841592Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-03-28T09:01:09.841779Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 
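The TPQTest case above applies a tablet config with two consumers, user and user1, where user1 is marked important (ImportantClientId) and each partition gets a 102400 B/s write quota. The test writes this config proto straight into the PQ tablet, bypassing DDL; the user-facing YQL for a comparable topic would look roughly like this hedged sketch:

    -- Consumer names, the important flag and the write quota mirror the
    -- "Config update"/"Config applied" entries in this test output.
    CREATE TOPIC `rt3.dc1--asdfgs--topic` (
        CONSUMER user,
        CONSUMER user1 WITH (important = TRUE)
    ) WITH (
        partition_write_speed_bytes_per_second = 102400
    );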
2025-03-28T09:01:09.842614Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "user1" Generation: 1 Important: true } 2025-03-28T09:01:09.842746Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:01:09.843201Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:01:09.843567Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:186:2199] 2025-03-28T09:01:09.846184Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2025-03-28T09:01:09.846265Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:186:2199] 2025-03-28T09:01:09.846317Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:01:09.848851Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-03-28T09:01:09.849023Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-03-28T09:01:09.849078Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-03-28T09:01:09.849119Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2025-03-28T09:01:09.849148Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2025-03-28T09:01:09.849298Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:01:09.849337Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:01:09.849375Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:01:09.849412Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:01:09.849445Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-28T09:01:09.849469Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-28T09:01:09.849490Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser1 2025-03-28T09:01:09.849512Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser1 2025-03-28T09:01:09.849544Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:01:09.849662Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:01:09.850218Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:01:09.850279Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:01:09.850482Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-28T09:01:09.853613Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:01:09.854138Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:193:2204], now have 1 active actors on pipe 2025-03-28T09:01:09.856488Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:196:2206], now have 1 active actors on pipe 2025-03-28T09:01:09.856580Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-03-28T09:01:09.856630Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-03-28T09:01:09.857352Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 0 messageNo: 0 size: 511957 2025-03-28T09:01:09.857851Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 1 messageNo: 0 size: 511957 2025-03-28T09:01:09.858368Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 2 messageNo: 0 size: 511957 2025-03-28T09:01:09.858880Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 3 messageNo: 0 size: 511957 2025-03-28T09:01:09.858976Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client PART message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size: 49324 2025-03-28T09:01:09.859026Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 4 messageNo: 0 size 49324 offset: 0 2025-03-28T09:01:09.859182Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2025-03-28T09:01:09.859328Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-03-28T09:01:09.859370Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-03-28T09:01:09.859735Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:198:2208], now have 1 active actors on pipe 2025-03-28T09:01:09.859867Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-03-28T09:01:09.859911Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-03-28T09:01:09.860043Z node 1 :PERSQUEUE INFO: new Cookie default|8f8a5f30-1f96e29d-d07e7a05-ae02edef_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:01:09.860176Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-28T09:01:09.860290Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:01:09.860600Z node 1 :PERSQUEUE DEBUG: [ ... 
leGenerations: 188 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 188 Important: false } Consumers { Name: "user1" Generation: 188 Important: false } 2025-03-28T09:04:38.197429Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [72:185:2198] 2025-03-28T09:04:38.202018Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [72:185:2198] 2025-03-28T09:04:38.205431Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [72:186:2199] 2025-03-28T09:04:38.207573Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [72:186:2199] 2025-03-28T09:04:38.216647Z node 72 :PERSQUEUE INFO: new Cookie default|fc668576-910eaa98-e9fe4bca-2c00e515_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:04:38.256781Z node 72 :PERSQUEUE INFO: new Cookie default|8c7ce732-635880ff-eccadaff-19b3c90_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [72:107:2139] sender: [72:280:2057] recipient: [72:99:2134] Leader for TabletID 72057594037927937 is [72:107:2139] sender: [72:283:2057] recipient: [72:14:2061] Leader for TabletID 72057594037927937 is [72:107:2139] sender: [72:284:2057] recipient: [72:282:2279] Leader for TabletID 72057594037927937 is [72:285:2280] sender: [72:286:2057] recipient: [72:282:2279] 2025-03-28T09:04:38.313115Z node 72 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:04:38.313197Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:04:38.313905Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [72:334:2321] 2025-03-28T09:04:38.317282Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [72:335:2322] 2025-03-28T09:04:38.328217Z node 72 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:04:38.328309Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [72:335:2322] 2025-03-28T09:04:38.330844Z node 72 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-28T09:04:38.330935Z node 72 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [72:334:2321] Leader for TabletID 72057594037927937 is [72:285:2280] sender: [72:364:2057] recipient: [72:14:2061] 2025-03-28T09:04:38.334134Z node 72 :PERSQUEUE INFO: new Cookie default|ef00a486-985bf1e4-f99c74f3-6f4ca55_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 100 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [72:177:2192] 2025-03-28T09:04:38.361884Z node 72 :PERSQUEUE INFO: Reading Timestamp failed for offset 50 ( 50 ) Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 104 Result { Offset: 100 Data: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" SourceId: "sourceid2" SeqNo: 1 WriteTimestampMS: 172800426 CreateTimestampMS: 100 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } BlobsCachedSize: 1064 SizeLag: 11563 RealReadOffset: 50 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 1086 LastOffset: 100 EndOffset: 104 StartOffset: 1 } } Leader for TabletID 72057594037927937 is [0:0:0] sender: [73:103:2057] recipient: [73:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [73:103:2057] recipient: [73:101:2135] Leader for TabletID 72057594037927937 is [73:107:2139] sender: [73:108:2057] recipient: [73:101:2135] 2025-03-28T09:04:39.173505Z node 73 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:04:39.173586Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [73:149:2057] recipient: [73:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [73:149:2057] recipient: [73:147:2170] Leader for TabletID 72057594037927938 is [73:153:2174] sender: [73:154:2057] recipient: [73:147:2170] Leader for TabletID 72057594037927937 is [73:107:2139] sender: [73:179:2057] recipient: [73:14:2061] 2025-03-28T09:04:39.194104Z node 73 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:04:39.195277Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 189 actor [73:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 10 MaxSizeInPartition: 104857600 
LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 189 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 189 ReadRuleGenerations: 189 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 189 Important: false } Consumers { Name: "user1" Generation: 189 Important: false } 2025-03-28T09:04:39.195970Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [73:185:2198] 2025-03-28T09:04:39.198944Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [73:185:2198] 2025-03-28T09:04:39.201880Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [73:186:2199] 2025-03-28T09:04:39.204042Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [73:186:2199] 2025-03-28T09:04:39.212026Z node 73 :PERSQUEUE INFO: new Cookie default|bb022a8-a68f4973-bc7868ad-d98fac2c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:04:39.248530Z node 73 :PERSQUEUE INFO: new Cookie default|1286e381-b8706186-2a688a5e-14d231aa_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [73:107:2139] sender: [73:280:2057] recipient: [73:99:2134] Leader for TabletID 72057594037927937 is [73:107:2139] sender: [73:283:2057] recipient: [73:14:2061] Leader for TabletID 72057594037927937 is [73:107:2139] sender: [73:284:2057] recipient: [73:282:2279] Leader for TabletID 72057594037927937 is [73:285:2280] sender: [73:286:2057] recipient: [73:282:2279] 2025-03-28T09:04:39.300243Z node 73 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:04:39.300309Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:04:39.301014Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [73:334:2321] 2025-03-28T09:04:39.303382Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [73:335:2322] 2025-03-28T09:04:39.313577Z node 73 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:04:39.313651Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [73:335:2322] 2025-03-28T09:04:39.315099Z node 73 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-28T09:04:39.315156Z node 73 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [73:334:2321] Leader for TabletID 72057594037927937 is [73:285:2280] sender: [73:364:2057] recipient: [73:14:2061] 2025-03-28T09:04:39.317548Z node 73 :PERSQUEUE INFO: new Cookie default|e5afc2f1-5fcd6fde-a24b4fba-9044da80_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 100 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [73:177:2192] 2025-03-28T09:04:39.340344Z node 73 :PERSQUEUE INFO: Reading Timestamp failed for offset 50 ( 50 ) Status: 1 ErrorCode: OK PartitionResponse { CmdReadResult { MaxOffset: 104 Result { Offset: 100 Data: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" SourceId: "sourceid2" SeqNo: 1 WriteTimestampMS: 172800426 CreateTimestampMS: 100 UncompressedSize: 0 PartitionKey: "" ExplicitHash: "" } BlobsCachedSize: 1064 SizeLag: 11563 RealReadOffset: 50 WaitQuotaTimeMs: 0 ReadFromTimestampMs: 0 SizeEstimate: 1086 LastOffset: 100 EndOffset: 104 StartOffset: 1 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 4002, MsgBus: 6420 2025-03-28T09:03:21.106915Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783416445441416:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:21.107033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00132a/r3tmp/tmp7lf3yO/pdisk_1.dat 2025-03-28T09:03:21.612967Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:21.619055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:21.619157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:21.622619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4002, node 1 2025-03-28T09:03:21.714939Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2025-03-28T09:03:21.714953Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:21.714960Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:21.715042Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6420 TClient is connected to server localhost:6420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:22.254223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:22.274667Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:03:24.397415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783429330343975:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:24.397537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:24.440055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:03:24.597756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783429330344082:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:24.597863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:24.621278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:03:24.681074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783429330344161:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:24.681176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:24.681367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783429330344166:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:24.685326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-28T09:03:24.699506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783429330344168:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-28T09:03:24.764596Z node 1 :TX_PROXY ERROR: Actor# [1:7486783429330344219:2447] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 14128, MsgBus: 7670 2025-03-28T09:03:26.335616Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783437300950299:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:26.335658Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00132a/r3tmp/tmpP1NOkl/pdisk_1.dat 2025-03-28T09:03:26.463169Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:26.477838Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:26.485736Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:26.486897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14128, node 2 2025-03-28T09:03:26.546586Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:26.546608Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:26.546614Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:26.546720Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7670 TClient is connected to server localhost:7670 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:27.017263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:29.692370Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783450185852816:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:29.692437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783450185852838:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:29.692504Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:29.697632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:03:29.710528Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486783450185852844:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:03:29.772862Z node 2 :TX_PROXY ERROR: Actor# [2:7486783450185852896:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:03:29.800541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17616, MsgBus: 23353 2025-03-28T09:03:30.897637Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486783452752868262:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:30.897713Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00132a/r3tmp/tmpb84bAn/pdisk_1.dat 2025-03-28T0 ... 8897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:24.219910Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29235, node 10 2025-03-28T09:04:24.399418Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:24.399450Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:24.399461Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:24.399635Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24203 TClient is connected to server localhost:24203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:25.579294Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:04:25.588943Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:04:28.793142Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7486783681430999401:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:28.793242Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:30.590649Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486783711495771147:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:30.590659Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486783711495771155:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:30.590769Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:30.596719Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:04:30.614014Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486783711495771161:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:04:30.681803Z node 10 :TX_PROXY ERROR: Actor# [10:7486783711495771212:2346] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:30.720468Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7486783711495771228:2342], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-03-28T09:04:30.721502Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=OTBkZmQ3YWUtMzczNzBmYmQtNDQ1NDQ0YTUtYjZmODM4YTA=, ActorId: [10:7486783711495771145:2333], ActorState: ExecuteState, TraceId: 01jqdzxn0854sjmgk60q26nn6y, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" Trying to start YDB, gRPC: 21550, MsgBus: 26025 2025-03-28T09:04:31.920459Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7486783716558381204:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:31.920581Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00132a/r3tmp/tmp4oQvXQ/pdisk_1.dat 2025-03-28T09:04:32.130677Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:32.141073Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:32.141184Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:32.143942Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21550, node 11 2025-03-28T09:04:32.315435Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:32.315470Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:32.315485Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:32.315662Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26025 TClient is connected to server localhost:26025 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-28T09:04:33.848008Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:33.859306Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:04:36.918807Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7486783716558381204:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:36.918907Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:37.943763Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7486783742328185658:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:37.943852Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7486783742328185647:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:37.944043Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:37.949428Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:04:37.988487Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7486783742328185661:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:04:38.071696Z node 11 :TX_PROXY ERROR: Actor# [11:7486783746623153011:2345] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:38.097000Z node 11 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [11:7486783746623153028:2342], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2025-03-28T09:04:38.097386Z node 11 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=11&id=NWUyNjVmMTgtZmE2YThmM2UtMmNlMDhhYzYtZDA4ZjQ1NmM=, ActorId: [11:7486783742328185645:2332], ActorState: ExecuteState, TraceId: 01jqdzxx2s3dsk7rmajjzxdz3k, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text"
>> KqpScanArrowFormat::SingleKey
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141]
2025-03-28T09:04:38.682446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-28T09:04:38.682535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:04:38.682573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-28T09:04:38.682606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-28T09:04:38.682643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-28T09:04:38.682672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-28T09:04:38.682770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:04:38.682882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-28T09:04:38.683222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:04:38.784064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" }
2025-03-28T09:04:38.784130Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:04:38.795936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:04:38.796313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-28T09:04:38.796477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-28T09:04:38.820369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-28T09:04:38.820570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-28T09:04:38.821223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-28T09:04:38.821390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-28T09:04:38.830119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:04:38.832416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:04:38.832488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:04:38.832762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-28T09:04:38.832808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:04:38.832847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-28T09:04:38.833411Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-28T09:04:38.839739Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062]
2025-03-28T09:04:38.992987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-28T09:04:38.993324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:04:38.993558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-28T09:04:38.993769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-28T09:04:38.993823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:04:39.000300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-28T09:04:39.001979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-28T09:04:39.002256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:04:39.002314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-28T09:04:39.002388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-28T09:04:39.002456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-28T09:04:39.013114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:04:39.013192Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-28T09:04:39.013228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-28T09:04:39.019501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:04:39.019573Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:04:39.019619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:04:39.019693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-28T09:04:39.023254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-28T09:04:39.029569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-28T09:04:39.029750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-28T09:04:39.030699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-28T09:04:39.030853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:04:39.030913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:04:39.031174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-28T09:04:39.031224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:04:39.031366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-28T09:04:39.031448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-28T09:04:39.036774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:04:39.036831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:04:39.037003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:04:39.037061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-28T09:04:39.037132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:04:39.037183Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-28T09:04:39.037278Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:04:39.037326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:04:39.037365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:04:39.037392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:04:39.037423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-28T09:04:39.037477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:04:39.037519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-28T09:04:39.037548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-28T09:04:39.037614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-28T09:04:39.037646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-28T09:04:39.037691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-28T09:04:39.040324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ...
Id: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 102
2025-03-28T09:04:39.762351Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102
2025-03-28T09:04:39.762386Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6
2025-03-28T09:04:39.762425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-28T09:04:39.763061Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102
2025-03-28T09:04:39.763142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102
2025-03-28T09:04:39.763169Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102
2025-03-28T09:04:39.763194Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2
2025-03-28T09:04:39.763221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-03-28T09:04:39.763280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true
2025-03-28T09:04:39.765554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816
2025-03-28T09:04:39.765671Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 102 at step: 5000003
FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003
2025-03-28T09:04:39.766499Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-28T09:04:39.766610Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 8589936750 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:04:39.766667Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropExternalDataSource TPropose opId# 102:0 HandleReply TEvOperationPlan: step# 5000003
2025-03-28T09:04:39.766797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-03-28T09:04:39.766888Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240
2025-03-28T09:04:39.767081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-28T09:04:39.767141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-03-28T09:04:39.768040Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-03-28T09:04:39.768622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
FAKE_COORDINATOR: Erasing txId 102
2025-03-28T09:04:39.770454Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:04:39.770487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:04:39.770605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-03-28T09:04:39.770992Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:04:39.771034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1
2025-03-28T09:04:39.771069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:208:2210], at schemeshard: 72057594046678944, txId: 102, path id: 2
2025-03-28T09:04:39.771302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944
2025-03-28T09:04:39.771361Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState
2025-03-28T09:04:39.771485Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-03-28T09:04:39.771533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-03-28T09:04:39.771573Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-03-28T09:04:39.771608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-03-28T09:04:39.771643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false
2025-03-28T09:04:39.771680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-03-28T09:04:39.771736Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0
2025-03-28T09:04:39.771771Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0
2025-03-28T09:04:39.771839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-03-28T09:04:39.771876Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0
2025-03-28T09:04:39.771909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7
2025-03-28T09:04:39.771968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615
2025-03-28T09:04:39.772393Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102
2025-03-28T09:04:39.772469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102
2025-03-28T09:04:39.772499Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102
2025-03-28T09:04:39.772538Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615
2025-03-28T09:04:39.772579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1
2025-03-28T09:04:39.772889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944
2025-03-28T09:04:39.772930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
2025-03-28T09:04:39.772988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-28T09:04:39.773218Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102
2025-03-28T09:04:39.773281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102
2025-03-28T09:04:39.773307Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102
2025-03-28T09:04:39.773331Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7
2025-03-28T09:04:39.773355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-28T09:04:39.773418Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0
2025-03-28T09:04:39.777157Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-03-28T09:04:39.777259Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
2025-03-28T09:04:39.777322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
TestModificationResult got TxId: 102, wait until txId: 102
TestWaitNotification wait txId: 102
2025-03-28T09:04:39.777576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion
2025-03-28T09:04:39.777617Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102
2025-03-28T09:04:39.777989Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944
2025-03-28T09:04:39.778075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-03-28T09:04:39.778109Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:333:2324]
TestWaitNotification: OK eventTxId 102
2025-03-28T09:04:39.778558Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-28T09:04:39.778755Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 227us result status StatusPathDoesNotExist
2025-03-28T09:04:39.778913Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> KqpPg::TableSelect+useSink [GOOD]
>> KqpPg::TableSelect-useSink
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-28T09:03:02.357529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-28T09:03:02.357645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:03:02.357695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-28T09:03:02.357752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-28T09:03:02.357803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-28T09:03:02.357850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-28T09:03:02.357904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:03:02.358002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-28T09:03:02.358393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:03:02.493305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:03:02.493374Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:03:02.531405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:03:02.531721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-28T09:03:02.531895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-28T09:03:02.544152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-28T09:03:02.544487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-28T09:03:02.545202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-28T09:03:02.545482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-28T09:03:02.555962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:03:02.557718Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:03:02.557819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:03:02.558037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-28T09:03:02.558134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:03:02.558182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-28T09:03:02.558316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-28T09:03:02.575851Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-28T09:03:02.744943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-28T09:03:02.745159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:03:02.745394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-28T09:03:02.745591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-28T09:03:02.745649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:03:02.750816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-28T09:03:02.750975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-28T09:03:02.751189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:03:02.751241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-28T09:03:02.751272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-28T09:03:02.751295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-28T09:03:02.753170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:03:02.753231Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-28T09:03:02.753263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-28T09:03:02.762125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:03:02.762196Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:03:02.762272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:03:02.762331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-28T09:03:02.770829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-28T09:03:02.774813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-28T09:03:02.775017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-28T09:03:02.776178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-28T09:03:02.776338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:03:02.776403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:03:02.776673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-28T09:03:02.776731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:03:02.776910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-28T09:03:02.777007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-28T09:03:02.779492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:03:02.779543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:03:02.779761Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:03:02.779816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
FAKE_COORDINATOR: Erasing txId 1
2025-03-28T09:03:02.780238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:03:02.780290Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-28T09:03:02.780388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:03:02.780424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:03:02.780461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:03:02.780496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:03:02.780532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-28T09:03:02.780602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:03:02.780642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-28T09:03:02.780673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-28T09:03:02.780762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-28T09:03:02.780802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-28T09:03:02.780857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-28T09:03:02.782978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-28T09:03:02.783115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-28T09:03:02.783169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
e set wakeup after delta# 0 seconds 2025-03-28T09:04:38.539039Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.996000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-03-28T09:04:38.541579Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:300:2289], Recipient [3:310:2297]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-03-28T09:04:38.554243Z node 3 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:19.153000Z 2025-03-28T09:04:38.614104Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T09:04:38.614196Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T09:04:38.614240Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-03-28T09:04:38.614334Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-28T09:04:38.614381Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-28T09:04:38.614548Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-03-28T09:04:38.614679Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-03-28T09:04:38.614756Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-03-28T09:04:38.614851Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:18.000000Z at schemeshard 72057594046678944 2025-03-28T09:04:38.614989Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:04:38.625832Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T09:04:38.625943Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T09:04:38.625987Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:04:39.040253Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:39.040356Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:39.040467Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:310:2297]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:04:39.040671Z node 3 :FLAT_TX_SCHEMESHARD 
TRACE: StateWork, received event# 271124999, Sender [3:124:2150], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:39.040714Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:39.088512Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue wakeup 2025-03-28T09:04:39.088674Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 1, Rows# 100, Deletes# 0, Compaction# 1970-01-01T00:00:18.000000Z}, next wakeup in# 0.000000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-03-28T09:04:39.088831Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 30 seconds 2025-03-28T09:04:39.089112Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [3:124:2150], Recipient [3:310:2297]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 2 } CompactSinglePartedShards: true 2025-03-28T09:04:39.089353Z node 3 :TX_DATASHARD INFO: Started background compaction# 7 of 72075186233409546 tableId# 2 localTid# 1001, requested from [3:124:2150], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-03-28T09:04:39.090604Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 6, ts 1970-01-01T00:00:19.153000Z 2025-03-28T09:04:39.090680Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 6, front# 7 2025-03-28T09:04:39.099386Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1272:3209], Recipient [3:310:2297]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-03-28T09:04:39.099533Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-03-28T09:04:39.100067Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:310:2297], Recipient [3:124:2150]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 6 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 19 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 22109 Memory: 124232 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 41 TableOwnerId: 72057594046678944 FollowerId: 0 2025-03-28T09:04:39.100144Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-28T09:04:39.100198Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 2.2109 2025-03-28T09:04:39.100336Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 
72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 19 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-28T09:04:39.100417Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-28T09:04:39.103544Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:300:2289], Recipient [3:310:2297]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-03-28T09:04:39.112195Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 7, ts 1970-01-01T00:00:20.152000Z 2025-03-28T09:04:39.112286Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 7, front# 7 2025-03-28T09:04:39.112335Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:124:2150]pathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:04:39.112726Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553211, Sender [3:310:2297], Recipient [3:124:2150]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 2 } Status: OK 2025-03-28T09:04:39.112782Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvCompactTableResult 2025-03-28T09:04:39.112864Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 0 seconds 2025-03-28T09:04:39.112922Z node 3 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 2], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 3 ms, with status# 0, next wakeup in# 0.997000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-03-28T09:04:39.117036Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:300:2289], Recipient [3:310:2297]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-03-28T09:04:39.135150Z node 3 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186233409546, tableId# 2, last full compaction# 1970-01-01T00:00:20.152000Z 2025-03-28T09:04:39.189818Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T09:04:39.189906Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T09:04:39.189942Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-03-28T09:04:39.190018Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-28T09:04:39.190061Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-28T09:04:39.190230Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 
data size 13940 row count 100 2025-03-28T09:04:39.190322Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-03-28T09:04:39.190378Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-03-28T09:04:39.190456Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:19.000000Z at schemeshard 72057594046678944 2025-03-28T09:04:39.190579Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:04:39.201250Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T09:04:39.201336Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T09:04:39.201400Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0
>> KikimrIcGateway::TestALterResourcePool [GOOD]
>> KqpScanArrowFormat::AggregateCountStar
>> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD]
>> KqpScanArrowFormat::AllTypesColumns
>> TestProtocols::TestConnectProtocol
>> KqpScanArrowInChanels::AllTypesColumns
>> TInterconnectTest::TestManyEvents
>> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata [GOOD]
>> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:04:39.519883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:04:39.519983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:39.520020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:04:39.520055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:04:39.520096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:04:39.520124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:04:39.520177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:04:39.520264Z node 1
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:04:39.520587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:39.612808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:04:39.612881Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:39.620548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:39.620977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:04:39.621150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:04:39.638112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:04:39.638320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:04:39.639050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:39.639242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:04:39.646613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:39.649069Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:39.649147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:39.649492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:04:39.649552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:39.649602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:04:39.650270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:04:39.657166Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 2025-03-28T09:04:39.841495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:04:39.841801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:39.842014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount 
reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:04:39.842273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:04:39.842333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:39.844848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:39.845020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:04:39.845251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:39.845308Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:04:39.845353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:04:39.845392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:04:39.847543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:39.847613Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:04:39.847654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:04:39.849875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:39.849932Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:39.849980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:39.850064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:04:39.854082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:04:39.856396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:04:39.856578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:04:39.857696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:04:39.857874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { 
TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:39.857947Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:39.858304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:04:39.858370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:04:39.858542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:04:39.858701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:04:39.860983Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:04:39.861035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:04:39.861243Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:04:39.861307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:04:39.861383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:04:39.861443Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:04:39.861538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:39.861586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:39.861651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:04:39.861695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:39.861740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:04:39.861778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:04:39.861816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:04:39.861850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:04:39.861926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:04:39.861963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:04:39.862013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:04:39.864869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
nd EvNotifyTxCompletion 2025-03-28T09:04:40.945603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 2025-03-28T09:04:40.946144Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2025-03-28T09:04:40.946295Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-03-28T09:04:40.946345Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2025-03-28T09:04:40.946393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [2:344:2335] 2025-03-28T09:04:40.946553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-03-28T09:04:40.946603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [2:344:2335] 2025-03-28T09:04:40.946749Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-03-28T09:04:40.946853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-03-28T09:04:40.946879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [2:344:2335] TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 2025-03-28T09:04:40.947392Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:04:40.947633Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 282us result status StatusSuccess 2025-03-28T09:04:40.948008Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 
PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:40.948817Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:04:40.949003Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 205us result status StatusSuccess 2025-03-28T09:04:40.949280Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:40.950046Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:04:40.954217Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 4.13ms result status StatusSuccess 2025-03-28T09:04:40.954747Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: 
true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:40.955510Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:04:40.955743Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 258us result status StatusSuccess 2025-03-28T09:04:40.956025Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:04:40.956590Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:04:40.956758Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 193us result status StatusSuccess 2025-03-28T09:04:40.957047Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: 
"MyExternalDataSource2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestProtocols::TestConnectProtocol [GOOD] >> TestProtocols::TestHTTPCollected >> TInterconnectTest::TestConnectAndDisconnect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestALterResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 29647, MsgBus: 27089 2025-03-28T09:04:28.635323Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783702292317346:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:28.635390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001485/r3tmp/tmptZvviP/pdisk_1.dat 2025-03-28T09:04:29.056870Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:29.068863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:29.069361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:29.073255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29647, node 1 2025-03-28T09:04:29.209846Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:29.209879Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:29.209890Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:29.210042Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27089 TClient is connected to server localhost:27089 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:29.773763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:29.794235Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:04:29.813143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-28T09:04:29.839189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:04:29.855045Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16043, MsgBus: 5676 2025-03-28T09:04:32.360219Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783718700783435:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:32.361171Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001485/r3tmp/tmpXfsSWQ/pdisk_1.dat 2025-03-28T09:04:32.507343Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:32.525160Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:32.525253Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:32.530491Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16043, node 2 2025-03-28T09:04:32.702636Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:32.702661Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:32.702672Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:32.702793Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5676 TClient is connected to server 
localhost:5676 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:33.747591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:33.754714Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:04:33.777624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11210, MsgBus: 10692 2025-03-28T09:04:36.923162Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486783736633851205:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:36.924184Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001485/r3tmp/tmp0yjkPY/pdisk_1.dat 2025-03-28T09:04:37.141082Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:37.146314Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:37.146406Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:37.148576Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11210, node 3 2025-03-28T09:04:37.219158Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:37.219181Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:37.219189Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:37.219306Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10692 TClient is connected to server localhost:10692 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:37.888374Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:37.896808Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:04:37.919082Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:04:37.940752Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePool >> TInterconnectTest::TestSimplePingPong >> TestProtocols::TestHTTPCollected [GOOD] >> TInterconnectTest::TestTraceIdPassThrough >> TInterconnectTest::TestNotifyUndelivered >> TInterconnectTest::TestConnectAndDisconnect [GOOD] >> TInterconnectTest::TestBlobEventPreSerialized >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TInterconnectTest::TestSimplePingPong [GOOD] >> TInterconnectTest::TestSubscribeByFlag >> TInterconnectTest::TestTraceIdPassThrough [GOOD] >> TInterconnectTest::TestNotifyUndelivered [GOOD] >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor |77.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpPg::InsertFromSelect_Simple-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder-useSink >> TInterconnectTest::TestBlobEventPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventUpToMebibytes >> KqpScan::ScanDuringSplit [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead >> TInterconnectTest::TestSubscribeByFlag [GOOD] >> TInterconnectTest::TestReconnect >> TInterconnectTest::TestManyEvents [GOOD] >> TInterconnectTest::TestCrossConnect >> TInterconnectTest::TestBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor [GOOD] >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes >> KqpScan::ScanRetryReadRanges [GOOD] |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestTraceIdPassThrough [GOOD] >> KqpPg::CreateTempTableSerial [GOOD] >> KqpPg::DropSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 18972, MsgBus: 22887 2025-03-28T09:03:25.682530Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783431429446196:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:25.682743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001312/r3tmp/tmpGPTf9y/pdisk_1.dat 2025-03-28T09:03:26.148303Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:26.149342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:26.149455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:26.155201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18972, node 1 2025-03-28T09:03:26.267172Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:26.267194Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:26.267207Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:26.267304Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22887 TClient is connected to server localhost:22887 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:26.828785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:28.989384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783444314348723:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:28.989579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:28.989981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783444314348757:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:28.997085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:03:29.014104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783444314348759:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:03:29.117132Z node 1 :TX_PROXY ERROR: Actor# [1:7486783448609316106:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:03:29.170233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 62226, MsgBus: 14310 2025-03-28T09:03:30.277345Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783454511783783:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:30.277413Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001312/r3tmp/tmpbxJaiJ/pdisk_1.dat 2025-03-28T09:03:30.440273Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:30.456916Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:30.457037Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:30.459400Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62226, node 2 2025-03-28T09:03:30.525335Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:30.525365Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:30.525372Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:30.525495Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14310 TClient is connected to server localhost:14310 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:30.972263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:03:30.978323Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:03:33.678514Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783467396686303:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:33.678587Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783467396686334:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:33.678678Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:33.684190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:03:33.693587Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486783467396686341:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:03:33.801599Z node 2 :TX_PROXY ERROR: Actor# [2:7486783467396686392:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:03:33.823785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22492, MsgBus: 27398 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001312/r3tmp/tmpawQAtR/pdisk_1.dat 2025-03-28T09:03:35.366995Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:03:35.371766Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:35.404808Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:35.404921Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:35.406475Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22492, node 3 2025-03-28T09:03:35.515180Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:35.515201Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:35.515209Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:35.515314Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27398 TClient is connected to server localhost:27398 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVer ... 
39Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001312/r3tmp/tmpXtIzhu/pdisk_1.dat 2025-03-28T09:04:24.641806Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:24.674160Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:24.674282Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:24.676254Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24112, node 10 2025-03-28T09:04:24.740901Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:24.740936Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:24.740947Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:24.741082Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16497 TClient is connected to server localhost:16497 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:25.634763Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:25.648822Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:04:29.490476Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7486783685216345373:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:29.490574Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:30.590545Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486783710986149796:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:30.590759Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:30.593602Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486783710986149832:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:30.600239Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:04:30.619552Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486783710986149834:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:04:30.698723Z node 10 :TX_PROXY ERROR: Actor# [10:7486783710986149887:2347] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:30.745018Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:04:30.891589Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:04:31.010982Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7486783710986150123:2364], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-03-28T09:04:31.013589Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=M2ZlYWE0Y2YtYzkwMjg5ZTctMmM2MzFlODUtYjU3ZTk5ZjA=, ActorId: [10:7486783710986150121:2363], ActorState: ExecuteState, TraceId: 01jqdzxt7qbavk3js9hkhs9r8r, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 25121, MsgBus: 23252 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001312/r3tmp/tmpST6uuc/pdisk_1.dat 2025-03-28T09:04:33.056752Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:33.098398Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:33.098534Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:33.099972Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:33.126674Z node 11 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25121, node 11 2025-03-28T09:04:33.371442Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:33.371476Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:33.371493Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:33.371680Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23252 TClient is connected to server localhost:23252 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:35.169414Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:35.195770Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:04:40.233919Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7486783755583143884:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:40.234242Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:40.234848Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7486783755583143919:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:40.240869Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:04:40.260048Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7486783755583143921:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:04:40.329440Z node 11 :TX_PROXY ERROR: Actor# [11:7486783755583143972:2348] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:40.379631Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:04:40.514023Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:04:40.661602Z node 11 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [11:7486783755583144208:2364], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2025-03-28T09:04:40.662925Z node 11 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=11&id=ZjZjZDlmNWUtYTVmMWM4ZDAtNjhmZjNkYTktNTMwNTE1ZjI=, ActorId: [11:7486783755583144206:2363], ActorState: ExecuteState, TraceId: 01jqdzy3m77m3kf8ge19gx06a4, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: >> TInterconnectTest::TestReconnect [GOOD] >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestPingPongThroughSubChannel |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink >> Cdc::RacyActivateAndEnqueue [GOOD] >> Cdc::RacyCreateAndSend >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] >> KqpPg::AlterSequence [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanRetryReadRanges [GOOD] Test command err: 2025-03-28T09:04:22.852315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:22.852690Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:22.852866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:04:22.853580Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:22.853933Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:22.854166Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a96/r3tmp/tmpeWYVug/pdisk_1.dat 2025-03-28T09:04:23.694099Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:23.924809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:24.043773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:24.043963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:24.050397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:24.050504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:24.069291Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:04:24.070028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:24.070515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:24.433407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:25.278807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1401:2836], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:25.278943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1412:2841], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:25.279062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:25.295250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:04:25.907556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1415:2844], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:04:26.073932Z node 1 :TX_PROXY ERROR: Actor# [1:1543:2913] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:27.285767Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqdzxmnpc2excjjmywrrbac2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzBmMWEwMDgtNzVmYmQ4YzUtNTc2MzIzNTktZDFhYTVlMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- nodeId: 2 2025-03-28T09:04:28.115423Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqdzxpnxbcp53xggqx76rmzx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDJjNGI0YTgtMTA2NDk5MmUtMWQzMWQ4OWYtNDg0MjE0YzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvScan [1:1618:2966] -> [2:1574:2438] -- EvScanData from [2:1623:2445]: pass 2025-03-28T09:04:28.905421Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqdzxpnxbcp53xggqx76rmzx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDJjNGI0YTgtMTA2NDk5MmUtMWQzMWQ4OWYtNDg0MjE0YzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"optional_type":{"item":{"type_id":4}}}}],"rows":[{"items":[{"uint64_value":596400}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":1} 2025-03-28T09:04:28.908925Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-03-28T09:04:38.376563Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:38.376992Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:38.377147Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:04:38.384109Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:38.384879Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:38.385014Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a96/r3tmp/tmpamOJfN/pdisk_1.dat 2025-03-28T09:04:38.757682Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:38.973002Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:39.099331Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:39.099489Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:39.111804Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:39.111923Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:39.137617Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-03-28T09:04:39.138265Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:39.138673Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:39.443363Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:40.089687Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1403:2838], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:40.089833Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1414:2843], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:40.090347Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:40.096599Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:04:40.613737Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1417:2846], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:04:40.706840Z node 3 :TX_PROXY ERROR: Actor# [3:1545:2915] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:41.588308Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqdzy34q5sj5mkytqjvf9j74, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzQ5MDRjN2MtZWYwMmJhMDctNWQ0YjlmZmMtNzE3NGM1ZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- nodeId: 4 2025-03-28T09:04:42.388632Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqdzy4mgexycc52md48saz3q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWU5YjAxMTUtYmVmOGM0MzYtZjUzNTVjNWItYWEzNmQ0YTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root -- EvScan [3:1621:2969] -> [4:1576:2438] -- EvScanData from [4:1625:2445]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"rows":[{"items":[{"uint32_value":2},{"uint32_value":22}]},{"items":[{"uint32_value":21},{"uint32_value":2121}]},{"items":[{"uint32_value":22},{"uint32_value":2222}]},{"items":[{"uint32_value":23},{"uint32_value":2323}]},{"items":[{"uint32_value":24},{"uint32_value":2424}]},{"items":[{"uint32_value":25},{"uint32_value":2525}]},{"items":[{"uint32_value":26},{"uint32_value":2626}]},{"items":[{"uint32_value":27},{"uint32_value":2727}]},{"items":[{"uint32_value":28},{"uint32_value":2828}]},{"items":[{"uint32_value":29},{"uint32_value":2929}]},{"items":[{"uint32_value":40},{"uint32_value":4040}]},{"items":[{"uint32_value":41},{"uint32_value":4141}]},{"items":[{"uint32_value":42},{"uint32_value":4242}]},{"items":[{"uint32_value":43},{"uint32_value":4343}]},{"items":[{"uint32_value":44},{"uint32_value":4444}]},{"items":[{"uint32_value":45},{"uint32_value":4545}]},{"items":[{"uint32_value":46},{"uint32_value":4646}]},{"items":[{"uint32_value":47},{"uint32_value":4747}]},{"items":[{"uint32_value":48},{"uint32_value":4848}]},{"items":[{"uint32_value":49},{"uint32_value":4949}]},{"items":[{"uint32_value":50},{"uint32_value":5050}]}]},"SeqNo":1,"QueryResultIndex":0,"ChannelId":2} -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}]},"SeqNo":2,"QueryResultIndex":0,"ChannelId":2} 2025-03-28T09:04:42.415281Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] Test command err: 2025-03-28T09:04:43.730295Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @201 (null) -> PendingActivation 2025-03-28T09:04:43.730376Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP01 ready to work 2025-03-28T09:04:43.731956Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @201 (null) 
-> PendingActivation 2025-03-28T09:04:43.732046Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP01 ready to work 2025-03-28T09:04:43.732570Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @99 PendingActivation -> PendingNodeInfo 2025-03-28T09:04:43.734208Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP02 configured for host ::1:14296 2025-03-28T09:04:43.734390Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @483 PendingNodeInfo -> PendingConnection 2025-03-28T09:04:43.734919Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH01 starting outgoing handshake 2025-03-28T09:04:43.735095Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-03-28T09:04:43.736167Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH05 connected to peer 2025-03-28T09:04:43.736712Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:58488 2025-03-28T09:04:43.737217Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 0] ICH02 starting incoming handshake 2025-03-28T09:04:43.739590Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 SendExBlock ExRequest Protocol: 2 ProgramPID: 82951 ProgramStartTime: 1688496608072 Serial: 2187173436 ReceiverNodeId: 6 SenderActorId: "[5:2187173436:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 82951" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 82951" AcceptUUID: "Cluster for process with id: 82951" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "\224\r\2016\030O\246\337\272\245Z\030fj\207B\361as\003\006\304&\2662^\223\217\356E\276\024" RequestXxhash: true RequestXdcShuffle: true 2025-03-28T09:04:43.740379Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 5] ICH07 ReceiveExBlock ExRequest Protocol: 2 ProgramPID: 82951 ProgramStartTime: 1688496608072 Serial: 2187173436 ReceiverNodeId: 6 SenderActorId: "[5:2187173436:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 82951" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 82951" AcceptUUID: "Cluster for process with id: 82951" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "\224\r\2016\030O\246\337\272\245Z\030fj\207B\361as\003\006\304&\2662^\223\217\356E\276\024" RequestXxhash: true RequestXdcShuffle: true 2025-03-28T09:04:43.740490Z node 6 :INTERCONNECT WARN: Handshake [6:21:2057] [node 5] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-03-28T09:04:43.740950Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @99 PendingActivation -> PendingNodeInfo 2025-03-28T09:04:43.742336Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP02 configured for host ::1:24453 2025-03-28T09:04:43.742413Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP17 incoming handshake (actor [6:21:2057]) 2025-03-28T09:04:43.742469Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @483 PendingNodeInfo -> PendingConnection 2025-03-28T09:04:43.742559Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply 2025-03-28T09:04:43.742626Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP08 No active sessions, becoming PendingConnection 2025-03-28T09:04:43.742675Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @220 PendingConnection -> PendingConnection 
2025-03-28T09:04:43.743168Z node 6 :INTERCONNECT DEBUG: Handshake [6:21:2057] [node 5] ICH07 SendExBlock ExReply Success { Protocol: 2 ProgramPID: 82951 ProgramStartTime: 1688519935240 Serial: 4269392857 SenderActorId: "[6:4269392857:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 82951" AcceptUUID: "Cluster for process with id: 82951" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true } 2025-03-28T09:04:43.744989Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 ReceiveExBlock ExReply Success { Protocol: 2 ProgramPID: 82951 ProgramStartTime: 1688519935240 Serial: 4269392857 SenderActorId: "[6:4269392857:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 82951" AcceptUUID: "Cluster for process with id: 82951" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true } 2025-03-28T09:04:43.745073Z node 5 :INTERCONNECT WARN: Handshake [5:19:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-03-28T09:04:43.745255Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-03-28T09:04:43.746007Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:58498 2025-03-28T09:04:43.746443Z node 6 :INTERCONNECT DEBUG: Handshake [6:23:2058] [node 0] ICH02 starting incoming handshake 2025-03-28T09:04:43.747275Z node 5 :INTERCONNECT DEBUG: Handshake [5:19:2057] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "\224\r\2016\030O\246\337\272\245Z\030fj\207B\361as\003\006\304&\2662^\223\217\356E\276\024" 2025-03-28T09:04:43.747386Z node 5 :INTERCONNECT INFO: Handshake [5:19:2057] [node 6] ICH04 handshake succeeded 2025-03-28T09:04:43.748394Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2025-03-28T09:04:43.748456Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:19:2057] poison: false 2025-03-28T09:04:43.748506Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @350 PendingConnection -> StateWork 2025-03-28T09:04:43.748689Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP22 created new session: [5:24:2048] 2025-03-28T09:04:43.748757Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS09 handshake done sender: [5:19:2057] self: [5:2187173436:0] peer: [6:4269392857:0] socket: 24 2025-03-28T09:04:43.748815Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS10 traffic start 2025-03-28T09:04:43.748913Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS11 registering socket in PollerActor 2025-03-28T09:04:43.748976Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2025-03-28T09:04:43.749050Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2025-03-28T09:04:43.749108Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2025-03-28T09:04:43.749202Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS04 subscribe for session state for [5:17:2056] 2025-03-28T09:04:43.749355Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS01 InputSession created 2025-03-28T09:04:43.750276Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] 
[node 6] ICIS02 ReceiveData called 2025-03-28T09:04:43.750379Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-03-28T09:04:43.750734Z node 6 :INTERCONNECT INFO: Handshake [6:21:2057] [node 5] ICH04 handshake succeeded 2025-03-28T09:04:43.750963Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded 2025-03-28T09:04:43.751020Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:21:2057] poison: false 2025-03-28T09:04:43.751063Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @350 PendingConnection -> StateWork 2025-03-28T09:04:43.751183Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP22 created new session: [6:26:2048] 2025-03-28T09:04:43.751235Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS09 handshake done sender: [6:21:2057] self: [6:4269392857:0] peer: [5:2187173436:0] socket: 25 2025-03-28T09:04:43.751282Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS10 traffic start 2025-03-28T09:04:43.751357Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS11 registering socket in PollerActor 2025-03-28T09:04:43.751407Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-03-28T09:04:43.751442Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2025-03-28T09:04:43.751496Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-03-28T09:04:43.751586Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called 2025-03-28T09:04:43.751636Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-03-28T09:04:43.751749Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS01 InputSession created 2025-03-28T09:04:43.751796Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called 2025-03-28T09:04:43.751852Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-03-28T09:04:43.751898Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS02 ReceiveData called 2025-03-28T09:04:43.751925Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:25:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-03-28T09:04:43.751988Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called 2025-03-28T09:04:43.752021Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-03-28T09:04:43.752061Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2025-03-28T09:04:43.752108Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2025-03-28T09:04:43.752189Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called 2025-03-28T09:04:43.752220Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-03-28T09:04:43.752250Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2025-03-28T09:04:43.752275Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2025-03-28T09:04:43.752306Z node 6 :INTERCONNECT_SESSION 
DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-03-28T09:04:43.752329Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-03-28T09:04:43.752388Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-03-28T09:04:43.752422Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 0 2025-03-28T09:04:43.752533Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS02 send event from: [5:17:2056] to: [6:18:2056] 2025-03-28T09:04:43.752720Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS22 outgoing packet Serial# 1 Confirm# 0 DataSize# 84 InflightDataAmount# 84 2025-03-28T09:04:43.752812Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 0 2025-03-28T09:04:43.752909Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:27:2048] [node 5] ICIS02 ReceiveData called 2025-03-28T09:04:43.752963Z node 6 :INTERCONNECT ... 318Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS25 shutdown socket, reason# EndOfStream 2025-03-28T09:04:43.755384Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS15 start handshake 2025-03-28T09:04:43.755852Z node 6 :INTERCONNECT DEBUG: Handshake [6:28:2059] [node 5] ICH01 starting outgoing handshake 2025-03-28T09:04:43.756287Z node 5 :INTERCONNECT DEBUG: Handshake [5:29:2058] [node 6] ICH01 starting outgoing handshake 2025-03-28T09:04:43.756404Z node 6 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-03-28T09:04:43.756464Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-03-28T09:04:43.757766Z node 5 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:50934 2025-03-28T09:04:43.758172Z node 6 :INTERCONNECT DEBUG: Handshake [6:28:2059] [node 5] ICH05 connected to peer 2025-03-28T09:04:43.758326Z node 6 :INTERCONNECT DEBUG: Handshake [6:28:2059] [node 5] ICH07 SendExBlock ExRequest HandshakeId: "\025R\020\2575\222J\031*\021[\346f\224 \251\370\010\234\341\232\332)\002&\036\313\345)\251*\215" 2025-03-28T09:04:43.758678Z node 5 :INTERCONNECT DEBUG: Handshake [5:29:2058] [node 6] ICH05 connected to peer 2025-03-28T09:04:43.758842Z node 5 :INTERCONNECT DEBUG: Handshake [5:29:2058] [node 6] ICH07 SendExBlock ExRequest HandshakeId: "\035/t\004\tk\033+\264\270\200\026\006C\347a\337\260\214O\323#s\335\017\277\250\341b\006#\010" 2025-03-28T09:04:43.759281Z node 5 :INTERCONNECT DEBUG: Handshake [5:32:2059] [node 0] ICH02 starting incoming handshake 2025-03-28T09:04:43.759387Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:58502 2025-03-28T09:04:43.759805Z node 6 :INTERCONNECT DEBUG: Handshake [6:33:2060] [node 0] ICH02 starting incoming handshake 2025-03-28T09:04:43.760534Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP09 (actor [6:33:2060]) from: [5:2187173436:0] for: [6:4269392857:0] 2025-03-28T09:04:43.760602Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS08 incoming handshake Self# [5:2187173436:0] Peer# [6:4269392857:0] Counter# 1 LastInputSerial# 1 2025-03-28T09:04:43.760647Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP07 issued incoming handshake reply 2025-03-28T09:04:43.760738Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP09 (actor [5:32:2059]) from: [6:4269392857:0] for: [5:2187173436:0] 2025-03-28T09:04:43.760788Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS08 incoming handshake Self# [6:4269392857:0] 
Peer# [5:2187173436:0] Counter# 1 LastInputSerial# 1 2025-03-28T09:04:43.760833Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP06 reply for incoming handshake (actor [5:32:2059]) is held 2025-03-28T09:04:43.761569Z node 5 :INTERCONNECT DEBUG: ICR04 Host: ::1, RESOLVED address 2025-03-28T09:04:43.762591Z node 5 :INTERCONNECT DEBUG: Handshake [5:29:2058] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "\035/t\004\tk\033+\264\270\200\026\006C\347a\337\260\214O\323#s\335\017\277\250\341b\006#\010" 2025-03-28T09:04:43.762717Z node 5 :INTERCONNECT INFO: Handshake [5:29:2058] [node 6] ICH04 handshake succeeded 2025-03-28T09:04:43.763933Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2025-03-28T09:04:43.764011Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP111 dropped incoming handshake: [5:32:2059] poison: true 2025-03-28T09:04:43.764090Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:29:2058] poison: false 2025-03-28T09:04:43.764154Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @350 StateWork -> StateWork 2025-03-28T09:04:43.764218Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS09 handshake done sender: [5:29:2058] self: [5:2187173436:0] peer: [6:4269392857:0] socket: 29 2025-03-28T09:04:43.764265Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS10 traffic start 2025-03-28T09:04:43.764375Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS11 registering socket in PollerActor 2025-03-28T09:04:43.764448Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2025-03-28T09:04:43.764495Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS06 rewind SendQueue size# 1 LastConfirmed# 1 NextSerial# 2 2025-03-28T09:04:43.764584Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2025-03-28T09:04:43.764695Z node 6 :INTERCONNECT DEBUG: ICListener: [0:0:0] ICL04 Accepted from: ::1:58508 2025-03-28T09:04:43.765198Z node 6 :INTERCONNECT DEBUG: Handshake [6:36:2061] [node 0] ICH02 starting incoming handshake 2025-03-28T09:04:43.766506Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS01 InputSession created 2025-03-28T09:04:43.766627Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called 2025-03-28T09:04:43.766745Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-03-28T09:04:43.768495Z node 6 :INTERCONNECT NOTICE: Proxy [6:9:2048] [node 5] ICP25 outgoing handshake failed, temporary: 0 explanation: outgoing handshake Peer# ::1(::1:24453) Socket error# connection unexpectedly closed state# ReceiveResponse processed# 0 remain# 52 incoming: [6:33:2060] held: no 2025-03-28T09:04:43.768559Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP052 dropped outgoing handshake: [6:28:2059] poison: false 2025-03-28T09:04:43.768611Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP28 other handshake is still going on 2025-03-28T09:04:43.769075Z node 6 :INTERCONNECT INFO: Handshake [6:33:2060] [node 5] ICH04 handshake succeeded 2025-03-28T09:04:43.769276Z node 6 :INTERCONNECT INFO: Proxy [6:9:2048] [node 5] ICP19 incoming handshake succeeded 2025-03-28T09:04:43.769332Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP111 dropped incoming handshake: [6:33:2060] poison: false 
2025-03-28T09:04:43.769409Z node 6 :INTERCONNECT DEBUG: Proxy [6:9:2048] [node 5] ICP77 @350 StateWork -> StateWork 2025-03-28T09:04:43.769462Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS09 handshake done sender: [6:33:2060] self: [6:4269392857:0] peer: [5:2187173436:0] socket: 31 2025-03-28T09:04:43.769515Z node 6 :INTERCONNECT_SESSION INFO: Session [6:26:2048] [node 5] ICS10 traffic start 2025-03-28T09:04:43.769624Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS11 registering socket in PollerActor 2025-03-28T09:04:43.769676Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-03-28T09:04:43.769718Z node 6 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 5] ICOCH98 Dropping confirmed messages 2025-03-28T09:04:43.769776Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets 2025-03-28T09:04:43.769833Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 1 NextSerial# 2 2025-03-28T09:04:43.769869Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-03-28T09:04:43.769916Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called 2025-03-28T09:04:43.769971Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-03-28T09:04:43.770061Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS01 InputSession created 2025-03-28T09:04:43.770303Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called 2025-03-28T09:04:43.770376Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err# 2025-03-28T09:04:43.770473Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-03-28T09:04:43.770518Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-03-28T09:04:43.770636Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called 2025-03-28T09:04:43.770716Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-03-28T09:04:43.770774Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called 2025-03-28T09:04:43.770801Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-03-28T09:04:43.770837Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2025-03-28T09:04:43.770880Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2025-03-28T09:04:43.770926Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS02 ReceiveData called 2025-03-28T09:04:43.770955Z node 6 :INTERCONNECT_SESSION DEBUG: InputSession [6:37:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2025-03-28T09:04:43.770988Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2025-03-28T09:04:43.771015Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 1 2025-03-28T09:04:43.771122Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-03-28T09:04:43.771166Z node 6 
:INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-03-28T09:04:43.771220Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS02 send event from: [6:18:2056] to: [5:17:2056] 2025-03-28T09:04:43.771324Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS22 outgoing packet Serial# 2 Confirm# 2 DataSize# 84 InflightDataAmount# 84 2025-03-28T09:04:43.771403Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-03-28T09:04:43.771476Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS02 ReceiveData called 2025-03-28T09:04:43.771518Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# 106 num# 1 err# 2025-03-28T09:04:43.771605Z node 5 :INTERCONNECT_SESSION DEBUG: InputSession [5:35:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2025-03-28T09:04:43.771662Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-03-28T09:04:43.771714Z node 6 :INTERCONNECT_SESSION DEBUG: Session [6:26:2048] [node 5] ICS23 confirm count: 1 2025-03-28T09:04:43.771753Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 2 2025-03-28T09:04:43.771791Z node 5 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 6] ICOCH98 Dropping confirmed messages 2025-03-28T09:04:43.771860Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS24 exit InflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 1 packets 2025-03-28T09:04:43.771910Z node 5 :INTERCONNECT_SESSION DEBUG: Session [5:24:2048] [node 6] ICS23 confirm count: 2 2025-03-28T09:04:43.772003Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS01 socket: 29 reason# 2025-03-28T09:04:43.772057Z node 5 :INTERCONNECT INFO: Proxy [5:1:2048] [node 6] ICP30 unregister session Session# [5:24:2048] VirtualId# [5:2187173436:0] 2025-03-28T09:04:43.772119Z node 5 :INTERCONNECT DEBUG: Proxy [5:1:2048] [node 6] ICP77 @201 StateWork -> PendingActivation 2025-03-28T09:04:43.772175Z node 5 :INTERCONNECT_SESSION INFO: Session [5:24:2048] [node 6] ICS25 shutdown socket, reason# 2025-03-28T09:04:43.772279Z node 5 :INTERCONNECT_SESSION DEBUG: OutputChannel 0 [node 6] ICOCH89 Notyfying about Undelivered messages! 
NotYetConfirmed size: 0, Queue size: 0 >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPointsAndRanges [GOOD] Test command err: 2025-03-28T09:01:42.161453Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:01:42.231255Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:01:42.231825Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:01:42.232099Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:01:42.610434Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:01:43.627352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:01:43.627708Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:01:43.830227Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:01:43.848518Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:01:43.850095Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:01:43.850180Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:01:43.850240Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:01:43.850655Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:01:43.850771Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:01:43.850845Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:01:44.046011Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:01:44.176256Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:01:44.176476Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:01:44.176581Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:01:44.176615Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:01:44.176645Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:01:44.176690Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:01:44.176948Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:44.177006Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:44.177257Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:01:44.177378Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:01:44.177455Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:01:44.177515Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 
9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:01:44.177551Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:01:44.177582Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:01:44.177610Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:01:44.177648Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:01:44.177689Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:01:44.177795Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:44.177831Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:44.177877Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:01:44.192316Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:01:44.192395Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:01:44.192485Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:01:44.192630Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:01:44.192670Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:01:44.192712Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:01:44.192756Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:01:44.192832Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:01:44.192865Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:01:44.192897Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:01:44.193197Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:01:44.193253Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:01:44.193285Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:01:44.193314Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:01:44.193375Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:01:44.193418Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:01:44.193448Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:01:44.193485Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:01:44.193515Z node 1 :TX_DATASHARD TRACE: 
Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:01:44.244755Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:01:44.245052Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:01:44.245396Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:01:44.245915Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:01:44.246269Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:01:44.255867Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:44.256844Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:01:44.257234Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:01:44.263112Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:01:44.263152Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:01:44.265269Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:01:44.265924Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:01:44.266366Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:01:44.277005Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:01:44.335049Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:01:44.335145Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:01:44.335426Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:44.335476Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:01:44.335533Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:01:44.335588Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:01:44.335631Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:01:44.335668Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:01:44.335701Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:01:44.335744Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:01:44.335795Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:01:44.335834Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit 
LoadTxDetails 2025-03-28T09:01:44.335865Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:01:44.336071Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:01:44.336109Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:01:44.336131Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:01:44.336156Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:01:44.336177Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:01:44.336249Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:01:44.336271Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:01:44.336304Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:01:44.336334Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:01:44.336406Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:01:44.336444Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:01:44.336488Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:01:44.336533Z node 1 :TX_D ... :TX_DATASHARD DEBUG: Found ready candidate operation [1000004:403] at 9437186 for LoadAndWaitInRS 2025-03-28T09:04:37.551451Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437186 on unit LoadAndWaitInRS 2025-03-28T09:04:37.551480Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437186 is Executed 2025-03-28T09:04:37.551509Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437186 executing on unit LoadAndWaitInRS 2025-03-28T09:04:37.551538Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437186 to execution unit ExecuteDataTx 2025-03-28T09:04:37.551567Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437186 on unit ExecuteDataTx 2025-03-28T09:04:37.553934Z node 4 :TX_DATASHARD TRACE: Executed operation [1000004:403] at tablet 9437186 with status COMPLETE 2025-03-28T09:04:37.553999Z node 4 :TX_DATASHARD TRACE: Datashard execution counters for [1000004:403] at 9437186: {NSelectRow: 5, NSelectRange: 5, NUpdateRow: 6, NEraseRow: 0, SelectRowRows: 4, SelectRowBytes: 32, SelectRangeRows: 52, SelectRangeBytes: 416, UpdateRowBytes: 45, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:04:37.554058Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437186 is ExecutedNoMoreRestarts 2025-03-28T09:04:37.554087Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437186 executing on unit ExecuteDataTx 2025-03-28T09:04:37.554120Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437186 to execution unit CompleteOperation 2025-03-28T09:04:37.554152Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437186 on unit CompleteOperation 2025-03-28T09:04:37.554407Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437186 is DelayComplete 2025-03-28T09:04:37.554441Z node 4 :TX_DATASHARD 
TRACE: Advance execution plan for [1000004:403] at 9437186 executing on unit CompleteOperation 2025-03-28T09:04:37.554472Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437186 to execution unit CompletedOperations 2025-03-28T09:04:37.554502Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437186 on unit CompletedOperations 2025-03-28T09:04:37.554538Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437186 is Executed 2025-03-28T09:04:37.554563Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437186 executing on unit CompletedOperations 2025-03-28T09:04:37.554591Z node 4 :TX_DATASHARD TRACE: Execution plan for [1000004:403] at 9437186 has finished 2025-03-28T09:04:37.554623Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:37.554649Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-03-28T09:04:37.554676Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-03-28T09:04:37.554738Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-03-28T09:04:37.574213Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:04:37.574287Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:402] at 9437186 on unit CompleteOperation 2025-03-28T09:04:37.574354Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 402] from 9437186 at tablet 9437186 send result to client [4:99:2134], exec latency: 4 ms, propose latency: 6 ms 2025-03-28T09:04:37.574431Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 399} 2025-03-28T09:04:37.574473Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:04:37.574799Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:04:37.574840Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437186 on unit StoreAndSendOutRS 2025-03-28T09:04:37.574877Z node 4 :TX_DATASHARD DEBUG: Send RS 400 at 9437186 from 9437186 to 9437184 txId 403 2025-03-28T09:04:37.574930Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:04:37.574963Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-28T09:04:37.574991Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:04:37.575017Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437186 on unit CompleteOperation 2025-03-28T09:04:37.575064Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437186 at tablet 9437186 send result to client [4:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:04:37.575119Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 400} 2025-03-28T09:04:37.575150Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:04:37.575837Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:455:2397], Recipient [4:344:2311]: {TEvReadSet step# 1000004 txid# 402 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 399} 2025-03-28T09:04:37.575891Z node 4 :TX_DATASHARD TRACE: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2025-03-28T09:04:37.575927Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437186 consumer 9437186 txId 402 2025-03-28T09:04:37.576151Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [4:455:2397], Recipient [4:234:2227]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletProducer# 9437186 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2025-03-28T09:04:37.576191Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:04:37.576229Z node 4 :TX_DATASHARD DEBUG: Receive RS at 9437184 source 9437186 dest 9437184 producer 9437186 txId 403 2025-03-28T09:04:37.576309Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437184 got read set: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletProducer# 9437186 ReadSet.Size()# 7 Seqno# 400 Flags# 0} 2025-03-28T09:04:37.576358Z node 4 :TX_DATASHARD TRACE: Filled readset for [1000004:403] from=9437186 to=9437184origin=9437186 2025-03-28T09:04:37.576429Z node 4 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-28T09:04:37.576509Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:455:2397], Recipient [4:344:2311]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437185 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 400} 2025-03-28T09:04:37.576540Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:04:37.576571Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437186 consumer 9437186 txId 403 2025-03-28T09:04:37.576967Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [4:234:2227], Recipient [4:234:2227]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:04:37.577014Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:04:37.577073Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:04:37.577112Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:04:37.577151Z node 4 :TX_DATASHARD DEBUG: Found ready candidate operation [1000004:403] at 9437184 for LoadAndWaitInRS 2025-03-28T09:04:37.577183Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit LoadAndWaitInRS 2025-03-28T09:04:37.577221Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is Executed 2025-03-28T09:04:37.577255Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit LoadAndWaitInRS 2025-03-28T09:04:37.577290Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit ExecuteDataTx 2025-03-28T09:04:37.577321Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit ExecuteDataTx 2025-03-28T09:04:37.581300Z node 4 :TX_DATASHARD TRACE: Executed operation [1000004:403] at tablet 9437184 with status COMPLETE 2025-03-28T09:04:37.581393Z node 4 :TX_DATASHARD TRACE: Datashard execution counters for [1000004:403] at 9437184: {NSelectRow: 6, NSelectRange: 6, NUpdateRow: 9, NEraseRow: 0, SelectRowRows: 5, SelectRowBytes: 40, SelectRangeRows: 219, SelectRangeBytes: 1752, UpdateRowBytes: 69, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:04:37.581480Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 
9437184 is ExecutedNoMoreRestarts 2025-03-28T09:04:37.581518Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit ExecuteDataTx 2025-03-28T09:04:37.581550Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit CompleteOperation 2025-03-28T09:04:37.581586Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit CompleteOperation 2025-03-28T09:04:37.581883Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is DelayComplete 2025-03-28T09:04:37.581923Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit CompleteOperation 2025-03-28T09:04:37.581953Z node 4 :TX_DATASHARD TRACE: Add [1000004:403] at 9437184 to execution unit CompletedOperations 2025-03-28T09:04:37.581981Z node 4 :TX_DATASHARD TRACE: Trying to execute [1000004:403] at 9437184 on unit CompletedOperations 2025-03-28T09:04:37.582021Z node 4 :TX_DATASHARD TRACE: Execution status for [1000004:403] at 9437184 is Executed 2025-03-28T09:04:37.582052Z node 4 :TX_DATASHARD TRACE: Advance execution plan for [1000004:403] at 9437184 executing on unit CompletedOperations 2025-03-28T09:04:37.582081Z node 4 :TX_DATASHARD TRACE: Execution plan for [1000004:403] at 9437184 has finished 2025-03-28T09:04:37.582113Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:37.582141Z node 4 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:04:37.582172Z node 4 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:04:37.582200Z node 4 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:04:37.597208Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:04:37.597281Z node 4 :TX_DATASHARD TRACE: Complete execution for [1000004:403] at 9437184 on unit CompleteOperation 2025-03-28T09:04:37.597348Z node 4 :TX_DATASHARD DEBUG: Complete [1000004 : 403] from 9437184 at tablet 9437184 send result to client [4:99:2134], exec latency: 4 ms, propose latency: 5 ms 2025-03-28T09:04:37.597419Z node 4 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2025-03-28T09:04:37.597475Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:04:37.597905Z node 4 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [4:234:2227], Recipient [4:455:2397]: {TEvReadSet step# 1000004 txid# 403 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 400} 2025-03-28T09:04:37.597951Z node 4 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:04:37.597986Z node 4 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 403 >> BasicStatistics::SimpleGlobalIndex >> BasicStatistics::TwoTables |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpScan::ScanDuringSplitThenMerge [GOOD] >> KqpScan::ScanPg >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink [GOOD] >> 
DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink >> BasicStatistics::Simple >> KqpPg::EquiJoin-useSink [GOOD] >> KqpPg::ExplainColumnsReorder >> KqpScanArrowFormat::SingleKey [GOOD] >> KqpScanArrowFormat::JoinWithParams |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink >> BasicStatistics::ServerlessGlobalIndex |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpScanArrowFormat::AllTypesColumns [GOOD] >> KqpScanArrowFormat::AllTypesColumnsCellvec >> KqpScanArrowInChanels::AggregateNoColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpScanArrowInChanels::AllTypesColumns [GOOD] >> KqpScanArrowInChanels::SingleKey |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpWorkloadService::TestCpuLoadThreshold [GOOD] >> KqpWorkloadService::TestCpuLoadThresholdRefresh >> KqpScanArrowFormat::AggregateCountStar [GOOD] >> KqpScanArrowFormat::AggregateByColumn >> BasicStatistics::Serverless >> DistributedEraseTests::ConditionalEraseRowsCheckLimits [GOOD] >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex >> KqpWorkloadServiceTables::TestLeaseExpiration [GOOD] >> KqpWorkloadServiceTables::TestLeaseUpdates >> KqpPg::InsertFromSelect_NoReorder-useSink [GOOD] >> KqpPg::InsertFromSelect_Serial+useSink |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpPg::DropSequence [GOOD] >> KqpPg::DeleteWithQueryService+useSink |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> Cdc::RacyCreateAndSend [GOOD] >> Cdc::RacySplitAndDropTable >> KqpPg::ExplainColumnsReorder [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink >> KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpScanArrowInChanels::AggregateCountStar |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> 
BasicStatistics::TwoServerlessDbs >> KqpPg::TableArrayInsert-useSink [GOOD] >> BackupRestore::PrefixedVectorIndex [GOOD] >> KqpPg::Returning+useSink >> KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::JoinWithParams >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] Leader for TabletID 72057594046678944 is [1:235:2153] sender: [1:236:2060] recipient: [1:218:2142] 2025-03-28T09:02:49.934220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:02:49.934314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:49.934353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:02:49.934386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:02:49.934445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:02:49.934499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:02:49.934575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:49.934671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:02:49.935106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:50.017033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:02:50.017098Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:50.031923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:50.032089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:02:50.032282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:02:50.039605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:02:50.040115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:02:50.040763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:50.041016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:50.043437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2025-03-28T09:02:50.044803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:50.044867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:50.045047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:02:50.045100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:50.045140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:02:50.045294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:02:50.052548Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:235:2153] sender: [1:349:2060] recipient: [1:17:2064] 2025-03-28T09:02:50.188858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:02:50.189122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:50.189343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:02:50.189561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:02:50.189622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:50.196015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:50.196173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:02:50.196497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:50.196584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:02:50.196628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:02:50.196685Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:02:50.199634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:50.199714Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:02:50.199749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:02:50.204053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-03-28T09:02:50.204124Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:50.204163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:50.204211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:02:50.207818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:02:50.210049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:02:50.210283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:02:50.211284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:50.211445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 242 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:02:50.211495Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:50.211801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:02:50.211861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:02:50.212072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:02:50.212146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:02:50.214644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:50.214709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:50.214906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:50.214967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:316:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:02:50.215350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:02:50.215394Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 
2025-03-28T09:02:50.215485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:50.215514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:50.215553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:02:50.215585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:50.215619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:02:50.215669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:02:50.215706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:02:50.215730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:02:50.215804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:02:50.215835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:02:50.215864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:02:50.217624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:50.217738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:02:50.217785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:50.404645Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:50.404743Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:50.404777Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:50.783093Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:50.783171Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:50.783259Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:50.783294Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:51.174128Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:51.174224Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:51.174334Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:51.174373Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:51.526395Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:51.526476Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:51.526556Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:51.526585Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:51.922824Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:51.922900Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:51.922976Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:51.923009Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:52.283004Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:52.283094Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:52.283188Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:52.283218Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:52.633537Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:52.633635Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:52.633760Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:52.633803Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:53.023104Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:53.023185Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:53.023267Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:53.023298Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:53.393674Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:53.393771Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:53.393865Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:53.393896Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:53.781851Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:53.781931Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:04:53.782042Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [7:235:2153], Recipient [7:235:2153]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:53.782097Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:04:53.831487Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1080:2837], Recipient [7:235:2153]: NKikimrSchemeOp.TDescribePath 
Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-03-28T09:04:53.831577Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-28T09:04:53.831728Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:04:53.831997Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable" took 254us result status StatusPathDoesNotExist 2025-03-28T09:04:53.832185Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:04:53.832799Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1081:2838], Recipient [7:235:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-03-28T09:04:53.832870Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-28T09:04:53.832990Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:04:53.833164Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp" took 178us result status StatusPathDoesNotExist 2025-03-28T09:04:53.833304Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:04:53.833823Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:1082:2839], Recipient [7:235:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true } 
2025-03-28T09:04:53.833879Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-28T09:04:53.833974Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:04:53.834141Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable/ValueIndex" took 169us result status StatusPathDoesNotExist 2025-03-28T09:04:53.834277Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::ExplainColumnsReorder [GOOD] Test command err: Trying to start YDB, gRPC: 29360, MsgBus: 22353 2025-03-28T09:03:27.598771Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783441685205907:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:27.598832Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001300/r3tmp/tmpxhSn3j/pdisk_1.dat 2025-03-28T09:03:27.945191Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29360, node 1 2025-03-28T09:03:28.036251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:28.036354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:28.038091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:03:28.055080Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:28.055099Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:28.055105Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:28.055187Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22353 TClient is connected to server localhost:22353 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:28.663789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:30.831888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783454570108464:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:30.831973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783454570108456:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:30.832099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:30.836562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:03:30.853011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783454570108470:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:03:30.944146Z node 1 :TX_PROXY ERROR: Actor# [1:7486783454570108521:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 65243, MsgBus: 2088 2025-03-28T09:03:31.934623Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783457440498749:2207];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001300/r3tmp/tmpnV5whw/pdisk_1.dat 2025-03-28T09:03:32.054567Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:03:32.172888Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:32.191945Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:32.192036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:32.199306Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65243, node 2 2025-03-28T09:03:32.279345Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:32.279360Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:32.279367Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:32.279445Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2088 TClient is connected to server localhost:2088 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:32.787060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:35.293841Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783474620368443:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:35.293922Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783474620368433:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:35.294043Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:35.297652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:03:35.311347Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486783474620368447:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:03:35.416560Z node 2 :TX_PROXY ERROR: Actor# [2:7486783474620368498:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 4094, MsgBus: 10371 2025-03-28T09:03:36.157401Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486783479717402602:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:36.157461Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001300/r3tmp/tmpY0tc7G/pdisk_1.dat 2025-03-28T09:03:36.327956Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:36.355694Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:36.355779Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:36.358140Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4094, node 3 2025-03-28T09:03:36.446477Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:36.446498Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:36.446508Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:36.446630Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10371 TClient is connected to server localhost:10371 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } D ... n't have access permissions } 2025-03-28T09:04:33.878857Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-28T09:04:33.912062Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486783722983127932:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-28T09:04:33.995330Z node 10 :TX_PROXY ERROR: Actor# [10:7486783722983127984:2451] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 12744, MsgBus: 19838 2025-03-28T09:04:38.000853Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7486783739767842650:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:38.000948Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001300/r3tmp/tmpeJRSis/pdisk_1.dat 2025-03-28T09:04:38.303327Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:38.306145Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:38.306265Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:38.308928Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12744, node 11 2025-03-28T09:04:38.383389Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:38.383424Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:38.383436Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:38.383592Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19838 TClient is connected to server localhost:19838 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:39.436046Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:04:39.451846Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:04:43.007051Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7486783739767842650:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:43.007164Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:44.074676Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7486783769832614400:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:44.074859Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:44.098382Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:04:44.163189Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:04:44.244325Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7486783769832614573:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:44.244446Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:44.244857Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7486783769832614578:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:44.249546Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-28T09:04:44.269229Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7486783769832614580:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-28T09:04:44.354999Z node 11 :TX_PROXY ERROR: Actor# [11:7486783769832614632:2451] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:52.290505Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:311:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:52.290941Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:52.291275Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001300/r3tmp/tmpQUTSyq/pdisk_1.dat 2025-03-28T09:04:52.777160Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:52.825958Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:52.869093Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:52.869669Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:52.883935Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:52.985150Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:644:2552], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:52.985260Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:654:2557], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:52.985369Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:52.991293Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-28T09:04:53.133478Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:658:2560], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-28T09:04:53.183370Z node 12 :TX_PROXY ERROR: Actor# [12:730:2601] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } PreparedQuery: "2381e53e-f1591c84-65b5d6de-1deebcaf" QueryAst: "(\n(let $1 (PgType \'int4))\n(let $2 \'(\'(\'\"_logical_id\" \'218) \'(\'\"_id\" \'\"b7dca400-847e20d4-b096b240-2e0f7771\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $3 (DqPhyStage \'() (lambda \'() (Iterator (AsList (AsStruct \'(\'\"x\" (PgConst \'1 $1)) \'(\'\"y\" (PgConst \'2 $1)))))) $2))\n(let $4 (DqCnResult (TDqOutput $3 \'\"0\") \'(\'\"y\" \'\"x\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($3) \'($4) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"x\" $1) \'(\'\"y\" $1))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" QueryPlan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{x: \\\"1\\\",y: \\\"2\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}" YdbResults { columns { name: "y" type { pg_type { oid: 23 } } } columns { name: "x" type { pg_type { oid: 23 } } } } QueryDiagnostics: "" |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpScanArrowFormat::AggregateByColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumn |77.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut >> KqpPg::AlterColumnSetDefaultFromSequence [GOOD] >> KqpPg::CreateTableIfNotExists_GenericQuery >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties |77.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |77.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.3%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::PrefixedVectorIndex [GOOD] Test command err: 2025-03-28T09:02:50.735327Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783281875383699:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:50.739231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpa31zeT/pdisk_1.dat 2025-03-28T09:02:51.587636Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:51.733296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:51.733486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:51.738795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12547, node 1 2025-03-28T09:02:52.025514Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:52.025536Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:52.025543Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:52.025649Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19120 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:52.400546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:55.105508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783303350221278:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:55.105627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:55.320962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpNqBXoB/"Create temporary directory "/Root/~backup_20250328T090255" in databaseProcess "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpNqBXoB/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250328T090255/table" }2025-03-28T09:02:55.730260Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783281875383699:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:55.730327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Backup table "/Root/~backup_20250328T090255/table" to "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpNqBXoB/table"Describe table "/Root/~backup_20250328T090255/table"Write scheme into "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpNqBXoB/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpNqBXoB/table/permissions.pb"Read table "/Root/~backup_20250328T090255/table"Write data into "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpNqBXoB/table/data_00.csv"Drop table "/Root/~backup_20250328T090255/table"Remove temporary directory "/Root/~backup_20250328T090255" in database2025-03-28T09:02:56.088382Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-28T09:02:56.101305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710664:0, at schemeshard: 72057594046644480 Backup completed successfully2025-03-28T09:02:56.163257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783307645189169:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:56.163336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Restore "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpNqBXoB/" to "/Root"2025-03-28T09:02:56.279170Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpNqBXoB/" to "/Root"Process "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpNqBXoB/table"Read scheme from "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpNqBXoB/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpNqBXoB/table" to "/Root/table"2025-03-28T09:02:56.347302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpNqBXoB/table/data_00.csv"Restore ACL "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpNqBXoB/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpNqBXoB/table/permissions.pb"2025-03-28T09:02:56.530451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-03-28T09:02:59.181542Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783322332298317:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:59.181617Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001776/r3tmp/tmprh7Djv/pdisk_1.dat 2025-03-28T09:02:59.401627Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:59.456040Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:59.456138Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:59.460678Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15485, node 4 2025-03-28T09:02:59.612087Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:59.612110Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:59.612117Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:59.612271Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1363 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:59.935991Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:03.958882Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783339512168603:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:03.959007Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:03.989471Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:03:04.123504Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783343807136132:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:04.123598Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:04.163470Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486783322332298317:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:04.163579Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path e ... WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710738:0, at schemeshard: 72057594046644480 2025-03-28T09:04:10.934320Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710747:0, at schemeshard: 72057594046644480 2025-03-28T09:04:12.120258Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710756:0, at schemeshard: 72057594046644480 2025-03-28T09:04:13.282252Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710761:0, at schemeshard: 72057594046644480 2025-03-28T09:04:16.281541Z node 19 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=19&id=NWE4Y2QxZjMtMmFiYWM5OWYtOTNiYjk5ZWYtYTY5OGUzZTU=, ActorId: [19:7486783619426170336:2926], ActorState: ExecuteState, TraceId: 01jqdzx57bd4bhav1axtdv1286, Create QueryResponse for error on request, msg: 2025-03-28T09:04:16.284458Z node 19 :KQP_EXECUTER ERROR: TxId: 281474976710777. Ctx: { TraceId: 01jqdzx57bd4bhav1axtdv1286, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=19&id=NWE4Y2QxZjMtMmFiYWM5OWYtOTNiYjk5ZWYtYTY5OGUzZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore failed: [ {
: Info: path: /home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpjg3Vck/externalDataSource } {
: Error: Secret "secret" does not exist or you do not have access permissions } ]Cleanup 2025-03-28T09:04:19.097266Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7486783664661505645:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:19.097380Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpcG7PCW/pdisk_1.dat 2025-03-28T09:04:19.448932Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:19.496938Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:19.497087Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:19.526382Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25324, node 22 2025-03-28T09:04:19.732383Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:19.732414Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:19.732427Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:19.732628Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15555 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:20.198965Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:24.098853Z node 22 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[22:7486783664661505645:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:24.098966Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:25.761923Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7486783690431310519:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:25.762055Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:25.810822Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpGsZTo0/"Create temporary directory "/Root/~backup_20250328T090426" in databaseProcess "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpGsZTo0/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250328T090426/table" }Backup table "/Root/~backup_20250328T090426/table" to "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpGsZTo0/table"Describe table "/Root/~backup_20250328T090426/table"Write scheme into "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpGsZTo0/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpGsZTo0/table/permissions.pb"Read table "/Root/~backup_20250328T090426/table"Write data into "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpGsZTo0/table/data_00.csv"Drop table "/Root/~backup_20250328T090426/table"Remove temporary directory "/Root/~backup_20250328T090426" in database2025-03-28T09:04:26.745416Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037895 not found 2025-03-28T09:04:26.745475Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037894 not found 2025-03-28T09:04:26.745508Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037892 not found 2025-03-28T09:04:26.751809Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037893 not found 2025-03-28T09:04:26.779293Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715664:0, at schemeshard: 72057594046644480 Backup completed successfully2025-03-28T09:04:26.820903Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7486783694726279116:2420], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:26.821046Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Restore "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpGsZTo0/" to "/Root"Resolved db base path: "/Root"2025-03-28T09:04:27.064224Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037888 not found 2025-03-28T09:04:27.064294Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037889 not found 2025-03-28T09:04:27.064314Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037890 not found 2025-03-28T09:04:27.064341Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037891 not found Restore folder "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpGsZTo0/" to "/Root"Process "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpGsZTo0/table"Read scheme from "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpGsZTo0/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpGsZTo0/table" to "/Root/table"2025-03-28T09:04:27.136386Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpGsZTo0/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-03-28T09:04:27.324236Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:04:27.527631Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-28T09:04:27.670937Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2025-03-28T09:04:28.033757Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710762:0, at schemeshard: 72057594046644480 2025-03-28T09:04:28.055969Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037900 not found 2025-03-28T09:04:34.356112Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:04:34.356147Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:47.217563Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 2025-03-28T09:04:47.330583Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037901 not found 2025-03-28T09:04:47.345970Z node 22 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 22, TabletId: 72075186224037902 not found Restore ACL "/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpGsZTo0/table" to "/Root/table"Read ACL from 
"/home/runner/.ya/build/build_root/2909/001776/r3tmp/tmpGsZTo0/table/permissions.pb"2025-03-28T09:04:53.220975Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 Restore completed successfully >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless [GOOD] >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters >> KqpPg::InsertFromSelect_Serial+useSink [GOOD] >> KqpPg::InsertFromSelect_Serial-useSink >> HttpRequest::ProbeServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] Test command err: 2025-03-28T09:04:31.885709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:31.886214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:31.886284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001658/r3tmp/tmpG9R045/pdisk_1.dat 2025-03-28T09:04:32.314628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:32.361012Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:32.407151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:32.407304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:32.418860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:32.513093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:32.569083Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:688:2586] 2025-03-28T09:04:32.569374Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:32.641130Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:32.641352Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:32.643267Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:04:32.643363Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:04:32.643440Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:04:32.643857Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:32.644365Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:32.644458Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:717:2586] in generation 1 2025-03-28T09:04:32.646120Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:690:2588] 2025-03-28T09:04:32.646340Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:32.686361Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:32.692674Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:32.694228Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:04:32.694307Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-28T09:04:32.694357Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:04:32.694726Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:32.695253Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2590] 2025-03-28T09:04:32.695451Z node 1 
:TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:32.703937Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:32.704019Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:735:2588] in generation 1 2025-03-28T09:04:32.705432Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:32.705564Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:32.706960Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-03-28T09:04:32.707029Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-03-28T09:04:32.707084Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-03-28T09:04:32.707375Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:32.707482Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:32.707534Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:740:2590] in generation 1 2025-03-28T09:04:32.719772Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:04:32.753074Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:04:32.753351Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:04:32.753543Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:744:2618] 2025-03-28T09:04:32.753586Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:04:32.753623Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:04:32.753662Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:32.754079Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:04:32.754120Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-28T09:04:32.754183Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:04:32.754278Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:745:2619] 2025-03-28T09:04:32.754304Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:04:32.754333Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-28T09:04:32.754355Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:04:32.754799Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:04:32.754843Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-03-28T09:04:32.754894Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:04:32.754947Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:746:2620] 2025-03-28T09:04:32.754969Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-28T09:04:32.755002Z node 1 :TX_DATASHARD INFO: Cannot 
activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-03-28T09:04:32.755043Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:04:32.755235Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:04:32.755339Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:04:32.755999Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:04:32.756055Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:32.756090Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:04:32.756133Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:04:32.756186Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-28T09:04:32.756261Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-28T09:04:32.756360Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:712:2601], sessionId# [0:0:0] 2025-03-28T09:04:32.756413Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:04:32.756460Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:32.756486Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-28T09:04:32.756534Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:04:32.756612Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-03-28T09:04:32.756693Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-03-28T09:04:32.756875Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:04:32.757091Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:04:32.757193Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:04:32.757693Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:713:2602], sessionId# [0:0:0] 2025-03-28T09:04:32.757753Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:04:32.757778Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:32.757816Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-03-28T09:04:32.759053Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:04:32.759374Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:04:32.759610Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:04:32.759679Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-28T09:04:32.760050Z node 1 :TX_DATASHARD DEBUG: Server connected at 
leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:719:2605], sessionId# [0:0:0] 2025-03-28T09:04:32.760186Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-03-28T09:04:32.760322Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-03-28T09:04:32.760380Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-03-28T09:04:32.762500Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:04:32.762589Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:04:32.762643Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-28T09:04:32.775196Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:04:32.775326Z nod ... 80, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-03-28T09:04:55.532399Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 5 Group: 0 Step: 2500 TxId: 281474976715667 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-03-28T09:04:55.532502Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 0 Step: 2500 TxId: 281474976715667 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037891 2025-03-28T09:04:55.532794Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:55.532956Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-03-28T09:04:55.532993Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:04:55.533030Z node 3 :TX_DATASHARD DEBUG: Found ready operation [2500:281474976715667] in PlanQueue unit at 72075186224037893 2025-03-28T09:04:55.536278Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037893 loaded tx from db 2500:281474976715667 keys extracted: 0 2025-03-28T09:04:55.536461Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:04:55.548270Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037893 step# 2500} 2025-03-28T09:04:55.548401Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-03-28T09:04:55.561235Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 2500} 2025-03-28T09:04:55.561354Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-03-28T09:04:55.561421Z node 3 :TX_DATASHARD DEBUG: Send RS 2 at 72075186224037891 from 72075186224037891 to 72075186224037893 txId 281474976715667 2025-03-28T09:04:55.561496Z node 3 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 72075186224037891 2025-03-28T09:04:55.561568Z node 3 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715667] from 72075186224037891 at tablet 72075186224037891 send result to client [3:1435:3075], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:04:55.561709Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037891, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 14] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 11] SchemaVersion: 1 } 2025-03-28T09:04:55.561785Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-28T09:04:55.562138Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1435:3075] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715667, shard# 72075186224037891, status# 2 2025-03-28T09:04:55.562371Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037893 source 72075186224037891 dest 72075186224037893 producer 72075186224037891 txId 281474976715667 2025-03-28T09:04:55.562478Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037893 got read set: {TEvReadSet step# 2500 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletProducer# 72075186224037891 ReadSet.Size()# 19 Seqno# 2 Flags# 0} 2025-03-28T09:04:55.562591Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037893 2025-03-28T09:04:55.563036Z node 3 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037891 2025-03-28T09:04:55.563407Z node 3 :TX_DATASHARD DEBUG: Send 3 change records: to# [3:1240:2958], at tablet# 72075186224037891 2025-03-28T09:04:55.563477Z node 3 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 3, forgotten# 0, left# 0, at tablet# 72075186224037891 2025-03-28T09:04:55.563559Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-03-28T09:04:55.563601Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:04:55.563647Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [2500:281474976715667] at 72075186224037893 for LoadAndWaitInRS 2025-03-28T09:04:55.564066Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:55.564642Z node 3 :TX_DATASHARD DEBUG: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037891, generation# 1, at tablet# 72075186224037892 2025-03-28T09:04:55.576655Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-03-28T09:04:55.576764Z node 3 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715667] from 72075186224037893 at tablet 72075186224037893 send result to client [3:1435:3075], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:04:55.576866Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037893 {TEvReadSet step# 2500 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletConsumer# 72075186224037893 Flags# 0 Seqno# 2} 2025-03-28T09:04:55.576931Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037893 2025-03-28T09:04:55.577087Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1435:3075] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715667, shard# 72075186224037893, status# 2 2025-03-28T09:04:55.577145Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1435:3075] Reply: txId# 281474976715667, status# OK, error# 2025-03-28T09:04:55.577803Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037891 source 72075186224037891 dest 72075186224037893 consumer 72075186224037893 txId 281474976715667 2025-03-28T09:04:55.578305Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037891 2025-03-28T09:04:55.578385Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037891 2025-03-28T09:04:55.578626Z node 3 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037891 2025-03-28T09:04:55.578665Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037891 2025-03-28T09:04:55.578799Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 5, at tablet: 72075186224037891 2025-03-28T09:04:55.578838Z node 3 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 6, at tablet: 72075186224037891 2025-03-28T09:04:55.579108Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1430:3071], serverId# [3:1431:3072], sessionId# [0:0:0] 2025-03-28T09:04:55.579196Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-03-28T09:04:55.579234Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:55.579281Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-03-28T09:04:55.580703Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037893 2025-03-28T09:04:55.581844Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037893 2025-03-28T09:04:55.582148Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-03-28T09:04:55.582219Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:04:55.582279Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037893 for WaitForStreamClearance 2025-03-28T09:04:55.582564Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:04:55.582647Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-03-28T09:04:55.583380Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037893, TxId: 281474976715668, MessageQuota: 1 2025-03-28T09:04:55.583551Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037893, TxId: 281474976715668, MessageQuota: 1 2025-03-28T09:04:55.632169Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037893 2025-03-28T09:04:55.632266Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037893 2025-03-28T09:04:55.632632Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-03-28T09:04:55.632687Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:04:55.632738Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037893 for ReadTableScan 
2025-03-28T09:04:55.632913Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:55.632994Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-03-28T09:04:55.633056Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-03-28T09:04:55.634484Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037892 2025-03-28T09:04:55.634835Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037892 2025-03-28T09:04:55.635050Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-28T09:04:55.635094Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:04:55.635136Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715669] at 72075186224037892 for WaitForStreamClearance 2025-03-28T09:04:55.635334Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:04:55.635394Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-03-28T09:04:55.635959Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715669, MessageQuota: 1 2025-03-28T09:04:55.636096Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037892, TxId: 281474976715669, MessageQuota: 1 2025-03-28T09:04:55.637798Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2025-03-28T09:04:55.637842Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715669, at: 72075186224037892 2025-03-28T09:04:55.638075Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-28T09:04:55.638114Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:04:55.638152Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715669] at 72075186224037892 for ReadTableScan 2025-03-28T09:04:55.638266Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:55.638321Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-03-28T09:04:55.638366Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction >> HttpRequest::AnalyzeServerless >> DataShardSnapshots::VolatileSnapshotTimeout [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [GOOD] >> DataShardSnapshots::RepeatableReadAfterSplitRace >> BasicStatistics::TwoNodes >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] >> KqpPg::DeleteWithQueryService+useSink [GOOD] >> KqpPg::DeleteWithQueryService-useSink >> KqpScan::ScanPg [GOOD] |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] |77.3%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id
|77.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id
|77.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id
|77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest
>> KqpWorkloadService::TestCpuLoadThresholdRefresh [GOOD]
>> KqpWorkloadService::TestHandlerActorCleanup
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanPg [GOOD]
Test command err:
2025-03-28T09:04:25.538674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:25.539033Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:25.539207Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:04:25.539823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:25.540155Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:25.540367Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a8f/r3tmp/tmpBHNBNE/pdisk_1.dat 2025-03-28T09:04:26.029842Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:26.264345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:26.395704Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:04:26.403328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:26.403455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:26.404819Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:04:26.405404Z node 2 :TX_PROXY DEBUG: actor# [2:238:2129] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:04:26.415446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:26.415586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:26.416497Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2025-03-28T09:04:26.441131Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:04:26.441709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:26.442143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:26.775498Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] Handle TEvProposeTransaction 2025-03-28T09:04:26.775566Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:04:26.775736Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1232:2745] 2025-03-28T09:04:26.968806Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:04:26.968909Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:04:26.969583Z node 1 :TX_PROXY DEBUG: Actor# 
[1:1232:2745] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:04:26.969674Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:04:26.970041Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:04:26.970218Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:04:26.970322Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:04:26.971983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:26.972385Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:04:26.974588Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:04:26.974652Z node 1 :TX_PROXY DEBUG: Actor# [1:1232:2745] txid# 281474976715657 SEND to# [1:1141:2687] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:04:27.094009Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1281:2390] 2025-03-28T09:04:27.094254Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:27.187642Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:27.187883Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:27.189683Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:04:27.189758Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:04:27.189831Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:04:27.190205Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:27.190550Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:27.190650Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:1305:2390] in generation 1 2025-03-28T09:04:27.217255Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:04:27.258992Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:04:27.259208Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:04:27.259321Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1308:2407] 2025-03-28T09:04:27.259361Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:04:27.259395Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:04:27.259457Z node 
2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:27.259890Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:04:27.260023Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:04:27.260128Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:04:27.260182Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:27.260224Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:04:27.260263Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:04:27.325498Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1264:2773], serverId# [2:1312:2408], sessionId# [0:0:0] 2025-03-28T09:04:27.326009Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:04:27.326251Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:04:27.326344Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:04:27.328497Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:04:27.351599Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:04:27.351725Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:04:27.671671Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-03-28T09:04:27.672059Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-03-28T09:04:27.672234Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-28T09:04:27.738116Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-03-28T09:04:27.738337Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /Root, board: kqpexch+/Root, with size: 2 2025-03-28T09:04:27.738510Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:04:27.738854Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-28T09:04:27.762179Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1342:2794], serverId# [2:1344:2418], sessionId# [0:0:0] 2025-03-28T09:04:27.783546Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:04:27.783668Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:27.784028Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:04:27.784081Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 
2025-03-28T09:04:27.784160Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:04:27.784504Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:04:27.784716Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025 ... TxId: 281474976715664, task: 1. Tasks execution finished 2025-03-28T09:04:45.566107Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:1645:2978], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jqdzy7417tnefhjms7xrwksq. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=NDVmYzhlN2UtZjU0OWY0NTYtMzU0M2JiNGMtM2YzMmMzNTk=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-28T09:04:45.566207Z node 3 :KQP_COMPUTE DEBUG: TxId: 281474976715664, task: 1. pass away 2025-03-28T09:04:45.566292Z node 3 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715664;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:04:45.566443Z node 3 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715664, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-03-28T09:04:45.566632Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1642:2939] TxId: 281474976715664. Ctx: { TraceId: 01jqdzy7417tnefhjms7xrwksq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDVmYzhlN2UtZjU0OWY0NTYtMzU0M2JiNGMtM2YzMmMzNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1645:2978], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2013 Tasks { TaskId: 1 CpuTimeUs: 603 FinishTimeMs: 1743152685565 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 62 BuildCpuTimeUs: 541 HostName: "ghrun-mdsjyjltvm" NodeId: 3 CreateTimeMs: 1743152685563 } MaxMemoryUsage: 1048576 } 2025-03-28T09:04:45.567051Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715664. Ctx: { TraceId: 01jqdzy7417tnefhjms7xrwksq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDVmYzhlN2UtZjU0OWY0NTYtMzU0M2JiNGMtM2YzMmMzNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:1645:2978] 2025-03-28T09:04:45.567935Z node 3 :KQP_EXECUTER INFO: ActorId: [3:1642:2939] TxId: 281474976715664. Ctx: { TraceId: 01jqdzy7417tnefhjms7xrwksq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDVmYzhlN2UtZjU0OWY0NTYtMzU0M2JiNGMtM2YzMmMzNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 4078 DurationUs: 1743152683610640 ExecuterCpuTimeUs: 2065 StartTimeMs: 1956 FinishTimeMs: 1743152685567 Stages { StageGuid: "e10854e7-3f68ab6-7d8a412e-54812075" Program: "(\n(declare %kqp%tx_result_binding_0_0 (ListType (StructType \'(\'\"column0\" (OptionalType (DataType \'Uint64))))))\n(return (lambda \'() (Iterator %kqp%tx_result_binding_0_0)))\n)\n" ComputeActors { CpuTimeUs: 2013 Tasks { TaskId: 1 CpuTimeUs: 603 FinishTimeMs: 1743152685565 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 62 BuildCpuTimeUs: 541 HostName: "ghrun-mdsjyjltvm" NodeId: 3 CreateTimeMs: 1743152685563 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743152685564 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":8,\"Plans\":[{\"Node Type\":\"ResultSet_1\",\"PlanNodeId\":7,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"CTE Name\":\"precompute_0_0\",\"Node Type\":\"ConstantExpr\",\"Operators\":[{\"Inputs\":[],\"Iterator\":\"precompute_0_0\",\"Name\":\"Iterator\"}],\"PlanNodeId\":6,\"StageGuid\":\"e10854e7-3f68ab6-7d8a412e-54812075\",\"Stats\":{\"BaseTimeMs\":1743152685564,\"ComputeNodes\":[{\"CpuTimeUs\":2013,\"Tasks\":[{\"ComputeTimeUs\":62,\"FinishTimeMs\":1743152685565,\"Host\":\"ghrun-mdsjyjltvm\",\"NodeId\":3,\"OutputBytes\":6,\"OutputRows\":1,\"ResultBytes\":6,\"ResultRows\":1,\"TaskId\":1}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 684 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\022\013\010\335\017\020\335\017\030\335\017 \001" } } 2025-03-28T09:04:45.568004Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1642:2939] TxId: 281474976715664. 
Ctx: { TraceId: 01jqdzy7417tnefhjms7xrwksq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDVmYzhlN2UtZjU0OWY0NTYtMzU0M2JiNGMtM2YzMmMzNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:04:45.568046Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1642:2939] TxId: 281474976715664. Ctx: { TraceId: 01jqdzy7417tnefhjms7xrwksq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDVmYzhlN2UtZjU0OWY0NTYtMzU0M2JiNGMtM2YzMmMzNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-03-28T09:04:45.568084Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1642:2939] TxId: 281474976715664. Ctx: { TraceId: 01jqdzy7417tnefhjms7xrwksq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDVmYzhlN2UtZjU0OWY0NTYtMzU0M2JiNGMtM2YzMmMzNTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.002013s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-28T09:04:45.568949Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-03-28T09:04:45.569047Z node 3 :TX_PROXY DEBUG: actor# [3:208:2173] Handle TEvProposeTransaction 2025-03-28T09:04:45.569081Z node 3 :TX_PROXY DEBUG: actor# [3:208:2173] TxId# 0 ProcessProposeTransaction 2025-03-28T09:04:45.569187Z node 3 :TX_PROXY DEBUG: actor# [3:208:2173] Cookie# 0 userReqId# "" txid# 0 reqId# [3:1647:2979] SnapshotReq marker# P0 2025-03-28T09:04:45.570110Z node 3 :TX_PROXY DEBUG: Actor# [3:1649:2979] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-03-28T09:04:45.570330Z node 3 :TX_PROXY DEBUG: Actor# [3:1649:2979] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-03-28T09:04:45.570428Z node 3 :TX_PROXY DEBUG: Actor# [3:1647:2979] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2025-03-28T09:04:53.726621Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:501:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:53.727000Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:53.727199Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:04:53.729497Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:498:2160], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:53.729845Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:04:53.729957Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a8f/r3tmp/tmpOQ1D8h/pdisk_1.dat 2025-03-28T09:04:54.084540Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:54.260341Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:54.358375Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:54.358500Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:54.366529Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:54.366648Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:54.380811Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2025-03-28T09:04:54.381579Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:54.382010Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:54.681064Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:55.414648Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1402:2834], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:55.414798Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1413:2839], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:55.415342Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:55.421618Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:04:55.929767Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1416:2842], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:04:56.012099Z node 5 :TX_PROXY ERROR: Actor# [5:1545:2912] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:56.672936Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqdzyj3m1qqjx2669eqhjhym, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MWI1YmFmMTktNWJiMGFhODItMjdjZTkwYS02Y2VhZGEyOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:57.395218Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqdzykcebvdz0hqdgrf5mesd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YTRmZDgzMTMtMzU3ZjVlOTUtODI1NWE0NTctZjZlZTFmYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:57.939775Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqdzykcebvdz0hqdgrf5mesd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YTRmZDgzMTMtMzU3ZjVlOTUtODI1NWE0NTctZjZlZTFmYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:04:57.944572Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] Test command err: 2025-03-28T09:04:19.113779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:19.114416Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:19.114650Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:04:19.115745Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:19.116372Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:19.116447Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ab1/r3tmp/tmpjSirsO/pdisk_1.dat 2025-03-28T09:04:20.003842Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:20.306499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:20.455834Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:04:20.465206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:20.465375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:20.466980Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:04:20.467689Z node 2 :TX_PROXY DEBUG: actor# [2:238:2129] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:04:20.469511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:20.469619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:20.476128Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976720656 RangeEnd# 281474976725656 txAllocator# 72057594046447617 2025-03-28T09:04:20.490813Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:04:20.491402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:20.491820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:20.820605Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] Handle TEvProposeTransaction 2025-03-28T09:04:20.820694Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:04:20.820885Z node 1 :TX_PROXY DEBUG: actor# [1:208:2173] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1231:2744] 2025-03-28T09:04:20.989615Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 7 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:04:20.989761Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:04:20.995353Z node 1 :TX_PROXY DEBUG: Actor# 
[1:1231:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:04:20.995481Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:04:20.995934Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:04:20.996162Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:04:20.996275Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:04:20.998564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:21.003080Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:04:21.016283Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:04:21.016393Z node 1 :TX_PROXY DEBUG: Actor# [1:1231:2744] txid# 281474976715657 SEND to# [1:1141:2687] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:04:21.134426Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:1311:2804] 2025-03-28T09:04:21.134723Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:21.188244Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:1313:2805] 2025-03-28T09:04:21.188503Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:21.209848Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:1315:2806] 2025-03-28T09:04:21.210055Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:21.229473Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:21.230210Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:21.233894Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:04:21.233981Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-28T09:04:21.234061Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:04:21.234447Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:21.236204Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:21.236320Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:1407:2804] in generation 1 2025-03-28T09:04:21.252157Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:21.253656Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:21.255112Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-03-28T09:04:21.255203Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 
72075186224037894 2025-03-28T09:04:21.255250Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2025-03-28T09:04:21.255549Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:21.255776Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:21.255856Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037894 persisting started state actor id [1:1422:2805] in generation 1 2025-03-28T09:04:21.277010Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:21.277171Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:21.278717Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037892 2025-03-28T09:04:21.278787Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037892 2025-03-28T09:04:21.278829Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037892 2025-03-28T09:04:21.279141Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:21.279253Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:21.279306Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037892 persisting started state actor id [1:1437:2806] in generation 1 2025-03-28T09:04:21.291373Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1414:2399] 2025-03-28T09:04:21.291548Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:21.366283Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [2:1420:2400] 2025-03-28T09:04:21.366497Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:21.379877Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037893 actor [2:1425:2402] 2025-03-28T09:04:21.380121Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:21.391406Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:21.391948Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:21.393229Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:04:21.393289Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:04:21.393342Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:04:21.393695Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:21.394813Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:21.394884Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:1495:2399] in generation 1 2025-03-28T09:04:21.396413Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [2:1428:2403] 2025-03-28T09:04:21.396609Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:21.405589Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:21.405827Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:21.407276Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-03-28T09:04:21.407343Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-03-28T09:04:21.407390Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-03-28T09:04:21.407665Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:21.407994Z node 
2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:21.408039Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037891 persisting started state actor id [2:1503:2400] in generation 1 2025-03-28T09:04:21.417298Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:21.417937Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:21.419319Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: Q ... vayqg55z. SessionId : ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:04:58.081498Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1981:3162], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqdzyhjd57nbsmytvayqg55z. SessionId : ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll inputs 2025-03-28T09:04:58.081526Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1981:3162], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqdzyhjd57nbsmytvayqg55z. SessionId : ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll sources 2025-03-28T09:04:58.081569Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1981:3162], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqdzyhjd57nbsmytvayqg55z. SessionId : ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Resume execution, run status: Finished 2025-03-28T09:04:58.081604Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1981:3162], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqdzyhjd57nbsmytvayqg55z. SessionId : ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. ProcessOutputsState.Inflight: 0 2025-03-28T09:04:58.081655Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1981:3162], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqdzyhjd57nbsmytvayqg55z. SessionId : ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2025-03-28T09:04:58.081715Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-03-28T09:04:58.081952Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1978:2972] TxId: 281474976715667. Ctx: { TraceId: 01jqdzyhjd57nbsmytvayqg55z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1981:3162], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 473 Tasks { TaskId: 1 CpuTimeUs: 188 FinishTimeMs: 1743152698080 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 62 BuildCpuTimeUs: 126 HostName: "ghrun-mdsjyjltvm" NodeId: 5 CreateTimeMs: 1743152698079 } MaxMemoryUsage: 1048576 } 2025-03-28T09:04:58.082069Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1978:2972] TxId: 281474976715667. 
Ctx: { TraceId: 01jqdzyhjd57nbsmytvayqg55z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [5:1981:3162], ... response 271646822 NKikimr::NKqp::TEvKqpExecuter::TEvStreamData NKikimrKqp.TEvExecuterStreamData ResultSet { columns { name: "column0" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint64_value: 596400 } } } SeqNo: 1 QueryResultIndex: 0 ChannelId: 1 2025-03-28T09:04:58.082397Z node 5 :KQP_EXECUTER DEBUG: TxId: 281474976715667, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 100, to: [5:1982:3162] 2025-03-28T09:04:58.082465Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Received channel data ack for channelId: 1, seqNo: 1, lastSentSeqNo: 1, freeSpace: 100, early finish: 0 2025-03-28T09:04:58.082512Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. PeerState, peerState:(freeSpace:100;inFlightBytes:0;inFlightCount:0;), sentSeqNo: 1, ackSeqNo: 1 2025-03-28T09:04:58.082540Z node 5 :KQP_COMPUTE TRACE: TxId: 281474976715667, task: 1. Resume compute actor 2025-03-28T09:04:58.082626Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1981:3162], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqdzyhjd57nbsmytvayqg55z. SessionId : ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:04:58.082673Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1981:3162], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqdzyhjd57nbsmytvayqg55z. SessionId : ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll inputs 2025-03-28T09:04:58.082727Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1981:3162], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqdzyhjd57nbsmytvayqg55z. SessionId : ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Poll sources 2025-03-28T09:04:58.082937Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1981:3162], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqdzyhjd57nbsmytvayqg55z. SessionId : ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Resume execution, run status: Finished 2025-03-28T09:04:58.082966Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1981:3162], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqdzyhjd57nbsmytvayqg55z. SessionId : ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. ProcessOutputsState.Inflight: 0 2025-03-28T09:04:58.082991Z node 5 :KQP_COMPUTE TRACE: SelfId: [5:1981:3162], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqdzyhjd57nbsmytvayqg55z. SessionId : ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Do not drain channelId: 1, finished 2025-03-28T09:04:58.083025Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. 
Tasks execution finished 2025-03-28T09:04:58.083055Z node 5 :KQP_COMPUTE DEBUG: SelfId: [5:1981:3162], TxId: 281474976715667, task: 1. Ctx: { TraceId : 01jqdzyhjd57nbsmytvayqg55z. SessionId : ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-28T09:04:58.083135Z node 5 :KQP_COMPUTE DEBUG: TxId: 281474976715667, task: 1. pass away 2025-03-28T09:04:58.083216Z node 5 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715667;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:04:58.083343Z node 5 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715667, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-03-28T09:04:58.083537Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1978:2972] TxId: 281474976715667. Ctx: { TraceId: 01jqdzyhjd57nbsmytvayqg55z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [5:1981:3162], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2353 Tasks { TaskId: 1 CpuTimeUs: 193 FinishTimeMs: 1743152698082 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 67 BuildCpuTimeUs: 126 HostName: "ghrun-mdsjyjltvm" NodeId: 5 CreateTimeMs: 1743152698079 } MaxMemoryUsage: 1048576 } 2025-03-28T09:04:58.083610Z node 5 :KQP_EXECUTER INFO: TxId: 281474976715667. Ctx: { TraceId: 01jqdzyhjd57nbsmytvayqg55z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [5:1981:3162] 2025-03-28T09:04:58.084459Z node 5 :KQP_EXECUTER INFO: ActorId: [5:1978:2972] TxId: 281474976715667. Ctx: { TraceId: 01jqdzyhjd57nbsmytvayqg55z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 4412 DurationUs: 1743152694549679 ExecuterCpuTimeUs: 2059 StartTimeMs: 3534 FinishTimeMs: 1743152698083 Stages { StageGuid: "6a851673-5666f064-3f41945a-3b713d64" Program: "(\n(declare %kqp%tx_result_binding_0_0 (ListType (StructType \'(\'\"column0\" (OptionalType (DataType \'Uint64))))))\n(return (lambda \'() (Iterator %kqp%tx_result_binding_0_0)))\n)\n" ComputeActors { CpuTimeUs: 2353 Tasks { TaskId: 1 CpuTimeUs: 193 FinishTimeMs: 1743152698082 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 67 BuildCpuTimeUs: 126 HostName: "ghrun-mdsjyjltvm" NodeId: 5 CreateTimeMs: 1743152698079 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743152698080 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":8,\"Plans\":[{\"Node Type\":\"ResultSet_1\",\"PlanNodeId\":7,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"CTE Name\":\"precompute_0_0\",\"Node Type\":\"ConstantExpr\",\"Operators\":[{\"Inputs\":[],\"Iterator\":\"precompute_0_0\",\"Name\":\"Iterator\"}],\"PlanNodeId\":6,\"StageGuid\":\"6a851673-5666f064-3f41945a-3b713d64\",\"Stats\":{\"BaseTimeMs\":1743152698080,\"ComputeNodes\":[{\"CpuTimeUs\":2353,\"Tasks\":[{\"ComputeTimeUs\":67,\"FinishTimeMs\":1743152698082,\"Host\":\"ghrun-mdsjyjltvm\",\"NodeId\":5,\"OutputBytes\":6,\"OutputRows\":1,\"ResultBytes\":6,\"ResultRows\":1,\"TaskId\":1}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 685 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\022\013\010\261\022\020\261\022\030\261\022 \001" } } 2025-03-28T09:04:58.084526Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1978:2972] TxId: 281474976715667. 
Ctx: { TraceId: 01jqdzyhjd57nbsmytvayqg55z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:04:58.084569Z node 5 :KQP_EXECUTER TRACE: ActorId: [5:1978:2972] TxId: 281474976715667. Ctx: { TraceId: 01jqdzyhjd57nbsmytvayqg55z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-03-28T09:04:58.084612Z node 5 :KQP_EXECUTER DEBUG: ActorId: [5:1978:2972] TxId: 281474976715667. Ctx: { TraceId: 01jqdzyhjd57nbsmytvayqg55z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=YTUwY2E3YjgtYTczMTFjNjktNzM0OWM2NTctOTUwMGE5NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.002353s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ... response 271646721 NKikimr::NKqp::NPrivateEvents::TEvQueryResponse NKikimrKqp.TEvQueryResponse Response { TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 938 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] Test command err: 2025-03-28T09:03:20.036044Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783412409071326:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:20.038618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cc4/r3tmp/tmpmR5pWU/pdisk_1.dat 2025-03-28T09:03:20.459252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:20.459363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:20.467728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:03:20.501706Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20715, node 1 2025-03-28T09:03:20.579197Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:20.579214Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:20.579220Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:20.579321Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30769 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:20.894441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:20.913686Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:03:23.009552Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-28T09:03:23.016668Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjQ2Y2JlZWEtZGM1MDg4MDItYzY4YmMxNWYtNWRjOGExMDA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YjQ2Y2JlZWEtZGM1MDg4MDItYzY4YmMxNWYtNWRjOGExMDA= 2025-03-28T09:03:23.017139Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783425293973712:2328], Start check tables existence, number paths: 2 2025-03-28T09:03:23.017240Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjQ2Y2JlZWEtZGM1MDg4MDItYzY4YmMxNWYtNWRjOGExMDA=, ActorId: [1:7486783425293973720:2329], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:23.017702Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-03-28T09:03:23.017727Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-28T09:03:23.017744Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-28T09:03:23.034178Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783425293973712:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-28T09:03:23.034230Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783425293973712:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-28T09:03:23.034252Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783425293973712:2328], Successfully finished 2025-03-28T09:03:23.034302Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-28T09:03:23.042814Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783425293973737:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:23.049740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:03:23.052220Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783425293973737:2300], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-28T09:03:23.057836Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783425293973737:2300], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-28T09:03:23.077884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783425293973737:2300], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:03:23.141430Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783425293973737:2300], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:23.145578Z node 1 :TX_PROXY ERROR: Actor# [1:7486783425293973790:2333] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:03:23.145714Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783425293973737:2300], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-28T09:03:23.149155Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzE2MGRmNTMtNTAxODcxMGYtODgyYWFiOTQtZDNlOGUyMTY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NzE2MGRmNTMtNTAxODcxMGYtODgyYWFiOTQtZDNlOGUyMTY= 2025-03-28T09:03:23.149469Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzE2MGRmNTMtNTAxODcxMGYtODgyYWFiOTQtZDNlOGUyMTY=, ActorId: [1:7486783425293973797:2331], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:23.149680Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NzE2MGRmNTMtNTAxODcxMGYtODgyYWFiOTQtZDNlOGUyMTY=, ActorId: [1:7486783425293973797:2331], ActorState: ReadyState, TraceId: 01jqdzvr0dbz0nf8qwxv9f08st, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7486783425293973796:2338] database: Root databaseId: /Root pool id: sample_pool_id 2025-03-28T09:03:23.149718Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-28T09:03:23.149734Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-28T09:03:23.149789Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7486783425293973797:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NzE2MGRmNTMtNTAxODcxMGYtODgyYWFiOTQtZDNlOGUyMTY= 2025-03-28T09:03:23.149845Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783425293973799:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-28T09:03:23.149892Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7486783425293973800:2333], Database: /Root, Start database fetching 2025-03-28T09:03:23.151151Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783425293973799:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-28T09:03:23.151230Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-03-28T09:03:23.151244Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-03-28T09:03:23.151464Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7486783425293973810:2334], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: 
[OwnerId: 72057594046644480, LocalPathId: 5] 2025-03-28T09:03:23.151521Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7486783425293973800:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-03-28T09:03:23.151568Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-03-28T09:03:23.151626Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7486783425293973811:2335], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NzE2MGRmNTMtNTAxODcxMGYtODgyYWFiOTQtZDNlOGUyMTY=, Start pool fetching 2025-03-28T09:03:23.151651Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783425293973812:2336], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-28T09:03:23.152858Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783425293973812:2336], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-28T09:03:23.152987Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7486783425293973811:2335], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NzE2MGRmNTMtNTAxODcxMGYtODgyYWFiOTQtZDNlOGUyMTY=, Pool info successfully resolved 2025-03-28T09:03:23.153018Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7486783425293973810:2334], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2025-03-28T09:03:23.153040Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzE2MGRmNTMtNTAxODcxMGYtODgyYWFiOTQtZDNlOGUyMTY= 2025-03-28T09:03:23.153195Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7486783425293973810:2334], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7486783425293973797:2331], session id: ydb://session/3?node_id=1&id=NzE2MGRmNTMtNTAxODcxMGYtODgyYWFiOTQtZDNlOGUyMTY= 2025-03-28T09:03:23.153235Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:74 ... ror: Resource pool my_pool not found or you don't have access permissions } 2025-03-28T09:04:56.498963Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-03-28T09:04:56.501454Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MTY0MmViYzUtODgyNGVlMi04MjMyZjcyZS1iMGE5MDIxYg==, ActorId: [8:7486783787458592235:2332], ActorState: ExecuteState, TraceId: 01jqdzyk4x7vgg8g882a3j0j26, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [8:7486783821818331974:2332] WorkloadServiceCleanup: 0 2025-03-28T09:04:56.503737Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTY0MmViYzUtODgyNGVlMi04MjMyZjcyZS1iMGE5MDIxYg==, ActorId: [8:7486783787458592235:2332], ActorState: CleanupState, TraceId: 01jqdzyk4x7vgg8g882a3j0j26, EndCleanup, isFinal: 0 2025-03-28T09:04:56.503798Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTY0MmViYzUtODgyNGVlMi04MjMyZjcyZS1iMGE5MDIxYg==, ActorId: [8:7486783787458592235:2332], ActorState: CleanupState, TraceId: 01jqdzyk4x7vgg8g882a3j0j26, Sent query response back to proxy, proxyRequestId: 53, proxyId: [8:7486783761688788045:2276] 2025-03-28T09:04:56.509932Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw== 2025-03-28T09:04:56.510124Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [8:7486783821818331996:2663], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:04:56.510298Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: my_pool 2025-03-28T09:04:56.510358Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7486783821818331997:2664], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2025-03-28T09:04:56.510454Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [8:7486783821818331996:2663], ActorState: ReadyState, TraceId: 01jqdzyk5y23z24terpkxyzj7w, received request, proxyRequestId: 54 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [8:7486783821818331995:3005] database: Root databaseId: /Root pool id: default 2025-03-28T09:04:56.510487Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [8:7486783821818331996:2663], ActorState: ReadyState, TraceId: 01jqdzyk5y23z24terpkxyzj7w, request placed into pool from cache: default 2025-03-28T09:04:56.510590Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [8:7486783821818331996:2663], ActorState: ExecuteState, TraceId: 01jqdzyk5y23z24terpkxyzj7w, Sending CompileQuery request 2025-03-28T09:04:56.510867Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7486783821818331997:2664], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-03-28T09:04:56.510971Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2025-03-28T09:04:56.595076Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [8:7486783821818331996:2663], ActorState: ExecuteState, TraceId: 01jqdzyk5y23z24terpkxyzj7w, ExecutePhyTx, tx: 0x000050C000157B58 literal: 0 commit: 1 txCtx.DeferredEffects.size(): 0 2025-03-28T09:04:56.595152Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [8:7486783821818331996:2663], ActorState: ExecuteState, TraceId: 01jqdzyk5y23z24terpkxyzj7w, Sending to Executer TraceId: 0 8 2025-03-28T09:04:56.595258Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [8:7486783821818331996:2663], ActorState: ExecuteState, TraceId: 01jqdzyk5y23z24terpkxyzj7w, Created new KQP executer: [8:7486783821818332002:2663] isRollback: 0 2025-03-28T09:04:56.597240Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [8:7486783821818331996:2663], ActorState: ExecuteState, TraceId: 01jqdzyk5y23z24terpkxyzj7w, Forwarded TEvStreamData to [8:7486783821818331995:3005] 2025-03-28T09:04:56.599087Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [8:7486783821818331996:2663], ActorState: ExecuteState, TraceId: 01jqdzyk5y23z24terpkxyzj7w, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-03-28T09:04:56.599257Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [8:7486783821818331996:2663], ActorState: ExecuteState, TraceId: 01jqdzyk5y23z24terpkxyzj7w, txInfo Status: Committed Kind: Pure TotalDuration: 4.297 ServerDuration: 4.208 QueriesCount: 2 2025-03-28T09:04:56.599337Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [8:7486783821818331996:2663], ActorState: ExecuteState, TraceId: 01jqdzyk5y23z24terpkxyzj7w, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-28T09:04:56.599570Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [8:7486783821818331996:2663], ActorState: ExecuteState, TraceId: 01jqdzyk5y23z24terpkxyzj7w, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:04:56.599604Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [8:7486783821818331996:2663], ActorState: ExecuteState, TraceId: 01jqdzyk5y23z24terpkxyzj7w, EndCleanup, isFinal: 1 2025-03-28T09:04:56.599661Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [8:7486783821818331996:2663], ActorState: ExecuteState, TraceId: 01jqdzyk5y23z24terpkxyzj7w, Sent query response back to proxy, proxyRequestId: 54, proxyId: [8:7486783761688788045:2276] 2025-03-28T09:04:56.599691Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [8:7486783821818331996:2663], ActorState: unknown state, TraceId: 01jqdzyk5y23z24terpkxyzj7w, Cleanup temp tables: 0 2025-03-28T09:04:56.599995Z 
node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NjdhZmY5ZmYtNDViMTBkODEtOGY3OGM5MGItNmE2MWQyMw==, ActorId: [8:7486783821818331996:2663], ActorState: unknown state, TraceId: 01jqdzyk5y23z24terpkxyzj7w, Session actor destroyed 2025-03-28T09:04:56.609888Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MTY0MmViYzUtODgyNGVlMi04MjMyZjcyZS1iMGE5MDIxYg==, ActorId: [8:7486783787458592235:2332], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:04:56.609938Z node 8 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=8&id=MTY0MmViYzUtODgyNGVlMi04MjMyZjcyZS1iMGE5MDIxYg==, ActorId: [8:7486783787458592235:2332], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:04:56.609971Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTY0MmViYzUtODgyNGVlMi04MjMyZjcyZS1iMGE5MDIxYg==, ActorId: [8:7486783787458592235:2332], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:04:56.610002Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTY0MmViYzUtODgyNGVlMi04MjMyZjcyZS1iMGE5MDIxYg==, ActorId: [8:7486783787458592235:2332], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:04:56.610094Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=MTY0MmViYzUtODgyNGVlMi04MjMyZjcyZS1iMGE5MDIxYg==, ActorId: [8:7486783787458592235:2332], ActorState: unknown state, Session actor destroyed 2025-03-28T09:04:56.621383Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=Y2MxNjYzODMtNzY3NzY0N2QtNzU2YjVmZTMtYWMxMWMwMjM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id Y2MxNjYzODMtNzY3NzY0N2QtNzU2YjVmZTMtYWMxMWMwMjM= 2025-03-28T09:04:56.621810Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=Y2MxNjYzODMtNzY3NzY0N2QtNzU2YjVmZTMtYWMxMWMwMjM=, ActorId: [8:7486783821818332014:2669], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:04:56.624895Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTdjMDU1ZDgtNjZmN2M1YTktMjMxYjczNjAtODhlMWUxMTM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NTdjMDU1ZDgtNjZmN2M1YTktMjMxYjczNjAtODhlMWUxMTM= 2025-03-28T09:04:56.625367Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTdjMDU1ZDgtNjZmN2M1YTktMjMxYjczNjAtODhlMWUxMTM=, ActorId: [8:7486783821818332016:2671], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:04:56.625833Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=Y2MxNjYzODMtNzY3NzY0N2QtNzU2YjVmZTMtYWMxMWMwMjM=, ActorId: [8:7486783821818332014:2669], ActorState: ReadyState, TraceId: 01jqdzyk9h23cxksfa48zj3q0p, received request, proxyRequestId: 57 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpcActor: [8:7486783821818332015:2670] database: /Root databaseId: /Root pool id: default 2025-03-28T09:04:56.625864Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=Y2MxNjYzODMtNzY3NzY0N2QtNzU2YjVmZTMtYWMxMWMwMjM=, ActorId: [8:7486783821818332014:2669], ActorState: ReadyState, TraceId: 01jqdzyk9h23cxksfa48zj3q0p, request placed into pool from cache: default 2025-03-28T09:04:56.625976Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=Y2MxNjYzODMtNzY3NzY0N2QtNzU2YjVmZTMtYWMxMWMwMjM=, ActorId: [8:7486783821818332014:2669], 
ActorState: ExecuteState, TraceId: 01jqdzyk9h23cxksfa48zj3q0p, Sending CompileQuery request 2025-03-28T09:04:56.626963Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTdjMDU1ZDgtNjZmN2M1YTktMjMxYjczNjAtODhlMWUxMTM=, ActorId: [8:7486783821818332016:2671], ActorState: ReadyState, TraceId: 01jqdzyk9j2ww14sh5nbayvvrs, received request, proxyRequestId: 58 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/.metadata/initialization/migrations`; rpcActor: [8:7486783821818332018:2672] database: /Root databaseId: /Root pool id: default 2025-03-28T09:04:56.626995Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTdjMDU1ZDgtNjZmN2M1YTktMjMxYjczNjAtODhlMWUxMTM=, ActorId: [8:7486783821818332016:2671], ActorState: ReadyState, TraceId: 01jqdzyk9j2ww14sh5nbayvvrs, request placed into pool from cache: default 2025-03-28T09:04:56.627079Z node 8 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=8&id=NTdjMDU1ZDgtNjZmN2M1YTktMjMxYjczNjAtODhlMWUxMTM=, ActorId: [8:7486783821818332016:2671], ActorState: ExecuteState, TraceId: 01jqdzyk9j2ww14sh5nbayvvrs, Sending CompileQuery request >> KqpScanArrowInChanels::AggregateCountStar [GOOD] >> KqpScanArrowInChanels::AggregateByColumn |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp >> HttpRequest::Status |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |77.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |77.4%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink >> YdbIndexTable::OnlineBuild [GOOD] >> YdbIndexTable::OnlineBuildWithDataColumn >> Cdc::RacySplitAndDropTable [GOOD] >> Cdc::RenameTable |77.4%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> YdbSdkSessions::TestSessionPool >> YdbSdkSessions::TestMultipleSessions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 22769, MsgBus: 11391 2025-03-28T09:04:41.767857Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783757963793673:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:41.767947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001687/r3tmp/tmpG2pPUL/pdisk_1.dat 2025-03-28T09:04:42.125537Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:42.146091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:42.146259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:42.148837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22769, node 1 2025-03-28T09:04:42.237531Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:42.237551Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:42.237558Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:42.237666Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11391 TClient is connected to server localhost:11391 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:42.889286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:42.917839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
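Each ESchemeOpCreateTable suboperation logged during this setup phase corresponds to plain YQL table DDL run by the test harness; a minimal sketch (table and column names here are illustrative, not taken from the test source):

    CREATE TABLE test_table (
        id    Uint64,
        value Utf8,
        PRIMARY KEY (id)
    );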
2025-03-28T09:04:43.121531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:43.319707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:43.417356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:45.455844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783775143664622:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:45.455985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:45.824212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:04:45.865009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:04:45.918999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:04:46.003939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:04:46.043263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:04:46.128738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:04:46.190105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783779438632443:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:46.190148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:46.190265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783779438632448:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:46.192935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:04:46.202858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783779438632450:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:04:46.263027Z node 1 :TX_PROXY ERROR: Actor# [1:7486783779438632504:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:46.766814Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783757963793673:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:46.766892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:47.417648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 864000000000 2025-03-28T09:04:48.169996Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152688200, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 20044, MsgBus: 29365 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001687/r3tmp/tmp0unJde/pdisk_1.dat 2025-03-28T09:04:49.153613Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:49.251643Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:49.266249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:49.266331Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:49.268519Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20044, node 2 2025-03-28T09:04:49.329864Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:49.329890Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:49.329897Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:49.329998Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29365 TClient is connected to server localhost:29365 WaitRootIsUp 'Root'... 
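The suite name (JoinWithParams) indicates the query shape under test: a join against client-supplied parameters. In YQL that is typically expressed with DECLARE plus AS_TABLE; a hedged sketch, with the parameter name, table, and types assumed for illustration:

    DECLARE $keys AS List<Struct<id: Uint64>>;  -- parameter bound by the client
    SELECT t.id, t.value
    FROM AS_TABLE($keys) AS k                   -- treat the bound list as a row source
    JOIN test_table AS t ON t.id = k.id;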
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:04:49.841861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:49.848747Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:04:49.853806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:49.932969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:04:50.122911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId ... AD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:52.394382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:04:52.476690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:04:52.521462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:04:52.588331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:04:52.624755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:04:52.669624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:04:52.721222Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783805582225055:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:52.721312Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:52.721660Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783805582225060:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:52.725481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:04:52.736846Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486783805582225062:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:04:52.837390Z node 2 :TX_PROXY ERROR: Actor# [2:7486783805582225118:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:54.105674Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152694108, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 25117, MsgBus: 61771 2025-03-28T09:04:55.152198Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486783817627114286:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:55.152243Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001687/r3tmp/tmpwgTmAh/pdisk_1.dat 2025-03-28T09:04:55.324580Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:55.324804Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:55.325572Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:55.343328Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25117, node 3 2025-03-28T09:04:55.467331Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:55.467359Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:55.467367Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:55.467491Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61771 TClient is connected to server localhost:61771 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:56.004211Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
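The "KqpSnapshotManager: discarding snapshot ... shutting down" records that close each run follow scan queries over the freshly created tables; the Aggregate* variants in the surrounding suite names (AggregateCountStar, AggregateByColumn, AggregateWithFunction) boil down to queries of roughly this shape — an illustrative sketch, not the tests' exact text:

    SELECT value, COUNT(*) AS cnt   -- aggregate evaluated over the scan
    FROM test_table
    GROUP BY value;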
2025-03-28T09:04:56.021689Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:04:56.038083Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:56.132208Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:56.330390Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:56.406758Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:58.782793Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783830512017953:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:58.782885Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:58.826813Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:04:58.858434Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:04:58.895311Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:04:58.930626Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:04:58.962762Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:04:59.035398Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:04:59.101169Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783834806985764:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:59.101262Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:59.101362Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783834806985769:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:59.105353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:04:59.117087Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486783834806985771:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:04:59.218613Z node 3 :TX_PROXY ERROR: Actor# [3:7486783834806985826:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:00.153006Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486783817627114286:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:00.153090Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:00.620706Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152700653, txId: 281474976715671] shutting down 2025-03-28T09:05:00.849204Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152700884, txId: 281474976715673] shutting down >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink >> KqpPg::CreateTableIfNotExists_GenericQuery [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowFormat::AggregateWithFunction >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] >> KqpPg::Returning+useSink [GOOD] >> KqpPg::Returning-useSink >> KqpPg::InsertFromSelect_Serial-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink >> YdbSdkSessions::MultiThreadSync >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient >> YdbSdkSessions::TestActiveSessionCountAfterBadSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:03:02.269788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:02.269901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:02.270013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:02.270053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:02.270118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:02.270174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:02.270245Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:02.270335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:02.270661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:02.385398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:02.385464Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:02.427381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:02.427749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:02.427939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:02.444934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:02.445235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:02.445974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:02.446234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:02.448679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:02.450151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:02.450237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:02.450438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:02.450490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:02.450541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:02.450627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:02.472038Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:02.728067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:02.728345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:02.728565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:02.728875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 
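The per-shard figures reported further down in this test's TEvPeriodicTableStats records (DataSize: 13940, RowCount: 100) are the same numbers YDB exposes to users through the .sys virtual tables; a hedged sketch of inspecting them — the column names are assumptions about the partition_stats schema:

    SELECT Path, RowCount, DataSize
    FROM `/Root/.sys/partition_stats`;  -- per-partition stats as aggregated by schemeshard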
2025-03-28T09:03:02.728946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:02.733146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:02.733326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:02.733562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:02.733630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:02.733685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:02.733732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:02.740082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:02.740155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:02.740221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:02.742385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:02.742445Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:02.742505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:02.742552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:02.746589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:02.756186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:02.756397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:02.757628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:02.757800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:02.757864Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:02.758178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:02.758245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:02.758459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:02.758588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:02.767549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:02.767635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:02.767820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:02.767864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:02.768252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:02.768308Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:02.768421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:02.768461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:02.768506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:02.768545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:02.768602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:02.768654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:02.768701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:02.768750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:02.768835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:02.768878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:02.768915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:02.774264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:02.774438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:02.774482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
EvMeasureSelfResponseTime 2025-03-28T09:05:00.297134Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:05:00.671775Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:05:00.671862Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:05:00.671952Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:124:2150], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:05:00.671977Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:05:01.113337Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:05:01.113413Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:05:01.113504Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:124:2150], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:05:01.113536Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:05:01.187087Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:310:2297]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-03-28T09:05:01.187176Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-03-28T09:05:01.187279Z node 3 :TX_DATASHARD TRACE: No cleanup at 72075186233409546 outdated step 5000002 last cleanup 0 2025-03-28T09:05:01.187361Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:05:01.187401Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186233409546 2025-03-28T09:05:01.187461Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186233409546 has no attached operations 2025-03-28T09:05:01.187500Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186233409546 2025-03-28T09:05:01.187680Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:310:2297]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:05:01.188040Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-03-28T09:05:01.189165Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:310:2297], Recipient [3:124:2150]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 7 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 
HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 142 Memory: 124232 Storage: 14156 GroupWriteThroughput { GroupID: 0 Channel: 0 Throughput: 263 } GroupWriteThroughput { GroupID: 0 Channel: 1 Throughput: 444 } GroupWriteIops { GroupID: 0 Channel: 0 Iops: 1 } } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 41 TableOwnerId: 72057594046678944 FollowerId: 2025-03-28T09:05:01.189222Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-28T09:05:01.189295Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0142 2025-03-28T09:05:01.189433Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-28T09:05:01.189481Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-28T09:05:01.191349Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [3:1061:3005], Recipient [3:310:2297]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-03-28T09:05:01.235925Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T09:05:01.236009Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T09:05:01.236045Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-03-28T09:05:01.236129Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-28T09:05:01.236167Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-28T09:05:01.236268Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-03-28T09:05:01.236337Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-03-28T09:05:01.236384Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-03-28T09:05:01.236455Z node 3 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:01:20.000000Z at schemeshard 72057594046678944 2025-03-28T09:05:01.236597Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects 
ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:05:01.247098Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T09:05:01.247184Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T09:05:01.247219Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:05:01.545841Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:05:01.545929Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:05:01.546019Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:124:2150], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:05:01.546052Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:05:01.940649Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:05:01.940736Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:05:01.940840Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:124:2150], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:05:01.940880Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:05:02.359677Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:05:02.359760Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:05:02.359855Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:124:2150], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:05:02.359890Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:05:02.728084Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:05:02.728166Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:05:02.728263Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:124:2150], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:05:02.728294Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:05:03.114566Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:05:03.114632Z 
node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:05:03.114727Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:124:2150], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:05:03.114776Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:05:03.146570Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:310:2297]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:05:03.498866Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:05:03.498956Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:05:03.499057Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:124:2150], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:05:03.499092Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime >> KqpScanArrowInChanels::AggregateWithFunction [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum >> KqpScanArrowFormat::AggregateNoColumn [GOOD] >> KqpScanArrowFormat::AggregateEmptySum |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> HttpRequest::Analyze [GOOD] >> KqpPg::DeleteWithQueryService-useSink [GOOD] >> TInterconnectTest::TestCrossConnect [GOOD] >> TInterconnectTest::TestManyEventsWithReconnect >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink >> TInterconnectTest::TestManyEventsWithReconnect [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze [GOOD] Test command err: 2025-03-28T09:04:51.302904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:51.303203Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:51.303267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0010ee/r3tmp/tmpawU1cc/pdisk_1.dat 2025-03-28T09:04:51.758538Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6335, node 1 2025-03-28T09:04:52.017145Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:52.017204Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:52.017233Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:52.017702Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:04:52.020005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:52.134217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:52.134348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:52.150806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19338 2025-03-28T09:04:52.761337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:56.364468Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:04:56.411085Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:56.411214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:56.454121Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:04:56.457899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:56.748681Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.749355Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.749974Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.750143Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.750427Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.750533Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.750615Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.752744Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.752931Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.938006Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:56.938097Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:56.956798Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:57.146394Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:57.220341Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:04:57.220444Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:04:57.252784Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:04:57.252915Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:04:57.253077Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:04:57.253133Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:04:57.253187Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:04:57.253230Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:04:57.253271Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:04:57.253325Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:04:57.253733Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:04:57.288105Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:04:57.288260Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:04:57.297577Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:04:57.304764Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:04:57.304983Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:04:57.311978Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:04:57.334020Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:04:57.334081Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:04:57.334161Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:04:57.360592Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:04:57.370103Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:04:57.370233Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:04:57.555360Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:04:57.771823Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:04:57.852310Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:04:58.870228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:58.870364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:58.892489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:04:59.294962Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:04:59.295248Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:04:59.295653Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:04:59.295823Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:04:59.295948Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:04:59.296079Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:04:59.296208Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:04:59.296354Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:04:59.296525Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:04:59.296658Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:04:59.296793Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:04:59.296926Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:04:59.356628Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:04:59.356734Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=T ... et_id=72075186224037905;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:05:00.351063Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:00.358197Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:00.363899Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:00.371702Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:00.378434Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:00.385153Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:00.390193Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:00.394514Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:00.400213Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:00.405127Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:01.715045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3057:3173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:01.736107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:01.740323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037897 2025-03-28T09:05:02.503097Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:02.503949Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:02.505184Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:02.505779Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:02.506248Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:02.506743Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:02.507210Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:02.509158Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:02.509756Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:02.510240Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:03.543162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3813:3230], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:03.543623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:03.548577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-03-28T09:05:03.621475Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:03.622137Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:03.626504Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:03.627292Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:03.627832Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:03.629295Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:03.630153Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:03.631062Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:03.631583Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:03.634911Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000017s 2025-03-28T09:05:05.725115Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:4098:4251] 2025-03-28T09:05:05.730445Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4095:3317] , Record { OperationId: "\000\000\000\000\026\\u7\214\331\036n\357\204\250\177" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } } 2025-03-28T09:05:05.730538Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId=\u7 n 2025-03-28T09:05:05.730585Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId=\u7 n , PathId [OwnerId: 72075186224037897, LocalPathId: 4] Answer: 'Analyze sent. 
OperationId: 00000005jwemvrsp8ydvqr9a3z' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> YdbSdkSessions::TestSessionPool [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::DeleteWithQueryService-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 16074, MsgBus: 8717 2025-03-28T09:03:26.191908Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783437687291452:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:26.191968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001301/r3tmp/tmpB6JbEA/pdisk_1.dat
2025-03-28T09:03:26.834202Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:26.838552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:26.838643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:26.840551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16074, node 1 2025-03-28T09:03:26.928675Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:26.928695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:26.928701Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:26.928851Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8717 TClient is connected to server localhost:8717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:27.525247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:29.526059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783450572193977:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:29.526180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:29.566790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:03:29.697973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783450572194113:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:29.698051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:29.698255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783450572194118:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:29.701742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:03:29.712052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783450572194120:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:03:29.813713Z node 1 :TX_PROXY ERROR: Actor# [1:7486783450572194171:2419] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 1 1 1 Trying to start YDB, gRPC: 9095, MsgBus: 4039 2025-03-28T09:03:31.447424Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783456881022788:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:31.447461Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001301/r3tmp/tmp31MfOp/pdisk_1.dat 2025-03-28T09:03:31.566040Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9095, node 2 2025-03-28T09:03:31.598617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:31.598713Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:31.600170Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:03:31.666626Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:31.666651Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:31.666659Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:31.666782Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4039 TClient is connected to server localhost:4039 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:32.153500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:03:32.160307Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:03:34.727646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783469765925321:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:34.727727Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:34.745895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:03:34.841187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783469765925455:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:34.841264Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:34.841629Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783469765925460:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:34.845635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:03:34.859466Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-03-28T09:03:34.859668Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486783469765925462:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:03:34.912468Z node 2 :TX_PROXY ERROR: Actor# [2:7486783469765925513:2415] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 1 1 1 Trying to start YDB, gRPC: 20817, MsgBus: 24713 2025-03-28T09:03:36.717760Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486783478693545225:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:36.717805Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001301/r3tmp/tmpSOlUPT/pdisk_1.dat 2025-03-28T09:03:36.834604Z node 3 :IMPORT WARN: Table profiles we ... 3:2430] txid# 281474976710664, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } Trying to start YDB, gRPC: 61588, MsgBus: 7462 2025-03-28T09:04:51.861151Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7486783803475041465:2122];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:51.861288Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001301/r3tmp/tmpVFE6bz/pdisk_1.dat 2025-03-28T09:04:52.007848Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:52.046189Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:52.046347Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:52.050732Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61588, node 11 2025-03-28T09:04:52.119396Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:52.119424Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:52.119439Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:52.119625Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7462 TClient is connected to server localhost:7462 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:53.121438Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:56.866820Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7486783803475041465:2122];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:56.866939Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:57.616133Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7486783829244845847:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:57.616269Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:57.636026Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:04:57.723283Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7486783829244845949:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:57.723440Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:57.723980Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7486783829244845955:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:57.729218Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:04:57.747138Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7486783829244845957:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:04:57.833346Z node 11 :TX_PROXY ERROR: Actor# [11:7486783829244846008:2401] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 29209, MsgBus: 30031 2025-03-28T09:04:59.430849Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7486783837076348200:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:59.430941Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001301/r3tmp/tmpPN8nEX/pdisk_1.dat 2025-03-28T09:04:59.592952Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:59.631847Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:59.632005Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:59.636096Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29209, node 12 2025-03-28T09:04:59.720078Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:59.720117Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:59.720131Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:59.720319Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30031 TClient is connected to server localhost:30031 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:00.800689Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:05:04.431124Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7486783837076348200:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:04.431247Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:05.100491Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7486783862846152644:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:05.100635Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:05.126190Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:05:05.248281Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7486783862846152749:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:05.248427Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:05.255185Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7486783862846152755:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:05.269976Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:05:05.296774Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7486783862846152757:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:05:05.367986Z node 12 :TX_PROXY ERROR: Actor# [12:7486783862846152808:2403] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> YdbSdkSessions::TestMultipleSessions [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] >> KqpScanArrowInChanels::AggregateByColumn [GOOD] |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |77.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |77.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |77.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSessionPool [GOOD] Test command err: 2025-03-28T09:05:03.170989Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783851614715098:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:03.173661Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d13/r3tmp/tmpENoMwX/pdisk_1.dat 2025-03-28T09:05:03.642340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:03.642489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:03.648600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:03.651415Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2000, node 1 2025-03-28T09:05:03.809150Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:03.809170Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:03.809186Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:03.809311Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17805 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:04.059303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:06.341726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783864499618015:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:06.341813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783864499618026:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:06.341865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:06.346027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:05:06.382303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783864499618029:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:05:06.440684Z node 1 :TX_PROXY ERROR: Actor# [1:7486783864499618107:2684] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestMultipleSessions [GOOD] Test command err: 2025-03-28T09:05:03.454611Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783853177430621:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:03.454698Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d02/r3tmp/tmp3cNpIN/pdisk_1.dat 2025-03-28T09:05:04.048764Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:04.064768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:04.064882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:04.073548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17890, node 1 2025-03-28T09:05:04.233511Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:04.233531Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:04.233539Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:04.233649Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30198 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:04.533819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:07.281611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783870357300837:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:07.281630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783870357300832:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:07.281761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:07.286393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:05:07.289051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783870357300878:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:07.289207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783870357300880:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:07.289341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:07.298581Z node 1 :TX_PROXY ERROR: Actor# [1:7486783870357300886:2636] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:05:07.324295Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783870357300884:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:05:07.324382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783870357300846:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:05:07.405070Z node 1 :TX_PROXY ERROR: Actor# [1:7486783870357300991:2700] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:07.433288Z node 1 :TX_PROXY ERROR: Actor# [1:7486783870357301005:2709] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] Test command err: Starting iteration 0 Starting iteration 1 Starting iteration 2 Starting iteration 3 Starting iteration 4 Starting iteration 5 Starting iteration 6 Starting iteration 7 Starting iteration 8 Starting iteration 9 Starting iteration 10 Starting iteration 11 Starting iteration 12 Starting iteration 13 Starting iteration 14 Starting iteration 15 Starting iteration 16 Starting iteration 17 Starting iteration 18 Starting iteration 19 Starting iteration 20 Starting iteration 21 Starting iteration 22 Starting iteration 23 Starting iteration 24 Starting iteration 25 Starting iteration 26 Starting iteration 27 Starting iteration 28 Starting iteration 29 Starting iteration 30 Starting iteration 31 Starting iteration 32 Starting iteration 33 Starting iteration 34 Starting iteration 35 Starting iteration 36 Starting iteration 37 Starting iteration 38 Starting iteration 39 Starting iteration 40 Starting iteration 41 Starting iteration 42 Starting iteration 43 Starting iteration 44 Starting iteration 45 Starting iteration 46 Starting iteration 47 Starting iteration 48 Starting iteration 49 0 0 0 1 0 3 0 7 0 15 0 31 0 63 0 127 0 255 0 511 0 1023 0 2047 0 4095 0 8191 0 16383 0 32767 0 65535 1 0 1 1 1 3 1 7 1 15 1 31 1 63 1 127 1 255 1 511 1 1023 1 2047 1 4095 1 8191 1 16383 1 32767 1 65535 3 0 3 1 3 3 3 7 3 15 3 31 3 63 3 127 3 255 3 511 3 1023 3 2047 3 4095 3 8191 3 16383 3 32767 3 65535 7 0 7 1 7 3 7 7 7 15 7 31 7 63 7 127 7 255 7 511 7 1023 7 2047 7 4095 7 8191 7 16383 7 32767 7 65535 15 0 15 1 15 3 15 7 15 15 15 31 15 63 15 127 15 255 15 511 15 1023 15 2047 15 4095 15 8191 15 16383 15 32767 15 65535 31 0 31 1 31 3 31 7 31 15 31 31 31 63 31 127 31 255 31 511 31 1023 31 2047 31 4095 31 8191 31 16383 31 32767 31 65535 63 0 63 1 63 3 63 7 63 15 63 31 63 63 63 127 63 255 63 511 63 1023 63 2047 63 4095 63 8191 63 16383 63 32767 63 65535 127 0 127 1 127 3 127 7 127 15 127 31 127 63 127 127 127 255 127 511 127 1023 127 2047 127 4095 127 8191 127 16383 127 32767 127 65535 255 0 255 1 255 3 255 7 255 15 255 31 255 63 255 127 255 255 255 511 255 1023 255 2047 255 4095 255 8191 255 16383 255 32767 255 65535 511 0 511 1 511 3 511 7 511 15 511 31 511 63 511 127 511 255 511 511 511 1023 511 2047 511 4095 511 8191 511 16383 511 32767 511 65535 1023 0 1023 1 1023 3 1023 7 1023 15 1023 31 1023 63 1023 127 1023 255 1023 511 1023 1023 1023 2047 1023 4095 1023 8191 1023 16383 1023 32767 1023 65535 2047 0 2047 1 2047 3 2047 7 2047 15 2047 31 2047 63 2047 127 2047 255 2047 511 2047 1023 2047 2047 2047 4095 2047 8191 2047 16383 2047 32767 2047 65535 4095 0 4095 1 4095 3 4095 7 4095 15 
4095 31 4095 63 4095 127 4095 255 4095 511 4095 1023 4095 2047 4095 4095 4095 8191 4095 16383 4095 32767 4095 65535 8191 0 8191 1 8191 3 8191 7 8191 15 8191 31 8191 63 8191 127 8191 255 8191 511 8191 1023 8191 2047 8191 4095 8191 8191 8191 16383 8191 32767 8191 65535 16383 0 16383 1 16383 3 16383 7 16383 15 16383 31 16383 63 16383 127 16383 255 16383 511 16383 1023 16383 2047 16383 4095 16383 8191 16383 16383 16383 32767 16383 65535 32767 0 32767 1 32767 3 32767 7 32767 15 32767 31 32767 63 32767 127 32767 255 32767 511 32767 1023 32767 2047 32767 4095 32767 8191 32767 16383 32767 32767 32767 65535 65535 0 65535 1 65535 3 65535 7 65535 15 65535 31 65535 63 65535 127 65535 255 65535 511 65535 1023 65535 2047 65535 4095 65535 8191 65535 16383 65535 32767 65535 65535 >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts >> YdbSdkSessions::MultiThreadSync [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateByColumn [GOOD] Test command err: Trying to start YDB, gRPC: 62937, MsgBus: 24898 2025-03-28T09:04:40.878272Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783753476792621:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:40.878347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016b5/r3tmp/tmpw9wyhA/pdisk_1.dat 2025-03-28T09:04:41.419004Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:41.442291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:41.442402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:41.450440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62937, node 1 2025-03-28T09:04:41.552741Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:41.552775Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:41.552782Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:41.552886Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24898 TClient is connected to server localhost:24898 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:42.074603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:42.102374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:42.271704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:42.423134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:42.501522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:44.251705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783770656663382:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:44.251825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:44.608904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:04:44.650550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:04:44.731148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:04:44.811736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:04:44.851527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:04:44.913966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:04:44.975114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783770656663905:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:44.975223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:44.975335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783770656663910:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:44.979874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:04:44.991355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783770656663912:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:04:45.090328Z node 1 :TX_PROXY ERROR: Actor# [1:7486783774951631264:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:45.878784Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783753476792621:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:45.932184Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:46.495439Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152686527, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 12604, MsgBus: 63312 2025-03-28T09:04:47.655337Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783784692828741:2203];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016b5/r3tmp/tmpNQZowj/pdisk_1.dat 2025-03-28T09:04:47.779114Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:04:47.915896Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:47.944525Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:47.944609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:47.946043Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12604, node 2 2025-03-28T09:04:48.026508Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:48.026533Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:48.026541Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:48.026667Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63312 TClient is connected to server localhost:63312 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:48.551127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:48.560162Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:04:48.565821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:48.656053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:48.835688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594 ... :04:57.798304Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:04:57.831823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:04:57.864758Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:04:57.912643Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:04:57.944149Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:04:57.977421Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:04:58.064339Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783831778569938:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:58.064464Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:58.064800Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783831778569943:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:58.068218Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:04:58.078302Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486783831778569945:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:04:58.145022Z node 3 :TX_PROXY ERROR: Actor# [3:7486783831778570000:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:59.168925Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486783814598698469:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:59.169022Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:00.285143Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152699715, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 20085, MsgBus: 25112 2025-03-28T09:05:01.168957Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783845654619304:2123];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:01.169250Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016b5/r3tmp/tmpmMyMiX/pdisk_1.dat 2025-03-28T09:05:01.323519Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:01.354047Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:01.354143Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:01.356906Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20085, node 4 2025-03-28T09:05:01.411387Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:01.411415Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:01.411425Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:01.411597Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25112 TClient is connected to server localhost:25112 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:02.002405Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:02.021125Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:05:02.034875Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:02.121202Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:02.319048Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:02.409074Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:04.786861Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783858539522883:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:04.786988Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:04.843556Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:05:04.901773Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:05:04.983947Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:05:05.036329Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:05:05.078453Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:05:05.154047Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:05:05.248754Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783862834490700:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:05.248866Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:05.249290Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783862834490705:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:05.254638Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:05:05.268795Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486783862834490707:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:05:05.353646Z node 4 :TX_PROXY ERROR: Actor# [4:7486783862834490763:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:06.169029Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486783845654619304:2123];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:06.169100Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:08.281508Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152707352, txId: 281474976715671] shutting down |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> YdbSdkSessions::TestActiveSessionCountAfterBadSession [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError >> AssignTxId::Basic >> Cdc::RenameTable [GOOD] >> Cdc::InitialScan_WithTopicSchemeTx |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [GOOD] Test command err: 2025-03-28T09:05:04.114158Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783856821476577:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:04.114226Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cfe/r3tmp/tmpOJFseX/pdisk_1.dat 2025-03-28T09:05:04.633543Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:04.657986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:04.658146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:04.664168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20405, node 1 2025-03-28T09:05:04.795417Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:04.795444Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:04.795451Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:04.795579Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9349 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:05.221405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:07.719460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783869706379317:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:07.719581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:08.082677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:05:08.382995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783874001346815:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:08.383079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:08.383493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783874001346820:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:08.388212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:05:08.421921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783874001346822:2358], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:05:08.484681Z node 1 :TX_PROXY ERROR: Actor# [1:7486783874001346903:2815] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:08.620446Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqdzyyry8p9tgtx6tph7j1fg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWJmOThhZjMtYjQ3OTJmMGYtYWM3ZWJmMTItMmVlMjIxNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:08.692673Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqdzyz2ff86fg57hw17c235g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTk3YzcwNjAtNjFiOGI4M2EtNmM5ODEzNmEtZTQwNDQyMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] >> KqpScanArrowFormat::AggregateWithFunction [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateWithFunction [GOOD] Test command err: Trying to start YDB, gRPC: 16039, MsgBus: 5471 2025-03-28T09:04:41.467885Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783757419561183:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:41.481133Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016a0/r3tmp/tmpt55kdE/pdisk_1.dat 2025-03-28T09:04:42.079282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:42.084609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:42.088718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:42.154629Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16039, node 1 2025-03-28T09:04:42.247336Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:42.247373Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-03-28T09:04:42.247380Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:42.247547Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5471 TClient is connected to server localhost:5471 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:42.810437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:42.877564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:43.069757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:43.326993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:43.442382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:45.230837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783774599432002:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:45.230963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:45.589269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:04:45.621407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:04:45.652958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:04:45.685317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:04:45.720547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:04:45.793998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:04:45.850828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783774599432514:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:45.850948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:45.858840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783774599432520:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:45.869899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:04:45.892376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783774599432522:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:04:45.970593Z node 1 :TX_PROXY ERROR: Actor# [1:7486783774599432577:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:46.464141Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783757419561183:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:46.464254Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:47.110454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:04:48.001939Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152688032, txId: 281474976710675] shutting down 864000000000 Trying to start YDB, gRPC: 30615, MsgBus: 62908 2025-03-28T09:04:48.802441Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783789497993531:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:48.802500Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016a0/r3tmp/tmpOUgS4O/pdisk_1.dat 2025-03-28T09:04:48.932522Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30615, node 2 2025-03-28T09:04:48.974810Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:48.974908Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:48.979111Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:49.095329Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:49.095355Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:49.095362Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:49.095488Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62908 TClient is connected to server localhost:62908 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:49.592702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:49.603600Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:04:49.614072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:49.725712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 720575940 ... h pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:58.784307Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:04:58.819962Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:04:58.850499Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:04:58.924990Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:04:58.964456Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:04:59.006350Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:04:59.051630Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783834674676594:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:59.051732Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:59.052590Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783834674676599:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:59.056075Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:04:59.066776Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486783834674676601:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:04:59.132179Z node 3 :TX_PROXY ERROR: Actor# [3:7486783834674676654:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:00.187176Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486783817494805203:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:00.250150Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:02.868853Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152700947, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 21221, MsgBus: 5709 2025-03-28T09:05:03.770827Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783852856476639:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:03.771843Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016a0/r3tmp/tmpN6x8b3/pdisk_1.dat 2025-03-28T09:05:03.882653Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:03.917473Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:03.917564Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:03.920283Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21221, node 4 2025-03-28T09:05:04.013137Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:04.013162Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:04.013170Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:04.013335Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5709 TClient is connected to server localhost:5709 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:04.576111Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:04.590228Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:04.694984Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:04.947243Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:05.048173Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:07.910473Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783870036347619:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:07.910582Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:07.965153Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:05:08.041488Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:05:08.133621Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:05:08.195771Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:05:08.280063Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:05:08.371746Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:05:08.524771Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783874331315448:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:08.524934Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:08.525348Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783874331315454:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:08.530139Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:05:08.557678Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486783874331315456:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:05:08.613927Z node 4 :TX_PROXY ERROR: Actor# [4:7486783874331315510:3459] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:08.771976Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486783852856476639:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:08.772053Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:10.839987Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152710390, txId: 281474976715671] shutting down >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties [GOOD] Test command err: Trying to start YDB, gRPC: 8789, MsgBus: 31996 2025-03-28T09:04:28.308062Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783701777094946:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:28.329468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001492/r3tmp/tmpE7MtUQ/pdisk_1.dat 2025-03-28T09:04:28.964032Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:28.973233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:28.975335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:28.977827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8789, node 1 2025-03-28T09:04:29.160853Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:29.160873Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:29.160881Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:29.162150Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31996 TClient is connected to server localhost:31996 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:30.325533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:30.405438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:30.586463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:30.783546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:04:30.871885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:04:32.593879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783718956965848:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:32.593991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:33.023771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:04:33.121404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:04:33.167891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:04:33.211842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:04:33.259260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:04:33.314844Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783701777094946:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:33.314945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:33.367113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:04:33.461667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783723251933662:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:33.461773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:33.466856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783723251933668:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:33.471378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:04:33.496769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783723251933670:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:04:33.567242Z node 1 :TX_PROXY ERROR: Actor# [1:7486783723251933727:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:34.846576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2025-03-28T09:04:35.577375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:04:36.289536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:1, at schemeshard: 72057594046644480 2025-03-28T09:04:36.861541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:04:37.412577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:04:37.994940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-03-28T09:04:38.570638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:04:38.614041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:04:41.014312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710716:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13721, MsgBus: 3255 2025-03-28T09:04:42.119318Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783764754177208:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:42.119370Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001492/r3tmp/tmp0EUgbj/pdisk_1.dat 2025-03-28T09:04:42.338268Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:42.366958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:42.367041Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:42.369873Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 13721, node 2 2025-03-28T09:04:42.475295Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:42.475316Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:42.475332Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:42.475451Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3255 TClient is connected to server localhost:3255 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathI ... ype: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-03-28T09:04:48.535066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-03-28T09:04:49.063018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:1, at schemeshard: 72057594046644480 2025-03-28T09:04:49.655311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-03-28T09:04:50.335495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-03-28T09:04:50.902956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-03-28T09:04:51.465213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:04:51.529277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-28T09:04:55.470412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715732:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9747, MsgBus: 25987 2025-03-28T09:04:56.812118Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486783821797242405:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:56.812201Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001492/r3tmp/tmpZcwmXj/pdisk_1.dat 2025-03-28T09:04:56.941685Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:56.977333Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown 
-> Disconnected 2025-03-28T09:04:56.977406Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:56.979302Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9747, node 3 2025-03-28T09:04:57.024532Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:57.024555Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:57.024564Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:57.024705Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25987 TClient is connected to server localhost:25987 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:57.583568Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:57.598344Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:57.679923Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:57.878037Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:57.990865Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:00.664869Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783838977113361:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:00.664997Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:00.729791Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:05:00.765724Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:05:00.803898Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:05:00.869738Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:05:00.908773Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:05:00.980882Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:05:01.039872Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783843272081176:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:01.039975Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:01.040204Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783843272081181:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:01.043845Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:05:01.055525Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486783843272081183:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:05:01.123078Z node 3 :TX_PROXY ERROR: Actor# [3:7486783843272081236:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:01.812397Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486783821797242405:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:01.812505Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:02.324211Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-03-28T09:05:03.039020Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-03-28T09:05:03.683203Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:1, at schemeshard: 72057594046644480 2025-03-28T09:05:04.276028Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-03-28T09:05:04.919310Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-03-28T09:05:05.546232Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715694:0, at schemeshard: 72057594046644480 2025-03-28T09:05:06.208276Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:05:06.256588Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-28T09:05:10.839267Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715733:0, at schemeshard: 72057594046644480 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 >> KqpScanArrowFormat::AggregateEmptySum [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [GOOD] >> KqpPg::CheckPgAutoParams+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 17964, MsgBus: 11941 2025-03-28T09:04:38.724599Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783744089318339:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:38.724632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016c5/r3tmp/tmp5I8Ioz/pdisk_1.dat 2025-03-28T09:04:39.428315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:39.428404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:39.440337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:39.476891Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17964, node 1 2025-03-28T09:04:39.579363Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:39.579385Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:39.579397Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:39.579523Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11941 TClient is connected to server localhost:11941 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:40.323551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:40.346535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:40.514661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:40.755433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:04:40.862277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:42.816705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783761269189239:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:42.816826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:43.232668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:04:43.275547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:04:43.322192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:04:43.379572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:04:43.449802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:04:43.509829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:04:43.609330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783765564157054:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:43.609452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:43.610729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783765564157059:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:43.615916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:04:43.632427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783765564157061:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:04:43.726674Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783744089318339:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:43.726756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:43.740088Z node 1 :TX_PROXY ERROR: Actor# [1:7486783765564157119:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:48.075766Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152685848, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 11256, MsgBus: 28619 2025-03-28T09:04:48.891087Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783787715506870:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:48.891151Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016c5/r3tmp/tmpjCtwj0/pdisk_1.dat 2025-03-28T09:04:49.039785Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:49.078427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:49.078520Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:49.081400Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11256, node 2 2025-03-28T09:04:49.267279Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:49.267309Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:49.267317Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:49.267429Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28619 TClient is connected to server localhost:28619 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:49.828712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:49.836195Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:04:49.841827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:04:49.919500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:04:50.110538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594 ... 28T09:05:01.573132Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:05:01.633982Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:05:01.674713Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:05:01.757037Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:05:01.831865Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:05:01.865969Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:05:01.949741Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783844243045043:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:01.949857Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:01.950238Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783844243045048:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:01.955128Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:05:01.969391Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486783844243045050:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:05:02.067345Z node 3 :TX_PROXY ERROR: Actor# [3:7486783848538012404:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:02.874800Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486783827063173710:2188];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:02.874879Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:04.257304Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152703775, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 4889, MsgBus: 4480 2025-03-28T09:05:05.344374Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783862080081322:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:05.344474Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016c5/r3tmp/tmpfNe3qt/pdisk_1.dat 2025-03-28T09:05:05.509162Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:05.509279Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:05.510297Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:05.510933Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4889, node 4 2025-03-28T09:05:05.595368Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:05.595393Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:05.595402Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:05.595560Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4480 TClient is connected to server localhost:4480 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:06.254953Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:06.270099Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:05:06.281657Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:06.451014Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:06.697185Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:06.847955Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:09.607256Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783879259952280:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.607420Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.656080Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:05:09.692289Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:05:09.737364Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:05:09.782462Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:05:09.831478Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:05:09.887662Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:05:09.969641Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783879259952792:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.969751Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.969752Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783879259952797:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.974443Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:05:09.989592Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486783879259952799:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:05:10.063565Z node 4 :TX_PROXY ERROR: Actor# [4:7486783883554920150:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:10.345683Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486783862080081322:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:10.345766Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:12.347032Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152711944, txId: 281474976715671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:224:2060] recipient: [1:218:2142] Leader for TabletID 72057594046678944 is [1:234:2152] sender: [1:235:2060] recipient: [1:218:2142] 2025-03-28T09:03:52.621086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:03:52.621179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:52.621226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:03:52.621279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:03:52.621322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:03:52.621355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:03:52.621410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:03:52.621493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:03:52.623105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:03:52.740936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:03:52.741016Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:52.753443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:03:52.753554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:03:52.753776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:03:52.766899Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:03:52.767320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:03:52.767997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:52.769886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:52.774024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:52.775479Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:52.775558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:52.775745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:03:52.775798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:52.775837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:03:52.775993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:03:52.790182Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:234:2152] sender: [1:350:2060] recipient: [1:17:2064] 2025-03-28T09:03:53.066271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:53.066533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:53.066906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:53.067174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:53.067293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:53.073040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:53.073248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:53.073501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:53.073615Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:53.073669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next 
state 2025-03-28T09:03:53.073711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:53.079906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:53.080005Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:03:53.080047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:53.087685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:53.087755Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:53.087807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:53.087872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:53.096760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:53.106906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:53.107170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:53.108191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:53.108371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 243 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:53.108420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:53.108782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:53.108836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:53.109008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:53.109123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:53.115843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:53.115909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-03-28T09:03:53.116117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:53.116173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:317:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:03:53.116276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:53.116320Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:53.116435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:53.116471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:53.116515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:53.116545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:53.116591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:53.116632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:53.116669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:53.116697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:53.116779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:53.116816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:53.116853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:53.119198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:53.119330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:53.119371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-03-28T09:05:13.522963Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-03-28T09:05:13.522991Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-03-28T09:05:13.523019Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-03-28T09:05:13.523096Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-03-28T09:05:13.523144Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:05:13.525581Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:05:13.526584Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-28T09:05:13.526624Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:05:13.526972Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-28T09:05:13.527007Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-28T09:05:13.527290Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-03-28T09:05:13.527348Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-03-28T09:05:13.527768Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:679:2505], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:05:13.527825Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:05:13.527866Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T09:05:13.528018Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:574:2400], Recipient [7:235:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2025-03-28T09:05:13.528050Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-28T09:05:13.528121Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-03-28T09:05:13.528230Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-28T09:05:13.528280Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:677:2503] 2025-03-28T09:05:13.528458Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:679:2505], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:05:13.528500Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:05:13.528540Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 
72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 2025-03-28T09:05:13.528959Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:552:2102], Recipient [7:235:2153] 2025-03-28T09:05:13.529005Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:05:13.531576Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 552 RawX2: 34359740470 } AllowCreateInTempDir: false } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:13.531910Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:05:13.532038Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-03-28T09:05:13.532233Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:05:13.534354Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/test/tmp/a/b\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:13.534574Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2025-03-28T09:05:13.534640Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-03-28T09:05:13.535074Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-03-28T09:05:13.535125Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-03-28T09:05:13.535487Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:685:2511], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:05:13.535536Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:05:13.535575Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T09:05:13.535705Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:574:2400], Recipient [7:235:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 107 2025-03-28T09:05:13.535741Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-28T09:05:13.535815Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 
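For reference, the propose that schemeshard rejects just above can be reassembled from the TTxOperationPropose message for txId 107. The sketch below is a text-format reconstruction with editorial comments, not the test's literal source; every field value is copied from the log entries above, and the AllowCreateInTempDir: true variant is likewise rejected in the txId 108 entries that follow.

    # Reconstructed from the TTxOperationPropose message for txId 107 (comments are editorial):
    Transaction {
      WorkingDir: "/MyRoot/test/tmp/a/b"               # the parent path is itself a temporary directory
      OperationType: ESchemeOpMkDir
      MkDir { Name: "tmp2" }
      TempDirOwnerActorId { RawX1: 552 RawX2: 34359740470 }
      AllowCreateInTempDir: false                      # with false, IgniteOperation answers
                                                       # StatusPreconditionFailed: "path is temporary"
    }
    TxId: 107
    TabletId: 72057594046678944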
2025-03-28T09:05:13.535924Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-03-28T09:05:13.535965Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [7:683:2509] 2025-03-28T09:05:13.536155Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:685:2511], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:05:13.536193Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:05:13.536231Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-03-28T09:05:13.536671Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [8:552:2102], Recipient [7:235:2153] 2025-03-28T09:05:13.536729Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:05:13.541158Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 552 RawX2: 34359740470 } AllowCreateInTempDir: true } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:13.541538Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:05:13.541600Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., at schemeshard: 72057594046678944 2025-03-28T09:05:13.541800Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:05:13.544072Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "Can\'t create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can\'t be created in another temporary directory." TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:13.544299Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. 
Temporary directory can't be created in another temporary directory., operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2025-03-28T09:05:13.544361Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-03-28T09:05:13.544817Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-03-28T09:05:13.544863Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-03-28T09:05:13.545240Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:691:2517], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:05:13.545298Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:05:13.545338Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T09:05:13.545474Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:574:2400], Recipient [7:235:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 108 2025-03-28T09:05:13.545512Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-28T09:05:13.545585Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-03-28T09:05:13.545690Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-03-28T09:05:13.545750Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:689:2515] 2025-03-28T09:05:13.545922Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:691:2517], Recipient [7:235:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:05:13.545955Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:05:13.545992Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 108 >> KqpPg::TableSelect-useSink [GOOD] >> KqpPg::TableInsert+useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 4438, MsgBus: 17674 2025-03-28T09:04:41.417683Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783760231653198:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:41.421104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00168e/r3tmp/tmpGr3TpW/pdisk_1.dat 2025-03-28T09:04:42.129665Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:42.132288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:42.132373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
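The "Resource pool default not found" / "Transaction ... completed, doublechecking" / "Check failed: ... path exist, request accepts it" triplet that recurs across these runs (nodes 1, 3 and 4 above, and again below) is the workload service bootstrapping its default pool. Condensed, with each quoted string taken verbatim from the surrounding entries and the interpretation editorial:

  1. TPoolFetcherActor / [Service]: "Resource pool default not found or you don't have access permissions" -- the pool does not exist yet on first fetch.
  2. ESchemeOpCreateResourcePool (e.g. opId 281474976715668:3) is proposed for /Root/.metadata/workload_manager/pools/default.
  3. TPoolCreatorActor: "Transaction ... completed, doublechecking" -- a retry is scheduled to re-read the pool after the create transaction.
  4. TX_PROXY: "Check failed: path ... error: path exist, request accepts it" (severity 1) -- a concurrent creator already made the path; the race appears benign, since the affected suites still finish with [GOOD].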
2025-03-28T09:04:42.137771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4438, node 1 2025-03-28T09:04:42.295347Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:42.295373Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:42.295386Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:42.295519Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17674 TClient is connected to server localhost:17674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:42.898435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:42.942835Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:04:42.968216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:43.229517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:43.516407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:43.599797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:45.468910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783777411524190:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:45.469350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:45.803115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:04:45.873951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:04:45.950886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:04:46.032282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:04:46.076633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:04:46.123110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:04:46.176613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783781706492009:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:46.176660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:46.176803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783781706492014:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:46.180914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:04:46.196845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783781706492016:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:04:46.285994Z node 1 :TX_PROXY ERROR: Actor# [1:7486783781706492072:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:04:46.416930Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783760231653198:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:46.416987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:04:48.514166Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152688032, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 4368, MsgBus: 24423 2025-03-28T09:04:49.406988Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783794738675880:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:04:49.407100Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00168e/r3tmp/tmpUUyxL4/pdisk_1.dat 2025-03-28T09:04:49.564810Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:49.578151Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:49.578231Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:49.579493Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4368, node 2 2025-03-28T09:04:49.631277Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:49.631309Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:49.631318Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:49.631454Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24423 TClient is connected to server localhost:24423 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:04:50.164183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:50.185785Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:04:50.208799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:50.294863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:04:50.518480Z node 2 :FLAT_TX_SCHEMESHARD WARN ... 28T09:05:00.256822Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:05:00.292332Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:05:00.325518Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:05:00.370446Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:05:00.404439Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:05:00.449730Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:05:00.530702Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783839883525845:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:00.530803Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:00.531886Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486783839883525851:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:00.535766Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:05:00.551296Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486783839883525853:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:05:00.625648Z node 3 :TX_PROXY ERROR: Actor# [3:7486783839883525907:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:01.465541Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486783822703654373:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:01.465634Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:05.199505Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152702823, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 6576, MsgBus: 9730 2025-03-28T09:05:06.120356Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783867823617332:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:06.120432Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00168e/r3tmp/tmpoZcA2e/pdisk_1.dat 2025-03-28T09:05:06.273754Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:06.307033Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:06.307137Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:06.309858Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6576, node 4 2025-03-28T09:05:06.467318Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:06.467345Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:06.467354Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:06.467503Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9730 TClient is connected to server localhost:9730 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:07.235579Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:07.244024Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:05:07.261442Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:07.388463Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:07.619702Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:07.723304Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:10.294004Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783885003488273:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:10.294116Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:10.363797Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:05:10.410635Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:05:10.446454Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:05:10.488021Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:05:10.527519Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:05:10.575777Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:05:10.634342Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783885003488785:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:10.634459Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:10.634937Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783885003488790:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:10.639034Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:05:10.650112Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486783885003488792:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:05:10.717851Z node 4 :TX_PROXY ERROR: Actor# [4:7486783885003488845:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:11.125176Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486783867823617332:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:11.125270Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:13.005679Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152712630, txId: 281474976710671] shutting down >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] >> AssignTxId::Basic [GOOD] >> DataShardSnapshots::RepeatableReadAfterSplitRace [GOOD] |77.5%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardSnapshots::PostMergeNotCompactedTooEarly >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic [GOOD] Test command err: 2025-03-28T09:05:11.005793Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783882519647021:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:11.005862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f9c/r3tmp/tmp5cNWzH/pdisk_1.dat 2025-03-28T09:05:11.405890Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:11.445040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:11.445147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:11.447070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16030 TServer::EnableGrpc on GrpcPort 18442, node 1 2025-03-28T09:05:11.672358Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:11.672380Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:11.672387Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:11.672495Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16030 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:12.131553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:14.655068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783899699516724:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.655177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:15.016376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:05:15.028431Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] OnActivateExecutor 2025-03-28T09:05:15.028597Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInitSchema] Execute 2025-03-28T09:05:15.035521Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInitSchema] Complete 2025-03-28T09:05:15.035662Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInit] Execute 2025-03-28T09:05:15.035891Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxInit] Complete 2025-03-28T09:05:15.035916Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] SwitchToWork 2025-03-28T09:05:15.060624Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 2 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:18442" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } 2025-03-28T09:05:15.060929Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxCreateReplication] Execute: NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 2 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:18442" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } 2025-03-28T09:05:15.061027Z node 1 :REPLICATION_CONTROLLER NOTICE: [controller 72075186224037888][TxCreateReplication] Add replication: rid# 1, pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:05:15.061683Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxCreateReplication] Complete 2025-03-28T09:05:15.064104Z node 1 :REPLICATION_CONTROLLER TRACE: [TenantResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root/replication TableId: [72057594046644480:2:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindReplication DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:05:15.065225Z node 1 :REPLICATION_CONTROLLER TRACE: [TenantResolver][rid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 
1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:05:15.065348Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvResolveTenantResult { ReplicationId: 1 Tenant: /Root Sucess: 1 } 2025-03-28T09:05:15.065360Z node 1 :REPLICATION_CONTROLLER NOTICE: [controller 72075186224037888] Tenant resolved: rid# 1, tenant# /Root 2025-03-28T09:05:15.065396Z node 1 :REPLICATION_CONTROLLER INFO: [controller 72075186224037888] Discover tenant nodes: tenant# /Root 2025-03-28T09:05:15.066022Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvDiscovery::TEvDiscoveryData 2025-03-28T09:05:15.066091Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888] Create session: nodeId# 1 TClient::Ls request: /Root/replication TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "replication" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeReplication CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152715108 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ReplicationVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsIns... 
(TRUNCATED) 2025-03-28T09:05:15.081949Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 1 TxId: 0 } 2025-03-28T09:05:15.082030Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 0 2025-03-28T09:05:15.082079Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 1, assigned# 0, allocated# 0, exhausted# 1 2025-03-28T09:05:15.082174Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-03-28T09:05:15.082219Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 5 2025-03-28T09:05:15.082776Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-03-28T09:05:15.083126Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 0 } 2025-03-28T09:05:15.083159Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-03-28T09:05:15.083194Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-03-28T09:05:15.083433Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 18446744073709551615 } 2025-03-28T09:05:15.083455Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-03-28T09:05:15.083476Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2025-03-28T09:05:15.083820Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 10000 TxId: 0 } 2025-03-28T09:05:15.083869Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2025-03-28T09:05:15.084375Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2025-03-28T09:05:15.084705Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 5000 TxId: 0 } 2025-03-28T09:05:15.084760Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 2, allocated# 3 2025-03-28T09:05:15.084801Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2025-03-28T09:05:15.085132Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 20000 TxId: 0 } Versions { Step: 30000 TxId: 0 } Versions { Step: 40000 TxId: 0 } 2025-03-28T09:05:15.085192Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 3, assigned# 2, allocated# 3 2025-03-28T09:05:15.085507Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 0, 
exhausted# 0 2025-03-28T09:05:15.085597Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-03-28T09:05:15.085622Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 0, assigned# 5, allocated# 5 2025-03-28T09:05:15.085673Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 2025-03-28T09:05:15.085871Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 50000 TxId: 0 } 2025-03-28T09:05:15.085899Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 5, allocated# 5 2025-03-28T09:05:15.086161Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 2025-03-28T09:05:15.136039Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2025-03-28T09:05:15.136091Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/table, status# SCHEME_ERROR, issues# {
: Error: Path not found } 2025-03-28T09:05:15.136240Z node 1 :REPLICATION_CONTROLLER TRACE: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2025-03-28T09:05:15.136354Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2025-03-28T09:05:15.136394Z node 1 :REPLICATION_CONTROLLER ERROR: [controller 72075186224037888][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /Root/table: SCHEME_ERROR ({
: Error: Path not found }) 2025-03-28T09:05:15.140549Z node 1 :REPLICATION_CONTROLLER DEBUG: [controller 72075186224037888][TxDiscoveryTargetsResult] Complete >> KqpPg::Returning-useSink [GOOD] >> KqpPg::SelectIndex+useSink >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 >> YdbIndexTable::MultiShardTableTwoIndexes >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] Test command err: 2025-03-28T09:03:17.560668Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783397287090335:2146];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:17.560788Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ce0/r3tmp/tmpttavKO/pdisk_1.dat 2025-03-28T09:03:18.014574Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:18.065397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:18.065559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:18.068219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4938, node 1 2025-03-28T09:03:18.123179Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:18.123199Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:18.123206Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:18.123321Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19292 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:18.417731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:18.433746Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:03:20.747171Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-28T09:03:20.747282Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783410171992773:2329], Start check tables existence, number paths: 2 2025-03-28T09:03:20.750548Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTg1ZjBiM2YtMjZhYTljMTgtMTg1ODk5MGMtOWE4NzRmOGU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MTg1ZjBiM2YtMjZhYTljMTgtMTg1ODk5MGMtOWE4NzRmOGU= 2025-03-28T09:03:20.755040Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-03-28T09:03:20.755085Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-28T09:03:20.755105Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-28T09:03:20.755183Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783410171992773:2329], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-28T09:03:20.755243Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783410171992773:2329], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-28T09:03:20.755279Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783410171992773:2329], Successfully finished 2025-03-28T09:03:20.755397Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTg1ZjBiM2YtMjZhYTljMTgtMTg1ODk5MGMtOWE4NzRmOGU=, ActorId: [1:7486783410171992789:2330], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:20.755661Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-28T09:03:20.763828Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783410171992791:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:20.768358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:03:20.771188Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783410171992791:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-28T09:03:20.774551Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783410171992791:2301], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-28T09:03:20.793029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783410171992791:2301], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:03:20.882824Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783410171992791:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:20.890652Z node 1 :TX_PROXY ERROR: Actor# [1:7486783410171992843:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:03:20.890773Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783410171992791:2301], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-28T09:03:20.894238Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: Root, PoolId: sample_pool_id 2025-03-28T09:03:20.894272Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id Root 2025-03-28T09:03:20.894358Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783410171992850:2331], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-03-28T09:03:20.896137Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783410171992850:2331], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-28T09:03:20.896239Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2025-03-28T09:03:20.896290Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-03-28T09:03:20.896666Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7486783410171992859:2332], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-03-28T09:03:20.898326Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7486783410171992859:2332], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-03-28T09:03:20.906348Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-03-28T09:03:20.906377Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-28T09:03:20.906442Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783410171992871:2334], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-03-28T09:03:20.906446Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTg1ZjBiM2YtMjZhYTljMTgtMTg1ODk5MGMtOWE4NzRmOGU=, ActorId: [1:7486783410171992789:2330], ActorState: ReadyState, TraceId: 01jqdzvnt95sw2rtm7aqfscnk3, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: DROP RESOURCE POOL sample_pool_id; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-03-28T09:03:20.908304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783410171992871:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:20.908405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:21.170789Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7486783410171992859:2332], DatabaseId: Root, PoolId: sample_pool_id, Got delete notification 2025-03-28T09:03:21.175655Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MTg1ZjBiM2YtMjZhYTljMTgtMTg1ODk5MGMtOWE4NzRmOGU=, ActorId: [1:7486783410171992789:2330], ActorState: ExecuteState, TraceId: 01jqdzvnt95sw2rtm7aqfscnk3, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7486783410171992880:2330] WorkloadServiceCleanup: 0 2025-03-28T09:03:21.177077Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTg1ZjBiM2YtMjZhYTljMTgtMTg1ODk5MGMtOWE4NzRmOGU=, ActorId: [1:7486783410171992789:2330], ActorState: CleanupState, TraceId: 01jqdzvnt95sw2rtm7aqfscnk3, EndCleanup, isFinal: 0 2025-03-28T09:03:21.177143Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTg1ZjBiM2YtMjZhYTljMTgtMTg1ODk5MGMtOWE4NzRmOGU=, ActorId: [1:7486783410171992789:2330], ActorState: CleanupState, TraceId: 01jqdzvnt95sw2rtm7aqfscnk3, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7486783397287090489:2277] 2025-03-28T09:03:21.185322Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MTg1ZjBiM2YtMjZhYTljMTgtMTg1ODk5MGMtOWE4NzRmOGU=, ActorId: [1:7486783410171992789:2330], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:03:21.185403Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=MTg1ZjBiM2YtMjZhYTljMTgtMTg1ODk5MGMtOWE4NzRmOGU=, ActorId: [1:7486783410171992789:2330], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:03:21.185532Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MTg1ZjBiM2YtMjZhYTljMTgtMTg1ODk5MGMtOWE4NzRmOGU=, ActorId: [1:7486783410171992789:2330], ActorState: ReadyState, EndCleanup, isFinal: 1 2025- ... 
e_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4v75qjpr1rq8kgdwaf6, Sending to Executer TraceId: 0 8 2025-03-28T09:05:14.610272Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4v75qjpr1rq8kgdwaf6, Created new KQP executer: [10:7486783900368415821:2546] isRollback: 0 2025-03-28T09:05:14.617122Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4v75qjpr1rq8kgdwaf6, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-03-28T09:05:14.617349Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4v75qjpr1rq8kgdwaf6, txInfo Status: Committed Kind: ReadWrite TotalDuration: 17.523 ServerDuration: 17.329 QueriesCount: 2 2025-03-28T09:05:14.617469Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4v75qjpr1rq8kgdwaf6, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-28T09:05:14.617543Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4v75qjpr1rq8kgdwaf6, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:05:14.617575Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4v75qjpr1rq8kgdwaf6, EndCleanup, isFinal: 0 2025-03-28T09:05:14.617631Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4v75qjpr1rq8kgdwaf6, Sent query response back to proxy, proxyRequestId: 28, proxyId: [10:7486783797289199161:2094] 2025-03-28T09:05:14.618144Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, TxId: 2025-03-28T09:05:14.618274Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 
2025-03-28T09:05:14.618705Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ReadyState, TraceId: 01jqdzz4vtam6yhpx28bh0gctq, received request, proxyRequestId: 29 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpcActor: [10:7486783900368415828:2553] database: /Root databaseId: /Root pool id: default 2025-03-28T09:05:14.618741Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ReadyState, TraceId: 01jqdzz4vtam6yhpx28bh0gctq, request placed into pool from cache: default 2025-03-28T09:05:14.619432Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4vtam6yhpx28bh0gctq, ExecutePhyTx, tx: 0x000050C000230458 literal: 0 commit: 0 txCtx.DeferredEffects.size(): 0 2025-03-28T09:05:14.619507Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4vtam6yhpx28bh0gctq, Sending to Executer TraceId: 0 8 2025-03-28T09:05:14.619589Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4vtam6yhpx28bh0gctq, Created new KQP executer: [10:7486783900368415831:2546] isRollback: 0 2025-03-28T09:05:14.629109Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4vtam6yhpx28bh0gctq, TEvTxResponse, CurrentTx: 1/2 response.status: SUCCESS 2025-03-28T09:05:14.629225Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4vtam6yhpx28bh0gctq, ExecutePhyTx, tx: 0x000050C0002302D8 literal: 1 commit: 1 txCtx.DeferredEffects.size(): 0 2025-03-28T09:05:14.630234Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4vtam6yhpx28bh0gctq, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-03-28T09:05:14.630420Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4vtam6yhpx28bh0gctq, txInfo Status: Committed Kind: ReadOnly TotalDuration: 11.096 ServerDuration: 10.971 QueriesCount: 2 
2025-03-28T09:05:14.630561Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4vtam6yhpx28bh0gctq, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-28T09:05:14.630643Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4vtam6yhpx28bh0gctq, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:05:14.630713Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4vtam6yhpx28bh0gctq, EndCleanup, isFinal: 0 2025-03-28T09:05:14.630785Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ExecuteState, TraceId: 01jqdzz4vtam6yhpx28bh0gctq, Sent query response back to proxy, proxyRequestId: 29, proxyId: [10:7486783797289199161:2094] 2025-03-28T09:05:14.631177Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, TxId: 2025-03-28T09:05:14.631342Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: [TQueryBase] [TRefreshPoolStateQuery] TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, TxId: 2025-03-28T09:05:14.631782Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:05:14.631825Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:05:14.631852Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:05:14.631874Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:05:14.631929Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=ODU5Nzc5ZmYtZDUwNjQxOTQtNjU2YTc4NTEtYmY0YzIwZjA=, ActorId: [10:7486783900368415797:2546], ActorState: unknown state, Session actor destroyed 2025-03-28T09:05:14.645700Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=Mjg4YmI1MmQtZDYwMzI5MS02MzMxN2Y2Mi02MWM2YTEyYw==, ActorId: [10:7486783818764036244:2332], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:05:14.645759Z node 10 :KQP_SESSION INFO: 
SessionId: ydb://session/3?node_id=10&id=Mjg4YmI1MmQtZDYwMzI5MS02MzMxN2Y2Mi02MWM2YTEyYw==, ActorId: [10:7486783818764036244:2332], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:05:14.645790Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Mjg4YmI1MmQtZDYwMzI5MS02MzMxN2Y2Mi02MWM2YTEyYw==, ActorId: [10:7486783818764036244:2332], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:05:14.645824Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Mjg4YmI1MmQtZDYwMzI5MS02MzMxN2Y2Mi02MWM2YTEyYw==, ActorId: [10:7486783818764036244:2332], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:05:14.645917Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=Mjg4YmI1MmQtZDYwMzI5MS02MzMxN2Y2Mi02MWM2YTEyYw==, ActorId: [10:7486783818764036244:2332], ActorState: unknown state, Session actor destroyed >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 |77.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] Test command err: 2025-03-28T09:05:05.561302Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783861253650982:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:05.561359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ce7/r3tmp/tmpqgar2n/pdisk_1.dat 2025-03-28T09:05:06.099903Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:06.129841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:06.129947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:06.140380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13931, node 1 2025-03-28T09:05:06.236067Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:06.236097Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:06.236110Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:06.236238Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5210 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:06.722437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:09.072851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783878433521032:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.072850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783878433521040:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.073002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.074063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783878433521051:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.074189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.074720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783878433521054:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.077512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:05:09.084302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783878433521097:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.084374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783878433521099:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.084466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.084851Z node 1 :TX_PROXY ERROR: Actor# [1:7486783878433521078:2642] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:05:09.099121Z node 1 :TX_PROXY ERROR: Actor# [1:7486783878433521105:2658] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:05:09.123156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783878433521102:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:05:09.123224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783878433521077:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:05:09.123245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783878433521046:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:05:09.188592Z node 1 :TX_PROXY ERROR: Actor# [1:7486783878433521205:2720] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:09.192345Z node 1 :TX_PROXY ERROR: Actor# [1:7486783878433521206:2721] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:09.230261Z node 1 :TX_PROXY ERROR: Actor# [1:7486783878433521231:2737] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:11.089860Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783885772654727:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:11.089975Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ce7/r3tmp/tmp1iYTkz/pdisk_1.dat 2025-03-28T09:05:11.315893Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:11.343799Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:11.343880Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:11.347932Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20950, node 4 2025-03-28T09:05:11.429860Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:11.429878Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:11.429883Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:11.429985Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7547 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:05:11.731924Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:05:14.346866Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783898657557639:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.346975Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.373908Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:05:16.089552Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486783885772654727:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:16.089609Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive >> TSchemeShardExtSubDomainTest::Fake [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx >> TSchemeShardExtSubDomainTest::CreateAndWait >> HttpRequest::AnalyzeServerless [GOOD] |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |77.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> Cdc::InitialScan_WithTopicSchemeTx [GOOD] >> Cdc::InitialScan_TopicAutoPartitioning >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false >> HttpRequest::Status [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive |77.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} |77.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::AnalyzeServerless [GOOD] Test command err: 2025-03-28T09:05:01.178340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:05:01.178668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:05:01.178765Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001061/r3tmp/tmpQ1MFlI/pdisk_1.dat 2025-03-28T09:05:01.579202Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15435, node 1 2025-03-28T09:05:01.881427Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:01.881487Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:01.881519Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:01.881963Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:05:01.887700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:05:01.991405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:01.991545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:02.020750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7726 2025-03-28T09:05:02.635852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:05:06.171388Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:05:06.207917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:06.208057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:06.259191Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:05:06.263656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:06.588444Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.589082Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.589679Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.589831Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.590113Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.590212Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.590294Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.590376Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.590457Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.789614Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:06.789729Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:06.805133Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:07.140426Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:07.222561Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:05:07.222678Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:05:07.265410Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:05:07.265593Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:05:07.265831Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:05:07.265912Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:05:07.265995Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:05:07.266056Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:05:07.266130Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:05:07.266189Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:05:07.266809Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:05:07.305815Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:05:07.305957Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:05:07.314098Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:05:07.324633Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:05:07.324869Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:05:07.378352Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-28T09:05:07.401133Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:05:07.401197Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:05:07.401284Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-28T09:05:07.416720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:05:07.426771Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:05:07.426928Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:05:07.617055Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:05:07.837906Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:05:07.966034Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:05:08.765241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:05:09.576725Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:09.810353Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-28T09:05:09.810423Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:05:09.810536Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2594:2948], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:05:09.812910Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2596:2950] 2025-03-28T09:05:09.813968Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2596:2950], schemeshard id = 72075186224037899 2025-03-28T09:05:11.165711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2723:3245], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:11.165896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:11.192084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-03-28T09:05:11.588220Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2875:3083];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:05:11.588461Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2875:3083];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:05:11.588777Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2875:3083];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:05:11.588919Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2875:3083];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:05:11.589041Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2875:3083];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:05:11.589167Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2875:3083];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:05:11.589316Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2875:3083];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:05:11.589452Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2875:3083];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:05:11.589585Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2875:3083];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09: ... 
let_id=72075186224037914;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:05:12.613321Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:12.619383Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:12.626083Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:12.631434Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:12.636129Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:12.640805Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:12.648697Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:12.655870Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:12.664872Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:12.670377Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:13.989825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3537:3336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:13.989944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.013135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715662:0, at schemeshard: 72075186224037899 2025-03-28T09:05:14.862905Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-03-28T09:05:14.863465Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-03-28T09:05:14.864334Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-03-28T09:05:14.865135Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-03-28T09:05:14.865602Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-03-28T09:05:14.866019Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-03-28T09:05:14.866431Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-03-28T09:05:14.867928Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-03-28T09:05:14.868371Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-03-28T09:05:14.868805Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-03-28T09:05:16.192727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4324:3408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:16.192910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:16.209253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-03-28T09:05:16.269098Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-28T09:05:16.269739Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-28T09:05:16.270241Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-28T09:05:16.271098Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-28T09:05:16.271698Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-28T09:05:16.272219Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-28T09:05:16.272863Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-28T09:05:16.273637Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-28T09:05:16.274065Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-28T09:05:16.274521Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; waiting actualization: 0/0.000018s 2025-03-28T09:05:18.250571Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:4611:4461] 2025-03-28T09:05:18.253714Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4607:3494] , Record { OperationId: "\000\000\000\000\034; R\022\336\377\007\007\006X\027" Tables { PathId { OwnerId: 72057594046644480 LocalId: 2 } } } 2025-03-28T09:05:18.253788Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId= ; RX 2025-03-28T09:05:18.253826Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId= ; RX , PathId [OwnerId: 72057594046644480, LocalPathId: 2] Answer: 'Analyze sent. 
OperationId: 000000071v41915qqz0w3gcp0q' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false >> DataShardSnapshots::DelayedWriteReadableAfterSplit >>
TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::Drop >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status [GOOD] Test command err: 2025-03-28T09:05:04.537524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:05:04.537887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:05:04.537959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001022/r3tmp/tmpLRxYwO/pdisk_1.dat 2025-03-28T09:05:05.008846Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28717, node 1 2025-03-28T09:05:05.335533Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:05.335588Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:05.335620Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:05.335997Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:05:05.337789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:05:05.444650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:05.444791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:05.468086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4702 2025-03-28T09:05:06.150197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:05:10.110725Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:05:10.174763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:10.174956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:10.235050Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:05:10.237521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:10.484746Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:10.485481Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:10.486219Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:10.486421Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:10.486786Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:10.486917Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:10.487019Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:10.487141Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:10.487272Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:10.668301Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:10.668432Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:10.692430Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:10.872988Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:10.937484Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:05:10.937606Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:05:10.982933Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:05:10.985352Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:05:10.985595Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:05:10.985654Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:05:10.985713Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:05:10.985775Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:05:10.985839Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:05:10.985899Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:05:10.986543Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:05:11.056326Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1859:2587] 2025-03-28T09:05:11.057346Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:05:11.071331Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:05:11.071403Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:05:11.071506Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:05:11.076595Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:05:11.076745Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1895:2609], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:05:11.092975Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1935:2627] 2025-03-28T09:05:11.093600Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1935:2627], schemeshard id = 72075186224037897 2025-03-28T09:05:11.134077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:05:11.150363Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:05:11.150521Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:05:11.327135Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:05:11.601641Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:05:11.691344Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:05:12.676525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2238:3072], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:12.676650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:12.696914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:05:13.058579Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2385:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:05:13.058854Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2385:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:05:13.059176Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2385:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:05:13.059326Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2385:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:05:13.059457Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2385:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:05:13.059578Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2385:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:05:13.059698Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2385:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:05:13.059844Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2385:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:05:13.059998Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2385:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:05:13.060145Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2385:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:05:13.060331Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2385:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:05:13.060451Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2385:2883];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:05:13.120667Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2393:2887];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:05:13.120780Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2393:2887];tablet_id=72075186224037900;process=T ... storeV1Chunks_V2;id=15; 2025-03-28T09:05:13.542294Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:05:13.542400Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:05:13.542439Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037907;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:05:14.065223Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:14.071899Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:14.078398Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:14.085730Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:14.090485Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:14.096498Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:14.100879Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:14.107867Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:14.115994Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:14.123272Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:05:15.371129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3183:3175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:15.371285Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:15.406117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037897 2025-03-28T09:05:16.033517Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:16.034485Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:16.035458Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:16.035930Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:16.036387Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:16.036846Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:16.037304Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:16.038249Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:16.038679Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:16.040278Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:05:16.858320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3808:3232], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:16.858958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:16.862294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-03-28T09:05:16.913838Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:16.914423Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:16.916160Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:16.916771Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:16.917285Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:16.917640Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:16.917963Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:16.919463Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:16.919939Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:05:16.920511Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000018s 2025-03-28T09:05:18.909814Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:4095:4247] 2025-03-28T09:05:18.912781Z node 2 :STATISTICS DEBUG: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. 
Status STATUS_NO_OPERATION Answer: 'No analyze operation' FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |77.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |77.6%| [LD] {RESULT}
$(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive |77.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |77.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |77.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive |77.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> TSchemeShardExtSubDomainTest::Drop [GOOD] >> TSchemeShardExtSubDomainTest::Drop-ExternalHive |77.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |77.6%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |77.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |77.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |77.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter |77.6%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |77.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |77.6%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink |77.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |77.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |77.6%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] Test command err: 2025-03-28T09:05:05.025339Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783861713354165:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:05.026044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cf4/r3tmp/tmphztP85/pdisk_1.dat 2025-03-28T09:05:05.522386Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:05.545800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:05.545933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:05.553556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19290, node 1 2025-03-28T09:05:05.742002Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:05.742022Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:05.742030Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:05.742121Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10348 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:06.048079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:10.901871Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486783885565253738:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:10.901943Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cf4/r3tmp/tmpYp9e3c/pdisk_1.dat 2025-03-28T09:05:11.122761Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:11.166337Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:11.166432Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:11.172617Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18780, node 4 2025-03-28T09:05:11.275386Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:11.275408Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:11.275416Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:11.275553Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22549 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:11.515520Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:14.542006Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124257:2552], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.542006Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124110:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.542109Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124262:2557], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.542122Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124259:2554], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.542156Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124253:2548], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.542173Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124258:2553], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.542194Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124270:2564], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.542239Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124276:2570], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.542264Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124271:2565], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.542282Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124260:2555], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.542304Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124254:2549], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.542316Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124261:2556], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.542335Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124272:2566], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.542351Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124281:2575], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.542385Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124263:2558], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.542388Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124255:2550], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.542644Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124256:2551], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.543011Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124264:2559], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.543064Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124273:2567], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.543106Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486783902745124265:2560], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:14.543112Z node 4 ... ROR: Actor# [4:7486783902745124973:3196] txid# 281474976715717, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.709511Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745124971:3194] txid# 281474976715716, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.711019Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745124982:3205] txid# 281474976715718, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.721252Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745124985:3207] txid# 281474976715720, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.721678Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745124984:3206] txid# 281474976715719, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.724655Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745124998:3215] txid# 281474976715721, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.725094Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125000:3217] txid# 281474976715723, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.727188Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125044:3253] txid# 281474976715725, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.727434Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125045:3254] txid# 281474976715726, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.727654Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745124999:3216] txid# 281474976715722, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', 
error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.729285Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125024:3239] txid# 281474976715724, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.729895Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125046:3255] txid# 281474976715727, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.736574Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125079:3278] txid# 281474976715734, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.738544Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125061:3265] txid# 281474976715728, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.738706Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125064:3268] txid# 281474976715731, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.739859Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125078:3277] txid# 281474976715733, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.739964Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125062:3266] txid# 281474976715729, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.741768Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125063:3267] txid# 281474976715730, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.742713Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125100:3295] txid# 281474976715736, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.743118Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125077:3276] txid# 281474976715732, 
issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.745396Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125123:3317] txid# 281474976715739, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.745640Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125101:3296] txid# 281474976715737, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.745720Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125122:3316] txid# 281474976715738, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.745787Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125099:3294] txid# 281474976715735, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.750081Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125124:3318] txid# 281474976715740, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.750280Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125148:3336] txid# 281474976715742, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.750372Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125150:3338] txid# 281474976715744, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.750543Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125149:3337] txid# 281474976715743, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.750712Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125147:3335] txid# 281474976715741, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 
2025-03-28T09:05:14.750835Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125159:3345] txid# 281474976715745, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.750978Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125181:3363] txid# 281474976715747, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:14.751391Z node 4 :TX_PROXY ERROR: Actor# [4:7486783902745125180:3362] txid# 281474976715746, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:15.902063Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486783885565253738:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:15.902157Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
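The `>> Suite::Case [STATUS]` entries that follow belong to a different stream: the test runner's progress output, which the capture interleaves with tablet stderr. An entry with a bracketed status is a finished test; a bare name marks one that has only been scheduled. A companion sketch for extracting the finished ones (same caveat: the regex is inferred from these entries, not an official ya format):

```python
import re
import sys

# A finished test prints ">> Suite::Case [STATUS]"; a bare ">> Suite::Case"
# only marks a test that has been scheduled or started. Statuses observed in
# this log are [GOOD]; [FAIL]/[TIMEOUT] would be matched the same way.
STATUS = re.compile(r">> (\S+) \[([A-Z]+)\]")

def completed(text: str) -> list[tuple[str, str]]:
    return STATUS.findall(text)

if __name__ == "__main__":
    results = completed(sys.stdin.read())
    bad = [(name, status) for name, status in results if status != "GOOD"]
    print(f"{len(results)} tests completed, {len(bad)} not GOOD")
    for name, status in bad:
        print(f"  {status}: {name}")
```

Only [GOOD] appears in this section, so on this input the non-GOOD list it prints would be empty.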
>> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false
>> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst
>> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [GOOD]
>> DataShardSnapshots::VolatileSnapshotCleanupOnFinish
>> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst
>> KqpPg::SelectIndex+useSink [GOOD]
>> KqpPg::SelectIndex-useSink
>> TSchemeShardExtSubDomainTest::Drop-ExternalHive [GOOD]
>> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false
>> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive [GOOD]
>> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45
>> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive
>> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false
>> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true
>> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true
>> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD]
>> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive
>> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive
|77.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow
|77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow
|77.7%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow
>> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst [GOOD]
>> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive
>> TSchemeShardExtSubDomainTest::Create
>> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false
>> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent
>> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false
>> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28
>> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12
>> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34
------- [TM] {asan, default-linux-x86_64, release}
ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:05:19.755797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:05:19.755898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:19.755941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:05:19.755974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:05:19.756013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:05:19.756044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:05:19.756103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:19.756186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:05:19.756529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:05:19.878353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:19.878413Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:19.903669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:05:19.904061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:05:19.904212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:05:19.938551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:05:19.938795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:05:19.939483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:19.939669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:19.941569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:19.942849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:19.942912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:19.943093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:05:19.943160Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:19.943215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:05:19.943307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:05:19.949979Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:05:20.119054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:20.119308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.119527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:05:20.119876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:05:20.119948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.125161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:20.125323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:05:20.125538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.125598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:05:20.125636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:05:20.125684Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:05:20.128272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.128366Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:20.128431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:05:20.134962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.135035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.135088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:20.135140Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.139411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:05:20.145308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:05:20.145584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:05:20.146636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:20.146800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:20.146875Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:20.147172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:05:20.147237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:20.147420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:20.147513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:20.149721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:20.149780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:20.149965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:20.150006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:05:20.150458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.150505Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:05:20.150613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:20.150649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.150713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:20.150767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.150803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:05:20.150847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.150887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:05:20.150916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:05:20.150978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:20.151036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:05:20.151082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:05:20.153092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:20.153218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:20.153253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... TTxPublishToSchemeBoard Send, to populator: [7:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-28T09:05:26.269845Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:05:26.269896Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2025-03-28T09:05:26.269928Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 240 -> 240 2025-03-28T09:05:26.274658Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:05:26.274873Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:05:26.274926Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:05:26.274984Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-03-28T09:05:26.275041Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-28T09:05:26.275182Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-28T09:05:26.278099Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" 
TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-03-28T09:05:26.278203Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:05:26.278333Z node 7 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:390:2361], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:05:26.279021Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-03-28T09:05:26.279068Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-03-28T09:05:26.279217Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-03-28T09:05:26.279254Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:489:2432], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-03-28T09:05:26.279974Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-03-28T09:05:26.280592Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:05:26.280648Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-28T09:05:26.280819Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:05:26.280879Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:05:26.280938Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:05:26.280976Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:05:26.281027Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-28T09:05:26.281097Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:05:26.281146Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:05:26.281208Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:05:26.281304Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:05:26.281666Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:05:26.281737Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 
TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-28T09:05:26.283784Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:05:26.283850Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:05:26.284372Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:05:26.284487Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:05:26.284537Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:568:2509] TestWaitNotification: OK eventTxId 103 2025-03-28T09:05:26.285153Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:05:26.285364Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 270us result status StatusSuccess 2025-03-28T09:05:26.285758Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:26.286310Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:05:26.286479Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 199us result status StatusSuccess 2025-03-28T09:05:26.286874Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:26.287447Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2025-03-28T09:05:26.287605Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409546 describe path "/MyRoot/USER_0" took 182us result status StatusSuccess 2025-03-28T09:05:26.287964Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 >> TSchemeShardExtSubDomainTest::Create [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> 
TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:05:19.755700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:05:19.755810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:19.755871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:05:19.755908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:05:19.755974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:05:19.756008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:05:19.756068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:19.756156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:05:19.756473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:05:19.876046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:19.876113Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:19.905335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:05:19.905616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:05:19.905780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:05:19.934391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:05:19.934637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:05:19.935303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:19.935586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:19.937511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:19.938785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:19.938847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:19.939024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:05:19.939101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:19.939142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:05:19.939217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:05:19.945738Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:05:20.118423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:20.118705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.118937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:05:20.119228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:05:20.119291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.128336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:20.128529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:05:20.128742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.128822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:05:20.128882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:05:20.128929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:05:20.131510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.131589Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:20.131649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:05:20.133844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.133913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.133968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:20.134015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.144558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:05:20.146476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:05:20.146667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:05:20.147796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:20.147955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:20.148015Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:20.148357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:05:20.148419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:20.148603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:20.148683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:20.150578Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:20.150653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:20.150842Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:20.150888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:05:20.151278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.151323Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:05:20.151424Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:20.151459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.151493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:20.151532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-03-28T09:05:20.151575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:05:20.151619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.151655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:05:20.151697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:05:20.151764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:20.151803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:05:20.151835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:05:20.153742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:20.153856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:20.153895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... shard: 72057594046678944, txId: 103, path id: 1 2025-03-28T09:05:26.670906Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-28T09:05:26.671233Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:05:26.671293Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 103:0 ProgressState 2025-03-28T09:05:26.671350Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 135 -> 240 2025-03-28T09:05:26.672439Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:05:26.672533Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:05:26.672564Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:05:26.672599Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:05:26.672632Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:26.673584Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:05:26.673677Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 
72057594046678944, cookie: 103 2025-03-28T09:05:26.673705Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:05:26.673737Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:05:26.673772Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-28T09:05:26.673844Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-28T09:05:26.676264Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72075186233409546 at ss 72057594046678944 2025-03-28T09:05:26.676331Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72075186233409546 at ss 72057594046678944 2025-03-28T09:05:26.676363Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72075186233409546 at ss 72057594046678944 2025-03-28T09:05:26.676394Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72075186233409546 at ss 72057594046678944 2025-03-28T09:05:26.676589Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:05:26.676638Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-28T09:05:26.676825Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:05:26.676889Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:05:26.676944Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:05:26.676990Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:05:26.677049Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-28T09:05:26.677118Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:05:26.677173Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:05:26.677230Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:05:26.677441Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:05:26.678771Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:05:26.679916Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-28T09:05:26.680161Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:26.680565Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:05:26.680889Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 Forgetting tablet 72075186234409547 
2025-03-28T09:05:26.682652Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:05:26.682999Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:05:26.683694Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 2025-03-28T09:05:26.684240Z node 7 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186234409548 2025-03-28T09:05:26.685712Z node 7 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186234409546 2025-03-28T09:05:26.690290Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:05:26.690616Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186234409548 2025-03-28T09:05:26.692014Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-28T09:05:26.692228Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:05:26.693257Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:05:26.694145Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:05:26.694210Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:05:26.694377Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:05:26.694925Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:05:26.694990Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:05:26.695076Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:26.699127Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:05:26.699204Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:05:26.699348Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:05:26.699374Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-03-28T09:05:26.701908Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:05:26.701963Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-03-28T09:05:26.702097Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-28T09:05:26.702151Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-03-28T09:05:26.702359Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:05:26.702462Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-28T09:05:26.702825Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:05:26.702889Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:05:26.703438Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:05:26.703569Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:05:26.703620Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:587:2528] TestWaitNotification: OK eventTxId 103 2025-03-28T09:05:26.704321Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:05:26.704563Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 292us result status StatusPathDoesNotExist 2025-03-28T09:05:26.704737Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:05:19.755542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, 
WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:05:19.755643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:19.755678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:05:19.755720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:05:19.755760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:05:19.755796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:05:19.755850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:19.755954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:05:19.756283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:05:19.887283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:19.887347Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:19.923252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:05:19.923576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:05:19.923760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:05:19.941658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:05:19.941826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:05:19.942480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:19.942659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:19.944546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:19.945727Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:19.945779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:19.945896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:05:19.945936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:19.945973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:05:19.946033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:05:19.952340Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: 
[1:15:2062] 2025-03-28T09:05:20.133220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:20.133445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.133641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:05:20.133847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:05:20.133899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.143572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:20.143728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:05:20.143938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.144004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:05:20.144044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:05:20.144089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:05:20.147906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.147969Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:20.148019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:05:20.151584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.151632Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.151691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:20.151735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.155061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:05:20.161257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:05:20.161521Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:05:20.162429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:20.162572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:20.162639Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:20.162994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:05:20.163069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:20.163287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:20.163390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:20.167481Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:20.167533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:20.167945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:20.168003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:05:20.168406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.168459Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:05:20.168553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:20.168590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.168639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:20.168668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.168705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:05:20.168755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.168795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:05:20.168821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:05:20.168882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:20.168919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:05:20.168948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:05:20.171117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:20.171256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:20.171299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... d: 72057594046678944, LocalPathId: 2], type: EPathTypeExtSubDomain, state: EPathStateAlter)" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:26.631095Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_0', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeExtSubDomain, state: EPathStateAlter), operation: ALTER DATABASE, path: /MyRoot/USER_0 2025-03-28T09:05:26.635917Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:05:26.635997Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 ProgressState, operation type: TxAlterExtSubDomain, at tablet# 72057594046678944 2025-03-28T09:05:26.636051Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 ProgressState no shards to create, do next state 2025-03-28T09:05:26.636098Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 2 -> 3 2025-03-28T09:05:26.638524Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:05:26.638621Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 103:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:26.638701Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 3 -> 128 2025-03-28T09:05:26.641036Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:05:26.641110Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:05:26.641161Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 103:0, at tablet# 72057594046678944 2025-03-28T09:05:26.641221Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-03-28T09:05:26.641390Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:05:26.643194Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 
2025-03-28T09:05:26.643374Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-03-28T09:05:26.643896Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:26.644067Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 30064773228 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:26.644156Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-03-28T09:05:26.644503Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-03-28T09:05:26.644568Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2025-03-28T09:05:26.644713Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:05:26.644872Z node 7 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [7:390:2361], msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72057594046678944 2025-03-28T09:05:26.647429Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186234409546, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4 2025-03-28T09:05:26.647623Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72075186234409546 2025-03-28T09:05:26.647832Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 FAKE_COORDINATOR: Erasing txId 103 2025-03-28T09:05:26.648208Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:26.648263Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:05:26.648426Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:26.648474Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-28T09:05:26.648744Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:05:26.648794Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2025-03-28T09:05:26.648828Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 240 -> 240 2025-03-28T09:05:26.650011Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 
PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:05:26.650152Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:05:26.650208Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:05:26.650272Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-03-28T09:05:26.650329Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-28T09:05:26.650436Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-28T09:05:26.652200Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-03-28T09:05:26.652288Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:05:26.652385Z node 7 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:390:2361], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:05:26.652503Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2025-03-28T09:05:26.652529Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2025-03-28T09:05:26.652633Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2025-03-28T09:05:26.652663Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:489:2432], at schemeshard: 72075186234409546, txId: 0, path id: 1 2025-03-28T09:05:26.656454Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409546, cookie: 0 2025-03-28T09:05:26.657138Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:05:26.657247Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:05:26.657403Z node 7 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:05:26.657458Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-28T09:05:26.657665Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:05:26.657723Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:05:26.657796Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:05:26.657852Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:05:26.657919Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-28T09:05:26.657979Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:05:26.658045Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:05:26.658091Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:05:26.658188Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 TestModificationResult got TxId: 103, wait until txId: 104 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2025-03-28T09:05:26.663170Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:05:26.663236Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:05:26.663709Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:05:26.663819Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:05:26.663861Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:572:2513] TestWaitNotification: OK eventTxId 103 >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters [GOOD] Test command err: 2025-03-28T09:03:18.303114Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783402951363464:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:18.303152Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cd8/r3tmp/tmpi3xhOe/pdisk_1.dat 2025-03-28T09:03:18.705839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:18.705925Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:18.708729Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:18.722296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21112, node 1 2025-03-28T09:03:18.790057Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:18.790084Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:18.790091Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:18.790229Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:19.087590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:03:21.184399Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-28T09:03:21.187380Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmQwYjYxZTktNmU4NmQ2YTQtN2M0ZWM1NjItNzFiOGNiOTc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NmQwYjYxZTktNmU4NmQ2YTQtN2M0ZWM1NjItNzFiOGNiOTc= 2025-03-28T09:03:21.187871Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783415836265987:2326], Start check tables existence, number paths: 2 2025-03-28T09:03:21.188032Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmQwYjYxZTktNmU4NmQ2YTQtN2M0ZWM1NjItNzFiOGNiOTc=, ActorId: [1:7486783415836265988:2327], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:21.188537Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-03-28T09:03:21.188556Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-28T09:03:21.188571Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-28T09:03:21.194261Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783415836265987:2326], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-28T09:03:21.194332Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783415836265987:2326], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-28T09:03:21.194353Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783415836265987:2326], Successfully finished 2025-03-28T09:03:21.194434Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-28T09:03:21.218991Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783415836266015:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:21.222937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:03:21.226059Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783415836266015:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-28T09:03:21.228190Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783415836266015:2301], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-28T09:03:21.236010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783415836266015:2301], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:03:21.332613Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783415836266015:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:21.336877Z node 1 :TX_PROXY ERROR: Actor# [1:7486783415836266068:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:03:21.337020Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783415836266015:2301], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-28T09:03:21.349240Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-03-28T09:03:21.349268Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-28T09:03:21.349395Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmQwYjYxZTktNmU4NmQ2YTQtN2M0ZWM1NjItNzFiOGNiOTc=, ActorId: [1:7486783415836265988:2327], ActorState: ReadyState, TraceId: 01jqdzvp84c6rrs3zr8ymb4f72, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT DESCRIBE SCHEMA ON `/Root` TO `user@test`; GRANT DESCRIBE SCHEMA, SELECT ROW ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `user@test`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-03-28T09:03:21.353936Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783415836266077:2332], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-03-28T09:03:21.355841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783415836266077:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:21.355938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:21.660594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:03:21.666921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:03:21.670190Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=NmQwYjYxZTktNmU4NmQ2YTQtN2M0ZWM1NjItNzFiOGNiOTc=, ActorId: [1:7486783415836265988:2327], ActorState: ExecuteState, TraceId: 01jqdzvp84c6rrs3zr8ymb4f72, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [1:7486783415836266078:2327] WorkloadServiceCleanup: 0 2025-03-28T09:03:21.671515Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmQwYjYxZTktNmU4NmQ2YTQtN2M0ZWM1NjItNzFiOGNiOTc=, ActorId: [1:7486783415836265988:2327], ActorState: CleanupState, TraceId: 01jqdzvp84c6rrs3zr8ymb4f72, EndCleanup, isFinal: 0 2025-03-28T09:03:21.671582Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NmQwYjYxZTktNmU4NmQ2YTQtN2M0ZWM1NjItNzFiOGNiOTc=, ActorId: [1:7486783415836265988:2327], ActorState: CleanupState, TraceId: 01jqdzvp84c6rrs3zr8ymb4f72, Sent query response back to proxy, proxyRequestId: 3, proxyId: [1:7486783402951363714:2277] 2025-03-28T09:03:21.676100Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDg0YWZkMDUtMzI5ZWI0ZmMtOTI3M2RiNjAtMTRhMjlkZDg=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZDg0YWZkMDUtMzI5ZWI0ZmMtOTI3M2RiNjAtMTRhMjlkZDg= 2025-03-28T09:03:21.676265Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDg0YWZkMDUtMzI5ZWI0ZmMtOTI3M2RiNjAtMTRhMjlkZDg=, ActorId: [1:7486783415836266116:2335], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:21.676370Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-28T09:03:21.676438Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783415836266118:2336], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-28T09:03:21.676441Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZDg0YWZkMDUtMzI5ZWI0ZmMtOTI3M2RiNjAtMTRhMjlkZDg=, ActorId: [1:7486783415836266116:2335], ActorState: ReadyState, TraceId: 01jqdzvpjccecm0rc9d857gj8m, received request, proxyRequestId: 4 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: DROP RESOURCE POOL CLASSIFIER MyResourcePoolClassifier rpcActor: [1:7486783415836266115:2360] database: Root databaseId: /Root pool id: sample_pool_id 2025-03-28T09:03:21.676493Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7486783415836266116:2335], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZDg0YWZkMDUtMzI5ZWI0ZmMtOTI3M2RiNjAtMTRhMjlkZDg= 2025-03-28T09:03:21.676576Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7486783415836266119:2337], Database: /Root, Start database fetching 2025-03-28T09:03:21.677132Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783415836266118:2336], DatabaseId: /Root, 
PoolId: sample_pool_id, Pool info successfully fetched 2025-03-28T09:03:21.6 ... b://session/3?node_id=9&id=MTAxZWNkYjItYjU2NTU0MTUtNjczOGVlMTItNzU1MjY1NGE=, ActorId: [9:7486783942175202205:3045], ActorState: ReadyState, TraceId: 01jqdzzexfdg0fm2k6pfk3x8rf, received request, proxyRequestId: 106 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpcActor: [9:7486783942175202206:3046] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2025-03-28T09:05:24.911498Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MTAxZWNkYjItYjU2NTU0MTUtNjczOGVlMTItNzU1MjY1NGE=, ActorId: [9:7486783942175202205:3045], ActorState: ReadyState, TraceId: 01jqdzzexfdg0fm2k6pfk3x8rf, request placed into pool from cache: default 2025-03-28T09:05:24.911624Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MTAxZWNkYjItYjU2NTU0MTUtNjczOGVlMTItNzU1MjY1NGE=, ActorId: [9:7486783942175202205:3045], ActorState: ExecuteState, TraceId: 01jqdzzexfdg0fm2k6pfk3x8rf, Sending CompileQuery request 2025-03-28T09:05:24.924562Z node 9 :SCHEME_BOARD_SUBSCRIBER WARN: [main][9:7486783873455722446:2642][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 54, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-03-28T09:05:24.924637Z node 9 :SCHEME_BOARD_SUBSCRIBER WARN: [main][9:7486783873455722446:2642][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 55, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-03-28T09:05:24.925541Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7486783942175202208:3047], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-03-28T09:05:24.926515Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=MTAxZWNkYjItYjU2NTU0MTUtNjczOGVlMTItNzU1MjY1NGE=, ActorId: [9:7486783942175202205:3045], ActorState: ExecuteState, TraceId: 01jqdzzexfdg0fm2k6pfk3x8rf, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2025-03-28T09:05:24.926564Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MTAxZWNkYjItYjU2NTU0MTUtNjczOGVlMTItNzU1MjY1NGE=, ActorId: [9:7486783942175202205:3045], ActorState: ExecuteState, TraceId: 01jqdzzexfdg0fm2k6pfk3x8rf, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:05:24.926590Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MTAxZWNkYjItYjU2NTU0MTUtNjczOGVlMTItNzU1MjY1NGE=, ActorId: [9:7486783942175202205:3045], ActorState: ExecuteState, TraceId: 01jqdzzexfdg0fm2k6pfk3x8rf, EndCleanup, isFinal: 0 2025-03-28T09:05:24.926755Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MTAxZWNkYjItYjU2NTU0MTUtNjczOGVlMTItNzU1MjY1NGE=, ActorId: [9:7486783942175202205:3045], ActorState: ExecuteState, TraceId: 01jqdzzexfdg0fm2k6pfk3x8rf, Sent query response back to proxy, proxyRequestId: 106, proxyId: [9:7486783851980885098:2277] 2025-03-28T09:05:24.927567Z node 9 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2025-03-28T09:05:24.927674Z node 9 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-03-28T09:05:24.927814Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MTAxZWNkYjItYjU2NTU0MTUtNjczOGVlMTItNzU1MjY1NGE=, ActorId: [9:7486783942175202205:3045], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:05:24.927843Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=MTAxZWNkYjItYjU2NTU0MTUtNjczOGVlMTItNzU1MjY1NGE=, ActorId: [9:7486783942175202205:3045], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:05:24.927872Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MTAxZWNkYjItYjU2NTU0MTUtNjczOGVlMTItNzU1MjY1NGE=, ActorId: [9:7486783942175202205:3045], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:05:24.927899Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MTAxZWNkYjItYjU2NTU0MTUtNjczOGVlMTItNzU1MjY1NGE=, ActorId: [9:7486783942175202205:3045], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:05:24.927982Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=MTAxZWNkYjItYjU2NTU0MTUtNjczOGVlMTItNzU1MjY1NGE=, ActorId: [9:7486783942175202205:3045], ActorState: unknown state, Session actor destroyed 2025-03-28T09:05:25.135862Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YWE2YTllYTQtYWFjMjdkMTQtOWM2MjhkZTctM2E2MDA2YTI=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YWE2YTllYTQtYWFjMjdkMTQtOWM2MjhkZTctM2E2MDA2YTI= 2025-03-28T09:05:25.136673Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YWE2YTllYTQtYWFjMjdkMTQtOWM2MjhkZTctM2E2MDA2YTI=, ActorId: [9:7486783946470169511:3051], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:05:25.136831Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YWE2YTllYTQtYWFjMjdkMTQtOWM2MjhkZTctM2E2MDA2YTI=, ActorId: [9:7486783946470169511:3051], ActorState: ReadyState, TraceId: 01jqdzzf4ga0dbt4x7eacm4bbq, received request, proxyRequestId: 108 prepared: 0 tx_control: 1 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DML text: SELECT * FROM `//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpcActor: [9:7486783946470169512:3052] database: /Root/test-shared databaseId: /Root/test-shared pool id: default 2025-03-28T09:05:25.136861Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YWE2YTllYTQtYWFjMjdkMTQtOWM2MjhkZTctM2E2MDA2YTI=, ActorId: [9:7486783946470169511:3051], ActorState: ReadyState, TraceId: 01jqdzzf4ga0dbt4x7eacm4bbq, request placed into pool from cache: default 2025-03-28T09:05:25.136976Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YWE2YTllYTQtYWFjMjdkMTQtOWM2MjhkZTctM2E2MDA2YTI=, ActorId: [9:7486783946470169511:3051], ActorState: ExecuteState, TraceId: 01jqdzzf4ga0dbt4x7eacm4bbq, Sending CompileQuery request 2025-03-28T09:05:25.152045Z node 9 :SCHEME_BOARD_SUBSCRIBER WARN: [main][9:7486783873455722446:2642][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 56, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-03-28T09:05:25.152155Z node 9 :SCHEME_BOARD_SUBSCRIBER WARN: [main][9:7486783873455722446:2642][/Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is done: cookie# 57, size# 3, half# 1, successes# 0, faulires# 2, partial# 1 2025-03-28T09:05:25.153111Z node 9 :KQP_COMPILE_ACTOR 
ERROR: Compilation failed, self: [9:7486783946470169514:3053], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-03-28T09:05:25.153323Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=YWE2YTllYTQtYWFjMjdkMTQtOWM2MjhkZTctM2E2MDA2YTI=, ActorId: [9:7486783946470169511:3051], ActorState: ExecuteState, TraceId: 01jqdzzf4ga0dbt4x7eacm4bbq, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: 2025-03-28T09:05:25.153364Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=YWE2YTllYTQtYWFjMjdkMTQtOWM2MjhkZTctM2E2MDA2YTI=, ActorId: [9:7486783946470169511:3051], ActorState: ExecuteState, TraceId: 01jqdzzf4ga0dbt4x7eacm4bbq, Cleanup start, isFinal: 0 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:05:25.153391Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YWE2YTllYTQtYWFjMjdkMTQtOWM2MjhkZTctM2E2MDA2YTI=, ActorId: [9:7486783946470169511:3051], ActorState: ExecuteState, TraceId: 01jqdzzf4ga0dbt4x7eacm4bbq, EndCleanup, isFinal: 0 2025-03-28T09:05:25.153534Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YWE2YTllYTQtYWFjMjdkMTQtOWM2MjhkZTctM2E2MDA2YTI=, ActorId: [9:7486783946470169511:3051], ActorState: ExecuteState, TraceId: 01jqdzzf4ga0dbt4x7eacm4bbq, Sent query response back to proxy, proxyRequestId: 108, proxyId: [9:7486783851980885098:2277] 2025-03-28T09:05:25.154248Z node 9 :METADATA_PROVIDER ERROR: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2025-03-28T09:05:25.154588Z node 9 :METADATA_PROVIDER ERROR: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[//Root/test-shared/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2025-03-28T09:05:25.154654Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=YWE2YTllYTQtYWFjMjdkMTQtOWM2MjhkZTctM2E2MDA2YTI=, ActorId: [9:7486783946470169511:3051], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:05:25.154720Z node 9 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=9&id=YWE2YTllYTQtYWFjMjdkMTQtOWM2MjhkZTctM2E2MDA2YTI=, ActorId: [9:7486783946470169511:3051], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:05:25.154748Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YWE2YTllYTQtYWFjMjdkMTQtOWM2MjhkZTctM2E2MDA2YTI=, ActorId: [9:7486783946470169511:3051], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:05:25.154775Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YWE2YTllYTQtYWFjMjdkMTQtOWM2MjhkZTctM2E2MDA2YTI=, ActorId: [9:7486783946470169511:3051], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:05:25.154854Z node 9 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=9&id=YWE2YTllYTQtYWFjMjdkMTQtOWM2MjhkZTctM2E2MDA2YTI=, ActorId: [9:7486783946470169511:3051], ActorState: unknown state, Session actor destroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:05:19.766275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:05:19.766400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:19.766446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:05:19.766487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:05:19.766533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:05:19.766565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:05:19.766628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:19.766738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:05:19.767133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:05:19.882419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:19.882485Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-28T09:05:19.910075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:05:19.910398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:05:19.910565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:05:19.917317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:05:19.917590Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:05:19.918412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:19.918679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:19.931628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:19.933342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:19.933436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:19.933658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:05:19.933715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:19.933767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:05:19.933866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:05:19.942185Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:05:20.096199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:20.096480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.096742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:05:20.096998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:05:20.097065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.111804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:20.112002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:05:20.112218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.112278Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:05:20.112339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:05:20.112388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:05:20.119620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.119711Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:20.119757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:05:20.131160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.131260Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.131332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:20.131393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.135559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:05:20.143417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:05:20.143623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:05:20.144449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:20.144560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:20.144606Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:20.144882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:05:20.144936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:20.145089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:20.145177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:05:20.155824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:20.155913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:20.156156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:20.156208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:05:20.156641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.156732Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:05:20.156846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:20.156885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.156926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:20.156956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.156993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:05:20.157039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.157074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:05:20.157106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:05:20.157202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:20.157259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:05:20.157303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:05:20.159437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:20.159630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:20.159676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
7.210077Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:05:27.210287Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:27.210328Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:27.210477Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:05:27.210629Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:27.210699Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-03-28T09:05:27.210741Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-28T09:05:27.210807Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:05:27.210857Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 103:0 ProgressState 2025-03-28T09:05:27.210910Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 135 -> 240 2025-03-28T09:05:27.212323Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:05:27.212412Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:05:27.212442Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:05:27.212476Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:05:27.212511Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:27.213990Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:05:27.214078Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:05:27.214107Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:05:27.214141Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:05:27.214178Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:05:27.214252Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-28T09:05:27.216002Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:05:27.216054Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:05:27.216075Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:05:27.216331Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:05:27.216386Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-28T09:05:27.216538Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:05:27.216588Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:05:27.216641Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:05:27.216691Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:05:27.216748Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-28T09:05:27.216801Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:05:27.216854Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:05:27.216896Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:05:27.217098Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:05:27.218011Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:05:27.218283Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-03-28T09:05:27.219393Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:27.219734Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:05:27.220158Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-03-28T09:05:27.220921Z node 7 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:05:27.224317Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:05:27.224531Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:05:27.225292Z node 7 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 
72075186233409548 Forgetting tablet 72075186233409547 2025-03-28T09:05:27.226521Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:05:27.226779Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:05:27.227567Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:05:27.227618Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:05:27.227722Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:05:27.228048Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:05:27.228143Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:05:27.228181Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:05:27.228252Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:27.230853Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:05:27.230933Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:05:27.232660Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:05:27.232696Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-28T09:05:27.232762Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:05:27.232805Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:05:27.233125Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:05:27.233220Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-28T09:05:27.233487Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:05:27.233528Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:05:27.233962Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:05:27.234071Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:05:27.234120Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:534:2484] 
TestWaitNotification: OK eventTxId 103 2025-03-28T09:05:27.234662Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:05:27.234882Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 269us result status StatusPathDoesNotExist 2025-03-28T09:05:27.235062Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true
>> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false
>> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink [GOOD]
>> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:05:19.761692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:05:19.761802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:19.761855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:05:19.761894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:05:19.761955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:05:19.761998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:05:19.762079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:05:19.762193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:05:19.762523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:05:19.855332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:19.855406Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:19.895527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:05:19.895807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:05:19.896006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:05:19.907187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:05:19.907395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:05:19.908109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:19.908349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:19.910893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:19.924291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:19.924364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:19.924551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:05:19.924602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:19.924645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:05:19.924732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:05:19.938930Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:05:20.070472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:20.070740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.070981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:05:20.071286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:05:20.071355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.075955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:20.076118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:05:20.076344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.076411Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:05:20.076449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:05:20.076491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:05:20.080064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.080236Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:20.080390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:05:20.085251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.085321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.085389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:20.085486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.089878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:05:20.095299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:05:20.095603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:05:20.096786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:20.096963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:20.097045Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:20.097395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-03-28T09:05:20.097466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:20.097677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:20.097760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:20.102867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:20.102957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:20.103146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:20.103188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:05:20.103615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:20.103675Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:05:20.103783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:20.103838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.103895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:20.103931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.103975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:05:20.104019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:20.104085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:05:20.104138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:05:20.104214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:20.104254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:05:20.104290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:05:20.106472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:20.106588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:20.106628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
7.438093Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-03-28T09:05:27.438279Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-28T09:05:27.438329Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:05:27.438385Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-28T09:05:27.438435Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:05:27.438489Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2025-03-28T09:05:27.438549Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:05:27.438601Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-28T09:05:27.438642Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-28T09:05:27.438880Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:05:27.440102Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:05:27.441141Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-28T09:05:27.441453Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:27.441784Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:05:27.442305Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186234409547 2025-03-28T09:05:27.442597Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186234409546 2025-03-28T09:05:27.442773Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:05:27.443010Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:05:27.444402Z node 6 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186234409547 2025-03-28T09:05:27.451961Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:05:27.452177Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:05:27.452700Z node 6 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186234409548 2025-03-28T09:05:27.453092Z node 6 :TX_DATASHARD ERROR: Datashard's schemeshard pipe destroyed while no messages to sent at 
72075186234409549 2025-03-28T09:05:27.453145Z node 6 :TX_DATASHARD ERROR: Datashard's schemeshard pipe destroyed while no messages to sent at 72075186234409550 Forgetting tablet 72075186234409546 2025-03-28T09:05:27.455128Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-28T09:05:27.455330Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186234409548 2025-03-28T09:05:27.455712Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:05:27.456178Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:05:27.456233Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:05:27.456374Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:05:27.456795Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:05:27.456857Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:05:27.456942Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:27.462792Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:05:27.462879Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:05:27.463276Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:05:27.463308Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2025-03-28T09:05:27.463423Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:05:27.463445Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2025-03-28T09:05:27.464166Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-28T09:05:27.464225Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2025-03-28T09:05:27.465122Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:05:27.465247Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-28T09:05:27.465642Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-28T09:05:27.465705Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 
2025-03-28T09:05:27.466310Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-03-28T09:05:27.466442Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-28T09:05:27.466493Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:787:2696] TestWaitNotification: OK eventTxId 105 2025-03-28T09:05:27.467277Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:05:27.467652Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir/table_1" took 428us result status StatusPathDoesNotExist 2025-03-28T09:05:27.467843Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/dir/table_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/dir/table_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:05:27.468530Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:05:27.468727Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 227us result status StatusPathDoesNotExist 2025-03-28T09:05:27.468890Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:05:27.469537Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:05:27.469745Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 238us result status StatusSuccess 2025-03-28T09:05:27.470146Z node 6 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardExtSubDomainTest::CreateAndAlter [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive
>> Cdc::InitialScan_TopicAutoPartitioning [GOOD]
>> Cdc::InitialScanUpdatedRows
>> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD]
>> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true
>> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true [GOOD]
>> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false
>> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS
>> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4
|77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest
>> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46
>> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter [GOOD]
>> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4
>> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive [GOOD]
>> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst
|77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest
>> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender:
[1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:05:20.935377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:05:20.935488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:20.935546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:05:20.935578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:05:20.935619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:05:20.935647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:05:20.935696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:20.935805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:05:20.936131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:05:21.027116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:21.027170Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:21.041660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:05:21.041906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:05:21.042043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:05:21.052123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:05:21.052358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:05:21.053170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:21.053377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:21.055463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:21.056680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:21.056742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:21.056908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:05:21.056954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:21.056991Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:05:21.057074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:05:21.063761Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:05:21.203859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:21.204101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:21.204331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:05:21.204563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:05:21.204622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:21.208717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:21.208860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:05:21.209059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:21.209123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:05:21.209156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:05:21.209186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:05:21.215372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:21.215450Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:21.215490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:05:21.217886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:21.217949Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:21.218006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:21.218061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:05:21.222249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 
72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:05:21.225225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:05:21.225548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:05:21.226739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:21.226917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:21.226997Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:21.227341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:05:21.227668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:21.227891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:21.227991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:21.231597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:21.231662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:21.231876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:21.231923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:05:21.232341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:21.232400Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:05:21.232517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:21.232554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:21.232611Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:21.232649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:21.232700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:05:21.232744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:21.232788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:05:21.232824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:05:21.232910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:21.232949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:05:21.232986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:05:21.235200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:21.235330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:21.235369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... hard::TEvSchemeShard::TEvInitTenantSchemeShardResult> complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:05:28.981273Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:05:28.981595Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:05:28.982769Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:05:28.982837Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:05:28.983371Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:05:28.983439Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-28T09:05:28.983499Z node 8 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:05:29.019611Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-03-28T09:05:29.019796Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409549 2025-03-28T09:05:29.019856Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 102:0 HandleReply TEvConfigureStatus operationId:102:0 at schemeshard:72057594046678944 2025-03-28T09:05:29.019917Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409549 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2025-03-28T09:05:29.019963Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-03-28T09:05:29.021861Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:05:29.022010Z node 8 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:05:29.022072Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:05:29.022110Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 102:0, at tablet# 72057594046678944 2025-03-28T09:05:29.022159Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-28T09:05:29.022294Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:05:29.023958Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-28T09:05:29.024120Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-03-28T09:05:29.024490Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:29.024638Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 34359740524 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:29.024702Z node 8 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-03-28T09:05:29.024971Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-03-28T09:05:29.025024Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-03-28T09:05:29.025167Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:05:29.025276Z node 8 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[8:359:2334], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 72075186233409549, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:05:29.027043Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:29.027092Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:05:29.027294Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:29.027364Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [8:205:2207], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:05:29.027661Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:05:29.027711Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 102:0, ProgressState, NeedSyncHive: 0 2025-03-28T09:05:29.027745Z node 8 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-03-28T09:05:29.028318Z node 8 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:05:29.028424Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:05:29.028462Z node 8 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:05:29.028507Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-28T09:05:29.028546Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-28T09:05:29.028615Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-28T09:05:29.031099Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:05:29.031149Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:05:29.031298Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:05:29.031343Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:05:29.031393Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:05:29.031435Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:05:29.031479Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:05:29.031571Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:301:2292] message: TxId: 102 2025-03-28T09:05:29.031642Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:05:29.031703Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:05:29.031744Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:05:29.031919Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:05:29.033215Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 
2025-03-28T09:05:29.034161Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-03-28T09:05:29.034224Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:506:2445]
TestWaitNotification: OK eventTxId 102
TestModificationResults wait txId: 103
2025-03-28T09:05:29.037153Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-28T09:05:29.037282Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false }
2025-03-28T09:05:29.037319Z node 8 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, path /MyRoot/USER_0
2025-03-28T09:05:29.037427Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 103:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944
2025-03-28T09:05:29.037470Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944
2025-03-28T09:05:29.043954Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:05:29.044149Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, operation: ALTER DATABASE, path: /MyRoot/USER_0
TestModificationResult got TxId: 103, wait until txId: 103
|77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest
>> DataShardSnapshots::PostMergeNotCompactedTooEarly [GOOD]
>> DataShardSnapshots::PipelineAndMediatorRestoreRace
>> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS [GOOD]
>> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false
|77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest
|77.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap
|77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap
|77.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD]
Test command err: 2025-03-28T09:05:04.943110Z node 1 :METADATA_PROVIDER WARN:
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783858082895516:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:04.943167Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ce9/r3tmp/tmppoJQVr/pdisk_1.dat 2025-03-28T09:05:05.605036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:05.605163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:05.612028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:05.642505Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21500, node 1 2025-03-28T09:05:05.663032Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:05:05.674468Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:05:05.753874Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:05.753996Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:05.754005Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:05.754097Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:06.089574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:09.946865Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783858082895516:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:09.946930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:10.099061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783883852700527:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:10.101674Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783883852700534:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:10.102037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783883852700515:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:10.110977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:10.171347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:05:10.223269Z node 1 :TX_PROXY ERROR: Actor# [1:7486783883852700559:2657] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:05:10.276384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783883852700558:2476], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:05:10.276489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783883852700530:2473], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:05:10.351757Z node 1 :TX_PROXY ERROR: Actor# [1:7486783883852700784:2783] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:10.374513Z node 1 :TX_PROXY ERROR: Actor# [1:7486783883852700801:2792] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:10.620239Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01jqdzz0kz4x6252131re51saa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmIyNzczNjgtNDM1NWE1YTUtMzU2M2VlYy1iMTQ1NWY4Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:10.622119Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqdzz0k79p5fk5ttk4z9jvh1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjkwMThlOS00YjQ3YjExMy1kZTVmYTA5MC1jM2JlNDRhMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:10.647303Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqdzz0k8fhdhf50s86jhajy6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM0ZTdiYS1kYjYzYzZkZC01MDJmYzY4My00YjU0MDBjOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:10.650895Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jqdzz0k6agthjddk1fm7m79m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWM0ZTVjM2EtNjdiNTIxZmMtZDA1MzM3YzktODg1ZWQ2MTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:10.659011Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqdzz0k5c5d8tzmdpwvwbj46, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDJmZGYzZjYtZTQ0Mjc5NjctNzA4NDJiYWYtMzY1YTFhNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:10.660756Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jqdzz0k5bj3n7k2mcyt28knk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDkzZTIyZGYtZmI0ZmNhMDUtZWEyYmJlZjItMThlM2JhNDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:10.662239Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqdzz0k89jt5tm7vx0g85t9c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTdiOTg0NGYtYWJlM2YzZWUtZGM1Mjk3LTI0MGQ2MWUw, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:10.664040Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqdzz0k5cer82w314gsgqmaj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmZjMmMzMmItYzFkNzk1MGUtM2M5YTFjMzUtMmZiM2JjYzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:10.665348Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. 
Ctx: { TraceId: 01jqdzz0k44hz9bzr6157shafv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTFhNzExYWMtNTdmMDFhNjEtNDY2MWRmY2QtNGRkMTc1ZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:10.667064Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jqdzz0k592d0a800edwh3ykm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWJjNGI3NGEtZTQwOGY4MjgtNzA3MDdjMjMtZDc1NmQ3ZWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:10.668306Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710684. Ctx: { TraceId: 01jqdzz0kj9apdmm1j417fjm6c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTMyMzZmMjMtOGUzMTIxNDMtYWExYmI3MjktZTM0MzVjM2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:10.668929Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqdzz0k7cvzygqra90b0f4t3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWEzMmM5NDQtNmYwN2VlZjgtNzM3ODM5ODYtM2NhYTBlMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:10.669926Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710685. Ctx: { TraceId: 01jqdzz0k7b6pd6nmawshrptqk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2I4ZGI3OGItNjUzZDYwMmMtYTc0MjdiODktZDA1NmJhNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:10.670396Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jqdzz0k7bjjm38t886fp0kjp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzIzYjRmMzAtN2Q1OGMyYTAtODIwOTEyODMtOTRlYThkYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:10.671745Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jqdzz0k4c9qxp0v0qddb9gbm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWFmZjhmNmUtZWNhOTllN2MtY2Y2YzZmZS02ZWI2M2VjMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:10.671902Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710688. Ctx: { TraceId: 01jqdzz0k5cnh32kw4fv0b2gta, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmRlMmU4MmItMjkyMGMxMmUtYjEwMmMxNjMtODc2NGMwNDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:10.673072Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jqdzz0kh0bxqe5phyd3rszxd, Database: , Databa ... 
uestId: 440 2025-03-28T09:05:25.278373Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njc5YWVhYTYtYjY5MTQxYy1iM2E3N2UzYy0xZWZkOWExYg==, ActorId: [4:7486783929015168295:2350], ActorState: ExecuteState, TraceId: 01jqdzzf503v9ny3kq46ab81wx, Reply query error, msg: Pending previous query completion proxyRequestId: 446 2025-03-28T09:05:25.278401Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njc5YWVhYTYtYjY5MTQxYy1iM2E3N2UzYy0xZWZkOWExYg==, ActorId: [4:7486783929015168295:2350], ActorState: ExecuteState, TraceId: 01jqdzzf503v9ny3kq46ab81wx, Reply query error, msg: Pending previous query completion proxyRequestId: 448 2025-03-28T09:05:25.278462Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njc5YWVhYTYtYjY5MTQxYy1iM2E3N2UzYy0xZWZkOWExYg==, ActorId: [4:7486783929015168295:2350], ActorState: ExecuteState, TraceId: 01jqdzzf503v9ny3kq46ab81wx, Reply query error, msg: Pending previous query completion proxyRequestId: 462 2025-03-28T09:05:25.278513Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njc5YWVhYTYtYjY5MTQxYy1iM2E3N2UzYy0xZWZkOWExYg==, ActorId: [4:7486783929015168295:2350], ActorState: ExecuteState, TraceId: 01jqdzzf503v9ny3kq46ab81wx, Reply query error, msg: Pending previous query completion proxyRequestId: 463 2025-03-28T09:05:25.278552Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njc5YWVhYTYtYjY5MTQxYy1iM2E3N2UzYy0xZWZkOWExYg==, ActorId: [4:7486783929015168295:2350], ActorState: ExecuteState, TraceId: 01jqdzzf503v9ny3kq46ab81wx, Reply query error, msg: Pending previous query completion proxyRequestId: 467 2025-03-28T09:05:25.278585Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njc5YWVhYTYtYjY5MTQxYy1iM2E3N2UzYy0xZWZkOWExYg==, ActorId: [4:7486783929015168295:2350], ActorState: ExecuteState, TraceId: 01jqdzzf503v9ny3kq46ab81wx, Reply query error, msg: Pending previous query completion proxyRequestId: 469 2025-03-28T09:05:25.278612Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njc5YWVhYTYtYjY5MTQxYy1iM2E3N2UzYy0xZWZkOWExYg==, ActorId: [4:7486783929015168295:2350], ActorState: ExecuteState, TraceId: 01jqdzzf503v9ny3kq46ab81wx, Reply query error, msg: Pending previous query completion proxyRequestId: 473 2025-03-28T09:05:25.278639Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njc5YWVhYTYtYjY5MTQxYy1iM2E3N2UzYy0xZWZkOWExYg==, ActorId: [4:7486783929015168295:2350], ActorState: ExecuteState, TraceId: 01jqdzzf503v9ny3kq46ab81wx, Reply query error, msg: Pending previous query completion proxyRequestId: 479 2025-03-28T09:05:25.280204Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=OWI1ODIzOTgtNDcwZDFkMDEtOTQ0ZDI0ZjItZGI4YWUxMDA=, ActorId: [4:7486783929015168297:2352], ActorState: ExecuteState, TraceId: 01jqdzzf55crznkpwx467gy20t, Reply query error, msg: Pending previous query completion proxyRequestId: 492 2025-03-28T09:05:25.280795Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NWQ4OTgxNjItZjNmNjk5MDctYWQxN2Y3ZmMtZWE5ZTQ5NjM=, ActorId: [4:7486783929015168285:2348], ActorState: ExecuteState, TraceId: 01jqdzzf54772xkvqwkg8vwcn3, Reply query error, msg: Pending previous query completion proxyRequestId: 494 2025-03-28T09:05:25.280839Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTE2MmIzOWYtOTZhZmFmY2EtM2UzMjAxZDQtNGU3YjFkZjQ=, ActorId: [4:7486783929015168294:2349], ActorState: ExecuteState, TraceId: 01jqdzzf542bqp25f0a618dypm, Reply query error, msg: Pending previous query 
completion proxyRequestId: 496 2025-03-28T09:05:25.280893Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzkxNzM5ODMtMTc2YzNjNDUtMmQzMjE0ZDItOTJjZjg1ZjM=, ActorId: [4:7486783929015168296:2351], ActorState: ExecuteState, TraceId: 01jqdzzf55b29h3s46457npvdn, Reply query error, msg: Pending previous query completion proxyRequestId: 501 2025-03-28T09:05:25.284144Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njc5YWVhYTYtYjY5MTQxYy1iM2E3N2UzYy0xZWZkOWExYg==, ActorId: [4:7486783929015168295:2350], ActorState: ExecuteState, TraceId: 01jqdzzf503v9ny3kq46ab81wx, Reply query error, msg: Pending previous query completion proxyRequestId: 483 2025-03-28T09:05:25.284211Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njc5YWVhYTYtYjY5MTQxYy1iM2E3N2UzYy0xZWZkOWExYg==, ActorId: [4:7486783929015168295:2350], ActorState: ExecuteState, TraceId: 01jqdzzf503v9ny3kq46ab81wx, Reply query error, msg: Pending previous query completion proxyRequestId: 487 2025-03-28T09:05:25.284248Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njc5YWVhYTYtYjY5MTQxYy1iM2E3N2UzYy0xZWZkOWExYg==, ActorId: [4:7486783929015168295:2350], ActorState: ExecuteState, TraceId: 01jqdzzf503v9ny3kq46ab81wx, Reply query error, msg: Pending previous query completion proxyRequestId: 490 2025-03-28T09:05:25.284288Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njc5YWVhYTYtYjY5MTQxYy1iM2E3N2UzYy0xZWZkOWExYg==, ActorId: [4:7486783929015168295:2350], ActorState: ExecuteState, TraceId: 01jqdzzf503v9ny3kq46ab81wx, Reply query error, msg: Pending previous query completion proxyRequestId: 495 2025-03-28T09:05:25.286793Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njc5YWVhYTYtYjY5MTQxYy1iM2E3N2UzYy0xZWZkOWExYg==, ActorId: [4:7486783929015168295:2350], ActorState: ExecuteState, TraceId: 01jqdzzf503v9ny3kq46ab81wx, Reply query error, msg: Pending previous query completion proxyRequestId: 497 2025-03-28T09:05:25.286829Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njc5YWVhYTYtYjY5MTQxYy1iM2E3N2UzYy0xZWZkOWExYg==, ActorId: [4:7486783929015168295:2350], ActorState: ExecuteState, TraceId: 01jqdzzf503v9ny3kq46ab81wx, Reply query error, msg: Pending previous query completion proxyRequestId: 499 2025-03-28T09:05:25.286860Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njc5YWVhYTYtYjY5MTQxYy1iM2E3N2UzYy0xZWZkOWExYg==, ActorId: [4:7486783929015168295:2350], ActorState: ExecuteState, TraceId: 01jqdzzf503v9ny3kq46ab81wx, Reply query error, msg: Pending previous query completion proxyRequestId: 506 2025-03-28T09:05:25.286910Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Njc5YWVhYTYtYjY5MTQxYy1iM2E3N2UzYy0xZWZkOWExYg==, ActorId: [4:7486783929015168295:2350], ActorState: ExecuteState, TraceId: 01jqdzzf503v9ny3kq46ab81wx, Reply query error, msg: Pending previous query completion proxyRequestId: 510 2025-03-28T09:05:25.286951Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NThhMmQzMGYtNjZiYjBjYzItNjM1Yjk3MTUtOTIxZjUzM2I=, ActorId: [4:7486783929015168298:2353], ActorState: ExecuteState, TraceId: 01jqdzzf541ettmmabd68ppfgx, Reply query error, msg: Pending previous query completion proxyRequestId: 484 2025-03-28T09:05:25.286990Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NThhMmQzMGYtNjZiYjBjYzItNjM1Yjk3MTUtOTIxZjUzM2I=, ActorId: [4:7486783929015168298:2353], ActorState: ExecuteState, TraceId: 01jqdzzf541ettmmabd68ppfgx, Reply query error, msg: Pending 
previous query completion proxyRequestId: 491 2025-03-28T09:05:25.287041Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MjljYTg1YjUtOWQ3Yjk1OTEtNjI2MjczMDQtZjA3NGMxYjk=, ActorId: [4:7486783929015168280:2343], ActorState: ExecuteState, TraceId: 01jqdzzf5468br35cydg33xhka, Reply query error, msg: Pending previous query completion proxyRequestId: 485 2025-03-28T09:05:25.287087Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MjljYTg1YjUtOWQ3Yjk1OTEtNjI2MjczMDQtZjA3NGMxYjk=, ActorId: [4:7486783929015168280:2343], ActorState: ExecuteState, TraceId: 01jqdzzf5468br35cydg33xhka, Reply query error, msg: Pending previous query completion proxyRequestId: 489 2025-03-28T09:05:25.287131Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjA5OTc1ZDktOWJkNmMyODAtNDczMWJmZjYtMTgxYTk4MzI=, ActorId: [4:7486783929015168282:2345], ActorState: ExecuteState, TraceId: 01jqdzzf555bp8arp4tyts1k17, Reply query error, msg: Pending previous query completion proxyRequestId: 486 2025-03-28T09:05:25.287178Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjA5OTc1ZDktOWJkNmMyODAtNDczMWJmZjYtMTgxYTk4MzI=, ActorId: [4:7486783929015168282:2345], ActorState: ExecuteState, TraceId: 01jqdzzf555bp8arp4tyts1k17, Reply query error, msg: Pending previous query completion proxyRequestId: 493 2025-03-28T09:05:25.287206Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjA5OTc1ZDktOWJkNmMyODAtNDczMWJmZjYtMTgxYTk4MzI=, ActorId: [4:7486783929015168282:2345], ActorState: ExecuteState, TraceId: 01jqdzzf555bp8arp4tyts1k17, Reply query error, msg: Pending previous query completion proxyRequestId: 498 2025-03-28T09:05:25.287247Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjA5OTc1ZDktOWJkNmMyODAtNDczMWJmZjYtMTgxYTk4MzI=, ActorId: [4:7486783929015168282:2345], ActorState: ExecuteState, TraceId: 01jqdzzf555bp8arp4tyts1k17, Reply query error, msg: Pending previous query completion proxyRequestId: 500 2025-03-28T09:05:25.287275Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjA5OTc1ZDktOWJkNmMyODAtNDczMWJmZjYtMTgxYTk4MzI=, ActorId: [4:7486783929015168282:2345], ActorState: ExecuteState, TraceId: 01jqdzzf555bp8arp4tyts1k17, Reply query error, msg: Pending previous query completion proxyRequestId: 502 2025-03-28T09:05:25.290601Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjA5OTc1ZDktOWJkNmMyODAtNDczMWJmZjYtMTgxYTk4MzI=, ActorId: [4:7486783929015168282:2345], ActorState: ExecuteState, TraceId: 01jqdzzf555bp8arp4tyts1k17, Reply query error, msg: Pending previous query completion proxyRequestId: 503 2025-03-28T09:05:25.290659Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjA5OTc1ZDktOWJkNmMyODAtNDczMWJmZjYtMTgxYTk4MzI=, ActorId: [4:7486783929015168282:2345], ActorState: ExecuteState, TraceId: 01jqdzzf555bp8arp4tyts1k17, Reply query error, msg: Pending previous query completion proxyRequestId: 504 2025-03-28T09:05:25.290718Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjA5OTc1ZDktOWJkNmMyODAtNDczMWJmZjYtMTgxYTk4MzI=, ActorId: [4:7486783929015168282:2345], ActorState: ExecuteState, TraceId: 01jqdzzf555bp8arp4tyts1k17, Reply query error, msg: Pending previous query completion proxyRequestId: 509 2025-03-28T09:05:25.294078Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjA5OTc1ZDktOWJkNmMyODAtNDczMWJmZjYtMTgxYTk4MzI=, ActorId: [4:7486783929015168282:2345], ActorState: ExecuteState, TraceId: 01jqdzzf555bp8arp4tyts1k17, Reply query error, 
msg: Pending previous query completion proxyRequestId: 511
2025-03-28T09:05:25.297570Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Y2U2NjdkZTEtNzUyMGI2YTctNGI5N2ExMTItOWZjYTZmNTU=, ActorId: [4:7486783929015168283:2346], ActorState: ExecuteState, TraceId: 01jqdzzf546t6gftk0h74qv4hg, Reply query error, msg: Pending previous query completion proxyRequestId: 488
2025-03-28T09:05:25.297644Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Y2U2NjdkZTEtNzUyMGI2YTctNGI5N2ExMTItOWZjYTZmNTU=, ActorId: [4:7486783929015168283:2346], ActorState: ExecuteState, TraceId: 01jqdzzf546t6gftk0h74qv4hg, Reply query error, msg: Pending previous query completion proxyRequestId: 505
2025-03-28T09:05:25.298558Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Y2U2NjdkZTEtNzUyMGI2YTctNGI5N2ExMTItOWZjYTZmNTU=, ActorId: [4:7486783929015168283:2346], ActorState: ExecuteState, TraceId: 01jqdzzf546t6gftk0h74qv4hg, Reply query error, msg: Pending previous query completion proxyRequestId: 507
2025-03-28T09:05:25.298613Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Y2U2NjdkZTEtNzUyMGI2YTctNGI5N2ExMTItOWZjYTZmNTU=, ActorId: [4:7486783929015168283:2346], ActorState: ExecuteState, TraceId: 01jqdzzf546t6gftk0h74qv4hg, Reply query error, msg: Pending previous query completion proxyRequestId: 508
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-28T09:05:23.419754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-28T09:05:23.419880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:05:23.419925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-28T09:05:23.419959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-28T09:05:23.419999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-28T09:05:23.420042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-28T09:05:23.420141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:05:23.420232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-28T09:05:23.420587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:05:23.509823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:05:23.509874Z node 1
:IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:23.528525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:05:23.528747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:05:23.528895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:05:23.542556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:05:23.542764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:05:23.543441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:23.543643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:23.555479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:23.556867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:23.556933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:23.557121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:05:23.557181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:23.557232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:05:23.557321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:05:23.566124Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:05:23.735906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:23.736135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:23.736361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:05:23.736576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:05:23.736628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:23.739669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:23.739860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:05:23.740122Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:23.740191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:05:23.740230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:05:23.740270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:05:23.743532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:23.743616Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:23.743656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:05:23.746956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:23.747035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:23.747118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:23.747212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:05:23.752825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:05:23.755401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:05:23.755642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:05:23.756919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:23.757105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:23.757179Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:23.757557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:05:23.757644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:23.757868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:23.757947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:23.760153Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:23.760212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:23.760454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:23.760533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:05:23.760983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:23.761033Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:05:23.761175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:23.761221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:23.761268Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:23.761304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:23.761347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:05:23.761389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:23.761428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:05:23.761462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:05:23.761543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:23.761587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:05:23.761654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:05:23.764054Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:23.764184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:23.764225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Z node 7 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [7:123:2149] sender: [7:239:2058] recipient: [7:15:2062] 2025-03-28T09:05:29.691026Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:29.691304Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:29.691564Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:05:29.691795Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:05:29.691847Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:29.694109Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:29.694221Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:05:29.694414Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:29.694473Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:05:29.694534Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:05:29.694582Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:05:29.696777Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:29.696851Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:29.696892Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:05:29.699422Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:29.699493Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:29.699572Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:29.699657Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:05:29.699837Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:05:29.702113Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet 
strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:05:29.702350Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:05:29.703403Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:29.703559Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 30064773229 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:29.703625Z node 7 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:29.703946Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:05:29.704023Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:29.704261Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:29.704353Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:05:29.706549Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:29.706610Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:29.706868Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:29.706937Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:05:29.707277Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:29.707327Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:05:29.707455Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:29.707493Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:29.707549Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:29.707595Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:29.707643Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:05:29.707688Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:29.707736Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:05:29.707795Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
RemoveTx for txid 1:0 2025-03-28T09:05:29.707865Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:29.707907Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:05:29.707946Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:05:29.708531Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:29.708662Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:29.708722Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:05:29.708777Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:05:29.708830Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:29.708954Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:05:29.712732Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:05:29.713224Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:05:29.713753Z node 7 :TX_PROXY DEBUG: actor# [7:269:2260] Bootstrap 2025-03-28T09:05:29.746418Z node 7 :TX_PROXY DEBUG: actor# [7:269:2260] Become StateWork (SchemeCache [7:274:2265]) 2025-03-28T09:05:29.749797Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:29.750166Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_1" ExternalSchemeShard: true } 2025-03-28T09:05:29.750235Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 101:0, path /MyRoot/USER_1 2025-03-28T09:05:29.750432Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-03-28T09:05:29.750506Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path 
hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-03-28T09:05:29.751798Z node 7 :TX_PROXY DEBUG: actor# [7:269:2260] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:05:29.763217Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Invalid AlterExtSubDomain request: Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:29.763456Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Invalid AlterExtSubDomain request: Check failed: path: '/MyRoot/USER_1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: ALTER DATABASE, path: /MyRoot/USER_1 2025-03-28T09:05:29.764356Z node 7 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 >> KqpScanSpilling::SelfJoin >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true >> KqpPg::CheckPgAutoParams+useSink [GOOD] >> KqpPg::CheckPgAutoParams-useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 |77.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |77.8%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |77.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |77.8%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |77.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/mind/address_classification/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 >> KqpPg::SelectIndex-useSink [GOOD] >> KqpPg::TableDeleteAllData+useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink >> TPQTestSlow::TestWriteVeryBigMessage |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst |77.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |77.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false >> SlowTopicAutopartitioning::CDC_Write |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true >> TNetClassifierTest::TestInitFromFile >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] 
sender: [1:125:2058] recipient: [1:108:2140] 2025-03-28T09:05:26.726366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:05:26.726452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:26.726499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:05:26.726543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:05:26.726582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:05:26.726610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:05:26.726662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:26.726762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:05:26.727094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:05:26.814552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:26.814606Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:26.842222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:05:26.842431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:05:26.842599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:05:26.851902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:05:26.852277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:05:26.853043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:26.853347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:26.859400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:26.860982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:26.861055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:26.861202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:05:26.861252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:26.861319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:05:26.861519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at 
schemeshard: 72057594046678944 2025-03-28T09:05:26.872039Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-03-28T09:05:27.025709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:27.025968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:27.026228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:05:27.026468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:05:27.026529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:27.029169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:27.029332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:05:27.029529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:27.029587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:05:27.029626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:05:27.029662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:05:27.031921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:27.031984Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:27.032022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:05:27.034371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:27.034426Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:27.034487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:27.034597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:05:27.038871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-03-28T09:05:27.043819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:05:27.044088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:05:27.045149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:27.045304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:27.045362Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:27.045661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:05:27.045739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:27.045913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:27.046000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:27.048433Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:27.048509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:27.048700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:27.048740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:05:27.049202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:27.049249Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:05:27.049361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:27.049394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:27.049441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:27.049471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:27.049504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:05:27.049544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:27.049575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation 
id: 1:0 2025-03-28T09:05:27.049603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:05:27.049685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:27.049742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:05:27.049776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:05:27.051647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:27.051783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:27.051824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... HARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 30064773228 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:35.059725Z node 7 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 104:0, stepId:5000005, at schemeshard: 72057594046678944 2025-03-28T09:05:35.059982Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:05:35.060038Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:05:35.060090Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:05:35.060134Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:05:35.060212Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:05:35.060302Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-28T09:05:35.060488Z node 7 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [7:352:2330], msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72057594046678944 2025-03-28T09:05:35.060543Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:05:35.060593Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:05:35.060641Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:05:35.060707Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:05:35.060757Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-03-28T09:05:35.060804Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 6 2025-03-28T09:05:35.071842Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409546, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: 
"user__attr_1" Value: "value" } UserAttributesVersion: 2 2025-03-28T09:05:35.072020Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72075186233409546 2025-03-28T09:05:35.072270Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2025-03-28T09:05:35.072602Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:35.072668Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:05:35.072912Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:35.072974Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 FAKE_COORDINATOR: Erasing txId 104 2025-03-28T09:05:35.073856Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:05:35.073983Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:05:35.074043Z node 7 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:05:35.074096Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-03-28T09:05:35.074148Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:05:35.074263Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-03-28T09:05:35.077933Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 2 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-03-28T09:05:35.078057Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:05:35.078180Z node 7 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[7:352:2330], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 2, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 2, tenantHive: 18446744073709551615, 
tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:05:35.078351Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-28T09:05:35.078388Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-28T09:05:35.078544Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-28T09:05:35.078585Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:446:2398], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-03-28T09:05:35.079168Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:05:35.079258Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:05:35.079480Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186233409546, cookie: 0 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-28T09:05:35.079797Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-28T09:05:35.079850Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-28T09:05:35.080376Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-28T09:05:35.080485Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:05:35.080539Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:542:2492] TestWaitNotification: OK eventTxId 104 2025-03-28T09:05:35.081179Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:05:35.081410Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 272us result status StatusSuccess 2025-03-28T09:05:35.081876Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" 
Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:35.082610Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-03-28T09:05:35.082858Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 275us result status StatusSuccess 2025-03-28T09:05:35.083280Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter+useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false [GOOD] >> 
TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 >> TNetClassifierTest::TestInitFromBadlyFormattedFile |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 >> TNetClassifierTest::TestInitFromFile [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 >> Cdc::InitialScanUpdatedRows [GOOD] >> Cdc::MustNotLoseSchemaSnapshot |77.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |77.8%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] Test command err: 2025-03-28T09:03:19.320169Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783406017897392:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:19.320229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:03:19.375811Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783408402841312:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:19.375856Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cce/r3tmp/tmpqvNCMl/pdisk_1.dat 2025-03-28T09:03:20.065189Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:20.077187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:20.077273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:20.079883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:20.079948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:20.084752Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 
2025-03-28T09:03:20.084852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:03:20.088324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6872, node 1 2025-03-28T09:03:20.235649Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:20.235675Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:20.235683Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:20.235823Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18023 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:20.719516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:03:23.162675Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-28T09:03:23.162779Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-28T09:03:23.162791Z node 2 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-28T09:03:23.162831Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7486783425582710797:2306], Start check tables existence, number paths: 2 2025-03-28T09:03:23.166921Z node 2 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 2 2025-03-28T09:03:23.166988Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7486783425582710797:2306], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-28T09:03:23.167032Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7486783425582710797:2306], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-28T09:03:23.167084Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7486783425582710797:2306], Successfully finished 2025-03-28T09:03:23.167158Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-28T09:03:23.234111Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-28T09:03:23.236920Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjA4NjhiZGUtYjgwNDFlYTktM2Y0OGNiZWYtNzY4YjJjOTQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YjA4NjhiZGUtYjgwNDFlYTktM2Y0OGNiZWYtNzY4YjJjOTQ= 2025-03-28T09:03:23.237289Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 2 2025-03-28T09:03:23.237314Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-28T09:03:23.237328Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-28T09:03:23.246006Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783423197767446:2332], Start check tables existence, number paths: 2 2025-03-28T09:03:23.246098Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjA4NjhiZGUtYjgwNDFlYTktM2Y0OGNiZWYtNzY4YjJjOTQ=, ActorId: [1:7486783423197767447:2333], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:23.247152Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783423197767446:2332], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-28T09:03:23.247195Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783423197767446:2332], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-28T09:03:23.247217Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783423197767446:2332], Successfully finished 2025-03-28T09:03:23.247258Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-28T09:03:23.247936Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783423197767473:2518], DatabaseId: Root, 
PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:23.250899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:03:23.255602Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783423197767473:2518], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-28T09:03:23.255781Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783423197767473:2518], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-28T09:03:23.277456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783423197767473:2518], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:03:23.373468Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783423197767473:2518], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:23.378175Z node 1 :TX_PROXY ERROR: Actor# [1:7486783423197767552:2577] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:03:23.378355Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783423197767473:2518], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-28T09:03:23.380530Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmQ1ZmFkYTgtODEzMDAzYjktOWE5NTI2NzAtYzk4MzRlMmM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZmQ1ZmFkYTgtODEzMDAzYjktOWE5NTI2NzAtYzk4MzRlMmM= 2025-03-28T09:03:23.380784Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmQ1ZmFkYTgtODEzMDAzYjktOWE5NTI2NzAtYzk4MzRlMmM=, ActorId: [1:7486783423197767562:2336], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:23.380916Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZmQ1ZmFkYTgtODEzMDAzYjktOWE5NTI2NzAtYzk4MzRlMmM=, ActorId: [1:7486783423197767562:2336], ActorState: ReadyState, TraceId: 01jqdzvr7m8qena462km13ca5f, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7486783423197767561:2585] database: Root databaseId: /Root pool id: sample_pool_id 2025-03-28T09:03:23.380956Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-28T09:03:23.380965Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-28T09:03:23.381025Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7486783423197767562:2336], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZmQ1ZmFkYTgtODEzMDAzYjktOWE5NTI2NzAtYzk4MzRlMmM= 2025-03-28T09:03:23.381080Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783423197767564:2337], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-28T09:03:23.381158Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7486783423197767565:2338], Database: /Root, Start database fetching 2025-03-28T09:03:23.381408Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7486783423197767565:2338], Database: /Root, Database info successfully fetched, serverless: 0 2025-03-28T09:03:23.381463Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-03-28T09:03:23.381514Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7486783423197767572:2339], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ZmQ1ZmFkYTgtODEzMDAzYjktOWE5NTI2NzAtYzk4MzRlMmM=, Start pool fetching 2025-03-28T09:03:23.381539Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7486783423197767574:2340], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2 ... leState: Disconnected -> Connecting 2025-03-28T09:05:30.525054Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24713, node 12 2025-03-28T09:05:30.679482Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:30.679512Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:30.679524Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:30.679707Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29458 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:31.087920Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:05:31.097843Z node 12 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:05:35.615515Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-28T09:05:35.615730Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-28T09:05:35.615750Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-28T09:05:35.619527Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=YzIwNGZhZTMtNDRiNjgxMzUtZGRmZmRmM2YtY2ZkMWI4YzY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YzIwNGZhZTMtNDRiNjgxMzUtZGRmZmRmM2YtY2ZkMWI4YzY= 2025-03-28T09:05:35.620345Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7486783990995448637:2330], Start check tables existence, number paths: 2 2025-03-28T09:05:35.620450Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=YzIwNGZhZTMtNDRiNjgxMzUtZGRmZmRmM2YtY2ZkMWI4YzY=, ActorId: [12:7486783990995448638:2331], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:05:35.620596Z node 12 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-03-28T09:05:35.624176Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7486783990995448637:2330], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-28T09:05:35.624258Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7486783990995448637:2330], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-28T09:05:35.624311Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [12:7486783990995448637:2330], Successfully finished 2025-03-28T09:05:35.624365Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-28T09:05:35.639076Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7486783990995448664:2309], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:05:35.644932Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:05:35.646930Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7486783990995448664:2309], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-28T09:05:35.647134Z node 12 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7486783990995448664:2309], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-28T09:05:35.677223Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7486783990995448664:2309], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:05:35.763363Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7486783990995448664:2309], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:05:35.767070Z node 12 :TX_PROXY ERROR: Actor# [12:7486783990995448715:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:35.767285Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7486783990995448664:2309], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-28T09:05:35.771110Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: Root, PoolId: sample_pool_id 2025-03-28T09:05:35.771155Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id Root 2025-03-28T09:05:35.771255Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7486783990995448722:2333], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2025-03-28T09:05:35.777172Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7486783990995448722:2333], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-28T09:05:35.777276Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2025-03-28T09:05:35.777313Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-03-28T09:05:35.777664Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [12:7486783990995448731:2334], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-03-28T09:05:35.779563Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [12:7486783990995448731:2334], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-03-28T09:05:35.790169Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default 2025-03-28T09:05:35.790209Z node 12 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-28T09:05:35.790351Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=YzIwNGZhZTMtNDRiNjgxMzUtZGRmZmRmM2YtY2ZkMWI4YzY=, ActorId: [12:7486783990995448638:2331], ActorState: ReadyState, TraceId: 01jqdzzshdd5a2r23p974124hn, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: GRANT ALL ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `test@user`; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-03-28T09:05:35.791186Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7486783990995448743:2336], DatabaseId: /Root, PoolId: default, Start pool fetching 2025-03-28T09:05:35.793332Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7486783990995448743:2336], DatabaseId: /Root, PoolId: 
default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:35.793447Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:35.810503Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:05:35.815027Z node 12 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [12:7486783990995448731:2334], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2025-03-28T09:05:35.816060Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=YzIwNGZhZTMtNDRiNjgxMzUtZGRmZmRmM2YtY2ZkMWI4YzY=, ActorId: [12:7486783990995448638:2331], ActorState: ExecuteState, TraceId: 01jqdzzshdd5a2r23p974124hn, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [12:7486783990995448752:2331] WorkloadServiceCleanup: 0 2025-03-28T09:05:35.818574Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=YzIwNGZhZTMtNDRiNjgxMzUtZGRmZmRmM2YtY2ZkMWI4YzY=, ActorId: [12:7486783990995448638:2331], ActorState: CleanupState, TraceId: 01jqdzzshdd5a2r23p974124hn, EndCleanup, isFinal: 0 2025-03-28T09:05:35.818635Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=YzIwNGZhZTMtNDRiNjgxMzUtZGRmZmRmM2YtY2ZkMWI4YzY=, ActorId: [12:7486783990995448638:2331], ActorState: CleanupState, TraceId: 01jqdzzshdd5a2r23p974124hn, Sent query response back to proxy, proxyRequestId: 3, proxyId: [12:7486783969520611520:2065] 2025-03-28T09:05:35.837737Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=YzIwNGZhZTMtNDRiNjgxMzUtZGRmZmRmM2YtY2ZkMWI4YzY=, ActorId: [12:7486783990995448638:2331], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:05:35.837797Z node 12 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=12&id=YzIwNGZhZTMtNDRiNjgxMzUtZGRmZmRmM2YtY2ZkMWI4YzY=, ActorId: [12:7486783990995448638:2331], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:05:35.837826Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=YzIwNGZhZTMtNDRiNjgxMzUtZGRmZmRmM2YtY2ZkMWI4YzY=, ActorId: [12:7486783990995448638:2331], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:05:35.837860Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=YzIwNGZhZTMtNDRiNjgxMzUtZGRmZmRmM2YtY2ZkMWI4YzY=, ActorId: [12:7486783990995448638:2331], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:05:35.837943Z node 12 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=12&id=YzIwNGZhZTMtNDRiNjgxMzUtZGRmZmRmM2YtY2ZkMWI4YzY=, ActorId: [12:7486783990995448638:2331], ActorState: unknown state, Session actor destroyed >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:05:29.777898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:05:29.778026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:29.778078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:05:29.778128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:05:29.778176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:05:29.778208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:05:29.778299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:29.778391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:05:29.778784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:05:29.861672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:29.861735Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:29.877161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:05:29.877387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:05:29.877510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:05:29.883298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:05:29.883527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:05:29.884216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:29.884405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:29.886060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:29.887437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:29.887504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:29.887698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:05:29.887752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:29.887795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:05:29.887941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:05:29.894945Z node 1 :HIVE INFO: [72057594037968897] started, primary 
subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:05:30.031047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:30.031317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:30.031577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:05:30.031824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:05:30.031903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:30.034633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:30.034819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:05:30.035050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:30.035128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:05:30.035176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:05:30.035216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:05:30.039675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:30.039772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:30.039821Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:05:30.047184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:30.047266Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:30.047323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:30.047376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:05:30.051326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:05:30.053869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 
72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:05:30.054093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:05:30.055240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:30.055393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:30.055449Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:30.055784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:05:30.055843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:30.056030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:30.056136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:30.059333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:30.059389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:30.059626Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:30.059672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:05:30.060116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:30.060173Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:05:30.060277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:30.060325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:30.060370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:30.060402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:30.060444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:05:30.060486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:30.060527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:05:30.060559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:05:30.060641Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:30.060679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:05:30.060712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:05:30.062779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:30.062909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:30.062966Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... D INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:05:39.438057Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:05:39.438141Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-28T09:05:39.440284Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-28T09:05:39.440451Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-03-28T09:05:39.441289Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:39.441439Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 30064773229 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:39.441518Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:05:39.441625Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 102 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:39.441683Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:05:39.441744Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 134 2025-03-28T09:05:39.443590Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:05:39.443952Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:05:39.445664Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:05:39.445746Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:39.445892Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 134 -> 135 2025-03-28T09:05:39.446217Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:39.446302Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:05:39.448292Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:39.448342Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:39.448502Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:05:39.448670Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:39.448730Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-28T09:05:39.448772Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:05:39.449123Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:05:39.449184Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 102:0 ProgressState 2025-03-28T09:05:39.449248Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 135 -> 240 2025-03-28T09:05:39.450151Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:05:39.450241Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:05:39.450276Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:05:39.450314Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:05:39.450353Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:39.451125Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:05:39.451212Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 
18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:05:39.451241Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:05:39.451271Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:05:39.451301Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:05:39.451370Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-28T09:05:39.454552Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:05:39.454617Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:05:39.454799Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:05:39.454856Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:05:39.454911Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:05:39.454960Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:05:39.455013Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:05:39.455091Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:05:39.455140Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:05:39.455188Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:05:39.455281Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:05:39.455873Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:05:39.455935Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:05:39.456019Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:05:39.456478Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:05:39.456540Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:05:39.456620Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:39.457918Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:05:39.458542Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:05:39.460776Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 
candidates, at schemeshard: 72057594046678944 2025-03-28T09:05:39.460912Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:05:39.461215Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:05:39.461282Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:05:39.461804Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:05:39.461931Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:05:39.461983Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:341:2332] TestWaitNotification: OK eventTxId 102 2025-03-28T09:05:39.462587Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:05:39.462843Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 318us result status StatusPathDoesNotExist 2025-03-28T09:05:39.463033Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] Test command err: 2025-03-28T09:05:36.156718Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783995465033338:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:36.156768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001784/r3tmp/tmpRQk0T6/pdisk_1.dat 2025-03-28T09:05:36.826851Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:36.853062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:36.853218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:36.856178Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:36.891289Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001784/r3tmp/yandexVDqJGC.tmp 2025-03-28T09:05:36.891311Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001784/r3tmp/yandexVDqJGC.tmp 2025-03-28T09:05:36.891584Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001784/r3tmp/yandexVDqJGC.tmp 2025-03-28T09:05:36.891713Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration |77.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |77.9%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] >> KqpScanSpilling::SelfJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:05:24.439253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:05:24.439352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:24.439390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:05:24.439421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:05:24.439461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:05:24.439488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:05:24.439540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:24.439605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:05:24.439900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:05:24.523615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:24.523671Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:24.538043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:05:24.538269Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:05:24.538412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:05:24.558941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:05:24.559143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:05:24.559794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:24.559995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:24.562134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:24.563444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:24.563532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:24.563677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:05:24.563745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:24.563791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:05:24.563865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:05:24.570650Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:05:24.700818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:24.701089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:24.701327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:05:24.701562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:05:24.701630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:24.711574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:24.711740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:05:24.711939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:24.712004Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:05:24.712043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:05:24.712220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:05:24.715781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:24.715833Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:24.715878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:05:24.719556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:24.719615Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:24.719676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:24.719726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:05:24.723008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:05:24.725457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:05:24.725646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:05:24.726436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:24.726539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:24.726601Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:24.726827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:05:24.726871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:24.726996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:24.727051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:24.728959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-03-28T09:05:24.729015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:24.729184Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:24.729225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:05:24.729624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:24.729677Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:05:24.729764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:24.729796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:24.729841Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:24.729874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:24.729926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:05:24.729971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:24.730006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:05:24.730033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:05:24.730096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:24.730135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:05:24.730164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:05:24.736984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:24.737121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:24.737154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 2 2025-03-28T09:05:40.485803Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 116:0 1 -> 2 2025-03-28T09:05:40.486507Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 116:1, propose status:StatusAccepted, reason: , at schemeshard: 72075186233409546 2025-03-28T09:05:40.486593Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 116:0, at schemeshard: 72075186233409546 2025-03-28T09:05:40.486785Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 12 2025-03-28T09:05:40.486877Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 3 2025-03-28T09:05:40.496121Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 116, response: Status: StatusAccepted TxId: 116 SchemeshardId: 72075186233409546 PathId: 9, at schemeshard: 72075186233409546 2025-03-28T09:05:40.496418Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 116, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2025-03-28T09:05:40.496801Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-28T09:05:40.496868Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-28T09:05:40.497200Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 116, path id: [OwnerId: 72075186233409546, LocalPathId: 9] 2025-03-28T09:05:40.497334Z node 7 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-28T09:05:40.497398Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:726:2628], at schemeshard: 72075186233409546, txId: 116, path id: 1 2025-03-28T09:05:40.497468Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [7:726:2628], at schemeshard: 72075186233409546, txId: 116, path id: 9 2025-03-28T09:05:40.498323Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-03-28T09:05:40.498419Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 116:0 ProgressState, operation type: TxCreateTable, at tablet# 72075186233409546 2025-03-28T09:05:40.503029Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 116:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-03-28T09:05:40.504291Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-03-28T09:05:40.504460Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 16 PathOwnerId: 72075186233409546, cookie: 116 2025-03-28T09:05:40.504523Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-03-28T09:05:40.504600Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 16 2025-03-28T09:05:40.504667Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 13 2025-03-28T09:05:40.505490Z node 7 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-03-28T09:05:40.505589Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 9 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2025-03-28T09:05:40.505620Z node 7 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2025-03-28T09:05:40.505655Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 9], version: 1 2025-03-28T09:05:40.505690Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 9] was 4 2025-03-28T09:05:40.505773Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 116, ready parts: 0/1, is published: true 2025-03-28T09:05:40.516534Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72057594037968897 cookie: 72075186233409546:11 msg type: 268697601 2025-03-28T09:05:40.516772Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72057594037968897 2025-03-28T09:05:40.516847Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-03-28T09:05:40.521464Z node 7 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 9 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-03-28T09:05:40.521791Z node 7 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72075186233409546, OwnerIdx 11, type DataShard, boot OK, tablet id 72075186233409556 2025-03-28T09:05:40.522014Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72075186233409546 message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-03-28T09:05:40.522081Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2025-03-28T09:05:40.522263Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 116:0, at schemeshard: 72075186233409546, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 
72075186233409556 Origin: 72057594037968897 2025-03-28T09:05:40.522339Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, at tabletId: 72075186233409546 2025-03-28T09:05:40.522462Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2025-03-28T09:05:40.522621Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 116:0 2 -> 3 2025-03-28T09:05:40.524262Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-03-28T09:05:40.524783Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2025-03-28T09:05:40.528617Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 116:0, at schemeshard: 72075186233409546 2025-03-28T09:05:40.529106Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2025-03-28T09:05:40.529179Z node 7 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId# 116:0 ProgressState at tabletId# 72075186233409546 2025-03-28T09:05:40.529289Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 seqNo: 3:8 2025-03-28T09:05:40.530327Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 675 RawX2: 30064773660 } TxBody: "\n\236\004\n\007Table11\020\t\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\n\000\220\000\000\020\000\001\020\t:\004\010\003\020\010" TxId: 116 ExecLevel: 0 Flags: 0 SchemeShardId: 72075186233409546 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } SubDomainPathId: 1 2025-03-28T09:05:40.552297Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72075186233409556 cookie: 72075186233409546:11 msg type: 269549568 
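Note: the TxKind: TX_KIND_SCHEME body above is just the serialized form of an ordinary CREATE TABLE. The Table12 proposal that follows spells the same schema out in readable protobuf (key Uint32, Value Utf8, primary key on key); as YQL DDL it would correspond to roughly the statement below. The path prefix is taken from the test's /MyRoot/USER_0 database, and the exact statement is an assumption, since this test drives schemeshard with protobuf transactions directly rather than through SQL:

CREATE TABLE `/MyRoot/USER_0/Table12` (
    key Uint32,
    Value Utf8,
    PRIMARY KEY (key)
);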
2025-03-28T09:05:40.552602Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72075186233409556 TestModificationResult got TxId: 116, wait until txId: 116 TestModificationResults wait txId: 117 2025-03-28T09:05:40.626677Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table12" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key" } } TxId: 117 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2025-03-28T09:05:40.631823Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 117, response: Status: StatusQuotaExceeded Reason: "Request exceeded a limit on the number of schema operations, try again later." TxId: 117 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-03-28T09:05:40.632041Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 117, database: /MyRoot/USER_0, subject: , status: StatusQuotaExceeded, reason: Request exceeded a limit on the number of schema operations, try again later., operation: CREATE TABLE, path: /MyRoot/USER_0/Table12 TestModificationResult got TxId: 117, wait until txId: 117 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 |77.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> TNetClassifierTest::TestInitFromRemoteSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoin [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/2909/001753/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk1 Trying to start YDB, gRPC: 2121, MsgBus: 16192 2025-03-28T09:05:32.479597Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783976995306753:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:32.480346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001753/r3tmp/tmp5ZcGDO/pdisk_1.dat 2025-03-28T09:05:33.091526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:33.091639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:33.094637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:33.126335Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2121, node 1 2025-03-28T09:05:33.140024Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:05:33.259352Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:33.259383Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:33.259392Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-03-28T09:05:33.259511Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16192 TClient is connected to server localhost:16192 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:34.057689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:34.071018Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:05:34.088404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:34.368363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:34.638779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:34.733729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:36.947425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783994175177592:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:36.947606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:37.369971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:05:37.458544Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783976995306753:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:37.458594Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:37.460600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:05:37.528852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:05:37.607647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:05:37.696564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:05:37.791304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:05:37.902276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783998470145417:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:37.902370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:37.902815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783998470145422:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:37.907731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:05:37.935156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783998470145424:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:05:38.027633Z node 1 :TX_PROXY ERROR: Actor# [1:7486784002765112776:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:40.720416Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=1;ch_limit=50;inputs=0;input_channels_count=0; 2025-03-28T09:05:40.720763Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=1;input_channels_count=1; 2025-03-28T09:05:40.720912Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=2;input_channels_count=2; 2025-03-28T09:05:40.720988Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:145;event=channel_info;ch_size=50;ch_count=2;ch_limit=50;inputs=1;input_channels_count=1; 2025-03-28T09:05:40.721888Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_fetcher_actor.cpp:46 :META:Table { TableId { OwnerId: 72057594046644480 TableId: 6 } TablePath: "/Root/KeyValue" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Columns { Id: 1 Name: "Key" Type: 4 } Columns { Id: 2 Name: "Value" Type: 4097 } KeyColumnTypes: 4 Reads { ShardId: 72075186224037911 KeyRanges { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } } ItemsLimit: 0 Reverse: false DataFormat: FORMAT_CELLVEC EnableShardsSequentialScan: true KeyColumnTypeInfos { } ReadType: ROWS 2025-03-28T09:05:40.722141Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047945:2557], TxId: 281474976710682, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. TraceId : 01jqdzzxwx46s639avhcx1ytem. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Start compute actor [1:7486784011355047945:2557], task: 1 2025-03-28T09:05:40.722180Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047945:2557], TxId: 281474976710682, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. TraceId : 01jqdzzxwx46s639avhcx1ytem. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Set periodic stats 0.100000s 2025-03-28T09:05:40.722210Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047945:2557], TxId: 281474976710682, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. TraceId : 01jqdzzxwx46s639avhcx1ytem. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. EVLOGKQP START 2025-03-28T09:05:40.723021Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047946:2558], TxId: 281474976710682, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Start compute actor [1:7486784011355047946:2558], task: 2 2025-03-28T09:05:40.723063Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047946:2558], TxId: 281474976710682, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. 
SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Set periodic stats 0.100000s 2025-03-28T09:05:40.723922Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047946:2558], TxId: 281474976710682, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. SessionId : ydb://session/3?node_i ... OMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:05:40.829411Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:05:40.829541Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:05:40.829611Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:05:40.829846Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047947:2559], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-03-28T09:05:40.829857Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047947:2559], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:05:40.829889Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047947:2559], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:05:40.830091Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. 
Database : /Root. PoolId : default. }. CA StateFunc 271646923 2025-03-28T09:05:40.830108Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Finish input channelId: 4, from: [1:7486784011355047947:2559] 2025-03-28T09:05:40.830124Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:05:40.830148Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047947:2559], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646927 2025-03-28T09:05:40.830172Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047947:2559], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:05:40.830200Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 2, seqNo: [10] 2025-03-28T09:05:40.830210Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [11] 2025-03-28T09:05:40.830217Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. Tasks execution finished 2025-03-28T09:05:40.830227Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047947:2559], TxId: 281474976710682, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-28T09:05:40.830289Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 3. pass away 2025-03-28T09:05:40.830359Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710682;task_id=3;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:05:40.830593Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:05:40.830831Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:05:40.830878Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:05:40.831262Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:05:40.834913Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:05:40.835028Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:05:40.835071Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:05:40.835665Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:05:40.835701Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . 
DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:05:40.835833Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:05:40.835871Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:05:40.836127Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:05:40.836158Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Tasks execution finished, don't wait for ack delivery in input channelId: 4, seqNo: [11] 2025-03-28T09:05:40.836166Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. Tasks execution finished 2025-03-28T09:05:40.836177Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486784011355047948:2560], TxId: 281474976710682, task: 4. Ctx: { SessionId : ydb://session/3?node_id=1&id=NjBhMzQ1NjQtNzM0Nzk4NTctYTkwNjVmMi1hYmQyN2E3YQ==. CustomerSuppliedId : . TraceId : 01jqdzzxwx46s639avhcx1ytem. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-28T09:05:40.836250Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710682, task: 4. pass away 2025-03-28T09:05:40.836320Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710682;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:05:40.839639Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152740763, txId: 281474976710681] shutting down >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 >> TOlapNaming::CreateColumnStoreOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] Test command err: 2025-03-28T09:04:05.359471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:05.359981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:05.360048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0010ed/r3tmp/tmphMYHJH/pdisk_1.dat 2025-03-28T09:04:05.866963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:05.924691Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:05.987693Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZWIzOGU2NzYtYjZlNDlmMTctN2NmYTgxNDktYzhhNmI4NzA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZWIzOGU2NzYtYjZlNDlmMTctN2NmYTgxNDktYzhhNmI4NzA= 2025-03-28T09:04:05.988395Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZWIzOGU2NzYtYjZlNDlmMTctN2NmYTgxNDktYzhhNmI4NzA=, ActorId: [1:619:2540], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:04:05.988980Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZWIzOGU2NzYtYjZlNDlmMTctN2NmYTgxNDktYzhhNmI4NzA=, ActorId: [1:619:2540], ActorState: ReadyState, TraceId: 01jqdzx1v48k53ffacm0ees3z4, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: CREATE TABLE `/Root/table1` (key int, value int, PRIMARY KEY (key)); rpcActor: [0:0:0] database: databaseId: /Root pool id: default 2025-03-28T09:04:06.258484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:623:2543], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:06.258736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:06.300836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:06.301024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:06.305127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:06.325216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:06.397070Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:684:2576], Recipient [1:689:2579]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:04:06.398415Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:684:2576], Recipient [1:689:2579]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:04:06.403389Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:689:2579] 2025-03-28T09:04:06.403753Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:06.435280Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:684:2576], Recipient [1:689:2579]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:04:06.518345Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:06.518518Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:06.524785Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:04:06.524907Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:04:06.524973Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:04:06.525432Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:06.525643Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:06.525774Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:703:2579] in generation 1 2025-03-28T09:04:06.526365Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:04:06.661389Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:04:06.661654Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:04:06.661836Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:705:2588] 2025-03-28T09:04:06.661883Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:04:06.661922Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:04:06.661958Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:06.662236Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:689:2579], Recipient [1:689:2579]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:04:06.662290Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:04:06.662550Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 
72075186224037888 2025-03-28T09:04:06.662652Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:04:06.662827Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:04:06.662879Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:06.662944Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:04:06.663006Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:04:06.663051Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:04:06.663104Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:04:06.663158Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:04:06.699542Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:708:2590], Recipient [1:689:2579]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:06.699622Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:06.699688Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:680:2574], serverId# [1:708:2590], sessionId# [0:0:0] 2025-03-28T09:04:06.699850Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:708:2590] 2025-03-28T09:04:06.699896Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:04:06.700043Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:04:06.700340Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:04:06.700419Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:04:06.700542Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:04:06.700605Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:04:06.700660Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:04:06.700700Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:04:06.700742Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:04:06.701071Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:04:06.701152Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:04:06.701195Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:04:06.701233Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:04:06.701297Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:04:06.701333Z node 1 
:TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:04:06.701372Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:04:06.701413Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:04:06.701441Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:04:06.702442Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:04:06.702502Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:04:06.702544Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:04:06.702602Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:04:06.702674Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:04:06.709793Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:709:2591], Recipient [1:689:2579]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:04:06.709873Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:04:06.761187Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:724:2600], Recipient [1:689:2579]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:06.761275Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:06.761318Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:722:2598], serverId# [1:724:2600], sessionId# [0:0:0] 2025-03-28T09:04:06.761779Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:689:2579]: {TEvPlanStep step# 300 MediatorId# 72057594046382081 Tabl ... node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-03-28T09:05:39.860113Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:05:39.860192Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-03-28T09:05:39.860285Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2025-03-28T09:05:39.860408Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:05:39.861312Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=N2M0MWFhZTAtYmJjZWM4MTYtNjUyYjczMzktM2FhMGEwMmY=, ActorId: [13:841:2686], ActorState: ExecuteState, TraceId: 01jqdzzxc83wedgkzmzse5npye, Create QueryResponse for error on request, msg: 2025-03-28T09:05:39.862569Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. 
Ctx: { TraceId: 01jqdzzxc83wedgkzmzse5npye, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=N2M0MWFhZTAtYmJjZWM4MTYtNjUyYjczMzktM2FhMGEwMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:05:39.863009Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [13:949:2686], Recipient [13:904:2732]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 949 RawX2: 55834577534 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715666 ExecLevel: 0 Flags: 8 2025-03-28T09:05:39.863058Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:05:39.863190Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [13:904:2732], Recipient [13:904:2732]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-03-28T09:05:39.863223Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-03-28T09:05:39.863295Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:05:39.863470Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-03-28T09:05:39.863565Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CheckDataTx 2025-03-28T09:05:39.863618Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-03-28T09:05:39.863656Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CheckDataTx 2025-03-28T09:05:39.863688Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:05:39.863721Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:05:39.863767Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v500/18446744073709551615 ImmediateWriteEdgeReplied# v500/18446744073709551615 2025-03-28T09:05:39.863824Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715666] at 72075186224037888 2025-03-28T09:05:39.863861Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-03-28T09:05:39.863890Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:05:39.863918Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-03-28T09:05:39.863946Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit ExecuteKqpDataTx 2025-03-28T09:05:39.864021Z node 13 :TX_DATASHARD TRACE: Operation [0:281474976715666] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193448 2025-03-28T09:05:39.864179Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 
PathId: 2 HasWrites: false 2025-03-28T09:05:39.864287Z node 13 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-28T09:05:39.864367Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-03-28T09:05:39.864435Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-03-28T09:05:39.864465Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:05:39.864495Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-03-28T09:05:39.864554Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715666 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-28T09:05:39.864674Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is DelayComplete 2025-03-28T09:05:39.864705Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:05:39.864737Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715666] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:05:39.864768Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715666] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:05:39.864815Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2025-03-28T09:05:39.864842Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:05:39.864871Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715666] at 72075186224037888 has finished 2025-03-28T09:05:39.864932Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:05:39.864987Z node 13 :TX_DATASHARD TRACE: Complete execution for [0:281474976715666] at 72075186224037888 on unit FinishPropose 2025-03-28T09:05:39.865037Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:05:39.866714Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [13:61:2108], Recipient [13:904:2732]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 13 Status: STATUS_NOT_FOUND 2025-03-28T09:05:40.180877Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqdzzxhmd1v39w41x9mwkgk1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjNiMWM3YzMtOGQ0YmNhYjYtMzFjY2ViNGYtY2I2ZGJlODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:05:40.182770Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:970:2776], Recipient [13:904:2732]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-03-28T09:05:40.183020Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:05:40.183115Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v500/18446744073709551615 ImmediateWriteEdgeReplied# v500/18446744073709551615 2025-03-28T09:05:40.183187Z node 13 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v500/18446744073709551615 2025-03-28T09:05:40.183289Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-03-28T09:05:40.183425Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-28T09:05:40.183513Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:05:40.183582Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:05:40.183642Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:05:40.183694Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-03-28T09:05:40.183756Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-28T09:05:40.183784Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:05:40.183810Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:05:40.183843Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:05:40.184005Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-03-28T09:05:40.184305Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:970:2776], 0} after executionsCount# 1 2025-03-28T09:05:40.184402Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:970:2776], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:05:40.184519Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:970:2776], 0} finished in read 2025-03-28T09:05:40.184618Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-28T09:05:40.184645Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:05:40.184668Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:05:40.184695Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 
2025-03-28T09:05:40.184744Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-28T09:05:40.184765Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:05:40.184801Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-03-28T09:05:40.184900Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:05:40.185061Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-28T09:05:40.186169Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:970:2776], Recipient [13:904:2732]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:05:40.186252Z node 13 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] >> TOlap::CreateStore >> TOlap::StoreStats >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] >> DataShardSnapshots::ShardRestartLockBasic >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 >> KqpPg::PgCreateTable [GOOD] >> KqpPg::PgUpdate+useSink >> DataShardSnapshots::DelayedWriteReadableAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit |77.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut >> TOlapNaming::CreateColumnStoreOk [GOOD] >> TOlapNaming::CreateColumnStoreFailed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] Test command err: 2025-03-28T09:05:39.497668Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784007874223330:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:39.514897Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00176f/r3tmp/tmpTWNL0I/pdisk_1.dat 2025-03-28T09:05:40.153459Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:40.157547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:40.157632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:40.171657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:40.223423Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00176f/r3tmp/yandexhK44qO.tmp 2025-03-28T09:05:40.223474Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00176f/r3tmp/yandexhK44qO.tmp 2025-03-28T09:05:40.223607Z node 1 :NET_CLASSIFIER ERROR: invalid NetData format 2025-03-28T09:05:40.223635Z node 1 :NET_CLASSIFIER WARN: failed to initialize 
from file: /home/runner/.ya/build/build_root/2909/00176f/r3tmp/yandexhK44qO.tmp 2025-03-28T09:05:40.223766Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration |77.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} |77.9%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |77.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |77.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |77.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |77.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |77.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 >> TOlap::CreateStore [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 [GOOD] >> TOlap::CreateDropTable >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink >> TOlapNaming::CreateColumnStoreFailed [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnStoreFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:05:44.247944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:05:44.248101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:44.248152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:05:44.248190Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:05:44.248240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:05:44.248273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:05:44.248337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:44.248410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:05:44.248799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:05:44.336020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:44.336101Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:44.358346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:05:44.358680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:05:44.358881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:05:44.369317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:05:44.369580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:05:44.370306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:44.370575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:44.373270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:44.374736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:44.374810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:44.375004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:05:44.375055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:44.375095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:05:44.375194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:05:44.383085Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:05:44.559710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:44.559999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 
72057594046678944 2025-03-28T09:05:44.560295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:05:44.560609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:05:44.560694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:44.576425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:44.576609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:05:44.576846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:44.576938Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:05:44.576979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:05:44.577017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:05:44.580168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:44.580261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:44.580305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:05:44.584197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:44.584279Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:44.584329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:44.584388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:05:44.594608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:05:44.603716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:05:44.604030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:05:44.605221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2025-03-28T09:05:44.605390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:44.605459Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:44.605811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:05:44.605881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:44.606084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:44.606170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:44.615970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:44.616048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:44.616286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:44.616333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:05:44.616759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:44.616815Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:05:44.616944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:44.616983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:44.617030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:44.617064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:44.617111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:05:44.617157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:44.617251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:05:44.617288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:05:44.617378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:44.617442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:05:44.617479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:05:44.619919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 
PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:44.620089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:44.620132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... AT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:46.067184Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:46.067288Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:05:46.067455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:46.067507Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:05:46.067567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:05:46.067604Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:05:46.069436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:46.069493Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:46.069548Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:05:46.071201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:46.071247Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:46.071299Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:46.071352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:05:46.071509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:05:46.073190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:05:46.073384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:05:46.074244Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:46.074372Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 8589936749 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:46.074422Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:46.074703Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:05:46.074760Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:46.074949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:46.075018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:05:46.083762Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:46.083839Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:46.084027Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:46.084070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:05:46.084185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:46.084227Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:05:46.084346Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:46.084381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:46.084420Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:46.084455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:46.084493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:05:46.084533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:46.084573Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:05:46.084606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:05:46.084680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:46.084721Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:05:46.084752Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:05:46.085801Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 
1 2025-03-28T09:05:46.085912Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:46.085952Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:05:46.086002Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:05:46.086047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:46.086150Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:05:46.091866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:05:46.092396Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:05:46.095987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "OlapStore" ColumnShardCount: 1 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "data" Type: "Utf8" } Columns { Name: "mess age" Type: "Utf8" } KeyColumnNames: "timestamp" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:46.096350Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /MyRoot/OlapStore, opId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:05:46.096528Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2025-03-28T09:05:46.096968Z node 2 :TX_PROXY DEBUG: actor# [2:270:2261] Bootstrap 2025-03-28T09:05:46.117599Z node 2 :TX_PROXY DEBUG: actor# [2:270:2261] Become StateWork (SchemeCache [2:275:2266]) 2025-03-28T09:05:46.118308Z node 2 :TX_PROXY DEBUG: actor# [2:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:05:46.121551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:46.121717Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: CREATE COLUMN STORE, path: /MyRoot/OlapStore 2025-03-28T09:05:46.122242Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:05:46.122484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:05:46.122525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:05:46.122925Z node 2 
:FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:05:46.123023Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:05:46.123060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:285:2276] TestWaitNotification: OK eventTxId 101 2025-03-28T09:05:46.123457Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:05:46.123624Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 191us result status StatusPathDoesNotExist 2025-03-28T09:05:46.123789Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/OlapStore\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/OlapStore" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |77.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |78.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |78.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat >> KqpPg::InsertNoTargetColumns_Alter+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter-useSink >> TOlap::CreateDropTable [GOOD] >> TOlap::CreateDropStandaloneTableDefaultSharding |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] Test command err: 2025-03-28T09:05:43.532066Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784025946659009:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:43.532191Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001756/r3tmp/tmpvm5zQJ/pdisk_1.dat 2025-03-28T09:05:43.963500Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:43.978624Z node 1 :HTTP ERROR: (#26,[::1]:1356) connection closed with error: Connection refused 2025-03-28T09:05:43.979045Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:05:43.979922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:43.980031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-03-28T09:05:43.982827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:44.007805Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:44.007825Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:44.007832Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:44.007944Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 >> TConsoleConfigTests::TestModifyConfigItem >> TModificationsValidatorTests::TestIsValidationRequired_NONE [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS_AND_NODE_TYPES [GOOD] >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate >> TJaegerTracingConfiguratorTests::DefaultConfig >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 >> Cdc::MustNotLoseSchemaSnapshot [GOOD] >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 >> TConsoleConfigTests::TestModifyConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItem >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 |78.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |78.0%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |78.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 >> KqpPg::PgUpdate+useSink [GOOD] >> KqpPg::PgUpdate-useSink >> TJaegerTracingConfiguratorTests::DefaultConfig [GOOD] >> TJaegerTracingConfiguratorTests::GlobalRules >> TConsoleConfigTests::TestRemoveConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates >> DataShardSnapshots::ShardRestartLockBasic [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTable >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommit >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling >> TConsoleConfigTests::TestRemoveConfigItems [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts >> KqpPg::CheckPgAutoParams-useSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeThrottler >> Viewer::SelectStringWithNoBase64Encoding >> TConsoleConfigTests::TestConfigureOrderConflicts [GOOD] >> TConsoleConfigTests::TestGetItems >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 |78.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |78.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> Viewer::TabletMergingPacked |78.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove 
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 >> TJaegerTracingConfiguratorTests::RequestTypeThrottler [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeSampler >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 >> KqpPg::InsertNoTargetColumns_Alter-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::CheckPgAutoParams-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 14868, MsgBus: 25626 2025-03-28T09:03:28.257430Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783447581299462:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:28.257472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012ed/r3tmp/tmpy05nuv/pdisk_1.dat 2025-03-28T09:03:28.690788Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:28.713099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:28.713192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:28.715787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14868, node 1 2025-03-28T09:03:28.792510Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:28.792534Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:28.792546Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:28.792668Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25626 TClient is connected to server localhost:25626 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:29.359481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
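Note: paired test names in this run such as KqpPg::InsertNoTargetColumns_Alter+useSink / KqpPg::InsertNoTargetColumns_Alter-useSink (and CheckPgAutoParams-useSink above) appear to be one test body executed with a boolean parameter, with '+' and '-' encoding the flag value in the reported name. A generic C++ sketch of that pattern follows; RunCheckPgAutoParams is a hypothetical stand-in, not the actual YDB test macro or registration mechanism.

// Hypothetical illustration of +flag/-flag test pairs: one test body,
// run twice with the flag value encoded in the reported name.
#include <cstdio>

void RunCheckPgAutoParams(bool useSink) {
    // ... test body would go here; 'useSink' switches the code path ...
    std::printf("KqpPg::CheckPgAutoParams%cuseSink [GOOD]\n", useSink ? '+' : '-');
}

int main() {
    RunCheckPgAutoParams(true);   // reported as KqpPg::CheckPgAutoParams+useSink
    RunCheckPgAutoParams(false);  // reported as KqpPg::CheckPgAutoParams-useSink
}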
2025-03-28T09:03:29.384382Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:03:31.489280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2025-03-28T09:03:31.814973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2025-03-28T09:03:31.975173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2025-03-28T09:03:32.176125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:03:32.243115Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2025-03-28T09:03:32.381444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:03:32.444237Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill f f t t 2025-03-28T09:03:32.554111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 f f t t 2025-03-28T09:03:32.751706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, 
at schemeshard: 72057594046644480 2025-03-28T09:03:32.848998Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {f,f} {f,f} {t,t} {t,t} 2025-03-28T09:03:32.910814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 {f,f} {f,f} {t,t} {t,t} 2025-03-28T09:03:33.043045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:03:33.107475Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-03-28T09:03:33.162329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-03-28T09:03:33.257416Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783447581299462:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:33.257480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:03:33.283627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710692:0, at schemeshard: 72057594046644480 2025-03-28T09:03:33.365418Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-03-28T09:03:33.428847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480 2025-03-28T09:03:33.537418Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-03-28T09:03:33.592035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710700:0, at schemeshard: 72057594046644480 2025-03-28T09:03:33.647876Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-03-28T09:03:33.693429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-03-28T09:03:33.742224Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-03-28T09:03:33.798778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710708:0, at schemeshard: 72057594046644480 2025-03-28T09:03:33.856623Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-03-28T09:03:33.926822Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-03-28T09:03:33.993424Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-03-28T09:03:34.047765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710716:0, at schemeshard: 72057594046644480 2025-03-28T09:03:34.152525Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-03-28T09:03:34.220980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710720:0, at schemeshard: 72057594046644480 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2025-03-28T09:03:34.336478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710723:0, at schemeshard: 72057594046644480 {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-03-28T09:03:34.466635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710726:0, at schemeshard: 72057594046644480 2025-03-28T09:03:34.537541Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2025-03-28T09:03:34.601741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, b ... to start YDB, gRPC: 25213, MsgBus: 24888 2025-03-28T09:05:43.277326Z node 14 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[14:7486784026752512815:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:43.277400Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012ed/r3tmp/tmp2sGZ50/pdisk_1.dat 2025-03-28T09:05:43.458184Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:43.524427Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:43.524587Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:43.526727Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25213, node 14 2025-03-28T09:05:43.623605Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:43.623634Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:43.623651Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:43.623871Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24888 TClient is connected to server localhost:24888 WaitRootIsUp 'Root'... 
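Note: the \x-prefixed values printed by the KqpPg test output above (e.g. \x62797465612030) are PostgreSQL bytea hex escape strings; decoded byte-by-byte, \x62797465612030 is the ASCII text "bytea 0", matching the round-tripped test rows. A small self-contained C++ decoding sketch, for illustration only; DecodeByteaHex is a hypothetical helper, not part of the test.

// Decode a PostgreSQL bytea hex escape string like the ones above,
// e.g. "\x62797465612030" -> "bytea 0".
#include <cstdio>
#include <string>

std::string DecodeByteaHex(const std::string& in) {
    std::string out;
    // Skip the leading "\x", then consume two hex digits per byte.
    for (size_t i = 2; i + 1 < in.size(); i += 2) {
        out.push_back((char)std::stoi(in.substr(i, 2), nullptr, 16));
    }
    return out;
}

int main() {
    std::printf("%s\n", DecodeByteaHex("\\x62797465612030").c_str()); // prints: bytea 0
}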
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:44.980632Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:48.277714Z node 14 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7486784026752512815:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:48.277824Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:49.986852Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7486784052522317273:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:49.987837Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7486784052522317249:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:49.987968Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:49.996455Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:05:50.018114Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7486784052522317276:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:05:50.095900Z node 14 :TX_PROXY ERROR: Actor# [14:7486784056817284624:2347] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:50.155449Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:05:50.584750Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:05:51.527369Z node 14 :KQP_COMPILE_ACTOR ERROR: Get parsing result with error, self: [14:7486784061112252276:2402], owner: [14:7486784052522317228:2325], statement id: 0 2025-03-28T09:05:51.527906Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=MTc3ZmM4ODgtZmViMjM4N2EtNWQ3ZWQyYzEtODIyYjUzM2E=, ActorId: [14:7486784061112252274:2401], ActorState: ExecuteState, TraceId: 01jqe008wz2s35s1e48xwjqxqt, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:05:51.833695Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7486784061112252304:2412], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect
: Error: At function: PgSetItem
:1:1: Error: At function: PgWhere
:2:55: Error: At function: PgOp
:2:55: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2025-03-28T09:05:51.834957Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=Y2VhODQzNTktMzhjODAzNDktMmIzYjFmMjAtNjJlOGM4YWY=, ActorId: [14:7486784061112252301:2410], ActorState: ExecuteState, TraceId: 01jqe0095k2d7z4s802w57djpr, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:05:51.883456Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7486784061112252318:2418], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect
: Error: At function: PgSetItem
:1:1: Error: At function: PgWhere
:2:57: Error: At function: PgAnd
:2:67: Error: At function: PgOp
:2:67: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2025-03-28T09:05:51.883877Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=MzgxMDhiOC1hODJiZDc1MS05YjA3YjQyMy02Y2JjZGNlZA==, ActorId: [14:7486784061112252315:2416], ActorState: ExecuteState, TraceId: 01jqe0097adrs5dag8dythctvv, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:05:51.901558Z node 14 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0098nchhq71y293b0esne, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=MjA0MTI2ZWUtZTg0ZjlmY2QtZTM4MDZkNDMtNGFjODA0ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2025-03-28T09:05:51.901846Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=MjA0MTI2ZWUtZTg0ZjlmY2QtZTM4MDZkNDMtNGFjODA0ZjI=, ActorId: [14:7486784061112252327:2422], ActorState: ExecuteState, TraceId: 01jqe0098nchhq71y293b0esne, Create QueryResponse for error on request, msg: 2025-03-28T09:05:51.947977Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:05:52.096075Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-28T09:05:52.238494Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7486784065407219803:2448], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: values have 3 columns, INSERT INTO expects: 2 2025-03-28T09:05:52.238975Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=MzZkOTU5MGItOGVmYWUxMDQtMzZlOTY1MzQtY2ZiNzM0YzQ=, ActorId: [14:7486784065407219800:2446], ActorState: ExecuteState, TraceId: 01jqe009j97297jc29t785dhja, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:05:52.290660Z node 14 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [14:7486784065407219815:2454], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Failed to convert type: List> to List>
:1:1: Error: Failed to convert 'id': pgunknown to Optional
:1:1: Error: Row type mismatch for table: db.[/Root/PgTable2] 2025-03-28T09:05:52.307342Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=OTY4ZDBjM2UtYzY5YzllMWQtYTUwNTA0NjQtZWY2NzFjOWU=, ActorId: [14:7486784065407219812:2452], ActorState: ExecuteState, TraceId: 01jqe009kt98vzbx14jw3a7wwp, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:05:52.781401Z node 14 :KQP_EXECUTER CRIT: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe009p0a1ydwpw2cc386j7a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=MjVlOGEwZjEtODhjY2IyZTktNGE2MDcxYTUtMmEzN2M2NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" 2025-03-28T09:05:52.781930Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=MjVlOGEwZjEtODhjY2IyZTktNGE2MDcxYTUtMmEzN2M2NA==, ActorId: [14:7486784065407219827:2459], ActorState: ExecuteState, TraceId: 01jqe009p0a1ydwpw2cc386j7a, Create QueryResponse for error on request, msg: 2025-03-28T09:05:52.829139Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-28T09:05:53.699673Z node 14 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 14, TabletId: 72075186224037892 not found 2025-03-28T09:05:53.747044Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 >> KqpPg::TableInsert+useSink [GOOD] >> KqpPg::TableInsert-useSink >> TConsoleConfigTests::TestGetItems [GOOD] >> TConsoleConfigTests::TestGetNodeItems >> Viewer::TabletMergingPacked [GOOD] >> Viewer::VDiskMerging >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeSampler [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags >> TConsoleConfigTests::TestGetNodeItems [GOOD] >> TConsoleConfigTests::TestGetNodeConfig >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 >> TGroupMapperTest::MakeDisksForbidden [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:05:44.951847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:05:44.951984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:44.952032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:05:44.952066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:05:44.952109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:05:44.952147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:05:44.952205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:44.952329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:05:44.952661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:05:45.095253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:45.095336Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:45.127914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:05:45.128189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:05:45.128348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:05:45.137577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:05:45.137876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:05:45.138611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:45.138905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:45.140876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:45.142163Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:45.142223Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:45.142414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:05:45.142476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:45.142521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2025-03-28T09:05:45.142630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:05:45.150247Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:05:45.341513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:45.341791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:45.342055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:05:45.342395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:05:45.342465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:45.354044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:45.354238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:05:45.354472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:45.354547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:05:45.354585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:05:45.354623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:05:45.359920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:45.360020Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:45.360089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:05:45.364549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:45.364636Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:45.364728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:45.364780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:05:45.369146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } 
ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:05:45.371949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:05:45.372227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:05:45.373513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:45.373682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:45.373747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:45.374109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:05:45.374175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:45.374384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:45.374487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:45.377180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:45.377235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:45.377461Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:45.377507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:05:45.377908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:45.377955Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:05:45.378058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:45.378091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:45.378129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:45.378163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:45.378200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:05:45.378245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready 
parts: 1/1 2025-03-28T09:05:45.378318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:05:45.378353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:05:45.378427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:45.378501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:05:45.378535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:05:45.380786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:45.380964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:45.381008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... pipe to deleted shardIdx 72057594046678944:50 tabletId 72075186233409595 2025-03-28T09:05:58.106882Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:52 2025-03-28T09:05:58.106908Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:52 tabletId 72075186233409597 2025-03-28T09:05:58.106967Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:46 2025-03-28T09:05:58.106992Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:46 tabletId 72075186233409591 2025-03-28T09:05:58.107261Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:48 2025-03-28T09:05:58.107293Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:48 tabletId 72075186233409593 2025-03-28T09:05:58.107438Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:05:58.107494Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-28T09:05:58.107586Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:05:58.113328Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:42 2025-03-28T09:05:58.113395Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 2025-03-28T09:05:58.114074Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:44 2025-03-28T09:05:58.114118Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:44 tabletId 72075186233409589 2025-03-28T09:05:58.115373Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:65 2025-03-28T09:05:58.115416Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:65 tabletId 72075186233409610 2025-03-28T09:05:58.115588Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:63 2025-03-28T09:05:58.115618Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:63 tabletId 72075186233409608 
2025-03-28T09:05:58.115705Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:61 2025-03-28T09:05:58.115741Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:61 tabletId 72075186233409606 2025-03-28T09:05:58.115885Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:59 2025-03-28T09:05:58.115913Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:59 tabletId 72075186233409604 2025-03-28T09:05:58.116046Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-28T09:05:58.116076Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-03-28T09:05:58.116168Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:05:58.116193Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:05:58.117652Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-28T09:05:58.117700Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-28T09:05:58.117795Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-03-28T09:05:58.117824Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-03-28T09:05:58.118190Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:21 2025-03-28T09:05:58.118229Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2025-03-28T09:05:58.118326Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:19 2025-03-28T09:05:58.118356Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2025-03-28T09:05:58.121326Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:17 2025-03-28T09:05:58.121393Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2025-03-28T09:05:58.122198Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-03-28T09:05:58.122247Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2025-03-28T09:05:58.122372Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:13 2025-03-28T09:05:58.122403Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2025-03-28T09:05:58.123628Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:11 2025-03-28T09:05:58.123676Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2025-03-28T09:05:58.125157Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:9 2025-03-28T09:05:58.125204Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2025-03-28T09:05:58.125334Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:36 2025-03-28T09:05:58.125363Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2025-03-28T09:05:58.125462Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2025-03-28T09:05:58.125491Z node 3 :FLAT_TX_SCHEMESHARD 
DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2025-03-28T09:05:58.125569Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:40 2025-03-28T09:05:58.125597Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:40 tabletId 72075186233409585 2025-03-28T09:05:58.125680Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:32 2025-03-28T09:05:58.125710Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2025-03-28T09:05:58.125780Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:34 2025-03-28T09:05:58.125808Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2025-03-28T09:05:58.125891Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:28 2025-03-28T09:05:58.125919Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2025-03-28T09:05:58.130849Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:30 2025-03-28T09:05:58.130921Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2025-03-28T09:05:58.131044Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:26 2025-03-28T09:05:58.131074Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2025-03-28T09:05:58.131142Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-03-28T09:05:58.131168Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-03-28T09:05:58.131240Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:57 2025-03-28T09:05:58.131268Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:57 tabletId 72075186233409602 2025-03-28T09:05:58.131355Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:53 2025-03-28T09:05:58.131382Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:53 tabletId 72075186233409598 2025-03-28T09:05:58.131454Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:55 2025-03-28T09:05:58.131480Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:55 tabletId 72075186233409600 2025-03-28T09:05:58.131555Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:49 2025-03-28T09:05:58.131591Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:49 tabletId 72075186233409594 2025-03-28T09:05:58.131676Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:47 2025-03-28T09:05:58.131701Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:47 tabletId 72075186233409592 2025-03-28T09:05:58.135087Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:51 2025-03-28T09:05:58.135134Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:51 tabletId 72075186233409596 2025-03-28T09:05:58.135217Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:45 2025-03-28T09:05:58.135238Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:45 tabletId 72075186233409590 2025-03-28T09:05:58.135283Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
Deleted shardIdx 72057594046678944:43 2025-03-28T09:05:58.135299Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:43 tabletId 72075186233409588 2025-03-28T09:05:58.135342Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:41 2025-03-28T09:05:58.135382Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:41 tabletId 72075186233409586 2025-03-28T09:05:58.135538Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 2025-03-28T09:05:58.136353Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:05:58.136551Z node 3 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyDir/ColumnTable" took 228us result status StatusPathDoesNotExist 2025-03-28T09:05:58.136679Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/MyDir/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:05:58.137341Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2025-03-28T09:05:58.137446Z node 3 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 4 took 116us result status StatusPathDoesNotExist 2025-03-28T09:05:58.137516Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksForbidden [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb >> TConsoleConfigTests::TestGetNodeConfig [GOOD] >> TConsoleConfigTests::TestAutoOrder >> Viewer::VDiskMerging [GOOD] >> Viewer::TenantInfo5kkTablets >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 >> KqpPg::PgUpdate-useSink [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup >> TJaegerTracingConfiguratorTests::ThrottlingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration [GOOD] >> 
TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx [GOOD] >> Cdc::ResolvedTimestampForDisplacedUpsert >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink >> TGroupMapperTest::MakeDisksUnusable >> TConsoleConfigTests::TestAutoOrder [GOOD] >> TConsoleConfigTests::TestAutoSplit >> TGroupMapperTest::MakeDisksUnusable [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 >> TGroupMapperTest::NonUniformClusterMirror3dc |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksUnusable [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits >> DataShardSnapshots::ShardRestartAfterDropTable [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig >> TConsoleConfigTests::TestAutoSplit [GOOD] >> TConsoleConfigTests::TestValidation >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 >> YdbSdkSessionsPool::StressTestAsync10 [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dc [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending >> TConsoleConfigTests::TestValidation [GOOD] >> TConsoleConfigTests::TestCheckConfigUpdates ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_pool_ut/unittest >> YdbSdkSessionsPool::StressTestAsync10 [GOOD] Test command err: 2025-03-28T09:03:08.806952Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783361522509685:2112];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:08.807454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013c6/r3tmp/tmplfSLea/pdisk_1.dat 2025-03-28T09:03:09.279363Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:09.284830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-03-28T09:03:09.284940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:09.297579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4753, node 1 2025-03-28T09:03:09.439088Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:09.439120Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:09.439127Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:09.439229Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:09.869918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:13.802847Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783361522509685:2112];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:13.802936Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:03:24.273334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:03:24.273366Z node 1 :IMPORT WARN: Table profiles were not loaded >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 >> TJaegerTracingConfiguratorTests::SharedSamplingLimits [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges >> TSchemeShardServerLess::TestServerlessComputeResourcesMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] Test command err: 2025-03-28T09:04:06.416717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:06.417297Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:06.417365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0010c5/r3tmp/tmpsepDnk/pdisk_1.dat 2025-03-28T09:04:06.966830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:07.033599Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:07.085324Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:04:07.086471Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:04:07.086757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:07.086892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:07.099132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:07.192036Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:04:07.192124Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:04:07.192306Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:04:07.408212Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:04:07.408329Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:04:07.408938Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:04:07.409034Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:04:07.409395Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:04:07.409608Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:04:07.409742Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:04:07.410084Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:04:07.414618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:07.416445Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:04:07.416531Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:04:07.464108Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:04:07.465343Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:04:07.465904Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:04:07.466273Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:07.544118Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:04:07.544986Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:07.545120Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:07.553850Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:04:07.553990Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:04:07.572379Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:04:07.573022Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:07.573233Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:07.573342Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:04:07.573928Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:04:07.660785Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:04:07.661025Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:04:07.661165Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:04:07.661207Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:04:07.661247Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:04:07.661287Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:07.661565Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-28T09:04:07.661645Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:04:07.662083Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:04:07.662217Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:04:07.668349Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:04:07.668437Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:07.668498Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:04:07.668564Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:04:07.669884Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:04:07.669960Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:04:07.670050Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:04:07.670281Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:07.670344Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:07.670409Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:04:07.670530Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:04:07.670595Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:04:07.670791Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:04:07.671095Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:04:07.671185Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:04:07.671304Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:04:07.671362Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:04:07.671413Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:04:07.671454Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:04:07.671508Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:04:07.671916Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:04:07.671971Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:04:07.680342Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:04:07.680440Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-28T09:04:07.680552Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:04:07.680609Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:04:07.680652Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:04:07.680696Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:04:07.680748Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:04:07.681925Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:04:07.682003Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:04:07.682044Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:04:07.682092Z node 1 :TX_DATASHARD TRACE: Prop ... 2075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-28T09:06:01.805653Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:06:01.805718Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2025-03-28T09:06:01.805845Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3035 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-28T09:06:01.805957Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-03-28T09:06:01.806067Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:666:2570], Recipient [13:757:2635]: {TEvReadSet step# 3035 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-28T09:06:01.806096Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:06:01.806124Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-03-28T09:06:01.806174Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3035 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-28T09:06:01.806212Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-03-28T09:06:01.806284Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:757:2635], Recipient [13:666:2570]: {TEvReadSet step# 3035 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-28T09:06:01.806312Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:06:01.806338Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 
dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2025-03-28T09:06:01.806378Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3035 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-28T09:06:01.806562Z node 13 :TX_DATASHARD DEBUG: Complete [3035 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [13:984:2781], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:06:01.806875Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [13:666:2570], Recipient [13:757:2635]: {TEvReadSet step# 3035 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-28T09:06:01.806910Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:06:01.806939Z node 13 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-03-28T09:06:01.806988Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3035 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-28T09:06:01.807084Z node 13 :TX_DATASHARD DEBUG: Complete [3035 : 281474976715663] from 72075186224037889 at tablet 72075186224037889 send result to client [13:984:2781], exec latency: 0 ms, propose latency: 0 ms TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 1967 } } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-1" WriteRows: 1 WriteBytes: 8 } } } TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 1125 } } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-2" WriteRows: 1 WriteBytes: 8 } } } 2025-03-28T09:06:01.808174Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:06:01.808480Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:06:01.808820Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:06:01.811665Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-03-28T09:06:01.816869Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [13:757:2635], Recipient [13:666:2570]: {TEvReadSet step# 3035 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-03-28T09:06:01.854963Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:06:01.855132Z node 13 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2025-03-28T09:06:01.856123Z node 13 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 
72075186224037888 2025-03-28T09:06:01.856322Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [13:666:2570], Recipient [13:757:2635]: {TEvReadSet step# 3035 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-03-28T09:06:01.856361Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:06:01.856395Z node 13 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 2025-03-28T09:06:02.078197Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] Handle TEvExecuteKqpTransaction 2025-03-28T09:06:02.078312Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] TxId# 281474976715667 ProcessProposeKqpTransaction 2025-03-28T09:06:02.079625Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqe00jzzagv3db1yjz6yddtn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTE2YzM3N2YtZDBiNWUxZDUtMmRlYmQwNGItOGE0NGIxMjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2025-03-28T09:06:02.083331Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1108:2902], Recipient [13:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-03-28T09:06:02.083606Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:06:02.083739Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3035/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v4000/18446744073709551615 ImmediateWriteEdge# v4001/0 ImmediateWriteEdgeReplied# v4001/0 2025-03-28T09:06:02.083821Z node 13 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v4001/18446744073709551615 2025-03-28T09:06:02.083936Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-03-28T09:06:02.084099Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-28T09:06:02.084178Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:06:02.084245Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:06:02.084308Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:06:02.084368Z node 13 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-03-28T09:06:02.084436Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-28T09:06:02.084468Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:06:02.084495Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:06:02.084520Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 
72075186224037888 on unit ExecuteRead 2025-03-28T09:06:02.084680Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-03-28T09:06:02.085155Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:1108:2902], 0} after executionsCount# 1 2025-03-28T09:06:02.085260Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:1108:2902], 0} sends rowCount# 2, bytes# 96, quota rows left# 999, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:06:02.085379Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:1108:2902], 0} finished in read 2025-03-28T09:06:02.085476Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-28T09:06:02.085510Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:06:02.085537Z node 13 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:06:02.085568Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:06:02.085624Z node 13 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-28T09:06:02.085649Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:06:02.085683Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-03-28T09:06:02.085748Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:06:02.085926Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-28T09:06:02.087220Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1108:2902], Recipient [13:666:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:06:02.087306Z node 13 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 2 } items { uint32_value: 22 } } >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 >> DataShardSnapshots::UncommittedWriteRestartDuringCommit [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase |78.0%| [TA] $(B)/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConsoleConfigTests::TestCheckConfigUpdates [GOOD] >> TConsoleConfigTests::TestManageValidators >> TLogSettingsConfiguratorTests::TestNoChanges [GOOD] >> TLogSettingsConfiguratorTests::TestAddComponentEntries >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 >> TBlobStorageProxyTest::TestProxySimpleDiscover >> TLogSettingsConfiguratorTests::TestAddComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries >> TConsoleConfigTests::TestManageValidators [GOOD] >> TConsoleConfigTests::TestDryRun ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:06:05.176863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:06:05.176990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:06:05.177032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:06:05.177070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:06:05.177160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:06:05.177193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:06:05.177257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:06:05.177344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:06:05.177734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:06:05.267068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:05.267132Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:05.284243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:06:05.284548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:06:05.284718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:06:05.299729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:06:05.299980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState 
with owners number: 0 2025-03-28T09:06:05.300682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:06:05.300969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:06:05.303112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:06:05.304456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:06:05.304520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:06:05.304688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:06:05.304770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:06:05.304823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:06:05.304936Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:06:05.313678Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:06:05.465923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:06:05.466191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:05.466445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:06:05.466711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:06:05.466770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:05.475571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:06:05.475753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:06:05.475947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:05.476005Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:06:05.476046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:06:05.476085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:06:05.479554Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:05.479633Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:06:05.479674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:06:05.481806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:05.481858Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:05.481913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:06:05.481962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:06:05.485930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:06:05.488379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:06:05.488575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:06:05.489808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:06:05.489975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:06:05.490032Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:06:05.490320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:06:05.490517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:06:05.490678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:06:05.490767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:06:05.493020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:06:05.493077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:06:05.493286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-03-28T09:06:05.493355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:06:05.493751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:05.493799Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:06:05.493918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:06:05.493960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:06:05.494003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:06:05.494039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:06:05.494078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:06:05.494121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:06:05.494159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:06:05.494195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:06:05.494265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:06:05.494306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:06:05.494369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:06:05.496388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:06:05.496505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:06:05.496542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-03-28T09:06:05.941176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:06:05.941259Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:604:2535], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:06:05.941653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409549 2025-03-28T09:06:05.941678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409549, txId: 0, path id: [OwnerId: 72075186234409549, LocalPathId: 1] 2025-03-28T09:06:05.941908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409549 2025-03-28T09:06:05.941949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:697:2599], at schemeshard: 72075186234409549, txId: 0, path id: 1 2025-03-28T09:06:05.943930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186234409549, msg: Owner: 72075186234409549 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72075186234409549, cookie: 0 2025-03-28T09:06:05.944392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:3 msg type: 268697640 2025-03-28T09:06:05.944512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2025-03-28T09:06:05.944660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-28T09:06:05.944739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:06:05.944917Z node 1 :HIVE INFO: [72075186233409546] TEvUpdateDomain, msg: DomainKey { SchemeShard: 72057594046678944 PathId: 3 } ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared TxId: 106 2025-03-28T09:06:05.945049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Update domain reply, message: Origin: 72075186233409546 TxId: 106, at schemeshard: 72057594046678944 2025-03-28T09:06:05.945099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-03-28T09:06:05.945243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2025-03-28T09:06:05.945296Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 106:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2025-03-28T09:06:05.945351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 138 -> 240 
2025-03-28T09:06:05.947808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:06:05.947931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:06:05.947958Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-03-28T09:06:05.948049Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-28T09:06:05.948077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:06:05.948115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-28T09:06:05.948141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:06:05.948187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-03-28T09:06:05.948217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:06:05.948243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-03-28T09:06:05.948265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-03-28T09:06:05.948319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-28T09:06:05.950361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-03-28T09:06:05.950415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-03-28T09:06:05.951104Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-03-28T09:06:05.951248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-28T09:06:05.951294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:842:2724] TestWaitNotification: OK eventTxId 106 2025-03-28T09:06:05.952044Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:06:05.952258Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 259us result status StatusSuccess 2025-03-28T09:06:05.952644Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 
Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:06:05.953400Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409549 2025-03-28T09:06:05.953616Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186234409549 describe path "/MyRoot/ServerLess0" took 182us result status StatusSuccess 2025-03-28T09:06:05.953969Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409549 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 3 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/ServerLess0" } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 1 PathOwnerId: 72075186234409549, at schemeshard: 72075186234409549 2025-03-28T09:06:05.954761Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:06:05.954946Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 229us result status StatusSuccess 2025-03-28T09:06:05.955297Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000004 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:06:05.955867Z node 1 :HIVE INFO: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:3 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestChangeDefaults >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 7243, MsgBus: 23861 2025-03-28T09:03:26.063953Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783437185051264:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:26.064009Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001308/r3tmp/tmpRi6cOL/pdisk_1.dat 2025-03-28T09:03:26.541994Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:26.559345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:26.559450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:26.562343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7243, node 1 2025-03-28T09:03:26.775298Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:26.775332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:26.775340Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:26.775478Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23861 TClient is connected to server localhost:23861 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:27.308071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 16 2025-03-28T09:03:29.404123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:03:29.600316Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:03:29.614375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:03:29.665397Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:03:29.700958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783450069953997:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:29.700959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783450069953989:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:29.701083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:29.704366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:03:29.715158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783450069954003:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:03:29.812201Z node 1 :TX_PROXY ERROR: Actor# [1:7486783450069954054:2461] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } f f t t 18 2025-03-28T09:03:30.223011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:03:30.332389Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:03:30.342772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:03:30.449458Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 21 2025-03-28T09:03:30.840211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:03:30.916771Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:03:30.931871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:03:31.044402Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:03:31.071097Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783437185051264:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:31.071373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 23 2025-03-28T09:03:31.428626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:03:31.538787Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:03:31.542703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 20 2025-03-28T09:03:31.984159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:03:32.076292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:03:32.134978Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 700 2025-03-28T09:03:32.575987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:03:32.649080Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:03:32.653876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-03-28T09:03:32.718117Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 8.5 8.5 9.5 9.5 701 2025-03-28T09:03:33.116394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 2025-03-28T09:03:33.177368Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:03:33.186643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 8.5 8.5 9.5 9.5 25 2025-03-28T09:03:33.605569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480 2025-03-28T09:03:33.700913Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:03:33.708274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 text 0 text 0 text 1 text 1 text 2 text 2 text 3 text 3 text 4 text 4 text 5 text 5 text 6 text 6 text 7 text 7 text 8 text 8 text 9 text 9 1042 2025-03-28T09:03:34.127104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 2025-03-28T09:03:34.187105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-03-28T09:03:34.267982Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill bpchar 0 bpchar 0 bpchar 1 bpchar 1 bpchar 2 bpchar 2 bpchar 3 bpchar 3 bpchar 4 bpchar 4 bpchar 5 bpchar 5 bpchar 6 bpchar 6 bpchar 7 bpchar 7 bpchar 8 bpchar 8 bpchar 9 bpchar 9 1043 2025-03-28T09:03:34.613108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-03-28T09:03:34.681827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 varchar 0 varchar 0 varchar 1 varchar 1 varchar 2 varchar 2 varchar 3 varchar 3 varchar 4 varchar 4 varchar 5 varchar 5 varchar 6 varchar 6 varchar 7 varchar 7 varchar 8 varchar 8 varchar 9 varchar 9 19 2025-03-28T09:03:35.087404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 28147497671 ... 
710658 completed, doublechecking } 2025-03-28T09:05:44.675548Z node 10 :TX_PROXY ERROR: Actor# [10:7486784027538452395:2349] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:44.909643Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:05:45.444603Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 64523, MsgBus: 11211 2025-03-28T09:05:47.591979Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7486784044000645728:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:47.592071Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001308/r3tmp/tmpP2LXWC/pdisk_1.dat 2025-03-28T09:05:47.820457Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:47.820594Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:47.823422Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64523, node 11 2025-03-28T09:05:47.848346Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:48.003460Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:48.003493Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:48.003507Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:48.003689Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11211 TClient is connected to server localhost:11211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:05:49.095951Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:49.107972Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:05:52.594833Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7486784044000645728:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:52.594944Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:54.191888Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7486784074065417461:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:54.192104Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:54.198949Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7486784074065417473:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:54.206533Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:05:54.251861Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7486784074065417475:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:05:54.343384Z node 11 :TX_PROXY ERROR: Actor# [11:7486784074065417526:2347] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:54.396514Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:05:55.212716Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29967, MsgBus: 1097 2025-03-28T09:05:57.818548Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7486784087325684985:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:57.818669Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001308/r3tmp/tmp0joaiK/pdisk_1.dat 2025-03-28T09:05:58.060636Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:58.089990Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:58.090149Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:58.091958Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29967, node 12 2025-03-28T09:05:58.237878Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:58.237903Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:58.237913Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:58.238052Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1097 TClient is connected to server localhost:1097 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:05:59.250182Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:02.818761Z node 12 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7486784087325684985:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:02.818860Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:06:04.170317Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7486784117390456725:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:04.170520Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:04.171043Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7486784117390456737:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:04.182341Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:06:04.221659Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7486784117390456739:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:06:04.318837Z node 12 :TX_PROXY ERROR: Actor# [12:7486784117390456790:2346] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:04.361607Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 >> TConsoleConfigTests::TestDryRun [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:06:06.297574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:06:06.297706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:06:06.297758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:06:06.297799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:06:06.297865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:06:06.297900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:06:06.297970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:06:06.298059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:06:06.298455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:06:06.385710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:06.385776Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:06.408208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:06:06.408506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:06:06.408701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:06:06.414859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:06:06.415088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:06:06.415789Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:06:06.416054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:06:06.418013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:06:06.419373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:06:06.419442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:06:06.419618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:06:06.419703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:06:06.419757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:06:06.419862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:06:06.430557Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:06:06.557895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:06:06.558166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:06.558406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:06:06.558706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:06:06.558767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:06.561450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:06:06.561639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:06:06.561864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:06.561918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:06:06.561956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:06:06.561993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:06:06.564423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-03-28T09:06:06.564498Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:06:06.564536Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:06:06.566546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:06.566601Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:06.566640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:06:06.566709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:06:06.570231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:06:06.572467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:06:06.572659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:06:06.574116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:06:06.574280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:06:06.574335Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:06:06.574649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:06:06.574748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:06:06.574935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:06:06.575015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:06:06.577428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:06:06.577506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:06:06.577717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:06:06.577783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:06:06.578207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:06.578253Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:06:06.578353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:06:06.578392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:06:06.578427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:06:06.578456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:06:06.578495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:06:06.578532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:06:06.578567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:06:06.578616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:06:06.578710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:06:06.578751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:06:06.578800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:06:06.580592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:06:06.580706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:06:06.580760Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
State::TPropose ProgressState leave, operationId 104:0, at tablet# 72057594046678944 2025-03-28T09:06:06.957603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-03-28T09:06:06.957773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:06:06.959716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-03-28T09:06:06.959886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-03-28T09:06:06.960280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:06:06.960415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:06:06.960456Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-03-28T09:06:06.960712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-03-28T09:06:06.960789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-03-28T09:06:06.960893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-28T09:06:06.960989Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:610:2538], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-03-28T09:06:06.963261Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:06:06.963300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:06:06.963444Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:06:06.963495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at 
schemeshard: 72057594046678944, txId: 104, path id: 3 2025-03-28T09:06:06.963782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:06:06.963842Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2025-03-28T09:06:06.963876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 240 -> 240 2025-03-28T09:06:06.964692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:06:06.964820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:06:06.964860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:06:06.964902Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-03-28T09:06:06.964938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-03-28T09:06:06.965029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-28T09:06:06.968111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:06:06.968167Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-28T09:06:06.968269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:06:06.968309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:06:06.968347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:06:06.968380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:06:06.968445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-03-28T09:06:06.968488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:06:06.968546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:06:06.968581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:06:06.968843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-28T09:06:06.969315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-28T09:06:06.971186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-28T09:06:06.971233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-28T09:06:06.971636Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 
72057594046678944 2025-03-28T09:06:06.971721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:06:06.971755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:763:2645] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-03-28T09:06:06.974787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:06:06.974960Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } 2025-03-28T09:06:06.974995Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/SharedDB 2025-03-28T09:06:06.975122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2025-03-28T09:06:06.975165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2025-03-28T09:06:06.979668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:06:06.979840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, operation: ALTER DATABASE, path: /MyRoot/SharedDB TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2025-03-28T09:06:06.983038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:06:06.983194Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } 2025-03-28T09:06:06.983232Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, path /MyRoot/ServerLess0 2025-03-28T09:06:06.983358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject 
Propose, opId: 106:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2025-03-28T09:06:06.983402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2025-03-28T09:06:06.986062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:06:06.986240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 106, wait until txId: 106 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 >> KqpPg::JoinWithQueryService-StreamLookup [GOOD] >> KqpPg::PgAggregate+useSink >> TLogSettingsConfiguratorTests::TestChangeDefaults [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestApplyValidatorsWithOldConfig [GOOD] >> TModificationsValidatorTests::TestChecksLimitError [GOOD] >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 >> TBlobStorageProxyTest::TestDoubleEmptyGet >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 >> TBlobStorageProxyTest::TestProxySimpleDiscover [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] Test command err: 2025-03-28T09:05:52.387974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:52.388060Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:52.493852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:05:53.907140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:53.907195Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:53.949755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 
72057594046578944 2025-03-28T09:05:55.168191Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:55.168258Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:55.243895Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:05:56.382625Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:56.382725Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:56.471948Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:05:57.792772Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:57.792843Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:57.843673Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:05:59.142531Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:59.142609Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:59.186462Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:00.224431Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:00.224506Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:00.272424Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:01.481213Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:01.481289Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:01.532325Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:02.576951Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:02.577028Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:02.637568Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:03.730342Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:03.730412Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:03.799716Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:05.083757Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 268637729, Sender [11:149:2162], Recipient [11:356:2295]: {TEvControllerProposeConfigRequest Record# } 2025-03-28T09:06:05.083866Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvBlobStorage::TEvControllerProposeConfigRequest 2025-03-28T09:06:05.094712Z node 11 :CMS_CONFIGS TRACE: StateWork, received 
event# 269877760, Sender [11:317:2284], Recipient [11:316:2281]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037936131 Status: OK ServerId: [11:409:2341] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:06:05.094810Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T09:06:05.115161Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285144, Sender [11:316:2281], Recipient [11:356:2295]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionRequest { Generation: 1 Options { NodeId: 11 Host: "ghrun-mdsjyjltvm.auto.internal" Tenant: "" NodeType: "" } ConfigItemKinds: 29 ConfigItemKinds: 1 ConfigItemKinds: 89 ConfigItemKinds: 2 ConfigItemKinds: 32 ConfigItemKinds: 3 ConfigItemKinds: 33 ConfigItemKinds: 34 ConfigItemKinds: 6 ConfigItemKinds: 36 ConfigItemKinds: 37 ConfigItemKinds: 8 ConfigItemKinds: 38 ConfigItemKinds: 10 ConfigItemKinds: 39 ConfigItemKinds: 43 ConfigItemKinds: 73 ConfigItemKinds: 75 ConfigItemKinds: 46 ConfigItemKinds: 77 ConfigItemKinds: 80 ConfigItemKinds: 81 ConfigItemKinds: 52 ConfigItemKinds: 54 ConfigItemKinds: 25 ConfigItemKinds: 55 ConfigItemKinds: 26 ServeYaml: true YamlApiVersion: 1 } 2025-03-28T09:06:05.115531Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285144, Sender [11:316:2281], Recipient [11:363:2307]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionRequest { Generation: 1 Options { NodeId: 11 Host: "ghrun-mdsjyjltvm.auto.internal" Tenant: "" NodeType: "" } ConfigItemKinds: 29 ConfigItemKinds: 1 ConfigItemKinds: 89 ConfigItemKinds: 2 ConfigItemKinds: 32 ConfigItemKinds: 3 ConfigItemKinds: 33 ConfigItemKinds: 34 ConfigItemKinds: 6 ConfigItemKinds: 36 ConfigItemKinds: 37 ConfigItemKinds: 8 ConfigItemKinds: 38 ConfigItemKinds: 10 ConfigItemKinds: 39 ConfigItemKinds: 43 ConfigItemKinds: 73 ConfigItemKinds: 75 ConfigItemKinds: 46 ConfigItemKinds: 77 ConfigItemKinds: 80 ConfigItemKinds: 81 ConfigItemKinds: 52 ConfigItemKinds: 54 ConfigItemKinds: 25 ConfigItemKinds: 55 ConfigItemKinds: 26 ServeYaml: true YamlApiVersion: 1 } 2025-03-28T09:06:05.115619Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionRequest 2025-03-28T09:06:05.115756Z node 11 :CMS_CONFIGS DEBUG: TConfigsProvider registered new subscription [11:316:2281]:1 2025-03-28T09:06:05.115884Z node 11 :CMS_CONFIGS TRACE: TConfigsProvider: check if update is required for volatile subscription [11:316:2281]:1 2025-03-28T09:06:05.115968Z node 11 :CMS_CONFIGS TRACE: TConfigsProvider: new config found for subscription [11:316:2281]:1 version= 2025-03-28T09:06:05.116123Z node 11 :CMS_CONFIGS TRACE: TSubscriptionClientSender([11:316:2281]) send TEvConfigSubscriptionResponse 2025-03-28T09:06:05.117321Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273286169, Sender [11:410:2307], Recipient [11:316:2281]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionResponse { Generation: 1 Status { Code: SUCCESS } } 2025-03-28T09:06:05.117386Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionResponse 2025-03-28T09:06:05.117639Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285146, Sender [11:363:2307], Recipient [11:410:2307]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Generation: 1 Config { } MainYamlConfigNotChanged: true } 2025-03-28T09:06:05.117713Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-03-28T09:06:05.117810Z node 11 :CMS_CONFIGS 
TRACE: TSubscriptionClientSender([11:316:2281]) send TEvConfigSubscriptionNotificationRequest: Order: 1 Generation: 1 Config { } MainYamlConfigNotChanged: true 2025-03-28T09:06:05.117969Z node 11 :CMS_CONFIGS TRACE: StateWork, received event# 273285146, Sender [11:410:2307], Recipient [11:316:2281]: NKikimr::NConsole::TEvConsole::TEvConfigSubscriptionNotification { Order: 1 Generation: 1 Config { } MainYamlConfigNotChanged: true } 2025-03-28T09:06:05.118026Z node 11 :CMS_CONFIGS TRACE: StateWork, processing event TEvConsole::TEvConfigSubscriptionNotification 2025-03-28T09:06:05.123369Z node 11 :CMS_CONFIGS INFO: TLogSettingsConfigurator: got new config: 2025-03-28T09:06:05.123496Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component GLOBAL has been changed from WARN to NOTICE 2025-03-28T09:06:05.123578Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component GLOBAL has been changed from WARN to DEBUG 2025-03-28T09:06:05.123634Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT has been changed from WARN to NOTICE 2025-03-28T09:06:05.123670Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT has been changed from WARN to DEBUG 2025-03-28T09:06:05.123700Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TEST has been changed from WARN to NOTICE 2025-03-28T09:06:05.123730Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TEST has been changed from WARN to DEBUG 2025-03-28T09:06:05.123757Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component PROTOCOLS has been changed from WARN to NOTICE 2025-03-28T09:06:05.123787Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component PROTOCOLS has been changed from WARN to DEBUG 2025-03-28T09:06:05.123821Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_SPEED_TEST has been changed from WARN to NOTICE 2025-03-28T09:06:05.123853Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_SPEED_TEST has been changed from WARN to DEBUG 2025-03-28T09:06:05.123886Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_STATUS has been changed from WARN to NOTICE 2025-03-28T09:06:05.123918Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_STATUS has been changed from WARN to DEBUG 2025-03-28T09:06:05.123949Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_NETWORK has been changed from WARN to NOTICE 2025-03-28T09:06:05.123979Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_NETWORK has been changed from WARN to DEBUG 2025-03-28T09:06:05.124007Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component INTERCONNECT_SESSION has been changed from WARN to NOTICE 2025-03-28T09:06:05.124035Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component INTERCONNECT_SESSION has been changed from WARN to DEBUG 2025-03-28T09:06:05.124062Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component HTTP has been changed from WARN to NOTICE 2025-03-28T09:06:05.124090Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: 
Sampling priority for the component HTTP has been changed from WARN to DEBUG 2025-03-28T09:06:05.124123Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component LOGGER has been changed from WARN to NOTICE 2025-03-28T09:06:05.124159Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component LOGGER has been changed from WARN to DEBUG 2025-03-28T09:06:05.124194Z node 11 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BLOBSTORAGE has been changed from WARN to NOTI ... ZER has been changed from 0 to 10 2025-03-28T09:06:08.130195Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_MANAGER has been changed from NOTICE to ALERT 2025-03-28T09:06:08.130222Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_MANAGER has been changed from DEBUG to ALERT 2025-03-28T09:06:08.130266Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_MANAGER has been changed from 0 to 10 2025-03-28T09:06:08.130295Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component METADATA_SECRET has been changed from NOTICE to ALERT 2025-03-28T09:06:08.130323Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component METADATA_SECRET has been changed from DEBUG to ALERT 2025-03-28T09:06:08.130346Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component METADATA_SECRET has been changed from 0 to 10 2025-03-28T09:06:08.130372Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_TIERING has been changed from NOTICE to ALERT 2025-03-28T09:06:08.130398Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_TIERING has been changed from DEBUG to ALERT 2025-03-28T09:06:08.130424Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_TIERING has been changed from 0 to 10 2025-03-28T09:06:08.130452Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BG_TASKS has been changed from NOTICE to ALERT 2025-03-28T09:06:08.130476Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BG_TASKS has been changed from DEBUG to ALERT 2025-03-28T09:06:08.130501Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BG_TASKS has been changed from 0 to 10 2025-03-28T09:06:08.130532Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY has been changed from NOTICE to ALERT 2025-03-28T09:06:08.130557Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY has been changed from DEBUG to ALERT 2025-03-28T09:06:08.130580Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY has been changed from 0 to 10 2025-03-28T09:06:08.130603Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DISCOVERY_CACHE has been changed from NOTICE to ALERT 2025-03-28T09:06:08.130629Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component DISCOVERY_CACHE has been changed from DEBUG to ALERT 2025-03-28T09:06:08.130652Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DISCOVERY_CACHE has been changed from 0 to 10 2025-03-28T09:06:08.130714Z node 14 
:CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component EXT_INDEX has been changed from NOTICE to ALERT 2025-03-28T09:06:08.130750Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component EXT_INDEX has been changed from DEBUG to ALERT 2025-03-28T09:06:08.130774Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component EXT_INDEX has been changed from 0 to 10 2025-03-28T09:06:08.130802Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_CONVEYOR has been changed from NOTICE to ALERT 2025-03-28T09:06:08.130827Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_CONVEYOR has been changed from DEBUG to ALERT 2025-03-28T09:06:08.130852Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_CONVEYOR has been changed from 0 to 10 2025-03-28T09:06:08.130877Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_LIMITER has been changed from NOTICE to ALERT 2025-03-28T09:06:08.130902Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_LIMITER has been changed from DEBUG to ALERT 2025-03-28T09:06:08.130926Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_LIMITER has been changed from 0 to 10 2025-03-28T09:06:08.130953Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component ARROW_HELPER has been changed from NOTICE to ALERT 2025-03-28T09:06:08.130978Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component ARROW_HELPER has been changed from DEBUG to ALERT 2025-03-28T09:06:08.131002Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component ARROW_HELPER has been changed from 0 to 10 2025-03-28T09:06:08.131025Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component SSA_GRAPH_EXECUTION has been changed from NOTICE to ALERT 2025-03-28T09:06:08.131051Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component SSA_GRAPH_EXECUTION has been changed from DEBUG to ALERT 2025-03-28T09:06:08.131073Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component SSA_GRAPH_EXECUTION has been changed from 0 to 10 2025-03-28T09:06:08.131098Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component KAFKA_PROXY has been changed from NOTICE to ALERT 2025-03-28T09:06:08.131135Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component KAFKA_PROXY has been changed from DEBUG to ALERT 2025-03-28T09:06:08.131160Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component KAFKA_PROXY has been changed from 0 to 10 2025-03-28T09:06:08.131187Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component OBJECTS_MONITORING has been changed from NOTICE to ALERT 2025-03-28T09:06:08.131221Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component OBJECTS_MONITORING has been changed from DEBUG to ALERT 2025-03-28T09:06:08.131251Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component OBJECTS_MONITORING has been changed from 0 to 10 2025-03-28T09:06:08.131277Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component STATISTICS has been changed from NOTICE to 
ALERT 2025-03-28T09:06:08.131317Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component STATISTICS has been changed from DEBUG to ALERT 2025-03-28T09:06:08.131343Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component STATISTICS has been changed from 0 to 10 2025-03-28T09:06:08.131369Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_REQUEST_COST has been changed from NOTICE to ALERT 2025-03-28T09:06:08.131395Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_REQUEST_COST has been changed from DEBUG to ALERT 2025-03-28T09:06:08.131420Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_REQUEST_COST has been changed from 0 to 10 2025-03-28T09:06:08.131448Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_VDISK_BALANCING has been changed from NOTICE to ALERT 2025-03-28T09:06:08.131473Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_VDISK_BALANCING has been changed from DEBUG to ALERT 2025-03-28T09:06:08.131508Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_VDISK_BALANCING has been changed from 0 to 10 2025-03-28T09:06:08.131536Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_PROXY_GETBLOCK has been changed from NOTICE to ALERT 2025-03-28T09:06:08.131561Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_GETBLOCK has been changed from DEBUG to ALERT 2025-03-28T09:06:08.131596Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_GETBLOCK has been changed from 0 to 10 2025-03-28T09:06:08.131623Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BS_SHRED has been changed from NOTICE to ALERT 2025-03-28T09:06:08.131647Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BS_SHRED has been changed from DEBUG to ALERT 2025-03-28T09:06:08.131669Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BS_SHRED has been changed from 0 to 10 2025-03-28T09:06:08.131701Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component LDAP_AUTH_PROVIDER has been changed from NOTICE to ALERT 2025-03-28T09:06:08.131742Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component LDAP_AUTH_PROVIDER has been changed from DEBUG to ALERT 2025-03-28T09:06:08.131782Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component LDAP_AUTH_PROVIDER has been changed from 0 to 10 2025-03-28T09:06:08.131810Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component GROUPED_MEMORY_LIMITER has been changed from NOTICE to ALERT 2025-03-28T09:06:08.131836Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component GROUPED_MEMORY_LIMITER has been changed from DEBUG to ALERT 2025-03-28T09:06:08.131859Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component GROUPED_MEMORY_LIMITER has been changed from 0 to 10 2025-03-28T09:06:08.131886Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component DATA_INTEGRITY has been changed from NOTICE to ALERT 2025-03-28T09:06:08.131911Z node 14 :CMS_CONFIGS NOTICE: 
TLogSettingsConfigurator: Sampling priority for the component DATA_INTEGRITY has been changed from DEBUG to ALERT 2025-03-28T09:06:08.131934Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component DATA_INTEGRITY has been changed from 0 to 10 2025-03-28T09:06:08.131974Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component TX_PRIORITIES_QUEUE has been changed from NOTICE to ALERT 2025-03-28T09:06:08.132003Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component TX_PRIORITIES_QUEUE has been changed from DEBUG to ALERT 2025-03-28T09:06:08.132026Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component TX_PRIORITIES_QUEUE has been changed from 0 to 10 2025-03-28T09:06:08.132065Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component BSCONFIG has been changed from NOTICE to ALERT 2025-03-28T09:06:08.132091Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component BSCONFIG has been changed from DEBUG to ALERT 2025-03-28T09:06:08.132116Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component BSCONFIG has been changed from 0 to 10 2025-03-28T09:06:08.132144Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Priority for the component NAMESERVICE has been changed from NOTICE to ALERT 2025-03-28T09:06:08.132170Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling priority for the component NAMESERVICE has been changed from DEBUG to ALERT 2025-03-28T09:06:08.132202Z node 14 :CMS_CONFIGS NOTICE: TLogSettingsConfigurator: Sampling rate for the component NAMESERVICE has been changed from 0 to 10 2025-03-28T09:06:08.132325Z node 14 :CMS_CONFIGS TRACE: TLogSettingsConfigurator: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain >> TBlobStorageProxyTest::TestEmptyDiscover >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart >> TBlobStorageProxyTest::TestVPutVCollectVGetRace >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 >> TBlobStorageProxyTest::TestVPutVCollectVGetRace [GOOD] >> TBlobStorageProxyTest::TestVGetNoData >> KqpTx::ExplicitTcl >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified >> TBlobStorageProxyTest::TestEmptyDiscover [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi >> TAccessServiceTest::Authenticate >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] Test command err: 2025-03-28T09:04:05.135995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:05.136444Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:05.136520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0010d1/r3tmp/tmpdI31P9/pdisk_1.dat 2025-03-28T09:04:05.515985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:05.580841Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:05.623618Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:04:05.624678Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:04:05.624896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:05.625015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:05.636782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:05.720832Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:04:05.720926Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:04:05.721093Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:04:05.844038Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:04:05.844136Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:04:05.844716Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:04:05.844816Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:04:05.845184Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:04:05.845392Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:04:05.845504Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:04:05.845859Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:04:05.847583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:05.848683Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:04:05.848764Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:04:05.881769Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:04:05.883012Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:04:05.883468Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:04:05.883705Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:05.928445Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:04:05.929207Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:05.929325Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:05.930998Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:04:05.931093Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:04:05.931148Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:04:05.931560Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:05.931716Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:05.931802Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:04:05.932249Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:04:05.987374Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:04:05.987583Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:04:05.987722Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:04:05.987770Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:04:05.987807Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:04:05.987845Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:05.988061Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-28T09:04:05.988117Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:04:05.988458Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:04:05.988564Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:04:05.989012Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:04:05.989064Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:05.989110Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:04:05.989170Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:04:05.989205Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:04:05.989256Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:04:05.989305Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:04:05.989429Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:05.989464Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:05.989510Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:04:05.989576Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:04:05.989618Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:04:05.989772Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:04:05.990032Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:04:05.990107Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:04:05.990216Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:04:05.990263Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:04:05.990299Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:04:05.990335Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:04:05.990368Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:04:05.990707Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:04:05.990760Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:04:05.990797Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:04:05.990846Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-28T09:04:05.990909Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:04:05.990959Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:04:05.990998Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:04:05.991050Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:04:05.991081Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:04:05.991940Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:04:05.992003Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:04:05.992038Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:04:05.992080Z node 1 :TX_DATASHARD TRACE: Prop ... xKind: TX_KIND_DATA SourceDeprecated { RawX1: 990 RawX2: 60129544830 } TxBody: " \0018\000jK\010\001\032;\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\001\020\200\200\204\200\200\200\204\200\001\030\200\200\204\200\200\200\204\200\001 \002\"\n\010\340\247\022\020\0020\000@\n\220\001\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2025-03-28T09:06:09.217883Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:06:09.218088Z node 14 :TX_DATASHARD NOTICE: Rejecting data TxId 281474976715665 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-03-28T09:06:09.218366Z node 14 :KQP_EXECUTER ERROR: ActorId: [14:990:2686] TxId: 281474976715665. Ctx: { TraceId: 01jqe00t2qdyq2n8tsva3je53k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=ZTkwNGYzYWEtMmM5YmJhMjgtNmQ4NjI5YmYtYjgyNDFjM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ERROR: [WRONG_SHARD_STATE] Rejecting data TxId 281474976715665 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state); 2025-03-28T09:06:09.218908Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ZTkwNGYzYWEtMmM5YmJhMjgtNmQ4NjI5YmYtYjgyNDFjM2E=, ActorId: [14:840:2686], ActorState: ExecuteState, TraceId: 01jqe00t2qdyq2n8tsva3je53k, Create QueryResponse for error on request, msg: 2025-03-28T09:06:09.220042Z node 14 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqe00t2qdyq2n8tsva3je53k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=ZTkwNGYzYWEtMmM5YmJhMjgtNmQ4NjI5YmYtYjgyNDFjM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:06:09.220377Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [14:993:2686], Recipient [14:690:2580]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 993 RawX2: 60129544830 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\001 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715666 ExecLevel: 0 Flags: 8 2025-03-28T09:06:09.220418Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:06:09.220494Z node 14 :TX_DATASHARD NOTICE: Rejecting data TxId 281474976715666 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-03-28T09:06:09.220571Z node 14 :KQP_EXECUTER ERROR: ActorId: [14:993:2686] TxId: 281474976715666. Ctx: { TraceId: 01jqe00t2qdyq2n8tsva3je53k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=ZTkwNGYzYWEtMmM5YmJhMjgtNmQ4NjI5YmYtYjgyNDFjM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ERROR: [WRONG_SHARD_STATE] Rejecting data TxId 281474976715666 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state); 2025-03-28T09:06:09.220916Z node 14 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=14&id=ZTkwNGYzYWEtMmM5YmJhMjgtNmQ4NjI5YmYtYjgyNDFjM2E=, ActorId: [14:840:2686], ActorState: CleanupState, TraceId: 01jqe00t2qdyq2n8tsva3je53k, Failed to cleanup:
: Error: Kikimr cluster or one of its subsystems was unavailable., code: 2005
: Error: [WRONG_SHARD_STATE] Rejecting data TxId 281474976715666 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) ... blocking NKikimr::NLongTxService::TEvLongTxService::TEvLockStatus from LONG_TX_SERVICE to TX_DATASHARD_ACTOR 2025-03-28T09:06:09.224218Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 65543, Sender [14:594:2519], Recipient [14:690:2580]: NActors::TEvents::TEvPoison 2025-03-28T09:06:09.224833Z node 14 :TX_DATASHARD INFO: OnDetach: 72075186224037888 2025-03-28T09:06:09.224940Z node 14 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-03-28T09:06:09.246611Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [14:997:2817], Recipient [14:999:2818]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:06:09.255191Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [14:997:2817], Recipient [14:999:2818]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:06:09.255399Z node 14 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [14:997:2817], Recipient [14:999:2818]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:06:09.259935Z node 14 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [14:999:2818] 2025-03-28T09:06:09.260416Z node 14 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:06:09.271829Z node 14 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:06:09.273934Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:06:09.277543Z node 14 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:06:09.277693Z node 14 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:06:09.277819Z node 14 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:06:09.278580Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:06:09.279057Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:06:09.279161Z node 14 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:06:09.279285Z node 14 :TX_DATASHARD INFO: Switched to work state PreOffline tabletId 72075186224037888 2025-03-28T09:06:09.279487Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 1 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:06:09.279584Z node 14 :TX_DATASHARD INFO: Send registration request to time cast PreOffline tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:06:09.279766Z node 14 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [14:1013:2825] 2025-03-28T09:06:09.279839Z node 14 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:06:09.279912Z node 14 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: PreOffline, queue size: 0 2025-03-28T09:06:09.279976Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:06:09.280450Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [14:61:2108], Recipient [14:999:2818]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 14 Status: STATUS_NOT_FOUND 2025-03-28T09:06:09.280909Z node 14 
:TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [14:999:2818], Recipient [14:999:2818]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:06:09.280978Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:06:09.281413Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 2146435075, Sender [14:999:2818], Recipient [14:999:2818]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressResendReadSet 2025-03-28T09:06:09.281475Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressResendReadSet 2025-03-28T09:06:09.282656Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [14:24:2071], Recipient [14:999:2818]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 600} 2025-03-28T09:06:09.282750Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-03-28T09:06:09.282831Z node 14 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 600 2025-03-28T09:06:09.282925Z node 14 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:06:09.284042Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:06:09.284120Z node 14 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037888 state 5 2025-03-28T09:06:09.284257Z node 14 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-03-28T09:06:09.284355Z node 14 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2025-03-28T09:06:09.284443Z node 14 :TX_DATASHARD DEBUG: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2025-03-28T09:06:09.285049Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [14:999:2818], Recipient [14:898:2731]: {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-03-28T09:06:09.285122Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:06:09.285212Z node 14 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2025-03-28T09:06:09.285363Z node 14 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-03-28T09:06:09.285473Z node 14 :TX_DATASHARD NOTICE: Outdated readset for 500:281474976715663 at 72075186224037889 2025-03-28T09:06:09.285577Z node 14 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-03-28T09:06:09.285687Z node 14 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037889 {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2025-03-28T09:06:09.285919Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [14:24:2071], Recipient [14:999:2818]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 400 NextReadStep# 600 ReadStep# 600 
} 2025-03-28T09:06:09.285986Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-03-28T09:06:09.286065Z node 14 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 400 next step 600 2025-03-28T09:06:09.286319Z node 14 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [14:898:2731], Recipient [14:999:2818]: {TEvReadSet step# 500 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2025-03-28T09:06:09.286374Z node 14 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:06:09.286455Z node 14 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2025-03-28T09:06:09.286572Z node 14 :TX_DATASHARD DEBUG: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:06:09.290333Z node 14 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 >> TBlobStorageProxyTest::TestVGetNoData [GOOD] >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] >> TBlobStorageProxyTest::TestDoubleEmptyGet [GOOD] >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVGetNoData [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink |78.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |78.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |78.1%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |78.1%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut >> KqpSinkMvcc::OltpMultiSinksNoSinks >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend >> TAccessServiceTest::Authenticate [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 >> KqpLocks::TwoPhaseTx >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 >> KqpLocks::Invalidate >> KqpPg::PgAggregate+useSink [GOOD] >> KqpPg::PgAggregate-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2025-03-28T09:06:11.529920Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784143730134747:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:11.530116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ec1/r3tmp/tmpA487sj/pdisk_1.dat 2025-03-28T09:06:11.939136Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:11.944263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:11.944378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:11.947753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3173 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:12.243519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:06:12.258045Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:06:12.279688Z node 1 :GRPC_CLIENT DEBUG: [51700008c808] Connect to grpc://localhost:1709
2025-03-28T09:06:12.281191Z node 1 :GRPC_CLIENT DEBUG: [51700008c808] Request AuthenticateRequest { iam_token: "**** (047D44F1)" }
2025-03-28T09:06:12.294160Z node 1 :GRPC_CLIENT DEBUG: [51700008c808] Status 7 Permission Denied
2025-03-28T09:06:12.294905Z node 1 :GRPC_CLIENT DEBUG: [51700008c808] Request AuthenticateRequest { iam_token: "**** (342498C1)" }
2025-03-28T09:06:12.296982Z node 1 :GRPC_CLIENT DEBUG: [51700008c808] Response AuthenticateResponse { subject { user_account { id: "1234" } } }
>> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD]
Test command err:
2025-03-28T09:04:05.077832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:05.078285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:05.078340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0010e1/r3tmp/tmpzyFbtc/pdisk_1.dat 2025-03-28T09:04:05.641632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:05.696697Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:05.739373Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:04:05.740429Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:04:05.740642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:05.740765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:05.754197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:05.845173Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:04:05.845247Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:04:05.845403Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:04:06.040674Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:04:06.040782Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:04:06.041372Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:04:06.041454Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:04:06.041760Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:04:06.041966Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:04:06.042057Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:04:06.042393Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:04:06.049158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:06.051656Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:04:06.051738Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:04:06.096038Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:04:06.097117Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:04:06.097559Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:04:06.097863Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:06.147463Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:04:06.148185Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:06.148330Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:06.150754Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:04:06.150910Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:04:06.150978Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:04:06.151482Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:06.151676Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:06.151768Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:04:06.152360Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:04:06.208193Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:04:06.208401Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:04:06.208547Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:04:06.208604Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:04:06.208640Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:04:06.208687Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:06.208916Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-28T09:04:06.208963Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:04:06.209244Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:04:06.209367Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:04:06.209852Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:04:06.209904Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:06.209944Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:04:06.209978Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:04:06.210009Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:04:06.210041Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:04:06.210106Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:04:06.210223Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:06.210259Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:06.210302Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:04:06.210371Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:04:06.210408Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:04:06.210523Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:04:06.213182Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:04:06.213275Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:04:06.213379Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:04:06.213462Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:04:06.213525Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:04:06.213568Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:04:06.213602Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:04:06.213925Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:04:06.213972Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:04:06.214021Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:04:06.214054Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-28T09:04:06.214137Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:04:06.214169Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:04:06.214202Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:04:06.214245Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:04:06.214277Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:04:06.215200Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:04:06.215248Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:04:06.215281Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:04:06.215336Z node 1 :TX_DATASHARD TRACE: Prop ... ablet: 72075186224037890 2025-03-28T09:06:12.111391Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037890 2025-03-28T09:06:12.111870Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 278593539, Sender [13:834:2694], Recipient [13:666:2570]: NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3,4] } 2025-03-28T09:06:12.111985Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435091, Sender [13:666:2570], Recipient [13:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRemoveChangeRecords 2025-03-28T09:06:12.112025Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037888 2025-03-28T09:06:12.112050Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 2, at tablet: 72075186224037888 2025-03-28T09:06:12.112109Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 3, at tablet: 72075186224037888 2025-03-28T09:06:12.112149Z node 13 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037888 2025-03-28T09:06:12.112509Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037890 2025-03-28T09:06:12.112682Z node 13 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037888 2025-03-28T09:06:12.358216Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:1078:2892]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:06:13.029890Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] Handle TEvExecuteKqpTransaction 2025-03-28T09:06:13.030011Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] TxId# 281474976715671 ProcessProposeKqpTransaction 2025-03-28T09:06:13.031570Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqe00x9a5h20ek521wmm6c0e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NGY5ZDk1MmItMmIzYzE5OTUtNDA3M2Y0ZTctZGFiY2M3ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2025-03-28T09:06:13.034438Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1705:3414], Recipient [13:796:2664]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2025-03-28T09:06:13.034782Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-03-28T09:06:13.034888Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037889 CompleteEdge# v8043/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:06:13.034976Z node 13 :TX_DATASHARD TRACE: 72075186224037889 changed HEAD read to non-repeatable v9000/18446744073709551615 2025-03-28T09:06:13.035100Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2025-03-28T09:06:13.035317Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-03-28T09:06:13.035410Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2025-03-28T09:06:13.035492Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-28T09:06:13.035563Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-28T09:06:13.035631Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037889 2025-03-28T09:06:13.035706Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-03-28T09:06:13.035743Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-28T09:06:13.035776Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2025-03-28T09:06:13.035808Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2025-03-28T09:06:13.036002Z node 13 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 4 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-03-28T09:06:13.036502Z node 13 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[13:1705:3414], 0} after executionsCount# 1 2025-03-28T09:06:13.036619Z node 13 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[13:1705:3414], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2025-03-28T09:06:13.036783Z node 13 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[13:1705:3414], 0} finished in read 2025-03-28T09:06:13.036910Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-03-28T09:06:13.036948Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2025-03-28T09:06:13.036982Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2025-03-28T09:06:13.037015Z node 13 
:TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2025-03-28T09:06:13.037077Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037889 is Executed 2025-03-28T09:06:13.037103Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2025-03-28T09:06:13.037141Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037889 has finished 2025-03-28T09:06:13.037222Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-03-28T09:06:13.037454Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-03-28T09:06:13.039064Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1705:3414], Recipient [13:796:2664]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:06:13.039156Z node 13 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 21 } } 2025-03-28T09:06:13.658779Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] Handle TEvExecuteKqpTransaction 2025-03-28T09:06:13.658901Z node 13 :TX_PROXY DEBUG: actor# [13:59:2106] TxId# 281474976715672 ProcessProposeKqpTransaction 2025-03-28T09:06:13.660576Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqe00xxw4m34yad3cnkbj90q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZGE1ZGVkZWUtYTk0ZWY5OGMtYWVmNDNlMGMtNzY2N2FlYmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2025-03-28T09:06:13.663653Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:1736:3439], Recipient [13:1078:2892]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2025-03-28T09:06:13.664046Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2025-03-28T09:06:13.664157Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037891 CompleteEdge# v8043/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:06:13.664259Z node 13 :TX_DATASHARD TRACE: 72075186224037891 changed HEAD read to non-repeatable v9000/18446744073709551615 2025-03-28T09:06:13.664386Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit CheckRead 2025-03-28T09:06:13.664602Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed 2025-03-28T09:06:13.664703Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit CheckRead 2025-03-28T09:06:13.664789Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit BuildAndWaitDependencies 2025-03-28T09:06:13.664855Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit BuildAndWaitDependencies 2025-03-28T09:06:13.664924Z node 13 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037891 2025-03-28T09:06:13.664996Z node 13 :TX_DATASHARD 
TRACE: Execution status for [0:4] at 72075186224037891 is Executed
2025-03-28T09:06:13.665038Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit BuildAndWaitDependencies
2025-03-28T09:06:13.665069Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit ExecuteRead
2025-03-28T09:06:13.665100Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit ExecuteRead
2025-03-28T09:06:13.665298Z node 13 :TX_DATASHARD TRACE: 72075186224037891 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 7 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false }
2025-03-28T09:06:13.665778Z node 13 :TX_DATASHARD TRACE: 72075186224037891 Complete read# {[13:1736:3439], 0} after executionsCount# 1
2025-03-28T09:06:13.665905Z node 13 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[13:1736:3439], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0
2025-03-28T09:06:13.666048Z node 13 :TX_DATASHARD TRACE: 72075186224037891 read iterator# {[13:1736:3439], 0} finished in read
2025-03-28T09:06:13.666170Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed
2025-03-28T09:06:13.666205Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit ExecuteRead
2025-03-28T09:06:13.666236Z node 13 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037891 to execution unit CompletedOperations
2025-03-28T09:06:13.666268Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037891 on unit CompletedOperations
2025-03-28T09:06:13.666333Z node 13 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037891 is Executed
2025-03-28T09:06:13.666357Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037891 executing on unit CompletedOperations
2025-03-28T09:06:13.666393Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037891 has finished
2025-03-28T09:06:13.666465Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891
2025-03-28T09:06:13.666673Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891
2025-03-28T09:06:13.667791Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [13:1736:3439], Recipient [13:1078:2892]: NKikimrTxDataShard.TEvReadCancel ReadId: 0
2025-03-28T09:06:13.667886Z node 13 :TX_DATASHARD TRACE: 72075186224037891 ReadCancel: { ReadId: 0 }
{ items { uint32_value: 10 } items { uint32_value: 110 } }, { items { uint32_value: 20 } items { uint32_value: 210 } }
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD]
Test command err:
2025-03-28T09:02:30.872859Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783195700805131:2067];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:02:30.872947Z node 1
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001321/r3tmp/tmp5sPoZd/pdisk_1.dat 2025-03-28T09:02:31.531454Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:31.536916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:31.537003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:31.539151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22631, node 1 2025-03-28T09:02:31.831322Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:31.831348Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:31.831353Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:31.831462Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:02:31.967614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:02:32.000704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:02:32.043026Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7486783204290740336:2309] 2025-03-28T09:02:32.043330Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:32.065321Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:32.065383Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:02:32.069729Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:02:32.069782Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:02:32.069837Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:02:32.070210Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:02:32.070295Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:02:32.070337Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7486783204290740352:2309] in generation 1 2025-03-28T09:02:32.077144Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:02:32.134516Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:02:32.134848Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:02:32.134904Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7486783204290740355:2310] 2025-03-28T09:02:32.134913Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:32.134924Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:02:32.134953Z node 1 
:TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:32.135117Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:02:32.135201Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:02:32.135227Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:32.135254Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:32.135275Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:02:32.135305Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:32.136740Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486783204290740332:2302], serverId# [1:7486783204290740351:2311], sessionId# [0:0:0] 2025-03-28T09:02:32.136847Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:32.137061Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:02:32.137136Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-03-28T09:02:32.138956Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:32.142265Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:32.142366Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:02:32.148689Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486783204290740369:2321], serverId# [1:7486783204290740370:2322], sessionId# [0:0:0] 2025-03-28T09:02:32.153569Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743152552190 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743152552190 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:02:32.153603Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:32.153748Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:32.153819Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:32.153838Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:02:32.153913Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743152552190:281474976710657] in PlanQueue unit at 72075186224037888 2025-03-28T09:02:32.154143Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743152552190:281474976710657 keys extracted: 0 2025-03-28T09:02:32.154286Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:02:32.154400Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:02:32.154444Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 
tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:02:32.156663Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:02:32.157117Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:02:32.160314Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743152552189 2025-03-28T09:02:32.160336Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:32.160371Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743152552190} 2025-03-28T09:02:32.160405Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:32.160443Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:02:32.160459Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:02:32.160489Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:02:32.160538Z node 1 :TX_DATASHARD DEBUG: Complete [1743152552190 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7486783199995772839:2196], exec latency: 2 ms, propose latency: 6 ms 2025-03-28T09:02:32.160563Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-03-28T09:02:32.160591Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:02:32.160667Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743152552197 2025-03-28T09:02:32.161982Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7486783204290740355:2310][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-03-28T09:02:32.170830Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-03-28T09:02:32.170898Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:02:32.176873Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:02:32.177917Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:02:32.178011Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:02:32.178029Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-03-28T09:02:32.178035Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-03-28T09:02:32.207164Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:02:32.243654Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:02:32.252764Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time 
cast 2025-03-28T09:02:32.253012Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:02:32.253274Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2025-03-28T09:02:32.253306Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:02:32.253320Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2025-03-28T09:02:32.253340Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:02:32.255488Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:02:32.255562Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2025-03-28T09:02:32.261566Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [1:7486783204290740425:2367], now have 1 active actors on pipe 2025-03-28T09:02:32.316898Z node 1 :PERSQUEUE DEBUG: [P ... ed on disk 2025-03-28T09:06:11.608468Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 9 requestId: cookie: 5 2025-03-28T09:06:11.608782Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:938:2694] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 5 Offset: 4 WriteTimestampMS: 8969 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 5 } } } 2025-03-28T09:06:11.608932Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:851:2694] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-03-28T09:06:11.609105Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-03-28T09:06:11.609188Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 5, at tablet: 72075186224037888 2025-03-28T09:06:11.609777Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 ... unblocking updates ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... unblocking NKikimr::TEvMediatorTimecast::TEvUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... 
unblocking NKikimr::TEvMediatorTimecast::TEvUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-28T09:06:11.719574Z node 27 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 9000 at tablet 72075186224037888 2025-03-28T09:06:11.719727Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:06:11.719872Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v9000/18446744073709551615, at tablet# 72075186224037888 2025-03-28T09:06:11.720142Z node 27 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-28T09:06:11.723873Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2025-03-28T09:06:11.723997Z node 27 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 } 2025-03-28T09:06:11.724090Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:06:11.724176Z node 27 :TX_DATASHARD DEBUG: Waiting for PlanStep# 12000 from mediator time cast 2025-03-28T09:06:11.724382Z node 27 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][27:684:2580] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 }] } 2025-03-28T09:06:11.724561Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:851:2694] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 0 }] } 2025-03-28T09:06:11.725014Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 2025-03-28T09:06:11.725266Z node 27 :TX_DATASHARD DEBUG: Send 1 change records: to# [27:851:2694], at tablet# 72075186224037888 2025-03-28T09:06:11.725395Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2025-03-28T09:06:11.725584Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:851:2694] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:06:11.725874Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:938:2694] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 6 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:06:11.726184Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-28T09:06:11.726274Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client 
message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-28T09:06:11.726404Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 10 requestId: cookie: 6 2025-03-28T09:06:11.726542Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-28T09:06:11.726564Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-28T09:06:11.726635Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 6 partNo : 0 messageNo: 11 size 26 offset: -1 2025-03-28T09:06:11.727153Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2025-03-28T09:06:11.727279Z node 27 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2025-03-28T09:06:11.727481Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-03-28T09:06:11.766563Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 5 PartNo 0 PackedSize 107 count 1 nextOffset 6 batches 1 2025-03-28T09:06:11.768097Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 5,1 HeadOffset 0 endOffset 5 curOffset 6 d0000000000_00000000000000000005_00000_0000000001_00000| size 93 WTime 8979 2025-03-28T09:06:11.768452Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:06:11.768571Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:06:11.768680Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:06:11.768812Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:06:11.768923Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037889 2025-03-28T09:06:11.769006Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] d0000000000_00000000000000000005_00000_0000000001_00000| 2025-03-28T09:06:11.769042Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:06:11.769110Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:06:11.769211Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:06:11.769413Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:06:11.769642Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 5 partNo 0 count 1 size 93 2025-03-28T09:06:11.771002Z node 27 :PERSQUEUE DEBUG: Caching head blob in L1. 
Partition 0 offset 5 count 1 size 93 actorID [27:796:2661] 2025-03-28T09:06:11.771308Z node 27 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 size 93 2025-03-28T09:06:11.781903Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:06:11.782128Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:11.782302Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-03-28T09:06:11.782806Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 11 requestId: cookie: 6 2025-03-28T09:06:11.783200Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:938:2694] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 6 Offset: 5 WriteTimestampMS: 8979 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 6 } } } 2025-03-28T09:06:11.783403Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:851:2694] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-03-28T09:06:11.783670Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-03-28T09:06:11.783763Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 6, at tablet: 72075186224037888 2025-03-28T09:06:11.784469Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 ... checking the update is logged before the new resolved timestamp >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-03-28T09:06:11.893241Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-28T09:06:11.893399Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-28T09:06:11.893704Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 8 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 6 max time lag 0ms effective offset 0 2025-03-28T09:06:11.893817Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 8 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 6 2025-03-28T09:06:11.893992Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 8. All data is from uncompacted head. 
2025-03-28T09:06:11.894079Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs
2025-03-28T09:06:11.894386Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0
>> TOlap::StoreStats [GOOD]
>> TOlap::StoreStatsQuota
|78.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence
|78.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence
|78.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence
>> KqpPg::V1CreateTable [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply [GOOD]
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb
>> KqpPg::ValuesInsert+useSink
>> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12
>> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink
>> KqpTx::ExplicitTcl [GOOD]
>> KqpTx::EmptyTxOnCommit
>> KqpSinkMvcc::OltpNamedStatementNoSink
>> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD]
>> KqpTx::DeferredEffects
|78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD]
>> KqpTx::RollbackByIdle
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD]
Test command err:
2025-03-28T09:05:50.931595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:05:50.931674Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:05:50.988703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-28T09:05:52.720015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:05:52.720081Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:05:52.762108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-28T09:05:54.131984Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:05:54.132046Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:05:54.215942Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-28T09:05:55.591058Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:05:55.591125Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:05:55.645733Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-28T09:05:56.849045Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:05:56.849127Z node 5 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:05:56.919709Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe,
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:05:58.204341Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:58.204422Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:58.260084Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:05:59.489769Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:59.489852Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:59.535521Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:00.598869Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:00.598947Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:00.657878Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:01.959965Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:01.960048Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:02.011572Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:03.067163Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:03.067256Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:03.154588Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:04.360142Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:04.360214Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:04.410889Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:05.517108Z node 12 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:05.517199Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:05.563095Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:07.136275Z node 13 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:07.136356Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:07.204460Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:08.308776Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:08.308846Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:08.374258Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:09.896028Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:09.896117Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:09.954316Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:11.732438Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:11.732534Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:11.808242Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:13.524378Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:13.524464Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:13.583626Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:15.139834Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:15.139924Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:15.188551Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:16.626465Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:16.626543Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:16.675696Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:17.305629Z node 23 :BS_CONTROLLER ERROR: {BSC26@console_interaction.cpp:113} failed to parse config obtained from Console ErrorReason# ydb/library/yaml_config/yaml_config_parser.cpp:1268: Condition violated: `config.HasDomainsConfig()' Yaml# --- metadata: kind: MainConfig cluster: "" version: 1 config: log_config: cluster_name: cluster1 allowed_labels: test: type: enum values: ? true selector_config: [] 2025-03-28T09:06:18.295319Z node 24 :BS_CONTROLLER ERROR: {BSC26@console_interaction.cpp:113} failed to parse config obtained from Console ErrorReason# ydb/library/yaml_config/yaml_config_parser.cpp:1268: Condition violated: `config.HasDomainsConfig()' Yaml# --- metadata: kind: MainConfig cluster: "" version: 1 config: log_config: cluster_name: cluster1 allowed_labels: test: type: enum values: ? 
true selector_config: [] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain >> KqpSinkTx::OlapLocksAbortOnCommit >> Viewer::SelectStringWithNoBase64Encoding [GOOD] >> Viewer::ServerlessNodesPage >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 [GOOD] >> KqpSinkTx::OlapInvalidateOnError >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 >> KqpLocks::TwoPhaseTx [GOOD] >> KqpLocks::MixedTxFail-useSink >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 >> KqpLocks::Invalidate [GOOD] >> KqpLocks::InvalidateOnCommit >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-28T09:05:33.452583Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:05:33.452681Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-28T09:05:33.481569Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:05:33.505451Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:05:33.507198Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-28T09:05:33.509884Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-28T09:05:33.515917Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-28T09:05:33.519997Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-03-28T09:05:33.538267Z node 1 :PERSQUEUE INFO: new Cookie default|5b823432-363ffa7-836fa90f-b97a5fda_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:05:33.547534Z node 1 :PERSQUEUE INFO: new Cookie default|3235e4c3-5d954b8a-ec4c53cc-ecb7f26a_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:05:33.586162Z node 1 :PERSQUEUE INFO: new Cookie default|20cb47bd-f62b56ca-672e06a1-12ddeb6a_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:245:2057] recipient: [1:99:2134] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:248:2057] recipient: [1:247:2248] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:249:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [1:250:2249] sender: [1:251:2057] recipient: [1:247:2248] 2025-03-28T09:05:33.681514Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:05:33.681589Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:05:33.682197Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:299:2290] 2025-03-28T09:05:33.684666Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:300:2291] 2025-03-28T09:05:33.705856Z node 1 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:05:33.705975Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:299:2290] 2025-03-28T09:05:33.712043Z node 1 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-28T09:05:33.712131Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:300:2291] Leader for TabletID 72057594037927937 is [1:250:2249] sender: [1:332:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-03-28T09:05:34.520885Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:05:34.520973Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] 2025-03-28T09:05:34.540380Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:05:34.541314Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-28T09:05:34.542045Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:183:2196] 2025-03-28T09:05:34.544812Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:183:2196] 2025-03-28T09:05:34.546492Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:184:2197] 2025-03-28T09:05:34.548373Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:184:2197] 2025-03-28T09:05:34.555831Z node 2 :PERSQUEUE INFO: new Cookie default|13c46821-c0f7c63f-e46ef4ad-baf54afc_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:05:34.564036Z node 2 :PERSQUEUE INFO: new Cookie default|6150488a-d0d95809-80bbb3b2-d2d60a83_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:05:34.589584Z node 2 :PERSQUEUE INFO: new Cookie default|502b8df5-ab56d9f9-8f75c980-17a6601a_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvOffsets ! 
Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:242:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:245:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:246:2057] recipient: [2:244:2245] Leader for TabletID 72057594037927937 is [2:247:2246] sender: [2:248:2057] recipient: [2:244:2245] 2025-03-28T09:05:34.647508Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:05:34.647583Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:05:34.648559Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:296:2287] 2025-03-28T09:05:34.650782Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:297:2288] 2025-03-28T09:05:34.696927Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:05:34.697039Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:296:2287] 2025-03-28T09:05:34.703411Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:05:34.703473Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:297:2288] !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! !Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! new actor is[2:247:2246] Leader for TabletID 72057594037927937 is [2:247:2246] sender: [2:349:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:247:2246] sender: [2:352:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:247:2246] sender: [2:354:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:247:2246] sender: [2:356:2057] recipient: [2:355:2322] Leader for TabletID 72057594037927937 is [2:357:2323] sender: [2:358:2057] recipient: [2:355:2322] 2025-03-28T09:05:36.076143Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:05:36.076206Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:05:36.076797Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:408:2366] 2025-03-28T09:05:36.078355Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:409:2367] 2025-03-28T09:05:36.099001Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:05:36.099114Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 4 [2:408:2366] 2025-03-28T09:05:36.104889Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-28T09:05:36.104977Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 4 [2:409:2367] Leader for TabletID 72057594037927937 is [2:357:2323] sender: [2:439:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipien ... n 3 [53:298:2289] Leader for TabletID 72057594037927937 is [53:248:2247] sender: [53:330:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:248:2247]) on event NKikimr::TEvPersQueue::TEvOffsets ! Leader for TabletID 72057594037927937 is [53:248:2247] sender: [53:332:2057] recipient: [53:99:2134] Leader for TabletID 72057594037927937 is [53:248:2247] sender: [53:335:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [53:248:2247] sender: [53:336:2057] recipient: [53:334:2310] Leader for TabletID 72057594037927937 is [53:337:2311] sender: [53:338:2057] recipient: [53:334:2310] 2025-03-28T09:06:19.321457Z node 53 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:06:19.321544Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:06:19.322541Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [53:388:2354] 2025-03-28T09:06:19.326147Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [53:389:2355] 2025-03-28T09:06:19.358641Z node 53 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:06:19.358775Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 4 [53:388:2354] 2025-03-28T09:06:19.365335Z node 53 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:06:19.365415Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 4 [53:389:2355] !Reboot 72057594037927937 (actor [53:248:2247]) rebooted! !Reboot 72057594037927937 (actor [53:248:2247]) tablet resolver refreshed! 
new actor is[53:337:2311] Leader for TabletID 72057594037927937 is [53:337:2311] sender: [53:440:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:103:2057] recipient: [54:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:103:2057] recipient: [54:101:2135] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:108:2057] recipient: [54:101:2135] 2025-03-28T09:06:21.224859Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:06:21.224937Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:149:2057] recipient: [54:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:149:2057] recipient: [54:147:2170] Leader for TabletID 72057594037927938 is [54:153:2174] sender: [54:154:2057] recipient: [54:147:2170] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:179:2057] recipient: [54:14:2061] 2025-03-28T09:06:21.251188Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:06:21.252107Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 54 actor [54:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 54 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 54 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 54 Important: false } 2025-03-28T09:06:21.252674Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:185:2198] 2025-03-28T09:06:21.255545Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [54:185:2198] 2025-03-28T09:06:21.257587Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:186:2199] 2025-03-28T09:06:21.259457Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [54:186:2199] 2025-03-28T09:06:21.271057Z node 54 :PERSQUEUE INFO: new Cookie default|6337ed6-924408c3-71ff8b2d-d67382bf_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:06:21.281956Z node 54 :PERSQUEUE INFO: new Cookie default|ca91b136-4d36abe4-e5b16273-3f0e8fe7_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:06:21.312199Z node 54 :PERSQUEUE INFO: new Cookie default|2df65823-5c351fdc-8e3bfc06-5a9458df_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:245:2057] recipient: [54:99:2134] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:248:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:249:2057] recipient: [54:247:2248] Leader for 
TabletID 72057594037927937 is [54:250:2249] sender: [54:251:2057] recipient: [54:247:2248] 2025-03-28T09:06:21.377411Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:06:21.377481Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:06:21.378410Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:299:2290] 2025-03-28T09:06:21.380645Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:300:2291] 2025-03-28T09:06:21.401717Z node 54 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:06:21.401788Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [54:299:2290] 2025-03-28T09:06:21.404133Z node 54 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:06:21.404197Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [54:300:2291] Leader for TabletID 72057594037927937 is [54:250:2249] sender: [54:330:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:103:2057] recipient: [55:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:103:2057] recipient: [55:101:2135] Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:108:2057] recipient: [55:101:2135] 2025-03-28T09:06:21.871326Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:06:21.871409Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:149:2057] recipient: [55:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [55:149:2057] recipient: [55:147:2170] Leader for TabletID 72057594037927938 is [55:153:2174] sender: [55:154:2057] recipient: [55:147:2170] Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:177:2057] recipient: [55:14:2061] 2025-03-28T09:06:21.896872Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:06:21.897854Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 55 actor [55:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 55 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } 2025-03-28T09:06:21.898577Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:183:2196] 2025-03-28T09:06:21.901882Z node 55 :PERSQUEUE INFO: 
[PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [55:183:2196] 2025-03-28T09:06:21.903917Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:184:2197] 2025-03-28T09:06:21.906592Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [55:184:2197] 2025-03-28T09:06:21.914795Z node 55 :PERSQUEUE INFO: new Cookie default|3666c540-b15b0dff-b54c022d-e9fa8bc3_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:06:21.922276Z node 55 :PERSQUEUE INFO: new Cookie default|a67fe46b-e3446b69-5a3d780f-ca566abc_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:06:21.965438Z node 55 :PERSQUEUE INFO: new Cookie default|e2348d3b-cc6e7015-6f1c95b9-df9b8840_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:243:2057] recipient: [55:99:2134] Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:245:2057] recipient: [55:14:2061] Leader for TabletID 72057594037927937 is [55:107:2139] sender: [55:247:2057] recipient: [55:246:2246] Leader for TabletID 72057594037927937 is [55:248:2247] sender: [55:249:2057] recipient: [55:246:2246] 2025-03-28T09:06:22.029975Z node 55 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:06:22.030039Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:06:22.031174Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [55:297:2288] 2025-03-28T09:06:22.033957Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [55:298:2289] 2025-03-28T09:06:22.068968Z node 55 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:06:22.069057Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [55:297:2288] 2025-03-28T09:06:22.080389Z node 55 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:06:22.080482Z node 55 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [55:298:2289] Leader for TabletID 72057594037927937 is [55:248:2247] sender: [55:330:2057] recipient: [55:14:2061] >> KqpPg::PgAggregate-useSink [GOOD] >> KqpPg::MkqlTerminate >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 >> KqpTx::EmptyTxOnCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] Test command err: 2025-03-28T09:04:04.719629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:04.720229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:04.720292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0010d6/r3tmp/tmpbayZcX/pdisk_1.dat 2025-03-28T09:04:05.110151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:05.149020Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:05.188592Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:04:05.189653Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:04:05.189922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:05.190047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:05.204204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:05.290994Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:04:05.291085Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:04:05.291265Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:04:05.415029Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:04:05.415138Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:04:05.415744Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:04:05.415841Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:04:05.416204Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:04:05.416415Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:04:05.416529Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:04:05.416806Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:04:05.418185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:05.419156Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:04:05.419211Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:04:05.449459Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:04:05.450659Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:04:05.451206Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:04:05.451489Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:05.500540Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:04:05.501379Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:05.501503Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:05.503166Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:04:05.503278Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:04:05.503342Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:04:05.503777Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:05.503935Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:05.504017Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:04:05.519187Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:04:05.551847Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:04:05.552132Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:04:05.552278Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:04:05.552320Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:04:05.552355Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:04:05.552394Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:05.552649Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-28T09:04:05.552696Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:04:05.553037Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:04:05.553128Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:04:05.553578Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:04:05.553621Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:05.553706Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:04:05.553747Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:04:05.553787Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:04:05.553822Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:04:05.553865Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:04:05.553990Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:05.554038Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:05.554089Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:04:05.554178Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:04:05.554215Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:04:05.554345Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:04:05.554617Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:04:05.554706Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:04:05.554808Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:04:05.554855Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:04:05.554893Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:04:05.554925Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:04:05.554959Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:04:05.555253Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:04:05.555292Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:04:05.555345Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:04:05.555400Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-28T09:04:05.555465Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:04:05.555491Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:04:05.555530Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:04:05.555558Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:04:05.555586Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:04:05.557072Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:04:05.557142Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:04:05.570772Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... teState, TraceId: 01jqe015j37r2a7pwrtwc5fn6x, Created new KQP executer: [13:993:2684] isRollback: 1 2025-03-28T09:06:20.981925Z node 13 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=13&id=Y2UzMWNmNi1hNGM3ZmM4Zi0zMzUyNDRmLWFiZDY5YmE=, ActorId: [13:839:2684], ActorState: ExecuteState, TraceId: 01jqe015j37r2a7pwrtwc5fn6x, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 1 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:06:20.982418Z node 13 :KQP_EXECUTER DEBUG: TxId: 281474976715665. Resolved key sets: 0 2025-03-28T09:06:20.982501Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe015j37r2a7pwrtwc5fn6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2UzMWNmNi1hNGM3ZmM4Zi0zMzUyNDRmLWFiZDY5YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:20.982530Z node 13 :KQP_EXECUTER DEBUG: TxId: 281474976715665. Ctx: { TraceId: 01jqe015j37r2a7pwrtwc5fn6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2UzMWNmNi1hNGM3ZmM4Zi0zMzUyNDRmLWFiZDY5YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 1, snapshot: {0, 0} 2025-03-28T09:06:20.982637Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqe015j37r2a7pwrtwc5fn6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2UzMWNmNi1hNGM3ZmM4Zi0zMzUyNDRmLWFiZDY5YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037888, tasks: [], lockTxId: (empty maybe), locks: Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true } Op: Rollback, immediate: 1 2025-03-28T09:06:20.982740Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqe015j37r2a7pwrtwc5fn6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2UzMWNmNi1hNGM3ZmM4Zi0zMzUyNDRmLWFiZDY5YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ExecuteDatashardTransaction traceId.verbosity: 0 2025-03-28T09:06:20.982782Z node 13 :KQP_EXECUTER INFO: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqe015j37r2a7pwrtwc5fn6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2UzMWNmNi1hNGM3ZmM4Zi0zMzUyNDRmLWFiZDY5YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-28T09:06:20.982818Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqe015j37r2a7pwrtwc5fn6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2UzMWNmNi1hNGM3ZmM4Zi0zMzUyNDRmLWFiZDY5YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2025-03-28T09:06:20.982860Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqe015j37r2a7pwrtwc5fn6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2UzMWNmNi1hNGM3ZmM4Zi0zMzUyNDRmLWFiZDY5YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-03-28T09:06:20.982895Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqe015j37r2a7pwrtwc5fn6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2UzMWNmNi1hNGM3ZmM4Zi0zMzUyNDRmLWFiZDY5YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-03-28T09:06:20.983154Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [13:993:2684], Recipient [13:962:2778]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 993 RawX2: 55834577532 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\001 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2025-03-28T09:06:20.983195Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:06:20.983321Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [13:962:2778], Recipient [13:962:2778]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-03-28T09:06:20.983353Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-03-28T09:06:20.983419Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:06:20.983589Z node 13 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-03-28T09:06:20.983666Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2025-03-28T09:06:20.983717Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-28T09:06:20.983747Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2025-03-28T09:06:20.983773Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 
to execution unit BuildAndWaitDependencies 2025-03-28T09:06:20.983799Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:06:20.983835Z node 13 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v400/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2025-03-28T09:06:20.983882Z node 13 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2025-03-28T09:06:20.983912Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-28T09:06:20.983937Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:06:20.983960Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-03-28T09:06:20.983983Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2025-03-28T09:06:20.984050Z node 13 :TX_DATASHARD TRACE: Operation [0:281474976715665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193448 2025-03-28T09:06:20.984188Z node 13 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: true 2025-03-28T09:06:20.984299Z node 13 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-28T09:06:20.984370Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-28T09:06:20.984400Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-03-28T09:06:20.984424Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:06:20.984448Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-03-28T09:06:20.984493Z node 13 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-28T09:06:20.984581Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2025-03-28T09:06:20.984609Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:06:20.984654Z node 13 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:06:20.984677Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:06:20.984712Z node 13 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-28T09:06:20.984732Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:06:20.984748Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-03-28T09:06:20.984794Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:06:20.984816Z node 13 
:TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-03-28T09:06:20.984843Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:06:20.984964Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqe015j37r2a7pwrtwc5fn6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2UzMWNmNi1hNGM3ZmM4Zi0zMzUyNDRmLWFiZDY5YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-03-28T09:06:20.985079Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqe015j37r2a7pwrtwc5fn6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2UzMWNmNi1hNGM3ZmM4Zi0zMzUyNDRmLWFiZDY5YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:06:20.985178Z node 13 :KQP_EXECUTER DEBUG: ActorId: [13:993:2684] TxId: 281474976715665. Ctx: { TraceId: 01jqe015j37r2a7pwrtwc5fn6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2UzMWNmNi1hNGM3ZmM4Zi0zMzUyNDRmLWFiZDY5YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-28T09:06:20.985283Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=Y2UzMWNmNi1hNGM3ZmM4Zi0zMzUyNDRmLWFiZDY5YmE=, ActorId: [13:839:2684], ActorState: CleanupState, TraceId: 01jqe015j37r2a7pwrtwc5fn6x, EndCleanup, isFinal: 0 2025-03-28T09:06:20.985475Z node 13 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=13&id=Y2UzMWNmNi1hNGM3ZmM4Zi0zMzUyNDRmLWFiZDY5YmE=, ActorId: [13:839:2684], ActorState: CleanupState, TraceId: 01jqe015j37r2a7pwrtwc5fn6x, Sent query response back to proxy, proxyRequestId: 8, proxyId: [13:57:2104] 2025-03-28T09:06:21.250970Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1009:2811], Recipient [13:962:2778]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:06:21.251099Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:06:21.251198Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [13:1008:2810], serverId# [13:1009:2811], sessionId# [0:0:0] 2025-03-28T09:06:21.251459Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553224, Sender [13:593:2518], Recipient [13:962:2778]: NKikimr::TEvDataShard::TEvGetOpenTxs >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 >> KqpLocksTricky::TestNoLocksIssue-withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::EmptyTxOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 10844, MsgBus: 17981 2025-03-28T09:06:11.141486Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784147450374732:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:11.141517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016e1/r3tmp/tmpHoYGS9/pdisk_1.dat 2025-03-28T09:06:11.509887Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:11.542351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 10844, node 1 2025-03-28T09:06:11.542445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:11.544199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:11.590426Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:11.590455Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:11.590463Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:11.590575Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17981 TClient is connected to server localhost:17981 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:12.178366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:12.194939Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:12.221276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:12.403463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:12.597709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:12.680032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:06:14.367826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784160335278373:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:14.367978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:15.106677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:15.172732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:15.201110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:15.240354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:15.278893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:15.314481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:15.420653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784164630246190:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:15.420753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:15.421242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784164630246195:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:15.425922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:15.438382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784164630246197:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:06:15.521349Z node 1 :TX_PROXY ERROR: Actor# [1:7486784164630246252:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:16.142484Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784147450374732:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:16.142540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:06:17.412046Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjdiNGI0ZjMtNjRjYjJlNC0xNDUxMjk2YS0yNmIwMWMzZQ==, ActorId: [1:7486784168925213810:2490], ActorState: ReadyState, TraceId: 01jqe0125takrry6tnvg79z19z, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 18075, MsgBus: 11525 2025-03-28T09:06:18.235700Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784174126770274:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:18.235812Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016e1/r3tmp/tmp8G9g9j/pdisk_1.dat 2025-03-28T09:06:18.409640Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:18.415707Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:18.415783Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:18.419617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18075, node 2 2025-03-28T09:06:18.478939Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:18.478959Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:18.478966Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:18.479073Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11525 TClient is connected to server localhost:11525 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:18.956988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:18.965838Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:18.977264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:19.068930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:19.223353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:19.305258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:21.474031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784187011673938:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:21.474168Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:21.515883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:21.592471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:21.626368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:21.697066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:21.727866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:21.798069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:21.849328Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784187011674458:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:21.849416Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:21.849636Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784187011674463:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:21.852933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:21.864388Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784187011674465:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:06:21.935048Z node 2 :TX_PROXY ERROR: Actor# [2:7486784187011674519:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:23.238104Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784174126770274:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:23.238200Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpTx::DeferredEffects [GOOD]
>> KqpTx::CommitStats
>> KqpTx::RollbackByIdle [GOOD]
>> KqpTx::RollbackInvalidated
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32
>> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags [GOOD]
>> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly
|78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut
|78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut
|78.2%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7
>> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain [GOOD]
>> TConsoleTests::TestAlterUnknownTenant
>> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite
|78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut
>> KqpSinkMvcc::OltpMultiSinksNoSinks [GOOD]
>> KqpSinkMvcc::OltpMultiSinks
|78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut
|78.2%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38
>> KqpSinkLocks::InvalidateOnCommit
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2
>> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot [GOOD]
>> DataShardSnapshots::BrokenLockChangesDontLeak
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44
>> KqpLocks::InvalidateOnCommit [GOOD]
>> KqpLocks::MixedTxFail+useSink
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20
>> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14
>> TConsoleTests::TestAlterUnknownTenant [GOOD]
>> TConsoleTests::TestAlterUnknownTenantExtSubdomain
>> KqpLocks::MixedTxFail-useSink [GOOD]
>> KqpLocksTricky::TestNoLocksIssue+withSink
>> KqpPg::MkqlTerminate [GOOD]
>> KqpPg::NoSelectFullScan
>> YdbIndexTable::MultiShardTableTwoIndexes [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54
>> KqpTx::CommitStats [GOOD]
>> KqpTx::RollbackInvalidated [GOOD]
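------- [NOTE] A pattern that recurs in every run above: the workload service looks up the `default` resource pool before it exists (the KQP_WORKLOAD_SERVICE NOT_FOUND warnings), TPoolCreatorActor then creates it via ESchemeOpCreateResourcePool, and a racing second creation attempt is absorbed as "path exist, request accepts it". A hedged YQL sketch of the equivalent explicit DDL (the pool name and settings are assumptions for illustration; the service provisions `default` automatically and its actual settings are not shown in this log):

    -- Sketch only, assuming CREATE RESOURCE POOL syntax; values are illustrative.
    CREATE RESOURCE POOL sample_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );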
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:02:59.703700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:02:59.703792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:59.703860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:02:59.703886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:02:59.703923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:02:59.703942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:02:59.703982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:02:59.704062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:02:59.704334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:02:59.809752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:02:59.809818Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:59.843330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:02:59.843651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:02:59.843855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:02:59.883724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:02:59.883997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:02:59.884684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:02:59.884966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:02:59.896182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:59.897667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:02:59.897745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:02:59.897911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:02:59.897963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:02:59.898007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:02:59.898122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:02:59.923690Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:03:00.121062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:03:00.121373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:00.121614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:03:00.121940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:03:00.122008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:00.127434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:00.127628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:03:00.127870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:00.127966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:03:00.128004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:03:00.128039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:03:00.130839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:00.130904Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at 
schemeshard: 72057594046678944 2025-03-28T09:03:00.130958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:03:00.137418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:00.137525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:00.137592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:00.137642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:03:00.140611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:03:00.146115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:03:00.146346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:03:00.147553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:03:00.147706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:03:00.147768Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:00.148047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:03:00.148102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:03:00.148276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:03:00.148365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:03:00.154044Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:03:00.154150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:03:00.154386Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:03:00.154432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:03:00.154910Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:03:00.154965Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:03:00.155075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:00.155112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:00.155159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:03:00.155197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:00.155239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:03:00.155283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:03:00.155315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:03:00.155362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:03:00.155459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:03:00.155501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:03:00.155536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:03:00.157585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:00.157723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:03:00.157763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:06:28.381557Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:06:28.381639Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:124:2150], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:06:28.381690Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:06:28.798849Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:310:2297]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:06:28.799121Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 2 2025-03-28T09:06:28.799503Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:310:2297], Recipient [3:124:2150]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 2 Generation: 2 Round: 12 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 499 Memory: 124232 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 41 TableOwnerId: 72057594046678944 FollowerId: 0 2025-03-28T09:06:28.799554Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-28T09:06:28.799611Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0499 2025-03-28T09:06:28.799738Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-28T09:06:28.799776Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-28T09:06:28.813692Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:06:28.813780Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:06:28.813904Z node 
3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:124:2150], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:06:28.813941Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:06:28.861050Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T09:06:28.861128Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T09:06:28.861162Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-03-28T09:06:28.861235Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-28T09:06:28.861270Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-28T09:06:28.861386Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 13940 row count 100 2025-03-28T09:06:28.861456Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Simple, is column=0, is olap=0 2025-03-28T09:06:28.861503Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 100, DataSize 13940 2025-03-28T09:06:28.861651Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:06:28.875064Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T09:06:28.875147Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T09:06:28.875179Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:06:28.919036Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:713:2680]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:06:28.919332Z node 3 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 3 2025-03-28T09:06:28.919747Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [3:713:2680], Recipient [3:124:2150]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 3 Generation: 2 Round: 12 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 65 Memory: 124232 } ShardState: 2 UserTablePartOwners: 72075186233409547 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 213 TableOwnerId: 72057594046678944 FollowerId: 0 2025-03-28T09:06:28.919798Z 
node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-28T09:06:28.919851Z node 3 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0065 2025-03-28T09:06:28.919989Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-28T09:06:28.920034Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-28T09:06:28.967039Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue wakeup 2025-03-28T09:06:28.967146Z node 3 :FLAT_TX_SCHEMESHARD INFO: Borrowed compaction timeout for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, in queue# 0 shards, running# 0 shards at schemeshard 72057594046678944 2025-03-28T09:06:28.967205Z node 3 :FLAT_TX_SCHEMESHARD INFO: RunBorrowedCompaction for pathId# [OwnerId: 72057594046678944, LocalPathId: 3], datashard# 72075186233409547, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046678944 2025-03-28T09:06:28.967316Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 3 seconds 2025-03-28T09:06:28.967355Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-03-28T09:06:28.967543Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T09:06:28.967592Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T09:06:28.967624Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2025-03-28T09:06:28.967691Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-28T09:06:28.967723Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-28T09:06:28.967841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 13940 row count 100 2025-03-28T09:06:28.967890Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=CopyTable, is column=0, is olap=0 2025-03-28T09:06:28.967923Z node 3 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 3: RowCount 100, DataSize 13940, with borrowed parts 2025-03-28T09:06:28.968019Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Borrowed compaction 
enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2025-03-28T09:06:28.968082Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:06:28.979052Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T09:06:28.979151Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T09:06:28.979191Z node 3 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:06:29.247191Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:06:29.247288Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:06:29.247418Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:124:2150], Recipient [3:124:2150]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:06:29.247462Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33
>> KqpSinkMvcc::OltpNamedStatementNoSink [GOOD]
>> KqpSinkMvcc::OltpNamedStatement
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackInvalidated [GOOD]
Test command err: Trying to start YDB, gRPC: 61779, MsgBus: 32464 2025-03-28T09:06:19.406467Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784180142901888:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:19.413501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00167d/r3tmp/tmpT18KsV/pdisk_1.dat 2025-03-28T09:06:19.845721Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:19.862365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:19.862498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:19.872216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61779, node 1 2025-03-28T09:06:19.964434Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:19.964452Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:19.964458Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:19.964843Z node 1 :NET_CLASSIFIER ERROR: got bad distributable
configuration TClient is connected to server localhost:32464 TClient is connected to server localhost:32464 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:20.560788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:20.575392Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:20.585011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:20.732946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:20.882190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:20.969533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:22.859083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784193027805436:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:22.859234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:23.197458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:23.233997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:23.307058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:23.379437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:23.429180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:23.489973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:23.547877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784197322773252:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:23.547968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:23.548156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784197322773257:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:23.552824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:23.570631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784197322773259:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:06:23.640779Z node 1 :TX_PROXY ERROR: Actor# [1:7486784197322773312:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:24.404429Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784180142901888:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:24.404572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:06:24.822511Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2Q1ZmFhN2MtZDBiM2NjZTItNDE0ZGUyYWUtZDMzOWQyZQ==, ActorId: [1:7486784201617740870:2489], ActorState: ReadyState, TraceId: 01jqe019da7t610ez0baxjczjc, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 1344, MsgBus: 8948 2025-03-28T09:06:25.760549Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784204845249554:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:25.760653Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00167d/r3tmp/tmpj3DWft/pdisk_1.dat 2025-03-28T09:06:25.912132Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:25.928033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:25.928118Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:25.929292Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1344, node 2 2025-03-28T09:06:26.048487Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:26.048509Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:26.048516Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:26.048632Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8948 TClient is connected to server localhost:8948 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:26.525206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:26.540534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:26.599619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:26.761167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:26.865194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:29.048490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784222025120515:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:29.048599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:29.108402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:29.143964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:29.187129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:29.228528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:29.266379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:29.319908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:29.431008Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784222025121031:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:29.431116Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:29.432159Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784222025121036:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:29.437012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:29.449221Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784222025121038:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:06:29.531996Z node 2 :TX_PROXY ERROR: Actor# [2:7486784222025121093:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:30.767408Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784204845249554:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:30.767492Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:06:30.819945Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486784226320088675:2499], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:06:30.821586Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjM3ZDg3MGEtMmY1MzljZjItMzJkOGUyOGMtMjNhMzA4ZDg=, ActorId: [2:7486784226320088648:2488], ActorState: ExecuteState, TraceId: 01jqe01f7pejhsxrq2ggnbh9re, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 01jqe01f732s0q8b403wftfa05 2025-03-28T09:06:30.831569Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjM3ZDg3MGEtMmY1MzljZjItMzJkOGUyOGMtMjNhMzA4ZDg=, ActorId: [2:7486784226320088648:2488], ActorState: ReadyState, TraceId: 01jqe01f9fcg5fec8vn2jb4kp1, Create QueryResponse for error on request, msg:
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitStats [GOOD]
Test command err: Trying to start YDB, gRPC: 63508, MsgBus: 21851 2025-03-28T09:06:18.859693Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784175317573438:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:18.859866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001689/r3tmp/tmpJ0n2JF/pdisk_1.dat 2025-03-28T09:06:19.312917Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63508, node 1 2025-03-28T09:06:19.374175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:19.374296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:19.376024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:19.391121Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:19.391139Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:19.391144Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:19.391237Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21851 TClient is connected to server localhost:21851 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
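------- [NOTE] The SCHEME_ERROR logged above for KqpTx::RollbackInvalidated ("Cannot find table 'db.[/Root/BadTable]'", code 2003) is the expected outcome of that test: it deliberately queries a table that was never created, the query fails at compile time, and the open transaction is invalidated and removed ("remove tx with tx_id"). Reconstructed from the error text (the exact test query is not shown in this log), the failing statement is roughly:

    -- Approximate reconstruction; compiles to a KiReadTable over a missing path
    -- and is rejected with SCHEME_ERROR before execution.
    SELECT * FROM `/Root/BadTable`;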
2025-03-28T09:06:19.946946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:19.967330Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:19.975902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:20.130590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:20.330040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:20.432121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:22.267738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784192497444390:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:22.267872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:22.648204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:22.695689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:22.738380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:22.823006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:22.901463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:22.974036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:23.064597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784196792412215:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:23.064771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:23.065064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784196792412222:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:23.069276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:23.084352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784196792412224:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:06:23.166947Z node 1 :TX_PROXY ERROR: Actor# [1:7486784196792412278:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:23.855282Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784175317573438:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:23.855487Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21143, MsgBus: 8184 2025-03-28T09:06:25.724393Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784204528031307:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:25.724876Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001689/r3tmp/tmpkWEwrT/pdisk_1.dat 2025-03-28T09:06:25.897678Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:25.897760Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:25.900985Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:25.916689Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21143, node 2 2025-03-28T09:06:26.011350Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:26.011377Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:26.011384Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:26.011503Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8184 TClient is connected to server localhost:8184 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-28T09:06:26.572082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:26.600031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:06:26.705336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:26.874915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:26.941199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:29.069357Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784221707902244:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:29.069440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:29.117927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:29.189872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:29.225998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:29.294582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:29.329849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:29.408245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:29.468264Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784221707902763:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:29.468338Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:29.468707Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784221707902768:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:29.473222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:29.484244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784221707902770:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:06:29.567332Z node 2 :TX_PROXY ERROR: Actor# [2:7486784221707902824:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |78.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut >> TConsoleTests::TestAlterUnknownTenantExtSubdomain [GOOD] >> TConsoleTests::TestAlterBorrowedStorage >> KqpTx::RollbackTx >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 >> TKqpScanData::EmptyColumns >> TComputeScheduler::TTotalLimits [GOOD] >> TKqpScanData::EmptyColumns [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD] >> KqpPg::TableDeleteAllData+useSink [GOOD] >> KqpPg::TableDeleteAllData-useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::TTotalLimits [GOOD] Test command err: 1610 1600 1610 1600 |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumns [GOOD] >> Viewer::ServerlessNodesPage [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 [GOOD] >> Viewer::ServerlessWithExclusiveNodes >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 25156, MsgBus: 15288 2025-03-28T09:01:56.098004Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783044511494881:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:56.342594Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018dc/r3tmp/tmpWKdFcD/pdisk_1.dat 2025-03-28T09:02:02.592612Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783044511494881:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:02.603700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:04.982899Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783061691364249:2290];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:04.982946Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:05.765545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:05.781952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:06.260916Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:09.114841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:09.114868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:09.114878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:10.384189Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:10.385358Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:10.385767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:11.432521Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:11.563367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:11.563395Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:11.563418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:11.607673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:11.608476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:11.649399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25156, node 1 2025-03-28T09:02:14.163706Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:14.163724Z node 1 :NET_CLASSIFIER WARN: will try 
to initialize from file: (empty maybe) 2025-03-28T09:02:14.163729Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:14.163834Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15288 TClient is connected to server localhost:15288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:16.685682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:16.719072Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:16.733889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:16.993238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.222070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.302177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:19.063419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783147590711721:2425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:19.063537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:19.348161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.428174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.473643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.538872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.578058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.624645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.717918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783147590712240:2475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:19.717968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783147590712245:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:19.718009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:19.721507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:19.737590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783147590712247:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:02:19.817742Z node 1 :TX_PROXY ERROR: Actor# [1:7486783147590712303:3475] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:21.198462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:02:23.099519Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jqdzsxb734c3gbsdrzz0egqg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBkYWE5ZTQtZjY2NjE5ODMtZjgxYWU1Ni1mYjljYWI0MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:23.138103Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqdzsxbda8593c0mjdbmpre0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2ViYmYxYi0xYTdm ... sion/3?node_id=2&id=NmE0MjZmNS01ZDNmYmUwZi03OTRjYzdiYi03YzBhMWYzNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:25.986088Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721627. Ctx: { TraceId: 01jqe01ah82xx8e9rzjk0b65vb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTlhMGMyNWItMmNhMTkzZi0yMGExNzZhOS0zZTQ2NDg3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:25.986282Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721628. Ctx: { TraceId: 01jqe01ah89vrqaaj91kt8mb1s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmE0MjZmNS01ZDNmYmUwZi03OTRjYzdiYi03YzBhMWYzNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:25.992284Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721629. Ctx: { TraceId: 01jqe01ah89vrqaaj91kt8mb1s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmE0MjZmNS01ZDNmYmUwZi03OTRjYzdiYi03YzBhMWYzNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:25.995065Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721630. Ctx: { TraceId: 01jqe01ahr0bwbdj2kj048ry78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjcwMDBmMjUtNzYwOWRmMGUtOGFlNDRiY2EtYmRkN2MyMTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:25.996145Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721631. Ctx: { TraceId: 01jqe01ahre9nbqcaaxjx56qzz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFkNzNjYjAtNGE0ZTM4ZWYtZmM5ODE1YTEtMzY4MzdkODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.007989Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721632. Ctx: { TraceId: 01jqe01ah82xx8e9rzjk0b65vb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTlhMGMyNWItMmNhMTkzZi0yMGExNzZhOS0zZTQ2NDg3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.014529Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721633. 
Ctx: { TraceId: 01jqe01ah89vrqaaj91kt8mb1s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmE0MjZmNS01ZDNmYmUwZi03OTRjYzdiYi03YzBhMWYzNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.028147Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721634. Ctx: { TraceId: 01jqe01ah82xx8e9rzjk0b65vb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTlhMGMyNWItMmNhMTkzZi0yMGExNzZhOS0zZTQ2NDg3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.034509Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721635. Ctx: { TraceId: 01jqe01ahre9nbqcaaxjx56qzz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFkNzNjYjAtNGE0ZTM4ZWYtZmM5ODE1YTEtMzY4MzdkODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.041498Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721636. Ctx: { TraceId: 01jqe01ahre9nbqcaaxjx56qzz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFkNzNjYjAtNGE0ZTM4ZWYtZmM5ODE1YTEtMzY4MzdkODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.041648Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721637. Ctx: { TraceId: 01jqe01ahr0bwbdj2kj048ry78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjcwMDBmMjUtNzYwOWRmMGUtOGFlNDRiY2EtYmRkN2MyMTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.044086Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721638. Ctx: { TraceId: 01jqe01ak79wrbmvk7qt2a4n9s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTk3YjI5NGYtOTE5OTIyM2UtMWEzNTYyMTYtNjI3ZDk2ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.059264Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721639. Ctx: { TraceId: 01jqe01ahre9nbqcaaxjx56qzz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFkNzNjYjAtNGE0ZTM4ZWYtZmM5ODE1YTEtMzY4MzdkODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.062096Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721641. Ctx: { TraceId: 01jqe01ak79wrbmvk7qt2a4n9s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTk3YjI5NGYtOTE5OTIyM2UtMWEzNTYyMTYtNjI3ZDk2ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.062433Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721640. Ctx: { TraceId: 01jqe01ahr0bwbdj2kj048ry78, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjcwMDBmMjUtNzYwOWRmMGUtOGFlNDRiY2EtYmRkN2MyMTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.067277Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721642. Ctx: { TraceId: 01jqe01am10kf8nad2tfdayeyp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmE0MjZmNS01ZDNmYmUwZi03OTRjYzdiYi03YzBhMWYzNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.071466Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721643. 
Ctx: { TraceId: 01jqe01akv5fntgda522vepht1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDcwM2MwNDctM2I4MDA1NjYtZGMwMmVhN2UtM2VlNTllYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.085188Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721644. Ctx: { TraceId: 01jqe01ak79wrbmvk7qt2a4n9s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTk3YjI5NGYtOTE5OTIyM2UtMWEzNTYyMTYtNjI3ZDk2ZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.087425Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721645. Ctx: { TraceId: 01jqe01am10kf8nad2tfdayeyp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmE0MjZmNS01ZDNmYmUwZi03OTRjYzdiYi03YzBhMWYzNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.087939Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721646. Ctx: { TraceId: 01jqe01akv5fntgda522vepht1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDcwM2MwNDctM2I4MDA1NjYtZGMwMmVhN2UtM2VlNTllYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.111564Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721647. Ctx: { TraceId: 01jqe01akv5fntgda522vepht1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDcwM2MwNDctM2I4MDA1NjYtZGMwMmVhN2UtM2VlNTllYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-03-28T09:06:26.119855Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721648. Ctx: { TraceId: 01jqe01ap19f1mjthsggst5pqw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTlhMGMyNWItMmNhMTkzZi0yMGExNzZhOS0zZTQ2NDg3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.123301Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721649. Ctx: { TraceId: 01jqe01ap60ken5zm64dnnb2b3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmQ0M2ZiYWMtZTBiMWFjODctNGY0YWVjY2MtMzZlM2QwNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.127294Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721650. Ctx: { TraceId: 01jqe01ap19f1mjthsggst5pqw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTlhMGMyNWItMmNhMTkzZi0yMGExNzZhOS0zZTQ2NDg3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.131334Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721651. Ctx: { TraceId: 01jqe01ap60ken5zm64dnnb2b3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmQ0M2ZiYWMtZTBiMWFjODctNGY0YWVjY2MtMzZlM2QwNmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.131589Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721652. Ctx: { TraceId: 01jqe01ap6bwa4ac6mjt689ad7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFkNzNjYjAtNGE0ZTM4ZWYtZmM5ODE1YTEtMzY4MzdkODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.135818Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721653. 
Ctx: { TraceId: 01jqe01ap19f1mjthsggst5pqw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTlhMGMyNWItMmNhMTkzZi0yMGExNzZhOS0zZTQ2NDg3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-03-28T09:06:26.144160Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721654. Ctx: { TraceId: 01jqe01ap6bwa4ac6mjt689ad7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFkNzNjYjAtNGE0ZTM4ZWYtZmM5ODE1YTEtMzY4MzdkODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.149105Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721655. Ctx: { TraceId: 01jqe01apke23pn0cams920s64, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmE0MjZmNS01ZDNmYmUwZi03OTRjYzdiYi03YzBhMWYzNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.150988Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721656. Ctx: { TraceId: 01jqe01ap19f1mjthsggst5pqw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTlhMGMyNWItMmNhMTkzZi0yMGExNzZhOS0zZTQ2NDg3YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-03-28T09:06:26.162239Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721657. Ctx: { TraceId: 01jqe01apke23pn0cams920s64, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmE0MjZmNS01ZDNmYmUwZi03OTRjYzdiYi03YzBhMWYzNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.168824Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721658. Ctx: { TraceId: 01jqe01apke23pn0cams920s64, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmE0MjZmNS01ZDNmYmUwZi03OTRjYzdiYi03YzBhMWYzNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.185319Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721659. Ctx: { TraceId: 01jqe01apke23pn0cams920s64, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmE0MjZmNS01ZDNmYmUwZi03OTRjYzdiYi03YzBhMWYzNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-03-28T09:06:26.199076Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721660. Ctx: { TraceId: 01jqe01ap6bwa4ac6mjt689ad7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFkNzNjYjAtNGE0ZTM4ZWYtZmM5ODE1YTEtMzY4MzdkODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:26.214929Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976721661. Ctx: { TraceId: 01jqe01ap6bwa4ac6mjt689ad7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTFkNzNjYjAtNGE0ZTM4ZWYtZmM5ODE1YTEtMzY4MzdkODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 29136, MsgBus: 29914 2025-03-28T09:06:18.262011Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784177195331564:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:18.263644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016a1/r3tmp/tmpHIZwoK/pdisk_1.dat 2025-03-28T09:06:18.764930Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:18.768937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:18.769044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:18.772579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29136, node 1 2025-03-28T09:06:18.893247Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:18.893270Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:18.893277Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:18.893390Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29914 TClient is connected to server localhost:29914 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:19.396910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:06:19.418705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:06:19.568886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:19.728597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:19.805326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:21.572037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784190080235237:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:21.572183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:21.856676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:21.888318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:21.923613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:21.960269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:21.991371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:22.042051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:22.117517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784194375203043:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:22.117678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:22.117948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784194375203048:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:22.122205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:22.131959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784194375203050:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:06:22.201784Z node 1 :TX_PROXY ERROR: Actor# [1:7486784194375203105:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:23.259290Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784177195331564:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:23.259366Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30171, MsgBus: 26925 2025-03-28T09:06:24.929667Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784200384177647:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:24.929759Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016a1/r3tmp/tmp6fYR1r/pdisk_1.dat 2025-03-28T09:06:25.100469Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:25.138739Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:25.138834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:25.180629Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30171, node 2 2025-03-28T09:06:25.283233Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:25.283255Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:25.283262Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:25.283365Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26925 TClient is connected to server localhost:26925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:06:25.720536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:25.729303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:25.805329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:25.964841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:26.039093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:28.375386Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784217564048594:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:28.375493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:28.414072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:28.495876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:28.536756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:28.614457Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:28.648454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:28.726260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:28.797063Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784217564049110:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:28.797173Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:28.797493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784217564049115:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:28.800629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:28.810915Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784217564049117:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:06:28.911466Z node 2 :TX_PROXY ERROR: Actor# [2:7486784217564049173:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:29.930878Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784200384177647:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:29.930942Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:06:29.966476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:06:30.028509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:06:30.076986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |78.3%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage >> TKqpScanData::ArrowToUnboxedValueConverter >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 >> KqpLocks::MixedTxFail+useSink [GOOD] >> KqpLocksTricky::TestNoLocksIssue-withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 |78.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] Test command err: 2025-03-28T09:04:06.959091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:06.959571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:06.959648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0010cb/r3tmp/tmpEpOJVk/pdisk_1.dat 2025-03-28T09:04:07.523765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:07.656337Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:07.739283Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:04:07.740346Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:04:07.740564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:07.740675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:07.755818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:07.850355Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:04:07.850420Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:04:07.850553Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:04:08.506202Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:04:08.506312Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:04:08.506871Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:04:08.506958Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:04:08.507222Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:04:08.507407Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:04:08.507502Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:04:08.507868Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:04:08.509416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:08.510567Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:04:08.510645Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:04:08.597083Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:04:08.598267Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:04:08.598746Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:04:08.599062Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:04:08.675281Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:04:08.676160Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:04:08.676296Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:04:08.678056Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:04:08.678158Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:04:08.678233Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:04:08.678669Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:04:08.678996Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:04:08.679107Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:04:08.679614Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:04:08.760484Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:04:08.760695Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:04:08.760824Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:04:08.760889Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:04:08.760932Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:04:08.760968Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:04:08.761215Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-28T09:04:08.761270Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:04:08.761584Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:04:08.761696Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:04:08.762182Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:04:08.762232Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:04:08.762275Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:04:08.762309Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:04:08.762342Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:04:08.762380Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:04:08.762450Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:04:08.762575Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:08.762608Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:04:08.762646Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:04:08.767337Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:04:08.767429Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:04:08.767588Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:04:08.767820Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:04:08.767881Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:04:08.768012Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:04:08.768108Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:04:08.768170Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:04:08.768210Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:04:08.768246Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:04:08.768563Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:04:08.768602Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:04:08.768638Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:04:08.768670Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-28T09:04:08.768750Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:04:08.774854Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:04:08.774919Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:04:08.774962Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:04:08.774993Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:04:08.776118Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:04:08.776177Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:04:08.776213Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:04:08.776276Z node 1 :TX_DATASHARD TRACE: Prop ... rd::TEvProposeTransaction 2025-03-28T09:06:35.232096Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [16:689:2579], Recipient [16:689:2579]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-03-28T09:06:35.232144Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-03-28T09:06:35.232273Z node 16 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:06:35.233010Z node 16 :TX_DATASHARD TRACE: TxId: 281474976715663, shard 72075186224037888, task: 1, meta: Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\003\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } 2025-03-28T09:06:35.233146Z node 16 :TX_DATASHARD TRACE: Table /Root/table, shard: 72075186224037888, task: 1, write point (Uint32 : 3) 2025-03-28T09:06:35.233260Z node 16 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 3) table: [72057594046644480:2:1] 2025-03-28T09:06:35.233829Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037888 on unit CheckDataTx 2025-03-28T09:06:35.233936Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037888 is Executed 2025-03-28T09:06:35.234015Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit CheckDataTx 2025-03-28T09:06:35.234083Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:06:35.234147Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:06:35.234231Z node 16 :TX_DATASHARD TRACE: Activated operation [0:281474976715663] at 72075186224037888 2025-03-28T09:06:35.234292Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037888 is Executed 2025-03-28T09:06:35.234315Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit 
BuildAndWaitDependencies 2025-03-28T09:06:35.234359Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-03-28T09:06:35.234400Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037888 on unit ExecuteKqpDataTx 2025-03-28T09:06:35.234473Z node 16 :TX_DATASHARD TRACE: Operation [0:281474976715663] (execute_kqp_data_tx) at 72075186224037888 aborting because it cannot acquire locks 2025-03-28T09:06:35.234548Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037888 is Executed 2025-03-28T09:06:35.234576Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-03-28T09:06:35.234599Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:06:35.234622Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037888 on unit FinishPropose 2025-03-28T09:06:35.234651Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037888 is DelayComplete 2025-03-28T09:06:35.234722Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:06:35.234791Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:06:35.234845Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:06:35.234899Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037888 is Executed 2025-03-28T09:06:35.234924Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:06:35.234964Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037888 has finished 2025-03-28T09:06:35.235075Z node 16 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:06:35.235143Z node 16 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037888 on unit FinishPropose 2025-03-28T09:06:35.235227Z node 16 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: LOCKS_BROKEN 2025-03-28T09:06:35.235368Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:06:35.236211Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=ZmE0NWNlZTctODkxNTM0MTEtZGJhMzMyOTQtNTNjNTI0ZTM=, ActorId: [16:837:2682], ActorState: ExecuteState, TraceId: 01jqe01k8d7b5pkd93989t7pk5, Create QueryResponse for error on request, msg: 2025-03-28T09:06:35.237500Z node 16 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe01k8d7b5pkd93989t7pk5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=ZmE0NWNlZTctODkxNTM0MTEtZGJhMzMyOTQtNTNjNTI0ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:06:35.237926Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [16:895:2682], Recipient [16:689:2579]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 895 RawX2: 68719479418 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715664 ExecLevel: 0 Flags: 8 2025-03-28T09:06:35.237969Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:06:35.238083Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [16:689:2579], Recipient [16:689:2579]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-03-28T09:06:35.238113Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-03-28T09:06:35.238169Z node 16 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:06:35.291109Z node 16 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-03-28T09:06:35.291257Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CheckDataTx 2025-03-28T09:06:35.291299Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-03-28T09:06:35.291323Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2025-03-28T09:06:35.291347Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:06:35.291369Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:06:35.291414Z node 16 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v400/0 IncompleteEdge# v{min} UnprotectedReadEdge# v400/18446744073709551615 ImmediateWriteEdge# v401/0 ImmediateWriteEdgeReplied# v401/0 2025-03-28T09:06:35.291479Z node 16 :TX_DATASHARD TRACE: Activated operation [0:281474976715664] at 72075186224037888 2025-03-28T09:06:35.291504Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-03-28T09:06:35.291521Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:06:35.291538Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-03-28T09:06:35.291554Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2025-03-28T09:06:35.291632Z node 16 :TX_DATASHARD TRACE: Operation [0:281474976715664] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193448 2025-03-28T09:06:35.291766Z node 16 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2025-03-28T09:06:35.291891Z node 16 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-28T09:06:35.291972Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-03-28T09:06:35.292004Z 
node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-03-28T09:06:35.292031Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:06:35.292055Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2025-03-28T09:06:35.292108Z node 16 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-28T09:06:35.292238Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2025-03-28T09:06:35.292284Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:06:35.292317Z node 16 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:06:35.292344Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:06:35.292388Z node 16 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-03-28T09:06:35.292414Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:06:35.292442Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037888 has finished 2025-03-28T09:06:35.292526Z node 16 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:06:35.292557Z node 16 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2025-03-28T09:06:35.292594Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:06:35.294217Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [16:61:2108], Recipient [16:689:2579]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 16 Status: STATUS_NOT_FOUND 2025-03-28T09:06:35.298729Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [16:904:2735], Recipient [16:689:2579]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:06:35.298836Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:06:35.298924Z node 16 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [16:903:2734], serverId# [16:904:2735], sessionId# [0:0:0] 2025-03-28T09:06:35.299186Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553224, Sender [16:593:2518], Recipient [16:689:2579]: NKikimr::TEvDataShard::TEvGetOpenTxs >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 >> TComputeScheduler::QueryLimits [GOOD] >> TKqpScanData::DifferentNumberOfInputAndResultColumns >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::MixedTxFail+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 20703, MsgBus: 27261 2025-03-28T09:06:15.892993Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784161413332139:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:15.893668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016a4/r3tmp/tmpgw0vUC/pdisk_1.dat 2025-03-28T09:06:16.356827Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:16.359397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:16.359453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:16.362292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20703, node 1 2025-03-28T09:06:16.435299Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:16.435323Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:16.435343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:16.435455Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27261 TClient is connected to server localhost:27261 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:17.040479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:17.070143Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:17.082593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:17.276071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:06:17.448227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:06:17.514056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:19.284054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784178593203094:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:19.284163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:19.624121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:19.655261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:19.685693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:19.720858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:19.753594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:19.790014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:19.851514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784178593203605:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:19.851586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:19.854810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784178593203610:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:19.859944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:19.875784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784178593203612:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:06:19.935402Z node 1 :TX_PROXY ERROR: Actor# [1:7486784178593203667:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:20.893926Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784161413332139:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:20.893998Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:06:21.588896Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTdlNTE2ODQtZWEwNDdjOWMtZTNjN2JhZDItM2ZkMGY3NTc=, ActorId: [1:7486784187183138520:2489], ActorState: ExecuteState, TraceId: 01jqe01661ag9vq6pcsjhf4s9d, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 26292, MsgBus: 1430 2025-03-28T09:06:22.629691Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784194470987380:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:22.629741Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016a4/r3tmp/tmpyAHUCz/pdisk_1.dat 2025-03-28T09:06:22.767701Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:22.803946Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:22.804028Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 26292, node 2 2025-03-28T09:06:22.809634Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:22.850472Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:22.850492Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:22.850497Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:22.850583Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1430 TClient is connected to server localhost:1430 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:23.278795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:23.295993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:23.392743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:23.558 ... 
74976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.639824Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.642480Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.647298Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.647456Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.653758Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.659959Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.666841Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.669527Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.673566Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.675408Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.680112Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.681830Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.685816Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.688078Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.691116Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.694554Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.696859Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.701457Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.702391Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.709836Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.710290Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.718766Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.718879Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.725617Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.729029Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037952;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.732409Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.735129Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.738414Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.741173Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.745198Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:35.747301Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; 2025-03-28T09:06:36.059254Z 
node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-28T09:06:36.060353Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-28T09:06:36.061015Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-28T09:06:36.061345Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;self_id=[3:7486784239297736223:2389];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037907;local_tx_no=13;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037947;receive=72075186224037899; 2025-03-28T09:06:36.061816Z node 3 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037947;local_tx_no=12;method=complete;tx_info=;fline=secondary.h:126;event=duplication_tablet_broken_flag;txId=281474976715664; 2025-03-28T09:06:36.062341Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;self_id=[3:7486784239297736223:2389];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037907;local_tx_no=15;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037947; 2025-03-28T09:06:36.062728Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-28T09:06:36.065267Z node 3 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037903;local_tx_no=12;method=complete;tx_info=;fline=secondary.h:126;event=duplication_tablet_broken_flag;txId=281474976715664; 2025-03-28T09:06:36.467795Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715668; 2025-03-28T09:06:36.468850Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486784252182640366:2820], SessionActorId: [3:7486784252182640308:2820], Got LOCKS BROKEN for table. ShardID=72075186224037888, Sink=[3:7486784252182640366:2820].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-28T09:06:36.469523Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486784252182640366:2820], SessionActorId: [3:7486784252182640308:2820], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/DataShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7486784252182640308:2820]. isRollback=0 2025-03-28T09:06:36.469706Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGVlMmU2NmYtOThjMGY3ZjYtNmZlYzBiYy0yMjBmNGZiZQ==, ActorId: [3:7486784252182640308:2820], ActorState: ExecuteState, TraceId: 01jqe01mq40knv1dafkvnv8jra, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7486784252182640397:2820] from: [3:7486784252182640366:2820] 2025-03-28T09:06:36.469820Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7486784252182640397:2820] TxId: 281474976715668. Ctx: { TraceId: 01jqe01mq40knv1dafkvnv8jra, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGVlMmU2NmYtOThjMGY3ZjYtNmZlYzBiYy0yMjBmNGZiZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/DataShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-28T09:06:36.470005Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGVlMmU2NmYtOThjMGY3ZjYtNmZlYzBiYy0yMjBmNGZiZQ==, ActorId: [3:7486784252182640308:2820], ActorState: ExecuteState, TraceId: 01jqe01mq40knv1dafkvnv8jra, Create QueryResponse for error on request, msg: 2025-03-28T09:06:36.470123Z node 3 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037936;self_id=[3:7486784239297736232:2392];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037936;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:06:36.470849Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=281474976715668;tx_id=281474976715668;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715668; 2025-03-28T09:06:36.472422Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Complete;fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976715668; >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::QueryLimits [GOOD] Test command err: 800 800 800 800 |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] >> TConsoleTests::TestAlterBorrowedStorage [GOOD] >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] Test command err: Trying to start YDB, gRPC: 20150, MsgBus: 12030 2025-03-28T08:59:02.612825Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782301933783325:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:02.612954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001472/r3tmp/tmp2V7U4k/pdisk_1.dat 2025-03-28T08:59:07.631237Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782301933783325:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:07.631289Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:07.739349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:09.894431Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:09.914835Z node 1 :METADATA_PROVIDER 
ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:11.242944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:11.466770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:12.026648Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:12.029763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:12.029843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:12.050968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20150, node 1 2025-03-28T08:59:12.520212Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:12.520228Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:12.520234Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:12.520391Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12030 TClient is connected to server localhost:12030 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:23.017923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.161905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.984935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T08:59:24.290225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:24.290245Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:25.368173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:26.013672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:29.260360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782417897902048:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:29.260469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:30.120014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.457335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.641213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.743373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.913117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:31.271864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:31.705919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782426487837197:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:31.705980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:31.706286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782426487837202:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:31.726158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:31.817066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782426487837204:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T08:59:31.932764Z node 1 :TX_PROXY ERROR: Actor# [1:7486782426487837269:3532] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:37.757005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3339, MsgBus: 7596 2025-03-28T08:59:55.897375Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486782531180935856:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001472/r3tmp/tmp5wvh10/pdisk_1.dat 2025-03-28T08:59:55.927807Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T08:59:56.113318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:56.113387Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:56.120159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T08:59:56.134838Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3339, node 2 2025-03-28T08:59:56.813558Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:56.813576Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:56.813582Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:56.813698Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7596 2025-03-28T09:00:00.674050Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486782531180935856:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:00:00.674101Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:7596 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version ... 
WQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00jxg8jd661e80d407xen, Create QueryResponse for error on request, msg: 2025-03-28T09:06:02.651900Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00kb8bgjav362drgdqv83, Create QueryResponse for error on request, msg: 2025-03-28T09:06:03.143018Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00ktg5e1dmd8vty4e864h, Create QueryResponse for error on request, msg: 2025-03-28T09:06:04.023071Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00mny5m62jbrtbw5efwf7, Create QueryResponse for error on request, msg: 2025-03-28T09:06:04.477776Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00n45abjv948pycc6169v, Create QueryResponse for error on request, msg: 2025-03-28T09:06:04.972407Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00nkj3tk8d9h68wg8hyhr, Create QueryResponse for error on request, msg: 2025-03-28T09:06:05.890132Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00pg78cwvqxckk44tr5jy, Create QueryResponse for error on request, msg: 2025-03-28T09:06:06.348344Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00pyg83s811x1ekm1jpz3, Create QueryResponse for error on request, msg: 2025-03-28T09:06:06.811217Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00qcy92d59aggew68raz2, Create QueryResponse for error on request, msg: 2025-03-28T09:06:07.559276Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00r48991ktrb8csnfbgs9, Create QueryResponse for error on request, msg: 2025-03-28T09:06:08.342071Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00rwn8df0jfhrs8c0jh2s, Create QueryResponse for error on request, msg: 2025-03-28T09:06:09.062461Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00sk36w8n39ks27x4y7vs, Create QueryResponse for error on request, msg: 2025-03-28T09:06:09.831217Z node 4 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00tazcfzfrfjdde69zdzc, Create QueryResponse for error on request, msg: 2025-03-28T09:06:10.590941Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00v2n7jhtc9tnkyandneh, Create QueryResponse for error on request, msg: 2025-03-28T09:06:11.524712Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00vzwddb5snj9nqxagr0q, Create QueryResponse for error on request, msg: 2025-03-28T09:06:12.001309Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00weq43ss8ekg9fzb6s1k, Create QueryResponse for error on request, msg: 2025-03-28T09:06:12.474936Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00wxcfwxkxnkjpt3ajdcj, Create QueryResponse for error on request, msg: 2025-03-28T09:06:13.421684Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00xv09dvfbeermyyyp37r, Create QueryResponse for error on request, msg: 2025-03-28T09:06:14.314898Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00yptcx2njbj62vj4qedt, Create QueryResponse for error on request, msg: 2025-03-28T09:06:15.107990Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe00zfn2rbg59w5xctrgf11, Create QueryResponse for error on request, msg: 2025-03-28T09:06:15.969320Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe010ae9js3hxc5m97gkxmr, Create QueryResponse for error on request, msg: 2025-03-28T09:06:16.772369Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe0113f0etnk89mwfgdwvdw, Create QueryResponse for error on request, msg: 2025-03-28T09:06:17.259082Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe011jk5c2z7x6qzvw0z22h, Create QueryResponse for error on request, msg: 2025-03-28T09:06:17.751004Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe0121y41vrcwqxa9jbzg6k, Create QueryResponse for error on request, msg: 2025-03-28T09:06:18.546935Z node 4 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe012ts936ctmfqekw624e4, Create QueryResponse for error on request, msg: 2025-03-28T09:06:19.424375Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe013p4dg326ejt0kr8jcte, Create QueryResponse for error on request, msg: 2025-03-28T09:06:20.282991Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe014gv0a22k3xjfk10gxg3, Create QueryResponse for error on request, msg: 2025-03-28T09:06:21.274229Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe015fvfv0wk692faf14cs4, Create QueryResponse for error on request, msg: 2025-03-28T09:06:22.154302Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe016b9a58jyh9y41v25xtp, Create QueryResponse for error on request, msg: 2025-03-28T09:06:23.154738Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe017aa8fbadjpqcjsav93j, Create QueryResponse for error on request, msg: 2025-03-28T09:06:23.659036Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe017t037w54a6nghk601q5, Create QueryResponse for error on request, msg: 2025-03-28T09:06:24.158670Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe0189v8ed5j9m9q8dmx0c9, Create QueryResponse for error on request, msg: 2025-03-28T09:06:25.085658Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe0196kfd2qc29r754gzr3s, Create QueryResponse for error on request, msg: 2025-03-28T09:06:26.041768Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe01a4gayqtyvt9ep2hdyvx, Create QueryResponse for error on request, msg: 2025-03-28T09:06:26.540079Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe01am1dye4f3ceaakb6wzc, Create QueryResponse for error on request, msg: 2025-03-28T09:06:27.094941Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe01b584de63kcbxq9c0nb0, Create QueryResponse for error on request, msg: 2025-03-28T09:06:27.596293Z node 4 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe01bmz33hh6sdg0xpgr51y, Create QueryResponse for error on request, msg: 2025-03-28T09:06:28.193111Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe01c7gfrjntjyr7266xaqs, Create QueryResponse for error on request, msg: 2025-03-28T09:06:29.182233Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe01d6b1zet97q5gpagf4sj, Create QueryResponse for error on request, msg: 2025-03-28T09:06:30.079239Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe01e2cd2cdbqpx2r32246f, Create QueryResponse for error on request, msg: 2025-03-28T09:06:30.943229Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWQxNzkyODAtM2VhYTRlOTQtMjQwZTZhNjktODg4YzQ3ZjY=, ActorId: [4:7486783701794383244:2497], ActorState: ExecuteState, TraceId: 01jqe01exbedfg8xqy1d6d1841, Create QueryResponse for error on request, msg: >> TKqpScanData::FailOnUnsupportedPgType >> TKqpScanData::FailOnUnsupportedPgType [GOOD] >> KqpTx::RollbackTx [GOOD] >> KqpTx::RollbackTx2 >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 >> KqpSinkMvcc::OltpMultiSinks [GOOD] >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::FailOnUnsupportedPgType [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 >> KqpPg::NoSelectFullScan [GOOD] >> KqpPg::LongDomainName >> KqpSinkLocks::InvalidateOnCommit [GOOD] >> KqpSinkLocks::InvalidateOlapOnCommit >> JsonProtoConversion::JsonToProtoMap [GOOD] |78.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] Test command err: 2025-03-28T09:03:17.525660Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783397640511690:2266];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:17.525734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cf5/r3tmp/tmpNX27XA/pdisk_1.dat 2025-03-28T09:03:17.859968Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18326, node 1 2025-03-28T09:03:17.921330Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:17.922544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-03-28T09:03:17.938402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:03:17.947287Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:17.947309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:17.947322Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:17.947432Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7036 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:18.272126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:03:18.286811Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:03:20.503009Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-28T09:03:20.506092Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MzE2OTVmNjAtNWM4ZWY0ZDItZjdiMzVjNTgtZjczNzJkODg=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id MzE2OTVmNjAtNWM4ZWY0ZDItZjdiMzVjNTgtZjczNzJkODg= 2025-03-28T09:03:20.506493Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783410525414004:2328], Start check tables existence, number paths: 2 2025-03-28T09:03:20.506554Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=MzE2OTVmNjAtNWM4ZWY0ZDItZjdiMzVjNTgtZjczNzJkODg=, ActorId: [1:7486783410525414012:2329], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:20.508063Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, noode count: 1 2025-03-28T09:03:20.508094Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-28T09:03:20.508247Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools was enanbled 2025-03-28T09:03:20.519633Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783410525414004:2328], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-28T09:03:20.519697Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783410525414004:2328], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-28T09:03:20.519731Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486783410525414004:2328], Successfully finished 2025-03-28T09:03:20.519800Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-28T09:03:20.532734Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783410525414029:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:20.537621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:03:20.541105Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783410525414029:2301], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2025-03-28T09:03:20.545360Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783410525414029:2301], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2025-03-28T09:03:20.553192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783410525414029:2301], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:03:20.624133Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783410525414029:2301], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2025-03-28T09:03:20.629000Z node 1 :TX_PROXY ERROR: Actor# [1:7486783410525414083:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:03:20.629176Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783410525414029:2301], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2025-03-28T09:03:20.633253Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODU2N2UxOTctZjZhNTM3ZjctNjA0YWY1ZWEtMWYwMGFjYmM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ODU2N2UxOTctZjZhNTM3ZjctNjA0YWY1ZWEtMWYwMGFjYmM= 2025-03-28T09:03:20.633574Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODU2N2UxOTctZjZhNTM3ZjctNjA0YWY1ZWEtMWYwMGFjYmM=, ActorId: [1:7486783410525414091:2331], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:03:20.634279Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-28T09:03:20.634296Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new database state for id /Root 2025-03-28T09:03:20.634384Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783410525414093:2332], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-28T09:03:20.634491Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ODU2N2UxOTctZjZhNTM3ZjctNjA0YWY1ZWEtMWYwMGFjYmM=, ActorId: [1:7486783410525414091:2331], ActorState: ReadyState, TraceId: 01jqdzvnhse30k7qnvgvrff4vj, received request, proxyRequestId: 3 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_GENERIC_QUERY text: SELECT 42; rpcActor: [1:7486783410525414090:2341] database: Root databaseId: /Root pool id: sample_pool_id 2025-03-28T09:03:20.634537Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved new request from [1:7486783410525414091:2331], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ODU2N2UxOTctZjZhNTM3ZjctNjA0YWY1ZWEtMWYwMGFjYmM= 2025-03-28T09:03:20.634608Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7486783410525414095:2333], Database: /Root, Start database fetching 2025-03-28T09:03:20.635410Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7486783410525414095:2333], Database: /Root, Database info successfully fetched, serverless: 0 2025-03-28T09:03:20.635495Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2025-03-28T09:03:20.635600Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7486783410525414103:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ODU2N2UxOTctZjZhNTM3ZjctNjA0YWY1ZWEtMWYwMGFjYmM=, Start pool fetching 2025-03-28T09:03:20.635723Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7486783410525414093:2332], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-28T09:03:20.635766Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783410525414105:2335], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-28T09:03:20.635827Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2025-03-28T09:03:20.635840Z node 1 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2025-03-28T09:03:20.636233Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7486783410525414106:2336], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 5] 2025-03-28T09:03:20.636300Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783410525414105:2335], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2025-03-28T09:03:20.636345Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolResolverActor] ActorId: [1:7486783410525414103:2334], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ODU2N2UxOTctZjZhNTM3ZjctNjA0YWY1ZWEtMWYwMGFjYmM=, Pool info successfully resolved 2025-03-28T09:03:20.636391Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODU2N2UxOTctZjZhNTM3ZjctNjA0YWY1ZWEtMWYwMGFjYmM= 2025-03-28T09:03:20.636450Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7486783410525414106:2336], DatabaseId: /Root, PoolId: sample_pool_id, Received new request, worker id: [1:7486783410525414091:2331], session id: ydb://session/3?node_id=1&id=ODU2N2UxOTctZjZhNTM3ZjctNjA0YWY1ZWEtMWYwMGFjYmM= 2025-03-28T09:03:20.636501Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ODU2N2UxOTctZjZhNTM3ZjctNjA0YWY1ZWEtMWYwMGFjYmM= 2025-03-28T09:03:20.636560Z node 1 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] ... 
ctorState: ExecuteState, TraceId: 01jqdzyzx8f5nrjgker4z68gv2, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:05:09.633146Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YWVkMGIxMDgtMTRjZTg3MjgtZDM1MjllZDktYTIyOGRjZGE=, ActorId: [6:7486783880202898591:2444], ActorState: ExecuteState, TraceId: 01jqdzyzx8f5nrjgker4z68gv2, EndCleanup, isFinal: 1 2025-03-28T09:05:09.633214Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YWVkMGIxMDgtMTRjZTg3MjgtZDM1MjllZDktYTIyOGRjZGE=, ActorId: [6:7486783880202898591:2444], ActorState: ExecuteState, TraceId: 01jqdzyzx8f5nrjgker4z68gv2, Sent query response back to proxy, proxyRequestId: 16, proxyId: [6:7486783841548191913:2269] 2025-03-28T09:05:09.633263Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YWVkMGIxMDgtMTRjZTg3MjgtZDM1MjllZDktYTIyOGRjZGE=, ActorId: [6:7486783880202898591:2444], ActorState: unknown state, TraceId: 01jqdzyzx8f5nrjgker4z68gv2, Cleanup temp tables: 0 2025-03-28T09:05:09.633511Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=YWVkMGIxMDgtMTRjZTg3MjgtZDM1MjllZDktYTIyOGRjZGE=, ActorId: [6:7486783880202898591:2444], ActorState: unknown state, TraceId: 01jqdzyzx8f5nrjgker4z68gv2, Session actor destroyed 2025-03-28T09:05:09.644889Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZmRiMjI5NjAtNzEzNTIwZTQtOWYwYTlmMGQtZWJiZjBmZA==, ActorId: [6:7486783863023028823:2331], ActorState: ReadyState, TraceId: 01jqdzz00cad1exzc35w3a2cfy, received request, proxyRequestId: 18 prepared: 0 tx_control: 0 action: QUERY_ACTION_EXECUTE type: QUERY_TYPE_SQL_DDL text: DROP RESOURCE POOL sample_pool_id; DROP RESOURCE POOL default; rpcActor: [0:0:0] database: /Root databaseId: /Root pool id: default 2025-03-28T09:05:09.673651Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7486783863023028907:2337], DatabaseId: /Root, PoolId: sample_pool_id, Got delete notification 2025-03-28T09:05:09.673770Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-28T09:05:09.673846Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486783880202898666:2461], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2025-03-28T09:05:09.674377Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486783880202898666:2461], DatabaseId: /Root, PoolId: sample_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool sample_pool_id not found or you don't have access permissions }
2025-03-28T09:05:09.674524Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool sample_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool sample_pool_id not found or you don't have access permissions }
2025-03-28T09:05:09.678825Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7486783863023029108:2360], DatabaseId: /Root, PoolId: default, Got delete notification
2025-03-28T09:05:09.678923Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Recieved subscription request, DatabaseId: /Root, PoolId: default
2025-03-28T09:05:09.678977Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486783880202898682:2462], DatabaseId: /Root, PoolId: default, Start pool fetching
2025-03-28T09:05:09.679524Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486783880202898682:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:05:09.679627Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.687828Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZmRiMjI5NjAtNzEzNTIwZTQtOWYwYTlmMGQtZWJiZjBmZA==, ActorId: [6:7486783863023028823:2331], ActorState: ExecuteState, TraceId: 01jqdzz00cad1exzc35w3a2cfy, Cleanup start, isFinal: 0 CleanupCtx: 1 TransactionsToBeAborted.size(): 0 WorkerId: [6:7486783880202898652:2331] WorkloadServiceCleanup: 0 2025-03-28T09:05:09.689922Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZmRiMjI5NjAtNzEzNTIwZTQtOWYwYTlmMGQtZWJiZjBmZA==, ActorId: [6:7486783863023028823:2331], ActorState: CleanupState, TraceId: 01jqdzz00cad1exzc35w3a2cfy, EndCleanup, isFinal: 0 2025-03-28T09:05:09.690005Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZmRiMjI5NjAtNzEzNTIwZTQtOWYwYTlmMGQtZWJiZjBmZA==, ActorId: [6:7486783863023028823:2331], ActorState: CleanupState, TraceId: 01jqdzz00cad1exzc35w3a2cfy, Sent query response back to proxy, proxyRequestId: 18, proxyId: [6:7486783841548191913:2269] Wait pool handlers 0.000031s: number handlers = 2 Wait pool handlers 1.000144s: number handlers = 2 Wait pool handlers 2.000296s: number handlers = 2 Wait pool handlers 3.000409s: number handlers = 2 Wait pool handlers 4.000531s: number handlers = 2 Wait pool handlers 5.000654s: number handlers = 2 Wait pool handlers 6.002491s: number handlers = 2 2025-03-28T09:05:15.725817Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:05:15.725850Z node 6 :IMPORT WARN: Table profiles were not loaded Wait pool handlers 7.002595s: number handlers = 2 Wait pool handlers 8.002699s: number handlers = 2 Wait pool handlers 9.004321s: number handlers = 2 Wait pool handlers 10.005637s: number handlers = 2 Wait pool handlers 11.006545s: number handlers = 2 Wait pool handlers 12.008222s: number handlers = 2 2025-03-28T09:05:22.671394Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7486783863023028907:2337], DatabaseId: /Root, PoolId: sample_pool_id, Try to start scheduled refresh Wait pool handlers 13.008338s: number handlers = 2 Wait pool handlers 14.009689s: number handlers = 2 Wait pool handlers 15.009798s: number handlers = 2 Wait pool handlers 16.012224s: number handlers = 2 Wait pool handlers 17.016224s: number handlers = 2 Wait pool handlers 18.020227s: number handlers = 2 Wait pool handlers 19.020345s: number handlers = 2 Wait pool handlers 20.024215s: number handlers = 2 Wait pool handlers 21.024396s: number handlers = 2 Wait pool handlers 22.025679s: number handlers = 2 Wait pool handlers 23.028232s: number handlers = 2 Wait pool handlers 24.028558s: number handlers = 2 Wait pool handlers 25.029258s: number handlers = 2 Wait pool handlers 26.029443s: number handlers = 2 Wait pool handlers 27.032234s: number handlers = 2 Wait pool handlers 28.034767s: number handlers = 2 Wait pool handlers 29.034924s: number handlers = 2 Wait pool handlers 30.036259s: number handlers = 2 Wait pool handlers 31.036433s: number handlers = 2 Wait pool handlers 32.039675s: number handlers = 2 Wait pool handlers 33.041507s: number handlers = 2 Wait pool handlers 34.044315s: number handlers = 2 Wait pool handlers 35.047891s: number handlers = 2 Wait pool handlers 36.052236s: number handlers = 2 Wait pool handlers 37.060621s: number handlers = 2 Wait pool handlers 38.061669s: number handlers = 2 Wait pool handlers 39.061785s: number handlers = 2 Wait pool handlers 40.065300s: number handlers = 2 Wait pool 
handlers 41.068202s: number handlers = 2 Wait pool handlers 42.068336s: number handlers = 2 Wait pool handlers 43.071440s: number handlers = 2 Wait pool handlers 44.072350s: number handlers = 2 Wait pool handlers 45.075512s: number handlers = 2 Wait pool handlers 46.080227s: number handlers = 2 Wait pool handlers 47.082628s: number handlers = 2 Wait pool handlers 48.083502s: number handlers = 2 Wait pool handlers 49.084222s: number handlers = 2 Wait pool handlers 50.086245s: number handlers = 2 Wait pool handlers 51.087425s: number handlers = 2 Wait pool handlers 52.088240s: number handlers = 2 Wait pool handlers 53.092247s: number handlers = 2 Wait pool handlers 54.093371s: number handlers = 2 Wait pool handlers 55.096235s: number handlers = 2 Wait pool handlers 56.100242s: number handlers = 2 Wait pool handlers 57.103299s: number handlers = 2 Wait pool handlers 58.103438s: number handlers = 2 Wait pool handlers 59.105396s: number handlers = 2 Wait pool handlers 60.106481s: number handlers = 2 Wait pool handlers 61.106608s: number handlers = 2 Wait pool handlers 62.106763s: number handlers = 2 Wait pool handlers 63.106916s: number handlers = 2 Wait pool handlers 64.107056s: number handlers = 2 Wait pool handlers 65.107197s: number handlers = 2 Wait pool handlers 66.108126s: number handlers = 2 Wait pool handlers 67.112239s: number handlers = 2 Wait pool handlers 68.116234s: number handlers = 2 Wait pool handlers 69.120243s: number handlers = 2 Wait pool handlers 70.122250s: number handlers = 2 Wait pool handlers 71.122632s: number handlers = 2 Wait pool handlers 72.122952s: number handlers = 2 Wait pool handlers 73.126117s: number handlers = 2 Wait pool handlers 74.132244s: number handlers = 2 Wait pool handlers 75.132846s: number handlers = 2 Wait pool handlers 76.133530s: number handlers = 2 Wait pool handlers 77.135134s: number handlers = 2 Wait pool handlers 78.135292s: number handlers = 2 Wait pool handlers 79.136281s: number handlers = 2 Wait pool handlers 80.136732s: number handlers = 2 Wait pool handlers 81.139631s: number handlers = 2 Wait pool handlers 82.140049s: number handlers = 2 Wait pool handlers 83.140255s: number handlers = 2 Wait pool handlers 84.140369s: number handlers = 2 Wait pool handlers 85.140474s: number handlers = 2 2025-03-28T09:06:35.294261Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7486783863023029108:2360], DatabaseId: /Root, PoolId: default, Got stop pool handler request, waiting for 0 requests 2025-03-28T09:06:35.294447Z node 6 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7486783863023028907:2337], DatabaseId: /Root, PoolId: sample_pool_id, Got stop pool handler request, waiting for 0 requests 2025-03-28T09:06:35.294577Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: default 2025-03-28T09:06:35.294604Z node 6 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Got stop pool handler response, DatabaseId: /Root, PoolId: sample_pool_id 2025-03-28T09:06:35.931079Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZmRiMjI5NjAtNzEzNTIwZTQtOWYwYTlmMGQtZWJiZjBmZA==, ActorId: [6:7486783863023028823:2331], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:06:35.931151Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=ZmRiMjI5NjAtNzEzNTIwZTQtOWYwYTlmMGQtZWJiZjBmZA==, ActorId: [6:7486783863023028823:2331], ActorState: ReadyState, Cleanup 
start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:06:35.931187Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZmRiMjI5NjAtNzEzNTIwZTQtOWYwYTlmMGQtZWJiZjBmZA==, ActorId: [6:7486783863023028823:2331], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:06:35.931219Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZmRiMjI5NjAtNzEzNTIwZTQtOWYwYTlmMGQtZWJiZjBmZA==, ActorId: [6:7486783863023028823:2331], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:06:35.954855Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=ZmRiMjI5NjAtNzEzNTIwZTQtOWYwYTlmMGQtZWJiZjBmZA==, ActorId: [6:7486783863023028823:2331], ActorState: unknown state, Session actor destroyed >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 >> SplitterBasic::LimitExceed [GOOD] |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] |78.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |78.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoMap [GOOD] |78.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |78.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 >> KqpSinkTx::OlapInvalidateOnError [FAIL] >> KqpSinkTx::OlapInteractive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 13220, MsgBus: 30587 2025-03-28T09:06:14.606513Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784158048401829:2154];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:14.614218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016bc/r3tmp/tmpqlY0kk/pdisk_1.dat 2025-03-28T09:06:15.051409Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:15.067568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:15.067655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:15.070503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13220, node 1 2025-03-28T09:06:15.141674Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:15.141699Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-03-28T09:06:15.141708Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:15.141831Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30587 TClient is connected to server localhost:30587 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:15.685222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:15.698759Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:17.609277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784170933304266:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:06:17.609424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784170933304250:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:06:17.610118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:06:17.614079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:06:17.623382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784170933304288:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:06:17.686590Z node 1 :TX_PROXY ERROR: Actor# [1:7486784170933304339:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:17.971739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:06:18.091538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:06:19.181011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:19.926998Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784158048401829:2154];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:19.963769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 32007, MsgBus: 26548 2025-03-28T09:06:27.139518Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784210025884999:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016bc/r3tmp/tmpWYo5eS/pdisk_1.dat 2025-03-28T09:06:27.184429Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:06:27.266714Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:27.283006Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:27.283102Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:27.284934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32007, node 2 2025-03-28T09:06:27.363157Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:27.363180Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:27.363190Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:27.363315Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26548 TClient is connected to server localhost:26548 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:06:28.053043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:06:28.072388Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:06:30.726740Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784227205754703:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:06:30.726815Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784227205754695:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:06:30.727274Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:06:30.731301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-28T09:06:30.747575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784227205754709:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-28T09:06:30.835558Z node 2 :TX_PROXY ERROR: Actor# [2:7486784227205754760:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:06:30.898884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
2025-03-28T09:06:30.988522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
2025-03-28T09:06:32.022939Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784210025884999:2202];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:06:32.023833Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:06:32.221094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
|78.4%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log}
|78.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ...
results_accumulator.log} |78.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |78.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |78.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::LimitExceed [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 [GOOD] >> EntityId::Order >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 >> Cache::Test1 [GOOD] >> EscapingBasics::HideSecretsOverEncloseSecretShouldWork [GOOD] >> EntityId::Order [GOOD] >> EntityId::MinId [GOOD] >> Cache::Test2 [GOOD] >> EscapingBasics::EscapeStringShouldWork [GOOD] |78.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |78.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |78.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |78.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |78.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |78.4%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::MinId [GOOD] |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test2 [GOOD] |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EscapeStringShouldWork [GOOD] |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |78.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |78.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |78.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 [GOOD] >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |78.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... 
results_accumulator.log} |78.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |78.5%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] >> TSequence::CreateSequence >> TSequence::CreateSequenceParallel >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 13086, MsgBus: 26126 2025-03-28T09:06:15.314218Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784163286436438:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:15.329384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016b0/r3tmp/tmpX532z1/pdisk_1.dat 2025-03-28T09:06:15.691192Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13086, node 1 2025-03-28T09:06:15.743256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:15.743374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:15.744631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:15.788855Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:15.788871Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:15.788876Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:15.788977Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26126 TClient is connected to server localhost:26126 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:16.442863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:16.462609Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:16.482067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:16.597061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:16.770638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:16.852920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:18.428824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784176171339941:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:18.428977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:18.871702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:18.927766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:18.999520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:19.075394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:19.155220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:19.200830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:19.253763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784180466307755:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:19.253871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784180466307760:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:19.253915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:19.258112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:19.267811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784180466307762:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:06:19.364929Z node 1 :TX_PROXY ERROR: Actor# [1:7486784180466307817:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:20.313356Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784163286436438:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:20.313423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:06:21.130606Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2I3MjIwOTktYzNlNmZmOC04NTEwMDUzOS03OGNhN2FmYw==, ActorId: [1:7486784184761275375:2490], ActorState: ExecuteState, TraceId: 01jqe015ss7n59t1zz7y5rsf8w, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 26529, MsgBus: 28003 2025-03-28T09:06:21.930782Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784188155673563:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:21.931168Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016b0/r3tmp/tmpuMT1wL/pdisk_1.dat 2025-03-28T09:06:22.112426Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:22.144727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:22.144830Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:22.146470Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26529, node 2 2025-03-28T09:06:22.272195Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:22.272218Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:22.272226Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:22.272344Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28003 TClient is connected to server localhost:28003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:22.874706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:25.574816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784205335543371:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:25.574965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: ... d=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-28T09:06:28.762218Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-28T09:06:28.762927Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;self_id=[2:7486784209630511494:2364];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037947;local_tx_no=13;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037899,72075186224037903;receive=72075186224037907; 2025-03-28T09:06:28.762995Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;self_id=[2:7486784209630511494:2364];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037947;local_tx_no=14;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037899,72075186224037903;receive=72075186224037907; 2025-03-28T09:06:28.763117Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;self_id=[2:7486784209630511494:2364];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037947;local_tx_no=16;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037899; 2025-03-28T09:06:28.763172Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;self_id=[2:7486784209630511494:2364];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037947;local_tx_no=17;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903;receive=72075186224037899; 2025-03-28T09:06:28.763637Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-28T09:06:28.764352Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-28T09:06:29.295582Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=281474976715670;tx_id=281474976715670;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715670; 2025-03-28T09:06:29.295863Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWM2ZjEyMjgtNGM2YzRhMDMtNGYyNjE5YTQtYmY0OWE2ZGQ=, ActorId: [2:7486784218220448338:2818], ActorState: ExecuteState, TraceId: 01jqe01dq0f40geej2xhcnnhs7, Create QueryResponse for error on request, msg: 2025-03-28T09:06:29.297140Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Complete;fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976715670; 2025-03-28T09:06:29.299752Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;self_id=[2:7486784209630511554:2376];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037936;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715665;problem=finished; 2025-03-28T09:06:29.299806Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;self_id=[2:7486784209630511554:2376];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037936;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715665;problem=finished; Trying to start YDB, gRPC: 10357, MsgBus: 6143
2025-03-28T09:06:33.971100Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:06:33.971335Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:06:33.971441Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016b0/r3tmp/tmpBL1QRb/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10357, node 3 2025-03-28T09:06:34.446969Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:34.447984Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:34.448058Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:34.448102Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:34.448551Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:06:34.488239Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:34.488411Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:34.500128Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6143 TClient is connected to server localhost:6143 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:34.824886Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:34.922484Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:35.254241Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:06:35.703414Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:36.064541Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:36.687353Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1809:3403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:36.687659Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:36.707624Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:36.925019Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:37.166131Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:37.443698Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:37.707812Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:38.052563Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:38.385833Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2399:3856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:38.386013Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:38.386495Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2404:3861], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:38.403679Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:38.574948Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2406:3863], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:06:38.621852Z node 3 :TX_PROXY ERROR: Actor# [3:2469:3907] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:39.924924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:06:40.253402Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:06:40.650274Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant [GOOD] >> TConsoleTests::TestAlterServerlessTenant >> TStorageTenantTest::Empty [GOOD] >> KqpAgg::AggWithLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 24306, MsgBus: 12560 2025-03-28T09:01:56.668680Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783050650333824:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:56.841674Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018bd/r3tmp/tmpvl09i1/pdisk_1.dat 2025-03-28T09:02:03.032171Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783050650333824:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:03.034025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:05.040978Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783067830203130:2290];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:05.041359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:06.114854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:06.114879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 
2025-03-28T09:02:06.189599Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:09.163715Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:09.178539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:09.178954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:09.393174Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:09.448214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:09.449341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:09.561838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:10.318601Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.125774s 2025-03-28T09:02:10.324200Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.130399s TServer::EnableGrpc on GrpcPort 24306, node 1 2025-03-28T09:02:15.282013Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:15.282328Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:15.294974Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:15.322888Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12560 TClient is connected to server localhost:12560 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:16.749843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:02:16.773293Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:16.787424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:16.934375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.134881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.250603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:18.995514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783145139616000:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:18.995630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:19.293424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.371658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.436790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.496228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.547936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.600607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.711400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783149434583817:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:19.711495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:19.714313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783149434583822:2470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:19.718586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:19.735474Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:02:19.735745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783149434583824:2471], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:02:19.822552Z node 1 :TX_PROXY ERROR: Actor# [1:7486783149434583878:3475] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:21.765976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:02:23.039162Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jqdzsx9qfh8j0wtwagmgvgtd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI3Mjk1YWQtNGRmMGQ5OTgtYzc1MjRiNGUtZjNhYzE0Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:23.050785Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqdzsx9vaz6ggrne2hbp1k6e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGE4YTk1Y2ItNjZjODA4YS1mNWU2OGQ5Yy1mNTEzY2I1Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:23.126158Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jqdzsx9qfh8j0wtwagmgvgtd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjI3Mjk1YWQtNGRmMGQ5OTgtYzc1MjRiNGUtZjNhYzE0Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:23.148063Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqdzsxa4f5z2vxrvm8xbjw9m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGI0ZGZiYTQtMmQ5ZjUxOWEtMTJhOTljZDAtMTZlNDQ2ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:23.153496Z node 1 :KQP_EXECUTER ERROR ... sion/3?node_id=2&id=NTZiZmFhNWItODAzMmNlOTQtN2M0NzgwMDQtYTZkZDc0NDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.838098Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731675. Ctx: { TraceId: 01jqe01n4p5tsbmq367q9e87s4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTZiZmFhNWItODAzMmNlOTQtN2M0NzgwMDQtYTZkZDc0NDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.843336Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731677. Ctx: { TraceId: 01jqe01n3h5mrmyjfq5emzxdke, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzk2MDkzMWItM2M2YWQ2NzgtMzEwODRkODAtMjEwMzEwMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.843763Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731678. Ctx: { TraceId: 01jqe01n54e8r2pgrkx22x12dz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTI2YmRiNy1kM2RiOTIwMS1jODFiN2VjMC0zNDVjODM3MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.844920Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731676. Ctx: { TraceId: 01jqe01n506bmvsbeqmdvgh5bx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWY3MDYxNGUtZGFiNDE5OGQtZmRhOTJhMTYtNTI0YTQyNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:06:36.852089Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731679. Ctx: { TraceId: 01jqe01n54e8r2pgrkx22x12dz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTI2YmRiNy1kM2RiOTIwMS1jODFiN2VjMC0zNDVjODM3MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.854517Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731680. Ctx: { TraceId: 01jqe01n506bmvsbeqmdvgh5bx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWY3MDYxNGUtZGFiNDE5OGQtZmRhOTJhMTYtNTI0YTQyNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.855301Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731681. Ctx: { TraceId: 01jqe01n3h5mrmyjfq5emzxdke, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzk2MDkzMWItM2M2YWQ2NzgtMzEwODRkODAtMjEwMzEwMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.859958Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731682. Ctx: { TraceId: 01jqe01n5e97snpjz65wvre2m3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2M3OWUwMDktNWRhYzE0MzAtZTQ1ZmUzM2YtMjQyM2IzZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.863401Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731683. Ctx: { TraceId: 01jqe01n506bmvsbeqmdvgh5bx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWY3MDYxNGUtZGFiNDE5OGQtZmRhOTJhMTYtNTI0YTQyNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.864665Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731684. Ctx: { TraceId: 01jqe01n3h5mrmyjfq5emzxdke, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzk2MDkzMWItM2M2YWQ2NzgtMzEwODRkODAtMjEwMzEwMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.870499Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731685. Ctx: { TraceId: 01jqe01n5n6z40eb7g8dfy75p0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGIzMjhjYmUtN2I0NGZiOWYtZTA2Y2YwNzYtNzYyODMwNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.872661Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731686. Ctx: { TraceId: 01jqe01n5e97snpjz65wvre2m3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2M3OWUwMDktNWRhYzE0MzAtZTQ1ZmUzM2YtMjQyM2IzZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.873370Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731687. Ctx: { TraceId: 01jqe01n54e8r2pgrkx22x12dz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTI2YmRiNy1kM2RiOTIwMS1jODFiN2VjMC0zNDVjODM3MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.875074Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731688. Ctx: { TraceId: 01jqe01n506bmvsbeqmdvgh5bx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWY3MDYxNGUtZGFiNDE5OGQtZmRhOTJhMTYtNTI0YTQyNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.883847Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731689. Ctx: { TraceId: 01jqe01n54e8r2pgrkx22x12dz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTI2YmRiNy1kM2RiOTIwMS1jODFiN2VjMC0zNDVjODM3MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:06:36.884705Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731690. Ctx: { TraceId: 01jqe01n5e97snpjz65wvre2m3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2M3OWUwMDktNWRhYzE0MzAtZTQ1ZmUzM2YtMjQyM2IzZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.887063Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731691. Ctx: { TraceId: 01jqe01n506bmvsbeqmdvgh5bx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWY3MDYxNGUtZGFiNDE5OGQtZmRhOTJhMTYtNTI0YTQyNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.889714Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731692. Ctx: { TraceId: 01jqe01n5n6z40eb7g8dfy75p0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGIzMjhjYmUtN2I0NGZiOWYtZTA2Y2YwNzYtNzYyODMwNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.893199Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731693. Ctx: { TraceId: 01jqe01n5e97snpjz65wvre2m3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2M3OWUwMDktNWRhYzE0MzAtZTQ1ZmUzM2YtMjQyM2IzZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.897848Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731694. Ctx: { TraceId: 01jqe01n506bmvsbeqmdvgh5bx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWY3MDYxNGUtZGFiNDE5OGQtZmRhOTJhMTYtNTI0YTQyNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.901129Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731695. Ctx: { TraceId: 01jqe01n5e97snpjz65wvre2m3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2M3OWUwMDktNWRhYzE0MzAtZTQ1ZmUzM2YtMjQyM2IzZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-03-28T09:06:36.903070Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731696. Ctx: { TraceId: 01jqe01n5n6z40eb7g8dfy75p0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGIzMjhjYmUtN2I0NGZiOWYtZTA2Y2YwNzYtNzYyODMwNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.906741Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731697. Ctx: { TraceId: 01jqe01n5e97snpjz65wvre2m3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2M3OWUwMDktNWRhYzE0MzAtZTQ1ZmUzM2YtMjQyM2IzZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-03-28T09:06:36.911101Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731698. Ctx: { TraceId: 01jqe01n5n6z40eb7g8dfy75p0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGIzMjhjYmUtN2I0NGZiOWYtZTA2Y2YwNzYtNzYyODMwNmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.911174Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731699. Ctx: { TraceId: 01jqe01n75ave2kq0q4nhq1jpr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTZiZmFhNWItODAzMmNlOTQtN2M0NzgwMDQtYTZkZDc0NDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:06:36.917587Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731700. Ctx: { TraceId: 01jqe01n75ave2kq0q4nhq1jpr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTZiZmFhNWItODAzMmNlOTQtN2M0NzgwMDQtYTZkZDc0NDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-03-28T09:06:36.920073Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731701. Ctx: { TraceId: 01jqe01n7gc46xex8f7j4dwn3y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzk2MDkzMWItM2M2YWQ2NzgtMzEwODRkODAtMjEwMzEwMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.923284Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731702. Ctx: { TraceId: 01jqe01n75ave2kq0q4nhq1jpr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTZiZmFhNWItODAzMmNlOTQtN2M0NzgwMDQtYTZkZDc0NDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.927486Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731703. Ctx: { TraceId: 01jqe01n75ave2kq0q4nhq1jpr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTZiZmFhNWItODAzMmNlOTQtN2M0NzgwMDQtYTZkZDc0NDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.927851Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731704. Ctx: { TraceId: 01jqe01n7gc46xex8f7j4dwn3y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzk2MDkzMWItM2M2YWQ2NzgtMzEwODRkODAtMjEwMzEwMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.932820Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731705. Ctx: { TraceId: 01jqe01n75ave2kq0q4nhq1jpr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTZiZmFhNWItODAzMmNlOTQtN2M0NzgwMDQtYTZkZDc0NDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.934225Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731706. Ctx: { TraceId: 01jqe01n7gc46xex8f7j4dwn3y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzk2MDkzMWItM2M2YWQ2NzgtMzEwODRkODAtMjEwMzEwMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.938248Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731708. Ctx: { TraceId: 01jqe01n7gc46xex8f7j4dwn3y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzk2MDkzMWItM2M2YWQ2NzgtMzEwODRkODAtMjEwMzEwMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:06:36.938458Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731707. Ctx: { TraceId: 01jqe01n75ave2kq0q4nhq1jpr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTZiZmFhNWItODAzMmNlOTQtN2M0NzgwMDQtYTZkZDc0NDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS 2025-03-28T09:06:36.942846Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976731709. Ctx: { TraceId: 01jqe01n7gc46xex8f7j4dwn3y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Yzk2MDkzMWItM2M2YWQ2NzgtMzEwODRkODAtMjEwMzEwMzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::Empty [GOOD] >> KqpSinkMvcc::OltpNamedStatement [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 >> KqpTx::RollbackTx2 [GOOD] |78.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |78.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection >> KqpNewEngine::BlindWrite >> KqpSinkTx::OlapLocksAbortOnCommit [GOOD] >> KqpSinkTx::OlapSnapshotRO >> KqpNewEngine::ContainerRegistryCombiner |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> DataStreams::TestReservedResourcesMetering >> DataStreams::TestUpdateStorage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpNamedStatement [GOOD] Test command err: Trying to start YDB, gRPC: 24925, MsgBus: 10626 2025-03-28T09:06:18.511528Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784173704706635:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:18.511626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001695/r3tmp/tmpG0Y4VB/pdisk_1.dat 2025-03-28T09:06:18.961245Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:18.962637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:18.962798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:18.968000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24925, node 1 2025-03-28T09:06:19.073007Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:19.073030Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:19.073036Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:19.073159Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10626 TClient is connected to server localhost:10626 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:19.664946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:19.682828Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:21.625101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784186589609186:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:21.625186Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784186589609194:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:21.625231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:21.634770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:06:21.645074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784186589609200:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:06:21.732525Z node 1 :TX_PROXY ERROR: Actor# [1:7486784186589609251:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:22.001661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:06:22.148390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:06:23.327061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:23.853286Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784173704706635:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:24.190985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 4020, MsgBus: 31591 2025-03-28T09:06:32.355008Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784235344235805:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:32.355062Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001695/r3tmp/tmpbworHQ/pdisk_1.dat 2025-03-28T09:06:32.569717Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:32.588551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:32.588637Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:32.591766Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4020, node 2 2025-03-28T09:06:32.738807Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:32.738837Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:32.738854Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:32.739057Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31591 TClient is connected to server localhost:31591 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:06:33.390047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:06:33.405816Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:06:36.038966Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784252524105625:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:36.039124Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:36.044933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784252524105651:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:36.049286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:06:36.075853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784252524105662:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:06:36.169090Z node 2 :TX_PROXY ERROR: Actor# [2:7486784252524105713:2336] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:36.221516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:06:36.283232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:06:37.327121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:37.355465Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784235344235805:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:37.355555Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackTx2 [GOOD] Test command err: Trying to start YDB, gRPC: 21705, MsgBus: 25297 2025-03-28T09:06:34.140023Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784244181724221:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:34.140065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00165c/r3tmp/tmpIvFhcc/pdisk_1.dat 2025-03-28T09:06:34.581382Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:34.610556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:34.610678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:34.612614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21705, node 1 2025-03-28T09:06:34.697445Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:34.697470Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:34.697482Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:34.697639Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25297 TClient is connected to server localhost:25297 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:35.246030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:35.268968Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:35.275798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:35.417063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:35.590623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:06:35.651661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:06:37.259438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784257066627902:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:37.259561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:37.545012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:37.612410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:37.650301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:37.677776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:37.706789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:37.739227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:37.783042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784257066628414:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:37.783120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:37.783460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784257066628419:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:37.787386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:37.796329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784257066628421:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:06:37.864128Z node 1 :TX_PROXY ERROR: Actor# [1:7486784257066628474:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:39.140102Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784244181724221:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:39.140150Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:06:39.233647Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGY1MTgyMGItY2MxODVjZTEtNDVhNTMyYzctNjMxODhkYjk=, ActorId: [1:7486784261361596029:2488], ActorState: ReadyState, TraceId: 01jqe01qfh82k07jwhjdmf1y5f, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 11891, MsgBus: 7781 2025-03-28T09:06:40.227320Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784269456334673:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:40.227359Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00165c/r3tmp/tmpbj77i3/pdisk_1.dat 2025-03-28T09:06:40.428000Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:40.428088Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:40.430195Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:40.446557Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11891, node 2 2025-03-28T09:06:40.531241Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:40.531269Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:40.531279Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:40.531407Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7781 TClient is connected to server localhost:7781 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:40.977228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:40.988184Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:41.001088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:41.066354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:41.212579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:41.282533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:43.754873Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784282341238326:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:43.754986Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:43.807947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:43.856728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:43.932882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:43.970337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:44.018134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:44.097547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:44.192077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784286636206144:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:44.192147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:44.192203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784286636206149:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:44.196477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:44.212756Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784286636206151:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:06:44.303563Z node 2 :TX_PROXY ERROR: Actor# [2:7486784286636206208:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:45.231942Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784269456334673:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:45.234131Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:06:45.658938Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTljNTljMDgtZDA0MGE0MTAtZGQzZDBlZjItYWYzNTMwNmI=, ActorId: [2:7486784290931173757:2488], ActorState: ReadyState, TraceId: 01jqe01xrt1a013n8ecvxx1ard, Create QueryResponse for error on request, msg: >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 >> DataStreams::TestControlPlaneAndMeteringData >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 >> KqpPg::LongDomainName [GOOD] |78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |78.6%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut >> TSequence::CreateSequence [GOOD] >> TSequence::CreateDropRecreate >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential >> DataStreams::TestPutRecordsOfAnauthorizedUser >> DataShardReadIterator::ShouldReadRangeCellVec |78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |78.6%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut >> DataShardReadIterator::ShouldReadKeyCellVec |78.6%| [TA] $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestReadWrite::WriteRead >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot >> TSequence::CreateDropRecreate [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed >> TSequence::CreateSequenceSequential [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence |78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat >> Viewer::ServerlessWithExclusiveNodes [GOOD] >> Viewer::SharedDoesntShowExclusiveNodes >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::LongDomainName [GOOD] Test command err: Trying to start YDB, gRPC: 12222, MsgBus: 61777 2025-03-28T09:03:29.762695Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783449287478459:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:29.762772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012e5/r3tmp/tmpa6alzh/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12222, node 1 2025-03-28T09:03:30.153462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:30.153566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:30.167450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:03:30.185087Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:30.265750Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:30.265775Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:30.265797Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:30.266004Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61777 TClient is connected to server localhost:61777 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:03:31.041731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:31.060989Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:03:33.102788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783466467348306:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:33.102881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783466467348318:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:33.102889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:33.107228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:03:33.120521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783466467348320:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:03:33.221258Z node 1 :TX_PROXY ERROR: Actor# [1:7486783466467348371:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 62546, MsgBus: 14930 2025-03-28T09:03:34.183833Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486783472418344414:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:34.183865Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012e5/r3tmp/tmpGtfgju/pdisk_1.dat 2025-03-28T09:03:34.457488Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:34.465926Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:34.465995Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:34.468008Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62546, node 2 2025-03-28T09:03:34.630874Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:34.630897Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:34.630905Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:34.631015Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14930 TClient is connected to server localhost:14930 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:35.158111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:03:35.167728Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:03:37.829567Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783485303246963:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:37.829637Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486783485303246974:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:37.829687Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:37.834047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:03:37.845999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486783485303246977:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:03:37.913000Z node 2 :TX_PROXY ERROR: Actor# [2:7486783485303247028:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 18256, MsgBus: 27706 2025-03-28T09:03:38.831730Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486783486450515985:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:38.831803Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012e5/r3tmp/tmpEDvrxl/pdisk_1.dat 2025-03-28T09:03:38.977066Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:38.993165Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:38.993259Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:38.994487Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18256, node 3 2025-03-28T09:03:39.065530Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:39.065558Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:39.065565Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:39.065677Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27706 TClient is connected to server localhost:27706 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ... CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-28T09:06:32.432275Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:06:36.157162Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7486784232923586608:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:36.157253Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:06:36.810398Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486784254398423729:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:36.810530Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486784254398423722:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:36.810916Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:36.814316Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:06:36.828466Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486784254398423751:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:06:36.919287Z node 10 :TX_PROXY ERROR: Actor# [10:7486784254398423802:2343] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:36.950816Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["aid (null, 3)","aid [7, 7]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"No estimate","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["abalance"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (null, 3)","aid [7, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["aid (null, 3)","aid [7, 7]"],"Name":"TableRangeScan","Path":"\/Root\/pgbench_accounts","E-Rows":"No estimate","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["abalance"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","ReadRange":["aid (4, 3)"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"No estimate","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (4, 3)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","ReadRange":["aid (4, 3)"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/pgbench_accounts","E-Rows":"No estimate","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node 
Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 23327, MsgBus: 28767 2025-03-28T09:06:41.175394Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7486784275103644085:2152];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:41.178615Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012e5/r3tmp/tmpJzQ5nn/pdisk_1.dat 2025-03-28T09:06:41.469551Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:41.500236Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:41.500370Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:41.501889Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23327, node 11 2025-03-28T09:06:41.747461Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:41.747506Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:41.747522Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:41.747709Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28767 TClient is connected to server localhost:28767 WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'... TClient::Ls request: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_D... (TRUNCATED) WaitRootIsUp 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' success. 2025-03-28T09:06:42.781772Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:06:42.799896Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:46.176241Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7486784275103644085:2152];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:46.176364Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:06:47.339525Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7486784300873448440:2334], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:47.339692Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:47.340288Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7486784300873448452:2337], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:47.358201Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:06:47.375583Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7486784300873448454:2338], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:06:47.472253Z node 11 :TX_PROXY ERROR: Actor# [11:7486784300873448505:2346] txid# 281474976710659, issues: { message: "Check failed: path: \'/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:47.506106Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 |78.6%| [TA] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |78.6%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable >> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD] >> TSequence::CopyTableWithSequence >> TConsoleTests::TestAlterServerlessTenant [GOOD] >> TConsoleTests::TestAttributes |78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |78.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut >> DataStreams::TestUpdateStorage [GOOD] >> DataStreams::TestStreamTimeRetention >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 >> KqpNewEngine::ContainerRegistryCombiner [GOOD] >> KqpNewEngine::DeferredEffects >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 >> TColumnShardTestReadWrite::WriteReadModifications >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpAgg::AggWithLookup [GOOD] >> KqpAgg::AggWithSelfLookup >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CreateSequencesWithIndexedTable >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 >> KqpNewEngine::BlindWrite [GOOD] >> KqpNewEngine::BlindWriteParameters >> TSequence::CopyTableWithSequence [GOOD] >> TSequence::AlterSequence >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 [GOOD] Test command err: Trying to start YDB, gRPC: 15258, MsgBus: 20226 2025-03-28T09:06:26.577114Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784209991650140:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:26.577192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001666/r3tmp/tmpebVjib/pdisk_1.dat 2025-03-28T09:06:27.039067Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:27.056071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:27.056172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:27.058073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15258, node 1 2025-03-28T09:06:27.154955Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:27.154983Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:27.154989Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:27.155078Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20226 TClient is connected to server localhost:20226 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:27.866669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:27.896755Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:29.917364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784222876552700:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:29.917466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784222876552692:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:29.917592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:29.928520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:06:29.943838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784222876552706:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:06:30.029605Z node 1 :TX_PROXY ERROR: Actor# [1:7486784227171520054:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:30.371197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:06:30.502830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:06:31.581429Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784209991650140:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:31.584395Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:06:31.603305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 15499, MsgBus: 29027 2025-03-28T09:06:40.186141Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784269948892006:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:40.187176Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001666/r3tmp/tmp0iTDd6/pdisk_1.dat 2025-03-28T09:06:40.354492Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:40.385642Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:40.385742Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:40.387776Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15499, node 2 2025-03-28T09:06:40.494092Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:40.494123Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:40.494133Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:40.494264Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29027 TClient is connected to server localhost:29027 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:41.004333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:41.016959Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:06:43.928996Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784282833794542:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:43.929432Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:43.933743Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784282833794569:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:43.944182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:06:43.956596Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784282833794571:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:06:44.026401Z node 2 :TX_PROXY ERROR: Actor# [2:7486784287128761918:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:44.089448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:06:44.145606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:06:45.193186Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784269948892006:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:45.194057Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:06:45.251586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:47.123803Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715666; 2025-03-28T09:06:47.125149Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486784300013672508:2971], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [2:7486784295718704887:2971]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[2:7486784300013672508:2971].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-28T09:06:47.125542Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486784300013672500:2971], SessionActorId: [2:7486784295718704887:2971], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7486784295718704887:2971]. isRollback=0 2025-03-28T09:06:47.125708Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWFmYTk5ZTEtOTBlZDM5ZjktNzNlMThhMDctYTRlMTE5MWM=, ActorId: [2:7486784295718704887:2971], ActorState: ExecuteState, TraceId: 01jqe01z4p9f0f8ecj1thebfgv, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7486784300013672501:2971] from: [2:7486784300013672500:2971] 2025-03-28T09:06:47.125769Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7486784300013672501:2971] TxId: 281474976715666. Ctx: { TraceId: 01jqe01z4p9f0f8ecj1thebfgv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWFmYTk5ZTEtOTBlZDM5ZjktNzNlMThhMDctYTRlMTE5MWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-28T09:06:47.126583Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWFmYTk5ZTEtOTBlZDM5ZjktNzNlMThhMDctYTRlMTE5MWM=, ActorId: [2:7486784295718704887:2971], ActorState: ExecuteState, TraceId: 01jqe01z4p9f0f8ecj1thebfgv, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> DataStreams::TestControlPlaneAndMeteringData [GOOD] >> DataStreams::ChangeBetweenRetentionModes >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] >> DataStreams::TestPutRecordsOfAnauthorizedUser [GOOD] >> DataStreams::TestPutRecordsWithRead >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 >> DataShardReadIterator::ShouldReadRangeCellVec [GOOD] >> DataShardReadIterator::ShouldReadRangeArrow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] Test command err: 2025-03-28T09:06:53.505947Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:06:53.627708Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:06:53.664233Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:06:53.664519Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:06:53.677478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:06:53.677817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:06:53.678262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:06:53.678570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:06:53.678807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:06:53.678965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:06:53.679079Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:06:53.679297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:06:53.679600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:06:53.679884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:06:53.680107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:06:53.680406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:06:53.713251Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:06:53.713583Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:06:53.713651Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:06:53.713805Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:06:53.713974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:06:53.714050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:06:53.714090Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:06:53.714218Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:06:53.714285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:06:53.714327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:06:53.714366Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:06:53.714523Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:06:53.714587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:06:53.714626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:06:53.714656Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:06:53.714834Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:06:53.714898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:06:53.714957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:06:53.714996Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:06:53.715070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:06:53.715104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:06:53.715146Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:06:53.715215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:06:53.715254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:06:53.715288Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:06:53.715669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=57; 2025-03-28T09:06:53.715766Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=42; 2025-03-28T09:06:53.715852Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-03-28T09:06:53.715933Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-03-28T09:06:53.716086Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:06:53.716159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:06:53.716195Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:06:53.716404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:06:53.716450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:06:53.716481Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:06:53.716636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:06:53.716689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:06:53.716723Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:06:53.716896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:06:53.716935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:06:53.716969Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:06:53.717121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:06:53.717168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:06:53.717212Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};;scan_step_idx=2; 2025-03-28T09:06:55.340087Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=ASSEMBLER::LAST_PK;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};;scan_step_idx=3; 2025-03-28T09:06:55.340484Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=SNAPSHOT;details={};;scan_step_idx=4; 2025-03-28T09:06:55.340716Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=BUILD_STAGE_RESULT;details={};;scan_step_idx=5; 2025-03-28T09:06:55.340920Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-28T09:06:55.340953Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-03-28T09:06:55.340984Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=0; 2025-03-28T09:06:55.341030Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=0;interval_id=6;memory=8391908;count=2; 2025-03-28T09:06:55.341346Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=0; 2025-03-28T09:06:55.341784Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=source.cpp:50;event=source_ready;intervals_count=1;source_idx=0; 2025-03-28T09:06:55.341893Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:06:55.341939Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-28T09:06:55.341971Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:06:55.342164Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-28T09:06:55.342202Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-28T09:06:55.342246Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=6; 2025-03-28T09:06:55.342286Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=0;merger=0;interval_id=6; 2025-03-28T09:06:55.342332Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-28T09:06:55.342416Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:06:55.342536Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:06:55.342889Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:06:55.343005Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:06:55.343130Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:06:55.343172Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:430:2448] finished for tablet 9437184 2025-03-28T09:06:55.343637Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:426:2444];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.005},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_ProduceResults","f_Finish","l_task_result"],"t":0.018},{"events":["l_ack","l_processing","l_Finish"],"t":0.019}],"full":{"a":1743152815324175,"name":"_full_task","f":1743152815324175,"d_finished":0,"c":0,"l":1743152815343244,"d":19069},"events":[{"name":"bootstrap","f":1743152815324542,"d_finished":5116,"c":1,"l":1743152815329658,"d":5116},{"a":1743152815342864,"name":"ack","f":1743152815342864,"d_finished":0,"c":0,"l":1743152815343244,"d":380},{"a":1743152815342843,"name":"processing","f":1743152815331295,"d_finished":3782,"c":10,"l":1743152815342612,"d":4183},{"name":"ProduceResults","f":1743152815327599,"d_finished":2072,"c":12,"l":1743152815343157,"d":2072},{"a":1743152815343160,"name":"Finish","f":1743152815343160,"d_finished":0,"c":0,"l":1743152815343244,"d":84},{"name":"task_result","f":1743152815331316,"d_finished":3594,"c":10,"l":1743152815342610,"d":3594}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:06:55.343727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:426:2444];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:06:55.344171Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:426:2444];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.005},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["f_ack","l_ProduceResults","f_Finish","l_task_result"],"t":0.018},{"events":["l_ack","l_processing","l_Finish"],"t":0.019}],"full":{"a":1743152815324175,"name":"_full_task","f":1743152815324175,"d_finished":0,"c":0,"l":1743152815343792,"d":19617},"events":[{"name":"bootstrap","f":1743152815324542,"d_finished":5116,"c":1,"l":1743152815329658,"d":5116},{"a":1743152815342864,"name":"ack","f":1743152815342864,"d_finished":0,"c":0,"l":1743152815343792,"d":928},{"a":1743152815342843,"name":"processing","f":1743152815331295,"d_finished":3782,"c":10,"l":1743152815342612,"d":4731},{"name":"ProduceResults","f":1743152815327599,"d_finished":2072,"c":12,"l":1743152815343157,"d":2072},{"a":1743152815343160,"name":"Finish","f":1743152815343160,"d_finished":0,"c":0,"l":1743152815343792,"d":632},{"name":"task_result","f":1743152815331316,"d_finished":3594,"c":10,"l":1743152815342610,"d":3594}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:06:55.344311Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:06:55.323515Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=1;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=2812;inserted_portions_bytes=0;committed_portions_bytes=1384;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4196;selected_rows=0; 2025-03-28T09:06:55.344381Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:06:55.344737Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:430:2448];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 >> DataShardReadIterator::ShouldReadKeyCellVec [GOOD] >> DataShardReadIterator::ShouldReadKeyArrow >> TSequence::AlterSequence [GOOD] >> 
TSequence::AlterTableSetDefaultFromSequence >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 >> DataStreams::TestReservedResourcesMetering [GOOD] >> DataStreams::TestReservedStorageMetering >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: [1:108:2140] 2025-03-28T09:06:47.142786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:06:47.158884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:06:47.158989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:06:47.159029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:06:47.169017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:06:47.169101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:06:47.169212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:06:47.169302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:06:47.194789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:06:47.603223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:47.603302Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:47.651843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:06:47.652031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:06:47.675467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:06:47.742276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:06:47.742559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:06:47.751521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:06:47.751911Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:06:47.790625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:06:47.846089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:06:47.846171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:06:47.846286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:06:47.846341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:06:47.846386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:06:47.860349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:06:47.911787Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-03-28T09:06:48.053530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:06:48.062823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:48.074968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:06:48.098072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:06:48.098223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:48.140203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:06:48.170964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:06:48.189590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:48.189749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:06:48.189805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:06:48.189878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:06:48.197795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:48.197871Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 
ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:06:48.197929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:06:48.210450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:48.210506Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:48.210556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:06:48.210608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:06:48.214111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:06:48.223486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:06:48.223701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:06:48.229241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:06:48.229420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:06:48.229468Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:06:48.229761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:06:48.229831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:06:48.239760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:06:48.239964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:06:48.244280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:06:48.244342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:06:48.244535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:06:48.244612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 
2025-03-28T09:06:48.245046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:48.245102Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:06:48.245196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:06:48.245224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:06:48.245267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:06:48.245320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:06:48.245368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:06:48.245406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:06:48.245441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:06:48.245466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:06:48.245536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:06:48.245572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:06:48.245617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:06:48.258913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:06:48.259038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:06:48.259098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
peration in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:06:58.450933Z node 7 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-28T09:06:58.451090Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:06:58.451182Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2025-03-28T09:06:58.451215Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:06:58.452268Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:06:58.452326Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:2 2025-03-28T09:06:58.452423Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:334:2313] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-03-28T09:06:58.452731Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:06:58.452766Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 102:0 2025-03-28T09:06:58.452824Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:338:2316] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2025-03-28T09:06:58.453266Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:125:2151], Recipient [7:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:06:58.453305Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:06:58.453368Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-03-28T09:06:58.453422Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:06:58.453723Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:06:58.453864Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:06:58.453899Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-03-28T09:06:58.453948Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-03-28T09:06:58.453993Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-03-28T09:06:58.454032Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-03-28T09:06:58.454077Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-03-28T09:06:58.454329Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:06:58.454361Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:06:58.454677Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:125:2151], Recipient [7:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:06:58.463860Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:06:58.463967Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:06:58.464015Z node 7 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:06:58.464301Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:06:58.464429Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:06:58.464469Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-03-28T09:06:58.464500Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-03-28T09:06:58.464539Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-03-28T09:06:58.464565Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-03-28T09:06:58.464597Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-03-28T09:06:58.464680Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:409:2367] message: TxId: 102 2025-03-28T09:06:58.464756Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-03-28T09:06:58.464809Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:06:58.464851Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:06:58.464971Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:06:58.465012Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-03-28T09:06:58.465034Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-03-28T09:06:58.465064Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:06:58.465086Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-03-28T09:06:58.465105Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-03-28T09:06:58.465143Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:06:58.465170Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-03-28T09:06:58.465191Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-03-28T09:06:58.465238Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-03-28T09:06:58.470977Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:06:58.471030Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:06:58.471277Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435083, Sender [7:125:2151], Recipient [7:125:2151]: 
NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2025-03-28T09:06:58.471323Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2025-03-28T09:06:58.471415Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:06:58.471466Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-03-28T09:06:58.471563Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:06:58.472211Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:06:58.472245Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:06:58.472295Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:06:58.472355Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:06:58.472399Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:06:58.472418Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:06:58.479471Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:06:58.479528Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:06:58.487647Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:06:58.487786Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:06:58.487875Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:409:2367] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2025-03-28T09:06:58.488032Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:06:58.488081Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:516:2467] 2025-03-28T09:06:58.488283Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:06:58.488440Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:518:2469], Recipient [7:125:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:06:58.488479Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:06:58.488508Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-03-28T09:06:58.488993Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:596:2546], Recipient [7:125:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-28T09:06:58.489067Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-28T09:06:58.489189Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:06:58.489430Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 245us result status StatusPathDoesNotExist 2025-03-28T09:06:58.489613Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000003, drop txId: 102" Path: "/MyRoot/Table" PathId: 2 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |78.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica >> TSequence::AlterTableSetDefaultFromSequence [GOOD] >> KqpNewEngine::DeferredEffects [GOOD] >> KqpNewEngine::Delete+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 >> TConsoleTests::TestAttributes [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:06:47.142780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:06:47.159273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:06:47.159354Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:06:47.159426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:06:47.169794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:06:47.169875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:06:47.170001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:06:47.170099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:06:47.194991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:06:47.605351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:47.605417Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:47.648373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:06:47.660272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:06:47.675972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:06:47.741753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:06:47.742015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:06:47.751550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:06:47.751876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:06:47.783502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:06:47.845051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:06:47.845189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:06:47.859402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:06:47.859527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:06:47.859653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:06:47.859810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:06:47.893756Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:06:48.173604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:06:48.174053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:48.175497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:06:48.175953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:06:48.176092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:48.184174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:06:48.184422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:06:48.189970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:48.190131Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:06:48.190209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:06:48.190249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:06:48.197300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:48.197385Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:06:48.197427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:06:48.209857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:48.209926Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:48.209966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:06:48.210033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:06:48.214069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:06:48.217398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:06:48.217626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 
2025-03-28T09:06:48.229435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:06:48.229631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:06:48.229684Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:06:48.229957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:06:48.230015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:06:48.240522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:06:48.240699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:06:48.250410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:06:48.250504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:06:48.250800Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:06:48.250860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:06:48.251377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:06:48.251444Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:06:48.251586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:06:48.251639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:06:48.251676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:06:48.251701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:06:48.251728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:06:48.251760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:06:48.251796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:06:48.251831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:06:48.251912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:06:48.251967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:06:48.252030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 
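
The trace above walks a single schemeshard operation through its per-operation state machine: "Change state for txid 1:0 2 -> 3", then "3 -> 128", then, once the coordinator has planned the transaction at step 5000001, "128 -> 240" and "Part operation is done id#1:0 progress is 1/1". A minimal sketch of that progression in Python, using only the numeric states and the ordering visible in this trace; the state names in the comments are inferred from the surrounding log text, and the real logic is C++ inside schemeshard and far richer:

    # Conceptual sketch only: state numbers and order are taken from the
    # trace above; names are assumptions based on nearby log messages.
    TRANSITIONS = {
        2: 3,      # CreateParts -> ConfigureParts ("no shards to create")
        3: 128,    # ConfigureParts -> Propose (waits for coordinator plan)
        128: 240,  # Propose -> Done (TEvOperationPlan received)
    }

    def run_operation(tx_id: int, plan_step: int) -> None:
        state = 2
        while state in TRANSITIONS:
            nxt = TRANSITIONS[state]
            if state == 128:
                # In the trace the coordinator plans the tx at step 5000001
                # before the 128 -> 240 transition fires.
                print(f"txid {tx_id}: planned at step {plan_step}")
            print(f"Change state for txid {tx_id}:0 {state} -> {nxt}")
            state = nxt
        print(f"Part operation is done id#{tx_id}:0 progress is 1/1")

    run_operation(1, 5000001)
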
2025-03-28T09:06:48.258738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:06:48.258853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:06:48.258891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-03-28T09:07:00.375031Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 0/1, is published: true 2025-03-28T09:07:00.375071Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:07:00.375985Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269550080, Sender [7:979:2925], Recipient [7:125:2151]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1643 } } 2025-03-28T09:07:00.376037Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransactionResult 2025-03-28T09:07:00.376115Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1643 } } 2025-03-28T09:07:00.376162Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-03-28T09:07:00.376331Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1643 } } 2025-03-28T09:07:00.376460Z node 7 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 114 Step: 5000014 OrderId: 114 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1643 } } 2025-03-28T09:07:00.376511Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:07:00.377808Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:1038:2976], Recipient [7:125:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:00.377855Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:00.377885Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T09:07:00.378183Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [7:979:2925], Recipient [7:125:2151]: 
NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 979 RawX2: 30064773997 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-03-28T09:07:00.378234Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-03-28T09:07:00.378367Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 979 RawX2: 30064773997 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-03-28T09:07:00.378424Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2025-03-28T09:07:00.378585Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 979 RawX2: 30064773997 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-03-28T09:07:00.378651Z node 7 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:07:00.380923Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 979 RawX2: 30064773997 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2025-03-28T09:07:00.381033Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:07:00.381088Z node 7 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2025-03-28T09:07:00.381161Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-03-28T09:07:00.381242Z node 7 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 114:0 129 -> 240 2025-03-28T09:07:00.381492Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:07:00.384857Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:07:00.387595Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-03-28T09:07:00.387721Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:07:00.388369Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2025-03-28T09:07:00.388409Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:07:00.388609Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-03-28T09:07:00.388644Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:07:00.390190Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2025-03-28T09:07:00.390260Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:07:00.390342Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send 
for 114:0 2025-03-28T09:07:00.390531Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:979:2925] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2025-03-28T09:07:00.391014Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:125:2151], Recipient [7:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:07:00.391081Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:07:00.391150Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2025-03-28T09:07:00.391212Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 114:0 ProgressState 2025-03-28T09:07:00.391370Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:07:00.391426Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#114:0 progress is 1/1 2025-03-28T09:07:00.391490Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-03-28T09:07:00.391551Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#114:0 progress is 1/1 2025-03-28T09:07:00.391600Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-03-28T09:07:00.391651Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2025-03-28T09:07:00.391750Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:384:2352] message: TxId: 114 2025-03-28T09:07:00.391819Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2025-03-28T09:07:00.391880Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 114:0 2025-03-28T09:07:00.391931Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 114:0 2025-03-28T09:07:00.392102Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-03-28T09:07:00.395659Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:07:00.395816Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:384:2352] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2025-03-28T09:07:00.396030Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2025-03-28T09:07:00.396105Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1005:2943] 2025-03-28T09:07:00.396373Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:1007:2945], Recipient [7:125:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:07:00.396432Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:07:00.396479Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2025-03-28T09:07:00.397732Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:1049:2987], Recipient [7:125:2151]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2025-03-28T09:07:00.397798Z node 7 
:FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:07:00.401137Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:07:00.401429Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at schemeshard: 72057594046678944 2025-03-28T09:07:00.401951Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2025-03-28T09:07:00.402235Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:07:00.406718Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:07:00.406926Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2025-03-28T09:07:00.406996Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115 |78.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |78.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |78.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |78.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataStreams::TestPutRecordsWithRead [GOOD] >> DataStreams::TestPutRecordsCornerCases >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] >> DataStreams::ChangeBetweenRetentionModes [GOOD] >> DataStreams::TestCreateExistingStream >> KqpPg::ValuesInsert+useSink [GOOD] >> KqpPg::ValuesInsert-useSink >> KqpAgg::AggWithSelfLookup [GOOD] >> KqpAgg::AggWithSelfLookup2 >> DataStreams::TestStreamTimeRetention [GOOD] >> DataStreams::TestUnsupported >> DataShardReadIterator::ShouldReadRangeArrow [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec >> KqpNewEngine::BlindWriteParameters [GOOD] >> KqpNewEngine::BlindWriteListParameter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-28T09:05:34.035523Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:05:34.035642Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-28T09:05:34.058349Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:05:34.078046Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:05:34.079141Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-28T09:05:34.081600Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-28T09:05:34.083448Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-28T09:05:34.085390Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-03-28T09:05:34.101250Z node 1 :PERSQUEUE INFO: new Cookie default|4a245468-480997c8-86ae1b6a-c686bdd7_0 
generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:05:34.111105Z node 1 :PERSQUEUE INFO: new Cookie default|1c02d61a-45466a0f-585c85ad-81ec21b1_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:05:34.145451Z node 1 :PERSQUEUE INFO: new Cookie default|880fef1f-9e27f63d-a69f8581-38f2ef02_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:05:34.160957Z node 1 :PERSQUEUE INFO: new Cookie default|f9fce126-4a6a0253-754f240d-5acf994e_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:05:34.173832Z node 1 :PERSQUEUE INFO: new Cookie default|d9294188-f8740632-23875e00-172bfb6_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:05:34.183258Z node 1 :PERSQUEUE INFO: new Cookie default|73103d98-e2c96fe-c1c7d5af-4cc2b887_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-03-28T09:05:34.834292Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:05:34.834395Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:182:2057] recipient: [2:181:2193] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:183:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:185:2057] recipient: [2:181:2193] 2025-03-28T09:05:34.885320Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:05:34.885404Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! !Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! 
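
The "!Reboot ... on event ... !" / "rebooted! tablet resolver refreshed!" markers come from a test harness that restarts the PQ tablet when a chosen event type is observed, then verifies the new tablet generation recovers from persisted state. A hedged sketch of that restart-injection pattern in plain Python, not the actual NKikimr test framework; the event names are the ones quoted in the log, and the toy tablet here pretends every write is durable:

    # Conceptual restart-injection harness; illustrative only.
    class ToyTablet:
        def __init__(self, snapshot=None):
            self.state = dict(snapshot or {})  # only persisted data survives
            self.generation = self.state.get("gen", 0) + 1
            self.state["gen"] = self.generation

        def handle(self, event, payload):
            self.state[event] = payload        # pretend every write persists

        def persisted_state(self):
            return dict(self.state)

    def deliver_with_reboot(tablet, events, trigger):
        fired = False
        for ev, payload in events:
            if ev == trigger and not fired:
                fired = True
                print(f"!Reboot on event {ev} !")
                tablet = ToyTablet(tablet.persisted_state())  # new generation
                print("rebooted! tablet resolver refreshed!")
                print(f"new actor generation is {tablet.generation}")
            tablet.handle(ev, payload)
        return tablet

    t = deliver_with_reboot(
        ToyTablet(),
        [("TEvUpdateConfig", 1), ("TEvKeyValue.TEvRequest", 2)],
        trigger="TEvKeyValue.TEvRequest",
    )
    assert t.state["TEvUpdateConfig"] == 1  # state survived the reboot
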
new actor is[2:184:2194] Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:258:2057] recipient: [2:14:2061] 2025-03-28T09:05:36.656293Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:05:36.657461Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-28T09:05:36.658541Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:264:2256] 2025-03-28T09:05:36.665570Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:264:2256] 2025-03-28T09:05:36.680162Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:265:2257] 2025-03-28T09:05:36.682183Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:265:2257] 2025-03-28T09:05:36.710835Z node 2 :PERSQUEUE INFO: new Cookie default|2f774b79-d70654ca-69ddec03-d165dd05_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:05:36.722891Z node 2 :PERSQUEUE INFO: new Cookie default|4e5b849c-8f7f7e75-7e48507e-956ae385_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:05:36.796389Z node 2 :PERSQUEUE INFO: new Cookie default|274817d6-9e49c994-e58a1033-d08d358c_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:05:36.825202Z node 2 :PERSQUEUE INFO: new Cookie default|203e0939-1484044e-b3556d09-7a7c2661_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:05:36.846607Z node 2 :PERSQUEUE INFO: new Cookie default|dc1434dd-6ea2502-e403e99e-5b10471a_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:05:36.875436Z node 2 :PERSQUEUE INFO: new Cookie default|ed3995d8-57a7ba92-e5822626-12c9bb2a_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-03-28T09:05:37.657086Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:05:37.657162Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] 
Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:107:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:181:2057] recipient: [3:99:2134] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:184:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:185:2057] recipient: [3:183:2195] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:187:2057] recipient: [3:183:2195] 2025-03-28T09:05:37.770243Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:05:37.770327Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [3:107:2139]) rebooted! !Reboot 72057594037927937 (actor [3:107:2139]) tablet resolver refreshed! new actor is[3:186:2196] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:260:2057] recipient: [3:14:2061] 2025-03-28T09:05:39.505186Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:05:39.505971Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-03-28T09:05:39.506878Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:266:2258] 2025-03-28T09:05:39.509736Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [3:266:2258] 2025-03-28T09:05:39.511656Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:267:2259] 2025-03-28T09:05:39.520318Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [3:267:2259] 2025-03-28T09:05:39.544046Z node 3 :PERSQUEUE INFO: new Cookie default|f2b8b983-b1d1870c-74b78b7-4 ... 
58.519972Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 47 actor [47:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 47 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 47 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 47 Important: false } 2025-03-28T09:06:58.521096Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [47:185:2198] 2025-03-28T09:06:58.524288Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [47:185:2198] 2025-03-28T09:06:58.526471Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [47:186:2199] 2025-03-28T09:06:58.528800Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [47:186:2199] 2025-03-28T09:06:58.539842Z node 47 :PERSQUEUE INFO: new Cookie default|c1435023-9ca79c30-a8b0477-4d02d44b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:06:58.546817Z node 47 :PERSQUEUE INFO: new Cookie default|4a7dbdf6-170ad9df-5777e014-167f3ba1_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:06:58.585493Z node 47 :PERSQUEUE INFO: new Cookie default|7268266d-5498f507-dcff1e91-ba1dd1f8_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:06:58.596912Z node 47 :PERSQUEUE INFO: new Cookie default|ac7c5e48-48ad6e6f-2d57a4c3-2d0836e_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:06:58.607544Z node 47 :PERSQUEUE INFO: new Cookie default|e460c8ce-ba428778-5cefaa70-ad57bce5_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:06:58.618039Z node 47 :PERSQUEUE INFO: new Cookie default|fe991d34-deba8d27-2f56e4d4-efaa5daa_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [47:107:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [47:107:2139] sender: [47:284:2057] recipient: [47:99:2134] Leader for TabletID 72057594037927937 is [47:107:2139] sender: [47:287:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [47:107:2139] sender: [47:288:2057] recipient: [47:286:2281] Leader for TabletID 72057594037927937 is [47:289:2282] sender: [47:290:2057] recipient: [47:286:2281] 2025-03-28T09:06:58.681271Z node 47 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:06:58.681349Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:06:58.682559Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [47:338:2323] 2025-03-28T09:06:58.687756Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [47:339:2324] 2025-03-28T09:06:58.706831Z node 47 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:06:58.706916Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [47:339:2324] 2025-03-28T09:06:58.709141Z node 47 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:06:58.709232Z node 47 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [47:338:2323] !Reboot 72057594037927937 (actor [47:107:2139]) rebooted! !Reboot 72057594037927937 (actor [47:107:2139]) tablet resolver refreshed! 
new actor is[47:289:2282] Leader for TabletID 72057594037927937 is [47:289:2282] sender: [47:386:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:103:2057] recipient: [48:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:103:2057] recipient: [48:101:2135] Leader for TabletID 72057594037927937 is [48:107:2139] sender: [48:108:2057] recipient: [48:101:2135] 2025-03-28T09:07:01.110080Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:07:01.110170Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:149:2057] recipient: [48:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:149:2057] recipient: [48:147:2170] Leader for TabletID 72057594037927938 is [48:153:2174] sender: [48:154:2057] recipient: [48:147:2170] Leader for TabletID 72057594037927937 is [48:107:2139] sender: [48:177:2057] recipient: [48:14:2061] 2025-03-28T09:07:01.158585Z node 48 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:07:01.159706Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 48 actor [48:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 48 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 48 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 48 Important: false } 2025-03-28T09:07:01.160758Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [48:183:2196] 2025-03-28T09:07:01.168378Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [48:183:2196] 2025-03-28T09:07:01.170630Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [48:184:2197] 2025-03-28T09:07:01.174567Z node 48 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [48:184:2197] 2025-03-28T09:07:01.187110Z node 48 :PERSQUEUE INFO: new Cookie default|d47a25a7-cc20fe2-e9c4f464-bfc5ffb5_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:07:01.199970Z node 48 :PERSQUEUE INFO: new Cookie default|f8cacf53-28be43e8-8fc4cec-30f669ea_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:07:01.295408Z node 48 :PERSQUEUE INFO: new Cookie default|b529c521-79764c4b-5eec32bc-eafc4e54_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:07:01.336417Z node 48 :PERSQUEUE INFO: new Cookie default|395af7a7-1e9c06ba-86ce63ae-88841069_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:07:01.355241Z node 48 :PERSQUEUE INFO: new Cookie default|6fad47a8-dabc39ab-c3aba6b-ec7fdc48_4 generated 
for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:07:01.385953Z node 48 :PERSQUEUE INFO: new Cookie default|260f6630-eda4aede-bfa1d80e-f64cb396_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:103:2057] recipient: [49:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:103:2057] recipient: [49:101:2135] Leader for TabletID 72057594037927937 is [49:107:2139] sender: [49:108:2057] recipient: [49:101:2135] 2025-03-28T09:07:02.485583Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:07:02.485664Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:149:2057] recipient: [49:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:149:2057] recipient: [49:147:2170] Leader for TabletID 72057594037927938 is [49:153:2174] sender: [49:154:2057] recipient: [49:147:2170] Leader for TabletID 72057594037927937 is [49:107:2139] sender: [49:179:2057] recipient: [49:14:2061] 2025-03-28T09:07:02.518261Z node 49 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:07:02.519406Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 49 actor [49:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 49 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 49 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 49 Important: false } 2025-03-28T09:07:02.520447Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [49:185:2198] 2025-03-28T09:07:02.531886Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [49:185:2198] 2025-03-28T09:07:02.534397Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [49:186:2199] 2025-03-28T09:07:02.539162Z node 49 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [49:186:2199] 2025-03-28T09:07:02.555245Z node 49 :PERSQUEUE INFO: new Cookie default|ff263557-86f56f30-65abef47-638f976b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:07:02.562621Z node 49 :PERSQUEUE INFO: new Cookie default|38614e8-b0804135-1641c00b-f5c449fa_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:07:02.632645Z node 49 :PERSQUEUE INFO: new Cookie default|54ddb669-ae30bf22-25272132-54e4cbae_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:07:02.645965Z node 49 :PERSQUEUE INFO: new Cookie default|f9e71c2f-a0d47318-b97d2fda-6f7c4f73_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 
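
Each "new Cookie default|..." line registers a write session for source id "default", while the partition config in this test both caps retained source ids (SourceIdMaxCounts: 3) and ages them out (SourceIdLifetimeSeconds: 3600). A rough Python sketch of such a bounded registry; the two limits are taken from the config in the log, but the oldest-first eviction order is an assumption for illustration, and the real PQ code is C++ that persists this map in the tablet's local KV store:

    import time
    from collections import OrderedDict

    # Conceptual per-partition source-id registry with the two limits from
    # the log's PartitionConfig; eviction details are assumptions.
    class SourceIdRegistry:
        def __init__(self, max_counts=3, lifetime_seconds=3600):
            self.max_counts = max_counts
            self.lifetime = lifetime_seconds
            self.ids = OrderedDict()  # source_id -> last registration time

        def register(self, source_id, now=None):
            now = time.time() if now is None else now
            self.ids.pop(source_id, None)
            self.ids[source_id] = now  # most recent entry goes to the end
            # age-based expiry: drop entries older than the lifetime
            while self.ids and now - next(iter(self.ids.values())) > self.lifetime:
                self.ids.popitem(last=False)
            # count-based cap: drop the oldest beyond max_counts
            while len(self.ids) > self.max_counts:
                self.ids.popitem(last=False)

    r = SourceIdRegistry()
    for i in range(6):  # six write cookies, like the test above
        r.register(f"src-{i}", now=i)
    assert list(r.ids) == ["src-3", "src-4", "src-5"]  # only 3 retained
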
2025-03-28T09:07:02.667802Z node 49 :PERSQUEUE INFO: new Cookie default|28eab628-34a8db76-7f36a4f-115a1c49_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:07:02.685792Z node 49 :PERSQUEUE INFO: new Cookie default|54a4257-ac3598dc-36482317-b2dc1471_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags >> KqpSinkLocks::InvalidateOlapOnCommit [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant >> DataShardReadIterator::ShouldReadKeyArrow [GOOD] >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |78.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] |78.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |78.7%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> TBackupCollectionTests::CreateAbsolutePath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::InvalidateOlapOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 62376, MsgBus: 31919 2025-03-28T09:06:27.788905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784214627050725:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:27.811847Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00165f/r3tmp/tmpmfZBcL/pdisk_1.dat 2025-03-28T09:06:28.331070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:28.331196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:28.333820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62376, node 1 2025-03-28T09:06:28.364962Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:28.429624Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken 
or outdated, will use file: (empty maybe) 2025-03-28T09:06:28.429655Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:28.429666Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:28.429770Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31919 TClient is connected to server localhost:31919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:29.027119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:29.056378Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:31.168266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784231806920548:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:31.168352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784231806920572:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:31.168478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:31.173086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:06:31.186203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784231806920582:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:06:31.258980Z node 1 :TX_PROXY ERROR: Actor# [1:7486784231806920634:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:31.591783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:06:31.750698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:06:32.912995Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784214627050725:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:32.917064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:06:32.983585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:34.785793Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-03-28T09:06:34.785973Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-28T09:06:34.786101Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-28T09:06:34.786248Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486784244691831232:2971], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7486784244691830937:2971]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7486784244691831232:2971].{
<main>: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-28T09:06:34.786839Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486784244691831214:2971], SessionActorId: [1:7486784244691830937:2971], statusCode=ABORTED. Issue=
<main>: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
<main>: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7486784244691830937:2971]. isRollback=0 2025-03-28T09:06:34.787075Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTNkNzEzOTctMjM5NmNkMDctMmYwOTA1NTUtYWMyOTUxNjU=, ActorId: [1:7486784244691830937:2971], ActorState: ExecuteState, TraceId: 01jqe01k4h1ky6dcezqgxnzvbw, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7486784244691831217:2971] from: [1:7486784244691831214:2971] 2025-03-28T09:06:34.787154Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486784244691831217:2971] TxId: 281474976710665. Ctx: { TraceId: 01jqe01k4h1ky6dcezqgxnzvbw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTNkNzEzOTctMjM5NmNkMDctMmYwOTA1NTUtYWMyOTUxNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
<main>: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
<main>: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-28T09:06:34.787361Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTNkNzEzOTctMjM5NmNkMDctMmYwOTA1NTUtYWMyOTUxNjU=, ActorId: [1:7486784244691830937:2971], ActorState: ExecuteState, TraceId: 01jqe01k4h1ky6dcezqgxnzvbw, Create QueryResponse for error on request, msg:
<main>: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
<main>: Error: Operation is aborting because locks are not valid, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 20032, MsgBus: 6680 2025-03-28T09:06:41.113196Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784275475970304:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:41.113374Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00165f/r3tmp/tmpzuccqX/pdisk_1.dat 2025-03-28T09:06:41.244072Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:41.271742Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:41.271832Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:41.274013Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20032, node 2 2025-03-28T09:06:41.410491Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:41.410516Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:41.410524Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:41.410644Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6680 TClient is connected to server localhost:6680 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:42.033798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:42.041089Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046 ... 
_id=[2:7486784314130682927:3371];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.785682Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7486784314130682877:3362];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.785828Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7486784314130682501:3311];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038014;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.785968Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7486784314130683253:3428];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.786110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7486784314130683226:3421];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.786259Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7486784314130683217:3420];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.786406Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7486784314130682844:3348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038081;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.786546Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[2:7486784314130682731:3318];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038091;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.786722Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7486784314130683166:3411];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.786906Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7486784314130683166:3411];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.787046Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7486784314130683171:3412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.787200Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7486784314130682741:3324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038090;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.787535Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7486784314130682741:3324];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038090;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.787859Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038018;self_id=[2:7486784314130683247:3427];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.792863Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7486784314130682775:3335];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.794461Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7486784314130682947:3379];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.794646Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7486784314130682691:3313];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.795063Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7486784314130683195:3416];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.795241Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7486784314130683188:3415];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.795460Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;self_id=[2:7486784314130682780:3336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038080;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.796679Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7486784314130682913:3366];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.796839Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7486784314130683091:3409];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.796972Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7486784314130682983:3382];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.797104Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7486784314130683076:3405];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.797235Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7486784314130683164:3410];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.797405Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7486784314130682700:3315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.797548Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038091;self_id=[2:7486784314130682731:3318];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038091;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.797674Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7486784314130683230:3423];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.797802Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7486784314130683184:3414];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.799019Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7486784314130683197:3417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.799234Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7486784314130682929:3372];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038070;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.799377Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7486784314130682877:3362];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.799499Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7486784314130682501:3311];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038014;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.799615Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7486784314130683253:3428];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.799671Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[2:7486784314130682927:3371];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.799759Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7486784314130683226:3421];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.799871Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7486784314130683217:3420];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.799955Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7486784314130682844:3348];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038081;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.800024Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7486784314130683171:3412];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.800462Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038018;self_id=[2:7486784314130683247:3427];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.800616Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7486784314130683239:3425];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.800742Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[2:7486784314130682937:3377];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038068;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.800877Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;self_id=[2:7486784314130682780:3336];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038080;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:06:56.805186Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7486784314130683197:3417];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TColumnShardTestReadWrite::WriteRead [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteRead [GOOD] Test command err: 2025-03-28T09:06:50.179472Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:06:50.341998Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:06:50.369534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:06:50.369894Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:06:50.378174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:06:50.378410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:06:50.378654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:06:50.378838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:06:50.378949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:06:50.379086Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:06:50.379212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:06:50.379332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:06:50.379465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:06:50.379580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:06:50.379684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:06:50.379798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:06:50.411140Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:06:50.411427Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:06:50.411483Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:06:50.411658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:06:50.411872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:06:50.411953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:06:50.411999Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:06:50.412100Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:06:50.412166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:06:50.412213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:06:50.412244Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:06:50.412450Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:06:50.412513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:06:50.412552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:06:50.412586Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:06:50.412731Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:06:50.412788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:06:50.412830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:06:50.412860Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:06:50.412947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:06:50.412999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:06:50.413040Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:06:50.413088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:06:50.413147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:06:50.413185Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:06:50.413554Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-03-28T09:06:50.413648Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-03-28T09:06:50.413747Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-03-28T09:06:50.413838Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-03-28T09:06:50.414009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:06:50.414081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:06:50.414117Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:06:50.414291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:06:50.414333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:06:50.414361Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:06:50.414526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:06:50.414573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:06:50.414605Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:06:50.414859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:06:50.414904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:06:50.414936Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:06:50.496317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:06:50.496396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:06:50.496445Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:07:06.184679Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:07:06.184819Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-03-28T09:07:06.184921Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-03-28T09:07:06.185082Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:432:2447];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-03-28T09:07:06.185234Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:07:06.185388Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:07:06.185540Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:07:06.185792Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:07:06.284812Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:07:06.285023Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:07:06.285084Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:433:2448] finished for tablet 9437184 2025-03-28T09:07:06.285613Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:432:2447];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.094},{"events":["f_processing","f_task_result"],"t":0.095},{"events":["f_ack","l_task_result"],"t":0.407},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.508}],"full":{"a":1743152825776933,"name":"_full_task","f":1743152825776933,"d_finished":0,"c":0,"l":1743152826285156,"d":508223},"events":[{"name":"bootstrap","f":1743152825777219,"d_finished":94206,"c":1,"l":1743152825871425,"d":94206},{"a":1743152826185763,"name":"ack","f":1743152826184347,"d_finished":1228,"c":1,"l":1743152826185575,"d":100621},{"a":1743152826185750,"name":"processing","f":1743152825872785,"d_finished":299802,"c":10,"l":1743152826185577,"d":399208},{"name":"ProduceResults","f":1743152825779177,"d_finished":105333,"c":13,"l":1743152826285059,"d":105333},{"a":1743152826285067,"name":"Finish","f":1743152826285067,"d_finished":0,"c":0,"l":1743152826285156,"d":89},{"name":"task_result","f":1743152825872809,"d_finished":298383,"c":9,"l":1743152826184075,"d":298383}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:07:06.285727Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:432:2447];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:07:06.286217Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:432:2447];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.094},{"events":["f_processing","f_task_result"],"t":0.095},{"events":["f_ack","l_task_result"],"t":0.407},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.508}],"full":{"a":1743152825776933,"name":"_full_task","f":1743152825776933,"d_finished":0,"c":0,"l":1743152826285790,"d":508857},"events":[{"name":"bootstrap","f":1743152825777219,"d_finished":94206,"c":1,"l":1743152825871425,"d":94206},{"a":1743152826185763,"name":"ack","f":1743152826184347,"d_finished":1228,"c":1,"l":1743152826185575,"d":101255},{"a":1743152826185750,"name":"processing","f":1743152825872785,"d_finished":299802,"c":10,"l":1743152826185577,"d":399842},{"name":"ProduceResults","f":1743152825779177,"d_finished":105333,"c":13,"l":1743152826285059,"d":105333},{"a":1743152826285067,"name":"Finish","f":1743152826285067,"d_finished":0,"c":0,"l":1743152826285790,"d":723},{"name":"task_result","f":1743152825872809,"d_finished":298383,"c":9,"l":1743152826184075,"d":298383}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:07:06.286310Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:07:05.776176Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-03-28T09:07:06.286354Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:07:06.288518Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TBackupCollectionTests::CreateAbsolutePath [GOOD] >> TBackupCollectionTests::Create >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingInRuntimeNodes-EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/2909/00178c/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk4 Trying to start YDB, gRPC: 1254, MsgBus: 14575 2025-03-28T09:05:32.439474Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783978650983852:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:32.446754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00178c/r3tmp/tmpQwdRYb/pdisk_1.dat 2025-03-28T09:05:33.026482Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:33.034018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:33.034139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:33.036621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1254, node 1 2025-03-28T09:05:33.119756Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:33.119779Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:33.119786Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:33.119907Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14575 TClient is connected to server localhost:14575 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:33.838858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:33.859657Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:05:33.879412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:05:34.085764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:05:34.326863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:34.421585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:36.710514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783995830854736:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:36.710704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:37.077275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:05:37.151962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:05:37.213171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:05:37.275747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:05:37.346624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:05:37.437671Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783978650983852:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:37.437735Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:37.525378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:05:37.650050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784000125822555:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:37.650185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:37.650463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784000125822560:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:37.655628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:05:37.677041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784000125822562:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:05:37.770181Z node 1 :TX_PROXY ERROR: Actor# [1:7486784000125822621:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:48.024175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:05:48.024207Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '779) '('"_id" '"bbba0a93-e45f2d15-349367fc-b6fdd026") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '677) '('"_id" '"eebab943-2f72913b-ad7ff534-42117844") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '689) '('"_id" '"1238c655-9974e800-88292aac-969af9f9")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) >> DataStreams::TestReservedStorageMetering [GOOD] >> DataStreams::TestReservedConsumersMetering >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] >> TBackupCollectionTests::Create [GOOD] >> TBackupCollectionTests::CreateTwice >> TBackupCollectionTests::HiddenByFeatureFlag >> DataStreams::TestUnsupported [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot >> TBackupCollectionTests::CreateTwice [GOOD] >> TBackupCollectionTests::BackupAbsentCollection >> KqpSinkTx::OlapInteractive [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 27196, MsgBus: 22856 2025-03-28T09:06:28.612111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:06:28.612530Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:06:28.612608Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001671/r3tmp/tmp5bD8oU/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27196, node 1 2025-03-28T09:06:29.086360Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:29.086406Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:29.086436Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:29.088739Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:29.092377Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:06:29.132407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:29.132529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:29.143967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22856 TClient is connected to server localhost:22856 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:33.838858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:33.859657Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:05:33.879412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:06:30.340396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:30.687266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:31.532009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1815:3409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:31.532248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:31.552336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:31.740902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:32.041633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:32.348167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:32.629551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:32.996270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:33.340030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2398:3856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:33.340184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:33.340604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2403:3861], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:33.350389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:33.523191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2405:3863], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:06:33.572475Z node 1 :TX_PROXY ERROR: Actor# [1:2470:3909] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:34.770745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:06:35.039078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:06:35.468005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63306, MsgBus: 27441 2025-03-28T09:06:41.312663Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:06:41.312972Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:06:41.313030Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001671/r3tmp/tmp8Ygb6N/pdisk_1.dat TServer::EnableGrpc on GrpcPort 63306, node 2 2025-03-28T09:06:41.790799Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:41.791796Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:41.791854Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:41.791893Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:41.792220Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:06:41.836496Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:41.836636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:41.848363Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27441 TClient is connected to server localhost:27441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:42.193151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:42.283957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 20 ... 
94046644480 2025-03-28T09:06:44.191876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:44.473963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:44.779680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:45.030993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:45.399778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:45.709403Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2395:3854], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:45.709537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:45.709892Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2400:3859], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:45.719182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:45.911283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2402:3861], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:06:45.969888Z node 2 :TX_PROXY ERROR: Actor# [2:2467:3907] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:47.252990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:06:47.589284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:06:48.033911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23482, MsgBus: 13371 2025-03-28T09:06:55.412034Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:06:55.412248Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:06:55.412374Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001671/r3tmp/tmpa8M6lE/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23482, node 3 2025-03-28T09:06:55.961485Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:55.971016Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:55.971112Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:55.971166Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:55.971697Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:06:56.016763Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:56.017467Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:56.032153Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13371 TClient is connected to server localhost:13371 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:56.565573Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:56.678720Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:57.079489Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:06:57.537948Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:57.833885Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:58.669389Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1824:3418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:58.669711Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:58.691132Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:58.872869Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:59.173446Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:59.521749Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:59.866610Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:07:00.205776Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:07:00.608309Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2403:3861], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:00.608479Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:00.608881Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2408:3866], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:00.618456Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:07:00.843937Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2410:3868], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:07:00.923919Z node 3 :TX_PROXY ERROR: Actor# [3:2473:3912] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:03.107176Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:07:03.428488Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:07:03.976550Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |78.7%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 >> TColumnShardTestReadWrite::RebootWriteRead >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestCellVec [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow >> TBackupCollectionTests::HiddenByFeatureFlag [GOOD] >> TBackupCollectionTests::DisallowedPath ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestUnsupported [GOOD] Test command err: 2025-03-28T09:06:47.993711Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784301800622292:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:47.993764Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00156e/r3tmp/tmpoKUlzi/pdisk_1.dat 2025-03-28T09:06:48.451143Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:48.456211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:48.456315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:48.464069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25175, node 1 2025-03-28T09:06:48.699542Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:48.699568Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:48.699578Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:48.699692Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31208 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:49.132600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:49.200979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:31208 2025-03-28T09:06:49.440960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:49.727701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:06:53.268597Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486784326303901916:2158];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:53.279985Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00156e/r3tmp/tmpxKnuXu/pdisk_1.dat 2025-03-28T09:06:53.590704Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:53.656406Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:53.656487Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:53.672087Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10716, node 4 2025-03-28T09:06:53.987479Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:53.987498Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:53.987509Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:53.987639Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17686 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:54.396673Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:54.533374Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:17686 2025-03-28T09:06:54.722777Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:54.735287Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T09:06:54.979583Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:06:55.044038Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records 
{ sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" } records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "30" shard_id: "shard-000000" } records { sequence_number: "31" shard_id: "shard-000000" } records { sequence_number: "32" shard_id: "shard-000000" } records { sequence_number: "33" shard_id: "shard-000000" } records { sequence_number: "34" shard_id: "shard-000000" } records { sequence_number: "35" shard_id: "shard-000000" } records { sequence_number: "36" shard_id: "shard-000000" } records { sequence_number: "37" shard_id: "shard-000000" } records { sequence_number: "38" shard_id: "shard-000000" } records { sequence_number: "39" shard_id: "shard-000000" } records { sequence_number: "40" shard_id: "shard-000000" } records { sequence_number: "41" shard_id: "shard-000000" } records { sequence_number: "42" shard_id: "shard-000000" } records { sequence_number: "43" shard_id: "shard-000000" } records { sequence_number: "44" shard_id: "shard-000000" } records { sequence_number: "45" shard_id: "shard-000000" } records { sequence_number: "46" shard_id: "shard-000000" } records { sequence_number: "47" shard_id: "shard-000000" } records { sequence_number: "48" shard_id: "shard-000000" } records { sequence_number: "49" shard_id: "shard-000000" } records { sequence_number: "50" shard_id: "shard-000000" } records { sequence_number: "51" shard_id: "shard-000000" } records { sequence_number: "52" shard_id: "shard-000000" } records { sequence_number: "53" shard_id: "shard-000000" } records { sequence_number: "54" shard_id: "shard-000000" } records { sequence_number: "55" shard_id: "shard-000000" } records { sequence_number: "56" shard_id: "shard-000000" } records { sequence_number: "57" shard_id: "shard-000000" } records { sequence_number: "58" shard_id: "shard-000000" } records { sequence_number: "59" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "60" shard_id: "shard-000000" } records { sequence_number: "61" shard_id: "shard-000000" } records { sequence_number: "62" shard_id: "shard-000000" } records { sequence_number: "63" shard_id: "shard-000000" } records { sequence_number: "64" shard_id: "shard-000000" } records { sequence_number: "65" shard_id: "shard-000000" } records { sequence_number: "66" shard_id: "shard-000000" } records { sequence_number: "67" shard_id: "shard-000000" } ... 
ard_id: "shard-000000" } records { sequence_number: "69" shard_id: "shard-000000" } records { sequence_number: "70" shard_id: "shard-000000" } records { sequence_number: "71" shard_id: "shard-000000" } records { sequence_number: "72" shard_id: "shard-000000" } records { sequence_number: "73" shard_id: "shard-000000" } records { sequence_number: "74" shard_id: "shard-000000" } records { sequence_number: "75" shard_id: "shard-000000" } records { sequence_number: "76" shard_id: "shard-000000" } records { sequence_number: "77" shard_id: "shard-000000" } records { sequence_number: "78" shard_id: "shard-000000" } records { sequence_number: "79" shard_id: "shard-000000" } records { sequence_number: "80" shard_id: "shard-000000" } records { sequence_number: "81" shard_id: "shard-000000" } records { sequence_number: "82" shard_id: "shard-000000" } records { sequence_number: "83" shard_id: "shard-000000" } records { sequence_number: "84" shard_id: "shard-000000" } records { sequence_number: "85" shard_id: "shard-000000" } records { sequence_number: "86" shard_id: "shard-000000" } records { sequence_number: "87" shard_id: "shard-000000" } records { sequence_number: "88" shard_id: "shard-000000" } records { sequence_number: "89" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "90" shard_id: "shard-000000" } records { sequence_number: "91" shard_id: "shard-000000" } records { sequence_number: "92" shard_id: "shard-000000" } records { sequence_number: "93" shard_id: "shard-000000" } records { sequence_number: "94" shard_id: "shard-000000" } records { sequence_number: "95" shard_id: "shard-000000" } records { sequence_number: "96" shard_id: "shard-000000" } records { sequence_number: "97" shard_id: "shard-000000" } records { sequence_number: "98" shard_id: "shard-000000" } records { sequence_number: "99" shard_id: "shard-000000" } records { sequence_number: "100" shard_id: "shard-000000" } records { sequence_number: "101" shard_id: "shard-000000" } records { sequence_number: "102" shard_id: "shard-000000" } records { sequence_number: "103" shard_id: "shard-000000" } records { sequence_number: "104" shard_id: "shard-000000" } records { sequence_number: "105" shard_id: "shard-000000" } records { sequence_number: "106" shard_id: "shard-000000" } records { sequence_number: "107" shard_id: "shard-000000" } records { sequence_number: "108" shard_id: "shard-000000" } records { sequence_number: "109" shard_id: "shard-000000" } records { sequence_number: "110" shard_id: "shard-000000" } records { sequence_number: "111" shard_id: "shard-000000" } records { sequence_number: "112" shard_id: "shard-000000" } records { sequence_number: "113" shard_id: "shard-000000" } records { sequence_number: "114" shard_id: "shard-000000" } records { sequence_number: "115" shard_id: "shard-000000" } records { sequence_number: "116" shard_id: "shard-000000" } records { sequence_number: "117" shard_id: "shard-000000" } records { sequence_number: "118" shard_id: "shard-000000" } records { sequence_number: "119" shard_id: "shard-000000" } 2025-03-28T09:06:58.267646Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486784326303901916:2158];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:58.267731Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; encryption_type: NONE records { sequence_number: "120" shard_id: "shard-000000" 
} records { sequence_number: "121" shard_id: "shard-000000" } records { sequence_number: "122" shard_id: "shard-000000" } records { sequence_number: "123" shard_id: "shard-000000" } records { sequence_number: "124" shard_id: "shard-000000" } records { sequence_number: "125" shard_id: "shard-000000" } records { sequence_number: "126" shard_id: "shard-000000" } records { sequence_number: "127" shard_id: "shard-000000" } records { sequence_number: "128" shard_id: "shard-000000" } records { sequence_number: "129" shard_id: "shard-000000" } records { sequence_number: "130" shard_id: "shard-000000" } records { sequence_number: "131" shard_id: "shard-000000" } records { sequence_number: "132" shard_id: "shard-000000" } records { sequence_number: "133" shard_id: "shard-000000" } records { sequence_number: "134" shard_id: "shard-000000" } records { sequence_number: "135" shard_id: "shard-000000" } records { sequence_number: "136" shard_id: "shard-000000" } records { sequence_number: "137" shard_id: "shard-000000" } records { sequence_number: "138" shard_id: "shard-000000" } records { sequence_number: "139" shard_id: "shard-000000" } records { sequence_number: "140" shard_id: "shard-000000" } records { sequence_number: "141" shard_id: "shard-000000" } records { sequence_number: "142" shard_id: "shard-000000" } records { sequence_number: "143" shard_id: "shard-000000" } records { sequence_number: "144" shard_id: "shard-000000" } records { sequence_number: "145" shard_id: "shard-000000" } records { sequence_number: "146" shard_id: "shard-000000" } records { sequence_number: "147" shard_id: "shard-000000" } records { sequence_number: "148" shard_id: "shard-000000" } records { sequence_number: "149" shard_id: "shard-000000" } Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743152814932-2","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743152814,"finish":1743152815},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743152815}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743152815006-3","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1743152815,"finish":1743152815},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743152815}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743152815077-4","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743152815,"finish":1743152816},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743152816}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743152816152-5","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743152816,"finish":1743152817},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743152817}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743152817182-6","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743152817,"finish":1743152818},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743152818}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestStreamTimeRetention","id":"used_storage-root-72075186224037888-1743152818232-7","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743152818,"finish":1743152819},"labels":{"datastreams_stream_name":"stream_TestStreamTimeRetention","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743152819}' 2025-03-28T09:07:03.818445Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486784366809246267:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:03.818489Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00156e/r3tmp/tmp7OGujw/pdisk_1.dat 2025-03-28T09:07:04.240900Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:04.289336Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:04.289428Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:04.293310Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25869, node 7 2025-03-28T09:07:04.367928Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:07:04.367951Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:04.367959Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:04.368101Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6062 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:07:04.675005Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:04.828695Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:6062 2025-03-28T09:07:05.057216Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> KqpNewEngine::Delete+UseSink [GOOD] >> KqpNewEngine::Delete-UseSink >> TBackupCollectionTests::BackupAbsentCollection [GOOD] >> TBackupCollectionTests::BackupDroppedCollection |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |78.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |78.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 >> DataStreams::TestCreateExistingStream [GOOD] >> DataStreams::ListStreamsValidation >> Viewer::TenantInfo5kkTablets [GOOD] >> Viewer::UseTransactionWhenExecuteDataActionQuery >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 [GOOD] >> TBackupCollectionTests::DisallowedPath [GOOD] >> TBackupCollectionTests::ParallelCreate >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 >> KqpNewEngine::BlindWriteListParameter [GOOD] >> KqpNewEngine::BatchUpload >> DataShardReadIterator::ShouldReadKeyOnlyValueColumn [GOOD] >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn >> TBackupCollectionTests::BackupDroppedCollection [GOOD] >> TBackupCollectionTests::BackupAbsentDirs >> Viewer::SharedDoesntShowExclusiveNodes [GOOD] >> Viewer::ServerlessWithExclusiveNodesCheckTable >> Normalizers::CleanEmptyPortionsNormalizer >> KqpSinkTx::OlapSnapshotRO [GOOD] |78.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/mind/ut_fat/unittest >> TBackupCollectionTests::ParallelCreate [GOOD] >> TBackupCollectionTests::Drop >> KqpPg::TableInsert-useSink [GOOD] >> KqpPg::TempTablesSessionsIsolation >> TBackupCollectionTests::BackupAbsentDirs [GOOD] >> TBackupCollectionTests::BackupNonIncrementalCollection >> TBackupCollectionTests::Drop [GOOD] >> TBackupCollectionTests::DropTwice >> TTxLocatorTest::TestWithReboot >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotRO [GOOD] Test command err: Trying to start YDB, gRPC: 27193, MsgBus: 19916 2025-03-28T09:06:20.831026Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784183542077185:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:20.831586Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00167b/r3tmp/tmpdIExIX/pdisk_1.dat 2025-03-28T09:06:21.276786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:21.276923Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:21.278342Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:21.278894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27193, node 1 2025-03-28T09:06:21.358428Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:21.358470Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:21.358490Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:21.358636Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19916 TClient is connected to server localhost:19916 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:21.971661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:06:21.986934Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:24.110450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784200721947025:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:24.110570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:24.110849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784200721947037:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:24.115856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:06:24.127691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784200721947039:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:06:24.191338Z node 1 :TX_PROXY ERROR: Actor# [1:7486784200721947090:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:24.580100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:06:24.800364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784200721947248:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:06:24.800577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784200721947248:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:06:24.800880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784200721947248:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:06:24.801024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784200721947248:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:06:24.801107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784200721947246:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:06:24.801150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784200721947248:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:06:24.801155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784200721947246:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:06:24.801645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784200721947248:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:06:24.801781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784200721947248:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:06:24.801921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784200721947248:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:06:24.802051Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7486784200721947248:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:06:24.802168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784200721947248:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:06:24.802262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784200721947248:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:06:24.802362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784200721947248:2345];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:06:24.811442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784200721947246:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:06:24.811648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784200721947246:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:06:24.811761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784200721947246:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:06:24.811886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784200721947246:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:06:24.812033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784200721947246:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:06:24.812448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784200721947246:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:06:24.814673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784200721947246:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:06:24.819239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784200721947246:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:06:24.819407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784200721947246:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:06:24.819529Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7486784200721947246:2344];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:06:24.875249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784200721947250:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:06:24.875313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784200721947250:2346];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abs ... .cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.695283Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;self_id=[2:7486784348895574652:3410];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038096;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.695453Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;self_id=[2:7486784323125765535:2450];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037981;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.695586Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038078;self_id=[2:7486784348895574968:3498];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038078;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.695707Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037982;self_id=[2:7486784323125765526:2446];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037982;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.696024Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7486784344600606199:3306];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.696511Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[2:7486784348895574943:3491];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.696706Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7486784344600606354:3343];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.697091Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[2:7486784348895574808:3466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038079;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.698381Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[2:7486784348895574846:3469];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.700331Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[2:7486784348895575067:3511];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038062;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.700891Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038079;self_id=[2:7486784348895574808:3466];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038079;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.701576Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[2:7486784348895575067:3511];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038062;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.716834Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[2:7486784348895574997:3505];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038074;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.719835Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[2:7486784348895574997:3505];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038074;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.725677Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7486784348895574952:3493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.725990Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7486784348895574952:3493];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.726195Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7486784323125765365:2430];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037993;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.726381Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[2:7486784348895574775:3450];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038085;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.726564Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7486784348895574793:3457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038072;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.745796Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[2:7486784348895574775:3450];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038085;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.746200Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7486784348895574793:3457];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038072;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.760159Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;self_id=[2:7486784323125765365:2430];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037993;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.846990Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[2:7486784348895574926:3481];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038071;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.847350Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037931;self_id=[2:7486784323125765816:2506];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037931;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.847590Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[2:7486784323125765805:2502];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037934;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.847829Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038086;self_id=[2:7486784348895574770:3449];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038086;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.848022Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[2:7486784348895574931:3482];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.848521Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[2:7486784348895574931:3482];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.848704Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[2:7486784348895574926:3481];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038071;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.848880Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037931;self_id=[2:7486784323125765816:2506];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037931;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.849058Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[2:7486784323125765805:2502];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037934;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.849196Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038086;self_id=[2:7486784348895574770:3449];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038086;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.852857Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7486784348895574627:3407];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.853145Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037964;self_id=[2:7486784323125765597:2462];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037964;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.853618Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;self_id=[2:7486784323125765840:2515];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037916;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.853838Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037961;self_id=[2:7486784323125765532:2449];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037961;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.854038Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037944;self_id=[2:7486784323125765662:2481];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037944;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.854451Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzFiNGU3NjUtMzNmY2ViZTEtMTdmMDZkOGEtOTgyYTE5OWU=, ActorId: [2:7486784370370414079:4089], ActorState: ExecuteState, TraceId: 01jqe02hdp2g3rwvx20je1hb9e, Create QueryResponse for error on request, msg: 2025-03-28T09:07:05.856004Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037961;self_id=[2:7486784323125765532:2449];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037961;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.856259Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037944;self_id=[2:7486784323125765662:2481];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037944;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.857127Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7486784348895574627:3407];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.857342Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037964;self_id=[2:7486784323125765597:2462];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037964;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:05.857508Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037916;self_id=[2:7486784323125765840:2515];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037916;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
:3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008
WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0
>> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD]
>> TBackupCollectionTests::DropTwice [GOOD]
>> TBackupCollectionTests::TableWithSystemColumns
>> TTxLocatorTest::TestWithReboot [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:07:07.800163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:07:07.800324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:07:07.800372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:07:07.800421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:07:07.800471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:07:07.800499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:07:07.800558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:07:07.800635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:07:07.800993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:07:07.969485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:07:07.969549Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:07.985371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:07:07.985665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:07:07.985828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:07:07.992181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:07:07.992464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners
number: 0 2025-03-28T09:07:07.993278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:07:08.067642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:07:08.070409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:07:08.071747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:07:08.071813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:07:08.071963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:07:08.071999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:07:08.072032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:07:08.072115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:07:08.078715Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:07:08.201131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:07:08.201398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:07:08.201643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:07:08.201915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:07:08.201968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:07:08.204898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:07:08.205047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:07:08.205263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:07:08.205314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:07:08.205349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:07:08.205379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:07:08.207582Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:07:08.207638Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:07:08.207677Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:07:08.209626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:07:08.209684Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:07:08.209748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:07:08.209803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:07:08.220198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:07:08.222631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:07:08.222857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:07:08.224005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:07:08.224142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:07:08.224197Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:07:08.224676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:07:08.224750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:07:08.224948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:07:08.225023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:07:08.227654Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:07:08.227721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:07:08.227937Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-03-28T09:07:08.227991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:07:08.228420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:07:08.228476Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:07:08.228587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:07:08.228624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:07:08.228664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:07:08.228693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:07:08.228730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:07:08.228771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:07:08.228810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:07:08.228843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:07:08.228925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:07:08.228978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:07:08.229010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:07:08.230996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:07:08.231124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:07:08.231165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Complete at tablet# 72057594046678944 2025-03-28T09:07:14.310431Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 105:1 2025-03-28T09:07:14.310537Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [7:125:2151], Recipient [7:125:2151]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:07:14.310565Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:07:14.310608Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:1, at schemeshard: 72057594046678944 2025-03-28T09:07:14.310651Z node 7 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:1 ProgressState 2025-03-28T09:07:14.310811Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:07:14.310839Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 2/2 2025-03-28T09:07:14.310887Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-03-28T09:07:14.310943Z node 7 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 2/2 2025-03-28T09:07:14.310987Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-03-28T09:07:14.311029Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/2, is published: true 2025-03-28T09:07:14.311099Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:302:2293] message: TxId: 105 2025-03-28T09:07:14.311147Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/2 2025-03-28T09:07:14.311199Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-28T09:07:14.311236Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-28T09:07:14.311310Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-03-28T09:07:14.311355Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:1 2025-03-28T09:07:14.311379Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:1 2025-03-28T09:07:14.311458Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-03-28T09:07:14.311482Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-28T09:07:14.313353Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:07:14.313468Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [7:302:2293] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 105 at schemeshard: 72057594046678944 2025-03-28T09:07:14.313653Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-28T09:07:14.313703Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:530:2491] 2025-03-28T09:07:14.313931Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:532:2493], Recipient [7:125:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:07:14.313972Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:07:14.313994Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-03-28T09:07:14.314586Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [7:596:2555], Recipient [7:125:2151]: {TEvModifySchemeTransaction txid# 106 TabletId# 72057594046678944} 2025-03-28T09:07:14.314643Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:07:14.317143Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpBackupIncrementalBackupCollection BackupIncrementalBackupCollection { Name: ".backups/collections/MyCollection1" } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:07:14.317615Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:07:14.317807Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 4], parent name: MyCollection1, child name: 19700101000000Z_incremental, child id: [OwnerId: 72057594046678944, LocalPathId: 8], at schemeshard: 72057594046678944 2025-03-28T09:07:14.317873Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2025-03-28T09:07:14.317963Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:07:14.318069Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 106:1, explain: Incremental backup is disabled on this collection, at schemeshard: 72057594046678944 2025-03-28T09:07:14.318111Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:2, propose status:StatusInvalidParameter, reason: Incremental backup is disabled on this collection, at schemeshard: 72057594046678944 2025-03-28T09:07:14.320499Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Abort operation: IgniteOperation fail to propose a part, opId: 106:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusInvalidParameter, with reason: Incremental backup is disabled on this collection, tx message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpBackupIncrementalBackupCollection BackupIncrementalBackupCollection { Name: ".backups/collections/MyCollection1" } } TxId: 106 TabletId: 72057594046678944 2025-03-28T09:07:14.320644Z node 7 :FLAT_TX_SCHEMESHARD INFO: MkDir AbortPropose, opId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:07:14.320912Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:07:14.323611Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Incremental backup is disabled on this collection" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:07:14.323832Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Incremental backup is disabled on this collection, operation: BACKUP INCREMENTAL, path: 
/MyRoot/.backups/collections/MyCollection1 2025-03-28T09:07:14.323909Z node 7 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-28T09:07:14.324321Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-03-28T09:07:14.324382Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-03-28T09:07:14.324909Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [7:602:2561], Recipient [7:125:2151]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:14.324974Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:14.325019Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T09:07:14.325174Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [7:302:2293], Recipient [7:125:2151]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2025-03-28T09:07:14.325199Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-28T09:07:14.325276Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-03-28T09:07:14.325380Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-28T09:07:14.325430Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:600:2559] 2025-03-28T09:07:14.325651Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [7:602:2561], Recipient [7:125:2151]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:07:14.325704Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:07:14.325757Z node 7 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 2025-03-28T09:07:14.326332Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [7:603:2562], Recipient [7:125:2151]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-28T09:07:14.326399Z node 7 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-28T09:07:14.326542Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:07:14.326816Z node 7 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1" took 281us result status StatusSuccess 2025-03-28T09:07:14.327278Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1" PathDescription { Self { Name: "MyCollection1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeBackupCollection CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 
3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 BackupCollectionVersion: 0 } ChildrenExist: true } Children { Name: "19700101000000Z_full" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } BackupCollectionDescription { Name: "MyCollection1" ExplicitEntryList { Entries { Type: ETypeTable Path: "/MyRoot/Table1" } } Cluster { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> KqpAgg::AggWithSelfLookup2 [GOOD]
>> KqpAgg::AggWithHop
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestWithReboot [GOOD]
Test command err: 2025-03-28T09:07:14.420088Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-28T09:07:14.420581Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-28T09:07:14.421381Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-28T09:07:14.422933Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.423355Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-28T09:07:14.434962Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.435102Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.435227Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-28T09:07:14.435348Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.435406Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.435571Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-28T09:07:14.435716Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active!
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-28T09:07:14.437281Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2025-03-28T09:07:14.437733Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2025-03-28T09:07:14.438274Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2025-03-28T09:07:14.438532Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2025-03-28T09:07:14.439001Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2025-03-28T09:07:14.439198Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.439463Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.439521Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.439660Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2123] requested range size#100000 2025-03-28T09:07:14.439873Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#100000 2025-03-28T09:07:14.440034Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.440080Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.440286Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2025-03-28T09:07:14.440460Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.440529Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.440683Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.440766Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2025-03-28T09:07:14.440957Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.441107Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.441186Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2025-03-28T09:07:14.441338Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } 
ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.441422Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2025-03-28T09:07:14.441468Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2025-03-28T09:07:14.441627Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.441683Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.441763Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2025-03-28T09:07:14.441794Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 100000 to# 200000 2025-03-28T09:07:14.441877Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.441979Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2025-03-28T09:07:14.442002Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 200000 to# 300000 2025-03-28T09:07:14.442149Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.442227Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2025-03-28T09:07:14.442253Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 300000 to# 400000 2025-03-28T09:07:14.442303Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2025-03-28T09:07:14.442339Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 400000 to# 500000 2025-03-28T09:07:14.442455Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.442511Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.442548Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2025-03-28T09:07:14.442570Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:88:2123] TEvAllocateResult from# 500000 to# 600000 2025-03-28T09:07:14.442712Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.442764Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2025-03-28T09:07:14.442784Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 600000 to# 700000 2025-03-28T09:07:14.442843Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 
TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2025-03-28T09:07:14.442867Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 700000 to# 800000 2025-03-28T09:07:14.442992Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.443038Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2025-03-28T09:07:14.443067Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 800000 to# 900000 2025-03-28T09:07:14.443170Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.443238Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2025-03-28T09:07:14.443261Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-03-28T09:07:14.447681Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 2 Marker# TSYS31 2025-03-28T09:07:14.448799Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 2 Promote Marker# TSYS16 2025-03-28T09:07:14.449801Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:2:12:0:0:71:0] Snap: 2:1 for 72057594046447617 Marker# TRRH04 2025-03-28T09:07:14.449874Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:12:0:0:71:0], refs: [[72057594046447617:2:12:1:24576:76:0],] for 72057594046447617 2025-03-28T09:07:14.450087Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:1:0:0:42:0], refs: [[72057594046447617:2:1:1:28672:35:0],] for 72057594046447617 2025-03-28T09:07:14.450138Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:2:0:0:71:0], refs: [[72057594046447617:2:2:1:8192:71:0],] for 72057594046447617 2025-03-28T09:07:14.450177Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:3:0:0:69:0], refs: [[72057594046447617:2:3:1:24576:70:0],] for 72057594046447617 2025-03-28T09:07:14.450219Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:2:4:0:0:71:0], refs: [[72057594046447617:2:4:1:24576:76:0],] for 720575940 ... 
OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.820475Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9000000 Reserved to# 9100000 2025-03-28T09:07:14.820511Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:615:2546] TEvAllocateResult from# 9000000 to# 9100000 2025-03-28T09:07:14.820734Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2025-03-28T09:07:14.820764Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:617:2548] TEvAllocateResult from# 9100000 to# 9200000 2025-03-28T09:07:14.820879Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.820935Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.821015Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2025-03-28T09:07:14.821041Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:619:2550] TEvAllocateResult from# 9200000 to# 9300000 2025-03-28T09:07:14.821169Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2025-03-28T09:07:14.821287Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:621:2552] TEvAllocateResult from# 9300000 to# 9400000 2025-03-28T09:07:14.821378Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.821424Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.821576Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2025-03-28T09:07:14.821606Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:623:2554] TEvAllocateResult from# 9400000 to# 9500000 2025-03-28T09:07:14.821670Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2025-03-28T09:07:14.821692Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:625:2556] TEvAllocateResult from# 9500000 to# 9600000 2025-03-28T09:07:14.821814Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.821940Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.822012Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2025-03-28T09:07:14.822038Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:627:2558] TEvAllocateResult from# 9600000 to# 9700000 2025-03-28T09:07:14.822141Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 
2025-03-28T09:07:14.822227Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2025-03-28T09:07:14.822257Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:629:2560] TEvAllocateResult from# 9700000 to# 9800000 2025-03-28T09:07:14.822411Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2025-03-28T09:07:14.822437Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:631:2562] TEvAllocateResult from# 9800000 to# 9900000 2025-03-28T09:07:14.822533Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:1:24576:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.822614Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:11:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.822758Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2025-03-28T09:07:14.822807Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:633:2564] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-03-28T09:07:14.827995Z node 1 :TABLET_MAIN NOTICE: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 11 Marker# TSYS31 2025-03-28T09:07:14.829429Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 11 Promote Marker# TSYS16 2025-03-28T09:07:14.830118Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:11:11:0:0:71:0] Snap: 11:1 for 72057594046447617 Marker# TRRH04 2025-03-28T09:07:14.830187Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:11:0:0:71:0], refs: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617 2025-03-28T09:07:14.830380Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:1:0:0:42:0], refs: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2025-03-28T09:07:14.830442Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:2:0:0:69:0], refs: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2025-03-28T09:07:14.830487Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:3:0:0:71:0], refs: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2025-03-28T09:07:14.830527Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:4:0:0:71:0], refs: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2025-03-28T09:07:14.830561Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:5:0:0:71:0], refs: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2025-03-28T09:07:14.830648Z node 1 :TABLET_MAIN DEBUG: 
TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:6:0:0:71:0], refs: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617 2025-03-28T09:07:14.831774Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:7:0:0:71:0], refs: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617 2025-03-28T09:07:14.831841Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:8:0:0:71:0], refs: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617 2025-03-28T09:07:14.831891Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:9:0:0:71:0], refs: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617 2025-03-28T09:07:14.831951Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:10:0:0:71:0], refs: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617 2025-03-28T09:07:14.832097Z node 1 :TABLET_MAIN DEBUG: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::BuildHistory - Process generation 11 from 1 with 11 steps Marker# TRRH09 2025-03-28T09:07:14.832131Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:1:1:28672:1483:0],] for 72057594046447617 2025-03-28T09:07:14.832164Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2025-03-28T09:07:14.832230Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2025-03-28T09:07:14.832255Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2025-03-28T09:07:14.832295Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2025-03-28T09:07:14.832332Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:6:1:24576:78:0],] 2025-03-28T09:07:14.832363Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:7:1:24576:78:0],] 2025-03-28T09:07:14.832397Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617, Gc+: [[72057594046447617:11:8:1:24576:75:0],] 2025-03-28T09:07:14.832430Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:9:1:24576:78:0],] 2025-03-28T09:07:14.832457Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:10:1:24576:78:0],] 2025-03-28T09:07:14.832483Z node 1 :TABLET_MAIN DEBUG: TTabletReqRebuildHistoryGraph::BuildHistory - 
THE TAIL - References: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617, Gc+: [[72057594046447617:11:11:1:24576:72:0],] 2025-03-28T09:07:14.832765Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:12:0:0:0:0:0] Marker# TSYS01 2025-03-28T09:07:14.834131Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.837826Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-28T09:07:14.838060Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-28T09:07:14.839208Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 12, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-28T09:07:14.839274Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:1:28672:1639:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.839395Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:12:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:14.839479Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 12:0 Marker# TSYS28
>> TBackupCollectionTests::TableWithSystemColumns [GOOD]
>> TTxLocatorTest::TestImposibleSize
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14
>> TTxLocatorTest::TestImposibleSize [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::TableWithSystemColumns [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:07:10.093777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:07:10.093884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:07:10.093922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:07:10.093978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:07:10.094026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:07:10.094058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:07:10.094113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:07:10.094195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE:
[RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:07:10.094528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:07:10.184692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:07:10.184759Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:10.200327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:07:10.200652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:07:10.200891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:07:10.207159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:07:10.207484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:07:10.208236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:07:10.208504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:07:10.211187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:07:10.212786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:07:10.212862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:07:10.213043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:07:10.213099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:07:10.213142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:07:10.213262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:07:10.224374Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:07:10.380529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:07:10.380851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:07:10.381124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:07:10.381409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:07:10.381481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
2025-03-28T09:07:10.392141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:07:10.392352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:07:10.392608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:07:10.392680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:07:10.392731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:07:10.392778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:07:10.400029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:07:10.400142Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:07:10.400201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:07:10.402925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:07:10.403003Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:07:10.403065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:07:10.403138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:07:10.407929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:07:10.410877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:07:10.411158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:07:10.412586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:07:10.412768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:07:10.412826Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:07:10.413162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:07:10.413228Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:07:10.413476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:07:10.413600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:07:10.416197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:07:10.416279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:07:10.416506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:07:10.416552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:07:10.417021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:07:10.417076Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:07:10.417194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:07:10.417234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:07:10.417280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:07:10.417321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:07:10.417366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:07:10.417421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:07:10.417512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:07:10.417561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:07:10.417634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:07:10.417689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:07:10.417727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:07:10.419886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:07:10.420003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:07:10.420040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
RACE: StateWork, received event# 269550080, Sender [6:601:2558], Recipient [6:123:2149]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 3479 } } 2025-03-28T09:07:16.166054Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransactionResult 2025-03-28T09:07:16.166148Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 3479 } } 2025-03-28T09:07:16.166192Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409548, partId: 1 2025-03-28T09:07:16.166330Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:1, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 3479 } } 2025-03-28T09:07:16.166451Z node 6 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 106 Step: 5000007 OrderId: 106 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 3479 } } 2025-03-28T09:07:16.166491Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:07:16.167302Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274137603, Sender [6:207:2209], Recipient [6:123:2149]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 8] Version: 3 } 2025-03-28T09:07:16.167336Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2025-03-28T09:07:16.167385Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-03-28T09:07:16.167463Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 3 PathOwnerId: 72057594046678944, cookie: 106 2025-03-28T09:07:16.167489Z node 6 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-03-28T09:07:16.167514Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 8], version: 3 2025-03-28T09:07:16.167539Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 4 2025-03-28T09:07:16.167606Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/2, is published: true 2025-03-28T09:07:16.167665Z 
node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:07:16.168581Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:661:2610], Recipient [6:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:16.168621Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:16.168645Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T09:07:16.168853Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [6:601:2558], Recipient [6:123:2149]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 601 RawX2: 25769806334 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-03-28T09:07:16.168893Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-03-28T09:07:16.168982Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 601 RawX2: 25769806334 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-03-28T09:07:16.169031Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409548, partId: 1 2025-03-28T09:07:16.169196Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:1, at schemeshard: 72057594046678944, message: Source { RawX1: 601 RawX2: 25769806334 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-03-28T09:07:16.169273Z node 6 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:07:16.169404Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 106:1 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 601 RawX2: 25769806334 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-03-28T09:07:16.169488Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:1, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:07:16.169549Z node 6 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 106:1, at schemeshard: 72057594046678944 2025-03-28T09:07:16.169612Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:1, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-03-28T09:07:16.169661Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:1 129 -> 240 2025-03-28T09:07:16.169825Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:07:16.170722Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:07:16.170884Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:07:16.173769Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-28T09:07:16.173826Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:07:16.173983Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-28T09:07:16.174009Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:07:16.174158Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2025-03-28T09:07:16.174190Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:07:16.174291Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-28T09:07:16.174315Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:07:16.174442Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:1, at schemeshard: 72057594046678944 2025-03-28T09:07:16.174491Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:07:16.174527Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 106:1 2025-03-28T09:07:16.174633Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:601:2558] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 106 at schemeshard: 72057594046678944 2025-03-28T09:07:16.175007Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [6:123:2149], Recipient [6:123:2149]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:07:16.175051Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:07:16.175101Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:1, at schemeshard: 72057594046678944 2025-03-28T09:07:16.175141Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:1 ProgressState 2025-03-28T09:07:16.175300Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:07:16.175337Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:1 progress is 2/2 2025-03-28T09:07:16.175375Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-03-28T09:07:16.175418Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:1 progress is 2/2 2025-03-28T09:07:16.175462Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-03-28T09:07:16.175505Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 2/2, is published: true 2025-03-28T09:07:16.175579Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:305:2296] message: TxId: 106 2025-03-28T09:07:16.175630Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 2/2 2025-03-28T09:07:16.175687Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-03-28T09:07:16.175722Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-03-28T09:07:16.175806Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-03-28T09:07:16.175858Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:1 2025-03-28T09:07:16.175882Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:1 2025-03-28T09:07:16.175953Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2025-03-28T09:07:16.178014Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:07:16.178111Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [6:305:2296] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 106 at schemeshard: 72057594046678944 2025-03-28T09:07:16.178284Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-28T09:07:16.178339Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [6:628:2577] 2025-03-28T09:07:16.178623Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:630:2579], Recipient [6:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:07:16.178661Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:07:16.178707Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 >> DataShardReadIterator::ShouldReadNoColumnsKeysRequestArrow [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec >> TTxLocatorTest::Boot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize [GOOD] Test command err: 2025-03-28T09:07:16.624631Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-28T09:07:16.625043Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-28T09:07:16.625662Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-28T09:07:16.627104Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:16.627569Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-28T09:07:16.636648Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:16.636769Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:16.636902Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-28T09:07:16.637025Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:16.637076Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:16.637271Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-28T09:07:16.637429Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-28T09:07:16.638074Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#281474976710656 2025-03-28T09:07:16.638240Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 0 Reserved to# 0 2025-03-28T09:07:16.644539Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2025-03-28T09:07:16.645138Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2108] requested range size#123456 2025-03-28T09:07:16.645634Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:16.645728Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:16.645832Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 123456 2025-03-28T09:07:16.645869Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2108] TEvAllocateResult from# 0 to# 123456 expected SUCCESS 2025-03-28T09:07:16.646251Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2112] requested range size#281474976587200 2025-03-28T09:07:16.646384Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 123456 Reserved to# 0 2025-03-28T09:07:16.646434Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:78:2112] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2025-03-28T09:07:16.646782Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:81:2115] requested range size#246912 2025-03-28T09:07:16.647132Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:16.647211Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:16.647310Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 123456 Reserved to# 370368 2025-03-28T09:07:16.647346Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:81:2115] TEvAllocateResult from# 123456 to# 370368 expected SUCCESS 2025-03-28T09:07:16.647726Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:85:2119] requested range size#281474976340288 2025-03-28T09:07:16.647825Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 370368 Reserved to# 0 2025-03-28T09:07:16.647868Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:85:2119] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE >> DataStreams::ListStreamsValidation [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 >> TTxLocatorTest::Boot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::Boot [GOOD] Test command err: 
2025-03-28T09:07:17.244002Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-28T09:07:17.244538Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-28T09:07:17.245379Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-28T09:07:17.247015Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:17.247475Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-28T09:07:17.260187Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:17.260303Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:17.260420Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-28T09:07:17.260563Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:17.260609Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:07:17.260799Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-28T09:07:17.260942Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 >> DataShardReadIterator::ShouldReadKeyValueColumnAndSomeKeyColumn [GOOD] >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::ListStreamsValidation [GOOD] Test command err: 2025-03-28T09:06:48.605457Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784303738977950:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:48.605519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00155c/r3tmp/tmpoiMTQr/pdisk_1.dat 2025-03-28T09:06:49.077263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:49.077374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:49.104682Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:49.106425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27009, node 1 2025-03-28T09:06:49.231340Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:49.231358Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:49.231364Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:49.231463Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13349 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:49.572271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:49.667325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:13349 2025-03-28T09:06:49.830855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:06:52.351272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:06:52.646805Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037890:1][1:7486784320918848627:2369] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:6:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-28T09:06:52.830599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:53.065323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:53.084404Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-03-28T09:06:53.084426Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-03-28T09:06:53.085068Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-03-28T09:06:53.085084Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-03-28T09:06:53.093210Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-03-28T09:06:53.098779Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-03-28T09:06:53.098802Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2025-03-28T09:06:53.098817Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-03-28T09:06:53.098828Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-03-28T09:06:53.098840Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-03-28T09:06:53.098857Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2025-03-28T09:06:53.098892Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2025-03-28T09:06:53.098907Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2025-03-28T09:06:53.098918Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-03-28T09:06:53.098929Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2025-03-28T09:06:53.098944Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-03-28T09:06:53.108002Z 
node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,19) wasn't found 2025-03-28T09:06:53.108056Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found 2025-03-28T09:06:53.108084Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,13) wasn't found 2025-03-28T09:06:53.108109Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,10) wasn't found 2025-03-28T09:06:53.108197Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,21) wasn't found 2025-03-28T09:06:53.108277Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,15) wasn't found 2025-03-28T09:06:53.108394Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,20) wasn't found 2025-03-28T09:06:53.108419Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,17) wasn't found 2025-03-28T09:06:53.108443Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,14) wasn't found 2025-03-28T09:06:53.108489Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,11) wasn't found 2025-03-28T09:06:53.108550Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,8) wasn't found 2025-03-28T09:06:55.578140Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486784333540547683:2182];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:55.713415Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00155c/r3tmp/tmpCqIYbR/pdisk_1.dat 2025-03-28T09:06:56.116932Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:56.182921Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:56.183022Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:56.196071Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15554, node 4 2025-03-28T09:06:56.503685Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:56.503715Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:56.503724Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:56.503881Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5356 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:56.782109Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:56.859089Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:5356 2025-03-28T09:06:57.057244Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:57.078894Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T09:06:57.545056Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:06:57.750904Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:57.903250Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:07:03.522009Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486784370999568029:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:03.522397Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00155c/r3tmp/tmpLsZgBi/pdisk_1.dat 2025-03-28T09:07:04.062889Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:04.125127Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:04.125262Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:04.130655Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 
21944, node 7 2025-03-28T09:07:04.286975Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:07:04.287004Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:04.287024Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:04.287170Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26333 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:07:04.876203Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:05.005361Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:26333 2025-03-28T09:07:05.320462Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:07:06.144934Z node 7 :TX_PROXY ERROR: Actor# [7:7486784383884471868:3368] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/stream_TestCreateExistingStream\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:12.083074Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486784407454215667:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:12.083141Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00155c/r3tmp/tmpDaiYvC/pdisk_1.dat 2025-03-28T09:07:12.408850Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:12.448830Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:12.448963Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:12.458457Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63181, node 10 2025-03-28T09:07:12.715802Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:07:12.715831Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:12.715841Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:12.715989Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28422 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:07:13.109454Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:07:13.194205Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:28422 2025-03-28T09:07:13.463077Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:13.475889Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 >> TDatabaseResolverTests::MySQL >> TDatabaseResolverTests::MySQL [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied |78.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] >> KqpNewEngine::Delete-UseSink [GOOD] >> KqpNewEngine::DecimalColumn >> KqpLimits::OutOfSpaceYQLUpsertFail+useSink [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink >> DataStreams::TestReservedConsumersMetering [GOOD] >> YdbTableSplit::MergeByNoLoadAfterSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] Test command err: 2025-03-28T09:07:18.803655Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed MySQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-mysql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. 
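Note on the failure mode exercised by TDatabaseResolverTests::MySQL_PermissionDenied above: the FQ database resolver maps a managed-database cluster id to its endpoints via an HTTP call to the MDB API, and a 403 from that call surfaces as the "you have no permission to resolve database id into database endpoint" error recorded in the log. Below is a minimal, hypothetical Python sketch of that request shape; only the endpoint 'mdb.api.cloud.yandex.net', the URL path '/managed-mysql/v1/clusters/<id>/hosts', and the cluster id are taken from the log output above — the token handling and error mapping are illustrative assumptions, not YDB's actual resolver code.

    import json
    import urllib.request
    import urllib.error

    MDB_ENDPOINT = "https://mdb.api.cloud.yandex.net"   # endpoint from the log line above
    CLUSTER_ID = "etn021us5r9rhld1vgbh"                 # cluster id from the log line above
    IAM_TOKEN = "<iam-token>"                           # hypothetical placeholder credential

    def resolve_mysql_hosts(cluster_id: str) -> list:
        # Same URL shape as in the log: /managed-mysql/v1/clusters/<id>/hosts
        url = f"{MDB_ENDPOINT}/managed-mysql/v1/clusters/{cluster_id}/hosts"
        req = urllib.request.Request(url, headers={"Authorization": f"Bearer {IAM_TOKEN}"})
        try:
            with urllib.request.urlopen(req) as resp:
                return json.load(resp).get("hosts", [])
        except urllib.error.HTTPError as e:
            if e.code == 403:
                # Permission denied: corresponds to the resolver error logged above.
                raise PermissionError(f"no permission to resolve cluster {cluster_id}") from e
            raise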
>> KqpNewEngine::BatchUpload [GOOD] >> KqpNewEngine::BrokenLocksAtROTx >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot >> DataStreams::TestPutRecordsCornerCases [GOOD] >> DataStreams::TestPutRecords >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestReservedConsumersMetering [GOOD] Test command err: 2025-03-28T09:06:48.055176Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784305620620796:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:48.055258Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00158c/r3tmp/tmpeO3vF1/pdisk_1.dat 2025-03-28T09:06:48.655731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:48.655903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:48.661504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:48.679701Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27032, node 1 2025-03-28T09:06:48.836119Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:48.836142Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:48.836148Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:48.836295Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11327 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:49.218043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:06:49.360017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480
TClient is connected to server localhost:11327
2025-03-28T09:06:49.545403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:06:50.001320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480
encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "1" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000007" } records { sequence_number: "1" shard_id: "shard-000007" } records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000007" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000009" } records { sequence_number: "1" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000001" }
encryption_type: NONE records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "5" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000008" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000009" } records { sequence_number: "2" shard_id: "shard-000006" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000004" } records { sequence_number: "5" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "7" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000008" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000006" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000009" } records { sequence_number: "8" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000009" } records { sequence_number: "9" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000001" }
encryption_type: NONE records { sequence_number: "10" shard_id: "shard-000001" } records { sequence_number: "10" shard_id: "shard-000009" } records { sequence_number: "10" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000005" } records { sequence_number: "4" shard_id: "shard-000008" } records { sequence_number: "11" shard_id: "shard-000004" } records { sequence_number: "12" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000005" } records { sequence_number: "11" shard_id: "shard-000001" } records { sequence_number: "11" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000006" } records { sequence_number: "12" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000007" } records { sequence_number: "7" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000007" } records { sequence_number: "13" shard_id: "shard-000004" } records { sequence_number: "8" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "12" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000008" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000006" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000009" } records { sequence_number: "13" shard_id: "shard-000001" } records { sequence_number: "14" shard_id: "shard-000009" } records { sequence_number: "14" shard_id: "shard-000004" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000001" }
2025-03-28T09:06:53.056193Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784305620620796:2077];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:06:53.056265Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
encryption_type: NONE records { sequence_number: "15" shard_id: "shard-000001" } records { sequence_number: "15" shard_id: "shard-000009" } records { sequence_number: "15" shard_id: "shard-000004" } records { sequence_number: "9" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000008" } records { sequence_number: "16" shard_id: "shard-000004" } records { sequence_number: "17" shard_id: "shard-000004" } records { sequence_number: "10" shard_id: "shard-000005" } records { sequence_number: "16" shard_id: "shard-000001" } records { sequence_number: "16" shard_id: "shard-000009" } records { sequence_number: "6" shard_id: "shard-000006" } records { sequence_number: "17" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000007" } records { sequence_number: "10" shard_id: "shard-000007" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000007" } records { sequence_number: "18" shard_id: "shard-000004" } records { sequence_number: "11" shard_id: "shard-000005" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "17" shard_id: "shard-000009" } records { sequence_number: "7" shard_id: "shard-000008" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000006" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000009" } records { sequence_number: "18" shard_id: "shard-000001" } records { sequence_number: "19" shard_id: "shard-000009" } records { sequence_number: "19" shard_id: "shard-000004" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000001" }
encryption_type: NONE records { sequence_number: "20" shard_id: "shard-000001" } records { sequence_number: "20" shard_id: "shard-000009" } records { sequence_number: "20" shard_id: "shard-000004" } records { sequence_number: "12" shard_id: "shard-000005" } records { sequence_number: "8" shard_id: "shard-000008" } records { sequence_number: "21" shard_id: "shard-000004" } r
...
older_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743152831157-170","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":0,"unit":"second","start":1743152831,"finish":1743152831},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152831}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743152831157-171","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1743152831,"finish":1743152831},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152831}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743152831157-172","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1743152831,"finish":1743152831},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743152831}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1743152831251-173","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1743152831,"finish":1743152832},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152832}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743152831251-174","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1743152831,"finish":1743152832},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152832}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743152831251-175","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1743152831,"finish":1743152832},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152832}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743152831251-176","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743152831,"finish":1743152832},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743152832}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1743152832304-177","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1743152832,"finish":1743152833},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152833}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743152832304-178","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1743152832,"finish":1743152833},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152833}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743152832304-179","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1743152832,"finish":1743152833},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152833}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743152832304-180","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743152832,"finish":1743152833},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743152833}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1743152833351-181","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1743152833,"finish":1743152834},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152834}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743152833351-182","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1743152833,"finish":1743152834},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152834}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743152833351-183","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1743152833,"finish":1743152834},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152834}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743152833351-184","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743152833,"finish":1743152834},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743152834}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1743152834460-185","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1743152834,"finish":1743152835},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152835}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743152834460-186","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1743152834,"finish":1743152835},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152835}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743152834460-187","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1743152834,"finish":1743152835},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152835}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743152834460-188","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743152834,"finish":1743152835},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743152835}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1743152835480-189","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1743152835,"finish":1743152836},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152836}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743152835480-190","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1743152835,"finish":1743152836},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152836}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1743152835480-191","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1743152835,"finish":1743152836},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743152836}'
Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"used_storage-root-72075186224037888-1743152835480-192","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":1,"unit":"byte*second","start":1743152835,"finish":1743152836},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743152836}'
>> KqpPg::TempTablesSessionsIsolation [GOOD]
>> KqpPg::TempTablesDrop
>> BasicStatistics::TwoTables [GOOD]
>> YdbTableSplit::SplitByLoadWithUpdates
>> TReplicaCombinationTest::UpdatesCombinationsDomainRoot [GOOD]
>> TReplicaCombinationTest::UpdatesCombinationsMigratedPath
>> TReplicaCombinationTest::UpdatesCombinationsMigratedPath [GOOD]
>> TReplicaCombinationTest::MigratedPathRecreation
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteRead [GOOD]
Test command err:
2025-03-28T09:07:11.082984Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-28T09:07:11.205018Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-28T09:07:11.236461Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-28T09:07:11.236826Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-28T09:07:11.259087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-28T09:07:11.259322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-28T09:07:11.259571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-28T09:07:11.259691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-28T09:07:11.259805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-28T09:07:11.259940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-28T09:07:11.260064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-28T09:07:11.260187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-28T09:07:11.260332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-28T09:07:11.260449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-28T09:07:11.260584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-28T09:07:11.260708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-28T09:07:11.308305Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-03-28T09:07:11.308671Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-03-28T09:07:11.308729Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-03-28T09:07:11.308907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-28T09:07:11.309104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-03-28T09:07:11.309198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-03-28T09:07:11.309248Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-03-28T09:07:11.309362Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-03-28T09:07:11.309430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-03-28T09:07:11.309472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-03-28T09:07:11.309515Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-03-28T09:07:11.309678Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-28T09:07:11.309740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-03-28T09:07:11.309777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-03-28T09:07:11.309805Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-03-28T09:07:11.309947Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-03-28T09:07:11.310002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-03-28T09:07:11.310062Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-03-28T09:07:11.310096Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-03-28T09:07:11.310194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-03-28T09:07:11.310235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-03-28T09:07:11.310266Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-03-28T09:07:11.310312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-03-28T09:07:11.310357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-03-28T09:07:11.310398Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-03-28T09:07:11.310821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51;
2025-03-28T09:07:11.310926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45;
2025-03-28T09:07:11.311033Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=53;
2025-03-28T09:07:11.311149Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38;
2025-03-28T09:07:11.311319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-03-28T09:07:11.311379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-03-28T09:07:11.311417Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-03-28T09:07:11.311649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-03-28T09:07:11.311704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-03-28T09:07:11.311737Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-03-28T09:07:11.311911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-03-28T09:07:11.311970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-03-28T09:07:11.312006Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-03-28T09:07:11.312237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-03-28T09:07:11.312297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-03-28T09:07:11.312332Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks;
2025-03-28T09:07:11.312469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-03-28T09:07:11.312520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-03-28T09:07:11.312570Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish;
...
olumn_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);
2025-03-28T09:07:19.793569Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1;
2025-03-28T09:07:19.793732Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31;
2025-03-28T09:07:19.793829Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id;
2025-03-28T09:07:19.793980Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1064:2935];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string;
2025-03-28T09:07:19.794156Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);
2025-03-28T09:07:19.794291Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);
2025-03-28T09:07:19.794459Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);
2025-03-28T09:07:19.794722Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);;
2025-03-28T09:07:19.794875Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);
2025-03-28T09:07:19.795019Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);
2025-03-28T09:07:19.795068Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1065:2936] finished for tablet 9437184
2025-03-28T09:07:19.795646Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1064:2935];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["l_task_result"],"t":0.013},{"events":["f_ack"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.016}],"full":{"a":1743152839779021,"name":"_full_task","f":1743152839779021,"d_finished":0,"c":0,"l":1743152839795127,"d":16106},"events":[{"name":"bootstrap","f":1743152839779234,"d_finished":3324,"c":1,"l":1743152839782558,"d":3324},{"a":1743152839794676,"name":"ack","f":1743152839793213,"d_finished":1278,"c":1,"l":1743152839794491,"d":1729},{"a":1743152839794662,"name":"processing","f":1743152839783954,"d_finished":6357,"c":10,"l":1743152839794495,"d":6822},{"name":"ProduceResults","f":1743152839781072,"d_finished":3336,"c":13,"l":1743152839795049,"d":3336},{"a":1743152839795056,"name":"Finish","f":1743152839795056,"d_finished":0,"c":0,"l":1743152839795127,"d":71},{"name":"task_result","f":1743152839783972,"d_finished":4936,"c":9,"l":1743152839793013,"d":4936}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);
2025-03-28T09:07:19.795736Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1064:2935];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=;
2025-03-28T09:07:19.796214Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1064:2935];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["l_task_result"],"t":0.013},{"events":["f_ack"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.016}],"full":{"a":1743152839779021,"name":"_full_task","f":1743152839779021,"d_finished":0,"c":0,"l":1743152839795782,"d":16761},"events":[{"name":"bootstrap","f":1743152839779234,"d_finished":3324,"c":1,"l":1743152839782558,"d":3324},{"a":1743152839794676,"name":"ack","f":1743152839793213,"d_finished":1278,"c":1,"l":1743152839794491,"d":2384},{"a":1743152839794662,"name":"processing","f":1743152839783954,"d_finished":6357,"c":10,"l":1743152839794495,"d":7477},{"name":"ProduceResults","f":1743152839781072,"d_finished":3336,"c":13,"l":1743152839795049,"d":3336},{"a":1743152839795056,"name":"Finish","f":1743152839795056,"d_finished":0,"c":0,"l":1743152839795782,"d":726},{"name":"task_result","f":1743152839783972,"d_finished":4936,"c":9,"l":1743152839793013,"d":4936}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);
2025-03-28T09:07:19.796306Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:07:19.778371Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0;
2025-03-28T09:07:19.796366Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor;
2025-03-28T09:07:19.796746Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;
>> TReplicaCombinationTest::MigratedPathRecreation [GOOD]
>> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61
|78.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaCombinationTest::MigratedPathRecreation [GOOD]
Test command err:
2025-03-28T09:07:20.202234Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:7:2054]
2025-03-28T09:07:20.202297Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1
2025-03-28T09:07:20.202384Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:7:2054]
2025-03-28T09:07:20.202406Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1
2025-03-28T09:07:20.202467Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:8:2055]
2025-03-28T09:07:20.202497Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 800, generation# 1
2025-03-28T09:07:20.202544Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:8:2055]
2025-03-28T09:07:20.202562Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 800, generation# 1
2025-03-28T09:07:20.202673Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 103
2025-03-28T09:07:20.202739Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false
2025-03-28T09:07:20.208989Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60}
2025-03-28T09:07:20.209162Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 103
2025-03-28T09:07:20.210582Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false
2025-03-28T09:07:20.210765Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60}
2025-03-28T09:07:20.210875Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:9:2056]
2025-03-28T09:07:20.211503Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# /Root/Tenant, domainOwnerId# 0, capabilities#
=========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800
=========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800
2025-03-28T09:07:20.256816Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:11:2058]
2025-03-28T09:07:20.256887Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Successful handshake: owner# 800, generation# 1
2025-03-28T09:07:20.256930Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:11:2058]
2025-03-28T09:07:20.256960Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Commit generation: owner# 800, generation# 1
2025-03-28T09:07:20.257018Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:12:2059]
2025-03-28T09:07:20.257037Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Successful handshake: owner# 900, generation# 1
2025-03-28T09:07:20.257082Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:12:2059]
2025-03-28T09:07:20.257097Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Commit generation: owner# 900, generation# 1
2025-03-28T09:07:20.257151Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:11:2058], cookie# 0, event size# 103
2025-03-28T09:07:20.257169Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false
2025-03-28T09:07:20.257196Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:10:2057] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60}
2025-03-28T09:07:20.257243Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:12:2059], cookie# 0, event size# 103
2025-03-28T09:07:20.257263Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Update description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], deletion# false
2025-03-28T09:07:20.257292Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:10:2057] Replace GSS by TSS description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], domainId# [OwnerId: 800, LocalPathId: 2], curPathId# [OwnerId: 800, LocalPathId: 2], curDomainId# [OwnerId: 800, LocalPathId: 2]
2025-03-28T09:07:20.257332Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:10:2057] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 900, LocalPathId: 1], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60}
2025-03-28T09:07:20.257420Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:10:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:13:2060]
2025-03-28T09:07:20.257471Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:10:2057] Subscribe: subscriber# [1:13:2060], path# /Root/Tenant, domainOwnerId# 0, capabilities#
=========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800
=========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900
2025-03-28T09:07:20.257780Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:15:2062]
2025-03-28T09:07:20.257815Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Successful handshake: owner# 800, generation# 1
2025-03-28T09:07:20.257845Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:15:2062]
2025-03-28T09:07:20.257859Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Commit generation: owner# 800, generation# 1
2025-03-28T09:07:20.257890Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:16:2063]
2025-03-28T09:07:20.257908Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Successful handshake: owner# 800, generation# 1
2025-03-28T09:07:20.257963Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:16:2063]
2025-03-28T09:07:20.257984Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Commit generation: owner# 800, generation# 1
2025-03-28T09:07:20.258032Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:15:2062], cookie# 0, event size# 103
2025-03-28T09:07:20.258070Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false
2025-03-28T09:07:20.258110Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:14:2061] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60}
2025-03-28T09:07:20.258177Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:16:2063], cookie# 0, event size# 103
2025-03-28T09:07:20.258201Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:14:2061] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false
2025-03-28T09:07:20.258248Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:14:2061] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 2, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60}
2025-03-28T09:07:20.258298Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:14:2061] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:17:2064]
2025-03-28T09:07:20.258326Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:14:2061] Subscribe: subscriber# [1:17:2064], path# /Root/Tenant, domainOwnerId# 0, capabilities#
=========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800
=========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800
2025-03-28T09:07:20.258622Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:19:2066]
2025-03-28T09:07:20.258644Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Successful handshake: owner# 800, generation# 1
2025-03-28T09:07:20.258717Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:19:2066]
2025-03-28T09:07:20.258737Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Commit generation: owner# 800, generation# 1
2025-03-28T09:07:20.258776Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:20:2067]
2025-03-28T09:07:20.258795Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Successful handshake: owner# 900, generation# 1
2025-03-28T09:07:20.258837Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:20:2067]
2025-03-28T09:07:20.258858Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Commit generation: owner# 900, generation# 1
2025-03-28T09:07:20.258902Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:18:2065] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:19:2066], cookie# 0, event size# 103
2025-03-28T09:07:20.258922Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:18:2065] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false
2025-03-28T09:07:20.258949Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:18:2065] Upsert description: path# /Root/Te
...
ble_inside, pathId# [OwnerId: 910, LocalPathId: 9]
2025-03-28T09:07:20.872075Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:394:2441] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:397:2444]
2025-03-28T09:07:20.872088Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:394:2441] Upsert description: path# /Root/Tenant/table_inside
2025-03-28T09:07:20.872138Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:394:2441] Subscribe: subscriber# [2:397:2444], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities#
=========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910
=========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910
=========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615
=========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0]
2025-03-28T09:07:20.873890Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:399:2446]
2025-03-28T09:07:20.873912Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Successful handshake: owner# 910, generation# 1
2025-03-28T09:07:20.873952Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:399:2446]
2025-03-28T09:07:20.873989Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Commit generation: owner# 910, generation# 1
2025-03-28T09:07:20.874022Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:400:2447]
2025-03-28T09:07:20.874039Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Successful handshake: owner# 910, generation# 1
2025-03-28T09:07:20.874082Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:400:2447]
2025-03-28T09:07:20.874102Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Commit generation: owner# 910, generation# 1
2025-03-28T09:07:20.874149Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:399:2446], cookie# 0, event size# 64
2025-03-28T09:07:20.874166Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true
2025-03-28T09:07:20.874179Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:398:2445] Upsert description: path# [OwnerId: 910, LocalPathId: 9]
2025-03-28T09:07:20.874223Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:400:2447], cookie# 0, event size# 130
2025-03-28T09:07:20.874240Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# false
2025-03-28T09:07:20.874257Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:398:2445] Path was explicitly deleted, ignoring: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9]
2025-03-28T09:07:20.874288Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:398:2445] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:401:2448]
2025-03-28T09:07:20.874302Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:398:2445] Upsert description: path# /Root/Tenant/table_inside
2025-03-28T09:07:20.874323Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:398:2445] Subscribe: subscriber# [2:401:2448], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities#
=========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910
=========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910
=========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615
=========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0]
2025-03-28T09:07:20.875929Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:403:2450]
2025-03-28T09:07:20.875955Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Successful handshake: owner# 910, generation# 1
2025-03-28T09:07:20.875982Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:403:2450]
2025-03-28T09:07:20.876001Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Commit generation: owner# 910, generation# 1
2025-03-28T09:07:20.876041Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:404:2451]
2025-03-28T09:07:20.876063Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Successful handshake: owner# 910, generation# 1
2025-03-28T09:07:20.876142Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:404:2451]
2025-03-28T09:07:20.876163Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Commit generation: owner# 910, generation# 1
2025-03-28T09:07:20.876196Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:403:2450], cookie# 0, event size# 64
2025-03-28T09:07:20.876220Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true
2025-03-28T09:07:20.876234Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:402:2449] Upsert description: path# [OwnerId: 910, LocalPathId: 9]
2025-03-28T09:07:20.876273Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:404:2451], cookie# 0, event size# 64
2025-03-28T09:07:20.876290Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:402:2449] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true
2025-03-28T09:07:20.876329Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:402:2449] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:405:2452]
2025-03-28T09:07:20.876344Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:402:2449] Upsert description: path# /Root/Tenant/table_inside
2025-03-28T09:07:20.876369Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:402:2449] Subscribe: subscriber# [2:405:2452], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities#
=========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910
=========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910
=========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615
=========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0]
2025-03-28T09:07:20.976916Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:7:2054]
2025-03-28T09:07:20.976976Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 800, generation# 1
2025-03-28T09:07:20.977059Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:7:2054]
2025-03-28T09:07:20.977092Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 800, generation# 1
2025-03-28T09:07:20.977141Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:8:2055]
2025-03-28T09:07:20.977163Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 900, generation# 1
2025-03-28T09:07:20.977217Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:8:2055]
2025-03-28T09:07:20.977246Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 900, generation# 1
2025-03-28T09:07:20.977327Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 118
2025-03-28T09:07:20.977355Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false
2025-03-28T09:07:20.977394Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67}
2025-03-28T09:07:20.977471Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 117
2025-03-28T09:07:20.977503Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false
2025-03-28T09:07:20.977549Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description by newest path form tenant schemeshard: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], domainId# [OwnerId: 800, LocalPathId: 1], curPathId# [OwnerId: 800, LocalPathId: 1111], curDomainId# [OwnerId: 800, LocalPathId: 1]
2025-03-28T09:07:20.977575Z node 3 :SCHEME_BOARD_REPLICA
INFO: [3:6:2053] Delete description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111] 2025-03-28T09:07:20.977615Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-03-28T09:07:20.977746Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 0 }: sender# [3:9:2056] 2025-03-28T09:07:20.977789Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# /root/db/dir_inside, domainOwnerId# 0, capabilities# =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 1111 PathOwnerId: 800 =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 11 PathOwnerId: 900 =========== DomainId: [OwnerId: 800, LocalPathId: 1] IsDeletion: 0 PathId: [OwnerId: 900, LocalPathId: 11] Versions: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables [GOOD] Test command err: 2025-03-28T09:04:49.395605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:49.395867Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:49.395924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001112/r3tmp/tmprl09wF/pdisk_1.dat 2025-03-28T09:04:49.818323Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12226, node 1 2025-03-28T09:04:50.180394Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:50.180452Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:50.180489Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:50.181120Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:04:50.184636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:50.288027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:50.288267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:50.306883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17847 2025-03-28T09:04:51.006993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:54.615725Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:04:54.660132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:54.660261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:54.728886Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:04:54.734573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:55.025793Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:55.026457Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:55.027125Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:55.027347Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:55.027643Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:55.027742Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:55.027829Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:55.027930Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:55.028075Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:55.237434Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:55.237550Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:55.251344Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:55.452608Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:55.518754Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:04:55.518882Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:04:55.558600Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:04:55.560810Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:04:55.561059Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:04:55.561123Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:04:55.561181Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:04:55.561253Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:04:55.561328Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:04:55.561388Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:04:55.561947Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:04:55.643665Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1859:2587] 2025-03-28T09:04:55.644569Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:04:55.659402Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:04:55.659485Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:04:55.659594Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:04:55.668229Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:04:55.668378Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1895:2609], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:04:55.686323Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1935:2627] 2025-03-28T09:04:55.686982Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1935:2627], schemeshard id = 72075186224037897 2025-03-28T09:04:55.737140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:04:55.745682Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:04:55.745865Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:04:55.924006Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:04:56.161424Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:04:56.213986Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:04:57.109913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2238:3072], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:57.110075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:57.127348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:04:57.425445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2376:3107], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:57.425637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:57.426783Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2381:3111]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:04:57.426967Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:04:57.427058Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2383:3113] 2025-03-28T09:04:57.427115Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2383:3113] 2025-03-28T09:04:57.427567Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2384:2875] 2025-03-28T09:04:57.427809Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2383:3113], server id = [2:2384:2875], tablet id = 72075186224037894, status = OK 2025-03-28T09:04:57.427978Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2384:2875], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:04:57.428038Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:04:57.428215Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:04:57.428284Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2381:3111], StatRequests.size() = 1 2025-03-28T09:04:57.450895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2388:3117], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:57.451029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:57.451558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2393:3122], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:57.457468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-28T09:04:57.668980Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:04:57.669064Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:04:57.750575Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2383:3113], schemeshard count = 1 2025-03-28T09:04:58.087167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... t[ 1 ] 2025-03-28T09:07:09.793667Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-03-28T09:07:09.793720Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6686:4772], StatRequests.size() = 1 2025-03-28T09:07:10.617860Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:07:10.618397Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:07:10.618657Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:07:10.663399Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 3, at schemeshard: 72075186224037897 2025-03-28T09:07:10.663480Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 219.000000s, at schemeshard: 72075186224037897 2025-03-28T09:07:10.663830Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 73 2025-03-28T09:07:10.680365Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:07:11.290823Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6719:4788]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:11.291113Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-03-28T09:07:11.291153Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6719:4788], StatRequests.size() = 1 2025-03-28T09:07:12.107866Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:12.107948Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:07:12.108009Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table. 2025-03-28T09:07:12.108060Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-28T09:07:12.108476Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:07:12.162834Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:07:12.184243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6744:4809], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:12.184403Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6755:4814], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:12.184633Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:12.219428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-28T09:07:12.336169Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6758:4817], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-28T09:07:12.553845Z node 2 :TX_PROXY ERROR: Actor# [2:6856:4864] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:12.753996Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6885:4879]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:12.754272Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-28T09:07:12.754329Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6885:4879], StatRequests.size() = 1 2025-03-28T09:07:13.149565Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTU3NjBjY2ItYjI4NzQ0NTUtY2Q2NGI3NWQtM2NkMjk3YTM=, TxId: 2025-03-28T09:07:13.149655Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTU3NjBjY2ItYjI4NzQ0NTUtY2Q2NGI3NWQtM2NkMjk3YTM=, TxId: 2025-03-28T09:07:13.150185Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:07:13.169647Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-28T09:07:13.169724Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:07:13.738722Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6917:4899]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:13.739061Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-03-28T09:07:13.739110Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6917:4899], StatRequests.size() = 1 2025-03-28T09:07:15.153187Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6958:4921]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:15.153476Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-03-28T09:07:15.153521Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6958:4921], StatRequests.size() = 1 2025-03-28T09:07:15.865366Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:07:15.865673Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:15.865714Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:07:15.865757Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-28T09:07:15.865793Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:07:15.866102Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:07:15.870906Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:07:15.891472Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjdhZDdjYzktYTk3MGEwNGUtNzk0NTkzNTktMTEzYmViZWU=, TxId: 2025-03-28T09:07:15.891545Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjdhZDdjYzktYTk3MGEwNGUtNzk0NTkzNTktMTEzYmViZWU=, TxId: 2025-03-28T09:07:15.892022Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:07:15.910915Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:07:15.911001Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:07:16.532679Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7024:4961]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:16.533005Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-03-28T09:07:16.533049Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7024:4961], StatRequests.size() = 1 2025-03-28T09:07:17.915953Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7067:4985]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:17.916350Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-03-28T09:07:17.916400Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7067:4985], StatRequests.size() = 1 2025-03-28T09:07:18.601682Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:07:18.601950Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:07:18.602285Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:18.602328Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:07:18.602369Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-28T09:07:18.602403Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:07:18.602722Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:07:18.605620Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:07:18.606120Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:07:18.619973Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDFkMDNmZTUtYzkzNGVmZDktZjlhMzhhOGItYWNkZjY2Nzk=, TxId: 2025-03-28T09:07:18.620040Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDFkMDNmZTUtYzkzNGVmZDktZjlhMzhhOGItYWNkZjY2Nzk=, TxId: 2025-03-28T09:07:18.620798Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:07:18.639793Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:07:18.639841Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:07:19.294059Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7128:5020]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:19.294358Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-03-28T09:07:19.294402Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7128:5020], StatRequests.size() = 1 2025-03-28T09:07:19.295205Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 127 ], ReplyToActorId[ [2:7130:5022]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:19.298816Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 127 ] 2025-03-28T09:07:19.298890Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 127, ReplyToActorId = [2:7130:5022], StatRequests.size() = 1 >> Viewer::UseTransactionWhenExecuteDataActionQuery [GOOD] >> ViewerTopicDataTests::TopicDataTest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestCellVec [GOOD] >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow >> KqpAgg::AggWithHop [GOOD] >> KqpAgg::GroupByLimit |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 >> RemoteTopicReader::ReadTopic >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 >> DataShardReadIterator::ShouldRangeReadReverseLeftNonInclusive [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix1 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> DataStreams::TestPutRecords [GOOD] |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |78.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} |78.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> KqpNewEngine::DecimalColumn [GOOD] >> KqpNewEngine::DecimalColumn35 >> KqpNewEngine::BrokenLocksAtROTx [GOOD] >> KqpNewEngine::BrokenLocksAtROTxSharded ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestPutRecords [GOOD] Test command err: 2025-03-28T09:06:49.994177Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784310695246741:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:49.994680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00154a/r3tmp/tmpkPTwZg/pdisk_1.dat 2025-03-28T09:06:50.816584Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:50.825004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:50.825111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:50.837575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9434, node 1 2025-03-28T09:06:51.195467Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:51.195487Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:51.195497Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:51.195606Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16491 WaitRootIsUp 'Root'... 
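A note on the SCHEME_BOARD_REPLICA traces above: when two schemeshards (owners 800/900/910) describe the same path, the replica compares the two descriptions and keeps the newer one; the "=========== Left / Right / WIN ==" lines show that comparison, with PathVersion 18446744073709551615 (max ui64) encoding an explicit deletion, which is why the tombstone wins in every case shown. Below is a minimal sketch of that selection rule — the struct and helper names are hypothetical stand-ins, not YDB's actual types.

// Minimal sketch (hypothetical types, not YDB's real code) of the "WIN"
// selection seen in the Left/Right traces: a deletion, encoded as
// PathVersion == max<ui64> (18446744073709551615 in the log), beats any
// live version; otherwise the higher PathVersion wins.
#include <cstdint>
#include <cstdio>
#include <limits>

struct TPathDescription {
    uint64_t OwnerId = 0;
    uint64_t LocalPathId = 0;
    uint64_t Version = 0;  // max<ui64> means "path deleted"
};

constexpr uint64_t DeletedVersion = std::numeric_limits<uint64_t>::max();

const TPathDescription& PickWinner(const TPathDescription& left,
                                   const TPathDescription& right) {
    if (left.Version == DeletedVersion) return left;   // deletion always wins
    if (right.Version == DeletedVersion) return right;
    return left.Version >= right.Version ? left : right;
}

int main() {
    // Mirrors the trace: left is the tombstone (version max<ui64>),
    // right is a live description with PathVersion 2.
    TPathDescription left{910, 9, DeletedVersion};
    TPathDescription right{910, 9, 2};
    const TPathDescription& win = PickWinner(left, right);
    std::printf("winner: owner=%llu path=%llu deleted=%d\n",
                (unsigned long long)win.OwnerId,
                (unsigned long long)win.LocalPathId,
                win.Version == DeletedVersion);
}

Deletion-beats-version is also why the later TEvUpdate with deletion# false is dropped in the trace ("Path was explicitly deleted, ignoring").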
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:51.658219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:51.728815Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:51.844069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:16491 2025-03-28T09:06:52.103839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:52.123744Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T09:06:56.290201Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486784340617879255:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:56.290262Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00154a/r3tmp/tmpSePW8j/pdisk_1.dat 2025-03-28T09:06:56.622384Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:56.663525Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:56.663613Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:56.670112Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24945, node 4 2025-03-28T09:06:56.903480Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:56.903508Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:56.903517Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:56.903659Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4480 WaitRootIsUp 'Root'... 
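The "WaitRootIsUp 'Root'..." / "TClient::Ls request" exchanges on either side of this point are the test harness polling the scheme root until it resolves. A hedged sketch of that pattern follows; TClient and TLsResult are self-contained stubs standing in for the framework's RPC-backed types, and the retry bound and delay are illustrative, not the harness's real settings.

// Hypothetical polling loop behind "WaitRootIsUp 'Root'... /
// WaitRootIsUp 'Root' success.": issue Ls("/Root") until the path resolves.
#include <chrono>
#include <cstdio>
#include <string>
#include <thread>

struct TLsResult { bool Success = false; };

struct TClient {
    int Calls = 0;
    // Stub standing in for the framework's RPC-backed TClient::Ls;
    // here it "succeeds" on the third call so the sketch is runnable.
    TLsResult Ls(const std::string& /*path*/) { return {++Calls >= 3}; }
};

bool WaitRootIsUp(TClient& client, const std::string& root = "/Root") {
    for (int attempt = 0; attempt < 100; ++attempt) {  // illustrative bound
        if (client.Ls(root).Success) return true;      // "... success."
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }
    return false;
}

int main() {
    TClient client;
    bool up = WaitRootIsUp(client);
    std::printf("root up: %d after %d Ls calls\n", up ? 1 : 0, client.Calls);
}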
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:57.571938Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:57.726579Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:4480 2025-03-28T09:06:58.324054Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:58.335747Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-03-28T09:06:58.707846Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480 encryption_type: NONE sequence_number: "0" shard_id: "shard-000000" encryption_type: NONE records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "6" shard_id: "shard-000000" } records { 
sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } 2025-03-28T09:06:58.848105Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:58.944617Z :INFO: [/Root/] [/Root/] [1b3b1fef-8826a49d-3f723127-e53c2a89] Starting read session 2025-03-28T09:06:58.944710Z :DEBUG: [/Root/] [/Root/] [1b3b1fef-8826a49d-3f723127-e53c2a89] Starting session to cluster null (localhost:24945) 2025-03-28T09:06:58.946509Z :DEBUG: [/Root/] [/Root/] [1b3b1fef-8826a49d-3f723127-e53c2a89] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:06:58.946549Z :DEBUG: [/Root/] [/Root/] [1b3b1fef-8826a49d-3f723127-e53c2a89] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:06:58.947456Z :DEBUG: [/Root/] [/Root/] [1b3b1fef-8826a49d-3f723127-e53c2a89] [null] Reconnecting session to cluster null in 0.000000s 2025-03-28T09:06:59.042324Z :DEBUG: [/Root/] [/Root/] [1b3b1fef-8826a49d-3f723127-e53c2a89] [null] Successfully connected. Initializing session 2025-03-28T09:06:59.038285Z node 4 :PQ_READ_PROXY DEBUG: new grpc connection 2025-03-28T09:06:59.038313Z node 4 :PQ_READ_PROXY DEBUG: new session created cookie 1 2025-03-28T09:06:59.098430Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/stream_TestPutRecordsWithRead" } read_only_original: true consumer: "user1" read_params { max_read_size: 104857600 } } } 2025-03-28T09:06:59.098947Z node 4 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_4_1_5414206448428330186_v1 read init: from# ipv6:[::1]:35766, request# { init_request { topics_read_settings { topic: "/Root/stream_TestPutRecordsWithRead" } read_only_original: true consumer: "user1" read_params { max_read_size: 104857600 } } } 2025-03-28T09:06:59.099161Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer user1 session user1_4_1_5414206448428330186_v1 auth for : user1 2025-03-28T09:06:59.111799Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer user1 session user1_4_1_5414206448428330186_v1 Handle describe topics response 2025-03-28T09:06:59.111955Z node 4 :PQ_READ_PROXY DEBUG: session cookie 1 consumer user1 session user1_4_1_5414206448428330186_v1 auth is DEAD 2025-03-28T09:06:59.112069Z node 4 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_4_1_5414206448428330186_v1 auth ok: topics# 1, initDone# 0 2025-03-28T09:06 ... 
cordsCornerCases in database: Root, partition 3(assignId:2) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2025-03-28T09:07:09.489402Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_290723401213004974_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 2(assignId:3) EndOffset 0 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2025-03-28T09:07:09.489647Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_290723401213004974_v1 got StartRead from client: partition# TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 1(assignId:4), readOffset# 0, commitOffset# 0 2025-03-28T09:07:09.489699Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_290723401213004974_v1 Start reading TopicId: Topic /Root/stream_TestPutRecordsCornerCases in database: Root, partition 1(assignId:4) EndOffset 9 readOffset 0 committedOffset 0 clientCommitOffset 0 clientReadOffset 0 2025-03-28T09:07:09.671604Z :DEBUG: [/Root/] [/Root/] [3bf8823a-f1a5de18-deee0ac4-35ed3a0c] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:07:09.673481Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (0-2) 2025-03-28T09:07:09.673550Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (3-3) 2025-03-28T09:07:09.673592Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 0 (0-1) 2025-03-28T09:07:09.673624Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-03-28T09:07:09.673630Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (0-1) 2025-03-28T09:07:09.673650Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (3-3) 2025-03-28T09:07:09.673720Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (4-4) 2025-03-28T09:07:09.673761Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (5-5) 2025-03-28T09:07:09.673807Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (6-6) 2025-03-28T09:07:09.673842Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (7-7) 2025-03-28T09:07:09.673876Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (8-8) 2025-03-28T09:07:09.673925Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {0, 0} (0-0) 2025-03-28T09:07:09.673959Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {1, 0} (1-1) 2025-03-28T09:07:09.674781Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {2, 0} (2-2) 2025-03-28T09:07:09.674837Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {3, 0} (3-3) 2025-03-28T09:07:09.674895Z :DEBUG: [/Root/] [/Root/] [3bf8823a-f1a5de18-deee0ac4-35ed3a0c] [null] The application data is transferred to the client. Number of messages 4, size 1049088 bytes 2025-03-28T09:07:09.675586Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-03-28T09:07:09.675618Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-03-28T09:07:09.675643Z :DEBUG: [/Root/] [/Root/] [3bf8823a-f1a5de18-deee0ac4-35ed3a0c] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2025-03-28T09:07:09.675765Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {0, 0} (0-0) 2025-03-28T09:07:09.676781Z :DEBUG: [/Root/] Take Data. Partition 1. 
Read: {1, 0} (1-1) 2025-03-28T09:07:09.679068Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {2, 0} (2-2) 2025-03-28T09:07:09.680130Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {3, 0} (3-3) 2025-03-28T09:07:09.684354Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {4, 0} (4-4) 2025-03-28T09:07:09.685353Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {5, 0} (5-5) 2025-03-28T09:07:09.686430Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {6, 0} (6-6) 2025-03-28T09:07:09.687374Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {7, 0} (7-7) 2025-03-28T09:07:09.687438Z :DEBUG: [/Root/] [/Root/] [3bf8823a-f1a5de18-deee0ac4-35ed3a0c] [null] The application data is transferred to the client. Number of messages 8, size 7340035 bytes 2025-03-28T09:07:09.688930Z :INFO: [/Root/] [/Root/] [3bf8823a-f1a5de18-deee0ac4-35ed3a0c] Closing read session. Close timeout: 0.000000s 2025-03-28T09:07:09.689015Z :INFO: [/Root/] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:stream_TestPutRecordsCornerCases:1:5:8:0 null:stream_TestPutRecordsCornerCases:2:4:0:0 null:stream_TestPutRecordsCornerCases:3:3:3:0 null:stream_TestPutRecordsCornerCases:0:2:1:0 null:stream_TestPutRecordsCornerCases:4:1:1:0 2025-03-28T09:07:09.689058Z :INFO: [/Root/] [/Root/] [3bf8823a-f1a5de18-deee0ac4-35ed3a0c] Counters: { Errors: 0 CurrentSessionLifetimeMs: 262 BytesRead: 9437699 MessagesRead: 17 BytesReadCompressed: 9437699 BytesInflightUncompressed: 1048576 BytesInflightCompressed: 0 BytesInflightTotal: 1048576 MessagesInflight: 1 } 2025-03-28T09:07:09.689167Z :NOTICE: [/Root/] [/Root/] [3bf8823a-f1a5de18-deee0ac4-35ed3a0c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-03-28T09:07:09.689217Z :DEBUG: [/Root/] [/Root/] [3bf8823a-f1a5de18-deee0ac4-35ed3a0c] [null] Abort session to cluster 2025-03-28T09:07:09.690159Z :NOTICE: [/Root/] [/Root/] [3bf8823a-f1a5de18-deee0ac4-35ed3a0c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:07:09.694847Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_290723401213004974_v1 grpc read failed 2025-03-28T09:07:09.694884Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_290723401213004974_v1 grpc closed 2025-03-28T09:07:09.694932Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer user1 session user1_7_1_290723401213004974_v1 is DEAD 2025-03-28T09:07:20.152066Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486784440079948351:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:20.152183Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00154a/r3tmp/tmpr1Yucm/pdisk_1.dat 2025-03-28T09:07:20.501008Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:20.561634Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:20.561726Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:20.568294Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2345, node 10 2025-03-28T09:07:20.727443Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:07:20.727481Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:20.727509Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:20.727710Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8683 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:07:21.118340Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
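In the PutRecordsResponse below, records fan out across shard-000000..shard-000004 according to each record's partition key. YDB DataStreams exposes a Kinesis-compatible API, where routing hashes the partition key (MD5 in Kinesis) into a 128-bit space divided into per-shard hash-key ranges; the sketch below only illustrates the idea, using std::hash as a stand-in digest and a simple modulo split, so the exact shard assignments will differ from the real service.

// Toy illustration of partition-key -> shard routing for a 5-shard stream
// like stream_TestPutRecords. std::hash is a stand-in for the stable
// 128-bit digest the real (Kinesis-style) API uses; outputs are
// illustrative only.
#include <cstdint>
#include <cstdio>
#include <functional>
#include <string>

std::string ShardForKey(const std::string& partitionKey, uint32_t shardCount) {
    uint64_t digest = std::hash<std::string>{}(partitionKey);
    uint32_t shard = static_cast<uint32_t>(digest % shardCount);
    char name[32];
    std::snprintf(name, sizeof(name), "shard-%06u", shard);
    return name;
}

int main() {
    for (const char* key : {"key-1", "key-2", "key-3"})
        std::printf("%s -> %s\n", key, ShardForKey(key, 5).c_str());
}

Because routing depends only on the partition key, a producer reusing one key lands on a single shard and keeps per-key ordering — consistent with the monotonically increasing sequence_number within each shard_id in the response.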
2025-03-28T09:07:21.327493Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:8683 2025-03-28T09:07:21.598767Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:21.920260Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480
: Error: Access for stream /Root/stream_TestPutRecords is denied for subject user2@builtin, code: 500018 2025-03-28T09:07:22.060121Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 PutRecordsResponse = encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } PutRecord response = encryption_type: NONE sequence_number: "7" shard_id: "shard-000004" |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] >> KqpPg::TempTablesDrop [GOOD] >> KqpPg::TempTablesWithCache >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] Test command err: 2025-03-28T09:07:12.759712Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:07:12.898021Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:07:12.923260Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:07:12.923649Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:07:12.932147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-03-28T09:07:12.932420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-03-28T09:07:12.932587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:07:12.932773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:07:12.932906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:07:12.933043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:07:12.933152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:07:12.933256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:07:12.933358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:07:12.933480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:07:12.933591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:07:12.933712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:07:12.933822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:07:12.964165Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:07:12.964427Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-03-28T09:07:12.964492Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-03-28T09:07:12.964752Z node 1 :TX_COLUMNSHARD CRIT: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:286;tasks_for_remove=0; 2025-03-28T09:07:12.964871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-03-28T09:07:12.964952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-03-28T09:07:12.964998Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-03-28T09:07:12.965196Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:07:12.965349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:07:12.965410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:07:12.965442Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-03-28T09:07:12.965525Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:07:12.965600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:07:12.965660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:07:12.965736Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-03-28T09:07:12.965928Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:07:12.965990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:07:12.966041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:07:12.966069Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-03-28T09:07:12.966177Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:07:12.966233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:07:12.966269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:07:12.966314Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:07:12.966406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:07:12.966458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:07:12.966485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:07:12.966532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:07:12.966574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:07:12.966604Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:07:12.967023Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=60; 2025-03-28T09:07:12.967116Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-03-28T09:07:12.967210Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=45; 2025-03-28T09:07:12.967288Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-03-28T09:07:12.967477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:07:12.967528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:07:12.967565Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:07:12.967749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:07:12.967795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:07:12.967828Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:07:12.967971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:07:12.968012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:07:12.968045Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;eve ... UG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:07:26.830168Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-28T09:07:26.830223Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-28T09:07:26.830275Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-03-28T09:07:26.830348Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-03-28T09:07:26.830401Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-28T09:07:26.830516Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:07:26.830557Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-03-28T09:07:26.830597Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:07:26.830850Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:07:26.831059Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:07:26.831114Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:07:26.831245Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-03-28T09:07:26.831314Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-03-28T09:07:26.831432Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:495:2498];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-03-28T09:07:26.831546Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:07:26.831635Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:07:26.831725Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:07:26.832457Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:07:26.832574Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:07:26.832697Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:07:26.832745Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:497:2499] finished for tablet 9437184 2025-03-28T09:07:26.833102Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:495:2498];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.207},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.209}],"full":{"a":1743152846623528,"name":"_full_task","f":1743152846623528,"d_finished":0,"c":0,"l":1743152846832797,"d":209269},"events":[{"name":"bootstrap","f":1743152846623779,"d_finished":2653,"c":1,"l":1743152846626432,"d":2653},{"a":1743152846832427,"name":"ack","f":1743152846830822,"d_finished":922,"c":1,"l":1743152846831744,"d":1292},{"a":1743152846832406,"name":"processing","f":1743152846626537,"d_finished":107236,"c":9,"l":1743152846831748,"d":107627},{"name":"ProduceResults","f":1743152846625312,"d_finished":2561,"c":12,"l":1743152846832727,"d":2561},{"a":1743152846832733,"name":"Finish","f":1743152846832733,"d_finished":0,"c":0,"l":1743152846832797,"d":64},{"name":"task_result","f":1743152846626556,"d_finished":106147,"c":8,"l":1743152846830662,"d":106147}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:07:26.833203Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:495:2498];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 
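[Editor's note] The scan actor's shutdown records above and below embed a JSON stats blob (stats={"p":[...],"full":{...},"events":[...]}). Judging from the values, "f"/"l" are first/last timestamps in epoch microseconds, "d" a duration in microseconds, and "c" an invocation count — these field meanings are inferred from the log itself, not from YDB documentation. A minimal sketch for summarizing the per-stage "events" array, using two entries copied from the blob above:

```python
# Minimal sketch: summarize per-stage timings from a scan stats blob like the
# stats={...} JSON in the log. Field semantics are an inference (assumption):
# "f"/"l" = first/last epoch microseconds, "d" = duration in us, "c" = count.
import json

stats_json = '''{"events":[
  {"name":"bootstrap","f":1743152846623779,"d_finished":2653,"c":1,"l":1743152846626432,"d":2653},
  {"name":"ProduceResults","f":1743152846625312,"d_finished":2561,"c":12,"l":1743152846832727,"d":2561}
]}'''

stats = json.loads(stats_json)
for ev in stats["events"]:
    dur_ms = ev["d"] / 1000.0  # assumed: microseconds -> milliseconds
    print(f'{ev["name"]}: {dur_ms:.3f} ms over {ev["c"]} call(s)')
```

For the "bootstrap" entry, l - f = 2653, matching "d", which is what suggests the microsecond-duration reading.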
2025-03-28T09:07:26.833521Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:495:2498];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.207},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.209}],"full":{"a":1743152846623528,"name":"_full_task","f":1743152846623528,"d_finished":0,"c":0,"l":1743152846833255,"d":209727},"events":[{"name":"bootstrap","f":1743152846623779,"d_finished":2653,"c":1,"l":1743152846626432,"d":2653},{"a":1743152846832427,"name":"ack","f":1743152846830822,"d_finished":922,"c":1,"l":1743152846831744,"d":1750},{"a":1743152846832406,"name":"processing","f":1743152846626537,"d_finished":107236,"c":9,"l":1743152846831748,"d":108085},{"name":"ProduceResults","f":1743152846625312,"d_finished":2561,"c":12,"l":1743152846832727,"d":2561},{"a":1743152846832733,"name":"Finish","f":1743152846832733,"d_finished":0,"c":0,"l":1743152846833255,"d":522},{"name":"task_result","f":1743152846626556,"d_finished":106147,"c":8,"l":1743152846830662,"d":106147}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:07:26.833578Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:07:26.622936Z;index_granules=0;index_portions=1;index_batches=939;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2589280;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2589280;selected_rows=0; 2025-03-28T09:07:26.833616Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:07:26.833860Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:497:2499];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> DataShardReadIterator::ShouldReadNoColumnsRangeRequestArrow [GOOD] >> DataShardReadIterator::ShouldReadNonExistingKey >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 >> DataShardReadIterator::ShouldReadKeyPrefix1 [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix2 |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ViewerTopicDataTests::TopicDataTest [GOOD] |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> SlowTopicAutopartitioning::CDC_Write [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::ServerlessWithExclusiveNodesCheckTable [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 Test command err: 2025-03-28T09:05:56.422637Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784080501953975:2269];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:56.422701Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:05:56.959806Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:56.977944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:56.978052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:56.979807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32413, node 1 2025-03-28T09:05:57.151115Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:57.151142Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:57.151154Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:57.151289Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4551 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:57.498950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:57.528226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:05:57.537164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:57.542991Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-28T09:06:00.162989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784097681823637:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:00.163130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:00.163486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784097681823650:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:00.167385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:06:00.177552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784097681823652:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:06:00.242502Z node 1 :TX_PROXY ERROR: Actor# [1:7486784097681823703:2360] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:02.629771Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784107254656781:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:02.629837Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:06:02.748504Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:02.775862Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:02.775959Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:02.777548Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22940, node 2 2025-03-28T09:06:02.852455Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:02.852478Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:02.852485Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:02.852615Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63914 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:03.197661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:06:03.203269Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:06:03.256006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:06:03.261309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:03.269029Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-03-28T09:06:06.208579Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784124434526658:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:06.208579Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784124434526649:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:06.208656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:06.212054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-03-28T09:06:06.225043Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784124434526663:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-03-28T09:06:06.314806Z node 2 :TX_PROXY ERROR: Actor# [2:7486784124434526714:2357] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:07.945602Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486784129953037472:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:07.945660Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:06:08.120023Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:08.142656Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:08.142807Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:08.145161Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16272, node 3 2025-03-28T09:06:08.190408Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:08.190430Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:08.190443Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:08.190595Z node 3 :NET_CLASSIFIER ERROR: got bad dist ... atileState: Unknown -> Disconnected 2025-03-28T09:06:14.030825Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:14.033681Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2927, node 4 2025-03-28T09:06:14.102296Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:14.102320Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:14.102333Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:14.102483Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18948 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:14.532263Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:14.542444Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:06:14.602805Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:06:14.608130Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:18.360315Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486784175405032717:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:18.360412Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:18.360808Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486784175405032737:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:18.365575Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-03-28T09:06:18.377446Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486784175405032739:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-03-28T09:06:18.468002Z node 4 :TX_PROXY ERROR: Actor# [4:7486784175405032790:2358] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:29.440566Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:464:2426], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:06:29.440971Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:06:29.441153Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:06:29.941771Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:30.127407Z node 5 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-28T09:06:30.172779Z node 5 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-28T09:06:30.963444Z node 5 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 30851, node 5 TClient is connected to server localhost:62875 2025-03-28T09:06:31.530980Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:31.531069Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:31.531134Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:31.531898Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:06:44.502349Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:536:2425], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:06:44.502857Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:06:44.503085Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:06:44.922728Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:45.083503Z node 7 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-28T09:06:45.111857Z node 7 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-28T09:06:46.307214Z node 7 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 18953, node 7 TClient is connected to server localhost:27849 2025-03-28T09:06:46.919983Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:46.920066Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:46.920117Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:46.920698Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:07:04.941767Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:534:2424], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:07:04.942089Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:07:04.942296Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:07:05.525274Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:05.728445Z node 10 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-28T09:07:05.819528Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-28T09:07:07.600741Z node 10 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 24894, node 10 TClient is connected to server localhost:8587 2025-03-28T09:07:08.511559Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:07:08.511667Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:08.511754Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:08.512260Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:07:23.475024Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:616:2428], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:07:23.475692Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:07:23.475939Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:07:24.018926Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:24.221406Z node 13 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-28T09:07:24.269176Z node 13 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-28T09:07:25.299684Z node 13 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 27366, node 13 TClient is connected to server localhost:17837 2025-03-28T09:07:25.981690Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:07:25.981797Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:25.981882Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:25.983072Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> BasicStatistics::SimpleGlobalIndex [GOOD] >> RemoteTopicReader::ReadTopic [GOOD] >> TTxDataShardMiniKQL::ReadConstant >> TTxDataShardMiniKQL::ReadSpecialColumns >> TTxDataShardMiniKQL::WriteKeyTooLarge >> TTxDataShardMiniKQL::CrossShard_1_Cycle ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::ReadTopic [GOOD] Test command err: 2025-03-28T09:07:23.954280Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784455592246567:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:23.956477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f14/r3tmp/tmpkT1k4P/pdisk_1.dat 2025-03-28T09:07:24.369593Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:24.390554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:24.390671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:24.394715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30630 TServer::EnableGrpc on GrpcPort 61232, node 1 2025-03-28T09:07:24.621482Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2025-03-28T09:07:24.621510Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:24.621520Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:24.621892Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30630 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:07:24.998456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:25.013460Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:07:25.218668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-28T09:07:27.155975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784472772116507:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:27.156099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784472772116526:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:27.156135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784472772116525:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:27.156238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:27.162287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-03-28T09:07:27.180691Z node 1 :TX_PROXY ERROR: Actor# [1:7486784472772116546:2448] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:07:27.186063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784472772116543:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-28T09:07:27.186115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784472772116544:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-28T09:07:27.257666Z node 1 :TX_PROXY ERROR: Actor# [1:7486784472772116592:2478] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:27.282079Z node 1 :TX_PROXY ERROR: Actor# [1:7486784472772116610:2486] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:28.106720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:07:28.547667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:07:28.902498Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784455592246567:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:28.902561Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:07:29.117691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:07:29.633878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:07:30.130751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:07:31.043646Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486784489951986483:2818] Handshake: worker# [1:7486784464182181629:2291] 2025-03-28T09:07:31.059004Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486784489951986483:2818] Create read session: session# [1:7486784489951986484:2290] 2025-03-28T09:07:31.075088Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486784489951986483:2818] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-28T09:07:31.114029Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486784489951986483:2818] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 0 SeqNo: 1 CreateTime: 2025-03-28T09:07:30.938000Z MessageGroupId: producer ProducerId: producer }] } } 2025-03-28T09:07:31.116024Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486784489951986483:2818] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-28T09:07:31.251853Z node 1 :REPLICATION_SERVICE DEBUG: 
[RemoteTopicReader][/Root/topic][0][1:7486784489951986483:2818] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2025-03-28T09:07:31.219000Z MessageGroupId: producer ProducerId: producer }] } } 2025-03-28T09:07:31.321458Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486784489951986590:2857] Handshake: worker# [1:7486784464182181629:2291] 2025-03-28T09:07:31.323485Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486784489951986590:2857] Create read session: session# [1:7486784489951986591:2290] 2025-03-28T09:07:31.324090Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486784489951986590:2857] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-28T09:07:31.343045Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486784489951986590:2857] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2025-03-28T09:07:31.219000Z MessageGroupId: producer ProducerId: producer }] } } >> TTxDataShardMiniKQL::CrossShard_5_AllToAll ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::SimpleGlobalIndex [GOOD] Test command err: 2025-03-28T09:04:49.322883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:49.323197Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:49.323271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001117/r3tmp/tmpNhyXad/pdisk_1.dat 2025-03-28T09:04:49.729063Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1851, node 1 2025-03-28T09:04:50.039518Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:50.039577Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:50.039609Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:50.040065Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:04:50.043502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:50.155284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:50.155429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:50.170603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26061 2025-03-28T09:04:50.803612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:54.432222Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:04:54.469527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:54.469634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:54.508426Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:04:54.511092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:54.764466Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:54.764870Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:54.765364Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:54.765557Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:54.765923Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:54.766063Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:54.766172Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:54.766278Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:54.766368Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:54.961460Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:54.961604Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:54.975524Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:55.146906Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:55.214099Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:04:55.214225Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:04:55.249839Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:04:55.250006Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:04:55.250232Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:04:55.250323Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:04:55.250386Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:04:55.250437Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:04:55.250497Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:04:55.250552Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:04:55.251096Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:04:55.290518Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:04:55.290630Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:04:55.315177Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:04:55.330788Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:04:55.331049Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:04:55.342980Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:04:55.374526Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:04:55.374602Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:04:55.374703Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:04:55.396120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:04:55.405794Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:04:55.406001Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:04:55.631695Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:04:55.842747Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:04:55.929087Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:04:56.929033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:56.929180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:56.951267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:04:57.386997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2447:3112], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:57.387136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:57.388501Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2452:3116]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:04:57.388674Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:04:57.388781Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2454:3118] 2025-03-28T09:04:57.388849Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2454:3118] 2025-03-28T09:04:57.389368Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2455:2929] 2025-03-28T09:04:57.389619Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2454:3118], server id = [2:2455:2929], tablet id = 72075186224037894, status = OK 2025-03-28T09:04:57.389748Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2455:2929], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:04:57.389818Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:04:57.390010Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:04:57.390065Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2452:3116], StatRequests.size() = 1 2025-03-28T09:04:57.397268Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2488:3127]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:04:57.397476Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:04:57.397518Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2488:3127], StatRequests.size() = 1 2025-03-28T09:04:57.397725Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2490:3129]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:04:57.397871Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:04:57.397898Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2490:3129], StatRequests.size() = 1 2025-03-28T09:04:57.421081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2495:3134], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:57.421192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:57.421566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2500:3139], DatabaseId ... 8T09:07:21.159094Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:07:21.159144Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-28T09:07:22.029498Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6767:4785]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:22.029793Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-03-28T09:07:22.029837Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6767:4785], StatRequests.size() = 1 2025-03-28T09:07:22.823381Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:07:22.823581Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:07:22.823881Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:07:22.879182Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 3, at schemeshard: 72075186224037897 2025-03-28T09:07:22.879264Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 207.000000s, at schemeshard: 72075186224037897 2025-03-28T09:07:22.879581Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 73 2025-03-28T09:07:22.894180Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:07:23.518048Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6802:4803]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:23.518420Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-03-28T09:07:23.518472Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6802:4803], StatRequests.size() = 1 2025-03-28T09:07:24.259519Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:24.259605Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:07:24.259665Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-28T09:07:24.259710Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:07:24.260116Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:07:24.276747Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:07:24.282248Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6825:4822], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.282379Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6835:4827], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.283103Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.297990Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-28T09:07:24.396938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6839:4830], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-28T09:07:24.604153Z node 2 :TX_PROXY ERROR: Actor# [2:6939:4879] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:24.651296Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6968:4894]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:24.651533Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-28T09:07:24.651602Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6968:4894], StatRequests.size() = 1 2025-03-28T09:07:24.804316Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTEzYWIxOTYtZTg0MzcwYWItN2Y3YzA1ZmEtMjhmNTBkOTg=, TxId: 2025-03-28T09:07:24.804399Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTEzYWIxOTYtZTg0MzcwYWItN2Y3YzA1ZmEtMjhmNTBkOTg=, TxId: 2025-03-28T09:07:24.805055Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:07:24.824354Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:07:24.824435Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:07:25.311938Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6998:4912]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:25.312279Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-03-28T09:07:25.312324Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6998:4912], StatRequests.size() = 1 2025-03-28T09:07:26.665592Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7041:4937]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:26.665878Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-03-28T09:07:26.665913Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7041:4937], StatRequests.size() = 1 2025-03-28T09:07:27.349600Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:07:27.360606Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:27.360670Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:07:27.360710Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 6] is data table. 2025-03-28T09:07:27.360744Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 6] 2025-03-28T09:07:27.361033Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:07:27.363724Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:07:27.389571Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDczYzU2MzUtOTczNmYyNjAtZjVhNzRjMWQtYzI0YjRhN2U=, TxId: 2025-03-28T09:07:27.389638Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDczYzU2MzUtOTczNmYyNjAtZjVhNzRjMWQtYzI0YjRhN2U=, TxId: 2025-03-28T09:07:27.390115Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:07:27.407498Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 6] 2025-03-28T09:07:27.407554Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:07:27.999720Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7109:4977]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:27.999993Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-03-28T09:07:28.000041Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7109:4977], StatRequests.size() = 1 2025-03-28T09:07:29.420123Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7152:5001]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:29.420422Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-03-28T09:07:29.420466Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7152:5001], StatRequests.size() = 1 2025-03-28T09:07:30.145890Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:07:30.146132Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:07:30.146462Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:07:30.157784Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:30.157850Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:07:30.157894Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-28T09:07:30.157930Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:07:30.158366Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:07:30.161312Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:07:30.179644Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTQxMzM5MDQtNTE2ZjVhZWQtMzgxZWQ2OTAtYmEwYzdiMTk=, TxId: 2025-03-28T09:07:30.179731Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTQxMzM5MDQtNTE2ZjVhZWQtMzgxZWQ2OTAtYmEwYzdiMTk=, TxId: 2025-03-28T09:07:30.180267Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:07:30.198647Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:07:30.198833Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:07:30.814515Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7217:5038]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:30.814869Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-03-28T09:07:30.814921Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7217:5038], StatRequests.size() = 1 >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx |79.0%| [TA] $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} >> TTxDataShardMiniKQL::ReadConstant [GOOD] >> TTxDataShardMiniKQL::ReadAfterWrite >> TTxDataShardMiniKQL::ReadSpecialColumns [GOOD] >> TTxDataShardMiniKQL::SelectRange >> TTxDataShardMiniKQL::WriteKeyTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteValueTooLarge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> ViewerTopicDataTests::TopicDataTest [GOOD] Test command err: 2025-03-28T09:06:28.349162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:1731:2429], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:06:28.350870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:06:28.351570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:06:28.354224Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:1740:2372], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:06:28.355354Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:1728:2372], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:06:28.355542Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:1734:2372], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:06:28.355716Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:1737:2372], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:06:28.355802Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:06:28.356248Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:06:28.356705Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:06:28.356795Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:06:28.356851Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:06:28.357440Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:06:28.357486Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:06:28.357563Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:06:29.064791Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:29.230924Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-28T09:06:29.269880Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-28T09:06:29.812744Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 64888, node 1 TClient is connected to server localhost:13737 2025-03-28T09:06:30.147535Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:30.147613Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:30.147659Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:30.148322Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:07:11.864539Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486784404406064020:2093];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:11.864620Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:07:12.169713Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:12.169837Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:12.172229Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:12.207977Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62960, node 6 2025-03-28T09:07:12.391446Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:07:12.391483Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:12.391497Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:12.391680Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6451 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:07:12.924404Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:12.959900Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:07:12.976004Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:07:12.985838Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:12.993609Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-28T09:07:15.504385Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:07:15.504500Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:07:16.594366Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486784425880901150:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:16.594456Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486784425880901171:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:16.594569Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:16.599304Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:07:16.611405Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486784425880901179:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:07:16.699277Z node 6 :TX_PROXY ERROR: Actor# [6:7486784425880901230:2359] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:16.864011Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486784404406064020:2093];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:16.864108Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:07:17.317226Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:07:17.689305Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:07:17.689352Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:07:19.046615Z node 6 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:07:19.046660Z node 6 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:07:22.341136Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486784452517770322:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:22.341730Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ff2/r3tmp/tmpxdQt7q/pdisk_1.dat 2025-03-28T09:07:22.526339Z node 7 :IMPORT WARN: Table profiles were not loaded 2025 ... 
3-28T09:07:26.451574Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session got write response: sequence_numbers: 11 offsets: 50 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 41 } 2025-03-28T09:07:26.451646Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session: acknoledged message 11 2025-03-28T09:07:26.455354Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session got write response: sequence_numbers: 12 offsets: 51 already_written: false write_statistics { persist_duration_ms: 2 queued_in_partition_duration_ms: 3 } 2025-03-28T09:07:26.455415Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session: acknoledged message 12 2025-03-28T09:07:26.455723Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session got write response: sequence_numbers: 13 offsets: 52 already_written: false write_statistics { persist_duration_ms: 2 queued_in_partition_duration_ms: 1 } 2025-03-28T09:07:26.455751Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session: acknoledged message 13 2025-03-28T09:07:26.456130Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session got write response: sequence_numbers: 14 offsets: 53 already_written: false write_statistics { persist_duration_ms: 2 queued_in_partition_duration_ms: 1 } 2025-03-28T09:07:26.456159Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session: acknoledged message 14 2025-03-28T09:07:26.456300Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session got write response: sequence_numbers: 15 sequence_numbers: 16 offsets: 54 offsets: 55 already_written: false already_written: false write_statistics { persist_duration_ms: 2 queued_in_partition_duration_ms: 1 } 2025-03-28T09:07:26.456319Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session: acknoledged message 15 2025-03-28T09:07:26.456343Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session: acknoledged message 16 2025-03-28T09:07:26.456949Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session got write response: sequence_numbers: 17 offsets: 56 already_written: false write_statistics { persist_duration_ms: 2 queued_in_partition_duration_ms: 1 } 2025-03-28T09:07:26.457007Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session: acknoledged message 17 2025-03-28T09:07:26.457167Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session got write response: sequence_numbers: 18 offsets: 57 already_written: false write_statistics { persist_duration_ms: 2 queued_in_partition_duration_ms: 1 } 2025-03-28T09:07:26.457197Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session: acknoledged message 18 2025-03-28T09:07:26.457328Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session 
got write response: sequence_numbers: 19 offsets: 58 already_written: false write_statistics { persist_duration_ms: 2 queued_in_partition_duration_ms: 1 } 2025-03-28T09:07:26.457352Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session: acknoledged message 19 2025-03-28T09:07:26.458777Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session got write response: sequence_numbers: 20 offsets: 59 already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 1 } 2025-03-28T09:07:26.458811Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session: acknoledged message 20 2025-03-28T09:07:26.534790Z :INFO: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session will now close 2025-03-28T09:07:26.534884Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session: aborting 2025-03-28T09:07:26.535519Z :INFO: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:07:26.536549Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session is aborting and will not restart 2025-03-28T09:07:26.554161Z :DEBUG: [] MessageGroupId [producer3] SessionId [producer3|1625187-1b4e8494-8b9f0f4c-7b2d97fd_0] Write session: destroy 2025-03-28T09:07:26.884626Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486784469697640465:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:26.884701Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486784469697640452:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:26.884826Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:26.889004Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-03-28T09:07:26.908711Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486784469697640468:2392], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-03-28T09:07:26.996280Z node 7 :TX_PROXY ERROR: Actor# [7:7486784469697640519:2517] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:27.186926Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7486784469697640535:2396], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:07:27.187264Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=YTJjNDY2M2EtN2M1M2M1MWMtNTg2ZGViNmUtNmU1ZWM3YmE=, ActorId: [7:7486784469697640427:2385], ActorState: ExecuteState, TraceId: 01jqe036130tkm9vvy5v0jc0m9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:07:27.188238Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:07:27.340555Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486784452517770322:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:27.340713Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 Size: 1398104 2025-03-28T09:07:27.443040Z :DEBUG: [] MessageGroupId [producer4] SessionId [] Write session: try to update token 2025-03-28T09:07:27.443763Z :INFO: [] MessageGroupId [producer4] SessionId [] Write session: Do CDS request 2025-03-28T09:07:27.443836Z :INFO: [] MessageGroupId [producer4] SessionId [] Start write session. Will connect to endpoint: localhost:8032 2025-03-28T09:07:27.455464Z :DEBUG: [] MessageGroupId [producer4] SessionId [] Write session: send init request: init_request { topic: "/Root/topic1" message_group_id: "producer4" } 2025-03-28T09:07:27.466454Z :INFO: [] MessageGroupId [producer4] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743152847466 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:07:27.466552Z :INFO: [] MessageGroupId [producer4] SessionId [] Write session established. Init response: session_id: "producer4|755172e0-170949dc-275c7e01-49aee09d_0" topic: "topic1" 2025-03-28T09:07:27.470237Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|755172e0-170949dc-275c7e01-49aee09d_0] Write 1 messages with Id from 1 to 1 2025-03-28T09:07:27.475036Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|755172e0-170949dc-275c7e01-49aee09d_0] Write session: close. 
Timeout = 18446744073709551 ms 2025-03-28T09:07:27.517291Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|755172e0-170949dc-275c7e01-49aee09d_0] Write session: try to update token 2025-03-28T09:07:27.517363Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|755172e0-170949dc-275c7e01-49aee09d_0] Send 1 message(s) (0 left), first sequence number is 1 2025-03-28T09:07:27.551072Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|755172e0-170949dc-275c7e01-49aee09d_0] Write session got write response: sequence_numbers: 1 offsets: 60 already_written: false write_statistics { persist_duration_ms: 3 queued_in_partition_duration_ms: 25 } 2025-03-28T09:07:27.551127Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|755172e0-170949dc-275c7e01-49aee09d_0] Write session: acknoledged message 1 2025-03-28T09:07:27.575337Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|755172e0-170949dc-275c7e01-49aee09d_0] Write session will now close 2025-03-28T09:07:27.575439Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|755172e0-170949dc-275c7e01-49aee09d_0] Write session: aborting 2025-03-28T09:07:27.576126Z :INFO: [] MessageGroupId [producer4] SessionId [producer4|755172e0-170949dc-275c7e01-49aee09d_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:07:27.577188Z :DEBUG: [] MessageGroupId [producer4] SessionId [producer4|755172e0-170949dc-275c7e01-49aee09d_0] Write session: destroy Size: 4194320 Got response:400: PathErrorUnknown Got response:400: No such partition in topic 2025-03-28T09:07:27.806712Z node 7 :PERSQUEUE ERROR: [PQ: 72075186224037889, Partition: 0, State: StateIdle] reading from too big offset - topic topic1 partition 0 client $without_consumer EndOffset 61 offset 10000 Got response:400: Bad offset >> KqpNewEngine::DecimalColumn35 [GOOD] >> KqpNewEngine::ComplexLookupLimit >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 >> BasicStatistics::Simple [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx >> TTxDataShardMiniKQL::SelectRange [GOOD] >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey >> TTxDataShardMiniKQL::ReadAfterWrite [GOOD] >> TTxDataShardMiniKQL::ReadNonExisting >> KqpNewEngine::BrokenLocksAtROTxSharded [GOOD] >> KqpNewEngine::BrokenLocksOnUpdate >> TTxDataShardMiniKQL::WriteEraseRead >> TTxDataShardMiniKQL::Write >> DataShardReadIterator::ShouldReadNonExistingKey [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeys >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageMultiShard >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Simple [GOOD] Test command err: 
2025-03-28T09:04:50.690520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:50.690852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:50.690921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001100/r3tmp/tmp3BIyYr/pdisk_1.dat 2025-03-28T09:04:51.110425Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19372, node 1 2025-03-28T09:04:51.378218Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:51.378279Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:51.378312Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:51.378731Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:04:51.384691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:51.468083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:51.468321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:51.485405Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2918 2025-03-28T09:04:52.105598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:55.650112Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:04:55.690650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:55.690791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:55.738226Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:04:55.747503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:56.056479Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.057009Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.057508Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.057674Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.057922Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.058034Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.058107Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.058176Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.058249Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:56.247097Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:56.247212Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:56.264270Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:56.455966Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:56.526056Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:04:56.526180Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:04:56.588921Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:04:56.589120Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:04:56.589358Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:04:56.589456Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:04:56.589512Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:04:56.589580Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:04:56.589643Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:04:56.589695Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:04:56.590205Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:04:56.634195Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:04:56.634310Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:04:56.645997Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:04:56.656565Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:04:56.656806Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:04:56.674020Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:04:56.709931Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:04:56.709987Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:04:56.710056Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:04:56.725326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:04:56.749325Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:04:56.749497Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:04:56.947734Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:04:57.143127Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:04:57.224426Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:04:58.168965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:58.169094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:58.193890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:04:58.505000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2375:3106], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:58.505167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:58.506517Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2380:3110]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:04:58.506741Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:04:58.506828Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2382:3112] 2025-03-28T09:04:58.506894Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2382:3112] 2025-03-28T09:04:58.507656Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2383:2874] 2025-03-28T09:04:58.507897Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2382:3112], server id = [2:2383:2874], tablet id = 72075186224037894, status = OK 2025-03-28T09:04:58.509299Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2383:2874], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:04:58.509385Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:04:58.509739Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:04:58.509822Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2380:3110], StatRequests.size() = 1 2025-03-28T09:04:58.534905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2387:3116], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:58.535023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:58.535427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2392:3121], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:58.541184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-28T09:04:58.751531Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:04:58.751620Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:04:58.851454Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2382:3112], schemeshard count = 1 2025-03-28T09:04:59.256118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorA ... eplyToActorId[ [2:6427:4622]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:20.649025Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 116 ] 2025-03-28T09:07:20.649068Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 116, ReplyToActorId = [2:6427:4622], StatRequests.size() = 1 2025-03-28T09:07:21.917508Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 117 ], ReplyToActorId[ [2:6458:4634]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:21.917835Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 117 ] 2025-03-28T09:07:21.917881Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:6458:4634], StatRequests.size() = 1 2025-03-28T09:07:22.673005Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:07:23.281656Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:6495:4652]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:23.282057Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-03-28T09:07:23.282111Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:6495:4652], StatRequests.size() = 1 2025-03-28T09:07:24.081645Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-28T09:07:24.081747Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:07:24.081797Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:07:24.081848Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-28T09:07:24.903783Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:6537:4669]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:24.904033Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-03-28T09:07:24.904065Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:6537:4669], StatRequests.size() = 1 2025-03-28T09:07:25.702261Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:07:25.702448Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:07:25.702750Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:07:25.755437Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-28T09:07:25.755523Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 
209.000000s, at schemeshard: 72075186224037897 2025-03-28T09:07:25.755908Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 2025-03-28T09:07:25.772108Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:07:26.409113Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:6570:4685]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:26.409357Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-03-28T09:07:26.409396Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:6570:4685], StatRequests.size() = 1 2025-03-28T09:07:27.147354Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:27.147437Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:07:27.147502Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-28T09:07:27.147558Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:07:27.147973Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:07:27.179065Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:07:27.182765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6593:4704], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:27.182879Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6603:4709], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:27.183032Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:27.196352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-28T09:07:27.282029Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6607:4712], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-28T09:07:27.438344Z node 2 :TX_PROXY ERROR: Actor# [2:6702:4760] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:27.486941Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:6731:4775]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:27.487220Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-28T09:07:27.487295Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:6731:4775], StatRequests.size() = 1 2025-03-28T09:07:27.626190Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGZlNGUwZDAtNWRkZjE5ZWUtOWQwMzM5MzktZmQ3MmE4YmI=, TxId: 2025-03-28T09:07:27.626291Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGZlNGUwZDAtNWRkZjE5ZWUtOWQwMzM5MzktZmQ3MmE4YmI=, TxId: 2025-03-28T09:07:27.626949Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:07:27.644388Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:07:27.644456Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:07:28.174107Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:6766:4795]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:28.174428Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-03-28T09:07:28.174471Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:6766:4795], StatRequests.size() = 1 2025-03-28T09:07:29.523637Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:6805:4817]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:29.523864Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-03-28T09:07:29.523896Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:6805:4817], StatRequests.size() = 1 2025-03-28T09:07:30.255447Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:07:30.266715Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:30.266775Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:07:30.266814Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-28T09:07:30.266849Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:07:30.267128Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:07:30.269330Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:07:30.290435Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTEyNzM0ZDItYzY1MDg3NmMtN2NmZjE2MmYtOTMxMGIwNTQ=, TxId: 2025-03-28T09:07:30.290489Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTEyNzM0ZDItYzY1MDg3NmMtN2NmZjE2MmYtOTMxMGIwNTQ=, TxId: 2025-03-28T09:07:30.291107Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:07:30.322286Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:07:30.322347Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:07:30.941591Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:6875:4858]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:30.941789Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-03-28T09:07:30.941819Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:6875:4858], StatRequests.size() = 1 2025-03-28T09:07:32.429357Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:6918:4882]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:32.429676Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-03-28T09:07:32.429710Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:6918:4882], StatRequests.size() = 1 2025-03-28T09:07:33.179847Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:07:33.180035Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:07:33.180422Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:07:33.196147Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:33.196209Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-03-28T09:07:33.794738Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:6953:4898]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:33.795187Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-03-28T09:07:33.795242Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:6953:4898], StatRequests.size() = 1 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 >> TTxDataShardMiniKQL::WriteValueTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteLargeExternalBlob >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 >> TTxDataShardMiniKQL::Write [GOOD] >> TTxDataShardMiniKQL::TableStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] Test command err: 2025-03-28T09:07:33.383405Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:07:33.390647Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:07:33.391090Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:07:33.391262Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:07:33.428708Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:07:33.511063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:07:33.511115Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:33.520678Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:07:33.521886Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:07:33.523468Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:07:33.523547Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:07:33.523611Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:07:33.524034Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:07:33.524107Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:07:33.524192Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:07:33.592895Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:07:33.627175Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:07:33.627365Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:07:33.627480Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:07:33.627518Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:07:33.627552Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:07:33.627588Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] 
Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:33.627848Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:33.627902Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:33.628147Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:07:33.628237Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:07:33.628293Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:33.628344Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:07:33.628380Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:07:33.628412Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:07:33.628452Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:07:33.628491Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:07:33.628528Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:07:33.628635Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:33.628679Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:33.628740Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:07:33.631445Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nx\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\016\n\010__tablet\030\004 9\032\023\n\r__updateEpoch\030\004 :\032\020\n\n__updateNo\030\004 ;(\"J\014/Root/table1\222\002\013\th\020\000\000\000\000\000\000\020\r" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:07:33.631523Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:07:33.631603Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:07:33.631743Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:07:33.631791Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:07:33.631847Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:07:33.631903Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:33.631967Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:07:33.632000Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:07:33.632053Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:07:33.632344Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:07:33.632380Z 
node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:07:33.632411Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:07:33.632453Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:07:33.632499Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:07:33.632527Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:07:33.632562Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:07:33.632603Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:07:33.632626Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:07:33.645927Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:07:33.646003Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:07:33.646064Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:07:33.646107Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:07:33.646184Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:07:33.646666Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:33.646743Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:33.646799Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:07:33.646914Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:07:33.646950Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:07:33.647092Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:07:33.647160Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:07:33.647198Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:07:33.647230Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:07:33.650302Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:07:33.650349Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:33.650529Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:33.650566Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:33.650635Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:33.650786Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:07:33.650823Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:07:33.650860Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:07:33.650906Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:07:33.650979Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:07:33.651043Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:07:33.651078Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:07:33.651110Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:07:33.651280Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:07:33.651312Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:07:33.651331Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:07:33.651348Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:07:33.651365Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:07:33.651410Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:33.651428Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:07:33.651450Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:07:33.651500Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:07:33.651578Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:07:33.651608Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:07:33.651630Z node 1 :TX_DATASHARD TR ... 
847547Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:07:35.847572Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:6] at 9437184 on unit FinishPropose 2025-03-28T09:07:35.847600Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:35.852697Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-03-28T09:07:35.852808Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-03-28T09:07:35.853264Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:300:2282], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:35.853319Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:35.853381Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:299:2281], serverId# [3:300:2282], sessionId# [0:0:0] 2025-03-28T09:07:35.853656Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\351\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4e\005\'?8\003\013?>\003?\000\003?@\000\003?B\000\006\004?F\003\203\014\000\003\203\014\000\003\003?H\000\377\007\002\000\005?\032\005?\026?r\000\005?\030\003\005? 
\005?\034?r\000\006\000?\036\003?x\005?&\006\0 2025-03-28T09:07:35.863682Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:07:35.863770Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:07:35.864466Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CheckDataTx 2025-03-28T09:07:35.864554Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-03-28T09:07:35.864603Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CheckDataTx 2025-03-28T09:07:35.864634Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:07:35.864667Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:07:35.864736Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-28T09:07:35.864789Z node 3 :TX_DATASHARD TRACE: Activated operation [0:8] at 9437184 2025-03-28T09:07:35.864823Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-03-28T09:07:35.864844Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-28T09:07:35.864863Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit ExecuteDataTx 2025-03-28T09:07:35.864883Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit ExecuteDataTx 2025-03-28T09:07:35.865383Z node 3 :TX_DATASHARD TRACE: Executed operation [0:8] at tablet 9437184 with status COMPLETE 2025-03-28T09:07:35.865446Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:8] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 2, SelectRangeBytes: 31, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:07:35.865503Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-03-28T09:07:35.865527Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit ExecuteDataTx 2025-03-28T09:07:35.865548Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit FinishPropose 2025-03-28T09:07:35.865570Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit FinishPropose 2025-03-28T09:07:35.865608Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 8 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-28T09:07:35.865687Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is DelayComplete 2025-03-28T09:07:35.865716Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit FinishPropose 2025-03-28T09:07:35.865751Z node 3 :TX_DATASHARD TRACE: Add [0:8] at 9437184 to execution unit CompletedOperations 2025-03-28T09:07:35.865782Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:8] at 9437184 on unit CompletedOperations 2025-03-28T09:07:35.865818Z node 3 :TX_DATASHARD TRACE: Execution status for [0:8] at 9437184 is Executed 2025-03-28T09:07:35.865837Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 9437184 executing on unit CompletedOperations 2025-03-28T09:07:35.865859Z node 3 
:TX_DATASHARD TRACE: Execution plan for [0:8] at 9437184 has finished 2025-03-28T09:07:35.865917Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:07:35.865951Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:8] at 9437184 on unit FinishPropose 2025-03-28T09:07:35.865992Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] Test command err: 2025-03-28T09:07:33.375455Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:07:33.384749Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:07:33.385680Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:07:33.385945Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:07:33.441268Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:07:33.524563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:07:33.524613Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:33.535681Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:07:33.538756Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:07:33.540457Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:07:33.540536Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:07:33.540582Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:07:33.541113Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:07:33.541197Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:07:33.541275Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:07:33.616273Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:07:33.655824Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:07:33.656002Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:07:33.656110Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:07:33.656146Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:07:33.656180Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:07:33.656222Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:33.656470Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:33.656556Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:33.656787Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:07:33.656872Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:07:33.656922Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:33.656973Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:07:33.657008Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:07:33.657041Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:07:33.657075Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:07:33.657112Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:07:33.657168Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:07:33.657273Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:33.657320Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:33.657370Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:07:33.660044Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:07:33.660118Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:07:33.660205Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:07:33.660391Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:07:33.660434Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:07:33.660490Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:07:33.660558Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:33.660611Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:07:33.660644Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:07:33.660674Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:07:33.660950Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:07:33.660986Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:07:33.661018Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:07:33.661047Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:07:33.661084Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:07:33.661112Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:07:33.661142Z node 1 
:TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:07:33.661192Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:07:33.661227Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:07:33.675008Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:07:33.675114Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:07:33.675185Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:07:33.675248Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:07:33.675358Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:07:33.676108Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:220:2218], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:33.676172Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:33.676218Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:219:2217], serverId# [1:220:2218], sessionId# [0:0:0] 2025-03-28T09:07:33.676299Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:07:33.676330Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:07:33.676504Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:07:33.676573Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:07:33.676617Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:07:33.676658Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:07:33.680968Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:07:33.681056Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:33.681350Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:33.681409Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:33.681467Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:33.681514Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:07:33.681552Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:07:33.681598Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:07:33.681640Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:07:33.681690Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is 
Executed 2025-03-28T09:07:33.681746Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:07:33.681790Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:07:33.681830Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:07:33.682034Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:07:33.682082Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:07:33.682118Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:07:33.682145Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:07:33.682171Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:07:33.682247Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:33.682276Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:07:33.682332Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:07:33.682368Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:07:33.682439Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:07:33.682508Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:07:33.682545Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:07:33.682594Z node 1 :TX_DATA ... 
tateInit, received event# 268828673, Sender [3:229:2224], Recipient [3:232:2225]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:07:35.741094Z node 3 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [3:229:2224], Recipient [3:232:2225]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:07:35.755635Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [3:232:2225] 2025-03-28T09:07:35.755856Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:07:35.761755Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute Persist Sys_SubDomainInfo 2025-03-28T09:07:35.792973Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:07:35.793132Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:07:35.794988Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:07:35.795076Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:07:35.795133Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:07:35.795517Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:07:35.795754Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:07:35.795859Z node 3 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [3:275:2225] in generation 3 2025-03-28T09:07:35.807934Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:07:35.808055Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 9437184 2025-03-28T09:07:35.808143Z node 3 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-03-28T09:07:35.808314Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2025-03-28T09:07:35.808517Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [3:280:2264] 2025-03-28T09:07:35.808553Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:07:35.808596Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2025-03-28T09:07:35.808630Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:35.808778Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-03-28T09:07:35.808893Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-03-28T09:07:35.809113Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [3:232:2225], Recipient [3:232:2225]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:35.809159Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:35.809369Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:07:35.809436Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:07:35.809600Z node 3 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 232 RawX2: 12884904113 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2025-03-28T09:07:35.809671Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [3:24:2071], Recipient [3:232:2225]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-03-28T09:07:35.809706Z node 3 :TX_DATASHARD TRACE: StateWork, processing event 
TEvMediatorTimecast::TEvRegisterTabletResult 2025-03-28T09:07:35.809740Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-03-28T09:07:35.809775Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:35.809859Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:35.809907Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:07:35.809942Z node 3 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:07:35.809975Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:07:35.810012Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:07:35.810044Z node 3 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:07:35.810082Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:07:35.810181Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [3:24:2071], Recipient [3:232:2225]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2025-03-28T09:07:35.810213Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-03-28T09:07:35.810259Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2025-03-28T09:07:35.810364Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [3:278:2262], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [3:282:2266] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:07:35.810399Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T09:07:35.810480Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [3:127:2152], Recipient [3:232:2225]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-03-28T09:07:35.810512Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-03-28T09:07:35.810558Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-03-28T09:07:35.810616Z node 3 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-03-28T09:07:35.822508Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [3:278:2262], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [3:278:2262] ServerId: [3:282:2266] } 2025-03-28T09:07:35.822564Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-28T09:07:35.868494Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-03-28T09:07:35.868553Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-03-28T09:07:35.868760Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:288:2270], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:35.868797Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:35.868840Z node 3 
:TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:286:2269], serverId# [3:288:2270], sessionId# [0:0:0] 2025-03-28T09:07:35.869050Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\365\001\037\004\0021\nvalue\005\205\n\205\002\207\205\002\207\203\001H\006\002\205\004\205\002?\006\002\205\000\034MyReads MyWrites\205\004\205\002?\006\002\206\202\024Reply\024Write?\014\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\010)\211\n?\006\203\005\004\200\205\002\203\004\006\213\002\203\004\203\004$SelectRow\000\003?\036 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000?\004\005?\"\003? p\001\013?&\003?$T\001\003?(\000\037\002\000\005?\016\005?\n?8\000\005?\014\003\005?\024\005?\020?8\000\006\000?\022\003?>\005?\032\006\000?\030\001\037/ \0018\001" TxId: 2 ExecLevel: 0 Flags: 0 2025-03-28T09:07:35.869084Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:07:35.869192Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:07:35.869809Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2025-03-28T09:07:35.869902Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-03-28T09:07:35.869946Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2025-03-28T09:07:35.869981Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:07:35.870021Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:07:35.870050Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:07:35.870090Z node 3 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-03-28T09:07:35.870120Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-03-28T09:07:35.870137Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-28T09:07:35.870151Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-03-28T09:07:35.870164Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-03-28T09:07:35.870502Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-03-28T09:07:35.870575Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:07:35.870630Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-03-28T09:07:35.870654Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-03-28T09:07:35.870680Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-03-28T09:07:35.870727Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 
2025-03-28T09:07:35.870768Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-28T09:07:35.870853Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayComplete 2025-03-28T09:07:35.870904Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-03-28T09:07:35.870942Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-03-28T09:07:35.870977Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-03-28T09:07:35.871018Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-03-28T09:07:35.871041Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-03-28T09:07:35.871081Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-03-28T09:07:35.871164Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:07:35.871200Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-03-28T09:07:35.871241Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> TTxDataShardMiniKQL::WriteEraseRead [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMultipleShards >> KqpPg::TempTablesWithCache [GOOD] >> KqpPg::TableDeleteWhere+useSink >> DataShardReadIterator::ShouldReadKeyPrefix2 [GOOD] >> DataShardReadIterator::ShouldReadKeyPrefix3 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] >> TTxDataShardMiniKQL::TableStats [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms >> TTxDataShardMiniKQL::WriteAndReadMultipleShards [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 5645, MsgBus: 62668 2025-03-28T09:06:21.548854Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784188219300739:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:21.550406Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001673/r3tmp/tmppTxI2W/pdisk_1.dat 2025-03-28T09:06:21.872692Z node 1 :IMPORT WARN: Table profiles were not loaded 
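
[Editor's note] The TX_DATASHARD traces above walk each operation through a fixed chain of execution units (CheckDataTx, BuildAndWaitDependencies, ExecuteDataTx, FinishPropose, CompletedOperations), and the unit's returned status decides whether the plan advances, restarts, or defers its side effects. A minimal illustrative sketch of that dispatch loop follows; the names TOperation, TUnit and RunExecutionPlan are hypothetical stand-ins, not the actual NKikimr types:

    #include <functional>
    #include <map>
    #include <string>
    #include <vector>

    enum class EExecutionStatus { Executed, Restart, DelayComplete };

    struct TOperation {
        std::vector<std::string> Plan;                // e.g. {"CheckDataTx", "BuildAndWaitDependencies", ...}
        size_t Next = 0;                              // index of the unit to try next
        std::vector<std::string> DelayedCompletions;  // flushed later, in the TTx...::Complete phase
    };

    using TUnit = std::function<EExecutionStatus(TOperation&)>;

    // One pass of the progress loop: "Trying to execute ... on unit X" followed by
    // "Advance execution plan ... executing on unit X" for every unit that finishes.
    EExecutionStatus RunExecutionPlan(TOperation& op,
                                      const std::map<std::string, TUnit>& units) {
        while (op.Next < op.Plan.size()) {
            const std::string& name = op.Plan[op.Next];
            EExecutionStatus status = units.at(name)(op);
            if (status == EExecutionStatus::Restart)
                return status;  // re-entered later, e.g. once resources are granted
            if (status == EExecutionStatus::DelayComplete)
                op.DelayedCompletions.push_back(name);  // "Complete execution for ..." runs at commit
            ++op.Next;  // Executed and DelayComplete both advance the plan
        }
        return EExecutionStatus::Executed;  // "Execution plan ... has finished"
    }

This matches the trace pattern where FinishPropose reports DelayComplete, the plan still advances through CompletedOperations, and "Complete execution for [0:2] ... on unit FinishPropose" only appears inside TTxProposeTransactionBase::Complete.
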
2025-03-28T09:06:21.944344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:21.944446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:21.946054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5645, node 1 2025-03-28T09:06:22.098612Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:22.098636Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:22.098641Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:22.098754Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62668 TClient is connected to server localhost:62668 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:22.718419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:22.749848Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:24.686912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784201104203261:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:06:24.687035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:06:24.689168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784201104203294:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:06:24.694256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:06:24.708756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784201104203296:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:06:24.795270Z node 1 :TX_PROXY ERROR: Actor# [1:7486784201104203347:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:25.284994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:06:25.444097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784205399170827:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:06:25.444097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784205399170838:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:06:25.444287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784205399170838:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:06:25.444551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784205399170838:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:06:25.444693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784205399170838:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:06:25.444826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784205399170827:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:06:25.444856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784205399170838:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:06:25.444981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784205399170838:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:06:25.445007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784205399170827:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:06:25.445111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784205399170838:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:06:25.445116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784205399170827:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-03-28T09:06:25.445234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784205399170827:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:06:25.445264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784205399170838:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:06:25.445339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784205399170827:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:06:25.445402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784205399170838:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:06:25.445453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784205399170827:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:06:25.445548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784205399170838:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:06:25.445576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784205399170827:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:06:25.445693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784205399170827:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:06:25.445710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784205399170838:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:06:25.445849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784205399170827:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:06:25.445880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784205399170838:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:06:25.445964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784205399170827:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:06:25.446083Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7486784205399170827:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:06:25.485597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784205399170840:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:06:25.485831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784205399170840:2346];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstr ... 0];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.274482Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7486784324932856717:3391];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.276019Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7486784324932856819:3428];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.277287Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7486784320637889061:3293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.277760Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[2:7486784320637889069:3299];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.277879Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7486784324932856825:3431];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.277935Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[2:7486784320637889341:3350];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038082;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.278026Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7486784320637889190:3330];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.278153Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7486784324932856717:3391];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.278568Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7486784324932856736:3402];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.278616Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7486784320637889178:3328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 
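
[Editor's note] The TTxInitSchema traces above register a fixed, ordered chain of normalizers per columnshard tablet (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, and so on), each logged as "event=normalizer_register;description=CLASS_NAME=<name>". A rough sketch of that registration pattern, assuming hypothetical type names rather than the real classes:

    #include <memory>
    #include <string>
    #include <utility>
    #include <vector>

    struct INormalizer {
        virtual ~INormalizer() = default;
        virtual void Apply() = 0;  // bring on-disk metadata up to the current schema version
    };

    class TNormalizerChain {
        std::vector<std::pair<std::string, std::unique_ptr<INormalizer>>> Chain;
    public:
        // Mirrors "event=normalizer_register;description=CLASS_NAME=<name>":
        // registration order defines execution order, so every tablet logs the
        // same sequence during TTxInitSchema::Execute.
        void Register(std::string name, std::unique_ptr<INormalizer> impl) {
            Chain.emplace_back(std::move(name), std::move(impl));
        }
        void RunAll() {
            for (auto& [name, impl] : Chain)
                impl->Apply();
        }
    };
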
2025-03-28T09:07:03.279105Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7486784324932856755:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.279271Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7486784324932856755:3408];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.279309Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7486784320637889178:3328];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.279470Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[2:7486784320637888999:3284];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038085;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.279643Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[2:7486784324932856686:3369];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.279794Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[2:7486784320637889125:3308];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038064;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.279906Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7486784320637889334:3345];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038014;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.280051Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[2:7486784320637888997:3283];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038074;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.280217Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[2:7486784320637888991:3281];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.280217Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7486784324932856696:3376];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.280376Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7486784320637889061:3293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.281092Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7486784324932856736:3402];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.281808Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038085;self_id=[2:7486784320637888999:3284];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038085;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.281822Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[2:7486784324932856686:3369];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.281959Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[2:7486784320637889125:3308];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038064;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.282014Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7486784320637889334:3345];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038014;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.282088Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[2:7486784320637888997:3283];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038074;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.282138Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7486784324932856696:3376];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.282210Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[2:7486784320637888991:3281];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.283055Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7486784324932856846:3437];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038006;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.283289Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7486784324932856701:3378];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.283475Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7486784324932856849:3438];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.283646Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7486784324932856849:3438];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.283743Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7486784324932856846:3437];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038006;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.283871Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7486784324932856701:3378];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.284043Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038021;self_id=[2:7486784324932856829:3432];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.284195Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7486784324932856829:3432];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.303247Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7486784324932856831:3433];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.303548Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7486784324932856831:3433];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.303707Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7486784324932856801:3416];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.303833Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7486784324932856801:3416];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.303997Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7486784324932856724:3395];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:07:03.304217Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7486784324932856724:3395];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] Test command err: 2025-03-28T09:07:34.148949Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:07:34.155954Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:07:34.156492Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:07:34.156751Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:07:34.207652Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:07:34.287559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:07:34.287607Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:34.296832Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:07:34.298641Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 
2025-03-28T09:07:34.300336Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:07:34.300408Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:07:34.300451Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:07:34.300923Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:07:34.301031Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:07:34.301108Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:07:34.367231Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:07:34.392936Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:07:34.393130Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:07:34.393250Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:07:34.393311Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:07:34.393348Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:07:34.393385Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:34.393630Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:34.393707Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:34.393942Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:07:34.394033Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:07:34.394096Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:34.394130Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:07:34.394155Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:07:34.394179Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:07:34.394210Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:07:34.394236Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:07:34.394266Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:07:34.394348Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:34.394394Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:34.394438Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:07:34.396599Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 
9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:07:34.396666Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:07:34.396731Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:07:34.396861Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:07:34.396906Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:07:34.396955Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:07:34.397005Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:34.397060Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:07:34.397094Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:07:34.397121Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:07:34.397355Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:07:34.397382Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:07:34.397409Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:07:34.397456Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:07:34.397492Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:07:34.397516Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:07:34.397544Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:07:34.397580Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:07:34.397606Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:07:34.410093Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:07:34.410167Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:07:34.410222Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:07:34.410262Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:07:34.410326Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:07:34.410876Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:34.410927Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:34.410965Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:07:34.411039Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 
2025-03-28T09:07:34.411066Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:07:34.411223Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-03-28T09:07:34.411271Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-28T09:07:34.411304Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:07:34.411339Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:07:34.414903Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:07:34.414972Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:34.415203Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:34.415239Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:34.415297Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:34.415335Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:07:34.415365Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:07:34.415402Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-03-28T09:07:34.415433Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-03-28T09:07:34.415493Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-28T09:07:34.415533Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:07:34.415574Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:07:34.415618Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:07:34.415737Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Restart 2025-03-28T09:07:34.415766Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:07:34.415793Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:07:34.415825Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:07:34.415852Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:07:34.416135Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:34.416171Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:07:34.416317Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-03-28T09:07:34.416353Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-28T09:07:34.416384Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:07:34.416434Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:07:34.416466Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] 
at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:07:34.416525Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:34.416558Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEc ... at 9437185 2025-03-28T09:07:37.555459Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 4 -> retry Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:07:37.555511Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} touch new 0b, 0b lo load (0b in total), 86213808b requested for data (96990534b in total) 2025-03-28T09:07:37.555552Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release tx data 2025-03-28T09:07:37.555582Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} released on update Res{3 10776726b}, Memory{0 dyn 0} 2025-03-28T09:07:37.555622Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} update Res{3 96990534b} type transaction 2025-03-28T09:07:37.555705Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2025-03-28T09:07:37.555777Z node 3 :RESOURCE_BROKER DEBUG: Update task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:364:2309]) (priority=5 type=transaction resources={0, 96990534} resubmit=1) 2025-03-28T09:07:37.555818Z node 3 :RESOURCE_BROKER DEBUG: Assigning waiting task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:364:2309]) to queue queue_transaction 2025-03-28T09:07:37.555871Z node 3 :RESOURCE_BROKER DEBUG: Allocate resources {0, 96990534} for task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:364:2309]) from queue queue_transaction 2025-03-28T09:07:37.555903Z node 3 :RESOURCE_BROKER DEBUG: Assigning in-fly task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:364:2309]) to queue queue_transaction 2025-03-28T09:07:37.555945Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 16.936776 to 33.873553 (insert task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:364:2309])) 2025-03-28T09:07:37.556057Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 96990534b}, Memory{0 dyn 96990534} 2025-03-28T09:07:37.556116Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:37.556147Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit ExecuteDataTx 2025-03-28T09:07:37.556975Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437184 restored its data 2025-03-28T09:07:37.912641Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] at tablet 9437184 with status COMPLETE 2025-03-28T09:07:37.912732Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 
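
[Editor's note] The "hope 4 -> retry" / "hope 5 -> done" lines above are the memory-escalation loop this test exercises: when a transaction outgrows its current grant, it releases its captured tx data, resubmits to the resource broker with the larger amount ("86213808b requested for data (96990534b in total)"), and re-executes once the updated grant ("acquired dyn mem Res{3 96990534b}") is in hand. A hedged sketch of that pattern; TBroker and the TryExecute callback shape are assumptions, not the broker's real API:

    #include <cstdint>

    // Stand-in for the resource broker's per-queue accounting; the real broker
    // also tracks queues and priorities ("queue_transaction" in the log above).
    struct TBroker {
        uint64_t Allocated = 0;
        void Update(uint64_t from, uint64_t to) { Allocated += to - from; }  // resubmit=1 path
        void Release(uint64_t bytes) { Allocated -= bytes; }                 // "Finish task ..."
    };

    template <class TTx>
    bool ExecuteWithEscalation(TTx& tx, TBroker& broker, uint64_t grant) {
        for (int hope = 1; hope <= 10; ++hope) {   // "hope N -> retry" / "hope N -> done"
            uint64_t needed = 0;
            if (tx.TryExecute(grant, &needed)) {   // assumed: reports required bytes on failure
                broker.Release(grant);             // "release Res{...}, Memory{0 dyn 0}"
                return true;
            }
            tx.ReleaseTxData();                    // "release tx data" before waiting
            broker.Update(grant, needed);          // re-queue the task with the larger request
            grant = needed;                        // retry under the updated grant
        }
        return false;
    }
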
2025-03-28T09:07:37.912799Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:37.912830Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit ExecuteDataTx 2025-03-28T09:07:37.912859Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompleteOperation 2025-03-28T09:07:37.912898Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit CompleteOperation 2025-03-28T09:07:37.913108Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is DelayComplete 2025-03-28T09:07:37.913130Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompleteOperation 2025-03-28T09:07:37.913149Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437184 to execution unit CompletedOperations 2025-03-28T09:07:37.913167Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437184 on unit CompletedOperations 2025-03-28T09:07:37.913207Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437184 is Executed 2025-03-28T09:07:37.913228Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437184 executing on unit CompletedOperations 2025-03-28T09:07:37.913247Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437184 has finished 2025-03-28T09:07:37.913265Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:07:37.913282Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:07:37.913300Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:07:37.913318Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:07:37.913401Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-03-28T09:07:37.913440Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2025-03-28T09:07:37.913574Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-03-28T09:07:37.913595Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit ExecuteDataTx 2025-03-28T09:07:37.914259Z node 3 :TX_DATASHARD DEBUG: tx 5 at 9437185 restored its data 2025-03-28T09:07:38.206140Z node 3 :TX_DATASHARD TRACE: Executed operation [6:5] at tablet 9437185 with status COMPLETE 2025-03-28T09:07:38.206268Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [6:5] at 9437185: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:07:38.206364Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is ExecutedNoMoreRestarts 2025-03-28T09:07:38.206422Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit ExecuteDataTx 2025-03-28T09:07:38.206476Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437185 to execution unit CompleteOperation 2025-03-28T09:07:38.206536Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit CompleteOperation 2025-03-28T09:07:38.206836Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is DelayComplete 
2025-03-28T09:07:38.206876Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompleteOperation 2025-03-28T09:07:38.206915Z node 3 :TX_DATASHARD TRACE: Add [6:5] at 9437185 to execution unit CompletedOperations 2025-03-28T09:07:38.206951Z node 3 :TX_DATASHARD TRACE: Trying to execute [6:5] at 9437185 on unit CompletedOperations 2025-03-28T09:07:38.206983Z node 3 :TX_DATASHARD TRACE: Execution status for [6:5] at 9437185 is Executed 2025-03-28T09:07:38.207014Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [6:5] at 9437185 executing on unit CompletedOperations 2025-03-28T09:07:38.207074Z node 3 :TX_DATASHARD TRACE: Execution plan for [6:5] at 9437185 has finished 2025-03-28T09:07:38.207116Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:07:38.207153Z node 3 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-28T09:07:38.207194Z node 3 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-03-28T09:07:38.207234Z node 3 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-03-28T09:07:38.207387Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2025-03-28T09:07:38.207463Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 96990534b}, Memory{0 dyn 0} 2025-03-28T09:07:38.207679Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:254:2225]) (release resources {0, 96990534}) 2025-03-28T09:07:38.207750Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 33.873553 to 16.936776 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:254:2225])) 2025-03-28T09:07:38.207845Z node 3 :RESOURCE_BROKER DEBUG: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:364:2309]) (release resources {0, 96990534}) 2025-03-28T09:07:38.207868Z node 3 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_transaction from 16.936776 to 0.000000 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:364:2309])) 2025-03-28T09:07:38.222304Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:10} commited cookie 1 for step 9 2025-03-28T09:07:38.222441Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-28T09:07:38.222501Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437185 on unit CompleteOperation 2025-03-28T09:07:38.222593Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437185 at tablet 9437185 send result to client [3:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-28T09:07:38.222797Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-03-28T09:07:38.222889Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-03-28T09:07:38.223276Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:10} commited cookie 1 for step 9 2025-03-28T09:07:38.223323Z node 3 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 9437184 2025-03-28T09:07:38.223355Z node 3 :TX_DATASHARD TRACE: Complete execution for [6:5] at 9437184 on unit CompleteOperation 2025-03-28T09:07:38.223403Z node 3 :TX_DATASHARD DEBUG: Complete [6 : 5] from 9437184 at tablet 9437184 send result to client [3:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-28T09:07:38.223576Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-03-28T09:07:38.223608Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:38.223955Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:342:2309], Recipient [3:450:2392]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2025-03-28T09:07:38.224033Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:07:38.224104Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437185 consumer 9437185 txId 5 2025-03-28T09:07:38.224245Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [3:232:2225], Recipient [3:450:2392]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-03-28T09:07:38.224276Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:07:38.224312Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 5 |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpPg::TableDeleteAllData-useSink [GOOD] >> KqpPg::PgUpdateCompoundKey+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] Test command err: 2025-03-28T09:07:33.441446Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:07:33.448314Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:07:33.448898Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:07:33.449145Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:07:33.497184Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:07:33.588282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:07:33.588349Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:33.601806Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:07:33.602086Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:07:33.604067Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:07:33.604162Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:07:33.604228Z node 1 :TX_DATASHARD 
DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:07:33.604613Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:07:33.606450Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:07:33.606552Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:07:33.679977Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:07:33.724305Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:07:33.724523Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:07:33.724655Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:07:33.724695Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:07:33.724732Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:07:33.724769Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:33.724993Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:33.725039Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:33.725237Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:07:33.725332Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:07:33.725455Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:33.725507Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:07:33.725555Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:07:33.725601Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:07:33.725643Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:07:33.725673Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:07:33.725706Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:07:33.725789Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:33.725833Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:33.725904Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:07:33.728504Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:07:33.728585Z node 1 :TX_DATASHARD TRACE: StateWork, processing event 
TEvDataShard::TEvProposeTransaction 2025-03-28T09:07:33.728666Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:07:33.728833Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:07:33.728890Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:07:33.728957Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:07:33.729012Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:33.729064Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:07:33.729099Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:07:33.729133Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:07:33.729456Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:07:33.729494Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:07:33.729531Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:07:33.729568Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:07:33.729610Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:07:33.729647Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:07:33.729689Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:07:33.729719Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:07:33.729742Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:07:33.742103Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:07:33.742187Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:07:33.742217Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:07:33.742261Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:07:33.742336Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:07:33.743009Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:33.743063Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:33.743119Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:07:33.743233Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:07:33.743270Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:07:33.743411Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 
on unit WaitForPlan 2025-03-28T09:07:33.743491Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:07:33.743540Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:07:33.743585Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:07:33.762898Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:07:33.762998Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:33.763300Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:33.763354Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:33.763423Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:33.763467Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:07:33.763510Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:07:33.763554Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:07:33.763593Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:07:33.763667Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:07:33.763723Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:07:33.763760Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:07:33.763795Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:07:33.763986Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:07:33.764044Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:07:33.764070Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:07:33.764093Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:07:33.764119Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:07:33.764190Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:33.764221Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:07:33.764261Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:07:33.764309Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:07:33.764386Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:07:33.764444Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:07:33.764494Z node 1 :TX_DATASHARD TRACE: Activated 
operation [1000001:1] at ... v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:07:37.886263Z node 3 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-03-28T09:07:37.886307Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-03-28T09:07:37.886334Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-28T09:07:37.886360Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-03-28T09:07:37.886387Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-03-28T09:07:37.886440Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:07:37.886496Z node 3 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 33554432 more memory 2025-03-28T09:07:37.886538Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-03-28T09:07:37.886703Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:07:37.886753Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-03-28T09:07:37.886805Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:07:37.917806Z node 3 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-03-28T09:07:37.917925Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 7340039, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:07:37.918032Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:37.918083Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-03-28T09:07:37.918127Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-03-28T09:07:37.918175Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-03-28T09:07:37.918278Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:07:37.918318Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-03-28T09:07:37.918358Z node 3 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-03-28T09:07:37.918405Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-03-28T09:07:37.918454Z node 3 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-03-28T09:07:37.918483Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-03-28T09:07:37.918523Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-03-28T09:07:37.952559Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:07:37.952638Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-03-28T09:07:37.952691Z node 3 
:TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 5 ms, status: COMPLETE 2025-03-28T09:07:37.952793Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:39.031995Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-03-28T09:07:39.032170Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-03-28T09:07:39.032916Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:297:2278], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:39.032975Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:39.033095Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:296:2277], serverId# [3:297:2278], sessionId# [0:0:0] 2025-03-28T09:07:39.234674Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\332\201\200\010\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\004\203\004\203\001H\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000\013?\024\003?\020\251\003\003?\022\006bar\003\005?\030\003?\026\007\000\000\000\001xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx 2025-03-28T09:07:39.237455Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:07:39.237686Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:07:39.297468Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit CheckDataTx 2025-03-28T09:07:39.297639Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-03-28T09:07:39.297715Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CheckDataTx 2025-03-28T09:07:39.297786Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:07:39.297873Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:07:39.297933Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-28T09:07:39.298015Z node 3 :TX_DATASHARD TRACE: Activated operation [0:3] at 9437184 2025-03-28T09:07:39.298088Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-03-28T09:07:39.298113Z node 3 :TX_DATASHARD TRACE: Advance execution 
plan for [0:3] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-28T09:07:39.298137Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit ExecuteDataTx 2025-03-28T09:07:39.298159Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-03-28T09:07:39.298213Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-28T09:07:39.298411Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 requested 46269638 more memory 2025-03-28T09:07:39.298463Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2025-03-28T09:07:39.298650Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:07:39.298763Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-03-28T09:07:39.298885Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-28T09:07:39.345678Z node 3 :TX_DATASHARD TRACE: Operation [0:3] at 9437184 exceeded memory limit 50463942 and requests 403711536 more for the next try 2025-03-28T09:07:39.351630Z node 3 :TX_DATASHARD DEBUG: tx 3 released its data 2025-03-28T09:07:39.351735Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Restart 2025-03-28T09:07:39.352165Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:07:39.352211Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2025-03-28T09:07:39.417444Z node 3 :TX_DATASHARD DEBUG: tx 3 at 9437184 restored its data 2025-03-28T09:07:39.417550Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-28T09:07:39.527499Z node 3 :TX_DATASHARD TRACE: Executed operation [0:3] at tablet 9437184 with status COMPLETE 2025-03-28T09:07:39.527620Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:3] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 16777223, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:07:39.527708Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:39.527759Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit ExecuteDataTx 2025-03-28T09:07:39.527858Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit FinishPropose 2025-03-28T09:07:39.527923Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 9437184 on unit FinishPropose 2025-03-28T09:07:39.527990Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is DelayComplete 2025-03-28T09:07:39.528019Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit FinishPropose 2025-03-28T09:07:39.528062Z node 3 :TX_DATASHARD TRACE: Add [0:3] at 9437184 to execution unit CompletedOperations 2025-03-28T09:07:39.528105Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:3] at 
9437184 on unit CompletedOperations 2025-03-28T09:07:39.528154Z node 3 :TX_DATASHARD TRACE: Execution status for [0:3] at 9437184 is Executed 2025-03-28T09:07:39.528203Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 9437184 executing on unit CompletedOperations 2025-03-28T09:07:39.528260Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:3] at 9437184 has finished 2025-03-28T09:07:39.617676Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:07:39.617766Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:3] at 9437184 on unit FinishPropose 2025-03-28T09:07:39.617830Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 3 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 2 ms, status: COMPLETE 2025-03-28T09:07:39.617933Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:39.732312Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:07:39.732398Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-03-28T09:07:39.740420Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:230:2224], Recipient [3:232:2225]: NKikimr::TEvTablet::TEvFollowerGcApplied |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 >> DataShardReadIterator::ShouldReadMultipleKeys [GOOD] >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 |79.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |79.1%| [TA] {RESULT} 
$(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |79.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 >> DataShardReadIterator::ShouldReadKeyPrefix3 [GOOD] >> DataShardReadIterator::ShouldReadFromFollower >> KqpNewEngine::ComplexLookupLimit [GOOD] |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> KqpAgg::GroupByLimit [GOOD] >> KqpExtractPredicateLookup::OverflowLookup >> KqpNewEngine::BrokenLocksOnUpdate [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] Test command err: 2025-03-28T09:05:50.687213Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784054097939629:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:50.687426Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmpIoLLgn/pdisk_1.dat 2025-03-28T09:05:51.481591Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:51.517364Z node 1 :HTTP ERROR: (#26,[::1]:65244) connection closed with error: Connection refused 2025-03-28T09:05:51.520045Z node 1 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:05:51.520742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:51.520856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:51.528198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:55.223144Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784077598711895:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:55.223206Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmpK4TGpK/pdisk_1.dat 2025-03-28T09:05:55.544758Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:55.561924Z node 2 :HTTP ERROR: (#28,[::1]:5229) connection closed with error: Connection refused 2025-03-28T09:05:55.562982Z node 2 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:05:55.564008Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:55.564070Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:55.566316Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:59.109418Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486784092824586173:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:59.183580Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmp62jrKd/pdisk_1.dat 2025-03-28T09:05:59.278054Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:59.299192Z node 3 :HTTP ERROR: (#26,[::1]:4592) connection closed with error: Connection refused 2025-03-28T09:05:59.299800Z node 3 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:05:59.307326Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:59.307403Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:59.311656Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:02.696487Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486784108745066285:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:02.696546Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmp05DT6K/pdisk_1.dat 2025-03-28T09:06:02.799959Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:02.832687Z node 4 :HTTP ERROR: (#28,[::1]:63354) connection closed with error: Connection refused 2025-03-28T09:06:02.835133Z node 4 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:06:02.848331Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:02.848415Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:02.850093Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:06.205502Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486784123548477977:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:06.205603Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect 
path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmpQPxHdb/pdisk_1.dat 2025-03-28T09:06:06.304661Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:06.327234Z node 5 :HTTP ERROR: (#30,[::1]:13102) connection closed with error: Connection refused 2025-03-28T09:06:06.327689Z node 5 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:06:06.351447Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:06.351537Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:06.353112Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:09.598183Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486784137762710347:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:09.598266Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmpPaNLuJ/pdisk_1.dat 2025-03-28T09:06:09.724010Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:09.747339Z node 6 :HTTP ERROR: (#32,[::1]:27509) connection closed with error: Connection refused 2025-03-28T09:06:09.748159Z node 6 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:06:09.750336Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:09.750447Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:09.755216Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:13.482176Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486784152860579533:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:13.482501Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmpp5r6V3/pdisk_1.dat 2025-03-28T09:06:13.597328Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:13.620611Z node 7 :HTTP ERROR: (#34,[::1]:20908) connection closed with error: Connection refused 2025-03-28T09:06:13.621194Z node 7 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:06:13.622020Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:13.622077Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:13.623589Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:17.536950Z node 8 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486784173249438955:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:17.537087Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmp6otmzo/pdisk_1.dat 2025-03-28T09:06:17.723600Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:17.743432Z node 8 :HTTP ERROR: (#36,[::1]:9670) connection closed with error: Connection refused 2025-03-28T09:06:17.745670Z node 8 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:06:17.747110Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:17.747200Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:17.750344Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:21.593524Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486784188546973062:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:21.595802Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmpwRzj48/pdisk_1.dat 2025-03-28T09:06:21.696748Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:21.727665Z node 9 :HTTP ERROR: (#38,[::1]:6301) connection closed with error: Connection refused 2025-03-28T09:06:21.729145Z node 9 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:06:21.729578Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:21.729655Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:21.732702Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:25.810007Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486784207427804648:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:25.810062Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmpoBLnTH/pdisk_1.dat 2025-03-28T09:06:25.968803Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:25.986976Z node 10 :HTTP ERROR: (#26,[::1]:2263) connection closed with error: Connection refused 2025-03-28T09:06:25.988125Z node 10 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:06:25.990483Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: ... 
cheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmpDj3RRa/pdisk_1.dat 2025-03-28T09:06:45.250280Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:45.266049Z node 14 :HTTP ERROR: (#34,[::1]:18621) connection closed with error: Connection refused 2025-03-28T09:06:45.266761Z node 14 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:06:45.270728Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:45.270839Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:45.275436Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:50.283787Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7486784311220641845:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:50.283841Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmp1RjQhh/pdisk_1.dat 2025-03-28T09:06:50.569268Z node 15 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:50.597513Z node 15 :HTTP ERROR: (#36,[::1]:26130) connection closed with error: Connection refused 2025-03-28T09:06:50.598056Z node 15 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:06:50.623603Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:50.623708Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:50.625928Z node 15 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:57.404630Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7486784341760213965:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:57.404681Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmp6BVL6i/pdisk_1.dat 2025-03-28T09:06:58.034293Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:58.063925Z node 16 :HTTP ERROR: (#38,[::1]:1435) connection closed with error: Connection refused 2025-03-28T09:06:58.064451Z node 16 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:06:58.066738Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:58.066834Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:58.075523Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:07:04.580043Z node 17 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7486784372966110282:2062];send_to=[0:7307199536658146131:7762515]; 
2025-03-28T09:07:04.580095Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmpEfvc7x/pdisk_1.dat 2025-03-28T09:07:04.846246Z node 17 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:04.865899Z node 17 :HTTP ERROR: (#26,[::1]:17534) connection closed with error: Connection refused 2025-03-28T09:07:04.867368Z node 17 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:07:04.882306Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:04.882401Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:04.884542Z node 17 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:07:10.640434Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7486784400993508594:2150];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmpY0xY8B/pdisk_1.dat 2025-03-28T09:07:10.712436Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:07:10.771150Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:10.803594Z node 18 :HTTP ERROR: (#28,[::1]:5162) connection closed with error: Connection refused 2025-03-28T09:07:10.803966Z node 18 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:07:10.805985Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:10.806089Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:10.808082Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:07:16.309822Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7486784424348757394:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:16.309920Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmp6qMKH7/pdisk_1.dat 2025-03-28T09:07:16.521571Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:16.541447Z node 19 :HTTP ERROR: (#30,[::1]:13268) connection closed with error: Connection refused 2025-03-28T09:07:16.547074Z node 19 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:07:16.563481Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:16.563588Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:16.565389Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:07:21.234618Z node 20 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7486784447786745809:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:21.234713Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmpWQFTKr/pdisk_1.dat 2025-03-28T09:07:21.425433Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:21.463600Z node 20 :HTTP ERROR: (#32,[::1]:27029) connection closed with error: Connection refused 2025-03-28T09:07:21.467363Z node 20 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:07:21.469093Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:21.469194Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:21.471330Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:07:26.332634Z node 21 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[21:7486784467200738447:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:26.335607Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmpy8toyP/pdisk_1.dat 2025-03-28T09:07:26.528815Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:26.559366Z node 21 :HTTP ERROR: (#34,[::1]:28683) connection closed with error: Connection refused 2025-03-28T09:07:26.560334Z node 21 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:07:26.564901Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:26.565013Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:26.567038Z node 21 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:07:32.054887Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7486784494906589394:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:32.058850Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmpHU87ss/pdisk_1.dat 2025-03-28T09:07:32.304767Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:32.338143Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:32.338267Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:32.339383Z node 22 :HTTP ERROR: (#36,[::1]:9698) connection closed with error: Connection refused 2025-03-28T09:07:32.340300Z node 22 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 
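The pattern repeated for every node in this test, an HTTP ERROR reporting "connection closed with error: Connection refused" immediately followed by a CMS_CONFIGS ERROR "NetClassifierUpdater failed to get subnets", simply means nothing is listening on the updater's fetch port in this test setup, so each poll fails at TCP connect time. A minimal sketch of how a refused connect surfaces as that errno text, using plain POSIX sockets rather than YDB's actual updater code (the port number is hypothetical):

#include <arpa/inet.h>
#include <netinet/in.h>
#include <sys/socket.h>
#include <unistd.h>
#include <cerrno>
#include <cstdio>
#include <cstring>

int main() {
    // Connect to a loopback port with no listener; the kernel answers the
    // SYN with RST and connect() fails with ECONNREFUSED.
    int fd = socket(AF_INET, SOCK_STREAM, 0);
    if (fd < 0) { perror("socket"); return 1; }
    sockaddr_in addr{};
    addr.sin_family = AF_INET;
    addr.sin_port = htons(9698);  // hypothetical dead port, like those above
    inet_pton(AF_INET, "127.0.0.1", &addr.sin_addr);
    if (connect(fd, reinterpret_cast<sockaddr*>(&addr), sizeof(addr)) < 0) {
        // A polling updater logs the failure and schedules a retry instead
        // of aborting, which is why the test still passes.
        std::fprintf(stderr, "failed to get subnets: %s\n", std::strerror(errno));
    }
    close(fd);
    return 0;
}

Because the refusal is synchronous and cheap, a poller can simply retry on its normal schedule; the test only checks that the failure is tolerated.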
2025-03-28T09:07:32.341437Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:07:38.181487Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7486784519273147872:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:38.181677Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d2e/r3tmp/tmp38lW6z/pdisk_1.dat 2025-03-28T09:07:38.363850Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:38.400597Z node 23 :HTTP ERROR: (#38,[::1]:20118) connection closed with error: Connection refused 2025-03-28T09:07:38.400969Z node 23 :CMS_CONFIGS ERROR: NetClassifierUpdater failed to get subnets: Connection refused 2025-03-28T09:07:38.401679Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:38.401781Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:38.403834Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |79.2%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::ComplexLookupLimit [GOOD] Test command err: Trying to start YDB, gRPC: 29730, MsgBus: 3829 2025-03-28T09:06:47.058854Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784300318781195:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:47.058938Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000af9/r3tmp/tmpMWpcHF/pdisk_1.dat 2025-03-28T09:06:47.515676Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:47.519982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:47.520058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:47.523160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29730, node 1 2025-03-28T09:06:47.622510Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:47.622535Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:47.622541Z node 1 :NET_CLASSIFIER WARN: failed to initialize from 
file: (empty maybe) 2025-03-28T09:06:47.622656Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3829 TClient is connected to server localhost:3829 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:48.301883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:50.395999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784313203683734:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:50.396154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:50.655374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:06:50.866885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784313203683836:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:50.867021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:50.874882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784313203683841:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:50.880139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:06:50.905619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784313203683843:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:06:50.979751Z node 1 :TX_PROXY ERROR: Actor# [1:7486784313203683895:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:52.062149Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784300318781195:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:52.062231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:06:52.387034Z node 1 :RPC_REQUEST WARN: Client lost Trying to start YDB, gRPC: 1847, MsgBus: 30949 2025-03-28T09:06:53.115444Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784325061512227:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:53.115576Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000af9/r3tmp/tmpXkcFJZ/pdisk_1.dat 2025-03-28T09:06:53.244193Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:53.273276Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:53.273356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:53.275996Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1847, node 2 2025-03-28T09:06:53.341395Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:53.341425Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:53.341435Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:53.341542Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30949 TClient is connected to server localhost:30949 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
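The WARN/ERROR sequence above is the usual create-if-missing race on /Root/.metadata/workload_manager/pools/default: the pool fetcher reports NOT_FOUND, a creator actor issues ESchemeOpCreateResourcePool and doublechecks once the transaction completes, and any racing creator that loses gets "path exist, request accepts it", which is treated as success, so the TX_PROXY ERROR line is benign. A minimal sketch of that idempotent ensure-created pattern (the enum and function names are illustrative, not YDB's API):

#include <cstdio>

// Stand-ins for the scheme-op outcomes visible in the log.
enum class ECreateStatus { Ok, AlreadyExists };

// Hypothetical create call: succeeds, or reports the path already exists.
ECreateStatus CreateDefaultPool(bool alreadyThere) {
    return alreadyThere ? ECreateStatus::AlreadyExists : ECreateStatus::Ok;
}

// Ensure-created: both the winner and the loser of the race end up with
// the pool in place, so "already exists" is success, not an error.
bool EnsureDefaultPool(bool alreadyThere) {
    switch (CreateDefaultPool(alreadyThere)) {
        case ECreateStatus::Ok:            return true;  // we created it
        case ECreateStatus::AlreadyExists: return true;  // someone else did
    }
    return false;  // unreachable; keeps compilers quiet
}

int main() {
    std::printf("winner: %d, loser of the race: %d\n",
                (int)EnsureDefaultPool(false), (int)EnsureDefaultPool(true));
    return 0;
}

Accepting "already exists" avoids serializing pool creation behind a lock; each session can attempt the create optimistically on first use.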
2025-03-28T09:06:54.146146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:54.158032Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:06:54.181689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:54.323282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:54.573263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:54.651750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:57.124845Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784342241383185:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:57.124929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:57.179045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:57.210002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:57.273160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:57.315272Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:57.397071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:57.535880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:06 ... t not found or you don't have access permissions } 2025-03-28T09:07:31.061582Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:07:31.130811Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:07:31.174219Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:07:31.213272Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:07:31.288565Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:07:31.344507Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:07:31.440402Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486784488379801240:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:31.440496Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486784488379801245:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:31.440499Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:31.445082Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:07:31.465559Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486784488379801247:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:07:31.563720Z node 6 :TX_PROXY ERROR: Actor# [6:7486784488379801304:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:31.669552Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486784466904962532:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:31.669639Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:07:33.059629Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9548, MsgBus: 29322 2025-03-28T09:07:34.713676Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486784501545055661:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:34.713756Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000af9/r3tmp/tmpUI8id7/pdisk_1.dat 2025-03-28T09:07:34.891861Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:34.928595Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:34.928751Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:34.931060Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9548, node 7 2025-03-28T09:07:34.988213Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:07:34.988246Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:34.988259Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:34.988431Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29322 TClient is connected to server localhost:29322 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:07:35.824134Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:35.854640Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:35.958837Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:36.249927Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:36.363713Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:39.714635Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486784501545055661:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:39.714744Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:07:39.820654Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486784523019893903:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:39.820771Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:39.886134Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:07:39.995641Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:07:40.046676Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:07:40.090804Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:07:40.132517Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:07:40.182862Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:07:40.265013Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486784527314861712:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:40.265129Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:40.265226Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486784527314861717:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:40.269913Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:07:40.283302Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486784527314861719:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:07:40.376640Z node 7 :TX_PROXY ERROR: Actor# [7:7486784527314861774:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:41.934008Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::BrokenLocksOnUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 29580, MsgBus: 26297 2025-03-28T09:06:46.860047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784295275335877:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:46.860548Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b01/r3tmp/tmpWO4btv/pdisk_1.dat 2025-03-28T09:06:47.515913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:47.516013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:47.518491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:47.585122Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29580, node 1 2025-03-28T09:06:47.686375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:47.686397Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:47.686404Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:47.686523Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26297 TClient is connected to server localhost:26297 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:48.446000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:48.471455Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:48.544291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:48.780958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:49.070587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:49.207845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:51.201878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784316750173985:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:51.202335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:51.637206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:51.714877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:51.770915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:51.814194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:51.847093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:51.851824Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784295275335877:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:51.851959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:06:51.897489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:51.966050Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784316750174501:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:51.966148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:51.966213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784316750174506:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:51.975230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:51.995280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784316750174508:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:06:52.067073Z node 1 :TX_PROXY ERROR: Actor# [1:7486784321045141861:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 12594, MsgBus: 10815 2025-03-28T09:06:54.730335Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784332262445700:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:54.733222Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b01/r3tmp/tmpfsWrsc/pdisk_1.dat 2025-03-28T09:06:54.873941Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:54.894767Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:54.894883Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:54.896559Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12594, node 2 2025-03-28T09:06:55.056942Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:55.056963Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:55.056971Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:55.057072Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10815 TClient is connected to server localhost:10815 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:55.606803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:06:55.623145Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:06:55.637586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:55.734953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:55.921158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting ... at schemeshard: 72057594046644480 2025-03-28T09:07:31.848753Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:07:31.897546Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:07:31.943986Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:07:31.962928Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486784468247698926:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:31.962996Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:07:31.992496Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:07:32.051976Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:07:32.126853Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486784494017504988:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:32.126983Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:32.127368Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486784494017504993:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:32.133966Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:07:32.157949Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486784494017504995:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:07:32.247444Z node 6 :TX_PROXY ERROR: Actor# [6:7486784494017505052:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19158, MsgBus: 30564 2025-03-28T09:07:35.506513Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486784506960954453:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:35.507115Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b01/r3tmp/tmpYZw14r/pdisk_1.dat 2025-03-28T09:07:35.666208Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:35.698978Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:35.699106Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:35.700571Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19158, node 7 2025-03-28T09:07:35.788774Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:07:35.788796Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:35.788805Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:35.788931Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30564 TClient is connected to server localhost:30564 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:07:36.483156Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:36.501709Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:07:36.523201Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:07:36.694117Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:36.962374Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:37.086596Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:40.507853Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486784506960954453:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:40.507944Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:07:40.913250Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486784528435792689:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:40.913347Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:40.982657Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:07:41.028483Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:07:41.072822Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:07:41.122593Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:07:41.208509Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:07:41.259962Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:07:41.329917Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486784532730760498:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:41.330038Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:41.330502Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486784532730760503:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:41.337569Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:07:41.356977Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486784532730760505:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:07:41.442329Z node 7 :TX_PROXY ERROR: Actor# [7:7486784532730760561:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:43.555014Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MzhhN2RjM2QtYTM3NGMyNTUtY2E2NDA4NzctZmMwZDdmYjE=, ActorId: [7:7486784541320695441:2494], ActorState: ExecuteState, TraceId: 01jqe03p8z79j6vap2e6c6dr2j, Create QueryResponse for error on request, msg: 2025-03-28T09:07:43.556314Z node 7 :TX_DATASHARD ERROR: Complete [1743152863585 : 281474976715673] from 72075186224037888 at tablet 72075186224037888, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] >> ExternalIndex::Simple |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TExtSubDomainTest::DeclareAndDrop |79.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |79.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |79.2%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] Test command err: 2025-03-28T09:07:21.129443Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784446199688443:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:21.129509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00159c/r3tmp/tmpBkzSV6/pdisk_1.dat 2025-03-28T09:07:21.590705Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:21.600000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:21.600127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:21.606376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7554, node 1 2025-03-28T09:07:21.762956Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:07:21.762983Z node 1 :NET_CLASSIFIER 
WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:21.762990Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:21.763111Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:07:22.054969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:19950 2025-03-28T09:07:24.195897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784459084591352:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.195981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.509274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:07:24.717222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784459084591519:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.717315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.736351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152844671 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152844671 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-28T09:07:24.889254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784459084591627:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.889385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.892135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784459084591634:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.899862Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784459084591652:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.899925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784459084591650:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.899978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784459084591661:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.900044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.913737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:07:24.914451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:07:24.918835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710660:1, at schemeshard: 72057594046644480 2025-03-28T09:07:24.919063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:07:24.919097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710660:2, at schemeshard: 72057594046644480 2025-03-28T09:07:24.919210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:07:24.919306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:3, path# /Root/.metadata/workload_manager/pools/default 2025-03-28T09:07:24.919610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710660:3 1 -> 128 2025-03-28T09:07:24.921407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:07:24.921435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-28T09:07:24.923771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710661:0, path# /Root/.metadata/workload_manager/pools/default 2025-03-28T09:07:24.923935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:1, propose status:StatusMultipleModifications, reason: Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate), at schemeshard: 72057594046644480 2025-03-28T09:07:24.928292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784459084591691:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.928451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.931896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784459084591700:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permi ... ase not set, use /Root 2025-03-28T09:07:44.792914Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721716. Ctx: { TraceId: 01jqe03qgm3trzpcbpr84v1mf8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGNhMDU4NGMtYjk2YzU2ZjYtZDAwOWVmMDUtZmVlYmJkOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.792997Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721717. Ctx: { TraceId: 01jqe03qgnbfxqj8vnmfagtje1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDE0NDNjNzEtZmQwNDU4OTItMTVkNDNmMjMtNTBkNTQyYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.793447Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721719. Ctx: { TraceId: 01jqe03qgnbdhht5myrh73t7cv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWFhZGJkMzctZmRmNDU1NDAtNTU2MTM5YTItMmRiNGFhOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.793901Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721718. Ctx: { TraceId: 01jqe03qgn67eshq8fm4yww7zj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA2ZGRhMmQtMjgxNzI4MGQtNmE0MDJlZjUtYjYyOTIxMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.799882Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721715. Ctx: { TraceId: 01jqe03qgmbm2d5rdcm6x0f2hr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQwODNlZGItM2M3NzNhNTQtYzc2OWFiODEtZTM4NWVmMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.802038Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721721. Ctx: { TraceId: 01jqe03qgx8dwwjp8vk0m37bhv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmE5NTAzMWItN2E4YWU3MjAtZWJlM2RhOGMtZmFmNWRiOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.818218Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721724. Ctx: { TraceId: 01jqe03qhafa2en12eew7rfh9c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhYmY2OTQtNmZkOGY5YzAtMmExNTIzNDctMmRlNjE4ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.819183Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721726. Ctx: { TraceId: 01jqe03qhcapgmd7925dcc9fjm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA2ZGRhMmQtMjgxNzI4MGQtNmE0MDJlZjUtYjYyOTIxMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.821905Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721723. Ctx: { TraceId: 01jqe03qhac1p88q60z4t7q8dg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njg3NDVkMC1mNDI2MzI4Ni1kMTg1NzFkLTVjNDhhNGFj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.822989Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721725. Ctx: { TraceId: 01jqe03qha0j18ax3csvy8w40k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWFhZGJkMzctZmRmNDU1NDAtNTU2MTM5YTItMmRiNGFhOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.825764Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721727. 
Ctx: { TraceId: 01jqe03qhgd6880kkpwxbmbgbw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQwODNlZGItM2M3NzNhNTQtYzc2OWFiODEtZTM4NWVmMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.826196Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721728. Ctx: { TraceId: 01jqe03qhgawy4tvwqa0p1kf48, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDE0NDNjNzEtZmQwNDU4OTItMTVkNDNmMjMtNTBkNTQyYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.826634Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721722. Ctx: { TraceId: 01jqe03qhabes1k7zbrktrdm11, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmJjYTgtMjM0MzkzY2EtNDMwMjVlODctMjZhZWQzNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.832485Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721720. Ctx: { TraceId: 01jqe03qgnftsvt2x6rd45jn6x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGUzYzhjNjctYmNhNzQwMTgtMWNkMjMxNTgtYjY2MzkyNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.835646Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721729. Ctx: { TraceId: 01jqe03qhka215bd42xxhmmcbr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGNhMDU4NGMtYjk2YzU2ZjYtZDAwOWVmMDUtZmVlYmJkOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.844271Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721730. Ctx: { TraceId: 01jqe03qja9cwc6mmtnceqyqh3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhYmY2OTQtNmZkOGY5YzAtMmExNTIzNDctMmRlNjE4ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-03-28T09:07:44.849778Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721733. Ctx: { TraceId: 01jqe03qje6fyw0y3j05m0q5w8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDE0NDNjNzEtZmQwNDU4OTItMTVkNDNmMjMtNTBkNTQyYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.850160Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721731. Ctx: { TraceId: 01jqe03qje5fhw9dcj9dddhggw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmE5NTAzMWItN2E4YWU3MjAtZWJlM2RhOGMtZmFmNWRiOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.850296Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721732. Ctx: { TraceId: 01jqe03qje73brcs05ea4mry3z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWFhZGJkMzctZmRmNDU1NDAtNTU2MTM5YTItMmRiNGFhOTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.852862Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721734. Ctx: { TraceId: 01jqe03qje3thc609301fnpe0m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA2ZGRhMmQtMjgxNzI4MGQtNmE0MDJlZjUtYjYyOTIxMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: 2025-03-28T09:07:44.855016Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721736. 
Ctx: { TraceId: 01jqe03qjg1jq33t5zm8psv56f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njg3NDVkMC1mNDI2MzI4Ni1kMTg1NzFkLTVjNDhhNGFj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.855470Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721735. Ctx: { TraceId: 01jqe03qjff0d0qx2yx32dtkt5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmJjYTgtMjM0MzkzY2EtNDMwMjVlODctMjZhZWQzNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152844671 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-28T09:07:44.856695Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721737. Ctx: { TraceId: 01jqe03qjg5yw3c7frdwkxbrs9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmQwODNlZGItM2M3NzNhNTQtYzc2OWFiODEtZTM4NWVmMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.859387Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721738. Ctx: { TraceId: 01jqe03qjs08apwcwf61h85zcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGUzYzhjNjctYmNhNzQwMTgtMWNkMjMxNTgtYjY2MzkyNzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.859474Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721739. Ctx: { TraceId: 01jqe03qjs5bqq9wszp7jqnyy2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGNhMDU4NGMtYjk2YzU2ZjYtZDAwOWVmMDUtZmVlYmJkOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:44.863870Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976721740. Ctx: { TraceId: 01jqe03qjv30t4wfr7q0k81v0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBhYmY2OTQtNmZkOGY5YzAtMmExNTIzNDctMmRlNjE4ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152844671 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) 2025-03-28T09:07:45.894330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 235933 rowCount 3474 cpuUsage 0 2025-03-28T09:07:45.909156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 163913 rowCount 2530 cpuUsage 0 Table has 2 shards 2025-03-28T09:07:45.994403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-03-28T09:07:45.994571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 3474, DataSize 235933 2025-03-28T09:07:45.994741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 2530, DataSize 163913 2025-03-28T09:07:45.995105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false >> KqpPg::PgUpdateCompoundKey+useSink [GOOD] >> KqpPg::PgUpdateCompoundKey-useSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 >> TExtSubDomainTest::DeclareAndLs >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> SlowTopicAutopartitioning::CDC_Write [GOOD] Test command err: 2025-03-28T09:05:34.779802Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783984523718694:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:34.779902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:05:34.998305Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001557/r3tmp/tmpxJlSxN/pdisk_1.dat 2025-03-28T09:05:35.254136Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:35.258805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:35.258903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:35.262938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29763, node 1 2025-03-28T09:05:35.427593Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001557/r3tmp/yandexa9Xs4P.tmp 2025-03-28T09:05:35.427620Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001557/r3tmp/yandexa9Xs4P.tmp 2025-03-28T09:05:35.427834Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001557/r3tmp/yandexa9Xs4P.tmp 2025-03-28T09:05:35.427971Z node 1 :NET_CLASSIFIER ERROR: 
got bad distributable configuration 2025-03-28T09:05:35.502153Z INFO: TTestServer started on Port 16707 GrpcPort 29763 TClient is connected to server localhost:16707 PQClient connected to localhost:29763 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:05:35.943810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:05:35.989396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:05:36.147052Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:05:36.159110Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-03-28T09:05:39.575435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784005998555988:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:39.575561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:39.576120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784005998556000:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:39.581111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:05:39.599705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784005998556002:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:05:39.779637Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783984523718694:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:39.779720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:05:39.927056Z node 1 :TX_PROXY ERROR: Actor# [1:7486784005998556066:2456] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:05:39.986641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:05:40.066314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:05:40.096766Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486784005998556076:2351], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
<main>:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:05:40.098918Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmM3MmM0ODgtN2MzMjAxZjMtZTUwOTRmMDgtNDZmMTA3NmM=, ActorId: [1:7486784005998555962:2338], ActorState: ExecuteState, TraceId: 01jqdzzx604b18m7snw5n684d3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:05:40.101238Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:05:40.193147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7486784010293523664:2646] === CheckClustersList. Ok 2025-03-28T09:05:46.014889Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-28T09:05:46.057949Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7486784036063327697:2789], Recipient [1:7486783988818686423:2200]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:05:46.058000Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:05:46.058025Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:05:46.063112Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:7486783993113653945:2318], Recipient [1:7486783988818686423:2200]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 3 Generation: 1 Round: 0 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82488 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 1 StartTime: 1743152736003 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-28T09:05:46.063160Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-28T09:05:46.063214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0
2025-03-28T09:05:46.063327Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-28T09:05:46.063362Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.099992s, queue# 1 2025-03-28T09:05:46.103120Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7486784036063327720:2796], Recipient [1:7486783988818686423:2200]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:05:46.103157Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:05:46.103166Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:05:46.103208Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7486784036063327716:2793], Recipient [1:7486783988818686423:2200]: {TEvModifySchemeTransaction txid# 281474976710672 TabletId# 72057594046644480} 2025-03-28T09:05:46.103224Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:05:46.201909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Exec ... 3-28T09:06:33.557205Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.557240Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 938, partNo: 0, Offset: 35142 is stored on disk 2025-03-28T09:06:33.557262Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.557325Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 941, partNo: 0, Offset: 35143 is stored on disk 2025-03-28T09:06:33.557383Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.557441Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 943, partNo: 0, Offset: 35144 is stored on disk 2025-03-28T09:06:33.557463Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.557481Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 945, partNo: 0, Offset: 35145 is stored on disk 2025-03-28T09:06:33.557514Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-28T09:06:33.557564Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 946, partNo: 0, Offset: 35146 is stored on disk 2025-03-28T09:06:33.557600Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.557648Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 948, partNo: 0, Offset: 35147 is stored on disk 2025-03-28T09:06:33.557686Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.557708Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 950, partNo: 0, Offset: 35148 is stored on disk 2025-03-28T09:06:33.557743Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.557798Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 951, partNo: 0, Offset: 35149 is stored on disk 2025-03-28T09:06:33.557820Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.557860Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 957, partNo: 0, Offset: 35150 is stored on disk 2025-03-28T09:06:33.558003Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.558045Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 958, partNo: 0, Offset: 35151 is stored on disk 2025-03-28T09:06:33.558068Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.558134Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 959, partNo: 0, Offset: 35152 is stored on disk 2025-03-28T09:06:33.558156Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.558221Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 961, partNo: 0, Offset: 35153 is stored on disk 2025-03-28T09:06:33.558246Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-28T09:06:33.558297Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 963, partNo: 0, Offset: 35154 is stored on disk 2025-03-28T09:06:33.558322Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.558367Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 964, partNo: 0, Offset: 35155 is stored on disk 2025-03-28T09:06:33.558409Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.558430Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 966, partNo: 0, Offset: 35156 is stored on disk 2025-03-28T09:06:33.558453Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.558489Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 967, partNo: 0, Offset: 35157 is stored on disk 2025-03-28T09:06:33.558512Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.558553Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 971, partNo: 0, Offset: 35158 is stored on disk 2025-03-28T09:06:33.558581Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.558629Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 973, partNo: 0, Offset: 35159 is stored on disk 2025-03-28T09:06:33.558670Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.558706Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 974, partNo: 0, Offset: 35160 is stored on disk 2025-03-28T09:06:33.558735Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.558768Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 975, partNo: 0, Offset: 35161 is stored on disk 2025-03-28T09:06:33.558799Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-28T09:06:33.558837Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 976, partNo: 0, Offset: 35162 is stored on disk 2025-03-28T09:06:33.558860Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.558898Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 977, partNo: 0, Offset: 35163 is stored on disk 2025-03-28T09:06:33.558920Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.558962Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 978, partNo: 0, Offset: 35164 is stored on disk 2025-03-28T09:06:33.558994Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.559019Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 980, partNo: 0, Offset: 35165 is stored on disk 2025-03-28T09:06:33.559055Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.559081Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 981, partNo: 0, Offset: 35166 is stored on disk 2025-03-28T09:06:33.559117Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.559153Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 985, partNo: 0, Offset: 35167 is stored on disk 2025-03-28T09:06:33.559174Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.559194Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 988, partNo: 0, Offset: 35168 is stored on disk 2025-03-28T09:06:33.559211Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:06:33.559232Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 989, partNo: 0, Offset: 35169 is stored on disk 2025-03-28T09:06:33.559257Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-28T09:06:33.559278Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037956, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037895', Topic: 'origin/feed/streamImpl', Partition: 0, SeqNo: 991, partNo: 0, Offset: 35170 is stored on disk >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false >> DataShardReadIterator::ShouldReadMultipleKeysOneByOne [GOOD] >> DataShardReadIterator::ShouldReadNotExistingRange >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |79.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut >> GroupWriteTest::WriteHardRateDispatcher [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> DataShardReadIterator::ShouldReadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadHeadFromFollower >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 >> TExtSubDomainTest::DeclareAndDrop [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |79.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDrop [GOOD] Test command err: 2025-03-28T09:07:48.464059Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784562428595477:2155];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:48.470499Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d27/r3tmp/tmp8uUPeI/pdisk_1.dat 2025-03-28T09:07:48.954378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:48.954490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:48.960900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:07:48.963640Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:12772 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:07:49.202467Z node 1 :TX_PROXY DEBUG: actor# [1:7486784562428595635:2103] Handle TEvNavigate describe path dc-1 2025-03-28T09:07:49.202530Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784566723563211:2258] HANDLE EvNavigateScheme dc-1 2025-03-28T09:07:49.202658Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784562428595658:2116], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:07:49.202731Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7486784562428595658:2116], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-28T09:07:49.202921Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:07:49.204918Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784562428595331:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784566723563216:2259] 2025-03-28T09:07:49.204974Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784562428595331:2049] Subscribe: subscriber# [1:7486784566723563216:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:49.205052Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784562428595334:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784566723563217:2259] 2025-03-28T09:07:49.205068Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784562428595334:2052] Subscribe: subscriber# [1:7486784566723563217:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:49.205085Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784562428595337:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784566723563218:2259] 2025-03-28T09:07:49.205099Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784562428595337:2055] Subscribe: subscriber# [1:7486784566723563218:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:49.205160Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566723563216:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784562428595331:2049] 2025-03-28T09:07:49.205188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566723563217:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 
PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784562428595334:2052] 2025-03-28T09:07:49.205210Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566723563218:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784562428595337:2055] 2025-03-28T09:07:49.205265Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784566723563213:2259] 2025-03-28T09:07:49.205297Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784566723563214:2259] 2025-03-28T09:07:49.205372Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7486784566723563212:2259][/dc-1] Set up state: owner# [1:7486784562428595658:2116], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:07:49.205485Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784566723563215:2259] 2025-03-28T09:07:49.205535Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7486784566723563212:2259][/dc-1] Path was already updated: owner# [1:7486784562428595658:2116], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:07:49.205573Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566723563216:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566723563213:2259], cookie# 1 2025-03-28T09:07:49.205588Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566723563217:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566723563214:2259], cookie# 1 2025-03-28T09:07:49.205599Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566723563218:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566723563215:2259], cookie# 1 2025-03-28T09:07:49.205638Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784562428595331:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784566723563216:2259] 2025-03-28T09:07:49.205668Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784562428595331:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566723563216:2259], cookie# 1 2025-03-28T09:07:49.205686Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784562428595334:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784566723563217:2259] 2025-03-28T09:07:49.205699Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784562428595334:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7486784566723563217:2259], cookie# 1 2025-03-28T09:07:49.205745Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784562428595337:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784566723563218:2259] 2025-03-28T09:07:49.205758Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784562428595337:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566723563218:2259], cookie# 1 2025-03-28T09:07:49.206782Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566723563216:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784562428595331:2049], cookie# 1 2025-03-28T09:07:49.206817Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566723563217:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784562428595334:2052], cookie# 1 2025-03-28T09:07:49.206847Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566723563218:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784562428595337:2055], cookie# 1 2025-03-28T09:07:49.206884Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784566723563213:2259], cookie# 1 2025-03-28T09:07:49.206913Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:07:49.206976Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784566723563214:2259], cookie# 1 2025-03-28T09:07:49.207007Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:07:49.207031Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784566723563215:2259], cookie# 1 2025-03-28T09:07:49.207044Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Unexpected sync response: sender# [1:7486784566723563215:2259], cookie# 1 2025-03-28T09:07:49.277242Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784562428595658:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 
72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-28T09:07:49.277631Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784562428595658:2116], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 2025-03-28T09:07:49.486883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710659, ready parts: 1/1, is published: true 2025-03-28T09:07:49.486904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710659 ready parts: 1/1 2025-03-28T09:07:49.486926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-03-28T09:07:49.486938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710659:0 2025-03-28T09:07:49.486981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:07:49.487239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710659 2025-03-28T09:07:49.487322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:07:49.487343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-28T09:07:49.487381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:07:49.487532Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T09:07:49.487652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:07:49.487667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-28T09:07:49.487702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:07:49.487959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-28T09:07:49.488033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 
TClient::Ls request: /dc-1 2025-03-28T09:07:49.501110Z node 1 :TX_PROXY DEBUG: actor# [1:7486784562428595635:2103] Handle TEvNavigate describe path /dc-1 2025-03-28T09:07:49.501150Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784566723563304:2321] HANDLE EvNavigateScheme /dc-1 2025-03-28T09:07:49.501244Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784562428595658:2116], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:07:49.501312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784562428595658:2116], cookie# 4 2025-03-28T09:07:49.501377Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566723563216:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566723563213:2259], cookie# 4 2025-03-28T09:07:49.501391Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566723563217:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566723563214:2259], cookie# 4 2025-03-28T09:07:49.501421Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566723563218:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566723563215:2259], cookie# 4 2025-03-28T09:07:49.501466Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784562428595334:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566723563217:2259], cookie# 4 2025-03-28T09:07:49.501491Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784562428595337:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566723563218:2259], cookie# 4 2025-03-28T09:07:49.501527Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566723563217:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7486784562428595334:2052], cookie# 4 2025-03-28T09:07:49.501543Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566723563218:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7486784562428595337:2055], cookie# 4 2025-03-28T09:07:49.501588Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7486784566723563214:2259], cookie# 4 2025-03-28T09:07:49.501604Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:07:49.501616Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7486784566723563215:2259], cookie# 4 2025-03-28T09:07:49.501631Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:07:49.501681Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784562428595658:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 
2025-03-28T09:07:49.501747Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784562428595658:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784566723563212:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743152869458 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:07:49.501814Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784562428595658:2116], cacheItem# { Subscriber: { Subscriber: [1:7486784566723563212:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743152869458 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-03-28T09:07:49.501948Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784566723563305:2322], recipient# [1:7486784566723563304:2321], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:07:49.501993Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784562428595331:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566723563216:2259], cookie# 4 2025-03-28T09:07:49.502020Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784566723563304:2321] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:07:49.502095Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784566723563304:2321] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:07:49.502142Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566723563216:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7486784562428595331:2049], cookie# 4 2025-03-28T09:07:49.502161Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 7 Partial: 0 }: sender# [1:7486784566723563213:2259], cookie# 4 2025-03-28T09:07:49.502172Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566723563212:2259][/dc-1] Unexpected sync response: sender# [1:7486784566723563213:2259], cookie# 4 2025-03-28T09:07:49.502736Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784566723563304:2321] Handle TEvDescribeSchemeResult Forward to# [1:7486784566723563303:2320] Cookie: 0 TEvDescribeSchemeResult: 
NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152869458 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152869458 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... (TRUNCATED) >> TOlap::StoreStatsQuota [GOOD] >> TExtSubDomainTest::DeclareAndLs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-03-28T09:07:48.683586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784563078224348:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:48.689309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d1b/r3tmp/tmpA4WJDu/pdisk_1.dat 2025-03-28T09:07:49.411035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:49.411141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:49.426974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:07:49.488650Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:25116 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:07:49.769503Z node 1 :TX_PROXY DEBUG: actor# [1:7486784563078224582:2099] Handle TEvNavigate describe path dc-1 2025-03-28T09:07:49.769544Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784567373192410:2447] HANDLE EvNavigateScheme dc-1 2025-03-28T09:07:49.769648Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784563078224628:2123], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:07:49.769726Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784567373192391:2440][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784563078224628:2123], cookie# 1 2025-03-28T09:07:49.771119Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784567373192395:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784567373192392:2440], cookie# 1 2025-03-28T09:07:49.771153Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784567373192396:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784567373192393:2440], cookie# 1 2025-03-28T09:07:49.771167Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784567373192397:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784567373192394:2440], cookie# 1 2025-03-28T09:07:49.771200Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784563078224298:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784567373192395:2440], cookie# 1 2025-03-28T09:07:49.771224Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784563078224301:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784567373192396:2440], cookie# 1 2025-03-28T09:07:49.771240Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784563078224304:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784567373192397:2440], cookie# 1 2025-03-28T09:07:49.771271Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784567373192395:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784563078224298:2049], cookie# 1 2025-03-28T09:07:49.771289Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784567373192396:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784563078224301:2052], cookie# 1 2025-03-28T09:07:49.771301Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784567373192397:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784563078224304:2055], cookie# 1 2025-03-28T09:07:49.771351Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784567373192391:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784567373192392:2440], cookie# 1 2025-03-28T09:07:49.771377Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784567373192391:2440][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:07:49.771395Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784567373192391:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7486784567373192393:2440], cookie# 1 2025-03-28T09:07:49.771417Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784567373192391:2440][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:07:49.771440Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784567373192391:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784567373192394:2440], cookie# 1 2025-03-28T09:07:49.771453Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784567373192391:2440][/dc-1] Unexpected sync response: sender# [1:7486784567373192394:2440], cookie# 1 2025-03-28T09:07:49.771512Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784563078224628:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:07:49.788828Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784563078224628:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784567373192391:2440] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:07:49.788937Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784563078224628:2123], cacheItem# { Subscriber: { Subscriber: [1:7486784567373192391:2440] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-28T09:07:49.799654Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784567373192411:2448], recipient# [1:7486784567373192410:2447], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:07:49.799748Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784567373192410:2447] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:07:49.836744Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784567373192410:2447] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:07:49.843051Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784567373192410:2447] Handle TEvDescribeSchemeResult Forward to# [1:7486784567373192409:2446] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
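The SCHEME_BOARD_SUBSCRIBER traces above show the sync quorum logic at work: with size# 3 replica subscribers and half# 1, the main subscriber reports "Sync is in progress" after the first TEvSyncVersionResponse, "Sync is done" once successes exceed half (2 of 3), and tags the straggling third reply as "Unexpected sync response". The following is a minimal illustrative sketch of that majority rule only, not YDB's actual TSubscriber implementation; the struct and names are hypothetical:

// Illustrative sketch -- mimics the majority-quorum pattern visible in the
// log above (size# 3, half# 1, done at successes# 2), not real YDB code.
#include <cstddef>
#include <iostream>

struct SyncState {
    std::size_t Size;           // number of replica subscribers (size# 3)
    std::size_t Successes = 0;
    std::size_t Failures = 0;
    bool Done = false;

    std::size_t Half() const { return Size / 2; }  // half# 1 for three replicas

    // Returns true when this reply completes the quorum.
    bool HandleReply(bool ok) {
        if (Done) {
            std::cout << "Unexpected sync response\n";  // late reply after quorum
            return false;
        }
        ok ? ++Successes : ++Failures;
        if (Successes > Half()) {   // 2 > 1: majority of three reached
            Done = true;
            std::cout << "Sync is done: successes# " << Successes << "\n";
            return true;
        }
        std::cout << "Sync is in progress: successes# " << Successes << "\n";
        return false;
    }
};

int main() {
    SyncState st{3};        // three replicas, as in the log
    st.HandleReply(true);   // first TEvSyncVersionResponse
    st.HandleReply(true);   // second response completes the majority
    st.HandleReply(true);   // third arrives late -> "Unexpected sync response"
}

Running it prints the same progress/done/unexpected sequence the subscriber logs for each sync cookie.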
2025-03-28T09:07:49.875216Z node 1 :TX_PROXY DEBUG: actor# [1:7486784563078224582:2099] Handle TEvProposeTransaction 2025-03-28T09:07:49.875248Z node 1 :TX_PROXY DEBUG: actor# [1:7486784563078224582:2099] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:07:49.875344Z node 1 :TX_PROXY DEBUG: actor# [1:7486784563078224582:2099] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486784567373192416:2452] 2025-03-28T09:07:50.048221Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784567373192416:2452] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-28T09:07:50.048288Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784567373192416:2452] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:07:50.048358Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784567373192416:2452] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:07:50.048466Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784563078224628:2123], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:1844674407370955161 ... HEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784563078224628:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784571668159889:2590] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743152870172 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:07:50.747741Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784563078224628:2123], cacheItem# { Subscriber: { Subscriber: [1:7486784571668159889:2590] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743152870172 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-03-28T09:07:50.747869Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784571668160029:2679], recipient# [1:7486784571668160028:2678], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: 
[] } }] } 2025-03-28T09:07:50.747898Z node 1 :TX_PROXY INFO: Actor# [1:7486784571668160028:2678] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-03-28T09:07:50.754236Z node 1 :TX_PROXY DEBUG: actor# [1:7486784563078224582:2099] Handle TEvNavigate describe path /dc-1 2025-03-28T09:07:50.754283Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784571668160031:2681] HANDLE EvNavigateScheme /dc-1 2025-03-28T09:07:50.754384Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784563078224628:2123], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:07:50.754474Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784567373192391:2440][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784563078224628:2123], cookie# 4 2025-03-28T09:07:50.754531Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784567373192395:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784567373192392:2440], cookie# 4 2025-03-28T09:07:50.754547Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784567373192396:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784567373192393:2440], cookie# 4 2025-03-28T09:07:50.754562Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784567373192397:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784567373192394:2440], cookie# 4 2025-03-28T09:07:50.754611Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784563078224304:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784567373192397:2440], cookie# 4 2025-03-28T09:07:50.754649Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784567373192397:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784563078224304:2055], cookie# 4 2025-03-28T09:07:50.754724Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784567373192391:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784567373192394:2440], cookie# 4 2025-03-28T09:07:50.754745Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784567373192391:2440][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:07:50.754766Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784563078224298:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784567373192395:2440], cookie# 4 2025-03-28T09:07:50.754786Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784563078224301:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784567373192396:2440], cookie# 4 2025-03-28T09:07:50.754807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784567373192395:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784563078224298:2049], cookie# 4 2025-03-28T09:07:50.754822Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7486784567373192396:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784563078224301:2052], cookie# 4 2025-03-28T09:07:50.754856Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784567373192391:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784567373192392:2440], cookie# 4 2025-03-28T09:07:50.754877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784567373192391:2440][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:07:50.754902Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784567373192391:2440][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784567373192393:2440], cookie# 4 2025-03-28T09:07:50.754913Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784567373192391:2440][/dc-1] Unexpected sync response: sender# [1:7486784567373192393:2440], cookie# 4 2025-03-28T09:07:50.754957Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784563078224628:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:07:50.755024Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784563078224628:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784567373192391:2440] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743152870116 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:07:50.755106Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784563078224628:2123], cacheItem# { Subscriber: { Subscriber: [1:7486784567373192391:2440] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743152870116 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-03-28T09:07:50.755262Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784571668160032:2682], recipient# [1:7486784571668160031:2681], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:07:50.755293Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784571668160031:2681] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:07:50.755355Z node 1 :TX_PROXY DEBUG: 
Actor# [1:7486784571668160031:2681] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:07:50.755987Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784571668160031:2681] Handle TEvDescribeSchemeResult Forward to# [1:7486784571668160030:2680] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152870116 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152870116 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152870172 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046... 
(TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: 2025-03-28T09:07:48.730540Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784561986803788:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:48.730605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d1c/r3tmp/tmpZGxmoA/pdisk_1.dat 2025-03-28T09:07:49.235865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:49.235990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:49.239054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:07:49.290359Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:4676 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:07:49.493987Z node 1 :TX_PROXY DEBUG: actor# [1:7486784561986804012:2093] Handle TEvNavigate describe path dc-1 2025-03-28T09:07:49.494057Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784566281771828:2442] HANDLE EvNavigateScheme dc-1 2025-03-28T09:07:49.494208Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784561986804056:2117], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:07:49.494261Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7486784561986804056:2117], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-28T09:07:49.494482Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:07:49.496739Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561986803739:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784566281771833:2443] 2025-03-28T09:07:49.496794Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784561986803739:2049] Subscribe: subscriber# [1:7486784566281771833:2443], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:49.496856Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561986803745:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784566281771835:2443] 2025-03-28T09:07:49.496871Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784561986803745:2055] Subscribe: subscriber# [1:7486784566281771835:2443], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:49.496927Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566281771833:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784561986803739:2049] 2025-03-28T09:07:49.496957Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7486784566281771835:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784561986803745:2055] 2025-03-28T09:07:49.496995Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784566281771830:2443] 2025-03-28T09:07:49.497025Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784566281771832:2443] 2025-03-28T09:07:49.497076Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7486784566281771829:2443][/dc-1] Set up state: owner# [1:7486784561986804056:2117], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:07:49.497199Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566281771833:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566281771830:2443], cookie# 1 2025-03-28T09:07:49.497281Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566281771834:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566281771831:2443], cookie# 1 2025-03-28T09:07:49.497299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566281771835:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566281771832:2443], cookie# 1 2025-03-28T09:07:49.497393Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561986803739:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784566281771833:2443] 2025-03-28T09:07:49.497429Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561986803739:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566281771833:2443], cookie# 1 2025-03-28T09:07:49.497473Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561986803745:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784566281771835:2443] 2025-03-28T09:07:49.497509Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561986803745:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566281771835:2443], cookie# 1 2025-03-28T09:07:49.499817Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561986803742:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784566281771834:2443] 2025-03-28T09:07:49.499900Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784561986803742:2052] Subscribe: subscriber# [1:7486784566281771834:2443], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:49.499988Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561986803742:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566281771834:2443], cookie# 1 2025-03-28T09:07:49.500061Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566281771833:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784561986803739:2049], cookie# 1 2025-03-28T09:07:49.500105Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7486784566281771835:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784561986803745:2055], cookie# 1 2025-03-28T09:07:49.500152Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566281771834:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784561986803742:2052] 2025-03-28T09:07:49.500174Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566281771834:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784561986803742:2052], cookie# 1 2025-03-28T09:07:49.500210Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784566281771830:2443], cookie# 1 2025-03-28T09:07:49.500233Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:07:49.500246Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784566281771832:2443], cookie# 1 2025-03-28T09:07:49.500311Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:07:49.500363Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784566281771831:2443] 2025-03-28T09:07:49.500420Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7486784566281771829:2443][/dc-1] Path was already updated: owner# [1:7486784561986804056:2117], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:07:49.500449Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784566281771831:2443], cookie# 1 2025-03-28T09:07:49.500473Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Unexpected sync response: sender# [1:7486784566281771831:2443], cookie# 1 2025-03-28T09:07:49.500502Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561986803742:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784566281771834:2443] 2025-03-28T09:07:49.612256Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784561986804056:2117], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty 
Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-28T09:07:49.612645Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784561986804056:2117], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVers ... HEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784561986804056:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784566281772035:2595] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743152869941 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:07:50.513867Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784561986804056:2117], cacheItem# { Subscriber: { Subscriber: [1:7486784566281772035:2595] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743152869941 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2025-03-28T09:07:50.514095Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784570576739458:2673], recipient# [1:7486784570576739457:2672], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 
TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:07:50.514128Z node 1 :TX_PROXY INFO: Actor# [1:7486784570576739457:2672] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-03-28T09:07:50.521523Z node 1 :TX_PROXY DEBUG: actor# [1:7486784561986804012:2093] Handle TEvNavigate describe path /dc-1 2025-03-28T09:07:50.521568Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784570576739460:2675] HANDLE EvNavigateScheme /dc-1 2025-03-28T09:07:50.521666Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784561986804056:2117], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:07:50.521740Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784561986804056:2117], cookie# 4 2025-03-28T09:07:50.521788Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566281771833:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566281771830:2443], cookie# 4 2025-03-28T09:07:50.521805Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566281771834:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566281771831:2443], cookie# 4 2025-03-28T09:07:50.521820Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566281771835:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566281771832:2443], cookie# 4 2025-03-28T09:07:50.521848Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561986803739:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566281771833:2443], cookie# 4 2025-03-28T09:07:50.521886Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561986803742:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566281771834:2443], cookie# 4 2025-03-28T09:07:50.521910Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561986803745:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784566281771835:2443], cookie# 4 2025-03-28T09:07:50.521934Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566281771833:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784561986803739:2049], cookie# 4 2025-03-28T09:07:50.521951Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566281771834:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784561986803742:2052], cookie# 4 2025-03-28T09:07:50.521981Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784566281771835:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784561986803745:2055], cookie# 4 2025-03-28T09:07:50.522026Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: 
sender# [1:7486784566281771830:2443], cookie# 4 2025-03-28T09:07:50.522043Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:07:50.522056Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784566281771831:2443], cookie# 4 2025-03-28T09:07:50.522071Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:07:50.522099Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784566281771832:2443], cookie# 4 2025-03-28T09:07:50.522110Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784566281771829:2443][/dc-1] Unexpected sync response: sender# [1:7486784566281771832:2443], cookie# 4 2025-03-28T09:07:50.522149Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784561986804056:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:07:50.522204Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784561986804056:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784566281771829:2443] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743152869899 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:07:50.522268Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784561986804056:2117], cacheItem# { Subscriber: { Subscriber: [1:7486784566281771829:2443] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743152869899 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-03-28T09:07:50.522426Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784570576739461:2676], recipient# [1:7486784570576739460:2675], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:07:50.522461Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784570576739460:2675] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 
ErrorCount# 0 2025-03-28T09:07:50.522548Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784570576739460:2675] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:07:50.523210Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784570576739460:2675] Handle TEvDescribeSchemeResult Forward to# [1:7486784570576739459:2674] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152869899 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152869899 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152869941 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046... 
(TRUNCATED) |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher [GOOD] Test command err: RandomSeed# 10916064179617079244 2025-03-28T09:03:04.790155Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 5 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2025-03-28T09:03:04.818972Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2025-03-28T09:03:04.819085Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 going to send TEvBlock {TabletId# 5 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2025-03-28T09:03:04.822122Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvBlockResult {Status# OK} 2025-03-28T09:03:04.838037Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:03:04.841313Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2025-03-28T09:03:18.058534Z 4 00h01m02.173511s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:3:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 587 2025-03-28T09:06:46.802458Z 8 00h01m08.888182s :BS_LOGCUTTER ERROR: VDISK[82000000:_:0:7:0]: KEEPER: RetryCutLogEvent: limit exceeded; FreeUpToLsn# 20509 2025-03-28T09:07:47.818399Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-28T09:07:47.820152Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:07:47.820265Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2025-03-28T09:07:47.820320Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2025-03-28T09:07:48.065893Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} 2025-03-28T09:07:48.066047Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 13 Channel# 0 Status# OK} >> BasicStatistics::Serverless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::StoreStatsQuota [GOOD] Test command err: Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:05:44.538139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:05:44.538267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:44.538330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:05:44.538362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:05:44.538412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:05:44.538440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:05:44.538507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:05:44.538600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:05:44.539046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:05:44.645904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:44.645980Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:44.664040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:05:44.664364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:05:44.664575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:05:44.672069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:05:44.672342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:05:44.673091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:44.673346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:44.675891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:44.677348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:44.677417Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:44.677641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:05:44.677701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2025-03-28T09:05:44.677745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:05:44.677849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:05:44.695783Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:05:44.876008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:05:44.876287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:44.876572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:05:44.876847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:05:44.876936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:44.883706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:44.883864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:05:44.884084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:44.884152Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:05:44.884192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:05:44.884229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:05:44.887586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:44.887682Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:05:44.887731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:05:44.889911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:44.889983Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:44.890032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:44.890082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:05:44.899970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 
72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:05:44.902618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:05:44.902878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:05:44.904098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:05:44.904248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:05:44.904303Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:44.904587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:05:44.904640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:05:44.904829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:05:44.904934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:05:44.907456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:05:44.907513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:05:44.907732Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:05:44.907780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:05:44.908161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:05:44.908224Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:05:44.908330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:44.908368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:44.908407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:05:44.908438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:44.908475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 
2025-03-28T09:05:44.908520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:05:44.908572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:05:44.908628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:05:44.908717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:05:44.908756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:05:44.908788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:05:44.911384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:44.911510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:05:44.911551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:07:52.639340Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:07:52.639386Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:07:52.639492Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:07:52.800218Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;parent=[2:415:2384];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-28T09:07:53.006982Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;parent=[2:415:2384];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-28T09:07:53.017454Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186233409546; 2025-03-28T09:07:53.017607Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546; 2025-03-28T09:07:53.017663Z node 2 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:07:53.017720Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:07:53.017821Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:07:53.017898Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-03-28T09:07:53.017991Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=4700006;tx_id=18446744073709551615;;current_snapshot_ts=5000006; 2025-03-28T09:07:53.018044Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:07:53.018098Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:07:53.018144Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:07:53.018257Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:07:53.166971Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;parent=[2:415:2384];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-28T09:07:53.351585Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;parent=[2:415:2384];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-28T09:07:53.367890Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186233409546; 2025-03-28T09:07:53.368051Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546; 2025-03-28T09:07:53.368136Z node 2 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:07:53.368219Z node 2 :TX_COLUMNSHARD DEBUG: There are stats for 1 tables 2025-03-28T09:07:53.368339Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:07:53.368415Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:07:53.368499Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-03-28T09:07:53.368573Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=4700006;tx_id=18446744073709551615;;current_snapshot_ts=5000006; 2025-03-28T09:07:53.368620Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:07:53.368673Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:07:53.368731Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:07:53.368853Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186233409546;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:07:53.369107Z node 2 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 3] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-03-28T09:07:53.369762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:1 data size 0 row count 0 2025-03-28T09:07:53.369890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=OlapStore, is column=0, is olap=1 2025-03-28T09:07:53.369943Z node 2 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 3: RowCount 0, DataSize 0 2025-03-28T09:07:53.370043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: OLAP store contains 1 tables. 
2025-03-28T09:07:53.370229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Aggregated stats for pathId 3: RowCount 0, DataSize 0 2025-03-28T09:07:53.370646Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:07:53.370732Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:07:53.371193Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:07:53.384051Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:07:53.384155Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:337:2313], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-03-28T09:07:53.384259Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 256us result status StatusSuccess 2025-03-28T09:07:53.384841Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } Children { Name: "OlapStore" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1000000 data_size_soft_quota: 900000 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:07:53.388121Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2025-03-28T09:07:53.388517Z node 2 :TX_COLUMNSHARD DEBUG: tablet_id=72075186233409546;self_id=[2:415:2384];ev=NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated;fline=columnshard_subdomain_path_id.cpp:90;notify_subdomain=[OwnerId: 72057594046678944, LocalPathId: 2]; |79.3%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |79.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |79.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs [GOOD] Test command err: 2025-03-28T09:07:49.818357Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784567916490988:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:49.818465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cf6/r3tmp/tmp8Ec4Em/pdisk_1.dat 2025-03-28T09:07:50.456539Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:50.465004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:50.465125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:50.467908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27810 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:07:50.733168Z node 1 :TX_PROXY DEBUG: actor# [1:7486784567916491228:2103] Handle TEvNavigate describe path dc-1 2025-03-28T09:07:50.733219Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784572211458805:2258] HANDLE EvNavigateScheme dc-1 2025-03-28T09:07:50.733370Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784572211458572:2126], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:07:50.733414Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7486784572211458572:2126], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-28T09:07:50.733586Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:07:50.740224Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567916490923:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784572211458810:2259] 2025-03-28T09:07:50.740302Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784567916490923:2049] Subscribe: subscriber# [1:7486784572211458810:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:50.740372Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567916490926:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784572211458811:2259] 2025-03-28T09:07:50.740392Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784567916490926:2052] Subscribe: subscriber# [1:7486784572211458811:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:50.740448Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784572211458810:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784567916490923:2049] 2025-03-28T09:07:50.740471Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784572211458811:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784567916490926:2052] 2025-03-28T09:07:50.740509Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784572211458807:2259] 2025-03-28T09:07:50.740533Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784572211458808:2259] 2025-03-28T09:07:50.740583Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7486784572211458806:2259][/dc-1] Set up state: owner# [1:7486784572211458572:2126], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:07:50.740682Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784572211458810:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784572211458807:2259], cookie# 1 2025-03-28T09:07:50.740697Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784572211458811:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784572211458808:2259], cookie# 1 2025-03-28T09:07:50.740711Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784572211458812:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784572211458809:2259], cookie# 1 2025-03-28T09:07:50.740731Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567916490923:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784572211458810:2259] 2025-03-28T09:07:50.740753Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567916490923:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784572211458810:2259], cookie# 1 2025-03-28T09:07:50.740783Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567916490926:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784572211458811:2259] 2025-03-28T09:07:50.740798Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567916490926:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784572211458811:2259], cookie# 1 2025-03-28T09:07:50.742864Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567916490929:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784572211458812:2259] 2025-03-28T09:07:50.742921Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784567916490929:2055] Subscribe: subscriber# [1:7486784572211458812:2259], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:50.742980Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567916490929:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784572211458812:2259], cookie# 1 2025-03-28T09:07:50.743047Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784572211458810:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784567916490923:2049], cookie# 1 2025-03-28T09:07:50.743076Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784572211458811:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784567916490926:2052], cookie# 1 2025-03-28T09:07:50.743100Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784572211458812:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784567916490929:2055] 2025-03-28T09:07:50.743127Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784572211458812:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784567916490929:2055], cookie# 1 2025-03-28T09:07:50.743185Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784572211458807:2259], cookie# 1 2025-03-28T09:07:50.743203Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:07:50.743216Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784572211458808:2259], cookie# 1 2025-03-28T09:07:50.743231Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:07:50.743268Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784572211458809:2259] 2025-03-28T09:07:50.743325Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7486784572211458806:2259][/dc-1] Path was already updated: owner# [1:7486784572211458572:2126], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:07:50.743346Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784572211458809:2259], cookie# 1 2025-03-28T09:07:50.743359Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Unexpected sync response: sender# [1:7486784572211458809:2259], cookie# 1 2025-03-28T09:07:50.743377Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567916490929:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784572211458812:2259] 2025-03-28T09:07:50.855652Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784572211458572:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { 
Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-28T09:07:50.855991Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784572211458572:2126], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... 
SyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2025-03-28T09:07:51.185307Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784572211458572:2126], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784576506426167:2303] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743152871208 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:07:51.185364Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784572211458572:2126], cacheItem# { Subscriber: { Subscriber: [1:7486784576506426167:2303] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1743152871208 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-28T09:07:51.185483Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784576506426174:2304], recipient# [1:7486784576506426166:2302], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:07:51.185509Z node 1 :TX_PROXY INFO: Actor# [1:7486784576506426166:2302] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 13 ErrorReason: "Could not resolve redirected path" TClient::Ls request: /dc-1 2025-03-28T09:07:51.191819Z node 1 :TX_PROXY DEBUG: actor# [1:7486784567916491228:2103] Handle TEvNavigate describe path /dc-1 2025-03-28T09:07:51.191874Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784576506426176:2306] HANDLE EvNavigateScheme /dc-1 2025-03-28T09:07:51.191968Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784572211458572:2126], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:07:51.192091Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784572211458572:2126], cookie# 4 2025-03-28T09:07:51.192154Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784572211458810:2259][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784572211458807:2259], cookie# 4 2025-03-28T09:07:51.192176Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784572211458811:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784572211458808:2259], cookie# 4 2025-03-28T09:07:51.192188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784572211458812:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784572211458809:2259], cookie# 4 2025-03-28T09:07:51.192208Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567916490923:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784572211458810:2259], cookie# 4 2025-03-28T09:07:51.192228Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567916490926:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784572211458811:2259], cookie# 4 2025-03-28T09:07:51.192240Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567916490929:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784572211458812:2259], cookie# 4 2025-03-28T09:07:51.192260Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784572211458810:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784567916490923:2049], cookie# 4 2025-03-28T09:07:51.192286Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784572211458811:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784567916490926:2052], cookie# 4 2025-03-28T09:07:51.192302Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784572211458812:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784567916490929:2055], cookie# 4 2025-03-28T09:07:51.192329Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784572211458807:2259], cookie# 4 2025-03-28T09:07:51.192345Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Sync is in progress: cookie# 4, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:07:51.192356Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784572211458808:2259], cookie# 4 TClient::Ls response: 2025-03-28T09:07:51.192369Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Sync is done: cookie# 4, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:07:51.192386Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 }: sender# [1:7486784572211458809:2259], cookie# 4 2025-03-28T09:07:51.192413Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784572211458806:2259][/dc-1] Unexpected sync response: sender# [1:7486784572211458809:2259], cookie# 4 2025-03-28T09:07:51.192447Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784572211458572:2126], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:07:51.192535Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784572211458572:2126], notify# 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784572211458806:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743152871194 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:07:51.192607Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784572211458572:2126], cacheItem# { Subscriber: { Subscriber: [1:7486784572211458806:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743152871194 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-03-28T09:07:51.192728Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784576506426177:2307], recipient# [1:7486784576506426176:2306], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:07:51.192752Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784576506426176:2306] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:07:51.192817Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784576506426176:2306] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:07:51.193317Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784576506426176:2306] Handle TEvDescribeSchemeResult Forward to# [1:7486784576506426175:2305] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152871194 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 
ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152871194 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152871208 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: ".sys" PathId: 18446744073709551615 ... (TRUNCATED) |79.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |79.4%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut >> BasicStatistics::TwoNodes [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck >> TNebiusAccessServiceTest::Authenticate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless [GOOD] Test command err: 2025-03-28T09:04:52.524277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:52.524554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:52.524615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0010ca/r3tmp/tmpmFEe1w/pdisk_1.dat 2025-03-28T09:04:52.937017Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26614, node 1 2025-03-28T09:04:53.230294Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:53.230357Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:53.230393Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:53.231455Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:04:53.234068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:53.349538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:53.349681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:53.393708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19447 2025-03-28T09:04:54.038325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:57.682289Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:04:57.725387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:57.725515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:57.766609Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:04:57.771736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:58.064328Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:58.064951Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:58.065587Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:58.065746Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:58.066037Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:58.066138Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:58.066228Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:58.066323Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:58.066424Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:58.250657Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:58.250803Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:58.266567Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:58.435135Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:58.483567Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:04:58.483665Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:04:58.515988Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:04:58.516158Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:04:58.516363Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:04:58.516431Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:04:58.516490Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:04:58.516565Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:04:58.516624Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:04:58.516680Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:04:58.517134Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:04:58.602771Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:04:58.602868Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:04:58.620108Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:04:58.627367Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:04:58.627584Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:04:58.642901Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-28T09:04:58.667414Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:04:58.667459Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:04:58.667514Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-28T09:04:58.683519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:04:58.692133Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:04:58.692361Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:04:58.890552Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:04:59.106517Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:04:59.213699Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:04:59.936497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:05:00.674633Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:00.885644Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-28T09:05:00.885714Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:05:00.885804Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2594:2948], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:05:00.887874Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2596:2950] 2025-03-28T09:05:00.888711Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2596:2950], schemeshard id = 72075186224037899 2025-03-28T09:05:02.238233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2723:3246], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:02.238441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:02.256323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-03-28T09:05:02.425910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2876:3282], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:02.426084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:02.473407Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2881:3286]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:05:02.473579Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:05:02.473746Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-03-28T09:05:02.473809Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2884:3289] 2025-03-28T09:05:02.473876Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2884:3289] 2025-03-28T09:05:02.474491Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2885:3078] 2025-03-28T09:05:02.474788Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2884:3289], server id = [2:2885:3078], tablet id = 72075186224037894, status = OK 2025-03-28T09:05:02.474993Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2885:3078], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:05:02.475072Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:05:02.475332Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:05:02.475409Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2881:3286], StatRequests.size() = 1 2025-03-28T09:05:02.498291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2889:3293], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't ... gatorId, at schemeshard: 72057594046644480 2025-03-28T09:07:43.300408Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-28T09:07:43.653408Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 117 ], ReplyToActorId[ [2:7581:5440]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:43.653671Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 117 ] 2025-03-28T09:07:43.653714Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 117, ReplyToActorId = [2:7581:5440], StatRequests.size() = 1 2025-03-28T09:07:45.118830Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvSendTopPartitions, time mismath: , partition interval end# 1970-01-01T00:02:01.000000Z, event time# 1970-01-01T00:02:01.079536Z 2025-03-28T09:07:45.220935Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:07:45.221606Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:07:45.221861Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:07:45.267731Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037897 2025-03-28T09:07:45.267835Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 229.000000s, at schemeshard: 72075186224037897 2025-03-28T09:07:45.268261Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-03-28T09:07:45.284070Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:07:45.346190Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:7622:5463]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:45.346481Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-03-28T09:07:45.346528Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:7622:5463], StatRequests.size() = 1 2025-03-28T09:07:46.775410Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:46.775505Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:07:46.775573Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-28T09:07:46.775623Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:07:46.776191Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-28T09:07:46.795835Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:07:46.801664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7660:5490], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:46.801846Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7670:5495], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:46.802110Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:46.814798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-28T09:07:46.895930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7674:5498], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-28T09:07:47.008934Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7769:5546]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:47.009185Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-03-28T09:07:47.009224Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7769:5546], StatRequests.size() = 1 2025-03-28T09:07:47.086245Z node 2 :TX_PROXY ERROR: Actor# [2:7771:5548] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:47.131502Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7800:5563]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:47.131701Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-03-28T09:07:47.131916Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-03-28T09:07:47.131980Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-28T09:07:47.132092Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:07:47.132147Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7800:5563], StatRequests.size() = 1 2025-03-28T09:07:47.252961Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTJhMjJkMTItNTYwOTFkMjEtNDQzMDY0NzQtYjk1MjUyM2M=, TxId: 2025-03-28T09:07:47.253042Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTJhMjJkMTItNTYwOTFkMjEtNDQzMDY0NzQtYjk1MjUyM2M=, TxId: 2025-03-28T09:07:47.253610Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:07:47.272577Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:07:47.272650Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-03-28T09:07:47.317674Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:07:47.317740Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:07:47.384288Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3059:3112], schemeshard count = 1 2025-03-28T09:07:47.773704Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-03-28T09:07:47.773784Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 226.000000s, at schemeshard: 72075186224037899 2025-03-28T09:07:47.774129Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-03-28T09:07:47.793513Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:07:48.893223Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:7871:5607]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:48.893518Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-28T09:07:48.893559Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:7871:5607], StatRequests.size() = 1 2025-03-28T09:07:50.351558Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:07:50.351846Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:50.351882Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:07:50.351922Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-03-28T09:07:50.351953Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-28T09:07:50.352224Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-28T09:07:50.355004Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:07:50.382368Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:04.000000Z, event interval end# 2025-03-28T09:07:48.000000Z 2025-03-28T09:07:50.383380Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWQ3YmVmN2YtM2VmMWJhZGUtYWE1N2Y4ZmUtNGRhNzhiODk=, TxId: 2025-03-28T09:07:50.383437Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWQ3YmVmN2YtM2VmMWJhZGUtYWE1N2Y4ZmUtNGRhNzhiODk=, TxId: 2025-03-28T09:07:50.384064Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:07:50.405828Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-28T09:07:50.405879Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
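For readability, the statement quoted in the "[TQueryBase] RunDataQuery:" records above (printed twice so far in this excerpt, collapsed onto one line each time) reassembles into the following standalone YQL. The two parameters are bound by the statistics aggregator from the traversed path's OwnerId/LocalPathId; their concrete values are not printed in the log.

    -- Verbatim from the RunDataQuery log lines, reflowed only for readability.
    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;

    DELETE FROM `.metadata/_statistics`
    WHERE owner_id = $owner_id
      AND local_path_id = $local_path_id;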
2025-03-28T09:07:50.476643Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7942:5651]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:50.476902Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-03-28T09:07:50.476942Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7942:5651], StatRequests.size() = 1 2025-03-28T09:07:52.142483Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7996:5683]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:52.142810Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-03-28T09:07:52.142858Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7996:5683], StatRequests.size() = 1 2025-03-28T09:07:53.591637Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-03-28T09:07:53.591880Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:07:53.592038Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:53.592066Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:07:53.592535Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:07:53.696600Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:8034:5704]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:53.696898Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-03-28T09:07:53.696935Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:8034:5704], StatRequests.size() = 1 >> TNebiusAccessServiceTest::Authenticate [GOOD] |79.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |79.4%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |79.4%| [TA] $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} |79.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |79.4%| [TA] {RESULT} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |79.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |79.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |79.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] >> TestProgram::NumRowsWithNulls |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true >> TNebiusAccessServiceTest::PassRequestId >> TTxDataShardMiniKQL::CrossShard_1_Cycle [GOOD] >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy >> TNebiusAccessServiceTest::PassRequestId [GOOD] >> TestProgram::NumRowsWithNulls [GOOD] |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authenticate [GOOD] Test command err: 2025-03-28T09:07:56.615627Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Connect to grpc://localhost:26133 2025-03-28T09:07:56.619283Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Request AuthenticateRequest { iam_token: "**** (3C4833B6)" } 2025-03-28T09:07:56.676618Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Status 7 Permission Denied 2025-03-28T09:07:56.677250Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Request AuthenticateRequest { iam_token: "**** (86DDB286)" } 2025-03-28T09:07:56.679933Z node 1 :GRPC_CLIENT DEBUG: [517000003208] Response AuthenticateResponse { account { user_account { id: "1234" } } } |79.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |79.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |79.5%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoNodes [GOOD] Test command err: 2025-03-28T09:05:02.033830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:526:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:05:02.034162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:05:02.034373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001034/r3tmp/tmpC4LxEj/pdisk_1.dat 2025-03-28T09:05:02.486445Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64741, node 1 2025-03-28T09:05:02.764653Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:02.764737Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:02.764781Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:02.765380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:05:02.767997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:05:02.859414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:02.859612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:02.885078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17692 2025-03-28T09:05:03.467878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:05:10.608358Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:05:10.608511Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-03-28T09:05:10.685089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:10.685245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:10.685992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:10.686071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:10.728008Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:05:10.728250Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T09:05:10.732673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:10.733691Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:11.029514Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:11.030239Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute 
CreateTablet Postponed 2025-03-28T09:05:11.031505Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:11.031722Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:11.032056Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:11.032168Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:11.032298Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:11.032395Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:11.032505Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:11.232413Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:11.232543Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:11.233411Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:11.233504Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:11.252296Z node 2 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T09:05:11.252903Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:11.255205Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:11.456565Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:11.498589Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:05:11.498708Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:05:11.536491Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:05:11.536796Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:05:11.536995Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:05:11.537053Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:05:11.537114Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:05:11.537168Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:05:11.537242Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:05:11.537302Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:05:11.537850Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:05:11.565328Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:05:11.565449Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2288:2601], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:05:11.574264Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2297:2608] 2025-03-28T09:05:11.594001Z 
node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2332:2624] 2025-03-28T09:05:11.595169Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2332:2624], schemeshard id = 72075186224037897 2025-03-28T09:05:11.599203Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:05:11.622749Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:05:11.622806Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:05:11.622887Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:05:11.641202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725657:1, at schemeshard: 72075186224037897 2025-03-28T09:05:11.649405Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976725657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:05:11.649558Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976725657 2025-03-28T09:05:11.863573Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:05:12.175842Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976725657. Doublechecking... 2025-03-28T09:05:12.309486Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:05:13.401207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2685:3082], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:13.401321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:13.417585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:05:13.729796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2835:3119], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:13.729929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:13.731160Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2840:3123]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:05:13.731340Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:05:13.731421Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2842:3125] 2025-03-28T09:05:13.731484Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2842:3125] 2025-03-28T09:05:13.732044Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2843:2816] 2025-03-28T09:05:13.732364Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2842:3125], server id = [2:2843:2816], tablet id = 72075186224037894, status = OK 2025-03-28T09:05:13.732558Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2843:2816], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:05:13.732626Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:05:13.732906Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:05:13.732975Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2840:3123], StatRequests.size() = 1 2025-03-28T09:05:13.753418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2847:3129], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:13.753556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch poo ... 80 2025-03-28T09:07:44.134446Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:07:44.134495Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-28T09:07:45.002030Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7474:4581]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:45.002362Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-03-28T09:07:45.002408Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7474:4581], StatRequests.size() = 1 2025-03-28T09:07:45.799195Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:07:45.799459Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:07:45.799865Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:07:45.845144Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-28T09:07:45.845224Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 229.000000s, at schemeshard: 72075186224037897 2025-03-28T09:07:45.845521Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 2025-03-28T09:07:45.860260Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:07:46.553121Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:7511:4597]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:46.553526Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-03-28T09:07:46.553581Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:7511:4597], StatRequests.size() = 1 2025-03-28T09:07:47.261032Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:47.261133Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:07:47.261228Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-28T09:07:47.261284Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:07:47.261720Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:07:47.293200Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:07:47.297674Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7538:4616], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:47.297796Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7548:4621], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:47.298304Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:47.310643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976725658:2, at schemeshard: 72075186224037897 2025-03-28T09:07:47.433040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7552:4624], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976725658 completed, doublechecking } 2025-03-28T09:07:47.636795Z node 2 :TX_PROXY ERROR: Actor# [2:7650:4671] txid# 281474976725659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:47.690511Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:7680:4687]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:47.690778Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-28T09:07:47.690834Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:7680:4687], StatRequests.size() = 1 2025-03-28T09:07:47.873038Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDhlMzYyNGMtYTFiZGJhNTktM2IzYzA2M2UtNjY5MDc1MmM=, TxId: 2025-03-28T09:07:47.873131Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDhlMzYyNGMtYTFiZGJhNTktM2IzYzA2M2UtNjY5MDc1MmM=, TxId: 2025-03-28T09:07:47.873946Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:07:47.889642Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:07:47.889728Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:07:48.441099Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:7716:4698]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:48.441422Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-03-28T09:07:48.441475Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:7716:4698], StatRequests.size() = 1 2025-03-28T09:07:49.854373Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:7759:4712]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:49.854548Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-03-28T09:07:49.854572Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:7759:4712], StatRequests.size() = 1 2025-03-28T09:07:50.558439Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:07:50.558781Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:50.558818Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:07:50.558855Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-28T09:07:50.558885Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:07:50.559435Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:07:50.561821Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:07:50.599269Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:04.000000Z, event interval end# 2025-03-28T09:07:48.000000Z 2025-03-28T09:07:50.599458Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDIwNWQzLTU1MGU5N2ZiLWIzZTUwNDdkLTZhOGMyZDgz, TxId: 2025-03-28T09:07:50.599500Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDIwNWQzLTU1MGU5N2ZiLWIzZTUwNDdkLTZhOGMyZDgz, TxId: 2025-03-28T09:07:50.600234Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:07:50.614258Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:07:50.614311Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:07:51.324871Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:7831:4744]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:51.325219Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-03-28T09:07:51.325262Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:7831:4744], StatRequests.size() = 1 2025-03-28T09:07:52.937299Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 125 ], ReplyToActorId[ [2:7880:4760]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:52.937645Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 125 ] 2025-03-28T09:07:52.937688Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 125, ReplyToActorId = [2:7880:4760], StatRequests.size() = 1 2025-03-28T09:07:53.732191Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:07:53.732428Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:53.732461Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-03-28T09:07:53.732636Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:07:53.733087Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:07:54.425203Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 126 ], ReplyToActorId[ [2:7917:4776]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:54.425474Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 126 ] 2025-03-28T09:07:54.425559Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 126, ReplyToActorId = [2:7917:4776], StatRequests.size() = 1 2025-03-28T09:07:54.426339Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [3:7919:3038]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:54.431508Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:07:54.431640Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [3:7929:3042] 2025-03-28T09:07:54.431719Z node 3 :STATISTICS DEBUG: SyncNode(), pipe client id = [3:7929:3042] 2025-03-28T09:07:54.435708Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7937:4778] 2025-03-28T09:07:54.436208Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:7929:3042], server id = [2:7937:4778], tablet id = 72075186224037894, status = OK 2025-03-28T09:07:54.436614Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7937:4778], node id = 3, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:07:54.436692Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 3, schemeshard count = 1 2025-03-28T09:07:54.437402Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-03-28T09:07:54.437510Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [3:7919:3038], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::NumRowsWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N2(17):{\"i\":\"2\",\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N0(2):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N2[shape=box, label="N1(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N3(17):{\"i\":\"10001\",\"t\":\"Filter\"}\nREMOVE:10001",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, 
label="N4(10):{\"o\":\"10002\",\"t\":\"Calculation\"}\n"]; N5[shape=box, label="N5(10):{\"i\":\"10002\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N1->N2->N0->N3->N4->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]},{"owner_id":5,"inputs":[{"from":4}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Filter"},"w":17,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"5":{"p":{"i":"10002","t":"Projection"},"w":10,"id":5},"4":{"p":{"o":"10002","t":"Calculation"},"w":10,"id":4},"0":{"p":{"i":"2","o":"10001","t":"Calculation"},"w":17,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; >> DataShardReadIterator::ShouldReadNotExistingRange [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk1_100 >> TNebiusAccessServiceTest::Authorize ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::PassRequestId [GOOD] Test command err: 2025-03-28T09:07:57.344396Z node 2 :GRPC_CLIENT DEBUG: [517000004708]{reqId} Connect to grpc://localhost:28303 2025-03-28T09:07:57.421263Z node 2 :GRPC_CLIENT DEBUG: [517000004708]{reqId} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-03-28T09:07:57.446335Z node 2 :GRPC_CLIENT DEBUG: [517000004708]{reqId} Response AuthenticateResponse { account { user_account { id: "1234" } } } >> TNebiusAccessServiceTest::Authorize [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 >> GenericFederatedQuery::ClickHouseManagedSelectAll ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authorize [GOOD] Test command err: 2025-03-28T09:07:58.386174Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Connect to grpc://localhost:24623 2025-03-28T09:07:58.401943Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2025-03-28T09:07:58.410573Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user_id" } } } } } 2025-03-28T09:07:58.411313Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (79225CA9)" } } } 2025-03-28T09:07:58.413763Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Status 7 Permission Denied 2025-03-28T09:07:58.414364Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Request AuthorizeRequest { checks { key: 0 value { permission { name: "denied" } 
resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2025-03-28T09:07:58.415878Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Status 7 Permission Denied 2025-03-28T09:07:58.416571Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "p" } } iam_token: "**** (717F937C)" } } } 2025-03-28T09:07:58.418171Z node 3 :GRPC_CLIENT DEBUG: [517000005888] Status 7 Permission Denied |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |79.5%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |79.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut >> TCdcStreamTests::VirtualTimestamps >> GenericFederatedQuery::YdbSelectCount >> GenericFederatedQuery::YdbManagedSelectAll >> TCdcStreamTests::Basic |79.5%| [TA] $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 >> GenericFederatedQuery::PostgreSQLOnPremSelectAll >> DataShardReadIterator::ShouldReadHeadFromFollower [GOOD] >> DataShardReadIterator::ShouldReadFromHead >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] |79.5%| [TA] $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... 
results_accumulator.log} >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables >> TCdcStreamTests::VirtualTimestamps [GOOD] >> TCdcStreamTests::ResolvedTimestamps ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 23318, MsgBus: 8929 2025-03-28T09:01:58.836668Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783058044998664:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:01:58.943333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001908/r3tmp/tmpYdKBeb/pdisk_1.dat 2025-03-28T09:02:05.399317Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783058044998664:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:02:05.399770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:02:06.742717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:02:06.743120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:02:06.816525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:02:07.413039Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:07.413086Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:10.352384Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:02:10.356040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:10.356057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:02:11.195416Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.127776s 2025-03-28T09:02:11.195478Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.127848s TServer::EnableGrpc on GrpcPort 23318, node 1 2025-03-28T09:02:13.720913Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:02:13.720930Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:02:13.720935Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:02:13.721374Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8929 TClient is connected to server localhost:8929 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:02:16.817904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:16.867683Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:02:16.878135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.061054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.264004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:17.368460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:02:19.275665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783148239313665:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:19.275773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:19.743590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.771705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.857178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.899272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:02:19.951741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:02:20.029491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:02:20.123096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783152534281485:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:20.123209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:20.123618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783152534281490:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:02:20.128425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:02:20.155104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783152534281492:2472], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:02:20.219860Z node 1 :TX_PROXY ERROR: Actor# [1:7486783152534281546:3478] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:02:21.656185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:02:22.629342Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jqdzswwtbhgyt9ps7hvnbdvm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE1MTc1ZWUtMmIzNGJkOWUtMTFlOGQ0NGMtN2Q2MTQ5NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:22.639262Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqdzswwtbhgyt9ps7hvnbdvm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE1MTc1ZWUtMmIzNGJkOWUtMTFlOGQ0NGMtN2Q2MTQ5NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:22.644192Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jqdzswwtbhgyt9ps7hvnbdvm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE1MTc1ZWUtMmIzNGJkOWUtMTFlOGQ0NGMtN2Q2MTQ5NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:22.713214Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqdzswzdc6g45rk7t5fb2b91, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGRiYWM5Y2UtODZmZDAyZTYtZGI2MGM0MTYtN2Y1NzhkZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:22.723742Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqdzswzdc6g45rk7t5fb2b91, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGRiYWM5Y2UtODZmZDAyZTYtZGI2MGM0MTYtN2Y1NzhkZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:22.732761Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqdzswzdc6g45rk7t5fb2b91, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGRiYWM5Y2UtODZmZDAyZTYtZGI2MGM0MTYtN2Y1NzhkZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:22.813766Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jqdzsx2r2sx9cbcxzewgjyd0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE1MTc1ZWUtMmIzNGJkOWUtMTFlOGQ0NGMtN2Q2MTQ5NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:02:22.839807Z node 1 :KQP_EXECUTER ERROR: TxId ... : TxId: 281474976723134. Ctx: { TraceId: 01jqe03xfb2rkc41jstda72ngz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:50.910946Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723135. 
Ctx: { TraceId: 01jqe03xfb2rkc41jstda72ngz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:50.922598Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723136. Ctx: { TraceId: 01jqe03xfb2rkc41jstda72ngz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:50.965659Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723137. Ctx: { TraceId: 01jqe03xhg7s4z57zb77r4s7bf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:50.975670Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723138. Ctx: { TraceId: 01jqe03xhg7s4z57zb77r4s7bf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:50.996378Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723139. Ctx: { TraceId: 01jqe03xhg7s4z57zb77r4s7bf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.082101Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723140. Ctx: { TraceId: 01jqe03xmh0nscdxzv363hdwzf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.103540Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723141. Ctx: { TraceId: 01jqe03xmh0nscdxzv363hdwzf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.114712Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723142. Ctx: { TraceId: 01jqe03xmh0nscdxzv363hdwzf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.166388Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723143. Ctx: { TraceId: 01jqe03xqj7h0vmw82y88jeywq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.174575Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723144. Ctx: { TraceId: 01jqe03xqj7h0vmw82y88jeywq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.187392Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723145. 
Ctx: { TraceId: 01jqe03xqj7h0vmw82y88jeywq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.218837Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723146. Ctx: { TraceId: 01jqe03xsfc47pxjvve5p6p5c2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.225710Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723147. Ctx: { TraceId: 01jqe03xsfc47pxjvve5p6p5c2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.232646Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723148. Ctx: { TraceId: 01jqe03xsfc47pxjvve5p6p5c2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.268008Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723149. Ctx: { TraceId: 01jqe03xttdbjd34pcm6ph80xf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.275988Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723150. Ctx: { TraceId: 01jqe03xttdbjd34pcm6ph80xf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.284929Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723151. Ctx: { TraceId: 01jqe03xttdbjd34pcm6ph80xf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.314387Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723152. Ctx: { TraceId: 01jqe03xwef70vb8dwwzhh516t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.323330Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723153. Ctx: { TraceId: 01jqe03xwef70vb8dwwzhh516t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.330891Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723154. Ctx: { TraceId: 01jqe03xwef70vb8dwwzhh516t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.386151Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723155. 
Ctx: { TraceId: 01jqe03xy4akq3mp3whjavx71r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.411468Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723156. Ctx: { TraceId: 01jqe03xy4akq3mp3whjavx71r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.430188Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723157. Ctx: { TraceId: 01jqe03xy4akq3mp3whjavx71r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.499586Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723158. Ctx: { TraceId: 01jqe03y18fbj7vvp7zh9j3qxv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.515302Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723159. Ctx: { TraceId: 01jqe03y18fbj7vvp7zh9j3qxv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.531420Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723160. Ctx: { TraceId: 01jqe03y18fbj7vvp7zh9j3qxv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.579115Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723161. Ctx: { TraceId: 01jqe03y4jes7p7rq1ge9whvpn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.588764Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723162. Ctx: { TraceId: 01jqe03y4jes7p7rq1ge9whvpn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.599852Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723163. Ctx: { TraceId: 01jqe03y4jes7p7rq1ge9whvpn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.660164Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723164. Ctx: { TraceId: 01jqe03y701r9ya0ps502y8h0t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.672855Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723165. 
Ctx: { TraceId: 01jqe03y701r9ya0ps502y8h0t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.694942Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723166. Ctx: { TraceId: 01jqe03y701r9ya0ps502y8h0t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjFjOTc0M2EtYWViY2E3MjctNmVjNDcyZmUtZTM1ZmM5ZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.754065Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723167. Ctx: { TraceId: 01jqe03y9xdr9bt6cgqtayw8hm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.764424Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723168. Ctx: { TraceId: 01jqe03y9xdr9bt6cgqtayw8hm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:51.774524Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976723169. Ctx: { TraceId: 01jqe03y9xdr9bt6cgqtayw8hm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2JkY2NiY2MtMzdlNjc5MWUtYWVlYWZmNTAtODI4NDI5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS >> TCdcStreamTests::Basic [GOOD] >> TCdcStreamTests::Attributes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 28329, MsgBus: 14869 2025-03-28T09:03:17.055413Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783398230289944:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:17.056291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00132e/r3tmp/tmpBfYEcB/pdisk_1.dat 2025-03-28T09:03:17.313487Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:17.321847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:17.322001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:17.325975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28329, node 1 2025-03-28T09:03:17.413105Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:17.413128Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:17.413135Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:17.413220Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14869 TClient is connected to server localhost:14869 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:17.975136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 16 2025-03-28T09:03:20.234964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:03:20.396900Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '0'::int2, ARRAY ['false'::bool, 'false'::bool] ); 2025-03-28T09:03:20.424284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783411115192608:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:20.424388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:20.424602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783411115192620:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:20.427910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-28T09:03:20.438901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783411115192622:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-28T09:03:20.530814Z node 1 :TX_PROXY ERROR: Actor# [1:7486783411115192674:2406] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '1'::int2, ARRAY ['true'::bool, 'true'::bool] ); 18 2025-03-28T09:03:20.955156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:03:21.095673Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::"char", '0'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::"char", '1'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::"char", '2'::"char"] ); 21 2025-03-28T09:03:21.535363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:03:21.588685Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int2, '0'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int2, '1'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int2, '2'::int2] ); 2025-03-28T09:03:22.056944Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783398230289944:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:22.057004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 23 2025-03-28T09:03:22.100786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int4, '0'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int4, '1'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int4, '2'::int4] ); 20 2025-03-28T09:03:22.678907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:03:22.771953Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int8, '0'::int8] ); --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int8, '1'::int8] ); --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int8, '2'::int8] ); 700 2025-03-28T09:03:23.271557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 
72057594046644480 2025-03-28T09:03:23.365208Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '0'::int2, ARRAY ['0.5'::float4, '0.5'::float4] ); --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '1'::int2, ARRAY ['1.5'::float4, '1.5'::float4] ); --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '2'::int2, ARRAY ['2.5'::float4, '2.5'::float4] ); 701 2025-03-28T09:03:23.878458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480 --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '0'::int2, ARRAY ['0.5'::float8, '0.5'::float8] ); --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '1'::int2, ARRAY ['1.5'::float8, '1.5'::float8] ); --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '2'::int2, ARRAY ['2.5'::float8, '2.5'::float8] ); 25 2025-03-28T09:03:24.516914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480 2025-03-28T09:03:24.583044Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1009_b (key, value) VALUES ( '0'::int2, ARRAY ['text 0'::text, 'text 0'::text] ); --!syntax_pg INSERT INTO Pg1009_b (key, value) VALUES ( '1'::int2, ARRAY ['text 1'::text, 'text 1'::text] ); --!syntax_pg INSERT INTO Pg1009_b (key, value) VALUES ( '2'::int2, ARRAY ['text 2'::text, 'text 2'::text] ); 1042 2025-03-28T09:03:25.067680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480 2025-03-28T09:03:25.124204Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1014_b (key, value) VALUES ( '0'::int2, ARRAY ['bpchar 0'::bpchar, 'bpchar 0'::bpchar] ); --!syntax_pg INSERT INTO Pg1014_b (key, value) VALUES ( '1'::int2, ARRAY ['bpchar 1'::bpchar, 'bpchar 1'::bpchar] ); --!syntax_pg INSERT INTO Pg1014_b (key, value) VALUES ( '2'::int2, ARRAY ['bpchar 2'::bpchar, 'bpchar 2'::bpchar] ); 1043 2025-03-28T09:03:25.648795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-03-28T09:03:25.718210Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1015_b (key, value) VALUES ( '0'::int2, ARRAY ['varchar 0'::varc ... true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:07:42.164127Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:45.859202Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7486784529168906370:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:45.859278Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:07:46.596316Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7486784554938710817:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:46.596436Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:46.623891Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:07:46.731341Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7486784554938710922:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:46.731454Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:46.731815Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7486784554938710928:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:46.736241Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:07:46.749079Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7486784554938710930:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:07:46.854087Z node 9 :TX_PROXY ERROR: Actor# [9:7486784554938710981:2402] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:47.168867Z node 9 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [9:7486784559233678328:2362], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2025-03-28T09:07:47.169263Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=NmMzNDdiMTctOWE2NTE3NzAtNTllM2UwMWItMmZmYjg0YzI=, ActorId: [9:7486784559233678321:2358], ActorState: ExecuteState, TraceId: 01jqe03ssq5b2hk1gphnzs1xe6, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:07:47.182406Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 11059, MsgBus: 20449 2025-03-28T09:07:49.393098Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486784567182290770:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:49.393176Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00132e/r3tmp/tmpY4Efj4/pdisk_1.dat 2025-03-28T09:07:49.619414Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:49.644239Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:49.644380Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:49.647899Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11059, node 10 2025-03-28T09:07:49.734115Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:07:49.734162Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:49.734180Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:49.734376Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20449 TClient is connected to server localhost:20449 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:07:50.869671Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:07:50.888071Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:07:54.398873Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7486784567182290770:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:54.398979Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:07:56.152826Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486784597247062502:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:56.152961Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:56.193219Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:07:56.352703Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486784597247062612:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:56.360593Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486784597247062606:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:56.360733Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:56.361130Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:07:56.386216Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486784597247062614:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:07:56.457069Z node 10 :TX_PROXY ERROR: Actor# [10:7486784597247062669:2406] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:57.328437Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7486784601542030040:2372], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2025-03-28T09:07:57.331103Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=OTMyZWZiYmItZmI2ZDYyZDctNzFkZGFmYmUtYTQ0YjhhZTQ=, ActorId: [10:7486784601542030032:2368], ActorState: ExecuteState, TraceId: 01jqe043qdb2jmtt7bkzkyfhbt, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:07:57.340985Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true >> TCdcStreamTests::Attributes [GOOD] >> TCdcStreamTests::DocApi >> BasicStatistics::ServerlessGlobalIndex [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> TCdcStreamTests::ResolvedTimestamps [GOOD] >> TCdcStreamTests::RetentionPeriod ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-03-28T09:07:48.914306Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784561209741301:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:48.915990Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d11/r3tmp/tmprK0WcR/pdisk_1.dat 2025-03-28T09:07:49.377089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:49.377230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:49.386942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:07:49.415431Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13809 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:07:49.735214Z node 1 :TX_PROXY DEBUG: actor# [1:7486784561209741458:2115] Handle TEvNavigate describe path dc-1 2025-03-28T09:07:49.735270Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784565504709232:2444] HANDLE EvNavigateScheme dc-1 2025-03-28T09:07:49.735407Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784565504708782:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:07:49.735461Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7486784565504708782:2130], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-28T09:07:49.735657Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565504709233:2445][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:07:49.737582Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561209741135:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784565504709237:2445] 2025-03-28T09:07:49.737646Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784561209741135:2050] Subscribe: subscriber# [1:7486784565504709237:2445], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:49.737723Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561209741141:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784565504709239:2445] 2025-03-28T09:07:49.737747Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784561209741141:2056] Subscribe: subscriber# [1:7486784565504709239:2445], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:49.737799Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565504709237:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784561209741135:2050] 2025-03-28T09:07:49.737821Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565504709239:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784561209741141:2056] 2025-03-28T09:07:49.737878Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565504709233:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784565504709234:2445] 2025-03-28T09:07:49.737917Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565504709233:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784565504709236:2445] 2025-03-28T09:07:49.737983Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7486784565504709233:2445][/dc-1] Set up state: owner# [1:7486784565504708782:2130], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:07:49.738123Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565504709237:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7486784565504709234:2445], cookie# 1 2025-03-28T09:07:49.738154Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565504709238:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784565504709235:2445], cookie# 1 2025-03-28T09:07:49.738167Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565504709239:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784565504709236:2445], cookie# 1 2025-03-28T09:07:49.738191Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561209741135:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784565504709237:2445] 2025-03-28T09:07:49.738214Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561209741135:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784565504709237:2445], cookie# 1 2025-03-28T09:07:49.738232Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561209741141:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784565504709239:2445] 2025-03-28T09:07:49.738248Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561209741141:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784565504709239:2445], cookie# 1 2025-03-28T09:07:49.738737Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561209741138:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784565504709238:2445] 2025-03-28T09:07:49.738771Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784561209741138:2053] Subscribe: subscriber# [1:7486784565504709238:2445], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:49.738801Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561209741138:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784565504709238:2445], cookie# 1 2025-03-28T09:07:49.738837Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565504709237:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784561209741135:2050], cookie# 1 2025-03-28T09:07:49.738853Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565504709239:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784561209741141:2056], cookie# 1 2025-03-28T09:07:49.738876Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565504709238:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784561209741138:2053] 2025-03-28T09:07:49.738926Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565504709238:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784561209741138:2053], cookie# 1 2025-03-28T09:07:49.738970Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565504709233:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784565504709234:2445], cookie# 1 2025-03-28T09:07:49.738999Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565504709233:2445][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:07:49.739038Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565504709233:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784565504709236:2445], cookie# 1 
2025-03-28T09:07:49.739056Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565504709233:2445][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:07:49.739106Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565504709233:2445][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784565504709235:2445] 2025-03-28T09:07:49.739162Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7486784565504709233:2445][/dc-1] Path was already updated: owner# [1:7486784565504708782:2130], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:07:49.739185Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565504709233:2445][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784565504709235:2445], cookie# 1 2025-03-28T09:07:49.739198Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565504709233:2445][/dc-1] Unexpected sync response: sender# [1:7486784565504709235:2445], cookie# 1 2025-03-28T09:07:49.739221Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784561209741138:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784565504709238:2445] 2025-03-28T09:07:49.814172Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784565504708782:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-28T09:07:49.814624Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784565504708782:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 
1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... d.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7486784612020375144:2926] 2025-03-28T09:08:00.932229Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7486784594840504482:2053] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-03-28T09:08:00.932237Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486784612020375133:2926][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7486784612020375140:2926] 2025-03-28T09:08:00.932270Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7486784594840504482:2053] Subscribe: subscriber# [3:7486784612020375144:2926], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:08:00.932288Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486784612020375133:2926][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7486784612020375142:2926] 2025-03-28T09:08:00.932330Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7486784612020375137:2925][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7486784594840504479:2050] 2025-03-28T09:08:00.932363Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7486784612020375133:2926][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7486784594840504876:2149], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:08:00.932370Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7486784612020375138:2925][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7486784594840504482:2053] 2025-03-28T09:08:00.932404Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7486784612020375144:2926][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7486784594840504482:2053] 2025-03-28T09:08:00.932420Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486784612020375125:2925][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7486784612020375134:2925] 2025-03-28T09:08:00.932436Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486784612020375133:2926][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7486784612020375141:2926] 2025-03-28T09:08:00.932459Z node 3 
:SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7486784612020375125:2925][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7486784594840504876:2149], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:08:00.932459Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7486784612020375133:2926][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7486784594840504876:2149], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:08:00.932490Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486784612020375125:2925][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7486784612020375135:2925] 2025-03-28T09:08:00.932491Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784594840504479:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7486784612020375143:2926] 2025-03-28T09:08:00.932525Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7486784612020375125:2925][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7486784594840504876:2149], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:08:00.932534Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784594840504479:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7486784612020375137:2925] 2025-03-28T09:08:00.932550Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784594840504485:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7486784612020375145:2926] 2025-03-28T09:08:00.932570Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784594840504482:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7486784612020375138:2925] 2025-03-28T09:08:00.932576Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7486784594840504876:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-03-28T09:08:00.932585Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784594840504482:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7486784612020375144:2926] 2025-03-28T09:08:00.932628Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7486784594840504876:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7486784612020375133:2926] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:08:00.932698Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784594840504876:2149], cacheItem# { Subscriber: { Subscriber: [3:7486784612020375133:2926] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, 
entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:00.932755Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7486784594840504876:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-03-28T09:08:00.932869Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7486784594840504876:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7486784612020375125:2925] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:08:00.932919Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784594840504876:2149], cacheItem# { Subscriber: { Subscriber: [3:7486784612020375125:2925] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:00.932988Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784612020375146:2927], recipient# [3:7486784612020375126:2324], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:00.933041Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784612020375147:2928], recipient# [3:7486784612020375123:2323], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:00.997327Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784594840504876:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true 
ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:00.997478Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784594840504876:2149], cacheItem# { Subscriber: { Subscriber: [3:7486784599135473015:2783] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:00.997608Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784612020375155:2932], recipient# [3:7486784612020375154:2325], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/ut/ydb-core-control-ut |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut |79.5%| [TA] {RESULT} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TCdcStreamTests::DocApi [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> TCdcStreamTests::DocApiNegative >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 [FAIL] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId [GOOD] >> TCdcStreamTests::RetentionPeriod [GOOD] |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |79.6%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |79.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> TCdcStreamTests::DocApiNegative [GOOD] >> TCdcStreamTests::Negative >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [FAIL] >> TCdcStreamTests::TopicPartitions >> DataShardReadIterator::ShouldReadRangeChunk1_100 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 >> DataShardReadIterator::ShouldReadRangeChunk1 |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-03-28T09:07:49.116759Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784565936283683:2280];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:49.116855Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d05/r3tmp/tmpWAKQyn/pdisk_1.dat 2025-03-28T09:07:49.577841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:49.577944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:49.580414Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:49.584367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13574 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:07:49.895017Z node 1 :TX_PROXY DEBUG: actor# [1:7486784565936283699:2116] Handle TEvNavigate describe path dc-1 2025-03-28T09:07:49.895080Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784565936284178:2443] HANDLE EvNavigateScheme dc-1 2025-03-28T09:07:49.895210Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784565936283755:2139], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:07:49.895249Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7486784565936283755:2139], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-28T09:07:49.895516Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565936284179:2444][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:07:49.897850Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784565936283378:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784565936284183:2444] 2025-03-28T09:07:49.897944Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784565936283378:2050] Subscribe: subscriber# [1:7486784565936284183:2444], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:49.898032Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784565936283384:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784565936284185:2444] 2025-03-28T09:07:49.898061Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784565936283384:2056] Subscribe: subscriber# [1:7486784565936284185:2444], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:49.898129Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565936284179:2444][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784565936283755:2139], cookie# 1 2025-03-28T09:07:49.898195Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784565936283381:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784565936284184:2444] 2025-03-28T09:07:49.898215Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784565936283381:2053] Subscribe: subscriber# [1:7486784565936284184:2444], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:49.898408Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565936284183:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784565936283378:2050] 2025-03-28T09:07:49.898445Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565936284185:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784565936283384:2056] 2025-03-28T09:07:49.898463Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565936284184:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784565936283381:2053] 2025-03-28T09:07:49.898514Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565936284179:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify 
{ Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784565936284180:2444] 2025-03-28T09:07:49.898623Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784565936283378:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784565936284183:2444] 2025-03-28T09:07:49.898648Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784565936283384:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784565936284185:2444] 2025-03-28T09:07:49.898660Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784565936283381:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784565936284184:2444] 2025-03-28T09:07:49.898571Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565936284179:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784565936284182:2444] 2025-03-28T09:07:49.898759Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7486784565936284179:2444][/dc-1] Set up state: owner# [1:7486784565936283755:2139], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:07:49.898884Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565936284179:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784565936284181:2444] 2025-03-28T09:07:49.898965Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7486784565936284179:2444][/dc-1] Path was already updated: owner# [1:7486784565936283755:2139], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:07:49.899016Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565936284183:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784565936284180:2444], cookie# 1 2025-03-28T09:07:49.899044Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565936284184:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784565936284181:2444], cookie# 1 2025-03-28T09:07:49.899061Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565936284185:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784565936284182:2444], cookie# 1 2025-03-28T09:07:49.900214Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784565936283378:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784565936284183:2444], cookie# 1 2025-03-28T09:07:49.900285Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784565936283381:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784565936284184:2444], cookie# 1 2025-03-28T09:07:49.900307Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784565936283384:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784565936284185:2444], cookie# 1 2025-03-28T09:07:49.900336Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7486784565936284183:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784565936283378:2050], cookie# 1 2025-03-28T09:07:49.900350Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565936284184:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784565936283381:2053], cookie# 1 2025-03-28T09:07:49.900361Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784565936284185:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784565936283384:2056], cookie# 1 2025-03-28T09:07:49.900434Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565936284179:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784565936284180:2444], cookie# 1 2025-03-28T09:07:49.900475Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565936284179:2444][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:07:49.900492Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565936284179:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784565936284181:2444], cookie# 1 2025-03-28T09:07:49.900516Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565936284179:2444][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:07:49.900537Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565936284179:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784565936284182:2444], cookie# 1 2025-03-28T09:07:49.900552Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784565936284179:2444][/dc-1] Unexpected sync response: sender# [1:7486784565936284182:2444], cookie# 1 2025-03-28T09:07:49.946524Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784565936283755:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-28T09:07:49.946921Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784565936283755:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 
72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@ ... 025-03-28T09:08:00.964064Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7486784612207163779:2796][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7486784595027293265:2056] 2025-03-28T09:08:00.964127Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486784612207163761:2796][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7486784612207163774:2796] 2025-03-28T09:08:00.964176Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486784612207163761:2796][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7486784612207163775:2796] 2025-03-28T09:08:00.964197Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784595027293654:2149], cacheItem# { Subscriber: { Subscriber: [3:7486784612207163759:2794] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:00.964246Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7486784612207163761:2796][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7486784595027293654:2149], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:08:00.964282Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7486784595027293654:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-03-28T09:08:00.964295Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486784612207163761:2796][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7486784612207163776:2796] 2025-03-28T09:08:00.964338Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7486784612207163761:2796][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7486784595027293654:2149], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:08:00.964368Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7486784595027293654:2149], notify# 
NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7486784612207163760:2795] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:08:00.964379Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784595027293259:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7486784612207163777:2796] 2025-03-28T09:08:00.964402Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784595027293262:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7486784612207163778:2796] 2025-03-28T09:08:00.964415Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784595027293265:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7486784612207163779:2796] 2025-03-28T09:08:00.964430Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784595027293654:2149], cacheItem# { Subscriber: { Subscriber: [3:7486784612207163760:2795] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:00.964479Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7486784595027293654:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2025-03-28T09:08:00.964548Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7486784595027293654:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7486784612207163761:2796] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:08:00.964596Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784595027293654:2149], cacheItem# { Subscriber: { Subscriber: [3:7486784612207163761:2796] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:00.964674Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784612207163780:2797], recipient# [3:7486784612207163756:2320], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:00.964721Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784612207163781:2798], recipient# [3:7486784612207163758:2322], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:01.880378Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486784595027293590:2292];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:01.880459Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:01.893922Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784595027293654:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:01.894061Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784595027293654:2149], cacheItem# { Subscriber: { Subscriber: [3:7486784599322261667:2683] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:01.894144Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784616502131094:2799], recipient# [3:7486784616502131093:2323], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:01.972996Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784595027293654:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true 
ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:01.973152Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784595027293654:2149], cacheItem# { Subscriber: { Subscriber: [3:7486784612207163761:2796] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:01.973266Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784616502131101:2805], recipient# [3:7486784616502131100:2324], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |79.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |79.6%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 9839, MsgBus: 62014 2025-03-28T09:08:00.935886Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784613059471355:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:00.938767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b23/r3tmp/tmpfwoVwn/pdisk_1.dat 2025-03-28T09:08:01.583094Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:01.588946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:01.589621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:01.593325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9839, node 1 2025-03-28T09:08:01.729425Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:01.729451Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:01.729457Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:01.729601Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62014 TClient is connected to server localhost:62014 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:02.699527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId [GOOD] Test command err: Trying to start YDB, gRPC: 13061, MsgBus: 23985 2025-03-28T09:08:00.892488Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784614634898767:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:00.898754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b09/r3tmp/tmp6E4QFm/pdisk_1.dat 2025-03-28T09:08:01.380300Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:01.402954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:01.403054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:01.405226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13061, node 1 2025-03-28T09:08:01.583326Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:01.583349Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:01.583357Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:01.583471Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23985 TClient is connected to server localhost:23985 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:02.671129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-03-28T09:07:50.093721Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784572750602783:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:50.093790Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cec/r3tmp/tmpzMO3xs/pdisk_1.dat 2025-03-28T09:07:50.891203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:50.891298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:50.899366Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:50.909162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26182 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:07:51.121595Z node 1 :TX_PROXY DEBUG: actor# [1:7486784572750603008:2115] Handle TEvNavigate describe path dc-1 2025-03-28T09:07:51.121640Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784577045570799:2447] HANDLE EvNavigateScheme dc-1 2025-03-28T09:07:51.123462Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784572750603032:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:07:51.123703Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784577045570782:2442][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784572750603032:2128], cookie# 1 2025-03-28T09:07:51.125515Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784577045570786:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784577045570783:2442], cookie# 1 2025-03-28T09:07:51.125562Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784577045570787:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784577045570784:2442], cookie# 1 2025-03-28T09:07:51.125578Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784577045570788:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784577045570785:2442], cookie# 1 2025-03-28T09:07:51.125610Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784568455635390:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784577045570786:2442], cookie# 1 2025-03-28T09:07:51.125732Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784568455635396:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784577045570788:2442], cookie# 1 2025-03-28T09:07:51.125810Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784568455635393:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784577045570787:2442], cookie# 1 2025-03-28T09:07:51.130863Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784577045570786:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784568455635390:2050], cookie# 1 2025-03-28T09:07:51.130911Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784577045570788:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784568455635396:2056], cookie# 1 2025-03-28T09:07:51.130927Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784577045570787:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784568455635393:2053], cookie# 1 2025-03-28T09:07:51.130960Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784577045570782:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784577045570783:2442], cookie# 1 2025-03-28T09:07:51.130984Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784577045570782:2442][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:07:51.130999Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784577045570782:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7486784577045570785:2442], cookie# 1 2025-03-28T09:07:51.131023Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784577045570782:2442][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:07:51.131054Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784577045570782:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784577045570784:2442], cookie# 1 2025-03-28T09:07:51.131069Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784577045570782:2442][/dc-1] Unexpected sync response: sender# [1:7486784577045570784:2442], cookie# 1 2025-03-28T09:07:51.131129Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784572750603032:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:07:51.140342Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784572750603032:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784577045570782:2442] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:07:51.140443Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784572750603032:2128], cacheItem# { Subscriber: { Subscriber: [1:7486784577045570782:2442] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-28T09:07:51.141676Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784577045570801:2449], recipient# [1:7486784577045570799:2447], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:07:51.141723Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784577045570799:2447] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:07:51.173356Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784577045570799:2447] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:07:51.175846Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784577045570799:2447] Handle TEvDescribeSchemeResult Forward to# [1:7486784577045570798:2446] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
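The SCHEME_BOARD_SUBSCRIBER exchange above shows the majority-quorum sync used during path resolution: the main subscriber fans TEvSyncVersionRequest out to three replica subscribers, declares the sync done as soon as a strict majority has answered (size# 3, half# 1, done at successes# 2), and discards the late third reply as an "Unexpected sync response". Below is a minimal C++ sketch of that counting logic as it appears in the log; the names and the exact failure condition are illustrative assumptions, not YDB's actual implementation.

// Sketch of the quorum counter visible in the log lines above
// ("size# 3, half# 1, successes# 2"). Illustrative only, not YDB code.
#include <cstddef>
#include <iostream>

struct SyncQuorum {
    std::size_t size;            // number of replicas asked (size# in the log)
    std::size_t successes = 0;   // successes# in the log
    std::size_t failures  = 0;   // faulires# in the log (sic)
    bool done = false;

    std::size_t Half() const { return size / 2; }   // half# in the log

    // Returns true if this response completed the sync.
    bool OnResponse(bool ok) {
        if (done) {
            // Matches "Unexpected sync response": a reply arriving
            // after the quorum was already reached is ignored.
            return false;
        }
        ok ? ++successes : ++failures;
        // Assumed rule: finished once a strict majority answered either way.
        if (successes > Half() || failures > Half()) {
            done = true;
            return true;
        }
        return false;
    }
};

int main() {
    SyncQuorum q{3};                    // three replica subscribers, as above
    q.OnResponse(true);                 // successes# 1 -> sync is in progress
    bool finished = q.OnResponse(true); // successes# 2 > half# 1 -> sync is done
    q.OnResponse(true);                 // late reply -> "unexpected", ignored
    std::cout << "done=" << finished
              << " successes=" << q.successes << '\n';  // done=1 successes=2
}

With size 3 and half 1 the subscriber never needs the third reply, which is why the log consistently reports "Sync is done ... successes# 2" followed by one ignored response.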
2025-03-28T09:07:51.203207Z node 1 :TX_PROXY DEBUG: actor# [1:7486784572750603008:2115] Handle TEvProposeTransaction 2025-03-28T09:07:51.203247Z node 1 :TX_PROXY DEBUG: actor# [1:7486784572750603008:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:07:51.203340Z node 1 :TX_PROXY DEBUG: actor# [1:7486784572750603008:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486784577045570806:2453] 2025-03-28T09:07:51.319698Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784577045570806:2453] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-28T09:07:51.319745Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784577045570806:2453] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:07:51.319807Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784577045570806:2453] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:07:51.319886Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784572750603032:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:1844674407370955161 ... tePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:01.684689Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784618754853764:2975], recipient# [3:7486784618754853741:2325], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:01.766883Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784601574983381:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:01.767043Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784601574983381:2127], cacheItem# { Subscriber: { Subscriber: [3:7486784605869951480:2702] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: 
KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:01.767146Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784618754853766:2976], recipient# [3:7486784618754853765:2326], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:02.690476Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784601574983381:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:02.690618Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784601574983381:2127], cacheItem# { Subscriber: { Subscriber: [3:7486784618754853744:2973] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:02.690713Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784623049821082:2980], recipient# [3:7486784623049821081:2327], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:02.751043Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486784601574983140:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:02.751116Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:02.773391Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784601574983381:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:02.773527Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784601574983381:2127], cacheItem# { Subscriber: { Subscriber: [3:7486784605869951480:2702] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist 
Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:02.773656Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784623049821086:2983], recipient# [3:7486784623049821085:2328], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:03.696201Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784601574983381:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:03.696349Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784601574983381:2127], cacheItem# { Subscriber: { Subscriber: [3:7486784618754853744:2973] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:03.696440Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784627344788404:2989], recipient# [3:7486784627344788403:2329], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:03.753997Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784601574983381:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:03.754166Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784601574983381:2127], cacheItem# { Subscriber: { Subscriber: [3:7486784605869951480:2702] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 
PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:03.754270Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784627344788406:2990], recipient# [3:7486784627344788405:2330], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:03.776539Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784601574983381:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:03.776678Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784601574983381:2127], cacheItem# { Subscriber: { Subscriber: [3:7486784605869951480:2702] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:03.776759Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784627344788408:2991], recipient# [3:7486784627344788407:2331], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:08:02.116978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:08:02.117108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
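The TX_PROXY_SCHEME_CACHE entries above illustrate negative caching: once a subscriber delivers TEvNotifyDelete for a path, the cache item is marked Filled with StatusPathDoesNotExist, and every later TEvNavigateKeySet for that path (here the .metadata/workload_manager and .metadata/initialization/migrations paths) is answered PathErrorUnknown straight from the cache, with no further round trip to the schemeshard. A minimal sketch of that pattern follows; TCacheItem and TSchemeCache here are invented for illustration and do not correspond to YDB's real types.

// Sketch of the negative-caching pattern in the TX_PROXY_SCHEME_CACHE
// lines above. Illustrative assumptions only, not YDB code.
#include <iostream>
#include <optional>
#include <string>
#include <unordered_map>

enum class EStatus { Ok, PathErrorUnknown };

struct TCacheItem {
    bool filled = false;   // Filled: 0/1 in the log
    bool exists = false;   // StatusSuccess vs. StatusPathDoesNotExist
};

class TSchemeCache {
    std::unordered_map<std::string, TCacheItem> Items;
public:
    // HandleNotify analogue: a subscriber reported the path as deleted,
    // so a *filled* negative entry is stored.
    void NotifyDelete(const std::string& path) {
        Items[path] = {/*filled=*/true, /*exists=*/false};
    }
    // FillEntry analogue: a filled item answers from the cache;
    // an unfilled or missing item would require a real resolve.
    std::optional<EStatus> Navigate(const std::string& path) const {
        auto it = Items.find(path);
        if (it == Items.end() || !it->second.filled)
            return std::nullopt;
        return it->second.exists ? EStatus::Ok : EStatus::PathErrorUnknown;
    }
};

int main() {
    TSchemeCache cache;
    cache.NotifyDelete("/dc-1/.metadata/initialization/migrations");
    auto st = cache.Navigate("/dc-1/.metadata/initialization/migrations");
    // Prints 1: the negative answer is served straight from the cache,
    // mirroring the repeated PathErrorUnknown results in the log.
    std::cout << (st && *st == EStatus::PathErrorUnknown) << '\n';
}

This is also why the METADATA_PROVIDER actor above times out ("cannot detect path existence ... error=timeout") during bring-up: the metadata paths genuinely do not exist yet, so the cache keeps returning the stored negative entry until the paths are created.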
2025-03-28T09:08:02.117150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:08:02.117201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:08:02.117248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:08:02.117278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:08:02.117336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:08:02.117423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:08:02.117752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:08:02.194748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:08:02.194820Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:02.208682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:08:02.208929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:08:02.209064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:08:02.214556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:08:02.214897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:08:02.215516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:02.215752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:08:02.218025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:02.219516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:08:02.219580Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:02.219766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:08:02.219847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:02.219890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:08:02.219983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:08:02.241481Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:08:02.605235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" 
} } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:08:02.605521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:02.605787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:08:02.606073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:08:02.606141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:02.619901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:02.620098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:08:02.620356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:02.620418Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:08:02.620454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:08:02.620494Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:08:02.623802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:02.623943Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:08:02.623979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:08:02.631503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:02.631574Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:02.631617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:02.631666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:08:02.636761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:08:02.643731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:08:02.644044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
1 at step: 5000001 2025-03-28T09:08:02.645270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:02.645440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:08:02.645493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:02.645808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:08:02.645869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:02.646050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:08:02.646135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:08:02.656169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:08:02.656269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:02.656483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:02.656537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:08:02.656994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:02.657046Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:08:02.657145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:08:02.657181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:02.657221Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:08:02.657262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:02.657304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:08:02.657374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:02.657422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:08:02.657453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:08:02.657540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:08:02.657597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:08:02.657635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 
3 2025-03-28T09:08:02.659659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:08:02.659782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:08:02.659823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... , at schemeshard: 72057594046678944, message: Source { RawX1: 605 RawX2: 4294969840 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2025-03-28T09:08:04.480293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0 2025-03-28T09:08:04.480461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 605 RawX2: 4294969840 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2025-03-28T09:08:04.480516Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:08:04.480607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 107:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 605 RawX2: 4294969840 } Origin: 72075186233409548 State: 2 TxId: 107 Step: 0 Generation: 2 2025-03-28T09:08:04.480678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:04.480726Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:08:04.480774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-03-28T09:08:04.480818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-03-28T09:08:04.486163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 608 RawX2: 4294969841 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-03-28T09:08:04.486234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 2 2025-03-28T09:08:04.486424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:2, at schemeshard: 72057594046678944, message: Source { RawX1: 608 RawX2: 4294969841 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-03-28T09:08:04.486471Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:08:04.486541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 107:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 608 RawX2: 4294969841 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2025-03-28T09:08:04.486591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:2, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:04.486621Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:2, at schemeshard: 72057594046678944 2025-03-28T09:08:04.486651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:2, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-03-28T09:08:04.568398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:2 129 -> 240 2025-03-28T09:08:04.571005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-28T09:08:04.579801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-28T09:08:04.580025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-28T09:08:04.580097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-28T09:08:04.580196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:08:04.587951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2025-03-28T09:08:04.588183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:08:04.588590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:08:04.588651Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 107:0 ProgressState 2025-03-28T09:08:04.588763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 2/3 2025-03-28T09:08:04.588803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2025-03-28T09:08:04.588844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 2/3 2025-03-28T09:08:04.588880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2025-03-28T09:08:04.588936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 2/3, is published: true 2025-03-28T09:08:04.589203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2025-03-28T09:08:04.589563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2025-03-28T09:08:04.589594Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 107:2 ProgressState 2025-03-28T09:08:04.589660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:2 progress is 3/3 2025-03-28T09:08:04.589683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-03-28T09:08:04.589705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:2 progress is 3/3 2025-03-28T09:08:04.589728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-03-28T09:08:04.589752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 3/3, is 
published: true 2025-03-28T09:08:04.589814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:480:2428] message: TxId: 107 2025-03-28T09:08:04.589851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2025-03-28T09:08:04.589937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-03-28T09:08:04.589967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-03-28T09:08:04.590067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:08:04.590097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:1 2025-03-28T09:08:04.590112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:1 2025-03-28T09:08:04.590130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-28T09:08:04.590145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:2 2025-03-28T09:08:04.590168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:2 2025-03-28T09:08:04.590206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-03-28T09:08:04.593094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-03-28T09:08:04.593146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:536:2484] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-03-28T09:08:04.597336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "Table7" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value0" Type: "Utf8" } Columns { Name: "Value1" Type: "Utf8" } Columns { Name: "Value2" Type: "Utf8" } Columns { Name: "Value3" Type: "Utf8" } Columns { Name: "Value4" Type: "Utf8" } KeyColumnNames: "RowId" } IndexDescription { Name: "UserDefinedIndexByValue0" KeyColumnNames: "Value0" } IndexDescription { Name: "UserDefinedIndexByValue1" KeyColumnNames: "Value1" } IndexDescription { Name: "UserDefinedIndexByValue2" KeyColumnNames: "Value2" } IndexDescription { Name: "UserDefinedIndexByValue3" KeyColumnNames: "Value3" } IndexDescription { Name: "UserDefinedIndexByValue4" KeyColumnNames: "Value4" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:08:04.597976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/USER_0/Table7 domain path id: [OwnerId: 72057594046678944, LocalPathId: 2] domain path: /MyRoot/USER_0 shardsToCreate: 6 GetShardsInside: 4 MaxShards: 7 2025-03-28T09:08:04.598112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 108:0, explain: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2025-03-28T09:08:04.598185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children 
limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2025-03-28T09:08:04.601126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusResourceExhausted Reason: "indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:08:04.601370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/USER_0/Table7 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-03-28T09:08:04.601917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-03-28T09:08:04.601968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-03-28T09:08:04.602585Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-03-28T09:08:04.602718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-03-28T09:08:04.671182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:734:2649] TestWaitNotification: OK eventTxId 108 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessGlobalIndex [GOOD] Test command err: 2025-03-28T09:04:51.419894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:51.420182Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:51.420245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0010e9/r3tmp/tmpj1uBnZ/pdisk_1.dat 2025-03-28T09:04:51.828572Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8000, node 1 2025-03-28T09:04:52.115427Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:52.115485Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:52.115516Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:52.115933Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:04:52.118207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:52.205092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:52.205216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:52.224211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26409 2025-03-28T09:04:52.878995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:56.677354Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:04:56.712587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:56.712695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:56.764828Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:04:56.767742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:57.036756Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:57.037198Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:57.037592Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:57.037679Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:57.037861Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:57.037925Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:57.037967Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:57.038024Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:57.038080Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:57.217363Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:57.217487Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:57.234071Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:57.390344Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:57.455060Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:04:57.455172Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:04:57.552149Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:04:57.552326Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:04:57.552541Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:04:57.552609Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:04:57.552666Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:04:57.552727Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:04:57.552778Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:04:57.552827Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:04:57.553378Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:04:57.585845Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:04:57.585958Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:04:57.593835Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:04:57.600357Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:04:57.600572Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:04:57.611966Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-28T09:04:57.634438Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:04:57.634504Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:04:57.634574Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-28T09:04:57.649426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:04:57.662651Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:04:57.662886Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:04:57.835781Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:04:58.063474Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:04:58.147350Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:04:58.936146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:04:59.677839Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:59.918031Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-28T09:04:59.918102Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:04:59.918210Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2597:2951], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:04:59.919348Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2598:2952] 2025-03-28T09:04:59.919491Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2598:2952], schemeshard id = 72075186224037899 2025-03-28T09:05:01.174124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2725:3245], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:01.174272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:01.192765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-03-28T09:05:01.596485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2957:3291], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:01.643261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:01.644932Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2962:3295]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:05:01.645216Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:05:01.645455Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-03-28T09:05:01.645524Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2965:3298] 2025-03-28T09:05:01.645591Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2965:3298] 2025-03-28T09:05:01.646225Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2966:3138] 2025-03-28T09:05:01.646592Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2965:3298], server id = [2:2966:3138], tablet id = 72075186224037894, status = OK 2025-03-28T09:05:01.646846Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2966:3138], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:05:01.646921Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:05:01.647292Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:05:01.647384Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2962:3295], StatRequests.size() = 1 2025-03-28T09:05:01.656203Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2999:3307]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:05:01.656408Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] Re ... .345237Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:07:53.409749Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:7842:5565]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:53.410074Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-03-28T09:07:53.410124Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:7842:5565], StatRequests.size() = 1 2025-03-28T09:07:54.967657Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:54.967761Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:07:54.967849Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-28T09:07:54.967909Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:07:54.968321Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-28T09:07:55.018934Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:07:55.028381Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7880:5592], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:55.028559Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7890:5597], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:55.028787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:55.065791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-28T09:07:55.175146Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7894:5600], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-28T09:07:55.284878Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:7989:5648]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:55.285322Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-03-28T09:07:55.285397Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:7989:5648], StatRequests.size() = 1 2025-03-28T09:07:55.402238Z node 2 :TX_PROXY ERROR: Actor# [2:7994:5650] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:55.596749Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:8023:5665]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:55.597067Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-03-28T09:07:55.597352Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-03-28T09:07:55.597435Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-28T09:07:55.597650Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:07:55.597739Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:8023:5665], StatRequests.size() = 1 2025-03-28T09:07:55.772771Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YWMxZjllNDItMmQ0OTZiMzYtOTUzZThiMmMtMjliMGRhOGU=, TxId: 2025-03-28T09:07:55.772861Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWMxZjllNDItMmQ0OTZiMzYtOTUzZThiMmMtMjliMGRhOGU=, TxId: 2025-03-28T09:07:55.773594Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:07:55.792760Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:07:55.792861Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-03-28T09:07:55.843616Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:07:55.843694Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:07:55.938655Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3236:3184], schemeshard count = 1 2025-03-28T09:07:56.343741Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037899 2025-03-28T09:07:56.343843Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 212.000000s, at schemeshard: 72075186224037899 2025-03-28T09:07:56.344101Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 50 2025-03-28T09:07:56.363740Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:07:57.518199Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:8093:5711]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:57.518557Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-28T09:07:57.518608Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:8093:5711], StatRequests.size() = 1 2025-03-28T09:07:59.081178Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:07:59.094305Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:07:59.094364Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:07:59.094407Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 4] is data table. 2025-03-28T09:07:59.094444Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 4] 2025-03-28T09:07:59.094758Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-28T09:07:59.097474Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:07:59.145965Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODk0ZDA4YTUtZGY2NzcwZTctMzJkYWY5ZWEtOTRmNDAyZjg=, TxId: 2025-03-28T09:07:59.146074Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODk0ZDA4YTUtZGY2NzcwZTctMzJkYWY5ZWEtOTRmNDAyZjg=, TxId: 2025-03-28T09:07:59.146867Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:07:59.164033Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 4] 2025-03-28T09:07:59.164096Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-03-28T09:07:59.238444Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:8162:5754]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:07:59.238746Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-03-28T09:07:59.238792Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:8162:5754], StatRequests.size() = 1 2025-03-28T09:08:00.975446Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:8216:5786]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:08:00.975836Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-03-28T09:08:00.975887Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:8216:5786], StatRequests.size() = 1 2025-03-28T09:08:02.435446Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-03-28T09:08:02.435642Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:08:02.436130Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:08:02.449395Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:08:02.449462Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:08:02.449502Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-03-28T09:08:02.449536Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-28T09:08:02.449839Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-28T09:08:02.452797Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:08:02.475359Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:06.000000Z, event interval end# 2025-03-28T09:08:00.000000Z 2025-03-28T09:08:02.475479Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2QwMDgzNmQtODk1OTY2OWUtMTQyZTZjM2ItZGNiNjJmODE=, TxId: 2025-03-28T09:08:02.475520Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2QwMDgzNmQtODk1OTY2OWUtMTQyZTZjM2ItZGNiNjJmODE=, TxId: 2025-03-28T09:08:02.476039Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:08:02.496040Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-28T09:08:02.496398Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-03-28T09:08:02.625120Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:8280:5824]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:08:02.625456Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-03-28T09:08:02.625502Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:8280:5824], StatRequests.size() = 1 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 >> TCdcStreamTests::Negative [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> DataShardReadIterator::ShouldReadFromHead [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 >> TCdcStreamTests::TopicPartitions [GOOD] >> TCdcStreamTests::ReplicationAttribute >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> TCdcStreamTests::DisableProtoSourceIdInfo [GOOD] >> TCdcStreamTests::CreateStream |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |79.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] >> StatisticsSaveLoad::Simple >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] >> StatisticsSaveLoad::ForbidAccess >> BasicStatistics::TwoServerlessDbs [GOOD] >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch >> TTxDataShardMiniKQL::CrossShard_5_AllToAll [GOOD] >> TTxDataShardMiniKQL::CrossShard_6_Local |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] 
sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:08:08.860160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:08:08.860278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:08:08.860344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:08:08.860386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:08:08.860444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:08:08.860476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:08:08.860537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:08:08.860627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:08:08.860952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:08:09.025051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:08:09.025214Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:09.059495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:08:09.059909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:08:09.060161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:08:09.069000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:08:09.069292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:08:09.070132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:09.070363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:08:09.072727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:09.074434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:08:09.074509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:09.074842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:08:09.074909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:09.074958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 
2025-03-28T09:08:09.075072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:08:09.088268Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:08:09.231493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:08:09.231808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:09.232078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:08:09.232397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:08:09.232463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:09.238631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:09.238854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:08:09.239122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:09.239225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:08:09.239275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:08:09.239326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:08:09.242206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:09.242288Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:08:09.242328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:08:09.244658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:09.244714Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:09.244761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:09.244854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:08:09.254547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 
MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:08:09.257489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:08:09.257780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:08:09.258956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:09.259139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:08:09.259192Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:09.259443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:08:09.259506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:09.259672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:08:09.259780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:08:09.262088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:08:09.262134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:09.262336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:09.262378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:08:09.262765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:09.262815Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:08:09.262909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:08:09.262943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:09.262978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:08:09.263038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:09.263077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:08:09.263120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2025-03-28T09:08:09.263167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:08:09.263200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:08:09.263274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:08:09.263309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:08:09.263345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:08:09.265309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:08:09.265416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:08:09.265452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 4 2025-03-28T09:08:09.970986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:08:09.971198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:08:09.971261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:08:09.971317Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:08:09.971359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:08:09.971446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /data_01.csv.zst HTTP/1.1 HEADERS: Host: localhost:12386 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2913A848-FCF7-4EBB-99AA-6FE8895D1EA0 amz-sdk-request: attempt=1 content-length: 20 content-md5: 8NOHH1ycwPXC5K+v+37u8g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv.zst / / 20 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:12386 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 21E5C2EA-61B9-4F92-8509-47D5AA137EDF amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-03-28T09:08:09.975321Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata 
TEvExternalStorage::TEvPutObjectResponse: self# [1:472:2430], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-03-28T09:08:09.977390Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:479:2435], result# PutObjectResult { ETag: f0d3871f5c9cc0f5c2e4afaffb7eeef2 } 2025-03-28T09:08:09.977455Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:479:2435], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-28T09:08:09.977992Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:478:2434], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:12386 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C991BC5C-3DBA-41A7-86A8-62EE30563C95 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2025-03-28T09:08:09.988215Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:472:2430], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-03-28T09:08:09.988415Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:471:2429] 2025-03-28T09:08:09.988571Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:472:2430], sender# [1:471:2429], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-03-28T09:08:09.989080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:12386 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9EC3F6A5-D440-4325-9202-AEDB7C51AAFD amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-03-28T09:08:09.994140Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:472:2430], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-03-28T09:08:09.994201Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:472:2430], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-28T09:08:09.994387Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:471:2429], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-28T09:08:10.020824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:08:10.020919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:08:10.021096Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:08:10.021212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 318 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:08:10.021302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:10.021479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:10.022055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:08:10.022098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-28T09:08:10.022208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:08:10.022300Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:08:10.022344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:10.022377Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:08:10.022410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:08:10.022447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:08:10.022488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:08:10.022606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 
102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:10.026316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:08:10.028009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:08:10.028229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:08:10.028281Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:08:10.028413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:08:10.028447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:08:10.028500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:08:10.028533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:08:10.028569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:08:10.028675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:370:2336] message: TxId: 102 2025-03-28T09:08:10.028729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:08:10.028778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:08:10.028823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:08:10.028980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:08:10.033423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:08:10.033491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:454:2413] TestWaitNotification: OK eventTxId 102 >> TCdcStreamTests::CreateStream [GOOD] >> TCdcStreamTests::AlterStream >> TCdcStreamTests::ReplicationAttribute [GOOD] >> TCdcStreamTests::RebootSchemeShard >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] >> HttpRequest::ProbeServerless [GOOD] |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessDbs [GOOD] Test command err: 2025-03-28T09:04:57.938720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:57.939007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:57.939078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001086/r3tmp/tmpi0RSzR/pdisk_1.dat 2025-03-28T09:04:58.400591Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9330, node 1 2025-03-28T09:04:58.695306Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:58.695367Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:58.695401Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:58.695973Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:04:58.698905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:58.815035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:58.815163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:58.834316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23695 2025-03-28T09:04:59.417115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:05:02.736872Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:05:02.768442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:02.768601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:02.823749Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:05:02.827559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:03.068676Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:03.069390Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:03.070028Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:03.070285Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:03.070569Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:03.070659Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:03.070994Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:03.071141Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:03.071277Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:03.235956Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:03.236100Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:03.251439Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:03.419998Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:03.475865Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:05:03.475976Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:05:03.518308Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:05:03.520686Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:05:03.520928Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:05:03.520990Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:05:03.521056Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:05:03.521130Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:05:03.521205Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:05:03.521291Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:05:03.521854Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:05:03.550624Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1859:2587] 2025-03-28T09:05:03.551526Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-28T09:05:03.563532Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:05:03.563595Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:05:03.563671Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-28T09:05:03.567365Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:05:03.567515Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1895:2609], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:05:03.579945Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1935:2627] 2025-03-28T09:05:03.580336Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1935:2627], schemeshard id = 72075186224037897 2025-03-28T09:05:03.620117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:05:03.628357Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:05:03.628522Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:05:03.808863Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:05:04.062800Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:05:04.157544Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:05:04.849710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:05:05.637679Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:05.870534Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-28T09:05:05.870622Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:05:05.870740Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2602:2954], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:05:05.871992Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2603:2955] 2025-03-28T09:05:05.872185Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2603:2955], schemeshard id = 72075186224037899 2025-03-28T09:05:06.919343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:05:07.511592Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:07.764605Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-03-28T09:05:07.764682Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037905 2025-03-28T09:05:07.764788Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3077:3150], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037905 2025-03-28T09:05:07.767524Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3086:3155] 2025-03-28T09:05:07.767679Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:3086:3155], schemeshard id = 72075186224037905 2025-03-28T09:05:09.223004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3201:3405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.223190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.242101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-03-28T09:05:09.499163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3352:3440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.499456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:09.500952Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3357:3444]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:05:09.501262Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:05:09.501423Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-03-28T09:05:09.501497Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3360:3447] 2025-03-28T09:05:09.501571Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:33 ... 4037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:08:00.925375Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-28T09:08:00.941332Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:08:00.948007Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8952:6416], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:00.948127Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8962:6421], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:00.948244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:00.970325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-28T09:08:01.140680Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:8966:6424], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-28T09:08:01.334067Z node 2 :TX_PROXY ERROR: Actor# [2:9061:6472] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:01.437129Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 118 ], ReplyToActorId[ [2:9090:6487]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:08:01.437495Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 118 ] 2025-03-28T09:08:01.437787Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-03-28T09:08:01.437863Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-28T09:08:01.438109Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:08:01.438209Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 118, ReplyToActorId = [2:9090:6487], StatRequests.size() = 1 2025-03-28T09:08:01.646462Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmE5YTgxNDktYWJiOTAwNWMtMTY0NzE5MTQtZWEzYzdkMTg=, TxId: 2025-03-28T09:08:01.646559Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmE5YTgxNDktYWJiOTAwNWMtMTY0NzE5MTQtZWEzYzdkMTg=, TxId: 2025-03-28T09:08:01.660028Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:08:01.680858Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:08:01.680943Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-03-28T09:08:01.731524Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:08:01.731602Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:08:01.819531Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:3716:3401], schemeshard count = 1 2025-03-28T09:08:02.219208Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-03-28T09:08:02.219283Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 185.000000s, at schemeshard: 72075186224037899 2025-03-28T09:08:02.219595Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-03-28T09:08:02.239297Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:08:02.464465Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [2:9131:6513]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:08:02.465111Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-03-28T09:08:02.465181Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [2:9131:6513], StatRequests.size() = 1 2025-03-28T09:08:04.066090Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [2:9182:6545]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:08:04.066509Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-03-28T09:08:04.066558Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [2:9182:6545], StatRequests.size() = 1 2025-03-28T09:08:04.600473Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2025-03-28T09:08:04.600534Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 202.000000s, at schemeshard: 72075186224037905 2025-03-28T09:08:04.600711Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 26 2025-03-28T09:08:04.620325Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:08:04.895636Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:08:04.910374Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:08:04.910451Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:08:04.910497Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-03-28T09:08:04.910528Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-28T09:08:04.910985Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-03-28T09:08:04.913631Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:08:04.948790Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2UwYjJlNDEtYjg5ZjZjZS02MzA4MWRjZC1jMzAzZjQ5YQ==, TxId: 2025-03-28T09:08:04.948865Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2UwYjJlNDEtYjg5ZjZjZS02MzA4MWRjZC1jMzAzZjQ5YQ==, TxId: 2025-03-28T09:08:04.949538Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:08:04.965947Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-28T09:08:04.966009Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:08:05.710265Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [2:9262:6599]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:08:05.710745Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-28T09:08:05.710797Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [2:9262:6599], StatRequests.size() = 1 2025-03-28T09:08:07.513526Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [2:9318:6632]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:08:07.513970Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-03-28T09:08:07.514023Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [2:9318:6632], StatRequests.size() = 1 2025-03-28T09:08:08.302299Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 3 2025-03-28T09:08:08.302551Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:08:08.303032Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:08:08.314561Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:08:08.314624Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:08:08.314664Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is data table. 2025-03-28T09:08:08.314714Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037905, LocalPathId: 2] 2025-03-28T09:08:08.315148Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-03-28T09:08:08.317926Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:08:08.341899Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:06.000000Z, event interval end# 2025-03-28T09:08:06.000000Z 2025-03-28T09:08:08.342171Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzMxY2Y3NmMtZGFiMTA5MDYtZmUwYTMxNmQtZWFiZGYyYw==, TxId: 2025-03-28T09:08:08.342224Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzMxY2Y3NmMtZGFiMTA5MDYtZmUwYTMxNmQtZWFiZGYyYw==, TxId: 2025-03-28T09:08:08.342825Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:08:08.366380Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037905, LocalPathId: 2] 2025-03-28T09:08:08.366446Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:08:09.242932Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [2:9393:6681]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:08:09.243288Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-03-28T09:08:09.243335Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [2:9393:6681], StatRequests.size() = 1 2025-03-28T09:08:09.244255Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [2:9395:6683]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:08:09.262518Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-03-28T09:08:09.262608Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [2:9395:6683], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:08:11.112654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:08:11.112745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:08:11.112805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:08:11.112840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:08:11.112900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:08:11.112947Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:08:11.113011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:08:11.113097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:08:11.113405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:08:11.195590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:08:11.195653Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:11.210489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:08:11.210771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:08:11.210949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:08:11.216539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:08:11.216753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:08:11.217348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:11.217545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:08:11.219532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:11.220814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:08:11.220880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:11.221079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:08:11.221130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:11.221167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:08:11.221264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:08:11.227751Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:08:11.381107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:08:11.381536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:11.381894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:08:11.382268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:08:11.382364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:11.388369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:11.388582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:08:11.388854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:11.388940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:08:11.388990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:08:11.389048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:08:11.395832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:11.395939Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:08:11.395992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:08:11.403126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:11.403218Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:11.403300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:11.403384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:08:11.430070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:08:11.434417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:08:11.434711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:08:11.435618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:11.435755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 
72057594046678944 2025-03-28T09:08:11.435795Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:11.436001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:08:11.436035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:11.436188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:08:11.436259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:08:11.441947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:08:11.442006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:11.442144Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:11.442174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:08:11.442501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:11.442544Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:08:11.442620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:08:11.442644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:11.442671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:08:11.442730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:11.442769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:08:11.442816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:11.442848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:08:11.442877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:08:11.442930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:08:11.442958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:08:11.442982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:08:11.444409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:08:11.444496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 
72057594046678944, cookie: 1 2025-03-28T09:08:11.444523Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:08:11.914573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-28T09:08:11.914743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:08:11.920436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-28T09:08:11.920589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-28T09:08:11.921024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:11.921151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:08:11.921211Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:08:11.921336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-28T09:08:11.921468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:08:11.939452Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:411:2382], attempt# 0 2025-03-28T09:08:12.016716Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:411:2382], sender# [1:410:2381] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:5613 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DCEA20B8-D01D-48F9-97FD-B3752B1FBD91 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-03-28T09:08:12.058982Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-28T09:08:12.061991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:08:12.062034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:08:12.062436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:12.062484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:08:12.063104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:08:12.063180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:08:12.063984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:08:12.064072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:08:12.064105Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:08:12.064135Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:08:12.065596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:08:12.065694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:5613 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C9721872-469F-44D1-83DB-EC578DDD9C26 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-03-28T09:08:12.071403Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-03-28T09:08:12.071516Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:410:2381] 2025-03-28T09:08:12.071671Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:411:2382], sender# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-03-28T09:08:12.073154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:5613 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3D1DEB1A-BF9A-49C6-8B3B-2977B29A209F amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD 
S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2025-03-28T09:08:12.079964Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2025-03-28T09:08:12.080009Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:411:2382], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-28T09:08:12.080135Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-28T09:08:12.104145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:08:12.104206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:08:12.104361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:08:12.104445Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:08:12.104532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:12.104563Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:08:12.104598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:08:12.104630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:08:12.104751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:12.115222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:08:12.116159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:08:12.116200Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:08:12.116301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:08:12.116333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone 
TxId: 102 ready parts: 1/1 2025-03-28T09:08:12.116386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:08:12.116412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:08:12.116444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:08:12.117887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-03-28T09:08:12.117927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:08:12.117958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:08:12.117985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:08:12.118085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:08:12.120061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:08:12.120115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:396:2368] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] Test command err: 2025-03-28T09:07:35.861163Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:07:35.871006Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:07:35.871602Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:07:35.871883Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:07:35.928926Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:07:36.022610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:07:36.022666Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:36.044929Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:07:36.046295Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:07:36.048056Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:07:36.048135Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:07:36.048189Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:07:36.048644Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:07:36.048746Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:07:36.048820Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:07:36.112232Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:07:36.154928Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:07:36.155144Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: 
missing processing params 2025-03-28T09:07:36.155253Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:07:36.155292Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:07:36.155331Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:07:36.155375Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:36.155646Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:36.155724Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:36.156008Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:07:36.156139Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:07:36.156205Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:36.156253Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:07:36.156291Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:07:36.156327Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:07:36.156365Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:07:36.156406Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:07:36.156450Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:07:36.156547Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:36.156597Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:36.156651Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:07:36.159435Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:07:36.159525Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:07:36.159622Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:07:36.159795Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:07:36.159842Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:07:36.159898Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:07:36.159980Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:36.160065Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 
2025-03-28T09:07:36.160102Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:07:36.160140Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:07:36.160453Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:07:36.160491Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:07:36.160529Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:07:36.160563Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:07:36.160604Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:07:36.160630Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:07:36.160681Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:07:36.160721Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:07:36.160756Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:07:36.173413Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:07:36.173485Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:07:36.173522Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:07:36.173560Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:07:36.173667Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:07:36.174236Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:36.174272Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:36.174313Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:07:36.174377Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:07:36.174402Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:07:36.174526Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:07:36.174572Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:07:36.174603Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:07:36.174629Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:07:36.179011Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:07:36.179082Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
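
[editor's note] The trace above walks one scheme transaction through the datashard's chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, then PlanQueue and LoadTxDetails once the plan step arrives). As a rough illustration only — the type and field names below are hypothetical, not YDB's actual classes — the advancing loop that produces the "Trying to execute ... / Execution status ... / Advance execution plan ..." triples can be sketched like this:

    // Hypothetical sketch of an execution-unit pipeline, modeled on the
    // statuses visible in the trace (Executed, ExecutedNoMoreRestarts,
    // DelayComplete, NotReady). Not YDB's real implementation.
    #include <deque>
    #include <functional>
    #include <string>
    #include <vector>

    enum class EStatus { Executed, ExecutedNoMoreRestarts, DelayComplete, NotReady };

    struct TUnit {
        std::string Name;
        std::function<EStatus()> Execute;  // "Trying to execute ... on unit <Name>"
    };

    struct TOperation {
        std::deque<TUnit> Plan;                        // e.g. CheckSchemeTx -> StoreSchemeTx -> ...
        std::vector<std::string> DelayedCompletions;   // units finished later, in ::Complete
    };

    // Advance the operation until a unit reports NotReady (e.g. WaitForPlan
    // before the plan step arrives) or the plan is exhausted.
    void Advance(TOperation& op) {
        while (!op.Plan.empty()) {
            TUnit unit = op.Plan.front();
            EStatus st = unit.Execute();
            if (st == EStatus::NotReady)
                break;                                  // re-entered when an external event arrives
            if (st == EStatus::DelayComplete)
                op.DelayedCompletions.push_back(unit.Name);
            op.Plan.pop_front();                        // "Advance execution plan ... executing on unit <Name>"
        }
    }

A unit that returns DelayComplete (StoreSchemeTx and FinishPropose above) is only reported as "Complete execution for ..." once the enclosing local transaction commits, which is why those lines appear after TTxProposeTransactionBase::Complete in the trace.
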
2025-03-28T09:07:36.179300Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:36.179351Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:36.179454Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:36.179506Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:07:36.179544Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:07:36.179587Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:07:36.179625Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:07:36.179673Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:07:36.179734Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:07:36.179785Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:07:36.179820Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:07:36.180068Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:07:36.180112Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:07:36.180138Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:07:36.180162Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:07:36.180186Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:07:36.180254Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:36.180283Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:07:36.180316Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:07:36.180352Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:07:36.180424Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:07:36.180466Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:07:36.180504Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:07:36.180557Z node 1 :TX_DATA ... 
4MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\231\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1002 ExecLevel: 0 Flags: 0 2025-03-28T09:08:11.794226Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:08:11.794321Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:08:11.795430Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CheckDataTx 2025-03-28T09:08:11.795490Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-03-28T09:08:11.795521Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CheckDataTx 2025-03-28T09:08:11.795566Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:08:11.795596Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:08:11.795636Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-28T09:08:11.795690Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1002] at 9437184 2025-03-28T09:08:11.795725Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-03-28T09:08:11.795750Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-28T09:08:11.795772Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit ExecuteDataTx 2025-03-28T09:08:11.795797Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit ExecuteDataTx 2025-03-28T09:08:11.795852Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-28T09:08:11.796197Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1002] at tablet 9437184 with status COMPLETE 2025-03-28T09:08:11.796244Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:1002] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:08:11.796295Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:08:11.796325Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit ExecuteDataTx 2025-03-28T09:08:11.796372Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit FinishPropose 2025-03-28T09:08:11.796399Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit FinishPropose 2025-03-28T09:08:11.796433Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 
9437184 is DelayComplete 2025-03-28T09:08:11.796460Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit FinishPropose 2025-03-28T09:08:11.796487Z node 3 :TX_DATASHARD TRACE: Add [0:1002] at 9437184 to execution unit CompletedOperations 2025-03-28T09:08:11.796515Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1002] at 9437184 on unit CompletedOperations 2025-03-28T09:08:11.796555Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1002] at 9437184 is Executed 2025-03-28T09:08:11.796577Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1002] at 9437184 executing on unit CompletedOperations 2025-03-28T09:08:11.796602Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1002] at 9437184 has finished 2025-03-28T09:08:11.820414Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:08:11.820485Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1002] at 9437184 on unit FinishPropose 2025-03-28T09:08:11.820524Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1002 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-03-28T09:08:11.820606Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 .2025-03-28T09:08:11.827004Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-03-28T09:08:11.827063Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-03-28T09:08:11.827986Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4544:6463], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:08:11.828028Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:08:11.828067Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4543:6462], serverId# [3:4544:6463], sessionId# [0:0:0] 2025-03-28T09:08:11.828555Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\265\002\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\235\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1003 ExecLevel: 0 Flags: 0 2025-03-28T09:08:11.828595Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:08:11.828682Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:08:11.829196Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CheckDataTx 2025-03-28T09:08:11.829251Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-03-28T09:08:11.829280Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit 
CheckDataTx 2025-03-28T09:08:11.829313Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:08:11.829343Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:08:11.829401Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-28T09:08:11.829462Z node 3 :TX_DATASHARD TRACE: Activated operation [0:1003] at 9437184 2025-03-28T09:08:11.829494Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-03-28T09:08:11.829521Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-28T09:08:11.829545Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit ExecuteDataTx 2025-03-28T09:08:11.829569Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit ExecuteDataTx 2025-03-28T09:08:11.829608Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-28T09:08:11.829922Z node 3 :TX_DATASHARD TRACE: Executed operation [0:1003] at tablet 9437184 with status COMPLETE 2025-03-28T09:08:11.829967Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:1003] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:08:11.830016Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:08:11.830042Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit ExecuteDataTx 2025-03-28T09:08:11.830073Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit FinishPropose 2025-03-28T09:08:11.830102Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit FinishPropose 2025-03-28T09:08:11.830132Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is DelayComplete 2025-03-28T09:08:11.830158Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit FinishPropose 2025-03-28T09:08:11.830184Z node 3 :TX_DATASHARD TRACE: Add [0:1003] at 9437184 to execution unit CompletedOperations 2025-03-28T09:08:11.830212Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:1003] at 9437184 on unit CompletedOperations 2025-03-28T09:08:11.830254Z node 3 :TX_DATASHARD TRACE: Execution status for [0:1003] at 9437184 is Executed 2025-03-28T09:08:11.830279Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:1003] at 9437184 executing on unit CompletedOperations 2025-03-28T09:08:11.830305Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:1003] at 9437184 has finished 2025-03-28T09:08:11.876523Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:08:11.876605Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2025-03-28T09:08:11.879191Z node 3 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:08:11.879253Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:1003] at 9437184 on unit FinishPropose 2025-03-28T09:08:11.879293Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 1003 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2025-03-28T09:08:11.879373Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:08:11.882326Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [3:229:2224], Recipient [3:232:2225]: NKikimr::TEvTablet::TEvFollowerGcApplied .2025-03-28T09:08:11.885795Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:4558:6476], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:08:11.885878Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:08:11.885934Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:4557:6475], serverId# [3:4558:6476], sessionId# [0:0:0] 2025-03-28T09:08:11.886642Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269553160, Sender [3:4556:6474], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetTableStats TableId: 13 { InMemSize: 0 LastAccessTime: 1719 LastUpdateTime: 1719 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless [GOOD] Test command err: 2025-03-28T09:05:00.843043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:05:00.843407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:05:00.843481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00106f/r3tmp/tmpao2SoU/pdisk_1.dat 2025-03-28T09:05:01.268585Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25763, node 1 2025-03-28T09:05:01.528513Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:01.528576Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:01.528610Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:01.529068Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:05:01.531574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:05:01.644547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:01.644692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:01.661550Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2634 2025-03-28T09:05:02.306060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:05:05.875858Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:05:05.923993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:05.924172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:05.980016Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:05:05.982578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:06.254211Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.254869Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.255569Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.255712Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.256005Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.256108Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.256209Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.256289Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.256368Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:05:06.455093Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:06.455215Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:06.476449Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:06.777080Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:06.902163Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:05:06.902305Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:05:07.017060Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:05:07.017314Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:05:07.017549Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:05:07.017628Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:05:07.017691Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:05:07.017749Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:05:07.017809Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:05:07.017870Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:05:07.018469Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:05:07.099399Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:05:07.099530Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:05:07.111524Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:05:07.121348Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:05:07.121596Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:05:07.130622Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-28T09:05:07.165493Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:05:07.165569Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:05:07.165663Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-28T09:05:07.202506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:05:07.223256Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:05:07.223448Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:05:07.466590Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:05:07.671556Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:05:07.820372Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:05:08.599952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:05:09.534585Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:09.774258Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-28T09:05:09.774325Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:05:09.774420Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2597:2950], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:05:09.775750Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2599:2952] 2025-03-28T09:05:09.775897Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2599:2952], schemeshard id = 72075186224037899 2025-03-28T09:05:11.163371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2723:3244], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:11.163563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:11.189826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-03-28T09:05:11.654665Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:05:11.655015Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:05:11.655336Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:05:11.655467Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:05:11.655577Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:05:11.655703Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:05:11.655822Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:05:11.655943Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:05:11.656096Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2879:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09: ... BUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-28T09:08:07.985662Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-28T09:08:07.985686Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-28T09:08:09.194254Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:08:09.194631Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:08:09.379817Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:08:09.379900Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=(O@](qN 2025-03-28T09:08:09.379968Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 
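
[editor's note] The statistics-aggregator trace that follows fans a single force traversal out to a set of column-shard tablets and only finishes once every TEvStatisticsResponse has arrived. A minimal sketch of that request/response bookkeeping — the function and member names here are placeholders, not the service's real interfaces — could look like:

    #include <cstdint>
    #include <unordered_set>
    #include <vector>

    // Hypothetical aggregation round: send one request per tablet, erase on
    // response, finish when the pending set drains (mirrors the paired
    // "TEvStatisticsRequest send" / "Received TEvStatisticsResponse" lines).
    struct TTraversalRound {
        std::unordered_set<uint64_t> Pending;

        void Start(const std::vector<uint64_t>& tablets) {
            for (uint64_t tabletId : tablets) {
                Pending.insert(tabletId);
                SendStatisticsRequest(tabletId);  // placeholder for the pipe send
            }
        }

        // Returns true when the last response arrives and the round can be
        // committed (the trace's TTxAggregateStatisticsResponse followed by
        // TTxFinishTraversal).
        bool OnResponse(uint64_t tabletId) {
            Pending.erase(tabletId);
            return Pending.empty();
        }

        void SendStatisticsRequest(uint64_t /*tabletId*/) { /* transport elided */ }
    };

Note in the trace that client-pipe destruction events ("EvClientDestroyed ... Skip EvClientDestroyed") arriving after a tablet has already answered are deliberately ignored rather than treated as failures of the round.
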
2025-03-28T09:08:10.823529Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:08:10.823666Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-03-28T09:08:10.823709Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-28T09:08:10.824468Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:08:10.844389Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:08:10.844964Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:08:10.845036Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:08:10.845594Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:08:10.874637Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:08:10.874884Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-28T09:08:10.876100Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9932:7453], server id = [2:9937:7458], tablet id = 72075186224037905, status = OK 2025-03-28T09:08:10.876208Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9932:7453], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:08:10.877364Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9933:7454], server id = [2:9938:7459], tablet id = 72075186224037906, status = OK 2025-03-28T09:08:10.877437Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9933:7454], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:08:10.877600Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9934:7455], server id = [2:9940:7461], tablet id = 72075186224037907, status = OK 2025-03-28T09:08:10.877658Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9934:7455], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:08:10.878808Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9935:7456], server id = [2:9939:7460], tablet id = 72075186224037908, status = OK 2025-03-28T09:08:10.878879Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9935:7456], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:08:10.879977Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9936:7457], server id = [2:9941:7462], tablet id = 72075186224037909, status = OK 2025-03-28T09:08:10.880042Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9936:7457], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:08:10.880833Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-28T09:08:10.881601Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-28T09:08:10.882246Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9932:7453], server id = [2:9937:7458], tablet id = 72075186224037905 2025-03-28T09:08:10.882280Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:08:10.882824Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-28T09:08:10.883016Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = 
[2:9933:7454], server id = [2:9938:7459], tablet id = 72075186224037906 2025-03-28T09:08:10.883048Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:08:10.883411Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-28T09:08:10.883543Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037909 2025-03-28T09:08:10.884220Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9934:7455], server id = [2:9940:7461], tablet id = 72075186224037907 2025-03-28T09:08:10.884254Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:08:10.884462Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9947:7468], server id = [2:9949:7470], tablet id = 72075186224037910, status = OK 2025-03-28T09:08:10.884535Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9947:7468], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:08:10.885288Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9948:7469], server id = [2:9950:7471], tablet id = 72075186224037911, status = OK 2025-03-28T09:08:10.885352Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9948:7469], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:08:10.885536Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9935:7456], server id = [2:9939:7460], tablet id = 72075186224037908 2025-03-28T09:08:10.885568Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:08:10.886223Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9936:7457], server id = [2:9941:7462], tablet id = 72075186224037909 2025-03-28T09:08:10.886269Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:08:10.886586Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9951:7472], server id = [2:9954:7475], tablet id = 72075186224037912, status = OK 2025-03-28T09:08:10.886651Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9951:7472], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:08:10.887497Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9952:7473], server id = [2:9955:7476], tablet id = 72075186224037913, status = OK 2025-03-28T09:08:10.887563Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9952:7473], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:08:10.888465Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9953:7474], server id = [2:9956:7477], tablet id = 72075186224037914, status = OK 2025-03-28T09:08:10.888527Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9953:7474], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:08:10.888686Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037910 2025-03-28T09:08:10.889715Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-03-28T09:08:10.890028Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9947:7468], server id = [2:9949:7470], tablet id = 72075186224037910 2025-03-28T09:08:10.890057Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:08:10.890452Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-03-28T09:08:10.890922Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9948:7469], server id = [2:9950:7471], tablet id = 
72075186224037911 2025-03-28T09:08:10.890956Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:08:10.891079Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037913 2025-03-28T09:08:10.891322Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9951:7472], server id = [2:9954:7475], tablet id = 72075186224037912 2025-03-28T09:08:10.891351Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:08:10.891440Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037914 2025-03-28T09:08:10.891485Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:08:10.891689Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:08:10.891848Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:08:10.892028Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9952:7473], server id = [2:9955:7476], tablet id = 72075186224037913 2025-03-28T09:08:10.892056Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:08:10.892259Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-28T09:08:10.894465Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9953:7474], server id = [2:9956:7477], tablet id = 72075186224037914 2025-03-28T09:08:10.894500Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:08:10.895429Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:08:10.921528Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2NlNTNmZTEtOGI1NTBhZGMtNGZjZGUzYWEtNDQ3ZDcyZDU=, TxId: 2025-03-28T09:08:10.921594Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2NlNTNmZTEtOGI1NTBhZGMtNGZjZGUzYWEtNDQ3ZDcyZDU=, TxId: 2025-03-28T09:08:10.922183Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:08:10.942948Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-28T09:08:10.943030Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=(O@](qN, ActorId=[1:4609:3495] 2025-03-28T09:08:10.944417Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:9979:5764]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:08:10.944843Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:08:10.944902Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:08:10.945247Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:08:10.945306Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-28T09:08:10.945367Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-03-28T09:08:11.023611Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4' |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |79.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk >> TCdcStreamTests::RebootSchemeShard [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative >> TContinuousBackupTests::Basic |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TCdcStreamTests::AlterStream [GOOD] >> TCdcStreamTests::DropStream >> TPopulatorTest::MakeDir >> TContinuousBackupTests::TakeIncrementalBackup >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant [GOOD] >> GenericFederatedQuery::PostgreSQLSelectCount >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] Test command err: 2025-03-28T09:07:21.761272Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784448168381899:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:21.761365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00156c/r3tmp/tmptSznne/pdisk_1.dat TServer::EnableGrpc on GrpcPort 7869, node 1 2025-03-28T09:07:22.178846Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:22.204986Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:07:22.205011Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:07:22.220449Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:22.220545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:22.229026Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:07:22.229050Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:22.229056Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:22.229170Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:07:22.255938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11039 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:07:22.541735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:11039 2025-03-28T09:07:24.836486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784461053284835:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.836567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.099535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:07:25.394184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784465348252335:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.394630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.395519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784465348252366:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.395582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784465348252361:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.395622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784465348252362:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.395653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784465348252363:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.395684Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784465348252364:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.402866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784465348252385:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.402934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784465348252389:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.402980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784465348252390:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.403028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784465348252391:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.403341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.404854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784465348252442:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.404906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784465348252449:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.404967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784465348252450:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.405005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:25.406215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-28T09:07:25.424516Z node 1 :TX_PROXY ERROR: Actor# [1:7486784465348252380:2764] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:07:25.425412Z node 1 :TX_PROXY ERROR: Actor# [1:7486784465348252377:2761] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:07:25.425425Z node 1 :TX_PROXY ERROR: Actor# [1:7486784465348252379:2763] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:07:25.425595Z node 1 :TX_PROXY ERROR: Actor# [1:7486784465348252381:2765] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:07:25.427745Z node 1 :TX_PROXY ERROR: Actor# [1:7486784465348252438:2787] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:07:25.427965Z node 1 :TX_PROXY ERROR: Actor# [1:7486784465348252437:2786] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:07:25.428120Z node 1 :TX_PROXY ERROR: Actor# [1:7486784465348252454:2798] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:07:25.428271Z node 1 :TX_PROXY ERROR: Actor# [1:7486784465348252459:2801] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:07:25.428415Z node 1 :TX_PROXY ERROR: Actor# [1:7486784465348252458:2800] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root ... d: 281474976731875. 
Ctx: { TraceId: 01jqe04fpqe2q8ga0nves1dgsr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzdjZmU5NWQtMjM3NzZmMi1iY2ZlMTRmZC01ODE4NDU2ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.563796Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731876. Ctx: { TraceId: 01jqe04fpp5t8e69rhhwfp6amj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmExMWVmZC04ODkyNzc1OC02MDE1MjlmNS1lMDQ4Nzg1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.564113Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731877. Ctx: { TraceId: 01jqe04fpq8yv437n3wt4kfrtr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM3MjU1YjYtY2ExZWQxMmMtNTYwMmYzMDAtMTAxOTg4NzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.567389Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731878. Ctx: { TraceId: 01jqe04fpv14k6yq4y0fc2shg4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2U3YTM3MmEtOGExNmNkMC01OWRiNjdjMS00ZjRkNTA2Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.568041Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731879. Ctx: { TraceId: 01jqe04fpv4x401ab6d49vc5w2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZThhYzA1ZmMtZTRhZmZkNGEtY2Q0NzA1ZGQtZDc2MmE0MWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.571931Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731880. Ctx: { TraceId: 01jqe04fq08d3bqg538w9dv2w7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDI5MTRkMzgtZDk2NTg5ZWItNTVhZGEzNmItMmM5Nzg4OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.572604Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731881. Ctx: { TraceId: 01jqe04fq0fqm695vkmn0t2r89, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTJlYzE3ZmItMzNiZmU2N2ItMjQzMjY3NjgtMWMxNmFjNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.574770Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731882. Ctx: { TraceId: 01jqe04fq15jgjtfkvnww04zcv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njg0MmI2OTAtMmI3ZTA0NTQtNThlNGNlZmQtOGZmZDE4NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.577124Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731883. Ctx: { TraceId: 01jqe04fq5d1yvsbgjner6atjf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmExMWVmZC04ODkyNzc1OC02MDE1MjlmNS1lMDQ4Nzg1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.577461Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731884. Ctx: { TraceId: 01jqe04fq567ff8fq0raphk0zt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDJjZTc3NGUtMmVhODQzZTMtMmY1OGU1ZjktMjRjMmM5Y2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.578062Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731885. 
Ctx: { TraceId: 01jqe04fq5ep2w6nzxv7hn4rs4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZlMWY2YjctNTYyMWNjMmYtNjliMmUwMTEtYWU5OTllMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-03-28T09:08:09.583603Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731886. Ctx: { TraceId: 01jqe04fqda9rztcw1gk5h2h63, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM3MjU1YjYtY2ExZWQxMmMtNTYwMmYzMDAtMTAxOTg4NzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.583884Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731887. Ctx: { TraceId: 01jqe04fqd8bcyz6arj9t7qx9m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzdjZmU5NWQtMjM3NzZmMi1iY2ZlMTRmZC01ODE4NDU2ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.584264Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731888. Ctx: { TraceId: 01jqe04fqd3cqpt76hg4578hzq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2U3YTM3MmEtOGExNmNkMC01OWRiNjdjMS00ZjRkNTA2Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.584768Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731890. Ctx: { TraceId: 01jqe04fqd6jkfpcs9699w746k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTJlYzE3ZmItMzNiZmU2N2ItMjQzMjY3NjgtMWMxNmFjNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.585434Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731891. Ctx: { TraceId: 01jqe04fqd6yaq7ms7n1xj0xfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZThhYzA1ZmMtZTRhZmZkNGEtY2Q0NzA1ZGQtZDc2MmE0MWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.585853Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731889. Ctx: { TraceId: 01jqe04fqd2tffd1apj4zn4sz3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDI5MTRkMzgtZDk2NTg5ZWItNTVhZGEzNmItMmM5Nzg4OWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152845231 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-28T09:08:09.587597Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731892. Ctx: { TraceId: 01jqe04fqe41yqvex0znvycw4m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njg0MmI2OTAtMmI3ZTA0NTQtNThlNGNlZmQtOGZmZDE4NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.591897Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731893. 
Ctx: { TraceId: 01jqe04fqh2q5ksyjkj502dm0c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDJjZTc3NGUtMmVhODQzZTMtMmY1OGU1ZjktMjRjMmM5Y2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.591913Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731894. Ctx: { TraceId: 01jqe04fqh9xr2ayr0dqwnyxax, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmExMWVmZC04ODkyNzc1OC02MDE1MjlmNS1lMDQ4Nzg1OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.595676Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731895. Ctx: { TraceId: 01jqe04fqq60rgpyvpb64n1tq6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDM3MjU1YjYtY2ExZWQxMmMtNTYwMmYzMDAtMTAxOTg4NzA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.595684Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976731896. Ctx: { TraceId: 01jqe04fqqatg7ew2fp05aveec, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjZlMWY2YjctNTYyMWNjMmYtNjliMmUwMTEtYWU5OTllMzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152845231 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) 2025-03-28T09:08:10.555484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037893 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 14293 rowCount 292 cpuUsage 0 2025-03-28T09:08:10.556887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037893 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 17995 rowCount 292 cpuUsage 0 2025-03-28T09:08:10.557390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037894 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 17633 rowCount 226 cpuUsage 0 2025-03-28T09:08:10.558555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037894 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 13931 rowCount 226 cpuUsage 0 2025-03-28T09:08:10.567818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037892 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 16758 rowCount 271 cpuUsage 0 2025-03-28T09:08:10.574596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 13064 rowCount 211 cpuUsage 0 Table has 4 shards 2025-03-28T09:08:10.659767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 4 2025-03-28T09:08:10.659987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037893 followerId=0, pathId 2: RowCount 292, DataSize 17995 2025-03-28T09:08:10.660163Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037894 followerId=0, pathId 2: RowCount 226, DataSize 13931 2025-03-28T09:08:10.660265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037892 followerId=0, pathId 2: RowCount 271, DataSize 16758 2025-03-28T09:08:10.660382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037891 followerId=0, pathId 2: RowCount 211, DataSize 13064 2025-03-28T09:08:10.660978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 >> TPopulatorTest::MakeDir [GOOD] |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TCdcStreamTests::StreamOnIndexTableNegative [GOOD] >> TCdcStreamTests::StreamOnIndexTable >> GenericFederatedQuery::YdbSelectCount [GOOD] >> TPopulatorTest::RemoveDir ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir [GOOD] Test command err: 2025-03-28T09:08:15.601162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:08:15.601226Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 
2025-03-28T09:08:15.724163Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 330, preserialized size# 51 2025-03-28T09:08:15.724250Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-03-28T09:08:15.725726Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:15.725838Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:15.725895Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:15.726235Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 220, preserialized size# 2 2025-03-28T09:08:15.726274Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, 
pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-03-28T09:08:15.726404Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-03-28T09:08:15.726465Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-03-28T09:08:15.726501Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-03-28T09:08:15.727249Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:15.727493Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-03-28T09:08:15.727540Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:15.727575Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:15.727615Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-03-28T09:08:15.727640Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-03-28T09:08:15.727687Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2025-03-28T09:08:15.728129Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-03-28T09:08:15.728188Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-03-28T09:08:15.728231Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2025-03-28T09:08:15.728439Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:98:2124], cookie# 100 2025-03-28T09:08:15.728995Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle 
NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:96:2122], cookie# 100 2025-03-28T09:08:15.729035Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-03-28T09:08:15.729330Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:97:2123], cookie# 100 2025-03-28T09:08:15.729361Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-03-28T09:08:15.731998Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 340, preserialized size# 56 2025-03-28T09:08:15.732049Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-03-28T09:08:15.732184Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:15.732226Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:15.732867Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } 
ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 225, preserialized size# 2 2025-03-28T09:08:15.732922Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2025-03-28T09:08:15.733027Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 100 2025-03-28T09:08:15.733078Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 100 2025-03-28T09:08:15.733141Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:15.733203Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:96:2122], cookie# 100 2025-03-28T09:08:15.733232Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:15.733261Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:15.733290Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:15.733487Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 100 2025-03-28T09:08:15.733517Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 4 2025-03-28T09:08:15.733555Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-03-28T09:08:15.733599Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-03-28T09:08:15.733632Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle 
NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 100 2025-03-28T09:08:15.733689Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-03-28T09:08:15.733950Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-03-28T09:08:15.734100Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-03-28T09:08:15.734127Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-03-28T09:08:15.734404Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2124], cookie# 100 2025-03-28T09:08:15.734439Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 2025-03-28T09:08:15.734832Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-03-28T09:08:15.734872Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 TestModificationResult got TxId: 100, wait until txId: 100 |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |79.7%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut >> TPopulatorTestWithResets::UpdateAck [GOOD] >> TContinuousBackupTests::Basic [GOOD] >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] >> TPopulatorTest::RemoveDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2025-03-28T09:08:16.798324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:08:16.798388Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-03-28T09:08:16.917193Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 330, preserialized size# 51 2025-03-28T09:08:16.917288Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-03-28T09:08:16.918613Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:16.918925Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:16.918990Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:16.919265Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 220, preserialized size# 2 2025-03-28T09:08:16.919309Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-03-28T09:08:16.919406Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:16.919454Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:16.919485Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# 
[1:95:2121], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-03-28T09:08:16.922923Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 340, preserialized size# 56 2025-03-28T09:08:16.922976Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-03-28T09:08:16.923231Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirC" PathDescription { Self { Name: "DirC" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 225, preserialized size# 2 2025-03-28T09:08:16.923265Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-03-28T09:08:16.948076Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle 
NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-03-28T09:08:16.948134Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Successful handshake: replica# [1:12:2059] 2025-03-28T09:08:16.948175Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:96:2122] Resume sync: replica# [1:12:2059], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:08:16.948261Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-03-28T09:08:16.948285Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:97:2123] Successful handshake: replica# [1:15:2062] 2025-03-28T09:08:16.948306Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:97:2123] Resume sync: replica# [1:15:2062], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:08:16.948362Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-03-28T09:08:16.948383Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:98:2124] Successful handshake: replica# [1:18:2065] 2025-03-28T09:08:16.948403Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:98:2124] Resume sync: replica# [1:18:2065], fromPathId# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:08:16.948506Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:24339059:0] }: sender# [1:96:2122] 2025-03-28T09:08:16.948587Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:95:2121] 2025-03-28T09:08:16.948740Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:96:2122] 2025-03-28T09:08:16.948803Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-03-28T09:08:16.948927Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:96:2122] 2025-03-28T09:08:16.948963Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-03-28T09:08:16.949033Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:1099535966835:0] }: sender# [1:97:2123] 2025-03-28T09:08:16.949099Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:12:2059], cookie# 0 2025-03-28T09:08:16.949171Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:95:2121] 2025-03-28T09:08:16.949250Z 
node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:97:2123] 2025-03-28T09:08:16.949293Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 0 2025-03-28T09:08:16.949344Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-03-28T09:08:16.949431Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:97:2123] 2025-03-28T09:08:16.949475Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:15:2062], cookie# 0 2025-03-28T09:08:16.949513Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-03-28T09:08:16.949563Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Replica: [1:2199047594611:0] }: sender# [1:98:2124] 2025-03-28T09:08:16.949603Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 1] }: sender# [1:98:2124] 2025-03-28T09:08:16.949638Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 0 2025-03-28T09:08:16.949695Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 2] PathVersion: 3 } }: sender# [1:95:2121] 2025-03-28T09:08:16.949767Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-03-28T09:08:16.949820Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 2] }: sender# [1:98:2124] 2025-03-28T09:08:16.949891Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:24339059:0] }: sender# [1:96:2122] 2025-03-28T09:08:16.949941Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:95:2121] 2025-03-28T09:08:16.949983Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 0 2025-03-28T09:08:16.950062Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 
Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:96:2122], cookie# 0 2025-03-28T09:08:16.950105Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:96:2122], cookie# 0 2025-03-28T09:08:16.950156Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2025-03-28T09:08:16.950207Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:18:2065], cookie# 0 2025-03-28T09:08:16.950258Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 0 2025-03-28T09:08:16.950295Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:96:2122], cookie# 100 2025-03-28T09:08:16.950332Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:1099535966835:0] }: sender# [1:97:2123] 2025-03-28T09:08:16.950369Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:95:2121] 2025-03-28T09:08:16.950445Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 0 2025-03-28T09:08:16.950471Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:96:2122], cookie# 0 2025-03-28T09:08:16.950500Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2025-03-28T09:08:16.950544Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-03-28T09:08:16.950571Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 0 2025-03-28T09:08:16.950596Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 0 2025-03-28T09:08:16.950633Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:97:2123], cookie# 100 2025-03-28T09:08:16.950661Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-03-28T09:08:16.986865Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 
72057594046678944, LocalPathId: 1], version# 4 2025-03-28T09:08:16.986980Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 0 2025-03-28T09:08:16.987014Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 0 2025-03-28T09:08:16.987750Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-03-28T09:08:16.987816Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-03-28T09:08:16.987861Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 2025-03-28T09:08:16.988000Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 3] Replica: [1:2199047594611:0] }: sender# [1:98:2124] 2025-03-28T09:08:16.988078Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:95:2121] 2025-03-28T09:08:16.988604Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2124], cookie# 0 2025-03-28T09:08:16.988644Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 0 2025-03-28T09:08:16.988700Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] 2025-03-28T09:08:16.988874Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 4 }: sender# [1:98:2124], cookie# 100 2025-03-28T09:08:16.988903Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 2025-03-28T09:08:16.988960Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 0 2025-03-28T09:08:16.988982Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 0 2025-03-28T09:08:16.989190Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-03-28T09:08:16.989247Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 100 TestWaitNotification: OK eventTxId 100 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [FAIL] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 >> DataShardReadIterator::ShouldReadRangeChunk1 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::Basic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:08:15.563879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:08:15.563988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:08:15.564034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:08:15.564065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:08:15.564120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:08:15.564164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:08:15.564218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:08:15.564292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:08:15.564634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:08:15.672885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:08:15.672968Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:15.689296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:08:15.689571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:08:15.689774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:08:15.696412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:08:15.696631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:08:15.697329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:15.697549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:08:15.699587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:15.700891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:08:15.700953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:15.701104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:08:15.701168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:15.701217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:08:15.701303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:08:15.708304Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:08:15.884927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:08:15.885177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:15.885401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:08:15.885627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:08:15.885689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:15.890438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:15.890630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:08:15.890869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:15.890927Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:08:15.890961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:08:15.890994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:08:15.893175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:15.893239Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:08:15.893463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:08:15.895543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:15.895607Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:15.895657Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:15.895712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:08:15.911756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:08:15.919582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:08:15.919798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:08:15.920829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:15.920974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:08:15.921017Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:15.921333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:08:15.922064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:15.922246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:08:15.922326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:08:15.926351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:08:15.926412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:15.926603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:15.926666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:08:15.927069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:15.927117Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:08:15.927219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:08:15.927251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
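The "Change state for txid" lines above walk a single sub-operation through the numeric phases 2 -> 3 -> 128 -> 240, which this trace labels TCreateParts, TConfigureParts, TPropose and TDone. A minimal sketch of that progression; the enum values and names are read off this log, not taken from the YDB sources:

// Illustrative only: models the phase numbers visible in this trace.
#include <cstdio>

enum class ETxState : int {
    CreateParts    = 2,   // "TCreateParts opId# 1:0 ProgressState"
    ConfigureParts = 3,   // "NSubDomainState::TConfigureParts ... ProgressState"
    Propose        = 128, // "NSubDomainState::TPropose ProgressState"
    Done           = 240, // "TDone opId# 1:0 ProgressState"
};

// Advance in the order the log reports "Change state for txid 1:0 <from> -> <to>".
ETxState Next(ETxState s) {
    switch (s) {
        case ETxState::CreateParts:    return ETxState::ConfigureParts;
        case ETxState::ConfigureParts: return ETxState::Propose;
        case ETxState::Propose:        return ETxState::Done;
        case ETxState::Done:           return ETxState::Done;
    }
    return ETxState::Done;
}

int main() {
    ETxState s = ETxState::CreateParts;
    while (s != ETxState::Done) {
        ETxState n = Next(s);
        std::printf("Change state for txid 1:0 %d -> %d\n",
                    static_cast<int>(s), static_cast<int>(n));
        s = n;
    }
}

Compiled and run, this prints the same three "Change state" transitions that appear in the trace.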
2025-03-28T09:08:15.927316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:08:15.927356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:15.927397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:08:15.927439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:15.927477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:08:15.927519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:08:15.927590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:08:15.927631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:08:15.927687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:08:15.929703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:08:15.929834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:08:15.929870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... : 104, ready parts: 2/3, is published: true 2025-03-28T09:08:16.980455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:08:16.980538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:08:16.980561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:08:16.980599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2025-03-28T09:08:16.983258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:08:16.983374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:08:16.985911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:08:16.986045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:08:16.986110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:08:16.986168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:08:16.986198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:08:17.017438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle 
TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 805 } } 2025-03-28T09:08:17.017499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-03-28T09:08:17.017685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 805 } } 2025-03-28T09:08:17.017786Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000005 OrderId: 104 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 805 } } FAKE_COORDINATOR: Erasing txId 104 2025-03-28T09:08:17.018910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-03-28T09:08:17.018957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-03-28T09:08:17.019077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-03-28T09:08:17.019129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:08:17.019235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-03-28T09:08:17.019294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:17.019335Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:08:17.019368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:08:17.019408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-03-28T09:08:17.031210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:08:17.031715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:08:17.032119Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:08:17.032172Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-28T09:08:17.032266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-03-28T09:08:17.032299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-28T09:08:17.032334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 3/3 2025-03-28T09:08:17.032367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-28T09:08:17.032401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2025-03-28T09:08:17.032483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 104 2025-03-28T09:08:17.032547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-28T09:08:17.032594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:08:17.032622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:08:17.032749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:08:17.032799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-03-28T09:08:17.032819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-03-28T09:08:17.032854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:08:17.032873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-03-28T09:08:17.032890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-03-28T09:08:17.032946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-28T09:08:17.033301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:08:17.033340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-28T09:08:17.033406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:08:17.033446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:08:17.033488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:08:17.045088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:08:17.045143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:726:2641] 2025-03-28T09:08:17.045607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 
paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-03-28T09:08:17.046193Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:08:17.046389Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl" took 208us result status StatusPathDoesNotExist 2025-03-28T09:08:17.046551Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/Table/continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:08:17.047020Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:08:17.047210Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 157us result status StatusPathDoesNotExist 2025-03-28T09:08:17.047334Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbSelectCount [GOOD] Test command err: Trying to start YDB, gRPC: 5579, MsgBus: 23137 2025-03-28T09:08:00.036686Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784615146123856:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:00.038293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath 
# /home/runner/.ya/build/build_root/2909/000b43/r3tmp/tmp7pBEux/pdisk_1.dat 2025-03-28T09:08:00.702932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:00.703044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:00.710818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:00.711708Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5579, node 1 2025-03-28T09:08:00.915361Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:00.915385Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:00.915391Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:00.915529Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23137 TClient is connected to server localhost:23137 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:01.990447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:02.045876Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:08:04.852646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784632325993541:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:04.852762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.004917Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784615146123856:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:05.004969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:05.323451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-28T09:08:05.478636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784636620960963:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.478726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.479280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784636620960968:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.482573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-28T09:08:05.491997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784636620960970:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:08:05.564462Z node 1 :TX_PROXY ERROR: Actor# [1:7486784636620961010:2406] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:06.528779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:08:06.997784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-03-28T09:08:07.704545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:08:08.441903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:08:09.016266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:08:09.619146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:08:09.680289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:08:13.825220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710711:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. 
selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 >> TransferWriter::Write_ColumnTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] Test command err: 2025-03-28T09:08:17.448816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:08:17.448876Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 100 2025-03-28T09:08:17.655884Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 330, preserialized size# 51 2025-03-28T09:08:17.655974Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 3 2025-03-28T09:08:17.657489Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:17.657601Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:17.657654Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:17.658007Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: false CreateTxId: 100 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 
PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 220, preserialized size# 2 2025-03-28T09:08:17.658057Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 2 2025-03-28T09:08:17.658192Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:12:2059], cookie# 100 2025-03-28T09:08:17.658262Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:15:2062], cookie# 100 2025-03-28T09:08:17.658304Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:18:2065], cookie# 100 2025-03-28T09:08:17.658665Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:17.667082Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:96:2122], cookie# 100 2025-03-28T09:08:17.667150Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:17.667188Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:17.667236Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:97:2123], cookie# 100 2025-03-28T09:08:17.667268Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 3 2025-03-28T09:08:17.667319Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:18:2065], cookie# 100 2025-03-28T09:08:17.668006Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 3 }: sender# [1:98:2124], cookie# 100 2025-03-28T09:08:17.668049Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:12:2059], cookie# 100 2025-03-28T09:08:17.668093Z node 1 
:SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:15:2062], cookie# 100 2025-03-28T09:08:17.668375Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:98:2124], cookie# 100 2025-03-28T09:08:17.668737Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:96:2122], cookie# 100 2025-03-28T09:08:17.668768Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 2 2025-03-28T09:08:17.669033Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 2 }: sender# [1:97:2123], cookie# 100 2025-03-28T09:08:17.669070Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:97:2123], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000001 FAKE_COORDINATOR: Erasing txId 100 2025-03-28T09:08:17.671538Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 340, preserialized size# 56 2025-03-28T09:08:17.671599Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 4 2025-03-28T09:08:17.671740Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 2025-03-28T09:08:17.671787Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 100 
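In the populator trace above, [1:95:2121] fans each TEvUpdate out to three per-replica actors ([1:96], [1:97], [1:98]) and sends "Ack update" back to the schemeshard before all three TEvUpdateAck replies have arrived; a straggler then shows up as "Ack for unknown update (already acked?)". A toy aggregator under the assumption, read off this log rather than the YDB sources, that two of three replica acks suffice:

// Toy majority-ack aggregator for the pattern visible in this trace.
#include <cstdio>
#include <map>
#include <set>
#include <tuple>

struct AckTracker {
    static constexpr int kQuorum = 2; // assumption: 2 of 3 replicas, as observed
    // (owner, pathId, version) -> replica ids that acked so far
    std::map<std::tuple<unsigned long long, unsigned long long, unsigned long long>,
             std::set<int>> pending;

    void StartUpdate(unsigned long long owner, unsigned long long pathId,
                     unsigned long long version) {
        pending[std::make_tuple(owner, pathId, version)];
    }

    void OnReplicaAck(unsigned long long owner, unsigned long long pathId,
                      unsigned long long version, int replica) {
        auto it = pending.find(std::make_tuple(owner, pathId, version));
        if (it == pending.end()) {
            // Entry already erased after the quorum ack was sent.
            std::printf("Ack for unknown update (already acked?): replica# %d\n", replica);
            return;
        }
        it->second.insert(replica);
        if (static_cast<int>(it->second.size()) >= kQuorum) {
            std::printf("Ack update: pathId# %llu, version# %llu\n", pathId, version);
            pending.erase(it); // later acks now hit the branch above
        }
    }
};

int main() {
    AckTracker t;
    t.StartUpdate(72057594046678944ULL, 2, 2);
    t.OnReplicaAck(72057594046678944ULL, 2, 2, 98);
    t.OnReplicaAck(72057594046678944ULL, 2, 2, 96); // quorum -> "Ack update"
    t.OnReplicaAck(72057594046678944ULL, 2, 2, 97); // straggler -> "unknown update"
}

Running it reproduces the observed ordering for pathId 2: the second ack triggers "Ack update" and the third lands on the already-erased entry.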
2025-03-28T09:08:17.672393Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirB" PathDescription { Self { Name: "DirB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 100, event size# 225, preserialized size# 2 2025-03-28T09:08:17.672452Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 100, is deletion# false, version: 3 2025-03-28T09:08:17.672556Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1 ... oard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-28T09:08:17.680446Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-28T09:08:17.680476Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-28T09:08:17.680941Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:12:2059], cookie# 101 2025-03-28T09:08:17.681039Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:15:2062], cookie# 101 2025-03-28T09:08:17.681080Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:18:2065], cookie# 101 2025-03-28T09:08:17.681152Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:96:2122], cookie# 101 2025-03-28T09:08:17.681238Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:12:2059], cookie# 101 2025-03-28T09:08:17.681286Z node 1 :SCHEME_BOARD_POPULATOR 
DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:15:2062], cookie# 101 2025-03-28T09:08:17.681319Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:18:2065], cookie# 101 2025-03-28T09:08:17.681405Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:97:2123], cookie# 101 2025-03-28T09:08:17.681436Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 5 2025-03-28T09:08:17.681753Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 5 }: sender# [1:98:2124], cookie# 101 2025-03-28T09:08:17.682043Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:96:2122], cookie# 101 2025-03-28T09:08:17.682193Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:97:2123], cookie# 101 2025-03-28T09:08:17.682223Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 3 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-03-28T09:08:17.682640Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 3 }: sender# [1:98:2124], cookie# 101 2025-03-28T09:08:17.682677Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 101 2025-03-28T09:08:17.684178Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 
0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 101, event size# 232, preserialized size# 2 2025-03-28T09:08:17.684231Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 101, is deletion# false, version: 6 2025-03-28T09:08:17.684360Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-28T09:08:17.684408Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-28T09:08:17.684459Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-28T09:08:17.684703Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/Root/DirB\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000002, drop txId: 101" Path: "/Root/DirB" PathId: 2 LastExistedPrefixPath: "/Root" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944 }: sender# [1:70:2109], cookie# 101, event size# 306, preserialized size# 0 2025-03-28T09:08:17.684737Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], cookie# 101, is deletion# true, version: 0 2025-03-28T09:08:17.684835Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:12:2059], cookie# 101 2025-03-28T09:08:17.684899Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:15:2062], cookie# 101 2025-03-28T09:08:17.684938Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:18:2065], cookie# 101 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:08:17.685011Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:96:2122], cookie# 101 2025-03-28T09:08:17.685041Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-28T09:08:17.685110Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle 
NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-28T09:08:17.685161Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:95:2121], cookie# 101 2025-03-28T09:08:17.685374Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:97:2123], cookie# 101 2025-03-28T09:08:17.685404Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 6 2025-03-28T09:08:17.685447Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:96:2122] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:12:2059], cookie# 101 2025-03-28T09:08:17.685493Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:97:2123] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:15:2062], cookie# 101 2025-03-28T09:08:17.685531Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:98:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:18:2065], cookie# 101 2025-03-28T09:08:17.685589Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:98:2124], cookie# 101 2025-03-28T09:08:17.685895Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:96:2122], cookie# 101 2025-03-28T09:08:17.686017Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:97:2123], cookie# 101 2025-03-28T09:08:17.686046Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: [1:95:2121] Ack update: ack to# [1:70:2109], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 2], version# 18446744073709551615 2025-03-28T09:08:17.686108Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:98:2124], cookie# 101 2025-03-28T09:08:17.686131Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:95:2121] Ack for unknown update (already acked?): sender# [1:98:2124], cookie# 101 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: 
[1:108:2140] 2025-03-28T09:08:16.321168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:08:16.321282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:08:16.321342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:08:16.321379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:08:16.321425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:08:16.321481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:08:16.321545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:08:16.321627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:08:16.321970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:08:16.485773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:08:16.485828Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:16.513490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:08:16.513781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:08:16.513947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:08:16.533239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:08:16.533464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:08:16.534093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:16.534297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:08:16.578014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:16.579302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:08:16.579358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:16.579500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:08:16.579566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:16.579604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:08:16.579693Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
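The "BackgroundCompactionQueue configured" and "BorrowedCompactionQueue configured" lines at the top of this trace report the same small set of knobs on every schemeshard boot. A sketch that mirrors those tokens; the field names follow the Timeout#/Rate#/WakeupInterval#/InflightLimit# labels in the log line, not the real YDB config types, and the values are the BorrowedCompactionQueue numbers from this run:

// Sketch of the knobs reported by "BorrowedCompactionQueue configured:".
#include <cstdio>

struct QueueConfig {
    double timeoutSeconds;
    unsigned rate;
    double wakeupIntervalSeconds;
    unsigned inflightLimit;
};

void ReportConfigured(const char* name, const QueueConfig& c) {
    std::printf("%s configured: Timeout# %.6fs, Rate# %u, "
                "WakeupInterval# %.6fs, InflightLimit# %u\n",
                name, c.timeoutSeconds, c.rate,
                c.wakeupIntervalSeconds, c.inflightLimit);
}

int main() {
    // Values as they appear in the trace above.
    QueueConfig borrowed{15.0, 0, 1.0, 10};
    ReportConfigured("BorrowedCompactionQueue", borrowed);
}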
2025-03-28T09:08:16.587454Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:08:16.743746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:08:16.743983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:16.744211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:08:16.744472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:08:16.744529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:16.751901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:16.752117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:08:16.752339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:16.752389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:08:16.752441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:08:16.752472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:08:16.763608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:16.763696Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:08:16.763752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:08:16.771687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:16.771755Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:16.771793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:16.771854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:08:16.775696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:08:16.783584Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:08:16.783816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:08:16.784894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:16.785048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:08:16.785100Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:16.785405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:08:16.785470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:16.785665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:08:16.785737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:08:16.788266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:08:16.788318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:16.788501Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:16.788568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:08:16.788970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:16.789020Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:08:16.789110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:08:16.789149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:16.789200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:08:16.789232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:16.789264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:08:16.789301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:16.789333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:08:16.789369Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:08:16.789427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:08:16.789460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:08:16.789508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:08:16.791551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:08:16.791657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:08:16.791689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 33409546, at schemeshard: 72057594046678944 2025-03-28T09:08:17.671339Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:1 129 -> 240 2025-03-28T09:08:17.673994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2025-03-28T09:08:17.674172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:1, at schemeshard: 72057594046678944 2025-03-28T09:08:17.674269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:1, at schemeshard: 72057594046678944 2025-03-28T09:08:17.674301Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:1 ProgressState 2025-03-28T09:08:17.674391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2025-03-28T09:08:17.674423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-03-28T09:08:17.674452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:1 progress is 4/4 2025-03-28T09:08:17.674475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-03-28T09:08:17.674502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/4, is published: true 2025-03-28T09:08:17.674564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 103 2025-03-28T09:08:17.674644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/4 2025-03-28T09:08:17.674704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:08:17.674751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:08:17.674818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:08:17.674845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-03-28T09:08:17.674868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-03-28T09:08:17.674927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:08:17.674950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 
2025-03-28T09:08:17.674968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-03-28T09:08:17.675004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-28T09:08:17.675026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2025-03-28T09:08:17.675042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2025-03-28T09:08:17.675087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:08:17.683549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:08:17.683629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:721:2625] TestWaitNotification: OK eventTxId 103 2025-03-28T09:08:17.684193Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:08:17.684492Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 251us result status StatusSuccess 2025-03-28T09:08:17.684967Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } 
DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:08:17.685513Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:08:17.685682Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/continuousBackupImpl/streamImpl" took 169us result status StatusSuccess 2025-03-28T09:08:17.686089Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/continuousBackupImpl/streamImpl" PathDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeStreamImpl Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409548 } PersQueueGroup { Name: "streamImpl" PathId: 4 TotalGroupCount: 1 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "continuousBackupImpl" TopicPath: "/MyRoot/Table/continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS OffloadConfig { IncrementalBackup { DstPath: "/MyRoot/IncrBackupImpl" DstPathId { OwnerId: 72057594046678944 LocalId: 5 } } } } Partitions { PartitionId: 0 TabletId: 72075186233409547 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409548 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:08:17.687210Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:08:17.687415Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 203us result status StatusSuccess 2025-03-28T09:08:17.687796Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |79.8%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest >> TCdcStreamTests::StreamOnIndexTable [GOOD] >> TCdcStreamTests::StreamOnBuildingIndexTable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 >> KqpPg::ValuesInsert-useSink [GOOD] >> PgCatalog::PgType >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectAll [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 >> TCdcStreamTests::DropStream [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail |79.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/replication/service/ut_transfer_writer/unittest |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest |79.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 >> GenericFederatedQuery::PostgreSQLOnPremSelectAll [GOOD] >> GenericFederatedQuery::PostgreSQLFilterPushdown >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict+UseSink [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict-UseSink |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] Test command err: Trying to start YDB, gRPC: 23272, MsgBus: 1256 2025-03-28T09:08:00.043387Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784614661760946:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:00.043434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000be2/r3tmp/tmpMqATsk/pdisk_1.dat 2025-03-28T09:08:00.676423Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:00.688556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:00.688647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:00.691725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23272, node 1 2025-03-28T09:08:00.915442Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:00.915468Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:00.915475Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:00.915571Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1256 TClient is connected to server localhost:1256 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:02.026219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:02.063262Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:08:05.049928Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784614661760946:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:05.050015Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:05.240388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784636136598090:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.240499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.598856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-28T09:08:05.733812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784636136598211:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.733909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.734227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784636136598217:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.737619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-28T09:08:05.749425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784636136598219:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:08:05.859644Z node 1 :TX_PROXY ERROR: Actor# [1:7486784636136598259:2406] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:06.748982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:08:07.300142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-03-28T09:08:08.115665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:08:09.003958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:08:09.743465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:08:10.476920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:08:10.548770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:08:13.296674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-03-28T09:08:13.343947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-03-28T09:08:13.345546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-03-28T09:08:13.348219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 2025-03-28T09:08:15.666793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:08:15.666841Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:17.932847Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152897969, txId: 281474976710759] shutting down |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy [GOOD] >> TTxDataShardMiniKQL::CrossShard_3_AllToOne >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator >> ReadOnlyVDisk::TestGarbageCollect |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |79.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |79.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator >> TCdcStreamTests::StreamOnBuildingIndexTable [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanEnabled >> TTxDataShardMiniKQL::CrossShard_6_Local [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> TestYmqHttpProxy::TestCreateQueueWithWrongBody >> ReadOnlyVDisk::TestDiscover >> ReadOnlyVDisk::TestStorageLoad >> ReadOnlyVDisk::TestSync >> TCmsTest::ActionIssuePartialPermissions >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 >> TCmsTest::RequestReplaceBrokenDevices >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-03-28T09:07:50.071620Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784571161125751:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:50.071878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ce5/r3tmp/tmpDJPLMj/pdisk_1.dat 2025-03-28T09:07:50.719908Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:50.770963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:50.771103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:50.782048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12338 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:07:51.123498Z node 1 :TX_PROXY DEBUG: actor# [1:7486784571161125848:2115] Handle TEvNavigate describe path dc-1 2025-03-28T09:07:51.123547Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784575456093638:2444] HANDLE EvNavigateScheme dc-1 2025-03-28T09:07:51.123640Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784571161125938:2153], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:07:51.123768Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784575456093621:2439][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784571161125938:2153], cookie# 1 2025-03-28T09:07:51.125305Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784575456093625:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784575456093622:2439], cookie# 1 2025-03-28T09:07:51.125350Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784575456093626:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784575456093623:2439], cookie# 1 2025-03-28T09:07:51.125364Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784575456093627:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784575456093624:2439], cookie# 1 2025-03-28T09:07:51.125400Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784566866158232:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784575456093626:2439], cookie# 1 2025-03-28T09:07:51.125407Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784566866158229:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784575456093625:2439], cookie# 1 2025-03-28T09:07:51.125432Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784566866158235:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784575456093627:2439], cookie# 1 2025-03-28T09:07:51.125453Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784575456093626:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784566866158232:2053], cookie# 1 2025-03-28T09:07:51.125477Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784575456093625:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784566866158229:2050], cookie# 1 2025-03-28T09:07:51.125491Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784575456093627:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784566866158235:2056], cookie# 1 2025-03-28T09:07:51.125521Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784575456093621:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784575456093623:2439], cookie# 1 2025-03-28T09:07:51.125546Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784575456093621:2439][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:07:51.125562Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][1:7486784575456093621:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784575456093622:2439], cookie# 1 2025-03-28T09:07:51.125594Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784575456093621:2439][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:07:51.125621Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784575456093621:2439][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784575456093624:2439], cookie# 1 2025-03-28T09:07:51.125636Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784575456093621:2439][/dc-1] Unexpected sync response: sender# [1:7486784575456093624:2439], cookie# 1 2025-03-28T09:07:51.125683Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784571161125938:2153], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:07:51.135316Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784571161125938:2153], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784575456093621:2439] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:07:51.135436Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784571161125938:2153], cacheItem# { Subscriber: { Subscriber: [1:7486784575456093621:2439] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-28T09:07:51.137513Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784575456093641:2447], recipient# [1:7486784575456093638:2444], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:07:51.137568Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784575456093638:2444] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:07:51.168883Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784575456093638:2444] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:07:51.170955Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784575456093638:2444] Handle 
TEvDescribeSchemeResult Forward to# [1:7486784575456093637:2443] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:07:51.193314Z node 1 :TX_PROXY DEBUG: actor# [1:7486784571161125848:2115] Handle TEvProposeTransaction 2025-03-28T09:07:51.193345Z node 1 :TX_PROXY DEBUG: actor# [1:7486784571161125848:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:07:51.193452Z node 1 :TX_PROXY DEBUG: actor# [1:7486784571161125848:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486784575456093647:2452] 2025-03-28T09:07:51.277348Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784575456093647:2452] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-28T09:07:51.277396Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784575456093647:2452] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:07:51.277502Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784575456093647:2452] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:07:51.277592Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784571161125938:2153], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:1844674407370955161 ... try for TNavigate: self# [3:7486784626494165676:2126], cacheItem# { Subscriber: { Subscriber: [3:7486784647969003356:2971] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:15.598789Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784678033775266:3530], recipient# [3:7486784678033775265:2353], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:16.473212Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784626494165676:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:16.473383Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784626494165676:2126], cacheItem# { Subscriber: { Subscriber: [3:7486784630789133679:2623] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: 
DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:16.473502Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784682328742584:3534], recipient# [3:7486784682328742583:2354], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:16.614851Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784626494165676:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:16.615005Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784626494165676:2126], cacheItem# { Subscriber: { Subscriber: [3:7486784647969003356:2971] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:16.615103Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784682328742586:3535], recipient# [3:7486784682328742585:2355], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:17.480733Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784626494165676:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:17.480896Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784626494165676:2126], cacheItem# { Subscriber: { Subscriber: [3:7486784630789133679:2623] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 
SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:17.480999Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784686623709902:3539], recipient# [3:7486784686623709901:2356], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:17.619952Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784626494165676:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:17.620122Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784626494165676:2126], cacheItem# { Subscriber: { Subscriber: [3:7486784647969003356:2971] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:17.620210Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784686623709904:3540], recipient# [3:7486784686623709903:2357], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:18.486764Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784626494165676:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:18.486907Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784626494165676:2126], cacheItem# { Subscriber: { Subscriber: [3:7486784630789133679:2623] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:18.487006Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784690918677220:3544], recipient# [3:7486784690918677219:2358], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:18.619909Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784626494165676:2126], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:18.620071Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784626494165676:2126], cacheItem# { Subscriber: { Subscriber: [3:7486784647969003356:2971] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:18.620179Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784690918677222:3545], recipient# [3:7486784690918677221:2359], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2025-03-28T09:07:49.153560Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784567392702919:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:49.153609Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d01/r3tmp/tmpXdIYWR/pdisk_1.dat 2025-03-28T09:07:49.716927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:49.717101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-03-28T09:07:49.717470Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:49.733513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12226 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:07:50.099604Z node 1 :TX_PROXY DEBUG: actor# [1:7486784567392703154:2115] Handle TEvNavigate describe path dc-1 2025-03-28T09:07:50.099654Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784571687670930:2443] HANDLE EvNavigateScheme dc-1 2025-03-28T09:07:50.099781Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784567392703178:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:07:50.099809Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7486784567392703178:2128], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-28T09:07:50.100010Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784571687670931:2444][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:07:50.107153Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567392702835:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784571687670935:2444] 2025-03-28T09:07:50.107229Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784567392702835:2050] Subscribe: subscriber# [1:7486784571687670935:2444], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:50.107317Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567392702838:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784571687670936:2444] 2025-03-28T09:07:50.107352Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784567392702838:2053] Subscribe: subscriber# [1:7486784571687670936:2444], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:50.107400Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567392702841:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486784571687670937:2444] 2025-03-28T09:07:50.107426Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784567392702841:2056] Subscribe: subscriber# [1:7486784571687670937:2444], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:07:50.107528Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784571687670935:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784567392702835:2050] 2025-03-28T09:07:50.107608Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784571687670936:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784567392702838:2053] 2025-03-28T09:07:50.107639Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567392702835:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784571687670935:2444] 2025-03-28T09:07:50.107656Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784571687670937:2444][/dc-1] Handle 
NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784567392702841:2056] 2025-03-28T09:07:50.107668Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567392702838:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784571687670936:2444] 2025-03-28T09:07:50.107680Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567392702841:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486784571687670937:2444] 2025-03-28T09:07:50.107693Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784571687670931:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784571687670932:2444] 2025-03-28T09:07:50.107721Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784571687670931:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784571687670933:2444] 2025-03-28T09:07:50.107805Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7486784571687670931:2444][/dc-1] Set up state: owner# [1:7486784567392703178:2128], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:07:50.107925Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784571687670931:2444][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486784571687670934:2444] 2025-03-28T09:07:50.107974Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7486784571687670931:2444][/dc-1] Path was already updated: owner# [1:7486784567392703178:2128], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:07:50.108054Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784571687670935:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784571687670932:2444], cookie# 1 2025-03-28T09:07:50.108082Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784571687670936:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784571687670933:2444], cookie# 1 2025-03-28T09:07:50.108096Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784571687670937:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784571687670934:2444], cookie# 1 2025-03-28T09:07:50.108124Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567392702838:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784571687670936:2444], cookie# 1 2025-03-28T09:07:50.108166Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567392702841:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784571687670937:2444], cookie# 1 2025-03-28T09:07:50.108197Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784571687670936:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7486784567392702838:2053], cookie# 1 2025-03-28T09:07:50.108209Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784571687670937:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784567392702841:2056], cookie# 1 2025-03-28T09:07:50.108239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784571687670931:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784571687670933:2444], cookie# 1 2025-03-28T09:07:50.108259Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784571687670931:2444][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, failures# 0 2025-03-28T09:07:50.108271Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784571687670931:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784571687670934:2444], cookie# 1 2025-03-28T09:07:50.108287Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784571687670931:2444][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-03-28T09:07:50.108308Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784567392702835:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784571687670935:2444], cookie# 1 2025-03-28T09:07:50.108323Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784571687670935:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784567392702835:2050], cookie# 1 2025-03-28T09:07:50.108339Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784571687670931:2444][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784571687670932:2444], cookie# 1 2025-03-28T09:07:50.108351Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784571687670931:2444][/dc-1] Unexpected sync response: sender# [1:7486784571687670932:2444], cookie# 1 2025-03-28T09:07:50.148922Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784567392703178:2128], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-28T09:07:50.149184Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784567392703178:2128], notify#
NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... nfo }] } 2025-03-28T09:08:17.141414Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784632564287719:2127], cacheItem# { Subscriber: { Subscriber: [3:7486784658334092938:3184] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:17.141510Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784688398865102:4005], recipient# [3:7486784688398865101:2361], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:17.381890Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784632564287719:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:17.382026Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784632564287719:2127], cacheItem# { Subscriber: { Subscriber: [3:7486784636859255507:2453] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:17.382125Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784688398865104:4006], recipient# [3:7486784688398865103:2362], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: 
PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:17.396048Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784632564287719:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:17.396179Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784632564287719:2127], cacheItem# { Subscriber: { Subscriber: [3:7486784636859255507:2453] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:17.396265Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784688398865106:4007], recipient# [3:7486784688398865105:2363], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:18.148795Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784632564287719:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:18.148932Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784632564287719:2127], cacheItem# { Subscriber: { Subscriber: [3:7486784658334092938:3184] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:18.149031Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784692693832422:4011], recipient# [3:7486784692693832421:2364], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] 
} 2025-03-28T09:08:18.391457Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784632564287719:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:18.391583Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784632564287719:2127], cacheItem# { Subscriber: { Subscriber: [3:7486784636859255507:2453] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:18.391670Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784692693832424:4012], recipient# [3:7486784692693832423:2365], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:18.399073Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486784632564287719:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:18.399195Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784632564287719:2127], cacheItem# { Subscriber: { Subscriber: [3:7486784636859255507:2453] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:18.399281Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784692693832426:4013], recipient# [3:7486784692693832425:2366], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:19.150999Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# 
[3:7486784632564287719:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:08:19.151120Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486784632564287719:2127], cacheItem# { Subscriber: { Subscriber: [3:7486784658334092938:3184] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:08:19.151196Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486784696988799742:4017], recipient# [3:7486784696988799741:2367], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |79.9%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut >> TCmsTest::TestForceRestartMode >> TCmsTest::TestOutdatedState >> TCdcStreamWithInitialScanTests::InitialScanEnabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled >> TCdcStreamTests::DropStreamImplShouldFail [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TCdcStreamWithInitialScanTests::InitialScanDisabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanProgress >> KqpPg::TableDeleteWhere+useSink [GOOD] >> KqpPg::TableDeleteWhere-useSink |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:106:2138] Leader for TabletID 9437184 is [1:122:2148] sender: [1:124:2057] recipient: [1:106:2138] 2025-03-28T09:07:33.899461Z node 1 :TX_DATASHARD TRACE: StateInit, 
received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:07:33.909040Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:07:33.909712Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:07:33.910020Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:07:33.921896Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:07:34.070989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:07:34.071057Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:34.072694Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:07:34.072849Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:07:34.074450Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:07:34.074530Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:07:34.074596Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:07:34.074974Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:07:34.075089Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:07:34.075156Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2148] in generation 2 Leader for TabletID 9437184 is [1:122:2148] sender: [1:209:2057] recipient: [1:14:2061] 2025-03-28T09:07:34.167696Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:07:34.217883Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:07:34.218133Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:07:34.218272Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:214:2212] 2025-03-28T09:07:34.218311Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:07:34.218345Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:07:34.218387Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:34.218616Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:34.218661Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:34.218990Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:07:34.219092Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:07:34.219155Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:34.219224Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:07:34.219264Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:07:34.219306Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:07:34.219334Z node 1 :TX_DATASHARD TRACE: 
Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:07:34.219362Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:07:34.219399Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:07:34.219501Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:210:2209], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:34.219572Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:34.219658Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:208:2208], serverId# [1:210:2209], sessionId# [0:0:0] 2025-03-28T09:07:34.222269Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:07:34.222346Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:07:34.222435Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:07:34.222614Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:07:34.222671Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:07:34.224859Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:07:34.224939Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:34.225008Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:07:34.225045Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:07:34.225080Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:07:34.225424Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:07:34.225478Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:07:34.225517Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:07:34.225550Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:07:34.225615Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:07:34.225648Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:07:34.225680Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:07:34.225711Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:07:34.225744Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:07:34.241323Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:07:34.241407Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 
2025-03-28T09:07:34.241442Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:07:34.241486Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:07:34.241565Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:07:34.242172Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:220:2218], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:34.242229Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:34.242272Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:219:2217], serverId# [1:220:2218], sessionId# [0:0:0] 2025-03-28T09:07:34.242427Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-03-28T09:07:34.242458Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:07:34.242619Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-03-28T09:07:34.242663Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-28T09:07:34.242833Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:07:34.242907Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:07:34.246763Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:07:34.246847Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:34.247106Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:34.247142Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:34.247200Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:34.247257Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:07:34.247295Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:07:34.247333Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-03-28T09:07:34.247370Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-03-28T09:07:34.247437Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-28T09:07:34.247493Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:07:34.247532Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:07:34.247570Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:07:34.247754Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-03-28T09:07:34.247791Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 
2025-03-28T09:07:34.247817Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:07:34.247844Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:07:34.247868Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:07:34.247941Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:34.247971Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:07:34.248021Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:07:34.248062Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:07:34.248127Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-03-28T09:07:34.248185Z node 1 :TX_DATASHARD TRAC ... cribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2025-03-28T09:08:22.967523Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [23:281:2265], Recipient [23:235:2228]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [23:285:2269] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:08:22.967569Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T09:08:22.967688Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [23:123:2149], Recipient [23:235:2228]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-03-28T09:08:22.967730Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-03-28T09:08:22.967785Z node 23 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-03-28T09:08:22.967867Z node 23 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-03-28T09:08:22.984034Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [23:281:2265], Recipient [23:235:2228]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [23:281:2265] ServerId: [23:285:2269] } 2025-03-28T09:08:22.984131Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-28T09:08:23.019300Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [23:292:2274], Recipient [23:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:08:23.019406Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:08:23.019480Z node 23 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [23:290:2273], serverId# [23:292:2274], sessionId# [0:0:0] 2025-03-28T09:08:23.019699Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [23:289:2272], Recipient [23:235:2228]: NKikimrTabletBase.TEvGetCounters 2025-03-28T09:08:23.043611Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [23:99:2134], Recipient [23:235:2228]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 98784249942 } 2025-03-28T09:08:23.043729Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-03-28T09:08:23.044224Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [23:294:2276], Recipient 
[23:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:08:23.044277Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:08:23.044346Z node 23 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [23:293:2275], serverId# [23:294:2276], sessionId# [0:0:0] 2025-03-28T09:08:23.044653Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [23:99:2134], Recipient [23:235:2228]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 98784249942 } TxBody: "\032\324\002\037\002\006Arg\005\205\n\205\000\205\004?\000\205\002\202\0047\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\004\01057$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020T\001\005?\026)\211\n?\024\206\203\004?\024? ?\024\203\004\020Fold\000)\211\002?\"\206? \034Collect\000)\211\006?(? \203\004\203\0024ListFromRange\000\003? \000\003?,\003\022z\003?.\004\007\010\000\n\003?\024\000)\251\000? \002\000\004)\251\000?\024\002\000\002)\211\006?$\203\005@? ?\024\030Invoke\000\003?F\006Add?@?D\001\006\002\014\000\007\016\000\003\005?\010?\014\006\002?\006?R\000\003?\014?\014\037/ \0018\000" TxId: 2 ExecLevel: 0 Flags: 0 2025-03-28T09:08:23.044707Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:08:23.044864Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:08:23.046049Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2025-03-28T09:08:23.046150Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-03-28T09:08:23.046210Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2025-03-28T09:08:23.046261Z node 23 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:08:23.046313Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:08:23.046366Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:08:23.046451Z node 23 :TX_DATASHARD TRACE: Activated operation [0:2] at 9437184 2025-03-28T09:08:23.046502Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-03-28T09:08:23.046536Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-28T09:08:23.046569Z node 23 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2025-03-28T09:08:23.046606Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-03-28T09:08:23.046665Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:08:23.046748Z node 23 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 requested 132374 more memory 2025-03-28T09:08:23.046807Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-03-28T09:08:23.047191Z node 23 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:08:23.047258Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-03-28T09:08:23.047321Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:08:23.048382Z node 23 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 132502 and requests 1060016 more for the next try 2025-03-28T09:08:23.048599Z node 23 :TX_DATASHARD DEBUG: tx 2 released its data 2025-03-28T09:08:23.048662Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-03-28T09:08:23.048945Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:08:23.048985Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-03-28T09:08:23.049920Z node 23 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-03-28T09:08:23.049989Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:08:23.050556Z node 23 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 1192518 and requests 9540144 more for the next try 2025-03-28T09:08:23.050679Z node 23 :TX_DATASHARD DEBUG: tx 2 released its data 2025-03-28T09:08:23.058002Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-03-28T09:08:23.059044Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:08:23.059109Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-03-28T09:08:23.059931Z node 23 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-03-28T09:08:23.059995Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:08:23.064152Z node 23 :TX_DATASHARD TRACE: Operation [0:2] at 9437184 exceeded memory limit 10732662 and requests 85861296 more for the next try 2025-03-28T09:08:23.064348Z node 23 :TX_DATASHARD DEBUG: tx 2 released its data 2025-03-28T09:08:23.064398Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Restart 2025-03-28T09:08:23.064710Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:08:23.064756Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2025-03-28T09:08:23.065488Z node 23 :TX_DATASHARD DEBUG: tx 2 at 9437184 restored its data 2025-03-28T09:08:23.065548Z node 23 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:08:23.426209Z node 23 :TX_DATASHARD TRACE: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2025-03-28T09:08:23.426327Z node 23 :TX_DATASHARD TRACE: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:08:23.426402Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is 
ExecutedNoMoreRestarts 2025-03-28T09:08:23.426438Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2025-03-28T09:08:23.426480Z node 23 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit FinishPropose 2025-03-28T09:08:23.426521Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit FinishPropose 2025-03-28T09:08:23.426629Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:08:23.426675Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2025-03-28T09:08:23.426737Z node 23 :TX_DATASHARD TRACE: Add [0:2] at 9437184 to execution unit CompletedOperations 2025-03-28T09:08:23.426790Z node 23 :TX_DATASHARD TRACE: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2025-03-28T09:08:23.426849Z node 23 :TX_DATASHARD TRACE: Execution status for [0:2] at 9437184 is Executed 2025-03-28T09:08:23.426882Z node 23 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2025-03-28T09:08:23.426938Z node 23 :TX_DATASHARD TRACE: Execution plan for [0:2] at 9437184 has finished 2025-03-28T09:08:23.443746Z node 23 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:08:23.443850Z node 23 :TX_DATASHARD TRACE: Complete execution for [0:2] at 9437184 on unit FinishPropose 2025-03-28T09:08:23.443931Z node 23 :TX_DATASHARD TRACE: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-03-28T09:08:23.444068Z node 23 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:08:23.445431Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [23:299:2281], Recipient [23:235:2228]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:08:23.445510Z node 23 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:08:23.445579Z node 23 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [23:298:2280], serverId# [23:299:2281], sessionId# [0:0:0] 2025-03-28T09:08:23.445780Z node 23 :TX_DATASHARD TRACE: StateWork, received event# 268830214, Sender [23:297:2279], Recipient [23:235:2228]: NKikimrTabletBase.TEvGetCounters >> TCmsTest::ActionIssuePartialPermissions [GOOD] >> TCmsTest::ActionWithZeroDuration >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 >> TCmsTest::RequestReplaceBrokenDevices [GOOD] >> TCmsTest::PermissionDuration >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 >> TCdcStreamTests::CopyTableShouldNotCopyStream [GOOD] >> TCdcStreamTests::MoveTableShouldFail >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc >> TCmsTest::TestForceRestartMode [GOOD] >> TCmsTest::TestForceRestartModeDisconnects >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TCdcStreamWithInitialScanTests::InitialScanProgress [GOOD] >> 
TCdcStreamWithInitialScanTests::WithoutPqTransactions >> TCmsTest::TestOutdatedState [GOOD] >> TCmsTest::TestSetResetMarkers >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 >> DataShardReadIterator::ShouldReadRangeChunk2 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk3 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 >> TCmsTest::ActionWithZeroDuration [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TCmsTest::PermissionDuration [GOOD] >> TCmsTest::RacyStartCollecting >> TCdcStreamTests::MoveTableShouldFail [GOOD] >> TCdcStreamTests::CheckSchemeLimits >> TCmsTest::TestForceRestartModeDisconnects [GOOD] >> TCmsTest::StateStorageTwoRings >> TestYmqHttpProxy::TestCreateQueueWithWrongBody [GOOD] >> StatisticsSaveLoad::Simple [GOOD] >> PgCatalog::PgType [GOOD] >> PgCatalog::InformationSchema >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> TCmsTest::VDisksEviction >> TableCreator::CreateTables >> TCmsTest::TestSetResetMarkers [GOOD] >> TCmsTest::TestProcessingQueue >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] >> TCdcStreamWithInitialScanTests::WithoutPqTransactions [GOOD] >> TCdcStreamWithInitialScanTests::WithPqTransactions >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute >> TCmsTest::StateStorageTwoRings [GOOD] >> TCmsTest::SysTabletsNode >> ReadOnlyVDisk::TestDiscover [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD] >> TCmsTest::AllVDisksEvictionInRack |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |79.9%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut >> TCmsTest::RacyStartCollecting [GOOD] >> TCmsTest::PriorityRange >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 >> DstCreator::WithSyncIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple [GOOD] Test command err: 2025-03-28T09:08:15.033893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:08:15.034292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:08:15.034456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001637/r3tmp/tmpBKP7jH/pdisk_1.dat 2025-03-28T09:08:15.615059Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10903, node 1 2025-03-28T09:08:16.094568Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:16.094626Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:16.094656Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:16.103225Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:08:16.107810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:08:16.232910Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:16.233043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:16.253389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19102 2025-03-28T09:08:17.117979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:08:21.347200Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:08:21.455398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:21.455534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:21.497467Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:08:21.502611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:21.980771Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:08:21.981325Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:08:21.981938Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:08:21.982098Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:08:21.982187Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:08:21.982463Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:08:21.982568Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:08:21.982638Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:08:21.982935Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:08:22.192138Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:22.192262Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:22.208377Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:22.553156Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:22.632376Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:08:22.632515Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:08:22.726995Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:08:22.728458Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:08:22.728712Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:08:22.728783Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:08:22.728833Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:08:22.728900Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:08:22.728956Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:08:22.729014Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:08:22.729514Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:08:22.780468Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1855:2586] 2025-03-28T09:08:22.781330Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:08:22.819152Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:08:22.819232Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:08:22.819318Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:08:22.824106Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:08:22.824225Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1899:2608], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:08:22.873743Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1935:2626] 2025-03-28T09:08:22.874203Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1935:2626], schemeshard id = 72075186224037897 2025-03-28T09:08:22.954288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:08:22.982548Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:08:22.982760Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:08:23.283840Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:08:23.650276Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:08:23.749818Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:08:24.818010Z node 1 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:08:24.818610Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:08:24.874048Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:08:24.887516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2267:3090], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:24.887651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2282:3095], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:24.887729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:24.905408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037897 2025-03-28T09:08:25.012973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2287:3098], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:08:25.318022Z node 1 :TX_PROXY ERROR: Actor# [1:2379:3129] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:25.790975Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2401:3141]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:08:25.791225Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:08:25.791323Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2403:3143] 2025-03-28T09:08:25.791400Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2403:3143] 2025-03-28T09:08:25.792007Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2404:2848] 2025-03-28T09:08:25.792328Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2403:3143], server id = [2:2404:2848], tablet id = 72075186224037894, status = OK 2025-03-28T09:08:25.792607Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2404:2848], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:08:25.792683Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:08:25.792910Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:08:25.792995Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2401:3141], StatRequests.size() = 1 2025-03-28T09:08:26.024472Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZjJlYTAxMzMtMTMzNmVhMS0yOTUyNWE4Zi1hZDYyZmQwNA==, TxId: 2025-03-28T09:08:26.024566Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZjJlYTAxMzMtMTMzNmVhMS0yOTUyNWE4Zi1hZDYyZmQwNA==, TxId: 2025-03-28T09:08:26.025650Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:08:26.033884Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-03-28T09:08:26.165224Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2432:3164]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:08:26.165490Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:08:26.165543Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2432:3164], StatRequests.size() = 1 2025-03-28T09:08:26.513610Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZmE3YzdiMWUtMjU4NmY4MjEtNjJmOTBjZWYtNGI4Y2M1MDY=, TxId: 01jqe05076fvdz7q8apyk5rnyr 2025-03-28T09:08:26.513764Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZmE3YzdiMWUtMjU4NmY4MjEtNjJmOTBjZWYtNGI4Y2M1MDY=, TxId: 01jqe05076fvdz7q8apyk5rnyr 2025-03-28T09:08:26.529543Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:08:26.543321Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-03-28T09:08:26.582496Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ODk4MzgxZGMtZDE1OWM0YjktNDVlNDlhMWYtMzcwOWNhMDY=, TxId: 01jqe0509gc2f8r9x47bbbg31h 2025-03-28T09:08:26.582623Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ODk4MzgxZGMtZDE1OWM0YjktNDVlNDlhMWYtMzcwOWNhMDY=, TxId: 01jqe0509gc2f8r9x47bbbg31h >> StatisticsSaveLoad::ForbidAccess [GOOD]
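For readability, the two YQL statements that the statistics [TQueryBase] actor runs against `.metadata/_statistics` in the log above are reproduced below in formatted form. The query text is transcribed verbatim from the RunDataQuery entries; the only additions are the comments and the element types of the two List parameters, which the flattened log does not show and which are therefore assumptions (Uint32 column tags, String statistics blobs):

    -- Save path (UPSERT from the earlier RunDataQuery entry): writes one row
    -- per column tag; the List element types below are assumed, not shown in the log.
    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;  -- assumed element type
    DECLARE $data AS List<String>;         -- assumed element type
    UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
           ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);

    -- Load path (SELECT from the later RunDataQuery entries): point lookup on the
    -- full key (owner_id, local_path_id, stat_type, column_tag), one statistic per query.
    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tag AS Uint32;
    SELECT data FROM `.metadata/_statistics`
    WHERE owner_id = $owner_id AND local_path_id = $local_path_id
      AND stat_type = $stat_type AND column_tag = $column_tag;

Both runs of the SELECT finish with TEvDataQueryResult SUCCESS in the entries above, so the save/load round trip against the statistics table completes cleanly in this log.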
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD] Test command err: RandomSeed# 15202349657366643071 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 3 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-03-28T09:08:24.398146Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-03-28T09:08:24.766747Z 1 00h02m00.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 2025-03-28T09:08:24.767887Z 2 00h02m00.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2025-03-28T09:08:25.053092Z 3 00h02m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] 2025-03-28T09:08:25.054082Z 1 00h02m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 2025-03-28T09:08:25.054802Z 2 00h02m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] 2025-03-28T09:08:25.055110Z 1 00h02m30.110512s :BS_PROXY_PUT ERROR: [65704f842733115a] Result# TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize#
131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 
{[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Putting VDisk #4 to normal === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Putting VDisk #5 to normal === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Putting VDisk #6 to normal === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} >> TransferWriter::Write_ColumnTable [GOOD] >> DataShardReadIterator::ShouldReadFromHeadWithConflict-UseSink [GOOD] >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict+UseSink >> TCmsTest::TestProcessingQueue [GOOD] >> TCmsTest::VDisksEviction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] Test command err: 2025-03-28T09:07:19.599637Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784435436161960:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:19.599685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00159f/r3tmp/tmpcwvK01/pdisk_1.dat 2025-03-28T09:07:20.019835Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:20.022969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:20.023062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 23054, node 1 2025-03-28T09:07:20.078122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:07:20.079558Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:07:20.079576Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:07:20.099455Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:07:20.099476Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:20.099484Z node 1 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:20.099586Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22607 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:07:20.389122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Triggering split by load TClient is connected to server localhost:22607 2025-03-28T09:07:22.841908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784448321064803:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:22.842024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.120249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:07:23.293896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784452616032284:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.294004Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.308440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152843257 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152843257 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-28T09:07:23.404703Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784452616032383:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.404796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.404980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784452616032391:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.405036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784452616032392:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.412196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784452616032415:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.412262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.412902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784452616032421:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.413061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784452616032417:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.413568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784452616032438:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.413798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784452616032434:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.413852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.415593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784452616032453:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.415641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784452616032457:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.415682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.416738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:07:23.416966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:07:23.416998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710660:1, at schemeshard: 72057594046644480 2025-03-28T09:07:23.417094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:07:23.417124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710660:2, at schemeshard: 72057594046644480 2025-03-28T09:07:23.417218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:3 ... Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards Fast forward > 10h to trigger the merge TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152843257 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) 2025-03-28T09:08:23.367028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.2799 2025-03-28T09:08:23.367087Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.214 2025-03-28T09:08:23.474827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-03-28T09:08:23.475020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-28T09:08:23.475430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Propose merge request : Transaction { WorkingDir: "/Root" OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 } Internal: true FailOnExist: false } TxId: 281474976715658 TabletId: 72057594046644480, reason: shard with tabletId: 72075186224037890 merge by load (shardLoad: 0.02), shardToMergeCount: 2, totalSize: 0, sizeToMerge: 0, totalLoad: 0.04, loadThreshold: 0.07 2025-03-28T09:08:23.475552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:08:23.476089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:08:23.476901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-28T09:08:23.482880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-28T09:08:23.484077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2025-03-28T09:08:23.492932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715658:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-03-28T09:08:23.493025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 2 -> 3 2025-03-28T09:08:23.501825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:08:23.518625Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7486784710314184723:3581] 2025-03-28T09:08:23.566845Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-03-28T09:08:23.566958Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-03-28T09:08:23.567121Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-03-28T09:08:23.579159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination operationId# 281474976715658:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715658:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715658 TabletId: 72075186224037891 2025-03-28T09:08:23.579203Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 3 -> 131 2025-03-28T09:08:23.581553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:08:23.611069Z node 1 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2025-03-28T09:08:23.611230Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:08:23.611302Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-03-28T09:08:23.611335Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2025-03-28T09:08:23.611620Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-03-28T09:08:23.621457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715658 TabletId: 72075186224037889 2025-03-28T09:08:23.621748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TTransferData operationId# 281474976715658:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715658 TabletId: 72075186224037890 2025-03-28T09:08:23.622040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 131 -> 132 2025-03-28T09:08:23.622313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037891 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-03-28T09:08:23.632138Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:08:23.632447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:08:23.632525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:08:23.634555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-03-28T09:08:23.634607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-03-28T09:08:23.634627Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-03-28T09:08:23.646804Z node 1 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-03-28T09:08:23.649061Z node 1 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-03-28T09:08:23.650819Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-03-28T09:08:23.651040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TNotifySrc, operationId: 281474976715658:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-03-28T09:08:23.651119Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done 
id#281474976715658:0 progress is 1/1 2025-03-28T09:08:23.651139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-03-28T09:08:23.651173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-03-28T09:08:23.660370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Unable to activate 281474976715658:0 2025-03-28T09:08:23.661136Z node 1 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:08:23.663168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:08:23.663278Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:08:23.663887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:08:23.675287Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-03-28T09:08:23.675339Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-03-28T09:08:23.676774Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-28T09:08:23.676819Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-03-28T09:08:23.679019Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-28T09:08:23.679106Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-03-28T09:08:23.679543Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-28T09:08:23.679613Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-03-28T09:08:23.722824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T09:08:23.723015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037891 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-28T09:08:23.726864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152843257 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 3 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) >> TCdcStreamTests::CheckSchemeLimits [GOOD] >> TCdcStreamTests::MeteringServerless >> DstCreator::ColumnsSizeMismatch >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 >> TCmsTest::SysTabletsNode [GOOD] >> TCmsTest::AllVDisksEvictionInRack [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEviction [GOOD] Test command err: 2025-03-28T09:08:29.812738Z node 18 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-03-28T09:08:29.812840Z node 18 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-03-28T09:08:29.813016Z node 18 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-03-28T09:08:29.819393Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120026512 } 
Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120026512 } } 2025-03-28T09:08:29.820534Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120026512 } 
Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120026512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120026512 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120026512 } Timestamp: 120026512 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120026512 } 2025-03-28T09:08:29.820865Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.002512s 2025-03-28T09:08:29.820937Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-03-28T09:08:29.821159Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" 
Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-03-28T09:08:29.821231Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2025-03-28T09:08:29.821299Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2025-03-28T09:08:29.821455Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-03-28T09:08:29.821662Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-28T09:08:29.821752Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2025-03-28T09:08:29.822024Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-03-28T09:08:29.822080Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-03-28T09:08:29.822116Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-03-28T09:08:29.822205Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-03-28T09:08:29.822249Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-03-2 ... 
mp: 120539560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120539560 } } 2025-03-28T09:08:30.292621Z node 18 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120539560 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-18-18" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 18 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120539560 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-19-19" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 19 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120539560 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-20-20" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 20 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120539560 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-21-21" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 21 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120539560 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-22-22" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 22 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" 
Timestamp: 120539560 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-23-23" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 23 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120539560 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-24-24" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 24 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120539560 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120539560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120539560 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120539560 } Timestamp: 120539560 NodeId: 25 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120539560 } 2025-03-28T09:08:30.293004Z node 18 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-03-28T09:08:30.293085Z node 18 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 2025-03-28T09:08:30.293141Z node 18 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 18 has not yet been completed) 2025-03-28T09:08:30.293311Z node 18 :CMS DEBUG: TTxStorePermissions Execute 2025-03-28T09:08:30.293531Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-3, owner# user, order# 3, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 18 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-28T09:08:30.293577Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 18, marker# MARKER_DISK_FAULTY 2025-03-28T09:08:30.296115Z node 18 :CMS DEBUG: [Sentinel] [Main] Config was updated in 0.100000s 2025-03-28T09:08:30.296205Z node 18 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-03-28T09:08:30.296344Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 18, wbId# [18:8388350642965737326:1634689637] 2025-03-28T09:08:30.296415Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 19, wbId# [19:8388350642965737326:1634689637] 2025-03-28T09:08:30.296454Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 20, wbId# [20:8388350642965737326:1634689637] 2025-03-28T09:08:30.296494Z node 18 :CMS DEBUG: [Sentinel] 
[StateUpdater] Request pdisks state: nodeId# 21, wbId# [21:8388350642965737326:1634689637] 2025-03-28T09:08:30.296532Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 22, wbId# [22:8388350642965737326:1634689637] 2025-03-28T09:08:30.296560Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 23, wbId# [23:8388350642965737326:1634689637] 2025-03-28T09:08:30.296612Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 24, wbId# [24:8388350642965737326:1634689637] 2025-03-28T09:08:30.296642Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2025-03-28T09:08:30.297206Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 18 CreateTime: 120441072 ChangeTime: 120441072 Path: "/18/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120539 2025-03-28T09:08:30.297817Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 22, response# PDiskStateInfo { PDiskId: 22 CreateTime: 120441072 ChangeTime: 120441072 Path: "/22/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120539 2025-03-28T09:08:30.297964Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 23, response# PDiskStateInfo { PDiskId: 23 CreateTime: 120441072 ChangeTime: 120441072 Path: "/23/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120539 2025-03-28T09:08:30.298165Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 24, response# PDiskStateInfo { PDiskId: 24 CreateTime: 120441072 ChangeTime: 120441072 Path: "/24/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120539 2025-03-28T09:08:30.298247Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 120441072 ChangeTime: 120441072 Path: "/25/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120539 2025-03-28T09:08:30.298311Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 20, response# PDiskStateInfo { PDiskId: 20 CreateTime: 120441072 ChangeTime: 120441072 Path: "/20/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120539 2025-03-28T09:08:30.298368Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 21, response# PDiskStateInfo { PDiskId: 21 CreateTime: 120441072 ChangeTime: 120441072 Path: "/21/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120539 2025-03-28T09:08:30.298457Z node 18 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 19 CreateTime: 120441072 ChangeTime: 120441072 Path: "/19/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120539 2025-03-28T09:08:30.298527Z node 18 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-03-28T09:08:30.316121Z node 18 :CMS DEBUG: TTxStorePermissions 
complete
2025-03-28T09:08:30.316482Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "18" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "VDisks eviction from host 18 has not yet been completed" } RequestId: "user-r-3" Deadline: 0 }
2025-03-28T09:08:30.317226Z node 18 :CMS INFO: User user removes request user-r-3
2025-03-28T09:08:30.317287Z node 18 :CMS DEBUG: Resulting status: OK
2025-03-28T09:08:30.317363Z node 18 :CMS DEBUG: TTxRemoveRequest Execute
2025-03-28T09:08:30.317416Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 18
2025-03-28T09:08:30.317582Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-3, reason# explicit remove
2025-03-28T09:08:30.334522Z node 18 :CMS DEBUG: TTxRemoveRequest Complete
2025-03-28T09:08:30.335857Z node 18 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: REJECT RequestId: "user-r-3" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } }
>> TCdcStreamWithInitialScanTests::WithPqTransactions [GOOD]
>> TCdcStreamWithInitialScanTests::AlterStream
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestProcessingQueue [GOOD]
Test command err:
2025-03-28T09:08:24.461790Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config
2025-03-28T09:08:24.634455Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config
2025-03-28T09:08:24.650405Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config
2025-03-28T09:08:24.710594Z node 1 :CMS ERROR: [InfoCollector] Couldn't get base config
2025-03-28T09:08:30.072564Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17
2025-03-28T09:08:30.072654Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18
2025-03-28T09:08:30.072685Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19
2025-03-28T09:08:30.072706Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20
2025-03-28T09:08:30.072728Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21
2025-03-28T09:08:30.072749Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22
2025-03-28T09:08:30.072770Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23
2025-03-28T09:08:30.072792Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24
>> DstCreator::WithSyncIndexAndIntermediateDir
|80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SysTabletsNode [GOOD]
>> KqpExtractPredicateLookup::OverflowLookup [GOOD]
>> KqpExtractPredicateLookup::ComplexRange
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess [GOOD]
Test command err:
2025-03-28T09:08:16.550161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-28T09:08:16.550452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:08:16.550528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001622/r3tmp/tmpgmZtcG/pdisk_1.dat
2025-03-28T09:08:17.232004Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 5121, node 1
2025-03-28T09:08:17.706021Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:08:17.706153Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:08:17.706200Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:08:17.707145Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:08:17.712734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-28T09:08:17.837509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:08:17.846744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:08:17.876407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:25325
2025-03-28T09:08:18.508873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-28T09:08:22.262909Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2
2025-03-28T09:08:22.339967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:08:22.340090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:08:22.405659Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-03-28T09:08:22.416273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:08:22.737514Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-28T09:08:22.738186Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-28T09:08:22.738751Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-28T09:08:22.738939Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-28T09:08:22.739197Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-28T09:08:22.739278Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-28T09:08:22.739356Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-28T09:08:22.740972Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-28T09:08:22.741184Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed
2025-03-28T09:08:22.952221Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:08:22.952351Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:08:22.972637Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:08:23.340071Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:08:23.398284Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor
2025-03-28T09:08:23.398442Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute
2025-03-28T09:08:23.433336Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete
2025-03-28T09:08:23.435610Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute
2025-03-28T09:08:23.435860Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0
2025-03-28T09:08:23.435930Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0
2025-03-28T09:08:23.436003Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0
2025-03-28T09:08:23.436069Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0
2025-03-28T09:08:23.436122Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0
2025-03-28T09:08:23.436176Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete
2025-03-28T09:08:23.436776Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes
2025-03-28T09:08:23.516925Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1859:2587]
2025-03-28T09:08:23.517795Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database
2025-03-28T09:08:23.545655Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown
2025-03-28T09:08:23.545726Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table
2025-03-28T09:08:23.545799Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics
2025-03-28T09:08:23.558216Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897
2025-03-28T09:08:23.558354Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1895:2609], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897
2025-03-28T09:08:23.615611Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1935:2627]
2025-03-28T09:08:23.616291Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1935:2627], schemeshard id = 72075186224037897
2025-03-28T09:08:23.720797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897
2025-03-28T09:08:23.738461Z node 2 :STATISTICS DEBUG: Table _statistics updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 }
2025-03-28T09:08:23.738657Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657
2025-03-28T09:08:24.064082Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete
2025-03-28T09:08:24.363909Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking...
2025-03-28T09:08:24.436124Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing
2025-03-28T09:08:25.584482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2238:3072], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:25.584652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:25.603247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897
2025-03-28T09:08:26.287275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2542:3122], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:26.287483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:26.289074Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2547:3126]], StatType[ 0 ], StatRequestsCount[ 1 ]
2025-03-28T09:08:26.289290Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ]
2025-03-28T09:08:26.289402Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2549:3128]
2025-03-28T09:08:26.289489Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2549:3128]
2025-03-28T09:08:26.290150Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2550:2993]
2025-03-28T09:08:26.290493Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2549:3128], server id = [2:2550:2993], tablet id = 72075186224037894, status = OK
2025-03-28T09:08:26.294870Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2550:2993], node id = 1, have schemeshards count = 0, need schemeshards count = 1
2025-03-28T09:08:26.295012Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1
2025-03-28T09:08:26.295491Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1
2025-03-28T09:08:26.295597Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2547:3126], StatRequests.size() = 1
2025-03-28T09:08:26.324555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2554:3132], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:26.324698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:26.325189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2559:3137], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:26.357579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480
2025-03-28T09:08:26.611225Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck
2025-03-28T09:08:26.611318Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0
2025-03-28T09:08:26.726835Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2549:3128], schemeshard count = 1
2025-03-28T09:08:27.210082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2561:3139], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking }
2025-03-28T09:08:27.461040Z node 1 :TX_PROXY ERROR: Actor# [1:2684:3211] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:08:27.482347Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2707:3227]], StatType[ 0 ], StatRequestsCount[ 1 ]
2025-03-28T09:08:27.482532Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ]
2025-03-28T09:08:27.482573Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2707:3227], StatRequests.size() = 1
2025-03-28T09:08:27.715063Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe04zyscbg46ryvfm2rmya8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjNkZGE1NGItMWVjMjliMmItNTg5NzViZS02ODU2ZDY2Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:08:28.004411Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:2782:3258], for# user@builtin, access# DescribeSchema
2025-03-28T09:08:28.004473Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:2782:3258], for# user@builtin, access# DescribeSchema
2025-03-28T09:08:28.012882Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:2772:3254], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/Database/.metadata/_statistics]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-03-28T09:08:28.014170Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjRmODU1ZjktZjdmZDhjMTItODY2OGMwOGQtNDJmNzVkNGE=, ActorId: [1:2763:3246], ActorState: ExecuteState, TraceId: 01jqe051mq5kgm7re9r8chkpg9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
>> TCmsTest::PriorityRange [GOOD]
>> GenericFederatedQuery::PostgreSQLSelectCount [GOOD]
>> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::PriorityRange [GOOD]
Test command err:
2025-03-28T09:08:29.068397Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17
2025-03-28T09:08:29.068467Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18
2025-03-28T09:08:29.068491Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19
2025-03-28T09:08:29.068514Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20
2025-03-28T09:08:29.068537Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21
2025-03-28T09:08:29.068558Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22
2025-03-28T09:08:29.068601Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23
2025-03-28T09:08:29.068625Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24
2025-03-28T09:08:29.095191Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17
2025-03-28T09:08:29.095285Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18
2025-03-28T09:08:29.095322Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19
2025-03-28T09:08:29.095346Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20
2025-03-28T09:08:29.095376Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21
2025-03-28T09:08:29.095410Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22
2025-03-28T09:08:29.095433Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23
2025-03-28T09:08:29.095452Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24
2025-03-28T09:08:29.142452Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 17:17
2025-03-28T09:08:29.142525Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 18:18
2025-03-28T09:08:29.142550Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 19:19
2025-03-28T09:08:29.142570Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 20:20
2025-03-28T09:08:29.142592Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 21:21
2025-03-28T09:08:29.142612Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 22:22
2025-03-28T09:08:29.142633Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 23:23
2025-03-28T09:08:29.142655Z node 17 :CMS ERROR: Cannot update state for unknown PDisk 24:24
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::AllVDisksEvictionInRack [GOOD]
Test command err:
2025-03-28T09:08:30.479844Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute
2025-03-28T09:08:30.479931Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete
2025-03-28T09:08:30.480059Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z
2025-03-28T09:08:30.481414Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# 
NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 25 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 26 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 27 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 28 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 29 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 30 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: 
"vdisk-2-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 31 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 32 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120028000 } } 2025-03-28T09:08:30.481903Z node 25 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-25-25" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 25 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 26 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 27 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 28 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-29-29" State: UP 
Timestamp: 120028000 } Timestamp: 120028000 NodeId: 29 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 30 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 31 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120028000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120028000 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120028000 } Timestamp: 120028000 NodeId: 32 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120028000 } 2025-03-28T09:08:30.482048Z node 25 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003000s 2025-03-28T09:08:30.482113Z node 25 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-03-28T09:08:30.482282Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2025-03-28T09:08:30.482333Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 2025-03-28T09:08:30.482377Z node 25 :CMS DEBUG: Result: DISALLOW_TEMP (reason: VDisks eviction from host 25 has not yet been completed) 2025-03-28T09:08:30.482493Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-03-28T09:08:30.482662Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-28T09:08:30.483361Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Add host marker: host# 25, marker# MARKER_DISK_FAULTY 2025-03-28T09:08:30.483672Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 25, wbId# [25:8388350642965737326:1634689637] 2025-03-28T09:08:30.483729Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637] 2025-03-28T09:08:30.483763Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# 
[27:8388350642965737326:1634689637] 2025-03-28T09:08:30.483813Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2025-03-28T09:08:30.483845Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2025-03-28T09:08:30.483875Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2025-03-28T09:08:30.483905Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2025-03-28T09:08:30.483953Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2025-03-28T09:08:30.495306Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: ... [28:8388350642965737326:1634689637] 2025-03-28T09:08:30.748446Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2025-03-28T09:08:30.748475Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2025-03-28T09:08:30.748502Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2025-03-28T09:08:30.748549Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2025-03-28T09:08:30.748848Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028 2025-03-28T09:08:30.749301Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028 2025-03-28T09:08:30.749407Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028 2025-03-28T09:08:30.749487Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028 2025-03-28T09:08:30.749519Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028 2025-03-28T09:08:30.749566Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028 2025-03-28T09:08:30.749600Z node 25 
:CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028 2025-03-28T09:08:30.749636Z node 25 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 180028 2025-03-28T09:08:30.749668Z node 25 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-03-28T09:08:30.749799Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 26:26, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-03-28T09:08:30.749841Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 25:25, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-03-28T09:08:30.749869Z node 25 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2025-03-28T09:08:30.750049Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2025-03-28T09:08:30.750213Z node 25 :CMS DEBUG: TTxLogAndSend Execute 2025-03-28T09:08:30.750292Z node 25 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Success: true, cookie# 1 2025-03-28T09:08:30.750323Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 25:25 2025-03-28T09:08:30.750348Z node 25 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 26:26 2025-03-28T09:08:30.762864Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2025-03-28T09:08:30.762953Z node 25 :CMS DEBUG: TTxLogAndSend Complete 2025-03-28T09:08:30.778462Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-03-28T09:08:30.778545Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-03-28T09:08:30.778607Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2025-03-28T09:08:30.779394Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-28T09:08:30.779495Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 25 has not yet been completed" } 2025-03-28T09:08:30.779604Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 25, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2025-03-28T09:08:30.779651Z node 25 :CMS DEBUG: Ring: 0; State: Ok 2025-03-28T09:08:30.779678Z node 25 :CMS DEBUG: Ring: 1; State: Ok 2025-03-28T09:08:30.779697Z node 25 :CMS DEBUG: Ring: 2; State: Ok 2025-03-28T09:08:30.779721Z node 25 :CMS DEBUG: Result: ALLOW 2025-03-28T09:08:30.779878Z node 25 :CMS DEBUG: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user 2025-03-28T09:08:30.779927Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2025-03-28T09:08:30.779992Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-03-28T09:08:30.780121Z node 25 :CMS NOTICE: [AuditLog] [CMS 
tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:13:00.128000Z, action# Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 2025-03-28T09:08:30.780222Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-28T09:08:30.795680Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-03-28T09:08:30.795940Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "25" Services: "storage" Duration: 600000000 } Deadline: 780128000 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 25 InterconnectPort: 12001 } } } } 2025-03-28T09:08:30.795989Z node 25 :CMS DEBUG: Schedule cleanup at 1970-01-01T00:33:00.128000Z 2025-03-28T09:08:30.815323Z node 25 :CMS INFO: Adding lock for Host ::1:12001 (25) (permission user-p-1 until 1970-01-01T00:13:00Z) 2025-03-28T09:08:30.815693Z node 25 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-03-28T09:08:30.815798Z node 25 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-03-28T09:08:30.815871Z node 25 :CMS DEBUG: Timestamp: 1970-01-01T00:03:00Z 2025-03-28T09:08:30.816770Z node 25 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-28T09:08:30.816869Z node 25 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } 2025-03-28T09:08:30.816929Z node 25 :CMS DEBUG: [Nodes Counter] Checking Node: 26, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2025-03-28T09:08:30.816982Z node 25 :CMS DEBUG: Result: ALLOW 2025-03-28T09:08:30.817117Z node 25 :CMS DEBUG: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user 2025-03-28T09:08:30.817177Z node 25 :CMS INFO: Adding lock for Host ::1:12002 (26) (permission user-p-2 until 1970-01-01T00:13:00Z) 2025-03-28T09:08:30.817273Z node 25 :CMS DEBUG: TTxStorePermissions Execute 2025-03-28T09:08:30.817451Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:13:00.229512Z, action# Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2025-03-28T09:08:30.817560Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-28T09:08:30.830652Z node 25 :CMS DEBUG: TTxStorePermissions complete 2025-03-28T09:08:30.830994Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, 
response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } Deadline: 780229512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 26 InterconnectPort: 12002 } } } }
2025-03-28T09:08:30.831649Z node 25 :CMS INFO: User user is done with permissions user-p-1
2025-03-28T09:08:30.831734Z node 25 :CMS DEBUG: Resulting status: OK
2025-03-28T09:08:30.831810Z node 25 :CMS DEBUG: TTxRemovePermissions Execute
2025-03-28T09:08:30.831906Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 25
2025-03-28T09:08:30.832011Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-1, reason# permission user-p-1 was removed
2025-03-28T09:08:30.832058Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove
2025-03-28T09:08:30.844670Z node 25 :CMS DEBUG: TTxRemovePermissions Complete
2025-03-28T09:08:30.844932Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } }
2025-03-28T09:08:30.845636Z node 25 :CMS INFO: User user is done with permissions user-p-2
2025-03-28T09:08:30.845698Z node 25 :CMS DEBUG: Resulting status: OK
2025-03-28T09:08:30.845784Z node 25 :CMS DEBUG: TTxRemovePermissions Execute
2025-03-28T09:08:30.845891Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reset host markers: host# 26
2025-03-28T09:08:30.845996Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove request: id# user-r-2, reason# permission user-p-2 was removed
2025-03-28T09:08:30.846063Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove
2025-03-28T09:08:30.860285Z node 25 :CMS DEBUG: TTxRemovePermissions Complete
2025-03-28T09:08:30.860518Z node 25 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } }
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 [FAIL]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [FAIL]
|80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut
|80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut
|80.0%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest >> TransferWriter::Write_ColumnTable [GOOD]
Test command err:
2025-03-28T09:08:18.810925Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784692672580111:2202];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:08:18.811350Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011f2/r3tmp/tmpEFas6G/pdisk_1.dat
2025-03-28T09:08:19.589369Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-03-28T09:08:19.600093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:19.600185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:19.612676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2254 TServer::EnableGrpc on GrpcPort 17384, node 1 2025-03-28T09:08:20.305957Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:20.312678Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:20.312702Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:20.312894Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2254 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:20.851995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:20.891517Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:08:20.959915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:08:21.815171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486784705557482992:2316];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:21.815511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486784705557482992:2316];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:21.815889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486784705557482992:2316];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:21.816096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486784705557482992:2316];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:21.816181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486784705557482992:2316];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:21.816277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486784705557482992:2316];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:21.816359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486784705557482992:2316];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:21.816497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486784705557482992:2316];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:21.816645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486784705557482992:2316];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:08:21.816780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486784705557482992:2316];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:08:21.816922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486784705557482992:2316];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:08:21.817038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486784705557482992:2316];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:08:21.883628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784705557483225:2365];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:21.883689Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7486784705557483225:2365];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:21.883949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784705557483225:2365];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:21.884089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784705557483225:2365];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:21.884212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784705557483225:2365];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:21.884336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784705557483225:2365];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:21.884432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784705557483225:2365];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:21.884579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784705557483225:2365];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:21.884713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784705557483225:2365];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:08:21.884813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784705557483225:2365];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:08:21.884881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784705557483225:2365];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:08:21.884999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784705557483225:2365];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:08:21.922371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[1:7486784705557483122:2336];tablet_id=72075186224037934;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:21.922431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[1:7486784705557483122:2336];tablet_id=72075186224037934;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:21.922623Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037934;self_id=[1:7486784705557483122:2336];tablet_id=72075186224037934;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:21.922737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[1:7486784705557483122:2336];tablet_id=72075186224037934;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:21.922820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[1:7486784705557483122:2336];tablet_id=72075186224037934;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:21.922881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[1:7486784705557483122:2336];tablet_id=72075186224037934;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:21.922962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[1:7486784705557483122:2336];tablet_id=72075186224037934;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:21.923044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[1:7486784705557483122:2336];tablet_id=72075186224037934;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:21.923109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[1 ... _state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.315651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.321496Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.323672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.331063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.335106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.340583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.344211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.346208Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.352473Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.354558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037944;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.361219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.366673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.367245Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.372937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.373193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.379392Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.381535Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.386088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.387583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.391619Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037918;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.396763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.402162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.407003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.411412Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.416780Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.421667Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.425526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.428083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.431485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.433088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.436791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.437989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.441892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.442657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.448018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.451298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.453333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.458117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.462709Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.463976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.470404Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; 2025-03-28T09:08:24.472888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710658; TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152904276 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTi... (TRUNCATED) 2025-03-28T09:08:24.491340Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7486784718442387044:3724] GetTableScheme: worker# [0:0:0] 2025-03-28T09:08:24.491394Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7486784718442387044:3724] Handshake: worker# [1:7486784701262515175:2295] 2025-03-28T09:08:24.491627Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7486784718442387044:3724] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindColumnTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:08:24.491687Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7486784718442387044:3724] CompileTransferLambda: worker# [1:7486784701262515175:2295] 2025-03-28T09:08:29.420874Z node 1 :REPLICATION_SERVICE DEBUG: [TransferWriter][1:7486784718442387044:3724] Handle TEvPurecalcCompileResponse: result# >> BasicUsage::WaitEventBlocksBeforeDiscovery >> BasicUsage::FallbackToSingleDb >> BasicUsage::PropagateSessionClosed >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] >> ReadOnlyVDisk::TestGarbageCollect [GOOD] >> BasicUsage::BasicWriteSession >> BasicUsage::RetryDiscoveryWithCancel >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [FAIL] ------- [TS] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLSelectCount [GOOD] Test command err: Trying to start YDB, gRPC: 18386, MsgBus: 17215 2025-03-28T09:08:00.545418Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784613992034025:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:00.550916Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b54/r3tmp/tmp388C9m/pdisk_1.dat 2025-03-28T09:08:01.202660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:01.202787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:01.203107Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:01.206804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18386, node 1 2025-03-28T09:08:01.395446Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:01.395467Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:01.395477Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:01.395569Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17215 TClient is connected to server localhost:17215 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:02.498040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:05.339670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784635466871045:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.339768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.509871Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784613992034025:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:05.509937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:05.637537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-28T09:08:05.806817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784635466871168:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.807010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.807227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784635466871174:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.810654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-28T09:08:05.824517Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T09:08:05.825541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784635466871176:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:08:05.886638Z node 1 :TX_PROXY ERROR: Actor# [1:7486784635466871216:2406] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:06.750381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:08:07.262707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480 2025-03-28T09:08:08.052197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:08:08.854829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:08:09.434557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:08:10.183955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:08:10.275877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:08:13.334771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710709:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. 
selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } ... sion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:17.480019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:17.499671Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:08:20.654833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784697627725196:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:20.654936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:20.675249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-28T09:08:20.770766Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784676152888051:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:20.770837Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:20.775964Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784697627725309:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:20.776058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:20.777716Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784697627725315:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:20.789296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-28T09:08:20.789508Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784697627725328:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:20.789563Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784697627725330:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:20.789680Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:20.806485Z node 2 :TX_PROXY ERROR: Actor# [2:7486784697627725354:2385] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:08:20.821240Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784697627725319:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:08:20.835059Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T09:08:20.835396Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784697627725353:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:08:20.913607Z node 2 :TX_PROXY ERROR: Actor# [2:7486784697627725382:2410] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:20.933474Z node 2 :TX_PROXY ERROR: Actor# [2:7486784697627725404:2420] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:22.211363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:08:22.999069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:1, at schemeshard: 72057594046644480 2025-03-28T09:08:23.634742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:08:24.345677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:08:24.991001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-28T09:08:25.712230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:08:25.768664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:08:29.983684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710725:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> TCdcStreamWithInitialScanTests::AlterStream [GOOD] >> TCdcStreamWithInitialScanTests::DropStream >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] Test command err: 2025-03-28T09:07:20.451061Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784443247449729:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:20.451100Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00159e/r3tmp/tmpagetdk/pdisk_1.dat 2025-03-28T09:07:20.910732Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:20.935862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:20.935932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:20.941383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62219, node 1 2025-03-28T09:07:21.111311Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:07:21.111331Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:21.111342Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:21.111466Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29261 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:07:21.430929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:07:21.459323Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:29261 2025-03-28T09:07:23.635664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784456132352667:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.635738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:23.869680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:07:24.068083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784460427320135:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.068205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.068602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784460427320140:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:24.074372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:07:24.097527Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784460427320142:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:07:24.192532Z node 1 :TX_PROXY ERROR: Actor# [1:7486784460427320231:2806] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:24.350581Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe03393f5wef79wkqxfac3r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.381115Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqe033js9veh8rae6s2kbe3v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.399229Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqe033kc38trf1rn0esgt6cx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.427476Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jqe033m4datzg18teqt57y98, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.446242Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqe033mr01v374xzq1zcdt23, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.463976Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqe033nd2bvvzrdr0xc95wn6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.481614Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqe033ny8mb6nyt0ptxnetj0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.517307Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jqe033pz6yyf1mm7sbjr3hw7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.537762Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jqe033qqe89vf9w75ghcepg6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.553972Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. 
Ctx: { TraceId: 01jqe033r75qr2y90r04msc3r5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.568843Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jqe033rn1h2h47rkecdfx47z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.585657Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jqe033s4a41hkxdvs4gt5emn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.603411Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqe033srfny7wcbwqmr5ns8h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.635555Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jqe033t9f3d79vfzw955w11d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.668936Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqe033vrbyxqsdzfmadzsfae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.694209Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqe033wk3ksqckcp998fvkkt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.718810Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqe033xbcq6yc30fk018f035, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.747002Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jqe033y8aamk5yn9q9b36940, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.764698Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710679. Ctx: { TraceId: 01jqe033yt11wy2g4yh30f9nja, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIwNjU0NTYtNzk2NzhjYmItNmQzYjlmNmUtNTU1ODEwMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:07:24.78420 ... n/3?node_id=1&id=NzA0OTUyNGMtNGIwOTAzZGYtNzU0NzAzMDMtYjE1NWFjNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.652175Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720731. 
Ctx: { TraceId: 01jqe0539kf28tcy8yxdzkrbaw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2U4M2UzN2ItZTBjMjU4OGUtMzRmYWRhODUtYmYxODlmNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.656551Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720732. Ctx: { TraceId: 01jqe0539ydzw8n7zq46kx2ade, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTgyNWExNDctODgxM2U0YTAtOGE2MWJmYTctMjY3OTJkYzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.657097Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720733. Ctx: { TraceId: 01jqe0539zb28cf56rg0jh898j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmVmMDhhODQtYTRhM2FlYjUtOTI3MDgzMTQtMjVjMzUxN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.660720Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720735. Ctx: { TraceId: 01jqe053a36xrdxg1a9zmc11q8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFmNjlkMDgtZWE5NzM0YTctZGExNzAxZGYtYjI5Mjk1ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.661609Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720734. Ctx: { TraceId: 01jqe0539z7q0eh3s3ggvechjd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTA2ZjkxNmQtMzZkMTNjMjItYzM4YTg0MTctNzBiNGQ5MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.664830Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720736. Ctx: { TraceId: 01jqe053a30xgxj019wk446f2a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGZlYzRjMi0yYzMwMmZmOS05Yjc0OTlhZi1hN2I4ZDU3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.665282Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720737. Ctx: { TraceId: 01jqe053a72ttkkcd2jnsfn4hp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjBmMTNjNTEtM2M2Y2NjZWItOTRjOTE3NzAtNTgxNmE2MzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.684934Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720738. Ctx: { TraceId: 01jqe053ar86542v1bzfdq5pbq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjYxYWM1YTMtZmNlMDg0MWUtNDc4ZmFhM2YtOWYyOTNlMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.687252Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720739. Ctx: { TraceId: 01jqe053b7fqb52evxkj21y57c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjVmZTY5N2QtNDNmOWM1YzctMzM0NjM0MmUtNTgwZWIyOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.687719Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720740. Ctx: { TraceId: 01jqe053b7eed682tdy0jjjewv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2U4M2UzN2ItZTBjMjU4OGUtMzRmYWRhODUtYmYxODlmNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.696868Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720741. 
Ctx: { TraceId: 01jqe053bh4vs6hgna1xgjg0g8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzA0OTUyNGMtNGIwOTAzZGYtNzU0NzAzMDMtYjE1NWFjNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.705662Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720742. Ctx: { TraceId: 01jqe053bw39c1ctgfmjyg35vn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjBmMTNjNTEtM2M2Y2NjZWItOTRjOTE3NzAtNTgxNmE2MzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.706243Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720743. Ctx: { TraceId: 01jqe053bwfd87abwv9x6c44m9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmVmMDhhODQtYTRhM2FlYjUtOTI3MDgzMTQtMjVjMzUxN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.706637Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720744. Ctx: { TraceId: 01jqe053bw75mnj5pwcaqd2a4w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFmNjlkMDgtZWE5NzM0YTctZGExNzAxZGYtYjI5Mjk1ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.722574Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720747. Ctx: { TraceId: 01jqe053cc6z11xmxxcd5jeegf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTA2ZjkxNmQtMzZkMTNjMjItYzM4YTg0MTctNzBiNGQ5MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.723170Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720745. Ctx: { TraceId: 01jqe053cc5rhd4ynq68h4k4zs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTgyNWExNDctODgxM2U0YTAtOGE2MWJmYTctMjY3OTJkYzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.723561Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720746. Ctx: { TraceId: 01jqe053cb7azwgj2884yevwbf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGZlYzRjMi0yYzMwMmZmOS05Yjc0OTlhZi1hN2I4ZDU3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.730103Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720748. Ctx: { TraceId: 01jqe053chafhvd0ggwvh76wg4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjVmZTY5N2QtNDNmOWM1YzctMzM0NjM0MmUtNTgwZWIyOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-03-28T09:08:29.741644Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720749. Ctx: { TraceId: 01jqe053cvars353ndrppv2jgf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjYxYWM1YTMtZmNlMDg0MWUtNDc4ZmFhM2YtOWYyOTNlMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.742193Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720750. Ctx: { TraceId: 01jqe053cv5f5ndzadvhhhk7q9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2U4M2UzN2ItZTBjMjU4OGUtMzRmYWRhODUtYmYxODlmNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152844006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-28T09:08:29.764371Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720751. Ctx: { TraceId: 01jqe053d69eee0pckgvk00pz8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzA0OTUyNGMtNGIwOTAzZGYtNzU0NzAzMDMtYjE1NWFjNTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.764966Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720752. Ctx: { TraceId: 01jqe053d7a32yxxmscd092zae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmVmMDhhODQtYTRhM2FlYjUtOTI3MDgzMTQtMjVjMzUxN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.773885Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720755. Ctx: { TraceId: 01jqe053deb1yp4w3db44akn54, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OGZlYzRjMi0yYzMwMmZmOS05Yjc0OTlhZi1hN2I4ZDU3OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.774501Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720753. Ctx: { TraceId: 01jqe053dd2k79g3mha8g04r3g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmFmNjlkMDgtZWE5NzM0YTctZGExNzAxZGYtYjI5Mjk1ZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.774936Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720754. Ctx: { TraceId: 01jqe053dee94nvwv51f7t4tk5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjBmMTNjNTEtM2M2Y2NjZWItOTRjOTE3NzAtNTgxNmE2MzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.903174Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720758. Ctx: { TraceId: 01jqe053dp6xmgfgfkma16cd3w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTgyNWExNDctODgxM2U0YTAtOGE2MWJmYTctMjY3OTJkYzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.919154Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720756. Ctx: { TraceId: 01jqe053dj7p7thb5y2mgwy2aw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTA2ZjkxNmQtMzZkMTNjMjItYzM4YTg0MTctNzBiNGQ5MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:29.920211Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720759. Ctx: { TraceId: 01jqe053e2eb31v8fvaabaq5rf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjYxYWM1YTMtZmNlMDg0MWUtNDc4ZmFhM2YtOWYyOTNlMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:08:29.920703Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720757. Ctx: { TraceId: 01jqe053dqds75abn6e0qw8vsr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjVmZTY5N2QtNDNmOWM1YzctMzM0NjM0MmUtNTgwZWIyOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152844006 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGarbageCollect [GOOD] Test command err: RandomSeed# 7929488160503765570 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 2 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:1:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-03-28T09:08:24.182556Z 1 00h01m40.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2025-03-28T09:08:24.187731Z 1 00h01m40.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] SEND TEvGet with key [1:1:2:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-03-28T09:08:25.198359Z 1 00h03m20.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 2025-03-28T09:08:25.217425Z 2 00h03m20.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2025-03-28T09:08:25.825981Z 1 00h04m20.161024s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 
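The trace around this point shows the group tolerating read-only VDisks only up to a threshold: TEvPut keeps returning OK with one or two disks in read-only mode, then fails with EBS_DISINTEGRATED once a third disk becomes unwritable. The error report below lists part situations six positions wide across eight order numbers, which suggests six blob parts spread over eight VDisks (4+2 block erasure). A minimal Python model of that arithmetic follows; the constants are assumptions read off the trace, and this is an illustration of the observed threshold, not YDB's actual TRestoreStrategy logic:

    # Simplified model: a put needs PARTS_PER_BLOB distinct writable disks.
    TOTAL_DISKS = 8     # assumption: one group of 8 VDisks, as in the trace
    PARTS_PER_BLOB = 6  # assumption: 4 data + 2 parity parts (erasure 4+2)

    def put_possible(read_only_count: int) -> bool:
        return TOTAL_DISKS - read_only_count >= PARTS_PER_BLOB

    for read_only_count in range(4):
        status = "OK" if put_possible(read_only_count) else "ERROR"
        print(f"{read_only_count} read-only disk(s) -> TEvPut {status}")
    # 0..2 read-only -> OK, 3 read-only -> ERROR, matching the trace.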
2025-03-28T09:08:25.826186Z 2 00h04m20.161024s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-03-28T09:08:26.291367Z 1 00h05m00.200000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 2025-03-28T09:08:26.292699Z 2 00h05m00.200000s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] 2025-03-28T09:08:26.293969Z 3 00h05m00.200000s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] 2025-03-28T09:08:26.294306Z 1 00h05m00.200000s :BS_PROXY_PUT ERROR: [dd634bc2a010ab7d] Result# TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} 2025-03-28T09:08:26.844795Z 1 00h06m00.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 2025-03-28T09:08:26.844987Z 2 00h06m00.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] 2025-03-28T09:08:26.845043Z 3 00h06m00.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2025-03-28T09:08:28.117310Z 1 00h07m40.260512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 2025-03-28T09:08:28.117538Z 2 00h07m40.260512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in 
read-only Sender# [1:5315:705] 2025-03-28T09:08:28.117613Z 3 00h07m40.260512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] 2025-03-28T09:08:28.117673Z 4 00h07m40.260512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5329:719] === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2025-03-28T09:08:28.454973Z 1 00h08m20.262048s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 2025-03-28T09:08:28.455227Z 2 00h08m20.262048s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] 2025-03-28T09:08:28.455313Z 3 00h08m20.262048s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] 2025-03-28T09:08:28.455375Z 4 00h08m20.262048s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5329:719] 2025-03-28T09:08:28.455434Z 5 00h08m20.262048s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5336:726] === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2025-03-28T09:08:28.752627Z 1 00h09m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 2025-03-28T09:08:28.752840Z 2 00h09m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] 2025-03-28T09:08:28.752902Z 3 00h09m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] 2025-03-28T09:08:28.752958Z 4 00h09m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5329:719] 2025-03-28T09:08:28.753011Z 5 00h09m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5336:726] 2025-03-28T09:08:28.753069Z 6 00h09m00.310512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5343:733] === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2025-03-28T09:08:29.068668Z 1 00h09m40.312048s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5308:698] 2025-03-28T09:08:29.068898Z 2 00h09m40.312048s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] 2025-03-28T09:08:29.068971Z 3 00h09m40.312048s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] 2025-03-28T09:08:29.069032Z 4 00h09m40.312048s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5329:719] 2025-03-28T09:08:29.069098Z 5 00h09m40.312048s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5336:726] 2025-03-28T09:08:29.069162Z 6 00h09m40.312048s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5343:733] 2025-03-28T09:08:29.069220Z 7 00h09m40.312048s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5350:740] === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2025-03-28T09:08:29.377946Z 2 00h10m20.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5315:705] 2025-03-28T09:08:29.378074Z 3 00h10m20.360512s :BS_SKELETON ERROR: 
VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] 2025-03-28T09:08:29.378139Z 4 00h10m20.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5329:719] 2025-03-28T09:08:29.378197Z 5 00h10m20.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5336:726] 2025-03-28T09:08:29.378261Z 6 00h10m20.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5343:733] 2025-03-28T09:08:29.378326Z 7 00h10m20.360512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5350:740] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 2025-03-28T09:08:29.741671Z 3 00h11m00.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5322:712] 2025-03-28T09:08:29.741781Z 4 00h11m00.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5329:719] 2025-03-28T09:08:29.741844Z 5 00h11m00.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5336:726] 2025-03-28T09:08:29.741905Z 6 00h11m00.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5343:733] 2025-03-28T09:08:29.741962Z 7 00h11m00.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5350:740] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] 2025-03-28T09:08:30.140444Z 4 00h11m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5329:719] 2025-03-28T09:08:30.140546Z 5 00h11m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5336:726] 2025-03-28T09:08:30.140626Z 6 00h11m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5343:733] 2025-03-28T09:08:30.140685Z 7 00h11m40.410512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5350:740] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2025-03-28T09:08:30.581827Z 5 00h12m20.450512s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5336:726] 2025-03-28T09:08:30.581929Z 6 00h12m20.450512s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5343:733] 2025-03-28T09:08:30.581985Z 7 00h12m20.450512s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5350:740] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2025-03-28T09:08:31.950506Z 6 00h14m00.461536s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5343:733] 2025-03-28T09:08:31.950617Z 7 00h14m00.461536s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5350:740] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2025-03-28T09:08:32.511556Z 7 00h14m40.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5350:740] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk 
read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} SEND TEvPut with key [1:1:4:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} SEND TEvGet with key [1:1:4:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:1:0] NODATA Size# 0}} >> DstCreator::WithSyncIndex [GOOD] >> BasicUsage::WriteSessionWriteInHandlers >> BasicUsage::WriteSessionNoAvailableDatabase >> BasicUsage::SelectDatabaseByHash [GOOD] >> BasicUsage::SelectDatabase [GOOD] >> TableCreator::CreateTables [GOOD] >> BasicUsage::GetAllStartPartitionSessions >> DstCreator::ColumnsSizeMismatch [GOOD] >> DstCreator::ColumnTypeMismatch >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute [GOOD] |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SelectDatabase [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndex [GOOD] Test command err: 2025-03-28T09:08:30.810386Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784740697569242:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:30.820850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001aa7/r3tmp/tmpwZNoq2/pdisk_1.dat 2025-03-28T09:08:31.581751Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:31.585915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:31.586015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:31.597368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28246 TServer::EnableGrpc on GrpcPort 9225, node 1 2025-03-28T09:08:31.939285Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:31.939306Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:31.939312Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:31.939426Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28246 WaitRootIsUp 'Root'... 
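The DstCreator check that follows, like the earlier blocks, verifies schema state through TClient::Ls, whose responses arrive as large PathDescription text dumps. A self-contained Python sketch of how the flat scalar fields of a dump's Self block can be pulled out for comparison; the helper is illustrative only and is not part of the test harness:

    import re

    def parse_ls_self(dump: str) -> dict:
        # Grab the path's own metadata from the "Self { ... }" block. The
        # non-greedy match stops at the first closing brace, so nested
        # sub-blocks such as "Version { ... }" are not handled by this sketch.
        m = re.search(r'Self \{(.*?)\}', dump, re.S)
        if not m:
            return {}
        fields = re.findall(r'(\w+): ("[^"]*"|\S+)', m.group(1))
        return {key: value.strip('"') for key, value in fields}

    sample = ('Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 '
              'PathType: EPathTypeTable CreateFinished: true }')
    print(parse_ls_self(sample))
    # -> {'Name': 'Foo', 'PathId': '2',
    #     'SchemeshardId': '72057594046644480',
    #     'PathType': 'EPathTypeTable', 'CreateFinished': 'true'}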
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:32.602503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:32.628337Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:08:32.633616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152913012 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152912669 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152913012 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... 
(TRUNCATED) 2025-03-28T09:08:33.117898Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:08:33.118106Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:08:33.118128Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:08:33.122850Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:08:34.711525Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743152913012, tx_id: 281474976710658 } } } 2025-03-28T09:08:34.711941Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:08:34.714108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:08:34.716954Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-28T09:08:34.718470Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-03-28T09:08:34.788342Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-03-28T09:08:34.789810Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Replicated" PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152914818 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { 
Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 6 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSize ... 
oCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 7 PathOwnerId: 72057594046644480 } 2025-03-28T09:08:34.805431Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 7] TClient::Ls request: /Root/Replicated/index_by_value TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "index_by_value" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTableIndex 
CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152914818 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152914818 ParentPathId: 6 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { ... (TRUNCATED) TClient::Ls request: /Root/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152914818 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } ... (TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152914818 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" 
ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Replicated/index_by_value/indexImplTable" >> BasicUsage::WriteSessionCloseWaitsForWrites >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] >> TTabletPipeTest::TestSendBeforeBootTarget |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |80.1%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join >> TTabletCountersPercentile::WithoutZero [GOOD] >> TTabletCountersPercentile::StartFromZero [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithTags |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |80.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join ------- [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD] Test command err: 2025-03-28T09:08:29.087536Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784739696390533:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:29.095739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e51/r3tmp/tmpDh9QVR/pdisk_1.dat 2025-03-28T09:08:30.147883Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:30.188073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:30.188165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:30.190864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:08:30.193555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23340 TServer::EnableGrpc on GrpcPort 62788, node 1 2025-03-28T09:08:30.528559Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:30.528598Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:30.528608Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:30.528771Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:08:30.825291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
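Between the stderr dumps, the report interleaves several kinds of markers: '>> Suite::Case [STATUS]' for a finished test, '>> Suite::Case' with no status for one still queued, '|NN.N%|' build-progress prefixes, and '------- [TM] ...' headers that open a test's captured output. A self-contained Python sketch that tallies the first and third of these from a chunk of this log; it is a reading aid only, not how ya make itself computes results:

    import re

    def summarize(log_text: str):
        # Finished tests: ">> Suite::Case [GOOD]" / "[FAIL]"; entries without
        # a bracketed status are still queued and are deliberately skipped.
        finished = re.findall(r'>> (\S+) \[(\w+)\]', log_text)
        by_status = {}
        for name, status in finished:
            by_status.setdefault(status, []).append(name)
        # Build-progress markers look like "|80.1%|" (leading space allowed).
        progress = [float(p) for p in re.findall(r'\|\s*([\d.]+)%\|', log_text)]
        return by_status, (max(progress) if progress else None)

    chunk = (">> DstCreator::WithSyncIndex [GOOD] "
             ">> DstCreator::ColumnTypeMismatch |80.1%| [LD] {RESULT}")
    print(summarize(chunk))
    # -> ({'GOOD': ['DstCreator::WithSyncIndex']}, 80.1)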
2025-03-28T09:08:30.929178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:08:30.931600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:08:34.089379Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784739696390533:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:34.159230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; >> TTabletResolver::NodeProblem >> TCdcStreamWithInitialScanTests::DropStream [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart >> DataShardReadIterator::ShouldReadRangeChunk3 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk100 |80.1%| [TA] $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> PgCatalog::InformationSchema [GOOD] >> PgCatalog::CheckSetConfig |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::StartFromZero [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [FAIL] >> TTabletResolver::NodeProblem [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 17932, MsgBus: 29658 2025-03-28T09:08:00.250311Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784613052417943:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:00.251536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b36/r3tmp/tmpjh0fKE/pdisk_1.dat 2025-03-28T09:08:00.913568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:00.913672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:00.917658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:00.940000Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17932, node 1 2025-03-28T09:08:01.103360Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:01.103384Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:01.103395Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:01.103511Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29658 TClient is connected to server 
localhost:29658 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:01.949797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:01.980124Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:08:05.250493Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784613052417943:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:05.250574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:05.466386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784634527254888:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.466539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.794764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-28T09:08:05.978672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784634527255011:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.978790Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.979041Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784634527255016:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:05.983209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-28T09:08:05.996483Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T09:08:05.996784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784634527255018:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:08:06.102506Z node 1 :TX_PROXY ERROR: Actor# [1:7486784638822222355:2406] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:07.322334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:08:08.156669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:1, at schemeshard: 72057594046644480 2025-03-28T09:08:09.103885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:08:10.082981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:08:10.743090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:08:11.423482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:08:11.480892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:08:14.579784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-03-28T09:08:14.646512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710712:0, at schemeshard: 72057594046644480 2025-03-28T09:08:14.654035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710714:0, at schemeshard: 72057594046644480 2025-03-28T09:08:14.655372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710713:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "col1" type { type_id: UINT16 } } } } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: PO ... s. 2025-03-28T09:08:21.645042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:21.659316Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:08:25.463898Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784719408092504:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:25.463978Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:25.505604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-28T09:08:25.607125Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784719408092622:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:25.607302Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:25.607788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784719408092628:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:25.611806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-28T09:08:25.623620Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T09:08:25.624804Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784719408092630:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:08:25.696932Z node 2 :TX_PROXY ERROR: Actor# [2:7486784719408092670:2405] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:26.354676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:08:27.471087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-03-28T09:08:28.338823Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:08:29.440003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:08:30.231763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:08:30.999085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:08:31.064458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:08:34.726654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710708:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. 
selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 2025-03-28T09:08:35.246800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:08:35.246838Z node 2 :IMPORT WARN: Table profiles were not loaded >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] >> BootstrapperTest::LoneBootstrapper >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [FAIL] >> TTabletPipeTest::TestPipeConnectToHint ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::NodeProblem [GOOD] Test command err: 2025-03-28T09:08:37.868840Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StInit ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:08:37.869282Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [1:207:2136] CurrentLeaderTablet: [1:208:2137] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-28T09:08:37.869329Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-03-28T09:08:37.869389Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:207:2136] 2025-03-28T09:08:37.869667Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StInit ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:08:37.869929Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [1:213:2140] CurrentLeaderTablet: [1:214:2141] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-28T09:08:37.869975Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-03-28T09:08:37.870025Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:213:2140] 2025-03-28T09:08:37.871553Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:08:37.871658Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:207:2136] 2025-03-28T09:08:37.871869Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:08:37.871936Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:213:2140] 2025-03-28T09:08:37.872163Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 1 max(problemEpoch): 2 2025-03-28T09:08:37.872225Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [1:207:2136] by NodeId 2025-03-28T09:08:37.872305Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:08:37.872547Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 
TabletID: 123 Cookie: 0 CurrentLeader: [2:223:2094] CurrentLeaderTablet: [2:224:2095] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-28T09:08:37.872606Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-03-28T09:08:37.872664Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2025-03-28T09:08:37.872960Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [1:213:2140] by NodeId 2025-03-28T09:08:37.873025Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:08:37.873323Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [2:229:2096] CurrentLeaderTablet: [2:230:2097] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-28T09:08:37.873393Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-03-28T09:08:37.873462Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:229:2096] 2025-03-28T09:08:37.875377Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 2 2025-03-28T09:08:37.875456Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:08:37.875509Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:223:2094] 2025-03-28T09:08:37.875780Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:08:37.875833Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:229:2096] 2025-03-28T09:08:37.876049Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 4 2025-03-28T09:08:37.876105Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 123 leader: [2:223:2094] by NodeId 2025-03-28T09:08:37.876174Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StProblemResolve ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:08:37.876417Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 123 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 123 Cookie: 0 CurrentLeader: [3:241:2094] CurrentLeaderTablet: [3:242:2095] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-28T09:08:37.876455Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 123 followers: 0 2025-03-28T09:08:37.876503Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:241:2094] 2025-03-28T09:08:37.877048Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 
234 entry.State: StNormal ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:08:37.877105Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:229:2096] 2025-03-28T09:08:37.877359Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 5 2025-03-28T09:08:37.877415Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 123 entry.State: StNormal ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:08:37.877460Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:241:2094] 2025-03-28T09:08:37.877691Z node 1 :TABLET_RESOLVER DEBUG: Delayed invalidation of tabletId: 234 leader: [2:229:2096] by NodeId 2025-03-28T09:08:37.877766Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 234 entry.State: StProblemResolve ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:08:37.878017Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 234 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 234 Cookie: 0 CurrentLeader: [3:247:2096] CurrentLeaderTablet: [3:248:2097] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-28T09:08:37.878057Z node 1 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 234 followers: 0 2025-03-28T09:08:37.878107Z node 1 :TABLET_RESOLVER DEBUG: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [3:247:2096] >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> BootstrapperTest::LoneBootstrapper [GOOD] >> BootstrapperTest::MultipleBootstrappers >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 15380, MsgBus: 2397 2025-03-28T09:07:59.077601Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784611183000946:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:59.078424Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000c13/r3tmp/tmpBUMmxE/pdisk_1.dat 2025-03-28T09:07:59.631425Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:59.649012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:59.649123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:59.651961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15380, node 1 2025-03-28T09:07:59.774813Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2025-03-28T09:07:59.774833Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:59.774841Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:59.774983Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2397 TClient is connected to server localhost:2397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:00.460924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:00.483800Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:08:02.587350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784624067903360:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:02.587449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:02.948287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480
2025-03-28T09:08:03.180236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784628362870771:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:03.180433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:03.180896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784628362870778:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:03.186575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480
2025-03-28T09:08:03.199628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784628362870780:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:08:03.309225Z node 1 :TX_PROXY ERROR: Actor# [1:7486784628362870854:2415] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:04.004064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:08:04.061072Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784611183000946:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:04.079754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:04.491900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-03-28T09:08:05.037473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:08:05.697168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:08:06.509903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:08:07.167750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:08:07.262725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:08:13.544610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710741:0, at schemeshard: 72057594046644480 2025-03-28T09:08:13.560772Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe048wm29nzqk5mysxw59jn", SessionId: ydb://session/3?node_id=1&id=MjRjODY1MjAtM2UzMGMxZjItNzg5YjhhOTAtYjRiMjliZDY=, Slow query, duration: 10.977624s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE OBJECT sa_signature (TYPE SECRET) WITH (value=sa_signature);\n CREATE OBJECT external_data_source_password (TYPE SECRET) WITH (value=qwerty12345);\n\n CREATE EXTERNAL DATA SOURCE external_data_source WITH (\n SOURCE_TYPE=\"ClickHouse\",\n MDB_CLUSTER_ID=\"ch-managed\",\n AUTH_METHOD=\"MDB_BASIC\",\n SERVICE_ACCOUNT_ID=\"sa\",\n SERVICE_ACCOUNT_SECRET_NAME=\"sa_signature\",\n LOGIN=\"crab\",\n PASSWORD_SECRET_NAME=\"external_data_source_password\",\n USE_TLS=\"TRUE\",\n PROTOCOL=\"HTTP\",\n DATABASE_NAME=\"pgdb\"\n );\n ", parameters: 0b 2025-03-28T09:08:13.601081Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710744:0, at schemeshard: 72057594046644480 2025-03-28T09:08:13.609906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710743:0, at schemeshard: 72057594046644480 2025-03-28T09:08:13.611887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710745:0, at schemeshard: 72057594046644480 2025-03-28T09:08:14.643178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:08:14.643208Z node 1 :IMPORT WARN: Table profiles were not loaded Call DescribeTable. data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" ... or you don't have access permissions } 2025-03-28T09:08:25.183809Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:25.207824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480
2025-03-28T09:08:25.289625Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784721293451098:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:25.289709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:25.290069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784721293451103:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:25.293543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480
2025-03-28T09:08:25.303024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784721293451105:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:08:25.398458Z node 2 :TX_PROXY ERROR: Actor# [2:7486784721293451145:2409] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:26.051928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:08:26.884960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-03-28T09:08:27.597034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:08:28.379819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:08:29.142587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:08:30.311183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:08:30.361406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:08:34.479039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:08:34.479074Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:36.957425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710729:0, at schemeshard: 72057594046644480 2025-03-28T09:08:36.965387Z node 2 :KQP_SLOW_LOG WARN: TraceId: "01jqe04yyvcpehekknkjyr1r42", SessionId: ydb://session/3?node_id=2&id=ODQyMDFjNTgtMmM3YjIzODMtYjZmNTJjNjItMjg4NGUwMjY=, Slow query, duration: 11.783788s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE OBJECT sa_signature (TYPE SECRET) WITH (value=sa_signature);\n CREATE OBJECT external_data_source_password (TYPE SECRET) WITH (value=qwerty12345);\n\n CREATE EXTERNAL DATA SOURCE external_data_source WITH (\n SOURCE_TYPE=\"ClickHouse\",\n MDB_CLUSTER_ID=\"ch-managed\",\n AUTH_METHOD=\"MDB_BASIC\",\n SERVICE_ACCOUNT_ID=\"sa\",\n SERVICE_ACCOUNT_SECRET_NAME=\"sa_signature\",\n LOGIN=\"crab\",\n PASSWORD_SECRET_NAME=\"external_data_source_password\",\n USE_TLS=\"TRUE\",\n PROTOCOL=\"HTTP\",\n DATABASE_NAME=\"pgdb\"\n );\n ", parameters: 0b Call DescribeTable. 
data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. 
splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] Test command err: 2025-03-28T09:08:32.968550Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784750828228795:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:32.969044Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001a7e/r3tmp/tmpTA5ljj/pdisk_1.dat 2025-03-28T09:08:34.046839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:34.047040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:34.056745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:34.057456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:08:34.165643Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:62397 TServer::EnableGrpc on GrpcPort 20881, node 1 2025-03-28T09:08:34.598087Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:34.598114Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:34.598121Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:34.598224Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:08:35.139229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:35.179926Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:08:35.184032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152915693 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152915203 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152915693 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... 
(TRUNCATED) 2025-03-28T09:08:35.791029Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:08:35.791173Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:08:35.791186Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:08:35.792209Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:08:37.615192Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743152915693, tx_id: 281474976710658 } } } 2025-03-28T09:08:37.615603Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:08:37.617991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-03-28T09:08:37.621365Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-28T09:08:37.621394Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 TClient::Ls request: /Root/Dir/Replicated 2025-03-28T09:08:37.680110Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-03-28T09:08:37.681600Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dir/Replicated" PathDescription { Self { Name: "Replicated" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152917709 ParentPathId: 5 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: 
true } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 7 Type: EIndexTypeGlobal Stat ... 
16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 8 PathOwnerId: 72057594046644480 } 2025-03-28T09:08:37.696233Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 8] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "index_by_value" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152917709 ParentPathId: 6 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 8 
SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152917709 ParentPathId: 7 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { ... (TRUNCATED) TClient::Ls request: /Root/Dir/Replicated/index_by_value/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152917709 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } ... (TRUNCATED) Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152917709 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 
ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 7 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Dir/Replicated/index_by_value/indexImplTable" 2025-03-28T09:08:37.948469Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784750828228795:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:37.948575Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD] >> TResourceBroker::TestErrors >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2136] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:104:2057] recipient: [1:102:2136] Leader for TabletID 9437184 is [1:108:2140] sender: [1:109:2057] recipient: [1:102:2136] Leader for TabletID 9437184 is [1:108:2140] sender: 
[1:128:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [0:0:0] sender: [1:163:2057] recipient: [1:161:2168] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:163:2057] recipient: [1:161:2168] Leader for TabletID 9437185 is [1:167:2172] sender: [1:168:2057] recipient: [1:161:2168] Leader for TabletID 9437185 is [1:167:2172] sender: [1:187:2057] recipient: [1:14:2061] |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> DstCreator::ColumnTypeMismatch [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] >> TResourceBroker::TestErrors [GOOD] >> TResourceBroker::TestExecutionStat >> TResourceBroker::TestCounters >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict+UseSink [GOOD] >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] >> TResourceBroker::TestExecutionStat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:159:2058] recipient: [1:157:2137] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:159:2058] recipient: [1:157:2137] Leader for TabletID 9437184 is [1:165:2141] sender: [1:166:2058] recipient: [1:157:2137] Leader for TabletID 9437185 is [0:0:0] sender: [2:170:2049] recipient: [2:160:2095] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [2:170:2049] recipient: [2:160:2095] Leader for TabletID 9437185 is [2:181:2098] sender: [2:182:2049] recipient: [2:160:2095] Leader for TabletID 9437184 is [1:165:2141] sender: [1:209:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:181:2098] sender: [1:211:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:181:2098] sender: [2:213:2049] recipient: [2:42:2053] Leader for TabletID 9437185 is [2:181:2098] sender: [2:214:2049] recipient: [2:154:2094] Leader for TabletID 9437185 is [2:181:2098] sender: [1:217:2058] recipient: [1:15:2062] Leader for TabletID 9437185 is [2:181:2098] sender: [2:219:2049] recipient: [2:42:2053] Leader for TabletID 9437185 is [2:181:2098] sender: [2:220:2049] recipient: [2:218:2111] Leader for TabletID 9437185 is [2:221:2112] sender: [2:222:2049] recipient: [2:218:2111] Leader for TabletID 9437185 is [2:221:2112] sender: [1:251:2058] recipient: [1:15:2062] >> TResourceBroker::TestCounters [GOOD] >> TResourceBroker::TestChangeTaskType ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ColumnTypeMismatch [GOOD] Test command err: 2025-03-28T09:08:32.061741Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784752770014254:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:32.064324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001a81/r3tmp/tmpm8llN3/pdisk_1.dat 2025-03-28T09:08:32.577781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:32.584058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-03-28T09:08:32.586934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:32.634843Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13280 TServer::EnableGrpc on GrpcPort 17416, node 1 2025-03-28T09:08:32.927173Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:32.927194Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:32.927203Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:32.927295Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13280 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:33.329988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:33.362883Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:08:33.377133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:33.545402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152913397 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152913628 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152913397 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152913628 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-03-28T09:08:33.593322Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:08:33.593483Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:08:33.593494Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:08:33.593928Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:08:35.749328Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743152913502, tx_id: 281474976710658 } } } 2025-03-28T09:08:35.749715Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:08:35.751414Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-28T09:08:35.753781Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743152913628 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "extra" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetric ... ARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26570 TServer::EnableGrpc on GrpcPort 6164, node 2 2025-03-28T09:08:36.835366Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:36.835386Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:36.835392Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:36.835503Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26570 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:37.142646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:37.148362Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:08:37.151179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:37.190130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152917191 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743152917261 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152917191 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743152917261 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-03-28T09:08:37.227009Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:08:37.227204Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:08:37.227223Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:08:37.227664Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:08:39.722041Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743152917226, tx_id: 281474976715658 } } } 2025-03-28T09:08:39.722295Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:08:39.723576Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-28T09:08:39.724449Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743152917261 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } 
TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-28T09:08:39.724602Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Column type mismatch: name: value, expected: Utf8, got: Uint32 |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD] >> TGRpcYdbTest::ExecuteQueryBadRequest >> TResourceBroker::TestChangeTaskType [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowBatch >> TTableProfileTests::OverwriteCachingPolicy >> TTxDataShardMiniKQL::CrossShard_3_AllToOne [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestChangeTaskType [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeys >> YdbOlapStore::LogNonExistingRequest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated >> TGRpcAuthentication::ValidCredentials >> KqpJoinOrder::TPCDS88+ColumnStore >> KqpJoinOrder::TPCDS34-ColumnStore >> KqpJoin::JoinLeftPureInner >> TestYmqHttpProxy::TestCreateQueueWithTags [GOOD] >> BootstrapperTest::MultipleBootstrappers [GOOD] >> TestYmqHttpProxy::TestDeleteMessage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] Test command err: ... waiting for pipe to connect ... stopping current instance ... waiting for pipe to disconnect ... waiting for pipe to connect ... 
sleeping for 2 seconds 2025-03-28T09:08:39.766462Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-28T09:08:39.766541Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-28T09:08:39.766624Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-28T09:08:39.769086Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-03-28T09:08:39.769147Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2025-03-28T09:08:39.769209Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-03-28T09:08:39.769236Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 838756400823690829 2025-03-28T09:08:39.769552Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-03-28T09:08:39.769583Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 2303809724928703835 2025-03-28T09:08:39.770459Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: UNKNOWN 2025-03-28T09:08:39.770655Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-03-28T09:08:39.770704Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.149198s 2025-03-28T09:08:39.770873Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-03-28T09:08:39.770938Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-03-28T09:08:39.771002Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-03-28T09:08:39.771025Z node 5 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2025-03-28T09:08:39.771243Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-03-28T09:08:39.771271Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.190190s 2025-03-28T09:08:39.995834Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-28T09:08:39.996593Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:274:2096] 2025-03-28T09:08:39.997134Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-03-28T09:08:39.997186Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-03-28T09:08:40.058548Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-28T09:08:40.059149Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:274:2096] 2025-03-28T09:08:40.059559Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-03-28T09:08:40.059596Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... tablet initially started on node 5 (idx 3) in gen 2 ... disconnecting other nodes ... 
sleeping for 2 seconds (tablet expected to survive) 2025-03-28T09:08:41.134359Z node 5 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 4 2025-03-28T09:08:41.134436Z node 5 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 3 2025-03-28T09:08:41.134892Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-03-28T09:08:41.134947Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-28T09:08:41.135005Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-03-28T09:08:41.135039Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-28T09:08:41.137040Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:274:2096] 2025-03-28T09:08:41.138280Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:274:2096] 2025-03-28T09:08:41.139214Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-03-28T09:08:41.139267Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting 2025-03-28T09:08:41.139369Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-03-28T09:08:41.139401Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... disconnecting other nodes (new tablet connections fail) ... sleeping for 2 seconds (tablet expected to survive) 2025-03-28T09:08:42.122097Z node 5 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 4 2025-03-28T09:08:42.122177Z node 5 :PIPE_SERVER ERROR: [9437184] NodeDisconnected NodeId# 3 2025-03-28T09:08:42.122501Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-03-28T09:08:42.122547Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-28T09:08:42.122778Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected 2025-03-28T09:08:42.122809Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-28T09:08:42.124661Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:274:2096] 2025-03-28T09:08:42.125078Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:274:2096] ... disconnecting nodes 3 <-> 2 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2025-03-28T09:08:42.125792Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-03-28T09:08:42.125844Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 6528562917658346564 ... disconnecting nodes 3 <-> 1 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2025-03-28T09:08:42.126644Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-03-28T09:08:42.127997Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 16349739802483488852 2025-03-28T09:08:42.128328Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: UNKNOWN 2025-03-28T09:08:42.128464Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2025-03-28T09:08:42.128878Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-03-28T09:08:42.129025Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: OWNER 2025-03-28T09:08:42.129059Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 5 (owner) 2025-03-28T09:08:42.129196Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: OWNER 2025-03-28T09:08:42.129222Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 5 (owner) ... disconnect other nodes (new owner expected) ... sleeping for 2 seconds (new tablet expected to start once) 2025-03-28T09:08:43.104991Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 5, round 16045690984833335029 2025-03-28T09:08:43.105058Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-28T09:08:43.105226Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 5, round 16045690984833335029 2025-03-28T09:08:43.105265Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-28T09:08:43.107231Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:274:2096] 2025-03-28T09:08:43.107572Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:274:2096] ... disconnecting nodes 3 <-> 2 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR 2025-03-28T09:08:43.108605Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-03-28T09:08:43.108661Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 13164802727073798053 ... disconnecting nodes 3 <-> 1 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... disconnecting nodes 3 <-> 2 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER 2025-03-28T09:08:43.114775Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: ERROR 2025-03-28T09:08:43.114847Z node 3 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 10171326560769670008 2025-03-28T09:08:43.115001Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 3 state: UNKNOWN 2025-03-28T09:08:43.115116Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 5, round 16045690984833335031 2025-03-28T09:08:43.115159Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: DISCONNECTED 2025-03-28T09:08:43.115200Z node 4 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot ... disconnecting nodes 3 <-> 1 (bootstrap watch attempt) ... 
blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER 2025-03-28T09:08:43.116597Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, disconnected from 5, round 16045690984833335031 2025-03-28T09:08:43.116644Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: DISCONNECTED 2025-03-28T09:08:43.116706Z node 3 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2025-03-28T09:08:43.116755Z node 3 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, become watch on node 4 (owner) 2025-03-28T09:08:43.119379Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, tablet dead 2025-03-28T09:08:43.119460Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-28T09:08:43.122925Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:393:2096] 2025-03-28T09:08:43.146362Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-03-28T09:08:43.146426Z node 5 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... disconnecting nodes 3 <-> 0 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to >> ReadOnlyVDisk::TestSync [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain >> TGRpcYdbTest::ExecuteQueryBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryImplicitSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestSync [GOOD] Test command err: RandomSeed# 2164079397985348029 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:0:0:0:131072:0] 2025-03-28T09:08:24.759813Z 1 00h02m00.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:8808:940] 2025-03-28T09:08:24.760234Z 2 00h02m00.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:8815:947] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-03-28T09:08:28.037018Z 3 00h06m00.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:8822:954] 2025-03-28T09:08:28.037237Z 2 00h06m00.210512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:8815:947] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk 
read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-03-28T09:08:35.046942Z 5 00h14m00.361536s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:8836:968] 2025-03-28T09:08:35.047048Z 4 00h14m00.361536s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:8829:961] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-03-28T09:08:38.055408Z 6 00h18m00.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:8843:975] 2025-03-28T09:08:38.055520Z 5 00h18m00.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:8836:968] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2025-03-28T09:08:41.154191Z 7 00h22m00.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:8850:982] 2025-03-28T09:08:41.154307Z 6 00h22m00.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:8843:975] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:6:0:0:131072:0] 2025-03-28T09:08:43.937522Z 7 00h26m00.561536s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:8850:982] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only 
to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 7 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 >> KqpJoinOrder::CanonizedJoinOrderTPCH9 >> YdbYqlClient::CreateTableWithPartitionAtKeys [GOOD] >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning >> YdbTableBulkUpsertOlap::UpsertArrowBatch [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowDupField >> KqpJoinOrder::TPCDS95+ColumnStore |80.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangeChunk100 [GOOD] Test command err: 2025-03-28T09:06:53.156047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:06:53.157533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:06:53.157598Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001096/r3tmp/tmptVtYAW/pdisk_1.dat 2025-03-28T09:06:53.646649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:06:53.700688Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:53.742346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:53.742497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:53.753891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:53.851274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:06:53.896433Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:06:53.897246Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:06:53.897596Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:06:53.897832Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:06:53.946988Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:06:53.947699Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:06:53.947805Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:06:53.949463Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:06:53.949563Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:06:53.949617Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:06:53.950063Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:06:53.956383Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:06:53.956563Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:06:53.971207Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:06:54.004641Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:06:54.004872Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:06:54.005005Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:06:54.005044Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:06:54.005088Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:06:54.005120Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:06:54.005342Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:06:54.005407Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:06:54.005721Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:06:54.005813Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:06:54.006238Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:06:54.006332Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:06:54.006379Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:06:54.006428Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:06:54.006461Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:06:54.006516Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:06:54.006568Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:06:54.006870Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:06:54.006930Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:06:54.006977Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:06:54.007057Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:06:54.007115Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:06:54.007268Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:06:54.007530Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:06:54.007617Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:06:54.007759Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:06:54.007821Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:06:54.007871Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:06:54.007924Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-28T09:06:54.007966Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:06:54.008310Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:06:54.008371Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:06:54.008408Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:06:54.008464Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:06:54.008523Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:06:54.008558Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:06:54.008598Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:06:54.008632Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:06:54.008662Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:06:54.010200Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:06:54.010276Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:06:54.023413Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:06:54.023502Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:06:54.023534Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:06:54.023574Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:06:54.023638Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:06:54.194614Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:06:54.194679Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:06:54.194743Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:06:54.195238Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:06:54.195279Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:06:54.195422Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:06:54.195467Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:06:54.195506Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:06:54.195542Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:06:54.208567Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:06:54.208670Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:06:54.209544Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:06:54.209607Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:06:54.209691Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:06:5 ... :2518], 1}, firstUnprocessedQuery# 0 2025-03-28T09:08:46.375040Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0 2025-03-28T09:08:46.375356Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709543002, quota bytes left# 18446744073709000383, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-28T09:08:46.375524Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2789], Recipient [15:977:2789]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:46.375574Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0 2025-03-28T09:08:46.375613Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0 2025-03-28T09:08:46.375949Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542903, quota bytes left# 18446744073708994047, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-28T09:08:46.376074Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2789], Recipient [15:977:2789]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:46.376119Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0 2025-03-28T09:08:46.376158Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0 2025-03-28T09:08:46.376465Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542804, quota bytes left# 18446744073708987711, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-28T09:08:46.376626Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2789], Recipient [15:977:2789]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:46.376669Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0 2025-03-28T09:08:46.376708Z 
node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0 2025-03-28T09:08:46.377022Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542705, quota bytes left# 18446744073708981375, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-28T09:08:46.377162Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2789], Recipient [15:977:2789]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:46.377205Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0 2025-03-28T09:08:46.377244Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0 2025-03-28T09:08:46.377567Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542606, quota bytes left# 18446744073708975039, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-28T09:08:46.377706Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2789], Recipient [15:977:2789]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:46.377752Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0 2025-03-28T09:08:46.377792Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0 2025-03-28T09:08:46.378095Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542507, quota bytes left# 18446744073708968703, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-28T09:08:46.378254Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2789], Recipient [15:977:2789]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:46.378297Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0 2025-03-28T09:08:46.378341Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0 2025-03-28T09:08:46.378672Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542408, quota bytes left# 18446744073708962367, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-28T09:08:46.378944Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2789], Recipient [15:977:2789]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:46.378989Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0 2025-03-28T09:08:46.379030Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0 2025-03-28T09:08:46.379359Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542309, quota bytes left# 18446744073708956031, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-28T09:08:46.379501Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender 
[15:977:2789], Recipient [15:977:2789]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:46.379544Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0 2025-03-28T09:08:46.379583Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0 2025-03-28T09:08:46.379913Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542210, quota bytes left# 18446744073708949695, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-28T09:08:46.380060Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2789], Recipient [15:977:2789]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:46.380105Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0 2025-03-28T09:08:46.380143Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0 2025-03-28T09:08:46.380460Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542111, quota bytes left# 18446744073708943359, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-28T09:08:46.380631Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2789], Recipient [15:977:2789]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:46.380679Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0 2025-03-28T09:08:46.380718Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0 2025-03-28T09:08:46.381059Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709542012, quota bytes left# 18446744073708937023, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-28T09:08:46.381218Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2789], Recipient [15:977:2789]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:46.381265Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0 2025-03-28T09:08:46.381306Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0 2025-03-28T09:08:46.381631Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541913, quota bytes left# 18446744073708930687, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-28T09:08:46.381769Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2789], Recipient [15:977:2789]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:46.381818Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0 2025-03-28T09:08:46.381857Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0 2025-03-28T09:08:46.382180Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 
6336, quota rows left# 18446744073709541814, quota bytes left# 18446744073708924351, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-28T09:08:46.382332Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2789], Recipient [15:977:2789]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:46.382376Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0 2025-03-28T09:08:46.382415Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0 2025-03-28T09:08:46.382922Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541715, quota bytes left# 18446744073708918015, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-28T09:08:46.383104Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2789], Recipient [15:977:2789]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:46.383149Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0 2025-03-28T09:08:46.383187Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0 2025-03-28T09:08:46.383551Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 99, bytes# 6336, quota rows left# 18446744073709541616, quota bytes left# 18446744073708911679, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-28T09:08:46.383716Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:977:2789], Recipient [15:977:2789]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:46.383761Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 0 2025-03-28T09:08:46.383803Z node 15 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 0 2025-03-28T09:08:46.383936Z node 15 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[15:593:2518], 1} sends rowCount# 1, bytes# 64, quota rows left# 18446744073709541615, quota bytes left# 18446744073708911615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:08:46.384011Z node 15 :TX_DATASHARD DEBUG: 72075186224037890 read iterator# {[15:593:2518], 1} finished in ReadContinue |80.2%| [TA] {RESULT} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck >> TTableProfileTests::OverwriteCachingPolicy [GOOD] >> TTableProfileTests::ExplicitPartitionsSimple >> TGRpcAuthentication::ValidCredentials [GOOD] >> TGRpcAuthentication::NoDescribeRights >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink [GOOD] >> BasicUsage::FallbackToSingleDb [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] >> BasicUsage::BasicWriteSession [GOOD] >> BasicUsage::CloseWriteSessionImmediately >> KqpJoinOrder::TPCDS23-ColumnStore >> BasicUsage::WriteSessionNoAvailableDatabase [GOOD] >> BasicUsage::WriteSessionSwitchDatabases >> TGRpcYdbTest::ExecuteQueryImplicitSession [GOOD] >> TGRpcYdbTest::ExecuteQueryExplicitSession >> BasicUsage::WriteSessionWriteInHandlers [GOOD] >> KqpJoin::JoinLeftPureInner [GOOD] >> BasicUsage::WriteSessionCloseWaitsForWrites [GOOD] >> BasicUsage::WriteSessionCloseIgnoresWrites ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:08:00.456781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:08:00.456889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:08:00.456937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:08:00.456989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:08:00.457028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:08:00.457072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:08:00.457120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:08:00.457189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:08:00.457601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:08:00.539332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:08:00.539404Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:00.557647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:08:00.557908Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:08:00.558099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:08:00.564098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:08:00.564366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:08:00.565042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:00.565293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:08:00.567419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:00.568804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:08:00.568866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:00.569054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:08:00.569101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:00.569139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:08:00.569220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:08:00.576433Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:08:00.682057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:08:00.682307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:00.682545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:08:00.682785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:08:00.682845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:00.686249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:00.686394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:08:00.686602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:00.686664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:08:00.686714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:08:00.686756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:08:00.688798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:00.688858Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:08:00.688892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:08:00.692791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:00.692845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:00.692875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:00.692911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:08:00.696233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:08:00.704220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:08:00.704504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:08:00.705687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:00.705838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:08:00.705893Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:00.706190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:08:00.706242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:00.706440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:08:00.706547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:08:00.710969Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
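
Aside: the schemeshard trace above walks a single suboperation through its numbered states — 2 -> 3 when TCreateParts finds no shards to create, 3 -> 128 after TConfigureParts, and 128 -> 240 once TPropose handles TEvOperationPlan. A minimal sketch of that progression, using hypothetical names rather than the real NKikimr types:

    // Illustrative model only (not YDB's actual code): a suboperation that
    // advances through the numbered states visible in the trace above.
    #include <cstdio>

    enum class ETxState : int {
        CreateParts    = 2,   // "Change state for txid 1:0 2 -> 3"
        ConfigureParts = 3,   // "Change state for txid 1:0 3 -> 128"
        Propose        = 128, // "Change state for txid 1:0 128 -> 240"
        Done           = 240,
    };

    // Each progress step would create shards, configure them, or propose the
    // plan step to the coordinator; here the side effects are just log lines.
    ETxState ProgressState(ETxState state) {
        switch (state) {
            case ETxState::CreateParts:
                std::puts("no shards to create, do next state");
                return ETxState::ConfigureParts;
            case ETxState::ConfigureParts:
                std::puts("configure parts done");
                return ETxState::Propose;
            case ETxState::Propose:
                std::puts("plan step handled, operation is done");
                return ETxState::Done;
            case ETxState::Done:
                return ETxState::Done;
        }
        return state;
    }

    int main() {
        ETxState s = ETxState::CreateParts;
        while (s != ETxState::Done) {
            ETxState next = ProgressState(s);
            std::printf("Change state %d -> %d\n",
                        static_cast<int>(s), static_cast<int>(next));
            s = next;
        }
    }
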
2025-03-28T09:08:00.711081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:00.711329Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:00.711395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:08:00.711987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:00.712051Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:08:00.712156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:08:00.712203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:00.712247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:08:00.712279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:00.712316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:08:00.712374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:00.712419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:08:00.712453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:08:00.712554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:08:00.712619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:08:00.712665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:08:00.715226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:08:00.715375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:08:00.715414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3578Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3 2025-03-28T09:08:44.473605Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2025-03-28T09:08:44.473638Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 2/3 2025-03-28T09:08:44.473660Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2025-03-28T09:08:44.473686Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false 2025-03-28T09:08:44.475453Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-03-28T09:08:44.475680Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-03-28T09:08:44.475746Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2025-03-28T09:08:44.475824Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 3], version: 5 2025-03-28T09:08:44.475916Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 4 2025-03-28T09:08:44.476969Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-03-28T09:08:44.477056Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2025-03-28T09:08:44.477081Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2025-03-28T09:08:44.477110Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 5 2025-03-28T09:08:44.477138Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 6 2025-03-28T09:08:44.477208Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true 2025-03-28T09:08:44.483813Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2025-03-28T09:08:44.484380Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2025-03-28T09:08:44.500246Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 250 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1315 
} } 2025-03-28T09:08:44.500329Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2025-03-28T09:08:44.500478Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 250 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1315 } } 2025-03-28T09:08:44.500618Z node 19 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72075186233409546, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 250 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1315 } } 2025-03-28T09:08:44.501965Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 752 RawX2: 81604381265 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-03-28T09:08:44.502055Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2025-03-28T09:08:44.502291Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: Source { RawX1: 752 RawX2: 81604381265 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-03-28T09:08:44.502387Z node 19 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546 2025-03-28T09:08:44.502550Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvSchemaChanged at tablet: 72075186233409546 message: Source { RawX1: 752 RawX2: 81604381265 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-03-28T09:08:44.502659Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72075186233409546:4, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72075186233409546 2025-03-28T09:08:44.505731Z node 19 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-03-28T09:08:44.505830Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409552, at schemeshard: 72075186233409546 2025-03-28T09:08:44.505914Z node 19 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:1 129 -> 240 2025-03-28T09:08:44.509449Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-03-28T09:08:44.511241Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2025-03-28T09:08:44.511824Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546 
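
Aside: the "progress is 2/3 ... 3/3" and "is published: false/true" lines above track two independent completion conditions — an operation only finishes when every part is done and every scheme-board publication has been acknowledged via TEvUpdateAck. A tiny illustrative model (field and type names are invented, not YDB's):

    #include <cstdint>
    #include <cstdio>

    struct TOperation {
        uint32_t DoneParts = 0;
        uint32_t TotalParts = 3;
        uint32_t PendingPublications = 2;

        // Mirrors "TOperation IsReadyToNotify ... ready parts: N/M, is published: ..."
        bool IsReadyToNotify() const {
            return DoneParts == TotalParts && PendingPublications == 0;
        }
    };

    int main() {
        TOperation op;
        op.DoneParts = 2;            // "progress is 2/3, is published: false"
        std::printf("ready: %d\n", op.IsReadyToNotify());
        op.PendingPublications = 0;  // TEvUpdateAck received for each path
        op.DoneParts = 3;            // "progress is 3/3, is published: true"
        std::printf("ready: %d\n", op.IsReadyToNotify());
    }
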
2025-03-28T09:08:44.511892Z node 19 :FLAT_TX_SCHEMESHARD INFO: [72075186233409546] TDone opId# 281474976715657:1 ProgressState 2025-03-28T09:08:44.512127Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3 2025-03-28T09:08:44.512253Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-03-28T09:08:44.512332Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:1 progress is 3/3 2025-03-28T09:08:44.512393Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-03-28T09:08:44.512449Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2025-03-28T09:08:44.512503Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2025-03-28T09:08:44.512561Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-03-28T09:08:44.512605Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2025-03-28T09:08:44.512691Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 3] was 3 2025-03-28T09:08:44.512730Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:1 2025-03-28T09:08:44.512745Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:1 2025-03-28T09:08:44.512826Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 5 2025-03-28T09:08:44.512851Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:2 2025-03-28T09:08:44.512864Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:2 2025-03-28T09:08:44.512885Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-03-28T09:08:47.837823Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-03-28T09:08:47.838350Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 553us result status StatusNameConflict 2025-03-28T09:08:47.838653Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 2025-03-28T09:08:50.723346Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 
SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2025-03-28T09:08:50.723772Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe pathId 4 took 449us result status StatusNameConflict 2025-03-28T09:08:50.723986Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/Shared/Table/Stream/streamImpl\', error: path is not a common path (id: [OwnerId: 72075186233409546, LocalPathId: 4], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" Path: "/MyRoot/Shared/Table/Stream/streamImpl" PathId: 4 LastExistedPrefixPath: "/MyRoot/Shared/Table/Stream/streamImpl" LastExistedPrefixPathId: 4 LastExistedPrefixDescription { Self { Name: "streamImpl" PathId: 4 SchemeshardId: 72075186233409546 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeStreamImpl ChildrenExist: false BalancerTabletID: 72075186233409554 } } PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TestYmqHttpProxy::TestDeleteMessage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadFromHeadToMvccWithConflict-UseSink [GOOD] Test command err: 2025-03-28T09:06:53.343823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:06:53.344320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:06:53.344397Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001083/r3tmp/tmpU0kV8s/pdisk_1.dat 2025-03-28T09:06:53.802445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:06:53.944365Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:53.998516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:53.998657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:54.012595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:06:54.123248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:06:54.246090Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:06:54.247364Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:06:54.247914Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:06:54.248197Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:06:54.406765Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:06:54.407529Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:06:54.407643Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:06:54.409386Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:06:54.409473Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:06:54.409525Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:06:54.409938Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:06:54.410084Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:06:54.410215Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:06:54.423293Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:06:54.483988Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:06:54.484226Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:06:54.484466Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:06:54.484521Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:06:54.484559Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:06:54.484599Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:06:54.484852Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:06:54.484911Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:06:54.485297Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:06:54.485411Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:06:54.485890Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:06:54.485937Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:06:54.485986Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:06:54.486042Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:06:54.486084Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:06:54.486117Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:06:54.486232Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:06:54.486401Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:06:54.486443Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:06:54.486494Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:06:54.486560Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:06:54.486600Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:06:54.486723Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:06:54.486948Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:06:54.487019Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:06:54.487138Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:06:54.487188Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:06:54.487246Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:06:54.487283Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
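
Aside: around this point the propose path pushes the transaction through a chain of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan), and each unit reports a status such as Executed, DelayComplete, or "not ready to execute". A rough, hypothetical sketch of that pipeline pattern — not the actual datashard implementation:

    #include <cstdio>
    #include <string>
    #include <vector>

    enum class EStatus { Executed, DelayComplete, NotReady };

    struct TUnit {
        std::string Name;
        EStatus (*Run)();  // captureless lambdas convert to function pointers
    };

    int main() {
        std::vector<TUnit> plan = {
            {"CheckSchemeTx", [] { return EStatus::Executed; }},
            {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
            {"FinishPropose", [] { return EStatus::DelayComplete; }},
            {"WaitForPlan",   [] { return EStatus::NotReady; }}, // waits for TEvPlanStep
        };
        for (const auto& unit : plan) {
            EStatus st = unit.Run();
            std::printf("unit %s -> status %d\n",
                        unit.Name.c_str(), static_cast<int>(st));
            if (st == EStatus::NotReady) {
                break; // operation parks until the plan step arrives
            }
        }
    }
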
2025-03-28T09:06:54.487317Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:06:54.487603Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:06:54.487649Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:06:54.487687Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:06:54.487734Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:06:54.487780Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:06:54.487813Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:06:54.487843Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:06:54.487875Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:06:54.487902Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:06:54.489349Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:06:54.489396Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:06:54.502465Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:06:54.502557Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:06:54.502597Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:06:54.502643Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:06:54.502758Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:06:54.664377Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:06:54.664441Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:06:54.664498Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:06:54.664879Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:06:54.664914Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:06:54.665020Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:06:54.665062Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:06:54.665098Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:06:54.665127Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:06:54.668855Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:06:54.668936Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:06:54.669594Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:06:54.669627Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:06:54.669688Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:06:5 ... lZWItMWVhYjVhMi04OWNkMGJjLWJhNTZiMTJj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, datashard 72075186224037888 not finished yet: Executing 2025-03-28T09:08:49.761289Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1154:2909] TxId: 281474976715667. Ctx: { TraceId: 01jqe05pp3fbqj8y2jsxcay4x7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=ZDczMTFlZWItMWVhYjVhMi04OWNkMGJjLWJhNTZiMTJj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-03-28T09:08:49.761945Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [15:1165:2934], Recipient [15:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:08:49.761992Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:08:49.762042Z node 15 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [15:1164:2933], serverId# [15:1165:2934], sessionId# [0:0:0] 2025-03-28T09:08:49.765593Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:593:2518], Recipient [15:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 KeysSize: 6 2025-03-28T09:08:49.765904Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:08:49.766033Z node 15 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1500/18446744073709551615 ImmediateWriteEdgeReplied# v1500/18446744073709551615 2025-03-28T09:08:49.766122Z node 15 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v3001/18446744073709551615 2025-03-28T09:08:49.766241Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-03-28T09:08:49.766444Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-28T09:08:49.766535Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 
72075186224037888 executing on unit CheckRead 2025-03-28T09:08:49.766627Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:08:49.766710Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:08:49.766795Z node 15 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-03-28T09:08:49.766865Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-28T09:08:49.766895Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:08:49.766922Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:08:49.766962Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:08:49.767138Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2025-03-28T09:08:49.767648Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Continue 2025-03-28T09:08:49.767701Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Continue at tablet# 72075186224037888 2025-03-28T09:08:49.767826Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-28T09:08:49.794198Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [15:1060:2855], Recipient [15:666:2570]: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-28T09:08:49.794366Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:08:49.794466Z node 15 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037891 dest 72075186224037888 producer 72075186224037891 txId 281474976715667 2025-03-28T09:08:49.794637Z node 15 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3001 txid# 281474976715667 TabletSource# 72075186224037891 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037891 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-28T09:08:49.794898Z node 15 :TX_DATASHARD DEBUG: Complete [3001 : 281474976715667] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1154:2909], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:08:49.795019Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:08:49.795119Z node 15 :TX_DATASHARD DEBUG: Found ready candidate operation [0:4] at 72075186224037888 for ExecuteRead 2025-03-28T09:08:49.795587Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1154:2909] TxId: 281474976715667. Ctx: { TraceId: 01jqe05pp3fbqj8y2jsxcay4x7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=ZDczMTFlZWItMWVhYjVhMi04OWNkMGJjLWJhNTZiMTJj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-03-28T09:08:49.795843Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1154:2909] TxId: 281474976715667. 
Ctx: { TraceId: 01jqe05pp3fbqj8y2jsxcay4x7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=ZDczMTFlZWItMWVhYjVhMi04OWNkMGJjLWJhNTZiMTJj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:08:49.795954Z node 15 :KQP_EXECUTER DEBUG: ActorId: [15:1154:2909] TxId: 281474976715667. Ctx: { TraceId: 01jqe05pp3fbqj8y2jsxcay4x7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=ZDczMTFlZWItMWVhYjVhMi04OWNkMGJjLWJhNTZiMTJj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-28T09:08:49.796143Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:666:2570], Recipient [15:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:08:49.796246Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:08:49.797456Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:08:49.797800Z node 15 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [15:593:2518], selfId: [15:57:2104], source: [15:1132:2909] 2025-03-28T09:08:49.798187Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:08:49.798278Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:08:49.798354Z node 15 :TX_DATASHARD DEBUG: Return cached ready operation [0:4] at 72075186224037888 2025-03-28T09:08:49.798431Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:08:49.798636Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 ResultFormat: FORMAT_ARROW MaxRowsInResult: 2 } 2025-03-28T09:08:49.800265Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-03-28T09:08:49.800370Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:593:2518], 1} after executionsCount# 2 2025-03-28T09:08:49.800465Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 0 2025-03-28T09:08:49.800794Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-28T09:08:49.800881Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:08:49.800953Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:08:49.801030Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:08:49.801097Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-28T09:08:49.801125Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:08:49.801161Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:4] at 
72075186224037888 has finished 2025-03-28T09:08:49.801221Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:08:49.801289Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-28T09:08:49.801366Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:08:49.801435Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:08:49.801821Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:666:2570], Recipient [15:666:2570]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:49.801942Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 2 2025-03-28T09:08:49.802206Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 2 2025-03-28T09:08:49.802419Z node 15 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[15:593:2518], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551611, quota bytes left# 18446744073709551551, hasUnreadQueries# 1, total queries# 6, firstUnprocessed# 2 2025-03-28T09:08:49.806329Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553217, Sender [15:666:2570], Recipient [15:666:2570]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:08:49.806427Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue for iterator# {[15:593:2518], 1}, firstUnprocessedQuery# 4 2025-03-28T09:08:49.806630Z node 15 :TX_DATASHARD TRACE: 72075186224037888 ReadContinue: iterator# {[15:593:2518], 1}, FirstUnprocessedQuery# 4 2025-03-28T09:08:49.806793Z node 15 :TX_DATASHARD TRACE: 72075186224037888 readContinue iterator# {[15:593:2518], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551609, quota bytes left# 18446744073709551519, hasUnreadQueries# 0, total queries# 6, firstUnprocessed# 4 2025-03-28T09:08:49.806967Z node 15 :TX_DATASHARD DEBUG: 72075186224037888 read iterator# {[15:593:2518], 1} finished in ReadContinue 2025-03-28T09:08:49.807219Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=ZDczMTFlZWItMWVhYjVhMi04OWNkMGJjLWJhNTZiMTJj, workerId: [15:1132:2909], local sessions count: 0 2025-03-28T09:08:49.807464Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [15:61:2108], Recipient [15:1060:2855]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 15 Status: STATUS_NOT_FOUND ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureInner [GOOD] Test command err: Trying to start YDB, gRPC: 12816, MsgBus: 8513 2025-03-28T09:08:44.686721Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784802279307594:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:44.686756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018a5/r3tmp/tmpfAigv8/pdisk_1.dat 2025-03-28T09:08:45.315763Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:45.335424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-03-28T09:08:45.335505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:45.341875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12816, node 1 2025-03-28T09:08:45.532466Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:45.532487Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:45.532495Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:45.532651Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8513 TClient is connected to server localhost:8513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:46.125493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:46.158009Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:08:46.172433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:46.425135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:46.642591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:46.740828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:48.773175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784819459178487:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:48.773323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:49.045170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:08:49.080175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:08:49.156521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:08:49.194304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:08:49.230675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:08:49.317977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:08:49.380043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784823754146301:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:49.380112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:49.380448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784823754146306:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:49.384426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:08:49.401723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784823754146308:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:08:49.509059Z node 1 :TX_PROXY ERROR: Actor# [1:7486784823754146364:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:49.687298Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784802279307594:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:49.687417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] Test command err: 2025-03-28T09:08:35.654777Z :WriteSessionWriteInHandlers INFO: Random seed for debugging is 1743152915654743 2025-03-28T09:08:36.284847Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784768071890586:2157];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:36.298247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:08:36.411048Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784766597543866:2093];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:36.411092Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:08:36.835477Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e46/r3tmp/tmpmGHoci/pdisk_1.dat 2025-03-28T09:08:36.895198Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:08:37.163218Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:08:37.327618Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:37.334364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:08:37.357481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:37.357620Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:37.361941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:37.362105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:37.367881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:37.368892Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 
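The KQP_WORKLOAD_SERVICE warnings repeated throughout these test logs (above and below) all trace the same lazy-bootstrap race: the first queries on a fresh database fetch the default resource pool, get NOT_FOUND, propose creating /Root/.metadata/workload_manager/pools/default, and whichever session loses the race sees TX_PROXY report "path exist, request accepts it" and schedules a doublechecking retry, so the warnings are expected startup noise rather than failures. Below is a self-contained sketch of that get-or-create loop; EStatus, FetchPool, and CreatePool are hypothetical stand-ins for the actor round-trips, not YDB APIs:

    // Sketch of the fetch -> create -> doublecheck loop implied by the
    // warnings above. An atomic bool stands in for the schemeshard path.
    #include <atomic>
    #include <cstdio>

    enum class EStatus { Ok, NotFound, AlreadyExists };

    std::atomic<bool> g_poolExists{false};  // does .../pools/default exist?

    EStatus FetchPool() {
        return g_poolExists.load() ? EStatus::Ok : EStatus::NotFound;
    }

    EStatus CreatePool() {
        bool expected = false;
        // Only one creator wins; the rest get "path exist, request accepts it".
        return g_poolExists.compare_exchange_strong(expected, true)
                   ? EStatus::Ok
                   : EStatus::AlreadyExists;
    }

    void EnsureDefaultPool() {
        while (FetchPool() != EStatus::Ok) {       // NOT_FOUND on first pass
            if (CreatePool() == EStatus::AlreadyExists) {
                std::puts("lost the race, doublechecking");  // then re-fetch
            }
        }
    }

    int main() {
        EnsureDefaultPool();
        std::puts("default pool ready");
    }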
2025-03-28T09:08:37.371697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23641, node 1 2025-03-28T09:08:37.716535Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/000e46/r3tmp/yandexyjO0O7.tmp 2025-03-28T09:08:37.716563Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/000e46/r3tmp/yandexyjO0O7.tmp 2025-03-28T09:08:37.716775Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/000e46/r3tmp/yandexyjO0O7.tmp 2025-03-28T09:08:37.716941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:08:37.781156Z INFO: TTestServer started on Port 13226 GrpcPort 23641 TClient is connected to server localhost:13226 PQClient connected to localhost:23641 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:38.247578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:08:38.387736Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720659, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:41.205540Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784788072380627:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:41.205580Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784788072380652:2314], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:41.206623Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:41.218330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-03-28T09:08:41.220942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784789546728003:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:41.221009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784789546727997:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:41.221126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:41.232795Z node 1 :TX_PROXY ERROR: Actor# [1:7486784789546728016:2633] txid# 281474976720661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:08:41.242161Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784788072380656:2315], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-03-28T09:08:41.242984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784789546728015:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-03-28T09:08:41.284138Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784768071890586:2157];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:41.284212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:41.300724Z node 1 :TX_PROXY ERROR: Actor# [1:7486784789546728101:2688] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:41.350290Z node 2 :TX_PROXY ERROR: Actor# [2:7486784788072380684:2132] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:41.414901Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784766597543866:2093];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:41.414972Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:41.573792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-03-28T09:08:41.574186Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486784789546728111:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:41.574932Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDRkMTZlMDMtMjY0ZGZjNTktYjBmMzFjYzktMWQ3ZDFjYmU=, ActorId: [1:7486784789546727994:2337], ActorState: ExecuteState, TraceId: 01jqe05eky37bc59e73qcbrycb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:41.574453Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486784788072380691:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:41.575944Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDQzMWRkZjUtNjRhNzE5MTAtZTQzYjFjODMtODI5MWY3YmE=, ActorId: [2:7486784788072380623:2309], ActorState: ExecuteState, TraceId: 01jqe05ekd4zdpr9vdkm3bmfzj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:41.577250Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:08:41.577597Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIs ... IEventHandle 2025-03-28T09:08:50.467132Z node 2 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 177 actorID [2:7486784818137152429:2434] 2025-03-28T09:08:50.467253Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 122 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:08:50.467313Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:08:50.467379Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-03-28T09:08:50.467387Z node 2 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 1 parts 0 size 177 2025-03-28T09:08:50.467581Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:08:50.467615Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-28T09:08:50.467704Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-03-28T09:08:50.467823Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-03-28T09:08:50.467858Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-03-28T09:08:50.467898Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-03-28T09:08:50.467921Z node 2 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:08:50.471091Z :DEBUG: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-03-28T09:08:50.467983Z node 2 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1743152930400 queuesize 0 startOffset 0 2025-03-28T09:08:50.471695Z :DEBUG: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 1 written { } } write_statistics { persisting_time { nanos: 17000000 } min_queue_wait_time { nanos: 49000000 } max_queue_wait_time { nanos: 49000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-03-28T09:08:50.471740Z :DEBUG: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 2025-03-28T09:08:50.471779Z :DEBUG: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] Write session: acknoledged message 1 === Inside AcksHandler 2025-03-28T09:08:50.472305Z :DEBUG: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] Write 1 messages with Id from 2 to 2 === Inside ReadyToAcceptHandler 2025-03-28T09:08:50.472642Z :DEBUG: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] Write session: try to update token 2025-03-28T09:08:50.472680Z :DEBUG: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 2 === AcksHandler has written a message, closing the session 2025-03-28T09:08:50.474902Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|c47a2474-5259d4a-1e257caa-81d6695e_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-28T09:08:50.475183Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-03-28T09:08:50.477403Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-03-28T09:08:50.476728Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:08:50.476768Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:08:50.476868Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 2 requestId: cookie: 2 2025-03-28T09:08:50.478889Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:08:50.478922Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:08:50.478972Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 2 partNo : 0 messageNo: 3 size 107 offset: -1 2025-03-28T09:08:50.479162Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 2 partNo 0 2025-03-28T09:08:50.480089Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId 
'\0src_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 181 count 1 nextOffset 2 batches 1 2025-03-28T09:08:50.480783Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 1,1 HeadOffset 0 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 169 WTime 1743152930480 2025-03-28T09:08:50.480899Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:08:50.480914Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:08:50.480929Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:08:50.480943Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:08:50.480958Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000psrc_id 2025-03-28T09:08:50.480968Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000001_00000_0000000001_00000| 2025-03-28T09:08:50.480980Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:08:50.480993Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:08:50.481007Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:08:50.481338Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:08:50.481418Z node 2 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 1 partNo 0 count 1 size 169 2025-03-28T09:08:50.489545Z node 2 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 1 count 1 size 169 actorID [2:7486784818137152429:2434] 2025-03-28T09:08:50.489651Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 114 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:08:50.489714Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:08:50.489736Z node 2 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037892' partition 0 offset 1 partno 0 count 1 parts 0 size 169 2025-03-28T09:08:50.489749Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-03-28T09:08:50.489936Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 3 requestId: cookie: 2 2025-03-28T09:08:50.496084Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2025-03-28T09:08:50.503222Z :DEBUG: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-03-28T09:08:50.503444Z :DEBUG: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 2 written { offset: 1 } } write_statistics { persisting_time { nanos: 9000000 } min_queue_wait_time { nanos: 1000000 } max_queue_wait_time { nanos: 1000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-03-28T09:08:50.503488Z :DEBUG: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] OnAck: seqNo=2, txId=? 2025-03-28T09:08:50.503524Z :DEBUG: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] Write session: acknoledged message 2 === Inside AcksHandler === Inside SessionClosedHandler 2025-03-28T09:08:50.506877Z :DEBUG: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] Write 1 messages with Id from 3 to 3 === SessionClosedHandler has 'written' a message 2025-03-28T09:08:50.507046Z :INFO: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] Write session: close. 
Timeout 0.000000s 2025-03-28T09:08:50.507086Z :INFO: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] Write session will now close 2025-03-28T09:08:50.507133Z :DEBUG: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] Write session: aborting 2025-03-28T09:08:50.507559Z :WARNING: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-03-28T09:08:50.508013Z :DEBUG: [/Root] TraceId [] SessionId [src_id|c47a2474-5259d4a-1e257caa-81d6695e_0] MessageGroupId [src_id] Write session: destroy 2025-03-28T09:08:50.511722Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|c47a2474-5259d4a-1e257caa-81d6695e_0 grpc read done: success: 0 data: 2025-03-28T09:08:50.511748Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|c47a2474-5259d4a-1e257caa-81d6695e_0 grpc read failed 2025-03-28T09:08:50.511778Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|c47a2474-5259d4a-1e257caa-81d6695e_0 grpc closed 2025-03-28T09:08:50.511794Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|c47a2474-5259d4a-1e257caa-81d6695e_0 is DEAD 2025-03-28T09:08:50.512588Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:08:50.512969Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7486784828201435178:2539] destroyed 2025-03-28T09:08:50.513024Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. >> BasicUsage::GetAllStartPartitionSessions [GOOD] >> BasicUsage::PreferredDatabaseNoFallback >> YdbYqlClient::CreateTableWithPartitionAtKeysAndAutoPartitioning [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize >> TestYmqHttpProxy::TestDeleteMessageBatch >> YdbTableBulkUpsertOlap::UpsertArrowDupField [GOOD] >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup >> KqpJoin::FullOuterJoinNotNullJoinKey >> KqpJoinOrder::FourWayJoinLeftFirst+ColumnStore >> KqpJoinOrder::TestJoinHint1+ColumnStore >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] >> TGRpcAuthentication::NoDescribeRights [GOOD] >> TGRpcClientLowTest::BiStreamPing >> TTableProfileTests::ExplicitPartitionsSimple [GOOD] >> TTableProfileTests::ExplicitPartitionsUnordered >> TGRpcYdbTest::ExecuteQueryExplicitSession [GOOD] >> TGRpcYdbTest::ExecutePreparedQuery >> YdbYqlClient::CreateAndAltertTableWithPartitioningBySize [GOOD] >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings >> YdbTableBulkUpsertOlap::ParquetImportBug_Datashard [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly >> TGRpcClientLowTest::BiStreamPing [GOOD] >> TGRpcClientLowTest::BiStreamCancelled >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] >> ReadOnlyVDisk::TestStorageLoad [GOOD] >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup >> BasicUsage::PropagateSessionClosed [GOOD] >> BasicUsage::ReadMirrored >> 
TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] >> KqpJoin::FullOuterJoinNotNullJoinKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] Test command err: RandomSeed# 9953648801182690329 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2025-03-28T09:08:30.405808Z 1 00h02m38.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:30.410992Z 1 00h02m38.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:30.415112Z 1 00h02m38.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:30.421986Z 1 00h02m38.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:30.424396Z 1 00h02m38.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:30.441525Z 1 00h02m38.200000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:30.625232Z 1 00h02m38.300000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:30.917720Z 1 00h02m38.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.062459Z 1 00h02m38.600000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.095009Z 1 00h02m38.800000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.111050Z 1 00h02m38.900000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.254824Z 1 00h02m39.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.255623Z 1 00h02m39.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.353794Z 1 00h02m39.200000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.383290Z 1 00h02m39.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.400362Z 1 00h02m39.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.434279Z 1 00h02m39.700000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.456782Z 1 00h02m39.800000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.499293Z 1 00h02m40.000000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.532988Z 1 00h02m40.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.533757Z 1 00h02m40.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.869316Z 1 00h02m40.200000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.886518Z 1 00h02m40.300000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in 
read-only Sender# [1:5311:701] 2025-03-28T09:08:31.902709Z 1 00h02m40.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.917943Z 1 00h02m40.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:31.932897Z 1 00h02m40.600000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:32.009763Z 1 00h02m40.700000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:32.023608Z 1 00h02m40.800000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:32.154781Z 1 00h02m40.900000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:32.170256Z 1 00h02m41.000000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:32.187320Z 1 00h02m41.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:32.188021Z 1 00h02m41.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:32.335549Z 1 00h02m41.300000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:32.350886Z 1 00h02m41.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:32.511339Z 1 00h02m41.600000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:32.529444Z 1 00h02m41.700000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:32.554482Z 1 00h02m41.900000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:32.580460Z 1 00h02m42.000000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:32.638986Z 1 00h02m42.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:32.639736Z 1 00h02m42.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:32.972460Z 1 00h02m42.300000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:32.987452Z 1 00h02m42.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.017531Z 1 00h02m42.600000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.106078Z 1 00h02m42.700000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.136663Z 1 00h02m42.900000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.151765Z 1 00h02m43.000000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.296241Z 1 00h02m43.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.296961Z 1 00h02m43.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.414060Z 1 00h02m43.200000s :BS_SKELETON ERROR: 
VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.542387Z 1 00h02m43.300000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.559938Z 1 00h02m43.400000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.577479Z 1 00h02m43.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.593816Z 1 00h02m43.600000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.613216Z 1 00h02m43.700000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.642035Z 1 00h02m43.800000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.663406Z 1 00h02m43.900000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.686207Z 1 00h02m44.000000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.716384Z 1 00h02m44.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.852326Z 1 00h02m44.200000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:33.866315Z 1 00h02m44.300000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:34.079041Z 1 00h02m44.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:34.094456Z 1 00h02m44.600000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:34.156309Z 1 00h02m44.800000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:34.172208Z 1 00h02m44.900000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:34.411082Z 1 00h02m45.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:34.432062Z 1 00h02m45.200000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:34.449467Z 1 00h02m45.300000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:34.548780Z 1 00h02m45.500000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:34.563365Z 1 00h02m45.600000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:34.591251Z 1 00h02m45.800000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:34.610385Z 1 00h02m45.900000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:34.662437Z 1 00h02m46.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:34.662622Z 1 00h02m46.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:34.666818Z 1 00h02m46.100000s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] Setting VDisk read-only to 1 for 
position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 2025-03-28T09:08:36.457670Z 1 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:36.457876Z 2 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5318:708] 2025-03-28T09:08:36.460248Z 1 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:36.460479Z 2 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5318:708] 2025-03-28T09:08:36.463924Z 1 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:36.464129Z 2 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5318:708] 2025-03-28T09:08:36.469176Z 1 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:36.469637Z 2 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5318:708] 2025-03-28T09:08:36.469910Z 1 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:36.471514Z 2 00h05m16.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5318:708] 2025-03-28T09:08:36.486222Z 1 00h05m16.260512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:36.486460Z 2 00h05m16.2 ... 25-03-28T09:08:44.810255Z 1 00h10m24.261536s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:701] 2025-03-28T09:08:44.810436Z 2 00h10m24.261536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5318:708] 2025-03-28T09:08:44.810494Z 3 00h10m24.261536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5325:715] 2025-03-28T09:08:44.810544Z 4 00h10m24.261536s :BS_SKELETON ERROR: VDISK[82000000:_:0:3:0]: Unavailable in read-only Sender# [1:5332:722] 2025-03-28T09:08:44.810590Z 5 00h10m24.261536s :BS_SKELETON ERROR: VDISK[82000000:_:0:4:0]: Unavailable in read-only Sender# [1:5339:729] 2025-03-28T09:08:44.810639Z 6 00h10m24.261536s :BS_SKELETON ERROR: VDISK[82000000:_:0:5:0]: Unavailable in read-only Sender# [1:5346:736] 2025-03-28T09:08:44.810703Z 7 00h10m24.261536s :BS_SKELETON ERROR: VDISK[82000000:_:0:6:0]: Unavailable in read-only Sender# [1:5353:743] 2025-03-28T09:08:44.810757Z 8 00h10m24.261536s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:44.811203Z 1 00h10m24.261536s :BS_LOAD_TEST ERROR: TabletId# 1 Generation# 4 recieved not OK, msg# TEvBlockResult {Status# ERROR ErrorReason# "Status# ERROR From# [82000000:1:0:2:0] NodeId# 3 QuorumTracker# {Erroneous# 00000111 Successful# 00000000}"} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking 
SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2025-03-28T09:08:48.932446Z 8 00h20m54.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:48.940214Z 8 00h20m54.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:48.948350Z 8 00h20m54.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:48.956367Z 8 00h20m54.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:48.957230Z 8 00h20m54.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:49.235227Z 8 00h20m54.512560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:49.388066Z 8 00h20m54.612560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:49.614558Z 8 00h20m54.812560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:49.685603Z 8 00h20m54.912560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:49.747806Z 8 00h20m55.112560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:49.867407Z 8 00h20m55.212560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:49.922455Z 8 00h20m55.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:49.924253Z 8 00h20m55.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:49.952401Z 8 00h20m55.512560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:50.055881Z 8 00h20m55.712560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:50.073371Z 8 00h20m55.812560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:50.301585Z 8 00h20m56.012560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:50.321662Z 8 00h20m56.112560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:50.371052Z 8 00h20m56.312560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:50.403241Z 8 00h20m56.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:50.406671Z 8 00h20m56.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:50.441986Z 8 00h20m56.512560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:50.741264Z 8 00h20m56.712560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:50.758539Z 8 00h20m56.812560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:50.775708Z 8 00h20m56.912560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: 
Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:50.846150Z 8 00h20m57.012560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:51.004499Z 8 00h20m57.112560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:51.020750Z 8 00h20m57.212560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:51.037598Z 8 00h20m57.312560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:51.104459Z 8 00h20m57.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:51.106374Z 8 00h20m57.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:51.171787Z 8 00h20m57.612560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:51.191327Z 8 00h20m57.712560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:51.568957Z 8 00h20m57.912560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:51.584151Z 8 00h20m58.012560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:51.651786Z 8 00h20m58.212560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:51.672108Z 8 00h20m58.312560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:51.697444Z 8 00h20m58.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:51.699308Z 8 00h20m58.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:51.830942Z 8 00h20m58.612560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:51.849933Z 8 00h20m58.712560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:52.047048Z 8 00h20m58.912560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:52.170835Z 8 00h20m59.012560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:52.201810Z 8 00h20m59.212560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:52.300148Z 8 00h20m59.312560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:52.328708Z 8 00h20m59.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:52.330345Z 8 00h20m59.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:52.367601Z 8 00h20m59.612560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:52.470958Z 8 00h20m59.712560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:52.655689Z 8 00h20m59.912560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:52.726594Z 8 00h21m00.012560s 
:BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:52.831406Z 8 00h21m00.212560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:52.991850Z 8 00h21m00.312560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:53.127548Z 8 00h21m00.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:53.129262Z 8 00h21m00.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:53.146882Z 8 00h21m00.512560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:53.162893Z 8 00h21m00.612560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:53.178421Z 8 00h21m00.712560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:53.198622Z 8 00h21m00.812560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:53.453225Z 8 00h21m00.912560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:53.604724Z 8 00h21m01.112560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:53.623271Z 8 00h21m01.212560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:53.778588Z 8 00h21m01.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:53.804632Z 8 00h21m01.512560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:53.830258Z 8 00h21m01.612560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:53.874293Z 8 00h21m01.812560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:53.975904Z 8 00h21m01.912560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:54.015312Z 8 00h21m02.112560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:54.036216Z 8 00h21m02.212560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:54.113568Z 8 00h21m02.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:54.114012Z 8 00h21m02.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] 2025-03-28T09:08:54.117425Z 8 00h21m02.412560s :BS_SKELETON ERROR: VDISK[82000000:_:0:7:0]: Unavailable in read-only Sender# [1:5360:750] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:106:2138] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:106:2138] Leader for TabletID 9437184 is [1:122:2148] sender: [1:124:2057] recipient: [1:106:2138] 2025-03-28T09:07:33.362663Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient 
[1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:07:33.370745Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:07:33.371405Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:07:33.371669Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:07:33.381776Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:07:33.502171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:07:33.502230Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:33.503348Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:07:33.503508Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:07:33.505084Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:07:33.505154Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:07:33.505210Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:07:33.505565Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:07:33.505652Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:07:33.505712Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:189:2148] in generation 2 Leader for TabletID 9437184 is [1:122:2148] sender: [1:209:2057] recipient: [1:14:2061] 2025-03-28T09:07:33.571677Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:07:33.599766Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:07:33.599944Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:07:33.600057Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:214:2212] 2025-03-28T09:07:33.600093Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:07:33.600121Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:07:33.600167Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:33.600365Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:33.600417Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:33.600656Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:07:33.600732Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:07:33.600771Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:33.600826Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:07:33.600866Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:07:33.600909Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:07:33.600943Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 
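The TX_DATASHARD trace below walks the proposed scheme transaction [0:1] unit by unit: CheckSchemeTx, StoreSchemeTx, FinishPropose, then WaitForPlan, with per-unit statuses of Executed, DelayComplete (side effects deferred to the transaction's Complete phase, hence the later "Complete execution for [0:1] ... on unit StoreSchemeTx / FinishPropose"), or "not ready", which parks the operation until TEvPlanStep arrives and PlanQueue picks it up again. A minimal sketch of such a pipeline driver follows; the types and unit list are hypothetical, not the actual YDB execution units:

    // Sketch of an execution-unit pipeline in the spirit of the trace below.
    // TUnit / Progress are illustrative names only.
    #include <cstdio>
    #include <deque>

    enum class EStatus { Executed, DelayComplete, NotReady };

    struct TUnit {
        const char* Name;
        EStatus (*Run)();  // one step of work for the operation on this unit
    };

    void Progress(std::deque<TUnit>& plan, const char* op) {
        while (!plan.empty()) {
            TUnit unit = plan.front();
            std::printf("Trying to execute %s on unit %s\n", op, unit.Name);
            if (unit.Run() == EStatus::NotReady) {
                // Parked; re-entered later (e.g. when TEvPlanStep arrives).
                std::printf("Operation %s is not ready to execute on unit %s\n",
                            op, unit.Name);
                return;
            }
            // Executed and DelayComplete both advance the plan; DelayComplete
            // only defers its side effects to the Complete() phase.
            std::printf("Advance execution plan for %s executing on unit %s\n",
                        op, unit.Name);
            plan.pop_front();
        }
    }

    int main() {
        std::deque<TUnit> plan = {
            {"CheckSchemeTx", [] { return EStatus::Executed; }},
            {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
            {"FinishPropose", [] { return EStatus::DelayComplete; }},
            {"WaitForPlan",   [] { return EStatus::NotReady; }},
        };
        Progress(plan, "[0:1]");  // stops at WaitForPlan until the plan step
    }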
2025-03-28T09:07:33.600972Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:07:33.601006Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:07:33.601100Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:210:2209], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:33.601155Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:33.601206Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:208:2208], serverId# [1:210:2209], sessionId# [0:0:0] 2025-03-28T09:07:33.603711Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:07:33.603786Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:07:33.603875Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:07:33.604029Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:07:33.604089Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:07:33.604141Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:07:33.604195Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:33.604245Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:07:33.604279Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:07:33.604308Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:07:33.604620Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:07:33.604656Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:07:33.604685Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:07:33.604716Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:07:33.604780Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:07:33.604807Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:07:33.604837Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:07:33.604867Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:07:33.604892Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:07:33.617636Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:07:33.617733Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:07:33.617773Z node 1 :TX_DATASHARD TRACE: Complete 
execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:07:33.617810Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:07:33.617879Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:07:33.618405Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:220:2218], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:33.618460Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:33.618501Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:219:2217], serverId# [1:220:2218], sessionId# [0:0:0] 2025-03-28T09:07:33.618888Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2025-03-28T09:07:33.619103Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:07:33.619286Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2025-03-28T09:07:33.619348Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-28T09:07:33.619392Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:07:33.619447Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:07:33.623092Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:07:33.623165Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:33.623416Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:33.623465Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:33.623524Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:33.623562Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:07:33.623594Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:07:33.623632Z node 1 :TX_DATASHARD DEBUG: Found ready operation [2:1] in PlanQueue unit at 9437184 2025-03-28T09:07:33.623663Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit PlanQueue 2025-03-28T09:07:33.623736Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-28T09:07:33.623771Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:07:33.623805Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:07:33.623835Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:07:33.623987Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 2:1 keys extracted: 0 2025-03-28T09:07:33.624096Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is Executed 2025-03-28T09:07:33.624142Z node 1 :TX_DATASHARD TRACE: Advance execution 
plan for [2:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:07:33.624165Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:07:33.624187Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:07:33.624248Z node 1 :TX_DATASHARD TRACE: Execution status for [2:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:33.624273Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [2:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:07:33.624308Z node 1 :TX_DATASHARD TRACE: Add [2:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:07:33.624336Z node 1 :TX_DATASHARD TRACE: Trying to execute [2:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:07:33.624380Z node 1 :TX_DATASHARD TRACE: Operation [2:1] is the new logically complete end at 9437184 2025-03-28T09:07:33.624426Z node 1 :TX_DATASHARD TRAC ... 9437185 to execution unit ExecuteDataTx 2025-03-28T09:09:01.079265Z node 42 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit ExecuteDataTx 2025-03-28T09:09:01.079587Z node 42 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437185 with status COMPLETE 2025-03-28T09:09:01.079640Z node 42 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437185: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:09:01.079686Z node 42 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-03-28T09:09:01.079716Z node 42 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit ExecuteDataTx 2025-03-28T09:09:01.079744Z node 42 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit CompleteOperation 2025-03-28T09:09:01.079776Z node 42 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit CompleteOperation 2025-03-28T09:09:01.079946Z node 42 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is DelayComplete 2025-03-28T09:09:01.079978Z node 42 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit CompleteOperation 2025-03-28T09:09:01.080009Z node 42 :TX_DATASHARD TRACE: Add [7:6] at 9437185 to execution unit CompletedOperations 2025-03-28T09:09:01.080041Z node 42 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437185 on unit CompletedOperations 2025-03-28T09:09:01.080074Z node 42 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437185 is Executed 2025-03-28T09:09:01.080098Z node 42 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437185 executing on unit CompletedOperations 2025-03-28T09:09:01.080123Z node 42 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437185 has finished 2025-03-28T09:09:01.080154Z node 42 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:09:01.080179Z node 42 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-28T09:09:01.080209Z node 42 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-03-28T09:09:01.080241Z node 42 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-03-28T09:09:01.080419Z node 42 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [42:236:2229], Recipient [42:236:2229]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:09:01.080458Z node 42 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:09:01.080508Z node 42 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:09:01.080543Z node 42 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:09:01.080571Z node 42 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:09:01.080606Z node 42 :TX_DATASHARD DEBUG: Found ready operation [7:6] in PlanQueue unit at 9437184 2025-03-28T09:09:01.080635Z node 42 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit PlanQueue 2025-03-28T09:09:01.080665Z node 42 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-28T09:09:01.080696Z node 42 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit PlanQueue 2025-03-28T09:09:01.080726Z node 42 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:09:01.080758Z node 42 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit LoadTxDetails 2025-03-28T09:09:01.081404Z node 42 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 7:6 keys extracted: 1 2025-03-28T09:09:01.081448Z node 42 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-28T09:09:01.081476Z node 42 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:09:01.081503Z node 42 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit FinalizeDataTxPlan 2025-03-28T09:09:01.081532Z node 42 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit FinalizeDataTxPlan 2025-03-28T09:09:01.081573Z node 42 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-28T09:09:01.081598Z node 42 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit FinalizeDataTxPlan 2025-03-28T09:09:01.081624Z node 42 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:09:01.081652Z node 42 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:09:01.081700Z node 42 :TX_DATASHARD TRACE: Operation [7:6] is the new logically complete end at 9437184 2025-03-28T09:09:01.081733Z node 42 :TX_DATASHARD TRACE: Operation [7:6] is the new logically incomplete end at 9437184 2025-03-28T09:09:01.081765Z node 42 :TX_DATASHARD TRACE: Activated operation [7:6] at 9437184 2025-03-28T09:09:01.081803Z node 42 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-28T09:09:01.081830Z node 42 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-28T09:09:01.081859Z node 42 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit BuildDataTxOutRS 2025-03-28T09:09:01.081886Z node 42 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit BuildDataTxOutRS 2025-03-28T09:09:01.081936Z node 42 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-28T09:09:01.081961Z node 42 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit BuildDataTxOutRS 2025-03-28T09:09:01.081983Z node 42 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit StoreAndSendOutRS 2025-03-28T09:09:01.082008Z node 42 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on 
unit StoreAndSendOutRS 2025-03-28T09:09:01.082035Z node 42 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-28T09:09:01.082060Z node 42 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit StoreAndSendOutRS 2025-03-28T09:09:01.082083Z node 42 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit PrepareDataTxInRS 2025-03-28T09:09:01.082106Z node 42 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit PrepareDataTxInRS 2025-03-28T09:09:01.082136Z node 42 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-28T09:09:01.082162Z node 42 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit PrepareDataTxInRS 2025-03-28T09:09:01.082189Z node 42 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit LoadAndWaitInRS 2025-03-28T09:09:01.082214Z node 42 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit LoadAndWaitInRS 2025-03-28T09:09:01.082241Z node 42 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-28T09:09:01.082266Z node 42 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit LoadAndWaitInRS 2025-03-28T09:09:01.082293Z node 42 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit ExecuteDataTx 2025-03-28T09:09:01.082320Z node 42 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit ExecuteDataTx 2025-03-28T09:09:01.082617Z node 42 :TX_DATASHARD TRACE: Executed operation [7:6] at tablet 9437184 with status COMPLETE 2025-03-28T09:09:01.082672Z node 42 :TX_DATASHARD TRACE: Datashard execution counters for [7:6] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:09:01.082738Z node 42 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-28T09:09:01.082768Z node 42 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit ExecuteDataTx 2025-03-28T09:09:01.082798Z node 42 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit CompleteOperation 2025-03-28T09:09:01.082825Z node 42 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit CompleteOperation 2025-03-28T09:09:01.082994Z node 42 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is DelayComplete 2025-03-28T09:09:01.083025Z node 42 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit CompleteOperation 2025-03-28T09:09:01.083053Z node 42 :TX_DATASHARD TRACE: Add [7:6] at 9437184 to execution unit CompletedOperations 2025-03-28T09:09:01.083081Z node 42 :TX_DATASHARD TRACE: Trying to execute [7:6] at 9437184 on unit CompletedOperations 2025-03-28T09:09:01.083115Z node 42 :TX_DATASHARD TRACE: Execution status for [7:6] at 9437184 is Executed 2025-03-28T09:09:01.083141Z node 42 :TX_DATASHARD TRACE: Advance execution plan for [7:6] at 9437184 executing on unit CompletedOperations 2025-03-28T09:09:01.083170Z node 42 :TX_DATASHARD TRACE: Execution plan for [7:6] at 9437184 has finished 2025-03-28T09:09:01.083200Z node 42 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:09:01.083226Z node 42 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:09:01.083254Z node 42 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached 
operations 2025-03-28T09:09:01.083287Z node 42 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:09:01.099927Z node 42 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437186 step# 7 txid# 6} 2025-03-28T09:09:01.099993Z node 42 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437186 step# 7} 2025-03-28T09:09:01.100046Z node 42 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:09:01.100082Z node 42 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437186 on unit CompleteOperation 2025-03-28T09:09:01.100138Z node 42 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437186 at tablet 9437186 send result to client [42:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:09:01.100180Z node 42 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:09:01.100445Z node 42 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 7 txid# 6} 2025-03-28T09:09:01.100481Z node 42 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 7} 2025-03-28T09:09:01.100523Z node 42 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-28T09:09:01.100550Z node 42 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437185 on unit CompleteOperation 2025-03-28T09:09:01.100608Z node 42 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437185 at tablet 9437185 send result to client [42:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:09:01.100640Z node 42 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-03-28T09:09:01.100999Z node 42 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 7 txid# 6} 2025-03-28T09:09:01.101036Z node 42 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 7} 2025-03-28T09:09:01.101074Z node 42 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:09:01.101104Z node 42 :TX_DATASHARD TRACE: Complete execution for [7:6] at 9437184 on unit CompleteOperation 2025-03-28T09:09:01.101153Z node 42 :TX_DATASHARD DEBUG: Complete [7 : 6] from 9437184 at tablet 9437184 send result to client [42:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:09:01.101185Z node 42 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> TGRpcYdbTest::ExecutePreparedQuery [GOOD] >> TGRpcYdbTest::ExecuteQueryCache ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoinNotNullJoinKey [GOOD] Test command err: Trying to start YDB, gRPC: 3022, MsgBus: 17865 2025-03-28T09:08:55.456274Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784848643030859:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:55.456329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00188d/r3tmp/tmpNe3ez5/pdisk_1.dat 2025-03-28T09:08:56.090563Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:56.104556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:56.104628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:56.107964Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3022, node 1 2025-03-28T09:08:56.317378Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:56.317397Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:56.317404Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:56.317541Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17865 TClient is connected to server localhost:17865 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:56.960371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:56.989177Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:08:56.996076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:57.124799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:57.298612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:57.370551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:59.112280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784865822901825:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:59.112462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:59.444208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:08:59.475656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:08:59.525391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:08:59.562734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:08:59.608302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:08:59.656205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:08:59.736519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784865822902341:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:59.736604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:59.736798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784865822902346:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:59.740072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:08:59.748914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784865822902348:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:08:59.831959Z node 1 :TX_PROXY ERROR: Actor# [1:7486784865822902403:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:00.456808Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784848643030859:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:00.456917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:00.943343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:09:00.986947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:08:11.668113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:08:11.668225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:08:11.668265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:08:11.668304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:08:11.668393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:08:11.668432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:08:11.668515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:08:11.668616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:08:11.669005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:08:11.886574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:08:11.886638Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:11.947540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TxInitSchema.Complete 2025-03-28T09:08:11.947940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:08:11.948148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:08:11.985278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:08:11.985544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:08:11.986297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:11.986545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:08:11.993661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:11.995314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:08:11.995393Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:11.995610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:08:11.995664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:11.995705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:08:11.995812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:08:12.023944Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:08:12.310717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:08:12.311115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:12.311489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:08:12.311825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:08:12.311907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:12.323842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:12.324059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:08:12.324316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-03-28T09:08:12.324427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:08:12.324476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:08:12.324521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:08:12.329419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:12.329519Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:08:12.329578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:08:12.335601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:12.335678Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:12.335721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:12.335834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:08:12.367667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:08:12.376076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:08:12.376468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:08:12.377715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:12.377943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:08:12.378014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:12.378332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:08:12.378397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:12.378636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:08:12.382956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:08:12.385767Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:08:12.385848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:12.386044Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:12.386087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:08:12.386537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:12.386594Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:08:12.386716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:08:12.386768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:12.386811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:08:12.386887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:12.386933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:08:12.386985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:12.387029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:08:12.387060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:08:12.387137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:08:12.387179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:08:12.387212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:08:12.389305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:08:12.389484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:08:12.389540Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:09:01.340205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-28T09:09:01.340435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:09:01.345069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-28T09:09:01.345240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-28T09:09:01.346196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:09:01.346364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:09:01.346428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:09:01.346572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-28T09:09:01.347131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:09:01.362851Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:3458:5422], attempt# 0 2025-03-28T09:09:01.412131Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:3458:5422], sender# [1:3457:5421] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:15385 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 52333168-F0A4-4AE5-A4AD-560FFE401624 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-03-28T09:09:01.423222Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3458:5422], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:15385 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 87F5350B-EE65-4279-B3BA-E4DD3DF874A8 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: 
aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-03-28T09:09:01.429900Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:09:01.429994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:09:01.430270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:09:01.430318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:09:01.430707Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3458:5422], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-03-28T09:09:01.431078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:09:01.431149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:09:01.431534Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3457:5421] 2025-03-28T09:09:01.431954Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3458:5422], sender# [1:3457:5421], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-03-28T09:09:01.433172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:09:01.433268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:09:01.433297Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:09:01.433329Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:09:01.433364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:09:01.433439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:15385 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 815F538D-FBA6-43ED-BCDA-0D40BB30B649 amz-sdk-request: attempt=1 content-length: 740 content-md5: P/a/uWmNWYxyRT1pAtAE7A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 2025-03-28T09:09:01.439227Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3458:5422], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 2025-03-28T09:09:01.439490Z node 1 
:DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3458:5422], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-28T09:09:01.439866Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3457:5421], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-28T09:09:01.448704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:09:01.482508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-28T09:09:01.482591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:09:01.482829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-28T09:09:01.482958Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-28T09:09:01.483035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:09:01.483096Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:09:01.483156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:09:01.483220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:09:01.483432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:09:01.495334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:09:01.495966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:09:01.496020Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:09:01.496138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:09:01.496177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:09:01.496222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 
1/1 2025-03-28T09:09:01.496259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:09:01.496299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:09:01.496381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-03-28T09:09:01.496441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:09:01.496480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:09:01.496512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:09:01.496628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:09:01.505245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:09:01.505328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3442:5407] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] Test command err: 2025-03-28T09:08:04.679233Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784630059621571:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:04.691516Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a51/r3tmp/tmpwHitCk/pdisk_1.dat 2025-03-28T09:08:05.185727Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:05.204285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:05.204382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:05.206094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11836, node 1 2025-03-28T09:08:05.345380Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:05.345414Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:05.345422Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:05.345532Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8577 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:05.797834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:8577 2025-03-28T09:08:06.256360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:06.262116Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:08:06.263689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:06.281655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:06.417595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:08:06.495247Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-03-28T09:08:06.509045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:08:06.608469Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-03-28T09:08:06.615359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:06.673420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:08:06.734338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:06.827535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:06.914579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:08:06.978231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:08:07.034593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:08.795619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784647239492191:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:08.797433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784647239492187:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:08.797561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:08.800015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-28T09:08:08.814905Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2025-03-28T09:08:08.816263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784647239492200:2380], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-28T09:08:08.881460Z node 1 :TX_PROXY ERROR: Actor# [1:7486784647239492252:2918] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:09.412153Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqe04eyr342f4rdj8vk3q13g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDY1Njg3ZDEtOWRmYzA3MDktNzE4NmEwMGEtNTBkNDg4Mzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.423862Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqe04eyr342f4rdj8vk3q13g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDY1Njg3ZDEtOWRmYzA3MDktNzE4NmEwMGEtNTBkNDg4Mzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.449522Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqe04eyr342f4rdj8vk3q13g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDY1Njg3ZDEtOWRmYzA3MDktNzE4NmEwMGEtNTBkNDg4Mzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:08:09.481123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:09.532647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:09.581943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:09.623095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:09.669255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:09.683020Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784630059621571:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:09.683112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:09.722787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:09.804650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
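The NOT_FOUND warnings above are the workload-manager bootstrap racing to create the default resource pool under /Root/.metadata/workload_manager/pools/default: one actor proposes ESchemeOpCreateResourcePool, the TPoolCreatorActor retries after "doublechecking", and the losing creator sees "path exist, request accepts it". A minimal YQL sketch of declaring such a pool by hand (the pool name and settings below are illustrative assumptions, not values taken from this log):

    CREATE RESOURCE POOL sample_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10, -- assumed: max queries running concurrently
        QUEUE_SIZE = 100             -- assumed: max queries waiting for a free slot
    );
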
2025-03-28T09:08:09.851945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:09.927984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:10.055362Z node 1 :HTTP INFO: Listening on http://127.0.0.1:13104 2025-03-28T09:08:11.053794Z node 1 :SQS INFO: Start SQS service actor 2025-03-28T09:08:11.054068Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-28T09:08:11.054076Z node 1 :SQS DEBUG: S ... 377\377\377\377\377\377\377\030\003?6\000\003?8\002\003?:\000\003?<\000\006\n?@\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\037\003?B\000\377\007\003?\030\000\002\001\000/" } Params { Bin: "\037\000\005\205\n\203\010\203\010\203\010\203\004\203\010 (POST /Root) 2025-03-28T09:09:01.594925Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Sending mkql execution result: { Status: 48 TxId: 281474976710716 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2025-03-28T09:09:01.594956Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Minikql data response: {"messages": []} 2025-03-28T09:09:01.595023Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [f897:9200:6050:0:e097:9200:6050:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 950e8cd0-1f237ba2-6ab2f243-e27c354b 2025-03-28T09:09:01.595087Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] execution duration: 11ms 2025-03-28T09:09:01.595211Z node 7 :SQS DEBUG: Request [] Sending executed reply 2025-03-28T09:09:01.595322Z node 7 :SQS DEBUG: Handle oldest timestamp metrics for [cloud4/000000000000000101v0/0] 2025-03-28T09:09:01.595746Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [950e8cd0-1f237ba2-6ab2f243-e27c354b] got new request from [f897:9200:6050:0:e097:9200:6050:0] 2025-03-28T09:09:01.596259Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [950e8cd0-1f237ba2-6ab2f243-e27c354b] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2025-03-28T09:09:01.596281Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [950e8cd0-1f237ba2-6ab2f243-e27c354b] sending grpc request to '' database: '/Root' iam token size: 0 2025-03-28T09:09:01.596462Z node 7 :SQS DEBUG: Got new request in YMQ proxy. 
FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: 950e8cd0-1f237ba2-6ab2f243-e27c354b 2025-03-28T09:09:01.596578Z node 7 :SQS DEBUG: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Proxy actor: used user_name='cloud4', queue_name='000000000000000101v0', folder_id='folder4' 2025-03-28T09:09:01.596590Z node 7 :SQS DEBUG: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Request proxy started 2025-03-28T09:09:01.596723Z node 7 :SQS DEBUG: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Answer configuration for queue [cloud4/000000000000000101v0] without leader 2025-03-28T09:09:01.596796Z node 7 :SQS DEBUG: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Get configuration duration: 0ms 2025-03-28T09:09:01.596906Z node 7 :SQS DEBUG: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Send get leader node request to sqs service for cloud4/000000000000000101v0 2025-03-28T09:09:01.596929Z node 7 :SQS DEBUG: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Leader node for queue [cloud4/000000000000000101v0] is 7 2025-03-28T09:09:01.596978Z node 7 :SQS DEBUG: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Got leader node for queue response. Node id: 7. Status: 0 2025-03-28T09:09:01.597083Z node 7 :SQS TRACE: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Sending request from proxy to leader node 7: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "950e8cd0-1f237ba2-6ab2f243-e27c354b" 2025-03-28T09:09:01.597172Z node 7 :SQS DEBUG: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Received Sqs Request: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "950e8cd0-1f237ba2-6ab2f243-e27c354b" 2025-03-28T09:09:01.597235Z node 7 :SQS DEBUG: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Request started. Actor: [7:7486784875769508373:3851] 2025-03-28T09:09:01.597278Z node 7 :SQS TRACE: Inc local leader ref for actor [7:7486784875769508373:3851] 2025-03-28T09:09:01.597295Z node 7 :SQS DEBUG: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Forward configuration request to queue [cloud4/000000000000000101v0] leader 2025-03-28T09:09:01.597333Z node 7 :SQS DEBUG: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Get configuration duration: 0ms 2025-03-28T09:09:01.597351Z node 7 :SQS TRACE: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Got configuration. Root url: http://ghrun-mdsjyjltvm.auto.internal:8771, Shards: 4, Fail: 0 2025-03-28T09:09:01.597377Z node 7 :SQS TRACE: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Got configuration. Attributes: { ContentBasedDeduplication: 0 DelaySeconds: 0.000000s FifoQueue: 0 MaximumMessageSize: 262144 MessageRetentionPeriod: 345600.000000s ReceiveMessageWaitTime: 0.000000s VisibilityTimeout: 30.000000s } 2025-03-28T09:09:01.597392Z node 7 :SQS TRACE: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] DoRoutine 2025-03-28T09:09:01.597444Z node 7 :SQS TRACE: Increment active message requests for [cloud4/000000000000000101v0/1]. ActiveMessageRequests: 1 2025-03-28T09:09:01.597494Z node 7 :SQS DEBUG: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Received empty result from shard 1 infly. Infly capacity: 0. Messages count: 0 2025-03-28T09:09:01.597511Z node 7 :SQS DEBUG: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] No known messages in this shard. 
Skip attempt to add messages to infly 2025-03-28T09:09:01.597523Z node 7 :SQS DEBUG: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Already tried to add messages to infly 2025-03-28T09:09:01.597564Z node 7 :SQS TRACE: Decrement active message requests for [[cloud4/000000000000000101v0/1]. ActiveMessageRequests: 0 2025-03-28T09:09:01.597633Z node 7 :SQS TRACE: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] SendReplyAndDie from action actor { ReceiveMessage { RequestId: "950e8cd0-1f237ba2-6ab2f243-e27c354b" } } 2025-03-28T09:09:01.597671Z node 7 :SQS DEBUG: Request ReceiveMessage working duration: 0ms 2025-03-28T09:09:01.597727Z node 7 :SQS TRACE: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Sending sqs response: { ReceiveMessage { RequestId: "950e8cd0-1f237ba2-6ab2f243-e27c354b" } RequestId: "950e8cd0-1f237ba2-6ab2f243-e27c354b" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-03-28T09:09:01.597795Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7486784875769508373:3851]. Found: 1 2025-03-28T09:09:01.597801Z node 7 :SQS TRACE: HandleSqsResponse ReceiveMessage { RequestId: "950e8cd0-1f237ba2-6ab2f243-e27c354b" } RequestId: "950e8cd0-1f237ba2-6ab2f243-e27c354b" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-03-28T09:09:01.597849Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7486784875769508372:2556]: ReceiveMessage { RequestId: "950e8cd0-1f237ba2-6ab2f243-e27c354b" } RequestId: "950e8cd0-1f237ba2-6ab2f243-e27c354b" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-03-28T09:09:01.598027Z node 7 :SQS TRACE: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] HandleResponse: { ReceiveMessage { RequestId: "950e8cd0-1f237ba2-6ab2f243-e27c354b" } RequestId: "950e8cd0-1f237ba2-6ab2f243-e27c354b" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2025-03-28T09:09:01.598104Z node 7 :SQS DEBUG: Request [950e8cd0-1f237ba2-6ab2f243-e27c354b] Sending reply from proxy actor: { ReceiveMessage { RequestId: "950e8cd0-1f237ba2-6ab2f243-e27c354b" } RequestId: "950e8cd0-1f237ba2-6ab2f243-e27c354b" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-03-28T09:09:01.598357Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [950e8cd0-1f237ba2-6ab2f243-e27c354b] Got successful GRPC response. 2025-03-28T09:09:01.598408Z node 7 :HTTP_PROXY INFO: http request [ReceiveMessage] requestId [950e8cd0-1f237ba2-6ab2f243-e27c354b] reply ok 2025-03-28T09:09:01.598496Z node 7 :HTTP_PROXY DEBUG: http request [ReceiveMessage] requestId [950e8cd0-1f237ba2-6ab2f243-e27c354b] Send metering event. 
HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 526 ResponseSizeInBytes: 179 SourceAddress: f897:9200:6050:0:e097:9200:6050:0 ResourceId: 000000000000000101v0 Action: ReceiveMessage 2025-03-28T09:09:01.598580Z node 7 :HTTP DEBUG: (#37,[::1]:35356) <- (200 ) Http output full {} 2025-03-28T09:09:01.598705Z node 7 :HTTP DEBUG: (#37,[::1]:35356) connection closed >> YdbYqlClient::CreateAndAltertTableWithReadReplicasSettings [GOOD] >> YdbYqlClient::CreateTableWithMESettings >> YdbOlapStore::LogNonExistingRequest [GOOD] >> YdbOlapStore::LogNonExistingUserId >> TTableProfileTests::ExplicitPartitionsUnordered [GOOD] >> TTableProfileTests::ExplicitPartitionsWrongKeyFormat >> BasicUsage::CloseWriteSessionImmediately [GOOD] >> KqpFlipJoin::RightSemi_2 >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness1 >> KqpJoin::LeftJoinWithNull-StreamLookupJoin >> KqpJoinOrder::FiveWayJoinWithComplexPreds+ColumnStore >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::CloseWriteSessionImmediately [GOOD] Test command err: 2025-03-28T09:08:34.734200Z :BasicWriteSession INFO: Random seed for debugging is 1743152914734163 2025-03-28T09:08:35.446757Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784764273515949:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:35.446796Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:08:35.647757Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784765913660862:2290];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:35.647838Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e66/r3tmp/tmpio2cvm/pdisk_1.dat 2025-03-28T09:08:36.019750Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:08:36.026702Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:08:36.484739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:08:36.558055Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:36.571036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:36.571131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:36.575205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:36.575278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:36.616455Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:08:36.616594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:36.617817Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21539, node 1 2025-03-28T09:08:36.955192Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/000e66/r3tmp/yandexxY6wrA.tmp 2025-03-28T09:08:36.955217Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/000e66/r3tmp/yandexxY6wrA.tmp 2025-03-28T09:08:36.955351Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/000e66/r3tmp/yandexxY6wrA.tmp 2025-03-28T09:08:36.955448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:08:37.091091Z INFO: TTestServer started on Port 10550 GrpcPort 21539 TClient is connected to server localhost:10550 PQClient connected to localhost:21539 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:37.550064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-28T09:08:37.682342Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-28T09:08:40.450795Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784764273515949:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:40.450858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:40.515068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784785748353436:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:40.515310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:40.515443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784785748353448:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:40.520317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:08:40.554801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784785748353467:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:08:40.656099Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784765913660862:2290];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:40.656144Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:40.834768Z node 1 :TX_PROXY ERROR: Actor# [1:7486784785748353557:2689] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:40.884931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:08:40.910946Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486784787388497449:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:40.912803Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmY4NzUzMDctZDZlZWNhNDEtMTFjMjQxYmItZWIwMGFjMTE=, ActorId: [2:7486784787388497400:2308], ActorState: ExecuteState, TraceId: 01jqe05e05cj6vrr64a44hweb7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:40.915067Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486784785748353569:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:40.916096Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2QzMjk3NzMtOTNjMTAxMjMtNTdhYTM1MWQtNWU0NTQwY2I=, ActorId: [1:7486784785748353433:2337], ActorState: ExecuteState, TraceId: 01jqe05dxk3j071d6jeq851xtj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:40.921001Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:08:40.920779Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:08:41.125887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:08:41.300384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:21539", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-03-28T09:08:41.670555Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqe05evddqa3t1hwfhgqeg4q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjcxODNiYWUtNmJiODMxNDMtMjUzZGVmNDUtOGM5N2FmMWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subscribe to ClusterTracker from [1:7486784790043321275:2997] === CheckClustersList. Ok 2025-03-28T09:08:47.953381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:21539 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-28T09:08:48.044629Z node 1 :PERSQUEUE IN ... 
alDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--test-topic" name rt3.dc1--test-topic version1 CallPersQueueGRPC request to localhost:11151 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-28T09:09:01.960766Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:11151 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-28T09:09:02.469147Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:11151 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-28T09:09:02.977453Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-03-28T09:09:02.991892Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-03-28T09:09:02.992466Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-03-28T09:09:02.992524Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. 
Will connect to endpoint: localhost:11151 2025-03-28T09:09:02.999649Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-03-28T09:09:03.003230Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-28T09:09:03.003263Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-03-28T09:09:03.004392Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-03-28T09:09:03.004468Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:40464 2025-03-28T09:09:03.004482Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:40464 proto=v1 topic=test-topic durationSec=0 2025-03-28T09:09:03.004489Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-28T09:09:03.005556Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-03-28T09:09:03.005653Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-03-28T09:09:03.005667Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:09:03.005675Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-28T09:09:03.005688Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784882651745004:2518] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-03-28T09:09:03.007554Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784882651745004:2518] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-03-28T09:09:03.167512Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784882651745004:2518] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-03-28T09:09:03.168656Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486784882651745054:2518] connected; active server actors: 1 2025-03-28T09:09:03.169115Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784882651745004:2518] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-03-28T09:09:03.169148Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784882651745004:2518] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-03-28T09:09:03.173439Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486784882651745054:2518] disconnected; active server actors: 1 
2025-03-28T09:09:03.173469Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486784882651745054:2518] disconnected no session 2025-03-28T09:09:03.322517Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784882651745004:2518] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-03-28T09:09:03.322566Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784882651745004:2518] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-03-28T09:09:03.322584Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784882651745004:2518] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-03-28T09:09:03.322614Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:09:03.325049Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7486784882651745070:2518], now have 1 active actors on pipe 2025-03-28T09:09:03.325137Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 3, Generation: 1 2025-03-28T09:09:03.325804Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:09:03.325841Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:09:03.325931Z node 3 :PERSQUEUE INFO: new Cookie src|4162c6ed-75c8cb4f-82de5416-8f485673_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-03-28T09:09:03.326048Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-28T09:09:03.326096Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:09:03.326523Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:09:03.326544Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:09:03.326625Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:09:03.326748Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|4162c6ed-75c8cb4f-82de5416-8f485673_0 2025-03-28T09:09:03.330627Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743152943330 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:09:03.330787Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|4162c6ed-75c8cb4f-82de5416-8f485673_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-28T09:09:03.334781Z :INFO: [] MessageGroupId [src] SessionId [src|4162c6ed-75c8cb4f-82de5416-8f485673_0] Write session: close. 
Timeout = 0 ms 2025-03-28T09:09:03.334839Z :INFO: [] MessageGroupId [src] SessionId [src|4162c6ed-75c8cb4f-82de5416-8f485673_0] Write session will now close 2025-03-28T09:09:03.334873Z :DEBUG: [] MessageGroupId [src] SessionId [src|4162c6ed-75c8cb4f-82de5416-8f485673_0] Write session: aborting 2025-03-28T09:09:03.335017Z :INFO: [] MessageGroupId [src] SessionId [src|4162c6ed-75c8cb4f-82de5416-8f485673_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:09:03.335046Z :DEBUG: [] MessageGroupId [src] SessionId [src|4162c6ed-75c8cb4f-82de5416-8f485673_0] Write session: destroy 2025-03-28T09:09:03.338776Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|4162c6ed-75c8cb4f-82de5416-8f485673_0 grpc read done: success: 0 data: 2025-03-28T09:09:03.338809Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|4162c6ed-75c8cb4f-82de5416-8f485673_0 grpc read failed 2025-03-28T09:09:03.338839Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|4162c6ed-75c8cb4f-82de5416-8f485673_0 grpc closed 2025-03-28T09:09:03.338854Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|4162c6ed-75c8cb4f-82de5416-8f485673_0 is DEAD 2025-03-28T09:09:03.339705Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:09:03.343069Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7486784882651745070:2518] destroyed 2025-03-28T09:09:03.343148Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] >> TGRpcClientLowTest::BiStreamCancelled [GOOD] >> TGRpcClientLowTest::ChangeAcl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink 2025-03-28 09:09:00,645 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-28 09:09:00,998 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination:
pid     rss     ref     pdirt
11454   46.0M   46.0M   23.1M   test_tool run_ut @/home/runner/.ya/build/build_root/2909/001501/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/chunk12/testing_out_stuff/test_tool.args
11710   2.0G    2.0G    1.9G    └─ ydb-core-kqp-ut-query --trace-path-append /home/runner/.ya/build/build_root/2909/001501/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/chunk12/ytest.report
Test command err: Trying to start YDB, gRPC: 23162, MsgBus: 23346 2025-03-28T08:59:02.638902Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486782303381364430:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:02.639062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001501/r3tmp/tmpvNVFu7/pdisk_1.dat 2025-03-28T08:59:06.257400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:07.660681Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486782303381364430:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T08:59:07.660755Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T08:59:08.454223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:08.738786Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:10.052808Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:10.166912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:11.545070Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:11.595284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:11.595306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T08:59:11.676888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T08:59:11.677543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T08:59:11.706757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23162, node 1 2025-03-28T08:59:12.440160Z node 1 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T08:59:12.440176Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T08:59:12.440182Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T08:59:12.440501Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23346 TClient is connected to server localhost:23346 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T08:59:22.490590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:22.742062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:23.716503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:24.652825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:25.143085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T08:59:25.143101Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T08:59:25.188360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T08:59:28.212784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782415050517052:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:28.213223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:29.437922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T08:59:29.626749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.065157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.356007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T08:59:30.865964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T08:59:31.302413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T08:59:32.080978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782432230387245:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:32.081029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:32.081180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486782432230387250:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T08:59:32.084791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T08:59:32.183177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486782432230387252:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T08:59:32.306151Z node 1 :TX_PROXY ERROR: Actor# [1:7486782432230387332:4896] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T08:59:35.894651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:00:24.925433Z node 1 :TX_DATASHARD ERROR: CPU usage 63.4688 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037919 table: [/Root/LargeTable] 2025-03-28T09:01:26.639046Z node 1 :TX_DATASHARD ERROR: CPU usage 78.9835 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037919 table: [/Root/LargeTable] 2025-03-28T09:02:30.798315Z node 1 :TX_DATASHARD ERROR: CPU usage 91.448 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037919 table: [/Root/LargeTable] 2025-03-28T09:03:27.451842Z node 1 :OPS_COMPACT ERROR: Compact{72075186224037919.1.332, eph 79} end=2, 12 blobs 0r (max 600), put Spent{time=2.352s,wait=0.191s,interrupts=4} 2025-03-28T09:03:27.466969Z node 1 :TABLET_EXECUTOR ERROR: Leader{72075186224037919:1:352} Compact 205 on TGenCompactionParams{1001: gen 2 epoch 0, 4 parts} step 332, product {0 parts epoch 0} thrown 2025-03-28T09:03:31.261760Z node 1 :TX_DATASHARD ERROR: CPU usage 102.514 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037919 table: 2025-03-28T09:03:42.247195Z node 1 :TX_DATASHARD ERROR: CPU usage 80.7983 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037921 table: [/Root/LargeTable] 2025-03-28T09:04:07.399020Z node 1 :OPS_COMPACT ERROR: Compact{72075186224037920.1.12, eph 39} end=2, 95 blobs 0r (max 1170), put Spent{time=30.35s,wait=2.594s,interrupts=31} 2025-03-28T09:04:07.399243Z node 1 :TABLET_EXECUTOR ERROR: Leader{72075186224037920:1:28} Compact 1 on TGenCompactionParams{1001: gen 255 epoch 0, 2 parts} step 12, product {0 parts epoch 0} thrown 2025-03-28T09:04:09.356809Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923053221408}: tablet 72075186224037921 could not find a group for channel 1 pool /Root:test 2025-03-28T09:04:09.356869Z node 1 :HIVE WARN: ... 
:15.159196Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923025311904}: tablet 72075186224037890 skipped channel 1 2025-03-28T09:07:16.123605Z node 2 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923053492224}: tablet 72075186224037888 could not find a group for channel 0 pool /Root:test 2025-03-28T09:07:16.123646Z node 2 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923053492224}: tablet 72075186224037888 could not find a group for channel 1 pool /Root:test 2025-03-28T09:07:16.123664Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923053492224}: tablet 72075186224037888 wasn't changed 2025-03-28T09:07:16.123682Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923053492224}: tablet 72075186224037888 skipped channel 0 2025-03-28T09:07:16.123724Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923053492224}: tablet 72075186224037888 skipped channel 1 2025-03-28T09:07:16.294932Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-03-28T09:07:16.376750Z node 2 :BS_SKELETON WARN: VDISK[0:_:0:0:0]: TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 2025-03-28T09:07:16.407430Z node 2 :BS_SKELETON WARN: VDISK[82000000:_:0:0:0]: TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 2025-03-28T09:07:17.379231Z node 2 :BS_SKELETON WARN: VDISK[0:_:0:0:0]: TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 2025-03-28T09:07:17.408937Z node 2 :BS_SKELETON WARN: VDISK[82000000:_:0:0:0]: TDskSpaceTrackerActor: YELLOW ZONE Marker# BSVSOOST01 2025-03-28T09:07:17.782665Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_OUT_OF_SPACE;details=Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 21;tx_id=21; 2025-03-28T09:07:17.782797Z node 2 :TX_DATASHARD ERROR: Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 21 2025-03-28T09:07:17.806901Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 21 at tablet 72075186224037890 errors: Status: STATUS_OUT_OF_SPACE Issues: { message: "Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 21" issue_code: 2006 severity: 1 } 2025-03-28T09:07:17.807124Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 21 at tablet 72075186224037890 Status: STATUS_OUT_OF_SPACE Issues: { message: "Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 21" issue_code: 2006 severity: 1 } 2025-03-28T09:07:17.811014Z node 2 :KQP_COMPUTE WARN: SelfId: [2:7486784427423282956:2339], Table: `/Root/LargeTable` ([72057594046644480:2:1]), SessionActorId: [2:7486783714458699008:2339]Got OUT_OF_SPACE for table `/Root/LargeTable`. ShardID=72075186224037890, Sink=[2:7486784427423282956:2339]. Ignored this error.{
: Error: Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 21, code: 2006 } 2025-03-28T09:07:17.811137Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486784427423282919:2339], SessionActorId: [2:7486783714458699008:2339], statusCode=OVERLOADED. Issue=
: Error: Tablet 72075186224037890 is out of space. Table `/Root/LargeTable`., code: 2006
: Error: Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 21, code: 2006 . sessionActorId=[2:7486783714458699008:2339]. isRollback=0 2025-03-28T09:07:17.850301Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmZjNzI1YTMtODUzODlhODEtNTE5MmVhNDktOTlkMTlkZDE=, ActorId: [2:7486783714458699008:2339], ActorState: ExecuteState, TraceId: 01jqe02wgd1g4cm06r65h15cqz, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [2:7486784427423282920:2339] from: [2:7486784427423282919:2339] 2025-03-28T09:07:17.850411Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7486784427423282920:2339] TxId: 281474976715756. Ctx: { TraceId: 01jqe02wgd1g4cm06r65h15cqz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmZjNzI1YTMtODUzODlhODEtNTE5MmVhNDktOTlkMTlkZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: {
: Error: Tablet 72075186224037890 is out of space. Table `/Root/LargeTable`., code: 2006 subissue: {
: Error: Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 21, code: 2006 } } 2025-03-28T09:07:17.851320Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmZjNzI1YTMtODUzODlhODEtNTE5MmVhNDktOTlkMTlkZDE=, ActorId: [2:7486783714458699008:2339], ActorState: ExecuteState, TraceId: 01jqe02wgd1g4cm06r65h15cqz, Create QueryResponse for error on request, msg: Got out of space. Successfully inserted 30 x 0 lines, each of size 1048576bytes Trying to start YDB, gRPC: 62819, MsgBus: 3564 2025-03-28T09:07:25.116984Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486784463592419992:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:25.131912Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001501/r3tmp/tmpymY2r6/pdisk_1.dat 2025-03-28T09:07:25.553988Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:25.613702Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:25.613826Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:25.617997Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62819, node 3 2025-03-28T09:07:25.767130Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:07:25.767172Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:25.767183Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:25.767359Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3564 TClient is connected to server localhost:3564 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:07:26.599319Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:07:26.642181Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
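The OVERLOADED failure earlier in this block is the expected outcome of KqpLimits::OutOfSpaceYQLUpsertFail: the test keeps upserting ~1 MiB rows into /Root/LargeTable until a datashard answers STATUS_OUT_OF_SPACE ("Got out of space. Successfully inserted 30 x 0 lines, each of size 1048576bytes" above). A sketch of one such write, assuming a hypothetical (Key, Data) schema, which the log never prints:

    -- Key and Data are assumed column names; LargeTable's real schema is not in the log.
    -- Each Data value is ~1 MiB, so a few dozen rows exhaust the small test PDisk.
    UPSERT INTO `/Root/LargeTable` (Key, Data)
    VALUES (1u, "…"); -- "…" stands in for a ~1048576-byte string payload
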
2025-03-28T09:07:30.111658Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486784463592419992:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:07:30.111740Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:07:30.661846Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486784485067257542:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:30.661961Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:30.662357Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486784485067257554:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:07:30.668412Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:07:30.711727Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486784485067257556:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:07:30.788455Z node 3 :TX_PROXY ERROR: Actor# [3:7486784485067257634:2705] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:07:40.406390Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:07:40.406420Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:12.379928Z node 3 :TX_DATASHARD ERROR: CPU usage 60.4293 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/LargeTable] Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2909/001501/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/chunk12/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2909/001501/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/chunk12/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] Test command err: 2025-03-28T09:08:34.340319Z :FallbackToSingleDb INFO: Random seed for debugging is 1743152914340262 2025-03-28T09:08:35.095228Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784763329815655:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:35.095283Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:08:35.224958Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784761885661318:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:35.272317Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:08:35.694492Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e6d/r3tmp/tmpgNQLcl/pdisk_1.dat 2025-03-28T09:08:35.778942Z node 2 :PQ_READ_PROXY DEBUG: 
Direct read cache: : Created 2025-03-28T09:08:36.131170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:08:36.370824Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:08:36.557898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:36.557998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:36.560997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:36.561060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:36.569306Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:36.572634Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:08:36.572757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:36.579001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2600, node 1 2025-03-28T09:08:36.887574Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/000e6d/r3tmp/yandexLGJlmn.tmp 2025-03-28T09:08:36.887601Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/000e6d/r3tmp/yandexLGJlmn.tmp 2025-03-28T09:08:36.887750Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/000e6d/r3tmp/yandexLGJlmn.tmp 2025-03-28T09:08:36.887868Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:08:37.018769Z INFO: TTestServer started on Port 12266 GrpcPort 2600 TClient is connected to server localhost:12266 PQClient connected to localhost:2600 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:37.477951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:08:37.661475Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:08:40.029082Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784763329815655:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:40.029137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:40.146789Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784761885661318:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:40.146856Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:40.315567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784784804653198:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:40.315997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784784804653190:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:40.316081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:40.321229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:08:40.322052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784784804653233:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:40.322129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:40.364513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784784804653204:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:08:40.644806Z node 1 :TX_PROXY ERROR: Actor# [1:7486784784804653284:2695] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:40.675739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:08:40.691890Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486784783360498172:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:40.692193Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzM2Yzc4MjEtZDc5NjJmMDgtM2IyNDI1YWUtY2VhYTc0MWI=, ActorId: [2:7486784783360498131:2310], ActorState: ExecuteState, TraceId: 01jqe05dtq52d2ftzs8hxkfsah, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:40.693602Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486784784804653303:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:40.694236Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGYwZjM4ZjItY2MzMTI5YjUtMjQ1OTAyMzEtNzYxMmI1ODA=, ActorId: [1:7486784784804653172:2338], ActorState: ExecuteState, TraceId: 01jqe05dq2fmzaa6mpy9srm8d6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:40.694958Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:08:40.698888Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:08:40.893826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:08:41.076842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:2600", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-03-28T09:08:41.485819Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqe05en31k26epth67sqed62, Databas ... 
BUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-03-28T09:09:03.349528Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:09:03.349535Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-28T09:09:03.349551Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784885008207363:2520] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-03-28T09:09:03.351855Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784885008207363:2520] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-03-28T09:09:03.523064Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784885008207363:2520] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-03-28T09:09:03.524854Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486784885008207416:2520] connected; active server actors: 1 2025-03-28T09:09:03.529384Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784885008207363:2520] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-03-28T09:09:03.529415Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784885008207363:2520] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-03-28T09:09:03.530992Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486784885008207416:2520] disconnected; active server actors: 1 2025-03-28T09:09:03.531024Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486784885008207416:2520] disconnected no session 2025-03-28T09:09:03.659416Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784885008207363:2520] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-03-28T09:09:03.659453Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784885008207363:2520] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-03-28T09:09:03.659468Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784885008207363:2520] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-03-28T09:09:03.659497Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:09:03.660436Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 3, Generation: 1 2025-03-28T09:09:03.660485Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7486784885008207440:2520], now have 1 active actors on pipe 2025-03-28T09:09:03.660572Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:09:03.660591Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:09:03.660657Z node 3 :PERSQUEUE INFO: new Cookie src|2244e9f9-c3c285b3-f86e552d-43a4b9f0_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-03-28T09:09:03.661856Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743152943661 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:09:03.661972Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|2244e9f9-c3c285b3-f86e552d-43a4b9f0_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-28T09:09:03.662154Z :INFO: [] MessageGroupId [src] SessionId [src|2244e9f9-c3c285b3-f86e552d-43a4b9f0_0] Write session: close. Timeout = 0 ms 2025-03-28T09:09:03.662191Z :INFO: [] MessageGroupId [src] SessionId [src|2244e9f9-c3c285b3-f86e552d-43a4b9f0_0] Write session will now close 2025-03-28T09:09:03.662233Z :DEBUG: [] MessageGroupId [src] SessionId [src|2244e9f9-c3c285b3-f86e552d-43a4b9f0_0] Write session: aborting 2025-03-28T09:09:03.662628Z :INFO: [] MessageGroupId [src] SessionId [src|2244e9f9-c3c285b3-f86e552d-43a4b9f0_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:09:03.660748Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-03-28T09:09:03.662668Z :DEBUG: [] MessageGroupId [src] SessionId [src|2244e9f9-c3c285b3-f86e552d-43a4b9f0_0] Write session: destroy 2025-03-28T09:09:03.660794Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:09:03.660872Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:09:03.660883Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:09:03.660983Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:09:03.661076Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|2244e9f9-c3c285b3-f86e552d-43a4b9f0_0 PORTS 24562 5749 2025-03-28T09:09:03.664618Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|2244e9f9-c3c285b3-f86e552d-43a4b9f0_0 grpc read done: success: 0 data: 2025-03-28T09:09:03.664638Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|2244e9f9-c3c285b3-f86e552d-43a4b9f0_0 grpc read failed 2025-03-28T09:09:03.665230Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: src|2244e9f9-c3c285b3-f86e552d-43a4b9f0_0 2025-03-28T09:09:03.665261Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|2244e9f9-c3c285b3-f86e552d-43a4b9f0_0 is DEAD 2025-03-28T09:09:03.665575Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:09:03.665733Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7486784885008207440:2520] destroyed 2025-03-28T09:09:03.665769Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created >>> Ready to answer: ok 2025-03-28T09:09:04.694673Z :INFO: [/Root] OnFederationDiscovery fall back to single mode, database=/Root 2025-03-28T09:09:04.694830Z :INFO: [/Root] [] [be774adb-6b0291c-f1733a50-389444b7] Open read subsessions to databases: { name: , endpoint: localhost:5749, path: /Root } 2025-03-28T09:09:04.695038Z :INFO: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] Starting read session 2025-03-28T09:09:04.695079Z :DEBUG: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] Starting single session 2025-03-28T09:09:04.695645Z :DEBUG: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-03-28T09:09:04.695692Z :DEBUG: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-03-28T09:09:04.695752Z :DEBUG: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] [] Reconnecting session to cluster in 0.000000s 2025-03-28T09:09:04.695957Z :ERROR: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] [] Got error. Status: CLIENT_CALL_UNIMPLEMENTED. Description:
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:5749
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:5749. 2025-03-28T09:09:04.696026Z :DEBUG: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-03-28T09:09:04.696075Z :DEBUG: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2025-03-28T09:09:04.696213Z :INFO: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] [] Closing session to cluster: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:5749" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:5749
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:5749. " } 2025-03-28T09:09:04.698873Z :NOTICE: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:09:04.698921Z :DEBUG: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] [] Abort session to cluster Got new read session event: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:5749" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:5749
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:5749. " } 2025-03-28T09:09:04.699034Z :INFO: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] Closing read session. Close timeout: 0.010000s 2025-03-28T09:09:04.699077Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-28T09:09:04.699122Z :INFO: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] Counters: { Errors: 1 CurrentSessionLifetimeMs: 4 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:09:04.699166Z :INFO: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] Closing read session. Close timeout: 0.000000s 2025-03-28T09:09:04.699194Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-28T09:09:04.699231Z :INFO: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] Counters: { Errors: 1 CurrentSessionLifetimeMs: 4 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:09:04.699267Z :INFO: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] Closing read session. Close timeout: 0.000000s 2025-03-28T09:09:04.699297Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-28T09:09:04.699337Z :INFO: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] Counters: { Errors: 1 CurrentSessionLifetimeMs: 4 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:09:04.699408Z :NOTICE: [/Root] [/Root] [f345d90-fbd6e647-20a282a4-bf45c19a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> KqpJoin::JoinLeftPureExclusion >> KqpPg::TableDeleteWhere-useSink [GOOD] >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup [GOOD] >> YdbYqlClient::CreateTableWithMESettings [GOOD] >> TGRpcYdbTest::ExecuteQueryCache [GOOD] >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinCustomColumnOrder-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 22635, MsgBus: 13794 2025-03-28T09:08:54.951472Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784844920306453:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:54.952017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001893/r3tmp/tmp9mIyP7/pdisk_1.dat 2025-03-28T09:08:55.339850Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22635, node 1 2025-03-28T09:08:55.378404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:55.389174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:55.410204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:55.491248Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:55.491271Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:55.491281Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:55.491377Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13794 TClient is connected to server localhost:13794 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:56.124667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:56.156613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:08:56.311273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:56.494574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:56.589318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:58.019848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784862100177420:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:58.019978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:58.355350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:08:58.441793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:08:58.475705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:08:58.548050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:08:58.577924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:08:58.655798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:08:58.711054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784862100177939:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:58.711098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:58.711230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784862100177944:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:58.714116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:08:58.723714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784862100177946:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:08:58.805675Z node 1 :TX_PROXY ERROR: Actor# [1:7486784862100177999:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:59.895549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:08:59.932277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:08:59.958783Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784844920306453:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:59.958908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:59.990545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:09:00.029773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:09:00.084633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:09:00.157716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27485, MsgBus: 11831 2025-03-28T09:09:02.193706Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784880292911637:2083];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001893/r3tmp/tmpcFuthP/pdisk_1.dat 2025-03-28T09:09:02.221164Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:09:02.311961Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:02.327834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:02.328000Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 27485, node 2 2025-03-28T09:09:02.330574Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:02.415298Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:02.415315Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:02.415321Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:02.415413Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11831 TClient is connected to server localhost:11831 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:02.866228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:02.878786Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:09:02.888275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:02.973992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:03.150576Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:03.251402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:05.826318Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784893177815269:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:05.826641Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:05.878921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:09:05.958674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:09:06.032812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:09:06.087674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:09:06.142521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:09:06.245679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:09:06.345280Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784897472783091:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:06.345375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:06.345772Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784897472783096:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:06.351170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:09:06.370879Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784897472783098:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:09:06.463158Z node 2 :TX_PROXY ERROR: Actor# [2:7486784897472783153:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:07.189989Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784880292911637:2083];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:07.190045Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:07.706384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:09:07.742773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:09:07.779536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:09:07.818766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-28T09:09:07.861813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-28T09:09:07.906253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithMESettings [GOOD] Test command err: 2025-03-28T09:08:43.167343Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784797875866852:2083];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:43.170511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001491/r3tmp/tmpSyvD1N/pdisk_1.dat 2025-03-28T09:08:43.642130Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:43.642230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:43.646677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:43.686713Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7126, 
node 1 2025-03-28T09:08:43.768912Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:08:43.768963Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:08:43.856170Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:43.856198Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:43.856208Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:43.856315Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22084 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:44.145485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:08:46.837608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:08:48.916685Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486784819667751934:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:48.916745Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001491/r3tmp/tmpr3zET4/pdisk_1.dat 2025-03-28T09:08:49.312844Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:49.358143Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:49.358223Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:49.372303Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14230, node 4 2025-03-28T09:08:49.520258Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:49.520280Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:49.520286Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:49.520420Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:49.855142Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:08:52.281075Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:08:54.240451Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486784847119968904:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:54.240558Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001491/r3tmp/tmpfgXenY/pdisk_1.dat 2025-03-28T09:08:54.460614Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2778, node 7 2025-03-28T09:08:54.549998Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:54.550021Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:54.550029Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:54.550156Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:08:54.569843Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:54.569929Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:54.572592Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:54.824088Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:08:57.338022Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.446673Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.509867Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.565482Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:08:59.217157Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486784866268016386:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:59.217492Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001491/r3tmp/tmp2BXegx/pdisk_1.dat 2025-03-28T09:08:59.382210Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:59.424384Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:59.424483Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:59.428880Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25085, node 10 2025-03-28T09:08:59.587229Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:59.587251Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:59.587256Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:59.587374Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6737 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:08:59.836479Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:02.694360Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:09:02.854350Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:09:02.874571Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-03-28T09:09:02.874606Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-03-28T09:09:04.825954Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486784889860004400:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:04.826031Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001491/r3tmp/tmpByEnPk/pdisk_1.dat 2025-03-28T09:09:05.080413Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9108, node 13 2025-03-28T09:09:05.207901Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:05.208051Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:05.283221Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:05.283245Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:05.283255Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:05.283407Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:09:05.328872Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20175 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
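The Hive "TEvTabletStatus ... TabletId: 72075186224037888 not found" warnings above are typically the trace of a table drop: the datashard tablet backing the table is deleted and late status messages for it are discarded. A sketch of the kind of statement that triggers this (path is an assumption):

    -- dropping a table deletes its datashard tablets; Hive then reports
    -- stale TabletIds as "not found"
    DROP TABLE `/Root/demo`;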
2025-03-28T09:09:05.618374Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:08.581149Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExecuteQueryCache [GOOD] Test command err: 2025-03-28T09:08:42.300406Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784793945877134:2269];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:42.300501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ad/r3tmp/tmp7dHMn7/pdisk_1.dat 2025-03-28T09:08:42.789383Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:42.812462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:42.812568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:42.817746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12419, node 1 2025-03-28T09:08:43.148827Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:43.148849Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:43.148861Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:43.148982Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5644 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:43.500208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
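TGRpcYdbTest::ExecuteQueryCache repeatedly executes the same query text so that later executions can be served from the server-side prepared-query cache. A sketch of the shape of such a query (table, column, and parameter names are assumptions, not from the test source):

    -- the cache is keyed on the query text, so the parameter value can
    -- change between executions without recompilation
    DECLARE $id AS Uint64;
    SELECT value FROM `/Root/demo` WHERE id = $id;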
2025-03-28T09:08:47.554035Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486784816342128557:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:47.554209Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ad/r3tmp/tmpO5IXTO/pdisk_1.dat 2025-03-28T09:08:47.834788Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4429, node 4 2025-03-28T09:08:47.904620Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:47.904715Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:47.986701Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:48.043353Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:48.043382Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:48.043391Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:48.044219Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64241 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:48.280003Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
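The "Resource pool default not found" warnings in the runs below are the expected first-use race: several sessions probe /Root/.metadata/workload_manager/pools/default, all see NOT_FOUND, one creates the pool (the ESchemeOpCreateResourcePool entry), and the stragglers then hit "path exist, request accepts it". For comparison, a hedged YQL sketch of creating a pool explicitly (pool name and settings are assumptions, and the available options may vary by YDB version):

    -- hypothetical pool; the default pool in these logs is auto-created
    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );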
2025-03-28T09:08:52.375338Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486784836974264861:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:52.375655Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ad/r3tmp/tmppYWyWm/pdisk_1.dat 2025-03-28T09:08:52.590929Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22426, node 7 2025-03-28T09:08:52.705578Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:52.705671Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:52.753262Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:52.771642Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:52.771665Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:52.771673Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:52.771823Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16715 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:53.075045Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:56.015879Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486784854154134955:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:56.015879Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486784854154134966:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:56.015973Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:56.019474Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:08:56.053732Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486784854154134969:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:08:56.124237Z node 7 :TX_PROXY ERROR: Actor# [7:7486784854154135042:2682] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:58.626492Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486784861185721481:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:58.626563Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ad/r3tmp/tmpO9BpE1/pdisk_1.dat 2025-03-28T09:08:58.868746Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22469, node 10 2025-03-28T09:08:58.964675Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:58.964705Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:58.964714Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:58.964860Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:08:58.965452Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:58.965555Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:58.974754Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:59.211393Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:01.965375Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486784874070624429:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:01.965520Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:01.965924Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486784874070624441:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:01.971665Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-28T09:09:02.002230Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486784874070624443:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:09:02.089178Z node 10 :TX_PROXY ERROR: Actor# [10:7486784878365591831:2687] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:03.919228Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486784883352420438:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:03.919288Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ad/r3tmp/tmp7NQbAO/pdisk_1.dat 2025-03-28T09:09:04.257302Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:04.286463Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:04.286570Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:04.292892Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20840, node 13 2025-03-28T09:09:04.454066Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:04.454093Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:04.454103Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:04.454303Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6848 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:04.780440Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:08.498457Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486784904827257979:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:08.498458Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486784904827257987:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:08.498556Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:08.503252Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-28T09:09:08.527679Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7486784904827257993:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:09:08.586703Z node 13 :TX_PROXY ERROR: Actor# [13:7486784904827258073:2688] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:08.920561Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7486784883352420438:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:08.920718Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] Test command err: 2025-03-28T09:08:36.994523Z :WriteSessionCloseWaitsForWrites INFO: Random seed for debugging is 1743152916994479 2025-03-28T09:08:37.533420Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784773762755988:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:37.533469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:08:37.926257Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784772943971029:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:37.926702Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:08:37.933903Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000de3/r3tmp/tmpKAOoJY/pdisk_1.dat 2025-03-28T09:08:37.968873Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:08:38.414373Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:38.417568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:38.417653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:38.418142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:38.418179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:38.428749Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:08:38.428904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:38.430887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20455, node 1 2025-03-28T09:08:38.539462Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/000de3/r3tmp/yandexvosV1Y.tmp 2025-03-28T09:08:38.539495Z node 1 :NET_CLASSIFIER WARN: will try to initialize 
from file: /home/runner/.ya/build/build_root/2909/000de3/r3tmp/yandexvosV1Y.tmp 2025-03-28T09:08:38.539651Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/000de3/r3tmp/yandexvosV1Y.tmp 2025-03-28T09:08:38.539786Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:08:38.717544Z INFO: TTestServer started on Port 63210 GrpcPort 20455 TClient is connected to server localhost:63210 PQClient connected to localhost:20455 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:39.509662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-28T09:08:42.248840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784794418807666:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:42.248947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:42.249682Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784794418807693:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:42.270578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480
2025-03-28T09:08:42.331027Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784794418807695:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-28T09:08:42.416859Z node 2 :TX_PROXY ERROR: Actor# [2:7486784794418807723:2133] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:42.658270Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784772943971029:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:42.658324Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:42.831250Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784773762755988:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:42.831336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:42.858001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:08:42.880222Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486784794418807730:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:42.880449Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmM4ODNmNDMtM2IxNTM2NWEtNjM4ZjY3MzctOGViNzc3YTU=, ActorId: [2:7486784794418807664:2309], ActorState: ExecuteState, TraceId: 01jqe05fm24mf7yeq9p2nvrfkw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:42.882949Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:08:42.883621Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486784795237593502:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:42.887767Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDY1Y2JkZDAtZTMxMDAxZDYtYjhiZjcxYzQtY2FiOTM5NDc=, ActorId: [1:7486784795237593430:2337], ActorState: ExecuteState, TraceId: 01jqe05fpc3yjmfsmrzp67r1q1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:42.888725Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:08:43.065387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:08:43.286549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:20455", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-03-28T09:08:43.643546Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jqe05gsmaktv2ardh3ke53s0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2M4YTIzN2YtODBlM2UyNGQtNTQyYzEwNGEtYjczNjdiNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486784799532561217:2995] === CheckClustersList. Ok 2025-03-28T09:08:49.820400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:20455 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-28T09:08:49.953999Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:20455 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: ... 
TOPIC CallPersQueueGRPC request to localhost:62217 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-28T09:09:04.845233Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:62217 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-28T09:09:05.351224Z node 3 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--test-topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-03-28T09:09:05.362237Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-03-28T09:09:05.363326Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-03-28T09:09:05.363375Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:62217 2025-03-28T09:09:05.374281Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-03-28T09:09:05.376929Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-28T09:09:05.376969Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-03-28T09:09:05.379322Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-03-28T09:09:05.379462Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:34956 2025-03-28T09:09:05.379489Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:34956 proto=v1 topic=test-topic durationSec=0 2025-03-28T09:09:05.379499Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-28T09:09:05.381202Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-03-28T09:09:05.381326Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-03-28T09:09:05.381342Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:09:05.381352Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS 
Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-28T09:09:05.381370Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784892047425207:2518] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-03-28T09:09:05.385325Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784892047425207:2518] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-03-28T09:09:05.595390Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784892047425207:2518] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-03-28T09:09:05.596212Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486784892047425251:2518] connected; active server actors: 1 2025-03-28T09:09:05.596481Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784892047425207:2518] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-03-28T09:09:05.596509Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784892047425207:2518] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-03-28T09:09:05.601434Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486784892047425251:2518] disconnected; active server actors: 1 2025-03-28T09:09:05.601497Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486784892047425251:2518] disconnected no session 2025-03-28T09:09:05.748905Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784892047425207:2518] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-03-28T09:09:05.748974Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784892047425207:2518] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-03-28T09:09:05.749015Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784892047425207:2518] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-03-28T09:09:05.749050Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:09:05.752241Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2025-03-28T09:09:05.782259Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7486784892047425270:2518], now have 1 active actors on pipe 2025-03-28T09:09:05.782341Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:09:05.782364Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:09:05.782444Z node 4 :PERSQUEUE INFO: new Cookie src|5ee7d09b-4b0b1a6f-1be980aa-b7daad0e_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-03-28T09:09:05.782539Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-03-28T09:09:05.782590Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:09:05.792107Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|5ee7d09b-4b0b1a6f-1be980aa-b7daad0e_0 2025-03-28T09:09:05.794064Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743152945794 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:09:05.794192Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|5ee7d09b-4b0b1a6f-1be980aa-b7daad0e_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-28T09:09:05.794588Z :INFO: [] MessageGroupId [src] SessionId [src|5ee7d09b-4b0b1a6f-1be980aa-b7daad0e_0] Write session: close. Timeout = 0 ms 2025-03-28T09:09:05.794633Z :INFO: [] MessageGroupId [src] SessionId [src|5ee7d09b-4b0b1a6f-1be980aa-b7daad0e_0] Write session will now close 2025-03-28T09:09:05.794672Z :DEBUG: [] MessageGroupId [src] SessionId [src|5ee7d09b-4b0b1a6f-1be980aa-b7daad0e_0] Write session: aborting 2025-03-28T09:09:05.790581Z node 4 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:09:05.790630Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:09:05.790743Z node 4 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:09:05.795156Z :INFO: [] MessageGroupId [src] SessionId [src|5ee7d09b-4b0b1a6f-1be980aa-b7daad0e_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:09:05.795192Z :DEBUG: [] MessageGroupId [src] SessionId [src|5ee7d09b-4b0b1a6f-1be980aa-b7daad0e_0] Write session: destroy 2025-03-28T09:09:05.798990Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|5ee7d09b-4b0b1a6f-1be980aa-b7daad0e_0 grpc read done: success: 0 data: 2025-03-28T09:09:05.799017Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|5ee7d09b-4b0b1a6f-1be980aa-b7daad0e_0 grpc read failed 2025-03-28T09:09:05.799044Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|5ee7d09b-4b0b1a6f-1be980aa-b7daad0e_0 grpc closed 2025-03-28T09:09:05.799063Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|5ee7d09b-4b0b1a6f-1be980aa-b7daad0e_0 is DEAD 2025-03-28T09:09:05.799718Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:09:05.806819Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7486784892047425270:2518] destroyed 2025-03-28T09:09:05.806921Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. Session was created >>> Ready to answer: ok 2025-03-28T09:09:05.906652Z :ERROR: [/Root] OnFederationDiscovery: Got error. Status: UNAVAILABLE. Description: 2025-03-28T09:09:08.338563Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:09:08.338599Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:09.429305Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7486784909227294693:2561] TxId: 281474976715693. 
Ctx: { TraceId: 01jqe069wa4eqab43yk64eqgwn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YmFlMzRiYS01YzYzYzMzNy1jZjg1NTk3LWQ0ZWQzMjNm, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4
2025-03-28T09:09:09.430318Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486784909227294699:2570], TxId: 281474976715693, task: 2. Ctx: { SessionId : ydb://session/3?node_id=3&id=YmFlMzRiYS01YzYzYzMzNy1jZjg1NTk3LWQ0ZWQzMjNm. TraceId : 01jqe069wa4eqab43yk64eqgwn. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7486784909227294693:2561], status: UNAVAILABLE, reason: {
: Error: Terminate execution }
2025-03-28T09:09:09.430536Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486784909227294701:2571], TxId: 281474976715693, task: 4. Ctx: { TraceId : 01jqe069wa4eqab43yk64eqgwn. SessionId : ydb://session/3?node_id=3&id=YmFlMzRiYS01YzYzYzMzNy1jZjg1NTk3LWQ0ZWQzMjNm. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7486784909227294693:2561], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> PgCatalog::CheckSetConfig [GOOD] >> PgCatalog::PgDatabase+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TableDeleteWhere-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 27183, MsgBus: 9298 2025-03-28T09:03:23.063905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783423023980474:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:23.064341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001316/r3tmp/tmpRlPUdh/pdisk_1.dat 2025-03-28T09:03:23.445197Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27183, node 1 2025-03-28T09:03:23.498334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:23.504066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:23.516716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:03:23.583329Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:23.583356Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:23.583363Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:23.583487Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9298 TClient is connected to server localhost:9298 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:03:24.369051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:26.331896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2)
2025-03-28T09:03:26.564810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2)
2025-03-28T09:03:26.675506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
abcd
2025-03-28T09:03:26.851854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
{abcd,abcd}
2025-03-28T09:03:26.967737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
abcd
2025-03-28T09:03:27.111338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480
{"abcd ","abcd "}
2025-03-28T09:03:27.268543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2)
2025-03-28T09:03:27.347443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2)
2025-03-28T09:03:27.424932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
abcd
2025-03-28T09:03:27.600719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480
{abcd,abcd}
2025-03-28T09:03:27.791620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
abcd
2025-03-28T09:03:27.919106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480
{abcd,abcd}
2025-03-28T09:03:28.019753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
2025-03-28T09:03:28.063857Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783423023980474:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:03:28.063929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
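The coercion failures in this pg test mirror stock PostgreSQL semantics for fixed-width types, surfaced here through mkql_terminator.cpp. An illustrative reproduction in plain PostgreSQL (table and column names are assumptions; the error texts match the ones in this log):

    -- fixed-width pg types reject over-long values instead of truncating
    CREATE TABLE coerce_demo (c character(2), b bit(2));
    INSERT INTO coerce_demo (c) VALUES ('abcd');  -- ERROR: value too long for type character(2)
    INSERT INTO coerce_demo (b) VALUES (B'1111'); -- ERROR: bit string length 4 does not match type bit(2)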
: Error: Bulk upsert to table '/Root/Coerce_pgbit_17472595041006102391_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2025-03-28T09:03:28.096367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_17472595041006102391_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2025-03-28T09:03:28.185966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 1111 2025-03-28T09:03:28.329117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 {1111,1111} 2025-03-28T09:03:28.433090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbit_10103374131519304989_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(6) 2025-03-28T09:03:28.505312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbit_10103374131519304989_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(6) 2025-03-28T09:03:28.573604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgvarbit_17472595041006102391_5866627432374416336' Unable to coerce value for pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string too long for type bit varying(2) 2025-03-28T09:03:28.661560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgvarbit_17472595041006102391_11087201080355820517' Unable to coerce value for _pgvarbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string too long for type bit varying(2) 2025-03-28T09:03:28.736433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710698:0, at schemeshard: 72057594046644480 1111 2025-03-28T09:03:28.838439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480 {1111,1111} 2025-03-28T09:03:28.940303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710704:0, at schemeshard: 72057594046644480 1111 2025-03-28T09:03:29.071673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480 {1111,1111} 2025-03-28T09:03:29.191755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480
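The varchar, bit, and varbit coercion failures above are standard PostgreSQL length-check semantics, which YDB's pg-compatible types surface verbatim through the bulk-upsert path (the reason strings come from mkql_terminator.cpp). A minimal SQL sketch that reproduces the same three error strings, assuming plain PostgreSQL behavior; the table and values below are illustrative stand-ins, not the generated Coerce_* fixtures:

    -- Assumed stock PostgreSQL semantics for the three constrained types in the log.
    CREATE TABLE coerce_demo (
        v  varchar(2),       -- character varying(2): rejects values longer than 2 characters
        b  bit(2),           -- fixed-width bit string: length must be exactly 2
        vb bit varying(2)    -- variable-width bit string: length must be <= 2
    );
    INSERT INTO coerce_demo (v)  VALUES ('abcd');   -- ERROR: value too long for type character varying(2)
    INSERT INTO coerce_demo (b)  VALUES (B'1111');  -- ERROR: bit string length 4 does not match type bit(2)
    INSERT INTO coerce_demo (vb) VALUES (B'1111');  -- ERROR: bit string too long for type bit varying(2)

Note the asymmetry visible in the error texts: bit(n) rejects any length mismatch ("does not match"), while varchar(n) and bit varying(n) only reject values that exceed n ("too long"). The Coerce_pgnumeric case that follows was truncated by the test runner.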
: Error: Bulk upsert to table '/Root/Coerce_pgnumeric_17472595041006102391_7644398022171395976' Unable to coerce val ... FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710818:0, at schemeshard: 72057594046644480
2025-03-28T09:09:01.293344Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:01.323118Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710820:0, at schemeshard: 72057594046644480
2025-03-28T09:09:01.440958Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
628
2025-03-28T09:09:01.463904Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710822:0, at schemeshard: 72057594046644480
2025-03-28T09:09:01.624168Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:01.653612Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710824:0, at schemeshard: 72057594046644480
601
2025-03-28T09:09:01.793510Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710825:0, at schemeshard: 72057594046644480
2025-03-28T09:09:01.952401Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710826:0, at schemeshard: 72057594046644480
603
2025-03-28T09:09:02.162479Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710827:0, at schemeshard: 72057594046644480
2025-03-28T09:09:02.304941Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:02.346363Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710829:0, at schemeshard: 72057594046644480
2025-03-28T09:09:02.445384Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
602
2025-03-28T09:09:02.479310Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710831:0, at schemeshard: 72057594046644480
2025-03-28T09:09:02.656500Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710832:0, at schemeshard: 72057594046644480
604
2025-03-28T09:09:02.763945Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:02.801176Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710834:0, at schemeshard: 72057594046644480
2025-03-28T09:09:02.949784Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710835:0, at schemeshard: 72057594046644480
718
2025-03-28T09:09:03.068123Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:03.105458Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710837:0, at schemeshard: 72057594046644480
2025-03-28T09:09:03.220130Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:03.250465Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710840:0, at schemeshard: 72057594046644480
869
2025-03-28T09:09:03.419670Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:03.462288Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710842:0, at schemeshard: 72057594046644480
2025-03-28T09:09:03.564450Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:03.598255Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710845:0, at schemeshard: 72057594046644480
650
2025-03-28T09:09:03.709012Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:03.733363Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710847:0, at schemeshard: 72057594046644480
2025-03-28T09:09:03.859909Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:03.894339Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710849:0, at schemeshard: 72057594046644480
829
2025-03-28T09:09:04.071874Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710850:0, at schemeshard: 72057594046644480
2025-03-28T09:09:04.185235Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:04.214479Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710852:0, at schemeshard: 72057594046644480
774
2025-03-28T09:09:04.341737Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:04.371777Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710854:0, at schemeshard: 72057594046644480
2025-03-28T09:09:04.479013Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:04.504222Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710856:0, at schemeshard: 72057594046644480
2950
2025-03-28T09:09:04.657748Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710857:0, at schemeshard: 72057594046644480
2025-03-28T09:09:04.822531Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710858:0, at schemeshard: 72057594046644480
114
2025-03-28T09:09:04.959543Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:04.994523Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710860:0, at schemeshard: 72057594046644480
2025-03-28T09:09:05.099831Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:05.127938Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710862:0, at schemeshard: 72057594046644480
3802
2025-03-28T09:09:05.247955Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:05.278087Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710864:0, at schemeshard: 72057594046644480
2025-03-28T09:09:05.382546Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:05.384594Z node 11 :TX_DATASHARD ERROR: Prepare transaction failed. txid 281474976710866 at tablet 72075186224037958 errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710866] at 72075186224037958 while waiting for scan finish) |
2025-03-28T09:09:05.389008Z node 11 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710866 at tablet 72075186224037958 status: ERROR errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976710866] at 72075186224037958 while waiting for scan finish) |
2025-03-28T09:09:05.420833Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710867:0, at schemeshard: 72057594046644480
4072
2025-03-28T09:09:05.564542Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:05.610000Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710869:0, at schemeshard: 72057594046644480
2025-03-28T09:09:05.745189Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:05.781586Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710871:0, at schemeshard: 72057594046644480
2025-03-28T09:09:05.908427Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
142
2025-03-28T09:09:05.949057Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710873:0, at schemeshard: 72057594046644480
2025-03-28T09:09:06.059594Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:06.102136Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710875:0, at schemeshard: 72057594046644480
3615
2025-03-28T09:09:06.288154Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710876:0, at schemeshard: 72057594046644480
2025-03-28T09:09:06.399749Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:06.446525Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710878:0, at schemeshard: 72057594046644480
3614
2025-03-28T09:09:06.580657Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:06.610247Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710880:0, at schemeshard: 72057594046644480
2025-03-28T09:09:06.739530Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:06.762409Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710882:0, at schemeshard: 72057594046644480
22
2025-03-28T09:09:06.854230Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:09:06.892320Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710885:0, at schemeshard: 72057594046644480
2025-03-28T09:09:07.043787Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710886:0, at schemeshard: 72057594046644480
2025-03-28T09:09:07.158025Z node 11 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
>> TTableProfileTests::ExplicitPartitionsWrongKeyFormat [GOOD] >> TTableProfileTests::ExplicitPartitionsWrongKeyType
|80.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller
>> KqpFlipJoin::RightSemi_2 [GOOD] >> KqpFlipJoin::RightSemi_3
|80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller
|80.3%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller
>> TGRpcClientLowTest::ChangeAcl [GOOD]
>> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup [GOOD]
>> KqpJoin::LeftJoinWithNull-StreamLookupJoin [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleInnerJoin+StreamLookup [GOOD]
Test command err:
Trying to start YDB, gRPC: 7898, MsgBus: 4312
2025-03-28T09:09:06.203961Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784898464029688:2205];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:09:06.204614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001882/r3tmp/tmpn4fTT8/pdisk_1.dat
2025-03-28T09:09:06.639432Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:09:06.646261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:09:06.646382Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 7898, node 1
2025-03-28T09:09:06.649111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:09:06.715249Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:09:06.715273Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:09:06.715284Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:09:06.715428Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:4312
TClient is connected to server localhost:4312
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:09:07.207744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:09:07.246653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:09:07.401841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:09:07.575654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:09:07.658989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:09:09.330981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784911348933209:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:09.331116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:09.621320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.651608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.692651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.722553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.752432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.785290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.831252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784911348933721:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:09.831335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:09.831611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784911348933726:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:09.834634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:09:09.842917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784911348933728:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:09:09.909794Z node 1 :TX_PROXY ERROR: Actor# [1:7486784911348933781:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:09:11.005476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:09:11.032513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-28T09:09:11.065579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-03-28T09:09:11.094225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-03-28T09:09:11.133283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-03-28T09:09:11.204776Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784898464029688:2205];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:09:11.204846Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:09:11.208013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::ChangeAcl [GOOD]
Test command err:
2025-03-28T09:08:44.027283Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784800725295423:2076];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:08:44.027405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001488/r3tmp/tmprJMVBJ/pdisk_1.dat
2025-03-28T09:08:44.579861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:08:44.579958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:08:44.590738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:08:44.631935Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 19874, node 1
2025-03-28T09:08:44.815091Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-28T09:08:44.815113Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-28T09:08:44.828802Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:08:44.828841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:08:44.828856Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:08:44.829056Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28354
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:08:45.511549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
TClient is connected to server localhost:28354
TClient is connected to server localhost:28354
2025-03-28T09:08:46.137094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480
2025-03-28T09:08:48.258214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784817905165692:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:48.258338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:48.258601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784817905165704:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:48.263051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480
2025-03-28T09:08:48.296583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784817905165706:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking }
2025-03-28T09:08:48.404145Z node 1 :TX_PROXY ERROR: Actor# [1:7486784817905165775:2713] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
TClient is connected to server localhost:28354
TClient::Ls request: Root
2025-03-28T09:08:49.027791Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784800725295423:2076];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:08:49.027861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152925570 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\001\020\200\204\002\032\004user \003" EffectiveACL: "\n\016\010\001\020\200\204\002\032\004user \003" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".metadata" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743152928328 ParentPathId: 1 PathState: EPathStateCreate Owner: "met... (TRUNCATED)
2025-03-28T09:08:50.808935Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486784828240367054:2073];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:08:50.809021Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001488/r3tmp/tmpDifche/pdisk_1.dat
2025-03-28T09:08:51.049777Z node 4 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 25439, node 4
2025-03-28T09:08:51.139574Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:08:51.140451Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:08:51.173187Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:08:51.231588Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:08:51.231612Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:08:51.231621Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:08:51.231779Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:18627
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:08:51.481586Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
TClient is connected to server localhost:18627
TClient is connected to server localhost:18627
2025-03-28T09:08:52.008015Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480
2025-03-28T09:08:54.162093Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486784845420237325:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:54.162093Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486784845420237333:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:54.162183Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:08:54.165565Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480
2025-03-28T09:08:54.195466Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486784845420237339:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking }
2025-03-28T09:08:54.276220Z node 4 :TX_PROXY ERROR: Actor# [4:7486784845420237421:2693] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
TClient is connected to server localhost:18627
TClient::Ls request: Root
2025-03-28T09:08:54.607513Z node 4 :TX_PROXY ERROR: Access denied for user with access DescribeSchema to path Root
TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 12 ErrorReason: "Access denied"
2025-03-28T09:08:56.388933Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486784855586546903:2142];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:08:56.389551Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001488/r3tmp/tmplc2NQP/pdisk_1.dat
2025-03-28T09:08:56.605397Z node 7 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 13282, node 7
2025-03-28T09:08:56.698609Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:08:56.698734Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:08:56.712686Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:08:56.797525Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:08:56.797546Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:08:56.797551Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:08:56.797662Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:13503
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:08:57.088425Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:08:57.172444Z node 7 :TICKET_PARSER ERROR: Ticket some****oken (BB86510A): Could not find correct token validator
2025-03-28T09:08:57.172601Z node 7 :GRPC_SERVER ERROR: Received TEvRefreshTokenResponse, Authenticated = 0
2025-03-28T09:09:01.571412Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486784874486991410:2074];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:09:01.578928Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001488/r3tmp/tmpLs1Dfo/pdisk_1.dat
2025-03-28T09:09:01.845510Z node 10 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:09:01.908041Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:09:01.908146Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:09:01.913647Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 5958, node 10
2025-03-28T09:09:02.127637Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:09:02.127674Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:09:02.127685Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:09:02.127891Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:3985
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:09:02.509029Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:09:07.621350Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486784902086659327:2141];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:09:07.626162Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001488/r3tmp/tmpwEehUj/pdisk_1.dat
2025-03-28T09:09:07.902590Z node 13 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:09:07.933903Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:09:07.934003Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:09:07.938533Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 22310, node 13
2025-03-28T09:09:08.071491Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:09:08.071525Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:09:08.071534Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:09:08.071732Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:29726
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:09:08.425038Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
TClient is connected to server localhost:29726
2025-03-28T09:09:08.808887Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480
>> KqpJoin::JoinLeftPureExclusion [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinWithNull-StreamLookupJoin [GOOD]
Test command err:
Trying to start YDB, gRPC: 11681, MsgBus: 26625
2025-03-28T09:09:06.055639Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784898744166345:2198];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:09:06.062180Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001885/r3tmp/tmprxRQJI/pdisk_1.dat
2025-03-28T09:09:06.423907Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:09:06.425067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:09:06.425126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:09:06.431681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 11681, node 1
2025-03-28T09:09:06.546802Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:09:06.546823Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:09:06.546838Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:09:06.546963Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:26625
TClient is connected to server localhost:26625
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:09:07.062626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:09:07.085801Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:09:07.094007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:09:07.283117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-28T09:09:07.465330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-28T09:09:07.536117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:09:09.297057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784911629069864:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:09.297184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:09.590409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.621183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.656578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.687600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.716967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.788181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.830884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784911629070378:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:09.830964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:09.831124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784911629070383:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:09.834644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:09:09.843950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784911629070385:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:09:09.923745Z node 1 :TX_PROXY ERROR: Actor# [1:7486784911629070438:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:09:10.959162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:09:10.989698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-28T09:09:11.021810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-03-28T09:09:11.050192Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784898744166345:2198];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:09:11.050266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureExclusion [GOOD]
Test command err:
Trying to start YDB, gRPC: 14713, MsgBus: 1502
2025-03-28T09:09:08.469975Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784905918568926:2063];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:09:08.470017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001879/r3tmp/tmpMkD7Ey/pdisk_1.dat
2025-03-28T09:09:08.797088Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 14713, node 1
2025-03-28T09:09:08.872449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:09:08.872555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:09:08.874169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:09:08.882317Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:09:08.882341Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:09:08.882356Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:09:08.882476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:1502
TClient is connected to server localhost:1502
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:09:09.360033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:09:09.390403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:09:09.537759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:09:09.684658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:09:09.757976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:09:11.476628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784918803472583:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:11.476768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:11.800932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:09:11.841427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:09:11.910944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:09:11.952247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:09:11.986706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:09:12.026990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:09:12.073185Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784923098440393:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:12.073250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:12.073418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784923098440398:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:12.076817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:09:12.102699Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784923098440401:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:09:12.202383Z node 1 :TX_PROXY ERROR: Actor# [1:7486784923098440457:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:09:13.470175Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784905918568926:2063];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:09:13.470256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> YdbTableBulkUpsertOlap::UpsertCSV [GOOD] >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard
>> KqpJoinOrder::FiveWayJoinWithComplexPreds2+ColumnStore
|80.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydbd/ydbd
|80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd
|80.3%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd
|80.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut
|80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut
|80.3%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut
|80.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record
|80.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record
|80.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record
>> BasicUsage::ReadMirrored [GOOD]
|80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut
|80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut
|80.4%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut
>> PgCatalog::PgDatabase+useSink [GOOD] >> PgCatalog::PgDatabase-useSink
>> KqpFlipJoin::RightSemi_3 [GOOD]
>> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD]
>> TSubDomainTest::LsLs
>> TModifyUserTest::ModifyUser
>> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain
>> TSubDomainTest::StartAndStopTenanNode
>> TSubDomainTest::FailIfAffectedSetNotInterior
>> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain
>> TSubDomainTest::UserAttributes
>> TSubDomainTest::CreateTablet
>> TSubDomainTest::Boot
>> TSubDomainTest::CreateDummyTabletsInDifferentDomains
------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::ReadMirrored [GOOD]
Test command err:
2025-03-28T09:08:34.479284Z :PropagateSessionClosed INFO: Random seed for debugging is 1743152914479240
2025-03-28T09:08:34.879090Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784759654586193:2072];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:08:34.879122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-28T09:08:35.044095Z node 2 :METADATA_PROVIDER WARN:
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784763090403376:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:35.044592Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:08:35.324367Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e57/r3tmp/tmprx461E/pdisk_1.dat 2025-03-28T09:08:35.364924Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:08:35.732362Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:35.752390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:35.752511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:35.756010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:35.756098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:35.797295Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:08:35.798287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:35.799797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4002, node 1 2025-03-28T09:08:36.026038Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/000e57/r3tmp/yandexvDKDjn.tmp 2025-03-28T09:08:36.026073Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/000e57/r3tmp/yandexvDKDjn.tmp 2025-03-28T09:08:36.026225Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/000e57/r3tmp/yandexvDKDjn.tmp 2025-03-28T09:08:36.026360Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:08:36.084101Z INFO: TTestServer started on Port 26939 GrpcPort 4002 TClient is connected to server localhost:26939 PQClient connected to localhost:4002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
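A note on the recurring warning 'Resource pool default not found or you don't have access permissions' in this and the neighbouring test blocks: on first use the KQP workload service lazily creates the built-in 'default' pool under /Root/.metadata/workload_manager/pools/, so the initial NOT_FOUND fetches, the 'doublechecking' retry, and the TX_PROXY 'path exist, request accepts it' error are a benign creation race, not test failures. Pools can also be created explicitly; a minimal YQL sketch, assuming the documented CREATE RESOURCE POOL syntax (the pool name and limits below are illustrative, not taken from this log):

    -- Hypothetical explicit pool; the tests here rely on lazy creation of 'default'.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- max queries executing at once (assumed value)
        QUEUE_SIZE = 100              -- max queries queued for a slot (assumed value)
    );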
2025-03-28T09:08:36.365000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:08:36.449037Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976720658, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:08:39.304364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784781129423679:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:39.309891Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:39.310313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784781129423707:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:39.314413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480 2025-03-28T09:08:39.411554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784781129423709:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2025-03-28T09:08:39.786941Z node 1 :TX_PROXY ERROR: Actor# [1:7486784781129423800:2678] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:39.860765Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486784780270272924:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:39.861452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-03-28T09:08:39.862182Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486784781129423827:2351], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:39.862925Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDE1ZDA3ZmItYWZjNjA4NDgtMzM1YmE1NzgtOGU0MjY5YzA=, ActorId: [2:7486784780270272884:2309], ActorState: ExecuteState, TraceId: 01jqe05czw7zgm7pakb8ekrt8s, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:39.864031Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTlkZThmOWEtM2Q4MzgzNDMtZjA4NDkwYWQtZTQ3YmFhMWU=, ActorId: [1:7486784781129423677:2337], ActorState: ExecuteState, TraceId: 01jqe05cqx2ygs65ad4m9sxzvy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:39.875043Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:08:39.875040Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:08:39.879278Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784759654586193:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:39.879357Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:40.047853Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784763090403376:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:40.047903Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:40.104970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-03-28T09:08:40.254535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:4002", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-03-28T09:08:40.592473Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720666. Ctx: { TraceId: 01jqe05dt0csxxdppb3rcjwts0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTZiMWE4NGQtYWQ0YTMxNy1iYjcxOTgzNi1jNDg1OTgx, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486784785424391507:2978] === CheckClustersList. Ok 2025-03-28T09:08:46.781244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720680:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:4002 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-28T09:08:46.883158Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:4002 MetaReq ... , "_ip": "ipv6:[::1]:40044", "logtype": "unknown" } MessageMeta: { } } Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 } } >>> get 4 messages in this event 2025-03-28T09:09:18.004225Z :DEBUG: [/Root] [/Root] [ff16648a-d79a871c-8e69e1a-2cc31c19] [] The application data is transferred to the client. 
Number of messages 4, size 1180 bytes 2025-03-28T09:09:18.004267Z :DEBUG: [/Root] [/Root] [ff16648a-d79a871c-8e69e1a-2cc31c19] [] Returning serverBytesSize = 0 to budget 2025-03-28T09:09:18.001286Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic-mirrored-from-dc3' requestId: 2025-03-28T09:09:18.001323Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] got client message batch for topic 'rt3.dc1--test-topic-mirrored-from-dc3' partition 0 2025-03-28T09:09:18.001436Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] read cookie 2 Topic 'rt3.dc1--test-topic-mirrored-from-dc3' partition 0 user user offset 1 count 4 size 1920 endOffset 5 max time lag 0ms effective offset 1 2025-03-28T09:09:18.001462Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 1, current partition end offset: 5 2025-03-28T09:09:18.001589Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-03-28T09:09:18.001608Z node 3 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:09:18.001719Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic-mirrored-from-dc3' partition: 0 messageNo: 0 requestId: cookie: 1 2025-03-28T09:09:18.002095Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_237499513524025520_v1 TopicId: Topic rt3.dc1--test-topic-mirrored-from-dc3 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 5 Result { Offset: 1 Data: "... 202 bytes ..." SourceId: "\000src_id" SeqNo: 2 WriteTimestampMS: 1743152957983 CreateTimestampMS: 1743152957951 UncompressedSize: 130 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 312 bytes ..." SourceId: "\000src_id" SeqNo: 3 WriteTimestampMS: 1743152957985 CreateTimestampMS: 1743152957951 UncompressedSize: 240 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 422 bytes ..." SourceId: "\000src_id" SeqNo: 4 WriteTimestampMS: 1743152957985 CreateTimestampMS: 1743152957951 UncompressedSize: 350 PartitionKey: "" ExplicitHash: "" } Result { Offset: 4 Data: "... 532 bytes ..." 
SourceId: "\000src_id" SeqNo: 5 WriteTimestampMS: 1743152957985 CreateTimestampMS: 1743152957951 UncompressedSize: 460 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 18446744073709551580 RealReadOffset: 4 WaitQuotaTimeMs: 0 EndOffset: 5 StartOffset: 0 } Cookie: 1 } 2025-03-28T09:09:18.002250Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_237499513524025520_v1 TopicId: Topic rt3.dc1--test-topic-mirrored-from-dc3 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 3 from offset5 2025-03-28T09:09:18.002289Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_237499513524025520_v1 after read state TopicId: Topic rt3.dc1--test-topic-mirrored-from-dc3 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 5 ReadOffset 5 ReadGuid 12f9d1c9-129b7972-cf67f203-8674c8a3 has messages 1 2025-03-28T09:09:18.002377Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_237499513524025520_v1 read done: guid# 12f9d1c9-129b7972-cf67f203-8674c8a3, partition# TopicId: Topic rt3.dc1--test-topic-mirrored-from-dc3 in dc dc1 in database: Root, partition 0(assignId:1), size# 1568 2025-03-28T09:09:18.002403Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_237499513524025520_v1 response to read: guid# 12f9d1c9-129b7972-cf67f203-8674c8a3 2025-03-28T09:09:18.002595Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_237499513524025520_v1 Process answer. Aval parts: 0 2025-03-28T09:09:18.005008Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_237499513524025520_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1568 } } 2025-03-28T09:09:18.005126Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_237499513524025520_v1 got read request: guid# 5ffa8d03-a67aabdf-620ff79d-5d1d3dc 2025-03-28T09:09:18.039698Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:09:18.039721Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:18.051800Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|2a88475d-74b255be-e76cd750-c7d3cc13_0] Write session will now close 2025-03-28T09:09:18.051865Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|2a88475d-74b255be-e76cd750-c7d3cc13_0] Write session: aborting 2025-03-28T09:09:18.052394Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|2a88475d-74b255be-e76cd750-c7d3cc13_0] Write session: gracefully shut down, all writes complete >>> Writes to test-topic-mirrored-from-dc3 successful 2025-03-28T09:09:18.052446Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|2a88475d-74b255be-e76cd750-c7d3cc13_0] Write session: destroy 2025-03-28T09:09:18.052599Z :INFO: [/Root] [/Root] [ff16648a-d79a871c-8e69e1a-2cc31c19] Closing read session. 
Close timeout: 18446744073709.551615s 2025-03-28T09:09:18.052664Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc2:0:3:4:0 -:test-topic:0:2:4:0 -:test-topic-mirrored-from-dc3:0:1:4:0 2025-03-28T09:09:18.052710Z :INFO: [/Root] [/Root] [ff16648a-d79a871c-8e69e1a-2cc31c19] Counters: { Errors: 0 CurrentSessionLifetimeMs: 466 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:09:18.053226Z :INFO: [/Root] [/Root] [ff16648a-d79a871c-8e69e1a-2cc31c19] Closing read session. Close timeout: 0.000000s 2025-03-28T09:09:18.053276Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc2:0:3:4:0 -:test-topic:0:2:4:0 -:test-topic-mirrored-from-dc3:0:1:4:0 2025-03-28T09:09:18.053331Z :INFO: [/Root] [/Root] [ff16648a-d79a871c-8e69e1a-2cc31c19] Counters: { Errors: 0 CurrentSessionLifetimeMs: 466 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:09:18.053374Z :INFO: [/Root] [/Root] [ff16648a-d79a871c-8e69e1a-2cc31c19] Closing read session. Close timeout: 0.000000s 2025-03-28T09:09:18.053413Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic-mirrored-from-dc2:0:3:4:0 -:test-topic:0:2:4:0 -:test-topic-mirrored-from-dc3:0:1:4:0 2025-03-28T09:09:18.053448Z :INFO: [/Root] [/Root] [ff16648a-d79a871c-8e69e1a-2cc31c19] Counters: { Errors: 0 CurrentSessionLifetimeMs: 467 BytesRead: 3600 MessagesRead: 15 BytesReadCompressed: 3600 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:09:18.053537Z :NOTICE: [/Root] [/Root] [ff16648a-d79a871c-8e69e1a-2cc31c19] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:09:18.057755Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|2a88475d-74b255be-e76cd750-c7d3cc13_0 grpc read done: success: 0 data: 2025-03-28T09:09:18.057789Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|2a88475d-74b255be-e76cd750-c7d3cc13_0 grpc read failed 2025-03-28T09:09:18.057819Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|2a88475d-74b255be-e76cd750-c7d3cc13_0 grpc closed 2025-03-28T09:09:18.057847Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|2a88475d-74b255be-e76cd750-c7d3cc13_0 is DEAD 2025-03-28T09:09:18.058299Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:09:18.058456Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7486784945349078019:2647] destroyed 2025-03-28T09:09:18.058520Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_237499513524025520_v1 grpc read done: success# 0, data# { } 2025-03-28T09:09:18.058545Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-28T09:09:18.059218Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_237499513524025520_v1 grpc read failed 2025-03-28T09:09:18.059283Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_237499513524025520_v1 grpc closed 2025-03-28T09:09:18.059350Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_237499513524025520_v1 is DEAD 2025-03-28T09:09:18.060530Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Destroy direct read session shared/user_3_1_237499513524025520_v1 2025-03-28T09:09:18.060569Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [3:7486784945349077870:2626] destroyed 2025-03-28T09:09:18.060577Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_237499513524025520_v1 2025-03-28T09:09:18.060598Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7486784945349077865:2625] destroyed 2025-03-28T09:09:18.060608Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_237499513524025520_v1 2025-03-28T09:09:18.060619Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session shared/user_3_1_237499513524025520_v1 2025-03-28T09:09:18.060623Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_237499513524025520_v1 2025-03-28T09:09:18.060636Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_237499513524025520_v1 2025-03-28T09:09:18.060637Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [3:7486784945349077861:2624] destroyed 2025-03-28T09:09:18.060902Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7486784945349077857:2619] disconnected; active server actors: 1 2025-03-28T09:09:18.060950Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037897][rt3.dc1--test-topic-mirrored-from-dc3] pipe [3:7486784945349077857:2619] client user disconnected session shared/user_3_1_237499513524025520_v1 2025-03-28T09:09:18.061059Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe 
[3:7486784945349077859:2619] disconnected; active server actors: 1 2025-03-28T09:09:18.061130Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037895][rt3.dc1--test-topic-mirrored-from-dc2] pipe [3:7486784945349077859:2619] client user disconnected session shared/user_3_1_237499513524025520_v1 2025-03-28T09:09:18.061203Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486784945349077858:2619] disconnected; active server actors: 1 2025-03-28T09:09:18.061222Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486784945349077858:2619] client user disconnected session shared/user_3_1_237499513524025520_v1 |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |80.4%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightSemi_3 [GOOD] Test command err: Trying to start YDB, gRPC: 26493, MsgBus: 64358 2025-03-28T09:09:05.206524Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784894178557331:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:05.207836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001888/r3tmp/tmpome1UF/pdisk_1.dat 2025-03-28T09:09:05.776700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:05.776781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:05.788080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:05.807083Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26493, node 1 2025-03-28T09:09:05.917480Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:05.917502Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:05.917513Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:05.917657Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64358 TClient is connected to server localhost:64358 WaitRootIsUp 'Root'... 
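A note on the SCHEME_ERROR 'Cannot find table db.[/Root/PQ/Config/V2/Cluster]' and the '=== Init DC:' step in the ReadMirrored block above: the PERSQUEUE cluster tracker lists DCs by polling that table, so the error is expected until the harness creates and seeds it, after which '=== CheckClustersList. Ok' confirms the poll succeeds. A minimal YQL sketch of that bootstrap; the schema is an assumption inferred from the seeding UPSERT in the log, not taken from the test source:

    -- Assumed schema for the cluster list; column types inferred from the UPSERT below.
    CREATE TABLE `/Root/PQ/Config/V2/Cluster` (
        name Utf8,
        balancer Utf8,
        local Bool,
        enabled Bool,
        weight Uint64,
        PRIMARY KEY (name)
    );

    -- Seeding statement as it appears in the log.
    UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight)
    VALUES ("dc1", "localhost:4002", true, true, 1000),
           ("dc2", "dc2.logbroker.yandex.net", false, false, 1000);

Until the UPSERT lands, reads of the table fail with exactly the 'does not exist or you do not have access permissions' issue shown above.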
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:06.673147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:06.696376Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:09:06.707526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:06.870032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:07.062102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:07.157512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:08.963037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784907063460858:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:08.963153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:09.214135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:09:09.282835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:09:09.312329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:09:09.340132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:09:09.369031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:09:09.403702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:09:09.466412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784911358428667:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:09.466511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:09.466736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784911358428672:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:09.471344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:09:09.484577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784911358428674:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:09:09.585830Z node 1 :TX_PROXY ERROR: Actor# [1:7486784911358428730:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:10.202442Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784894178557331:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:10.202584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:10.595754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:09:10.663863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:09:10.696420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:09:10.728165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27610, MsgBus: 18251 2025-03-28T09:09:12.695981Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784923430075267:2056];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:12.696017Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001888/r3tmp/tmpB0m7Wn/pdisk_1.dat 2025-03-28T09:09:12.804098Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27610, node 2 2025-03-28T09:09:12.854122Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:12.854221Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:12.878952Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:12.918248Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:12.918270Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:12.918277Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:12.918392Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18251 TClient is connected to server localhost:18251 WaitRootIsUp 'Root'... 
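Since this block is the output of KqpFlipJoin::RightSemi_3, a short refresher on the construct under test: a right semi join keeps each row of the right-hand table that has at least one match on the left, without duplication and without exposing left-hand columns. A minimal YQL sketch with hypothetical tables (names and columns are illustrative, not from the test):

    -- Rows of right_t whose key appears in left_t; left_t columns are not selectable.
    SELECT r.*
    FROM left_t AS l
    RIGHT SEMI JOIN right_t AS r
    ON l.key = r.key;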
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:13.371943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:13.384286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:13.425093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:13.562505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:13.616989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:15.882491Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784936314978926:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:15.882580Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:15.932198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:09:15.979250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:09:16.010170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:09:16.052490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:09:16.088248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:09:16.124261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:09:16.171975Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784940609946731:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:16.172048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:16.172112Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784940609946737:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:16.175206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:09:16.188090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784940609946739:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:09:16.274604Z node 2 :TX_PROXY ERROR: Actor# [2:7486784940609946792:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:17.351570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:09:17.384006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:09:17.413613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:09:17.488703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-28T09:09:17.696425Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784923430075267:2056];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:17.696485Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::ExplicitPartitionsWrongKeyType [GOOD] Test command err: 2025-03-28T09:08:42.725941Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784795116894000:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:42.726001Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00149f/r3tmp/tmpHTXBIi/pdisk_1.dat 2025-03-28T09:08:43.303160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:43.303268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:43.309700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:43.339588Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13201, node 1 2025-03-28T09:08:43.415382Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:43.415401Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:43.415414Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:43.415544Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28837 WaitRootIsUp 'Root'... 
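The TClient::Ls request/response dump that follows is the raw scheme description of the root; the '.sys' child it lists is the virtual directory of system views, which is also queryable from YQL. A minimal sketch, assuming the standard partition_stats view is available in this build:

    -- Per-partition statistics from the system-view directory listed by Ls.
    SELECT *
    FROM `/Root/.sys/partition_stats`
    LIMIT 10;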
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:43.806657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:28837 2025-03-28T09:08:44.137270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:44.178825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:44.706824Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486784802901093829:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:44.763972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:44.764068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:44.779501Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T09:08:44.787681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:44.940350Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TClient is connected to server localhost:28837 2025-03-28T09:08:45.478648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:28837 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743152926020 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } 
ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-03-28T09:08:46.803533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:28837 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1743152927230 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-03-28T09:08:47.687028Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-28T09:08:47.689858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:08:47.726274Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784795116894000:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:47.726340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:50.851515Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486784827449532673:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:50.851564Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00149f/r3tmp/tmpQOWUCA/pdisk_1.dat 2025-03-28T09:08:51.050491Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:51.096686Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:51.096785Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:51.100285Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25734, node 4 2025-03-28T09:08:51.214217Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:51.214243Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:51.214250Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:51.214568Z node 4 :NET_CLASSIFIER ERROR: got bad 
distributable configuration TClient is connected to server localhost:20008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:51.498383Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:20008 2025-03-28T09:08:51.816621Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:51.841182Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:52.350528Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486784836609191452:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:52.350587Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:08:52.406898Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:52.406977Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:52.412857Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::T ... 
WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-03-28T09:08:59.954946Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:09:00.548551Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:01.549701Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:02.555098Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:05.125231Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486784892811823409:2178];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00149f/r3tmp/tmpY1kry3/pdisk_1.dat 2025-03-28T09:09:05.250508Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:09:05.393011Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:05.446565Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:05.446715Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:05.449167Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11858, node 10 2025-03-28T09:09:05.663482Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:05.663510Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:05.663519Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:05.663703Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8672 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
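The TClient::Ls request/response pairs above are describe-path calls against the schemeshard; the PathDescription protobuf they print is the same data a scheme describe returns. A rough equivalent outside the test harness, assuming the gRPC endpoint this run reports (GrpcPort 11858 on node 10) and a still-running server, might be:

    ydb -e grpc://localhost:11858 -d /Root scheme describe /Root

This is only a sketch: the tests talk to the schemeshard through the internal test client (connected to localhost:8672 here), not the public CLI.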
2025-03-28T09:09:06.127304Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:8672 2025-03-28T09:09:06.602508Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:06.645099Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:07.152728Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7486784899761274459:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:07.152814Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:09:07.212626Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:07.212730Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:07.230530Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-03-28T09:09:07.232932Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8672 2025-03-28T09:09:07.698137Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-03-28T09:09:07.698643Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:09:08.153877Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:09.154815Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:10.159179Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:12.998679Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486784923181687865:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:13.002093Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00149f/r3tmp/tmp2uGLLv/pdisk_1.dat 2025-03-28T09:09:13.223726Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:13.282771Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:13.282917Z node 13 :HIVE WARN: 
HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:13.287683Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20974, node 13 2025-03-28T09:09:13.378995Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:13.379018Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:13.379026Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:13.379188Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18754 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:13.766171Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:18754 2025-03-28T09:09:14.181824Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:14.207604Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
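The block that follows contains a TX_PROXY error, "Error at split boundary 0: Value of type Uint64 expected in tuple at position 1": a CreateTable proposal whose explicit split boundary does not match the Uint64 key column. A minimal YQL sketch of the shape being tested, assuming PARTITION_AT_KEYS syntax and reusing the key column seen in the tables above, would be:

    -- Boundary values must match the key column type (here Uint64);
    -- a mistyped value at a boundary position yields the split-boundary error below.
    CREATE TABLE `/Root/ydb_ut_tenant/table-1` (
        key Uint64,
        PRIMARY KEY (key)
    )
    WITH (
        PARTITION_AT_KEYS = (100, 1000)
    );

The test itself issues ESchemeOpCreateTable through the internal client, so the YQL form is an illustration, not the code under test.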
2025-03-28T09:09:14.712322Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7486784931694036689:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:14.712424Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:09:14.716532Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:14.716671Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:14.719843Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-03-28T09:09:14.730984Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18754 2025-03-28T09:09:15.104313Z node 13 :TX_PROXY ERROR: Actor# [13:7486784936066591107:2895] txid# 281474976710660, issues: { message: "Error at split boundary 0: Value of type Uint64 expected in tuple at position 1" severity: 1 } 2025-03-28T09:09:15.112240Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-03-28T09:09:15.112781Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:09:15.716199Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:16.717252Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:17.717954Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [FAIL] >> KqpJoinOrder::TPCH9_100 >> KqpJoinOrder::DatetimeConstantFold-ColumnStore |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |80.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service >> KqpJoin::LeftJoinWithNull+StreamLookupJoin >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |80.4%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration >> TModifyUserTest::ModifyUser [GOOD] >> TModifyUserTest::ModifyLdapUser >> TSubDomainTest::UserAttributes [GOOD] >> TSubDomainTest::UserAttributesApplyIf >> BasicUsage::WaitEventBlocksBeforeDiscovery [GOOD] >> BasicUsage::SimpleHandlers >> TSubDomainTest::Boot [GOOD] >> TSubDomainTest::CheckAccessCopyTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [GOOD] Test command err: Starting YDB, grpc: 11983, msgbus: 5374 2025-03-28T09:05:15.556022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783903997233480:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:15.568589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012f4/r3tmp/tmpdpm1mc/pdisk_1.dat 2025-03-28T09:05:15.995982Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:16.006120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:16.006231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11983, node 1 2025-03-28T09:05:16.032927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:16.065350Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-28T09:05:16.065372Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-28T09:05:16.092343Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:16.092376Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:16.092399Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:16.092624Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5374 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:05:16.290860Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903997233736:2134] Handle TEvNavigate describe path dc-1 2025-03-28T09:05:16.290909Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201513:2446] HANDLE EvNavigateScheme dc-1 2025-03-28T09:05:16.291777Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201513:2446] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:16.332755Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201513:2446] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T09:05:16.343657Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201513:2446] Handle TEvDescribeSchemeResult Forward to# [1:7486783908292201512:2445] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:05:16.375317Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903997233736:2134] Handle TEvProposeTransaction 2025-03-28T09:05:16.375350Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903997233736:2134] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:05:16.375502Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903997233736:2134] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486783908292201528:2454] 2025-03-28T09:05:16.455470Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201528:2454] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:16.455548Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201528:2454] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-28T09:05:16.455573Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201528:2454] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:16.455617Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201528:2454] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:16.455913Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201528:2454] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:16.456029Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201528:2454] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T09:05:16.456070Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201528:2454] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T09:05:16.456220Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201528:2454] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T09:05:16.457200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:05:16.462502Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201528:2454] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T09:05:16.462588Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201528:2454] txid# 281474976710657 SEND to# [1:7486783908292201527:2453] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
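The ESchemeOpModifyACL proposals that follow carry a serialized DiffACL blob (the escaped byte strings in the records), each applying an access-control diff to /dc-1. At the YQL level the analogous change would be a GRANT/REVOKE statement, for example (assuming current YQL ACL syntax, with the subject taken from the log):

    GRANT ALL ON `/dc-1` TO `root@builtin`;

The tests bypass YQL and hand the diff straight to the schemeshard, which is why only the binary form appears here.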
2025-03-28T09:05:16.485124Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903997233736:2134] Handle TEvProposeTransaction 2025-03-28T09:05:16.485176Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903997233736:2134] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T09:05:16.485216Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903997233736:2134] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486783908292201573:2495] 2025-03-28T09:05:16.487861Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201573:2495] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:16.487940Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201573:2495] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-28T09:05:16.487957Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201573:2495] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:16.488023Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201573:2495] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:16.488325Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201573:2495] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:16.488427Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201573:2495] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:05:16.488515Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201573:2495] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:05:16.488747Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201573:2495] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T09:05:16.489326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:05:16.491999Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201573:2495] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T09:05:16.492043Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201573:2495] txid# 281474976710658 SEND to# [1:7486783908292201572:2494] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T09:05:16.515383Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903997233736:2134] Handle TEvProposeTransaction 2025-03-28T09:05:16.515425Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903997233736:2134] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T09:05:16.515471Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903997233736:2134] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7486783908292201591:2505] 2025-03-28T09:05:16.517852Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783908292201591:2505] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\001\022\026\032\024ordinaryuser@builtin\n\"\010\000\022\036\010\001\020\200\200\002\032\024ordinaryuser@builtin \000\n!\010\000\022\035\010\001\020\200\010\032\024ordinaryuser@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\ ... e 59 :TX_PROXY DEBUG: Actor# [59:7486784785054968243:2589] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:40.962641Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784785054968243:2589] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:40.962808Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784785054968243:2589] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:40.962861Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784785054968243:2589] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-03-28T09:08:40.962995Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784785054968243:2589] txid# 281474976710660 HANDLE EvClientConnected 2025-03-28T09:08:40.966315Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784785054968243:2589] txid# 281474976710660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-03-28T09:08:40.966463Z node 59 :TX_PROXY ERROR: Actor# [59:7486784785054968243:2589] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:40.966497Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784785054968243:2589] txid# 281474976710660 SEND to# [59:7486784785054968164:2341] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2025-03-28T09:08:40.987421Z node 59 :TX_PROXY DEBUG: actor# [59:7486784763580130817:2136] Handle TEvProposeTransaction 2025-03-28T09:08:40.987459Z node 59 :TX_PROXY DEBUG: actor# [59:7486784763580130817:2136] TxId# 281474976710661 ProcessProposeTransaction 2025-03-28T09:08:40.987509Z node 59 :TX_PROXY DEBUG: actor# [59:7486784763580130817:2136] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [59:7486784785054968267:2601] 2025-03-28T09:08:40.990186Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784785054968267:2601] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:34486" 2025-03-28T09:08:40.990275Z node 
59 :TX_PROXY DEBUG: Actor# [59:7486784785054968267:2601] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:40.990300Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784785054968267:2601] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:40.990347Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784785054968267:2601] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:40.990655Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784785054968267:2601] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:40.990786Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784785054968267:2601] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:40.990840Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784785054968267:2601] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-03-28T09:08:40.991001Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784785054968267:2601] txid# 281474976710661 HANDLE EvClientConnected 2025-03-28T09:08:41.003796Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784785054968267:2601] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-03-28T09:08:41.003855Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784785054968267:2601] txid# 281474976710661 SEND to# [59:7486784785054968266:2334] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-03-28T09:08:41.235135Z node 59 :TX_PROXY DEBUG: actor# [59:7486784763580130817:2136] Handle TEvProposeTransaction 2025-03-28T09:08:41.235169Z node 59 :TX_PROXY DEBUG: actor# [59:7486784763580130817:2136] TxId# 281474976710662 ProcessProposeTransaction 2025-03-28T09:08:41.235218Z node 59 :TX_PROXY DEBUG: actor# [59:7486784763580130817:2136] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7486784789349935588:2617] 2025-03-28T09:08:41.238043Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935588:2617] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:34494" 2025-03-28T09:08:41.238120Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935588:2617] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:41.238146Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935588:2617] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:41.238209Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935588:2617] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:41.238546Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935588:2617] txid# 281474976710662 HANDLE EvNavigateKeySetResult 
TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:41.238665Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935588:2617] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:41.238866Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935588:2617] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-03-28T09:08:41.239027Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935588:2617] txid# 281474976710662 HANDLE EvClientConnected 2025-03-28T09:08:41.239590Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:08:41.243779Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935588:2617] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-03-28T09:08:41.243843Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935588:2617] txid# 281474976710662 SEND to# [59:7486784789349935587:2348] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-03-28T09:08:41.311740Z node 59 :TX_PROXY DEBUG: actor# [59:7486784763580130817:2136] Handle TEvProposeTransaction 2025-03-28T09:08:41.311774Z node 59 :TX_PROXY DEBUG: actor# [59:7486784763580130817:2136] TxId# 281474976710663 ProcessProposeTransaction 2025-03-28T09:08:41.311818Z node 59 :TX_PROXY DEBUG: actor# [59:7486784763580130817:2136] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7486784789349935628:2639] 2025-03-28T09:08:41.314292Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935628:2639] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5NjEyMSwiaWF0IjoxNzQzMTUyOTIxLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.LncWARKQ3mdjyC-_ZXrK_JnL1PkG98dZ509iEmtfK_iB-z_BxXRuPQcIr594R07fW4gBER7OyIEpezH_S5CeNLUl46TH4c9NqGNxMRywJf4Xz8v-ikgq6hEqkCbAGv6aEZufVSfWOSnizrogXcNDW0KoCCWJAdh5bySmtAmY3988HsvvD9s6uMcF2E8xMog8qW3dppAVX7f3LYjRIrwdWTDLgFPNs0X2NH0LtQ125I3jvyviexeKgpePbjRkAG153pxZG_ndIBKXbH1FXiro8PbqwWzrZXNfPtO18ojX3SYfSupsgUHz7RjqXToAhKwdkqCcQz19DH05Uh25ipK4Fw\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5NjEyMSwiaWF0IjoxNzQzMTUyOTIxLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:34528" 2025-03-28T09:08:41.314371Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935628:2639] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:41.314392Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935628:2639] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-03-28T09:08:41.314531Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935628:2639] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 1 
ResultSet error count: 0 2025-03-28T09:08:41.314567Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935628:2639] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-28T09:08:41.314613Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935628:2639] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:41.314871Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935628:2639] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:41.314904Z node 59 :TX_PROXY ERROR: Actor# [59:7486784789349935628:2639] txid# 281474976710663, Access denied for ordinaryuser, attempt to manage user 2025-03-28T09:08:41.314999Z node 59 :TX_PROXY ERROR: Actor# [59:7486784789349935628:2639] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-03-28T09:08:41.315039Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784789349935628:2639] txid# 281474976710663 SEND to# [59:7486784789349935627:2354] Source {TEvProposeTransactionStatus Status# 5} 2025-03-28T09:08:41.315355Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=YjZjZDIzNzUtOTcxMDNmZjMtMzIzZDRmZmYtODI1Zjk2MDk=, ActorId: [59:7486784789349935613:2354], ActorState: ExecuteState, TraceId: 01jqe05epfamebjjr1rbhhkw0c, Create QueryResponse for error on request, msg: 2025-03-28T09:08:41.315673Z node 59 :TX_PROXY DEBUG: actor# [59:7486784763580130817:2136] Handle TEvExecuteKqpTransaction 2025-03-28T09:08:41.315696Z node 59 :TX_PROXY DEBUG: actor# [59:7486784763580130817:2136] TxId# 281474976710664 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 12297, msgbus: 11209 2025-03-28T09:05:30.061697Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783968704433242:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:30.061744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012ef/r3tmp/tmp5KYzHc/pdisk_1.dat 2025-03-28T09:05:30.815791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:30.815906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:30.822421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:30.848326Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12297, node 1 2025-03-28T09:05:30.909834Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-28T09:05:30.909860Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-28T09:05:31.018993Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:31.019014Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:31.019020Z node 1 :NET_CLASSIFIER WARN: failed to initialize from 
file: (empty maybe) 2025-03-28T09:05:31.019125Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:05:31.106783Z node 1 :TX_PROXY DEBUG: actor# [1:7486783968704433330:2113] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:05:31.127144Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TClient is connected to server localhost:11209 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:05:31.293920Z node 1 :TX_PROXY DEBUG: actor# [1:7486783968704433330:2113] Handle TEvNavigate describe path dc-1 2025-03-28T09:05:31.293964Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401169:2449] HANDLE EvNavigateScheme dc-1 2025-03-28T09:05:31.294235Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401169:2449] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:31.339953Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401169:2449] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T09:05:31.353547Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401169:2449] Handle TEvDescribeSchemeResult Forward to# [1:7486783972999401168:2448] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:05:31.399323Z node 1 :TX_PROXY DEBUG: actor# [1:7486783968704433330:2113] Handle TEvProposeTransaction 2025-03-28T09:05:31.399349Z node 1 :TX_PROXY DEBUG: actor# [1:7486783968704433330:2113] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:05:31.399446Z node 1 :TX_PROXY DEBUG: actor# [1:7486783968704433330:2113] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7486783972999401177:2456] 2025-03-28T09:05:31.512825Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401177:2456] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:31.512913Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401177:2456] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:05:31.512937Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401177:2456] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:31.513004Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401177:2456] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:31.513483Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401177:2456] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:31.513684Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401177:2456] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T09:05:31.513759Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401177:2456] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:05:31.513914Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401177:2456] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:05:31.514715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:05:31.521454Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401177:2456] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:05:31.521513Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401177:2456] txid# 281474976715657 SEND to# [1:7486783972999401176:2455] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 
2025-03-28T09:05:31.556394Z node 1 :TX_PROXY DEBUG: actor# [1:7486783968704433330:2113] Handle TEvProposeTransaction 2025-03-28T09:05:31.556417Z node 1 :TX_PROXY DEBUG: actor# [1:7486783968704433330:2113] TxId# 281474976715658 ProcessProposeTransaction 2025-03-28T09:05:31.556447Z node 1 :TX_PROXY DEBUG: actor# [1:7486783968704433330:2113] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7486783972999401221:2493] 2025-03-28T09:05:31.558668Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401221:2493] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:31.558781Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401221:2493] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:05:31.558799Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401221:2493] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:31.558872Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401221:2493] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:31.559129Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401221:2493] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:31.559210Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401221:2493] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:05:31.559256Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401221:2493] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-03-28T09:05:31.559389Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401221:2493] txid# 281474976715658 HANDLE EvClientConnected 2025-03-28T09:05:31.559838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:05:31.568061Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401221:2493] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2025-03-28T09:05:31.568116Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783972999401221:2493] txid# 281474976715658 SEND to# [1:7486783972999401220:2492] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2025-03-28T09:05:33.913269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783981589335899:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:33.913391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:33.913823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [Wo ... wd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54716" 2025-03-28T09:08:53.476537Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279597:2594] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:53.476553Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279597:2594] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:53.476591Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279597:2594] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:53.476976Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279597:2594] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:53.477085Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279597:2594] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:53.477167Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279597:2594] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-03-28T09:08:53.477322Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279597:2594] txid# 281474976710661 HANDLE EvClientConnected 2025-03-28T09:08:53.485869Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279597:2594] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-03-28T09:08:53.485917Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279597:2594] txid# 281474976710661 SEND to# [59:7486784842110279596:2335] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-03-28T09:08:53.766187Z node 59 :TX_PROXY DEBUG: actor# [59:7486784816340474802:2108] Handle TEvProposeTransaction 2025-03-28T09:08:53.766220Z node 59 :TX_PROXY DEBUG: actor# [59:7486784816340474802:2108] TxId# 281474976710662 ProcessProposeTransaction 2025-03-28T09:08:53.766268Z node 59 :TX_PROXY DEBUG: actor# [59:7486784816340474802:2108] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7486784842110279617:2608] 2025-03-28T09:08:53.769455Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279617:2608] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54730" 2025-03-28T09:08:53.769540Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279617:2608] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:53.769563Z node 59 
:TX_PROXY DEBUG: Actor# [59:7486784842110279617:2608] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:53.769628Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279617:2608] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:53.769964Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279617:2608] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:53.770066Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279617:2608] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:53.770124Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279617:2608] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-03-28T09:08:53.770282Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279617:2608] txid# 281474976710662 HANDLE EvClientConnected 2025-03-28T09:08:53.770892Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:08:53.775267Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279617:2608] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-03-28T09:08:53.775324Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279617:2608] txid# 281474976710662 SEND to# [59:7486784842110279616:2348] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-03-28T09:08:53.858365Z node 59 :TX_PROXY DEBUG: actor# [59:7486784816340474802:2108] Handle TEvProposeTransaction 2025-03-28T09:08:53.858398Z node 59 :TX_PROXY DEBUG: actor# [59:7486784816340474802:2108] TxId# 281474976710663 ProcessProposeTransaction 2025-03-28T09:08:53.858442Z node 59 :TX_PROXY DEBUG: actor# [59:7486784816340474802:2108] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7486784842110279650:2627] 2025-03-28T09:08:53.861145Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279650:2627] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54750" 2025-03-28T09:08:53.861226Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279650:2627] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:53.861248Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279650:2627] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:53.861306Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279650:2627] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:53.861654Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279650:2627] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq 
marker# P5 ErrorCount# 0 2025-03-28T09:08:53.861757Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279650:2627] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:53.861815Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279650:2627] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-03-28T09:08:53.861969Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279650:2627] txid# 281474976710663 HANDLE EvClientConnected 2025-03-28T09:08:53.870168Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279650:2627] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-03-28T09:08:53.870224Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279650:2627] txid# 281474976710663 SEND to# [59:7486784842110279649:2350] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-03-28T09:08:53.939148Z node 59 :TX_PROXY DEBUG: actor# [59:7486784816340474802:2108] Handle TEvProposeTransaction 2025-03-28T09:08:53.939184Z node 59 :TX_PROXY DEBUG: actor# [59:7486784816340474802:2108] TxId# 281474976710664 ProcessProposeTransaction 2025-03-28T09:08:53.939243Z node 59 :TX_PROXY DEBUG: actor# [59:7486784816340474802:2108] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7486784842110279680:2640] 2025-03-28T09:08:53.942092Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279680:2640] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5NjEzMywiaWF0IjoxNzQzMTUyOTMzLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.CBy_cW3xq3LYWuplr_ZwVVJszpWXIZV6QYYag4Yxpc0CSKxOxGzVaeYDl9oVGp1e2pmP2OOl8ONKNIcIRoR8k4HkK3mSZJoC57b2lz27FHAQwykdAqZe_A6Hn61vtP8DNNOJGTtd0MccaF3FUG450aRYSAsPDWV8IPyc9rpkFgDUT91fDOOd_cNszBbkxcdv8I7V3pg7eDesfiQ4YgjW2FoTOlqkizPTs7WuqrBhJYQ9lkUCCO-UwikNAMgQf3psTVeQ22XcWSd717Lw1av7Bp5i2nS6eDD951T_DnfjPbfLmhbC1Vo3xC6XYmIaTalC4TpJBTT3Dyhp26oJj9zovg\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5NjEzMywiaWF0IjoxNzQzMTUyOTMzLCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54766" 2025-03-28T09:08:53.942168Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279680:2640] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:53.942192Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279680:2640] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-03-28T09:08:53.942377Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279680:2640] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-28T09:08:53.942409Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279680:2640] txid# 281474976710664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 
DatabaseOwner: root@builtin 2025-03-28T09:08:53.942473Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279680:2640] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:53.942885Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279680:2640] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:53.942922Z node 59 :TX_PROXY ERROR: Actor# [59:7486784842110279680:2640] txid# 281474976710664, Access denied for ordinaryuser, attempt to manage user 2025-03-28T09:08:53.943055Z node 59 :TX_PROXY ERROR: Actor# [59:7486784842110279680:2640] txid# 281474976710664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-03-28T09:08:53.943086Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784842110279680:2640] txid# 281474976710664 SEND to# [59:7486784842110279679:2363] Source {TEvProposeTransactionStatus Status# 5} 2025-03-28T09:08:53.943426Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=NTNjMDk5ZGItZTA2Zjg4NTEtNTQzOGQ2MDYtZjk3M2VjMzQ=, ActorId: [59:7486784842110279670:2363], ActorState: ExecuteState, TraceId: 01jqe05v12816ezpc69cfnhqch, Create QueryResponse for error on request, msg: 2025-03-28T09:08:53.943704Z node 59 :TX_PROXY DEBUG: actor# [59:7486784816340474802:2108] Handle TEvExecuteKqpTransaction 2025-03-28T09:08:53.943724Z node 59 :TX_PROXY DEBUG: actor# [59:7486784816340474802:2108] TxId# 281474976710665 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [FAIL] Test command err: Starting YDB, grpc: 64972, msgbus: 25126 2025-03-28T09:05:15.697762Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783903319206679:2151];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:15.697864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012fe/r3tmp/tmpt989AQ/pdisk_1.dat 2025-03-28T09:05:16.123457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:16.123590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 64972, node 1 2025-03-28T09:05:16.161371Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-28T09:05:16.173466Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-28T09:05:16.183566Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:16.183930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:16.209108Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:16.209157Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:16.209166Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:16.209330Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25126 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:05:16.415273Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903319206848:2131] Handle TEvNavigate describe path dc-1 2025-03-28T09:05:16.415365Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174630:2445] HANDLE EvNavigateScheme dc-1 2025-03-28T09:05:16.416429Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174630:2445] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:16.474565Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174630:2445] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T09:05:16.488748Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174630:2445] Handle TEvDescribeSchemeResult Forward to# [1:7486783907614174629:2444] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:05:16.503811Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903319206848:2131] Handle TEvProposeTransaction 2025-03-28T09:05:16.503840Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903319206848:2131] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:05:16.503958Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903319206848:2131] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486783907614174645:2453] 2025-03-28T09:05:16.593048Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174645:2453] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:16.593124Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174645:2453] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:05:16.593213Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174645:2453] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:16.593256Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174645:2453] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:16.593548Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174645:2453] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:16.593729Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174645:2453] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T09:05:16.593806Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174645:2453] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T09:05:16.593968Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174645:2453] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T09:05:16.594838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:05:16.597040Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174645:2453] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T09:05:16.597158Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174645:2453] txid# 281474976710657 SEND to# [1:7486783907614174644:2452] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-28T09:05:16.610282Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903319206848:2131] Handle TEvProposeTransaction 2025-03-28T09:05:16.610311Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903319206848:2131] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T09:05:16.610372Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903319206848:2131] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486783907614174687:2491] 2025-03-28T09:05:16.612860Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174687:2491] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:16.612954Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174687:2491] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:05:16.613004Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174687:2491] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:16.613062Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174687:2491] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:16.613407Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174687:2491] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:16.613511Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174687:2491] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:05:16.613548Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174687:2491] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:05:16.613704Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174687:2491] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T09:05:16.614074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:05:16.617219Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174687:2491] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T09:05:16.617300Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174687:2491] txid# 281474976710658 SEND to# [1:7486783907614174686:2490] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T09:05:16.641085Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903319206848:2131] Handle TEvProposeTransaction 2025-03-28T09:05:16.641116Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903319206848:2131] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T09:05:16.641176Z node 1 :TX_PROXY DEBUG: actor# [1:7486783903319206848:2131] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7486783907614174705:2501] 2025-03-28T09:05:16.643815Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783907614174705:2501] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known ... t record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:08:29.546415Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886461:2450] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:08:29.546439Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886461:2450] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:29.546502Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886461:2450] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:29.547213Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886461:2450] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:29.547356Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886461:2450] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T09:08:29.547423Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886461:2450] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:08:29.547574Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886461:2450] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:08:29.548269Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:08:29.554704Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886461:2450] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:08:29.554768Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886461:2450] txid# 281474976715657 SEND to# [59:7486784736888886460:2449] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 
2025-03-28T09:08:29.599284Z node 59 :TX_PROXY DEBUG: actor# [59:7486784732593918618:2112] Handle TEvProposeTransaction 2025-03-28T09:08:29.599340Z node 59 :TX_PROXY DEBUG: actor# [59:7486784732593918618:2112] TxId# 281474976715658 ProcessProposeTransaction 2025-03-28T09:08:29.599384Z node 59 :TX_PROXY DEBUG: actor# [59:7486784732593918618:2112] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [59:7486784736888886501:2486] 2025-03-28T09:08:29.602226Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886501:2486] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:08:29.602306Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886501:2486] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:08:29.602330Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886501:2486] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:29.602394Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886501:2486] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:29.603296Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886501:2486] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:29.603422Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886501:2486] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:29.603489Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886501:2486] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-03-28T09:08:29.603657Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886501:2486] txid# 281474976715658 HANDLE EvClientConnected 2025-03-28T09:08:29.604212Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:08:29.612307Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886501:2486] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2025-03-28T09:08:29.612371Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886501:2486] txid# 281474976715658 SEND to# [59:7486784736888886500:2485] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2025-03-28T09:08:29.663290Z node 59 :TX_PROXY DEBUG: actor# [59:7486784732593918618:2112] Handle TEvProposeTransaction 2025-03-28T09:08:29.663328Z node 59 :TX_PROXY DEBUG: actor# [59:7486784732593918618:2112] TxId# 281474976715659 ProcessProposeTransaction 2025-03-28T09:08:29.663375Z node 59 :TX_PROXY DEBUG: actor# [59:7486784732593918618:2112] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [59:7486784736888886522:2496] 2025-03-28T09:08:29.666197Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7486784736888886522:2496] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:59630" 2025-03-28T09:08:29.666267Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886522:2496] txid# 281474976715659 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:08:29.666290Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886522:2496] txid# 281474976715659 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:29.666346Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886522:2496] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:29.666671Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886522:2496] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:29.670890Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886522:2496] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:29.670981Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886522:2496] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2025-03-28T09:08:29.671179Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886522:2496] txid# 281474976715659 HANDLE EvClientConnected 2025-03-28T09:08:29.672046Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:08:29.676703Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886522:2496] txid# 281474976715659 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715659} 2025-03-28T09:08:29.676762Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784736888886522:2496] txid# 281474976715659 SEND to# [59:7486784736888886521:2319] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48} 2025-03-28T09:08:33.267104Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7486784732593918534:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:33.267185Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:36.857953Z node 59 :KQP_PROXY ERROR: TraceId: "01jqe053bwbdt56rkbj419stmy", Request deadline has expired for 2.167227s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:16349
TBackTrace::Capture()+28 (0x18B5EB7C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1901AA10)
NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+2039 (0x18730487)
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+2789 (0x18746C55)
std::__y1::__bind_return, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase>, std::__y1::tuple, __is_valid_bind_return, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase>, std::__y1::tuple>::value>::type std::__y1::__bind const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>::operator()[abi:fe190000](NUnitTest::TTestContext&)+588 (0x1878371C)
std::__y1::__function::__func, void ()>::operator()()+280 (0x18774058)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x19051A56)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19021589)
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1204 (0x18772F04)
NUnitTest::TTestFactory::Execute()+2438 (0x19022E56)
NUnitTest::RunMain(int, char**)+5213 (0x1904BFCD)
??+0 (0x7F633DCA9D90)
__libc_start_main+128 (0x7F633DCA9E40)
_start+41 (0x160CB029)
>> TSubDomainTest::LsLs [GOOD]
>> TSubDomainTest::LsAltered
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [FAIL]
Test command err: Starting YDB, grpc: 18992, msgbus: 4296
2025-03-28T09:05:13.213114Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783896682085457:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:13.226500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001314/r3tmp/tmpTZ4zAD/pdisk_1.dat 2025-03-28T09:05:13.759498Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:13.767782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:13.767919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:13.775610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18992, node 1 2025-03-28T09:05:13.912405Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:13.912419Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:13.912423Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:13.912494Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4296 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:05:14.206824Z node 1 :TX_PROXY DEBUG: actor# [1:7486783896682085710:2134] Handle TEvNavigate describe path dc-1 2025-03-28T09:05:14.206872Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053485:2444] HANDLE EvNavigateScheme dc-1 2025-03-28T09:05:14.208084Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053485:2444] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:14.264379Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053485:2444] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T09:05:14.276515Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053485:2444] Handle TEvDescribeSchemeResult Forward to# [1:7486783900977053484:2443] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:05:14.306946Z node 1 :TX_PROXY DEBUG: actor# [1:7486783896682085710:2134] Handle TEvProposeTransaction 2025-03-28T09:05:14.306976Z node 1 :TX_PROXY DEBUG: actor# [1:7486783896682085710:2134] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:05:14.307094Z node 1 :TX_PROXY DEBUG: actor# [1:7486783896682085710:2134] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486783900977053513:2455] 2025-03-28T09:05:14.397605Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053513:2455] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:14.397703Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053513:2455] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:05:14.397743Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053513:2455] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:14.397822Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053513:2455] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:14.398134Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053513:2455] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:14.398274Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053513:2455] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T09:05:14.398327Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053513:2455] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T09:05:14.399119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:05:14.400254Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053513:2455] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T09:05:14.402106Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053513:2455] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T09:05:14.402161Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053513:2455] txid# 281474976710657 SEND to# [1:7486783900977053512:2454] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-28T09:05:14.423940Z node 1 :TX_PROXY DEBUG: actor# [1:7486783896682085710:2134] Handle TEvProposeTransaction 2025-03-28T09:05:14.423973Z node 1 :TX_PROXY DEBUG: actor# [1:7486783896682085710:2134] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T09:05:14.424014Z node 1 :TX_PROXY DEBUG: actor# [1:7486783896682085710:2134] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486783900977053553:2491] 2025-03-28T09:05:14.426121Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053553:2491] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:14.426180Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053553:2491] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:05:14.426196Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053553:2491] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:14.426260Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053553:2491] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:14.426575Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053553:2491] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:14.426705Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053553:2491] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:05:14.426750Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053553:2491] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:05:14.426881Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053553:2491] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T09:05:14.427291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:05:14.431192Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053553:2491] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T09:05:14.431244Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053553:2491] txid# 281474976710658 SEND to# [1:7486783900977053552:2490] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T09:05:14.502889Z node 1 :TX_PROXY DEBUG: actor# [1:7486783896682085710:2134] Handle TEvProposeTransaction 2025-03-28T09:05:14.502922Z node 1 :TX_PROXY DEBUG: actor# [1:7486783896682085710:2134] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T09:05:14.502976Z node 1 :TX_PROXY DEBUG: actor# [1:7486783896682085710:2134] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7486783900977053571:2501] 2025-03-28T09:05:14.505336Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783900977053571:2501] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53062" 2025-03-28T09:05:14.505384Z node 1 :TX_PROXY DEBUG: A ... st record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:08:27.410851Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653012:2455] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:27.410871Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653012:2455] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:27.410928Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653012:2455] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:27.411229Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653012:2455] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:27.411330Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653012:2455] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T09:08:27.411388Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653012:2455] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T09:08:27.411568Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653012:2455] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T09:08:27.412187Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:08:27.424247Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653012:2455] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T09:08:27.424305Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653012:2455] txid# 281474976710657 SEND to# [59:7486784727810653011:2454] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-28T09:08:27.454781Z node 59 :TX_PROXY DEBUG: actor# [59:7486784723515684961:2109] Handle TEvProposeTransaction 2025-03-28T09:08:27.454821Z node 59 :TX_PROXY DEBUG: actor# [59:7486784723515684961:2109] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T09:08:27.454865Z node 59 :TX_PROXY DEBUG: actor# [59:7486784723515684961:2109] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [59:7486784727810653052:2491] 2025-03-28T09:08:27.457422Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653052:2491] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:08:27.457489Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653052:2491] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:27.457508Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653052:2491] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:27.457579Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653052:2491] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:27.457904Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653052:2491] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:27.458011Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653052:2491] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:27.458070Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653052:2491] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:08:27.458218Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653052:2491] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T09:08:27.459712Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:08:27.464239Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653052:2491] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T09:08:27.464296Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653052:2491] txid# 281474976710658 SEND to# [59:7486784727810653051:2490] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T09:08:27.530766Z node 59 :TX_PROXY DEBUG: actor# [59:7486784723515684961:2109] Handle TEvProposeTransaction 2025-03-28T09:08:27.530803Z node 59 :TX_PROXY DEBUG: actor# [59:7486784723515684961:2109] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T09:08:27.530852Z node 59 :TX_PROXY DEBUG: actor# [59:7486784723515684961:2109] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [59:7486784727810653070:2501] 2025-03-28T09:08:27.533194Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7486784727810653070:2501] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\001\022\026\032\024ordinaryuser@builtin\n\"\010\000\022\036\010\001\020\200\200\002\032\024ordinaryuser@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58200" 2025-03-28T09:08:27.533259Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653070:2501] txid# 281474976710659 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:27.533280Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653070:2501] txid# 281474976710659 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:27.533337Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653070:2501] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:27.533648Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653070:2501] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:27.533747Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653070:2501] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:27.533808Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653070:2501] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-03-28T09:08:27.533952Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653070:2501] txid# 281474976710659 HANDLE EvClientConnected 2025-03-28T09:08:27.534489Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:08:27.539502Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653070:2501] txid# 281474976710659 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710659} 2025-03-28T09:08:27.539559Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784727810653070:2501] txid# 281474976710659 SEND to# [59:7486784727810653069:2319] Source {TEvProposeTransactionStatus txid# 281474976710659 Status# 48} 2025-03-28T09:08:31.023074Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7486784723515684958:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:31.023157Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:32.995236Z node 59 :KQP_PROXY ERROR: TraceId: "01jqe051913f1g4kcasxzfermh", Request deadline has expired for 0.442520s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27709
TBackTrace::Capture()+28 (0x18B5EB7C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1901AA10)
NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+2039 (0x18730487)
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+2789 (0x18746C55)
std::__y1::__bind_return, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase>, std::__y1::tuple, __is_valid_bind_return, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase>, std::__y1::tuple>::value>::type std::__y1::__bind const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>::operator()[abi:fe190000](NUnitTest::TTestContext&)+588 (0x1878371C)
std::__y1::__function::__func, void ()>::operator()()+280 (0x18774058)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x19051A56)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19021589)
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1204 (0x18772F04)
NUnitTest::TTestFactory::Execute()+2438 (0x19022E56)
NUnitTest::RunMain(int, char**)+5213 (0x1904BFCD)
??+0 (0x7F574F4A6D90)
__libc_start_main+128 (0x7F574F4A6E40)
_start+41 (0x160CB029)
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [FAIL]
Test command err: Starting YDB, grpc: 16479, msgbus: 22594
2025-03-28T09:05:13.326588Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783894684530157:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:13.326632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001313/r3tmp/tmpyQLKkH/pdisk_1.dat 2025-03-28T09:05:13.842341Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:13.863764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:13.863871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:13.868956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16479, node 1 2025-03-28T09:05:13.946554Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:13.946578Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:13.946587Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:13.946766Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22594 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:05:14.193323Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894684530320:2119] Handle TEvNavigate describe path dc-1 2025-03-28T09:05:14.193422Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498108:2437] HANDLE EvNavigateScheme dc-1 2025-03-28T09:05:14.194472Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498108:2437] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:14.241832Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498108:2437] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T09:05:14.260477Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498108:2437] Handle TEvDescribeSchemeResult Forward to# [1:7486783898979498107:2436] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:05:14.279287Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894684530320:2119] Handle TEvProposeTransaction 2025-03-28T09:05:14.279321Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894684530320:2119] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:05:14.279433Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894684530320:2119] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486783898979498121:2443] 2025-03-28T09:05:14.380035Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498121:2443] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:14.380132Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498121:2443] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:05:14.380166Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498121:2443] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:14.380220Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498121:2443] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:14.380505Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498121:2443] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:14.380634Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498121:2443] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T09:05:14.380706Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498121:2443] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T09:05:14.380834Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498121:2443] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T09:05:14.381562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:05:14.390709Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498121:2443] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T09:05:14.390783Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498121:2443] txid# 281474976710657 SEND to# [1:7486783898979498120:2442] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-28T09:05:14.408068Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894684530320:2119] Handle TEvProposeTransaction 2025-03-28T09:05:14.408095Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894684530320:2119] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T09:05:14.408121Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894684530320:2119] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486783898979498173:2484] 2025-03-28T09:05:14.410457Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498173:2484] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:14.410501Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498173:2484] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:05:14.410542Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498173:2484] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:14.410591Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498173:2484] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:14.410879Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498173:2484] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:14.411026Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498173:2484] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:05:14.411075Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498173:2484] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:05:14.411199Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498173:2484] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T09:05:14.411649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:05:14.413974Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498173:2484] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T09:05:14.414032Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498173:2484] txid# 281474976710658 SEND to# [1:7486783898979498172:2483] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T09:05:14.440198Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894684530320:2119] Handle TEvProposeTransaction 2025-03-28T09:05:14.440231Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894684530320:2119] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T09:05:14.440275Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894684530320:2119] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7486783898979498194:2494] 2025-03-28T09:05:14.442629Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498194:2494] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\026\010\001\022\022\032\020db_admin@builtin\n\036\010\000\022\032\010\001\020\200\200\002\032\020db_admin@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:40536" 2025-03-28T09:05:14.442710Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783898979498194:2494] txid# 281474976710659 Bootstrap, UserSID: ro ... curityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:08:31.153955Z node 59 :TX_PROXY DEBUG: actor# [59:7486784743332186347:2110] Handle TEvNavigate describe path dc-1 2025-03-28T09:08:31.154010Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154394:2453] HANDLE EvNavigateScheme dc-1 2025-03-28T09:08:31.154338Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154394:2453] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:31.154450Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154394:2453] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T09:08:31.155365Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154394:2453] Handle TEvDescribeSchemeResult Forward to# [59:7486784747627154393:2452] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 2025-03-28T09:08:31.164097Z node 59 :TX_PROXY DEBUG: actor# [59:7486784743332186347:2110] Handle TEvProposeTransaction 2025-03-28T09:08:31.164140Z node 59 :TX_PROXY DEBUG: actor# [59:7486784743332186347:2110] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:08:31.164245Z node 59 :TX_PROXY DEBUG: actor# [59:7486784743332186347:2110] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# 
[59:7486784747627154400:2458] 2025-03-28T09:08:31.166914Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154400:2458] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:08:31.166985Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154400:2458] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:31.167023Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154400:2458] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:31.167085Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154400:2458] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:31.167373Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154400:2458] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:31.167480Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154400:2458] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T09:08:31.167538Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154400:2458] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:08:31.167680Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154400:2458] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:08:31.168412Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:08:31.174651Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154400:2458] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:08:31.174740Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154400:2458] txid# 281474976715657 SEND to# [59:7486784747627154399:2457] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} waiting... 
2025-03-28T09:08:31.229178Z node 59 :TX_PROXY DEBUG: actor# [59:7486784743332186347:2110] Handle TEvProposeTransaction 2025-03-28T09:08:31.229214Z node 59 :TX_PROXY DEBUG: actor# [59:7486784743332186347:2110] TxId# 281474976715658 ProcessProposeTransaction 2025-03-28T09:08:31.229251Z node 59 :TX_PROXY DEBUG: actor# [59:7486784743332186347:2110] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [59:7486784747627154438:2492] 2025-03-28T09:08:31.232114Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154438:2492] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:08:31.232196Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154438:2492] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:31.232218Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154438:2492] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:31.232276Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154438:2492] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:31.232578Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154438:2492] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:31.232687Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154438:2492] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:31.232745Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154438:2492] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2025-03-28T09:08:31.232885Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154438:2492] txid# 281474976715658 HANDLE EvClientConnected 2025-03-28T09:08:31.233373Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:08:31.243690Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154438:2492] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2025-03-28T09:08:31.243775Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784747627154438:2492] txid# 281474976715658 SEND to# [59:7486784747627154437:2491] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2025-03-28T09:08:35.100126Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7486784743332186372:2095];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:35.100207Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:36.962296Z node 59 :KQP_PROXY ERROR: TraceId: "01jqe054xc1j1wrvjb6gz07zvw", Request 
deadline has expired for 0.686578 seconds
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:29201
TBackTrace::Capture()+28 (0x18B5EB7C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1901AA10)
NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+2039 (0x18730487)
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+1849 (0x187468A9)
std::__y1::__bind_return, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase>, std::__y1::tuple, __is_valid_bind_return, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase>, std::__y1::tuple>::value>::type std::__y1::__bind const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>::operator()[abi:fe190000](NUnitTest::TTestContext&)+588 (0x1878371C)
std::__y1::__function::__func, void ()>::operator()()+280 (0x18774058)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x19051A56)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19021589)
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1204 (0x18772F04)
NUnitTest::TTestFactory::Execute()+2438 (0x19022E56)
NUnitTest::RunMain(int, char**)+5213 (0x1904BFCD)
??+0 (0x7F36B60FDD90)
__libc_start_main+128 (0x7F36B60FDE40)
_start+41 (0x160CB029)
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [FAIL]
Test command err: Starting YDB, grpc: 13743, msgbus: 22469 2025-03-28T09:05:13.890675Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783894777363656:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:13.890755Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012ff/r3tmp/tmpja0cRE/pdisk_1.dat 2025-03-28T09:05:14.416331Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:14.445897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:14.446016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:14.452998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13743, node 1 2025-03-28T09:05:14.567349Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:14.567375Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:14.567383Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:14.567538Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22469 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:05:14.800997Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894777363904:2115] Handle TEvNavigate describe path dc-1 2025-03-28T09:05:14.801070Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899072331705:2442] HANDLE EvNavigateScheme dc-1 2025-03-28T09:05:14.802347Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899072331705:2442] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:14.853773Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899072331705:2442] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T09:05:14.872093Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899072331705:2442] Handle TEvDescribeSchemeResult Forward to# [1:7486783899072331704:2441] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
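The TClient::Ls probe above is served by the same proxy: it forwards a describe request to schemeshard tablet 72057594046644480. Pretty-printed from the SEND line, that request is:

    NKikimrSchemeOp.TDescribePath
      Path: "dc-1"
      Options {
        ReturnBoundaries: true
        ShowPrivateTable: true
        ReturnRangeKey: true
      }

The TEvDescribeSchemeResult that comes back is what the test prints as "TClient::Ls response".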
2025-03-28T09:05:14.912503Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894777363904:2115] Handle TEvProposeTransaction 2025-03-28T09:05:14.912539Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894777363904:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:05:14.912682Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894777363904:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486783899072331727:2450] 2025-03-28T09:05:15.027823Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899072331727:2450] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:15.027944Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899072331727:2450] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:05:15.027980Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899072331727:2450] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:15.028046Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899072331727:2450] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:15.028355Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899072331727:2450] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:15.028502Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899072331727:2450] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T09:05:15.028556Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899072331727:2450] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T09:05:15.028718Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899072331727:2450] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T09:05:15.029488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:05:15.034019Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899072331727:2450] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T09:05:15.034099Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899072331727:2450] txid# 281474976710657 SEND to# [1:7486783899072331726:2449] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-28T09:05:15.053551Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894777363904:2115] Handle TEvProposeTransaction 2025-03-28T09:05:15.053577Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894777363904:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T09:05:15.053616Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894777363904:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486783903367299068:2491] 2025-03-28T09:05:15.056232Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783903367299068:2491] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:15.056300Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783903367299068:2491] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:05:15.056317Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783903367299068:2491] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:15.056382Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783903367299068:2491] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:15.056677Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783903367299068:2491] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:15.056830Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783903367299068:2491] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:05:15.056877Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783903367299068:2491] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:05:15.057013Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783903367299068:2491] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T09:05:15.057527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:05:15.062717Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783903367299068:2491] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T09:05:15.062771Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783903367299068:2491] txid# 281474976710658 SEND to# [1:7486783903367299067:2490] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T09:05:15.127496Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894777363904:2115] Handle TEvProposeTransaction 2025-03-28T09:05:15.127527Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894777363904:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T09:05:15.127586Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894777363904:2115] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7486783903367299086:2501] 2025-03-28T09:05:15.129893Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783903367299086:2501] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58072" 2025-03-28T09:05:15.129957Z node 1 :TX_PROXY DEBUG: ... nType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:08:27.120115Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452730:2442] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:27.120135Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452730:2442] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:27.120191Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452730:2442] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:27.120482Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452730:2442] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:27.120605Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452730:2442] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T09:08:27.120662Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452730:2442] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T09:08:27.121267Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452730:2442] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T09:08:27.122030Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:08:27.137282Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452730:2442] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T09:08:27.137338Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452730:2442] txid# 281474976710657 SEND to# [59:7486784729347452729:2441] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
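For orientation, the DiffACL values in these ModifyACL records are binary-serialized ACL diffs (protobuf wire format, presumably the aclib diff schema from ydb/library/aclib) printed with C-style octal escapes. Hand-decoding the recurring root@builtin diff, "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003", gives the structure below; the field names are assumptions for illustration, only the wire-level layout is read off the bytes:

    diff_entry {               # tag 0x0a (field 1, length-delimited), length 0x1a = 26
      change_type: 0           # 0x08 0x00, presumably "add ACE"
      ace {                    # tag 0x12 (field 2, length-delimited), length 0x16 = 22
        access_type: 1         # 0x08 0x01, presumably "allow"
        access_rights: 65535   # 0x10 0xff 0xff 0x03, varint 65535, a full-rights mask
        sid: "root@builtin"    # 0x1a 0x0c plus 12 bytes of SID
        inheritance: 3         # 0x20 0x03 (0x20 is the literal space before \003)
      }
    }

The longer cluster_admin@builtin and db_admin@builtin blobs elsewhere in the log have the same shape, just with several diff entries concatenated.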
2025-03-28T09:08:27.182813Z node 59 :TX_PROXY DEBUG: actor# [59:7486784725052484741:2091] Handle TEvProposeTransaction 2025-03-28T09:08:27.182844Z node 59 :TX_PROXY DEBUG: actor# [59:7486784725052484741:2091] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T09:08:27.182880Z node 59 :TX_PROXY DEBUG: actor# [59:7486784725052484741:2091] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [59:7486784729347452772:2480] 2025-03-28T09:08:27.185795Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452772:2480] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:08:27.185866Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452772:2480] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:27.185887Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452772:2480] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:27.185939Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452772:2480] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:27.186220Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452772:2480] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:27.194893Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452772:2480] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:27.194982Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452772:2480] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:08:27.196217Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452772:2480] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T09:08:27.197005Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:08:27.204833Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452772:2480] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T09:08:27.204896Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452772:2480] txid# 281474976710658 SEND to# [59:7486784729347452771:2479] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T09:08:27.255997Z node 59 :TX_PROXY DEBUG: actor# [59:7486784725052484741:2091] Handle TEvProposeTransaction 2025-03-28T09:08:27.256027Z node 59 :TX_PROXY DEBUG: actor# [59:7486784725052484741:2091] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T09:08:27.256067Z node 59 :TX_PROXY DEBUG: actor# [59:7486784725052484741:2091] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [59:7486784729347452799:2492] 2025-03-28T09:08:27.259445Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7486784729347452799:2492] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\026\010\001\022\022\032\020db_admin@builtin\n\036\010\000\022\032\010\001\020\200\200\002\032\020db_admin@builtin \000\n\035\010\000\022\031\010\001\020\200\010\032\020db_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:35980" 2025-03-28T09:08:27.259520Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452799:2492] txid# 281474976710659 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:27.259540Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452799:2492] txid# 281474976710659 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:27.259593Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452799:2492] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:27.259873Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452799:2492] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:27.259968Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452799:2492] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:27.260022Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452799:2492] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-03-28T09:08:27.260156Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452799:2492] txid# 281474976710659 HANDLE EvClientConnected 2025-03-28T09:08:27.260678Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:08:27.269348Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452799:2492] txid# 281474976710659 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710659} 2025-03-28T09:08:27.269407Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784729347452799:2492] txid# 281474976710659 SEND to# [59:7486784729347452798:2319] Source {TEvProposeTransactionStatus txid# 281474976710659 Status# 48} 2025-03-28T09:08:31.198885Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7486784725052484729:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:31.199010Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:32.762369Z node 59 :KQP_PROXY ERROR: TraceId: "01jqe0510h24vnh3a6wm2kavgs", Request deadline has expired for 0.481653 seconds
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:1267
TBackTrace::Capture()+28 (0x18B5EB7C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1901AA10)
NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+2039 (0x18730487)
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+2789 (0x18746C55)
std::__y1::__bind_return, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase>, std::__y1::tuple, __is_valid_bind_return, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase>, std::__y1::tuple>::value>::type std::__y1::__bind const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>::operator()[abi:fe190000](NUnitTest::TTestContext&)+588 (0x1878371C)
std::__y1::__function::__func, void ()>::operator()()+280 (0x18774058)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x19051A56)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19021589)
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1204 (0x18772F04)
NUnitTest::TTestFactory::Execute()+2438 (0x19022E56)
NUnitTest::RunMain(int, char**)+5213 (0x1904BFCD)
??+0 (0x7F95AC79AD90)
__libc_start_main+128 (0x7F95AC79AE40)
_start+41 (0x160CB029)
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [GOOD]
Test command err: Starting YDB, grpc: 13173, msgbus: 22146 2025-03-28T09:05:13.344271Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783894946306912:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:13.344334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00130a/r3tmp/tmp9dwaSf/pdisk_1.dat 2025-03-28T09:05:13.802786Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:13.830984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:13.831115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 13173, node 1 2025-03-28T09:05:13.876741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:05:13.893424Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:13.893451Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:13.893460Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:13.893564Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22146 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:05:14.096268Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894946307143:2115] Handle TEvNavigate describe path dc-1 2025-03-28T09:05:14.096310Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241274953:2450] HANDLE EvNavigateScheme dc-1 2025-03-28T09:05:14.097326Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241274953:2450] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:14.134196Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241274953:2450] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T09:05:14.149066Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241274953:2450] Handle TEvDescribeSchemeResult Forward to# [1:7486783899241274952:2449] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:05:14.181964Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894946307143:2115] Handle TEvProposeTransaction 2025-03-28T09:05:14.181991Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894946307143:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:05:14.182083Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894946307143:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486783899241274968:2458] 2025-03-28T09:05:14.255730Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241274968:2458] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:14.255817Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241274968:2458] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:05:14.255848Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241274968:2458] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:14.255892Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241274968:2458] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:14.256202Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241274968:2458] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:14.256338Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241274968:2458] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T09:05:14.256410Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241274968:2458] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T09:05:14.256529Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241274968:2458] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T09:05:14.257232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:05:14.259562Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241274968:2458] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T09:05:14.259630Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241274968:2458] txid# 281474976710657 SEND to# [1:7486783899241274967:2457] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-28T09:05:14.275057Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894946307143:2115] Handle TEvProposeTransaction 2025-03-28T09:05:14.275082Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894946307143:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T09:05:14.275129Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894946307143:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486783899241275008:2494] 2025-03-28T09:05:14.277265Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241275008:2494] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:14.277299Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241275008:2494] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:05:14.277308Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241275008:2494] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:14.277354Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241275008:2494] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:14.277617Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241275008:2494] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:14.277708Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241275008:2494] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:05:14.277749Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241275008:2494] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:05:14.277906Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241275008:2494] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T09:05:14.278330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:05:14.282532Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241275008:2494] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T09:05:14.282573Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241275008:2494] txid# 281474976710658 SEND to# [1:7486783899241275007:2493] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T09:05:14.328701Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894946307143:2115] Handle TEvProposeTransaction 2025-03-28T09:05:14.328731Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894946307143:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T09:05:14.328839Z node 1 :TX_PROXY DEBUG: actor# [1:7486783894946307143:2115] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7486783899241275026:2504] 2025-03-28T09:05:14.331092Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783899241275026:2504] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44456" 2025-03-28T09:05:14.331154Z node 1 :TX_PROXY DEBUG: ... dPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60126" 2025-03-28T09:08:37.846349Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784772908782589:2600] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:37.846374Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784772908782589:2600] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:37.846433Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784772908782589:2600] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:37.849322Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784772908782589:2600] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:37.849482Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784772908782589:2600] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:37.849562Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784772908782589:2600] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-03-28T09:08:37.849734Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784772908782589:2600] txid# 281474976715661 HANDLE EvClientConnected 2025-03-28T09:08:37.887968Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784772908782589:2600] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-03-28T09:08:37.888042Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784772908782589:2600] txid# 281474976715661 SEND to# [59:7486784772908782588:2333] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-03-28T09:08:38.035353Z node 59 :TX_PROXY DEBUG: actor# [59:7486784747138977790:2100] Handle TEvProposeTransaction 2025-03-28T09:08:38.035391Z node 59 :TX_PROXY DEBUG: actor# [59:7486784747138977790:2100] TxId# 281474976715662 ProcessProposeTransaction 2025-03-28T09:08:38.035443Z node 59 :TX_PROXY DEBUG: actor# [59:7486784747138977790:2100] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7486784777203749910:2618] 2025-03-28T09:08:38.037544Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749910:2618] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } 
UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60140" 2025-03-28T09:08:38.037615Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749910:2618] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:38.037639Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749910:2618] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:38.037706Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749910:2618] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:38.038046Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749910:2618] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:38.038160Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749910:2618] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:38.038221Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749910:2618] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-03-28T09:08:38.038393Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749910:2618] txid# 281474976715662 HANDLE EvClientConnected 2025-03-28T09:08:38.039711Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:08:38.043969Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749910:2618] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-03-28T09:08:38.044040Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749910:2618] txid# 281474976715662 SEND to# [59:7486784777203749909:2350] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-03-28T09:08:38.099107Z node 59 :TX_PROXY DEBUG: actor# [59:7486784747138977790:2100] Handle TEvProposeTransaction 2025-03-28T09:08:38.099143Z node 59 :TX_PROXY DEBUG: actor# [59:7486784747138977790:2100] TxId# 281474976715663 ProcessProposeTransaction 2025-03-28T09:08:38.099181Z node 59 :TX_PROXY DEBUG: actor# [59:7486784747138977790:2100] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7486784777203749941:2635] 2025-03-28T09:08:38.101704Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749941:2635] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60156" 2025-03-28T09:08:38.101775Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749941:2635] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:38.101799Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7486784777203749941:2635] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:38.101845Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749941:2635] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:38.102128Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749941:2635] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:38.102236Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749941:2635] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:38.102287Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749941:2635] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-03-28T09:08:38.102420Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749941:2635] txid# 281474976715663 HANDLE EvClientConnected 2025-03-28T09:08:38.111081Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749941:2635] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-03-28T09:08:38.111163Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749941:2635] txid# 281474976715663 SEND to# [59:7486784777203749940:2352] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-03-28T09:08:38.194384Z node 59 :TX_PROXY DEBUG: actor# [59:7486784747138977790:2100] Handle TEvProposeTransaction 2025-03-28T09:08:38.194422Z node 59 :TX_PROXY DEBUG: actor# [59:7486784747138977790:2100] TxId# 281474976715664 ProcessProposeTransaction 2025-03-28T09:08:38.194474Z node 59 :TX_PROXY DEBUG: actor# [59:7486784747138977790:2100] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7486784777203749972:2647] 2025-03-28T09:08:38.197192Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749972:2647] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "targetuser" Password: "passwd" IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5NjExOCwiaWF0IjoxNzQzMTUyOTE4LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.rA4amOXb_i-aVztzgat24ZWbhh1yM_OMcWaVnht37bMumcvD8rvbLgJ2Sxm2sx2ZuGXh4CTWzXFvjnLu8PTivfz9t0FPEuE-Itl2-HAW6BqbVbJLCXZWgmDW7Uu74xJkz0B1n1cFrUOPk4LV9W0scuZ9h7NxNn2zSGV-46IaYTqEgNb4Hihx6EpNMivqfyG53zG5kVI5TikVEbt-0xdiWBOu7DGHl_gNuWB2JhobN6UNIaaAngPpSUQYcB_JeDuKUmcoIx6eod7MfvVqw4M3QNSlzKT-i0m1rTju6SeNKh110e_T-4QaEpvCCaRz-z763s6lWEeoL7uyDxEIz8zNXw\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5NjExOCwiaWF0IjoxNzQzMTUyOTE4LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60172" 2025-03-28T09:08:38.197283Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749972:2647] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:38.197305Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749972:2647] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser 
IsClusterAdministrator: 0 2025-03-28T09:08:38.197482Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749972:2647] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-28T09:08:38.197516Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749972:2647] txid# 281474976715664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-28T09:08:38.197567Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749972:2647] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:38.197886Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749972:2647] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:38.197913Z node 59 :TX_PROXY ERROR: Actor# [59:7486784777203749972:2647] txid# 281474976715664, Access denied for ordinaryuser, attempt to manage user 2025-03-28T09:08:38.198019Z node 59 :TX_PROXY ERROR: Actor# [59:7486784777203749972:2647] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-03-28T09:08:38.198048Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784777203749972:2647] txid# 281474976715664 SEND to# [59:7486784777203749971:2364] Source {TEvProposeTransactionStatus Status# 5} 2025-03-28T09:08:38.198661Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=YmY1ZjY1MzEtZmM2ZDMxZmUtMTg5MjBjYzAtOTlkN2ZkMzk=, ActorId: [59:7486784777203749962:2364], ActorState: ExecuteState, TraceId: 01jqe05bn43wm3gzs0xv5jqh84, Create QueryResponse for error on request, msg: 2025-03-28T09:08:38.198978Z node 59 :TX_PROXY DEBUG: actor# [59:7486784747138977790:2100] Handle TEvExecuteKqpTransaction 2025-03-28T09:08:38.199002Z node 59 :TX_PROXY DEBUG: actor# [59:7486784747138977790:2100] TxId# 281474976715665 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 64084, msgbus: 29171 2025-03-28T09:05:12.400833Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783893454707073:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:12.400870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001328/r3tmp/tmp0SJ0kh/pdisk_1.dat 2025-03-28T09:05:12.923846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:12.923945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:12.927477Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:12.930491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64084, node 1 2025-03-28T09:05:13.078211Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:13.078233Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:13.078242Z node 1 :NET_CLASSIFIER WARN: failed to initialize 
from file: (empty maybe) 2025-03-28T09:05:13.078372Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29171 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:05:13.343739Z node 1 :TX_PROXY DEBUG: actor# [1:7486783893454707310:2116] Handle TEvNavigate describe path dc-1 2025-03-28T09:05:13.343787Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675114:2445] HANDLE EvNavigateScheme dc-1 2025-03-28T09:05:13.344851Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675114:2445] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:13.417108Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675114:2445] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T09:05:13.439169Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675114:2445] Handle TEvDescribeSchemeResult Forward to# [1:7486783897749675113:2444] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:05:13.453761Z node 1 :TX_PROXY DEBUG: actor# [1:7486783893454707310:2116] Handle TEvProposeTransaction 2025-03-28T09:05:13.453796Z node 1 :TX_PROXY DEBUG: actor# [1:7486783893454707310:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:05:13.453894Z node 1 :TX_PROXY DEBUG: actor# [1:7486783893454707310:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486783897749675139:2456] 2025-03-28T09:05:13.573768Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675139:2456] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:13.573842Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675139:2456] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:05:13.573871Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675139:2456] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:13.573915Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675139:2456] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:13.574190Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675139:2456] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:13.574303Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675139:2456] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T09:05:13.574370Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675139:2456] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T09:05:13.574478Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675139:2456] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T09:05:13.576695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:05:13.579260Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675139:2456] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T09:05:13.579332Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675139:2456] txid# 281474976710657 SEND to# [1:7486783897749675138:2455] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-28T09:05:13.593367Z node 1 :TX_PROXY DEBUG: actor# [1:7486783893454707310:2116] Handle TEvProposeTransaction 2025-03-28T09:05:13.593393Z node 1 :TX_PROXY DEBUG: actor# [1:7486783893454707310:2116] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T09:05:13.593445Z node 1 :TX_PROXY DEBUG: actor# [1:7486783893454707310:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486783897749675179:2492] 2025-03-28T09:05:13.595528Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675179:2492] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:13.595565Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675179:2492] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:05:13.595580Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675179:2492] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:13.595621Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675179:2492] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:13.595812Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675179:2492] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:13.595905Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675179:2492] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:05:13.595939Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675179:2492] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:05:13.596041Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675179:2492] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T09:05:13.596849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:05:13.599310Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675179:2492] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T09:05:13.599357Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783897749675179:2492] txid# 281474976710658 SEND to# [1:7486783897749675178:2491] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T09:05:15.618820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783906339609865:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:15.619174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783906339609857:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:15.619247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:15.619847Z node 1 :TX_PROXY DEBUG: actor# [1:7486783893454707310:2116] Handle TEvProposeTransaction 2025-03-28T09:05:15.619868Z node 1 :TX_PROXY DEBUG: actor# [1:7486783893454707310:2116] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28 ... wd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53146" 2025-03-28T09:08:39.600544Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504511:2586] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:39.600567Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504511:2586] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:39.600616Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504511:2586] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:39.600968Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504511:2586] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:39.601081Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504511:2586] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:39.601145Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504511:2586] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-03-28T09:08:39.601282Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504511:2586] txid# 281474976710661 HANDLE EvClientConnected 2025-03-28T09:08:39.613125Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504511:2586] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-03-28T09:08:39.613201Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504511:2586] txid# 281474976710661 SEND to# [59:7486784779723504510:2335] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-03-28T09:08:39.718661Z node 59 :TX_PROXY DEBUG: actor# [59:7486784758248667268:2131] Handle TEvProposeTransaction 2025-03-28T09:08:39.718717Z node 59 :TX_PROXY DEBUG: actor# [59:7486784758248667268:2131] TxId# 281474976710662 ProcessProposeTransaction 2025-03-28T09:08:39.718785Z node 59 :TX_PROXY DEBUG: actor# [59:7486784758248667268:2131] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7486784779723504535:2602] 2025-03-28T09:08:39.721769Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504535:2602] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53148" 2025-03-28T09:08:39.721842Z node 59 
:TX_PROXY DEBUG: Actor# [59:7486784779723504535:2602] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:39.721865Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504535:2602] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:39.721930Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504535:2602] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:39.722264Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504535:2602] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:39.722378Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504535:2602] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:39.722443Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504535:2602] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-03-28T09:08:39.722631Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504535:2602] txid# 281474976710662 HANDLE EvClientConnected 2025-03-28T09:08:39.723430Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:08:39.731392Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504535:2602] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-03-28T09:08:39.731458Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504535:2602] txid# 281474976710662 SEND to# [59:7486784779723504534:2348] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-03-28T09:08:39.795252Z node 59 :TX_PROXY DEBUG: actor# [59:7486784758248667268:2131] Handle TEvProposeTransaction 2025-03-28T09:08:39.795288Z node 59 :TX_PROXY DEBUG: actor# [59:7486784758248667268:2131] TxId# 281474976710663 ProcessProposeTransaction 2025-03-28T09:08:39.795336Z node 59 :TX_PROXY DEBUG: actor# [59:7486784758248667268:2131] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7486784779723504571:2623] 2025-03-28T09:08:39.798444Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504571:2623] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53178" 2025-03-28T09:08:39.798515Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504571:2623] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:39.798537Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504571:2623] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:39.798586Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504571:2623] txid# 281474976710663 
TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:39.798962Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504571:2623] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:39.799063Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504571:2623] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:39.799123Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504571:2623] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-03-28T09:08:39.799286Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504571:2623] txid# 281474976710663 HANDLE EvClientConnected 2025-03-28T09:08:39.807182Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504571:2623] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-03-28T09:08:39.807256Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504571:2623] txid# 281474976710663 SEND to# [59:7486784779723504570:2350] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-03-28T09:08:39.899844Z node 59 :TX_PROXY DEBUG: actor# [59:7486784758248667268:2131] Handle TEvProposeTransaction 2025-03-28T09:08:39.899880Z node 59 :TX_PROXY DEBUG: actor# [59:7486784758248667268:2131] TxId# 281474976710664 ProcessProposeTransaction 2025-03-28T09:08:39.899931Z node 59 :TX_PROXY DEBUG: actor# [59:7486784758248667268:2131] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7486784779723504599:2635] 2025-03-28T09:08:39.902620Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504599:2635] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5NjExOSwiaWF0IjoxNzQzMTUyOTE5LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.ZpPbrp9PuRz52RgUtKag-5J0OrOdpDBA2LjBZxSXW0kr7BXS-pZH_NVo0U-LrYvMHnrIxDwd3GOuPRJe1dvcD5pJS9JvsL1_Zlu_ilUqSlrOphhrSfR3d2klVrMtebFKNk909piG_2WONRAO9s-qO8CQmb_twG7on-UAljBZ1QZv46AB6si8SFUiL4uBGfHKIRRyfpy2aGEB-OC2VkbFHcFfV91_rnFnnCwMOCoBHrK5uvCN0d9bFkATgZQ0jhskPy3HcOlj3SFhqJUu8LEEEmwgIXXEjIL6gY1C2Rfdt1oZIi0IIPkovoT6kr2-2KuUKhEbX2YzNT2Ph8NoAQqGZA\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5NjExOSwiaWF0IjoxNzQzMTUyOTE5LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53214" 2025-03-28T09:08:39.902714Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504599:2635] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:39.902736Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504599:2635] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-03-28T09:08:39.902884Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504599:2635] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-28T09:08:39.902912Z node 59 :TX_PROXY DEBUG: Actor# 
[59:7486784779723504599:2635] txid# 281474976710664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-28T09:08:39.902954Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504599:2635] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:39.903204Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504599:2635] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:39.903235Z node 59 :TX_PROXY ERROR: Actor# [59:7486784779723504599:2635] txid# 281474976710664, Access denied for ordinaryuser, attempt to manage user 2025-03-28T09:08:39.903321Z node 59 :TX_PROXY ERROR: Actor# [59:7486784779723504599:2635] txid# 281474976710664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-03-28T09:08:39.903348Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784779723504599:2635] txid# 281474976710664 SEND to# [59:7486784779723504598:2362] Source {TEvProposeTransactionStatus Status# 5} 2025-03-28T09:08:39.903679Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=NjQ0NTRhMTUtMTNjM2VmZTYtN2NkNGY4ZjEtZjJiZWFjYTI=, ActorId: [59:7486784779723504589:2362], ActorState: ExecuteState, TraceId: 01jqe05d9t70fagt8rwd4ns6yv, Create QueryResponse for error on request, msg: 2025-03-28T09:08:39.904117Z node 59 :TX_PROXY DEBUG: actor# [59:7486784758248667268:2131] Handle TEvExecuteKqpTransaction 2025-03-28T09:08:39.904134Z node 59 :TX_PROXY DEBUG: actor# [59:7486784758248667268:2131] TxId# 281474976710665 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::UpsertArrowBatch_DataShard [GOOD] Test command err: 2025-03-28T09:08:42.742033Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784795324798351:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:42.742443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00149b/r3tmp/tmp9R7pdJ/pdisk_1.dat 2025-03-28T09:08:43.295766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:43.295848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:43.298944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:43.303187Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21187, node 1 2025-03-28T09:08:43.496897Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:43.496925Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:43.496933Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:43.497070Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9748 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:43.838618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:9748 2025-03-28T09:08:44.058199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:44.165400Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7486784803914733863:2320];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:08:44.196948Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037891;self_id=[1:7486784803914733863:2320];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:08:44.197167Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037891 2025-03-28T09:08:44.207725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784803914733863:2320];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:44.207999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784803914733863:2320];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:44.208342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784803914733863:2320];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:44.208524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784803914733863:2320];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:44.208648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784803914733863:2320];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:44.208804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784803914733863:2320];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:44.208942Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784803914733863:2320];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:44.209066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784803914733863:2320];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:44.209184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784803914733863:2320];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:08:44.209346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784803914733863:2320];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:08:44.209571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784803914733863:2320];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:08:44.209756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486784803914733863:2320];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:08:44.213033Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7486784803914733867:2322];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:08:44.241765Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037890;self_id=[1:7486784803914733867:2322];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:08:44.241921Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037890 2025-03-28T09:08:44.249109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784803914733867:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:44.249182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784803914733867:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:44.249428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784803914733867:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:44.249568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784803914733867:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:44.249749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784803914733867:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:44.249885Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7486784803914733867:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:44.250045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784803914733867:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:44.250193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784803914733867:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:44.250312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784803914733867:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:08:44.250437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784803914733867:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:08:44.250548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784803914733867:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:08:44.250665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784803914733867:2322];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:08:44.254673Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7486784803914733901:2323];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:08:44.282894Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:7486784803914733901:2323];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:08:44.283026Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-03-28T09:08:44.290296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784803914733901:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:44.290362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784803914733901:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:44.290587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784803914733901:2323];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:44.290754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[ ... 
ready operation [1743152961193:281474976710658] in PlanQueue unit at 72075186224037888 2025-03-28T09:09:21.159665Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743152961193:281474976710658 keys extracted: 0 2025-03-28T09:09:21.159830Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:09:21.159944Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:09:21.159992Z node 13 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:09:21.160635Z node 13 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:09:21.161061Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:09:21.162647Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743152961192 2025-03-28T09:09:21.162670Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:09:21.165469Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743152961200 2025-03-28T09:09:21.167020Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743152961193} 2025-03-28T09:09:21.167089Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:09:21.167153Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:09:21.167191Z node 13 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:09:21.167233Z node 13 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:09:21.167304Z node 13 :TX_DATASHARD DEBUG: Complete [1743152961193 : 281474976710658] from 72075186224037888 at tablet 72075186224037888 send result to client [13:7486784942370228357:2213], exec latency: 1 ms, propose latency: 7 ms 2025-03-28T09:09:21.167343Z node 13 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710658 state Ready TxInFly 0 2025-03-28T09:09:21.167407Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:09:21.174896Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710658 datashard 72075186224037888 state Ready 2025-03-28T09:09:21.174982Z node 13 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8984;columns=10; 2025-03-28T09:09:21.210506Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [13:7486784959550098368:2741], serverId# [13:7486784959550098369:2742], sessionId# [0:0:0] 2025-03-28T09:09:21.210633Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-03-28T09:09:21.220036Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-03-28T09:09:21.220090Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 SUCCESS Upsert done: 0.041421s 2025-03-28T09:09:21.246118Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486784959550098377:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:21.246249Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:21.247053Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486784959550098389:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:21.251350Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:09:21.259393Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:09:21.306934Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:09:21.326175Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7486784959550098391:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:09:21.396322Z node 13 :TX_PROXY ERROR: Actor# [13:7486784959550098470:2810] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:21.512516Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:09:21.512658Z node 13 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710661 at tablet 72075186224037888 2025-03-28T09:09:21.519835Z node 13 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:09:21.523305Z node 13 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710661 at step 1743152961564 at tablet 72075186224037888 { Transactions { TxId: 281474976710661 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743152961564 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:09:21.523333Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:09:21.523476Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:09:21.523492Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:09:21.523512Z node 13 :TX_DATASHARD DEBUG: Found ready operation [1743152961564:281474976710661] in PlanQueue unit at 72075186224037888 2025-03-28T09:09:21.523660Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743152961564:281474976710661 keys extracted: 0 2025-03-28T09:09:21.523942Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:09:21.529977Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743152961564} 2025-03-28T09:09:21.530030Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:09:21.530076Z node 13 :TX_DATASHARD DEBUG: Complete [1743152961564 : 281474976710661] from 72075186224037888 at tablet 72075186224037888 send result to client [13:7486784959550098514:2826], exec latency: 0 ms, propose latency: 6 ms 2025-03-28T09:09:21.530099Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:09:21.533574Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqe06npr75xns50vagegn9ec, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Mjk2MGQ3OS1iMjA0MjJmOS03NzhhMWE2My04Mjg3ZTQ2MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:09:21.539213Z node 13 :TX_DATASHARD INFO: Start scan, at: [13:7486784959550098541:2146], tablet: [13:7486784959550098260:2339], scanId: 4, table: /Root/LogsX, gen: 1, deadline: 2025-03-28T09:19:21.538865Z 2025-03-28T09:09:21.541270Z node 13 :TX_DATASHARD DEBUG: Got ScanDataAck, at: [13:7486784959550098541:2146], scanId: 4, table: /Root/LogsX, gen: 1, tablet: [13:7486784959550098260:2339], freeSpace: 8388608;limits:(bytes=0;chunks=0); 2025-03-28T09:09:21.541296Z node 13 :TX_DATASHARD DEBUG: Wakeup driver at: [13:7486784959550098541:2146] 2025-03-28T09:09:21.542791Z node 13 :TX_DATASHARD DEBUG: Range 0 of 1 exhausted: try next one. 
table: /Root/LogsX range: [(Utf8 : NULL, Timestamp : NULL) ; ()) next range: 2025-03-28T09:09:21.542812Z node 13 :TX_DATASHARD DEBUG: TableRanges is over, at: [13:7486784959550098541:2146], scanId: 4, table: /Root/LogsX 2025-03-28T09:09:21.542865Z node 13 :TX_DATASHARD DEBUG: Finish scan, at: [13:7486784959550098541:2146], scanId: 4, table: /Root/LogsX, reason: 0, abortEvent: 2025-03-28T09:09:21.542901Z node 13 :TX_DATASHARD DEBUG: Send ScanData, from: [13:7486784959550098541:2146], to: [13:7486784959550098537:2359], scanId: 4, table: /Root/LogsX, bytes: 11000, rows: 100, page faults: 0, finished: 1, pageFault: 0 2025-03-28T09:09:21.543289Z node 13 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:09:21.543386Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:09:21.543410Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:09:21.543430Z node 13 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:09:21.543482Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:09:21.553961Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743152961564, txId: 281474976710661] shutting down 2025-03-28T09:09:22.104459Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7486784942370227988:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:22.104945Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:22.796394Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqe06p4f325m17mcnwft81ev, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NTkzYzQyYjktNTg0YjUwMjctN2QzNjYzNTYtYTMyN2M0Yjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root SUCCESS count returned 100 rows Negative (wrong format): BAD_REQUEST Negative (wrong data): SCHEME_ERROR FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8016;columns=9; 2025-03-28T09:09:22.963546Z node 13 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:142;event=cannot_parse;message=Invalid: Ran out of field metadata, likely malformed;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; Negative (less columns): BAD_REQUEST FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8984;columns=10; 2025-03-28T09:09:22.982788Z node 13 :ARROW_HELPER ERROR: fline=arrow_helpers.cpp:142;event=cannot_parse;message=Serialization error: batch is not valid: Invalid: Offsets buffer size (bytes): 400 isn't large enough for length: 100;schema_columns_count=10;schema_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; Negative (reordered columns): BAD_REQUEST |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |80.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |80.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 23847, msgbus: 18903 2025-03-28T09:05:12.516694Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783891723019959:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:12.516749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001336/r3tmp/tmpMYAG9x/pdisk_1.dat 2025-03-28T09:05:13.000537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:13.000659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:13.002143Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:13.007890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23847, node 1 2025-03-28T09:05:13.110227Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:13.110249Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:13.110255Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:13.110408Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18903 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:05:13.339465Z node 1 :TX_PROXY DEBUG: actor# [1:7486783891723020186:2116] Handle TEvNavigate describe path dc-1 2025-03-28T09:05:13.339516Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017987983:2440] HANDLE EvNavigateScheme dc-1 2025-03-28T09:05:13.340781Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017987983:2440] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:13.378846Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017987983:2440] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T09:05:13.392408Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017987983:2440] Handle TEvDescribeSchemeResult Forward to# [1:7486783896017987980:2437] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:05:13.414377Z node 1 :TX_PROXY DEBUG: actor# [1:7486783891723020186:2116] Handle TEvProposeTransaction 2025-03-28T09:05:13.414405Z node 1 :TX_PROXY DEBUG: actor# [1:7486783891723020186:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:05:13.414525Z node 1 :TX_PROXY DEBUG: actor# [1:7486783891723020186:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486783896017987996:2446] 2025-03-28T09:05:13.580681Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017987996:2446] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:13.580777Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017987996:2446] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:05:13.580812Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017987996:2446] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:13.580893Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017987996:2446] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:13.581310Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017987996:2446] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:13.581494Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017987996:2446] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T09:05:13.581547Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017987996:2446] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T09:05:13.581671Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017987996:2446] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T09:05:13.582643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:05:13.587647Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017987996:2446] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T09:05:13.587742Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017987996:2446] txid# 281474976710657 SEND to# [1:7486783896017987995:2445] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-28T09:05:13.614621Z node 1 :TX_PROXY DEBUG: actor# [1:7486783891723020186:2116] Handle TEvProposeTransaction 2025-03-28T09:05:13.614648Z node 1 :TX_PROXY DEBUG: actor# [1:7486783891723020186:2116] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T09:05:13.614673Z node 1 :TX_PROXY DEBUG: actor# [1:7486783891723020186:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486783896017988048:2487] 2025-03-28T09:05:13.616549Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017988048:2487] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:13.616578Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017988048:2487] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:05:13.616590Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017988048:2487] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:13.616637Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017988048:2487] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:13.616828Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017988048:2487] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:13.616915Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017988048:2487] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:05:13.616958Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017988048:2487] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:05:13.617070Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017988048:2487] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T09:05:13.617513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:05:13.619640Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017988048:2487] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T09:05:13.619689Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017988048:2487] txid# 281474976710658 SEND to# [1:7486783896017988047:2486] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T09:05:13.656041Z node 1 :TX_PROXY DEBUG: actor# [1:7486783891723020186:2116] Handle TEvProposeTransaction 2025-03-28T09:05:13.656097Z node 1 :TX_PROXY DEBUG: actor# [1:7486783891723020186:2116] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T09:05:13.656160Z node 1 :TX_PROXY DEBUG: actor# [1:7486783891723020186:2116] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7486783896017988066:2497] 2025-03-28T09:05:13.658604Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783896017988066:2497] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:50526" 2025-03-28T09:05:13.658675Z node 1 :TX_PROXY DEBUG: ... 75148Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868189:2544] txid# 281474976715660 HANDLE EvClientConnected 2025-03-28T09:08:32.676568Z node 59 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-28T09:08:32.685050Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868189:2544] txid# 281474976715660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715660} 2025-03-28T09:08:32.685111Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868189:2544] txid# 281474976715660 SEND to# [59:7486784751447868188:2343] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 53} 2025-03-28T09:08:32.738912Z node 59 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [59:7486784751447868188:2343], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-28T09:08:32.811813Z node 59 :TX_PROXY DEBUG: actor# [59:7486784729973030771:2102] Handle TEvProposeTransaction 2025-03-28T09:08:32.811848Z node 59 :TX_PROXY DEBUG: actor# [59:7486784729973030771:2102] TxId# 281474976715661 ProcessProposeTransaction 2025-03-28T09:08:32.811891Z node 59 :TX_PROXY DEBUG: actor# [59:7486784729973030771:2102] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7486784751447868270:2602] 2025-03-28T09:08:32.814868Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868270:2602] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-03-28T09:08:32.814921Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868270:2602] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:32.814944Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868270:2602] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-03-28T09:08:32.815166Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868270:2602] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-28T09:08:32.815194Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868270:2602] txid# 281474976715661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-28T09:08:32.815918Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868270:2602] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:08:32.816013Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868270:2602] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:32.816193Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868270:2602] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:32.816339Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868270:2602] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:32.816391Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868270:2602] txid# 281474976715661 SEND to# 72057594046644480 
shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-03-28T09:08:32.816525Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868270:2602] txid# 281474976715661 HANDLE EvClientConnected 2025-03-28T09:08:32.826672Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868270:2602] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-03-28T09:08:32.826844Z node 59 :TX_PROXY ERROR: Actor# [59:7486784751447868270:2602] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:32.826882Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868270:2602] txid# 281474976715661 SEND to# [59:7486784751447868188:2343] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-03-28T09:08:32.850085Z node 59 :TX_PROXY DEBUG: actor# [59:7486784729973030771:2102] Handle TEvProposeTransaction 2025-03-28T09:08:32.850121Z node 59 :TX_PROXY DEBUG: actor# [59:7486784729973030771:2102] TxId# 281474976715662 ProcessProposeTransaction 2025-03-28T09:08:32.850163Z node 59 :TX_PROXY DEBUG: actor# [59:7486784729973030771:2102] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7486784751447868294:2614] 2025-03-28T09:08:32.852769Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868294:2614] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53176" 2025-03-28T09:08:32.852835Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868294:2614] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:32.852858Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868294:2614] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:08:32.852906Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868294:2614] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:32.853888Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868294:2614] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:32.854020Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868294:2614] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:08:32.854093Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868294:2614] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 
TabletId# 72057594046644480} 2025-03-28T09:08:32.854298Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868294:2614] txid# 281474976715662 HANDLE EvClientConnected 2025-03-28T09:08:32.868005Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868294:2614] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-03-28T09:08:32.868067Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868294:2614] txid# 281474976715662 SEND to# [59:7486784751447868293:2336] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-03-28T09:08:32.926744Z node 59 :TX_PROXY DEBUG: actor# [59:7486784729973030771:2102] Handle TEvProposeTransaction 2025-03-28T09:08:32.926777Z node 59 :TX_PROXY DEBUG: actor# [59:7486784729973030771:2102] TxId# 281474976715663 ProcessProposeTransaction 2025-03-28T09:08:32.926821Z node 59 :TX_PROXY DEBUG: actor# [59:7486784729973030771:2102] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7486784751447868327:2628] 2025-03-28T09:08:32.928962Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868327:2628] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:53198" 2025-03-28T09:08:32.929017Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868327:2628] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T09:08:32.929031Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868327:2628] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-03-28T09:08:32.929123Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868327:2628] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-28T09:08:32.929139Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868327:2628] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-28T09:08:32.929165Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868327:2628] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:08:32.929327Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868327:2628] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:08:32.929341Z node 59 :TX_PROXY ERROR: Actor# [59:7486784751447868327:2628] txid# 281474976715663, Access denied for ordinaryuser@builtin, attempt to manage user 2025-03-28T09:08:32.929399Z node 59 :TX_PROXY ERROR: Actor# [59:7486784751447868327:2628] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2025-03-28T09:08:32.929417Z node 59 :TX_PROXY DEBUG: Actor# [59:7486784751447868327:2628] txid# 281474976715663 SEND to# [59:7486784751447868326:2353] Source {TEvProposeTransactionStatus Status# 5} 2025-03-28T09:08:32.929641Z node 59 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=59&id=MWNmYzE0YzAtM2JjMzM5Y2UtMTQ0YmQ2NzEtN2U4N2QyZDc=, ActorId: [59:7486784751447868312:2353], ActorState: ExecuteState, 
TraceId: 01jqe056ghatxed7qrw4beh5yx, Create QueryResponse for error on request, msg: 2025-03-28T09:08:32.929933Z node 59 :TX_PROXY DEBUG: actor# [59:7486784729973030771:2102] Handle TEvExecuteKqpTransaction 2025-03-28T09:08:32.929956Z node 59 :TX_PROXY DEBUG: actor# [59:7486784729973030771:2102] TxId# 281474976715664 ProcessProposeKqpTransaction >> BsControllerConfig::MergeIntersectingBoxes >> BsControllerConfig::SelectAllGroups >> BsControllerConfig::OverlayMap >> OlapEstimationRowsCorrectness::TPCH11 >> BsControllerConfig::ExtendByCreatingSeparateBox |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |80.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer >> OlapEstimationRowsCorrectness::TPCDS78 >> BsControllerConfig::OverlayMap [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH2 >> TSubDomainTest::StartAndStopTenanNode [GOOD] >> TSubDomainTest::StartTenanNodeAndStopAtDestructor |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |80.5%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMap [GOOD] >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain [GOOD] >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped >> KqpJoinOrder::TestJoinOrderHintsComplex+ColumnStore >> TModifyUserTest::ModifyLdapUser [GOOD] >> TModifyUserTest::ModifyUserIsEnabled >> BsControllerConfig::SelectAllGroups [GOOD] >> PgCatalog::PgDatabase-useSink [GOOD] >> PgCatalog::PgRoles |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |80.6%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::SelectAllGroups [GOOD] Test command err: 2025-03-28T09:09:28.395543Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-28T09:09:28.399777Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-28T09:09:28.400120Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-28T09:09:28.401589Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:09:28.401833Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-28T09:09:28.402274Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 
2025-03-28T09:09:28.402300Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-28T09:09:28.402449Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-28T09:09:28.409967Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-28T09:09:28.410069Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-28T09:09:28.410178Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-28T09:09:28.410251Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:09:28.410338Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:09:28.410396Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-28T09:09:28.589166Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.133467s 2025-03-28T09:09:28.589342Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.133675s ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [FAIL] Test command err: Starting YDB, grpc: 5148, msgbus: 16558 2025-03-28T09:05:12.406307Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783890955140074:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:05:12.406379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012f5/r3tmp/tmpJ0vmsX/pdisk_1.dat 2025-03-28T09:05:12.944072Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:12.951193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:05:12.951319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:05:12.971960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5148, node 1 2025-03-28T09:05:13.112768Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:05:13.112790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:05:13.112796Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:05:13.112903Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16558 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:05:13.358091Z node 1 :TX_PROXY DEBUG: actor# [1:7486783890955140338:2140] Handle TEvNavigate describe path dc-1 2025-03-28T09:05:13.358126Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108108:2446] HANDLE EvNavigateScheme dc-1 2025-03-28T09:05:13.358939Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108108:2446] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:13.392943Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108108:2446] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T09:05:13.402475Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108108:2446] Handle TEvDescribeSchemeResult Forward to# [1:7486783895250108107:2445] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:05:13.424352Z node 1 :TX_PROXY DEBUG: actor# [1:7486783890955140338:2140] Handle TEvProposeTransaction 2025-03-28T09:05:13.424391Z node 1 :TX_PROXY DEBUG: actor# [1:7486783890955140338:2140] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:05:13.424504Z node 1 :TX_PROXY DEBUG: actor# [1:7486783890955140338:2140] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486783895250108134:2455] 2025-03-28T09:05:13.478849Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108134:2455] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:13.478949Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108134:2455] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-28T09:05:13.478980Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108134:2455] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:13.479067Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108134:2455] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:13.479357Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108134:2455] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:13.479508Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108134:2455] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T09:05:13.479559Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108134:2455] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T09:05:13.479692Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108134:2455] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T09:05:13.480419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:05:13.492112Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108134:2455] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T09:05:13.492171Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108134:2455] txid# 281474976710657 SEND to# [1:7486783895250108132:2454] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-28T09:05:13.511381Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:05:13.534817Z node 1 :TX_PROXY DEBUG: actor# [1:7486783890955140338:2140] Handle TEvProposeTransaction 2025-03-28T09:05:13.534842Z node 1 :TX_PROXY DEBUG: actor# [1:7486783890955140338:2140] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T09:05:13.534876Z node 1 :TX_PROXY DEBUG: actor# [1:7486783890955140338:2140] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486783895250108176:2493] 2025-03-28T09:05:13.537176Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108176:2493] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T09:05:13.537242Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108176:2493] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-28T09:05:13.537273Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108176:2493] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T09:05:13.537320Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108176:2493] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:05:13.537572Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108176:2493] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:05:13.537658Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108176:2493] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:05:13.537695Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108176:2493] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:05:13.537843Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108176:2493] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T09:05:13.538263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:05:13.540814Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108176:2493] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T09:05:13.540856Z node 1 :TX_PROXY DEBUG: Actor# [1:7486783895250108176:2493] txid# 281474976710658 SEND to# [1:7486783895250108175:2492] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T09:05:15.609978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783903840042854:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:15.610062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783903840042865:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:15.610113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:05:15.610510Z node 1 :TX_PROXY DEBUG: actor# [1:7486783890955140338:2140] Handle TEvPropose ... nitTenantSchemeShardResult operationId: 281474976715660:1 at schemeshard: 72057594046644480 2025-03-28T09:09:13.494750Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 281474976715660:1 Got OK TEvInitTenantSchemeShardResult from schemeshard tablet: 72075186224037891 shardIdx: 72057594046644480:2 at schemeshard: 72057594046644480 2025-03-28T09:09:13.494782Z node 59 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:1 3 -> 128 2025-03-28T09:09:13.503616Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715660:1, at schemeshard: 72057594046644480 2025-03-28T09:09:13.503831Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715660:1, at schemeshard: 72057594046644480 2025-03-28T09:09:13.503863Z node 59 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715660:1, at schemeshard: 72057594046644480 2025-03-28T09:09:13.503902Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715660:1, at tablet# 72057594046644480 2025-03-28T09:09:13.503935Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715660 ready parts: 2/2 2025-03-28T09:09:13.504142Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715660 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:09:13.506502Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715660:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715660 msg type: 269090816 2025-03-28T09:09:13.506660Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715660, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:09:13.509401Z node 59 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743152953556, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:09:13.509554Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715660 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743152953556 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:09:13.509581Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute operation part is already done, operationId: 281474976715660:0 2025-03-28T09:09:13.509615Z node 59 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715660:1, at tablet# 72057594046644480 2025-03-28T09:09:13.510016Z node 59 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:1 128 -> 240 2025-03-28T09:09:13.510070Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715660:1, at tablet# 72057594046644480 2025-03-28T09:09:13.510212Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 7 2025-03-28T09:09:13.510308Z node 59 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no 
hasChanges, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2], Generation: 1, ActorId:[60:7486784927305310982:2300], EffectiveACLVersion: 1, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 72075186224037888, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 1, actualUserAttrsVersion: 1, tenantHive: 72075186224037888, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046644480 2025-03-28T09:09:13.512601Z node 59 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:09:13.512634Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715660, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:09:13.512883Z node 59 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:09:13.512919Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [59:7486784920930669210:2377], at schemeshard: 72057594046644480, txId: 281474976715660, path id: 2 2025-03-28T09:09:13.512983Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715660:1, at schemeshard: 72057594046644480 2025-03-28T09:09:13.513016Z node 59 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TSyncHive, operationId 281474976715660:1, ProgressState, NeedSyncHive: 0 2025-03-28T09:09:13.513038Z node 59 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715660:1 240 -> 240 2025-03-28T09:09:13.514387Z node 59 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-03-28T09:09:13.514502Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715660 2025-03-28T09:09:13.514522Z node 59 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715660 2025-03-28T09:09:13.514545Z node 59 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-03-28T09:09:13.514574Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 8 2025-03-28T09:09:13.514653Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715660, ready parts: 1/2, is published: true 2025-03-28T09:09:13.519215Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715660 2025-03-28T09:09:13.519343Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715660:1, at schemeshard: 72057594046644480 2025-03-28T09:09:13.519381Z node 59 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715660:1 ProgressState 2025-03-28T09:09:13.519570Z node 59 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:1 progress is 2/2 2025-03-28T09:09:13.519593Z 
node 59 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715660 ready parts: 2/2 2025-03-28T09:09:13.519631Z node 59 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715660:1 progress is 2/2 2025-03-28T09:09:13.519650Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715660 ready parts: 2/2 2025-03-28T09:09:13.519675Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715660, ready parts: 2/2, is published: true 2025-03-28T09:09:13.519761Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [59:7486784925225636851:2324] message: TxId: 281474976715660 2025-03-28T09:09:13.519795Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715660 ready parts: 2/2 2025-03-28T09:09:13.519826Z node 59 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-03-28T09:09:13.519841Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715660:0 2025-03-28T09:09:13.520028Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 7 2025-03-28T09:09:13.520044Z node 59 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2025-03-28T09:09:13.520052Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715660:1 2025-03-28T09:09:13.520108Z node 59 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 TEST create admin clusteradmin 2025-03-28T09:09:13.535041Z node 59 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, user is a admin, database: /dc-1, user: root@builtin, from ip: ipv6:[::1]:52422 2025-03-28T09:09:16.882710Z node 59 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7486784916635701282:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:16.882851Z node 59 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:18.102798Z node 60 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[60:7486784927305310643:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:18.102893Z node 60 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/tenant-db/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:18.632828Z node 59 :KQP_PROXY ERROR: TraceId: "01jqe06e5y53vhs21rn3hgsk3c", Request deadline has expired for 0.100098s seconds 2025-03-28T09:09:18.664086Z node 59 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 60 2025-03-28T09:09:18.664677Z node 59 :HIVE WARN: HIVE#72057594037968897 Node(60, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:09:18.671689Z node 60 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:16483 TBackTrace::Capture()+28 (0x18B5EB7C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1901AA10) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+2039 (0x18730487) void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant(NUnitTest::TTestContext&)+3067 (0x187AB2CB) std::__y1::__function::__func, void ()>::operator()()+280 (0x18785258) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x19051A56) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19021589) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAdminAccessInTenant::TCurrentTest::Execute()+1275 (0x1878440B) NUnitTest::TTestFactory::Execute()+2438 (0x19022E56) NUnitTest::RunMain(int, char**)+5213 (0x1904BFCD) ??+0 (0x7FF076F25D90) __libc_start_main+128 (0x7FF076F25E40) _start+41 (0x160CB029) >> TSubDomainTest::CreateTablet [GOOD] >> TSubDomainTest::CreateTabletForUnknownDomain >> TSubDomainTest::LsAltered [GOOD] >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain >> TCdcStreamTests::MeteringServerless [GOOD] >> TCdcStreamTests::MeteringDedicated >> SystemView::AuthGroups_TableRange >> SystemView::ShowCreateTableDefaultLiteral >> TSubDomainTest::UserAttributesApplyIf [GOOD] >> YdbOlapStore::LogNonExistingUserId [GOOD] >> YdbOlapStore::LogPagingBefore |80.6%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::LsAltered [GOOD] Test command err: 2025-03-28T09:09:21.099808Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784961128748021:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:21.100056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016d5/r3tmp/tmpGwLIQN/pdisk_1.dat 2025-03-28T09:09:21.735702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:21.745993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:21.783958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:21.920855Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:65103 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:09:22.234888Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961128748243:2090] Handle TEvNavigate describe path dc-1 2025-03-28T09:09:22.234955Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965423715865:2265] HANDLE EvNavigateScheme dc-1 2025-03-28T09:09:22.235090Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784961128748284:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:22.235203Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965423715847:2259][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784961128748284:2113], cookie# 1 2025-03-28T09:09:22.236837Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965423715851:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965423715848:2259], cookie# 1 2025-03-28T09:09:22.236873Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965423715852:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965423715849:2259], cookie# 1 2025-03-28T09:09:22.236887Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965423715853:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965423715850:2259], cookie# 1 2025-03-28T09:09:22.236926Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784961128747974:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965423715851:2259], cookie# 1 2025-03-28T09:09:22.236953Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784961128747977:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965423715852:2259], cookie# 1 2025-03-28T09:09:22.236975Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784961128747980:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965423715853:2259], cookie# 1 2025-03-28T09:09:22.237005Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965423715851:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784961128747974:2049], cookie# 1 2025-03-28T09:09:22.237018Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965423715852:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784961128747977:2052], cookie# 1 2025-03-28T09:09:22.237031Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965423715853:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784961128747980:2055], cookie# 1 2025-03-28T09:09:22.237069Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965423715847:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784965423715848:2259], cookie# 1 2025-03-28T09:09:22.237093Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965423715847:2259][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:09:22.237126Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965423715847:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7486784965423715849:2259], cookie# 1 2025-03-28T09:09:22.237163Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965423715847:2259][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:09:22.237188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965423715847:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784965423715850:2259], cookie# 1 2025-03-28T09:09:22.237201Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965423715847:2259][/dc-1] Unexpected sync response: sender# [1:7486784965423715850:2259], cookie# 1 2025-03-28T09:09:22.237271Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784961128748284:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:09:22.252086Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784961128748284:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784965423715847:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:09:22.252212Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784961128748284:2113], cacheItem# { Subscriber: { Subscriber: [1:7486784965423715847:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-28T09:09:22.263057Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784965423715866:2266], recipient# [1:7486784965423715865:2265], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:09:22.263137Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965423715865:2265] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:09:22.313657Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965423715865:2265] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:09:22.317011Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965423715865:2265] Handle TEvDescribeSchemeResult Forward to# [1:7486784965423715864:2264] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:09:22.340363Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961128748243:2090] Handle TEvProposeTransaction 2025-03-28T09:09:22.340392Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961128748243:2090] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:09:22.340514Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961128748243:2090] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486784965423715872:2271] 2025-03-28T09:09:22.441968Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965423715872:2271] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-28T09:09:22.442020Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965423715872:2271] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:09:22.442097Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965423715872:2271] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:09:22.442192Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784961128748284:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Statu ... 3699Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7486784984531220541:2117], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7486784984531220797:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743152966870 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:09:27.383791Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7486784984531220541:2117], cacheItem# { Subscriber: { Subscriber: [2:7486784984531220797:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743152966870 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2025-03-28T09:09:27.383949Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7486784988826188206:2329], recipient# [2:7486784988826188205:2328], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } 
ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:09:27.383977Z node 2 :TX_PROXY DEBUG: Actor# [2:7486784988826188205:2328] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:09:27.384034Z node 2 :TX_PROXY DEBUG: Actor# [2:7486784988826188205:2328] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:09:27.384477Z node 2 :TX_PROXY DEBUG: Actor# [2:7486784988826188205:2328] Handle TEvDescribeSchemeResult Forward to# [2:7486784988826188204:2327] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743152966870 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 2025-03-28T09:09:27.523717Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7486784984531220541:2117], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:27.523796Z node 2 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [2:7486784984531220541:2117], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-03-28T09:09:27.523965Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7486784988826188209:2331][/dc-1/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:09:27.524343Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7486784984531220222:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7486784988826188213:2331] 2025-03-28T09:09:27.524361Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7486784984531220222:2049] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-28T09:09:27.524421Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7486784984531220222:2049] Subscribe: subscriber# [2:7486784988826188213:2331], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:09:27.524474Z 
node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7486784984531220225:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7486784988826188214:2331] 2025-03-28T09:09:27.524485Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7486784984531220225:2052] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-28T09:09:27.524506Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7486784984531220225:2052] Subscribe: subscriber# [2:7486784988826188214:2331], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:09:27.524530Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7486784984531220228:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7486784988826188215:2331] 2025-03-28T09:09:27.524537Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7486784984531220228:2055] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-28T09:09:27.524558Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:7486784984531220228:2055] Subscribe: subscriber# [2:7486784988826188215:2331], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:09:27.524592Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7486784988826188213:2331][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7486784984531220222:2049] 2025-03-28T09:09:27.524614Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7486784988826188214:2331][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7486784984531220225:2052] 2025-03-28T09:09:27.524630Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7486784988826188215:2331][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7486784984531220228:2055] 2025-03-28T09:09:27.524663Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7486784988826188209:2331][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7486784988826188210:2331] 2025-03-28T09:09:27.524698Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7486784988826188209:2331][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7486784988826188211:2331] 2025-03-28T09:09:27.524729Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:7486784988826188209:2331][/dc-1/.metadata/initialization/migrations] Set up state: owner# [2:7486784984531220541:2117], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:09:27.524748Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7486784988826188209:2331][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7486784988826188212:2331] 2025-03-28T09:09:27.524772Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:7486784988826188209:2331][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [2:7486784984531220541:2117], state# { Deleted: 1 Strong: 
1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:09:27.524795Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7486784984531220222:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7486784988826188213:2331] 2025-03-28T09:09:27.524812Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7486784984531220225:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7486784988826188214:2331] 2025-03-28T09:09:27.524825Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7486784984531220228:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:7486784988826188215:2331] 2025-03-28T09:09:27.524880Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7486784984531220541:2117], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2025-03-28T09:09:27.524977Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7486784984531220541:2117], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:7486784988826188209:2331] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:09:27.525079Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7486784984531220541:2117], cacheItem# { Subscriber: { Subscriber: [2:7486784988826188209:2331] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:27.525143Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7486784988826188216:2332], recipient# [2:7486784988826188208:2298], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TSubDomainTest::FailIfAffectedSetNotInterior [GOOD] >> TSubDomainTest::GenericCases ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::UserAttributesApplyIf [GOOD] Test command err: 2025-03-28T09:09:21.163012Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784961315999453:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:21.163054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016b2/r3tmp/tmpTi7Fx3/pdisk_1.dat 2025-03-28T09:09:21.893064Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:21.893205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:21.895637Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:21.923980Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:64841 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:09:22.269976Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961315999687:2100] Handle TEvNavigate describe path dc-1 2025-03-28T09:09:22.270019Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965610967293:2265] HANDLE EvNavigateScheme dc-1 2025-03-28T09:09:22.270124Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784961315999713:2114], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:22.270191Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965610967276:2260][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784961315999713:2114], cookie# 1 2025-03-28T09:09:22.272042Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965610967280:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965610967277:2260], cookie# 1 2025-03-28T09:09:22.272093Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965610967281:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965610967278:2260], cookie# 1 2025-03-28T09:09:22.272110Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965610967282:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965610967279:2260], cookie# 1 2025-03-28T09:09:22.272152Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784961315999400:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965610967280:2260], cookie# 1 2025-03-28T09:09:22.272187Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784961315999403:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965610967281:2260], cookie# 1 2025-03-28T09:09:22.272206Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784961315999406:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965610967282:2260], cookie# 1 2025-03-28T09:09:22.272231Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965610967280:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784961315999400:2049], cookie# 1 2025-03-28T09:09:22.272242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965610967281:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784961315999403:2052], cookie# 1 2025-03-28T09:09:22.272255Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965610967282:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784961315999406:2055], cookie# 1 2025-03-28T09:09:22.272286Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965610967276:2260][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784965610967277:2260], cookie# 1 2025-03-28T09:09:22.272311Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965610967276:2260][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:09:22.272340Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965610967276:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784965610967278:2260], cookie# 1 2025-03-28T09:09:22.272357Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965610967276:2260][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:09:22.272376Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965610967276:2260][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784965610967279:2260], cookie# 1 2025-03-28T09:09:22.272392Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965610967276:2260][/dc-1] Unexpected sync response: sender# [1:7486784965610967279:2260], cookie# 1 2025-03-28T09:09:22.272434Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784961315999713:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:09:22.285665Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784961315999713:2114], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784965610967276:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:09:22.285813Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784961315999713:2114], cacheItem# { Subscriber: { Subscriber: [1:7486784965610967276:2260] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-28T09:09:22.288142Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784965610967294:2266], recipient# [1:7486784965610967293:2265], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:09:22.288216Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965610967293:2265] HANDLE 
EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:09:22.357029Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965610967293:2265] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:09:22.359901Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965610967293:2265] Handle TEvDescribeSchemeResult Forward to# [1:7486784965610967292:2264] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:09:22.406863Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961315999687:2100] Handle TEvProposeTransaction 2025-03-28T09:09:22.406889Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961315999687:2100] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:09:22.406994Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961315999687:2100] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486784965610967300:2271] 2025-03-28T09:09:22.525784Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965610967300:2271] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-28T09:09:22.525835Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965610967300:2271] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:09:22.525914Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965610967300:2271] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:09:22.525991Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784961315999713:2114], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Statu ... nedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 269} 2025-03-28T09:09:27.079025Z node 2 :SCHEME_BOARD_POPULATOR DEBUG: [2:7486784985159546290:2243] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 6 }: sender# [2:7486784980864578433:2049], cookie# 281474976710662 2025-03-28T09:09:27.079054Z node 2 :SCHEME_BOARD_POPULATOR DEBUG: [2:7486784985159546280:2236] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 6 }: sender# [2:7486784985159546290:2243], cookie# 281474976710662 2025-03-28T09:09:27.079065Z node 2 :SCHEME_BOARD_POPULATOR DEBUG: [2:7486784985159546280:2236] Ack for unknown update (already acked?): sender# [2:7486784985159546290:2243], cookie# 281474976710662 2025-03-28T09:09:27.079175Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7486784985159546375:2303][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 6 }: sender# [2:7486784980864578433:2049] 2025-03-28T09:09:27.079209Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7486784985159546371:2303][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 6 }: sender# [2:7486784985159546372:2303] 2025-03-28T09:09:27.079246Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:7486784985159546371:2303][/dc-1/USER_0] Path was already updated: owner# [2:7486784985159546037:2113], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements } 
2025-03-28T09:09:27.079310Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7486784980864578433:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 6 }: sender# [2:7486784985159546375:2303] 2025-03-28T09:09:27.079361Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710662 TClient::Ls request: /dc-1/USER_0 2025-03-28T09:09:27.081023Z node 2 :TX_PROXY DEBUG: actor# [2:7486784980864578500:2086] Handle TEvNavigate describe path /dc-1/USER_0 2025-03-28T09:09:27.081051Z node 2 :TX_PROXY DEBUG: Actor# [2:7486784989454513736:2356] HANDLE EvNavigateScheme /dc-1/USER_0 2025-03-28T09:09:27.081133Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7486784985159546037:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:27.081217Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7486784985159546371:2303][/dc-1/USER_0] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7486784985159546037:2113], cookie# 10 2025-03-28T09:09:27.081258Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7486784985159546375:2303][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7486784985159546372:2303], cookie# 10 2025-03-28T09:09:27.081271Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7486784985159546376:2303][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7486784985159546373:2303], cookie# 10 2025-03-28T09:09:27.081284Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7486784985159546377:2303][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7486784985159546374:2303], cookie# 10 2025-03-28T09:09:27.081307Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7486784980864578433:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7486784985159546375:2303], cookie# 10 2025-03-28T09:09:27.081332Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7486784980864578436:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7486784985159546376:2303], cookie# 10 2025-03-28T09:09:27.081347Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:7486784980864578439:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7486784985159546377:2303], cookie# 10 2025-03-28T09:09:27.081371Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7486784985159546375:2303][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7486784980864578433:2049], cookie# 10 2025-03-28T09:09:27.081383Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7486784985159546376:2303][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7486784980864578436:2052], cookie# 10 2025-03-28T09:09:27.081395Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7486784985159546377:2303][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7486784980864578439:2055], cookie# 10 2025-03-28T09:09:27.081419Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7486784985159546371:2303][/dc-1/USER_0] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7486784985159546372:2303], cookie# 10 2025-03-28T09:09:27.081436Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7486784985159546371:2303][/dc-1/USER_0] Sync is in progress: cookie# 10, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:09:27.081449Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7486784985159546371:2303][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7486784985159546373:2303], cookie# 10 2025-03-28T09:09:27.081466Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7486784985159546371:2303][/dc-1/USER_0] Sync is done: cookie# 10, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:09:27.081485Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7486784985159546371:2303][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 }: sender# [2:7486784985159546374:2303], cookie# 10 2025-03-28T09:09:27.081496Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7486784985159546371:2303][/dc-1/USER_0] Unexpected sync response: sender# [2:7486784985159546374:2303], cookie# 10 2025-03-28T09:09:27.081527Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:7486784985159546037:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2025-03-28T09:09:27.081587Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:7486784985159546037:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7486784985159546371:2303] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1743152966968 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:09:27.081657Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7486784985159546037:2113], cacheItem# { Subscriber: { Subscriber: [2:7486784985159546371:2303] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1743152966968 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2025-03-28T09:09:27.081822Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7486784989454513737:2357], recipient# [2:7486784989454513736:2356], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:09:27.081852Z node 2 :TX_PROXY DEBUG: Actor# 
[2:7486784989454513736:2356] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:09:27.081902Z node 2 :TX_PROXY DEBUG: Actor# [2:7486784989454513736:2356] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152966968 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1... (TRUNCATED) 2025-03-28T09:09:27.082764Z node 2 :TX_PROXY DEBUG: Actor# [2:7486784989454513736:2356] Handle TEvDescribeSchemeResult Forward to# [2:7486784989454513735:2355] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743152966968 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 >> Yq_1::CreateConnection_With_Existing_Name >> TSubDomainTest::CheckAccessCopyTable [GOOD] >> TSubDomainTest::ConsistentCopyTable >> Yq_1::Basic >> TModifyUserTest::ModifyUserIsEnabled [GOOD] >> KqpJoin::LeftJoinWithNull+StreamLookupJoin [GOOD] >> TSubDomainTest::CreateDummyTabletsInDifferentDomains [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable >> Yq_1::DescribeConnection >> Yq_1::CreateQuery_With_Idempotency ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TModifyUserTest::ModifyUserIsEnabled [GOOD] Test command err: 2025-03-28T09:09:21.099030Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784963241599179:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:21.099121Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00168b/r3tmp/tmpYAVLXJ/pdisk_1.dat 2025-03-28T09:09:21.756798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:21.756898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:21.784136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:21.869384Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:17954 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:09:22.173022Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784967536567012:2261], recipient# [1:7486784967536567003:2258], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:09:22.173217Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784963241599447:2114], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:22.173284Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7486784963241599447:2114], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-03-28T09:09:22.173422Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784967536567013:2262][/dc-1/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:09:22.180911Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784963241599128:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7486784967536567017:2262] 2025-03-28T09:09:22.180942Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784963241599128:2049] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-28T09:09:22.181008Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784963241599128:2049] Subscribe: subscriber# [1:7486784967536567017:2262], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:09:22.181087Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784963241599134:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7486784967536567019:2262] 2025-03-28T09:09:22.181130Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784963241599134:2055] Upsert description: path# 
/dc-1/.metadata/initialization/migrations 2025-03-28T09:09:22.181780Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784963241599134:2055] Subscribe: subscriber# [1:7486784967536567019:2262], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:09:22.181886Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784967536567017:2262][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7486784963241599128:2049] 2025-03-28T09:09:22.181901Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784967536567019:2262][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7486784963241599134:2055] 2025-03-28T09:09:22.181952Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784963241599131:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7486784967536567018:2262] 2025-03-28T09:09:22.181966Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784963241599131:2052] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-28T09:09:22.182043Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486784963241599131:2052] Subscribe: subscriber# [1:7486784967536567018:2262], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:09:22.182106Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784963241599134:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7486784967536567019:2262] 2025-03-28T09:09:22.182124Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784963241599128:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7486784967536567017:2262] 2025-03-28T09:09:22.182123Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784967536567013:2262][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7486784967536567014:2262] 2025-03-28T09:09:22.182185Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784967536567013:2262][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7486784967536567016:2262] 2025-03-28T09:09:22.182215Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7486784967536567013:2262][/dc-1/.metadata/initialization/migrations] Set up state: owner# [1:7486784963241599447:2114], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:09:22.182240Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784967536567018:2262][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7486784963241599131:2052] 2025-03-28T09:09:22.182258Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784967536567013:2262][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7486784967536567015:2262] 2025-03-28T09:09:22.182274Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7486784967536567013:2262][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [1:7486784963241599447:2114], 
state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:09:22.182318Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784963241599447:2114], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2025-03-28T09:09:22.182515Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784963241599131:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7486784967536567018:2262] 2025-03-28T09:09:22.182657Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784963241599447:2114], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7486784967536567013:2262] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:09:22.188611Z node 1 :TX_PROXY DEBUG: actor# [1:7486784963241599397:2090] Handle TEvNavigate describe path dc-1 2025-03-28T09:09:22.188653Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784967536567021:2264] HANDLE EvNavigateScheme dc-1 2025-03-28T09:09:22.193907Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784963241599447:2114], cacheItem# { Subscriber: { Subscriber: [1:7486784967536567013:2262] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:22.194076Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784963241599447:2114], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:22.194110Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784967536567022:2265], recipient# [1:7486784967536567002:2295], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:22.194168Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784967536567004:2259][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784963241599447:2114], cookie# 1 2025-03-28T09:09:22.194270Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784967536567008:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784967536567005:2259], cookie# 1 2025-03-28T09:09:22.194298Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784967536567009:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784967536567006:2259], cookie# 1 2025-03-28T09:09:22.194309Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784967536567010:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784967536567007:2259], cookie# 1 2025-03-28T09:09:22.194329Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784963241599131:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784967536567009:2259], cookie# 1 2025-03-28T09:09:22.194330Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784963241599128:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784967536567008:2259], cookie# 1 2025-03-28T09:09:22.194364Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784963241599134:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRe ... es# 0 2025-03-28T09:09:30.896542Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486785002042829089:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7486785002042829091:2259], cookie# 10 2025-03-28T09:09:30.896559Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486785002042829089:2259][/dc-1] Sync is done: cookie# 10, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:09:30.896583Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486785002042829089:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 }: sender# [3:7486785002042829092:2259], cookie# 10 2025-03-28T09:09:30.896599Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486785002042829089:2259][/dc-1] Unexpected sync response: sender# [3:7486785002042829092:2259], cookie# 10 2025-03-28T09:09:30.896650Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7486785002042828875:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:09:30.896722Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7486785002042828875:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7486785002042829089:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743152970727 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:09:30.896799Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486785002042828875:2127], cacheItem# { Subscriber: { Subscriber: [3:7486785002042829089:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743152970727 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2025-03-28T09:09:30.896956Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486785002042829177:2331], recipient# 
[3:7486785002042829176:2330], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2025-03-28T09:09:30.896989Z node 3 :TX_PROXY DEBUG: Actor# [3:7486785002042829176:2330] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:09:30.897028Z node 3 :TX_PROXY ERROR: Actor# [3:7486785002042829176:2330] txid# 281474976710663, Access denied for user2 on path /dc-1, with access AlterSchema 2025-03-28T09:09:30.897131Z node 3 :TX_PROXY ERROR: Actor# [3:7486785002042829176:2330] txid# 281474976710663, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2025-03-28T09:09:30.897154Z node 3 :TX_PROXY DEBUG: Actor# [3:7486785002042829176:2330] txid# 281474976710663 SEND to# [3:7486785002042829175:2329] Source {TEvProposeTransactionStatus Status# 5} 2025-03-28T09:09:30.994376Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486785002042828875:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:30.994461Z node 3 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [3:7486785002042828875:2127], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2025-03-28T09:09:30.994670Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486785002042829181:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:09:30.995202Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784997747861213:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [3:7486785002042829185:2334] 2025-03-28T09:09:30.995217Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7486784997747861213:2049] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-28T09:09:30.995283Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7486784997747861213:2049] Subscribe: subscriber# [3:7486785002042829185:2334], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:09:30.995320Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784997747861216:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [3:7486785002042829186:2334] 2025-03-28T09:09:30.995328Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7486784997747861216:2052] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-28T09:09:30.995347Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7486784997747861216:2052] Subscribe: subscriber# [3:7486785002042829186:2334], path# 
/dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:09:30.995369Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784997747861219:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [3:7486785002042829187:2334] 2025-03-28T09:09:30.995376Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7486784997747861219:2055] Upsert description: path# /dc-1/.metadata/initialization/migrations 2025-03-28T09:09:30.995393Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7486784997747861219:2055] Subscribe: subscriber# [3:7486785002042829187:2334], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:09:30.995430Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7486785002042829185:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7486784997747861213:2049] 2025-03-28T09:09:30.995451Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7486785002042829186:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7486784997747861216:2052] 2025-03-28T09:09:30.995492Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7486785002042829187:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7486784997747861219:2055] 2025-03-28T09:09:30.995538Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486785002042829181:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7486785002042829182:2334] 2025-03-28T09:09:30.995580Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486785002042829181:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7486785002042829183:2334] 2025-03-28T09:09:30.995610Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7486785002042829181:2334][/dc-1/.metadata/initialization/migrations] Set up state: owner# [3:7486785002042828875:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:09:30.995631Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486785002042829181:2334][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [3:7486785002042829184:2334] 2025-03-28T09:09:30.995654Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7486785002042829181:2334][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [3:7486785002042828875:2127], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:09:30.995681Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784997747861213:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7486785002042829185:2334] 2025-03-28T09:09:30.995696Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784997747861216:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7486785002042829186:2334] 
2025-03-28T09:09:30.995709Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784997747861219:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7486785002042829187:2334] 2025-03-28T09:09:30.995759Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7486785002042828875:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 } 2025-03-28T09:09:30.995842Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7486785002042828875:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7486785002042829181:2334] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:09:30.995935Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486785002042828875:2127], cacheItem# { Subscriber: { Subscriber: [3:7486785002042829181:2334] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:30.996025Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486785002042829188:2335], recipient# [3:7486785002042829180:2297], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinWithNull+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 9694, MsgBus: 25991 2025-03-28T09:09:24.093796Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784974361588751:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:24.094080Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00186a/r3tmp/tmpvSRGdx/pdisk_1.dat 2025-03-28T09:09:24.864948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:24.865024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:24.867282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9694, node 1 2025-03-28T09:09:25.024087Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:25.118391Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-28T09:09:25.118425Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:25.118433Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:25.118575Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25991 TClient is connected to server localhost:25991 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:25.790003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:25.809346Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:09:25.820991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:25.968866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:26.268026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:26.395825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:28.900441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784991541459578:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:28.900543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:29.098812Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784974361588751:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:29.098892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:29.345909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:09:29.464672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:09:29.560590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:09:29.616719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:09:29.657817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:09:29.722138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:09:29.807788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784995836427394:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:29.807876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:29.808172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784995836427399:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:29.812697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:09:29.827355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784995836427401:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:09:29.927257Z node 1 :TX_PROXY ERROR: Actor# [1:7486784995836427459:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:32.029019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:09:32.119004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:09:32.187631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> TSubDomainTest::CreateTabletForUnknownDomain [GOOD] >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |80.6%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |80.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |80.6%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |80.6%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] >> KqpJoinOrder::TPCDS34-ColumnStore [GOOD] >> PrivateApi::PingTask >> Yq_1::DescribeJob >> BasicUsage::PreferredDatabaseNoFallback [GOOD] |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |80.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] Test command err: 2025-03-28T09:09:21.575808Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784961637262249:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:21.575863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001684/r3tmp/tmpM71mAH/pdisk_1.dat 2025-03-28T09:09:22.563195Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:22.580690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:22.580780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:22.583106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:22.587890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31378 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:09:22.891907Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961637262267:2116] Handle TEvNavigate describe path dc-1 2025-03-28T09:09:22.891948Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965932230048:2439] HANDLE EvNavigateScheme dc-1 2025-03-28T09:09:22.892090Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784961637262304:2133], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:22.892169Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965932229957:2366][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784961637262304:2133], cookie# 1 2025-03-28T09:09:22.893449Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965932229964:2366][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965932229961:2366], cookie# 1 2025-03-28T09:09:22.893477Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965932229965:2366][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965932229962:2366], cookie# 1 2025-03-28T09:09:22.893491Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965932229966:2366][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965932229963:2366], cookie# 1 2025-03-28T09:09:22.893563Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784961637261947:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965932229964:2366], cookie# 1 2025-03-28T09:09:22.893590Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784961637261950:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965932229965:2366], cookie# 1 2025-03-28T09:09:22.893605Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784961637261953:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965932229966:2366], cookie# 1 2025-03-28T09:09:22.893632Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965932229964:2366][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784961637261947:2050], cookie# 1 2025-03-28T09:09:22.893647Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965932229965:2366][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784961637261950:2053], cookie# 1 2025-03-28T09:09:22.893658Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965932229966:2366][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784961637261953:2056], cookie# 1 2025-03-28T09:09:22.893696Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965932229957:2366][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784965932229961:2366], cookie# 1 2025-03-28T09:09:22.893733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965932229957:2366][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:09:22.893750Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965932229957:2366][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7486784965932229962:2366], cookie# 1 2025-03-28T09:09:22.893768Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965932229957:2366][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:09:22.893788Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965932229957:2366][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784965932229963:2366], cookie# 1 2025-03-28T09:09:22.893801Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965932229957:2366][/dc-1] Unexpected sync response: sender# [1:7486784965932229963:2366], cookie# 1 2025-03-28T09:09:22.893847Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784961637262304:2133], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:09:22.911840Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784961637262304:2133], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784965932229957:2366] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:09:22.911954Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784961637262304:2133], cacheItem# { Subscriber: { Subscriber: [1:7486784965932229957:2366] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-28T09:09:22.914247Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784965932230051:2442], recipient# [1:7486784965932230048:2439], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:09:22.914303Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965932230048:2439] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:09:22.961139Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965932230048:2439] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:09:22.964124Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965932230048:2439] Handle TEvDescribeSchemeResult Forward to# [1:7486784965932230047:2438] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:09:22.992882Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961637262267:2116] Handle TEvProposeTransaction 2025-03-28T09:09:22.992912Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961637262267:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:09:22.993031Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961637262267:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486784965932230059:2449] 2025-03-28T09:09:23.144347Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965932230059:2449] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-28T09:09:23.144396Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965932230059:2449] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:09:23.144461Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965932230059:2449] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:09:23.144587Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784961637262304:2133], request# { ErrorCount: 0 Databa ... 
}, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:37.427487Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7486785030394847760:2319], recipient# [4:7486785026099880436:2332], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:37.427948Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7486785026099880436:2332], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:37.883216Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7486785004625043601:2105], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:37.883356Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486785004625043601:2105], cacheItem# { Subscriber: { Subscriber: [4:7486785004625043630:2113] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:37.883483Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7486785030394847763:2321], recipient# [4:7486785030394847762:2335], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:37.993510Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7486785004625043601:2105], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:37.993648Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486785004625043601:2105], cacheItem# { Subscriber: { Subscriber: [4:7486785026099880441:2314] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:37.993751Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7486785030394847765:2322], recipient# [4:7486785030394847764:2336], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true 
ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:37.994081Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:09:38.059076Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7486785004625043601:2105], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:38.059209Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486785004625043601:2105], cacheItem# { Subscriber: { Subscriber: [4:7486785026099880439:2312] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:38.059261Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486785004625043601:2105], cacheItem# { Subscriber: { Subscriber: [4:7486785026099880440:2313] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:38.059388Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7486785034689815062:2323], recipient# [4:7486785026099880436:2332], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:38.059874Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7486785026099880436:2332], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:38.603180Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7486785004625043601:2105], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:38.603311Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486785004625043601:2105], cacheItem# { Subscriber: { Subscriber: [4:7486785026099880439:2312] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:38.603361Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486785004625043601:2105], cacheItem# { Subscriber: { Subscriber: [4:7486785026099880440:2313] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:38.603498Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7486785034689815063:2324], recipient# [4:7486785026099880436:2332], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:38.604060Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7486785026099880436:2332], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsideSubDomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] Test command err: 2025-03-28T09:09:21.455122Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784961171203527:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:21.455348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016c2/r3tmp/tmpvNcXIW/pdisk_1.dat 2025-03-28T09:09:22.423573Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:22.431774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:22.431885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:22.436604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18340 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:09:22.762589Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961171203530:2112] Handle TEvNavigate describe path dc-1 2025-03-28T09:09:22.762647Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965466171331:2434] HANDLE EvNavigateScheme dc-1 2025-03-28T09:09:22.762810Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784961171203562:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:22.762894Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965466171258:2385][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784961171203562:2127], cookie# 1 2025-03-28T09:09:22.764504Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965466171262:2385][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965466171259:2385], cookie# 1 2025-03-28T09:09:22.764535Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965466171263:2385][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965466171260:2385], cookie# 1 2025-03-28T09:09:22.764551Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965466171264:2385][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965466171261:2385], cookie# 1 2025-03-28T09:09:22.764608Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784961171203226:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965466171262:2385], cookie# 1 2025-03-28T09:09:22.764643Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784961171203229:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965466171263:2385], cookie# 1 2025-03-28T09:09:22.764679Z node 1 :SCHEME_BOARD_REPLICA DEBUG: 
[1:7486784961171203232:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965466171264:2385], cookie# 1 2025-03-28T09:09:22.764716Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965466171262:2385][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784961171203226:2050], cookie# 1 2025-03-28T09:09:22.764732Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965466171263:2385][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784961171203229:2053], cookie# 1 2025-03-28T09:09:22.764746Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965466171264:2385][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784961171203232:2056], cookie# 1 2025-03-28T09:09:22.764791Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965466171258:2385][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784965466171259:2385], cookie# 1 2025-03-28T09:09:22.764810Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965466171258:2385][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:09:22.764825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965466171258:2385][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784965466171260:2385], cookie# 1 2025-03-28T09:09:22.764849Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965466171258:2385][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:09:22.764871Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965466171258:2385][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784965466171261:2385], cookie# 1 2025-03-28T09:09:22.764888Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965466171258:2385][/dc-1] Unexpected sync response: sender# [1:7486784965466171261:2385], cookie# 1 2025-03-28T09:09:22.764936Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784961171203562:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:09:22.771843Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784961171203562:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784965466171258:2385] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:09:22.772006Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784961171203562:2127], cacheItem# { Subscriber: { Subscriber: [1:7486784965466171258:2385] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true 
ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-28T09:09:22.780396Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784965466171334:2437], recipient# [1:7486784965466171331:2434], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:09:22.780492Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965466171331:2434] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:09:22.823057Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965466171331:2434] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:09:22.826901Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965466171331:2434] Handle TEvDescribeSchemeResult Forward to# [1:7486784965466171330:2433] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:09:22.855300Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961171203530:2112] Handle TEvProposeTransaction 2025-03-28T09:09:22.855329Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961171203530:2112] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:09:22.855430Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961171203530:2112] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7486784965466171340:2442] 2025-03-28T09:09:22.962926Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965466171340:2442] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-28T09:09:22.962993Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965466171340:2442] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:09:22.963075Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965466171340:2442] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:09:22.963171Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784961171203562:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Statu ... PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7486785027753804330:2569] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:09:36.315466Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486785001983999955:2149], cacheItem# { Subscriber: { Subscriber: [3:7486785027753804330:2569] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:36.315495Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784997689032270:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7486785027753804336:2567] 2025-03-28T09:09:36.315517Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784997689032270:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7486785027753804342:2568] 2025-03-28T09:09:36.315526Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7486784997689032270:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-03-28T09:09:36.315557Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7486784997689032270:2056] Subscribe: subscriber# [3:7486785027753804342:2568], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:09:36.315593Z node 3 :SCHEME_BOARD_REPLICA DEBUG: 
[3:7486784997689032270:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7486785027753804348:2569] 2025-03-28T09:09:36.315604Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7486784997689032270:2056] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2025-03-28T09:09:36.315627Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:7486784997689032270:2056] Subscribe: subscriber# [3:7486785027753804348:2569], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:09:36.315662Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7486785027753804348:2569][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7486784997689032270:2056] 2025-03-28T09:09:36.315690Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486785027753804330:2569][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7486785027753804345:2569] 2025-03-28T09:09:36.315718Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7486785027753804330:2569][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7486785001983999955:2149], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:09:36.315734Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784997689032270:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7486785027753804348:2569] 2025-03-28T09:09:36.315809Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486785027753804349:2570], recipient# [3:7486785027753804326:2317], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:36.315865Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486785027753804350:2571], recipient# [3:7486785027753804327:2318], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:36.315892Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7486785027753804342:2568][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7486784997689032270:2056] 2025-03-28T09:09:36.315915Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486785027753804329:2568][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7486785027753804339:2568] 2025-03-28T09:09:36.315938Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7486785027753804329:2568][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7486785001983999955:2149], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:09:36.315954Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:7486784997689032270:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7486785027753804342:2568] 2025-03-28T09:09:37.018869Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486785001983999955:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:37.019016Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486785001983999955:2149], cacheItem# { Subscriber: { Subscriber: [3:7486785006278967655:2438] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:37.019109Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486785032048771657:2575], recipient# [3:7486785032048771656:2319], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:37.034889Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486785001983999955:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:37.035031Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486785001983999955:2149], cacheItem# { Subscriber: { Subscriber: [3:7486785006278967655:2438] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: 
IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:37.035139Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486785032048771659:2576], recipient# [3:7486785032048771658:2320], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:37.317557Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486785001983999955:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:37.317703Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486785001983999955:2149], cacheItem# { Subscriber: { Subscriber: [3:7486785027753804330:2569] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:37.317805Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486785032048771661:2577], recipient# [3:7486785032048771660:2321], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |80.7%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS34-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 13539, MsgBus: 13983 2025-03-28T09:08:44.228643Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784800703389035:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:44.228860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018aa/r3tmp/tmpj5gXSb/pdisk_1.dat 2025-03-28T09:08:44.577407Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13539, node 1 2025-03-28T09:08:44.634519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:44.634883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:44.641321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:44.662956Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:44.662976Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:44.662983Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:44.663085Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13983 TClient is connected to server localhost:13983 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:45.511054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:47.676961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784813588291593:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:47.677125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:47.684615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784813588291605:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:47.691493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:08:47.707545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784813588291607:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:08:47.799807Z node 1 :TX_PROXY ERROR: Actor# [1:7486784813588291658:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:48.099656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:08:48.214785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:08:48.251510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:08:48.282963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:08:48.322129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:08:48.470238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:08:48.552155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:08:48.620042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:08:48.659343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:08:48.731012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:08:48.780658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:08:48.815010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:08:48.844559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:08:49.230833Z node 1 :METADATA_PROVIDER ERROR:
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784800703389035:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:49.231013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:49.721253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:08:49.797124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:08:49.869912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:08:49.906488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:08:49.948535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:08:49.997924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:08:50.037190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:08:50.085525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:08:50.122480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:08:50.159505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:08:50.200654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:08:50.272542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:08:50.366009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:08:50.432560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:08:50.481148Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:08:50.521443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:08:50.567871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-28T09:08:50.611984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but p ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:14.987550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:14.993241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.005155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.008268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.019855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.023169Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.030464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.031493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.036889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.037601Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.043686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.044114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.050955Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.052220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.057969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.058736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.065388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.065736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.072576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.075544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.080166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.081608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.087660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.087912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.094265Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.094342Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.100425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.100864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.106849Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.107176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.114886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.114898Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.121396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.121560Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.127733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.127909Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.134793Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.134793Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.141650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.141652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.148367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.148371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.155177Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.155924Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.163441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:15.243294Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe05rz925hseenk7qsynsyx", SessionId: ydb://session/3?node_id=1&id=NTQ4NDU1OGItOGU2NWJlZGItMjEyZGEzN2ItNjQwYThjYw==, Slow query, duration: 23.424957s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:09:15.944068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:09:15.944514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:09:15.945191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7486784882307789290:5228];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-03-28T09:09:15.945566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD] Test command err: 2025-03-28T09:08:36.049385Z :GetAllStartPartitionSessions INFO: Random seed for debugging is 1743152916049280 2025-03-28T09:08:36.732917Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784767699230734:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:36.732987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:08:36.908724Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784767817632251:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:36.909001Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:08:37.207698Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:08:37.226738Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e55/r3tmp/tmpDu04e3/pdisk_1.dat 2025-03-28T09:08:37.834151Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:08:37.853628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:37.853757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:37.870753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:37.870850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:37.879442Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:37.887356Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:08:37.887617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:37.888570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18596, node 1 2025-03-28T09:08:38.181737Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/000e55/r3tmp/yandex00JLoy.tmp 2025-03-28T09:08:38.181762Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/000e55/r3tmp/yandex00JLoy.tmp 2025-03-28T09:08:38.181951Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/000e55/r3tmp/yandex00JLoy.tmp 2025-03-28T09:08:38.182094Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:08:38.234433Z INFO: TTestServer started on Port 6385 GrpcPort 18596 TClient is connected to server localhost:6385 PQClient connected to localhost:18596 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:38.613676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 
2025-03-28T09:08:41.716219Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784767699230734:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:41.716277Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:41.794470Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784789292468929:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:41.794566Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784789292468911:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:41.794647Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:41.802033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-03-28T09:08:41.851339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784789292468939:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976720657 completed, doublechecking } 2025-03-28T09:08:41.876431Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784767817632251:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:41.876488Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:41.944099Z node 2 :TX_PROXY ERROR: Actor# [2:7486784789292468967:2133] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:42.249873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:08:42.252498Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486784789292468974:2318], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:42.254389Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486784789174068152:2345], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:42.254541Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2QwMjBmZDAtNzJkODRlYzktMWUyMjhmZjAtM2E3NGFlODQ=, ActorId: [1:7486784789174068101:2336], ActorState: ExecuteState, TraceId: 01jqe05f9pfkdtckw95m3fe92s, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:42.255052Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:08:42.252829Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2RlZGM4ZC1mNDc4NWEyMS01MWYyZjI1ZC1hODVkZTMxNA==, ActorId: [2:7486784789292468908:2309], ActorState: ExecuteState, TraceId: 01jqe05f5w2v74z36jtjty4ma0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:42.263223Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:08:42.426612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:08:42.710470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:18596", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-03-28T09:08:43.029808Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe05g5s96rkd6k6hhphckwv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmJlOWNjYjMtZjJkNmYxNmQtOGFkMTg3Y2QtOWQxMTAwODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486784797764003214:3030] === CheckClustersList. Ok 2025-03-28T09:08:49.345142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 waiting...
PQ Client: create topic: rt3.dc1--test-topic with 5 partitions CallPersQueueGRPC request to localhost:18596 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:18596 20 ... ; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:09:05.988801Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-28T09:09:05.988819Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784892054583684:2523] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-03-28T09:09:05.991450Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784892054583684:2523] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-03-28T09:09:06.317903Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784892054583684:2523] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-03-28T09:09:06.323338Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486784896349551033:2523] connected; active server actors: 1 2025-03-28T09:09:06.324803Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784892054583684:2523] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-03-28T09:09:06.324828Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784892054583684:2523] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-03-28T09:09:06.336153Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486784896349551033:2523] disconnected; active server actors: 1 2025-03-28T09:09:06.336185Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486784896349551033:2523] disconnected no session 2025-03-28T09:09:06.479951Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784892054583684:2523] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-03-28T09:09:06.479992Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784892054583684:2523] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-03-28T09:09:06.480009Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486784892054583684:2523] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-03-28T09:09:06.480036Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:09:06.483546Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 3, Generation: 1 2025-03-28T09:09:06.483603Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7486784896349551073:2523], now have 1 active actors on pipe 2025-03-28T09:09:06.483632Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:09:06.483655Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:09:06.483727Z node 3 :PERSQUEUE INFO: new Cookie src|c8c6d7a5-1f915c59-71081411-543bb6f4_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-03-28T09:09:06.483818Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-28T09:09:06.483871Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:09:06.484683Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:09:06.484711Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:09:06.484778Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:09:06.484933Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|c8c6d7a5-1f915c59-71081411-543bb6f4_0 2025-03-28T09:09:06.486415Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743152946486 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:09:06.486525Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|c8c6d7a5-1f915c59-71081411-543bb6f4_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-28T09:09:06.486722Z :INFO: [] MessageGroupId [src] SessionId [src|c8c6d7a5-1f915c59-71081411-543bb6f4_0] Write session: close. 
Timeout = 0 ms 2025-03-28T09:09:06.486757Z :INFO: [] MessageGroupId [src] SessionId [src|c8c6d7a5-1f915c59-71081411-543bb6f4_0] Write session will now close 2025-03-28T09:09:06.486801Z :DEBUG: [] MessageGroupId [src] SessionId [src|c8c6d7a5-1f915c59-71081411-543bb6f4_0] Write session: aborting 2025-03-28T09:09:06.487149Z :INFO: [] MessageGroupId [src] SessionId [src|c8c6d7a5-1f915c59-71081411-543bb6f4_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:09:06.487183Z :DEBUG: [] MessageGroupId [src] SessionId [src|c8c6d7a5-1f915c59-71081411-543bb6f4_0] Write session: destroy 2025-03-28T09:09:06.491350Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|c8c6d7a5-1f915c59-71081411-543bb6f4_0 grpc read done: success: 0 data: 2025-03-28T09:09:06.491372Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|c8c6d7a5-1f915c59-71081411-543bb6f4_0 grpc read failed 2025-03-28T09:09:06.491394Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|c8c6d7a5-1f915c59-71081411-543bb6f4_0 grpc closed 2025-03-28T09:09:06.491404Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|c8c6d7a5-1f915c59-71081411-543bb6f4_0 is DEAD 2025-03-28T09:09:06.491945Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:09:06.492366Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7486784896349551073:2523] destroyed 2025-03-28T09:09:06.492398Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. ====TYdbPqTestRetryPolicy() ====ExpectBreakDown === Session was created, waiting for retries >>> Ready to answer: ok ====CreateRetryState ====CreateRetryState Initialized Test retry state: get retry delay 2025-03-28T09:09:06.556880Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-28T09:09:08.557940Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2025-03-28T09:09:09.233587Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:09:09.233637Z node 3 :IMPORT WARN: Table profiles were not loaded Test retry state: get retry delay === In the next federation discovery response dc2 will be available 2025-03-28T09:09:10.558973Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-28T09:09:12.562747Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-28T09:09:14.563921Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-28T09:09:16.569161Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-28T09:09:18.569954Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-28T09:09:20.575113Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-28T09:09:22.579129Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-28T09:09:24.579994Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-28T09:09:26.582865Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-28T09:09:28.586905Z :NOTICE: [/Root] [] 
[] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-28T09:09:30.588805Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-28T09:09:32.590891Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s Test retry state: get retry delay 2025-03-28T09:09:34.592908Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s 2025-03-28T09:09:35.171682Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2025-03-28T09:09:35.170811Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2025-03-28T09:09:35.170899Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 1 2025-03-28T09:09:35.175445Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2025-03-28T09:09:35.175973Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. PendingUpdates size 1 Test retry state: get retry delay 2025-03-28T09:09:36.594872Z :NOTICE: [/Root] [] [] Retry to update federation state in 2.000000s === Waiting for repair >>> Ready to answer: ok 2025-03-28T09:09:38.599524Z :INFO: [/Root] [] [] Start federated write session to database 'dc2' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "fancy_datacenter" DbInfos: [ { name: "dc1" path: "/Root" id: "account-dc1" endpoint: "localhost:22464" location: "dc1" status: AVAILABLE weight: 1000 } { name: "dc2" path: "/Root" id: "account-dc2" endpoint: "localhost:22464" location: "dc2" status: AVAILABLE weight: 500 } { name: "dc3" path: "/Root" id: "account-dc3" endpoint: "localhost:22464" location: "dc3" status: AVAILABLE weight: 500 } ] } === Closing the session 2025-03-28T09:09:38.620051Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: try to update token 2025-03-28T09:09:38.620912Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Start write session. Will connect to nodeId: 0 2025-03-28T09:09:38.631027Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: close. 
Timeout 0.000000s 2025-03-28T09:09:38.635077Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session will now close 2025-03-28T09:09:38.635174Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: aborting 2025-03-28T09:09:38.635326Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: gracefully shut down, all writes complete 2025-03-28T09:09:38.635385Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: destroy >> Yq_1::DeleteConnections >> THiveTest::TestNoMigrationToSelf >> TSubDomainTest::GenericCases [GOOD] >> Yq_1::ModifyConnections >> PgCatalog::PgRoles [GOOD] >> PgCatalog::PgTables >> THiveTest::TestNoMigrationToSelf [GOOD] >> THiveTest::TestReCreateTablet >> IncrementalBackup::SimpleRestore >> THiveTest::TestReCreateTablet [GOOD] >> THiveTest::TestReCreateTabletError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::GenericCases [GOOD] Test command err: 2025-03-28T09:09:21.555751Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784961492806390:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:21.555806Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016be/r3tmp/tmp0i2cNv/pdisk_1.dat 2025-03-28T09:09:22.480454Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:22.515977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:22.516067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:22.523233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18871 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:09:22.942881Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961492806619:2117] Handle TEvNavigate describe path dc-1 2025-03-28T09:09:22.942922Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965787774405:2444] HANDLE EvNavigateScheme dc-1 2025-03-28T09:09:22.943037Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784961492806647:2131], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:22.943144Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965787774333:2396][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784961492806647:2131], cookie# 1 2025-03-28T09:09:22.944485Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965787774337:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965787774334:2396], cookie# 1 2025-03-28T09:09:22.944512Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965787774338:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965787774335:2396], cookie# 1 2025-03-28T09:09:22.944526Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965787774339:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965787774336:2396], cookie# 1 2025-03-28T09:09:22.944555Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784961492806293:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965787774337:2396], cookie# 1 2025-03-28T09:09:22.944576Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784961492806296:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965787774338:2396], cookie# 1 2025-03-28T09:09:22.944589Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784961492806299:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784965787774339:2396], cookie# 1 2025-03-28T09:09:22.944612Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965787774337:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784961492806293:2051], cookie# 1 2025-03-28T09:09:22.944629Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965787774338:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784961492806296:2054], cookie# 1 2025-03-28T09:09:22.944643Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784965787774339:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784961492806299:2057], cookie# 1 2025-03-28T09:09:22.944681Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965787774333:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784965787774334:2396], cookie# 1 2025-03-28T09:09:22.944702Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965787774333:2396][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:09:22.944716Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965787774333:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7486784965787774335:2396], cookie# 1 2025-03-28T09:09:22.944731Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965787774333:2396][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:09:22.944749Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965787774333:2396][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784965787774336:2396], cookie# 1 2025-03-28T09:09:22.944768Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784965787774333:2396][/dc-1] Unexpected sync response: sender# [1:7486784965787774336:2396], cookie# 1 2025-03-28T09:09:22.944811Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784961492806647:2131], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:09:22.957648Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784961492806647:2131], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784965787774333:2396] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:09:22.957768Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784961492806647:2131], cacheItem# { Subscriber: { Subscriber: [1:7486784965787774333:2396] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-28T09:09:22.960743Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784965787774409:2448], recipient# [1:7486784965787774405:2444], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:09:22.960802Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965787774405:2444] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:09:23.012000Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965787774405:2444] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:09:23.015018Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784965787774405:2444] Handle TEvDescribeSchemeResult Forward to# [1:7486784965787774404:2443] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:09:23.078125Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961492806619:2117] Handle TEvProposeTransaction 2025-03-28T09:09:23.078156Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961492806619:2117] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:09:23.078241Z node 1 :TX_PROXY DEBUG: actor# [1:7486784961492806619:2117] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486784970082741713:2452] 2025-03-28T09:09:23.255731Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784970082741713:2452] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-28T09:09:23.255784Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784970082741713:2452] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:09:23.255859Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784970082741713:2452] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:09:23.259014Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784961492806647:2131], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Statu ... alid> AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:09:40.851142Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7486785044293127539:3039][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7486785044293127548:3039] 2025-03-28T09:09:40.851163Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7486785044293127539:3039][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7486785018523322558:2149], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:09:40.851200Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7486785018523322558:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-03-28T09:09:40.851319Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7486785018523322558:2149], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7486785044293127539:3039] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:09:40.851381Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486785018523322558:2149], cacheItem# { Subscriber: { Subscriber: [4:7486785044293127539:3039] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:40.851460Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7486785044293127552:3040], recipient# [4:7486785044293127537:2336], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:41.241060Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7486785018523322558:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:41.241195Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486785018523322558:2149], cacheItem# { Subscriber: { Subscriber: [4:7486785022818290415:2556] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:41.241294Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7486785048588094853:3044], recipient# [4:7486785048588094852:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:41.538579Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7486785018523322558:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:41.538741Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486785018523322558:2149], cacheItem# { Subscriber: { Subscriber: [4:7486785022818290415:2556] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 
CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:41.538827Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7486785048588094855:3045], recipient# [4:7486785048588094854:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:41.827704Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7486785018523322558:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:41.827829Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486785018523322558:2149], cacheItem# { Subscriber: { Subscriber: [4:7486785044293127516:3036] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:41.827907Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7486785048588094860:3046], recipient# [4:7486785048588094859:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:42.250840Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7486785018523322558:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:42.250956Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486785018523322558:2149], cacheItem# { Subscriber: { Subscriber: [4:7486785022818290415:2556] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 
IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:42.251252Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7486785052883062164:3050], recipient# [4:7486785052883062163:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:42.543126Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7486785018523322558:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:42.543276Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486785018523322558:2149], cacheItem# { Subscriber: { Subscriber: [4:7486785022818290415:2556] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:42.543382Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7486785052883062166:3051], recipient# [4:7486785052883062165:2341], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> BsControllerConfig::ExtendByCreatingSeparateBox [GOOD] >> BsControllerConfig::ExtendBoxAndStoragePool >> TSubDomainTest::ConsistentCopyTable [GOOD] >> BsControllerConfig::MergeIntersectingBoxes [GOOD] >> BsControllerConfig::MoveGroups >> Yq_1::ListConnections >> THiveTest::TestReCreateTabletError [GOOD] >> THiveTest::TestNodeDisconnect |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> BasicUsage::SimpleHandlers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::ConsistentCopyTable [GOOD] Test command err: 2025-03-28T09:09:21.097601Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784959777612248:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:21.097647Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00169d/r3tmp/tmpwhGhdm/pdisk_1.dat 2025-03-28T09:09:21.767112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:21.767219Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:21.784123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:21.922029Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61130 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:09:22.185591Z node 1 :TX_PROXY DEBUG: actor# [1:7486784959777612467:2090] Handle TEvNavigate describe path dc-1 2025-03-28T09:09:22.185639Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784964072580091:2265] HANDLE EvNavigateScheme dc-1 2025-03-28T09:09:22.185775Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784959777612509:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:22.185873Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784964072580073:2259][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784959777612509:2113], cookie# 1 2025-03-28T09:09:22.192018Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784964072580077:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784964072580074:2259], cookie# 1 2025-03-28T09:09:22.192090Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784964072580078:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784964072580075:2259], cookie# 1 2025-03-28T09:09:22.192107Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784964072580079:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784964072580076:2259], cookie# 1 2025-03-28T09:09:22.192159Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784959777612198:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784964072580077:2259], cookie# 1 2025-03-28T09:09:22.192188Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784959777612201:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784964072580078:2259], cookie# 1 2025-03-28T09:09:22.192202Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784959777612204:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784964072580079:2259], cookie# 1 2025-03-28T09:09:22.192230Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784964072580077:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784959777612198:2049], cookie# 1 2025-03-28T09:09:22.192243Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784964072580078:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784959777612201:2052], cookie# 1 2025-03-28T09:09:22.192268Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784964072580079:2259][/dc-1] 
Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784959777612204:2055], cookie# 1 2025-03-28T09:09:22.192311Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784964072580073:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784964072580074:2259], cookie# 1 2025-03-28T09:09:22.192331Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784964072580073:2259][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:09:22.192344Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784964072580073:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784964072580075:2259], cookie# 1 2025-03-28T09:09:22.192366Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784964072580073:2259][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:09:22.192391Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784964072580073:2259][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784964072580076:2259], cookie# 1 2025-03-28T09:09:22.192403Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784964072580073:2259][/dc-1] Unexpected sync response: sender# [1:7486784964072580076:2259], cookie# 1 2025-03-28T09:09:22.192453Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784959777612509:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:09:22.209522Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784959777612509:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784964072580073:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:09:22.209664Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784959777612509:2113], cacheItem# { Subscriber: { Subscriber: [1:7486784964072580073:2259] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-28T09:09:22.211728Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784964072580092:2266], recipient# [1:7486784964072580091:2265], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:09:22.211794Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784964072580091:2265] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:09:22.293197Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784964072580091:2265] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:09:22.300884Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784964072580091:2265] Handle TEvDescribeSchemeResult Forward to# [1:7486784964072580090:2264] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:09:22.326767Z node 1 :TX_PROXY DEBUG: actor# [1:7486784959777612467:2090] Handle TEvProposeTransaction 2025-03-28T09:09:22.326803Z node 1 :TX_PROXY DEBUG: actor# [1:7486784959777612467:2090] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:09:22.326899Z node 1 :TX_PROXY DEBUG: actor# [1:7486784959777612467:2090] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486784964072580098:2271] 2025-03-28T09:09:22.434006Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784964072580098:2271] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-28T09:09:22.434054Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784964072580098:2271] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:09:22.434145Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784964072580098:2271] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:09:22.434247Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784959777612509:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Statu ... 6:2329], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:46.300488Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7486785069254835340:2260], recipient# [6:7486785069254835317:2330], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:46.300691Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7486785069254835316:2329], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:46.300808Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:09:46.367621Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7486785030600129415:2117], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:46.367788Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7486785030600129415:2117], cacheItem# { Subscriber: { Subscriber: [6:7486785034895096873:2219] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:46.367905Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7486785069254835342:2261], recipient# [6:7486785069254835341:2331], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:46.377123Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7486785030600129415:2117], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:46.377276Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7486785030600129415:2117], cacheItem# { Subscriber: { Subscriber: [6:7486785069254835318:2256] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 
0 IsSync: false Partial: 0 } 2025-03-28T09:09:46.377353Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7486785030600129415:2117], cacheItem# { Subscriber: { Subscriber: [6:7486785069254835319:2257] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:46.377479Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7486785069254835343:2262], recipient# [6:7486785069254835316:2329], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:46.377783Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7486785069254835316:2329], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:46.478785Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7486785030600129415:2117], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:46.478930Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7486785030600129415:2117], cacheItem# { Subscriber: { Subscriber: [6:7486785034895096873:2219] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:46.479038Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7486785069254835345:2263], recipient# [6:7486785069254835344:2332], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:46.531339Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7486785030600129415:2117], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:46.531483Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7486785030600129415:2117], cacheItem# { Subscriber: { Subscriber: [6:7486785069254835318:2256] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:46.531536Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [6:7486785030600129415:2117], cacheItem# { Subscriber: { Subscriber: [6:7486785069254835319:2257] DomainOwnerId: 72057594046644480 
Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:09:46.531652Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [6:7486785069254835346:2264], recipient# [6:7486785069254835316:2329], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:46.535054Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7486785069254835316:2329], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> THiveTest::TestNodeDisconnect [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |80.7%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe >> THiveTest::TestReassignGroupsWithRecreateTablet [GOOD] >> THiveTest::TestReassignUseRelativeSpace |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |80.7%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |80.7%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SimpleHandlers [GOOD] Test command err: 2025-03-28T09:08:34.278788Z :WaitEventBlocksBeforeDiscovery INFO: Random seed for debugging is 1743152914278743 2025-03-28T09:08:34.790955Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784761338404388:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:34.791012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:08:34.893598Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784759364568125:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:34.893649Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e8d/r3tmp/tmp2ce9QR/pdisk_1.dat 2025-03-28T09:08:35.300492Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:08:35.318859Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:08:35.845812Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:35.845835Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:08:35.858572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:35.861604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:35.867291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:35.867363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:35.875316Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:35.888218Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:08:35.894963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18229, node 1 2025-03-28T09:08:36.179374Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/000e8d/r3tmp/yandexP6r261.tmp 2025-03-28T09:08:36.179408Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/000e8d/r3tmp/yandexP6r261.tmp 2025-03-28T09:08:36.179575Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/000e8d/r3tmp/yandexP6r261.tmp 2025-03-28T09:08:36.179715Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:08:36.274801Z INFO: TTestServer started on Port 9256 GrpcPort 18229 TClient is connected to server localhost:9256 PQClient connected to localhost:18229 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:36.675384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:08:36.760805Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:08:39.735086Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784780839404932:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:39.735256Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784780839404921:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:39.735718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:39.740044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784782813241845:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:39.740110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784782813241838:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:39.740172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:39.747300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:08:39.759677Z node 2 :TX_PROXY ERROR: Actor# [2:7486784780839404937:2125] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:08:39.778544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784782813241867:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:08:39.779118Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784780839404936:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:08:39.790868Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784761338404388:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:39.790930Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:39.863057Z node 2 :TX_PROXY ERROR: Actor# [2:7486784780839404964:2131] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:39.878825Z node 1 :TX_PROXY ERROR: Actor# [1:7486784782813241964:2690] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:39.893267Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784759364568125:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:39.893310Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:40.069792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:08:40.084156Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486784780839404979:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:40.084542Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGI4MGM4N2ItYTA0ZGI2MzYtZmE4ZjRlMDEtMmEzN2Y1MTg=, ActorId: [2:7486784780839404905:2309], ActorState: ExecuteState, TraceId: 01jqe05d506g1nf5qbfpxwtyeq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:40.084250Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486784782813241978:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:40.084524Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjBkMzg5ZDMtOGE4YzFiMDgtNTFiY2YzZGItNzc0MDAyNDE=, ActorId: [1:7486784782813241835:2337], ActorState: ExecuteState, TraceId: 01jqe05d5sbcb910vfm88148bb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:40.089564Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:08:40.087177Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: ... 3_0] Write session will now close 2025-03-28T09:09:46.817318Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|bab7f7d-832657bb-1392aba9-cbaeee03_0] Write session: aborting 2025-03-28T09:09:46.817739Z :INFO: [/Root] MessageGroupId [src_id] SessionId [src_id|bab7f7d-832657bb-1392aba9-cbaeee03_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:09:46.817783Z :DEBUG: [/Root] MessageGroupId [src_id] SessionId [src_id|bab7f7d-832657bb-1392aba9-cbaeee03_0] Write session: destroy 2025-03-28T09:09:46.853128Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_9060348831477912903_v1 grpc read done: success# 0, data# { } 2025-03-28T09:09:46.853161Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_9060348831477912903_v1 grpc read failed 2025-03-28T09:09:46.853199Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_9060348831477912903_v1 grpc closed 2025-03-28T09:09:46.853226Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_9060348831477912903_v1 is DEAD 2025-03-28T09:09:46.854138Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_11289571335448288836_v1 grpc read done: success# 0, data# { } 2025-03-28T09:09:46.854149Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_11289571335448288836_v1 grpc read failed 2025-03-28T09:09:46.854169Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_11289571335448288836_v1 grpc closed 2025-03-28T09:09:46.854186Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_11289571335448288836_v1 is DEAD 2025-03-28T09:09:46.854712Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_8677836925976873846_v1 grpc read done: success# 0, data# { } 2025-03-28T09:09:46.854725Z node 3 :PQ_READ_PROXY INFO: session cookie 
3 consumer shared/user session shared/user_3_3_8677836925976873846_v1 grpc read failed 2025-03-28T09:09:46.854742Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_8677836925976873846_v1 grpc closed 2025-03-28T09:09:46.854770Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_8677836925976873846_v1 is DEAD 2025-03-28T09:09:46.855316Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: src_id|bab7f7d-832657bb-1392aba9-cbaeee03_0 grpc read done: success: 0 data: 2025-03-28T09:09:46.855330Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|bab7f7d-832657bb-1392aba9-cbaeee03_0 grpc read failed 2025-03-28T09:09:46.855356Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|bab7f7d-832657bb-1392aba9-cbaeee03_0 grpc closed 2025-03-28T09:09:46.855373Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: src_id|bab7f7d-832657bb-1392aba9-cbaeee03_0 is DEAD 2025-03-28T09:09:46.856042Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:09:46.856258Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486785060987714396:2537] disconnected; active server actors: 1 2025-03-28T09:09:46.856282Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486785060987714396:2537] client user disconnected session shared/user_3_1_9060348831477912903_v1 2025-03-28T09:09:46.857976Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-03-28T09:09:46.858055Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486785060987714403:2538] disconnected; active server actors: 1 2025-03-28T09:09:46.858074Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486785060987714403:2538] client user disconnected session shared/user_3_2_11289571335448288836_v1 2025-03-28T09:09:46.858106Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486785060987714402:2539] disconnected; active server actors: 1 2025-03-28T09:09:46.858119Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486785060987714402:2539] client user disconnected session shared/user_3_3_8677836925976873846_v1 2025-03-28T09:09:46.859069Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7486785060987714451:2541] destroyed 2025-03-28T09:09:46.859111Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_3_8677836925976873846_v1 2025-03-28T09:09:46.859140Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7486785060987714430:2554] destroyed 2025-03-28T09:09:46.859191Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-28T09:09:46.859266Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_3_8677836925976873846_v1 2025-03-28T09:09:46.919240Z :INFO: [/Root] [/Root] [404d9c0d-fd55e584-640903f1-b80bbf42] Closing read session. 
Close timeout: 0.000000s 2025-03-28T09:09:46.919317Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-28T09:09:46.919374Z :INFO: [/Root] [/Root] [404d9c0d-fd55e584-640903f1-b80bbf42] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2494 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:09:46.919411Z :INFO: [/Root] [/Root] [38d37602-12dc9993-402fd839-2b804a63] Closing read session. Close timeout: 0.000000s 2025-03-28T09:09:46.919440Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-28T09:09:46.919467Z :INFO: [/Root] [/Root] [38d37602-12dc9993-402fd839-2b804a63] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2479 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:09:46.919491Z :INFO: [/Root] [/Root] [c198c8aa-ebbf0b47-745f22e1-59e07275] Closing read session. Close timeout: 0.000000s 2025-03-28T09:09:46.919523Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2025-03-28T09:09:46.919556Z :INFO: [/Root] [/Root] [c198c8aa-ebbf0b47-745f22e1-59e07275] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2443 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:09:46.919591Z :INFO: [/Root] [/Root] [c198c8aa-ebbf0b47-745f22e1-59e07275] Closing read session. Close timeout: 0.000000s 2025-03-28T09:09:46.919627Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2025-03-28T09:09:46.919671Z :INFO: [/Root] [/Root] [c198c8aa-ebbf0b47-745f22e1-59e07275] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2443 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:09:46.919759Z :NOTICE: [/Root] [/Root] [c198c8aa-ebbf0b47-745f22e1-59e07275] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:09:46.920674Z :INFO: [/Root] [/Root] [38d37602-12dc9993-402fd839-2b804a63] Closing read session. Close timeout: 0.000000s 2025-03-28T09:09:46.920711Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-28T09:09:46.920743Z :INFO: [/Root] [/Root] [38d37602-12dc9993-402fd839-2b804a63] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2480 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:09:46.920795Z :NOTICE: [/Root] [/Root] [38d37602-12dc9993-402fd839-2b804a63] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:09:46.921276Z :INFO: [/Root] [/Root] [404d9c0d-fd55e584-640903f1-b80bbf42] Closing read session. Close timeout: 0.000000s 2025-03-28T09:09:46.921317Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-28T09:09:46.921363Z :INFO: [/Root] [/Root] [404d9c0d-fd55e584-640903f1-b80bbf42] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2496 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:09:46.921420Z :NOTICE: [/Root] [/Root] [404d9c0d-fd55e584-640903f1-b80bbf42] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:09:48.162485Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7486785078167583874:2580] TxId: 281474976715695. Ctx: { TraceId: 01jqe07f1a2qc0jjw5cma79qf2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YTQ5MTY0NzMtZTM4ZGM5MDUtNWNlNDg4MWEtOTZkODEwYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 2025-03-28T09:09:48.163181Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486785078167583884:2591], TxId: 281474976715695, task: 2. Ctx: { TraceId : 01jqe07f1a2qc0jjw5cma79qf2. SessionId : ydb://session/3?node_id=3&id=YTQ5MTY0NzMtZTM4ZGM5MDUtNWNlNDg4MWEtOTZkODEwYTQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7486785078167583874:2580], status: UNAVAILABLE, reason: {
: Error: Terminate execution }
2025-03-28T09:09:48.163469Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486785078167583885:2592], TxId: 281474976715695, task: 4. Ctx: { SessionId : ydb://session/3?node_id=3&id=YTQ5MTY0NzMtZTM4ZGM5MDUtNWNlNDg4MWEtOTZkODEwYTQ=. CustomerSuppliedId : . TraceId : 01jqe07f1a2qc0jjw5cma79qf2. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7486785078167583874:2580], status: UNAVAILABLE, reason: {
: Error: Terminate execution }
2025-03-28T09:09:48.575054Z node 3 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715696. Failed to resolve tablet: 72075186224037890 after several retries.
2025-03-28T09:09:48.575233Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7486785078167583899:2594] TxId: 281474976715696. Ctx: { TraceId: 01jqe07g6hbegz4nfgm8krnxqa, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTJlMjhkYzItZTczMWFlYTYtNDU1M2FjYWYtNDBiMzAwZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries.
2025-03-28T09:09:48.575513Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTJlMjhkYzItZTczMWFlYTYtNDU1M2FjYWYtNDBiMzAwZTk=, ActorId: [3:7486785078167583896:2594], ActorState: ExecuteState, TraceId: 01jqe07g6hbegz4nfgm8krnxqa, Create QueryResponse for error on request, msg:
2025-03-28T09:09:48.577329Z node 3 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqe07g6kerkgnsk38q478q6k" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 }
|80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity
|80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity
|80.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity
|80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys
|80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys
|80.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys
>> TSubDomainTest::CreateTableInsideSubDomain [GOOD]
>> THiveTest::TestReassignUseRelativeSpace [GOOD]
>> THiveTest::TestManyFollowersOnOneNode
>> TTopicApiDescribes::GetPartitionDescribe
>> SystemView::ShowCreateTableDefaultLiteral [GOOD]
>> SystemView::ShowCreateTablePartitionAtKeys
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsideSubDomain [GOOD]
Test command err:
2025-03-28T09:09:21.597915Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784962135463735:2291];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:09:21.598049Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016ab/r3tmp/tmpVK5G7d/pdisk_1.dat
2025-03-28T09:09:22.639061Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:09:22.642555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:09:22.665805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:09:22.665920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:09:22.677048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:14457
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
2025-03-28T09:09:22.987343Z node 1 :TX_PROXY DEBUG: actor# [1:7486784962135463620:2112] Handle TEvNavigate describe path dc-1
2025-03-28T09:09:22.987402Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784966430431509:2440] HANDLE EvNavigateScheme dc-1
2025-03-28T09:09:22.987545Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784962135463738:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:09:22.987625Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784966430431397:2352][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784962135463738:2127], cookie# 1
2025-03-28T09:09:22.989015Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784966430431402:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784966430431399:2352], cookie# 1
2025-03-28T09:09:22.989056Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784966430431403:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784966430431400:2352], cookie# 1
2025-03-28T09:09:22.989089Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784966430431404:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784966430431401:2352], cookie# 1
2025-03-28T09:09:22.989124Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784962135463405:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784966430431403:2352], cookie# 1
2025-03-28T09:09:22.989162Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784962135463408:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784966430431404:2352], cookie# 1
2025-03-28T09:09:22.989207Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784966430431403:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784962135463405:2053], cookie# 1
2025-03-28T09:09:22.989227Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784966430431404:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784962135463408:2056], cookie# 1
2025-03-28T09:09:22.989256Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784966430431397:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784966430431400:2352], cookie# 1
2025-03-28T09:09:22.989275Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784966430431397:2352][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0
2025-03-28T09:09:22.989304Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784966430431397:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784966430431401:2352], cookie# 1
2025-03-28T09:09:22.989340Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784966430431397:2352][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0
2025-03-28T09:09:22.989399Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784962135463738:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }
2025-03-28T09:09:22.990760Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784962135463402:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784966430431402:2352], cookie# 1
2025-03-28T09:09:22.990812Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784966430431402:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784962135463402:2050], cookie# 1
2025-03-28T09:09:22.990833Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784966430431397:2352][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784966430431399:2352], cookie# 1
2025-03-28T09:09:22.990848Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784966430431397:2352][/dc-1] Unexpected sync response: sender# [1:7486784966430431399:2352], cookie# 1
2025-03-28T09:09:23.003736Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784962135463738:2127], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784966430431397:2352] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr
2025-03-28T09:09:23.003873Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784962135463738:2127], cacheItem# { Subscriber: { Subscriber: [1:7486784966430431397:2352] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 }
2025-03-28T09:09:23.009950Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784970725398806:2441], recipient# [1:7486784966430431509:2440], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-03-28T09:09:23.010043Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784966430431509:2440] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0
2025-03-28T09:09:23.074440Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784966430431509:2440] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true }
2025-03-28T09:09:23.077183Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784966430431509:2440] Handle TEvDescribeSchemeResult Forward to# [1:7486784966430431508:2439] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-03-28T09:09:23.108201Z node 1 :TX_PROXY DEBUG: actor# [1:7486784962135463620:2112] Handle TEvProposeTransaction
2025-03-28T09:09:23.108232Z node 1 :TX_PROXY DEBUG: actor# [1:7486784962135463620:2112] TxId# 281474976715657 ProcessProposeTransaction
2025-03-28T09:09:23.108341Z node 1 :TX_PROXY DEBUG: actor# [1:7486784962135463620:2112] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7486784970725398811:2445]
2025-03-28T09:09:23.307409Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784970725398811:2445] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: ""
2025-03-28T09:09:23.307471Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784970725398811:2445] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0
2025-03-28T09:09:23.307531Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784970725398811:2445] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache
2025-03-28T09:09:23.307626Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784962135463738:2127], request# { ErrorCount: 0 Databa ...
:7486785046690944651:2053]
2025-03-28T09:09:51.496021Z node 5 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][5:7486785089640618883:2786][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [5:7486785089640618887:2786]
2025-03-28T09:09:51.496044Z node 5 :SCHEME_BOARD_SUBSCRIBER INFO: [main][5:7486785089640618883:2786][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [5:7486785050985912285:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }
2025-03-28T09:09:51.496064Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7486785046690944651:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7486785089640618896:2787]
2025-03-28T09:09:51.496080Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7486785046690944651:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7486785089640618902:2788]
2025-03-28T09:09:51.496093Z node 5 :SCHEME_BOARD_REPLICA DEBUG: [5:7486785046690944651:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [5:7486785089640618890:2786]
2025-03-28T09:09:51.986872Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7486785050985912285:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:09:51.987015Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7486785050985912285:2130], cacheItem# { Subscriber: { Subscriber: [5:7486785050985912750:2442] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-03-28T09:09:51.987110Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7486785089640618914:2794], recipient# [5:7486785089640618913:2327], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:09:52.089453Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7486785050985912285:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:09:52.089582Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG:
FillEntry for TNavigate: self# [5:7486785050985912285:2130], cacheItem# { Subscriber: { Subscriber: [5:7486785050985912750:2442] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-03-28T09:09:52.089683Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7486785093935586212:2795], recipient# [5:7486785093935586211:2328], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:09:52.498158Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7486785050985912285:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:09:52.498279Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7486785050985912285:2130], cacheItem# { Subscriber: { Subscriber: [5:7486785089640618885:2788] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-03-28T09:09:52.498359Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7486785093935586218:2797], recipient# [5:7486785093935586217:2329], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:09:52.995596Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7486785050985912285:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:09:52.995727Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self#
[5:7486785050985912285:2130], cacheItem# { Subscriber: { Subscriber: [5:7486785050985912750:2442] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-03-28T09:09:52.995833Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7486785093935586229:2804], recipient# [5:7486785093935586228:2330], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:09:53.088221Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7486785050985912285:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:09:53.088333Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7486785050985912285:2130], cacheItem# { Subscriber: { Subscriber: [5:7486785050985912750:2442] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-03-28T09:09:53.088415Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7486785098230553527:2805], recipient# [5:7486785098230553526:2331], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:09:53.499509Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [5:7486785050985912285:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:09:53.499649Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [5:7486785050985912285:2130], cacheItem# { Subscriber: { Subscriber: [5:7486785089640618885:2788]
DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 }
2025-03-28T09:09:53.499734Z node 5 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [5:7486785098230553534:2808], recipient# [5:7486785098230553533:2332], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
>> THiveTest::TestManyFollowersOnOneNode [GOOD]
>> THiveTest::TestLockTabletExecutionTimeout
>> Yq_1::DescribeConnection [GOOD]
>> Yq_1::DeleteQuery
>> Yq_1::CreateConnection_With_Existing_Name [GOOD]
>> Yq_1::CreateConnections_With_Idempotency
>> TTopicApiDescribes::DescribeConsumer
>> IncrementalBackup::SimpleRestore [GOOD]
>> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental
|80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k
|80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k
|80.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k
>> TIcNodeCache::GetNodesInfoTest
>> THiveTest::TestLockTabletExecutionTimeout [GOOD]
>> THiveTest::TestLockTabletExecutionRebootTimeout
>> SystemView::AuthGroups_TableRange [GOOD]
>> SystemView::AuthOwners
>> PrivateApi::PingTask [GOOD]
>> PrivateApi::GetTask
>> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness1 [GOOD]
>> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable [GOOD]
>> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2
|80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index
|80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index
|80.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index
>> Yq_1::CreateQuery_With_Idempotency [GOOD]
>> Yq_1::CreateQuery_Without_Connection
|80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication
|80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication
|80.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication
>> Yq_1::DeleteConnections [GOOD]
>> Yq_1::Create_And_Modify_The_Same_Connection
>> BsControllerConfig::MoveGroups [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness1 [GOOD]
Test command err:
Trying to start YDB, gRPC: 20425, MsgBus: 14383
2025-03-28T09:09:05.317872Z node 1 :METADATA_PROVIDER WARN:
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784891279349276:2196];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:09:05.318676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001887/r3tmp/tmpAe4MT6/pdisk_1.dat
2025-03-28T09:09:05.887472Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:09:05.897849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:09:05.897980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:09:05.913351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 20425, node 1
2025-03-28T09:09:06.018753Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:09:06.018776Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:09:06.018786Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:09:06.018908Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:14383
TClient is connected to server localhost:14383
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:09:06.587674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:09:06.607830Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:09:08.421754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784904164251687:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:08.421773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784904164251699:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:08.421833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:09:08.426638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:09:08.440890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784904164251701:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-03-28T09:09:08.514163Z node 1 :TX_PROXY ERROR: Actor# [1:7486784904164251752:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:09:08.832166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-28T09:09:08.956464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-03-28T09:09:08.984585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.013289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.078085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.194935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.229028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.252974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.308524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.337384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.368019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.401100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.438109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-28T09:09:09.951736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at
schemeshard: 72057594046644480
2025-03-28T09:09:09.977230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-03-28T09:09:10.001715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-03-28T09:09:10.027531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
2025-03-28T09:09:10.055395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480
2025-03-28T09:09:10.087417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480
2025-03-28T09:09:10.114453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480
2025-03-28T09:09:10.135346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
2025-03-28T09:09:10.165207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480
2025-03-28T09:09:10.196227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480
2025-03-28T09:09:10.267760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480
2025-03-28T09:09:10.312455Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784891279349276:2196];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:09:10.312528Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:09:10.343209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480
2025-03-28T09:09:10.373651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480
2025-03-28T09:09:10.406044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
2025-03-28T09:09:10.441232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at
schemeshard: 72057594046644480
2025-03-28T09:09:10.466805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480
2025-03-28T09:09:10.508573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.725090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.729208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.734551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.744645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038467;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.745419Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038473;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.756218Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038437;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.778027Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038445;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.779838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.792798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038447;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.794207Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.806494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038459;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.823863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.829541Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038497;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.838703Z node 1 :TX_COLUMNSHARD_TX WARN:
tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.843990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.853562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.859398Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.866664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038451;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.869384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.888728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.900264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.915354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.931674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.947664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.960088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.964808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.969937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.976427Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.982609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.988704Z
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:47.999578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:48.007221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:48.015832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:48.021197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:48.026546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:48.035556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:48.040477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:48.054343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:48.072078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:48.076474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:48.090220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:48.096147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:48.212861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038476;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:48.219336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038442;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:48.350003Z node 1 :TX_COLUMNSHARD_TX WARN:
tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:09:48.575266Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe06cbj19np35res5qprhf0", SessionId: ydb://session/3?node_id=1&id=NDdhY2Q5NTctZGNkMTQyMGUtNTIyOGVkMmItZjQ3NmE1YmM=, Slow query, duration: 36.908307s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-28T09:09:49.430876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:09:49.431469Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7486785020128395408:5971];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170;
2025-03-28T09:09:49.438103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:09:49.439424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental [GOOD]
>> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental
|80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut
|80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut
|80.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MoveGroups [GOOD]
Test command err:
Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2887:2116]
IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2887:2116]
Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3017:2106] recipient: [1:2887:2116]
2025-03-28T09:09:28.807598Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot
2025-03-28T09:09:28.811322Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored
2025-03-28T09:09:28.811656Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started
2025-03-28T09:09:28.812374Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684
Event# NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-03-28T09:09:28.814187Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig
2025-03-28T09:09:28.814953Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo
2025-03-28T09:09:28.814989Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo
2025-03-28T09:09:28.815299Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute
2025-03-28T09:09:28.824159Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete
2025-03-28T09:09:28.824281Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx
2025-03-28T09:09:28.824399Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false
2025-03-28T09:09:28.824463Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots
2025-03-28T09:09:28.824539Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots
2025-03-28T09:09:28.824599Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion
Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3042:2106] recipient: [1:60:2107]
2025-03-28T09:09:28.836378Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion
2025-03-28T09:09:28.836506Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId
2025-03-28T09:09:28.847501Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId
2025-03-28T09:09:28.847656Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo
2025-03-28T09:09:28.847751Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo
2025-03-28T09:09:28.847849Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk
2025-03-28T09:09:28.847965Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk
2025-03-28T09:09:28.848019Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus
2025-03-28T09:09:28.848058Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus
2025-03-28T09:09:28.848141Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo
2025-03-28T09:09:28.859895Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:09:28.860043Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:09:28.871295Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:09:28.871506Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-28T09:09:28.873055Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-28T09:09:28.873121Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-28T09:09:28.873407Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-28T09:09:28.873483Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-28T09:09:28.895066Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" 
IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 150 PDiskFilter { Property { Type: ROT } } } } } 2025-03-28T09:09:28.896700Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-03-28T09:09:28.896782Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-03-28T09:09:28.896809Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-03-28T09:09:28.896852Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-03-28T09:09:28.896885Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-03-28T09:09:28.896930Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-03-28T09:09:28.896961Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-03-28T09:09:28.896987Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-03-28T09:09:28.897029Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-03-28T09:09:28.897057Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-03-28T09:09:28.897081Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-03-28T09:09:28.897106Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-03-28T09:09:28.897134Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-03-28T09:09:28.897160Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-03-28T09:09:28.897185Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-03-28T09:09:28.897227Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-03-28T09:09:28.897286Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1001 Path# 
/dev/disk2 2025-03-28T09:09:28.897333Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-03-28T09:09:28.897364Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-03-28T09:09:28.897403Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-03-28T09:09:28.897449Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:3 ... 78:1000 Path# /dev/disk1 2025-03-28T09:09:59.894876Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 178:1001 Path# /dev/disk2 2025-03-28T09:09:59.894901Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 178:1002 Path# /dev/disk3 2025-03-28T09:09:59.894930Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 179:1000 Path# /dev/disk1 2025-03-28T09:09:59.894956Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 179:1001 Path# /dev/disk2 2025-03-28T09:09:59.894983Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 179:1002 Path# /dev/disk3 2025-03-28T09:09:59.895008Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 180:1000 Path# /dev/disk1 2025-03-28T09:09:59.895036Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 180:1001 Path# /dev/disk2 2025-03-28T09:09:59.895062Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 180:1002 Path# /dev/disk3 2025-03-28T09:09:59.895088Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 181:1000 Path# /dev/disk1 2025-03-28T09:09:59.895113Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 181:1001 Path# /dev/disk2 2025-03-28T09:09:59.895139Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 181:1002 Path# /dev/disk3 2025-03-28T09:09:59.895163Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 182:1000 Path# /dev/disk1 2025-03-28T09:09:59.895189Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 182:1001 Path# /dev/disk2 2025-03-28T09:09:59.895218Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 182:1002 Path# /dev/disk3 2025-03-28T09:09:59.895244Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 183:1000 Path# /dev/disk1 2025-03-28T09:09:59.895271Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 183:1001 Path# /dev/disk2 2025-03-28T09:09:59.895297Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 183:1002 Path# /dev/disk3 2025-03-28T09:09:59.895321Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 184:1000 Path# /dev/disk1 2025-03-28T09:09:59.895345Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 184:1001 Path# /dev/disk2 2025-03-28T09:09:59.895367Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 184:1002 Path# /dev/disk3 2025-03-28T09:09:59.895392Z 
node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 185:1000 Path# /dev/disk1 2025-03-28T09:09:59.895419Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 185:1001 Path# /dev/disk2 2025-03-28T09:09:59.895445Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 185:1002 Path# /dev/disk3 2025-03-28T09:09:59.895469Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 186:1000 Path# /dev/disk1 2025-03-28T09:09:59.895494Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 186:1001 Path# /dev/disk2 2025-03-28T09:09:59.895519Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 186:1002 Path# /dev/disk3 2025-03-28T09:09:59.895544Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 187:1000 Path# /dev/disk1 2025-03-28T09:09:59.895567Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 187:1001 Path# /dev/disk2 2025-03-28T09:09:59.895593Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 187:1002 Path# /dev/disk3 2025-03-28T09:09:59.895619Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 188:1000 Path# /dev/disk1 2025-03-28T09:09:59.895644Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 188:1001 Path# /dev/disk2 2025-03-28T09:09:59.895668Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 188:1002 Path# /dev/disk3 2025-03-28T09:09:59.895692Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 189:1000 Path# /dev/disk1 2025-03-28T09:09:59.895717Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 189:1001 Path# /dev/disk2 2025-03-28T09:09:59.895746Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 189:1002 Path# /dev/disk3 2025-03-28T09:09:59.895769Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 190:1000 Path# /dev/disk1 2025-03-28T09:09:59.895795Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 190:1001 Path# /dev/disk2 2025-03-28T09:09:59.895821Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 190:1002 Path# /dev/disk3 2025-03-28T09:09:59.895844Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 191:1000 Path# /dev/disk1 2025-03-28T09:09:59.895867Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 191:1001 Path# /dev/disk2 2025-03-28T09:09:59.895891Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 191:1002 Path# /dev/disk3 2025-03-28T09:09:59.895915Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 192:1000 Path# /dev/disk1 2025-03-28T09:09:59.895939Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 192:1001 Path# /dev/disk2 2025-03-28T09:09:59.895965Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 192:1002 Path# /dev/disk3 2025-03-28T09:09:59.895990Z 
node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 193:1000 Path# /dev/disk1 2025-03-28T09:09:59.896014Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 193:1001 Path# /dev/disk2 2025-03-28T09:09:59.896039Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 193:1002 Path# /dev/disk3 2025-03-28T09:09:59.896063Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 194:1000 Path# /dev/disk1 2025-03-28T09:09:59.896088Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 194:1001 Path# /dev/disk2 2025-03-28T09:09:59.896114Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 194:1002 Path# /dev/disk3 2025-03-28T09:09:59.896140Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 195:1000 Path# /dev/disk1 2025-03-28T09:09:59.896166Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 195:1001 Path# /dev/disk2 2025-03-28T09:09:59.896191Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 195:1002 Path# /dev/disk3 2025-03-28T09:09:59.896216Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 196:1000 Path# /dev/disk1 2025-03-28T09:09:59.896241Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 196:1001 Path# /dev/disk2 2025-03-28T09:09:59.896266Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 196:1002 Path# /dev/disk3 2025-03-28T09:09:59.896293Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 197:1000 Path# /dev/disk1 2025-03-28T09:09:59.896317Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 197:1001 Path# /dev/disk2 2025-03-28T09:09:59.896340Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 197:1002 Path# /dev/disk3 2025-03-28T09:09:59.896365Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 198:1000 Path# /dev/disk1 2025-03-28T09:09:59.896391Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 198:1001 Path# /dev/disk2 2025-03-28T09:09:59.896416Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 198:1002 Path# /dev/disk3 2025-03-28T09:09:59.896442Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 199:1000 Path# /dev/disk1 2025-03-28T09:09:59.896468Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 199:1001 Path# /dev/disk2 2025-03-28T09:09:59.896494Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 199:1002 Path# /dev/disk3 2025-03-28T09:09:59.896518Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 200:1000 Path# /dev/disk1 2025-03-28T09:09:59.896543Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 200:1001 Path# /dev/disk2 2025-03-28T09:09:59.896565Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 200:1002 Path# /dev/disk3 2025-03-28T09:10:00.226294Z 
node 151 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.338776s 2025-03-28T09:10:00.226496Z node 151 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.339011s 2025-03-28T09:10:00.264098Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-03-28T09:10:00.379933Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 1 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 1 ExplicitGroupId: 2147483748 } } } 2025-03-28T09:10:00.399913Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-03-28T09:10:00.573519Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 2 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 2 ExplicitGroupId: 2147483749 } } } 2025-03-28T09:10:00.599638Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2025-03-28T09:10:00.816704Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 3 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 3 } } } 2025-03-28T09:10:00.835098Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } >> SystemView::ShowCreateTablePartitionAtKeys [GOOD] >> SystemView::ShowCreateTablePartitionSettings |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] >> Yq_1::ModifyConnections [GOOD] >> Yq_1::ModifyQuery >> Yq_1::ListConnections [GOOD] >> Yq_1::ListConnectionsOnEmptyConnectionsTable >> KqpJoinOrder::TPCDS16+ColumnStore >> TTopicApiDescribes::GetLocalDescribe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2887:2116] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3014:2106] recipient: [1:2887:2116] Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3017:2106] recipient: [1:2887:2116] 2025-03-28T09:09:29.202578Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-28T09:09:29.208329Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-28T09:09:29.208742Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-28T09:09:29.209503Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:09:29.219548Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# 
NKikimr::TEvNodeWardenStorageConfig 2025-03-28T09:09:29.220226Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-28T09:09:29.220263Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-28T09:09:29.220575Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-28T09:09:29.239916Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-28T09:09:29.240074Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-28T09:09:29.240251Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-28T09:09:29.240377Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:09:29.240477Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:09:29.240560Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3016:2118] sender: [1:3042:2106] recipient: [1:60:2107] 2025-03-28T09:09:29.254984Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-28T09:09:29.255192Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:09:29.271338Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:09:29.271497Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:09:29.271574Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:09:29.271664Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:09:29.271795Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:09:29.271849Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:09:29.271886Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:09:29.271956Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:09:29.283350Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:09:29.283484Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:09:29.295371Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:09:29.295529Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-28T09:09:29.296718Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-28T09:09:29.296769Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-28T09:09:29.296925Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-28T09:09:29.296983Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-28T09:09:29.308969Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 60 PDiskFilter { Property { Type: ROT } } } } Command { QueryBaseConfig { } } } 2025-03-28T09:09:29.309908Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-03-28T09:09:29.309986Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-03-28T09:09:29.310016Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-03-28T09:09:29.310039Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-03-28T09:09:29.310063Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 
2025-03-28T09:09:29.310086Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-03-28T09:09:29.310110Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-03-28T09:09:29.310137Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-03-28T09:09:29.310181Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-03-28T09:09:29.310207Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-03-28T09:09:29.310234Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-03-28T09:09:29.310257Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-03-28T09:09:29.310280Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-03-28T09:09:29.310304Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-03-28T09:09:29.310326Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-03-28T09:09:29.310354Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-03-28T09:09:29.310402Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-03-28T09:09:29.310441Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-03-28T09:09:29.310470Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-03-28T09:09:29.310506Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-03-28T09:09:29.310534Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-03-28T09:09:29.310557Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2025-03-28T09:09:29.310582Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2025-03-28T09:09:29.310614Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2025-03-28T09:09:29.310652Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2025-03-28T09:09:29.310678Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-03-28T09:09:29.310728Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2025-03-28T09:09:29.310758Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2025-03-28T09:09:29.310786Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2025-03-28T09:09:29.310817Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} 
Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2025-03-28T09:09:29.310843Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2025-03-28T09:09:29.310868Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2025-03-28T09:09:29.310895Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Cr ... ER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 204:1000 Path# /dev/disk1 2025-03-28T09:10:02.002445Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 204:1001 Path# /dev/disk2 2025-03-28T09:10:02.002506Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 204:1002 Path# /dev/disk3 2025-03-28T09:10:02.002538Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 205:1000 Path# /dev/disk1 2025-03-28T09:10:02.002571Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 205:1001 Path# /dev/disk2 2025-03-28T09:10:02.002603Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 205:1002 Path# /dev/disk3 2025-03-28T09:10:02.002635Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 206:1000 Path# /dev/disk1 2025-03-28T09:10:02.002668Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 206:1001 Path# /dev/disk2 2025-03-28T09:10:02.012343Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 206:1002 Path# /dev/disk3 2025-03-28T09:10:02.012420Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 207:1000 Path# /dev/disk1 2025-03-28T09:10:02.012458Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 207:1001 Path# /dev/disk2 2025-03-28T09:10:02.012492Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 207:1002 Path# /dev/disk3 2025-03-28T09:10:02.012529Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 208:1000 Path# /dev/disk1 2025-03-28T09:10:02.012564Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 208:1001 Path# /dev/disk2 2025-03-28T09:10:02.012595Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 208:1002 Path# /dev/disk3 2025-03-28T09:10:02.012653Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 209:1000 Path# /dev/disk1 2025-03-28T09:10:02.012705Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 209:1001 Path# /dev/disk2 2025-03-28T09:10:02.012740Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 209:1002 Path# /dev/disk3 2025-03-28T09:10:02.012783Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 210:1000 Path# /dev/disk1 2025-03-28T09:10:02.012825Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 210:1001 Path# /dev/disk2 2025-03-28T09:10:02.012859Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 210:1002 Path# /dev/disk3 2025-03-28T09:10:02.599600Z node 161 :BS_CONTROLLER 
ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.613281s 2025-03-28T09:10:02.599836Z node 161 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.613542s 2025-03-28T09:10:02.657348Z node 161 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 4 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 4 } Host { Key { Fqdn: "::1" 
IcPort: 12050 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 4 } ItemConfigGeneration: 1 } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 180 PDiskFilter { Property { Type: ROT } } ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } 2025-03-28T09:10:02.660741Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 211:1000 Path# /dev/disk1 2025-03-28T09:10:02.660840Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 211:1001 Path# /dev/disk2 2025-03-28T09:10:02.660885Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 211:1002 Path# /dev/disk3 2025-03-28T09:10:02.660920Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 212:1000 Path# /dev/disk1 2025-03-28T09:10:02.660958Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 212:1001 Path# /dev/disk2 2025-03-28T09:10:02.660990Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 212:1002 Path# /dev/disk3 2025-03-28T09:10:02.661022Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 213:1000 Path# /dev/disk1 2025-03-28T09:10:02.661053Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 213:1001 Path# /dev/disk2 2025-03-28T09:10:02.661085Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 213:1002 Path# /dev/disk3 2025-03-28T09:10:02.661119Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 214:1000 Path# /dev/disk1 2025-03-28T09:10:02.661151Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 214:1001 Path# /dev/disk2 2025-03-28T09:10:02.661203Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 214:1002 Path# /dev/disk3 2025-03-28T09:10:02.661235Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 215:1000 Path# /dev/disk1 2025-03-28T09:10:02.661267Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 215:1001 Path# /dev/disk2 2025-03-28T09:10:02.661300Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 215:1002 Path# /dev/disk3 2025-03-28T09:10:02.661330Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 216:1000 Path# /dev/disk1 2025-03-28T09:10:02.661361Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 216:1001 Path# /dev/disk2 2025-03-28T09:10:02.661392Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 216:1002 Path# 
/dev/disk3 2025-03-28T09:10:02.661427Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 217:1000 Path# /dev/disk1 2025-03-28T09:10:02.661482Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 217:1001 Path# /dev/disk2 2025-03-28T09:10:02.661516Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 217:1002 Path# /dev/disk3 2025-03-28T09:10:02.661547Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 218:1000 Path# /dev/disk1 2025-03-28T09:10:02.661581Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 218:1001 Path# /dev/disk2 2025-03-28T09:10:02.661614Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 218:1002 Path# /dev/disk3 2025-03-28T09:10:02.661642Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 219:1000 Path# /dev/disk1 2025-03-28T09:10:02.661674Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 219:1001 Path# /dev/disk2 2025-03-28T09:10:02.661710Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 219:1002 Path# /dev/disk3 2025-03-28T09:10:02.661744Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 220:1000 Path# /dev/disk1 2025-03-28T09:10:02.661777Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 220:1001 Path# /dev/disk2 2025-03-28T09:10:02.661811Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 220:1002 Path# /dev/disk3 >> PgCatalog::PgTables [GOOD] >> Yq_1::Basic [GOOD] >> Yq_1::Basic_EmptyList ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] Test command err: 2025-03-28T09:09:53.970661Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785097338932060:2219];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:53.970896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:09:54.076549Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785104690038603:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:54.400053Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00192d/r3tmp/tmpTQS45S/pdisk_1.dat 2025-03-28T09:09:54.485581Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:09:54.486702Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:09:54.983371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:55.045909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:55.045981Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:55.056396Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:09:55.061268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:55.131548Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:55.138520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:55.138597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 30604, node 1 2025-03-28T09:09:55.174748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:55.191644Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:09:55.202824Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:09:55.393096Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00192d/r3tmp/yandex6O3aq6.tmp 2025-03-28T09:09:55.393143Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00192d/r3tmp/yandex6O3aq6.tmp 2025-03-28T09:09:55.393337Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00192d/r3tmp/yandex6O3aq6.tmp 2025-03-28T09:09:55.393618Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:09:55.543736Z INFO: TTestServer started on Port 16565 GrpcPort 30604 TClient is connected to server localhost:16565 PQClient connected to localhost:30604 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:56.230768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:09:56.340599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
2025-03-28T09:09:58.970822Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785097338932060:2219];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:58.970907Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:59.045053Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785104690038603:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:59.045128Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:00.721111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785127403704153:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:00.721284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:00.721547Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785130459842553:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:00.721719Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785130459842521:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:00.721797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:00.728043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785127403704188:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:00.730680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785127403704194:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:00.730775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:00.733627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:10:00.753421Z node 2 :TX_PROXY ERROR: Actor# [2:7486785130459842559:2128] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:10:00.800329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785130459842558:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:10:00.802932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785127403704190:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:10:00.962014Z node 1 :TX_PROXY ERROR: Actor# [1:7486785127403704279:2829] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:10:01.019305Z node 2 :TX_PROXY ERROR: Actor# [2:7486785130459842586:2134] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:10:01.417843Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486785130459842597:2321], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:10:01.418928Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2YwMTg2NTYtMmI0ZWU3YTctYzY3MGMwYy1hMTdlOTdhMw==, ActorId: [2:7486785130459842518:2311], ActorState: ExecuteState, TraceId: 01jqe07w61dnjnkr2w8dmnnbr0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:10:01.421112Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:10:01.420626Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486785127403704290:2357], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:10:01.422130Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTAwMzFlZmYtNmRjZGM4YzUtNjk3MWJlMTEtOGIwZWE1ZTA=, ActorId: [1:7486785127403 ... E INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] bootstrapping 14 [2:7486785173409516375:2434] 2025-03-28T09:10:10.453992Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 1 [2:7486785173409516375:2434] 2025-03-28T09:10:10.455229Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 5, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 1 [2:7486785173409516381:2440] 2025-03-28T09:10:10.457626Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] bootstrapping 10 [2:7486785173409516382:2441] 2025-03-28T09:10:10.459005Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [1:7486785170353378516:2502] 2025-03-28T09:10:10.460852Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 1 [1:7486785170353378516:2502] 2025-03-28T09:10:10.461687Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 1 [1:7486785170353378550:2508] 2025-03-28T09:10:10.462487Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] bootstrapping 2 [1:7486785170353378551:2509] 2025-03-28T09:10:10.464384Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 1 [1:7486785170353378551:2509] 2025-03-28T09:10:10.466554Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037896, Partition: 10, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 1 [2:7486785173409516382:2441] 2025-03-28T09:10:10.531357Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:10:10.534030Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:10:10.541087Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:10:10.544663Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:10:10.544814Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:10:10.543258Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:10:10.543880Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:10:10.544324Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig Create topic result: 1 2025-03-28T09:10:10.614837Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7486785170353378792:3913]: Request location
2025-03-28T09:10:10.616170Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785170353378823:3932] connected; active server actors: 1 2025-03-28T09:10:10.616231Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 1, Generation 1 2025-03-28T09:10:10.616250Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 2, Generation 1 2025-03-28T09:10:10.616261Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 1, Generation 1 2025-03-28T09:10:10.616272Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 1, Generation 1 2025-03-28T09:10:10.616282Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 2, Generation 1 2025-03-28T09:10:10.616293Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 2, Generation 1 2025-03-28T09:10:10.616303Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 2, Generation 1 2025-03-28T09:10:10.616319Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 1, Generation 1 2025-03-28T09:10:10.616329Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 1, Generation 1 2025-03-28T09:10:10.616339Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 1, Generation 1 2025-03-28T09:10:10.616349Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 2, Generation 1 2025-03-28T09:10:10.616358Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 2, Generation 1 2025-03-28T09:10:10.616368Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 1, Generation 1 2025-03-28T09:10:10.616376Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 1, Generation 1 2025-03-28T09:10:10.616388Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 2, Generation 1 2025-03-28T09:10:10.618845Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7486785170353378792:3913]: Got location 2025-03-28T09:10:10.622424Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785170353378823:3932] disconnected; active server actors: 1 2025-03-28T09:10:10.622450Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785170353378823:3932] disconnected no session 
2025-03-28T09:10:10.622960Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7486785170353378835:3941]: Request location 2025-03-28T09:10:10.623167Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785170353378837:3943] connected; active server actors: 1 2025-03-28T09:10:10.623204Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 2, Generation 1 2025-03-28T09:10:10.623216Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 1, Generation 1 2025-03-28T09:10:10.623224Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 2, Generation 1 2025-03-28T09:10:10.623271Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7486785170353378835:3941]: Got location 2025-03-28T09:10:10.623561Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785170353378837:3943] disconnected; active server actors: 1 2025-03-28T09:10:10.623572Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785170353378837:3943] disconnected no session 2025-03-28T09:10:10.623808Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7486785170353378839:3945]: Request location 2025-03-28T09:10:10.624081Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785170353378841:3947] connected; active server actors: 1 2025-03-28T09:10:11.150810Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486785174648346217:2524] TxId: 281474976710683. Ctx: { TraceId: 01jqe085w095fv36ck6x3dfzx1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzg5MGQ5NTEtMTI1NGE1NzUtZDU1NWM4OWUtYjA5MTM4YTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2025-03-28T09:10:11.151903Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486785174648346226:2549], TxId: 281474976710683, task: 2. Ctx: { TraceId : 01jqe085w095fv36ck6x3dfzx1. SessionId : ydb://session/3?node_id=1&id=Nzg5MGQ5NTEtMTI1NGE1NzUtZDU1NWM4OWUtYjA5MTM4YTY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486785174648346217:2524], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-03-28T09:10:11.152273Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486785174648346227:2550], TxId: 281474976710683, task: 4. Ctx: { CustomerSuppliedId : . TraceId : 01jqe085w095fv36ck6x3dfzx1. SessionId : ydb://session/3?node_id=1&id=Nzg5MGQ5NTEtMTI1NGE1NzUtZDU1NWM4OWUtYjA5MTM4YTY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486785174648346217:2524], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-03-28T09:10:12.151284Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Nzg5MGQ5NTEtMTI1NGE1NzUtZDU1NWM4OWUtYjA5MTM4YTY=, ActorId: [1:7486785170353378728:2524], ActorState: ExecuteState, TraceId: 01jqe085w095fv36ck6x3dfzx1, Create QueryResponse for error on request, msg: 2025-03-28T09:10:12.152988Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "Failed to send EvStartKqpTasksRequest because node is unavailable: 2" severity: 1 } } TxMeta { id: "01jqe086e34fqp2e505v9m48f0" } } YdbStatus: UNAVAILABLE ConsumedRu: 380 } 2025-03-28T09:10:12.256413Z node 1 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710684. Failed to resolve tablet: 72075186224037891 after several retries. 2025-03-28T09:10:12.256577Z node 1 :KQP_EXECUTER WARN: ActorId: [1:7486785178943313555:2553] TxId: 281474976710684. Ctx: { TraceId: 01jqe087b119tny06bzv84w96z, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWU4YzllMjgtMzIxZDY4Y2UtYTU2MDU4MzQtNzVjZjhkNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-03-28T09:10:12.256780Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWU4YzllMjgtMzIxZDY4Y2UtYTU2MDU4MzQtNzVjZjhkNzM=, ActorId: [1:7486785178943313552:2553], ActorState: ExecuteState, TraceId: 01jqe087b119tny06bzv84w96z, Create QueryResponse for error on request, msg: 2025-03-28T09:10:12.258185Z node 1 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqe087b7084f253cj09dzg0n" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } >> KqpJoinOrder::TPCDS61+ColumnStore >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> PgCatalog::PgTables [GOOD] Test command err: Trying to start YDB, gRPC: 2402, MsgBus: 3834 2025-03-28T09:03:30.295052Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486783453394844524:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:30.297739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012dc/r3tmp/tmpqDUZ2n/pdisk_1.dat 2025-03-28T09:03:30.741486Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:03:30.763919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:03:30.764025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:03:30.770395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2402, node 1 2025-03-28T09:03:30.856135Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:03:30.856157Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:03:30.856164Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:03:30.856315Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3834 TClient is connected to server localhost:3834 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:03:31.446041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:03:31.467904Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 1042 2025-03-28T09:03:33.516954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce_pgbpchar_17472595041006102391_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) 2025-03-28T09:03:33.704212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783466279747177:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:33.704356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:33.704566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486783466279747189:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:03:33.708600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:03:33.720322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486783466279747191:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:03:33.788982Z node 1 :TX_PROXY ERROR: Actor# [1:7486783466279747243:2401] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:03:34.127873Z node 1 :TX_DATASHARD CRIT: Exception while executing KQP transaction [0:281474976710663] at 72075186224037888: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-03-28T09:03:34.131364Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710663 at tablet 72075186224037888 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2025-03-28T09:03:34.131715Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486783470574714593:2339] TxId: 281474976710663. Ctx: { TraceId: 01jqdzw2a45w64g3y8gpm0gavm, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmI3ZDQxZGYtZDQzZTNlZGYtMjBlNGM0ZDktMTAxZGQ4MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; 2025-03-28T09:03:34.142582Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmI3ZDQxZGYtZDQzZTNlZGYtMjBlNGM0ZDktMTAxZGQ4MDU=, ActorId: [1:7486783466279747174:2339], ActorState: ExecuteState, TraceId: 01jqdzw2a45w64g3y8gpm0gavm, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-03-28T09:03:34.177919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Typemod mismatch, got type _pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce__pgbpchar_17472595041006102391_5352544928909966465 (key, value) VALUES ( '0'::int2, '{abcd,abcd}'::_bpchar ) 2025-03-28T09:03:34.531307Z node 1 :TX_DATASHARD CRIT: Exception while executing KQP transaction [0:281474976710668] at 72075186224037889: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2025-03-28T09:03:34.536429Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710668 at tablet 72075186224037889 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2025-03-28T09:03:34.536695Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486783470574714732:2372] TxId: 281474976710668. Ctx: { TraceId: 01jqdzw2wkaq11rb61r7z0d9qa, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmIwNWRlZjgtMWVjNDIyOWEtODM3ZTEyNWYtODZhYWJiNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; 2025-03-28T09:03:34.536921Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmIwNWRlZjgtMWVjNDIyOWEtODM3ZTEyNWYtODZhYWJiNjY=, ActorId: [1:7486783470574714684:2372], ActorState: ExecuteState, TraceId: 01jqdzw2wkaq11rb61r7z0d9qa, Create QueryResponse for error on request, msg:
: Error: Error executing transaction (ExecError): Execution failed
: Error: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 1042 2025-03-28T09:03:34.579345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_2169371982377735806_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 4 --!syntax_pg INSERT INTO Coerce_pgbpchar_2169371982377735806_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) abcd 2025-03-28T09:03:35.102281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_2169371982377735806_5352544928909966465' Typemod mismatch, got type _pgbpchar for column value, type mod , but expected 4 --!syntax_pg INSERT INTO Coerce__pgbpchar_2169371982377735806_5352544928909966465 (key, value) VALUES ( '0'::int2, '{abcd,abcd}'::_bpchar ) 2025-03-28T09:03:35.295352Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486783453394844524:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:03:35.295425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {abcd,abcd} 1042 2025-03-28T09:03:35.581167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480
: Error: ... -03-28T09:09:31.481972Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:31.482144Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:31.491477Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6154, node 13 2025-03-28T09:09:31.707676Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:31.707714Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:31.707727Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:31.707937Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21295 TClient is connected to server localhost:21295 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:09:33.395678Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:09:35.932456Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7486784999225887572:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:35.932581Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:43.728393Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486785055060462957:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:43.728558Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:43.729291Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486785055060462993:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:43.737557Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:09:43.787726Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7486785055060462995:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:09:43.870390Z node 13 :TX_PROXY ERROR: Actor# [13:7486785055060463047:2369] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9869, MsgBus: 23573 2025-03-28T09:09:45.634448Z node 14 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[14:7486785065461990012:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:45.634695Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012dc/r3tmp/tmpk8iksW/pdisk_1.dat 2025-03-28T09:09:45.924728Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:45.960483Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:45.960620Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:45.962762Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9869, node 14 2025-03-28T09:09:46.089310Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:46.089367Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:46.089381Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:46.089595Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23573 TClient is connected to server localhost:23573 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:49.308082Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:09:50.634970Z node 14 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7486785065461990012:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:50.635096Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:58.802961Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7486785121296565532:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:58.803348Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:58.804339Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7486785121296565567:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:58.811082Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:09:58.842615Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7486785121296565569:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:09:58.927900Z node 14 :TX_PROXY ERROR: Actor# [14:7486785121296565622:2369] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:59.049169Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:09:59.243213Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:10:00.971104Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:10:00.971155Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:09.925941Z node 14 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 14, TabletId: 72075186224037888 not found 2025-03-28T09:10:10.044580Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:10:12.039763Z node 14 :KQP_COMPUTE ERROR: SelfId: [14:7486785177131141112:2489], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=14&id=YWZjZDk0ZGQtNGFiYTY3ODEtZGFjMzk4ZDMtODk3YTI0NWI=. TraceId : 01jqe085t900tj3yxwbsaa8wfn. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: "pg_proc" }. 2025-03-28T09:10:12.063139Z node 14 :KQP_COMPUTE ERROR: SelfId: [14:7486785177131141113:2490], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jqe085t900tj3yxwbsaa8wfn. SessionId : ydb://session/3?node_id=14&id=YWZjZDk0ZGQtNGFiYTY3ODEtZGFjMzk4ZDMtODk3YTI0NWI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [14:7486785177131141109:2483], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:10:12.074110Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=YWZjZDk0ZGQtNGFiYTY3ODEtZGFjMzk4ZDMtODk3YTI0NWI=, ActorId: [14:7486785172836173795:2483], ActorState: ExecuteState, TraceId: 01jqe085t900tj3yxwbsaa8wfn, Create QueryResponse for error on request, msg: |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |80.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init >> BasicUsage::RetryDiscoveryWithCancel [GOOD] >> BasicUsage::RecreateObserver >> TTopicApiDescribes::GetPartitionDescribe [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH14 >> Yq_1::DescribeJob [GOOD] >> Yq_1::DescribeQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] Test command err: 2025-03-28T09:09:52.206446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:52.207162Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:52.207240Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00156d/r3tmp/tmpfVsxlD/pdisk_1.dat 2025-03-28T09:09:52.876895Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:09:52.876984Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:09:52.877027Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:09:52.877151Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-03-28T09:09:52.877186Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:09:53.394651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-03-28T09:09:53.396144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:09:53.396415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:09:53.396700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:09:53.396859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:09:53.396978Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:09:53.397929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:09:53.398136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:09:53.398190Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:09:53.398231Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T09:09:53.398458Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:09:53.398519Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:09:53.398611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:09:53.398726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:09:53.398772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:09:53.398816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:09:53.398965Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:09:53.399583Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:09:53.399631Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T09:09:53.399799Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:09:53.399859Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:09:53.399934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:09:53.399986Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:09:53.400035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:09:53.400147Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:09:53.400604Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:09:53.400637Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T09:09:53.400743Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:09:53.400790Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:09:53.400833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:09:53.400887Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:09:53.400936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-03-28T09:09:53.400966Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:09:53.401008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:09:53.415653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:09:53.416494Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:09:53.416575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-03-28T09:09:53.416795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:09:53.418366Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:09:53.418432Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T09:09:53.418482Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-03-28T09:09:53.418663Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-03-28T09:09:53.419341Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:09:53.419400Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:09:53.419444Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:09:53.419599Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-03-28T09:09:53.419638Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-28T09:09:53.419747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-03-28T09:09:53.419816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-03-28T09:09:53.419869Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-03-28T09:09:53.507478Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-03-28T09:09:53.507632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-03-28T09:09:53.507703Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:53.508266Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-28T09:09:53.508367Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-03-28T09:09:53.561168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:53.561310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:53.573355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:53.664279Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-03-28T09:09:53.665091Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:09:53.665154Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:09:53.665189Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:09:53.665360Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-03-28T09:09:53.665411Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:09:53.665505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:09:53.665647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... 76715662:1, at schemeshard: 72057594046644480 2025-03-28T09:10:15.270333Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:10:15.270709Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-03-28T09:10:15.270761Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:10:15.284137Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:10:15.284223Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-03-28T09:10:15.284301Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:10:15.284392Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:10:15.284684Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [3:994:2794], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:10:15.284725Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:10:15.284755Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:10:15.284905Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [3:666:2570], Recipient [3:409:2404]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 666 RawX2: 12884904458 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-28T09:10:15.284943Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-03-28T09:10:15.285010Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 666 RawX2: 12884904458 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-28T09:10:15.285047Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715662, tablet: 72075186224037888, partId: 1 2025-03-28T09:10:15.285173Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 
281474976715662:1, at schemeshard: 72057594046644480, message: Source { RawX1: 666 RawX2: 12884904458 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-28T09:10:15.285211Z node 3 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-03-28T09:10:15.285274Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 666 RawX2: 12884904458 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-28T09:10:15.285324Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:1, shardIdx: 72057594046644480:1, datashard: 72075186224037888, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-03-28T09:10:15.285365Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-03-28T09:10:15.285414Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:1, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-03-28T09:10:15.285460Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:1, datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-03-28T09:10:15.285493Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:1 129 -> 240 2025-03-28T09:10:15.285639Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:10:15.286179Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-03-28T09:10:15.286218Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:10:15.286259Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:1 2025-03-28T09:10:15.286360Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:935:2744] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-03-28T09:10:15.286425Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:666:2570] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-03-28T09:10:15.286542Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037889 state Ready 2025-03-28T09:10:15.286625Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-28T09:10:15.286900Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:10:15.286943Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:10:15.286991Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-03-28T09:10:15.287048Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:1ProgressState, operation type TxCopyTable 2025-03-28T09:10:15.287097Z 
node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:10:15.287140Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 281474976715662:1, name: CopyTableBarrier, done: 1, blocked: 1, parts count: 2 2025-03-28T09:10:15.287193Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715662, done: 1, blocked: 1 2025-03-28T09:10:15.287289Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:1 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715662 Name: CopyTableBarrier }, at tablet# 72057594046644480 2025-03-28T09:10:15.287334Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:1 240 -> 240 2025-03-28T09:10:15.287687Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-03-28T09:10:15.287737Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:10:15.288086Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:10:15.288121Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:1 2025-03-28T09:10:15.288232Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:10:15.288266Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:10:15.288313Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:1, at schemeshard: 72057594046644480 2025-03-28T09:10:15.288362Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715662:1 ProgressState 2025-03-28T09:10:15.288490Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:10:15.288533Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:1 progress is 2/2 2025-03-28T09:10:15.288587Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-03-28T09:10:15.288650Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:1 progress is 2/2 2025-03-28T09:10:15.288691Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-03-28T09:10:15.288739Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 2/2, is published: true 2025-03-28T09:10:15.288825Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:905:2724] message: TxId: 281474976715662 2025-03-28T09:10:15.288883Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 2/2 2025-03-28T09:10:15.288949Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-03-28T09:10:15.288991Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:0 2025-03-28T09:10:15.289063Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 2 2025-03-28T09:10:15.289103Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:1 
2025-03-28T09:10:15.289127Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:1 2025-03-28T09:10:15.289216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-03-28T09:10:15.289261Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-28T09:10:15.289778Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:10:15.289868Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:905:2724] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-03-28T09:10:15.291320Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:919:2731], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:10:15.291367Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:10:15.291396Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-03-28T09:10:15.508511Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [3:1021:2811], serverId# [3:1022:2812], sessionId# [0:0:0] 2025-03-28T09:10:15.508717Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe08ag588jtjvpwz6t2d272, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjM4M2NkMmQtNzU1NzMxZC0yZWY3OWY0ZC01MWM3OWM0Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } 2025-03-28T09:10:15.764689Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe08aq29rqzq9y5h03pphxz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDcxMGUyY2UtZGI0MDM5NTYtOGYzMDY4ZjEtYjYzOTBiMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe [GOOD] Test command err: 2025-03-28T09:09:56.751098Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785113908004479:2240];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:56.751330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:09:57.231634Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785113051671946:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:57.232064Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:09:57.232121Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:09:57.224697Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001929/r3tmp/tmpYYOIkn/pdisk_1.dat 2025-03-28T09:09:57.783268Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:57.795967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:57.796077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:57.797510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:57.797580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:57.805222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:57.811660Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:09:57.826051Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:57.827595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22787, node 1 2025-03-28T09:09:58.182986Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001929/r3tmp/yandexI6MPgX.tmp 2025-03-28T09:09:58.183016Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001929/r3tmp/yandexI6MPgX.tmp 2025-03-28T09:09:58.187934Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001929/r3tmp/yandexI6MPgX.tmp 2025-03-28T09:09:58.188134Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:09:58.279429Z INFO: TTestServer started on Port 61116 GrpcPort 22787 TClient is connected to server localhost:61116 PQClient connected to localhost:22787 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:58.601821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:09:58.752147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:10:01.743706Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785113908004479:2240];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:01.743786Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:01.946818Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785113051671946:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:01.946889Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:03.568888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785143972776505:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:03.578997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:03.580052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785143972776521:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:03.584522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:10:03.623593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785143972776523:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:10:04.078972Z node 1 :TX_PROXY ERROR: Actor# [1:7486785143972776591:2769] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:10:04.117356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:10:04.124031Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486785148267743911:2356], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:10:04.125940Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTEzZWMyZGEtODNhZmZkODQtN2Y3ZjRiZGQtYTU3NGEyMjU=, ActorId: [1:7486785143972776503:2341], ActorState: ExecuteState, TraceId: 01jqe07yykbdszkvvwpsbpgr5f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:10:04.124303Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486785143116443274:2322], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:10:04.126057Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWQ2ZTQ2YTctN2JhNzY2ZWYtODcyZGYwMGMtMzRlMjMxMzU=, ActorId: [2:7486785143116443241:2316], ActorState: ExecuteState, TraceId: 01jqe07z8c4x1zeshe3mn6yy71, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:10:04.128063Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:10:04.130923Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:10:04.242333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:10:04.423216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:10:04.829802Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqe0800qcz6sgc6pdv0tcjr3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDllZDMxZTQtYWU5NDYzM2YtNmEyZmNlZDEtZDk5ZTQ0M2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486785148267744363:3117] === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-03-28T09:10:10.928347Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-03-28T09:10:11.3472 ... 
1 :PERSQUEUE INFO: [rt3.dc1--topic-x:4:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:10:12.032736Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037899, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 4 generation 2 [1:7486785178332517099:2566] 2025-03-28T09:10:12.035989Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:6:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:10:12.036002Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037893, Partition: 6, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 2 [1:7486785178332517087:2559] 2025-03-28T09:10:12.036311Z node 1 :PERSQUEUE INFO: [rt3.dc1--topic-x:14:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:10:12.036322Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037898, Partition: 14, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 2 [1:7486785178332517091:2560] 2025-03-28T09:10:12.038651Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037896, NodeId 1, Generation 2 2025-03-28T09:10:12.049391Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:10:12.049415Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897] has a tx writes info 2025-03-28T09:10:12.049714Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037899, NodeId 1, Generation 2 2025-03-28T09:10:12.049734Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037893, NodeId 1, Generation 2 2025-03-28T09:10:12.049747Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037898, NodeId 1, Generation 2 2025-03-28T09:10:12.055267Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:10:12.056115Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892] has a tx writes info 2025-03-28T09:10:12.058182Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:10:12.058206Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895] has a tx writes info 2025-03-28T09:10:12.060002Z node 2 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:10:12.060038Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894] has a tx writes info 2025-03-28T09:10:12.061965Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] bootstrapping 13 [2:7486785181771150118:2476] 2025-03-28T09:10:12.067152Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] bootstrapping 7 [2:7486785181771150117:2475] 2025-03-28T09:10:12.068591Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] bootstrapping 0 [2:7486785181771150127:2479] 2025-03-28T09:10:12.070784Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] bootstrapping 8 [2:7486785181771150133:2482] 2025-03-28T09:10:12.070789Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] bootstrapping 12 [2:7486785181771150134:2483] 
2025-03-28T09:10:12.073548Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] bootstrapping 3 [2:7486785181771150126:2478] 2025-03-28T09:10:12.073835Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] bootstrapping 9 [2:7486785181771150130:2480] 2025-03-28T09:10:12.076617Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] bootstrapping 2 [2:7486785181771150131:2481] 2025-03-28T09:10:12.078431Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:13:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:10:12.078476Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 13, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 2 [2:7486785181771150118:2476] 2025-03-28T09:10:12.080255Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:10:12.080289Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 2 [2:7486785181771150127:2479] 2025-03-28T09:10:12.082003Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:12:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:10:12.082035Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 12, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 2 [2:7486785181771150134:2483] 2025-03-28T09:10:12.082165Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:8:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:10:12.082177Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:7:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:10:12.082184Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037894, Partition: 8, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 2 [2:7486785181771150133:2482] 2025-03-28T09:10:12.082192Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037897, Partition: 7, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 2 [2:7486785181771150117:2475] 2025-03-28T09:10:12.083173Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037894, NodeId 2, Generation 2 2025-03-28T09:10:12.083192Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037897, NodeId 2, Generation 2 2025-03-28T09:10:12.084036Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:10:12.084058Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 2 [2:7486785181771150131:2481] 2025-03-28T09:10:12.084752Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:9:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
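The exchange that follows — a "new Describe partition request", TDescribePartitionActor bootstrapping, a location request to the read balancer, and a DescribePartitionResult answer — is the server side of the public Topic API. A sketch of the matching client call, assuming a C++ SDK recent enough to expose DescribePartition on the topic client; the settings names are inferred from the include_location/include_stats fields of the logged requests, so treat them as assumptions:

#include <ydb/public/sdk/cpp/client/ydb_topic/topic.h>

// driver is an already-constructed NYdb::TDriver (see the earlier sketch).
void DescribeOnePartition(NYdb::TDriver& driver) {
    NYdb::NTopic::TTopicClient topic(driver);

    // Mirrors the logged requests: path "/Root/PQ//rt3.dc1--topic-x",
    // partition_id 1 with include_location, and a second request that also
    // sets include_stats.
    auto settings = NYdb::NTopic::TDescribePartitionSettings()
        .IncludeLocation(true)
        .IncludeStats(true);

    auto result = topic.DescribePartition(
        "/Root/PQ//rt3.dc1--topic-x", /* partitionId = */ 1, settings)
        .GetValueSync();

    // On success the result carries the partition's active flag and
    // partition_location { node_id, generation }, matching the
    // "Got response: ... DescribePartitionResult" blocks below; a bad path
    // yields SCHEME_ERROR, as the "bad-topic" request at the end of this
    // test demonstrates.
    Y_ENSURE(result.IsSuccess()
          || result.GetStatus() == NYdb::EStatus::SCHEME_ERROR);
}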
2025-03-28T09:10:12.084802Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037895, Partition: 9, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 2 [2:7486785181771150130:2480] 2025-03-28T09:10:12.085260Z node 2 :PERSQUEUE INFO: [rt3.dc1--topic-x:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:10:12.085283Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037895, NodeId 2, Generation 2 2025-03-28T09:10:12.085287Z node 2 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 2 [2:7486785181771150126:2478] 2025-03-28T09:10:12.085867Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037892, NodeId 2, Generation 2 2025-03-28T09:10:12.354243Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-03-28T09:10:12.354355Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 1 include_location: true 2025-03-28T09:10:12.354411Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7486785182627484605:2582]: Bootstrap 2025-03-28T09:10:12.358831Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7486785182627484605:2582]: Request location 2025-03-28T09:10:12.359612Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785182627484607:2583] connected; active server actors: 1 2025-03-28T09:10:12.359970Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-03-28T09:10:12.360112Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7486785182627484605:2582]: Got location 2025-03-28T09:10:12.360825Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785182627484607:2583] disconnected; active server actors: 1 2025-03-28T09:10:12.360853Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785182627484607:2583] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } } } } 2025-03-28T09:10:12.372607Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785182627484611:2586] connected; active server actors: 1 2025-03-28T09:10:12.373284Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2025-03-28T09:10:12.370840Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-03-28T09:10:12.370964Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 3 include_stats: true include_location: true 2025-03-28T09:10:12.371022Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7486785182627484608:2584]: Bootstrap 2025-03-28T09:10:12.371680Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7486785182627484608:2584]: Request location 2025-03-28T09:10:12.373922Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7486785182627484608:2584]: Got location Got response: operation { ready: true status: SUCCESS result { 
[type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 3 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1743153012 nanos: 67000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_location { node_id: 2 generation: 2 } } } } } 2025-03-28T09:10:12.378280Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785182627484611:2586] disconnected; active server actors: 1 2025-03-28T09:10:12.378299Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785182627484611:2586] disconnected no session 2025-03-28T09:10:12.378398Z node 1 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-03-28T09:10:12.378461Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2025-03-28T09:10:12.378527Z node 1 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[1:7486785182627484615:2587]: Bootstrap Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } 2025-03-28T09:10:12.814129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:10:12.814167Z node 1 :IMPORT WARN: Table profiles were not loaded |80.8%| [TA] $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} >> SystemView::ShowCreateTablePartitionSettings [GOOD] >> SystemView::ShowCreateTableReadReplicas |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> KqpExtractPredicateLookup::ComplexRange [GOOD] >> YdbOlapStore::LogPagingBefore [GOOD] >> YdbOlapStore::LogPagingBetween |80.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... 
results_accumulator.log} |80.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> TTopicApiDescribes::DescribeConsumer [GOOD] >> PrivateApi::GetTask [GOOD] >> PrivateApi::Nodes >> TIcNodeCache::GetNodesInfoTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer [GOOD] Test command err: 2025-03-28T09:09:59.259631Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785125632154727:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:59.259680Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:09:59.551946Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785123122457245:2215];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:00.192173Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:10:00.227817Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:10:00.228227Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00191e/r3tmp/tmp6oG1hg/pdisk_1.dat 2025-03-28T09:10:00.397056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:10:01.107034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:01.107133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:01.108507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:01.108560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:01.124362Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:10:01.124498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:10:01.139507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:10:01.296814Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:01.303477Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 7262, node 1 2025-03-28T09:10:01.666643Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00191e/r3tmp/yandexIwYldE.tmp 2025-03-28T09:10:01.666671Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00191e/r3tmp/yandexIwYldE.tmp 2025-03-28T09:10:01.666862Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00191e/r3tmp/yandexIwYldE.tmp 
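Before any describe call can succeed, each of these tests seeds the legacy PQ configuration tables with the parameterized YQL quoted verbatim in the previous test's log (the ===Run query:``DECLARE ...`` block, executed with topic = topic-x, dc = dc1). A sketch of running that exact statement through the table client; the helper name and error handling are mine, the YQL is from the log:

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

void RegisterTopic(NYdb::NTable::TSession& session,
                   i64 version, const TString& path, const TString& cluster) {
    // Statement copied from the log's ===Run query:`` ... `` block.
    static const char* query = R"(
        DECLARE $version as Int64;
        DECLARE $path AS Utf8;
        DECLARE $cluster as Utf8;
        UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster);
        UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);
    )";

    auto params = NYdb::TParamsBuilder()
        .AddParam("$version").Int64(version).Build()
        .AddParam("$path").Utf8(path).Build()
        .AddParam("$cluster").Utf8(cluster).Build()
        .Build();

    auto result = session.ExecuteDataQuery(
        query, NYdb::NTable::TTxControl::BeginTx().CommitTx(),
        std::move(params)).GetValueSync();
    Y_ENSURE(result.IsSuccess(), result.GetIssues().ToString());
}

Until the tables exist, reads against them fail with the "Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]'" SCHEME_ERROR that recurs throughout these logs.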
2025-03-28T09:10:01.666985Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:10:01.877474Z INFO: TTestServer started on Port 29700 GrpcPort 7262 TClient is connected to server localhost:29700 PQClient connected to localhost:7262 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:10:02.760364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:10:02.915288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:10:03.583109Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:04.260914Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785125632154727:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:04.260995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:04.516091Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785123122457245:2215];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:04.516184Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:08.720095Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785161777163153:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:08.720330Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:08.720740Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785161777163188:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:08.733526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-28T09:10:08.760665Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785161777163190:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-28T09:10:08.835229Z node 1 :KQP_PROXY ERROR: TraceId: "01jqe07z5c8qzhjzqhg4dtrx7e", Request deadline has expired for 0.144515s seconds 2025-03-28T09:10:08.921888Z node 2 :TX_PROXY ERROR: Actor# [2:7486785161777163217:2187] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:10:09.570643Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486785164286861432:2357], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:10:09.572804Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTQ4NDdmYzQtZTNlZWI4M2EtYWJkNzg1NjctY2Y4OGE1Yjc=, ActorId: [1:7486785164286861379:2350], ActorState: ExecuteState, TraceId: 01jqe08469ajp45br0zs1yq5t4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:10:09.585010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:10:09.587233Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:10:09.582465Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486785161777163224:2330], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:10:09.583889Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDU4ODRiNTEtNmI1M2MwYmItOTYzYjE5YjEtYTU0ZWUwYzE=, ActorId: [2:7486785161777163150:2320], ActorState: ExecuteState, TraceId: 01jqe0842520ye1ebf4h1xhm03, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:10:09.587356Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:10:09.725549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:10:09.947416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:10:10.384231Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqe085dm926g2xfg372e107c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2Y4MTY3YTQtOGFjMGNmYjAtMWM0ZDI4ZWQtZjFhN2M5NGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486785172876796483:3119] 20 ... 
partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1743153019 nanos: 102000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 11 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1743153019 nanos: 378000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1743153019 nanos: 431000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 12 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1743153019 nanos: 538000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1743153019 nanos: 605000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 13 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1743153019 nanos: 566000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1743153019 nanos: 602000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } partitions { partition_id: 14 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1743153019 nanos: 378000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1743153019 nanos: 443000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } } } } 2025-03-28T09:10:20.671173Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-03-28T09:10:20.671254Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" include_location: true 2025-03-28T09:10:20.671844Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7486785215826471880:2646]: Request location 2025-03-28T09:10:20.675366Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785215826471882:2647] connected; active server actors: 1 2025-03-28T09:10:20.681127Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 2 2025-03-28T09:10:20.681153Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-03-28T09:10:20.681164Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 2 2025-03-28T09:10:20.681176Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2025-03-28T09:10:20.681186Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 2 2025-03-28T09:10:20.681196Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 2 2025-03-28T09:10:20.681206Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 2 2025-03-28T09:10:20.681217Z node 2 :PERSQUEUE_READ_BALANCER 
DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 2 2025-03-28T09:10:20.681237Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 2 2025-03-28T09:10:20.681250Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 2 2025-03-28T09:10:20.681262Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 2 2025-03-28T09:10:20.681273Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 2 2025-03-28T09:10:20.681282Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 2 2025-03-28T09:10:20.681291Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 2 2025-03-28T09:10:20.681311Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 2 2025-03-28T09:10:20.682087Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7486785215826471880:2646]: Got location 2025-03-28T09:10:20.683062Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785215826471882:2647] disconnected; active server actors: 1 2025-03-28T09:10:20.683082Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785215826471882:2647] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1743153017844 tx_id: 281474976710676 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 
13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } } } } 2025-03-28T09:10:20.689710Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-03-28T09:10:20.689781Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1743153017844 tx_id: 281474976710676 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } } } } 2025-03-28T09:10:20.704842Z node 1 :PQ_READ_PROXY DEBUG: new Describe consumer request 2025-03-28T09:10:20.704934Z node 1 :PQ_READ_PROXY DEBUG: TDescribeConsumerActor for request path: "/Root/PQ//bad-topic" consumer: "my-consumer" include_stats: true include_location: true Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] >> TCacheTest::RacyRecreateAndSync >> TCacheTest::WatchRoot >> Yq_1::DeleteQuery [GOOD] >> TCacheTest::CheckSystemViewAccess >> Yq_1::CreateConnections_With_Idempotency [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest [GOOD] Test command err: 2025-03-28T09:10:01.168734Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785135244880397:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:01.168969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:10:01.459839Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785132617995932:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:01.460183Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:10:02.112504Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001915/r3tmp/tmpUhK8OG/pdisk_1.dat 2025-03-28T09:10:02.277618Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:10:02.470825Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:10:02.468773Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:10:02.889368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:02.889485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:02.899637Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:02.902001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:02.902053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:02.905266Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:10:02.935514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:10:02.936393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65197, node 1 2025-03-28T09:10:03.403928Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001915/r3tmp/yandex93K53p.tmp 2025-03-28T09:10:03.403949Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001915/r3tmp/yandex93K53p.tmp 2025-03-28T09:10:03.404140Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001915/r3tmp/yandex93K53p.tmp 2025-03-28T09:10:03.404256Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:10:03.511940Z INFO: TTestServer started on Port 1211 GrpcPort 65197 TClient is connected to server localhost:1211 PQClient connected to localhost:65197 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:10:04.372155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:04.416596Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
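The KQP_WORKLOAD_SERVICE warnings that follow (and that open every test in this file) are a bootstrap race rather than real failures: the first queries find no default resource pool (NOT_FOUND), a TPoolCreatorActor creates it, a scheduled retry doublechecks once the create transaction completes, and a concurrent creator is absorbed via "path exist, request accepts it". The shape of that idempotent ensure-exists loop, in isolation and with purely illustrative names:

#include <functional>
#include <stdexcept>

enum class EStatus { Ok, NotFound, AlreadyExists, Transient };

// Treat AlreadyExists from a racing creator as success and re-check after
// creating -- the NOT_FOUND -> create -> doublecheck sequence in the log.
EStatus EnsureDefaultPool(const std::function<EStatus()>& fetch,
                          const std::function<EStatus()>& create,
                          int maxAttempts = 5) {
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        const EStatus st = fetch();
        if (st == EStatus::Ok) {
            return st;                        // pool exists, done
        }
        if (st == EStatus::NotFound) {
            const EStatus created = create();
            if (created != EStatus::Ok && created != EStatus::AlreadyExists) {
                continue;                     // transient create failure, retry
            }
            // Fall through to the next fetch() to doublecheck.
        }
        // Transient fetch failure: just loop.
    }
    throw std::runtime_error("default resource pool unavailable after retries");
}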
2025-03-28T09:10:04.463823Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:10:04.471849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:10:06.166122Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785135244880397:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:06.168717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:06.419573Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785132617995932:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:06.419644Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:10.127991Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785171272701778:2317], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:10.128111Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:10.133969Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785171272701805:2320], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:10.162235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-03-28T09:10:10.327255Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785171272701807:2321], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-03-28T09:10:10.427835Z node 2 :TX_PROXY ERROR: Actor# [2:7486785171272701835:2138] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:10:10.465331Z node 1 :KQP_PROXY ERROR: TraceId: "01jqe08099bdeeggqw857mrzpd", Request deadline has expired for 0.635607s seconds 2025-03-28T09:10:11.370224Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486785171272701842:2325], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:10:11.371030Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmZhNDMwMi1kOTBjNTFlYi04MTUyMTcyYi1kZWVhOWJiNQ==, ActorId: [2:7486785171272701776:2316], ActorState: ExecuteState, TraceId: 01jqe085dwbsknpnwgcj3yrft7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:10:11.373734Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:10:11.382771Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486785173899587272:2360], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:10:11.384318Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTE1N2RmYzItODQ1MGIwNmYtZGQ4OGU2OTMtMTE5Y2JkNWY=, ActorId: [1:7486785173899587223:2352], ActorState: ExecuteState, TraceId: 01jqe085s3dv1w5xxevgd7g987, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:10:11.392257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:10:11.393832Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:10:11.512833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:10:11.713983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:10:12.630512Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqe087846zxq4m0vt13drv7h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE0NjIyYzYtYjVjNGE1OTEtNGRhNGFhMzYtOGY2NjY1NDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486785182489522270:3126] === CheckClustersList. Ok 2025-03-28T09:10:17.890993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:10:17.891018Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:21.380310Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486785221144228519:2496] TxId: 281474976710683. Ctx: { TraceId: 01jqe08fssdq55vnjz84ywhq8r, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzEzMmRjZDctYmI2MGEzZTMtYzBkMGY2ZjctNzA1ZDNhZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2025-03-28T09:10:21.380861Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486785221144228529:2507], TxId: 281474976710683, task: 2. Ctx: { TraceId : 01jqe08fssdq55vnjz84ywhq8r. SessionId : ydb://session/3?node_id=1&id=MzEzMmRjZDctYmI2MGEzZTMtYzBkMGY2ZjctNzA1ZDNhZTA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486785221144228519:2496], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-03-28T09:10:21.381086Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486785221144228530:2508], TxId: 281474976710683, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MzEzMmRjZDctYmI2MGEzZTMtYzBkMGY2ZjctNzA1ZDNhZTA=. TraceId : 01jqe08fssdq55vnjz84ywhq8r. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486785221144228519:2496], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-03-28T09:10:21.967118Z node 1 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710684. Failed to resolve tablet: 72075186224037891 after several retries. 2025-03-28T09:10:21.967229Z node 1 :KQP_EXECUTER WARN: ActorId: [1:7486785221144228558:2512] TxId: 281474976710684. Ctx: { TraceId: 01jqe08gtn2ynpbryp4fm1rxrw, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVkNjMxYjMtZTlkOGU3YWItNDE4NTUwN2EtNTk0YTg5NWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-03-28T09:10:21.967486Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjVkNjMxYjMtZTlkOGU3YWItNDE4NTUwN2EtNTk0YTg5NWY=, ActorId: [1:7486785221144228555:2512], ActorState: ExecuteState, TraceId: 01jqe08gtn2ynpbryp4fm1rxrw, Create QueryResponse for error on request, msg: 2025-03-28T09:10:21.970079Z node 1 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqe08gttdv2wz271ne54qnq3" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-03-28T09:10:22.372907Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzEzMmRjZDctYmI2MGEzZTMtYzBkMGY2ZjctNzA1ZDNhZTA=, ActorId: [1:7486785216849261173:2496], ActorState: ExecuteState, TraceId: 01jqe08fssdq55vnjz84ywhq8r, Create QueryResponse for error on request, msg: 2025-03-28T09:10:22.376102Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Kikimr cluster or one of its subsystems was unavailable." issue_code: 2005 severity: 1 issues { message: "Failed to send EvStartKqpTasksRequest because node is unavailable: 2" severity: 1 } } TxMeta { id: "01jqe08gdsba9e0edsb9vasjgc" } } YdbStatus: UNAVAILABLE ConsumedRu: 418 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] Test command err: 2025-03-28T09:09:21.507769Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784960630701761:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:21.507894Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001699/r3tmp/tmpaA1KYm/pdisk_1.dat 2025-03-28T09:09:22.430582Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:22.508137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:22.508300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:22.528018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3965 WaitRootIsUp 'dc-1'... 
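The failures just above — "Failed to send EvStartKqpTasksRequest because node is unavailable: 2" and "Failed to resolve tablet: 72075186224037891 after several retries" — are the transient class of errors: tablets move between nodes in these tests, so callers are expected to back off and retry rather than treat UNAVAILABLE as final. A generic capped-exponential-backoff sketch (not YDB's actual retry policy; constants are illustrative):

#include <algorithm>
#include <chrono>
#include <functional>
#include <thread>

// Retries an operation that reports UNAVAILABLE-style failure by returning
// false; gives up after maxAttempts, mirroring "after several retries".
bool RetryUnavailable(const std::function<bool()>& op, int maxAttempts = 6) {
    auto delay = std::chrono::milliseconds(50);
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        if (op()) {
            return true;
        }
        std::this_thread::sleep_for(delay);
        delay = std::min(delay * 2, std::chrono::milliseconds(2000));
    }
    return false;
}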
TClient::Ls request: dc-1 2025-03-28T09:09:22.804006Z node 1 :TX_PROXY DEBUG: actor# [1:7486784960630701859:2117] Handle TEvNavigate describe path dc-1 2025-03-28T09:09:22.804063Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784964925669651:2445] HANDLE EvNavigateScheme dc-1 2025-03-28T09:09:22.804187Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784960630701882:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:22.804284Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784964925669569:2387][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784960630701882:2130], cookie# 1 2025-03-28T09:09:22.806079Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784964925669573:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784964925669570:2387], cookie# 1 2025-03-28T09:09:22.806116Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784964925669574:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784964925669571:2387], cookie# 1 2025-03-28T09:09:22.806148Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784964925669575:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784964925669572:2387], cookie# 1 2025-03-28T09:09:22.806188Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784960630701534:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784964925669573:2387], cookie# 1 2025-03-28T09:09:22.806214Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784960630701537:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784964925669574:2387], cookie# 1 2025-03-28T09:09:22.806234Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784960630701540:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784964925669575:2387], cookie# 1 2025-03-28T09:09:22.806265Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784964925669573:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784960630701534:2051], cookie# 1 2025-03-28T09:09:22.806280Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784964925669574:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784960630701537:2054], cookie# 1 2025-03-28T09:09:22.806352Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784964925669575:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784960630701540:2057], cookie# 1 2025-03-28T09:09:22.806478Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784964925669569:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784964925669570:2387], cookie# 1 2025-03-28T09:09:22.806503Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784964925669569:2387][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:09:22.806518Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784964925669569:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7486784964925669571:2387], cookie# 1 2025-03-28T09:09:22.806538Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784964925669569:2387][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:09:22.806574Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784964925669569:2387][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784964925669572:2387], cookie# 1 2025-03-28T09:09:22.806593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784964925669569:2387][/dc-1] Unexpected sync response: sender# [1:7486784964925669572:2387], cookie# 1 2025-03-28T09:09:22.806654Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784960630701882:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:09:22.822626Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784960630701882:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784964925669569:2387] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:09:22.823181Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784960630701882:2130], cacheItem# { Subscriber: { Subscriber: [1:7486784964925669569:2387] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-28T09:09:22.825619Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784964925669652:2446], recipient# [1:7486784964925669651:2445], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:09:22.825686Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784964925669651:2445] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:09:22.869380Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784964925669651:2445] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:09:22.872921Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784964925669651:2445] Handle TEvDescribeSchemeResult Forward to# [1:7486784964925669650:2444] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
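The scheme board exchange traced above follows a majority-quorum pattern: the subscriber fans one TEvSyncVersionRequest out to three replicas (size# 3, half# 1), reports "Sync is in progress" after the first success, declares "Sync is done" once successes exceed half, and logs any reply arriving after that as an unexpected sync response. Below is a minimal sketch of that counting rule, assuming a plain successes-greater-than-half threshold; TQuorumTracker and its members are illustrative names, not YDB's actual subscriber implementation.

    #include <cstddef>
    #include <iostream>

    class TQuorumTracker {
    public:
        explicit TQuorumTracker(size_t replicas)
            : Size(replicas), Half(replicas / 2) {}

        // Returns true when this response completes the quorum.
        bool OnResponse(bool ok) {
            if (Done) {
                std::cout << "Unexpected sync response\n"; // late reply, quorum already reached
                return false;
            }
            ++(ok ? Successes : Failures);
            if (Successes > Half) {            // e.g. size# 3, half# 1, successes# 2
                Done = true;
                std::cout << "Sync is done\n";
                return true;
            }
            std::cout << "Sync is in progress\n";
            return false;
        }

    private:
        size_t Size, Half;
        size_t Successes = 0, Failures = 0;
        bool Done = false;
    };

    int main() {
        TQuorumTracker sync(3);
        sync.OnResponse(true);  // Sync is in progress
        sync.OnResponse(true);  // Sync is done (2 > 1)
        sync.OnResponse(true);  // Unexpected sync response
    }

Fed three successful replies, the sketch prints the same progression as the log: in progress after the first, done after the second, and an unexpected-response note for the late third.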
2025-03-28T09:09:22.897520Z node 1 :TX_PROXY DEBUG: actor# [1:7486784960630701859:2117] Handle TEvProposeTransaction 2025-03-28T09:09:22.897548Z node 1 :TX_PROXY DEBUG: actor# [1:7486784960630701859:2117] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:09:22.897651Z node 1 :TX_PROXY DEBUG: actor# [1:7486784960630701859:2117] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7486784964925669659:2452] 2025-03-28T09:09:23.035962Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784964925669659:2452] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-28T09:09:23.036018Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784964925669659:2452] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:09:23.036086Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784964925669659:2452] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:09:23.036178Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784960630701882:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: true Status ... d_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:10:24.003328Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7486785230922540818:2218], recipient# [11:7486785222332606188:2327], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:10:24.006963Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7486785222332606188:2327], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:10:24.107004Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [13:7486785169543683023:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:10:24.107194Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [13:7486785169543683023:2108], cacheItem# { Subscriber: { Subscriber: [13:7486785225378258082:2218] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:10:24.107264Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [13:7486785169543683023:2108], cacheItem# { Subscriber: { Subscriber: [13:7486785225378258084:2219] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:10:24.107424Z node 13 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [13:7486785233968192703:2230], recipient# [13:7486785225378258075:2328], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:10:24.111002Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:7486785225378258075:2328], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:10:24.247365Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7486785170792998468:2111], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:10:24.247586Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7486785170792998468:2111], cacheItem# { Subscriber: { Subscriber: [11:7486785222332606189:2204] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:10:24.247701Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7486785230922540820:2219], recipient# [11:7486785230922540819:2333], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:10:24.248464Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:10:24.550972Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7486785170792998468:2111], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:10:24.551161Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7486785170792998468:2111], cacheItem# { Subscriber: { Subscriber: [11:7486785222332606190:2205] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true 
SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:10:24.551240Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7486785170792998468:2111], cacheItem# { Subscriber: { Subscriber: [11:7486785222332606191:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:10:24.551403Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7486785230922540821:2220], recipient# [11:7486785222332606188:2327], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:10:24.552106Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7486785222332606188:2327], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:10:24.592732Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [11:7486785170792998468:2111], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:10:24.593093Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [11:7486785170792998468:2111], cacheItem# { Subscriber: { Subscriber: [11:7486785170792998485:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:10:24.593253Z node 11 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [11:7486785230922540823:2221], recipient# [11:7486785230922540822:2334], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateConnections_With_Idempotency [GOOD] Test command err: 2025-03-28T09:09:35.467301Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785021043485235:2223];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:35.467799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0328 09:09:36.382395163 173422 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:36.382529836 173422 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:36.560445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:36.704913Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13614: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13614 } ] 2025-03-28T09:09:36.776032Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13614: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:13614 2025-03-28T09:09:37.564684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:37.779022Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13614: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13614 } ] 2025-03-28T09:09:38.567655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:39.398880Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13614: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13614 } ] 2025-03-28T09:09:39.575017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:40.426062Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785021043485235:2223];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:40.426157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:40.586999Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; E0328 09:09:41.399116393 173591 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:41.399268456 173591 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:41.420658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:41.542951Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13614: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:13614 2025-03-28T09:09:41.543072Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13614: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13614 } ] 2025-03-28T09:09:41.592728Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:42.421779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:42.598951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:42.652805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785051108256609:2317], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:42.652942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:09:42.719159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785051108256609:2317], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e82/r3tmp/tmpXPIX96/pdisk_1.dat 2025-03-28T09:09:42.849763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785051108256609:2317], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 13614, node 1 TClient is connected to server localhost:13229 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:43.699352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:44.338196Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:44.338219Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:44.338232Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:44.338363Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:09:44.340315Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:44.951975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:44.952060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:44.960193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:45.302854Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-03-28T09:09:45.302885Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-28T09:09:45.302893Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-28T09:09:45.304291Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-03-28T09:09:45.304305Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-28T09:09:45.304310Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-28T09:09:45.305027Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-03-28T09:09:45.305039Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-28T09:09:45.305043Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-28T09:09:45.305806Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". 
Create session OK 2025-03-28T09:09:45.305817Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-28T09:09:45.305822Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-28T09:09:45.328513Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-03-28T09:09:45.328541Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-28T09:09:45.328548Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-28T09:09:45.330246Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:09:45.338781Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-03-28T09:09:45.338806Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-28T09:09:45.338814Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-28T09:09:45.351538Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-03-28T09:09:45.351565Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-03-28T09:09:45.352704Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-03-28T09:09:45.352720Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-28T09:09:45.352727Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-28T09:09:45.379945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:09:45.3817 ... 
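The recurring "Scheduled retry for error: Retry LookupError" warnings in this and the previous test show the workload-manager cleanup actor treating LookupError as transient: the .metadata tables simply do not exist yet while the database is still initializing, so the lookup is rescheduled instead of failed. Below is a minimal sketch of that policy as a retry loop with exponential backoff; the backoff schedule, the RetryWithBackoff helper, and the EStatus enum are assumptions for illustration only, since the log shows that retries are scheduled but not their timing, and the real actor reschedules via timer events rather than sleeping.

    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <thread>

    enum class EStatus { Ok, LookupError };

    // Illustrative retry loop: reschedule on LookupError with a doubling
    // delay, give up after maxAttempts.
    bool RetryWithBackoff(const std::function<EStatus()>& lookupTable,
                          int maxAttempts = 5,
                          std::chrono::milliseconds delay = std::chrono::milliseconds(100)) {
        for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
            if (lookupTable() == EStatus::Ok)
                return true;
            std::cout << "Scheduled retry for error: Retry LookupError (attempt "
                      << attempt << ", next in " << delay.count() << "ms)\n";
            std::this_thread::sleep_for(delay);
            delay *= 2; // exponential backoff
        }
        return false;
    }

    int main() {
        int calls = 0;
        // Succeeds on the third attempt, as if the table appeared meanwhile.
        bool ok = RetryWithBackoff([&] { return ++calls < 3 ? EStatus::LookupError : EStatus::Ok; });
        std::cout << (ok ? "table resolved\n" : "gave up\n");
    }

In the tests above the lookups resolve the same way, once the schemeshard finishes creating the metadata tables.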
ERROR: SyncQuota finished with error: [this node 4 :FQ_QUOTA_SERVICE "SyncQuota finished with error:" entry repeats back-to-back, differing only in timestamp, from 2025-03-28T09:10:21.593076Z through 2025-03-28T09:10:21.651638Z; the intervening identical entries are elided] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DeleteQuery [GOOD] Test command err: 2025-03-28T09:09:36.995095Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785024278540685:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:36.995644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0328 09:09:37.670822474 173816 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:37.670967368 173816 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:38.010894Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:38.779680Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13807: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13807 } ] 2025-03-28T09:09:39.035296Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13807: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:13807 2025-03-28T09:09:39.074763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:40.083140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:40.425567Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13807: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13807 } ] 2025-03-28T09:09:41.087802Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:41.974781Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785024278540685:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:41.974837Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:42.095256Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; E0328 09:09:42.674391585 174000 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:42.674547294 174000 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:42.975116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:43.115805Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13807: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:13807 } ] 2025-03-28T09:09:43.119698Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:13807: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:13807 2025-03-28T09:09:43.122852Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:43.979153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:44.135266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e50/r3tmp/tmpHr6hwc/pdisk_1.dat 2025-03-28T09:09:44.567273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785058638279642:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:44.567369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:09:44.585444Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13807, node 1 2025-03-28T09:09:44.726758Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:44.726778Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:44.726788Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:44.726913Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14242 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:45.166335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:45.791335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:45.791431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:45.796348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:46.827007Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-03-28T09:09:46.827037Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-28T09:09:46.827044Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-28T09:09:46.836060Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-03-28T09:09:46.836086Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-28T09:09:46.836092Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-28T09:09:46.856857Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". 
Create session OK 2025-03-28T09:09:46.856890Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-28T09:09:46.856897Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-28T09:09:46.859118Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-03-28T09:09:46.859143Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-28T09:09:46.859148Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-28T09:09:46.862341Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-03-28T09:09:46.862356Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-28T09:09:46.862361Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-28T09:09:46.866165Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-03-28T09:09:46.866179Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-28T09:09:46.866185Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-28T09:09:46.890175Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-03-28T09:09:46.890204Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-28T09:09:46.890211Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-28T09:09:46.902319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:09:46.906012Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-03-28T09:09:46.906043Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-28T09:09:46.906052Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-28T09:09:46.906907Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-03-28T09:09:46.906922Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-28T09:09:46.906927Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-28T09:09:46.911648Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-03-28T09:09:46.911671Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-28T09:09:46.911677Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-28T09:09:46.912557Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-03-28T09:09:46.912599Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-28T09:09:46.912615Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys ... } } DstEndpoint { ActorId { RawX1: 7486785223152518949 RawX2: 4503616807242204 } } InMemory: true } 2025-03-28T09:10:22.766349Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785223152518953:3087], TxId: 281474976710821, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OWRjYTIxNzgtNjJhODljYjItMTUxMTlhODEtNDE5NDVkYWE=. TraceId : 01jqe08he95f4x2ve9f1x8ns92. CustomerSuppliedId : . 
CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-03-28T09:10:22.766358Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785223152518954:3088], TxId: 281474976710821, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=OWRjYTIxNzgtNjJhODljYjItMTUxMTlhODEtNDE5NDVkYWE=. TraceId : 01jqe08he95f4x2ve9f1x8ns92. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:10:22.766409Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785223152518953:3087], TxId: 281474976710821, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OWRjYTIxNzgtNjJhODljYjItMTUxMTlhODEtNDE5NDVkYWE=. TraceId : 01jqe08he95f4x2ve9f1x8ns92. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785223152518953 RawX2: 4503616807242767 } } DstEndpoint { ActorId { RawX1: 7486785223152518954 RawX2: 4503616807242768 } } InMemory: true DstStageId: 1 } 2025-03-28T09:10:22.766419Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1, CA Id [4:7486785223152518953:3087]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-03-28T09:10:22.766432Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1, CA Id [4:7486785223152518953:3087]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-03-28T09:10:22.766457Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785223152518953:3087], TxId: 281474976710821, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OWRjYTIxNzgtNjJhODljYjItMTUxMTlhODEtNDE5NDVkYWE=. TraceId : 01jqe08he95f4x2ve9f1x8ns92. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:10:22.766470Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1, CA Id [4:7486785223152518953:3087]. enter getasyncinputdata results size 0, freeSpace 8388608 2025-03-28T09:10:22.766487Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1, CA Id [4:7486785223152518953:3087]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-03-28T09:10:22.767493Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1, CA Id [4:7486785223152518953:3087]. Recv TEvReadResult from ShardID=72075186224037896, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-03-28T09:10:22.767520Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1, CA Id [4:7486785223152518953:3087]. Taken 0 locks 2025-03-28T09:10:22.767535Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1, CA Id [4:7486785223152518953:3087]. new data for read #0 seqno = 1 finished = 1 2025-03-28T09:10:22.767557Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785223152518953:3087], TxId: 281474976710821, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OWRjYTIxNzgtNjJhODljYjItMTUxMTlhODEtNDE5NDVkYWE=. TraceId : 01jqe08he95f4x2ve9f1x8ns92. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 276037645 2025-03-28T09:10:22.767577Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785223152518953:3087], TxId: 281474976710821, task: 1. 
Ctx: { SessionId : ydb://session/3?node_id=4&id=OWRjYTIxNzgtNjJhODljYjItMTUxMTlhODEtNDE5NDVkYWE=. TraceId : 01jqe08he95f4x2ve9f1x8ns92. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:10:22.767595Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1, CA Id [4:7486785223152518953:3087]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-03-28T09:10:22.767614Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1, CA Id [4:7486785223152518953:3087]. enter pack cells method shardId: 72075186224037896 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-03-28T09:10:22.767632Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1, CA Id [4:7486785223152518953:3087]. exit pack cells method shardId: 72075186224037896 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-03-28T09:10:22.767646Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1, CA Id [4:7486785223152518953:3087]. returned 0 rows; processed 0 rows 2025-03-28T09:10:22.767676Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1, CA Id [4:7486785223152518953:3087]. dropping batch for read #0 2025-03-28T09:10:22.767687Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1, CA Id [4:7486785223152518953:3087]. effective maxinflight 1024 sorted 0 2025-03-28T09:10:22.767700Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1, CA Id [4:7486785223152518953:3087]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-03-28T09:10:22.767716Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1, CA Id [4:7486785223152518953:3087]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-03-28T09:10:22.767799Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785223152518953:3087], TxId: 281474976710821, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OWRjYTIxNzgtNjJhODljYjItMTUxMTlhODEtNDE5NDVkYWE=. TraceId : 01jqe08he95f4x2ve9f1x8ns92. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:10:22.767814Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785223152518954:3088], TxId: 281474976710821, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=OWRjYTIxNzgtNjJhODljYjItMTUxMTlhODEtNDE5NDVkYWE=. TraceId : 01jqe08he95f4x2ve9f1x8ns92. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-03-28T09:10:22.767834Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 2. Finish input channelId: 1, from: [4:7486785223152518953:3087] 2025-03-28T09:10:22.767864Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785223152518954:3088], TxId: 281474976710821, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=OWRjYTIxNzgtNjJhODljYjItMTUxMTlhODEtNDE5NDVkYWE=. TraceId : 01jqe08he95f4x2ve9f1x8ns92. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:10:22.767908Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785223152518954:3088], TxId: 281474976710821, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=OWRjYTIxNzgtNjJhODljYjItMTUxMTlhODEtNDE5NDVkYWE=. TraceId : 01jqe08he95f4x2ve9f1x8ns92. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. 
Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:10:22.767923Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785223152518953:3087], TxId: 281474976710821, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OWRjYTIxNzgtNjJhODljYjItMTUxMTlhODEtNDE5NDVkYWE=. TraceId : 01jqe08he95f4x2ve9f1x8ns92. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646927 2025-03-28T09:10:22.767941Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785223152518953:3087], TxId: 281474976710821, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OWRjYTIxNzgtNjJhODljYjItMTUxMTlhODEtNDE5NDVkYWE=. TraceId : 01jqe08he95f4x2ve9f1x8ns92. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:10:22.767961Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1. Tasks execution finished 2025-03-28T09:10:22.767973Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785223152518953:3087], TxId: 281474976710821, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=OWRjYTIxNzgtNjJhODljYjItMTUxMTlhODEtNDE5NDVkYWE=. TraceId : 01jqe08he95f4x2ve9f1x8ns92. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-28T09:10:22.768081Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 1. pass away 2025-03-28T09:10:22.768175Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710821;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:10:22.768544Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785223152518954:3088], TxId: 281474976710821, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=OWRjYTIxNzgtNjJhODljYjItMTUxMTlhODEtNDE5NDVkYWE=. TraceId : 01jqe08he95f4x2ve9f1x8ns92. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:10:22.768581Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-03-28T09:10:22.768591Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 2. Tasks execution finished 2025-03-28T09:10:22.768604Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785223152518954:3088], TxId: 281474976710821, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=OWRjYTIxNzgtNjJhODljYjItMTUxMTlhODEtNDE5NDVkYWE=. TraceId : 01jqe08he95f4x2ve9f1x8ns92. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-28T09:10:22.768662Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710821, task: 2. pass away 2025-03-28T09:10:22.768718Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710821;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:10:22.776558Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: DescribeQueryRequest - DescribeQueryResult: {query_id: "utqueeji80u7g3tdnb5n" } ERROR: {
: Error: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:662: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } 2025-03-28T09:10:23.155081Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:25716: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:25716 E0328 09:10:23.474815869 176911 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:10:23.475107913 176911 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:10:23.632789Z node 4 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:25716: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:25716 >> TCacheTest::RacyRecreateAndSync [GOOD] >> TCacheTest::RacyCreateAndSync >> TCacheTest::CheckSystemViewAccess [GOOD] >> TCacheTest::CookiesArePreserved >> SystemView::AuthOwners [GOOD] >> SystemView::AuthOwners_Access >> TCacheTest::WatchRoot [GOOD] >> TCacheTestWithDrops::LookupErrorUponEviction >> KqpJoinOrder::DatetimeConstantFold-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpExtractPredicateLookup::ComplexRange [GOOD] Test command err: Trying to start YDB, gRPC: 12848, MsgBus: 17037 2025-03-28T09:06:45.663443Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784291382435625:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:45.663524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b1e/r3tmp/tmpOB8qTm/pdisk_1.dat 2025-03-28T09:06:46.119258Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:46.141008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:46.141091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:46.149568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12848, node 1 2025-03-28T09:06:46.257019Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:46.257036Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:46.257044Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:46.257121Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17037 TClient is connected to server localhost:17037 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:46.834212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
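The "GRpc error: (14): failed to connect to all addresses ... Connection refused" and TRANSPORT_UNAVAILABLE lines above are transient transport failures: the pending fetcher keeps polling an endpoint that is not (or no longer) listening and retries rather than failing outright. Below is a minimal C++17 sketch of that retry-with-capped-exponential-backoff pattern. It is illustrative only — RetryTransient, EStatus, and the simulated endpoint are hypothetical names, not YDB code.

    #include <algorithm>
    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <thread>

    enum class EStatus { Ok, TransportUnavailable };

    // Retries `call` while it reports a transport-level failure, doubling the
    // pause after each attempt up to a cap, then gives up.
    EStatus RetryTransient(const std::function<EStatus()>& call, int maxAttempts = 10) {
        using namespace std::chrono;
        auto delay = milliseconds(100);
        const auto maxDelay = milliseconds(5000);
        for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
            const EStatus st = call();
            if (st != EStatus::TransportUnavailable) {
                return st; // success (or a non-retryable error class in a real client)
            }
            std::cerr << "attempt " << attempt
                      << ": transport unavailable, retrying in " << delay.count() << "ms\n";
            std::this_thread::sleep_for(delay);
            delay = std::min(delay * 2, maxDelay);
        }
        return EStatus::TransportUnavailable;
    }

    int main() {
        int failuresLeft = 3; // simulate an endpoint that accepts connections on the 4th try
        const EStatus st = RetryTransient([&] {
            return failuresLeft-- > 0 ? EStatus::TransportUnavailable : EStatus::Ok;
        });
        std::cout << (st == EStatus::Ok ? "connected" : "gave up") << "\n";
    }

In the log above the endpoint never comes back (the node was shut down as part of the test), so the retries simply stop once the fetcher itself is torn down.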
2025-03-28T09:06:46.850254Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:06:46.867344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:47.122260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:47.292362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:06:47.379511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:06:49.177986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784308562306600:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:49.178109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:49.556319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:06:49.592691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:06:49.630195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:06:49.661608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:06:49.702817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:06:49.774859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:06:49.832531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784308562307114:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:49.832612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:49.832810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784308562307119:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:06:49.835767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:06:49.845624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784308562307121:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:06:49.917266Z node 1 :TX_PROXY ERROR: Actor# [1:7486784308562307173:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:06:50.666988Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784291382435625:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:06:50.667071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18534, MsgBus: 6602 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b1e/r3tmp/tmp6ONbcp/pdisk_1.dat 2025-03-28T09:06:53.712104Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:06:53.869493Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:53.912482Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:06:53.912586Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:06:53.923688Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18534, node 2 2025-03-28T09:06:54.143240Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:06:54.143267Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:06:54.143275Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:06:54.143393Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6602 TClient is connected to server localhost:6602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:06:54.805243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
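The sequence just above — "Resource pool default not found", a TPoolCreatorActor retry with "Transaction ... completed, doublechecking", then a TX_PROXY issue "path exist, request accepts it" — is a race between concurrent creators of the same default pool, resolved by treating "already exists" as success and re-checking. A self-contained C++17 sketch of that idempotent-create pattern follows; TFakeCatalog and EnsurePool are hypothetical stand-ins for the real scheme operations, not the actual actor code.

    #include <iostream>
    #include <set>
    #include <string>

    // Toy stand-in for the scheme catalog; Create() reports whether the path was new.
    struct TFakeCatalog {
        std::set<std::string> paths;
        bool Create(const std::string& path) { return paths.insert(path).second; }
        bool Exists(const std::string& path) const { return paths.count(path) > 0; }
    };

    // Returns true once the path is known to exist, whether this caller created
    // it or lost the race to another creator ("path exist, request accepts it").
    bool EnsurePool(TFakeCatalog& catalog, const std::string& path) {
        if (!catalog.Create(path)) {
            std::cerr << "path exists, accepting it: " << path << "\n";
        }
        // Doublecheck: in a real system this is a follow-up describe/lookup,
        // retried until the committed object becomes visible to this session.
        return catalog.Exists(path);
    }

    int main() {
        TFakeCatalog catalog;
        const std::string pool = "/Root/.metadata/workload_manager/pools/default";
        EnsurePool(catalog, pool);                      // first creator wins
        std::cout << EnsurePool(catalog, pool) << "\n"; // racer still succeeds (prints 1)
    }
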
2025-03-28T09:06:54.818211Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:06:54.840742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:54.933929Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:55.105719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:06:55.225292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemesha ... ation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:09:48.432441Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:09:48.584631Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:09:48.748558Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:09:48.887414Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:09:49.033814Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:09:49.393785Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11377, MsgBus: 26577 2025-03-28T09:09:57.698314Z node 14 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[14:7486785117128808795:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:57.698460Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b1e/r3tmp/tmpLQi3Xf/pdisk_1.dat 2025-03-28T09:09:58.023209Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:58.060391Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:58.060556Z node 14 :HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:58.067917Z node 14 
:HIVE WARN: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11377, node 14 2025-03-28T09:09:58.221775Z node 14 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:58.221804Z node 14 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:58.221826Z node 14 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:58.222014Z node 14 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26577 TClient is connected to server localhost:26577 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:10:00.425207Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:00.465228Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:01.465653Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:02.170036Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:02.350462Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:02.900136Z node 14 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7486785117128808795:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:02.903477Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:08.114871Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7486785164373450879:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:08.115043Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:08.187379Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:10:08.368224Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:10:08.485412Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:10:08.600767Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:10:08.712296Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:10:08.950861Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:10:09.134157Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7486785168668418713:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:09.134351Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:09.134945Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7486785168668418718:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:09.147485Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:10:09.174482Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7486785168668418720:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:10:09.273764Z node 14 :TX_PROXY ERROR: Actor# [14:7486785168668418778:3484] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:10:13.000747Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:10:13.000785Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:13.495072Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:10:13.579620Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:10:13.688473Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:10:13.826329Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:10:13.998401Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:10:14.112206Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:10:14.225430Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:10:14.323655Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:10:14.478738Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:10:14.653048Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 >> TCacheTest::CookiesArePreserved [GOOD] >> KqpIndexLookupJoin::RightSemi >> TCacheTest::RacyCreateAndSync [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CookiesArePreserved [GOOD] Test command err: 2025-03-28T09:10:27.597974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:27.600128Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:28.259444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 
72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-03-28T09:10:28.385322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:10:28.414885Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-28T09:10:28.431129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2025-03-28T09:10:28.477240Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:205:2195], for# user1@builtin, access# DescribeSchema 2025-03-28T09:10:28.478188Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:211:2201], for# user1@builtin, access# 2025-03-28T09:10:29.026160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:29.026223Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:29.088865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-03-28T09:10:29.095930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:10:29.102113Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::RacyCreateAndSync [GOOD] Test command err: 2025-03-28T09:10:27.612736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:27.612811Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:28.255620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:10:28.414233Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-28T09:10:28.478627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:10:28.525503Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-28T09:10:28.547882Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-03-28T09:10:29.074376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:29.074483Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:29.171765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 
at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:10:29.204558Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 >> KqpJoinOrder::CanonizedJoinOrderTPCH7 >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] >> SystemView::ShowCreateTableReadReplicas [GOOD] >> SystemView::ShowCreateTableKeyBloomFilter |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |80.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::DatetimeConstantFold-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 18666, MsgBus: 65330 2025-03-28T09:09:23.643600Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784970506353444:2266];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:23.643648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00186c/r3tmp/tmpuRz9kV/pdisk_1.dat 2025-03-28T09:09:24.197381Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:24.201728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:24.201812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:24.208490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18666, node 1 2025-03-28T09:09:24.436805Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:24.436825Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:24.436833Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:24.436973Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65330 TClient is connected to server localhost:65330 WaitRootIsUp 'Root'... 
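The FAKE_COORDINATOR lines in the cache-test output above ("advance: minStep...", "Send Plan to tablet ... for txId ... at step ...") trace transaction planning: each accepted transaction is assigned a plan step no smaller than the current minStep, and steps only move forward, which is what gives transactions a global order. Below is a deliberately simplified toy model in C++17; the class name and the fixed step increment are assumptions — in the real system steps advance with mediator time, not by one per transaction.

    #include <cstdint>
    #include <iostream>
    #include <vector>

    struct TPlannedTx { uint64_t TxId; uint64_t Step; };

    // Plans each accepted transaction at the current minStep and then advances
    // minStep, so later transactions always land at strictly larger steps.
    class TToyCoordinator {
    public:
        TPlannedTx Plan(uint64_t txId) {
            TPlannedTx tx{txId, MinStep};
            ++MinStep; // assumption: unit step; real steps follow mediator time
            Planned.push_back(tx);
            return tx;
        }
        uint64_t MinStep = 5000001; // first step, matching the trace above
        std::vector<TPlannedTx> Planned;
    };

    int main() {
        TToyCoordinator coordinator;
        for (uint64_t txId : {1u, 101u, 102u}) {
            const TPlannedTx p = coordinator.Plan(txId);
            std::cout << "Send Plan for txId " << p.TxId << " at step " << p.Step << "\n";
        }
    }

Run against txIds 1, 101, 102 this reproduces the step sequence 5000001, 5000002, 5000003 seen in the trace.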
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:25.157450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:25.190250Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:09:27.376114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784987686223087:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:27.376227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:27.382864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784987686223099:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:27.386622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:09:27.398727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784987686223101:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:09:27.458514Z node 1 :TX_PROXY ERROR: Actor# [1:7486784987686223152:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:27.827079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:09:28.125290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:09:28.183206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:09:28.222062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:09:28.269089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:09:28.467095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:09:28.550196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:09:28.606618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:09:28.647013Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784970506353444:2266];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:28.647104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:28.691465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:09:28.750886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:09:28.797844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:09:28.849553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
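Runs like the block above consist almost entirely of repeated FLAT_TX_SCHEMESHARD warnings that differ only in timestamp and opId; when triaging such logs it helps to pull out the structured fields. A hypothetical standalone helper (not part of the test suite) that splits one record of the form "<timestamp>Z node <n> :<COMPONENT> <SEVERITY>: <message>" with std::regex:

    #include <iostream>
    #include <regex>
    #include <string>

    int main() {
        const std::string line =
            "2025-03-28T09:09:28.904870Z node 1 :FLAT_TX_SCHEMESHARD WARN: "
            "Operation part proposed ok, but propose itself is undo unsafe";
        // <iso-timestamp>Z node <n> :<COMPONENT> <SEVERITY>: <message>
        static const std::regex kRecord(R"((\S+Z) node (\d+) :(\w+) (\w+): (.*))");
        std::smatch m;
        if (std::regex_match(line, m, kRecord)) {
            std::cout << "ts="        << m[1] << "\n"
                      << "node="      << m[2] << "\n"
                      << "component=" << m[3] << "\n"
                      << "severity="  << m[4] << "\n"
                      << "message="   << m[5] << "\n";
        }
    }

Feeding every record of a log through such a matcher and grouping by component/severity quickly separates the repetitive scheme-op warnings from the handful of genuine ERROR records.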
2025-03-28T09:09:28.904870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:09:30.024988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:09:30.094619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:09:30.148431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:09:30.245564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:09:30.301682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:09:30.384034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:09:30.445562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:09:30.501197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:09:30.565129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:09:30.689237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:09:30.797714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:09:30.885956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:09:30.973909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:09:31.017817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:09:31.067585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:09:31.157621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:09:31.237158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.737872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.739808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.744626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.746810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038451;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.750516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.753822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.759380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.760312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.766380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.767466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.775577Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.814624Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.814917Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038443;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.822340Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.822406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038435;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.828852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.831787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.835942Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038447;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.840420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.843443Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038483;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.855529Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.859835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038441;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.872904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.874983Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038437;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.879570Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038449;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.882293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038425;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.891237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038473;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.893053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038439;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.898123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.907168Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.913506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.918326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.928247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.933871Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038467;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.951778Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038463;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.956751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038465;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.963254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038453;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.971248Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.980104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.986001Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.996695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:21.997151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038497;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.012136Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038485;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.013406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038445;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.030898Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.199064Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0715wcm5y3k5yqvbjtkwv", SessionId: ydb://session/3?node_id=1&id=ZmM2ZTZhM2MtZDM3Y2MxYTUtZTQ4N2RjNDYtYzY3ZmRjZjc=, Slow query, duration: 49.210394s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:10:22.595378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:10:22.595757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:10:22.596695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7486785026340934289:2732];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-03-28T09:10:22.597041Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] Test command err: 2025-03-28T09:09:44.446065Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785060544138095:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:44.446336Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0328 09:09:44.935264729 175081 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:44.935424727 175081 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:45.444555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:45.894964Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12518: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12518 } ] 2025-03-28T09:09:46.000089Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12518: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:12518 2025-03-28T09:09:46.447314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:47.451340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:47.683965Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12518: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12518 } ] 2025-03-28T09:09:48.495621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:49.430363Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785060544138095:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:49.430436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:49.499233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; E0328 09:09:49.935867534 175187 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:49.936029970 175187 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:50.427197Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12518: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:12518 } ] 2025-03-28T09:09:50.439637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:50.506259Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:12518: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:12518 2025-03-28T09:09:50.528595Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:51.443383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:51.531335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e17/r3tmp/tmpSbMR0G/pdisk_1.dat 2025-03-28T09:09:51.762860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:09:51.779233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785090608909589:2320], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:51.838616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785090608909589:2320], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:52.036846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785090608909589:2320], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:52.219569Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12518, node 1 2025-03-28T09:09:52.423410Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:52.423439Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:52.423447Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:52.423607Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:09:52.909754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:52.909855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:52.917266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3755 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:53.348368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:54.166963Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:09:54.198947Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-03-28T09:09:54.198984Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-03-28T09:09:54.212171Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-03-28T09:09:54.212210Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-28T09:09:54.212238Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-28T09:09:54.219807Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-03-28T09:09:54.219832Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-28T09:09:54.219839Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-28T09:09:54.220911Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". 
Create session OK 2025-03-28T09:09:54.220931Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-28T09:09:54.220937Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-28T09:09:54.230675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:09:54.231292Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-03-28T09:09:54.231309Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-28T09:09:54.231315Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-28T09:09:54.235054Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-03-28T09:09:54.235086Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-28T09:09:54.235094Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-28T09:09:54.237733Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-03-28T09:09:54.237752Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-28T09:09:54.237759Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-28T09:09:54.238468Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-03-28T09:09:54.238493Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-28T09:09:54.238499Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-28T09:09:54.258729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:09:54.260055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 720575 ... =. TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785233629495641 RawX2: 4503616807242330 } } DstEndpoint { ActorId { RawX1: 7486785233629495642 RawX2: 4503616807242331 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785233629495642 RawX2: 4503616807242331 } } DstEndpoint { ActorId { RawX1: 7486785233629495637 RawX2: 4503616807242230 } } InMemory: true } 2025-03-28T09:10:24.717248Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785233629495642:2651], TxId: 281474976710712, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=. TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Update input channelId: 1, peer: [4:7486785233629495641:2650] 2025-03-28T09:10:24.717296Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785233629495642:2651], TxId: 281474976710712, task: 2. 
Ctx: { SessionId : ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=. TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-03-28T09:10:24.717394Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785233629495642:2651], TxId: 281474976710712, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=. TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785233629495641 RawX2: 4503616807242330 } } DstEndpoint { ActorId { RawX1: 7486785233629495642 RawX2: 4503616807242331 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785233629495642 RawX2: 4503616807242331 } } DstEndpoint { ActorId { RawX1: 7486785233629495637 RawX2: 4503616807242230 } } InMemory: true } 2025-03-28T09:10:24.717423Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785233629495642:2651], TxId: 281474976710712, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=. TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:24.718535Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 1, CA Id [4:7486785233629495641:2650]. Recv TEvReadResult from ShardID=72075186224037892, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= LockId: 281474976710703 DataShard: 72075186224037892 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 4, BrokenTxLocks= 2025-03-28T09:10:24.718558Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 1, CA Id [4:7486785233629495641:2650]. Taken 1 locks 2025-03-28T09:10:24.718571Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 1, CA Id [4:7486785233629495641:2650]. new data for read #0 seqno = 1 finished = 1 2025-03-28T09:10:24.718591Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785233629495641:2650], TxId: 281474976710712, task: 1. Ctx: { TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. SessionId : ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 276037645 2025-03-28T09:10:24.718607Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785233629495641:2650], TxId: 281474976710712, task: 1. Ctx: { TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. SessionId : ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:24.718622Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 1, CA Id [4:7486785233629495641:2650]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-03-28T09:10:24.718650Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 1, CA Id [4:7486785233629495641:2650]. enter pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-03-28T09:10:24.718705Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 1, CA Id [4:7486785233629495641:2650]. 
exit pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 1 freeSpace: 8388557 2025-03-28T09:10:24.718725Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 1, CA Id [4:7486785233629495641:2650]. returned 1 rows; processed 1 rows 2025-03-28T09:10:24.718769Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 1, CA Id [4:7486785233629495641:2650]. dropping batch for read #0 2025-03-28T09:10:24.718778Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 1, CA Id [4:7486785233629495641:2650]. effective maxinflight 1024 sorted 0 2025-03-28T09:10:24.718791Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 1, CA Id [4:7486785233629495641:2650]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-03-28T09:10:24.718804Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 1, CA Id [4:7486785233629495641:2650]. returned async data processed rows 1 left freeSpace 8388557 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-03-28T09:10:24.719072Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785233629495641:2650], TxId: 281474976710712, task: 1. Ctx: { TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. SessionId : ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:10:24.719091Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785233629495641:2650], TxId: 281474976710712, task: 1. Ctx: { TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. SessionId : ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:24.719103Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785233629495642:2651], TxId: 281474976710712, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=. TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646923 2025-03-28T09:10:24.719130Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-03-28T09:10:24.719145Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 2. Finish input channelId: 1, from: [4:7486785233629495641:2650] 2025-03-28T09:10:24.719202Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785233629495642:2651], TxId: 281474976710712, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=. TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:24.719413Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785233629495642:2651], TxId: 281474976710712, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=. TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:10:24.719434Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785233629495641:2650], TxId: 281474976710712, task: 1. Ctx: { TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. SessionId : ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2025-03-28T09:10:24.719472Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785233629495641:2650], TxId: 281474976710712, task: 1. Ctx: { TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. SessionId : ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:24.719493Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 1. Tasks execution finished 2025-03-28T09:10:24.719529Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785233629495641:2650], TxId: 281474976710712, task: 1. Ctx: { TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. SessionId : ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-03-28T09:10:24.719648Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 1. pass away 2025-03-28T09:10:24.719750Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710712;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:10:24.720126Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785233629495642:2651], TxId: 281474976710712, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=. TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:24.720161Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-03-28T09:10:24.720170Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 2. Tasks execution finished 2025-03-28T09:10:24.720182Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785233629495642:2651], TxId: 281474976710712, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=. TraceId : 01jqe08k7pe0q0e1v6x11g0s0z. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-03-28T09:10:24.720228Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710712, task: 2. pass away 2025-03-28T09:10:24.720273Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710712;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:10:24.722072Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976710713. Ctx: { TraceId: 01jqe08k7pe0q0e1v6x11g0s0z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=M2IwNGM2NjctZDc2YjJmMWEtOWQ1NzUyYzItY2NlYzFiMDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:10:25.358317Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:25005: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:25005 >> KqpJoinOrder::FiveWayJoinWithComplexPreds-ColumnStore |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoinQueryService >> Yq_1::CreateQuery_Without_Connection [GOOD] >> KqpDataIntegrityTrails::BrokenReadLock+UseSink |80.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |80.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> THiveTest::TestLockTabletExecutionRebootTimeout [GOOD] >> THiveTest::TestLockTabletExecutionReconnect |80.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |80.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |81.0%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |81.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> TCdcStreamTests::MeteringDedicated [GOOD] >> TCdcStreamTests::ChangeOwner ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateQuery_Without_Connection [GOOD] Test command err: 2025-03-28T09:09:37.218320Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785029345868933:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:37.218373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0328 09:09:38.243318213 173946 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:38.243492301 173946 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:38.312793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:39.237148Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:3643: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3643 } ] 2025-03-28T09:09:39.326908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:39.419416Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:3643: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:3643 2025-03-28T09:09:40.338842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:40.847209Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:3643: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3643 } ] 2025-03-28T09:09:41.337806Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:42.218892Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785029345868933:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:42.218959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:42.339609Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; E0328 09:09:43.215040319 174055 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:43.215204638 174055 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:43.235555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:43.340624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e45/r3tmp/tmpcZ4cna/pdisk_1.dat 2025-03-28T09:09:43.698886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785055115673479:2319], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:43.716441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:09:43.987008Z node 1 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:3643: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:3643 2025-03-28T09:09:43.992106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785055115673479:2319], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:44.002968Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:3643: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:3643 } ] 2025-03-28T09:09:44.010232Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:3643: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:3643 TServer::EnableGrpc on GrpcPort 3643, node 1 TClient is connected to server localhost:27942 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:44.488232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:45.167990Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:45.168014Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:45.168021Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:45.168170Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:09:45.179527Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:45.340240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:45.340338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:45.355540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:46.167101Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-03-28T09:09:46.167136Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-28T09:09:46.167144Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-28T09:09:46.169437Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-03-28T09:09:46.169466Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-28T09:09:46.169478Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-28T09:09:46.170210Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-03-28T09:09:46.170222Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-28T09:09:46.170226Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-28T09:09:46.176294Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". 
Create session OK 2025-03-28T09:09:46.176310Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-28T09:09:46.176316Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-28T09:09:46.177917Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-03-28T09:09:46.177949Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-28T09:09:46.177954Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-28T09:09:46.180427Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-03-28T09:09:46.180439Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-28T09:09:46.180445Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-28T09:09:46.183552Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-03-28T09:09:46.183592Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-28T09:09:46.183599Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-28T09:09:46.186042Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-03-28T09:09:46.186064Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-28T09:09:46.186070Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-28T09:09:46.201924Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-03-28T09:09:46.201954Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-28T09:09:46.201961Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-28T09:09:46.319730Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-03-28T09:09:46.319769Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-28T09:09:46.319776Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-28T09:09:46.375221Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-03-28T09:09:46.375253Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-28T09:09:46.375259Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-28T09:09:46.431272Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create sessio ... 
.125596Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.125684Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.125752Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.125835Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.125917Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.126001Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.126075Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.126143Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.126252Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.126346Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.126424Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.126495Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.126576Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.126656Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.127443Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.127536Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.127612Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.127689Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.127757Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.127829Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.127929Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.128027Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.128098Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.128164Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.128230Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.128304Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.128371Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.128451Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.128518Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.128585Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.128663Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.128730Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.128803Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.128870Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.128963Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.129058Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.129145Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.129224Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.129312Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 
2025-03-28T09:10:31.129400Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.129482Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.129549Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.129615Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.129741Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.129809Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.129882Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.129977Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.130052Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.130121Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.130190Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.130292Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.130372Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.130441Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.130506Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.130599Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.130680Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.131528Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.131622Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.131706Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.131804Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.131885Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.131977Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.132080Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.132157Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.132247Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.132351Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.132455Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.132526Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.132601Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.132688Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.132774Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.132846Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.132922Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.133400Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.133491Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.133566Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.133668Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.133741Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 
2025-03-28T09:10:31.133826Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.133903Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.133996Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.134075Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.134147Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.134214Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.134279Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.134390Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.134463Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.134527Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.134593Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.134714Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.134805Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.134902Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.134967Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.135031Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.135099Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.135165Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.135297Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.135384Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.135458Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.135532Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.135606Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.135720Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.135799Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.135926Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.136105Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.136194Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.136319Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.136399Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.136469Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.136549Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.136623Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.136704Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.136774Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.146850Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.147083Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.147205Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.147308Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 
2025-03-28T09:10:31.147407Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.147488Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.147605Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.147707Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.152201Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.152368Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.152447Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.152536Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.152610Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.152688Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.152764Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.152841Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:10:31.152965Z node 1 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: [good] Yq_1::CreateQuery_Without_Connection |81.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpJoinOrder::TPCDS23-ColumnStore [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] |81.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink |81.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpIndexLookupJoin::RightSemi [GOOD] >> TCdcStreamTests::ChangeOwner [GOOD] >> TCdcStreamTests::DropIndexWithStream >> Yq_1::Basic_EmptyList [GOOD] >> Yq_1::Basic_EmptyDict >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] >> THiveTest::TestLockTabletExecutionReconnect [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect >> BasicUsage::RecreateObserver [GOOD] >> KqpDataIntegrityTrails::BrokenReadLock-UseSink |81.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::RightSemi [GOOD] Test command err: Trying to start YDB, gRPC: 31247, MsgBus: 29648 2025-03-28T09:10:29.587855Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785251820723765:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:29.587896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001727/r3tmp/tmpXfb95v/pdisk_1.dat 2025-03-28T09:10:30.054508Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:30.086548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:30.086639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:30.089477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31247, node 1 2025-03-28T09:10:30.223269Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-03-28T09:10:30.223289Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:10:30.223296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:10:30.223435Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29648 TClient is connected to server localhost:29648 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:10:30.931150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:30.954747Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:10:30.976252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:31.198892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:31.470125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:31.550415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:33.311906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785269000594588:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:33.312015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:33.673365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:10:33.713837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:10:33.773000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:10:33.884450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:10:33.929255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:10:33.991161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:10:34.097814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785273295562404:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:34.097886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:34.098296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785273295562409:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:34.102040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:10:34.115025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785273295562411:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:10:34.197154Z node 1 :TX_PROXY ERROR: Actor# [1:7486785273295562464:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:10:34.587912Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785251820723765:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:34.587975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:35.262673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:10:35.314747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:10:35.392936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:10:35.447130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:10:35.499926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:10:35.559949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> TFlatTest::LargeDatashardReplyDistributed |81.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS23-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 3337, MsgBus: 25993 2025-03-28T09:08:51.806857Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784834154111602:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:51.807280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001898/r3tmp/tmpkJ0PUq/pdisk_1.dat 2025-03-28T09:08:52.264813Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:52.266733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:52.266810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:52.271222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3337, node 1 
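[Note] The recurring "Resource pool default not found or you don't have access permissions" warnings in these test logs are the KQP workload manager bootstrapping its default resource pool on a freshly created database: the pool is created through ESchemeOpCreateResourcePool, the "doublechecking" retry re-reads it, and the subsequent TX_PROXY "path exist, request accepts it" message shows a concurrent create being accepted as already done. As an illustration only (the pool name and settings below are hypothetical and not taken from this log), a resource pool can also be declared explicitly in YQL:

    CREATE RESOURCE POOL test_pool WITH (  -- hypothetical pool name, for illustration
        CONCURRENT_QUERY_LIMIT = 10,       -- hypothetical cap on in-flight queries
        QUEUE_SIZE = 100                   -- hypothetical pending-query queue depth
    );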
2025-03-28T09:08:52.357166Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:52.357208Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:52.357223Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:52.357352Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25993 TClient is connected to server localhost:25993 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:52.882782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:52.899440Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:08:55.229135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784851333981317:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:55.229243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:55.229670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784851333981329:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:55.233832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:08:55.245855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784851333981331:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:08:55.302193Z node 1 :TX_PROXY ERROR: Actor# [1:7486784851333981382:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:55.630356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:08:55.799123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:08:55.847101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:08:55.888263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:08:55.929403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:08:56.103194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:08:56.180401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:08:56.208745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:08:56.251208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:08:56.294671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:08:56.325108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:08:56.373201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:08:56.401191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:08:56.803778Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784834154111602:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:56.803865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:57.163616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:08:57.197750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.265084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.319648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.348369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.379288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.416079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.444153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.478055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.521383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.553270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.585437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.652834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.680434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.707046Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.741482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:08:57.784501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:25.988364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:25.988489Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:25.994819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:25.994925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.001961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.001983Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.008555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.008808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.015465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.015523Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.022253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.022350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.028483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.028503Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.035532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.035533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.043004Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.043021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.049361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.049490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.056999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.057241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.063733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.063820Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.069729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.080433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038485;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.085986Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.091721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.097782Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.108682Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.112309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.123093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.127517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.141699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.145222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.159781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.167373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.174157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.177460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.189222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.194220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.209158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.213252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.220631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.232341Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:09:26.434951Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0603x4mfn4ffmtcyznhq3", SessionId: ydb://session/3?node_id=1&id=YzEzZGIzZDUtNzgyNDRjNWMtYmI2ZWU5ODctNjQ1MjdlMjk=, Slow query, duration: 27.300954s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:09:27.110246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:09:27.110631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:09:27.111397Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7486784911463540132:4465];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038170; 2025-03-28T09:09:27.111729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TCdcStreamTests::DropIndexWithStream [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::RecreateObserver [GOOD] Test command err: 2025-03-28T09:08:34.959316Z :RetryDiscoveryWithCancel INFO: Random seed for debugging is 1743152914959272 2025-03-28T09:08:35.634841Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784764500315128:2283];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:35.634962Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:08:35.700573Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784762619570507:2152];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e36/r3tmp/tmpWqicog/pdisk_1.dat 2025-03-28T09:08:36.139378Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:08:36.140141Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:08:36.140817Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 
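[Note] For readability, the escaped statement captured in the KQP_SLOW_LOG entry above (TraceId 01jqe0603x4mfn4ffmtcyznhq3, duration 27.300954s) unescapes to the following YQL DDL; this is a direct transcription of the logged text, not new code:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);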
2025-03-28T09:08:36.634071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:08:36.850791Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:36.924748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:36.926845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:36.931334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:36.931399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:36.934624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:36.937674Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:08:36.939603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19645, node 1 2025-03-28T09:08:37.286503Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/000e36/r3tmp/yandexBecYu5.tmp 2025-03-28T09:08:37.286531Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/000e36/r3tmp/yandexBecYu5.tmp 2025-03-28T09:08:37.286738Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/000e36/r3tmp/yandexBecYu5.tmp 2025-03-28T09:08:37.286888Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:08:37.349149Z INFO: TTestServer started on Port 2998 GrpcPort 19645 TClient is connected to server localhost:2998 PQClient connected to localhost:19645 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:37.782093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:08:37.871264Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:08:40.598876Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784764500315128:2283];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:40.598939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:40.700594Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784762619570507:2152];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:40.700658Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:40.759291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784785975152426:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:40.759406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:40.759769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784785975152438:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:40.765115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:08:40.774876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784785975152472:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:40.775207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:40.808271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784785975152440:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:08:40.907310Z node 1 :TX_PROXY ERROR: Actor# [1:7486784785975152518:2683] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:41.244530Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486784785975152531:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:41.244765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:08:41.245598Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmZmYmM5ZTUtMzEwMTkyZTAtZjc3M2JiYy03NzkwZTYwOA==, ActorId: [1:7486784785975152423:2338], ActorState: ExecuteState, TraceId: 01jqe05e4s00tb6encsqqbf7r6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:41.247707Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:08:41.251352Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486784784094407251:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:41.253016Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGRmOGE1Y2EtNGZiN2M3Y2EtN2I0ZjUyMGUtOTFjMzZjZDQ=, ActorId: [2:7486784784094407223:2307], ActorState: ExecuteState, TraceId: 01jqe05e865b3ywas00r0gpr8z, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:41.253425Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:08:41.388601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:08:41.584954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:19645", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-03-28T09:08:41.958757Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqe05f3ybf89ee7b71z64y5a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzRiYWIxZjAtMzBkYjk1NC1hMmUxNWQ5NC1kNjMzNGZjMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckCl ... partition 0(assignId:1) EndOffset 0 readOffset 0 committedOffset 0 2025-03-28T09:10:36.293141Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_8499742080068615803_v1 sending to client partition status >>> Got event: StartPartitionSession { Partition session id: 1 Topic: "test-topic" Partition: 0 Database name: dc3 Database path: /Root Database id: account-dc3 CommittedOffset: 0 EndOffset: 0 } 2025-03-28T09:10:36.298633Z :INFO: [/Root] [/Root] [fde68a33-fd6f51d0-12f60238-8dad08e8] Closing read session. Close timeout: 0.000000s 2025-03-28T09:10:36.299880Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-28T09:10:36.299943Z :INFO: [/Root] [/Root] [fde68a33-fd6f51d0-12f60238-8dad08e8] Counters: { Errors: 0 CurrentSessionLifetimeMs: 115 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:10:36.300061Z :NOTICE: [/Root] [/Root] [fde68a33-fd6f51d0-12f60238-8dad08e8] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-03-28T09:10:36.300108Z :DEBUG: [/Root] [/Root] [fde68a33-fd6f51d0-12f60238-8dad08e8] [] Abort session to cluster 2025-03-28T09:10:36.300630Z :INFO: [/Root] [/Root] [962c65f7-cdb6292a-ec168a03-a815c1db] Closing read session. Close timeout: 0.000000s 2025-03-28T09:10:36.300672Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-28T09:10:36.300705Z :INFO: [/Root] [/Root] [962c65f7-cdb6292a-ec168a03-a815c1db] Counters: { Errors: 0 CurrentSessionLifetimeMs: 114 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:10:36.301302Z :NOTICE: [/Root] [/Root] [962c65f7-cdb6292a-ec168a03-a815c1db] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-03-28T09:10:36.301333Z :DEBUG: [/Root] [/Root] [962c65f7-cdb6292a-ec168a03-a815c1db] [] Abort session to cluster 2025-03-28T09:10:36.301572Z :INFO: [/Root] [/Root] [d1e03d7e-fa6738b7-797afa5b-b242c61f] Closing read session. Close timeout: 0.000000s 2025-03-28T09:10:36.301613Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2025-03-28T09:10:36.301644Z :INFO: [/Root] [/Root] [d1e03d7e-fa6738b7-797afa5b-b242c61f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 114 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:10:36.301705Z :NOTICE: [/Root] [/Root] [d1e03d7e-fa6738b7-797afa5b-b242c61f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-03-28T09:10:36.301728Z :DEBUG: [/Root] [/Root] [d1e03d7e-fa6738b7-797afa5b-b242c61f] [] Abort session to cluster 2025-03-28T09:10:36.301920Z :INFO: [/Root] [/Root] [d1e03d7e-fa6738b7-797afa5b-b242c61f] Closing read session. Close timeout: 0.000000s 2025-03-28T09:10:36.301959Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2025-03-28T09:10:36.302003Z :INFO: [/Root] [/Root] [d1e03d7e-fa6738b7-797afa5b-b242c61f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 114 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:10:36.302081Z :NOTICE: [/Root] [/Root] [d1e03d7e-fa6738b7-797afa5b-b242c61f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:10:36.302199Z :INFO: [/Root] [/Root] [962c65f7-cdb6292a-ec168a03-a815c1db] Closing read session. Close timeout: 0.000000s 2025-03-28T09:10:36.302223Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-28T09:10:36.302247Z :INFO: [/Root] [/Root] [962c65f7-cdb6292a-ec168a03-a815c1db] Counters: { Errors: 0 CurrentSessionLifetimeMs: 116 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:10:36.302286Z :NOTICE: [/Root] [/Root] [962c65f7-cdb6292a-ec168a03-a815c1db] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:10:36.302331Z :INFO: [/Root] [/Root] [fde68a33-fd6f51d0-12f60238-8dad08e8] Closing read session. Close timeout: 0.000000s 2025-03-28T09:10:36.302353Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-28T09:10:36.302401Z :INFO: [/Root] [/Root] [fde68a33-fd6f51d0-12f60238-8dad08e8] Counters: { Errors: 0 CurrentSessionLifetimeMs: 118 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:10:36.302444Z :NOTICE: [/Root] [/Root] [fde68a33-fd6f51d0-12f60238-8dad08e8] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:10:36.313398Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_2_8499742080068615803_v1 2025-03-28T09:10:36.313442Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7486785284743304473:2569] destroyed 2025-03-28T09:10:36.313490Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_2_8499742080068615803_v1 2025-03-28T09:10:36.310840Z node 3 :PQ_READ_PROXY DEBUG: session cookie 3 consumer shared/user session shared/user_3_3_9429514936361795185_v1 grpc read done: success# 0, data# { } 2025-03-28T09:10:36.310878Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_9429514936361795185_v1 grpc read failed 2025-03-28T09:10:36.310916Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_9429514936361795185_v1 grpc closed 2025-03-28T09:10:36.310938Z node 3 :PQ_READ_PROXY INFO: session cookie 3 consumer shared/user session shared/user_3_3_9429514936361795185_v1 is DEAD 2025-03-28T09:10:36.311620Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_3_2_8499742080068615803_v1 grpc read done: success# 0, data# { } 2025-03-28T09:10:36.311632Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_8499742080068615803_v1 grpc read failed 2025-03-28T09:10:36.311649Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_8499742080068615803_v1 grpc closed 2025-03-28T09:10:36.311676Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_3_2_8499742080068615803_v1 is DEAD 2025-03-28T09:10:36.312034Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_144083037714434984_v1 grpc read done: success# 0, data# { } 2025-03-28T09:10:36.312045Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_144083037714434984_v1 grpc read failed 2025-03-28T09:10:36.312058Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_144083037714434984_v1 grpc closed 2025-03-28T09:10:36.312072Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_144083037714434984_v1 is DEAD 2025-03-28T09:10:36.313755Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486785284743304462:2561] disconnected; active server actors: 1 2025-03-28T09:10:36.313797Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486785284743304462:2561] client user disconnected session shared/user_3_3_9429514936361795185_v1 2025-03-28T09:10:36.313844Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2025-03-28T09:10:36.313881Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486785284743304466:2560] disconnected; active server actors: 1 2025-03-28T09:10:36.313893Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486785284743304466:2560] client user disconnected session shared/user_3_2_8499742080068615803_v1 2025-03-28T09:10:36.313939Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486785284743304465:2559] disconnected; active server actors: 1 2025-03-28T09:10:36.313952Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: 
[72075186224037893][rt3.dc1--test-topic] pipe [3:7486785284743304465:2559] client user disconnected session shared/user_3_1_144083037714434984_v1 2025-03-28T09:10:36.732429Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710692, task: 1, CA Id [3:7486785284743304490:2572]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-03-28T09:10:36.762794Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710692, task: 1, CA Id [3:7486785284743304490:2572]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-28T09:10:36.811343Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710692, task: 1, CA Id [3:7486785284743304490:2572]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-28T09:10:36.875226Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710692, task: 1, CA Id [3:7486785284743304490:2572]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-28T09:10:36.946430Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710692, task: 1, CA Id [3:7486785284743304490:2572]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-28T09:10:37.029762Z node 3 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710693. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-28T09:10:37.029885Z node 3 :KQP_EXECUTER WARN: ActorId: [3:7486785284743304514:2550] TxId: 281474976710693. Ctx: { TraceId: 01jqe08yv4ea9ae76k92dpawhf, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGRlYWViYmMtNjc1N2EzNDgtYjA5YWY2OGQtNTZlNmE4OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-28T09:10:37.030119Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OGRlYWViYmMtNjc1N2EzNDgtYjA5YWY2OGQtNTZlNmE4OQ==, ActorId: [3:7486785284743304435:2550], ActorState: ExecuteState, TraceId: 01jqe08yv4ea9ae76k92dpawhf, Create QueryResponse for error on request, msg: 2025-03-28T09:10:37.031471Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqe08zhr34fg9dkzrp4vs3t5" } } YdbStatus: UNAVAILABLE ConsumedRu: 473 } 2025-03-28T09:10:37.117119Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710692, task: 1, CA Id [3:7486785284743304490:2572]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-28T09:10:37.495438Z node 3 :KQP_COMPUTE WARN: TxId: 281474976710692, task: 1, CA Id [3:7486785284743304490:2572]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] Test command err: 2025-03-28T09:09:49.575879Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785080544929140:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:49.576009Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0328 09:09:50.673059732 175893 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:50.673255100 175893 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:50.830915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:51.825549Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25491: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25491 } ] 2025-03-28T09:09:51.832667Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:51.883090Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25491: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25491 2025-03-28T09:09:52.838704Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:53.678175Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25491: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25491 } ] 2025-03-28T09:09:53.839668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:54.510892Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785080544929140:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:54.510944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:54.843132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:55.511170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; E0328 09:09:55.679414804 175997 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:55.679561649 175997 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:55.851245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:56.083243Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25491: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25491 2025-03-28T09:09:56.120509Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:25491: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25491 } ] 2025-03-28T09:09:56.515247Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:56.856926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:57.519077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:57.645971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785114904667835:2318], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:57.646155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:09:57.741043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785114904667835:2318], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000d6a/r3tmp/tmpJHlySW/pdisk_1.dat 2025-03-28T09:09:57.870809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:57.922835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785114904667835:2318], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:58.231654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785114904667835:2318], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:58.271458Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25491, node 1 2025-03-28T09:09:58.362488Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:09:58.362505Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:09:58.387233Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:58.387252Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:58.387265Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:58.387374Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28160 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:58.966178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:59.567354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:59.567459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:59.575883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected E0328 09:10:00.744333760 175997 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:10:00.744474428 175997 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:10:01.023761Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-03-28T09:10:01.023805Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-28T09:10:01.023813Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-28T09:10:01.025297Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-03-28T09:10:01.025314Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-28T09:10:01.025338Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-28T09:10:01.039734Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". 
Create session OK 2025-03-28T09:10:01.039766Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-28T09:10:01.039773Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-28T09:10:01.041039Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-03-28T09:10:01.041067Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-28T09:10:01.041074Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenant_acks" 2025-03-28T09:10:01.042091Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-03-28T09:10:01.042107Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-28T09:10:01.042113Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-28T09:10:01.046506Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-03-28T09:10:01.046533Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-28T09:10:01.046540Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-28T09:10:01.068815Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-03-28T09:10:01.068848Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/ ... d=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. TraceId : 01jqe08vexat58w4ja5x708rb8. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-03-28T09:10:33.079545Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785271236667107:2681], TxId: 281474976710692, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. TraceId : 01jqe08vexat58w4ja5x708rb8. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785271236667106 RawX2: 4503616807242360 } } DstEndpoint { ActorId { RawX1: 7486785271236667107 RawX2: 4503616807242361 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785271236667107 RawX2: 4503616807242361 } } DstEndpoint { ActorId { RawX1: 7486785271236667100 RawX2: 4503616807242094 } } InMemory: true } 2025-03-28T09:10:33.079556Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785271236667107:2681], TxId: 281474976710692, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. TraceId : 01jqe08vexat58w4ja5x708rb8. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Update input channelId: 1, peer: [4:7486785271236667106:2680] 2025-03-28T09:10:33.079598Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785271236667107:2681], TxId: 281474976710692, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. TraceId : 01jqe08vexat58w4ja5x708rb8. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-03-28T09:10:33.079705Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785271236667107:2681], TxId: 281474976710692, task: 2. Ctx: { CustomerSuppliedId : . 
SessionId : ydb://session/3?node_id=4&id=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. TraceId : 01jqe08vexat58w4ja5x708rb8. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785271236667106 RawX2: 4503616807242360 } } DstEndpoint { ActorId { RawX1: 7486785271236667107 RawX2: 4503616807242361 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785271236667107 RawX2: 4503616807242361 } } DstEndpoint { ActorId { RawX1: 7486785271236667100 RawX2: 4503616807242094 } } InMemory: true } 2025-03-28T09:10:33.079737Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785271236667107:2681], TxId: 281474976710692, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. TraceId : 01jqe08vexat58w4ja5x708rb8. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:10:33.086842Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 1, CA Id [4:7486785271236667106:2680]. Recv TEvReadResult from ShardID=72075186224037892, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2025-03-28T09:10:33.086875Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 1, CA Id [4:7486785271236667106:2680]. Taken 0 locks 2025-03-28T09:10:33.086891Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 1, CA Id [4:7486785271236667106:2680]. new data for read #0 seqno = 1 finished = 1 2025-03-28T09:10:33.086918Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785271236667106:2680], TxId: 281474976710692, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. CustomerSuppliedId : . TraceId : 01jqe08vexat58w4ja5x708rb8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 276037645 2025-03-28T09:10:33.086936Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785271236667106:2680], TxId: 281474976710692, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. CustomerSuppliedId : . TraceId : 01jqe08vexat58w4ja5x708rb8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:33.086954Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 1, CA Id [4:7486785271236667106:2680]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-03-28T09:10:33.086969Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 1, CA Id [4:7486785271236667106:2680]. enter pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-03-28T09:10:33.086983Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 1, CA Id [4:7486785271236667106:2680]. exit pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-03-28T09:10:33.086995Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 1, CA Id [4:7486785271236667106:2680]. returned 0 rows; processed 0 rows 2025-03-28T09:10:33.087037Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 1, CA Id [4:7486785271236667106:2680]. dropping batch for read #0 2025-03-28T09:10:33.087048Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 1, CA Id [4:7486785271236667106:2680]. 
effective maxinflight 1 sorted 1 2025-03-28T09:10:33.087060Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 1, CA Id [4:7486785271236667106:2680]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-03-28T09:10:33.087073Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 1, CA Id [4:7486785271236667106:2680]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-03-28T09:10:33.087148Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785271236667106:2680], TxId: 281474976710692, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. CustomerSuppliedId : . TraceId : 01jqe08vexat58w4ja5x708rb8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:10:33.087175Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785271236667107:2681], TxId: 281474976710692, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. TraceId : 01jqe08vexat58w4ja5x708rb8. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-03-28T09:10:33.087195Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 2. Finish input channelId: 1, from: [4:7486785271236667106:2680] 2025-03-28T09:10:33.087221Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785271236667107:2681], TxId: 281474976710692, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. TraceId : 01jqe08vexat58w4ja5x708rb8. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:10:33.087291Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785271236667107:2681], TxId: 281474976710692, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. TraceId : 01jqe08vexat58w4ja5x708rb8. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:10:33.087307Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785271236667106:2680], TxId: 281474976710692, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. CustomerSuppliedId : . TraceId : 01jqe08vexat58w4ja5x708rb8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2025-03-28T09:10:33.087322Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785271236667106:2680], TxId: 281474976710692, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. CustomerSuppliedId : . TraceId : 01jqe08vexat58w4ja5x708rb8. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:33.087341Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 1. Tasks execution finished 2025-03-28T09:10:33.087356Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785271236667106:2680], TxId: 281474976710692, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. CustomerSuppliedId : . TraceId : 01jqe08vexat58w4ja5x708rb8. 
CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-03-28T09:10:33.087445Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 1. pass away 2025-03-28T09:10:33.087535Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710692;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:10:33.087813Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785271236667107:2681], TxId: 281474976710692, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. TraceId : 01jqe08vexat58w4ja5x708rb8. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:10:33.087843Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-03-28T09:10:33.087853Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 2. Tasks execution finished 2025-03-28T09:10:33.087863Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785271236667107:2681], TxId: 281474976710692, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmU4MjJjZmUtNWZkZjIwNGItYjhiODNkZDQtYTk4OTQwYzU=. TraceId : 01jqe08vexat58w4ja5x708rb8. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-28T09:10:33.087907Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976710692, task: 2. pass away 2025-03-28T09:10:33.087949Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710692;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:10:33.495044Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:27649: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:27649 E0328 09:10:33.827668239 179100 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:10:33.827833819 179100 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:10:33.926918Z node 4 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:27649: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:27649 >> SystemView::ShowCreateTableKeyBloomFilter [GOOD] >> SystemView::ShowCreateTableTtlSettings |81.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |81.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> THiveTest::TestLockTabletExecutionRebootReconnect [GOOD] >> THiveTest::TestLockTabletExecutionReconnectExpire ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] Test command err: 2025-03-28T09:05:51.517997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:51.518066Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:51.573550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:05:53.003392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:53.003454Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:53.062504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:05:54.207487Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:54.207550Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:54.262479Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:05:57.411480Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:05:57.411562Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:05:57.471321Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:00.211572Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:00.211653Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:00.265682Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:01.517992Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:01.518053Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:01.573302Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:02.119449Z node 6 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 7 2025-03-28T09:06:02.119760Z node 6 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 7 2025-03-28T09:06:02.119834Z node 6 :PIPE_SERVER ERROR: [72057594037936131] NodeDisconnected NodeId# 7 2025-03-28T09:06:02.120002Z node 7 :TX_PROXY WARN: actor# [7:346:2087] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-03-28T09:06:03.012589Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:03.012670Z node 8 
:IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:03.064225Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:03.625807Z node 8 :BS_CONTROLLER ERROR: {BSC26@console_interaction.cpp:113} failed to parse config obtained from Console ErrorReason# ydb/library/yaml_config/yaml_config_parser.cpp:1268: Condition violated: `config.HasDomainsConfig()' Yaml# --- metadata: kind: MainConfig cluster: "" version: 1 config: log_config: cluster_name: cluster1 allowed_labels: test: type: enum values: ? true selector_config: [] 2025-03-28T09:06:04.347748Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:06:04.347810Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:06:04.412254Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:06:05.588331Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-03-28T09:06:05.760056Z node 10 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:06:05.760520Z node 10 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001d14/r3tmp/tmpZtSyuH/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:06:05.761295Z node 10 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001d14/r3tmp/tmpZtSyuH/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001d14/r3tmp/tmpZtSyuH/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12087626901231569864 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:06:05.765093Z node 10 :BS_LOCALRECOVERY CRIT: VDISK[80000000:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file 
"/home/runner/.ya/build/build_root/2909/001d14/r3tmp/tmpZtSyuH/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:06:05.809679Z node 16 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:06:05.810024Z node 16 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001d14/r3tmp/tmpZtSyuH/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:06:05.810281Z node 16 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001d14/r3tmp/tmpZtSyuH/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001d14/r3tmp/tmpZtSyuH/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 18357211964983996349 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:06:05.865359Z node 11 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:06:05.865709Z node 11 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001d14/r3tmp/tmpZtSyuH/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:06:05.865912Z node 11 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001d14/r3tmp/tmpZtSyuH/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001d14/r3tmp/tmpZtSyuH/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9054863806742317844 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 Sec ... 
:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:09:28.925173Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:09:34.663385Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:09:34.663777Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:09:40.501072Z node 108 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.108281s 2025-03-28T09:09:40.501225Z node 108 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.108458s 2025-03-28T09:09:40.501356Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:09:40.510987Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:09:40.730572Z node 108 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.107263s 2025-03-28T09:09:40.730767Z node 108 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.107754s 2025-03-28T09:09:40.730913Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 
2025-03-28T09:09:40.731419Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:09:46.549402Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:09:46.549865Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:09:52.865319Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:09:52.865590Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:09:58.874162Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:09:58.874466Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:10:04.841306Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:10:04.841569Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant 
/dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:10:11.175200Z node 108 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.170599s 2025-03-28T09:10:11.175368Z node 108 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.170811s 2025-03-28T09:10:11.175484Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:10:11.175830Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:10:17.223177Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:10:17.223429Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:10:23.105632Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:10:23.106309Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:10:29.047055Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# 
{[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:10:29.047401Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:10:34.862239Z node 108 :CMS_TENANTS ERROR: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} 2025-03-28T09:10:34.862533Z node 108 :CMS_TENANTS CRIT: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(108:1-s[16/16])(109:1000-s[16/16]o)(110:1000-s[16/16]o)(111:1000-s[16/16]o)(112:1000-s[16/16]o)(113:1000-s[16/16]o)(114:1000-s[16/16]o)(115:1000-s[16/16]o)(116:1000-s[16/16]o)]} >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup [GOOD] >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink |81.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] >> PrivateApi::Nodes [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftJoin+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 29286, MsgBus: 8346 2025-03-28T09:10:31.783094Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785261485624034:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:31.783726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001724/r3tmp/tmpJmauqG/pdisk_1.dat 2025-03-28T09:10:32.556494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:32.556562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:32.560466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:10:32.618603Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29286, node 1 2025-03-28T09:10:32.779652Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:10:32.779691Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:10:32.779707Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:10:32.779811Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:8346 TClient is connected to server localhost:8346 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:10:33.576981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:33.603728Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:33.617897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:10:33.910926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:34.114859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:34.253878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:36.770811Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785261485624034:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:36.780389Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:36.896629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785282960462186:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:36.896724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:37.496394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:10:37.532069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:10:37.582764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:10:37.628024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:10:37.689989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:10:37.750870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:10:37.838411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785287255430001:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:37.838514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:37.838782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785287255430006:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:37.842650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:10:37.856706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785287255430008:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:10:37.959721Z node 1 :TX_PROXY ERROR: Actor# [1:7486785287255430064:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:10:39.342664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:10:39.389348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:10:39.439786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:10:39.551914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:10:39.595128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:10:39.632627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpScanSpilling::SpillingPragmaParseError >> Yq_1::ModifyQuery [GOOD] >> TLocksTest::BrokenSameKeyLock >> THiveTest::TestLockTabletExecutionReconnectExpire [GOOD] >> THiveTest::TestLockTabletExecutionStealLock |81.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink |81.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:08:00.437601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:08:00.437724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:08:00.437785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:08:00.437848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:08:00.437893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:08:00.437920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:08:00.437980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:08:00.438058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:08:00.438456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:08:00.567328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:08:00.567408Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:00.605636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:08:00.605936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:08:00.606095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:08:00.646028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:08:00.646276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:08:00.646986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:00.647367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:08:00.658867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:00.660568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:08:00.660641Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:00.660811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:08:00.660857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:00.660902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:08:00.660998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:08:00.684527Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:08:00.854093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:08:00.854346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:00.854602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:08:00.854966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:08:00.855036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:00.857741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:00.857889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:08:00.858147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:00.858225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:08:00.858270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:08:00.858306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:08:00.865416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:00.865534Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:08:00.865578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:08:00.868351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:00.868413Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:00.868456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:00.868507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:08:00.875754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:08:00.881924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:08:00.882173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:08:00.883322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:08:00.883462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:08:00.883532Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:00.883797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:08:00.883857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:08:00.884041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:08:00.884155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:08:00.886955Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:08:00.887042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:08:00.887259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:08:00.887338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:08:00.887770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:08:00.887826Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:08:00.887927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:08:00.887959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:00.888015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:08:00.888051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:00.888087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:08:00.888137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:08:00.888176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:08:00.888258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:08:00.888367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:08:00.888416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:08:00.888455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:08:00.890534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:08:00.890669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:08:00.891172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:10:41.876443Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:10:41.876469Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:10:41.876994Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2025-03-28T09:10:41.877102Z node 19 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:10:41.877521Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 5 2025-03-28T09:10:41.877768Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 4/5 2025-03-28T09:10:41.877842Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2025-03-28T09:10:41.877905Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:2 progress is 4/5 2025-03-28T09:10:41.877975Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2025-03-28T09:10:41.878055Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: false 2025-03-28T09:10:41.879685Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:10:41.879756Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:10:41.879778Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:10:41.879877Z node 19 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:10:41.879932Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:10:41.879958Z node 19 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:10:41.879985Z node 19 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-28T09:10:41.880011Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing 
for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:10:41.880115Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: true 2025-03-28T09:10:41.881732Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:10:41.881780Z node 19 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:10:41.882089Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:10:41.882214Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 5/5 2025-03-28T09:10:41.882240Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-03-28T09:10:41.882269Z node 19 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 5/5 2025-03-28T09:10:41.882293Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-03-28T09:10:41.882321Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 5/5, is published: true 2025-03-28T09:10:41.882419Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [19:377:2345] message: TxId: 103 2025-03-28T09:10:41.882518Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2025-03-28T09:10:41.882596Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:10:41.882651Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:10:41.882817Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:10:41.882875Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:1 2025-03-28T09:10:41.882912Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:1 2025-03-28T09:10:41.882957Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:10:41.882984Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:2 2025-03-28T09:10:41.883008Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:2 2025-03-28T09:10:41.883051Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:10:41.883081Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:3 2025-03-28T09:10:41.883102Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:3 2025-03-28T09:10:41.883138Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-03-28T09:10:41.883165Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:4 2025-03-28T09:10:41.883187Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:4 2025-03-28T09:10:41.883246Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 1 2025-03-28T09:10:41.884111Z 
node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:10:41.884201Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 6], at schemeshard: 72057594046678944 2025-03-28T09:10:41.884328Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-03-28T09:10:41.884422Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-03-28T09:10:41.884469Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:10:41.884997Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:10:41.886383Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:10:41.886487Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:10:41.886523Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:10:41.889536Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:10:41.889633Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:10:41.890254Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:10:41.890341Z node 19 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [19:757:2660] 2025-03-28T09:10:41.891788Z node 19 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-03-28T09:10:41.892462Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:10:41.892868Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 446us result status StatusPathDoesNotExist 2025-03-28T09:10:41.893123Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4])" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:10:41.893927Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:10:41.894295Z node 19 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" took 418us result status StatusPathDoesNotExist 2025-03-28T09:10:41.894507Z node 19 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 4])" Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> PrivateApi::Nodes [GOOD] Test command err: 2025-03-28T09:09:41.142284Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785048866726859:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:41.142419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0328 09:09:42.034434340 174630 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:42.034579270 174630 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:42.247956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:43.279180Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:43.285397Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:6354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6354 } ] 2025-03-28T09:09:43.355175Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:6354: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:6354 2025-03-28T09:09:44.280119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:44.793628Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:6354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6354 } ] 2025-03-28T09:09:45.283692Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:46.074967Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785048866726859:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:46.075017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:46.284942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:47.187511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; E0328 09:09:47.203662953 174709 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:47.203828050 174709 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:47.290843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:47.500202Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:6354: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:6354 } ] 2025-03-28T09:09:47.572530Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:6354: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:6354 2025-03-28T09:09:48.189773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:48.295275Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:49.295124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:49.315159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e34/r3tmp/tmpSO7iJU/pdisk_1.dat 2025-03-28T09:09:50.310881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:50.326896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:50.707189Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:09:50.731345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785087521433187:2325], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:50.880148Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6354, node 1 2025-03-28T09:09:51.001977Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:51.001995Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:51.002002Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:51.002102Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28671 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:51.631380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:51.967318Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-03-28T09:09:51.967367Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-28T09:09:51.967375Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-28T09:09:51.983182Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-03-28T09:09:51.983218Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-28T09:09:51.983229Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-28T09:09:51.999612Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-03-28T09:09:51.999632Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-28T09:09:51.999637Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-28T09:09:52.002796Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-03-28T09:09:52.002830Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-28T09:09:52.002837Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-28T09:09:52.021496Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". 
Create session OK 2025-03-28T09:09:52.021520Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-28T09:09:52.021526Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-28T09:09:52.036384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:09:52.149531Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-03-28T09:09:52.149564Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-28T09:09:52.149571Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-28T09:09:52.185808Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-03-28T09:09:52.185835Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-28T09:09:52.185841Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-28T09:09:52.189874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:09:52.199898Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-03-28T09:09:52.199914Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-28T09:09:52.199920Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-28T09:09:52.207049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:09:52.208685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part p ... 25-03-28T09:10:39.466355Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213596:2631], TxId: 281474976710685, task: 1. Ctx: { TraceId : 01jqe0922cd4a2gkd1x1dvpwfr. SessionId : ydb://session/3?node_id=7&id=ZGQyODhkMDQtNDhkODkwZmUtM2U0NjUwNGMtNTk3Y2FlNDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:39.466368Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710685, task: 1, CA Id [7:7486785296546213596:2631]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-03-28T09:10:39.466384Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710685, task: 1, CA Id [7:7486785296546213596:2631]. enter pack cells method shardId: 72075186224037889 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-03-28T09:10:39.466398Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710685, task: 1, CA Id [7:7486785296546213596:2631]. exit pack cells method shardId: 72075186224037889 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-03-28T09:10:39.466409Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710685, task: 1, CA Id [7:7486785296546213596:2631]. returned 0 rows; processed 0 rows 2025-03-28T09:10:39.466445Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710685, task: 1, CA Id [7:7486785296546213596:2631]. dropping batch for read #0 2025-03-28T09:10:39.466455Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710685, task: 1, CA Id [7:7486785296546213596:2631]. 
effective maxinflight 1024 sorted 0 2025-03-28T09:10:39.466464Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710685, task: 1, CA Id [7:7486785296546213596:2631]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-03-28T09:10:39.466477Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710685, task: 1, CA Id [7:7486785296546213596:2631]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-03-28T09:10:39.466536Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213596:2631], TxId: 281474976710685, task: 1. Ctx: { TraceId : 01jqe0922cd4a2gkd1x1dvpwfr. SessionId : ydb://session/3?node_id=7&id=ZGQyODhkMDQtNDhkODkwZmUtM2U0NjUwNGMtNTk3Y2FlNDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:10:39.466552Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213608:2634], TxId: 281474976710686, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=YjFmMDM5ZTktZmJhNzdlMTctMzM3NDViYTQtYjFjMzY1MWM=. TraceId : 01jqe0922v27s94m7mnntnrtc3. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646923 2025-03-28T09:10:39.466574Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710686, task: 2. Finish input channelId: 1, from: [7:7486785296546213607:2633] 2025-03-28T09:10:39.466606Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213608:2634], TxId: 281474976710686, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=YjFmMDM5ZTktZmJhNzdlMTctMzM3NDViYTQtYjFjMzY1MWM=. TraceId : 01jqe0922v27s94m7mnntnrtc3. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:39.466655Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213608:2634], TxId: 281474976710686, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=YjFmMDM5ZTktZmJhNzdlMTctMzM3NDViYTQtYjFjMzY1MWM=. TraceId : 01jqe0922v27s94m7mnntnrtc3. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:10:39.466669Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213597:2632], TxId: 281474976710685, task: 2. Ctx: { TraceId : 01jqe0922cd4a2gkd1x1dvpwfr. SessionId : ydb://session/3?node_id=7&id=ZGQyODhkMDQtNDhkODkwZmUtM2U0NjUwNGMtNTk3Y2FlNDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-03-28T09:10:39.466697Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710685, task: 2. Finish input channelId: 1, from: [7:7486785296546213596:2631] 2025-03-28T09:10:39.466719Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213597:2632], TxId: 281474976710685, task: 2. Ctx: { TraceId : 01jqe0922cd4a2gkd1x1dvpwfr. SessionId : ydb://session/3?node_id=7&id=ZGQyODhkMDQtNDhkODkwZmUtM2U0NjUwNGMtNTk3Y2FlNDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:10:39.466767Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213597:2632], TxId: 281474976710685, task: 2. Ctx: { TraceId : 01jqe0922cd4a2gkd1x1dvpwfr. 
SessionId : ydb://session/3?node_id=7&id=ZGQyODhkMDQtNDhkODkwZmUtM2U0NjUwNGMtNTk3Y2FlNDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:10:39.466778Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213607:2633], TxId: 281474976710686, task: 1. Ctx: { TraceId : 01jqe0922v27s94m7mnntnrtc3. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=YjFmMDM5ZTktZmJhNzdlMTctMzM3NDViYTQtYjFjMzY1MWM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2025-03-28T09:10:39.466800Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213607:2633], TxId: 281474976710686, task: 1. Ctx: { TraceId : 01jqe0922v27s94m7mnntnrtc3. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=YjFmMDM5ZTktZmJhNzdlMTctMzM3NDViYTQtYjFjMzY1MWM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:39.466820Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710686, task: 1. Tasks execution finished 2025-03-28T09:10:39.466836Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213607:2633], TxId: 281474976710686, task: 1. Ctx: { TraceId : 01jqe0922v27s94m7mnntnrtc3. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=YjFmMDM5ZTktZmJhNzdlMTctMzM3NDViYTQtYjFjMzY1MWM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-03-28T09:10:39.466964Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710686, task: 1. pass away 2025-03-28T09:10:39.467072Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710686;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:10:39.467442Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213596:2631], TxId: 281474976710685, task: 1. Ctx: { TraceId : 01jqe0922cd4a2gkd1x1dvpwfr. SessionId : ydb://session/3?node_id=7&id=ZGQyODhkMDQtNDhkODkwZmUtM2U0NjUwNGMtNTk3Y2FlNDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2025-03-28T09:10:39.467465Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213596:2631], TxId: 281474976710685, task: 1. Ctx: { TraceId : 01jqe0922cd4a2gkd1x1dvpwfr. SessionId : ydb://session/3?node_id=7&id=ZGQyODhkMDQtNDhkODkwZmUtM2U0NjUwNGMtNTk3Y2FlNDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:39.467485Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710685, task: 1. Tasks execution finished 2025-03-28T09:10:39.467497Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213596:2631], TxId: 281474976710685, task: 1. Ctx: { TraceId : 01jqe0922cd4a2gkd1x1dvpwfr. SessionId : ydb://session/3?node_id=7&id=ZGQyODhkMDQtNDhkODkwZmUtM2U0NjUwNGMtNTk3Y2FlNDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-03-28T09:10:39.467587Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710685, task: 1. pass away 2025-03-28T09:10:39.467653Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710685;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:10:39.471533Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213608:2634], TxId: 281474976710686, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=YjFmMDM5ZTktZmJhNzdlMTctMzM3NDViYTQtYjFjMzY1MWM=. TraceId : 01jqe0922v27s94m7mnntnrtc3. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:39.471600Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710686, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-03-28T09:10:39.471615Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710686, task: 2. Tasks execution finished 2025-03-28T09:10:39.471631Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213608:2634], TxId: 281474976710686, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=7&id=YjFmMDM5ZTktZmJhNzdlMTctMzM3NDViYTQtYjFjMzY1MWM=. TraceId : 01jqe0922v27s94m7mnntnrtc3. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-03-28T09:10:39.471724Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710686, task: 2. pass away 2025-03-28T09:10:39.471808Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710686;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:10:39.472033Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213597:2632], TxId: 281474976710685, task: 2. Ctx: { TraceId : 01jqe0922cd4a2gkd1x1dvpwfr. SessionId : ydb://session/3?node_id=7&id=ZGQyODhkMDQtNDhkODkwZmUtM2U0NjUwNGMtNTk3Y2FlNDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:10:39.472072Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710685, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-03-28T09:10:39.472083Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710685, task: 2. Tasks execution finished 2025-03-28T09:10:39.472095Z node 7 :KQP_COMPUTE DEBUG: SelfId: [7:7486785296546213597:2632], TxId: 281474976710685, task: 2. Ctx: { TraceId : 01jqe0922cd4a2gkd1x1dvpwfr. SessionId : ydb://session/3?node_id=7&id=ZGQyODhkMDQtNDhkODkwZmUtM2U0NjUwNGMtNTk3Y2FlNDQ=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-28T09:10:39.472153Z node 7 :KQP_COMPUTE DEBUG: TxId: 281474976710685, task: 2. pass away 2025-03-28T09:10:39.472203Z node 7 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710685;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:10:39.501797Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710687. Ctx: { TraceId: 01jqe09248016j7znz96hnz8vh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZGQyODhkMDQtNDhkODkwZmUtM2U0NjUwNGMtNTk3Y2FlNDQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:10:40.442571Z node 7 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:14154: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:14154 >> THiveTest::TestLockTabletExecutionStealLock [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled >> Yq_1::DescribeQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ModifyQuery [GOOD] Test command err: 2025-03-28T09:09:46.613000Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785067879965232:2292];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:46.613066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0328 09:09:47.495830651 175398 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:47.495960315 175398 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:47.712304Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:48.661353Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:7093: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:7093 } ] 2025-03-28T09:09:48.715377Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:7093: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:7093 2025-03-28T09:09:48.768588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:49.780689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:50.279692Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:7093: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:7093 } ] 2025-03-28T09:09:50.793475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:51.615034Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785067879965232:2292];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:51.615083Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:51.799660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:52.532632Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:7093: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:7093 } ] E0328 09:09:52.537962167 175450 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:52.562946490 175450 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:52.627597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:52.804704Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:53.628662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:53.810968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:54.317551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785102239703805:2314], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:54.317858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:09:54.403945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785102239703805:2314], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000dc7/r3tmp/tmp0rSUFG/pdisk_1.dat 2025-03-28T09:09:54.581867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785102239703805:2314], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:54.637434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:54.820682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 7093, node 1 TClient is connected to server localhost:31176 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:55.258136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:55.566014Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:55.566233Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:55.566253Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:55.566261Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:55.566447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:09:55.850898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:55.850996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:55.860079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:56.382951Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-03-28T09:09:56.382986Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-28T09:09:56.382993Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-28T09:09:56.384420Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-03-28T09:09:56.384467Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-28T09:09:56.384475Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-28T09:09:56.385370Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". 
Create session OK 2025-03-28T09:09:56.385387Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-28T09:09:56.385392Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-28T09:09:56.386251Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-03-28T09:09:56.386262Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-28T09:09:56.386267Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-28T09:09:56.390988Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-03-28T09:09:56.391013Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-28T09:09:56.391020Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-28T09:09:56.510916Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-03-28T09:09:56.510954Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-03-28T09:09:56.510989Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-03-28T09:09:56.511004Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-28T09:09:56.511011Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-28T09:09:56.553789Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-03-28T09:09:56.553834Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-28T09:09:56.553842Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-28T09:09:56.559752Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/pending_small". Create session OK 2025-03-28T09:09:56.559786Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-28T09:09:56.559793Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/pending_small" 2025-03-28T09:09:56.561000Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-03-28T09:09:56.561012Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-28T09:09:56.561017Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-28T09:09:56.561926Z node 1 :YQ_CONT ... s 0 left freeSpace 8388608 received rows 0 running reads 1 pending shards 0 finished = 0 has limit 0 limit reached 0 2025-03-28T09:10:39.776510Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7486785295746781165:3102]. Recv TEvReadResult from ShardID=72075186224037891, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-03-28T09:10:39.776528Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7486785295746781165:3102]. Taken 0 locks 2025-03-28T09:10:39.776545Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7486785295746781165:3102]. new data for read #0 seqno = 1 finished = 1 2025-03-28T09:10:39.776566Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781165:3102], TxId: 281474976715827, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . TraceId : 01jqe091vy2dxs16gvpvt85e6k. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. 
Database : . }. CA StateFunc 276037645 2025-03-28T09:10:39.776587Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781165:3102], TxId: 281474976715827, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . TraceId : 01jqe091vy2dxs16gvpvt85e6k. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:39.776604Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7486785295746781165:3102]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-03-28T09:10:39.776622Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7486785295746781165:3102]. enter pack cells method shardId: 72075186224037891 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-03-28T09:10:39.776648Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7486785295746781165:3102]. exit pack cells method shardId: 72075186224037891 processedRows: 0 packed rows: 1 freeSpace: 8387508 2025-03-28T09:10:39.776666Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7486785295746781165:3102]. returned 1 rows; processed 1 rows 2025-03-28T09:10:39.776706Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7486785295746781165:3102]. dropping batch for read #0 2025-03-28T09:10:39.776717Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7486785295746781165:3102]. effective maxinflight 1024 sorted 0 2025-03-28T09:10:39.776729Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7486785295746781165:3102]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2025-03-28T09:10:39.776744Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1, CA Id [4:7486785295746781165:3102]. returned async data processed rows 1 left freeSpace 8387508 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-03-28T09:10:39.776957Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781165:3102], TxId: 281474976715827, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . TraceId : 01jqe091vy2dxs16gvpvt85e6k. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:10:39.776979Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781165:3102], TxId: 281474976715827, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . TraceId : 01jqe091vy2dxs16gvpvt85e6k. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:39.777023Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-03-28T09:10:39.777326Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781166:3103], TxId: 281474976715827, task: 2. Ctx: { TraceId : 01jqe091vy2dxs16gvpvt85e6k. SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785295746781165 RawX2: 4503616807242782 } } DstEndpoint { ActorId { RawX1: 7486785295746781166 RawX2: 4503616807242783 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785295746781166 RawX2: 4503616807242783 } } DstEndpoint { ActorId { RawX1: 7486785295746781161 RawX2: 4503616807242072 } } InMemory: true } 2025-03-28T09:10:39.777345Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781166:3103], TxId: 281474976715827, task: 2. Ctx: { TraceId : 01jqe091vy2dxs16gvpvt85e6k. SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Update input channelId: 1, peer: [4:7486785295746781165:3102] 2025-03-28T09:10:39.777414Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781166:3103], TxId: 281474976715827, task: 2. Ctx: { TraceId : 01jqe091vy2dxs16gvpvt85e6k. SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646926 2025-03-28T09:10:39.777523Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781166:3103], TxId: 281474976715827, task: 2. Ctx: { TraceId : 01jqe091vy2dxs16gvpvt85e6k. SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785295746781165 RawX2: 4503616807242782 } } DstEndpoint { ActorId { RawX1: 7486785295746781166 RawX2: 4503616807242783 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785295746781166 RawX2: 4503616807242783 } } DstEndpoint { ActorId { RawX1: 7486785295746781161 RawX2: 4503616807242072 } } InMemory: true } 2025-03-28T09:10:39.777551Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781166:3103], TxId: 281474976715827, task: 2. Ctx: { TraceId : 01jqe091vy2dxs16gvpvt85e6k. SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:10:39.777573Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781166:3103], TxId: 281474976715827, task: 2. Ctx: { TraceId : 01jqe091vy2dxs16gvpvt85e6k. SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646923 2025-03-28T09:10:39.777605Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 2. Finish input channelId: 1, from: [4:7486785295746781165:3102] 2025-03-28T09:10:39.777648Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781166:3103], TxId: 281474976715827, task: 2. Ctx: { TraceId : 01jqe091vy2dxs16gvpvt85e6k. SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. 
CA StateFunc 271646922 2025-03-28T09:10:39.777885Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781166:3103], TxId: 281474976715827, task: 2. Ctx: { TraceId : 01jqe091vy2dxs16gvpvt85e6k. SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:10:39.777909Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781165:3102], TxId: 281474976715827, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . TraceId : 01jqe091vy2dxs16gvpvt85e6k. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2025-03-28T09:10:39.777939Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781165:3102], TxId: 281474976715827, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . TraceId : 01jqe091vy2dxs16gvpvt85e6k. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:39.777961Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1. Tasks execution finished 2025-03-28T09:10:39.777984Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781165:3102], TxId: 281474976715827, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . TraceId : 01jqe091vy2dxs16gvpvt85e6k. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-03-28T09:10:39.778114Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 1. pass away 2025-03-28T09:10:39.778228Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715827;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:10:39.778715Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781166:3103], TxId: 281474976715827, task: 2. Ctx: { TraceId : 01jqe091vy2dxs16gvpvt85e6k. SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:10:39.778771Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-03-28T09:10:39.778782Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 2. Tasks execution finished 2025-03-28T09:10:39.778794Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785295746781166:3103], TxId: 281474976715827, task: 2. Ctx: { TraceId : 01jqe091vy2dxs16gvpvt85e6k. SessionId : ydb://session/3?node_id=4&id=NWNhYWUzZmMtZmRkZTU5MzMtMjU0OThmMzQtOTVkYjY0MjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-28T09:10:39.778851Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715827, task: 2. pass away 2025-03-28T09:10:39.778905Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715827;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:10:39.969609Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:21903: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:21903 2025-03-28T09:10:40.958069Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: Client is stopped |81.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] |81.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl |81.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4076, MsgBus: 14172 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014a1/r3tmp/tmpV8eoqV/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4076, node 1 TClient is connected to server localhost:14172 TClient is connected to server localhost:14172 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DescribeQuery [GOOD] Test command err: 2025-03-28T09:09:41.922861Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785046819612185:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:41.922971Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0328 09:09:43.060228301 174786 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:43.060359413 174786 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:43.116367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:44.138078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:44.258706Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8398: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8398 } ] 2025-03-28T09:09:44.259924Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8398: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8398 2025-03-28T09:09:45.139465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:45.931353Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8398: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8398 } ] 2025-03-28T09:09:46.143144Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:46.926939Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785046819612185:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:46.926990Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:47.235142Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:47.948101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; E0328 09:09:48.063369262 174882 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:48.063557545 174882 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:48.235466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:48.840756Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8398: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8398 } ] 2025-03-28T09:09:48.865237Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:8398: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8398 2025-03-28T09:09:48.952656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:49.271028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:49.957952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:50.275688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:50.984199Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:51.278333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:51.993842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:52.113442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:09:52.150144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785094064252962:2322], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e2e/r3tmp/tmpj4HuSA/pdisk_1.dat 2025-03-28T09:09:52.276685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785094064252962:2322], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:09:52.292596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:52.445954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785094064252962:2322], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 8398, node 1 TClient is connected to server localhost:8953 WaitRootIsUp 'Root'... TClient::Ls request: Root E0328 09:09:53.067512369 174882 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:53.067641696 174882 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:53.330807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:09:53.360750Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-03-28T09:09:53.360786Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-28T09:09:53.360797Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-28T09:09:53.364349Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-03-28T09:09:53.364371Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-28T09:09:53.364377Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-28T09:09:53.365282Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-03-28T09:09:53.365293Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-28T09:09:53.365299Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" waiting... 2025-03-28T09:09:53.373744Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-03-28T09:09:53.373769Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-28T09:09:53.373775Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-28T09:09:53.374841Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-03-28T09:09:53.374856Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-28T09:09:53.374862Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-28T09:09:53.387292Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". 
Create session OK 2025-03-28T09:09:53.387334Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-28T09:09:53.387341Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-28T09:09:53.389133Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-03-28T09:09:53.389153Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-28T09:09:53.389163Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-28T09:09:53.392442Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-03-28T09:09:53.392469Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-28T09:09:53.392475Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-28T09:09:53.394058Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Ro ... ssionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. TraceId : 01jqe0944308qft45d1xcp5v9j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-03-28T09:10:41.951789Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870187:3039], TxId: 281474976715817, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. TraceId : 01jqe0944308qft45d1xcp5v9j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785306132870186 RawX2: 4503616807242718 } } DstEndpoint { ActorId { RawX1: 7486785306132870187 RawX2: 4503616807242719 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785306132870187 RawX2: 4503616807242719 } } DstEndpoint { ActorId { RawX1: 7486785306132870182 RawX2: 4503616807242210 } } InMemory: true } 2025-03-28T09:10:41.951802Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870187:3039], TxId: 281474976715817, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. TraceId : 01jqe0944308qft45d1xcp5v9j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Update input channelId: 1, peer: [4:7486785306132870186:3038] 2025-03-28T09:10:41.951848Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870187:3039], TxId: 281474976715817, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. TraceId : 01jqe0944308qft45d1xcp5v9j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646926 2025-03-28T09:10:41.951946Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870187:3039], TxId: 281474976715817, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. TraceId : 01jqe0944308qft45d1xcp5v9j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785306132870186 RawX2: 4503616807242718 } } DstEndpoint { ActorId { RawX1: 7486785306132870187 RawX2: 4503616807242719 } } InMemory: true DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7486785306132870187 RawX2: 4503616807242719 } } DstEndpoint { ActorId { RawX1: 7486785306132870182 RawX2: 4503616807242210 } } InMemory: true } 2025-03-28T09:10:41.951968Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870187:3039], TxId: 281474976715817, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. TraceId : 01jqe0944308qft45d1xcp5v9j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:41.955601Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 1, CA Id [4:7486785306132870186:3038]. Recv TEvReadResult from ShardID=72075186224037898, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2025-03-28T09:10:41.955639Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 1, CA Id [4:7486785306132870186:3038]. Taken 0 locks 2025-03-28T09:10:41.955657Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 1, CA Id [4:7486785306132870186:3038]. new data for read #0 seqno = 1 finished = 1 2025-03-28T09:10:41.955683Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870186:3038], TxId: 281474976715817, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe0944308qft45d1xcp5v9j. SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 276037645 2025-03-28T09:10:41.955704Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870186:3038], TxId: 281474976715817, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe0944308qft45d1xcp5v9j. SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:41.955727Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 1, CA Id [4:7486785306132870186:3038]. enter getasyncinputdata results size 1, freeSpace 8388608 2025-03-28T09:10:41.955747Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 1, CA Id [4:7486785306132870186:3038]. enter pack cells method shardId: 72075186224037898 processedRows: 0 packed rows: 0 freeSpace: 8388608 2025-03-28T09:10:41.955775Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 1, CA Id [4:7486785306132870186:3038]. exit pack cells method shardId: 72075186224037898 processedRows: 0 packed rows: 1 freeSpace: 8386368 2025-03-28T09:10:41.955796Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 1, CA Id [4:7486785306132870186:3038]. returned 1 rows; processed 1 rows 2025-03-28T09:10:41.955832Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 1, CA Id [4:7486785306132870186:3038]. dropping batch for read #0 2025-03-28T09:10:41.955846Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 1, CA Id [4:7486785306132870186:3038]. effective maxinflight 1024 sorted 0 2025-03-28T09:10:41.955860Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 1, CA Id [4:7486785306132870186:3038]. Scheduled table scans, in flight: 0 shards. 
pending shards to read: 0, 2025-03-28T09:10:41.955880Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 1, CA Id [4:7486785306132870186:3038]. returned async data processed rows 1 left freeSpace 8386368 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2025-03-28T09:10:41.956134Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870186:3038], TxId: 281474976715817, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe0944308qft45d1xcp5v9j. SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:10:41.956166Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870186:3038], TxId: 281474976715817, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe0944308qft45d1xcp5v9j. SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:41.956207Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-03-28T09:10:41.956228Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870187:3039], TxId: 281474976715817, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. TraceId : 01jqe0944308qft45d1xcp5v9j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646923 2025-03-28T09:10:41.956258Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 2. Finish input channelId: 1, from: [4:7486785306132870186:3038] 2025-03-28T09:10:41.956295Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870187:3039], TxId: 281474976715817, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. TraceId : 01jqe0944308qft45d1xcp5v9j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:41.956486Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870187:3039], TxId: 281474976715817, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. TraceId : 01jqe0944308qft45d1xcp5v9j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:10:41.956507Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870186:3038], TxId: 281474976715817, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe0944308qft45d1xcp5v9j. SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646927 2025-03-28T09:10:41.956536Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870186:3038], TxId: 281474976715817, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe0944308qft45d1xcp5v9j. SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. 
CA StateFunc 271646922 2025-03-28T09:10:41.956557Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 1. Tasks execution finished 2025-03-28T09:10:41.956571Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870186:3038], TxId: 281474976715817, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe0944308qft45d1xcp5v9j. SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-03-28T09:10:41.956683Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 1. pass away 2025-03-28T09:10:41.956779Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715817;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:10:41.957172Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870187:3039], TxId: 281474976715817, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. TraceId : 01jqe0944308qft45d1xcp5v9j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646922 2025-03-28T09:10:41.957212Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-03-28T09:10:41.957223Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 2. Tasks execution finished 2025-03-28T09:10:41.957235Z node 4 :KQP_COMPUTE DEBUG: SelfId: [4:7486785306132870187:3039], TxId: 281474976715817, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NmM0NjJmMDMtMzM1ZWUyMmYtYWQ2YzA1OWItOWQzNDZmZQ==. TraceId : 01jqe0944308qft45d1xcp5v9j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Compute state finished. All channels and sinks finished 2025-03-28T09:10:41.957288Z node 4 :KQP_COMPUTE DEBUG: TxId: 281474976715817, task: 2. pass away 2025-03-28T09:10:41.957341Z node 4 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715817;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:10:42.661060Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv6:%5B::%5D:22241: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:22241 >> KqpJoinOrder::CanonizedJoinOrderTPCH9 [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5381, MsgBus: 10990 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001464/r3tmp/tmp5CqJ9g/pdisk_1.dat TServer::EnableGrpc on GrpcPort 5381, node 1 TClient is connected to server localhost:10990 TClient is connected to server localhost:10990 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |81.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |81.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink >> KqpJoinOrder::TestJoinHint1+ColumnStore [GOOD] >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] >> SystemView::AuthOwners_Access [GOOD] >> SystemView::AuthOwners_ResultOrder |81.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] |81.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink |81.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |81.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |81.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20767, MsgBus: 8691 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001452/r3tmp/tmpWGoh8U/pdisk_1.dat TServer::EnableGrpc on GrpcPort 20767, node 1 TClient is connected to server localhost:8691 TClient is connected to server localhost:8691 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] Test command err: 2025-03-28T09:09:44.330154Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:09:44.333771Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:09:44.333974Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:09:44.334895Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:45:2072] ControllerId# 72057594037932033 2025-03-28T09:09:44.334935Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:09:44.335035Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:09:44.335374Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:09:44.337365Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:09:44.337424Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:09:44.341132Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:51:2076] targetNodeId# 1 Marker# DSP01 2025-03-28T09:09:44.341357Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:52:2077] targetNodeId# 1 Marker# DSP01 2025-03-28T09:09:44.341536Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:53:2078] targetNodeId# 1 Marker# DSP01 2025-03-28T09:09:44.341706Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:54:2079] targetNodeId# 1 Marker# DSP01 2025-03-28T09:09:44.341838Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:55:2080] targetNodeId# 1 Marker# DSP01 2025-03-28T09:09:44.341994Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:56:2081] targetNodeId# 1 Marker# DSP01 2025-03-28T09:09:44.342150Z node 2 :BS_PROXY DEBUG: Group# 0 
Actor# [2:44:2071] Create Queue# [2:57:2082] targetNodeId# 1 Marker# DSP01 2025-03-28T09:09:44.342185Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:09:44.342435Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:45:2072] 2025-03-28T09:09:44.342472Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:45:2072] 2025-03-28T09:09:44.342556Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:09:44.342642Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:09:44.343179Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:09:44.347928Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:09:44.348181Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-28T09:09:44.348839Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-28T09:09:44.349983Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-03-28T09:09:44.350050Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:09:44.351311Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:68:2076] ControllerId# 72057594037932033 2025-03-28T09:09:44.351355Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:09:44.351430Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:09:44.351645Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:09:44.365084Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:09:44.365202Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:09:44.366971Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:76:2081] targetNodeId# 1 Marker# DSP01 2025-03-28T09:09:44.367144Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:77:2082] targetNodeId# 1 Marker# DSP01 2025-03-28T09:09:44.367315Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:78:2083] targetNodeId# 1 Marker# DSP01 2025-03-28T09:09:44.367499Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:79:2084] targetNodeId# 1 Marker# DSP01 2025-03-28T09:09:44.367646Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:80:2085] targetNodeId# 1 Marker# DSP01 2025-03-28T09:09:44.367797Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:81:2086] 
targetNodeId# 1 Marker# DSP01 2025-03-28T09:09:44.367937Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:82:2087] targetNodeId# 1 Marker# DSP01 2025-03-28T09:09:44.367963Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:09:44.368041Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:68:2076] 2025-03-28T09:09:44.368069Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:68:2076] 2025-03-28T09:09:44.368108Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:09:44.368160Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:09:44.369133Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:09:44.394077Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:68:2076] 2025-03-28T09:09:44.394154Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:09:44.394190Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-28T09:09:44.396127Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:09:44.396293Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:45:2072] 2025-03-28T09:09:44.396349Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:09:44.396378Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-28T09:09:44.396424Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:09:44.396585Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:09:44.396782Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:09:44.396814Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-28T09:09:44.400801Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-28T09:09:44.401131Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-28T09:09:44.401382Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:68:2076] 2025-03-28T09:09:44.401435Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:09:44.401483Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-28T09:09:44.401596Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-28T09:09:44.401879Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-28T09:09:44.410793Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true 
Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:09:44.412384Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:09:44.412884Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:09:44.413145Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:09:44.413277Z node 2 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:09:44.413708Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [2:49:2064] 2025-03-28T09:09:44.413745Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [2:49:2064] 2025-03-28T09:09:44.413789Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-03-28T09:09:44.413897Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-03-28T09:09:44.413948Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-03-28T09:09:44.413984Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-03-28T09:09:44.414019Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-28T09:09:44.414094Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:72:2064] 2025-03-28T09:09:44.414114Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:72:2064] 2025-03-28T09:09:44.414557Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-03-2 ... 
0Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [23:934:2269] 2025-03-28T09:10:47.912537Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037893] forward result local node, try to connect [23:1080:2355] 2025-03-28T09:10:47.912578Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037893]::SendEvent [23:1080:2355] 2025-03-28T09:10:47.912668Z node 23 :PIPE_SERVER DEBUG: [72075186224037893] Accept Connect Originator# [23:1080:2355] 2025-03-28T09:10:47.912763Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037893] connected with status OK role: Leader [23:1080:2355] 2025-03-28T09:10:47.912795Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037893] send queued [23:1080:2355] 2025-03-28T09:10:47.913091Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037894] ::Bootstrap [23:1084:2358] 2025-03-28T09:10:47.913122Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [23:1084:2358] 2025-03-28T09:10:47.913182Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StInit ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:10:47.913307Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:10:47.913606Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2025-03-28T09:10:47.913654Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2025-03-28T09:10:47.913689Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2025-03-28T09:10:47.913940Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [23:680:2175] CurrentLeaderTablet: [23:686:2178] CurrentGeneration: 1 CurrentStep: 0} 2025-03-28T09:10:47.914019Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [23:680:2175] CurrentLeaderTablet: [23:686:2178] CurrentGeneration: 1 CurrentStep: 0} 2025-03-28T09:10:47.914100Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037894 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037894 Cookie: 0 CurrentLeader: [23:680:2175] CurrentLeaderTablet: [23:686:2178] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-28T09:10:47.914128Z node 23 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037894 followers: 0 2025-03-28T09:10:47.914166Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [23:680:2175] 2025-03-28T09:10:47.914242Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result local node, try to connect [23:1084:2358] 2025-03-28T09:10:47.914290Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [23:1084:2358] 2025-03-28T09:10:47.914397Z node 23 :PIPE_SERVER DEBUG: [72075186224037894] Accept Connect Originator# [23:1084:2358] 2025-03-28T09:10:47.914525Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connected with status OK role: Leader [23:1084:2358] 2025-03-28T09:10:47.914584Z node 23 
:PIPE_CLIENT DEBUG: TClient[72075186224037894] send queued [23:1084:2358] 2025-03-28T09:10:47.914858Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895] ::Bootstrap [23:1088:2361] 2025-03-28T09:10:47.914894Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895] lookup [23:1088:2361] 2025-03-28T09:10:47.914953Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037895 entry.State: StInit ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:10:47.915035Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037895 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:10:47.915205Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037895 Cookie: 0} 2025-03-28T09:10:47.915248Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037895 Cookie: 1} 2025-03-28T09:10:47.915279Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037895 Cookie: 2} 2025-03-28T09:10:47.915408Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037895 CurrentLeader: [23:1020:2317] CurrentLeaderTablet: [23:1022:2318] CurrentGeneration: 2 CurrentStep: 0} 2025-03-28T09:10:47.915456Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037895 CurrentLeader: [23:1020:2317] CurrentLeaderTablet: [23:1022:2318] CurrentGeneration: 2 CurrentStep: 0} 2025-03-28T09:10:47.915518Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037895 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037895 Cookie: 0 CurrentLeader: [23:1020:2317] CurrentLeaderTablet: [23:1022:2318] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-28T09:10:47.915545Z node 23 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037895 followers: 0 2025-03-28T09:10:47.915589Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [23:1020:2317] 2025-03-28T09:10:47.915647Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895] forward result local node, try to connect [23:1088:2361] 2025-03-28T09:10:47.915671Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895]::SendEvent [23:1088:2361] 2025-03-28T09:10:47.915747Z node 23 :PIPE_SERVER DEBUG: [72075186224037895] Accept Connect Originator# [23:1088:2361] 2025-03-28T09:10:47.915815Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895] connected with status OK role: Leader [23:1088:2361] 2025-03-28T09:10:47.915850Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037895] send queued [23:1088:2361] 2025-03-28T09:10:47.916035Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896] ::Bootstrap [23:1092:2364] 2025-03-28T09:10:47.916054Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896] lookup [23:1092:2364] 2025-03-28T09:10:47.916090Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037896 entry.State: StInit ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:10:47.916152Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037896 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:10:47.916346Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 
72075186224037896 Cookie: 0} 2025-03-28T09:10:47.916374Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037896 Cookie: 1} 2025-03-28T09:10:47.916400Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037896 Cookie: 2} 2025-03-28T09:10:47.916543Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037896 CurrentLeader: [23:763:2197] CurrentLeaderTablet: [23:769:2200] CurrentGeneration: 1 CurrentStep: 0} 2025-03-28T09:10:47.916604Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037896 CurrentLeader: [23:763:2197] CurrentLeaderTablet: [23:769:2200] CurrentGeneration: 1 CurrentStep: 0} 2025-03-28T09:10:47.916662Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037896 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037896 Cookie: 0 CurrentLeader: [23:763:2197] CurrentLeaderTablet: [23:769:2200] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-28T09:10:47.916686Z node 23 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037896 followers: 0 2025-03-28T09:10:47.916731Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [23:763:2197] 2025-03-28T09:10:47.916807Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896] forward result local node, try to connect [23:1092:2364] 2025-03-28T09:10:47.916833Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896]::SendEvent [23:1092:2364] 2025-03-28T09:10:47.916896Z node 23 :PIPE_SERVER DEBUG: [72075186224037896] Accept Connect Originator# [23:1092:2364] 2025-03-28T09:10:47.916961Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896] connected with status OK role: Leader [23:1092:2364] 2025-03-28T09:10:47.916991Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037896] send queued [23:1092:2364] 2025-03-28T09:10:47.917174Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897] ::Bootstrap [23:1096:2367] 2025-03-28T09:10:47.917223Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897] lookup [23:1096:2367] 2025-03-28T09:10:47.917286Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037897 entry.State: StInit ev: {EvForward TabletID: 72075186224037897 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:10:47.917371Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037897 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:10:47.917577Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037897 Cookie: 0} 2025-03-28T09:10:47.917608Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037897 Cookie: 1} 2025-03-28T09:10:47.917658Z node 22 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037897 Cookie: 2} 2025-03-28T09:10:47.917833Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037897 CurrentLeader: [23:846:2221] CurrentLeaderTablet: [23:848:2222] CurrentGeneration: 2 CurrentStep: 0} 2025-03-28T09:10:47.917885Z node 23 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037897 CurrentLeader: [23:846:2221] CurrentLeaderTablet: [23:848:2222] CurrentGeneration: 2 
CurrentStep: 0} 2025-03-28T09:10:47.917934Z node 23 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037897 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037897 Cookie: 0 CurrentLeader: [23:846:2221] CurrentLeaderTablet: [23:848:2222] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-28T09:10:47.917963Z node 23 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037897 followers: 0 2025-03-28T09:10:47.918000Z node 23 :TABLET_RESOLVER DEBUG: SelectForward node 23 selfDC 2 leaderDC 2 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037897 followers: 0 countLeader 1 allowFollowers 0 winner: [23:846:2221] 2025-03-28T09:10:47.918071Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897] forward result local node, try to connect [23:1096:2367] 2025-03-28T09:10:47.918102Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897]::SendEvent [23:1096:2367] 2025-03-28T09:10:47.918160Z node 23 :PIPE_SERVER DEBUG: [72075186224037897] Accept Connect Originator# [23:1096:2367] 2025-03-28T09:10:47.918260Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897] connected with status OK role: Leader [23:1096:2367] 2025-03-28T09:10:47.918284Z node 23 :PIPE_CLIENT DEBUG: TClient[72075186224037897] send queued [23:1096:2367]
|81.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD]
Test command err: Trying to start YDB, gRPC: 17291, MsgBus: 62281 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00144a/r3tmp/tmpWgh8al/pdisk_1.dat TServer::EnableGrpc on GrpcPort 17291, node 1 TClient is connected to server localhost:62281 TClient is connected to server localhost:62281 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting...
|81.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest
>> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink
>> KqpScanSpilling::SpillingPragmaParseError [GOOD]
>> KqpJoinOrder::FourWayJoinLeftFirst+ColumnStore [GOOD]
|81.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH9 [GOOD]
Test command err: Trying to start YDB, gRPC: 15830, MsgBus: 10984 2025-03-28T09:08:48.269484Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784819732990417:2249];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:48.269543Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018a1/r3tmp/tmpOBFZ9m/pdisk_1.dat 2025-03-28T09:08:48.691132Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15830, node 1 2025-03-28T09:08:48.707782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:48.707909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:48.709962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:48.817976Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:48.818004Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:48.818015Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:48.818180Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10984 TClient is connected to server localhost:10984 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:49.624996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:51.975905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784832617892785:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:51.976032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784832617892797:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:51.976090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:51.980999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:08:51.994212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784832617892799:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:08:52.063087Z node 1 :TX_PROXY ERROR: Actor# [1:7486784836912860146:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:52.451506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:08:52.681117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784836912860360:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:52.681368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784836912860360:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:52.681646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784836912860360:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:52.681783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784836912860360:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:52.681896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784836912860360:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:52.682052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784836912860360:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:52.682172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784836912860360:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:52.682291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784836912860360:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:52.682400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784836912860360:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:08:52.682523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784836912860360:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:08:52.682644Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7486784836912860360:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:08:52.682791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784836912860360:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:08:52.684920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486784836912860366:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:52.684986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486784836912860366:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:52.685221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486784836912860366:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:52.685338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486784836912860366:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:52.685437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486784836912860366:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:52.685527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486784836912860366:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:52.685621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486784836912860366:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:52.685715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486784836912860366:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:52.685848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486784836912860366:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:08:52.685956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486784836912860366:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:08:52.686050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486784836912860366:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:08:52.686168Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7486784836912860366:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:08:52.715986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784836912860447:2364];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:52.716047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784836912860447:2364];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:52.716224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;sel ... tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.837280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.850751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.857787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039218;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.864758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.864759Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.875961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.878802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.890643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.891591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039204;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.902325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039212;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.905043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.916652Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.919321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.928604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.932390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.943220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.946979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.952513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.955058Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.959212Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.962104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039242;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.965605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039252;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.969279Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.972262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039234;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.976226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.978833Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.983278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039246;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.985004Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.989685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.991077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039214;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.996584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039202;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:22.997160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:23.005061Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039282;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:23.005671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039193;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:23.010829Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:23.014967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:23.017084Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:23.021332Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:23.023463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:23.027905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:23.029100Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:23.034166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:23.034849Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:23.039961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:23.049923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:23.057552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:23.258978Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe070ez5s40szk91esvz0wd", SessionId: ydb://session/3?node_id=1&id=YjgyZDY3YjUtZWRkMjgwYjMtNmQ1NzllOTMtZWJlNDRlZGU=, Slow query, duration: 51.003165s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:10:23.735997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:10:23.740011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:10:23.741376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
|81.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest
|81.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint1+ColumnStore [GOOD]
Test command err: Trying to start YDB, gRPC: 19507, MsgBus: 23393 2025-03-28T09:08:55.551376Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784850505020618:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:55.552583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00188b/r3tmp/tmphVuqo6/pdisk_1.dat 2025-03-28T09:08:56.002998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:56.003068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:56.004614Z node 1 :HIVE WARN:
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:56.010855Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19507, node 1 2025-03-28T09:08:56.090396Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:56.090419Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:56.090426Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:56.090524Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23393 TClient is connected to server localhost:23393 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:56.636984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:58.807458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784863389923155:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:58.807578Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:58.808096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784863389923167:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:58.813014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:08:58.830525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784863389923169:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:08:58.902218Z node 1 :TX_PROXY ERROR: Actor# [1:7486784863389923220:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:59.233803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:08:59.417587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784867684890760:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:59.417588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784867684890829:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:59.417751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784867684890829:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:59.418000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784867684890829:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:59.418116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784867684890829:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:59.418223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784867684890829:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:59.418290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784867684890760:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:59.418312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784867684890829:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:59.418394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784867684890829:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:59.418471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784867684890760:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:59.418510Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7486784867684890829:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:59.418597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784867684890760:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:59.418625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784867684890829:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:08:59.418736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784867684890760:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:59.418768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784867684890829:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:08:59.418852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784867684890760:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:59.418920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784867684890829:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:08:59.418956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784867684890760:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:59.419031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784867684890829:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:08:59.419082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784867684890760:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:59.419216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784867684890760:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:08:59.419345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784867684890760:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:08:59.419494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784867684890760:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:08:59.419632Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;self_id=[1:7486784867684890760:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:08:59.454501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486784867684890801:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:59.454502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784867684890762:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:59.454556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:74867848 ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.410514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.422643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.423244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.428547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.432046Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.547348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.549486Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.554346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.556996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.561941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.564302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.569358Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.570607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.578254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.578890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.585587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.596620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.602263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.602960Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.609822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.612182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.616627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.622832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.625373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.635052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.637359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.645936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.652451Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.657948Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.661611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.667764Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.671222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.680228Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.683467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.690039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.697443Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.703995Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.707078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.718180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.720889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.727704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.732565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.740281Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.741250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.756652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:36.921269Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe07a715k8pb0xk8rcety5x", SessionId: ydb://session/3?node_id=1&id=MmE1Y2I4YTQtN2JkYTNhOGYtZTYyZjFjYmYtNmY5NDZmMGU=, Slow query, duration: 54.677914s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:10:37.305329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:10:37.305851Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:10:37.306700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486785146857811791:9365];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-28T09:10:37.307110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink
|81.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest
>> KqpScanSpilling::SelfJoinQueryService [GOOD]
>> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingPragmaParseError [GOOD]
Test command err: cwd: /home/runner/.ya/build/build_root/2909/00173e/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk5 Trying to start YDB, gRPC: 8097, MsgBus: 61126 2025-03-28T09:10:43.356134Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785311881186091:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:43.356817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath #
/home/runner/.ya/build/build_root/2909/00173e/r3tmp/tmpwPlLR8/pdisk_1.dat 2025-03-28T09:10:44.071799Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:44.072531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:44.072613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:44.084749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8097, node 1 2025-03-28T09:10:44.351105Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:10:44.351136Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:10:44.351144Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:10:44.351246Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61126 TClient is connected to server localhost:61126 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:10:45.152310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:45.170932Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:10:45.189139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:45.442556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:45.802266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:45.933674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:10:47.946930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785329061056911:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:47.947049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:48.334827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:10:48.339289Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785311881186091:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:48.340634Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:48.423752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:10:48.480405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:10:48.524532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:10:48.554307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:10:48.594510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:10:48.683828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785333356024727:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:48.683929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:48.684111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785333356024732:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:48.687412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:10:48.697428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785333356024734:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:10:48.756615Z node 1 :TX_PROXY ERROR: Actor# [1:7486785333356024790:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:10:50.202100Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486785341945959652:2497], status: GENERIC_ERROR, issues:
<main>: Error: Pre type annotation, code: 1020
:3:40: Error: Bad "EnableSpillingNodes" setting for "$all" cluster: (yexception) tools/enum_parser/enum_serialization_runtime/enum_runtime.cpp:70: Key 'GraceJoin1' not found in enum NYql::NDq::EEnabledSpillingNodes. Valid options are: 'None', 'GraceJoin', 'Aggregation', 'All'. 2025-03-28T09:10:50.202904Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTA3ZWFkNmUtOTZiOTdmYWMtYTVjODM2OTMtYWM1NzgzZDU=, ActorId: [1:7486785341945959643:2492], ActorState: ExecuteState, TraceId: 01jqe09ce802sm70b5xxj5cq0f, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpDataIntegrityTrails::Select >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs >> SystemView::ShowCreateTableTtlSettings [GOOD] >> TColumnShardTestSchema::HotTiersAfterTtl >> KqpDataIntegrityTrails::Ddl [GOOD] >> SystemView::ShowCreateTableTemporary |81.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |81.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |81.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |81.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |81.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |81.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SelfJoinQueryService [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/2909/001740/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk2 Trying to start YDB, gRPC: 31366, MsgBus: 12465 2025-03-28T09:10:34.327226Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785274619133346:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:34.327279Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001740/r3tmp/tmpp42vUd/pdisk_1.dat 2025-03-28T09:10:35.582649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:10:35.638648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:35.638793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:35.640875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:10:35.680350Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31366, node 1 2025-03-28T09:10:36.919078Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:10:36.919102Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:10:36.919124Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
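Editor's note on the SpillingPragmaParseError failure logged above: the compile error shows the enum parser rejecting the key 'GraceJoin1' for the spilling-nodes setting, with 'None', 'GraceJoin', 'Aggregation' and 'All' as the accepted values, so the GENERIC_ERROR is the outcome this test expects. A minimal sketch of the failing and passing cases, assuming the ydb.EnableSpillingNodes pragma spelling that this suite appears to exercise (the exact spelling is an assumption, not confirmed by this log):

    PRAGMA ydb.EnableSpillingNodes = 'GraceJoin1'; -- rejected: key not in NYql::NDq::EEnabledSpillingNodes, compile fails as above
    PRAGMA ydb.EnableSpillingNodes = 'GraceJoin';  -- accepted: one of 'None', 'GraceJoin', 'Aggregation', 'All'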
2025-03-28T09:10:36.919273Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12465 TClient is connected to server localhost:12465 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: 2025-03-28T09:10:39.327820Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785274619133346:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:39.327884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:10:39.454603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:39.551622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:39.922324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:40.056998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:40.126737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:40.645211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785300388938835:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:40.652121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:43.157997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:10:43.232524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:10:43.318509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:10:43.362093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:10:43.416162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:10:43.493291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:10:43.751817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785313273841338:2471], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:43.751904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:43.752310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785313273841343:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:43.776083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:10:43.802435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785313273841345:2475], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:10:43.928551Z node 1 :TX_PROXY ERROR: Actor# [1:7486785313273841404:3478] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (DataType 'String)) (let $5 (OptionalType $4)) (let $6 (StructType '('"Key" $3) '('"Value" $5))) (let $7 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($21) (block '( (let $22 (lambda '($23) (block '( (let $24 (VariantType (TupleType $6 $6))) (let $25 (Variant $23 '0 $24)) (let $26 (Variant $23 '1 $24)) (return $25 $26) )))) (return (FromFlow (MultiMap (ToFlow $21) $22))) ))) '('('"_logical_id" '702) '('"_id" '"4d968918-f45ee1ad-bc693cc3-fd5ab794")))) (let $8 (DqCnUnionAll (TDqOutput $7 '1))) (let $9 '('('"_logical_id" '547) '('"_id" '"1bab0b98-2dabfabd-a958e56d-a66617af") '('"_wide_channels" $6))) (let $10 (DqPhyStage '($8) (lambda '($27) (block '( (let $28 (lambda '($29) (Member $29 '"Key") (Member $29 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $27) $28))) ))) $9)) (let $11 (DqCnMap (TDqOutput $7 '0))) (let $12 (DqCnBroadcast (TDqOutput $10 '0))) (let $13 (StructType '('"t1.Key" $3) '('"t1.Value" $5) '('"t2.Key" $3) '('"t2.Value" $5))) (let $14 '('('"_logical_id" '617) '('"_id" '"afad31c4-ba70bc4c-2643dea0-b41ce202") '('"_wide_channels" $13))) (let $15 (DqPhyStage '($11 $12) (lambda '($30 $31) (block '( (let $32 '('Many 'Hashed 'Compact)) (let $33 (SqueezeToDict (NarrowFlatMap (WideFilter (ToFlow $31) (lambda '($36 $37) (Exists $37))) (lambda '($38 $39) (IfPresent $39 (lambda '($40) (Just '($40 (AsStruct '('"Key" $38) '('"Value" $39))))) (Nothing (OptionalType (TupleType $4 $6)))))) (lambda '($41) (Nth $41 '0)) (lambda '($42) (Nth $42 '1)) $32)) (let $34 (Sort (FlatMap $33 (lambda '($43) (block '( (let $44 '('"Value")) (let $45 '('"Key" '"t1.Key" '"Value" '"t1.Value")) (let $46 '('"Key" '"t2.Key" '"Value" '"t2.Value")) (return (MapJoinCore (OrderedFilter (ToFlow $30) (lambda '($47) (Exists (Member $47 '"Value")))) $43 'Inner $44 $44 $45 $46 '('"t1.Value") '('"t2.Value"))) )))) (Bool 'true) (lambda '($48) (Member $48 '"t1.Key")))) (let $35 (lambda '($49) (Member $49 '"t1.Key") (Member $49 '"t1.Value") (Member $49 '"t2.Key") (Member $49 '"t2.Value"))) (return (FromFlow (ExpandMap $34 $35))) ))) $14)) (let $16 (DqCnMerge (TDqOutput $15 '0) '('('0 '"Asc")))) (let $17 (DqPhyStage '($16) (lambda '($50) (FromFlow (NarrowMap (ToFlow $50) (lambda '($51 $52 $53 $54) (AsStruct '('"t1.Key" $51) '('"t1.Value" $52) '('"t2.Key" $53) '('"t2.Value" $54)))))) '('('"_logical_id" '629) '('"_id" '"4125ebba-71bff456-f094ae68-82c11836")))) (let $18 '($7 $10 $15 $17)) (let $19 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $20 (DqCnResult (TDqOutput $17 '0) $19)) (return (KqpPhysicalQuery '((KqpPhysicalTx $18 '($20) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $13) '0 '0)) '('('"type" '"query")))) ) 2025-03-28T09:10:50.445340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:10:50.445377Z node 1 :IMPORT WARN: Table profiles were not loaded |81.2%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |81.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |81.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |81.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] >> TFlatTest::LargeDatashardReplyDistributed [GOOD] >> TCacheTest::SystemView >> TCacheTest::Attributes >> TCacheTest::MigrationCommon >> TCacheTest::Recreate >> TCacheTest::List >> TCacheTest::MigrationLostMessage >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin-NotNull >> TFlatTest::LargeDatashardReplyRW >> TCacheTest::Attributes [GOOD] >> TCacheTest::CheckAccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinLeftFirst+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 63626, MsgBus: 19839 2025-03-28T09:08:55.492383Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784849140658390:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:55.492942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001890/r3tmp/tmpNPAZhR/pdisk_1.dat 2025-03-28T09:08:56.118678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:56.118768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:56.125721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63626, node 1 2025-03-28T09:08:56.171260Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:08:56.187636Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:08:56.188708Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:56.377315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:56.377332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:56.377336Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:56.377412Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19839 TClient is connected to server localhost:19839 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:57.088235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:57.106055Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:08:59.188763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784866320528100:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:59.188861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:59.189076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784866320528112:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:59.194186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:08:59.203169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784866320528114:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:08:59.262794Z node 1 :TX_PROXY ERROR: Actor# [1:7486784866320528165:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:59.552455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:08:59.820531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784866320528447:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:59.820715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784866320528447:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:59.821697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784866320528447:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:59.821865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784866320528447:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:59.822021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784866320528447:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:59.822150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784866320528447:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:59.822268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784866320528447:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:59.822384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784866320528447:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:59.822500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784866320528447:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:08:59.822619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784866320528447:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:08:59.822759Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7486784866320528447:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:08:59.822887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784866320528447:2359];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:08:59.824226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784866320528433:2352];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:59.824256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784866320528433:2352];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:59.824371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784866320528433:2352];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:59.824464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784866320528433:2352];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:59.824565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784866320528433:2352];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:59.824623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784866320528433:2352];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:59.824672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784866320528433:2352];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:59.824729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784866320528433:2352];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:59.824784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784866320528433:2352];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:08:59.824851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784866320528433:2352];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:08:59.824959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486784866320528433:2352];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:08:59.825029Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7486784866320528433:2352];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:08:59.857071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486784866320528498:2364];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08 ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.404707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.410523Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.410528Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.416360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.416368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.422578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.422586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.427981Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.428037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.433077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.433500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.439650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.439649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.445699Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.445844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.454167Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.469678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.476842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.483556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.490087Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.494050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.498127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.500363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.504606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.506254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.510876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.512470Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.517407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.517444Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.523801Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.527125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.530924Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.533384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.537506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.540036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.549792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.550711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.557359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.557439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.565201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.565240Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.573221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.573222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.580962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.581009Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:32.799739Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe07bfm5hw6v9372bbj1stz", SessionId: ydb://session/3?node_id=1&id=NjYxN2EyZmEtZjUzZjZlNzgtMzQ5YjRhZjgtNTQ0MTQ3OWY=, Slow query, duration: 49.258830s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:10:33.092688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:10:33.093120Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:10:33.093837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486785222802873272:11698];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-03-28T09:10:33.094231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; |81.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl [GOOD] Test command err: Trying to start YDB, gRPC: 30112, MsgBus: 20958 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001438/r3tmp/tmpBmysnW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 30112, node 1 TClient is connected to server localhost:20958 TClient is connected to server localhost:20958 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... 
waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 32144, MsgBus: 31844 2025-03-28T09:10:43.126399Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785313889342137:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:43.127974Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001721/r3tmp/tmpngcsSP/pdisk_1.dat 2025-03-28T09:10:43.739711Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:43.751847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:43.751935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:43.767229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32144, node 1 2025-03-28T09:10:43.910197Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:10:43.910222Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:10:43.910230Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:10:43.910337Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31844 TClient is connected to server localhost:31844 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:10:44.605755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:44.633697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:10:44.776565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
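Editor's note, for orientation while reading the KqpIndexLookupJoin::LeftJoinSkipNullFilter-StreamLookup output around this point: the test name suggests a left join whose filter discards rows that found no right-hand match. An illustrative sketch only, with hypothetical table and column names, not the query the test actually runs:

    SELECT l.id, r.value
    FROM left_table AS l
    LEFT JOIN right_table AS r ON l.id = r.left_id
    WHERE r.value IS NOT NULL; -- the null-skip filter the test name refers to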
2025-03-28T09:10:44.972780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:10:45.065190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:47.020638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785326774245830:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:47.020801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:47.348063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:10:47.380336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:10:47.413650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:10:47.465094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:10:47.548003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:10:47.628244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:10:47.713381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785331069213646:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:47.713494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:47.713777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785331069213651:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:47.718073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:10:47.733226Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:10:47.734130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785331069213653:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:10:47.813208Z node 1 :TX_PROXY ERROR: Actor# [1:7486785331069213708:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:10:48.122819Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785313889342137:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:48.122893Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:49.089153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:10:49.164658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:10:49.213647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:10:49.256155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:10:49.298323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:10:49.374172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19744, MsgBus: 8374 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001428/r3tmp/tmpmwnJQM/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19744, node 1 TClient is connected to server localhost:8374 TClient is connected to server localhost:8374 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> TCacheTest::SystemView [GOOD] >> TCacheTest::TableSchemaVersion >> TCacheTest::CheckAccess [GOOD] >> TCacheTest::List [GOOD] >> TCacheTest::MigrationCommit >> TCacheTest::Recreate [GOOD] >> TCacheTest::SysLocks ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] Test command err: Trying to start YDB, gRPC: 32194, MsgBus: 20369 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00142f/r3tmp/tmp7k92DS/pdisk_1.dat TServer::EnableGrpc on GrpcPort 32194, node 1 TClient is connected to server localhost:20369 TClient is connected to server localhost:20369 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckAccess [GOOD] Test command err: 2025-03-28T09:10:56.032093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:56.032151Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:56.189409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:10:56.206503Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-03-28T09:10:56.469248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:56.469305Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:56.525032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-03-28T09:10:56.535453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 2025-03-28T09:10:56.538974Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:197:2187], for# user1@builtin, access# DescribeSchema 2025-03-28T09:10:56.539568Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [2:201:2191], for# user1@builtin, access# DescribeSchema >> TCacheTest::SysLocks [GOOD] >> TCacheTest::TableSchemaVersion [GOOD] >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::SysLocks [GOOD] Test command err: 2025-03-28T09:10:56.425784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:56.425855Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:56.635569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:10:56.655324Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-28T09:10:56.657209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:10:56.709706Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 
102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-28T09:10:56.721561Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2025-03-28T09:10:57.102881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:57.102941Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:57.159693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 >> TCacheTest::MigrationCommon [GOOD] >> TCacheTest::MigrationDeletedPathNavigate >> TCacheTest::MigrationCommit [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 14966, MsgBus: 18892 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001441/r3tmp/tmpaqZusd/pdisk_1.dat TServer::EnableGrpc on GrpcPort 14966, node 1 TClient is connected to server localhost:18892 TClient is connected to server localhost:18892 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
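For context on the data-integrity stanzas above: each KqpDataIntegrityTrails test boots a single-node YDB on the printed gRPC port, waits for /Root to come up, creates its tables (the "waiting..." lines), and then drives an UPSERT through the query layer. A minimal external reproduction of that traffic with the YDB C++ SDK could look like the sketch below; the include path, endpoint, database, and table name are illustrative assumptions, not values recovered from this log.

// Hedged sketch: issue a simple UPSERT against a locally started YDB node,
// roughly the traffic the KqpDataIntegrityTrails::Upsert* tests generate.
// Include path / endpoint / database / table name are assumptions.
#include <ydb/public/sdk/cpp/client/ydb_table/table.h>  // in-tree SDK path; may differ per SDK release

#include <iostream>

int main() {
    auto config = NYdb::TDriverConfig()
        .SetEndpoint("localhost:14966")   // a gRPC port as printed by one test run; assumed reachable
        .SetDatabase("/Root");
    NYdb::TDriver driver(config);
    NYdb::NTable::TTableClient client(driver);

    auto sessionResult = client.CreateSession().GetValueSync();
    if (!sessionResult.IsSuccess()) {
        std::cerr << "session error: " << sessionResult.GetIssues().ToString() << std::endl;
        return 1;
    }
    auto session = sessionResult.GetSession();

    // One serializable read-write transaction, committed in the same round trip.
    auto txControl = NYdb::NTable::TTxControl::BeginTx(
        NYdb::NTable::TTxSettings::SerializableRW()).CommitTx();
    auto result = session.ExecuteDataQuery(
        "UPSERT INTO `KeyValue` (Key, Value) VALUES (1u, \"one\");",  // table name assumed
        txControl).GetValueSync();

    std::cout << (result.IsSuccess() ? "upsert ok" : result.GetIssues().ToString()) << std::endl;
    driver.Stop(true);
    return result.IsSuccess() ? 0 : 1;
}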
>> TCacheTest::MigrationLostMessage [GOOD] >> TCacheTest::MigrationUndo |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::TableSchemaVersion [GOOD] Test command err: 2025-03-28T09:10:56.198831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:56.198889Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:56.379811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-03-28T09:10:56.670230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:56.670302Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:56.732716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-03-28T09:10:56.862833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 101:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-28T09:10:57.384558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 102:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 >> TTopicApiDescribes::DescribeTopic |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationCommit [GOOD] Test command err: 2025-03-28T09:10:56.594340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:56.594397Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:56.719669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 TestWaitNotification wait txId: 103 2025-03-28T09:10:56.751054Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:10:56.751257Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:10:56.751440Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-03-28T09:10:57.084667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:57.084739Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:57.146945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:175:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:178:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:179:2067] recipient: [2:177:2171] Leader for TabletID 
72057594046678944 is [2:180:2172] sender: [2:181:2067] recipient: [2:177:2171] 2025-03-28T09:10:57.212326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:57.212388Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:211:2067] recipient: [2:24:2071] 2025-03-28T09:10:57.243002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-03-28T09:10:57.252631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:238:2213] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:238:2213] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:240:2215] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:240:2215] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:252:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:252:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:253:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:253:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:250:2219] sender: [2:255:2067] recipient: [2:238:2213] Leader for TabletID 72075186233409547 is [2:254:2221] sender: [2:256:2067] recipient: [2:240:2215] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-03-28T09:10:57.282502Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:250:2219] sender: [2:289:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:254:2221] sender: [2:290:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-03-28T09:10:57.369206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:340:2067] recipient: [2:336:2285] IGNORE Leader for TabletID 
72075186233409548 is [0:0:0] sender: [2:340:2067] recipient: [2:336:2285] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:343:2289] sender: [2:344:2067] recipient: [2:336:2285] Leader for TabletID 72075186233409548 is [2:343:2289] sender: [2:345:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-03-28T09:10:57.589992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:415:2333] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:415:2333] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:422:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:422:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:423:2337] sender: [2:424:2067] recipient: [2:415:2333] 2025-03-28T09:10:57.639353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:57.639417Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [2:423:2337] sender: [2:451:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 106 2025-03-28T09:10:57.696680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:10:57.696740Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:10:57.697106Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-03-28T09:10:57.697231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-28T09:10:57.716084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-03-28T09:10:57.716440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { 
DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:512:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:514:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:516:2067] recipient: [2:515:2408] Leader for TabletID 72057594046678944 is [2:517:2409] sender: [2:518:2067] recipient: [2:515:2408] 2025-03-28T09:10:57.772109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:57.772168Z node 2 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046678944 is [2:517:2409] sender: [2:545:2067] recipient: [2:24:2071] { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11977, MsgBus: 27675 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001417/r3tmp/tmpqCP1Sq/pdisk_1.dat TServer::EnableGrpc on GrpcPort 11977, node 1 TClient is connected to server localhost:27675 TClient is connected to server localhost:27675 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... 
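The "TClient::Ls request: Root" / "PathDescription { ... }" dumps that recur in these stanzas are scheme describes. Outside the test harness, the same lookup can be expressed through the SDK's scheme client; a minimal sketch follows, with the endpoint and path chosen for illustration rather than taken from the log.

// Hedged sketch: describe a scheme path, mirroring the TClient::Ls /
// PathDescription output captured in the stanzas above.
// Include path, endpoint, and path string are assumptions.
#include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>  // in-tree SDK path; may differ per SDK release

#include <iostream>

int main() {
    NYdb::TDriver driver(NYdb::TDriverConfig()
        .SetEndpoint("localhost:2136")
        .SetDatabase("/Root"));
    NYdb::NScheme::TSchemeClient scheme(driver);

    auto result = scheme.DescribePath("/Root").GetValueSync();
    if (!result.IsSuccess()) {
        std::cerr << result.GetIssues().ToString() << std::endl;
        return 1;
    }

    // TSchemeEntry carries the top-level fields seen in the log's
    // PathDescription dump (name, owner); children require ListDirectory.
    const auto& entry = result.GetEntry();
    std::cout << "Name: " << entry.Name
              << " Owner: " << entry.Owner << std::endl;
    driver.Stop(true);
    return 0;
}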
|81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TCacheTest::Navigate |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TCacheTest::MigrationUndo [GOOD] >> TCacheTest::Navigate [GOOD] >> TCacheTest::PathBelongsToDomain >> AggregateStatistics::ShouldBePings >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes >> TCacheTest::PathBelongsToDomain [GOOD] >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] >> AggregateStatistics::ShouldBePings [GOOD] >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |81.3%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationUndo [GOOD] Test command err: 2025-03-28T09:10:56.499319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:56.499380Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:56.633502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:175:2067] recipient: [1:46:2093] Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:178:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:179:2067] recipient: [1:177:2171] Leader for TabletID 72057594046678944 is [1:180:2172] sender: [1:181:2067] recipient: [1:177:2171] 2025-03-28T09:10:56.675688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:56.675754Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:180:2172] sender: [1:211:2067] recipient: [1:24:2071] 2025-03-28T09:10:56.706266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-03-28T09:10:56.720527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:247:2067] recipient: [1:238:2213] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:247:2067] recipient: 
[1:238:2213] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:248:2067] recipient: [1:240:2215] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:248:2067] recipient: [1:240:2215] Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:251:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [1:251:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:253:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [1:253:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409546 is [1:250:2219] sender: [1:255:2067] recipient: [1:238:2213] Leader for TabletID 72075186233409547 is [1:254:2221] sender: [1:256:2067] recipient: [1:240:2215] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 TestWaitNotification wait txId: 102 2025-03-28T09:10:56.831302Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [1:250:2219] sender: [1:289:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409547 is [1:254:2221] sender: [1:290:2067] recipient: [1:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-03-28T09:10:56.995275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:340:2067] recipient: [1:336:2285] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:340:2067] recipient: [1:336:2285] Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:341:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [1:341:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409548 is [1:343:2289] sender: [1:344:2067] recipient: [1:336:2285] Leader for TabletID 72075186233409548 is [1:343:2289] sender: [1:345:2067] recipient: [1:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-03-28T09:10:57.381205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:420:2067] recipient: [1:415:2333] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:420:2067] recipient: [1:415:2333] Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:422:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [1:422:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409549 is [1:423:2337] sender: [1:424:2067] recipient: [1:415:2333] 2025-03-28T09:10:57.442127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2025-03-28T09:10:57.442181Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [1:423:2337] sender: [1:451:2067] recipient: [1:24:2071] TestWaitNotification: OK eventTxId 105 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 106 2025-03-28T09:10:57.546386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:10:57.546466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:10:57.546860Z node 1 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-03-28T09:10:57.547013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-28T09:10:57.594908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-03-28T09:10:57.595217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 skipDeleteNotification path: /Root/USER_0/DirA/Table1 pathId: [OwnerId: 72057594046678944, LocalPathId: 4] Strong: 1 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-03-28T09:10:57.675161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 108:0, at schemeshard: 72075186233409549 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 skipDeleteNotification path: /Root/USER_0/DirA pathId: [OwnerId: 72057594046678944, LocalPathId: 3] Strong: 1 TestModificationResult got TxId: 109, wait until txId: 109 
TestWaitNotification wait txId: 109 TestWaitNotification: OK eventTxId 109 TestModificationResults wait txId: 110 2025-03-28T09:10:58.042797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 110:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:625:2067] recipient: [1:621:2503] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:625:2067] recipient: [1:621:2503] Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:627:2067] recipient: [1:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [1:627:2067] recipient: [1:24:2071] Leader for TabletID 72075186233409550 is [1:628:2507] sender: [1:629:2067] recipient: [1:621:2503] Leader for TabletID 72075186233409550 is [1:628:2507] sender: [1:630:2067] recipient: [1:24:2071] TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 TestWaitNotification: OK eventTxId 110 2025-03-28T09:10:58.457711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:58.457793Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:58.531121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-03-28T09:10:58.536852Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [2:174:2170], Recipient [2:70:2109]: NActors::TEvents::TEvPoison 2025-03-28T09:10:58.537565Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:175:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:178:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:179:2067] recipient: [2:177:2171] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:181:2067] recipient: [2:177:2171] 2025-03-28T09:10:58.544165Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received ev ... 
: 2] was 2 2025-03-28T09:10:59.355166Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: Mediator, at schemeshard: 72057594046678944 2025-03-28T09:10:59.355200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:10:59.355237Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:3, tabletId: 72075186233409548, PathId: [OwnerId: 72057594046678944, LocalPathId: 4], TabletType: DataShard, at schemeshard: 72057594046678944 2025-03-28T09:10:59.355260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-28T09:10:59.355386Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:10:59.355570Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:59.355768Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-03-28T09:10:59.356101Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:59.356220Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:59.356681Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:59.356767Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:59.357038Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:59.357189Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:59.357289Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:59.357483Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:59.357591Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:59.357818Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:59.358087Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:59.358261Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:59.358320Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:59.358388Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:59.358654Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:10:59.361836Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:10:59.362036Z node 2 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-28T09:10:59.363183Z 
node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435082, Sender [2:516:2402], Recipient [2:516:2402]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-03-28T09:10:59.363236Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-03-28T09:10:59.364098Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:10:59.364165Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:10:59.364380Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:10:59.364448Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:10:59.364493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:10:59.364525Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:10:59.364836Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [2:532:2402], Recipient [2:516:2402]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-03-28T09:10:59.364875Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-03-28T09:10:59.364924Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:10:59.379420Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:160:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:10:59.379614Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:160:2157], cacheItem# { Subscriber: { Subscriber: [2:382:2319] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 5000002 PathId: [OwnerId: 72057594046678944, LocalPathId: 2] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:10:59.379899Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:544:2419], recipient# [2:543:2418], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0 TableId: [72057594046678944:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } { Path: Root/USER_0 TableId: [72057594046678944:2:0] 
RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:10:59.380412Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:160:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:10:59.380536Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:160:2157], cacheItem# { Subscriber: { Subscriber: [2:391:2322] DomainOwnerId: 72057594046678944 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 200 PathId: [OwnerId: 72057594046678944, LocalPathId: 3] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/USER_0/DirA TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:10:59.380761Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:546:2421], recipient# [2:545:2420], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:10:59.381272Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:160:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:10:59.381399Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:160:2157], cacheItem# { Subscriber: { Subscriber: [2:400:2325] DomainOwnerId: 72057594046678944 
Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 250 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] DomainId: [OwnerId: 72057594046678944, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/USER_0/DirA/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:10:59.381587Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:548:2423], recipient# [2:547:2422], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/USER_0/DirA/Table1 TableId: [72057594046678944:4:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] Test command err: 2025-03-28T09:10:59.952186Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-28T09:10:59.953146Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-03-28T09:11:00.001481Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:00.009902Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-28T09:11:00.010110Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-28T09:11:00.010217Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [1:39:2059], tablet id = 2, status = OK 2025-03-28T09:11:00.010299Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:39:2059], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:00.010591Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-03-28T09:11:00.010717Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [3:44:2057], tablet id = 5, status = OK 2025-03-28T09:11:00.010766Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:44:2057], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:00.010815Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:46:2057], server id = [2:46:2057], tablet id = 4, status = OK 2025-03-28T09:11:00.010844Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:46:2057], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:00.010931Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [1:40:2060], tablet id = 3, status = OK 2025-03-28T09:11:00.010977Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:40:2060], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:00.011245Z node 2 :STATISTICS DEBUG: Received 
TEvStatisticsResponse TabletId: 4 2025-03-28T09:11:00.011305Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2025-03-28T09:11:00.012659Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-03-28T09:11:00.012788Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2025-03-28T09:11:00.012841Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-28T09:11:00.012985Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:46:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-03-28T09:11:00.013011Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.013053Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-03-28T09:11:00.013072Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.013094Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-03-28T09:11:00.013564Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [0:0:0], tablet id = 5, status = ERROR 2025-03-28T09:11:00.013595Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.013625Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [0:0:0], tablet id = 2, status = ERROR 2025-03-28T09:11:00.013642Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.013678Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [4:49:2057], tablet id = 6, status = OK 2025-03-28T09:11:00.013743Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:49:2057], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:00.013933Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [0:0:0], tablet id = 3, status = ERROR 2025-03-28T09:11:00.013954Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.013981Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2025-03-28T09:11:00.014026Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:11:00.014120Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [0:0:0], tablet id = 6, status = ERROR 2025-03-28T09:11:00.014139Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.014178Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-03-28T09:11:00.014365Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-03-28T09:11:00.014426Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-28T09:11:00.014602Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-03-28T09:11:00.014648Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2 >> KqpDataIntegrityTrails::Select [GOOD] >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::PathBelongsToDomain [GOOD] Test command err: 
2025-03-28T09:10:59.420230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:59.420285Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:59.570076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:10:59.589001Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-03-28T09:10:59.864155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:59.864203Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:59.913121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2025-03-28T09:10:59.917940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:10:59.922647Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 2025-03-28T09:10:59.929942Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:226:2204], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:10:59.930154Z node 2 :TX_PROXY_SCHEME_CACHE WARN: Path does not belong to the specified domain: self# [2:228:2206], domain# [OwnerId: 72057594046678944, LocalPathId: 
1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 2] |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD] Test command err: 2025-03-28T09:10:59.939621Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-28T09:10:59.942941Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-28T09:11:00.099361Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 2 2025-03-28T09:11:00.099447Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-28T09:11:00.099499Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-03-28T09:11:00.100252Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:19:2055], server id = [0:0:0], tablet id = 2, status = ERROR 2025-03-28T09:11:00.100307Z node 2 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.100365Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:16:2056], server id = [0:0:0], tablet id = 1, status = ERROR 2025-03-28T09:11:00.100385Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.100496Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2025-03-28T09:11:00.100554Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] Test command err: 2025-03-28T09:11:00.621373Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-28T09:11:00.621607Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2 2025-03-28T09:11:00.621637Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2025-03-28T09:11:00.621748Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-03-28T09:11:00.622336Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = ERROR 2025-03-28T09:11:00.622389Z node 1 :STATISTICS DEBUG: Tablet 1 is not local. 2025-03-28T09:11:00.622519Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = ERROR 2025-03-28T09:11:00.622535Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2025-03-28T09:11:00.622584Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-03-28T09:11:00.622598Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.622807Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5 2025-03-28T09:11:00.622827Z node 1 :STATISTICS DEBUG: Tablet 5 is not local. 
2025-03-28T09:11:00.622903Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 6 2025-03-28T09:11:00.622974Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = ERROR 2025-03-28T09:11:00.623008Z node 1 :STATISTICS DEBUG: Tablet 7 is not local. 2025-03-28T09:11:00.623049Z node 1 :STATISTICS DEBUG: EvClientDestroyed, node id = 1, client id = [1:15:2062], server id = [1:15:2062], tablet id = 8 2025-03-28T09:11:00.623068Z node 1 :STATISTICS DEBUG: Tablet 8 is not local. 2025-03-28T09:11:00.623086Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-28T09:11:00.623140Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2025-03-28T09:11:00.623156Z node 1 :STATISTICS DEBUG: Skip EvClientConnected ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] Test command err: 2025-03-28T09:11:00.865355Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-28T09:11:00.866343Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-03-28T09:11:00.866757Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:00.867028Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-28T09:11:00.867157Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-28T09:11:00.867289Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-03-28T09:11:00.867429Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-03-28T09:11:00.867448Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.867486Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [3:42:2057], tablet id = 3, status = OK 2025-03-28T09:11:00.867531Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:42:2057], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:00.867694Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-03-28T09:11:00.867747Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-03-28T09:11:00.867823Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-28T09:11:00.867958Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-03-28T09:11:00.867975Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.868009Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2025-03-28T09:11:00.868046Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:00.868128Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-03-28T09:11:00.868168Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2025-03-28T09:11:00.868187Z node 4 :STATISTICS DEBUG: Send 
aggregate statistics response to node: 2 2025-03-28T09:11:00.868289Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-03-28T09:11:00.868308Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.868381Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-03-28T09:11:00.878961Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-28T09:11:00.879027Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:11:00.879061Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-28T09:11:00.879082Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:11:00.890893Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-03-28T09:11:00.890958Z node 1 :STATISTICS INFO: Node 2 is unavailable 2025-03-28T09:11:00.890986Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-28T09:11:00.891097Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-28T09:11:00.891135Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-03-28T09:11:00.891170Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-28T09:11:00.891193Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:11:00.891333Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-28T09:11:00.891369Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] Test command err: 2025-03-28T09:11:00.885966Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-28T09:11:00.888251Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = OK 2025-03-28T09:11:00.888642Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:8:2055], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:00.888812Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2, status = OK 2025-03-28T09:11:00.888862Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:9:2056], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:00.888902Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 3, status = OK 2025-03-28T09:11:00.888935Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:10:2057], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:00.889002Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-03-28T09:11:00.889233Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = OK 2025-03-28T09:11:00.889283Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:11:2058], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:00.889334Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-03-28T09:11:00.889415Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5, status = OK 
2025-03-28T09:11:00.889453Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:12:2059], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:00.889516Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [0:0:0], tablet id = 1, status = ERROR 2025-03-28T09:11:00.889537Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.889567Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 6, status = OK 2025-03-28T09:11:00.889617Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:13:2060], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:00.889660Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-03-28T09:11:00.889678Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.889702Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = OK 2025-03-28T09:11:00.889730Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:14:2061], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:00.889772Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 5 2025-03-28T09:11:00.889851Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 7 2025-03-28T09:11:00.889912Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 5, status = ERROR 2025-03-28T09:11:00.889935Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.889960Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [0:0:0], tablet id = 7, status = ERROR 2025-03-28T09:11:00.889976Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.901795Z node 1 :STATISTICS DEBUG: Tablet 1 has already been processed 2025-03-28T09:11:00.901887Z node 1 :STATISTICS ERROR: No result was received from the tablet 2 2025-03-28T09:11:00.901925Z node 1 :STATISTICS DEBUG: Tablet 2 is not local. 2025-03-28T09:11:00.902020Z node 1 :STATISTICS DEBUG: Tablet 3 has already been processed 2025-03-28T09:11:00.902041Z node 1 :STATISTICS ERROR: No result was received from the tablet 4 2025-03-28T09:11:00.902060Z node 1 :STATISTICS DEBUG: Tablet 4 is not local. 2025-03-28T09:11:00.902113Z node 1 :STATISTICS DEBUG: Tablet 5 has already been processed 2025-03-28T09:11:00.902133Z node 1 :STATISTICS ERROR: No result was received from the tablet 6 2025-03-28T09:11:00.902150Z node 1 :STATISTICS DEBUG: Tablet 6 is not local. 
2025-03-28T09:11:00.902188Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-28T09:11:00.902267Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-28T09:11:00.902296Z node 1 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-03-28T09:11:00.902376Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 2, status = ERROR 2025-03-28T09:11:00.902403Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.902453Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [0:0:0], tablet id = 4, status = ERROR 2025-03-28T09:11:00.902472Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:00.902513Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2025-03-28T09:11:00.902530Z node 1 :STATISTICS DEBUG: Skip EvClientConnected ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19155, MsgBus: 5718 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001403/r3tmp/tmpRLyzBp/pdisk_1.dat TServer::EnableGrpc on GrpcPort 19155, node 1 TClient is connected to server localhost:5718 TClient is connected to server localhost:5718 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 4691, MsgBus: 23526 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013f1/r3tmp/tmpHsSG0N/pdisk_1.dat TServer::EnableGrpc on GrpcPort 4691, node 1 TClient is connected to server localhost:23526 TClient is connected to server localhost:23526 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] Test command err: 2025-03-28T09:11:01.424630Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-28T09:11:01.425553Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2025-03-28T09:11:01.425938Z node 1 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:01.426203Z node 3 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-28T09:11:01.426365Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2025-03-28T09:11:01.426504Z node 1 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 1 2025-03-28T09:11:01.426670Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2025-03-28T09:11:01.426747Z node 1 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:01.426809Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [3:42:2057], tablet id = 3, status = OK 2025-03-28T09:11:01.426867Z node 3 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [3:42:2057], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:01.427010Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2025-03-28T09:11:01.427072Z node 3 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 3 2025-03-28T09:11:01.427111Z node 3 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-28T09:11:01.427251Z node 3 :STATISTICS DEBUG: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2025-03-28T09:11:01.427269Z node 3 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:01.427307Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2025-03-28T09:11:01.427345Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2025-03-28T09:11:01.427430Z node 1 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2025-03-28T09:11:01.427476Z node 4 
:STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 4 2025-03-28T09:11:01.427497Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:11:01.427599Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2025-03-28T09:11:01.427624Z node 4 :STATISTICS DEBUG: Skip EvClientConnected 2025-03-28T09:11:01.427688Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2025-03-28T09:11:01.438801Z node 4 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-28T09:11:01.438881Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:11:01.438990Z node 3 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-28T09:11:01.439013Z node 3 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:11:01.450961Z node 2 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-03-28T09:11:01.451057Z node 1 :STATISTICS INFO: Node 2 is unavailable 2025-03-28T09:11:01.451091Z node 1 :STATISTICS DEBUG: Send aggregate statistics response to node: 1 2025-03-28T09:11:01.451209Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-28T09:11:01.451242Z node 1 :STATISTICS DEBUG: Skip TEvKeepAliveTimeout 2025-03-28T09:11:01.451273Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-28T09:11:01.451298Z node 1 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:11:01.451445Z node 1 :STATISTICS DEBUG: Event round 1 is different from the current 0 2025-03-28T09:11:01.451487Z node 1 :STATISTICS DEBUG: Skip TEvAggregateKeepAlive ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select [GOOD] Test command err: Trying to start YDB, gRPC: 64892, MsgBus: 20916 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ea/r3tmp/tmpcCWbfV/pdisk_1.dat TServer::EnableGrpc on GrpcPort 64892, node 1 TClient is connected to server localhost:20916 TClient is connected to server localhost:20916 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... |81.4%| [TA] $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] Test command err: 2025-03-28T09:10:27.601479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:27.601553Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:28.256003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:10:28.454631Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-28T09:10:28.477694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:10:28.529025Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2025-03-28T09:10:29.238506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:29.238565Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:29.303577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 >> KqpJoinOrder::FiveWayJoinWithComplexPreds+ColumnStore [GOOD] >> YdbOlapStore::LogPagingBetween [GOOD] >> YdbOlapStore::LogPagingAfter >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] >> Yq_1::Basic_EmptyDict [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental >> IncrementalBackup::BackupRestore ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 3665, MsgBus: 
19726 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00140c/r3tmp/tmpCbAXb2/pdisk_1.dat TServer::EnableGrpc on GrpcPort 3665, node 1 TClient is connected to server localhost:19726 TClient is connected to server localhost:19726 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... waiting... waiting... waiting... waiting... >> SystemView::ShowCreateTableTemporary [GOOD] >> KqpJoin::IdxLookupPartialLeftPredicate |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> IncrementalBackup::SimpleBackup >> LocalTableWriter::ConsistentWrite >> LocalTableWriter::WaitTxIds >> LocalTableWriter::WriteTable >> LocalTableWriter::ApplyInCorrectOrder |81.5%| [TA] $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull >> LocalTableWriter::SupportedTypes >> LocalTableWriter::DataAlongWithHeartbeat >> LocalTableWriter::DecimalKeys ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_EmptyDict [GOOD] Test command err: 2025-03-28T09:09:35.555702Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785021684413907:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:35.555993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0328 09:09:36.481358151 173442 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:09:36.481486887 173442 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:09:36.746780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:37.638149Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:16571: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16571 } ]
2025-03-28T09:09:37.652341Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:16571: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:16571
2025-03-28T09:09:37.743518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:09:38.747291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:09:38.972867Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:16571: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16571 } ]
2025-03-28T09:09:39.750363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:09:40.558876Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785021684413907:2273];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:09:40.558937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:09:40.751292Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
E0328 09:09:41.483352704 173607 dns_resolver.cc:162] no server name supplied in dns URI
E0328 09:09:41.483494404 173607 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:///
2025-03-28T09:09:41.542794Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:16571: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:16571 } ]
2025-03-28T09:09:41.561047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:09:41.755037Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e69/r3tmp/tmp5F9sGm/pdisk_1.dat
2025-03-28T09:09:42.619618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:09:42.806140Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:09:42.830222Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:09:42.834350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785051749185564:2320], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-28T09:09:42.834475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
2025-03-28T09:09:42.892761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486785051749185564:2320], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } TServer::EnableGrpc on GrpcPort 16571, node 1 2025-03-28T09:09:42.930351Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:42.930387Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:42.930393Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:42.930528Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8829 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:43.358057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:44.015818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:44.015921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:44.023710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:45.518107Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/mappings". Create session OK 2025-03-28T09:09:45.518154Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-28T09:09:45.518163Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/mappings" 2025-03-28T09:09:45.526539Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-03-28T09:09:45.526579Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-28T09:09:45.526587Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-28T09:09:45.530879Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-03-28T09:09:45.530917Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-28T09:09:45.530927Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-28T09:09:45.555462Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". 
Create session OK 2025-03-28T09:09:45.555486Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-28T09:09:45.555494Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-28T09:09:45.586800Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". Create session OK 2025-03-28T09:09:45.586838Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-28T09:09:45.586846Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-28T09:09:45.587256Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:09:45.588973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:09:45.591070Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-03-28T09:09:45.591093Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-28T09:09:45.591100Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-28T09:09:45.592673Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/result_sets". Create session OK 2025-03-28T09:09:45.592691Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-28T09:09:45.592697Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/result_sets" 2025-03-28T09:09:45.603857Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/idempotency_keys". Create session OK 2025-03-28T09:09:45.603877Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-28T09:09:45.612417Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/idempotency_keys" 2025-03-28T09:09:45.629184Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-03-28T09:09:45.629220Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-03-28T09:09:45.690091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:09:45.764538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:09:45.780277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:09:45.7816 ... 
ERROR: SyncQuota finished with error: 2025-03-28T09:11:01.070090Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:11:01.070184Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: ... (TRUNCATED) ... 2025-03-28T09:11:01.083198Z node 7 :FQ_QUOTA_SERVICE
ERROR: SyncQuota finished with error: 2025-03-28T09:11:01.083380Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:11:01.083473Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:11:01.083515Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:11:01.083613Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:11:01.083719Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:11:01.083820Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:11:01.083922Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:11:01.084018Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:11:01.084116Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:11:01.084214Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:11:01.084316Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:11:01.084418Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:11:01.084509Z node 7 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: >> SystemView::AuthOwners_ResultOrder [GOOD] >> SystemView::AuthOwners_TableRange >> TLocksTest::BrokenSameKeyLock [GOOD] >> TLocksTest::BrokenSameShardLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::ShowCreateTableTemporary [GOOD] Test command err: 2025-03-28T09:09:32.762162Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785007676653604:2218];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:32.767248Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015a3/r3tmp/tmp8RBPr4/pdisk_1.dat 2025-03-28T09:09:33.542741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:33.542865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:33.550741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:33.604790Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15556, node 1 2025-03-28T09:09:33.935143Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:09:33.935169Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:09:34.315572Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:34.315613Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:34.315631Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:34.315774Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62778 TClient is connected to server localhost:62778 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:35.726550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:37.762934Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785007676653604:2218];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:37.763003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:39.087281Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-03-28T09:09:39.087314Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-03-28T09:09:39.206280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785037741425734:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:39.206404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:39.206739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785037741425746:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:39.223095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:09:39.301977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785037741425748:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:09:39.409625Z node 1 :TX_PROXY ERROR: Actor# [1:7486785037741425832:2771] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:39.410995Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: CREATE TABLE test_show_create (\n Key Uint32,\n Value Bool DEFAULT true,\n PRIMARY KEY (Key)\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-28T09:09:39.411108Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F000268CB8 2025-03-28T09:09:39.411149Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7486785037741425730:2343], queryUid: , queryText: "CREATE TABLE test_show_create (\n Key Uint32,\n Value Bool DEFAULT true,\n PRIMARY KEY (Key)\n );\n ", keepInCache: 1, split: 0{ TraceId: 01jqe077838xdy2rzsv09hmpd5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTUzYjNiNWQtNDgyMjE4ZjktMTgzMGNjYS03MzQ1ZTBmZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-03-28T09:09:39.411298Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: CREATE TABLE test_show_create (\n Key Uint32,\n Value Bool DEFAULT true,\n PRIMARY KEY (Key)\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-28T09:09:39.411362Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7486785037741425730:2343], queueSize: 1 2025-03-28T09:09:39.412877Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7486785037741425730:2343], compileActor: [1:7486785037741425854:2354] 2025-03-28T09:09:39.936516Z node 1 :KQP_YQL INFO: TraceId: 01jqe077838xdy2rzsv09hmpd5, SessionId: CompileActor 2025-03-28 09:09:39.935 INFO ydb-core-sys_view-ut(pid=172576, tid=0x00007F013F383640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 '('"Key" (AsOptionalType (DataType 'Uint32)) '('columnConstrains '()) '())) (let $2 '('"Value" (AsOptionalType (DataType 'Bool)) '('columnConstrains '('('default (Bool '"true")))) '())) (let $3 '('('mode 'create) '('columns '($1 $2)) '('primarykey '('"Key")))) (return (Write! 
world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/test_show_create"))) (Void) $3)) ) 2025-03-28T09:09:39.937712Z node 1 :KQP_YQL TRACE: TraceId: 01jqe077838xdy2rzsv09hmpd5, SessionId: CompileActor 2025-03-28 09:09:39.937 TRACE ydb-core-sys_view-ut(pid=172576, tid=0x00007F013F383640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 '('"Key" (AsOptionalType (DataType 'Uint32)) '('columnConstrains '()) '())) (let $2 '('"Value" (AsOptionalType (DataType 'Bool)) '('columnConstrains '('('default (Bool '"true")))) '())) (let $3 '('('mode 'create) '('columns '($1 $2)) '('primarykey '('"Key")))) (let $4 (Write! world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/test_show_create"))) (Void) $3)) (return (Commit! $4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-28T09:09:39.938032Z node 1 :KQP_YQL DEBUG: TraceId: 01jqe077838xdy2rzsv09hmpd5, SessionId: CompileActor 2025-03-28 09:09:39.937 DEBUG ydb-core-sys_view-ut(pid=172576, tid=0x00007F013F383640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 239us 2025-03-28T09:09:39.938565Z node 1 :KQP_YQL DEBUG: TraceId: 01jqe077838xdy2rzsv09hmpd5, SessionId: CompileActor 2025-03-28 09:09:39.938 DEBUG ydb-core-sys_view-ut(pid=172576, tid=0x00007F013F383640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-03-28T09:09:39.940190Z node 1 :KQP_YQL DEBUG: TraceId: 01jqe077838xdy2rzsv09hmpd5, SessionId: CompileActor 2025-03-28 09:09:39.940 DEBUG ydb-core-sys_view-ut(pid=172576, tid=0x00007F013F383640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-03-28T09:09:39.940911Z node 1 :KQP_YQL TRACE: TraceId: 01jqe077838xdy2rzsv09hmpd5, SessionId: CompileActor 2025-03-28 09:09:39.940 TRACE ydb-core-sys_view-ut(pid=172576, tid=0x00007F013F383640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 '('"Key" (AsOptionalType (DataType 'Uint32)) '('columnConstrains '()) '())) (let $2 '('"Value" (AsOptionalType (DataType 'Bool)) '('columnConstrains '('('default (Bool '"true")))) '())) (let $3 '('('mode 'create) '('columns '($1 $2)) '('primarykey '('"Key")))) (let $4 (Write! world (DataSink '"kikimr" '"db") (Key '('tablescheme (String '"/Root/test_show_create"))) (Void) $3)) (return (Commit! $4 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-28T09:09:39.967636Z node 1 :KQP_YQL DEBUG: TraceId: 01jqe077838xdy2rzsv09hmpd5, SessionId: CompileActor 2025-03-28 09:09:39.967 DEBUG ydb-core-sys_view-ut(pid=172576, tid=0x00007F013F383640) [perf] type_ann_expr.cpp:47: Execution of [TypeAnnotationTransformer::DoTransform] took 22.9ms 2025-03-28T09:09:39.968236Z node 1 :KQP_YQL DEBUG: TraceId: 01jqe077838xdy2rzsv09hmpd5, SessionId: CompileActor 2025-03-28 09:09:39.968 DEBUG ydb-core-sys_view-ut(pid=172576, tid=0x00007F013F383640) [perf] yql_expr_constraint.cpp:3226: Execution of [ConstraintTransformer::DoTransform] took 336us 2025-03-28T09:09:39.968357Z node 1 :KQP_YQL DEBUG: TraceId: 01jqe077838xdy2rzsv09hmpd5, SessionId: CompileActor 2025-03-28 09:09:39.968 DEBUG ydb-core-sys_view-ut(pid=172576, tid=0x00007F013F383640) [perf] yql_expr_csee.cpp:620: Execution of [UpdateCompletness] took 54us 2025-03-28T09:09:39.968865Z node 1 :KQP_YQL DEBUG: TraceId: 01jqe077838xdy2rzsv09hmpd5, SessionId: CompileActor 2025-03-28 09:09:39.968 DEBUG ydb-core-sys_view-ut(pid=172576, tid=0x00007F013F383640) [perf] yql_expr_csee.cpp:633: Execution of [EliminateCommonSubExpressions] took 447us 2025-03-28T09:09:39.974078Z node 1 :KQP_YQL INFO: TraceId: 01j ... 
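For readability, the two statements this ShowCreateTableTemporary test compiles — logged in the KQP_COMPILE_SERVICE entries above and below with escaped newlines — are reproduced here un-escaped; the text is verbatim from the log, only the formatting is new:

    CREATE TABLE test_show_create (
        Key Uint32,
        Value Bool DEFAULT true,  -- DEFAULT constraint, matching the 'columnConstrains ('default (Bool '"true")) node in the compiled expression
        PRIMARY KEY (Key)
    );

    DROP TABLE `test_show_create`;  -- compiled later in this same test; see the KiDropTable! entries below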
y: 1 } 2025-03-28T09:11:01.889246Z node 31 :TX_PROXY ERROR: Actor# [31:7486785389821561689:3106] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/OWY1OTZiNjQtOGJiNWQ1ZmEtMzliNmM0MzEtNWI4MDhkZWU=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:01.893835Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-03-28T09:11:02.017075Z node 31 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n DROP TABLE `test_show_create`;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-28T09:11:02.017209Z node 31 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F000BA1F28 2025-03-28T09:11:02.017272Z node 31 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [31:7486785385526593782:2342], queryUid: , queryText: "\n DROP TABLE `test_show_create`;\n ", keepInCache: 1, split: 0{ TraceId: 01jqe09r40bgcknpy3yncgqm1g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=OWY1OTZiNjQtOGJiNWQ1ZmEtMzliNmM0MzEtNWI4MDhkZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-03-28T09:11:02.017494Z node 31 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n DROP TABLE `test_show_create`;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-28T09:11:02.017555Z node 31 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [31:7486785385526593782:2342], queueSize: 1 2025-03-28T09:11:02.018478Z node 31 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [31:7486785385526593782:2342], compileActor: [31:7486785394116529120:2393] 2025-03-28T09:11:02.038218Z node 31 :KQP_YQL INFO: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.037 INFO ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [KQP] kqp_host.cpp:1382: Compiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/test_show_create"))) (Void) '('('mode 'drop)))) ) 2025-03-28T09:11:02.038936Z node 31 :KQP_YQL TRACE: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.038 TRACE ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/test_show_create"))) (Void) '('('mode 'drop)))) (return (Commit! 
$1 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-28T09:11:02.039187Z node 31 :KQP_YQL DEBUG: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.039 DEBUG ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [perf] yql_expr_optimize.cpp:540: Execution of [ExpandApply] took 166us 2025-03-28T09:11:02.039566Z node 31 :KQP_YQL DEBUG: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.039 DEBUG ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core eval] yql_eval_expr.cpp:384: EvaluateExpression - start 2025-03-28T09:11:02.041423Z node 31 :KQP_YQL DEBUG: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.041 DEBUG ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core eval] yql_eval_expr.cpp:1156: EvaluateExpression - finish 2025-03-28T09:11:02.042165Z node 31 :KQP_YQL TRACE: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.041 TRACE ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Write! world (DataSink '"kikimr" '"db") (Key '('table (String '"/Root/test_show_create"))) (Void) '('('mode 'drop)))) (return (Commit! $1 (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-03-28T09:11:02.045630Z node 31 :KQP_YQL DEBUG: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.045 DEBUG ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [perf] type_ann_expr.cpp:47: Execution of [TypeAnnotationTransformer::DoTransform] took 261us 2025-03-28T09:11:02.045933Z node 31 :KQP_YQL DEBUG: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.045 DEBUG ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [perf] yql_expr_constraint.cpp:3226: Execution of [ConstraintTransformer::DoTransform] took 169us 2025-03-28T09:11:02.046017Z node 31 :KQP_YQL DEBUG: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.045 DEBUG ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [perf] yql_expr_csee.cpp:620: Execution of [UpdateCompletness] took 31us 2025-03-28T09:11:02.046289Z node 31 :KQP_YQL DEBUG: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.046 DEBUG ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [perf] yql_expr_csee.cpp:633: Execution of [EliminateCommonSubExpressions] took 220us 2025-03-28T09:11:02.048282Z node 31 :KQP_YQL INFO: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.047 INFO ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropTable! world $1 '"/Root/test_show_create" '() '"table" '0)) (return (Commit! 
$2 $1 '('('"mode" '"flush")))) ) 2025-03-28T09:11:02.048341Z node 31 :KQP_YQL INFO: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.048 INFO ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core exec] yql_execution.cpp:59: Begin, root #43 2025-03-28T09:11:02.048383Z node 31 :KQP_YQL INFO: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.048 INFO ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-03-28T09:11:02.048426Z node 31 :KQP_YQL TRACE: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.048 TRACE ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-03-28T09:11:02.048475Z node 31 :KQP_YQL TRACE: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.048 TRACE ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core exec] yql_execution.cpp:387: {1}, callable #42 2025-03-28T09:11:02.048563Z node 31 :KQP_YQL TRACE: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.048 TRACE ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core exec] yql_execution.cpp:387: {1}, callable #42 2025-03-28T09:11:02.048922Z node 31 :KQP_YQL INFO: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.048 INFO ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core exec] yql_execution.cpp:466: Register async execution for node #42 2025-03-28T09:11:02.049009Z node 31 :KQP_YQL INFO: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.048 INFO ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core exec] yql_execution.cpp:87: Finish, output #43, status: Async 2025-03-28T09:11:02.049130Z node 31 :KQP_YQL INFO: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.049 INFO ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core exec] yql_execution.cpp:133: Completed async execution for node #42 2025-03-28T09:11:02.049191Z node 31 :KQP_YQL INFO: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.049 INFO ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core exec] yql_execution.cpp:153: State is ExecutionComplete after apply async changes for node #42 2025-03-28T09:11:02.049238Z node 31 :KQP_YQL INFO: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.049 INFO ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core exec] yql_execution.cpp:59: Begin, root #43 2025-03-28T09:11:02.049273Z node 31 :KQP_YQL INFO: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.049 INFO ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core exec] yql_execution.cpp:72: Collect unused nodes for root #43, status: Ok 2025-03-28T09:11:02.049309Z node 31 :KQP_YQL TRACE: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.049 TRACE ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core exec] yql_execution.cpp:387: {0}, callable #43 2025-03-28T09:11:02.049364Z node 31 :KQP_YQL INFO: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.049 INFO ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core exec] yql_execution.cpp:577: Node #43 finished execution 2025-03-28T09:11:02.049419Z node 31 :KQP_YQL INFO: TraceId: 01jqe09r40bgcknpy3yncgqm1g, 
SessionId: CompileActor 2025-03-28 09:11:02.049 INFO ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core exec] yql_execution.cpp:594: Node #43 created 0 trackable nodes: 2025-03-28T09:11:02.049451Z node 31 :KQP_YQL INFO: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.049 INFO ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core exec] yql_execution.cpp:87: Finish, output #43, status: Ok 2025-03-28T09:11:02.049500Z node 31 :KQP_YQL INFO: TraceId: 01jqe09r40bgcknpy3yncgqm1g, SessionId: CompileActor 2025-03-28 09:11:02.049 INFO ydb-core-sys_view-ut(pid=172576, tid=0x00007F012EF8C640) [core exec] yql_execution.cpp:93: Creating finalizing transformer, output #43 2025-03-28T09:11:02.050576Z node 31 :KQP_COMPILE_SERVICE DEBUG: Received response, sender: [31:7486785385526593782:2342], status: SUCCESS, compileActor: [31:7486785394116529120:2393] 2025-03-28T09:11:02.050731Z node 31 :KQP_COMPILE_SERVICE DEBUG: Send response, sender: [31:7486785385526593782:2342], queryUid: 4bf2fac0-d0ec1c5c-76f574c6-6a2618c9, status:SUCCESS 2025-03-28T09:11:02.064453Z node 31 :TX_PROXY ERROR: Actor# [31:7486785394116529132:3221] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:02.072246Z node 31 :TX_PROXY ERROR: Actor# [31:7486785394116529141:3228] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/OWY1OTZiNjQtOGJiNWQ1ZmEtMzliNmM0MzEtNWI4MDhkZWU=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:02.183267Z node 31 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 31, TabletId: 72075186224037889 not found ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 23679, MsgBus: 4840 2025-03-28T09:09:06.089741Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784895594828526:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:06.093799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00187d/r3tmp/tmp5ySYEU/pdisk_1.dat 2025-03-28T09:09:06.515104Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:06.535268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:06.535359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:06.537212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23679, node 1 2025-03-28T09:09:06.615247Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:06.615267Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:06.615274Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
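For reference, the column-store tables this KqpJoinOrder::FiveWayJoinWithComplexPreds test creates are shown here un-escaped; the text is taken verbatim from the KQP_SLOW_LOG warning that appears further down in this test's output, and only the formatting is new (the commented-out random_field columns are part of the original query text):

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);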
2025-03-28T09:09:06.615389Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4840 TClient is connected to server localhost:4840 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:07.134275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:09.036280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784908479730940:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:09.036283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784908479730948:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:09.036375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:09.039760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:09:09.049268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784908479730954:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:09:09.140676Z node 1 :TX_PROXY ERROR: Actor# [1:7486784908479731007:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:09.446580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:09:09.629040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784908479731316:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:09.629119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784908479731256:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:09.629221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784908479731316:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:09:09.629222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784908479731256:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:09:09.629464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784908479731316:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:09:09.629482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784908479731256:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:09:09.629639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784908479731256:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:09:09.629646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784908479731316:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:09:09.629773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784908479731256:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:09:09.629800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784908479731316:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:09:09.629919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784908479731256:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-03-28T09:09:09.629941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784908479731316:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:09:09.630050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784908479731256:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:09:09.630090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784908479731316:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:09:09.630172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784908479731256:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:09:09.630209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784908479731316:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:09:09.630301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784908479731316:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:09:09.630311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784908479731256:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:09:09.630647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784908479731256:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:09:09.630647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784908479731316:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:09:09.630901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784908479731256:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:09:09.630929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784908479731316:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:09:09.631025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784908479731256:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:09:09.631049Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7486784908479731316:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:09:09.661503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784908479731310:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:09.661549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784908479731310:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:09:09.661701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_i ... tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.576602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.583052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.589211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.591684Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.608736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.621689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.629378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.637155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.640883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.650996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.652992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.667227Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.668915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.675708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.679140Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.684758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.687513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.693824Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.693832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.699849Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.700638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.707176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.709014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.714673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.716894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.722647Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.728238Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.736574Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.742457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.746216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.752591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.766376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.768445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.773435Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.783330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.788661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.793678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.799075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.805153Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.810445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.820392Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.824004Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.834089Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.837112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.848423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:46.848957Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:47.120645Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe07shba1ysv8k56rb9cc0z", SessionId: ydb://session/3?node_id=1&id=YzkxOTQ4NWUtMjVhMWVmYmItYTA2MzA4MDctYjI5Njk4YTk=, Slow query, duration: 49.188916s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:10:47.376910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:10:47.377293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:10:47.378253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> BasicUsage::WriteSessionSwitchDatabases [GOOD] |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/config/ut/ydb-services-config-ut |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut |81.5%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |81.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.5%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |81.5%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionSwitchDatabases [GOOD] Test command err: 2025-03-28T09:08:35.915536Z :WriteSessionNoAvailableDatabase INFO: Random seed for debugging is 1743152915915503 2025-03-28T09:08:36.644994Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784768337789855:2223];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:36.645041Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:08:36.779348Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784769185349665:2253];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:36.779671Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e33/r3tmp/tmpnWUv2S/pdisk_1.dat 2025-03-28T09:08:37.138938Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:08:37.149928Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:08:37.733654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:08:37.765016Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:37.783235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:37.783361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:37.784738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:37.784784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:37.791463Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:08:37.806739Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:08:37.806972Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:37.808432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11457, node 1 2025-03-28T09:08:38.052575Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/000e33/r3tmp/yandexzYU9F6.tmp 2025-03-28T09:08:38.052616Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
/home/runner/.ya/build/build_root/2909/000e33/r3tmp/yandexzYU9F6.tmp 2025-03-28T09:08:38.052838Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/000e33/r3tmp/yandexzYU9F6.tmp 2025-03-28T09:08:38.052996Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:08:38.138237Z INFO: TTestServer started on Port 22144 GrpcPort 11457 TClient is connected to server localhost:22144 PQClient connected to localhost:11457 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:38.607498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-28T09:08:41.443350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784790660186264:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:41.443581Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:41.443876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486784790660186291:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:41.451838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-03-28T09:08:41.490198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486784790660186293:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-03-28T09:08:41.578885Z node 2 :TX_PROXY ERROR: Actor# [2:7486784790660186321:2131] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:41.734214Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486784769185349665:2253];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:41.734279Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:41.812882Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784768337789855:2223];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:41.813172Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:08:41.835105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2025-03-28T09:08:41.842324Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486784790660186336:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:41.842538Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzQ2MTBlZDAtZDk4MDllNTYtOWU1YWZiYTUtNGRkYWZlZjc=, ActorId: [2:7486784790660186262:2309], ActorState: ExecuteState, TraceId: 01jqe05ets8q7p758nzmkt2rht, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:41.842037Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486784789812627284:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:08:41.842332Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2NhMTExZjQtYzMyYjU2ZmEtMjg3NGU3NDctODI3ZGZlYmQ=, ActorId: [1:7486784789812627243:2337], ActorState: ExecuteState, TraceId: 01jqe05ewvfnw06hys9qqtetxw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:08:41.844563Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:08:41.844521Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:08:42.011679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-03-28T09:08:42.176761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:11457", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, false, 1000); 2025-03-28T09:08:42.684258Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jqe05fp89v4wg246r4jg60f2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDU2MmNiZTMtMmYxNGNmZTUtYzg1NGEyOC00M2ExZTc2Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486784794107594985:2988] === CheckClustersList. Ok 2025-03-28T09:08:48.688308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:11457 MetaRequest { CmdGetTopicMetadata { Topic: ... 
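The "=== Init DC" statement quoted inline above, reformatted for readability (content unchanged):

    UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES
        ("dc1", "localhost:11457", true, true, 1000),
        ("dc2", "dc2.logbroker.yandex.net", false, false, 1000);

This is the seed row set the test writes before the CheckClustersList step succeeds, consistent with the earlier SCHEME_ERROR entries, which report that /Root/PQ/Config/V2/Cluster did not yet exist when first queried.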
.764020Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 3, Generation: 1 2025-03-28T09:10:44.764067Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [3:7486785316789293491:3608], now have 1 active actors on pipe 2025-03-28T09:10:44.764094Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:10:44.764116Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:10:44.764192Z node 3 :PERSQUEUE INFO: new Cookie src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src_id 2025-03-28T09:10:44.764286Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-28T09:10:44.764332Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:10:44.764440Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:10:44.764455Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:10:44.764516Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:10:44.764628Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0 2025-03-28T09:10:45.774849Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486785316789293463:3608] (SourceId=src_id, PreferedPartition=(NULL)) Update the table 2025-03-28T09:10:45.807495Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486785316789293463:3608] (SourceId=src_id, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=1 Status=SUCCESS 2025-03-28T09:10:45.807527Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486785316789293463:3608] (SourceId=src_id, PreferedPartition=(NULL)) Start idle 2025-03-28T09:11:03.474610Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] TPersQueueReadBalancer::HandleWakeup 2025-03-28T09:11:03.474654Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 4 2025-03-28T09:11:03.478599Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2025-03-28T09:11:03.479416Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 4 DataSize: 0 UsedReserveSize: 0 2025-03-28T09:11:03.479533Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--test-topic] ProcessPendingStats. 
PendingUpdates size 1 2025-03-28T09:11:04.815328Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0] MessageGroupId [src_id] Write 1 messages with Id from 1 to 1 >>> Got event: ReadyToAcceptEvent >>> Ready to answer: ok 2025-03-28T09:11:04.819237Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0] MessageGroupId [src_id] Write session: try to update token 2025-03-28T09:11:04.819290Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0] MessageGroupId [src_id] Send 1 message(s) (0 left), first sequence number is 3 2025-03-28T09:11:04.826785Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-28T09:11:04.827124Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-03-28T09:11:04.830853Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:11:04.830892Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:11:04.830987Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-03-28T09:11:04.831106Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:11:04.831385Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:11:04.831409Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:11:04.831465Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 3 partNo : 0 messageNo: 1 size 98 offset: -1 2025-03-28T09:11:04.831637Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 3 partNo 0 2025-03-28T09:11:04.832700Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 172 count 1 nextOffset 3 batches 1 2025-03-28T09:11:04.833284Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 160 WTime 1743153064831 2025-03-28T09:11:04.833404Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:11:04.833424Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:11:04.833442Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:11:04.833457Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:11:04.833474Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000psrc_id 2025-03-28T09:11:04.833488Z node 3 :PERSQUEUE DEBUG: [PQ: 
72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-03-28T09:11:04.833503Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:11:04.833520Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:11:04.833537Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:11:04.833586Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:11:04.833656Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 160 2025-03-28T09:11:04.845543Z node 3 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 160 actorID [3:7486784882997588151:2489] 2025-03-28T09:11:04.845689Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 105 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:11:04.845734Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:11:04.845774Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-03-28T09:11:04.845982Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] topic 'rt3.dc1--test-topicCounters. CacheSize 480 CachedBlobs 3 2025-03-28T09:11:04.846013Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-03-28T09:11:04.846086Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:11:04.847220Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2025-03-28T09:11:04.847376Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 3 written { offset: 2 } } write_statistics { persisting_time { nanos: 10000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2025-03-28T09:11:04.847405Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 2025-03-28T09:11:04.847431Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0] MessageGroupId [src_id] Write session: acknoledged message 1 2025-03-28T09:11:04.850825Z node 3 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037892' partition 0 offset 2 partno 0 count 1 parts 0 size 160 2025-03-28T09:11:04.855396Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0 grpc read done: success: 0 data: 2025-03-28T09:11:04.855426Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0 grpc read failed 2025-03-28T09:11:04.855457Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0 grpc closed 2025-03-28T09:11:04.855473Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0 is DEAD 2025-03-28T09:11:04.855902Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:11:04.861220Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: Cancelled on the server side, Details: , InternalError: 0 2025-03-28T09:11:04.861327Z :ERROR: [/Root] TraceId [] SessionId [src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0] MessageGroupId [src_id] Got error. Status: CLIENT_CANCELLED, Description:
: Error: GRpc error: (1): Cancelled on the server side 2025-03-28T09:11:04.861366Z :ERROR: [/Root] TraceId [] SessionId [src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0] MessageGroupId [src_id] Write session will not restart after a fatal error 2025-03-28T09:11:04.861401Z :INFO: [/Root] TraceId [] SessionId [src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0] MessageGroupId [src_id] Write session will now close 2025-03-28T09:11:04.861474Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0] MessageGroupId [src_id] Write session: aborting 2025-03-28T09:11:04.862799Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7486785316789293491:3608] destroyed 2025-03-28T09:11:04.862878Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-28T09:11:04.915990Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4813d6ae-e91e327b-2dc987f5-57db6aa1_0] MessageGroupId [src_id] Write session: destroy |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |81.5%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |81.5%| [TA] $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |81.5%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |81.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |81.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats >> LocalTableWriter::ConsistentWrite [GOOD] |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats >> LocalTableWriter::ApplyInCorrectOrder [GOOD] |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |81.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental >> LocalTableWriter::WaitTxIds [GOOD] >> LocalTableWriter::WriteTable [GOOD] >> LocalTableWriter::DecimalKeys [GOOD] >> LocalTableWriter::SupportedTypes [GOOD] >> TFlatTest::LargeDatashardReplyRW [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite [GOOD] Test command err: 2025-03-28T09:11:05.653212Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785406994738004:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:05.667510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fa6/r3tmp/tmpmBR643/pdisk_1.dat 2025-03-28T09:11:06.839638Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:06.924406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:06.930435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:06.971535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:07.033610Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:07.197950Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.163306s 2025-03-28T09:11:07.198026Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.163396s TClient is connected to server localhost:16236 TServer::EnableGrpc on GrpcPort 18765, node 1 2025-03-28T09:11:08.207417Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:08.207450Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:08.207459Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:08.207614Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16236 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:09.212605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:09.397482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153069840 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) 2025-03-28T09:11:10.063111Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handshake: worker# [1:7486785424174607809:2300] 2025-03-28T09:11:10.063410Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:11:10.063661Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-28T09:11:10.063698Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Send handshake: worker# [1:7486785424174607809:2300] 2025-03-28T09:11:10.064290Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-28T09:11:10.071309Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 
72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-03-28T09:11:10.071482Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 },{ Order: 2 BodySize: 48 },{ Order: 3 BodySize: 48 }] } 2025-03-28T09:11:10.071665Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428469575206:2365] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-28T09:11:10.071718Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.071827Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428469575206:2365] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 2 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 3 Group: 0 Step: 3 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-03-28T09:11:10.100269Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428469575206:2365] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-28T09:11:10.100339Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.100410Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2025-03-28T09:11:10.103830Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-28T09:11:10.104453Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-28T09:11:10.105086Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } VersionTxIds { Version { Step: 30 TxId: 0 } TxId: 3 } 
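The TEvTxIdResult above carries version bounds (Step 20 -> TxId 2, Step 30 -> TxId 3), and the subsequent TEvRecords show records with Steps 11 and 12 landing under the first bound while Steps 21 and 22 land under the second. A self-contained model of that lookup, using illustrative names rather than the real NKikimr types:

#include <cstdint>
#include <map>
#include <optional>
#include <cassert>

class TVersionTxIndex {
public:
    // One VersionTxIds entry: records with Step below versionStep map to txId.
    void Add(uint64_t versionStep, uint64_t txId) {
        Bounds_[versionStep] = txId;
    }

    std::optional<uint64_t> Resolve(uint64_t recordStep) const {
        auto it = Bounds_.upper_bound(recordStep); // first bound > recordStep
        if (it == Bounds_.end()) {
            return std::nullopt; // no bound yet: buffer until next TEvTxIdResult
        }
        return it->second;
    }

private:
    std::map<uint64_t, uint64_t> Bounds_;
};

int main() {
    TVersionTxIndex idx;
    // As delivered in the TEvTxIdResult messages above.
    idx.Add(10, 1);
    idx.Add(20, 2);
    idx.Add(30, 3);
    assert(idx.Resolve(11) == 2);  // Steps 11,12 -> TxId 2
    assert(idx.Resolve(21) == 3);  // Steps 21,22 -> TxId 3
    assert(!idx.Resolve(31));      // beyond the last bound: must wait
}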
2025-03-28T09:11:10.105232Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 5 BodySize: 49 },{ Order: 6 BodySize: 49 },{ Order: 7 BodySize: 49 },{ Order: 8 BodySize: 49 }] } 2025-03-28T09:11:10.105444Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428469575206:2365] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 6 Group: 0 Step: 12 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 7 Group: 0 Step: 21 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 8 Group: 0 Step: 22 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-03-28T09:11:10.111228Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428469575206:2365] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-28T09:11:10.111292Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.111336Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [5,6,7,8] } 2025-03-28T09:11:10.111857Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-28T09:11:10.112052Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 9 BodySize: 49 },{ Order: 10 BodySize: 49 }] } 2025-03-28T09:11:10.112182Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428469575206:2365] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 9 Group: 0 Step: 13 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 10 Group: 0 Step: 23 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-03-28T09:11:10.119605Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428469575206:2365] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-28T09:11:10.119690Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.119738Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { 
Records [9,10] } 2025-03-28T09:11:10.123118Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428469575203:2365] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-28T09:11:10.662846Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785406994738004:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:10.681409Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] Test command err: 2025-03-28T09:11:05.649704Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785408450286853:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:05.676734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e7f/r3tmp/tmpdvKcKE/pdisk_1.dat 2025-03-28T09:11:06.858423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:06.921892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:06.928773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:06.970154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:07.024743Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:07.194974Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.157257s 2025-03-28T09:11:07.195099Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.157406s TClient is connected to server localhost:8570 TServer::EnableGrpc on GrpcPort 29119, node 1 2025-03-28T09:11:08.198026Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:08.198050Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:08.198057Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:08.198173Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8570 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:09.214537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:09.402959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153069847 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-03-28T09:11:10.034893Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785429925124057:2361] Handshake: worker# [1:7486785429925124058:2362] 2025-03-28T09:11:10.035333Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785429925124057:2361] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:11:10.035559Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785429925124057:2361] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-28T09:11:10.035592Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785429925124057:2361] Send handshake: worker# [1:7486785429925124058:2362] 2025-03-28T09:11:10.039246Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785429925124057:2361] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-28T09:11:10.095184Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785429925124057:2361] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-03-28T09:11:10.095400Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785429925124057:2361] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-03-28T09:11:10.095557Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785429925124063:2361] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-28T09:11:10.095612Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785429925124057:2361] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.095737Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785429925124063:2361] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-03-28T09:11:10.110842Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785429925124063:2361] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-28T09:11:10.110959Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785429925124057:2361] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.111026Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785429925124057:2361] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ApplyInCorrectOrder [GOOD] Test command err: 2025-03-28T09:11:05.651331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785406552567690:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:05.670887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f97/r3tmp/tmp7P0fL4/pdisk_1.dat 2025-03-28T09:11:06.921724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:06.924364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:06.930836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:06.975616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:07.024810Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:07.195771Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.158045s 2025-03-28T09:11:07.195843Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.158128s TClient is connected to server localhost:3640 TServer::EnableGrpc on GrpcPort 19191, node 1 2025-03-28T09:11:08.199229Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:08.199255Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:08.199266Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:08.199430Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3640 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:09.254093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:09.394373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153069840 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-03-28T09:11:09.991336Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785423732437606:2364] Handshake: worker# [1:7486785423732437509:2300] 2025-03-28T09:11:09.991676Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785423732437606:2364] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:11:10.006955Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785423732437606:2364] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-28T09:11:10.017732Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785423732437606:2364] Send handshake: worker# [1:7486785423732437509:2300] 2025-03-28T09:11:10.019246Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785423732437606:2364] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-28T09:11:10.069857Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785423732437606:2364] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-03-28T09:11:10.070063Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785423732437606:2364] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-03-28T09:11:10.070229Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428027404906:2364] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-28T09:11:10.070274Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785423732437606:2364] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.070357Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428027404906:2364] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-03-28T09:11:10.095621Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428027404906:2364] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-28T09:11:10.095695Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785423732437606:2364] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.095765Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785423732437606:2364] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-03-28T09:11:10.100533Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785423732437606:2364] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-28T09:11:10.107069Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785423732437606:2364] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2025-03-28T09:11:10.107231Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785423732437606:2364] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 },{ Order: 3 BodySize: 48 }] } 2025-03-28T09:11:10.107347Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428027404906:2364] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 3 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-03-28T09:11:10.115373Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428027404906:2364] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-28T09:11:10.115459Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785423732437606:2364] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.115525Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785423732437606:2364] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3] } 2025-03-28T09:11:10.654822Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785406552567690:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:10.654896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- 
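Every run above repeats the same bookkeeping cycle: TEvRequestRecords pulls record bodies from the source, TEvRecords hands them to the partition writer, and a STATUS_OK TEvApplyReplicationChangesResult releases the orders back to the source via TEvRemoveRecords. A sketch of that cycle with hypothetical types, not the real actors from ydb/core/tx/replication:

#include <cstdint>
#include <deque>
#include <vector>

class TPendingRecords {
public:
    // A record body was requested and forwarded to the partition writer.
    void OnRequested(uint64_t order) {
        InFlight_.push_back(order);
    }

    // Called on TEvApplyReplicationChangesResult STATUS_OK: everything in
    // flight is now durable and may be removed at the source.
    std::vector<uint64_t> OnApplied() {
        std::vector<uint64_t> removed(InFlight_.begin(), InFlight_.end());
        InFlight_.clear();
        return removed; // e.g. [1,2,3], as in "TEvRemoveRecords { Records [1,2,3] }"
    }

private:
    std::deque<uint64_t> InFlight_;
};

int main() {
    TPendingRecords pending;
    for (uint64_t order : {1u, 2u, 3u}) {
        pending.OnRequested(order);
    }
    auto removed = pending.OnApplied(); // -> {1, 2, 3}
    return removed.size() == 3 ? 0 : 1;
}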
[TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD] Test command err: 2025-03-28T09:11:05.650934Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785406837891054:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:05.667372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e8f/r3tmp/tmpEks0tm/pdisk_1.dat 2025-03-28T09:11:06.907313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:06.947823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:06.947968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:06.975772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:07.047304Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:07.196641Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.147886s 2025-03-28T09:11:07.196742Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.147980s TClient is connected to server localhost:10299 TServer::EnableGrpc on GrpcPort 8312, node 1 2025-03-28T09:11:08.196193Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:08.196226Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:08.196236Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:08.196379Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10299 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:09.227264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:11:09.401204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153069840 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) 2025-03-28T09:11:10.067053Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428312728256:2366] Handshake: worker# [1:7486785424017760862:2301] 2025-03-28T09:11:10.067418Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428312728256:2366] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:11:10.067668Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428312728256:2366] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-28T09:11:10.067713Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428312728256:2366] Send handshake: worker# [1:7486785424017760862:2301] 2025-03-28T09:11:10.068354Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428312728256:2366] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 36b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z 
MessageGroupId: ProducerId: },{ Codec: RAW Data: 36b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-28T09:11:10.068594Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428312728256:2366] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 },{ Order: 2 BodySize: 36 },{ Order: 3 BodySize: 36 }] } 2025-03-28T09:11:10.068792Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428312728259:2366] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-28T09:11:10.068862Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428312728256:2366] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.068997Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428312728259:2366] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-03-28T09:11:10.107128Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428312728259:2366] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-28T09:11:10.107227Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428312728256:2366] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.107289Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785428312728256:2366] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DecimalKeys [GOOD] Test command err: 2025-03-28T09:11:05.655090Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785408687645484:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:05.670857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fda/r3tmp/tmp7nukFk/pdisk_1.dat 2025-03-28T09:11:06.870604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:06.923482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:06.929823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:06.972503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:06.994788Z node 1 :IMPORT WARN: Table profiles were not loaded 
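The DecimalKeys run below declares the key column as Decimal(1,0) and the value column as Decimal(35,10). A hedged sketch of building such typed values with the SDK's TValueBuilder; the Decimal setter and the TDecimalValue string/precision/scale constructor are assumed from the public SDK headers and may differ by version:

#include <ydb/public/sdk/cpp/client/ydb_value/value.h>

using namespace NYdb;

int main() {
    // Column types match the schema in the DecimalKeys output below:
    // key Decimal(1,0), value Decimal(35,10). The literal values here are
    // illustrative, not taken from the test.
    TValue row = TValueBuilder()
        .BeginStruct()
            .AddMember("key").Decimal(TDecimalValue("1", 1, 0))
            .AddMember("value").Decimal(TDecimalValue("3.1415926535", 35, 10))
        .EndStruct()
        .Build();
    (void)row;
}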
2025-03-28T09:11:07.198901Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.102851s 2025-03-28T09:11:07.198986Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.102951s TClient is connected to server localhost:30934 TServer::EnableGrpc on GrpcPort 14311, node 1 2025-03-28T09:11:08.197105Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:08.197129Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:08.197135Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:08.197338Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30934 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:09.222824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:09.399343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153069847 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Decimal(1,0)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 1 DecimalScale: 0 } IsBuildInProgress: false } Columns { Name: "value" Type: "Decimal(35,10)" TypeId: 4865 I... 
(TRUNCATED) 2025-03-28T09:11:10.042789Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430162482693:2366] Handshake: worker# [1:7486785425867515299:2301] 2025-03-28T09:11:10.043139Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430162482693:2366] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:11:10.043405Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430162482693:2366] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Decimal(1,0) : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-28T09:11:10.043451Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430162482693:2366] Send handshake: worker# [1:7486785425867515299:2301] 2025-03-28T09:11:10.048083Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430162482693:2366] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 57b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-28T09:11:10.048322Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430162482693:2366] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 57 },{ Order: 2 BodySize: 57 },{ Order: 3 BodySize: 57 }] } 2025-03-28T09:11:10.054950Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785430162482696:2366] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-28T09:11:10.055022Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430162482693:2366] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.055142Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785430162482696:2366] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b }] } 2025-03-28T09:11:10.099777Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785430162482696:2366] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-28T09:11:10.099842Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430162482693:2366] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.099909Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430162482693:2366] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2025-03-28T09:11:10.655670Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785408687645484:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:10.655718Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD] Test command err: 2025-03-28T09:11:05.655371Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785408788802118:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:05.668819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fc7/r3tmp/tmp1GWBXr/pdisk_1.dat 2025-03-28T09:11:06.941354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:06.941450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:06.942926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:06.974552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:07.057654Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:07.198969Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.127858s 2025-03-28T09:11:07.199049Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.127962s TClient is connected to server localhost:19208 TServer::EnableGrpc on GrpcPort 27014, node 1 2025-03-28T09:11:08.195442Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:08.195472Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:08.195480Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:08.195663Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19208 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:09.214093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:09.400065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153069847 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "int32_value" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "ui... 
(TRUNCATED) 2025-03-28T09:11:10.010823Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430263639313:2362] Handshake: worker# [1:7486785425968671921:2299] 2025-03-28T09:11:10.011329Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430263639313:2362] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:11:10.011635Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430263639313:2362] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-28T09:11:10.022917Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430263639313:2362] Send handshake: worker# [1:7486785425968671921:2299] 2025-03-28T09:11:10.024218Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430263639313:2362] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 45b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 44b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 66b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 71b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 12 SeqNo: 0 
CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 13 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 14 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 58b Offset: 15 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 16 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 17 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 18 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 76b Offset: 19 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 20 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 21 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 61b Offset: 22 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 23 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 24 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 46b Offset: 25 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 47b Offset: 26 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 50b Offset: 27 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 28 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 29 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 30 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 64b Offset: 31 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-28T09:11:10.035293Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430263639313:2362] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 45 },{ Order: 2 BodySize: 45 },{ Order: 3 BodySize: 45 },{ Order: 4 BodySize: 45 },{ Order: 5 BodySize: 41 },{ Order: 6 BodySize: 41 },{ Order: 7 BodySize: 45 },{ Order: 8 BodySize: 44 },{ Order: 9 BodySize: 66 },{ Order: 10 BodySize: 71 },{ Order: 11 BodySize: 72 },{ Order: 12 BodySize: 49 },{ Order: 13 BodySize: 48 },{ Order: 14 BodySize: 51 },{ Order: 15 BodySize: 58 },{ Order: 16 BodySize: 51 },{ Order: 17 BodySize: 54 },{ Order: 18 BodySize: 57 },{ Order: 19 BodySize: 76 },{ Order: 20 BodySize: 45 },{ Order: 21 BodySize: 54 },{ Order: 22 BodySize: 61 },{ Order: 23 BodySize: 51 },{ Order: 24 BodySize: 45 },{ Order: 25 BodySize: 46 },{ Order: 26 BodySize: 47 },{ Order: 27 BodySize: 50 },{ Order: 28 BodySize: 49 },{ Order: 29 BodySize: 72 },{ Order: 30 BodySize: 57 },{ Order: 31 BodySize: 64 }] } 2025-03-28T09:11:10.055203Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785430263639318:2362] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-28T09:11:10.055267Z node 1 
:REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430263639313:2362] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.055669Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785430263639318:2362] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 4 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 5 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 6 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 7 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 8 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 44b },{ Order: 9 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 66b },{ Order: 10 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 11 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 12 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 13 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 14 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 15 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 58b },{ Order: 16 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 17 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 18 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 19 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 76b },{ Order: 20 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 21 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 22 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 61b },{ Order: 23 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 24 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 25 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 46b },{ Order: 26 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 47b },{ Order: 27 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 50b },{ Order: 28 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 29 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 30 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 31 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 64b }] } 2025-03-28T09:11:10.276008Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785430263639318:2362] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-28T09:11:10.276123Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, 
LocalPathId: 2][1:7486785430263639313:2362] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.276230Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785430263639313:2362] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WaitTxIds [GOOD] Test command err: 2025-03-28T09:11:05.650914Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785407013836581:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:05.667216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fd2/r3tmp/tmpkC6JeL/pdisk_1.dat 2025-03-28T09:11:06.895128Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:06.927137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:06.931558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:06.973338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:07.028723Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:07.199829Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.166754s 2025-03-28T09:11:07.199924Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.166855s TClient is connected to server localhost:23262 TServer::EnableGrpc on GrpcPort 10907, node 1 2025-03-28T09:11:08.198195Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:08.198246Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:08.198255Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:08.198464Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23262 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:09.212492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:09.395693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153069847 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-03-28T09:11:09.994889Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785424193706488:2364] Handshake: worker# [1:7486785424193706489:2365] 2025-03-28T09:11:09.995221Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785424193706488:2364] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:2:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:11:10.010858Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785424193706488:2364] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-28T09:11:10.017952Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785424193706488:2364] Send handshake: worker# [1:7486785424193706489:2365] 2025-03-28T09:11:10.023019Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785424193706488:2364] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2025-03-28T09:11:10.060814Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785424193706488:2364] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2025-03-28T09:11:10.061014Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785424193706488:2364] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2025-03-28T09:11:10.061230Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428488673789:2364] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-28T09:11:10.061279Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785424193706488:2364] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.061377Z node 1 :REPLICATION_SERVICE DEBUG: 
[TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428488673789:2364] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2025-03-28T09:11:10.096811Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428488673789:2364] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-28T09:11:10.096881Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785424193706488:2364] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:10.096958Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785424193706488:2364] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-03-28T09:11:10.655058Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785407013836581:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:10.655127Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:11.048098Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785424193706488:2364] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2025-03-28T09:11:11.048280Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785424193706488:2364] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 }] } 2025-03-28T09:11:11.048399Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428488673789:2364] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2025-03-28T09:11:11.055346Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:2:1][72075186224037888][1:7486785428488673789:2364] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-28T09:11:11.055427Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785424193706488:2364] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2025-03-28T09:11:11.055474Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 2][1:7486785424193706488:2364] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } >> TTopicApiDescribes::DescribeTopic [GOOD] >> KqpJoinOrder::TPCDS95+ColumnStore [GOOD] |81.6%| [TA] $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |81.6%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.6%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |81.6%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReplyRW [GOOD] Test command err: 2025-03-28T09:10:40.009427Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785301414676953:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:40.009588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f23/r3tmp/tmprZLv7F/pdisk_1.dat 2025-03-28T09:10:40.792769Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:40.836197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:40.836287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:40.860175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24272 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:10:41.423706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:41.444168Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:41.474731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:10:45.008964Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785301414676953:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:45.009004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:55.070386Z node 1 :MINIKQL_ENGINE ERROR: Shard %72075186224037889, txid %281474976711360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2025-03-28T09:10:55.083860Z node 1 :TX_DATASHARD ERROR: Datashard execution error for [1743153054447:281474976711360] at 72075186224037889: Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2025-03-28T09:10:55.090810Z node 1 :TX_PROXY ERROR: Actor# [1:7486785361544225450:5970] txid# 281474976711360 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# RESULT_UNAVAILABLE shard id 72075186224037889 marker# P12 2025-03-28T09:10:55.090899Z node 1 :TX_PROXY ERROR: Actor# [1:7486785361544225450:5970] txid# 281474976711360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) proxy error code: ExecResultUnavailable 2025-03-28T09:10:55.885911Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785366930129867:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:55.885963Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f23/r3tmp/tmpHSJFqQ/pdisk_1.dat 2025-03-28T09:10:56.100446Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:56.100570Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:56.105126Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:10:56.125130Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:61500 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:10:56.368258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:56.377862Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:56.405812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:00.889055Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785366930129867:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:00.889113Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:11.074924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:11:11.074961Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:11.465024Z node 2 :MINIKQL_ENGINE ERROR: Shard %72075186224037888, txid %281474976716361, engine error: Error executing transaction (read-only: 0): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-03-28T09:11:11.476276Z node 2 :TX_DATASHARD ERROR: Datashard execution error for [0:281474976716361] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2025-03-28T09:11:11.482572Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976716361 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2025-03-28T09:11:11.487521Z node 2 :TX_PROXY ERROR: Actor# [2:7486785431354645759:5937] txid# 281474976716361 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. 
(71580986 > 50331648) proxy error code: ExecResultUnavailable |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |81.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign >> KqpJoin::IdxLookupPartialLeftPredicate [GOOD] >> IncrementalBackup::SimpleBackup [GOOD] >> IncrementalBackup::MultiRestore ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeTopic [GOOD] Test command err: 2025-03-28T09:10:59.247079Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785380519772800:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:59.247273Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:10:59.274448Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785382604361264:2215];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:59.528132Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018ee/r3tmp/tmpjkodrK/pdisk_1.dat 2025-03-28T09:10:59.558939Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:10:59.559307Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:10:59.913817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:59.913908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:59.918799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:59.918865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:59.928300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:10:59.931372Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:10:59.935268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:10:59.943761Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5498, node 1 2025-03-28T09:11:00.108224Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/0018ee/r3tmp/yandexDazmaY.tmp 2025-03-28T09:11:00.108252Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/0018ee/r3tmp/yandexDazmaY.tmp 2025-03-28T09:11:00.108470Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/0018ee/r3tmp/yandexDazmaY.tmp 2025-03-28T09:11:00.108629Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:11:00.147246Z INFO: TTestServer started on Port 65111 GrpcPort 5498 TClient is connected to 
server localhost:65111 PQClient connected to localhost:5498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:00.453285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:11:00.543594Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:11:00.558064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:11:03.242937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785397699642859:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:03.243129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:03.244256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785397699642879:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:03.248966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:11:03.455222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785397699642881:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:11:03.516957Z node 1 :TX_PROXY ERROR: Actor# [1:7486785397699642971:2763] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:03.812945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:03.827850Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486785399784230717:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:11:03.829509Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTg1NTYzZDItMmQzNmUyZTItMWIwOTIyZGQtZjg3MTExNjM=, ActorId: [2:7486785399784230686:2312], ActorState: ExecuteState, TraceId: 01jqe09sbv7sm5x6wz9csed5df, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:11:03.829150Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486785397699642981:2350], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:11:03.831174Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTAxMWI0OWYtYWUyMGJmNjUtZWQyNmVkZGEtYWQ4NDhjM2Y=, ActorId: [1:7486785397699642855:2336], ActorState: ExecuteState, TraceId: 01jqe09s9qe34fv5cq6g9cvsb1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:11:03.832007Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:11:03.832153Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:11:03.938204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:04.101456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:11:04.247629Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785380519772800:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:04.247701Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:04.270988Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785382604361264:2215];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:04.271064Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:04.489525Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. 
Ctx: { TraceId: 01jqe09tax31h0nfka9kj34xvj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTYxOGVlYWItZjA5YTM2ZDktOGE2NDIwYWYtMzQxNjM4MGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486785401994610715:3092] === CheckClustersList. Ok CreateTopicNoLegacy: rt3.dc1--topic-x Create topic: /Root/PQ/rt3.dc1--topic-x AddTopic: rt3.dc1--topic-x ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic-x, dc = dc1 2025-03-28T09:11:09.595653Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-03-28T09:11:09.871482Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable mete ... etention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } consumer_stats { min_partitions_last_read_time { seconds: 1743153071 nanos: 231000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } } } topic_stats { min_last_write_time { seconds: 1743153071 nanos: 346000000 } max_write_time_lag { } bytes_written { } } } } } Describe topic with location 2025-03-28T09:11:11.373251Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-03-28T09:11:11.373329Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//rt3.dc1--topic-x" include_location: true 2025-03-28T09:11:11.373415Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x 2025-03-28T09:11:11.374968Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7486785432059383696:2599]: Request location 2025-03-28T09:11:11.376675Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785432059383698:2600] connected; active server actors: 1 2025-03-28T09:11:11.376729Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 2 2025-03-28T09:11:11.376740Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037893, partitionId 1, NodeId 1, Generation 2 2025-03-28T09:11:11.376750Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 2, NodeId 2, Generation 2 2025-03-28T09:11:11.376762Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 2 2025-03-28T09:11:11.376773Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037899, partitionId 4, NodeId 1, Generation 2 2025-03-28T09:11:11.376781Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 5, NodeId 1, Generation 2 2025-03-28T09:11:11.376792Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] 
addPartitionToResponse tabletId 72075186224037893, partitionId 6, NodeId 1, Generation 2 2025-03-28T09:11:11.376803Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 7, NodeId 2, Generation 2 2025-03-28T09:11:11.376812Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 8, NodeId 2, Generation 2 2025-03-28T09:11:11.376821Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037895, partitionId 9, NodeId 2, Generation 2 2025-03-28T09:11:11.376829Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037896, partitionId 10, NodeId 1, Generation 2 2025-03-28T09:11:11.376839Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 11, NodeId 1, Generation 2 2025-03-28T09:11:11.376847Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037894, partitionId 12, NodeId 2, Generation 2 2025-03-28T09:11:11.376856Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037897, partitionId 13, NodeId 2, Generation 2 2025-03-28T09:11:11.376864Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037900][rt3.dc1--topic-x] addPartitionToResponse tabletId 72075186224037898, partitionId 14, NodeId 1, Generation 2 2025-03-28T09:11:11.377066Z node 1 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [1:7486785432059383696:2599]: Got location Got response: 2025-03-28T09:11:11.378992Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785432059383698:2600] disconnected; active server actors: 1 2025-03-28T09:11:11.379015Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037900][rt3.dc1--topic-x] pipe [1:7486785432059383698:2600] disconnected no session operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1743153070057 tx_id: 281474976710676 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 
10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } } } } Describe topic with no stats or location 2025-03-28T09:11:11.381497Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-03-28T09:11:11.381570Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//rt3.dc1--topic-x" 2025-03-28T09:11:11.381631Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1743153070057 tx_id: 281474976710676 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } } } } } Describe bad topic 2025-03-28T09:11:11.386644Z node 1 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-03-28T09:11:11.386729Z node 1 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2025-03-28T09:11:11.386830Z node 1 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ//bad-topic Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+ColumnStore [GOOD] |81.7%| [TA] $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUtf8ToString+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 15690, MsgBus: 25210 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001720/r3tmp/tmplk4x8n/pdisk_1.dat 2025-03-28T09:10:57.042839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:10:57.367634Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:57.383364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:57.383458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:57.393256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15690, node 1 2025-03-28T09:10:57.695213Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:10:57.695235Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:10:57.695241Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:10:57.695336Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25210 TClient is connected to server localhost:25210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:10:58.780087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:58.808015Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:10:58.831924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:59.081301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
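Note: the two bootstrap statements quoted earlier in this log (under "=== Init DC" and "===Run query:", the latter bound in this run with path = topic-x and dc = dc1; the $version value is not shown in the log) are reproduced here as plain YQL purely for readability:

UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight)
VALUES ("dc1", "localhost", true, true, 1000),
       ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);

DECLARE $version as Int64;
DECLARE $path AS Utf8;
DECLARE $cluster as Utf8;

-- Register the topic and bump the config version; the PERSQUEUE_CLUSTER_TRACKER
-- messages above suggest trackers poll these tables for changes.
UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster);
UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);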
2025-03-28T09:10:59.439089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:59.552597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:01.632596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785389362482663:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:01.632753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:01.936842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:01.969299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:02.011156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:02.050135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:02.090492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:02.129969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:02.177024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785393657450471:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:02.177108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:02.177374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785393657450476:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:02.181165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:02.197011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785393657450478:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:02.280911Z node 1 :TX_PROXY ERROR: Actor# [1:7486785393657450531:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:03.521607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:03.645448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20019, MsgBus: 6271 2025-03-28T09:11:05.340866Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785409712165977:2213];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:05.407120Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001720/r3tmp/tmp5ufww1/pdisk_1.dat 2025-03-28T09:11:05.503582Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:05.517868Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:05.517945Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:05.520317Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20019, node 2 2025-03-28T09:11:05.659178Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:05.659203Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:05.659209Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:05.659311Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6271 TClient is connected to server localhost:6271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
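The recurring "Resource pool default not found or you don't have access permissions" warnings above are the workload manager lazily bootstrapping its default pool under /Root/.metadata/workload_manager/pools: the first fetch finds nothing, an ESchemeOpCreateResourcePool is proposed, and the later "path exist, request accepts it" issue shows a concurrent creation being accepted idempotently after the "doublechecking" retry. Outside of this test bootstrap, a pool can also be created explicitly; a minimal sketch in YQL, assuming YDB's documented CREATE RESOURCE POOL syntax (the pool name and both limit values here are illustrative, not taken from this log):

CREATE RESOURCE POOL test_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,  -- illustrative cap on in-flight queries
    QUEUE_SIZE = 100              -- illustrative cap on queued queries
);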
2025-03-28T09:11:06.636591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:06.649585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:06.748721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:07.017992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:07.098302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:10.290810Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785409712165977:2213];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:10.290926Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:10.419931Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785431187004072:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:10.420038Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:10.549487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:10.612875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:10.677548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:10.725081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:10.785254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:10.873369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:11.027595Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785435481971890:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:11.027671Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:11.028000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785435481971895:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:11.031819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:11.053670Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785435481971897:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:11:11.121161Z node 2 :TX_PROXY ERROR: Actor# [2:7486785435481971951:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:12.294530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:12.370270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> IncrementalBackup::BackupRestore [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupPartialLeftPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 21336, MsgBus: 29510 2025-03-28T09:11:04.680870Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785405505364042:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:04.680956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00171d/r3tmp/tmpZln16A/pdisk_1.dat 2025-03-28T09:11:05.371388Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:05.400384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:05.400469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:05.402740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21336, node 1 2025-03-28T09:11:05.599186Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:05.599210Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:05.599227Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:05.599334Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29510 TClient is connected to server localhost:29510 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:06.478817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:06.529153Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:06.549059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:06.788908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:07.120141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:07.291245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:09.285767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785426980202139:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:09.285890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:09.639126Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785405505364042:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:09.639211Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:09.868269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:09.920367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:09.995586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:10.054156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:10.173418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:10.304730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:10.394218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785431275169962:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:10.394329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:10.394621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785431275169967:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:10.399633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:10.419625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785431275169969:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:10.515545Z node 1 :TX_PROXY ERROR: Actor# [1:7486785431275170026:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:11.951393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:11.991975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:11:12.037634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 query_phases { duration_us: 7328 table_access { name: "/Root/Join1_1" reads { rows: 8 bytes: 136 } partitions_count: 1 } cpu_time_us: 4286 affected_shards: 1 } query_phases { duration_us: 8894 table_access { name: "/Root/Join1_2" reads { rows: 3 bytes: 57 } partitions_count: 1 } cpu_time_us: 6670 affected_shards: 1 } compilation { duration_us: 1033524 cpu_time_us: 1027572 } process_cpu_time_us: 525 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":11,\"Plans\":[{\"PlanNodeId\":10,\"Plans\":[{\"PlanNodeId\":9,\"Plans\":[{\"E-Size\":\"No estimate\",\"PlanNodeId\":8,\"LookupKeyColumns\":[\"Key1\"],\"Node Type\":\"TableLookup\",\"Path\":\"\\/Root\\/Join1_2\",\"Columns\":[\"Fk3\",\"Key1\",\"Key2\",\"Value\"],\"E-Rows\":\"No estimate\",\"Table\":\"Join1_2\",\"Plans\":[{\"PlanNodeId\":7,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Iterator\":\"PartitionByKey\",\"Name\":\"Iterator\"},{\"Inputs\":[],\"Name\":\"PartitionByKey\",\"Input\":\"precompute_0_0\"}],\"Node Type\":\"ConstantExpr-Aggregate\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"FinishTimeMs\":1743153073603,\"Host\":\"ghrun-mdsjyjltvm\",\"ResultRows\":2,\"ResultBytes\":7,\"OutputRows\":2,\"ComputeTimeUs\":73,\"NodeId\":1,\"OutputChannels\":[{\"ChannelId\":1,\"Rows\":2,\"DstStageId\":0,\"Bytes\":7}],\"TaskId\":1,\"OutputBytes\":7}],\"PeakMemoryUsageBytes\":131072,\"CpuTimeUs\":1414}],\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7,\"History\":[3,7]}},\"Name\":\"RESULT\",\"Push\":{\"Chunks\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[3,1048576]},\"ResultRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Tasks\":1,\"ResultBytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"OutputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FinishedTasks\":1,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"BaseTimeMs\":1743153073600,\"CpuTimeUs\":{\"Count\":1,\"Sum\":991,\"Max\":991,\"Min\":991,\"History\":[3,991]},\"OutputBytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7}},\"CTE Name\":\"precompute_0_0\"}],\"PlanNodeType\":\"Connection\",\"E-Cost\":\"No 
estimate\"}],\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"TopSort\",\"Limit\":\"1001\",\"TopSortBy\":\"row.t2.Value\"},{\"Inputs\":[{\"InternalOperatorId\":3},{\"InternalOperatorId\":2}],\"E-Rows\":\"No estimate\",\"Condition\":\"t2.Key1 = t1.Fk21\",\"Name\":\"InnerJoin (MapJoin)\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"},{\"Inputs\":[],\"ToFlow\":\"precompute_0_0\",\"Name\":\"ToFlow\"},{\"Inputs\":[{\"ExternalPlanNodeId\":8}],\"E-Rows\":\"No estimate\",\"Predicate\":\"Exist(item.Key1)\",\"Name\":\"Filter\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TopSort-InnerJoin (MapJoin)-ConstantExpr-Filter\",\"Stats\":{\"ComputeNodes\":[ ... 4,\"Min\":34},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":34,\"Max\":34,\"Min\":34,\"History\":[5,34]}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"FirstMessageMs\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Bytes\":{\"Count\":1,\"Sum\":34,\"Max\":34,\"Min\":34,\"History\":[5,34]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":3133,\"Max\":3133,\"Min\":3133,\"History\":[5,3133]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"InputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Tasks\":1}}],\"Node Type\":\"Precompute_0\",\"Parent Relationship\":\"InitPlan\",\"PlanNodeType\":\"Materialize\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":1033524,\"CpuTimeUs\":1027572},\"ProcessCpuTimeUs\":525,\"TotalDurationUs\":1070519,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":0},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Plans\":[{\"PlanNodeId\":7,\"Operators\":[{\"E-Rows\":\"No estimate\",\"Columns\":[\"Fk3\",\"Key1\",\"Key2\",\"Value\"],\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\",\"Name\":\"TableLookup\",\"Table\":\"Join1_2\",\"LookupKeyColumns\":[\"Key1\"]}],\"Node Type\":\"TableLookup\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"E-Rows\":\"No estimate\",\"Predicate\":\"Exist(item.Key1)\",\"Name\":\"Filter\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node Type\":\"Filter\"},{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":13,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Key (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/Join1_1\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"Join1_1\",\"ReadColumns\":[\"Fk21\",\"Fk22\",\"Key\",\"Value\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"E-Size\":\"No estimate\",\"A-SelfCpu\":0.879,\"Name\":\"Filter\",\"Predicate\":\"Exist(item.Fk21) AND item.Value == \\\"Value3\\\"\",\"A-Rows\":2,\"E-Rows\":\"No estimate\",\"A-Cpu\":0.879,\"E-Cost\":\"No estimate\",\"A-Size\":34}],\"Node Type\":\"Filter\"}],\"Operators\":[{\"E-Rows\":\"No estimate\",\"Condition\":\"t2.Key1 = 
t1.Fk21\",\"Name\":\"InnerJoin (MapJoin)\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node Type\":\"InnerJoin (MapJoin)\"}],\"Operators\":[{\"A-Rows\":3,\"A-SelfCpu\":1.798,\"A-Cpu\":2.677,\"A-Size\":108,\"Name\":\"TopSort\",\"Limit\":\"1001\",\"TopSortBy\":\"row.t2.Value\"}],\"Node Type\":\"TopSort\"}],\"Operators\":[{\"A-Rows\":3,\"A-SelfCpu\":0.603,\"A-Cpu\":3.28,\"A-Size\":108,\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(declare %kqp%tx_result_binding_0_0 (ListType (StructType \'(\'\"Fk21\" (OptionalType (DataType \'Int32))) \'(\'\"Fk22\" (OptionalType (DataType \'String))) \'(\'\"Key\" (OptionalType (DataType \'Int32))) \'(\'\"Value\" (OptionalType (DataType \'String))))))\n(let $1 (KqpTable \'\"/Root/Join1_1\" \'\"72057594046644480:16\" \'\"\" \'1))\n(let $2 \'(\'\"Fk21\" \'\"Fk22\" \'\"Key\" \'\"Value\"))\n(let $3 (KqpRowsSourceSettings $1 $2 \'() (Void) \'()))\n(let $4 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $3)) (lambda \'($32) (FromFlow (Filter (ToFlow $32) (lambda \'($33) (And (Exists (Member $33 \'\"Fk21\")) (Coalesce (== (Member $33 \'\"Value\") (String \'\"Value3\")) (Bool \'false))))))) \'(\'(\'\"_logical_id\" \'1035) \'(\'\"_id\" \'\"af7e2ebf-47da9e17-1816fcff-f925a801\"))))\n(let $5 (DqCnUnionAll (TDqOutput $4 \'0)))\n(let $6 (DqPhyStage \'($5) (lambda \'($34) $34) \'(\'(\'\"_logical_id\" \'1413) \'(\'\"_id\" \'\"b701f409-3309fe8-a8b6c7dc-c0e27e8b\"))))\n(let $7 (DqCnResult (TDqOutput $6 \'0) \'()))\n(let $8 \'(\'(\'\"type\" \'\"data\")))\n(let $9 (KqpPhysicalTx \'($4 $6) \'($7) \'() $8))\n(let $10 \'\"%kqp%tx_result_binding_0_0\")\n(let $11 (DataType \'Int32))\n(let $12 (OptionalType $11))\n(let $13 (OptionalType (DataType \'String)))\n(let $14 (StructType \'(\'\"Fk21\" $12) \'(\'\"Fk22\" $13) \'(\'\"Key\" $12) \'(\'\"Value\" $13)))\n(let $15 (ListType $14))\n(let $16 %kqp%tx_result_binding_0_0)\n(let $17 \'(\'(\'\"_logical_id\" \'1078) \'(\'\"_id\" \'\"8a9ade83-aa58729a-b5a1e8f6-bb3c4e55\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $18 (DqPhyStage \'() (lambda \'() (Iterator (PartitionByKey $16 (lambda \'($35) (Member $35 \'\"Fk21\")) (Void) (Void) (lambda \'($36) (Map (Filter (FlatMap $36 (lambda \'($37) (Map (Take (Nth $37 \'1) (Uint64 \'1)) (lambda \'($38) (AsStruct \'(\'\"Fk21\" (Member $38 \'\"Fk21\"))))))) (lambda \'($39) (Exists (Member $39 \'\"Fk21\")))) (lambda \'($40) (AsStruct \'(\'\"Key1\" (Member $40 \'\"Fk21\"))))))))) $17))\n(let $19 (KqpTable \'\"/Root/Join1_2\" \'\"72057594046644480:17\" \'\"\" \'1))\n(let $20 \'(\'\"Fk3\" \'\"Key1\" \'\"Key2\" \'\"Value\"))\n(let $21 (KqpCnStreamLookup (TDqOutput $18 \'0) $19 $20 (ListType (StructType \'(\'\"Key1\" $12))) \'(\'(\'\"Strategy\" \'\"LookupRows\"))))\n(let $22 (Uint64 \'\"1001\"))\n(let $23 (StructType \'(\'\"t1.Fk21\" $12) \'(\'\"t1.Fk22\" $13) \'(\'\"t1.Key\" $12) \'(\'\"t1.Value\" $13) \'(\'\"t2.Fk3\" $13) \'(\'\"t2.Key1\" $12) \'(\'\"t2.Key2\" $13) \'(\'\"t2.Value\" $13)))\n(let $24 \'(\'(\'\"_logical_id\" \'1298) \'(\'\"_id\" \'\"28eaf50e-fed5d614-b663de92-e5fe75c6\") \'(\'\"_wide_channels\" $23)))\n(let $25 (DqPhyStage \'($21) (lambda \'($41) (block \'(\n (let $42 \'(\'Many \'Hashed \'Compact))\n (let $43 (SqueezeToDict (FlatMap (ToFlow $16) (lambda \'($46) (block \'(\n (let $47 (Member $46 \'\"Fk21\"))\n (let $48 (Nothing (OptionalType (TupleType $11 $14))))\n (let $49 (IfPresent $47 (lambda \'($50) (Just 
\'($50 $46))) $48))\n (return (If (Exists $47) $49 $48))\n )))) (lambda \'($51) (Nth $51 \'0)) (lambda \'($52) (Nth $52 \'1)) $42))\n (let $44 (TopSort (FlatMap $43 (lambda \'($53) (block \'(\n (let $54 \'(\'\"Fk3\" \'\"t2.Fk3\" \'\"Key1\" \'\"t2.Key1\" \'\"Key2\" \'\"t2.Key2\" \'\"Value\" \'\"t2.Value\"))\n (let $55 \'(\'\"Fk21\" \'\"t1.Fk21\" \'\"Fk22\" \'\"t1.Fk22\" \'\"Key\" \'\"t1.Key\" \'\"Value\" \'\"t1.Value\"))\n (return (MapJoinCore (OrderedFilter (ToFlow $41) (lambda \'($56) (Exists (Member $56 \'\"Key1\")))) $53 \'\"Inner\" \'(\'\"Key1\") \'(\'\"Fk21\") $54 $55 \'(\'\"t2.Key1\") \'(\'\"t1.Fk21\")))\n )))) $22 (Bool \'true) (lambda \'($57) (Member $57 \'\"t2.Value\"))))\n (let $45 (lambda \'($58) (Member $58 \'\"t1.Fk21\") (Member $58 \'\"t1.Fk22\") (Member $58 \'\"t1.Key\") (Member $58 \'\"t1.Value\") (Member $58 \'\"t2.Fk3\") (Member $58 \'\"t2.Key1\") (Member $58 \'\"t2.Key2\") (Member $58 \'\"t2.Value\")))\n (return (FromFlow (ExpandMap $44 $45)))\n))) $24))\n(let $26 (DqCnMerge (TDqOutput $25 \'0) \'(\'(\'\"7\" \'\"Asc\"))))\n(let $27 (DqPhyStage \'($26) (lambda \'($59) (FromFlow (NarrowMap (Take (ToFlow $59) $22) (lambda \'($60 $61 $62 $63 $64 $65 $66 $67) (AsStruct \'(\'\"t1.Fk21\" $60) \'(\'\"t1.Fk22\" $61) \'(\'\"t1.Key\" $62) \'(\'\"t1.Value\" $63) \'(\'\"t2.Fk3\" $64) \'(\'\"t2.Key1\" $65) \'(\'\"t2.Key2\" $66) \'(\'\"t2.Value\" $67)))))) \'(\'(\'\"_logical_id\" \'1311) \'(\'\"_id\" \'\"e04a96d9-a4d28b1c-235caaa9-ae9c0a6b\"))))\n(let $28 \'($18 $25 $27))\n(let $29 (DqCnResult (TDqOutput $27 \'0) \'()))\n(let $30 (KqpTxResultBinding $15 \'0 \'0))\n(let $31 (KqpPhysicalTx $28 \'($29) \'(\'($10 $30)) $8))\n(return (KqpPhysicalQuery \'($9 $31) \'((KqpTxResultBinding (ListType $23) \'1 \'0)) \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 1070519 total_cpu_time_us: 1039053 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Join1_2\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":17},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Fk3\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key1\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key2\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Value\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key1\\\",\\\"Key2\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\",\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Join1_1\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":16},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Fk21\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Fk22\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Value\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"cr
eated_at\":\"1743153073\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"f45ccfac-e3b29e52-fabc578b-d8c9fa9\",\"version\":\"1.0\"}" >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS95+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 15619, MsgBus: 4329 2025-03-28T09:08:49.471143Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784825961981225:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:49.471282Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00189c/r3tmp/tmpFJkb7m/pdisk_1.dat 2025-03-28T09:08:50.002705Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:50.007778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:50.007896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:50.011087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15619, node 1 2025-03-28T09:08:50.132578Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:50.132610Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:50.132622Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:50.132788Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4329 TClient is connected to server localhost:4329 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:50.839731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:08:50.886958Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:08:53.055523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784843141850937:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:53.055523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784843141850925:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:53.055635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:53.059292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:08:53.069856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784843141850939:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:08:53.127138Z node 1 :TX_PROXY ERROR: Actor# [1:7486784843141850990:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:53.469795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:08:53.664686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784843141851275:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:53.664951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784843141851275:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:53.665135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784843141851269:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:53.665163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784843141851269:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:53.665219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784843141851275:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:53.665273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784843141851269:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:53.665325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784843141851275:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:53.665365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784843141851269:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:53.665425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784843141851275:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:53.665455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784843141851269:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:53.665533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784843141851275:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-03-28T09:08:53.665539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784843141851269:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:53.665673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784843141851275:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:53.665812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784843141851275:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:53.665932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784843141851275:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:08:53.666052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784843141851275:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:08:53.666165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784843141851275:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:08:53.666260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784843141851275:2358];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:08:53.668776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784843141851269:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:53.669004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784843141851269:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:53.669112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784843141851269:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:08:53.669208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784843141851269:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:08:53.669317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784843141851269:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:08:53.669438Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7486784843141851269:2356];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:08:53.706392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784843141851250:2350];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:53.706450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486784843141851250:2350];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstra ... 1474976710714; 2025-03-28T09:10:30.725891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.727740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.732895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.738241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.747859Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.751627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.758458Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.760348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.770243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.772229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.782195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.784287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.794353Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.798413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.808566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.811889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.819587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.823895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.829246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.840010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.854080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.867476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.879196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.893936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.905051Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.910269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.931496Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.936516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.940088Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.944617Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.948817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.958409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.963196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.966804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.968071Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.972125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.974231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:30.977826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:31.166449Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0712jdrtn2eyqpjjvw8p6", SessionId: ydb://session/3?node_id=1&id=YzVhN2Q5ZDQtODFiMjgxYjktMTliZDdiNGItOGRlYWJjZTE=, Slow query, duration: 58.279212s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:10:31.936364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:10:31.936794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:10:31.937487Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039392;self_id=[1:7486785216804065589:11876];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-28T09:10:31.937858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:02.406078Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe098d08ef53kxgscb2wqxd", SessionId: ydb://session/3?node_id=1&id=YzVhN2Q5ZDQtODFiMjgxYjktMTliZDdiNGItOGRlYWJjZTE=, Slow query, duration: 16.481435s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n$ws_wh =\n(select ws1.ws_order_number ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2\n from web_sales ws1 cross join web_sales ws2\n where ws1.ws_order_number = ws2.ws_order_number\n and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk);\n-- start query 1 in stream 0 using template query95.tpl and seed 2031708268\n select\n count(distinct ws1.ws_order_number) as `order count`\n ,sum(ws_ext_ship_cost) as `total shipping cost`\n ,sum(ws_net_profit) as `total net profit`\nfrom\n web_sales ws1\n cross join date_dim\n cross join customer_address\n cross join web_site\nwhere\n cast(d_date as date) between cast('2002-4-01' as date) and\n (cast('2002-4-01' as date) + DateTime::IntervalFromDays(60))\nand ws1.ws_ship_date_sk = d_date_sk\nand ws1.ws_ship_addr_sk = ca_address_sk\nand ca_state = 'AL'\nand ws1.ws_web_site_sk = web_site_sk\nand web_company_name = 'pri'\nand ws1.ws_order_number in (select ws_order_number\n from $ws_wh)\nand ws1.ws_order_number in (select wr_order_number\n from web_returns cross join $ws_wh ws_wh\n where wr_order_number = ws_wh.ws_order_number)\norder by `order count`\nlimit 100;\n", parameters: 0b ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD] Test command err: 2025-03-28T09:11:07.089059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:11:07.089542Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:07.089616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00155b/r3tmp/tmpXzvgee/pdisk_1.dat 2025-03-28T09:11:07.573940Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:07.574020Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:07.574063Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:11:07.574206Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-03-28T09:11:07.574239Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:11:07.736289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-03-28T09:11:07.736531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:07.736747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:11:07.737027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:11:07.737103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:07.737195Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:07.737953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:11:07.738100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:11:07.738145Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:07.738181Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T09:11:07.738364Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:11:07.738408Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:11:07.738481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:07.738549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:11:07.738583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:11:07.738623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:11:07.747524Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:07.748185Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:07.748234Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T09:11:07.748397Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:11:07.748447Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:11:07.748515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:07.748565Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:11:07.748603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:11:07.748708Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:07.749149Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:07.749181Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T09:11:07.749287Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:11:07.749318Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:11:07.749366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:07.749403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:07.749445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-03-28T09:11:07.749472Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:07.749506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:11:07.755236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:11:07.755840Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:07.755898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-03-28T09:11:07.756068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:11:07.757338Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:11:07.757384Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T09:11:07.757422Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-03-28T09:11:07.757555Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-03-28T09:11:07.758087Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:07.758139Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:07.758173Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:11:07.758304Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-03-28T09:11:07.758334Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-28T09:11:07.758408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-03-28T09:11:07.758446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-03-28T09:11:07.758494Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-03-28T09:11:07.795871Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-03-28T09:11:07.795982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-03-28T09:11:07.796021Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:07.796439Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-28T09:11:07.796522Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-03-28T09:11:07.848485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:07.848597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:07.860579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:07.947755Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-03-28T09:11:07.948414Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:07.948466Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:07.948503Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:11:07.948653Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-03-28T09:11:07.948685Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:11:07.948775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:11:07.948895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... at tablet: 72057594046644480 2025-03-28T09:11:16.756179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 950 RawX2: 8589937345 } Origin: 72075186224037889 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-28T09:11:16.756231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:0, shardIdx: 72057594046644480:2, datashard: 72075186224037889, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-03-28T09:11:16.756268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-03-28T09:11:16.756305Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:16.756641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:16.756680Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:16.770123Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:11:16.770192Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-03-28T09:11:16.770254Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:11:16.770327Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:11:16.770546Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [2:1003:2797], Recipient [2:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:16.770582Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:16.770608Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:11:16.770774Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [2:739:2610], Recipient [2:409:2404]: 
NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 739 RawX2: 8589937202 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-28T09:11:16.770807Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-03-28T09:11:16.770863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 739 RawX2: 8589937202 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-28T09:11:16.770896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715662, tablet: 72075186224037888, partId: 0 2025-03-28T09:11:16.771017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480, message: Source { RawX1: 739 RawX2: 8589937202 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-28T09:11:16.771056Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715662:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-03-28T09:11:16.771117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715662:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 739 RawX2: 8589937202 } Origin: 72075186224037888 State: 2 TxId: 281474976715662 Step: 0 Generation: 1 2025-03-28T09:11:16.771161Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715662:0, shardIdx: 72057594046644480:1, datashard: 72075186224037888, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-03-28T09:11:16.771197Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:16.771237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-03-28T09:11:16.771276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715662:0, datashard: 72075186224037888, at schemeshard: 72057594046644480 2025-03-28T09:11:16.771304Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:0 129 -> 240 2025-03-28T09:11:16.771409Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:16.771838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:16.771867Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:16.771897Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:0 2025-03-28T09:11:16.771986Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:950:2753] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-03-28T09:11:16.772037Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:739:2610] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-03-28T09:11:16.772124Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037889 
state Ready 2025-03-28T09:11:16.772195Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-28T09:11:16.772366Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [2:409:2404], Recipient [2:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:11:16.772397Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:11:16.772438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:16.772486Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:0ProgressState, operation type TxCopyTable 2025-03-28T09:11:16.772525Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:16.772563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 281474976715662:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-03-28T09:11:16.772602Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715662, done: 0, blocked: 1 2025-03-28T09:11:16.772693Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715662:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715662 Name: CopyTableBarrier }, at tablet# 72057594046644480 2025-03-28T09:11:16.772742Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715662:0 240 -> 240 2025-03-28T09:11:16.773012Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-03-28T09:11:16.773054Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:11:16.773335Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:16.773363Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715662:0 2025-03-28T09:11:16.773456Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [2:409:2404], Recipient [2:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:11:16.773489Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:11:16.773588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:16.773632Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715662:0 ProgressState 2025-03-28T09:11:16.773741Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:16.773775Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-03-28T09:11:16.773811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-03-28T09:11:16.773861Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-03-28T09:11:16.773894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-03-28T09:11:16.773934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 1/1, is published: true 2025-03-28T09:11:16.773998Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:930:2737] message: TxId: 281474976715662 2025-03-28T09:11:16.774049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-03-28T09:11:16.774091Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-03-28T09:11:16.774128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:0 2025-03-28T09:11:16.774278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-03-28T09:11:16.774316Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-03-28T09:11:16.774719Z node 2 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:16.774800Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [2:930:2737] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715662 at schemeshard: 72057594046644480 2025-03-28T09:11:16.775261Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [2:938:2744], Recipient [2:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:11:16.775297Z node 2 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:11:16.775322Z node 2 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-03-28T09:11:16.946043Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [2:1030:2814], serverId# [2:1031:2815], sessionId# [0:0:0] 2025-03-28T09:11:16.946282Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe0a6he8frrq5wx0md247ag, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzVhOWEwYzgtYThkYjBiMmItZjBhZjgwY2MtMmZhODM2Njc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 4 } items { uint32_value: 40 } }, { items { uint32_value: 5 } items { uint32_value: 50 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds2+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 17229, MsgBus: 8018 2025-03-28T09:09:17.518669Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784943802766495:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:17.522952Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001875/r3tmp/tmpcjnq6L/pdisk_1.dat 2025-03-28T09:09:17.967269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:17.967372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:17.975611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:17.998893Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17229, node 1 2025-03-28T09:09:18.063545Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:18.063564Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:18.063577Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:18.063734Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8018 TClient is connected to server localhost:8018 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:18.673767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:09:18.695876Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:09:20.603235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784956687669065:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:20.603581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784956687669054:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:20.603640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:20.606953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:09:20.617030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784956687669068:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:09:20.683847Z node 1 :TX_PROXY ERROR: Actor# [1:7486784956687669119:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:21.003141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:09:21.290869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784960982636674:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:21.291078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784960982636674:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:09:21.291309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784960982636674:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:09:21.291414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784960982636674:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:09:21.291537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784960982636674:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:09:21.291660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784960982636674:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:09:21.291771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784960982636674:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:09:21.292121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784960982636674:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:09:21.292241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784960982636674:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:09:21.292348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784960982636674:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:09:21.292854Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486784960982636674:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:09:21.292987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784960982636674:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:09:21.370283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784960982636670:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:21.374828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784960982636670:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:09:21.375076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784960982636670:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:09:21.375180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784960982636670:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:09:21.375269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784960982636670:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:09:21.375361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784960982636670:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:09:21.375450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784960982636670:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:09:21.375543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784960982636670:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:09:21.375640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784960982636670:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:09:21.375745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784960982636670:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:09:21.375837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784960982636670:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:09:21.375927Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7486784960982636670:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:09:21.399956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486784960982636664:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:21.400027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486784960982636664:2349];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstra ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.574436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.575361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.581392Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.582452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.587970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.588347Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.594046Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.594134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.600122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.603196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.610246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.613162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.631619Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.641732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.657175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.671664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.684143Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.702320Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.716620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.726500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.740660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.746554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.764820Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.766605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.776780Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.779498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.785864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.788196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.796887Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.801905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.806660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.821290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.823492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.826808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.830069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.839732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.844993Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.847453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.852157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.853518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.859426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.864635Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.864899Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:58.870509Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:59.095250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:59.152616Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe08hmt575e45qh5gcsvgqh", SessionId: ydb://session/3?node_id=1&id=MjJlMzY0MDgtMTZmOTE0MzctMzc3OWMyNGUtMzFkMTc4MzQ=, Slow query, duration: 36.533231s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:10:59.497848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:10:59.498193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:10:59.498287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486785343234784216:11588];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-28T09:10:59.499103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TObjectStorageListingTest::TestFilter |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |81.7%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction >> KqpScanSpilling::HandleErrorsCorrectly >> KqpJoinOrder::CanonizedJoinOrderTPCH15 |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |81.7%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut >> KqpJoinOrder::FiveWayJoin-ColumnStore |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |81.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator >> KqpMultishardIndex::WriteIntoRenamingSyncIndex >> KqpIndexes::SecondaryIndexUpsert1DeleteUpdate >> KqpIndexes::SecondaryIndexOrderBy >> KqpIndexes::UpdateDeletePlan+UseSink >> KqpIndexes::NullInIndexTableNoDataRead >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNullableLevel1 >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn >> KqpIndexes::WriteWithParamsFieldOrder >> KqpIndexes::UpsertWithNullKeysSimple >> KqpUniqueIndex::InsertNullInComplexFk >> KqpIndexes::UpsertMultipleUniqIndexes |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |81.7%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut >> KqpIndexes::UniqAndNoUniqSecondaryIndex >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel1 >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNotNullableLevel1 >> KqpUniqueIndex::InsertFkPartialColumnSet >> KqpIndexes::SelectConcurentTX >> KqpIndexes::UpdateIndexSubsetPk >> KqpIndexes::UniqIndexComplexPkComplexFkOverlap >> KqpJoinOrder::FiveWayJoinWithComplexPreds-ColumnStore [GOOD] >> IncrementalBackup::MultiRestore [GOOD] >> IncrementalBackup::E2EBackupCollection >> TObjectStorageListingTest::TestFilter [GOOD] >> TObjectStorageListingTest::TestSkipShards ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 24738, MsgBus: 14377 2025-03-28T09:10:32.555358Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785268540550484:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:32.555842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001723/r3tmp/tmp7TVGQD/pdisk_1.dat 2025-03-28T09:10:33.020568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:33.020669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:33.025656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:10:33.058917Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24738, node 1 
2025-03-28T09:10:33.215259Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:10:33.215281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:10:33.215289Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:10:33.215406Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14377 TClient is connected to server localhost:14377 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:10:33.860307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:33.882237Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:10:35.928276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785281425452901:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:35.928366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:35.930773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785281425452910:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:35.935405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:10:35.954671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785281425452915:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:10:36.009141Z node 1 :TX_PROXY ERROR: Actor# [1:7486785285720420262:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:10:36.385003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:10:36.495012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:10:36.586128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:10:36.632856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:10:36.698337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:10:36.876054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:10:36.929259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:10:36.972391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:10:37.038059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:10:37.089804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:10:37.157815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:10:37.229651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:10:37.327927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:10:37.535983Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785268540550484:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:37.536193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:38.113872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:10:38.164765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:10:38.203620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:10:38.253835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:10:38.295658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:10:38.335540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:10:38.388256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:10:38.431435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:10:38.475071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:10:38.547760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:10:38.600843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:10:38.630965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:10:38.707272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:10:38.755346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:10:38.802924Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:10:38.853494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:10:38.899629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.048580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.054352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.060117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.064972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.065268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.074292Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.079616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.079938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.086162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.086955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.091876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.092050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.097325Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.097325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.101634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.105050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.110923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.111166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.116630Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.118619Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.122037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.124114Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.128102Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038459;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.129706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.133594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.134796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.138870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.139781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038483;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.144272Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.145792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.152957Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.154524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.159023Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.159704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.164757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.164883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.170498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.172020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.176018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.180754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.185003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.192408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.199728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.204304Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.209741Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.286103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.314157Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0938y11g42svagrg372mt", SessionId: ydb://session/3?node_id=1&id=NWUwYTQ1ZDMtN2YzZmMyMzAtZWM3NDZkNGYtMTBkMzBlYTI=, Slow query, duration: 33.643417s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:11:14.548950Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:14.549561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:14.549817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |81.8%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |81.7%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel >> TCacheTest::MigrationDeletedPathNavigate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationDeletedPathNavigate [GOOD] Test command err: 2025-03-28T09:10:56.217732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:56.217785Z node 1 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:56.369189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at 
step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2025-03-28T09:10:56.382912Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 65543, Sender [1:174:2170], Recipient [1:70:2109]: NActors::TEvents::TEvPoison 2025-03-28T09:10:56.383562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:175:2067] recipient: [1:46:2093] Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:178:2067] recipient: [1:24:2071] Leader for TabletID 72057594046678944 is [1:70:2109] sender: [1:179:2067] recipient: [1:177:2171] Leader for TabletID 72057594046678944 is [1:180:2172] sender: [1:181:2067] recipient: [1:177:2171] 2025-03-28T09:10:56.445514Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828672, Sender [1:177:2171], Recipient [1:180:2172]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:10:56.467687Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828673, Sender [1:177:2171], Recipient [1:180:2172]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:10:56.467887Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 268828684, Sender [1:177:2171], Recipient [1:180:2172]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:10:56.479484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:10:56.479584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:10:56.479620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:10:56.479672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:10:56.479711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:10:56.479739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:10:56.479830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:10:56.479895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:10:56.480242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:10:56.499161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:10:56.500377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:10:56.500529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:10:56.500745Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, received event# 65542, Sender [1:7238242728502259555:7369577], Recipient [1:180:2172]: TSystem::Undelivered 2025-03-28T09:10:56.500791Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateInit, processing event TEvents::TEvUndelivered 2025-03-28T09:10:56.500841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:56.500872Z 
node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:56.501031Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Clear operation queue and active pipes 2025-03-28T09:10:56.501063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:10:56.501695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:10:56.511235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.511388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.512855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.513007Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-28T09:10:56.522091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.522245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.522333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.528383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.528582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.545972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.556211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.556370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.578427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.578573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.593111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.593245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.593328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.610247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.610404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.610541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.610765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.622915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 
72057594046678944 2025-03-28T09:10:56.623028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.623076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.623315Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:10:56.624640Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:10:56.624777Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-28T09:10:56.625295Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435082, Sender [1:180:2172], Recipient [1:180:2172]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-03-28T09:10:56.625333Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-03-28T09:10:56.625896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:10:56.625946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:10:56.626089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:10:56.626142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:10:56.626177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:10:56.626206Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:10:56.626458Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:196:2172], Recipient [1:180:2172]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-03-28T09:10:56.626490Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-03-28T09:10:56.626529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [1:180:2172] sender: [1:211:2067] recipient: [1:24:2071] 2025-03-28T09:10:56.656652Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:210:2189], Recipient [1:180:2172]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2025-03-28T09:10:56.656708Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:10:56.794502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:10:56.794757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/USER_0, opId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:10:56.794849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: Root, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 
2025-03-28T09:10:56.795020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-28T09:10:56.795211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:10:56.795323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:10:56.795363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 7205759404667894 ... 72057594046678944, status: OK, at schemeshard: 72075186233409549 2025-03-28T09:10:57.646200Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125003, Sender [1:423:2337], Recipient [1:492:2384]: NKikimrScheme.TEvSyncTenantSchemeShard DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-03-28T09:10:57.646236Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvSyncTenantSchemeShard 2025-03-28T09:10:57.646325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409549 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2025-03-28T09:10:57.646419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:10:57.646463Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:10:57.646551Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[1:423:2337], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:10:57.646626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:10:57.646664Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 { Path: Root/USER_0/DirA TableId: [72057594046678944:3:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 
72057594046678944, LocalPathId: 2] Params { Version: 3 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 SchemeShard: 72075186233409549 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:10:58.073971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:58.074032Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 1 2025-03-28T09:10:58.133145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:175:2067] recipient: [2:46:2093] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:178:2067] recipient: [2:24:2071] Leader for TabletID 72057594046678944 is [2:70:2109] sender: [2:179:2067] recipient: [2:177:2171] Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:181:2067] recipient: [2:177:2171] 2025-03-28T09:10:58.190616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:58.190678Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResults wait txId: 101 Leader for TabletID 72057594046678944 is [2:180:2172] sender: [2:211:2067] recipient: [2:24:2071] 2025-03-28T09:10:58.224164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-03-28T09:10:58.235825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 102:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:238:2213] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:247:2067] recipient: [2:238:2213] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:240:2215] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:248:2067] recipient: [2:240:2215] Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:252:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409546 is [0:0:0] sender: [2:252:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:253:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409547 is [0:0:0] sender: [2:253:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409546 is [2:250:2219] sender: [2:255:2067] recipient: [2:238:2213] Leader for TabletID 72075186233409547 is [2:254:2221] sender: [2:256:2067] recipient: [2:240:2215] TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 
TestWaitNotification wait txId: 102 2025-03-28T09:10:58.256041Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 Leader for TabletID 72075186233409546 is [2:250:2219] sender: [2:289:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409547 is [2:254:2221] sender: [2:290:2067] recipient: [2:24:2071] FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 2025-03-28T09:10:58.314096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 TestWaitNotification wait txId: 104 Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:340:2067] recipient: [2:336:2285] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:340:2067] recipient: [2:336:2285] Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409548 is [0:0:0] sender: [2:341:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409548 is [2:343:2289] sender: [2:344:2067] recipient: [2:336:2285] Leader for TabletID 72075186233409548 is [2:343:2289] sender: [2:345:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-03-28T09:10:58.483421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomain, opId: 105:0, at schemeshard: 72057594046678944 Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:415:2333] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:420:2067] recipient: [2:415:2333] Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:422:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409549 is [0:0:0] sender: [2:422:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409549 is [2:423:2337] sender: [2:424:2067] recipient: [2:415:2333] 2025-03-28T09:10:58.532369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:10:58.532442Z node 2 :IMPORT WARN: Table profiles were not loaded TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 Leader for TabletID 72075186233409549 is [2:423:2337] sender: [2:451:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2025-03-28T09:10:58.591912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:10:58.591979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:10:58.592359Z node 2 :FLAT_TX_SCHEMESHARD ERROR: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2025-03-28T09:10:58.592500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-28T09:10:58.613962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2025-03-28T09:10:58.614560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-03-28T09:10:58.664495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 108:0, at schemeshard: 72075186233409549 Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:556:2067] recipient: [2:552:2441] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:556:2067] recipient: [2:552:2441] Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:557:2067] recipient: [2:24:2071] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:557:2067] recipient: [2:24:2071] Leader for TabletID 72075186233409550 is [2:559:2445] sender: [2:560:2067] recipient: [2:552:2441] Leader for TabletID 72075186233409550 is [2:559:2445] sender: [2:561:2067] recipient: [2:24:2071] TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 Forgetting tablet 72075186233409548 TestWaitNotification: OK eventTxId 108 2025-03-28T09:11:01.279382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:11:01.279567Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:01.375205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:11:01.375274Z node 2 :IMPORT WARN: Table profiles were not loaded |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |81.8%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |81.8%| [TA] $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |81.8%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... 
results_accumulator.log} |81.8%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut >> SystemView::AuthOwners_TableRange [GOOD] >> SystemView::AuthPermissions >> TObjectStorageListingTest::TestSkipShards [GOOD] >> KqpJoin::JoinAggregate >> KqpJoinOrder::TPCH9_100 [GOOD] |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |81.8%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load >> IncrementalBackup::E2EBackupCollection [GOOD] >> KqpJoinOrder::TestJoinOrderHintsComplex+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestSkipShards [GOOD] Test command err: 2025-03-28T09:11:19.802953Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785468036224668:2266];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:19.803025Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f1f/r3tmp/tmpQPVGqv/pdisk_1.dat 2025-03-28T09:11:20.682277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:20.686705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:20.692459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:20.724593Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19715, node 1 2025-03-28T09:11:21.863507Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:21.863531Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:21.863545Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:21.863693Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24376 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:11:23.616378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:11:23.679225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f1f/r3tmp/tmpOneU63/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62422, node 2 TClient is connected to server localhost:13929 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... waiting... waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::E2EBackupCollection [GOOD] Test command err: 2025-03-28T09:11:08.889030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:11:08.889546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:08.889625Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001537/r3tmp/tmpTIWYhU/pdisk_1.dat 2025-03-28T09:11:09.420712Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:09.420821Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:09.420859Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:11:09.420997Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-03-28T09:11:09.421045Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:11:09.577370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-03-28T09:11:09.577641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:09.577888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:11:09.578144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:11:09.578262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:09.578369Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:09.580512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:11:09.580701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:11:09.580750Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:09.580800Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T09:11:09.581018Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:11:09.581069Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:11:09.581142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:09.581212Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:11:09.581247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:11:09.581282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:11:09.581424Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:09.581902Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:09.581943Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T09:11:09.582061Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:11:09.582109Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:11:09.582171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:09.582214Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:11:09.582245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:11:09.582326Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:09.582748Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:09.582782Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T09:11:09.582875Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:11:09.582904Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:11:09.582940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:09.582972Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:09.583011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-03-28T09:11:09.583040Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:09.583095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:11:09.586977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:11:09.587475Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:09.587525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-03-28T09:11:09.587704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:11:09.589055Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:11:09.589111Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T09:11:09.589176Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-03-28T09:11:09.589359Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-03-28T09:11:09.589833Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:09.589934Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:09.589972Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:11:09.590119Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-03-28T09:11:09.590154Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-28T09:11:09.590232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-03-28T09:11:09.590269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-03-28T09:11:09.590314Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-03-28T09:11:09.649110Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-03-28T09:11:09.649252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-03-28T09:11:09.649314Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:09.649749Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-28T09:11:09.649834Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-03-28T09:11:09.696742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:09.696897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:09.712357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:09.796031Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-03-28T09:11:09.796772Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:09.796830Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:09.796865Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:11:09.797017Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-03-28T09:11:09.797049Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:11:09.797129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:11:09.797267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... ode 3 :TX_DATASHARD INFO: 72075186224037892 Sending notify to schemeshard 72057594046644480 txId 281474976715668 state Ready TxInFly 0 2025-03-28T09:11:34.139601Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-28T09:11:34.139942Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [3:1553:3216], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:34.139981Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:34.140010Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:11:34.140192Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269551620, Sender [3:1211:2937], Recipient [3:409:2404]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 1211 RawX2: 12884904825 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 2025-03-28T09:11:34.140223Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChanged 2025-03-28T09:11:34.140289Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 1211 RawX2: 12884904825 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 2025-03-28T09:11:34.140327Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715668, tablet: 72075186224037892, partId: 1 2025-03-28T09:11:34.140447Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715668:1, at schemeshard: 72057594046644480, message: Source { RawX1: 1211 RawX2: 12884904825 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 2025-03-28T09:11:34.140484Z node 3 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715668:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-03-28T09:11:34.140547Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715668:1 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 1211 RawX2: 12884904825 } Origin: 72075186224037892 State: 2 TxId: 281474976715668 Step: 0 Generation: 1 
2025-03-28T09:11:34.140597Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715668:1, shardIdx: 72057594046644480:5, datashard: 72075186224037892, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-03-28T09:11:34.140627Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715668:1, at schemeshard: 72057594046644480 2025-03-28T09:11:34.140658Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715668:1, datashard: 72075186224037892, at schemeshard: 72057594046644480 2025-03-28T09:11:34.140692Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:1 129 -> 240 2025-03-28T09:11:34.140890Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TRestoreMultipleIncrementalBackups TDone, operationId: 281474976715668:1 Constructed op# SrcTablePaths: "/Root/.backups/collections/MyCollection/19700101000002Z_incremental/Table" DstTablePath: "/Root/Table" SrcPathIds { OwnerId: 72057594046644480 LocalId: 15 } 2025-03-28T09:11:34.141055Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:34.141563Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715668:1, at schemeshard: 72057594046644480 2025-03-28T09:11:34.141596Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:34.141630Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715668:1 2025-03-28T09:11:34.141696Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1211:2937] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715668 at schemeshard: 72057594046644480 2025-03-28T09:11:34.141807Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715668 datashard 72075186224037892 state Ready 2025-03-28T09:11:34.141859Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 Got TEvSchemaChangedResult from SS at 72075186224037892 2025-03-28T09:11:34.142057Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:11:34.142093Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:11:34.142138Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:1, at schemeshard: 72057594046644480 2025-03-28T09:11:34.142191Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TRestoreMultipleIncrementalBackups TDone, operationId: 281474976715668:1 ProgressState 2025-03-28T09:11:34.142346Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:34.142384Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:1 progress is 1/2 2025-03-28T09:11:34.142425Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 1/2 2025-03-28T09:11:34.142470Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715668, done: 1, blocked: 1 2025-03-28T09:11:34.142601Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 281474976715668:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 281474976715668 Name: CopyTableBarrier }, 
at tablet# 72057594046644480 2025-03-28T09:11:34.142640Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715668:0 240 -> 240 2025-03-28T09:11:34.152105Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:1 progress is 1/2 2025-03-28T09:11:34.152205Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 1/2 2025-03-28T09:11:34.152262Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 1/2, is published: true 2025-03-28T09:11:34.152926Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:34.152967Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715668:0 2025-03-28T09:11:34.153118Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:11:34.153154Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:11:34.153206Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:0, at schemeshard: 72057594046644480 2025-03-28T09:11:34.153256Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:0 ProgressState 2025-03-28T09:11:34.153391Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:34.153422Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:0 progress is 2/2 2025-03-28T09:11:34.153447Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 2/2 2025-03-28T09:11:34.153484Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:0 progress is 2/2 2025-03-28T09:11:34.153507Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 2/2 2025-03-28T09:11:34.153536Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 2/2, is published: true 2025-03-28T09:11:34.153620Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:1429:3113] message: TxId: 281474976715668 2025-03-28T09:11:34.153687Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 2/2 2025-03-28T09:11:34.153734Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:0 2025-03-28T09:11:34.153772Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:0 2025-03-28T09:11:34.153956Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 16] was 4 2025-03-28T09:11:34.154004Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 3 2025-03-28T09:11:34.154050Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:1 2025-03-28T09:11:34.154073Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:1 2025-03-28T09:11:34.154128Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 3 2025-03-28T09:11:34.154151Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 16] was 3 2025-03-28T09:11:34.154774Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:34.154886Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1429:3113] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715668 at schemeshard: 72057594046644480 2025-03-28T09:11:34.155348Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1437:3120], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:11:34.155392Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:11:34.155422Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-03-28T09:11:34.194600Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1553:3216], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:11:34.194719Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:11:34.194774Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-03-28T09:11:34.309602Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:11:34.309717Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:11:34.309825Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:11:34.309859Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:11:34.716743Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jqe0aqtf2q350ss5tcwtvenb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MmJmYmJiM2MtODcwMzQwNjEtZGFkZmM3YmMtNWUwMzhlYjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 2 } items { uint32_value: 200 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } >> OlapEstimationRowsCorrectness::TPCH11 [GOOD] >> TLocksTest::BrokenSameShardLock [GOOD] >> KqpIndexes::UpdateDeletePlan+UseSink [GOOD] >> KqpIndexes::UpdateDeletePlan-UseSink >> KqpIndexes::WriteWithParamsFieldOrder [GOOD] >> KqpMultishardIndex::CheckPushTopSort |81.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |81.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans >> KqpUniqueIndex::ReplaceFkPartialColumnSet >> KqpUniqueIndex::InsertFkPartialColumnSet [GOOD] >> KqpUniqueIndex::InsertFkPkOverlap >> KqpIndexes::UpsertWithNullKeysSimple [GOOD] >> KqpIndexes::UpsertWithNullKeysComplex >> KqpIndexes::UpdateIndexSubsetPk [GOOD] >> KqpIndexes::UpdateOnReadColumns >> KqpIndexes::SelectConcurentTX [GOOD] >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH9_100 [GOOD] Test command err: Trying to start YDB, gRPC: 20952, MsgBus: 29677 2025-03-28T09:09:23.459381Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784971698364867:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:23.459498Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001873/r3tmp/tmpDa8IE8/pdisk_1.dat 2025-03-28T09:09:24.304883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:24.304974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:24.306082Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:24.312794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20952, node 1 2025-03-28T09:09:24.654328Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:24.654350Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:24.654357Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:24.654460Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29677 TClient is connected to server localhost:29677 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:25.801716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:25.824340Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:09:28.459640Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784971698364867:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:28.459712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:28.791772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784993173202022:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:28.791905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:28.792013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784993173202030:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:28.796297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:09:28.808153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784993173202036:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:09:28.891167Z node 1 :TX_PROXY ERROR: Actor# [1:7486784993173202089:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:29.413492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:09:29.850711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784997468169662:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:29.850952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784997468169662:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:09:29.851241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784997468169662:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:09:29.851398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784997468169662:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:09:29.851520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784997468169662:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:09:29.851664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784997468169662:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:09:29.851781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784997468169662:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:09:29.851898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784997468169662:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:09:29.852039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784997468169662:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:09:29.852150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784997468169662:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:09:29.852251Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486784997468169662:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:09:29.852369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784997468169662:2356];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:09:29.855360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784997468169652:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:29.855416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784997468169652:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:09:29.855645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784997468169652:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:09:29.855753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784997468169652:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:09:29.855874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784997468169652:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:09:29.856013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784997468169652:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:09:29.856105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784997468169652:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:09:29.856205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784997468169652:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:09:29.856317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784997468169652:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:09:29.856416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784997468169652:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:09:29.856524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486784997468169652:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:09:29.856618Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7486784997468169652:2353];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:09:29.895556Z node 1 :T ... tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.603892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.605606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.614943Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.620649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.626086Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.628605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.637912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.637912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.644094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.646181Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.649589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.651557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.655410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.657653Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-28T09:11:08.667519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.667519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.673150Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.673209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.679225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.680723Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.684992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.686462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.690583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.692689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.695971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.698551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.701531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.704618Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.707268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.710471Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.716745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.722593Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.725200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.728220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.731039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.733926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.736976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.739758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.745736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.747628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.757669Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.760323Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.780968Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.785471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039230;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.789540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.921491Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:08.988170Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe08wyaeq4yag65h0c6kbk4", SessionId: ydb://session/3?node_id=1&id=NDFhMzFkOS05MGZlN2E1MS05NzU5ZmE1OS1jNWQyY2NlZg==, Slow query, duration: 34.800680s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:11:09.341288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:09.341417Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:09.342473Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpUniqueIndex::UpdateOnHidenChanges-DataColumn [GOOD] >> KqpUniqueIndex::UpdateOnNullInComplexFk >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] >> KqpIndexes::NullInIndexTableNoDataRead [GOOD] >> KqpIndexes::NullInIndexTable >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex-UseSink >> KqpUniqueIndex::InsertNullInComplexFk [GOOD] >> KqpUniqueIndex::InsertNullInComplexFkDuplicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsComplex+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 28396, MsgBus: 23126 2025-03-28T09:09:29.856876Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784996217396661:2210];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001869/r3tmp/tmprtodih/pdisk_1.dat 2025-03-28T09:09:30.206143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:09:30.661240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:30.661356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:30.667904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:30.688878Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28396, node 1 2025-03-28T09:09:30.911262Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:30.911285Z node 1 :NET_CLASSIFIER WARN: will try to initialize 
from file: (empty maybe) 2025-03-28T09:09:30.911293Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:30.911413Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23126 TClient is connected to server localhost:23126 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:31.632052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:31.645802Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:09:34.842670Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784996217396661:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:34.842759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:35.991702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785021987200947:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:35.991825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:35.992239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785021987200959:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:35.996282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:09:36.019122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785021987200961:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:09:36.114014Z node 1 :TX_PROXY ERROR: Actor# [1:7486785026282168308:2347] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:36.520302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:09:36.814091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785026282168528:2356];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:36.814456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785026282168528:2356];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:09:36.814728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785026282168528:2356];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:09:36.814839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785026282168528:2356];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:09:36.814928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785026282168528:2356];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:09:36.815037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785026282168528:2356];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:09:36.815142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785026282168528:2356];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:09:36.815276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785026282168528:2356];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:09:36.815435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785026282168528:2356];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:09:36.815543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785026282168528:2356];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:09:36.815632Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7486785026282168528:2356];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:09:36.815746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785026282168528:2356];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:09:36.826356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785026282168532:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:36.826416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785026282168532:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:09:36.826591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785026282168532:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:09:36.827143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785026282168532:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:09:36.827296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785026282168532:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:09:36.827419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785026282168532:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:09:36.827528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785026282168532:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:09:36.827648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785026282168532:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:09:36.827765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785026282168532:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:09:36.827880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785026282168532:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:09:36.827980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785026282168532:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:09:36.828077Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7486785026282168532:2358];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:09:36.868868Z node 1 :T ... TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:11:09.168467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039207;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:11:09.168590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039413;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
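For orientation, the TX_COLUMNSHARD warnings in this block trace the column-shard normalization pipeline that runs inside TTxInitSchema/TTxUpdateSchema at tablet start: each normalizer is registered (event=normalizer_register), switched to (normalizer_switched), and completed (normalizer_finished), and the whole sequence repeats once per tablet_id. The order visible in this log is: Granules -> Chunks -> TablesCleaner -> CleanGranuleId -> CleanInsertionDedup -> GCCountersNormalizer -> RestorePortionFromChunks -> SyncPortionFromChunks -> SyncMinSnapshotFromChunks -> RestoreV1Chunks_V2 -> RestoreV2Chunks.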
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
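The "Unapplied hint" warnings here and on the next line (code 4534) are the KQP cost-based optimizer reporting hints that reference labels absent from the query, rather than failing the query. A minimal sketch of how such hints are attached in YQL; the pragma name is assumed from recent YDB releases (not shown in this log), and the hint grammar — JoinOrder(...) and Rows(<labels> # <estimate>) — is taken from the warning text itself:

    -- Hypothetical sketch, not the test's actual query. Labels that do not
    -- occur in the query (e.g. Unused1..Unused4 above) would be reported as
    -- "Unapplied hint ..., code: 4534" instead of raising an error.
    PRAGMA ydb.OptimizerHints = 'JoinOrder( (R S) T ) Rows(R T # 1)';

    SELECT *
    FROM R
    JOIN S ON R.id = S.r_id
    JOIN T ON S.id = T.s_id;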
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 2025-03-28T09:11:09.168596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039207;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:11:09.168625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039207;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:11:09.168636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039413;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:11:09.168779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039413;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:11:09.168843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039413;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:11:09.168980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039413;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:11:09.169041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039413;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:11:09.169202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039413;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:11:09.169227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039413;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:11:09.169331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039413;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:11:09.169355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039413;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:11:09.169650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:11:09.169688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:11:09.169763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:11:09.169786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:11:09.169934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:11:09.169965Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:11:09.170033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:11:09.170061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:11:09.170098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:11:09.170116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:11:09.170125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:11:09.170138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:11:09.170173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:11:09.170195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:11:09.170196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:11:09.170223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:11:09.170380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:11:09.170404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:11:09.170504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:11:09.170542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:11:09.170600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:11:09.170622Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:11:09.170769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:11:09.170803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:11:09.170965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:11:09.170991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:11:09.171027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:11:09.171063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:11:09.171087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:11:09.171110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:11:09.171252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:11:09.171275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:11:09.171400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:11:09.171434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039333;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:11:09.171447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:11:09.171480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:11:09.171621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:11:09.171646Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:11:09.171743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:11:09.171786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224039405;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; >> KqpIndexes::SecondaryIndexUpsert1DeleteUpdate [GOOD] >> KqpIndexes::SecondaryIndexUpsert2Update ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenSameShardLock [GOOD] Test command err: 2025-03-28T09:10:43.694868Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785312968433719:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:43.695255Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f21/r3tmp/tmpf6br5H/pdisk_1.dat 2025-03-28T09:10:44.391779Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:44.417370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:44.417455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:44.421549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14283 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:10:44.829879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:44.856951Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:44.876666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:10:45.062279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:45.162652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:48.293009Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785336444191023:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:48.302892Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f21/r3tmp/tmpm9vapU/pdisk_1.dat 2025-03-28T09:10:48.578820Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:48.601469Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:48.601546Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:48.603217Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15773 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:10:48.842301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:48.848928Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:48.857094Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:10:48.861275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:48.955376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:10:49.030510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:52.722388Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785352958080482:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:52.722440Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f21/r3tmp/tmp3T2ckS/pdisk_1.dat 2025-03-28T09:10:53.030642Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:53.045432Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:53.045517Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:53.046750Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19094 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:10:53.293695Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:53.299717Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:53.308761Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:10:53.314140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:53.490988Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:53.550439Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:10:57.115946Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486785373140421805:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:57.116645Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f21/r3tmp/tmptsJNWW/pdisk_1.dat 2025-03-28T09:10:57.301563Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:57.314160Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:57.314237Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:57.317529Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8260 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 ... ion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:11:13.316552Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:11:13.375219Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:11:13.384651Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:13.541396Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:11:13.624410Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:11:18.021936Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486785461425330448:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:18.022011Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f21/r3tmp/tmpc3jmaa/pdisk_1.dat 2025-03-28T09:11:18.380527Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:18.404566Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:18.404675Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:18.407851Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12285 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:11:18.740343Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:18.755873Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:18.777464Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:11:18.784217Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:18.909562Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:19.002663Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:11:23.917420Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486785484601659951:2152];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f21/r3tmp/tmpXytBMo/pdisk_1.dat 2025-03-28T09:11:24.097235Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:11:24.293710Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:24.361115Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:24.361229Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:24.362905Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30896 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:11:24.937381Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:24.955433Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:24.984931Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:11:24.999181Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:25.261772Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:25.374198Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:11:30.465349Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486785514257203021:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:30.465430Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f21/r3tmp/tmpfBZcgl/pdisk_1.dat 2025-03-28T09:11:30.821653Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:30.821757Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:30.828433Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:30.835072Z node 10 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:14536 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:11:31.356990Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:31.367281Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:31.389602Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:11:31.395985Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:31.487115Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:31.579189Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
>> KqpJoinOrder::GeneralPrioritiesBug3 >> KqpIndexes::UpsertMultipleUniqIndexes [GOOD] >> KqpIndexes::UpsertNoIndexColumns >> KqpUniqueIndex::UpdateOnFkSelectResultSameValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH11 [GOOD] Test command err: Trying to start YDB, gRPC: 63916, MsgBus: 1851 2025-03-28T09:09:28.553631Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784990631752625:2252];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:28.554007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001865/r3tmp/tmpOQ4823/pdisk_1.dat 2025-03-28T09:09:29.027439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:29.027545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:29.035903Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:29.055446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63916, node 1 2025-03-28T09:09:29.187256Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:29.187275Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:29.187282Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:29.187411Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1851 TClient is connected to server localhost:1851 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:29.954074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:09:29.975576Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:09:33.545513Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784990631752625:2252];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:33.545574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:33.769588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785012106589580:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:33.769721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:33.770181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785012106589592:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:33.779021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:09:33.802960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785012106589594:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:09:33.918402Z node 1 :TX_PROXY ERROR: Actor# [1:7486785012106589646:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:34.261330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:09:34.577830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486785016401557166:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:34.578090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486785016401557166:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:09:34.578358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486785016401557166:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:09:34.578541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486785016401557166:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:09:34.578656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486785016401557166:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:09:34.581415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785016401557168:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:34.581467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785016401557168:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:09:34.581672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785016401557168:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:09:34.581776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785016401557168:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:09:34.581872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785016401557168:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:09:34.581984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785016401557168:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
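The recurring NOT_FOUND -> "Scheduled retry" -> "path exist, request accepts it" triple in these logs is the workload service lazily materializing the default resource pool: several actors race to create /Root/.metadata/workload_manager/pools/default, and the losers' create proposals are accepted as no-ops against the already-existing path. A hedged sketch of declaring a pool explicitly; the CREATE RESOURCE POOL DDL is assumed from recent YDB releases, and the pool name and settings below are illustrative, not taken from this test run:

    -- Assumed syntax and illustrative limits.
    CREATE RESOURCE POOL example_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );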
2025-03-28T09:09:34.582083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785016401557168:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:09:34.582184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785016401557168:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:09:34.582285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785016401557168:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:09:34.582392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785016401557168:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:09:34.582524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785016401557168:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:09:34.582649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785016401557168:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:09:34.583324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486785016401557166:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:09:34.583451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486785016401557166:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:09:34.583557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486785016401557166:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:09:34.583656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486785016401557166:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:09:34.583746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486785016401557166:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:09:34.583835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486785016401557166:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:09:34.583942Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7486785016401557166:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:09:34.681832Z node 1 :TX_C ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.430401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.430452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.437197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.437546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.443383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.444678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.448736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.450989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.455201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.460851Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.463636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.470476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.473172Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.477000Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.480440Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.483633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.486668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.492405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.494552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.498845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.502936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.504762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.508946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.510474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.515237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.516515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.521427Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.522240Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.532089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.534906Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.539459Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.545821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.556799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.560948Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.567811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.569384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.574447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.577287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.581311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.586791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.589599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.594257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.596074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.602186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.602936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:15.810383Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe091ek97m2yhtjhga2agdk", SessionId: ydb://session/3?node_id=1&id=NjI5MDUxMzQtNjM0OGMyOTAtYzcyYjViZjUtNjRlM2I5OGM=, Slow query, duration: 37.006798s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:11:16.121711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:16.122130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:16.122791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486785364293957035:9969];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-03-28T09:11:16.123179Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpUniqueIndex::UpdateOnFkAlreadyExist >> KqpIndexes::UniqAndNoUniqSecondaryIndex [GOOD] >> KqpIndexes::Uint8Index >> KqpIndexes::SecondaryIndexOrderBy [GOOD] >> KqpIndexes::SecondaryIndexOrderBy2 >> KqpMultishardIndex::DataColumnUpsertMixedSemantic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] Test command err: 2025-03-28T09:07:35.857499Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:07:35.864999Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:07:35.865539Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:07:35.865785Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:07:35.919367Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:07:36.007432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:07:36.007491Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:36.017602Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:07:36.018895Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:07:36.020577Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:07:36.020652Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:07:36.020702Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:07:36.021135Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:07:36.021215Z node 1 :TX_DATASHARD DEBUG: 
TDataShard::TTxInitRestored::Execute 2025-03-28T09:07:36.021283Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:07:36.103411Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:07:36.137104Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:07:36.137307Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:07:36.137410Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:07:36.137445Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:07:36.137483Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:07:36.137530Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:36.137772Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:36.137837Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:36.138080Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:07:36.138168Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:07:36.138224Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:36.138277Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:07:36.138312Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:07:36.138342Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:07:36.138373Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:07:36.138408Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:07:36.138450Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:07:36.138551Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:36.138600Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:36.138659Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:07:36.141206Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:07:36.141275Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:07:36.141369Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:07:36.141509Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit 
CheckSchemeTx 2025-03-28T09:07:36.141550Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:07:36.141596Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:07:36.141659Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:36.141713Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:07:36.141745Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:07:36.141774Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:07:36.142056Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:07:36.142094Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:07:36.142125Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:07:36.142156Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:07:36.142193Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:07:36.142230Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:07:36.142265Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:07:36.142304Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:07:36.142334Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:07:36.159721Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:07:36.159793Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:07:36.159844Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:07:36.159884Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:07:36.159969Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:07:36.160627Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:36.160676Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:07:36.160716Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:07:36.160794Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:07:36.160823Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:07:36.160940Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:07:36.160999Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:07:36.161044Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 
9437184 executing on unit WaitForPlan 2025-03-28T09:07:36.161074Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:07:36.169115Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:07:36.169207Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:07:36.169458Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:36.169500Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:07:36.169556Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:07:36.169592Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:07:36.169628Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:07:36.169667Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:07:36.169698Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:07:36.169741Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:07:36.169790Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:07:36.169835Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:07:36.169865Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:07:36.170049Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:07:36.170082Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:07:36.170105Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:07:36.170126Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:07:36.170150Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:07:36.170213Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:07:36.170251Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:07:36.170286Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:07:36.170317Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:07:36.170388Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:07:36.170429Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:07:36.170459Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:07:36.170494Z node 1 :TX_DATA ... 
c latency: 58 ms, propose latency: 58 ms, status: COMPLETE 2025-03-28T09:11:24.231563Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is DelayComplete 2025-03-28T09:11:24.231597Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit FinishPropose 2025-03-28T09:11:24.231627Z node 3 :TX_DATASHARD TRACE: Add [0:10] at 9437184 to execution unit CompletedOperations 2025-03-28T09:11:24.231660Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:10] at 9437184 on unit CompletedOperations 2025-03-28T09:11:24.231711Z node 3 :TX_DATASHARD TRACE: Execution status for [0:10] at 9437184 is Executed 2025-03-28T09:11:24.231738Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 9437184 executing on unit CompletedOperations 2025-03-28T09:11:24.231765Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:10] at 9437184 has finished 2025-03-28T09:11:24.261022Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:11:24.261103Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:10] at 9437184 on unit FinishPropose 2025-03-28T09:11:24.261166Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:11:30.153870Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 12884904022 } 2025-03-28T09:11:30.153964Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-03-28T09:11:30.154393Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [3:647:2622], Recipient [3:232:2225]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:30.154437Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:30.154479Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [3:646:2621], serverId# [3:647:2622], sessionId# [0:0:0] 2025-03-28T09:11:30.154735Z node 3 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [3:99:2134], Recipient [3:232:2225]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 99 RawX2: 12884904022 } TxBody: "\032\354\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4\000\'?8\003\013?>\003?<\003j\030\001\003?@\000\003?B\000\003?D\007\240%&\003?F\000\006\004?J\003\203\014\000\003\203\014\000\003\003?L\000\377\007\002\000\005?\032\005?\026?x\000\005?\030\003\005? 
\005?\034?x\000\006\ 2025-03-28T09:11:30.154781Z node 3 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:11:30.154868Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:11:30.155649Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CheckDataTx 2025-03-28T09:11:30.184856Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-03-28T09:11:30.184959Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CheckDataTx 2025-03-28T09:11:30.185012Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:11:30.185051Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:11:30.185113Z node 3 :TX_DATASHARD TRACE: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2025-03-28T09:11:30.185185Z node 3 :TX_DATASHARD TRACE: Activated operation [0:11] at 9437184 2025-03-28T09:11:30.185240Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-03-28T09:11:30.185269Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-28T09:11:30.185295Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit ExecuteDataTx 2025-03-28T09:11:30.185323Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-28T09:11:30.190439Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-03-28T09:11:30.190665Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-03-28T09:11:30.190741Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-03-28T09:11:30.302134Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:11:30.302215Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-28T09:11:30.303069Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-03-28T09:11:30.306662Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-03-28T09:11:30.306956Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-03-28T09:11:30.307007Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-03-28T09:11:30.477747Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:11:30.477835Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-28T09:11:30.478675Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-03-28T09:11:30.513462Z node 3 :TX_DATASHARD TRACE: Operation [0:11] at 9437184 exceeded memory limit 4194304 and requests 33554432 more for the next try 2025-03-28T09:11:30.513765Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-03-28T09:11:30.513840Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-03-28T09:11:30.514523Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:11:30.514574Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-28T09:11:30.515371Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-03-28T09:11:30.844851Z node 
3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-03-28T09:11:30.845703Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-03-28T09:11:30.845763Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-03-28T09:11:31.197168Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:11:31.197239Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-28T09:11:31.197874Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-03-28T09:11:31.526599Z node 3 :TX_DATASHARD TRACE: Operation [0:11] at 9437184 exceeded memory limit 37748736 and requests 301989888 more for the next try 2025-03-28T09:11:31.527715Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-03-28T09:11:31.527789Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-03-28T09:11:31.757058Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:11:31.757159Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-28T09:11:31.758008Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-03-28T09:11:31.762671Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-03-28T09:11:31.771005Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-03-28T09:11:31.771072Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-03-28T09:11:31.846279Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:11:31.846354Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-28T09:11:31.851262Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-03-28T09:11:31.853142Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-03-28T09:11:31.853327Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-03-28T09:11:31.853371Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-03-28T09:11:31.978464Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:11:31.978546Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-28T09:11:31.979403Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-03-28T09:11:31.990621Z node 3 :TX_DATASHARD TRACE: Tablet 9437184 is not ready for [0:11] execution 2025-03-28T09:11:31.995493Z node 3 :TX_DATASHARD DEBUG: tx 11 released its data 2025-03-28T09:11:31.995590Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Restart 2025-03-28T09:11:32.472355Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:11:32.472441Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2025-03-28T09:11:32.473402Z node 3 :TX_DATASHARD DEBUG: tx 11 at 9437184 restored its data 2025-03-28T09:11:33.923058Z node 3 :TX_DATASHARD TRACE: Executed operation [0:11] at tablet 9437184 with status COMPLETE 2025-03-28T09:11:33.923185Z node 3 :TX_DATASHARD TRACE: Datashard execution counters for [0:11] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 129871, SelectRangeBytes: 40000268, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:11:33.923264Z node 3 :TX_DATASHARD TRACE: Execution 
status for [0:11] at 9437184 is Executed 2025-03-28T09:11:33.923308Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit ExecuteDataTx 2025-03-28T09:11:33.923349Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit FinishPropose 2025-03-28T09:11:33.923388Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit FinishPropose 2025-03-28T09:11:33.923445Z node 3 :TX_DATASHARD TRACE: Propose transaction complete txid 11 at tablet 9437184 send to client, exec latency: 62 ms, propose latency: 62 ms, status: COMPLETE 2025-03-28T09:11:33.942962Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is DelayComplete 2025-03-28T09:11:33.943049Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit FinishPropose 2025-03-28T09:11:33.943086Z node 3 :TX_DATASHARD TRACE: Add [0:11] at 9437184 to execution unit CompletedOperations 2025-03-28T09:11:33.943124Z node 3 :TX_DATASHARD TRACE: Trying to execute [0:11] at 9437184 on unit CompletedOperations 2025-03-28T09:11:33.943188Z node 3 :TX_DATASHARD TRACE: Execution status for [0:11] at 9437184 is Executed 2025-03-28T09:11:33.943219Z node 3 :TX_DATASHARD TRACE: Advance execution plan for [0:11] at 9437184 executing on unit CompletedOperations 2025-03-28T09:11:33.943254Z node 3 :TX_DATASHARD TRACE: Execution plan for [0:11] at 9437184 has finished 2025-03-28T09:11:33.978360Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:11:33.978450Z node 3 :TX_DATASHARD TRACE: Complete execution for [0:11] at 9437184 on unit FinishPropose 2025-03-28T09:11:33.978516Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> KqpJoin::JoinAggregate [GOOD] >> KqpMultishardIndex::CheckPushTopSort [GOOD] >> KqpIndexes::UpdateDeletePlan-UseSink [GOOD] >> KqpIndexes::UniqIndexComplexPkComplexFkOverlap [GOOD] >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinAggregate [GOOD] Test command err: Trying to start YDB, gRPC: 17853, MsgBus: 9494 2025-03-28T09:11:32.512223Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785523985030107:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:32.512271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001718/r3tmp/tmpkR2tEs/pdisk_1.dat 2025-03-28T09:11:33.235976Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:33.249866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:33.249968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:33.261964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17853, node 1 2025-03-28T09:11:33.656749Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:33.656778Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:33.656786Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty 
maybe) 2025-03-28T09:11:33.656899Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9494 TClient is connected to server localhost:9494 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:34.814090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:34.858077Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:34.891220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:35.149338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:35.407019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:35.511822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:37.518850Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785523985030107:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:37.518951Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:38.265770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785549754835577:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:38.265921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:38.577370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:38.633792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:38.679074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:38.745566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:38.824859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:38.887845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:38.998863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785549754836101:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:38.998963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:39.003240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785549754836106:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:39.011715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:39.033694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785549754836108:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:39.115710Z node 1 :TX_PROXY ERROR: Actor# [1:7486785554049803459:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:41.044345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:41.152320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:11:41.228616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpUniqueIndex::InsertFkPkOverlap [GOOD] >> KqpIndexes::ForbidViewModification ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpdateDeletePlan-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1086, MsgBus: 11855 2025-03-28T09:11:23.908365Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785484107059782:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:23.915608Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019c2/r3tmp/tmpBNzDQy/pdisk_1.dat 2025-03-28T09:11:24.992446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:24.992562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:25.015178Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:25.026843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:25.027087Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:25.286456Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.133276s 2025-03-28T09:11:25.286551Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.133399s TServer::EnableGrpc on GrpcPort 1086, node 1 2025-03-28T09:11:25.908612Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:25.908639Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:25.908646Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:25.908804Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11855 TClient is connected to server localhost:11855 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:27.438940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:27.512477Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:27.615186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.235994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.454528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.577685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.906791Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785484107059782:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:28.906850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:29.026991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785509876865268:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:29.027151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:30.683657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.763534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.818792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.894442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.964661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.014804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.072097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785518466800452:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.072197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.072713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785518466800457:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.077856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:31.099457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785518466800459:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:31.180967Z node 1 :TX_PROXY ERROR: Actor# [1:7486785518466800514:3466] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:34.986534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 26961, MsgBus: 8320 2025-03-28T09:11:37.845482Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785547178265253:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:37.845538Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019c2/r3tmp/tmp8FCpdm/pdisk_1.dat 2025-03-28T09:11:37.991580Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:38.023403Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:38.023543Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:38.025066Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26961, node 2 2025-03-28T09:11:38.103206Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:38.103233Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:38.103242Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:38.103385Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8320 TClient is connected to server localhost:8320 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:38.748295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:11:38.793402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:38.999974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:11:39.288837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:11:39.395089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:42.851053Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785547178265253:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:42.851115Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:43.051500Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785572948070795:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:43.051582Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:43.112739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:43.194233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:43.242219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:43.283007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:43.325995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:43.383722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:43.492274Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785572948071312:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:43.492367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:43.492610Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785572948071317:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:43.496714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:11:43.522914Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785572948071319:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:11:43.629646Z node 2 :TX_PROXY ERROR: Actor# [2:7486785572948071375:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:11:45.172893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480

------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::CheckPushTopSort [GOOD]
Test command err:
Trying to start YDB, gRPC: 65441, MsgBus: 3410
2025-03-28T09:11:23.910843Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785485964310865:2067];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:23.915257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019ca/r3tmp/tmp5PUhxi/pdisk_1.dat
2025-03-28T09:11:24.993141Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:11:25.030856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:11:25.085617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:11:25.085734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:11:25.091631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:11:25.293945Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.132530s
2025-03-28T09:11:25.294054Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.132627s
TServer::EnableGrpc on GrpcPort 65441, node 1
2025-03-28T09:11:25.887310Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:11:25.887347Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:11:25.887356Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:11:25.887454Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:3410
TClient is connected to server localhost:3410
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:11:27.440892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:27.616506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:28.201325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:28.418720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-28T09:11:28.516219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-03-28T09:11:28.902877Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785485964310865:2067];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:28.911510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:11:29.027853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785511734116371:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:29.028110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:30.692196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.775382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.822933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.898929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.946752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:11:31.034440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:11:31.108492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785520324051565:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:31.108575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:31.108841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785520324051571:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:31.113170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:11:31.126821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785520324051573:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:11:31.189187Z node 1 :TX_PROXY ERROR: Actor# [1:7486785520324051625:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:11:34.819159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:11:36.536759Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:11:36.578588Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
Trying to start YDB, gRPC: 20507, MsgBus: 4875
2025-03-28T09:11:37.989224Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785544718029259:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:37.989286Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019ca/r3tmp/tmpf177wv/pdisk_1.dat
2025-03-28T09:11:38.163353Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:11:38.291794Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:11:38.291891Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:11:38.300461Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 20507, node 2
2025-03-28T09:11:38.495326Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:11:38.495353Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:11:38.495362Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:11:38.495490Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:4875
TClient is connected to server localhost:4875
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:11:39.154080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:39.171599Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-03-28T09:11:39.186869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:39.297882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:39.591012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:39.692011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:42.779889Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785566192867490:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:42.779968Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:42.834422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-28T09:11:42.912190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-28T09:11:42.956628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-28T09:11:42.991748Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785544718029259:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:42.991819Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:11:42.998137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-28T09:11:43.040067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-28T09:11:43.121954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-28T09:11:43.188682Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785570487835308:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:43.188763Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:43.189069Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785570487835313:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:43.193210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-28T09:11:43.221822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785570487835315:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-28T09:11:43.312132Z node 2 :TX_PROXY ERROR: Actor# [2:7486785570487835372:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:11:44.394277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
waiting...
>> KqpIndexes::SecondaryIndexUpsert2Update [GOOD]
>> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin
>> KqpUniqueIndex::InsertNullInComplexFkDuplicate [GOOD]
|81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction
|81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction
|81.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction
>> KqpUniqueIndex::ReplaceFkPartialColumnSet [GOOD]
>> KqpUniqueIndex::UpdateFkAlreadyExist

------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertFkPkOverlap [GOOD]
Test command err:
Trying to start YDB, gRPC: 27090, MsgBus: 15713
2025-03-28T09:11:23.908261Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785484823240023:2067];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:23.915372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019b5/r3tmp/tmpQoCUfd/pdisk_1.dat
2025-03-28T09:11:25.000054Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:11:25.100284Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:11:25.235025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:11:25.235137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:11:25.267982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 27090, node 1
2025-03-28T09:11:25.919459Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:11:25.919482Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:11:25.919492Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:11:25.919612Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:15713
TClient is connected to server localhost:15713
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:11:27.688455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:27.709790Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:11:27.722773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:28.233483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:28.579751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:28.754223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:29.094137Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785484823240023:2067];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:29.106874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:11:30.244679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785514888012866:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:30.244810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:30.687559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.764791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.833547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.929419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.989545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:11:31.055083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:11:31.116094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785519182980684:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:31.116149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:31.116349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785519182980689:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:31.120425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:11:31.134266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785519182980691:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:11:31.215376Z node 1 :TX_PROXY ERROR: Actor# [1:7486785519182980746:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:11:34.742673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
waiting...
Trying to start YDB, gRPC: 10049, MsgBus: 13604
2025-03-28T09:11:38.853357Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785548506194845:2163];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019b5/r3tmp/tmptSWLbt/pdisk_1.dat
2025-03-28T09:11:38.947174Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-28T09:11:39.119947Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:11:39.135618Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:11:39.135708Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:11:39.139843Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 10049, node 2
2025-03-28T09:11:39.275208Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:11:39.275233Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:11:39.275241Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:11:39.275351Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:13604
TClient is connected to server localhost:13604
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:11:40.008248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:40.020520Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-03-28T09:11:40.040973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:40.181757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:40.427804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:40.551446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:42.972438Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785565686065677:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:42.972618Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:43.020170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-28T09:11:43.081162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-28T09:11:43.160797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-28T09:11:43.203827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-28T09:11:43.243620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-28T09:11:43.314515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-28T09:11:43.405629Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785569981033492:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:43.405736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:43.406724Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785569981033498:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:43.410487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-28T09:11:43.420804Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785569981033501:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-28T09:11:43.489324Z node 2 :TX_PROXY ERROR: Actor# [2:7486785569981033554:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:11:43.847858Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785548506194845:2163];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:43.856848Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:11:44.647693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
waiting...
>> KqpIndexes::UpsertWithNullKeysComplex [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCH2 [GOOD]
>> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex-UseSink [GOOD]
>> KqpIndexes::DuplicateUpsertInterleave
>> KqpIndexes::NullInIndexTable [GOOD]
>> KqpIndexes::MultipleSecondaryIndexWithSameComulns-UseSink

------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertNullInComplexFkDuplicate [GOOD]
Test command err:
Trying to start YDB, gRPC: 9189, MsgBus: 21944
2025-03-28T09:11:23.907838Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785483718589047:2067];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:23.914417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019d0/r3tmp/tmppAU2Nl/pdisk_1.dat
2025-03-28T09:11:24.903037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:11:24.911125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:11:25.016754Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:11:25.018444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:11:25.019360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:11:25.289095Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.134232s
2025-03-28T09:11:25.289176Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.134332s
TServer::EnableGrpc on GrpcPort 9189, node 1
2025-03-28T09:11:25.891274Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:11:25.891298Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:11:25.891307Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:11:25.891409Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:21944
TClient is connected to server localhost:21944
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:11:28.029614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:28.049730Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:11:28.066862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:28.410322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:28.747618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:28.901836Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785483718589047:2067];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:28.902045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:11:28.975067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:29.904472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785509488394593:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:29.904611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:30.679766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.762637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.881414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.980456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:11:31.055063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:11:31.128994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:11:31.222948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785518078329729:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:31.223053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:31.223250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785518078329734:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:31.228305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:11:31.244690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785518078329736:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:11:31.308535Z node 1 :TX_PROXY ERROR: Actor# [1:7486785518078329791:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:11:34.811053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
waiting...
Trying to start YDB, gRPC: 16029, MsgBus: 15277
2025-03-28T09:11:40.004559Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785555260114973:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:40.004594Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019d0/r3tmp/tmpWPeMdW/pdisk_1.dat
2025-03-28T09:11:40.182934Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:11:40.215855Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:11:40.215943Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:11:40.220029Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 16029, node 2
2025-03-28T09:11:40.335279Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:11:40.335308Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:11:40.335315Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:11:40.335437Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:15277
TClient is connected to server localhost:15277
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:11:40.944601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:40.951421Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-03-28T09:11:40.963820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:41.062184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-28T09:11:41.257428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
2025-03-28T09:11:41.346088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:44.310477Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785576734953230:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:44.310562Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:44.385002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-28T09:11:44.435634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-28T09:11:44.486455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-28T09:11:44.565736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-28T09:11:44.653467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-28T09:11:44.758671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-28T09:11:44.911191Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785576734953757:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:44.911295Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:44.911929Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785576734953762:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:44.915132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-28T09:11:44.937679Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480
2025-03-28T09:11:44.938843Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785576734953765:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-28T09:11:45.010808Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785555260114973:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:45.010991Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:11:45.015098Z node 2 :TX_PROXY ERROR: Actor# [2:7486785581029921116:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:11:46.514045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
waiting...
|81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest

------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithNullKeysComplex [GOOD]
Test command err:
Trying to start YDB, gRPC: 19096, MsgBus: 6461
2025-03-28T09:11:23.911346Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785484011114741:2060];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:23.916539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019ae/r3tmp/tmpY6xTHP/pdisk_1.dat
2025-03-28T09:11:25.001538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:11:25.001669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:11:25.015986Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:11:25.017107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:11:25.018977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:11:25.289190Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.156343s
2025-03-28T09:11:25.289256Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.156427s
TServer::EnableGrpc on GrpcPort 19096, node 1
2025-03-28T09:11:25.891372Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:11:25.891403Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:11:25.891411Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:11:25.891536Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:6461
TClient is connected to server localhost:6461
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:11:27.770042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:27.852187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:28.271660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:28.449311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:28.534478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:28.899522Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785484011114741:2060];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:28.899576Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:11:29.242339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785509780920259:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:29.242513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:30.689992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.755669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.858228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.925769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:11:31.015130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:11:31.098526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:11:31.185858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785518370855428:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:31.185923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:31.186074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785518370855433:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:31.189296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:11:31.200348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785518370855435:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:11:31.262446Z node 1 :TX_PROXY ERROR: Actor# [1:7486785518370855491:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:11:34.817120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:11:35.205422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480
2025-03-28T09:11:35.356400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480
2025-03-28T09:11:36.547265Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
2025-03-28T09:11:37.576610Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
Trying to start YDB, gRPC: 62386, MsgBus: 64688
2025-03-28T09:11:38.960197Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785548540384838:2073];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:38.961068Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019ae/r3tmp/tmpEcqDv9/pdisk_1.dat
2025-03-28T09:11:39.269525Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:11:39.329035Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:11:39.329119Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:11:39.330114Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 62386, node 2
2025-03-28T09:11:39.495824Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:11:39.495845Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:11:39.495858Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:11:39.495972Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:64688
TClient is connected to server localhost:64688
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:11:40.103866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:40.134366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:40.276338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:40.743645Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:40.895720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:43.966816Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785548540384838:2073];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:43.966888Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:11:44.441368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785574310190380:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:44.441448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:44.490430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:44.535370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:44.601453Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:44.667228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:44.720025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:44.781358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:44.940144Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785574310190893:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:44.940235Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:44.940578Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785574310190898:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:44.945521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:44.980106Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785574310190900:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:11:45.061168Z node 2 :TX_PROXY ERROR: Actor# [2:7486785578605158256:3459] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:46.207006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.392195Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:47.442195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:11:47.532753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.898796Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:49.061453Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:49.090462Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpIndexes::Uint8Index [GOOD] >> KqpUniqueIndex::UpdateOnFkAlreadyExist [GOOD] >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 >> TLocksTest::CK_GoodLock >> KqpIndexes::UpsertNoIndexColumns [GOOD] |81.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |81.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |81.9%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut >> KqpJoinOrder::CanonizedJoinOrderTPCDS64 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::Uint8Index [GOOD] Test command err: Trying to start YDB, gRPC: 15987, MsgBus: 3707 2025-03-28T09:11:23.907935Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785486162418795:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:23.914839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019bb/r3tmp/tmpztodIm/pdisk_1.dat 2025-03-28T09:11:25.011554Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:25.012742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:25.012874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:25.014875Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:25.027980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:25.288139Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event 
processing took too much time Type# 268637706 Duration# 0.141933s 2025-03-28T09:11:25.288215Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.142025s TServer::EnableGrpc on GrpcPort 15987, node 1 2025-03-28T09:11:25.891207Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:25.891238Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:25.891246Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:25.891352Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3707 TClient is connected to server localhost:3707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:27.967708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.029487Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:28.038629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:11:28.373026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:11:28.723391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.907065Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785486162418795:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:28.907137Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:28.941071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
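[editor's sketch] The warnings that follow record the workload-service bootstrap visible throughout this report: the `default` resource pool is first reported NOT_FOUND, a TPoolCreatorActor creates it, and a concurrent create then hits the benign "path exist" race on /Root/.metadata/workload_manager/pools/default. As a hedged illustration only — not part of the test run — the auto-created pool could be inspected with the public ydb CLI against the gRPC port this node announces (15987, from the startup line above); the endpoint is ephemeral and reachable only while the test server is alive, which is an assumption here:

    # Sketch: inspect the auto-created default resource pool with the
    # public CLI. Endpoint, database, and path are taken from the log
    # above; reachability outside the test harness is assumed.
    ydb --endpoint grpc://localhost:15987 --database /Root \
        scheme describe /Root/.metadata/workload_manager/pools/default
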
2025-03-28T09:11:29.808516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785511932224309:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:29.808622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:30.685128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.780629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.869919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.907600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.972322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.016739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.114161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785520522159462:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.114241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.114460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785520522159467:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.119057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:31.133914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785520522159469:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:31.220687Z node 1 :TX_PROXY ERROR: Actor# [1:7486785520522159524:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:34.817830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:38.672262Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0atb0bpm3tspy20zh0v91, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNmZTFkYTktNDRhZmNhMDgtN2M1OTFjYTItNGFlMTQ0NTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:11:38.709831Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWNmZTFkYTktNDRhZmNhMDgtN2M1OTFjYTItNGFlMTQ0NTA=, ActorId: [1:7486785533407061713:2503], ActorState: ExecuteState, TraceId: 01jqe0atb0bpm3tspy20zh0v91, Create QueryResponse for error on request, msg: 2025-03-28T09:11:39.579445Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486785554881898876:2621], TxId: 281474976710679, task: 1. Ctx: { TraceId : 01jqe0avzc717tvjzkqwxexy6v. SessionId : ydb://session/3?node_id=1&id=MWNmZTFkYTktNDRhZmNhMDgtN2M1OTFjYTItNGFlMTQ0NTA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:11:39.579899Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486785554881898877:2622], TxId: 281474976710679, task: 2. Ctx: { TraceId : 01jqe0avzc717tvjzkqwxexy6v. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWNmZTFkYTktNDRhZmNhMDgtN2M1OTFjYTItNGFlMTQ0NTA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486785554881898873:2503], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:11:39.580233Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWNmZTFkYTktNDRhZmNhMDgtN2M1OTFjYTItNGFlMTQ0NTA=, ActorId: [1:7486785533407061713:2503], ActorState: ExecuteState, TraceId: 01jqe0avzc717tvjzkqwxexy6v, Create QueryResponse for error on request, msg: 2025-03-28T09:11:39.991773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:11:39.991818Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:40.734083Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0awtkey09bctwxqhvpjw4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNmZTFkYTktNDRhZmNhMDgtN2M1OTFjYTItNGFlMTQ0NTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:11:40.734602Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWNmZTFkYTktNDRhZmNhMDgtN2M1OTFjYTItNGFlMTQ0NTA=, ActorId: [1:7486785533407061713:2503], ActorState: ExecuteState, TraceId: 01jqe0awtkey09bctwxqhvpjw4, Create QueryResponse for error on request, msg: 2025-03-28T09:11:40.770244Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:40.814774Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:40.886864Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:41.951329Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:41.984865Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 19325, MsgBus: 30663 2025-03-28T09:11:43.139743Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785571509309272:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:43.139809Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019bb/r3tmp/tmpMYVTrx/pdisk_1.dat 2025-03-28T09:11:43.495128Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:43.496735Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:43.496812Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:43.499195Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19325, node 2 2025-03-28T09:11:43.671248Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:43.671267Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:43.671275Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:43.671386Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30663 TClient is connected to server localhost:30663 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:44.435961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:44.443923Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:44.461548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:44.569581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:44.797597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:44.906544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:48.050854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785592984147520:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:48.050967Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:48.083699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:48.143081Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785571509309272:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:48.143140Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:48.161739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:48.219481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:48.304981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:48.394497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:48.494336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:48.573624Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785592984148041:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:48.573716Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:48.573907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785592984148046:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:48.577675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:48.597854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785592984148048:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:48.660745Z node 2 :TX_PROXY ERROR: Actor# [2:7486785592984148102:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:50.155911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:50.481403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:11:50.529872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:11:50.600341Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpUniqueIndex::UpdateOnFkSelectResultSameValue [GOOD] >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertNoIndexColumns [GOOD] Test command err: Trying to start YDB, gRPC: 11761, MsgBus: 16992 2025-03-28T09:11:23.908010Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785484606052794:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:23.914596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019b4/r3tmp/tmpmyDDsK/pdisk_1.dat 2025-03-28T09:11:25.019134Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:25.019542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:25.019635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:25.021658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:25.056903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11761, node 1 2025-03-28T09:11:25.291058Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.150355s 2025-03-28T09:11:25.291148Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.150455s 2025-03-28T09:11:25.920051Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:25.920074Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:25.920080Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
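[editor's sketch] The `TClient::Ls request: Root` / `TClient::Ls response:` dumps in this report come from the internal C++ test client (test_client.cpp). A rough public-CLI analogue of that Ls call, sketched against the gRPC port this node announces (11761) — again an assumption, since the endpoint only exists for the lifetime of the test:

    # Sketch: approximate public-CLI equivalent of TClient::Ls("Root");
    # prints the same kind of PathDescription shown in the dumps below.
    ydb --endpoint grpc://localhost:11761 --database /Root scheme describe /Root
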
2025-03-28T09:11:25.920207Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16992 TClient is connected to server localhost:16992 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:27.440454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:27.613674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.273434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.449781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.530156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.899614Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785484606052794:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:28.899670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:29.098038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785510375858294:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:29.098160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:30.691520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.764301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.836099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.886975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.950812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.030226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.162230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785518965793472:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.162324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.162638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785518965793477:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.167373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:31.177414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785518965793479:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:31.248745Z node 1 :TX_PROXY ERROR: Actor# [1:7486785518965793534:3466] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:34.826094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:37.665029Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:37.698026Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:37.818844Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:39.140612Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0av4317vkbggx5gd7ybdn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTJkNTUyOC04YzJmNjQ5ZS0yZmM1ZDZkYy01NmJkMDM1YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:11:39.231023Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTJkNTUyOC04YzJmNjQ5ZS0yZmM1ZDZkYy01NmJkMDM1YQ==, ActorId: [1:7486785531850695734:2504], ActorState: ExecuteState, TraceId: 01jqe0av4317vkbggx5gd7ybdn, Create QueryResponse for error on request, msg: 2025-03-28T09:11:39.291837Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:39.374171Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:39.982739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:11:39.982780Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:40.749180Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:40.801896Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 19124, MsgBus: 21457 2025-03-28T09:11:41.620286Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785563601888286:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:41.624052Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019b4/r3tmp/tmpvPfmfR/pdisk_1.dat 2025-03-28T09:11:41.777235Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:41.804399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:41.804487Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:41.806114Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19124, node 2 2025-03-28T09:11:41.890965Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:41.890989Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:41.890996Z node 2 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-03-28T09:11:41.891119Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21457 TClient is connected to server localhost:21457 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:11:42.397368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:11:42.427742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:42.505147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:42.704944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:42.778355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:45.195774Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785580781759241:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:45.195849Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:45.254234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.335903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.403609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.448621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.521633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.601759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.734160Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785580781759765:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:45.734251Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:45.734767Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785580781759770:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:45.739102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:45.756358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785580781759772:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:11:45.833664Z node 2 :TX_PROXY ERROR: Actor# [2:7486785580781759826:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:46.622877Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785563601888286:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:46.622951Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:47.128605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.215685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.320524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH2 [GOOD] Test command err: Trying to start YDB, gRPC: 24705, MsgBus: 8418 2025-03-28T09:09:29.359792Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784997021278130:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:29.360659Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001866/r3tmp/tmpQpFikk/pdisk_1.dat 2025-03-28T09:09:30.001396Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:30.034271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:30.034341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:30.037417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24705, node 1 2025-03-28T09:09:30.311353Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:30.311382Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:30.311392Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:30.311586Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8418 TClient is connected to server localhost:8418 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:31.351036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:31.442878Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:09:33.884828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785014201147907:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:33.884926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785014201147919:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:33.885003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:33.889913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:09:33.905590Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:09:33.905894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785014201147921:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:09:34.014935Z node 1 :TX_PROXY ERROR: Actor# [1:7486785018496115268:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:34.360494Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486784997021278130:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:34.360573Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:09:34.903260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:09:35.925660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785022791082818:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:35.925903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785022791082818:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:09:35.926217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785022791082818:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:09:35.926347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785022791082818:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:09:35.926456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785022791082818:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:09:35.926572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785022791082818:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:09:35.926704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785022791082818:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:09:35.926829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785022791082818:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:09:35.926949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785022791082818:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:09:35.931089Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486785022791082818:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:09:35.931267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785022791082818:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:09:35.931392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785022791082818:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:09:35.981707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785022791082809:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:35.981778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785022791082809:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:09:35.982020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785022791082809:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:09:35.982128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785022791082809:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:09:35.982239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785022791082809:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:09:35.982361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785022791082809:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:09:35.982465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785022791082809:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:09:35.982579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785022791082809:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:09:35.999572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785022791082809:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:09:35.999904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785022791082809:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:09:36.000030Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7486785022791082809:2353];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:09:36.000130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785022791082809:2353];tablet_id=72075186224037888 ... Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.171670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.171670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.177000Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.177030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.182389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.182421Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.187769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.187768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.193363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.197159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.203609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.206997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.208840Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.214007Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.226952Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.226953Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.232376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.240056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.240612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.245522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.251168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.251842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.258362Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.263480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.269287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.270963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.281870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.288311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.293860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.295652Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.305705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.306869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.312023Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.317283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.317293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.324930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.330194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:19.650536Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0935r4sah5jajnrf2g35f", SessionId: ydb://session/3?node_id=1&id=MjU2YzRjMjAtYzQ3MDVlYjUtNzBiMTNiZjEtYTc4ZjM4YmY=, Slow query, duration: 39.081216s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:11:19.910020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:19.910068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:19.910257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486785362093546743:9707];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-03-28T09:11:19.910503Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:37.454186Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0agv50x8r3sgwsbmhkya4", SessionId: ydb://session/3?node_id=1&id=MjU2YzRjMjAtYzQ3MDVlYjUtNzBiMTNiZjEtYTc4ZjM4YmY=, Slow query, duration: 10.072733s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "-- TPC-H/TPC-R Minimum Cost Supplier Query (Q2)\n-- using 1680793381 as a seed to the RNG\n\n$r = (select r_regionkey from\n `/Root/region`\nwhere r_name='AMERICA');\n\n$j1 = (select n_name,n_nationkey\n from `/Root/nation` as n\n join $r as r on\n n.n_regionkey = r.r_regionkey);\n\n$j2 = (select s_acctbal,s_name,s_address,s_phone,s_comment,n_name,s_suppkey\n from `/Root/supplier` as s\n join $j1 as j on\n s.s_nationkey = j.n_nationkey\n);\n\n$j3 = (select ps_partkey,ps_supplycost,s_acctbal,s_name,s_address,s_phone,s_comment,n_name\n from `/Root/partsupp` as ps\n join $j2 as j on\n ps.ps_suppkey = j.s_suppkey\n);\n\n$min_ps_supplycost = (select min(ps_supplycost) as min_ps_supplycost,ps_partkey\n from $j3\n group by ps_partkey\n);\n\n$p = (select p_partkey,p_mfgr\n from `/Root/part`\n where\n p_size = 10\n and p_type like '%COPPER'\n);\n\n$j4 = (select s_acctbal,\n s_name,\n n_name,\n p_partkey,\n p_mfgr,\n s_address,\n s_phone,\n s_comment\n from $p as p\n join $j3 as j on p.p_partkey = j.ps_partkey\n join $min_ps_supplycost as m on p.p_partkey = m.ps_partkey\n where min_ps_supplycost=ps_supplycost\n);\n\nselect\n s_acctbal,\n s_name,\n n_name,\n p_partkey,\n p_mfgr,\n s_address,\n s_phone,\n s_comment\nfrom $j4\norder by\n s_acctbal desc,\n n_name,\n s_name,\n p_partkey\nlimit 100;\n", parameters: 0b >> YdbOlapStore::LogPagingAfter [GOOD] >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns+UseSink [GOOD] >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns-UseSink >> SystemView::AuthPermissions [GOOD] >> SystemView::AuthPermissions_Access >> KqpIndexes::SecondaryIndexOrderBy2 [GOOD] >> KqpIndexes::SecondaryIndexReplace+UseSink >> BasicStatistics::NotFullStatisticsDatashard [GOOD] >> KqpJoin::FullOuterJoinSizeCheck >> KqpMultishardIndex::DataColumnUpsertMixedSemantic [GOOD] >> KqpMultishardIndex::DataColumnSelect >> KqpJoin::RightTableKeyPredicate >> TLocksTest::Range_BrokenLock2 >> KqpIndexes::ForbidViewModification [GOOD] >> KqpIndexes::IndexOr |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/kqprun |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |82.0%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun >> KqpIndexes::DuplicateUpsertInterleave [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2 [GOOD] >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover [GOOD] |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |82.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UniqAndNoUniqSecondaryIndexWithCover [GOOD] Test command err: Trying to start YDB, gRPC: 63306, MsgBus: 21355 2025-03-28T09:11:23.971656Z 
node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785485607841120:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:23.971717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00199a/r3tmp/tmpaMQ3SO/pdisk_1.dat 2025-03-28T09:11:25.022344Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:25.107036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:25.232298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:25.232415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:25.255447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63306, node 1 2025-03-28T09:11:25.887295Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:25.887323Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:25.887330Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:25.887436Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21355 TClient is connected to server localhost:21355 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:27.436925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:27.612447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.237147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:11:28.448802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.539516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.972912Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785485607841120:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:28.972970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:29.037740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785511377646364:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:29.037947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:30.687818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.736708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.782209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.833982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.918088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.008661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.074361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785519967581556:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.074428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.074484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785519967581561:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.079210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:31.098621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785519967581563:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:31.180253Z node 1 :TX_PROXY ERROR: Actor# [1:7486785519967581619:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:34.817634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:37.175750Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:37.217443Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:38.779628Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:40.015985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:11:40.016017Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:40.047562Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0aw0z6q0h2bt49m1k8x02, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc0MTM0NmQtODAzMzM5NGEtODIzODY2ZDktZTNhZjgxODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:11:40.076257Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Njc0MTM0NmQtODAzMzM5NGEtODIzODY2ZDktZTNhZjgxODI=, ActorId: [1:7486785532852483816:2504], ActorState: ExecuteState, TraceId: 01jqe0aw0z6q0h2bt49m1k8x02, Create QueryResponse for error on request, msg: 2025-03-28T09:11:43.036258Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0ayss512bv3ap9t2p0m5f, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc0MTM0NmQtODAzMzM5NGEtODIzODY2ZDktZTNhZjgxODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:11:43.036559Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Njc0MTM0NmQtODAzMzM5NGEtODIzODY2ZDktZTNhZjgxODI=, ActorId: [1:7486785532852483816:2504], ActorState: ExecuteState, TraceId: 01jqe0ayss512bv3ap9t2p0m5f, Create QueryResponse for error on request, msg: 2025-03-28T09:11:43.106660Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:43.139423Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:43.541153Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486785571507190429:2695], TxId: 281474976710706, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=Njc0MTM0NmQtODAzMzM5NGEtODIzODY2ZDktZTNhZjgxODI=. TraceId : 01jqe0b09b0s4spcd0egnrcbjh. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:11:43.541991Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486785571507190430:2696], TxId: 281474976710706, task: 2. Ctx: { TraceId : 01jqe0b09b0s4spcd0egnrcbjh. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=Njc0MTM0NmQtODAzMzM5NGEtODIzODY2ZDktZTNhZjgxODI=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486785571507190426:2504], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:11:43.542369Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Njc0MTM0NmQtODAzMzM5NGEtODIzODY2ZDktZTNhZjgxODI=, ActorId: [1:7486785532852483816:2504], ActorState: ExecuteState, TraceId: 01jqe0b09b0s4spcd0egnrcbjh, Create QueryResponse for error on request, msg: 2025-03-28T09:11:44.658915Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0b0p46pa4bfb9c8kdndce, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc0MTM0NmQtODAzMzM5NGEtODIzODY2ZDktZTNhZjgxODI=, CurrentExecutionId: , CustomerSupplied ... esponse for error on request, msg: 2025-03-28T09:11:46.416353Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0b1s33n61q9tm6q6rnrak, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc0MTM0NmQtODAzMzM5NGEtODIzODY2ZDktZTNhZjgxODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:11:46.416604Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Njc0MTM0NmQtODAzMzM5NGEtODIzODY2ZDktZTNhZjgxODI=, ActorId: [1:7486785532852483816:2504], ActorState: ExecuteState, TraceId: 01jqe0b1s33n61q9tm6q6rnrak, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 11648, MsgBus: 3133 2025-03-28T09:11:47.286064Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785586856909427:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:47.286124Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00199a/r3tmp/tmpDh96Qc/pdisk_1.dat 2025-03-28T09:11:47.442560Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:47.446592Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:47.446675Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:47.455607Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11648, node 2 2025-03-28T09:11:47.510948Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:47.510968Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:47.510976Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:47.511120Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3133 TClient is connected to server localhost:3133 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:48.020108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:48.046422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:48.117695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:48.521535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:48.650387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:51.502354Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785604036780380:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:51.502443Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:51.555932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:51.638942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:51.689565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:51.768269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:51.836366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:51.915343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:51.986045Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785604036780898:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:51.986118Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:51.986292Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785604036780903:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:51.990096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:52.009400Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785604036780905:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:11:52.079919Z node 2 :TX_PROXY ERROR: Actor# [2:7486785608331748255:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:52.286785Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785586856909427:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:52.286855Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:53.255223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:55.720128Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0bbcn7r032jw4shh7g7kx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZjZjcwY2UtM2E1MDNlMGUtZDczZjI5ZDYtZWQ5Mjc0MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:11:55.720391Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZjZjcwY2UtM2E1MDNlMGUtZDczZjI5ZDYtZWQ5Mjc0MDM=, ActorId: [2:7486785612626715813:2490], ActorState: ExecuteState, TraceId: 01jqe0bbcn7r032jw4shh7g7kx, Create QueryResponse for error on request, msg: 2025-03-28T09:11:56.252754Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486785625511618333:2578], TxId: 281474976715679, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=NGZjZjcwY2UtM2E1MDNlMGUtZDczZjI5ZDYtZWQ5Mjc0MDM=. CustomerSuppliedId : . TraceId : 01jqe0bcjn3xarht4v79xwsqh0. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:11:56.253412Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486785625511618334:2579], TxId: 281474976715679, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=NGZjZjcwY2UtM2E1MDNlMGUtZDczZjI5ZDYtZWQ5Mjc0MDM=. CustomerSuppliedId : . TraceId : 01jqe0bcjn3xarht4v79xwsqh0. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7486785625511618330:2490], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:11:56.253709Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZjZjcwY2UtM2E1MDNlMGUtZDczZjI5ZDYtZWQ5Mjc0MDM=, ActorId: [2:7486785612626715813:2490], ActorState: ExecuteState, TraceId: 01jqe0bcjn3xarht4v79xwsqh0, Create QueryResponse for error on request, msg: 2025-03-28T09:11:57.287078Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0bd3b94msdnvf1zyb1wvn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NGZjZjcwY2UtM2E1MDNlMGUtZDczZjI5ZDYtZWQ5Mjc0MDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:11:57.287355Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NGZjZjcwY2UtM2E1MDNlMGUtZDczZjI5ZDYtZWQ5Mjc0MDM=, ActorId: [2:7486785612626715813:2490], ActorState: ExecuteState, TraceId: 01jqe0bd3b94msdnvf1zyb1wvn, Create QueryResponse for error on request, msg: 2025-03-28T09:11:57.324387Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:57.350779Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:57.386834Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:58.482832Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:58.559382Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard [GOOD] Test command err: 2025-03-28T09:04:47.931391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
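Note on the two KQP_SLOW_LOG entries earlier in this output: each embeds its query text as a single escaped string ("\n" for newlines, \' for quotes), which is hard to read inline. Unescaped for readability, the texts are transcribed verbatim below; nothing here is new code, only the same query strings reflowed. The first entry (duration 39.081216s) is the column-store DDL batch:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The second entry (duration 10.072733s) is TPC-H Q2 over the /Root tables:

-- TPC-H/TPC-R Minimum Cost Supplier Query (Q2)
-- using 1680793381 as a seed to the RNG

$r = (select r_regionkey from
    `/Root/region`
where r_name='AMERICA');

$j1 = (select n_name,n_nationkey
    from `/Root/nation` as n
    join $r as r on
    n.n_regionkey = r.r_regionkey);

$j2 = (select s_acctbal,s_name,s_address,s_phone,s_comment,n_name,s_suppkey
    from `/Root/supplier` as s
    join $j1 as j on
    s.s_nationkey = j.n_nationkey
);

$j3 = (select ps_partkey,ps_supplycost,s_acctbal,s_name,s_address,s_phone,s_comment,n_name
    from `/Root/partsupp` as ps
    join $j2 as j on
    ps.ps_suppkey = j.s_suppkey
);

$min_ps_supplycost = (select min(ps_supplycost) as min_ps_supplycost,ps_partkey
    from $j3
    group by ps_partkey
);

$p = (select p_partkey,p_mfgr
    from `/Root/part`
    where
    p_size = 10
    and p_type like '%COPPER'
);

$j4 = (select s_acctbal,
    s_name,
    n_name,
    p_partkey,
    p_mfgr,
    s_address,
    s_phone,
    s_comment
    from $p as p
    join $j3 as j on p.p_partkey = j.ps_partkey
    join $min_ps_supplycost as m on p.p_partkey = m.ps_partkey
    where min_ps_supplycost=ps_supplycost
);

select
    s_acctbal,
    s_name,
    n_name,
    p_partkey,
    p_mfgr,
    s_address,
    s_phone,
    s_comment
from $j4
order by
    s_acctbal desc,
    n_name,
    s_name,
    p_partkey
limit 100;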
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:04:47.931720Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:04:47.931792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001129/r3tmp/tmpZ732jl/pdisk_1.dat 2025-03-28T09:04:48.396769Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7003, node 1 2025-03-28T09:04:48.704689Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:04:48.704749Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:04:48.704784Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:04:48.705377Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:04:48.708155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:04:48.803577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:48.803696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:48.828712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16309 2025-03-28T09:04:49.431456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:04:52.953585Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:04:52.986152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:52.986251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:53.041820Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:04:53.045597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:53.324745Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:53.325477Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:53.326164Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:53.326391Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:53.330802Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:53.331070Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:53.331180Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:53.331309Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:53.331493Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:04:53.532534Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:04:53.532651Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:04:53.552362Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:04:53.720327Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:04:53.787495Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:04:53.787619Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:04:53.817167Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:04:53.818478Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:04:53.818712Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:04:53.818790Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:04:53.818858Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:04:53.818921Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:04:53.818973Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:04:53.819033Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:04:53.822413Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:04:53.861310Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:04:53.861455Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1870:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:04:53.869081Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2606] 2025-03-28T09:04:53.879223Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1924:2623] 2025-03-28T09:04:53.879596Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1924:2623], schemeshard id = 72075186224037897 2025-03-28T09:04:53.885223Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:04:53.908239Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:04:53.908300Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:04:53.908371Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:04:53.927383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:04:53.947922Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:04:53.948092Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:04:54.160322Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:04:54.406185Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:04:54.465785Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:04:55.611996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3070], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:55.612138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:55.630103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:04:56.213016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2542:3119], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:56.213165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:56.214535Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2547:3123]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:04:56.214763Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:04:56.214848Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2549:3125] 2025-03-28T09:04:56.214919Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2549:3125] 2025-03-28T09:04:56.215454Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2550:2996] 2025-03-28T09:04:56.215733Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2549:3125], server id = [2:2550:2996], tablet id = 72075186224037894, status = OK 2025-03-28T09:04:56.215913Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2550:2996], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:04:56.215978Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:04:56.216188Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:04:56.216255Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2547:3123], StatRequests.size() = 1 2025-03-28T09:04:56.234664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2554:3129], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:56.234817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:56.235245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2559:3134], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:04:56.241441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-28T09:04:56.438636Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:04:56.438784Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:04:56.506481Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2549:3125], schemeshard count = 1 2025-03-28T09:04:56.858368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorA ... 8T09:10:35.835386Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:10:39.047260Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:10:39.047649Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:10:39.047938Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:10:39.058826Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:10:39.058889Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:10:42.024046Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:10:42.024111Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:10:43.431276Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:10:44.970549Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:10:44.970625Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:10:46.410055Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:10:46.410404Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:10:46.410655Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:10:47.915219Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:10:47.915276Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:10:50.703094Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:10:50.715943Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:10:50.716009Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:10:53.515358Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:10:53.515670Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:10:53.515897Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:10:53.527765Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:10:53.527827Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:10:56.395277Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:10:56.395337Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-03-28T09:10:57.799809Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:10:59.250378Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-28T09:10:59.250443Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:10:59.250472Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:10:59.250500Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-28T09:10:59.499334Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:10:59.499400Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:11:00.985002Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:11:00.985327Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:11:00.985549Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:11:02.595305Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:02.595368Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:11:05.329830Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:11:05.343248Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:05.343308Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:11:08.211345Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:11:08.211529Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:11:08.211859Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:11:08.222796Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:08.222859Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:11:11.099216Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:11.099279Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:11:12.432531Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:11:14.055867Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:14.055930Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:11:15.475213Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:11:15.475366Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:11:15.475622Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:11:16.989772Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:16.989874Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:11:19.715800Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:11:19.737512Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:19.737594Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. 
No force traversals. 2025-03-28T09:11:22.691330Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:11:22.691756Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:11:22.691979Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:11:22.707248Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:22.707308Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:11:25.535377Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:25.535463Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:11:26.734583Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:11:28.095351Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:28.095432Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:11:29.436386Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:11:29.436750Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:11:29.436993Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:11:30.911320Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:30.911379Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:11:33.511332Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:11:33.523602Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:33.523668Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:11:36.383630Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:11:36.383930Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:11:36.384141Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:11:36.395093Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:36.395153Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:11:39.071095Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:39.071157Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:11:40.387958Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:11:41.851528Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-28T09:11:41.851587Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:11:41.851621Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:11:41.851653Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-28T09:11:42.271297Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:42.271359Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
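The long DEBUG run above and below is the statistics subsystem idling: the aggregator tablet 72075186224037894 runs PropagateStatistics to both nodes roughly every seven seconds, ScheduleNextTraversal repeatedly finds no forced traversals, and each schemeshard re-sends base statistics on its own SendBaseStatsToSA timer (30s for 72057594046644480 above, 208s for 72075186224037897 just below). These base statistics feed the table stats used by query optimization. For context only, an illustrative way to inspect per-partition statistics from YQL, assuming the documented `.sys` system views (the view and column names come from YDB documentation, not from this log):

    -- Illustrative only: inspect per-partition base statistics via system views.
    SELECT Path, RowCount, DataSize
    FROM `/Root/.sys/partition_stats`
    ORDER BY DataSize DESC
    LIMIT 10;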
2025-03-28T09:11:43.602528Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:11:43.602904Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:11:43.603161Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:11:44.975255Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:44.975314Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:11:45.004741Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-28T09:11:45.004816Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 208.000000s, at schemeshard: 72075186224037897 2025-03-28T09:11:45.005046Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 ... waiting for TEvSchemeShardStats 2 (done) ... waiting for TEvPropagateStatistics 2025-03-28T09:11:45.048243Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:11:47.657915Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:11:47.671298Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:11:47.671378Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:11:50.474575Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:11:50.475064Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 ... waiting for TEvPropagateStatistics (done) 2025-03-28T09:11:50.475516Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:12651:7160]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:11:50.476159Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:11:50.487821Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:11:50.487917Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [2:12651:7160], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationDifferentJoinPredicateKeyTypeCorrectness2 [GOOD] Test command err: Trying to start YDB, gRPC: 61191, MsgBus: 1936 2025-03-28T09:11:00.927744Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785385117214258:2217];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:00.929141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00171e/r3tmp/tmpJKJHK9/pdisk_1.dat 2025-03-28T09:11:01.376006Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:01.397129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:01.397368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:01.400409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61191, node 1 2025-03-28T09:11:01.527185Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:01.527206Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:01.527227Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:01.527337Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1936 TClient is connected to server localhost:1936 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:02.223968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:04.142559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785402297083963:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:04.142566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785402297083954:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:04.142644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:04.149487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:11:04.195026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785402297083968:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:11:04.298628Z node 1 :TX_PROXY ERROR: Actor# [1:7486785402297084019:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:04.633712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:11:04.816423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:11:04.873255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:04.919227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:04.974960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:05.210363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:05.253338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:05.325032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:05.368413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:11:05.418620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:11:05.495967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:11:05.572251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:05.623840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:11:05.926481Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785385117214258:2217];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:05.926548Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:06.415394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:11:06.459431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:11:06.496662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:11:06.526291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:11:06.596152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:11:06.642032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:11:06.679520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:11:06.734999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:11:06.802356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:11:06.853142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:11:06.956168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:11:07.037744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:11:07.102935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:11:07.175787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:11:07.249591Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:11:07.314456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:11:07.349951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-28T09:11:07.410568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but prop ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.471588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.482610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.483917Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.499387Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.503232Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.509591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.517825Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.520209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.526157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.526199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.536176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.537196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.546825Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.548224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.560105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.560642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.570546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.573946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.583862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.591272Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.593144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.603031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.606517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.609480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.618368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.624366Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.624668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.632984Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.641911Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.645550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.664857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.667546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.686186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.697419Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.720447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.724379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.726534Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.734302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.747422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.753876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.760790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.768374Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.769160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.775605Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.778594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.950596Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe09ywrd7r7dtq3wfmmh3w4", SessionId: ydb://session/3?node_id=1&id=OTQ1YTEyZTctYjRjZDY1N2UtYzIxNmJhNTgtNWE3Y2VlYmU=, Slow query, duration: 39.997536s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:11:49.235743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:49.236070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:49.236320Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7486785544031031205:5962];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-03-28T09:11:49.236602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUsingInJoin+UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 2221, MsgBus: 11203 2025-03-28T09:11:23.907822Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785484924849399:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:23.914433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019d2/r3tmp/tmpdcgz9y/pdisk_1.dat 2025-03-28T09:11:24.993599Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:25.019720Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:25.019828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:25.028764Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:25.057579Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:25.288158Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.168749s 2025-03-28T09:11:25.288234Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.168851s TServer::EnableGrpc on GrpcPort 2221, node 1 2025-03-28T09:11:25.923121Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:25.923175Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:25.923187Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:25.923309Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11203 TClient is connected to server localhost:11203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:27.638341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:27.676352Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:27.701182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.274482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.506821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.626557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
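For readability, the DDL embedded with escaped newlines in the KQP_SLOW_LOG entry a few lines up (the CREATE TABLE batch in the ShuffleElimination test that ran for 39.997536s under ASAN) unescapes to the following; the text is taken verbatim from that log entry, only the `\n` escapes are expanded:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The 240-partition column-store setting explains the long wall clock: the preceding wall of TX_COLUMNSHARD_TX "finished_tx" lines is each of those shards committing the schema transaction.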
2025-03-28T09:11:28.901927Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785484924849399:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:28.902760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:29.128412Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785510694654908:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:29.128502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:30.683714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.773477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.823200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.885065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.920795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.993380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.135067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785519284590094:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.135156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.135355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785519284590099:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.139970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:31.154511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785519284590101:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:31.249650Z node 1 :TX_PROXY ERROR: Actor# [1:7486785519284590156:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:34.820502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:37.162540Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:37.330384Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486785545054394688:2565], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestTable/Index/indexImplTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:11:37.330902Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmI4M2RjNGItODdmOTIwMzUtMjQ2YzUyNTgtNDA0Nzc1NjA=, ActorId: [1:7486785532169492346:2504], ActorState: ExecuteState, TraceId: 01jqe0atf46cpd6gk0vyqs3haa, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:11:37.379331Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486785545054394698:2568], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Required global index not found, index name: WrongView, code: 2003 2025-03-28T09:11:37.380875Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmI4M2RjNGItODdmOTIwMzUtMjQ2YzUyNTgtNDA0Nzc1NjA=, ActorId: [1:7486785532169492346:2504], ActorState: ExecuteState, TraceId: 01jqe0atkw6ct74n8xj0jgacym, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:11:38.023216Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:39.210831Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:39.618422Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:39.643752Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 6876, MsgBus: 25158 2025-03-28T09:11:40.628268Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785556481324832:2119];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:40.628324Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019d2/r3tmp/tmpCsixWs/pdisk_1.dat 2025-03-28T09:11:40.841896Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:40.875213Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:40.875298Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:40.876547Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6876, node 2 2025-03-28T09:11:41.039052Z node 2 :NET_CLASSIFIER WARN ... 44480 2025-03-28T09:11:44.654080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:44.724168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:44.807971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:44.923534Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785573661196256:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:44.923624Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:44.923969Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785573661196261:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:44.928829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:44.943096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785573661196263:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:45.011782Z node 2 :TX_PROXY ERROR: Actor# [2:7486785577956163613:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:45.630559Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785556481324832:2119];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:45.630630Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:46.262594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.088706Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:47.701577Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:47.738982Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:48.102376Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:48.118451Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 22979, MsgBus: 11355 2025-03-28T09:11:49.091639Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785596872091911:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:49.091686Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019d2/r3tmp/tmpwxacVR/pdisk_1.dat 2025-03-28T09:11:49.403619Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:49.431465Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:49.431562Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:49.433229Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22979, node 3 2025-03-28T09:11:49.586254Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:49.586280Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:49.586288Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:49.586412Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11355 TClient is connected to server localhost:11355 WaitRootIsUp 'Root'... 
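The two SCHEME_ERROR compile failures earlier in this test's output are deliberate negative cases: KiReadTable rejects both a direct read of 'db.[/Root/TestTable/Index/indexImplTable]' (index implementation tables are not directly readable) and a read through a non-existent index ("Required global index not found, index name: WrongView"). The supported way to read through a secondary index in YQL is the VIEW keyword; a minimal sketch, where TestTable and Index are the names from the log but the column name is invented for illustration:

    -- Read via the secondary index instead of its implementation table.
    SELECT *
    FROM TestTable VIEW Index        -- VIEW <index_name> routes the read through the index
    WHERE IndexedColumn = "value";   -- IndexedColumn is a placeholder name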
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:50.508134Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:50.524299Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:50.617718Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:50.830821Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:50.933664Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:54.050844Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785618346930167:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:54.050964Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:54.094802Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486785596872091911:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:54.094892Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:54.145991Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:54.214137Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:54.271380Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:54.355910Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:54.416166Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:54.484084Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:54.584448Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785618346930692:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:54.584550Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:54.584777Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785618346930697:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:54.589297Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:54.605969Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785618346930699:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:54.693905Z node 3 :TX_PROXY ERROR: Actor# [3:7486785618346930753:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:56.067739Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:56.208741Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNotNullableLevel1 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNotNullableLevel2 |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH14 [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin-NotNull >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn [GOOD] |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |82.0%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel1 >> KqpMultishardIndex::YqWorksFineAfterAlterIndexTableDirectly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateImplicitNullInComplexFk2 [GOOD] Test command err: Trying to start YDB, gRPC: 2731, MsgBus: 19403 2025-03-28T09:11:42.915282Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785567265044518:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:42.926768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001977/r3tmp/tmpwgIvZK/pdisk_1.dat 2025-03-28T09:11:43.512259Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:43.539201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:43.539302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:43.541646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2731, node 1 2025-03-28T09:11:43.650466Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:43.650491Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:43.650497Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:43.650591Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:19403 TClient is connected to server localhost:19403 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:44.428788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:44.458082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:44.638485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:44.966014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:45.062306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:46.756387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785584444915463:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:46.764069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:47.102038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.145000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.188700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.214319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.246114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.279160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.317517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785588739883270:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:47.317584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:47.317746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785588739883275:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:47.321184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:47.332156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785588739883277:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:47.389964Z node 1 :TX_PROXY ERROR: Actor# [1:7486785588739883330:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:47.915208Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785567265044518:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:47.915273Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:48.346772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:50.280420Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0b6j76z1wcgt44h2s8txa, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzNiZTZkNDQtNDBlZDRmNTUtOTlkNzJhN2MtMWNmMmY4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:11:50.298497Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzNiZTZkNDQtNDBlZDRmNTUtOTlkNzJhN2MtMWNmMmY4YQ==, ActorId: [1:7486785593034851703:2548], ActorState: ExecuteState, TraceId: 01jqe0b6j76z1wcgt44h2s8txa, Create QueryResponse for error on request, msg: 2025-03-28T09:11:50.999243Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0b79acjg5cctackx3e6mh, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzNiZTZkNDQtNDBlZDRmNTUtOTlkNzJhN2MtMWNmMmY4YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-03-28T09:11:50.999460Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzNiZTZkNDQtNDBlZDRmNTUtOTlkNzJhN2MtMWNmMmY4YQ==, ActorId: [1:7486785593034851703:2548], ActorState: ExecuteState, TraceId: 01jqe0b79acjg5cctackx3e6mh, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29293, MsgBus: 25105 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001977/r3tmp/tmpZcYvN5/pdisk_1.dat 2025-03-28T09:11:52.219052Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:52.280131Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:52.288493Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:52.288576Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:52.290346Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29293, node 2 2025-03-28T09:11:52.475297Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:52.475321Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:52.475329Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:52.475465Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25105 TClient is connected to server localhost:25105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:53.320669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:53.340521Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:53.361368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:53.471577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:11:53.687107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:53.791028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:56.051700Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785629025259039:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:56.051820Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:56.137382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:56.205287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:56.247268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:56.289721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:56.340810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:56.393724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:56.507274Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785629025259555:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:56.507437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:56.507906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785629025259561:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:56.571537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:56.590665Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785629025259563:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:56.669791Z node 2 :TX_PROXY ERROR: Actor# [2:7486785629025259621:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:57.818011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:00.563333Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0bg79amvt6ma7jbamey1w, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWU0NDIzYzAtMzNlYTdlNWEtZDBjYzVkY2UtY2ExODFkYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:12:00.563595Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWU0NDIzYzAtMzNlYTdlNWEtZDBjYzVkY2UtY2ExODFkYWY=, ActorId: [2:7486785637615195287:2548], ActorState: ExecuteState, TraceId: 01jqe0bg79amvt6ma7jbamey1w, Create QueryResponse for error on request, msg: 2025-03-28T09:12:01.972548Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0bhf448v9thnm145v0vq1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWU0NDIzYzAtMzNlYTdlNWEtZDBjYzVkY2UtY2ExODFkYWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:12:01.979058Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWU0NDIzYzAtMzNlYTdlNWEtZDBjYzVkY2UtY2ExODFkYWY=, ActorId: [2:7486785637615195287:2548], ActorState: ExecuteState, TraceId: 01jqe0bhf448v9thnm145v0vq1, Create QueryResponse for error on request, msg: >> KqpIndexes::SecondaryIndexReplace+UseSink [GOOD] >> KqpJoin::FullOuterJoinSizeCheck [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-ColumnStore >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNullableLevel1 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNullableLevel2 |82.1%| [TA] $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoin::RightTableKeyPredicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnHidenChanges+DataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 1623, MsgBus: 11795 2025-03-28T09:11:42.257656Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785567796440882:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:42.258445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001983/r3tmp/tmpUjIBC6/pdisk_1.dat 2025-03-28T09:11:42.824829Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:42.837175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:42.837302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:42.841644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1623, node 1 2025-03-28T09:11:42.935080Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:42.935107Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:42.935121Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:42.935271Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11795 TClient is connected to server localhost:11795 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:43.563311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:43.589997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:43.828339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:11:44.087856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:11:44.182976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:11:46.633172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785584976311799:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:46.633313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:46.996233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.070906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.105423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.142024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.183532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.250512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.257851Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785567796440882:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:47.257985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:47.343755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785589271279612:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:47.343860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:47.344228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785589271279617:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:47.347905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:47.358012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785589271279619:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:47.448404Z node 1 :TX_PROXY ERROR: Actor# [1:7486785589271279677:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:48.645550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 63762, MsgBus: 5580 2025-03-28T09:11:53.448857Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785614427030140:2134];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:53.448997Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001983/r3tmp/tmpKgVnwx/pdisk_1.dat 2025-03-28T09:11:53.717602Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63762, node 2 2025-03-28T09:11:53.855043Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:53.855354Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:53.891808Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:53.951306Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:53.951336Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:53.951344Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:53.951495Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5580 TClient is connected to server localhost:5580 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:54.577088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:11:54.587772Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:54.601839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:54.708077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:54.918511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:55.008763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:57.798658Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785631606901034:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:57.798829Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:57.856055Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:57.907226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:57.954495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:58.023027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:58.065743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:58.114033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:58.214907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785635901868839:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:58.215013Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:58.215474Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785635901868844:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:58.219886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:58.243977Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785635901868846:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:58.320626Z node 2 :TX_PROXY ERROR: Actor# [2:7486785635901868904:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:58.451473Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785614427030140:2134];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:58.451549Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:59.493590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:01.732902Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0bhfeectrt2mtjyje11gk, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTYwYTQ0OWEtOTA5OWMxYTEtNmZjOTdjZGItYWQzM2YxMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:12:01.803260Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTYwYTQ0OWEtOTA5OWMxYTEtNmZjOTdjZGItYWQzM2YxMjE=, ActorId: [2:7486785640196837240:2549], ActorState: ExecuteState, TraceId: 01jqe0bhfeectrt2mtjyje11gk, Create QueryResponse for error on request, msg: >> KqpUniqueIndex::UpdateFkAlreadyExist [GOOD] >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogPagingAfter [GOOD] Test command err: 2025-03-28T09:08:43.259875Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784797472838209:2143];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:43.260476Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00148f/r3tmp/tmpLN1SLy/pdisk_1.dat 2025-03-28T09:08:43.699037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:43.699124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:08:43.703365Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:43.707886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11802, node 1 2025-03-28T09:08:43.818613Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:43.818645Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:43.818655Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:43.818817Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5955 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:44.152669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:5955 2025-03-28T09:08:44.526413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:44.675653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784801767806549:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:44.675876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784801767806549:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:44.676160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784801767806549:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:44.676288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784801767806549:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:44.676385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784801767806549:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:44.676527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784801767806549:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:44.676670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784801767806549:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:44.676830Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486784801767806549:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:44.676964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784801767806549:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:08:44.677072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784801767806549:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:08:44.677190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784801767806549:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:08:44.677289Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486784801767806549:2327];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:08:44.726172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784801767806526:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:44.726237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784801767806526:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:44.726457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784801767806526:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:44.726569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784801767806526:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:44.726662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784801767806526:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:44.726878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784801767806526:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:44.727049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784801767806526:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:44.727163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784801767806526:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:44.727271Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7486784801767806526:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:08:44.727357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784801767806526:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:08:44.727472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784801767806526:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:08:44.727596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486784801767806526:2325];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:08:44.767092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784801767806524:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:44.767155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784801767806524:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:44.767375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784801767806524:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:44.767508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784801767806524:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:44.767613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784801767806524:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:44.767731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784801767806524:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:44.767837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784801767806524:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:44.767950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784801767806524:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:44.768064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486784801767806524:2324];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Syn ... 
DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NWMwMGY3OC0yMzJhNzA3NC0yYzIyMzMxNS0zZWYyZDE3MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7486785607637590774:3640] 2025-03-28T09:11:51.943133Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710670;task_id=66;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:11:51.948315Z node 28 :KQP_EXECUTER INFO: ActorId: [28:7486785607637590687:3559] TxId: 281474976710670. Ctx: { TraceId: 01jqe0b6vv166qjvz2xf5c9nb9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NWMwMGY3OC0yMzJhNzA3NC0yYzIyMzMxNS0zZWYyZDE3MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 292243 DurationUs: 236910 Tables { TablePath: "/Root/OlapStore/log1" } ExecuterCpuTimeUs: 120516 StartTimeMs: 1743153111706 FinishTimeMs: 1743153111943 Stages { StageGuid: "d23ed252-8bbba6ec-4a47881a-abec2ef9" Program: "(\n(declare %kqp%tx_result_binding_0_0 (TupleType (ListType (TupleType (TupleType (OptionalType (DataType \'Timestamp)) (OptionalType (DataType \'Utf8)) (OptionalType (DataType \'Utf8)) (OptionalType (DataType \'Utf8)) (DataType \'Int32)) (TupleType (OptionalType (DataType \'Timestamp)) (OptionalType (DataType \'Utf8)) (OptionalType (DataType \'Utf8)) (OptionalType (DataType \'Utf8)) (DataType \'Int32))))))\n(return (lambda \'() (block \'(\n (let $1 (KqpTable \'\"/Root/OlapStore/log1\" \'\"72057594046644480:3\" \'\"\" \'1))\n (let $2 (OptionalType (DataType \'Utf8)))\n (let $3 (TupleType (OptionalType (DataType \'Timestamp)) $2 $2 $2 (DataType \'Int32)))\n (let $4 \'(\'\"level\" \'\"message\" \'\"resource_id\" \'\"resource_type\" \'\"timestamp\" \'\"uid\"))\n (let $5 \'(\'\"timestamp\" \'\"resource_type\" \'\"resource_id\" \'\"uid\"))\n (let $6 \'(\'(\'\"UsedKeyColumns\" $5) \'(\'\"ExpectedMaxRanges\" \'4) \'(\'\"PointPrefixLen\" \'0)))\n (let $7 (KqpWideReadOlapTableRanges $1 %kqp%tx_result_binding_0_0 $4 \'() $6 (lambda \'($10) (block \'(\n (let $11 \'(\'eq \'\"resource_type\" (String \'\"app\")))\n (let $12 \'(\'eq \'\"resource_id\" (String \'\"resource_1\")))\n (return (KqpOlapFilter $10 (KqpOlapAnd $11 $12)))\n )))))\n (let $8 (Bool \'true))\n (let $9 \'(\'(\'4 $8) \'(\'3 $8) \'(\'2 $8) \'(\'5 $8)))\n (return (FromFlow (WideTop $7 (Uint64 \'50) $9)))\n))))\n)\n" ComputeActors { CpuTimeUs: 2375 Tasks { TaskId: 56 CpuTimeUs: 680 FinishTimeMs: 1743153111929 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 102 BuildCpuTimeUs: 578 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-mdsjyjltvm" NodeId: 28 CreateTimeMs: 1743153111781 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743153111891 } Stages { StageId: 2 StageGuid: "a68fcc43-f335ac5c-739f6bf7-6606caca" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (ToFlow $1) (lambda \'($2 $3 $4 $5 $6 $7) (AsStruct \'(\'\"level\" $2) \'(\'\"message\" $3) \'(\'\"resource_id\" $4) \'(\'\"resource_type\" $5) \'(\'\"timestamp\" $6) \'(\'\"uid\" $7)))))))\n)\n" ComputeActors { CpuTimeUs: 1739 Tasks { TaskId: 66 StageId: 2 CpuTimeUs: 190 FinishTimeMs: 1743153111942 ComputeCpuTimeUs: 56 BuildCpuTimeUs: 134 HostName: "ghrun-mdsjyjltvm" NodeId: 28 CreateTimeMs: 1743153111787 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743153111891 } Stages { StageId: 1 StageGuid: "ca84f109-b7cd1557-766000fe-43b88710" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (Bool \'true))\n (let $3 \'(\'(\'4 $2) \'(\'3 $2) \'(\'2 $2) \'(\'5 $2)))\n (let $4 (WideTop (ToFlow $1) (Uint64 \'50) $3))\n (let $5 (Bool \'false))\n (let $6 \'(\'(\'4 $5) \'(\'3 $5) \'(\'2 $5) \'(\'5 $5)))\n (return (FromFlow (WideSort $4 $6)))\n))))\n)\n" BaseTimeMs: 1743153111891 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":7,\"Plans\":[{\"Node Type\":\"ResultSet_1\",\"PlanNodeId\":6,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node 
Type\":\"Stage\",\"PlanNodeId\":5,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":4,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Sort-Top\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Sort\",\"SortBy\":\"[row.timestamp,row.resource_type,row.resource_id,row.uid]\"},{\"Inputs\":[{\"ExternalPlanNodeId\":2}],\"Limit\":\"50\",\"Name\":\"Top\",\"TopBy\":\"[row.timestamp,row.resource_type,row.resource_id,row.uid]\"}],\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"UnionAll\",\"PlanNodeId\":2,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Top-Filter-TableRangeScan\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Limit\":\"50\",\"Name\":\"Top\",\"TopBy\":\"[row.timestamp,row.resource_type,row.resource_id,row.uid]\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[{\"InternalOperatorId\":2}],\"Name\":\"Filter\",\"Predicate\":\"resource_type == \\\"app\\\" AND resource_id == \\\"resource_1\\\"\",\"Pushdown\":\"True\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TableRangeScan\",\"Path\":\"\\/Root\\/OlapStore\\/log1\",\"ReadColumns\":[\"level\",\"message\",\"resource_id\",\"resource_type\",\"timestamp\",\"uid\"],\"ReadRanges\":[\"timestamp (3000000, +∞)\",\"resource_type (nginx, +∞)\",\"resource_id (resource_), +∞)\",\"uid (10, +∞)\"],\"ReadRangesExpectedSize\":4,\"ReadRangesKeys\":[\"timestamp\",\"resource_type\",\"resource_id\",\"uid\"],\"Scan\":\"Parallel\",\"SsaProgram\":{\"Command\":[{\"Assign\":{\"Column\":{\"Id\":11},\"Constant\":{\"Bytes\":\"app\"}}},{\"Assign\":{\"Column\":{\"Id\":12},\"Function\":{\"Arguments\":[{\"Id\":6},{\"Id\":11}],\"FunctionType\":2,\"KernelIdx\":0,\"YqlOperationId\":11}}},{\"Assign\":{\"Column\":{\"Id\":13},\"Constant\":{\"Bytes\":\"resource_1\"}}},{\"Assign\":{\"Column\":{\"Id\":14},\"Function\":{\"Arguments\":[{\"Id\":3},{\"Id\":13}],\"FunctionType\":2,\"KernelIdx\":1,\"YqlOperationId\":11}}},{\"Assign\":{\"Column\":{\"Id\":15},\"Function\":{\"Arguments\":[{\"Id\":12},{\"Id\":14}],\"FunctionType\":2,\"KernelIdx\":2,\"YqlOperationId\":0}}},{\"Filter\":{\"Predicate\":{\"Id\":15}}},{\"Projection\":{\"Columns\":[{\"Id\":7},{\"Id\":1},{\"Id\":3},{\"Id\":6},{\"Id\":5},{\"Id\":4}]}}],\"Version\":5},\"Table\":\"OlapStore\\/log1\"}],\"PlanNodeId\":1,\"StageGuid\":\"d23ed252-8bbba6ec-4a47881a-abec2ef9\",\"Stats\":{\"BaseTimeMs\":1743153111891,\"ComputeNodes\":[{\"CpuTimeUs\":2375,\"Tasks\":[{\"ComputeTimeUs\":102,\"FinishTimeMs\":1743153111929,\"Host\":\"ghrun-mdsjyjltvm\",\"NodeId\":28,\"TaskId\":56}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"OlapStore\\/log1\"]}],\"StageGuid\":\"\"}],\"StageGuid\":\"ca84f109-b7cd1557-766000fe-43b88710\",\"Stats\":{\"BaseTimeMs\":1743153111891,\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"SortColumns\":[\"timestamp (Desc)\",\"resource_type (Desc)\",\"resource_id (Desc)\",\"uid (Desc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"a68fcc43-f335ac5c-739f6bf7-6606caca\",\"Stats\":{\"BaseTimeMs\":1743153111891,\"ComputeNodes\":[{\"CpuTimeUs\":1739,\"Tasks\":[{\"ComputeTimeUs\":56,\"FinishTimeMs\":1743153111942,\"Host\":\"ghrun-mdsjyjltvm\",\"NodeId\":28,\"TaskId\":66}]}],\"FinishedTasks\":0,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 3217 Extra { 
type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\002\022\r\010\217\010\020\354\251\001\030\317\275\n B" } } 2025-03-28T09:11:51.948384Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7486785607637590687:3559] TxId: 281474976710670. Ctx: { TraceId: 01jqe0b6vv166qjvz2xf5c9nb9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NWMwMGY3OC0yMzJhNzA3NC0yYzIyMzMxNS0zZWYyZDE3MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:11:51.948478Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7486785607637590687:3559] TxId: 281474976710670. Ctx: { TraceId: 01jqe0b6vv166qjvz2xf5c9nb9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=NWMwMGY3OC0yMzJhNzA3NC0yYzIyMzMxNS0zZWYyZDE3MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.171727s ReadRows: 0 ReadBytes: 0 ru: 114 rate limiter was not found force flag: 1 2025-03-28T09:11:51.948606Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NWMwMGY3OC0yMzJhNzA3NC0yYzIyMzMxNS0zZWYyZDE3MQ==, ActorId: [28:7486785599047656012:3559], ActorState: ExecuteState, TraceId: 01jqe0b6vv166qjvz2xf5c9nb9, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-03-28T09:11:51.949195Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=NWMwMGY3OC0yMzJhNzA3NC0yYzIyMzMxNS0zZWYyZDE3MQ==, ActorId: [28:7486785599047656012:3559], ActorState: ExecuteState, TraceId: 01jqe0b6vv166qjvz2xf5c9nb9, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 266.043 QueriesCount: 1 2025-03-28T09:11:51.949281Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NWMwMGY3OC0yMzJhNzA3NC0yYzIyMzMxNS0zZWYyZDE3MQ==, ActorId: [28:7486785599047656012:3559], ActorState: ExecuteState, TraceId: 01jqe0b6vv166qjvz2xf5c9nb9, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-28T09:11:51.949433Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=NWMwMGY3OC0yMzJhNzA3NC0yYzIyMzMxNS0zZWYyZDE3MQ==, ActorId: [28:7486785599047656012:3559], ActorState: ExecuteState, TraceId: 01jqe0b6vv166qjvz2xf5c9nb9, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:11:51.949493Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NWMwMGY3OC0yMzJhNzA3NC0yYzIyMzMxNS0zZWYyZDE3MQ==, ActorId: [28:7486785599047656012:3559], ActorState: ExecuteState, TraceId: 01jqe0b6vv166qjvz2xf5c9nb9, EndCleanup, isFinal: 1 2025-03-28T09:11:51.949580Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NWMwMGY3OC0yMzJhNzA3NC0yYzIyMzMxNS0zZWYyZDE3MQ==, ActorId: [28:7486785599047656012:3559], ActorState: ExecuteState, TraceId: 01jqe0b6vv166qjvz2xf5c9nb9, Sent query response back to proxy, proxyRequestId: 5, proxyId: [28:7486785508853338360:2236] 2025-03-28T09:11:51.949635Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=NWMwMGY3OC0yMzJhNzA3NC0yYzIyMzMxNS0zZWYyZDE3MQ==, ActorId: [28:7486785599047656012:3559], ActorState: unknown state, TraceId: 01jqe0b6vv166qjvz2xf5c9nb9, Cleanup temp tables: 0 2025-03-28T09:11:51.955237Z node 28 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153111000, txId: 18446744073709551615] shutting down 2025-03-28T09:11:51.955426Z node 28 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=28&id=NWMwMGY3OC0yMzJhNzA3NC0yYzIyMzMxNS0zZWYyZDE3MQ==, ActorId: [28:7486785599047656012:3559], ActorState: unknown state, TraceId: 01jqe0b6vv166qjvz2xf5c9nb9, Session actor destroyed 2025-03-28T09:11:52.007154Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[28:7486785521738241184:2325];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-03-28T09:11:52.151969Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[28:7486785521738241186:2326];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-28T09:11:52.152122Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[28:7486785521738241231:2328];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-28T09:11:52.152174Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[28:7486785521738241202:2327];fline=actor.cpp:33;event=skip_flush_writing; |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |82.1%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |82.1%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut >> OlapEstimationRowsCorrectness::TPCDS78 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexReplace+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10600, MsgBus: 2091 2025-03-28T09:11:23.908096Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785487298818837:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:23.915046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00199c/r3tmp/tmpP3o33G/pdisk_1.dat 2025-03-28T09:11:25.027813Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:25.099648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:25.099790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:25.101046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:25.130290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:25.287134Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.119183s 2025-03-28T09:11:25.287238Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.119277s TServer::EnableGrpc on GrpcPort 10600, node 1 2025-03-28T09:11:25.891386Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:25.891407Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-03-28T09:11:25.891412Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:25.891591Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2091 TClient is connected to server localhost:2091 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:27.796448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:27.830701Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:27.850090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.188301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.415320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.513361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.900260Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785487298818837:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:28.900390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:29.151564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785513068624330:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:29.151659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:30.691523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.774563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.828667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.881584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.939825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.020454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.127937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785521658559492:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.128003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.131070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785521658559497:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.135639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:31.147349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785521658559503:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:31.226097Z node 1 :TX_PROXY ERROR: Actor# [1:7486785521658559558:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:34.817682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:40.000623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:11:40.000651Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 15762, MsgBus: 24482 2025-03-28T09:11:44.381381Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785577336419584:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:44.381427Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00199c/r3tmp/tmpn8mkDC/pdisk_1.dat 2025-03-28T09:11:44.694997Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:44.732393Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:44.732487Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:44.739934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15762, node 2 2025-03-28T09:11:44.971303Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:44.971332Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:44.971339Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:44.971480Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24482 TClient is connected to server localhost:24482 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:45.811452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSu ... 
issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:48.782923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:48.843125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:48.912608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:48.986618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:49.053292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:49.148768Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:49.216725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785598811258345:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:49.216824Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:49.217251Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785598811258350:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:49.221027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:49.239617Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785598811258352:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:49.326295Z node 2 :TX_PROXY ERROR: Actor# [2:7486785598811258407:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:49.384302Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785577336419584:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:49.384376Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:50.468978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24840, MsgBus: 12747 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00199c/r3tmp/tmpxwsUit/pdisk_1.dat 2025-03-28T09:11:55.293735Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:55.429768Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:55.443804Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:55.443911Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:55.446398Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24840, node 3 2025-03-28T09:11:55.614067Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:55.614093Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:55.614101Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:55.614219Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12747 TClient is connected to server localhost:12747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:11:56.439132Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:56.473160Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:56.708228Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:56.990879Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:57.135594Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:59.913505Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785640754623136:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:59.913609Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:59.991968Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.070379Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.111537Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.187348Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.265050Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.307219Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.412918Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785645049590958:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:00.413001Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:00.413214Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785645049590963:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:00.417225Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:00.437111Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-28T09:12:00.438199Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785645049590965:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:12:00.493530Z node 3 :TX_PROXY ERROR: Actor# [3:7486785645049591021:3459] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:01.812428Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:02.829748Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:02.856232Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:03.751922Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:03.778522Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoinSizeCheck [GOOD] Test command err: Trying to start YDB, gRPC: 3237, MsgBus: 13650 2025-03-28T09:11:55.599755Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785621236481148:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:55.599810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001712/r3tmp/tmpUeQ1yB/pdisk_1.dat 2025-03-28T09:11:56.403830Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:56.406560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:56.406630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:56.415109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3237, node 1 2025-03-28T09:11:56.498843Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:56.498884Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:56.498898Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:56.498993Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13650 TClient is connected to server localhost:13650 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:57.226099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:57.241764Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:57.250213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:57.412021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:57.609078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:57.677219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:59.747584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785638416352107:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:59.747708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:00.065610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.108547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.147948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.184940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.246506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.325173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.406215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785642711319918:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:00.406309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:00.411249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785642711319923:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:00.417757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:00.431504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785642711319925:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:00.500731Z node 1 :TX_PROXY ERROR: Actor# [1:7486785642711319981:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:00.600678Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785621236481148:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:00.600763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:01.867083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:01.941950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:12:02.030139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpUniqueIndex::UpdateOnNullInComplexFk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightTableKeyPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 25479, MsgBus: 20755 2025-03-28T09:11:55.871133Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785622096085838:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:55.873723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001710/r3tmp/tmpF8ziHv/pdisk_1.dat 2025-03-28T09:11:56.373511Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:56.377102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:56.377159Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:56.379087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25479, node 1 2025-03-28T09:11:56.523311Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:56.523336Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:56.523343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:56.523479Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20755 TClient is connected to server localhost:20755 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:57.507586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:57.540011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:57.714217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:57.917310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:58.026801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:00.008964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785639275956804:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:00.009035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:00.316108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.397300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.441720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.503059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.552591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.603394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.694921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785643570924617:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:00.695003Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:00.695237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785643570924622:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:00.699901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:00.740464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785643570924624:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:00.834128Z node 1 :TX_PROXY ERROR: Actor# [1:7486785643570924681:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:00.871567Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785622096085838:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:00.871655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:02.421632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCDS88+ColumnStore [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink [GOOD] >> KqpIndexes::SelectConcurentTX2 |82.1%| [TA] $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexes::IndexOr [GOOD] >> KqpIndexes::IndexFilterPushDown >> TVectorIndexTests::CreateTableMultiColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateFkAlreadyExist [GOOD] Test command err: Trying to start YDB, gRPC: 28476, MsgBus: 15849 2025-03-28T09:11:38.664145Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785548694802748:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:38.666878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00198f/r3tmp/tmpNDZGrm/pdisk_1.dat 2025-03-28T09:11:39.426020Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:39.450276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:39.450640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:39.453415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28476, node 1 2025-03-28T09:11:39.655220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:39.655243Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:39.655252Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:39.655342Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15849 TClient is connected to server localhost:15849 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:40.427911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:40.485025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:40.718724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:41.051288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:41.210481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:43.666905Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785548694802748:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:43.666979Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:44.100873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785574464608291:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:44.100994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:44.491365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:44.570240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:44.617373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:44.705874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:44.785457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:44.846159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:44.941040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785574464608819:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:44.941142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:44.941485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785574464608824:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:44.946037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:44.970232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785574464608826:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:45.050648Z node 1 :TX_PROXY ERROR: Actor# [1:7486785578759576180:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:46.254026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 62268, MsgBus: 11792 2025-03-28T09:11:49.977859Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785598486352676:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:49.977901Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00198f/r3tmp/tmpSwzijD/pdisk_1.dat 2025-03-28T09:11:50.336241Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:50.387646Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:50.387925Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:50.392042Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62268, node 2 2025-03-28T09:11:50.524031Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:50.524065Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:50.524074Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:50.524207Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11792 TClient is connected to server localhost:11792 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:51.217832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:11:51.231571Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:51.241367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:51.339976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:51.532579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:51.669246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:54.702870Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785619961190933:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:54.703000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:54.795829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:54.846446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:54.896565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:54.935149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:54.978974Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785598486352676:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:54.979074Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:55.014386Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:55.096871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:55.160258Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785624256158753:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:55.160343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:55.160696Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785624256158758:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:55.170296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:55.192963Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785624256158760:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:55.273697Z node 2 :TX_PROXY ERROR: Actor# [2:7486785624256158817:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:56.551151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:58.972269Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0berpfs1k6amjsftbyd1t, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTc0MmVlMGMtMWMyMzBiYzctZGFkNmNjN2EtMzVlYzMyMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:11:58.984394Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTc0MmVlMGMtMWMyMzBiYzctZGFkNmNjN2EtMzVlYzMyMjE=, ActorId: [2:7486785628551127177:2550], ActorState: ExecuteState, TraceId: 01jqe0berpfs1k6amjsftbyd1t, Create QueryResponse for error on request, msg: 2025-03-28T09:11:59.978162Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0bfrr14bkj7e62gfkk79c, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTc0MmVlMGMtMWMyMzBiYzctZGFkNmNjN2EtMzVlYzMyMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:11:59.978426Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTc0MmVlMGMtMWMyMzBiYzctZGFkNmNjN2EtMzVlYzMyMjE=, ActorId: [2:7486785628551127177:2550], ActorState: ExecuteState, TraceId: 01jqe0bfrr14bkj7e62gfkk79c, Create QueryResponse for error on request, msg: 2025-03-28T09:12:00.052742Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0bgqy5cy612bj2b4kn4st, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTc0MmVlMGMtMWMyMzBiYzctZGFkNmNjN2EtMzVlYzMyMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:12:00.052994Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTc0MmVlMGMtMWMyMzBiYzctZGFkNmNjN2EtMzVlYzMyMjE=, ActorId: [2:7486785628551127177:2550], ActorState: ExecuteState, TraceId: 01jqe0bgqy5cy612bj2b4kn4st, Create QueryResponse for error on request, msg: 2025-03-28T09:12:01.123407Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0bgt974ka7qqn5ppzm5q3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YTc0MmVlMGMtMWMyMzBiYzctZGFkNmNjN2EtMzVlYzMyMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-03-28T09:12:01.123654Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTc0MmVlMGMtMWMyMzBiYzctZGFkNmNjN2EtMzVlYzMyMjE=, ActorId: [2:7486785628551127177:2550], ActorState: ExecuteState, TraceId: 01jqe0bgt974ka7qqn5ppzm5q3, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH14 [GOOD] Test command err: Trying to start YDB, gRPC: 3524, MsgBus: 9676 2025-03-28T09:10:18.089492Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785207861441832:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:18.089629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001729/r3tmp/tmpThEhmG/pdisk_1.dat 2025-03-28T09:10:18.684582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:18.684686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:18.687877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3524, node 1 2025-03-28T09:10:18.734825Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:10:18.734856Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:10:18.774774Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:18.927480Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:10:18.927508Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:10:18.927521Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:10:18.927699Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9676 TClient is connected to server localhost:9676 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:10:19.893886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:10:19.935535Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:10:22.515709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785225041311478:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:22.515972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:22.516094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785225041311488:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:22.520397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:10:22.537077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785225041311492:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:10:22.614883Z node 1 :TX_PROXY ERROR: Actor# [1:7486785225041311543:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:10:22.939624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:10:23.090798Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785207861441832:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:23.090893Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:23.292505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785229336279072:2350];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:10:23.292805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785229336279072:2350];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:10:23.293111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785229336279072:2350];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:10:23.293250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785229336279072:2350];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:10:23.293362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785229336279072:2350];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:10:23.293481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785229336279072:2350];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:10:23.293607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785229336279072:2350];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:10:23.293714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785229336279072:2350];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:10:23.293855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785229336279072:2350];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:10:23.293978Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7486785229336279072:2350];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:10:23.294082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785229336279072:2350];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:10:23.294207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785229336279072:2350];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:10:23.295500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785229336279070:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:10:23.295547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785229336279070:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:10:23.295762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785229336279070:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:10:23.295868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785229336279070:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:10:23.295978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785229336279070:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:10:23.296114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785229336279070:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:10:23.296254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785229336279070:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:10:23.296379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785229336279070:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:10:23.296530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785229336279070:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:10:23.296655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785229336279070:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:10:23.296759Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7486785229336279070:2349];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:10:23.296858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785229336279070:2349];tablet_id=72075 ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.804057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.809462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.821110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.826864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.837315Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.839403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.850296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.851840Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.864770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.868462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.879927Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.886648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.887336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.894346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-28T09:11:48.905760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.908987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.918009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.923167Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.929125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.936629Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.945881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.946391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.954386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.963657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.969006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.976158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.977611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.985836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.990294Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.004638Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.007978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.015164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.017614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.021887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.027130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.030154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.034151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.036458Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.040871Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.042436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.047635Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.048748Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.053682Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.055093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.059347Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:49.289676Z 
node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe09vpc5765y7fb1r17adcw", SessionId: ydb://session/3?node_id=1&id=NDhhNTAyYTctNjU1NjBjODQtM2QwMDgxZjgtYjBmZTZiMjQ=, Slow query, duration: 43.612567s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:11:49.668087Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:49.668541Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7486785435494747738:8199];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-03-28T09:11:49.668844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:49.670042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpMultishardIndex::DataColumnSelect [GOOD] |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut >> KqpUniqueIndex::InsertNullInPk >> KqpIndexes::MultipleSecondaryIndex+UseSink >> KqpUniqueIndex::UpdateFkSameValue >> KqpIndexes::MultipleSecondaryIndexWithSameComulns-UseSink [GOOD] >> KqpMultishardIndex::SecondaryIndexSelectNull >> KqpJoin::IdxLookupLeftPredicate >> KqpJoinOrder::CanonizedJoinOrderTPCH8 >> YdbSdkSessions::SessionsServerLimit >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool >> TVectorIndexTests::CreateTableMultiColumn [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS88+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 6626, MsgBus: 9363 2025-03-28T09:08:44.221224Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784801867082671:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:08:44.221863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00172c/r3tmp/tmpur0utA/pdisk_1.dat 2025-03-28T09:08:44.646530Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:08:44.680013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:08:44.680127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6626, node 1 2025-03-28T09:08:44.681975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:08:44.743936Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:08:44.743962Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:08:44.743968Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:08:44.744059Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9363 TClient is connected to server localhost:9363 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:08:45.532952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:08:45.547233Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:08:47.939039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784814751985230:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:47.939170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:47.939605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486784814751985242:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:08:47.943650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:08:47.954240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486784814751985244:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:08:48.019906Z node 1 :TX_PROXY ERROR: Actor# [1:7486784819046952591:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:08:48.422422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:08:48.693467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784819046952861:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:48.693467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486784819046952863:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:48.693676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784819046952861:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:48.693982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784819046952861:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:48.694112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784819046952861:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:48.694140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486784819046952863:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:08:48.694258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784819046952861:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:48.694337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486784819046952863:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:08:48.694395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784819046952861:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:48.694434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486784819046952863:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:08:48.694499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784819046952861:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-03-28T09:08:48.694528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486784819046952863:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:08:48.694632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784819046952861:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:48.694637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486784819046952863:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:08:48.695144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784819046952861:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:08:48.695145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486784819046952863:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:08:48.695280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486784819046952863:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:08:48.695291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784819046952861:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:08:48.695400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486784819046952863:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:08:48.695430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784819046952861:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:08:48.695532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486784819046952863:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:08:48.695559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486784819046952861:2353];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:08:48.695704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486784819046952863:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:08:48.695824Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7486784819046952863:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:08:48.739986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784819046952865:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:08:48.740060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486784819046952865:2355];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract ... _state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.104383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.107403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.114097Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.114514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.120308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.120308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.128566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.132553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.135016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.138292Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.140804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.144767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.146831Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.150634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.151768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.156169Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.157410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.160790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:10:24.347432Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe06szg30nz0mgp4edryn4h", SessionId: ydb://session/3?node_id=1&id=NDAwNjAxNjItMjhiZjk4YjktZWY5ZDU0MzYtZWZiOTY3MGM=, Slow query, duration: 58.730204s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:10:25.014807Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:10:25.015256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7486784995140647641:7671];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-03-28T09:10:25.015615Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:10:25.016754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:55.977055Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe09tjpajcyrjnbybk04f7e", SessionId: ydb://session/3?node_id=1&id=NDAwNjAxNjItMjhiZjk4YjktZWY5ZDU0MzYtZWZiOTY3MGM=, Slow query, duration: 51.441895s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query88.tpl and seed 
318176889\nselect *\nfrom\n (select count(*) h8_30_to_9\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 8\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s1 cross join\n (select count(*) h9_to_9_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s2 cross join\n (select count(*) h9_30_to_10\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s3 cross join\n (select count(*) h10_to_10_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s4 cross join\n (select count(*) h10_30_to_11\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s5 cross join\n (select count(*) h11_to_11_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and 
time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s6 cross join\n (select count(*) h11_30_to_12\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s7 cross join\n (select count(*) h12_to_12_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 12\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s8\n;", parameters: 0b >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel2 >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::MultipleSecondaryIndexWithSameComulns-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13591, MsgBus: 20282 2025-03-28T09:11:23.910766Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785484040945681:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:23.942662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019ed/r3tmp/tmpD4UXUJ/pdisk_1.dat 2025-03-28T09:11:24.899425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:24.915103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:24.954410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:24.991521Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:25.019065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:25.288383Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.141399s 2025-03-28T09:11:25.288474Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 
Duration# 0.141491s TServer::EnableGrpc on GrpcPort 13591, node 1 2025-03-28T09:11:25.907526Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:25.907560Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:25.907570Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:25.907755Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20282 TClient is connected to server localhost:20282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:27.434847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:27.511232Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:27.614011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.197810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.526826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.679874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.903265Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785484040945681:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:28.908596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:29.181707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785509810751180:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:29.181860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:30.687635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.777479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.818814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.878747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.000024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.146393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.217201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785518400686355:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.217293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.223358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785518400686360:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.228422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:31.247025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785518400686362:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:31.319514Z node 1 :TX_PROXY ERROR: Actor# [1:7486785518400686418:3468] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:34.852651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:34.957378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:11:35.106317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5206, MsgBus: 19364 2025-03-28T09:11:40.028943Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785558783234983:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:40.029008Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019ed/r3tmp/tmpLfMye8/pdisk_1.dat 2025-03-28T09:11:40.441546Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:40.494811Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:40.499003Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:40.507706Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5206, node 2 2025-03-28T09:11:40.683315Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:40.683339Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:40.683347Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:40.683481Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19364 TClient is connected to server localhost:19364 WaitRootIsUp 'Root'... 
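Note on the sequence above, which repeats for each test node: the pool fetcher reports NOT_FOUND for the default workload-manager pool, a TPoolCreatorActor schedules a "doublechecking" retry, and the TX_PROXY error "path exist, request accepts it" shows a concurrent creation being tolerated as idempotent. For orientation, a hedged YQL sketch of an explicit resource-pool definition of the kind the service materializes lazily under /Root/.metadata/workload_manager/pools — the statement form follows YDB's CREATE RESOURCE POOL syntax, and the pool name and option values are illustrative assumptions, not taken from this run:

    -- Illustrative only: an explicit analogue of the lazily created `default` pool.
    -- Pool name and both settings are assumed example values.
    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10, -- queries allowed to run at once
        QUEUE_SIZE = 100             -- queries allowed to wait in the queue
    );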
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 720575940466 ... s } 2025-03-28T09:11:45.753288Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785580258073622:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:45.757490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:45.768876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785580258073624:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:11:45.865643Z node 2 :TX_PROXY ERROR: Actor# [2:7486785580258073679:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:47.121880Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.210192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.255676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22162, MsgBus: 31446 2025-03-28T09:11:50.643122Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785599771602428:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:50.643181Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019ed/r3tmp/tmpIWBuPD/pdisk_1.dat 2025-03-28T09:11:50.933264Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:50.950348Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:50.950444Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:50.953930Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22162, node 3 2025-03-28T09:11:51.151302Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:51.151327Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:51.151335Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:51.151462Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31446 TClient is connected to server localhost:31446 WaitRootIsUp 'Root'... 
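Each "ESchemeOpCreateTable" warning in these setup phases corresponds to one table-creation suboperation proposed to the schemeshard; the "propose itself is undo unsafe" wording appears to flag only that the proposal cannot be rolled back once accepted. A minimal YQL statement of the kind that drives such an operation — table and column names here are assumptions for illustration, not the tables this test creates:

    -- Hypothetical minimal DDL; one such statement yields one
    -- ESchemeOpCreateTable suboperation like those logged above.
    CREATE TABLE demo (
        id Uint64,
        payload Utf8,
        PRIMARY KEY (id)
    );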
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:51.772194Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:51.783522Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:51.797629Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:51.943871Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:52.159610Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:52.278383Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:54.978487Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785616951473397:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:54.978574Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:55.124137Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:55.197702Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:55.240512Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:55.320775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:55.365932Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:55.426930Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:55.513362Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785621246441213:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:55.513459Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:55.513669Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785621246441218:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:55.517522Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:55.527846Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785621246441220:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:55.593740Z node 3 :TX_PROXY ERROR: Actor# [3:7486785621246441276:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:55.646906Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486785599771602428:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:55.647083Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:56.949984Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:58.374862Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:58.406880Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:01.079663Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:02.095595Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:03.154554Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:04.114811Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:04.147764Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:04.892565Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:04.944046Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:05.618811Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:05.664131Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:05.912359Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:12:05.912386Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:06.177923Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:06.218796Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:06.253191Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:06.914794Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:06.956180Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCDS78 [GOOD] Test command err: Trying to start YDB, gRPC: 27433, MsgBus: 12992 2025-03-28T09:09:29.081872Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784997290280008:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:29.082576Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001867/r3tmp/tmpHZavyw/pdisk_1.dat 
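The OlapEstimationRowsCorrectness::TPCDS78 run that starts here builds column-store tables; the KQP_SLOW_LOG record further below quotes the exact DDL it executed. Reformatted from that log text for readability (content unchanged; one of the three tables shown):

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);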
2025-03-28T09:09:29.546372Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:29.568545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:29.568649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:29.570801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27433, node 1 2025-03-28T09:09:29.715270Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:29.715288Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:29.715295Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:29.715410Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12992 TClient is connected to server localhost:12992 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:30.503879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:09:30.533981Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:09:32.756187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785010175182555:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:32.756343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:32.758253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785010175182567:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:09:32.762856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:09:32.776537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785010175182569:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:09:32.842085Z node 1 :TX_PROXY ERROR: Actor# [1:7486785010175182620:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:09:33.132702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:09:33.386638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785014470150134:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:33.386917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785014470150134:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:09:33.387175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785014470150134:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:09:33.387309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785014470150134:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:09:33.387440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785014470150134:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:09:33.387592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785014470150134:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:09:33.387709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785014470150134:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:09:33.387792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785014470150134:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:09:33.387872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785014470150134:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:09:33.387951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785014470150134:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:09:33.388029Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7486785014470150134:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:09:33.388129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785014470150134:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:09:33.388549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785014470150144:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:33.388588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785014470150144:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:09:33.388775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785014470150144:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:09:33.388886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785014470150144:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:09:33.389029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785014470150144:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:09:33.389152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785014470150144:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:09:33.389287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785014470150144:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:09:33.389395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785014470150144:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:09:33.389517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785014470150144:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:09:33.389629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785014470150144:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:09:33.389746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785014470150144:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:09:33.389835Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7486785014470150144:2358];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:09:33.431878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785014470150130:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:09:33.431947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785014470150130:2351];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abs ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.218436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.218877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039212;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.229993Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.231791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.236321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.236933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.242627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.242738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.249399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.249425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.256151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.256444Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.262714Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.262720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.268869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.268868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.274982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.277093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.282166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.284171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.289377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.293933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.296573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.300381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.397390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.399338Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.404109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.410579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.417614Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.420597Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039202;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.427719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.430059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.433206Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.436305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.438313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.440459Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.443808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.445011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.449095Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.450270Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.454794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.456410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.462068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.463963Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.484352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:14.588551Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe08zwv6w3pszk36nqrx1fw", SessionId: ydb://session/3?node_id=1&id=NTc2MjYwYzItM2JhZDY5NDctMjAwOWUwNDktNDAzNjJkZg==, Slow query, duration: 37.376304s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:11:15.072994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:15.073495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:15.074238Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486785405312232065:11566];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-03-28T09:11:15.074630Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; |82.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut >> TLocksTest::CK_GoodLock [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin-NotNull [GOOD] >> KqpMultishardIndex::YqWorksFineAfterAlterIndexTableDirectly [GOOD] >> KqpJoinOrder::FiveWayJoin-ColumnStore [GOOD] >> TLocksTest::CK_BrokenLock >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull |82.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |82.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.1%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableMultiColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:12:09.041748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:12:09.041941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:12:09.042059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:12:09.042109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:12:09.043928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:12:09.044015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:12:09.044205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:12:09.044354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:12:09.045663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:12:09.242362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:12:09.242441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:09.274921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:12:09.275300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:12:09.275504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:12:09.305704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:12:09.306186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:12:09.316094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:12:09.317240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:12:09.330268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:12:09.345763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:12:09.345854Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:12:09.347641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:12:09.347723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:12:09.347837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:12:09.347975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:12:09.362085Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:12:09.551831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:12:09.554838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:12:09.556512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:12:09.558059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:12:09.558138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:12:09.561106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:12:09.561242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:12:09.561493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:12:09.561550Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:12:09.561676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:12:09.561729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:12:09.563797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:12:09.563852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:12:09.563908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:12:09.565654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:12:09.565695Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
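The TVectorIndexTests::CreateTableMultiColumn output that follows describes a vector_kmeans_tree index named idx_vector over column "embedding", covering columns covered1 and covered2, with metric DISTANCE_COSINE, VECTOR_TYPE_FLOAT, and dimension 1024, plus its two implementation tables (indexImplLevelTable, indexImplPostingTable). A hedged YQL sketch of DDL that requests such an index — the option spellings follow YDB's vector_kmeans_tree index syntax as best recalled and should be treated as assumptions, not as the statement this test ran:

    -- Sketch only: requests a covered vector index matching the parameters
    -- dumped below (cosine distance, float vectors, dimension 1024).
    ALTER TABLE vectors ADD INDEX idx_vector
        GLOBAL USING vector_kmeans_tree
        ON (embedding) COVER (covered1, covered2)
        WITH (distance=cosine, vector_type="float", vector_dimension=1024);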
2025-03-28T09:12:09.565746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:12:09.565789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:12:09.570534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:12:09.573806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:12:09.575065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:12:09.576266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:12:09.576416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:12:09.576492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:12:09.577948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:12:09.578022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:12:09.580855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:12:09.580971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:12:09.586351Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:12:09.586423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:12:09.586630Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:12:09.586670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:12:09.587669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:12:09.587790Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:12:09.587888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:12:09.587921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2025-03-28T09:12:09.587973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:12:09.588007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:12:09.588063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:12:09.588130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:12:09.588170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:12:09.588212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:12:09.588289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:12:09.588324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:12:09.588354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:12:09.590314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:12:09.590419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:12:09.590456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "covered1" DataColumnNames: "covered2" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: 
"compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:12:10.319438Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:12:10.319770Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 309us result status StatusSuccess 2025-03-28T09:12:10.320193Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_centroid" Type: "String" TypeId: 4097 Id: 3 NotNull: true IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:12:10.324813Z node 1 :SCHEMESHARD_DESCRIBE 
DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:12:10.325088Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 290us result status StatusSuccess 2025-03-28T09:12:10.325571Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "id1" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "id2" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "covered1" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "covered2" Type: "String" TypeId: 4097 Id: 5 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id1" KeyColumnNames: "id2" KeyColumnIds: 1 KeyColumnIds: 2 KeyColumnIds: 3 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateOnNullInComplexFk [GOOD] Test command err: Trying to start YDB, gRPC: 10248, MsgBus: 8601 2025-03-28T09:11:23.908072Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785484209388582:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:23.914657Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019a1/r3tmp/tmpkQkLby/pdisk_1.dat 2025-03-28T09:11:25.009010Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:25.019150Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:25.051993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:25.052104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:25.057595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:25.293176Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.174123s 2025-03-28T09:11:25.293264Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.174218s TServer::EnableGrpc on GrpcPort 10248, node 1 2025-03-28T09:11:25.902154Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:25.935191Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:25.935218Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:25.936237Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8601 TClient is connected to server localhost:8601 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:28.019943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.040969Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:28.051479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
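For orientation: the two TTxDescribeScheme DoComplete records above show how a vector_kmeans_tree index is materialized as two private tables, a level table holding cluster centroids and a posting table mapping leaf clusters back to main-table rows. A minimal sketch in Python, reconstructed solely from the logged schemas (illustrative only, not a YDB API; all names are exactly as they appear in the records above):

# Schemas transcribed from the TTxDescribeScheme DoComplete records above;
# this is illustrative only -- the log itself is the authoritative source.
INDEX_IMPL_TABLES = {
    # Level table: maps each parent cluster to its child centroids.
    "indexImplLevelTable": {
        "columns": {
            "__ydb_parent": "Uint64",    # parent cluster id (NotNull)
            "__ydb_id": "Uint64",        # centroid id (NotNull)
            "__ydb_centroid": "String",  # serialized centroid vector (NotNull)
        },
        "key": ("__ydb_parent", "__ydb_id"),
    },
    # Posting table: maps each leaf cluster to main-table rows,
    # carrying the covered columns so reads can be served from the index.
    "indexImplPostingTable": {
        "columns": {
            "__ydb_parent": "Uint64",  # leaf cluster id (NotNull)
            "id1": "String",           # main-table key part
            "id2": "String",           # main-table key part
            "covered1": "String",      # covered column
            "covered2": "String",      # covered column
        },
        "key": ("__ydb_parent", "id1", "id2"),
    },
}

# Index settings as logged in VectorIndexKmeansTreeDescription:
KMEANS_SETTINGS = {"metric": "DISTANCE_COSINE",
                   "vector_type": "VECTOR_TYPE_FLOAT",
                   "vector_dimension": 1024}

if __name__ == "__main__":
    for name, spec in INDEX_IMPL_TABLES.items():
        print(name, "-> key:", spec["key"])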
2025-03-28T09:11:28.353545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
2025-03-28T09:11:28.781008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting...
2025-03-28T09:11:28.915387Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785484209388582:2067];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:28.915440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:11:29.018483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting...
2025-03-28T09:11:29.808734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785509979194107:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:29.808826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:30.687590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.737118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.816849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.864102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.912243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:11:30.997498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:11:31.137861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785518569129245:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:31.137952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:31.138336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785518569129251:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:31.151030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:11:31.163387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785518569129253:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:31.255724Z node 1 :TX_PROXY ERROR: Actor# [1:7486785518569129313:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:34.738762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:37.460751Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0asrj6qbm4069zvtkg6fx, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzMwYmU3MGEtZTczZWY4MzItODE0OTAzN2UtMjgzOWQxNGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:11:37.487362Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzMwYmU3MGEtZTczZWY4MzItODE0OTAzN2UtMjgzOWQxNGE=, ActorId: [1:7486785535748999590:2563], ActorState: ExecuteState, TraceId: 01jqe0asrj6qbm4069zvtkg6fx, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 14587, MsgBus: 1382 2025-03-28T09:11:39.879179Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785556193463050:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:39.882044Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019a1/r3tmp/tmp6rrIH0/pdisk_1.dat 2025-03-28T09:11:40.281644Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:40.363705Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:40.363816Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:40.368237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14587, node 2 2025-03-28T09:11:40.596556Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:40.596590Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:40.596598Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:40.596724Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1382 TClient is connected to server localhost:1382 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:41.533370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:41.588907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:41.757248Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:42.047397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:42.174674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:44.554899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785577668301172:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:44.555028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:44.679089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:11:44.723594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:11:44.822850Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785556193463050:2196];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:11:44.822954Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:11:44.847112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:11:44.901914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:11:44.973847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:11:45.055464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:11:45.146874Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785581963268993:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:45.146994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:45.154866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785581963268998:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:45.162377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:11:45.180327Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785581963269000:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:45.236898Z node 2 :TX_PROXY ERROR: Actor# [2:7486785581963269055:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:46.708487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:53.683893Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0b9jj30c6st0ssq3zre4m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDQ3Y2ZmNzAtMjMxM2JiMDYtMjg1NjI1YWItMmY0MjJjMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:11:53.684162Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDQ3Y2ZmNzAtMjMxM2JiMDYtMjg1NjI1YWItMmY0MjJjMjQ=, ActorId: [2:7486785590553204696:2553], ActorState: ExecuteState, TraceId: 01jqe0b9jj30c6st0ssq3zre4m, Create QueryResponse for error on request, msg: 2025-03-28T09:11:55.275807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:11:55.275840Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:57.122344Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-03-28T09:11:57.122383Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2025-03-28T09:12:02.258968Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found 2025-03-28T09:12:02.259080Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2025-03-28T09:12:02.259096Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-03-28T09:12:02.259732Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-03-28T09:12:02.287221Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037920 not found 2025-03-28T09:12:02.287255Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-03-28T09:12:02.287271Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-03-28T09:12:02.287288Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-03-28T09:12:02.287302Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037922 not found 2025-03-28T09:12:02.287316Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037921 not found 2025-03-28T09:12:02.287361Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037930 not found 2025-03-28T09:12:02.287445Z node 2 :HIVE WARN: HIVE#72057594037968897 
Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037923 not found 2025-03-28T09:12:02.414779Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0bhvs60e6fw4862q39xdt, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDQ3Y2ZmNzAtMjMxM2JiMDYtMjg1NjI1YWItMmY0MjJjMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:12:02.414990Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDQ3Y2ZmNzAtMjMxM2JiMDYtMjg1NjI1YWItMmY0MjJjMjQ=, ActorId: [2:7486785590553204696:2553], ActorState: ExecuteState, TraceId: 01jqe0bhvs60e6fw4862q39xdt, Create QueryResponse for error on request, msg: 2025-03-28T09:12:03.696131Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0bk65ckpcgg8kbqkg3yny, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDQ3Y2ZmNzAtMjMxM2JiMDYtMjg1NjI1YWItMmY0MjJjMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:12:03.696443Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDQ3Y2ZmNzAtMjMxM2JiMDYtMjg1NjI1YWItMmY0MjJjMjQ=, ActorId: [2:7486785590553204696:2553], ActorState: ExecuteState, TraceId: 01jqe0bk65ckpcgg8kbqkg3yny, Create QueryResponse for error on request, msg: 2025-03-28T09:12:05.400584Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0bnzj5kaby8pwx8563wp2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDQ3Y2ZmNzAtMjMxM2JiMDYtMjg1NjI1YWItMmY0MjJjMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:12:05.400859Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDQ3Y2ZmNzAtMjMxM2JiMDYtMjg1NjI1YWItMmY0MjJjMjQ=, ActorId: [2:7486785590553204696:2553], ActorState: ExecuteState, TraceId: 01jqe0bnzj5kaby8pwx8563wp2, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] Test command err: 2025-03-28T09:09:21.771132Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486784962013877261:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:21.771190Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:09:21.923380Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486784960010700343:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:21.923422Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:09:21.923005Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486784963034897089:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:21.923092Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2909/0016c9/r3tmp/tmpdAfSms/pdisk_1.dat 2025-03-28T09:09:22.922030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:22.941739Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:22.948513Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:23.666558Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:23.686886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:23.687004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:23.729574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:23.729665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:23.769781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:23.769863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:23.840590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:23.840868Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:09:23.866118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:09:23.904034Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T09:09:23.904814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20116 WaitRootIsUp 'dc-1'... 
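When triaging a slow bring-up like the one above (first entry at 09:09:21, root lookup starting at 09:09:25), it helps to measure the wall-clock span of a log slice directly. A small self-contained sketch, assuming only the timestamp format visible in these lines:

import re
from datetime import datetime, timezone

# Matches the leading timestamp of entries like
# "2025-03-28T09:09:21.771132Z node 1 :METADATA_PROVIDER WARN: ..."
TS_RE = re.compile(r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{6})Z")

def log_span_seconds(text: str) -> float:
    """Wall-clock distance between the first and last timestamp in `text`."""
    stamps = [
        datetime.strptime(m, "%Y-%m-%dT%H:%M:%S.%f").replace(tzinfo=timezone.utc)
        for m in TS_RE.findall(text)
    ]
    if len(stamps) < 2:
        return 0.0
    return (max(stamps) - min(stamps)).total_seconds()

sample = ("2025-03-28T09:09:21.771132Z node 1 :METADATA_PROVIDER WARN: ...\n"
          "2025-03-28T09:09:25.019021Z node 1 :TX_PROXY DEBUG: ...\n")
print(log_span_seconds(sample))  # ~3.25 seconds between first and last entry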
TClient::Ls request: dc-1 2025-03-28T09:09:25.019021Z node 1 :TX_PROXY DEBUG: actor# [1:7486784962013877499:2134] Handle TEvNavigate describe path dc-1 2025-03-28T09:09:25.019096Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784979193747204:2483] HANDLE EvNavigateScheme dc-1 2025-03-28T09:09:25.021786Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486784966308844823:2148], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:09:25.022263Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784966308844968:2225][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486784966308844823:2148], cookie# 1 2025-03-28T09:09:25.032696Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784966308844973:2225][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784966308844970:2225], cookie# 1 2025-03-28T09:09:25.032791Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784962013877154:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784966308844973:2225], cookie# 1 2025-03-28T09:09:25.032802Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784966308844974:2225][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784966308844971:2225], cookie# 1 2025-03-28T09:09:25.032835Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784966308844975:2225][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784966308844972:2225], cookie# 1 2025-03-28T09:09:25.032875Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784966308844973:2225][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784962013877154:2053], cookie# 1 2025-03-28T09:09:25.032906Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784966308844968:2225][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784966308844970:2225], cookie# 1 2025-03-28T09:09:25.032933Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784966308844968:2225][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:09:25.032957Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784962013877160:2059] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784966308844975:2225], cookie# 1 2025-03-28T09:09:25.033030Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486784962013877157:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486784966308844974:2225], cookie# 1 2025-03-28T09:09:25.033326Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784966308844975:2225][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784962013877160:2059], cookie# 1 2025-03-28T09:09:25.033353Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486784966308844974:2225][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784962013877157:2056], cookie# 1 2025-03-28T09:09:25.033377Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784966308844968:2225][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7486784966308844972:2225], cookie# 1 2025-03-28T09:09:25.033400Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784966308844968:2225][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:09:25.033442Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784966308844968:2225][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486784966308844971:2225], cookie# 1 2025-03-28T09:09:25.033458Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486784966308844968:2225][/dc-1] Unexpected sync response: sender# [1:7486784966308844971:2225], cookie# 1 2025-03-28T09:09:25.033530Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486784966308844823:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:09:25.073655Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486784966308844823:2148], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486784966308844968:2225] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:09:25.073788Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486784966308844823:2148], cacheItem# { Subscriber: { Subscriber: [1:7486784966308844968:2225] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-28T09:09:25.083756Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486784979193747205:2484], recipient# [1:7486784979193747204:2483], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:09:25.083857Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784979193747204:2483] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:09:25.154671Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784979193747204:2483] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:09:25.164713Z node 1 :TX_PROXY DEBUG: Actor# [1:7486784979193747204:2483] Handle TEvDescribeSchemeResult Forward to# [1:7486784979193747203:2482] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response ... 03.592204Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [8:7486785048417904483:2122], cacheItem# { Subscriber: { Subscriber: [8:7486785052712872085:2317] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:12:03.592331Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7486785658303261762:2768], recipient# [8:7486785658303261761:2741], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:03.613495Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7486785048417904483:2122], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:03.613686Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [8:7486785048417904483:2122], cacheItem# { Subscriber: { Subscriber: [8:7486785052712872085:2317] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: 
true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:12:03.613797Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7486785658303261764:2769], recipient# [8:7486785658303261763:2742], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:03.623093Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [8:7486785048417904483:2122], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:03.623252Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [8:7486785048417904483:2122], cacheItem# { Subscriber: { Subscriber: [8:7486785091367577869:2372] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:12:03.623343Z node 8 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [8:7486785658303261766:2770], recipient# [8:7486785658303261765:2743], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:03.991180Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7486785037897964490:2219], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:03.991339Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7486785037897964490:2219], cacheItem# { Subscriber: { Subscriber: [7:7486785042192932083:2441] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: 
true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:12:03.991464Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7486785656373258091:3712], recipient# [7:7486785656373258090:2728], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:04.120343Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7486785037897964490:2219], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:04.120509Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7486785037897964490:2219], cacheItem# { Subscriber: { Subscriber: [7:7486785042192932083:2441] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:12:04.120620Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7486785660668225389:3713], recipient# [7:7486785660668225388:2729], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:04.402872Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7486785037897964490:2219], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:04.403065Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7486785037897964490:2219], cacheItem# { Subscriber: { Subscriber: [7:7486785080847638270:2800] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: 
KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:12:04.403214Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7486785660668225394:3714], recipient# [7:7486785660668225393:2730], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:04.991671Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7486785037897964490:2219], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:04.991838Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [7:7486785037897964490:2219], cacheItem# { Subscriber: { Subscriber: [7:7486785042192932083:2441] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:12:04.991931Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [7:7486785660668225402:3718], recipient# [7:7486785660668225401:2731], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexWithPrimaryKeySameComulns-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15454, MsgBus: 8259 2025-03-28T09:11:23.907807Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785486026294011:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:23.918031Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019b3/r3tmp/tmp3ifSCr/pdisk_1.dat 2025-03-28T09:11:25.022819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:25.022929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:25.023065Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:25.100057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-03-28T09:11:25.103007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:25.287132Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.115906s 2025-03-28T09:11:25.287207Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.115999s TServer::EnableGrpc on GrpcPort 15454, node 1 2025-03-28T09:11:25.883410Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:25.883440Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:25.883448Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:25.883586Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8259 TClient is connected to server localhost:8259 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:28.032881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.059253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.307272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.685295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.817733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
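The "propose itself is undo unsafe" warning above repeats once per table the test creates; tallying such repeats by suboperation type makes it easy to see whether a run is unusually noisy. A sketch keyed to the exact wording of these entries (illustrative only; the regex assumes nothing beyond the text shown here):

import re
from collections import Counter

# Keyed to the schemeshard warning wording seen in this log.
WARN_RE = re.compile(r"propose itself is undo unsafe, suboperation type: (\w+)")

def tally_unsafe_proposes(log_text: str) -> Counter:
    """Count 'undo unsafe' schemeshard warnings per suboperation type."""
    return Counter(WARN_RE.findall(log_text))

sample = (
    "WARN: Operation part proposed ok, but propose itself is undo unsafe, "
    "suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0\n"
    "WARN: Operation part proposed ok, but propose itself is undo unsafe, "
    "suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0\n"
    "WARN: Operation part proposed ok, but propose itself is undo unsafe, "
    "suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0\n")
print(tally_unsafe_proposes(sample))
# Counter({'ESchemeOpCreateTable': 2, 'ESchemeOpAlterSubDomain': 1})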
2025-03-28T09:11:29.045250Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785486026294011:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:29.098394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:30.054850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785516091066856:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:30.054943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:30.687779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.736650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.823180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.903333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.943389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.976664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.052859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785520386034678:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.052933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.054001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785520386034683:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.084774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:31.096707Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:11:31.097362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785520386034685:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:31.193271Z node 1 :TX_PROXY ERROR: Actor# [1:7486785520386034746:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:35.148123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 27468, MsgBus: 22400 2025-03-28T09:11:39.219249Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785553208907013:2081];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019b3/r3tmp/tmp5WskNm/pdisk_1.dat 2025-03-28T09:11:39.435490Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:11:39.519633Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:39.544078Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:39.544188Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:39.551965Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27468, node 2 2025-03-28T09:11:39.855222Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:39.855242Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:39.855251Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:39.855362Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22400 TClient is connected to server localhost:22400 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:40.700921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:40.712798Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown ... 
ARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:11:44.940506Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785574683745763:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:45.016648Z node 2 :TX_PROXY ERROR: Actor# [2:7486785578978713114:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:46.231548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.180761Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:47.311842Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:49.063088Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:49.982707Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:50.011022Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:50.939137Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:50.986846Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:51.460085Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:52.295861Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:52.838043Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:53.222206Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:11:53.287237Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 62161, MsgBus: 12580 2025-03-28T09:11:54.813466Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785618892679754:2214];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019b3/r3tmp/tmp1fWcqN/pdisk_1.dat 2025-03-28T09:11:54.917480Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:11:54.994495Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:55.031462Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:55.031572Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:55.034345Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62161, node 3 2025-03-28T09:11:55.131260Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:55.131292Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:55.131301Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:55.131451Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12580 TClient is connected to server localhost:12580 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:55.859704Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:55.867736Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:11:55.886623Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:55.986570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:11:56.239624Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:11:56.354567Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:59.147767Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785640367517819:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:59.147851Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:59.205140Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:59.249324Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:59.321843Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:59.363715Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:59.448041Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:59.500486Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:59.601734Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785640367518340:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:59.601830Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:59.602135Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785640367518345:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:59.608116Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:59.623603Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785640367518347:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:11:59.717740Z node 3 :TX_PROXY ERROR: Actor# [3:7486785640367518403:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:59.802644Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486785618892679754:2214];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:59.802744Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:01.037053Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:02.211892Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:02.235434Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:02.275052Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:03.995490Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:04.056558Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:04.093352Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:05.342923Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:05.378181Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:05.407625Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:06.654760Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:07.355676Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:08.715206Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:09.375565Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:09.973702Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:12:09.973734Z node 3 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DuplicateUpsertInterleaveParams+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25488, MsgBus: 20558 2025-03-28T09:11:40.143378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785556856630006:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:40.166955Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00198e/r3tmp/tmpqxZTMU/pdisk_1.dat 2025-03-28T09:11:40.998377Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:41.007261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:41.007383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-03-28T09:11:41.015387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25488, node 1 2025-03-28T09:11:41.345565Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:41.345588Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:41.345594Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:41.345700Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20558 TClient is connected to server localhost:20558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:42.372221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:42.403783Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:42.420828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:42.672906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:42.861690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:42.967182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:11:45.146882Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785556856630006:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:45.146955Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:45.399692Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785578331468261:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:45.399809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:45.794785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.848846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.934324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.986493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:46.026405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:46.109563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:46.210789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785582626436083:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:46.210845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:46.211156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785582626436088:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:46.214102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:46.229857Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:11:46.230153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785582626436090:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:46.334027Z node 1 :TX_PROXY ERROR: Actor# [1:7486785582626436145:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:47.529545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 811 cpu_time_us: 811 } query_phases { duration_us: 86045 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 64115 affected_shards: 1 } query_phases { duration_us: 1832 cpu_time_us: 1832 } query_phases { duration_us: 8453 cpu_time_us: 6299 } query_phases { duration_us: 8735 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 31 } partitions_count: 1 } table_access { name: "/Root/TestTable/Index/indexImplTable" updates { rows: 1 bytes: 24 } partitions_count: 1 } cpu_time_us: 3050 affected_shards: 2 } compilation { duration_us: 684967 cpu_time_us: 673390 } process_cpu_time_us: 28376 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":27,\"Plans\":[{\"Tables\":[\"TestTable\"],\"PlanNodeId\":26,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_1_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1743153108481,\"TaskId\":1,\"Host\":\"ghrun-mdsjyjltvm\",\"ComputeTimeUs\":73}],\"CpuTimeUs\":600}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\"}],\"BaseTimeMs\":1743153108480,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":600,\"Max\":600,\"Min\":600}},\"CTE Name\":\"precompute_1_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":25,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":24,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Delete\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_3_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Delete-ConstantExpr\",\"Stats\":{\"StageDurationUs\":0,\"PhysicalStageId\":1,\"BaseTimeMs\":1743153108480,\"FinishedTasks\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"CTE Name\":\"precompute_3_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":23,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":22,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_3_0\",\"Name\":\"Iterator\"}],\"Node 
Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1743153108480,\"TaskId\":2,\"Host\":\"ghrun-mdsjyjltvm\",\"ComputeTimeUs\":91}],\"CpuTimeUs\":455}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\"}],\"BaseTimeMs\":1743153108480,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":455,\"Max\":455,\"Min\":455}},\"CTE Name\":\"precompute_3_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":20,\"Subplan Name\":\"CTE precompute_3_0\",\"Plans\":[{\"PlanNodeId\":19,\"Plans\":[{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Operators\":[{\"Inputs\":[{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"},{\"Other\":\"ConstantExpression\"}],\"Iterator\":\"FlatMa ... 72057594046644480 2025-03-28T09:11:54.851008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:54.892527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:54.937472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:55.016914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:55.076703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:55.170609Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785624141770776:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:55.170722Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:55.171159Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785624141770781:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:55.175938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:55.193575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785624141770783:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:55.253410Z node 2 :TX_PROXY ERROR: Actor# [2:7486785624141770839:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:55.532308Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785602666932000:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:55.532387Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:56.435605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7222, MsgBus: 28255 2025-03-28T09:11:58.358388Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785636204640375:2199];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00198e/r3tmp/tmplqaTDG/pdisk_1.dat 2025-03-28T09:11:58.503947Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:11:58.585116Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:58.607825Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:58.607906Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:58.611777Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7222, node 3 2025-03-28T09:11:58.767232Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:58.767256Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:58.767263Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:58.767390Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28255 TClient is connected to server localhost:28255 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:59.273082Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:59.280089Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:11:59.290536Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:59.363049Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:59.554405Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:59.643881Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:02.641118Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785653384511184:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:02.641280Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:02.729841Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:02.773490Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:02.857400Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:02.903779Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:02.989280Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:03.077923Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:03.142875Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785657679479003:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:03.142970Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:03.143236Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785657679479008:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:03.147538Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:03.168141Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785657679479010:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:12:03.253447Z node 3 :TX_PROXY ERROR: Actor# [3:7486785657679479066:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:03.355603Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486785636204640375:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:03.355674Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:04.370327Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:05.958481Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnSelect [GOOD] Test command err: Trying to start YDB, gRPC: 13666, MsgBus: 28357 2025-03-28T09:11:45.247172Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785578690272823:2135];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:45.255413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001976/r3tmp/tmpk6DwJw/pdisk_1.dat 2025-03-28T09:11:46.030874Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:46.033430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:46.033529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:46.043807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13666, node 1 2025-03-28T09:11:46.259275Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:46.259299Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:46.259306Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:46.259428Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28357 TClient is connected to server localhost:28357 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:47.279397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:47.319595Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:47.351714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:47.678806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:48.038296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:48.169617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:50.252256Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785578690272823:2135];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:50.331387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:50.761932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785600165111021:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:50.762078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:51.120938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:51.171861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:51.215941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:51.267324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:51.326992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:51.379158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:51.459815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785604460078835:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:51.459924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:51.460214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785604460078840:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:51.464449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:51.479298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785604460078842:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:51.571449Z node 1 :TX_PROXY ERROR: Actor# [1:7486785604460078898:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:52.745847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 1947, MsgBus: 1420 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001976/r3tmp/tmp7RzU2r/pdisk_1.dat 2025-03-28T09:11:55.597723Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785624756733702:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:55.597784Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:11:55.667869Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:55.683645Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:55.683727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:55.691808Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1947, node 2 2025-03-28T09:11:55.895019Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:55.895050Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:55.895058Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:55.895174Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1420 TClient is connected to server localhost:1420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:56.594872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:11:56.603756Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:56.616648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:56.717500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:56.974071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:57.097746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:59.472686Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785641936604496:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:59.472794Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:59.525032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:59.561416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:59.628216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:59.679143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:59.728538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:59.802861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:59.901363Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785641936605017:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:59.901459Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:59.902003Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785641936605022:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:59.906190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:59.925477Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785641936605024:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:00.026457Z node 2 :TX_PROXY ERROR: Actor# [2:7486785646231572375:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:00.449603Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785624756733702:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:00.449687Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:01.124473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:01.212045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:12:01.298399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:12:03.614927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 waiting... >> KqpIndexMetadata::HandleNotReadyIndex >> SystemView::AuthPermissions_Access [GOOD] >> KqpMultishardIndex::DataColumnWrite+UseSink >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel1 >> KqpIndexes::IndexFilterPushDown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoin-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 29493, MsgBus: 6638 2025-03-28T09:11:21.953350Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785476474116168:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:21.953477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001719/r3tmp/tmp9EkXQ0/pdisk_1.dat 2025-03-28T09:11:22.736425Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:22.777018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:22.777099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:22.787863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29493, node 1 2025-03-28T09:11:23.051856Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:23.051879Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:23.051886Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2025-03-28T09:11:23.052011Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6638 TClient is connected to server localhost:6638 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:24.449781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:26.955639Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785476474116168:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:26.955693Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:28.356712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785506538887852:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:28.356797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:28.362812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785506538887864:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:28.370820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:11:28.382172Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:11:28.382348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785506538887866:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:11:28.474742Z node 1 :TX_PROXY ERROR: Actor# [1:7486785506538887917:2349] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:29.214178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:11:29.375719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:11:29.427908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:29.465563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:29.544978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:29.707105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:29.809094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:29.870061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:29.949169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:11:29.996608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.039054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.077662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.111900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.855389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-03-28T09:11:30.949734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.980024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.021674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.057949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.098776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.131387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.157583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.188682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.222835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.269814Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.300182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.341169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.388035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.429388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.475558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.520438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.606314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.612410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.616854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.618634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.622638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.624538Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.631277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.638137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.645183Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.648078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.653290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.658779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.670462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.676979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.680268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.686292Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.690652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.700096Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.704564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.717408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.731127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.745072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.749273Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.751030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.757048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.762357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.769946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.777153Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.779399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.789347Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.792734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.800380Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.810174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.811738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.817403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.823246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.834136Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.840602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.846853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.852732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.860920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.862984Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.868829Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.873324Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:05.894617Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:06.115016Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0apw8a8j0nx31hmhrkz41", SessionId: ydb://session/3?node_id=1&id=OTkxNTllMTItMzY2MjI2YTktNjYwNjE4MDMtYzc5YTQ3NzU=, Slow query, duration: 32.602045s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 
(\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:12:06.565311Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:12:06.566551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7486785549488569569:3331];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-03-28T09:12:06.567035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:12:06.567743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpIndexes::SelectConcurentTX2 [GOOD] >> KqpIndexes::SelectFromAsyncIndexedTable >> TLocksTest::Range_BrokenLock2 [GOOD] >> TLocksTest::Range_BrokenLock3 >> KqpUniqueIndex::InsertNullInPk [GOOD] >> KqpUniqueIndex::InsertNullInFk >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel1 |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |82.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers >> KqpJoinOrder::TPCHRandomJoinViewJustWorks-ColumnStore >> TColumnShardTestSchema::RebootColdTiersWithStat >> KqpIndexes::CheckUpsertNonEquatableType+NotNull >> KqpIndexes::MultipleSecondaryIndex+UseSink [GOOD] >> KqpIndexes::MultipleSecondaryIndex-UseSink >> TObjectStorageListingTest::MaxKeysAndSharding ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthPermissions_Access [GOOD] Test command err: 2025-03-28T09:09:33.134630Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785013635860052:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:09:33.134676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015af/r3tmp/tmpnwz0zs/pdisk_1.dat 2025-03-28T09:09:34.200018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:09:34.301385Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:09:34.330187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:09:34.330293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:09:34.347809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3099, node 1 2025-03-28T09:09:34.704174Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:09:34.704201Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:09:34.704219Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:09:34.704333Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5341 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:09:35.606538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
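(Annotation: for readability, the three DDL statements quoted with escaped line breaks in the KQP_SLOW_LOG entry of the KqpJoinOrder::FiveWayJoin-ColumnStore test above are re-rendered here as plain YQL; the text is taken verbatim from the log, with only the \n escapes expanded and indentation normalized.

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);
)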
2025-03-28T09:09:35.989386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:09:35.989609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:09:35.989678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-28T09:09:35.989818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-03-28T09:09:35.989950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:09:35.990030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:09:35.990045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:09:35.990107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:09:35.990134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:09:35.992385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-03-28T09:09:35.992560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/Tenant1 2025-03-28T09:09:35.992787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:09:35.992801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:09:35.992937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:09:35.993022Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:09:35.993046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486785017930827961:2398], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-03-28T09:09:35.993058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486785017930827961:2398], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-03-28T09:09:35.993095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at 
schemeshard: 72057594046644480 2025-03-28T09:09:35.993109Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:09:35.993125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710658:0, at tablet# 72057594046644480 2025-03-28T09:09:35.993144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 waiting... 2025-03-28T09:09:35.997508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:09:35.999021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-28T09:09:35.999111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-28T09:09:35.999126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-03-28T09:09:35.999161Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-03-28T09:09:35.999184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-28T09:09:35.999379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-28T09:09:35.999417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-28T09:09:35.999421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-03-28T09:09:35.999429Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-03-28T09:09:35.999437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-28T09:09:35.999465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-03-28T09:09:36.000459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2025-03-28T09:09:36.000555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:09:36.002515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046644480, cookie: 281474976710658 2025-03-28T09:09:36.002558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-03-28T09:09:36.002887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743152976047, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:09:36.003021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743152976047 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:09:36.003047Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-03-28T09:09:36.003183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-03-28T09:09:36.004415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-03-28T09:09:36.004560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:09:36.004602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:09:36.004634Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no IsActiveChild, pathId: : [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-28T09:09:36.011874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644 ... 
{ ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:11.755773Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user6tenant1admin },{ Sid: user2 },{ Sid: user1rootadmin }] Groups: [] } Children [.metadata,Dir1,Dir2,Table0,Tenant1,Tenant2] }] } 2025-03-28T09:12:11.755875Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7486785692701128477:2450], row count: 4, finished: 0 2025-03-28T09:12:11.759057Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:11.760445Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata TableId: [72057594046644480:5:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [workload_manager] }] } 2025-03-28T09:12:11.760502Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7486785692701128477:2450], row count: 0, finished: 0 2025-03-28T09:12:11.774575Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:11.776568Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager TableId: [72057594046644480:6:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [pools] }] } 2025-03-28T09:12:11.776647Z node 31 :SYSTEM_VIEWS DEBUG: Sending 
scan batch, actor: [31:7486785692701128477:2450], row count: 0, finished: 0 2025-03-28T09:12:11.779167Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:11.779554Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools TableId: [72057594046644480:7:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [default] }] } 2025-03-28T09:12:11.779608Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7486785692701128477:2450], row count: 0, finished: 0 2025-03-28T09:12:11.779684Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools/default TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:11.779929Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools/default TableId: [72057594046644480:8:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindResourcePool DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:12:11.780042Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7486785692701128477:2450], row count: 6, finished: 0 2025-03-28T09:12:11.780170Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:11.780390Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) 
Users: [] Groups: [] } Children [] }] } 2025-03-28T09:12:11.780433Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7486785692701128477:2450], row count: 1, finished: 0 2025-03-28T09:12:11.781227Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir2 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:11.783232Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir2 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-03-28T09:12:11.783299Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7486785692701128477:2450], row count: 0, finished: 0 2025-03-28T09:12:11.786851Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:12:11.787235Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [72057594046644480:4:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:12:11.787296Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7486785692701128477:2450], row count: 0, finished: 0 2025-03-28T09:12:11.787781Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7486785692701128477:2450], owner: [31:7486785692701128473:2448], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-03-28T09:12:11.790793Z node 31 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [31:7486785628276616758:2144], database# , query hash# 12107705915200741666, cpu time# 164369 2025-03-28T09:12:11.791460Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153131746, txId: 281474976710692] shutting down 2025-03-28T09:12:11.814429Z node 33 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:12:11.816700Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 34 2025-03-28T09:12:11.817482Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:12:11.817625Z node 31 :HIVE WARN: HIVE#72057594037968897 
THive::TTxStatus(status=2 node=Connected) - killing node 33 2025-03-28T09:12:11.817907Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:12:11.818027Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 35 2025-03-28T09:12:11.818322Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:12:11.814510Z node 35 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:12:11.821286Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 32 2025-03-28T09:12:11.821825Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:12:11.837479Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7486785637591073678:2107], Type=268959746 2025-03-28T09:12:11.837578Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7486785637591073678:2107], Type=268959746 2025-03-28T09:12:11.837624Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7486785637591073678:2107], Type=268959746 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::IndexFilterPushDown [GOOD] Test command err: Trying to start YDB, gRPC: 28135, MsgBus: 1774 2025-03-28T09:11:48.696506Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785592519651382:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:48.696579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001974/r3tmp/tmpl3rJYf/pdisk_1.dat 2025-03-28T09:11:49.257201Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:49.261002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:49.261091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:49.265633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28135, node 1 2025-03-28T09:11:49.535288Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:49.535314Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:49.535321Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:49.535441Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1774 TClient is connected to server localhost:1774 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:50.468817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:50.489778Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:50.503882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:50.686398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:50.925856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:51.022668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:53.023744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785613994489633:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:53.023859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:53.383622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:53.466148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:53.547592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:53.582350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:53.617340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:53.679980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:53.704630Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785592519651382:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:53.704687Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:53.748157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785613994490149:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:53.748240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:53.748513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785613994490155:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:53.752009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:53.765282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785613994490157:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:53.829220Z node 1 :TX_PROXY ERROR: Actor# [1:7486785613994490210:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:55.096639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:55.959835Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486785622584425408:2525], status: GENERIC_ERROR, issues:
:3:46: Error: mismatched input 'VIEW' expecting {'(', DEFAULT, DISCARD, FROM, PROCESS, REDUCE, SELECT, VALUES} 2025-03-28T09:11:55.961083Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGJiYjdjYjMtZjM2MGY0ZWEtODQ0ZjA4NmEtNTcwM2JlMjM=, ActorId: [1:7486785622584425070:2492], ActorState: ExecuteState, TraceId: 01jqe0bcrq9at6m4ge1w3fre69, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:11:55.981739Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486785622584425412:2527], status: GENERIC_ERROR, issues:
:3:46: Error: mismatched input 'VIEW' expecting {'(', DEFAULT, DISCARD, FROM, PROCESS, REDUCE, SELECT, VALUES} 2025-03-28T09:11:55.982498Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGJiYjdjYjMtZjM2MGY0ZWEtODQ0ZjA4NmEtNTcwM2JlMjM=, ActorId: [1:7486785622584425070:2492], ActorState: ExecuteState, TraceId: 01jqe0bct74aknhv1tktfm3gq7, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:11:56.028934Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486785626879392717:2530], status: GENERIC_ERROR, issues:
:3:41: Error: mismatched input 'VIEW' expecting {ON, SET} 2025-03-28T09:11:56.030130Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGJiYjdjYjMtZjM2MGY0ZWEtODQ0ZjA4NmEtNTcwM2JlMjM=, ActorId: [1:7486785622584425070:2492], ActorState: ExecuteState, TraceId: 01jqe0bcvg2zr12mfzpxagrhsh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:11:56.054886Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486785626879392724:2532], status: GENERIC_ERROR, issues:
:3:46: Error: mismatched input 'VIEW' expecting {, ';'} 2025-03-28T09:11:56.056269Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGJiYjdjYjMtZjM2MGY0ZWEtODQ0ZjA4NmEtNTcwM2JlMjM=, ActorId: [1:7486785622584425070:2492], ActorState: ExecuteState, TraceId: 01jqe0bcw7ekx5ajdyptd366nf, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 27019, MsgBus: 31855 2025-03-28T09:11:57.286970Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785629603234658:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:57.287366Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001974/r3tmp/tmpcRTJQj/pdisk_1.dat 2025-03-28T09:11:57.529603Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:57.538139Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:57.538219Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:57.540205Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27019, node 2 2025-03-28T09:11:57.721608Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:57.721633Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:57.721640Z node 2 :NET_CLASSIFIER WARN: failed to initialize ... is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:01.784251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:01.874002Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785646783106138:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:01.874122Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:01.874488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785646783106143:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:01.881089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:01.902249Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785646783106145:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:12:01.988279Z node 2 :TX_PROXY ERROR: Actor# [2:7486785646783106201:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:02.323569Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785629603234658:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:02.323697Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:03.219486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:03.333463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:12:03.555422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21177, MsgBus: 8465 2025-03-28T09:12:07.351662Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785673524717214:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:07.351794Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001974/r3tmp/tmp0Q0znY/pdisk_1.dat 2025-03-28T09:12:07.595866Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:07.601344Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:07.601439Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:07.603232Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21177, node 3 2025-03-28T09:12:07.707484Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:07.707511Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:07.707519Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:07.707649Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8465 TClient is connected to server localhost:8465 WaitRootIsUp 'Root'... 
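Note on the GENERIC_ERROR compile failures earlier in this block (mismatched input 'VIEW'): in YQL the VIEW clause names a secondary index and is only accepted in a SELECT's FROM clause, so the expected-token sets the parser reports ({'(', DEFAULT, DISCARD, FROM, PROCESS, REDUCE, SELECT, VALUES} after INSERT INTO, {ON, SET} after the target table in UPDATE) are what it offers instead of VIEW in DML positions. A minimal sketch of the statement shapes involved; TestTable and IndexName are illustrative assumptions, not names taken from this log:

    -- Valid: reading through a secondary index.
    SELECT * FROM TestTable VIEW IndexName WHERE SomeColumn = "x";

    -- Invalid: the DML grammar has no VIEW clause, so shapes like these
    -- fail to compile with "mismatched input 'VIEW'".
    INSERT INTO TestTable VIEW IndexName (Key, Value) VALUES ("a", "b");
    UPDATE TestTable VIEW IndexName SET Value = "b" WHERE Key = "a";
    DELETE FROM TestTable VIEW IndexName WHERE Key = "a";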
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:08.329232Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:08.343646Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:12:08.364004Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:08.546674Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:08.779863Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:08.876449Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:11.717232Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785690704588175:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:11.717325Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:11.775757Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:11.821322Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:11.887725Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:11.946185Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:11.999987Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.050571Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.150898Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785694999555992:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:12.151021Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:12.151097Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785694999555997:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:12.154416Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:12.164465Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785694999555999:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:12:12.244744Z node 3 :TX_PROXY ERROR: Actor# [3:7486785694999556054:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:12.351110Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486785673524717214:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:12.351200Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:13.706737Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.802939Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.861005Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpJoin::IdxLookupLeftPredicate [GOOD] >> KqpIndexes::UpsertWithoutExtraNullDelete+UseSink |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |82.2%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut >> KqpUniqueIndex::UpdateFkSameValue [GOOD] >> KqpUniqueIndex::UpdateFkPkOverlap >> KqpMultishardIndex::SecondaryIndexSelectNull [GOOD] >> KqpMultishardIndex::SecondaryIndexSelect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupLeftPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 31610, MsgBus: 18771 2025-03-28T09:12:10.065230Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785685392481219:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:10.067959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00170a/r3tmp/tmp38vs4Z/pdisk_1.dat 2025-03-28T09:12:10.664800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:10.664887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:10.676717Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:10.689269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31610, node 1 2025-03-28T09:12:10.847248Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:10.847266Z node 1 :NET_CLASSIFIER WARN: will try to initialize from 
file: (empty maybe) 2025-03-28T09:12:10.847272Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:10.847372Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18771 TClient is connected to server localhost:18771 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:11.750267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:11.774397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:11.988667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:12.191651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:12.297843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:14.387619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785702572352040:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:14.387751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:14.731767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:14.787473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:14.862295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:14.958597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:15.020820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:15.043057Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785685392481219:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:15.043104Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:15.095217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:15.170821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785706867319854:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:15.170952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:15.171239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785706867319859:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:15.178820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:15.206894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785706867319861:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:15.270476Z node 1 :TX_PROXY ERROR: Actor# [1:7486785706867319916:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:16.676005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:16.725827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:12:16.774755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 |82.3%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull [GOOD] >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view >> KqpIndexes::SecondaryIndexUpdateOnUsingIndex >> KqpJoinOrder::CanonizedJoinOrderTPCH7 [GOOD] >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool [GOOD] >> KqpJoin::RightSemiJoin_FullScan >> KqpIndexes::ExplainCollectFullDiagnostics >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit >> YdbSdkSessions::SessionsServerLimit [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 19186, MsgBus: 24737 2025-03-28T09:12:03.799182Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785655930884279:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:03.799549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00170f/r3tmp/tmpgHy9FY/pdisk_1.dat 2025-03-28T09:12:04.576077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:04.576155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:04.582526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:04.621270Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19186, node 1 2025-03-28T09:12:04.831170Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:04.831191Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:04.834812Z node 1 :NET_CLASSIFIER WARN: failed to initialize from 
file: (empty maybe) 2025-03-28T09:12:04.854240Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24737 TClient is connected to server localhost:24737 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:06.032341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:06.055430Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:06.069019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:06.265095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:06.501005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:06.600482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:08.696883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785677405722381:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:08.697028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:08.786043Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785655930884279:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:08.786143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:09.114110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.195499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.239925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.297620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.345243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.387095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.488413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785681700690195:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:09.488493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:09.488697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785681700690200:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:09.495742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:09.531205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785681700690202:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:09.635158Z node 1 :TX_PROXY ERROR: Actor# [1:7486785681700690261:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:10.948557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:11.071986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15848, MsgBus: 4427 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00170f/r3tmp/tmp3FNcAc/pdisk_1.dat 2025-03-28T09:12:13.354809Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:12:13.408107Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:13.449673Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:13.449742Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:13.451175Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15848, node 2 2025-03-28T09:12:13.621076Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:13.621100Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:13.621106Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:13.621245Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4427 TClient is connected to server localhost:4427 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:14.536276Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
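Note on this block's scenario (KqpIndexLookupJoin::CheckCastUint32ToUint16-StreamLookupJoin+NotNull): the name suggests a lookup join whose key columns differ in integer width, run with the stream-lookup strategy disabled and NOT NULL keys. A hedged sketch of such a join; all table and column names are assumptions, not taken from this log:

    -- Join keys of different widths only match after a widening cast.
    SELECT l.Value
    FROM LeftTable AS l
    JOIN RightTable AS r
      ON l.Key32 = CAST(r.Key16 AS Uint32);  -- Uint16 widened to Uint32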
2025-03-28T09:12:14.546134Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:14.559183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:14.705606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:12:14.936150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:15.046799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:17.721436Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785715649816704:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:17.721529Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:17.773559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:17.865633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:17.934469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:17.982784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:18.021852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:18.091233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:18.175133Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785719944784513:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:18.175216Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:18.175595Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785719944784518:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:18.179399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:18.197838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785719944784520:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:18.265123Z node 2 :TX_PROXY ERROR: Actor# [2:7486785719944784576:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:19.632696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:19.705194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpIndexes::SelectFromAsyncIndexedTable [GOOD] >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |82.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |82.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |82.3%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 25320, MsgBus: 6500 2025-03-28T09:12:04.756120Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785663112346665:2124];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:04.771029Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001968/r3tmp/tmp2zzwm4/pdisk_1.dat 2025-03-28T09:12:05.353827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:05.353994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:05.368655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:05.434446Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25320, node 1 2025-03-28T09:12:05.724854Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:05.724878Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:05.724890Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:05.725003Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6500 TClient is connected to server localhost:6500 WaitRootIsUp 'Root'... 
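Note on the pool-bootstrap sequence that repeats in every test block above: each freshly created database has no workload-manager pool yet, so the first query logs the TPoolFetcherActor/Service NOT_FOUND warnings, TPoolCreatorActor then creates /Root/.metadata/workload_manager/pools/default (the ESchemeOpCreateResourcePool suboperation), and the TX_PROXY "path exist, request accepts it" message is the benign outcome when a concurrent creator wins the race. What the creator provisions is roughly equivalent to the YQL below; the WITH values are placeholders, not read from this log:

    -- Sketch of the default pool the workload service auto-creates.
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = -1,  -- placeholder: unlimited
        QUEUE_SIZE = -1               -- placeholder: unbounded queue
    );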
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:06.621279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:06.647579Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:06.652974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:06.857327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:07.056401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:07.146678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:09.240258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785684587184852:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:09.240403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:09.566895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.620578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.673909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.747750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.755509Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785663112346665:2124];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:09.755572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:09.784193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.857090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.952109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785684587185379:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:09.952218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:09.953397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785684587185385:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:09.958029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:09.970558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785684587185387:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:10.058479Z node 1 :TX_PROXY ERROR: Actor# [1:7486785688882152737:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:11.464023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:12:12.524276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710675:1, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14123, MsgBus: 16216 2025-03-28T09:12:14.064249Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785703367465654:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:14.064279Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001968/r3tmp/tmpo6eHZu/pdisk_1.dat 2025-03-28T09:12:14.463207Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:14.477782Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:14.477874Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:14.480084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14123, node 2 2025-03-28T09:12:14.655609Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:14.655627Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:14.655635Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:14.655738Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16216 TClient is connected to server localhost:16216 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
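Note on this block's scenario (KqpUniqueIndex::InsertComplexFkPkOverlapDuplicate): the name suggests inserting rows whose unique-index key repeats while the index columns overlap the primary key. A hedged reconstruction of the kind of schema and conflicting insert such a test would use; the schema, the names, and the GLOBAL UNIQUE SYNC syntax here are assumptions, not taken from this log:

    CREATE TABLE TestTable (
        Key1 Utf8,
        Key2 Int64,
        Fk Utf8,
        Value Utf8,
        PRIMARY KEY (Key1, Key2),
        -- Unique index whose columns overlap the primary key.
        INDEX UniqIndex GLOBAL UNIQUE SYNC ON (Fk, Key2)
    );
    INSERT INTO TestTable (Key1, Key2, Fk, Value) VALUES
        ("a", 1, "fk1", "v1"),
        ("b", 1, "fk1", "v2");  -- different PK, duplicate unique key -> rejected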
2025-03-28T09:12:15.488078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:15.504229Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:15.549036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:15.687723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:15.883252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:15.982567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:19.064753Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785703367465654:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:19.064849Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:19.139905Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785724842303919:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:19.139997Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:19.333890Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:19.397924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:19.456843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:19.529616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:19.575354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:19.653926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:19.769111Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785724842304448:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:19.769203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:19.769548Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785724842304453:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:19.774027Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:19.796280Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785724842304455:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:19.889502Z node 2 :TX_PROXY ERROR: Actor# [2:7486785724842304510:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:21.345885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... >> KqpMultishardIndex::DataColumnWriteNull >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors >> KqpIndexes::CheckUpsertNonEquatableType+NotNull [GOOD] >> KqpIndexes::CheckUpsertNonEquatableType-NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH7 [GOOD] Test command err: Trying to start YDB, gRPC: 20400, MsgBus: 31006 2025-03-28T09:10:30.646077Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785259041180706:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:30.655619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001726/r3tmp/tmp3juhU6/pdisk_1.dat 2025-03-28T09:10:31.330006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:31.330084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:31.338676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:10:31.352726Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20400, node 1 2025-03-28T09:10:31.636922Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:10:31.636953Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:10:31.636962Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:10:31.637060Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31006 TClient is connected to server localhost:31006 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:10:32.691113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:34.988818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785276221050414:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:34.988971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:34.989594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785276221050426:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:34.993455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:10:35.019024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785276221050428:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:10:35.082618Z node 1 :TX_PROXY ERROR: Actor# [1:7486785280516017775:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:10:35.467869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:10:35.638970Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785259041180706:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:35.639066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:35.809985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785280516018040:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:10:35.810177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785280516018040:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:10:35.810486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785280516018040:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:10:35.810619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785280516018040:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:10:35.810755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785280516018040:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:10:35.810993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785280516018040:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:10:35.811171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785280516018040:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:10:35.811328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785280516018040:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:10:35.811463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785280516018040:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:10:35.811586Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7486785280516018040:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:10:35.811738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785280516018040:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:10:35.811851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785280516018040:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:10:35.811956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486785280516018038:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:10:35.812005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486785280516018038:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:10:35.812186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486785280516018038:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:10:35.812339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486785280516018038:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:10:35.812453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486785280516018038:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:10:35.812562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486785280516018038:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:10:35.812673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486785280516018038:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:10:35.812790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486785280516018038:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:10:35.812941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486785280516018038:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:10:35.813099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486785280516018038:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:10:35.813237Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7486785280516018038:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:10:35.813358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486785280516018038:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:10:35.847565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785280516018045:2354];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.c ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:57.975064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:57.983174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:57.983539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:57.993383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039195;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:57.995830Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.005061Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.006600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.010850Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.012678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.017212Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.019243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.031448Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.034413Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.042183Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.052858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.053037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.063780Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.063908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.071180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.078975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039256;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.081357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.085022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.090284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.091470Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.097137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.097260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.103359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.103729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.113152Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.116671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.121742Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.122228Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.128247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.130995Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.135005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.141531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039193;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.141693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.148633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.154908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.155208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.160563Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.164118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.167672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039203;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.169801Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.176886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:58.399233Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0aa0e95rk4x05874p4k47", SessionId: ydb://session/3?node_id=1&id=OWExYWIzYjgtMTYzZjFhZDYtNDY4NWE3YmItZGYxNTNlNDA=, Slow query, duration: 38.063709s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:11:58.821423Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:58.821922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:58.822596Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486785589753719527:10985];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-28T09:11:58.823004Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::GeneralPrioritiesBug3 [GOOD] >> KqpUniqueIndex::InsertNullInFk [GOOD] >> KqpJoinOrder::TPCDS16+ColumnStore [GOOD] >> KqpIndexMetadata::HandleNotReadyIndex [GOOD] >> KqpIndexMetadata::TestNoReadFromMainTableBeforeJoin |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |82.3%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertNullInFk [GOOD] Test command err: Trying to start YDB, gRPC: 22212, MsgBus: 31320 2025-03-28T09:12:07.979689Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785675658267055:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:07.983891Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001963/r3tmp/tmpuNIHXt/pdisk_1.dat 2025-03-28T09:12:08.765430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-03-28T09:12:08.765533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:08.784092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:08.822183Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22212, node 1 2025-03-28T09:12:08.963018Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:08.963051Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:08.963058Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:08.963171Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31320 TClient is connected to server localhost:31320 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:09.764195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:09.812342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:10.169383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:10.407914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:10.539916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:12.460049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785697133105325:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:12.460148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:12.749905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.784600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.864488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.955805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.984243Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785675658267055:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:12.984313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:13.019556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.102231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.214823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785701428073152:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.214899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.215056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785701428073157:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.219237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:13.245006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785701428073159:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:13.336853Z node 1 :TX_PROXY ERROR: Actor# [1:7486785701428073214:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:14.499922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:16.952267Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486785714312976677:2615], TxId: 281474976710681, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZjA0YThiZTctMzEzZjJhYTItZGQzZThjM2ItMjFiZGRmNDU=. TraceId : 01jqe0c0w3cpay42rhveew3sk6. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:12:16.952729Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486785714312976678:2616], TxId: 281474976710681, task: 2. Ctx: { TraceId : 01jqe0c0w3cpay42rhveew3sk6. SessionId : ydb://session/3?node_id=1&id=ZjA0YThiZTctMzEzZjJhYTItZGQzZThjM2ItMjFiZGRmNDU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486785714312976674:2551], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:12:16.953095Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjA0YThiZTctMzEzZjJhYTItZGQzZThjM2ItMjFiZGRmNDU=, ActorId: [1:7486785705723041567:2551], ActorState: ExecuteState, TraceId: 01jqe0c0w3cpay42rhveew3sk6, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 16759, MsgBus: 29272 2025-03-28T09:12:17.972819Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785717983736101:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:17.972888Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001963/r3tmp/tmpmn8VhE/pdisk_1.dat 2025-03-28T09:12:18.143136Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:18.176803Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:18.176884Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:18.181867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16759, node 2 2025-03-28T09:12:18.283308Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:18.283337Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:18.283344Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:18.283475Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29272 TClient is connected to server localhost:29272 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:12:19.035405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:12:19.070812Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:12:19.100437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:19.199640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:19.450535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:19.526099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:22.071806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785739458574343:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:22.071888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:22.173004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:22.229625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:22.273551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:22.323524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:22.371691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:22.411317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:22.478781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785739458574856:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:22.478876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:22.479180Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785739458574861:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:22.487164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:22.498035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785739458574863:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:12:22.563557Z node 2 :TX_PROXY ERROR: Actor# [2:7486785739458574917:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:22.980443Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785717983736101:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:22.980530Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:23.814611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... >> KqpIndexes::InnerJoinWithNonIndexWherePredicate >> KqpIndexes::MultipleSecondaryIndex-UseSink [GOOD] >> KqpIndexes::MultipleSecondaryIndexWithSameComulns+UseSink |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |82.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |82.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut >> KqpUniqueIndex::ReplaceFkAlreadyExist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] Test command err: 2025-03-28T09:06:50.512665Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:06:50.620928Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:06:50.650417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:06:50.650853Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:06:50.663020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:06:50.663256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:06:50.663482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:06:50.663584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:06:50.663706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:06:50.663869Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:06:50.663984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:06:50.664086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:06:50.664186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:06:50.664295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:06:50.664422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:06:50.664530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:06:50.704420Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:06:50.704725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:06:50.704778Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:06:50.704949Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:06:50.705115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:06:50.705212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:06:50.705258Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:06:50.705362Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:06:50.705443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:06:50.705490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:06:50.705517Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:06:50.705727Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:06:50.705792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:06:50.705831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:06:50.705860Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:06:50.705996Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:06:50.706049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:06:50.706086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:06:50.706118Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:06:50.706227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:06:50.706276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:06:50.706321Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:06:50.706365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:06:50.706398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:06:50.706429Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:06:50.706802Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-03-28T09:06:50.706898Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=46; 2025-03-28T09:06:50.706996Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=51; 2025-03-28T09:06:50.707112Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=52; 2025-03-28T09:06:50.707271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:06:50.707323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:06:50.707355Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:06:50.707768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:06:50.707811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:06:50.707859Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:06:50.708008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:06:50.708055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:06:50.708093Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:06:50.708281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:06:50.708325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:06:50.708352Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:06:50.708488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:06:50.708535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:06:50.708577Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
[~200 repetitive per-chunk entries elided: column_id:9 (chunk_idx 9-75), column_id:7 (chunk_idx 0-55), column_id:1 (chunk_idx 0-35), column_id:5 (chunk_idx 0-35); every entry has the form column_id:N;chunk_idx:M;blob_range:[NO_BLOB:0:S] with S between 2664 and 8568];;;;switched=(portion_id:55;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2117976;index_
size:24;meta:((produced=INSERTED;)););(portion_id:54;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2549080;index_size:24;meta:((produced=SPLIT_COMPACTED;)););; 2025-03-28T09:12:23.290272Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11195:12822];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-28T09:12:23.292752Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11195:12822];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug3 [GOOD] Test command err: Trying to start YDB, gRPC: 22672, MsgBus: 16629 2025-03-28T09:11:41.595100Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785563831981616:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:41.599406Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001716/r3tmp/tmpgL0yLU/pdisk_1.dat 2025-03-28T09:11:41.974759Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:42.021075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:42.021187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22672, node 1 2025-03-28T09:11:42.041380Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:42.155180Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:42.155204Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:42.155213Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:42.155315Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16629 TClient is connected to server localhost:16629 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:42.741660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:42.755314Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:44.900941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785576716884112:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:44.901045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785576716884104:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:44.901354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:44.905401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:11:44.927829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785576716884118:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:11:45.019516Z node 1 :TX_PROXY ERROR: Actor# [1:7486785581011851465:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:45.396311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.523876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.559827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.587175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.651235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.826936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.910952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.939949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:45.968938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:11:46.040208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:11:46.093149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:11:46.149189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:46.208438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:11:46.594546Z node 1 :METADATA_PROVIDER ERROR: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785563831981616:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:46.594623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:46.911844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:11:46.955408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:11:46.988464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.019620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.055469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.123374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.159269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.204194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.271623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.299593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.331235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.364914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.399570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.452912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.482723Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.509498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:11:47.545691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.607805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.613243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.613246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.618955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.619018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.624487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.624693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.630340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.630408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.636257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.636270Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.641855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.641969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.648691Z node 
1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.648857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.654813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.655289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.660825Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.661516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.666855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.668130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.672820Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.674327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.686238Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.689078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.689890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.695658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.696355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.702791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
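The long run of finished_tx;tx_id=281474976710714 records in this stretch is, by all appearances, one acknowledgement per column-shard tablet: the DDL quoted in the KQP_SLOW_LOG record further below creates column tables with AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240, so roughly 240 tablets commit the same transaction. Unescaped for readability, the statement text carried by that slow-query record reads:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);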
2025-03-28T09:12:19.702791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.713278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.713278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.719854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.720831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.725507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.726452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.732568Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.734269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.738613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.739623Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.744258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.749255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.749269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.759837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.759854Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:19.838962Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0b65zfh9tpnwa9ce7k2b9", SessionId: ydb://session/3?node_id=1&id=ODRlMmYwZWMtMjY0MTI0ODUtZDQyN2JiMjktZTMxODNkMzY=, Slow query, duration: 30.654902s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:12:20.220563Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:12:20.221143Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:12:20.222242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7486785658321282174:4973];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038170; 2025-03-28T09:12:20.222723Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool [GOOD] >> KqpIndexes::SecondaryIndexUpdateOnUsingIndex [GOOD] >> KqpIndexes::SecondaryIndexSelectUsingScripting >> KqpIndexes::UpsertWithoutExtraNullDelete+UseSink [GOOD] >> KqpIndexes::UpsertWithoutExtraNullDelete-UseSink >> KqpIndexes::ExplainCollectFullDiagnostics [GOOD] >> KqpIndexes::ForbidDirectIndexTableCreation >> KqpMultishardIndex::SortedRangeReadDesc >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::SessionsServerLimitWithSessionPool [GOOD] Test command err: 2025-03-28T09:12:12.279466Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785694077272505:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:12.279544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cd0/r3tmp/tmp1Nz2ab/pdisk_1.dat 2025-03-28T09:12:13.366329Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:12:13.878082Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-03-28T09:12:13.888289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:13.888428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:13.896507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25574, node 1 2025-03-28T09:12:15.090155Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:15.090173Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:15.090181Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:15.090321Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26165 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:17.051912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:17.282805Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785694077272505:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:17.282890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:17.615849Z node 1 :KQP_PROXY WARN: TraceId: "01jqe0c1yfebk5a0nbnpznyetb", Active sessions limit exceeded, maximum allowed: 2
: Error: Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:17.678819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785715552110079:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:17.678901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785715552110091:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:17.678944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:17.691413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:12:17.735014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785715552110093:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:12:17.809159Z node 1 :TX_PROXY ERROR: Actor# [1:7486785715552110163:2680] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:22.122028Z node 1 :KQP_PROXY WARN: TraceId: "01jqe0c6b9fe9t92nksvxkjjrc", Active sessions limit exceeded, maximum allowed: 2
: Error: Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:24.318753Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486785745700099015:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:24.319420Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cd0/r3tmp/tmp7PMbpy/pdisk_1.dat 2025-03-28T09:12:24.789755Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:24.831450Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:24.831521Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:24.839793Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27154, node 4 2025-03-28T09:12:25.055546Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:25.055571Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:25.055578Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:25.055683Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1178 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:25.483735Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
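This test caps the server at two active sessions, so in the records that follow the KQP proxy rejects every session-creation attempt beyond the cap with "Active sessions limit exceeded, maximum allowed: 2". A minimal sketch of such a server-side cap, assuming a hypothetical SessionLimiter rather than the actual KQP proxy code:

    // Sketch of an active-session cap (illustrative only, not YDB source).
    #include <atomic>
    #include <cstdio>
    #include <optional>
    #include <string>

    class SessionLimiter {
    public:
        explicit SessionLimiter(int maxActive) : MaxActive(maxActive) {}

        // Returns a session id, or std::nullopt after logging the same
        // rejection message the records below show.
        std::optional<std::string> CreateSession() {
            int cur = Active.load();
            while (cur < MaxActive) {
                if (Active.compare_exchange_weak(cur, cur + 1))
                    return "session-" + std::to_string(cur + 1);
            }
            std::printf("Active sessions limit exceeded, maximum allowed: %d\n", MaxActive);
            return std::nullopt;
        }

        void CloseSession() { Active.fetch_sub(1); }

    private:
        const int MaxActive;
        std::atomic<int> Active{0};
    };

    int main() {
        SessionLimiter limiter(2);
        for (int i = 0; i < 4; ++i)
            limiter.CreateSession(); // third and fourth attempts are rejected
        return 0;
    }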
2025-03-28T09:12:28.735768Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0ccsz6nysqn6weadkn33f", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:28.751659Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0cctf4m1h22ck2y9j2hcy", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:28.762128Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0ccts0rtv33cn528gtgbe", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:28.777203Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0ccv8c3a0635qf19tt952", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:28.784741Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0ccvga7qs7qpx10chcyh0", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:28.794823Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0ccvt3gq8a4eaxgec9e06", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:28.830093Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486785762879969276:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:28.830219Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:28.830819Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486785762879969288:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:28.834902Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:12:28.881112Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486785762879969290:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:12:28.967293Z node 4 :TX_PROXY ERROR: Actor# [4:7486785762879969362:2672] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:29.083003Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0cd4t3v8hyx5ryc5cew1y", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:29.094648Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0cd5665gnd1ez0cc8qsb8", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:29.103267Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0cd5f1pgcp8054egzksbp", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:29.123921Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0cd63ew3ddfh5awhx5kac", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:29.145094Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0cd6r1zqacg5qfzyr58sw", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:29.164093Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0cd7b9a2k3wtd8yafqxvn", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:29.184902Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0cd80f1g27fw4gqgndk4t", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:29.196832Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0cd8ccjqem1zyg8wjzfgy", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:29.211851Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0cd8v8ctafae4cr4yhczn", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:29.223648Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0cd97b73q60mh83v172qt", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:29.233968Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0cd9hfkarevwgb2jvyvgr", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:29.241630Z node 4 :KQP_PROXY WARN: TraceId: "01jqe0cd9s2vhgdyp82x6b4vtg", Active sessions limit exceeded, maximum allowed: 2 2025-03-28T09:12:29.298925Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486785745700099015:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:29.299008Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; 
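The HotTiersAfterTtl output that follows drives tiered TTL eviction: the WaitEmptyAfter=...;Tiers={{Column=timestamp;EvictAfter=...;Name=tier0;Codec=};} descriptors just below give each tier an eviction boundary on the timestamp column. One plausible reading of such boundaries, as a self-contained sketch with illustrative tier names and ages (not the YDB implementation):

    // Sketch: pick the coldest tier a row has aged into (illustrative only).
    #include <cstdio>
    #include <string>
    #include <vector>

    struct Tier {
        std::string name;     // "tier0", "tier1"
        double evictAfterSec; // move rows here once they are at least this old
    };

    // tiers sorted by evictAfterSec ascending (warm -> cold); returns the
    // coldest tier whose boundary the row's age has crossed, or "" if still hot.
    std::string PickTier(const std::vector<Tier>& tiers, double rowAgeSec) {
        std::string picked;
        for (const Tier& t : tiers)
            if (rowAgeSec >= t.evictAfterSec)
                picked = t.name;
        return picked;
    }

    int main() {
        std::vector<Tier> tiers = {{"tier0", 3600.0}, {"tier1", 86400.0}};
        std::printf("%s\n", PickTier(tiers, 7200.0).c_str());   // prints tier0
        std::printf("%s\n", PickTier(tiers, 172800.0).c_str()); // prints tier1
        return 0;
    }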
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=143153656.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143153656.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143153656.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143153656.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153656.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143153656.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143153656.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123152456.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123153656.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123153656.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123152456.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123152456.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123152456.000000s;Name=;Codec=}; 2025-03-28T09:10:58.882099Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:11:00.050947Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:11:00.087116Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:11:00.087601Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:11:00.107632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:11:00.107924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:11:00.108194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:11:00.108338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:11:00.108453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:11:00.108578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:11:00.108692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:11:00.108836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:11:00.108964Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:11:00.109122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:11:00.109248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:11:00.109370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:11:00.180545Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:11:00.180966Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:11:00.181056Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:11:00.181304Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:11:00.190954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:11:00.191106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:11:00.191175Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:11:00.191305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:11:00.191431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:11:00.191491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:11:00.191530Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:11:00.191703Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:11:00.191770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:11:00.191820Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:11:00.191853Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:11:00.191940Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:11:00.191993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:11:00.192037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:11:00.192068Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:11:00.192149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:11:00.192191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:11:00.192221Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:11:00.192275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:11:00.192316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:11:00.192360Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:11:00.192851Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2025-03-28T09:11:00.192948Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-03-28T09:11:00.197401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=4362; 2025-03-28T09:11:00.197629Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=116; 2025-03-28T09:11:00.197907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:11:00.197989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:11:00.198037Z 
node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:11:00.198287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:11:00.198337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:11:00.198374Z node 1 :TX_COLUMNSHARD NOTICE: t ... d=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-28T09:12:29.871344Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:12:29.871412Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:12:29.871487Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:12:29.871552Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:12:29.871688Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:12:29.872021Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-03-28T09:12:29.872183Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:12:29.872385Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:12:29.872461Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:12:29.873008Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-28T09:12:29.873117Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:12:29.873709Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1970:3975];trace_detailed=; 2025-03-28T09:12:29.874201Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-28T09:12:29.874469Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-28T09:12:29.874677Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:12:29.874992Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:12:29.875499Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:12:29.875635Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-03-28T09:12:29.875782Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:12:29.875835Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1970:3975] finished for tablet 9437184 2025-03-28T09:12:29.876368Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1969:3974];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743153149873629,"name":"_full_task","f":1743153149873629,"d_finished":0,"c":0,"l":1743153149875917,"d":2288},"events":[{"name":"bootstrap","f":1743153149873851,"d_finished":1183,"c":1,"l":1743153149875034,"d":1183},{"a":1743153149875467,"name":"ack","f":1743153149875467,"d_finished":0,"c":0,"l":1743153149875917,"d":450},{"a":1743153149875438,"name":"processing","f":1743153149875438,"d_finished":0,"c":0,"l":1743153149875917,"d":479},{"name":"ProduceResults","f":1743153149874585,"d_finished":722,"c":2,"l":1743153149875815,"d":722},{"a":1743153149875819,"name":"Finish","f":1743153149875819,"d_finished":0,"c":0,"l":1743153149875917,"d":98}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:12:29.876470Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1969:3974];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:12:29.876941Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1969:3974];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743153149873629,"name":"_full_task","f":1743153149873629,"d_finished":0,"c":0,"l":1743153149876525,"d":2896},"events":[{"name":"bootstrap","f":1743153149873851,"d_finished":1183,"c":1,"l":1743153149875034,"d":1183},{"a":1743153149875467,"name":"ack","f":1743153149875467,"d_finished":0,"c":0,"l":1743153149876525,"d":1058},{"a":1743153149875438,"name":"processing","f":1743153149875438,"d_finished":0,"c":0,"l":1743153149876525,"d":1087},{"name":"ProduceResults","f":1743153149874585,"d_finished":722,"c":2,"l":1743153149875815,"d":722},{"a":1743153149875819,"name":"Finish","f":1743153149875819,"d_finished":0,"c":0,"l":1743153149876525,"d":706}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1970:3975]->[1:1969:3974] 2025-03-28T09:12:29.877058Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:12:29.873082Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:12:29.877110Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:12:29.877244Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 >> KqpUniqueIndex::InsertFkAlreadyExist >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex-UseSink >> KqpJoin::RightSemiJoin_FullScan [GOOD] |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS16+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 25702, MsgBus: 6666 2025-03-28T09:10:13.246674Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785183117457368:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:13.256157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00172b/r3tmp/tmptXV912/pdisk_1.dat 2025-03-28T09:10:13.898336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:13.898461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:13.899100Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:13.903356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25702, node 1 2025-03-28T09:10:14.148475Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:10:14.148495Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:10:14.148509Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:10:14.148634Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6666 TClient is connected to server localhost:6666 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:10:14.873567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:17.506977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785200297327067:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:17.507245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:17.507795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785200297327079:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:17.513071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:10:17.528606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785200297327081:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:10:17.613767Z node 1 :TX_PROXY ERROR: Actor# [1:7486785200297327132:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:10:18.149928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:10:18.243052Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785183117457368:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:18.243154Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:18.571733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785204592294718:2365];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:10:18.572055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785204592294718:2365];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:10:18.572439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785204592294718:2365];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:10:18.572672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785204592294718:2365];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:10:18.572816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785204592294718:2365];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:10:18.572969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785204592294718:2365];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:10:18.573154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785204592294718:2365];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:10:18.573346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785204592294718:2365];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:10:18.573514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785204592294718:2365];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:10:18.573719Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7486785204592294718:2365];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:10:18.573857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785204592294718:2365];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:10:18.573962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785204592294718:2365];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:10:18.595979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785204592294716:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:10:18.596053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785204592294716:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:10:18.596317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785204592294716:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:10:18.596445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785204592294716:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:10:18.596564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785204592294716:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:10:18.596662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785204592294716:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:10:18.596765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785204592294716:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:10:18.596867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785204592294716:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:10:18.596988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785204592294716:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:10:18.597094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785204592294716:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:10:18.597227Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7486785204592294716:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:10:18.597324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785204592294716:2364];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:10:18.642362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785204592294692:2352];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp: ... p:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.928152Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.934535Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039204;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.934543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.940411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.940477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.946169Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.946397Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.952441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039193;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.952620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.958882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.960437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039242;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.964631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.969458Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.973540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.974825Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039230;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.979472Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.980032Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.984849Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039262;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.985100Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.989512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.990352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.994283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.995572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:47.999749Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.000583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039245;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.004980Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.005272Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.010588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.011442Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.016520Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.021361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039238;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.021454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039282;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.030260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039190;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.032326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.035996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.038040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.040462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.043726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039292;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:48.189883Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe09wdj3z75nt4sr502s57y", SessionId: ydb://session/3?node_id=1&id=YzFkNzQ5ODctZTE3MTk1YzktNThjMjFjNTMtMTNjZWY3Mzk=, Slow query, duration: 41.770384s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:11:48.854869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:48.855327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:48.856066Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039094;self_id=[1:7486785475175280638:9340];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-28T09:11:48.856446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:12:18.250798Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0bjtjb716jp9vcdcam7be", SessionId: ydb://session/3?node_id=1&id=YzFkNzQ5ODctZTE3MTk1YzktNThjMjFjNTMtMTNjZWY3Mzk=, Slow query, duration: 16.119031s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "-- NB: Subquerys\n$orders_with_several_warehouses = (\n select cs_order_number\n from `/Root/test/ds/catalog_sales`\n group by cs_order_number\n having count(distinct cs_warehouse_sk) > 1\n);\n\n-- start query 1 in stream 0 using template query16.tpl and seed 171719422\nselect\n count(distinct cs1.cs_order_number) as `order count`\n ,sum(cs_ext_ship_cost) as `total shipping cost`\n ,sum(cs_net_profit) as `total net profit`\nfrom\n `/Root/test/ds/catalog_sales` cs1\n cross join `/Root/test/ds/date_dim`\n cross join `/Root/test/ds/customer_address`\n cross join `/Root/test/ds/call_center`\n left semi join $orders_with_several_warehouses cs2 on cs1.cs_order_number = cs2.cs_order_number\n left only join `/Root/test/ds/catalog_returns` cr1 on cs1.cs_order_number = cr1.cr_order_number\nwhere\n cast(d_date as date) between cast('1999-4-01' as date) and\n (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))\nand cs1.cs_ship_date_sk = d_date_sk\nand cs1.cs_ship_addr_sk = ca_address_sk\nand ca_state = 'IL'\nand cs1.cs_call_center_sk = cc_call_center_sk\nand cc_county in ('Richland County','Bronx County','Maverick County','Mesa County',\n 'Raleigh County'\n)\norder by `order count`\nlimit 100;\n", parameters: 0b >> HttpRequest::Probe |82.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |82.4%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_FullScan [GOOD] Test command err: Trying to start YDB, gRPC: 32514, MsgBus: 27434 2025-03-28T09:12:22.405007Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785738152284595:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:22.405520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001707/r3tmp/tmpPfwNk2/pdisk_1.dat 2025-03-28T09:12:23.136970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:23.137087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:23.143865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:23.186315Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32514, 
node 1 2025-03-28T09:12:23.439540Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:23.439559Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:23.439569Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:23.439699Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27434 TClient is connected to server localhost:27434 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:24.442234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:24.469992Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:24.478326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:24.717235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:25.010761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:25.138967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:27.398839Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785738152284595:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:27.398906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:27.584751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785759627122719:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:27.584871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:28.035866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:28.101861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:28.151518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:28.235474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:28.301295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:28.389159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:28.489567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785763922090543:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:28.489648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:28.489839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785763922090548:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:28.494145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:28.509212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785763922090550:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:28.600317Z node 1 :TX_PROXY ERROR: Actor# [1:7486785763922090608:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:29.739802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:29.785909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:12:29.825982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:12:29.899682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:12:29.947254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:49: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin >> KqpMultishardIndex::SecondaryIndexSelect [GOOD] |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> KqpIndexes::CheckUpsertNonEquatableType-NotNull [GOOD] >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL >> KqpJoinOrder::TPCDS90+ColumnStore >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::SecondaryIndexSelect [GOOD] Test command err: Trying to start YDB, gRPC: 9726, MsgBus: 12593 2025-03-28T09:12:09.145220Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785684203653397:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:09.145719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001954/r3tmp/tmpIGkdam/pdisk_1.dat 2025-03-28T09:12:09.865484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:09.865670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:09.868294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:09.875056Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9726, node 1 2025-03-28T09:12:10.080693Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:10.080719Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:10.080740Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:10.080900Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12593 TClient is connected to server localhost:12593 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:12:10.954184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:10.995048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:11.158941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:12:11.331065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:12:11.425663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:13.216100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785701383524219:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.216205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.479996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.517164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.564320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.596707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.629368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.718316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.827133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785701383524741:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.827235Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.827971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785701383524746:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.832222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:13.849846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785701383524748:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:13.909890Z node 1 :TX_PROXY ERROR: Actor# [1:7486785701383524803:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:14.131518Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785684203653397:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:14.131621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:15.474207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 7266, MsgBus: 64223 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001954/r3tmp/tmp898dR2/pdisk_1.dat 2025-03-28T09:12:20.815237Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785728353041543:2293];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:20.815285Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:12:20.906564Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7266, node 2 2025-03-28T09:12:21.005103Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:21.005213Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:21.051839Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:21.099349Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:21.099379Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:21.099388Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:21.099498Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64223 TClient is connected to server localhost:64223 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:21.860152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:21.866247Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:21.880728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:21.988249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:22.216884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:22.402574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:25.489876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785749827879410:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:25.489956Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:25.542111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.597565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.668959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.710860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.756266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.816438Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785728353041543:2293];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:25.816606Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:25.842270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.923314Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785749827879931:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:25.923421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:25.926978Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785749827879937:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:25.932319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:25.962902Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785749827879940:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:26.057190Z node 2 :TX_PROXY ERROR: Actor# [2:7486785754122847292:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:27.352809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... >> KqpIndexes::ForbidDirectIndexTableCreation [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams-UseSink >> TLocksTest::GoodDupLock |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |82.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] >> KqpIndexMetadata::TestNoReadFromMainTableBeforeJoin [GOOD] >> KqpIndexMetadata::HandleWriteOnlyIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] Test command err: 2025-03-28T09:12:12.282511Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785696052829796:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:12.282574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ce2/r3tmp/tmpXBZwY7/pdisk_1.dat 2025-03-28T09:12:13.366502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:12:13.832863Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:13.891319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:13.891437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:13.899504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6628, node 1 2025-03-28T09:12:15.087465Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:15.087494Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:15.087502Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:15.087721Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14831 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:17.105592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:17.282877Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785696052829796:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:17.282939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:22.519345Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486785739354267323:2147];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:22.520168Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ce2/r3tmp/tmpvUkop0/pdisk_1.dat 2025-03-28T09:12:22.909876Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:22.964464Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:22.964574Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:22.976840Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2768, node 4 2025-03-28T09:12:23.222597Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:23.222624Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:23.222633Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:23.222810Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31464 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:23.699925Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:23.720408Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:12:27.506795Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486785739354267323:2147];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:27.506873Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SelectFromIndexesAndFreeSpaceLogicDoesntTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 18169, MsgBus: 28384 2025-03-28T09:12:07.182202Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785672871865613:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:07.208422Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001965/r3tmp/tmpAPnMEO/pdisk_1.dat 2025-03-28T09:12:07.745514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:07.745640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:07.748491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:07.779215Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18169, node 1 2025-03-28T09:12:07.893314Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:07.893335Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:07.893340Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:07.893430Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28384 TClient is connected to server localhost:28384 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:08.862217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:08.878033Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:08.897439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:09.070096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:09.343699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:09.480349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:11.829302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785690051736509:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:11.829420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:12.181689Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785672871865613:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:12.181780Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:12.264995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.309598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.354467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.388291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.427681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.519000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.595304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785694346704326:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:12.595438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:12.595641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785694346704332:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:12.600496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:12.618620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785694346704334:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:12.715524Z node 1 :TX_PROXY ERROR: Actor# [1:7486785694346704389:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:14.084207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:16.679411Z node 1 :TX_DATASHARD ERROR: Complete [1743153136718 : 281474976710681] from 72075186224037920 at tablet 72075186224037920, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-03-28T09:12:16.686453Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWJhOTA1ZDAtZTI1ZGVhMTQtZGVkNTJlZi04ZGE0MzdlZA==, ActorId: [1:7486785702936639245:2493], ActorState: ExecuteState, TraceId: 01jqe0c0ds1gwtq26phq41hcr7, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 16136, MsgBus: 22386 2025-03-28T09:12:17.822023Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785717742636639:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:17.822095Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001965/r3tmp/tmpNYykAj/pdisk_1.dat 2025-03-28T09:12:18.052590Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:18.092479Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:18.092562Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:18.096326Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16136, node 2 2025-03-28T09:12:18.165324Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:18.165352Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:18.165360Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:18.165494Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22386 TClient is connected to server localhost:22386 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:18.682273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:18.692484Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 2 ... ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:24.629910Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7486785747807410578:2492] TxId: 281474976710672. Ctx: { TraceId: 01jqe0c8sf9nc6kvpec6maxj1h, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWRiYTQ5YmUtNjI4YTAwMDYtYTU4MDc5NDgtYWI4ZDBhOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-03-28T09:12:24.630135Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWRiYTQ5YmUtNjI4YTAwMDYtYTU4MDc5NDgtYWI4ZDBhOTE=, ActorId: [2:7486785747807410317:2492], ActorState: ExecuteState, TraceId: 01jqe0c8sf9nc6kvpec6maxj1h, Create QueryResponse for error on request, msg: 2025-03-28T09:12:24.652174Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7486785747807410591:2492] TxId: 281474976710674. Ctx: { TraceId: 01jqe0c8t8csg6zhqzyqqt7k2s, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWRiYTQ5YmUtNjI4YTAwMDYtYTU4MDc5NDgtYWI4ZDBhOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-03-28T09:12:24.652348Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWRiYTQ5YmUtNjI4YTAwMDYtYTU4MDc5NDgtYWI4ZDBhOTE=, ActorId: [2:7486785747807410317:2492], ActorState: ExecuteState, TraceId: 01jqe0c8t8csg6zhqzyqqt7k2s, Create QueryResponse for error on request, msg: 2025-03-28T09:12:24.664491Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7486785747807410600:2492] TxId: 281474976710676. Ctx: { TraceId: 01jqe0c8tq3aba1fbx5av3pzp5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWRiYTQ5YmUtNjI4YTAwMDYtYTU4MDc5NDgtYWI4ZDBhOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-03-28T09:12:24.664658Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWRiYTQ5YmUtNjI4YTAwMDYtYTU4MDc5NDgtYWI4ZDBhOTE=, ActorId: [2:7486785747807410317:2492], ActorState: ExecuteState, TraceId: 01jqe0c8tq3aba1fbx5av3pzp5, Create QueryResponse for error on request, msg: 2025-03-28T09:12:24.677899Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7486785747807410609:2492] TxId: 281474976710678. Ctx: { TraceId: 01jqe0c8v416r8sn9aa7dckbw4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWRiYTQ5YmUtNjI4YTAwMDYtYTU4MDc5NDgtYWI4ZDBhOTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Read operation can be performed on async index table: [72057594046644480:18:1] only with StaleRO isolation level 2025-03-28T09:12:24.678083Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWRiYTQ5YmUtNjI4YTAwMDYtYTU4MDc5NDgtYWI4ZDBhOTE=, ActorId: [2:7486785747807410317:2492], ActorState: ExecuteState, TraceId: 01jqe0c8v416r8sn9aa7dckbw4, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 8636, MsgBus: 13688 2025-03-28T09:12:25.607111Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785750442468985:2095];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:25.607175Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001965/r3tmp/tmpUlmzmh/pdisk_1.dat 2025-03-28T09:12:25.813739Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:25.833080Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:25.833176Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:25.834847Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8636, node 3 2025-03-28T09:12:25.895269Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:25.895298Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:25.895305Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:25.895430Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13688 TClient is connected to server localhost:13688 WaitRootIsUp 'Root'... 
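The KQP_EXECUTER errors above record a real constraint rather than a flaky test: a global async secondary index is eventually consistent, so the executor only allows reads through it under the StaleRO isolation level. A minimal sketch of a conforming read, assuming the YDB C++ SDK; the table path /Root/SecondaryKeys and index name Index are illustrative, not taken from this log:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    // Reads through an async index must use StaleRO; under SerializableRW
    // the executor rejects the query exactly as logged above.
    TAsyncDataQueryResult ReadViaAsyncIndex(TSession session) {
        return session.ExecuteDataQuery(
            "SELECT * FROM `/Root/SecondaryKeys` VIEW `Index` WHERE Fk = 1;",
            TTxControl::BeginTx(TTxSettings::StaleRO()).CommitTx());
    }

Sync indexes, by contrast, can be read under SerializableRW, which is why only the async-index cases in this suite exercise this error path.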
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:26.432098Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:26.439210Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:12:26.453147Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:26.547211Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:12:26.729448Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:12:26.856068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:29.600321Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785767622339891:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:29.600441Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:29.699393Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:29.731478Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:29.770919Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:29.841201Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:29.887597Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:29.943343Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:30.011006Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785771917307709:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:30.011095Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:30.011479Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785771917307714:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:30.015166Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:30.029187Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785771917307716:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:12:30.082638Z node 3 :TX_PROXY ERROR: Actor# [3:7486785771917307769:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:30.607532Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486785750442468985:2095];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:30.607624Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:31.285294Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:31.365857Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:12:31.436413Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpIndexes::SecondaryIndexSelectUsingScripting [GOOD] >> KqpIndexes::SecondaryIndexReplace-UseSink >> KqpMultishardIndex::SortedRangeReadDesc [GOOD] >> KqpMultishardIndex::SortByPk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpdateFkPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 12490, MsgBus: 12132 2025-03-28T09:12:08.423731Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785679274457401:2265];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:08.423902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001956/r3tmp/tmp60EPUo/pdisk_1.dat 2025-03-28T09:12:09.123104Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:09.145165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:09.145266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:09.147366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12490, node 1 2025-03-28T09:12:09.335710Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:09.335752Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:09.335762Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:09.335921Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12132 TClient is connected to server localhost:12132 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:10.202239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:10.239973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:10.461064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:10.703977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:10.795197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:12.795454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785696454328170:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:12.795573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.319669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.361613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.404784Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785679274457401:2265];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:13.405263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:13.411218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.457996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.488175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.567766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.644610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785700749295986:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.644698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.645080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785700749295991:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.650159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:13.662019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785700749295993:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:13.720791Z node 1 :TX_PROXY ERROR: Actor# [1:7486785700749296048:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:15.005840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 9198, MsgBus: 9682 2025-03-28T09:12:20.299228Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785729364331161:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:20.299282Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001956/r3tmp/tmpIxYVJw/pdisk_1.dat 2025-03-28T09:12:20.655234Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:20.670564Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:20.678890Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:20.687837Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9198, node 2 2025-03-28T09:12:20.883356Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:20.883381Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:20.883391Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:20.883525Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9682 TClient is connected to server localhost:9682 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:21.739964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:21.773048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:21.900642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:22.172247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:22.283474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:25.292251Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785729364331161:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:25.292328Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:25.685839Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785750839169263:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:25.685925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:25.808121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.872621Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.960155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.997960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:26.068926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:26.119914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:26.217038Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785755134137079:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:26.217169Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:26.217497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785755134137084:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:26.221679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:26.234994Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785755134137086:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:26.306208Z node 2 :TX_PROXY ERROR: Actor# [2:7486785755134137145:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:27.557672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:35.589166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:12:35.589201Z node 2 :IMPORT WARN: Table profiles were not loaded |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpMultishardIndex::DataColumnWriteNull [GOOD] >> KqpMultishardIndex::DuplicateUpsert >> KqpMultishardIndex::WriteIntoRenamingSyncIndex [GOOD] >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex >> KqpMultishardIndex::DataColumnWrite+UseSink [GOOD] >> KqpMultishardIndex::DataColumnWrite-UseSink >> KqpIndexes::UpdateOnReadColumns [GOOD] >> KqpUniqueIndex::UpsertExplicitNullInComplexFk >> KqpJoinOrder::TPCDS88-ColumnStore >> KqpUniqueIndex::ReplaceFkAlreadyExist [GOOD] >> KqpUniqueIndex::ReplaceFkDuplicate >> KqpIndexes::MultipleModifications >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex-UseSink [GOOD] >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex+UseSink |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |82.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpdateOnReadColumns [GOOD] Test command err: Trying to start YDB, gRPC: 10673, MsgBus: 21281 2025-03-28T09:11:23.917669Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785485196888561:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:23.923372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019a4/r3tmp/tmptAfcqZ/pdisk_1.dat 2025-03-28T09:11:24.987651Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:25.026301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:25.026417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:25.026941Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:25.144009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10673, node 1 2025-03-28T09:11:25.891224Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-28T09:11:25.891243Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:25.891251Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:25.891368Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21281 TClient is connected to server localhost:21281 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:27.625652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:27.662232Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:27.672853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.313026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.510912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.625985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.925292Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785485196888561:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:28.945522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:29.604897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785510966694130:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:29.605038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:30.687540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.751564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.848569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.907146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.997274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.083697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.177468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785519556629257:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.177544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.177836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785519556629262:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.181671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:31.190493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785519556629264:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:31.261350Z node 1 :TX_PROXY ERROR: Actor# [1:7486785519556629320:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:35.040383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14616, MsgBus: 14069 2025-03-28T09:11:39.243451Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785553546875057:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:39.243501Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019a4/r3tmp/tmpYFuXPI/pdisk_1.dat 2025-03-28T09:11:39.557081Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:39.596803Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:39.596887Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:39.607724Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14616, node 2 2025-03-28T09:11:39.799297Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:39.799334Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:39.799343Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:39.799454Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14069 TClient is connected to server localhost:14069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:40.428030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:11:40.453673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:40.575341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976 ... ble, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:19.224006Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:19.322982Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:19.404846Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:19.490659Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486785727663650578:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:19.490838Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:19.492525Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486785727663650583:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:19.496971Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:19.518164Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486785727663650585:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:19.617021Z node 5 :TX_PROXY ERROR: Actor# [5:7486785727663650642:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:21.453777Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.529527Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.591917Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 16733, MsgBus: 7331 2025-03-28T09:12:26.302420Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486785756707601707:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:26.302497Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019a4/r3tmp/tmprpmMMA/pdisk_1.dat 2025-03-28T09:12:26.520882Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:26.534496Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:26.534615Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:26.536703Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16733, node 6 2025-03-28T09:12:26.598495Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:26.598825Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:26.598842Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:26.599026Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7331 TClient is connected to server localhost:7331 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:27.541235Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:27.549078Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:27.554768Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:12:27.654166Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:27.971180Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:12:28.145798Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:12:31.302909Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486785756707601707:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:31.302994Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:31.988301Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486785778182439964:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:31.988405Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:32.182735Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:32.281491Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:32.412284Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:32.498349Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:32.578153Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:32.691585Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:32.831048Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486785782477407783:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:32.831195Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:32.831287Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486785782477407788:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:32.836149Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:32.891723Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486785782477407791:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:32.989238Z node 6 :TX_PROXY ERROR: Actor# [6:7486785782477407859:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:35.862586Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:36.118520Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:12:36.221317Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-ColumnStore |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay_yt/query_replay_yt |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |82.4%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] >> KqpIndexes::InnerJoinWithNonIndexWherePredicate [GOOD] >> KqpIndexes::InnerJoinSecondaryIndexLookupAndRightTablePredicateNonIndexColumn >> KqpIndexes::UpsertWithoutExtraNullDelete-UseSink [GOOD] |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel1 [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel2 >> KqpUniqueIndex::InsertFkAlreadyExist [GOOD] >> KqpUniqueIndex::InsertFkDuplicate >> KqpIndexes::CreateTableWithImplicitSyncIndexSQL ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] Test command err: 2025-03-28T09:12:12.309626Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785695206833075:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:12.309719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cd7/r3tmp/tmpHzPdDO/pdisk_1.dat 2025-03-28T09:12:13.365726Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:12:13.818964Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:13.884887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:13.885028Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:13.899042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65192, node 1 2025-03-28T09:12:15.090511Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:15.090532Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:15.090546Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:15.090654Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18059 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:17.048176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:17.309814Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785695206833075:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:17.309884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:28.583312Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486785765467440639:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:28.583457Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cd7/r3tmp/tmpaZeD0X/pdisk_1.dat 2025-03-28T09:12:29.051028Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:29.153480Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:29.153570Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:29.160784Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15699, node 4 2025-03-28T09:12:29.452339Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:29.452363Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:29.452371Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:29.452512Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24479 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:29.980177Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:33.534830Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486785765467440639:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:33.534927Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:44.019434Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:12:44.019458Z node 4 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::UpsertWithoutExtraNullDelete-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28806, MsgBus: 13746 2025-03-28T09:12:20.112289Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785731841708765:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:20.112942Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001923/r3tmp/tmpaicuJ1/pdisk_1.dat 2025-03-28T09:12:20.744605Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:20.793459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:20.793574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:20.800874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28806, node 1 2025-03-28T09:12:21.028228Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:21.028248Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:21.028256Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:21.028367Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13746 TClient is connected to server localhost:13746 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:22.024481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:22.068189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:22.265136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:22.533698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:22.669151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:25.101263Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785731841708765:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:25.101322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:25.268458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785753316546859:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:25.268577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:25.624036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.710636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.800461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.853173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.894704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.947629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:26.004588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785757611514672:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:26.004821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:26.005324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785757611514678:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:26.010904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:26.062911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785757611514680:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:26.114287Z node 1 :TX_PROXY ERROR: Actor# [1:7486785757611514735:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:27.200709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:28.379299Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 28905, MsgBus: 15559 2025-03-28T09:12:31.864998Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785777076174388:2224];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001923/r3tmp/tmpIadsNu/pdisk_1.dat 2025-03-28T09:12:31.984678Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:12:32.236604Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:32.300743Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:32.300832Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:32.304016Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28905, node 2 2025-03-28T09:12:32.547216Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:32.547245Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:32.547253Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:32.547368Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15559 TClient is connected to server localhost:15559 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:33.817359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:33.827310Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:12:33.842191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:34.018351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:34.254381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:34.418798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:36.720446Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785777076174388:2224];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:36.720536Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:37.385089Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785802845979755:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.385258Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.407575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.499652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.575899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.650085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.723854Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.835806Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.938797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785802845980274:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.938947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.939227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785802845980280:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.943924Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:37.963240Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785802845980282:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:12:38.037660Z node 2 :TX_PROXY ERROR: Actor# [2:7486785807140947635:3460] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:40.447016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:43.176522Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:44.574544Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:45.010524Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> TLocksTest::CK_BrokenLock [GOOD] >> KqpIndexes::DuplicateUpsertInterleaveParams-UseSink [GOOD] >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup [GOOD] >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD] >> KqpMultishardIndex::DuplicateUpsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DuplicateUpsertInterleaveParams-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1786, MsgBus: 32058 2025-03-28T09:12:22.092045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785740250412982:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:22.092098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001921/r3tmp/tmpaUBV8w/pdisk_1.dat 2025-03-28T09:12:23.012595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:23.012687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:23.015395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:23.059485Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1786, node 1 2025-03-28T09:12:23.283248Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:23.283269Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:23.283279Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:23.283377Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32058 TClient is connected to server localhost:32058 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:24.128903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:24.161345Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:24.177425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:24.381069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:12:24.619332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:12:24.718624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:27.023826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785761725251265:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:27.023945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:27.094808Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785740250412982:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:27.094891Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:27.476010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:27.519277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:27.564754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:27.605377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:27.648909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:27.701579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:27.773209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785761725251782:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:27.773319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:27.773571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785761725251787:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:27.777951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:27.796307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785761725251789:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:27.882616Z node 1 :TX_PROXY ERROR: Actor# [1:7486785761725251844:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:29.678374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13802, MsgBus: 17580 2025-03-28T09:12:31.638029Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785776777154535:2157];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:31.642253Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001921/r3tmp/tmpKp6tbL/pdisk_1.dat 2025-03-28T09:12:31.811777Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:31.811863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:31.812069Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:31.821375Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13802, node 2 2025-03-28T09:12:31.976096Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:31.976118Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:31.976126Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:31.976255Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17580 TClient is connected to server localhost:17580 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:32.598947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:32.608936Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:12:32.625127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:32.747955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-2 ... self is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:35.950937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:36.047985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785798251993171:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:36.048064Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:36.054877Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785798251993176:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:36.063275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:36.081787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785798251993178:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:12:36.183263Z node 2 :TX_PROXY ERROR: Actor# [2:7486785798251993234:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:36.639796Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785776777154535:2157];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:36.639859Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:37.333599Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.393565Z node 2 :TX_PROXY ERROR: Actor# [2:7486785802546960849:3700] txid# 281474976715672, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path is not a directory (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:37.425008Z node 2 :TX_PROXY ERROR: Actor# [2:7486785802546960863:3706] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path is not a directory (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:37.446149Z node 2 :TX_PROXY ERROR: Actor# [2:7486785802546960889:3730] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path is not a directory (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 5365, MsgBus: 21805 2025-03-28T09:12:38.643545Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785807101996094:2135];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:38.643663Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001921/r3tmp/tmphWuUca/pdisk_1.dat 2025-03-28T09:12:38.742053Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:38.766513Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:38.766608Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:38.768065Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5365, node 3 2025-03-28T09:12:38.915164Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:38.915185Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:38.915192Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:38.915314Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:21805 TClient is connected to server localhost:21805 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:39.534640Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:39.553254Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:39.694483Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:39.882207Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:40.054400Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:42.466843Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785824281866977:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:42.466952Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:42.673766Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:42.727087Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:42.814194Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:42.859944Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:42.923375Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:43.009691Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:43.102940Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785828576834789:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:43.103087Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:43.106744Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785828576834794:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:43.111556Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:43.130566Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785828576834796:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:12:43.189601Z node 3 :TX_PROXY ERROR: Actor# [3:7486785828576834850:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:43.644267Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486785807101996094:2135];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:43.644326Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:45.414234Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.271494Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinSkipNullFilter+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 10620, MsgBus: 7531 2025-03-28T09:12:38.076407Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785808558770552:2243];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:38.076473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001706/r3tmp/tmpFo4lcD/pdisk_1.dat 2025-03-28T09:12:38.812663Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:38.848285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:38.848355Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:38.876028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10620, node 1 2025-03-28T09:12:39.159646Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:39.159675Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:39.159702Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:39.159842Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7531 TClient is connected to server localhost:7531 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:40.064479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:40.089108Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:40.119469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:40.297591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:40.490444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:12:40.601472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:12:43.077830Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785808558770552:2243];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:43.077889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:43.419151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785830033608631:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:43.419239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:44.074507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:44.148440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:44.229575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:44.268437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:44.307328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:44.419939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:44.519110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785834328576455:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:44.519187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:44.519613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785834328576460:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:44.523713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:44.550292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785834328576462:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:44.622645Z node 1 :TX_PROXY ERROR: Actor# [1:7486785834328576517:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:46.008745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.053811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.089101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.163004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.234918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.271977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpIndexes::SecondaryIndexUsingInJoin-UseStreamJoin [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin2+UseStreamJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::CreateTableWithExplicitAsyncIndexSQL [GOOD] Test command err: Trying to start YDB, gRPC: 23456, MsgBus: 8338 2025-03-28T09:12:18.444939Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785720679001585:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:18.447255Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001932/r3tmp/tmpq7t7Tr/pdisk_1.dat 2025-03-28T09:12:19.044533Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:19.045539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:19.045639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:19.052126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23456, node 1 2025-03-28T09:12:19.303108Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:19.303138Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:19.303145Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:19.303277Z node 1 :NET_CLASSIFIER ERROR: got 
bad distributable configuration TClient is connected to server localhost:8338 TClient is connected to server localhost:8338 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:20.523101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:20.539923Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:20.546466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:20.762235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:20.943263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:21.050872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:23.073817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785742153839857:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:23.073936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:23.449144Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785720679001585:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:23.449234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:23.496027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.532921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.567154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.607393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.649046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.717816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.806244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785742153840375:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:23.806361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:23.806743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785742153840380:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:23.812758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:23.836421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785742153840382:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:23.894012Z node 1 :TX_PROXY ERROR: Actor# [1:7486785742153840438:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:25.575418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:26.291448Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 15662, MsgBus: 12244 2025-03-28T09:12:27.862360Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785760470708255:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001932/r3tmp/tmpztCKd3/pdisk_1.dat 2025-03-28T09:12:27.930522Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:12:28.299582Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:28.327552Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:28.327655Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:28.335906Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15662, node 2 2025-03-28T09:12:28.533308Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:28.533333Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:28.533342Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:28.533444Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12244 TClient is connected to server localhost:12244 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:29.149090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:29.172273Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:29.189178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:29.291903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESch ... ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:33.428927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:33.485098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:33.590310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:33.718703Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785786240514164:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:33.718774Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:33.718951Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785786240514169:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:33.721948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:33.737103Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:12:33.738047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785786240514171:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:33.797246Z node 2 :TX_PROXY ERROR: Actor# [2:7486785786240514225:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:35.069070Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:36.531684Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:36.558130Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 27229, MsgBus: 63261 2025-03-28T09:12:37.720330Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785805245231796:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:37.727294Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001932/r3tmp/tmpuALi28/pdisk_1.dat 2025-03-28T09:12:37.959874Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:37.982177Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:37.982647Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:37.986098Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27229, node 3 2025-03-28T09:12:38.082251Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:38.082275Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:38.082286Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:38.082420Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63261 TClient is connected to server localhost:63261 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:12:39.090526Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:39.103483Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:39.125085Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:39.232001Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:39.530945Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:39.646521Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:42.702402Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486785805245231796:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:42.702464Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:43.534592Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785831015037315:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:43.543064Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:43.589030Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:43.668858Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:43.752505Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:43.823211Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:43.937511Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:44.192828Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:44.289219Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785835310005136:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:44.289324Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:44.289511Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785835310005141:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:44.294322Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:44.314617Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785835310005143:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:44.381502Z node 3 :TX_PROXY ERROR: Actor# [3:7486785835310005198:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:46.570158Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.767725Z node 3 :TX_PROXY ERROR: Actor# [3:7486785843899940368:3834] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:48.086830Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:48.143014Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_BrokenLock [GOOD] Test command err: 2025-03-28T09:11:52.043361Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785609568778821:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:52.043445Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f1d/r3tmp/tmp27r9iX/pdisk_1.dat 2025-03-28T09:11:52.587095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:52.587196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:52.594173Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:52.598630Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1844 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:11:52.970160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:11:52.987360Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:53.010891Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:11:53.035403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:53.205480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:53.278181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:56.002328Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785627165329458:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:56.002867Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f1d/r3tmp/tmpPsksH9/pdisk_1.dat 2025-03-28T09:11:56.204313Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:56.206960Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:56.207071Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:56.208603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20633 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:11:56.485194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:56.491879Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:11:56.515824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:56.613498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:56.701969Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:00.476399Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785646023804881:2158];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f1d/r3tmp/tmpnN5ja4/pdisk_1.dat 2025-03-28T09:12:00.567277Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:12:00.657744Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:00.670935Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:00.671018Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:00.672050Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25615 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:12:00.964421Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:00.975468Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:00.995185Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:12:01.010669Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:01.142425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:01.242189Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f1d/r3tmp/tmpO86xA7/pdisk_1.dat 2025-03-28T09:12:05.640907Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:12:05.719832Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:05.729931Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:05.730119Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:05.731862Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7463 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 Pa ... shed: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:12:19.324441Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:12:19.385000Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:19.574328Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:12:19.674240Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:12:24.632389Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486785746415821612:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:24.632454Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f1d/r3tmp/tmpgIoLZy/pdisk_1.dat 2025-03-28T09:12:24.841954Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:24.874949Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:24.875041Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:24.877464Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17219 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:12:25.195562Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:12:25.227221Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:12:25.235018Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:25.336079Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:25.450502Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:31.662552Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486785776821902535:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:31.662673Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f1d/r3tmp/tmpQu4IWi/pdisk_1.dat 2025-03-28T09:12:32.206385Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:32.240395Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:32.240510Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:32.256410Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2346 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:12:32.888684Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:32.910793Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:32.940242Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:12:32.946853Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:33.127577Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:33.213710Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:40.123360Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486785817438702876:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:40.124636Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f1d/r3tmp/tmpZNoHqi/pdisk_1.dat 2025-03-28T09:12:40.578776Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:40.613572Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:40.618925Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:40.620897Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17074 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:12:41.143903Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:41.159322Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:41.190925Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:12:41.201635Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:41.362070Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:41.577943Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DuplicateUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 11006, MsgBus: 23312 2025-03-28T09:12:27.512031Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785761449500948:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:27.512473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001914/r3tmp/tmpIifV4H/pdisk_1.dat 2025-03-28T09:12:28.549983Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:28.575103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:28.575192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:28.587397Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:12:28.587746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11006, node 1 2025-03-28T09:12:28.866825Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:28.866847Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:28.866854Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:28.866964Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23312 TClient is connected to server localhost:23312 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:29.814265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:29.847818Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:29.876632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:12:30.034773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:30.269413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:30.418251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:32.511014Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785761449500948:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:32.511079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:33.739788Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785787219306342:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:33.740093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:34.115795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:34.191121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:34.267688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:34.352448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:34.396416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:34.472764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:34.572092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785791514274163:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:34.572166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:34.572494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785791514274168:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:34.582986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:34.607794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785791514274170:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:34.674898Z node 1 :TX_PROXY ERROR: Actor# [1:7486785791514274224:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:36.619186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 28576, MsgBus: 23719 2025-03-28T09:12:41.462357Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785818490093749:2157];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:41.534840Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001914/r3tmp/tmpFBuRVy/pdisk_1.dat 2025-03-28T09:12:41.695802Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:41.725145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:41.725229Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:41.726210Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28576, node 2 2025-03-28T09:12:41.872567Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:41.872594Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:41.872601Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:41.872753Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23719 TClient is connected to server localhost:23719 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:42.364503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:42.388108Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:12:42.416019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:42.609500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:42.838783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:42.926378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:45.426885Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785835669964576:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:45.426982Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:45.541646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:45.590773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:45.639748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:45.679603Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:45.749450Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:45.804455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:45.874957Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785835669965094:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:45.875052Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:45.876816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785835669965099:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:45.884814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:45.894944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785835669965101:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:12:45.974008Z node 2 :TX_PROXY ERROR: Actor# [2:7486785835669965156:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:46.462953Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785818490093749:2157];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:46.463028Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:47.154372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... >> TLocksTest::Range_BrokenLock3 [GOOD] >> KqpJoinOrder::TPCDS61+ColumnStore [GOOD] >> KqpIndexes::MultipleSecondaryIndexWithSameComulns+UseSink [GOOD] >> KqpUniqueIndex::UpsertExplicitNullInComplexFk [GOOD] >> KqpUniqueIndex::UpsertImplicitNullInComplexFk >> KqpIndexes::DoUpsertWithoutIndexUpdate-UniqIndex+UseSink [GOOD] >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex+UseSink >> KqpIndexes::MultipleModifications [GOOD] >> KqpIndexes::JoinWithNonPKColumnsInPredicate+UseStreamJoin >> KqpIndexes::SecondaryIndexReplace-UseSink [GOOD] >> KqpMultishardIndex::SortByPk [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH5 |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::MultipleSecondaryIndexWithSameComulns+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 61793, MsgBus: 1987 2025-03-28T09:12:08.778552Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785679942905439:2264];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:08.778640Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001947/r3tmp/tmpfncyjC/pdisk_1.dat 2025-03-28T09:12:09.363245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:09.363340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:09.368642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:09.421220Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61793, node 1 2025-03-28T09:12:09.599368Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:09.599404Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:09.599414Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:09.599564Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:1987 TClient is connected to server localhost:1987 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:10.675835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:10.706788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:10.945687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:11.136218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:11.229677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:13.005085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785697122776210:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.005245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.444454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.492724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.534493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.579166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.632502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.702082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:13.778305Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785679942905439:2264];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:13.778456Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:13.797489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785701417744023:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.797589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.797817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785701417744028:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:13.808416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:13.824762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785701417744030:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:13.893248Z node 1 :TX_PROXY ERROR: Actor# [1:7486785701417744087:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:15.144181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:17.288876Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:17.321392Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:17.395854Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 4551, MsgBus: 15951 2025-03-28T09:12:18.443677Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785719710311319:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:18.443726Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001947/r3tmp/tmp7cEWVu/pdisk_1.dat 2025-03-28T09:12:18.781306Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:18.787794Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:18.787893Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:18.789465Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4551, node 2 2025-03-28T09:12:19.063258Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:19.063281Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:19.063288Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:19.063420Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15951 TClient is connected to server localhost:15951 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:12:19.772001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:19.806723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:19.953886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:20.307854Z node 2 ... : Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:24.068118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:24.098895Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785745480117380:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:24.189521Z node 2 :TX_PROXY ERROR: Actor# [2:7486785745480117436:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:25.805369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:28.413202Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:28.487823Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:28.552326Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 27017, MsgBus: 24085 2025-03-28T09:12:30.111124Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785772500518810:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001947/r3tmp/tmpgxDz2I/pdisk_1.dat 2025-03-28T09:12:30.188604Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:12:30.340987Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:30.341084Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:30.343272Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:30.350222Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27017, node 3 2025-03-28T09:12:30.535433Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:30.535457Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:30.535465Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:30.535586Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24085 TClient is connected to server localhost:24085 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:12:31.724218Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:31.739646Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:31.765542Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:12:31.973028Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:12:32.335042Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:32.447988Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:35.007428Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486785772500518810:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:35.037741Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:35.916036Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785793975356905:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:35.916129Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:35.988372Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:36.072905Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:36.172293Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:36.223490Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:36.276704Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:36.320122Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:36.395921Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785798270324720:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:36.396088Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:36.396400Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785798270324725:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:36.401941Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:36.458711Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785798270324728:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:36.530060Z node 3 :TX_PROXY ERROR: Actor# [3:7486785798270324783:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:37.830671Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:39.408195Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:39.462791Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:39.510935Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:41.301481Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:41.347912Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:43.032849Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:45.123574Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:45.199882Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:45.346869Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:12:45.346904Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:46.442299Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:46.515613Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:47.393261Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:47.445170Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:47.503537Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:48.420899Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:49.669944Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:50.174850Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:50.274932Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:50.308454Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:50.839370Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:51.207897Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::SortByPk [GOOD] Test command err: Trying to start YDB, gRPC: 9117, MsgBus: 16246 2025-03-28T09:12:32.047594Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785783711596079:2261];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:32.047826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018f2/r3tmp/tmp70THyz/pdisk_1.dat 2025-03-28T09:12:32.693472Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:32.693572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:32.694706Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:32.698372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9117, node 1 2025-03-28T09:12:32.891216Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:32.891242Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:32.891248Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:32.891367Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16246 TClient is connected to server localhost:16246 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:33.825755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:33.858600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:34.006895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:34.214603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:34.309294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:36.288635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785800891466817:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:36.288766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:36.776770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:36.845184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:36.912463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:36.998765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.030444Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785783711596079:2261];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:37.030518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:37.074232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.153278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.275197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785805186434635:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.275279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.275657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785805186434640:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.280154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:37.296089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785805186434642:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:37.379943Z node 1 :TX_PROXY ERROR: Actor# [1:7486785805186434697:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:38.626569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 4921, MsgBus: 3660 2025-03-28T09:12:41.100524Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785818925354310:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:41.100576Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018f2/r3tmp/tmpQhZmMC/pdisk_1.dat 2025-03-28T09:12:41.437172Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:41.527322Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:41.527424Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:41.535854Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4921, node 2 2025-03-28T09:12:41.671460Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:41.671484Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:41.671491Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:41.671627Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3660 TClient is connected to server localhost:3660 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:42.528102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:42.549191Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:42.557232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:42.697777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:42.875620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:42.991973Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:46.106660Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785818925354310:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:46.141718Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:46.991377Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785840400192560:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:46.991513Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.053312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.153467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.204871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.244119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.286455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.413039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.518262Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785844695160379:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.518356Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.518803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785844695160384:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.523397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:47.560024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785844695160386:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:47.643853Z node 2 :TX_PROXY ERROR: Actor# [2:7486785844695160442:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:49.030914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |82.5%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock3 [GOOD] Test command err: 2025-03-28T09:11:56.751531Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785628386751999:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:56.751576Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f1b/r3tmp/tmpco2dTZ/pdisk_1.dat 2025-03-28T09:11:57.166525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:57.166602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:57.170303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:57.172795Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:65074 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:11:57.478263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:11:57.503348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:57.646669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:57.716514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:00.384830Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785645981552457:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:00.384902Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f1b/r3tmp/tmpAyGU99/pdisk_1.dat 2025-03-28T09:12:00.571630Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:00.593787Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:00.593855Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:00.600859Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16921 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-28T09:12:00.928100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:12:00.933812Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:00.944051Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:12:00.948194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:01.036091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:01.120147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:04.498006Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785659839349719:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:04.498057Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f1b/r3tmp/tmp21UQVL/pdisk_1.dat 2025-03-28T09:12:04.715832Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:04.741545Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:04.741636Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:04.744105Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11500 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:12:05.026354Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:05.039317Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:05.062988Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:12:05.078292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:05.170442Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:05.270085Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:08.293031Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486785677991784327:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:08.293080Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f1b/r3tmp/tmprWEYs3/pdisk_1.dat 2025-03-28T09:12:08.512601Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:08.531019Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:08.531140Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:08.534586Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16779 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { Sche ... 0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:23.891834Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28352 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:12:24.241288Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:12:24.272882Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:24.393340Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:24.462888Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:28.584705Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486785762853073717:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:28.584753Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f1b/r3tmp/tmpF9HRb9/pdisk_1.dat 2025-03-28T09:12:28.822049Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:28.868453Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:28.868562Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:28.870931Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16647 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:12:29.127574Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:29.134224Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:29.152752Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:29.235565Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:29.317275Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:34.852998Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486785791873657787:2207];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f1b/r3tmp/tmpsxzvKm/pdisk_1.dat 2025-03-28T09:12:34.968910Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:12:35.372122Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:35.405094Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:35.405697Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:35.409392Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27059 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:12:35.893649Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:12:35.945798Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:36.135163Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:36.285270Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:43.119672Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486785830344355955:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:43.129825Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f1b/r3tmp/tmp6W9u6Y/pdisk_1.dat 2025-03-28T09:12:43.782889Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:43.842543Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:43.848968Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:43.865556Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10277 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:12:44.422093Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:44.435740Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:44.476517Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:44.722807Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:44.810620Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexReplace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7743, MsgBus: 20942 2025-03-28T09:12:22.868591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785737901820556:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:22.869023Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00191b/r3tmp/tmp5xSgjt/pdisk_1.dat 2025-03-28T09:12:23.447908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:23.447997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:23.449064Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:23.452057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7743, node 1 2025-03-28T09:12:23.575303Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:23.575332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:23.575338Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:23.575446Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20942 TClient is connected to server localhost:20942 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:24.128889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:24.151398Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:24.171305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:24.393396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:24.657917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:24.760150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:26.740856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785755081691345:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:26.740977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:27.324950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:27.370152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:27.467635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:27.510753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:27.548001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:27.640296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:27.712231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785759376659162:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:27.712307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:27.712782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785759376659168:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:27.716460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:27.741659Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:12:27.742024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785759376659170:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:27.809568Z node 1 :TX_PROXY ERROR: Actor# [1:7486785759376659224:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:27.830188Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785737901820556:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:27.830267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:29.001820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:29.667743Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:30.397407Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 13538, MsgBus: 20774 2025-03-28T09:12:31.450921Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785778115625114:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:31.523815Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00191b/r3tmp/tmpGIETVO/pdisk_1.dat 2025-03-28T09:12:31.847320Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:31.889035Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:31.889135Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:31.890128Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13538, node 2 2025-03-28T09:12:32.076623Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:32.076648Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:32.076656Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:32.076817Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20774 TClient is connected to server localhost:20774 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:33.317586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:33.325422Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:33.336963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is un ... _TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.084829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.131607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.170968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.266819Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.347298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785803885431039:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.347404Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.348412Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785803885431044:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.352041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:37.362222Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785803885431046:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:37.448267Z node 2 :TX_PROXY ERROR: Actor# [2:7486785803885431103:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:38.608524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4222, MsgBus: 14896 2025-03-28T09:12:40.726262Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785818128656280:2199];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00191b/r3tmp/tmpBs3dS5/pdisk_1.dat 2025-03-28T09:12:40.810384Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:12:41.076069Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:41.117601Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:41.117693Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:41.127915Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4222, node 3 2025-03-28T09:12:41.378887Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:41.378922Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:41.378933Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:41.379071Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14896 TClient is connected to server localhost:14896 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:42.634453Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:42.675453Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:42.688510Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:42.807106Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:43.031292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:43.158320Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:45.606875Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486785818128656280:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:45.606965Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:46.639924Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785843898461701:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:46.640022Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:46.694098Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.778092Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.849964Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.935094Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.008625Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.074776Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.193282Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785848193429521:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.193385Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.193670Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785848193429526:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.198750Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:47.216736Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:12:47.217538Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785848193429528:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:47.285944Z node 3 :TX_PROXY ERROR: Actor# [3:7486785848193429582:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:49.568136Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:50.781510Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:50.835983Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:51.800761Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:12:51.843026Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpIndexes::CreateTableWithImplicitSyncIndexSQL [GOOD] >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNotNullableLevel2 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::SpillingInRuntimeNodes+EnabledSpilling [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/2909/001734/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk3 Trying to start YDB, gRPC: 64357, MsgBus: 14645 2025-03-28T09:11:01.013816Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785389708541372:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:01.014199Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001734/r3tmp/tmpIGInUT/pdisk_1.dat 2025-03-28T09:11:01.863493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:01.863623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:01.866128Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:01.868075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64357, node 1 2025-03-28T09:11:02.020530Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:02.020551Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:02.020558Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:02.021729Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14645 TClient is connected to server localhost:14645 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:02.760243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:02.795853Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:02.817599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:11:02.963447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:11:03.136769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:03.215718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:05.417913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785406888412216:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:05.418147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:05.741776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:05.783084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:05.818147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:05.868875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:05.909356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:05.960264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:06.011028Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785389708541372:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:06.011233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:06.033504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785411183380024:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:06.033621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:06.033912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785411183380029:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:06.038883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:06.051554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785411183380031:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:06.114373Z node 1 :TX_PROXY ERROR: Actor# [1:7486785411183380083:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:16.870792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:11:16.870828Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '779) '('"_id" '"52687cbb-fa1e7b98-a61a838a-f7baf707") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '677) '('"_id" '"a45cd4cd-9820d5-b9593f09-8dd4eadd") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '689) '('"_id" '"cdc6c77d-7ac4d26-c86952cc-be2960e4")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel1 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel1 >> KqpIndexMetadata::HandleWriteOnlyIndex [GOOD] >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS61+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 22556, MsgBus: 31258 2025-03-28T09:10:15.776011Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785193281148138:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:15.776347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00172a/r3tmp/tmpHBE8W1/pdisk_1.dat 2025-03-28T09:10:16.302090Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:10:16.306965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:10:16.307039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:10:16.310898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22556, node 1 2025-03-28T09:10:16.509014Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:10:16.509033Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:10:16.509041Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:10:16.509169Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31258 TClient is connected to server localhost:31258 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:10:17.430402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:10:20.393620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785214755985153:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:20.393792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:20.402540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785214755985165:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:10:20.414530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:10:20.431215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785214755985167:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:10:20.581840Z node 1 :TX_PROXY ERROR: Actor# [1:7486785214755985218:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:10:20.778916Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785193281148138:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:10:20.778999Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:10:21.487977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:10:21.813119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785219050952821:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:10:21.813127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486785219050952811:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:10:21.813373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785219050952821:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:10:21.813397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486785219050952811:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:10:21.813681Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785219050952821:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:10:21.813816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785219050952821:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:10:21.813938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785219050952821:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:10:21.813942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486785219050952811:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:10:21.814056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486785219050952811:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:10:21.814097Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7486785219050952821:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:10:21.814177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486785219050952811:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:10:21.814213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785219050952821:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:10:21.814267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486785219050952811:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:10:21.814343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785219050952821:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:10:21.814368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486785219050952811:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:10:21.814469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486785219050952811:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:10:21.814501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785219050952821:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:10:21.814604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486785219050952811:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:10:21.814623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785219050952821:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:10:21.814930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486785219050952811:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:10:21.815135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486785219050952811:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:10:21.815202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785219050952821:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:10:21.815280Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486785219050952811:2358];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:10:21.815326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486785219050952821:2363];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:10:21.868166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486785219050952838:2364];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.c ... 6710714; 2025-03-28T09:11:51.264988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.271372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.271378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.277636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.277645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.283697Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.283698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.290159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.291952Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.296564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.297292Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.303237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.303238Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.309035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.315097Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.325446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.326090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.331212Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.331306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.336983Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.337341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.343563Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.343719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.350160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.350225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.356738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.356738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.363248Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.364811Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.369461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.370343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.375703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.375712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.381731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.381840Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.387757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.387876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:11:51.524371Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe09x7m62wn8g8whb2gmjnm", SessionId: ydb://session/3?node_id=1&id=N2MzZDllZjEtZWRkYWQ4MDItNmE0ZDczZTctYzAyOGNjM2E=, Slow query, duration: 44.270806s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:11:52.261370Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:52.262003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:11:52.262018Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039094;self_id=[1:7486785476749036539:9219];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-28T09:11:52.262433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:12:37.466191Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0bwdqb1q5e64zmwa0dry9", SessionId: ydb://session/3?node_id=1&id=N2MzZDllZjEtZWRkYWQ4MDItNmE0ZDczZTctYzAyOGNjM2E=, Slow query, duration: 25.506007s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query61.tpl and seed 1930872976\nselect promotions,total,cast(promotions as float)/cast(total as float)*100\nfrom\n (select sum(ss_ext_sales_price) promotions\n from store_sales\n cross join store\n cross join promotion\n cross join date_dim\n cross join customer\n cross join customer_address\n cross join item\n where ss_sold_date_sk = d_date_sk\n and ss_store_sk = s_store_sk\n and ss_promo_sk = p_promo_sk\n and ss_customer_sk= c_customer_sk\n and ca_address_sk = c_current_addr_sk\n and ss_item_sk = i_item_sk\n and ca_gmt_offset = -6\n and i_category = 'Sports'\n and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y')\n and s_gmt_offset = -6\n and d_year = 2001\n and d_moy = 12) promotional_sales cross join\n (select sum(ss_ext_sales_price) total\n from store_sales\n cross join store\n cross join date_dim\n cross join customer\n cross join customer_address\n cross join item\n where ss_sold_date_sk = d_date_sk\n and ss_store_sk = s_store_sk\n and ss_customer_sk= c_customer_sk\n and ca_address_sk = c_current_addr_sk\n and ss_item_sk = i_item_sk\n and ca_gmt_offset = -6\n and i_category = 'Sports'\n and s_gmt_offset = -6\n and d_year = 2001\n and d_moy = 12) all_sales\norder by promotions, total\nlimit 100;\n", parameters: 0b ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexMetadata::HandleWriteOnlyIndex [GOOD] Test command err: Trying to start YDB, gRPC: 8919, MsgBus: 26152 2025-03-28T09:12:15.897950Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785711016571096:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:15.917463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00193c/r3tmp/tmpx77ZcR/pdisk_1.dat 2025-03-28T09:12:16.682593Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:16.711315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:16.711437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:16.713010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8919, node 1 2025-03-28T09:12:16.897933Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
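KqpIndexMetadata::HandleWriteOnlyIndex, whose output begins above, exercises the phase in which a secondary index exists in the schema but is still being built. A hedged, hypothetical YQL illustration of that phase (table and index names are assumptions, not the test's fixtures):

    -- While an online index build is in flight, the index is write-only:
    -- DML on the table keeps it up to date, but reading through it is
    -- rejected until the build finishes. Names here are illustrative.
    ALTER TABLE t ADD INDEX by_fk GLOBAL SYNC ON (fk);
    -- during the build: SELECT * FROM t VIEW by_fk WHERE fk = 1 fails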
2025-03-28T09:12:16.897962Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:16.897972Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:16.898119Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26152 TClient is connected to server localhost:26152 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:17.961314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:18.016828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:18.209759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:18.563901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:18.670579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:20.898968Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785711016571096:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:20.969112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:21.083746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785736786376619:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:21.083869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:21.472037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.510890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.550172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.592187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.663871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.722260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.805708Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785736786377136:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:21.805823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:21.806152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785736786377141:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:21.810564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:21.826065Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:12:21.827485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785736786377143:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:21.923760Z node 1 :TX_PROXY ERROR: Actor# [1:7486785736786377199:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:23.141820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1179, MsgBus: 11693 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00193c/r3tmp/tmpYQxM8m/pdisk_1.dat 2025-03-28T09:12:29.413568Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785768518723238:2220];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:29.481492Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:12:29.618385Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:29.635627Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:29.635701Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:29.648007Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1179, node 2 2025-03-28T09:12:29.859275Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:29.859299Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:29.859306Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:29.859418Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11693 TClient is connected to server localhost:11693 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:30.523281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
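The NOT_FOUND / "doublechecking" / "path exist, request accepts it" sequence recurring throughout this output is the workload service lazily creating /Root/.metadata/workload_manager/pools/default on first use: TPoolFetcherActor misses, TPoolCreatorActor races another creator, and the retry finds the path already present. The warnings are a benign startup race, not failures. A hedged sketch of declaring a pool explicitly instead (pool name and settings are illustrative assumptions):

    -- Illustrative only; the tests rely on the auto-created default pool.
    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- queries allowed to run at once
        QUEUE_SIZE = 100              -- queries allowed to wait in line
    );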
2025-03-28T09:12:30.539330Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:12:30.557977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:30.693756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T ... ernalPlanNodeId":11}],"Predicate":"Exist(item.id)","E-Cost":"No estimate","E-Size":"No estimate","Name":"Filter"}],"Plans":[{"E-Size":"No estimate","LookupKeyColumns":["b","id"],"Node Type":"TableLookup","PlanNodeId":11,"Path":"\/Root\/tg","Columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"E-Rows":"No estimate","Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Input":"precompute_1_0","Name":"PartitionByKey"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_1_0"}],"Table":"tg","PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node Type":"TopSort-Filter"}],"Node Type":"Merge","SortColumns":["system_date (Desc)","id (Desc)"],"PlanNodeType":"Connection"}],"Node Type":"Limit"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"ExternalPlanNodeId":6}],"Limit":"11","Name":"Top","TopBy":"[row.b,row.pa_id,row.system_date,row.id]"}],"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Operators":[{"Inputs":[{"InternalOperatorId":1},{"InternalOperatorId":1},{"InternalOperatorId":1},{"InternalOperatorId":1}],"Limit":"11","Name":"Limit"},{"E-Rows":"No estimate","Inputs":[{"ExternalPlanNodeId":4}],"Predicate":"NOT If AND item.status != $status_1 AND item.am != $am_1","E-Cost":"No estimate","E-Size":"No estimate","Name":"Filter"}],"Plans":[{"Tables":["tg\/tg_index\/indexImplTable"],"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":"%kqp%tx_result_binding_0_0","Reverse":true,"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/tg\/tg_index\/indexImplTable","E-Rows":"No estimate","ReadRangesPointPrefixLen":"2","ReadRangesKeys":["b","pa_id","system_date","id"],"Table":"tg\/tg_index\/indexImplTable","ReadColumns":["am","b","id","pa_id","status","system_date","type"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Limit-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Top"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Node Type":"Stage"}],"Subplan Name":"CTE precompute_0_0","Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node 
Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/tg","reads":[{"lookup_by":["b","id"],"columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"type":"Lookup"}]},{"name":"\/Root\/tg\/tg_index\/indexImplTable","reads":[{"columns":["am","b","id","pa_id","status","system_date","type"],"reverse":true,"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Limit":"1001","Name":"Limit"}],"Plans":[{"PlanNodeId":4,"Operators":[{"Limit":"1001","Name":"TopSort","TopSortBy":"[row.system_date,row.id]"}],"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.id)","E-Cost":"No estimate","E-Size":"No estimate","Name":"Filter"}],"Plans":[{"PlanNodeId":6,"Operators":[{"E-Rows":"No estimate","Columns":["am","b","cur","id","pa_id","product","status","system_date","type"],"Name":"TableLookup","E-Cost":"No estimate","E-Size":"No estimate","LookupKeyColumns":["b","id"],"Table":"tg"}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Node Type":"Filter"}],"Node Type":"TopSort"}],"Node Type":"Limit"}],"Node Type":"ResultSet_2","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2}}} Trying to start YDB, gRPC: 13482, MsgBus: 27309 2025-03-28T09:12:39.829616Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785813452698731:2073];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00193c/r3tmp/tmpjdWT1r/pdisk_1.dat 2025-03-28T09:12:40.021339Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:12:40.234804Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:40.293384Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:40.299012Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:40.307668Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13482, node 3 2025-03-28T09:12:40.478677Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:40.478716Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:40.478725Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:40.478849Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27309 TClient is connected to server localhost:27309 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:41.324775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:41.336741Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:41.419798Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:41.680570Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:41.967406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:44.830877Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486785813452698731:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:44.830952Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:45.764144Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785839222504257:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:45.764247Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:45.845044Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:45.913135Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:45.977564Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.050421Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.186081Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.243207Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.338920Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785843517472074:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:46.339041Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:46.339375Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785843517472079:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:46.343703Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:46.365420Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785843517472081:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:46.454081Z node 3 :TX_PROXY ERROR: Actor# [3:7486785843517472140:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:48.093789Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::ReplaceFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 16683, MsgBus: 18318 2025-03-28T09:12:30.324226Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785771923135922:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:30.324646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018ff/r3tmp/tmpBkAqy1/pdisk_1.dat 2025-03-28T09:12:30.950998Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:30.959712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:30.959800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:30.961475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16683, node 1 2025-03-28T09:12:31.088435Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:31.088457Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:31.088464Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:31.088576Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18318 TClient is connected to server localhost:18318 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
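KqpUniqueIndex::ReplaceFkDuplicate, whose output surrounds this point, targets REPLACE semantics against a unique secondary index. A hypothetical minimal repro of the conflict the test name describes (schema and values are assumptions, not the test's fixtures):

    -- Two rows colliding on a uniquely-indexed column: the second
    -- REPLACE must be rejected rather than silently duplicating fk.
    CREATE TABLE t (
        pk Uint64,
        fk Uint64,
        INDEX fk_uniq GLOBAL UNIQUE SYNC ON (fk),
        PRIMARY KEY (pk)
    );
    REPLACE INTO t (pk, fk) VALUES (1u, 100u);
    REPLACE INTO t (pk, fk) VALUES (2u, 100u);  -- duplicate fk -> error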
2025-03-28T09:12:31.732223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:31.759086Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:31.769652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:31.977363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:32.258273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:32.390894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:34.439608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785789103006735:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:34.439732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:34.760275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:34.801693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:34.848297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:34.914194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:35.002454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:35.087603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:35.205564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785793397974548:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:35.205624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:35.206353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785793397974554:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:35.214048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:35.235848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785793397974556:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:35.310825Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785771923135922:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:35.311138Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:35.319594Z node 1 :TX_PROXY ERROR: Actor# [1:7486785793397974611:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:37.182647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:40.611777Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0cpt039qt24k0bj731gff, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2NkNDBlNTYtMmViZTdiNTktNDFjNjlhNS1iYTIzMzY4Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:12:40.624196Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2NkNDBlNTYtMmViZTdiNTktNDFjNjlhNS1iYTIzMzY4Mg==, ActorId: [1:7486785801987910351:2553], ActorState: ExecuteState, TraceId: 01jqe0cpt039qt24k0bj731gff, Create QueryResponse for error on request, msg: 2025-03-28T09:12:42.175982Z node 1 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0creq5aq0xw1y0q0pj3zy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2NkNDBlNTYtMmViZTdiNTktNDFjNjlhNS1iYTIzMzY4Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-03-28T09:12:42.176246Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2NkNDBlNTYtMmViZTdiNTktNDFjNjlhNS1iYTIzMzY4Mg==, ActorId: [1:7486785801987910351:2553], ActorState: ExecuteState, TraceId: 01jqe0creq5aq0xw1y0q0pj3zy, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 22439, MsgBus: 22786 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018ff/r3tmp/tmpvqiJx0/pdisk_1.dat 2025-03-28T09:12:43.362333Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:12:43.506623Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:43.518910Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:43.518989Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:43.523814Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22439, node 2 2025-03-28T09:12:43.713933Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:43.713962Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:43.713971Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:43.714094Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22786 TClient is connected to server localhost:22786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:44.215025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:44.223572Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:44.231963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:44.314161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:44.488493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:44.574143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:47.063112Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785845994821432:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.063198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.121069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.204792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.258452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.304288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.381661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.458128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.570604Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785845994821954:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.570710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.570971Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785845994821959:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.574981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:47.595021Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785845994821961:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:47.690039Z node 2 :TX_PROXY ERROR: Actor# [2:7486785845994822018:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:48.969794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:51.365358Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0d1tjadesbbw62g8dmdfa, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTYzYzMxZGYtMjFjOWI5NDUtN2FlZmRjNDMtYmEyMDdhNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:12:51.365617Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTYzYzMxZGYtMjFjOWI5NDUtN2FlZmRjNDMtYmEyMDdhNzY=, ActorId: [2:7486785854584757631:2548], ActorState: ExecuteState, TraceId: 01jqe0d1tjadesbbw62g8dmdfa, Create QueryResponse for error on request, msg: 2025-03-28T09:12:52.118997Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0d2xn0py49z5mrxr4g2a5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTYzYzMxZGYtMjFjOWI5NDUtN2FlZmRjNDMtYmEyMDdhNzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:12:52.119276Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTYzYzMxZGYtMjFjOWI5NDUtN2FlZmRjNDMtYmEyMDdhNzY=, ActorId: [2:7486785854584757631:2548], ActorState: ExecuteState, TraceId: 01jqe0d2xn0py49z5mrxr4g2a5, Create QueryResponse for error on request, msg: >> KqpUniqueIndex::InsertFkDuplicate [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineSimilarityNullableLevel2 [GOOD] >> KqpIndexes::SecondaryIndexInsert1 |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |82.6%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::InsertFkDuplicate [GOOD] Test command err: Trying to start YDB, gRPC: 12170, MsgBus: 19149 2025-03-28T09:12:32.899302Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785781206002392:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:32.903442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2909/0018e0/r3tmp/tmpH1PUpp/pdisk_1.dat 2025-03-28T09:12:33.728477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:33.728593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:33.732091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:33.763653Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12170, node 1 2025-03-28T09:12:34.007390Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:34.007413Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:34.007424Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:34.007541Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19149 TClient is connected to server localhost:19149 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:35.273057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:35.322491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:35.580702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:35.910773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:36.063904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:37.866901Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785781206002392:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:37.902153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:39.374730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785811270775111:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:39.374875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:39.756492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:39.802118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:39.868404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:40.040921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:40.078776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:40.168072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:40.246182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785815565742933:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:40.246273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:40.246724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785815565742938:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:40.251879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:40.271239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785815565742940:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:40.381455Z node 1 :TX_PROXY ERROR: Actor# [1:7486785815565742998:3466] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:42.237817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:45.373386Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486785837040580986:2606], TxId: 281474976710677, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe0cwhb59q1etavb5r1js9k. SessionId : ydb://session/3?node_id=1&id=NTExYzhlMzktNTViYjMwMTAtNWMxMjEwOWYtMzM2ZjljZDA=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:12:45.373943Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486785837040580987:2607], TxId: 281474976710677, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=NTExYzhlMzktNTViYjMwMTAtNWMxMjEwOWYtMzM2ZjljZDA=. TraceId : 01jqe0cwhb59q1etavb5r1js9k. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486785837040580983:2558], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:12:45.374432Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTExYzhlMzktNTViYjMwMTAtNWMxMjEwOWYtMzM2ZjljZDA=, ActorId: [1:7486785824155678669:2558], ActorState: ExecuteState, TraceId: 01jqe0cwhb59q1etavb5r1js9k, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 20294, MsgBus: 13634 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018e0/r3tmp/tmpsUo0oz/pdisk_1.dat 2025-03-28T09:12:47.016667Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:12:47.019737Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:47.039394Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:47.039492Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:47.048997Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20294, node 2 2025-03-28T09:12:47.295226Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:47.295246Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:47.295252Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:47.295355Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13634 TClient is connected to server localhost:13634 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:48.128557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:48.157511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:48.254299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:12:48.498285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:48.590411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.507288Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785861727447541:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.507395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.555421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.588142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.630171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.668791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.711058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.785920Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.896959Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785861727448065:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.897035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.899311Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785861727448070:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.903594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:51.918799Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785861727448072:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:52.009673Z node 2 :TX_PROXY ERROR: Actor# [2:7486785866022415424:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:53.565365Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:55.601272Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486785878907318777:2593], TxId: 281474976710677, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=OTllMjcwNTMtOTI5Y2EwNDAtNzRkOWU5NjQtNWQ0ZjVkMDQ=. CustomerSuppliedId : . TraceId : 01jqe0d6hs9k3gr805a6hth0hd. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-03-28T09:12:55.602088Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486785878907318778:2594], TxId: 281474976710677, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=OTllMjcwNTMtOTI5Y2EwNDAtNzRkOWU5NjQtNWQ0ZjVkMDQ=. TraceId : 01jqe0d6hs9k3gr805a6hth0hd. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7486785878907318774:2549], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:12:55.602401Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTllMjcwNTMtOTI5Y2EwNDAtNzRkOWU5NjQtNWQ0ZjVkMDQ=, ActorId: [2:7486785870317383799:2549], ActorState: ExecuteState, TraceId: 01jqe0d6hs9k3gr805a6hth0hd, Create QueryResponse for error on request, msg: >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-ColumnStore [GOOD] |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots >> KqpIndexes::SecondaryIndexUsingInJoin2+UseStreamJoin [GOOD] >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 12458, MsgBus: 9699 2025-03-28T09:12:05.370564Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785665335699756:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:05.379449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00170d/r3tmp/tmpdUnfXi/pdisk_1.dat 2025-03-28T09:12:05.980040Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:06.013788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:06.013858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:06.016101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12458, node 1 2025-03-28T09:12:06.107095Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:06.107116Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:06.107124Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:06.107252Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9699 TClient is connected to server localhost:9699 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:06.982220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:07.004937Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:09.351671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785682515569462:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:09.351820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:09.352357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785682515569474:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:09.356794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:12:09.383994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785682515569476:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:12:09.484520Z node 1 :TX_PROXY ERROR: Actor# [1:7486785682515569529:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:09.807275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:12:10.112338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:12:10.194113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:10.234672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:10.296639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:10.359819Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785665335699756:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:10.359906Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:10.576221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:10.655890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:10.712653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:10.779392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:12:10.873311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:12:10.906855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:12:10.948549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-28T09:12:11.026375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:12:11.794588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:12:11.941222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:12:11.978535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.018000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.056015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.097909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.143221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.178226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.261739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.346112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.393891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.437410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.499980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.551693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.584961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.630477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.694974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.875919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.877744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.881946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.884226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.887593Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.889895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.892998Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.897187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.898327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.908181Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.908564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.913467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038473;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.914097Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.919749Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.919967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.926375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.932464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.938824Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.942531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.944722Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.948890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.951566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.954656Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.957601Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.961331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.965499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.967432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.971523Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.975130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.976704Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.980673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.982373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.986394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.988453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.991167Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.996600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:50.999535Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:51.002919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:51.005658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:51.008639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:51.011276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:51.013546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:51.020187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:51.020186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:51.025911Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:51.136230Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0bys92kbw1nnqf2yncgcp", SessionId: ydb://session/3?node_id=1&id=M2QzZGVhYWYtYjY0ZjM2NzUtMWY4NTMwY2QtOGRmZDU1Nzc=, Slow query, duration: 36.758583s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:12:51.399783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:12:51.399882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:12:51.400941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7486785850019326229:6973];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-03-28T09:12:51.401393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpIndexes::CreateTableWithExplicitSyncIndexSQL [GOOD] >> KqpIndexes::DeleteByIndex >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex+UseSink [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel1 [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel2 >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DoUpsertWithoutIndexUpdate+UniqIndex+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6667, MsgBus: 4792 2025-03-28T09:12:33.012491Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785784897485928:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:33.012534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018df/r3tmp/tmp9GTqEz/pdisk_1.dat 2025-03-28T09:12:33.593098Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:33.596728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:33.596808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:33.599611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6667, node 1 2025-03-28T09:12:33.724220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:33.724242Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:33.724249Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:33.724361Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4792 TClient is connected to server localhost:4792 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:34.381070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:34.394458Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:34.410364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:34.541598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:34.906541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:35.125722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:37.389449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785802077356823:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.389581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.839413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.898745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.980717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:38.016366Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785784897485928:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:38.016649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:38.023435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:38.088861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:38.157786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:38.279135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785806372324641:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:38.279272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:38.279550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785806372324646:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:38.284311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:38.312802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785806372324648:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:38.379264Z node 1 :TX_PROXY ERROR: Actor# [1:7486785806372324706:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:40.035983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 629 cpu_time_us: 629 } query_phases { duration_us: 1607 cpu_time_us: 1607 } query_phases { duration_us: 10428 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 10112 affected_shards: 1 } query_phases { duration_us: 1533 cpu_time_us: 1533 } query_phases { duration_us: 42860 cpu_time_us: 42860 } query_phases { duration_us: 5173 table_access { name: "/Root/TestTable/Index/indexImplTable" } cpu_time_us: 4031 } query_phases { duration_us: 918 cpu_time_us: 918 } query_phases { duration_us: 3289 cpu_time_us: 3289 } query_phases { duration_us: 4864 cpu_time_us: 4371 } query_phases { duration_us: 8392 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 31 } partitions_count: 1 } table_access { name: "/Root/TestTable/Index/indexImplTable" updates { rows: 1 bytes: 24 } partitions_count: 1 } cpu_time_us: 2931 affected_shards: 2 } compilation { duration_us: 1027676 cpu_time_us: 1014460 } process_cpu_time_us: 62751 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":46,\"Plans\":[{\"Tables\":[\"TestTable\"],\"PlanNodeId\":45,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_7_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1743153161298,\"TaskId\":1,\"Host\":\"ghrun-mdsjyjltvm\",\"ComputeTimeUs\":64}],\"CpuTimeUs\":540}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\"}],\"BaseTimeMs\":1743153161297,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":540,\"Max\":540,\"Min\":540}},\"CTE Name\":\"precompute_7_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":44,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":43,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Delete\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_1\",\"Name\":\"Iterator\"}],\"Node Type\":\"Delete-ConstantExpr\",\"Stats\":{\"StageDurationUs\":0,\"PhysicalStageId\":1,\"BaseTimeMs\":1743153161297,\"FinishedTasks\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"CTE Name\":\"precompute_8_1\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":42,\"Plans\":[{\"Tables\":[\"TestTable\\/Index\\/indexImplTable\"],\"PlanNodeId\":41,\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\",\"Name\":\"Upsert\",\"Table\":\"TestTable\\/Index\\/indexImplTable\"},{\"Inputs\":[],\"Iterator\":\"precompute_8_0\",\"Name\":\"Iterator\"}],\"Node 
Type\":\"Upsert-ConstantExpr\",\"Stats\":{\"ComputeNodes\":[{\"Tasks\":[{\"NodeId\":1,\"FinishTimeMs\":1743153161297,\"TaskId\":2,\"Host\":\"ghrun-mdsjyjltvm\",\"ComputeTimeUs\":94}],\"CpuTimeUs\":420}],\"UseLlvm\":\"undefined\",\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":2,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/TestTable\\/Index\\/indexImplTable\"}],\"BaseTimeMs\":1743153161297,\"NodesScanShards\":[],\"CpuTimeUs\":{\"Count\":1,\"Sum\":420,\"Max\":420,\"Min\":420}},\"CTE Name\":\"precompute_8_0\"}],\"Node Type\":\"Effect\"},{\"PlanNodeId\":39,\"Subplan Name\":\"CTE precompute_8_0\",\"Plans\":[{\"PlanNodeId\":38,\"Plans\":[{\"PlanNodeId\":37,\"Plans\":[{\"PlanNodeId\":3 ... '(\'\"Key\" (Member $147 \'\"Key\")) \'(\'\"Value\" (Member $147 \'\"Value\")) \'(\'\"fk1\" (Member $147 \'\"fk1\")) \'(\'\"fk3\" (Member $147 \'\"fk3\"))))) \'0 $138))\n (let $140 (OrderedFilter $137 (lambda \'($148) (And (Exists (Member $148 \'\"fk1\")) (Exists (Member $148 \'\"fk2\")) (Exists (Member $148 \'\"fk3\"))))))\n (let $141 (lambda \'($150) (Void)))\n (let $142 (ToDict $140 (lambda \'($149) (AsStruct \'(\'\"fk1\" (Member $149 \'\"fk1\")) \'(\'\"fk2\" (Member $149 \'\"fk2\")) \'(\'\"fk3\" (Member $149 \'\"fk3\")))) $141 $35))\n (let $143 (Variant (DictKeys $142) \'1 $138))\n (let $144 (Variant (== (Length $140) (Length $142)) \'2 $138))\n (let $145 (ToDict $137 (lambda \'($151) (AsStruct \'(\'\"Key\" (Member $151 \'\"Key\")))) $141 $35))\n (let $146 (Variant $145 \'\"3\" $138))\n (return (Iterator (AsList $139 $143 $144 $146)))\n))) \'(\'(\'\"_logical_id\" \'4308) \'(\'\"_id\" \'\"ae198b92-d0ec3be6-5eb115e5-12b25823\"))))\n(let $59 (DqCnValue (TDqOutput $58 \'0)))\n(let $60 (DqCnValue (TDqOutput $58 \'2)))\n(let $61 (DqCnValue (TDqOutput $58 \'\"3\")))\n(let $62 (DqCnValue (TDqOutput $58 \'1)))\n(let $63 \'($59 $60 $61 $62))\n(let $64 (KqpTxResultBinding $54 \'\"3\" \'0))\n(let $65 (KqpPhysicalTx \'($58) $63 \'(\'($53 $64)) $3))\n(let $66 \'\"%kqp%tx_result_binding_4_3\")\n(let $67 (DqPhyStage \'() (lambda \'() (Iterator %kqp%tx_result_binding_4_3)) \'(\'(\'\"_logical_id\" \'4703) \'(\'\"_id\" \'\"4cd79a5c-455a8ec-c4f8e2b7-c991360f\"))))\n(let $68 (KqpTable \'\"/Root/TestTable/Index/indexImplTable\" \'\"72057594046644480:18\" \'\"\" \'1))\n(let $69 (KqpCnStreamLookup (TDqOutput $67 \'0) $68 \'(\'\"Key\") $55 $24))\n(let $70 \'\"%kqp%tx_result_binding_4_2\")\n(let $71 (Bool \'false))\n(let $72 (DqPhyStage \'($69) (lambda \'($152) (Map (Filter (Take $152 (Uint64 \'1)) (lambda \'($153) (Not (Contains %kqp%tx_result_binding_4_2 $153)))) (lambda \'($154) $71))) \'(\'(\'\"_logical_id\" \'4691) \'(\'\"_id\" \'\"8a7f92fb-cf115ace-fa32b74-2e6cb320\"))))\n(let $73 (DqCnUnionAll (TDqOutput $72 \'0)))\n(let $74 (Bool \'true))\n(let $75 (DqPhyStage \'($73) (lambda \'($155) (block \'(\n (let $156 (lambda \'($157 $158) $71))\n (return (FromFlow (Condense (ToFlow $155) $74 $156 $156)))\n))) \'(\'(\'\"_logical_id\" \'4671) \'(\'\"_id\" \'\"9062e61-11d40c94-5c9fe330-807d0351\"))))\n(let $76 \'($67 $72 $75))\n(let $77 (DqCnValue (TDqOutput $75 \'0)))\n(let $78 (KqpTxResultBinding $57 \'\"4\" \'2))\n(let $79 (KqpTxResultBinding $55 \'\"4\" \'\"3\"))\n(let $80 (KqpPhysicalTx $76 \'($77) \'(\'($70 $78) \'($66 $79)) $41))\n(let $81 \'\"%kqp%tx_result_binding_4_1\")\n(let $82 \'\"%kqp%tx_result_binding_5_0\")\n(let $83 \'\"%kqp%tx_result_binding_4_0\")\n(let $84 (DqPhyStage \'() (lambda \'() (block \'(\n (let $159 (KqpEnsure $74 %kqp%tx_result_binding_4_1 \'\"2012\" (Utf8 \'\"Duplicated keys found.\")))\n (let 
$160 (KqpEnsure $74 %kqp%tx_result_binding_5_0 \'\"2012\" (Utf8 \'\"Conflict with existing key.\")))\n (let $161 (If (And $159 $160) %kqp%tx_result_binding_4_0 (List $11)))\n (return (ToStream (Just (PartitionByKey $161 (lambda \'($162) (Member $162 \'\"Key\")) (Void) (Void) (lambda \'($163) (FlatMap $163 (lambda \'($164) (Last (ForwardList (Nth $164 \'1))))))))))\n))) \'(\'(\'\"_logical_id\" \'5033) \'(\'\"_id\" \'\"e7c89ae9-d81f7363-3a2ddae0-b57bacda\"))))\n(let $85 (DqCnValue (TDqOutput $84 \'0)))\n(let $86 (KqpTxResultBinding $11 \'\"4\" \'0))\n(let $87 (KqpTxResultBinding $56 \'\"4\" \'1))\n(let $88 (KqpTxResultBinding $56 \'\"5\" \'0))\n(let $89 \'(\'($83 $86) \'($81 $87) \'($82 $88)))\n(let $90 (KqpPhysicalTx \'($84) \'($85) $89 $3))\n(let $91 \'\"%kqp%tx_result_binding_6_0\")\n(let $92 %kqp%tx_result_binding_6_0)\n(let $93 (DqPhyStage \'() (lambda \'() (Iterator (AsList (ToDict (FlatMap (Map $92 (lambda \'($165) (AsStruct \'(\'\"Key\" (Member $165 \'\"Key\")) \'(\'\"fk1\" (Member $165 \'\"fk1\")) \'(\'\"fk3\" (Member $165 \'\"fk3\"))))) (lambda \'($166) (block \'(\n (let $167 (AsStruct \'(\'\"Key\" (Member $166 \'\"Key\"))))\n (return (IfPresent (Lookup $46 $167) (lambda \'($168) (Just \'($167 $168 (Or (AggrNotEquals (Member $166 \'\"fk1\") (Member $168 \'\"fk1\")) (AggrNotEquals (Member $166 \'\"fk3\") (Member $168 \'\"fk3\")))))) (Nothing (OptionalType (TupleType $19 $44 $56)))))\n)))) (lambda \'($169) (Nth $169 \'0)) (lambda \'($170) \'((Nth $170 \'1) (Nth $170 \'2))) $35)))) \'(\'(\'\"_logical_id\" \'5184) \'(\'\"_id\" \'\"d04adfb4-e167aa4a-4b7f9db6-bf14e833\"))))\n(let $94 (DqCnValue (TDqOutput $93 \'0)))\n(let $95 (KqpTxResultBinding $11 \'\"6\" \'0))\n(let $96 \'($91 $95))\n(let $97 (KqpPhysicalTx \'($93) \'($94) \'($51 $96) $3))\n(let $98 (DataSink \'\"KqpTableSink\" \'\"db\"))\n(let $99 (KqpTableSinkSettings $22 \'false \'\"upsert\" \'0 \'\"oltp\" \'false \'false \'()))\n(let $100 (DqPhyStage \'() (lambda \'() (Iterator $92)) \'(\'(\'\"_logical_id\" \'5732) \'(\'\"_id\" \'\"6e841431-6d1ab97-fadc8e04-49fedfc1\")) \'((DqSink \'0 $98 $99))))\n(let $101 \'\"%kqp%tx_result_binding_7_0\")\n(let $102 (DictType $19 (TupleType $44 $56)))\n(let $103 %kqp%tx_result_binding_7_0)\n(let $104 \'(\'(\'\"_logical_id\" \'5760) \'(\'\"_id\" \'\"7505a673-7524936d-9ba297a-36be6e2f\") $30))\n(let $105 (DqPhyStage \'() (lambda \'() (block \'(\n (let $171 (lambda \'($173) (block \'(\n (let $174 (Nth $173 \'1))\n (let $175 (Nth $174 \'0))\n (return (Member (Nth $173 \'0) \'\"Key\") (Member $175 \'\"fk1\") (Member $175 \'\"fk2\") (Member $175 \'\"fk3\") (Nth $174 \'1))\n ))))\n (let $172 (lambda \'($181 $182 $183 $184 $185) $181 $182 $183 $184))\n (return (FromFlow (WideMap (WideFilter (ExpandMap (ToFlow (DictItems $103)) $171) (lambda \'($176 $177 $178 $179 $180) $180)) $172)))\n))) $104))\n(let $106 (DqCnUnionAll (TDqOutput $105 \'0)))\n(let $107 (lambda \'($186) (FromFlow (NarrowMap (ToFlow $186) $34))))\n(let $108 (KqpTableSinkSettings $68 \'false \'\"delete\" \'1 \'\"oltp\" \'false \'false \'()))\n(let $109 (DqPhyStage \'($106) $107 \'(\'(\'\"_logical_id\" \'5746) \'(\'\"_id\" \'\"339ad04d-2b20ab21-5fa63ca9-60008fbe\")) \'((DqSink \'0 $98 $108))))\n(let $110 \'(\'(\'\"_logical_id\" \'5812) \'(\'\"_id\" \'\"13438f88-dfb2e91f-f8619b0c-a9907b9b\") $30))\n(let $111 (DqPhyStage \'() (lambda \'() (FromFlow (ExpandMap (FlatMap (Map (ToFlow $92) (lambda \'($187) (AsStruct \'(\'\"Key\" (Member $187 \'\"Key\")) \'(\'\"fk1\" (Member $187 \'\"fk1\")) \'(\'\"fk3\" (Member $187 \'\"fk3\"))))) (lambda 
\'($188) (block \'(\n (let $189 \'(\'\"Key\" (Member $188 \'\"Key\")))\n (let $190 \'(\'\"fk1\" (Member $188 \'\"fk1\")))\n (let $191 \'(\'\"fk3\" (Member $188 \'\"fk3\")))\n (return (IfPresent (Lookup $103 (AsStruct $189)) (lambda \'($192) (If (Nth $192 \'1) (Just (AsStruct $189 $190 \'(\'\"fk2\" (Member (Nth $192 \'0) \'\"fk2\")) $191)) (Nothing (OptionalType $29)))) (Just (AsStruct $189 $190 $47 $191))))\n)))) $26))) $110))\n(let $112 (DqCnUnionAll (TDqOutput $111 \'0)))\n(let $113 (KqpTableSinkSettings $68 \'false \'\"\" \'2 \'\"oltp\" \'false \'false \'()))\n(let $114 (DqPhyStage \'($112) $107 \'(\'(\'\"_logical_id\" \'5774) \'(\'\"_id\" \'\"5740a572-157cb40d-c7f25972-6fe3c89c\")) \'((DqSink \'0 $98 $113))))\n(let $115 \'($100 $105 $109 $111 $114))\n(let $116 (KqpTxResultBinding $102 \'\"7\" \'0))\n(let $117 (KqpPhysicalTx $115 \'() \'($96 \'($101 $116)) \'($40 \'(\'\"with_effects\"))))\n(let $118 \'($4 $17 $42 $52 $65 $80 $90 $97 $117))\n(return (KqpPhysicalQuery $118 \'() \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 1072356 total_cpu_time_us: 1045985 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/TestTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":16},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Value\\\",\\\"Id\\\":5,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk1\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk2\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk3\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"Indexes\\\":[{\\\"Name\\\":\\\"Index\\\",\\\"Type\\\":2,\\\"State\\\":1,\\\"SchemaVersion\\\":1,\\\"LocalPathId\\\":17,\\\"PathOwnerId\\\":8716544,\\\"KeyColumns\\\":[\\\"fk1\\\",\\\"fk2\\\",\\\"fk3\\\"]}],\\\"SecondaryGlobalIndexMetadata\\\":[{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/TestTable/Index/indexImplTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId
\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":18},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk1\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk2\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"fk3\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"fk1\\\",\\\"fk2\\\",\\\"fk3\\\",\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1743153179\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"b6062bd8-3d9a767a-c73b6da5-1ca85a60\",\"version\":\"1.0\"}" 2025-03-28T09:13:01.217592Z node 3 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> TObjectStorageListingTest::MaxKeysAndSharding [GOOD] >> TObjectStorageListingTest::SchemaChecks >> TLocksTest::GoodDupLock [GOOD] >> TLocksTest::CK_Range_GoodLock >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] Test command err: 2025-03-28T09:07:10.703813Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:07:10.838773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:07:10.863595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:07:10.863928Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:07:10.872922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:07:10.873167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:07:10.873402Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:07:10.873512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:07:10.873645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:07:10.873779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:07:10.873921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:07:10.874044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:07:10.874152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:07:10.874272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:07:10.874412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:07:10.874536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:07:10.904984Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:07:10.905365Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:07:10.905422Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:07:10.905647Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:07:10.905814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:07:10.905920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:07:10.905988Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:07:10.906107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:07:10.906209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:07:10.906262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:07:10.906292Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:07:10.906484Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:07:10.906556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:07:10.906601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:07:10.906634Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:07:10.906797Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:07:10.906861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:07:10.906914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:07:10.906953Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:07:10.907031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:07:10.907069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:07:10.907102Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:07:10.907158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:07:10.907202Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:07:10.907238Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:07:10.907642Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-03-28T09:07:10.907757Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=61; 2025-03-28T09:07:10.907864Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=51; 2025-03-28T09:07:10.907971Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=48; 2025-03-28T09:07:10.908174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:07:10.908248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:07:10.908288Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:07:10.908494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:07:10.908540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:07:10.908569Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:07:10.908738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:07:10.908809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:07:10.908840Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:07:10.909028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:07:10.909072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:07:10.909101Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:07:10.909250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:07:10.909311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:07:10.909367Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:8568];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;co
lumn_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:74;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:75;blob_range:[NO_BLOB:0:8552];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:8488];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2752];;col
umn_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:54;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:55;blob_range:[NO_BLOB:0:8472];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:1;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:5;ch
unk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;;;switched=(portion_id:55;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2117976;index_size:24;meta:((produced=INSERTED;)););(portion_id:54;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2549080;index_size:24;meta:((produced=SPLIT_COMPACTED;)););; 2025-03-28T09:13:02.408522Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11196:12823];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-28T09:13:02.411015Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11196:12823];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpUniqueIndex::UpsertImplicitNullInComplexFk [GOOD] >> KqpIndexes::InnerJoinSecondaryIndexLookupAndRightTablePredicateNonIndexColumn [GOOD] >> KqpIndexes::IndexTopSortPushDown >> KqpIndexes::SecondaryIndexInsert1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpUniqueIndex::UpsertImplicitNullInComplexFk [GOOD] Test command err: Trying to start YDB, gRPC: 31199, MsgBus: 2835 2025-03-28T09:12:42.391871Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785824395180853:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:42.400577Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018c1/r3tmp/tmpCxUzpO/pdisk_1.dat 2025-03-28T09:12:43.098336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:43.098430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:43.104457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:43.116998Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31199, node 1 2025-03-28T09:12:43.278343Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:43.278373Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:43.278381Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:43.278491Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2835 TClient is connected to server localhost:2835 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:44.072738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:44.098133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:12:44.304695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:12:44.481783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:12:44.575340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.572810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785841575051676:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:46.572906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:46.925047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.983645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.039126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.115029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.143142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.212762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.303077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785845870019495:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.303157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.303485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785845870019500:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.309496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:47.323017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785845870019502:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:47.390432Z node 1 :TX_PROXY ERROR: Actor# [1:7486785845870019558:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:47.391035Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785824395180853:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:47.391149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:48.656687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... Trying to start YDB, gRPC: 19562, MsgBus: 15757 2025-03-28T09:12:52.491846Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785867364059119:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:52.492081Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018c1/r3tmp/tmpnyaOVq/pdisk_1.dat 2025-03-28T09:12:52.715930Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:52.755515Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:52.755594Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:52.757021Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19562, node 2 2025-03-28T09:12:52.875288Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:52.875313Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:52.875321Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:52.875444Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15757 TClient is connected to server localhost:15757 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:53.616007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:53.638952Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:53.657984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:53.769796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:54.013363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:54.175578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:56.999890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785884543929941:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:56.999976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:57.045465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:57.094544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:57.138215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:57.187657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:57.249500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:57.324944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:57.433350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785888838897753:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:57.433454Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:57.433758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785888838897758:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:57.437456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:57.474381Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785888838897760:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:57.492591Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785867364059119:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:57.492680Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:57.571249Z node 2 :TX_PROXY ERROR: Actor# [2:7486785888838897816:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:58.645974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:04.820498Z node 2 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe0detxbqw14pxn3g26ns0n, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDM1YjI4ZjktMmY2NTM0Y2MtNTE3MzdiODMtNmU5NTQxMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:13:04.830941Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDM1YjI4ZjktMmY2NTM0Y2MtNTE3MzdiODMtNmU5NTQxMmY=, ActorId: [2:7486785893133866141:2549], ActorState: ExecuteState, TraceId: 01jqe0detxbqw14pxn3g26ns0n, Create QueryResponse for error on request, msg: |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay/ydb_query_replay |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |82.6%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay >> TObjectStorageListingTest::SchemaChecks [GOOD] >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexInsert1 [GOOD] Test command err: Trying to start YDB, gRPC: 11583, MsgBus: 18930 2025-03-28T09:11:23.908189Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785485378758783:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:23.914861Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019c3/r3tmp/tmpbcyioW/pdisk_1.dat 2025-03-28T09:11:24.991646Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:25.059221Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:25.099421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:25.099534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:25.120309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:25.287133Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} 
ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.134959s 2025-03-28T09:11:25.287209Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.135052s TServer::EnableGrpc on GrpcPort 11583, node 1 2025-03-28T09:11:25.895327Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:25.895352Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:25.895359Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:25.895465Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18930 TClient is connected to server localhost:18930 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:27.817182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:27.898775Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:27.921956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.332270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.699388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.859697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:11:28.903913Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785485378758783:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:28.903971Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:30.215626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785515443531626:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:30.215776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:30.685300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.743695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.837611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.885127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.941192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.026475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.110296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785519738499450:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.110424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.111089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785519738499455:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.114512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:31.131074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785519738499457:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:31.187945Z node 1 :TX_PROXY ERROR: Actor# [1:7486785519738499509:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:34.819903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:35.633620Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710673 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: SIMILARITY_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-03-28T09:11:35.634615Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-28T09:11:35.634712Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7486785536918369306:2530], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:11:35.634859Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715757 2025-03-28T09:11:35.634922Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7486785536918369306:2530], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 
2025-03-28T09:11:35.635194Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-28T09:11:35.635233Z node 1 :BUILD_I ... TTxBuildProgress: Resume: id# 281474976715673 2025-03-28T09:12:15.615835Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7486785696553364014:2518], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:12:15.615859Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-28T09:12:15.616067Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976715673 2025-03-28T09:12:15.616119Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976715673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:7486785696553364014:2518], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710764, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710765, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:12:15.616468Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976715673 2025-03-28T09:12:15.616662Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976715673 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { } } max_batch_rows: 50000 max_batch_bytes: 8388608 max_shards_in_flight: 32 max_retries_upload_batch: 50 } Progress: 100 } 2025-03-28T09:12:15.623103Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-03-28T09:12:20.522803Z node 2 
:FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:12:20.522839Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:23.261922Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976715673 Trying to start YDB, gRPC: 30950, MsgBus: 25694 2025-03-28T09:12:58.475632Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785892337214275:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:58.475754Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019c3/r3tmp/tmpvMW6kW/pdisk_1.dat 2025-03-28T09:12:58.679671Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:58.708476Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:58.708590Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:58.710484Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30950, node 3 2025-03-28T09:12:58.765663Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:58.765694Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:58.765705Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:58.765929Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25694 TClient is connected to server localhost:25694 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:59.350069Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:59.360914Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:12:59.372591Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:59.484869Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:59.755856Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:59.859955Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:03.306845Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785913812052532:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:03.306970Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:03.366559Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:03.423648Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:03.470540Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:03.475621Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486785892337214275:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:03.475688Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:03.521899Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:03.559416Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:03.626815Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:03.688681Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785913812053046:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:03.688827Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:03.689091Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785913812053051:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:03.693497Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:03.720515Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785913812053053:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:03.803640Z node 3 :TX_PROXY ERROR: Actor# [3:7486785913812053109:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:05.436927Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpIndexes::JoinWithNonPKColumnsInPredicate+UseStreamJoin [GOOD] >> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SchemaChecks [GOOD] Test command err: 2025-03-28T09:12:18.542363Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785721413498105:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:18.542408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f19/r3tmp/tmpdquvKm/pdisk_1.dat 2025-03-28T09:12:19.120720Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:19.123738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:19.123882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:19.128748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15587, node 1 2025-03-28T09:12:20.295144Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:20.295185Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:20.295195Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:20.295399Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16376 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:12:22.311825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:12:22.385276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.543602Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785721413498105:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:23.543671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:34.126385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:12:34.126416Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:03.396150Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785916204412093:2203];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f19/r3tmp/tmpA6YWAW/pdisk_1.dat 2025-03-28T09:13:03.549794Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:13:03.577272Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:03.601880Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:03.601974Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:03.603185Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5935, node 2 2025-03-28T09:13:03.687317Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:03.687342Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:03.687349Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:03.687479Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11643 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 
2025-03-28T09:13:03.939170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:03.960002Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:03.970373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest >> KqpScanSpilling::HandleErrorsCorrectly [GOOD] Test command err: cwd: /home/runner/.ya/build/build_root/2909/00172f/ydb/core/kqp/ut/spilling/test-results/unittest/testing_out_stuff/chunk0 Trying to start YDB, gRPC: 5218, MsgBus: 4717 2025-03-28T09:11:21.364710Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785475028313989:2085];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:21.364824Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00172f/r3tmp/tmpC282WC/pdisk_1.dat 2025-03-28T09:11:21.842490Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:21.865023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:21.865146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:21.867314Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5218, node 1 2025-03-28T09:11:21.971296Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:21.971328Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:21.971336Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:21.971474Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4717 TClient is connected to server localhost:4717 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:11:22.934910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:22.955942Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:22.960948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:23.109554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:23.311455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:23.410212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:25.717433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785492208184909:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:25.717539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:26.328558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:26.367203Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785475028313989:2085];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:26.367777Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:26.378504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:26.454899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:26.531063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:26.574797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:26.663574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:26.727386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785496503152731:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:26.727466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:26.727655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785496503152736:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:26.732639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:26.746940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785496503152738:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:26.804870Z node 1 :TX_PROXY ERROR: Actor# [1:7486785496503152791:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:36.842990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:11:36.843027Z node 1 :IMPORT WARN: Table profiles were not loaded ( (let $1 (KqpTable '"/Root/KeyValue" '"72057594046644480:6" '"" '1)) (let $2 (KqpRowsSourceSettings $1 '('"Key" '"Value") '() (Void) '())) (let $3 (OptionalType (DataType 'Uint64))) (let $4 (OptionalType (DataType 'String))) (let $5 '('('"_logical_id" '779) '('"_id" '"9066c8f0-6bb9ada8-87165628-5447f688") '('"_wide_channels" (StructType '('"Key" $3) '('"Value" $4))))) (let $6 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $2)) (lambda '($17) (block '( (let $18 (lambda '($19) (Member $19 '"Key") (Member $19 '"Value"))) (return (FromFlow (ExpandMap (ToFlow $17) $18))) ))) $5)) (let $7 '('1)) (let $8 (DqCnHashShuffle (TDqOutput $6 '0) $7)) (let $9 (StructType '('"t1.Key" $3) '('"t1.Value" $4) '('"t2.Key" $3) '('"t2.Value" $4))) (let $10 '('('"_logical_id" '677) '('"_id" '"f35130c4-8fb1d38e-76548b63-138b3f70") '('"_wide_channels" $9))) (let $11 (DqPhyStage '($8) (lambda '($20) (block '( (let $21 '('0 '0 '1 '1)) (let $22 '('0 '2 '1 '3)) (let $23 (GraceSelfJoinCore (ToFlow $20) 'Full $7 $7 $21 $22 '('"t1.Value") '('"t2.Value") '())) (return (FromFlow (WideSort $23 '('('1 (Bool 'true)))))) ))) $10)) (let $12 (DqCnMerge (TDqOutput $11 '0) '('('1 '"Asc")))) (let $13 (DqPhyStage '($12) (lambda '($24) (FromFlow (NarrowMap (ToFlow $24) (lambda '($25 $26 $27 $28) (AsStruct '('"t1.Key" $25) '('"t1.Value" $26) '('"t2.Key" $27) '('"t2.Value" $28)))))) '('('"_logical_id" '689) '('"_id" '"7ba96658-4578d0ac-a9310387-9e5f8e12")))) (let $14 '($6 $11 $13)) (let $15 '('"t1.Key" '"t1.Value" '"t2.Key" '"t2.Value")) (let $16 (DqCnResult (TDqOutput $13 '0) $15)) (return (KqpPhysicalQuery '((KqpPhysicalTx $14 '($16) '() '('('"type" '"generic")))) '((KqpTxResultBinding (ListType $9) '0 '0)) '('('"type" '"query")))) ) 2025-03-28T09:13:07.127010Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 0, bytes: 1401088 2025-03-28T09:13:07.144696Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 1, bytes: 84 2025-03-28T09:13:07.144752Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 2, bytes: 2402376 2025-03-28T09:13:07.145213Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 3, bytes: 144 2025-03-28T09:13:07.145251Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 4, bytes: 1200936 2025-03-28T09:13:07.145415Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 5, bytes: 72 2025-03-28T09:13:07.145446Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 6, bytes: 1200744 2025-03-28T09:13:07.145640Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. 
From: [1:7486785925999888548:6488], blobId: 7, bytes: 72 2025-03-28T09:13:07.145671Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 8, bytes: 2001584 2025-03-28T09:13:07.146069Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 9, bytes: 120 2025-03-28T09:13:07.146111Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 10, bytes: 1801952 2025-03-28T09:13:07.146460Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 11, bytes: 108 2025-03-28T09:13:07.146495Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 12, bytes: 2001792 2025-03-28T09:13:07.146829Z node 1 :KQP_COMPUTE ERROR: TxId: 281474976710971. Error: [TEvError] File size limit exceeded: 1/0Mb 2025-03-28T09:13:07.152975Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 13, bytes: 120 2025-03-28T09:13:07.153065Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 14, bytes: 2202288 2025-03-28T09:13:07.153821Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 15, bytes: 132 2025-03-28T09:13:07.153864Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 16, bytes: 2002000 2025-03-28T09:13:07.154492Z node 1 :KQP_COMPUTE ERROR: [Write] File size limit exceeded. From: [1:7486785925999888548:6488], blobId: 17, bytes: 120 2025-03-28T09:13:07.158734Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486785925999888539:4538], TxId: 281474976710971, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZmY3NjNmZC1iMzgzZmRhYi1lYTc3NTAyYS1hZWFmZjBkOA==. CustomerSuppliedId : . TraceId : 01jqe0dh63em70xm1w0aabkq8x. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: [Compute spilling][TEvError] File size limit exceeded: 1/0Mb }. 2025-03-28T09:13:07.159212Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486785925999888540:4539], TxId: 281474976710971, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jqe0dh63em70xm1w0aabkq8x. SessionId : ydb://session/3?node_id=1&id=ZmY3NjNmZC1iMzgzZmRhYi1lYTc3NTAyYS1hZWFmZjBkOA==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2025-03-28T09:13:07.186240Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmY3NjNmZC1iMzgzZmRhYi1lYTc3NTAyYS1hZWFmZjBkOA==, ActorId: [1:7486785921704921225:4531], ActorState: ExecuteState, TraceId: 01jqe0dh63em70xm1w0aabkq8x, Create QueryResponse for error on request, msg: >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel2 [GOOD] >> KqpIndexes::DeleteByIndex [GOOD] |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots >> KqpJoinOrder::CanonizedJoinOrderTPCH15 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SecondaryIndexUsingInJoin2-UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 62468, MsgBus: 20639 2025-03-28T09:12:36.520599Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785798839691060:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:36.521119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018c7/r3tmp/tmpuI3Bqj/pdisk_1.dat 2025-03-28T09:12:37.026138Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:37.030776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:37.030878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:37.032694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62468, node 1 2025-03-28T09:12:37.215094Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:37.215119Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:37.215125Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:37.215235Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20639 TClient is connected to server localhost:20639 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:38.252943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:38.313078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:38.693749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:39.103289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:39.285537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:41.519124Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785798839691060:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:41.519192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:42.786679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785824609496493:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:42.786830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:43.157914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:43.276312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:43.333438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:43.381602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:43.424086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:43.484658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:43.616269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785828904464310:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:43.616362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:43.616787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785828904464316:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:43.621380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:43.651292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785828904464318:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:43.747310Z node 1 :TX_PROXY ERROR: Actor# [1:7486785828904464377:3466] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:45.165600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:45.271854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17383, MsgBus: 31003 2025-03-28T09:12:51.339124Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785864816162457:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018c7/r3tmp/tmp9AUHGm/pdisk_1.dat 2025-03-28T09:12:51.396078Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:12:51.652271Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:51.710618Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:51.710724Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:51.712628Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17383, node 2 2025-03-28T09:12:51.883296Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:51.883326Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:51.883336Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:51.883463Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31003 TClient is connected to server localhost:31003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:12:52.744809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:52.769228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:52.939724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:53.158653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose ... 4976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:56.130647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:56.209095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:56.261487Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785864816162457:2215];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:56.261607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:56.262364Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:56.304876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:56.376213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785886291001044:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:56.376336Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:56.376588Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785886291001049:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:56.382444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:56.419583Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785886291001051:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:56.522292Z node 2 :TX_PROXY ERROR: Actor# [2:7486785886291001113:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:57.668637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:57.764809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25622, MsgBus: 10416 2025-03-28T09:13:01.014955Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785907033793826:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:01.015072Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018c7/r3tmp/tmpNmbEgw/pdisk_1.dat 2025-03-28T09:13:01.216481Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:01.248945Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:01.249036Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:01.255837Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25622, node 3 2025-03-28T09:13:01.331347Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:01.331368Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:01.331375Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:01.331505Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10416 TClient is connected to server localhost:10416 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:13:01.880933Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:13:01.909751Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:02.020313Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:02.244408Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:02.348652Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:04.932557Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785919918697464:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:04.932649Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:04.987021Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:05.028536Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:05.069539Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:05.103552Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:05.176571Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:05.248955Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:05.299015Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785924213665278:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:05.299141Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:05.299451Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785924213665283:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:05.303816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:05.318479Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785924213665285:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:05.394147Z node 3 :TX_PROXY ERROR: Actor# [3:7486785924213665338:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:06.024325Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486785907033793826:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:06.024390Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:06.683734Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:06.737329Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::DeleteByIndex [GOOD] Test command err: Trying to start YDB, gRPC: 3444, MsgBus: 17855 2025-03-28T09:12:47.008454Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785848012161849:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:47.008588Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018a8/r3tmp/tmpkpHZsH/pdisk_1.dat 2025-03-28T09:12:47.703915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:47.704031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:47.708356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:47.745040Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3444, node 1 2025-03-28T09:12:47.919202Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:47.919219Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:47.919237Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:47.919351Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17855 TClient is connected to server localhost:17855 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:48.610237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:48.651449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:48.793034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:49.020622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:49.128089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:51.004940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785860897065505:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.005072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.325132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.403892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.463072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.563805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.630120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.705454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.822151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785865192033325:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.822266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.822759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785865192033330:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.827456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:51.851998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785865192033332:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:51.931570Z node 1 :TX_PROXY ERROR: Actor# [1:7486785865192033387:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:52.010141Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785848012161849:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:52.016000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:52.997458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:53.126980Z node 1 :TX_PROXY ERROR: Actor# [1:7486785873781968519:3817] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:54.024980Z node 1 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 29320, MsgBus: 64958 2025-03-28T09:12:54.928521Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785876316048234:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:54.929002Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018a8/r3tmp/tmppb7pzw/pdisk_1.dat 2025-03-28T09:12:55.107923Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:55.123817Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:55.123918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:55.125836Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29320, node 2 2025-03-28T09:12:55.180153Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:55.180178Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:55.180186Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:55.180299Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64958 TClient is connected to server localhost:64958 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:55.594977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:55.607834Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:12:55.625380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 720575940 ... 15664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:58.488701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:58.523668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:58.568178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:58.617152Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785893495919693:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:58.617239Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:58.617506Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785893495919698:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:58.621434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:58.632559Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785893495919700:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:12:58.700878Z node 2 :TX_PROXY ERROR: Actor# [2:7486785893495919754:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:59.733746Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:59.866290Z node 2 :TX_PROXY ERROR: Actor# [2:7486785897790887582:3799] txid# 281474976715672, issues: { message: "Check failed: path: \'/Root/TestTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:59.938007Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785876316048234:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:59.999190Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:00.994803Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:13:01.070246Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill Trying to start YDB, gRPC: 3431, MsgBus: 13093 2025-03-28T09:13:01.963262Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785905365323393:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:01.963321Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018a8/r3tmp/tmpGEsZg8/pdisk_1.dat 2025-03-28T09:13:02.126590Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:02.139436Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:02.139512Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:02.141265Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3431, node 3 2025-03-28T09:13:02.193392Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:02.193419Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:02.193433Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:02.193596Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13093 TClient is connected to server localhost:13093 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:02.674823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:02.700880Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:02.777473Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:02.972256Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:03.063873Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:05.623167Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785922545194332:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:05.623259Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:05.672668Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:05.754599Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:05.840704Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:05.905110Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:05.936841Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:05.971354Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:06.074419Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785926840162149:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:06.074545Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:06.074807Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785926840162154:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:06.079115Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:06.097876Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-28T09:13:06.098258Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785926840162156:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:06.169462Z node 3 :TX_PROXY ERROR: Actor# [3:7486785926840162212:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:06.966219Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486785905365323393:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:06.966286Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:07.315629Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 19091, MsgBus: 1718 2025-03-28T09:11:23.908310Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785486534720317:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:23.918201Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001990/r3tmp/tmpt0N9DZ/pdisk_1.dat 2025-03-28T09:11:24.898956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:24.910914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:24.999236Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:25.019993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:25.021512Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:25.291872Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.112389s 2025-03-28T09:11:25.291956Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.112492s TServer::EnableGrpc on GrpcPort 19091, node 1 2025-03-28T09:11:25.883728Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:25.883757Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:25.883767Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:25.883983Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1718 TClient is connected to server localhost:1718 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:28.029209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.065323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.373375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.644895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.764881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.982511Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785486534720317:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:28.987640Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:29.387800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785512304525842:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:29.387909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:30.687583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.758387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.806386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.872390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.922842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.035580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.129033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785520894461004:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.129102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.129447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785520894461009:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.133996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:31.150290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785520894461011:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:31.236575Z node 1 :TX_PROXY ERROR: Actor# [1:7486785520894461066:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:34.818787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:35.403950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:11:35.596120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:11:39.982989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:11:39.983018Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 2210, MsgBus: 22772 2025-03-28T09:12:10.895779Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785685777482383:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:10.909785Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001990/r3tmp/tmpWrMVHL/pdisk_1.dat 2025-03-28T09:12:11.121155Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:11.180129Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:11.180234Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:11.184550Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2210, node 2 2025-03-28T09:12:11.262575Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:11.262600Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:11.262608Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:11.262754Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22772 TClient is connected to server localhost:22772 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:12.028246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:12.055683Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:12.077582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:12:12.177174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:12.394615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:12.486784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:15.894859Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785685777482383:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:15.894948Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:16.071859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785711547287829:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:16.071942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:16.129536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:16.212855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:16.296161Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:16.377003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:16.427826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:16.517275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:16.626388Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785711547288353:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:16.626484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:16.630888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785711547288359:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:16.638334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:16.657788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785711547288361:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:12:16.753746Z node 2 :TX_PROXY ERROR: Actor# [2:7486785711547288416:3464] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:18.575609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:18.959036Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:12:19.106277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-28T09:12:19.251253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2025-03-28T09:12:19.364059Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 2025-03-28T09:12:19.468913Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-03-28T09:12:19.468951Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-03-28T09:12:19.469272Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-03-28T09:12:26.059859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:12:26.059890Z node 2 :IMPORT WARN: Table profiles were not loaded >> KqpJoinOrder::TPCHRandomJoinViewJustWorks-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCHRandomJoinViewJustWorks-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 24304, MsgBus: 31747 2025-03-28T09:12:18.206930Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785722797289026:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:18.207120Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001709/r3tmp/tmpgiLEX6/pdisk_1.dat 2025-03-28T09:12:18.867706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:18.867813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:18.868213Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:18.888955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 
24304, node 1 2025-03-28T09:12:19.111334Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:19.111358Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:19.111366Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:19.111476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31747 TClient is connected to server localhost:31747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:20.182179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:22.646051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785739977158882:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:22.646147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:22.648365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785739977158894:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:22.660060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:12:22.677875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785739977158896:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:12:22.732117Z node 1 :TX_PROXY ERROR: Actor# [1:7486785739977158948:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:23.154812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.203805Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785722797289026:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:23.203862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:23.280219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.349629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.383842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.460897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.717535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.802811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.893444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.955458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.997900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:12:24.071926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:12:24.146311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-28T09:12:24.257528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.220825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:12:25.306730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.349537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.389011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.475346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.553721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.639124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.671616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.715803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.801374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.878623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.917524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.999778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:12:26.047728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:12:26.107929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:12:26.152186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:12:26.197623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-28T09:12:26.249566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but p ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.830881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.831103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.836264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.836264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.841578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.841578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.847099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.847104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.853259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.853280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.859325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.859397Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.865413Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.865413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.871356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.875590Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.875966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.882260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.882276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.889410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.889447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038548;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.895596Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.898454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.902241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.904288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038447;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.908221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.909929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.912543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038473;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.917715Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.922450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.925643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.929370Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.931434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038540;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.935238Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.936834Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.941064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.941930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.947052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.947076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.953262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.956014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038434;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.962774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.962890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.969391Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:05.979631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:06.039982Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0ccep8e834fyrjj0tjt2t", SessionId: ydb://session/3?node_id=1&id=NThkZjMzNTgtYjYyZDM0MWUtZGI4OWJkYjItN2YxN2QwNjA=, Slow query, duration: 37.665264s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:13:06.309571Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:13:06.309817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:13:06.310314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7486785894596007939:5853];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-03-28T09:13:06.310822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH15 [GOOD] Test command err: Trying to start YDB, gRPC: 5280, MsgBus: 6930 2025-03-28T09:11:21.367788Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785478395713007:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:21.367855Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00171b/r3tmp/tmpFGhw6g/pdisk_1.dat 2025-03-28T09:11:21.792776Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:21.802315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:21.802394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:21.810328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5280, node 1 2025-03-28T09:11:21.938720Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken 
or outdated, will use file: (empty maybe) 2025-03-28T09:11:21.938744Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:21.938755Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:21.938909Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6930 TClient is connected to server localhost:6930 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:23.134071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:23.170850Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:26.357946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785499870550165:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:26.358071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:26.358144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785499870550177:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:26.362401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:11:26.370294Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785478395713007:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:26.370378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:26.384394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785499870550179:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:11:26.486918Z node 1 :TX_PROXY ERROR: Actor# [1:7486785499870550232:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:26.855983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:11:27.314016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785504165517816:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:11:27.314250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785504165517816:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:11:27.314571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785504165517816:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:11:27.327532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785504165517804:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:11:27.327605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785504165517804:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:11:27.327871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785504165517804:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:11:27.327994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785504165517804:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:11:27.328117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785504165517804:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:11:27.328231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785504165517804:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:11:27.328389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785504165517804:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:11:27.328515Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7486785504165517804:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:11:27.328658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785504165517804:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:11:27.328794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785504165517804:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:11:27.328905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785504165517804:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:11:27.329027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785504165517804:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:11:27.330783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785504165517816:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:11:27.330970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785504165517816:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:11:27.331088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785504165517816:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:11:27.331211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785504165517816:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:11:27.331351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785504165517816:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:11:27.331461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785504165517816:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:11:27.331564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785504165517816:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:11:27.331658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785504165517816:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:11:27.331745Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;self_id=[1:7486785504165517816:2364];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:11:27.463377Z node 1 :TX_COL ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.341856Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.341923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.347927Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.348101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.352208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.353517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.356303Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.359295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.360184Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.365110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.365453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.371209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.379043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.383089Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.384929Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.389415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.389424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.395291Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.395337Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.401342Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.404684Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.406182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.410469Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.410767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.416352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.416819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.421830Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.423321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.428181Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.434384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.441421Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.448028Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.454307Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.460100Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.467039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.472446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.478596Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.484600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.489068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.491204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.495787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.498890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.502488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.510609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.634804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:12:52.753987Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0bz2j15q8dk1pvm967kja", SessionId: ydb://session/3?node_id=1&id=OWRmZjhmYzAtNjU1OGNlYTktOTg3MjJhYzAtYjg0ODM2NTM=, Slow query, duration: 38.079502s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:12:53.271953Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:12:53.272416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:12:53.273044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486785787633410276:10281];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-03-28T09:12:53.273397Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex [GOOD] >> KqpIndexes::IndexTopSortPushDown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::WriteIntoRenamingAsyncIndex [GOOD] Test command err: Trying to start YDB, gRPC: 26891, MsgBus: 8596 2025-03-28T09:11:23.910753Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785484513778749:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:23.918035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019ad/r3tmp/tmpwX0XtL/pdisk_1.dat 2025-03-28T09:11:25.021853Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:25.091020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:25.091133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:25.091787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:25.096522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:25.287556Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.107836s 2025-03-28T09:11:25.287622Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.107917s TServer::EnableGrpc on GrpcPort 26891, node 1 2025-03-28T09:11:25.928254Z 
node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:25.928288Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:25.928299Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:25.928511Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8596 TClient is connected to server localhost:8596 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:27.458426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:27.610860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.201546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.415380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.524695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.958658Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785484513778749:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:28.972482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:29.027303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785510283584237:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:29.027447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:30.690761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.743455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.795197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.880784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.923961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.003550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.084591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785518873519425:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.084678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.090953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785518873519430:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.094793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:31.104670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785518873519433:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:31.197986Z node 1 :TX_PROXY ERROR: Actor# [1:7486785518873519489:3465] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:34.741567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:37.306532Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 281474976710708 at tablet 72075186224037919 errors: SHARD_IS_BLOCKED (Can't propose tx 281474976710708 at blocked shard 72075186224037919) | 2025-03-28T09:11:37.306583Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710708 at tablet 72075186224037919 status: ERROR errors: SHARD_IS_BLOCKED (Can't propose tx 281474976710708 at blocked shard 72075186224037919) | 2025-03-28T09:11:37.306722Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486785544643325156:2566] TxId: 281474976710708. Ctx: { TraceId: 01jqe0ath3f4bhyv4damdmmf22, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM0YjBiYmYtZDE3NTU2MDAtMTY3NmYxM2MtOTYxZGZjMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: [EXECUTION_CANCELLED] Transaction was cleaned up; 2025-03-28T09:11:37.333816Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzM0YjBiYmYtZDE3NTU2MDAtMTY3NmYxM2MtOTYxZGZjMWU=, ActorId: [1:7486785536053389787:2566], ActorState: ExecuteState, TraceId: 01jqe0ath3f4bhyv4damdmmf22, Create QueryResponse for error on request, msg: 2025-03-28T09:11:39.979015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:11:39.979058Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:45.337641Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486785579003068594:2566] TxId: 281474976711030. Ctx: { TraceId: 01jqe0b28jfve4t0t2bcg50mqk, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM0YjBiYmYtZDE3NTU2MDAtMTY3NmYxM2MtOTYxZGZjMWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
OVERLOADED: [WRONG_SHARD_STATE] Rejecting data TxId 281474976711030 because datashard 72075186224037927: is in process of split opId 281474976715660 state SplitSrcWaitForNoTxInFlight (wrong shard state); 2025-03-28T09:11:45.337886Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzM0YjBiYmYtZDE3NTU2MDAtMTY3NmYxM2MtOTYxZGZjMWU=, ActorId: [1:7486785536053389787:2566], ActorState: ExecuteState, TraceId: 01jqe0b28jfve4t0t2bcg50mqk, Create QueryResponse for error on request, msg: 2025-03-28T09:11:45.413126Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-03-28T09:11:45.413177Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2025-03-28T09:11:45.413354Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found 2025-03-28T09:11:45.415787Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-03-28T09:11:45.415830Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found 2025-03-28T09:11:45.415850Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2025-03-28T09:11:45.415871Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TE ... will try to initialize from file: (empty maybe) 2025-03-28T09:12:41.895337Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:41.895472Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11196 TClient is connected to server localhost:11196 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:42.432107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:42.442460Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:42.453554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:12:42.665085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:42.901932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:43.068010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:46.554817Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785820910571260:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:46.561252Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:47.028907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785842385409478:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.029017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.065826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.112573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.156089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.242980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.336692Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.411687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.524881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785846680377293:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.524965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.525354Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785846680377298:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.530195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:47.553013Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:12:47.553811Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785846680377300:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:47.642931Z node 2 :TX_PROXY ERROR: Actor# [2:7486785846680377359:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:49.104162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:49.757796Z node 2 :TX_DATASHARD ERROR: Shard 72075186224037937 cannot parse tx 281474976710685: Table '/Root/MultiShardIndexed' scheme changed. 2025-03-28T09:12:49.758053Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7486785855270313407:2558] TxId: 281474976710685. Ctx: { TraceId: 01jqe0d1a7ar2tvssnnd3k5qdy, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjRiYjUwMjctYzA3MWU3M2EtZmU4YTgyM2MtYmU4NjFjNjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ERROR: [SCHEME_CHANGED] Table '/Root/MultiShardIndexed' scheme changed.; 2025-03-28T09:12:49.758291Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjRiYjUwMjctYzA3MWU3M2EtZmU4YTgyM2MtYmU4NjFjNjI=, ActorId: [2:7486785855270313096:2558], ActorState: ExecuteState, TraceId: 01jqe0d1a7ar2tvssnnd3k5qdy, Create QueryResponse for error on request, msg: 2025-03-28T09:12:56.748871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:12:56.748894Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:59.474978Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037937:1][72075186224037927][2:7486785855270313671:2635] Apply status: status# 2, reason# 7 2025-03-28T09:12:59.507587Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037937:1][72075186224037927][2:7486785898219990989:2635] Handshake status: status# 2, reason# 7 2025-03-28T09:12:59.513998Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-03-28T09:12:59.515226Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037937:1][72075186224037919][2:7486785855270313672:2635] Apply status: status# 2, reason# 7 2025-03-28T09:12:59.527169Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-03-28T09:12:59.531805Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2025-03-28T09:12:59.541374Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037930 not found 2025-03-28T09:12:59.549615Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-03-28T09:12:59.549655Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-03-28T09:12:59.549669Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037922 not found 2025-03-28T09:12:59.549683Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037920 not found 2025-03-28T09:12:59.549698Z node 2 :HIVE 
WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-03-28T09:12:59.549711Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037923 not found 2025-03-28T09:12:59.549727Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037921 not found 2025-03-28T09:12:59.558154Z node 2 :CHANGE_EXCHANGE ERROR: [TableChangeSenderShard][72075186224037937:1][72075186224037919][2:7486785898219991390:2635] Handshake status: status# 2, reason# 7 2025-03-28T09:13:09.625598Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037919 not found 2025-03-28T09:13:09.625642Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-03-28T09:13:09.657718Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037942 not found 2025-03-28T09:13:09.657749Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037939 not found 2025-03-28T09:13:09.657768Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-03-28T09:13:09.658264Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found 2025-03-28T09:13:09.658281Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037940 not found 2025-03-28T09:13:09.658297Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::IndexTopSortPushDown [GOOD] Test command err: Trying to start YDB, gRPC: 17880, MsgBus: 20439 2025-03-28T09:12:29.913255Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785771134161878:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:29.913296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001902/r3tmp/tmpzz7x74/pdisk_1.dat 2025-03-28T09:12:30.775057Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:30.775689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:30.775852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 17880, node 1 2025-03-28T09:12:30.781360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:31.018289Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:31.018322Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:31.018330Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:31.018441Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20439 TClient is connected to server localhost:20439 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:32.138057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:32.176778Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:32.193290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:32.481421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:32.788477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:32.969669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:34.918825Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785771134161878:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:34.939909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:36.524387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785801198934594:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:36.524486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.136876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.195542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.287343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.356084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.400978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.465617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:37.609973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785805493902416:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.610081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.610433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785805493902421:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:37.614630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:37.641980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785805493902423:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:37.726592Z node 1 :TX_PROXY ERROR: Actor# [1:7486785805493902487:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:39.192989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:39.280881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:12:39.355869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11503, MsgBus: 22319 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001902/r3tmp/tmpcay0C9/pdisk_1.dat 2025-03-28T09:12:46.112841Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:12:46.323635Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:46.328964Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:46.329045Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:46.336147Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11503, node 2 2025-03-28T09:12:46.535251Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:46.535278Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:46.535285Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:46.535401Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22319 TClient is connected to server localhost:22319 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
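The run of warnings just above — `Failed to fetch pool info, NOT_FOUND` for pool `default`, then `ESchemeOpCreateResourcePool`, a `Scheduled retry for error: ... doublechecking`, and finally the `TX_PROXY` error `path exist, request accepts it` — is the workload service lazily creating the built-in `default` resource pool on the first query; the closing error is the double-check observing that the pool now exists, so the sequence reads as bootstrap noise rather than a failure. As a sketch only, an explicit pool could be declared like this, assuming YQL's CREATE RESOURCE POOL statement; the pool name and limits are illustrative and not taken from the log:

    -- Sketch: hypothetical pool name and limits, not the test's configuration.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- queries allowed to run concurrently
        QUEUE_SIZE = 100              -- queries allowed to wait for a slot
    );
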
2025-03-28T09:12:47.338063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:47.352897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:47.490399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCre ... hard: 72057594046644480 2025-03-28T09:12:50.896487Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:50.940478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:50.986278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.038043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.130823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785865403339592:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.130960Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.131339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785865403339597:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.135680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:51.148887Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785865403339599:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:51.222060Z node 2 :TX_PROXY ERROR: Actor# [2:7486785865403339654:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:52.673148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:54.263808Z node 2 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:13:01.247347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:13:01.247373Z node 2 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 5578, MsgBus: 2914 2025-03-28T09:13:06.246199Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785926514553473:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:06.246240Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001902/r3tmp/tmppiky2U/pdisk_1.dat 2025-03-28T09:13:06.427481Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:06.456216Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:06.456314Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:06.459039Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5578, node 3 2025-03-28T09:13:06.509297Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:06.509325Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:06.509335Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:06.509453Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2914 TClient is connected to server localhost:2914 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
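This block comes from `KqpIndexes::IndexTopSortPushDown`, which by its name verifies that an `ORDER BY ... LIMIT` matching a secondary index's sort order is pushed down into the index read instead of becoming a separate sort stage. A sketch of that query shape, using YQL's `VIEW` syntax for reading through a secondary index; the table and index names are hypothetical, since the log does not show the test's actual schema:

    -- Sketch: 'items' and 'by_score' are hypothetical names.
    SELECT id, score
    FROM items VIEW by_score      -- read through the secondary index
    WHERE score > 100
    ORDER BY score DESC           -- order matches the index, so the sort
    LIMIT 10;                     -- and the limit can ride the index read
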
2025-03-28T09:13:07.176301Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:07.187719Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:07.213058Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:07.314273Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:07.490406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:07.569906Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:10.483536Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785943694424442:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:10.483639Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:10.529138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:10.558425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:10.588802Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:10.620650Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:10.654765Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:10.725078Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:10.806168Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785943694424958:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:10.806251Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:10.806786Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785943694424963:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:10.810480Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:10.819690Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785943694424965:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:10.909329Z node 3 :TX_PROXY ERROR: Actor# [3:7486785943694425020:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:11.246782Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486785926514553473:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:11.246854Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:12.068406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:12.166742Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:12.230280Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |82.7%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut >> ExternalIndex::Simple [GOOD] |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large >> KqpQueryServiceScripts::ParseScript >> KqpQueryService::ExecuteQueryWithWorkloadManager >> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex >> KqpQueryService::Followers >> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery >> KqpQueryService::FlowControllOnHugeLiteralAsTable >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode >> KqpQueryService::ExecStats >> KqpQueryService::ExecuteQueryPg >> KqpQueryService::TableSink_HtapComplex+withOltpSink >> KqpQueryService::CreateTempTable >> KqpService::Shutdown >> KqpQueryServiceScripts::ExecuteScriptStatsBasic >> KqpQueryService::TableSink_HtapInteractive-withOltpSink >> KqpQueryService::AlterTempTable >> KqpQueryService::DdlUser >> KqpQueryServiceScripts::TestPaging >> KqpDocumentApi::RestrictWrite >> KqpQueryService::TableSink_OltpUpsert >> KqpQueryService::ClosedSessionRemovedWhileActiveWithQuery >> KqpService::SwitchCache-UseCache >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap >> KqpQueryService::Write >> KqpQueryService::Ddl >> KqpQueryService::StreamExecuteQueryPure >> KqpQueryService::ExecuteCollectMeta >> KqpQueryServiceScripts::ExecuteScript >> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin [GOOD] |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ydb_stress_tool |82.7%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |82.7%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool >> Yq_1::Basic_Null ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin [GOOD] Test command err: Trying to start YDB, gRPC: 65388, MsgBus: 13155 2025-03-28T09:12:43.565447Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785829276458415:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:43.565753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018b1/r3tmp/tmpCqTK55/pdisk_1.dat 2025-03-28T09:12:44.135019Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:44.150177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:44.150242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:44.151819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65388, node 1 2025-03-28T09:12:44.235222Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:44.235244Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:44.235250Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:44.235367Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13155 TClient is connected to server localhost:13155 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:44.857704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:44.889753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
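Each `KqpIndexes`/`KqpMultishardIndex` case above follows the same setup: start a fresh YDB node, then issue a burst of `ESchemeOpCreateTable` operations (the `waiting...` lines) to build the test schema, including indexed tables such as the `/Root/MultiShardIndexed` mentioned earlier. A rough sketch of the DDL shape behind that noise — the column and index names here are hypothetical, not read from the log:

    -- Sketch: hypothetical schema illustrating sync and async secondary indexes.
    CREATE TABLE multi_shard_indexed (
        key   Uint64,
        fk    Uint64,
        value Utf8,
        PRIMARY KEY (key),
        INDEX by_fk GLOBAL ON (fk)  -- synchronous global index
        -- an async variant, as exercised by WriteIntoRenamingAsyncIndex above,
        -- would read: INDEX by_fk_async GLOBAL ASYNC ON (fk)
    );
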
2025-03-28T09:12:45.037523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:45.214788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:45.311850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:47.449714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785846456329233:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.449963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:47.885712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:47.967958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:48.015621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:48.064339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:48.140428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:48.228467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:48.334247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785850751297049:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:48.334330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:48.334666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785850751297054:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:48.337978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:48.348404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785850751297056:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:12:48.418510Z node 1 :TX_PROXY ERROR: Actor# [1:7486785850751297113:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:48.570810Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785829276458415:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:48.586616Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:49.877933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14088, MsgBus: 29218 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018b1/r3tmp/tmpe7EAoJ/pdisk_1.dat 2025-03-28T09:12:52.733141Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:12:52.793200Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:52.847627Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:52.847711Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:52.851448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14088, node 2 2025-03-28T09:12:53.054466Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:53.054490Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:53.054500Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:53.054634Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29218 TClient is connected to server localhost:29218 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
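The surrounding output belongs to `KqpIndexes::JoinWithNonPKColumnsInPredicate-UseStreamJoin`, which checks joins whose predicate compares columns that are not part of either table's primary key. A sketch of that query shape with hypothetical tables, not the test's literal SQL:

    -- Sketch: neither join column below is a primary-key column.
    SELECT o.id, c.name
    FROM orders AS o
    JOIN customers AS c
      ON o.customer_ref = c.external_id
    WHERE c.region = 'EU';
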
2025-03-28T09:12:54.021915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:54.061853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:54.245265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:54.492796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:54.615572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:57.205019Z node 2 :KQP_WORKLOAD_SERVICE ... : {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:57.720380Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:57.731084Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785889911280009:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:57.800107Z node 2 :TX_PROXY ERROR: Actor# [2:7486785889911280063:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:58.864279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:1, at schemeshard: 72057594046644480 2025-03-28T09:12:58.964130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:12:59.040631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:12:59.085568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:12:59.179670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:12:59.273067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:13:07.789574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:13:07.789602Z node 2 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 23751, MsgBus: 20872 2025-03-28T09:13:09.037951Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785941646534332:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:09.038244Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018b1/r3tmp/tmpSDfSFt/pdisk_1.dat 2025-03-28T09:13:09.209386Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:09.229761Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:09.229866Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:09.233641Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23751, node 3 2025-03-28T09:13:09.304107Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:09.304133Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:09.304144Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:09.304302Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20872 TClient is connected to server 
localhost:20872 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:09.859392Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:09.864152Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:09.869606Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:13:09.950680Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:13:10.150862Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:13:10.284175Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:13:13.140270Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785958826405283:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:13.140386Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:13.173483Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:13.209056Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:13.239075Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:13.306648Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:13.340274Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:13.413028Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:13.511304Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785958826405806:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:13.511425Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:13.511592Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486785958826405811:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:13.515945Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:13.528397Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486785958826405813:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:13.583994Z node 3 :TX_PROXY ERROR: Actor# [3:7486785958826405868:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:14.038099Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486785941646534332:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:14.038204Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:14.658398Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-03-28T09:13:14.708748Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:14.830172Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:13:14.886761Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-28T09:13:14.985966Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-28T09:13:15.066804Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> KqpQueryService::ExecuteQuery >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel1 [GOOD] >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel2 >> TLocksTest::CK_Range_GoodLock [GOOD] >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [FAIL] Test command err: 2025-03-28T09:12:22.729212Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785737983757918:2278];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:22.729580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ccf/r3tmp/tmprrhfNP/pdisk_1.dat 2025-03-28T09:12:23.429518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:23.429632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:23.434270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-03-28T09:12:23.447729Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4538, node 1 2025-03-28T09:12:23.521899Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:12:23.521951Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:12:23.976736Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:23.976770Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:23.976779Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:24.003632Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4423 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:24.799085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:27.619546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785759458595255:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:27.619658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:27.696922Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785737983757918:2278];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:27.696981Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:28.622298Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.155882s 2025-03-28T09:12:28.622336Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.155948s 2025-03-28T09:12:28.873432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:12:29.342849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785768048530072:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:29.342956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:29.346101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785768048530077:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:29.351397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:12:29.396288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785768048530079:2362], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:12:29.468743Z node 1 :TX_PROXY ERROR: Actor# [1:7486785768048530162:2844] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:29.963450Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe0cdcs0faqy6pg9y3xh6aa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Yzk5YzFjYTYtZTlmYmU3ZmItOWNhMWI5ZGEtNmU1NDU5OTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root assertion failed at ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:253, virtual void NTestSuiteYdbSdkSessions::TTestCaseTestSdkFreeSessionAfterBadSessionQueryServiceStreamCall::Execute_(NUnitTest::TTestContext &): (session.GetId() == sessionId) failed: ("ydb://session/3?node_id=1&id=ZmUxODRkZGQtZmIwMmY2NjYtOTE0OGJmNDUtYTNkNjRhMzc=" != "ydb://session/3?node_id=1&id=Yzk5YzFjYTYtZTlmYmU3ZmItOWNhMWI5ZGEtNmU1NDU5OTM=") , with diff: "ydb://session/3?node_id=1&id=(|Yzk5YzFjYTYt)Z(|TlmY)mU(xODRk|3)Z(GQtZ|)mI(wMmY2NjY|)tO(T|WNhMWI5ZG)E(0OGJ|tN)m(|U1)NDU(tY|5O)T(NkNjRh|)M(zc|)=" TBackTrace::Capture()+28 (0x18BA8FEC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x190710A0) NTestSuiteYdbSdkSessions::TTestCaseTestSdkFreeSessionAfterBadSessionQueryServiceStreamCall::Execute_(NUnitTest::TTestContext&)+7545 (0x18769FB9) std::__y1::__function::__func, void ()>::operator()()+280 (0x187DF188) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x190A80E6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19077C19) NTestSuiteYdbSdkSessions::TCurrentTest::Execute()+1204 (0x187DE034) NUnitTest::TTestFactory::Execute()+2438 (0x190794E6) NUnitTest::RunMain(int, char**)+5213 (0x190A265D) ??+0 (0x7FBCBFDC1D90) __libc_start_main+128 (0x7FBCBFDC1E40) _start+41 (0x160E3029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_GoodLock [GOOD] Test command err: 2025-03-28T09:12:39.253799Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785812359229860:2192];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:39.253926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f17/r3tmp/tmp64r5Mh/pdisk_1.dat 2025-03-28T09:12:40.231165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:40.231277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:40.271007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:40.278983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:12:40.290987Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11438 
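The [FAIL] above is the one real failure in this stretch of the log: YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall asserts session.GetId() == sessionId at sdk_sessions_ut.cpp:253. After a query-service stream call made on a bad session, the session is expected to be freed back to the pool, so the next acquisition should return the same session id; the printed diff shows a fresh id came back instead. A toy stand-in for that invariant, using a hypothetical TPool rather than the SDK's real session pool:

    #include <cassert>
    #include <deque>
    #include <string>

    // A toy session pool illustrating the invariant the failed test
    // checks: a session released after a bad-session call must be
    // reusable, so the next Acquire() hands back the same session id
    // instead of minting a fresh one.
    class TPool {
        std::deque<std::string> Free_;
        int Counter_ = 0;
    public:
        std::string Acquire() {
            if (!Free_.empty()) {
                std::string id = Free_.front();
                Free_.pop_front();
                return id;
            }
            return "session-" + std::to_string(++Counter_);
        }
        void Release(std::string id) { Free_.push_back(std::move(id)); }
    };

    int main() {
        TPool pool;
        const std::string sessionId = pool.Acquire();
        // ... stream call fails with BAD_SESSION; the SDK is expected
        // to return the session to the pool rather than leak it ...
        pool.Release(sessionId);
        const std::string next = pool.Acquire();
        assert(next == sessionId); // the check that fails in the log above
    }

In the failing run the release evidently did not happen (or happened to a different pool slot), so Acquire() created a new session and the ids diverged.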
WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:12:40.988511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:41.023510Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:41.071484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:41.456845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:41.557548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:46.808977Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785841388349679:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:46.809033Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f17/r3tmp/tmpzc2ZwK/pdisk_1.dat 2025-03-28T09:12:47.126929Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:47.161927Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:47.162043Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:47.163657Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3585 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:12:47.500826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:47.514573Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:47.536015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:47.746472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:47.845369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:51.368200Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486785863081073967:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:51.368245Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f17/r3tmp/tmpcTo4dx/pdisk_1.dat 2025-03-28T09:12:51.755984Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:51.770382Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:51.770497Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:51.772730Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12212 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:12:52.060694Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:52.090167Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:52.119983Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:12:52.125160Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:52.241120Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:52.336522Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:56.643361Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486785886437864268:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:56.705732Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f17/r3tmp/tmpZbEeaq/pdisk_1.dat 2025-03-28T09:12:56.825024Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:56.841191Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:56.841328Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:56.842838Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7273 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId ... e 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:09.822186Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19242 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:13:10.061852Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:10.076932Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:10.087478Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:13:10.093185Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:10.169178Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:10.228549Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:13.761312Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486785958138606283:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:13.761383Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f17/r3tmp/tmp9Wj8J5/pdisk_1.dat 2025-03-28T09:13:13.875423Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:13.894104Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:13.894173Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:13.895500Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1450 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:13:14.137069Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:13:14.162199Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:14.242568Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:14.305946Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:17.934716Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486785975801349015:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:17.934811Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f17/r3tmp/tmpUqVCRr/pdisk_1.dat 2025-03-28T09:13:18.038242Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:18.074223Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:18.074320Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:18.075811Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14370 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:13:18.291944Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:13:18.397086Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:18.463332Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:18.525596Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
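Every node bootstrap above logs the same Hive chain: VolatileState: Unknown -> Disconnected -> Connecting -> Connected. A minimal replay of that transition chain as a plain enum, illustrative only and not Hive's actual implementation:

    #include <iostream>

    // Node lifecycle states as they appear in the HIVE warnings above.
    enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

    const char* Name(EVolatileState s) {
        switch (s) {
            case EVolatileState::Unknown:      return "Unknown";
            case EVolatileState::Disconnected: return "Disconnected";
            case EVolatileState::Connecting:   return "Connecting";
            case EVolatileState::Connected:    return "Connected";
        }
        return "?";
    }

    int main() {
        // The fixed handshake chain each node walks while joining.
        const EVolatileState chain[] = {
            EVolatileState::Unknown, EVolatileState::Disconnected,
            EVolatileState::Connecting, EVolatileState::Connected,
        };
        for (int i = 0; i + 1 < 4; ++i)
            std::cout << "VolatileState: " << Name(chain[i])
                      << " -> " << Name(chain[i + 1]) << "\n";
    }

The transitions are logged at WARN level but are the normal join sequence; only a node that stalls before reaching Connected would indicate a problem.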
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f17/r3tmp/tmpcE3P9E/pdisk_1.dat 2025-03-28T09:13:22.244625Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:22.304671Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:22.343131Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:22.343267Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:22.349314Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7257 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:13:22.684473Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:22.697268Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:22.714429Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:13:22.803186Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:13:22.878954Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... |82.8%| [TA] $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple [GOOD] Test command err: 2025-03-28T09:07:50.781334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:07:50.781413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/cs_index/external;error=incorrect path status: LookupError; 2025-03-28T09:07:50.781969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:07:50.782057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d65/r3tmp/tmpWXiBR6/pdisk_1.dat 2025-03-28T09:07:51.618520Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.106001s 2025-03-28T09:07:51.618717Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.106230s TServer::EnableGrpc on GrpcPort 29988, node 1 TClient is connected to server localhost:10776 2025-03-28T09:07:51.962569Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvGetProxyServicesRequest 2025-03-28T09:07:51.962980Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvGetProxyServicesRequest 2025-03-28T09:07:52.008001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:07:52.107501Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:07:52.121413Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:07:52.121483Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:07:52.121531Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:07:52.122036Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:07:52.159360Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:07:52.160389Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:07:52.160589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:07:52.160704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:07:52.189029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:07:52.372076Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvProposeTransaction 2025-03-28T09:07:52.372163Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:07:52.372399Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:678:2571] 2025-03-28T09:07:52.482262Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "olapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Name: "uid" Type: "Utf8" NotNull: true StorageId: "__MEMORY" } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" StorageId: "__MEMORY" } Columns { Name: "json_payload" Type: 
"JsonDocument" } KeyColumnNames: "timestamp" KeyColumnNames: "uid" } } } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:07:52.482394Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:07:52.483235Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:07:52.483351Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:07:52.483740Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:07:52.483983Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:07:52.484114Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:07:52.490912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:07:52.491636Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:07:52.492582Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:07:52.492662Z node 1 :TX_PROXY DEBUG: Actor# [1:678:2571] txid# 281474976715657 SEND to# [1:677:2570] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-03-28T09:07:52.719821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:752:2632];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:07:52.770268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:752:2632];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:07:52.770539Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-03-28T09:07:52.807176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:07:52.807426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:07:52.807734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:07:52.807865Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:07:52.807987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:07:52.808168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:07:52.808306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:07:52.808441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:07:52.808603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:07:52.808731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:07:52.808842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:07:52.808979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:752:2632];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:07:52.833772Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-03-28T09:07:52.834092Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:755:2635];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:07:52.857162Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037889;self_id=[1:755:2635];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:07:52.857446Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037889 2025-03-28T09:07:52.865939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:755:2635];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:07:52.866060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:755:2635];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:07:52.866343Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:755:2635];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:07:52.866510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:755:2635];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:07:52.866648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:755:2635];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:07:52.867162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:755:2635];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:07:52.867320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7 ... e (ToFlow $15) $3) (lambda '($16 $17 $18) (AsStruct '('"componentId" $16) '('"instant" $17) '('"modificationId" $18)))))) '('('"_logical_id" '351) '('"_id" '"50c3ad71-9d30ed0a-a642bc07-19b11b")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"data")))) ) 2025-03-28T09:13:17.380890Z node 1 :KQP_YQL TRACE: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.380 TRACE ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [KQP] kqp_transform.cpp:33: PhysicalPeepholeTransformer: ( (let $1 (KqpTable '"//Root/.metadata/initialization/migrations" '"72057594046644480:6" '"" '1)) (let $2 '('"componentId" '"instant" '"modificationId")) (let $3 (Uint64 '"1001")) (let $4 (KqpRowsSourceSettings $1 $2 '('('"ItemsLimit" $3) '('"Sequential" '1)) (Void) '())) (let $5 (OptionalType (DataType 'Utf8))) (let $6 (StructType '('"componentId" $5) '('"instant" (OptionalType (DataType 'Uint32))) '('"modificationId" $5))) (let $7 '('('"_logical_id" '338) '('"_id" '"67f229b9-49a5ac5b-e5e6088a-cd228af7") '('"_wide_channels" $6))) (let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $4)) (lambda '($12) (block '( (let $13 (lambda '($14) (Member $14 '"componentId") (Member $14 '"instant") (Member $14 '"modificationId"))) (return (FromFlow (ExpandMap (Take (ToFlow $12) $3) $13))) ))) $7)) (let $9 (DqCnUnionAll (TDqOutput $8 '"0"))) (let $10 (DqPhyStage '($9) (lambda '($15) (FromFlow (NarrowMap (Take (ToFlow $15) $3) (lambda '($16 $17 $18) (AsStruct '('"componentId" $16) '('"instant" $17) '('"modificationId" $18)))))) '('('"_logical_id" '351) '('"_id" '"50c3ad71-9d30ed0a-a642bc07-19b11b")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $6) '"0" '"0")) '('('"type" '"data_query")))) ) 2025-03-28T09:13:17.390970Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.390 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:466: Register async execution for node #268 2025-03-28T09:13:17.391111Z node 1 :KQP_YQL TRACE: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.391 TRACE ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:387: {3}, callable #277 2025-03-28T09:13:17.391213Z node 1 :KQP_YQL INFO: TraceId: 
01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.391 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:577: Node #277 finished execution 2025-03-28T09:13:17.391279Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.391 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:594: Node #277 created 0 trackable nodes: 2025-03-28T09:13:17.391358Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.391 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:87: Finish, output #280, status: Async 2025-03-28T09:13:17.391954Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.391 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:133: Completed async execution for node #268 2025-03-28T09:13:17.392013Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.391 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:153: State is ExecutionRequired after apply async changes for node #268 2025-03-28T09:13:17.392066Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.392 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:59: Begin, root #280 2025-03-28T09:13:17.392109Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.392 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:72: Collect unused nodes for root #280, status: Ok 2025-03-28T09:13:17.392153Z node 1 :KQP_YQL TRACE: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.392 TRACE ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:387: {0}, callable #280 2025-03-28T09:13:17.392200Z node 1 :KQP_YQL TRACE: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.392 TRACE ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:387: {1}, callable #279 2025-03-28T09:13:17.392265Z node 1 :KQP_YQL TRACE: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.392 TRACE ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:387: {2}, callable #278 2025-03-28T09:13:17.392340Z node 1 :KQP_YQL TRACE: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.392 TRACE ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:387: {3}, callable #275 2025-03-28T09:13:17.392388Z node 1 :KQP_YQL TRACE: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.392 TRACE ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:387: {4}, callable #268 2025-03-28T09:13:17.392519Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.392 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:577: Node #268 finished execution 2025-03-28T09:13:17.392572Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 
09:13:17.392 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:594: Node #268 created 0 trackable nodes: 2025-03-28T09:13:17.392618Z node 1 :KQP_YQL TRACE: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.392 TRACE ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:387: {3}, callable #275 2025-03-28T09:13:17.392664Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.392 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:577: Node #275 finished execution 2025-03-28T09:13:17.392717Z node 1 :KQP_YQL TRACE: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.392 TRACE ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:387: {2}, callable #278 2025-03-28T09:13:17.392880Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.392 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:577: Node #278 finished execution 2025-03-28T09:13:17.392925Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.392 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:594: Node #278 created 0 trackable nodes: 2025-03-28T09:13:17.392972Z node 1 :KQP_YQL TRACE: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.392 TRACE ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:387: {1}, callable #279 2025-03-28T09:13:17.393028Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.393 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:577: Node #279 finished execution 2025-03-28T09:13:17.393073Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.393 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:594: Node #279 created 0 trackable nodes: 2025-03-28T09:13:17.393126Z node 1 :KQP_YQL TRACE: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.393 TRACE ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:387: {0}, callable #280 2025-03-28T09:13:17.393175Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.393 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:577: Node #280 finished execution 2025-03-28T09:13:17.393218Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.393 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:594: Node #280 created 0 trackable nodes: 2025-03-28T09:13:17.393260Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.393 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] yql_execution.cpp:87: Finish, output #280, status: Ok 2025-03-28T09:13:17.393301Z node 1 :KQP_YQL INFO: TraceId: 01jqe0dw2r0xmc5wvg6zzm9n5e, SessionId: CompileActor 2025-03-28 09:13:17.393 INFO ydb-services-ext_index-ut(pid=144064, tid=0x00007FCA7B402D00) [core exec] 
yql_execution.cpp:93: Creating finalizing transformer, output #280 2025-03-28T09:13:17.410509Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvExecuteKqpTransaction 2025-03-28T09:13:17.410588Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976716246 ProcessProposeKqpTransaction 2025-03-28T09:13:17.418083Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] Handle TEvExecuteKqpTransaction 2025-03-28T09:13:17.418142Z node 1 :TX_PROXY DEBUG: actor# [1:60:2107] TxId# 281474976716247 ProcessProposeKqpTransaction 2025-03-28T09:13:17.585340Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[1:752:2632];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-28T09:13:17.585714Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[1:755:2635];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-28T09:13:17.585804Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[1:761:2638];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-28T09:13:17.585882Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[1:765:2641];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-28T09:13:17.599685Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[1:752:2632];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-03-28T09:13:17.599884Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[1:755:2635];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-03-28T09:13:17.599987Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;self_id=[1:761:2638];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-03-28T09:13:17.600080Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[1:765:2641];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 7256, MsgBus: 5072 2025-03-28T09:12:45.091710Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785835989047377:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:45.098969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001703/r3tmp/tmpYZ8WGz/pdisk_1.dat 2025-03-28T09:12:45.961527Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:45.995952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:45.996036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:46.003741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7256, node 1 2025-03-28T09:12:46.400127Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
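The REQUEST=/EXPECTATION= pair above is the harness's readiness probe against the metadata store, printed on one line with its formatting stripped. Unfolded, it is a single YQL statement; the query text is verbatim from the REQUEST= field, and reading EXPECTATION=1 as "the probe is expected to succeed" is an assumption, not something the log states:

    SELECT COUNT(*)
    FROM `/Root/.metadata/initialization/migrations`;
    -- EXPECTATION=1: assumed to mean the harness requires this probe to succeed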
2025-03-28T09:12:46.400144Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:46.400152Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:46.400278Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5072 TClient is connected to server localhost:5072 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:47.321521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:47.343424Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:50.094802Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785835989047377:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:50.094912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:50.391571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785857463884431:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:50.391676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:50.392796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785857463884443:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:50.397973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:12:50.417661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785857463884445:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:12:50.494777Z node 1 :TX_PROXY ERROR: Actor# [1:7486785857463884496:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:50.862461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.003103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.037117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.082792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.128079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.357849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.436177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.471195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.504045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.532843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.571650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.602219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.647160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:12:52.469542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-03-28T09:12:52.520759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:12:52.611060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:12:52.686395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:12:52.761144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:12:52.805827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:12:52.851290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:12:52.898077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:12:52.942762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:12:53.010627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:12:53.063458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:12:53.119965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:12:53.154375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:12:53.237196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:12:53.295301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:12:53.341524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:12:53.396628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, ... tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.962049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.962053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.966169Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.967214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.969891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.970983Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.974059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.974833Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.978271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.978467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.982425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.982427Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.986492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.986499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.990331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
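The long run of per-tablet finished_tx acknowledgements above is a single distributed commit (tx_id 281474976710714) being confirmed shard by shard; the KQP_SLOW_LOG record a few lines below quotes the DDL that produced this fan-out, with its newlines escaped. Unescaped, the statement text reads as follows (reproduced verbatim from the log, whitespace aside):

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

With three tables at a minimum of 240 partitions each, several hundred column-shard tablets acknowledge every schema transaction, which is consistent with the volume of finished_tx records in this block.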
2025-03-28T09:13:20.990451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.994377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.994422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.998269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:20.998305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.002153Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.002188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.005813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.005814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.009286Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.009286Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.012839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.013664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.016082Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.016927Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.019162Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.020107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.022323Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.023372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.025381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.026502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.028583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.030164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.031823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.033388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.035502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.036855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.040397Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.040669Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.044878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.044892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:21.094849Z 
node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0d6q99cedq4wwmz3ref80", SessionId: ydb://session/3?node_id=1&id=MWU0ODU2MjgtMmExZmVhZTUtZTBmYzQxNDAtZmExYjdjYmE=, Slow query, duration: 25.819612s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:13:21.266491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:13:21.266495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:13:21.266978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] >> KqpQueryService::PeriodicTaskInSessionPool >> KqpQueryServiceScripts::ParseScript [GOOD] |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots >> KqpQueryServiceScripts::ListScriptExecutions >> KqpQueryService::StreamExecuteQueryPure [GOOD] >> KqpQueryService::AlterTempTable [GOOD] >> KqpQueryService::StreamExecuteQueryMultiResult >> KqpQueryServiceScripts::ValidateScript >> KqpQueryService::CTASWithoutPerStatement |82.8%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots >> KqpQueryService::ExecuteCollectMeta [GOOD] >> KqpQueryService::ExecuteDDLStatusCodeSchemeError >> KqpQueryService::StreamExecuteQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 11603, MsgBus: 6883 2025-03-28T09:12:04.499898Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785663180989682:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:04.500433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001969/r3tmp/tmp8m3PCZ/pdisk_1.dat 2025-03-28T09:12:05.123537Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:05.126904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:05.127017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:05.129761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11603, node 1 2025-03-28T09:12:05.245713Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:05.245736Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:05.245745Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:05.245883Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6883 TClient is connected to server localhost:6883 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:06.267996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
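For runs like the one above, where AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 fans every commit out across hundreds of tablets, the resulting partition counts can be cross-checked from inside the database. A minimal sketch, assuming this build exposes YDB's standard `.sys/partition_stats` view and that it covers the tables in question:

    -- Sketch only: assumes the .sys/partition_stats system view is available
    SELECT Path, COUNT(*) AS partitions
    FROM `/Root/.sys/partition_stats`
    GROUP BY Path
    ORDER BY partitions DESC;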
2025-03-28T09:12:06.296584Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:06.346774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:06.553244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:06.769230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:06.888167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:08.832351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785680360860501:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:08.832452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:09.180340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.287406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.341985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.385638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.430450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.480305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:09.488719Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785663180989682:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:09.488793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:09.550589Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785684655828315:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:09.550709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:09.551188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785684655828320:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:09.558020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:09.577349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785684655828322:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:09.665280Z node 1 :TX_PROXY ERROR: Actor# [1:7486785684655828377:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:11.001934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:11.292591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:12:11.387119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:12:20.117391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:12:20.117434Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 3023, MsgBus: 26375 2025-03-28T09:12:46.593027Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785841938751293:2198];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001969/r3tmp/tmp2SjLiz/pdisk_1.dat 2025-03-28T09:12:46.701797Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:12:46.893298Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:46.912785Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:46.912876Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:46.915945Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3023, node 2 2025-03-28T09:12:47.106711Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:47.106738Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:47.106746Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:47.106879Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26375 TClient is connected to server localhost:26375 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:48.028443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:48.044670Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:48.073043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:12:48.264315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:48.494799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:48.607326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:51.350499Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785863413589401:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.350585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.401942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.454385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.516622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.546882Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785841938751293:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:51.547329Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:51.575750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.623013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.730249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.798819Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785863413589924:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.798934Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.799192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785863413589929:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:51.804577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:51.820673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785863413589931:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:51.919482Z node 2 :TX_PROXY ERROR: Actor# [2:7486785863413589988:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:53.561524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:54.045893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:12:54.280558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:12:54.380979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715760:0, at schemeshard: 72057594046644480 2025-03-28T09:12:54.469778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480 2025-03-28T09:12:54.553535Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-03-28T09:12:54.553591Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-03-28T09:12:54.553618Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-03-28T09:13:01.866838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:13:01.866876Z node 2 :IMPORT WARN: Table profiles were not loaded >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+ColumnStore [GOOD] >> KqpQueryService::CreateTempTable [GOOD] >> KqpQueryService::CreateAndDropTopic >> KqpQueryService::ExecuteQueryUpsertDoesntChangeIndexedValuesIfNotChanged >> KqpQueryService::Write [GOOD] >> KqpQueryServiceScripts::CancelScriptExecution >> KqpQueryService::ExecuteQueryPg [GOOD] >> KqpQueryService::ExecuteQueryPgTableSelect >> KqpDocumentApi::RestrictWrite [GOOD] >> KqpDocumentApi::AllowRead >> KqpQueryService::FlowControllOnHugeLiteralAsTable [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable+LongRow >> KqpQueryService::ExecuteQueryWithWorkloadManager [GOOD] >> KqpQueryService::ExecuteQueryWithResourcePoolClassifier >> KqpQueryService::ClosedSessionRemovedWhileActiveWithQuery [GOOD] >> KqpQueryService::CloseSessionsWithLoad >> KqpQueryService::ExecStats [GOOD] >> KqpQueryService::ExecStatsAst >> KqpQueryService::DdlUser [GOOD] >> KqpQueryService::DdlTx >> KqpQueryServiceScripts::ExecuteScriptWithUnspecifiedMode [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithTimeout >> KqpQueryService::Ddl [GOOD] >> KqpQueryService::DdlColumnTable >> KqpQueryService::Followers [GOOD] >> KqpQueryService::FlowControllOnHugeRealTable-LongRow >> KqpQueryService::TableSink_HtapInteractive-withOltpSink [GOOD] >> 
KqpQueryService::TableSink_OlapInsert >> KqpQueryServiceScripts::ExecuteScriptStatsBasic [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsFull >> KqpService::Shutdown [GOOD] >> KqpService::SessionBusyRetryOperationSync >> KqpQueryService::ExecuteQuery [GOOD] >> KqpQueryService::ExecuteQueryExplicitBeginCommitRollback >> KqpQueryService::TableSink_OltpReplace+HasSecondaryIndex [GOOD] >> KqpQueryService::TableSink_OltpReplace-HasSecondaryIndex >> KqpQueryServiceScripts::ExecuteScript [GOOD] >> KqpQueryServiceScripts::ExecuteMultiScript >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel1 [GOOD] >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel2 >> KqpQueryService::SessionFromPoolError >> KqpQueryServiceScripts::ForgetScriptExecutionOnLongQuery [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecutionRace >> KqpQueryServiceScripts::TestPaging [GOOD] >> KqpQueryServiceScripts::TestFetchMoreThanLimit >> KqpQueryService::TableSink_OltpUpsert [GOOD] >> KqpQueryService::TableSink_OltpUpdate >> KqpQueryService::ExecuteDDLStatusCodeSchemeError [GOOD] >> KqpQueryService::ExecStatsPlan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 16871, MsgBus: 11883 2025-03-28T09:11:53.677069Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785613521707596:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:53.677544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001713/r3tmp/tmpOeaNWb/pdisk_1.dat 2025-03-28T09:11:54.506971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:54.507072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:54.523509Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:54.525350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16871, node 1 2025-03-28T09:11:54.735354Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:54.735381Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:54.735390Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:54.735496Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11883 TClient is connected to server localhost:11883 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:55.573578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:57.987144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785630701577310:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:57.987273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:57.987765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785630701577322:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:57.991758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:11:58.005433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785630701577324:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:11:58.070803Z node 1 :TX_PROXY ERROR: Actor# [1:7486785634996544671:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:58.468692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:11:58.661195Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785613521707596:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:58.661260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:58.778582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486785634996544953:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:11:58.778853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486785634996544953:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:11:58.779114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486785634996544953:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:11:58.779246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486785634996544953:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:11:58.779342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486785634996544953:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:11:58.779462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486785634996544953:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:11:58.779617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486785634996544953:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:11:58.779731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486785634996544953:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:11:58.779832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486785634996544953:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:11:58.779968Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7486785634996544953:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:11:58.780168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486785634996544953:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:11:58.780300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486785634996544953:2357];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:11:58.785257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785634996544930:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:11:58.785357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785634996544930:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:11:58.785550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785634996544930:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:11:58.785644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785634996544930:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:11:58.785772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785634996544930:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:11:58.786001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785634996544930:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:11:58.786115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785634996544930:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:11:58.786205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785634996544930:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:11:58.786310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785634996544930:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:11:58.786430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785634996544930:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:11:58.786547Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7486785634996544930:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:11:58.786663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785634996544930:2352];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:11:58.840033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486785634996544991:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.c ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.654381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.655254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.657978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.659736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.661773Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.664851Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.665483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.669517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.670488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.673853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.675999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.677983Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.682020Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.682584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.687386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.687947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.691932Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.693882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.696243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.699622Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.700215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.704302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.705283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.709502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.711035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.714364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.716761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.718753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.721647Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.722412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.725832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.726589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.729314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.731482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.732788Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.736354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.736364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.740387Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.742175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.744317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.748127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.748436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.752953Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.754107Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.757306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.869146Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0cztc1snpsd7jwqfcrp60", SessionId: ydb://session/3?node_id=1&id=OGJjM2ViOTItNWFlYjBiNDctMjQ0MDUxNDAtYTkwMGFiNDU=, Slow query, duration: 30.664173s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:13:19.086190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:13:19.086526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486785965709089154:12293];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-28T09:13:19.087248Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:13:19.087797Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |82.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge >> KqpQueryService::CTASWithoutPerStatement [GOOD] >> KqpQueryService::CheckIsolationLevelFroPerStatementMode >> KqpQueryService::ExecuteQueryPgTableSelect [GOOD] >> KqpQueryService::ExecuteQueryMultiScalar >> KqpQueryService::StreamExecuteQueryMultiResult [GOOD] >> KqpQueryService::TableSink_BadTransactions >> KqpQueryService::TableSink_HtapComplex+withOltpSink [GOOD] >> KqpQueryService::TableSink_HtapComplex-withOltpSink >> KqpQueryService::CreateAndDropTopic [GOOD] >> KqpQueryService::CreateAndAlterTopic >> KqpDocumentApi::AllowRead [GOOD] >> KqpDocumentApi::RestrictAlter >> KqpQueryService::ExecuteQueryExplicitBeginCommitRollback [GOOD] >> KqpQueryService::ExecuteQueryExplicitTxTLI >> KqpQueryServiceScripts::ExecuteScriptWithWorkloadManager >> KqpQueryService::DdlTx [GOOD] >> KqpQueryService::DdlWithExplicitTransaction >> KqpQueryService::FlowControllOnHugeRealTable+LongRow [GOOD] >> KqpQueryService::Explain >> KqpQueryServiceScripts::ValidateScript 
[GOOD] >> KqpQueryServiceScripts::TestTruncatedByRows >> KqpQueryService::TableSink_OltpReplace-HasSecondaryIndex [GOOD] >> KqpQueryService::TableSink_OltpOrder >> KqpQueryService::StreamExecuteQuery [GOOD] >> KqpQueryService::StreamExecuteCollectMeta >> KqpQueryService::ExecStatsAst [GOOD] >> KqpQueryService::DmlNoTx >> KqpService::SessionBusyRetryOperationSync [GOOD] >> KqpService::SwitchCache+UseCache >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] >> KqpQueryService::SessionFromPoolError [GOOD] >> KqpQueryService::ReturnAndCloseSameTime >> KqpQueryServiceScripts::CancelScriptExecution [GOOD] >> KqpQueryServiceScripts::EmptyNextFetchToken >> KqpQueryServiceScripts::ExecuteScriptStatsFull [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsNone >> KqpQueryService::TableSink_OlapInsert [GOOD] >> KqpQueryService::TableSink_OlapDelete >> KqpQueryService::ExecStatsPlan [GOOD] >> KqpQueryServiceScripts::ExecuteMultiScript [GOOD] >> KqpQueryServiceScripts::ExecuteScriptPg >> KqpDocumentApi::RestrictAlter [GOOD] >> KqpDocumentApi::RestrictDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 15954, MsgBus: 61991 2025-03-28T09:11:23.910759Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785486415991741:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:23.915075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019d7/r3tmp/tmpgiyipW/pdisk_1.dat 2025-03-28T09:11:25.017099Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:25.018134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:25.018215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:25.019252Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:25.076629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:25.290398Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.139232s 2025-03-28T09:11:25.290486Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.139330s TServer::EnableGrpc on GrpcPort 15954, node 1 2025-03-28T09:11:25.891219Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:11:25.891240Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:11:25.891250Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:11:25.891350Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61991 TClient is connected to server localhost:61991 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:11:27.631213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:27.693809Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:11:27.709473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.215005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:11:28.473054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:11:28.644511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:11:28.974491Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785486415991741:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:28.998266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:11:29.976882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785512185797278:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:29.977034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:30.688222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.743305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.783932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.857701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.888326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:11:30.924108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:11:31.055367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785520775732405:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.055516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.055994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785520775732411:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:11:31.079458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:11:31.098404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785520775732413:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:11:31.181169Z node 1 :TX_PROXY ERROR: Actor# [1:7486785520775732470:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:34.818655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:11:35.499005Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710673 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: SIMILARITY_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-03-28T09:11:35.499953Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-28T09:11:35.500031Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7486785537955602234:2530], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:11:35.500164Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715757 2025-03-28T09:11:35.500223Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7486785537955602234:2530], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 
2025-03-28T09:11:35.500497Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-28T09:11:35.500546Z node 1 :BUILD_I ... 81474976710673 2025-03-28T09:13:04.623569Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7486785909519726524:2518], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:13:04.623668Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715766 2025-03-28T09:13:04.623720Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7486785909519726524:2518], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:13:04.623973Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-28T09:13:04.624018Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7486785909519726524:2518], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 
281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:13:04.624869Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976710673, cookie: 281474976710673, txId: 281474976715766, status: StatusAccepted 2025-03-28T09:13:04.624989Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7486785909519726524:2518], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976715766 SchemeshardId: 72057594046644480 PathId: 16 2025-03-28T09:13:04.625746Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-28T09:13:04.625794Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7486785909519726524:2518], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:13:04.628161Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715766, buildInfoId: 281474976710673 2025-03-28T09:13:04.628232Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715766, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, 
LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7486785909519726524:2518], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:13:04.628574Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-28T09:13:04.628626Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7486785909519726524:2518], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:13:04.628652Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-28T09:13:04.629028Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-28T09:13:04.629079Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7486785909519726524:2518], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, 
read bytes: 0 }} 2025-03-28T09:13:04.629435Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710673 2025-03-28T09:13:04.629604Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710673 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { } } max_batch_rows: 50000 max_batch_bytes: 8388608 max_shards_in_flight: 32 max_retries_upload_batch: 50 } Progress: 100 } 2025-03-28T09:13:04.634104Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037926 not found 2025-03-28T09:13:04.634124Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037928 not found 2025-03-28T09:13:04.634142Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037929 not found 2025-03-28T09:13:04.638717Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037927 not found 2025-03-28T09:13:10.768584Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:13:10.768623Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:13.076520Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710673 >> KqpQueryServiceScripts::ForgetScriptExecutionRace [GOOD] >> KqpQueryServiceScripts::InvalidFetchToken >> KqpQueryService::ExecuteQueryMultiScalar [GOOD] >> KqpQueryService::TableSink_BadTransactions [GOOD] >> KqpQueryService::ExecuteQueryExplicitTxTLI [GOOD] >> KqpQueryServiceScripts::TestFetchMoreThanLimit [GOOD] >> KqpQueryServiceScripts::TestAstWithCompression >> KqpQueryService::DdlWithExplicitTransaction [GOOD] >> KqpQueryService::Ddl_Dml ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecStatsPlan [GOOD] Test command err: Trying to start YDB, gRPC: 13319, MsgBus: 31222 2025-03-28T09:13:23.296845Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786002617676563:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.297332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00153b/r3tmp/tmpeBg4OY/pdisk_1.dat 2025-03-28T09:13:24.362958Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.365059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:24.368870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.368966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.399009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13319, node 1 2025-03-28T09:13:24.673024Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2025-03-28T09:13:24.673053Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.673076Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.673197Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31222 TClient is connected to server localhost:31222 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:26.334176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.372910Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:26.386010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.650923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.843679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.939813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.292166Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786002617676563:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.292257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:28.969841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786024092514683:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.970017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.150439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.200470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.263247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.319267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.369357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.423038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.494489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786032682449799:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.494564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.495059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786032682449804:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.498583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.515315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786032682449806:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.597729Z node 1 :TX_PROXY ERROR: Actor# [1:7486786032682449862:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 23142, MsgBus: 17185 2025-03-28T09:13:33.615238Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786045713142800:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:33.619190Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00153b/r3tmp/tmpX3GVGv/pdisk_1.dat 2025-03-28T09:13:34.025181Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:34.075203Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:34.075309Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:34.083984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23142, node 2 2025-03-28T09:13:34.335335Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:34.335356Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:34.335363Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:34.335478Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17185 TClient is connected to server localhost:17185 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:34.996955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:35.002600Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:37.664214Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786062893012468:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:37.664328Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:37.664630Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786062893012504:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:37.670257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:13:37.684970Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:13:37.686543Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786062893012506:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:13:37.743263Z node 2 :TX_PROXY ERROR: Actor# [2:7486786062893012557:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:37.775303Z node 2 :TX_PROXY ERROR: Actor# [2:7486786062893012588:2349] txid# 281474976710660, issues: { message: "Type \'TzTimestamp\' specified for column \'payload\' is not supported by storage" severity: 1 } 2025-03-28T09:13:37.786742Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzgxYTYxMWYtNTQwOWNjN2UtMzU2MzM4ZTMtZmU0ODVlYjE=, ActorId: [2:7486786062893012466:2329], ActorState: ExecuteState, TraceId: 01jqe0edhff7vsqkhkbm2e4am8, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 20229, MsgBus: 9833 2025-03-28T09:13:38.904376Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786065039417069:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:38.905219Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00153b/r3tmp/tmpAfNOlM/pdisk_1.dat 2025-03-28T09:13:39.135635Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:39.156174Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:39.159226Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:39.169619Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20229, node 3 2025-03-28T09:13:39.347250Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:39.347291Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:39.347300Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:39.347430Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9833 TClient is connected to server localhost:9833 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:13:40.091962Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:40.103341Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:40.117910Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:13:40.207489Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:13:40.468464Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:40.597740Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:43.690542Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786086514255169:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:43.690617Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:43.746973Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:43.805935Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:43.826979Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786065039417069:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:43.827058Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:43.848408Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:43.914343Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:43.959354Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.019273Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.100684Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786090809222979:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.100792Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.101298Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786090809222984:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.104997Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:44.120236Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786090809222986:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:44.214752Z node 3 :TX_PROXY ERROR: Actor# [3:7486786090809223043:3458] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpQueryService::TableSink_OltpUpdate [GOOD]
>> KqpQueryService::TableSink_Oltp_Replace+UseSink
>> KqpQueryService::ExecuteQueryWithResourcePoolClassifier [GOOD]
>> KqpQueryService::ExecuteRetryQuery
>> KqpQueryService::Explain [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryMultiScalar [GOOD]
Test command err:
Trying to start YDB, gRPC: 23720, MsgBus: 17329 2025-03-28T09:13:23.299946Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785999503036566:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.300291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001562/r3tmp/tmp8lGgX0/pdisk_1.dat 2025-03-28T09:13:24.398470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:24.414203Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.439046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.439154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.445683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23720, node 1 2025-03-28T09:13:24.691247Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.691271Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.691278Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.691391Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17329 TClient is connected to server localhost:17329 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:25.561591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.606445Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:25.633636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.837374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.378404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.522718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.271458Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785999503036566:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.298622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:28.653212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786020977874667:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.653320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.150340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.213036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.270808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.319898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.388444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.457864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.530432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786029567809782:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.530561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.530946Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786029567809788:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.534750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.546066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786029567809790:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:30.647234Z node 1 :TX_PROXY ERROR: Actor# [1:7486786029567809849:3460] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 31800, MsgBus: 20567 2025-03-28T09:13:34.576734Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786050127110872:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:34.576824Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001562/r3tmp/tmpabJsBL/pdisk_1.dat 2025-03-28T09:13:34.700474Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:34.727160Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:34.727262Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:34.728356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31800, node 2 2025-03-28T09:13:34.899326Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:34.899352Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:34.899360Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:34.899468Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20567 TClient is connected to server localhost:20567 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:35.408947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:35.428719Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:38.884189Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786067306980720:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.884298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.912951Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.031053Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786071601948123:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.031155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.031580Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786071601948128:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.036274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:13:39.052693Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-03-28T09:13:39.053721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786071601948130:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:13:39.134190Z node 2 :TX_PROXY ERROR: Actor# [2:7486786071601948181:2398] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:39.577078Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786050127110872:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:39.577149Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15631, MsgBus: 4859 2025-03-28T09:13:40.489705Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786073921794839:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:40.489764Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001562/r3tmp/tmpvvLYRq/pdisk_1.dat 2025-03-28T09:13:40.637146Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:40.663355Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:40.663440Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:40.671755Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15631, node 3 2025-03-28T09:13:40.727156Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:40.727180Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:40.727188Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:40.727300Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4859 TClient is connected to server localhost:4859 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:13:41.195171Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:41.204746Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:41.212312Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:41.291492Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:41.470550Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:13:41.553929Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:13:43.978499Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786086806698510:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:43.978590Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.044814Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.104663Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.142853Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.188739Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.232814Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.308414Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.361941Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786091101666324:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.362054Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.362217Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786091101666329:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.367399Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:44.377302Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786091101666331:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:44.431340Z node 3 :TX_PROXY ERROR: Actor# [3:7486786091101666383:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:45.490894Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786073921794839:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:45.490978Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpQueryService::CheckIsolationLevelFroPerStatementMode [GOOD]
>> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid
>> KqpQueryService::DmlNoTx [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_BadTransactions [GOOD]
Test command err:
Trying to start YDB, gRPC: 21265, MsgBus: 4898 2025-03-28T09:13:23.450305Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786001480210152:2075];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00154d/r3tmp/tmpibh3Ng/pdisk_1.dat 2025-03-28T09:13:23.787671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:13:24.353225Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.372458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.372585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.393194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21265, node 1 2025-03-28T09:13:24.586475Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.586492Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.586498Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.586610Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4898 TClient is connected to server localhost:4898 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:26.021436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.089308Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:26.118584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.361136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.676532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.821341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.446796Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786001480210152:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.446882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:29.003943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786022955048384:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:29.004065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.150284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.199449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.240074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.284578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.333707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.382187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.450850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786031544983494:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.451040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.451686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786031544983499:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.455987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.472755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786031544983502:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.532866Z node 1 :TX_PROXY ERROR: Actor# [1:7486786031544983556:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13366, MsgBus: 26009 2025-03-28T09:13:33.355761Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786043556905097:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:33.426754Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00154d/r3tmp/tmpUGqctw/pdisk_1.dat 2025-03-28T09:13:33.557493Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:33.560768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:33.560855Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:33.567105Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13366, node 2 2025-03-28T09:13:33.723809Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:33.723837Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:33.723846Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:33.723990Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26009 TClient is connected to server localhost:26009 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:34.302131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:34.318327Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:34.329894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:34.497405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:34.683247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... ... ched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:13:44.774298Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:13:44.774328Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:13:44.774415Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:13:44.774449Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:13:44.836212Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:44.840396Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:44.846296Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:44.846768Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:44.852514Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:44.852516Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:44.858402Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:44.858560Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:44.866228Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:44.866327Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:44.875821Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.956636Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786090921047724:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.956722Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.956943Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786090921047729:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.961276Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-28T09:13:44.974897Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786090921047731:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-28T09:13:45.033688Z node 3 :TX_PROXY ERROR: Actor# [3:7486786095216015078:2683] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:45.186102Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715663;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-28T09:13:45.186148Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715663;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-28T09:13:45.186565Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=281474976715663;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-28T09:13:45.186620Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7486786090921047214:2338];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037889;local_tx_no=13;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037893,72075186224037897;receive=72075186224037894; 2025-03-28T09:13:45.186771Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7486786090921047214:2338];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037889;local_tx_no=15;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037893;receive=72075186224037894; 2025-03-28T09:13:45.186847Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7486786090921047214:2338];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037889;local_tx_no=16;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037893;receive=72075186224037897; 2025-03-28T09:13:45.186912Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7486786090921047214:2338];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037889;local_tx_no=17;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037893;receive=72075186224037897; 2025-03-28T09:13:45.187383Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715663;tx_id=281474976715663;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715663; 2025-03-28T09:13:45.484956Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786073741177237:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:45.500230Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:45.639131Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWJjNWVmZGQtMmY0YzM4NGEtODc5YTEwY2YtYjQ1ODdlYw==, ActorId: [3:7486786095216015215:2497], ActorState: ExecuteState, TraceId: 01jqe0eqkm4r3xf61npt0495hw, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 
2025-03-28T09:13:45.767285Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Zjk1MWU2ZjctZjViNWFmNTMtZmZjM2JmZTEtNjNhM2I5Yjc=, ActorId: [3:7486786095216015237:2504], ActorState: ExecuteState, TraceId: 01jqe0eqxm8bvy0tazb60hsdbe, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 2025-03-28T09:13:45.983092Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGI0ZjJiZjQtZGM5MjM2NjctNjc4OTJkM2EtYTIxOGY2NDk=, ActorId: [3:7486786095216015259:2512], ActorState: ExecuteState, TraceId: 01jqe0er1kaw135n0w2xg4tcey, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 2025-03-28T09:13:46.302903Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NzU1OTYxNDAtNmVhZmZmNGQtZWI2YzFiZTYtYWRjNDhlMmU=, ActorId: [3:7486786095216015281:2521], ActorState: ExecuteState, TraceId: 01jqe0er8g68kt41dfcb0fwgf9, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 2025-03-28T09:13:46.494540Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NWE4ZDQ0MjktNGQzNGQ2ZDgtN2ZjNzU3NGUtODAxYjNhMGM=, ActorId: [3:7486786099510982596:2529], ActorState: ExecuteState, TraceId: 01jqe0erjh8ankwpzq2k5enpd9, Create QueryResponse for error on request, msg: Write transactions between column and row tables are disabled at current time. 2025-03-28T09:13:46.636954Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715671;tx_id=281474976715671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715671; 2025-03-28T09:13:46.637414Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7486786090921047214:2338];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037889;local_tx_no=31;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037893,72075186224037894;receive=72075186224037897; 2025-03-28T09:13:46.637456Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7486786090921047214:2338];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037889;local_tx_no=32;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037893,72075186224037894;receive=72075186224037897; 2025-03-28T09:13:46.641109Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715671;tx_id=281474976715671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715671; 2025-03-28T09:13:46.641417Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=281474976715671;tx_id=281474976715671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715671; 2025-03-28T09:13:46.642142Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7486786090921047214:2338];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037889;local_tx_no=34;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037893;receive=72075186224037894; 2025-03-28T09:13:46.642240Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037889;self_id=[3:7486786090921047214:2338];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037889;local_tx_no=35;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037893;receive=72075186224037894; 2025-03-28T09:13:46.642756Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715671;tx_id=281474976715671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715671;
>> KqpQueryService::StreamExecuteCollectMeta [GOOD]
>> KqpQueryService::ShowCreateTableNotSuccess
>> TColumnShardTestSchema::RebootColdTiersWithStat [GOOD]
>> KqpQueryService::CreateAndAlterTopic [GOOD]
>> KqpQueryService::CreateOrDropTopicOverTable
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryExplicitTxTLI [GOOD]
Test command err:
Trying to start YDB, gRPC: 13651, MsgBus: 3020 2025-03-28T09:13:29.003028Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786023421752985:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:29.003339Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001524/r3tmp/tmpdm4vf6/pdisk_1.dat 2025-03-28T09:13:29.432102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:29.432222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:29.440199Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:13:29.458167Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13651, node 1 2025-03-28T09:13:29.549259Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:29.549285Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:29.549291Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:29.549396Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3020 TClient is connected to server localhost:3020 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:13:30.132019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:30.156428Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:30.171431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:30.382368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:30.644390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:30.723405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:32.616169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786040601623775:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:32.616274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:32.916076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.955658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.995167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:33.024977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:33.060497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:33.112724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:33.170007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786044896591583:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:33.170117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:33.170187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786044896591588:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:33.174432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:33.185741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786044896591590:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:33.262162Z node 1 :TX_PROXY ERROR: Actor# [1:7486786044896591643:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:33.998812Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786023421752985:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:33.998901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11163, MsgBus: 28273 2025-03-28T09:13:35.673156Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786053506532469:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:35.673226Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001524/r3tmp/tmpjpxLui/pdisk_1.dat 2025-03-28T09:13:35.911359Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:35.913108Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:35.913170Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:35.915100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11163, node 2 2025-03-28T09:13:36.011230Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:36.011252Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:36.011259Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:36.011364Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28273 TClient is connected to server localhost:28273 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:13:36.580790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.591848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.671443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.818308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.889868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreate ... .317300Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.349646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.378352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.415547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.451579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.499326Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786070686403924:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.499425Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.499430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786070686403929:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.502760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:39.516980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786070686403931:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:39.584463Z node 2 :TX_PROXY ERROR: Actor# [2:7486786070686403985:3440] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:40.632833Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjE0NDAyYjQtYjdiYTAzMmItMWEyZTMxNS02MWJjMGQ1OQ==, ActorId: [2:7486786074981371541:2488], ActorState: ReadyState, TraceId: 01jqe0ejyec00gq2g0e4ps98p3, Create QueryResponse for error on request, msg: 2025-03-28T09:13:40.673933Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786053506532469:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:40.673987Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8190, MsgBus: 29568 2025-03-28T09:13:41.405196Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786077692450998:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:41.405607Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001524/r3tmp/tmpUiTpQ8/pdisk_1.dat 2025-03-28T09:13:41.558989Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:41.577561Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:41.577636Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:41.579334Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8190, node 3 2025-03-28T09:13:41.669466Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:41.669489Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:41.669501Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:41.669625Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29568 TClient is connected to server localhost:29568 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:42.240806Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:42.264169Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:42.281095Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:42.385141Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:42.581939Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:42.668811Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:44.943729Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786090577354641:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.943820Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:45.013706Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:45.096557Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:45.181259Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:45.224963Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:45.270480Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:45.343177Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:45.418181Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786094872322461:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:45.418284Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:45.418469Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786094872322466:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:45.422805Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:45.457900Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786094872322468:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:45.515119Z node 3 :TX_PROXY ERROR: Actor# [3:7486786094872322523:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:46.405259Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786077692450998:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:46.405339Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:47.087243Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Y2UxNDZmNTAtNTNkNTYyZTUtYzRmMGIwY2ItYTE5MGZlNTY=, ActorId: [3:7486786099167290081:2489], ActorState: ExecuteState, TraceId: 01jqe0esa5bbhhm6prkyy7ga8x, Create QueryResponse for error on request, msg: >> KqpQueryService::SessionFromPoolSuccess |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |82.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Explain [GOOD] Test command err: Trying to start YDB, gRPC: 22204, MsgBus: 9469 2025-03-28T09:13:23.247118Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786002601044576:2135];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.247183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001550/r3tmp/tmpziOAc0/pdisk_1.dat 2025-03-28T09:13:24.273242Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.321462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.321555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.323418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:13:24.323813Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 22204, node 1 2025-03-28T09:13:24.468317Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.468341Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.468347Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.468464Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:9469 TClient is connected to server localhost:9469 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:25.561263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.612204Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:25.635050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.852555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.519837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.639701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.247984Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786002601044576:2135];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.248270Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:28.479040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786024075882783:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.479170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.149843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.210080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.263126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.315233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.382769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.466176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.565675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786032665817910:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.565816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.566042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786032665817915:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.570310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.586905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786032665817917:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.671198Z node 1 :TX_PROXY ERROR: Actor# [1:7486786032665817972:3468] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 18835, MsgBus: 15560 2025-03-28T09:13:34.571363Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786046708609602:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:34.572615Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001550/r3tmp/tmpz6aggM/pdisk_1.dat 2025-03-28T09:13:34.799399Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:34.819963Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:34.820047Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:34.831557Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18835, node 2 2025-03-28T09:13:35.005759Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:35.005796Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:35.005806Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:35.005927Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15560 TClient is connected to server localhost:15560 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:35.536019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:35.549553Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:35.560188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:35.637997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09 ... tus: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.138354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.177865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.214007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.250620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.323828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.367623Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.452482Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786063888480989:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.452560Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.453014Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786063888480994:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.456705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:38.472020Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786063888480996:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:38.535960Z node 2 :TX_PROXY ERROR: Actor# [2:7486786063888481051:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:39.560332Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786046708609602:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:39.560404Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:39.998222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 10 Trying to start YDB, gRPC: 32366, MsgBus: 25561 2025-03-28T09:13:41.547324Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786078484822711:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:41.548160Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001550/r3tmp/tmpWe7H2d/pdisk_1.dat 2025-03-28T09:13:41.674568Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:41.685436Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:41.685514Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:41.687641Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32366, node 3 2025-03-28T09:13:41.787706Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:41.787727Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:41.787735Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:41.787840Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25561 TClient is connected to server localhost:25561 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:42.298098Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:42.310906Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:42.337709Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:42.426634Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:42.611001Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:42.746666Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:45.749431Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786095664693627:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:45.749562Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:45.820557Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:45.865223Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:45.905988Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:45.939427Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:45.973342Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.062125Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.150478Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786099959661440:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:46.150593Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:46.150969Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786099959661445:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:46.154839Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:46.167188Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786099959661447:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:46.260044Z node 3 :TX_PROXY ERROR: Actor# [3:7486786099959661502:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:46.548735Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786078484822711:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:46.548855Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots >> KqpQueryService::TableSink_OlapUpdate >> KqpQueryService::FlowControllOnHugeRealTable-LongRow [GOOD] >> KqpQueryService::ForbidInteractiveTxOnImplicitSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143153740.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143153740.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143153740.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153740.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143153740.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143153740.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123152540.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123153740.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123153740.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123152540.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123152540.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123152540.000000s;Name=;Codec=}; 2025-03-28T09:12:21.151523Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:12:21.282228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:12:21.315705Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:12:21.323520Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at 
tablet 9437184 2025-03-28T09:12:21.336361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:12:21.336587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:12:21.336817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:12:21.336937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:12:21.337048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:12:21.337158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:12:21.337262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:12:21.337400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:12:21.337684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:12:21.337814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:12:21.337939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:12:21.338071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:12:21.366854Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:12:21.367173Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:12:21.367234Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:12:21.367400Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:12:21.367572Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:12:21.367678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:12:21.367717Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:12:21.367808Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:12:21.367864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:12:21.367909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:12:21.367938Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:12:21.368088Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:12:21.368149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:12:21.368186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:12:21.368214Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:12:21.368292Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:12:21.368340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:12:21.368380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:12:21.368419Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:12:21.368485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:12:21.368519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:12:21.368549Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-03-28T09:12:21.368595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:12:21.368637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:12:21.368668Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:12:21.369050Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-03-28T09:12:21.369130Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-03-28T09:12:21.369201Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-03-28T09:12:21.369308Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=54; 2025-03-28T09:12:21.369480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:12:21.369535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:12:21.369569Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:12:21.369783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:12:21.369828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:12:21.369859Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:12:21.370014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:12:21.370052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... 
ta.cpp:29;EXECUTE:finishLoadingTime=332; 2025-03-28T09:13:48.744574Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=53952; 2025-03-28T09:13:48.755989Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=11332; 2025-03-28T09:13:48.763249Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=6292; 2025-03-28T09:13:48.763355Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7264; 2025-03-28T09:13:48.763527Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=106; 2025-03-28T09:13:48.763644Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=69; 2025-03-28T09:13:48.763771Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=82; 2025-03-28T09:13:48.763874Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=58; 2025-03-28T09:13:48.774483Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=10538; 2025-03-28T09:13:48.788981Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=14384; 2025-03-28T09:13:48.789117Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=45; 2025-03-28T09:13:48.789199Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=28; 2025-03-28T09:13:48.789258Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-03-28T09:13:48.789307Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-03-28T09:13:48.789355Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-03-28T09:13:48.789438Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2025-03-28T09:13:48.789484Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2025-03-28T09:13:48.789573Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=55; 2025-03-28T09:13:48.789616Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-03-28T09:13:48.789687Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2025-03-28T09:13:48.789810Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=71; 2025-03-28T09:13:48.790173Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=318; 2025-03-28T09:13:48.790216Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=107351; 2025-03-28T09:13:48.790354Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=31203592;raw_bytes=48253350;count=18;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:13:48.790456Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:13:48.790512Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:13:48.790568Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:13:48.808708Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-28T09:13:48.808859Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:13:48.808924Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:13:48.808995Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-03-28T09:13:48.809055Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:13:48.809106Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:13:48.809150Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:13:48.809187Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:13:48.809278Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:13:48.809775Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:13:48.809855Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2212:4085];tablet_id=9437184;parent=[1:2172:4052];fline=manager.cpp:82;event=ask_data;request=request_id=128;1={portions_count=18};; 2025-03-28T09:13:48.810905Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:13:48.811332Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:13:48.811364Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-03-28T09:13:48.811386Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:13:48.811423Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:13:48.811480Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:13:48.811530Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-03-28T09:13:48.811586Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:13:48.811626Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:13:48.811668Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:13:48.811704Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:13:48.811786Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:13:48.819555Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=18;path_id=1; 2025-03-28T09:13:48.820596Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DmlNoTx [GOOD] Test command err: Trying to start YDB, gRPC: 64478, MsgBus: 5956 2025-03-28T09:13:23.279672Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785999255958011:2168];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.279733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001525/r3tmp/tmpmYX0dy/pdisk_1.dat 2025-03-28T09:13:24.353656Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.386816Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:24.396377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.396504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.403849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64478, node 1 2025-03-28T09:13:24.651305Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.651335Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.651343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.651461Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5956 TClient is connected to server localhost:5956 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:25.617980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.663139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.881241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.562620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.658208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.280356Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785999255958011:2168];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.280439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:28.648890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786020730796172:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.649002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.149843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.200765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.236651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.282869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.333110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.444250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.533747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786029320731290:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.533821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.534183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786029320731295:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.537711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.550303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786029320731297:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.614674Z node 1 :TX_PROXY ERROR: Actor# [1:7486786029320731353:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 23518, MsgBus: 8565 2025-03-28T09:13:34.724179Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786049260472819:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:34.724239Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001525/r3tmp/tmpHJ5DER/pdisk_1.dat 2025-03-28T09:13:34.922626Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:34.937285Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:34.937398Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:34.939210Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23518, node 2 2025-03-28T09:13:35.068480Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:35.068504Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:35.068518Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:35.068645Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8565 TClient is connected to server localhost:8565 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:36.130268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:36.143777Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:36.153473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.241657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.451105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 2814 ... ose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.554864Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786070735311595:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.555000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.562408Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786070735311600:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.571545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:39.586350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786070735311602:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:39.661942Z node 2 :TX_PROXY ERROR: Actor# [2:7486786070735311657:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:39.724421Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786049260472819:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:39.724498Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:40.920635Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486786075030279232:2497], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject, At function: SqlProjectItem
:1:8: Error: At function: Member
:1:8: Error: Member not found: test_ast_column 2025-03-28T09:13:40.922363Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzRiY2E5MmQtNWMyODc2NzUtZjVhNzE4YzYtYWJiNzg2OTA=, ActorId: [2:7486786075030279230:2496], ActorState: ExecuteState, TraceId: 01jqe0ek626kksjr03z2pw20ey, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:13:41.223580Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486786079325246585:2502], TxId: 281474976710672, task: 1. Ctx: { TraceId : 01jqe0eka8fj2ckr8v5715d37z. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NjMxZTQ1ZDUtYWQ0NTY1YjktYjkwY2EyYzgtOWEyMzgxYWU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(43):
:1:8: Failed to unwrap empty optional }. 2025-03-28T09:13:41.229819Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjMxZTQ1ZDUtYWQ0NTY1YjktYjkwY2EyYzgtOWEyMzgxYWU=, ActorId: [2:7486786075030279262:2502], ActorState: ExecuteState, TraceId: 01jqe0eka8fj2ckr8v5715d37z, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 26387, MsgBus: 5352 2025-03-28T09:13:42.467202Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786084040641892:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:42.467246Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001525/r3tmp/tmp0jiTfD/pdisk_1.dat 2025-03-28T09:13:42.641497Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:42.658851Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:42.658935Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:42.660476Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26387, node 3 2025-03-28T09:13:42.721501Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:42.721523Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:42.721531Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:42.721644Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5352 TClient is connected to server localhost:5352 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:43.245578Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:43.250291Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:43.268711Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:43.368703Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:43.542586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:43.639155Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:46.270008Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786101220512821:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:46.270101Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:46.328756Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.382931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.420828Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.460820Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.508751Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.581751Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.643842Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786101220513337:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:46.643940Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:46.644285Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786101220513342:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:46.648222Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:46.664994Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786101220513344:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:46.754253Z node 3 :TX_PROXY ERROR: Actor# [3:7486786101220513399:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:47.468082Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786084040641892:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:47.468173Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpDocumentApi::RestrictWriteExplicitPrepare >> KqpQueryService::PeriodicTaskInSessionPool [GOOD] >> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle >> KqpQueryServiceScripts::ExecuteScriptWithWorkloadManager [GOOD] >> KqpQueryServiceScripts::ExplainScript >> KqpQueryServiceScripts::ListScriptExecutions [GOOD] >> KqpQueryServiceScripts::Tcl >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithParameters >> KqpQueryService::ExecuteQueryUpsertDoesntChangeIndexedValuesIfNotChanged [GOOD] >> KqpQueryService::ExecuteQueryPure >> KqpQueryService::TableSink_HtapComplex-withOltpSink [GOOD] >> KqpQueryService::TableSink_HtapInteractive+withOltpSink >> KqpService::CloseSessionsWithLoad >> KqpQueryService::ReadManyRanges >> KqpQueryServiceScripts::ExecuteScriptStatsProfile >> KqpDocumentApi::RestrictDrop [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineDistanceNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 12680, MsgBus: 63016 2025-03-28T09:12:17.945298Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785718660056636:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:17.945348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001930/r3tmp/tmpWSPt2q/pdisk_1.dat 2025-03-28T09:12:18.524323Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:18.528636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:18.528721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:18.532609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12680, node 1 2025-03-28T09:12:18.683147Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:18.683169Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:18.683175Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:18.683268Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63016 TClient is connected to server localhost:63016 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:19.645140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:19.669929Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:19.678416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:19.804441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:19.984978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:20.057575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:22.576967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785740134894828:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:22.577075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:22.912078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:22.944536Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785718660056636:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:22.944598Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:22.978282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.017801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.089927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.140151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.231158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.408495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785744429862653:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:23.408573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:23.408856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785744429862658:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:23.413280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:23.426776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785744429862660:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:23.508822Z node 1 :TX_PROXY ERROR: Actor# [1:7486785744429862716:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:25.466478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:25.837630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:12:25.934778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:12:33.527988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:12:33.528036Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 16302, MsgBus: 27658 2025-03-28T09:13:02.252723Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486785909270867679:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:02.252772Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001930/r3tmp/tmpowqDGj/pdisk_1.dat 2025-03-28T09:13:02.398230Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:02.415513Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:02.415609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:02.417133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16302, node 2 2025-03-28T09:13:02.495341Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:02.495368Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:02.495377Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:02.495520Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27658 TClient is connected to server localhost:27658 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:03.013448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:03.023107Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:03.036723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:03.126556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:03.425675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:03.539923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:06.437888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785926450738619:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:06.437958Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:06.495458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:06.569955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:06.642376Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:06.682584Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:06.752256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:06.824126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:06.889953Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785926450739143:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:06.890077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:06.890355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785926450739148:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:06.895002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:06.989885Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785926450739150:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:07.078732Z node 2 :TX_PROXY ERROR: Actor# [2:7486785930745706506:3461] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:07.258802Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785909270867679:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:07.269364Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:08.235183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:08.590370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:13:08.654083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-28T09:13:08.701334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2025-03-28T09:13:08.753789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710763:0, at schemeshard: 72057594046644480 2025-03-28T09:13:08.818118Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-03-28T09:13:08.818485Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-03-28T09:13:08.818505Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-03-28T09:13:17.378832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:13:17.378866Z node 2 :IMPORT WARN: Table profiles were not loaded >> KqpQueryService::ExecuteQueryInteractiveTx |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |82.9%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test >> KqpQueryServiceScripts::ExecuteScriptStatsNone [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithTimeout [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpDocumentApi::RestrictDrop [GOOD] Test command err: Trying to start YDB, gRPC: 27477, MsgBus: 7722 2025-03-28T09:13:23.168324Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786001630911640:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.168374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00152c/r3tmp/tmpVgKvhQ/pdisk_1.dat 2025-03-28T09:13:24.309835Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.322626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.334858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.336088Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:24.339690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27477, node 1 2025-03-28T09:13:24.469171Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.469204Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.469213Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.469320Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7722 TClient is connected to server localhost:7722 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:25.618109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.657373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.846118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:26.385246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.487352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.171151Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786001630911640:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.171231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:28.578580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786023105749777:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.578722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.149983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.218902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.259077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.295532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.332956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.416911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.483112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786031695684896:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.483201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.483448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786031695684901:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.487739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.501606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786031695684903:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.573965Z node 1 :TX_PROXY ERROR: Actor# [1:7486786031695684958:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:32.238646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.384832Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486786040285619928:2509], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-03-28T09:13:32.386498Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTM4ZTI1Y2UtODIwZmE4YmQtZGYzM2JhYzgtMzY4MDhkNTk=, ActorId: [1:7486786040285619837:2498], ActorState: ExecuteState, TraceId: 01jqe0eaxr0ezs2z0fetm91s6q, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 Trying to start YDB, gRPC: 26084, MsgBus: 15677 2025-03-28T09:13:34.563577Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786047710660599:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:34.563614Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00152c/r3tmp/tmp66KcNy/pdisk_1.dat 2025-03-28T09:13:34.848361Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:34.879582Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:34.879658Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:34.883834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26084, node 2 2025-03-28T09:13:35.043245Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:35.043276Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:35.043285Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:35.043382Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15677 TClient is connected to server localhost:15677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Ver ... t proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.836329Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.931743Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.982302Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786089524651987:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.982372Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.982661Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786089524651992:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.986973Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:44.996191Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786089524651994:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:45.085532Z node 3 :TX_PROXY ERROR: Actor# [3:7486786093819619344:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:46.075172Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786076639747848:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:46.075229Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:46.235378Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:2:61: Error: At function: KiAlterTable!
:2:61: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 2025-03-28T09:13:46.359313Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4679, MsgBus: 21335 2025-03-28T09:13:47.305295Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486786103889400273:2145];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:47.335012Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00152c/r3tmp/tmplcEgC7/pdisk_1.dat 2025-03-28T09:13:47.541395Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:47.568258Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:47.568360Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:47.571008Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4679, node 4 2025-03-28T09:13:47.662994Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:47.663017Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:47.663025Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:47.663131Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21335 TClient is connected to server localhost:21335 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:48.219554Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:48.228574Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:48.236177Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:48.339038Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:48.529398Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:48.614303Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:51.132738Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786121069271136:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:51.132825Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:51.202193Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:51.250874Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:51.308879Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:51.377074Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:51.432670Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:51.496620Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:51.667955Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786121069271660:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:51.668045Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:51.668417Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786121069271665:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:51.674508Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:51.699807Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486786121069271667:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:51.762622Z node 4 :TX_PROXY ERROR: Actor# [4:7486786121069271722:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:52.307102Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486786103889400273:2145];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:52.307196Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:52.916664Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
: Error: Type annotation, code: 1030
:2:24: Error: At function: KiDropTable!
:2:24: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 >> KqpService::SessionBusy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptStatsNone [GOOD] Test command err: Trying to start YDB, gRPC: 65519, MsgBus: 12541 2025-03-28T09:13:23.204601Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786000411972817:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.205168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001529/r3tmp/tmpEd1ubV/pdisk_1.dat 2025-03-28T09:13:24.324177Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.360068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.360165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.360966Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:24.370138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65519, node 1 2025-03-28T09:13:24.758205Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.758240Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.758270Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.758415Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12541 TClient is connected to server localhost:12541 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:26.268923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:26.308141Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:26.325438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.625157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.959798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:27.052542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.202801Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786000411972817:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.202880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:29.077882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786026181778258:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:29.077997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.150739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.188307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.236641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.274019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.332877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.403966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.524090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786030476746081:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.524164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.525110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786030476746086:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.528699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.539058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786030476746088:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.615057Z node 1 :TX_PROXY ERROR: Actor# [1:7486786030476746144:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:32.162305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.163902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.164971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6818, MsgBus: 9502 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001529/r3tmp/tmpztT2dv/pdisk_1.dat 2025-03-28T09:13:35.747318Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:35.936379Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:36.048794Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:36.048878Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:36.050310Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6818, node 2 2025-03-28T09:13:36.231240Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:36.231268Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:36.231278Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:36.231397Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9502 TClient is connected to server localhost:9502 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:13:36.869003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.875393Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:1 ... , opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.972687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:40.009977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:40.068037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:40.135015Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786072810884843:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:40.135141Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:40.135375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786072810884849:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:40.138497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:40.149409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786072810884851:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:40.239783Z node 2 :TX_PROXY ERROR: Actor# [2:7486786072810884907:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:41.325461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:41.327064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:13:41.328153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11367, MsgBus: 25578 2025-03-28T09:13:45.062623Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786096765371750:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:45.064250Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001529/r3tmp/tmpGSu6Ch/pdisk_1.dat 2025-03-28T09:13:45.266988Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:45.275810Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:45.275930Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:45.301833Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11367, node 3 2025-03-28T09:13:45.360213Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:45.360239Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:45.360250Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:45.360416Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25578 TClient is connected to server localhost:25578 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:13:45.855527Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:13:45.883375Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:46.029441Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:46.341287Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:46.444121Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:49.055774Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786113945242691:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:49.055858Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:49.133718Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:49.199880Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:49.279611Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:49.329183Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:49.381715Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:49.478910Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:49.606295Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786113945243211:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:49.606430Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:49.606744Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786113945243216:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:49.610974Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:49.628586Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-28T09:13:49.628856Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786113945243218:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:49.728033Z node 3 :TX_PROXY ERROR: Actor# [3:7486786113945243277:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:50.054934Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786096765371750:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:50.054993Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:51.304989Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:51.314920Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:13:51.316817Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpQueryService::TableSink_Oltp_Replace+UseSink [GOOD] >> KqpQueryService::ExecuteRetryQuery [GOOD] |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid [GOOD] |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots >> Yq_1::Basic_Null [GOOD] >> Yq_1::Basic_TaggedLiteral ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_Oltp_Replace+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14934, MsgBus: 8315 2025-03-28T09:13:23.259582Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786001429379797:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.259649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001540/r3tmp/tmpQ7Pq5g/pdisk_1.dat 2025-03-28T09:13:24.286069Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.297046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.297198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.316800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:13:24.324969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 14934, node 1 
2025-03-28T09:13:24.715171Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.715196Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.715203Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.715308Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8315 TClient is connected to server localhost:8315 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:25.689418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.734062Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:28.260248Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786001429379797:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.260321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:28.475028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786022904216862:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.475247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.151059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.336882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786031494151569:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.336973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.337310Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786031494151576:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.341170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.372395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786031494151578:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:13:30.468694Z node 1 :TX_PROXY ERROR: Actor# [1:7486786031494151632:2412] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:32.640638Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486786040084086333:2387], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Missing key column in input: Col1 for table: /Root/DataShard, code: 2029 2025-03-28T09:13:32.642246Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2IwNjdkMmEtNzFiNDg3MmMtNzE0MDJiYTUtYjU0M2ZlYWE=, ActorId: [1:7486786040084086331:2386], ActorState: ExecuteState, TraceId: 01jqe0eb5r2n784bpykb43ptw3, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 6155, MsgBus: 9313 2025-03-28T09:13:38.479706Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786065758464858:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:38.480308Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001540/r3tmp/tmpQ3Dlkq/pdisk_1.dat 2025-03-28T09:13:38.598160Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:38.633762Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:38.633840Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6155, node 2 2025-03-28T09:13:38.638199Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:13:38.715195Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:38.715217Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:38.715224Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:38.715331Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9313 TClient is connected to server localhost:9313 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:39.198159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:39.206045Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:41.647949Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786078643367392:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:41.648031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:41.659104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:41.724681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786078643367494:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:41.724758Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:41.724999Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786078643367499:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:41.729141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:13:41.746884Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786078643367501:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:13:41.815828Z node 2 :TX_PROXY ERROR: Actor# [2:7486786078643367552:2393] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } WAIT_INDEXATION: 0 2025-03-28T09:13:43.482233Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786065758464858:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:43.482319Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 5544, MsgBus: 4144 2025-03-28T09:13:48.626287Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786107201272650:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:48.626317Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001540/r3tmp/tmpsdW3zM/pdisk_1.dat 2025-03-28T09:13:48.901681Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:48.924745Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:48.924826Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:48.926147Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5544, node 3 2025-03-28T09:13:49.021690Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:49.021712Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:49.021719Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:49.021817Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4144 TClient is connected to server localhost:4144 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:13:49.572847Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:49.593737Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:52.559115Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786124381142496:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:52.559238Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:52.625075Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:53.060785Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:13:53.611324Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786128676111214:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:53.611483Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:53.611833Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786128676111220:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:53.623980Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-28T09:13:53.630820Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786107201272650:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:53.631748Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:53.665733Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-28T09:13:53.666891Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786128676111222:2447], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-28T09:13:53.716788Z node 3 :TX_PROXY ERROR: Actor# [3:7486786128676111285:3265] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryService::ShowCreateTableNotSuccess [GOOD] >> KqpQueryService::TempTablesDrop >> KqpService::SwitchCache-UseCache [GOOD] >> KqpService::ToDictCache+UseCache >> KqpQueryServiceScripts::EmptyNextFetchToken [GOOD] >> KqpQueryService::ExecuteQueryPure [GOOD] >> KqpQueryService::ExecuteQueryScalar >> KqpQueryService::SessionFromPoolSuccess [GOOD] >> KqpQueryService::SeveralCTAS+UseSink >> KqpQueryServiceScripts::InvalidFetchToken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteRetryQuery [GOOD] Test command err: Trying to start YDB, gRPC: 21867, MsgBus: 26885 2025-03-28T09:13:23.279278Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786002812127305:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.279340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00152b/r3tmp/tmpWNDXoF/pdisk_1.dat 2025-03-28T09:13:24.170382Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.228614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.228708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.233369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21867, node 1 2025-03-28T09:13:24.467966Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.467992Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.467998Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.468112Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26885 TClient is connected to server localhost:26885 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:25.630117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.641679Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:25.657414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.303511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.532762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.622471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.280199Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786002812127305:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.280260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:28.371541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786024286965549:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.371676Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.149839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.199387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.227982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.268161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.358438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.466375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.576842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786032876900684:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.576927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.577270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786032876900689:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.581190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.602328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786032876900691:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.662390Z node 1 :TX_PROXY ERROR: Actor# [1:7486786032876900745:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:32.705902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786041466835717:2514], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-03-28T09:13:32.705985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786041466835719:2516], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-03-28T09:13:32.706065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool another_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-03-28T09:13:32.706145Z node 1 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [1:7486786041466835718:2515], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=YWQ5NDc0NmYtYzNjOGZiNGItMjIzZDhjYjQtY2Y2NWJlYmM=, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } 2025-03-28T09:13:32.706245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [1:7486786041466835718:2515], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=YWQ5NDc0NmYtYzNjOGZiNGItMjIzZDhjYjQtY2Y2NWJlYmM=, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-03-28T09:13:32.706340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [1:7486786041466835715:2513]: {
: Error: Failed to resolve pool id another_pool_id subissue: {
: Error: Resource pool another_pool_id not found or you don't have access permissions } } 2025-03-28T09:13:32.706432Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWQ5NDc0NmYtYzNjOGZiNGItMjIzZDhjYjQtY2Y2NWJlYmM=, ActorId: [1:7486786041466835715:2513], ActorState: ExecuteState, TraceId: 01jqe0eb8tf2zpwj301qwpa7ek, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool 2025-03-28T09:13:32.706682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7486786041466835715:2513]: Pool another_pool_id not found Trying to start YDB, gRPC: 28108, MsgBus: 18737 2025-03-28T09:13:34.687403Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786050061367152:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:34.687527Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00152b/r3tmp/tmpgploZz/pdisk_1.dat 2025-03-28T09:13:34.963596Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:34.976435Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:34.976520Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:34.979743Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28108, node 2 2025-03-28T09:13:35.047167Z node 2 :NET_CLASSIFIER WARN: distributable config ... don't have access permissions } 2025-03-28T09:13:38.679780Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786067241238628:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.683463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:38.696381Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786067241238630:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:38.765417Z node 2 :TX_PROXY ERROR: Actor# [2:7486786067241238687:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:39.693939Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786050061367152:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:39.694047Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:40.302253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:40.325200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:1, at schemeshard: 72057594046644480 2025-03-28T09:13:41.157709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-03-28T09:13:41.743931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:1, at schemeshard: 72057594046644480 2025-03-28T09:13:42.584838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-03-28T09:13:43.457833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.089440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715699:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.855894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715717:0, at schemeshard: 72057594046644480 Wait resource pool classifier 0.112922s: status = SUCCESS, issues = 2025-03-28T09:13:47.990369Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzJhMjk1NjctMzNjOGNhY2ItYmQwZmRjZDYtNDFiNDBmYWY=, ActorId: [2:7486786105895946037:2837], ActorState: ExecuteState, TraceId: 01jqe0et6k5ta35qxnwr5spmc0, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool MyPool Trying to start YDB, gRPC: 14786, MsgBus: 26609 2025-03-28T09:13:48.972106Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786109588912364:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:48.972152Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2909/00152b/r3tmp/tmpPJZmDX/pdisk_1.dat 2025-03-28T09:13:49.272552Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:49.308539Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:49.308636Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:49.310589Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14786, node 3 2025-03-28T09:13:49.391233Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:49.391259Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:49.391267Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:49.391393Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26609 TClient is connected to server localhost:26609 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:50.001506Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:50.017518Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:50.022095Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:50.145658Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:50.341023Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:50.470794Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
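[Editor's note: the "Operation part proposed ok, but propose itself is undo unsafe" warnings that dominate this log are emitted by the schemeshard for suboperations (ESchemeOpCreateTable, ESchemeOpAlterSubDomain, ESchemeOpCreateResourcePool, and so on) whose propose step, as the wording says, cannot be undone once accepted. In this run they accompany every schema change the tests perform and are each followed by normal completion, so they appear to be informational noise rather than failures; treat them as benign unless the corresponding operation also reports an error.]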
2025-03-28T09:13:53.826877Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786131063750606:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:53.826997Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:53.870661Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:53.927878Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:53.972463Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786109588912364:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:53.972515Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:53.987225Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.030462Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.075253Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.130562Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.184952Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786135358718415:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:54.185048Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:54.185347Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786135358718420:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:54.188924Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:54.205092Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786135358718422:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:54.260232Z node 3 :TX_PROXY ERROR: Actor# [3:7486786135358718476:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryService::IssuesInCaseOfSuccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::AlterTable_DropNotNull_WithSetFamily_Valid [GOOD] Test command err: Trying to start YDB, gRPC: 29039, MsgBus: 20355 2025-03-28T09:13:23.232384Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786001384144056:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.232465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001574/r3tmp/tmppUy9WP/pdisk_1.dat 2025-03-28T09:13:24.306437Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.354944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:24.359700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.359818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.367757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29039, node 1 2025-03-28T09:13:24.595168Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.595187Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.595194Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.595307Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20355 TClient is connected to server localhost:20355 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
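[Editor's note: the METADATA_PROVIDER errors of the form "TTableExistsActor;event=timeout" followed by "cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout" (or error=scheme_cache_undelivered_message) recur each time a fresh test node starts. They appear to come from the metadata-initialization probe firing before the newly started server has finished bootstrapping; in every occurrence in this section the test proceeds and later reports [GOOD], so these read as startup noise rather than test failures.]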
2025-03-28T09:13:25.820149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.066388Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:28.238944Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786001384144056:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.239038Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:28.741744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786022858981206:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.741895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.742358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786022858981218:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.771681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:13:28.790876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786022858981220:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:13:28.895592Z node 1 :TX_PROXY ERROR: Actor# [1:7486786022858981271:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:30.258577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480 2025-03-28T09:13:30.530794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.628462Z node 1 :TX_PROXY ERROR: Actor# [1:7486786031448916121:2495] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:30.648957Z node 1 :TX_PROXY ERROR: Actor# [1:7486786031448916128:2500] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/YTQyODM3NTYtOGI4OWMzYjQtNzJjZWExMDUtODhiZjRjNjk=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:30.685037Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-28T09:13:30.705034Z node 1 :TX_PROXY ERROR: Actor# [1:7486786031448916190:2548] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:30.707079Z node 1 :TX_PROXY ERROR: Actor# [1:7486786031448916197:2553] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/YTQyODM3NTYtOGI4OWMzYjQtNzJjZWExMDUtODhiZjRjNjk=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:30.709689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.244330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.373004Z node 1 :TX_PROXY ERROR: Actor# [1:7486786040038850982:2670] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:32.377507Z node 1 :TX_PROXY ERROR: Actor# [1:7486786040038850989:2675] txid# 281474976710675, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/YTQyODM3NTYtOGI4OWMzYjQtNzJjZWExMDUtODhiZjRjNjk=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, 
state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:32.407338Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-28T09:13:32.452429Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486786040038851045:2420], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:21: Error: At function: KiReadTable!
:3:21: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:13:32.453874Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2UyMjM5ZjItNTk0NDBlNjAtZDYxOWZhNmMtYTJmNjljYjM=, ActorId: [1:7486786040038851043:2419], ActorState: ExecuteState, TraceId: 01jqe0eazt2p27e3vsxf4b1njw, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:13:32.508710Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486786040038851055:2425], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:13:32.510297Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzUzNzZhYzAtYzg3YmJmYjgtNzNkZGI3MTItYmNlODMxNTM=, ActorId: [1:7486786040038851053:2424], ActorState: ExecuteState, TraceId: 01jqe0eb203d3ha79h8g6xe32t, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 12264, MsgBus: 24106 2025-03-28T09:13:33.421217Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786046009089862:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:33.421417Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001574/r3tmp/tmpJlD500/pdisk_1.dat 2025-03-28T09:13:33.630895Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:33.634773Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:33.634862Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:33.645353Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12264, node 2 2025-03-28T09:13:33.855334Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:33.855369Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:33.855376Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:33.855498Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24106 TClient is connected to server localhost:24106 WaitRootIsUp 'Root'... TClient::Ls ... type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.197742Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.240118Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.283149Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.349470Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.433809Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786091002380647:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.433945Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.434004Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786091002380652:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:44.438560Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:44.453324Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:13:44.453511Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786091002380654:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:44.530349Z node 3 :TX_PROXY ERROR: Actor# [3:7486786091002380709:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:44.908003Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786069527541893:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:44.908059Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:46.285356Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.410319Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.474612Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:13:47.133830Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:13:47.339239Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:13:47.464991Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:13:47.667900Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:13:47.969249Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:13:48.195775Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6162, MsgBus: 28828 2025-03-28T09:13:49.495638Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486786111197228629:2150];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:49.587308Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001574/r3tmp/tmp4wuFvP/pdisk_1.dat 2025-03-28T09:13:49.791593Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:49.822477Z node 4 :HIVE 
WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:49.822616Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:49.832352Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6162, node 4 2025-03-28T09:13:50.013880Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:50.013904Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:50.013915Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:50.014071Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28828 TClient is connected to server localhost:28828 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:50.955057Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:54.476378Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486786111197228629:2150];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:54.476482Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:55.318420Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786136967032964:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:55.318513Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:55.319057Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786136967032976:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:55.324830Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:13:55.339370Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486786136967032978:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:13:55.437579Z node 4 :TX_PROXY ERROR: Actor# [4:7486786136967033029:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:55.591277Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-03-28T09:13:56.118830Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7486786141262000481:2360], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:30: Error: At function: KiWriteTable!
:2:84: Error: Failed to convert type: Struct<'id':Int32,'val1':Null,'val2':Int32> to Struct<'id':Int32,'val1':Int32,'val2':Int32?>
:2:84: Error: Failed to convert 'val1': Null to Int32
:2:84: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-28T09:13:56.120740Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZTI0YmU0Y2MtNTZkNDIzNDUtNDk5NGUyODYtODM4NWEyZjE=, ActorId: [4:7486786141262000479:2359], ActorState: ExecuteState, TraceId: 01jqe0f23tfrfjkqvhcteb6s0r, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:13:56.172463Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:56.245290Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ShowCreateTableNotSuccess [GOOD] Test command err: Trying to start YDB, gRPC: 64988, MsgBus: 22421 2025-03-28T09:13:33.789314Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786043154779681:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:33.789367Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001521/r3tmp/tmpzFPTGA/pdisk_1.dat 2025-03-28T09:13:34.431245Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:34.432605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:34.432668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:34.436275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64988, node 1 2025-03-28T09:13:34.607202Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:34.607219Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:34.607223Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:34.607310Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22421 TClient is connected to server localhost:22421 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:35.606171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:35.632443Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:35.648189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:35.916561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.140800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.273979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:38.794823Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786043154779681:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:38.795042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:38.944623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786064629617938:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.944716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.369120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.403898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.438711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.521393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.560392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.611223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.695906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786068924585756:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.695987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.696051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786068924585761:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.699502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:39.708683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786068924585763:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:39.803677Z node 1 :TX_PROXY ERROR: Actor# [1:7486786068924585821:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 25405, MsgBus: 26353 2025-03-28T09:13:42.008362Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786082861816773:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:42.019992Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001521/r3tmp/tmpsb0QIU/pdisk_1.dat 2025-03-28T09:13:42.197413Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:42.223045Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:42.223124Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:42.228354Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25405, node 2 2025-03-28T09:13:42.364785Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:42.364803Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:42.364812Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:42.364915Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26353 TClient is connected to server localhost:26353 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:42.831812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
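The recurring sequence above (KQP_WORKLOAD_SERVICE reporting NOT_FOUND for PoolId default, a TPoolCreatorActor retry on "Transaction ... completed, doublechecking", then a TX_PROXY error "path exist, request accepts it" for /Root/.metadata/workload_manager/pools/default) is the lazy bootstrap of the default workload-manager resource pool: the first queries against a fresh database race to create it, and the losing creation attempt is absorbed because the path already exists. As a minimal sketch, assuming the CREATE RESOURCE POOL syntax of recent YDB releases (the settings shown are illustrative assumptions; the log only records the pool path being created):

    -- Hypothetical sketch of the pool that TPoolCreatorActor materializes at
    -- /Root/.metadata/workload_manager/pools/default; the settings are assumed,
    -- not taken from the log.
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = -1, -- -1 meaning "no limit" is an assumption
        QUEUE_SIZE = -1              -- likewise assumed
    );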
2025-03-28T09:13:42.844563Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:42.857097Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:42.951853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:43.158758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting ... eshard: 72057594046644480 2025-03-28T09:13:46.612503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.650352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.692237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.735304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.797446Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786100041688227:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:46.797557Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:46.799231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786100041688232:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:46.804939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:46.820217Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786100041688234:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:46.905737Z node 2 :TX_PROXY ERROR: Actor# [2:7486786100041688290:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:47.003901Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786082861816773:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:47.003965Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4350, MsgBus: 2329 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001521/r3tmp/tmpXidkdK/pdisk_1.dat 2025-03-28T09:13:50.098875Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:50.139378Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:50.141123Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:50.141197Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:50.155752Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4350, node 3 2025-03-28T09:13:50.323297Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:50.323317Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:50.323325Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:50.323434Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2329 TClient is connected to server localhost:2329 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
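A note on the TClient::Ls responses above: the .sys child is reported with PathId 18446744073709551615, which is 2^64 - 1, the maximum value of an unsigned 64-bit integer. From the output this reads as a sentinel marking the virtual system directory rather than a genuinely allocated path id; that interpretation is inferred from the log, not stated by it.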
2025-03-28T09:13:51.261391Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:13:51.313114Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:51.422246Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:51.734623Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:51.883387Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:54.751701Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786135391451835:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:54.751801Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:54.824656Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.877705Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.935069Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:55.020353Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:55.076842Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:55.165154Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:55.260500Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786139686419655:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:55.260610Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:55.261434Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786139686419660:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:55.271142Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:55.297565Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786139686419662:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:55.367632Z node 3 :TX_PROXY ERROR: Actor# [3:7486786139686419717:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:57.401630Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486786148276354602:2498], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:35: Error: At function: KiReadTable!
:2:35: Error: Cannot find table 'db.[/Root/test_show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:13:57.402878Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTI1OWU1MTctNjgzZjA2OWQtZjdhZGU5NmUtMzY1ZjYwNzM=, ActorId: [3:7486786148276354593:2492], ActorState: ExecuteState, TraceId: 01jqe0f3az3exrhx3kdkcv9vks, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:13:57.433522Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486786148276354634:2501], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:35: Error: At function: KiReadTable!
:2:35: Error: Cannot find table 'db.[/Root/.sys/show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:13:57.434920Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTI1OWU1MTctNjgzZjA2OWQtZjdhZGU5NmUtMzY1ZjYwNzM=, ActorId: [3:7486786148276354593:2492], ActorState: ExecuteState, TraceId: 01jqe0f3da8vsv4pfyq79shz2g, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::EmptyNextFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 25813, MsgBus: 22134 2025-03-28T09:13:23.247222Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786002492128114:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.247302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00154f/r3tmp/tmpuauZUA/pdisk_1.dat 2025-03-28T09:13:24.317622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.320056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.320632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:24.321915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25813, node 1 2025-03-28T09:13:24.349726Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.382548Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:13:24.383462Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:13:24.496219Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.496244Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.496266Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.496373Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22134 TClient is connected to server localhost:22134 WaitRootIsUp 'Root'... 
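The two KQP_COMPILE_ACTOR failures above are reads of tables that are absent at compile time, rejected during type annotation (code 1030) with the table-level error code 2003. A minimal YQL reproduction of the first failure, with the path taken verbatim from the log (whether the test deliberately queries a missing table is an assumption):

    -- Fails with SCHEME_ERROR: Cannot find table 'db.[/Root/test_show_create]'
    -- ... code: 2003, when the table does not exist or is inaccessible.
    SELECT * FROM `/Root/test_show_create`;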
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:25.577526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.608824Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:25.630543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.853228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:13:26.403521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:13:26.518203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.247739Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786002492128114:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.247817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:28.424556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786023966966369:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.424672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.150918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.200199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.260865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.348239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.396131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.489121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.563400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786032556901494:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.563505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.563748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786032556901499:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.568809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.582669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786032556901501:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.642111Z node 1 :TX_PROXY ERROR: Actor# [1:7486786032556901553:3468] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 31952, MsgBus: 11975 2025-03-28T09:13:34.607553Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786049435319238:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:34.608266Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00154f/r3tmp/tmpdRuhva/pdisk_1.dat 2025-03-28T09:13:34.791816Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:34.829498Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:34.829597Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:34.832315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31952, node 2 2025-03-28T09:13:35.039405Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:35.039435Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:35.039445Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:35.039578Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11975 TClient is connected to server localhost:11975 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:36.134220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.144594Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:36.173083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:36.285661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation p ... ndo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.678639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.760899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.846481Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786070910157903:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.846553Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.850801Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786070910157908:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.854120Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:39.870097Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786070910157910:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:39.923745Z node 2 :TX_PROXY ERROR: Actor# [2:7486786070910157963:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:40.925564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:40.928538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:40.929869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4940, MsgBus: 27448 2025-03-28T09:13:44.688790Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786089538777579:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00154f/r3tmp/tmpyHP2dk/pdisk_1.dat 2025-03-28T09:13:44.777996Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:13:44.915181Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:44.926418Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:44.926514Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:44.933052Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4940, node 3 2025-03-28T09:13:45.171230Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:45.171255Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:45.171265Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:45.171390Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27448 TClient is connected to server localhost:27448 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:46.029193Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:46.051105Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:46.073384Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:46.185948Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:46.365389Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:46.445061Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:49.409131Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786111013615678:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:49.409271Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:49.465380Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:49.517436Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:49.566329Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:49.613549Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786089538777579:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:49.613626Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:49.645458Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:49.750796Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:49.829517Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:49.937985Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786111013616197:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:49.938091Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:49.938328Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786111013616202:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:49.942944Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:49.971587Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786111013616204:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:50.041326Z node 3 :TX_PROXY ERROR: Actor# [3:7486786115308583555:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:51.326867Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:51.328586Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:13:51.330090Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:55.796756Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153235817, txId: 281474976710715] shutting down >> KqpQueryService::ForbidInteractiveTxOnImplicitSession [GOOD] |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fqrun/fqrun |82.9%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun >> KqpQueryService::CreateOrDropTopicOverTable [GOOD] >> KqpQueryService::TableSink_HtapInteractive+withOltpSink [GOOD] >> KqpQueryServiceScripts::ExecuteScriptPg [GOOD] >> KqpQueryServiceScripts::TestAstWithCompression [GOOD] >> KqpMultishardIndex::DataColumnWrite-UseSink [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::InvalidFetchToken [GOOD] Test command err: Trying to start YDB, gRPC: 64429, MsgBus: 1765 2025-03-28T09:13:23.279546Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786001830517698:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.279811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001564/r3tmp/tmpCrqEpG/pdisk_1.dat 2025-03-28T09:13:24.402873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:24.453432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.453544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.457201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64429, node 1 2025-03-28T09:13:24.668784Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:13:24.675022Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:13:24.695056Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.899231Z node 1 :NET_CLASSIFIER WARN: distributable config 
is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.899251Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.899264Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.899378Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1765 TClient is connected to server localhost:1765 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:26.608188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.659110Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:26.679824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:27.008760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:27.312057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:27.454371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.277935Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786001830517698:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.278047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:29.259763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786027600323107:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:29.259873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.149615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.233405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.282919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.325937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.396429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.517542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.622404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786031895290933:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.622494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.622775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786031895290938:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.627108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.645732Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:13:30.646130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786031895290940:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.747303Z node 1 :TX_PROXY ERROR: Actor# [1:7486786031895290995:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:32.157786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.159098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.160321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:33.283674Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 67ce8eb4-a9b294d5-70bc75e8-f698e89b, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-28T09:13:33.325223Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 67ce8eb4-a9b294d5-70bc75e8-f698e89b, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-28T09:13:33.391195Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 67ce8eb4-a9b294d5-70bc75e8-f698e89b, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-28T09:13:33.425508Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 67ce8eb4-a9b294d5-70bc75e8-f698e89b, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-28T09:13:33.450384Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 67ce8eb4-a9b294d5-70bc75e8-f698e89b, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-28T09:13:33.478500Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 67ce8eb4-a9b294d5-70bc75e8-f698e89b, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-28T09:13:33.502739Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 67ce8eb4-a9b294d5-70bc75e8-f698e89b, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-28T09:13:33.533891Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 67ce8eb4-a9b294d5-70bc75e8-f698e89b, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-28T09:13:33.571036Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 67ce8eb4-a9b294d5-70bc75e8-f698e89b, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-28T09:13:33.592171Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 67ce8eb4-a9b294d5-70bc75e8-f698e89b, reply PRECONDITION_FAILED, issues: {
: Error: Operation is still running } 2025-03-28T09:13:33.650802Z node 1 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: 67ce8eb4-a9b294d5-70bc75e8-f698e89b, reply PRECONDITION_FAILED, issues: {
... l default not found or you don't have access permissions } 2025-03-28T09:13:41.816192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786077170261342:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:41.820467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:41.840427Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786077170261344:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:41.950530Z node 2 :TX_PROXY ERROR: Actor# [2:7486786077170261399:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:42.184890Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786059990389998:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:42.184959Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:43.207342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:43.209456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:43.211315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.018528Z node 2 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: f7421255-dd21325f-9fef8d0f-4e9213aa, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=MjU1YjNiYjEtODA1ZTMwZDQtOTIwZDhlM2ItYWUxN2VmMjg=, TxId: 2025-03-28T09:13:46.050522Z node 2 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: f7421255-dd21325f-9fef8d0f-4e9213aa, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=MmZlZWM0NjgtOTZkY2I0OGUtNTdkYzg1NWYtMTVlNzVkMmU=, TxId: 2025-03-28T09:13:46.074528Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: f7421255-dd21325f-9fef8d0f-4e9213aa, reply NOT_FOUND, issues: {
: Error: No such execution } Trying to start YDB, gRPC: 31174, MsgBus: 32211 2025-03-28T09:13:47.638767Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786103410753965:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:47.638921Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001564/r3tmp/tmpBn2j6O/pdisk_1.dat 2025-03-28T09:13:47.795048Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:47.812409Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:47.812484Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:47.814608Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31174, node 3 2025-03-28T09:13:47.906699Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:47.906722Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:47.906733Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:47.906868Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32211 TClient is connected to server localhost:32211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:48.466125Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:48.476540Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:48.493875Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:48.582237Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
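Read together, the TForgetScriptExecutionOperationActor and TGetScriptExecutionOperationQueryActor warnings above outline the observable contract of script-execution operations in this run: forgetting an execution that is still running is rejected with PRECONDITION_FAILED ("Operation is still running") and retried until the script finishes, while looking up or forgetting an execution id that was never created returns NOT_FOUND ("No such execution"). This summary is reconstructed from the warnings themselves; the log does not show the underlying RPCs or metadata tables.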
2025-03-28T09:13:48.839919Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:48.954371Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:51.978102Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786120590624929:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:51.978219Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:52.048823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:52.104454Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:52.189050Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:52.248868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:52.300641Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:52.426224Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:52.544799Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786124885592743:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:52.544904Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:52.545301Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786124885592748:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:52.549992Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:52.566315Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786124885592750:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:52.638613Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786103410753965:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:52.638706Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:52.660398Z node 3 :TX_PROXY ERROR: Actor# [3:7486786124885592807:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:54.373831Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.375944Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.378372Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 >> KqpQueryService::TableSink_Htap+withOltpSink >> KqpQueryService::DdlGroup >> KqpQueryService::TableSink_OlapDelete [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD] >> KqpDocumentApi::RestrictWriteExplicitPrepare [GOOD] >> KqpDocumentApi::Scripting >> KqpQueryServiceScripts::ExplainScript [GOOD] >> KqpQueryServiceScripts::ForgetScriptExecution >> KqpQueryService::ReadManyRanges [GOOD] >> KqpQueryService::ReadManyShardsRange >> KqpQueryService::ShowCreateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ForbidInteractiveTxOnImplicitSession [GOOD] Test command err: Trying to start YDB, gRPC: 24717, MsgBus: 30490 2025-03-28T09:13:27.196089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:13:27.196755Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:27.196823Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00153e/r3tmp/tmppgGMxW/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24717, node 1 2025-03-28T09:13:27.740551Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:27.740630Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:27.740673Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:27.742598Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:27.746754Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:13:27.790059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:27.790200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:27.804194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30490 TClient is connected to server localhost:30490 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:28.162488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.215223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.585586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:29.188291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:29.539335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:30.390122Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1809:3405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.390366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.443663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.700979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.995131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:31.264537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:31.513016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:31.885668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.196061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2401:3860], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:32.196219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:32.196570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2406:3865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:32.206679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:32.392314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2408:3867], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:32.456318Z node 1 :TX_PROXY ERROR: Actor# [1:2469:3909] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 8748, MsgBus: 28691 2025-03-28T09:13:35.033752Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786051397533034:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:35.034644Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00153e/r3tmp/tmp92HOMI/pdisk_1.dat 2025-03-28T09:13:35.266626Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:35.287941Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:35.288035Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:35.289779Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8748, node 2 2025-03-28T09:13:35.423256Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:35.423284Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:35.423292Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:35.423415Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28691 TClient is connected to server localhost:28691 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:35.988813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.007067Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:36.036375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:36.141252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.305901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.386292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:38.367619Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] Ac ... WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.460181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.505537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.574067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.628118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.682999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.781362Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786064282437208:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.781436Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.782074Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786064282437213:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.786667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:38.800744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786064282437215:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:38.869876Z node 2 :TX_PROXY ERROR: Actor# [2:7486786064282437271:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:40.034149Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786051397533034:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:40.039851Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:40.131452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 120 2025-03-28T09:13:50.250092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:13:50.250132Z node 2 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 13294, MsgBus: 8529 2025-03-28T09:13:51.506441Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786120007469610:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:51.569477Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00153e/r3tmp/tmpI6Vjb6/pdisk_1.dat 2025-03-28T09:13:51.829213Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:51.829318Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:51.875642Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:51.877367Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13294, node 3 2025-03-28T09:13:52.151406Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:52.151427Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:52.151445Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:52.151584Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8529 TClient is connected to server localhost:8529 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:53.298739Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:53.306083Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:53.321065Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:53.481607Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:53.816292Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:53.910467Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:56.503055Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786120007469610:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:56.503125Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:57.343853Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786145777275025:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.343937Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.443709Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.509869Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.560210Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.618953Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.695954Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.770243Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.891046Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786145777275554:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.891130Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.891384Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786145777275559:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.894750Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:57.909262Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786145777275561:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:57.993204Z node 3 :TX_PROXY ERROR: Actor# [3:7486786145777275616:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 13508, MsgBus: 8386 2025-03-28T09:13:23.256662Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785999085376860:2191];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.256727Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001539/r3tmp/tmpkBmnfj/pdisk_1.dat 2025-03-28T09:13:24.478387Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.511068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:24.511445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.511551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.513914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13508, node 1 2025-03-28T09:13:24.843142Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.843163Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.843168Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.843275Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8386 TClient is connected to server localhost:8386 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:13:26.309350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.362906Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.385252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:26.628446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.902565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.996786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.258888Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785999085376860:2191];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.258982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:28.899554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786020560214984:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.899673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.150510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.203155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.235169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.271240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.354205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.445913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.512716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786029150150101:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.512822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.513015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786029150150106:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.516132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.534877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786029150150108:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.635896Z node 1 :TX_PROXY ERROR: Actor# [1:7486786029150150166:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:32.160790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.168530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.169796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:35.101380Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153215132, txId: 281474976710704] shutting down Trying to start YDB, gRPC: 1819, MsgBus: 15817 2025-03-28T09:13:36.723621Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786055899701589:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:36.724128Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001539/r3tmp/tmpSeMnos/pdisk_1.dat 2025-03-28T09:13:36.998460Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:37.030168Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:37.030261Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:37.031532Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1819, node 2 2025-03-28T09:13:37.159250Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:37.159276Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:37.159285Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:37.159390Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15817 TClient is connected to server localhost:15817 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:37.927073Z node 2 ... h pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:41.334435Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:41.334826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786077374540349:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:41.342943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:41.357702Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786077374540351:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:41.421370Z node 2 :TX_PROXY ERROR: Actor# [2:7486786077374540406:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:41.724660Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786055899701589:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:41.724710Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:42.781231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:42.787341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:42.789832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:13:45.751572Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153225786, txId: 281474976715703] shutting down 2025-03-28T09:13:46.140761Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153226171, txId: 281474976715706] shutting down Trying to start YDB, gRPC: 13522, MsgBus: 65137 2025-03-28T09:13:47.221199Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786104158323985:2214];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:47.307494Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001539/r3tmp/tmpH1ZwwV/pdisk_1.dat 2025-03-28T09:13:47.428152Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:47.430225Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:47.430312Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:47.432277Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13522, node 3 2025-03-28T09:13:47.667544Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:47.667566Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:47.667578Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:47.667717Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65137 TClient is connected to server localhost:65137 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:13:48.493491Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:13:48.503055Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:48.525414Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:48.657996Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:48.939816Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:49.052911Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:52.175233Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786125633162095:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:52.175338Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:52.206845Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786104158323985:2214];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:52.206906Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:52.225860Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:52.304644Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:52.384259Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:52.441892Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:52.517981Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:52.598413Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:52.749272Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786125633162625:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:52.749380Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:52.749707Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786125633162630:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:52.754085Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:52.772855Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786125633162632:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:52.834564Z node 3 :TX_PROXY ERROR: Actor# [3:7486786125633162687:3460] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:54.415701Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.417498Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.420396Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.310810Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153238330, txId: 281474976715709] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CreateOrDropTopicOverTable [GOOD] Test command err: Trying to start YDB, gRPC: 23997, MsgBus: 25283 2025-03-28T09:13:23.220506Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785999553662205:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.220570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001543/r3tmp/tmpv7pEeW/pdisk_1.dat 2025-03-28T09:13:24.316920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:24.317364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.317464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.323877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:13:24.423241Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23997, node 1 2025-03-28T09:13:24.707273Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.707298Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.707304Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.707408Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25283 TClient is connected to server localhost:25283 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:25.816934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.876250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:28.222850Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785999553662205:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.222939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:28.794400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786021028499357:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.794482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786021028499365:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.794533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.803329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:13:28.822125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786021028499371:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:13:28.902470Z node 1 :TX_PROXY ERROR: Actor# [1:7486786021028499423:2350] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:30.269090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:1, at schemeshard: 72057594046644480 2025-03-28T09:13:32.090073Z node 1 :TX_PROXY ERROR: Actor# [1:7486786038208368871:2495] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/MWVmOTg0MmItMTA1YWUzZGEtN2M1NzkzZDUtMmFmNDFjMzQ=\', error: path is temporary (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:32.118357Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWVmOTg0MmItMTA1YWUzZGEtN2M1NzkzZDUtMmFmNDFjMzQ=, ActorId: [1:7486786021028499327:2331], ActorState: ExecuteState, TraceId: 01jqe0eamd4mkxfp3f7cfdxjrm, Create QueryResponse for error on request, msg: 2025-03-28T09:13:32.215559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.239505Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-28T09:13:32.240490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.246460Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486786038208368899:2378], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:13:32.246890Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzMwZGMyNjYtZTMxZTM2ZC1jMDJhOGNkNy0xMmRjZTQyOA==, ActorId: [1:7486786038208368897:2377], ActorState: ExecuteState, TraceId: 01jqe0ears9zcgy92fkewzk7f0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 12187, MsgBus: 15018 2025-03-28T09:13:34.280592Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786047798645800:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:34.280629Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001543/r3tmp/tmp1RrfQf/pdisk_1.dat 2025-03-28T09:13:34.645159Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:34.701962Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:34.702059Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:34.703455Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12187, node 2 2025-03-28T09:13:34.955210Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:34.955233Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:34.955240Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:34.955344Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15018 TClient is connected to server localhost:15018 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:35.843125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:35.850285Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:39.019513Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786069273482818:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.019608Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.022629Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786069273482830:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.029050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 28 ... pose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:56.890864Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486786144229329143:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:56.994970Z node 4 :TX_PROXY ERROR: Actor# [4:7486786144229329201:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:58.805519Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.948557Z node 4 :TX_PROXY ERROR: Actor# [4:7486786152819264183:3738] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup" severity: 1 } 2025-03-28T09:13:58.950031Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710672, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup 2025-03-28T09:13:58.950203Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjBkMTMzOTItNzRkYWRiMTItNTU4ZDIxNjAtYzQ1NGZhYTk=, ActorId: [4:7486786152819264173:2513], ActorState: ExecuteState, TraceId: 01jqe0f4wk3szskh0q4sp111q3, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1743153236923, tx_id: 281474976710668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153232240, tx_id: 281474976710661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153231820, tx_id: 281474976710659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236790, tx_id: 281474976710666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236881, tx_id: 281474976710667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236517, tx_id: 281474976710662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236580, tx_id: 281474976710663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236629, tx_id: 281474976710664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153231953, tx_id: 281474976710660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236713, tx_id: 281474976710665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153238904, tx_id: 281474976710671 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153231575, tx_id: 281474976710658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-03-28T09:13:59.007358Z node 4 :TX_PROXY ERROR: Actor# [4:7486786157114231518:3768] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup" severity: 1 } 2025-03-28T09:13:59.007739Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710674, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup 2025-03-28T09:13:59.007900Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjBkMTMzOTItNzRkYWRiMTItNTU4ZDIxNjAtYzQ1NGZhYTk=, ActorId: [4:7486786152819264173:2513], ActorState: ExecuteState, TraceId: 01jqe0f4xxbcc58rzjc3ex0f65, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: unexpected path type (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges), expected types: EPathTypePersQueueGroup Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1743153236923, tx_id: 281474976710668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153232240, tx_id: 281474976710661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153231820, tx_id: 281474976710659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236790, tx_id: 281474976710666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236881, tx_id: 281474976710667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236517, tx_id: 281474976710662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236580, tx_id: 281474976710663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236629, tx_id: 281474976710664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153231953, tx_id: 281474976710660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236713, tx_id: 281474976710665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153238904, tx_id: 281474976710671 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153231575, tx_id: 281474976710658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-03-28T09:13:59.042114Z node 4 :TX_PROXY ERROR: Actor# [4:7486786157114231541:3781] txid# 281474976710676, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:59.042489Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710676, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-03-28T09:13:59.042642Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjBkMTMzOTItNzRkYWRiMTItNTU4ZDIxNjAtYzQ1NGZhYTk=, ActorId: [4:7486786152819264173:2513], ActorState: ExecuteState, TraceId: 01jqe0f4zj2fmhz0j8610zm82q, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-03-28T09:13:59.065202Z node 4 :TX_PROXY ERROR: Actor# [4:7486786157114231556:3788] txid# 281474976710678, issues: { message: "Check failed: path: \'/Root/TmpTable\', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:59.065777Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710678, ProxyStatus: ExecError, SchemeShardReason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-03-28T09:13:59.065954Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjBkMTMzOTItNzRkYWRiMTItNTU4ZDIxNjAtYzQ1NGZhYTk=, ActorId: [4:7486786152819264173:2513], ActorState: ExecuteState, TraceId: 01jqe0f50a301sfjh4f6fcjmyq, Create QueryResponse for error on request, msg: Query failed, status: GENERIC_ERROR:
: Error: Scheme operation failed, status: ExecError, reason: Check failed: path: '/Root/TmpTable', error: path is not a topic (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) Scheme entry: { name: .metadata, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1743153236923, tx_id: 281474976710668 } } Scheme entry: { name: BatchUpload, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153232240, tx_id: 281474976710661 } } Scheme entry: { name: EightShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153231820, tx_id: 281474976710659 } } Scheme entry: { name: Join1, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236790, tx_id: 281474976710666 } } Scheme entry: { name: Join2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236881, tx_id: 281474976710667 } } Scheme entry: { name: KeyValue, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236517, tx_id: 281474976710662 } } Scheme entry: { name: KeyValue2, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236580, tx_id: 281474976710663 } } Scheme entry: { name: KeyValueLargePartition, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236629, tx_id: 281474976710664 } } Scheme entry: { name: Logs, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153231953, tx_id: 281474976710660 } } Scheme entry: { name: Test, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153236713, tx_id: 281474976710665 } } Scheme entry: { name: TmpTable, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153238904, tx_id: 281474976710671 } } Scheme entry: { name: TwoShard, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153231575, tx_id: 281474976710658 } } Scheme entry: { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } } 2025-03-28T09:13:59.111427Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestAstWithCompression [GOOD] Test command err: Trying to start YDB, gRPC: 6225, MsgBus: 2778 2025-03-28T09:13:23.220953Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786000525490807:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.221004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001535/r3tmp/tmpnKh461/pdisk_1.dat 2025-03-28T09:13:24.156719Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.160425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.160554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.283243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 6225, node 1 2025-03-28T09:13:24.471246Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.471265Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.471278Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.471393Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2778 TClient is connected to server localhost:2778 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:25.825406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.840691Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.853795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:26.424014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.646982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.758508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.221049Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786000525490807:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.221122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:28.560591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786022000329067:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.560695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.152731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.198363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.244191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.292636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.334103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.432938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.531798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786030590264197:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.531884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.532371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786030590264202:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.537009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.566732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786030590264204:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.661280Z node 1 :TX_PROXY ERROR: Actor# [1:7486786030590264261:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:32.164641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.165891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.167159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:13:35.439661Z node 1 :RPC_REQUEST WARN: Client lost 2025-03-28T09:13:35.439956Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486786052065102303:2838] TxId: 281474976710709. Ctx: { TraceId: 01jqe0edknd6973kpvf5x1cy4c, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhiMjFiZDQtY2E5MjAyYzItZDliM2ZkMGItYTI4MzhlYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-28T09:13:35.441477Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjhiMjFiZDQtY2E5MjAyYzItZDliM2ZkMGItYTI4MzhlYzY=, ActorId: [1:7486786052065102269:2838], ActorState: ExecuteState, TraceId: 01jqe0edknd6973kpvf5x1cy4c, Create QueryResponse for error on request, msg: 2025-03-28T09:13:35.442043Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153215475, txId: 281474976710708] shutting down 2025-03-28T09:13:35.446516Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486786052065102308:2844], TxId: 281474976710709, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqe0edknd6973kpvf5x1cy4c. SessionId : ydb://session/3?node_id=1&id=YjhiMjFiZDQtY2E5MjAyYzItZDliM2ZkMGItYTI4MzhlYzY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486786052065102303:2838], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:13:35.979637Z node 1 :RPC_REQUEST WARN: Client lost 2025-03-28T09:13:35.979801Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486786052065102381:2855] TxId: 281474976710713. Ctx: { TraceId: 01jqe0edz56f8r5bm05vf6hb2w, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWUwZTIyZGUtNWM0M2QzZWEtYjk3NGE0ZmItMzljYjNjNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-28T09:13:35.980974Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWUwZTIyZGUtNWM0M2QzZWEtYjk3NGE0ZmItMzljYjNjNjM=, ActorId: [1:7486786052065102347:2855], ActorState: ExecuteState, TraceId: 01jqe0edz56f8r5bm05vf6hb2w, Create QueryResponse for error on request, msg: 2025-03-28T09:13:35.981324Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153216007, txId: 281474976710712] shutting down 2025-03-28T09:13:35.981422Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486786052065102386:2862], TxId: 281474976710713, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqe0edz56f8r5bm05vf6hb2w. SessionId : ydb://session/3?node_id=1&id=OWUwZTIyZGUtNWM0M2QzZWEtYjk3NGE0ZmItMzljYjNjNjM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486786052065102381:2855], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:13:36.422745Z node 1 :RPC_REQUEST WARN: Client lost 2025-03-28T09:13:36.422983Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486786056360069747:2873] TxId: 281474976710717. Ctx: { TraceId: 01jqe0eeg505h18km5wvyw3p7p, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzYzZDk5MmUtM ... 644480 2025-03-28T09:13:42.591225Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786081622881988:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:42.591356Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:42.591712Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786081622881993:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:42.599373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:42.613462Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786081622881995:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:42.666795Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786060148043252:2088];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:42.666861Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:42.701262Z node 2 :TX_PROXY ERROR: Actor# [2:7486786081622882052:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:44.072421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.074043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.075633Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:13:47.067729Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153227095, txId: 281474976715712] shutting down Trying to start YDB, gRPC: 19786, MsgBus: 3880 2025-03-28T09:13:48.196371Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786108352402011:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:48.196433Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001535/r3tmp/tmpSDXA4y/pdisk_1.dat 2025-03-28T09:13:48.494533Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:48.572412Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:48.572503Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:48.573743Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19786, node 3 2025-03-28T09:13:48.758605Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:48.758633Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:48.758640Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:48.758801Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3880 TClient is connected to server localhost:3880 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:49.656944Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:49.665345Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:49.689272Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:49.824288Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:50.065956Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:50.200965Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:53.196414Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786108352402011:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:53.196473Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:53.975959Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786129827240261:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:53.976089Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:54.025237Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.074298Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.126749Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.172934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.219877Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.289788Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.408217Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786134122208074:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:54.408290Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:54.408578Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786134122208079:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:54.413651Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:54.439761Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:13:54.440124Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786134122208081:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:54.533511Z node 3 :TX_PROXY ERROR: Actor# [3:7486786134122208139:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:56.178198Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:56.193102Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:13:56.201747Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpQueryService::TableSink_Olap_Replace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_HtapInteractive+withOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 3552, MsgBus: 2517 2025-03-28T09:13:23.175319Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785999267043619:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.175380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001561/r3tmp/tmpkpIZ7N/pdisk_1.dat 2025-03-28T09:13:24.310993Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.352344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:24.355261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.355372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.357408Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3552, node 1 2025-03-28T09:13:24.647183Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.647213Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.647226Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.647320Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2517 TClient is connected to server localhost:2517 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:25.576345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.186554Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785999267043619:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.204397Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:28.247037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786020741880769:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.247198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.193268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.464718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786029331815532:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:13:30.464997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786029331815532:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:13:30.465306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786029331815532:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:13:30.465426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786029331815532:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:13:30.465535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786029331815532:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:13:30.465645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786029331815532:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:13:30.465767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786029331815532:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:13:30.465878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786029331815532:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:13:30.466010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786029331815532:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:13:30.466176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786029331815532:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:13:30.466288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786029331815532:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:13:30.466414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786029331815532:2347];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-03-28T09:13:30.469039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786029331815534:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:13:30.469097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786029331815534:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:13:30.469306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786029331815534:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:13:30.469437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786029331815534:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:13:30.469558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786029331815534:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:13:30.469653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786029331815534:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:13:30.469742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786029331815534:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:13:30.469835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786029331815534:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:13:30.469934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786029331815534:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:13:30.470049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786029331815534:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:13:30.470149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786029331815534:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:13:30.470270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786029331815534:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:13:30.546669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486786029331815538:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-28T09:13:30.546759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486786029331815538:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:13:30.546988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486786029331815538:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:13:30.547088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486786029331815538:2350];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:13:30.547187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486786029331815538:2350];table ... ateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:13:57.389606Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:13:57.389826Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:13:57.389861Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:13:57.399660Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:13:57.399745Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:13:57.400010Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:13:57.400050Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:13:57.400198Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:13:57.400235Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:13:57.401200Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:13:57.401249Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-03-28T09:13:57.401369Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:13:57.401402Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:13:57.401595Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:13:57.401627Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:13:57.401735Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:13:57.401770Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:13:57.401844Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:13:57.401883Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:13:57.401933Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:13:57.401969Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:13:57.403602Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:13:57.403665Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:13:57.403924Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:13:57.403964Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:13:57.404142Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:13:57.404174Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-03-28T09:13:57.404425Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:13:57.404462Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:13:57.404605Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:13:57.404641Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:13:57.483390Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:57.487644Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:57.504043Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:57.508934Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:57.519832Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:57.522254Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:57.531458Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:57.533488Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:57.546029Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:57.546034Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:13:57.557967Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.649638Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786124291523531:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:57.649718Z node 3 :METADATA_PROVIDER ERROR:
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:57.712722Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786145766361247:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.712805Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786145766361252:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.712818Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.717845Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-28T09:13:57.737031Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786145766361254:2417], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-28T09:13:57.804158Z node 3 :TX_PROXY ERROR: Actor# [3:7486786145766361310:2663] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:58.049382Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; 2025-03-28T09:13:58.287851Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapDelete [GOOD] Test command err: Trying to start YDB, gRPC: 26918, MsgBus: 8593 2025-03-28T09:13:23.167809Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785998971879370:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.168021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001526/r3tmp/tmpuRdEyO/pdisk_1.dat 2025-03-28T09:13:24.106603Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.123851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.123940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.140546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26918, node 1 2025-03-28T09:13:24.466182Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.466203Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.466230Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.466350Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8593 TClient is connected to server localhost:8593 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:26.351501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.170832Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785998971879370:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.170940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:28.528607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786020446716375:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.528705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.197022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.520312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786029036651187:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:13:30.520557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786029036651187:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:13:30.520862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786029036651187:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:13:30.521005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786029036651187:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:13:30.521131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786029036651187:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:13:30.521253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786029036651187:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:13:30.521363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786029036651187:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:13:30.521484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786029036651187:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:13:30.521615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786029036651187:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:13:30.521741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786029036651187:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:13:30.521858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786029036651187:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:13:30.521992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786029036651187:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-03-28T09:13:30.524832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786029036651117:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:13:30.524924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786029036651117:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:13:30.525155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786029036651117:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:13:30.525265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786029036651117:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:13:30.525365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786029036651117:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:13:30.525487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786029036651117:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:13:30.525620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786029036651117:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:13:30.525729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786029036651117:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:13:30.525839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786029036651117:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:13:30.525937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786029036651117:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:13:30.526056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786029036651117:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:13:30.526154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786029036651117:2346];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:13:30.581046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786029036651111:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:13:30.581127Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7486786029036651111:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:13:30.581412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786029036651111:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:13:30.581537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786029036651111:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:13:30.581657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786029036651111:2343];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:13:30.581776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=7207518622403 ... WARN: tablet_id=72075186224037888;self_id=[3:7486786119141415591:2342];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.030559Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7486786119141415593:2343];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.030583Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;self_id=[3:7486786119141415593:2343];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.030660Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;self_id=[3:7486786119141415582:2339];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.030711Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;self_id=[3:7486786119141415582:2339];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.030807Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;self_id=[3:7486786119141415595:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.030833Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;self_id=[3:7486786119141415595:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.030890Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;self_id=[3:7486786119141415587:2341];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.030918Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037893;self_id=[3:7486786119141415577:2338];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.030934Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;self_id=[3:7486786119141415587:2341];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.030951Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;self_id=[3:7486786119141415577:2338];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.031031Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7486786119141415585:2340];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.031055Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;self_id=[3:7486786119141415585:2340];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.031085Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;self_id=[3:7486786119141415573:2336];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.031123Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;self_id=[3:7486786119141415573:2336];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.031154Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;self_id=[3:7486786119141415575:2337];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.031180Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;self_id=[3:7486786119141415575:2337];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.031228Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;self_id=[3:7486786119141415597:2345];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.031254Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;self_id=[3:7486786119141415597:2345];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=281474976715669;problem=finished; 2025-03-28T09:13:55.115023Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7486786119141415582:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037890;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.115448Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[3:7486786119141415597:2345];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.123096Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7486786119141415582:2339];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.123380Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[3:7486786119141415597:2345];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.156870Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;task_id=f92fcaf0-bb411f0-bdc6727b-e98101a7;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.157344Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.209138Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[3:7486786119141415585:2340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.209734Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[3:7486786119141415585:2340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.218282Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[3:7486786119141415577:2338];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.218618Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[3:7486786119141415573:2336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.218818Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[3:7486786119141415587:2341];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.219005Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7486786119141415593:2343];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.219187Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[3:7486786119141415595:2344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037891;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.219385Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7486786119141415591:2342];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037888;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.219585Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[3:7486786119141415575:2337];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.220205Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[3:7486786119141415577:2338];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.220417Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[3:7486786119141415573:2336];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037895;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.220558Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[3:7486786119141415587:2341];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037892;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.220691Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[3:7486786119141415593:2343];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.220818Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[3:7486786119141415595:2344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.220950Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[3:7486786119141415591:2342];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037888;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.221126Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[3:7486786119141415575:2337];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037896;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:13:55.235524Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-28T09:13:55.237078Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;commit_tx_id=281474976715672;commit_lock_id=281474976715671;fline=manager.cpp:94;broken_lock_id=281474976715669; 2025-03-28T09:13:55.237333Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpService::SessionBusy [GOOD] >> KqpService::SessionBusyRetryOperation >> KqpQueryServiceScripts::Tcl [GOOD] >> KqpQueryServiceScripts::ExecuteScriptStatsProfile [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter >> KqpQueryService::Ddl_Dml [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTx [GOOD] >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery >> KqpQueryServiceScripts::ExecuteScriptWithParameters [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter >> KqpQueryService::TempTablesDrop [GOOD] >> KqpQueryService::Tcl >> KqpQueryService::SeveralCTAS+UseSink [GOOD] >> KqpQueryService::SeveralCTAS-UseSink >> 
KqpJoinOrder::CanonizedJoinOrderTPCH8 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::Tcl [GOOD] Test command err: Trying to start YDB, gRPC: 10717, MsgBus: 14033 2025-03-28T09:13:23.191170Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786000023407461:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.202511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001527/r3tmp/tmpsFXrkt/pdisk_1.dat 2025-03-28T09:13:24.325465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:24.407901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.407994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.409279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:13:24.465543Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10717, node 1 2025-03-28T09:13:24.656330Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.656358Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.656364Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.656485Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14033 TClient is connected to server localhost:14033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:25.780915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.829266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:26.418168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.660662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.789418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.194821Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786000023407461:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.194903Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:29.023873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786025793213029:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:29.023973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.150383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.206452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.278567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.358517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.450754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.540305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.628116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786030088180857:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.628189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.628353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786030088180862:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.631206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.646263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786030088180864:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.746363Z node 1 :TX_PROXY ERROR: Actor# [1:7486786030088180918:3468] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 32261, MsgBus: 6394 2025-03-28T09:13:33.337491Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786043323291522:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:33.337559Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001527/r3tmp/tmpTs95QH/pdisk_1.dat 2025-03-28T09:13:33.501505Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:33.512903Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:33.512992Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:33.523919Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32261, node 2 2025-03-28T09:13:33.737231Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:33.737260Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:33.737268Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:33.737382Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6394 TClient is connected to server localhost:6394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:34.659950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:34.683357Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:34.706419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:34.795190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:35.145236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 2 ... 6786064798130310:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.589015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:38.605577Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786064798130312:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:38.669266Z node 2 :TX_PROXY ERROR: Actor# [2:7486786064798130367:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:39.845279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.850721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.852948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:13:48.467588Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:13:48.467619Z node 2 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 1549, MsgBus: 28915 2025-03-28T09:13:52.210871Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786124133194531:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:52.235827Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001527/r3tmp/tmpjfoDBN/pdisk_1.dat 2025-03-28T09:13:52.513632Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:52.531576Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:52.531681Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:52.533818Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1549, node 3 2025-03-28T09:13:52.667268Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:52.667292Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:52.667305Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:52.667458Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28915 TClient is connected to server localhost:28915 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:53.349467Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:53.375155Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:53.460176Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:53.732940Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:53.867696Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:57.197307Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786124133194531:2136];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:57.197393Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:57.242876Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786145608032693:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.243020Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.323142Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.398644Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.505249Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.570381Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.627880Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.773544Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.899090Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786145608033216:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.899227Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.906878Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786145608033223:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.915888Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:57.940432Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-28T09:13:57.943370Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786145608033225:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:58.050359Z node 3 :TX_PROXY ERROR: Actor# [3:7486786149903000578:3463] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:00.035637Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.037379Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.039056Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.615569Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486786158492935771:2519], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: COMMIT not supported inside YDB query, code: 2008 2025-03-28T09:14:00.617347Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDNhMTUxOC1kN2I2Y2IxMC05NWIwNzFhLTgyNTg1NDY3, ActorId: [3:7486786158492935768:2518], ActorState: ExecuteState, TraceId: 01jqe0f5yx9xkwx5334859v6qs, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:14:02.948533Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486786167082871072:2749], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:13: Error: At function: Commit!
:3:13: Error: ROLLBACK not supported inside YDB query, code: 2008 2025-03-28T09:14:02.950438Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzNkYzgyYmUtY2IwNDJhOTctNjQyZmYzZGMtYzgxNjM0NWU=, ActorId: [3:7486786167082871069:2748], ActorState: ExecuteState, TraceId: 01jqe0f8rh80p498gmc991vjtt, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::Ddl_Dml [GOOD] Test command err: Trying to start YDB, gRPC: 12813, MsgBus: 11636 2025-03-28T09:13:23.217034Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786000694181885:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.241781Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001598/r3tmp/tmp565uLr/pdisk_1.dat 2025-03-28T09:13:24.314816Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:24.794151Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.822570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.831071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.840321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12813, node 1 2025-03-28T09:13:25.079339Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:25.079375Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:25.079384Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:25.079527Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11636 TClient is connected to server localhost:11636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:27.000711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:27.014665Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:27.028670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:27.269264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:27.493331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:27.639347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.222922Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786000694181885:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.223081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:29.577256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786026463987236:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:29.577353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.151379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.219548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.268677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.343584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.392351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.457551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.513552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786030758955056:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.513630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.513951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786030758955061:2466], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.518546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.538290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786030758955063:2467], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.600301Z node 1 :TX_PROXY ERROR: Actor# [1:7486786030758955118:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:32.402437Z node 1 :TX_PROXY ERROR: Actor# [1:7486786039348890028:3708] txid# 281474976710672, issues: { message: "User already exists" severity: 1 } 2025-03-28T09:13:32.418129Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTIwMTQwYWUtNzg3YjIxM2QtNjAwYjVmNTEtZjAxNDhlZjI=, ActorId: [1:7486786039348890015:2504], ActorState: ExecuteState, TraceId: 01jqe0eays19833bh431kwvq7t, Create QueryResponse for error on request, msg: 2025-03-28T09:13:32.522655Z node 1 :TX_PROXY ERROR: Actor# [1:7486786039348890094:3742] txid# 281474976710676, issues: { message: "User not found" severity: 1 } 2025-03-28T09:13:32.522945Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTFiNTliMDUtNmM4ZTJiZWUtYjQ5YjZhNS0yNzQwYzNiMg==, ActorId: [1:7486786039348890088:2514], ActorState: ExecuteState, TraceId: 01jqe0eb2rcpsezzpmnccs9b1n, Create QueryResponse for error on request, msg: 2025-03-28T09:13:32.543918Z node 1 :TX_PROXY ERROR: Actor# [1:7486786039348890110:3749] txid# 281474976710678, issues: { message: "User not found" severity: 1 } 2025-03-28T09:13:32.544176Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzMxNzMzNDYtZDczZmYzZGMtYzA3YzY5MDQtYzI0NDMzMjE=, ActorId: [1:7486786039348890104:2517], ActorState: ExecuteState, TraceId: 01jqe0eb3m11f8ny13fm1ax9bw, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 15315, MsgBus: 13018 2025-03-28T09:13:34.736816Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786047716249278:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001598/r3tmp/tmpuyVkHP/pdisk_1.dat 2025-03-28T09:13:34.803312Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:13:34.960125Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:34.979060Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:34.979151Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:34.980255Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15315, node 2 2025-03-28T09:13:35.147265Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:35.147290Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:35.147299Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:35.147441Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13018 TClient is connected to server localhost:13018 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersio ... WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:55.315004Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786140532168828:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:55.328741Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:55.363527Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486786140532168830:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:55.425798Z node 4 :TX_PROXY ERROR: Actor# [4:7486786140532168889:3458] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:57.133212Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.582435Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.598976Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=M2NhYjBlMDUtMWVkNDgxNDQtODA1MWQ1OWYtODRiZDQ3ZGI=, ActorId: [4:7486786149122103852:2510], ActorState: ExecuteState, TraceId: 01jqe0f37365pb2dg7bz6yy6sv, Create QueryResponse for error on request, msg: 2025-03-28T09:13:57.730721Z node 4 :KQP_COMPILE_SERVICE WARN: queryId in recompile request and queryId in cache are different, queryId in request: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT INTO TestDdlDml2 (Key, Value1) VALUES (1, \"1\");\n SELECT * FROM TestDdlDml2;\n UPSERT INTO TestDdlDml2 (Key, Value1) VALUES (2, \"2\");\n SELECT * FROM TestDdlDml2;\n CREATE TABLE TestDdlDml33 (\n Key Uint64,\n PRIMARY KEY (Key)\n );\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_database":"Root" }, "settings": { "ydb_database":"Root" }, "rollback_settings": { } } }}, queryId in cache: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n UPSERT INTO TestDdlDml2 (Key, Value1, Value2) VALUES (1, \"1\", \"1\");\n SELECT * FROM TestDdlDml2;\n ALTER TABLE TestDdlDml2 DROP COLUMN Value2;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_database":"Root" }, "settings": { "ydb_database":"Root" }, "rollback_settings": { } } }} 2025-03-28T09:13:58.029537Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.336951Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.869647Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7486786153417071659:2597], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:11:17: Error: At function: KiReadTable!
:11:17: Error: Cannot find table 'db.[/Root/TestDdlDml5]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:13:58.871557Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZmZjNWY2MDctNGVkZmYxZTEtNTI2Yzk2MDEtOGQ0ZjEwMzQ=, ActorId: [4:7486786153417071516:2571], ActorState: ExecuteState, TraceId: 01jqe0f48k4vqvfe5dd2p9p7xz, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:13:58.980846Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715690:0, at schemeshard: 72057594046644480 2025-03-28T09:13:59.234858Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715691:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.181834Z node 4 :TX_PROXY ERROR: Actor# [4:7486786162007006601:4243] txid# 281474976715697, issues: { message: "Check failed: path: \'/Root/TestDdl1\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 19], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:00.182110Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715697, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl1', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 19], type: EPathTypeTable, state: EPathStateNoChanges) 2025-03-28T09:14:00.182294Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWJiMzg2MTUtNzM2NDNmNDgtZGJmMWMwNTctOGI5ZjNjZmU=, ActorId: [4:7486786162007006578:2663], ActorState: ExecuteState, TraceId: 01jqe0f616d1f98fz37e4jxzjt, Create QueryResponse for error on request, msg: 2025-03-28T09:14:00.232250Z node 4 :TX_PROXY ERROR: Actor# [4:7486786162007006628:4255] txid# 281474976715699, issues: { message: "Check failed: path: \'/Root/TestDdl2\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:00.232355Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715699, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl2', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges) 2025-03-28T09:14:00.232533Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MTlmZGM2ZWItZWY0OTMxOTQtOGFkMTcyODQtNzgyYTAzN2E=, ActorId: [4:7486786162007006612:2673], ActorState: ExecuteState, TraceId: 01jqe0f6454kk9gkkqh0j84zer, Create QueryResponse for error on request, msg: 2025-03-28T09:14:00.770059Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715704:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.948303Z node 4 :TX_PROXY ERROR: Actor# [4:7486786162007006841:4371] txid# 281474976715705, issues: { message: "Check failed: path: \'/Root/TestDdl2\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:00.958364Z node 4 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976715705, ProxyStatus: 
ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl2', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 20], type: EPathTypeTable, state: EPathStateNoChanges) 2025-03-28T09:14:00.958566Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjhiYmM2MmEtZThkZGM5ZjEtMjkzMzhjMzQtMWQyNzIxOTE=, ActorId: [4:7486786162007006726:2710], ActorState: ExecuteState, TraceId: 01jqe0f6h30s6fqg7ybthj48bv, Create QueryResponse for error on request, msg: 2025-03-28T09:14:01.131484Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7486786166301974193:2749], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/TestDdl4]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:14:01.133126Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YTVhMzAwNmMtZWZkMmVmNjAtOWMzNTc0MGQtMjg0MTNjN2Y=, ActorId: [4:7486786166301974190:2747], ActorState: ExecuteState, TraceId: 01jqe0f70r7zgsd7q67gkdzcjf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:14:01.606188Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480 2025-03-28T09:14:02.371451Z node 4 :KQP_COMPILE_ACTOR ERROR: Get parsing result with error, self: [4:7486786170596941741:2802], owner: [4:7486786136237200963:2399], statement id: 1 2025-03-28T09:14:02.371870Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NzY4ZGY3MTItZDBkY2NiODctNTFmOGQzODctYmEzNWJmMjM=, ActorId: [4:7486786170596941739:2801], ActorState: ExecuteState, TraceId: 01jqe0f87qfcrr491s451dpzc2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:14:02.661255Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7486786170596941790:2821], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:44: Error: Failed to convert type: Struct<'Key':Int32,'Value':String> to Struct<'Key':Uint64?,'Value':Uint64?>
:3:44: Error: Failed to convert 'Value': String to Optional
:3:44: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-28T09:14:02.663479Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZWJkM2U1OTYtNDUyMDgzNC05MWUyYjNjYS1hNTQyYTBmZg==, ActorId: [4:7486786170596941768:2812], ActorState: ExecuteState, TraceId: 01jqe0f8cd7d2d8rq2wf0xbvtw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:14:02.793173Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715722:0, at schemeshard: 72057594046644480 2025-03-28T09:14:03.006174Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7486786170596941915:2845], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:8:29: Error: At function: KiWriteTable!
:8:44: Error: Failed to convert type: Struct<'Key':Int32,'Value':String> to Struct<'Key':Uint64?,'Value':Uint64?>
:8:44: Error: Failed to convert 'Value': String to Optional
:8:44: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-28T09:14:03.008276Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDVlNWI3MzgtNjY3ZDk3MWEtYmYzM2Q0ZmMtZDI5NTA4NjQ=, ActorId: [4:7486786170596941819:2830], ActorState: ExecuteState, TraceId: 01jqe0f8j8d9952c2pfnk5vw51, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> KqpQueryService::ExecuteQueryScalar [GOOD] >> KqpQueryService::TableSink_OlapUpdate [GOOD] >> KqpQueryService::TableSink_OlapOrder >> KqpQueryService::TableSink_OltpLiteralUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryScalar [GOOD] Test command err: Trying to start YDB, gRPC: 2147, MsgBus: 12386 2025-03-28T09:13:34.350746Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786050226027650:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:34.350858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00151f/r3tmp/tmp6WmmqE/pdisk_1.dat 2025-03-28T09:13:35.189292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:35.189393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:35.195875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:13:35.249667Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2147, node 1 2025-03-28T09:13:35.458729Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:35.458761Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:35.458768Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:35.458878Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12386 TClient is connected to server localhost:12386 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:36.506806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:36.556998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.673954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.870531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.986190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:39.182849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786071700865914:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.182954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.351124Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786050226027650:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:39.351198Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:39.422588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.458874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.526786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.553815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.584602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.619966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.677574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786071700866427:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.677646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.677913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786071700866432:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.682038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:39.691112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786071700866434:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:39.773585Z node 1 :TX_PROXY ERROR: Actor# [1:7486786071700866489:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:41.012723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:50.213029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:13:50.213070Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 8508, MsgBus: 7540 2025-03-28T09:13:52.367072Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786126177057722:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:52.367124Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00151f/r3tmp/tmpJX4RWI/pdisk_1.dat 2025-03-28T09:13:52.611766Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:52.613754Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:52.613829Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:52.615683Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8508, node 2 2025-03-28T09:13:52.705892Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:52.705918Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:52.705940Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:52.706069Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7540 TClient is connected to server localhost:7540 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:13:53.200813Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:53.214026Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:53.225816Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:53.307123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025- ... RN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786139061961350:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:55.875516Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:55.928304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:55.973997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:56.012744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:56.100326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:56.175125Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:56.240151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:56.295134Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786143356929168:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:56.295197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:56.295389Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786143356929173:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:56.298615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:56.312700Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786143356929175:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:56.413703Z node 2 :TX_PROXY ERROR: Actor# [2:7486786143356929231:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:57.367151Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786126177057722:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:57.367211Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1753, MsgBus: 5376 2025-03-28T09:13:58.656931Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786149587601392:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00151f/r3tmp/tmpYpGrVD/pdisk_1.dat 2025-03-28T09:13:58.704475Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:13:58.786210Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:58.850639Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:58.853529Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:58.854882Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1753, node 3 2025-03-28T09:13:58.987207Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:58.987229Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:58.987238Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:58.987353Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5376 TClient is connected to server localhost:5376 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:13:59.560815Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:59.623230Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:59.756705Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:59.961616Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:00.050338Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:02.573366Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786166767472184:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:02.573474Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:02.646016Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:02.718614Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:02.806135Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:02.934975Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:03.020941Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:03.128737Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:03.259970Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786171062440003:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:03.260092Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:03.260568Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786171062440008:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:03.264884Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:03.286089Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786171062440011:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:14:03.344989Z node 3 :TX_PROXY ERROR: Actor# [3:7486786171062440065:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:03.627720Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786149587601392:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:03.627832Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryService::ReadManyShardsRange [GOOD] >> KqpQueryService::ReadManyRangesAndPoints |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |83.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |83.0%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut >> KqpDocumentApi::Scripting [GOOD] >> KqpQueryService::AlterTable_DropNotNull_Valid ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH8 [GOOD] Test command err: Trying to start YDB, gRPC: 62052, MsgBus: 22207 2025-03-28T09:12:10.213963Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785688457541652:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:10.214028Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00170c/r3tmp/tmpPxNJw6/pdisk_1.dat 2025-03-28T09:12:10.805906Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:10.826195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:10.826295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:10.827474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62052, node 1 2025-03-28T09:12:11.013943Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:11.013991Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:11.013998Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:11.014114Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22207 TClient is connected to server localhost:22207 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:11.929385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:14.343068Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785705637411506:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:14.343205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:14.343542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785705637411518:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:14.347982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:12:14.373738Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:12:14.374228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785705637411520:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:12:14.469544Z node 1 :TX_PROXY ERROR: Actor# [1:7486785705637411571:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:14.871609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:12:15.241568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785709932379119:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:12:15.241821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785709932379119:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:12:15.242167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785709932379119:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:12:15.242340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785709932379119:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:12:15.242452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785709932379119:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:12:15.243977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785709932379129:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:12:15.244050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785709932379129:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:12:15.245868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785709932379119:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:12:15.246223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785709932379119:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:12:15.246382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785709932379119:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:12:15.246515Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7486785709932379119:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:12:15.246615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785709932379129:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:12:15.246635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785709932379119:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:12:15.248662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785709932379119:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:12:15.248877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486785709932379119:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:12:15.251259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785709932379129:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:12:15.251451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785709932379129:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:12:15.251759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785709932379129:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:12:15.251889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785709932379129:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:12:15.251994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785709932379129:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:12:15.252142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785709932379129:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:12:15.252255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785709932379129:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:12:15.252344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486785709932379129:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:12:15.252471Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486785709932379129:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:12:15.300586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785709932379125:2358];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:12:15.300660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486785709932379125:2358];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abs ... 0714; 2025-03-28T09:13:32.530674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.537768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.538586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.545383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.545731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.552931Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.553077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.561839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.566295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.578564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.582366Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.584875Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.593477Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.608518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.609822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.619550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.619700Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.626432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.628190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.633677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.641382Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.642965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.649337Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.654855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.659039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.660605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.667213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.667441Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.673644Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.674510Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.683701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.684230Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.689531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.689955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.695480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.695688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.701399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.826174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:32.849331Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0de1naaxtz5dcy4mw65t8", SessionId: ydb://session/3?node_id=1&id=NzQ0NzMyYjEtOWYxNDNhOTgtZTcyNTU1MzctZmIwNjg0NTA=, Slow query, duration: 30.074880s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:13:33.113600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:13:33.113657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:13:33.113978Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039392;self_id=[1:7486785997695243558:10857];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-03-28T09:13:33.114389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:13:54.640265Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0en2jfswvmrw0n31fhfzw", SessionId: ydb://session/3?node_id=1&id=NzQ0NzMyYjEtOWYxNDNhOTgtZTcyNTU1MzctZmIwNjg0NTA=, Slow query, duration: 11.901072s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/\";\n\n-- TPC-H/TPC-R National Market Share Query (Q8)\n-- TPC TPC-H Parameter Substitution (Version 2.17.2 build 0)\n-- using 1680793381 as a seed to the RNG\n\nselect\n o_year,\n sum(case\n when nation = 'MOZAMBIQUE' then volume\n else 0\n end) / sum(volume) as mkt_share\nfrom\n (\n select\n DateTime::GetYear(o_orderdate) as o_year,\n l_extendedprice * (1 - l_discount) as volume,\n n2.n_name as nation\n from\n part\n cross join supplier\n cross join lineitem\n cross join orders\n cross join customer\n cross join nation n1\n cross join nation n2\n cross join region\n where\n p_partkey = l_partkey\n and s_suppkey = l_suppkey\n and l_orderkey = o_orderkey\n and o_custkey = c_custkey\n and c_nationkey = n1.n_nationkey\n and n1.n_regionkey = r_regionkey\n and r_name = 'AFRICA'\n and s_nationkey = n2.n_nationkey\n and o_orderdate between date('1995-01-01') and date('1996-12-31')\n and p_type = 'ECONOMY PLATED COPPER'\n ) as all_nations\ngroup by\n o_year\norder by\n o_year;", parameters: 0b >> KqpQueryService::DdlGroup [GOOD] >> KqpQueryService::DdlPermission >> KqpQueryService::IssuesInCaseOfSuccess [GOOD] >> KqpQueryService::MaterializeTxResults >> KqpQueryService::ShowCreateTable [GOOD] >> KqpQueryService::ShowCreateTableDisable >> SystemView::AuthGroups >> SystemView::ConcurrentScans >> DbCounters::TabletsSimple |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |83.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore >> SystemView::PgTablesOneSchemeShardDataQuery >> KqpQueryService::TableSink_Olap_Replace [GOOD] >> KqpQueryService::TableSink_OlapUpsert >> SystemView::PartitionStatsOneSchemeShard >> SystemView::VSlotsFields >> SystemView::AuthUsers >> KqpService::SessionBusyRetryOperation [GOOD] >> KqpService::RangeCache-UseCache >> KqpQueryService::SeveralCTAS-UseSink [GOOD] |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |83.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnWrite-UseSink [FAIL] Test command err: Trying to start YDB, gRPC: 32746, MsgBus: 63357 2025-03-28T09:12:16.747311Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785711985984660:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:16.747433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00193b/r3tmp/tmpaoDf91/pdisk_1.dat 2025-03-28T09:12:17.315235Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:17.323025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:17.323115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:17.328095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32746, node 1 2025-03-28T09:12:17.409862Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:17.409883Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:17.409890Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:17.410014Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63357 TClient is connected to server localhost:63357 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:18.299286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:18.351929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:18.630557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:18.922591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
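Note on the block that follows: the repeated "Failed to fetch pool default ... NOT_FOUND" warnings, the "Scheduled retry for error: Transaction ... completed, doublechecking" message, and the TX_PROXY error "path exist, request accepts it" form one recurring startup sequence (it also appears near the top of this section and again in the test runs below). Read at face value, the messages trace a create-if-absent bootstrap of '/Root/.metadata/workload_manager/pools/default': several actors race to create the default resource pool, an ambiguous commit is verified by a double-check, and a loser of the race accepts the already-existing path as success. A minimal sketch of that retry shape, in generic C++ with hypothetical names and statuses (an illustration of the pattern the log suggests, not YDB's actual workload-manager code):

#include <functional>
#include <iostream>

enum class EStatus { Ok, AlreadyExists, Ambiguous, Error };

// Create-if-absent: "already exists" counts as success (another actor won the
// race), and an ambiguous commit is double-checked before retrying.
EStatus CreateIfAbsent(const std::function<EStatus()>& create,
                       const std::function<bool()>& exists,
                       int maxAttempts = 3) {
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        switch (create()) {
            case EStatus::Ok:
            case EStatus::AlreadyExists:  // cf. "path exist, request accepts it"
                return EStatus::Ok;
            case EStatus::Ambiguous:      // cf. "completed, doublechecking"
                if (exists()) {
                    return EStatus::Ok;   // the commit did land after all
                }
                break;                    // otherwise retry on the next pass
            case EStatus::Error:
                return EStatus::Error;
        }
    }
    return EStatus::Error;
}

int main() {
    bool created = false;
    auto create = [&] {
        if (created) {
            return EStatus::AlreadyExists;
        }
        created = true;                   // simulate a commit whose ack was lost
        return EStatus::Ambiguous;
    };
    auto exists = [&] { return created; };
    std::cout << (CreateIfAbsent(create, exists) == EStatus::Ok ? "ok" : "failed") << "\n";
}

Under that reading, these WARN/ERROR lines are expected bootstrap noise; the failure this test actually reports is the OVERLOADED assertion further down.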
2025-03-28T09:12:19.062841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:21.471817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785733460822915:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:21.471964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:21.750338Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785711985984660:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:21.750408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:21.834511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.876567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.917248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.965953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.995079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:22.074620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:22.197298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785737755790732:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:22.197420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:22.199872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785737755790737:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:22.204711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:22.221793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785737755790739:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:22.322796Z node 1 :TX_PROXY ERROR: Actor# [1:7486785737755790797:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:23.289503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:32.303008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:12:32.303074Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:33.619148Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486785785000435495:2883], TxId: 281474976710719, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe0ch6jc9fmpqfvckns8cwn. SessionId : ydb://session/3?node_id=1&id=NzNkNTg3YWYtYzI3N2Y0MzctNzliOWJhZTgtNzgwNGVkMWM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:12:33.619663Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486785785000435496:2884], TxId: 281474976710719, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NzNkNTg3YWYtYzI3N2Y0MzctNzliOWJhZTgtNzgwNGVkMWM=. CustomerSuppliedId : . TraceId : 01jqe0ch6jc9fmpqfvckns8cwn. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486785785000435492:2500], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:12:33.620155Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzNkNTg3YWYtYzI3N2Y0MzctNzliOWJhZTgtNzgwNGVkMWM=, ActorId: [1:7486785742050758387:2500], ActorState: ExecuteState, TraceId: 01jqe0ch6jc9fmpqfvckns8cwn, Create QueryResponse for error on request, msg: 2025-03-28T09:12:33.714667Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710721 at tablet 72075186224037925 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037925) | 2025-03-28T09:12:33.716552Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710721 at tablet 72075186224037929 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037929) | 2025-03-28T09:12:33.717470Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710721 at tablet 72075186224037928 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037928) | 2025-03-28T09:12:33.718043Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710721 at tablet 72075186224037924 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037924) | 2025-03-28T09:12:33.718177Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710721 at tablet 72075186224037923 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037923) | 2025-03-28T09:12:33.721541Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710721 at tablet 72075186224037922 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037922) | 2025-03-28T09:12:33.732378Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710721 at tablet 72075186224037920 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037920) | 2025-03-28T09:12:33.732979Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710721 at tablet 72075186224037921 status: OVERLOADED errors: WRONG_SHARD_STATE (Wrong shard state: SplitSrcWaitForNoTxInFlight tablet id: 72075186224037921) | 2025-03-28T09:12:33.734052Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976710721 at tablet 72075186224037919 status: OVERLOADED errors: WRONG_SH ... us: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:46.513339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.565478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.644788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.722033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.798034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.859537Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:46.898983Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486785819778812926:2227];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:46.899110Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:46.959081Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785841253651515:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:46.959178Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:46.959497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486785841253651521:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:46.966601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:46.985218Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486785841253651523:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:12:47.078037Z node 2 :TX_PROXY ERROR: Actor# [2:7486785845548618877:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:48.493095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:57.013918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:12:57.013948Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:58.219888Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486785892793263440:2854], TxId: 281474976715731, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe0d92tctvsemykcj5vq4a2. SessionId : ydb://session/3?node_id=2&id=OTE1NGE1ZGUtMjA0NWQxZGUtYTJkMmQ4ZjYtMTIwOWE4OGQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:12:58.220616Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486785892793263441:2855], TxId: 281474976715731, task: 2. Ctx: { TraceId : 01jqe0d92tctvsemykcj5vq4a2. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OTE1NGE1ZGUtMjA0NWQxZGUtYTJkMmQ4ZjYtMTIwOWE4OGQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7486785892793263437:2492], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:12:58.221044Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTE1NGE1ZGUtMjA0NWQxZGUtYTJkMmQ4ZjYtMTIwOWE4OGQ=, ActorId: [2:7486785849843586446:2492], ActorState: ExecuteState, TraceId: 01jqe0d92tctvsemykcj5vq4a2, Create QueryResponse for error on request, msg: 2025-03-28T09:12:59.036484Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7486785897088231633:2492] TxId: 281474976715738. Ctx: { TraceId: 01jqe0d9s9c1sgf3eqv8v0yqvd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OTE1NGE1ZGUtMjA0NWQxZGUtYTJkMmQ4ZjYtMTIwOWE4OGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: [WRONG_SHARD_STATE] Rejecting data TxId 281474976715738 because datashard 72075186224037919: is in process of split opId 281474976710661 state SplitSrcWaitForPartitioningChanged (wrong shard state); 2025-03-28T09:12:59.036799Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTE1NGE1ZGUtMjA0NWQxZGUtYTJkMmQ4ZjYtMTIwOWE4OGQ=, ActorId: [2:7486785849843586446:2492], ActorState: ExecuteState, TraceId: 01jqe0d9s9c1sgf3eqv8v0yqvd, Create QueryResponse for error on request, msg: 2025-03-28T09:12:59.056357Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-03-28T09:12:59.056407Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037933 not found 2025-03-28T09:12:59.056449Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037932 not found 2025-03-28T09:12:59.079944Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037928 not found 2025-03-28T09:12:59.079985Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037927 not found 2025-03-28T09:12:59.080004Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037921 not found 2025-03-28T09:12:59.080020Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037920 not found 2025-03-28T09:12:59.080044Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037931 not found 2025-03-28T09:12:59.081164Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037929 not found 2025-03-28T09:12:59.119517Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037923 not found 2025-03-28T09:13:09.025826Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-03-28T09:13:09.041933Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037934 not found 2025-03-28T09:13:09.042556Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037919 not found 2025-03-28T09:13:09.047972Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037922 not found 2025-03-28T09:13:09.051212Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037930 not found 2025-03-28T09:13:19.033033Z node 2 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037941 not found
2025-03-28T09:13:19.034247Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found
2025-03-28T09:13:29.129312Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037940 not found
2025-03-28T09:13:29.139124Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037942 not found
2025-03-28T09:13:29.139163Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037939 not found
2025-03-28T09:13:29.141973Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037943 not found
2025-03-28T09:13:29.144362Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037938 not found
2025-03-28T09:13:59.354035Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037944 not found
2025-03-28T09:13:59.368982Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037945 not found
assertion failed at ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:2037, virtual void NKikimr::NKqp::NTestSuiteKqpMultishardIndex::TTestCaseDataColumnWrite::Execute_(NUnitTest::TTestContext &) [UseSink = false]: (result.IsSuccess())
: Error: Kikimr cluster or one of its subsystems is overloaded., code: 2006
: Error: [WRONG_SHARD_STATE] Rejecting data TxId 281474976715738 because datashard 72075186224037919: is in process of split opId 281474976710661 state SplitSrcWaitForPartitioningChanged (wrong shard state)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x196C311B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x19B885CF
2. /tmp//-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:2037: Execute_ @ 0x193073C2
3. /tmp//-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354: operator() @ 0x192C1507
4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354:1) &> @ 0x192C1507
5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354:1) &> @ 0x192C1507
6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x192C1507
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x192C1507
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19BBF5F5
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19BBF5F5
10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19BBF5F5
11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19B8F148
12. /tmp//-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354: Execute @ 0x192C06D3
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19B90A15
14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x19BB9B6C
15. ??:0: ?? @ 0x7F94C1166D8F
16. ??:0: ?? @ 0x7F94C1166E3F
17. ??:0: ?? @ 0x1640E028
>> KqpQueryService::PeriodicTaskInSessionPoolSessionCloseByIdle [GOOD] >> KqpQueryService::ReadDatashardAndColumnshard
>> SystemView::StoragePoolsFields
|83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3
|83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3
|83.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3
>> KqpQueryService::Tcl [GOOD] >> KqpQueryService::TableSink_ReplaceFromSelectOlap
>> KqpQueryService::TableSink_OltpOrder [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::SeveralCTAS-UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 31309, MsgBus: 11214
2025-03-28T09:13:50.313158Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786118726177958:2194];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:13:50.313210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00151a/r3tmp/tmpW17rhG/pdisk_1.dat
2025-03-28T09:13:51.069476Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:13:51.075267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:13:51.075387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:13:51.079209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected TServer::EnableGrpc on GrpcPort 31309, node 1 2025-03-28T09:13:51.415248Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:51.415270Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:51.415276Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:51.415395Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11214 TClient is connected to server localhost:11214 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:52.620634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:52.657964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:52.839708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:53.030342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:13:53.109823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:13:54.935720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786135906048773:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:54.935855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:55.273243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:55.319023Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786118726177958:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:55.319117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:55.329206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:55.373855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:55.443240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:55.481544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:55.526336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:55.574993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786140201016582:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:55.575080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:55.575457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786140201016587:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:55.580282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:55.596381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786140201016589:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:55.687362Z node 1 :TX_PROXY ERROR: Actor# [1:7486786140201016644:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 5044, MsgBus: 9793 2025-03-28T09:13:58.784284Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786151183123914:2149];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00151a/r3tmp/tmpWW2RbH/pdisk_1.dat 2025-03-28T09:13:58.994532Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:13:59.039696Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:59.065382Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:59.065484Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:59.071760Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5044, node 2 2025-03-28T09:13:59.227258Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:59.227280Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:59.227287Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:59.227397Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9793 TClient is connected to server localhost:9793 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:00.067428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:14:00.079107Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:14:02.972275Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786168362993657:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:02.972354Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786168362993646:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:02.972654Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:02.977896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:14:02.992021Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786168362993660:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:14:03.065423Z node 2 :TX_PROXY ERROR: Actor# [2:7486786172657961009:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:03.279313Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-03-28T09:14:03.284481Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:1, at schemeshard: 72057594046644480 2025-03-28T09:14:03.609217Z node 2 :TX_PROXY ERROR: Actor# [2:7486786172657961296:2508] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:03.622363Z node 2 :TX_PROXY ERROR: Actor# [2:7486786172657961303:2513] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ZjFkZjBlZWMtODgzNmU4NWItYjM5ZTY2NTMtNjg1NGM3YzY=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:03.625527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:03.787613Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786151183123914:2149];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:03.787698Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:03.918657Z node 2 :TX_PROXY ERROR: Actor# [2:7486786172657961495:2631] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:03.928144Z node 2 :TX_PROXY ERROR: Actor# [2:7486786172657961502:2636] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ZjFkZjBlZWMtODgzNmU4NWItYjM5ZTY2NTMtNjg1NGM3YzY=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:03.932269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5555, MsgBus: 9042 2025-03-28T09:14:05.622362Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786183235936754:2087];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00151a/r3tmp/tmp0EIKKf/pdisk_1.dat 2025-03-28T09:14:05.702569Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:14:05.854847Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:05.878422Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:05.878512Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:05.883972Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5555, node 3 2025-03-28T09:14:05.957485Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:05.957509Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:05.957540Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:05.957671Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9042 TClient is connected to server localhost:9042 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:06.547797Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:06.552784Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:14:09.404550Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786200415806541:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:09.404649Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:09.406335Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786200415806553:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:09.412516Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:14:09.431141Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786200415806555:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:14:09.520105Z node 3 :TX_PROXY ERROR: Actor# [3:7486786200415806606:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:09.636780Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-03-28T09:14:09.656141Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2025-03-28T09:14:09.668425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:1, at schemeshard: 72057594046644480 2025-03-28T09:14:10.021212Z node 3 :TX_PROXY ERROR: Actor# [3:7486786204710774190:2505] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:10.023370Z node 3 :TX_PROXY ERROR: Actor# [3:7486786204710774197:2510] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ODcyODYyM2UtNzczMjM3MS02MjIwODNhNS1mOTVkMzc5Nw==\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:10.026443Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:10.205851Z node 3 :TX_PROXY ERROR: Actor# [3:7486786204710774384:2623] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:10.207734Z node 3 :TX_PROXY ERROR: Actor# [3:7486786204710774391:2628] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/ODcyODYyM2UtNzczMjM3MS02MjIwODNhNS1mOTVkMzc5Nw==\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:10.210259Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:14:10.623010Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786183235936754:2087];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:10.623137Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryService::ExecuteQueryInteractiveTxCommitWithQuery [GOOD] >> KqpQueryService::ExecuteQueryMultiResult >> KqpQueryService::TableSink_OltpLiteralUpsert [GOOD] >> KqpQueryService::TableSink_OltpInsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> 
KqpQueryService::TableSink_OltpOrder [GOOD] Test command err: Trying to start YDB, gRPC: 1726, MsgBus: 26129 2025-03-28T09:13:23.263745Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785999943442520:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.263890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001538/r3tmp/tmpa6a4rm/pdisk_1.dat 2025-03-28T09:13:24.454324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:24.714066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.714165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.723234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:13:24.767810Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1726, node 1 2025-03-28T09:13:24.971333Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.971362Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.971374Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.971485Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26129 TClient is connected to server localhost:26129 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:26.482361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:26.530974Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:28.266859Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785999943442520:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.266918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:29.036644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786025713246745:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:29.036827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.166157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.550164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786030008214209:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.550252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.550536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786030008214214:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.555244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.572525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786030008214216:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:13:30.679156Z node 1 :TX_PROXY ERROR: Actor# [1:7486786030008214269:2450] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 18646, MsgBus: 23942 2025-03-28T09:13:35.999305Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786051893328136:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:36.000148Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001538/r3tmp/tmp01fO2k/pdisk_1.dat 2025-03-28T09:13:36.413798Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:36.423686Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:36.423767Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:36.424871Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18646, node 2 2025-03-28T09:13:36.661280Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:36.661306Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:36.661313Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:36.661439Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23942 TClient is connected to server localhost:23942 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:37.248925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:37.259315Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:40.161086Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786073368165245:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:40.161186Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:40.179426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:40.239187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786073368165346:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:40.239269Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:40.239568Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786073368165351:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:40.244114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:13:40.257830Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786073368165353:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:13:40.329356Z node 2 :TX_PROXY ERROR: Actor# [2:7486786073368165404:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: ... 6.427963Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7486786187126144029:2342] TxId: 281474976710755. Ctx: { TraceId: 01jqe0fc4qavq7mwq1btmf5er2, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjI5YjIxNjgtODc0YTg0OWUtNWUyZTJiZDUtMjJkNGEwNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:14:06.429046Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjI5YjIxNjgtODc0YTg0OWUtNWUyZTJiZDUtMjJkNGEwNjk=, ActorId: [3:7486786101226794593:2342], ActorState: ExecuteState, TraceId: 01jqe0fc4qavq7mwq1btmf5er2, Create QueryResponse for error on request, msg: 2025-03-28T09:14:06.609762Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=97; 2025-03-28T09:14:06.610067Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486786187126144071:2436], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7486786109816729527:2436]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7486786187126144071:2436].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-28T09:14:06.610125Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486786187126144063:2436], SessionActorId: [3:7486786109816729527:2436], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7486786109816729527:2436]. isRollback=0 2025-03-28T09:14:06.610283Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTliMGM5MTctMTQxOGI2NzQtMjhlZWUzNDEtZTVlNTI3OTQ=, ActorId: [3:7486786109816729527:2436], ActorState: ExecuteState, TraceId: 01jqe0fc9ydbwefda6rwxy3zns, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7486786187126144064:2436] from: [3:7486786187126144063:2436] 2025-03-28T09:14:06.610339Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7486786187126144064:2436] TxId: 281474976710756. Ctx: { TraceId: 01jqe0fc9ydbwefda6rwxy3zns, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTliMGM5MTctMTQxOGI2NzQtMjhlZWUzNDEtZTVlNTI3OTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:14:06.611354Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTliMGM5MTctMTQxOGI2NzQtMjhlZWUzNDEtZTVlNTI3OTQ=, ActorId: [3:7486786109816729527:2436], ActorState: ExecuteState, TraceId: 01jqe0fc9ydbwefda6rwxy3zns, Create QueryResponse for error on request, msg: 2025-03-28T09:14:06.748440Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=98; 2025-03-28T09:14:06.748613Z node 3 :TX_DATASHARD ERROR: Prepare transaction failed. txid 98 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:14:06.748706Z node 3 :TX_DATASHARD ERROR: Errors while proposing transaction txid 98 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:14:06.748862Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486786187126144107:2436], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7486786109816729527:2436]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7486786187126144107:2436].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-28T09:14:06.748920Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486786187126144097:2436], SessionActorId: [3:7486786109816729527:2436], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7486786109816729527:2436]. isRollback=0 2025-03-28T09:14:06.749082Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTliMGM5MTctMTQxOGI2NzQtMjhlZWUzNDEtZTVlNTI3OTQ=, ActorId: [3:7486786109816729527:2436], ActorState: ExecuteState, TraceId: 01jqe0fcetfpp213c1rxwrrvyq, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7486786187126144098:2436] from: [3:7486786187126144097:2436] 2025-03-28T09:14:06.749142Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7486786187126144098:2436] TxId: 281474976710757. Ctx: { TraceId: 01jqe0fcetfpp213c1rxwrrvyq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTliMGM5MTctMTQxOGI2NzQtMjhlZWUzNDEtZTVlNTI3OTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:14:06.750151Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MTliMGM5MTctMTQxOGI2NzQtMjhlZWUzNDEtZTVlNTI3OTQ=, ActorId: [3:7486786109816729527:2436], ActorState: ExecuteState, TraceId: 01jqe0fcetfpp213c1rxwrrvyq, Create QueryResponse for error on request, msg: 2025-03-28T09:14:06.910604Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=99; 2025-03-28T09:14:06.910892Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486786187126144143:2342], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7486786101226794593:2342]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7486786187126144143:2342].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-28T09:14:06.911001Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486786187126144135:2342], SessionActorId: [3:7486786101226794593:2342], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7486786101226794593:2342]. isRollback=0 2025-03-28T09:14:06.911249Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjI5YjIxNjgtODc0YTg0OWUtNWUyZTJiZDUtMjJkNGEwNjk=, ActorId: [3:7486786101226794593:2342], ActorState: ExecuteState, TraceId: 01jqe0fck57x7y0wp334zbz8zb, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7486786187126144136:2342] from: [3:7486786187126144135:2342] 2025-03-28T09:14:06.911318Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7486786187126144136:2342] TxId: 281474976710758. Ctx: { TraceId: 01jqe0fck57x7y0wp334zbz8zb, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjI5YjIxNjgtODc0YTg0OWUtNWUyZTJiZDUtMjJkNGEwNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:14:06.912637Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjI5YjIxNjgtODc0YTg0OWUtNWUyZTJiZDUtMjJkNGEwNjk=, ActorId: [3:7486786101226794593:2342], ActorState: ExecuteState, TraceId: 01jqe0fck57x7y0wp334zbz8zb, Create QueryResponse for error on request, msg: 2025-03-28T09:14:07.095145Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=100; 2025-03-28T09:14:07.095480Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486786191421111478:2342], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7486786101226794593:2342]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7486786191421111478:2342].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-28T09:14:07.095546Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486786191421111468:2342], SessionActorId: [3:7486786101226794593:2342], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7486786101226794593:2342]. isRollback=0 2025-03-28T09:14:07.095730Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjI5YjIxNjgtODc0YTg0OWUtNWUyZTJiZDUtMjJkNGEwNjk=, ActorId: [3:7486786101226794593:2342], ActorState: ExecuteState, TraceId: 01jqe0fcs85r9xd9k9gfgd3dv1, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7486786191421111469:2342] from: [3:7486786191421111468:2342] 2025-03-28T09:14:07.095795Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7486786191421111469:2342] TxId: 281474976710759. Ctx: { TraceId: 01jqe0fcs85r9xd9k9gfgd3dv1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjI5YjIxNjgtODc0YTg0OWUtNWUyZTJiZDUtMjJkNGEwNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:14:07.097413Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjI5YjIxNjgtODc0YTg0OWUtNWUyZTJiZDUtMjJkNGEwNjk=, ActorId: [3:7486786101226794593:2342], ActorState: ExecuteState, TraceId: 01jqe0fcs85r9xd9k9gfgd3dv1, Create QueryResponse for error on request, msg: 2025-03-28T09:14:07.470112Z node 3 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=101; 2025-03-28T09:14:07.470588Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486786191421111512:2342], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [3:7486786101226794593:2342]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[3:7486786191421111512:2342].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-28T09:14:07.470660Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486786191421111504:2342], SessionActorId: [3:7486786101226794593:2342], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[3:7486786101226794593:2342]. isRollback=0 2025-03-28T09:14:07.470874Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjI5YjIxNjgtODc0YTg0OWUtNWUyZTJiZDUtMjJkNGEwNjk=, ActorId: [3:7486786101226794593:2342], ActorState: ExecuteState, TraceId: 01jqe0fczt1pk30s5z2jwdk12b, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7486786191421111505:2342] from: [3:7486786191421111504:2342] 2025-03-28T09:14:07.470943Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7486786191421111505:2342] TxId: 281474976710760. Ctx: { TraceId: 01jqe0fczt1pk30s5z2jwdk12b, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjI5YjIxNjgtODc0YTg0OWUtNWUyZTJiZDUtMjJkNGEwNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:14:07.472593Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjI5YjIxNjgtODc0YTg0OWUtNWUyZTJiZDUtMjJkNGEwNjk=, ActorId: [3:7486786101226794593:2342], ActorState: ExecuteState, TraceId: 01jqe0fczt1pk30s5z2jwdk12b, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpQueryService::ReadManyRangesAndPoints [GOOD] >> KqpQueryService::TableSink_Htap+withOltpSink [GOOD] >> KqpQueryService::TableSink_Htap-withOltpSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReadManyRangesAndPoints [GOOD] Test command err: Trying to start YDB, gRPC: 12843, MsgBus: 13530 2025-03-28T09:13:53.312415Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786128098089807:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:53.312457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001513/r3tmp/tmpVepm7H/pdisk_1.dat 2025-03-28T09:13:54.089896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:54.089988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:54.091067Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:13:54.134506Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12843, node 1 2025-03-28T09:13:54.391726Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:54.391765Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:54.391773Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:54.393191Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13530 TClient is connected to server localhost:13530 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:55.529798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:58.315691Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786128098089807:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:58.315768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:58.509314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786149572926890:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:58.509447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:58.937577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:59.307904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786153867894742:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:59.307988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:59.308329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786153867894747:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:59.312396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:13:59.338979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786153867894749:2383], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:13:59.405277Z node 1 :TX_PROXY ERROR: Actor# [1:7486786153867894804:2703] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 30232, MsgBus: 31083 2025-03-28T09:14:02.191529Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786168686952182:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:02.192038Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001513/r3tmp/tmp4foDsd/pdisk_1.dat 2025-03-28T09:14:02.540558Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:02.543568Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:02.543647Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:02.549318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30232, node 2 2025-03-28T09:14:02.737398Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:02.737417Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:02.737439Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:02.737549Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31083 TClient is connected to server localhost:31083 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:03.568082Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:14:03.579368Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:14:06.566868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786185866822030:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:06.566989Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:06.594023Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:06.725600Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786185866822253:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:06.725711Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:06.725954Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786185866822258:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:06.729836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:14:06.740503Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786185866822260:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:14:06.806760Z node 2 :TX_PROXY ERROR: Actor# [2:7486786185866822311:2473] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:07.195407Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786168686952182:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:07.195467Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16445, MsgBus: 5206 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001513/r3tmp/tmp0FB6Jc/pdisk_1.dat 2025-03-28T09:14:08.641137Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:14:08.799153Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:08.799247Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:08.802903Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:08.814123Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16445, node 3 2025-03-28T09:14:08.987474Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:08.987500Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:08.987511Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:08.987622Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5206 TClient is connected to server localhost:5206 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:09.919803Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:14:09.931227Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:14:13.730762Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786216975189377:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:13.730884Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:13.763624Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:14.143528Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786221270157118:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:14.143623Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:14.143940Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786221270157123:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:14.148396Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:14:14.183053Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T09:14:14.187056Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786221270157125:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:14:14.290799Z node 3 :TX_PROXY ERROR: Actor# [3:7486786221270157185:2636] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryService::AlterTable_DropNotNull_Valid [GOOD] >> KqpQueryService::AlterCdcTopic |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |83.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |83.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |83.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots >> KqpQueryServiceScripts::ExecuteScriptWithForgetAfter [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl >> KqpQueryService::ShowCreateTableDisable [GOOD] >> KqpQueryService::ShowCreateSysView >> KqpService::SwitchCache+UseCache [GOOD] >> KqpQueryService::MaterializeTxResults [GOOD] >> KqpQueryService::MixedReadQueryWithoutStreamLookup >> KqpQueryService::DdlPermission [GOOD] >> KqpQueryService::DdlSecret ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::SwitchCache+UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 11157, MsgBus: 13974 2025-03-28T09:13:23.252130Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786001584821224:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.258968Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001594/r3tmp/tmpeAN4Pj/pdisk_1.dat 2025-03-28T09:13:24.160215Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.262831Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.269548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.294351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11157, node 1 2025-03-28T09:13:24.467137Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.467161Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-03-28T09:13:24.467168Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.467285Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13974 TClient is connected to server localhost:13974 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:25.578248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.602020Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.637011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:25.820957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.445111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.629027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.262796Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786001584821224:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.262934Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:28.885023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786023059659474:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.885162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.150430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.190746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.238879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.277972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.343791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.438620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.512795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786031649594592:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.512863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.513199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786031649594597:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.517943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.531866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786031649594599:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.594380Z node 1 :TX_PROXY ERROR: Actor# [1:7486786031649594652:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 29004, MsgBus: 15726 2025-03-28T09:13:35.615415Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786053299289133:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:35.615468Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001594/r3tmp/tmp5B1Oo4/pdisk_1.dat 2025-03-28T09:13:35.838311Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:35.871681Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:35.871779Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:35.872918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29004, node 2 2025-03-28T09:13:35.902613Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:35.902641Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:35.902648Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:35.902787Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15726 TClient is connected to server localhost:15726 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:36.705037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:36.716085Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:36.730009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:13:36.859138Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:13:37.082984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting ... DljMmYtM2JmNGZkNjA=, ActorId: [2:7486786079069095739:2551], ActorState: ExecuteState, TraceId: 01jqe0ekv69tfha6jt4e1ymv7r, Reply query error, msg: Pending previous query completion proxyRequestId: 49 2025-03-28T09:13:41.622992Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDFkMzA4ZDUtNmVhMGRiNWYtYTY5NmMzZDAtNTE4M2Q3ZTQ=, ActorId: [2:7486786079069095788:2564], ActorState: ExecuteState, TraceId: 01jqe0ekzpdh1459j7d9jca7kn, Reply query error, msg: Pending previous query completion proxyRequestId: 52 2025-03-28T09:13:41.623069Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDFkMzA4ZDUtNmVhMGRiNWYtYTY5NmMzZDAtNTE4M2Q3ZTQ=, ActorId: [2:7486786079069095788:2564], ActorState: ExecuteState, TraceId: 01jqe0ekzpdh1459j7d9jca7kn, Reply query error, msg: Pending previous query completion proxyRequestId: 53 2025-03-28T09:13:41.623103Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDFkMzA4ZDUtNmVhMGRiNWYtYTY5NmMzZDAtNTE4M2Q3ZTQ=, ActorId: [2:7486786079069095788:2564], ActorState: ExecuteState, TraceId: 01jqe0ekzpdh1459j7d9jca7kn, Reply query error, msg: Pending previous query completion proxyRequestId: 54 2025-03-28T09:13:41.638032Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDFkMzA4ZDUtNmVhMGRiNWYtYTY5NmMzZDAtNTE4M2Q3ZTQ=, ActorId: [2:7486786079069095788:2564], ActorState: ExecuteState, TraceId: 01jqe0ekzpdh1459j7d9jca7kn, Reply query error, msg: Pending previous query completion proxyRequestId: 55 2025-03-28T09:13:41.777675Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTg1MmNjZC03NjE0Yjc4YS01ZDk1OWU3Zi1lZmNlZDk2ZQ==, ActorId: [2:7486786079069095838:2577], ActorState: ExecuteState, TraceId: 01jqe0em4gexqq0kbwa8f64bdz, Reply query error, msg: Pending previous query completion proxyRequestId: 58 2025-03-28T09:13:41.778299Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTg1MmNjZC03NjE0Yjc4YS01ZDk1OWU3Zi1lZmNlZDk2ZQ==, ActorId: [2:7486786079069095838:2577], ActorState: ExecuteState, TraceId: 01jqe0em4gexqq0kbwa8f64bdz, Reply query error, msg: Pending previous query completion proxyRequestId: 59 2025-03-28T09:13:41.779067Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTg1MmNjZC03NjE0Yjc4YS01ZDk1OWU3Zi1lZmNlZDk2ZQ==, ActorId: [2:7486786079069095838:2577], ActorState: ExecuteState, TraceId: 01jqe0em4gexqq0kbwa8f64bdz, Reply query error, msg: Pending previous query completion proxyRequestId: 60 2025-03-28T09:13:41.967055Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjY0MGRkZmMtYTg5OWY3MTUtZGViNzM4ZjQtNGMyNTI5ZDI=, ActorId: [2:7486786079069095883:2587], ActorState: ExecuteState, TraceId: 
01jqe0emad8h1ms5xbmp3kkgx9, Reply query error, msg: Pending previous query completion proxyRequestId: 63 2025-03-28T09:13:41.967676Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MjY0MGRkZmMtYTg5OWY3MTUtZGViNzM4ZjQtNGMyNTI5ZDI=, ActorId: [2:7486786079069095883:2587], ActorState: ExecuteState, TraceId: 01jqe0emad8h1ms5xbmp3kkgx9, Reply query error, msg: Pending previous query completion proxyRequestId: 64 2025-03-28T09:13:42.323891Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzFjZTk4YzUtNzI0YzE2OTQtNjM4ZTI0MTgtMjYyMDk5ZjQ=, ActorId: [2:7486786083364063217:2596], ActorState: ExecuteState, TraceId: 01jqe0emnc6sf9vwq8jgfzbvef, Reply query error, msg: Pending previous query completion proxyRequestId: 67 Trying to start YDB, gRPC: 26705, MsgBus: 26640 2025-03-28T09:13:43.717774Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786085386447184:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:43.718583Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001594/r3tmp/tmpCFuLtu/pdisk_1.dat 2025-03-28T09:13:43.817616Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:43.850525Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:43.850608Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:43.851936Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26705, node 3 2025-03-28T09:13:43.926894Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:43.926917Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:43.926926Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:43.927077Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26640 TClient is connected to server localhost:26640 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:44.419642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:44.436322Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:44.552786Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:44.897488Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:45.004898Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:48.047181Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786106861285274:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:48.047283Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:48.100693Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:48.184102Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:48.219934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:48.272311Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:48.323723Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:48.374138Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:48.449723Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786106861285788:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:48.449822Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:48.450216Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786106861285793:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:48.453607Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:48.474499Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786106861285795:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:48.572673Z node 3 :TX_PROXY ERROR: Actor# [3:7486786106861285851:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:48.679018Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786085386447184:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:48.679081Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:49.848049Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.795087Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:13:58.795113Z node 3 :IMPORT WARN: Table profiles were not loaded took: 29.357680s took: 29.390340s took: 29.393259s took: 29.371779s took: 29.400427s took: 29.416429s took: 29.420369s took: 29.422935s took: 29.425189s took: 29.488923s >> KqpQueryService::ExecuteQueryMultiResult [GOOD] >> SystemView::StoragePoolsFields [GOOD] >> SystemView::StoragePoolsRanges >> SystemView::VSlotsFields [GOOD] >> SystemView::TopPartitionsByCpuFields >> KqpQueryService::ReadDatashardAndColumnshard [GOOD] >> KqpQueryServiceScripts::TestTruncatedByRows [GOOD] >> KqpQueryServiceScripts::TestTruncatedBySize >> SystemView::PgTablesOneSchemeShardDataQuery [GOOD] >> SystemView::ShowCreateTable >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ExecuteQueryMultiResult [GOOD] Test command err: Trying to start YDB, gRPC: 64426, MsgBus: 3080 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00150b/r3tmp/tmpNnQV6o/pdisk_1.dat 2025-03-28T09:13:54.964572Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:55.246204Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:55.250103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:55.254040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:55.257648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64426, node 1 2025-03-28T09:13:55.659354Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:55.659378Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:55.659384Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:55.659487Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3080 TClient is connected to server localhost:3080 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:57.024769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:57.067424Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:57.088881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:57.373956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:57.660966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:57.769206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:00.423915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786158101527415:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:00.424035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:00.766847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.815198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.853601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.924344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.962213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:01.051045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:01.148243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786162396495232:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:01.148326Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:01.148575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786162396495237:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:01.153099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:01.172146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786162396495239:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:14:01.238413Z node 1 :TX_PROXY ERROR: Actor# [1:7486786162396495293:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 5026, MsgBus: 5105 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00150b/r3tmp/tmpkNTwC9/pdisk_1.dat 2025-03-28T09:14:05.044822Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:14:05.072749Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:05.079414Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:05.079514Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:05.087986Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5026, node 2 2025-03-28T09:14:05.283027Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:05.283048Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:05.283055Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:05.283179Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5105 TClient is connected to server localhost:5105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:06.028110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:06.051846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:06.131683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:14:06.366199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:06.486370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:09.885120Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786198837317351:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:09.885210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:10.012581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:10.059708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:10.099649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:10.144622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:10.230253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:10.314007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:10.407310Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786203132285168:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:10.407421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:10.407688Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786203132285173:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:10.412133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:10.426889Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786203132285175:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:14:10.512642Z node 2 :TX_PROXY ERROR: Actor# [2:7486786203132285230:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 32728, MsgBus: 5502 2025-03-28T09:14:14.138646Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786218640859902:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:14.138752Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00150b/r3tmp/tmpOrKwgJ/pdisk_1.dat 2025-03-28T09:14:14.304867Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:14.321827Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:14.322272Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:14.330449Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32728, node 3 2025-03-28T09:14:14.482991Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:14.483024Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:14.483035Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:14.483180Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5502 TClient is connected to server localhost:5502 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:15.150554Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:15.167285Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:14:15.255437Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:15.561459Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:15.654023Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:18.510284Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786235820730777:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:18.510398Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:18.565060Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:18.634163Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:18.716425Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:18.802723Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:18.847523Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:18.959720Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:19.082179Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786240115698591:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:19.082300Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:19.082868Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786240115698596:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:19.093853Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:19.129249Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786240115698598:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:14:19.139014Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786218640859902:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:19.139091Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:19.215760Z node 3 :TX_PROXY ERROR: Actor# [3:7486786240115698657:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> Yq_1::Basic_TaggedLiteral [GOOD] >> SystemView::ConcurrentScans [GOOD] >> SystemView::Describe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ReadDatashardAndColumnshard [GOOD] Test command err: Trying to start YDB, gRPC: 23819, MsgBus: 16316 2025-03-28T09:13:32.782414Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786038657891793:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:32.787803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001523/r3tmp/tmpKLu6Xr/pdisk_1.dat 2025-03-28T09:13:33.251044Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:33.257292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:33.257403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:33.260341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23819, node 1 2025-03-28T09:13:33.349454Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:33.349479Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:33.349486Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:33.349635Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16316 TClient is connected to server localhost:16316 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:34.457926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:34.481186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:34.753286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:34.986771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:35.126610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:37.783213Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786038657891793:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:37.783333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:37.805971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786060132730047:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:37.806095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.112264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.156835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.200528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.260706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.297387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.337499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.436625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786064427697860:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.436700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.436878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786064427697865:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.440807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:38.453924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786064427697867:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:38.537445Z node 1 :TX_PROXY ERROR: Actor# [1:7486786064427697923:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:48.238801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:13:48.238836Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 16055, MsgBus: 28131 2025-03-28T09:13:52.033205Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786124909924818:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:52.033250Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001523/r3tmp/tmpq0z0Nt/pdisk_1.dat 2025-03-28T09:13:52.476144Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:52.497900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:52.497983Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:52.499489Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16055, node 2 2025-03-28T09:13:52.723363Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:52.723396Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:52.723404Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:52.723532Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28131 TClient is connected to server localhost:28131 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:53.614356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:53.637495Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:53.658792Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:53.755119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:53.990225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 7205759404664448 ... ma::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:14:19.074989Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:14:19.075135Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:14:19.075182Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:14:19.075392Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:14:19.075427Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:14:19.075518Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:14:19.075553Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:14:19.075621Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:14:19.075679Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:14:19.075727Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:14:19.075756Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:14:19.076364Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:14:19.076417Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:14:19.076663Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:14:19.076701Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:14:19.076878Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:14:19.076914Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:14:19.077124Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:14:19.077157Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:14:19.077288Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:14:19.077314Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:14:19.092649Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:14:19.092747Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:14:19.092875Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:14:19.092909Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:14:19.093092Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:14:19.093132Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:14:19.093260Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:14:19.093309Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:14:19.093392Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:14:19.093422Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:14:19.093464Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:14:19.093494Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:14:19.094186Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:14:19.094226Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:14:19.094468Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:14:19.094502Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:14:19.094676Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:14:19.099279Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:14:19.099567Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:14:19.099606Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:14:19.099776Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:14:19.099807Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:14:19.143518Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:19.144059Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:19.154177Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:19.155770Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:19.162090Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:19.169172Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:19.171998Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:19.181969Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:19.183655Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:19.189699Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:19.434901Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710664; >> KqpQueryService::TableSink_OlapUpsert [GOOD] >> KqpQueryService::TableSink_OltpDelete >> KqpQueryService::TableSink_OltpInsert [GOOD] >> KqpQueryService::TableSink_OltpInteractive >> KqpService::ToDictCache+UseCache [GOOD] >> KqpService::ToDictCache-UseCache >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtlAndForgetAfter [GOOD] Test command err: Trying to start YDB, gRPC: 14946, MsgBus: 17579 2025-03-28T09:13:23.181935Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786001094310747:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.182000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001528/r3tmp/tmp1j9q56/pdisk_1.dat 2025-03-28T09:13:24.363953Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:24.669356Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.715793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.715887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.738619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14946, node 1 2025-03-28T09:13:24.927378Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.927405Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.927420Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.927557Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17579 TClient is connected to server localhost:17579 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:26.672274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.715346Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:26.726623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:27.051555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:27.335749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:27.463964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:28.174367Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786001094310747:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.174449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:29.488254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786026864116259:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:29.488363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.155063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.251144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.300280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.345766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.389614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.453612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.543675Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786031159084079:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.543782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.550180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786031159084084:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.557145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.578060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786031159084086:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.642861Z node 1 :TX_PROXY ERROR: Actor# [1:7486786031159084142:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19553, MsgBus: 15916 2025-03-28T09:13:34.707999Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786048181463718:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:34.708927Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001528/r3tmp/tmpI691jI/pdisk_1.dat 2025-03-28T09:13:35.015195Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:35.065034Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:35.065114Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:35.070117Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19553, node 2 2025-03-28T09:13:35.255294Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:35.255318Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:35.255326Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:35.255464Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15916 TClient is connected to server localhost:15916 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:36.080160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.105479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:36.249220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.501842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId ... 44480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:03.601548Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:14:03.604385Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:14:03.606350Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:14:06.825139Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153246807, txId: 281474976710697] shutting down 2025-03-28T09:14:07.192390Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153247220, txId: 281474976710700] shutting down 2025-03-28T09:14:07.620123Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153247647, txId: 281474976710703] shutting down 2025-03-28T09:14:07.664502Z node 4 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: ac4638ae-760f077f-e90ba95d-5e76d954, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Results are expired }, SessionId: ydb://session/3?node_id=4&id=NDNkZmM4YzMtZDExZWE5ZTEtYmI3ZjYzYWYtNThiYTU1MTM=, TxId: Trying to start YDB, gRPC: 15160, MsgBus: 6998 2025-03-28T09:14:09.052262Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486786196529776212:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:09.052312Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001528/r3tmp/tmpbW5E2j/pdisk_1.dat 2025-03-28T09:14:09.340040Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:09.340162Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:09.344799Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:09.369671Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15160, node 5 2025-03-28T09:14:09.615372Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:09.615403Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:09.615413Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:09.615568Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6998 TClient is connected to server localhost:6998 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:10.592107Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:10.622161Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:10.743457Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:14:10.970981Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:11.084743Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:14:14.058760Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486786196529776212:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:14.089189Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:15.375743Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486786222299581636:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:15.375961Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:15.459517Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:15.516148Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:15.571965Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:15.624673Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:15.701680Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:15.744073Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:15.806314Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486786222299582154:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:15.806426Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:15.811538Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486786222299582159:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:15.815859Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:15.828665Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486786222299582161:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:14:15.928611Z node 5 :TX_PROXY ERROR: Actor# [5:7486786222299582225:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:17.530207Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:14:17.534828Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:14:17.536684Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:14:20.123202Z node 5 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: a70ca312-81930552-7a67011d-78761c99, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=NzM1YmM2ODEtZDY0Mzk4OGMtMjIzYmI5MDctYmE0NDg0NmU=, TxId: 2025-03-28T09:14:21.505540Z node 5 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: a70ca312-81930552-7a67011d-78761c99, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=NjM5YmNlYjktN2NjZWVmMTQtZTc4MDhiMDMtZTU2NTFjYmE=, TxId: 2025-03-28T09:14:21.718944Z node 5 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: a70ca312-81930552-7a67011d-78761c99, reply NOT_FOUND, issues: {
: Error: No such execution } 2025-03-28T09:14:21.890503Z node 5 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: a70ca312-81930552-7a67011d-78761c99, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=5&id=ZGU5MDY3ZWMtNGMxNzg4ZDgtNzYzNmM5M2ItYTBkYTgzMTE=, TxId: 2025-03-28T09:14:21.890655Z node 5 :KQP_PROXY WARN: [ScriptExecutions] [TCancelScriptExecutionOperationActor] ExecutionId: a70ca312-81930552-7a67011d-78761c99, check lease failed 2025-03-28T09:14:23.159820Z node 5 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: a70ca312-81930552-7a67011d-78761c99, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Script execution not found }, SessionId: ydb://session/3?node_id=5&id=OTBkYTFlMy0xZDE5MTM4Zi1jNmM2NmQyOS0zNTg3ODI1Nw==, TxId: |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |83.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_TaggedLiteral [GOOD] Test command err: 2025-03-28T09:13:26.631845Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786013139624872:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:26.631901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0328 09:13:27.616483407 205578 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:13:27.618895790 205578 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:13:27.826967Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2516: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2516 } ] 2025-03-28T09:13:28.152786Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:28.846744Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2516: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2516 } ] 2025-03-28T09:13:28.899790Z node 1 :YQL_NODES_MANAGER ERROR: ydb/core/fq/libs/actors/nodes_manager.cpp:322: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2516: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:2516 2025-03-28T09:13:29.153993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:30.182947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:30.614097Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2516: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2516 } ] 2025-03-28T09:13:31.186593Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:31.521319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:13:31.577753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486786034614461563:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:13:31.596304Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786013139624872:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:31.596360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:31.658213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486786034614461563:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000d0e/r3tmp/tmptF8ZD4/pdisk_1.dat 2025-03-28T09:13:31.857221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486786034614461563:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:13:32.314709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:32.314778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486786034614461563:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:13:32.562963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; E0328 09:13:32.571832915 205702 dns_resolver.cc:162] no server name supplied in dns URI E0328 09:13:32.572026187 205702 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2025-03-28T09:13:32.615554Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:32.827736Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2516: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:2516 } ] 2025-03-28T09:13:32.847067Z node 1 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:2516: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:2516 2025-03-28T09:13:32.851265Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7486786034614461563:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:13:33.137356Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:33.225122Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.108741s 2025-03-28T09:13:33.225268Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.108875s TServer::EnableGrpc on GrpcPort 2516, node 1 2025-03-28T09:13:34.033748Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:34.034718Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:34.034735Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:34.034888Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13775 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:35.024757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:36.127692Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Successfully created directory "Root/yq" 2025-03-28T09:13:36.127720Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Reply for create directory "Root/yq": 2025-03-28T09:13:36.184416Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/nodes". Create session OK 2025-03-28T09:13:36.184441Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-28T09:13:36.184449Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/nodes" 2025-03-28T09:13:36.191737Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/jobs". Create session OK 2025-03-28T09:13:36.191765Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-28T09:13:36.191771Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/jobs" 2025-03-28T09:13:36.211189Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/queries". Create session OK 2025-03-28T09:13:36.211221Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-28T09:13:36.211229Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/queries" 2025-03-28T09:13:36.211302Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/quotas". 
Create session OK 2025-03-28T09:13:36.211311Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-28T09:13:36.211315Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/quotas" 2025-03-28T09:13:36.212183Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/compute_databases". Create session OK 2025-03-28T09:13:36.212212Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-28T09:13:36.212225Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/compute_databases" 2025-03-28T09:13:36.212252Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenants". Create session OK 2025-03-28T09:13:36.212261Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-28T09:13:36.212266Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/tenants" 2025-03-28T09:13:36.212976Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/bindings". Create session OK 2025-03-28T09:13:36.212987Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-28T09:13:36.212996Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/bindings" 2025-03-28T09:13:36.213736Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/connections". Create session OK 2025-03-28T09:13:36.213748Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-28T09:13:36.213753Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Call create table "Root/yq/connections" 2025-03-28T09:13:36.214515Z node 1 :YQ_CONTROL_PLANE_STORAGE DEBUG: Create table "Root/yq/tenant_acks". Create session OK 2025-03-28T09:13:36.214526Z node 1 :YQ_CO ... hed with error: 2025-03-28T09:14:23.070465Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.070561Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.070597Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.070622Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.070723Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.070752Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.070778Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.070853Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.070880Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.070903Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.070988Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.071021Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.071046Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.071085Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.071196Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.071223Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.071250Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.071332Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.071362Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.071386Z node 4 :FQ_QUOTA_SERVICE ERROR: 
SyncQuota finished with error: 2025-03-28T09:14:23.071413Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.071511Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.071537Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.071561Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.071590Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.071752Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.071839Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.071957Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072016Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072059Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072085Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072125Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072222Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072247Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072273Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072305Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072379Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072414Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072442Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072512Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072558Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072605Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072658Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072733Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072760Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072784Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072863Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072902Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072929Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.072956Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.073033Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.073085Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.073158Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.073227Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.073453Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.073495Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.073527Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.073557Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.073651Z node 4 :FQ_QUOTA_SERVICE ERROR: 
SyncQuota finished with error: 2025-03-28T09:14:23.073678Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.073703Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.073790Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.073818Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.073841Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.073925Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.073950Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.073991Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.074096Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.074128Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.074153Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.074240Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.074265Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.074341Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.074368Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.074420Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.074518Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.074544Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.074566Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.074614Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.074954Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.074984Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.075134Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.075159Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.075313Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.075385Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.075777Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.075845Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.076071Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.076253Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.076349Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.076389Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.076469Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.076512Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.076537Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.076562Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.076644Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.076696Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.076722Z node 4 :FQ_QUOTA_SERVICE ERROR: 
SyncQuota finished with error: 2025-03-28T09:14:23.076753Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.076860Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.076891Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.076908Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.076993Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077054Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077080Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077203Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077243Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077273Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077321Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077438Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077483Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077510Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077620Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077651Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077675Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077772Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077845Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077876Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077964Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.077997Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.078015Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.078076Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.078127Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.078161Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.078399Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.078430Z node 4 :FQ_PENDING_FETCHER ERROR: Error with GetTask:
: Error: Client is stopped 2025-03-28T09:14:23.078439Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.078467Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.078521Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: 2025-03-28T09:14:23.078573Z node 4 :FQ_QUOTA_SERVICE ERROR: SyncQuota finished with error: >> KqpQueryService::TableSink_ReplaceFromSelectOlap [GOOD] >> KqpQueryService::ShowCreateSysView [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfter [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::PrefixedVectorIndexOrderByCosineDistanceNotNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 19619, MsgBus: 13913 2025-03-28T09:12:16.958850Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785712879946986:2216];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:16.959058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001934/r3tmp/tmp7Gl6FR/pdisk_1.dat 2025-03-28T09:12:17.525693Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:17.542361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:17.542468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:17.547480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19619, node 1 2025-03-28T09:12:17.758670Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:17.758709Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:17.758740Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:17.758848Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13913 TClient is connected to server localhost:13913 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:12:18.407529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:18.424637Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:18.442434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:18.713965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:12:19.002523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:12:19.092824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:21.249226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785734354785076:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:21.249364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:21.578296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.613890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.684125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.717823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.748233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.791172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:12:21.877487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785734354785596:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:21.877630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:21.877969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785734354785601:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:21.882577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:12:21.909298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785734354785603:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:12:21.948603Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785712879946986:2216];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:21.948644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:21.977729Z node 1 :TX_PROXY ERROR: Actor# [1:7486785734354785659:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:23.253043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:12:23.598856Z node 1 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 281474976710673 DatabaseName: "/Root" Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { vector_settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 2 } clusters: 2 levels: 1 } } } } 2025-03-28T09:12:23.599767Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-28T09:12:23.599840Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7486785742944720751:2517], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:12:23.599958Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715757 2025-03-28T09:12:23.600015Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7486785742944720751:2517], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 0, LockTxStatus: StatusSuccess, LockTxDone: 0, InitiateTxId: 0, 
InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:12:23.600272Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-28T09:12:23.600319Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Locking, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [1:7486785742944720751:2517], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, ... 81474976710673 2025-03-28T09:13:38.617911Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7486786055390484294:2516], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:13:38.618015Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, BuildIndexId: 281474976710673, txId# 281474976715766 2025-03-28T09:13:38.618088Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvAllocateResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7486786055390484294:2516], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 
}} 2025-03-28T09:13:38.618331Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-28T09:13:38.618372Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7486786055390484294:2516], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:13:38.621251Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 281474976710673, cookie: 281474976710673, txId: 281474976715766, status: StatusAccepted 2025-03-28T09:13:38.621373Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7486786055390484294:2516], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976715766 SchemeshardId: 72057594046644480 PathId: 16 2025-03-28T09:13:38.621886Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-28T09:13:38.621946Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7486786055390484294:2516], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: 
StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:13:38.624372Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715766, buildInfoId: 281474976710673 2025-03-28T09:13:38.624437Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976715766, buildInfo: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7486786055390484294:2516], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:13:38.624752Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-28T09:13:38.624808Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7486786055390484294:2516], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:13:38.624832Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-28T09:13:38.625054Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 281474976710673 2025-03-28T09:13:38.625099Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 
281474976710673, Uid: , DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1], TablePathId: [OwnerId: 72057594046644480, LocalPathId: 16], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index, IndexColumn: user, IndexColumn: emb, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [3:7486786055390484294:2516], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976715757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976715758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976715765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976715766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 180, upload bytes: 3540, read rows: 69, read bytes: 2322 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:13:38.626049Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/Root" IndexBuildId: 281474976710673 2025-03-28T09:13:38.626247Z node 3 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 281474976710673 State: STATE_DONE Settings { source_path: "/Root/TestTable" index { name: "index" index_columns: "user" index_columns: "emb" global_vector_kmeans_tree_index { } } max_batch_rows: 50000 max_batch_bytes: 8388608 max_shards_in_flight: 32 max_retries_upload_batch: 50 } Progress: 100 } 2025-03-28T09:13:38.637064Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037927 not found 2025-03-28T09:13:38.637086Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037926 not found 2025-03-28T09:13:38.641145Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037928 not found 2025-03-28T09:13:38.641167Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037929 not found 2025-03-28T09:13:44.282199Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:13:44.282234Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:47.082854Z node 3 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976710673 >> SystemView::PartitionStatsOneSchemeShard [GOOD] >> SystemView::PartitionStatsOneSchemeShardDataQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_ReplaceFromSelectOlap [GOOD] Test command err: Trying to start YDB, gRPC: 1651, MsgBus: 1719 2025-03-28T09:13:58.556122Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786150742682502:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:58.556174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ff/r3tmp/tmpe0eYRb/pdisk_1.dat 2025-03-28T09:13:59.258535Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:59.260709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-03-28T09:13:59.260795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:59.282210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1651, node 1 2025-03-28T09:13:59.531162Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:59.531183Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:59.531190Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:59.531286Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1719 TClient is connected to server localhost:1719 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:00.774190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:00.815129Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:14:03.501484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786172217519648:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:03.501631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:03.504053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786172217519660:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:03.508566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:14:03.525033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786172217519662:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:14:03.557858Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786150742682502:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:03.557931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:03.604262Z node 1 :TX_PROXY ERROR: Actor# [1:7486786172217519713:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:03.964921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:2, at schemeshard: 72057594046644480 2025-03-28T09:14:04.287644Z node 1 :TX_PROXY ERROR: Actor# [1:7486786176512487251:2474] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:04.300606Z node 1 :TX_PROXY ERROR: Actor# [1:7486786176512487259:2480] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.tmp/sessions/MTVlOTI1MmUtYWVjZTVkZWYtMjcyMWE2NzMtN2FkOTRlNmU=\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:04.322363Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-28T09:14:04.335440Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486786176512487312:2368], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/test/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:14:04.336312Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTVlOTI1MmUtYWVjZTVkZWYtMjcyMWE2NzMtN2FkOTRlNmU=, ActorId: [1:7486786172217519619:2328], ActorState: ExecuteState, TraceId: 01jqe0fa536zpy98wsbwf54nce, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:14:04.409500Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486786176512487323:2375], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:17: Error: At function: KiReadTable!
:3:17: Error: Cannot find table 'db.[/Root/test/Temp]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:14:04.410765Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGQ5OTk1YS1kNzYyYTFkYS04MDQzZTVkZS0xNzA1NGYyZg==, ActorId: [1:7486786176512487319:2372], ActorState: ExecuteState, TraceId: 01jqe0fa770ewg7r64vgx8jg85, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 3454, MsgBus: 24627 2025-03-28T09:14:05.300628Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786180287506619:2085];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:05.300766Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ff/r3tmp/tmpYJbsE6/pdisk_1.dat 2025-03-28T09:14:05.565242Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:05.587053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:05.587129Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 3454, node 2 2025-03-28T09:14:05.591718Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:14:05.782496Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:05.782519Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:05.782525Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:05.782633Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24627 TClient is connected to server localhost:24627 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:06.568240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:14:06.577236Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:14:06.604144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:06.694941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:06.903781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:07.064646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:09.950828Z nod ... OLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-28T09:14:24.382178Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-28T09:14:25.043447Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-28T09:14:25.043708Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-28T09:14:25.043726Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-28T09:14:25.043903Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-28T09:14:25.044220Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-28T09:14:25.044767Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-28T09:14:25.044975Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-28T09:14:25.051090Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-28T09:14:25.051527Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-28T09:14:25.052110Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-28T09:14:25.052690Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-28T09:14:25.053181Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-28T09:14:25.053736Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; 2025-03-28T09:14:25.057461Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=23;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037910;receive=72075186224037915; 2025-03-28T09:14:25.057568Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=24;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037910;receive=72075186224037915; 2025-03-28T09:14:25.057618Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=25;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037910;receive=72075186224037915; 2025-03-28T09:14:25.057680Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=26;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037910;receive=72075186224037915; 2025-03-28T09:14:25.057727Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=27;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037910;receive=72075186224037915; 2025-03-28T09:14:25.057773Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=28;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037910;receive=72075186224037915; 2025-03-28T09:14:25.057821Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=29;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037910;receive=72075186224037915; 2025-03-28T09:14:25.057873Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=30;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037910;receive=72075186224037915; 2025-03-28T09:14:25.057930Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=31;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037910;receive=72075186224037915; 2025-03-28T09:14:25.058028Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=32;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037910;receive=72075186224037915; 2025-03-28T09:14:25.058076Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=33;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037910;receive=72075186224037915; 2025-03-28T09:14:25.058135Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=34;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037910;receive=72075186224037915; 2025-03-28T09:14:25.058316Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=36;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037903; 2025-03-28T09:14:25.058377Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=37;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037903; 2025-03-28T09:14:25.058425Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=38;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037903; 2025-03-28T09:14:25.058510Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=39;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037903; 2025-03-28T09:14:25.058561Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=40;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037903; 2025-03-28T09:14:25.058622Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=41;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037903; 2025-03-28T09:14:25.058720Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=42;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037903; 2025-03-28T09:14:25.058778Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=43;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037903; 2025-03-28T09:14:25.058822Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=44;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037903; 2025-03-28T09:14:25.058868Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=45;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037903; 2025-03-28T09:14:25.058915Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=46;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037903; 2025-03-28T09:14:25.058962Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;self_id=[3:7486786245605264437:2492];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037909;local_tx_no=47;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037910;receive=72075186224037903; 2025-03-28T09:14:25.059739Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=281474976715672;tx_id=281474976715672;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715672; >> KqpQueryServiceScripts::ForgetScriptExecution [GOOD] >> KqpQueryService::AlterCdcTopic [GOOD] |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |83.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::ShowCreateSysView [GOOD] Test command err: Trying to start YDB, gRPC: 12810, MsgBus: 26901 
2025-03-28T09:14:02.549187Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786167139608601:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:02.549773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014df/r3tmp/tmp90ymAf/pdisk_1.dat 2025-03-28T09:14:03.357107Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:03.360718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:03.360818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:03.378213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12810, node 1 2025-03-28T09:14:03.577747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:03.577774Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:03.577782Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:03.577913Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26901 TClient is connected to server localhost:26901 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:04.347428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:04.374650Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:14:04.388899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:04.597978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:14:04.780415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:04.862978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:07.048042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786188614446873:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:07.048187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:07.550886Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786167139608601:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:07.550954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:07.625289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:07.661901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:07.694916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:07.735136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:07.777007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:07.865097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:07.916794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786188614447388:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:07.916878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:07.917382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786188614447393:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:07.921108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:07.935422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786188614447395:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:14:07.993139Z node 1 :TX_PROXY ERROR: Actor# [1:7486786188614447450:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:09.831862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24999, MsgBus: 9466 2025-03-28T09:14:11.031295Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786204129597530:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:11.032160Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014df/r3tmp/tmp4Sog72/pdisk_1.dat 2025-03-28T09:14:11.307017Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:11.308132Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:11.308203Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:11.312762Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24999, node 2 2025-03-28T09:14:11.443460Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:11.443481Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:11.443488Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:11.443610Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9466 TClient is connected to server localhost:9466 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:12.101412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:14:12.109793Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:14:12.126934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:12.293369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28 ... d: [2:7486786229899403587:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:16.967726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:17.021185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786229899403589:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:14:17.089707Z node 2 :TX_PROXY ERROR: Actor# [2:7486786234194370941:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:18.926949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:14:19.016673Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486786238489338627:2508], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:35: Error: At function: KiReadTable!
:2:35: Error: SHOW CREATE statement is not supported 2025-03-28T09:14:19.018414Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Mzc2ZGU4MDUtOThmOTVkYjQtODE0MGZjZmEtNjc0ZGI5YTY=, ActorId: [2:7486786238489338523:2494], ActorState: ExecuteState, TraceId: 01jqe0frff07d0swn5x9s2wd75, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 26273, MsgBus: 17076 2025-03-28T09:14:20.620609Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786244077633468:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:20.621224Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014df/r3tmp/tmpgT7U9W/pdisk_1.dat 2025-03-28T09:14:20.727955Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:20.754906Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:20.754991Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:20.757074Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26273, node 3 2025-03-28T09:14:20.799473Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:20.799497Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:20.799506Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:20.799630Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17076 TClient is connected to server localhost:17076 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:21.304373Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:14:21.310769Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:14:21.323915Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:21.409203Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:21.608276Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:21.703082Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:24.747147Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786261257504394:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:24.747223Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:24.812058Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:24.868628Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:24.931697Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:24.985868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:25.036693Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:25.123151Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:25.213832Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786265552472211:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:25.213911Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:25.214255Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786265552472216:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:25.218985Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-28T09:14:25.264315Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480
2025-03-28T09:14:25.265325Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786265552472218:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:14:25.322067Z node 3 :TX_PROXY ERROR: Actor# [3:7486786265552472273:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:25.625174Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786244077633468:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:25.625237Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:27.274334Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:14:27.414739Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486786274142407259:2506], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:17: Error: At function: KiReadTable!
<main>:2:17: Error: Cannot find table 'db.[/Root/.sys/show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003
2025-03-28T09:14:27.416485Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YmU3ZjIwMWYtZGU5Y2ExYTctNWNjYWE5ZDYtODJiZThkYzU=, ActorId: [3:7486786274142407153:2492], ActorState: ExecuteState, TraceId: 01jqe0g0mzbv2w26zzk3kfvx2g, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id:
2025-03-28T09:14:27.474210Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486786274142407272:2509], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/.sys/show_create]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:14:27.475973Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YmU3ZjIwMWYtZGU5Y2ExYTctNWNjYWE5ZDYtODJiZThkYzU=, ActorId: [3:7486786274142407153:2492], ActorState: ExecuteState, TraceId: 01jqe0g0q43v4wzxpbgxzxfsxf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> KqpService::RangeCache-UseCache [GOOD] >> SystemView::StoragePoolsRanges [GOOD] >> SystemView::SystemViewFailOps ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ForgetScriptExecution [GOOD] Test command err: Trying to start YDB, gRPC: 11788, MsgBus: 15862 2025-03-28T09:13:41.450242Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786077081473231:2259];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:41.450293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00151b/r3tmp/tmpNaPts8/pdisk_1.dat 2025-03-28T09:13:42.026763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:42.026896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:42.034345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11788, node 1 2025-03-28T09:13:42.095198Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:42.106150Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:13:42.271203Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:42.271221Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:42.271228Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:42.271330Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15862 TClient is connected to server localhost:15862 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:13:43.307905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:43.379685Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:43.401459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:43.666227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:43.913347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:44.029040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:46.099819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786098556311275:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:13:46.099988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:46.450827Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786077081473231:2259];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:46.452092Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:46.483246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.560701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.612959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.687460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.724517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.799714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:46.948056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786098556311807:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:13:46.948121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:13:46.948477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786098556311812:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:13:46.952604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:13:46.967344Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-03-28T09:13:46.967599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786098556311814:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:47.033356Z node 1 :TX_PROXY ERROR: Actor# [1:7486786102851279164:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:48.157015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:48.158547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:48.159919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:13:51.048634Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153231078, txId: 281474976710704] shutting down 2025-03-28T09:13:51.083168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786120031149835:2808], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool another_pool_id not found or you don't have access permissions }
2025-03-28T09:13:51.083236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786120031149833:2806], DatabaseId: /Root, PoolId: another_pool_id, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool another_pool_id not found or you don't have access permissions }
2025-03-28T09:13:51.083301Z node 1 :KQP_WORKLOAD_SERVICE ERROR: [WorkloadService] [TPoolResolverActor] ActorId: [1:7486786120031149834:2807], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=M2RjMzFiODMtZWYxZDEyODQtZWMwNTU4YjgtNmZhNmY1NGU=, Failed to fetch pool info NOT_FOUND, issues: {
<main>: Error: Resource pool another_pool_id not found or you don't have access permissions }
2025-03-28T09:13:51.083375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolResolverActor] ActorId: [1:7486786120031149834:2807], DatabaseId: /Root, PoolId: another_pool_id, SessionId: ydb://session/3?node_id=1&id=M2RjMzFiODMtZWYxZDEyODQtZWMwNTU4YjgtNmZhNmY1NGU=, Failed to resolve pool, NOT_FOUND, issues: {
<main>: Error: Failed to resolve pool id another_pool_id subissue: {
<main>: Error: Resource pool another_pool_id not found or you don't have access permissions } }
2025-03-28T09:13:51.083419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool another_pool_id, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool another_pool_id not found or you don't have access permissions }
2025-03-28T09:13:51.083498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply continue error NOT_FOUND to [1:7486786120031149830:2805]: {
<main>: Error: Failed to resolve pool id another_pool_id subissue: {
<main>: Error: Resource pool another_pool_id not found or you don't have access permissions } }
2025-03-28T09:13:51.083609Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2RjMzFiODMtZWYxZDEyODQtZWMwNTU4YjgtNmZhNmY1NGU=, ActorId: [1:7486786120031149830:2805], ActorState: ExecuteState, TraceId: 01jqe0ex6pbq4hfgv7acqprf2v, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool
2025-03-28T09:13:51.083809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7486786120031149830:2805]: Pool another_pool_id not found
Trying to start YDB, gRPC: 27560, MsgBus: 22385
2025-03-28T09:13 ... nsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:13:56.879624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:13:57.000350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786142401828765:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:13:57.000495Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:13:57.000889Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786146696796066:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:13:57.005006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:13:57.025924Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786146696796068:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:57.083483Z node 2 :TX_PROXY ERROR: Actor# [2:7486786146696796123:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:57.142779Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786125221957262:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:57.142847Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:58.525180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.526953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.528606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5445, MsgBus: 15488 2025-03-28T09:14:02.108384Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786166427674305:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:02.108434Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00151b/r3tmp/tmpZDXiJj/pdisk_1.dat 2025-03-28T09:14:02.433783Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:02.483588Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:02.483688Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:02.488156Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5445, node 3 2025-03-28T09:14:02.702100Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:02.702127Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:02.702136Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:02.702266Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15488 TClient is connected to server localhost:15488 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:14:03.449971Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:14:03.488958Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:03.631139Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:03.882348Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:03.986769Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:07.110793Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786166427674305:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:07.110868Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:07.226837Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786187902512561:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:07.226957Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:07.340945Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:07.469412Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:07.557670Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:07.657907Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:07.750700Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:07.844227Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:07.917844Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786187902513087:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:07.917956Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:07.918942Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786187902513092:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:07.923764Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:14:07.937779Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786187902513094:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:14:08.023825Z node 3 :TX_PROXY ERROR: Actor# [3:7486786192197480445:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:09.501354Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:14:09.507489Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:14:09.509034Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:14:17.426628Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:14:17.426662Z node 3 :IMPORT WARN: Table profiles were not loaded |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 >> KqpQueryService::CloseSessionsWithLoad [GOOD] >> KqpQueryService::ClosedSessionRemovedFromPool |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |83.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::AlterCdcTopic [GOOD] Test command err: Trying to start YDB, gRPC: 27680, MsgBus: 3051 2025-03-28T09:13:51.933432Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786122131658757:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:51.940658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001515/r3tmp/tmptpo6hX/pdisk_1.dat 2025-03-28T09:13:52.656423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:52.656528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:52.660284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:13:52.734236Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27680, node 1 2025-03-28T09:13:53.071011Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:53.071031Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:53.071055Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:53.071190Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3051 TClient is connected to server localhost:3051 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:54.424438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:54.455084Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:54.467223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:54.660094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:54.872612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:54.990637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:56.932976Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786122131658757:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:56.933067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:57.776088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786147901464263:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:13:57.776226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:58.199821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.279193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.404043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.503858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.589106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.666252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.778344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786152196432094:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:13:58.778433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:13:58.779629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786152196432099:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:13:58.783812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:13:58.804277Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-03-28T09:13:58.804543Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786152196432101:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:13:58.886433Z node 1 :TX_PROXY ERROR: Actor# [1:7486786152196432156:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:14:00.375755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:14:00.542878Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486786160786367121:2506], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:25: Error: At function: KiWriteTable!
<main>:3:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008
2025-03-28T09:14:00.543122Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTNiMWViMi1kMDcyNmVjMi0zYmEyYjNlMi1iMGE1OTk3NQ==, ActorId: [1:7486786160786367032:2495], ActorState: ExecuteState, TraceId: 01jqe0f6ck8b0je1szfa43rvmr, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
<main>: Error: Type annotation, code: 1030
<main>:3:25: Error: At function: KiWriteTable!
:3:25: Error: Document API table cannot be modified from YQL query: /Root/DocumentApiTest, code: 2008 Trying to start YDB, gRPC: 29700, MsgBus: 11091 2025-03-28T09:14:01.975203Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786164561976020:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:02.043132Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001515/r3tmp/tmpQ9jYgs/pdisk_1.dat 2025-03-28T09:14:02.291210Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:02.328671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:02.328764Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:02.335873Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29700, node 2 2025-03-28T09:14:02.547156Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:02.547179Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:02.547186Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:02.547295Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11091 TClient is connected to server localhost:11091 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { Sche ... in>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:15.563470Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786222244345261:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:15.567704Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:14:15.589623Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786222244345263:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking }
2025-03-28T09:14:15.669684Z node 3 :TX_PROXY ERROR: Actor# [3:7486786222244345315:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:14:15.749634Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480
2025-03-28T09:14:16.312872Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486786226539312764:2360], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:30: Error: At function: KiWriteTable!
<main>:2:65: Error: Failed to convert type: Struct<'id':Int32,'val':Null> to Struct<'id':Int32,'val':Int32>
<main>:2:65: Error: Failed to convert 'val': Null to Int32
:2:65: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-28T09:14:16.314594Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZTUyMGU4M2ItYmI0M2Q3ZDktNjhhZTg5ZjgtNmNmZmRlN2Y=, ActorId: [3:7486786226539312762:2359], ActorState: ExecuteState, TraceId: 01jqe0fntjb9bhqsqy0352af1m, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:14:16.369218Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4203, MsgBus: 19525 2025-03-28T09:14:18.308815Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486786235033062755:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:18.308874Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001515/r3tmp/tmpEusKpC/pdisk_1.dat 2025-03-28T09:14:18.762677Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:18.769157Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:18.769257Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:18.784533Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4203, node 4 2025-03-28T09:14:18.984321Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:18.984343Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:18.984351Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:18.984475Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19525 TClient is connected to server localhost:19525 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:20.246994Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:14:20.269300Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:20.412199Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:20.706454Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:20.810567Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:23.310617Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486786235033062755:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:23.310716Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:25.084511Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786265097835482:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:25.084628Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:25.137827Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:25.190477Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:25.239729Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:25.370587Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:25.452222Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:25.563797Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:25.665545Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786265097836011:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:25.665653Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:25.666107Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786265097836016:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:25.676032Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:14:25.719508Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486786265097836018:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:14:25.810413Z node 4 :TX_PROXY ERROR: Actor# [4:7486786265097836080:3464] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:28.019084Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:14:28.396422Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037919:1][4:7486786277982738470:2523] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:18:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-28T09:14:28.656527Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:14:28.810800Z node 4 :TX_PROXY ERROR: Actor# [4:7486786277982738669:3949] txid# 281474976710674, issues: { message: "Cannot change partition count. Use split/merge instead" severity: 1 } 2025-03-28T09:14:28.811297Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZWFkODFhM2YtYTc4MWU1ZGYtOTE3ODkxNGEtNjVkYjYyZjY=, ActorId: [4:7486786277982738572:2538], ActorState: ExecuteState, TraceId: 01jqe0g200703ghqctrhn94e0c, Create QueryResponse for error on request, msg: Query failed, status: BAD_REQUEST:
: Error: Cannot change partition count. Use split/merge instead, code: 2017 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::RangeCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 27311, MsgBus: 25094 2025-03-28T09:13:56.055172Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786142592435798:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:56.056122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001505/r3tmp/tmpUuVI9H/pdisk_1.dat 2025-03-28T09:13:56.580584Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:56.584061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:56.584162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:56.587282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27311, node 1 2025-03-28T09:13:56.711375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:56.711402Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:56.711409Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:56.711526Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25094 TClient is connected to server localhost:25094 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:57.411817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:57.437026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:57.647963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:57.843325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:57.915027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:59.906359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786155477339455:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:59.906533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:00.469105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.511182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.569818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.616176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.648850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.688005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.775117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786159772307262:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:00.775219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:00.775441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786159772307267:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:00.779854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:00.796239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786159772307269:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:14:00.891078Z node 1 :TX_PROXY ERROR: Actor# [1:7486786159772307327:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:01.051032Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786142592435798:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:01.051117Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:02.546808Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTJmNDFlODktMzJlNDZiYzQtNTg4MTQwNTQtMmRjNTFjYjU=, ActorId: [1:7486786168362242183:2492], ActorState: ExecuteState, TraceId: 01jqe0f8dfbdfrn05755tqhv2p, Reply query error, msg: Pending previous query completion proxyRequestId: 7 2025-03-28T09:14:02.546885Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTJmNDFlODktMzJlNDZiYzQtNTg4MTQwNTQtMmRjNTFjYjU=, ActorId: [1:7486786168362242183:2492], ActorState: ExecuteState, TraceId: 01jqe0f8dfbdfrn05755tqhv2p, Reply query error, msg: Pending previous query completion proxyRequestId: 8 2025-03-28T09:14:02.546917Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTJmNDFlODktMzJlNDZiYzQtNTg4MTQwNTQtMmRjNTFjYjU=, ActorId: [1:7486786168362242183:2492], ActorState: ExecuteState, TraceId: 01jqe0f8dfbdfrn05755tqhv2p, Reply query error, msg: Pending previous query completion proxyRequestId: 9 2025-03-28T09:14:02.546945Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTJmNDFlODktMzJlNDZiYzQtNTg4MTQwNTQtMmRjNTFjYjU=, ActorId: [1:7486786168362242183:2492], ActorState: ExecuteState, TraceId: 01jqe0f8dfbdfrn05755tqhv2p, Reply query error, msg: Pending previous query completion proxyRequestId: 10 2025-03-28T09:14:02.546972Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTJmNDFlODktMzJlNDZiYzQtNTg4MTQwNTQtMmRjNTFjYjU=, ActorId: [1:7486786168362242183:2492], ActorState: ExecuteState, TraceId: 01jqe0f8dfbdfrn05755tqhv2p, Reply query error, msg: Pending previous query completion proxyRequestId: 11 2025-03-28T09:14:02.547488Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTJmNDFlODktMzJlNDZiYzQtNTg4MTQwNTQtMmRjNTFjYjU=, ActorId: [1:7486786168362242183:2492], ActorState: ExecuteState, TraceId: 01jqe0f8dfbdfrn05755tqhv2p, Reply query error, msg: Pending previous query completion proxyRequestId: 12 2025-03-28T09:14:02.547546Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTJmNDFlODktMzJlNDZiYzQtNTg4MTQwNTQtMmRjNTFjYjU=, ActorId: [1:7486786168362242183:2492], ActorState: ExecuteState, TraceId: 01jqe0f8dfbdfrn05755tqhv2p, Reply query error, msg: Pending previous query completion proxyRequestId: 13 2025-03-28T09:14:02.554672Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTJmNDFlODktMzJlNDZiYzQtNTg4MTQwNTQtMmRjNTFjYjU=, ActorId: [1:7486786168362242183:2492], ActorState: ExecuteState, TraceId: 01jqe0f8dfbdfrn05755tqhv2p, Reply query error, msg: Pending previous query completion proxyRequestId: 14 2025-03-28T09:14:02.554757Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTJmNDFlODktMzJlNDZiYzQtNTg4MTQwNTQtMmRjNTFjYjU=, 
ActorId: [1:7486786168362242183:2492], ActorState: ExecuteState, TraceId: 01jqe0f8dfbdfrn05755tqhv2p, Reply query error, msg: Pending previous query completion proxyRequestId: 15 Trying to start YDB, gRPC: 28760, MsgBus: 16539 2025-03-28T09:14:03.616207Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786172775100753:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:03.616269Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001505/r3tmp/tmpCJh8og/pdisk_1.dat 2025-03-28T09:14:03.895621Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:03.909216Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:03.909299Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:03.912630Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28760, node 2 2025-03-28T0 ... xMzA4ZjUtNTUxYjJlZjItMjYyMzhiMGMtYWIxNTM0NmI=, ActorId: [2:7486786202839874664:2553], ActorState: ExecuteState, TraceId: 01jqe0fg2e1yke45bdz6rk303x, Reply query error, msg: Pending previous query completion proxyRequestId: 47 2025-03-28T09:14:10.390978Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmYxMzA4ZjUtNTUxYjJlZjItMjYyMzhiMGMtYWIxNTM0NmI=, ActorId: [2:7486786202839874664:2553], ActorState: ExecuteState, TraceId: 01jqe0fg2e1yke45bdz6rk303x, Reply query error, msg: Pending previous query completion proxyRequestId: 48 2025-03-28T09:14:10.391010Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmYxMzA4ZjUtNTUxYjJlZjItMjYyMzhiMGMtYWIxNTM0NmI=, ActorId: [2:7486786202839874664:2553], ActorState: ExecuteState, TraceId: 01jqe0fg2e1yke45bdz6rk303x, Reply query error, msg: Pending previous query completion proxyRequestId: 49 2025-03-28T09:14:10.547555Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTFhNTQ2NjUtYzYyNGRlZjYtZWNkN2RiMDQtYWNlMzQ4MmY=, ActorId: [2:7486786202839874714:2565], ActorState: ExecuteState, TraceId: 01jqe0fg7jdgmbhftdst6hg63t, Reply query error, msg: Pending previous query completion proxyRequestId: 52 2025-03-28T09:14:10.547653Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTFhNTQ2NjUtYzYyNGRlZjYtZWNkN2RiMDQtYWNlMzQ4MmY=, ActorId: [2:7486786202839874714:2565], ActorState: ExecuteState, TraceId: 01jqe0fg7jdgmbhftdst6hg63t, Reply query error, msg: Pending previous query completion proxyRequestId: 53 2025-03-28T09:14:10.551281Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTFhNTQ2NjUtYzYyNGRlZjYtZWNkN2RiMDQtYWNlMzQ4MmY=, ActorId: [2:7486786202839874714:2565], ActorState: ExecuteState, TraceId: 01jqe0fg7jdgmbhftdst6hg63t, Reply query error, msg: Pending previous query completion proxyRequestId: 54 2025-03-28T09:14:10.553955Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTFhNTQ2NjUtYzYyNGRlZjYtZWNkN2RiMDQtYWNlMzQ4MmY=, ActorId: [2:7486786202839874714:2565], ActorState: ExecuteState, TraceId: 01jqe0fg7jdgmbhftdst6hg63t, Reply query error, msg: Pending previous query completion proxyRequestId: 55 2025-03-28T09:14:10.682959Z node 2 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=2&id=NWQxYTgxNzAtMTFiZTFiMmUtNTFjZTU5ODEtMjA0MjRmYTQ=, ActorId: [2:7486786202839874765:2578], ActorState: ExecuteState, TraceId: 01jqe0fgbt99tntx3gs1sw3exq, Reply query error, msg: Pending previous query completion proxyRequestId: 58 2025-03-28T09:14:10.683026Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWQxYTgxNzAtMTFiZTFiMmUtNTFjZTU5ODEtMjA0MjRmYTQ=, ActorId: [2:7486786202839874765:2578], ActorState: ExecuteState, TraceId: 01jqe0fgbt99tntx3gs1sw3exq, Reply query error, msg: Pending previous query completion proxyRequestId: 59 2025-03-28T09:14:10.683058Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NWQxYTgxNzAtMTFiZTFiMmUtNTFjZTU5ODEtMjA0MjRmYTQ=, ActorId: [2:7486786202839874765:2578], ActorState: ExecuteState, TraceId: 01jqe0fgbt99tntx3gs1sw3exq, Reply query error, msg: Pending previous query completion proxyRequestId: 60 2025-03-28T09:14:10.823312Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDM5YzUxMjAtYzFmNDU1YTEtMzY5NDE1Y2UtNmM3OWM4YTE=, ActorId: [2:7486786202839874793:2588], ActorState: ExecuteState, TraceId: 01jqe0fgg5dx1615887xdn4p5y, Reply query error, msg: Pending previous query completion proxyRequestId: 63 2025-03-28T09:14:10.823869Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDM5YzUxMjAtYzFmNDU1YTEtMzY5NDE1Y2UtNmM3OWM4YTE=, ActorId: [2:7486786202839874793:2588], ActorState: ExecuteState, TraceId: 01jqe0fgg5dx1615887xdn4p5y, Reply query error, msg: Pending previous query completion proxyRequestId: 64 2025-03-28T09:14:10.972329Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzUzMzkxNWMtYmMxNTNmOGUtZjIwM2UzNGEtYzg0YWMxZWU=, ActorId: [2:7486786202839874819:2598], ActorState: ExecuteState, TraceId: 01jqe0fgmv05st4caajy9ja8pt, Reply query error, msg: Pending previous query completion proxyRequestId: 67 Trying to start YDB, gRPC: 29519, MsgBus: 4691 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001505/r3tmp/tmp2bHcb4/pdisk_1.dat 2025-03-28T09:14:12.377253Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:14:12.402944Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:12.404871Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:12.404940Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:12.408730Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29519, node 3 2025-03-28T09:14:12.541787Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:12.541806Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:12.541814Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:12.541930Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4691 TClient is connected to server localhost:4691 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:13.227041Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:13.235350Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:14:13.249350Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:13.336193Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:13.592088Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:13.698197Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:16.003701Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786224587367852:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:16.003779Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:16.034828Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:16.067025Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:16.099767Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:16.129672Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:16.162069Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:16.193473Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:16.280562Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786228882335658:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:16.280629Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:16.280904Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786228882335663:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:16.284464Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:16.294152Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786228882335665:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:14:16.373794Z node 3 :TX_PROXY ERROR: Actor# [3:7486786228882335720:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:27.370051Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:14:27.370087Z node 3 :IMPORT WARN: Table profiles were not loaded took: 10.808731s took: 10.804487s took: 10.811799s took: 10.830437s took: 10.800210s took: 10.836179s took: 10.824294s took: 10.838785s took: 10.840519s took: 10.809827s >> SystemView::AuthGroups [GOOD] >> SystemView::AuthGroups_Access >> KqpJoinOrder::TPCDS90+ColumnStore [GOOD] >> KqpQueryService::TableSink_OltpInteractive [GOOD] >> KqpQueryService::MixedReadQueryWithoutStreamLookup [GOOD] >> SystemView::Describe [GOOD] >> SystemView::DescribeSystemFolder >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl [GOOD] >> SystemView::AuthUsers [GOOD] >> SystemView::AuthUsers_LockUnlock >> KqpQueryService::TableSink_Htap-withOltpSink [GOOD] >> KqpQueryService::TableSink_DisableSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 12209, MsgBus: 19911 2025-03-28T09:14:07.674868Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786188783543405:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:07.681763Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014cc/r3tmp/tmp5B8Lvw/pdisk_1.dat 2025-03-28T09:14:08.172476Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:08.174572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:08.174679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:08.176500Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12209, node 1 2025-03-28T09:14:08.307304Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:08.307326Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:08.307349Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:08.307818Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19911 TClient is connected to server localhost:19911 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:09.185195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:09.199507Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:14:12.344680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786210258380396:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:12.344773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:12.674822Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786188783543405:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:12.674886Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:12.914757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:13.106821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786214553347799:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:13.106886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:13.107950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786214553347804:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:13.112318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:14:13.129276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786214553347806:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:14:13.204009Z node 1 :TX_PROXY ERROR: Actor# [1:7486786214553347858:2403] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 22087, MsgBus: 8597 2025-03-28T09:14:14.895799Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786218646782167:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:14.895846Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014cc/r3tmp/tmpbY6khh/pdisk_1.dat 2025-03-28T09:14:15.180710Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:15.184237Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:15.184296Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:15.186109Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22087, node 2 2025-03-28T09:14:15.287242Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:15.287261Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:15.287267Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:15.287378Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8597 TClient is connected to server localhost:8597 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:14:15.915210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:14:19.031504Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786240121619311:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:19.031580Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:19.042430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:19.112227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786240121619413:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:19.112298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:19.112617Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786240121619418:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:19.117520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:14:19.129791Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786240121619420:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:14:19.212544Z node 2 :TX_PROXY ERROR: Actor# [2:7486786240121619471:2394] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:19.731684Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=4; 2025-03-28T09:14:19.746459Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:14:19.748736Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:14:19.749017Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486786240121619558:2367], Table: `/Root/DataShard` ([72057594046644480:2:1]), SessionActorId: [2:7486786240121619542:2367]Got CONSTRAINT VIOLATION for table `/Root/DataShard`. ShardID=72075186224037888, Sink=[2:7486786240121619558:2367].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-28T09:14:19.749573Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486786240121619551:2367], SessionActorId: [2:7486786240121619542:2367], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7486786240121619542:2367]. isRollback=0 2025-03-28T09:14:19.749841Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWQxMmY1OC04NjViZGY3LWNkZDA1YjhkLTJhNmE4OWJj, ActorId: [2:7486786240121619542:2367], ActorState: ExecuteState, TraceId: 01jqe0fs42fgtr7fpcmyexhxy3, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7486786240121619552:2367] from: [2:7486786240121619551:2367] 2025-03-28T09:14:19.749930Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7486786240121619552:2367] TxId: 281474976715663. Ctx: { TraceId: 01jqe0fs42fgtr7fpcmyexhxy3, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZWQxMmY1OC04NjViZGY3LWNkZDA1YjhkLTJhNmE4OWJj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/DataShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:14:19.750970Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWQxMmY1OC04NjViZGY3LWNkZDA1YjhkLTJhNmE4OWJj, ActorId: [2:7486786240121619542:2367], ActorState: ExecuteState, TraceId: 01jqe0fs42fgtr7fpcmyexhxy3, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 2025-03-28T09:14:19.902811Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786218646782167:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:19.902891Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 26355, MsgBus: 4702 2025-03-28T09:14:25.804954Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786267671914600:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:25.816598Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014cc/r3tmp/tmpJaXRvp/pdisk_1.dat 2025-03-28T09:14:26.114087Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:26.132240Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:26.132331Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:26.134229Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26355, node 3 2025-03-28T09:14:26.261667Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:26.261691Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:26.261700Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:26.261820Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4702 TClient is connected to server localhost:4702 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:14:27.067532Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:30.265759Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786289146751720:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:30.265854Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:30.284676Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:30.624076Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:14:30.878490Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786267671914600:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:30.882743Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:31.104016Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786293441720440:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:31.104107Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:31.118834Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786293441720445:2447], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:31.118961Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:31.122835Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786293441720450:2450], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:31.131419Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-28T09:14:31.164412Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786293441720452:2451], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-28T09:14:31.241868Z node 3 :TX_PROXY ERROR: Actor# [3:7486786293441720534:3294] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |83.3%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::MixedReadQueryWithoutStreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 14763, MsgBus: 65047 2025-03-28T09:13:59.379451Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786157599650771:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:59.407535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014f9/r3tmp/tmpcbq6TI/pdisk_1.dat 2025-03-28T09:14:00.154928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:00.155052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:00.159334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:14:00.189846Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14763, node 1 2025-03-28T09:14:00.293291Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:00.293326Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:00.293337Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:00.293479Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65047 TClient is connected to server localhost:65047 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:14:01.005366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:01.047531Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:14:01.069195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:01.300973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:01.680328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:01.759726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:04.376610Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786157599650771:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:04.376694Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:04.619677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786179074488889:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:04.619798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:04.941386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:05.016461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:05.053824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:05.100268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:05.176838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:05.219319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:05.276633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786183369456706:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:05.276704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:05.276911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786183369456711:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:05.281510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:05.302866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786183369456713:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:14:05.410225Z node 1 :TX_PROXY ERROR: Actor# [1:7486786183369456771:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:06.799466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:14:06.884454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:14:06.947639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22941, MsgBus: 10737 2025-03-28T09:14:10.845856Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786203003354774:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:10.845907Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014f9/r3tmp/tmpC8LVU0/pdisk_1.dat 2025-03-28T09:14:10.983491Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:11.035907Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:11.036003Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:11.037279Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22941, node 2 2025-03-28T09:14:11.225436Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:11.225469Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:11.225477Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:11.225606Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10737 TClient is connected to server localhost:10737 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:12.060957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:12.071451Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:14:12.089825Z n ... vent=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.066176Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.069823Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.076130Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.076130Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.082707Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.082710Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.089378Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037918;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.089378Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.096064Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.096064Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.102711Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.107000Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.109609Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037952;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.113219Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037915;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.117219Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037948;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.119929Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.124777Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.127060Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.131650Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.133384Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.138251Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.139613Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.145062Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.145916Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.151936Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.158791Z 
node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037950;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.162945Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.165890Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.172538Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.174617Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.182881Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.185732Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.190095Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.192072Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.196867Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.198181Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.203669Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.204379Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.210354Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.210627Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.218204Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.218204Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.224616Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.227369Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.231741Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.238972Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710661; 2025-03-28T09:14:30.642605Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710664; 2025-03-28T09:14:30.643121Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710664; 2025-03-28T09:14:30.643121Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710664; 2025-03-28T09:14:30.643996Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;self_id=[3:7486786273946958776:2398];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037907;local_tx_no=13;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037947;receive=72075186224037899; 2025-03-28T09:14:30.644067Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;self_id=[3:7486786273946958776:2398];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037907;local_tx_no=14;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037903,72075186224037947;receive=72075186224037899; 2025-03-28T09:14:30.644646Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=281474976710664;tx_id=281474976710664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710664; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithResultsTtl [GOOD] Test command err: Trying to start YDB, gRPC: 2303, MsgBus: 30934 2025-03-28T09:13:52.486216Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786126202619669:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:52.486621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001514/r3tmp/tmpLbk3X1/pdisk_1.dat 2025-03-28T09:13:53.359561Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:53.392949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:53.395153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:53.399947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2303, node 1 2025-03-28T09:13:53.691260Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:53.691287Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:53.691302Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:53.691448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30934 TClient is connected to server localhost:30934 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:54.759102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:54.826867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:55.054905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:13:55.393604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:13:55.558620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
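The TX_COLUMNSHARD_TX records above are one "finished_tx" acknowledgement per column shard for tx 281474976710661, and they all share the same key=value shape (tablet_id=...;...;event=finished_tx;tx_id=...;). A minimal triage sketch, assuming only that record shape — the regex and the finished_tx_histogram helper below are hypothetical, not part of ya or YDB tooling — that counts how many distinct tablets acknowledged each transaction:

    # Hypothetical helper: tally distinct column shards that logged
    # event=finished_tx for each tx_id in a captured test log.
    import re
    import sys
    from collections import defaultdict

    # Tempered gap so one record cannot swallow the next; DOTALL lets the
    # match bridge records that the log capture wrapped across lines.
    RECORD = re.compile(
        r"tablet_id=(\d+);(?:(?!tablet_id=).)*?event=finished_tx;tx_id=(\d+);",
        re.DOTALL,
    )

    def finished_tx_histogram(text):
        """Map tx_id -> set of tablet_ids that reported finished_tx."""
        shards = defaultdict(set)
        for tablet, tx in RECORD.findall(text):
            shards[tx].add(tablet)
        return shards

    if __name__ == "__main__":
        for tx, tablets in sorted(finished_tx_histogram(sys.stdin.read()).items()):
            print(f"tx {tx}: finished_tx on {len(tablets)} tablets")

Fed this section, it should report tx 281474976710661 finishing on roughly forty tablets; a shard that never acknowledged would show up as a smaller count than its siblings.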
2025-03-28T09:13:57.465840Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786126202619669:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:57.465900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:58.310095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786151972425112:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:58.310202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:58.740538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.810621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.877115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.925291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.984418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:59.045100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:59.137136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786156267392930:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:59.137262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:59.137568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786156267392935:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:59.141621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:59.167277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786156267392937:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:59.246615Z node 1 :TX_PROXY ERROR: Actor# [1:7486786156267392993:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:00.703969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.705467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.706516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:14:04.001370Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153244028, txId: 281474976710703] shutting down Trying to start YDB, gRPC: 13950, MsgBus: 30836 2025-03-28T09:14:04.992521Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786179115600172:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:05.090643Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001514/r3tmp/tmprmZDFM/pdisk_1.dat 2025-03-28T09:14:05.292733Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:05.308475Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:05.308593Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:05.310707Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13950, node 2 2025-03-28T09:14:05.423383Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:05.423417Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:05.423427Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:05.423556Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30836 TClient is connected to server localhost:30836 WaitRootIsUp 'Root'... 
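The sequence just above — TPoolFetcherActor reporting "Resource pool default not found", an ESchemeOpCreateResourcePool suboperation, a "completed, doublechecking" retry, and a TX_PROXY ERROR saying the pool path already exists and the request accepts it — repeats for every fresh test database in this log and resolves itself. A hedged filter sketch (the marker list and the is_benign_bootstrap name are my own, not ya output conventions; it also assumes one record per line, as in the runner output before any wrapping) that hides this signature so the remaining WARN/ERROR lines stand out:

    # Hypothetical triage filter: suppress the known-benign default-pool
    # bootstrap race so real warnings and errors are easier to spot.
    import sys

    BENIGN_MARKERS = (
        "Resource pool default not found or you don't have access permissions",
        "completed, doublechecking",
        "error: path exist, request accepts it",
    )

    def is_benign_bootstrap(line):
        """True for lines that only describe the default-pool creation race."""
        return any(marker in line for marker in BENIGN_MARKERS)

    if __name__ == "__main__":
        for line in sys.stdin:
            if ("ERROR" in line or "WARN" in line) and not is_benign_bootstrap(line):
                sys.stdout.write(line)

The filter is deliberately conservative: it drops a line only on an exact marker substring, so any other TX_PROXY error still surfaces.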
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:06.214898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:06.225215Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09: ... 2025-03-28T09:14:11.685285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:14:11.689335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:14:14.778052Z node 2 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionOperationQueryActor] TraceId: c140ec84-e789651d-2045d578-f38979c0, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=OTJiYjBiNjktOTg3NDVlZjMtN2ZjZWUzMTAtMTcxZDc2Yzk=, TxId: 2025-03-28T09:14:16.676463Z node 2 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: c140ec84-e789651d-2045d578-f38979c0, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=NWY5ZGVkYWItM2MwNzMzOWQtOWE2Yjg0ZTctM2FlMWY2OGE=, TxId: 2025-03-28T09:14:16.849819Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TForgetScriptExecutionOperationActor] ExecutionId: c140ec84-e789651d-2045d578-f38979c0, reply NOT_FOUND, issues: {
: Error: No such execution } 2025-03-28T09:14:16.880899Z node 2 :KQP_PROXY WARN: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: c140ec84-e789651d-2045d578-f38979c0, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=2&id=ZDg0NzFjMDAtZDRiN2ZmMTUtOTkyMWU2ZTctZDBkMmVhOTk=, TxId: 2025-03-28T09:14:16.881005Z node 2 :KQP_PROXY WARN: [ScriptExecutions] [TCancelScriptExecutionOperationActor] ExecutionId: c140ec84-e789651d-2045d578-f38979c0, check lease failed 2025-03-28T09:14:17.500074Z node 2 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: c140ec84-e789651d-2045d578-f38979c0, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Script execution not found }, SessionId: ydb://session/3?node_id=2&id=MjdkY2UyYjItNjIyNmUyZjQtNTFmYmNhMTYtNzAyZTI5MWQ=, TxId: Trying to start YDB, gRPC: 28901, MsgBus: 10596 2025-03-28T09:14:19.806924Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786241726845743:2078];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001514/r3tmp/tmpsB7fU9/pdisk_1.dat 2025-03-28T09:14:20.016941Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:14:20.185699Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:20.339200Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:20.339295Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:20.343477Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28901, node 3 2025-03-28T09:14:20.549460Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:20.549483Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:20.549491Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:20.549619Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10596 TClient is connected to server localhost:10596 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:21.639881Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:21.655082Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:14:21.670823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
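The KQP_PROXY warnings above all carry the same execution id (c140ec84-e789651d-2045d578-f38979c0) through the Get/Forget/Cancel/GetResult actors, each finishing with NOT_FOUND as the TTL test expects. A small sketch, assuming only the "[Tag] [Actor] TraceId|ExecutionId: <id>" prefix shape visible here (script_execution_actors is a hypothetical helper), that groups the actors touching each execution id:

    # Hypothetical grouping of KQP_PROXY script-execution records by id,
    # to see every actor that logged about one execution.
    import re
    import sys
    from collections import defaultdict

    PREFIX = re.compile(
        r"\[(\w+)\] \[(\w+)\] (?:TraceId|ExecutionId): ([0-9a-f]{8}-[0-9a-f-]+)"
    )

    def script_execution_actors(text):
        """Map execution id -> actors that logged about it."""
        actors = defaultdict(set)
        for _, actor, exec_id in PREFIX.findall(text):
            actors[exec_id].add(actor)
        return actors

    if __name__ == "__main__":
        for exec_id, actors in script_execution_actors(sys.stdin.read()).items():
            print(exec_id, "->", ", ".join(sorted(actors)))

For the block above this should list TGetScriptExecutionOperationQueryActor, TCheckLeaseStatusQueryActor, TForgetScriptExecutionOperationActor, TCancelScriptExecutionOperationActor and TGetScriptExecutionResultQueryActor against the one forgotten execution.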
2025-03-28T09:14:21.865509Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:22.318499Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:22.474477Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:24.810196Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786241726845743:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:24.810253Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:25.567038Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786267496651248:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:25.567154Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:25.674309Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:25.764712Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:25.850939Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:25.963736Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:26.032783Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:26.126321Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:26.209023Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786271791619061:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:26.209126Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:26.209468Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786271791619066:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:26.214139Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:26.248236Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:14:26.249336Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786271791619068:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:14:26.350189Z node 3 :TX_PROXY ERROR: Actor# [3:7486786271791619126:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:27.959579Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:14:27.961673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:14:27.964963Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:14:31.315038Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153271349, txId: 281474976710706] shutting down 2025-03-28T09:14:31.698935Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153271727, txId: 281474976710709] shutting down 2025-03-28T09:14:32.016724Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153272035, txId: 281474976710712] shutting down 2025-03-28T09:14:32.054069Z node 3 :KQP_PROXY WARN: [TQueryBase] [TGetScriptExecutionResultQueryActor] TraceId: 6ddfd469-6a12953f-656d0bdb-a4088bda, State: Get results info, Finish with NOT_FOUND, Issues: {
: Error: Results are expired }, SessionId: ydb://session/3?node_id=3&id=NDM0ZWQ4MWItMjE1MzdmMWUtY2U2MDlhYmMtNjNmOWE5MDM=, TxId: |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |83.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpIndexes::SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel2 [GOOD] Test command err: Trying to start YDB, gRPC: 22794, MsgBus: 17274 2025-03-28T09:13:05.457567Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785922348347923:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:05.457641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00189d/r3tmp/tmpgKXbo7/pdisk_1.dat 2025-03-28T09:13:05.895003Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22794, node 1 2025-03-28T09:13:05.937421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:05.937646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:05.941340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:13:06.009780Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:06.009806Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:06.009814Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:06.009918Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17274 TClient is connected to server localhost:17274 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:06.590185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
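Result headers in this log share one shape: "------- [TM] {tags} <suite path> >> <Suite::Test> [VERDICT]", as in the ExecuteScriptWithResultsTtl block that just closed and the KqpIndexes block that opens next. A sketch of a verdict scraper for that shape — the regex is inferred from these lines, not a documented ya format:

    # Hypothetical verdict scraper for the "------- [TM] {...} suite >> Test
    # [VERDICT]" headers seen in this log.
    import re
    import sys

    VERDICT = re.compile(
        r"-{5,} \[\w+\] \{[^}]*\} (?P<suite>\S+) >> (?P<test>\S+) \[(?P<verdict>\w+)\]"
    )

    if __name__ == "__main__":
        for m in VERDICT.finditer(sys.stdin.read()):
            print(m["verdict"], m["suite"], m["test"], sep="\t")

Against this section it would emit GOOD rows for ExecuteScriptWithResultsTtl, SimpleVectorIndexOrderByCosineSimilarityNotNullableLevel2 and TPCDS90+ColumnStore; a failing verdict would surface through the same pattern.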
2025-03-28T09:13:06.625138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:06.815104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:07.024171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:07.111197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:09.060913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785939528218907:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:09.061025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:09.381548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:09.465326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:09.505923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:09.547225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:09.583349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:09.616645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:09.669588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785939528219419:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:09.669677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:09.669760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785939528219425:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:09.672789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:09.682592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785939528219427:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:09.779883Z node 1 :TX_PROXY ERROR: Actor# [1:7486785939528219480:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:10.457463Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785922348347923:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:10.457532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:10.761488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:10.983519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:13:11.073780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:13:20.890497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:13:20.890533Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 32283, MsgBus: 24795 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00189d/r3tmp/tmpsKnaRf/pdisk_1.dat 2025-03-28T09:13:37.307268Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:37.309617Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:37.341212Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:37.341313Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:37.344561Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32283, node 2 2025-03-28T09:13:37.563091Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:37.563116Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:37.563124Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:37.563259Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24795 TClient is connected to server localhost:24795 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:38.344171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:38.352156Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:38.362114Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:38.517731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:38.865076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:39.037930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:42.116295Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786080674306988:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:42.116439Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:42.187778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:42.223957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:42.312927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:42.429040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:42.504003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:42.591112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:42.692350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786080674307511:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:42.692449Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:42.692834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786080674307516:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:42.743061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:42.758226Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786080674307518:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:42.845881Z node 2 :TX_PROXY ERROR: Actor# [2:7486786080674307577:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:44.431509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:44.836597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:13:44.971877Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:13:45.076555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715760:0, at schemeshard: 72057594046644480 2025-03-28T09:13:45.153797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480 2025-03-28T09:13:45.241721Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037925 not found 2025-03-28T09:13:45.250614Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037926 not found 2025-03-28T09:13:45.254884Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037924 not found 2025-03-28T09:13:52.302795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:13:52.302830Z node 2 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS90+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 17408, MsgBus: 61906 2025-03-28T09:12:38.040161Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785802459054921:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:38.040535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001705/r3tmp/tmpVvDetA/pdisk_1.dat 2025-03-28T09:12:38.984104Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:39.039115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:39.039212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:39.042799Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 
2025-03-28T09:12:39.043975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17408, node 1 2025-03-28T09:12:39.327120Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:39.327139Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:39.327145Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:39.327255Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61906 TClient is connected to server localhost:61906 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:40.581879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:40.639393Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:43.032533Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785802459054921:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:43.032595Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:12:43.473380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785828228859237:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:43.473524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:43.473970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785828228859249:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:43.478329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:12:43.511178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785828228859251:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:12:43.614608Z node 1 :TX_PROXY ERROR: Actor# [1:7486785828228859304:2347] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:44.914507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:12:45.224316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785836818794148:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:12:45.224531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785836818794148:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:12:45.224772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785836818794148:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:12:45.224931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785836818794148:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:12:45.225054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785836818794148:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:12:45.225177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785836818794148:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:12:45.225239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785836818794136:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:12:45.225270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785836818794136:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:12:45.225286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785836818794148:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:12:45.225377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785836818794136:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:12:45.225387Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7486785836818794148:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:12:45.225468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785836818794136:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:12:45.225482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785836818794148:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:12:45.225564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785836818794136:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:12:45.225580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785836818794148:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:12:45.225783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785836818794136:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:12:45.225898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785836818794136:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:12:45.226002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785836818794136:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:12:45.226110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785836818794136:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:12:45.226194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785836818794136:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:12:45.226318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785836818794136:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:12:45.226431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486785836818794136:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:12:45.230180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785836818794148:2362];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:12:45.230335Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=7207518622403789 ... sTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.034607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.038642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.048863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.052038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.056431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.057131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.065266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.069216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.075723Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.078357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.086327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.093637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.101737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.104325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.115084Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.120028Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.129997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.130674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.140916Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.141376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.147731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.147861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.154618Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.157978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.161410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.164322Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.172817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.182241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.182580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.205050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.215968Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-28T09:13:59.223449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.233717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.242918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.249683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.274287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.419261Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.426256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:59.590978Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0e0dx3nq1r26jtb1er630", SessionId: ydb://session/3?node_id=1&id=ZDA0MDUxMTgtMTczNjc3YWQtODgwYzllMDEtNTc2NzJmYWQ=, Slow query, duration: 37.992627s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:14:00.570867Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:14:00.571245Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7486786004322555481:7900];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-03-28T09:14:00.571372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:14:00.571623Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:14:21.631684Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0ffc7358qsn55n71p7296", SessionId: 
ydb://session/3?node_id=1&id=ZDA0MDUxMTgtMTczNjc3YWQtODgwYzllMDEtNTc2NzJmYWQ=, Slow query, duration: 11.955974s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query90.tpl and seed 2031708268\nselect cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio\n from ( select count(*) amc\n from web_sales cross join household_demographics cross join time_dim cross join web_page\n where ws_sold_time_sk = time_dim.t_time_sk\n and ws_ship_hdemo_sk = household_demographics.hd_demo_sk\n and ws_web_page_sk = web_page.wp_web_page_sk\n and time_dim.t_hour between 9 and 9+1\n and household_demographics.hd_dep_count = 3\n and web_page.wp_char_count between 5000 and 5200) at cross join\n ( select count(*) pmc\n from web_sales cross join household_demographics cross join time_dim cross join web_page\n where ws_sold_time_sk = time_dim.t_time_sk\n and ws_ship_hdemo_sk = household_demographics.hd_demo_sk\n and ws_web_page_sk = web_page.wp_web_page_sk\n and time_dim.t_hour between 16 and 16+1\n and household_demographics.hd_dep_count = 3\n and web_page.wp_char_count between 5000 and 5200) pt\n order by am_pm_ratio\n limit 100;\n ", parameters: 0b >> SystemView::SystemViewFailOps [GOOD] >> SystemView::TabletsFields >> KqpQueryService::TableSink_OltpDelete [GOOD] |83.3%| [TA] $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestCreateTablet >> KqpQueryServiceScripts::TestTruncatedBySize [GOOD] >> THiveTest::TestLocalDisconnect >> THiveTest::TestHiveBalancerWithPrefferedDC1 >> TCutHistoryRestrictions::EmptyDenyList [GOOD] >> SystemView::ShowCreateTable [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleOut >> TCutHistoryRestrictions::SameTabletInBothLists [GOOD] >> SystemView::QueryStats >> TargetTrackingScaleRecommenderPolicy::ScaleOut [GOOD] >> THiveTest::TestDrain >> TargetTrackingScaleRecommenderPolicy::ScaleIn [GOOD] >> TCutHistoryRestrictions::BasicTest [GOOD] >> THeavyPerfTest::TTestLoadEverything >> THiveTest::TestUpdateChannelValues >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleOut [GOOD] |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleIn [GOOD] |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut >> TargetTrackingScaleRecommenderPolicy::SpikeResistance [GOOD] |83.3%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut >> TargetTrackingScaleRecommenderPolicy::NearTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::AtTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::Fluctuations [GOOD] >> TargetTrackingScaleRecommenderPolicy::FluctuationsBigNumbers [GOOD] >> TCutHistoryRestrictions::EmptyAllowList [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleInToMaxSeen [GOOD] >> KqpJoinOrder::TPCDS88-ColumnStore [GOOD] >> SystemView::PartitionStatsOneSchemeShardDataQuery [GOOD] >> KqpQueryService::TableSink_DisableSink [GOOD] >> TargetTrackingScaleRecommenderPolicy::Idle [GOOD] >> THiveTest::TestCreateTablet [GOOD] >> TStorageBalanceTest::TestScenario2 >> THiveTest::TestLocalDisconnect [GOOD] >> SystemView::TabletsFields [GOOD] >> THiveTest::TestUpdateChannelValues [GOOD] >> THiveTest::TestStorageBalancer >> SystemView::Nodes >> THiveTest::TestCreate100Tablets >> THiveTest::TestLocalReplacement >> 
TCutHistoryRestrictions::BothListsEmpty [GOOD] >> SystemView::TabletsFollowers >> KqpJoinOrder::CanonizedJoinOrderTPCH5 [GOOD] >> KqpQueryService::TableSink_OlapOrder [GOOD] >> THiveTest::TestCreate100Tablets [GOOD] >> THiveTest::TestStorageBalancer [GOOD] >> SystemView::DescribeSystemFolder [GOOD] >> SystemView::AuthGroups_Access [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC1 [GOOD] >> DbCounters::TabletsSimple [GOOD] >> JsonChangeRecord::DataChange [GOOD] >> KqpQueryService::ClosedSessionRemovedFromPool [GOOD] >> JsonChangeRecord::DataChangeVersion [GOOD] >> KqpQueryService::CloseConnection >> THiveTest::TestLocalReplacement [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC2 >> SystemView::AuthGroups_ResultOrder >> THiveTest::TestCreateSubHiveCreateTablet >> SystemView::DescribeAccessDenied >> KqpQueryService::TableSink_OlapRWQueries |83.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> LabeledDbCounters::OneTablet >> THiveTest::TestCreateSubHiveCreateTablet [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion >> THiveTest::TestHiveRestart >> THiveTest::TestRestartsWithFollower >> SystemView::TabletsFollowers [GOOD] |83.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |83.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |83.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |83.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |83.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |83.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> THiveTest::TestCreateSubHiveCreateManyTablets >> SystemView::TabletsRanges >> THiveTest::TestRestartsWithFollower [GOOD] >> THiveTest::TestStartTabletTwiceInARow >> THiveTest::TestHiveRestart [GOOD] >> THiveTest::TestStartTabletTwiceInARow [GOOD] >> THiveTest::TestLimitedNodeList >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject >> ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> SystemView::QueryStats [GOOD] >> KqpQueryService::TableSink_OlapRWQueries [GOOD] >> SystemView::AuthUsers_LockUnlock [GOOD] >> KqpService::ToDictCache-UseCache [GOOD] >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout [GOOD] >> SystemView::AuthUsers_Access >> SystemView::Nodes [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC2 [GOOD] >> THiveTest::TestDrain [GOOD] >> THiveTest::TestLimitedNodeList [GOOD] >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject [GOOD] >> SystemView::QueryStatsFields >> SystemView::PartitionStatsLocksFields >> THiveTest::TestDrainWithMaxTabletsScheduled >> THiveTest::TestSpreadNeighboursDifferentOwners >> THiveTest::TestHiveFollowersWithChangingDC >> ObjectDistribution::TestAllowedDomainsAndDown >> THiveTest::TestHiveBalancerWithPreferredDC3 >> THiveTest::TestHiveFollowersWithChangingDC [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes >> THiveTest::TestHiveBalancerWithPreferredDC3 [GOOD] >> SystemView::DescribeAccessDenied [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets >> SystemView::CollectPreparedQueries |83.3%| [TA] 
{RESULT} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} |83.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChange [GOOD] |83.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChangeVersion [GOOD] >> THiveTest::TestSpreadNeighboursDifferentOwners [GOOD] >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::ToDictCache-UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 1357, MsgBus: 25101 2025-03-28T09:13:23.261051Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786002466576896:2263];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.261112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001552/r3tmp/tmpY5o8la/pdisk_1.dat 2025-03-28T09:13:24.385000Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.414621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.414746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.417042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:24.427121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1357, node 1 2025-03-28T09:13:24.835215Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.835240Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.835247Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.835353Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25101 TClient is connected to server localhost:25101 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:13:26.328301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.352090Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:26.365058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.586558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.828603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.995208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.260742Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786002466576896:2263];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.260801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:29.053091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786028236382254:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:13:29.053202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:13:30.149881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:13:30.195361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:13:30.251347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:13:30.309481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:13:30.358403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:13:30.448929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:13:30.521449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786032531350080:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:13:30.521531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:13:30.521776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786032531350085:2467], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:13:30.526122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:13:30.543632Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-03-28T09:13:30.544558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786032531350087:2468], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.616532Z node 1 :TX_PROXY ERROR: Actor# [1:7486786032531350142:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:32.184466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.349936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:13:39.349963Z node 1 :IMPORT WARN: Table profiles were not loaded took: 24.869472s took: 24.894391s took: 24.901111s took: 24.902099s took: 24.914349s took: 24.912122s took: 24.921038s took: 24.909208s took: 24.928644s took: 24.940285s Trying to start YDB, gRPC: 11185, MsgBus: 3889 2025-03-28T09:13:58.615058Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786149807080683:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:58.615134Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001552/r3tmp/tmpX3DGeT/pdisk_1.dat 2025-03-28T09:13:58.871670Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:58.922599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:58.930747Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:58.933380Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11185, node 2 2025-03-28T09:13:59.149295Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:59.149329Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:59.149339Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:59.149484Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3889 TClient is connected to server localhost:3889 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:14:00.140206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpA ... Actor;event=timeout;self_id=[2:7486786149807080683:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:03.618468Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; took: 4.243971s took: 4.250315s took: 4.253191s took: 4.371634s took: 1.272868s took: 1.276479s took: 1.277790s took: 1.285709s took: 1.062203s took: 1.066116s took: 1.068790s took: 1.069474s took: 1.130363s took: 1.130939s took: 1.131206s took: 1.136848s took: 1.560427s took: 1.562836s took: 1.563750s took: 1.568052s took: 1.260763s took: 1.262488s took: 1.263173s took: 1.272015s took: 1.158340s took: 1.158758s took: 1.161717s took: 1.162242s took: 1.653740s took: 1.657859s took: 1.658773s took: 1.666769s 2025-03-28T09:14:13.863090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:14:13.863118Z node 2 :IMPORT WARN: Table profiles were not loaded took: 1.517838s took: 1.518873s took: 1.519296s took: 1.522812s took: 1.354757s took: 1.366083s took: 1.366418s took: 1.429895s took: 1.268368s took: 1.270429s took: 1.270104s took: 1.288036s took: 1.350701s took: 1.471920s took: 1.476542s took: 1.476924s took: 1.469124s took: 1.472948s took: 1.474338s took: 1.481568s took: 1.118911s took: 1.122711s took: 1.126068s took: 1.130582s took: 1.236717s took: 1.237583s took: 1.240285s took: 1.336931s took: 1.165081s took: 1.167318s took: 1.167854s took: 1.170457s Trying to start YDB, gRPC: 14388, MsgBus: 63747 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001552/r3tmp/tmpO4lEEv/pdisk_1.dat 2025-03-28T09:14:26.426083Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:14:26.650652Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:26.662400Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:26.666879Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:26.678265Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14388, node 3 2025-03-28T09:14:26.927374Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:26.927400Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:26.927408Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:26.927558Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63747 TClient is connected to server localhost:63747 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:27.873113Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:27.883929Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:14:32.108346Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786297807869770:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:32.108432Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786297807869778:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:32.108497Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:32.113296Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:14:32.114435Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786297807869817:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:32.114499Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786297807869821:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:32.114534Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786297807869822:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:32.114560Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786297807869823:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:32.114592Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:14:32.121979Z node 3 :TX_PROXY ERROR: Actor# [3:7486786297807869833:2330] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 }
2025-03-28T09:14:32.122200Z node 3 :TX_PROXY ERROR: Actor# [3:7486786297807869831:2328] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 }
2025-03-28T09:14:32.122266Z node 3 :TX_PROXY ERROR: Actor# [3:7486786297807869832:2329] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 }
2025-03-28T09:14:32.127980Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786297807869829:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-03-28T09:14:32.128025Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786297807869784:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-03-28T09:14:32.128053Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786297807869830:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-03-28T09:14:32.128109Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786297807869828:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:14:32.205658Z node 3 :TX_PROXY ERROR: Actor# [3:7486786297807869879:2369] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:32.208922Z node 3 :TX_PROXY ERROR: Actor# [3:7486786297807869886:2374] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:32.232285Z node 3 :TX_PROXY ERROR: Actor# [3:7486786297807869906:2384] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:32.232486Z node 3 :TX_PROXY ERROR: Actor# [3:7486786297807869911:2388] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } took: 5.345434s took: 5.348011s took: 5.349117s took: 5.349539s took: 1.292242s took: 1.299096s took: 1.302517s took: 1.445601s took: 1.097846s took: 1.100421s took: 1.111914s took: 1.118000s took: 1.222016s took: 1.221951s took: 1.224048s took: 1.225493s took: 0.964346s took: 0.973649s took: 0.974510s took: 0.977289s took: 0.962491s took: 0.964148s took: 0.967714s took: 0.991713s took: 0.887620s took: 0.888773s took: 0.888822s took: 0.889161s took: 0.924023s took: 0.925500s took: 0.926082s took: 0.926811s 2025-03-28T09:14:41.554479Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:14:41.554510Z node 3 :IMPORT WARN: Table profiles were not loaded took: 0.874442s took: 0.876132s took: 0.876264s took: 0.876366s took: 0.891325s took: 0.893118s took: 0.893318s took: 0.894252s took: 0.862744s took: 0.864751s took: 0.864781s took: 0.867874s took: 0.883036s took: 0.884503s took: 0.885929s took: 0.888311s took: 1.026145s took: 1.029447s took: 1.030130s took: 1.032788s took: 0.852518s took: 0.859458s took: 0.862226s took: 0.862687s took: 0.918175s took: 0.919142s took: 0.920750s took: 0.923829s took: 0.823663s took: 0.825174s took: 0.826227s took: 0.867365s ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS88-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 7016, MsgBus: 31344 2025-03-28T09:12:42.437487Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785826689494204:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:42.437644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001704/r3tmp/tmptaPmq5/pdisk_1.dat 2025-03-28T09:12:43.199865Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:43.220462Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:43.220564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:43.223503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7016, node 1 2025-03-28T09:12:43.458639Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:43.458656Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:43.458663Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:43.458817Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31344 TClient is connected to server localhost:31344 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:44.165250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:44.175231Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:12:46.523873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785843869364064:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:12:46.523974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:12:46.524320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785843869364076:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:12:46.528863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:12:46.552351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785843869364078:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-03-28T09:12:46.647186Z node 1 :TX_PROXY ERROR: Actor# [1:7486785843869364129:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:12:46.990787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-28T09:12:47.197524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-03-28T09:12:47.239762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:12:47.277753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:12:47.319627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:12:47.437356Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785826689494204:2059];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:12:47.437423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:12:47.646434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:12:47.725892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:12:47.797588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:12:47.858310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480
2025-03-28T09:12:47.920671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480
2025-03-28T09:12:47.961009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480
2025-03-28T09:12:48.001066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:12:48.030500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:12:48.753819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:12:48.812482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:12:48.945552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:12:49.035160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:12:49.091672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:12:49.169707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:12:49.227780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:12:49.299330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:12:49.354253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:12:49.422941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:12:49.535994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:12:49.611803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:12:49.655231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:12:49.702742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:12:49.762569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:12:49.815479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:12:49.861357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... _state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.156399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.159637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.160775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.164578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.165688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.168492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.171760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.172355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.176144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.177705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.180333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.183552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.184321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.188194Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.189136Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.192360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.195432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.196349Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:19.248338Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0d2wteqtwvhwnqsnvyhs7", SessionId: ydb://session/3?node_id=1&id=NzhkNGRjNDUtM2IyMmE2NDItYWRjOTBlZDMtYzI3YTk3MTg=, Slow query, duration: 27.893071s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:13:19.554146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:13:19.554181Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:13:19.554407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7486785955538540154:5905];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-03-28T09:13:19.554655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:14:36.150512Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0ew55cejpc5nn7kmvbbpr", SessionId: ydb://session/3?node_id=1&id=NzhkNGRjNDUtM2IyMmE2NDItYWRjOTBlZDMtYzI3YTk3MTg=, Slow query, duration: 46.155076s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query88.tpl and seed 318176889\nselect *\nfrom\n (select count(*) h8_30_to_9\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = 
household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 8\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s1 cross join\n (select count(*) h9_to_9_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s2 cross join\n (select count(*) h9_30_to_10\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 9\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s3 cross join\n (select count(*) h10_to_10_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s4 cross join\n (select count(*) h10_30_to_11\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 10\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s5 cross join\n (select count(*) h11_to_11_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n 
(household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s6 cross join\n (select count(*) h11_30_to_12\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 11\n and time_dim.t_minute >= 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s7 cross join\n (select count(*) h12_to_12_30\n from store_sales cross join household_demographics cross join time_dim cross join store\n where ss_sold_time_sk = time_dim.t_time_sk\n and ss_hdemo_sk = household_demographics.hd_demo_sk\n and ss_store_sk = s_store_sk\n and time_dim.t_hour = 12\n and time_dim.t_minute < 30\n and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or\n (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or\n (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2))\n and store.s_store_name = 'ese') s8\n;", parameters: 0b ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH5 [GOOD] Test command err: Trying to start YDB, gRPC: 16007, MsgBus: 8944 2025-03-28T09:12:53.506766Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785870532064377:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:12:53.509134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001702/r3tmp/tmpmeS3Jf/pdisk_1.dat 2025-03-28T09:12:54.190512Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:54.218776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:54.218876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:54.227859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16007, node 1 2025-03-28T09:12:54.527699Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:54.527755Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:54.527769Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:54.527897Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8944 TClient is connected to server localhost:8944 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:12:55.178816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:12:57.602384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785887711934228:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:57.602531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:57.602990Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785887711934240:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:57.608151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:12:57.637729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785887711934242:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:12:57.699277Z node 1 :TX_PROXY ERROR: Actor# [1:7486785887711934294:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:12:58.200663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:12:58.474864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785892006901836:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:12:58.474974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486785892006901834:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:12:58.475243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785892006901836:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:12:58.475244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486785892006901834:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:12:58.480060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486785892006901834:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:12:58.480272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486785892006901834:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:12:58.480373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486785892006901834:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:12:58.480481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486785892006901834:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:12:58.480561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486785892006901834:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:12:58.480632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486785892006901834:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:12:58.480720Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7486785892006901834:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:12:58.480777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785892006901836:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:12:58.480804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486785892006901834:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:12:58.480877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486785892006901834:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:12:58.480951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785892006901836:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:12:58.480962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486785892006901834:2349];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:12:58.481053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785892006901836:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:12:58.481193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785892006901836:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:12:58.481298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785892006901836:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:12:58.481405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785892006901836:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:12:58.481516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785892006901836:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:12:58.481709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785892006901836:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:12:58.481852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486785892006901836:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:12:58.481973Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7486785892006901836:2350];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:12:58.523315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785892006901873:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:12:58.523389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486785892006901873:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:12:58.523665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_i ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.258011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.261244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.265123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.267141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.272727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.272861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.279200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.283483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.285189Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.289565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.290888Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.295915Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.296476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.302120Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.302297Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039250;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.311040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.317500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.323221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.326991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.333409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.338751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.351121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.355568Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.362839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.367275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.372914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.376543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.384431Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.387987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.394490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.400449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.412533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.416121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.426862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.428705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.439542Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.441949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.451497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.457954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.465226Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.468256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.480702Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.486034Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.492411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.495961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:14:17.831533Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0eddz5d1b2tx11d9khtd1", SessionId: ydb://session/3?node_id=1&id=OGNhOTczOTgtOTRhNzRhYWEtOTkwYTM4NDctN2Y0N2M1ODY=, Slow query, duration: 42.919181s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:14:18.449358Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:14:18.449853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:14:18.450676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486786175474799836:10849];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-03-28T09:14:18.451090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_DisableSink [GOOD] Test command err: Trying to start YDB, gRPC: 29214, MsgBus: 10573 2025-03-28T09:14:01.160248Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786165451410293:2270];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:01.166962Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014f3/r3tmp/tmphak8gP/pdisk_1.dat 2025-03-28T09:14:01.807231Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:01.821244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:01.821342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:01.824708Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29214, node 1 2025-03-28T09:14:01.888783Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:01.888805Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:01.888817Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:01.888927Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10573 TClient is connected to server localhost:10573 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:02.488343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:02.532728Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:14:04.736425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786178336312624:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:04.736550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:05.312050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:05.487808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786182631280088:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:14:05.487809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486786182631280085:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:14:05.488030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786182631280088:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:14:05.488107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486786182631280085:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:14:05.489038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786182631280088:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:14:05.489291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486786182631280085:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:14:05.489734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786182631280088:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:14:05.489883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786182631280088:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:14:05.490007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786182631280088:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:14:05.490105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786182631280088:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:14:05.490839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786182631280088:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:14:05.491064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786182631280088:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:14:05.491214Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7486786182631280088:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:14:05.491329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786182631280088:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:14:05.491475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486786182631280088:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:14:05.491476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486786182631280085:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:14:05.491636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486786182631280085:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:14:05.491759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486786182631280085:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:14:05.491896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486786182631280085:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:14:05.492025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486786182631280085:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:14:05.492129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486786182631280085:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:14:05.492228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486786182631280085:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:14:05.492324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486786182631280085:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:14:05.492416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486786182631280085:2339];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:14:05.538861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786182631280090:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:14:05.538927Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7486786182631280090:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:14:05.539170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786182631280090:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:14:05.539307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786182631280090:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:14:05.539430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786182631280090:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:14:05.539575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786182631280090:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:14:05.539701Z node 1 :TX_COLU ... tract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:14:39.790281Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:14:39.790519Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:14:39.790559Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:14:39.790845Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:14:39.790891Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:14:39.815349Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:14:39.815433Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:14:39.815557Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:14:39.815592Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:14:39.815820Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:14:39.815869Z node 3 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:14:39.815968Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:14:39.816039Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:14:39.816123Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:14:39.816159Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:14:39.816207Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:14:39.816244Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:14:39.816905Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:14:39.816958Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:14:39.817200Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:14:39.817245Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:14:39.817400Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:14:39.817446Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:14:39.817676Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:14:39.817722Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:14:39.817854Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:14:39.817892Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:14:39.846718Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037898;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:39.847402Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:39.855025Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:39.855032Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:39.862916Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:39.862932Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:39.874231Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:39.874231Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:39.881096Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:39.881303Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:39.890937Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:39.893285Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:39.898765Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:39.898944Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:39.906542Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:39.906543Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:39.926744Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786327402566381:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:39.926848Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:39.927120Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786327402566386:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:39.931657Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:14:39.943629Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786327402566388:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:14:40.006823Z node 3 :TX_PROXY ERROR: Actor# [3:7486786331697533736:2717] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:40.101460Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7486786331697533760:2436] TxId: 281474976715661. Ctx: { TraceId: 01jqe0gcxj9zavv271frg8yb93, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NTA3OWM2YzktZDNhMzc5M2UtZjk0YzIwNWUtZDY4ODg3NDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Data manipulation queries do not support column shard tables. 2025-03-28T09:14:40.101676Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NTA3OWM2YzktZDNhMzc5M2UtZjk0YzIwNWUtZDY4ODg3NDg=, ActorId: [3:7486786327402566379:2436], ActorState: ExecuteState, TraceId: 01jqe0gcxj9zavv271frg8yb93, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OltpDelete [GOOD] Test command err: Trying to start YDB, gRPC: 2134, MsgBus: 24824 2025-03-28T09:14:02.958569Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786170258843606:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:02.975045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014d6/r3tmp/tmpEdkZti/pdisk_1.dat 2025-03-28T09:14:03.801688Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:03.829307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:03.829432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:03.838476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2134, node 1 2025-03-28T09:14:04.071326Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:04.071347Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:04.071361Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:04.071495Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24824 TClient is connected to server localhost:24824 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:04.717593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:07.519611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786191733680615:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:07.519727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:07.884129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:07.950062Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786170258843606:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:07.950164Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:08.075778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786196028648074:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:14:08.075987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786196028648074:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:14:08.076006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786196028648084:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:14:08.076103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786196028648084:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:14:08.076283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786196028648074:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:14:08.076351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786196028648084:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:14:08.076449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786196028648074:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:14:08.076472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786196028648084:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:14:08.076577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786196028648074:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:14:08.076593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786196028648084:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:14:08.076757Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7486786196028648074:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:14:08.076758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786196028648084:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:14:08.076919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786196028648084:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:14:08.076920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786196028648074:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:14:08.077271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786196028648074:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:14:08.077277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786196028648084:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:14:08.077433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786196028648084:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:14:08.077454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786196028648074:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:14:08.077579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786196028648084:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:14:08.077585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786196028648074:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:14:08.077718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786196028648084:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:14:08.077753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786196028648074:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:14:08.077833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786196028648084:2341];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:14:08.077849Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486786196028648074:2339];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:14:08.121938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486786196028648069:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:14:08.122039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486786196028648069:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:14:08.122293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486786196028648069:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:14:08.122419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486786196028648069:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:14:08.122532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486786196028648069:2337];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:14:08.122657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=720751862240 ... ::TEvPeriodicWakeup;tablet_id=72075186224037893;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:18.605928Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[2:7486786231984655728:2342];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037890;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:18.606280Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7486786231984655725:2341];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037892;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:18.607883Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7486786231984655852:2347];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037894;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:18.607883Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7486786231984655714:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:18.608406Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7486786231984655714:2339];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:18.608517Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[2:7486786231984655852:2347];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037894;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:18.608634Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[2:7486786231984655768:2346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:18.608721Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7486786231984655764:2344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037896;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:18.609245Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7486786231984655760:2343];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037891;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:18.609285Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[2:7486786231984655722:2340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037893;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:18.609542Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[2:7486786231984655725:2341];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037892;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:18.609808Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7486786231984655712:2338];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037895;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:18.755826Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[2:7486786231984655768:2346];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037897;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:18.756204Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7486786231984655712:2338];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037895;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:18.772088Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[2:7486786231984655764:2344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037896;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:19.019580Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-28T09:14:19.020202Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-28T09:14:19.023535Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:19.023729Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:14:19.092353Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486786240574591270:2589], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:2:29: Error: At function: KiWriteTable!
:2:29: Error: Missing key column in input: Col1 for table: /Root/DataShard, code: 2029 2025-03-28T09:14:19.093797Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTBhODQ3MmQtOWRhYmMwNjAtZGZjZmMxNGYtYzg1ZTczNGQ=, ActorId: [2:7486786240574591268:2588], ActorState: ExecuteState, TraceId: 01jqe0frgx3rqtwb4g3ka9176c, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 26093, MsgBus: 27043 2025-03-28T09:14:25.627299Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786266257375483:2103];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:25.627357Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014d6/r3tmp/tmpHwLREQ/pdisk_1.dat 2025-03-28T09:14:26.020684Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:26.059964Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:26.060062Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:26.079867Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26093, node 3 2025-03-28T09:14:26.267293Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:26.267317Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:26.267325Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:26.267449Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27043 TClient is connected to server localhost:27043 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:27.283594Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
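The PRECONDITION_FAILED compile error above (":2:29: Error: Missing key column in input: Col1 for table: /Root/DataShard, code: 2029") is the failure YQL reports when a write statement omits a primary-key column. A minimal sketch of a query that would fail this way, assuming a table /Root/DataShard whose primary key is Col1 — the test's actual DDL and DML are not shown in this log, so the schema below is hypothetical:

    -- hypothetical schema; the test's real DDL is not in this log
    CREATE TABLE `/Root/DataShard` (
        Col1 Uint64,
        Col2 String,
        PRIMARY KEY (Col1)
    );

    -- omits key column Col1, so KiWriteTable type annotation fails
    -- with "Missing key column in input: Col1" (code 2029)
    UPSERT INTO `/Root/DataShard` (Col2) VALUES ("value");

Because UPSERT (like INSERT and REPLACE) must locate the row being written, every primary-key column has to appear in the input column list; supplying Col1 in the column list and VALUES tuple would make the statement compile.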
2025-03-28T09:14:27.292697Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:14:30.633380Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786266257375483:2103];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:30.635474Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:30.646848Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786287732212584:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:30.646980Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:30.681076Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:30.851287Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786287732212691:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:30.851415Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:30.855161Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786287732212696:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:30.864949Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:14:30.890345Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786287732212698:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:14:30.970099Z node 3 :TX_PROXY ERROR: Actor# [3:7486786287732212749:2401] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_OlapRWQueries [GOOD] Test command err: Trying to start YDB, gRPC: 11896, MsgBus: 32127 2025-03-28T09:13:51.244932Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786119936478455:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:51.250081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001517/r3tmp/tmpbal3Yg/pdisk_1.dat 2025-03-28T09:13:52.262519Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:52.283534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:52.283627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:52.285419Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:13:52.287586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11896, node 1 2025-03-28T09:13:52.615194Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:52.615214Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:52.615225Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:52.615315Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32127 TClient is connected to server localhost:32127 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:13:53.677745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:53.721325Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:56.247476Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786119936478455:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:56.247533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:57.048640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786145706282774:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.048764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.409021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.619053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486786145706282931:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:13:57.619330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486786145706282931:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:13:57.619725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486786145706282931:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:13:57.619867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486786145706282931:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:13:57.619985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486786145706282931:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:13:57.620114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486786145706282931:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:13:57.620215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486786145706282931:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:13:57.620327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486786145706282931:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:13:57.620437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486786145706282931:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:13:57.620565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486786145706282931:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:13:57.620683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486786145706282931:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:13:57.620845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486786145706282931:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2025-03-28T09:13:57.621081Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786145706282961:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:13:57.621175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786145706282961:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:13:57.621362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786145706282961:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:13:57.621486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786145706282961:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:13:57.621620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786145706282961:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:13:57.621752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786145706282961:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:13:57.621871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786145706282961:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:13:57.621984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786145706282961:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:13:57.622113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786145706282961:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:13:57.622230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786145706282961:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:13:57.622367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786145706282961:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:13:57.622493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486786145706282961:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:13:57.660127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786145706282933:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:13:57.660201Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486786145706282933:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:13:57.660437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786145706282933:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:13:57.660539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486786145706282933:2340];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register; ... =CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:14:46.926023Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:14:46.926055Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:14:46.961486Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7486786356602193704:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:14:46.961567Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7486786356602193704:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:14:46.961844Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7486786356602193704:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:14:46.961984Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7486786356602193704:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:14:46.962103Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7486786356602193704:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:14:46.962218Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7486786356602193704:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:14:46.962333Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7486786356602193704:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:14:46.962465Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7486786356602193704:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:14:46.962585Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7486786356602193704:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 
2025-03-28T09:14:46.962742Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7486786356602193704:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:14:46.962918Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7486786356602193704:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:14:46.963046Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[3:7486786356602193704:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:14:46.966761Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:14:46.966831Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:14:46.966932Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:14:46.966969Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:14:46.967185Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:14:46.967228Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:14:46.967325Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:14:46.967366Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:14:46.967435Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:14:46.967465Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:14:46.967506Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:14:46.967539Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:14:46.968221Z node 3 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:14:46.968277Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:14:46.968486Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:14:46.968530Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:14:46.968673Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:14:46.968712Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:14:46.968915Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:14:46.968956Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:14:46.969087Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:14:46.969123Z node 3 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:14:46.985809Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:46.986292Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:46.993166Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:14:47.011765Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786360897161105:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:47.011839Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:47.011895Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786360897161110:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:47.016208Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:14:47.029188Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786360897161112:2362], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:14:47.114185Z node 3 :TX_PROXY ERROR: Actor# [3:7486786360897161163:2435] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:48.145860Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=281474976715665;tx_id=281474976715665;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715665; 2025-03-28T09:14:48.145898Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=281474976715665;tx_id=281474976715665;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715665; 2025-03-28T09:14:48.146553Z node 3 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=281474976715665;tx_id=281474976715665;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715665; 2025-03-28T09:14:48.174363Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786343717291095:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:48.174466Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::ExecuteScriptWithCancelAfterAndTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 25412, MsgBus: 13760 2025-03-28T09:13:53.662595Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786128108494704:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:53.662632Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00150d/r3tmp/tmpA2pNTR/pdisk_1.dat 2025-03-28T09:13:54.171838Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:54.177361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:54.177486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:54.189881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25412, node 1 2025-03-28T09:13:54.317414Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:54.317435Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:54.317442Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:54.317564Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13760 TClient is connected to server localhost:13760 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:55.153395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:55.225466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:55.402403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:55.613714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:13:55.715223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.865220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786145288365638:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.865308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:58.211510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.282398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.372027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.435160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.542385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.645503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:58.676979Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786128108494704:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:58.678407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:58.740934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786149583333458:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:58.741129Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:58.741518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786149583333463:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:58.746320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:58.762509Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:13:58.762783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786149583333465:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:58.847726Z node 1 :TX_PROXY ERROR: Actor# [1:7486786149583333522:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:00.476013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.476974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:14:00.478175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6287, MsgBus: 21654 2025-03-28T09:14:04.413510Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786176956399044:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:04.420256Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00150d/r3tmp/tmp9RH3UR/pdisk_1.dat 2025-03-28T09:14:04.581815Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:04.583493Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:04.583566Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:04.586017Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6287, node 2 2025-03-28T09:14:04.683239Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:04.683260Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:04.683268Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:04.683369Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21654 TClient is connected to server localhost:21654 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:05.152146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:05.177183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, ... type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:34.142107Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:34.453707Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786305865397589:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:34.453832Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:34.459721Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786305865397594:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:34.468530Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:34.491345Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486786305865397597:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:14:34.564568Z node 4 :TX_PROXY ERROR: Actor# [4:7486786305865397654:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:36.176378Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:14:36.178620Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:14:36.180193Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:14:36.611455Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjIyZGQ2NmYtMjNhY2ZjMzgtMTE0ZGQyNWUtNDA3MDRiZDg=, ActorId: [4:7486786314455332817:2515], ActorState: ExecuteState, TraceId: 01jqe0g98ad23pd64checjajfv, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 7512, MsgBus: 9660 2025-03-28T09:14:39.500272Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486786329337496147:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:39.500336Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00150d/r3tmp/tmpVFq98c/pdisk_1.dat 2025-03-28T09:14:39.683186Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:39.702824Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:39.702939Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:39.707703Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7512, node 5 2025-03-28T09:14:39.775830Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:39.775848Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:39.775855Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:39.775967Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9660 TClient is connected to server localhost:9660 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:40.439550Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:40.464330Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:40.571298Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:40.736457Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:40.818650Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:43.624984Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486786346517367115:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:43.625093Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:43.690767Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:43.733345Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:43.770997Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:43.813772Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:43.862455Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:43.941760Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:43.992020Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486786346517367630:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:43.992112Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:43.992298Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486786346517367635:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:43.997040Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:44.010897Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486786346517367637:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:14:44.085181Z node 5 :TX_PROXY ERROR: Actor# [5:7486786350812334986:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:44.500562Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486786329337496147:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:44.500640Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:45.324028Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:14:45.326470Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:14:45.332534Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:14:45.871281Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=NTM1MDkwOTMtYWRjMmI3NjItOGUwNTdmZmQtZDhkN2RiZjI=, ActorId: [5:7486786355107302876:2513], ActorState: ExecuteState, TraceId: 01jqe0gj620vwjc8cd0vmw8p4d, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryServiceScripts::TestTruncatedBySize [GOOD] Test command err: Trying to start YDB, gRPC: 8695, MsgBus: 23201 2025-03-28T09:13:33.400706Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786044860342913:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:33.404545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001522/r3tmp/tmpo0Mdwx/pdisk_1.dat 2025-03-28T09:13:33.897530Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8695, node 1 2025-03-28T09:13:33.904948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:33.905048Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:33.915807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:13:34.073286Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:34.073308Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:34.073314Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:34.073403Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23201 TClient is connected to 
server localhost:23201 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:34.897774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:34.919504Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:34.936196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:35.209451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:35.460991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:35.596524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:38.387691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786066335181179:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.387818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:38.406978Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786044860342913:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:38.416675Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:38.869504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:38.923216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.005799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.071841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.120924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.184855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.285406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786070630149002:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.285483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.285826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786070630149007:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:39.290337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:39.306460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786070630149009:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:39.375559Z node 1 :TX_PROXY ERROR: Actor# [1:7486786070630149065:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9849, MsgBus: 16498 2025-03-28T09:13:41.675554Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786076305241567:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:41.675676Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001522/r3tmp/tmpwcXkA7/pdisk_1.dat 2025-03-28T09:13:42.058863Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:42.092961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:42.093060Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:42.099019Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9849, node 2 2025-03-28T09:13:42.347338Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:42.347359Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:42.347366Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:42.347478Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16498 TClient is connected to server localhost:16498 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:43.473016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:43.485523Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:13:43.493689Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:43.595414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:43.854548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... ... <|idx:1785|>,<|idx:1786|>, ... ,<|idx
:1991|>,<|idx:1992|>,<|idx:1993|>,<|idx:1994|>,<|idx:1995|>,<|idx:1996|>,<|idx:1997|>,<|idx:1998|>,<|idx:1999|>]);", parameters: 0b 2025-03-28T09:14:22.600564Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153262550, txId: 281474976716235] shutting down Trying to start YDB, gRPC: 5182, MsgBus: 17316 2025-03-28T09:14:24.160473Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786262686562645:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:24.160526Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001522/r3tmp/tmpdSTZiC/pdisk_1.dat 2025-03-28T09:14:24.425549Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:24.429149Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:24.429253Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:24.430608Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5182, node 3 2025-03-28T09:14:24.627522Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:24.627548Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:24.627558Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:24.627720Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17316 TClient is connected to server localhost:17316 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:25.743600Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:25.766955Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:14:25.889076Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:26.215151Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:26.340822Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:29.046045Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786284161400908:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:29.050915Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:29.104355Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:29.139427Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:29.168069Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786262686562645:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:29.168137Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:29.189174Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:29.233462Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:29.269191Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:29.335583Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:29.422755Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786284161401421:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:29.422857Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:29.423269Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786284161401426:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:29.431053Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:29.445361Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:14:29.446015Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786284161401428:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:14:29.510678Z node 3 :TX_PROXY ERROR: Actor# [3:7486786284161401484:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:31.447370Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:14:31.449761Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:14:31.461868Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:14:37.630774Z node 3 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153277628, txId: 281474976710752] shutting down >> SystemView::TabletsRanges [GOOD] >> SystemView::TabletsRangesPredicateExtractDisabled >> SystemView::AuthGroups_ResultOrder [GOOD] >> SystemView::AuthGroupMembers |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge >> KqpService::CloseSessionsWithLoad [GOOD] >> KqpService::PatternCache |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics [GOOD] >> THiveTest::TestRestartTablets >> KqpQueryService::DdlSecret [GOOD] >> KqpQueryService::DdlMixedDml >> TTxAllocatorClientTest::Boot |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> SystemView::TopPartitionsByCpuFields [GOOD] >> SystemView::TopPartitionsByCpuTables >> TTxAllocatorClientTest::ZeroRange >> TTxAllocatorClientTest::InitiatingRequest |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots >> KqpJoin::JoinMismatchDictKeyTypes |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |83.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap >> THiveTest::TestHiveBalancerWithSystemTablets [GOOD] >> 
THiveTest::TestHiveBalancerWithFollowers >> SystemView::QueryStatsFields [GOOD] >> SystemView::PartitionStatsTtlFields >> THiveTest::TestRestartTablets [GOOD] >> THiveTest::TestServerlessComputeResourcesMode >> TTxAllocatorClientTest::InitiatingRequest [GOOD] >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] >> TTxAllocatorClientTest::Boot [GOOD] |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |83.5%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] Test command err: 2025-03-28T09:14:54.373527Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-28T09:14:54.387462Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-28T09:14:54.404714Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-28T09:14:54.541413Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.587403Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-28T09:14:54.648321Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.648418Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.648541Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-28T09:14:54.648636Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.648710Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.658002Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-28T09:14:54.658220Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] Test command err: 2025-03-28T09:14:54.373526Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-28T09:14:54.387438Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-28T09:14:54.404736Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-28T09:14:54.536894Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.584711Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-28T09:14:54.647500Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.647612Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.647745Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-28T09:14:54.647905Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.647960Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.657934Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-28T09:14:54.658166Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-28T09:14:54.658972Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-03-28T09:14:54.689579Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.689653Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.689831Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-03-28T09:14:54.689875Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 5000 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] Test command err: 2025-03-28T09:14:54.375152Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-28T09:14:54.391051Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-28T09:14:54.406939Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-28T09:14:54.537113Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.585305Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-28T09:14:54.647738Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.647857Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.648000Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-28T09:14:54.648146Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.648192Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.657933Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-28T09:14:54.658217Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-28T09:14:54.659023Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-03-28T09:14:54.687151Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.687249Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.687427Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2025-03-28T09:14:54.687469Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 5000 2025-03-28T09:14:54.687684Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-03-28T09:14:54.687873Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-03-28T09:14:54.688094Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-03-28T09:14:54.688276Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. 
Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-03-28T09:14:54.688421Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-03-28T09:14:54.713755Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.713850Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.713996Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 5000 Reserved to# 10000 2025-03-28T09:14:54.714042Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 5000 to# 10000 2025-03-28T09:14:54.714287Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-03-28T09:14:54.714529Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-03-28T09:14:54.714729Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 2500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-03-28T09:14:54.714978Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2025-03-28T09:14:54.715115Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000 2025-03-28T09:14:54.715594Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.715663Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:14:54.715776Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 10000 Reserved to# 15000 2025-03-28T09:14:54.715821Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 10000 to# 15000 2025-03-28T09:14:54.716032Z node 1 :TX_ALLOCATOR_CLIENT WARN: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 3000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |83.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |83.5%| [TA] $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |83.5%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage >> KqpQueryService::CloseConnection [GOOD] |83.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestServerlessComputeResourcesMode [GOOD] >> THiveTest::TestResetServerlessComputeResourcesMode >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] >> SystemView::CollectPreparedQueries [GOOD] >> SystemView::CollectScanQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::CloseConnection [GOOD] Test command err: Trying to start YDB, gRPC: 25387, MsgBus: 10176 2025-03-28T09:13:23.278783Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785998978081989:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.279782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00155f/r3tmp/tmpxE6CzT/pdisk_1.dat 2025-03-28T09:13:24.086136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.086247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.094810Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.128081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25387, node 1 2025-03-28T09:13:24.468726Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.468743Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.468749Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.468835Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10176 TClient is connected to server localhost:10176 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:13:25.666585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.749404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.342553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.564677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.679995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:28.245369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786020452920238:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.245508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.279994Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785998978081989:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.280064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:30.153402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.219449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.317816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.364019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.438263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.513468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.582513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786029042855368:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.582609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.582711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786029042855373:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.588666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.605502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786029042855375:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.700496Z node 1 :TX_PROXY ERROR: Actor# [1:7486786029042855431:3467] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 29987, MsgBus: 22006 2025-03-28T09:13:34.619851Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786048666624191:2150];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:34.622983Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00155f/r3tmp/tmp2CMD0g/pdisk_1.dat 2025-03-28T09:13:34.839722Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:34.839808Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:34.841209Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:34.876383Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29987, node 2 2025-03-28T09:13:34.954510Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:34.954530Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:34.954539Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:34.954700Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22006 TClient is connected to server localhost:22006 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:35.492858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:35.513145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:13:35.625455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:35.860648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:35.944989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:38.075427Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService ... ZDYzMDA0OTYtODdkYjgyYzgtYzRjMGY0ZDQ=, ActorId: [4:7486786389197722255:2582], ActorState: ExecuteState, TraceId: 01jqe0gt9c7x6q04syj64g9r92, Create QueryResponse for error on request, msg: 2025-03-28T09:14:53.719873Z node 4 :RPC_REQUEST WARN: Client lost 2025-03-28T09:14:53.720203Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7486786389197722357:2606] TxId: 281474976715685. Ctx: { TraceId: 01jqe0gtbw0m3zejbj5qzgj9ev, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Mzk5OTZiMTgtYjFlNGUxMDgtMzRiYzRkMGMtMzJmMzFjYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-28T09:14:53.775724Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486786389197722368:2612], TxId: 281474976715685, task: 5. Ctx: { SessionId : ydb://session/3?node_id=4&id=Mzk5OTZiMTgtYjFlNGUxMDgtMzRiYzRkMGMtMzJmMzFjYzE=. TraceId : 01jqe0gtbw0m3zejbj5qzgj9ev. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7486786389197722357:2606], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:14:53.776309Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7486786389197722357:2606] TxId: 281474976715685. Ctx: { TraceId: 01jqe0gtbw0m3zejbj5qzgj9ev, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Mzk5OTZiMTgtYjFlNGUxMDgtMzRiYzRkMGMtMzJmMzFjYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event while waiting for shutdown: NYql::NDq::TEvDqCompute::TEvChannelData 2025-03-28T09:14:53.777837Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=Mzk5OTZiMTgtYjFlNGUxMDgtMzRiYzRkMGMtMzJmMzFjYzE=, ActorId: [4:7486786389197722354:2606], ActorState: ExecuteState, TraceId: 01jqe0gtbw0m3zejbj5qzgj9ev, Create QueryResponse for error on request, msg: 2025-03-28T09:14:53.935287Z node 4 :RPC_REQUEST WARN: Client lost 2025-03-28T09:14:53.935679Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7486786389197722671:2686] TxId: 281474976715696. Ctx: { TraceId: 01jqe0gtj8eb940smsakzxd9b4, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ODJhZWNkZWEtZjYzMmYzMGItNDM3MmNkMTktNjI0MmRlMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-28T09:14:53.935801Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486786389197722683:2692], TxId: 281474976715696, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ODJhZWNkZWEtZjYzMmYzMGItNDM3MmNkMTktNjI0MmRlMzc=. TraceId : 01jqe0gtj8eb940smsakzxd9b4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7486786389197722671:2686], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:14:53.971428Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486786389197722681:2690], TxId: 281474976715696, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ODJhZWNkZWEtZjYzMmYzMGItNDM3MmNkMTktNjI0MmRlMzc=. TraceId : 01jqe0gtj8eb940smsakzxd9b4. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7486786389197722671:2686], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:14:53.972879Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ODJhZWNkZWEtZjYzMmYzMGItNDM3MmNkMTktNjI0MmRlMzc=, ActorId: [4:7486786389197722668:2686], ActorState: ExecuteState, TraceId: 01jqe0gtj8eb940smsakzxd9b4, Create QueryResponse for error on request, msg: 2025-03-28T09:14:54.095058Z node 4 :RPC_REQUEST WARN: Client lost 2025-03-28T09:14:54.095333Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7486786393492690168:2735] TxId: 281474976715703. Ctx: { TraceId: 01jqe0gtq39an05r5kdna40a7v, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MzAwZTNlNDUtYTExYzI0MGQtMzRiNWJiNjUtNzMzNjEzODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-28T09:14:54.145404Z node 4 :RPC_REQUEST WARN: Client lost 2025-03-28T09:14:54.146176Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7486786393492690190:2738] TxId: 281474976715704. Ctx: { TraceId: 01jqe0gtrn2ea6kjfmsz5aw4g6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ODQ0MTAzNDgtMjUwYmNlNmItMTEwM2ZiMGEtMzQ4N2ZmMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-28T09:14:54.148042Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7486786393492690168:2735] TxId: 281474976715703. Ctx: { TraceId: 01jqe0gtq39an05r5kdna40a7v, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MzAwZTNlNDUtYTExYzI0MGQtMzRiNWJiNjUtNzMzNjEzODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event while waiting for shutdown: NKikimr::NKqp::TEvKqpNode::TEvStartKqpTasksResponse 2025-03-28T09:14:54.148255Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7486786393492690190:2738] TxId: 281474976715704. Ctx: { TraceId: 01jqe0gtrn2ea6kjfmsz5aw4g6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ODQ0MTAzNDgtMjUwYmNlNmItMTEwM2ZiMGEtMzQ4N2ZmMjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Unexpected event while waiting for shutdown: NKikimr::NKqp::TEvKqpNode::TEvStartKqpTasksResponse 2025-03-28T09:14:54.766835Z node 4 :RPC_REQUEST WARN: Client lost 2025-03-28T09:14:54.767447Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7486786393492690914:2929] TxId: 281474976715727. Ctx: { TraceId: 01jqe0gvb91egv78b9te65z1zc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NTQzZDg1MDEtNGI3MDQzMzQtODZhM2VlYjQtYmY0MWE3MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-28T09:14:54.767600Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486786393492690925:2935], TxId: 281474976715727, task: 5. Ctx: { TraceId : 01jqe0gvb91egv78b9te65z1zc. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=NTQzZDg1MDEtNGI3MDQzMzQtODZhM2VlYjQtYmY0MWE3MDE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7486786393492690914:2929], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:14:54.784354Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486786393492690923:2933], TxId: 281474976715727, task: 3. Ctx: { SessionId : ydb://session/3?node_id=4&id=NTQzZDg1MDEtNGI3MDQzMzQtODZhM2VlYjQtYmY0MWE3MDE=. TraceId : 01jqe0gvb91egv78b9te65z1zc. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7486786393492690914:2929], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:14:54.786252Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NTQzZDg1MDEtNGI3MDQzMzQtODZhM2VlYjQtYmY0MWE3MDE=, ActorId: [4:7486786393492690911:2929], ActorState: ExecuteState, TraceId: 01jqe0gvb91egv78b9te65z1zc, Create QueryResponse for error on request, msg: 2025-03-28T09:14:55.096048Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MzAwZTNlNDUtYTExYzI0MGQtMzRiNWJiNjUtNzMzNjEzODY=, ActorId: [4:7486786393492690165:2735], ActorState: ExecuteState, TraceId: 01jqe0gtq39an05r5kdna40a7v, Create QueryResponse for error on request, msg: 2025-03-28T09:14:55.146604Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ODQ0MTAzNDgtMjUwYmNlNmItMTEwM2ZiMGEtMzQ4N2ZmMjM=, ActorId: [4:7486786393492690187:2738], ActorState: ExecuteState, TraceId: 01jqe0gtrn2ea6kjfmsz5aw4g6, Create QueryResponse for error on request, msg: 2025-03-28T09:14:55.154894Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486786393492690200:2741], TxId: 281474976715703, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqe0gtq39an05r5kdna40a7v. SessionId : ydb://session/3?node_id=4&id=MzAwZTNlNDUtYTExYzI0MGQtMzRiNWJiNjUtNzMzNjEzODY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle undelivered TEvState event, abort execution 2025-03-28T09:14:55.156795Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486786393492690202:2742], TxId: 281474976715703, task: 3. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MzAwZTNlNDUtYTExYzI0MGQtMzRiNWJiNjUtNzMzNjEzODY=. TraceId : 01jqe0gtq39an05r5kdna40a7v. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle undelivered TEvState event, abort execution 2025-03-28T09:14:55.157117Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486786393492690203:2743], TxId: 281474976715703, task: 4. Ctx: { SessionId : ydb://session/3?node_id=4&id=MzAwZTNlNDUtYTExYzI0MGQtMzRiNWJiNjUtNzMzNjEzODY=. CustomerSuppliedId : . TraceId : 01jqe0gtq39an05r5kdna40a7v. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle undelivered TEvState event, abort execution 2025-03-28T09:14:55.157367Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486786393492690205:2745], TxId: 281474976715704, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ODQ0MTAzNDgtMjUwYmNlNmItMTEwM2ZiMGEtMzQ4N2ZmMjM=. TraceId : 01jqe0gtrn2ea6kjfmsz5aw4g6. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle undelivered TEvState event, abort execution 2025-03-28T09:14:55.157580Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486786393492690204:2744], TxId: 281474976715703, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=MzAwZTNlNDUtYTExYzI0MGQtMzRiNWJiNjUtNzMzNjEzODY=. TraceId : 01jqe0gtq39an05r5kdna40a7v. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle undelivered TEvState event, abort execution 2025-03-28T09:14:55.157896Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486786393492690208:2747], TxId: 281474976715704, task: 3. Ctx: { CustomerSuppliedId : . TraceId : 01jqe0gtrn2ea6kjfmsz5aw4g6. SessionId : ydb://session/3?node_id=4&id=ODQ0MTAzNDgtMjUwYmNlNmItMTEwM2ZiMGEtMzQ4N2ZmMjM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle undelivered TEvState event, abort execution 2025-03-28T09:14:55.158178Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486786393492690210:2749], TxId: 281474976715704, task: 5. 
Ctx: { TraceId : 01jqe0gtrn2ea6kjfmsz5aw4g6. SessionId : ydb://session/3?node_id=4&id=ODQ0MTAzNDgtMjUwYmNlNmItMTEwM2ZiMGEtMzQ4N2ZmMjM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle undelivered TEvState event, abort execution 2025-03-28T09:14:55.158607Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486786393492690199:2740], TxId: 281474976715703, task: 1. Ctx: { SessionId : ydb://session/3?node_id=4&id=MzAwZTNlNDUtYTExYzI0MGQtMzRiNWJiNjUtNzMzNjEzODY=. TraceId : 01jqe0gtq39an05r5kdna40a7v. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle undelivered TEvState event, abort execution 2025-03-28T09:14:55.159240Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486786393492690206:2746], TxId: 281474976715704, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=ODQ0MTAzNDgtMjUwYmNlNmItMTEwM2ZiMGEtMzQ4N2ZmMjM=. CustomerSuppliedId : . TraceId : 01jqe0gtrn2ea6kjfmsz5aw4g6. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle undelivered TEvState event, abort execution 2025-03-28T09:14:55.159484Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486786393492690209:2748], TxId: 281474976715704, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=ODQ0MTAzNDgtMjUwYmNlNmItMTEwM2ZiMGEtMzQ4N2ZmMjM=. TraceId : 01jqe0gtrn2ea6kjfmsz5aw4g6. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle undelivered TEvState event, abort execution >> SystemView::AuthUsers_Access [GOOD] >> SystemView::AuthUsers_ResultOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::TabletsRangesPredicateExtractDisabled [GOOD] Test command err: 2025-03-28T09:14:12.814307Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786211446673220:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:13.228794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001569/r3tmp/tmpEpfAEs/pdisk_1.dat 2025-03-28T09:14:13.771699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:13.771817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:13.779636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:14:13.792802Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11297, node 1 2025-03-28T09:14:14.083276Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:14.083299Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:14.083305Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:14.083394Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17469 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:14.965800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:15.007144Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:14:17.786856Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786211446673220:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:17.786921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:18.420390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786237216477470:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:18.420700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:18.424796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786237216477497:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:18.430108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:14:18.452800Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:14:18.453491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786237216477499:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:14:18.517071Z node 1 :TX_PROXY ERROR: Actor# [1:7486786237216477550:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:21.369503Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe0fmzzckfckhxkc2pp66sm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGFjNGIzNTItNTQzZWI3MGUtM2ZjYzE0Y2QtZWIxOGFjMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:14:21.593407Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7486786250101379498:2353], owner: [1:7486786250101379494:2351], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-03-28T09:14:21.602683Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7486786250101379498:2353], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:14:21.651478Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7486786250101379498:2353], row count: 1, finished: 1 2025-03-28T09:14:21.651540Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7486786250101379498:2353], owner: [1:7486786250101379494:2351], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-03-28T09:14:21.821110Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153261234, txId: 281474976710660] shutting down test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001569/r3tmp/tmpqspP9y/pdisk_1.dat 2025-03-28T09:14:23.401458Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:14:23.457810Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:23.483435Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:23.483506Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:23.484679Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29639, node 2 2025-03-28T09:14:23.731242Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:23.731265Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:23.731272Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:23.731377Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3968 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:24.192981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:24.219207Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:14:27.417876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786276462895261:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:27.418027Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:27.418781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786276462895286:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:27.423267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:14:27.437211Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786276462895288:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:14:27.530492Z node 2 :TX_PROXY ERROR: Actor# [2:7486786276462895339:2339] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:27.667090Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe0fxm00p9pt7nm6r75d19b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzgxYTJkMzAtZGVjODczZTYtNzg1MGZkYjEtMjMxOWExZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:14:27.670963Z node 2 :SYSTEM_VIEWS INFO: Scan started, actor: [2:7486786276462895374:2341], owner: [2:7486786276462895370:2339], scan id: 0, table id: [72057594046644480:1:0:ds_storage_pools] 2025-03-28T09:14:27.675130Z node 2 :SYSTEM_VIEWS INFO: Scan prepared, actor: [2:7486786276462895374:2341], schemeshard id: 72057594046644480, hive id: 720575940379 ... scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-28T09:14:51.264034Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153291256, txId: 281474976715673] shutting down 2025-03-28T09:14:51.379884Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jqe0gr036e60tge1mqczhy0j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTVmZTdkMi1kMTllZTVkYi1lZjAwOTA3Ni1lNGUyMWM5OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:14:51.381244Z node 10 :SYSTEM_VIEWS INFO: Scan started, actor: [10:7486786380433244277:2430], owner: [10:7486786380433244273:2428], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-28T09:14:51.381899Z node 10 :SYSTEM_VIEWS INFO: Scan prepared, actor: [10:7486786380433244277:2430], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:14:51.382286Z node 10 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [10:7486786380433244277:2430], row count: 3, finished: 1 2025-03-28T09:14:51.382318Z node 10 :SYSTEM_VIEWS INFO: Scan finished, actor: [10:7486786380433244277:2430], owner: [10:7486786380433244273:2428], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-28T09:14:51.385055Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153291378, txId: 281474976715675] shutting down 2025-03-28T09:14:51.549367Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqe0gr4zez8w2e0pasac08vn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=N2IwNGYzOTctZTFhZDZmOTQtYzRiMzE4ZTktZDQ4YjI1ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:14:51.552019Z node 10 :SYSTEM_VIEWS INFO: Scan started, actor: [10:7486786380433244308:2439], owner: [10:7486786380433244305:2437], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-28T09:14:51.552745Z node 10 :SYSTEM_VIEWS INFO: Scan prepared, actor: [10:7486786380433244308:2439], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:14:51.553189Z node 10 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [10:7486786380433244308:2439], row count: 4, finished: 1 2025-03-28T09:14:51.553223Z node 10 :SYSTEM_VIEWS INFO: Scan finished, actor: [10:7486786380433244308:2439], owner: [10:7486786380433244305:2437], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-28T09:14:51.556430Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153291548, txId: 281474976715677] shutting down 2025-03-28T09:14:51.679665Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7486786358958406512:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:51.679756Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:51.693527Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jqe0gr97e57sbby9yt8mvnxw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YWU2ZjFmZjMtMjI4MzZkNDAtYzViOTQ3OTItZWE5OTkzYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:14:51.695796Z node 10 :SYSTEM_VIEWS INFO: Scan started, actor: [10:7486786380433244341:2448], owner: [10:7486786380433244337:2446], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-28T09:14:51.696655Z node 10 :SYSTEM_VIEWS INFO: Scan prepared, actor: [10:7486786380433244341:2448], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:14:51.697185Z node 10 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [10:7486786380433244341:2448], row count: 4, finished: 1 2025-03-28T09:14:51.697228Z node 10 :SYSTEM_VIEWS INFO: Scan finished, actor: [10:7486786380433244341:2448], owner: [10:7486786380433244337:2446], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-28T09:14:51.700760Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153291692, txId: 281474976715679] shutting down 2025-03-28T09:14:52.621934Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7486786385229503707:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:52.622091Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001569/r3tmp/tmpFIxNWv/pdisk_1.dat 2025-03-28T09:14:52.746844Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:52.780155Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-03-28T09:14:52.780290Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:52.782285Z node 11 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22832, node 11 2025-03-28T09:14:52.829712Z node 11 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:52.829737Z node 11 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:52.829746Z node 11 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:52.829904Z node 11 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:53.194138Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:53.204636Z node 11 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:14:56.354501Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:56.452752Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7486786402409373818:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:56.452778Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7486786402409373826:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:56.452842Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:56.456883Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:14:56.467576Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7486786402409373832:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:14:56.536042Z node 11 :TX_PROXY ERROR: Actor# [11:7486786402409373883:2493] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:56.798409Z node 11 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe0gx208440h6cekp3mhnqk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=11&id=ZTU2YzQ1ZTgtNWU0MjAxODQtNjM2MjZiMWYtNWRmZDM0MzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:14:56.800228Z node 11 :SYSTEM_VIEWS INFO: Scan started, actor: [11:7486786402409373933:2365], owner: [11:7486786402409373932:2364], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-28T09:14:56.800710Z node 11 :SYSTEM_VIEWS INFO: Scan prepared, actor: [11:7486786402409373933:2365], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:14:56.801148Z node 11 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [11:7486786402409373933:2365], row count: 4, finished: 1 2025-03-28T09:14:56.801175Z node 11 :SYSTEM_VIEWS INFO: Scan finished, actor: [11:7486786402409373933:2365], owner: [11:7486786402409373932:2364], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-28T09:14:56.801289Z node 11 :SYSTEM_VIEWS INFO: Scan started, actor: [11:7486786402409373939:2368], owner: [11:7486786402409373932:2364], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-28T09:14:56.802553Z node 11 :SYSTEM_VIEWS INFO: Scan prepared, actor: [11:7486786402409373939:2368], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:14:56.802872Z node 11 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [11:7486786402409373939:2368], row count: 4, finished: 1 2025-03-28T09:14:56.802896Z node 11 :SYSTEM_VIEWS INFO: Scan finished, actor: [11:7486786402409373939:2368], owner: [11:7486786402409373932:2364], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-28T09:14:56.806031Z node 11 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153296795, txId: 281474976715661] shutting down >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] >> THiveTest::TestSkipBadNode >> ObjectDistribution::TestManyIrrelevantNodes [GOOD] >> Sequencer::Basic1 [GOOD] >> StoragePool::TestDistributionRandomProbability >> KqpJoin::JoinMismatchDictKeyTypes [GOOD] >> KqpQueryService::DdlMixedDml [GOOD] >> THiveTest::TestSkipBadNode [GOOD] >> THiveTest::TestStopTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinMismatchDictKeyTypes [GOOD] Test command err: Trying to start YDB, gRPC: 9552, MsgBus: 32566 2025-03-28T09:14:53.960024Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786386220104820:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:53.960176Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001700/r3tmp/tmp6FKyHx/pdisk_1.dat 2025-03-28T09:14:54.238245Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9552, node 1 2025-03-28T09:14:54.306471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:54.306580Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:54.310221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:14:54.312416Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:54.312447Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:54.312456Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:54.312560Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32566 TClient is connected to server localhost:32566 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:54.779457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:54.827662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:14:54.934928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:14:55.099656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:55.180499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
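KqpJoin::JoinMismatchDictKeyTypes (reported [GOOD] above) exercises a join whose key columns have mismatched types, so the dict-based join has to hash a converted key rather than the raw value. The log shows only the table-setup DDL, not the query itself, so the following YQL sketch of the query shape is an assumption; the table names, column names, and key types are all illustrative:

-- Illustrative only: l.Key assumed Int32, r.Key assumed Uint64; the explicit
-- cast keeps both sides of the dict join hashing the same key type.
SELECT l.Key, r.Value
FROM `/Root/left_table` AS l
INNER JOIN `/Root/right_table` AS r
    ON CAST(l.Key AS Uint64) = r.Key;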
2025-03-28T09:14:56.779866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786399105008480:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:56.779939Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:57.040868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:57.066654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:57.090949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:57.115674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:57.159150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:57.195580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:57.248363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786403399976288:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:57.248432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:57.248596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786403399976293:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:57.252908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:57.261847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786403399976295:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:14:57.346997Z node 1 :TX_PROXY ERROR: Actor# [1:7486786403399976349:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:58.451988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:14:58.959910Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786386220104820:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:58.960012Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlMixedDml [GOOD] Test command err: Trying to start YDB, gRPC: 22253, MsgBus: 12326 2025-03-28T09:14:01.487467Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786163615356966:2216];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:01.511425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ea/r3tmp/tmpF8SkuJ/pdisk_1.dat 2025-03-28T09:14:02.036498Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:02.048001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:02.048154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 22253, node 1 2025-03-28T09:14:02.051785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:14:02.147962Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:02.147993Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:02.148000Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:02.148126Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12326 TClient is connected to server localhost:12326 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:02.943513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:02.979988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:03.181238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:03.487942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:03.650630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:06.265032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786185090195075:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:06.265167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:06.451679Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786163615356966:2216];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:06.451762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:06.682432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:06.723800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:06.799655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:06.838015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:06.875788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:06.953152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:07.052995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786189385162897:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:07.053087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:07.053776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786189385162902:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:07.058225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:07.069238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786189385162905:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:14:07.155874Z node 1 :TX_PROXY ERROR: Actor# [1:7486786189385162959:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:08.574247Z node 1 :TX_PROXY ERROR: Actor# [1:7486786193680130555:3681] txid# 281474976710672, issues: { message: "Group already exists" severity: 1 } 2025-03-28T09:14:08.598396Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTU0MTAzODMtNzgyMDkyYzItYjIyMmQyNDktOWJjNjllMjk=, ActorId: [1:7486786193680130549:2500], ActorState: ExecuteState, TraceId: 01jqe0fe95fxktp55tvhhr12pt, Create QueryResponse for error on request, msg: 2025-03-28T09:14:08.708634Z node 1 :TX_PROXY ERROR: Actor# [1:7486786193680130623:3722] txid# 281474976710676, issues: { message: "Group not found" severity: 1 } 2025-03-28T09:14:08.708933Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Zjc4N2IyMi0zNjAzYmQ0Zi1lOGMxYmZiMS0zMzAwM2Q0Zg==, ActorId: [1:7486786193680130617:2510], ActorState: ExecuteState, TraceId: 01jqe0fedn1bty4hh8q1gy5a0b, Create QueryResponse for error on request, msg: 2025-03-28T09:14:08.864359Z node 1 :TX_PROXY ERROR: Actor# [1:7486786193680130700:3769] txid# 281474976710681, issues: { message: "Group already exists" severity: 1 } 2025-03-28T09:14:08.864764Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODUxNWEyM2MtM2QxZTVmNWItNDE4Yjk4NzktYTFkMWU3YzE=, ActorId: [1:7486786193680130694:2522], ActorState: ExecuteState, TraceId: 01jqe0fejafmebgxjxzgr1ycy0, Create QueryResponse for error on request, msg: 2025-03-28T09:14:09.031579Z node 1 :TX_PROXY ERROR: Actor# [1:7486786197975098084:3817] txid# 281474976710686, issues: { message: "Role \"user1\" is already a member of role \"group1\"" issue_code: 2 severity: 3 } 2025-03-28T09:14:09.064082Z node 1 :TX_PROXY ERROR: Actor# [1:7486786197975098098:3824] txid# 281474976710687, issues: { message: "Member account not found" severity: 1 } 2025-03-28T09:14:09.064496Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzcwNmMyMTUtYmI1NDYyZjItM2VhMjI1MjktZDE3ZTQxYQ==, ActorId: [1:7486786197975098092:2537], ActorState: ExecuteState, TraceId: 01jqe0fers2snw4c6f7adkdatc, Create QueryResponse for error on request, msg: 2025-03-28T09:14:09.124535Z node 1 :TX_PROXY ERROR: Actor# [1:7486786197975098142:3852] txid# 281474976710690, issues: { message: "Role \"user1\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-03-28T09:14:09.165240Z node 1 :TX_PROXY ERROR: Actor# [1:7486786197975098161:3864] txid# 281474976710691, issues: { message: "Role \"user3\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-03-28T09:14:09.199362Z node 1 :TX_PROXY ERROR: Actor# [1:7486786197975098188:3879] txid# 281474976710693, issues: { message: "Member account not found" severity: 1 } 2025-03-28T09:14:09.199622Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGU0MzM4ZTktYWJhZTBiNDQtODM0YjlkMDktYjFkZWIyN2M=, ActorId: [1:7486786197975098169:2549], ActorState: ExecuteState, TraceId: 01jqe0fewt1j9v6v8q1whp2a8x, Create QueryResponse for error on request, msg: 2025-03-28T09:14:09.227472Z node 1 :TX_PROXY ERROR: Actor# [1:7486786197975098206:3887] txid# 281474976710695, issues: { message: "Role \"user1\" is already a member of role 
\"group1\"" issue_code: 2 severity: 3 } 2025-03-28T09:14:09.309622Z node 1 :TX_PROXY ERROR: Actor# [1:7486786197975098266:3928] txid# 281474976710698, issues: { message: "Role \"user3\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-03-28T09:14:09.366556Z node 1 :TX_PROXY ERROR: Actor# [1:7486786197975098289:3941] txid# 281474976710700, issues: { message: "Role \"user1\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-03-28T09:14:09.404170Z node 1 :TX_PROXY ERROR: Actor# [1:7486786197975098303:3948] txid# 281474976710701, issues: { message: "Role \"user2\" is not a member of role \"group1\"" issue_code: 3 severity: 2 } 2025-03-28T09:14:09.498368Z node 1 :TX_PROXY ERROR: Actor# [1:7486786197975098365:3990] txid# 281474976710704, issues: { message: "Group already exists" severity: 1 } 2025-03-28T09:14:09.498534Z node 1 :KQP_SESSION ... CREATE OBJECT my_secret_2 (TYPE SECRET) WITH (value="qwerty"); 2025-03-28T09:14:50.427213Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486786374993576542:4192], TxId: 281474976715888, task: 1. Ctx: { SessionId : ydb://session/3?node_id=3&id=YTJkMzU4YzMtNGVlODYwZjctMTllZmYxYzMtYTY5MDEwNDg=. TraceId : 01jqe0gptbfqwfj0wmzscdv41w. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:14:50.427451Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486786374993576544:4193], TxId: 281474976715888, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=YTJkMzU4YzMtNGVlODYwZjctMTllZmYxYzMtYTY5MDEwNDg=. TraceId : 01jqe0gptbfqwfj0wmzscdv41w. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7486786374993576539:4094], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:14:50.427763Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YTJkMzU4YzMtNGVlODYwZjctMTllZmYxYzMtYTY5MDEwNDg=, ActorId: [3:7486786370698608945:4094], ActorState: ExecuteState, TraceId: 01jqe0gptbfqwfj0wmzscdv41w, Create QueryResponse for error on request, msg: 2025-03-28T09:14:50.431650Z node 3 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jqe0gpbsfgf7rnyzxfwgexfg" } } } } ;request=session_id: "ydb://session/3?node_id=3&id=YTJkMzU4YzMtNGVlODYwZjctMTllZmYxYzMtYTY5MDEwNDg=" tx_control { tx_id: "01jqe0gpbsfgf7rnyzxfwgexfg" } query { yql_text: "DECLARE $objects AS List<Struct<ownerUserId:Utf8,secretId:Utf8,value:Utf8>>;\nINSERT INTO `//Root/.metadata/secrets/values`\nSELECT ownerUserId,secretId,value FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "value" type { type_id: UTF8 } } } } } } value { items { items { text_value: "" } items { text_value: "my_secret_2" } items { text_value: "qwerty" } } } } } ; 2025-03-28T09:14:50.431929Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=OTY3ZDJiMDUtMjI0YjAyZTEtODZjYzVmN2QtMzM0ZWZjNGY=, ActorId: [3:7486786370698608937:4089], ActorState: ExecuteState, TraceId: 01jqe0gnx88me7zrendgvaab76, Create QueryResponse for error on request, msg: Execute SQL: UPSERT OBJECT my_secret_2 (TYPE SECRET) WITH value = "edcba"; Trying to start YDB, gRPC: 64879, MsgBus: 1720 2025-03-28T09:14:53.387881Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486786385425733572:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:53.387950Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014ea/r3tmp/tmpcLA0QI/pdisk_1.dat 2025-03-28T09:14:53.541716Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:53.545436Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:53.545523Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:53.547360Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64879, node 4 2025-03-28T09:14:53.594967Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:53.594998Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:53.595008Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:53.595208Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1720 TClient is connected to server localhost:1720 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:54.116328Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:54.128971Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:54.208442Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:54.413800Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:54.487536Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:57.061563Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786402605604542:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:57.061670Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:57.114135Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:57.144054Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:14:57.177025Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:14:57.208970Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:14:57.241528Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:14:57.276047Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:14:57.406164Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786402605605061:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:57.406237Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786402605605066:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:57.406245Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:57.410118Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:14:57.422106Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486786402605605068:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:14:57.502516Z node 4 :TX_PROXY ERROR: Actor# [4:7486786402605605122:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:58.387974Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486786385425733572:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:58.388032Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:58.708461Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:7486786406900572687:2493], status: GENERIC_ERROR, issues:
: Error: Optimization, code: 1070
:8:25: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 2025-03-28T09:14:58.708644Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YWFlMTVmOGItZGQ1MGZjNjUtZDYxMzZjOTUtNjY1NDZhYzM=, ActorId: [4:7486786406900572680:2489], ActorState: ExecuteState, TraceId: 01jqe0gz7sbqfjaswqn734j4zx, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: >> THiveTest::TestHiveNoBalancingWithLowResourceUsage [GOOD] >> THiveTest::TestLockTabletExecution >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] >> THiveTest::TestStopTenant [GOOD] >> TScaleRecommenderTest::BasicTest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] >> SystemView::AuthGroupMembers [GOOD] >> SystemView::AuthGroupMembers_Access >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] >> THiveTest::TestLockTabletExecution [GOOD] >> THiveTest::TestLockTabletExecutionBadOwner >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] >> SystemView::PartitionStatsLocksFields [GOOD] >> SystemView::PartitionStatsFields >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] >> THiveTest::TestLockTabletExecutionBadOwner [GOOD] >> THiveTest::TestLockTabletExecutionDelete >> TScaleRecommenderTest::BasicTest [GOOD] >> TStorageBalanceTest::TestScenario1 >> SystemView::CollectScanQueries [GOOD] >> SystemView::CollectScriptingQueries >> THiveTest::TestHiveBalancerWithFollowers [GOOD] >> 
THiveTest::TestHiveBalancerWithLimit >> StoragePool::TestDistributionRandomProbability [GOOD] >> StoragePool::TestDistributionRandomProbabilityWithOverflow [GOOD] >> StoragePool::TestDistributionExactMin >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> THiveTest::TestFollowers >> THiveTest::TestLockTabletExecutionDelete [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> JsonChangeRecord::Heartbeat [GOOD] >> THiveTest::TestHiveBalancerWithLimit [GOOD] >> THiveTest::TestHiveBalancerIgnoreTablet |83.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::Heartbeat [GOOD] |83.5%| [TA] $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} |83.6%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestFollowers [GOOD] >> THiveTest::TestFollowersReconfiguration >> THiveTest::TestLockTabletExecutionDeleteReboot [GOOD] >> THiveTest::TestLockTabletExecutionBadUnlock >> StoragePool::TestDistributionExactMin [GOOD] >> StoragePool::TestDistributionExactMinWithOverflow [GOOD] >> StoragePool::TestDistributionRandomMin7p >> THiveTest::TestFollowersReconfiguration [GOOD] >> THiveTest::TestFollowerPromotion >> THiveTest::TestLockTabletExecutionBadUnlock [GOOD] >> THiveTest::TestLockTabletExecutionGoodUnlock >> SystemView::AuthUsers_ResultOrder [GOOD] >> SystemView::AuthUsers_TableRange >> SystemView::TopPartitionsByCpuTables [GOOD] >> SystemView::TopPartitionsByCpuRanges >> THiveTest::TestLockTabletExecutionGoodUnlock [GOOD] >> THiveTest::TestLockTabletExecutionLocalGone |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> THiveTest::TestFollowerPromotion [GOOD] >> THiveTest::TestFollowerPromotionFollowerDies >> THiveTest::TestHiveBalancerIgnoreTablet [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts >> SystemView::CollectScriptingQueries [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] >> THiveTest::TestFollowerPromotionFollowerDies [GOOD] >> THiveTest::TestHiveBalancer >> THiveTest::TestLockTabletExecutionLocalGone [GOOD] >> THiveTest::TestLocalRegistrationInSharedHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::CollectScriptingQueries [GOOD] Test command err: 2025-03-28T09:14:11.986222Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786209176720151:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:11.986300Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00158e/r3tmp/tmpKEIilT/pdisk_1.dat 2025-03-28T09:14:13.094828Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:14:13.162631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:13.172112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:13.178586Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:13.198220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8091, node 1 2025-03-28T09:14:13.824784Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:13.824821Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:13.824830Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:13.824963Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10297 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:14.905956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:15.149447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:16.990818Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786209176720151:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:16.990896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:17.951876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786234946524999:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:17.951973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786234946525013:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:17.952025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786234946525012:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:17.952188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:17.957628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:14:17.957893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786234946525034:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:17.958208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786234946525029:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:17.979111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786234946525085:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:17.979180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786234946525083:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:17.979245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:17.980917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786234946525095:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:17.980982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786234946525096:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:17.989051Z node 1 :TX_PROXY ERROR: Actor# [1:7486786234946525068:2704] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:14:17.989213Z node 1 :TX_PROXY ERROR: Actor# [1:7486786234946525069:2705] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:14:17.989831Z node 1 :TX_PROXY ERROR: Actor# [1:7486786234946525031:2691] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:14:17.995856Z node 1 :TX_PROXY ERROR: Actor# [1:7486786234946525097:2720] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:14:17.996004Z node 1 :TX_PROXY ERROR: Actor# [1:7486786234946525106:2726] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:14:18.001165Z node 1 :TX_PROXY ERROR: Actor# [1:7486786234946525107:2727] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:14:18.026470Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T09:14:18.030424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786234946525066:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:14:18.030506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786234946525027:2369], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:14:18.030539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786234946525104:2387], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:14:18.030570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786234946525028:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:14:18.030601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786234946525067:2375], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:14:18.031124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786234946525094:2384], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:14:18.031200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786234946525105:2388], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:14:18.105739Z node 1 :TX_PROXY ERROR: Actor# [1:7486786239241492567:2825] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:18.121194Z node 1 :TX_PROXY ERROR: Actor# [1:748678623924149257 ... Id: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:58.442614Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:58.452161Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:15:01.966297Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7486786423999216118:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:01.966297Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [22:7486786423999216129:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:01.966400Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:01.970260Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:15:01.981720Z node 22 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [22:7486786423999216132:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:15:02.070884Z node 22 :TX_PROXY ERROR: Actor# [22:7486786428294183479:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:15:02.173231Z node 22 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe0h2ecempg20nmrenh0291, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=NzdlNjMxY2QtNjJkM2JiMTMtOTdhNTVjOWQtZmFhODA5YWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:15:02.316837Z node 22 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqe0h2n9c4yk280jnn8c2pn8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=MTM3YWJlNTMtOTk0NzE1MmYtOWIzZGI3ODMtMzljODBhZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:15:02.387645Z node 22 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153302359, txId: 281474976710662] shutting down 2025-03-28T09:15:02.544065Z node 22 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqe0h2vz8ex5a5njc59qppa1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=NzE1ZDhjNjctZWJlMmYxNDktZmQ2MWRiMzgtOWEyZWE1NGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:15:02.546802Z node 22 :SYSTEM_VIEWS INFO: Scan started, actor: [22:7486786428294183591:2366], owner: [22:7486786428294183588:2364], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-03-28T09:15:02.547349Z node 22 :SYSTEM_VIEWS INFO: Scan prepared, actor: [22:7486786428294183591:2366], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:15:02.547833Z node 22 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [22:7486786428294183591:2366], row count: 2, finished: 1 2025-03-28T09:15:02.547869Z node 22 :SYSTEM_VIEWS INFO: Scan finished, actor: [22:7486786428294183591:2366], owner: [22:7486786428294183588:2364], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-03-28T09:15:02.550899Z node 22 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153302543, txId: 281474976710664] shutting down 2025-03-28T09:15:03.672366Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7486786432489014021:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:15:03.672490Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00158e/r3tmp/tmpQN6nnq/pdisk_1.dat 2025-03-28T09:15:03.789839Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:15:03.816681Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:15:03.816793Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-03-28T09:15:03.818513Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3377, node 23 2025-03-28T09:15:03.869351Z node 23 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:15:03.869377Z node 23 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:15:03.869387Z node 23 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:15:03.869541Z node 23 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21318 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:15:04.213688Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:15:04.223155Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:15:07.717925Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7486786449668883950:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:07.717953Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7486786449668883958:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:07.718003Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:07.721873Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:15:07.732335Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [23:7486786449668883964:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:15:07.835119Z node 23 :TX_PROXY ERROR: Actor# [23:7486786449668884016:2397] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:15:07.933635Z node 23 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe0h8234z9d28vk301wwkb8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=23&id=YTg2MDM1ZmItYjQwMzQzOTktNDQxNjA0MGUtZGY1ZDFlMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:15:08.174159Z node 23 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe0h8cvah5tn68n0xn0qm65, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=23&id=NWZlY2Y3YS0zYmMxMzBhNS01Y2Y3NTY4MS1kNWM4YWEyMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:15:08.217627Z node 23 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153308218, txId: 281474976715662] shutting down 2025-03-28T09:15:08.438951Z node 23 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe0h8me2jdxh0g7mtpbrd18, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=23&id=YTE5OWQ0MWUtMTVhMTg2NDItNWFjZGJiYzctZDMzNDUyZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:15:08.441783Z node 23 :SYSTEM_VIEWS INFO: Scan started, actor: [23:7486786453963851431:2368], owner: [23:7486786453963851428:2366], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-03-28T09:15:08.442476Z node 23 :SYSTEM_VIEWS INFO: Scan prepared, actor: [23:7486786453963851431:2368], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:15:08.443145Z node 23 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [23:7486786453963851431:2368], row count: 2, finished: 1 2025-03-28T09:15:08.443193Z node 23 :SYSTEM_VIEWS INFO: Scan finished, actor: [23:7486786453963851431:2368], owner: [23:7486786453963851428:2366], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-03-28T09:15:08.446291Z node 23 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153308437, txId: 281474976715664] shutting down >> THiveTest::TestDrainWithMaxTabletsScheduled [GOOD] >> THiveTest::TestDownAfterDrain >> StoragePool::TestDistributionRandomMin7p [GOOD] >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] >> SystemView::PartitionStatsFields [GOOD] >> SystemView::PDisksFields ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] Test command err: Took 8.387512 seconds >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> SystemView::AuthGroupMembers_Access [GOOD] >> SystemView::AuthGroupMembers_ResultOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLocalRegistrationInSharedHive 
[GOOD] Test command err: 2025-03-28T09:14:39.911330Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:14:39.938276Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:39.961999Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-28T09:14:39.990821Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-28T09:14:40.011945Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-03-28T09:14:40.012018Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:14:40.012911Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:30:2075] ControllerId# 72057594037932033 2025-03-28T09:14:40.012948Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:14:40.013053Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:14:40.013403Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:14:40.027443Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:14:40.027496Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:14:40.029240Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:37:2080] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.029423Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:38:2081] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.029556Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.029695Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.029876Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.030032Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.030186Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.030219Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:14:40.030294Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:30:2075] 2025-03-28T09:14:40.030321Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:30:2075] 2025-03-28T09:14:40.030369Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> 
StateEjected Marker# DSP42 2025-03-28T09:14:40.030413Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:14:40.036411Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:14:40.038193Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:14:40.079215Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-28T09:14:40.079285Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.079317Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-28T09:14:40.106726Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:20:2063] 2025-03-28T09:14:40.106795Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:20:2063] 2025-03-28T09:14:40.106929Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:14:40.109192Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-03-28T09:14:40.109278Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-28T09:14:40.109342Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.109378Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-28T09:14:40.113060Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-28T09:14:40.113262Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:14:40.114958Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-03-28T09:14:40.115069Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-03-28T09:14:40.117823Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:40.120335Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-28T09:14:40.120842Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.120893Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-03-28T09:14:40.121012Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 
2025-03-28T09:14:40.126887Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-28T09:14:40.127112Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:50:2091] 2025-03-28T09:14:40.127144Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2091] 2025-03-28T09:14:40.127335Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-28T09:14:40.127472Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:14:40.127533Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2091] 2025-03-28T09:14:40.136046Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:14:40.140515Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-03-28T09:14:40.140610Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-03-28T09:14:40.140743Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-03-28T09:14:40.140796Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-28T09:14:40.140921Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-28T09:14:40.140959Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-03-28T09:14:40.141025Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-03-28T09:14:40.141252Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-28T09:14:40.143287Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:30:2075] 2025-03-28T09:14:40.143349Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:30:2075] 2025-03-28T09:14:40.143461Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-28T09:14:40.146631Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-28T09:14:40.148772Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-28T09:14:40.148897Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-28T09:14:40.148957Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-03-28T09:14:40.149055Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:20:2063] 2025-03-28T09:14:40.149085Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:20:2063] 
2025-03-28T09:14:40.149189Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:14:40.149343Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-03-28T09:14:40.149372Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-28T09:14:40.150048Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-03-28T09:14:40.150216Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-03-28T09:14:40.150261Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:317} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-03-28T09:14:40.152511Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-03-28T09:14:40.152604Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-03-28T09:14:40.152639Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-03-28T09:14:40.152691Z node ... 92Z node 34 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046678944 Cookie: 1} 2025-03-28T09:15:11.297928Z node 34 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594046678944 Cookie: 2} 2025-03-28T09:15:11.298092Z node 35 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72057594046678944 CurrentLeader: [34:326:2265] CurrentLeaderTablet: [34:342:2274] CurrentGeneration: 2 CurrentStep: 0} 2025-03-28T09:15:11.298147Z node 35 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72057594046678944 CurrentLeader: [34:326:2265] CurrentLeaderTablet: [34:342:2274] CurrentGeneration: 2 CurrentStep: 0} 2025-03-28T09:15:11.298275Z node 35 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594046678944 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72057594046678944 Cookie: 0 CurrentLeader: [34:326:2265] CurrentLeaderTablet: [34:342:2274] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-28T09:15:11.298342Z node 35 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72057594046678944 followers: 0 2025-03-28T09:15:11.298417Z node 35 :TABLET_RESOLVER DEBUG: SelectForward node 35 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594046678944 followers: 0 countLeader 1 allowFollowers 0 winner: [34:326:2265] 2025-03-28T09:15:11.298535Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] forward result remote node 34 [35:547:2090] 2025-03-28T09:15:11.298677Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] remote node connected [35:547:2090] 2025-03-28T09:15:11.298765Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [35:547:2090] 2025-03-28T09:15:11.298954Z node 34 :PIPE_SERVER DEBUG: [72057594046678944] Accept Connect Originator# [35:547:2090] 2025-03-28T09:15:11.299225Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] connected with status OK role: Leader [35:547:2090] 2025-03-28T09:15:11.299287Z node 35 :PIPE_CLIENT DEBUG: 
TClient[72057594046678944] send queued [35:547:2090] 2025-03-28T09:15:11.299388Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] send [35:547:2090] 2025-03-28T09:15:11.299421Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944] push event to server [35:547:2090] 2025-03-28T09:15:11.299494Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594046678944]::SendEvent [35:547:2090] 2025-03-28T09:15:11.299616Z node 34 :PIPE_SERVER DEBUG: [72057594046678944] Push Sender# [35:546:2090] EventType# 271122945 2025-03-28T09:15:11.299746Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme 2025-03-28T09:15:11.299816Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:15:11.300026Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:15:11.300119Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:15:11.301441Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [35:553:2091] 2025-03-28T09:15:11.301476Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [35:553:2091] 2025-03-28T09:15:11.301510Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [35:554:2092] 2025-03-28T09:15:11.301525Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [35:554:2092] 2025-03-28T09:15:11.301686Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [35:553:2091] 2025-03-28T09:15:11.301746Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] queue send [35:554:2092] 2025-03-28T09:15:11.301991Z node 35 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:15:11.302065Z node 35 :TABLET_RESOLVER DEBUG: SelectForward node 35 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [34:325:2264] 2025-03-28T09:15:11.302233Z node 35 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:15:11.302496Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 34 [35:553:2091] 2025-03-28T09:15:11.302709Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [35:553:2091] 2025-03-28T09:15:11.302754Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [35:553:2091] 2025-03-28T09:15:11.303035Z node 35 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:15:11.303530Z node 34 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [35:553:2091] 2025-03-28T09:15:11.303934Z node 34 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-03-28T09:15:11.304027Z node 34 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-03-28T09:15:11.304115Z node 
34 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-03-28T09:15:11.304369Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [35:553:2091] 2025-03-28T09:15:11.304409Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [35:553:2091] 2025-03-28T09:15:11.304447Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [35:553:2091] 2025-03-28T09:15:11.304545Z node 35 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [35:553:2091] 2025-03-28T09:15:11.304906Z node 35 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [34:463:2365] CurrentLeaderTablet: [34:478:2377] CurrentGeneration: 1 CurrentStep: 0} 2025-03-28T09:15:11.304988Z node 35 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [34:463:2365] CurrentLeaderTablet: [34:478:2377] CurrentGeneration: 1 CurrentStep: 0} 2025-03-28T09:15:11.305057Z node 35 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [34:463:2365] CurrentLeaderTablet: [34:478:2377] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-28T09:15:11.305082Z node 35 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037888 followers: 0 2025-03-28T09:15:11.305144Z node 35 :TABLET_RESOLVER DEBUG: SelectForward node 35 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [34:463:2365] 2025-03-28T09:15:11.305225Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result remote node 34 [35:554:2092] 2025-03-28T09:15:11.305426Z node 34 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [35:550:2091] EventType# 268959744 2025-03-28T09:15:11.305658Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-03-28T09:15:11.305769Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:15:11.305976Z node 34 :HIVE WARN: HIVE#72057594037927937 Node(35, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:15:11.306135Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{14, redo 208b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-03-28T09:15:11.306254Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{24, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:15:11.306487Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] remote node connected [35:554:2092] 2025-03-28T09:15:11.306521Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [35:554:2092] 2025-03-28T09:15:11.306728Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-03-28T09:15:11.306825Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:15:11.306947Z node 34 :TABLET_EXECUTOR 
DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:15:11.307035Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} Tx{25, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:15:11.307407Z node 34 :PIPE_SERVER DEBUG: [72075186224037888] Accept Connect Originator# [35:554:2092] 2025-03-28T09:15:11.307556Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connected with status OK role: Leader [35:554:2092] 2025-03-28T09:15:11.307583Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send queued [35:554:2092] 2025-03-28T09:15:11.307604Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [35:554:2092] 2025-03-28T09:15:11.307645Z node 35 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [35:554:2092] 2025-03-28T09:15:11.307825Z node 34 :PIPE_SERVER DEBUG: [72075186224037888] Push Sender# [35:551:2092] EventType# 268959744 2025-03-28T09:15:11.307953Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:5} Tx{6, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2025-03-28T09:15:11.307997Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:5} Tx{6, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:15:11.308170Z node 34 :HIVE WARN: HIVE#72075186224037888 Node(35, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:15:11.308381Z node 34 :HIVE WARN: HIVE#72075186224037888 Node(35, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:15:11.308445Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:5} Tx{6, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{6, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-03-28T09:15:11.308482Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:5} Tx{6, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:15:11.308635Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{7, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-03-28T09:15:11.308678Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{7, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:15:11.308745Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{7, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:15:11.308784Z node 34 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{7, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} >> THiveTest::TestDownAfterDrain [GOOD] >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 >> THiveTest::TestHiveBalancer [GOOD] >> THiveTest::TestHiveBalancerDifferentResources >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 [GOOD] >> THiveTest::TestDeleteTablet >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> THiveTest::TestHiveBalancerNodeRestarts [GOOD] >> THiveTest::TestHiveBalancerDifferentResources2 >> THiveTest::TestDeleteTablet [GOOD] >> THiveTest::TestDeleteOwnerTablets >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] >> THiveTest::TestDeleteOwnerTablets [GOOD] >> THiveTest::TestDeleteOwnerTabletsMany >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> SystemView::AuthUsers_TableRange [GOOD] >> SystemView::AuthPermissions_ResultOrder >> THiveTest::TestHiveBalancerDifferentResources [GOOD] >> THiveTest::TestFollowersCrossDC_Easy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD] Test command err: 2025-03-28T09:11:08.693803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:11:08.694433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:11:08.694502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001549/r3tmp/tmpvUgQGU/pdisk_1.dat 2025-03-28T09:11:09.215867Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:09.215967Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:09.216007Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:11:09.216151Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-03-28T09:11:09.216187Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:11:09.388226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-03-28T09:11:09.388547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:09.388784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:11:09.389026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:11:09.389169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:09.389286Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:09.390132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:11:09.390312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:11:09.390358Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:09.390398Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T09:11:09.390595Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:11:09.390640Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:11:09.390761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:09.390847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:11:09.390889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:11:09.390925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:11:09.391062Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:09.391562Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:09.391601Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T09:11:09.391728Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:11:09.391776Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:11:09.391877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:09.391991Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:11:09.392044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:11:09.392122Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:09.392524Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:09.392554Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T09:11:09.392650Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:11:09.392680Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:11:09.392717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:09.392788Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:11:09.392863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-03-28T09:11:09.392893Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:11:09.392928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:11:09.397206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:11:09.397761Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:11:09.397843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-03-28T09:11:09.398044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:11:09.399339Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:11:09.399397Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T09:11:09.399440Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-03-28T09:11:09.399589Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-03-28T09:11:09.400009Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:09.400058Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:09.400092Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:11:09.400214Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-03-28T09:11:09.400246Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-28T09:11:09.400356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-03-28T09:11:09.400398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-03-28T09:11:09.400441Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-03-28T09:11:09.468496Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-03-28T09:11:09.468630Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-03-28T09:11:09.468697Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:11:09.469220Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-28T09:11:09.469320Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-03-28T09:11:09.519225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:11:09.519386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:11:09.531123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:11:09.620033Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-03-28T09:11:09.620801Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:09.620860Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:11:09.620898Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:11:09.621080Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-03-28T09:11:09.621121Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:11:09.621210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:11:09.621346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... 4046644480 2025-03-28T09:14:00.275296Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:3 ProgressState 2025-03-28T09:14:00.275373Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:14:00.275401Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:3 progress is 5/7 2025-03-28T09:14:00.275428Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 5/7 2025-03-28T09:14:00.275459Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:3 progress is 5/7 2025-03-28T09:14:00.275484Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 5/7 2025-03-28T09:14:00.275511Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 5/7, is published: true 2025-03-28T09:14:00.275767Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:14:00.275800Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:14:00.275843Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:6, at schemeshard: 72057594046644480 2025-03-28T09:14:00.275873Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:6 ProgressState 2025-03-28T09:14:00.275943Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:14:00.275969Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:6 progress is 6/7 2025-03-28T09:14:00.275995Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 6/7 2025-03-28T09:14:00.276022Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:6 progress is 6/7 2025-03-28T09:14:00.276048Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 6/7 2025-03-28T09:14:00.276075Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 6/7, is published: true 2025-03-28T09:14:00.276347Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, received event# 2146435072, Sender [3:409:2404], Recipient [3:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:14:00.276380Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:14:00.276426Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715668:4, at schemeshard: 72057594046644480 2025-03-28T09:14:00.276455Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715668:4 ProgressState 2025-03-28T09:14:00.276517Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:14:00.276546Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:4 progress is 7/7 2025-03-28T09:14:00.276570Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 7/7 2025-03-28T09:14:00.276615Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715668:4 progress is 7/7 2025-03-28T09:14:00.276639Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 7/7 2025-03-28T09:14:00.276663Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715668, ready parts: 7/7, is published: true 2025-03-28T09:14:00.276739Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:1240:2966] message: TxId: 281474976715668 2025-03-28T09:14:00.276806Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715668 ready parts: 7/7 2025-03-28T09:14:00.276885Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:0 2025-03-28T09:14:00.276935Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:0 2025-03-28T09:14:00.277025Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 17] was 2 2025-03-28T09:14:00.277077Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:1 2025-03-28T09:14:00.277100Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:1 2025-03-28T09:14:00.277135Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 18] was 2 2025-03-28T09:14:00.277161Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:2 2025-03-28T09:14:00.277183Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:2 2025-03-28T09:14:00.277212Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 19] was 3 2025-03-28T09:14:00.277237Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:3 2025-03-28T09:14:00.277258Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:3 2025-03-28T09:14:00.277347Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 20] was 3 2025-03-28T09:14:00.277385Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-03-28T09:14:00.277437Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts is done, operation id: 281474976715668:4 2025-03-28T09:14:00.277460Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:4 2025-03-28T09:14:00.277520Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 21] was 3 2025-03-28T09:14:00.277545Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 3 2025-03-28T09:14:00.277573Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:5 2025-03-28T09:14:00.277595Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:5 2025-03-28T09:14:00.277643Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 22] was 3 2025-03-28T09:14:00.277670Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-03-28T09:14:00.277696Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715668:6 2025-03-28T09:14:00.277719Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715668:6 2025-03-28T09:14:00.277804Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 23] was 3 2025-03-28T09:14:00.277833Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046644480, LocalPathId: 16] was 3 2025-03-28T09:14:00.278439Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:14:00.278510Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:14:00.278606Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:14:00.278670Z node 3 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:14:00.278830Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [3:1240:2966] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715668 at schemeshard: 72057594046644480 2025-03-28T09:14:00.279336Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [3:1247:2972], Recipient [3:409:2404]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:14:00.279379Z node 3 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:14:00.279412Z node 3 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-03-28T09:14:00.463770Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [3:1542:3207], serverId# [3:1543:3208], sessionId# [0:0:0] 2025-03-28T09:14:00.463993Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jqe0f674b7tz7bj3r7vgennm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDY2NTgxMjAtNDY1MmRlNDMtZGJlMGZkYTYtMmEyZTIzMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 2 } items { uint32_value: 20 } }, { items { uint32_value: 3 } items { uint32_value: 30 } }, { items { uint32_value: 4 } items { uint32_value: 40 } }, { items { uint32_value: 5 } items { uint32_value: 50 } } 2025-03-28T09:14:00.649721Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037895, clientId# [3:1571:3224], serverId# [3:1572:3225], sessionId# [0:0:0] 2025-03-28T09:14:00.649875Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqe0f6dj76whyhj2h0wk80wy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2ZiZGY2ZTctYzE1Y2JkMmItOTEwZDI3OGMtOWQyNGU5OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 11 } items { uint32_value: 101 } }, { items { uint32_value: 21 } items { uint32_value: 201 } }, { items { uint32_value: 31 } items { uint32_value: 301 } }, { items { uint32_value: 41 } items { uint32_value: 401 } }, { items { uint32_value: 51 } items { uint32_value: 501 } } 2025-03-28T09:14:00.826706Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037892, clientId# [3:1600:3241], serverId# [3:1601:3242], sessionId# [0:0:0] 2025-03-28T09:14:00.826942Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqe0f6jtc0z7ntv3757j8rc3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MzllNDdiMTAtMjljZWEyZC04ZTJiYmQ3ZC0zMjA3ZTU4Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 12 } items { uint32_value: 102 } }, { items { uint32_value: 22 } items { uint32_value: 202 } }, { items { uint32_value: 32 } items { uint32_value: 302 } }, { items { uint32_value: 42 } items { uint32_value: 402 } }, { items { uint32_value: 52 } items { uint32_value: 502 } } 2025-03-28T09:14:01.069453Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [3:1629:3258], serverId# [3:1630:3259], sessionId# [0:0:0] 2025-03-28T09:14:01.069711Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqe0f6r9e6h48tyfwnba8t4g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTlhYWNkYTctMmZhYzU4NmItZDMxM2ZmNGYtYzIyMTUxYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 13 } items { uint32_value: 103 } }, { items { uint32_value: 23 } items { uint32_value: 203 } }, { items { uint32_value: 33 } items { uint32_value: 303 } }, { items { uint32_value: 43 } items { uint32_value: 403 } }, { items { uint32_value: 53 } items { uint32_value: 503 } } >> THiveTest::TestHiveBalancerDifferentResources2 [GOOD] >> THiveTest::TestHiveBalancerUselessNeighbourMoves >> SystemView::PDisksFields [GOOD] >> SystemView::GroupsFields |83.6%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} |83.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} >> THiveTest::TestFollowersCrossDC_Easy [GOOD] >> THiveTest::TestFollowers_LocalNodeOnly >> THiveTest::TestHiveBalancerUselessNeighbourMoves [GOOD] >> THiveTest::TestHiveBalancerWithImmovableTablets >> SystemView::AuthGroupMembers_ResultOrder [GOOD] >> SystemView::AuthGroupMembers_TableRange >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] >> THiveTest::TestCreateSubHiveCreateManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots >> THiveTest::TestDeleteOwnerTabletsMany [GOOD] >> THiveTest::TestDeleteTabletWithFollowers >> THiveTest::TestFollowers_LocalNodeOnly [GOOD] >> THiveTest::TestFollowersCrossDC_Tight >> THiveTest::TestDeleteTabletWithFollowers [GOOD] >> THiveTest::TestCreateTabletBeforeLocal >> THiveTest::TestHiveBalancerWithImmovableTablets [GOOD] >> THiveTest::TestHiveBalancerHighUsage >> THiveTest::TestCreateTabletBeforeLocal [GOOD] >> THiveTest::TestCreateTabletReboots >> THiveTest::TestFollowersCrossDC_Tight [GOOD] >> THiveTest::TestFollowersCrossDC_MovingLeader >> SystemView::GroupsFields [GOOD] >> SystemView::AuthPermissions_ResultOrder [GOOD] >> SystemView::AuthPermissions_Selects |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |83.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan >> KqpQueryService::DdlColumnTable [GOOD] >> KqpQueryService::DdlCache >> THiveTest::TestHiveBalancerHighUsage [GOOD] >> THiveTest::TestHiveBalancerHighUsageAndColumnShards >> TStorageBalanceTest::TestScenario1 [GOOD] >> SystemView::TopPartitionsByCpuRanges [GOOD] >> SystemView::TopPartitionsByCpuFollowers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::GroupsFields [GOOD] Test command err: 2025-03-28T09:14:12.288388Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786210395867734:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:12.288441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001595/r3tmp/tmpDKkgBb/pdisk_1.dat 2025-03-28T09:14:13.136536Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:13.140729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:13.140863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:13.154137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7223, node 1 2025-03-28T09:14:13.825089Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:13.825110Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:13.825116Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:13.825232Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14403 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:14.907058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:15.074268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:15.108593Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486786225892202787:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:15.108645Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:14:15.118407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:15.118476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:15.131292Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-03-28T09:14:15.199001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:14:15.210803Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486786225281301116:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:15.367192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:14:15.465927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:15.466030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:15.496673Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-03-28T09:14:15.547428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:14:15.552766Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:14:15.984160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:16.028698Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786229825275920:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:16.029408Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:14:16.033295Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786227504834086:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:16.033340Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:14:16.044771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:14:16.191948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:16.192040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:16.295626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:16.295703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:16.336177Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:14:16.368352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:14:16.413820Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T09:14:16.415269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:14:17.294806Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786210395867734:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:17.294887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:20.142455Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486786225892202787:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:20.148743Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:20.192240Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486786225281301116:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:20.192344Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:21.028502Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786229825275920:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:21.028565Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:21.033571Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786227504834086:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:21.033644Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant2/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:21.084047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:14:21.305280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786249050574782:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:21.305281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786249050574793:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:21.305369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:21.310496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480 2025-03-28T09:14:21.348728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786249050574796:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2025-03-28T09:14:21.472213Z node 1 :TX_PROXY ERROR: Actor# [1:7486786249050574877:3058] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:22.660086Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqe0ftqpdrhkq9y03sw6v0ef, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTBjMTVmYjktMjJlYzZjNDMtZDdjMmZiODUtZTNjMjYzYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09 ... nId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:15:14.874933Z node 26 :SYSTEM_VIEWS INFO: Scan started, actor: [26:7486786476271361819:2340], owner: [26:7486786476271361815:2338], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-03-28T09:15:14.884848Z node 26 :SYSTEM_VIEWS INFO: Scan prepared, actor: [26:7486786476271361819:2340], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:15:14.893708Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7486786476271361819:2340], row count: 1, finished: 1 2025-03-28T09:15:14.893777Z node 26 :SYSTEM_VIEWS INFO: Scan finished, actor: [26:7486786476271361819:2340], owner: [26:7486786476271361815:2338], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-03-28T09:15:14.897098Z node 26 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153314871, txId: 281474976715660] shutting down 2025-03-28T09:15:16.077708Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe0hg213nw96m36w7fz2bdp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=ZWMzYjEzNTctNTI0Njg2Y2MtZDkxMDI3NWYtODBjMzNhODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:15:16.080431Z node 26 :SYSTEM_VIEWS INFO: Scan started, actor: [26:7486786484861296455:2354], owner: [26:7486786484861296452:2352], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-03-28T09:15:16.081099Z node 26 :SYSTEM_VIEWS INFO: Scan prepared, actor: [26:7486786484861296455:2354], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:15:16.081342Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7486786484861296455:2354], row count: 1, finished: 1 2025-03-28T09:15:16.081381Z node 26 :SYSTEM_VIEWS INFO: Scan finished, actor: [26:7486786484861296455:2354], owner: [26:7486786484861296452:2352], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-03-28T09:15:16.084301Z node 26 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153316076, txId: 281474976715662] shutting down 2025-03-28T09:15:16.100662Z node 26 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[26:7486786463386459173:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:15:16.100767Z node 26 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:15:17.313787Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe0hh73fkya2yq8n2hhzvez, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=Mzc0MmFjMmQtZDZhYWMyZWYtYjA1Mzg4OTUtZjEzNTI4OWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:15:17.316017Z node 26 :SYSTEM_VIEWS INFO: Scan started, actor: [26:7486786489156263797:2369], owner: [26:7486786489156263794:2367], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-03-28T09:15:17.316551Z node 26 :SYSTEM_VIEWS INFO: Scan prepared, actor: [26:7486786489156263797:2369], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:15:17.316700Z node 26 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [26:7486786489156263797:2369], row count: 1, finished: 1 2025-03-28T09:15:17.316724Z node 26 :SYSTEM_VIEWS INFO: Scan finished, actor: [26:7486786489156263797:2369], owner: [26:7486786489156263794:2367], scan id: 0, table id: [72057594046644480:1:0:ds_pdisks] 2025-03-28T09:15:17.319744Z node 26 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153317312, txId: 281474976715664] shutting down 2025-03-28T09:15:18.671785Z node 27 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[27:7486786492891441626:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:15:18.671886Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001595/r3tmp/tmp0cs33H/pdisk_1.dat 2025-03-28T09:15:18.784851Z node 27 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:15:18.818877Z node 27 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-03-28T09:15:18.818975Z node 27 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:15:18.823008Z node 27 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23138, node 27 2025-03-28T09:15:18.882276Z node 27 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:15:18.882308Z node 27 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:15:18.882318Z node 27 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:15:18.882506Z node 27 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63597 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:15:19.207808Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:15:22.398581Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [27:7486786510071311652:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:22.398643Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [27:7486786510071311639:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:22.398964Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:22.401798Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:15:22.419520Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [27:7486786510071311668:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:15:22.497503Z node 27 :TX_PROXY ERROR: Actor# [27:7486786510071311735:2571] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:15:22.924297Z node 27 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe0hka220cvznfa8bwef13w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=27&id=ZjRjNmMyOWItYmY0NzFlNGUtN2JiNDNjZTUtNGY5YWJjOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:15:22.926849Z node 27 :SYSTEM_VIEWS INFO: Scan started, actor: [27:7486786510071311780:2345], owner: [27:7486786510071311776:2343], scan id: 0, table id: [72057594046644480:1:0:ds_groups] 2025-03-28T09:15:22.927339Z node 27 :SYSTEM_VIEWS INFO: Scan prepared, actor: [27:7486786510071311780:2345], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:15:22.927781Z node 27 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [27:7486786510071311780:2345], row count: 0, finished: 1 2025-03-28T09:15:22.927837Z node 27 :SYSTEM_VIEWS INFO: Scan finished, actor: [27:7486786510071311780:2345], owner: [27:7486786510071311776:2343], scan id: 0, table id: [72057594046644480:1:0:ds_groups] 2025-03-28T09:15:22.931227Z node 27 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153322923, txId: 281474976715660] shutting down 2025-03-28T09:15:23.672141Z node 27 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[27:7486786492891441626:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:15:23.672222Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:15:24.128227Z node 27 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe0hqx24y8wbmmavf7h32n5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=27&id=ZDJlOGY4NDUtYjk2NGE2OTctZjk3N2MxNzMtNWZjYzQ2ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:15:24.130639Z node 27 :SYSTEM_VIEWS INFO: Scan started, actor: [27:7486786518661246430:2359], owner: [27:7486786518661246427:2357], scan id: 0, table id: [72057594046644480:1:0:ds_groups] 2025-03-28T09:15:24.131326Z node 27 :SYSTEM_VIEWS INFO: Scan prepared, actor: [27:7486786518661246430:2359], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:15:24.131587Z node 27 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [27:7486786518661246430:2359], row count: 1, finished: 1 2025-03-28T09:15:24.131628Z node 27 :SYSTEM_VIEWS INFO: Scan finished, actor: [27:7486786518661246430:2359], owner: [27:7486786518661246427:2357], scan id: 0, table id: [72057594046644480:1:0:ds_groups] 2025-03-28T09:15:24.134990Z node 27 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153324127, txId: 281474976715662] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario1 [GOOD] Test command err: 2025-03-28T09:14:39.911322Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:14:39.935592Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:39.962007Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-28T09:14:39.990752Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-28T09:14:40.011932Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-03-28T09:14:40.012007Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:14:40.012824Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:30:2075] ControllerId# 72057594037932033 2025-03-28T09:14:40.012868Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:14:40.012971Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:14:40.013281Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:14:40.027537Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:14:40.027604Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:14:40.029586Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:37:2080] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.029731Z node 1 :BS_PROXY DEBUG: Group# 0 
Actor# [1:29:2074] Create Queue# [1:38:2081] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.029889Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.030038Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.030201Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.030349Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.030467Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.030494Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:14:40.030562Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:30:2075] 2025-03-28T09:14:40.030594Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:30:2075] 2025-03-28T09:14:40.030648Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:14:40.030884Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:14:40.036424Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:14:40.038182Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:14:40.079224Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-28T09:14:40.079285Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.079320Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-28T09:14:40.106729Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:20:2063] 2025-03-28T09:14:40.106821Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:20:2063] 2025-03-28T09:14:40.106972Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:14:40.109208Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-03-28T09:14:40.109303Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-28T09:14:40.109353Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.109392Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-28T09:14:40.113893Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-28T09:14:40.114123Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:14:40.115366Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-03-28T09:14:40.115455Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-03-28T09:14:40.118209Z node 1 :BS_NODE DEBUG: 
{NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:40.120365Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-28T09:14:40.120800Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.120844Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-03-28T09:14:40.120916Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-03-28T09:14:40.126883Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-28T09:14:40.127113Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:50:2091] 2025-03-28T09:14:40.127147Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2091] 2025-03-28T09:14:40.127292Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-28T09:14:40.127453Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:14:40.127506Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2091] 2025-03-28T09:14:40.136111Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:14:40.140523Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-03-28T09:14:40.140630Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-03-28T09:14:40.140758Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-03-28T09:14:40.140816Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-28T09:14:40.140928Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-28T09:14:40.140968Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-03-28T09:14:40.141038Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-03-28T09:14:40.141203Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-28T09:14:40.143271Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:30:2075] 2025-03-28T09:14:40.143339Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry 
[1:30:2075] 2025-03-28T09:14:40.143443Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-28T09:14:40.146629Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-28T09:14:40.148777Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-28T09:14:40.148896Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-28T09:14:40.148958Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-03-28T09:14:40.149094Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:20:2063] 2025-03-28T09:14:40.149135Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:20:2063] 2025-03-28T09:14:40.149229Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:14:40.149362Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-03-28T09:14:40.149388Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-28T09:14:40.150084Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-03-28T09:14:40.150213Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-03-28T09:14:40.150269Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:317} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-03-28T09:14:40.152469Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-03-28T09:14:40.152549Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-03-28T09:14:40.152589Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-03-28T09:14:40.152635Z node ... 
x{540, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{361, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-03-28T09:15:26.454352Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{540, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:15:26.465851Z node 24 :BS_PROXY_PUT INFO: [a0855708664ec8fa] bootstrap ActorId# [24:3851:5136] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:182:0:0:247:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:15:26.465974Z node 24 :BS_PROXY_PUT DEBUG: [a0855708664ec8fa] Id# [72057594037927937:2:182:0:0:247:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:15:26.466016Z node 24 :BS_PROXY_PUT DEBUG: [a0855708664ec8fa] restore Id# [72057594037927937:2:182:0:0:247:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:15:26.466059Z node 24 :BS_PROXY_PUT DEBUG: [a0855708664ec8fa] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:182:0:0:247:1] Marker# BPG33 2025-03-28T09:15:26.466093Z node 24 :BS_PROXY_PUT DEBUG: [a0855708664ec8fa] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:182:0:0:247:1] Marker# BPG32 2025-03-28T09:15:26.466177Z node 24 :BS_PROXY DEBUG: Send to queueActorId# [24:39:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:182:0:0:247:1] FDS# 247 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:15:26.467551Z node 24 :BS_PROXY_PUT DEBUG: [a0855708664ec8fa] received {EvVPutResult Status# OK ID# [72057594037927937:2:182:0:0:247:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 197 } Cost# 81944 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 198 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-28T09:15:26.467640Z node 24 :BS_PROXY_PUT DEBUG: [a0855708664ec8fa] Result# TEvPutResult {Id# [72057594037927937:2:182:0:0:247:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-28T09:15:26.467687Z node 24 :BS_PROXY_PUT INFO: [a0855708664ec8fa] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:182:0:0:247:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:15:26.467776Z node 24 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.575 sample PartId# [72057594037927937:2:182:0:0:247:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 24 } TEvVPutResult{ TimestampMs# 1.966 VDiskId# [0:1:0:0:0] NodeId# 24 Status# OK } ] } 2025-03-28T09:15:26.467971Z node 24 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:182:0:0:247:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-28T09:15:26.468127Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} commited cookie 1 for step 182 2025-03-28T09:15:26.468462Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{541, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-03-28T09:15:26.468526Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{541, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 
0} 2025-03-28T09:15:26.468723Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{541, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{362, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-03-28T09:15:26.468768Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:183} Tx{541, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:15:26.468863Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [24:513:2455] 2025-03-28T09:15:26.468891Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [24:513:2455] 2025-03-28T09:15:26.468943Z node 24 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [24:473:2426] EventType# 268637702 c[def1] *************************--------------------------------------------------------------------------- (0.25) *************************--------------------------------------------------------------------------- (0.25) 2025-03-28T09:15:26.569957Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{542, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-03-28T09:15:26.570058Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{542, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:15:26.570181Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004762592}: tablet 72075186224037910 wasn't changed 2025-03-28T09:15:26.570229Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004762592}: tablet 72075186224037910 skipped channel 0 2025-03-28T09:15:26.570319Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004762592}: tablet 72075186224037910 skipped channel 1 2025-03-28T09:15:26.570368Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923004762592}: tablet 72075186224037910 skipped channel 2 2025-03-28T09:15:26.570457Z node 24 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923004762592}(72075186224037910)::Execute - TryToBoot was not successfull 2025-03-28T09:15:26.570526Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{542, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{363, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-03-28T09:15:26.570578Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{542, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:15:26.582420Z node 24 :BS_PROXY_PUT INFO: [99e6a56c7c5c0a86] bootstrap ActorId# [24:3853:5138] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:183:0:0:247:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:15:26.582562Z node 24 :BS_PROXY_PUT DEBUG: [99e6a56c7c5c0a86] Id# [72057594037927937:2:183:0:0:247:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:15:26.582621Z node 24 :BS_PROXY_PUT DEBUG: [99e6a56c7c5c0a86] restore Id# [72057594037927937:2:183:0:0:247:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:15:26.582675Z node 24 :BS_PROXY_PUT DEBUG: [99e6a56c7c5c0a86] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:183:0:0:247:1] Marker# BPG33 2025-03-28T09:15:26.582730Z node 24 :BS_PROXY_PUT DEBUG: [99e6a56c7c5c0a86] Sending missing VPut part# 0 to# 0 blob Id# 
[72057594037927937:2:183:0:0:247:1] Marker# BPG32 2025-03-28T09:15:26.582892Z node 24 :BS_PROXY DEBUG: Send to queueActorId# [24:39:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:183:0:0:247:1] FDS# 247 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:15:26.584521Z node 24 :BS_PROXY_PUT DEBUG: [99e6a56c7c5c0a86] received {EvVPutResult Status# OK ID# [72057594037927937:2:183:0:0:247:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 198 } Cost# 81944 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 199 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-28T09:15:26.584626Z node 24 :BS_PROXY_PUT DEBUG: [99e6a56c7c5c0a86] Result# TEvPutResult {Id# [72057594037927937:2:183:0:0:247:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-28T09:15:26.584677Z node 24 :BS_PROXY_PUT INFO: [99e6a56c7c5c0a86] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:183:0:0:247:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:15:26.584796Z node 24 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.722 sample PartId# [72057594037927937:2:183:0:0:247:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 24 } TEvVPutResult{ TimestampMs# 2.374 VDiskId# [0:1:0:0:0] NodeId# 24 Status# OK } ] } 2025-03-28T09:15:26.585070Z node 24 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:183:0:0:247:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-28T09:15:26.585306Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} commited cookie 1 for step 183 2025-03-28T09:15:26.585708Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{543, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-03-28T09:15:26.585757Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{543, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:15:26.585984Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{543, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{364, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-03-28T09:15:26.586039Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:184} Tx{543, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:15:26.586150Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [24:513:2455] 2025-03-28T09:15:26.586182Z node 24 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [24:513:2455] 2025-03-28T09:15:26.586242Z node 24 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [24:473:2426] EventType# 268637702 c[def1] *************************--------------------------------------------------------------------------- (0.25) *************************--------------------------------------------------------------------------- (0.25) 2025-03-28T09:15:26.687206Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:185} Tx{544, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-03-28T09:15:26.687296Z node 24 :TABLET_EXECUTOR DEBUG: 
Leader{72057594037927937:2:185} Tx{544, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:15:26.687401Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003062432}: tablet 72075186224037903 wasn't changed 2025-03-28T09:15:26.687458Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003062432}: tablet 72075186224037903 skipped channel 0 2025-03-28T09:15:26.687534Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003062432}: tablet 72075186224037903 skipped channel 1 2025-03-28T09:15:26.687566Z node 24 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923003062432}: tablet 72075186224037903 skipped channel 2 2025-03-28T09:15:26.687672Z node 24 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923003062432}(72075186224037903)::Execute - TryToBoot was not successfull 2025-03-28T09:15:26.687762Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:185} Tx{544, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{365, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-03-28T09:15:26.687815Z node 24 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:185} Tx{544, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} >> THiveTest::TestCreateTabletReboots [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk [GOOD] >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots >> THiveTest::TestHiveBalancerHighUsageAndColumnShards [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] >> SystemView::AuthGroupMembers_TableRange [GOOD] >> KqpQueryService::DdlCache [GOOD] >> KqpQueryService::DdlExecuteScript >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots [GOOD] >> THiveTest::TestCreateTabletChangeToExternal >> THiveTest::TestFollowersCrossDC_MovingLeader [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower >> THiveTest::TestCreateTabletChangeToExternal [GOOD] >> THiveTest::TestExternalBoot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthGroupMembers_TableRange [GOOD] Test command err: 2025-03-28T09:14:12.166802Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786209371997582:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:12.167591Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001582/r3tmp/tmpcE7Ofa/pdisk_1.dat 2025-03-28T09:14:13.061026Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:13.061124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:13.111441Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:13.111743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-03-28T09:14:13.207814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 15537, node 1 2025-03-28T09:14:13.823198Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:13.823228Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:13.823235Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:13.823338Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17125 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:14.918920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:14:15.274213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:14:15.279151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:15.279240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-28T09:14:15.279387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-03-28T09:14:15.279534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:14:15.279624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:14:15.279644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:15.279721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:14:15.279761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:14:15.287394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-03-28T09:14:15.287553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/Tenant1 2025-03-28T09:14:15.287749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:14:15.287767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:14:15.287884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:14:15.287962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:14:15.287976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486786213666965366:2406], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-03-28T09:14:15.287988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486786213666965366:2406], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-03-28T09:14:15.288020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at 
schemeshard: 72057594046644480 2025-03-28T09:14:15.288039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:15.288060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710658:0, at tablet# 72057594046644480 2025-03-28T09:14:15.288083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 waiting... 2025-03-28T09:14:15.295377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:14:15.297370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-28T09:14:15.297462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-28T09:14:15.297472Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-03-28T09:14:15.297494Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-03-28T09:14:15.297517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-28T09:14:15.298108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-28T09:14:15.298181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2025-03-28T09:14:15.298189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2025-03-28T09:14:15.298201Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-03-28T09:14:15.298212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-28T09:14:15.298255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-03-28T09:14:15.298389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:14:15.298399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2025-03-28T09:14:15.298417Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:14:15.315390Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2025-03-28T09:14:15.315496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:14:15.315568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-03-28T09:14:15.315594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2025-03-28T09:14:15.324063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153255361, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:14:15.324202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743153255361 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:14:15.324224Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-03-28T09:14:15.324384Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 128 -> 240 2025-03-28T09:14:15.324414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710658:0, at tablet# 72057594046644480 2025-03-28T09:14:15.324551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 202 ... EM_VIEWS INFO: Scan finished, actor: [31:7486786537120389048:2441], owner: [31:7486786537120389045:2439], scan id: 0, table id: [72057594046644480:1:0:auth_group_members] 2025-03-28T09:15:28.497269Z node 31 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [31:7486786507055615986:2213], database# , query hash# 3383218636718949612, cpu time# 148236 2025-03-28T09:15:28.497980Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153328484, txId: 281474976715695] shutting down 2025-03-28T09:15:28.650161Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976715698. Ctx: { TraceId: 01jqe0hwbv2kbgz20kcy02x1q9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=ZDEyYTA0MTYtMjZmZWMxZTktZjgzOWJkMDMtODczNmJiMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:15:28.652711Z node 31 :SYSTEM_VIEWS INFO: Scan started, actor: [31:7486786537120389082:2450], owner: [31:7486786537120389078:2448], scan id: 0, table id: [72057594046644480:1:0:auth_group_members] 2025-03-28T09:15:28.653427Z node 31 :SYSTEM_VIEWS INFO: Scan prepared, actor: [31:7486786537120389082:2450], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:15:28.653474Z node 31 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 1 is admin: 1 2025-03-28T09:15:28.653580Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:15:28.654002Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user3 },{ Sid: user2 },{ Sid: user1 }] Groups: [{ Sid: group1 Members: [user2,user1] },{ Sid: group3 Members: [user2,user1] },{ Sid: group2 Members: [user3,user2,user1] }] } Children [.metadata,Table0,Tenant1,Tenant2] }] } 2025-03-28T09:15:28.654090Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7486786537120389082:2450], row count: 1, finished: 1 2025-03-28T09:15:28.654115Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7486786537120389082:2450], owner: [31:7486786537120389078:2448], scan id: 0, table id: [72057594046644480:1:0:auth_group_members] 2025-03-28T09:15:28.657398Z node 31 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [31:7486786507055615986:2213], database# , query hash# 10825990382896916327, cpu time# 136824 2025-03-28T09:15:28.658070Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153328648, txId: 281474976715697] shutting down 2025-03-28T09:15:28.795507Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976715700. Ctx: { TraceId: 01jqe0hwgtbdsy2vwx1g8df3qw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=MTcxOTc3OGYtNzUyNjI0NTEtNjAxZDRiNTQtYTI5Y2ExZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:15:28.797715Z node 31 :SYSTEM_VIEWS INFO: Scan started, actor: [31:7486786537120389115:2459], owner: [31:7486786537120389111:2457], scan id: 0, table id: [72057594046644480:1:0:auth_group_members] 2025-03-28T09:15:28.798497Z node 31 :SYSTEM_VIEWS INFO: Scan prepared, actor: [31:7486786537120389115:2459], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:15:28.798531Z node 31 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 1 is admin: 1 2025-03-28T09:15:28.798615Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:15:28.799001Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user3 },{ Sid: user2 },{ Sid: user1 }] Groups: [{ Sid: group1 Members: [user2,user1] },{ Sid: group3 Members: [user2,user1] },{ Sid: group2 Members: [user3,user2,user1] }] } Children [.metadata,Table0,Tenant1,Tenant2] }] } 2025-03-28T09:15:28.799083Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7486786537120389115:2459], row count: 2, finished: 1 2025-03-28T09:15:28.799123Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7486786537120389115:2459], owner: [31:7486786537120389111:2457], scan id: 0, table id: [72057594046644480:1:0:auth_group_members] 2025-03-28T09:15:28.802774Z node 31 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [31:7486786507055615986:2213], database# , query hash# 12756478633923396544, cpu time# 123169 2025-03-28T09:15:28.803492Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153328788, txId: 281474976715699] shutting down 2025-03-28T09:15:28.810554Z node 34 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 0, top size by TLI # 0, time# 2025-03-28T09:15:28.810465Z 2025-03-28T09:15:29.031896Z node 33 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 0, top size by TLI # 0, time# 2025-03-28T09:15:29.031790Z 2025-03-28T09:15:29.040485Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976715702. Ctx: { TraceId: 01jqe0hwnn5mse6ms7p9hdg99f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=OTg5NjU3MGUtMjE0NGU0ZGQtYzAwOGUxZTctY2Q0ZGE2M2I=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:15:29.042197Z node 31 :SYSTEM_VIEWS INFO: Scan started, actor: [31:7486786541415356455:2469], owner: [31:7486786541415356451:2467], scan id: 0, table id: [72057594046644480:1:0:auth_group_members] 2025-03-28T09:15:29.043402Z node 31 :SYSTEM_VIEWS INFO: Scan prepared, actor: [31:7486786541415356455:2469], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:15:29.043438Z node 31 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 1 is admin: 1 2025-03-28T09:15:29.043511Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:15:29.043782Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user3 },{ Sid: user2 },{ Sid: user1 }] Groups: [{ Sid: group1 Members: [user2,user1] },{ Sid: group3 Members: [user2,user1] },{ Sid: group2 Members: [user3,user2,user1] }] } Children [.metadata,Table0,Tenant1,Tenant2] }] } 2025-03-28T09:15:29.043829Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7486786541415356455:2469], row count: 1, finished: 1 2025-03-28T09:15:29.043844Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7486786541415356455:2469], owner: [31:7486786541415356451:2467], scan id: 0, table id: [72057594046644480:1:0:auth_group_members] 2025-03-28T09:15:29.047397Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153329039, txId: 281474976715701] shutting down 2025-03-28T09:15:29.048697Z node 31 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [31:7486786507055615986:2213], database# , query hash# 11357838469093417614, cpu time# 211993 2025-03-28T09:15:29.067671Z node 34 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:15:29.067179Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 34 2025-03-28T09:15:29.067752Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:15:29.067864Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 33 2025-03-28T09:15:29.068109Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:15:29.078217Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 35 2025-03-28T09:15:29.079295Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Connected -> Disconnected 
2025-03-28T09:15:29.067655Z node 33 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:15:29.104478Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7486786513732049076:2099], Type=268959746 2025-03-28T09:15:29.104548Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7486786513732049076:2099], Type=268959746 2025-03-28T09:15:29.104580Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7486786513732049076:2099], Type=268959746 2025-03-28T09:15:29.116073Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7486786511071516867:2102], Type=268959746 2025-03-28T09:15:29.116517Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7486786511071516867:2102], Type=268959746 2025-03-28T09:15:29.116636Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7486786511071516867:2102], Type=268959746 2025-03-28T09:15:29.116785Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 32 2025-03-28T09:15:29.117314Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Connected -> Disconnected >> THiveTest::TestExternalBoot [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower [GOOD] >> THiveTest::TestGetStorageInfo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBoot [GOOD] Test command err: 2025-03-28T09:14:39.911400Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:14:39.935854Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:39.954403Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:14:39.962462Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:68:2073] ControllerId# 72057594037932033 2025-03-28T09:14:39.962525Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:14:39.972132Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:14:39.972532Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:14:39.974978Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:14:39.975047Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:14:39.994795Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:74:2077] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.995011Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:75:2078] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.995213Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:76:2079] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.995341Z node 
3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:77:2080] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.995504Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:78:2081] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.995641Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:79:2082] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.995786Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:80:2083] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.995816Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:14:40.004559Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:68:2073] 2025-03-28T09:14:40.004639Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:68:2073] 2025-03-28T09:14:40.004703Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:14:40.004747Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:14:40.017005Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:14:40.017347Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:14:40.020082Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:40.020262Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-28T09:14:40.020955Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-28T09:14:40.022095Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-03-28T09:14:40.022151Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:14:40.022925Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:91:2077] ControllerId# 72057594037932033 2025-03-28T09:14:40.022950Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:14:40.023009Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:14:40.023176Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:14:40.024891Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.024939Z node 3 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-28T09:14:40.042667Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:14:40.042754Z node 1 :BS_PROXY NOTICE: EnsureMonitoring 
Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:14:40.044432Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:99:2082] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.044600Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:100:2083] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.044752Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:101:2084] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.044904Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:102:2085] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.045042Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:103:2086] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.045194Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:104:2087] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.045350Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:105:2088] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.045392Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:14:40.045459Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:91:2077] 2025-03-28T09:14:40.045486Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:91:2077] 2025-03-28T09:14:40.045531Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:14:40.045565Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:14:40.046378Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:14:40.046473Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:14:40.049030Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:40.049144Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:14:40.049919Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:114:2074] ControllerId# 72057594037932033 2025-03-28T09:14:40.049966Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:14:40.050023Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:14:40.050229Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:14:40.052432Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:14:40.052479Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:14:40.054038Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:120:2078] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.054185Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:121:2079] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.054342Z node 2 :BS_PROXY DEBUG: 
Group# 0 Actor# [2:113:2073] Create Queue# [2:122:2080] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.054483Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:123:2081] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.054628Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:124:2082] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.054791Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:125:2083] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.054913Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:126:2084] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.054940Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:14:40.054993Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:114:2074] 2025-03-28T09:14:40.055017Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:114:2074] 2025-03-28T09:14:40.055068Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:14:40.055100Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:14:40.055684Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:14:40.055867Z node 3 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:14:40.058203Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:68:2073] 2025-03-28T09:14:40.058403Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:114:2074] 2025-03-28T09:14:40.058439Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.058466Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-28T09:14:40.069801Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.069859Z node 3 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-28T09:14:40.088085Z node 3 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-28T09:14:40.099464Z node 3 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-28T09:14:40.099652Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:14:40.112060Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:91:2077] 2025-03-28T09:14:40. ... 
ard result local node, try to connect [28:315:2290] 2025-03-28T09:15:37.422429Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037932033]::SendEvent [28:315:2290] 2025-03-28T09:15:37.422500Z node 28 :PIPE_SERVER DEBUG: [72057594037932033] Accept Connect Originator# [28:315:2290] 2025-03-28T09:15:37.422604Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037932033] connected with status OK role: Leader [28:315:2290] 2025-03-28T09:15:37.422639Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send queued [28:315:2290] 2025-03-28T09:15:37.422667Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [28:315:2290] 2025-03-28T09:15:37.425550Z node 28 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [28:283:2269] EventType# 268637702 2025-03-28T09:15:37.425826Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2025-03-28T09:15:37.425930Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:15:37.426164Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{20, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:15:37.426263Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:15:37.426534Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-03-28T09:15:37.426618Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:15:37.427061Z node 28 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923005807104}(72075186224037888)::Execute - TryToBoot was not successfull 2025-03-28T09:15:37.427202Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2025-03-28T09:15:37.427309Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:15:37.440573Z node 28 :BS_PROXY_PUT INFO: [aeed6b7f2709b4c0] bootstrap ActorId# [28:318:2293] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:693:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:15:37.440697Z node 28 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] Id# [72057594037927937:2:4:0:0:693:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:15:37.440745Z node 28 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] restore Id# [72057594037927937:2:4:0:0:693:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:15:37.440798Z node 28 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:693:1] Marker# BPG33 2025-03-28T09:15:37.440836Z node 28 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] Sending missing VPut part# 0 to# 0 blob Id# 
[72057594037927937:2:4:0:0:693:1] Marker# BPG32 2025-03-28T09:15:37.440957Z node 28 :BS_PROXY DEBUG: Send to queueActorId# [28:35:2079] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:693:1] FDS# 693 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:15:37.446321Z node 28 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] received {EvVPutResult Status# OK ID# [72057594037927937:2:4:0:0:693:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 18 } Cost# 85456 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 19 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-28T09:15:37.446439Z node 28 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:693:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-28T09:15:37.446494Z node 28 :BS_PROXY_PUT INFO: [aeed6b7f2709b4c0] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:693:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:15:37.446618Z node 28 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.53 sample PartId# [72057594037927937:2:4:0:0:693:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 28 } TEvVPutResult{ TimestampMs# 5.928 VDiskId# [0:1:0:0:0] NodeId# 28 Status# OK } ] } 2025-03-28T09:15:37.446782Z node 28 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:693:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-28T09:15:37.446915Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2025-03-28T09:15:37.447202Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:15:37.447303Z node 28 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-03-28T09:15:37.447352Z node 28 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-03-28T09:15:37.447386Z node 28 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-03-28T09:15:37.447423Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-28T09:15:37.447474Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-28T09:15:37.447515Z node 28 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-28T09:15:37.448708Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [28:322:2296] 2025-03-28T09:15:37.448771Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [28:322:2296] 2025-03-28T09:15:37.448900Z node 28 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:15:37.448992Z node 28 :TABLET_RESOLVER DEBUG: SelectForward node 28 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [28:276:2265] 
2025-03-28T09:15:37.449081Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [28:322:2296] 2025-03-28T09:15:37.449146Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [28:322:2296] 2025-03-28T09:15:37.449205Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [28:322:2296] 2025-03-28T09:15:37.449274Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [28:322:2296] 2025-03-28T09:15:37.449404Z node 28 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [28:322:2296] 2025-03-28T09:15:37.449557Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [28:322:2296] 2025-03-28T09:15:37.449628Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [28:322:2296] 2025-03-28T09:15:37.449695Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [28:322:2296] 2025-03-28T09:15:37.449772Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [28:322:2296] 2025-03-28T09:15:37.449843Z node 28 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [28:322:2296] 2025-03-28T09:15:37.449920Z node 28 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [28:321:2295] EventType# 268697624 2025-03-28T09:15:37.450164Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2025-03-28T09:15:37.450256Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:15:37.450492Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{6, redo 83b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-03-28T09:15:37.450587Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:15:37.463551Z node 28 :BS_PROXY_PUT INFO: [49bb8b081a887568] bootstrap ActorId# [28:325:2299] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:5:0:0:92:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:15:37.463715Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] Id# [72057594037927937:2:5:0:0:92:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:15:37.463802Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] restore Id# [72057594037927937:2:5:0:0:92:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:15:37.463896Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:5:0:0:92:1] Marker# BPG33 2025-03-28T09:15:37.463965Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:5:0:0:92:1] Marker# BPG32 2025-03-28T09:15:37.464164Z node 28 :BS_PROXY DEBUG: Send to queueActorId# [28:35:2079] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:5:0:0:92:1] FDS# 92 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:15:37.467611Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] received {EvVPutResult Status# OK ID# [72057594037927937:2:5:0:0:92:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 19 } Cost# 80724 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { 
Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 20 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-28T09:15:37.467756Z node 28 :BS_PROXY_PUT DEBUG: [49bb8b081a887568] Result# TEvPutResult {Id# [72057594037927937:2:5:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-28T09:15:37.467852Z node 28 :BS_PROXY_PUT INFO: [49bb8b081a887568] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:5:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:15:37.468057Z node 28 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.894 sample PartId# [72057594037927937:2:5:0:0:92:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 28 } TEvVPutResult{ TimestampMs# 4.356 VDiskId# [0:1:0:0:0] NodeId# 28 Status# OK } ] } 2025-03-28T09:15:37.468281Z node 28 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:5:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-28T09:15:37.468462Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} commited cookie 1 for step 5
>> THiveTest::TestHiveBalancerWithSpareNodes [GOOD]
>> THiveTest::TestGetStorageInfo [GOOD]
>> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned
>> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD]
>> THiveTest::TestExternalBootWhenLocked

------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD]
Test command err:
2025-03-28T09:14:39.916751Z node 4 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:14:39.935690Z node 4 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:39.953406Z node 4 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:14:39.962124Z node 4 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [4:153:2077] ControllerId# 72057594037932033 2025-03-28T09:14:39.962165Z node 4 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:14:39.970809Z node 4 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:14:39.971217Z node 4 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:14:39.973221Z node 4 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:14:39.973276Z node 4 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:14:39.994281Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:152:2076] Create Queue# [4:159:2081] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.994484Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:152:2076] Create Queue#
[4:160:2082] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.994620Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:152:2076] Create Queue# [4:161:2083] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.994769Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:152:2076] Create Queue# [4:162:2084] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.994879Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:152:2076] Create Queue# [4:163:2085] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.995001Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:152:2076] Create Queue# [4:164:2086] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.995192Z node 4 :BS_PROXY DEBUG: Group# 0 Actor# [4:152:2076] Create Queue# [4:165:2087] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.995223Z node 4 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:14:40.004716Z node 4 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [4:153:2077] 2025-03-28T09:14:40.004777Z node 4 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [4:153:2077] 2025-03-28T09:14:40.004829Z node 4 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:14:40.004878Z node 4 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:14:40.017135Z node 4 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:14:40.017272Z node 5 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:14:40.019589Z node 5 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:40.019683Z node 5 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:14:40.020252Z node 5 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [5:173:2077] ControllerId# 72057594037932033 2025-03-28T09:14:40.020282Z node 5 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:14:40.020340Z node 5 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:14:40.020473Z node 5 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:14:40.021916Z node 5 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:14:40.021968Z node 5 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:14:40.023390Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:172:2076] Create Queue# [5:179:2081] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.023520Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:172:2076] Create Queue# [5:180:2082] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.023620Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:172:2076] Create Queue# [5:181:2083] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.023696Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:172:2076] Create Queue# [5:182:2084] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.023776Z node 5 :BS_PROXY DEBUG: Group# 0 
Actor# [5:172:2076] Create Queue# [5:183:2085] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.023931Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:172:2076] Create Queue# [5:184:2086] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.024009Z node 5 :BS_PROXY DEBUG: Group# 0 Actor# [5:172:2076] Create Queue# [5:185:2087] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.024040Z node 5 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:14:40.024086Z node 5 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [5:173:2077] 2025-03-28T09:14:40.024114Z node 5 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [5:173:2077] 2025-03-28T09:14:40.024141Z node 5 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:14:40.024168Z node 5 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:14:40.024532Z node 5 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:14:40.024609Z node 6 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:14:40.026790Z node 6 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:40.026910Z node 6 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:14:40.027673Z node 6 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [6:193:2077] ControllerId# 72057594037932033 2025-03-28T09:14:40.027703Z node 6 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:14:40.027749Z node 6 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:14:40.027922Z node 6 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:14:40.029721Z node 6 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:14:40.029752Z node 6 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:14:40.030962Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:192:2076] Create Queue# [6:199:2081] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.031081Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:192:2076] Create Queue# [6:200:2082] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.031228Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:192:2076] Create Queue# [6:201:2083] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.031356Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:192:2076] Create Queue# [6:202:2084] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.031467Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:192:2076] Create Queue# [6:203:2085] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.031638Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:192:2076] Create Queue# [6:204:2086] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.031757Z node 6 :BS_PROXY DEBUG: Group# 0 Actor# [6:192:2076] Create Queue# [6:205:2087] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.031773Z 
node 6 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:14:40.031822Z node 6 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [6:193:2077] 2025-03-28T09:14:40.031844Z node 6 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [6:193:2077] 2025-03-28T09:14:40.031868Z node 6 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:14:40.031891Z node 6 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:14:40.032199Z node 6 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:14:40.032354Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:14:40.034535Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:40.034744Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-28T09:14:40.035334Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-28T09:14:40.036359Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-03-28T09:14:40.036421Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:14:40.037240Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:214:2080] ControllerId# 72057594037932033 2025-03-28T09:14:40.037280Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:14:40.037365Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:14:40.037519Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:14:40.049211Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:14:40.049251Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:14:40.050361Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:213:2079] Create Queue# [1:222:2085] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.050496Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:213:2079] Create Queue# [1:223:2086] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.050586Z node 1 :BS_PROX ... 
534Z node 56 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037892 CurrentLeader: [61:1945:2265] CurrentLeaderTablet: [61:1951:2268] CurrentGeneration: 3 CurrentStep: 0} 2025-03-28T09:15:38.110615Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037892 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037892 Cookie: 0 CurrentLeader: [61:1945:2265] CurrentLeaderTablet: [61:1951:2268] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {7, 10, 0}} 2025-03-28T09:15:38.110647Z node 56 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037892 followers: 0 2025-03-28T09:15:38.110709Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037892 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1945:2265] 2025-03-28T09:15:38.110822Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037892] forward result remote node 61 [56:2074:2736] 2025-03-28T09:15:38.110953Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037892] remote node connected [56:2074:2736] 2025-03-28T09:15:38.110999Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037892]::SendEvent [56:2074:2736] 2025-03-28T09:15:38.111202Z node 61 :PIPE_SERVER DEBUG: [72075186224037892] Accept Connect Originator# [56:2074:2736] 2025-03-28T09:15:38.111611Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037892] connected with status OK role: Leader [56:2074:2736] 2025-03-28T09:15:38.111651Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037892] send queued [56:2074:2736] 2025-03-28T09:15:38.112788Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] ::Bootstrap [56:2078:2738] 2025-03-28T09:15:38.112834Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] lookup [56:2078:2738] 2025-03-28T09:15:38.112933Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037893 entry.State: StNormal ev: {EvForward TabletID: 72075186224037893 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:15:38.112981Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1293:2098] 2025-03-28T09:15:38.113083Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] forward result remote node 61 [56:2078:2738] 2025-03-28T09:15:38.113173Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] remote node connected [56:2078:2738] 2025-03-28T09:15:38.113209Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893]::SendEvent [56:2078:2738] 2025-03-28T09:15:38.113385Z node 61 :PIPE_SERVER DEBUG: [72075186224037893] Accept Connect Originator# [56:2078:2738] 2025-03-28T09:15:38.113788Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] connected with status OK role: Leader [56:2078:2738] 2025-03-28T09:15:38.113850Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037893] send queued [56:2078:2738] 2025-03-28T09:15:38.115246Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] ::Bootstrap [56:2081:2740] 2025-03-28T09:15:38.115284Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [56:2081:2740] 2025-03-28T09:15:38.115338Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StNormal ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:15:38.115380Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 
leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [60:1297:2099] 2025-03-28T09:15:38.115493Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result remote node 60 [56:2081:2740] 2025-03-28T09:15:38.115612Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] remote node connected [56:2081:2740] 2025-03-28T09:15:38.115648Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [56:2081:2740] 2025-03-28T09:15:38.116004Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connect request undelivered [56:2081:2740] 2025-03-28T09:15:38.116049Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] immediate retry [56:2081:2740] 2025-03-28T09:15:38.116077Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] lookup [56:2081:2740] 2025-03-28T09:15:38.116137Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037894 entry.State: StNormal 2025-03-28T09:15:38.116316Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037894 entry.State: StProblemResolve ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:15:38.116423Z node 56 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:15:38.116545Z node 56 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2025-03-28T09:15:38.116597Z node 56 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2025-03-28T09:15:38.116633Z node 56 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2025-03-28T09:15:38.116681Z node 56 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [61:1946:2266] CurrentLeaderTablet: [61:1952:2269] CurrentGeneration: 3 CurrentStep: 0} 2025-03-28T09:15:38.116765Z node 56 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 CurrentLeader: [61:1946:2266] CurrentLeaderTablet: [61:1952:2269] CurrentGeneration: 3 CurrentStep: 0} 2025-03-28T09:15:38.116868Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037894 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037894 Cookie: 0 CurrentLeader: [61:1946:2266] CurrentLeaderTablet: [61:1952:2269] CurrentGeneration: 3 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {7, 10, 0}} 2025-03-28T09:15:38.116908Z node 56 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72075186224037894 followers: 0 2025-03-28T09:15:38.116953Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1946:2266] 2025-03-28T09:15:38.117067Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] forward result remote node 61 [56:2081:2740] 2025-03-28T09:15:38.117181Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] remote node connected [56:2081:2740] 2025-03-28T09:15:38.117215Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894]::SendEvent [56:2081:2740] 2025-03-28T09:15:38.117441Z node 61 :PIPE_SERVER DEBUG: [72075186224037894] Accept Connect Originator# [56:2081:2740] 2025-03-28T09:15:38.117858Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] connected with status OK 
role: Leader [56:2081:2740] 2025-03-28T09:15:38.117916Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037894] send queued [56:2081:2740] 2025-03-28T09:15:38.119338Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] ::Bootstrap [56:2085:2742] 2025-03-28T09:15:38.119383Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] lookup [56:2085:2742] 2025-03-28T09:15:38.119437Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037895 entry.State: StNormal ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:15:38.119480Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1791:2193] 2025-03-28T09:15:38.119575Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] forward result remote node 61 [56:2085:2742] 2025-03-28T09:15:38.119667Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] remote node connected [56:2085:2742] 2025-03-28T09:15:38.119703Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895]::SendEvent [56:2085:2742] 2025-03-28T09:15:38.119952Z node 61 :PIPE_SERVER DEBUG: [72075186224037895] Accept Connect Originator# [56:2085:2742] 2025-03-28T09:15:38.120293Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] connected with status OK role: Leader [56:2085:2742] 2025-03-28T09:15:38.120331Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037895] send queued [56:2085:2742] 2025-03-28T09:15:38.121343Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] ::Bootstrap [56:2088:2744] 2025-03-28T09:15:38.121384Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] lookup [56:2088:2744] 2025-03-28T09:15:38.121441Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037896 entry.State: StNormal ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:15:38.121490Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [61:1794:2195] 2025-03-28T09:15:38.121603Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] forward result remote node 61 [56:2088:2744] 2025-03-28T09:15:38.121712Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] remote node connected [56:2088:2744] 2025-03-28T09:15:38.121749Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896]::SendEvent [56:2088:2744] 2025-03-28T09:15:38.121991Z node 61 :PIPE_SERVER DEBUG: [72075186224037896] Accept Connect Originator# [56:2088:2744] 2025-03-28T09:15:38.122330Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] connected with status OK role: Leader [56:2088:2744] 2025-03-28T09:15:38.122387Z node 56 :PIPE_CLIENT DEBUG: TClient[72075186224037896] send queued [56:2088:2744] 2025-03-28T09:15:38.123827Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [56:2090:2745] 2025-03-28T09:15:38.123902Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [56:2090:2745] 2025-03-28T09:15:38.124031Z node 56 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:15:38.124144Z node 56 :TABLET_RESOLVER DEBUG: SelectForward node 56 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [56:596:2274] 
2025-03-28T09:15:38.124305Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [56:2090:2745] 2025-03-28T09:15:38.124418Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown [56:2090:2745] 2025-03-28T09:15:38.124513Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [56:2090:2745] 2025-03-28T09:15:38.124589Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [56:2090:2745] 2025-03-28T09:15:38.124758Z node 56 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [56:2090:2745] 2025-03-28T09:15:38.124973Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [56:2090:2745] 2025-03-28T09:15:38.125070Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [56:2090:2745] 2025-03-28T09:15:38.125126Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [56:2090:2745] 2025-03-28T09:15:38.125226Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [56:2090:2745] 2025-03-28T09:15:38.125305Z node 56 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [56:2090:2745] 2025-03-28T09:15:38.125418Z node 56 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [56:567:2269] EventType# 268697616
>> SystemView::AuthPermissions_Selects [GOOD]
>> THiveTest::TestExternalBootWhenLocked [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo]
>> TStorageBalanceTest::TestScenario2 [GOOD]
>> TStorageBalanceTest::TestScenario3

------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootWhenLocked [GOOD]
Test command err:
2025-03-28T09:15:04.688380Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:15:04.691994Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:15:04.692197Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:15:04.693096Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:68:2073] ControllerId# 72057594037932033 2025-03-28T09:15:04.693132Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:15:04.693233Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:15:04.693575Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:15:04.696035Z node 3 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:15:04.696086Z node 3 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:15:04.698076Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:74:2077] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.698200Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072]
Create Queue# [3:75:2078] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.698309Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:76:2079] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.698387Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:77:2080] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.698493Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:78:2081] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.698595Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:79:2082] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.698707Z node 3 :BS_PROXY DEBUG: Group# 0 Actor# [3:67:2072] Create Queue# [3:80:2083] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.698737Z node 3 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:15:04.698796Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [3:68:2073] 2025-03-28T09:15:04.698838Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [3:68:2073] 2025-03-28T09:15:04.698880Z node 3 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:15:04.698908Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:15:04.699197Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:15:04.699393Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:15:04.701370Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:15:04.701496Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-28T09:15:04.701966Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-28T09:15:04.702799Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-03-28T09:15:04.702849Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:15:04.703358Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:91:2077] ControllerId# 72057594037932033 2025-03-28T09:15:04.703379Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:15:04.703432Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:15:04.703551Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:15:04.704678Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:15:04.704717Z node 3 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} 
TEvNodesInfo 2025-03-28T09:15:04.713915Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:15:04.713981Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:15:04.715656Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:99:2082] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.715825Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:100:2083] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.715963Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:101:2084] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.716121Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:102:2085] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.716254Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:103:2086] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.716415Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:104:2087] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.716573Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:90:2076] Create Queue# [1:105:2088] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.716609Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:15:04.716672Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:91:2077] 2025-03-28T09:15:04.716698Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:91:2077] 2025-03-28T09:15:04.716745Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:15:04.716779Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:15:04.717547Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:15:04.717649Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:15:04.720144Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:15:04.720270Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:15:04.721052Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:114:2074] ControllerId# 72057594037932033 2025-03-28T09:15:04.721084Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:15:04.721141Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:15:04.721340Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:15:04.723318Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:15:04.723357Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:15:04.724935Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# 
[2:120:2078] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.725088Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:121:2079] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.725239Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:122:2080] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.725377Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:123:2081] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.725539Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:124:2082] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.725709Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:125:2083] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.725839Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:113:2073] Create Queue# [2:126:2084] targetNodeId# 1 Marker# DSP01 2025-03-28T09:15:04.725865Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:15:04.725934Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:114:2074] 2025-03-28T09:15:04.725960Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:114:2074] 2025-03-28T09:15:04.725996Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:15:04.726028Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:15:04.726599Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:15:04.726814Z node 3 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:15:04.726938Z node 3 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [3:68:2073] 2025-03-28T09:15:04.727077Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:114:2074] 2025-03-28T09:15:04.727110Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:15:04.727137Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-28T09:15:04.734845Z node 3 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:15:04.734898Z node 3 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-28T09:15:04.739648Z node 3 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-28T09:15:04.739878Z node 3 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-28T09:15:04.740020Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:15:04.749468Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:91:2077] 2025-03-28T09:15:04. ... 
st# true Marker# BPP21 2025-03-28T09:15:42.089591Z node 59 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.027 sample PartId# [72057594037927937:2:8:0:0:200:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 59 } TEvVPutResult{ TimestampMs# 8.364 VDiskId# [0:1:0:0:0] NodeId# 59 Status# OK } ] } 2025-03-28T09:15:42.089877Z node 59 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:8:0:0:200:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-28T09:15:42.090088Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} commited cookie 1 for step 8 2025-03-28T09:15:42.090364Z node 59 :TABLET_MAIN DEBUG: Tablet: 72075186224037888 Received TEvTabletStop from [59:91:2093], reason = ReasonStop Marker# TSYS29 2025-03-28T09:15:42.090424Z node 59 :PIPE_SERVER DEBUG: [72075186224037888] Stop 2025-03-28T09:15:42.090860Z node 59 :TABLET_MAIN NOTICE: Tablet: 72075186224037888 Type: Dummy, EReason: ReasonPill, SuggestedGeneration: 1, KnownGeneration: 1 Marker# TSYS31 2025-03-28T09:15:42.090922Z node 59 :PIPE_SERVER DEBUG: [72075186224037888] Detach 2025-03-28T09:15:42.091182Z node 59 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:3} suiciding, Waste{1:0, 289b +(0, 0b), 2 trc, -0b acc} 2025-03-28T09:15:42.091853Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] peer closed [59:440:2351] 2025-03-28T09:15:42.091926Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [59:440:2351] 2025-03-28T09:15:42.092237Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send [59:92:2093] 2025-03-28T09:15:42.092292Z node 59 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [59:92:2093] 2025-03-28T09:15:42.092408Z node 59 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [59:91:2093] EventType# 268960257 2025-03-28T09:15:42.092736Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2025-03-28T09:15:42.092819Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:15:42.092975Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:15:42.093092Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{22, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:15:42.093358Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2025-03-28T09:15:42.093449Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:15:42.093582Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{13, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:15:42.093678Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{23, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:15:42.094241Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [59:454:2358] 
2025-03-28T09:15:42.094305Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [59:454:2358] 2025-03-28T09:15:42.094430Z node 59 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:15:42.094530Z node 59 :TABLET_RESOLVER DEBUG: SelectForward node 59 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [59:374:2299] 2025-03-28T09:15:42.094648Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result local node, try to connect [59:454:2358] 2025-03-28T09:15:42.095330Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888]::SendEvent [59:454:2358] 2025-03-28T09:15:42.095537Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect request undelivered [59:454:2358] 2025-03-28T09:15:42.095635Z node 59 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [59:454:2358] 2025-03-28T09:15:42.095790Z node 59 :TABLET_RESOLVER DEBUG: Handle TEvTabletProblem tabletId: 72075186224037888 entry.State: StNormal 2025-03-28T09:15:42.096024Z node 59 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:15:42.096200Z node 59 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-03-28T09:15:42.096285Z node 59 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-03-28T09:15:42.096342Z node 59 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-03-28T09:15:42.096411Z node 59 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [59:374:2299] CurrentLeaderTablet: [59:389:2311] CurrentGeneration: 1 CurrentStep: 0} 2025-03-28T09:15:42.096504Z node 59 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 CurrentLeader: [59:374:2299] CurrentLeaderTablet: [59:389:2311] CurrentGeneration: 1 CurrentStep: 0} 2025-03-28T09:15:42.096619Z node 59 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StProblemResolve success: true ev: {EvInfo Status: 0 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [59:374:2299] CurrentLeaderTablet: [59:389:2311] CurrentGeneration: 1 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {3, 6, 0}} 2025-03-28T09:15:42.096827Z node 59 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-03-28T09:15:42.097235Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [60:456:2093] 2025-03-28T09:15:42.097313Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [60:456:2093] 2025-03-28T09:15:42.097430Z node 60 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:15:42.097513Z node 60 :TABLET_RESOLVER DEBUG: SelectForward node 60 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [59:323:2263] 2025-03-28T09:15:42.097604Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [60:456:2093] 2025-03-28T09:15:42.097689Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received pending shutdown 
[60:456:2093] 2025-03-28T09:15:42.097765Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result remote node 59 [60:456:2093] 2025-03-28T09:15:42.097936Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] remote node connected [60:456:2093] 2025-03-28T09:15:42.098013Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [60:456:2093] 2025-03-28T09:15:42.098289Z node 59 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [60:456:2093] 2025-03-28T09:15:42.099516Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [60:456:2093] 2025-03-28T09:15:42.099601Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [60:456:2093] 2025-03-28T09:15:42.099674Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [60:456:2093] 2025-03-28T09:15:42.099770Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [60:456:2093] 2025-03-28T09:15:42.099874Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [60:456:2093] 2025-03-28T09:15:42.099929Z node 60 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [60:456:2093] 2025-03-28T09:15:42.100258Z node 59 :PIPE_SERVER DEBUG: [72057594037927937] Push Sender# [60:443:2088] EventType# 268697624 2025-03-28T09:15:42.100516Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2025-03-28T09:15:42.100613Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:15:42.100883Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{13, redo 83b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-03-28T09:15:42.100988Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:15:42.116439Z node 59 :BS_PROXY_PUT INFO: [9eafe310d4d96c0d] bootstrap ActorId# [59:459:2361] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:92:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:15:42.116628Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] Id# [72057594037927937:2:9:0:0:92:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:15:42.116705Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] restore Id# [72057594037927937:2:9:0:0:92:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:15:42.116793Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG33 2025-03-28T09:15:42.116861Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:92:1] Marker# BPG32 2025-03-28T09:15:42.117068Z node 59 :BS_PROXY DEBUG: Send to queueActorId# [59:59:2081] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:92:1] FDS# 92 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:15:42.118837Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:92:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 24 } Cost# 80724 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# 
Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 25 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-28T09:15:42.119013Z node 59 :BS_PROXY_PUT DEBUG: [9eafe310d4d96c0d] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-28T09:15:42.119128Z node 59 :BS_PROXY_PUT INFO: [9eafe310d4d96c0d] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:15:42.119355Z node 59 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.907 sample PartId# [72057594037927937:2:9:0:0:92:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 59 } TEvVPutResult{ TimestampMs# 2.708 VDiskId# [0:1:0:0:0] NodeId# 59 Status# OK } ] } 2025-03-28T09:15:42.119561Z node 59 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:92:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-28T09:15:42.119721Z node 59 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for step 9 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthPermissions_Selects [GOOD] Test command err: 2025-03-28T09:14:12.403126Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786212989908265:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:12.403635Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00158a/r3tmp/tmphEnirb/pdisk_1.dat 2025-03-28T09:14:13.436144Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:13.446155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:14:13.457089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:13.457190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:13.464341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22744, node 1 2025-03-28T09:14:13.950359Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:13.950378Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:13.950386Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-03-28T09:14:13.960194Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28980 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:14.905033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:15.330133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "Tenant1" } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:14:15.330461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateExtSubDomain Propose, path/Root/Tenant1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:15.330584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Tenant1, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-28T09:14:15.331165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-03-28T09:14:15.331390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:14:15.331517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:14:15.331568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:15.331699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:14:15.331770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:14:15.339829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-03-28T09:14:15.340574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: 
/Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/Tenant1 2025-03-28T09:14:15.341000Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:14:15.341284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:14:15.341469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:14:15.341745Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:14:15.341976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486786217284876026:2394], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 1 2025-03-28T09:14:15.342125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486786217284876026:2394], at schemeshard: 72057594046644480, txId: 281474976715658, path id: 2 2025-03-28T09:14:15.342235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:15.342356Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:15.342479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715658:0, at tablet# 72057594046644480 2025-03-28T09:14:15.342591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715658 ready parts: 1/1 waiting... 
2025-03-28T09:14:15.353139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:14:15.355132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-03-28T09:14:15.357468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-03-28T09:14:15.357739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-03-28T09:14:15.357819Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 4 2025-03-28T09:14:15.357899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-28T09:14:15.358291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-03-28T09:14:15.359171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715658 2025-03-28T09:14:15.359325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715658 2025-03-28T09:14:15.359376Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715658, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 2 2025-03-28T09:14:15.359433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-28T09:14:15.359535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-03-28T09:14:15.359829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:14:15.360085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 0/1, is published: true 2025-03-28T09:14:15.360229Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:14:15.362950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715658 msg type: 269090816 2025-03-28T09:14:15.363059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715658, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:14:15.367854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 
281474976715658 2025-03-28T09:14:15.368064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715658 2025-03-28T09:14:15.370308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153255410, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:14:15.370587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743153255410 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:14:15.370804Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715658:0, at tablet# 72057594046644480 2025-03-28T09:14:15.371110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715658:0 128 -> 240 2025-03-28T09:14:15.371335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715658:0, at tablet# 72057594046644480 2025-03-28T09:14:15.371629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 202 ... 4480, LocalPathId: 1], database node count: 1 2025-03-28T09:15:37.752148Z node 31 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1 2025-03-28T09:15:37.752252Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:15:37.752698Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } Children [.metadata,Dir1,Table0,Tenant1,Tenant2] }] } 2025-03-28T09:15:37.752751Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7486786575362579940:2434], row count: 0, finished: 0 2025-03-28T09:15:37.752854Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:15:37.753104Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] 
Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir1,SubDir2] }] } 2025-03-28T09:15:37.753152Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7486786575362579940:2434], row count: 0, finished: 0 2025-03-28T09:15:37.755222Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:15:37.759323Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-03-28T09:15:37.759453Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7486786575362579940:2434], row count: 2, finished: 0 2025-03-28T09:15:37.759699Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7486786575362579940:2434], owner: [31:7486786575362579937:2432], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-03-28T09:15:37.766144Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153337740, txId: 281474976715687] shutting down 2025-03-28T09:15:37.766970Z node 31 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [31:7486786528117937611:2217], database# , query hash# 3187945588805523718, cpu time# 220369 2025-03-28T09:15:37.988448Z node 31 :KQP_EXECUTER ERROR: TxId: 281474976715690. Ctx: { TraceId: 01jqe0j5dm6jtjd1rmj893wbgb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=31&id=Njg3NTZmNzgtNTY2Y2VkOWItNTA2ZTNiM2YtNDdmMTQ1YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:15:37.991633Z node 31 :SYSTEM_VIEWS INFO: Scan started, actor: [31:7486786575362579985:2443], owner: [31:7486786575362579982:2441], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-03-28T09:15:38.035089Z node 31 :SYSTEM_VIEWS INFO: Scan prepared, actor: [31:7486786575362579985:2443], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:15:38.035128Z node 31 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1 2025-03-28T09:15:38.035210Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:15:38.035797Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } Children [.metadata,Dir1,Table0,Tenant1,Tenant2] }] } 2025-03-28T09:15:38.035857Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7486786575362579985:2443], row count: 0, finished: 0 2025-03-28T09:15:38.037382Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:15:38.041681Z node 31 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [SubDir1,SubDir2] }] } 2025-03-28T09:15:38.041751Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7486786575362579985:2443], row count: 0, finished: 0 2025-03-28T09:15:38.042633Z node 31 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:15:38.042970Z node 31 :SYSTEM_VIEWS 
TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1/SubDir1 TableId: [72057594046644480:10:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] } 2025-03-28T09:15:38.043030Z node 31 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [31:7486786575362579985:2443], row count: 1, finished: 0 2025-03-28T09:15:38.043952Z node 31 :SYSTEM_VIEWS INFO: Scan finished, actor: [31:7486786575362579985:2443], owner: [31:7486786575362579982:2441], scan id: 0, table id: [72057594046644480:1:0:auth_permissions] 2025-03-28T09:15:38.048467Z node 31 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [31:7486786528117937611:2217], database# , query hash# 15123460272068726277, cpu time# 246658 2025-03-28T09:15:38.049230Z node 31 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153337987, txId: 281474976715689] shutting down 2025-03-28T09:15:38.068633Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 34 2025-03-28T09:15:38.069146Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:15:38.070538Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 35 2025-03-28T09:15:38.071359Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:15:38.072447Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 33 2025-03-28T09:15:38.072950Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:15:38.071063Z node 35 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:15:38.073653Z node 33 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:15:38.074151Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 32 2025-03-28T09:15:38.075635Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:15:38.079028Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[35:7486786534384971098:2099], Type=268959746 2025-03-28T09:15:38.079072Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7486786534229923391:2102], Type=268959746 2025-03-28T09:15:38.079098Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7486786534229923391:2102], Type=268959746 2025-03-28T09:15:38.079124Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7486786534229923391:2102], Type=268959746 2025-03-28T09:15:38.079148Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7486786534229923391:2102], Type=268959746 2025-03-28T09:15:38.079171Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7486786534229923391:2102], Type=268959746 
2025-03-28T09:15:38.079199Z node 31 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[32:7486786534229923391:2102], Type=268959746 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] >> KqpQueryService::DdlExecuteScript [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] 
[GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] >> LabeledDbCounters::OneTablet [GOOD] >> LabeledDbCounters::OneTabletRemoveCounters >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::DdlExecuteScript [GOOD] Test command err: Trying to start YDB, gRPC: 9587, MsgBus: 13252 2025-03-28T09:13:23.186299Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786001174734072:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.186490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001571/r3tmp/tmpTS4P64/pdisk_1.dat 2025-03-28T09:13:24.096492Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.105793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.105914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.186555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9587, node 1 2025-03-28T09:13:24.467167Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.467189Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.467196Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.467286Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13252 TClient is connected to server localhost:13252 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:25.563324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:25.601126Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:25.630149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.249040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.493344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:13:26.594567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:13:28.003576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786018354604901:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.003666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.179661Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786001174734072:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.179734Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:30.150465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.383598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.439127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.481460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.523842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.617034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:30.680701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786031239507322:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.680785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.681098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786031239507327:2468], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.685519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:30.702432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786031239507329:2469], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:30.809137Z node 1 :TX_PROXY ERROR: Actor# [1:7486786031239507385:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:32.199565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:13:32.609284Z node 1 :TX_PROXY ERROR: Actor# [1:7486786039829442456:3796] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/TestDdl_0\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:32.609770Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710674, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/TestDdl_0', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges) 2025-03-28T09:13:32.609956Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWMxZDhkMWMtOGFiZjA3Ny0zNjIyNDE4Mi1iMmQ1MGYwYw==, ActorId: [1:7486786039829442444:2526], ActorState: ExecuteState, TraceId: 01jqe0eb5402pwmc6ez064x66k, Create QueryResponse for error on request, msg: 2025-03-28T09:13:32.665723Z node 1 :TX_PROXY ERROR: Actor# [1:7486786039829442480:3808] txid# 281474976710676, issues: { message: "Check failed: path: \'/Root/TestDdl_0\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:32.804161Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-03-28T09:13:32.824733Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486786039829442582:2551], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:17: Error: At function: KiReadTable!
<main>:2:17: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:13:32.825489Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTVhMDY3ZGUtMjJhZDMyNTYtZDNiNzE1M2YtMWVlZmM1NTU=, ActorId: [1:7486786039829442579:2550], ActorState: ExecuteState, TraceId: 01jqe0ebcbd4gtjfdjwd9prmq2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:13:32.846212Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486786039829442590:2555], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:29: Error: At function: KiDropTable!
<main>:2:29: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:13:32.846408Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWNkOTJmMjAtOGIwMGRiZmYtZGUyZmI5OWQtOWUyMzMwMjA=, ActorId: [1:7486786039829442588:2554], ActorState: ExecuteState, TraceId: 01jqe0ebd1f2jzhgvw6493h032, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:13:32.872047Z node 1 :TX_PROXY ERROR: Actor# [1:7486786039829442604:3890] txid# 281474976710679, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-03-28T09:13:32.891939Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486786039829442610:2563], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:17: Error: At function: KiReadTable!
<main>:2:17: Error: Cannot find table 'db.[/Root/TestDdl_0]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:13:32.892123Z ... T_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:15:30.434153Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786546842324923:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:30.434256Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:30.434727Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486786546842324928:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:30.439917Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:15:30.486306Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486786546842324930:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:15:30.590746Z node 3 :TX_PROXY ERROR: Actor# [3:7486786546842324989:3463] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:15:31.464494Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486786529662453439:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:15:31.464575Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:15:31.953925Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:15:32.143085Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037919 not found 2025-03-28T09:15:32.177779Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23030, MsgBus: 16361 2025-03-28T09:15:33.662678Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486786559917130499:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:15:33.662866Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001571/r3tmp/tmpjUTIpg/pdisk_1.dat 2025-03-28T09:15:33.969892Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:15:34.021916Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:15:34.022021Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:15:34.027910Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23030, node 4 2025-03-28T09:15:34.232504Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:15:34.232527Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:15:34.232536Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:15:34.232661Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16361 TClient is connected to server localhost:16361 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:15:35.161122Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:15:35.179748Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:15:35.195409Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:15:35.300313Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:15:35.574313Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:15:35.750655Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:15:38.662009Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486786559917130499:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:15:38.662086Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:15:39.156524Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786585686935913:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:39.156644Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:39.300809Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:15:39.372443Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:15:39.470070Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:15:39.541406Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:15:39.594499Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:15:39.661652Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:15:39.779154Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786585686936437:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:39.779302Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:39.779602Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486786585686936443:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:15:39.784605Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:15:39.818253Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486786585686936445:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:15:39.876837Z node 4 :TX_PROXY ERROR: Actor# [4:7486786585686936501:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:15:41.583682Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:15:41.586222Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:15:41.591693Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:15:42.271684Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] >> SystemView::TopPartitionsByCpuFollowers [GOOD] >> SystemView::TopPartitionsByTliFields >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] 
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCDS64 [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo]
|83.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS64 [GOOD]
Test command err:
Trying to start YDB, gRPC: 29185, MsgBus: 11506
2025-03-28T09:11:53.632975Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785613324283310:2220];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001715/r3tmp/tmpPrShGR/pdisk_1.dat
2025-03-28T09:11:53.983502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-28T09:11:54.286118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:11:54.286224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:11:54.291700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:11:54.358074Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 29185, node 1
2025-03-28T09:11:54.631120Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:11:54.631141Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:11:54.631148Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:11:54.631231Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11506
TClient is connected to server localhost:11506
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:11:55.351746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:11:55.365840Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:11:57.804537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785630504152969:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:57.804658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:57.806279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486785630504152981:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:11:57.810657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:11:57.826668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486785630504152983:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:11:57.919638Z node 1 :TX_PROXY ERROR: Actor# [1:7486785630504153034:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:11:58.303493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:11:58.634806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486785634799120607:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:11:58.635152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486785634799120607:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:11:58.635434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486785634799120607:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:11:58.635560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486785634799120607:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:11:58.635686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486785634799120607:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:11:58.635793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486785634799120607:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:11:58.635898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486785634799120607:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:11:58.636026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486785634799120607:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:11:58.636149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486785634799120607:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:11:58.636271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486785634799120607:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:11:58.636375Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7486785634799120607:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:11:58.636497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486785634799120607:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:11:58.665059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785634799120617:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:11:58.665125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785634799120617:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:11:58.665325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785634799120617:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:11:58.665431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785634799120617:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:11:58.665557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785634799120617:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:11:58.665655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785634799120617:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:11:58.665736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785634799120617:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:11:58.665829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785634799120617:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:11:58.665929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785634799120617:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:11:58.666063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785634799120617:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:11:58.666168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486785634799120617:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:11:58.666270Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7486785634799120617:2361];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:11:58.720058Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785613324283310:2220];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:11:58.730490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486785634799120605:2355];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last ... 13:18.358437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.364038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.364038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.369712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.369712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.375597Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.375598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.381201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.381200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.387248Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.387248Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.393500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:13:18.393501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
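The KQP_SLOW_LOG record in the next block quotes the DDL that took 32.495166s as a single escaped string. Expanded here for readability only (the same three statements from that log entry, with the \n escapes resolved and nothing added):

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);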
2025-03-28T09:13:18.399491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:13:18.399491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:13:18.405127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:13:18.405261Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:13:18.410727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:13:18.410727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:13:18.416075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:13:18.416074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:13:18.421725Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:13:18.421726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:13:18.428500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:13:18.531605Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0cxpm1pv8psp7jqrvrxsg", SessionId: ydb://session/3?node_id=1&id=OTM5NzkwN2QtNjM3M2RlZTUtM2FiZGE0OWEtODYyY2QzZDQ=, Slow query, duration: 32.495166s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-28T09:13:18.879111Z node 1 :TX_COLUMNSHARD_TX WARN:
tablet_id=72075186224038933;self_id=[1:7486785849547521442:7702];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-03-28T09:13:18.879138Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:13:18.879678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:13:18.879954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:15:55.532674Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0g95ac0m9nc7w9hknmp0a", SessionId: ydb://session/3?node_id=1&id=OTM5NzkwN2QtNjM3M2RlZTUtM2FiZGE0OWEtODYyY2QzZDQ=, Slow query, duration: 79.457174s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "PRAGMA TablePathPrefix='/Root/test/ds';\n\n-- NB: Subquerys\n\n$cs_ui =\n\n (select catalog_sales.cs_item_sk cs_item_sk\n\n ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund\n\n from catalog_sales as catalog_sales\n\n cross join catalog_returns as catalog_returns\n\n where cs_item_sk = cr_item_sk\n\n and cs_order_number = cr_order_number\n\n group by catalog_sales.cs_item_sk\n\n having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit));\n\n$cross_sales =\n\n (select item.i_product_name product_name\n\n ,item.i_item_sk item_sk\n\n ,store.s_store_name store_name\n\n ,store.s_zip store_zip\n\n ,ad1.ca_street_number b_street_number\n\n ,ad1.ca_street_name b_street_name\n\n ,ad1.ca_city b_city\n\n ,ad1.ca_zip b_zip\n\n ,ad2.ca_street_number c_street_number\n\n ,ad2.ca_street_name c_street_name\n\n ,ad2.ca_city c_city\n\n ,ad2.ca_zip c_zip\n\n ,d1.d_year as syear\n\n ,d2.d_year as fsyear\n\n ,d3.d_year s2year\n\n ,count(*) cnt\n\n ,sum(ss_wholesale_cost) s1\n\n ,sum(ss_list_price) s2\n\n ,sum(ss_coupon_amt) s3\n\n FROM store_sales as store_sales\n\n cross join store_returns as store_returns\n\n cross join $cs_ui cs_ui\n\n cross join date_dim d1\n\n cross join date_dim d2\n\n cross join date_dim d3\n\n cross join store as store\n\n cross join customer as customer\n\n cross join customer_demographics cd1\n\n cross join customer_demographics cd2\n\n cross join promotion as promotion\n\n cross join household_demographics hd1\n\n cross join household_demographics hd2\n\n cross join customer_address ad1\n\n cross join customer_address ad2\n\n cross join income_band ib1\n\n cross join income_band ib2\n\n cross join item as item\n\n WHERE ss_store_sk = s_store_sk AND\n\n ss_sold_date_sk = d1.d_date_sk AND\n\n ss_customer_sk = c_customer_sk AND\n\n ss_cdemo_sk= cd1.cd_demo_sk AND\n\n ss_hdemo_sk = hd1.hd_demo_sk AND\n\n ss_addr_sk = ad1.ca_address_sk and\n\n ss_item_sk = i_item_sk and\n\n ss_item_sk = sr_item_sk and\n\n ss_ticket_number = sr_ticket_number and\n\n ss_item_sk = cs_ui.cs_item_sk and\n\n c_current_cdemo_sk = cd2.cd_demo_sk AND\n\n c_current_hdemo_sk = hd2.hd_demo_sk AND\n\n c_current_addr_sk = ad2.ca_address_sk and\n\n c_first_sales_date_sk = d2.d_date_sk 
and\n\n c_first_shipto_date_sk = d3.d_date_sk and\n\n ss_promo_sk = p_promo_sk and\n\n hd1.hd_income_band_sk = ib1.ib_income_band_sk and\n\n hd2.hd_income_band_sk = ib2.ib_income_band_sk and\n\n cd1.cd_marital_status <> cd2.cd_marital_status and\n\n i_color in ('azure','gainsboro','misty','blush','hot','lemon') and\n\n i_current_price between 80 and 80 + 10 and\n\n i_current_price between 80 + 1 and 80 + 15\n\ngroup by item.i_product_name\n\n ,item.i_item_sk\n\n ,store.s_store_name\n\n ,store.s_zip\n\n ,ad1.ca_street_number\n\n ,ad1.ca_street_name\n\n ,ad1.ca_city\n\n ,ad1.ca_zip\n\n ,ad2.ca_street_number\n\n ,ad2.ca_street_name\n\n ,ad2.ca_city\n\n ,ad2.ca_zip\n\n ,d1.d_year\n\n ,d2.d_year\n\n ,d3.d_year\n\n);\n\n-- start query 1 in stream 0 using template query64.tpl and seed 1220860970\n\nselect cs1.product_name\n\n ,cs1.store_name\n\n ,cs1.store_zip\n\n ,cs1.b_street_number\n\n ,cs1.b_street_name\n\n ,cs1.b_city\n\n ,cs1.b_zip\n\n ,cs1.c_street_number\n\n ,cs1.c_street_name\n\n ,cs1.c_city\n\n ,cs1.c_zip\n\n ,cs1.syear\n\n ,cs1.cnt\n\n ,cs1.s1 as s11\n\n ,cs1.s2 as s21\n\n ,cs1.s3 as s31\n\n ,cs2.s1 as s12\n\n ,cs2.s2 as s22\n\n ,cs2.s3 as s32\n\n ,cs2.syear\n\n ,cs2.cnt\n\nfrom $cross_sales cs1 cross join $cross_sales cs2\n\nwhere cs1.item_sk=cs2.item_sk and\n\n cs1.syear = 1999 and\n\n cs2.syear = 1999 + 1 and\n\n cs2.cnt <= cs1.cnt and\n\n cs1.store_name = cs2.store_name and\n\n cs1.store_zip = cs2.store_zip\n\norder by cs1.product_name\n\n ,cs1.store_name\n\n ,cs2.cnt\n\n ,s11\n\n ,s21\n\n ,s22;\n\n\n\n-- end query 1 in stream 0 using template query64.tpl\n", parameters: 0b >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] >> SystemView::TopPartitionsByTliFields [GOOD] >> SystemView::TabletsShards >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] >> TTxAllocatorClientTest::ZeroRange [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange [GOOD]
Test command err:
2025-03-28T09:14:54.373526Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32
2025-03-28T09:14:54.387440Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19
2025-03-28T09:14:54.406910Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01
2025-03-28T09:14:54.536975Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-03-28T09:14:54.589458Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor
2025-03-28T09:14:54.647448Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-03-28T09:14:54.647568Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-03-28T09:14:54.647695Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28
2025-03-28T09:14:54.647860Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-03-28T09:14:54.647914Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-03-28T09:14:54.657928Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete
2025-03-28T09:14:54.658166Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24
2025-03-28T09:14:54.658972Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#5000
2025-03-28T09:14:54.687104Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-03-28T09:14:54.687208Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0}
2025-03-28T09:14:54.687376Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000
2025-03-28T09:14:54.687419Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 5000
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo]
|83.7%| [TA] $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|83.7%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1]
|83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility]
|83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD]
>>
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] >> SystemView::TabletsShards [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] 
[GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] [GOOD]
|83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1]
>> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] [GOOD]
>> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std]
>> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::TabletsShards [GOOD]
Test command err:
2025-03-28T09:14:11.511683Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786206392479720:2075];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:14:11.511744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00158b/r3tmp/tmp7OCZMp/pdisk_1.dat
2025-03-28T09:14:12.948961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:14:12.996633Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:14:13.003020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:14:13.011279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:14:13.036561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:14:13.251670Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.131695s
2025-03-28T09:14:13.251730Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.131772s
TServer::EnableGrpc on GrpcPort 10418, node 1
2025-03-28T09:14:13.822832Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:14:13.822850Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:14:13.822857Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:14:13.822947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11109
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:14.915885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:16.513255Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786206392479720:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:16.550633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:17.502565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786232162284147:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:17.507189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:17.535069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786232162284160:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:17.572463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:14:17.606880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786232162284162:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:14:17.743125Z node 1 :TX_PROXY ERROR: Actor# [1:7486786232162284216:2345] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:21.368993Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe0fn058f4jxtc8drbpmtav, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I3ZjFlMDItNmZhMWNmZmEtZTk5ZGJiNjQtNzRkZDNiMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:14:21.574665Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7486786249342153466:2356], owner: [1:7486786249342153462:2354], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-03-28T09:14:21.607424Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7486786249342153466:2356], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:14:21.646675Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7486786249342153466:2356], row count: 2, finished: 1 2025-03-28T09:14:21.652236Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7486786249342153466:2356], owner: [1:7486786249342153462:2354], scan id: 0, table id: [72057594046644480:1:0:ds_vslots] 2025-03-28T09:14:21.844761Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153261234, txId: 281474976710660] shutting down 2025-03-28T09:14:24.475187Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786264836225520:2218];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:24.620082Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00158b/r3tmp/tmpDPscEq/pdisk_1.dat 2025-03-28T09:14:24.837912Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:24.898266Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:24.898344Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:24.916346Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7669, node 2 2025-03-28T09:14:25.303530Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:25.303554Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:25.303561Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:25.303671Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62672 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:25.652176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:25.704448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:14:25.710905Z node 5 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [5:7486786263078675739:2063] waiting... 2025-03-28T09:14:25.735753Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:14:25.762612Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [2:7486786264836225368:2075] 2025-03-28T09:14:25.875161Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [2:7486786260541257956:2072] 2025-03-28T09:14:25.795524Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486786267373643267:2222];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:25.831718Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486786267769511873:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:25.931790Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [3:7486786260675623504:2063] 2025-03-28T09:14:25.903460Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Tenant1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:14:26.001327Z node 5 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [5:7486786263078675739:2063] waiting... 2025-03-28T09:14:26.057270Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:26.067594Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:26.110913Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:26.110995Z node 2 :HIVE WARN: HI ... 
:HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[23:7486786642770664383:2110], Type=268959746 2025-03-28T09:16:10.718046Z node 22 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[23:7486786642770664383:2110], Type=268959746 2025-03-28T09:16:10.718077Z node 22 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[23:7486786642770664383:2110], Type=268959746 2025-03-28T09:16:10.718111Z node 22 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[23:7486786642770664383:2110], Type=268959746 2025-03-28T09:16:10.833089Z node 26 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [26:7486786626921442789:2063] 2025-03-28T09:16:10.885894Z node 24 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [24:7486786642773074346:2206] 2025-03-28T09:16:10.934988Z node 25 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [25:7486786626719275723:2063] 2025-03-28T09:16:10.950876Z node 26 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [26:7486786635511377454:2073] 2025-03-28T09:16:10.993376Z node 25 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [25:7486786626719275723:2063] 2025-03-28T09:16:10.993459Z node 26 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [26:7486786635511377454:2073] 2025-03-28T09:16:10.994981Z node 26 :SYSTEM_VIEWS DEBUG: Send counters: service id# [26:7486786635511377454:2073], processor id# 72075186224037893, database# /Root/Tenant1, generation# 7087366267693218509, node id# 26, is retrying# 0, is labeled# 0 2025-03-28T09:16:11.031589Z node 26 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [26:7486786635511377454:2073], processor id# 72075186224037893, database# /Root/Tenant1 2025-03-28T09:16:11.031798Z node 26 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [26:7486786635511377454:2073], database# /Root/Tenant1, processor id# 72075186224037893 2025-03-28T09:16:11.048439Z node 25 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [25:7486786635309210390:2073] 2025-03-28T09:16:11.049734Z node 25 :SYSTEM_VIEWS DEBUG: Send counters: service id# [25:7486786635309210390:2073], processor id# 72075186224037893, database# /Root/Tenant1, generation# 17641563848316145066, node id# 25, is retrying# 0, is labeled# 0 2025-03-28T09:16:11.083088Z node 25 :SYSTEM_VIEWS WARN: Summary delivery problem: service id# [25:7486786635309210390:2073], processor id# 72075186224037893, database# /Root/Tenant1 2025-03-28T09:16:11.083181Z node 25 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [25:7486786635309210390:2073], database# /Root/Tenant1, processor id# 72075186224037893 2025-03-28T09:16:12.000457Z node 26 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [26:7486786635511377454:2073], interval end# 2025-03-28T09:16:12.000000Z, event interval end# 2025-03-28T09:16:12.000000Z 2025-03-28T09:16:12.000457Z node 26 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [26:7486786626921442789:2063], interval end# 2025-03-28T09:16:12.000000Z, event interval end# 2025-03-28T09:16:12.000000Z 2025-03-28T09:16:12.000495Z node 26 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [26:7486786626921442789:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-03-28T09:16:12.000492Z node 25 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: 
service id# [25:7486786626719275723:2063], interval end# 2025-03-28T09:16:12.000000Z, event interval end# 2025-03-28T09:16:12.000000Z 2025-03-28T09:16:12.000505Z node 26 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [26:7486786635511377454:2073], query logs count# 0, processor ids count# 1, processor id to database count# 1 2025-03-28T09:16:12.000534Z node 25 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [25:7486786626719275723:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-03-28T09:16:12.000857Z node 25 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [25:7486786635309210390:2073], interval end# 2025-03-28T09:16:12.000000Z, event interval end# 2025-03-28T09:16:12.000000Z 2025-03-28T09:16:12.000912Z node 25 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [25:7486786635309210390:2073], query logs count# 0, processor ids count# 1, processor id to database count# 1 2025-03-28T09:16:12.001755Z node 23 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [23:7486786642770664149:2074], interval end# 2025-03-28T09:16:12.000000Z, event interval end# 2025-03-28T09:16:12.000000Z 2025-03-28T09:16:12.001766Z node 23 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessInterval: service id# [23:7486786629885762179:2063], interval end# 2025-03-28T09:16:12.000000Z, event interval end# 2025-03-28T09:16:12.000000Z 2025-03-28T09:16:12.001801Z node 23 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [23:7486786642770664149:2074], query logs count# 0, processor ids count# 1, processor id to database count# 1 2025-03-28T09:16:12.001801Z node 23 :SYSTEM_VIEWS DEBUG: Rotate logs: service id# [23:7486786629885762179:2063], query logs count# 0, processor ids count# 0, processor id to database count# 0 2025-03-28T09:16:14.052821Z node 27 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[27:7486786736813512969:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:16:14.052922Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00158b/r3tmp/tmpw5pvZ7/pdisk_1.dat 2025-03-28T09:16:14.245294Z node 27 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:16:14.268997Z node 27 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:16:14.269122Z node 27 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:16:14.271331Z node 27 :HIVE WARN: HIVE#72057594037968897 Node(27, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6840, node 27 2025-03-28T09:16:14.339253Z node 27 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:16:14.339284Z node 27 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:16:14.339294Z node 27 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:16:14.339492Z node 27 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14973 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:16:14.734818Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:16:14.748137Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:16:19.053925Z node 27 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[27:7486786736813512969:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:16:19.054042Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:16:19.755704Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [27:7486786758288350282:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:16:19.755810Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [27:7486786758288350289:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:16:19.755932Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:16:19.762513Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:16:19.777400Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [27:7486786758288350299:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:16:19.844868Z node 27 :TX_PROXY ERROR: Actor# [27:7486786758288350350:2461] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:16:20.001147Z node 27 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe0k9nc23cbp1wnty29de73, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=27&id=Yzk5NGYyNzgtOTQ4MDNkMGYtNzQ0NjRjNjYtNGVkZjJkNDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:16:20.008843Z node 27 :SYSTEM_VIEWS INFO: Scan started, actor: [27:7486786762583317692:2357], owner: [27:7486786762583317689:2355], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-28T09:16:20.023746Z node 27 :SYSTEM_VIEWS INFO: Scan prepared, actor: [27:7486786762583317692:2357], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:16:20.028376Z node 27 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [27:7486786762583317692:2357], row count: 3, finished: 1 2025-03-28T09:16:20.028439Z node 27 :SYSTEM_VIEWS INFO: Scan finished, actor: [27:7486786762583317692:2357], owner: [27:7486786762583317689:2355], scan id: 0, table id: [72057594046644480:1:0:hive_tablets] 2025-03-28T09:16:20.032666Z node 27 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153379999, txId: 281474976715661] shutting down >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> SystemView::PartitionStatsTtlFields [GOOD] >> SystemView::QueryStatsAllTables >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] Test command err: 2025-03-28T09:06:58.461804Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:06:58.602248Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:06:58.625987Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:06:58.626257Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:06:58.633893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:06:58.634100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:06:58.634318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:06:58.634445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:06:58.634560Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:06:58.634769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:06:58.634877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:06:58.634993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:06:58.635113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:06:58.635219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:06:58.635326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:06:58.635456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:06:58.680008Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:06:58.680370Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:06:58.680421Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:06:58.680590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:06:58.680759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:06:58.680851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:06:58.680894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:06:58.680989Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:06:58.681068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:06:58.681105Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:06:58.681137Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:06:58.681292Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:06:58.681351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:06:58.681387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:06:58.681416Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:06:58.681552Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:06:58.681606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:06:58.681644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:06:58.681672Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:06:58.681738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:06:58.682285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:06:58.682339Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:06:58.682405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:06:58.682446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:06:58.682480Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:06:58.682904Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=72; 2025-03-28T09:06:58.683006Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=49; 2025-03-28T09:06:58.683097Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=46; 2025-03-28T09:06:58.683178Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=29; 2025-03-28T09:06:58.683383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:06:58.683456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:06:58.683499Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:06:58.683671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:06:58.683711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:06:58.683742Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:06:58.683897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:06:58.683943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:06:58.683970Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:06:58.684147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:06:58.684191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:06:58.684220Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:06:58.684368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:06:58.684428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:06:58.684476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
T09:16:33.490008Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10503:12464];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-03-28T09:16:34.324294Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T09:16:34.324412Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=17; 2025-03-28T09:16:34.325052Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=549; 2025-03-28T09:16:34.325105Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=632; 2025-03-28T09:16:34.330412Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T09:16:34.330516Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=13; 2025-03-28T09:16:34.353190Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=22548; 2025-03-28T09:16:34.376861Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=22020; 2025-03-28T09:16:34.377017Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=23695; 2025-03-28T09:16:34.377242Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=143; 2025-03-28T09:16:34.377410Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=113; 2025-03-28T09:16:34.377628Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=164; 2025-03-28T09:16:34.377827Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=130; 2025-03-28T09:16:34.378098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=212; 2025-03-28T09:16:34.378151Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=47576; 2025-03-28T09:16:34.384161Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T09:16:34.384281Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=14; 2025-03-28T09:16:34.388708Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4317; 2025-03-28T09:16:34.422120Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=33284; 2025-03-28T09:16:34.422307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=52; 2025-03-28T09:16:34.422405Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=38; 2025-03-28T09:16:34.422466Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-03-28T09:16:34.422530Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=14; 2025-03-28T09:16:34.422589Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=12; 2025-03-28T09:16:34.422706Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=55; 2025-03-28T09:16:34.422767Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=10; 2025-03-28T09:16:34.422887Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=70; 2025-03-28T09:16:34.422947Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=10; 2025-03-28T09:16:34.423038Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=44; 2025-03-28T09:16:34.423164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=75; 2025-03-28T09:16:34.423283Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=71; 2025-03-28T09:16:34.423336Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=38985; 2025-03-28T09:16:34.423572Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=113961708;raw_bytes=176366876;count=47;records=1845000} inactive {blob_bytes=174125508;raw_bytes=270077548;count=81;records=2819164} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:16:34.424754Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10503:12464];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:16:34.424849Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10503:12464];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:16:34.424961Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:10503:12464];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:16:34.425034Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10503:12464];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-03-28T09:16:34.425337Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:16:34.425441Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:16:34.425747Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-03-28T09:16:34.425853Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-28T09:16:34.425913Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:16:34.425980Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:16:34.426050Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:16:34.426188Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:16:34.431464Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:16:34.437357Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10503:12464];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:16:34.439225Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10503:12464];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:16:34.439296Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:16:34.439333Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:16:34.439401Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10503:12464];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:16:34.439502Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10503:12464];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:16:34.439838Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10503:12464];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-03-28T09:16:34.439948Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10503:12464];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-28T09:16:34.440012Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10503:12464];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:16:34.440088Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10503:12464];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:16:34.440149Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10503:12464];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:16:34.440270Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10503:12464];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.998000s; 2025-03-28T09:16:34.440347Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10503:12464];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] |83.9%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> LabeledDbCounters::OneTabletRemoveCounters [GOOD] >> LabeledDbCounters::OneTabletRestart >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64
2025-03-28 09:16:57,790 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-28 09:16:57,985 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination:
    pid     rss    ref   pdirt
 129819   46.4M  46.0M  23.4M  test_tool run_ut @/home/runner/.ya/build/build_root/2909/000d6c/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk35/testing_out_stuff/test_tool.args
 130126    1.7G   1.7G   1.7G  └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/2909/000d6c/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chu
Test command err:
2025-03-28T09:07:00.032870Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:07:00.136842Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:07:00.162435Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:07:00.162813Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:07:00.173594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:07:00.173827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:07:00.174071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:07:00.174203Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:07:00.174333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:07:00.174461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:07:00.174574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:07:00.174730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:07:00.174850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:07:00.174958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:07:00.175075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:07:00.175192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:07:00.216655Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:07:00.217206Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:07:00.217277Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:07:00.217530Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:07:00.217743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:07:00.217849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:07:00.217910Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:07:00.218027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-03-28T09:07:00.218107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:07:00.218156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:07:00.218188Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:07:00.218362Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:07:00.218426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:07:00.218470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:07:00.218505Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:07:00.218666Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:07:00.218781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:07:00.218843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:07:00.218884Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:07:00.218987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:07:00.219036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:07:00.219075Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:07:00.219139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:07:00.219190Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:07:00.219262Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:07:00.219780Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=58; 2025-03-28T09:07:00.219896Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=54; 2025-03-28T09:07:00.220035Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=54; 2025-03-28T09:07:00.220147Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=50; 2025-03-28T09:07:00.220381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:07:00.220458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:07:00.220499Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:07:00.220726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:07:00.220774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:07:00.220806Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:07:00.220974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:07:00.221031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:07:00.221064Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:07:00.221269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:07:00.221316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:07:00.221347Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:07:00.221508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:07:00.221563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:07:00.221616Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-28T09:16:57.542220Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:16:57.543580Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-03-28T09:16:57.560081Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=ASSEMBLER::SPEC;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};;scan_step_idx=2; 2025-03-28T09:16:57.562301Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=ASSEMBLER::LAST_PK;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};;scan_step_idx=3; 2025-03-28T09:16:57.632218Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=SNAPSHOT;details={};;scan_step_idx=4; 2025-03-28T09:16:57.640429Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=ALLOCATE_MEMORY::Fetching;details={stage=Fetching;column_ids=[];};;scan_step_idx=5; 2025-03-28T09:16:57.640567Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=OPTIONAL_ASSEMBLER::LAST;details={columns=(column_ids=6;column_names=message;);;};;scan_step_idx=6; 2025-03-28T09:16:57.640982Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=BUILD_STAGE_RESULT;details={};;scan_step_idx=7; 2025-03-28T09:16:57.644569Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-28T09:16:57.644784Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:2;records_count:56668;schema=timestamp: uint64 message: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-03-28T09:16:57.644834Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-28T09:16:57.644880Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 
2025-03-28T09:16:57.658909Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-28T09:16:57.658983Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=33; 2025-03-28T09:16:57.659032Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=33;intervalId=1316; 2025-03-28T09:16:57.659092Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:43;event=interval_result_absent;interval_idx=2;merger=0;interval_id=1285; 2025-03-28T09:16:57.659136Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:67;event=wait_interval;remained=53;interval_idx=2; 2025-03-28T09:16:57.659303Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:2;records_count:56668;schema=timestamp: uint64 message: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-03-28T09:16:57.659351Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-28T09:16:57.659395Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:16:57.659887Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-03-28T09:16:57.678456Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=ASSEMBLER::SPEC;details={columns=(column_ids=4294967040,4294967041,4294967042;column_names=_yql_plan_step,_yql_tx_id,_yql_write_id;);;};;scan_step_idx=2; 2025-03-28T09:16:57.680795Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=ASSEMBLER::LAST_PK;details={columns=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;};;scan_step_idx=3; 2025-03-28T09:16:57.756342Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=SNAPSHOT;details={};;scan_step_idx=4; 2025-03-28T09:16:57.765052Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=ALLOCATE_MEMORY::Fetching;details={stage=Fetching;column_ids=[];};;scan_step_idx=5; 2025-03-28T09:16:57.765183Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=OPTIONAL_ASSEMBLER::LAST;details={columns=(column_ids=6;column_names=message;);;};;scan_step_idx=6; 2025-03-28T09:16:57.765617Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=BUILD_STAGE_RESULT;details={};;scan_step_idx=7; 2025-03-28T09:16:57.770001Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-28T09:16:57.770073Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=8; 2025-03-28T09:16:57.770127Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=8;intervalId=1291; 2025-03-28T09:16:57.770188Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:43;event=interval_result_absent;interval_idx=2;merger=0;interval_id=1285; 2025-03-28T09:16:57.770233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=scanner.cpp:67;event=wait_interval;remained=53;interval_idx=2; 2025-03-28T09:16:57.770408Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:2;records_count:56668;schema=timestamp: uint64 message: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-03-28T09:16:57.770451Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-28T09:16:57.770496Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:16:57.773603Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-28T09:16:57.773671Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-03-28T09:16:57.773716Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=39; 2025-03-28T09:16:57.773772Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=39;interval_id=1322;memory=11844398;count=1; 2025-03-28T09:16:57.774298Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:15550:17524];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=39;
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2909/000d6c/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk35/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2909/000d6c/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk35/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {})
|83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test
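The traceback above shows the wrapper's two-stage timeout path: yatest's process.py polls the test binary until the 600-second budget is exhausted and raises TimeoutError, which run_test.py then converts into ExecutionTimeoutError so the run is reported as a timed-out failure instead of hanging the CI job. A minimal sketch of that poll-until-deadline pattern follows; the names wait_with_timeout, ProcessTimeoutError, and the 0.5 s poll interval are illustrative assumptions, not the real yatest internals.

    import subprocess
    import time


    class ProcessTimeoutError(RuntimeError):
        """Raised when the child process outlives its wall-clock budget."""


    def wait_with_timeout(proc: subprocess.Popen, run_timeout: float,
                          poll_s: float = 0.5) -> int:
        """Poll `proc` until it exits or `run_timeout` seconds elapse."""
        deadline = time.monotonic() + run_timeout
        while proc.poll() is None:
            if time.monotonic() >= deadline:
                # Mirrors the log: report the overrun, then terminate the
                # child so no orphaned test binary keeps running.
                proc.terminate()
                raise ProcessTimeoutError(
                    f"{run_timeout} second(s) wait timeout has expired")
            time.sleep(poll_s)
        return proc.returncode

Before terminating, the real wrapper also dumps the process tree (pid/rss/ref/pdirt, as in the report above), which is what makes the memory footprint of the timed-out binary visible in the log.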
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64
2025-03-28 09:17:00,472 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-28 09:17:00,654 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination:
    pid     rss    ref   pdirt
 130368   46.6M  41.0M  23.8M  test_tool run_ut @/home/runner/.ya/build/build_root/2909/000d67/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk32/testing_out_stuff/test_tool.args
 130656    1.8G   1.8G   1.7G  └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/2909/000d67/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chu
Test command err:
2025-03-28T09:07:03.061352Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:07:03.205908Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:07:03.227394Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:07:03.227783Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:07:03.235853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:07:03.236050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:07:03.236226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:07:03.236341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:07:03.236436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:07:03.236535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:07:03.236604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:07:03.236683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:07:03.236760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:07:03.236823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:07:03.236892Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:07:03.236970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:07:03.278770Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:07:03.279215Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:07:03.279278Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:07:03.279537Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:07:03.279724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:07:03.279831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:07:03.279883Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:07:03.279987Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:07:03.280055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:07:03.280099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:07:03.280131Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:07:03.280325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:07:03.280399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:07:03.280449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:07:03.280481Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:07:03.280621Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:07:03.280678Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:07:03.280733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:07:03.280765Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:07:03.280850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:07:03.280892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:07:03.280927Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:07:03.280977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:07:03.281015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:07:03.281064Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:07:03.281508Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-03-28T09:07:03.281605Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-03-28T09:07:03.281703Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=50; 2025-03-28T09:07:03.281809Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-03-28T09:07:03.282005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:07:03.282065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:07:03.282099Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:07:03.282286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:07:03.282328Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:07:03.282362Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:07:03.282520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:07:03.282574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:07:03.282604Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:07:03.282842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:07:03.282888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:07:03.282918Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:07:03.283049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:07:03.283104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:07:03.283171Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
tractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:16:56.262795Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:16:56.263462Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:16:56.263614Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:11353;schema=timestamp: int64 message: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-03-28T09:16:56.263674Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:16:56.263795Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;);columns=2;rows=11353; 2025-03-28T09:16:56.263883Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=215707;num_rows=11353;batch_columns=timestamp,message; 2025-03-28T09:16:56.264075Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:15386:17361];bytes=215707;rows=11353;faults=0;finished=0;fault=0;schema=timestamp: int64 message: string; 2025-03-28T09:16:56.264212Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-03-28T09:16:56.264361Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-03-28T09:16:56.264481Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-03-28T09:16:56.264832Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:16:56.264949Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-03-28T09:16:56.265057Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-03-28T09:16:56.265127Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:15388:17362] finished for tablet 9437184 2025-03-28T09:16:56.265775Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:15386:17361];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.031},{"events":["l_bootstrap"],"t":0.087},{"events":["f_processing","f_task_result"],"t":0.094},{"events":["l_task_result"],"t":22.542},{"events":["f_ack"],"t":22.557},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":22.734}],"full":{"a":1743153393530867,"name":"_full_task","f":1743153393530867,"d_finished":0,"c":0,"l":1743153416265195,"d":22734328},"events":[{"name":"bootstrap","f":1743153393531210,"d_finished":87056,"c":1,"l":1743153393618266,"d":87056},{"a":1743153416264808,"name":"ack","f":1743153416087971,"d_finished":104980,"c":49,"l":1743153416264508,"d":105367},{"a":1743153416264792,"name":"processing","f":1743153393625273,"d_finished":9950310,"c":458,"l":1743153416264511,"d":9950713},{"name":"ProduceResults","f":1743153393562319,"d_finished":305155,"c":509,"l":1743153416265095,"d":305155},{"a":1743153416265099,"name":"Finish","f":1743153416265099,"d_finished":0,"c":0,"l":1743153416265195,"d":96},{"name":"task_result","f":1743153393625297,"d_finished":9829873,"c":409,"l":1743153416073352,"d":9829873}],"id":"9437184::14"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-03-28T09:16:56.265864Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:15386:17361];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:16:56.266467Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:15386:17361];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.031},{"events":["l_bootstrap"],"t":0.087},{"events":["f_processing","f_task_result"],"t":0.094},{"events":["l_task_result"],"t":22.542},{"events":["f_ack"],"t":22.557},{"events":["l_ProduceResults","f_Finish"],"t":22.734},{"events":["l_ack","l_processing","l_Finish"],"t":22.735}],"full":{"a":1743153393530867,"name":"_full_task","f":1743153393530867,"d_finished":0,"c":0,"l":1743153416265924,"d":22735057},"events":[{"name":"bootstrap","f":1743153393531210,"d_finished":87056,"c":1,"l":1743153393618266,"d":87056},{"a":1743153416264808,"name":"ack","f":1743153416087971,"d_finished":104980,"c":49,"l":1743153416264508,"d":106096},{"a":1743153416264792,"name":"processing","f":1743153393625273,"d_finished":9950310,"c":458,"l":1743153416264511,"d":9951442},{"name":"ProduceResults","f":1743153393562319,"d_finished":305155,"c":509,"l":1743153416265095,"d":305155},{"a":1743153416265099,"name":"Finish","f":1743153416265099,"d_finished":0,"c":0,"l":1743153416265924,"d":825},{"name":"task_result","f":1743153393625297,"d_finished":9829873,"c":409,"l":1743153416073352,"d":9829873}],"id":"9437184::14"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;;); 2025-03-28T09:16:56.266583Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:16:33.529706Z;index_granules=0;index_portions=51;index_batches=41754;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=114576060;inserted_portions_bytes=10667072;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=125243132;selected_rows=0; 2025-03-28T09:16:56.266633Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:16:56.275155Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:15388:17362];TabletId=9437184;ScanId=0;TxId=122;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,6;column_names=message,timestamp;);;program_input=(column_ids=1,6;column_names=message,timestamp;);;; Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2909/000d67/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk32/testing_out_stuff/test_tool.args']' stopped by 600 
seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2909/000d67/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk32/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString 2025-03-28 09:17:02,672 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-28 09:17:02,836 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
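The traceback pair above is the yatest wrapper enforcing the per-test budget: process.wait() delegates to wait_for(), which raises yatest.common.process.TimeoutError after 600 seconds, and run_test.py rewraps it as ExecutionTimeoutError; the process tree printed just below is the wrapper's post-timeout diagnostic. For readers outside this toolchain, here is a minimal standalone sketch of the same wait-with-deadline pattern built only on the standard library; it imitates the behaviour seen here and is not the actual yatest implementation.

import subprocess

# Minimal sketch of the wrapper behaviour in the traceback above: run a command
# under a hard deadline and surface expiry as a chained exception. Class and
# function names here are illustrative, not yatest's API.
class ExecutionTimeoutError(RuntimeError):
    pass

def run_with_deadline(cmd: list[str], timeout_s: float) -> int:
    proc = subprocess.Popen(cmd)
    try:
        return proc.wait(timeout=timeout_s)
    except subprocess.TimeoutExpired as err:
        proc.kill()  # the real wrapper dumps a process tree before terminating
        raise ExecutionTimeoutError(
            f"{timeout_s} second(s) wait timeout has expired: {cmd!r}") from err

# Example: run_with_deadline(["sleep", "5"], timeout_s=1) raises ExecutionTimeoutError.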
Process tree before termination: pid rss ref pdirt 130771 46.7M 46.4M 23.8M test_tool run_ut @/home/runner/.ya/build/build_root/2909/000d5f/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args 131470 1.5G 1.5G 1.5G └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/2909/000d5f/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chu Test command err: 2025-03-28T09:07:04.706018Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:07:04.815816Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:07:04.844150Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:07:04.844699Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:07:04.854318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:07:04.854613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:07:04.854958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:07:04.855129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:07:04.855270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:07:04.855458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:07:04.855618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:07:04.855790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:07:04.855956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:07:04.856107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:07:04.856300Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:07:04.856458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:07:04.882984Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:07:04.883360Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:07:04.883425Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:07:04.883723Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:07:04.883924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:07:04.884030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:07:04.884087Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:07:04.884203Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:07:04.884302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:07:04.884354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:07:04.884396Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:07:04.884591Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:07:04.884692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:07:04.884749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:07:04.884785Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:07:04.884980Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:07:04.885052Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:07:04.885111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:07:04.885146Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:07:04.885244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:07:04.885295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:07:04.885332Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:07:04.885388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:07:04.885435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:07:04.885473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:07:04.885990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=57; 2025-03-28T09:07:04.886102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=51; 2025-03-28T09:07:04.886209Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=53; 2025-03-28T09:07:04.886320Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-03-28T09:07:04.886531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:07:04.886605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:07:04.886649Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:07:04.886932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:07:04.886989Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:07:04.887027Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:07:04.887246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:07:04.887306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:07:04.887346Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:07:04.887572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:07:04.887648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:07:04.887685Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:07:04.887873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:07:04.887930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:07:04.888000Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
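The stats={...} blobs logged earlier by the scan actor (the scan_finish/scan_finished records for Scan [1:15388:17362]) encode an event timeline: "f" and "l" look like microsecond epoch timestamps, "d" and "d_finished" like microsecond durations, and "c" like a call count, so "d":22734328 under "_full_task" is roughly 22.7 s of wall time for that single scan. A short summarizer for such a blob, assuming only those inferred field meanings (they are read off this log, not taken from YDB documentation):

import json

# Sketch: summarize a scan-actor stats blob like the ones logged above. Field
# semantics (microsecond durations, c = call count) are inferred from this log.
def summarize_scan_stats(blob: str) -> None:
    stats = json.loads(blob)
    print(f"wall time: {stats['full']['d'] / 1e6:.3f} s")
    for ev in stats["events"]:
        print(f"  {ev['name']:>14}: {ev['d_finished'] / 1e6:.3f} s over {ev['c']} call(s)")

sample = ('{"full":{"name":"_full_task","d":22734328},'
          '"events":[{"name":"task_result","d_finished":9829873,"c":409},'
          '{"name":"ProduceResults","d_finished":305155,"c":509}]}')
summarize_scan_stats(sample)  # wall time: 22.734 s, then one line per event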
106;method=PutObject;id=[9437184:2:77:255:176:2776:0]; 2025-03-28T09:17:02.637135Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:177:2736:0]; 2025-03-28T09:17:02.637198Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:178:8592:0]; 2025-03-28T09:17:02.637247Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:179:2784:0]; 2025-03-28T09:17:02.637294Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:180:2784:0]; 2025-03-28T09:17:02.637355Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:181:2784:0]; 2025-03-28T09:17:02.637409Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:182:2888:0]; 2025-03-28T09:17:02.637459Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:183:2784:0]; 2025-03-28T09:17:02.637528Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:184:2792:0]; 2025-03-28T09:17:02.637608Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:185:2888:0]; 2025-03-28T09:17:02.637670Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:186:2792:0]; 2025-03-28T09:17:02.637737Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:187:2792:0]; 2025-03-28T09:17:02.637804Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:188:2856:0]; 2025-03-28T09:17:02.637857Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:189:2800:0]; 2025-03-28T09:17:02.637913Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:190:2792:0]; 2025-03-28T09:17:02.638002Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:191:2792:0]; 2025-03-28T09:17:02.638098Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:192:2792:0]; 2025-03-28T09:17:02.638176Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:193:2784:0]; 2025-03-28T09:17:02.638302Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:194:2784:0]; 2025-03-28T09:17:02.638370Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:195:2840:0]; 2025-03-28T09:17:02.638432Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:196:2792:0]; 2025-03-28T09:17:02.638493Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:197:2784:0]; 2025-03-28T09:17:02.638559Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:198:2880:0]; 2025-03-28T09:17:02.638619Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:199:2784:0]; 2025-03-28T09:17:02.638701Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:200:2784:0]; 2025-03-28T09:17:02.638782Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:201:2880:0]; 2025-03-28T09:17:02.638865Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:202:2784:0]; 2025-03-28T09:17:02.638947Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:203:2784:0]; 2025-03-28T09:17:02.639022Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:204:2840:0]; 2025-03-28T09:17:02.639085Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:205:2776:0]; 2025-03-28T09:17:02.639143Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:206:2776:0]; 2025-03-28T09:17:02.639206Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:207:2744:0]; 2025-03-28T09:17:02.639265Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:208:2744:0]; 2025-03-28T09:17:02.639324Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:209:2728:0]; 2025-03-28T09:17:02.639386Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:210:2728:0]; 2025-03-28T09:17:02.639444Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:211:2800:0]; 2025-03-28T09:17:02.639503Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:212:8872:0]; 2025-03-28T09:17:02.639565Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:213:2784:0]; 2025-03-28T09:17:02.639627Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:214:2784:0]; 2025-03-28T09:17:02.639694Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:215:2864:0]; 2025-03-28T09:17:02.639753Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:216:2784:0]; 2025-03-28T09:17:02.639816Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:217:2784:0]; 2025-03-28T09:17:02.639877Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:218:2880:0]; 2025-03-28T09:17:02.639958Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:219:2784:0]; 2025-03-28T09:17:02.640040Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:220:2792:0]; 2025-03-28T09:17:02.640105Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:221:2888:0]; 2025-03-28T09:17:02.640166Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:222:2800:0]; 2025-03-28T09:17:02.640230Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:223:2792:0]; 2025-03-28T09:17:02.640292Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:224:2848:0]; 2025-03-28T09:17:02.640354Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:225:2792:0]; 2025-03-28T09:17:02.640415Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:226:2792:0]; 2025-03-28T09:17:02.640476Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:227:2792:0]; 2025-03-28T09:17:02.640540Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:228:2800:0]; 2025-03-28T09:17:02.640602Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:229:2784:0]; 2025-03-28T09:17:02.640665Z node 1 
:S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:230:2784:0]; 2025-03-28T09:17:02.640727Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:231:2848:0]; 2025-03-28T09:17:02.640789Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:232:2784:0]; 2025-03-28T09:17:02.640850Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:233:2784:0]; 2025-03-28T09:17:02.640933Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:234:2888:0]; 2025-03-28T09:17:02.641009Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:235:2784:0]; 2025-03-28T09:17:02.641072Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:236:2784:0]; 2025-03-28T09:17:02.641133Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:237:2872:0]; 2025-03-28T09:17:02.641198Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:238:2784:0]; 2025-03-28T09:17:02.641256Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:239:2784:0]; 2025-03-28T09:17:02.641317Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:240:2816:0]; 2025-03-28T09:17:02.641381Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:241:2784:0]; 2025-03-28T09:17:02.641441Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:242:2784:0]; 2025-03-28T09:17:02.641501Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:243:2784:0]; 2025-03-28T09:17:02.641596Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:244:2784:0]; 2025-03-28T09:17:02.641678Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:245:2728:0]; 2025-03-28T09:17:02.641741Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:77:255:246:8760:0]; 2025-03-28T09:17:02.646427Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:78:255:1:2776:0]; 2025-03-28T09:17:02.646546Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:78:255:2:2808:0]; 2025-03-28T09:17:02.646609Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:78:255:3:2776:0]; 2025-03-28T09:17:02.646670Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:78:255:4:2728:0]; 2025-03-28T09:17:02.646750Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:78:255:5:9096:0]; 2025-03-28T09:17:02.646810Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:78:255:6:2776:0]; 2025-03-28T09:17:02.646868Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:78:255:7:2808:0]; 2025-03-28T09:17:02.646925Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:78:255:8:2776:0]; 2025-03-28T09:17:02.646975Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:78:255:9:2728:0]; 2025-03-28T09:17:02.647027Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:78:255:10:9096:0]; Traceback (most recent call last): File 
"library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2909/000d5f/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2909/000d5f/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001c81/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk0/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_bad_dynconfig/audit.txt 2025-03-28T09:17:04.599660Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (C877DF61)","remote_address":"127.0.0.1","status":"ERROR","subject":"__bad__@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n 
yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001c89/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk11/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_anonymous/audit.txt |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001c7b/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk13/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_sid_is_unexpected/audit.txt 2025-03-28T09:17:06.737558Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:06.737499Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 
101)","start_time":"2025-03-28T09:17:06.497994Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-28T09:17:07.013738Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:07.013697Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-03-28T09:17:06.847080Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-28T09:17:07.242629Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:07.242594Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-03-28T09:17:07.121517Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-28T09:17:07.478836Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:07.478803Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-03-28T09:17:07.350969Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-28T09:17:07.664994Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:07.664957Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-03-28T09:17:07.592188Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-28T09:17:07.843909Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:07.843875Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-03-28T09:17:07.773180Z","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |84.3%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001c79/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk12/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_sid_is_expected/audit.txt |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] >> test_auditlog.py::test_single_dml_query_logged[select] |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> 
test_auditlog.py::test_single_dml_query_logged[update] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TStorageBalanceTest::TestScenario3 [GOOD] >> test_auditlog.py::test_dynconfig [GOOD] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario3 [GOOD] Test command err: c[def1] ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) 
------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) c[def1] ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) ------------------------------ (0) 2025-03-28T09:14:40.401561Z node 7 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:14:40.405230Z node 7 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:40.405438Z node 7 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:14:40.406423Z node 7 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [7:285:2080] ControllerId# 72057594037932033 2025-03-28T09:14:40.406480Z node 7 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:14:40.406572Z node 7 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:14:40.406944Z node 7 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:14:40.409491Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:14:40.411935Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:40.412070Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-28T09:14:40.412524Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-28T09:14:40.413321Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-03-28T09:14:40.413360Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:14:40.413888Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:297:2083] ControllerId# 72057594037932033 2025-03-28T09:14:40.413930Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:14:40.414004Z 
node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:14:40.414157Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:14:40.415860Z node 7 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:14:40.415914Z node 7 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:14:40.419310Z node 7 :BS_PROXY DEBUG: Group# 0 Actor# [7:284:2079] Create Queue# [7:302:2084] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.419521Z node 7 :BS_PROXY DEBUG: Group# 0 Actor# [7:284:2079] Create Queue# [7:303:2085] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.419668Z node 7 :BS_PROXY DEBUG: Group# 0 Actor# [7:284:2079] Create Queue# [7:304:2086] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.419803Z node 7 :BS_PROXY DEBUG: Group# 0 Actor# [7:284:2079] Create Queue# [7:305:2087] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.419955Z node 7 :BS_PROXY DEBUG: Group# 0 Actor# [7:284:2079] Create Queue# [7:306:2088] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.420087Z node 7 :BS_PROXY DEBUG: Group# 0 Actor# [7:284:2079] Create Queue# [7:307:2089] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.420215Z node 7 :BS_PROXY DEBUG: Group# 0 Actor# [7:284:2079] Create Queue# [7:308:2090] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.420253Z node 7 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:14:40.420410Z node 7 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [7:285:2080] 2025-03-28T09:14:40.420445Z node 7 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [7:285:2080] 2025-03-28T09:14:40.420496Z node 7 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:14:40.420706Z node 7 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:14:40.431955Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:14:40.432016Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:14:40.433639Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:296:2082] Create Queue# [1:316:2088] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.433828Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:296:2082] Create Queue# [1:317:2089] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.433926Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:296:2082] Create Queue# [1:318:2090] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.434034Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:296:2082] Create Queue# [1:319:2091] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.434129Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:296:2082] Create Queue# [1:320:2092] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.434233Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:296:2082] Create Queue# [1:321:2093] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.434329Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:296:2082] Create Queue# [1:322:2094] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.434347Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:14:40.434463Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:297:2083] 2025-03-28T09:14:40.434485Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:297:2083] 
2025-03-28T09:14:40.434532Z node 7 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:14:40.434579Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:14:40.434609Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:14:40.435426Z node 8 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:14:40.437603Z node 8 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:40.437692Z node 8 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:14:40.438225Z node 8 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [8:331:2081] ControllerId# 72057594037932033 2025-03-28T09:14:40.438247Z node 8 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:14:40.438288Z node 8 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:14:40.438424Z node 8 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:14:40.440738Z node 8 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:14:40.440774Z node 8 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:14:40.441985Z node 8 :BS_PROXY DEBUG: Group# 0 Actor# [8:330:2080] Create Queue# [8:337:2085] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.442179Z node 8 :BS_PROXY DEBUG: Group# 0 Actor# [8:330:2080] Create Queue# [8:338:2086] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.442332Z node 8 :BS_PROXY DEBUG: Group# 0 Actor# [8:330:2080] Create Queue# [8:339:2087] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.443725Z node 8 :BS_PROXY DEBUG: Group# 0 Actor# [8:330:2080] Create Queue# [8:340:2088] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.443954Z node 8 :BS_PROXY DEBUG: Group# 0 Actor# [8:330:2080] Create Queue# [8:341:2089] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.444095Z node 8 :BS_PROXY DEBUG: Group# 0 Actor# [8:330:2080] Create Queue# [8:342:2090] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.444238Z node 8 :BS_PROXY DEBUG: Group# 0 Actor# [8:330:2080] Create Queue# [8:343:2091] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.444263Z node 8 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:14:40.444362Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [8:331:2081] 2025-03-28T09:14:40.444387Z node 8 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [8:331:2081] 2025-03-28T09:14:40.444422Z node 8 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:14:40.444459Z node 8 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:14:40.444975Z node 8 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor b ... 
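(The c[def1] blocks printed by TStorageBalanceTest in this output show one bar per storage node; the run of asterisks and the trailing number in parentheses, for example (0.044), appear to give each node's share of occupied storage. A minimal Python sketch, assuming only that inferred bar format and a hypothetical capture file hive_balance.log holding one such block, that extracts the fractions and reports how evenly the balancer spread the load:

import re

# One histogram bar per node, e.g. "****----...---- (0.044)".
# The trailing parenthesised number is read as that node's usage fraction;
# the format is inferred from this test output, not an official YDB interface.
BAR = re.compile(r"^[*-]+ \(([0-9.]+)\)$")

def usage_fractions(lines):
    # Collect the fraction from every line that looks like a histogram bar.
    return [float(m.group(1)) for line in lines if (m := BAR.match(line.strip()))]

with open("hive_balance.log") as f:  # hypothetical capture of a c[def1] block
    fracs = usage_fractions(f)
if fracs:
    print(f"nodes={len(fracs)} min={min(fracs):.3f} max={max(fracs):.3f} "
          f"spread={max(fracs) - min(fracs):.3f}")

A small spread between the minimum and maximum fractions is what a successful balancing scenario such as TestScenario3 should produce.)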
TICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923005400992}(72075186224037967)::Execute - TryToBoot was not successfull 2025-03-28T09:17:24.374355Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:496} Tx{1498, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{1001, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-03-28T09:17:24.374404Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:496} Tx{1498, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:17:24.401091Z node 11 :BS_PROXY_PUT INFO: [4e7cbe65221c24a5] bootstrap ActorId# [11:11653:6273] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:495:0:0:246:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:17:24.401229Z node 11 :BS_PROXY_PUT DEBUG: [4e7cbe65221c24a5] Id# [72057594037927937:2:495:0:0:246:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:17:24.401269Z node 11 :BS_PROXY_PUT DEBUG: [4e7cbe65221c24a5] restore Id# [72057594037927937:2:495:0:0:246:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:17:24.401316Z node 11 :BS_PROXY_PUT DEBUG: [4e7cbe65221c24a5] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:495:0:0:246:1] Marker# BPG33 2025-03-28T09:17:24.401351Z node 11 :BS_PROXY_PUT DEBUG: [4e7cbe65221c24a5] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:495:0:0:246:1] Marker# BPG32 2025-03-28T09:17:24.401607Z node 11 :BS_PROXY DEBUG: Send to queueActorId# [11:449:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:495:0:0:246:1] FDS# 246 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:17:24.405438Z node 11 :BS_PROXY_PUT DEBUG: [4e7cbe65221c24a5] received {EvVPutResult Status# OK ID# [72057594037927937:2:495:0:0:246:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 512 } Cost# 81937 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 513 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-28T09:17:24.405565Z node 11 :BS_PROXY_PUT DEBUG: [4e7cbe65221c24a5] Result# TEvPutResult {Id# [72057594037927937:2:495:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-28T09:17:24.405607Z node 11 :BS_PROXY_PUT INFO: [4e7cbe65221c24a5] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:495:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:17:24.405705Z node 11 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.017 sample PartId# [72057594037927937:2:495:0:0:246:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 11 } TEvVPutResult{ TimestampMs# 4.892 VDiskId# [0:1:0:0:0] NodeId# 11 Status# OK } ] } 2025-03-28T09:17:24.406318Z node 11 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:495:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-28T09:17:24.406882Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:496} commited cookie 1 for step 495 2025-03-28T09:17:24.408068Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:496} Tx{1499, 
NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2025-03-28T09:17:24.408118Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:496} Tx{1499, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:17:24.408298Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:496} Tx{1499, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{1002, redo 303b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2025-03-28T09:17:24.408344Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:496} Tx{1499, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:17:24.408436Z node 11 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [11:1291:2642] 2025-03-28T09:17:24.408462Z node 11 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [11:1291:2642] 2025-03-28T09:17:24.408505Z node 11 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [11:1233:2604] EventType# 268637702 c[def1] ****------------------------------------------------------------------------------------------------ (0.044) ******---------------------------------------------------------------------------------------------- (0.062) *******--------------------------------------------------------------------------------------------- (0.068) ******---------------------------------------------------------------------------------------------- (0.056) *****----------------------------------------------------------------------------------------------- (0.046) ******---------------------------------------------------------------------------------------------- (0.056) ******---------------------------------------------------------------------------------------------- (0.06) ******---------------------------------------------------------------------------------------------- (0.062) ****------------------------------------------------------------------------------------------------ (0.044) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.05) 2025-03-28T09:17:24.511234Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:497} Tx{1500, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-03-28T09:17:24.511315Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:497} Tx{1500, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:17:24.511440Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005402112}: tablet 72075186224037923 wasn't changed 2025-03-28T09:17:24.511475Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005402112}: tablet 72075186224037923 skipped channel 0 2025-03-28T09:17:24.511549Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005402112}: tablet 72075186224037923 skipped channel 1 2025-03-28T09:17:24.511589Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{88923005402112}: tablet 72075186224037923 skipped channel 2 2025-03-28T09:17:24.511650Z node 11 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923005402112}(72075186224037923)::Execute - TryToBoot was not successfull 2025-03-28T09:17:24.511714Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:497} Tx{1500, 
NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{1003, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2025-03-28T09:17:24.511764Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:497} Tx{1500, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:17:24.525637Z node 11 :BS_PROXY_PUT INFO: [d78e71c5436674e9] bootstrap ActorId# [11:11655:6275] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:496:0:0:246:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:17:24.525768Z node 11 :BS_PROXY_PUT DEBUG: [d78e71c5436674e9] Id# [72057594037927937:2:496:0:0:246:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:17:24.525806Z node 11 :BS_PROXY_PUT DEBUG: [d78e71c5436674e9] restore Id# [72057594037927937:2:496:0:0:246:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:17:24.525854Z node 11 :BS_PROXY_PUT DEBUG: [d78e71c5436674e9] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:496:0:0:246:1] Marker# BPG33 2025-03-28T09:17:24.525884Z node 11 :BS_PROXY_PUT DEBUG: [d78e71c5436674e9] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:496:0:0:246:1] Marker# BPG32 2025-03-28T09:17:24.525992Z node 11 :BS_PROXY DEBUG: Send to queueActorId# [11:449:2089] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:496:0:0:246:1] FDS# 246 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:17:24.529368Z node 11 :BS_PROXY_PUT DEBUG: [d78e71c5436674e9] received {EvVPutResult Status# OK ID# [72057594037927937:2:496:0:0:246:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 513 } Cost# 81937 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 514 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-28T09:17:24.529502Z node 11 :BS_PROXY_PUT DEBUG: [d78e71c5436674e9] Result# TEvPutResult {Id# [72057594037927937:2:496:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-28T09:17:24.529547Z node 11 :BS_PROXY_PUT INFO: [d78e71c5436674e9] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:496:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:17:24.529662Z node 11 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.732 sample PartId# [72057594037927937:2:496:0:0:246:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 11 } TEvVPutResult{ TimestampMs# 4.15 VDiskId# [0:1:0:0:0] NodeId# 11 Status# OK } ] } 2025-03-28T09:17:24.530341Z node 11 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:496:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-28T09:17:24.530858Z node 11 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:497} commited cookie 1 for step 496 2025-03-28T09:17:24.531887Z node 11 :HIVE WARN: HIVE#72057594037927937 THive::StateWork unhandled event type: 2146435089 event: NKikimr::NHive::TEvPrivate::TEvStorageBalancerOut 2025-03-28T09:17:24.544704Z node 11 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send [11:1291:2642] 2025-03-28T09:17:24.544765Z node 11 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push 
event to server [11:1291:2642] 2025-03-28T09:17:24.544813Z node 11 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [11:1233:2604] EventType# 268637702 c[def1] ****------------------------------------------------------------------------------------------------ (0.044) ******---------------------------------------------------------------------------------------------- (0.062) *******--------------------------------------------------------------------------------------------- (0.068) ******---------------------------------------------------------------------------------------------- (0.056) *****----------------------------------------------------------------------------------------------- (0.046) ******---------------------------------------------------------------------------------------------- (0.056) ******---------------------------------------------------------------------------------------------- (0.06) ******---------------------------------------------------------------------------------------------- (0.062) ****------------------------------------------------------------------------------------------------ (0.044) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.05) |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.5%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001c44/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk18/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.replace/audit.txt 2025-03-28T09:17:22.676614Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:22.676567Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-03-28T09:17:22.591683Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] 
[GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001c1b/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk1/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_good_dynconfig/audit.txt 2025-03-28T09:17:27.927810Z: {"sanitized_token":"**** (C877DF61)","subject":"__bad__@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> SystemView::QueryStatsAllTables [GOOD] >> SystemView::QueryStatsRetries >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001c21/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk15/testing_out_stuff/test_auditlog.py.test_dynconfig/audit.txt 2025-03-28T09:17:26.933316Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> 
test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills >> test_disk.py::TestSafeDiskBreak::test_erase_method |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001bca/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk4/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_bad_dynconfig/audit.txt 2025-03-28T09:17:36.908746Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"ERROR","subject":"other-user@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001be1/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk2/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_bad_dynconfig/audit.txt 2025-03-28T09:17:34.686863Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"{none}","remote_address":"127.0.0.1","status":"ERROR","subject":"{none}","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |84.9%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> SystemView::QueryStatsRetries [GOOD]
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`]
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] [GOOD]
>> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1]
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test
>> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0]
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD]
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test
>> test_auditlog.py::test_dml_begin_commit_logged [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD]
Test command err: AAA /home/runner/.ya/build/build_root/2909/001bc3/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk19/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.select/audit.txt
2025-03-28T09:17:37.348958Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:37.348916Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-03-28T09:17:37.242735Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"}
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD]
Test command err: AAA /home/runner/.ya/build/build_root/2909/001b82/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk7/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_good_dynconfig/audit.txt
2025-03-28T09:17:41.399591Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"}
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD]
Test command err: AAA /home/runner/.ya/build/build_root/2909/001b83/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk20/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.update/audit.txt
2025-03-28T09:17:41.344187Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:41.344134Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-03-28T09:17:41.176267Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"}
>> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::QueryStatsRetries [GOOD]
Test command err: 2025-03-28T09:14:12.371741Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786210912546455:2215];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:14:12.371842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00159d/r3tmp/tmplNaICT/pdisk_1.dat
2025-03-28T09:14:13.437580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:14:13.445539Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:14:13.445625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:14:13.455712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:14:13.479563Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 2455, node 1
2025-03-28T09:14:13.823064Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:14:13.823089Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:14:13.823099Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:14:13.823208Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:22350
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:14.972644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:15.150912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:15.924456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:17.316046Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786210912546455:2215];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:17.316151Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:14:18.373235Z node 1 :KQP_COMPILE_SERVICE INFO: Subscribed for config changes 2025-03-28T09:14:18.373278Z node 1 :KQP_COMPILE_SERVICE INFO: Updated config 2025-03-28T09:14:18.493264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786236682351316:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:18.493345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786236682351328:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:18.493372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:14:18.497413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-28T09:14:18.544435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786236682351330:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-28T09:14:18.631723Z node 1 :TX_PROXY ERROR: Actor# [1:7486786236682351406:2894] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:14:18.632964Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: , DatabaseId: /Root, UserSid: , Text: \n SELECT schemaname, tablename, tableowner, tablespace, hasindexes, hasrules, hastriggers, rowsecurity FROM `Root/.sys/pg_tables` WHERE tablename = PgName(\"Table0\") OR tablename = PgName(\"Table1\") ORDER BY tablename;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"" }, "settings": { "ydb_user":"" }, "rollback_settings": { } } }} 2025-03-28T09:14:18.642327Z node 1 :KQP_COMPILE_SERVICE DEBUG: Perform request, TraceId.SpanIdPtr: 0x000050F00006AAD8 2025-03-28T09:14:18.642394Z node 1 :KQP_COMPILE_SERVICE DEBUG: Received compile request, sender: [1:7486786236682351302:2345], queryUid: , queryText: "\n SELECT schemaname, tablename, tableowner, tablespace, hasindexes, hasrules, hastriggers, rowsecurity FROM `Root/.sys/pg_tables` WHERE tablename = PgName(\"Table0\") OR tablename = PgName(\"Table1\") ORDER BY tablename;\n ", keepInCache: 0, split: 0{ TraceId: 01jqe0fqz5930z84qt7r3fwjd7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTcyMGFiNzQtZjQ2NWQ3YWItZWEyNTdkNWQtYTAwYjUzZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default} 2025-03-28T09:14:18.642547Z node 1 :KQP_COMPILE_SERVICE DEBUG: Try to find query by queryId, queryId: {Cluster: db, Database: , DatabaseId: /Root, UserSid: , Text: \n SELECT schemaname, tablename, tableowner, tablespace, hasindexes, hasrules, hastriggers, rowsecurity FROM `Root/.sys/pg_tables` WHERE tablename = PgName(\"Table0\") OR tablename = PgName(\"Table1\") ORDER BY tablename;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"" }, "settings": { "ydb_user":"" }, "rollback_settings": { } } }} 2025-03-28T09:14:18.642616Z node 1 :KQP_COMPILE_SERVICE DEBUG: Added request to queue, sender: [1:7486786236682351302:2345], queueSize: 1 2025-03-28T09:14:18.659841Z node 1 :KQP_COMPILE_SERVICE DEBUG: Created compile actor, sender: [1:7486786236682351302:2345], compileActor: [1:7486786236682351424:2357] 2025-03-28T09:14:19.528211Z node 1 :KQP_YQL INFO: TraceId: 01jqe0fqz5930z84qt7r3fwjd7, SessionId: CompileActor 2025-03-28 09:14:19.504 INFO ydb-core-sys_view-ut(pid=215975, tid=0x00007F9D3E9D4640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"Root/.sys/pg_tables"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Filter (Right! 
$1) (lambda '($18) (Coalesce (Or (== (Member $18 '"tablename") (PgConst '"Table0" (PgType 'name))) (== (Member $18 '"tablename") (PgConst '"Table1" (PgType 'name)))) (Bool 'false))))) (let $4 (TypeOf $3)) (let $5 (SqlProjectItem $4 '"schemaname" (lambda '($19) (Member $19 '"schemaname")))) (let $6 (SqlProjectItem $4 '"tablename" (lambda '($20) (Member $20 '"tablename")))) (let $7 (SqlProjectItem $4 '"tableowner" (lambda '($21) (Member $21 '"tableowner")))) (let $8 (SqlProjectItem $4 '"tablespace" (lambda '($22) (Member $22 '"tablespace")))) (let $9 (SqlProjectItem $4 '"hasindexes" (lambda '($23) (Member $23 '"hasindexes")))) (let $10 (SqlProjectItem $4 '"hasrules" (lambda '($24) (Member $24 '"hasrules")))) (let $11 (SqlProjectItem $4 '"hastriggers" (lambda '($25) (Member $25 '"hastriggers")))) (let $12 (SqlProjectItem $4 '"rowsecurity" (lambda '($26) (Member $26 '"rowsecurity")))) (let $13 '($5 $6 $7 $8 $9 $10 $11 $12)) (let $14 (Sort (PersistableRepr (SqlProject $3 $13)) (Bool 'true) (lambda '($27) (PersistableRepr (Member $27 '"tablename"))))) (let $15 '('"schemaname" '"tablename" '"tableowner" '"tablespace" '"hasindexes" '"hasrules" '"hastriggers" '"rowsecurity")) (let $16 '('('type) '('autoref) '('columns $15))) (let $17 (Write! (Left! $1) $2 (Key) $14 $16)) (return (Commit! $17 $2)) ) 2025-03-28T09:14:19.546901Z node 1 :KQP_YQL TRACE: TraceId: 01jqe0fqz5930z84qt7r3fwjd7, SessionId: CompileActor 2025-03-28 09:14:19.545 TRACE ydb-core-sys_view-ut(pid=215975, tid=0x00007F9D3E9D4640) [KQP] kqp_transform.cpp:33: YqlTransformer: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"Root/.sys/pg_tables"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Filter (Right! $1) (lambda '($18) (Coalesce (Or (== (Member $18 '"tablename") (PgConst '"Table0" (PgType 'name))) (== (Member $18 '"tablename") (PgConst '"Table1" (PgType 'name)))) (Bool 'false))))) (let $4 (TypeOf $3)) (let $5 (SqlProjectItem $4 '"schemaname" (lambda '($19) (Member $19 '"schemaname")))) (let $6 (SqlProjectItem $4 '"tablename" (lambda '($20) (Member $20 '"tablename")))) (let $7 (SqlProjectItem $4 '"tableowner" (lambda '($21) (Member $21 '"tableowner")))) (let $8 (SqlProjectItem $4 '"tablespace" (lambda '($22) (Member $22 '"tablespace")))) (let $9 (SqlProjectItem $4 '"hasindexes" (lambda '($23) (Member $23 '"hasindexes")))) (let $10 (SqlProjectItem $4 '"hasrules" (lambda '($24) (Member $24 '"hasrules")))) (let $11 (SqlProjectItem $4 '"hastriggers" (lambda '($25) (Member $25 '"hastriggers")))) (let $12 (SqlProjectItem $4 '"rowsecurity" (lambda '($26) (Member $26 '"rowsecurity")))) (let $13 '($5 $6 $7 $8 $9 $10 $11 $12)) (let $14 (Sort (PersistableRepr (SqlProject $3 $13)) (Bool 'true) (lambda '($27) (PersistableRepr (Member $27 '"tablename"))))) (let $15 '('"schemaname" '"tablename" '"tableowner" '"tablespace" '"hasindexes" '"hasrules" '"hastriggers" '"rowsecurity")) (let $16 '('('type) '('autoref) '('columns $15))) (let $17 (Write! (Left! $1) $2 (Key) $14 $16)) (return (Commit! (Commit! $17 $2) (DataSink '"kikimr" '"db") '('('"mode" '"flush")))) ) 2025-0 ... UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:17:31.872365Z node 61 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:17:31.927500Z node 61 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:17:35.261746Z node 61 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[61:7486787060387289473:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:17:35.261877Z node 61 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:17:37.278711Z node 61 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [61:7486787090452061586:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:17:37.278828Z node 61 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [61:7486787090452061581:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:17:37.279111Z node 61 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:17:37.285392Z node 61 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:17:37.314262Z node 61 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [61:7486787090452061595:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:17:37.388297Z node 61 :TX_PROXY ERROR: Actor# [61:7486787090452061666:2708] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:17:37.612208Z node 61 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe0nt3v9a2mr49bpnmv04ab, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=61&id=ZDZmOTc1NjgtZGNkZmU3NjQtMTE3NGIzNzAtOTJkODQ0ZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:17:37.827216Z node 61 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe0ntff5cek9k9we2yqd601, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=61&id=YzRmNmQyY2EtNWJiOTkzZWQtMzU4MTcwNjYtODVlN2FkMjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:17:37.830783Z node 61 :SYSTEM_VIEWS INFO: Scan started, actor: [61:7486787090452061739:2368], owner: [61:7486787090452061736:2366], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_hour] 2025-03-28T09:17:37.832756Z node 61 :SYSTEM_VIEWS INFO: Scan prepared, actor: [61:7486787090452061739:2368], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:17:37.833390Z node 61 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [61:7486787090452061739:2368], row count: 1, finished: 1 2025-03-28T09:17:37.833439Z node 61 :SYSTEM_VIEWS INFO: Scan finished, actor: [61:7486787090452061739:2368], owner: [61:7486787090452061736:2366], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_request_units_one_hour] 2025-03-28T09:17:37.841715Z node 61 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153457821, txId: 281474976715662] shutting down 2025-03-28T09:17:41.069720Z node 66 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[66:7486787109793032279:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:17:41.070236Z node 66 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00159d/r3tmp/tmpBFF0GT/pdisk_1.dat 2025-03-28T09:17:41.440228Z node 66 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:17:41.513131Z node 66 :HIVE WARN: HIVE#72057594037968897 Node(66, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:17:41.513294Z node 66 :HIVE WARN: HIVE#72057594037968897 Node(66, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:17:41.520449Z node 66 :HIVE WARN: HIVE#72057594037968897 Node(66, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10375, node 66 2025-03-28T09:17:41.646003Z node 66 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:17:41.646049Z node 66 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:17:41.646062Z node 66 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-03-28T09:17:41.646301Z node 66 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12449 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:17:42.276111Z node 66 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:17:42.310559Z node 66 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:17:46.061588Z node 66 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[66:7486787109793032279:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:17:46.061694Z node 66 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:17:47.772534Z node 66 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [66:7486787135562837107:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:17:47.772651Z node 66 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [66:7486787135562837099:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:17:47.772929Z node 66 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:17:47.785435Z node 66 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:17:47.831953Z node 66 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [66:7486787135562837113:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:17:47.944277Z node 66 :TX_PROXY ERROR: Actor# [66:7486787135562837190:2726] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:17:48.377049Z node 66 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe0p4bp69ck96hvfc5q4af7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=66&id=YmEzYjU1OGItNGM5Mzg3Zi1lMzhiNjU0My1jMGVlZjlmNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:17:48.594405Z node 66 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqe0p4zw8591mbj4hx8p8njm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=66&id=Y2FiMzc5OGMtY2Q2ZmRhODItMmVlMTk0ZDUtOTQ4MDE1MDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:17:48.597870Z node 66 :SYSTEM_VIEWS INFO: Scan started, actor: [66:7486787139857804566:2368], owner: [66:7486787139857804563:2366], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-03-28T09:17:48.602452Z node 66 :SYSTEM_VIEWS INFO: Scan prepared, actor: [66:7486787139857804566:2368], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:17:48.611442Z node 66 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [66:7486787139857804566:2368], row count: 1, finished: 1 2025-03-28T09:17:48.611512Z node 66 :SYSTEM_VIEWS INFO: Scan finished, actor: [66:7486787139857804566:2368], owner: [66:7486787139857804563:2366], scan id: 0, table id: [72057594046644480:1:0:top_queries_by_read_bytes_one_minute] 2025-03-28T09:17:48.616995Z node 66 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153468591, txId: 281474976710662] shutting down |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001b94/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk9/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs1/audit.txt 2025-03-28T09:17:40.498551Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:40.498498Z","sanitized_token":"**** 
(B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-03-28T09:17:40.336905Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001b6e/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk14/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_unauthorized/audit.txt 2025-03-28T09:17:44.655909Z: 
{"database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:44.655860Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-03-28T09:17:44.643649Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-28T09:17:44.791134Z: {"database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:44.791096Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-03-28T09:17:44.767355Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-28T09:17:44.926379Z: {"database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:44.926343Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2025-03-28T09:17:44.900750Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-28T09:17:45.065190Z: {"database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:45.065158Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-03-28T09:17:45.038843Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-28T09:17:45.194027Z: {"database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:45.193987Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2025-03-28T09:17:45.177020Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-28T09:17:45.320424Z: {"database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:45.320388Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-03-28T09:17:45.303786Z","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001b62/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk3/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_good_dynconfig/audit.txt 2025-03-28T09:17:46.783369Z: {"sanitized_token":"{none}","subject":"{none}","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001b68/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk8/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs0/audit.txt 2025-03-28T09:17:46.846550Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","cloud_id":"cloud-id-A","end_time":"2025-03-28T09:17:46.846503Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2025-03-28T09:17:46.684883Z","subject":"root@builtin","detailed_status":"SUCCESS","resource_id":"database-id-C","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001b57/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk6/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_bad_dynconfig/audit.txt 
2025-03-28T09:17:48.492844Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1053: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"ERROR","subject":"root@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> THeavyPerfTest::TTestLoadEverything [GOOD] >> THiveImplTest::BootQueueSpeed ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001b33/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk10/testing_out_stuff/test_auditlog.py.test_dml_begin_commit_logged/audit.txt 2025-03-28T09:17:52.507110Z: {"tx_id":"01jqe0p8zq7t7jhvcdhwqz6ejp","database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:52.507062Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-03-28T09:17:52.498828Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"BeginTransactionRequest","component":"grpc-proxy"} 2025-03-28T09:17:52.913201Z: {"tx_id":"01jqe0p8zq7t7jhvcdhwqz6ejp","database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:52.913156Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","commit_tx":"0","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 
1","start_time":"2025-03-28T09:17:52.528555Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2025-03-28T09:17:52.928010Z: {"tx_id":"01jqe0p8zq7t7jhvcdhwqz6ejp","database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:52.927955Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2025-03-28T09:17:52.920539Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"CommitTransactionRequest","component":"grpc-proxy"} |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001b2a/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk16/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.delete/audit.txt 2025-03-28T09:17:56.145583Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:17:56.145528Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2025-03-28T09:17:55.901173Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001b1d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk5/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_good_dynconfig/audit.txt 2025-03-28T09:17:57.415985Z: {"sanitized_token":"othe****ltin (27F910A9)","subject":"other-user@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] >> THiveImplTest::BootQueueSpeed [GOOD] >> THiveImplTest::BalancerSpeedAndDistribution ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001af1/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk17/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.insert/audit.txt 2025-03-28T09:18:01.965824Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:18:01.965765Z","sanitized_token":"**** 
(B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2025-03-28T09:18:01.645879Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/2909/001aed/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk21/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.upsert/audit.txt 2025-03-28T09:18:03.040096Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2025-03-28T09:18:03.040046Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2025-03-28T09:18:02.879709Z","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] |85.3%| [TA] $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} |85.3%| [TA] {RESULT} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... 
results_accumulator.log} >> THiveImplTest::BalancerSpeedAndDistribution [GOOD] >> THiveImplTest::TestShortTabletTypes [GOOD] >> THiveImplTest::TestStDev [GOOD] >> THiveTest::TestBlockCreateTablet >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> THiveTest::TestBlockCreateTablet [GOOD] >> THiveTest::DrainWithHiveRestart >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> TStateStorageTest::ShouldSaveGetOldSmallState >> TCheckpointStorageTest::ShouldRegisterCoordinator >> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged >> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks >> TStorageServiceTest::ShouldNotRegisterPrevGeneration >> TCheckpointStorageTest::ShouldCreateCheckpoint >> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo >> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams >> TStorageServiceTest::ShouldRegister >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] >> TStateStorageTest::ShouldIssueErrorOnWrongGetStateParams [GOOD] >> TStateStorageTest::ShouldIssueErrorOnNonExistentState >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] >> TStateStorageTest::ShouldDeleteNoCheckpoints >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] >> TCheckpointStorageTest::ShouldRegisterCoordinator [GOOD] >> TCheckpointStorageTest::ShouldGetCoordinators >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range >> TStateStorageTest::ShouldSaveGetOldSmallState2Tasks [GOOD] >> TStorageServiceTest::ShouldCreateCheckpoint >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] >> TStateStorageTest::ShouldSaveGetOldSmallState [GOOD] >> TStorageServiceTest::ShouldRegister [GOOD] >> TStorageServiceTest::ShouldRegisterNextGeneration >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> TStateStorageTest::ShouldSaveGetOldBigState >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 >> TStorageServiceTest::ShouldNotRegisterPrevGeneration [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered >> TCheckpointStorageTest::ShouldCreateCheckpoint [GOOD] >> 
TCheckpointStorageTest::ShouldCreateGetCheckpoints >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> TCheckpointStorageTest::ShouldGetCoordinators [GOOD] >> TCheckpointStorageTest::ShouldMarkCheckpointsGc >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [GOOD] |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] >> TStateStorageTest::ShouldDeleteNoCheckpoints [GOOD] >> TStateStorageTest::ShouldDeleteNoCheckpoints2 >> TStateStorageTest::ShouldSaveGetOldBigState [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementSmallState >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> THiveTest::DrainWithHiveRestart [GOOD] >> THiveTest::TestCheckSubHiveForwarding >> TStateStorageTest::ShouldIssueErrorOnNonExistentState [GOOD] >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration >> TStateStorageTest::ShouldLoadLastSnapshot >> TStorageServiceTest::ShouldRegisterNextGeneration [GOOD] >> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint >> TStorageServiceTest::ShouldNotCreateCheckpointWhenUnregistered [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointTwice >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] >> TStorageServiceTest::ShouldNotCreateCheckpointAfterGenerationChanged [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation >> TCheckpointStorageTest::ShouldUpdateCheckpointStatusForCheckpointsWithTheSameGenAndNo [GOOD] >> TGcTest::ShouldRemovePreviousCheckpoints >> TStateStorageTest::ShouldDeleteNoCheckpoints2 [GOOD] >> TStateStorageTest::ShouldDeleteCheckpoints |85.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementSmallState [GOOD] >> TStateStorageTest::ShouldSaveGetIncrementBigState >> TStorageServiceTest::ShouldCreateCheckpoint [GOOD] >> TStorageServiceTest::ShouldGetCheckpoints |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> THiveTest::TestCheckSubHiveForwarding [GOOD] >> THiveTest::TestCheckSubHiveDrain >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] >> TStateStorageTest::ShouldLoadLastSnapshot [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendSnaphotState >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] |85.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration2 [GOOD] |85.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGeneration [GOOD] |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> TStateStorageTest::ShouldDeleteCheckpoints [GOOD] >> TStateStorageTest::ShouldDeleteGraph >> 
TStateStorageTest::ShouldSaveGetIncrementBigState [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendState |85.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGenerationAndTransact [GOOD] |85.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] |85.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TCheckGenerationTest::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> TCheckpointStorageTest::ShouldMarkCheckpointsGc [GOOD] >> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints |85.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckSameGeneration [GOOD] >> TCheckpointStorageTest::ShouldCreateGetCheckpoints [GOOD] >> TCheckpointStorageTest::ShouldGetCheckpointsEmpty >> TStateStorageTest::ShouldNotGetNonExistendState [GOOD] >> TStateStorageTest::ShouldNotGetNonExistendSnaphotState [GOOD] >> TStateStorageTest::ShouldLoadIncrementSnapshot >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure >> THiveTest::TestCheckSubHiveDrain [GOOD] >> THiveTest::TestCheckSubHiveMigration >> TStorageServiceTest::ShouldNotCreateCheckpointTwice [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation >> TStateStorageTest::ShouldDeleteGraph [GOOD] >> TStateStorageTest::ShouldGetMultipleStates |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] >> TStorageServiceTest::ShouldNotAbortCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] |85.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] >> TStorageServiceTest::ShouldPendingAndCompleteCheckpoint [GOOD] >> TStorageServiceTest::ShouldSaveState >> TCheckpointStorageTest::ShouldGetCheckpointsEmpty [GOOD] >> TCheckpointStorageTest::ShouldDeleteGraph >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] >> TStorageServiceTest::ShouldGetCheckpoints [GOOD] >> TStorageServiceTest::ShouldAbortCheckpoint |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> TStorageServiceTest::ShouldNotPendingCheckpointWithoutCreation [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged >> THiveTest::TestCheckSubHiveMigration [GOOD] >> THiveTest::TestCheckSubHiveMigrationManyTablets |85.4%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |85.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldRegisterCheckNextGeneration [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success >> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldNotGetNonExistendState [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointWithoutPending [GOOD] >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] >> TStorageServiceTest::ShouldSaveState [GOOD] >> TStorageServiceTest::ShouldUseGc >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed >> TStateStorageTest::ShouldGetMultipleStates [GOOD] |85.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TRegisterCheckTest::ShouldNotRegisterCheckPrevGeneration [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success |85.4%| [TA] $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.4%| [TA] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TCheckpointStorageTest::ShouldDeleteGraph [GOOD] >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options >> TCheckpointStorageTest::ShouldNotDeleteUnmarkedCheckpoints [GOOD] >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldLoadIncrementSnapshot [GOOD] >> TGcTest::ShouldRemovePreviousCheckpoints [GOOD] >> TGcTest::ShouldIgnoreIncrementCheckpoint >> TStorageServiceTest::ShouldAbortCheckpoint [GOOD] >> TStorageServiceTest::ShouldGetState |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldGetMultipleStates [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> 
test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotPendingCheckpointGenerationChanged [GOOD] Test command err: 2025-03-28T09:18:25.800715Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7486787293480342765:2048] with connection to localhost:29033:local 2025-03-28T09:18:25.801148Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:26.765153Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:26.765190Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:26.770842Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:26.949371Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.16] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldNotRegisterPrevGeneration/coordinators_sync, pk: graph_graphich, current generation: 17, expected/new generation: 16, operation: RegisterCheck, code: 400130 2025-03-28T09:18:26.949393Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.16] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:27.991600Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7486787308681415834:2048] with connection to localhost:29033:local 2025-03-28T09:18:27.991726Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-28T09:18:28.748272Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointWhenUnregistered/coordinators_sync, pk: graph_graphich, current generation: 0, expected/new generation: 17, operation: Check, code: 400130 2025-03-28T09:18:28.748301Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-28T09:18:30.009400Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7486787316339056338:2048] with connection to localhost:29033:local 2025-03-28T09:18:30.009497Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:30.266505Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:30.266541Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:30.266919Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-28T09:18:31.923784Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-03-28T09:18:31.923838Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-28T09:18:31.924381Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-28T09:18:32.545734Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to create checkpoint:
: Error: Conflict with existing key., code: 2012 2025-03-28T09:18:32.545774Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-28T09:18:34.250823Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7486787330322348699:2048] with connection to localhost:29033:local 2025-03-28T09:18:34.250922Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:34.690521Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:34.690553Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:34.697619Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-03-28T09:18:35.044002Z node 4 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Failed to select checkpoint '17:1', code: 400080 2025-03-28T09:18:35.044039Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-03-28T09:18:37.023023Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7486787340028799963:2048] with connection to localhost:29033:local 2025-03-28T09:18:37.023685Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:37.413667Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:37.413707Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:37.414013Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-28T09:18:39.022536Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-03-28T09:18:39.022571Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-28T09:18:39.022923Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:39.433998Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-03-28T09:18:39.434426Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:39.434861Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-03-28T09:18:39.700968Z node 5 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to set 'PendingCommit' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2025-03-28T09:18:39.701003Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok >> TStorageServiceTest::ShouldUseGc [GOOD] >> TStorageServiceTest::ShouldGetState [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldNotCompleteCheckpointGenerationChanged [GOOD] Test command err: 2025-03-28T09:18:25.851938Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7486787294808789931:2048] with connection to localhost:28195:local 2025-03-28T09:18:25.852058Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:26.685828Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:26.685864Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:26.694751Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-28T09:18:28.118744Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-03-28T09:18:28.118771Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-28T09:18:28.137281Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:28.605406Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-03-28T09:18:28.605439Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:28.606021Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-03-28T09:18:28.871472Z node 1 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:2] Failed to create checkpoint:
: Warning: Table: local/TStorageServiceTestShouldNotCreateCheckpointAfterGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2025-03-28T09:18:28.871515Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-03-28T09:18:30.101064Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7486787313806623724:2048] with connection to localhost:28195:local 2025-03-28T09:18:30.101170Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:30.400482Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:30.400527Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:30.400838Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-03-28T09:18:30.855378Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Failed to select checkpoint '17:1', code: 400080 2025-03-28T09:18:30.855406Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-03-28T09:18:32.458823Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7486787325673762567:2048] with connection to localhost:28195:local 2025-03-28T09:18:32.458923Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:32.817766Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:32.817798Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:32.819785Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest 2025-03-28T09:18:33.184193Z node 3 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] [17:1] Failed to abort checkpoint:
: Warning: Failed to select checkpoint '17:1', code: 400080 2025-03-28T09:18:33.184233Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse 2025-03-28T09:18:34.773894Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7486787331845540308:2048] with connection to localhost:28195:local 2025-03-28T09:18:34.773993Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:35.146901Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:35.146936Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:35.147357Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-28T09:18:36.754814Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-03-28T09:18:36.754846Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-28T09:18:36.755324Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-03-28T09:18:37.232730Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Selected checkpoint '17:1' with status Pending, while expected PendingCommit, code: 400080 2025-03-28T09:18:37.232759Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-03-28T09:18:38.841253Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7486787350762578136:2048] with connection to localhost:28195:local 2025-03-28T09:18:38.841341Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:39.246988Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:39.247020Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:39.263424Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-28T09:18:41.239769Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-03-28T09:18:41.239809Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-28T09:18:41.243510Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-03-28T09:18:41.998946Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-03-28T09:18:41.998984Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-03-28T09:18:42.009778Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:42.514800Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-03-28T09:18:42.514834Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:42.515449Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-03-28T09:18:42.680909Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Failed to set 'Completed' status:
: Warning: Table: local/TStorageServiceTestShouldNotPendingCheckpointGenerationChanged/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: Check, code: 400130 2025-03-28T09:18:42.680947Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldRetryOnExistingGraphDescId [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> TVPatchTests::PatchPartOk >> TLocksTest::UpdateLockedKey >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldUseGc [GOOD] Test command err: 2025-03-28T09:18:25.860967Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7486787292492978164:2048] with connection to localhost:26747:local 2025-03-28T09:18:25.861074Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:26.899678Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:26.899706Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-03-28T09:18:28.018827Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7486787307190124021:2048] with connection to localhost:26747:local 2025-03-28T09:18:28.018936Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:28.218819Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:28.218849Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:28.219063Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:28.583455Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.18] Graph registered 2025-03-28T09:18:28.583484Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.18] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:28.583887Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:28.794618Z node 2 :STREAMS_STORAGE_SERVICE WARN: [graph_graphich.17] Failed to register graph:
: Warning: Table: local/TStorageServiceTestShouldRegisterNextGeneration/coordinators_sync, pk: graph_graphich, current generation: 18, expected/new generation: 17, operation: RegisterCheck, code: 400130 2025-03-28T09:18:28.794648Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:29.811080Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7486787313114769297:2048] with connection to localhost:26747:local 2025-03-28T09:18:29.811177Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:30.050444Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:30.050480Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:30.050842Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-28T09:18:31.571066Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-03-28T09:18:31.571101Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-28T09:18:31.575235Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-03-28T09:18:32.204481Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-03-28T09:18:32.204517Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-03-28T09:18:32.204937Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-03-28T09:18:32.593957Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-03-28T09:18:32.593990Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-03-28T09:18:32.598154Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-03-28T09:18:33.017245Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-03-28T09:18:33.017277Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2025-03-28T09:18:33.019402Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-03-28T09:18:33.298581Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-03-28T09:18:33.298652Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-03-28T09:18:33.303023Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:33.627939Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:35.422902Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7486787336382561954:2048] with connection to localhost:26747:local 2025-03-28T09:18:35.423019Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:35.913441Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:35.913475Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse
2025-03-28T09:18:35.918822Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-28T09:18:37.311695Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-03-28T09:18:37.311719Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-28T09:18:37.312112Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2025-03-28T09:18:37.501605Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2025-03-28T09:18:37.504181Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2025-03-28T09:18:39.038953Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [5:7486787352555580337:2048] with connection to localhost:26747:local 2025-03-28T09:18:39.039022Z node 5 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [5:7486787356850547736:2130] 2025-03-28T09:18:39.039056Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:39.408529Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:39.408568Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:39.410971Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-28T09:18:41.054269Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-03-28T09:18:41.054315Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-28T09:18:41.057346Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-03-28T09:18:41.768698Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-03-28T09:18:41.768822Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-03-28T09:18:41.786812Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCompleteCheckpointRequest 2025-03-28T09:18:42.359165Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'Completed' 2025-03-28T09:18:42.359207Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvNewCheckpointSucceeded 2025-03-28T09:18:42.359231Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCompleteCheckpointResponse 2025-03-28T09:18:42.359367Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:1 for graph 'graph_graphich' 2025-03-28T09:18:42.359942Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-03-28T09:18:42.633881Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-03-28T09:18:42.633916Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-03-28T09:18:42.635049Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-03-28T09:18:42.782944Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-03-28T09:18:42.782981Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2025-03-28T09:18:42.783431Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-03-28T09:18:43.063364Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-03-28T09:18:43.063401Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvNewCheckpointSucceeded 2025-03-28T09:18:43.063431Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-03-28T09:18:43.063606Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:2 for graph 'graph_graphich' 2025-03-28T09:18:43.066798Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2025-03-28T09:18:43.175292Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:1 2025-03-28T09:18:43.176720Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:2 2025-03-28T09:18:43.279228Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2025-03-28T09:18:43.279275Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2025-03-28T09:18:43.279731Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvSetCheckpointPendingCommitStatusRequest 2025-03-28T09:18:43.424681Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'PendingCommit' 2025-03-28T09:18:43.424709Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvSetCheckpointPendingCommitStatusResponse 2025-03-28T09:18:43.425213Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCompleteCheckpointRequest 2025-03-28T09:18:43.606958Z node 5 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Status updated to 'Completed' 2025-03-28T09:18:43.607004Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvNewCheckpointSucceeded 2025-03-28T09:18:43.607023Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCompleteCheckpointResponse 2025-03-28T09:18:43.609581Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 17:3 for graph 'graph_graphich' 2025-03-28T09:18:43.609982Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:43.663059Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph_graphich' up to 17:3 2025-03-28T09:18:43.892630Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:43.996437Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:44.028676Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:44.131890Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:44.158764Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:44.259761Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:44.275328Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:44.378947Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:44.396360Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:44.500185Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:44.523266Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:44.630833Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:44.651334Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:44.755657Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:44.773575Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:44.875024Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:44.891985Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:44.994361Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:45.017422Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:45.118994Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:45.187666Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:45.289023Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:45.316270Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:45.417204Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:45.432304Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:45.535327Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:45.551228Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:45.652727Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:45.665502Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:45.770378Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:45.797457Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:45.907407Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:46.029925Z node 5 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse >> TVPatchTests::PatchPartOk >> ColumnStatistics::CountMinSketchServerlessStatistics ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartOk [GOOD] Test command err: Recv 65537 2025-03-28T09:18:50.848663Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-28T09:18:50.850987Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-03-28T09:18:50.851065Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-03-28T09:18:50.851306Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-03-28T09:18:50.853444Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-28T09:18:50.853671Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2025-03-28T09:18:50.855027Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2025-03-28T09:18:50.855233Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2025-03-28T09:18:50.856837Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK 2025-03-28T09:18:50.856892Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-03-28T09:18:50.856992Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStorageServiceTest::ShouldGetState [GOOD] Test command err: 2025-03-28T09:18:27.876910Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [1:7486787305847291870:2048] with connection to localhost:28197:local 2025-03-28T09:18:27.877002Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:28.210890Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:28.210926Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:28.211358Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-28T09:18:29.778044Z node 1 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-03-28T09:18:29.778075Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-28T09:18:31.437372Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [2:7486787318076506223:2048] with connection to localhost:28197:local 2025-03-28T09:18:31.437448Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:31.893425Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:31.893458Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:31.893928Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-28T09:18:33.177443Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-03-28T09:18:33.177472Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-28T09:18:33.185953Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-03-28T09:18:33.647433Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-03-28T09:18:33.647467Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-03-28T09:18:33.658840Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Got TEvCreateCheckpointRequest 2025-03-28T09:18:34.090953Z node 2 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:3] Checkpoint created 2025-03-28T09:18:34.090990Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:3] Send TEvCreateCheckpointResponse 2025-03-28T09:18:34.094479Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:34.459844Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:36.352939Z node 3 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [3:7486787339838583685:2048] with connection to localhost:28197:local 2025-03-28T09:18:36.353063Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:36.633840Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:36.633877Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:36.635046Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-28T09:18:38.123697Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-03-28T09:18:38.123724Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-28T09:18:38.124187Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvSetCheckpointPendingCommitStatusRequest 2025-03-28T09:18:38.816113Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Status updated to 'PendingCommit' 2025-03-28T09:18:38.816144Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvSetCheckpointPendingCommitStatusResponse 2025-03-28T09:18:38.818045Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCreateCheckpointRequest 2025-03-28T09:18:39.259906Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint created 2025-03-28T09:18:39.259942Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCreateCheckpointResponse 2025-03-28T09:18:39.260188Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvSetCheckpointPendingCommitStatusRequest 2025-03-28T09:18:39.434740Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'PendingCommit' 2025-03-28T09:18:39.434761Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvSetCheckpointPendingCommitStatusResponse 2025-03-28T09:18:39.439313Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvCompleteCheckpointRequest 2025-03-28T09:18:39.691367Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Status updated to 'Completed' 2025-03-28T09:18:39.691408Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvCompleteCheckpointResponse 2025-03-28T09:18:39.695133Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvAbortCheckpointRequest 2025-03-28T09:18:40.082942Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint aborted 2025-03-28T09:18:40.082980Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvAbortCheckpointResponse 2025-03-28T09:18:40.083761Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Got TEvAbortCheckpointRequest 2025-03-28T09:18:40.389511Z node 3 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:2] Checkpoint aborted 2025-03-28T09:18:40.389561Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:2] Send TEvAbortCheckpointResponse 2025-03-28T09:18:40.391042Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Got TEvGetCheckpointsMetadataRequest 2025-03-28T09:18:40.775410Z node 3 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] Send TEvGetCheckpointsMetadataResponse 2025-03-28T09:18:42.761536Z node 4 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped TStorageProxy [4:7486787364738168263:2048] with connection to localhost:28197:local 2025-03-28T09:18:42.761643Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Got TEvRegisterCoordinatorRequest 2025-03-28T09:18:43.198347Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] Graph registered 2025-03-28T09:18:43.198383Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] Send TEvRegisterCoordinatorResponse 2025-03-28T09:18:43.198783Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Got TEvCreateCheckpointRequest 2025-03-28T09:18:44.775264Z node 4 :STREAMS_STORAGE_SERVICE INFO: [graph_graphich.17] [17:1] Checkpoint created 2025-03-28T09:18:44.775290Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich.17] [17:1] Send TEvCreateCheckpointResponse 2025-03-28T09:18:44.779023Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvSaveTaskState: task 1317 2025-03-28T09:18:45.012904Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] TEvSaveTaskState Apply: task: 1317 2025-03-28T09:18:45.012957Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Send TEvSaveTaskStateResult: task: 1317 2025-03-28T09:18:45.015445Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] Got TEvGetTaskState: tasks {1317} 2025-03-28T09:18:45.015471Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] GetState, tasks: 1317 2025-03-28T09:18:45.707020Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ListOfStates results: 2025-03-28T09:18:45.707122Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] taskId 1317 checkpoint id: 17:1, rows count: 1 2025-03-28T09:18:45.707162Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SkipStatesInFuture, skip 0 checkpoints 2025-03-28T09:18:45.859016Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] SelectState: task_id 1317, seq_no 1, blob_seq_num 0 2025-03-28T09:18:46.407271Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] DeserializeState, task id 1317, blob size 49 2025-03-28T09:18:46.407352Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [17:1] ApplyIncrements 2025-03-28T09:18:46.416005Z node 4 :STREAMS_STORAGE_SERVICE DEBUG: [graph_graphich] [{ Id: 1 Generation: 17 }] Send TEvGetTaskStateResult: tasks: {1317} >> TGcTest::ShouldIgnoreIncrementCheckpoint [GOOD] >> TStateStorageTest::ShouldCountStates |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |85.5%| [TA] $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::HotTiersRevCompression |85.6%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] >> test_cp_ic.py::TestCpIc::test_discovery |85.6%| [TA] $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> test_retry.py::TestRetry::test_fail_first[kikimr0] >> TStateStorageTest::ShouldCountStates [GOOD] >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TCheckpointStorageTest::ShouldDeleteMarkedCheckpoints [GOOD] >> test_ttl.py::TestTTLOnIndexedTable::test_case |85.6%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case >> test_ttl.py::TestTTLDefaultEnv::test_case >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup >> KqpQueryService::ReturnAndCloseSameTime [GOOD] >> KqpQueryService::ReplaceIntoWithDefaultValue >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpoint_storage/ut/unittest >> TStateStorageTest::ShouldCountStatesNonExistentCheckpoint [GOOD] Test command err: 2025-03-28T09:18:35.678858Z node 1 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [1:36:2083] Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 2025-03-28T09:18:36.089243Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2025-03-28T09:18:36.974950Z node 1 :STREAMS_STORAGE_SERVICE DEBUG: GC deleted checkpoints of graph 'graph' up to 11:3 Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/TGcTestShouldRemovePreviousCheckpoints"); SELECT * FROM checkpoints_graphs_description; 2025-03-28T09:18:48.252078Z node 2 :STREAMS_STORAGE_SERVICE INFO: Successfully bootstrapped storage GC [2:36:2083] Count graph descriptions query: --!syntax_v1 PRAGMA TablePathPrefix("local/ShouldIgnoreIncrementCheckpoint"); SELECT * FROM checkpoints_graphs_description; 2025-03-28T09:18:48.875887Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC received upperbound checkpoint 11:3 for graph 'graph' 2025-03-28T09:18:48.875962Z node 2 :STREAMS_STORAGE_SERVICE DEBUG: GC skip increment checkpoint for graph 'graph' >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] |85.6%| [TA] $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] >> test_dispatch.py::TestMapping::test_mapping |85.6%| [TA] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] >> ttl_unavailable_s3.py::TestUnavailableS3::test >> unstable_connection.py::TestUnstableConnection::test >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> data_correctness.py::TestDataCorrectness::test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> KqpQueryService::ReplaceIntoWithDefaultValue [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] >> KqpQueryService::TableSink_ReplaceFromSelectLargeOlap [GOOD] >> KqpQueryService::TableSink_ReplaceDuplicatesOlap |85.7%| [TA] $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |85.7%| [TA] {RESULT} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup [GOOD]
2025-03-28T09:13:38.908571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:13:39.097684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:39.195395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:41.346461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786079950976978:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:41.347155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:41.687737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:41.774190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:41.822666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:41.878742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:41.926864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:41.983544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:42.069396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786084245944789:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:42.069491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:42.069956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786084245944794:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:42.074530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:42.087894Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786062771106085:2142];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:42.087992Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:42.091242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786084245944796:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:42.177789Z node 1 :TX_PROXY ERROR: Actor# [1:7486786084245944854:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19726, MsgBus: 18015 2025-03-28T09:13:44.303060Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486786090877351652:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:44.303097Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00151e/r3tmp/tmpdKUqvf/pdisk_1.dat 2025-03-28T09:13:44.549867Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:44.563676Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:44.563771Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:44.567555Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19726, node 2 2025-03-28T09:13:44.775251Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:44.775272Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:44.775280Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:44.775398Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18015 TClient is connected to server localhost:18015 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:13:45.556111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:13:45.584817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting...
2025-03-28T09:13:45.669564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:45.854479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:45.942369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:48.784023Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786108057222600:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:48.784101Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:48.847156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:48.895145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:48.972040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:49.033297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:49.073289Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:49.147858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:49.251078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786112352190418:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:49.251179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:49.251591Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486786112352190423:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:49.263893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:49.282845Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-28T09:13:49.285054Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486786112352190425:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:13:49.305100Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486786090877351652:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:49.305257Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:49.364989Z node 2 :TX_PROXY ERROR: Actor# [2:7486786112352190481:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:59.531712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:13:59.531746Z node 2 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 14908, MsgBus: 27238 2025-03-28T09:19:00.558700Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486787448717655484:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:00.559044Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00151e/r3tmp/tmpfbOBSD/pdisk_1.dat 2025-03-28T09:19:00.873266Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:00.892975Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:00.893111Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:00.899916Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14908, node 3 2025-03-28T09:19:01.075238Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:19:01.075259Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:19:01.075269Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:19:01.075422Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27238 TClient is connected to server localhost:27238 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:19:01.641030Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:01.649090Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:19:04.191468Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486787465897525338:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:04.191572Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486787465897525329:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:04.191698Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:04.196801Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:19:04.209951Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486787465897525344:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:19:04.268770Z node 3 :TX_PROXY ERROR: Actor# [3:7486787465897525395:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:19:04.328909Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinCustomColumnOrder-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 12077, MsgBus: 4309 2025-03-28T09:18:51.976261Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787410197616469:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:18:51.976400Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016ff/r3tmp/tmpQo3Oo0/pdisk_1.dat 2025-03-28T09:18:52.537453Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:18:52.541360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:18:52.541461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:18:52.544245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12077, node 1 2025-03-28T09:18:52.827120Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:18:52.827141Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:18:52.827148Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:18:52.827273Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4309 TClient is connected to server localhost:4309 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:18:53.633493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:18:53.655895Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:18:53.674469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:18:53.981856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:18:54.263735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:18:54.362675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:18:56.448079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787431672454713:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:18:56.448231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:18:56.747213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:18:56.818614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:18:56.906995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:18:56.950385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:18:56.976354Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787410197616469:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:18:56.976452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:18:57.019714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:18:57.077473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:18:57.178973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787435967422538:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:18:57.179051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:18:57.179428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787435967422543:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:18:57.183550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:18:57.197616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787435967422545:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:18:57.293939Z node 1 :TX_PROXY ERROR: Actor# [1:7486787435967422601:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:18:58.495187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:18:58.524536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:18:58.553326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:18:58.589426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:18:58.618898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:18:58.646577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3015, MsgBus: 2855 2025-03-28T09:19:00.388122Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486787449666439384:2088];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:00.388191Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016ff/r3tmp/tmp9zvHkj/pdisk_1.dat 2025-03-28T09:19:00.536196Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:00.558142Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:00.558204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:00.560032Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3015, node 2 2025-03-28T09:19:00.655330Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:19:00.655352Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:19:00.655362Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:19:00.655481Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2855 TClient is connected to server localhost:2855 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:19:01.137341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:01.148143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:01.213341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:01.348873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:01.488676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:03.505904Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787462551343003:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:03.505985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:03.551017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:19:03.579843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:19:03.609448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:19:03.641321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:19:03.673003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:19:03.709922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:19:03.757836Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787462551343511:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:03.757910Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:03.757926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787462551343516:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:03.761541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:19:03.771171Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486787462551343518:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:19:03.870805Z node 2 :TX_PROXY ERROR: Actor# [2:7486787462551343572:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:19:04.912620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:19:04.953166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:19:05.026261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:19:05.063466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:19:05.090496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:19:05.117605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:19:05.388004Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486787449666439384:2088];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:05.388077Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] >> TLocksTest::UpdateLockedKey [GOOD] >> TLocksTest::SetLockNothing >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_cp_ic.py::TestCpIc::test_discovery [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] >> KqpQueryService::TableSink_ReplaceDuplicatesOlap [GOOD] >> 
KqpQueryService::TableSink_Oltp_Replace-UseSink |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] |85.7%| [TA] $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} |85.7%| [TA] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] >> TLocksTest::SetLockNothing [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetLockNothing [GOOD] Test command err: 2025-03-28T09:18:50.789710Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787405252287434:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:18:50.794983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f15/r3tmp/tmpa7ZPp7/pdisk_1.dat 2025-03-28T09:18:51.288673Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:18:51.329935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:18:51.330077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-03-28T09:18:51.335985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2295 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:18:51.741663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:18:51.783244Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:18:51.800822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:18:51.984822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:18:52.044628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:18:55.062416Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486787427811912260:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:18:55.062469Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f15/r3tmp/tmpLsiC4u/pdisk_1.dat 2025-03-28T09:18:55.279613Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:18:55.302884Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:18:55.302995Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:18:55.304811Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29362 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:18:55.622361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:18:55.628794Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:18:55.641608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:18:55.724581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:18:55.808228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:18:58.784354Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486787439379398123:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:18:58.784418Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f15/r3tmp/tmpee7UQs/pdisk_1.dat 2025-03-28T09:18:58.948308Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:18:58.974312Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:18:58.974386Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:18:58.977259Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7943 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:18:59.145566Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:18:59.152023Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:18:59.166893Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:18:59.230194Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:18:59.275019Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:02.643244Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486787457901532965:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:02.643333Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f15/r3tmp/tmp7tcLHU/pdisk_1.dat 2025-03-28T09:19:02.783892Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:02.807906Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:02.807973Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:02.809236Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25980 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:19:02.976488Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:19:02.998571Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:03.058144Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:03.107937Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:06.246276Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486787472509109475:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:06.246330Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f15/r3tmp/tmpMj1DBo/pdisk_1.dat 2025-03-28T09:19:06.345585Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:06.380873Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:06.380957Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:06.382784Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7172 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:19:06.574180Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:19:06.595219Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:06.665430Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:06.711677Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:10.139636Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486787489968014425:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:10.139894Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f15/r3tmp/tmptYtpkY/pdisk_1.dat 2025-03-28T09:19:10.249339Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:10.273654Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:10.273738Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:10.275320Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10895 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:19:10.477650Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:10.487908Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:10.504579Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:19:10.509606Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:10.582886Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:10.657565Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:14.104863Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486787506962700672:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:14.104913Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f15/r3tmp/tmpuhCfP7/pdisk_1.dat 2025-03-28T09:19:14.265714Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:14.282627Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:14.282936Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:14.284431Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20307 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-28T09:19:14.562778Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:19:14.571041Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:14.594322Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:19:14.601376Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:14.730589Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:14.794764Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup >> KqpQueryService::TableSink_Oltp_Replace-UseSink [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH1 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] >> TFlatTest::WriteMergeAndRead >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] >> HttpRequest::Probe [GOOD] >> ColumnStatistics::CountMinSketchStatistics >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] >> THiveTest::TestCheckSubHiveMigrationManyTablets [GOOD] >> THiveTest::PipeAlivenessOfDeadTablet >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Probe [GOOD] Test command err: 
2025-03-28T09:12:40.337193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:12:40.337822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:12:40.337958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001004/r3tmp/tmpfM89p3/pdisk_1.dat 2025-03-28T09:12:41.915102Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11626, node 1 2025-03-28T09:12:45.259767Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:12:45.259830Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:12:45.259867Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:12:45.260489Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:12:45.305487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:12:45.424063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:45.431011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:45.463441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1839 2025-03-28T09:12:46.444079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:12:51.168467Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:12:51.308981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:51.309106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:51.399238Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:12:51.404072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:51.820780Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:12:51.852785Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:12:51.853507Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:12:51.853666Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:12:51.853925Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:12:51.854042Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:12:51.854155Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:12:51.854241Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:12:51.854380Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:12:52.096381Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:12:52.096532Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:12:52.120533Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:12:52.355881Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:12:52.433130Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:12:52.433253Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:12:52.484937Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:12:52.496743Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:12:52.497089Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:12:52.497169Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:12:52.497230Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:12:52.497293Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:12:52.497374Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:12:52.497446Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:12:52.498290Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:12:52.558039Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1859:2587] 2025-03-28T09:12:52.559028Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:12:52.587326Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:12:52.587396Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:12:52.587475Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:12:52.606297Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:12:52.606467Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1895:2609], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:12:52.677125Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1935:2627] 2025-03-28T09:12:52.677764Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1935:2627], schemeshard id = 72075186224037897 2025-03-28T09:12:52.794149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:12:52.819534Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:12:52.819732Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:12:53.284113Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:12:53.771240Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:12:53.823489Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:12:56.008533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2238:3072], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:56.008726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:12:56.331902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:12:56.907721Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:12:56.915993Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:12:56.916413Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:12:56.916579Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:12:56.916739Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:12:56.916887Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:12:56.917023Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:12:56.917174Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:12:56.917325Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:12:56.917462Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:12:56.917600Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:12:56.917722Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2384:2884];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:12:57.000806Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2393:2888];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:12:57.000938Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2393:2888];tablet_id=72075186224037900;process=T ... 025-03-28T09:19:24.145239Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-28T09:19:24.145265Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-28T09:19:24.936296Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:19:24.936541Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:19:25.589537Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:19:25.589621Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId= }l}ⴴ_( 2025-03-28T09:19:25.589679Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:19:27.011429Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:19:27.011578Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:19:27.011638Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:19:27.012313Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:19:27.026404Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:19:27.026908Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:19:27.026992Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:19:27.027544Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:19:27.043925Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:19:27.044190Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-28T09:19:27.045184Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15685:9901], server id = [2:15690:9906], tablet id = 72075186224037899, status = OK 2025-03-28T09:19:27.045337Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15685:9901], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:19:27.047730Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15686:9902], server id = [2:15691:9907], tablet id = 72075186224037900, status = OK 2025-03-28T09:19:27.047834Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15686:9902], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:19:27.049041Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15687:9903], server id = [2:15693:9909], tablet id = 72075186224037901, status = OK 2025-03-28T09:19:27.049136Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15687:9903], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:19:27.050149Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15688:9904], server id = [2:15692:9908], tablet id = 72075186224037902, status = OK 2025-03-28T09:19:27.050222Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15688:9904], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:19:27.050514Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15689:9905], server id = [2:15694:9910], tablet id = 72075186224037903, status = OK 2025-03-28T09:19:27.050573Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15689:9905], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:19:27.053533Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:19:27.054360Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:19:27.054947Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15685:9901], server id = [2:15690:9906], tablet id = 72075186224037899 2025-03-28T09:19:27.055006Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:19:27.056554Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-28T09:19:27.057027Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15686:9902], server id = [2:15691:9907], tablet id = 72075186224037900 2025-03-28T09:19:27.057059Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:19:27.057159Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-28T09:19:27.057517Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-28T09:19:27.057884Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15687:9903], server id = [2:15693:9909], tablet id = 72075186224037901 2025-03-28T09:19:27.057917Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:19:27.058240Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15700:9916], server id = [2:15702:9918], tablet id = 72075186224037904, status = OK 2025-03-28T09:19:27.058332Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, 
client id = [2:15700:9916], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:19:27.058469Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15701:9917], server id = [2:15703:9919], tablet id = 72075186224037905, status = OK 2025-03-28T09:19:27.058524Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15701:9917], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:19:27.059656Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15688:9904], server id = [2:15692:9908], tablet id = 72075186224037902 2025-03-28T09:19:27.059696Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:19:27.060456Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15689:9905], server id = [2:15694:9910], tablet id = 72075186224037903 2025-03-28T09:19:27.060491Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:19:27.060713Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15704:9920], server id = [2:15706:9922], tablet id = 72075186224037906, status = OK 2025-03-28T09:19:27.060799Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15704:9920], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:19:27.061292Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15705:9921], server id = [2:15709:9925], tablet id = 72075186224037907, status = OK 2025-03-28T09:19:27.061356Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15705:9921], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:19:27.062584Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:15707:9923], server id = [2:15708:9924], tablet id = 72075186224037908, status = OK 2025-03-28T09:19:27.062653Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:15707:9923], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:19:27.062984Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-28T09:19:27.063507Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-28T09:19:27.065552Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15700:9916], server id = [2:15702:9918], tablet id = 72075186224037904 2025-03-28T09:19:27.065596Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:19:27.065993Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15701:9917], server id = [2:15703:9919], tablet id = 72075186224037905 2025-03-28T09:19:27.066029Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:19:27.066192Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-28T09:19:27.066598Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-28T09:19:27.066852Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15704:9920], server id = [2:15706:9922], tablet id = 72075186224037906 2025-03-28T09:19:27.066884Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:19:27.066926Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-28T09:19:27.066974Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:19:27.067226Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:19:27.067453Z node 2 :STATISTICS DEBUG: [72075186224037894] 
TTxAggregateStatisticsResponse::Complete 2025-03-28T09:19:27.067741Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15705:9921], server id = [2:15709:9925], tablet id = 72075186224037907 2025-03-28T09:19:27.067773Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:19:27.067990Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:19:27.071659Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:15707:9923], server id = [2:15708:9924], tablet id = 72075186224037908 2025-03-28T09:19:27.071705Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:19:27.072716Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:19:27.107255Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTczZmY3NGQtYzE0Zjc3YWUtOWQxZGIyNzAtYTI4NzNkYjI=, TxId: 2025-03-28T09:19:27.107351Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTczZmY3NGQtYzE0Zjc3YWUtOWQxZGIyNzAtYTI4NzNkYjI=, TxId: 2025-03-28T09:19:27.108206Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:19:27.128316Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:19:27.128394Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId= }l}ⴴ_(, ActorId=[1:4095:3318] 2025-03-28T09:19:27.132322Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:15746:9193]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:19:27.132585Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:19:27.132643Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:19:27.135006Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:19:27.135074Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:19:27.135908Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 2 ] 2025-03-28T09:19:27.167486Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table1[Value]=4' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpQueryService::TableSink_Oltp_Replace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9131, MsgBus: 10475 2025-03-28T09:13:23.301593Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486785999493610874:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:23.301822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00152e/r3tmp/tmpTlpNVh/pdisk_1.dat 2025-03-28T09:13:24.215876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:24.215994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:24.223862Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:24.284814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9131, node 1 2025-03-28T09:13:24.689187Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:24.689223Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:24.689230Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:24.689347Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10475 TClient is connected to server localhost:10475 WaitRootIsUp 'Root'... 
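The RunDataQuery statement logged above is emitted as a single line; below is the same YQL reflowed for readability. The text is verbatim from the log, including the untyped List parameter declarations exactly as printed there:

    -- Statistics upsert run by the traversal finisher (verbatim from the log above).
    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List;  -- element type not shown in the logged line
    DECLARE $data AS List;         -- element type not shown in the logged line
    UPSERT INTO `.metadata/_statistics`
        (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES
        ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
        ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);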
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:26.370352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:26.439373Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:28.274805Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486785999493610874:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:28.274889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; CREATE TABLE `/Root/ColumnShard1` (Col1 Int64 NOT NULL, Col2 Int32 NOT NULL, PRIMARY KEY (Col1)) PARTITION BY HASH(Col1) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1000); 2025-03-28T09:13:28.731800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786020968447898:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:28.731929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:30.743878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:13:37.126747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038886;self_id=[1:7486786055328190299:2372];tablet_id=72075186224038886;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:13:37.126986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038886;self_id=[1:7486786055328190299:2372];tablet_id=72075186224038886;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:13:37.127102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486786055328190289:2367];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:13:37.127156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486786055328190289:2367];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:13:37.127287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038886;self_id=[1:7486786055328190299:2372];tablet_id=72075186224038886;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:13:37.127347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486786055328190289:2367];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:13:37.127425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038886;self_id=[1:7486786055328190299:2372];tablet_id=72075186224038886;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:13:37.127455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486786055328190289:2367];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:13:37.127623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486786055328190289:2367];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:13:37.127627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038886;self_id=[1:7486786055328190299:2372];tablet_id=72075186224038886;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:13:37.127817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038886;self_id=[1:7486786055328190299:2372];tablet_id=72075186224038886;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:13:37.127938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038886;self_id=[1:7486786055328190299:2372];tablet_id=72075186224038886;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:13:37.127947Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7486786055328190289:2367];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:13:37.128057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038886;self_id=[1:7486786055328190299:2372];tablet_id=72075186224038886;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:13:37.128072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486786055328190289:2367];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:13:37.128169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038886;self_id=[1:7486786055328190299:2372];tablet_id=72075186224038886;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:13:37.128176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486786055328190289:2367];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:13:37.128285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038886;self_id=[1:7486786055328190299:2372];tablet_id=72075186224038886;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:13:37.128297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486786055328190289:2367];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:13:37.128433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038886;self_id=[1:7486786055328190299:2372];tablet_id=72075186224038886;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:13:37.128581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486786055328190289:2367];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:13:37.128690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486786055328190289:2367];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:13:37.128797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486786055328190289:2367];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:13:37.129270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038886;self_id=[1:7486786055328190299:2372];tablet_id=72075186224038886;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:13:37.171705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7486786055328190500:2412];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:13:37.171852Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038881;self_id=[1:7486786055328190500:2412];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:13:37.172119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7486786055328190500:2412];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:13:37.172268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038881;self_id=[1:7486786055328190500:2412];tablet_id=72075186224038881;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesClean ... V1Chunks_V2; 2025-03-28T09:19:10.825099Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:19:10.825141Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:19:10.825313Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:19:10.825350Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:19:10.853839Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:19:10.854095Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:19:10.863065Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037893;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:19:10.863270Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:19:10.872523Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037892;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:19:10.872558Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:19:10.881806Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:19:10.881960Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:19:10.890526Z node 2 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:19:10.890525Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715658;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715658; 2025-03-28T09:19:10.891512Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486787471519993713:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:10.891874Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:19:10.931040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787492994831345:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:10.931137Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:10.931249Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787492994831350:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:10.937100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:19:10.961484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486787492994831352:2406], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:19:11.027745Z node 2 :TX_PROXY ERROR: Actor# [2:7486787497289798699:2589] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:19:11.126172Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715662;tx_id=281474976715662;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715662; 2025-03-28T09:19:11.171958Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037894;tx_state=TTxProgressTx::Execute;tx_current=281474976715664;tx_id=281474976715664;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715664; Trying to start YDB, gRPC: 15564, MsgBus: 28162 2025-03-28T09:19:12.970062Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486787499653106934:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:12.970130Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00152e/r3tmp/tmp799yzm/pdisk_1.dat 2025-03-28T09:19:13.168362Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:13.186148Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:13.186252Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:13.187814Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15564, node 3 2025-03-28T09:19:13.240010Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:19:13.240039Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:19:13.240050Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:19:13.240247Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28162 TClient is connected to server localhost:28162 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
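For reference, the column-store DDL this test issues (logged earlier in this block as one line), reflowed for readability; the statement text is verbatim from the log:

    -- Column table created by KqpQueryService::TableSink_Oltp_Replace (verbatim from the log).
    CREATE TABLE `/Root/ColumnShard1` (
        Col1 Int64 NOT NULL,
        Col2 Int32 NOT NULL,
        PRIMARY KEY (Col1)
    )
    PARTITION BY HASH(Col1)
    WITH (
        STORE = COLUMN,
        AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1000
    );

The WITH clause accounts for the volume of TX_COLUMNSHARD normalizer output in this block: with a minimum of 1000 partitions, many column shards each run the TTxInitSchema normalizer chain (Granules, Chunks, TablesCleaner, ...) at startup, which is consistent with the wide tablet-id range seen above.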
2025-03-28T09:19:13.977124Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:19:13.985950Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:19:17.912852Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486787521127944068:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:17.912990Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:17.970921Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486787499653106934:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:17.971011Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:19:17.972043Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:19:18.266552Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:19:18.705893Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486787525422912752:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:18.706081Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:18.706421Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486787525422912757:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:18.712919Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-28T09:19:18.744463Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486787525422912759:2447], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-28T09:19:18.826054Z node 3 :TX_PROXY ERROR: Actor# [3:7486787525422912830:3260] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] >> TFlatTest::WriteMergeAndRead [GOOD] >> TFlatTest::WriteSplitAndRead >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+ColumnStore >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftSemiJoin+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 24623, MsgBus: 25164 2025-03-28T09:19:23.315805Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787546530739502:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:23.315862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016fc/r3tmp/tmpRNVf6x/pdisk_1.dat 2025-03-28T09:19:23.877770Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:23.886066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:23.886212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:23.888529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24623, node 1 2025-03-28T09:19:24.098171Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:19:24.098192Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:19:24.098198Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:19:24.098328Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25164 TClient is connected to server localhost:25164 WaitRootIsUp 'Root'... 
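The recurring "Resource pool default not found" NOT_FOUND warnings and the subsequent "path exist, request accepts it" TX_PROXY errors are one pattern: a test's first query triggers lazy creation of `.metadata/workload_manager/pools/default`, and concurrent creators race benignly, with the loser accepting the existing path. A pool can also be created explicitly; the sketch below assumes YDB's workload-manager DDL, and the pool name and setting values are illustrative assumptions, not taken from this run:

    -- Hedged sketch: explicitly creating a resource pool so first-query
    -- lookups do not hit NOT_FOUND. Name and values are illustrative only.
    CREATE RESOURCE POOL illustrative_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- admission limit for running queries
        QUEUE_SIZE = 100              -- queries allowed to wait for admission
    );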
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:19:25.093623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:25.116098Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:19:25.153423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:25.420206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:25.652492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:25.794511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:28.007976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787568005577741:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:28.008082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:28.320481Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787546530739502:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:28.320553Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:19:28.371919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:19:28.451114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:19:28.505767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:19:28.579981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:19:28.632677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:19:28.689452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:19:28.759401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787568005578257:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:28.759553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:28.760114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787568005578262:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:28.765079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:19:28.785753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787568005578264:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:19:28.856145Z node 1 :TX_PROXY ERROR: Actor# [1:7486787568005578319:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:19:30.283195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:19:30.359634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:19:30.402001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:19:30.441824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:19:30.478217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:19:30.551946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] >> TFlatTest::WriteSplitAndRead [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] |85.8%| [TA] $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} |85.8%| [TA] {RESULT} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndRead [GOOD] Test command err: 2025-03-28T09:19:27.688349Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787565750522201:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:27.688468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f13/r3tmp/tmpcu2b4c/pdisk_1.dat 2025-03-28T09:19:28.389134Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:28.431062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:28.431150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:28.437914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4048 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:19:28.826188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:19:28.888711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:19:29.193625Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.026s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-28T09:19:29.195818Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.012s,wait=0.003s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-28T09:19:29.264586Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-28T09:19:29.280190Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-03-28T09:19:29.330570Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:19:29.334651Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 8r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 2025-03-28T09:19:29.337419Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:19:29.337496Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:19:29.338008Z node 1 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:19:29.341134Z node 1 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-03-28T09:19:29.346294Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:19:29.348429Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:19:29.348478Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:19:29.351449Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:19:29.358759Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:19:29.358867Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:19:29.362901Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.21, eph 3} end=0, 4 blobs 9r (max 9), put Spent{time=0.009s,wait=0.002s,interrupts=1} Part{ 2 pk, lobs 4 +0, (4073 2983 5183)b }, ecr=1.000 2025-03-28T09:19:29.367423Z node 1 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:19:29.367448Z node 1 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-03-28T09:19:29.367592Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:19:29.374841Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:19:29.374924Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 
PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153569045 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) 2025-03-28T09:19:29.387020Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:19:29.459104Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-28T09:19:29.459399Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:19:29.461077Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-28T09:19:29.461817Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:19:29.462448Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-03-28T09:19:29.463381Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-28T09:19:29.463524Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:19:29.463954Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-03-28T09:19:29.464617Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-28T09:19:29.470845Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:19:29.471421Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-03-28T09:19:29.472277Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-28T09:19:29.472416Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:19:29.472870Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-03-28T09:19:29.473581Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-28T09:19:29.474320Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:19:29.474767Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-03-28T09:19:29.475522Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-28T09:19:29.475640Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:19:29.476068Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-03-28T09:19:29.476708Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-28T09:19:29.482914Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:19:29.483562Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-03-28T09:19:29.484447Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-28T09:19:29.484581Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:19:29.485101Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 
2025-03-28T09:19:29.485801Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-28T09:19:29.486452Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:19:29.486897Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-03-28T09:19:29.487566Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-28T09:19:29.487670Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:19:29.488053Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-03-28T09:19:29.488736Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-28T09:19:29.493570Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:19:29.494089Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-03-28T09:19:29.494985Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-28T09:19:29.495124Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:19:29.495593Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-03-28T09:19:29.496292Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-28T09:19:29.502877Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:19:29.503458Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037888 restored its data 2025-03-28T09:19:29.504392Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-28T09:19:29.504541Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:19:29.504985Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 at 72075186224037889 restored its data 2025-03-28T09:19:29.505724Z node 1 :TX_DATASHARD DEBUG: tx 281474976710680 released its data 2025-03-28T09:19:29.506374Z node 1 :TX_DATASHARD ... 
24037892, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:19:34.382829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:19:34.382899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:19:34.382934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:19:34.383153Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-03-28T09:19:34.383164Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-03-28T09:19:34.383177Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline TClient::Ls request: /dc-1/Dir/TableOld 2025-03-28T09:19:34.383814Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:19:34.383975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486787594768781296 RawX2: 4503608217307458 } TabletId: 72075186224037890 State: 4 2025-03-28T09:19:34.384005Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:19:34.384135Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486787594768781309 RawX2: 4503608217307460 } TabletId: 72075186224037891 State: 4 2025-03-28T09:19:34.384152Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:19:34.384265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486787594768781309 RawX2: 4503608217307460 } TabletId: 72075186224037891 State: 4 2025-03-28T09:19:34.384284Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:19:34.384694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:19:34.384799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:19:34.384837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:19:34.385623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-28T09:19:34.385854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-03-28T09:19:34.386034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-28T09:19:34.386143Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 
TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-03-28T09:19:34.386290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-28T09:19:34.386962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-28T09:19:34.386977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-28T09:19:34.387005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-28T09:19:34.387020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-03-28T09:19:34.387028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-03-28T09:19:34.387076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-28T09:19:34.387208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-28T09:19:34.387322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-28T09:19:34.387428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-28T09:19:34.387503Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-28T09:19:34.389387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-28T09:19:34.389404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-28T09:19:34.389441Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-28T09:19:34.389455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-28T09:19:34.389478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-28T09:19:34.390268Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-28T09:19:34.390307Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-03-28T09:19:34.390335Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7486787594768781436:2622], serverId# [2:7486787594768781439:2625], sessionId# [0:0:0] 2025-03-28T09:19:34.390351Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-03-28T09:19:34.390362Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-03-28T09:19:34.390372Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-03-28T09:19:34.390387Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7486787594768781437:2623], serverId# [2:7486787594768781440:2626], sessionId# [0:0:0] 2025-03-28T09:19:34.390399Z node 2 :TX_DATASHARD INFO: 
OnTabletStop: 72075186224037892 reason = ReasonStop 2025-03-28T09:19:34.390414Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:7486787594768781438:2624], serverId# [2:7486787594768781441:2627], sessionId# [0:0:0] 2025-03-28T09:19:34.390426Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-03-28T09:19:34.390442Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7486787590473813761:2385], serverId# [2:7486787590473813762:2386], sessionId# [0:0:0] 2025-03-28T09:19:34.391563Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-03-28T09:19:34.391590Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-03-28T09:19:34.391603Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-03-28T09:19:34.391923Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-03-28T09:19:34.392403Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-28T09:19:34.392483Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-03-28T09:19:34.394086Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-03-28T09:19:34.394152Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-03-28T09:19:34.395531Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-03-28T09:19:34.395576Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-03-28T09:19:34.396841Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-03-28T09:19:34.396878Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-03-28T09:19:34.398604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486787590473813641 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-03-28T09:19:34.398671Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:19:34.399820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-03-28T09:19:34.401677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-28T09:19:34.401908Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-28T09:19:34.402053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:19:34.402065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-28T09:19:34.402108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:19:34.402645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-28T09:19:34.402702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-28T09:19:34.402754Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-28T09:19:34.403325Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-03-28T09:19:34.403358Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-03-28T09:19:34.403386Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7486787590473813771:2392], serverId# [2:7486787590473813772:2393], sessionId# [0:0:0] 2025-03-28T09:19:34.404280Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-28T09:19:34.411829Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-28T09:19:34.411938Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] Test command err: 2025-03-28T09:14:39.911320Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:14:39.935922Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:39.953339Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:14:39.962137Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:45:2072] ControllerId# 72057594037932033 2025-03-28T09:14:39.962194Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:14:39.970841Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:14:39.971244Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:14:39.974512Z node 2 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:14:39.974586Z node 2 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 
2025-03-28T09:14:39.994256Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:51:2076] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.994406Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:52:2077] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.994489Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:53:2078] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.994616Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:54:2079] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.994813Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:55:2080] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.994927Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:56:2081] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.995017Z node 2 :BS_PROXY DEBUG: Group# 0 Actor# [2:44:2071] Create Queue# [2:57:2082] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:39.995044Z node 2 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:14:40.004701Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [2:45:2072] 2025-03-28T09:14:40.004765Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [2:45:2072] 2025-03-28T09:14:40.004831Z node 2 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:14:40.004910Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:14:40.017051Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:14:40.024062Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:40.024231Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-28T09:14:40.024782Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-28T09:14:40.025790Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-03-28T09:14:40.025844Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:14:40.026664Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:68:2076] ControllerId# 72057594037932033 2025-03-28T09:14:40.026719Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:14:40.026781Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:14:40.026962Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:14:40.043116Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:14:40.043180Z node 1 :BS_PROXY 
NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:14:40.044666Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:76:2081] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.044857Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:77:2082] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.044973Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:78:2083] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.045107Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:79:2084] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.045282Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:80:2085] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.045415Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:81:2086] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.045564Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:67:2075] Create Queue# [1:82:2087] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.045599Z node 1 :BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:14:40.045675Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:68:2076] 2025-03-28T09:14:40.045699Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:68:2076] 2025-03-28T09:14:40.045733Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:14:40.045764Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:14:40.046643Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:14:40.079828Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:68:2076] 2025-03-28T09:14:40.079897Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.079929Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-28T09:14:40.103976Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:14:40.104107Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [2:45:2072] 2025-03-28T09:14:40.104139Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.104159Z node 2 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-28T09:14:40.104192Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:14:40.104315Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:14:40.104437Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.104464Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-28T09:14:40.107565Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-28T09:14:40.111530Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} 
TWriterActor bootstrap Drives# [] Record# {} 2025-03-28T09:14:40.119173Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.119221Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-28T09:14:40.119317Z node 2 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-28T09:14:40.119490Z node 2 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-28T09:14:40.119727Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:68:2076] 2025-03-28T09:14:40.119788Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.120277Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:14:40.120500Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.126889Z node 2 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:14:40.127455Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:40.135222Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [2:49:2064] 2025-03-28T09:14:40.135280Z node 2 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [2:49:2064] 2025-03-28T09:14:40.135328Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-03-28T09:14:40.135398Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-03-28T09:14:40.135464Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-03-28T09:14:40.135495Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-03-28T09:14:40.135554Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-28T09:14:40.135644Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:72:2064] 2025-03-28T09:14:40.135665Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:72:2064] 2025-03-28T09:14:40.135731Z node 2 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-03-2 ... 
09:19:31.775003Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-28T09:19:31.775037Z node 14 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-03-28T09:19:31.775096Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [14:409:2367] 2025-03-28T09:19:31.775135Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] schedule retry [14:409:2367] 2025-03-28T09:19:31.786305Z node 14 :BS_PROXY_PUT INFO: [848c0e06882585e9] bootstrap ActorId# [14:413:2369] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:9:0:0:199:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:19:31.786504Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] Id# [72057594037927937:2:9:0:0:199:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:19:31.786585Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] restore Id# [72057594037927937:2:9:0:0:199:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:19:31.786708Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:9:0:0:199:1] Marker# BPG33 2025-03-28T09:19:31.786774Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:9:0:0:199:1] Marker# BPG32 2025-03-28T09:19:31.786987Z node 14 :BS_PROXY DEBUG: Send to queueActorId# [14:36:2080] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:9:0:0:199:1] FDS# 199 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:19:31.788446Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] received {EvVPutResult Status# OK ID# [72057594037927937:2:9:0:0:199:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 24 } Cost# 81566 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 25 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-28T09:19:31.788605Z node 14 :BS_PROXY_PUT DEBUG: [848c0e06882585e9] Result# TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-28T09:19:31.788696Z node 14 :BS_PROXY_PUT INFO: [848c0e06882585e9] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:19:31.788887Z node 14 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.944 sample PartId# [72057594037927937:2:9:0:0:199:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 14 } TEvVPutResult{ TimestampMs# 2.418 VDiskId# [0:1:0:0:0] NodeId# 14 Status# OK } ] } 2025-03-28T09:19:31.789114Z node 14 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:9:0:0:199:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-28T09:19:31.789281Z node 14 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:10} commited cookie 1 for 
step 9 2025-03-28T09:19:31.799812Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] client retry [14:143:2159] 2025-03-28T09:19:31.799914Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] lookup [14:143:2159] 2025-03-28T09:19:31.800060Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936131 entry.State: StInit ev: {EvForward TabletID: 72057594037936131 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:19:31.800214Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:19:31.800333Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2025-03-28T09:19:31.800387Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2025-03-28T09:19:31.800421Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2025-03-28T09:19:31.800464Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131} 2025-03-28T09:19:31.800519Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131} 2025-03-28T09:19:31.800554Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131} 2025-03-28T09:19:31.800640Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936131 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936131 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-28T09:19:31.800681Z node 14 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936131 followers: 0 2025-03-28T09:19:31.800763Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] forward result error, check reconnect [14:143:2159] 2025-03-28T09:19:31.800802Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037936131] schedule retry [14:143:2159] 2025-03-28T09:19:31.821704Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] client retry [14:409:2367] 2025-03-28T09:19:31.821785Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [14:409:2367] 2025-03-28T09:19:31.821910Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:19:31.822047Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:19:31.822199Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-03-28T09:19:31.822249Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-03-28T09:19:31.822284Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-03-28T09:19:31.822326Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-28T09:19:31.822375Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-28T09:19:31.822410Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-28T09:19:31.822490Z node 14 
:TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-28T09:19:31.822533Z node 14 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-03-28T09:19:31.822603Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [14:409:2367] 2025-03-28T09:19:31.822641Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] schedule retry [14:409:2367] 2025-03-28T09:19:31.835269Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] client retry [14:409:2367] 2025-03-28T09:19:31.835359Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [14:409:2367] 2025-03-28T09:19:31.835462Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:19:31.835614Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:19:31.835727Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-03-28T09:19:31.835776Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-03-28T09:19:31.835813Z node 14 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-03-28T09:19:31.835856Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-28T09:19:31.835901Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-28T09:19:31.835969Z node 14 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-28T09:19:31.836070Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-28T09:19:31.836111Z node 14 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-03-28T09:19:31.836185Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [14:409:2367] 2025-03-28T09:19:31.836258Z node 14 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed, check aliveness [14:409:2367] 2025-03-28T09:19:31.903061Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [14:417:2370] 2025-03-28T09:19:31.903122Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [14:417:2370] 2025-03-28T09:19:31.903210Z node 14 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:19:31.903263Z node 14 :TABLET_RESOLVER DEBUG: SelectForward node 14 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [14:271:2262] 2025-03-28T09:19:31.903335Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send 
[14:417:2370] 2025-03-28T09:19:31.903385Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [14:417:2370] 2025-03-28T09:19:31.903443Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [14:417:2370] 2025-03-28T09:19:31.903569Z node 14 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [14:417:2370] 2025-03-28T09:19:31.903691Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [14:417:2370] 2025-03-28T09:19:31.903746Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [14:417:2370] 2025-03-28T09:19:31.903789Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [14:417:2370] 2025-03-28T09:19:31.903861Z node 14 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [14:409:2367] EventType# 268697616 2025-03-28T09:19:31.903978Z node 14 :HIVE WARN: HIVE#72057594037927937 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-03-28T09:19:31.904076Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] received poison pill [14:417:2370] 2025-03-28T09:19:31.904130Z node 14 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [14:417:2370] 2025-03-28T09:19:31.904182Z node 14 :PIPE_SERVER DEBUG: [72057594037927937] Got PeerClosed from# [14:417:2370] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::AllowJoinsForComplexPredicates-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 10825, MsgBus: 16361 2025-03-28T09:19:37.065450Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787605741224186:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:37.065484Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016f8/r3tmp/tmp2EboGH/pdisk_1.dat 2025-03-28T09:19:37.654118Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:37.661616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:37.666817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:37.669740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10825, node 1 2025-03-28T09:19:37.903155Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:19:37.903184Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:19:37.903191Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:19:37.903296Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16361 TClient is connected to server localhost:16361 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:19:38.755184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:38.777355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:38.929772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:39.125624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:39.235203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:19:41.604513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787622921094919:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:41.604614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:41.906058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:19:41.981090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:19:42.021019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:19:42.067236Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787605741224186:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:42.067300Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:19:42.099960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:19:42.131594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:19:42.177575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:19:42.243980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787627216062733:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:42.244055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:42.244404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787627216062738:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:42.248540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:19:42.265855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787627216062740:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:19:42.328627Z node 1 :TX_PROXY ERROR: Actor# [1:7486787627216062794:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> test_dispatch.py::TestMapping::test_mapping [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> LabeledDbCounters::OneTabletRestart [GOOD] >> LabeledDbCounters::TwoTablets >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] |85.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] >> KqpJoin::JoinDupColumnRightPure >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] [GOOD] >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] >> test_dispatch.py::TestMapping::test_idle >> KqpJoin::JoinDupColumnRightPure [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinDupColumnRightPure [GOOD] Test command err: Trying to start YDB, gRPC: 7347, MsgBus: 28166 2025-03-28T09:19:59.747531Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787700920784558:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:59.755792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016f6/r3tmp/tmpB7MRq6/pdisk_1.dat 2025-03-28T09:20:00.327903Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:00.352994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:00.353088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:00.355660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7347, node 1 2025-03-28T09:20:00.494979Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:00.495000Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:00.495010Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:00.495157Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28166 TClient is connected to server localhost:28166 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:01.505744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:01.537502Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:01.553280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:01.760269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:20:02.115192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:02.239603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:04.503859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787722395622777:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:04.503989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:04.750863Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787700920784558:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:04.750953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:04.887245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:04.951108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:04.989092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:05.041853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:05.080699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:05.175384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:05.244820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787726690590591:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:05.244910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:05.245930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787726690590597:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:05.249523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:05.263838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787726690590599:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:05.322525Z node 1 :TX_PROXY ERROR: Actor# [1:7486787726690590651:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:06.577596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:06.611903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:20:06.654785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] >> TColumnShardTestSchema::InternalTTL_Types >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] |85.9%| [TA] $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |85.9%| [TA] {RESULT} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... 
results_accumulator.log} >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] >> BasicStatistics::NotFullStatisticsColumnshard >> TFlatTest::Init >> THiveTest::TestCreateAndDeleteTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWithStoragePools >> KqpJoin::JoinAggregateSingleRow >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 15348, MsgBus: 10266 2025-03-28T09:20:01.875189Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787710585871020:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:01.894345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016f5/r3tmp/tmpA0jIWk/pdisk_1.dat 2025-03-28T09:20:02.746945Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:02.748538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:02.748630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:02.761877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15348, node 1 2025-03-28T09:20:03.080677Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:03.080698Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:03.080708Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:03.080824Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10266 TClient is connected to server localhost:10266 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:20:04.238764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:04.282764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:04.508865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:04.768664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:04.894663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:06.880884Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787710585871020:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:07.013695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:07.268478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787736355676567:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:07.268614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:07.702518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:07.750363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:07.798436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:07.848157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:07.899673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:07.938701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:08.032789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787740650644378:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:08.032895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:08.033263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787740650644383:2463], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:08.037501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:08.055477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787740650644385:2464], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:08.127996Z node 1 :TX_PROXY ERROR: Actor# [1:7486787740650644440:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:09.720382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:09.767259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:20:09.816574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:20:09.859079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:20:09.907224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:20:09.981607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] >> THiveTest::TestCreateAndReassignTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWhileStarting >> THiveTest::TestCreateAndReassignTabletWhileStarting [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups >> TFlatTest::Init [GOOD] >> TFlatTest::LargeDatashardReply >> BsControllerConfig::Basic >> BsControllerConfig::OverlayMapCrossReferences >> BsControllerConfig::ManyPDisksRestarts >> BsControllerConfig::AddDriveSerial >> BsControllerConfig::PDiskCreate >> THiveTest::TestCreateTabletAndReassignGroups [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups3 >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups3 [GOOD] >> THiveTest::TestCreateTabletAndMixedReassignGroups3 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> 
test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] >> BsControllerConfig::Basic [GOOD] >> BsControllerConfig::DeleteStoragePool >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> BsControllerConfig::AddDriveSerial [GOOD] >> BsControllerConfig::AddDriveSerialMassive >> TColumnShardTestSchema::HotTiersRevCompression [GOOD] >> BsControllerConfig::PDiskCreate [GOOD] >> THiveTest::TestCreateTabletAndMixedReassignGroups3 [GOOD] >> THiveTest::TestCreateExternalTablet >> KqpJoin::JoinAggregateSingleRow [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::PDiskCreate [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076] Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:217:2066] recipient: [1:194:2076] 2025-03-28T09:20:18.503408Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-28T09:20:18.515125Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-28T09:20:18.515546Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-28T09:20:18.517304Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:20:18.517717Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-28T09:20:18.518900Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:18.518935Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:18.519151Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-28T09:20:18.530273Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-28T09:20:18.530403Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-28T09:20:18.530559Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-28T09:20:18.530661Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:18.530776Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:18.530841Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:242:2066] recipient: [1:20:2067] 2025-03-28T09:20:18.544503Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-28T09:20:18.544661Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:18.555523Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:18.555661Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:18.555748Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:18.555835Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:18.555947Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:18.556024Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:18.556059Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:18.556097Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:18.567005Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:18.567135Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:18.577867Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:18.578023Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-28T09:20:18.579235Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-28T09:20:18.579289Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-28T09:20:18.579513Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-28T09:20:18.579560Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-28T09:20:18.603318Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" 
IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } } } Command { QueryBaseConfig { } } } 2025-03-28T09:20:18.603913Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2025-03-28T09:20:18.603995Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2025-03-28T09:20:18.604023Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2025-03-28T09:20:18.604047Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2025-03-28T09:20:18.604069Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2025-03-28T09:20:18.604108Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2025-03-28T09:20:18.604133Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2025-03-28T09:20:18.604155Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2025-03-28T09:20:18.604188Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2025-03-28T09:20:18.604215Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2025-03-28T09:20:18.604237Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2025-03-28T09:20:18.604263Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2025-03-28T09:20:18.604288Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2025-03-28T09:20:18.604323Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2025-03-28T09:20:18.604349Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2025-03-28T09:20:18.604369Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2025-03-28T09:20:18.604391Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2025-03-28T09:20:18.604413Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2025-03-28T09:20:18.604435Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2025-03-28T09:20:18.604456Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2025-03-28T09:20:18.604476Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2025-03-28T09:20:18.604521Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2025-03-28T09:20:18.604561Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1001 Path# 
/dev/disk2 2025-03-28T09:20:18.604588Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2025-03-28T09:20:18.604623Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2025-03-28T09:20:18.604653Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2025-03-28T09:20:18.604701Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2025-03-28T09:20:18.604730Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2025-03-28T09:20:18.604753Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2025-03-28T09:20:18.604779Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:188:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:188:2076] Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:217:2066] recipient: [11:188:2076] 2025-03-28T09:20:20.279284Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-28T09:20:20.280170Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-28T09:20:20.280414Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-28T09:20:20.281678Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:20:20.282065Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-28T09:20:20.282625Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:20.282655Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:20.285013Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-28T09:20:20.303890Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-28T09:20:20.304027Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-28T09:20:20.304158Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-28T09:20:20.304263Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:20.304365Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:20.304435Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:242:2066] recipient: [11:20:2067] 2025-03-28T09:20:20.319344Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-28T09:20:20.319522Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:20.331170Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:20.331310Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:20.331386Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:20.331455Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:20.331584Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:20.331691Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:20.331749Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:20.331818Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:20.343223Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:20.343348Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:20.355239Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:20.355365Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-28T09:20:20.356580Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-28T09:20:20.356641Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-28T09:20:20.356845Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-28T09:20:20.356884Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-28T09:20:20.357637Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } 
HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } } } Command { QueryBaseConfig { } } } 2025-03-28T09:20:20.358134Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2025-03-28T09:20:20.358180Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2025-03-28T09:20:20.358214Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1002 Path# /dev/disk3 2025-03-28T09:20:20.358245Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1000 Path# /dev/disk1 2025-03-28T09:20:20.358269Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1001 Path# /dev/disk2 2025-03-28T09:20:20.358292Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1002 Path# /dev/disk3 2025-03-28T09:20:20.358313Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1000 Path# /dev/disk1 2025-03-28T09:20:20.358336Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1001 Path# /dev/disk2 2025-03-28T09:20:20.358359Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1002 Path# /dev/disk3 2025-03-28T09:20:20.358380Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1000 Path# /dev/disk1 2025-03-28T09:20:20.358409Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1001 Path# /dev/disk2 2025-03-28T09:20:20.358434Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1002 Path# /dev/disk3 2025-03-28T09:20:20.358456Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1000 Path# /dev/disk1 2025-03-28T09:20:20.358532Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1001 Path# /dev/disk2 2025-03-28T09:20:20.358560Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1002 Path# /dev/disk3 2025-03-28T09:20:20.358593Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1000 Path# /dev/disk1 2025-03-28T09:20:20.358633Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1001 Path# /dev/disk2 2025-03-28T09:20:20.358657Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1002 Path# /dev/disk3 2025-03-28T09:20:20.362846Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1000 Path# /dev/disk1 2025-03-28T09:20:20.363020Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1001 Path# /dev/disk2 2025-03-28T09:20:20.363055Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1002 Path# /dev/disk3 
2025-03-28T09:20:20.363078Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1000 Path# /dev/disk1 2025-03-28T09:20:20.363117Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1001 Path# /dev/disk2 2025-03-28T09:20:20.363158Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1002 Path# /dev/disk3 2025-03-28T09:20:20.363188Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1000 Path# /dev/disk1 2025-03-28T09:20:20.363214Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1001 Path# /dev/disk2 2025-03-28T09:20:20.363237Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1002 Path# /dev/disk3 2025-03-28T09:20:20.363259Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1000 Path# /dev/disk1 2025-03-28T09:20:20.363282Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1001 Path# /dev/disk2 2025-03-28T09:20:20.363304Z node 11 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1002 Path# /dev/disk3 >> BsControllerConfig::ReassignGroupDisk >> BsControllerConfig::OverlayMapCrossReferences [GOOD] >> THiveTest::TestCreateExternalTablet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersRevCompression [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143154136.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143154136.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143154136.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154136.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143154136.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143154136.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123152936.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123154136.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123154136.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123152936.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123152936.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123152936.000000s;Name=;Codec=}; 
2025-03-28T09:18:57.207752Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:18:57.330654Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:18:57.354768Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:18:57.357344Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:18:57.367008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:18:57.367232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:18:57.367456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:18:57.367566Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:18:57.367679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:18:57.367780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:18:57.367881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:18:57.368008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:18:57.368131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:18:57.368253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:18:57.368371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:18:57.368476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:18:57.400845Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:18:57.401227Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:18:57.401295Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:18:57.401476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:18:57.401616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:18:57.401694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:18:57.401739Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:18:57.401830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:18:57.401892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:18:57.401929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:18:57.401949Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:18:57.402057Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:18:57.402102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:18:57.402126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:18:57.402144Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:18:57.402200Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:18:57.402232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:18:57.402255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:18:57.402272Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:18:57.402315Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:18:57.402338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:18:57.402357Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:18:57.402383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:18:57.402407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:18:57.402432Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:18:57.402850Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-03-28T09:18:57.402932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-03-28T09:18:57.403005Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-03-28T09:18:57.403097Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=48; 2025-03-28T09:18:57.403272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:18:57.403352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:18:57.403395Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:18:57.403610Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:18:57.403657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:18:57.403687Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... 
d=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-28T09:20:21.519783Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:20:21.519844Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:20:21.519913Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:20:21.519974Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:20:21.520106Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:20:21.520469Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-03-28T09:20:21.520627Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:20:21.520848Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:20:21.520923Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:20:21.521455Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-28T09:20:21.521563Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:20:21.522128Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1984:3989];trace_detailed=; 2025-03-28T09:20:21.522606Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-28T09:20:21.522906Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-28T09:20:21.523136Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:20:21.523294Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:20:21.523714Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:20:21.523861Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:20:21.524015Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:20:21.524073Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1984:3989] finished for tablet 9437184 2025-03-28T09:20:21.524621Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1983:3988];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743153621522049,"name":"_full_task","f":1743153621522049,"d_finished":0,"c":0,"l":1743153621524149,"d":2100},"events":[{"name":"bootstrap","f":1743153621522279,"d_finished":1049,"c":1,"l":1743153621523328,"d":1049},{"a":1743153621523682,"name":"ack","f":1743153621523682,"d_finished":0,"c":0,"l":1743153621524149,"d":467},{"a":1743153621523657,"name":"processing","f":1743153621523657,"d_finished":0,"c":0,"l":1743153621524149,"d":492},{"name":"ProduceResults","f":1743153621523027,"d_finished":583,"c":2,"l":1743153621524048,"d":583},{"a":1743153621524052,"name":"Finish","f":1743153621524052,"d_finished":0,"c":0,"l":1743153621524149,"d":97}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:20:21.524731Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1983:3988];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:20:21.525195Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1983:3988];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743153621522049,"name":"_full_task","f":1743153621522049,"d_finished":0,"c":0,"l":1743153621524786,"d":2737},"events":[{"name":"bootstrap","f":1743153621522279,"d_finished":1049,"c":1,"l":1743153621523328,"d":1049},{"a":1743153621523682,"name":"ack","f":1743153621523682,"d_finished":0,"c":0,"l":1743153621524786,"d":1104},{"a":1743153621523657,"name":"processing","f":1743153621523657,"d_finished":0,"c":0,"l":1743153621524786,"d":1129},{"name":"ProduceResults","f":1743153621523027,"d_finished":583,"c":2,"l":1743153621524048,"d":583},{"a":1743153621524052,"name":"Finish","f":1743153621524052,"d_finished":0,"c":0,"l":1743153621524786,"d":734}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1984:3989]->[1:1983:3988] 2025-03-28T09:20:21.525302Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:20:21.521530Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:20:21.525357Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:20:21.525481Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn+StreamLookup >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinAggregateSingleRow [GOOD] Test command err: Trying to start YDB, gRPC: 9519, MsgBus: 24108 2025-03-28T09:20:13.574338Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787763694709106:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:13.574814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016f4/r3tmp/tmpNnDNXk/pdisk_1.dat 2025-03-28T09:20:14.237175Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:14.276103Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:14.276198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:14.287612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9519, node 1 2025-03-28T09:20:14.422331Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:14.422373Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:14.422392Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-03-28T09:20:14.422520Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24108 TClient is connected to server localhost:24108 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:15.193780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:15.213634Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:15.220885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:20:15.412775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:20:15.661085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:15.738457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:17.669645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787780874579891:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:17.669792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:18.078185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:18.130482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:18.221479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:18.259077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:18.304015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:18.352868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:18.428353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787785169547701:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:18.428473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:18.428734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787785169547706:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:18.433828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:18.452601Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:20:18.452880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787785169547708:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:18.535000Z node 1 :TX_PROXY ERROR: Actor# [1:7486787785169547763:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:18.574713Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787763694709106:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:18.574969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:19.930511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:20.004003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:20:20.069867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMapCrossReferences [GOOD] >> KqpJoinOrder::FiveWayJoinStatsOverride-ColumnStore >> BsControllerConfig::AddDriveSerialMassive [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn+StreamLookup >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::AddDriveSerialMassive [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076] Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:217:2066] recipient: [1:194:2076] 2025-03-28T09:20:18.230420Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-28T09:20:18.248644Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-28T09:20:18.256338Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-28T09:20:18.260046Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:20:18.260915Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-28T09:20:18.261405Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:18.261892Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:18.262148Z node 1 :BS_CONTROLLER DEBUG:
{BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-28T09:20:18.274530Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-28T09:20:18.274761Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-28T09:20:18.276360Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-28T09:20:18.276501Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:18.276622Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:18.276712Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:239:2066] recipient: [1:20:2067] 2025-03-28T09:20:18.295272Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-28T09:20:18.295418Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:18.315576Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:18.315722Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:18.315798Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:18.315889Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:18.316045Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:18.316111Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:18.316158Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:18.316203Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:18.327239Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:18.327412Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:18.338060Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:18.338187Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-28T09:20:18.342599Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-28T09:20:18.342696Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-28T09:20:18.342907Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-28T09:20:18.342971Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-28T09:20:18.385333Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-03-28T09:20:18.405849Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2025-03-28T09:20:18.406493Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:188:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:188:2076] Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:217:2066] recipient: [11:188:2076] 2025-03-28T09:20:20.404411Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-28T09:20:20.405404Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-28T09:20:20.405629Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-28T09:20:20.407150Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:20:20.407616Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-28T09:20:20.408205Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:20.408237Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:20.408456Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-28T09:20:20.417506Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-28T09:20:20.417637Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-28T09:20:20.417757Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-28T09:20:20.417858Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:20.417946Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:20.418020Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:239:2066] recipient: [11:20:2067] 2025-03-28T09:20:20.431255Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-28T09:20:20.431398Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:20.442923Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:20.443076Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:20.443156Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:20.443231Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:20.443341Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:20.443394Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:20.443449Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:20.443515Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:20.455208Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:20.455328Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:20.468101Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:20.468227Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-28T09:20:20.469437Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-28T09:20:20.469493Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-28T09:20:20.469693Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-28T09:20:20.469736Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-28T09:20:20.470326Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 
2025-03-28T09:20:20.476329Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# ... ommand { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-03-28T09:20:22.254022Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-03-28T09:20:22.254617Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-03-28T09:20:22.260867Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-03-28T09:20:22.261544Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-03-28T09:20:22.262348Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-03-28T09:20:22.263146Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2025-03-28T09:20:22.263806Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-03-28T09:20:22.264471Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-03-28T09:20:22.265172Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-03-28T09:20:22.265804Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-03-28T09:20:22.270383Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-03-28T09:20:22.271261Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-03-28T09:20:22.271959Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-03-28T09:20:22.272710Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:214:2066] recipient: [31:203:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:214:2066] recipient: [31:203:2076] Leader for TabletID 72057594037932033 is [31:216:2078] sender: [31:217:2066] recipient: [31:203:2076] 2025-03-28T09:20:23.711971Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-28T09:20:23.712852Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-28T09:20:23.713076Z node 31 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-28T09:20:23.714598Z 
node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:20:23.715050Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-28T09:20:23.715579Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:23.715609Z node 31 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:23.715796Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-28T09:20:23.725440Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-28T09:20:23.725574Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-28T09:20:23.725691Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-28T09:20:23.725783Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:23.725863Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:23.725927Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [31:216:2078] sender: [31:239:2066] recipient: [31:20:2067] 2025-03-28T09:20:23.738204Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-28T09:20:23.738364Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:23.749637Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:23.749792Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:23.749876Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:23.749972Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:23.750087Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:23.750140Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:23.750188Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:23.750259Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:23.763252Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:23.763375Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:23.775117Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:23.775240Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-28T09:20:23.776481Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-28T09:20:23.776533Z node 31 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-28T09:20:23.776738Z node 31 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-28T09:20:23.776783Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-28T09:20:23.777296Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_0" BoxId: 1 } } } 2025-03-28T09:20:23.778560Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_1" BoxId: 1 } } } 2025-03-28T09:20:23.779336Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2025-03-28T09:20:23.779966Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2025-03-28T09:20:23.780516Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2025-03-28T09:20:23.781092Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2025-03-28T09:20:23.781666Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2025-03-28T09:20:23.782361Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2025-03-28T09:20:23.783083Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2025-03-28T09:20:23.783866Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2025-03-28T09:20:23.784491Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2025-03-28T09:20:23.785195Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 
2025-03-28T09:20:23.785811Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2025-03-28T09:20:23.786481Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2025-03-28T09:20:23.787200Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2025-03-28T09:20:23.787974Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2025-03-28T09:20:23.788667Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2025-03-28T09:20:23.789321Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2025-03-28T09:20:23.790014Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2025-03-28T09:20:23.790880Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small >> KqpJoinOrder::ShuffleEliminationManyKeysJoinPredicate >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] [GOOD] >> BsControllerConfig::ReassignGroupDisk [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] >> KqpJoin::TwoJoinsWithQueryService >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ReassignGroupDisk [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:278:2068] recipient: [1:244:2078] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:278:2068] recipient: [1:244:2078] Leader for TabletID 72057594037932033 is [1:280:2080] sender: [1:281:2068] recipient: [1:244:2078] 2025-03-28T09:20:22.997487Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-28T09:20:23.011377Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-28T09:20:23.011690Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-28T09:20:23.068297Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:20:23.068672Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} 
StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-28T09:20:23.069239Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:23.069280Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:23.069490Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-28T09:20:23.089278Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-28T09:20:23.089400Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-28T09:20:23.089565Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-28T09:20:23.089747Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:23.089877Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:23.089975Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:280:2080] sender: [1:306:2068] recipient: [1:22:2069] 2025-03-28T09:20:23.103349Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-28T09:20:23.103534Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:23.115246Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:23.115381Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:23.115461Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:23.115576Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:23.115691Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:23.115749Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:23.115789Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:23.115876Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:23.127026Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:23.127185Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:23.137920Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:23.138053Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-28T09:20:23.139266Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-28T09:20:23.139331Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-28T09:20:23.139531Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-28T09:20:23.139596Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-28T09:20:23.160103Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2025-03-28T09:20:23.160831Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1000 Path# /dev/disk 2025-03-28T09:20:23.160895Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 2:1000 Path# /dev/disk 2025-03-28T09:20:23.160935Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 3:1000 Path# /dev/disk 2025-03-28T09:20:23.160962Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 4:1000 Path# /dev/disk 2025-03-28T09:20:23.160987Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 5:1000 Path# /dev/disk 2025-03-28T09:20:23.161011Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 6:1000 Path# /dev/disk 2025-03-28T09:20:23.161056Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 7:1000 Path# /dev/disk 2025-03-28T09:20:23.161085Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 8:1000 Path# /dev/disk 2025-03-28T09:20:23.161109Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 9:1000 Path# /dev/disk 2025-03-28T09:20:23.161131Z node 1 :BS_CONTROLLER 
NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 10:1000 Path# /dev/disk 2025-03-28T09:20:23.161170Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 11:1000 Path# /dev/disk 2025-03-28T09:20:23.161196Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 12:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-03-28T09:20:23.193478Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { Success: true } Success: true ConfigTxSeqNo: 2 Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:278:2068] recipient: [13:244:2078] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [13:278:2068] recipient: [13:244:2078] Leader for TabletID 72057594037932033 is [13:280:2080] sender: [13:281:2068] recipient: [13:244:2078] 2025-03-28T09:20:25.363732Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-28T09:20:25.364477Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-28T09:20:25.364679Z node 13 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-28T09:20:25.365901Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:20:25.366097Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-28T09:20:25.366580Z node 13 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:25.366618Z node 13 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:25.366858Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-28T09:20:25.376552Z node 13 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-28T09:20:25.376716Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-28T09:20:25.376845Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-28T09:20:25.376976Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:25.377077Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:25.377146Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [13:280:2080] sender: [13:306:2068] recipient: [13:22:2069] 2025-03-28T09:20:25.391378Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-28T09:20:25.391531Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:25.403528Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:25.403702Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:25.403802Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:25.403880Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:25.404001Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:25.404049Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:25.404084Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:25.404131Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:25.414925Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:25.415066Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:25.425904Z node 13 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:25.426044Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-28T09:20:25.427386Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-28T09:20:25.427450Z node 13 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-28T09:20:25.427643Z node 13 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-28T09:20:25.427711Z node 13 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-28T09:20:25.428554Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } 
HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 2 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2025-03-28T09:20:25.429118Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 13:1000 Path# /dev/disk 2025-03-28T09:20:25.429177Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 14:1000 Path# /dev/disk 2025-03-28T09:20:25.429202Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 15:1000 Path# /dev/disk 2025-03-28T09:20:25.429226Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 16:1000 Path# /dev/disk 2025-03-28T09:20:25.429249Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 17:1000 Path# /dev/disk 2025-03-28T09:20:25.429278Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 18:1000 Path# /dev/disk 2025-03-28T09:20:25.429318Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 19:1000 Path# /dev/disk 2025-03-28T09:20:25.429363Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 20:1000 Path# /dev/disk 2025-03-28T09:20:25.429387Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 21:1000 Path# /dev/disk 2025-03-28T09:20:25.429428Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 22:1000 Path# /dev/disk 2025-03-28T09:20:25.429461Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 23:1000 Path# /dev/disk 2025-03-28T09:20:25.429488Z node 13 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 24:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2025-03-28T09:20:25.451498Z node 13 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] >> KqpJoinOrder::TPCDS34+ColumnStore >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> KqpJoinOrder::TestJoinOrderHintsSimple+ColumnStore |86.0%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateExternalTablet [GOOD] Test command err: 2025-03-28T09:14:39.911302Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:14:39.935170Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:39.962070Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "/tmp/pdisk.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-28T09:14:39.990754Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-28T09:14:40.011940Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2025-03-28T09:14:40.012019Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:14:40.012942Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:30:2075] ControllerId# 72057594037932033 2025-03-28T09:14:40.012984Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:14:40.013094Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:14:40.013414Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:14:40.027512Z node 1 :BS_PROXY INFO: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2025-03-28T09:14:40.027560Z node 1 :BS_PROXY NOTICE: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2025-03-28T09:14:40.029489Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:37:2080] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.029662Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:38:2081] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.029813Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:39:2082] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.030015Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:40:2083] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.030165Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:41:2084] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.030300Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:42:2085] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.030478Z node 1 :BS_PROXY DEBUG: Group# 0 Actor# [1:29:2074] Create Queue# [1:43:2086] targetNodeId# 1 Marker# DSP01 2025-03-28T09:14:40.030508Z node 1 
:BS_PROXY INFO: Group# 0 SetStateEstablishingSessions Marker# DSP03 2025-03-28T09:14:40.030572Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [1:30:2075] 2025-03-28T09:14:40.030609Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [1:30:2075] 2025-03-28T09:14:40.030660Z node 1 :BS_PROXY NOTICE: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2025-03-28T09:14:40.030777Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:14:40.036391Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:14:40.038187Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StInit ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:14:40.079248Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-28T09:14:40.079319Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.079355Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-28T09:14:40.106738Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] ::Bootstrap [1:20:2063] 2025-03-28T09:14:40.106824Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] lookup [1:20:2063] 2025-03-28T09:14:40.106945Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037936129 entry.State: StInit ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:14:40.109192Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-03-28T09:14:40.109284Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-28T09:14:40.109343Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.109383Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-28T09:14:40.113428Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-28T09:14:40.113646Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:14:40.114960Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2025-03-28T09:14:40.115065Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2025-03-28T09:14:40.117847Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "/tmp/pdisk.dat" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2025-03-28T09:14:40.120358Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-28T09:14:40.120879Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true 
NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:14:40.120931Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2025-03-28T09:14:40.121015Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-03-28T09:14:40.126892Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-28T09:14:40.127120Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] ::Bootstrap [1:50:2091] 2025-03-28T09:14:40.127154Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] lookup [1:50:2091] 2025-03-28T09:14:40.127276Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-28T09:14:40.127497Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037927937 entry.State: StInit ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:14:40.127549Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037927937] queue send [1:50:2091] 2025-03-28T09:14:40.136037Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:14:40.141337Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2025-03-28T09:14:40.141404Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2025-03-28T09:14:40.141483Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2025-03-28T09:14:40.141517Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033} 2025-03-28T09:14:40.141602Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037932033 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037932033 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-28T09:14:40.141632Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037932033 followers: 0 2025-03-28T09:14:40.141686Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] queue send [1:20:2063] 2025-03-28T09:14:40.141900Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [1:30:2075] 2025-03-28T09:14:40.143268Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result error, check reconnect [1:30:2075] 2025-03-28T09:14:40.143322Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037932033] schedule retry [1:30:2075] 2025-03-28T09:14:40.143424Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-28T09:14:40.146647Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-28T09:14:40.148802Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129} 2025-03-28T09:14:40.148921Z node 1 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72057594037936129 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72057594037936129 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 
2025-03-28T09:14:40.148956Z node 1 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72057594037936129 followers: 0 2025-03-28T09:14:40.149088Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] forward result error, check reconnect [1:20:2063] 2025-03-28T09:14:40.149130Z node 1 :PIPE_CLIENT DEBUG: TClient[72057594037936129] connect failed [1:20:2063] 2025-03-28T09:14:40.149231Z node 1 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:14:40.149362Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-03-28T09:14:40.149389Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-28T09:14:40.150047Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\371$\224\316I\335\243.)W\014\261m\013\346Osy\0160" } 2025-03-28T09:14:40.150217Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-03-28T09:14:40.150261Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:317} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2025-03-28T09:14:40.152511Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2025-03-28T09:14:40.152596Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2025-03-28T09:14:40.152625Z node 1 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2025-03-28T09:14:40.152671Z node ... 72057594037927937 entry.State: StInitResolve success: true ev: {EvInfo Status: 0 TabletID: 72057594037927937 Cookie: 0 CurrentLeader: [149:270:2261] CurrentLeaderTablet: [149:277:2265] CurrentGeneration: 2 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 0}} 2025-03-28T09:20:23.085780Z node 149 :TABLET_RESOLVER DEBUG: ApplyEntry leader tabletId: 72057594037927937 followers: 0 2025-03-28T09:20:23.085858Z node 149 :TABLET_RESOLVER DEBUG: SelectForward node 149 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [149:270:2261] 2025-03-28T09:20:23.085971Z node 149 :PIPE_CLIENT DEBUG: TClient[72057594037927937] forward result local node, try to connect [149:267:2260] 2025-03-28T09:20:23.086047Z node 149 :PIPE_CLIENT DEBUG: TClient[72057594037927937]::SendEvent [149:267:2260] 2025-03-28T09:20:23.086182Z node 149 :PIPE_SERVER DEBUG: [72057594037927937] Accept Connect Originator# [149:267:2260] 2025-03-28T09:20:23.086359Z node 149 :PIPE_CLIENT DEBUG: TClient[72057594037927937] connected with status OK role: Leader [149:267:2260] 2025-03-28T09:20:23.086434Z node 149 :PIPE_CLIENT DEBUG: TClient[72057594037927937] send queued [149:267:2260] 2025-03-28T09:20:23.086509Z node 149 :PIPE_CLIENT DEBUG: TClient[72057594037927937] push event to server [149:267:2260] 2025-03-28T09:20:23.086606Z node 149 :PIPE_CLIENT DEBUG: TClient[72057594037927937] shutdown pipe due to pending shutdown request [149:267:2260] 2025-03-28T09:20:23.086698Z node 149 :PIPE_CLIENT DEBUG: TClient[72057594037927937] notify reset [149:267:2260] 2025-03-28T09:20:23.086922Z node 149 :PIPE_SERVER DEBUG: [72057594037927937] HandleSend Sender# [149:266:2259] EventType# 268697601 
2025-03-28T09:20:23.087132Z node 149 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} queued, type NKikimr::NHive::TTxCreateTablet 2025-03-28T09:20:23.087220Z node 149 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:20:23.088091Z node 149 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} hope 1 -> done Change{4, redo 1157b alter 0b annex 0, ~{ 14, 0, 1, 2 } -{ }, 0 gb} 2025-03-28T09:20:23.088231Z node 149 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:20:23.088446Z node 149 :PIPE_CLIENT DEBUG: TClient[72057594037932033] ::Bootstrap [149:311:2288] 2025-03-28T09:20:23.088504Z node 149 :PIPE_CLIENT DEBUG: TClient[72057594037932033] lookup [149:311:2288] 2025-03-28T09:20:23.088606Z node 149 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:20:23.088713Z node 149 :TABLET_RESOLVER DEBUG: SelectForward node 149 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037932033 followers: 0 countLeader 1 allowFollowers 0 winner: [149:94:2122] 2025-03-28T09:20:23.088835Z node 149 :PIPE_CLIENT DEBUG: TClient[72057594037932033] queue send [149:311:2288] 2025-03-28T09:20:23.088913Z node 149 :PIPE_CLIENT DEBUG: TClient[72057594037932033] forward result local node, try to connect [149:311:2288] 2025-03-28T09:20:23.088972Z node 149 :PIPE_CLIENT DEBUG: TClient[72057594037932033]::SendEvent [149:311:2288] 2025-03-28T09:20:23.089082Z node 149 :PIPE_SERVER DEBUG: [72057594037932033] Accept Connect Originator# [149:311:2288] 2025-03-28T09:20:23.089274Z node 149 :PIPE_CLIENT DEBUG: TClient[72057594037932033] connected with status OK role: Leader [149:311:2288] 2025-03-28T09:20:23.089329Z node 149 :PIPE_CLIENT DEBUG: TClient[72057594037932033] send queued [149:311:2288] 2025-03-28T09:20:23.089369Z node 149 :PIPE_CLIENT DEBUG: TClient[72057594037932033] push event to server [149:311:2288] 2025-03-28T09:20:23.089438Z node 149 :PIPE_SERVER DEBUG: [72057594037932033] HandleSend Sender# [149:277:2265] EventType# 268637702 2025-03-28T09:20:23.089647Z node 149 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2025-03-28T09:20:23.089743Z node 149 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:20:23.089959Z node 149 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{20, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:20:23.090062Z node 149 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:20:23.090422Z node 149 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2025-03-28T09:20:23.090520Z node 149 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, 
NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:20:23.091007Z node 149 :HIVE NOTICE: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{88923005651424}(72075186224037888)::Execute - TryToBoot was not successfull 2025-03-28T09:20:23.091159Z node 149 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2025-03-28T09:20:23.091260Z node 149 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:20:23.107377Z node 149 :BS_PROXY_PUT INFO: [aeed6b7f2709b4c0] bootstrap ActorId# [149:314:2291] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:698:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2025-03-28T09:20:23.107495Z node 149 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] Id# [72057594037927937:2:4:0:0:698:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2025-03-28T09:20:23.107535Z node 149 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] restore Id# [72057594037927937:2:4:0:0:698:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2025-03-28T09:20:23.107585Z node 149 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG33 2025-03-28T09:20:23.107619Z node 149 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG32 2025-03-28T09:20:23.107728Z node 149 :BS_PROXY DEBUG: Send to queueActorId# [149:37:2080] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:698:1] FDS# 698 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2025-03-28T09:20:23.108888Z node 149 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] received {EvVPutResult Status# OK ID# [72057594037927937:2:4:0:0:698:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 19 } Cost# 85496 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 20 }}}} from# [0:1:0:0:0] Marker# BPP01 2025-03-28T09:20:23.108979Z node 149 :BS_PROXY_PUT DEBUG: [aeed6b7f2709b4c0] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2025-03-28T09:20:23.109025Z node 149 :BS_PROXY_PUT INFO: [aeed6b7f2709b4c0] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2025-03-28T09:20:23.109120Z node 149 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:470} Query history GroupId# 0 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.512 sample PartId# [72057594037927937:2:4:0:0:698:1] QueryCount# 1 VDiskId# [0:1:0:0:0] NodeId# 149 } TEvVPutResult{ TimestampMs# 1.676 VDiskId# [0:1:0:0:0] NodeId# 149 Status# OK } ] } 2025-03-28T09:20:23.109221Z node 149 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2025-03-28T09:20:23.109314Z node 149 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 
2025-03-28T09:20:23.109612Z node 149 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:20:23.109729Z node 149 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-03-28T09:20:23.109786Z node 149 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-03-28T09:20:23.109827Z node 149 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-03-28T09:20:23.109870Z node 149 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-28T09:20:23.109929Z node 149 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-28T09:20:23.109976Z node 149 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-28T09:20:23.110378Z node 149 :PIPE_CLIENT DEBUG: TClient[72075186224037888] ::Bootstrap [149:318:2294] 2025-03-28T09:20:23.110444Z node 149 :PIPE_CLIENT DEBUG: TClient[72075186224037888] lookup [149:318:2294] 2025-03-28T09:20:23.110592Z node 149 :TABLET_RESOLVER DEBUG: Handle TEvForward tabletId: 72075186224037888 entry.State: StInit ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2025-03-28T09:20:23.110810Z node 149 :STATESTORAGE DEBUG: ProxyRequest::HandleInit ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2025-03-28T09:20:23.110963Z node 149 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2025-03-28T09:20:23.111049Z node 149 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2025-03-28T09:20:23.111112Z node 149 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-28T09:20:23.111174Z node 149 :STATESTORAGE DEBUG: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2025-03-28T09:20:23.111223Z node 149 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-28T09:20:23.111261Z node 149 :STATESTORAGE DEBUG: ProxyRequest::HandleLookup ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888} 2025-03-28T09:20:23.111386Z node 149 :TABLET_RESOLVER DEBUG: Handle TEvInfo tabletId: 72075186224037888 entry.State: StInitResolve success: false ev: {EvInfo Status: 5 TabletID: 72075186224037888 Cookie: 0 CurrentLeader: [0:0:0] CurrentLeaderTablet: [0:0:0] CurrentGeneration: 0 CurrentStep: 0 Locked: false LockedFor: 0 SignatureSz: 3 Signature: {2, 5, 8}} 2025-03-28T09:20:23.111457Z node 149 :TABLET_RESOLVER DEBUG: DropEntry tabletId: 72075186224037888 followers: 0 2025-03-28T09:20:23.111594Z node 149 :PIPE_CLIENT DEBUG: TClient[72075186224037888] forward result error, check reconnect [149:318:2294] 2025-03-28T09:20:23.111660Z node 149 :PIPE_CLIENT DEBUG: TClient[72075186224037888] connect failed [149:318:2294] >> TFlatTest::LargeDatashardReply [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] |86.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReply [GOOD] Test command err: 2025-03-28T09:20:13.141458Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787761753051188:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:13.141496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f11/r3tmp/tmpqsgY1T/pdisk_1.dat 2025-03-28T09:20:13.776037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:13.776121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:13.778373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:20:13.803975Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:13.821315Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:20:13.821352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:20:13.821397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:20:13.821415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:20:13.821427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:20:13.827199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 TClient is connected to server localhost:29086 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:20:14.079802Z node 1 :TX_PROXY DEBUG: actor# [1:7486787761753051445:2104] Handle TEvNavigate describe path dc-1 2025-03-28T09:20:14.079865Z node 1 :TX_PROXY DEBUG: Actor# [1:7486787766048019020:2259] HANDLE EvNavigateScheme dc-1 2025-03-28T09:20:14.081199Z node 1 :TX_PROXY DEBUG: Actor# [1:7486787766048019020:2259] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:20:14.142379Z node 1 :TX_PROXY DEBUG: Actor# [1:7486787766048019020:2259] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:20:14.158213Z node 1 :TX_PROXY DEBUG: Actor# [1:7486787766048019020:2259] Handle TEvDescribeSchemeResult Forward to# [1:7486787766048019019:2258] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:20:14.200582Z node 1 :TX_PROXY DEBUG: actor# [1:7486787761753051445:2104] Handle TEvProposeTransaction 2025-03-28T09:20:14.200612Z node 1 :TX_PROXY DEBUG: actor# [1:7486787761753051445:2104] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:20:14.200722Z node 1 :TX_PROXY DEBUG: actor# [1:7486787761753051445:2104] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486787766048019043:2268] 2025-03-28T09:20:14.312322Z node 1 :TX_PROXY DEBUG: Actor# [1:7486787766048019043:2268] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-28T09:20:14.312375Z node 1 :TX_PROXY DEBUG: Actor# [1:7486787766048019043:2268] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:20:14.312432Z node 1 :TX_PROXY DEBUG: Actor# [1:7486787766048019043:2268] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:20:14.312750Z node 1 :TX_PROXY DEBUG: Actor# [1:7486787766048019043:2268] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:20:14.312894Z node 1 :TX_PROXY DEBUG: Actor# [1:7486787766048019043:2268] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T09:20:14.312937Z node 1 :TX_PROXY DEBUG: Actor# [1:7486787766048019043:2268] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T09:20:14.313093Z node 1 :TX_PROXY DEBUG: Actor# [1:7486787766048019043:2268] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T09:20:14.315409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:20:14.315569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:20:14.315756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:20:14.316017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:20:14.316045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:20:14.316572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:20:14.316665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //dc-1 
2025-03-28T09:20:14.316811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:20:14.316835Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:20:14.316846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-28T09:20:14.316859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-03-28T09:20:14.317091Z node 1 :TX_PROXY DEBUG: Actor# [1:7486787766048019043:2268] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T09:20:14.317138Z node 1 :TX_PROXY DEBUG: Actor# [1:7486787766048019043:2268] txid# 281474976710657 SEND to# [1:7486787766048019042:2267] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2025-03-28T09:20:14.317504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:20:14.317531Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:20:14.317557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-28T09:20:14.317868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:20:14.317930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:20:14.317951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:20:14.317981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:20:14.322082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:20:14.322428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-28T09:20:14.322554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:20:14.324932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153614370, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:20:14.325036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743153614370 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:20:14.325057Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDom ... 
03-28T09:20:15.597301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:20:15.597414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:20:15.597431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976710674:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:20:15.597644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 3 2025-03-28T09:20:15.597729Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710674:0 progress is 1/1 2025-03-28T09:20:15.597736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2025-03-28T09:20:15.597757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710674:0 progress is 1/1 2025-03-28T09:20:15.597765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2025-03-28T09:20:15.597807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710674, ready parts: 1/1, is published: true 2025-03-28T09:20:15.597841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7486787770342987411:2381] message: TxId: 281474976710674 2025-03-28T09:20:15.597854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2025-03-28T09:20:15.597866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710674:0 2025-03-28T09:20:15.597873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710674:0 2025-03-28T09:20:15.597960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 2 2025-03-28T09:20:15.598972Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710674 datashard 72075186224037899 state PreOffline 2025-03-28T09:20:15.599014Z node 1 :TX_DATASHARD DEBUG: 72075186224037899 Got TEvSchemaChangedResult from SS at 72075186224037899 2025-03-28T09:20:15.600004Z node 1 :TX_DATASHARD DEBUG: 72075186224037899 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:20:15.600080Z node 1 :TX_DATASHARD INFO: 72075186224037899 Initiating switch from PreOffline to Offline state 2025-03-28T09:20:15.602201Z node 1 :TX_DATASHARD INFO: 72075186224037899 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:20:15.602962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486787770342986934 RawX2: 4503603922340126 } TabletId: 72075186224037899 State: 4 2025-03-28T09:20:15.603025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037899, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:20:15.603416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:12 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:20:15.603704Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 
TxId_Deprecated: 12 TabletID: 72075186224037899 2025-03-28T09:20:15.603725Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037899 2025-03-28T09:20:15.603800Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Tablet(DataShard.72075186224037899.Leader.1) VolatileState: Running -> Stopped (Node 1) 2025-03-28T09:20:15.603873Z node 1 :HIVE DEBUG: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037899.Leader.1 gen 1) to node 1 2025-03-28T09:20:15.603944Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 2025-03-28T09:20:15.604659Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [1:7486787761753051425:2100] NKikimrLocal.TEvStopTablet TabletId: 72075186224037899 FollowerId: 0 Generation: 1,0x10040206 [1:7486787761753051632:2196] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 Actions: NKikimr::TTabletReqBlockBlobStorage} 2025-03-28T09:20:15.604778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 12 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12, at schemeshard: 72057594046644480 2025-03-28T09:20:15.605020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 1 2025-03-28T09:20:15.605226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:20:15.605244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 6], at schemeshard: 72057594046644480 2025-03-28T09:20:15.605290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-28T09:20:15.605629Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037899 OK) 2025-03-28T09:20:15.605669Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037899 OK) 2025-03-28T09:20:15.605707Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037899 2025-03-28T09:20:15.605778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:12 2025-03-28T09:20:15.605797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:12 tabletId 72075186224037899 2025-03-28T09:20:15.605867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-28T09:20:15.605996Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037899 OK) 2025-03-28T09:20:15.606172Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037899 state Offline 2025-03-28T09:20:15.606202Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037899 reason = ReasonStop 2025-03-28T09:20:15.606240Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037899, clientId# [1:7486787770342987116:2783], serverId# [1:7486787770342987117:2784], sessionId# [0:0:0] 2025-03-28T09:20:15.606523Z node 
1 :HIVE DEBUG: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037899 2025-03-28T09:20:15.606562Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-03-28T09:20:15.606894Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037899 2025-03-28T09:20:15.607005Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037899 2025-03-28T09:20:15.608495Z node 1 :HIVE DEBUG: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037899)::Complete SideEffects {} 2025-03-28T09:20:17.612013Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486787778651195861:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:17.612045Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f11/r3tmp/tmpSkgthX/pdisk_1.dat 2025-03-28T09:20:17.814185Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:17.862184Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:17.862267Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:17.864022Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9784 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-28T09:20:18.134867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:20:18.143185Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:18.159350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:20:22.612305Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486787778651195861:2059];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:20:22.612357Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:20:31.384760Z node 2 :MINIKQL_ENGINE ERROR: Shard %72075186224037888, txid %281474976711360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)
2025-03-28T09:20:31.396372Z node 2 :TX_DATASHARD ERROR: Datashard execution error for [0:281474976711360] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)
2025-03-28T09:20:31.398875Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976711360 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) |
2025-03-28T09:20:31.434205Z node 2 :TX_PROXY ERROR: Actor# [2:7486787834485777151:5929] txid# 281474976711360 RESPONSE Status# ExecResultUnavailable marker# P13c
DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)
proxy error code: ExecResultUnavailable
>> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn+StreamLookup [GOOD]
>> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup
>> KqpJoin::FullOuterJoin
>> KqpJoin::TwoJoinsWithQueryService [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCDS78
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::TwoJoinsWithQueryService [GOOD]
Test command err:
Trying to start YDB, gRPC: 27019, MsgBus: 16659
2025-03-28T09:20:28.352123Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787828080596759:2056];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:20:28.352160Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001859/r3tmp/tmpihdUYM/pdisk_1.dat
2025-03-28T09:20:29.049527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:20:29.049616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:20:29.082403Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:20:29.084969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 27019, node 1
2025-03-28T09:20:29.227141Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:20:29.227164Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:20:29.227170Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:20:29.227290Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:16659
TClient is connected to server localhost:16659
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:20:29.985856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:20:30.007681Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:20:32.182886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787845260466619:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:32.183028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:32.449730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-03-28T09:20:32.618835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787845260466728:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:32.618912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:32.628199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
2025-03-28T09:20:32.695921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787845260466809:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:32.696007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:32.711212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-28T09:20:32.821616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787845260466886:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:32.821696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:32.822837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787845260466892:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:32.826339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480
2025-03-28T09:20:32.837567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787845260466894:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:20:32.922462Z node 1 :TX_PROXY ERROR: Actor# [1:7486787845260466947:2498] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:33.354790Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787828080596759:2056];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:33.354849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpJoinOrder::CanonizedJoinOrderTPCH10 >> KqpJoin::FullOuterJoin2 >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees [GOOD] >> BsControllerConfig::DeleteStoragePool [GOOD] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::DeleteStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:214:2066] recipient: [1:194:2076] Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:217:2066] recipient: [1:194:2076] 2025-03-28T09:20:18.235746Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-28T09:20:18.249390Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-28T09:20:18.257118Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-28T09:20:18.259540Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:20:18.260689Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-28T09:20:18.261371Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:18.262374Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:18.262626Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-28T09:20:18.276374Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-28T09:20:18.276497Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-28T09:20:18.276639Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-28T09:20:18.276753Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:18.276844Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:18.276907Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} 
Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:216:2078] sender: [1:239:2066] recipient: [1:20:2067] 2025-03-28T09:20:18.295333Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-28T09:20:18.295454Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:18.307257Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:18.307427Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:18.307522Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:18.307595Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:18.307703Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:18.307748Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:18.307797Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:18.307862Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:18.319389Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:18.319517Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:18.331443Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:18.331592Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-28T09:20:18.345965Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-28T09:20:18.346054Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-28T09:20:18.346273Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-28T09:20:18.346324Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-28T09:20:18.382766Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] 
recipient: [11:188:2076] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:214:2066] recipient: [11:188:2076] Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:217:2066] recipient: [11:188:2076] 2025-03-28T09:20:20.181228Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-28T09:20:20.182255Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-28T09:20:20.182502Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-28T09:20:20.188123Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:20:20.188618Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-28T09:20:20.189316Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:20.189366Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:20.189585Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-28T09:20:20.207855Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-28T09:20:20.208026Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-28T09:20:20.208149Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-28T09:20:20.208269Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:20.208381Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:20.208457Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:216:2078] sender: [11:239:2066] recipient: [11:20:2067] 2025-03-28T09:20:20.223468Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-28T09:20:20.223647Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:20.235250Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:20.235387Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:20.235468Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:20.235565Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 
2025-03-28T09:20:20.235722Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:20.235804Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:20.235855Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:20.235910Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:20.247265Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:20.247392Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:20.259234Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:20.259382Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-28T09:20:20.260565Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-28T09:20:20.260613Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-28T09:20:20.260854Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-28T09:20:20.260924Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-28T09:20:20.261499Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:3014:2106] recipient: [21:2914:2116] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:3014:2106] recipient: [21:2914:2116] Leader for TabletID 72057594037932033 is [21:3016:2118] sender: [21:3017:2106] recipient: [21:2914:2116] 2025-03-28T09:20:22.893939Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-28T09:20:22.899005Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-28T09:20:22.899347Z n ... 
71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 96:1002 Path# /dev/disk3 2025-03-28T09:20:31.124132Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 97:1000 Path# /dev/disk1 2025-03-28T09:20:31.124157Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 97:1001 Path# /dev/disk2 2025-03-28T09:20:31.124180Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 97:1002 Path# /dev/disk3 2025-03-28T09:20:31.124204Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 98:1000 Path# /dev/disk1 2025-03-28T09:20:31.124231Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 98:1001 Path# /dev/disk2 2025-03-28T09:20:31.124255Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 98:1002 Path# /dev/disk3 2025-03-28T09:20:31.124280Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 99:1000 Path# /dev/disk1 2025-03-28T09:20:31.124304Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 99:1001 Path# /dev/disk2 2025-03-28T09:20:31.124329Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 99:1002 Path# /dev/disk3 2025-03-28T09:20:31.124352Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 100:1000 Path# /dev/disk1 2025-03-28T09:20:31.124375Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 100:1001 Path# /dev/disk2 2025-03-28T09:20:31.124401Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 100:1002 Path# /dev/disk3 2025-03-28T09:20:31.124429Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 101:1000 Path# /dev/disk1 2025-03-28T09:20:31.124453Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 101:1001 Path# /dev/disk2 2025-03-28T09:20:31.124478Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 101:1002 Path# /dev/disk3 2025-03-28T09:20:31.124502Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 102:1000 Path# /dev/disk1 2025-03-28T09:20:31.124536Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 102:1001 Path# /dev/disk2 2025-03-28T09:20:31.124562Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 102:1002 Path# /dev/disk3 2025-03-28T09:20:31.124589Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 103:1000 Path# /dev/disk1 2025-03-28T09:20:31.124630Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 103:1001 Path# /dev/disk2 2025-03-28T09:20:31.124656Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 103:1002 Path# /dev/disk3 2025-03-28T09:20:31.124679Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 104:1000 Path# /dev/disk1 2025-03-28T09:20:31.124701Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 104:1001 Path# /dev/disk2 2025-03-28T09:20:31.124724Z node 71 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 104:1002 Path# /dev/disk3 2025-03-28T09:20:31.124745Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 105:1000 Path# /dev/disk1 2025-03-28T09:20:31.124770Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 105:1001 Path# /dev/disk2 2025-03-28T09:20:31.124793Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 105:1002 Path# /dev/disk3 2025-03-28T09:20:31.124815Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 106:1000 Path# /dev/disk1 2025-03-28T09:20:31.124837Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 106:1001 Path# /dev/disk2 2025-03-28T09:20:31.124859Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 106:1002 Path# /dev/disk3 2025-03-28T09:20:31.124883Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 107:1000 Path# /dev/disk1 2025-03-28T09:20:31.124904Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 107:1001 Path# /dev/disk2 2025-03-28T09:20:31.124935Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 107:1002 Path# /dev/disk3 2025-03-28T09:20:31.124965Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 108:1000 Path# /dev/disk1 2025-03-28T09:20:31.124996Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 108:1001 Path# /dev/disk2 2025-03-28T09:20:31.125027Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 108:1002 Path# /dev/disk3 2025-03-28T09:20:31.125058Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 109:1000 Path# /dev/disk1 2025-03-28T09:20:31.125105Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 109:1001 Path# /dev/disk2 2025-03-28T09:20:31.125137Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 109:1002 Path# /dev/disk3 2025-03-28T09:20:31.125167Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 110:1000 Path# /dev/disk1 2025-03-28T09:20:31.125193Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 110:1001 Path# /dev/disk2 2025-03-28T09:20:31.125215Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 110:1002 Path# /dev/disk3 2025-03-28T09:20:31.125237Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 111:1000 Path# /dev/disk1 2025-03-28T09:20:31.125258Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 111:1001 Path# /dev/disk2 2025-03-28T09:20:31.125282Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 111:1002 Path# /dev/disk3 2025-03-28T09:20:31.125306Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 112:1000 Path# /dev/disk1 2025-03-28T09:20:31.125331Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 112:1001 Path# /dev/disk2 2025-03-28T09:20:31.125355Z node 71 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 112:1002 Path# /dev/disk3 2025-03-28T09:20:31.125379Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 113:1000 Path# /dev/disk1 2025-03-28T09:20:31.125400Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 113:1001 Path# /dev/disk2 2025-03-28T09:20:31.125428Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 113:1002 Path# /dev/disk3 2025-03-28T09:20:31.125454Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 114:1000 Path# /dev/disk1 2025-03-28T09:20:31.125477Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 114:1001 Path# /dev/disk2 2025-03-28T09:20:31.125502Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 114:1002 Path# /dev/disk3 2025-03-28T09:20:31.125527Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 115:1000 Path# /dev/disk1 2025-03-28T09:20:31.125555Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 115:1001 Path# /dev/disk2 2025-03-28T09:20:31.125590Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 115:1002 Path# /dev/disk3 2025-03-28T09:20:31.125618Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 116:1000 Path# /dev/disk1 2025-03-28T09:20:31.125643Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 116:1001 Path# /dev/disk2 2025-03-28T09:20:31.125669Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 116:1002 Path# /dev/disk3 2025-03-28T09:20:31.125694Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 117:1000 Path# /dev/disk1 2025-03-28T09:20:31.125718Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 117:1001 Path# /dev/disk2 2025-03-28T09:20:31.125746Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 117:1002 Path# /dev/disk3 2025-03-28T09:20:31.125769Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 118:1000 Path# /dev/disk1 2025-03-28T09:20:31.125795Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 118:1001 Path# /dev/disk2 2025-03-28T09:20:31.125824Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 118:1002 Path# /dev/disk3 2025-03-28T09:20:31.125851Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 119:1000 Path# /dev/disk1 2025-03-28T09:20:31.125882Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 119:1001 Path# /dev/disk2 2025-03-28T09:20:31.125909Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 119:1002 Path# /dev/disk3 2025-03-28T09:20:31.125930Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 120:1000 Path# /dev/disk1 2025-03-28T09:20:31.125953Z node 71 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 120:1001 Path# /dev/disk2 2025-03-28T09:20:31.125976Z node 71 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 120:1002 Path# /dev/disk3 2025-03-28T09:20:31.142837Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool 1" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: ROT } } } } } 2025-03-28T09:20:31.243258Z node 71 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.100553s 2025-03-28T09:20:31.243389Z node 71 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.100700s 2025-03-28T09:20:31.266066Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineStoragePool { BoxId: 1 StoragePoolId: 2 Name: "storage pool 2" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 50 PDiskFilter { Property { Type: SSD } } } } Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 2 ItemConfigGeneration: 1 } } } 2025-03-28T09:20:31.332342Z node 71 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DeleteStoragePool { BoxId: 1 StoragePoolId: 1 ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinOnlyLeftColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 32559, MsgBus: 11096 2025-03-28T09:20:24.083064Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787810236643501:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:24.084040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001862/r3tmp/tmpfNZBrl/pdisk_1.dat 2025-03-28T09:20:24.602206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:24.602334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:24.604590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:20:24.664553Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32559, node 1 2025-03-28T09:20:24.800358Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:24.800377Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:24.800401Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:24.800525Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11096 
TClient is connected to server localhost:11096 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:25.541001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:25.566127Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:25.582213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:25.757277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:25.983014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:26.110278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:27.988274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787823121547122:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:27.988416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:28.362380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:20:28.439118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:20:28.518656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:20:28.557572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:20:28.593283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:20:28.647617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:20:28.705997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787827416514936:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:28.706088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:28.706352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787827416514941:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:28.711721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:20:28.730916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787827416514943:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:28.817530Z node 1 :TX_PROXY ERROR: Actor# [1:7486787827416514999:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:29.095052Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787810236643501:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:29.095257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:30.157964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.207675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.237988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.300123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.340586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.391311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 3026, MsgBus: 63386 2025-03-28T09:20:32.140412Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486787841548023582:2218];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001862/r3tmp/tmpsXxli7/pdisk_1.dat 2025-03-28T09:20:32.195967Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:20:32.276199Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:32.293370Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:32.293456Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:32.299677Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3026, node 2 2025-03-28T09:20:32.429732Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:32.429755Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:32.429762Z node 2 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:32.429881Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63386 TClient is connected to server localhost:63386 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:33.036121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:33.047074Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:20:33.069702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:33.183514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:33.393228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:33.471427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:36.186274Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787858727894354:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:36.186384Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:36.234598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-28T09:20:36.271089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-28T09:20:36.335265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-28T09:20:36.417162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-28T09:20:36.472985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-28T09:20:36.582640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-28T09:20:36.720506Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787858727894875:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:36.720601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:36.720926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787858727894880:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:36.724835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-28T09:20:36.751203Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486787858727894882:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:20:36.813490Z node 2 :TX_PROXY ERROR: Actor# [2:7486787858727894938:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:37.124743Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486787841548023582:2218];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:37.124808Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:37.969073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.013169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.059336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.135047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.183565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.214529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 61581, MsgBus: 29240 2025-03-28T09:20:33.062989Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787846636782249:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:33.063119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001851/r3tmp/tmpvSWNCd/pdisk_1.dat 2025-03-28T09:20:33.684299Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:33.686499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:33.686597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:33.691488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61581, node 1 2025-03-28T09:20:33.889861Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:33.889889Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:33.889896Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:33.889997Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29240 TClient is connected to server localhost:29240 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:34.669080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:34.697559Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:34.702817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:34.925783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:35.186235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:35.295852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:37.100303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787863816653219:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:37.100405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:37.388104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:20:37.443714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:20:37.486918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:20:37.546767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:20:37.631454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:20:37.721075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:20:37.796413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787863816653739:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:37.796505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:37.796700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787863816653744:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:37.800312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:20:37.816810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787863816653746:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:37.897966Z node 1 :TX_PROXY ERROR: Actor# [1:7486787863816653801:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:38.066800Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787846636782249:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:38.077004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:39.188596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:39.279166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:20:39.318965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:20:39.377044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:20:39.409515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:20:39.441980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> OlapEstimationRowsCorrectness::TPCDS96 |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup [GOOD] >> KqpJoin::FullOuterJoin [GOOD] >> OlapEstimationRowsCorrectness::TPCH10 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsManyHintTrees [GOOD] Test command err: Trying to start YDB, gRPC: 10258, MsgBus: 2245 2025-03-28T09:19:02.356668Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787458481092909:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:02.357819Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016fd/r3tmp/tmplNvmix/pdisk_1.dat 2025-03-28T09:19:02.699361Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10258, node 1 2025-03-28T09:19:02.764300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-03-28T09:19:02.764835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:02.771050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:19:02.809013Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:19:02.809056Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:19:02.809082Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:19:02.809254Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2245 TClient is connected to server localhost:2245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:19:03.283187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:03.306965Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:19:05.431126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787471365995326:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:05.431133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787471365995318:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:05.431236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:05.434757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:19:05.444009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787471365995332:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:05.434757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:19:05.444009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787471365995332:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:19:05.508825Z node 1 :TX_PROXY ERROR: Actor# [1:7486787471365995383:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:19:05.794867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:19:05.976084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787471365995631:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:19:05.976084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486787471365995664:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:19:05.976381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787471365995631:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:19:05.976677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787471365995631:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:19:05.976797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787471365995631:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:19:05.976907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787471365995631:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:19:05.977017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787471365995631:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:19:05.977137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787471365995631:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:19:05.977274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787471365995631:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:19:05.977379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787471365995631:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:19:05.977479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787471365995631:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:19:05.977624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787471365995631:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:19:05.977728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787471365995631:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:19:05.978349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486787471365995664:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:19:05.978587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486787471365995664:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:19:05.978731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486787471365995664:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:19:05.978872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486787471365995664:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:19:05.978978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486787471365995664:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:19:05.979089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486787471365995664:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:19:05.979254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486787471365995664:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:19:05.979358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486787471365995664:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:19:05.979494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486787471365995664:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:19:05.979703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486787471365995664:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:19:05.979827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486787471365995664:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:19:06.014127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787471365995692:2361];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:19:06.014201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787471365995692:2361];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstra ... COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.518020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.519467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.525141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.525287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.532144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.533506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.538951Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.539204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.545401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.545402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.551353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.551353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:20:24.556244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.562498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.564629Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.568297Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.572832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.574959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.579202Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.582785Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.585810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.588861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.592649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.595654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.604364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.621420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.629544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.630891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.636501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.640200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.643940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.647605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.651307Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.655105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.658100Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.661519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.665967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.668908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.674068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.674158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.681409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.681796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.689038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.689703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.829539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:24.906299Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0ssxwfxdeap5sjcptgp0c", SessionId: ydb://session/3?node_id=1&id=ZTBkZmUxYTYtZTEyOGJjNjctZGEzYjJlY2YtYmZiNmMzYzk=, Slow query, duration: 36.748876s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:20:25.248488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:20:25.248522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:20:25.249169Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 >> KqpJoin::RightTableIndexPredicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoin [GOOD] Test command err: Trying to start YDB, gRPC: 11405, MsgBus: 11612 2025-03-28T09:20:34.340161Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787854098234719:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:34.340225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00184e/r3tmp/tmp566hiB/pdisk_1.dat 2025-03-28T09:20:35.014223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:35.014318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:35.056303Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:20:35.063773Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11405, node 1 2025-03-28T09:20:35.283256Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:35.283278Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:35.283288Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:35.283393Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11612 TClient is connected to server localhost:11612 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:36.008560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:36.035309Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:20:36.053329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:20:36.269324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:36.492248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:36.583125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:38.494981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787871278105663:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:38.495087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:38.887937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.964381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:39.018450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:39.090628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:39.139881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:39.211493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:39.292933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787875573073477:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:39.293015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:39.293317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787875573073482:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:39.297513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:39.314034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787875573073484:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:39.340412Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787854098234719:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:39.340505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:39.393468Z node 1 :TX_PROXY ERROR: Actor# [1:7486787875573073539:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:40.631640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:40.670082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:20:40.717253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinOnlyLeftColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 4125, MsgBus: 11682 2025-03-28T09:20:25.838603Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787814931041769:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:25.839374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001860/r3tmp/tmpK1GYYh/pdisk_1.dat 2025-03-28T09:20:26.502334Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:26.508473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:26.508564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:26.511985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4125, node 1 2025-03-28T09:20:26.648471Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:26.648501Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:26.648509Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:26.648651Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11682 TClient is connected to server localhost:11682 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:27.463999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:27.525425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:27.698004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:27.964572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:28.058134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:29.912394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787832110912609:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:29.912558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:30.301925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.357818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.431871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.468796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.517748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.578641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.648382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787836405880421:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:30.648441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:30.648569Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787836405880426:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:30.651636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:30.668151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787836405880428:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:30.747580Z node 1 :TX_PROXY ERROR: Actor# [1:7486787836405880483:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:30.808192Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787814931041769:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:30.808324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:32.027113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:32.067819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:20:32.102548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:20:32.137140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:20:32.182422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:20:32.226832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 65013, MsgBus: 25101 2025-03-28T09:20:34.207113Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486787852017894767:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:34.207597Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001860/r3tmp/tmpMKR3qQ/pdisk_1.dat 2025-03-28T09:20:34.427975Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:34.463212Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:34.463295Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:34.472550Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65013, node 2 2025-03-28T09:20:34.607173Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:34.607195Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:34.607202Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:34.607304Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25101 TClient is connected to server localhost:25101 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:35.349032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:35.355336Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:20:35.369953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:35.470909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:35.718864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:35.857946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:38.533921Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787869197765724:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:38.534047Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:38.604879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.683246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.718835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.756955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.790429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.833773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.924707Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787869197766243:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:38.924825Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:38.925222Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787869197766248:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:38.929054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:38.946827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486787869197766250:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:20:39.016639Z node 2 :TX_PROXY ERROR: Actor# [2:7486787873492733600:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:39.207283Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486787852017894767:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:39.207333Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:40.189234Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:40.233432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:20:40.288182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:20:40.328200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-28T09:20:40.367825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-28T09:20:40.403885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_Simple |86.1%| [TA] $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup >> KqpJoinOrder::FiveWayJoinWithConstantFold+ColumnStore |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_retry_high_rate.py::TestRetry::test_high_rate[kikimr0] [GOOD] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFold-ColumnStore |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> KqpJoin::FullOuterJoin2 [GOOD] >> KqpJoinOrder::CanonizedJoinOrderLookupBug >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::FullOuterJoin2 [GOOD] Test command err: Trying to start YDB, gRPC: 30823, MsgBus: 29730 2025-03-28T09:20:38.440904Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787870087910458:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:38.441264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00184a/r3tmp/tmpnuve0f/pdisk_1.dat 2025-03-28T09:20:38.946086Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:38.946443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:38.948598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:20:38.979114Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30823, node 1 2025-03-28T09:20:39.151175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:39.151200Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:39.151211Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:39.151309Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29730 TClient is connected to server localhost:29730 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:39.898767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:39.923503Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:39.945576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:40.157486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:40.352877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:40.465158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:42.329084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787887267781283:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:42.329225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:42.635151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:42.712826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:42.756613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:42.795856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:42.834082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:42.875441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:42.942707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787887267781798:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:42.942770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:42.943192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787887267781803:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:42.946651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:42.973084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787887267781805:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:43.042035Z node 1 :TX_PROXY ERROR: Actor# [1:7486787891562749157:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:43.428116Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787870087910458:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:43.428170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:44.213160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:44.269807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:20:44.307475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::FiveWayJoin+ColumnStore >> KqpJoinOrder::FiveWayJoinStatsOverride+ColumnStore |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH22 |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] |86.1%| [TA] {RESULT} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinByComplexKeyWithNullComponents-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 1702, MsgBus: 15774 2025-03-28T09:20:34.040526Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787851799287572:2253];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:34.042759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001850/r3tmp/tmpeVqJQz/pdisk_1.dat 2025-03-28T09:20:34.647817Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:34.664290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:34.664423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:34.668128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1702, node 1 2025-03-28T09:20:34.864714Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:34.864742Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:34.864752Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:34.864873Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15774 TClient is connected to server localhost:15774 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:35.645440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:35.689670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:35.871086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:20:36.067680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:36.151688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.151112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787868979158328:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:38.151207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:38.581297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.641013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.716787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.789112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.823055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.859056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:38.958894Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787868979158850:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:38.959012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:38.961123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787868979158855:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:38.964985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:38.980528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787868979158857:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:39.038441Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787851799287572:2253];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:39.038536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:39.048453Z node 1 :TX_PROXY ERROR: Actor# [1:7486787873274126208:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:40.241265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:40.279801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23947, MsgBus: 32445 2025-03-28T09:20:41.723677Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486787882619845829:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:41.723762Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001850/r3tmp/tmpXqId56/pdisk_1.dat 2025-03-28T09:20:41.955447Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:41.969609Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:41.969694Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:41.972195Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23947, node 2 2025-03-28T09:20:42.065935Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:42.065958Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:42.065966Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:42.066052Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32445 TClient is connected to server localhost:32445 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:20:42.523556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:20:42.534310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:42.635269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:42.820074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:42.894402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:45.105982Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787899799716775:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:45.106060Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:45.157232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:45.204658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:45.240079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:45.312846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:45.365526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:45.445821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:45.500087Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787899799717292:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:45.500171Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:45.500227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787899799717297:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:45.504504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:45.516935Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486787899799717299:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:45.613805Z node 2 :TX_PROXY ERROR: Actor# [2:7486787899799717355:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:46.726253Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486787882619845829:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:46.726308Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:46.828720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:46.878318Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull >> KqpJoin::RightTableIndexPredicate [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] >> KqpJoinOrder::TPCDS95-ColumnStore >> KqpFlipJoin::RightOnly_1 >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup [GOOD] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightTableIndexPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 18150, MsgBus: 27621 2025-03-28T09:20:44.057862Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787896642460412:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:44.068330Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001846/r3tmp/tmp06O7IC/pdisk_1.dat 2025-03-28T09:20:44.525358Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:44.566077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:44.566176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 18150, node 1 2025-03-28T09:20:44.568071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:20:44.647400Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-28T09:20:44.647420Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:44.647430Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:44.647539Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27621 TClient is connected to server localhost:27621 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:45.300827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:45.326556Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:45.343141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:45.520913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:45.755769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:45.854955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:47.791694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787909527364079:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:47.791856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:48.092089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:48.129179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:48.169003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:48.206485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:48.264655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:48.351382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:48.443084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787913822331900:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:48.443167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:48.443535Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787913822331905:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:48.447684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:48.468691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787913822331907:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:48.559986Z node 1 :TX_PROXY ERROR: Actor# [1:7486787913822331963:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:49.062959Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787896642460412:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:49.063047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:50.199948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] >> OlapEstimationRowsCorrectness::TPCH9 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 8423, MsgBus: 64411 2025-03-28T09:20:44.567441Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787893614419195:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:44.568383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001842/r3tmp/tmpmyhAeC/pdisk_1.dat 2025-03-28T09:20:45.302000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:45.302077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:45.305261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:20:45.324913Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8423, node 1 2025-03-28T09:20:45.624918Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:45.624947Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:45.624956Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:45.625060Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64411 TClient is connected to server localhost:64411 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:46.526135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:46.574835Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:46.587768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:46.828862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:47.052413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:47.166267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:49.435534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787915089257429:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:49.435646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:49.570793Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787893614419195:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:49.588495Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:49.775727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:49.819566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:49.897480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:49.972577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:50.022529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:50.065972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:50.139378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787919384225242:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.139450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.139820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787919384225247:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.144250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:50.167826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787919384225249:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:50.244591Z node 1 :TX_PROXY ERROR: Actor# [1:7486787919384225305:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:51.407058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.456916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.501406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.540076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.623052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.675433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_Simple [GOOD] Test command err: Trying to start YDB, gRPC: 13075, MsgBus: 21948 2025-03-28T09:20:44.881061Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787893281225088:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:44.881501Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00183f/r3tmp/tmpmg1gMY/pdisk_1.dat 2025-03-28T09:20:45.610140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:45.610228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:45.615237Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:45.630258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13075, node 1 2025-03-28T09:20:45.730527Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:45.730546Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:45.730552Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:45.730665Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:21948 TClient is connected to server localhost:21948 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:46.561930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:46.583701Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:46.605457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:20:46.830174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:47.273697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:47.358562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:49.484418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787914756063193:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:49.484544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:49.748591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:49.786629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:49.842857Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787893281225088:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:49.842926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:49.875218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:49.945893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:49.989967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:50.040295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:50.153279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787919051031012:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.153368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.153759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787919051031017:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.158533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:50.178223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787919051031019:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:50.250767Z node 1 :TX_PROXY ERROR: Actor# [1:7486787919051031075:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:51.764707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.835840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.896722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin >> KqpJoinOrder::FiveWayJoinWithPreds-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinRightNullFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 15662, MsgBus: 19785 2025-03-28T09:20:45.394928Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787898860127180:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:45.395011Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00183c/r3tmp/tmpnLtUzN/pdisk_1.dat 2025-03-28T09:20:46.165765Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:46.172970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:46.173065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:46.177224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15662, node 1 2025-03-28T09:20:46.491308Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:46.491332Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:46.491339Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:46.491436Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19785 TClient is connected to server localhost:19785 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:47.667528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:47.696641Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:47.717769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:47.868951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:48.109679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:48.202841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:50.103755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787920334965417:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.103865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.398793Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787898860127180:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:50.398874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:50.455476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:50.498336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:50.561327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:50.591704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:50.672854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:50.757949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:50.842175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787920334965939:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.842262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.842565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787920334965944:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.846847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:50.881132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787920334965946:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:50.975200Z node 1 :TX_PROXY ERROR: Actor# [1:7486787920334966001:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:52.408704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.451965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.496661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.544063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.582739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.620820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup >> KqpJoin::JoinLeftPureFull >> KqpJoinOrder::TPCDS96-ColumnStore >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup >> KqpIndexLookupJoin::InnerJoinLeftFilter+StreamLookup [GOOD] >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull [GOOD] >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::AllowJoinsForComplexPredicates+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 63803, MsgBus: 7505 2025-03-28T09:20:48.690501Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787913595677055:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:48.690565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001835/r3tmp/tmpFonuL8/pdisk_1.dat 2025-03-28T09:20:49.385261Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:49.386460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:49.386532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:49.390336Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63803, node 1 2025-03-28T09:20:49.627254Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:49.627273Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:49.627280Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:49.631660Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7505 TClient is connected to server localhost:7505 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:50.525146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:50.566046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:50.809034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:51.057230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:51.265172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:53.290839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787935070515295:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:53.290948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:53.652318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.690890Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787913595677055:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:53.690936Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:53.695846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.779051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.849783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.935980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:54.017534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:54.119455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787939365483124:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:54.119541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:54.119941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787939365483129:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:54.123999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:54.139545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787939365483131:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:54.214789Z node 1 :TX_PROXY ERROR: Actor# [1:7486787939365483186:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpFlipJoin::RightOnly_1 [GOOD] >> KqpFlipJoin::RightOnly_2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUtf8ToString-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 25192, MsgBus: 11923 2025-03-28T09:20:44.324012Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787894645676121:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:44.324132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001844/r3tmp/tmpzkKi8U/pdisk_1.dat 2025-03-28T09:20:44.748187Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:44.752616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:44.752706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:44.759267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25192, node 1 2025-03-28T09:20:44.834770Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:44.834817Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:44.834827Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:44.834978Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11923 TClient is connected to server localhost:11923 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:45.384182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:20:45.414017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:45.579192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:45.774962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:45.862901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:48.292055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787911825547070:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:48.292155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:48.684859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:48.716376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:48.775378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:48.810754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:48.844146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:48.922761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:48.993977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787911825547586:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:48.994059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:48.994309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787911825547591:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:48.998351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:49.022009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787911825547593:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:49.081556Z node 1 :TX_PROXY ERROR: Actor# [1:7486787916120514942:3439] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:49.341725Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787894645676121:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:49.341779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:50.239845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:50.306479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22115, MsgBus: 12365 2025-03-28T09:20:52.486984Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486787931049239273:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:52.487779Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001844/r3tmp/tmpcbGF6t/pdisk_1.dat 2025-03-28T09:20:52.795438Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:52.799182Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:52.799272Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:52.803784Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22115, node 2 2025-03-28T09:20:52.899171Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:52.899189Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:52.899196Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:52.899314Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12365 TClient is connected to server localhost:12365 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:20:53.493049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.512323Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:53.535000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:53.691088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:53.849952Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:54.041482Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:56.664833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787948229110225:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:56.664918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:56.717220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:56.766721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:56.803545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:56.842227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:56.880912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:56.933212Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:57.028920Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787952524078032:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:57.028997Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:57.029303Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787952524078037:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:57.033012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:57.048380Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486787952524078039:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:57.129695Z node 2 :TX_PROXY ERROR: Actor# [2:7486787952524078094:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:57.487519Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486787931049239273:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:57.487600Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:58.861941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:58.965656Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> LabeledDbCounters::TwoTablets [GOOD] >> LabeledDbCounters::TwoTabletsKillOneTablet >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH19 >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] >> KqpJoin::JoinLeftPureFull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 28991, MsgBus: 27511 2025-03-28T09:20:56.315044Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787948443180252:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:56.316023Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001829/r3tmp/tmpuIKVXl/pdisk_1.dat 2025-03-28T09:20:56.986876Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:57.004594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:57.004666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:57.009144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28991, node 1 2025-03-28T09:20:57.215137Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:57.215162Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:57.215168Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:57.215261Z 
node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27511 TClient is connected to server localhost:27511 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:58.094141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:58.139824Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:58.154183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:58.333739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:58.570317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:58.688833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:00.683279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787965623051074:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:00.683380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:01.009994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.059410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.095543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.173677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.226539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.285007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.312893Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787948443180252:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:01.312996Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:01.358237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787969918018886:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:01.358312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:01.358583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787969918018891:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:01.362289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:01.379823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787969918018893:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:01.441395Z node 1 :TX_PROXY ERROR: Actor# [1:7486787969918018947:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:02.914338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:02.950399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureFull [GOOD] Test command err: Trying to start YDB, gRPC: 16771, MsgBus: 5396 2025-03-28T09:20:57.705037Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787950364315184:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:57.705311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001824/r3tmp/tmpQWKetS/pdisk_1.dat 2025-03-28T09:20:58.434755Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:58.436787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:58.436892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:58.447901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16771, node 1 2025-03-28T09:20:58.728707Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:58.728724Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:58.728733Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:58.728824Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5396 TClient is connected to server localhost:5396 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:59.751498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:59.763673Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:59.770626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:59.926166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:00.122565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:00.238060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:02.513029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787971839153293:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:02.513127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:02.698918Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787950364315184:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:02.698964Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:02.823958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:02.868318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:02.901802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:02.935515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:02.974145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.060755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.137736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787976134121107:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:03.137820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:03.138163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787976134121112:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:03.142651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:03.164677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787976134121114:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:03.253241Z node 1 :TX_PROXY ERROR: Actor# [1:7486787976134121169:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> OlapEstimationRowsCorrectness::TPCH21 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftOnlyJoin+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 18505, MsgBus: 2645 2025-03-28T09:20:56.868195Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787947886604555:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:56.880953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001827/r3tmp/tmpuKYxck/pdisk_1.dat 2025-03-28T09:20:57.638568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:57.638851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:57.640533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:20:57.689513Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18505, node 1 2025-03-28T09:20:57.901311Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:57.901331Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:57.901337Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:57.901437Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2645 TClient is connected to server localhost:2645 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:58.993864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:20:59.027105Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:59.045399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:59.211643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:59.435612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:59.523766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:01.856742Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787947886604555:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:01.856859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:01.944328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787969361442683:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:01.944440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:02.296671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:02.356658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:02.423283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:02.486639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:02.527728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:02.578009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:02.644936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787973656410495:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:02.645026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:02.645476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787973656410500:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:02.648480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:02.665158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787973656410502:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:02.743630Z node 1 :TX_PROXY ERROR: Actor# [1:7486787973656410557:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:04.276493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.340429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.402190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.460912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.532747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.588738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup [GOOD] >> OlapEstimationRowsCorrectness::TPCH5 >> KqpJoinOrder::TPCHRandomJoinViewJustWorks+ColumnStore >> test_dispatch.py::TestMapping::test_idle [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftOnlyJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 28445, MsgBus: 24587 2025-03-28T09:20:59.395311Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787959932756884:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:59.406257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001821/r3tmp/tmpBDjAo8/pdisk_1.dat 2025-03-28T09:21:00.121300Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:00.143359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:00.143475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:00.146448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28445, node 1 2025-03-28T09:21:00.319208Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:00.319234Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:00.319245Z node 1 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-03-28T09:21:00.319346Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24587 TClient is connected to server localhost:24587 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:01.231165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:01.258248Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:01.281549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:01.573780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:01.829816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:01.960133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:04.212423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787981407595160:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:04.212538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:04.456373Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787959932756884:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:04.456757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:04.546729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.602351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.662843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.711824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.772774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.808761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.899035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787981407595679:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:04.899107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:04.899233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787981407595684:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:04.904366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:04.913275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787981407595686:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:04.981434Z node 1 :TX_PROXY ERROR: Actor# [1:7486787981407595741:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:06.288956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:06.324984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:06.372476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:21:06.433032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:21:06.513975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:21:06.561607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD] >> KqpJoin::RightSemiJoin_KeyPrefix ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinLeftFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 14808, MsgBus: 19475 2025-03-28T09:20:50.553341Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787922723278312:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:50.560655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001833/r3tmp/tmpX4cCsM/pdisk_1.dat 2025-03-28T09:20:51.229412Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:51.236917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:51.237049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:51.244794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14808, node 1 2025-03-28T09:20:51.430742Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:51.430759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:51.430784Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:51.430886Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:19475 TClient is connected to server localhost:19475 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:52.353597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:52.377598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:52.627397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:52.919085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:53.058789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:55.202533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787944198116458:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:55.202645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:55.530102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:55.544651Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787922723278312:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:55.544714Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:55.573508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:55.612433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:55.693045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:55.750005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:55.825099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:55.914771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787944198116983:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:55.914829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:55.914888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787944198116988:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:55.918645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:55.932601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787944198116990:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:56.026550Z node 1 :TX_PROXY ERROR: Actor# [1:7486787948493084341:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:57.286072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:20:57.392548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:20:57.448438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:20:57.501834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:20:57.560126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:20:57.616709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29548, MsgBus: 5400 2025-03-28T09:21:00.062857Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486787962714450968:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:00.062896Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001833/r3tmp/tmpRRRCUN/pdisk_1.dat 2025-03-28T09:21:00.280167Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:00.320819Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:00.320920Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:00.327783Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29548, node 2 2025-03-28T09:21:00.587228Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:00.587254Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:00.587261Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:00.587376Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5400 TClient is connected to server localhost:5400 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:01.473038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:01.487450Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:21:01.498002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:01.627547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:01.938449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:02.026292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:05.067649Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486787962714450968:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:05.067721Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:05.418906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787984189289242:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:05.419039Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:05.491812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:05.546455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:05.591748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:05.648696Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:05.736750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:05.804188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:05.919115Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787984189289764:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:05.919229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:05.919546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787984189289769:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:05.923902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:05.941509Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486787984189289771:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:21:06.020765Z node 2 :TX_PROXY ERROR: Actor# [2:7486787988484257122:3461] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:07.360853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:07.408146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:07.492664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:21:07.579718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-28T09:21:07.668139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-28T09:21:07.707257Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] >> KqpFlipJoin::RightOnly_2 [GOOD] >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup >> KqpJoinOrder::CanonizedJoinOrderTPCH1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightOnly_2 [GOOD] Test command err: Trying to start YDB, gRPC: 18586, MsgBus: 32552 2025-03-28T09:20:53.695260Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787933269759633:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:53.702414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00182d/r3tmp/tmpRkTMiJ/pdisk_1.dat 2025-03-28T09:20:54.320790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:54.320888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:54.322357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:20:54.330599Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18586, node 1 2025-03-28T09:20:54.534932Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:54.534950Z node 1 :NET_CLASSIFIER 
WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:54.534956Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:54.535076Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32552 TClient is connected to server localhost:32552 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:55.230171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:55.247514Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:55.259514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:55.457555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:55.734671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:55.830155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:58.063463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787954744597880:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:58.063573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:58.354321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:58.392183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:58.427931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:58.463768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:58.507867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:58.620657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:58.698826Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787933269759633:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:58.699056Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:58.716530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787954744598395:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:58.716623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:58.717051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787954744598400:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:58.721303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:20:58.760300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787954744598402:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:20:58.870910Z node 1 :TX_PROXY ERROR: Actor# [1:7486787954744598462:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:00.322942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:00.378550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:00.448422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:21:00.490505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 32277, MsgBus: 21524 2025-03-28T09:21:02.746440Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486787974124447648:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00182d/r3tmp/tmp8oL9t1/pdisk_1.dat 2025-03-28T09:21:02.812354Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:21:03.004395Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:03.047032Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:03.047119Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:03.055272Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32277, node 2 2025-03-28T09:21:03.191255Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:03.191276Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:03.191282Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:03.191381Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21524 TClient is connected to server localhost:21524 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:03.811879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:03.847245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:03.938704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:04.138654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:04.253671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:07.656152Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486787974124447648:2215];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:07.656247Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:07.822859Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787995599285716:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:07.822938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:07.875705Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:07.923612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:07.975916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:08.019797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:08.101795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:08.205699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:08.267272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787999894253529:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:08.267395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:08.270416Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486787999894253534:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:08.274630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:08.287764Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:21:08.288103Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486787999894253536:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:08.357179Z node 2 :TX_PROXY ERROR: Actor# [2:7486787999894253589:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:09.669195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:09.735309Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:09.823484Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:21:09.876377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::DatetimeConstantFold+ColumnStore |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin-NotNull >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup [GOOD] >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 11346, MsgBus: 9931 2025-03-28T09:21:05.324120Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787986666263186:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:05.333202Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001820/r3tmp/tmp8v9PTf/pdisk_1.dat 2025-03-28T09:21:05.969563Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:05.988013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:05.988154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:05.995820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11346, node 1 2025-03-28T09:21:06.170299Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:06.170324Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:06.170335Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:06.170448Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9931 TClient is connected to 
server localhost:9931 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:07.024881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:07.063770Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:07.082044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:07.348954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:07.646260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:07.747734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:09.869321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788003846134002:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:09.869415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:10.322842Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787986666263186:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:10.322908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:10.476013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:10.543333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:10.577684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:10.611095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:10.642220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:10.689045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:10.749301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788008141101811:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:10.749374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:10.749723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788008141101816:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:10.753984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:10.780015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788008141101818:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:10.842623Z node 1 :TX_PROXY ERROR: Actor# [1:7486788008141101873:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:12.059106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:12.110063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:12.169168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:21:12.237192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:21:12.311538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:21:12.354533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpFlipJoin::Inner_3 >> KqpIndexLookupJoin::MultiJoins ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH1 [GOOD] Test command err: Trying to start YDB, gRPC: 26436, MsgBus: 18367 2025-03-28T09:19:24.207884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787553410601210:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:24.208072Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016fb/r3tmp/tmpmyD6vO/pdisk_1.dat 2025-03-28T09:19:24.958134Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:24.964676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:24.964801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:24.973213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26436, node 1 2025-03-28T09:19:25.227348Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:19:25.227369Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:19:25.227375Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:19:25.227474Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:18367 TClient is connected to server localhost:18367 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:19:26.137505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:19:26.162572Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:19:28.413984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787570590470830:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:28.414081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:28.414342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787570590470842:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:28.418512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:19:28.431925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787570590470844:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:19:28.513925Z node 1 :TX_PROXY ERROR: Actor# [1:7486787570590470895:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:19:28.923396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:19:29.204318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787574885438654:2364];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:19:29.204499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787574885438654:2364];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:19:29.204788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787574885438654:2364];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:19:29.204932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787574885438654:2364];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:19:29.205046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787574885438654:2364];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:19:29.205172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787574885438654:2364];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:19:29.205281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787574885438654:2364];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:19:29.205376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787574885438654:2364];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:19:29.205481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787574885438654:2364];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:19:29.205596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787574885438654:2364];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:19:29.205695Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7486787574885438654:2364];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:19:29.205796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787574885438654:2364];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:19:29.216459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486787574885438528:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:19:29.216579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486787574885438528:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:19:29.216768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486787574885438528:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:19:29.216879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486787574885438528:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:19:29.217007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486787574885438528:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:19:29.217121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486787574885438528:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:19:29.217225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486787574885438528:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:19:29.217340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486787574885438528:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:19:29.217454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486787574885438528:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:19:29.217568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486787574885438528:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:19:29.217697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486787574885438528:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:19:29.217805Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7486787574885438528:2361];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:19:29.255453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486787574885438427:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:19:29.255525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486787574885438427:2350];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abs ... 28T09:20:52.555604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.561405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.561923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.568490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.569289Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.574750Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.574750Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.582353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.589216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.596531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.602775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.608988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.621784Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.631493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.637629Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.644549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.652111Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.666257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.669509Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.676018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.682000Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.684300Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.691008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.699780Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.705220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.709169Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.717902Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.719048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.724688Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.728555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.734815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.735593Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.741180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.829991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.836634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.838984Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.842214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.848686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.854170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.854368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:52.860729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:53.037367Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0tmfz59p73b8yesmqp466", SessionId: ydb://session/3?node_id=1&id=NjQyMTlkYjItZTJhNDIzYTMtZGI2ZjU3ZWItZDlmNTVjMWM=, Slow query, duration: 37.676709s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- 
random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:20:53.465453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:20:53.466002Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:20:53.466287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486787866943267882:10727];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-28T09:20:53.466665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:20:54.421249Z node 1 :KQP_YQL WARN: TraceId: 01jqe0vsyyacdjzadjsh7p2xw5, SessionId: CompileActor 2025-03-28 09:20:54.374 WARN ydb-core-kqp-ut-join(pid=366345, tid=0x00007FE1E242E640) [KQP] kqp_opt_phy_olap_agg.cpp:50: Expected TCoMember callable to get column under aggregation. Got: Failed to render expression to pretty string: yql/essentials/ast/yql_expr.cpp:1973 BuildValueNode(): requirement ctx.AllowFreeArgs failed, message: Free arguments are not allowed 2025-03-28T09:20:56.778204Z node 1 :KQP_YQL WARN: TraceId: 01jqe0vwdk7a782471r0dtkn5k, SessionId: CompileActor 2025-03-28 09:20:56.777 WARN ydb-core-kqp-ut-join(pid=366345, tid=0x00007FE1E374E640) [KQP] kqp_opt_phy_olap_agg.cpp:50: Expected TCoMember callable to get column under aggregation. 
Got: Failed to render expression to pretty string: yql/essentials/ast/yql_expr.cpp:1973 BuildValueNode(): requirement ctx.AllowFreeArgs failed, message: Free arguments are not allowed >> KqpJoinOrder::FiveWayJoinStatsOverride-ColumnStore [GOOD] >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn+StreamLookup >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFoldOpt+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 17516, MsgBus: 1067 2025-03-28T09:19:33.790414Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787588926117285:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:33.808869Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016f9/r3tmp/tmpZXdWTA/pdisk_1.dat 2025-03-28T09:19:34.498316Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:34.499770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:34.499887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:34.509842Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17516, node 1 2025-03-28T09:19:34.707239Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:19:34.707268Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:19:34.707276Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:19:34.707400Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1067 TClient is connected to server localhost:1067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:19:35.663156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
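The KQP_SLOW_LOG record in the CanonizedJoinOrderTPCH1 output above logs the offending statement as a single escaped string; the same statement text recurs in the FiveWayJoinWithConstantFoldOpt slow-query record further down. Expanded for readability (whitespace normalized, commented-out columns as in the original), it is the following YQL DDL; the AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 setting is consistent with the long per-tablet runs of finished_tx records surrounding these statements:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);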
2025-03-28T09:19:35.698017Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:19:38.375924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787610400954296:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:38.376056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:38.382858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787610400954308:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:38.391746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:19:38.419718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787610400954310:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:19:38.503047Z node 1 :TX_PROXY ERROR: Actor# [1:7486787610400954361:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:19:38.793131Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787588926117285:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:19:38.811048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:19:38.914380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:19:39.239046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486787614695921913:2356];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:19:39.239257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486787614695921913:2356];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:19:39.239602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486787614695921913:2356];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:19:39.239741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486787614695921913:2356];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:19:39.239853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486787614695921913:2356];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:19:39.239988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486787614695921913:2356];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:19:39.240105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486787614695921913:2356];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:19:39.240218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486787614695921913:2356];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:19:39.240355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486787614695921913:2356];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:19:39.240478Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7486787614695921913:2356];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:19:39.240603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486787614695921913:2356];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:19:39.240696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486787614695921913:2356];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:19:39.247469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787614695921909:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:19:39.247545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787614695921909:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:19:39.247768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787614695921909:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:19:39.247872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787614695921909:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:19:39.247998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787614695921909:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:19:39.248099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787614695921909:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:19:39.248206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787614695921909:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:19:39.248317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787614695921909:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:19:39.248423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787614695921909:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:19:39.248545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787614695921909:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:19:39.248644Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486787614695921909:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:19:39.248758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787614695921909:2354];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:19:39.288386Z node 1 :TX_C ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.301989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.302698Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.312604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.316129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.322262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.325741Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.328134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.330556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.339973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.339973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.345735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.346820Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.351605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.353538Z node 1 :TX_COLUMNSHARD_TX 
WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.359335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.361580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.364258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.368302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.370661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.374745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.376743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.381986Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.384394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.388516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.391126Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.394096Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.397505Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.399661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.403123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-28T09:20:59.405777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.408664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.411651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.414666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.417610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.422210Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.422938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.428908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.429758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.434499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.435463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.440325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.441960Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.446247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.448733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.557048Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:20:59.683946Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0txa8bb3rf7n44ersv77p", SessionId: ydb://session/3?node_id=1&id=MzBiOTA1YmQtYTY2ZGEyZC1iZGEwZjM5YS01OGMwYWFhZg==, Slow query, duration: 35.288879s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:21:00.009751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:00.010142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:00.010401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486787863804070315:9159];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-28T09:21:00.010878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::CanonizedJoinOrderTPCH17 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinStatsOverride-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 20805, MsgBus: 8711 2025-03-28T09:20:25.106181Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787814804420144:2093];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:25.106231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001861/r3tmp/tmpKlTgLw/pdisk_1.dat 2025-03-28T09:20:25.726203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:25.726300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:25.732080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:20:25.739199Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20805, node 1 2025-03-28T09:20:26.003275Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-28T09:20:26.003299Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:26.003306Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:26.003414Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8711 TClient is connected to server localhost:8711 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:26.842045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:29.217785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787831984289978:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:29.217785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787831984289970:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:29.217914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:29.222003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:20:29.238054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787831984289984:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:20:29.310984Z node 1 :TX_PROXY ERROR: Actor# [1:7486787831984290035:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:29.754812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:20:29.939909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:20:29.979478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.026767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.057565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.106426Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787814804420144:2093];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:30.106490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:30.284666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.320804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.354186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.397155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.461444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.503617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:20:30.534851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
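The KQP_WORKLOAD_SERVICE warnings above show the usual first-use bootstrap of the default pool, repeated in each test log in this section: the pool is fetched before it exists (NOT_FOUND), TPoolCreatorActor creates it via ESchemeOpCreateResourcePool, the racing TX_PROXY create gets "path exist, request accepts it", and the retry then succeeds. The pool is an ordinary schema object at /Root/.metadata/workload_manager/pools/default (type EPathTypeResourcePool), so a pool can also be created explicitly. A minimal YQL sketch, assuming the CREATE RESOURCE POOL syntax of recent YDB releases; the pool name and both option names are illustrative assumptions, not taken from this log:

    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10, -- assumed option: cap on queries running in the pool at once
        QUEUE_SIZE = 100             -- assumed option: cap on queries queued for admission
    );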
2025-03-28T09:20:30.566880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-28T09:20:31.190821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480
2025-03-28T09:20:31.266349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-03-28T09:20:31.338219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-03-28T09:20:31.375804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
2025-03-28T09:20:31.415846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480
2025-03-28T09:20:31.507801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480
2025-03-28T09:20:31.540412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480
2025-03-28T09:20:31.576364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
2025-03-28T09:20:31.611172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480
2025-03-28T09:20:31.650028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480
2025-03-28T09:20:31.689213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480
2025-03-28T09:20:31.724798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480
2025-03-28T09:20:31.763324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480
2025-03-28T09:20:31.797969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480
2025-03-28T09:20:31.869651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480
2025-03-28T09:20:31.904559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480
2025-03-28T09:20:31.947662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480
2025-03-28T09:20:32.042650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but prop ... oller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.097125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.100557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.102962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.106515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.108584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.112218Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.114556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.117558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.119948Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.124009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.125895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.129291Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.132144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.135150Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.138705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.141666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.144803Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.148131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.151480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.154320Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.158288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.160200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.164753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.166184Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.171494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.172821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.177801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.178524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.184121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.184761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.190431Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.195450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.202849Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.210713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.214493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.221224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.226512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.231823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.237235Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.243188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.249279Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.251267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.258024Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.258285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.264591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:21:08.412442Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0v6fq8tpjmag032zer0z8", SessionId: ydb://session/3?node_id=1&id=NDg5MDI4OWQtYzBjNmQzZjctY2U1OGY5MDItYTZmMDI4MWU=, Slow query, duration: 34.628048s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-28T09:21:08.704368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:21:08.704834Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:21:08.705869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7486787909293718735:4522];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038170;
2025-03-28T09:21:08.706253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD]
>> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCH4
|86.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_dispatch.py::TestMapping::test_idle [GOOD]
>> BsControllerConfig::ManyPDisksRestarts [GOOD]
>> BsControllerConfig::MergeBoxes
>> KqpJoin::RightSemiJoin_KeyPrefix [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnlyJoinValueColumn+StreamLookup [GOOD]
Test command err:
Trying to start YDB, gRPC: 20569, MsgBus: 13935
2025-03-28T09:21:12.730067Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788013411665080:2128];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:21:12.730094Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001813/r3tmp/tmpJqIERD/pdisk_1.dat
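The SQL text of the slow query reported above at 2025-03-28T09:21:08.412442Z (duration 34.628048s) is hard to read with its newlines escaped inside the log line. Expanded verbatim from that log entry (only whitespace restored, nothing added):

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

Each statement creates a column-store table pre-split into at least 240 partitions, which is consistent with the long run of 72075186224038xxx column-shard tablets reporting finished_tx for the same tx_id above.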
2025-03-28T09:21:13.455506Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:21:13.464723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:21:13.464823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:21:13.479485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 20569, node 1
2025-03-28T09:21:13.742934Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:21:13.742952Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:21:13.742961Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:21:13.743065Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:13935
TClient is connected to server localhost:13935
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:21:14.664472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:14.678547Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:21:14.695283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:14.949818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:15.216628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:15.334829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:17.668162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788034886503281:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:17.668258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:17.734999Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788013411665080:2128];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:21:17.735054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:21:17.981015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:21:18.050125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:21:18.084711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:21:18.173750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:21:18.222678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:21:18.294124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:21:18.353427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788039181471100:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:18.353495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:18.353835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788039181471105:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:18.357679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:21:18.373414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788039181471107:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:21:18.478449Z node 1 :TX_PROXY ERROR: Actor# [1:7486788039181471163:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:21:19.949461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:21:19.984624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-28T09:21:20.031771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-03-28T09:21:20.081529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-03-28T09:21:20.132385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-03-28T09:21:20.194140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
>> KqpJoinOrder::Chain65Nodes
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_KeyPrefix [GOOD]
Test command err:
Trying to start YDB, gRPC: 32132, MsgBus: 17876
2025-03-28T09:21:10.956676Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788004911857651:2195];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:21:10.957225Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001815/r3tmp/tmptDgoKt/pdisk_1.dat
2025-03-28T09:21:11.725394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:21:11.725482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:21:11.726750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:21:11.760584Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 32132, node 1
2025-03-28T09:21:11.975345Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:21:11.975364Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:21:11.975370Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:21:11.975454Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:17876
TClient is connected to server localhost:17876
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:21:12.949315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:12.979020Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:21:12.992857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:13.204166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:13.470230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:13.562119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:15.951073Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788004911857651:2195];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:21:15.984291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:21:16.156920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788030681663076:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:16.157027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:16.530720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:21:16.566911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:21:16.600028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:21:16.643925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:21:16.730901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:21:16.829914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:21:16.919765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788030681663595:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:16.919856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:16.920314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788030681663600:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:16.924910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:21:16.939991Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-03-28T09:21:16.941044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788030681663602:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:21:17.019931Z node 1 :TX_PROXY ERROR: Actor# [1:7486788034976630957:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:21:18.474774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:21:18.558556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-28T09:21:18.602893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-03-28T09:21:18.696697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-03-28T09:21:18.741936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:57: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
>> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin-NotNull [GOOD]
>> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull
>> KqpJoin::JoinLeftPureInnerConverted
>> KqpJoinOrder::CanonizedJoinOrderTPCH6
>> KqpJoin::LeftJoinPushdownPredicate_NestedJoin
>> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix
>> KqpFlipJoin::Inner_3 [GOOD]
>> KqpFlipJoin::LeftSemi_1
>> KqpIndexLookupJoin::InnerJoinOnlyRightColumn+StreamLookup [GOOD]
>> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup
>> KqpIndexLookupJoin::MultiJoins [GOOD]
>> unstable_connection.py::TestUnstableConnection::test [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::MultiJoins [GOOD]
Test command err:
Trying to start YDB, gRPC: 25956, MsgBus: 27594
2025-03-28T09:21:17.236399Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788035491430639:2136];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:21:17.255432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00180a/r3tmp/tmpoiIxpK/pdisk_1.dat
2025-03-28T09:21:17.946761Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:21:17.946982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:21:17.947056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:21:17.952790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 25956, node 1
2025-03-28T09:21:18.063374Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:21:18.063392Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:21:18.063403Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:21:18.063497Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:27594
TClient is connected to server localhost:27594
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:21:18.929133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:18.989912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:19.285269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:19.661659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:19.853935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:22.238906Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788035491430639:2136];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:21:22.238963Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:21:22.404427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788056966268814:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:22.404514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:23.048570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:21:23.103922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:21:23.141840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:21:23.185006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:21:23.234798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:21:23.322289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:21:23.419977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788061261236643:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:23.420077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:23.420315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788061261236648:2465], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:23.424612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:21:23.436764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788061261236650:2466], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:21:23.512248Z node 1 :TX_PROXY ERROR: Actor# [1:7486788061261236705:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:21:25.198917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:21:25.267037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-28T09:21:25.332382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
2025-03-28T09:21:25.379781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
2025-03-28T09:21:25.422583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
2025-03-28T09:21:25.466670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [FAIL]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1]
>> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull [GOOD]
>> KqpJoin::IdxLookupPartialWithTempTable
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint64ToInt64+StreamLookupJoin+NotNull [GOOD]
Test command err:
Trying to start YDB, gRPC: 30575, MsgBus: 18046
2025-03-28T09:21:15.245024Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788028348162449:2196];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:21:15.263518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00180f/r3tmp/tmpuN4NCI/pdisk_1.dat
2025-03-28T09:21:15.876726Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:21:15.902013Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:21:15.902088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:21:15.910281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 30575, node 1
2025-03-28T09:21:16.246304Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:21:16.246323Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:21:16.246331Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:21:16.246430Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:18046
TClient is connected to server localhost:18046
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:21:17.329924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:17.375350Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:21:17.392011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:17.542604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:17.739460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:17.832959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:20.015567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788049823000579:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:20.015668Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:20.098771Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788028348162449:2196];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:21:20.098833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:21:20.354350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:21:20.398906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:21:20.427981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:21:20.459009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:21:20.545843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:21:20.583271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:21:20.694551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788049823001097:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:20.694623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:20.694938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788049823001102:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:20.698863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:21:20.716021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788049823001104:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:21:20.806300Z node 1 :TX_PROXY ERROR: Actor# [1:7486788049823001161:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:21:22.015526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:21:22.115462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 28189, MsgBus: 19290
2025-03-28T09:21:24.196413Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788067014190646:2075];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:21:24.197110Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00180f/r3tmp/tmpVmFcBk/pdisk_1.dat
2025-03-28T09:21:24.454128Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:21:24.500033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:21:24.500118Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
TServer::EnableGrpc on GrpcPort 28189, node 2
2025-03-28T09:21:24.514714Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:21:24.659317Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:21:24.659340Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:21:24.659347Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:21:24.659471Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19290
TClient is connected to server localhost:19290
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
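For context on what this suite is exercising: judging only by its name, KqpIndexLookupJoin::CheckCastUint64ToInt64 runs a lookup join whose key comparison crosses an unsigned and a signed integer type, with the +NotNull/-NotNull variants toggling NOT NULL on the key columns. A purely illustrative YQL sketch of that shape (table and column names are hypothetical, not taken from this log):

SELECT l.id, r.payload
FROM left_table AS l
INNER JOIN right_table AS r
ON l.key_u64 = CAST(r.key_i64 AS Uint64);  -- key_u64 is Uint64 and key_i64 is Int64 in this sketch

Note also the earlier "Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001" warnings: without column statistics the optimizer keeps the syntactic join order, and these tests run against tables created only seconds before, for which no statistics have been collected yet.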
2025-03-28T09:21:25.510303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:25.542976Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:25.695327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:25.905739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:26.031986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:28.975957Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788084194061578:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:28.976058Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:29.063057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:21:29.105225Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:21:29.161523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:21:29.198986Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486788067014190646:2075];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:21:29.199040Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:21:29.236075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:21:29.320028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:21:29.396586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:21:29.491231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788088489029398:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:29.491295Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:29.491480Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788088489029403:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:29.496290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:21:29.522729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788088489029405:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:21:29.581337Z node 2 :TX_PROXY ERROR: Actor# [2:7486788088489029461:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:21:30.780785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:21:30.920940Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
>> KqpJoin::JoinLeftPureInnerConverted [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureInnerConverted [GOOD]
Test command err:
Trying to start YDB, gRPC: 2968, MsgBus: 24569
2025-03-28T09:21:24.439025Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788065211068909:2198];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:21:24.441377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017fd/r3tmp/tmpFWycEJ/pdisk_1.dat
2025-03-28T09:21:25.055760Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:21:25.063031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:21:25.063114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:21:25.067989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 2968, node 1
2025-03-28T09:21:25.255590Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:21:25.255626Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:21:25.255634Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:21:25.255770Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:24569
TClient is connected to server localhost:24569
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:26.190632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:26.238395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:26.496986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:21:26.727048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:21:26.834995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:29.417919Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788065211068909:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:29.417985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:29.650198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788086685907020:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:29.650294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:30.168926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:30.216278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:30.251462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:30.290396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:30.373014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:30.459579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:30.567005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788090980874845:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:30.567086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:30.567431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788090980874850:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:30.571488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:30.589487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788090980874852:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:30.648141Z node 1 :TX_PROXY ERROR: Actor# [1:7486788090980874910:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:31.933502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:31.971527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:32.010937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpFlipJoin::LeftSemi_1 [GOOD] >> KqpJoin::CrossJoinCount >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::LeftSemi_1 [GOOD] Test command err: Trying to start YDB, gRPC: 28418, MsgBus: 24170 2025-03-28T09:21:16.833568Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788030438361996:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:16.833604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00180d/r3tmp/tmpgLHRll/pdisk_1.dat 2025-03-28T09:21:17.625209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:17.625286Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:17.635543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:21:17.687331Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28418, node 1 2025-03-28T09:21:17.895541Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:17.895579Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:17.895591Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:17.895715Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24170 TClient is connected to server localhost:24170 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:18.641071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:18.690807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:18.986292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:19.184638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:21:19.267953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:21:21.834680Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788030438361996:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:21.848772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:21.882561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788051913200242:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:21.882702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:22.342191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:22.379362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:22.414208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:22.456851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:22.491839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:22.559929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:22.665158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788056208168061:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:22.665237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:22.665687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788056208168066:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:22.705138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:22.739238Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:21:22.739561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788056208168068:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:22.834168Z node 1 :TX_PROXY ERROR: Actor# [1:7486788056208168126:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:24.353421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:24.416932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:24.465928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:21:24.536681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6407, MsgBus: 22361 2025-03-28T09:21:27.534569Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788078344031469:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00180d/r3tmp/tmpdR8meV/pdisk_1.dat 2025-03-28T09:21:27.608878Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:21:27.663646Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:27.681531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:27.681637Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:27.687554Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6407, node 2 2025-03-28T09:21:27.923649Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:27.923670Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:27.923677Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:27.923783Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22361 TClient is connected to server localhost:22361 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:28.610151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:28.619595Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:21:28.635373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:28.768013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:28.987042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:29.082861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:31.933416Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788095523902283:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:31.933491Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:31.977619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:32.023176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:32.062827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:32.103945Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:32.188747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:32.276089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:32.364683Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788099818870099:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:32.364761Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:32.365120Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788099818870104:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:32.369056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:32.386265Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788099818870106:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:21:32.427971Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486788078344031469:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:32.428043Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:32.473171Z node 2 :TX_PROXY ERROR: Actor# [2:7486788099818870161:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:33.786640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:33.852771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:33.918069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:21:33.995579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup [GOOD] >> KqpJoinOrder::GeneralPrioritiesBug2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_NestedJoin [GOOD] Test command err: Trying to start YDB, gRPC: 13048, MsgBus: 22012 2025-03-28T09:21:26.321755Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788076898683111:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:26.322170Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017f7/r3tmp/tmpYdsdRb/pdisk_1.dat 2025-03-28T09:21:26.979561Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:27.003006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:27.003096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:27.009950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13048, node 1 2025-03-28T09:21:27.179197Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:27.179216Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:27.179223Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:27.179327Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:22012 TClient is connected to server localhost:22012 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:27.933933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:27.991535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:28.241331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:28.473232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:28.595862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:31.009661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788094078553939:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:31.009774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:31.313042Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788076898683111:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:31.313101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:31.434948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:31.483681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:31.535309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:31.576965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:31.661094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:31.731223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:31.831192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788098373521759:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:31.831292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:31.831626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788098373521764:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:31.836056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:31.863397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788098373521766:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:31.927144Z node 1 :TX_PROXY ERROR: Actor# [1:7486788098373521821:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:33.551819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:33.601588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:33.660729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::InnerJoinOnlyRightColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 19346, MsgBus: 29358 2025-03-28T09:21:18.988595Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788041187459052:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:18.990569Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001809/r3tmp/tmpkOlc6Z/pdisk_1.dat 2025-03-28T09:21:19.695243Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:19.705212Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:19.705304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:19.707772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19346, node 1 2025-03-28T09:21:19.922716Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:19.922759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:19.922828Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:19.922989Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29358 TClient is connected to server localhost:29358 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:20.998883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:21.024274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:21.241495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:21.573170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:21.729089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:23.978501Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788041187459052:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:23.983642Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:24.079650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788066957264461:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:24.079763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:24.515871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:24.598288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:24.640697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:24.683114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:24.728782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:24.781798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:24.869780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788066957264981:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:24.869878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:24.874842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788066957264986:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:24.878201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:24.894737Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788066957264988:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:24.947022Z node 1 :TX_PROXY ERROR: Actor# [1:7486788066957265043:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:26.143458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:26.183340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:26.239978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:21:26.277380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:21:26.337399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:21:26.373464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13934, MsgBus: 28926 2025-03-28T09:21:28.894087Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788086025270654:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:28.894128Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001809/r3tmp/tmpPeyL3q/pdisk_1.dat 2025-03-28T09:21:29.295854Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:29.323748Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:29.323831Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:29.332080Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13934, node 2 2025-03-28T09:21:29.543144Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:29.543163Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:29.543170Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:29.543269Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28926 TClient is connected to server localhost:28926 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:30.195713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:30.207464Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:30.223185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:30.356362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:30.621939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:30.744224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:33.589816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788107500108776:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:33.589934Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:33.703235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:33.777005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:33.833972Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:33.893105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:33.894988Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486788086025270654:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:33.895361Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:33.939727Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:33.996715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:34.078659Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788111795076590:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:34.078738Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:34.078913Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788111795076595:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:34.084227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:34.096619Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788111795076597:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:34.197428Z node 2 :TX_PROXY ERROR: Actor# [2:7486788111795076653:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:35.610142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:35.654545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:35.736467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:21:35.805804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:21:35.858115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:21:35.906938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpFlipJoin::LeftSemi_2 >> data_correctness.py::TestDataCorrectness::test [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexSecondaryIndexPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 16635, MsgBus: 4058 2025-03-28T09:21:26.974753Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788074170379114:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:27.002918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017f6/r3tmp/tmpcywJM8/pdisk_1.dat 2025-03-28T09:21:27.720727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:27.720823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:27.727871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:21:27.757909Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16635, node 1 2025-03-28T09:21:27.997969Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:27.997991Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:27.998005Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:27.998113Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:4058 TClient is connected to server localhost:4058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:29.108753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:29.131997Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:29.141925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:29.410469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:29.742234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:29.865242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:31.975146Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788074170379114:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:31.975206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:32.148545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788099940184655:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:32.148662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:32.695833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:32.734474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:32.769161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:32.796683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:32.832127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:32.885156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:32.976979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788099940185170:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:32.977076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:32.977121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788099940185175:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:32.980973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:33.007666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788099940185177:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:33.086531Z node 1 :TX_PROXY ERROR: Actor# [1:7486788104235152530:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:34.851936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:34.935661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:34.999146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:21:35.054489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:21:35.123344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:49: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpJoinOrder::FiveWayJoinWithConstantFold-ColumnStore [GOOD] >> KqpJoin::ExclusionJoin >> KqpJoinOrder::GeneralPrioritiesBug1 >> BsControllerConfig::MergeBoxes [GOOD] >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MergeBoxes [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11014:2156] recipient: [1:10839:2166] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11014:2156] recipient: [1:10839:2166] Leader for TabletID 72057594037932033 is [1:11016:2168] sender: [1:11017:2156] recipient: [1:10839:2166] 2025-03-28T09:20:19.613975Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-28T09:20:19.622032Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-28T09:20:19.622613Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-28T09:20:19.626536Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:20:19.627165Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-28T09:20:19.627837Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:19.627884Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-28T09:20:19.628441Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-28T09:20:19.639257Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-28T09:20:19.639465Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-28T09:20:19.639671Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-28T09:20:19.639813Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:19.639940Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:20:19.640032Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:11016:2168] sender: [1:11042:2156] recipient: [1:110:2157] 2025-03-28T09:20:19.660472Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-28T09:20:19.660654Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:19.672726Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:20:19.672911Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:19.673022Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:20:19.673123Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:19.673282Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:20:19.673358Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:19.673397Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:20:19.673484Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:19.684379Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:20:19.684550Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:19.695606Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:20:19.695789Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-28T09:20:19.697193Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-28T09:20:19.697262Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-28T09:20:19.697534Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-28T09:20:19.697611Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed 2025-03-28T09:20:19.723028Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk0" } Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" } Drive { Path: "/dev/disk3" } Drive { Path: "/dev/disk4" } Drive { Path: "/dev/disk5" } Drive { Path: "/dev/disk6" } Drive { Path: "/dev/disk7" } Drive { Path: "/dev/disk8" Type: SSD } Drive { Path: "/dev/disk9" Type: SSD } Drive { Path: "/dev/disk10" Type: SSD } Drive { Path: "/dev/disk11" Type: SSD } Drive { Path: "/dev/disk12" Type: SSD } Drive { Path: "/dev/disk13" Type: SSD } Drive { Path: "/dev/disk14" Type: SSD } Drive { Path: "/dev/disk15" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } 
HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12061 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12062 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12063 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12064 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12065 } HostConfigId: 
1 } Host { Key { Fqdn: "::1" IcPort: 12066 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12067 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12068 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12069 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12070 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12071 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12072 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12073 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12074 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12075 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12076 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12077 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12078 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12079 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12080 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12081 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12082 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12083 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12084 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12085 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12086 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12087 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12088 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12089 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12090 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12091 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12092 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12093 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12094 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12095 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12096 } HostConfigId: 1 } Host { Ke ... 
9} Create new pdisk PDiskId# 275:1002 Path# /dev/disk3 2025-03-28T09:21:33.683248Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 276:1000 Path# /dev/disk1 2025-03-28T09:21:33.683279Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 276:1001 Path# /dev/disk2 2025-03-28T09:21:33.683309Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 276:1002 Path# /dev/disk3 2025-03-28T09:21:33.683344Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 277:1000 Path# /dev/disk1 2025-03-28T09:21:33.683392Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 277:1001 Path# /dev/disk2 2025-03-28T09:21:33.683425Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 277:1002 Path# /dev/disk3 2025-03-28T09:21:33.683461Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 278:1000 Path# /dev/disk1 2025-03-28T09:21:33.683503Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 278:1001 Path# /dev/disk2 2025-03-28T09:21:33.683535Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 278:1002 Path# /dev/disk3 2025-03-28T09:21:33.683579Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 279:1000 Path# /dev/disk1 2025-03-28T09:21:33.683627Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 279:1001 Path# /dev/disk2 2025-03-28T09:21:33.683656Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 279:1002 Path# /dev/disk3 2025-03-28T09:21:33.683684Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 280:1000 Path# /dev/disk1 2025-03-28T09:21:33.683724Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 280:1001 Path# /dev/disk2 2025-03-28T09:21:33.683767Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 280:1002 Path# /dev/disk3 2025-03-28T09:21:33.683799Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 281:1000 Path# /dev/disk1 2025-03-28T09:21:33.683826Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 281:1001 Path# /dev/disk2 2025-03-28T09:21:33.683862Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 281:1002 Path# /dev/disk3 2025-03-28T09:21:33.683899Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 282:1000 Path# /dev/disk1 2025-03-28T09:21:33.683943Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 282:1001 Path# /dev/disk2 2025-03-28T09:21:33.683981Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 282:1002 Path# /dev/disk3 2025-03-28T09:21:33.684012Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 283:1000 Path# /dev/disk1 2025-03-28T09:21:33.684042Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 283:1001 Path# /dev/disk2 2025-03-28T09:21:33.684070Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} 
Create new pdisk PDiskId# 283:1002 Path# /dev/disk3 2025-03-28T09:21:33.684097Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 284:1000 Path# /dev/disk1 2025-03-28T09:21:33.684125Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 284:1001 Path# /dev/disk2 2025-03-28T09:21:33.684154Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 284:1002 Path# /dev/disk3 2025-03-28T09:21:33.684190Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 285:1000 Path# /dev/disk1 2025-03-28T09:21:33.684231Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 285:1001 Path# /dev/disk2 2025-03-28T09:21:33.684259Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 285:1002 Path# /dev/disk3 2025-03-28T09:21:33.684307Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 286:1000 Path# /dev/disk1 2025-03-28T09:21:33.684337Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 286:1001 Path# /dev/disk2 2025-03-28T09:21:33.684368Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 286:1002 Path# /dev/disk3 2025-03-28T09:21:33.684397Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 287:1000 Path# /dev/disk1 2025-03-28T09:21:33.684426Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 287:1001 Path# /dev/disk2 2025-03-28T09:21:33.684453Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 287:1002 Path# /dev/disk3 2025-03-28T09:21:33.684486Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 288:1000 Path# /dev/disk1 2025-03-28T09:21:33.684517Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 288:1001 Path# /dev/disk2 2025-03-28T09:21:33.684558Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 288:1002 Path# /dev/disk3 2025-03-28T09:21:33.684603Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 289:1000 Path# /dev/disk1 2025-03-28T09:21:33.684638Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 289:1001 Path# /dev/disk2 2025-03-28T09:21:33.684666Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 289:1002 Path# /dev/disk3 2025-03-28T09:21:33.684694Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 290:1000 Path# /dev/disk1 2025-03-28T09:21:33.684723Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 290:1001 Path# /dev/disk2 2025-03-28T09:21:33.684751Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 290:1002 Path# /dev/disk3 2025-03-28T09:21:33.684780Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 291:1000 Path# /dev/disk1 2025-03-28T09:21:33.684809Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 291:1001 Path# /dev/disk2 2025-03-28T09:21:33.684838Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} 
Create new pdisk PDiskId# 291:1002 Path# /dev/disk3 2025-03-28T09:21:33.684866Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 292:1000 Path# /dev/disk1 2025-03-28T09:21:33.684898Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 292:1001 Path# /dev/disk2 2025-03-28T09:21:33.684929Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 292:1002 Path# /dev/disk3 2025-03-28T09:21:33.684963Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 293:1000 Path# /dev/disk1 2025-03-28T09:21:33.684994Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 293:1001 Path# /dev/disk2 2025-03-28T09:21:33.685027Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 293:1002 Path# /dev/disk3 2025-03-28T09:21:33.685072Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 294:1000 Path# /dev/disk1 2025-03-28T09:21:33.685113Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 294:1001 Path# /dev/disk2 2025-03-28T09:21:33.685154Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 294:1002 Path# /dev/disk3 2025-03-28T09:21:33.685188Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 295:1000 Path# /dev/disk1 2025-03-28T09:21:33.685219Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 295:1001 Path# /dev/disk2 2025-03-28T09:21:33.685270Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 295:1002 Path# /dev/disk3 2025-03-28T09:21:33.685305Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 296:1000 Path# /dev/disk1 2025-03-28T09:21:33.685340Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 296:1001 Path# /dev/disk2 2025-03-28T09:21:33.685370Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 296:1002 Path# /dev/disk3 2025-03-28T09:21:33.685404Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 297:1000 Path# /dev/disk1 2025-03-28T09:21:33.685435Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 297:1001 Path# /dev/disk2 2025-03-28T09:21:33.685467Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 297:1002 Path# /dev/disk3 2025-03-28T09:21:33.685498Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 298:1000 Path# /dev/disk1 2025-03-28T09:21:33.685530Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 298:1001 Path# /dev/disk2 2025-03-28T09:21:33.685559Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 298:1002 Path# /dev/disk3 2025-03-28T09:21:33.685589Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 299:1000 Path# /dev/disk1 2025-03-28T09:21:33.685618Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 299:1001 Path# /dev/disk2 2025-03-28T09:21:33.685647Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} 
Create new pdisk PDiskId# 299:1002 Path# /dev/disk3 2025-03-28T09:21:33.685678Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 300:1000 Path# /dev/disk1 2025-03-28T09:21:33.685708Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 300:1001 Path# /dev/disk2 2025-03-28T09:21:33.685735Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 300:1002 Path# /dev/disk3 2025-03-28T09:21:33.975611Z node 251 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.294882s 2025-03-28T09:21:33.975825Z node 251 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.295122s 2025-03-28T09:21:34.009053Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { MergeBoxes { OriginBoxId: 2 OriginBoxGeneration: 1 TargetBoxId: 1 TargetBoxGeneration: 1 StoragePoolIdMap { OriginStoragePoolId: 1 TargetStoragePoolId: 2 } } } } 2025-03-28T09:21:34.040849Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute TEvControllerConfigRequest Request# {Command { ReadBox { BoxId: 1 } } Command { QueryBaseConfig { } } } >> KqpJoin::JoinDupColumnRight >> KqpJoin::IdxLookupPartialWithTempTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFold-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 8647, MsgBus: 11654 2025-03-28T09:20:46.119871Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787904351461346:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:46.120811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00183a/r3tmp/tmpZuO077/pdisk_1.dat 2025-03-28T09:20:46.780366Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:46.803663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:46.803758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:46.809122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8647, node 1 2025-03-28T09:20:47.028013Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:47.028038Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:47.028047Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:47.028167Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11654 TClient is connected to server localhost:11654 WaitRootIsUp 'Root'... 
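KqpJoinOrder::FiveWayJoinWithConstantFold-ColumnStore, reported [GOOD] above, targets join-order planning when the query also carries a constant expression the optimizer should fold before ordering the joins. A minimal hand-written sketch of that query shape (all five table names and key columns are invented, not taken from the test):

    -- Five relations chained on key columns; `1 + 1 = 2` is a constant
    -- predicate the planner is expected to fold to TRUE before it
    -- enumerates join orders.
    SELECT COUNT(*)
    FROM t1 AS a
    JOIN t2 AS b ON a.id = b.a_id
    JOIN t3 AS c ON b.id = c.b_id
    JOIN t4 AS d ON c.id = d.c_id
    JOIN t5 AS e ON d.id = e.d_id
    WHERE 1 + 1 = 2;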
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:47.978611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:50.402019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787921531331169:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.402076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787921531331181:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.402114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.405812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:20:50.430866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787921531331183:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:20:50.498859Z node 1 :TX_PROXY ERROR: Actor# [1:7486787921531331234:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:50.817698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.000793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.042597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.088420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.119929Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787904351461346:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:51.120014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:51.124432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.312698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.354432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.392801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.456728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.504683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.550908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.631148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
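The sequence above — pool fetch returning NOT_FOUND, ESchemeOpCreateResourcePool, the "Transaction ... completed, doublechecking" retry, then TX_PROXY's "path exist, request accepts it" — is the workload service lazily creating /Root/.metadata/workload_manager/pools/default on first query; the final "path exist" message is the benign outcome of two creators racing. Assuming current YDB workload-manager YQL (the pool name and limit values below are illustrative, not taken from this log), such a pool can also be declared explicitly:

    -- Illustrative only: an explicit resource pool like the implicit
    -- `default` one; the limit values are made up for the example.
    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- queries admitted concurrently
        QUEUE_SIZE = 100              -- queries allowed to wait in queue
    );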
2025-03-28T09:20:51.671592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.505595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:20:52.551957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.585058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.633503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.669844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.723147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.758073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.788476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.856455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.893724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.932910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.984938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.019746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.064217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.118621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.167326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.198977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.232243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pro ... tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.294329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038544;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.312314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.316126Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.322660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.326562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038548;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.337148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.339965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.350801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.351333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.357839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.365907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.367292Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.381539Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.387915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.391450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.394304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.400662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.400964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038520;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.407142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.413465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038530;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.418867Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.425651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.433316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038532;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.447683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038534;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.456605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.461969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.479329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.481070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038566;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.494360Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.496016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038540;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.504812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038516;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.506924Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.511324Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.512066Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.518476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038526;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.526753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.527124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038536;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.532526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038554;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.537560Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038570;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.540671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.549976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038542;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.552051Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.563715Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038538;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.567922Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.569093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.576835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038518;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:31.692491Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0vty4a22374esja0m1p5j", SessionId: ydb://session/3?node_id=1&id=MmEzN2YwNTUtYWYxZTIyM2UtM2UzZDA4NDgtNTE4MmYyNWU=, Slow query, duration: 36.967787s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:21:32.095309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:32.095957Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:32.103918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; |86.3%| [TA] $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |86.3%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpJoinOrder::CanonizedJoinOrderLookupBug [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupPartialWithTempTable [GOOD] Test command err: Trying to start YDB, gRPC: 22044, MsgBus: 25269 2025-03-28T09:21:33.868950Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788107332156760:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:33.869483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017f5/r3tmp/tmpTCioPu/pdisk_1.dat 2025-03-28T09:21:34.593520Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:34.605631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:34.605708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:34.612581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22044, node 1 2025-03-28T09:21:34.840710Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:34.840732Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:34.840741Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:34.840858Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25269 TClient is connected to server localhost:25269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:36.080158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:36.095614Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:36.114676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
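The KQP_SLOW_LOG record earlier in the FiveWayJoinWithConstantFold-ColumnStore output embeds, with escaped newlines, the DDL that took 36.967787s. Unescaped for readability, the statement text from that record is:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 setting also matches the long run of per-tablet TX_COLUMNSHARD_TX "finished_tx" records preceding the slow-log entry: each column shard acknowledges the schema transaction separately.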
2025-03-28T09:21:36.349606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:36.597779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:36.689409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:38.867050Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788107332156760:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:38.867158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:38.869061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788128806994856:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:38.869164Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:39.180660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:39.246407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:39.307238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:39.397367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:39.454519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:39.517611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:39.614848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788133101962671:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:39.614992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:39.619064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788133101962676:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:39.626360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:39.646311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788133101962680:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:39.732745Z node 1 :TX_PROXY ERROR: Actor# [1:7486788133101962734:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:41.184909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:41.271978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:41.309808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:3:44: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpJoin::RightSemiJoin_ComplexKey >> KqpJoin::CrossJoinCount [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderLookupBug [GOOD] Test command err: Trying to start YDB, gRPC: 2369, MsgBus: 18420 2025-03-28T09:20:47.009237Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787906924220996:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:47.026905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001838/r3tmp/tmpedEMNE/pdisk_1.dat 2025-03-28T09:20:47.544553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:47.544880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:47.549602Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:47.554453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2369, node 1 2025-03-28T09:20:47.788619Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:47.788642Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:47.788650Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:47.788788Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18420 TClient is connected to server localhost:18420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:48.744311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:51.249951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787924104090681:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:51.250073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:51.250402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787924104090693:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:51.261866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:20:51.281918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787924104090695:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:20:51.391437Z node 1 :TX_PROXY ERROR: Actor# [1:7486787924104090746:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:51.793838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.911282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.972953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.011357Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787906924220996:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:52.011529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:52.025013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.065533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.227861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.266813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.345901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.394993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.439780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.482402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:20:52.521177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-28T09:20:52.565253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.358170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:20:53.402182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.441755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.478447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.540647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.583831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.615521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.657535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.733051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.771979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.824939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.876466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.929560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.973313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:20:54.028269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:20:54.075282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:20:54.106449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-28T09:20:54.140736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pro ... 976710714; 2025-03-28T09:21:33.949659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:33.959458Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:33.960990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:33.974039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:33.974276Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:33.982094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:33.987596Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:33.992231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:33.992762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.003597Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.006428Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.011073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.017490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-28T09:21:34.019005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.022584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.023748Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.028775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.029821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.034382Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.034666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.040384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.041270Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.049805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.050009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.055341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.055421Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.064685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.066537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.071784Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.077548Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.077993Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038465;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.083753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.084611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038435;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.089776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.089812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038439;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.094656Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038441;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.095552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038473;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.100461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.101492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.111305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.112175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038467;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.118035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038437;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.122199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:34.207030Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0vvnq4t6ytt9etknp7s0q", SessionId: ydb://session/3?node_id=1&id=NGE0ZGE0OTMtMzQ2ODdiYTUtYTBjOGY5LTlhZjI4ZTNm, Slow query, duration: 38.727424s, status: STATUS_CODE_UNSPECIFIED, user: 
UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:21:34.543621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:34.544016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:34.544739Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7486788057248103053:5806];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-03-28T09:21:34.545067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
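For readability: the query text embedded (with escaped newlines) in the KQP_SLOW_LOG record above unescapes to the following DDL, reproduced verbatim from the log record; nothing here is added beyond whitespace:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);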
:3:9: Warning: Symbol $limit is not used, code: 4527
:2:9: Warning: Symbol $browserGroup is not used, code: 4527
:1:9: Warning: Symbol $quotaName is not used, code: 4527
:4:9: Warning: Symbol $offset is not used, code: 4527
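Aside on the four code-4527 warnings above (the same group is repeated once more below): these are YQL's declared-but-unreferenced parameter diagnostics. The test's actual query text is not in the log; the following is only a minimal sketch of the shape that produces them, assuming standard YQL DECLARE syntax, with placeholder parameter types. The reported positions (:1:9 .. :4:9) are consistent with each $name starting at column 9 after "DECLARE ":

    DECLARE $quotaName AS Utf8;      -- line 1, col 9
    DECLARE $browserGroup AS Utf8;   -- line 2, col 9
    DECLARE $limit AS Uint64;        -- line 3, col 9
    DECLARE $offset AS Uint64;       -- line 4, col 9

    SELECT 1;  -- none of the declared parameters is referenced, so each
               -- DECLARE yields "Warning: Symbol $... is not used, code: 4527"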
:3:9: Warning: Symbol $limit is not used, code: 4527
:2:9: Warning: Symbol $browserGroup is not used, code: 4527
:1:9: Warning: Symbol $quotaName is not used, code: 4527
:4:9: Warning: Symbol $offset is not used, code: 4527 >> KqpJoin::JoinLeftPureCross ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::CrossJoinCount [GOOD] Test command err: Trying to start YDB, gRPC: 28696, MsgBus: 14759 2025-03-28T09:21:37.303378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788124127727850:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:37.303434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017f4/r3tmp/tmphCzexg/pdisk_1.dat 2025-03-28T09:21:37.941605Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:37.978849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:37.978920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:37.994808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28696, node 1 2025-03-28T09:21:38.207935Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:38.207955Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:38.207966Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:38.208071Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14759 TClient is connected to server localhost:14759 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:38.939297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:38.995351Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:39.009196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:21:39.194135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:39.458010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:39.558825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:41.980089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788141307598734:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:41.980203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:42.304299Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788124127727850:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:42.304368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:42.420728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:42.478457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:42.517785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:42.569817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:42.649835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:42.700946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:42.778861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788145602566548:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:42.778955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:42.782809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788145602566553:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:42.790958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:42.813239Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:21:42.813448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788145602566555:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:42.882859Z node 1 :TX_PROXY ERROR: Actor# [1:7486788145602566611:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:44.090479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:44.168475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:44.210253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] >> KqpFlipJoin::LeftSemi_2 [GOOD] >> KqpFlipJoin::LeftSemi_3 |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] >> KqpJoinOrder::TestJoinOrderHintsComplex-ColumnStore >> KqpJoinOrder::FiveWayJoinWithPreds-ColumnStore [GOOD] >> KqpJoin::ExclusionJoin [GOOD] >> KqpJoinOrder::TestJoinHint1-ColumnStore >> KqpJoinOrder::CanonizedJoinOrderTPCH12 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::ExclusionJoin [GOOD] Test command err: Trying to start YDB, gRPC: 27810, MsgBus: 18253 2025-03-28T09:21:41.803373Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788141371254881:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:41.803805Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017ed/r3tmp/tmpiEC6FY/pdisk_1.dat 2025-03-28T09:21:42.603934Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:42.629912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:42.629994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:42.631621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27810, node 1 2025-03-28T09:21:42.875237Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:42.875263Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:42.875270Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:42.875381Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
TClient is connected to server localhost:18253 TClient is connected to server localhost:18253 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:43.831011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:43.874641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:44.181666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:44.470256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:44.586801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:46.719613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788162846093011:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:46.719739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:46.794939Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788141371254881:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:46.795017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:47.241926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:47.291410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:47.326193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:47.400104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:47.438676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:47.492005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:47.576194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788167141060825:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:47.576304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:47.576488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788167141060830:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:47.580597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:47.619201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788167141060832:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:47.719411Z node 1 :TX_PROXY ERROR: Actor# [1:7486788167141060893:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:49.057515Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.113030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.168071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> unstable_connection.py::TestUnstableConnection::test [GOOD] >> KqpJoin::JoinDupColumnRight [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPreds-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 19026, MsgBus: 22457 2025-03-28T09:20:56.499283Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787946359707059:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:56.515959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001828/r3tmp/tmpPSbiGD/pdisk_1.dat 2025-03-28T09:20:57.185615Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:57.206759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:57.206864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:57.217729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19026, node 1 2025-03-28T09:20:57.410642Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:57.410667Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:57.410675Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:57.410790Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22457 TClient is connected to server localhost:22457 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:58.181226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:58.216854Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:00.767840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787963539576909:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:00.767996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:00.775462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787963539576921:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:00.782431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:21:00.800941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787963539576923:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:21:00.896318Z node 1 :TX_PROXY ERROR: Actor# [1:7486787963539576974:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:01.270395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.466891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.499789Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787946359707059:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:01.499848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:01.543907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.620744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.659431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.878448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.915105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.950791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.989490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:21:02.038188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:21:02.082711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:21:02.121093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-28T09:21:02.160089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:02.947433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:21:03.002591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.057551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.102882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.153258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.192419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.258964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.295867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.337272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.370980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.411545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.455478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.502479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.577753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.644767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.686212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.743494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... tablet_id=72075186224038538;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.789047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.794923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.794928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.806631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.810960Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.812712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.816534Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.819020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.825056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.831271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038526;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.835322Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038528;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.837249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038522;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.845251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.846064Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.852068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038516;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.858338Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.860132Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038504;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.865970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.874389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.879170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.885113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.890657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.892743Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.897868Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.906178Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.914473Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038552;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.919323Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.925152Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.932299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-28T09:21:40.932659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038530;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.938182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.938476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.944973Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.951098Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.956039Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.956921Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038532;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.963253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.966809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038518;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.974587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.980159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.985151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038506;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.991399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.997493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:40.998486Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:41.049469Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038476;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:41.071838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038446;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:41.155887Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0w5q21edce9vnnyh67m3a", SessionId: ydb://session/3?node_id=1&id=ZDI0N2E0MGQtMWU2OTc2ODQtMjlmNGJjZjEtN2ExZmI1NzM=, Slow query, duration: 35.392498s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:21:41.463179Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:41.463291Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:41.463751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinDupColumnRight [GOOD] Test command err: Trying to start YDB, gRPC: 8761, MsgBus: 64057 2025-03-28T09:21:43.836145Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788148930910345:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:43.837042Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017eb/r3tmp/tmpaZCC8r/pdisk_1.dat 2025-03-28T09:21:44.572505Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:44.575021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:44.575119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:44.590751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8761, node 1 2025-03-28T09:21:44.843260Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:44.843277Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:44.843283Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:44.843372Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
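For readability, the escaped statement text inside the KQP_SLOW_LOG record above decodes to the following YQL; the content is taken verbatim from the log, with the \n escapes expanded (indentation approximate):

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);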
TClient is connected to server localhost:64057 TClient is connected to server localhost:64057 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:45.793829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:45.815494Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:45.836484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:21:46.053324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:21:46.316885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:46.441970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:48.226888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788170405748607:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:48.227028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:48.667457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:48.733834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:48.789245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:48.838913Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788148930910345:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:48.839020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:48.841578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:48.884438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:48.922722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.023081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788174700716420:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:49.023157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:49.023431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788174700716425:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:49.027354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:49.046523Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:21:49.046908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788174700716427:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:49.145052Z node 1 :TX_PROXY ERROR: Actor# [1:7486788174700716483:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:50.454575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:50.518755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:50.576635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::JoinWithSubquery+StreamLookup >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup >> KqpJoinOrder::TPCDS94-ColumnStore >> ColumnStatistics::CountMinSketchStatistics [GOOD] >> KqpJoin::JoinLeftPureCross [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup [GOOD] >> KqpJoin::RightSemiJoin_ComplexKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> ttl_unavailable_s3.py::TestUnavailableS3::test [GOOD] Test command err: !!! simulating S3 hang up -- sending SIGSTOP !!! simulating S3 recovery -- sending SIGCONT >> KqpJoinOrder::TPCDS92+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinLeftPureCross [GOOD] Test command err: Trying to start YDB, gRPC: 10972, MsgBus: 18441 2025-03-28T09:21:47.430285Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788166309305039:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:47.430648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017e5/r3tmp/tmp2m3yK5/pdisk_1.dat 2025-03-28T09:21:48.125672Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:48.137736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:48.137810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:48.144352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10972, node 1 2025-03-28T09:21:48.397110Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:48.397138Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:48.397149Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:48.397291Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18441 TClient is connected to server localhost:18441 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:49.238423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:49.274310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:49.486052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:49.690407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:21:49.792572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:21:51.679056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788183489175875:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:51.679167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:52.091893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:52.182529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:52.220899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:52.305071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:52.376773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:52.427232Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788166309305039:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:52.427291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:52.469233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:52.566413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788187784143700:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:52.566492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:52.566720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788187784143705:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:52.571197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:52.586630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788187784143707:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:52.687878Z node 1 :TX_PROXY ERROR: Actor# [1:7486788187784143763:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchStatistics [GOOD] Test command err: 2025-03-28T09:19:33.404856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:19:33.405168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:19:33.405236Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fe1/r3tmp/tmpUsd4tC/pdisk_1.dat 2025-03-28T09:19:33.841840Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10167, node 1 2025-03-28T09:19:34.361023Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:19:34.361085Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:19:34.361115Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:19:34.361759Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:19:34.374130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:19:34.480430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:34.480555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:34.495726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25871 2025-03-28T09:19:35.143485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:19:39.100812Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:19:39.144485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:39.144602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:39.197087Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:19:39.200012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:19:39.498089Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:39.498735Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:39.499297Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:39.499465Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:39.499694Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:39.499768Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:39.499839Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:39.499917Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:39.500038Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:39.685488Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:39.685592Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:39.701848Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:19:39.907403Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:39.970624Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:19:39.974422Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:19:40.022098Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:19:40.028715Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:19:40.028965Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:19:40.029075Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:19:40.029130Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:19:40.029205Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:19:40.029265Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:19:40.029321Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:19:40.029942Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:19:40.152733Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1859:2587] 2025-03-28T09:19:40.153610Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:19:40.172084Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:19:40.172140Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:19:40.172214Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:19:40.180259Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:19:40.180392Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1895:2609], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:19:40.201318Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1935:2627] 2025-03-28T09:19:40.201816Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1935:2627], schemeshard id = 72075186224037897 2025-03-28T09:19:40.235593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:19:40.242913Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:19:40.243080Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:19:40.447965Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:19:40.751410Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:19:40.802738Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:19:41.855942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2238:3072], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:41.856098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:19:41.878245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:19:42.081874Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:19:42.082140Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:19:42.082535Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:19:42.082713Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:19:42.082860Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:19:42.083003Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:19:42.083136Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:19:42.083271Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:19:42.083409Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:19:42.083565Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:19:42.083706Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:19:42.083834Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2849];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:19:42.115807Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:19:42.115930Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... ARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-03-28T09:19:44.495978Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000020s 2025-03-28T09:19:54.358561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:19:54.358671Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:55.403240Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:19:55.403341Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:48.156268Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-28T09:21:48.156353Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:21:48.156403Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:21:48.156456Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-28T09:21:49.534843Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-28T09:21:49.534932Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 206.000000s, at schemeshard: 72075186224037897 2025-03-28T09:21:49.535322Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-03-28T09:21:49.549276Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:21:50.468442Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:21:50.468525Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:21:50.468579Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-28T09:21:50.468632Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:21:50.468970Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:21:50.472668Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:21:50.477756Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6968:5159], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:50.477890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6979:5164], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:50.477998Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:50.491197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-28T09:21:50.580119Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6982:5167], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-28T09:21:50.873613Z node 2 :TX_PROXY ERROR: Actor# [2:7078:5213] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:51.092870Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7107:5228]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:21:51.093132Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:21:51.093252Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7109:5230] 2025-03-28T09:21:51.093335Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7109:5230] 2025-03-28T09:21:51.093938Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7110:5231] 2025-03-28T09:21:51.094040Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7110:5231], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:21:51.094130Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-28T09:21:51.094252Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7109:5230], server id = [2:7110:5231], tablet id = 72075186224037894, status = OK 2025-03-28T09:21:51.094331Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:21:51.094413Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7107:5228], StatRequests.size() = 1 2025-03-28T09:21:51.368121Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTRlMzlmMDUtNDU4YTZkMGUtZjA2NTdlNWUtMzU3ZmEzZjI=, TxId: 2025-03-28T09:21:51.368212Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTRlMzlmMDUtNDU4YTZkMGUtZjA2NTdlNWUtMzU3ZmEzZjI=, TxId: 2025-03-28T09:21:51.368804Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:21:51.387916Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:21:51.387985Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:21:51.423362Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:21:51.423457Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:21:51.491467Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7109:5230], schemeshard count = 1 2025-03-28T09:21:53.325293Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:21:53.325360Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:21:53.325404Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-03-28T09:21:53.325449Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:21:53.328388Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:21:53.352256Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:21:53.352895Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:21:53.352986Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:21:53.353902Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:21:53.370102Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:21:53.370419Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:21:53.371066Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7226:5298], server id = [2:7227:5299], tablet id = 72075186224037899, status = OK 2025-03-28T09:21:53.371442Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7226:5298], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:21:53.386285Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:21:53.386422Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:21:53.386746Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:21:53.386976Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:21:53.387250Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7226:5298], server id = [2:7227:5299], tablet id = 72075186224037899 2025-03-28T09:21:53.387292Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:21:53.387498Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:21:53.393554Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:21:53.444942Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7247:5318]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:21:53.445188Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:21:53.445236Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7247:5318], StatRequests.size() = 1 2025-03-28T09:21:53.785799Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YmJiY2E4YTctOWQxMDE2NDktOTQ1MTM1YzYtNjhiODljZDI=, TxId: 2025-03-28T09:21:53.785863Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YmJiY2E4YTctOWQxMDE2NDktOTQ1MTM1YzYtNjhiODljZDI=, TxId: 2025-03-28T09:21:53.797060Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:21:53.798429Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:7255:5448]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:21:53.798828Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:21:53.798900Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:21:53.801158Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:21:53.801226Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:21:53.801300Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:21:53.829883Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithLeftFilter+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 23161, MsgBus: 61331 2025-03-28T09:21:46.610030Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788161914422754:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:46.614618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017e9/r3tmp/tmpayVAHD/pdisk_1.dat 2025-03-28T09:21:47.209878Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:47.215147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:47.215225Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:47.218508Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23161, node 1 2025-03-28T09:21:47.535997Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:47.536014Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:47.536020Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:47.536128Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61331 TClient is connected to server localhost:61331 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:48.318299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:48.351490Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:48.369409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:48.633922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:48.910249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:49.048888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:51.507883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788183389260981:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:51.507972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:51.610244Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788161914422754:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:51.610306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:51.846418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:51.934626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:52.006046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:52.040562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:52.089862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:52.151317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:52.225260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788187684228796:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:52.225342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:52.225719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788187684228801:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:52.230081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:52.246186Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:21:52.246429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788187684228803:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:52.330460Z node 1 :TX_PROXY ERROR: Actor# [1:7486788187684228859:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:53.598908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:53.627577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:53.653784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:21:53.701489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:21:53.737995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:21:53.780884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexKey [GOOD] Test command err: Trying to start YDB, gRPC: 15489, MsgBus: 61541 2025-03-28T09:21:45.920902Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788157939497943:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:45.921759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017ea/r3tmp/tmppaih6x/pdisk_1.dat 2025-03-28T09:21:46.512063Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:46.516023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:46.516089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:46.519913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15489, node 1 2025-03-28T09:21:46.690184Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:46.690209Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:46.690217Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:46.690320Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61541 TClient is connected to server localhost:61541 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:47.682924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:47.707548Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:47.725473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:47.926418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:48.183975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:48.278729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:50.268300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788179414336171:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:50.268424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:50.579874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:50.629767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:50.667235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:50.710859Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:50.751477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:50.842817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:50.923120Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788157939497943:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:50.923232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:50.926106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788179414336689:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:50.926189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:50.926475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788179414336694:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:50.931343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:50.954763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788179414336696:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:51.020226Z node 1 :TX_PROXY ERROR: Actor# [1:7486788183709304049:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:52.421083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:52.459678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:52.540090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:21:52.591510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:21:52.639080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
<main>: Warning: Execution, code: 1060
<main>:4:43: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
<main>: Warning: Execution, code: 1060
<main>:3:56: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpJoinOrder::TPCDS96-ColumnStore [GOOD] >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin [GOOD] >> KqpJoin::JoinWithDuplicates >> KqpFlipJoin::LeftSemi_3 [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin-NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemiJoinWithDuplicatesInRightTable+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 24458, MsgBus: 22748 2025-03-28T09:21:49.478424Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788172334967387:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:49.479218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017e4/r3tmp/tmpiMMdJX/pdisk_1.dat 2025-03-28T09:21:50.151606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:50.151694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:50.155687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:21:50.183434Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24458, node 1 2025-03-28T09:21:50.483790Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:50.483810Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:50.483821Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:50.483923Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22748 TClient is connected to server localhost:22748 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:51.549837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:21:51.603428Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:51.614504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:51.813619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:52.039263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:52.140593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:54.436450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788193809805508:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:54.436541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:54.439515Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788172334967387:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:54.439593Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:54.820499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:54.895941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:54.932732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:55.006848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:55.042708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:55.112539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:55.165231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788198104773327:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:55.165336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:55.165574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788198104773332:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:55.169770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:55.181045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788198104773334:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:55.268140Z node 1 :TX_PROXY ERROR: Actor# [1:7486788198104773388:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:56.507610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.592114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpFlipJoin::Right_1 >> KqpJoinOrder::GeneralPrioritiesBug4 >> KqpJoin::LeftJoinPushdownPredicate_Nulls ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS96-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 24343, MsgBus: 16321 2025-03-28T09:20:57.894546Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787951946613511:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:57.895644Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001823/r3tmp/tmpebxrBH/pdisk_1.dat 2025-03-28T09:20:58.659688Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:58.675896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:58.675968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:58.687651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24343, node 1 2025-03-28T09:20:58.979420Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:58.979453Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:58.979464Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:58.979596Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16321 TClient is connected to server localhost:16321 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:00.021832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:00.046729Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:02.391083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787973421450502:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:02.391170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:02.391857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787973421450514:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:02.396434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:21:02.411969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787973421450516:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:21:02.482430Z node 1 :TX_PROXY ERROR: Actor# [1:7486787973421450567:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:02.891763Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787951946613511:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:02.891826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:03.073846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.225263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.268998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.328690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.376798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.557034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.588545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.621335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.659298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.730628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.791819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:21:03.834953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-28T09:21:03.879510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.612395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:21:04.667820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.746825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.797214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.832850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.904452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:21:04.982785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:21:05.038336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:21:05.101514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:21:05.151313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:21:05.187676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:21:05.222623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:21:05.262148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:21:05.294066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:21:05.340130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:21:05.385801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:21:05.462878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.625586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.631815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.636412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038433;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.637983Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.645737Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.646608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.656958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.660269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.668159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.672272Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.677843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.681772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.691412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.695227Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.699386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.710443Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.711946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.717876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.727934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.733113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.740675Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.741178Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.746778Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.748305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.752825Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.753678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.759129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.759194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.764934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.764941Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.770670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.770804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.776141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.776335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.786784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.793257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.801188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.804660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.814221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.817901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.823253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038459;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.827515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.828337Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.835462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.836521Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:42.973516Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0w6y34tdakj01r47a6q97", SessionId: ydb://session/3?node_id=1&id=MWE5MDcwZjktNGFkYmUzMzAtNjBlY2M5YzgtYjAwNzk3ZWY=, Slow query, duration: 35.961572s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:21:43.597483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:43.598193Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7486788003486228612:2984];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038629; 2025-03-28T09:21:43.598511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:43.599049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::LeftSemi_3 [GOOD] Test command err: Trying to start YDB, gRPC: 24913, MsgBus: 15490 2025-03-28T09:21:40.549971Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788135500671839:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:40.550177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017ee/r3tmp/tmp4m9tdy/pdisk_1.dat 2025-03-28T09:21:41.277048Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:41.295654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:41.295745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:41.298090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24913, node 1 2025-03-28T09:21:41.587210Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:41.587233Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:41.587239Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2025-03-28T09:21:41.587345Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15490 TClient is connected to server localhost:15490 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:42.275062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:42.296898Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:42.301288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:42.461381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:42.688083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:42.785975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:45.044136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788156975509927:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:45.044274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:45.420885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:45.481308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:45.528331Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788135500671839:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:45.528387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:45.587300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:45.662884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:45.703237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:45.750904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:45.810530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788156975510444:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:45.810633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:45.813123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788156975510450:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:45.817184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:45.830206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788156975510452:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:45.913293Z node 1 :TX_PROXY ERROR: Actor# [1:7486788156975510507:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:47.032298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:47.097063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:47.166008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:21:47.214985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17082, MsgBus: 16682 2025-03-28T09:21:49.863943Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788176295105025:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:49.863984Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017ee/r3tmp/tmp6HSlqm/pdisk_1.dat 2025-03-28T09:21:50.204766Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:50.204858Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:50.206617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:21:50.219005Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17082, node 2 2025-03-28T09:21:50.346198Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:50.346227Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:50.346236Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:50.346348Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16682 TClient is connected to server localhost:16682 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:50.960760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:50.976817Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:50.986148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:21:51.069447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:21:51.244665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:51.327842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:54.174866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788197769943275:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:54.174982Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:54.236593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:54.276075Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:54.341048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:54.409707Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:54.462871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:54.544046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:54.640367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788197769943792:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:54.640455Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:54.640677Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788197769943797:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:54.647200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:21:54.663417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788197769943799:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:21:54.772252Z node 2 :TX_PROXY ERROR: Actor# [2:7486788197769943854:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:54.874783Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486788176295105025:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:54.874931Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:56.326247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.424992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.466731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.510419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] >> KqpJoinOrder::TPCDS92-ColumnStore >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup >> KqpJoinOrder::TestJoinOrderHintsSimple-ColumnStore >> KqpJoinOrder::ShuffleEliminationManyKeysJoinPredicate [GOOD] >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] >> KqpIndexLookupJoin::JoinWithSubquery+StreamLookup [GOOD] >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinRightNullFilter+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 24436, MsgBus: 24171 2025-03-28T09:21:55.601576Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788199314306071:2265];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:55.624216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017d5/r3tmp/tmpAxWVDl/pdisk_1.dat 2025-03-28T09:21:56.269898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:56.269999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:56.275937Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:21:56.319021Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24436, node 1 2025-03-28T09:21:56.395937Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:21:56.396202Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:21:56.465738Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:56.465755Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:56.465762Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:56.465877Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24171 TClient is connected to server localhost:24171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:57.467404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:57.487629Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:57.500632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:57.749186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:21:58.003156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:58.088625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:00.131644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788220789144128:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:00.131763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:00.512024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.553775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.604515Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788199314306071:2265];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:00.604643Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:00.620738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.686449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.720313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.800529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.862384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788220789144647:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:00.862450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:00.862779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788220789144652:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:00.866506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:00.880683Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:22:00.881548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788220789144654:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:00.987562Z node 1 :TX_PROXY ERROR: Actor# [1:7486788220789144711:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:02.555129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.595384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.675089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.725824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.799196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.836161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationManyKeysJoinPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 1550, MsgBus: 25914 2025-03-28T09:20:26.983436Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787818363550396:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:26.983979Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00185c/r3tmp/tmpIDdJ8t/pdisk_1.dat 2025-03-28T09:20:27.588361Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:27.593299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:27.593394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:27.597662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1550, node 1 2025-03-28T09:20:27.770234Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:27.770255Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:27.770261Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:27.770400Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25914 TClient is connected to server 
localhost:25914 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:28.524286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:28.542182Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:30.575010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787835543420115:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:30.575063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787835543420127:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:30.575120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:30.586843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:20:30.605829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787835543420129:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:20:30.708778Z node 1 :TX_PROXY ERROR: Actor# [1:7486787835543420180:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:31.164955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:20:31.437720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787839838387738:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:31.437862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787839838387738:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:31.438076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787839838387738:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:31.438148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787839838387738:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:31.438228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787839838387738:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:31.438300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787839838387738:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:31.438358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787839838387738:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:31.438426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787839838387738:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:31.438488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787839838387738:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:31.438549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787839838387738:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:31.438616Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7486787839838387738:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:31.438672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787839838387738:2349];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:31.439242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787839838387742:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:31.439271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787839838387742:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:31.439373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787839838387742:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:31.439432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787839838387742:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:31.439499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787839838387742:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:31.439559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787839838387742:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:31.439615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787839838387742:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:31.439678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787839838387742:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:31.439740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787839838387742:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:31.439805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787839838387742:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:31.439860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787839838387742:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:31.439913Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7486787839838387742:2351];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:31.472869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486787839838387740:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:31.472869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486787839838387764:2362];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstr ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.333936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.338760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.348500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.350412Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.359952Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.364077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.369236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.373634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.378617Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.392273Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.406186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.419960Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.425561Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.441892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.456372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.466020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.480037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.489783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.500012Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.503631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.509800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.513858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.523673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.528547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.533892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.538307Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.548031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.548983Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.554258Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.564683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.568503Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.574465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.578014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.586540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.588139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.593522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.596721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.599055Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.602125Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.604295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.607317Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.610167Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.612836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.616316Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.620483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:51.851190Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0we4v3csd2y1h8ptzdmsa", SessionId: ydb://session/3?node_id=1&id=YzY0MDRmNzQtMzM2ZjljMTgtZTJlZDllNjMtNTVmOWUwZTI=, Slow query, duration: 37.454996s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:21:52.152239Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:52.152652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:52.153681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486788140486154646:10979];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-28T09:21:52.154040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown >> KqpJoin::JoinWithDuplicates [GOOD] >> KqpFlipJoin::Right_1 [GOOD] >> KqpFlipJoin::Right_2 >> KqpJoinOrder::TestJoinOrderHintsSimple+ColumnStore [GOOD] >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup >> KqpJoin::LeftJoinPushdownPredicate_Nulls [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinWithDuplicates [GOOD] Test command err: Trying to start YDB, gRPC: 8760, MsgBus: 32452 2025-03-28T09:21:59.390246Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788217078763370:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:59.390335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017ad/r3tmp/tmp2TKkrP/pdisk_1.dat 2025-03-28T09:22:00.135606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
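For readability, the DDL quoted (with \n escapes) in the KQP_SLOW_LOG record above is reproduced below — a verbatim transcription, not new code. The 37.454996s duration is plausibly dominated by provisioning 240 column-store partitions per table under ASan, though the log does not state the cause:

    CREATE TABLE t1 (
      id1 Int32 NOT NULL,
      PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
      id2 Int64 NOT NULL,
      t1_id1 Int64 NOT NULL,
      -- random_field2 Int32
      PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
      id3 Int16 NOT NULL,
      -- random_field3 Int32
      PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);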
2025-03-28T09:22:00.135702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:00.142997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:22:00.146851Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8760, node 1 2025-03-28T09:22:00.427132Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:00.427152Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:00.427158Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:00.427245Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32452 TClient is connected to server localhost:32452 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:01.298500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:01.333233Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:01.344902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:01.605081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:01.837272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:01.930444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:04.097991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788238553601619:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:04.098117Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:04.391550Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788217078763370:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:04.391603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:04.640563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:04.697191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:04.755754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:04.820001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:04.875833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:04.957276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.059947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788242848569439:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:05.060011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:05.060322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788242848569444:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:05.064274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:05.092165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788242848569446:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:05.175186Z node 1 :TX_PROXY ERROR: Actor# [1:7486788242848569502:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:06.631274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:06.681131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCDS95-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_Nulls [GOOD] Test command err: Trying to start YDB, gRPC: 12033, MsgBus: 1884 2025-03-28T09:22:00.467615Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788223285862040:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:00.467657Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017aa/r3tmp/tmp3mUhtk/pdisk_1.dat 2025-03-28T09:22:01.122510Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:01.129288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:01.129373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:01.130617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12033, node 1 2025-03-28T09:22:01.373445Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:01.373461Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:01.373468Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:01.373560Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1884 TClient is connected to server localhost:1884 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:02.171403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:02.211089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:02.389131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:02.605722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:02.706878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:05.064625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788244760700077:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:05.064734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:05.461583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.474777Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788223285862040:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:05.474824Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:05.511297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.552539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.601538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.657357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.704813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.791067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788244760700591:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:05.791169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:05.791301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788244760700596:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:05.795417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:05.809800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788244760700598:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:05.914951Z node 1 :TX_PROXY ERROR: Actor# [1:7486788244760700654:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:07.282078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.368490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.406905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpJoin::RightTableValuePredicate >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup [GOOD] >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS95-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 4070, MsgBus: 8237 2025-03-28T09:20:53.721738Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787933847795969:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:53.721775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001830/r3tmp/tmpDwTzzq/pdisk_1.dat 2025-03-28T09:20:54.398656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:54.398788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:54.416450Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:54.417899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4070, node 1 2025-03-28T09:20:54.656335Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:54.656366Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:54.656373Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:54.656506Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8237 TClient is connected to server localhost:8237 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:55.351452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:58.018196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787955322633117:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:58.018344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:58.020744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787955322633129:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:58.027168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:20:58.041925Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:20:58.043580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787955322633131:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:20:58.139771Z node 1 :TX_PROXY ERROR: Actor# [1:7486787955322633182:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:58.618760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:20:58.722111Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787933847795969:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:58.722212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:58.786009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:20:58.820726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:20:58.855105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:20:58.929418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:20:59.210234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:20:59.281311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:20:59.339295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:20:59.397253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:20:59.447529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:20:59.498678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:20:59.541806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-28T09:20:59.611810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:00.455698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:21:00.508746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:21:00.557874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:21:00.631162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:21:00.670044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:21:00.705425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:21:00.747012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:21:00.801147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:21:00.844191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:21:00.881524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:21:00.917015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:21:00.957191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:21:00.997008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.053676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.116742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.192933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:21:01.238649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, ... TxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.623953Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.635704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038500;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.639324Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038524;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.641144Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.649524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.650143Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.658327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.659340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.664356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.670487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.676959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.681100Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.682393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.686896Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038554;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.687814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038490;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.692195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.692696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038484;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.698116Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.701405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.703383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038528;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.706568Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.708509Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.711858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.716958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.716963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.722240Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.722252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038566;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.728035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.731824Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.733169Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038522;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.737709Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038556;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.748330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.750936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.809258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.815169Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.836190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.857866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.865911Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.888644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:38.944095Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0w30hcwrfmkctyw3hgrwa", SessionId: ydb://session/3?node_id=1&id=YjgzMjhjNWItYzNhOGVhYjctMzlkOGJmYjYtZWQ4MGU5NWU=, Slow query, duration: 35.950400s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:21:39.672657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:39.673076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:39.673881Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:05.435319Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0xkcs71bjctmfjmamh1gj", SessionId: ydb://session/3?node_id=1&id=YjgzMjhjNWItYzNhOGVhYjctMzlkOGJmYjYtZWQ4MGU5NWU=, Slow query, duration: 12.896087s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n-- NB: Subquerys\n$ws_wh =\n(select ws1.ws_order_number ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2\n from web_sales ws1 cross join web_sales ws2\n where ws1.ws_order_number = ws2.ws_order_number\n and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk);\n-- start query 1 in stream 0 using template query95.tpl and seed 2031708268\n select\n count(distinct ws1.ws_order_number) as `order count`\n ,sum(ws_ext_ship_cost) as `total shipping cost`\n ,sum(ws_net_profit) as `total net profit`\nfrom\n web_sales ws1\n cross join date_dim\n cross join customer_address\n cross join web_site\nwhere\n cast(d_date as date) between cast('2002-4-01' as date) and\n (cast('2002-4-01' as date) + DateTime::IntervalFromDays(60))\nand ws1.ws_ship_date_sk = d_date_sk\nand ws1.ws_ship_addr_sk = ca_address_sk\nand ca_state = 'AL'\nand ws1.ws_web_site_sk = web_site_sk\nand web_company_name = 'pri'\nand ws1.ws_order_number in (select ws_order_number\n from $ws_wh)\nand ws1.ws_order_number in (select wr_order_number\n from web_returns cross join $ws_wh ws_wh\n where wr_order_number = ws_wh.ws_order_number)\norder by `order count`\nlimit 100;\n", parameters: 0b ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsSimple+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 7428, MsgBus: 23115 2025-03-28T09:20:30.736087Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787834465263530:2261];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:30.758772Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001855/r3tmp/tmpeZnnpC/pdisk_1.dat 2025-03-28T09:20:31.319582Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:31.321682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:31.321799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:31.325855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7428, node 1 2025-03-28T09:20:31.465878Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:31.465903Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:31.465910Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:31.466048Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23115 TClient is connected to server localhost:23115 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:32.126267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:32.149570Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:34.579184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787851645133176:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:34.579330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:34.579715Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787851645133188:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:34.584345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:20:34.597899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787851645133190:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:20:34.700204Z node 1 :TX_PROXY ERROR: Actor# [1:7486787851645133241:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:35.105098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:20:35.429241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787855940100818:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:35.430075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787855940100806:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:35.430485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787855940100806:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:35.430776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787855940100806:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:35.430912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787855940100806:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:35.431018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787855940100806:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:35.431025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787855940100818:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:35.431318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787855940100806:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:35.431460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787855940100806:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:35.431578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787855940100806:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:35.431703Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7486787855940100806:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:35.431821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787855940100806:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:35.431926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787855940100806:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:35.432034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787855940100806:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:35.442354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787855940100818:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:35.442483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787855940100818:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:35.442563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787855940100818:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:35.442646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787855940100818:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:35.442739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787855940100818:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:35.442840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787855940100818:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:35.442955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787855940100818:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:35.443285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787855940100818:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:35.443395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787855940100818:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:35.443493Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7486787855940100818:2360];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:35.478803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787855940100808:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:35.478867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787855940100808:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstr ... 10714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.056058Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.060939Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.067646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.072430Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.080016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.086842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.089016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039302;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.092847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.095325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.102162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.107231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.108489Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.114150Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.114548Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.126382Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.127584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.136540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.141936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.151101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039292;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.155857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.157247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.162197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.169282Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.174569Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.179892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.185225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.187671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.196220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.201279Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.207977Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.213945Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.219780Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.223275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.231767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039246;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.234823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.244979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.248717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.258834Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.264828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.271991Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.272415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.279023Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.280527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.368682Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:57.535106Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0wmrf2tsr6m34vzmzmv67", SessionId: ydb://session/3?node_id=1&id=YjNlZDBiYzAtN2QwNmQwN2MtZmI4NWQxMjAtZDY2YmU1ZTI=, Slow query, duration: 36.367421s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:21:57.825204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:57.825582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7486788062098566704:7645];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-03-28T09:21:57.831941Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:57.832222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup [GOOD] >> KqpIndexLookupJoin::LeftSemi >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleLeftSemiJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 17533, MsgBus: 20499 2025-03-28T09:22:03.084030Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788235635501355:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:03.084747Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017a8/r3tmp/tmpIBVoPu/pdisk_1.dat 2025-03-28T09:22:03.885341Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:03.886853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:03.886908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:03.899861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17533, node 1 2025-03-28T09:22:04.214154Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:04.230741Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:04.230768Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:04.230965Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20499 TClient is connected to server localhost:20499 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:05.177330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:22:05.195956Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:05.205151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:05.465126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:22:05.784741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.912822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:08.073932Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788235635501355:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:08.074002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:08.103361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788257110339484:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:08.103462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:08.468987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.524904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.627612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.679464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.732228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.793331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.855080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788257110340003:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:08.855155Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:08.855565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788257110340008:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:08.860393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:08.875828Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:22:08.876213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788257110340010:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:08.982651Z node 1 :TX_PROXY ERROR: Actor# [1:7486788257110340066:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:10.511497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.585307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.615647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.673182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.745577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.792947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::SimpleInnerJoin-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 15314, MsgBus: 8822 2025-03-28T09:22:04.029649Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788236769506532:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:04.030509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017a6/r3tmp/tmpxDhCDR/pdisk_1.dat 2025-03-28T09:22:04.667819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:04.667906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:04.681207Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:22:04.736884Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15314, node 1 2025-03-28T09:22:04.813224Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:22:04.813562Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:22:04.937927Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:04.937950Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:04.937957Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-03-28T09:22:04.938082Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8822 TClient is connected to server localhost:8822 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:05.985585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:22:06.013308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:22:06.237627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:22:06.477729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:06.553891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:08.518267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788253949377462:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:08.518371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:08.822983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.862086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.894140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.929320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.988810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.031700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.036279Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788236769506532:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:09.036327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:09.105756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788258244345273:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:09.105806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:09.105966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788258244345278:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:09.109899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:09.134162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788258244345280:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:09.190254Z node 1 :TX_PROXY ERROR: Actor# [1:7486788258244345334:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:10.567359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.614490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.650210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.685323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.718980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.754780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_correctness.py::TestDataCorrectness::test [GOOD] >> KqpJoinOrder::TPCDS96+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinWithSubquery-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 19013, MsgBus: 10081 2025-03-28T09:21:55.427032Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788199740319425:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:55.427925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017cf/r3tmp/tmp4IEzVC/pdisk_1.dat 2025-03-28T09:21:56.047012Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:56.052819Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:56.052893Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:56.063736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19013, node 1 2025-03-28T09:21:56.431719Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:56.431742Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:56.431770Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-03-28T09:21:56.431872Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10081 TClient is connected to server localhost:10081 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:57.252897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:57.280373Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:57.299157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.551194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:57.742139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:57.833746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:00.128473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788221215157662:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:00.128580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:00.429530Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788199740319425:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:00.429581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:00.472114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.559788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.616254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.657622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.708022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.771359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.845779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788221215158178:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:00.845849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:00.846042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788221215158183:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:00.849635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:00.865529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788221215158185:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:00.932466Z node 1 :TX_PROXY ERROR: Actor# [1:7486788221215158239:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:02.069661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.138029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.183657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.219018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.258632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.338133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4713, MsgBus: 27252 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017cf/r3tmp/tmpo5TdfT/pdisk_1.dat 2025-03-28T09:22:05.099645Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:22:05.121119Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:05.145549Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:05.145626Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:05.258159Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4713, node 2 2025-03-28T09:22:05.447115Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:05.447133Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:05.447143Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:05.447243Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27252 TClient is connected to server localhost:27252 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:06.178012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:06.187054Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:22:06.208828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:06.343033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:06.549448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:06.635151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:09.256024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788258284987083:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:09.256130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:09.299567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.353927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.426587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.459315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.499613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.560262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.628810Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788258284987597:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:09.628915Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:09.629197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788258284987602:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:09.633654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:09.648591Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788258284987604:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:22:09.717747Z node 2 :TX_PROXY ERROR: Actor# [2:7486788258284987657:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:10.898783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.952178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:11.004953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:11.059865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:11.112902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:11.146909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TestJoinHint2+ColumnStore >> KqpJoinOrder::TPCDS90-ColumnStore >> OlapEstimationRowsCorrectness::TPCH3 >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS64_small [GOOD] Test command err: Trying to start YDB, gRPC: 24021, MsgBus: 9420 2025-03-28T09:20:26.682382Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787818330205568:2087];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:26.706747Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00185e/r3tmp/tmpDMf1FU/pdisk_1.dat 2025-03-28T09:20:27.272349Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:27.305427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:27.305531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:27.314385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24021, node 1 2025-03-28T09:20:27.490985Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:27.491004Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:27.491011Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: 
(empty maybe) 2025-03-28T09:20:27.491126Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9420 TClient is connected to server localhost:9420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:28.394336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:28.419224Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:30.781495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787835510075384:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:30.781629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:30.782291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787835510075396:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:30.791193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:20:30.803498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787835510075398:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:20:30.902209Z node 1 :TX_PROXY ERROR: Actor# [1:7486787835510075449:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:31.295360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:20:31.525399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486787839805043019:2354];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:31.525625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486787839805043019:2354];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:31.525984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486787839805043019:2354];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:31.526127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486787839805043019:2354];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:31.526258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486787839805043019:2354];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:31.526402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486787839805043019:2354];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:31.526516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486787839805043019:2354];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:31.526633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486787839805043019:2354];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:31.527153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787839805042930:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:31.527222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787839805042930:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:31.527397Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486787839805042930:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:31.527513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787839805042930:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:31.527517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486787839805043019:2354];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:31.527613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787839805042930:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:31.527684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486787839805043019:2354];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:31.527732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787839805042930:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:31.527837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787839805042930:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:31.527841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486787839805043019:2354];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:31.527943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787839805042930:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:31.527949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486787839805043019:2354];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:31.528058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787839805042930:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:31.528167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787839805042930:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:31.528266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787839805042930:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:31.528369Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486787839805042930:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:31.573582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486787839805043023:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:31.573663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486787839805043023:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstra ... tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.482249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.486552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.497426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.499012Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.508745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.511805Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.514661Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.517500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.520354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.525953Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.527059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.532810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.538663Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.540104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.547516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.549918Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.554268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.559626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.564851Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.565433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.570848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.571463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.576382Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.578322Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.655118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.655836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.660660Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.667820Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-28T09:21:53.673189Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.678107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.682944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.688488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.693639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.698766Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.703547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.708942Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.714534Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.719582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.724846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.730731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.731063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.737477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.737814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.746042Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.748156Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.752305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:53.838412Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0whjzcjaj6g0vwczh6k9f", SessionId: ydb://session/3?node_id=1&id=OGE2ODBhYTAtNTI4ZTZhN2UtZDU5MThkNDctYTdiYzM2MGY=, Slow query, duration: 35.918804s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:21:54.539810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:54.540480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:54.540864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown [GOOD] >> KqpJoin::ComplexJoin >> KqpFlipJoin::Right_2 [GOOD] >> KqpJoinOrder::TPCDS23+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::LeftJoinPushdownPredicate_NoPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 17531, MsgBus: 30109 2025-03-28T09:22:09.045057Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788258924269913:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:09.045089Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017a5/r3tmp/tmpfWuOf4/pdisk_1.dat 2025-03-28T09:22:09.783341Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:09.791593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:09.791658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:09.800441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17531, node 1 2025-03-28T09:22:10.087777Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:10.087809Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:10.087821Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:10.087940Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30109 TClient is connected to server localhost:30109 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:11.155894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:11.200873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:11.478445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:11.751548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:11.854017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:13.819415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788276104140753:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:13.819511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:14.050783Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788258924269913:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:14.050854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:14.188569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:14.270502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:14.330390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:14.394287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:14.458730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:14.547564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:14.623432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788280399108570:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:14.623538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:14.623834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788280399108576:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:14.627710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:14.647275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788280399108578:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:14.746926Z node 1 :TX_PROXY ERROR: Actor# [1:7486788280399108633:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:16.147283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:16.187626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:16.242033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt32ToInt16-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 15433, MsgBus: 27325 2025-03-28T09:21:59.687149Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788218058382880:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:59.687560Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017b5/r3tmp/tmpYmahfm/pdisk_1.dat 2025-03-28T09:22:00.319098Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:00.337690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:00.337785Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:00.340373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15433, node 1 2025-03-28T09:22:00.571216Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:00.571259Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:00.571267Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:00.571367Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27325 TClient is connected to server localhost:27325 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:01.430804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:01.488835Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:01.521114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:22:01.814391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:02.048099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:02.134642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:04.187530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788239533220993:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:04.187655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:04.500656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:04.550256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:04.633410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:04.659142Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788218058382880:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:04.659212Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:04.695916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:04.745304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:04.814206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:04.911219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788239533221510:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:04.911307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:04.911670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788239533221515:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:04.915751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:04.939335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788239533221517:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:05.014676Z node 1 :TX_PROXY ERROR: Actor# [1:7486788243828188867:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:06.338011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:06.470315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9939, MsgBus: 5046 2025-03-28T09:22:09.059499Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788262236137827:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:09.059551Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017b5/r3tmp/tmpw9jL5w/pdisk_1.dat 2025-03-28T09:22:09.302184Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9939, node 2 2025-03-28T09:22:09.347271Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:09.347355Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:09.466054Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:22:09.490996Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:09.491018Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:09.491025Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:09.491131Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5046 TClient is connected to server localhost:5046 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:22:10.103209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:10.113417Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:22:10.121274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:10.243030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:10.474170Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:10.568510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:13.875820Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788279416008758:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:13.875909Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:13.935440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:13.986800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:14.020405Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486788262236137827:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:14.020454Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:14.039485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:14.088197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:14.146233Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:14.204561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:14.292883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788283710976569:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:14.292967Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:14.293142Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788283710976574:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:14.297515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:14.323002Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788283710976576:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:22:14.413631Z node 2 :TX_PROXY ERROR: Actor# [2:7486788283710976632:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:15.981909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:16.136755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCDS87-ColumnStore >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::Right_2 [GOOD] Test command err: Trying to start YDB, gRPC: 30747, MsgBus: 13121 2025-03-28T09:21:59.843371Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788217110309523:2137];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:59.858996Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017ab/r3tmp/tmp3X2pGy/pdisk_1.dat 2025-03-28T09:22:00.636182Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:00.640970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:00.641087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:00.645932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30747, node 1 2025-03-28T09:22:00.863555Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:00.863572Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:00.863579Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:00.863669Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13121 TClient is connected to server localhost:13121 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:01.749567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:01.771859Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:01.782095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:02.024848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:02.253285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:02.360083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:04.628722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788238585147697:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:04.628842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:04.850779Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788217110309523:2137];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:04.850838Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:04.873230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:04.918156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:04.973050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.015491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.053232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.114045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.203730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788242880115508:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:05.203830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:05.204096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788242880115513:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:05.208413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:05.224005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788242880115515:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:05.319664Z node 1 :TX_PROXY ERROR: Actor# [1:7486788242880115570:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:06.838612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:06.884251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:06.925507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:06.971194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 31399, MsgBus: 62116 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017ab/r3tmp/tmp2kV7A3/pdisk_1.dat 2025-03-28T09:22:09.566815Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:22:09.666129Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:09.773752Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:09.774212Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:09.776239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31399, node 2 2025-03-28T09:22:09.868993Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:09.869011Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:09.869018Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:09.869127Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62116 TClient is connected to server localhost:62116 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:10.406145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:10.426883Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:10.443158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:10.530897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:10.802554Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:10.882034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:13.592036Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788276798202022:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:13.592125Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:13.671847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:13.712581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:13.755417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:13.800389Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:13.868736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:13.935595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:13.994057Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788276798202536:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:13.994143Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:13.994429Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788276798202541:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:13.998377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:14.011698Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:22:14.013138Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788276798202543:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:14.101584Z node 2 :TX_PROXY ERROR: Actor# [2:7486788281093169895:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:15.361520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:15.412370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:15.469889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:15.559559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::CanonizedJoinOrderTPCH11 >> KqpJoinOrder::CanonizedJoinOrderTPCH18 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftJoinOnlyRightColumn-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 12142, MsgBus: 8477 2025-03-28T09:22:10.163179Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788262942805983:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:10.164071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017a1/r3tmp/tmp7SRRL2/pdisk_1.dat 2025-03-28T09:22:10.869707Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:10.870122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:10.870203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:10.873392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12142, node 1 2025-03-28T09:22:11.101314Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:11.101336Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:11.101343Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:11.101436Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8477 TClient is connected to server localhost:8477 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:12.035005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:12.075223Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:12.090202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:12.355519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:12.553274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:12.657288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:14.865058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788280122676916:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:14.865165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:15.164200Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788262942805983:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:15.185932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:15.300507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:15.363923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:15.461968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:15.522103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:15.572341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:15.633934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:15.717890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788284417644734:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:15.717967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:15.718321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788284417644739:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:15.722519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:15.741967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788284417644741:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:15.825998Z node 1 :TX_PROXY ERROR: Actor# [1:7486788284417644798:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:17.051246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:17.163069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:17.224006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:17.271376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:17.346762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:17.435025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::CanonizedJoinOrderTPCH20 >> KqpJoin::RightTableValuePredicate [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightTableValuePredicate [GOOD] Test command err: Trying to start YDB, gRPC: 2124, MsgBus: 26119 2025-03-28T09:22:13.178763Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788275925828220:2179];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:13.178831Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00179f/r3tmp/tmpsr4J6k/pdisk_1.dat 2025-03-28T09:22:13.982712Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:13.999633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:13.999717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:14.001426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2124, node 1 2025-03-28T09:22:14.282192Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:14.282212Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-03-28T09:22:14.282222Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:14.282312Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26119 TClient is connected to server localhost:26119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:15.279605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:15.293006Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:15.310167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:15.517311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:15.791590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:15.889933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:18.120373Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788275925828220:2179];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:18.120438Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:18.204728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788297400666357:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:18.204826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:18.575902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:18.615276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:18.651650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:18.712483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:18.747905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:18.801860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:18.864629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788297400666870:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:18.864665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:18.865082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788297400666875:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:18.868343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:18.878133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788297400666877:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:18.967849Z node 1 :TX_PROXY ERROR: Actor# [1:7486788297400666933:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:20.139310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::LeftSemi [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH3 >> KqpJoinOrder::FourWayJoinLeftFirst-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftSemi [GOOD] Test command err: Trying to start YDB, gRPC: 7231, MsgBus: 2021 2025-03-28T09:22:14.477824Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788281874352447:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:14.478136Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00179d/r3tmp/tmpi8Qbyr/pdisk_1.dat 2025-03-28T09:22:15.156500Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:15.169656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:15.169745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:15.175706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7231, node 1 2025-03-28T09:22:15.403640Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:15.403661Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:15.403668Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:15.403771Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2021 TClient is connected to server localhost:2021 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:22:16.404901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:16.445248Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:16.466227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:16.677150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:16.940235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:17.053659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:19.324447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788303349190556:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:19.324567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:19.462783Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788281874352447:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:19.473835Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:19.696086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:19.783627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:19.857582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:19.898471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:19.968550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:20.030178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:20.081571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788307644158372:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:20.081648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:20.081801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788307644158377:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:20.085636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:20.100797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788307644158379:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:20.178624Z node 1 :TX_PROXY ERROR: Actor# [1:7486788307644158432:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:21.580348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:21.623322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:21.664813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:21.698389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:21.734309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:21.771460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::CanonizedJoinOrderTPCH10 [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex >> KqpIndexLookupJoin::LeftOnly-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH10 [GOOD] Test command err: Trying to start YDB, gRPC: 17838, MsgBus: 8619 2025-03-28T09:20:36.705838Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787860031465009:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:36.705868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00184b/r3tmp/tmpRrr7Of/pdisk_1.dat 2025-03-28T09:20:37.326198Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:37.359621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:37.359703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:37.362032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17838, node 1 2025-03-28T09:20:37.655371Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:37.655432Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:37.655452Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:37.655608Z node 1 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:8619 TClient is connected to server localhost:8619 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:38.461727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:38.522733Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:40.872392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787877211334693:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:40.872534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:40.872960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787877211334705:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:40.877110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:20:40.889277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787877211334707:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:20:40.988634Z node 1 :TX_PROXY ERROR: Actor# [1:7486787877211334758:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:41.392566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:20:41.714940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486787881506302300:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:41.715232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486787881506302300:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:41.715463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787881506302319:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:41.715492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787881506302319:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:41.715546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486787881506302300:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:41.715643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787881506302319:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:41.715676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486787881506302300:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:41.715708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787881506302319:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:41.715829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486787881506302300:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:41.716409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787881506302319:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:41.716677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486787881506302300:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
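The long run of TX_COLUMNSHARD WARN lines that follows (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, ...) is each column-shard tablet registering its schema normalizers during TTxInitSchema; it is startup noise, not an error. Column-shard tablets are created throughout this test via ESchemeOpCreateColumnTable, and one such creating query is captured in escaped form by the KQP_SLOW_LOG entry later in this test's output. Unescaped, that query text reads:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

With AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 per table, this batch provisions hundreds of column shards, which accounts for both the volume of normalizer WARN lines and the slow-query duration reported for it.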
2025-03-28T09:20:41.716898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787881506302319:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:41.717091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486787881506302300:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:41.717257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787881506302319:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:41.717443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486787881506302300:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:41.717657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787881506302319:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:41.717874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486787881506302300:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:41.718058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787881506302319:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:41.718297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486787881506302300:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:41.718482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787881506302319:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:41.718671Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486787881506302300:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:41.718845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787881506302319:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:41.719005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486787881506302300:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:41.719146Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7486787881506302319:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:41.764761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486787881506302323:2360];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:41.764761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787881506302311:2354];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstra ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.543116Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.553147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.553634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.559465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039199;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.567871Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.569128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.574465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.580234Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.586349Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.589214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.592271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.599776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.599887Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.606263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.606361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.612584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.612628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039187;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.618852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.618852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.624656Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.626527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.629301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.634220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.640187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.641346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.646778Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.646799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.655232Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.660059Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.660833Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.668386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.668386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.673879Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.679394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.683754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.688582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.692908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.703515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.705244Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.711977Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.714466Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.725891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.727582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.734023Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.828838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.974887Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0wtwbc0q614bet4nsn32k", SessionId: ydb://session/3?node_id=1&id=ZTJiNTAyOGUtNGMzODc2NTctMTU5MzhlYTAtYTMwMjVlODk=, Slow query, duration: 34.539004s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:02.340906Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:02.341386Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:02.342050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486788143499353906:9422];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-28T09:22:02.342566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::GeneralPrioritiesBug2 [GOOD] >> KqpJoinOrder::TPCDS34+ColumnStore [GOOD] >> KqpJoin::ComplexJoin [GOOD] >> OlapEstimationRowsCorrectness::TPCDS96 [GOOD] >> KqpJoinOrder::TPCDS16-ColumnStore >> KqpJoinOrder::FiveWayJoinWithConstantFold+ColumnStore [GOOD] >> TColumnShardTestSchema::InternalTTL_Types [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::ComplexJoin [GOOD] Test command err: Trying to start YDB, gRPC: 3153, MsgBus: 11782 2025-03-28T09:22:18.443630Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788297172890378:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:18.478266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001795/r3tmp/tmpkoMhht/pdisk_1.dat 2025-03-28T09:22:19.093434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:19.093527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:19.095391Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:22:19.127130Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3153, node 1 2025-03-28T09:22:19.347263Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:19.347290Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:19.347301Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:19.347422Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11782 TClient is connected to server localhost:11782 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:20.393764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:20.445669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:20.651997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:20.836424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:20.949967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:23.403987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788318647728484:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:23.404140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:23.442941Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788297172890378:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:23.443030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:24.110302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.154358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.190804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.249267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.298364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.354571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.462038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788322942696306:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:24.462339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:24.462661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788322942696312:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:24.470517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:24.489476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788322942696314:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:24.586301Z node 1 :TX_PROXY ERROR: Actor# [1:7486788322942696372:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:25.922146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:25.970486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:26.014497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:26.054809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:26.089520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug2 [GOOD] Test command err: Trying to start YDB, gRPC: 15126, MsgBus: 2467 2025-03-28T09:21:38.924373Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788125834656482:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:38.924860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017f2/r3tmp/tmpB7IiPV/pdisk_1.dat 2025-03-28T09:21:39.872890Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:39.874599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:39.875049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:39.914389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15126, node 1 2025-03-28T09:21:40.155328Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:40.155348Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:40.155354Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:40.155458Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2467 TClient is connected to server localhost:2467 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:41.434242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:43.640690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788147309493507:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:43.640776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788147309493498:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:43.640918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:43.645009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:21:43.666957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788147309493512:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:21:43.746812Z node 1 :TX_PROXY ERROR: Actor# [1:7486788147309493563:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:43.894889Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788125834656482:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:43.895023Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:44.131843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:21:44.245688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:21:44.298207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:44.336483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:44.416610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:44.578572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:44.617903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:44.678622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:44.753507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:21:44.811282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:21:44.849339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:21:44.894641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
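Note: the KQP_SLOW_LOG entry further below in this test's output embeds the same three-table DDL reconstructed earlier (t1/t2/t3 with STORE = COLUMN and AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240, escaped in the log); its ~33.7s duration is the column-store CREATE TABLE batch tripping the slow-query threshold, the same pattern as in the TPCH10 output above, not a query failure.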
2025-03-28T09:21:44.932453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:45.771256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:21:45.807051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:21:45.884708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:21:45.922280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:21:45.988864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:21:46.053938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:21:46.119701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:21:46.186313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:21:46.247817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:21:46.292754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:21:46.324295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:21:46.396897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:21:46.435561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:21:46.474659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:21:46.506374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:21:46.537379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:21:46.570030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-28T09:21:46.608890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but prop ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.486052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.492339Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.492659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.500728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.501742Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.506975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.507233Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.513207Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.520609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.524732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.531676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.535002Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.544532Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.549891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.554203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.557667Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.560092Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.566134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.566917Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.573084Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038441;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.577484Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.587460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.591773Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.593303Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.597638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.599297Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.605806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.605812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.611871Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.612633Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.618415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.618414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.624399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.625363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.630290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.630674Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.636306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.639068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.642388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.644651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.649339Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.650516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.655561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.656713Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.661822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.783163Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0xf1v0y32s6t9d7sfy6av", SessionId: ydb://session/3?node_id=1&id=ZDBhNWFkMjctYzY3MTM0ZDEtY2EwY2NhZTMtYTk4MGU3N2U=, Slow query, duration: 33.690867s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:22.052694Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:22.053106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:22.054015Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7486788254683697703:5504];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-03-28T09:22:22.054335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS34+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 9163, MsgBus: 17607 2025-03-28T09:20:29.748224Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787832512569313:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:29.748712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001858/r3tmp/tmpQKSp0R/pdisk_1.dat 2025-03-28T09:20:30.324339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:30.324417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:30.344145Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:20:30.379357Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9163, node 1 2025-03-28T09:20:30.564678Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-03-28T09:20:30.564707Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:30.564714Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:30.564854Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17607 TClient is connected to server localhost:17607 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:31.183622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:33.736000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787849692439024:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:33.736087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:33.736221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787849692439036:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:33.748792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:20:33.782078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787849692439038:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:20:33.843784Z node 1 :TX_PROXY ERROR: Actor# [1:7486787849692439091:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:34.388903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:20:34.657874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787853987406661:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:34.663005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787853987406661:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:34.663306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787853987406661:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:34.663402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787853987406661:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:34.663530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787853987406661:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:34.663661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787853987406661:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:34.663771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787853987406661:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:34.663885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787853987406661:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:34.664007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787853987406661:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:34.664125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787853987406661:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:34.664252Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7486787853987406661:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:34.664356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787853987406661:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:34.667127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486787853987406696:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:34.667173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486787853987406696:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:34.667379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486787853987406696:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:34.667490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486787853987406696:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:34.667585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486787853987406696:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:34.667747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486787853987406696:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:34.667854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486787853987406696:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:34.667947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486787853987406696:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:34.668042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486787853987406696:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:34.668169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486787853987406696:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:34.668272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486787853987406696:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:34.668367Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7486787853987406696:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:34.755459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787853987406659:2354];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:34.760561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486787853987406649:2351];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:34.760666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486787853 ... LUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.523049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.528500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.531318Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.540850Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.546250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.559997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.564474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.574668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.583802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.594461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039252;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.599857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.605378Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.610643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.611638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.617042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.626135Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.633216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.638446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.640014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.648110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.651176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.660611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.664680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.675769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.680068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.685262Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.689599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.691862Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.694578Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.697037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.702405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.708237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039256;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.714896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.717399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.720016Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:21:58.920549Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0wmna8djyzqdhs92k5p9h", SessionId: ydb://session/3?node_id=1&id=OWRmOWY4MjgtNmQwOWE3NjQtNjhkMmQ2NTItODYxNjgxNDM=, Slow query, duration: 37.853784s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:21:59.498288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:59.498703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:21:59.499040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486788111685490081:9195];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-03-28T09:21:59.499401Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:20.525706Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0y3jhes20zwmv6z5m35y4", SessionId: ydb://session/3?node_id=1&id=OWRmOWY4MjgtNmQwOWE3NjQtNjhkMmQ2NTItODYxNjgxNDM=, Slow query, duration: 11.418445s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query34.tpl and seed 1971067816\nselect c_last_name\n ,c_first_name\n ,c_salutation\n ,c_preferred_cust_flag\n ,ss_ticket_number\n ,cnt from\n (select store_sales.ss_ticket_number ss_ticket_number\n ,store_sales.ss_customer_sk ss_customer_sk\n ,count(*) cnt\n from store_sales as store_sales \n cross join date_dim as date_dim\n cross join store as store\n cross join household_demographics as household_demographics\n where store_sales.ss_sold_date_sk = date_dim.d_date_sk\n and store_sales.ss_store_sk = store.s_store_sk\n and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk\n and (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28)\n and (household_demographics.hd_buy_potential = '>10000' or\n household_demographics.hd_buy_potential = 'Unknown')\n and household_demographics.hd_vehicle_count > 0\n and (case when household_demographics.hd_vehicle_count > 0\n\t then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count\n\t else null\n\t end) > 1.2\n and date_dim.d_year in (2000,2000+1,2000+2)\n and store.s_county in ('Salem County','Terrell County','Arthur County','Oglethorpe County',\n 'Lunenburg County','Perry County','Halifax County','Sumner County')\n group by store_sales.ss_ticket_number,store_sales.ss_customer_sk) dn \n cross join customer as customer\n where ss_customer_sk = c_customer_sk\n and cnt between 15 and 20\n order by c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc, ss_ticket_number;\n\n-- end query 1 in stream 0 using template query34.tpl", parameters: 0b >> OlapEstimationRowsCorrectness::TPCH10 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCDS96 [GOOD] Test command err: Trying to start YDB, gRPC: 1994, MsgBus: 21190 2025-03-28T09:20:42.576599Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787888025271839:2270];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:42.576900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001848/r3tmp/tmpRVFf5d/pdisk_1.dat 2025-03-28T09:20:43.020203Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:43.025031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:43.025137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:43.027898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1994, node 1 2025-03-28T09:20:43.122542Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2025-03-28T09:20:43.122563Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:43.122569Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:43.122657Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21190 TClient is connected to server localhost:21190 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:43.741589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:43.768215Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:45.842442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787900910174188:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:45.842447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787900910174177:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:45.842572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:20:45.855251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:20:45.867379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787900910174191:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:20:45.970633Z node 1 :TX_PROXY ERROR: Actor# [1:7486787900910174243:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:46.379681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:20:46.628312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787905205141802:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:46.628559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787905205141802:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:46.628691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787905205141804:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:46.628728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787905205141804:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:46.628848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787905205141802:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:46.628958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787905205141802:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:46.629055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787905205141802:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:46.629171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787905205141802:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:46.629269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787905205141802:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:46.629370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787905205141802:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:46.629482Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7486787905205141802:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:46.629613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787905205141802:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:46.629636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787905205141804:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:46.629739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787905205141802:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:46.629743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787905205141804:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:46.629846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787905205141802:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:46.629949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787905205141804:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:46.630039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787905205141804:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:46.630133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787905205141804:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:46.630236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787905205141804:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:46.630361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787905205141804:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:46.630465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787905205141804:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:46.630551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787905205141804:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:46.630642Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7486787905205141804:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:46.670705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787905205141818:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:46.670787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787905205141818:2355];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstr ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:09.945572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:09.951410Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:09.953854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:09.958728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:09.965703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:09.976609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:09.978792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:09.982829Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:09.984799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:09.989041Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:09.990864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:09.995882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:09.996757Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.002634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.002783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.009085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.009345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.016327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.020443Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.023624Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.032094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.032192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.038784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.038783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.048610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.048618Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.059180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.062905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.069888Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.073108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.085050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.087636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.097884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.099662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.110207Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.112439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.124612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.128260Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.139604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.142846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.150077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.152479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.167242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.191444Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.520408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039204;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:10.625531Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0x01y5nmpvj560r7h73mh", SessionId: ydb://session/3?node_id=1&id=ZTU5NTllOTAtNzlkNmFmNy0zOGZiNjY3Ny1mODk5YmYwOA==, Slow query, duration: 37.890137s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:11.303748Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:11.304246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:11.304676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486788167198193500:9317];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-03-28T09:22:11.305035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::GeneralPrioritiesBug1 [GOOD] >> test_retry.py::TestRetry::test_fail_first[kikimr0] [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin-NotNull >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin-NotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::InternalTTL_Types [GOOD] Test command err: 2025-03-28T09:20:11.454798Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:20:11.567504Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:20:11.572185Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:20:11.572897Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:20:11.599213Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:20:11.599551Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 
2025-03-28T09:20:11.609622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:11.609868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:11.610110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:11.610216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:11.610307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:11.610412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:11.610508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:11.610613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:11.610809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:11.610911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:11.611037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:11.611148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:11.639291Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:20:11.661740Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:20:11.662204Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:20:11.662268Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:20:11.662468Z node 1 :TX_COLUMNSHARD 
INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:20:11.662630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:20:11.662735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:20:11.662802Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:20:11.662908Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:20:11.662997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:20:11.663044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:20:11.663084Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:20:11.663280Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:20:11.663361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:20:11.663420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:20:11.663455Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:20:11.663551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:20:11.663612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:20:11.663677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:20:11.663705Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:20:11.663782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:20:11.663831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:20:11.663866Z 
node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:20:11.663925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:20:11.663967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:20:11.663994Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:20:11.664406Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-03-28T09:20:11.664508Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2025-03-28T09:20:11.664593Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-03-28T09:20:11.664684Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-03-28T09:20:11.664883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:20:11.664960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:20:11.664996Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:20:11.665191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:20:11.666224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:20:11.666341Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:20:11.666572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:20:11.666618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:20:11.666651Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:20:11.666931Z node 1 :TX_COLUMNSHARD WARN: 
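
The run above shows the columnshard normalizer lifecycle end to end: TTxInitSchema registers the chain (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, SyncPortionFromChunks, SyncMinSnapshotFromChunks, RestoreV1Chunks_V2, RestoreV2Chunks), then TTxUpdateSchema walks it in order, emitting normalizer_init / normalizer_finished pairs with non-contiguous seq_ids (1, 2, 4, 6, 8, 9, 10, 11, 13, 15, 16). A minimal sketch for cross-checking a captured log against that contract, assuming only the record fields visible above; the helper name and approach are illustrative, not YDB tooling:

import re
from collections import OrderedDict

REGISTER = re.compile(r"event=normalizer_register;description=CLASS_NAME=(\w+)")
FINISHED = re.compile(r"event=normalizer_finished;description=CLASS_NAME=(\w+)")

def unfinished_normalizers(log_text: str) -> list[str]:
    """Normalizers that were registered but never reported normalizer_finished."""
    registered = OrderedDict()          # keep registration order
    for name in REGISTER.findall(log_text):
        registered[name] = False
    for name in FINISHED.findall(log_text):
        if name in registered:
            registered[name] = True
    return [name for name, done in registered.items() if not done]

On this excerpt only RestoreV2Chunks can come back, because its normalizer_finished record falls past the truncation point.
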
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:20:11.666983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:20:11.667016Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T0 ... =saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:22:31.086362Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:22:31.086400Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:22:31.086438Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:22:31.086587Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:22:31.088358Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:22:31.088416Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:22:31.088515Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=71; 2025-03-28T09:22:31.088580Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:245;stage=data_format;batch_size=568;num_rows=71;batch_columns=saved_at; 2025-03-28T09:22:31.088739Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[4:593:2609];bytes=568;rows=71;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2025-03-28T09:22:31.088849Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:22:31.088966Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:22:31.089080Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:22:31.089216Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:22:31.089323Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:22:31.089413Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:22:31.089457Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:600:2616] finished for tablet 9437184 2025-03-28T09:22:31.090041Z node 4 :TX_COLUMNSHARD_SCAN INFO: 
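
Between ack and completion the scan actor loops through the stages visible above: TEvScanDataAck grants a quota of limits:(bytes=8388608;chunks=1), "produce result" drains the iterator, data_format packs the batch (568 bytes, 71 rows of saved_at: uint64), send_data hands it to the compute actor, and an empty finished=1 batch closes the stream. A quick way to total what each compute actor received, assuming the field layout of the send_data records above (helper is illustrative):

import re
from collections import defaultdict

SEND = re.compile(r"event=send_data;compute_actor_id=(\[[^\]]+\]);bytes=(\d+);rows=(\d+)")

def totals_per_compute_actor(log_text: str) -> dict:
    """Sum bytes/rows delivered via send_data events, keyed by compute actor id."""
    totals = defaultdict(lambda: [0, 0])
    for actor, b, r in SEND.findall(log_text):
        totals[actor][0] += int(b)
        totals[actor][1] += int(r)
    return dict(totals)

For the window above this yields [4:593:2609] -> 568 bytes / 71 rows, plus the zero-byte closing batch.
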
SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[4:593:2609];stats={"p":[{"events":["f_bootstrap"],"t":0.108},{"events":["f_ProduceResults"],"t":0.734},{"events":["l_bootstrap"],"t":1.064},{"events":["f_processing","f_task_result"],"t":1.094},{"events":["l_task_result"],"t":9.355},{"events":["f_ack"],"t":9.402},{"events":["l_ProduceResults","f_Finish"],"t":10.497},{"events":["l_ack","l_processing","l_Finish"],"t":10.498}],"full":{"a":1743153740591486,"name":"_full_task","f":1743153740591486,"d_finished":0,"c":0,"l":1743153751089516,"d":10498030},"events":[{"name":"bootstrap","f":1743153740700067,"d_finished":956205,"c":1,"l":1743153741656272,"d":956205},{"a":1743153751089196,"name":"ack","f":1743153749994172,"d_finished":1007999,"c":903,"l":1743153751089110,"d":1008319},{"a":1743153751089181,"name":"processing","f":1743153741685985,"d_finished":4439519,"c":4515,"l":1743153751089112,"d":4439854},{"name":"ProduceResults","f":1743153741325806,"d_finished":1821221,"c":5420,"l":1743153751089439,"d":1821221},{"a":1743153751089443,"name":"Finish","f":1743153751089443,"d_finished":0,"c":0,"l":1743153751089516,"d":73},{"name":"task_result","f":1743153741686015,"d_finished":3326564,"c":3612,"l":1743153749946557,"d":3326564}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:22:31.090129Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[4:593:2609];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:22:31.090652Z node 4 :TX_COLUMNSHARD_SCAN INFO: 
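
The stats= payload on scan_finish is plain JSON: "p" lists per-event offsets in seconds relative to scan start, while "full" and each entry in "events" carry epoch-microsecond bounds ("f"/"l"), accumulated time "d_finished", and call count "c". A short summarizer, assuming the blob has already been cut out of the log line:

import json

def summarize_scan_stats(blob: str) -> None:
    """Print per-stage totals from a TX_COLUMNSHARD_SCAN stats={...} payload."""
    stats = json.loads(blob)
    total_us = stats["full"]["d"]                  # whole scan, microseconds
    print(f"total: {total_us / 1e6:.3f}s")
    for ev in stats["events"]:
        share = 100 * ev["d_finished"] / total_us
        print(f'{ev["name"]:>14}: {ev["d_finished"] / 1e6:.3f}s '
              f'over {ev["c"]} calls ({share:.1f}%)')

Applied to the first blob above: 10.498s total, with processing at ~4.44s over 4515 calls, task_result at ~3.33s over 3612 calls, and ack at ~1.01s over 903 calls, matching the committed_batches=903 counter reported a few records later.
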
SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[4:593:2609];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.108},{"events":["f_ProduceResults"],"t":0.734},{"events":["l_bootstrap"],"t":1.064},{"events":["f_processing","f_task_result"],"t":1.094},{"events":["l_task_result"],"t":9.355},{"events":["f_ack"],"t":9.402},{"events":["l_ProduceResults","f_Finish"],"t":10.497},{"events":["l_ack","l_processing","l_Finish"],"t":10.498}],"full":{"a":1743153740591486,"name":"_full_task","f":1743153740591486,"d_finished":0,"c":0,"l":1743153751090189,"d":10498703},"events":[{"name":"bootstrap","f":1743153740700067,"d_finished":956205,"c":1,"l":1743153741656272,"d":956205},{"a":1743153751089196,"name":"ack","f":1743153749994172,"d_finished":1007999,"c":903,"l":1743153751089110,"d":1008992},{"a":1743153751089181,"name":"processing","f":1743153741685985,"d_finished":4439519,"c":4515,"l":1743153751089112,"d":4440527},{"name":"ProduceResults","f":1743153741325806,"d_finished":1821221,"c":5420,"l":1743153751089439,"d":1821221},{"a":1743153751089443,"name":"Finish","f":1743153751089443,"d_finished":0,"c":0,"l":1743153751090189,"d":746},{"name":"task_result","f":1743153741686015,"d_finished":3326564,"c":3612,"l":1743153749946557,"d":3326564}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:22:31.090768Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:22:20.436479Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=903;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7037528;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7037528;selected_rows=0; 2025-03-28T09:22:31.090819Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:22:31.091042Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithConstantFold+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 29549, MsgBus: 9506 2025-03-28T09:20:45.996567Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787901328287448:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:45.996610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00183b/r3tmp/tmp2IUfCU/pdisk_1.dat 2025-03-28T09:20:46.852454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:46.852545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:46.859843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:20:46.917855Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29549, node 1 2025-03-28T09:20:47.183740Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:47.183770Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:47.183784Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:47.183934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9506 TClient is connected to server localhost:9506 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:48.113546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:50.764579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787922803124539:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.764705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.765159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787922803124551:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:50.769074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:20:50.784254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787922803124553:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:20:50.876774Z node 1 :TX_PROXY ERROR: Actor# [1:7486787922803124604:2341] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:50.998840Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787901328287448:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:50.998919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:51.348974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:20:51.662752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787927098092122:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:51.662950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787927098092122:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:51.663220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787927098092122:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:51.663363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787927098092122:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:51.663468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787927098092122:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:51.663585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787927098092122:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:51.663687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787927098092122:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:51.663787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787927098092122:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:51.663892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787927098092122:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:51.664035Z node 1 :TX_COLUMNSHARD WARN: 
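
Both join tests open with the same WARN/ERROR burst: the workload service fails to fetch the default resource pool (NOT_FOUND), schedules a retry while the pool is created, and TX_PROXY then reports "path exist, request accepts it" when the doublecheck races the creation; METADATA_PROVIDER and NET_CLASSIFIER add their own startup noise. Since both tests still finish [GOOD], a triage pass over such logs has to whitelist this bootstrap chatter. A sketch follows; treating these patterns as benign is an assumption drawn from this log, not an official list:

import re

TS = r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z"
ERROR_RECORD = re.compile(rf"({TS} node \d+ :\w+ ERROR:.*?)(?={TS}|$)", re.DOTALL)

# Observed in the passing ([GOOD]) runs above; assumed benign, not an official list.
BENIGN = re.compile(
    r"path exist, request accepts it"
    r"|scheme_cache_undelivered_message"
    r"|(?:error|event)=timeout"
    r"|got bad distributable configuration"
)

def suspicious_errors(log_text: str) -> list[str]:
    """ERROR records that do not match the known bootstrap noise."""
    return [m.group(1).strip()
            for m in ERROR_RECORD.finditer(log_text)
            if not BENIGN.search(m.group(1))]

On these two test logs the function should come back empty.
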
tablet_id=72075186224037901;self_id=[1:7486787927098092122:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:51.664158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787927098092122:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:51.664260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787927098092122:2355];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:51.678857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486787927098092114:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:51.678993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486787927098092114:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:51.679217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486787927098092114:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:51.679327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486787927098092114:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:51.679427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486787927098092114:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:51.679547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486787927098092114:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:51.679653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486787927098092114:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:51.679748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486787927098092114:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:51.679850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486787927098092114:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:51.679949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486787927098092114:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:51.680041Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7486787927098092114:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:51.680141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486787927098092114:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:51.719406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486787927098092116:2352];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp: ... oller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.491967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.495391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.522207Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.531942Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.542495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.565246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.575628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.584857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.591282Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.599600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.606283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.611328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.613086Z node 1 :TX_COLUMNSHARD_TX WARN: 
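
From here the FiveWayJoinWithConstantFold log is dominated by repetition: CREATE TABLE with STORE = COLUMN spreads each table over many columnshard tablets (72075186224037901, 72075186224037903, 72075186224037893, ...), and every tablet replays the identical eleven-normalizer registration on TTxInitSchema. Grouping the register events by tablet makes the repetition easy to verify; a sketch over the field layout above (helper name is illustrative):

import re
from collections import defaultdict

EVENT = re.compile(
    r"tablet_id=(\d+);\S*?event=normalizer_register;description=CLASS_NAME=(\w+)"
)

def chains_by_tablet(log_text: str) -> dict:
    """Map each columnshard tablet to the normalizer chain it registered."""
    chains = defaultdict(list)
    for tablet, name in EVENT.findall(log_text):
        chains[tablet].append(name)
    return dict(chains)

Every tablet in this excerpt should report the same Granules-through-RestoreV2Chunks chain.
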
tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.620411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.626344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.630027Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.633393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.639933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.640059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.646040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.646073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.652315Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.652316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.658273Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.658471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.664208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.664712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.669996Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.670914Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.675837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.677648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.681447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.686379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.692087Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.693385Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.701120Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.703685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.708795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.711376Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.716162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.716322Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.722288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.727088Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.732557Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.847861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:16.921066Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0x7c46ndds8wffwywtsyg", SessionId: ydb://session/3?node_id=1&id=OWZjNzkxZjYtNzc2OWQ3MjgtOWU2ZTczMWYtYWYxYWRkN2M=, Slow query, duration: 36.692449s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:17.296415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715716; 2025-03-28T09:22:17.296937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715716; 2025-03-28T09:22:17.300119Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486788197681078082:9294];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-28T09:22:17.300549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715716; >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin-NotNull >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH10 [GOOD] Test command err: Trying to start YDB, gRPC: 7791, MsgBus: 28217 2025-03-28T09:20:42.810947Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787886556473916:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:42.811268Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001847/r3tmp/tmptVU05N/pdisk_1.dat 2025-03-28T09:20:43.379545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:43.379625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:43.400707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7791, node 1 2025-03-28T09:20:43.586994Z node 1 
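
The KQP_SLOW_LOG record above is the one genuinely interesting line in the flood: it embeds the offending statement inline with newlines escaped as \n, and shows the 36.692449s cost of creating three column-store tables with AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 (the same setting that fans out into the hundreds of per-tablet records around it). A sketch for pulling duration and statement out of such records, assuming the field order shown above:

import re

SLOW = re.compile(
    r'KQP_SLOW_LOG WARN: TraceId: "(?P<trace>[^"]+)".*?'
    r'duration: (?P<dur>[\d.]+)s.*?text: "(?P<text>.*?)", parameters:',
    re.DOTALL,
)

def slow_queries(log_text: str):
    """Yield (trace_id, seconds, statement) per KQP_SLOW_LOG record."""
    for m in SLOW.finditer(log_text):
        text = m.group("text").encode().decode("unicode_escape")
        yield m.group("trace"), float(m.group("dur")), text

The single record above yields trace 01jqe0x7c46ndds8wffwywtsyg, 36.692449s, and the CREATE TABLE t1/t2/t3 batch.
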
:GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:20:43.587012Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:20:43.654053Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:43.667109Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:43.667132Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:43.667139Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:43.667297Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28217 TClient is connected to server localhost:28217 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:44.470066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:47.320420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787908031311067:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:47.320503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787908031311078:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:47.320617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:47.326810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:20:47.338084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787908031311081:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:20:47.424248Z node 1 :TX_PROXY ERROR: Actor# [1:7486787908031311132:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:47.809774Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787886556473916:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:47.809836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:47.842231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:20:47.986288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787908031311264:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:47.986516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787908031311264:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:47.987039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787908031311264:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:47.987218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787908031311264:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:47.987335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787908031311264:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:47.987460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787908031311264:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:47.987589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787908031311264:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:47.987759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787908031311264:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:47.987890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787908031311264:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:47.988000Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7486787908031311264:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:47.988105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787908031311264:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:47.988207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787908031311264:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:48.067923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:20:48.067987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:20:48.068101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:20:48.068129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:20:48.068325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:20:48.068359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:20:48.068458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:20:48.068488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:20:48.068556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:20:48.068587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:20:48.068641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:20:48.068669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:20:48.069326Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:20:48.069374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:20:48.069616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;descri ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.455191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.460266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.464626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.470058Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.474724Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.492855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.495567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.506242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.514703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.527563Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039248;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.533084Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.546219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.555851Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-28T09:22:13.560760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.562236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.568642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039242;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.570304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.574771Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.584664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.591409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.592420Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.610934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.615329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.620302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.621837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.631977Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039314;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.636170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039244;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.641807Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.645954Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.652038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.660362Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.666312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.667217Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.673514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.685791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039246;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.686345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.695306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.702926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.704634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.708450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.714022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039282;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.718549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.727987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.732783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.734139Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:13.964437Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0x1cb2t461j18c42svd5h", SessionId: ydb://session/3?node_id=1&id=MmNiYmZkZjktODJhNmQ4NGEtMTdlYzQyZDUtYmJhMTIzOGE=, Slow query, duration: 39.872278s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:14.301335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:14.301742Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:14.302353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7486788122779712535:7802];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-03-28T09:22:14.302726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug1 [GOOD] Test command err: Trying to start YDB, gRPC: 32575, MsgBus: 30313 2025-03-28T09:21:42.431651Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788145241291895:2265];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:42.431706Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017ec/r3tmp/tmp6Sy6UF/pdisk_1.dat 2025-03-28T09:21:43.135711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:43.135790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:43.139601Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:43.142615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32575, node 1 2025-03-28T09:21:43.395262Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:43.395291Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:43.395298Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:43.395423Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30313 TClient is connected to server localhost:30313 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:44.406056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:44.451064Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:46.980853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788162421161528:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:46.980970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:46.981284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788162421161540:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:46.985922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:21:47.017306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788162421161542:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:21:47.077720Z node 1 :TX_PROXY ERROR: Actor# [1:7486788166716128889:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:47.445368Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788145241291895:2265];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:47.445563Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:47.493364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:21:47.601442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:21:47.635260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:47.674031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:47.716902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:47.928834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:47.987112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:48.067684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:48.107812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:21:48.155846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:21:48.197188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:21:48.235271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
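For reference, the statement text captured by the KQP_SLOW_LOG entry above (duration 39.872278s; the same text recurs in the 35.879359s entry later in this run) is the three-table column-store DDL below. This is the logged text verbatim, only with the logged \n escape sequences expanded into line breaks and indentation for readability:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 setting accounts for the long tail of per-tablet finished_tx records seen above: each CREATE TABLE fans out across 240 column shards, and the slow-query durations reflect waiting for all of them.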
2025-03-28T09:21:48.284448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.136501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:21:49.179211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.235903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.276873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.324533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.353918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.381060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.413961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.460998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.495638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.527541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.600379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.642914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.681855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.720274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.791081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:21:49.825598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.894598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.905329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038532;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.907662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.913182Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038554;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.917971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.927533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038538;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.932208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038492;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.937247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038490;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.941801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038450;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.948379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.955042Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038510;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.961136Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.964631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.966597Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.970301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038526;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.971966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.976624Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038540;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.977032Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.981478Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.984555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.988579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.990663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:26.993980Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.000160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.009117Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.015761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.022908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.024470Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.028045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.031455Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.038268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.042968Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.043690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.049083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.054249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038580;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.058856Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.063892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.069363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.073707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038514;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.080775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.083854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.093595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.101087Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.107852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.208372Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:27.274971Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0xj934c4a9b5reayxz2dk", SessionId: ydb://session/3?node_id=1&id=ZmM0OWFmNzktNWViYjAyMGItZDQzODllNjAtNTJlOTdmYWY=, Slow query, duration: 35.879359s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:27.591800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:27.592254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:27.592833Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7486788295565173976:5911];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-03-28T09:22:27.593185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::TestJoinHint1-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinStatsOverride+ColumnStore [GOOD] >> KqpIndexLookupJoin::LeftOnly-StreamLookup [GOOD] >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex [GOOD] >> KqpJoinOrder::TestJoinHint2-ColumnStore >> KqpJoinOrder::TestJoinOrderHintsComplex-ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoin+ColumnStore [GOOD] >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-ColumnStore >> ColumnStatistics::CountMinSketchServerlessStatistics [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnly-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 7747, MsgBus: 11090 2025-03-28T09:22:28.305234Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788340601502587:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:28.305677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001772/r3tmp/tmplRcycs/pdisk_1.dat 2025-03-28T09:22:29.149811Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:29.163959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:29.164051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2025-03-28T09:22:29.170037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7747, node 1 2025-03-28T09:22:29.435825Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:29.435851Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:29.435858Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:29.436032Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11090 TClient is connected to server localhost:11090 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:30.369884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:30.395867Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:30.406209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:30.617167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:30.884941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:30.986072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:22:33.282983Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788340601502587:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:33.283079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:33.300604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788362076340710:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:33.300718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:33.923668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:34.003369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:34.045569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:34.103829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:34.183212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:34.268958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:34.390064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788366371308539:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:34.390149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:34.390361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788366371308544:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:34.394019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:34.407995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788366371308546:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:34.519023Z node 1 :TX_PROXY ERROR: Actor# [1:7486788366371308603:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:35.992248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:36.048651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:36.139828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:36.204766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:36.242426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:36.271652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpIndexLookupJoin::Left+StreamLookup >> KqpJoin::RightSemiJoin_SimpleKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_ComplexSecondaryIndex [GOOD] Test command err: Trying to start YDB, gRPC: 9059, MsgBus: 4947 2025-03-28T09:22:27.013425Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788333796976485:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:27.013818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001775/r3tmp/tmpZSVjZ4/pdisk_1.dat 2025-03-28T09:22:27.723977Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:27.754942Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:27.755030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:27.763740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9059, node 1 2025-03-28T09:22:28.003719Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:28.003738Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:28.003744Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:28.003837Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:4947 TClient is connected to server localhost:4947 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:29.073136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:29.102984Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:29.120622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:29.361063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:22:29.688921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:29.841861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:32.002109Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788333796976485:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:32.002184Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:32.220552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788359566781903:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:32.220626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:32.629658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.665608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.708976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.756723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.806461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.887972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.969365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788359566782423:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:32.969423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:32.969739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788359566782428:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:32.973450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:32.989695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788359566782430:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:33.078339Z node 1 :TX_PROXY ERROR: Actor# [1:7486788363861749783:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:34.277111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:34.325439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:34.360575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:34.407416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:34.451259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:4:47: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:57: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint1-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 8474, MsgBus: 18853 2025-03-28T09:21:51.675217Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788182304137196:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:51.675671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017d9/r3tmp/tmp8vFm3a/pdisk_1.dat 2025-03-28T09:21:52.350519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:52.350595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:52.350824Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:52.355179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8474, node 1 2025-03-28T09:21:52.509412Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:52.509449Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:52.509456Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:52.509546Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18853 TClient is connected to server localhost:18853 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:53.240335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:55.615434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788199484006914:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:55.615546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:55.618803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788199484006926:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:55.630076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:21:55.646888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788199484006928:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:21:55.741928Z node 1 :TX_PROXY ERROR: Actor# [1:7486788199484006981:2340] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:56.211048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.384054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.419145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.458448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.491230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.644828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.646843Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788182304137196:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:56.647396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:56.690718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.760320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.785640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.809967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.836131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.910404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 
2025-03-28T09:21:56.962823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.799808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:2, at schemeshard: 72057594046644480 2025-03-28T09:21:57.848622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.901999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.987993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-28T09:21:58.080620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-03-28T09:21:58.126381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-03-28T09:21:58.161296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-03-28T09:21:58.197301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-03-28T09:21:58.271071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-03-28T09:21:58.316243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-03-28T09:21:58.355432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 2025-03-28T09:21:58.394958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-03-28T09:21:58.472871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-03-28T09:21:58.521155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-03-28T09:21:58.567228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715687:0, at schemeshard: 72057594046644480 2025-03-28T09:21:58.613923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:0, at schemeshard: 72057594046644480 2025-03-28T09:21:58.657533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-03-28T09:21:58.699358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pro ... oller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.669646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.671525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.676754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.682171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.687453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.692994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.694314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.699778Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.701280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.705719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.706755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.711455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.711768Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.717240Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.721180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.726627Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.732105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.734019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.737594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.742832Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.744445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.748510Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.756975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.760647Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.765715Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.770515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.774105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.783500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.787497Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.792123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.796493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.801155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.805581Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.813141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.814257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.822676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.824014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.828664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.835572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.841549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.844496Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.851006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.857905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.871738Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:31.875599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715714; 2025-03-28T09:22:32.090102Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0xtssb3z8v7vzbqf43313", SessionId: ydb://session/3?node_id=1&id=ODQ4YTY5MzQtNGYwMjZiZjAtYzQwOGQyNjQtN2U5MWIyYzQ=, Slow query, duration: 31.968560s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:32.414853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715716; 2025-03-28T09:22:32.415266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715716; 2025-03-28T09:22:32.416107Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7486788341217959107:6943];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-03-28T09:22:32.416470Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976715716;tx_id=281474976715716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715716; >> KqpJoinOrder::CanonizedJoinOrderTPCH22 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsComplex-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 9774, MsgBus: 27676 2025-03-28T09:21:50.297193Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788177908015113:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:50.297511Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017e3/r3tmp/tmpjB5MBD/pdisk_1.dat 2025-03-28T09:21:50.967152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:50.967245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:50.973260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:21:51.072679Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9774, node 1 2025-03-28T09:21:51.289905Z 
node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:51.302762Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:51.302787Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:51.302986Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27676 TClient is connected to server localhost:27676 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:52.420830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:52.442176Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:54.773874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788195087884812:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:54.773981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:54.774074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788195087884823:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:54.778268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:21:54.798941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788195087884826:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:21:54.892141Z node 1 :TX_PROXY ERROR: Actor# [1:7486788195087884877:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:55.295102Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788177908015113:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:55.295204Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:55.362776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:21:55.501641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:21:55.545168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:21:55.596659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:21:55.665331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:21:55.895939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:21:55.939817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:21:55.988412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.030260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.077032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.110227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.155151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-28T09:21:56.194647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:21:56.926833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:21:56.975993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.010432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.058016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.119201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.194322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.251905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.292457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.328104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.397175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.453042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.494185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.536773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.578344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.610573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.687601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.759166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.168910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.172123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.174671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.181258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.182119Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.187211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.193390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.193403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.201702Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.206928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038499;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.207828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.222296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.223154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038463;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-28T09:22:31.227648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.233123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038455;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.236964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.243570Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.246130Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.249518Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.251670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.255076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.261451Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.264801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.268013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.270126Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.273545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.277569Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.279185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.284249Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.286822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.290075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.292076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.295567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.297046Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.302058Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.305598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.312482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.318062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.320738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.330037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038465;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.339277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.379132Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:31.490948Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0xt5j6fzgt91b1tz04m78", SessionId: ydb://session/3?node_id=1&id=OWMxOTc2MzktOTc4YTJjNjctYTMxMjgzYmQtYzc1NzRjYWM=, Slow query, duration: 32.015597s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 
Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:31.845268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:31.845655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:31.846671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7486788250922471930:3975];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-03-28T09:22:31.847146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
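Both KQP_SLOW_LOG entries above record the same ~32 s schema-setup statement (status STATUS_CODE_UNSPECIFIED, 0 b results). For readability, this is the logged "text:" field with its \n escapes expanded (content verbatim from the log; indentation approximated):

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The 30+ second duration is consistent with creating 3 x 240 column-store partitions: each of the surrounding per-tablet "finished_tx" entries corresponds to one shard acknowledging the schema transaction.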
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
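The "Unapplied hint" warnings above (code 4534, repeated verbatim below) are the point of TestJoinOrderHintsComplex: hints that name relations absent from the query (Unused1..Unused4, R, T) must be reported rather than silently dropped. A minimal sketch of how such hints are supplied, assuming YDB's PRAGMA ydb.OptimizerHints mechanism (the test's actual query text is not in this log):

    -- Hypothetical sketch; the hint strings match the ones reported unapplied above.
    PRAGMA ydb.OptimizerHints =
        'JoinOrder( (Unused1 Unused2) (Unused3 Unused4) )
         Rows(Unused # 10e8)
         Rows(R T # 1)';
    -- JoinOrder fixes the shape of the join tree; Rows overrides the
    -- optimizer's cardinality estimate for the listed relation set.
    -- Because no relation named Unused1..Unused4, R, or T occurs in the
    -- query, the optimizer emits warning code 4534 for each hint instead
    -- of applying it.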
: Warning: Execution, code: 1060
: Warning: Unapplied hint: JoinOrder( (Unused1 Unused2) (Unused3 Unused4) ), code: 4534
: Warning: Unapplied hint: Rows(Unused # 10e8), code: 4534
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinStatsOverride+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 20174, MsgBus: 30957 2025-03-28T09:20:48.165452Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787910850219649:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:48.165571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001836/r3tmp/tmpPxmLmO/pdisk_1.dat 2025-03-28T09:20:48.893882Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:48.945233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:48.945333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:48.951757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20174, node 1 2025-03-28T09:20:49.133518Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:49.133538Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:49.133552Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:49.133709Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30957 TClient is connected to server localhost:30957 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:49.947949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:49.963507Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:52.427095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787928030089365:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:52.427228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:52.427567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787928030089377:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:52.431516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:20:52.442749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787928030089379:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:20:52.511338Z node 1 :TX_PROXY ERROR: Actor# [1:7486787928030089431:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:52.895037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.166777Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787910850219649:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:53.166863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:53.294077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787932325057004:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:53.298433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787932325057048:2361];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:53.298658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787932325057048:2361];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:53.299272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787932325057048:2361];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:53.299416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787932325057048:2361];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:53.299533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787932325057048:2361];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:53.299667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787932325057048:2361];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:53.299776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787932325057048:2361];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:53.299882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787932325057048:2361];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:53.299993Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037901;self_id=[1:7486787932325057048:2361];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:53.300107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787932325057048:2361];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:53.300209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787932325057048:2361];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:53.300311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486787932325057048:2361];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:53.303142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787932325057004:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:53.303392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787932325057004:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:53.303499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787932325057004:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:53.303619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787932325057004:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:53.303744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787932325057004:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:53.303843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787932325057004:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:53.311501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787932325057004:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:53.311804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787932325057004:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:53.311947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787932325057004:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:53.312070Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7486787932325057004:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:53.312212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486787932325057004:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:53.384048Z node 1 :T ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.097360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.102784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.103433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.108439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.109175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.115945Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.128405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.129488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.134345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.134581Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039191;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.149150Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.153322Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.157681Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.166982Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.171554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.176666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.180679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.185946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.191811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.193929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.197210Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.203429Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.209488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.215067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.218168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.223021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.227146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.228508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.233475Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.237859Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.238806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.243056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.251190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.256310Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.260493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.265740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.269910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.275175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.283372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.284631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.294213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.297196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.304259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.313567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.317026Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.554949Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0x9geejt95rqnbrc6epwd", SessionId: ydb://session/3?node_id=1&id=NTJhYTgzZWItYTgzYWNhMzQtYmM2MjE5NTMtNDY4NWI0OWU=, Slow query, duration: 38.135766s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:20.842161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:20.842550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:20.843310Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486788198613074958:9249];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-03-28T09:22:20.843650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchServerlessStatistics [GOOD] Test command err: 2025-03-28T09:18:54.788719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:605:2415], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:18:54.789065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:18:54.789216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000feb/r3tmp/tmpiGye7x/pdisk_1.dat 2025-03-28T09:18:55.261399Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20002, node 1 2025-03-28T09:18:55.603080Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:18:55.603142Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:18:55.603170Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:18:55.603603Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:18:55.607041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:18:55.708933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:18:55.709083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:18:55.724532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11330 2025-03-28T09:18:56.283237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:18:59.866487Z node 4 :STATISTICS INFO: Subscribed for config changes on node 4 2025-03-28T09:18:59.911412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:18:59.911533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:18:59.968580Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-03-28T09:18:59.980908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:19:00.242790Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:00.243403Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:00.243952Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:00.244104Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:00.244349Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:00.244446Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:00.244556Z node 4 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:00.244644Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:00.244720Z node 4 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:19:00.434401Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:00.434489Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:00.447845Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:19:00.613387Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:00.684386Z node 4 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:19:00.684488Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:19:00.742623Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:19:00.743981Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:19:00.744223Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:19:00.744288Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:19:00.744344Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:19:00.744406Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:19:00.744478Z node 4 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:19:00.744547Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:19:00.745095Z node 4 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:19:00.832189Z node 4 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:19:00.832293Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:2037:2600], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:19:00.839062Z node 4 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [4:2046:2608] 2025-03-28T09:19:00.845247Z node 4 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [4:2079:2624] 2025-03-28T09:19:00.846334Z node 4 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [4:2079:2624], schemeshard id = 72075186224037897 2025-03-28T09:19:00.848862Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-28T09:19:00.875658Z node 4 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:19:00.875726Z node 4 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:19:00.875815Z node 4 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-28T09:19:00.893118Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:19:00.900857Z node 4 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:19:00.901012Z node 4 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:19:01.118591Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:19:01.312458Z node 4 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:19:01.383067Z node 4 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:19:02.107245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:19:05.333785Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-03-28T09:19:05.418284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:05.418399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:05.418613Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:05.418668Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:05.465056Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T09:19:05.468631Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:19:05.558497Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:05.630432Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T09:19:05.631071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:19:05.770974Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-28T09:19:05.771052Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:19:05.771183Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:3096:2952], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:19:05.771984Z node 4 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [4:3097:2953] 2025-03-28T09:19:05.772356Z node 4 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [4:3097:2953], schemeshard id = 72075186224037899 2025-03-28T09:19:06.791887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:19:10.250504Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:19:10.337217Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:10.337342Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:10.337632Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:19:10.337707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:19:10.376085Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:19:10.377716Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:19:10.379520Z node 4 :HIVE WARN: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:19:10.387396Z node 4 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:19:10.487152Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:19:10.726452Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-03-28T09:19:10.726530Z node 4 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037905 2025-03-28T09:19:10.726640Z node 4 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [4:3932:3154], at schemeshard: 72075186224037905, StatisticsAggregatorId: 720751862 ... TktYWZkZWI2MGYtYjI1NWZmNDktMTQ3ODgzZjA=, TxId: 2025-03-28T09:22:30.511351Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=MjMzNDAzMTktYWZkZWI2MGYtYjI1NWZmNDktMTQ3ODgzZjA=, TxId: 2025-03-28T09:22:30.512418Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:22:30.545388Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:22:30.545494Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:22:30.595739Z node 4 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:22:30.595829Z node 4 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:22:30.666201Z node 4 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [4:11597:7153], schemeshard count = 1 2025-03-28T09:22:32.663927Z node 4 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-03-28T09:22:32.664023Z node 4 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 201.000000s, at schemeshard: 72075186224037899 2025-03-28T09:22:32.664488Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-03-28T09:22:32.685245Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:22:33.779947Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:22:33.780028Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:22:33.780090Z node 4 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 
2025-03-28T09:22:33.780145Z node 4 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-28T09:22:33.785931Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:22:33.816416Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:22:33.817215Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:22:33.817314Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:22:33.818949Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:22:33.844459Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:22:33.844728Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 4, Round: 2, current Round: 0 2025-03-28T09:22:33.845621Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:11769:7251], server id = [4:11770:7252], tablet id = 72075186224037911, status = OK 2025-03-28T09:22:33.846040Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:11769:7251], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:22:33.855666Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-03-28T09:22:33.855827Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 4 2025-03-28T09:22:33.856166Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:22:33.856400Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:22:33.856755Z node 4 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-28T09:22:33.863501Z node 4 :STATISTICS DEBUG: EvClientDestroyed, node id = 4, client id = [4:11769:7251], server id = [4:11770:7252], tablet id = 72075186224037911 2025-03-28T09:22:33.863569Z node 4 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:22:33.864465Z node 4 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:22:33.953955Z node 4 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [4:11790:7271]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:22:33.954239Z node 4 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:22:33.954290Z node 4 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [4:11790:7271], StatRequests.size() = 1 2025-03-28T09:22:34.223001Z node 4 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=ZmNlMmQwMTEtOGJjYzA1ZDQtZTFmMzk1MWYtNTg3YzYzM2U=, TxId: 2025-03-28T09:22:34.223075Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=ZmNlMmQwMTEtOGJjYzA1ZDQtZTFmMzk1MWYtNTg3YzYzM2U=, TxId: 2025-03-28T09:22:34.223877Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:22:34.239412Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path 
[OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-28T09:22:34.239467Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:22:34.880690Z node 4 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-28T09:22:34.880779Z node 4 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:22:35.384041Z node 4 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037905 2025-03-28T09:22:35.384122Z node 4 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 180.000000s, at schemeshard: 72075186224037905 2025-03-28T09:22:35.384409Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037905, stats size# 28 2025-03-28T09:22:35.412788Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:22:37.460293Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:22:37.460357Z node 4 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:22:37.460401Z node 4 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037905, LocalPathId: 2] is column table. 2025-03-28T09:22:37.460441Z node 4 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037905, LocalPathId: 2] 2025-03-28T09:22:37.468151Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:22:37.500986Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:22:37.501586Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:22:37.501665Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:22:37.502389Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:22:37.533553Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:22:37.533820Z node 4 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 4, Round: 3, current Round: 0 2025-03-28T09:22:37.534678Z node 4 :STATISTICS DEBUG: EvClientConnected, node id = 4, client id = [4:11933:7344], server id = [4:11934:7345], tablet id = 72075186224037912, status = OK 2025-03-28T09:22:37.534809Z node 4 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [4:11933:7344], path = { OwnerId: 72075186224037905 LocalId: 2 } 2025-03-28T09:22:37.544074Z node 4 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-03-28T09:22:37.544190Z node 4 :STATISTICS DEBUG: Send aggregate statistics response to node: 4 2025-03-28T09:22:37.544370Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:22:37.544531Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:22:37.544824Z node 4 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Shared 2025-03-28T09:22:37.552874Z node 4 :STATISTICS DEBUG: EvClientDestroyed, node id = 4, client id = [4:11933:7344], server id = [4:11934:7345], tablet id = 72075186224037912 2025-03-28T09:22:37.552931Z node 4 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:22:37.554230Z node 4 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:22:37.583226Z node 4 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=NWMwYzA5ZGMtZjFlMzRiMTctMzY1MWY3YzktODgxYTUzMGY=, TxId: 2025-03-28T09:22:37.583297Z node 4 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NWMwYzA5ZGMtZjFlMzRiMTctMzY1MWY3YzktODgxYTUzMGY=, TxId: 2025-03-28T09:22:37.584449Z node 4 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:22:37.585691Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:11952:6016]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:22:37.586065Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:22:37.586129Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:22:37.591728Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:22:37.591811Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-28T09:22:37.591874Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:22:37.615443Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-03-28T09:22:37.621885Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:11952:6016]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:22:37.622284Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:22:37.622365Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:22:37.622713Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:22:37.622774Z node 1 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-28T09:22:37.622842Z node 1 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:22:37.634081Z node 1 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> KqpJoinOrder::TPCH12_100 >> test_retry.py::TestRetry::test_low_rate[kikimr0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoin+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 31331, MsgBus: 
4382 2025-03-28T09:20:48.192446Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787913325655419:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:48.235902Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001837/r3tmp/tmplGAmtd/pdisk_1.dat 2025-03-28T09:20:48.920049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:48.920147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:48.921480Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:48.930236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31331, node 1 2025-03-28T09:20:49.127196Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:49.127217Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:49.127228Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:49.127325Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4382 TClient is connected to server localhost:4382 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:50.034598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:52.636663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787930505525064:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:52.636666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787930505525052:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:52.636783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:52.641023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:20:52.656107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787930505525066:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:20:52.747096Z node 1 :TX_PROXY ERROR: Actor# [1:7486787930505525117:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:53.193624Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787913325655419:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:53.193696Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:20:53.227461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:20:53.515465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787934800492655:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:53.515654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787934800492655:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:53.515906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787934800492655:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:53.516037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787934800492655:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:53.516134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787934800492655:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:53.516264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787934800492655:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:53.516356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787934800492655:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:53.516450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787934800492655:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:53.516613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787934800492655:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:53.516732Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7486787934800492655:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:53.516817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787934800492655:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:53.516909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787934800492655:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:53.561377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787934800492673:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:53.568161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486787934800492781:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:53.568219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486787934800492781:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:53.568424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486787934800492781:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:53.568535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486787934800492781:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:53.568631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486787934800492781:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:53.568747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486787934800492781:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:53.568867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486787934800492781:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:53.568941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787934800492673:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:53.568983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486787934800492781:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:53.569103Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7486787934800492781:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:53.569164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486787934800492673:2359];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:53.569207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486787934800492781:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME= ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.590807Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.595600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.595827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.601846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.608929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.616830Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.627011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.628944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.637157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.642790Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.649635Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.664946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.665468Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.679494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.679494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.693447Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.699924Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.711502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.716496Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.717972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.724263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.728328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.729980Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.736395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.737760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.742009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.748044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.753034Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.758142Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.763583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.766958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.772073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.777213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.779153Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.786438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.788801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.800615Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.808149Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.815699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.818949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.824933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.829987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.836811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.842870Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:19.843307Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:20.107006Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0x8nk9jwf5jtxhvpbf7ah", SessionId: ydb://session/3?node_id=1&id=ZGJlYjI2OWMtOWU0MzlhYjUtZDU3Njk1NTgtZTdmYjM3MWQ=, Slow query, duration: 38.551174s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:20.595566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:20.595567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:20.596148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486788261218063811:11182];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-28T09:22:20.596512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoin::RightSemiJoin_SecondaryIndex >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin-NotNull >> KqpJoinOrder::CanonizedJoinOrderTPCH21 >> KqpJoinOrder::ShuffleEliminationReuseShuffleTwoJoins >> OlapEstimationRowsCorrectness::TPCH2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH22 [GOOD] Test command err: Trying to start YDB, gRPC: 1858, MsgBus: 8592 2025-03-28T09:20:51.103312Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787925393088436:2269];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:51.103539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001832/r3tmp/tmpQKntbC/pdisk_1.dat 2025-03-28T09:20:51.707780Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:51.728174Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:51.728266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-03-28T09:20:51.730413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1858, node 1 2025-03-28T09:20:51.972229Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:51.972253Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:51.972264Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:51.972376Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8592 TClient is connected to server localhost:8592 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:52.871229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:52.884927Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:55.085743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787942572958068:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:55.085823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787942572958079:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:55.085842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:55.090198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:20:55.108166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787942572958082:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:20:55.180876Z node 1 :TX_PROXY ERROR: Actor# [1:7486787942572958135:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:55.590244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:20:55.890733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787942572958361:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:55.891026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787942572958361:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:55.891320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787942572958361:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:55.891486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787942572958361:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:55.891684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787942572958361:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:55.891827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787942572958361:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:55.891937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787942572958361:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:55.895022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787942572958361:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:55.895191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787942572958361:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:55.895323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787942572958361:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:55.895381Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7486787942572958468:2362];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:55.895432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787942572958361:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:55.895447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787942572958468:2362];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:55.895827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486787942572958361:2349];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:55.898856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787942572958468:2362];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:55.898991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787942572958468:2362];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:55.899109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787942572958468:2362];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:55.899231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787942572958468:2362];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:55.899499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787942572958468:2362];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:55.899717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787942572958468:2362];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:55.899925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787942572958468:2362];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:55.900063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787942572958468:2362];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:55.900161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787942572958468:2362];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:55.900289Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7486787942572958468:2362];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:55.935307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486787942572958600:2364];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:55.935380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486787942572958600:2364];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.171219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.172841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.178554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.182090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.189920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.197406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.212923Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.213175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.218581Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.223551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.229544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.233101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.245034Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.248862Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.254196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.257218Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.268313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.268498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.286026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.286997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.299812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.299869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.309708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.312784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.322527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.323331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.336643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.337873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.342474Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.349339Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.364588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.371504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.377096Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.381008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.386849Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.392478Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.396582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.397767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.410449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.412017Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.417206Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.421301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.436203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.440832Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.450069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:21.799303Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0xb64eaxghhegwt65pf5k", SessionId: ydb://session/3?node_id=1&id=N2IyZTkwOWQtNGU1NzQyMmEtOTZhMDQwZDUtYjYzZDZlY2Y=, Slow query, duration: 37.664158s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:22.189304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:22.189688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:22.190375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486788217450911264:9322];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-03-28T09:22:22.190701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull >> KqpJoinOrder::TPCDS61-ColumnStore >> OlapEstimationRowsCorrectness::TPCH9 [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull >> KqpJoin::JoinConvert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH9 [GOOD] Test command err: Trying to start YDB, gRPC: 64650, MsgBus: 27896 2025-03-28T09:20:55.269732Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787942452341622:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:55.281721Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00182c/r3tmp/tmpKdGk9G/pdisk_1.dat 2025-03-28T09:20:55.826514Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-03-28T09:20:55.829565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:55.829636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:55.832519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64650, node 1 2025-03-28T09:20:56.026283Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:56.026306Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:56.026314Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:56.026465Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27896 TClient is connected to server localhost:27896 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:56.897905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:56.923404Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:59.851086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787959632211465:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:59.851208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:59.851685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787959632211477:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:59.855929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:20:59.869288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787959632211479:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:20:59.967787Z node 1 :TX_PROXY ERROR: Actor# [1:7486787959632211530:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:00.270604Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787942452341622:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:00.270725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:00.366596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:21:00.766314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787963927179095:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:00.770975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787963927179095:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:00.771284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787963927179095:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:00.771399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787963927179095:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:00.771496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787963927179095:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:00.771597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787963927179095:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:00.771700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787963927179095:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:00.771804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787963927179095:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:00.771902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787963927179095:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:00.771999Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7486787963927179095:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:00.772128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787963927179095:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:00.772253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486787963927179095:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:00.813221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787963927179101:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:00.813312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787963927179101:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:00.813499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787963927179101:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:00.813596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787963927179101:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:00.813686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787963927179101:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:00.813773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787963927179101:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:00.813860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787963927179101:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:00.813950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787963927179101:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:00.814044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787963927179101:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:00.814134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787963927179101:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:00.814240Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7486787963927179101:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:00.814329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787963927179101:2358];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:00.850157Z node 1 :T ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.424824Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.425158Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.431065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.432162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.436814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.437483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.442551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.442616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.448503Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.449192Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.454826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.455353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.461283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.461282Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.466988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.467163Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.472634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.475050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.478876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.480950Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.484663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.487077Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.489852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.491763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.495090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.496699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.500489Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.501672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.505504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.506579Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.511405Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.511438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.516624Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.517138Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.522302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.523250Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.528641Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.533653Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.537438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.539645Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.543168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.545839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.549719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.551650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.556470Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:25.812447Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0xef4cd5y0ydasrraxg3e", SessionId: ydb://session/3?node_id=1&id=OWQ0YWRhNzEtODZhYjFlOWQtNWY3YWUzMzEtZWJlNTdhNTI=, Slow query, duration: 38.318643s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:26.217866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:26.218321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:26.219303Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7486788174380612644:7744];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-03-28T09:22:26.219711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::Left+StreamLookup [GOOD] >> KqpIndexLookupJoin::Left-StreamLookup >> KqpJoinOrder::CanonizedJoinOrderTPCH19 [GOOD] >> KqpJoinOrder::GeneralPrioritiesBug4 [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin-NotNull >> KqpJoin::RightSemiJoin_SimpleKey [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull [GOOD] >> KqpJoinOrder::DatetimeConstantFold+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_SimpleKey [GOOD] Test command err: Trying to start YDB, gRPC: 17738, MsgBus: 24023 2025-03-28T09:22:40.531877Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788392421152607:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:40.531934Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00175b/r3tmp/tmpxeuQzR/pdisk_1.dat 2025-03-28T09:22:41.248129Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:41.264647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-03-28T09:22:41.264705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:41.268036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17738, node 1 2025-03-28T09:22:41.389240Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:41.389259Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:41.389267Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:41.389382Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24023 TClient is connected to server localhost:24023 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:42.170345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:42.211223Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:42.235335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:42.513411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:42.798273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:42.960210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:45.183472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788413895990619:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:45.183571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:45.549664Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788392421152607:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:45.550308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:45.553063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.638202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.712115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.779077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.821965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.912360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:46.023308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788418190958444:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:46.023407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:46.023627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788418190958449:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:46.027921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:46.059302Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:22:46.062509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788418190958451:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:46.151695Z node 1 :TX_PROXY ERROR: Actor# [1:7486788418190958509:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:47.759941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:47.805266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:47.883413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:47.976242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:48.017631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480
<main>: Warning: Execution, code: 1060
<main>:4:39: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
<main>: Warning: Execution, code: 1060
:3:49: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::GeneralPrioritiesBug4 [GOOD] Test command err: Trying to start YDB, gRPC: 26382, MsgBus: 3848 2025-03-28T09:21:59.965867Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788217883812973:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:59.965924Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017ac/r3tmp/tmpWipHYL/pdisk_1.dat 2025-03-28T09:22:00.560556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:00.560644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:00.578116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26382, node 1 2025-03-28T09:22:00.767334Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:00.867202Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:00.867223Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:00.867230Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:00.867336Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3848 TClient is connected to server localhost:3848 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:02.154220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:04.929545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788239358650094:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:04.929667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:04.930107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788239358650105:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:04.934070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:04.962223Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788217883812973:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:04.962290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:04.969838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788239358650108:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:05.070652Z node 1 :TX_PROXY ERROR: Actor# [1:7486788243653617458:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:05.364294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.499499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.532281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.573595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.624353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.857313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.919690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:05.973339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:06.010060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:22:06.084445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:22:06.134415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:22:06.182304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:06.232007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.104499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-03-28T09:22:07.164478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.201866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.232345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.262977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.297954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.328513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.357701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.387414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.419282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.452686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.489792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.534787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.571955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.603885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.675320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.718488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.767192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but prop ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.125533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.129905Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.134517Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.141416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.144514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.153757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.154357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.161564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.167328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.179180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.183793Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.185683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.189827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.197935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.208289Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.209554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.225995Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.233164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.239658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.250750Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.256713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.262138Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038545;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.270549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.271671Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.280478Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.283838Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.293505Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.296533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.310359Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.310417Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.316191Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.321852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.328811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.336011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.350043Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.351090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.358366Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.359012Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.373617Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.384469Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.392937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038515;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.398215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038483;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.457408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.531224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.544355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:43.588319Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0y3wz04am9jdyfsgsyv27", SessionId: ydb://session/3?node_id=1&id=ODAwNWZiOTktY2IzN2Q5YmEtODUyMGI2NzMtNjNhZmNmYzA=, Slow query, duration: 34.148455s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:44.083387Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:44.083757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:44.084344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7486788338142919104:5342];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-03-28T09:22:44.084642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt64ToUint64-StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 63366, MsgBus: 32532 2025-03-28T09:22:35.944833Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788372609149853:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:35.945086Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00176d/r3tmp/tmp56GrCf/pdisk_1.dat 2025-03-28T09:22:36.663890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:36.663982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:36.683784Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:36.704261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63366, node 1 2025-03-28T09:22:37.014959Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:37.014978Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:37.014991Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:37.015095Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32532 TClient is connected to server localhost:32532 WaitRootIsUp 'Root'... 
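
[Editor's note] The KQP_SLOW_LOG record above (duration: 34.148455s) embeds the DDL the test executed, with newlines escaped inside the quoted `text:` field. Reproduced below verbatim with the escapes expanded, purely for readability — nothing is added. The statements create three column-store tables with AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 each, i.e. on the order of 720 column shards, which is consistent with the long run of per-tablet `finished_tx` records above and plausibly explains the slow duration under the ASAN build:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The same DDL appears, escaped the same way, in the 36.242010s KQP_SLOW_LOG record further down (in the KqpJoinOrder::CanonizedJoinOrderTPCH19 output), so it is not expanded a second time there.
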
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:37.823241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:37.849183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:38.074706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:38.322410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:38.421698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:40.479080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788394083988006:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:40.479244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:40.834615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:40.896297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:40.930945Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788372609149853:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:40.931055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:40.941142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:40.999171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:41.045290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:41.134331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:41.220670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788398378955823:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:41.220750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:41.220915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788398378955828:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:41.227989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:41.246041Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:22:41.246698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788398378955830:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:41.346330Z node 1 :TX_PROXY ERROR: Actor# [1:7486788398378955885:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:42.612688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:42.759444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 9273, MsgBus: 23053 2025-03-28T09:22:44.959778Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788412451193084:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00176d/r3tmp/tmpLbrJpg/pdisk_1.dat 2025-03-28T09:22:44.975443Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:22:45.175231Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:45.181442Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:45.181517Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:45.187600Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9273, node 2 2025-03-28T09:22:45.367188Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:45.367206Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:45.367214Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:45.367324Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23053 TClient is connected to server localhost:23053 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
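
[Editor's note] The recurring triple above — `Resource pool default not found` (NOT_FOUND), then `Transaction ... completed, doublechecking`, then the TX_PROXY `path exist, request accepts it` message for `/Root/.metadata/workload_manager/pools/default` — is the workload manager bootstrapping its default resource pool on first use: several actors race to create the pool, and the losers observe the already-created path and log it as a warning rather than failing the test. As a hedged illustration only (not taken from this log, and not something these tests run; it assumes a YDB version whose YQL supports resource-pool DDL), an explicitly created pool would look roughly like:

    -- Hypothetical sketch: explicit resource pool DDL as documented for
    -- recent YDB releases. The tests above rely on the automatically
    -- created pool named `default` instead of running anything like this.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- cap on simultaneously executing queries
        QUEUE_SIZE = 100              -- queries allowed to wait beyond the limit
    );
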
2025-03-28T09:22:46.033406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:46.039575Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:46.049856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:46.170782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:46.370214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:46.541878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:49.478840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788433926031174:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:49.478969Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:49.597056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.646609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.740287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.818652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.885022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.891001Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486788412451193084:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:49.891145Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:49.944713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.025172Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788438220998989:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:50.025253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:50.025593Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788438220998994:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:50.029486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:50.051426Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788438220998996:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:50.125400Z node 2 :TX_PROXY ERROR: Actor# [2:7486788438220999051:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:51.362144Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:51.452381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH19 [GOOD] Test command err: Trying to start YDB, gRPC: 25679, MsgBus: 16323 2025-03-28T09:21:05.170745Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787984893728395:2239];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:05.170976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00181d/r3tmp/tmpy162RN/pdisk_1.dat 2025-03-28T09:21:05.708040Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:05.727982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:05.728063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:05.732030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25679, node 1 2025-03-28T09:21:05.934191Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:05.934211Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:05.934217Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:05.934323Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16323 TClient is connected to server localhost:16323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:06.793590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:06.813963Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:09.455148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788002073598062:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:09.455272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:09.458792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788002073598072:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:09.463409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:21:09.481158Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:21:09.482346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788002073598076:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:21:09.566582Z node 1 :TX_PROXY ERROR: Actor# [1:7486788002073598127:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:09.967204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:21:10.170861Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486787984893728395:2239];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:10.170991Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:10.294619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788006368565661:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:10.295038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788006368565661:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:10.295305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788006368565661:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:10.295436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788006368565661:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:10.295556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788006368565661:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:10.295696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788006368565661:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:10.295801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788006368565661:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:10.295927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788006368565661:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:10.296072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788006368565661:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:10.296177Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7486788006368565661:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:10.296305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788006368565661:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:10.296450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788006368565661:2352];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:10.297664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788006368565669:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:10.297718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788006368565669:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:10.297893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788006368565669:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:10.298011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788006368565669:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:10.298125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788006368565669:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:10.298222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788006368565669:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:10.298309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788006368565669:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:10.298413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788006368565669:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:10.298570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788006368565669:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:10.306842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788006368565669:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:10.307118Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7486788006368565669:2356];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:10.307267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788006368565669:2356];tablet_id=72075186224037 ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.829012Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.839283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.843174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.849173Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.853142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.863470Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.867404Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.873668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.876824Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.882053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.882617Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.888688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.888764Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.895277Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.896900Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.901186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.907436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.907576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.914241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.916022Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.920516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.926267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.928096Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.932552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.933436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.939406Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.944767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.947992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.960163Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.961076Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.968992Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.970878Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.977818Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.980139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.990411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.992371Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:32.998979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:33.001381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:33.008191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:33.009439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:33.019023Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:33.021644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:33.029854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:33.032988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:33.056588Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:22:33.157997Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0xqnj1hcm86fz270f4rfd", SessionId: ydb://session/3?node_id=1&id=ZWQ1NjdjZS05ZDgzMDk2Ni02ZDgwMmFmNy1mNTk3YTlhMQ==, Slow query, duration: 36.242010s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-28T09:22:33.584220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:22:33.584654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:22:33.585404Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486788268361616378:9236];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933;
2025-03-28T09:22:33.585796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint64ToInt64-StreamLookupJoin+NotNull [GOOD]
Test command err:
Trying to start YDB, gRPC: 62364, MsgBus: 17671
2025-03-28T09:22:35.712716Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788372076355197:2283];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:22:35.782954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00176e/r3tmp/tmpKIAtG9/pdisk_1.dat
2025-03-28T09:22:36.335055Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:22:36.335144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:22:36.337114Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:22:36.339579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 62364, node 1
2025-03-28T09:22:36.667459Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:22:36.667480Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:22:36.667486Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:22:36.667596Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:17671
TClient is connected to server localhost:17671
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:22:37.769561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:22:37.801949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:22:38.048226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:22:38.276551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-28T09:22:38.402160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-03-28T09:22:40.724930Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788372076355197:2283];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:22:40.725203Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:22:40.779722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788393551193199:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:40.779873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:41.073751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:41.130262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:41.203558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:41.266715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:41.312038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:41.375930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:41.471324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788397846161018:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:41.471424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:41.471731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788397846161023:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:41.476376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:41.493208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788397846161025:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:41.554381Z node 1 :TX_PROXY ERROR: Actor# [1:7486788397846161080:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:42.748491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:42.848136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19136, MsgBus: 20389 2025-03-28T09:22:44.868315Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788411099559964:2209];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00176e/r3tmp/tmpcbFTAY/pdisk_1.dat 2025-03-28T09:22:44.986959Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:22:45.192521Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:45.206818Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:45.206928Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:45.215711Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19136, node 2 2025-03-28T09:22:45.379192Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:45.379216Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:45.379223Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:45.379332Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20389 TClient is connected to server localhost:20389 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
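The DDL inside the KQP_SLOW_LOG entry at 09:22:33.157997Z above is logged as a single escaped string, which makes it hard to read. Below it is unescaped for reference: the statements are copied verbatim from the logged query text, with only the \n escapes expanded (indentation is approximate, reconstructed by the editor). The same DDL appears again in the slow-query entry at 09:22:42.071582Z further down.

-- Reconstructed from the slow-query log text above; content unchanged, whitespace expanded.
CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);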
2025-03-28T09:22:46.122045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:46.135429Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:46.149370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:46.276292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:46.498003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:46.594124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:49.316456Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788432574398036:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:49.316537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:49.366434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.403774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.441474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.478090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.514968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.588578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.679317Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788432574398554:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:49.679380Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:49.679675Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788432574398559:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:49.682562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:49.702174Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788432574398562:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:22:49.774226Z node 2 :TX_PROXY ERROR: Actor# [2:7486788432574398616:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:22:49.822783Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486788411099559964:2209];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:22:49.822904Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:22:50.964596Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:22:51.043538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
>> KqpJoinOrder::TPCHRandomJoinViewJustWorks+ColumnStore [GOOD]
>> KqpJoin::RightSemiJoin_SecondaryIndex [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastUint32ToUint16+StreamLookupJoin+NotNull [GOOD]
Test command err:
Trying to start YDB, gRPC: 28321, MsgBus: 61096
2025-03-28T09:22:36.951235Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788377538005251:2196];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:22:36.951653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00176a/r3tmp/tmpQuCGs1/pdisk_1.dat
2025-03-28T09:22:37.701051Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:22:37.701584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:22:37.701650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:22:37.703964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 28321, node 1
2025-03-28T09:22:37.967833Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:22:37.967853Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:22:37.967861Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:22:37.968003Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:61096
TClient is connected to server localhost:61096
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:38.786308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:38.819512Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:38.833141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:39.035833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:39.315755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:39.442831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:41.646782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788399012843370:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:41.646905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:41.947972Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788377538005251:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:41.948106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:41.979969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:42.068373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:42.145656Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:42.186568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:42.225211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:42.315651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:42.382544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788403307811188:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:42.382618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:42.383046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788403307811193:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:42.387232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:42.402428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788403307811195:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:42.502304Z node 1 :TX_PROXY ERROR: Actor# [1:7486788403307811250:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:43.945321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.036698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29611, MsgBus: 3798 2025-03-28T09:22:45.980195Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788413128275059:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:45.980236Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00176a/r3tmp/tmpORouGp/pdisk_1.dat 2025-03-28T09:22:46.276533Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:46.302815Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:46.302900Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:46.304264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29611, node 2 2025-03-28T09:22:46.515189Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:46.515211Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:46.515217Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:46.515327Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3798 TClient is connected to server localhost:3798 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:22:47.375284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:47.394499Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:22:47.404860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:47.499102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:47.801314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:47.889273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:50.251822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788434603113237:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:50.251926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:50.361551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.405796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.457874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.498571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.534079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.617669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.716170Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788434603113759:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:50.716249Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:50.716462Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788434603113764:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:50.720236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:50.733113Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788434603113766:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-28T09:22:50.797799Z node 2 :TX_PROXY ERROR: Actor# [2:7486788434603113821:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:22:50.982254Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486788413128275059:2128];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:22:50.982307Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:22:52.309862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-03-28T09:22:52.425933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480
>> KqpJoinOrder::CanonizedJoinOrderTPCC
>> KqpIndexLookupJoin::JoinWithComplexCondition+StreamLookupJoin
>> KqpJoin::JoinConvert [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCH16
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::DatetimeConstantFold+ColumnStore [GOOD]
Test command err:
Trying to start YDB, gRPC: 27377, MsgBus: 22708
2025-03-28T09:21:14.340900Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788022686016424:2196];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:21:14.341288Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001811/r3tmp/tmpm6oXfa/pdisk_1.dat
2025-03-28T09:21:15.187887Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:21:15.199486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:21:15.199586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:21:15.202083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 27377, node 1
2025-03-28T09:21:15.459148Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:21:15.459167Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:21:15.459177Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:21:15.459268Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:22708
TClient is connected to server localhost:22708
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:16.418566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:16.443690Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:19.047700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788044160853433:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:19.047864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:19.048284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788044160853445:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:19.052934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:21:19.067823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788044160853447:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:21:19.151651Z node 1 :TX_PROXY ERROR: Actor# [1:7486788044160853498:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:19.334818Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788022686016424:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:19.334890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:19.684240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:21:20.010089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788044160853809:2360];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:20.011777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788044160853815:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:20.011994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788044160853815:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:20.012234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788044160853815:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:20.012340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788044160853815:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:20.012456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788044160853815:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:20.012549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788044160853815:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:20.012636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788044160853815:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:20.012726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788044160853815:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:20.012848Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7486788044160853815:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:20.012950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788044160853815:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:20.013037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788044160853815:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:20.013123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788044160853815:2362];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:20.016148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788044160853809:2360];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:20.016365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788044160853809:2360];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:20.016470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788044160853809:2360];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:20.016558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788044160853809:2360];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:20.016643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788044160853809:2360];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:20.016724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788044160853809:2360];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:20.016809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788044160853809:2360];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:20.016896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788044160853809:2360];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:20.016987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788044160853809:2360];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:20.017090Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7486788044160853809:2360];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:20.017180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788044160853809:2360];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:20.078007Z node 1 :T ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.654121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.659087Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.665514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.666196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.671167Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.678433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.684454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.687863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.693389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.696656Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.702287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.705683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.711267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.718988Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.724739Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.726472Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.735035Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.739813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.743804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.748972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.749295Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.758788Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.763938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.767994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.773475Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.777588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.786669Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.787278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.796553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.800027Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.806355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.809599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.819487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.824230Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.829175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.833741Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.846947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.852556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.866935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.877074Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.886036Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.898985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.911064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.917150Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:41.931074Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:42.071582Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0y0b21fzf857css43cccm", SessionId: ydb://session/3?node_id=1&id=YWNmZjc1MTEtOTMzZWRkNDUtOWJmMWIwMTQtNTQ0MjBjNmU=, Slow query, duration: 36.276198s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:42.329617Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:42.330061Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:42.330598Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7486788254614287495:7812];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224039392; 2025-03-28T09:22:42.331005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::TPCDS87+ColumnStore >> KqpJoinOrder::TestJoinOrderHintsSimple-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::RightSemiJoin_SecondaryIndex [GOOD] Test command err: Trying to start YDB, gRPC: 13884, MsgBus: 27140 2025-03-28T09:22:42.777446Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788402421217427:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:42.811036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00174b/r3tmp/tmpCTIlEc/pdisk_1.dat 2025-03-28T09:22:43.411723Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:43.441069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:43.441150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:43.443535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13884, node 1 2025-03-28T09:22:43.664627Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:43.664648Z node 1 :NET_CLASSIFIER WARN: will try to initialize 
from file: (empty maybe) 2025-03-28T09:22:43.664655Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:43.664752Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27140 TClient is connected to server localhost:27140 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:44.804357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:44.855167Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:44.880695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.082894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:45.385317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:45.513159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:47.786858Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788402421217427:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:47.786983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:47.929147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788423896055549:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:47.929257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:48.283929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:48.323520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:48.378374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:48.424902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:48.461184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:48.544501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:48.636856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788428191023368:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:48.636923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:48.637252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788428191023373:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:48.641059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:48.658499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788428191023375:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:48.738623Z node 1 :TX_PROXY ERROR: Actor# [1:7486788428191023430:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:50.008402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.059897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.150101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.210067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.243614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:51.454947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480
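[Editor's note, not harness output] The KQP_SLOW_LOG entry above (TraceId "01jqe0y0b21fzf857css43cccm", duration 36.276198s) stores its statement as a single escaped string in the "text" field. Unescaping the \n sequences, with no other changes, yields the following YQL DDL; the identical text reappears later in this section under TraceId "01jqe0xwayfrrtt06rx2f6fcv9":

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The pairs of warnings just below ("Execution, code: 1060" followed by "Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001" at positions 4:43 and 3:53) say only that the planner skipped cost-based join ordering because table statistics were not yet available; the test itself is still reported [GOOD] in its header.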
: Warning: Execution, code: 1060
:4:43: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
: Warning: Execution, code: 1060
:3:53: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 >> KqpIndexLookupJoin::LeftOnly+StreamLookup >> KqpJoinOrder::TPCDS94+ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::JoinConvert [GOOD] Test command err: Trying to start YDB, gRPC: 20537, MsgBus: 20247 2025-03-28T09:22:46.878992Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788421131392492:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:46.879518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00173d/r3tmp/tmp4jCkXP/pdisk_1.dat 2025-03-28T09:22:47.542203Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:47.591726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:47.591820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:47.597170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20537, node 1 2025-03-28T09:22:47.851214Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:47.851230Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:47.851236Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:47.851324Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20247 TClient is connected to server localhost:20247 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:48.886265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:48.899791Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:48.915047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:22:49.145059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:49.417912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:49.518191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:51.600970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788442606230601:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:51.601075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:51.782789Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788421131392492:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:51.782870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:51.935775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:51.993613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.047169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.085664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.166824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.220635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.316081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788446901198419:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:52.316159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:52.316621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788446901198424:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:52.321202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:52.339669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788446901198426:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:52.430815Z node 1 :TX_PROXY ERROR: Actor# [1:7486788446901198483:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:53.935836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:53.976930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:54.015668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:54.460686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::CanonizedJoinOrderTPCDS78 [GOOD] >> OlapEstimationRowsCorrectness::TPCH5 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCHRandomJoinViewJustWorks+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 21845, MsgBus: 18213 2025-03-28T09:21:09.767981Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788000474227121:2183];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:09.787624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001817/r3tmp/tmpiUroWq/pdisk_1.dat 2025-03-28T09:21:10.361866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:10.361988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:10.371334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:21:10.423359Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21845, node 1 2025-03-28T09:21:10.658889Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:21:10.715242Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:10.715266Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:10.715273Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:10.715378Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18213 TClient is connected to server localhost:18213 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:11.657440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:14.452785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788021949064136:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:14.452928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:14.453549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788021949064148:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:14.463580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:21:14.487055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788021949064150:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:21:14.579696Z node 1 :TX_PROXY ERROR: Actor# [1:7486788021949064201:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:14.766279Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788000474227121:2183];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:14.778255Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:15.090880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:21:15.396606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788026244031769:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:15.396972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788026244031769:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:15.397270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788026244031769:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:15.397429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788026244031769:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:15.397563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788026244031769:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:15.397761Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788026244031769:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:15.397898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788026244031769:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:15.398016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788026244031769:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:15.398141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788026244031769:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:15.398273Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7486788026244031769:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:15.398390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788026244031769:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:15.398541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788026244031769:2356];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:15.446160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788026244031767:2355];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:15.446253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788026244031767:2355];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:15.446483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788026244031767:2355];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:15.446608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788026244031767:2355];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:15.446941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788026244031767:2355];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:15.447049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788026244031767:2355];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:15.447152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788026244031767:2355];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:15.447255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788026244031767:2355];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:15.447358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788026244031767:2355];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:15.447467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788026244031767:2355];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:15.447565Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7486788026244031767:2355];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:15.447664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788026244031767:2355];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:15.449696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:74867880262440 ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.421020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.432024Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.436902Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.443717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.444366Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.449866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.454341Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.464366Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.467752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.473284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.476812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.478976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.482212Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.485067Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.488521Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.491812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.494845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.497971Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.501119Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.504047Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.507409Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.510166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.513399Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.516507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.520299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.522595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.527440Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.531523Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.532503Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.538652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.539761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.544969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.551188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.551353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.557922Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.563802Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.565302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.572208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.574197Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.579056Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.585735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.593949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.596595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.600464Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.609467Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.815017Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0xwayfrrtt06rx2f6fcv9", SessionId: ydb://session/3?node_id=1&id=MjY4MDFkNTMtOTM0MTVkYWMtMTJmM2ViMzQtZDFkNmQxN2E=, Slow query, duration: 38.119588s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:40.388237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:40.388690Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:40.389293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7486788236697465512:7837];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-03-28T09:22:40.389721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpFlipJoin::Right_3 |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinOrderHintsSimple-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 12358, MsgBus: 18773 2025-03-28T09:22:03.842503Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788233452489560:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:03.857949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017a7/r3tmp/tmpPAvUxY/pdisk_1.dat 2025-03-28T09:22:04.531353Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:04.542589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:04.542659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:04.567562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
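[Editor's note, not harness output] The KQP_SLOW_LOG entry above (TraceId "01jqe0xwayfrrtt06rx2f6fcv9", duration 38.119588s, at the end of the KqpJoinOrder::TPCHRandomJoinViewJustWorks+ColumnStore output) embeds the same three CREATE TABLE statements shown unescaped earlier in this section. One plausible reading of the surrounding noise, offered as interpretation: STORE = COLUMN combined with AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 asks for roughly 240 column shards per table, which would account for the long runs of per-tablet "event=finished_tx" warnings around tx_id 281474976710714 and 281474976710716, since each tablet_id appears to log the same record once.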
TServer::EnableGrpc on GrpcPort 12358, node 1 2025-03-28T09:22:04.822196Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:04.822216Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:04.822222Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:04.822308Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18773 TClient is connected to server localhost:18773 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:05.774102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:08.427552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788254927326575:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:08.427657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:08.435324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788254927326587:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:08.439546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:08.452774Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:22:08.453825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788254927326589:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:08.550562Z node 1 :TX_PROXY ERROR: Actor# [1:7486788254927326640:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:08.803057Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788233452489560:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:08.803143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:08.934894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.080126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.133224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.182444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.223244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.457934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.496716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.527578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.598844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.643033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.678668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.721438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
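[Editor's note, not harness output] The recurring resource-pool sequence in each test block reads as a benign startup race rather than a failure: TPoolFetcherActor first reports NOT_FOUND for pool "default", TPoolCreatorActor schedules a retry after "Transaction <txid> completed, doublechecking", and TX_PROXY then logs "Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it", i.e. the default pool was created concurrently and the retried request is accepted. The schemeshard operations that follow proceed normally.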
2025-03-28T09:22:09.757551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.502800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:22:10.548185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.589785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.655524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.698723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.743485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.813213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.869698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.912270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:22:10.956849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:22:11.034820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:22:11.080733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:22:11.155973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:22:11.191953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:22:11.226411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:22:11.256721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:22:11.289380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... 10714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.131675Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.137241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.139590Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.148982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038482;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.152135Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.157770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.159942Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.164044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.165367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038536;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.171641Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038474;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.174045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.176446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038532;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.184532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.186585Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.190679Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.196957Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.198951Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.205952Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038516;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.208398Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.212817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038554;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.215200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038548;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.219428Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038570;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.221041Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.225634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.227439Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.232174Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.237712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.238512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.245533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.245534Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038558;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.252009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.252009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.258450Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.260579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.266744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.267181Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.274258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038540;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.274294Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.281657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.281847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038508;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.289110Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038560;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.292549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.301900Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.304977Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:50.447377Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0y7976vrvnrg4tr7nkp8t", SessionId: ydb://session/3?node_id=1&id=MjA3NDhlOTktOTViZjI5NzQtOGI1MjQ3MjMtOWVhODY5NDA=, Slow query, duration: 37.543652s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:50.766072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:50.766444Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:50.767325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7486788392366306268:5789];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-03-28T09:22:50.767668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
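For readability, here is the SQL text of the KQP_SLOW_LOG entry above (duration 37.543652s) with its logged \n escapes expanded; this is a transcription of the logged query text, reflowed but otherwise unchanged:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

Three column-store tables with a 240-partition minimum each explain the long runs of per-tablet "finished_tx" entries above: each CREATE TABLE fans out to every column shard it creates, which is consistent with the 37.5s wall time under asan.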
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534
: Warning: Execution, code: 1060
: Warning: Unapplied hint: Rows(R T # 1), code: 4534 >> KqpIndexLookupJoin::Left-StreamLookup [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin-NotNull [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull [GOOD] >> KqpJoinOrder::FiveWayJoinWithPreds+ColumnStore >> KqpFlipJoin::Inner_1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::Left-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 10418, MsgBus: 27367 2025-03-28T09:22:40.350330Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788391333396014:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:40.350844Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001759/r3tmp/tmpeFPw3B/pdisk_1.dat 2025-03-28T09:22:40.980954Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:40.992072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:40.992155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:40.994294Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10418, node 1 2025-03-28T09:22:41.175176Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:41.175195Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:41.175201Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:41.175292Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27367 TClient is connected to server localhost:27367 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:42.088774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
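An aside on the "Unapplied hint: Rows(R T # 1), code: 4534" warnings a few entries above: they mean the optimizer received a cardinality hint it could not bind to any join in the final plan. A minimal sketch of how such a hint is supplied, assuming YDB's ydb.OptimizerHints pragma; the pragma name and the hint grammar are assumptions inferred from the warning text, not confirmed by this log:

-- Assumed pragma: 'Rows(R T # 1)' appears to pin the row estimate of the
-- join over labels R and T to 1 ('#' reading as "set to").
PRAGMA ydb.OptimizerHints = 'Rows(R T # 1)';
SELECT * FROM R AS R INNER JOIN T AS T ON R.id = T.id;  -- hypothetical tables; the labels bind to the aliases

If no join over the labels R and T survives into the executed plan, the hint is dropped and the warning above is emitted.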
2025-03-28T09:22:42.112713Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:42.125187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:42.338509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:42.585339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:42.704759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:44.717401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788408513266836:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:44.722901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:45.258188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.342978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.343140Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788391333396014:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:45.343303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:45.433635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.491208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.569534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.630398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.734985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788412808234664:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:45.735066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:45.735299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788412808234669:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:45.739581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:45.753484Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:22:45.753749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788412808234671:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:45.818760Z node 1 :TX_PROXY ERROR: Actor# [1:7486788412808234725:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:47.603907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:47.648905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:47.706428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:47.769682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:47.804783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:47.839209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22434, MsgBus: 2499 2025-03-28T09:22:50.369256Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788435840349070:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:50.369352Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001759/r3tmp/tmpU5y5xK/pdisk_1.dat 2025-03-28T09:22:50.690925Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:50.692024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:50.692084Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:50.693377Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22434, node 2 2025-03-28T09:22:50.835232Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:50.835256Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:50.835263Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:50.835369Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2499 TClient is connected to server localhost:2499 WaitRootIsUp 'Root'... 
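A note on the WorkloadService sequence above, which node 2 repeats verbatim below: on first use the service fetches the pool "default", gets NOT_FOUND, schedules its creation, then doublechecks once the transaction completes; the TX_PROXY "path exist, request accepts it" error is the benign tail of that race, because a concurrent creator has already materialized /Root/.metadata/workload_manager/pools/default. For illustration, an explicit equivalent, assuming YDB's CREATE RESOURCE POOL statement; the statement and its parameter names are assumptions for illustration, not taken from this log:

-- Hypothetical explicit creation of the pool these tests auto-create lazily;
-- parameter names are illustrative assumptions.
CREATE RESOURCE POOL default WITH (
    CONCURRENT_QUERY_LIMIT = -1,
    QUEUE_SIZE = -1
);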
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:51.612702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:51.623493Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:51.641329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:51.735769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:51.914608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:52.015393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:54.624035Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788453020220017:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:54.624126Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:54.678200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:54.735551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:54.831015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:54.948717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:55.028926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:55.097632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:55.211085Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788457315187834:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:55.211199Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:55.218872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788457315187839:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:55.227294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:55.253028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788457315187841:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:55.337368Z node 2 :TX_PROXY ERROR: Actor# [2:7486788457315187897:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:55.374829Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486788435840349070:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:55.374908Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:56.801404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:56.843270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:56.893035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:22:56.947783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:56.995747Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:57.040838Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> OlapEstimationRowsCorrectness::TPCH21 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt32ToInt16+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 3423, MsgBus: 26491 2025-03-28T09:22:43.265884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788406295243113:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:43.266583Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001749/r3tmp/tmp9JMJKh/pdisk_1.dat 2025-03-28T09:22:43.955611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:43.955690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:43.976816Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:43.979098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 3423, node 1 2025-03-28T09:22:44.206486Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:44.206508Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:44.206514Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:44.206619Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26491 TClient is connected to server localhost:26491 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:45.238881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:45.305685Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:45.318742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:45.590187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:45.826410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:45.934313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:48.225631Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788406295243113:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:48.225690Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:48.558748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788427770081247:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:48.558880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:49.011150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.052821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.090217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.129198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.172463Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.249074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.323695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788432065049064:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:49.323761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:49.323966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788432065049069:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:49.327841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:49.338371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788432065049071:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:49.415385Z node 1 :TX_PROXY ERROR: Actor# [1:7486788432065049126:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:50.699804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.786228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 7924, MsgBus: 20831 2025-03-28T09:22:52.725395Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788444028992029:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:52.833017Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001749/r3tmp/tmpTaNmhy/pdisk_1.dat 2025-03-28T09:22:53.023918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:53.023997Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:53.066242Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:53.071956Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7924, node 2 2025-03-28T09:22:53.247477Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:53.247517Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:53.247528Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:53.247641Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20831 TClient is connected to server localhost:20831 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:22:54.092844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:54.126139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:54.261531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:54.504721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:22:54.650574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:22:57.008815Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788461208862815:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:57.008924Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:57.128694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:57.174574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:57.220660Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:57.260441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:57.301191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:57.357984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:57.425028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788465503830624:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:57.425118Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:57.425424Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788465503830629:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:57.429467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:57.443645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788465503830631:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:57.525659Z node 2 :TX_PROXY ERROR: Actor# [2:7486788465503830686:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:57.717476Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486788444028992029:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:57.718125Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:58.747749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:58.842466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 >> TFlatTest::CopyCopiedTableAndRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH5 [GOOD] Test command err: Trying to start YDB, gRPC: 19054, MsgBus: 9020 2025-03-28T09:21:09.248691Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788003456625301:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:09.498219Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00181a/r3tmp/tmp6pqbU0/pdisk_1.dat 2025-03-28T09:21:09.853904Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:09.853999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:09.863060Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:09.864349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19054, node 1 2025-03-28T09:21:10.103675Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:10.103716Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:10.103730Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:10.103825Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9020 TClient is connected to server localhost:9020 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:11.127334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:13.772691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788020636495006:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:13.772778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788020636495014:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:13.772843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:13.777105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:21:13.797739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788020636495020:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:21:13.885298Z node 1 :TX_PROXY ERROR: Actor# [1:7486788020636495071:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:14.323677Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788003456625301:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:14.323745Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:14.475696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:21:14.941711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788024931462662:2364];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:14.941924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788024931462662:2364];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:14.942207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788024931462662:2364];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:14.942328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788024931462662:2364];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:14.942436Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788024931462662:2364];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:14.942542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788024931462662:2364];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:14.942641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788024931462662:2364];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:14.946976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788024931462662:2364];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:14.947197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788024931462662:2364];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:14.947314Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7486788024931462662:2364];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:14.947428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788024931462662:2364];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:14.947533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788024931462662:2364];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:14.982861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788024931462652:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:14.982988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788024931462652:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:14.983243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788024931462652:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:14.983401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788024931462652:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:14.983517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788024931462652:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:14.983632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788024931462652:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:14.983759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788024931462652:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:14.983903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788024931462652:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:14.984040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788024931462652:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:14.984161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788024931462652:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:14.984300Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7486788024931462652:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:14.984464Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788024931462652:2359];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:15.006611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788024931462648:2357];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp: ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.492497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.500538Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.507139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.513131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.519037Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.525329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.530496Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.536481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.542128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.550480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.558198Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.564367Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.573348Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.588539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.588986Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.594071Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.598993Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.608225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.612205Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.617543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.620122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.629100Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.631384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.638179Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.641148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.646892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.652241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.656669Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.752151Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.759066Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.764988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.770115Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.781800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.786800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.792030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.797739Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.800165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.808460Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.810374Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.827754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.837432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.846271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.863847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.878725Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:35.934372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:36.126936Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0xvf47j9hec170wpaasmn", SessionId: ydb://session/3?node_id=1&id=OGZkY2IwYTUtMTA2OWQwMGYtMmQ1YzQxZDItYWZjZmQ0MTk=, Slow query, duration: 35.321623s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:36.631712Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:36.632075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:36.632373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486788286924514499:9423];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-28T09:22:36.632716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCDS78 [GOOD] Test command err: Trying to start YDB, gRPC: 26015, MsgBus: 18685 2025-03-28T09:20:35.275219Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787856695663573:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:20:35.289953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00184d/r3tmp/tmpaXXL77/pdisk_1.dat 2025-03-28T09:20:35.895867Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:35.921429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:35.921548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:35.925328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26015, node 1 2025-03-28T09:20:36.093659Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:36.093695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:36.093712Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:36.093919Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18685 TClient is connected to server localhost:18685 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:20:36.788039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:20:36.812197Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:20:38.942832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787869580566101:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:38.942968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:38.943020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486787869580566113:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:38.959338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:20:38.975442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486787869580566115:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:20:39.048600Z node 1 :TX_PROXY ERROR: Actor# [1:7486787873875533463:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:39.408901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:20:39.688149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787873875533692:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:39.688149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787873875533698:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:39.688349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787873875533698:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:39.688593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787873875533698:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:39.688684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787873875533698:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:39.688796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787873875533698:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:39.688880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787873875533698:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:39.688954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787873875533698:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:39.689055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787873875533698:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:39.689153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787873875533692:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:39.689174Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7486787873875533698:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:39.689346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787873875533692:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:39.689405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787873875533698:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:39.689473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787873875533692:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:39.689492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787873875533698:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:39.689575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787873875533692:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:39.689591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486787873875533698:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:39.689678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787873875533692:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:39.689796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787873875533692:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:39.689921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787873875533692:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:39.690046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787873875533692:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:39.690142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787873875533692:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:39.690292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486787873875533692:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:39.690411Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486787873875533692:2349];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:39.729815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486787873875533696:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:39.729879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486787873875533696:2351];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abs ... WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.280427Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.280919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.289245Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.291242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.296537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.302121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.303669Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039286;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.307507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.308929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.314908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.320320Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.324988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039314;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.326591Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.330724Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.336415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.340150Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.341551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.346134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.347402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.352442Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.355691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.365332Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.367545Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039302;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.371402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.372796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.376937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.378057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.383445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
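For readability: the KQP_SLOW_LOG records just below embed their query text as a single line with escaped newlines (\n), which makes the slow queries hard to read in place. The two blocks that follow are reconstructions obtained purely by expanding those escapes and normalizing indentation; nothing beyond the log text itself is added. First, the DDL from the 35.068256s "CREATE TABLE" slow query:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);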
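Second, the 21.674807s slow query is TPC-DS query 78 in YQL: three named subqueries ($ws, $cs, $ss) aggregate per-channel sales net of returns, and the final select joins the web and catalog aggregates back to store sales. Unescaped, the quoted query text reads:

PRAGMA TablePathPrefix='/Root/test/ds';

-- NB: Subquerys
$ws =
    (select date_dim.d_year AS ws_sold_year, web_sales.ws_item_sk ws_item_sk,
        web_sales.ws_bill_customer_sk ws_customer_sk,
        sum(ws_quantity) ws_qty,
        sum(ws_wholesale_cost) ws_wc,
        sum(ws_sales_price) ws_sp
    from web_sales as web_sales
    left join web_returns as web_returns on web_returns.wr_order_number=web_sales.ws_order_number and web_sales.ws_item_sk=web_returns.wr_item_sk
    join date_dim as date_dim on web_sales.ws_sold_date_sk = date_dim.d_date_sk
    where wr_order_number is null
    group by date_dim.d_year, web_sales.ws_item_sk, web_sales.ws_bill_customer_sk
    );

$cs =
    (select date_dim.d_year AS cs_sold_year, catalog_sales.cs_item_sk cs_item_sk,
        catalog_sales.cs_bill_customer_sk cs_customer_sk,
        sum(cs_quantity) cs_qty,
        sum(cs_wholesale_cost) cs_wc,
        sum(cs_sales_price) cs_sp
    from catalog_sales as catalog_sales
    left join catalog_returns as catalog_returns on catalog_returns.cr_order_number=catalog_sales.cs_order_number and catalog_sales.cs_item_sk=catalog_returns.cr_item_sk
    join date_dim as date_dim on catalog_sales.cs_sold_date_sk = date_dim.d_date_sk
    where cr_order_number is null
    group by date_dim.d_year, catalog_sales.cs_item_sk, catalog_sales.cs_bill_customer_sk
    );

$ss =
    (select date_dim.d_year AS ss_sold_year, store_sales.ss_item_sk ss_item_sk,
        store_sales.ss_customer_sk ss_customer_sk,
        sum(ss_quantity) ss_qty,
        sum(ss_wholesale_cost) ss_wc,
        sum(ss_sales_price) ss_sp
    from store_sales as store_sales
    left join store_returns as store_returns on store_returns.sr_ticket_number=store_sales.ss_ticket_number and store_sales.ss_item_sk=store_returns.sr_item_sk
    join date_dim as date_dim on store_sales.ss_sold_date_sk = date_dim.d_date_sk
    where sr_ticket_number is null
    group by date_dim.d_year, store_sales.ss_item_sk, store_sales.ss_customer_sk
    );

-- start query 1 in stream 0 using template query78.tpl and seed 1819994127
select
    ss_sold_year, ss_item_sk, ss_customer_sk,
    cast(ss_qty as double)/(coalesce(ws_qty,0)+coalesce(cs_qty,0)) ratio,
    ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price,
    coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty,
    coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost,
    coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price
from $ss ss
left join $ws ws on (ws.ws_sold_year=ss.ss_sold_year and ws.ws_item_sk=ss.ss_item_sk and ws.ws_customer_sk=ss.ss_customer_sk)
left join $cs cs on (cs.cs_sold_year=ss.ss_sold_year and cs.cs_item_sk=ss.ss_item_sk and cs.cs_customer_sk=ss.ss_customer_sk)
where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=2001
order by
    ss_sold_year, ss_item_sk, ss_customer_sk,
    store_qty desc, store_wholesale_cost desc, store_sales_price desc,
    other_chan_qty,
    other_chan_wholesale_cost,
    other_chan_sales_price,
    ratio
limit 100;
-- end query 1 in stream 0 using template query78.tpl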
2025-03-28T09:22:01.460542Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:01.523070Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0wsxp4c04wzjv9bd4hq6m", SessionId: ydb://session/3?node_id=1&id=NzlkYWZlNWItOWYwMjY3ODMtNGFjMzUyMjYtMTkzMzJiNzU=, Slow query, duration: 35.068256s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:02.472572Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:02.472995Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:02.473574Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486788183113235535:11051];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-28T09:22:02.473917Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:41.162518Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0ydpy8tnca1v011b883j0", SessionId: ydb://session/3?node_id=1&id=NzlkYWZlNWItOWYwMjY3ODMtNGFjMzUyMjYtMTkzMzJiNzU=, Slow query, duration: 21.674807s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "PRAGMA TablePathPrefix='/Root/test/ds';\n\n-- NB: Subquerys\n\n$ws =\n\n (select date_dim.d_year AS ws_sold_year, web_sales.ws_item_sk ws_item_sk,\n\n web_sales.ws_bill_customer_sk ws_customer_sk,\n\n sum(ws_quantity) ws_qty,\n\n sum(ws_wholesale_cost) ws_wc,\n\n sum(ws_sales_price) ws_sp\n\n from web_sales as web_sales\n\n left join web_returns as web_returns on web_returns.wr_order_number=web_sales.ws_order_number and web_sales.ws_item_sk=web_returns.wr_item_sk\n\n join date_dim as date_dim on web_sales.ws_sold_date_sk = date_dim.d_date_sk\n\n where wr_order_number is null\n\n group by date_dim.d_year, web_sales.ws_item_sk, web_sales.ws_bill_customer_sk\n\n );\n\n$cs =\n\n (select date_dim.d_year AS cs_sold_year, catalog_sales.cs_item_sk cs_item_sk,\n\n catalog_sales.cs_bill_customer_sk cs_customer_sk,\n\n sum(cs_quantity) cs_qty,\n\n sum(cs_wholesale_cost) cs_wc,\n\n sum(cs_sales_price) cs_sp\n\n from catalog_sales as catalog_sales\n\n left join catalog_returns as catalog_returns on catalog_returns.cr_order_number=catalog_sales.cs_order_number and catalog_sales.cs_item_sk=catalog_returns.cr_item_sk\n\n join date_dim as date_dim on catalog_sales.cs_sold_date_sk = 
date_dim.d_date_sk\n\n where cr_order_number is null\n\n group by date_dim.d_year, catalog_sales.cs_item_sk, catalog_sales.cs_bill_customer_sk\n\n );\n\n$ss=\n\n (select date_dim.d_year AS ss_sold_year, store_sales.ss_item_sk ss_item_sk,\n\n store_sales.ss_customer_sk ss_customer_sk,\n\n sum(ss_quantity) ss_qty,\n\n sum(ss_wholesale_cost) ss_wc,\n\n sum(ss_sales_price) ss_sp\n\n from store_sales as store_sales\n\n left join store_returns as store_returns on store_returns.sr_ticket_number=store_sales.ss_ticket_number and store_sales.ss_item_sk=store_returns.sr_item_sk\n\n join date_dim as date_dim on store_sales.ss_sold_date_sk = date_dim.d_date_sk\n\n where sr_ticket_number is null\n\n group by date_dim.d_year, store_sales.ss_item_sk, store_sales.ss_customer_sk\n\n );\n\n-- start query 1 in stream 0 using template query78.tpl and seed 1819994127\n\n select\n\nss_sold_year, ss_item_sk, ss_customer_sk,\n\ncast(ss_qty as double)/(coalesce(ws_qty,0)+coalesce(cs_qty,0)) ratio,\n\nss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price,\n\ncoalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty,\n\ncoalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost,\n\ncoalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price\n\nfrom $ss ss\n\nleft join $ws ws on (ws.ws_sold_year=ss.ss_sold_year and ws.ws_item_sk=ss.ss_item_sk and ws.ws_customer_sk=ss.ss_customer_sk)\n\nleft join $cs cs on (cs.cs_sold_year=ss.ss_sold_year and cs.cs_item_sk=ss.ss_item_sk and cs.cs_customer_sk=ss.ss_customer_sk)\n\nwhere (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=2001\n\norder by\n\n ss_sold_year, ss_item_sk, ss_customer_sk,\n\n store_qty desc, store_wholesale_cost desc, store_sales_price desc,\n\n other_chan_qty,\n\n other_chan_wholesale_cost,\n\n other_chan_sales_price,\n\n ratio\n\nlimit 100;\n\n\n\n-- end query 1 in stream 0 using template query78.tpl", parameters: 0b >> KqpJoin::IdxLookupSelf >> KqpJoinOrder::CanonizedJoinOrderTPCH13 >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+ColumnStore |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpIndexLookupJoin::Inner+StreamLookup >> KqpJoinOrder::TPCDS92-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH21 [GOOD] Test command err: Trying to start YDB, gRPC: 32293, MsgBus: 15333 2025-03-28T09:21:07.871485Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486787994868176006:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:07.872378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00181b/r3tmp/tmpUPqxxo/pdisk_1.dat 2025-03-28T09:21:08.520120Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:08.544665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:08.544741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:08.555737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32293, node 1 2025-03-28T09:21:08.712505Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-03-28T09:21:08.712555Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:08.712569Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:08.712718Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15333 TClient is connected to server localhost:15333 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:09.693008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:09.715441Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:12.007673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788016343013152:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:12.007810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:12.013999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788016343013164:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:12.019111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:21:12.040895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788016343013166:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:21:12.102399Z node 1 :TX_PROXY ERROR: Actor# [1:7486788016343013217:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:12.557033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:21:12.840142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788016343013453:2362];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:12.840408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788016343013453:2362];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:12.840698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788016343013453:2362];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:12.840824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788016343013453:2362];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:12.840942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788016343013453:2362];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:12.841072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788016343013453:2362];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:12.841235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788016343013453:2362];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:12.841352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788016343013453:2362];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:12.841475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788016343013453:2362];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:12.841583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788016343013453:2362];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:12.841697Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7486788016343013453:2362];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:12.841840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788016343013453:2362];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:12.843499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788016343013439:2355];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:12.843557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788016343013439:2355];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:12.843777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788016343013439:2355];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:12.843947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788016343013439:2355];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:12.844074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788016343013439:2355];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:12.844181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788016343013439:2355];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:12.844277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788016343013439:2355];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:12.844382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788016343013439:2355];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:12.844525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788016343013439:2355];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:12.844653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788016343013439:2355];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:12.844780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788016343013439:2355];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:12.844901Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7486788016343013439:2355];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:12.887766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788016343013660:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:12.888085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788016343013660:2364];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abs ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.785166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039270;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.795256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.801094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039302;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.801982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.807791Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039230;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.816582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039242;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.819346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.825548Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.826170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039228;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.833708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039310;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.834357Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.841020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.841024Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.848101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.849440Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.858823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039292;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.860842Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.865587Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.869268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.872631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.877446Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039210;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.879121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039194;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.883675Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.898789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.899752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039272;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.905131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039212;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.905526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039200;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.911027Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039256;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.911190Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.918360Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.925057Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.925152Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039266;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.933322Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.935580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.940248Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.941970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039304;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.946140Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039262;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.959070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.963146Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.965548Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039232;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.969221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.972826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.981580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.987751Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039312;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:38.990762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:39.213538Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0xt94e15px4qth7dec5sq", SessionId: ydb://session/3?node_id=1&id=ZWZjYTBmOGMtOTE1ZWQ5YWMtN2YxNjE5ZjYtNWVhMjExOTY=, Slow query, duration: 39.624590s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:39.578943Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:39.579428Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:39.579955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486788342760584612:11178];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-28T09:22:39.580340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::TPCDS94-ColumnStore [GOOD] >> TVectorIndexTests::VectorKmeansTreePostingImplTable >> KqpService::PatternCache [GOOD] >> KqpService::RangeCache+UseCache >> TVectorIndexTests::VectorKmeansTreePostingImplTable [GOOD] >> KqpIndexLookupJoin::LeftOnly+StreamLookup [GOOD] >> TFlatTest::CopyCopiedTableAndRead [GOOD] >> TFlatTest::CopyTableAndAddFollowers >> KqpIndexLookupJoin::JoinWithComplexCondition+StreamLookupJoin [GOOD] >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::VectorKmeansTreePostingImplTable [GOOD] >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS92-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 28426, MsgBus: 9889 2025-03-28T09:22:02.167779Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788230551127722:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:02.167845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017a9/r3tmp/tmp2axbeq/pdisk_1.dat 2025-03-28T09:22:02.862829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:02.862932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:02.864955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:22:02.953430Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28426, node 1 2025-03-28T09:22:03.103239Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:03.103264Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:03.103277Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:03.103377Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9889 TClient is connected to server localhost:9889 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:03.913907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:03.945682Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:06.244704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788247730997559:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:06.244821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:06.245215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788247730997571:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:06.249124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:06.269728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788247730997573:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:06.370601Z node 1 :TX_PROXY ERROR: Actor# [1:7486788247730997624:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:06.763285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:06.901907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:22:06.949783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:06.985072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.037830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.193772Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788230551127722:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:07.194123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:07.300706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.353076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.387718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.474387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.516395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.566851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:22:07.623629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
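[readability note] The CREATE TABLE batch that the KQP_SLOW_LOG entries in this log quote with \n escapes (e.g. the 39.624590s entry above and the 34.272404s entry below) is hard to read inline. It is reproduced here de-escaped; whitespace is reconstructed from the escapes, so this is a rendering of the logged query text, not part of the raw test output:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);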
2025-03-28T09:22:07.657853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.385504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:22:08.436034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.471326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.504630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.544551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.581883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.629282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.675417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.712842Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.759131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.813176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.892015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:22:08.968130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.001305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.045390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.082913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:22:09.120470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.578573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.579512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038463;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.585670Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.589641Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.591855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.597583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.598643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.606338Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.607694Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.613532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.620642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.624356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.633938Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.639217Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.640087Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.653353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.653354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.664482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.667591Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.674474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.679485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.680638Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.685301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.690504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.694975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038510;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.700897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.707045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.707178Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038461;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.713392Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.715800Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.719821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.725924Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.728070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.735755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.741951Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.743063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.749798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.756565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.762758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.772500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.778186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.780372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.789762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.792040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.797473Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:44.914702Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0y52f91bedtdgb9xsfv10", SessionId: ydb://session/3?node_id=1&id=ZDhmZDJkZmEtM2YwNDY5MzgtNDVjMTMxYjMtNzFjOTNhOA==, Slow query, duration: 34.272404s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:45.812476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7486788277795775599:2984];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-03-28T09:22:45.812556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:45.812822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:45.813200Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TColumnShardTestSchema::RebootForgetWithLostAnswer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::LeftOnly+StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 16214, MsgBus: 1214 2025-03-28T09:22:57.768418Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788464188672061:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:57.768804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001731/r3tmp/tmperxdpT/pdisk_1.dat 2025-03-28T09:22:58.480874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:58.480969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:58.615528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:22:58.700110Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16214, node 1 2025-03-28T09:22:58.778051Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:22:58.958855Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:58.958878Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:58.958885Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:58.959013Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1214 TClient is connected to server localhost:1214 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:59.793698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:59.853426Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:59.867028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:00.098568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:00.307568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:00.403545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:02.416963Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788485663510163:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:02.417089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:02.757844Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788464188672061:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:02.757914Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:02.761011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.844776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.923469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.958612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:23:03.041619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:23:03.122220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:23:03.187067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788489958477980:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:03.187180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:03.187517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788489958477985:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:03.193940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:23:03.216259Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:23:03.219146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788489958477987:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:23:03.274384Z node 1 :TX_PROXY ERROR: Actor# [1:7486788489958478041:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:04.536219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.609517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.645914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.696115Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.731398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.803143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::CanonizedJoinOrderTPCH4 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS94-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 1647, MsgBus: 12088 2025-03-28T09:21:55.807346Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788199719418366:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017cd/r3tmp/tmpJWBQgo/pdisk_1.dat 2025-03-28T09:21:56.137675Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:21:56.447736Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:56.450886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:56.450987Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:56.455723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1647, node 1 2025-03-28T09:21:56.635173Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:56.635197Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:56.635205Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:56.635296Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12088 TClient is 
connected to server localhost:12088 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:57.547531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:59.683596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788216899288026:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:59.683713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788216899288038:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:59.683783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:59.689417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:21:59.704968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788216899288040:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:21:59.764709Z node 1 :TX_PROXY ERROR: Actor# [1:7486788216899288091:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:00.101635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.234966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.270466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.305161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.336667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.615714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.670446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.720007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.775872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.803161Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788199719418366:2220];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:00.803232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:00.848471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.918981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:22:00.958105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
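[readability note] The TPC-DS query94 text quoted with \n escapes by the 12.449883s KQP_SLOW_LOG entry later in this dump (TraceId "01jqe0zagbe3qtmwds1pwv0sgr") is reproduced here de-escaped, verbatim including the logged comments; again a rendering of the logged text, not part of the raw output:

pragma TablePathPrefix = "/Root/test/ds/";

-- NB: Subquerys
$bla1 = (select ws_order_number
    from web_sales
    group by ws_order_number
    having COUNT(DISTINCT ws_warehouse_sk) > 1);

-- start query 1 in stream 0 using template query94.tpl and seed 2031708268
select
   count(distinct ws1.ws_order_number) as `order count`
  ,sum(ws_ext_ship_cost) as `total shipping cost`
  ,sum(ws_net_profit) as `total net profit`
from
   web_sales ws1
   cross join date_dim
   cross join customer_address
   cross join web_site
   left semi join $bla1 bla1 on (ws1.ws_order_number = bla1.ws_order_number)
   left only join web_returns on (ws1.ws_order_number = web_returns.wr_order_number)
where
    cast(d_date as date) between cast('1999-4-01' as date) and
       (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))
and ws1.ws_ship_date_sk = d_date_sk
and ws1.ws_ship_addr_sk = ca_address_sk
and ca_state = 'NE'
and ws1.ws_web_site_sk = web_site_sk
and web_company_name = 'pri'
order by `order count`
limit 100;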
2025-03-28T09:22:01.001283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:01.703855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:22:01.749169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:01.819083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:01.881410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:22:01.960540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.010708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.059208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.146139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.184564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.235753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.317965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.352644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.391504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.439350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.481417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.532553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.585313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.650889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but pro ... 76710714; 2025-03-28T09:22:37.331502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038550;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.337522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038441;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.338283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038439;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.343178Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.343705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038445;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.348966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038443;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.349647Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.354468Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.354617Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038552;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.359815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038427;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.364013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.364014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038449;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.373469Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038431;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.380880Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.386340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.390255Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038447;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.391803Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038473;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.397369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038465;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.400692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038455;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.406050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.408253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038463;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.413611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038467;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.419432Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.419947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.429701Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038471;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.430580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038491;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.435401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038459;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.441259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038429;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.443827Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038497;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.447380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.448826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.454213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038475;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.457499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.468191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038433;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.470668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038453;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.473769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038435;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.475801Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.479622Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038493;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.481306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038469;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:37.603066Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0xyx0dg0k47ay7n2985ej", SessionId: ydb://session/3?node_id=1&id=ZjE5ZWZhNDgtYTE4MzM1NWMtNTI0NWZjNjAtZDgwMzg5Njg=, Slow query, duration: 33.282174s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:38.235131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:38.235402Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:38.236306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7486788246964065743:2904];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-03-28T09:22:38.236534Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:01.422026Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0zagbe3qtmwds1pwv0sgr", SessionId: ydb://session/3?node_id=1&id=ZjE5ZWZhNDgtYTE4MzM1NWMtNTI0NWZjNjAtZDgwMzg5Njg=, Slow query, duration: 12.449883s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n$bla1 = (select ws_order_number\n from web_sales\n group by ws_order_number\n having COUNT(DISTINCT ws_warehouse_sk) > 1);\n\n-- start query 1 in stream 0 using template query94.tpl and seed 2031708268\nselect\n count(distinct ws1.ws_order_number) as `order count`\n ,sum(ws_ext_ship_cost) as `total shipping cost`\n ,sum(ws_net_profit) as `total net profit`\nfrom\n web_sales ws1\n cross join date_dim\n cross join customer_address\n cross join web_site\n left semi join $bla1 bla1 on (ws1.ws_order_number = bla1.ws_order_number)\n left only join web_returns on (ws1.ws_order_number = web_returns.wr_order_number)\nwhere\n cast(d_date as date) between cast('1999-4-01' as date) and\n (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))\nand ws1.ws_ship_date_sk = d_date_sk\nand ws1.ws_ship_addr_sk = ca_address_sk\nand ca_state = 'NE'\nand ws1.ws_web_site_sk = web_site_sk\nand web_company_name = 'pri'\norder by `order count`\nlimit 100;\n", parameters: 0b >> KqpJoinOrder::CanonizedJoinOrderTPCH6 [GOOD] >> KqpFlipJoin::Right_3 [GOOD] >> KqpIndexLookupJoin::CheckAllKeyTypesCast >> LabeledDbCounters::TwoTabletsKillOneTablet [GOOD] >> SystemView::AuthEffectivePermissions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckCastInt64ToUint64+StreamLookupJoin+NotNull [GOOD] Test command err: Trying to start YDB, gRPC: 21051, MsgBus: 13941 2025-03-28T09:22:51.774952Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788438749553268:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:52.058017Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00173b/r3tmp/tmp2Y466u/pdisk_1.dat 2025-03-28T09:22:52.475485Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:52.513329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:52.513403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:52.529800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21051, 
node 1 2025-03-28T09:22:52.803120Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:52.803137Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:52.803143Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:52.803236Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13941 TClient is connected to server localhost:13941 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:53.934962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:53.993227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:54.291438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:54.553072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:54.634887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:56.459425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788460224391372:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:56.459522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:56.694988Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788438749553268:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:56.695058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:56.780076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:56.851126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:56.889753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:56.938278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:56.979943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:57.033738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:57.090416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788464519359183:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:57.090497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:57.090716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788464519359188:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:57.094275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:22:57.118023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788464519359190:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:22:57.180344Z node 1 :TX_PROXY ERROR: Actor# [1:7486788464519359245:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:58.645040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:58.729552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 11803, MsgBus: 8843 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00173b/r3tmp/tmp9TDfqK/pdisk_1.dat 2025-03-28T09:23:00.766028Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:23:00.852011Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:00.868244Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:00.868326Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:00.875580Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11803, node 2 2025-03-28T09:23:01.039536Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:01.039554Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:01.039561Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:01.039660Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8843 TClient is connected to server localhost:8843 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:01.796735Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:23:01.803290Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:01.816687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:23:01.915512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:02.098039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:02.185095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:04.371700Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788494789645990:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:04.371769Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:04.420250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.460575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.497585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.538477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.586544Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.664637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.739528Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788494789646503:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:04.739606Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:04.739795Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788494789646508:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:04.742851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:23:04.758301Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788494789646510:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:23:04.864417Z node 2 :TX_PROXY ERROR: Actor# [2:7486788494789646568:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:06.161874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:23:06.241168Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TFlatTest::MiniKQLRanges >> KqpFlipJoin::Inner_1 [GOOD] >> KqpFlipJoin::Inner_2 >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF >> KqpJoinOrder::ShuffleEliminationOneJoin |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TFlatTest::CopyTableAndAddFollowers [GOOD] >> TFlatTest::CopyCopiedTableAndDropFirstCopy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH6 [GOOD] Test command err: Trying to start YDB, gRPC: 1093, MsgBus: 29958 2025-03-28T09:21:25.183350Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788069480922136:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:25.183699Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017fa/r3tmp/tmpFvFeGm/pdisk_1.dat 2025-03-28T09:21:25.957626Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:25.978471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:25.978555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:25.982234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1093, node 1 2025-03-28T09:21:26.270729Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:26.270748Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:26.270757Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:26.270863Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29958 TClient is connected to server localhost:29958 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:27.176580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:29.651281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788086660791847:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:29.651357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:29.654762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788086660791859:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:29.658923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:21:29.677634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788086660791861:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:21:29.746570Z node 1 :TX_PROXY ERROR: Actor# [1:7486788086660791912:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:30.134773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:21:30.184326Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788069480922136:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:30.184451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:30.489231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788090955759501:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:30.489550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788090955759501:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:30.489832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788090955759501:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:30.489929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788090955759501:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:30.490032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788090955759501:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:30.490147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788090955759501:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:30.490236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788090955759501:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:30.490364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788090955759501:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:30.490505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788090955759501:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:30.490621Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7486788090955759501:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:30.492033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788090955759426:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:30.492082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788090955759426:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:30.492309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788090955759426:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:30.492426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788090955759426:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:30.492537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788090955759426:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:30.492658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788090955759426:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:30.492770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788090955759426:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:30.492886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788090955759426:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:30.493019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788090955759426:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:30.493149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788090955759426:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:30.493281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788090955759426:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:30.493415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788090955759426:2350];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:30.495947Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7486788090955759501:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:30.496188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788090955759501:2355];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:30.550196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788090955759491:2351];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.412641Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.416944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.419308Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.423622Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.425195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.432002Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.432002Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.438038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.444333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.450347Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.451688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.457019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.460652Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.462203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.466901Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.468011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.472826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.473390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.478668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.478717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.485830Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.488195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.493899Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.498607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.508972Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.514611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.517873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.524340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.524410Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.529883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.529887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.535914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.535914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.544105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.546157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.550764Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.552474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.556719Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.558106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.563312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.567211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.570113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.714511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.750416Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.752288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:55.980922Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0yb57149s2sfa9rr4w9s9", SessionId: ydb://session/3?node_id=1&id=NjA3MDM5NGUtYzMxODRkOTAtZWIzYzdmOGItNzdlZTk1MDE=, Slow query, duration: 39.109042s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:22:56.376112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:56.376190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:22:56.376413Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486788417373331522:11334];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-28T09:22:56.376924Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH4 [GOOD] Test command err: Trying to start YDB, gRPC: 7387, MsgBus: 64857 2025-03-28T09:21:22.325914Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788058646926065:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:22.325961Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001801/r3tmp/tmp3k8WrF/pdisk_1.dat 2025-03-28T09:21:23.054537Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:23.069228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:23.069319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:23.073313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7387, node 1 2025-03-28T09:21:23.206010Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:23.206047Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:23.206057Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:23.206211Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64857 TClient is connected to server localhost:64857 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:24.105123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:24.143202Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:26.667188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788075826795923:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:26.667351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:26.669844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788075826795931:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:26.673928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:21:26.715400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788075826795937:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:21:26.815943Z node 1 :TX_PROXY ERROR: Actor# [1:7486788075826795988:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:27.291972Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788058646926065:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:27.292024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:27.305159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:21:27.600336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788080121763610:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:27.601063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788080121763610:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:27.601356Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788080121763610:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:27.601473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788080121763610:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:27.601615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788080121763610:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:27.601741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788080121763610:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:27.601852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788080121763610:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:27.601982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788080121763610:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:27.602113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788080121763610:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:27.602220Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037895;self_id=[1:7486788080121763610:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:27.602347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788080121763610:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:27.602466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788080121763610:2363];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:27.603948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788080121763539:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:27.603989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788080121763539:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:27.604226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788080121763539:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:27.604345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788080121763539:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:27.604445Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788080121763539:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:27.604542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788080121763539:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:27.604634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788080121763539:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:27.604751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788080121763539:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:27.604879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788080121763539:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:27.604987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788080121763539:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:27.605086Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7486788080121763539:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:27.605204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788080121763539:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:27.641859Z node 1 :TX_ ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.082961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.084199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.088204Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.089454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.093965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.100711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.105708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.109718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.114835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.120970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.122651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.125811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.132705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.134271Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.139873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.142013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.145163Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.147533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.150485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.153106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.155823Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.160134Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.161407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.166935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.168344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.172886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.174948Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.178254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.180969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.184898Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.186641Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.194950Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.207613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.211309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.222191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.231493Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.232762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.237663Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.243622Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.252895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.255079Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.262500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.269348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.381912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.388085Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:22:52.534968Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0y9ey0sjay382raczjxym", SessionId: ydb://session/3?node_id=1&id=YjBjNmVjYjktZTdkNDUzOGUtMjQ0M2RhOGEtYmUyYjkzMDg=, Slow query, duration: 37.399222s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-28T09:22:52.938755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:22:52.939231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:22:52.951511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7486788294870163866:7655];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094;
2025-03-28T09:22:52.951930Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> KqpJoinOrder::CanonizedJoinOrderTPCH17 [GOOD]
>> KqpJoin::IdxLookupSelf [GOOD]
>> KqpFlipJoin::RightSemi_1
>> KqpIndexLookupJoin::Inner+StreamLookup [GOOD]
>> KqpIndexLookupJoin::Inner-StreamLookup
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoin::IdxLookupSelf [GOOD]
Test command err: Trying to start YDB, gRPC: 2103, MsgBus: 21318
2025-03-28T09:23:04.374745Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788494889786589:2209];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:23:04.375220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016ee/r3tmp/tmpQdIEwM/pdisk_1.dat
2025-03-28T09:23:04.921906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:23:04.921985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:23:04.935631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:23:04.965596Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 2103, node 1
2025-03-28T09:23:05.151166Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:23:05.151186Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:23:05.151193Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:23:05.151284Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:21318
TClient is connected to server localhost:21318
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:23:05.957883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:23:05.979080Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:23:05.988833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:23:06.172800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:23:06.430668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:23:06.528075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:23:08.391817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788512069657388:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:23:08.391895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:23:08.706912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:23:08.742648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:23:08.789362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:23:08.857090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:23:08.919697Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:23:08.973026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:23:09.071025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788516364625199:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:23:09.071113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:23:09.071344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788516364625204:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:23:09.075526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:23:09.099197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788516364625206:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:23:09.199338Z node 1 :TX_PROXY ERROR: Actor# [1:7486788516364625263:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:23:09.370770Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788494889786589:2209];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:23:09.370856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:23:10.592506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:23:10.681549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480
2025-03-28T09:23:10.748353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Execution, code: 1060
:3:29: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
>> BasicStatistics::TwoServerlessTwoSharedDbs
>> TFlatTest::MiniKQLRanges [GOOD]
>> TFlatTest::MergeEmptyAndWrite
>> TLocksTest::Range_GoodLock0
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] [GOOD]
>> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1]
>> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH17 [GOOD]
Test command err: Trying to start YDB, gRPC: 27836, MsgBus: 28647
2025-03-28T09:21:20.151324Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788050829756477:2207];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:21:20.151962Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001805/r3tmp/tmp7MY1HT/pdisk_1.dat
2025-03-28T09:21:21.117751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:21:21.117829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:21:21.131012Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:21:21.136954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 27836, node 1
2025-03-28T09:21:21.439585Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:21:21.439609Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:21:21.439623Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:21:21.439737Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:28647
TClient is connected to server localhost:28647
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:21:22.669914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:21:25.146919Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788050829756477:2207];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:21:25.147002Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:21:25.427179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788072304593471:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:25.427269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:25.427270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788072304593482:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:21:25.434175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:21:25.472571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788072304593485:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-03-28T09:21:25.530879Z node 1 :TX_PROXY ERROR: Actor# [1:7486788072304593536:2344] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:21:26.042952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-28T09:21:26.303921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788076599561065:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-28T09:21:26.304155Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788076599561065:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-28T09:21:26.304481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788076599561065:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-28T09:21:26.304602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788076599561065:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-28T09:21:26.304719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788076599561065:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-28T09:21:26.304865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788076599561065:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-28T09:21:26.304978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788076599561065:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-28T09:21:26.305123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788076599561065:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-28T09:21:26.305235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788076599561065:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-28T09:21:26.305347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788076599561065:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-28T09:21:26.305466Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037895;self_id=[1:7486788076599561065:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:26.305592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486788076599561065:2358];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:26.308117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788076599561080:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:26.308183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788076599561080:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:26.308399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788076599561080:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:26.308518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788076599561080:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:26.308652Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788076599561080:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:26.308753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788076599561080:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:26.309107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788076599561080:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:26.309231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788076599561080:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:26.309364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788076599561080:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:26.309507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788076599561080:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:26.309632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788076599561080:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:26.309732Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7486788076599561080:2363];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:26.355261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788076599561075:2361];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.c ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:51.918140Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:51.924086Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:51.929363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:51.936340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:51.941821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:51.946586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:51.952855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:51.960170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:51.966083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:51.975531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:51.981081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:51.988421Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:51.999401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.000044Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.005706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.012776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.013216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.019782Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.023843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.025635Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.036055Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.043027Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.051935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.056031Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.062586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.066404Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.073275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.077754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.084770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.085350Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.096766Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.104283Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.112585Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.114658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.121870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.131454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.141452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.145777Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.154190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.158822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.169034Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.172139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.184734Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.185554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:22:52.200141Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:22:52.463660Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0y750361mz05px4b1eqq1", SessionId: ydb://session/3?node_id=1&id=NDA4NGY4M2MtYjIyMjcxMDctNDM1OTYxMDEtZjVkYTliNTI=, Slow query, duration: 39.695134s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-28T09:22:52.883385Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:22:52.883807Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:22:52.884821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486788342887579196:9279];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392;
2025-03-28T09:22:52.885177Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> KqpJoinOrder::FourWayJoinLeftFirst-ColumnStore [GOOD]
>> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD]
Test command err: 2025-03-28T09:23:02.550808Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788487462134943:2221];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:23:02.558651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f0f/r3tmp/tmpI9JhcT/pdisk_1.dat
2025-03-28T09:23:03.231990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:23:03.232079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:23:03.251344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:23:03.291477Z node 1 :IMPORT WARN: Table profiles were not loaded
TClient is connected to server localhost:21825
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:03.713830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:03.731709Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:03.747189Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:23:03.758176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:03.995436Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.006s,wait=0.004s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-28T09:23:04.000435Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-28T09:23:04.027235Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-28T09:23:04.036815Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2025-03-28T09:23:04.157518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:23:04.157779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.159613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-03-28T09:23:04.159673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction 
target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-03-28T09:23:04.159688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-28T09:23:04.159716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-03-28T09:23:04.159764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-03-28T09:23:04.159907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-03-28T09:23:04.160017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:23:04.162121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:23:04.162175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-03-28T09:23:04.163120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-03-28T09:23:04.163245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-03-28T09:23:04.163439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:23:04.163453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:23:04.163581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-03-28T09:23:04.163650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:23:04.163673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486788491757102561:2252], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 2 2025-03-28T09:23:04.163686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486788491757102561:2252], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 4 2025-03-28T09:23:04.163728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.163764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-03-28T09:23:04.164111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } 
BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-28T09:23:04.164198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-28T09:23:04.165411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-03-28T09:23:04.165491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-03-28T09:23:04.165565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710676, partId: 0, tablet: 72057594037968897 2025-03-28T09:23:04.165588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:3, partId: 0 2025-03-28T09:23:04.165598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:4, partId: 0 2025-03-28T09:23:04.171503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-28T09:23:04.171598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-28T09:23:04.171615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-03-28T09:23:04.171640Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-03-28T09:23:04.171656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-28T09:23:04.171919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 waiting... 
2025-03-28T09:23:04.174081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-28T09:23:04.174150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-03-28T09:23:04.174172Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2025-03-28T09:23:04.174187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 5 2025-03-28T09:23:04.1 ... ARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:23:13.340926Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037893 state Offline 2025-03-28T09:23:13.340959Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037893 reason = ReasonStop 2025-03-28T09:23:13.340989Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [3:7486788536364081299:2754], serverId# [3:7486788536364081301:2756], sessionId# [0:0:0] 2025-03-28T09:23:13.341005Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-03-28T09:23:13.341016Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-03-28T09:23:13.341053Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [3:7486788536364081298:2753], serverId# [3:7486788536364081300:2755], sessionId# [0:0:0] 2025-03-28T09:23:13.341071Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-03-28T09:23:13.347252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486788532069113667 RawX2: 4503612512274741 } TabletId: 72075186224037890 State: 4 2025-03-28T09:23:13.347305Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:23:13.347863Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-03-28T09:23:13.347884Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-03-28T09:23:13.348190Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-28T09:23:13.348357Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-28T09:23:13.348581Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:23:13.349076Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-28T09:23:13.349088Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-28T09:23:13.349882Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 
72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-28T09:23:13.350026Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-03-28T09:23:13.350538Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-28T09:23:13.350554Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-28T09:23:13.350773Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-03-28T09:23:13.350817Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:7486788532069113464:2389], serverId# [3:7486788532069113465:2390], sessionId# [0:0:0] 2025-03-28T09:23:13.351116Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037893 2025-03-28T09:23:13.351233Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2025-03-28T09:23:13.351481Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-03-28T09:23:13.352870Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-03-28T09:23:13.352981Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-03-28T09:23:13.354051Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037893 from 72075186224037891 is reset 2025-03-28T09:23:13.354089Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:23:13.354136Z node 3 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-03-28T09:23:13.356041Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037889:1:16:1:12288:306:0] [72075186224037889:1:24:1:12288:253:0] ] return ack processed 2025-03-28T09:23:13.356139Z node 3 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:23:13.356210Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037892 from 72075186224037890 is reset 2025-03-28T09:23:13.356226Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-28T09:23:13.356244Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-03-28T09:23:13.356401Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-03-28T09:23:13.356447Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-03-28T09:23:13.358266Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-03-28T09:23:13.363023Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-28T09:23:13.363093Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-03-28T09:23:13.364255Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:7486788536364081422:2394], serverId# [3:7486788536364081431:2867], sessionId# [0:0:0] 2025-03-28T09:23:13.364313Z node 3 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:23:13.365076Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 
72057594046644480, message: Source { RawX1: 7486788532069113349 RawX2: 4503612512274682 } TabletId: 72075186224037889 State: 4 2025-03-28T09:23:13.365124Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:23:13.365521Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:23:13.366003Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-03-28T09:23:13.366212Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486788532069113668 RawX2: 4503612512274742 } TabletId: 72075186224037891 State: 4 2025-03-28T09:23:13.366248Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:23:13.366514Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:23:13.366828Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-03-28T09:23:13.374037Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-28T09:23:13.374261Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-28T09:23:13.374484Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-28T09:23:13.374605Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-03-28T09:23:13.374700Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:23:13.374738Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-03-28T09:23:13.374775Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:23:13.374789Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-28T09:23:13.374805Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:23:13.374962Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-03-28T09:23:13.374992Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-03-28T09:23:13.375889Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-28T09:23:13.375915Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-28T09:23:13.375956Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046644480:4 2025-03-28T09:23:13.375970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-28T09:23:13.375993Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-28T09:23:13.376097Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-03-28T09:23:13.376110Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-03-28T09:23:13.378935Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-28T09:23:13.379010Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-03-28T09:23:13.380357Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-03-28T09:23:13.380410Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-03-28T09:23:13.632652Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2025-03-28T09:23:13.633416Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2025-03-28T09:23:13.633839Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-03-28T09:23:13.634262Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-03-28T09:23:13.634699Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-03-28T09:23:13.635110Z node 3 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> TFlatTest::MergeEmptyAndWrite [GOOD] >> KqpFlipJoin::Inner_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::JoinWithComplexCondition-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 17277, MsgBus: 63911 2025-03-28T09:22:56.782961Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788459923079680:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:56.785244Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001736/r3tmp/tmpg7auiK/pdisk_1.dat 2025-03-28T09:22:57.432129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:57.432202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:57.443107Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:57.448274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17277, node 1 2025-03-28T09:22:57.786352Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:57.786377Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:57.786383Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:57.786480Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63911 TClient is connected to server localhost:63911 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:58.911596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:58.941198Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:58.964094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:59.245312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:59.594806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:59.738497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:01.785288Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788459923079680:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:01.785374Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:01.811773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788481397917924:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:01.811904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:02.148838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.222246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.262221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.301328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.339455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.387462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.458369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788485692885734:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:02.458425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:02.458949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788485692885739:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:02.463536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:23:02.488070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788485692885741:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:23:02.592646Z node 1 :TX_PROXY ERROR: Actor# [1:7486788485692885797:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:03.972777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.021655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6339, MsgBus: 62134 2025-03-28T09:23:07.275804Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788508719356673:2134];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:07.275845Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001736/r3tmp/tmpJeh7Rg/pdisk_1.dat 2025-03-28T09:23:07.642390Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:07.642463Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:07.646808Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:07.649344Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6339, node 2 2025-03-28T09:23:07.829414Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:07.829432Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:07.829440Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:07.829547Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62134 TClient is connected to server localhost:62134 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:23:08.412304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:08.429000Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:23:08.445504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:08.527008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:08.714343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:08.818099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:11.711828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788525899227571:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:11.711936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:11.769454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:23:11.818343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:23:11.921837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:23:11.975091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:23:12.056151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:23:12.116564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:23:12.196000Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788530194195384:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:12.196111Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:12.196407Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788530194195389:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:12.200770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:23:12.220139Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:23:12.221083Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788530194195391:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:23:12.278997Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486788508719356673:2134];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:12.279058Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:12.289471Z node 2 :TX_PROXY ERROR: Actor# [2:7486788530194195446:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:13.824604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:23:13.861721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinLeftFirst-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 20498, MsgBus: 21239 2025-03-28T09:22:25.391176Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788329393720793:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:25.391660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00177c/r3tmp/tmpbqbXft/pdisk_1.dat 2025-03-28T09:22:26.048926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:26.049053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:26.063031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:22:26.156080Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20498, node 1 2025-03-28T09:22:26.423578Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:26.423604Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:26.423610Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:26.423702Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21239 TClient is connected to server localhost:21239 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:27.381616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:30.056869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788350868557811:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:30.058167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788350868557807:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:30.058248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:30.061672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:30.081940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788350868557820:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:30.152240Z node 1 :TX_PROXY ERROR: Actor# [1:7486788350868557872:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:30.390140Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788329393720793:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:30.390229Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:30.655575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:30.769382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:22:30.801894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:30.836250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:30.869601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:31.100074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:31.177752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:31.218825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:31.264381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:22:31.304971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:22:31.346757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:22:31.401963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-28T09:22:31.465251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.339607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:22:32.411977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.468301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.512678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.548989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.600023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.647739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.679483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.727951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.794383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.881956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.932073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:22:32.975371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:22:33.012610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:22:33.047714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:22:33.080650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:22:33.121480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-28T09:22:33.201888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but p ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.727980Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.733188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.738407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.739664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.743925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.748368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.748964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.753552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.757373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.759557Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038495;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.762479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.768753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.768904Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.773929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.774024Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038512;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.780986Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.781454Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.789380Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.793812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.802093Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.806070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.815195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.817427Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.822339Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.826186Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.837533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.842525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.842768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038507;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.847852Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.855809Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.857579Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.862364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.867397Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.875379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.879151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.885713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.887896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.894516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.899628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.901122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.906312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.908635Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.919127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.922279Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:08.953575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038432;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:09.042927Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0ywjqf0fxcxvc4ncyn3cq", SessionId: ydb://session/3?node_id=1&id=ZmRmZDc2YmQtZGEzMjY4NzEtZjdlYjY5ZGUtNzE5OTdjODk=, Slow query, duration: 34.331112s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:23:09.328692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:09.329168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7486788428177986442:4526];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038170; 2025-03-28T09:23:09.339519Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:09.340605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::MergeEmptyAndWrite [GOOD] Test command err: 2025-03-28T09:23:10.650460Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788521710970169:2159];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:10.652217Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f0d/r3tmp/tmpujU3db/pdisk_1.dat 2025-03-28T09:23:11.310301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:11.310492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:11.323605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:11.339756Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:64975 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:11.733630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:23:11.779543Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:23:11.783807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:15.358442Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788543136785724:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f0d/r3tmp/tmp2qojzF/pdisk_1.dat 2025-03-28T09:23:15.455644Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:23:15.566280Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:15.629728Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:15.629818Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:15.635931Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4352 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:23:15.875800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:15.882052Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:15.904445Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:23:15.911760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:16.138976Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.012s,wait=0.010s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-28T09:23:16.144999Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.006s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-28T09:23:16.186271Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.005s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-28T09:23:16.200509Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.005s,wait=0.004s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153796048 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) 2025-03-28T09:23:16.287961Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:23:16.289573Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:23:16.289723Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:23:16.290806Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:23:16.291216Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:23:16.291675Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-28T09:23:16.292342Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:23:16.292441Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:23:16.292779Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-28T09:23:16.293290Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:23:16.293639Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:23:16.293980Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-28T09:23:16.294528Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:23:16.294605Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:23:16.294961Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-28T09:23:16.295483Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:23:16.296009Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:23:16.296348Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-28T09:23:16.296878Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:23:16.296950Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:23:16.297239Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-28T09:23:16.297758Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:23:16.298231Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:23:16.298561Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-28T09:23:16.299256Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:23:16.299338Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:23:16.299659Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-28T09:23:16.300235Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:23:16.300677Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:23:16.301011Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-28T09:23:16.301645Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:23:16.301736Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:23:16.302077Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-28T09:23:16.302668Z node 2 
:TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:23:16.303242Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:23:16.303593Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-28T09:23:16.304170Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:23:16.304192Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:23:16.304347Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:23:16.304682Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037 ... vSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7486788547431753778 RawX2: 4503608217307445 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-03-28T09:23:16.636181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2025-03-28T09:23:16.636275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7486788547431753778 RawX2: 4503608217307445 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-03-28T09:23:16.636307Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-03-28T09:23:16.636368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7486788547431753778 RawX2: 4503608217307445 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2025-03-28T09:23:16.636396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715687:0, shardIdx: 72057594046644480:3, datashard: 72075186224037890, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-03-28T09:23:16.636406Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-03-28T09:23:16.636421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715687:0, datashard: 72075186224037890, at schemeshard: 72057594046644480 2025-03-28T09:23:16.636437Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715687:0 129 -> 240 2025-03-28T09:23:16.636874Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715687 2025-03-28T09:23:16.636923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715687 2025-03-28T09:23:16.636930Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715687 2025-03-28T09:23:16.636943Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715687, pathId: [OwnerId: 
72057594046644480, LocalPathId: 1], version: 7 2025-03-28T09:23:16.636954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-28T09:23:16.637114Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715687 2025-03-28T09:23:16.637149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715687 2025-03-28T09:23:16.637154Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715687 2025-03-28T09:23:16.637164Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715687, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2025-03-28T09:23:16.637173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:23:16.637253Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715687 2025-03-28T09:23:16.637286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715687 2025-03-28T09:23:16.637291Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715687 2025-03-28T09:23:16.637299Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715687, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2025-03-28T09:23:16.637307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-28T09:23:16.637332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715687, ready parts: 0/1, is published: true 2025-03-28T09:23:16.637468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-03-28T09:23:16.637521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-03-28T09:23:16.637570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2025-03-28T09:23:16.637595Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715687:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:23:16.637694Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-28T09:23:16.637763Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715687 datashard 72075186224037890 state PreOffline 2025-03-28T09:23:16.637787Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 
72075186224037890 2025-03-28T09:23:16.637827Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-28T09:23:16.637919Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715687:0 progress is 1/1 2025-03-28T09:23:16.637932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-03-28T09:23:16.637947Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715687:0 progress is 1/1 2025-03-28T09:23:16.637956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-03-28T09:23:16.637969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715687, ready parts: 1/1, is published: true 2025-03-28T09:23:16.638000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7486788547431754017:2394] message: TxId: 281474976715687 2025-03-28T09:23:16.638016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2025-03-28T09:23:16.638028Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715687:0 2025-03-28T09:23:16.638041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715687:0 2025-03-28T09:23:16.638092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-28T09:23:16.638478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2025-03-28T09:23:16.638514Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 2025-03-28T09:23:16.638532Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715687 TClient::Ls request: /dc-1/Dir/TableOld 2025-03-28T09:23:16.639895Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:23:16.639958Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-03-28T09:23:16.644232Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:23:16.644693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486788547431753778 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-03-28T09:23:16.644734Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:23:16.645008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:23:16.645436Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-28T09:23:16.651236Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet 
reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-28T09:23:16.651422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-28T09:23:16.651575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:23:16.651591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-28T09:23:16.651622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:23:16.652576Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-28T09:23:16.652594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-28T09:23:16.652632Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-28T09:23:16.653159Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-03-28T09:23:16.653195Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7486788547431753898:2582], serverId# [2:7486788547431753899:2583], sessionId# [0:0:0] 2025-03-28T09:23:16.653430Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-03-28T09:23:16.653817Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-28T09:23:16.653877Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::Inner_2 [GOOD] Test command err: Trying to start YDB, gRPC: 4488, MsgBus: 7849 2025-03-28T09:23:01.702546Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788485259180304:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:01.709661Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016ef/r3tmp/tmphR8g68/pdisk_1.dat 2025-03-28T09:23:02.434295Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:02.446217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:02.446290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:02.457195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4488, node 1 2025-03-28T09:23:02.785271Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:02.785289Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:02.785298Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:02.785386Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7849 TClient is connected to server localhost:7849 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:03.684818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:03.750872Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:23:03.769328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:03.976288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:04.226722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:04.321683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:06.354904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788506734018423:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:06.354994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:06.656497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:23:06.688463Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788485259180304:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:06.688508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:06.702393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:23:06.746670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:23:06.786329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:23:06.826085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:23:06.902429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:23:06.966975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788506734018941:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:06.967040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:06.967292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788506734018946:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:06.970453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:23:06.987684Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:23:06.987873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788506734018948:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:23:07.066072Z node 1 :TX_PROXY ERROR: Actor# [1:7486788511028986297:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:08.336242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:23:08.398048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:23:08.436206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:23:08.519563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"Tables":["FJ_Table_1"],"PlanNodeId":6,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/FJ_Table_1","ReadRangesPointPrefixLen":"0","E-Rows":"4","Table":"FJ_Table_1","ReadColumns":["Key","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"},{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["FJ_Table_2"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/FJ_Table_2","ReadRangesPointPrefixLen":"0","E-Rows":"2","Table":"FJ_Table_2","ReadColumns":["Fk1","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node 
Type":"Stage","Stats":{"ComputeNodes":[{"Tasks":[{"FinishTimeMs":1743153789585,"Host":"ghrun-mdsjyjltvm","OutputRows":2,"StartTimeMs":1743153789584,"IngressRows":2,"ComputeTimeUs":134,"NodeId":1,"OutputChannels":[{"ChannelId":1,"Rows":2,"DstStageId":1,"Bytes":24}],"TaskId":1,"OutputBytes":24}],"PeakMemoryUsageBytes":65536,"DurationUs":1000,"CpuTimeUs":1885}],"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Bytes":{"Count":1,"Sum":24,"Max":24,"Min":24,"History":[10,24]}},"Name":"4","Push":{"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"FirstMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":7809,"Max":7809,"Min":7809,"History":[10,7809]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":8,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[10,1048576]},"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"Tasks":1,"OutputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"FinishedTasks":1,"IngressRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"PhysicalStageId":0,"StageDurationUs":1000,"Table":[{"Path":"\/Root\/FJ_Table_2","ReadRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"ReadBytes":{"Count":1,"Sum":22,"Max":22,"Min":22}}],"BaseTimeMs":1743153789576,"OutputBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"CpuTimeUs":{"Count":1,"Sum":793,"Max":793,"Min":793,"History":[9,793]},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":8,"Max":8,"Min":8}," ... 
":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":10,"Min":3}}}],"CpuTimeUs":{"Count":1,"Sum":519,"Max":519,"Min":519,"History":[11,519]},"StageDurationUs":1000,"ResultRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResultBytes":{"Count":1,"Sum":38,"Max":38,"Min":38},"OutputBytes":{"Count":1,"Sum":38,"Max":38,"Min":38},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"FirstMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Bytes":{"Count":1,"Sum":38,"Max":38,"Min":38,"History":[11,38]}},"Name":"7","Push":{"LastMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Rows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"FirstMessageMs":{"Count":1,"Sum":10,"Max":10,"Min":10},"Bytes":{"Count":1,"Sum":38,"Max":38,"Min":38,"History":[11,38]},"PauseMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"WaitTimeUs":{"Count":1,"Sum":6563,"Max":6563,"Min":6563,"History":[11,6563]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":10,"Min":3}}}],"InputRows":{"Count":1,"Sum":2,"Max":2,"Min":2},"Tasks":1}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":660164,"CpuTimeUs":656476},"ProcessCpuTimeUs":352,"TotalDurationUs":704814,"ResourcePoolId":"default","QueuedTimeUs":0},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/FJ_Table_1","ReadRangesPointPrefixLen":"0","E-Rows":"4","Table":"FJ_Table_1","ReadColumns":["Key","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"4","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"},{"PlanNodeId":12,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/FJ_Table_2","ReadRangesPointPrefixLen":"0","E-Rows":"2","Table":"FJ_Table_2","ReadColumns":["Fk1","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"2","Condition":"t1.Key = t2.Fk1","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"14.4"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"A-Rows":2,"A-SelfCpu":2.001,"A-Cpu":2.001,"A-Size":38,"Name":"TopSort","Limit":"1001","TopSortBy":"[row.t1.Value,row.t2.Value]"}],"Node Type":"TopSort"}],"Operators":[{"A-Rows":2,"A-SelfCpu":0.519,"A-Cpu":2.52,"A-Size":38,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start YDB, gRPC: 61161, MsgBus: 29881 2025-03-28T09:23:10.958087Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788520638798401:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:10.958117Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016ef/r3tmp/tmpLCR4tl/pdisk_1.dat 2025-03-28T09:23:11.317061Z node 2 :IMPORT 
WARN: Table profiles were not loaded 2025-03-28T09:23:11.331132Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:11.331204Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:11.339420Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61161, node 2 2025-03-28T09:23:11.563226Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:11.563244Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:11.563251Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:11.563429Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29881 TClient is connected to server localhost:29881 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:12.399575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:12.410462Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:23:12.429672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:12.546461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:12.781886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:12.872455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:15.383867Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788542113636638:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:15.383944Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:15.462112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:23:15.503525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:23:15.551555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:23:15.602151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:23:15.645029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:23:15.705155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:23:15.816364Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788542113637158:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:15.816461Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:15.816660Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788542113637163:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:15.820932Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:23:15.846894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788542113637165:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:23:15.938040Z node 2 :TX_PROXY ERROR: Actor# [2:7486788542113637221:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:15.959271Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486788520638798401:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:15.959330Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:17.239560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:23:17.350128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:23:17.399025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:23:17.445320Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCDS90-ColumnStore [GOOD] >> KqpIndexLookupJoin::Inner-StreamLookup [GOOD] |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> KqpFlipJoin::RightSemi_1 [GOOD] >> KqpFlipJoin::RightOnly_3 |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::Inner-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 3958, MsgBus: 2408 2025-03-28T09:23:05.078732Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788499966671169:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:05.078776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016e7/r3tmp/tmp5lFOk7/pdisk_1.dat 2025-03-28T09:23:05.746483Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:05.764999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:05.765100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:05.767666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3958, node 1 2025-03-28T09:23:06.027234Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-03-28T09:23:06.027262Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:06.027271Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:06.027375Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2408 TClient is connected to server localhost:2408 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:06.994294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:07.007230Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:07.020101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:23:07.241313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:07.435283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:07.554289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:09.936749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788517146542151:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:09.936843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:10.085807Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788499966671169:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:10.085871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:10.328291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:23:10.378849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:23:10.461913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:23:10.517370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:23:10.556491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:23:10.599219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:23:10.676808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788521441509963:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:10.676878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:10.677166Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788521441509968:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:10.680134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:23:10.693365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788521441509970:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:23:10.765284Z node 1 :TX_PROXY ERROR: Actor# [1:7486788521441510025:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:11.974230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:23:12.007563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:23:12.037986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:23:12.069494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:23:12.122319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:23:12.170994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1738, MsgBus: 20097 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016e7/r3tmp/tmpm0uft6/pdisk_1.dat 2025-03-28T09:23:14.890917Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:23:14.905779Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:14.914071Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:14.914144Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:14.916273Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1738, node 2 2025-03-28T09:23:15.106354Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:15.106390Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:15.106397Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:15.106498Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20097 TClient is connected to server localhost:20097 WaitRootIsUp 'Root'... 
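An aside on the warnings recurring around this point: every test block in this log emits the same burst of `Resource pool default not found` / `Failed to fetch pool default` warnings, a `Scheduled retry ... doublechecking` message, and a `path exist, request accepts it` TX_PROXY error while the server bootstraps its default workload-manager pool, so when triaging such a log it helps to filter that startup noise before looking for real failures. Below is a minimal sketch of that filtering — plain Python, not part of the ya/YDB tooling, with regexes that are assumptions read off the lines above rather than an official log grammar:

```python
# Illustrative triage helper (hypothetical, not a YDB/ya tool): given a plain-text
# "Test command err" block like the ones in this log, split WARN/ERROR records into
# the recurring benign startup noise (default resource pool not yet created, the
# "doublechecking" retry, the "path exist, request accepts it" race) and the rest.
import re
import sys
from collections import Counter

# A record starts with "<ISO timestamp> node <N> :<COMPONENT> <LEVEL>:".
RECORD_RE = re.compile(
    r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z node \d+ :\w+ (?:WARN|ERROR)"
)
BENIGN_PATTERNS = [
    re.compile(r"Resource pool default not found"),
    re.compile(r"Failed to fetch pool"),
    re.compile(r"completed, doublechecking"),
    re.compile(r"path exist, request accepts it"),
]

def classify(text: str) -> Counter:
    """Count WARN/ERROR records, bucketed into 'benign-startup' vs 'other'."""
    counts = Counter()
    # Records are timestamp-delimited, so split on a lookahead for the next one.
    for record in re.split(
        r"(?=\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z node )", text
    ):
        if not RECORD_RE.search(record):
            continue
        bucket = (
            "benign-startup"
            if any(p.search(record) for p in BENIGN_PATTERNS)
            else "other"
        )
        counts[bucket] += 1
    return counts

if __name__ == "__main__":
    print(classify(sys.stdin.read()))
```

Run against the blocks above, everything shown so far would land in the `benign-startup` bucket, which is consistent with these tests still finishing `[GOOD]`.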
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:15.693159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:15.708427Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:23:15.725586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:15.835256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:16.069581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:16.153693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:18.756877Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788558474163168:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:18.756976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:18.851410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.896332Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.953665Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:23:19.029220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:23:19.074151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:23:19.152079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:23:19.212930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788562769130983:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:19.213032Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:19.213417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788562769130988:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:19.217339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:23:19.236456Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788562769130990:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:23:19.343608Z node 2 :TX_PROXY ERROR: Actor# [2:7486788562769131046:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:20.651939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:23:20.716104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:23:20.761659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:23:20.819846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-28T09:23:20.892372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-28T09:23:20.926851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 >> TGRpcCmsTest::RemoveWithAnotherTokenTest >> TFlatTest::SplitEmptyToMany >> TGRpcCmsTest::DisabledTxTest >> TGRpcCmsTest::SimpleTenantsTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS90-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 11547, MsgBus: 9398 2025-03-28T09:22:17.880219Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788295715974888:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:17.880671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001799/r3tmp/tmpDqLQlQ/pdisk_1.dat 2025-03-28T09:22:18.696534Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:18.697924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:18.697977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:18.704629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11547, node 1 2025-03-28T09:22:18.947123Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:18.947141Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:18.947151Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:18.947245Z node 1 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:9398 TClient is connected to server localhost:9398 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:19.819276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:22.246541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788317190811905:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:22.246659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:22.246787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788317190811917:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:22.255240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:22.279399Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:22:22.283603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788317190811919:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:22.346115Z node 1 :TX_PROXY ERROR: Actor# [1:7486788317190811970:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:22.702249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:22.839268Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788295715974888:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:22.839357Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:22.868924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:22:22.939234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:22.986872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:23.029190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:23.301979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:23.352924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:23.387232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:23.422750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:22:23.466386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:22:23.514096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:22:23.559113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
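The WorkloadService sequence above is the lazy bootstrap of the default resource pool: the fetch fails with NOT_FOUND, an ESchemeOpCreateResourcePool suboperation is proposed, the creator retries once on "Transaction ... completed, doublechecking", and the concurrent attempt resolves as "path exist, request accepts it". A minimal YQL sketch of the kind of DDL behind such a pool follows; sample_pool and both option values are illustrative assumptions, not taken from this log (the pool the tests touch is named default at /Root/.metadata/workload_manager/pools/default and is created by the workload manager itself, not by user DDL):

    -- Hedged sketch only: a resource pool analogous to the auto-created
    -- /Root/.metadata/workload_manager/pools/default.
    -- sample_pool and the option values are assumptions for illustration.
    CREATE RESOURCE POOL sample_pool WITH (
        CONCURRENT_QUERY_LIMIT = -1, -- assumption: no concurrency cap
        QUEUE_SIZE = -1              -- assumption: unbounded queue
    );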
2025-03-28T09:22:23.603596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.300258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:22:24.352768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.390286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.423478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.454544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.490658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.524864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.554270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.590718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.662722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.711833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.743561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.780940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.823145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.869920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.917712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:22:24.948643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.218041Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.225937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.226729Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.232532Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.248594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.254217Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.270919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.276550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.282514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.288448Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.294333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.300128Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.307908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.312404Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.313449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038633;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.319356Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.322880Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.325621Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.328721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.364269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.373879Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.375072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.383970Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.388614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.394394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.397484Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.405934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.409142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.420525Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.421724Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.432753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.434962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.445300Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.446873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.456495Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.459296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.466492Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.473142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.478680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.480253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.484398Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.490053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.495287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.495920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.501335Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.507604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.615228Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0ymm7bmcgwz8dz3twjqp6", SessionId: ydb://session/3?node_id=1&id=YTViYjNiNS1hM2M2NzJlYS03MDA1MjkxNS0xYjg4ZjE3ZQ==, Slow query, duration: 38.047389s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:23:05.223263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:05.223324Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:05.223952Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TGRpcCmsTest::AlterRemoveTest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AuthTokenTest >> KqpJoinOrder::TestJoinHint2-ColumnStore [GOOD] >> BasicStatistics::TwoDatabases >> TGRpcCmsTest::DescribeOptionsTest >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint2-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 20570, MsgBus: 15218 2025-03-28T09:22:38.958248Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788383765173070:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:38.958667Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00175d/r3tmp/tmpPTDYPW/pdisk_1.dat 2025-03-28T09:22:39.711600Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:39.714648Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:39.715119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:39.728309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20570, node 1 2025-03-28T09:22:40.077323Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:40.077347Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:40.077353Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:40.077446Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15218 TClient is connected to server localhost:15218 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:40.963701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:40.989022Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:43.301282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788405240010080:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:43.301358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:43.305012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788405240010092:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:43.309101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:43.326608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788405240010094:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:43.389065Z node 1 :TX_PROXY ERROR: Actor# [1:7486788405240010145:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:43.782002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:43.951121Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788383765173070:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:43.951182Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:43.974266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.016104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.064101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.100670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.287090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.372137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.428517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.474046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.520319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.564148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.600541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-28T09:22:44.671984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.461488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:22:45.507630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.545124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.579264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.652979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.692965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.773262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.833645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.899606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.948747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:22:46.002393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:22:46.093561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:22:46.179666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:22:46.231039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:22:46.281525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:22:46.317306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:22:46.354494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.795515Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.796079Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.801268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.802242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.806543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.815121Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.820176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.823251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.825490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.829757Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.833105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.835369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.839189Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.840608Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.845959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038619;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.848516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.851516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.853895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.856895Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.859219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.863844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.864280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.869668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.875041Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.875471Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.882556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.885168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.888053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.891414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.893555Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.896865Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.898574Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.903736Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.904268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.909615Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.909985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.915292Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.915312Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.920735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.921394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.926474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.928677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.932102Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.993881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.998411Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.091531Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0z9tb3ehef5xa2g6g95x9", SessionId: ydb://session/3?node_id=1&id=Y2VkNjRhZTMtYTYxNTAzYzktYzYwYWRiMWUtZDJhMjUzNQ==, Slow query, duration: 31.820586s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:23:20.344749Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:20.345187Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:20.345623Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;self_id=[1:7486788435304788006:2956];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038170;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038629;receive=72075186224038331; 2025-03-28T09:23:20.345952Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TLocksTest::NoLocksSet >> TGRpcCmsTest::DisabledTxTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPredsAndEquiv-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 7052, MsgBus: 4148 2025-03-28T09:22:37.559210Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788380702580653:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:37.559574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001760/r3tmp/tmpNJMWgs/pdisk_1.dat 2025-03-28T09:22:38.330723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:38.330813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:38.335703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7052, node 1 2025-03-28T09:22:38.454763Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:22:38.454782Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:22:38.570741Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:38.688260Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:22:38.719290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:38.719313Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:38.719321Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:38.719426Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4148 TClient is connected to server localhost:4148 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:39.612559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:39.646120Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:42.028796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788402177417659:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:42.028923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:42.029344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788402177417671:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:42.033580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:42.061291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788402177417673:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:42.166307Z node 1 :TX_PROXY ERROR: Actor# [1:7486788402177417724:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:42.554802Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788380702580653:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:42.554891Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:42.615191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:42.776912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:22:42.814900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:42.857214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:42.911835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:43.137033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:43.220057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:43.276975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:43.312849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:22:43.356120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:22:43.395628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:22:43.439048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
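Both KqpJoinOrder blocks above record the same slow DDL batch in their KQP_SLOW_LOG entries (38.047389s in the TPCDS90 block, 31.820586s in the TestJoinHint2 block) as a single escaped string. De-escaped for readability, with whitespace normalized but content identical to the log text, that batch reads:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

With STORE = COLUMN and AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240, each statement provisions on the order of 240 column shards, which lines up with the long per-tablet runs of TX_COLUMNSHARD_TX "finished_tx" warnings surrounding these entries.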
2025-03-28T09:22:43.514486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.313445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:22:44.363589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.400780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.427529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.462311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.499283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.551394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.606545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.644230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.677750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.769688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.824408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.865941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.916079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.961358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 7205759404664 ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.909498Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.911655Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.916522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.926843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.932651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.936007Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.939269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.940910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.944907Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.945967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.949203Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038568;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.954075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.954780Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.962856Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038615;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.965946Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.971268Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.973227Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.977667Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038438;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.981886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.985344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.987296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.992964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.996733Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:19.998458Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.003870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.009344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038645;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.012811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.019210Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.026296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.028961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.035819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.041165Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.042278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.048883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.051915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.057229Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.060929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.070401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.072249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.081998Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.085327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.094974Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.100837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.104329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.107065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.239376Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0z8h099tvmjmhhv6hrh83", SessionId: ydb://session/3?node_id=1&id=YTZhMDZjZmEtNjAwZTYxOTQtNWFkNDBhYjgtZTdmZjlkMzk=, Slow query, duration: 33.294538s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 
(\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:23:20.583770Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:20.584243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:20.584881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7486788479486845602:4430];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-03-28T09:23:20.585216Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-ColumnStore [GOOD] >> TGRpcCmsTest::AlterRemoveTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DisabledTxTest [GOOD] Test command err: 2025-03-28T09:23:25.537152Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788585153391272:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:25.543546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001778/r3tmp/tmpi2Lg3u/pdisk_1.dat 2025-03-28T09:23:26.563816Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:26.598382Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:23:26.629453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:26.629557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:26.644240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7205, node 1 2025-03-28T09:23:27.098449Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:27.098481Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:27.098492Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:27.098605Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5007 WaitRootIsUp 'Root'... 
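
For readability, the DDL captured in the KQP_SLOW_LOG entries in these traces (duration 33.294538s above, and 34.736616s in the join test dump further below) is reproduced here with the \n escape sequences unfolded; the statements are verbatim from the log, nothing is added:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 makes each column-store table start with at least 240 partitions, which is consistent with the long runs of event=finished_tx lines from distinct TX_COLUMNSHARD tablet_ids in the surrounding trace.
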
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:28.208564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:28.450169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480 2025-03-28T09:23:28.502586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> KqpJoinOrder::TPCDS87-ColumnStore [GOOD] >> SystemView::AuthEffectivePermissions [GOOD] >> KqpFlipJoin::RightOnly_3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AlterRemoveTest [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001765/r3tmp/tmpWvrN2g/pdisk_1.dat 2025-03-28T09:23:26.987670Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:23:27.349926Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:27.461316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:27.461420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:27.496336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17232, node 1 2025-03-28T09:23:27.771402Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:27.771427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:27.771434Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:27.771567Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22400 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:28.349779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:28.527948Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7486788598106661353:2314], Recipient [1:7486788593811693725:2202]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" } 2025-03-28T09:23:28.527992Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-03-28T09:23:28.528024Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:28.528037Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:28.528147Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" 2025-03-28T09:23:28.528292Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1743153808524909) 2025-03-28T09:23:28.528969Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1743153808524909 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-03-28T09:23:28.529171Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-03-28T09:23:28.537570Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-03-28T09:23:28.538461Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808524909&action=1" } } } 2025-03-28T09:23:28.538623Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:28.538720Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-03-28T09:23:28.538874Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: 
"/Root/users/user-1:hdd" } } } 2025-03-28T09:23:28.539390Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-03-28T09:23:28.539536Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-03-28T09:23:28.552103Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7486788598106661363:2315], Recipient [1:7486788593811693725:2202]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808524909&action=1" } UserToken: "" } 2025-03-28T09:23:28.552131Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-28T09:23:28.552349Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808524909&action=1" } } 2025-03-28T09:23:28.556710Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-03-28T09:23:28.556772Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-03-28T09:23:28.556844Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7486788598106661358:2202], Recipient [1:7486788593811693725:2202]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-03-28T09:23:28.556860Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-03-28T09:23:28.556874Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:28.556888Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:28.556930Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-03-28T09:23:28.556944Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-03-28T09:23:28.557003Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-03-28T09:23:28.564824Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-28T09:23:28.564857Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:28.564865Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:28.564873Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:28.564947Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-03-28T09:23:28.564974Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1743153808524909 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-28T09:23:28.571171Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-03-28T09:23:28.571346Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:28.571393Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-03-28T09:23:28.571405Z node 1 
:CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-03-28T09:23:28.578741Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-03-28T09:23:28.580553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480 2025-03-28T09:23:28.587059Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-03-28T09:23:28.587136Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715658 2025-03-28T09:23:28.593741Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715658 2025-03-28T09:23:28.617298Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715658 2025-03-28T09:23:28.617815Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743153808641 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-28T09:23:28.617826Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-03-28T09:23:28.617866Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainCreated 2025-03-28T09:23:28.617951Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435074, Sender [1:7486788598106661405:2202], Recipient [1:7486788593811693725:2202]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainCreated 2025-03-28T09:23:28.617971Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainCreated 2025-03-28T09:23: ... 
node 1 :CMS_TENANTS DEBUG: TTxRemoveTenant: Request { path: "/Root/users/user-1" } UserToken: "" 2025-03-28T09:23:28.668242Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_SUBDOMAIN txid=1743153808668088 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-28T09:23:28.668313Z node 1 :CMS_TENANTS TRACE: Update user token in database for /Root/users/user-1 2025-03-28T09:23:28.676130Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenant Complete 2025-03-28T09:23:28.676272Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvRemoveTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808668088&action=2" } } } 2025-03-28T09:23:28.676309Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:28.676348Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-03-28T09:23:28.677006Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743153808641 ParentPathId: 2 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 0 TimeCastBucketsPerMediator: 0 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-28T09:23:28.677018Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) drop subdomain 2025-03-28T09:23:28.677161Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain drop cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root/users" OperationType: ESchemeOpForceDropExtSubDomain Drop { Name: "user-1" } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" 2025-03-28T09:23:28.677813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976715660 2025-03-28T09:23:28.679609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:23:28.682442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976715660 2025-03-28T09:23:28.682766Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715659 2025-03-28T09:23:28.682777Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-03-28T09:23:28.682817Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with 
NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-03-28T09:23:28.682879Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-03-28T09:23:28.682910Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715660 2025-03-28T09:23:28.682971Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435076, Sender [1:7486788598106661464:2202], Recipient [1:7486788593811693725:2202]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-03-28T09:23:28.682995Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainReady 2025-03-28T09:23:28.683006Z node 1 :CMS_TENANTS DEBUG: Ignoring ready subdomain for tenant /Root/users/user-1 in REMOVING_SUBDOMAIN state 2025-03-28T09:23:28.686674Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715660 2025-03-28T09:23:28.690755Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7486788598106661540:2322], Recipient [1:7486788593811693725:2202]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808668088&action=2" } UserToken: "" } 2025-03-28T09:23:28.690776Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-28T09:23:28.690930Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808668088&action=2" } } 2025-03-28T09:23:28.700346Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715660 2025-03-28T09:23:28.700366Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-03-28T09:23:28.700401Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-03-28T09:23:28.700557Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7486788598106661527:2202], Recipient [1:7486788593811693725:2202]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-03-28T09:23:28.700577Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-03-28T09:23:28.700607Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:28.700636Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:28.700668Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-03-28T09:23:28.700699Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1743153808668088 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-28T09:23:28.700754Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743153808668088 issue= 2025-03-28T09:23:28.704824Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-03-28T09:23:28.704905Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-03-28T09:23:28.704919Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:28.705077Z node 
1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7486788593811693630:2201], Recipient [1:7486788593811693725:2202]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-28T09:23:28.705092Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-28T09:23:28.705107Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:28.705116Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:28.705140Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-03-28T09:23:28.705157Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1743153808668088 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-28T09:23:28.708628Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-03-28T09:23:28.708691Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:28.708722Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-03-28T09:23:28.708823Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-03-28T09:23:28.709377Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 2 } } Success: true ConfigTxSeqNo: 10 2025-03-28T09:23:28.709463Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 2 } } } 2025-03-28T09:23:28.720227Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 11 2025-03-28T09:23:28.720327Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7486788598106661574:2202], Recipient [1:7486788593811693725:2202]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-03-28T09:23:28.722758Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-03-28T09:23:28.722789Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:28.722803Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:28.722862Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-03-28T09:23:28.722882Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-03-28T09:23:28.733145Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-28T09:23:28.733182Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:28.733190Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:28.733197Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:28.733279Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1743153808668088 2025-03-28T09:23:28.733292Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database 
txid=1743153808668088 issue= 2025-03-28T09:23:28.733319Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1743153808668088 issue= 2025-03-28T09:23:28.733346Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-03-28T09:23:28.733423Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1743153808668088 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-28T09:23:28.735515Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-03-28T09:23:28.735581Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:28.751623Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7486788598106661591:2324], Recipient [1:7486788593811693725:2202]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808668088&action=2" } UserToken: "" } 2025-03-28T09:23:28.751662Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-28T09:23:28.751798Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808668088&action=2" ready: true status: SUCCESS } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithComplexPreds2-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 13413, MsgBus: 13675 2025-03-28T09:22:39.326193Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788388513341375:2144];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:39.361926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00175c/r3tmp/tmpeCGaUa/pdisk_1.dat 2025-03-28T09:22:39.980707Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:39.996764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:39.996843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:39.997901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13413, node 1 2025-03-28T09:22:40.271201Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:40.271223Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:40.271229Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:40.271328Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13675 TClient is connected to server localhost:13675 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:41.372449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:43.537430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788405693211127:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:43.537538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:43.537827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788405693211139:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:43.544925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:43.556166Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:22:43.556643Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788405693211141:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:43.637725Z node 1 :TX_PROXY ERROR: Actor# [1:7486788405693211192:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:43.918359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.100046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.137391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.175548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.206767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.324931Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788388513341375:2144];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:44.325060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:44.441709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.485137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.570833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.621969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.663997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.701298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:22:44.734255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-28T09:22:44.770617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.428289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:22:45.475448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.517802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.557067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.594321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.672938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.752404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.787068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.864561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:22:45.985259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:22:46.039878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:22:46.087995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:22:46.135201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:22:46.220727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:22:46.282972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:22:46.351178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:22:46.392413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.453546Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.462958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038497;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.468249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038485;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.470383Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038505;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.480422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.485213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.487393Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.492574Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038477;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.498102Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.501840Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.509054Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.511349Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.513954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038503;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.517448Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038457;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.519438Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038643;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.522732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.527795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.528161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038449;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.533558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.540079Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.546394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038543;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.546769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038479;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.551116Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.555609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.556029Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038528;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.561355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038504;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.562872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038519;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.567416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.568806Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.573463Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.574321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.578985Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.580236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.584597Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038438;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.585482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.589774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.591131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.596078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038494;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.597302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038447;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.601963Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.603188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.607530Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038455;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.608650Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.612686Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.648840Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038453;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:22.710326Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0z9h529d137ambj8y34fh", SessionId: ydb://session/3?node_id=1&id=MTlkMGMxNWQtOTBiMTAyNGItM2QyNzI0MDctZWY5ZTA3ZWU=, Slow query, duration: 34.736616s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:23:22.999794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:23.000221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:23.000892Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7486788547427160348:6348];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170; 2025-03-28T09:23:23.001242Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TGRpcCmsTest::SimpleTenantsTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpFlipJoin::RightOnly_3 [GOOD] Test command err: Trying to start YDB, gRPC: 7901, MsgBus: 2245 2025-03-28T09:23:13.591230Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788533725680424:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:13.591798Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016e0/r3tmp/tmppW1bOJ/pdisk_1.dat 2025-03-28T09:23:14.160655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:14.160780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:14.167814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:14.178834Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7901, node 1 2025-03-28T09:23:14.415335Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:14.415358Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:14.415366Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:14.415480Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2245 TClient is connected to server localhost:2245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:15.676827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:15.733249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:15.967015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:16.270132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:16.395116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:18.585506Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788533725680424:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:18.585577Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:18.773915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788555200518540:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:18.774080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:19.138978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:23:19.195228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:23:19.269612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:23:19.311285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:23:19.344370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:23:19.381643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:23:19.451723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788559495486351:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:19.451796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:19.452173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788559495486356:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:19.456276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:23:19.471438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788559495486358:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:23:19.555018Z node 1 :TX_PROXY ERROR: Actor# [1:7486788559495486414:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:20.983616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:23:21.018376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:23:21.066434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:23:21.117734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25818, MsgBus: 4954 2025-03-28T09:23:23.397189Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788578711521701:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016e0/r3tmp/tmp3JOBa3/pdisk_1.dat 2025-03-28T09:23:23.478919Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:23:23.628907Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:23.646248Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:23.646327Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:23.651617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25818, node 2 2025-03-28T09:23:23.887303Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:23.887330Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:23.887337Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:23.887455Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4954 TClient is connected to server localhost:4954 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:24.880607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:24.887490Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:23:24.901100Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:25.017163Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:25.244691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:25.404235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:28.107785Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788600186359795:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:28.107881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:28.153653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:23:28.230871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:23:28.294804Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486788578711521701:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:28.295003Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:28.312030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:23:28.369540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:23:28.411830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:23:28.466310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:23:28.550337Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788600186360312:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:28.550423Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:28.550654Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486788600186360317:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:28.554589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:23:28.576899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486788600186360319:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:23:28.633448Z node 2 :TX_PROXY ERROR: Actor# [2:7486788600186360374:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:29.892084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:23:29.923329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:23:29.962432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:23:30.009673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD] >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] >> KqpService::RangeCache+UseCache [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS87-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 21502, MsgBus: 3298 2025-03-28T09:22:20.431970Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788306678984321:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:20.432503Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00178a/r3tmp/tmpBvQtRO/pdisk_1.dat 2025-03-28T09:22:21.031656Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:21.062467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:21.062567Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:21.071783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21502, node 1 2025-03-28T09:22:21.388511Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:21.388533Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:21.388538Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:21.388621Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3298 TClient is connected to server localhost:3298 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:22.342653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:22.383656Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:24.912115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788323858854022:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:24.912226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:24.912663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788323858854034:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:24.916427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:24.931938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788323858854036:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:25.001941Z node 1 :TX_PROXY ERROR: Actor# [1:7486788323858854087:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:25.382880Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788306678984321:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:25.392448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:25.414714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:25.569227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:22:25.604310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:25.651156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:25.721764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:25.963871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:26.016598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:26.069923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:26.122404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:22:26.231246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:22:26.286125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:22:26.322609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
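The repeated "Resource pool default not found or you don't have access permissions" / "path exist, request accepts it" pairs above show the KQP workload service creating the default resource pool lazily on first query: the pool fetch fails with NOT_FOUND, a TPoolCreatorActor submits the creation (ESchemeOpCreateResourcePool) and schedules a doublechecking retry, and a concurrent creation attempt then finds the path already present and accepts it. A minimal YQL sketch of such a pool definition follows; the settings are illustrative assumptions, not values taken from this log:

    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = -1,  -- -1 = unlimited; illustrative assumption
        QUEUE_SIZE = -1               -- illustrative assumption, not from this log
    );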
2025-03-28T09:22:26.370589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.255791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:22:27.311253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.367657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.449564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.508331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.557492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.598006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.650279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.696368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.744765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.784094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.825310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.862146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.897743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.945132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.983838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:22:28.031626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.786421Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.789335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.794254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.799626Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.804120Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.807813Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.813475Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.819810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.825978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.828869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038481;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.830680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.839665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.846760Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
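Both KQP_SLOW_LOG "CREATE TABLE" records in this section (the 34.736616s one above and the 35.339532s one below) quote the same DDL text; with the \n escapes expanded for readability, it reads:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);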
2025-03-28T09:23:04.852411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.856600Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.861956Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.867665Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.876103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.881453Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.889997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.894754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.900161Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.903903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.909185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.913724Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.922993Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.927240Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.932580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.936700Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.942132Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.946445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038613;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.956721Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038611;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.959924Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038605;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.966351Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:04.970490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:05.078800Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0yqq9eh8tbngk7ade01em", SessionId: ydb://session/3?node_id=1&id=OTJlMjIzYjktYWU5NjFkYjYtNzIwNmY3MjktY2I2Y2M0ZWE=, Slow query, duration: 35.339532s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:23:05.653752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:05.654211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:05.654774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;self_id=[1:7486788405463248780:4359];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629; 2025-03-28T09:23:05.655115Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:27.514813Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe1069v7mjpwzmamwbhnc7a", SessionId: ydb://session/3?node_id=1&id=OTJlMjIzYjktYWU5NjFkYjYtNzIwNmY3MjktY2I2Y2M0ZWE=, Slow query, duration: 
10.078715s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "PRAGMA TablePathPrefix='/Root/test/ds';\n\n-- NB: Subquerys\n$bla1 = (select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from store_sales as store_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where store_sales.ss_sold_date_sk = date_dim.d_date_sk\n and store_sales.ss_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11);\n\n$bla2 = ((select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from catalog_sales as catalog_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk\n and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11)\n union all\n (select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from web_sales as web_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where web_sales.ws_sold_date_sk = date_dim.d_date_sk\n and web_sales.ws_bill_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11));\n\n-- start query 1 in stream 0 using template query87.tpl and seed 1819994127\nselect count(*)\nfrom $bla1 bla1 left only join $bla2 bla2 using (c_last_name, c_first_name, d_date)\n;\n\n-- end query 1 in stream 0 using template query87.tpl", parameters: 0b ------- [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/ut/unittest >> SystemView::AuthEffectivePermissions [GOOD] Test command err: 2025-03-28T09:14:11.956719Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786207114158301:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:11.974909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001593/r3tmp/tmp8Ucttf/pdisk_1.dat 2025-03-28T09:14:13.012640Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:14:13.032149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:14:13.032230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:14:13.032313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:14:13.068343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:14:13.250647Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2160} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.126547s 2025-03-28T09:14:13.250746Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:665} StateWork event processing took too much time Type# 2146435078 Duration# 0.126661s TServer::EnableGrpc on GrpcPort 25604, node 1 2025-03-28T09:14:13.824811Z node 1 :NET_CLASSIFIER WARN: distributable config is 
empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:14:13.824839Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:14:13.824848Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:14:13.824991Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13677 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:14:15.086528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:15.143698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:14:15.170323Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486786223564465644:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:14:15.195183Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/Database1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:14:15.197334Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [3:7486786206384596244:2061] waiting... 
2025-03-28T09:14:15.267854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:14:15.267949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:14:15.280026Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3
2025-03-28T09:14:15.281468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:14:15.410830Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [2:7486786206524896287:2061]
2025-03-28T09:14:15.407664Z node 3 :SYSTEM_VIEWS INFO: [72075186224037893] OnActivateExecutor
2025-03-28T09:14:15.407734Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Execute
2025-03-28T09:14:15.530583Z node 3 :SYSTEM_VIEWS DEBUG: Handle TEvSysView::TEvRegisterDbCounters: service id# [3:7486786223564465508:2076], path id# [OwnerId: 72057594046644480, LocalPathId: 2], service# 2
2025-03-28T09:14:15.665418Z node 2 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [2:7486786206524896287:2061]
2025-03-28T09:14:15.687480Z node 3 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector bootstrapped
2025-03-28T09:14:15.697917Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [1:7486786207114158095:2077]
2025-03-28T09:14:15.698015Z node 3 :SYSTEM_VIEWS INFO: Navigate by path id succeeded: service id# [3:7486786223564465508:2076], path id# [OwnerId: 72057594046644480, LocalPathId: 2], database# /Root/Database1
2025-03-28T09:14:15.698172Z node 3 :SYSTEM_VIEWS INFO: Navigate by database succeeded: service id# [3:7486786223564465508:2076], database# /Root/Database1, no sysview processor
2025-03-28T09:14:15.698767Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInitSchema::Complete
2025-03-28T09:14:15.698823Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Execute
2025-03-28T09:14:15.719128Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval summaries: query count# 0, node ids count# 0, total count# 0
2025-03-28T09:14:15.719213Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval metrics: query count# 0
2025-03-28T09:14:15.719249Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading interval query tops: total query count# 0
2025-03-28T09:14:15.719293Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading nodes to request: nodes count# 0, hashes count# 0
2025-03-28T09:14:15.719340Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 6, result count# 0
2025-03-28T09:14:15.719386Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 7, result count# 0
2025-03-28T09:14:15.719428Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 8, result count# 0
2025-03-28T09:14:15.719468Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 9, result count# 0
2025-03-28T09:14:15.719518Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 10, result count# 0
2025-03-28T09:14:15.719551Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 11, result count# 0
2025-03-28T09:14:15.719587Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 12, result count# 0
2025-03-28T09:14:15.719621Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 13, result count# 0
2025-03-28T09:14:15.719664Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 14, result count# 0
2025-03-28T09:14:15.719696Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 15, result count# 0
2025-03-28T09:14:15.719732Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 16, partCount count# 0
2025-03-28T09:14:15.719769Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 19, partCount count# 0
2025-03-28T09:14:15.728881Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 17, result count# 0
2025-03-28T09:14:15.728979Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 18, result count# 0
2025-03-28T09:14:15.729019Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 20, result count# 0
2025-03-28T09:14:15.729066Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Loading results: table# 21, result count# 0
2025-03-28T09:14:15.739778Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] Reset: interval end# 2025-03-28T09:14:15.000000Z
2025-03-28T09:14:15.741096Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:14:15.769839Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxInit::Complete
2025-03-28T09:14:15.781025Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Execute
2025-03-28T09:14:15.805116Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryResults: interval end# 2025-03-28T09:14:15.000000Z, query count# 0
2025-03-28T09:14:15.805244Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 8, interval end# 2025-03-28T09:14:15.000000Z, query count# 0, persisted# 0
2025-03-28T09:14:15.805277Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 10, interval end# 2025-03-28T09:14:15.000000Z, query count# 0, persisted# 0
2025-03-28T09:14:15.805303Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 12, interval end# 2025-03-28T09:14:15.000000Z, query count# 0, persisted# 0
2025-03-28T09:14:15.805332Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 14, interval end# 2025-03-28T09:14:15.000000Z, query count# 0, persisted# 0
2025-03-28T09:14:15.807100Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [1:7486786207114158011:2070]
2025-03-28T09:14:15.806317Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 9, interval end# 2025-03-28T10:00:00.000000Z, query count# 0, persisted# 0
2025-03-28T09:14:15.806355Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 11, interval end# 2025-03-28T10:00:00.000000Z, query count# 0, persisted# 0
2025-03-28T09:14:15.806374Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 13, interval end# 2025-03-28T10:00:00.000000Z, query count# 0, persisted# 0
2025-03-28T09:14:15.806399Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistQueryTopResults: table id# 15, interval end# 2025-03-28T10:00:00.000000Z, query count# 0, persisted# 0
2025-03-28T09:14:15.809234Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Execute: database# /Root/Database1
2025-03-28T09:14:15.875187Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxAggregate::Complete
2025-03-28T09:14:15.875231Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxConfigure::Complete
2025-03-28T09:14:15.895280Z node 3 :SYSTEM_VIEWS INFO: NSysView::TPartitionStatsCollector initialized: domain key# [OwnerId: 72057594046644480, LocalPathId: 2], sysview processor id# 72075186224037893
2025-03-28T09:14:15.914626Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessLabeledCounters: service id# [1:7486786207114158011:2070]
2025-03-28T09:14:15.999367Z node 1 :SYSTEM_VIEWS DEBUG: Handle TEvPrivate::TEvProcessCounters: service id# [1:7486786207114158095:2077]
2025-03-28T09:14:16.014925Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] TTxCollect::Execute
2025-03-28T09:14:16.014977Z node 3 :SYSTEM_VIEWS DEBUG: [72075186224037893] PersistPartitionTopResults: table id# 17, partition interval end# 2025-03-28T09:14:16.000000Z, partition count# ... ate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools/default TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:23:27.853968Z node 19 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/pools/default TableId: [72057594046644480:8:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindResourcePool DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-03-28T09:23:27.854063Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7486788594583742481:2414], row count: 5, finished: 0
2025-03-28T09:23:27.858819Z node 19 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:23:27.866944Z node 19 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Dir1 TableId: [72057594046644480:9:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] }
2025-03-28T09:23:27.867014Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7486788594583742481:2414], row count: 1, finished: 0
2025-03-28T09:23:27.870822Z node 19 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:23:27.871814Z node 19 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table0 TableId: [72057594046644480:4:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-03-28T09:23:27.871857Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7486788594583742481:2414], row count: 1, finished: 0
2025-03-28T09:23:27.878821Z node 19 :SYSTEM_VIEWS INFO: Scan finished, actor: [19:7486788594583742481:2414], owner: [19:7486788594583742478:2412], scan id: 0, table id: [72057594046644480:1:0:auth_effective_permissions]
2025-03-28T09:23:27.979188Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T09:23:27.979222Z node 22 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:23:27.983024Z node 19 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153807826, txId: 281474976715676] shutting down
2025-03-28T09:23:27.983594Z node 19 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [19:7486788521569296509:2192], database# , query hash# 11342553055430868283, cpu time# 441541
2025-03-28T09:23:28.034910Z node 22 :SYSTEM_VIEWS DEBUG: NSysView::TPartitionStatsCollector: TEvProcessOverloaded , top size by CPU # 0, top size by TLI # 0, time# 2025-03-28T09:23:28.034827Z
2025-03-28T09:23:28.128067Z node 19 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jqe10gm3dbmk19p62qk1vqhq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=19&id=YzZkYjY0ZS1jNjE0NTlkYS0zYmVhNjMwNS1mNDcyMmM0Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:23:28.130868Z node 19 :SYSTEM_VIEWS INFO: Scan started, actor: [19:7486788598878709830:2424], owner: [19:7486788598878709827:2422], scan id: 0, table id: [72075186224037888:1:0:auth_effective_permissions]
2025-03-28T09:23:28.133869Z node 19 :SYSTEM_VIEWS INFO: Scan prepared, actor: [19:7486788598878709830:2424], schemeshard id: 72075186224037888, hive id: 72057594037968897, database: /Root/Tenant1, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 2], database node count: 2
2025-03-28T09:23:28.133905Z node 19 :SYSTEM_VIEWS DEBUG: ProceedToScan, tenant name: /Root/Tenant1 tenant owner: root@builtin subject sid: empty require admin access: 0 is admin: 1
2025-03-28T09:23:28.139274Z node 19 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:23:28.139707Z node 19 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1 TableId: [72075186224037888:1:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 }] Groups: [] } Children [Dir2,Table1] }] }
2025-03-28T09:23:28.139765Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7486788598878709830:2424], row count: 1, finished: 0
2025-03-28T09:23:28.147786Z node 19 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Dir2 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:23:28.154519Z node 19 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Dir2 TableId: [72075186224037888:3:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [] }] }
2025-03-28T09:23:28.165751Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7486788598878709830:2424], row count: 2, finished: 0
2025-03-28T09:23:28.171856Z node 19 :SYSTEM_VIEWS TRACE: Navigate { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Table1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] }
2025-03-28T09:23:28.173816Z node 19 :SYSTEM_VIEWS TRACE: Got navigate: { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Tenant1/Table1 TableId: [72075186224037888:2:1] RequestType: ByPath Operation: OpList RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037889 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037891 Mediators: 72075186224037892 SchemeShard: 72075186224037888 SysViewProcessor: 72075186224037893 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] }
2025-03-28T09:23:28.173861Z node 19 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [19:7486788598878709830:2424], row count: 1, finished: 0
2025-03-28T09:23:28.175422Z node 19 :SYSTEM_VIEWS INFO: Scan finished, actor: [19:7486788598878709830:2424], owner: [19:7486788598878709827:2422], scan id: 0, table id: [72075186224037888:1:0:auth_effective_permissions]
2025-03-28T09:23:28.179030Z node 19 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [19:7486788521569296509:2192], database# , query hash# 17325808444334437222, cpu time# 156594
2025-03-28T09:23:28.179520Z node 19 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153808126, txId: 281474976715678] shutting down
2025-03-28T09:23:28.215644Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 22
2025-03-28T09:23:28.216391Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-03-28T09:23:28.216546Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 21
2025-03-28T09:23:28.216736Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-03-28T09:23:28.227688Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 20
2025-03-28T09:23:28.228667Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-03-28T09:23:28.228816Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 23
2025-03-28T09:23:28.229558Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connected -> Disconnected
2025-03-28T09:23:28.234201Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[20:7486788540127872324:2109], Type=268959746
2025-03-28T09:23:28.234420Z node 22 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-28T09:23:28.234203Z node 20 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-28T09:23:28.259984Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[23:7486788529498240467:2102], Type=268959746
2025-03-28T09:23:28.260045Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[23:7486788529498240467:2102], Type=268959746
2025-03-28T09:23:28.260075Z node 19 :HIVE WARN: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[23:7486788529498240467:2102], Type=268959746
------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD]
Test command err:
2025-03-28T09:23:25.471871Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788586174000587:2073];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:23:25.471917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00179e/r3tmp/tmpJF96Xw/pdisk_1.dat
2025-03-28T09:23:26.540483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:23:26.575704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:23:26.576384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:23:26.585371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:23:26.601667Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 26614, node 1
2025-03-28T09:23:27.097235Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:23:27.097259Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:23:27.097266Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:23:27.097366Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:13211
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:23:28.178900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:23:28.317718Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7486788599058903300:2315], Recipient [1:7486788590468968364:2212]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" }
2025-03-28T09:23:28.317775Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest
2025-03-28T09:23:28.317846Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx
2025-03-28T09:23:28.317862Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx
2025-03-28T09:23:28.318037Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: ""
2025-03-28T09:23:28.323047Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1743153808322587)
2025-03-28T09:23:28.327206Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1743153808322587 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId=
2025-03-28T09:23:28.327457Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED
2025-03-28T09:23:28.351637Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete
2025-03-28T09:23:28.352595Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808322587&action=1" } } }
2025-03-28T09:23:28.352741Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx
2025-03-28T09:23:28.352806Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap
2025-03-28T09:23:28.352949Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } }
2025-03-28T09:23:28.353452Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5
2025-03-28T09:23:28.353599Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } }
2025-03-28T09:23:28.378997Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6
2025-03-28T09:23:28.379058Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated
2025-03-28T09:23:28.379147Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7486788599058903305:2212], Recipient [1:7486788590468968364:2212]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated
2025-03-28T09:23:28.379165Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated
2025-03-28T09:23:28.379192Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx
2025-03-28T09:23:28.379200Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx
2025-03-28T09:23:28.379237Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED
2025-03-28T09:23:28.382874Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1
2025-03-28T09:23:28.382959Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2
2025-03-28T09:23:28.383403Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7486788599058903310:2316], Recipient [1:7486788590468968364:2212]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808322587&action=1" } UserToken: "" }
2025-03-28T09:23:28.383425Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest
2025-03-28T09:23:28.383678Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808322587&action=1" } }
2025-03-28T09:23:28.392703Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd
2025-03-28T09:23:28.392748Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx
2025-03-28T09:23:28.392765Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx
2025-03-28T09:23:28.392774Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx
2025-03-28T09:23:28.392828Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN
2025-03-28T09:23:28.392849Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1743153808322587 errorcode=STATUS_CODE_UNSPECIFIED issue=
2025-03-28T09:23:28.403382Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1
2025-03-28T09:23:28.403530Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx
2025-03-28T09:23:28.403560Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap
2025-03-28T09:23:28.403570Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain
2025-03-28T09:23:28.408058Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root"
2025-03-28T09:23:28.409715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480
2025-03-28T09:23:28.419972Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3
2025-03-28T09:23:28.420032Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715658
2025-03-28T09:23:28.427633Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715658
2025-03-28T09:23:28.446831Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715658
2025-03-28T09:23:28.447363Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743153808473 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480
2025-03-28T09:23:28.447374Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done
2025-03-28T09:23:28.447416Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NCo ... TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done
2025-03-28T09:23:29.760199Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved
2025-03-28T09:23:29.760286Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7486788603353871239:2212], Recipient [1:7486788590468968364:2212]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved
2025-03-28T09:23:29.760302Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved
2025-03-28T09:23:29.760316Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx
2025-03-28T09:23:29.760331Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx
2025-03-28T09:23:29.760364Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1
2025-03-28T09:23:29.760385Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1743153809692197 errorcode=STATUS_CODE_UNSPECIFIED issue=
2025-03-28T09:23:29.760425Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743153809692197 issue=
2025-03-28T09:23:29.762538Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1
2025-03-28T09:23:29.762606Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1"
2025-03-28T09:23:29.762619Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx
2025-03-28T09:23:29.762800Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7486788590468968268:2199], Recipient [1:7486788590468968364:2212]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1"
2025-03-28T09:23:29.762813Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState
2025-03-28T09:23:29.762828Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx
2025-03-28T09:23:29.762837Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx
2025-03-28T09:23:29.762860Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS
2025-03-28T09:23:29.762882Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1743153809692197 errorcode=STATUS_CODE_UNSPECIFIED issue=
2025-03-28T09:23:29.767616Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found
2025-03-28T09:23:29.767666Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found
2025-03-28T09:23:29.767699Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found
2025-03-28T09:23:29.767714Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found
2025-03-28T09:23:29.767724Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found
2025-03-28T09:23:29.767734Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found
2025-03-28T09:23:29.767744Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found
2025-03-28T09:23:29.767754Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found
2025-03-28T09:23:29.767766Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found
2025-03-28T09:23:29.773321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1
2025-03-28T09:23:29.775803Z node 1 :BS_VDISK_PUT ERROR: VDISK[82000001:_:0:0:0]: TEvVPut: failed to pass the Hull check; id# [72075186224037895:1:7:0:0:41:1] status# {Status# BLOCKED status.Postponed# true Lsn# 149} Marker# BSVS03
2025-03-28T09:23:29.781143Z node 3 :BS_PROXY_PUT ERROR: [b4016b53f7a167a6] Result# TEvPutResult {Id# [72075186224037895:1:7:0:0:41:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000001:2:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038081 Marker# BPP12
2025-03-28T09:23:29.784682Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1
2025-03-28T09:23:29.784736Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx
2025-03-28T09:23:29.784764Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap
2025-03-28T09:23:29.784880Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } }
2025-03-28T09:23:29.786092Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13
2025-03-28T09:23:29.786161Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } }
2025-03-28T09:23:29.788253Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7486788603353871330:2403], Recipient [1:7486788590468968364:2212]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153809692197&action=2" } UserToken: "" }
2025-03-28T09:23:29.788268Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest
2025-03-28T09:23:29.788405Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153809692197&action=2" } }
2025-03-28T09:23:29.801340Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found
2025-03-28T09:23:29.808878Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14
2025-03-28T09:23:29.808969Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7486788603353871334:2212], Recipient [1:7486788590468968364:2212]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted
2025-03-28T09:23:29.808994Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted
2025-03-28T09:23:29.809007Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx
2025-03-28T09:23:29.809016Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx
2025-03-28T09:23:29.809053Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED
2025-03-28T09:23:29.809070Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0
2025-03-28T09:23:29.816418Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult retrying for 72075186224037888 because of ERROR
2025-03-28T09:23:29.871732Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7486788603353871370:2406], Recipient [1:7486788590468968364:2212]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153809692197&action=2" } UserToken: "" }
2025-03-28T09:23:29.871763Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest
2025-03-28T09:23:29.871919Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153809692197&action=2" } }
2025-03-28T09:23:29.872604Z node 1 :BS_PDISK ERROR: {BPD01@blobstorage_pdisk_impl.cpp:2825} PDiskId# 1 request from unregistered ownerId# 5 error in TLogWrite for ownerId# 5 ownerRound# 5 lsn# 185 PDiskId# 1
2025-03-28T09:23:29.875519Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd
2025-03-28T09:23:29.875551Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx
2025-03-28T09:23:29.875559Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx
2025-03-28T09:23:29.875564Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx
2025-03-28T09:23:29.875627Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1743153809692197
2025-03-28T09:23:29.875664Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743153809692197 issue=
2025-03-28T09:23:29.875675Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1743153809692197 issue=
2025-03-28T09:23:29.875684Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database
2025-03-28T09:23:29.875766Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1743153809692197 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue=
2025-03-28T09:23:29.885427Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete
2025-03-28T09:23:29.885511Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx
2025-03-28T09:23:29.927907Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7486788603353871377:2408], Recipient [1:7486788590468968364:2212]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153809692197&action=2" } UserToken: "" }
2025-03-28T09:23:29.927937Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest
2025-03-28T09:23:29.928099Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153809692197&action=2" ready: true status: SUCCESS } }
2025-03-28T09:23:29.931370Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7486788603353871380:2410], Recipient [1:7486788590468968364:2212]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" }
2025-03-28T09:23:29.931394Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest
2025-03-28T09:23:29.931508Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } }
2025-03-28T09:23:29.934457Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285123, Sender [1:7486788603353871383:2411], Recipient [1:7486788590468968364:2212]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" }
2025-03-28T09:23:29.934480Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvListTenantsRequest
2025-03-28T09:23:29.934677Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } }
2025-03-28T09:23:29.946800Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3
2025-03-28T09:23:29.946986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected
------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD]
Test command err:
2025-03-28T09:23:28.673924Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788600098635029:2141];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:23:28.673959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00174d/r3tmp/tmpHUHrbv/pdisk_1.dat
2025-03-28T09:23:29.509616Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 17493, node 1
2025-03-28T09:23:29.590843Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-28T09:23:29.590938Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-28T09:23:29.597079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:23:29.597190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:23:29.648533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:23:29.843825Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:23:29.843847Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:23:29.843853Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:23:29.843954Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:21405
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:23:30.437833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
TClient is connected to server localhost:21405
2025-03-28T09:23:30.803794Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locking
2025-03-28T09:23:30.803814Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now locked by parent
2025-03-28T09:23:30.819196Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) is now active
2025-03-28T09:23:30.900226Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285140, Sender [1:7486788608688570339:2316], Recipient [1:7486788604393602692:2199]: NKikimr::NConsole::TEvConsole::TEvDescribeTenantOptionsRequest { Request { } UserToken: "" }
2025-03-28T09:23:30.900264Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvDescribeTenantOptionsRequest
2025-03-28T09:23:30.908669Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvDescribeTenantOptionsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.DescribeDatabaseOptionsResult] { storage_units { kind: "hdd2" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd1" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "ssd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "test" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } availability_zones { name: "dc-1" labels { key: "collocation" value: "disabled" } labels { key: "fixed_data_center" value: "DC-1" } } availability_zones { name: "any" labels { key: "any_data_center" value: "true" } labels { key: "collocation" value: "disabled" } } computational_units { kind: "slot" labels { key: "slot_type" value: "default" } labels { key: "type" value: "dynamic_slot" } allowed_availability_zones: "any" allowed_availability_zones: "dc-1" } } } } }
------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD]
Test command err:
2025-03-28T09:23:25.481363Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788585447541636:2211];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:23:25.485109Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001774/r3tmp/tmpIVHzdX/pdisk_1.dat
2025-03-28T09:23:26.521580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:23:26.588182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:23:26.588293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:23:26.612101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:23:26.676676Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 27994, node 1
2025-03-28T09:23:26.730395Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-28T09:23:26.730418Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-28T09:23:27.131143Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:23:27.131159Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:23:27.131166Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:23:27.131298Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:26935
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:23:28.093031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:23:28.323375Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7486788598332444210:2315], Recipient [1:7486788589742509308:2194]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" }
2025-03-28T09:23:28.323417Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest
2025-03-28T09:23:28.323438Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx
2025-03-28T09:23:28.323449Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx
2025-03-28T09:23:28.323548Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: ""
2025-03-28T09:23:28.323661Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1743153808323169)
2025-03-28T09:23:28.328663Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1743153808323169 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId=
2025-03-28T09:23:28.328849Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED
2025-03-28T09:23:28.343400Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete
2025-03-28T09:23:28.344756Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808323169&action=1" } } }
2025-03-28T09:23:28.350776Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx
2025-03-28T09:23:28.350888Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap
2025-03-28T09:23:28.351058Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } }
2025-03-28T09:23:28.351265Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285139, Sender [1:7486788598332444210:2315], Recipient [1:7486788589742509308:2194]: NKikimr::NConsole::TEvConsole::TEvNotifyOperationCompletionRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808323169&action=1" } UserToken: "" }
2025-03-28T09:23:28.351287Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvNotifyOperationCompletionRequest
2025-03-28T09:23:28.363018Z node 1 :CMS_TENANTS DEBUG: Add subscription to /Root/users/user-1 for [1:7486788598332444210:2315]
2025-03-28T09:23:28.366853Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvNotifyOperationCompletionResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808323169&action=1" } }
2025-03-28T09:23:28.366977Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5
2025-03-28T09:23:28.367130Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } }
2025-03-28T09:23:28.385083Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6
2025-03-28T09:23:28.385146Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated
2025-03-28T09:23:28.385244Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7486788598332444215:2194], Recipient [1:7486788589742509308:2194]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated
2025-03-28T09:23:28.385262Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated
2025-03-28T09:23:28.385276Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx
2025-03-28T09:23:28.385283Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx
2025-03-28T09:23:28.385340Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED
2025-03-28T09:23:28.385372Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1
2025-03-28T09:23:28.385433Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2
2025-03-28T09:23:28.396814Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd
2025-03-28T09:23:28.396849Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx
2025-03-28T09:23:28.396858Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx
2025-03-28T09:23:28.396869Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx
2025-03-28T09:23:28.396945Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN
2025-03-28T09:23:28.396974Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1743153808323169 errorcode=STATUS_CODE_UNSPECIFIED issue=
2025-03-28T09:23:28.408358Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1
2025-03-28T09:23:28.408517Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx
2025-03-28T09:23:28.408560Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap
2025-03-28T09:23:28.408569Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain
2025-03-28T09:23:28.413423Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root"
2025-03-28T09:23:28.414595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480
2025-03-28T09:23:28.479107Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3
2025-03-28T09:23:28.479229Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658
2025-03-28T09:23:28.498857Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658
2025-03-28T09:23:28.524351Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658
2025-03-28T09:23:28.524914Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153808543 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 ... 644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153808543 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186224037895 Coordinators: 72075186224037893 Coordinators: 72075186224037896 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037892 Mediators: 72075186224037890 Mediators: 72075186224037889 SchemeShard: 72075186224037897 Hive: 72075186224037888 SysViewProcessor: 72075186224037891 StatisticsAggregator: 72075186224037894 } DomainKey { SchemeShard: 72057594046644480 PathId: 3 } StoragePools { Name: "/Root/users/user-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 10 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046644480
2025-03-28T09:23:29.812778Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) drop subdomain
2025-03-28T09:23:29.812880Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain drop cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root/users" OperationType: ESchemeOpForceDropExtSubDomain Drop { Name: "user-1" } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root"
2025-03-28T09:23:29.819267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710660
2025-03-28T09:23:29.819604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-28T09:23:29.832004Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480
2025-03-28T09:23:29.832051Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710660
2025-03-28T09:23:29.832247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710660
2025-03-28T09:23:29.838120Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710660
2025-03-28T09:23:29.887280Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660
2025-03-28T09:23:29.887304Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done
2025-03-28T09:23:29.887352Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved
2025-03-28T09:23:29.887429Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7486788602627412109:2194], Recipient [1:7486788589742509308:2194]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved
2025-03-28T09:23:29.887452Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved
2025-03-28T09:23:29.887471Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx
2025-03-28T09:23:29.887488Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx
2025-03-28T09:23:29.887552Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1
2025-03-28T09:23:29.887577Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1743153809802355 errorcode=STATUS_CODE_UNSPECIFIED issue=
2025-03-28T09:23:29.887658Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743153809802355 issue=
2025-03-28T09:23:29.895716Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1
2025-03-28T09:23:29.898820Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1"
2025-03-28T09:23:29.898852Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx
2025-03-28T09:23:29.899719Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888
2025-03-28T09:23:29.899114Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7486788589742509167:2199], Recipient [1:7486788589742509308:2194]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1"
2025-03-28T09:23:29.899137Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState
2025-03-28T09:23:29.899154Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx
2025-03-28T09:23:29.899164Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx
2025-03-28T09:23:29.899191Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS
2025-03-28T09:23:29.899217Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1743153809802355 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-28T09:23:29.911770Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-03-28T09:23:29.911834Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:29.911874Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-03-28T09:23:29.912011Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-03-28T09:23:29.912611Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2025-03-28T09:23:29.912690Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2025-03-28T09:23:29.932100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-03-28T09:23:29.950668Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2025-03-28T09:23:29.951550Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7486788602627412192:2194], Recipient [1:7486788589742509308:2194]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-03-28T09:23:29.962919Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-03-28T09:23:29.967472Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:29.967515Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:29.967568Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2025-03-28T09:23:29.967590Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-03-28T09:23:30.055555Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037891 because of ERROR 2025-03-28T09:23:30.055663Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037892 because of ERROR 2025-03-28T09:23:30.055695Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037895 because of ERROR 2025-03-28T09:23:30.055723Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037889 because of ERROR 2025-03-28T09:23:30.055751Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxBlockStorageResult retrying for 72075186224037896 because of ERROR 2025-03-28T09:23:30.097014Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-03-28T09:23:30.107573Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult retrying for 72075186224037888 because of ERROR 2025-03-28T09:23:30.123892Z 
node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-28T09:23:30.123930Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:30.123939Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:30.123946Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:30.129261Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1743153809802355 2025-03-28T09:23:30.129283Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743153809802355 issue= 2025-03-28T09:23:30.129297Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1743153809802355 issue= 2025-03-28T09:23:30.138762Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-03-28T09:23:30.138901Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1743153809802355 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-28T09:23:30.199191Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-03-28T09:23:30.199405Z node 1 :CMS_TENANTS TRACE: Send /Root/users/user-1 notification to [1:7486788602627412087:2388]: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153809802355&action=2" ready: true status: SUCCESS } } 2025-03-28T09:23:30.199512Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:30.229483Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7486788606922379542:2391], Recipient [1:7486788589742509308:2194]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" } 2025-03-28T09:23:30.229513Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-03-28T09:23:30.229662Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2025-03-28T09:23:30.257612Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285123, Sender [1:7486788606922379545:2392], Recipient [1:7486788589742509308:2194]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" } 2025-03-28T09:23:30.257648Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvListTenantsRequest 2025-03-28T09:23:30.257876Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2025-03-28T09:23:30.271504Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-28T09:23:30.271708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] Test command err: 2025-03-28T09:23:25.548798Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788584397293097:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:25.549134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001783/r3tmp/tmpv6b7Sb/pdisk_1.dat 2025-03-28T09:23:26.618838Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:26.634026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:23:26.636861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:26.636947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:26.667929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3599, node 1 2025-03-28T09:23:27.106754Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:27.106782Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:27.106791Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:27.106900Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63286 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:28.110566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:63286 2025-03-28T09:23:28.514132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:23:28.672800Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7486788597282195678:2315], Recipient [1:7486788588692260751:2195]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2025-03-28T09:23:28.672841Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-03-28T09:23:28.672869Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:28.672883Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:28.673017Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" 2025-03-28T09:23:28.673222Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1743153808673056) 2025-03-28T09:23:28.673724Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1743153808673056 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-03-28T09:23:28.673968Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2025-03-28T09:23:28.691562Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-03-28T09:23:28.692515Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808673056&action=1" } } } 2025-03-28T09:23:28.692660Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:28.692719Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-03-28T09:23:28.692850Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-03-28T09:23:28.693279Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-03-28T09:23:28.693407Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-03-28T09:23:28.707545Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true 
} Success: true ConfigTxSeqNo: 6 2025-03-28T09:23:28.707610Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-03-28T09:23:28.707739Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7486788597282195685:2195], Recipient [1:7486788588692260751:2195]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-03-28T09:23:28.707757Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-03-28T09:23:28.707782Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:28.707790Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:28.707826Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-03-28T09:23:28.707842Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-03-28T09:23:28.707929Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-03-28T09:23:28.723382Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-28T09:23:28.723413Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:28.723421Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:28.723428Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:28.723510Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-03-28T09:23:28.723532Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1743153808673056 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-28T09:23:28.725994Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7486788597282195693:2317], Recipient [1:7486788588692260751:2195]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808673056&action=1" } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" } 2025-03-28T09:23:28.726020Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-28T09:23:28.726200Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153808673056&action=1" } } 2025-03-28T09:23:28.734340Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-03-28T09:23:28.734508Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:28.734557Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-03-28T09:23:28.734572Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-03-28T09:23:28.739601Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 
UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)" DatabaseName: "Root" 2025-03-28T09:23:28.748805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-03-28T09:23:28.753356Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-03-28T09:23:28.753406Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710659 2025-03-28T09:23:28.761712Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710659 2025-03-28T09:23:28.771123Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2025-03-28T09:23:28.771670Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153808802 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "user-1@builtin" ACL: "" EffectiveACL: "\n\032\010\001\020\377\377\003\032\016user-1@builtin \003(\001" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } ... 
Transaction { ModifyScheme { WorkingDir: "/Root/users" OperationType: ESchemeOpForceDropExtSubDomain Drop { Name: "user-1" } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\016user-2@builtin\022\030\022\026\n\024all-users@well-known\032\016user-2@builtin\"\007Builtin*\017**** (FA717EBF)" DatabaseName: "Root" 2025-03-28T09:23:30.829412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710663 2025-03-28T09:23:30.829727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:23:30.832337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 3] by tx: 281474976710663 2025-03-28T09:23:30.832912Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710663 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2025-03-28T09:23:30.832945Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710663 2025-03-28T09:23:30.840674Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710663 2025-03-28T09:23:30.851125Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7486788605872130909:2382], Recipient [1:7486788588692260751:2195]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153810818789&action=2" } UserToken: "" } 2025-03-28T09:23:30.851158Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-28T09:23:30.851338Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153810818789&action=2" } } 2025-03-28T09:23:30.862075Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710663 2025-03-28T09:23:30.862103Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-03-28T09:23:30.862161Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-03-28T09:23:30.862227Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435077, Sender [1:7486788605872130892:2195], Recipient [1:7486788588692260751:2195]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2025-03-28T09:23:30.862251Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2025-03-28T09:23:30.862267Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:30.862279Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:30.862314Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Execute /Root/users/user-1 2025-03-28T09:23:30.862344Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1743153810818789 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-03-28T09:23:30.862405Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743153810818789 issue=AccessDenied: Access 
denied for request 2025-03-28T09:23:30.868448Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found - using supplied 72075186224037888 2025-03-28T09:23:30.869081Z node 1 :CMS_TENANTS DEBUG: TTxRemoveComputationalUnits Complete /Root/users/user-1 2025-03-28T09:23:30.869187Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2025-03-28T09:23:30.869203Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:30.869375Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7486788588692260632:2198], Recipient [1:7486788588692260751:2195]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-28T09:23:30.869393Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-28T09:23:30.869409Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:30.869419Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:30.869446Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2025-03-28T09:23:30.869468Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1743153810818789 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-03-28T09:23:30.875427Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-03-28T09:23:30.875482Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:30.875521Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-03-28T09:23:30.875684Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-03-28T09:23:30.876222Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 3 } ItemConfigGeneration: 2 } } Success: true ConfigTxSeqNo: 10 2025-03-28T09:23:30.876303Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 2 } } } 2025-03-28T09:23:30.883282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Got DeleteTabletReply with Forward response from Hive 72075186224037888 to Hive 72057594037968897 shardIdx 72057594046644480:1 2025-03-28T09:23:30.889149Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 11 2025-03-28T09:23:30.889258Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435081, Sender [1:7486788605872130952:2195], Recipient [1:7486788588692260751:2195]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2025-03-28T09:23:30.889288Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolDeleted 2025-03-28T09:23:30.889302Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:30.889318Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:30.889351Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 
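The records above trace the server side of a forced database drop: the ESchemeOpForceDropExtSubDomain proposal, tablet deletion relayed through Hive, then DeleteStoragePool and the pool reaching state=DELETED. The public entry point that drives this flow is CmsService/RemoveDatabase. A minimal client-side sketch follows; it assumes grpcurl with a descriptor set compiled from ydb-api-protos, and the endpoint, token, and descriptor-set filename are illustrative placeholders, not values taken from this log.

# Hedged sketch: drop a database through the public CMS API.
# Endpoint, token, and ydb_api.protoset are assumptions for illustration.
grpcurl -plaintext \
  -protoset ydb_api.protoset \
  -H 'x-ydb-auth-ticket: <token>' \
  -d '{"path": "/Root/users/user-1"}' \
  localhost:2136 Ydb.Cms.V1.CmsService/RemoveDatabase

As the trace shows, the call is asynchronous: the response carries an operation id of the form ydb://cmsrequest/5?...&action=2, which the client polls via GetOperation until it reports ready: true.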
2025-03-28T09:23:30.889375Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2025-03-28T09:23:30.896156Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-03-28T09:23:30.896188Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-03-28T09:23:30.896200Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2025-03-28T09:23:30.896212Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2025-03-28T09:23:30.896224Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037896 not found 2025-03-28T09:23:30.896237Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2025-03-28T09:23:30.896252Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-03-28T09:23:30.896265Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2025-03-28T09:23:30.896277Z node 3 :HIVE WARN: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037894 not found 2025-03-28T09:23:30.914114Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037888 2025-03-28T09:23:30.914606Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-28T09:23:30.914626Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:30.914635Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:30.914642Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:30.915710Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1743153810818789 2025-03-28T09:23:30.917159Z node 1 :CMS_TENANTS TRACE: Remove computational units of /Root/users/user-1 from database txid=1743153810818789 issue=AccessDenied: Access denied for request 2025-03-28T09:23:30.917185Z node 1 :CMS_TENANTS TRACE: Remove tenant /Root/users/user-1 from database txid=1743153810818789 issue=AccessDenied: Access denied for request 2025-03-28T09:23:30.917198Z node 1 :CMS_TENANTS TRACE: Remove pool /Root/users/user-1:hdd from database 2025-03-28T09:23:30.923094Z node 1 :CMS_TENANTS TRACE: Add tenant removal info for /Root/users/user-1 txid=1743153810818789 code=SUCCESS errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2025-03-28T09:23:30.927421Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-03-28T09:23:30.936087Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7486788605872130997:2385], Recipient [1:7486788588692260751:2195]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153810818789&action=2" } UserToken: "" } 2025-03-28T09:23:30.936121Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-28T09:23:30.941060Z node 1 :CMS_TENANTS TRACE: 
Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153810818789&action=2" } } 2025-03-28T09:23:30.944005Z node 1 :CMS_TENANTS DEBUG: TTxRemoveTenantDone Complete 2025-03-28T09:23:30.944053Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:31.051215Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7486788610167098312:2390], Recipient [1:7486788588692260751:2195]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153810818789&action=2" } UserToken: "" } 2025-03-28T09:23:31.051249Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-28T09:23:31.051430Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153810818789&action=2" ready: true status: SUCCESS } } 2025-03-28T09:23:31.059868Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-28T09:23:31.060093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected >> TGRpcCmsTest::AuthTokenTest [GOOD] |86.8%| [TA] $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpJoinOrder::CanonizedJoinOrderTPCH12 [GOOD] >> TActorActivity::Basic [GOOD] >> ActorBootstrapped::TestBootstrapped >> TestProtocols::TestResolveProtocol |86.8%| [TA] {RESULT} $(B)/ydb/core/sys_view/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ActorBootstrapped::TestBootstrapped [GOOD] >> ActorBootstrapped::TestBootstrappedParent [GOOD] >> TActorTracker::Basic >> TVPatchTests::PatchPartPutError |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TActorTracker::Basic [GOOD] >> TVPatchTests::PatchPartPutError [GOOD] >> TestProtocols::TestResolveProtocol [GOOD] >> TestProtocols::TestHTTPCollectedVerySlow ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AuthTokenTest [GOOD] Test command err: 2025-03-28T09:23:27.506876Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788595462211471:2217];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:27.529166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001763/r3tmp/tmpkg29Kf/pdisk_1.dat 2025-03-28T09:23:28.314131Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:28.337946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:28.338038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:28.355054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62052, node 1 2025-03-28T09:23:28.723295Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:28.723315Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-03-28T09:23:28.723320Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:28.723410Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11562 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:29.272261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:29.334862Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:23:29.438113Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285120, Sender [1:7486788604052146684:2314], Recipient [1:7486788599757179049:2200]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-28T09:23:29.449366Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2025-03-28T09:23:29.449421Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:29.449435Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:29.449601Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" 2025-03-28T09:23:29.449794Z node 1 :CMS_TENANTS DEBUG: Add tenant /Root/users/user-1 (txid = 1743153809443858) 2025-03-28T09:23:29.450541Z node 1 :CMS_TENANTS TRACE: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1743153809443858 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2025-03-28T09:23:29.450745Z node 1 :CMS_TENANTS TRACE: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 
state=NOT_ALLOCATED 2025-03-28T09:23:29.464009Z node 1 :CMS_TENANTS DEBUG: TTxCreateTenant Complete 2025-03-28T09:23:29.465109Z node 1 :CMS_TENANTS TRACE: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153809443858&action=1" } } } 2025-03-28T09:23:29.465257Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:29.465338Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2025-03-28T09:23:29.465501Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2025-03-28T09:23:29.466157Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2025-03-28T09:23:29.466298Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-03-28T09:23:29.472716Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7486788604052146695:2315], Recipient [1:7486788599757179049:2200]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153809443858&action=1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-28T09:23:29.472754Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-28T09:23:29.472950Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153809443858&action=1" } } 2025-03-28T09:23:29.479904Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2025-03-28T09:23:29.479966Z node 1 :CMS_TENANTS DEBUG: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-03-28T09:23:29.480046Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435079, Sender [1:7486788604052146689:2200], Recipient [1:7486788599757179049:2200]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2025-03-28T09:23:29.480066Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvPoolAllocated 2025-03-28T09:23:29.480085Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:29.480099Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:29.480171Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2025-03-28T09:23:29.480187Z node 1 :CMS_TENANTS TRACE: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2025-03-28T09:23:29.480245Z node 1 :CMS_TENANTS TRACE: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2025-03-28T09:23:29.485724Z node 1 :CMS_TENANTS DEBUG: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2025-03-28T09:23:29.485750Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 
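The create path above runs in two legs: TTxCreateTenant persists the tenant and its storage-pool intent (state=CREATING_POOLS), then TPoolManip defines the pool in the BSC before the subdomain itself is proposed. On the wire this whole flow is a single public call, CmsService/CreateDatabase; a hedged sketch of the request matching the resources recorded in the trace (same grpcurl and descriptor-set assumptions as above; the endpoint is a placeholder):

# Hedged sketch: create a database with one HDD storage unit, matching
# the resources { storage_units { unit_kind: "hdd" count: 1 } } in the trace.
grpcurl -plaintext \
  -protoset ydb_api.protoset \
  -d '{
        "path": "/Root/users/user-1",
        "resources": {
          "storage_units": [ { "unit_kind": "hdd", "count": "1" } ]
        }
      }' \
  localhost:2136 Ydb.Cms.V1.CmsService/CreateDatabase

The response is again an operation id (action=1), which the trace shows being polled through TEvGetOperationRequest while the pool is allocated and the ESchemeOpCreateExtSubDomain transaction commits.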
2025-03-28T09:23:29.485756Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:29.485770Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:29.485826Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2025-03-28T09:23:29.485847Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1743153809443858 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-28T09:23:29.493151Z node 1 :CMS_TENANTS DEBUG: TTxUpdateTenantState complete for /Root/users/user-1 2025-03-28T09:23:29.493301Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:29.493339Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1)::Bootstrap 2025-03-28T09:23:29.493355Z node 1 :CMS_TENANTS DEBUG: TSubDomainManip(/Root/users/user-1) create subdomain 2025-03-28T09:23:29.501617Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "Root" 2025-03-28T09:23:29.515753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-03-28T09:23:29.526475Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 2025-03-28T09:23:29.526552Z node 1 :CMS_TENANTS TRACE: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2025-03-28T09:23:29.571091Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2025-03-28T09:23:29.602368Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285131, Sender [1:7486788604052146764:2318], Recipient [1:7486788599757179049:2200]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153809443858&action=1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-28T09:23:29.602400Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetOperationRequest 2025-03-28T09:23:29.602531Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1743153809443858&action=1" } } 2025-03-28T09:23:29.637944Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658 2025-03-28T09:23:29.638533Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 3 SchemeshardId: 720575 ... 
nsole::TEvGetTenantStatusRequest 2025-03-28T09:23:31.215111Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-03-28T09:23:31.215190Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7486788595462211647:2194], Recipient [1:7486788599757179049:2200]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-28T09:23:31.215207Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-28T09:23:31.215755Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-03-28T09:23:31.223875Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7486788612642081807:2372], Recipient [1:7486788599757179049:2200]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-28T09:23:31.223905Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-03-28T09:23:31.223947Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-03-28T09:23:31.224681Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7486788595462211647:2194], Recipient [1:7486788599757179049:2200]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-28T09:23:31.224703Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-28T09:23:31.225286Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-03-28T09:23:31.233924Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7486788612642081822:2373], Recipient [1:7486788599757179049:2200]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-28T09:23:31.233962Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-03-28T09:23:31.233998Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-03-28T09:23:31.234080Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7486788595462211647:2194], Recipient [1:7486788599757179049:2200]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-28T09:23:31.234091Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-28T09:23:31.234724Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { 
[type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-03-28T09:23:31.238112Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7486788612642081831:2374], Recipient [1:7486788599757179049:2200]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-28T09:23:31.238152Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-03-28T09:23:31.238186Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-03-28T09:23:31.238458Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7486788595462211647:2194], Recipient [1:7486788599757179049:2200]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-28T09:23:31.238472Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-28T09:23:31.241198Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-03-28T09:23:31.257207Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7486788612642081851:2375], Recipient [1:7486788599757179049:2200]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-28T09:23:31.257244Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-03-28T09:23:31.257285Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-03-28T09:23:31.257654Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7486788595462211647:2194], Recipient [1:7486788599757179049:2200]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-28T09:23:31.257680Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-28T09:23:31.258244Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2025-03-28T09:23:31.259334Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2025-03-28T09:23:31.259362Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) done 2025-03-28T09:23:31.259405Z node 1 :CMS_TENANTS DEBUG: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 
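The run of near-identical GetDatabaseStatusResult records above is the client polling: state stays PENDING_RESOURCES while allocated_resources catch up with required_resources, and flips to RUNNING once TEvSubdomainReady lands and confirmedsubdomain is bumped to 2. A crude polling loop under the same grpcurl assumptions as the earlier sketches — a real client would inspect operation.status and back off rather than grepping JSON:

# Hedged sketch: poll until the database reports state RUNNING.
# Parsing with grep is deliberately crude; illustrative only.
while :; do
  out=$(grpcurl -plaintext -protoset ydb_api.protoset \
    -d '{"path": "/Root/users/user-1"}' \
    localhost:2136 Ydb.Cms.V1.CmsService/GetDatabaseStatus)
  echo "$out" | grep -o '"state": "[A-Z_]*"'
  echo "$out" | grep -q '"state": "RUNNING"' && break
  sleep 1
done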
2025-03-28T09:23:31.259455Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 2146435076, Sender [1:7486788604052146792:2200], Recipient [1:7486788599757179049:2200]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2025-03-28T09:23:31.259471Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvPrivate::TEvSubdomainReady 2025-03-28T09:23:31.259492Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) enqueue tx 2025-03-28T09:23:31.259500Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) starts new tx 2025-03-28T09:23:31.259530Z node 1 :CMS_TENANTS DEBUG: TTxUpdateConfirmedSubdomain for tenant /Root/users/user-1 to 2 2025-03-28T09:23:31.259554Z node 1 :CMS_TENANTS TRACE: Update tenant state in database for /Root/users/user-1 state=RUNNING txid=1743153809443858 errorcode=STATUS_CODE_UNSPECIFIED issue= 2025-03-28T09:23:31.259615Z node 1 :CMS_TENANTS TRACE: Update database for /Root/users/user-1 confirmedsubdomain=2 2025-03-28T09:23:31.264354Z node 1 :CMS_TENANTS DEBUG: TTxUpdateConfirmedSubdomain complete for /Root/users/user-1 2025-03-28T09:23:31.264394Z node 1 :CMS_TENANTS TRACE: TTxProcessor(tenants) completed tx 2025-03-28T09:23:31.271059Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273285122, Sender [1:7486788612642081858:2376], Recipient [1:7486788599757179049:2200]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" } 2025-03-28T09:23:31.271086Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2025-03-28T09:23:31.271129Z node 1 :CMS_TENANTS TRACE: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2025-03-28T09:23:31.271260Z node 1 :CMS_TENANTS TRACE: StateWork, received event# 273154052, Sender [1:7486788595462211647:2194], Recipient [1:7486788599757179049:2200]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2025-03-28T09:23:31.271272Z node 1 :CMS_TENANTS TRACE: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2025-03-28T09:23:31.271840Z node 1 :CMS_TENANTS TRACE: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: RUNNING required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } TClient is connected to server localhost:11562 TClient::Ls request: /Root/users/user-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root/users/user-1" PathId: 1 SchemeshardId: 72075186224037897 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037897 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 3 ProcessingParams { Version: 3 PlanReso... 
(TRUNCATED) 2025-03-28T09:23:32.028777Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-28T09:23:32.029231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:23:34.698858Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486788605146988321:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:34.698982Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:35.463115Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/users/user-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:23:35.488470Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7486788630916792723:2345], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> TInterconnectTest::OldFormat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/service/unittest >> KqpService::RangeCache+UseCache [GOOD] Test command err: Trying to start YDB, gRPC: 14523, MsgBus: 61294 2025-03-28T09:13:53.095251Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486786130902450854:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:53.095650Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001511/r3tmp/tmpz13xXE/pdisk_1.dat 2025-03-28T09:13:53.596736Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:13:53.632952Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:13:53.633069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:13:53.635044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14523, node 1 2025-03-28T09:13:53.811190Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:13:53.811219Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:13:53.811228Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:13:53.811330Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61294 TClient is connected to server localhost:61294 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:13:54.393187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:54.438639Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:13:54.454095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
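AuthTokenTest above finishes by describing the new database through its own external schemeshard (SchemeshardId 72075186224037897, PathType EPathTypeSubDomain), while the earlier RemoveWithAnotherTokenTest confirms an empty tenant list after removal via TEvListTenantsRequest. The public-API equivalent of that check is CmsService/ListDatabases; a one-line sketch under the same grpcurl assumptions (endpoint and descriptor set are placeholders):

# Hedged sketch: enumerate databases known to CMS (request body is empty).
grpcurl -plaintext -protoset ydb_api.protoset \
  -d '{}' localhost:2136 Ydb.Cms.V1.CmsService/ListDatabases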
2025-03-28T09:13:54.671048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:54.921975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:54.993275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:13:57.007773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786148082321666:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.007901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.399283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.470291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.548328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.593196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.624366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.667964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:13:57.757331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786148082322189:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.757410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.757770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486786148082322194:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:13:57.761627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:13:57.777706Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486786148082322196:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:13:57.853127Z node 1 :TX_PROXY ERROR: Actor# [1:7486786148082322251:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:13:58.087424Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486786130902450854:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:13:58.087473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:13:58.971961Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=YzM5NmMxNTgtYzQ0ZmM3ZC03NzMzOTY1NS1jYzhiZjZiOA==, ActorId: [1:7486786143787354352:2404], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:13:58.972001Z node 1 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=1&id=YzM5NmMxNTgtYzQ0ZmM3ZC03NzMzOTY1NS1jYzhiZjZiOA==, ActorId: [1:7486786143787354352:2404], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:13:58.972025Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YzM5NmMxNTgtYzQ0ZmM3ZC03NzMzOTY1NS1jYzhiZjZiOA==, ActorId: [1:7486786143787354352:2404], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:13:58.972045Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YzM5NmMxNTgtYzQ0ZmM3ZC03NzMzOTY1NS1jYzhiZjZiOA==, ActorId: [1:7486786143787354352:2404], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:13:58.972101Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YzM5NmMxNTgtYzQ0ZmM3ZC03NzMzOTY1NS1jYzhiZjZiOA==, ActorId: [1:7486786143787354352:2404], ActorState: unknown state, Session actor destroyed 2025-03-28T09:13:58.978159Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YTFhNGE5YWQtNTMzZjU5ZGEtOWUwOThiZC00ZDE2Yjc5Mw==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YTFhNGE5YWQtNTMzZjU5ZGEtOWUwOThiZC00ZDE2Yjc5Mw== 2025-03-28T09:13:58.978335Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YTFhNGE5YWQtNTMzZjU5ZGEtOWUwOThiZC00ZDE2Yjc5Mw==, ActorId: [1:7486786152377289808:2489], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:13:58.990331Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Y2RmMzlmOTEtZDg3ZDQ0NjItMWRmNWEyNTYtZWNkMTkwNGE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id Y2RmMzlmOTEtZDg3ZDQ0NjItMWRmNWEyNTYtZWNkMTkwNGE= 2025-03-28T09:13:58.990508Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=Y2RmMzlmOTEtZDg3ZDQ0NjItMWRmNWEyNTYtZWNkMTkwNGE=, ActorId: [1:7486786152377289810:2491], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:13:59.010320Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NWYyODEwNWQtM2QwNmVkOTgtNTU2NTU3MGUtMzI4ZjViZmY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id NWYyODEwNWQtM2QwNmVkOTgtNTU2NTU3MGUtMzI4ZjViZmY= 2025-03-28T09:13:59.010497Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=NWYyODEwNWQtM2QwNmVkOTgtNTU2NTU3MGUtMzI4ZjViZmY=, ActorId: 
[1:7486786156672257110:2494], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:13:59.028573Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjVkNzYzYTUtMjY1OWY1NDctNjYwOWVhM2ItODM0ODRhMQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id YjVkNzYzYTUtMjY1OWY1NDctNjYwOWVhM2ItODM0ODRhMQ== 2025-03-28T09:13:59.029090Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=YjVkNzYzYTUtMjY1OWY1NDctNjYwOWVhM2ItODM0ODRhMQ==, ActorId: [1:7486786156672257112:2496], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:13:59.043657Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjkyZmEwYmMtMjBjZDk3MDAtYTBjOWY4NDUtYTY5OWY1MGY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id ZjkyZmEwYmMtMjBjZDk3MDAtYTBjOWY4NDUtYTY5OWY1MGY= 2025-03-28T09:13:59.043846Z node 1 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=1&id=ZjkyZmEwYmMtMjBjZDk3MDAtYTBjOWY4NDUtYTY5OWY1MGY=, ActorId: [1:7486786156672257114:2498], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:13:59.060734Z node 1 :KQP_ ... blechecking } 2025-03-28T09:20:29.102797Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486787829984808899:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:20:29.158622Z node 7 :TX_PROXY ERROR: Actor# [7:7486787829984809075:2423] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:29.169817Z node 7 :TX_PROXY ERROR: Actor# [7:7486787829984809083:2429] txid# 281474976715670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:29.173099Z node 7 :TX_PROXY ERROR: Actor# [7:7486787829984809082:2428] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:29.178376Z node 7 :TX_PROXY ERROR: Actor# [7:7486787829984809094:2437] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:29.178617Z node 7 :TX_PROXY ERROR: Actor# [7:7486787829984809095:2438] txid# 281474976715672, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:29.185025Z node 7 :TX_PROXY ERROR: Actor# [7:7486787829984809126:2456] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:29.190234Z node 7 :TX_PROXY ERROR: Actor# [7:7486787829984809134:2462] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:29.198646Z node 7 :TX_PROXY ERROR: Actor# [7:7486787829984809141:2468] txid# 281474976715676, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:29.198949Z node 7 :TX_PROXY ERROR: Actor# [7:7486787829984809143:2469] txid# 281474976715677, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:29.199108Z node 7 :TX_PROXY ERROR: Actor# [7:7486787829984809140:2467] txid# 281474976715675, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: 
path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:20:38.250911Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:20:38.250976Z node 7 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 18671, MsgBus: 31919 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001511/r3tmp/tmpOl0qVV/pdisk_1.dat 2025-03-28T09:23:07.026517Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:23:07.258586Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:07.268449Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:07.268581Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:07.280355Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18671, node 8 2025-03-28T09:23:07.518514Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:07.518544Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:07.518556Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:07.518745Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31919 TClient is connected to server localhost:31919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:08.400646Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:08.410092Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:23:08.422488Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:08.583377Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
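
The repeated TX_PROXY ERROR lines above are several concurrent sessions racing to create /Root/.metadata/workload_manager/pools/default; the losers get "path exist, request accepts it", which callers treat as success. A hedged C++ sketch of that idempotent-create convention follows; the result enum and function name are made up for illustration and are not the real scheme API.

#include <iostream>

// Illustration of treating "path exists" as success when several actors
// race to create the same default resource pool. Not YDB's actual code.
enum class ECreateResult { Done, PathExists, Error };

bool EnsureDefaultPool(ECreateResult result) {
    switch (result) {
        case ECreateResult::Done:       return true;   // this session created it first
        case ECreateResult::PathExists: return true;   // another session won the race;
                                                       // mirrors "path exist, request accepts it"
        case ECreateResult::Error:      return false;  // a real failure
    }
    return false;
}

int main() {
    std::cout << EnsureDefaultPool(ECreateResult::PathExists) << "\n";  // prints 1
}
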
2025-03-28T09:23:08.961707Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:09.104258Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:14.022914Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7486788537128370306:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:14.023062Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:14.242100Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:23:14.306186Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:23:14.369818Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:23:14.437803Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:23:14.526998Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:23:14.656486Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:23:14.755366Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7486788537128370838:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:14.755537Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:14.755941Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7486788537128370843:2464], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:14.762575Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:23:14.805362Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7486788537128370845:2465], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:23:14.903136Z node 8 :TX_PROXY ERROR: Actor# [8:7486788537128370904:3463] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:22.229111Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:23:22.229148Z node 8 :IMPORT WARN: Table profiles were not loaded took: 16.222665s took: 16.224893s took: 16.226797s took: 16.229849s took: 16.231558s took: 16.231831s took: 16.232258s took: 16.232462s took: 16.232672s took: 16.240518s ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartPutError [GOOD] Test command err: Recv 65537 2025-03-28T09:23:37.366815Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-28T09:23:37.367714Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2025-03-28T09:23:37.367793Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-03-28T09:23:37.368013Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2025-03-28T09:23:37.368078Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-28T09:23:37.368256Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2025-03-28T09:23:37.368341Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2025-03-28T09:23:37.368418Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2025-03-28T09:23:37.369074Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:627} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR 2025-03-28T09:23:37.369168Z node 1 :BS_VDISK_PATCH INFO: 
{BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VPutResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2025-03-28T09:23:37.369260Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TActorTracker::Basic [GOOD] Test command err: ASYNC_DESTROYER >> TInterconnectTest::OldFormat [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew |86.9%| [TA] $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.9%| [TA] {RESULT} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTest::SelectRangeReverseItemsLimit >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheck >> TInterconnectTest::TestBlobEvent220BytesPreSerialized >> TLocksTest::Range_GoodLock0 [GOOD] >> TLocksTest::Range_GoodLock1 >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] >> TInterconnectTest::TestBlobEvent220BytesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH12 [GOOD] Test command err: Trying to start YDB, gRPC: 21004, MsgBus: 25559 2025-03-28T09:21:52.512034Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788185998713543:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:52.512313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017d7/r3tmp/tmppdaChi/pdisk_1.dat 2025-03-28T09:21:53.018742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:53.018839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:53.034070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:21:53.117665Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21004, node 1 2025-03-28T09:21:53.287225Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:53.287246Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:53.287263Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:53.287383Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25559 TClient is connected to server localhost:25559 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:54.218718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:54.253000Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:56.688783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788203178583245:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:56.688872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:56.691221Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788203178583257:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:56.695535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:21:56.719772Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788203178583259:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:21:56.778428Z node 1 :TX_PROXY ERROR: Actor# [1:7486788203178583310:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:57.185618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:21:57.427627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788207473550821:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:57.427833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788207473550821:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:57.428064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788207473550821:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:57.428204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788207473550821:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:57.428313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788207473550821:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:57.428441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788207473550821:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:57.428576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788207473550821:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:57.428720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788207473550821:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:57.428845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788207473550821:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:57.428987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788207473550821:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:57.429099Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7486788207473550821:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:57.429226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788207473550821:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:57.430119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486788207473550831:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:57.430160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486788207473550831:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:21:57.430332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486788207473550831:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:21:57.430430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486788207473550831:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:21:57.430545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486788207473550831:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:21:57.430655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486788207473550831:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:21:57.430765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486788207473550831:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:21:57.430856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486788207473550831:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:21:57.430987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486788207473550831:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:21:57.431100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486788207473550831:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:21:57.431219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486788207473550831:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:21:57.431309Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7486788207473550831:2357];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:21:57.465217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788207473550843:2363];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:21:57.465293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788207473550843:2363];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abs ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.075893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.080717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.085232Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039240;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.089706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.095490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.095727Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.100469Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.103639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039304;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.105933Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.109419Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.111959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.114562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.117142Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.119994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.123577Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039338;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.124921Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.128706Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.132292Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.133667Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.138904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.139747Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.144395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.144849Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.152534Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.157967Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039203;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.164085Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.169768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.175685Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.181610Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.187090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.192796Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.198499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.199758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.205263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.205827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.211854Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.212762Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.217533Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.221488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.225474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.227804Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.231845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.233800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.237860Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.241497Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:20.403764Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0z4w6cvmy1hbkawgek6yy", SessionId: ydb://session/3?node_id=1&id=ZDE0MDBjODctYTcwY2EwZTYtN2RkNGRkOWQtYmIxYmE0MmE=, Slow query, duration: 37.196896s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:23:20.689716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:20.689731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:20.690608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486788521006220285:11171];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-28T09:23:20.691050Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TestProtocols::TestHTTPCollectedVerySlow [GOOD] >> TestProtocols::TestHTTPRequest >> TestProtocols::TestHTTPRequest [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizes [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] Test command err: 2025-03-28T09:23:38.484408Z node 4 :INTERCONNECT WARN: Handshake [4:20:2056] [node 3] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-03-28T09:23:38.999890Z node 5 :INTERCONNECT WARN: Handshake [5:18:2057] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-03-28T09:23:39.555199Z node 8 :INTERCONNECT WARN: Handshake [8:20:2056] [node 7] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2025-03-28T09:23:39.557731Z node 7 :INTERCONNECT WARN: Handshake [7:18:2057] [node 8] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default >> TInterconnectTest::TestBlobEvent >> TFlatTest::SelectRangeNullArgs3 >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized [GOOD] >> 
TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw >> TInterconnectTest::TestBlobEvent [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TestProtocols::TestHTTPRequest [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes [GOOD] >> TInterconnectTest::TestAddressResolve >> TGRpcLdapAuthentication::LdapAuthWithValidCredentials >> YdbYqlClient::SecurityTokenAuth >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts >> YdbTableBulkUpsert::Nulls >> YdbYqlClient::TestYqlWrongTable >> TGRpcYdbTest::DropTableBadRequest >> TInterconnectTest::TestAddressResolve [GOOD] >> TInterconnectTest::OldNbs >> TGRpcNewCoordinationClient::SessionMethods >> KqpJoinOrder::TPCDS16-ColumnStore [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] |86.9%| [TA] $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... results_accumulator.log} >> TTableProfileTests::UseTableProfilePreset >> TFlatTest::SelectRangeReverseItemsLimit [GOOD] >> TGRpcClientLowTest::SimpleRequest >> TFlatTest::SelectRangeReverseIncludeKeys >> TInterconnectTest::OldNbs [GOOD] |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldNbs [GOOD] |86.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/service/test-results/unittest/{meta.json ... results_accumulator.log} |86.9%| [TA] $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS16-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 2799, MsgBus: 22976 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001771/r3tmp/tmpsndzsH/pdisk_1.dat 2025-03-28T09:22:33.320503Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:22:33.601141Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:33.666851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:33.666929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:33.671797Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2799, node 1 2025-03-28T09:22:33.939320Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:33.939340Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:33.939347Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:33.939451Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22976 TClient is connected to server localhost:22976 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:34.953134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:34.975634Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:37.361767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788380294568362:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:37.361897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:37.365719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788380294568374:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:37.370421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:37.386964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788380294568376:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:37.483659Z node 1 :TX_PROXY ERROR: Actor# [1:7486788380294568427:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:37.911443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:38.121574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:22:38.160805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:38.205993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:38.266307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:38.460964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:38.497071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:38.530985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:38.562497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:22:38.605221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:22:38.642367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:22:38.669816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:22:38.748775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:39.550650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at 
schemeshard: 72057594046644480 2025-03-28T09:22:39.595049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:39.627749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:39.670386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:22:39.726746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:22:39.765263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:22:39.812830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:22:39.897452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:22:39.952111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:22:40.031784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:22:40.076568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:22:40.107863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:22:40.176791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:22:40.210380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:22:40.289492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:22:40.327962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:22:40.370055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480 2025-03-28T09:22:40.401514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480 2025-03-28T09:22:40.442459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480 2025-03-28T09:22:40.499748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok ... p:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.905222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.910798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.910866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.916408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.917417Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.924079Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.927445Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.931682Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038593;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.935102Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.940537Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038587;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.942457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.951910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038531;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.956549Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.962254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.966175Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.975920Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038555;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.979812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.984969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038563;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.989442Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038559;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.994542Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:15.998976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038527;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.008612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038561;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.012672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.017921Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038523;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.022188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038529;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.031313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038609;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.036763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.038543Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038535;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.047762Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.053098Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.057013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038537;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.062559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038575;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.066165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.076003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038547;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.080129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.085490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.090606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.122612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:16.179124Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0z3s2e3yxhnaaxfzfw5x8", SessionId: ydb://session/3?node_id=1&id=YzRhNWRkMGMtNDI4ODJlODktNzBkOWFmYjQtMjUzZDczODQ=, Slow query, duration: 34.096519s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:23:16.747580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:16.748011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:16.748503Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038331;self_id=[1:7486788453309029081:4485];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038331;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038629;
2025-03-28T09:23:16.748822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:23:38.409631Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe10gfydhbnr35w9syke8cy", SessionId: ydb://session/3?node_id=1&id=YzRhNWRkMGMtNDI4ODJlODktNzBkOWFmYjQtMjUzZDczODQ=, Slow query, duration: 10.538687s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "-- NB: Subquerys\n$orders_with_several_warehouses = (\n select cs_order_number\n from `/Root/test/ds/catalog_sales`\n group by cs_order_number\n having count(distinct cs_warehouse_sk) > 1\n);\n\n-- start query 1 in stream 0 using template query16.tpl and seed 171719422\nselect\n count(distinct cs1.cs_order_number) as `order count`\n ,sum(cs_ext_ship_cost) as `total shipping cost`\n ,sum(cs_net_profit) as `total net profit`\nfrom\n `/Root/test/ds/catalog_sales` cs1\n cross join `/Root/test/ds/date_dim`\n cross join `/Root/test/ds/customer_address`\n cross join `/Root/test/ds/call_center`\n left semi join $orders_with_several_warehouses cs2 on cs1.cs_order_number = cs2.cs_order_number\n left only join `/Root/test/ds/catalog_returns` cr1 on cs1.cs_order_number = cr1.cr_order_number\nwhere\n cast(d_date as date) between cast('1999-4-01' as date) and\n (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))\nand cs1.cs_ship_date_sk = d_date_sk\nand cs1.cs_ship_addr_sk = ca_address_sk\nand ca_state = 'IL'\nand cs1.cs_call_center_sk = cc_call_center_sk\nand cc_county in ('Richland County','Bronx County','Maverick County','Mesa County',\n 'Raleigh County'\n)\norder by `order count`\nlimit 100;\n", parameters: 0b
>> TGRpcYdbTest::RemoveNotExistedDirectory
|86.9%| [TA] {RESULT} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TFlatTest::SelectRangeNullArgs3 [GOOD]
>> TFlatTest::SelectRangeNullArgs4
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer
>> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout
>> TColumnShardTestSchema::RebootOneColdTier
>> TFlatTest::SelectRangeReverseIncludeKeys [GOOD]
>> TGRpcNewCoordinationClient::SessionMethods [GOOD]
>> TGRpcNewCoordinationClient::SessionDescribeWatchData
>> OperationMapping::IndexBuildRejected
>> OperationMapping::IndexBuildRejected [GOOD]
>> TGRpcLdapAuthentication::LdapAuthWithValidCredentials [GOOD]
>> TGRpcNewClient::SimpleYqlQuery
>> TGRpcYdbTest::DropTableBadRequest [GOOD]
>> TGRpcYdbTest::CreateYqlSession
>> TGRpcClientLowTest::SimpleRequest [GOOD]
>> TGRpcClientLowTest::SimpleRequestDummyService
|86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildRejected [GOOD]
>> TFlatTest::SelectRangeNullArgs4 [GOOD]
>> YdbYqlClient::TestYqlWrongTable [GOOD]
>> YdbYqlClient::TraceId
>> KqpJoinOrder::CanonizedJoinOrderTPCC [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD]
Test command err:
2025-03-28T09:23:38.924788Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788644447229875:2193];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:23:38.924833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f05/r3tmp/tmpw2evz1/pdisk_1.dat
2025-03-28T09:23:39.640165Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:23:39.656076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:23:39.656155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:23:39.663724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:63082
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-03-28T09:23:40.006468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:23:40.019465Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:23:40.030330Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480
2025-03-28T09:23:40.036772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:23:43.187304Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788661685464323:2071];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:23:43.187725Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f05/r3tmp/tmp4VVuwc/pdisk_1.dat
2025-03-28T09:23:43.492707Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:23:43.520406Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:23:43.520496Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:23:43.524560Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:19417
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-03-28T09:23:43.760181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:23:43.776827Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:23:43.807065Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480
2025-03-28T09:23:43.814018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
>> YdbYqlClient::SecurityTokenAuth [GOOD]
>> YdbYqlClient::RetryOperationTemplate
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeNullArgs4 [GOOD]
Test command err:
2025-03-28T09:23:41.107047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788655081688343:2203];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:23:41.107647Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f03/r3tmp/tmpBnrEQV/pdisk_1.dat
2025-03-28T09:23:41.665254Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:23:41.716632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:23:41.716714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:23:41.721156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:24047
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-03-28T09:23:42.170144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-28T09:23:42.218407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f03/r3tmp/tmpn4Kq7R/pdisk_1.dat
2025-03-28T09:23:45.462985Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:23:45.467818Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:23:45.491930Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:23:45.492037Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:23:45.494663Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:21246
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-03-28T09:23:45.790646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-28T09:23:45.841572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
>> TGRpcYdbTest::RemoveNotExistedDirectory [GOOD]
>> TGRpcYdbTest::SdkUuid
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer [GOOD]
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood
|86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> TFlatTest::SelectRangeReverse
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCC [GOOD]
Test command err:
Trying to start YDB, gRPC: 1629, MsgBus: 20086
2025-03-28T09:22:56.666882Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788463042552772:2060];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:22:56.675241Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001738/r3tmp/tmpHgoyjT/pdisk_1.dat
2025-03-28T09:22:57.576073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:22:57.576162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:22:57.592865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:22:57.606846Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:22:57.725108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
TServer::EnableGrpc on GrpcPort 1629, node 1
2025-03-28T09:22:57.947335Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:22:57.947356Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:22:57.947378Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:22:57.947518Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20086 TClient is connected to server localhost:20086 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:58.897744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:58.919156Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:23:01.512685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788484517389922:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:01.512877Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:01.513360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788484517389934:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:01.517862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:23:01.532487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788484517389936:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:23:01.627740Z node 1 :TX_PROXY ERROR: Actor# [1:7486788484517389987:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:01.643044Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788463042552772:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:01.643101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:02.000361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.128393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.164827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.201157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.235526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.490401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.547224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.589524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.673367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.744725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.799455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.834710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-28T09:23:02.867776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:23:03.707636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:23:03.783094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:23:03.857812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:23:03.900126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:23:03.975549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.019341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.066865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.110217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.161316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.238815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.322182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.367820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.414135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.449243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.484655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.520798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:40.996945Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:40.996945Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038435;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.002133Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.002133Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.007622Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038567;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.008033Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038489;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.012897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.013304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038521;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.019048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038585;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.019275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038541;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.024061Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038553;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.024767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038603;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.029389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038525;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.032676Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038581;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.034062Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038557;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.038112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038513;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.038586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.043084Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038509;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.043219Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038565;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.048218Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038589;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.049581Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038533;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.053611Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038583;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.054336Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038443;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.058664Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038569;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.059270Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038539;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.063896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038617;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.064190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038517;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.069279Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.069279Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038467;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.074392Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.074391Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038429;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.079711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.080237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038639;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.084969Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038487;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.085268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038573;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.090166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038501;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.090536Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038597;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.095593Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038607;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.095593Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038635;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.100890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038621;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.100893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038601;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.106052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.111115Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038551;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.133893Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038591;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.153314Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:41.219128Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0zv7t9mmwvd9k2jknpdwx", SessionId: ydb://session/3?node_id=1&id=YTlhZGMyN2QtZmE0ZmYzZTQtMzI1ZDU1OWItZDgzNTU0ZGE=, Slow query, duration: 35.112159s, 
status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-28T09:23:41.512269Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:23:41.512547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:23:41.513006Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7486788621956370249:6030];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038331;receive=72075186224038170;
2025-03-28T09:23:41.513270Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> TLocksTest::NoLocksSet [GOOD]
>> TLocksTest::MultipleLocks
>> TTableProfileTests::UseTableProfilePreset [GOOD]
>> TTableProfileTests::UseTableProfilePresetViaSdk
>> YdbLogStore::LogStore
>> TGRpcNewCoordinationClient::SessionSemaphoreInfiniteTimeout [GOOD]
>> TGRpcNewCoordinationClientAuth::OwnersAndPermissions
>> TGRpcNewCoordinationClient::SessionDescribeWatchData [GOOD]
>> TGRpcNewCoordinationClient::SessionDescribeWatchOwners
>> TGRpcYdbTest::CreateYqlSession [GOOD]
>> TGRpcYdbTest::ExecuteDmlQuery
>> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase
>> OperationMapping::IndexBuildCanceled [GOOD]
>> TGRpcClientLowTest::SimpleRequestDummyService [GOOD]
>> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildCanceled [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup
>> KqpJoinOrder::TestJoinHint2+ColumnStore [GOOD]
>> TGRpcNewClient::SimpleYqlQuery [GOOD]
>> TGRpcNewClient::TestAuth
>> KqpJoinOrder::TPCDS92+ColumnStore [GOOD]
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD]
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood
>> YdbYqlClient::TraceId [GOOD]
>> YdbYqlClient::Utf8DatabasePassViaHeader
>> TFlatTest::SelectRangeReverse [GOOD]
>> TFlatTest::SelectRangeReverseExcludeKeys
>> YdbYqlClient::RetryOperationTemplate [GOOD]
>> YdbYqlClient::RetryOperationSync
>> YdbIndexTable::AlterIndexImplBySuperUser
>> TGRpcYdbTest::SdkUuid [GOOD]
>> TGRpcYdbTest::SdkUuidViaParams
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TestJoinHint2+ColumnStore [GOOD]
Test command err: Trying to
start YDB, gRPC: 13856, MsgBus: 65434 2025-03-28T09:22:16.864992Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788289647426925:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:16.865433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00179a/r3tmp/tmpaa4Hbg/pdisk_1.dat 2025-03-28T09:22:17.550700Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:17.559038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:17.559140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:17.569754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13856, node 1 2025-03-28T09:22:17.839304Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:17.839326Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:17.839333Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:17.839439Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65434 TClient is connected to server localhost:65434 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:18.947857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:18.971516Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:21.539629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788311122263919:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:21.539718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788311122263927:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:21.539776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:21.545438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:21.561821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788311122263933:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:21.647320Z node 1 :TX_PROXY ERROR: Actor# [1:7486788311122263984:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:21.858920Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788289647426925:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:21.859014Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:22.064536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:22.337650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788315417231510:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:22.337879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788315417231510:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:22.338165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788315417231510:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:22.338295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788315417231510:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:22.338398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788315417231510:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:22.338504Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788315417231510:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:22.338601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788315417231510:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:22.338819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788315417231510:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:22.338935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788315417231510:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:22.339054Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7486788315417231510:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:22.339208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788315417231510:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:22.339318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788315417231510:2360];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:22.344206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788315417231497:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:22.344266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788315417231497:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:22.344439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788315417231497:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:22.344554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788315417231497:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:22.344670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788315417231497:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:22.344768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788315417231497:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:22.344896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788315417231497:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:22.345018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788315417231497:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:22.345122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788315417231497:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:22.345235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788315417231497:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:22.345354Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7486788315417231497:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:22.345460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788315417231497:2354];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:22.384192Z node 1 :T ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.733917Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.738253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.742903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039224;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.747535Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.752646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.754666Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.758069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.762903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.767609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.772821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.775684Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039212;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.778491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.783964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039240;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.789049Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039194;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.791142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.796350Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.802469Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.803044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.807470Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039262;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.808642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.812818Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039238;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.813865Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.817749Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039214;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.819335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039188;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.822747Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039190;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.824744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.827673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.829939Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.835812Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.838646Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.841334Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039216;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.844894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.846658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039222;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.851547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039192;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.852188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039254;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.858198Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.861522Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.866435Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.877529Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039208;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.883326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039206;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.887909Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.892005Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.895477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.899164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.902818Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:23:43.119919Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0zww231r0dtjgt63q6v54", SessionId: ydb://session/3?node_id=1&id=NjQxZmM2NzYtY2FhMzVkOTItOTQ4YmVlMDQtMjc5MWNhMmM=, Slow query, duration: 35.340999s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-28T09:23:43.410059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:23:43.410259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:23:43.410335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486788620359965197:10984];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933;
2025-03-28T09:23:43.410749Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS92+ColumnStore [GOOD]
Test command err:
Trying to start YDB, gRPC: 11007, MsgBus: 3158
2025-03-28T09:21:56.965377Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788204830806350:2061];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:21:56.989369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017c9/r3tmp/tmpjjQpkT/pdisk_1.dat
2025-03-28T09:21:57.716912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:21:57.716992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:21:57.733866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:21:57.819442Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 11007, node 1
2025-03-28T09:21:57.995259Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:21:57.995276Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:21:57.995287Z node 1 :NET_CLASSIFIER WARN: failed to initialize
from file: (empty maybe) 2025-03-28T09:21:57.995423Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3158 TClient is connected to server localhost:3158 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:58.841938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:58.861802Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:01.765614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788226305643501:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:01.765746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:01.766076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788226305643513:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:01.770157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:01.787201Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788226305643515:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:01.884397Z node 1 :TX_PROXY ERROR: Actor# [1:7486788226305643566:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:01.974830Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788204830806350:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:01.974905Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:02.259929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:02.497184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788230600611134:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:02.497433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788230600611134:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:02.497678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788230600611134:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:02.497833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788230600611134:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:02.497952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788230600611134:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:02.498068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788230600611134:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:02.498181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788230600611134:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:02.498305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788230600611134:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:02.498425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788230600611134:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:02.498519Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7486788230600611134:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:02.499985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788230600611134:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:02.500166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788230600611134:2355];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:02.520038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788230600611136:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:02.520102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788230600611136:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:02.520344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788230600611136:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:02.520460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788230600611136:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:02.520556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788230600611136:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:02.520642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788230600611136:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:02.520751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788230600611136:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:02.520852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788230600611136:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:02.520959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788230600611136:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:02.521074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788230600611136:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:02.521185Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7486788230600611136:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:02.521290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788230600611136:2356];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:02.550173Z node 1 :TX_C ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:27.948349Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:27.954104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:27.962616Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:27.963683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:27.969404Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:27.974266Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:27.979763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039364;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:27.988565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:27.993582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:27.998166Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.007096Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.012090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.016815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.027161Z node 1 :TX_COLUMNSHARD_TX 
WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.031707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.037426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.041191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.051852Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.055521Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.061150Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.064915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.074506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.075575Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.084552Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.085741Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.091210Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.095606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.097090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.102877Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-28T09:23:28.108075Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.108433Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.114132Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.119994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.121652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039330;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.126094Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.132381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.132751Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.140897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.142222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.147913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.154267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.159171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.160728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.165726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:28.166259Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:23:28.395568Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0zbgbej40z46f7rgb91p6", SessionId: ydb://session/3?node_id=1&id=YzZmOGE4YTEtODdlYzA4N2YtNmFmNDE1MjItMzY2OTdlMWM=, Slow query, duration: 38.399271s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-28T09:23:28.981935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:23:28.982340Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:23:28.983014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7486788449643979605:7856];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094;
2025-03-28T09:23:28.983377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> TGRpcNewCoordinationClientAuth::OwnersAndPermissions [GOOD] >> TGRpcYdbTest::AlterTableAddIndexBadRequest
>> TGRpcNewCoordinationClient::SessionDescribeWatchOwners [GOOD] >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace
>> TGRpcLdapAuthentication::LdapAuthServerIsUnavailable [GOOD] >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain
>> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] >> YdbYqlClient::TestReadTableMultiShardWholeTable
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts
>> YdbYqlClient::ConnectDbAclIsOffWhenYdbRequestsWithoutDatabase [GOOD] >> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull
>> TGRpcNewClient::TestAuth [GOOD] >> TGRpcNewClient::CreateAlterUpsertDrop
>> TGRpcYdbTest::ExecuteDmlQuery [GOOD] >> TGRpcYdbTest::CreateYqlSessionExecuteQuery
>> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF [GOOD] >> TLocksTest::Range_IncorrectNullDot1
>> YdbYqlClient::Utf8DatabasePassViaHeader [GOOD] >> YdbYqlClient::TestYqlTypesFromPreparedQuery
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD]
Test command err:
2025-03-28T09:23:52.319636Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788703030910083:2267];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:23:52.319690Z node 1 :METADATA_PROVIDER ERROR:
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f01/r3tmp/tmpG6HTeP/pdisk_1.dat 2025-03-28T09:23:52.818952Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:52.839477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:52.839573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:52.842730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9818 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:53.270069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:53.315597Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:53.327308Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:23:53.334873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:23:56.448196Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788720922763202:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:56.459737Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f01/r3tmp/tmpmgRMXc/pdisk_1.dat 2025-03-28T09:23:56.733129Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:56.791180Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:56.791250Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:56.795645Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24921 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:57.040112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:57.053451Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:57.079952Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:23:57.086839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
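Note: the three KQP_SLOW_LOG records in this section each quote the same DDL as a single escaped string. Unfolded (\n escapes expanded, content otherwise identical to the quoted text), the YQL script the slow queries executed is:

CREATE TABLE t1 (
 id1 Int32 NOT NULL,
 PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
 id2 Int64 NOT NULL,
 t1_id1 Int64 NOT NULL,
 -- random_field2 Int32
 PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
 id3 Int16 NOT NULL,
 -- random_field3 Int32
 PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);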
>> OlapEstimationRowsCorrectness::TPCH3 [GOOD]
>> TGRpcYdbTest::SdkUuidViaParams [GOOD] >> TGRpcYdbTest::ReadTablePg
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::OltpJoinTypeHintCBOTurnOFF [GOOD]
Test command err:
Trying to start YDB, gRPC: 3766, MsgBus: 14548
2025-03-28T09:23:11.212051Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788525147762490:2271];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:23:11.215132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016e3/r3tmp/tmp9zKBAJ/pdisk_1.dat
2025-03-28T09:23:11.880956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:23:11.881051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:23:11.897296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:23:11.974667Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 3766, node 1
2025-03-28T09:23:12.188174Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:23:12.188197Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:23:12.188204Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:23:12.188296Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:14548
TClient is connected to server localhost:14548
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:23:13.118341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:23:13.138341Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:23:15.657470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788542327632125:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:15.657623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:15.658067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788542327632137:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:15.662423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:23:15.682282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788542327632139:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:23:15.751705Z node 1 :TX_PROXY ERROR: Actor# [1:7486788542327632190:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:16.214142Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788525147762490:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:16.226836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:16.274567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:23:16.420915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:23:16.456418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:23:16.490924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:23:16.523606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:23:16.724486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:23:16.773294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:23:16.854619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:23:16.888037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:23:16.942153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:23:17.013070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:23:17.094660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-28T09:23:17.139139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:23:17.927485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:23:17.992387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.051545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.093239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.128872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.175460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.213826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.250616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.301618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.345875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.399812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.453710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.494810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.530277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.564343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.635933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:23:18.720014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... tablet_id=72075186224038564;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.708693Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.712249Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.739100Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.739101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.744673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.744864Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.750384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.755464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.756843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.763742Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038642;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.767568Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.771612Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.773480Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.779898Z node 
1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.785118Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.786019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038658;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.791819Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.793277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038614;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.797960Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.798596Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038598;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.803810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038656;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.805845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038608;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.809695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.812221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038594;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.815090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038610;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.818041Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038612;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.823594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038602;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.824185Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038606;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.828976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038600;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-28T09:23:53.829264Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038632;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.835157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038620;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.838199Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038616;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.840457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038604;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.844550Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038574;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.845837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.850507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038578;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.850957Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038588;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.856756Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038562;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.856836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038586;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.863544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038592;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.866348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038596;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.869338Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038582;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.872102Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038590;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.877658Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038576;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.878618Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038618;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.885344Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038572;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:53.981040Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe1095e2yt9vskwd8vq0pys", SessionId: ydb://session/3?node_id=1&id=NzFhOWY4NTUtZGI1MTFiOGItMWRlODBhZTgtNGYzNjE4NTM=, Slow query, duration: 33.612966s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:23:54.255717Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:54.256416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:54.262887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TLocksTest::MultipleLocks [GOOD] >> KqpJoinOrder::TPCDS96+ColumnStore [GOOD] >> SplitPathTests::WithDatabaseShouldFail >> SplitPathTests::WithDatabaseShouldFail [GOOD] >> YdbIndexTable::AlterIndexImplBySuperUser [GOOD] >> YdbIndexTable::CreateTableAddIndex >> TGRpcYdbTest::AlterTableAddIndexBadRequest [GOOD] >> TGRpcYdbTest::CreateAlterCopyAndDropTable |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood >> TTableProfileTests::UseTableProfilePresetViaSdk [GOOD] >> TTableProfileTests::WrongTableProfile >> TGRpcLdapAuthentication::LdapAuthSetIncorrectDomain [GOOD] >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism >> TGRpcNewCoordinationClient::SessionDescribeWatchReplace [GOOD] >> TGRpcNewCoordinationClient::SessionReconnectReattach >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_AllowOnlyDefaultGroup [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH3 [GOOD] Test command err: Trying to start YDB, gRPC: 19482, MsgBus: 9507 2025-03-28T09:22:17.955026Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788292846193088:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:18.000110Z node 1 
:METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001797/r3tmp/tmpIhJIob/pdisk_1.dat 2025-03-28T09:22:18.755134Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:18.784978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:18.794826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:18.803099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19482, node 1 2025-03-28T09:22:19.041694Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:19.041716Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:19.041727Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:19.041834Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9507 TClient is connected to server localhost:9507 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:19.969489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:20.004037Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:22.462111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788314321030104:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:22.462143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788314321030099:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:22.462264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:22.466527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:22.484437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788314321030113:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:22.558825Z node 1 :TX_PROXY ERROR: Actor# [1:7486788314321030164:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:22.899208Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788292846193088:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:22.899286Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:23.123164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:23.458255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788318615997730:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:23.458833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788318615997730:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:23.459057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788318615997722:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:23.459093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788318615997730:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:23.459127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788318615997722:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:23.459231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788318615997730:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:23.459242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788318615997722:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:23.459341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788318615997730:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:23.459350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788318615997722:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:23.459447Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7486788318615997722:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:23.459460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788318615997730:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:23.459553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788318615997722:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:23.459572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788318615997730:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:23.460087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788318615997722:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:23.460244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788318615997722:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:23.460347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788318615997722:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:23.460460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788318615997722:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:23.460569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788318615997722:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:23.460649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788318615997722:2352];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:23.462298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788318615997730:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:23.462426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788318615997730:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:23.462547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788318615997730:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:23.462647Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788318615997730:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:23.462762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788318615997730:2356];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:23.522107Z node 1 :TX_C ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.524379Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.533749Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039231;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.537247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.543238Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.548477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.548989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.553810Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.555776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.559707Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.560824Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.564744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.570165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.576468Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.582086Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.587863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.591720Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.597236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039195;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.601210Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.611059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.615994Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.620178Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.622169Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.626377Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039193;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.627775Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.634090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.634104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.640030Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.640248Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.646494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-28T09:23:46.646514Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.652848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.653385Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.661241Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.662959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.667833Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.668962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.676556Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.681608Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.687835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.693654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.700953Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.705949Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.712555Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.729606Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.760187Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.954673Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe100xeea98s77fd72918m8", SessionId: ydb://session/3?node_id=1&id=MjQyZjM4MGQtY2Q5ZjE1ZWQtODMxNjNlNzQtMzk1NTJjNDc=, Slow query, duration: 35.031554s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:23:47.355845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:47.356282Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:47.356800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486788584904015620:9245];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-28T09:23:47.357170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TGRpcYdbTest::GetOperationBadRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::MultipleLocks [GOOD] Test command err: 2025-03-28T09:23:30.137226Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788606696270937:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:30.162075Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f07/r3tmp/tmp44Og3Q/pdisk_1.dat 2025-03-28T09:23:30.896779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:30.896874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:30.904993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:30.936239Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:65400 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:31.365651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:31.379112Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:31.393949Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:31.401430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:23:31.554518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:31.628841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:34.760801Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788623179089729:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:34.760841Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f07/r3tmp/tmpODYCE2/pdisk_1.dat 2025-03-28T09:23:35.109044Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:35.136101Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:35.136198Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:35.147324Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12610 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:35.554096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:35.563713Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:35.582581Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:23:35.593946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:35.689045Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:35.794296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:39.079436Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486788648453267010:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:39.079496Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f07/r3tmp/tmpA4e12c/pdisk_1.dat 2025-03-28T09:23:39.340784Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:39.418821Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:39.418905Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:39.420045Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12779 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:39.739261Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:39.751214Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:39.763828Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:23:39.770662Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:39.856283Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:39.920493Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:43.683845Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788664056410571:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f07/r3tmp/tmpd6gdiI/pdisk_1.dat 2025-03-28T09:23:43.768278Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:23:43.883628Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:43.903947Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:43.904046Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:43.912153Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13156 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:44.230262Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:44.236758Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:44.252435Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:44.369401Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:44.468836Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:48.377689Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486788687246756631:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:48.378787Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f07/r3tmp/tmpAykuI0/pdisk_1.dat 2025-03-28T09:23:48.534357Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:48.562518Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:48.562611Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:48.568315Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31020 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:48.824025Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:23:48.843926Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:23:48.848465Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:48.944196Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:49.015075Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:53.397648Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486788706060188022:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f07/r3tmp/tmphjtMFs/pdisk_1.dat 2025-03-28T09:23:53.501553Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:23:53.551847Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:53.568803Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:53.568902Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:53.571945Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22698 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-28T09:23:53.816746Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:23:53.823845Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:53.840948Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:23:53.845793Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:53.926297Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:53.991190Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:58.338129Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788727450682000:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:58.338221Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f07/r3tmp/tmpqujyDZ/pdisk_1.dat 2025-03-28T09:23:58.658349Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:58.658454Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:58.675489Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:58.677263Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20495 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:58.999034Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:23:59.021620Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:23:59.029110Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:59.214783Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:59.297983Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
>> YdbYqlClient::ConnectDbAclIsOffWhenTokenIsOptionalAndNull [GOOD]
>> YdbYqlClient::ColumnFamiliesWithStorageAndIndex
>> KqpJoinOrder::CanonizedJoinOrderTPCH20 [GOOD]
>> TGRpcNewClient::CreateAlterUpsertDrop [GOOD]
>> TGRpcNewClient::InMemoryTables
>> TGRpcYdbTest::CreateYqlSessionExecuteQuery [GOOD]
>> TGRpcYdbTest::DeleteFromAfterCreate
>> YdbYqlClient::TestYqlIssues
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS96+ColumnStore [GOOD]
Test command err: Trying to start YDB, gRPC: 11595, MsgBus: 9590
2025-03-28T09:22:16.200925Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788289164113207:2196];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:22:16.201449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00179c/r3tmp/tmpcW33BQ/pdisk_1.dat
2025-03-28T09:22:17.000895Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:22:17.026229Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:22:17.026302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:22:17.029257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 11595, node 1
2025-03-28T09:22:17.275184Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:22:17.275208Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:22:17.275214Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:22:17.275305Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:9590
TClient is connected to server localhost:9590
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:22:18.091589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:22:21.003693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788306343982925:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:22:21.003837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:22:21.004138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788310638950233:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:22:21.007881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:22:21.029529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788310638950235:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:21.126919Z node 1 :TX_PROXY ERROR: Actor# [1:7486788310638950286:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:21.162831Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788289164113207:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:21.162887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:21.473456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:21.713633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788310638950592:2361];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:21.713870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788310638950592:2361];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:21.714134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788310638950592:2361];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:21.714253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788310638950592:2361];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:21.714371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788310638950592:2361];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:21.714490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788310638950592:2361];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:21.714591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788310638950592:2361];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:21.714711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788310638950592:2361];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:21.714822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788310638950592:2361];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:21.714916Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7486788310638950592:2361];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:21.715005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788310638950592:2361];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:21.715175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788310638950592:2361];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:21.749964Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788310638950524:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:21.750238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788310638950737:2365];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:21.750297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788310638950737:2365];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:21.750485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788310638950737:2365];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:21.750591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788310638950737:2365];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:21.750824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788310638950737:2365];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:21.750931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788310638950737:2365];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:21.751019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788310638950737:2365];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:21.751120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788310638950737:2365];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:21.751227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788310638950737:2365];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:21.751333Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7486788310638950737:2365];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:21.751459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788310638950737:2365];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:21.751577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788310638950737:2365];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_reg ... tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.880870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.886766Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.893331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.893389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.900299Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.903982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.906865Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.910188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.913739Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.916603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.920487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039220;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.922412Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.926929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.928245Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.933793Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.935530Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039186;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.941224Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.946091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.953018Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.957576Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.960076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.966850Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039236;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.967257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.980596Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.987704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039208;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.989271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039218;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:42.997253Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039188;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:43.004472Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039238;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:43.006974Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:43.014882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039206;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:43.016716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:43.020505Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:43.026098Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:43.038065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039246;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:43.039876Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:43.045889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:43.048443Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:43.056273Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:43.057293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:43.069908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039198;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:43.073765Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:43.081296Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:43.089718Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039276;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:23:43.093045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:23:43.100354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:23:43.108060Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:23:43.433069Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0zwvre7j1xr61mhd0z0ec", SessionId: ydb://session/3?node_id=1&id=MWQzMjExZGUtZGRmYzcwYzAtZjg4Yzc1YTMtYzhlYjY3MGU=, Slow query, duration: 35.664071s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-28T09:23:44.047275Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:23:44.047278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:23:44.047859Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> OperationMapping::IndexBuildSuccess [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCH18 [GOOD]
|87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildSuccess [GOOD]
>> TGRpcYdbTest::MakeListRemoveDirectory
>> YdbYqlClient::TestReadTableMultiShardWholeTable [GOOD]
>> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot
>> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD]
>> TLocksTest::Range_GoodLock1 [GOOD]
>> KqpJoinOrder::CanonizedJoinOrderTPCH3 [GOOD]
>> YdbYqlClient::RetryOperationSync [GOOD]
>> YdbYqlClient::RetryOperationLimitedDuration
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD]
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad
>> TGRpcYdbTest::ReadTablePg [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired
>> GrpcConnectionStringParserTest::NoDatabaseFlag
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlTypesFromPreparedQuery [GOOD]
Test command err: 2025-03-28T09:23:42.910263Z node 1 :METADATA_PROVIDER WARN:
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788658666110099:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:42.910344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001474/r3tmp/tmpoBajgW/pdisk_1.dat 2025-03-28T09:23:43.788239Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:43.788318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:43.804418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2875, node 1 2025-03-28T09:23:43.899274Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:23:43.899369Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:23:43.934619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:23:44.059653Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:44.221154Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:44.221179Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:44.221191Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:44.221326Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3086 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:44.971169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:47.420681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788680140947659:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:47.420784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:47.912795Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788658666110099:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:47.912853Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:48.247693Z node 1 :TX_PROXY ERROR: Actor# [1:7486788684435915001:2645] txid# 281474976710658, issues: { message: "Column Key has wrong key type Json" severity: 1 } 2025-03-28T09:23:48.287077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788684435915011:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:48.287142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:48.303853Z node 1 :TX_PROXY ERROR: Actor# [1:7486788684435915018:2655] txid# 281474976710659, issues: { message: "Column Key has wrong key type Yson" severity: 1 } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001474/r3tmp/tmp2Zd1fv/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15322, node 4 TClient is connected to server localhost:27791 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001474/r3tmp/tmp0nA18s/pdisk_1.dat TServer::EnableGrpc on GrpcPort 2685, node 7 TClient is connected to server localhost:26219 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-28T09:24:02.415357Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788744426433697:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:02.415427Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001474/r3tmp/tmpxFG5xt/pdisk_1.dat 2025-03-28T09:24:02.771412Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:02.831245Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:02.831333Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:02.840022Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6925, node 10 2025-03-28T09:24:03.055446Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:03.055483Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:03.055491Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:03.055616Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9190 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:03.485596Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:06.259361Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788761606303944:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:06.259455Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788761606303955:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:06.259512Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:06.263812Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:06.302340Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486788761606303958:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-03-28T09:24:06.375163Z node 10 :TX_PROXY ERROR: Actor# [10:7486788761606304027:2693] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> YdbIndexTable::CreateTableAddIndex [GOOD]
>> YdbIndexTable::AlterTableAddIndex
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH20 [GOOD]
Test command err: Trying to start YDB, gRPC: 7243, MsgBus: 12463
2025-03-28T09:22:22.712959Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788315483546770:2196];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:22:22.713344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00177f/r3tmp/tmpTMVAPA/pdisk_1.dat
2025-03-28T09:22:23.576096Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:22:23.583997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:22:23.584092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:22:23.591709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 7243, node 1
2025-03-28T09:22:23.850600Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:22:23.850618Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:22:23.850625Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:22:23.850749Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:12463
TClient is connected to server localhost:12463
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:22:24.738366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:22:27.248929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788336958383788:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:22:27.249007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:22:27.249287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788336958383796:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:22:27.253082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:22:27.265781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788336958383802:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:27.326344Z node 1 :TX_PROXY ERROR: Actor# [1:7486788336958383853:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:27.697150Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788315483546770:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:27.697206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:27.824944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:28.083632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788336958384172:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:28.083852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788336958384172:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:28.084115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788336958384172:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:28.084225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788336958384172:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:28.084352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788336958384172:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:28.084474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788336958384172:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:28.084570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788336958384172:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:28.084683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788336958384172:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:28.084782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788336958384172:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:28.084925Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7486788336958384172:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:28.085038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788336958384172:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:28.085130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788336958384172:2357];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:28.086147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788336958384097:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:28.086180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788336958384097:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:28.086319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788336958384097:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:28.086451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788336958384097:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:28.086545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788336958384097:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:28.086649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788336958384097:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:28.086862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788336958384097:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:28.086961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788336958384097:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:28.087072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788336958384097:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:28.087181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788336958384097:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:28.087283Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7486788336958384097:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:28.087374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788336958384097:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:28.164819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486788336958384178:2358];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.748851Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.749286Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.754181Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.762096Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.768235Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.769318Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.774861Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.775019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.783667Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.787843Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.789141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.793845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039360;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.794937Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.799863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.800173Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.805463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.805547Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.811116Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.811374Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.816758Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.816784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.822111Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.822135Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.828127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.828343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.833699Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.833766Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.841411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.843529Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.853065Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.856363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.858926Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.862549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.864799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.867890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.870048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.873010Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039362;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.874953Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.878894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.879691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.907198Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.912221Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.917977Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.918609Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:47.923672Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:48.053308Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe101za7532w9n75x7a7ytx", SessionId: ydb://session/3?node_id=1&id=Njc5OWY1MzItYTQ1OGUyODMtZjg5YTQ2NjQtYWUxYzk3Njg=, Slow query, duration: 35.049904s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:23:48.418154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:48.419759Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486788641901118021:10992];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-28T09:23:48.420112Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:48.421026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots]
>> KqpJoinOrder::TPCDS61-ColumnStore [GOOD]
>> TGRpcYdbTest::CreateAlterCopyAndDropTable [GOOD]
>> TGRpcYdbTest::BeginTxRequestError
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_GoodLock1 [GOOD]
Test command err:
2025-03-28T09:23:15.966738Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788544022536398:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f0b/r3tmp/tmpNylOFA/pdisk_1.dat 2025-03-28T09:23:16.280744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:23:16.548679Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:16.584472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:16.584560Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:16.588008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
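For readability, this is the statement text from the KQP_SLOW_LOG entry above (2025-03-28T09:23:48.053308Z, duration 35.049904s) with its escaped newlines expanded — the DDL exactly as captured in the log, not new code:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

The combination of STORE = COLUMN with AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240 is consistent with the long runs of finished_tx records above, where a single schema transaction (tx_id=281474976710714) is acknowledged by a separate record for each of the many column shard tablets. The identical DDL recurs in the KqpJoinOrder::CanonizedJoinOrderTPCH18 output below, there with a 37.778662s duration.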
TClient is connected to server localhost:32104 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:17.061685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:17.084745Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:17.099752Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:23:17.104485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:17.307037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:17.387705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:20.589986Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788564586450809:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:20.668410Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f0b/r3tmp/tmp69AxTS/pdisk_1.dat 2025-03-28T09:23:20.771202Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:20.780881Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:20.780956Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:20.787502Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14293 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:21.079980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:21.085042Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:21.103129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:21.165434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:21.214894Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:24.544382Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486788583414935716:2153];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:24.683140Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f0b/r3tmp/tmpoMxNzU/pdisk_1.dat 2025-03-28T09:23:24.875073Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:24.888041Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:24.888129Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:24.892877Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12490 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:25.119906Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:23:25.153779Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:23:25.161891Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:25.282167Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:25.377168Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:29.234197Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788603379899480:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:29.234249Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f0b/r3tmp/tmpuvgtQ3/pdisk_1.dat 2025-03-28T09:23:29.449987Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:29.499931Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:29.500021Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:29.507970Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6120 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { Schem ... " PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:45.568146Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:45.584801Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:45.603946Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:23:45.609694Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:45.746992Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:45.850042Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
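Each WaitRootIsUp check in this section prints the same flattened TClient::Ls response. Re-indented once for readability — the fields are exactly as captured above, the tail is truncated in the log itself ("(TRUNCATED)"), so the closing braces fall inside the elided portion:

Status: 1
StatusCode: SUCCESS
SchemeStatus: 0
PathDescription {
  Self {
    Name: "dc-1"
    PathId: 1
    SchemeshardId: 72057594046644480
    PathType: EPathTypeDir
    CreateFinished: true
    CreateTxId: 1
    CreateStep: 0
    ParentPathId: 1
    PathState: EPathStateNoChanges
    Owner: "root@builtin"
    ACL: ""
    EffectiveACL: ""
    PathVersion: 2
    PathSubType: EPathSubTypeEmpty
    Version {
      GeneralVersion: 2
      ACLVersion: 0
      EffectiveACLVersion: 0
      UserAttrsVersion: 1
      ChildrenVersion: 1
      SubDomainVersion: 0
      SecurityStateVersion: 0
    }
    ChildrenExist: false
  }
  Children {
    Name: ".sys"
    PathId: 18446744073709551615
    SchemeshardId: 72057594046644480
    PathType: EPathTypeDir
    CreateFinished: true
    CreateTxId: 0
    CreateStep: 0
    ParentPathId: 18446744073709551615
  }
  DomainDescription {
    SchemeShardId_Depricated: 72057594046644480
    PathId_Depricated: 1
    ProcessingParams {
      Version: 0
      Pl... (TRUNCATED)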
2025-03-28T09:23:50.870568Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486788693906897922:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:50.871555Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f0b/r3tmp/tmpzBinbB/pdisk_1.dat 2025-03-28T09:23:51.154659Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:51.162886Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:51.163508Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:51.208393Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30278 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:51.653080Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:51.661147Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:51.685818Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:51.814236Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:51.906155Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:23:56.202368Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486788720963720235:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:56.203029Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f0b/r3tmp/tmpWSKQKC/pdisk_1.dat 2025-03-28T09:23:56.560503Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:56.596064Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:56.596172Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:56.599685Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3645 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:57.036237Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:57.047947Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:57.077976Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:57.090997Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:23:57.192949Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:57.267390Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:02.307917Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788743753604741:2217];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f0b/r3tmp/tmpK4WTAB/pdisk_1.dat 2025-03-28T09:24:02.513121Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:24:02.685030Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:02.695717Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:02.695831Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:02.704038Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62959 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:24:03.086392Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:03.110864Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:03.156317Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:03.445312Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:24:03.513514Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
>> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH18 [GOOD]
Test command err:
Trying to start YDB, gRPC: 20028, MsgBus: 12560
2025-03-28T09:22:22.055219Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788317783632655:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:22.055626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001780/r3tmp/tmp86umhZ/pdisk_1.dat 2025-03-28T09:22:22.857206Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:22.870293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:22.870386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:22.891285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20028, node 1 2025-03-28T09:22:23.103266Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:23.103283Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:23.103290Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:23.103385Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12560 TClient is connected to server localhost:12560 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:24.209818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-03-28T09:22:24.231594Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:26.572459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788334963502375:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:26.572555Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:26.572835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788334963502387:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:26.577247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:26.590088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788334963502389:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:26.674866Z node 1 :TX_PROXY ERROR: Actor# [1:7486788334963502440:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:27.049171Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788317783632655:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:27.049257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:27.202414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.544441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788339258469962:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:27.544634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788339258469962:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:27.544880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788339258469962:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:27.545007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788339258469962:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:27.545107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788339258469962:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:27.545234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788339258469962:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:27.545349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788339258469962:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:27.545477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788339258469962:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:27.545583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788339258469962:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:27.545676Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7486788339258469962:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:27.545780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788339258469962:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:27.545870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788339258469962:2352];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:27.545941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788339258470018:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:27.545975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788339258470018:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:27.546136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788339258470018:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:27.546249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788339258470018:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:27.546334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788339258470018:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:27.546428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788339258470018:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:27.546524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788339258470018:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:27.546618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788339258470018:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:27.546925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788339258470018:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:27.547051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788339258470018:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:27.547179Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486788339258470018:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:27.547271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788339258470018:2353];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:27.578546Z node 1 :T ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.725995Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.731618Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.737883Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.742196Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.745526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.749014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.752605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.757236Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.762014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.767475Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.772504Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.777539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.783106Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.783809Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.789278Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.789500Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.794654Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.799235Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.799378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.802746Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.804753Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.807984Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.810109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.811999Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.815966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.817422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.821506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.823841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.826742Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.832110Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.837457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.841277Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.843180Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.848958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.849408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.854789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.855113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.860818Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.860912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.865865Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.866003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.874411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.878894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.879884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:50.884841Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.046189Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe1027ka0n82esszcksea6n", SessionId: ydb://session/3?node_id=1&id=MTE0M2Y2OGUtZWEwOWY4M2EtNDkxZmM1MDktYTMxMTY3Yw==, Slow query, duration: 37.778662s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:23:51.339351Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:51.339826Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:51.340595Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486788618431393720:9954];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039392; 2025-03-28T09:23:51.340989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> TGRpcNewCoordinationClient::SessionReconnectReattach [GOOD]
>> TGRpcYdbTest::GetOperationBadRequest [GOOD]
>> TGRpcYdbTest::ExecuteQueryWithUuid
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH3 [GOOD]
Test command err:
Trying to start YDB, gRPC: 2237, MsgBus: 3094
2025-03-28T09:22:24.668303Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788323969843421:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:24.668541Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00177d/r3tmp/tmpIlLir1/pdisk_1.dat 2025-03-28T09:22:25.347781Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:25.349682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:25.349757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:25.353471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2237, node 1 2025-03-28T09:22:25.764257Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:25.764286Z node 1
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:25.764293Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:25.764381Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3094 TClient is connected to server localhost:3094 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:26.585423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:29.227614Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788345444680434:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:29.227730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:29.234816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788345444680446:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:29.242480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:29.258899Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:22:29.259212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788345444680448:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:29.339277Z node 1 :TX_PROXY ERROR: Actor# [1:7486788345444680499:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:29.666098Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788323969843421:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:29.666152Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:29.684147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:30.009353Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788345444680713:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:30.009530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788345444680713:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:30.009757Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788345444680713:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:30.009865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788345444680713:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:30.009965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788345444680713:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:30.010067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788345444680713:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:30.010150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788345444680713:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:30.010308Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788345444680713:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:30.010416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788345444680713:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:30.010522Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7486788345444680713:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:30.010627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788345444680713:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:30.012599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486788345444680721:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:30.012684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486788345444680721:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:30.012846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486788345444680721:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:30.012937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486788345444680721:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:30.013022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486788345444680721:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:30.013105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486788345444680721:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:30.013205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486788345444680721:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:30.013327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486788345444680721:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:30.013421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486788345444680721:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:30.013517Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486788345444680721:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:30.013606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486788345444680721:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:30.013710Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;self_id=[1:7486788345444680721:2355];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:30.014780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788345444680713:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:30.070667Z node 1 :TX_COL ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.505090Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.510251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.520565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.523884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.530191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.535485Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.541012Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.543481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039227;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.553333Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.559891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.566113Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.567849Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.572589Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.579833Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.585091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.590990Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.598083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.598328Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.609212Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.611794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.619489Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.625364Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.633287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.635302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.640252Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.647366Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.654284Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039317;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.661430Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.666657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.669015Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.681323Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.683424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.690321Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.697053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.705052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.708109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.716122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.721580Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.732159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.741748Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.743208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.754496Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.761045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.774542Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.779694Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.976795Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe104ybdxy3wv19sg4800dm", SessionId: ydb://session/3?node_id=1&id=OTliZDVhZTctOWEwY2IyN2ItZjE1M2MxNzMtNjg2ZjRjNTg=, Slow query, duration: 35.932423s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:23:52.381925Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:52.382422Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:52.383553Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486788680452191078:12029];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-03-28T09:23:52.383955Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::DisableBuiltinAuthMechanism [GOOD] Test command err: 2025-03-28T09:23:43.632326Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788664196488347:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:43.632390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001460/r3tmp/tmpgH3hta/pdisk_1.dat 2025-03-28T09:23:44.421480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:44.421554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:44.447939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:44.507357Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2271, node 1 2025-03-28T09:23:44.737914Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:23:44.737958Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:23:44.737987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:23:44.803310Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:44.803338Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:44.803350Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:44.803475Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23088 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:45.221589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:49.523245Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788690816288199:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:49.585585Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001460/r3tmp/tmpcWBaaq/pdisk_1.dat 2025-03-28T09:23:49.968758Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:49.994455Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:49.994525Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:50.013662Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4017, node 4 2025-03-28T09:23:50.215251Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:50.215270Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:50.215277Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:50.215392Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19576 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:50.499828Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:54.812349Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788712559592663:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:54.816633Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001460/r3tmp/tmpEBRgdf/pdisk_1.dat 2025-03-28T09:23:55.201951Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:55.236089Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:55.236168Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:55.251778Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23306, node 7 2025-03-28T09:23:55.535349Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:55.535372Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:55.535379Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:55.535507Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13788 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:55.808082Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:59.868121Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788733605778605:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:59.868192Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001460/r3tmp/tmp6WKWWu/pdisk_1.dat 2025-03-28T09:24:00.471096Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13000, node 10 2025-03-28T09:24:00.681893Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:00.681914Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:00.681928Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:00.682057Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:24:00.811379Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:00.811478Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:00.816573Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2871 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:01.053658Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:05.278917Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788757271064619:2179];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:05.394304Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001460/r3tmp/tmp5actOz/pdisk_1.dat 2025-03-28T09:24:05.614502Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:05.641999Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:05.642083Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:05.644757Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4027, node 13 2025-03-28T09:24:05.887470Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:05.887497Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:05.887505Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:05.887635Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3750 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:06.365132Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:06.386304Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 >> TColumnShardTestSchema::RebootExternalTTL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS61-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 63069, MsgBus: 22633 2025-03-28T09:22:45.283958Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788414380606072:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:45.284415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001743/r3tmp/tmpzRzpyw/pdisk_1.dat 2025-03-28T09:22:45.993282Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:45.993765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:45.993847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:45.998563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63069, node 1 2025-03-28T09:22:46.376155Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:46.376172Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:46.376180Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:46.376290Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22633 TClient is connected to server localhost:22633 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:47.062328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:49.691350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788431560475786:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:49.691490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:49.692191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788431560475798:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:49.697078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:49.716754Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:22:49.718974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788431560475800:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:49.779678Z node 1 :TX_PROXY ERROR: Actor# [1:7486788431560475851:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:50.261926Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788414380606072:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:50.262482Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:50.266722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.457717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.538059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.589029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.644031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.917106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:22:50.956559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:22:51.003004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:22:51.036486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:22:51.088207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:22:51.157007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:22:51.220986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 
2025-03-28T09:22:51.262272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.144431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:2, at schemeshard: 72057594046644480 2025-03-28T09:22:52.191785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.232193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.294330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.332830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.381987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.419963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.471305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.520824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.610597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.651284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.713481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.757717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.806645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.839406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710687:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.907840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480 2025-03-28T09:22:52.961270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... 6710714; 2025-03-28T09:23:30.521885Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038579;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.527512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038660;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.532875Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038595;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.535998Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038628;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.537540Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038511;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.541091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038636;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.546401Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038659;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.552541Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038444;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.554887Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038577;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.557222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038599;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.561306Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038630;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.563524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038638;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.568761Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038631;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.569794Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038571;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.575947Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038634;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.576928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038661;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.581799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038626;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.582490Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038655;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.587891Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038650;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.592536Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038622;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.593430Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038654;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.598841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038644;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.600280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038653;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.605755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038623;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.611800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038647;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.616188Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038657;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.617898Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038627;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.622202Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038651;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.623827Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038641;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.627932Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038648;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.629581Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038637;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.666052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038649;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.666052Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038652;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.672363Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038640;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.672982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038625;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.678507Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038646;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.678882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038624;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:30.801796Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0zg9k2x25d3327492kh62", SessionId: ydb://session/3?node_id=1&id=NzNhNjAyYWEtOGZhN2RjZDUtNDc5MjBmZTctODNiNGQwMTc=, Slow query, duration: 35.901919s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:23:31.813160Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038331;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:31.813525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038629;self_id=[1:7486788577589393087:6405];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038629;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038170;receive=72075186224038331; 2025-03-28T09:23:31.815691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038170;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:31.816661Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038629;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:05.570959Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe114yh649mazqyecg3cfzn", SessionId: ydb://session/3?node_id=1&id=NzNhNjAyYWEtOGZhN2RjZDUtNDc5MjBmZTctODNiNGQwMTc=, Slow query, duration: 16.752616s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n-- start query 1 in stream 0 using template query61.tpl and seed 1930872976\nselect promotions,total,cast(promotions as float)/cast(total as float)*100\nfrom\n (select sum(ss_ext_sales_price) promotions\n from store_sales\n cross join store\n cross join promotion\n cross join date_dim\n cross join customer\n cross join customer_address\n cross join item\n where ss_sold_date_sk = d_date_sk\n and ss_store_sk = s_store_sk\n and ss_promo_sk = p_promo_sk\n and ss_customer_sk= c_customer_sk\n and ca_address_sk = c_current_addr_sk\n and ss_item_sk = i_item_sk\n and ca_gmt_offset = -6\n and i_category = 'Sports'\n and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y')\n and s_gmt_offset = -6\n and d_year = 2001\n and d_moy = 12) promotional_sales cross join\n (select sum(ss_ext_sales_price) total\n from store_sales\n cross join store\n cross join date_dim\n cross join customer\n cross join customer_address\n cross join item\n where ss_sold_date_sk = d_date_sk\n and ss_store_sk = s_store_sk\n and ss_customer_sk= c_customer_sk\n and ca_address_sk = c_current_addr_sk\n and ss_item_sk = i_item_sk\n and ca_gmt_offset = -6\n and i_category = 'Sports'\n and s_gmt_offset = -6\n and d_year = 2001\n and d_moy = 12) all_sales\norder by promotions, total\nlimit 100;\n", parameters: 0b ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::SessionReconnectReattach [GOOD] Test command err: 2025-03-28T09:23:42.886897Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788660781883798:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:42.886988Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001467/r3tmp/tmpLs80Ux/pdisk_1.dat 2025-03-28T09:23:43.735683Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:43.819398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:43.819503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:43.834557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8389, node 1 2025-03-28T09:23:44.167242Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:44.167272Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:44.167277Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:44.167376Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2664 WaitRootIsUp 'Root'... 
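For readability, the two KQP_SLOW_LOG records above carry their query text as single escaped strings. Expanded and lightly re-indented (token-for-token as logged, nothing added), the first record is the column-store DDL:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

and the second record is TPC-DS query 61, generated from template query61.tpl:

    pragma TablePathPrefix = "/Root/test/ds/";

    -- NB: Subquerys
    -- start query 1 in stream 0 using template query61.tpl and seed 1930872976
    select promotions,total,cast(promotions as float)/cast(total as float)*100
    from
      (select sum(ss_ext_sales_price) promotions
       from store_sales
       cross join store
       cross join promotion
       cross join date_dim
       cross join customer
       cross join customer_address
       cross join item
       where ss_sold_date_sk = d_date_sk
         and ss_store_sk = s_store_sk
         and ss_promo_sk = p_promo_sk
         and ss_customer_sk= c_customer_sk
         and ca_address_sk = c_current_addr_sk
         and ss_item_sk = i_item_sk
         and ca_gmt_offset = -6
         and i_category = 'Sports'
         and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y')
         and s_gmt_offset = -6
         and d_year = 2001
         and d_moy = 12) promotional_sales cross join
      (select sum(ss_ext_sales_price) total
       from store_sales
       cross join store
       cross join date_dim
       cross join customer
       cross join customer_address
       cross join item
       where ss_sold_date_sk = d_date_sk
         and ss_store_sk = s_store_sk
         and ss_customer_sk= c_customer_sk
         and ca_address_sk = c_current_addr_sk
         and ss_item_sk = i_item_sk
         and ca_gmt_offset = -6
         and i_category = 'Sports'
         and s_gmt_offset = -6
         and d_year = 2001
         and d_moy = 12) all_sales
    order by promotions, total
    limit 100;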
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:44.844662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:45.114611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:23:48.603030Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788686317537458:2095];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:48.603081Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001467/r3tmp/tmpeuANxg/pdisk_1.dat 2025-03-28T09:23:49.044095Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:49.062127Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:49.062183Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:49.066069Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6540, node 4 2025-03-28T09:23:49.274775Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:49.274796Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:49.274802Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:49.274916Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4299 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:49.590347Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:49.732393Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:23:54.063222Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788709660109754:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:54.067165Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001467/r3tmp/tmpHsRjoR/pdisk_1.dat 2025-03-28T09:23:54.473772Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:54.519112Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:54.519211Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:54.528892Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12824, node 7 2025-03-28T09:23:54.690275Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:54.690298Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:54.690312Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:54.690450Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28907 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:55.036108Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:55.057335Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:23:55.131522Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:23:59.660321Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788733263692791:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:59.660383Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001467/r3tmp/tmp1ybsuB/pdisk_1.dat 2025-03-28T09:24:00.087946Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:00.137504Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:00.137601Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:00.145896Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15164, node 10 2025-03-28T09:24:00.383965Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:00.383993Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:00.384003Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:00.384154Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21419 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:00.721347Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:00.747408Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:24:00.865154Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:05.801828Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788757993591304:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:05.801871Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001467/r3tmp/tmpRRuPYG/pdisk_1.dat 2025-03-28T09:24:06.281097Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:06.358228Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:06.358334Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:06.372372Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27541, node 13 2025-03-28T09:24:06.582109Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:06.582136Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:06.582145Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:06.582301Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9627 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:06.977664Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:07.080255Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> TObjectStorageListingTest::Split >> YdbYqlClient::ColumnFamiliesWithStorageAndIndex [GOOD] >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex >> TTableProfileTests::WrongTableProfile [GOOD] >> TYqlDateTimeTests::DateKey >> TGRpcNewClient::InMemoryTables [GOOD] |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] >> YdbYqlClient::TestReadTableOneBatch >> TGRpcYdbTest::MakeListRemoveDirectory [GOOD] >> TGRpcYdbTest::ReadTable >> KqpJoinOrder::CanonizedJoinOrderTPCH11 [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD] |87.0%| [TA] $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.0%| [TA] {RESULT} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> YdbYqlClient::TestYqlIssues [GOOD] >> YdbYqlClient::TestYqlSessionClosed >> YdbTableBulkUpsert::Simple >> YdbYqlClient::ConnectDbAclIsStrictlyChecked >> YdbYqlClient::TestReadTableMultiShardWholeTableUseSnapshot [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribe >> YdbYqlClient::TestDecimal1 >> YdbYqlClient::TestColumnOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewClient::InMemoryTables [GOOD] Test command err: 2025-03-28T09:23:42.884108Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788659794833882:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:42.889149Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00146e/r3tmp/tmpNmywI1/pdisk_1.dat 2025-03-28T09:23:43.733300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:43.733395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:43.751789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:43.802490Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10345, node 1 2025-03-28T09:23:43.872987Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:23:43.873010Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:23:43.943866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:23:44.163757Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:44.163776Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:44.163787Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:44.163939Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13713 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:23:44.974139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:48.994835Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788685352869112:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:49.006965Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00146e/r3tmp/tmpZ8nElu/pdisk_1.dat 2025-03-28T09:23:49.438959Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:49.495121Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:49.495216Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:49.499911Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27386, node 4 2025-03-28T09:23:49.668514Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:49.668530Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:49.668535Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:49.668626Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27184 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:49.928862Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:52.522008Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:23:52.767531Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788702532739504:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:52.767621Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:52.770751Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788702532739516:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:52.774068Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:23:52.802933Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486788702532739518:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:23:52.895711Z node 4 :TX_PROXY ERROR: Actor# [4:7486788702532739589:2790] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:55.535676Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788716156715410:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:55.535797Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00146e/r3tmp/tmpKlXcnu/pdisk_1.dat 2025-03-28T09:23:55.811981Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:55.833160Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:55.833234Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:55.837610Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15070, node 7 2025-03-28T09:23:56.122429Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:56.122458Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:56.122467Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:56.122605Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26678 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:56.414361Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
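A note on the recurring KQP_WORKLOAD_SERVICE warnings in this block: the "Resource pool default not found" messages, the ESchemeOpCreateResourcePool proposal, and the later "path exist, request accepts it" TX_PROXY error appear to be the workload manager lazily bootstrapping /Root/.metadata/workload_manager/pools/default, with the path-exist error being the benign outcome of two actors racing to create it. As a hedged sketch, assuming YDB's CREATE RESOURCE POOL YQL syntax and with purely illustrative values (the pool name and limits below are assumptions, not taken from the log), an explicit creation would look like:

    -- Illustrative sketch only; name and limits are assumptions, not logged values.
    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );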
2025-03-28T09:24:01.113853Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788740167484836:2096];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:01.114090Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00146e/r3tmp/tmplTUB8o/pdisk_1.dat 2025-03-28T09:24:01.495184Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:01.548672Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:01.548757Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:01.556115Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14806, node 10 2025-03-28T09:24:01.843377Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:01.843400Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:01.843408Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:01.843582Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27225 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:02.235322Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:02.270894Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:24:04.843384Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:04.966548Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:24:05.038829Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788757347355324:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:05.038918Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788757347355335:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:05.038965Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:05.043705Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-03-28T09:24:05.101120Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486788757347355338:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-03-28T09:24:05.192974Z node 10 :TX_PROXY ERROR: Actor# [10:7486788757347355411:2878] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:05.319241Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe11ms95zsrpqx21tz3grax, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MjNkMzA2YzYtM2FiYmFiNTUtMzBiNTJkNzctZmYwZDE1NzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:05.447892Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-03-28T09:24:07.438936Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788765815257881:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:07.439007Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00146e/r3tmp/tmpahUHJe/pdisk_1.dat 2025-03-28T09:24:07.785204Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:07.818100Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:07.818199Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:07.833115Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2227, node 13 2025-03-28T09:24:08.087488Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:08.087518Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:08.087529Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:08.087682Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9459 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:24:08.496955Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:11.293904Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:11.454988Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:11.531172Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 >> GrpcConnectionStringParserTest::NoDatabaseFlag [GOOD] >> GrpcConnectionStringParserTest::IncorrectConnectionString ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-03-28T09:23:46.995792Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788678140062529:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:46.995844Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001579/r3tmp/tmpQLx6ZX/pdisk_1.dat 2025-03-28T09:23:47.875289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:47.875415Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:47.887610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:47.892126Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:48.042901Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 2702, node 1 2025-03-28T09:23:48.523747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:48.523806Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:48.523814Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:48.523966Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:23:49.347999Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:23:49.354905Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:23:49.358790Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:23:49.359777Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:24926, port: 24926 2025-03-28T09:23:49.375479Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:23:49.415245Z node 1 
:LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:23:49.465666Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:23:49.509343Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****d36Q (88507BC8) () has now valid token of ldapuser@ldap 2025-03-28T09:23:51.662752Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788698281806175:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:51.699274Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001579/r3tmp/tmprpgCbx/pdisk_1.dat 2025-03-28T09:23:51.941694Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:51.964689Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:51.964777Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:51.966571Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17937, node 2 2025-03-28T09:23:52.207275Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:52.207299Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:52.207305Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:52.207403Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:23:52.714836Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:23:52.721967Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:23:52.721995Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:23:52.722717Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10713, port: 10713 2025-03-28T09:23:52.722781Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:23:52.729212Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:23:52.770971Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:23:52.775283Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:23:52.775371Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:23:52.819078Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:23:52.867026Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:23:52.868334Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****6yzg (8D7B7FA4) () has now valid token of ldapuser@ldap 2025-03-28T09:23:56.098971Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486788718075211555:2219];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:56.160328Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001579/r3tmp/tmpcEDUyf/pdisk_1.dat 2025-03-28T09:23:56.322604Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:56.342404Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:56.342508Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:56.352545Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5442, node 3 2025-03-28T09:23:56.531343Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:56.531367Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:56.531375Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:56.531512Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:23:56.666818Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:23:56.669504Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:23:56.669539Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:23:56.670256Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:14376, port: 14376 2025-03-28T09:23:56.670335Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:23:56.673857Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:23:56.719305Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****cPbg (D8C11284) () has now valid token of ldapuser@ldap 2025-03-28T09:24:00.444834Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788735336858829:2215];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001579/r3tmp/tmpWTib9t/pdisk_1.dat 2025-03-28T09:24:00.530617Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:24:00.601357Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:00.613529Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-03-28T09:24:00.613613Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:00.617065Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64497, node 4 2025-03-28T09:24:00.775266Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:00.775288Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:00.775295Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:00.775428Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:24:00.920082Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:24:00.922936Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:24:00.922962Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:24:00.923698Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://qqq:27483 ldap://localhost:27483 ldap://localhost:11111, port: 27483 2025-03-28T09:24:00.923798Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:24:00.955312Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:24:00.998986Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:24:00.999449Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:24:00.999510Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:24:01.049227Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:24:01.095081Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:24:01.095912Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****Ehxw (2C7FE0BC) () has now valid token of ldapuser@ldap 2025-03-28T09:24:04.850294Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486788754585347185:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:04.851781Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001579/r3tmp/tmpqLrJF5/pdisk_1.dat 2025-03-28T09:24:05.024808Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:05.026238Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-03-28T09:24:05.026305Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:05.028927Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61307, node 5 2025-03-28T09:24:05.159383Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:05.159416Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:05.159424Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:05.159582Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:24:05.386930Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:24:05.387273Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:24:05.387291Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:24:05.388095Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:23267, port: 23267 2025-03-28T09:24:05.388160Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:24:05.402896Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-03-28T09:24:05.450892Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:24:05.451688Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:24:05.451733Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-28T09:24:05.495010Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-28T09:24:05.543017Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-28T09:24:05.544070Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****CJhQ (A1C1A1B5) () has now valid token of ldapuser@ldap test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001579/r3tmp/tmpmngq8N/pdisk_1.dat 2025-03-28T09:24:09.059502Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486788771971677159:2236];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:09.059886Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:24:09.201979Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:09.227707Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-03-28T09:24:09.227801Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:09.235965Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1896, node 6 2025-03-28T09:24:09.415322Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:09.415348Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:09.415356Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:09.415522Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:24:09.570424Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:24:09.573745Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:24:09.573774Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:24:09.574486Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:11142, port: 11142 2025-03-28T09:24:09.574578Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:24:09.581892Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-03-28T09:24:09.581970Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:11142. Bad search filter 2025-03-28T09:24:09.582199Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****yf2g (2690822E) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:11142. 
Bad search filter)' >> GrpcConnectionStringParserTest::IncorrectConnectionString [GOOD] >> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::DeleteFromAfterCreate [GOOD] Test command err: 2025-03-28T09:23:43.121449Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788665897134519:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:43.121518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001482/r3tmp/tmpO7U5py/pdisk_1.dat 2025-03-28T09:23:43.950897Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:43.974640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:43.974731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:43.986900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27476, node 1 2025-03-28T09:23:44.291350Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:44.291372Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:44.291379Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:44.291476Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15640 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:44.887513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
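On the LdapFetchGroupsUseInvalidSearchFilterBad result recorded earlier in this block: RFC 4515 search filters use fully parenthesized prefix notation, so the logged filter fails to parse both because the '&' operator is not enclosed in parentheses and because its second operand '()' is empty, which is exactly the condition the test exercises. A minimal comparison, where only the first line is taken from the log and the corrected form is illustrative:

    malformed, as logged:          &(uid=ldapuser)()
    well-formed RFC 4515 example:  (&(uid=ldapuser)(objectClass=*))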
2025-03-28T09:23:45.131407Z node 1 :TX_PROXY ERROR: Actor# [1:7486788674487070049:2603] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-03-28T09:23:49.187072Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788687772486521:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:49.187133Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001482/r3tmp/tmpw74b8c/pdisk_1.dat 2025-03-28T09:23:49.531868Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:49.583450Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:49.583553Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:49.586512Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32565, node 4 2025-03-28T09:23:49.659032Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:49.659057Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:49.659064Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:49.659188Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5396 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:50.069209Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
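Every node startup in this run emits the same NET_CLASSIFIER sequence: the distributable config is consulted first, then a local file, and the ERROR line ("got bad distributable configuration") fires only after both come back empty. A minimal sketch of that two-stage fallback, assuming optional-returning loaders; LoadDistributable and LoadFromFile are hypothetical names, not the real NET_CLASSIFIER API:

```cpp
// Illustrative fallback chain mirroring the NET_CLASSIFIER startup lines.
#include <iostream>
#include <optional>
#include <string>

std::optional<std::string> LoadDistributable() { return std::nullopt; }  // "(empty maybe)"
std::optional<std::string> LoadFromFile()      { return std::nullopt; }  // "(empty maybe)"

int main() {
    if (auto cfg = LoadDistributable()) {
        std::cout << "using distributable config: " << *cfg << "\n";
    } else if (auto cfg = LoadFromFile()) {
        std::cout << "initialized from file: " << *cfg << "\n";
    } else {
        // Matches the log: both sources empty -> the ERROR line is emitted.
        std::cerr << "got bad distributable configuration\n";
    }
}
```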
2025-03-28T09:23:54.326910Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788710723970401:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:54.327043Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001482/r3tmp/tmpFEKJrd/pdisk_1.dat 2025-03-28T09:23:54.748400Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:54.811760Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:54.811853Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:54.823966Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21059, node 7 2025-03-28T09:23:55.107418Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:55.107440Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:55.107449Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:55.107602Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21003 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:55.440257Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:57.978580Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788723608873359:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:57.978674Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:58.355957Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:23:58.553616Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788727903840836:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:58.553708Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:58.554069Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788727903840841:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:58.558397Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:23:58.579680Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486788727903840843:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:23:58.647140Z node 7 :TX_PROXY ERROR: Actor# [7:7486788727903840925:2813] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:59.005332Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe11et8adaqx2ahmk46q1kk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NTk2MzVkMTctZTJiZDRlY2QtYTVhNDhmYWEtNWYwODJhMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:23:59.293020Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqe11f5k0v0ackndjfxh95gw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NTk2MzVkMTctZTJiZDRlY2QtYTVhNDhmYWEtNWYwODJhMTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId ... ccessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:24:01.258474Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788741292625588:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:01.258538Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001482/r3tmp/tmpOpy4yS/pdisk_1.dat 2025-03-28T09:24:01.619741Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:01.654155Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:01.654249Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:01.660000Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4767, node 10 2025-03-28T09:24:01.931542Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:01.931568Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:01.931577Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:01.931719Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10763 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:02.274008Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:05.207773Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788758472495843:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:05.207867Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:05.208417Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788758472495855:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:05.212679Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:05.249304Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486788758472495857:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:24:05.349299Z node 10 :TX_PROXY ERROR: Actor# [10:7486788758472495937:2683] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:05.512083Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7486788758472495969:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[Root/NotFound]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:24:05.512389Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=OWViNjY0OTUtMTAxMDAwYWEtOWE2Y2Y3NmMtMzA2ZDQzODY=, ActorId: [10:7486788758472495816:2331], ActorState: ExecuteState, TraceId: 01jqe11n6v839mgcgpzg7q5fs0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:24:07.572694Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788767156185327:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:07.572776Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001482/r3tmp/tmpBHTVJe/pdisk_1.dat 2025-03-28T09:24:07.895844Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:07.960660Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:07.960762Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:07.968206Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17322, node 13 2025-03-28T09:24:08.198787Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:08.198814Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:08.198823Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:08.198967Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17226 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:08.608024Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
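A pattern that recurs for every fresh node in this run: a NOT_FOUND when fetching PoolId "default", a TPoolCreatorActor retry after "Transaction ... completed, doublechecking", and finally a TX_PROXY message stating "path exist, request accepts it". Read together, this looks like first-use provisioning of /Root/.metadata/workload_manager/pools/default in which concurrent creators race and the loser accepts the already-existing path as success. A sketch of that create-if-missing idiom under those assumptions; EnsureDefaultPool and EStatus are illustrative names, not YDB's API:

```cpp
// Illustrative create-if-missing with a benign ALREADY_EXISTS race.
#include <functional>
#include <iostream>

enum class EStatus { Ok, NotFound, AlreadyExists, Error };

EStatus EnsureDefaultPool(const std::function<EStatus()>& Fetch,
                          const std::function<EStatus()>& Create) {
    if (Fetch() == EStatus::Ok) {
        return EStatus::Ok;              // pool already provisioned
    }
    switch (Create()) {                  // NOT_FOUND path: try to create it
        case EStatus::Ok:
        case EStatus::AlreadyExists:     // a concurrent creator won the race; accept it
            return EStatus::Ok;
        default:
            return EStatus::Error;       // genuine failure, surface it
    }
}

int main() {
    auto fetch  = [] { return EStatus::NotFound; };
    auto create = [] { return EStatus::AlreadyExists; };  // lost the race
    std::cout << (EnsureDefaultPool(fetch, create) == EStatus::Ok) << "\n";  // prints 1
}
```

The design point the log suggests is that ALREADY_EXISTS is folded into success, which would explain why each "path exist" ERROR is immediately followed by normal query execution.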
2025-03-28T09:24:08.678705Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:08.797549Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:11.685078Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486788784336055816:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:11.685152Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486788784336055808:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:11.685307Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:11.688960Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-28T09:24:11.716762Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7486788784336055822:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-28T09:24:11.796279Z node 13 :TX_PROXY ERROR: Actor# [13:7486788784336055893:2901] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:11.910948Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqe11v9314jycrwj9fcyvmyh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzNhNzU0YmYtMjhhN2IwM2EtYzk0YjM4ZWUtNTZjMTNjYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:11.919009Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqe11v9314jycrwj9fcyvmyh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzNhNzU0YmYtMjhhN2IwM2EtYzk0YjM4ZWUtNTZjMTNjYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:12.056431Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jqe11vgmf1natn9kf3bb012h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzNhNzU0YmYtMjhhN2IwM2EtYzk0YjM4ZWUtNTZjMTNjYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:12.061856Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqe11vgmf1natn9kf3bb012h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YzNhNzU0YmYtMjhhN2IwM2EtYzk0YjM4ZWUtNTZjMTNjYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TTableProfileTests::UseDefaultProfile >> YdbIndexTable::AlterTableAddIndex [GOOD] >> YdbLogStore::AlterLogStore >> TGRpcYdbTest::BeginTxRequestError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_AuthNotRequired [GOOD] Test command err: 2025-03-28T09:23:45.649402Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788671152648162:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:45.649558Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00145f/r3tmp/tmpBBB2bM/pdisk_1.dat 2025-03-28T09:23:46.475475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:46.475588Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:46.482186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:46.503941Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61376, node 1 2025-03-28T09:23:46.854434Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:46.854457Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:46.854475Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:46.854565Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1474 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:47.454725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:47.632412Z node 1 :TX_PROXY ERROR: Actor# [1:7486788679742583597:2624] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-03-28T09:23:51.748807Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788697850393693:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:51.748849Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00145f/r3tmp/tmpwAnsfn/pdisk_1.dat 2025-03-28T09:23:52.039792Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:52.083857Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:52.083935Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:52.100003Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23361, node 4 2025-03-28T09:23:52.323219Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:52.323240Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:52.323248Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:52.323375Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26112 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:52.560297Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:55.124261Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788715030263927:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:55.124386Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:55.124728Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788715030263939:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:55.129137Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:23:55.173398Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486788715030263941:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:23:55.251639Z node 4 :TX_PROXY ERROR: Actor# [4:7486788715030264027:2667] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:57.802824Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788725795421442:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:57.806816Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00145f/r3tmp/tmpG0SDwb/pdisk_1.dat 2025-03-28T09:23:58.112413Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:58.160722Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:58.160823Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:58.173634Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20426, node 7 2025-03-28T09:23:58.395551Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:58.395579Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:58.395592Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:58.395741Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9061 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:58.756388Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:01.156681Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788742975291664:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:01.156772Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:01.157169Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788742975291676:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:01.167399Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:01.206305Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486788742975291678:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:24:01.271425Z node 7 : ... u don't have access permissions } 2025-03-28T09:24:07.086016Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:07.102847Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:07.102966Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:07.102976Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:07.103019Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:07.117099Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:07.117262Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:07.117273Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:07.117331Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:07.124907Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486788766762614778:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:24:07.212117Z node 10 :TX_PROXY ERROR: Actor# [10:7486788766762614847:2799] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:07.409305Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe11ps8dtw3ndqhx9dnd1p7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OTE3M2Q4OTctOTYyNzFmNDgtMTI1Yzg4MDAtYWYyNjhiNWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:07.434705Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jqe11q4acr7qqn7zvaynys9e, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:34262, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:07.435376Z node 10 :READ_TABLE_API NOTICE: [10:7486788766762614890:2352] Finish grpc stream, status: 400010 2025-03-28T09:24:07.438627Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jqe11q4e5xf8y5r6ehth03k9, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:34262, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:07.457264Z node 10 :READ_TABLE_API DEBUG: [10:7486788766762614891:2353] Adding quota request to queue ShardId: 0, TxId: 281474976710662 2025-03-28T09:24:07.457346Z node 10 :READ_TABLE_API DEBUG: [10:7486788766762614891:2353] Assign stream quota to Shard 0, Quota 5, TxId 281474976710662 Reserved: 5 of 25, Queued: 0 2025-03-28T09:24:07.458822Z node 10 :READ_TABLE_API DEBUG: [10:7486788766762614891:2353] got stream part, size: 246, RU required: 128 rate limiter absent 2025-03-28T09:24:07.459202Z node 10 :READ_TABLE_API DEBUG: [10:7486788766762614891:2353] Starting inactivity timer for 600.000000s with tag 3 2025-03-28T09:24:07.568329Z node 10 :READ_TABLE_API NOTICE: [10:7486788766762614891:2353] Finish grpc stream, status: 400000 2025-03-28T09:24:07.594282Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jqe11q9ae9rpqw0vcqqphxvf, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:34262, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:07.626844Z node 10 :READ_TABLE_API DEBUG: [10:7486788766762614920:2356] Adding quota request to queue ShardId: 0, TxId: 281474976710664 2025-03-28T09:24:07.626887Z node 10 :READ_TABLE_API DEBUG: [10:7486788766762614920:2356] Assign stream quota to Shard 0, Quota 5, TxId 281474976710664 Reserved: 5 of 25, Queued: 0 2025-03-28T09:24:07.627644Z node 10 :READ_TABLE_API DEBUG: [10:7486788766762614920:2356] got stream part, size: 84, RU required: 128 rate limiter absent 2025-03-28T09:24:07.627983Z node 10 :READ_TABLE_API DEBUG: [10:7486788766762614920:2356] Starting inactivity timer for 600.000000s with tag 3 2025-03-28T09:24:07.640514Z node 10 :READ_TABLE_API NOTICE: [10:7486788766762614920:2356] Finish grpc stream, status: 400000 2025-03-28T09:24:07.645052Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jqe11qawexwksp0xpw7070w7, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:34262, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 
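The READ_TABLE_API lines above describe a counting quota: each stream requests a per-shard slice (Quota 5) against a fixed budget (Reserved: 5 of 25), and requests queue once the budget is exhausted. A minimal sketch of that accounting, reusing the 5-of-25 figures from the log; TStreamQuota and its methods are illustrative names, not the actual implementation:

```cpp
// Illustrative counting quota mirroring "Reserved: 5 of 25, Queued: 0".
#include <cstddef>
#include <deque>
#include <iostream>

class TStreamQuota {
public:
    explicit TStreamQuota(size_t total) : Total_(total) {}

    bool TryReserve(size_t amount, size_t requestId) {
        if (Reserved_ + amount > Total_) {
            Queue_.push_back(requestId);   // "Adding quota request to queue"
            return false;
        }
        Reserved_ += amount;               // "Assign stream quota ... Reserved: X of Total"
        return true;
    }

    void Release(size_t amount) { Reserved_ -= amount; }  // freed once the stream part is consumed

    size_t Reserved() const { return Reserved_; }
    size_t Queued()   const { return Queue_.size(); }

private:
    size_t Total_;
    size_t Reserved_ = 0;
    std::deque<size_t> Queue_;
};

int main() {
    TStreamQuota quota(25);
    quota.TryReserve(5, /*requestId=*/1);
    std::cout << "Reserved: " << quota.Reserved() << " of 25, Queued: "
              << quota.Queued() << "\n";   // Reserved: 5 of 25, Queued: 0
}
```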
2025-03-28T09:24:07.657424Z node 10 :READ_TABLE_API DEBUG: [10:7486788766762614940:2358] Adding quota request to queue ShardId: 0, TxId: 281474976710666 2025-03-28T09:24:07.657474Z node 10 :READ_TABLE_API DEBUG: [10:7486788766762614940:2358] Assign stream quota to Shard 0, Quota 5, TxId 281474976710666 Reserved: 5 of 25, Queued: 0 2025-03-28T09:24:07.659125Z node 10 :READ_TABLE_API DEBUG: [10:7486788766762614940:2358] got stream part, size: 210, RU required: 128 rate limiter absent 2025-03-28T09:24:07.659466Z node 10 :READ_TABLE_API DEBUG: [10:7486788766762614940:2358] Starting inactivity timer for 600.000000s with tag 3 2025-03-28T09:24:07.661752Z node 10 :READ_TABLE_API NOTICE: [10:7486788766762614940:2358] Finish grpc stream, status: 400000 2025-03-28T09:24:07.664794Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f2480] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-28T09:24:07.665049Z node 10 :GRPC_SERVER DEBUG: [0x51a00000a280] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-28T09:24:07.665231Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f3680] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-28T09:24:07.665406Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ccc80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-28T09:24:07.665597Z node 10 :GRPC_SERVER DEBUG: [0x51a0000c1880] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:07.665790Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e7c80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-28T09:24:07.665958Z node 10 :GRPC_SERVER DEBUG: [0x51a00004f880] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:07.666129Z node 10 :GRPC_SERVER DEBUG: [0x51a00006fc80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:07.666322Z node 10 :GRPC_SERVER DEBUG: [0x51a000070880] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:07.666505Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e2e80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-03-28T09:24:07.666670Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e3a80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-28T09:24:07.666859Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e7680] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-28T09:24:07.667052Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f4e80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:07.667238Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f4880] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:07.667493Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e7080] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:07.667668Z node 10 :GRPC_SERVER DEBUG: [0x51a0000e4c80] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-28T09:24:07.667834Z node 10 :GRPC_SERVER DEBUG: [0x51a0000cf680] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:09.790387Z node 13 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788774098463778:2272];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:09.790456Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00145f/r3tmp/tmpQPSE2f/pdisk_1.dat 2025-03-28T09:24:10.143434Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:10.178784Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:10.178893Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:10.184217Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14063, node 13 2025-03-28T09:24:10.324609Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:10.324635Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:10.324644Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:10.324788Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27879 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:10.647468Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:10.766922Z node 13 :TICKET_PARSER DEBUG: Ticket A393A64A69A34B7F169691B2532DA631A84508E2F03FD1798EE5C37162B3FEAD (ipv6:[::1]:44420) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-28T09:24:11.007331Z node 13 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token 2025-03-28T09:24:11.121818Z node 13 :TICKET_PARSER DEBUG: Ticket 2A2D5840BBC56E5D48CC7DB54A2F563752370763D7B739996A770FBF61825799 (ipv6:[::1]:44458) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-03-28T09:24:11.122555Z node 13 :TICKET_PARSER ERROR: Ticket 2A2D5840BBC56E5D48CC7DB54A2F563752370763D7B739996A770FBF61825799: Cannot create token from certificate. 
Client certificate failed verification ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH11 [GOOD] Test command err: Trying to start YDB, gRPC: 4688, MsgBus: 21171 2025-03-28T09:22:21.911439Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788311960062346:2230];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001787/r3tmp/tmpHej9Jf/pdisk_1.dat 2025-03-28T09:22:22.340487Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:22:22.659220Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:22.668336Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:22.668420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:22.672547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4688, node 1 2025-03-28T09:22:22.871192Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:22.871212Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:22.871219Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:22.871320Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21171 TClient is connected to server localhost:21171 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:23.853312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:26.300597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788333434899294:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:26.300701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:26.306840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788333434899306:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:26.311062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:26.324647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788333434899308:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:26.406912Z node 1 :TX_PROXY ERROR: Actor# [1:7486788333434899361:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:26.908535Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788311960062346:2230];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:26.908602Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:27.117651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:27.565432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788337729866927:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:27.566555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788337729866927:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:27.566822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788337729866927:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:27.568907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788337729866920:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:27.568954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788337729866920:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:27.570005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788337729866920:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:27.570069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788337729866927:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:27.570110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788337729866920:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:27.570214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788337729866927:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:27.570218Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7486788337729866920:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:27.570371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788337729866927:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:27.570509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788337729866920:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:27.570613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788337729866927:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:27.570633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788337729866920:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:27.570808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788337729866927:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:27.570810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788337729866920:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:27.570970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788337729866927:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:27.571015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788337729866920:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:27.571091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788337729866927:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:27.571163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788337729866920:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:27.571233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788337729866927:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:27.571274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788337729866920:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:27.571346Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788337729866927:2354];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:27.571378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788337729866920:2350];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:27.617202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788337729866949:2365];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.762831Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.766426Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.768391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.772049Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039219;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.774105Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.777131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.778928Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.783683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.792168Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.792452Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039342;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.797390Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039282;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.801847Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.806987Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.812424Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.812858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.817825Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.821759Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.823271Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.827745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039324;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.828396Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.833940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039306;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.841623Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.844526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.897368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.914874Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.919730Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.920435Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.925554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.927460Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.930657Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.933069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.947008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.947011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.953103Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.954215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.960585Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.965927Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.989225Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.994288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:51.999768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:52.006170Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039299;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:52.007889Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:52.019565Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039289;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:52.024144Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:52.026381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:52.146084Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe1033masfr5cy5mteajmgc", SessionId: ydb://session/3?node_id=1&id=NTEyNDhkODQtNGE5MDRmY2ItODdjYzFkNmUtNzI1MDJkMDU=, Slow query, duration: 37.980744s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:23:52.418799Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:52.419038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:52.419303Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;self_id=[1:7486788604017885156:9237];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039094;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224038933; 2025-03-28T09:23:52.419636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts >> TGRpcYdbTest::ExecuteQueryWithUuid [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest >> TGRpcNewCoordinationClient::CreateDropDescribe >> TObjectStorageListingTest::Split [GOOD] >> TObjectStorageListingTest::SuffixColumns >> TGRpcNewClient::YqlQueryWithParams >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::BeginTxRequestError [GOOD] Test command err: 2025-03-28T09:23:47.311422Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788681801644936:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:47.311468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00145e/r3tmp/tmpTZkxzA/pdisk_1.dat 2025-03-28T09:23:48.166024Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:48.168327Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:48.168431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:48.185544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26342, node 1 2025-03-28T09:23:48.602720Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:48.602758Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:48.602770Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:48.602919Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1434 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:49.114787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
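For readability: the KQP_SLOW_LOG record earlier in this output (TraceId "01jqe1033masfr5cy5mteajmgc", duration 37.980744s) carries the offending statement as an escaped string; de-escaped (whitespace normalized, content otherwise verbatim from the log) it is the following YQL:

    CREATE TABLE t1 (
        id1 Int32 NOT NULL,
        PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t2 (
        id2 Int64 NOT NULL,
        t1_id1 Int64 NOT NULL,
        -- random_field2 Int32
        PRIMARY KEY (id2)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

    CREATE TABLE t3 (
        id3 Int16 NOT NULL,
        -- random_field3 Int32
        PRIMARY KEY (id3)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

Each table is a column store (STORE = COLUMN) pre-split into at least 240 partitions, which is consistent with the long run of per-tablet TX_COLUMNSHARD_TX finished_tx records (tablet_ids in the 72075186224039201-72075186224039392 range, tx_id 281474976710714) that precedes the slow-query entry.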
2025-03-28T09:23:49.381197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:23:53.947127Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788708610477030:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:53.947171Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00145e/r3tmp/tmpl5xj5b/pdisk_1.dat 2025-03-28T09:23:54.248460Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:54.275417Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:54.275490Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:54.287664Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24652, node 4 2025-03-28T09:23:54.395574Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:54.395597Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:54.395605Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:54.395691Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27693 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:54.664136Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:23:54.892650Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:23:54.995289Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:23:55.097708Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:23:55.164771Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:23:55.252054Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:23:55.752902Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:23:55.838569Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:23:55.901801Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:23:59.719670Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788731150462975:2159];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:59.737810Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00145e/r3tmp/tmp1BSzlv/pdisk_1.dat 2025-03-28T09:23:59.973030Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:00.002840Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:00.002926Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:00.006119Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17105, node 7 2025-03-28T09:24:00.082352Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:00.082398Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:00.082413Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:00.082522Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23335 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:00.368317Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:00.509694Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:05.112214Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788758006102581:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:05.112751Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00145e/r3tmp/tmp15U6xr/pdisk_1.dat 2025-03-28T09:24:05.354296Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:05.431332Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:05.431405Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 12012, node 10 2025-03-28T09:24:05.451004Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:05.607539Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:05.607561Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:05.607571Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:05.607701Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4991 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:05.904654Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:05.995786Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:06.145290Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:06.539467Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037888 not found 2025-03-28T09:24:10.652776Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788781908585174:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:10.653209Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00145e/r3tmp/tmpFtnSSV/pdisk_1.dat 2025-03-28T09:24:10.932680Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:10.979325Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:10.979449Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:10.989132Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5708, node 13 2025-03-28T09:24:11.055665Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:11.055691Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:11.055701Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:11.055858Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18299 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:11.372342Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:14.412319Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486788799088455399:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:14.412372Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486788799088455391:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:14.412679Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:14.423525Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:14.467047Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7486788799088455405:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:24:14.552998Z node 13 :TX_PROXY ERROR: Actor# [13:7486788799088455477:2675] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:14.555803Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NzE5OWY1NC04NzU2M2VmMy03MTQ3NmQ1MC04ODdmYzAz, ActorId: [13:7486788799088455380:2332], ActorState: ExecuteState, TraceId: 01jqe11xy9ab6wd49wfxnhg9qs, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2025-03-28T09:24:14.559528Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NzE5OWY1NC04NzU2M2VmMy03MTQ3NmQ1MC04ODdmYzAz, ActorId: [13:7486788799088455380:2332], ActorState: ExecuteState, TraceId: 01jqe11y2yddppzhbkkfzmbmx1, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: 2025-03-28T09:24:14.561799Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NzE5OWY1NC04NzU2M2VmMy03MTQ3NmQ1MC04ODdmYzAz, ActorId: [13:7486788799088455380:2332], ActorState: ExecuteState, TraceId: 01jqe11y312kqgfsqtmfwhxw92, ReplyQueryCompileError, status NOT_FOUND remove tx with tx_id: >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] >> KqpJoinOrder::ShuffleEliminationReuseShuffleTwoJoins [GOOD] >> YdbS3Internal::TestS3Listing >> YdbYqlClient::TestYqlSessionClosed [GOOD] >> YdbYqlClient::TestYqlLongSessionPrepareError >> YdbYqlClient::SecurityTokenAuthMultiTenantSDK >> YdbYqlClient::ConnectDbAclIsStrictlyChecked [GOOD] >> YdbYqlClient::CopyTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143154388.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143154388.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154388.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123154388.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153188.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123153188.000000s;Name=;Codec=}; 2025-03-28T09:23:10.715370Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:23:10.884496Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:23:10.920084Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:23:10.920399Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:23:10.932830Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:10.933086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:10.933391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:23:10.933518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:23:10.933629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:23:10.933738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:23:10.933853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:23:10.933990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:23:10.934122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:23:10.934235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:23:10.934383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:23:10.934507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:23:10.998125Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:23:10.998460Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:23:10.998542Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:23:10.998763Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:23:10.998953Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:23:10.999043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:23:10.999086Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:23:10.999179Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:23:10.999239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:23:10.999280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:23:10.999311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:23:10.999468Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:23:10.999530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:23:10.999576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:23:10.999609Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:23:10.999700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:23:10.999767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:23:10.999811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:23:10.999841Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:23:10.999915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:23:10.999957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:23:10.999989Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-03-28T09:23:11.000046Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:23:11.000088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:23:11.000128Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:23:11.000536Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2025-03-28T09:23:11.000619Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35; 2025-03-28T09:23:11.000695Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-03-28T09:23:11.000805Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=52; 2025-03-28T09:23:11.000990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:23:11.001047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:23:11.001082Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:23:11.001310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:23:11.001362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:23:11.001394Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:23:11.001541Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:23:11.001582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:23:11.001616Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:23:11.001811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
.cpp:29;PRECHARGE:finishLoadingTime=13; 2025-03-28T09:24:17.747738Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=378; 2025-03-28T09:24:17.747781Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=31150; 2025-03-28T09:24:17.754558Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=6686; 2025-03-28T09:24:17.763266Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5896; 2025-03-28T09:24:17.763398Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=6947; 2025-03-28T09:24:17.763554Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=91; 2025-03-28T09:24:17.763659Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=61; 2025-03-28T09:24:17.763776Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=75; 2025-03-28T09:24:17.763880Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=60; 2025-03-28T09:24:17.771889Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=7941; 2025-03-28T09:24:17.783094Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=11073; 2025-03-28T09:24:17.783246Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=37; 2025-03-28T09:24:17.783325Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=29; 2025-03-28T09:24:17.783385Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-03-28T09:24:17.783433Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-03-28T09:24:17.783480Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-03-28T09:24:17.783564Z node 1 
:TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=44; 2025-03-28T09:24:17.783611Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-03-28T09:24:17.783698Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=51; 2025-03-28T09:24:17.783745Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-03-28T09:24:17.783813Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=29; 2025-03-28T09:24:17.783921Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=62; 2025-03-28T09:24:17.784170Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=208; 2025-03-28T09:24:17.784208Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=76242; 2025-03-28T09:24:17.784357Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:24:17.784462Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:24:17.784516Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:24:17.784577Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:24:17.794478Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-28T09:24:17.794927Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:24:17.794997Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:24:17.795075Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-03-28T09:24:17.795143Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:24:17.795190Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:24:17.795241Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:24:17.795286Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:24:17.795399Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:24:17.796226Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:24:17.796322Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1991:3891];tablet_id=9437184;parent=[1:1953:3860];fline=manager.cpp:82;event=ask_data;request=request_id=95;1={portions_count=11};; 2025-03-28T09:24:17.797361Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:24:17.797502Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:24:17.797533Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:24:17.797558Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:24:17.797601Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:24:17.797662Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:24:17.797719Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-03-28T09:24:17.797782Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:24:17.797827Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:24:17.797875Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:24:17.797916Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:24:17.798005Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:24:17.806850Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=11;path_id=1; 2025-03-28T09:24:17.808233Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 >> TGRpcYdbTest::ReadTable [GOOD] >> TGRpcYdbTest::OperationTimeout >> GrpcConnectionStringParserTest::CommonClientSettingsFromConnectionString [GOOD] >> LocalityOperation::LocksFromAnotherTenants+UseSink >> TGRpcClientLowTest::GrpcRequestProxy >> YdbYqlClient::TestReadTableOneBatch [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder >> 
YdbYqlClient::TestReadTableMultiShardWithDescribe [GOOD] >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit >> YdbYqlClient::TestDecimal1 [GOOD] >> YdbYqlClient::TestDecimal35 >> YdbYqlClient::ColumnFamiliesDescriptionWithStorageAndIndex [GOOD] >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile >> YdbLogStore::AlterLogStore [GOOD] >> YdbLogStore::AlterLogTable >> TObjectStorageListingTest::SuffixColumns [GOOD] >> YdbTableBulkUpsert::Simple [GOOD] >> YdbTableBulkUpsert::SyncIndexShouldSucceed >> YdbYqlClient::TestColumnOrder [GOOD] >> YdbYqlClient::TestDecimal >> TYqlDateTimeTests::DateKey [GOOD] >> TYqlDateTimeTests::DatetimeKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationReuseShuffleTwoJoins [GOOD] Test command err: Trying to start YDB, gRPC: 14450, MsgBus: 28723 2025-03-28T09:22:44.231202Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788412569549487:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:44.231668Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001744/r3tmp/tmpcpC9Yd/pdisk_1.dat 2025-03-28T09:22:44.935627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:44.935744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:44.940533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:22:44.956124Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14450, node 1 2025-03-28T09:22:45.304727Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:45.304746Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:45.304753Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:45.304866Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28723 TClient is connected to server localhost:28723 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:22:46.144274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:48.388389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788429749419129:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:48.388532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:48.388928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788429749419141:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:48.393479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:48.407288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788429749419143:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:48.487063Z node 1 :TX_PROXY ERROR: Actor# [1:7486788429749419194:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:48.961846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.234966Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788412569549487:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:49.235021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:49.290920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788434044386701:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:49.291167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788434044386701:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:49.291378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788434044386701:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:49.291480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788434044386701:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:49.291602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788434044386701:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:49.291711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788434044386701:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:49.291799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788434044386701:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:49.291921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788434044386701:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:49.292038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788434044386701:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:49.292145Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7486788434044386701:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:49.292241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788434044386701:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:49.292337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788434044386701:2350];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:49.324123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788434044386720:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:49.324186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788434044386720:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:49.324378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788434044386720:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:49.324482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788434044386720:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:49.324578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788434044386720:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:49.324675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788434044386720:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:49.324784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788434044386720:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:49.324899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788434044386720:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:49.324999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788434044386720:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:49.325091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788434044386720:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:49.325195Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7486788434044386720:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:49.325302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788434044386720:2359];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:49.348959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037901;self_id=[1:7486788434044386710:2354];tablet_id=72075186224037901;process=TTxInitSchema::Execute;fline=abstract.c ... tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.734063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.741539Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.744214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.748345Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.750835Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.755662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.757896Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039354;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.763190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.766246Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.769909Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.773298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.777258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-28T09:24:06.784525Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.788247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039376;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.796962Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.801143Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.806909Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.812343Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.813855Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.820677Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.829652Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.839716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.846871Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.853822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.857953Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.864604Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.868325Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.878143Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.878542Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.885618Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.893068Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.902048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.906442Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.913251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.919976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.926624Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.929846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.933416Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.936334Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.940195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.943230Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.945940Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.948599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.953424Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.966142Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.095373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.144331Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe10p0q9xgxf5h294e9sk10", SessionId: ydb://session/3?node_id=1&id=ZDljMjk4OTAtNGExYzY4N2YtZTZmMTk1Y2MtMWFkYTZkMTM=, Slow query, duration: 33.615939s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:24:07.422014Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:07.422026Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:07.422673Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> YdbYqlClient::TestTzTypesFullStack >> KqpJoinOrder::TPCH12_100 [GOOD] >> TGRpcNewCoordinationClient::CreateDropDescribe [GOOD] >> TGRpcNewCoordinationClient::NodeNotFound >> TGRpcYdbTest::ExecuteQueryWithParametersBadRequest [GOOD] >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession >> TLocksTest::Range_IncorrectNullDot1 [GOOD] >> TLocksTest::Range_IncorrectNullDot2 >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserLogin [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SuffixColumns [GOOD] Test command err: 2025-03-28T09:24:12.679523Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788787034157884:2192];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:12.679566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001efd/r3tmp/tmpr95UWn/pdisk_1.dat 2025-03-28T09:24:13.235456Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:13.239316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-03-28T09:24:13.239438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:13.241534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26687, node 1 2025-03-28T09:24:13.603343Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:13.603376Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:13.603386Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:13.603500Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11616 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:24:14.389508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:24:14.445213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153854638 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path" ... (TRUNCATED) waiting... 
TClient::Ls request: /dc-1/Dir/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153854638 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "Hash" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Path" ... (TRUNCATED) 2025-03-28T09:24:16.985566Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788803768499643:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:17.071068Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001efd/r3tmp/tmp15Cr7y/pdisk_1.dat 2025-03-28T09:24:17.173339Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:17.188003Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:17.188073Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:17.189274Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20379, node 2 2025-03-28T09:24:17.331298Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:17.331322Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:17.331329Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:17.331446Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63776 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:24:17.767820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:17.774108Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:17.785300Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:24:17.792086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:18.369027Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553163, Sender [2:7486788812358435411:2484], Recipient [2:7486788808063467442:2311]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 3 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\002\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3\010\000\000\000B\000\000\000\000\000\000\000" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 6 MaxKeys: 10 2025-03-28T09:24:18.369064Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-03-28T09:24:18.369257Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3") (type:4, value:"B\0\0\0\0\0\0\0")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-03-28T09:24:18.369436Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 77, String : ) 2025-03-28T09:24:18.369473Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 88, String : ) 2025-03-28T09:24:18.369499Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 666, String : ) 2025-03-28T09:24:18.369524Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, String : ) 2025-03-28T09:24:18.369548Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, String : ) 2025-03-28T09:24:18.369605Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 5 common prefixes: 0 2025-03-28T09:24:18.386544Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553163, Sender [2:7486788812358435415:2485], Recipient [2:7486788808063467442:2311]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 3 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\001\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3" ColumnsToReturn: 3 
ColumnsToReturn: 4 ColumnsToReturn: 5 MaxKeys: 10 2025-03-28T09:24:18.386583Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2025-03-28T09:24:18.386737Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-03-28T09:24:18.386954Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, Uint64 : 10) 2025-03-28T09:24:18.386994Z node 2 :TX_DATASHARD TRACE: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, Uint64 : 10) 2025-03-28T09:24:18.387055Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 0 >> TGRpcNewClient::YqlQueryWithParams [GOOD] >> TGRpcNewClient::YqlExplainDataQuery >> TTableProfileTests::UseDefaultProfile [GOOD] >> TTableProfileTests::OverwriteCompactionPolicy >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts >> YdbYqlClient::TestYqlLongSessionPrepareError [GOOD] >> YdbYqlClient::TestYqlLongSessionMultipleErrors >> YdbYqlClient::SecurityTokenAuthMultiTenantSDK [GOOD] >> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync >> OlapEstimationRowsCorrectness::TPCH2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCH12_100 [GOOD] Test command err: Trying to start YDB, gRPC: 5591, MsgBus: 9710 2025-03-28T09:22:42.635192Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788401558983488:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:42.688657Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001758/r3tmp/tmpEIaTAH/pdisk_1.dat 2025-03-28T09:22:43.393498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:43.393572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:43.400259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:22:43.463039Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5591, node 1 2025-03-28T09:22:43.715630Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:43.715655Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:43.715662Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:43.715760Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9710 TClient is connected to server localhost:9710 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:44.807411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:44.847413Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:47.218740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788423033820511:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:47.219182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788423033820500:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:47.219252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:47.222534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:47.236009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788423033820514:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:47.326759Z node 1 :TX_PROXY ERROR: Actor# [1:7486788423033820565:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:47.618786Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788401558983488:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:47.618842Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:47.760336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:48.070235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788427328788162:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:48.070473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788427328788162:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:48.070844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788427328788162:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:48.070986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788427328788162:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:48.071090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788427328788162:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:48.071185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788427328788162:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:48.071284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788427328788162:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:48.071380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788427328788162:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:48.071500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788427328788162:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:48.071634Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7486788427328788162:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:48.071751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788427328788162:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:48.071861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788427328788162:2364];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:48.102485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788423033820846:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:48.102549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788423033820846:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:48.102751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788423033820846:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:48.102870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788423033820846:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:48.102969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788423033820846:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:48.103058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788423033820846:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:48.103145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788423033820846:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:48.103241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788423033820846:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:48.103361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788423033820846:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:48.103458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788423033820846:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:48.103570Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486788423033820846:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:48.103687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788423033820846:2360];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:48.142321Z node 1 :TX_COL ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.646574Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.646744Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.651993Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.652019Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.659821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.661309Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.666551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.670208Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.672080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.675912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.677738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.681929Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.683588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.687812Z node 1 :TX_COLUMNSHARD_TX 
WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.690554Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.694728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.696476Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.700921Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.701872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.706337Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.707745Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.711846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.713516Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.718407Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.719755Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.724846Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.726307Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.732222Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.732387Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
2025-03-28T09:24:07.744473Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.747586Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.751323Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.754021Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.758163Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.761251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.765091Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.772003Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.775774Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.777904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.781688Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.783767Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.787965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.789408Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.794524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.795445Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:07.994315Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe10qbw8q6q84cbxbe9j57n", SessionId: ydb://session/3?node_id=1&id=NmNkZmI5Y2MtM2RiOTZmZWMtMzE0ZDNlOTgtNWJjMmUyNDY=, Slow query, duration: 33.085108s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:24:08.400214Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:08.401531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486788727976554920:10941];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-03-28T09:24:08.401904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:08.403501Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TAuthenticationWithSqlExecution::CreateAlterUserWithHash >> TGRpcYdbTest::OperationTimeout [GOOD] >> TGRpcYdbTest::OperationCancelAfter >> YdbYqlClient::TestDecimal35 [GOOD] >> YdbYqlClient::TestDecimalFullStack >> YdbS3Internal::TestS3Listing [GOOD] >> YdbS3Internal::TestAccessCheck >> YdbTableBulkUpsert::Nulls [GOOD] >> YdbTableBulkUpsert::NotNulls >> TGRpcClientLowTest::GrpcRequestProxy [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken >> YdbYqlClient::TestReadTableNotNullBorder [GOOD] >> YdbYqlClient::TestReadTableNotNullBorder2 >> YdbYqlClient::TestDecimal [GOOD] >> YdbYqlClient::TestBusySession >> YdbLogStore::AlterLogTable [FAIL] >> YdbTableBulkUpsert::SyncIndexShouldSucceed [GOOD] >> YdbTableBulkUpsert::Timeout >> YdbYqlClient::CopyTables [GOOD] >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] >> TYqlDateTimeTests::SimpleUpsertSelect >> TGRpcLdapAuthentication::LdapAuthWithInvalidRobouserPassword [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter >> TGRpcNewCoordinationClient::NodeNotFound [GOOD] >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores >> TGRpcYdbTest::ExecuteQueryWithParametersExplicitSession [GOOD] >> TGRpcYdbTest::ExplainQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableMultiShardWithDescribeAndRowLimit [GOOD] Test command err: 2025-03-28T09:24:00.993257Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788738218216131:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:00.993311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001454/r3tmp/tmpzJhJ0D/pdisk_1.dat 2025-03-28T09:24:01.885667Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:01.902520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:01.902585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:01.914285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61680, node 1 2025-03-28T09:24:02.442659Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:02.453789Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:02.453797Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:02.480835Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26220 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:02.857730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:02.943674Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jqe11jqw56chhv7z7dzezzhx, sdkBuildInfo# ydb-cpp-sdk/3.2.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33866, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.979043s 2025-03-28T09:24:03.013326Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jqe11jrf2g974hyctpz89tvq, sdkBuildInfo# ydb-cpp-sdk/3.2.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33872, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:05.195403Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jqe11myb2cxw1142pew3v3n8, sdkBuildInfo# ydb-cpp-sdk/3.2.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:33882, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:05.196188Z node 1 :TX_PROXY DEBUG: actor# [1:7486788742513183570:2141] Handle TEvProposeTransaction 2025-03-28T09:24:05.196210Z node 1 :TX_PROXY DEBUG: actor# [1:7486788742513183570:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T09:24:05.196252Z node 1 :TX_PROXY DEBUG: actor# [1:7486788742513183570:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486788759693053554:2626] 2025-03-28T09:24:05.276237Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788759693053554:2626] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:33882" 2025-03-28T09:24:05.276287Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788759693053554:2626] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:24:05.276568Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788759693053554:2626] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:24:05.276642Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788759693053554:2626] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:24:05.276787Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788759693053554:2626] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:24:05.276910Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788759693053554:2626] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:24:05.276946Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788759693053554:2626] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:24:05.277059Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788759693053554:2626] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T09:24:05.279703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:05.283898Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788759693053554:2626] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-03-28T09:24:05.283952Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788759693053554:2626] txid# 281474976710658 SEND to# [1:7486788759693053553:2337] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-03-28T09:24:05.285071Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:05.285166Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:05.285179Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:05.285207Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:05.381392Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788759693053600:2670], Recipient [1:7486788759693053742:2351]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:05.382646Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788759693053651:2701], Recipient [1:7486788759693053745:2354]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:05.382835Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788759693053605:2675], Recipient [1:7486788759693053744:2353]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:05.386868Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788759693053648:2698], Recipient [1:7486788759693053727:2344]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:05.387430Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788759693053598:2668], Recipient [1:7486788759693053724:2341]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:05.387977Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788759693053652:2702], Recipient [1:7486788759693053737:2346]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:05.391255Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788759693053603:2673], Recipient [1:7486788759693053723:2340]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:05.395696Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788759693053649:2699], Recipient [1:7486788759693053739:2348]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:05.396416Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788759693053604:2674], Recipient [1:7486788759693053725:2342]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:05.396612Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7486788759693053651:2701], Recipient [1:7486788759693053745:2354]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:24:05.397047Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7486788759693053598:2668], Recipient [1:7486788759693053724:2341]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:24:05.397139Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:7486788759693053745:2354] 2025-03-28T09:24:05.397406Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7486788759693053724:2341] 2025-03-28T09:24:05.397424Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:24:05.397579Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 
2025-03-28T09:24:05.412178Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7486788759693053605:2675], Recipient [1:7486788759693053744:2353]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:24:05.412651Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037898 actor [1:7486788759693053744:2353] 2025-03-28T09:24:05.412863Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:24:05.413567Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7486788759693053600:2670], Recipient [1:7486788759693053742:2351]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:24:05.414061Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037903 actor [1:7486788759693053742:2351] 2025-03-28T09:24:05.414259Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:24:05.432309Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7486788759693053652:2702], Recipient [1:7486788759693053737:2346]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:24:05.432740Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:7486788759693053737:2346] 2025-03-28T09:24:05.432897Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:24:05.436071Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7486788759693053603:2673], Recipient [1:7486788759693053723:2340]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:24:05.436468Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037900 actor [1:7486788759693053723:2340] 2025-03-28T09:24:05.436653Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:24:05.450964Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7486788759693053649:2699], Recipient [1:7486788759693053739:2348]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:24:05.452757Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:74867887596930536 ... 
435072, Sender [10:7486788838140659432:2343], Recipient [10:7486788838140659432:2343]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:24:25.309491Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:24:25.309521Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2025-03-28T09:24:25.309538Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:24:25.309558Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715681] at 72075186224037897 for WaitForStreamClearance 2025-03-28T09:24:25.309572Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit WaitForStreamClearance 2025-03-28T09:24:25.309590Z node 10 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715681] at 72075186224037897 2025-03-28T09:24:25.309602Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-03-28T09:24:25.309616Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit WaitForStreamClearance 2025-03-28T09:24:25.309627Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit ReadTableScan 2025-03-28T09:24:25.309639Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2025-03-28T09:24:25.309804Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Continue 2025-03-28T09:24:25.309817Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:24:25.309828Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-03-28T09:24:25.309840Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-03-28T09:24:25.309851Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-03-28T09:24:25.309886Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-03-28T09:24:25.310245Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [10:7486788842435627771:2155], Recipient [10:7486788838140659432:2343]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-28T09:24:25.310265Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-28T09:24:25.310306Z node 10 :READ_TABLE_API DEBUG: [10:7486788842435627749:2402] Adding quota request to queue ShardId: 0, TxId: 281474976715680 2025-03-28T09:24:25.310335Z node 10 :READ_TABLE_API DEBUG: [10:7486788842435627749:2402] Assign stream quota to Shard 0, Quota 5, TxId 281474976715680 Reserved: 5 of 25, Queued: 0 2025-03-28T09:24:25.310486Z node 10 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 5 2025-03-28T09:24:25.310713Z node 10 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037897, TxId: 281474976715681, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 4 2025-03-28T09:24:25.310933Z node 10 :READ_TABLE_API DEBUG: [10:7486788842435627749:2402] got stream part, size: 75, RU required: 128 rate limiter absent 2025-03-28T09:24:25.311304Z node 10 :READ_TABLE_API DEBUG: [10:7486788842435627749:2402] Starting inactivity timer 
for 600.000000s with tag 3 2025-03-28T09:24:25.313053Z node 10 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037897, TxId: 281474976715681, PendingAcks: 0 2025-03-28T09:24:25.313093Z node 10 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037897, TxId: 281474976715681, MessageQuota: 4 2025-03-28T09:24:25.315232Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [10:7486788842435627750:2402], Recipient [10:7486788838140659432:2343]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715681 2025-03-28T09:24:25.315261Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2025-03-28T09:24:25.315275Z node 10 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037897 txId 281474976715681 2025-03-28T09:24:25.315310Z node 10 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037897 txId 281474976715681 2025-03-28T09:24:25.315402Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [10:7486788842435627750:2402], Recipient [10:7486788838140659432:2343]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715681 2025-03-28T09:24:25.315417Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2025-03-28T09:24:25.315491Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7486788842435627750:2402], Recipient [10:7486788838140659432:2343]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743153865341 TxId: 281474976715680 2025-03-28T09:24:25.315653Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037897 2025-03-28T09:24:25.315665Z node 10 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715681, at: 72075186224037897 2025-03-28T09:24:25.315764Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [10:7486788838140659432:2343], Recipient [10:7486788838140659432:2343]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:24:25.315783Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:24:25.315810Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2025-03-28T09:24:25.315828Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:24:25.315851Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715681] at 72075186224037897 for ReadTableScan 2025-03-28T09:24:25.315867Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit ReadTableScan 2025-03-28T09:24:25.315886Z node 10 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715681] at 72075186224037897 error: , IsFatalError: 0 2025-03-28T09:24:25.315914Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-03-28T09:24:25.315933Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit ReadTableScan 2025-03-28T09:24:25.315946Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit FinishPropose 2025-03-28T09:24:25.315959Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit FinishPropose 2025-03-28T09:24:25.315996Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 
72075186224037897 is DelayCompleteNoMoreRestarts 2025-03-28T09:24:25.316013Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit FinishPropose 2025-03-28T09:24:25.316025Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715681] at 72075186224037897 to execution unit CompletedOperations 2025-03-28T09:24:25.316034Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715681] at 72075186224037897 on unit CompletedOperations 2025-03-28T09:24:25.316070Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715681] at 72075186224037897 is Executed 2025-03-28T09:24:25.316079Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715681] at 72075186224037897 executing on unit CompletedOperations 2025-03-28T09:24:25.316103Z node 10 :TX_DATASHARD TRACE: Execution plan for [0:281474976715681] at 72075186224037897 has finished 2025-03-28T09:24:25.316116Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:24:25.316129Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-03-28T09:24:25.316142Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-03-28T09:24:25.316153Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-03-28T09:24:25.317684Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-03-28T09:24:25.317709Z node 10 :TX_DATASHARD TRACE: Complete execution for [0:281474976715681] at 72075186224037897 on unit FinishPropose 2025-03-28T09:24:25.317740Z node 10 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715681 at tablet 72075186224037897 send to client, exec latency: 7 ms, propose latency: 9 ms, status: COMPLETE 2025-03-28T09:24:25.317791Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037897 2025-03-28T09:24:25.319537Z node 10 :READ_TABLE_API NOTICE: [10:7486788842435627749:2402] Finish grpc stream, status: 400000 2025-03-28T09:24:25.323224Z node 10 :GRPC_SERVER DEBUG: [0x51a0000daa80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-28T09:24:25.323496Z node 10 :GRPC_SERVER DEBUG: [0x51a000195080] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-28T09:24:25.323704Z node 10 :GRPC_SERVER DEBUG: [0x51a000082880] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-28T09:24:25.323886Z node 10 :GRPC_SERVER DEBUG: [0x51a000044a80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-28T09:24:25.324043Z node 10 :GRPC_SERVER DEBUG: [0x51a000007e80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:25.324233Z node 10 :GRPC_SERVER DEBUG: [0x51a00019bc80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-28T09:24:25.324324Z node 10 :GRPC_SERVER DEBUG: [0x51a000164480] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:25.324496Z node 10 :GRPC_SERVER DEBUG: [0x51a000080a80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:25.324707Z node 10 :GRPC_SERVER DEBUG: [0x51a00005a080] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:25.324883Z node 10 :GRPC_SERVER DEBUG: 
[0x51a000166880] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0
2025-03-28T09:24:25.325031Z node 10 :GRPC_SERVER DEBUG: [0x51a00013e080] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0
2025-03-28T09:24:25.325174Z node 10 :GRPC_SERVER DEBUG: [0x51a000102680] received request Name# TestShardControl ok# false data# peer# current inflight# 0
2025-03-28T09:24:25.325342Z node 10 :GRPC_SERVER DEBUG: [0x51a00011dc80] received request Name# RegisterNode ok# false data# peer# current inflight# 0
2025-03-28T09:24:25.325858Z node 10 :GRPC_SERVER DEBUG: [0x51a000101a80] received request Name# CmsRequest ok# false data# peer# current inflight# 0
2025-03-28T09:24:25.326027Z node 10 :GRPC_SERVER DEBUG: [0x51a00011e880] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0
2025-03-28T09:24:25.326194Z node 10 :GRPC_SERVER DEBUG: [0x51a000103280] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0
2025-03-28T09:24:25.326350Z node 10 :GRPC_SERVER DEBUG: [0x51a00019b080] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> OlapEstimationRowsCorrectness::TPCH2 [GOOD]
Test command err:
Trying to start YDB, gRPC: 16274, MsgBus: 7085
2025-03-28T09:22:44.460138Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788408416570650:2203];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:22:44.460618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001747/r3tmp/tmpnC9b9C/pdisk_1.dat
2025-03-28T09:22:45.208635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:22:45.208733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:22:45.215719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:22:45.271447Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 16274, node 1
2025-03-28T09:22:45.493426Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:22:45.493458Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:22:45.493466Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:22:45.493594Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:7085
TClient is connected to server localhost:7085
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:46.388752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:48.770652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788425596440351:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:22:48.770803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:22:48.771420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788425596440363:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:22:48.775498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:22:48.789431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788425596440365:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:48.844165Z node 1 :TX_PROXY ERROR: Actor# [1:7486788425596440416:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:49.187749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:49.480748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788429891408010:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:49.480916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788429891408010:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:49.481152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788429891408010:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:49.481287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788429891408010:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:49.481403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788429891408010:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:49.481518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788429891408010:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:49.481620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788429891408010:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:49.481718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788429891408010:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:49.481817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788429891408010:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:49.481939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788429891408010:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:49.482036Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7486788429891408010:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:49.482125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788429891408010:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:49.515860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429891407975:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:49.515937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429891407975:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:49.516116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429891407975:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:49.516224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429891407975:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:49.516348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429891407975:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:49.516441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429891407975:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:49.516540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429891407975:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:49.516651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429891407975:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:49.516773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429891407975:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:49.516898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429891407975:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:49.517001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429891407975:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:49.517088Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7486788429891407975:2352];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:49.536781Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486788429891407977:2353];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:49.536829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[1:7486788429891407977:2353];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:49.537004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_i ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.551869Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039248;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.556815Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.561763Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.566198Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.574871Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039254;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.576506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.580389Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039300;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.581979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039344;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.584642Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039292;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.589919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039290;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.590797Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039278;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.595782Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039226;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.595837Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039284;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.601319Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039228;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.601402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039294;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.607148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039244;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.611063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039250;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.611457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.615964Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039288;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.622457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039274;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.624135Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039220;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.628853Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039236;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.631628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039238;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.636323Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039232;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.636914Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039262;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.641873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039234;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.641910Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039218;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.648270Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.649348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039260;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.653950Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039246;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.654768Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039256;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.659569Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039268;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.659794Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.664934Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039266;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.665304Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039222;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.670211Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039230;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.670858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039242;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.676017Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039280;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.676231Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039304;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.681769Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039252;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.681776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039258;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.687238Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:06.687407Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039308;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:24:06.692629Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039214;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:24:06.695703Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039296;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:24:06.822310Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe10p8p7m8vkh7kfxhrzfr2", SessionId: ydb://session/3?node_id=1&id=OTBlYzZiZjEtMjJhOTcxYjYtYTc2ZmYyNjMtY2FlYWI0NTA=, Slow query, duration: 33.038809s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n    id1 Int32 NOT NULL,\n    PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n    id2 Int64 NOT NULL,\n    t1_id1 Int64 NOT NULL,\n    -- random_field2 Int32\n    PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n    id3 Int16 NOT NULL,\n    -- random_field3 Int32\n    PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-28T09:24:07.109392Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:24:07.109976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:24:07.110335Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486788721949240587:11025];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094;
2025-03-28T09:24:07.110680Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> YdbYqlClient::TestTzTypesFullStack [GOOD]
>> YdbYqlClient::TestVariant
>> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesServerCerts [GOOD]
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert
>> TGRpcNewClient::YqlExplainDataQuery [GOOD]
>> TGRpcNewCoordinationClient::CheckUnauthorized
>> YdbYqlClient::SecurityTokenAuthMultiTenantSDKAsync [GOOD]
>> YdbYqlClient::SimpleColumnFamilies
>> LocalityOperation::LocksFromAnotherTenants+UseSink [GOOD]
>> LocalityOperation::LocksFromAnotherTenants-UseSink
>> KqpJoinOrder::CanonizedJoinOrderTPCH21 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::ColumnFamiliesExternalBlobsWithoutDefaultProfile [GOOD]
Test command err:
2025-03-28T09:23:54.846811Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788709155711133:2212];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:23:54.863575Z node
1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001456/r3tmp/tmpIGl5n4/pdisk_1.dat 2025-03-28T09:23:55.438069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:55.438185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:55.438660Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:55.449127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27033, node 1 2025-03-28T09:23:55.847332Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:55.847354Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:55.847365Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:55.847472Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2638 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:56.290879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:00.999173Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788738370969568:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:01.047069Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001456/r3tmp/tmppGDPDB/pdisk_1.dat 2025-03-28T09:24:01.432833Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:01.485517Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:01.485601Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:01.489497Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14279, node 4 2025-03-28T09:24:01.683321Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:01.683342Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:01.683350Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:01.683492Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61872 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:02.064070Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:06.279689Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788763370816038:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:06.279751Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001456/r3tmp/tmpp3XZuF/pdisk_1.dat 2025-03-28T09:24:06.682925Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:06.719362Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:06.719464Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:06.724406Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6132, node 7 2025-03-28T09:24:06.979459Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:06.979479Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:06.979487Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:06.979630Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9907 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:07.280692Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:9907 2025-03-28T09:24:07.589341Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:07.589797Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:07.589820Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:07.601497Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant waiting... 
2025-03-28T09:24:07.620876Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153847666, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:24:07.623956Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-03-28T09:24:07.624018Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2025-03-28T09:24:07.625352Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2025-03-28T09:24:07.632112Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:07.632815Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:07.632848Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:07.634405Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... 2025-03-28T09:24:08.140723Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486788772887149664:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:08.140769Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:24:08.296672Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:08.296765Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:08.311456Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-03-28T09:24:08.319045Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:08.535974Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153848576, t ... 
97 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:15.404282Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153855429, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:24:15.410008Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-03-28T09:24:15.410171Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2025-03-28T09:24:15.412138Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2025-03-28T09:24:17.620169Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:24:17.621389Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:17.621418Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:24:17.621537Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTableIndex Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex, operationId: 281474976710660:1, transaction: WorkingDir: "/Root/ydb_ut_tenant/Table-1" OperationType: ESchemeOpCreateTableIndex CreateTableIndex { Name: "MyIndex" KeyColumnNames: "Value" Type: EIndexTypeGlobal IndexImplTableDescriptions { } } Internal: false FailOnExist: false AllowCreateInTempDir: false, at schemeshard: 72057594046644480 2025-03-28T09:24:17.621708Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:17.621756Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1/MyIndex/indexImplTable, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-03-28T09:24:17.622581Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:17.643862Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE WITH INDEXES, path: /Root/ydb_ut_tenant/Table-1 2025-03-28T09:24:17.658254Z node 10 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923065101920}: tablet 72075186224037891 could not find a group for channel 2 pool name_ydb_ut_tenant_kind_hdd 2025-03-28T09:24:17.658294Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923065101920}: tablet 72075186224037891 wasn't changed 2025-03-28T09:24:17.764282Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153857800, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:24:17.777846Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2025-03-28T09:24:17.777965Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:1 2025-03-28T09:24:17.777978Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:2 
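The CREATE TABLE WITH INDEXES operation above expands into three suboperations under one opId (281474976710660): the table itself, the MyIndex scheme entry, and its hidden indexImplTable — which is why three "Operation and all the parts is done" records follow the plan step. The HIVE ERROR about pool name_ydb_ut_tenant_kind_hdd means a tablet channel could not yet be bound to the tenant's hdd storage pool; the "wasn't changed" WARN shows Hive simply left the tablet as-is to retry later. A hedged sketch of the client call that produces this shape, assuming the public C++ SDK table builder (column names are taken from the scheme log; the API usage is an assumption, not the suite's code):

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

#include <utility>

// Creating a table with a global secondary index yields the
// table + MyIndex + indexImplTable trio seen in the schemeshard log.
NYdb::TStatus CreateIndexedTable(NYdb::NTable::TSession& session) {
    auto desc = NYdb::NTable::TTableBuilder()
        .AddNullableColumn("Key", NYdb::EPrimitiveType::Uint64)
        .AddNullableColumn("Value", NYdb::EPrimitiveType::Utf8)
        .SetPrimaryKeyColumn("Key")
        .AddSecondaryIndex("MyIndex", "Value")  // EIndexTypeGlobal in the scheme log
        .Build();
    return session.CreateTable("/Root/ydb_ut_tenant/Table-1", std::move(desc))
        .GetValueSync();
}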
2025-03-28T09:24:17.824975Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-03-28T09:24:17.825615Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:24:21.149246Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788826776664463:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:21.149444Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001456/r3tmp/tmpCENxM6/pdisk_1.dat 2025-03-28T09:24:21.402080Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:21.471383Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:21.471490Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:21.474937Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8927, node 13 2025-03-28T09:24:21.660027Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:21.660056Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:21.660065Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:21.660245Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30248 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:22.008248Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:30248 2025-03-28T09:24:22.476904Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:22.477309Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:22.477334Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:22.483063Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/ydb_ut_tenant waiting... 2025-03-28T09:24:22.500398Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153862541, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:24:22.502512Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-03-28T09:24:22.502555Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710658, publications: 2, subscribers: 1 2025-03-28T09:24:22.504388Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710658, subscribers: 1 2025-03-28T09:24:22.509870Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /Root/ydb_ut_tenant, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:22.510391Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:22.510411Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:22.511930Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /Root/ydb_ut_tenant waiting... 
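Tenant creation in this log is always two schemeshard operations: TCreateSubDomain, audited as CREATE DATABASE, registers /Root/ydb_ut_tenant, and TAlterSubDomain, audited as ALTER DATABASE, then binds resources to it; the "Publication still in progress ... subscribers: 1" / "Publication complete, notify & remove" pair tracks the scheme-board publication each operation waits on. These audit records correspond to a console CreateDatabase call; a hedged sketch of such a request follows, with field names recalled from ydb/public/api/protos/ydb_cms.proto and to be treated as assumptions:

#include <ydb/public/api/protos/ydb_cms.pb.h>

// Field names below are assumptions recalled from ydb_cms.proto.
Ydb::Cms::CreateDatabaseRequest MakeTenantRequest() {
    Ydb::Cms::CreateDatabaseRequest request;
    request.set_path("/Root/ydb_ut_tenant");
    // One storage unit of kind "hdd" -- matching pool kind_hdd elsewhere in this log.
    auto* storage = request.mutable_resources()->add_storage_units();
    storage->set_unit_kind("hdd");
    storage->set_count(1);
    return request;
}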
2025-03-28T09:24:23.024607Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7486788837194690555:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:23.024670Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:24:23.093311Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:23.093402Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:23.103371Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-03-28T09:24:23.104598Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:23.507781Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153863549, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:24:23.515842Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 2025-03-28T09:24:23.516000Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710659, publications: 1, subscribers: 1 2025-03-28T09:24:23.517993Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710659, subscribers: 1 2025-03-28T09:24:25.745164Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/ydb_ut_tenant/Table-1, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:24:25.750870Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:25.750917Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:24:25.755764Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root/ydb_ut_tenant, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/ydb_ut_tenant/Table-1 2025-03-28T09:24:25.895353Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153865930, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:24:25.916485Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2025-03-28T09:24:25.943604Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-03-28T09:24:25.943984Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected >> TYqlDateTimeTests::DatetimeKey [GOOD] >> YdbScripting::Params >> YdbOlapStore::ManyTables >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] >> TGRpcYdbTest::OperationCancelAfter [GOOD] >> TGRpcYdbTest::KeepAlive >> TGRpcClientLowTest::GrpcRequestProxyWithoutToken [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore >> TTableProfileTests::OverwriteCompactionPolicy [GOOD] >> TTableProfileTests::OverwriteExecutionPolicy >> YdbYqlClient::TestReadTableNotNullBorder2 [GOOD] >> YdbYqlClient::TestReadTableSnapshot >> 
TAuthenticationWithSqlExecution::CreateAlterUserWithHash [GOOD] >> TDatabaseQuotas::DisableWritesToDatabase >> YdbS3Internal::TestAccessCheck [GOOD] >> YdbScripting::BasicV0 >> YdbYqlClient::TestBusySession [GOOD] >> YdbYqlClient::TestConstraintViolation >> YdbTableBulkUpsert::NotNulls [GOOD] >> YdbTableBulkUpsert::Errors ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDateTimeTests::DatetimeKey [GOOD] Test command err: 2025-03-28T09:23:43.479062Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788661812478660:2223];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:43.479429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001465/r3tmp/tmpXKlizu/pdisk_1.dat 2025-03-28T09:23:44.253317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:44.253419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:44.267443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:44.273255Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13093, node 1 2025-03-28T09:23:44.442823Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:23:44.449010Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:23:44.770856Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:44.770876Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:44.770889Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:44.771042Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15664 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:45.319735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:15664 2025-03-28T09:23:45.911675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:45.949256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:46.495476Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486788678432737156:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:46.495539Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:23:46.671232Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:46.682876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:46.726067Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T09:23:46.779640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15664 2025-03-28T09:23:47.612655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:23:48.463600Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788661812478660:2223];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:48.469011Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:15664 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743153828600 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-03-28T09:23:49.221315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:15664 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1743153830330 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-03-28T09:23:51.404722Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-28T09:23:51.410054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:23:51.574590Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486788678432737156:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:51.578421Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:53.943374Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788705317179320:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:53.943434Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001465/r3tmp/tmpEuVfAP/pdisk_1.dat 2025-03-28T09:23:54.382622Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:54.414604Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:54.414915Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:54.423813Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12832, node 4 2025-03-28T09:23:54.839429Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:54.839456Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:54.839464Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:54.839630Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14422 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:55.225945Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:55.243884Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:14422 2025-03-28T09:23:55.664584Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:55.698289Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:56.216370Z node 6 :METADATA_PRO ... initialize from file: (empty maybe) 2025-03-28T09:24:14.331211Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:14.331381Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21998 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:14.796497Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
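From this point on, every suite's first DML queries hit the same bootstrap race, visible immediately below: KQP_WORKLOAD_SERVICE reports "Resource pool default not found or you don't have access permissions", a TPoolCreatorActor creates the pool, a concurrent creation fails benignly ("path exist, request accepts it"), and the scheduled "doublechecking" retry then succeeds. Client code rides this window out with retries; a minimal sketch, assuming the public YDB C++ SDK rather than the suites' internal helpers:

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

// RetryOperationSync re-runs the closure on retryable statuses, which covers
// the "Resource pool default not found ... doublechecking" window in the log.
NYdb::TStatus RunFirstQuery(NYdb::NTable::TTableClient& client) {
    return client.RetryOperationSync([](NYdb::NTable::TSession session) -> NYdb::TStatus {
        auto result = session.ExecuteDataQuery(
            "SELECT 1;",
            NYdb::NTable::TTxControl::BeginTx().CommitTx()).GetValueSync();
        return result;  // a retryable status triggers another attempt
    });
}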
2025-03-28T09:24:18.625766Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:18.628488Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7486788791499556630:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:18.628568Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:24:18.751261Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788812974394329:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:18.751391Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788812974394340:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:18.751456Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:18.757743Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:18.789344Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486788812974394343:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:18.892281Z node 10 :TX_PROXY ERROR: Actor# [10:7486788812974394413:2786] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:19.078168Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1225wetkhe9n2k9k9r35t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDhmZTJiYzYtMTRlMWZkZjMtYWZjMGU0MzQtNTUzZjU5ZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:19.364487Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe122hrb5fr7s6aemqpk5pv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDhmZTJiYzYtMTRlMWZkZjMtYWZjMGU0MzQtNTUzZjU5ZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:19.632370Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe122sgfcd6spmay46v7fck, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDhmZTJiYzYtMTRlMWZkZjMtYWZjMGU0MzQtNTUzZjU5ZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:19.841076Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1231r23qkw0zdyzzcajw1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NDhmZTJiYzYtMTRlMWZkZjMtYWZjMGU0MzQtNTUzZjU5ZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:21.847338Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788827146578217:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:21.847432Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001465/r3tmp/tmpZKHFeP/pdisk_1.dat 2025-03-28T09:24:22.131789Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:22.178307Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:22.178435Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:22.186420Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3199, node 13 2025-03-28T09:24:22.403490Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:22.403517Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:22.403528Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:22.403708Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32438 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:22.900302Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:26.847627Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7486788827146578217:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:26.847712Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:24:26.955588Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:27.141099Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486788852916383207:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:27.141188Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486788852916383199:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:27.141343Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:27.147200Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:27.190342Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7486788852916383213:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:27.268198Z node 13 :TX_PROXY ERROR: Actor# [13:7486788852916383286:2794] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:27.372048Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe12ac30871ktwyc32fy2x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTVmMGNlYzAtMmM4OWY4N2UtYWZlYjVjYTgtMWRlNGFlZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:27.557841Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe12akpfsrhapdymdavppbn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTVmMGNlYzAtMmM4OWY4N2UtYWZlYjVjYTgtMWRlNGFlZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:27.812715Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe12asb4sfd6hmbhxgzq38x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTVmMGNlYzAtMmM4OWY4N2UtYWZlYjVjYTgtMWRlNGFlZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:28.090643Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe12b1g2kv70vjx8ggz8xhd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTVmMGNlYzAtMmM4OWY4N2UtYWZlYjVjYTgtMWRlNGFlZjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestYqlLongSessionMultipleErrors [GOOD] Test command err: 2025-03-28T09:24:07.747947Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788767197597705:2280];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:07.747978Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00144c/r3tmp/tmpoEA7g8/pdisk_1.dat 2025-03-28T09:24:08.393720Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:08.413974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:08.414077Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:08.435125Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9406, node 1 2025-03-28T09:24:08.842972Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:08.842994Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:08.843004Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:08.843139Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2816 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:09.284717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:11.647148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788784377467699:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:11.647269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:11.941612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:12.194834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788788672435184:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:12.194947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:12.195362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788788672435189:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:12.199488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:12.229007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788788672435191:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:24:12.327676Z node 1 :TX_PROXY ERROR: Actor# [1:7486788788672435260:2809] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:12.393636Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486788788672435278:2356], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:43: Error: Failed to convert type: Struct<'Key':String,'Value':String> to Struct<'Key':Uint32?,'Value':String?>
:2:43: Error: Failed to convert 'Key': String to Optional
:2:43: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-28T09:24:12.395756Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODZhZWFhYmQtNjUzMjUzNmMtYTAxZDhhYzMtYmYzZWYxOGQ=, ActorId: [1:7486788784377467672:2334], ActorState: ExecuteState, TraceId: 01jqe11vs11f2zvyvv8j9qa6z1, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:24:14.063041Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788796809479100:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:14.063113Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00144c/r3tmp/tmphZutqw/pdisk_1.dat 2025-03-28T09:24:14.350226Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:14.382613Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:14.382797Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:14.387339Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8189, node 4 2025-03-28T09:24:14.547217Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:14.547242Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:14.547250Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:14.547381Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13511 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:14.764176Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:17.435837Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788809694382047:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:17.435922Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:19.163251Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788817154010960:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:19.163340Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00144c/r3tmp/tmp314i1Y/pdisk_1.dat 2025-03-28T09:24:19.355672Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:19.387487Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:19.387568Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:19.395320Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3518, node 7 2025-03-28T09:24:19.487677Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:19.487704Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:19.487714Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:19.487868Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20482 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:19.733499Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:22.383853Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788830038913808:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:22.383938Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:22.400255Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:22.545073Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788830038913985:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:22.545176Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:22.545626Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788830038913990:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:22.549890Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:22.577866Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486788830038913992:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:24:22.644623Z node 7 :TX_PROXY ERROR: Actor# [7:7486788830038914061:2794] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:22.755803Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe125we82h8ghter2npk5sd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Njc1M2NhZjItNGQ3OWY1OWEtZmQwOTlmODAtZmU2Y2IwNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:22.822162Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7486788830038914103:2363], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:24:22.823522Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=Njc1M2NhZjItNGQ3OWY1OWEtZmQwOTlmODAtZmU2Y2IwNzM=, ActorId: [7:7486788830038913790:2335], ActorState: ExecuteState, TraceId: 01jqe1263zd009388qctjadp8m, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:24:22.916524Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqe1265n1cq1sksrqk1trcvd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Njc1M2NhZjItNGQ3OWY1OWEtZmQwOTlmODAtZmU2Y2IwNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:23.108295Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqe1268vcwpa0vzvqsxbtfw7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Njc1M2NhZjItNGQ3OWY1OWEtZmQwOTlmODAtZmU2Y2IwNzM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:24.849385Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788841310756190:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:24.849459Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00144c/r3tmp/tmpXSAuYZ/pdisk_1.dat 2025-03-28T09:24:25.044084Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:25.077488Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:25.077578Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:25.081478Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4305, node 10 2025-03-28T09:24:25.246947Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:25.246972Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:25.246982Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:25.250996Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26238 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:25.633321Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:28.446886Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788858490626433:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:28.446995Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:28.463424Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:28.595995Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788858490626597:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:28.596093Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:28.596317Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788858490626602:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:28.600660Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:28.637939Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486788858490626604:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:24:28.731810Z node 10 :TX_PROXY ERROR: Actor# [10:7486788858490626673:2786] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:28.750929Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7486788858490626693:2355], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:24:28.752638Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=N2U5NDExNmYtZGE5ZGJjYS1iN2VlMDM4ZS1hOWIzNjViMw==, ActorId: [10:7486788858490626430:2335], ActorState: ExecuteState, TraceId: 01jqe12bsd04k3vc3mpnd3d0wb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:24:28.782620Z node 10 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [10:7486788858490626711:2361], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:25: Error: At function: KiWriteTable!
:2:25: Error: Cannot find table 'db.[Root/BadTable2]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:24:28.784177Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=N2U5NDExNmYtZGE5ZGJjYS1iN2VlMDM4ZS1hOWIzNjViMw==, ActorId: [10:7486788858490626430:2335], ActorState: ExecuteState, TraceId: 01jqe12byzc63epyrxm9zx71bt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> YdbYqlClient::TestDecimalFullStack [GOOD] >> YdbYqlClient::TestDescribeDirectory >> TGRpcNewCoordinationClient::MultipleSessionsSemaphores [GOOD] >> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore >> TGRpcYdbTest::CreateTableBadRequest >> YdbYqlClient::CreateAndAltertTableWithCompactionPolicy [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningByLoad >> TGRpcLdapAuthentication::LdapAuthWithInvalidSearchFilter [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidLogin >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] >> YdbTableBulkUpsert::Timeout [GOOD] >> YdbTableBulkUpsert::RetryOperationSync >> YdbYqlClient::TestVariant [GOOD] >> YdbYqlClient::TestTransactionQueryError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH21 [GOOD] Test command err: Trying to start YDB, gRPC: 1465, MsgBus: 26115 2025-03-28T09:22:43.987023Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788407706933693:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:44.007077Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001748/r3tmp/tmppi8hA1/pdisk_1.dat 2025-03-28T09:22:44.660663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:44.660748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:44.696277Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:44.697803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1465, node 1 2025-03-28T09:22:45.047135Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:45.047154Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:45.047160Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:45.047256Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26115 TClient is connected to server localhost:26115 WaitRootIsUp 'Root'... 
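For context: the repeated "Resource pool default not found or you don't have access permissions" warnings above are the workload manager lazily bootstrapping the default pool on first use: the fetch fails with NOT_FOUND, a TPoolCreatorActor creates the pool, and any concurrent creator that gets "path exist, request accepts it" treats the race as success. A sketch of pre-creating a pool explicitly so first queries skip this dance (headers as in the sketch above) — assuming current YQL workload-manager DDL and that scheme queries accept it; the pool name and limits are illustrative placeholders:

    // Creates the pool up front; a "path exist" answer means another actor
    // won the creation race, which — as in the log — is treated as success.
    void EnsurePool(NYdb::NTable::TTableClient& client) {
        auto session = client.CreateSession().GetValueSync().GetSession();
        auto status = session.ExecuteSchemeQuery(R"(
            CREATE RESOURCE POOL sample_pool WITH (
                CONCURRENT_QUERY_LIMIT = 10,
                QUEUE_SIZE = 100
            );
        )").GetValueSync();
        Y_ABORT_UNLESS(status.IsSuccess() ||
                       status.GetIssues().ToString().Contains("path exist"));
    }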
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:45.790074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:45.806921Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:48.256624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788429181770702:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:48.256724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:48.258909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788429181770714:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:48.263748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:48.278184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788429181770716:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:48.355372Z node 1 :TX_PROXY ERROR: Actor# [1:7486788429181770767:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:48.739779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:48.970788Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788407706933693:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:48.970957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:22:49.063734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429181771015:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:49.063987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429181771015:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:49.064260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429181771015:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:49.064374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429181771015:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:49.064481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429181771015:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:49.064584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429181771015:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:49.064711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429181771015:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:49.064833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429181771015:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:49.064939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429181771015:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:49.065067Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7486788429181771015:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:49.065199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429181771015:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:49.065333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788429181771015:2351];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:49.067310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788429181771019:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:49.067357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788429181771019:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:49.067561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788429181771019:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:49.067676Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788429181771019:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:49.067784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788429181771019:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:49.067914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788429181771019:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:49.068036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788429181771019:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:49.068157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788429181771019:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:49.068276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788429181771019:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:49.068381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788429181771019:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:49.068524Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7486788429181771019:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:49.068660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788429181771019:2353];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:49.106888Z node 1 :TX_ ... tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.374866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.375127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.381008Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.381643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.396183Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.400620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.401558Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.407562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.407562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039195;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.413613Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.414205Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.419689Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.419689Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039327;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.424746Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.426109Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.430403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.432243Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.436678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.438263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.442190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.444415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.447779Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.449783Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.452614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.455718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.457873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.461078Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.463691Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.466412Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.469176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.471915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.475659Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.478566Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.480477Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039213;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.485217Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.486559Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.491754Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.502784Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.504062Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.509172Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.509449Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.515800Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.515808Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.523593Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.523997Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.608637Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:08.724859Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe10pjc9sjb5npnzyqp3pvy", SessionId: ydb://session/3?node_id=1&id=ZThkYzM0MTktNGM2ZWMzNzEtYTNjZTkwOWEtZDA0NWI5Nzk=, Slow query, duration: 34.631681s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:24:09.057740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:09.057740Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:09.058352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> TGRpcNewCoordinationClient::CheckUnauthorized [GOOD] >> TGRpcNewCoordinationClient::CreateAlter >> TYqlDateTimeTests::SimpleUpsertSelect [GOOD] >> TYqlDateTimeTests::TimestampKey >> YdbQueryService::TestCreateAndAttachSession >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyHosts >> TColumnShardTestSchema::RebootExternalTTL [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvideIncorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::RetryOperationLimitedDuration [GOOD] Test command err: 2025-03-28T09:23:42.908655Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788660241848038:2233];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:42.918372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001470/r3tmp/tmpdVPSOc/pdisk_1.dat 2025-03-28T09:23:43.710651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:43.712689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-03-28T09:23:43.726004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:43.769263Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20450, node 1 2025-03-28T09:23:44.166116Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:44.166142Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:44.166151Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:44.166260Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2397 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:44.855442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:47.391037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788681716685447:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:47.391157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:47.903630Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788660241848038:2233];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:47.903691Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:48.237354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:23:48.452196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:23:48.651028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788686011652963:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:48.651116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:48.651734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788686011652968:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:48.655825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-28T09:23:48.698490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788686011652970:2363], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-28T09:23:48.786654Z node 1 :TX_PROXY ERROR: Actor# [1:7486788686011653048:2838] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:48.853974Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7486788686011653071:2850], for# test_user@builtin, access# DescribeSchema 2025-03-28T09:23:48.853999Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7486788686011653071:2850], for# test_user@builtin, access# DescribeSchema 2025-03-28T09:23:48.872384Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486788686011653059:2367], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[Root/Test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:23:48.873971Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGVmZTA5MTYtNWEzMGE2MDQtYjZjNzE5ZTktYWExZDJkMTE=, ActorId: [1:7486788686011652959:2357], ActorState: ExecuteState, TraceId: 01jqe114s05j5x438czkfr92d8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:23:50.966858Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788693801648399:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:50.972049Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001470/r3tmp/tmpRm8StK/pdisk_1.dat 2025-03-28T09:23:51.202731Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:51.235476Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:51.235570Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:51.245331Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32493, node 4 2025-03-28T09:23:51.371065Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:51.371085Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:51.371092Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:51.371213Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24622 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:51.816211Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:54.691912Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788710981518609:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:54.692001Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:54.718898Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:23:54.865832Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788710981518772:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:54.865937Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:54.866215Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788710981518777:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:54.869764Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:23:54.912669Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorAc ... 8715276486153:2788] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:55.169574Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe11avg83sv8w5hb2wdmsdm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YTQ3NjVjNjAtNjYxYmYxY2ItZjBmYmNkMzEtZTBiMTNlNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:23:55.387673Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqe11b64brz80emh2jzywp6d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YTUxOGE1YzAtZjEwZmNkYTUtZGYzZjdhMTYtNWZkZGYyNGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:23:57.586250Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788724553593706:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:57.586407Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001470/r3tmp/tmpoC110f/pdisk_1.dat 2025-03-28T09:23:57.853173Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:57.930423Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:57.930508Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:57.947722Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26340, node 7 2025-03-28T09:23:58.307644Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:58.307668Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:58.307676Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:58.307826Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27349 WaitRootIsUp 'Root'... 
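For context: the "Previous query attempt was finished with unsuccessful status ...: Sending retry attempt N of 5" lines in this block come from YdbYqlClient::RetryOperationLimitedDuration, which drives the SDK retry loop with injected failures (OVERLOADED, CLIENT_RESOURCE_EXHAUSTED, UNAVAILABLE, BAD_SESSION, SESSION_BUSY). A minimal sketch of the client-side pattern (headers as in the sketch above), assuming the SDK's RetryOperationSync API; the query and retry budget are placeholders, and the wall-clock limit the test also exercises is not shown:

    // Retryable statuses are re-attempted up to MaxRetries with the SDK's
    // backoff policy; terminal statuses are returned to the caller as-is.
    NYdb::TStatus UpsertWithRetries(NYdb::NTable::TTableClient& client) {
        return client.RetryOperationSync(
            [](NYdb::NTable::TSession session) -> NYdb::TStatus {
                return session.ExecuteDataQuery(
                    "UPSERT INTO `Root/Test` (Key, Value) VALUES (1u, \"a\");",
                    NYdb::NTable::TTxControl::BeginTx(
                        NYdb::NTable::TTxSettings::SerializableRW()).CommitTx()
                ).GetValueSync();
            },
            NYdb::NTable::TRetryOperationSettings().MaxRetries(5));
    }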
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:58.758296Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:02.560767Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486788724553593706:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:02.560851Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 2025-03-28T09:24:04.272834Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788754618365774:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:04.272905Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788754618365782:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:04.272951Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:04.278770Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:04.315492Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486788754618365788:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:24:04.407930Z node 7 :TX_PROXY ERROR: Actor# [7:7486788754618365857:2704] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 2025-03-28T09:24:09.220377Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788774055330420:2146];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:09.223285Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001470/r3tmp/tmpSfCHm1/pdisk_1.dat 2025-03-28T09:24:09.492979Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:09.592986Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:09.593079Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:09.608015Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63356, node 10 2025-03-28T09:24:09.825940Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:09.825989Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:09.826000Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:09.826144Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12555 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:24:10.254445Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 2025-03-28T09:24:14.193027Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7486788774055330420:2146];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:14.193099Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 2025-03-28T09:24:24.455972Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:24:24.456010Z node 10 :IMPORT WARN: Table profiles were not loaded Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 2 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 3 of 3 Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 3 >> TGRpcYdbTest::ExplainQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExternalTTL [GOOD] Test command err: 2025-03-28T09:24:12.435180Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:24:12.561384Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:12.565829Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:24:12.566284Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:24:12.592676Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 
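For context: the TColumnShardTestSchema::RebootExternalTTL output that follows exercises TTL state on a column shard across a tablet restart — the normalizer_register/normalizer_finished chains are the shard replaying its metadata on boot. For reference, a sketch of the user-facing DDL that configures TTL on a column such as the saved_at field scanned later in this block (headers as in the sketch above); the table path is a placeholder, and the test's "external" (tiered-storage) variant relies on internal setup not reproduced here:

    // Marks rows whose saved_at is older than one hour as eligible for
    // TTL eviction; the columnshard applies the eviction in the background.
    void ConfigureTtl(NYdb::NTable::TTableClient& client) {
        auto session = client.CreateSession().GetValueSync().GetSession();
        auto status = session.ExecuteSchemeQuery(R"(
            ALTER TABLE `Root/olapStore/olapTable`
            SET (TTL = Interval("PT1H") ON saved_at);
        )").GetValueSync();
        Y_ABORT_UNLESS(status.IsSuccess(),
                       "%s", status.GetIssues().ToString().c_str());
    }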
2025-03-28T09:24:12.593058Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:24:12.601908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:24:12.602143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:24:12.602383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:24:12.602503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:24:12.602606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:24:12.602775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:24:12.602893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:24:12.603036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:24:12.603179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:24:12.603302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:24:12.603439Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:24:12.603555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:24:12.629961Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:24:12.634573Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:24:12.634953Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:24:12.635010Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:24:12.635213Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:24:12.635409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:24:12.635493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:24:12.635552Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:24:12.635660Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:24:12.635732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:24:12.635777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:24:12.635808Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:24:12.635988Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:24:12.636065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:24:12.636127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:24:12.636162Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:24:12.636273Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:24:12.636333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:24:12.636382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:24:12.636412Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:24:12.636488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:24:12.636526Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:24:12.636560Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:24:12.636637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:24:12.636683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:24:12.636714Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:24:12.637140Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=53; 2025-03-28T09:24:12.637239Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2025-03-28T09:24:12.637341Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-03-28T09:24:12.637419Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-03-28T09:24:12.637617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:24:12.637684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:24:12.637719Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:24:12.637926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:24:12.637971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:24:12.638016Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:24:12.638159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:24:12.638201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:24:12.638229Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:24:12.638468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:24:12.638532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:24:12.638565Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T0 ... send_data;compute_actor_id=[1:1031:3024];bytes=362872;rows=45359;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-03-28T09:24:33.224163Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:24:33.224346Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:24:33.224397Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:24:33.224473Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:24:33.224661Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:24:33.224830Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:24:33.224879Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:24:33.225009Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=34641; 2025-03-28T09:24:33.225075Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=277128;num_rows=34641;batch_columns=saved_at; 2025-03-28T09:24:33.225254Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1031:3024];bytes=277128;rows=34641;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-03-28T09:24:33.225405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:24:33.225526Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:24:33.225642Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:24:33.225787Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:24:33.225866Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:24:33.225936Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:24:33.225966Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1036:3029] finished for tablet 9437184 2025-03-28T09:24:33.226485Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1031:3024];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap"],"t":0.006},{"events":["f_processing","f_task_result"],"t":0.021},{"events":["f_ack","l_task_result"],"t":0.862},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.865}],"full":{"a":1743153872360512,"name":"_full_task","f":1743153872360512,"d_finished":0,"c":0,"l":1743153873226051,"d":865539},"events":[{"name":"bootstrap","f":1743153872361086,"d_finished":6261,"c":1,"l":1743153872367347,"d":6261},{"a":1743153873225768,"name":"ack","f":1743153873223126,"d_finished":2419,"c":2,"l":1743153873225670,"d":2702},{"a":1743153873225753,"name":"processing","f":1743153872381948,"d_finished":187854,"c":18,"l":1743153873225673,"d":188152},{"name":"ProduceResults","f":1743153872364681,"d_finished":9458,"c":22,"l":1743153873225953,"d":9458},{"a":1743153873225955,"name":"Finish","f":1743153873225955,"d_finished":0,"c":0,"l":1743153873226051,"d":96},{"name":"task_result","f":1743153872381991,"d_finished":184892,"c":16,"l":1743153873222839,"d":184892}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:24:33.226582Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1031:3024];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:24:33.227312Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1031:3024];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap"],"t":0.006},{"events":["f_processing","f_task_result"],"t":0.021},{"events":["f_ack","l_task_result"],"t":0.862},{"events":["l_ProduceResults","f_Finish"],"t":0.865},{"events":["l_ack","l_processing","l_Finish"],"t":0.866}],"full":{"a":1743153872360512,"name":"_full_task","f":1743153872360512,"d_finished":0,"c":0,"l":1743153873226625,"d":866113},"events":[{"name":"bootstrap","f":1743153872361086,"d_finished":6261,"c":1,"l":1743153872367347,"d":6261},{"a":1743153873225768,"name":"ack","f":1743153873223126,"d_finished":2419,"c":2,"l":1743153873225670,"d":3276},{"a":1743153873225753,"name":"processing","f":1743153872381948,"d_finished":187854,"c":18,"l":1743153873225673,"d":188726},{"name":"ProduceResults","f":1743153872364681,"d_finished":9458,"c":22,"l":1743153873225953,"d":9458},{"a":1743153873225955,"name":"Finish","f":1743153873225955,"d_finished":0,"c":0,"l":1743153873226625,"d":670},{"name":"task_result","f":1743153872381991,"d_finished":184892,"c":16,"l":1743153873222839,"d":184892}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:24:33.227447Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:24:32.359108Z;index_granules=0;index_portions=2;index_batches=1720;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=5265968;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5265968;selected_rows=0; 2025-03-28T09:24:33.227498Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:24:33.227808Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1036:3029];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> YdbTableBulkUpsert::ValidRetry ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::ExplainQuery [GOOD] Test command err: 2025-03-28T09:24:06.178589Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788761957404856:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:06.178631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2909/00144d/r3tmp/tmp0PPhi3/pdisk_1.dat 2025-03-28T09:24:07.024273Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:07.034470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:07.034548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:07.047337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3925, node 1 2025-03-28T09:24:07.355963Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:07.355990Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:07.356004Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:07.356155Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23339 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:07.762430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00144d/r3tmp/tmpKPYmdm/pdisk_1.dat 2025-03-28T09:24:11.698238Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:24:11.745397Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:11.819014Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:11.819125Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:11.831527Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6583, node 4 2025-03-28T09:24:12.051309Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:12.051331Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:12.051338Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:12.051490Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31463 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:12.268116Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:14.869610Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788794930896184:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:14.869677Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788794930896176:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:14.869771Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:14.872771Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:14.898403Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486788794930896190:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:24:14.971338Z node 4 :TX_PROXY ERROR: Actor# [4:7486788794930896259:2675] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:17.099672Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788810537675526:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:17.099946Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00144d/r3tmp/tmpVzWR8F/pdisk_1.dat 2025-03-28T09:24:17.309261Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:17.353160Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:17.353241Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:17.356217Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8271, node 7 2025-03-28T09:24:17.487236Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:17.487259Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:17.487266Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:17.487412Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6211 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:17.752004Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR E0000 00:00:1743153860.323791 409655 text_format.cc:383] Error parsing text-format Ydb.Type: 3:13: Unknown enumeration value of "TYPE_UNDEFINED" for field "type_id". 
2025-03-28T09:24:20.332753Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788823422578208:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:20.332832Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788823422578219:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:20.332883Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:20.335711Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:20.354842Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486788823422578222:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:24:20.408717Z node 7 :TX_PROXY ERROR: Actor# [7:7486788823422578302:2676] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/wo ... 057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:20.492253Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MmM1YTY3NjEtNDA2NjdlMTctYzA1ODMxMDEtMTRiNTI5NWI=, ActorId: [7:7486788823422578181:2331], ActorState: ExecuteState, TraceId: 01jqe123qb06bzawbe2xb1bjhh, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: 2025-03-28T09:24:20.493387Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710660. Ctx: { TraceId: 01jqe123qb06bzawbe2xb1bjhh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MmM1YTY3NjEtNDA2NjdlMTctYzA1ODMxMDEtMTRiNTI5NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
: Error: Unsupported protobuf type:
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: E0000 00:00:1743153860.495516 409655 text_format.cc:383] Error parsing text-format Ydb.Type: 5:21: Unknown enumeration value of "Int32" for field "type_id". 2025-03-28T09:24:20.559539Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MmM1YTY3NjEtNDA2NjdlMTctYzA1ODMxMDEtMTRiNTI5NWI=, ActorId: [7:7486788823422578181:2331], ActorState: ExecuteState, TraceId: 01jqe123wg3s57yax4w59cqys7, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: 2025-03-28T09:24:20.560272Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe123wg3s57yax4w59cqys7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MmM1YTY3NjEtNDA2NjdlMTctYzA1ODMxMDEtMTRiNTI5NWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
: Error: Unsupported protobuf type:
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:997: ydb/library/mkql_proto/mkql_proto.cpp:1435: Unknown protobuf type: 2025-03-28T09:24:22.637057Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788829508446954:2220];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00144d/r3tmp/tmpA5vnBH/pdisk_1.dat 2025-03-28T09:24:22.774736Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:24:22.928244Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15299, node 10 2025-03-28T09:24:23.007205Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:23.007383Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:23.042359Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:23.163243Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:23.163265Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:23.163274Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:23.163431Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:23.516128Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:26.141055Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788846688317015:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:26.141128Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:26.141340Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788846688317027:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:26.144743Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:26.186910Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486788846688317029:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:24:26.257068Z node 10 :TX_PROXY ERROR: Actor# [10:7486788846688317111:2684] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:28.279086Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788858373309902:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:28.279136Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00144d/r3tmp/tmpVW97hh/pdisk_1.dat 2025-03-28T09:24:28.424226Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:28.457501Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:28.457588Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:28.470920Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20124, node 13 2025-03-28T09:24:28.669076Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:28.669099Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:28.669108Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:28.669257Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12078 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:28.944700Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:29.055886Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:31.805600Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486788871258212975:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:31.805706Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:31.811093Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486788871258212987:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:31.814944Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:31.844549Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7486788871258212989:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:31.929298Z node 13 :TX_PROXY ERROR: Actor# [13:7486788871258213060:2803] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:32.045106Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe12exsdq6wfa36pjp37yc3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NGMwMjRkYmYtZTc5Y2RhZWEtYjM2ZmNjYS00MGI1YmU2Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Ignore [GOOD] >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check >> TGRpcYdbTest::KeepAlive [GOOD] >> ClientStatsCollector::PrepareQuery >> YdbScripting::Params [GOOD] >> YdbTableBulkUpsert::DataValidation >> KqpJoinOrder::FiveWayJoinWithPreds+ColumnStore [GOOD] >> YdbYqlClient::TestReadTableSnapshot [GOOD] >> KqpJoinOrder::CanonizedJoinOrderTPCH16 [GOOD] >> TGRpcYdbTest::CreateTableBadRequest [GOOD] >> TGRpcYdbTest::CreateTableBadRequest2 >> YdbYqlClient::DiscoveryLocationOverride >> TGRpcNewCoordinationClient::SessionCreateUpdateDeleteSemaphore [GOOD] >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback >> YdbScripting::BasicV0 [GOOD] >> YdbScripting::BasicV1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::KeepAlive [GOOD] Test command err: 2025-03-28T09:24:08.651232Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788772998402249:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:08.651287Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001446/r3tmp/tmpt8UlAS/pdisk_1.dat 2025-03-28T09:24:09.327816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:09.327912Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:09.332228Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:09.342838Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6610, node 1 2025-03-28T09:24:09.642982Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:09.642999Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:09.643006Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:09.643102Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13395 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:10.033549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:10.238784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:13.767785Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788792891350001:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:13.767851Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001446/r3tmp/tmpt4dX51/pdisk_1.dat 2025-03-28T09:24:14.044222Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:14.095755Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:14.095856Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 21888, node 4 2025-03-28T09:24:14.163363Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:14.387230Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:14.387254Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:14.387262Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:14.387407Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18411 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:14.772698Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:14.914288Z node 4 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jqe11ye24mtv59n5ytq2x3wg, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54686, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:14.917708Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:14.931330Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:14.931441Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:14.931456Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:14.931488Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:15.042130Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:15.042260Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:15.046555Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:15.046623Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:15.155499Z node 4 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jqe11ynkbjhhx35fc2rc1ax1, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54686, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:17.280251Z node 4 :GRPC_SERVER DEBUG: Got grpc request# ExecuteDataQueryRequest, traceId# 01jqe120r07keh3r0xr4tex624, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54686, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:17.282179Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788810071220342:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:17.282269Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:17.282876Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788810071220354:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:17.287100Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:17.304013Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:17.304117Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:17.304127Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:17.304164Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:17.319651Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:17.319768Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:17.319780Z node 4 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:17.319835Z node 4 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:17.323753Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486788810071220356:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:17.433123Z node 4 :TX_PROXY ERROR: Actor# [4:7486788810071220435:2794] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:18.161502Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe120r07keh3r0xr4tex624, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MWFkNjZlZjYtOGRiM2YzMmItZGYxMTEzMDctZTFmYzU2ZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:18.200418Z node 4 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jqe121mrej9fke64frvdnvr7, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54686, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:18.200744Z node 4 :READ_TABLE_API NOTICE: [4:7486788814366187786:2353] Finish grpc stream, status: 400010 2025-03-28T09:24:18.207165Z node 4 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jqe121mz7bccs59vz1k7wbzp, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:54686, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:18.236174Z node 4 :READ_TABLE_API DEBUG: [4:7486788814366187787:2354] Adding quota request to queue ShardId: 0, TxId: 281474976715662 2025-03-28T09:24:18.236229Z node 4 :READ_TABLE_API DEBUG: [4:7486788814366187787:2354] Assign stream quota to Shard 0, Quota 5, TxId 281474976715662 Reserved: 5 of 25, Queued: 0 2025-03-28T09:24:18.237297Z node 4 :READ_TABLE_API DEBUG: [4:7486788814366187787:2354] got stream part, size: 290, RU required: 128 rate limiter absent 2025-03-28T09:24:18.237626Z node 4 :READ_TABLE_API DEBUG: [4:7486788814366187787:2354] Starting inactivity timer for 600.000000s with tag 3 2025-03-28T09:24:18 ... 
eam quota to Shard 0, Quota 5, TxId 281474976715666 Reserved: 5 of 25, Queued: 0 2025-03-28T09:24:18.334213Z node 4 :READ_TABLE_API DEBUG: [4:7486788814366187836:2359] got stream part, size: 244, RU required: 128 rate limiter absent 2025-03-28T09:24:18.334580Z node 4 :READ_TABLE_API DEBUG: [4:7486788814366187836:2359] Starting inactivity timer for 600.000000s with tag 3 2025-03-28T09:24:18.337145Z node 4 :READ_TABLE_API NOTICE: [4:7486788814366187836:2359] Finish grpc stream, status: 400000 2025-03-28T09:24:18.347351Z node 4 :GRPC_SERVER DEBUG: [0x51a000031e80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-28T09:24:18.347623Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d2080] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-28T09:24:18.347797Z node 4 :GRPC_SERVER DEBUG: [0x51a000043e80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-28T09:24:18.347973Z node 4 :GRPC_SERVER DEBUG: [0x51a0000ab080] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-28T09:24:18.348138Z node 4 :GRPC_SERVER DEBUG: [0x51a000032a80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:18.348286Z node 4 :GRPC_SERVER DEBUG: [0x51a000034280] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-28T09:24:18.348435Z node 4 :GRPC_SERVER DEBUG: [0x51a0000aa480] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:18.348607Z node 4 :GRPC_SERVER DEBUG: [0x51a0000a9880] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:18.348762Z node 4 :GRPC_SERVER DEBUG: [0x51a0000a8c80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:18.348951Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d0280] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-03-28T09:24:18.349100Z node 4 :GRPC_SERVER DEBUG: [0x51a0000d0e80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-28T09:24:18.349251Z node 4 :GRPC_SERVER DEBUG: [0x51a00009cc80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-28T09:24:18.349433Z node 4 :GRPC_SERVER DEBUG: [0x51a0000c4880] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:18.349587Z node 4 :GRPC_SERVER DEBUG: [0x51a0000c5480] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:18.349735Z node 4 :GRPC_SERVER DEBUG: [0x51a000036080] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:18.349906Z node 4 :GRPC_SERVER DEBUG: [0x51a0000a8080] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-28T09:24:18.350054Z node 4 :GRPC_SERVER DEBUG: [0x51a00002e280] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:20.126543Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788820734534232:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:20.126596Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2909/001446/r3tmp/tmpth7pKi/pdisk_1.dat 2025-03-28T09:24:20.298905Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:20.339043Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:20.339147Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:20.342856Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24453, node 7 2025-03-28T09:24:20.414668Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:20.414734Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:20.414746Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:20.414878Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9669 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:20.692141Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting...
: Error: Operation timeout. 2025-03-28T09:24:25.235009Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788845780396356:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:25.235089Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001446/r3tmp/tmpbpo6ka/pdisk_1.dat 2025-03-28T09:24:25.496934Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:25.525430Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:25.525531Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:25.530346Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30186, node 10 2025-03-28T09:24:25.608495Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:25.608516Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:25.608527Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:25.608669Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17710 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:25.940436Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
: Error: Operation cancelled. 2025-03-28T09:24:30.498731Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788864679286367:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:30.498825Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001446/r3tmp/tmpvbZeDc/pdisk_1.dat 2025-03-28T09:24:30.665649Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:30.702995Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:30.703096Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:30.707269Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2485, node 13 2025-03-28T09:24:30.811852Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:30.811872Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:30.811883Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:30.812029Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15569 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:31.153561Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
>> YdbYqlClient::TestReadTableMultiShard >> YdbYqlClient::TestConstraintViolation [GOOD] >> YdbYqlClient::TestDescribeDirectory [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableSnapshot [GOOD] Test command err: 2025-03-28T09:24:13.707164Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788790629145354:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:13.716432Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00143d/r3tmp/tmptFjYpS/pdisk_1.dat 2025-03-28T09:24:14.320682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:14.320780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:14.326439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:14.335246Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26823, node 1 2025-03-28T09:24:14.667285Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:14.667308Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:14.667319Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:14.667441Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6210 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:15.230234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:17.632926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788807809015595:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:17.633028Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:18.059577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:18.288517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788812103983076:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:18.288608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:18.288810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788812103983081:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:18.293059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:18.330161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788812103983083:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:24:18.404494Z node 1 :TX_PROXY ERROR: Actor# [1:7486788812103983156:2806] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:18.545664Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe121qf8nstx9cbq405712t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzg1YTVhY2YtMWZiZjRiMWYtZDNlMTgzNWMtODc3MWViZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:18.694786Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788790629145354:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:18.694850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:24:18.788915Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqe1220k31fzbd9ngtwcaw4n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mzg1YTVhY2YtMWZiZjRiMWYtZDNlMTgzNWMtODc3MWViZGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:18.806621Z node 1 :TX_PROXY ERROR: [ReadTable [1:7486788812103983232:2371] TxId# 281474976710663] RESPONSE Status# ResolveError shard: 0 table: Root/Test 2025-03-28T09:24:18.824147Z node 1 :TX_PROXY ERROR: [ReadTable [1:7486788812103983235:2372] TxId# 281474976710664] RESPONSE Status# ResolveError shard: 0 table: Root/Test 2025-03-28T09:24:20.376331Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788823078965879:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:20.376385Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00143d/r3tmp/tmpXPsX7I/pdisk_1.dat 2025-03-28T09:24:20.687637Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:20.707745Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:20.707828Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:20.716938Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13199, node 4 2025-03-28T09:24:20.950077Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:20.950099Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:20.950107Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:20.950246Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25496 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:21.209364Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:23.619043Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788835963868813:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:23.619126Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:23.644016Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:23.790850Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788835963868979:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:23.790988Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:23.791070Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788835963868984:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:23.796135Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:23.846896Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486788835963868986:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:24:23.908441Z node 4 :TX_PROXY ERROR: Actor# [4:7486788835963869071:2798] txid# 281474976710660, i ... main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:29.182162Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:29.350883Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788862348723552:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:29.351032Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:29.354849Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788862348723557:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:29.358968Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:29.382889Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486788862348723559:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:29.452040Z node 7 :TX_PROXY ERROR: Actor# [7:7486788862348723655:2810] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:29.530646Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe12cgz09d8vazkp46c3626, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MTA3NDlkYmQtOTEyMTg4ZjgtYmE3MmU2ZjQtZjQzYmZh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:29.707865Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe12cqrasae4f5vet0t95rr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MTA3NDlkYmQtOTEyMTg4ZjgtYmE3MmU2ZjQtZjQzYmZh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:29.744070Z node 7 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:24:31.218533Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788869045765034:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:31.219944Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00143d/r3tmp/tmp7MKUqS/pdisk_1.dat 2025-03-28T09:24:31.369054Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:31.388609Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:31.388700Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:31.392997Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30274, node 10 2025-03-28T09:24:31.463103Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:31.463121Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:31.463129Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:31.463263Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8667 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:31.724607Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:31.809152Z node 10 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jqe12ey04j1x998xx2kkbhdf, sdkBuildInfo# ydb-cpp-sdk/3.2.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:59364, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.998570s 2025-03-28T09:24:31.823654Z node 10 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jqe12eyf03p6600m2jpjp32g, sdkBuildInfo# ydb-cpp-sdk/3.2.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:59380, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:34.739744Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ExecuteSchemeQueryRequest, traceId# 01jqe12hskc8kvrxfxmfm2d732, sdkBuildInfo# ydb-cpp-sdk/3.2.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:59392, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:34.742945Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788881930667923:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:34.743035Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:34.761078Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:34.766790Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:34.766920Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:34.767217Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:34.767283Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:34.854644Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:34.854781Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:34.854930Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:34.854992Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:34.886207Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jqe12hy5f7kte6ttm0nxf0g8, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:59402, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:34.899011Z node 10 :READ_TABLE_API DEBUG: [10:7486788881930668087:2346] Adding quota request to queue ShardId: 0, TxId: 281474976715659 2025-03-28T09:24:34.899056Z node 10 :READ_TABLE_API DEBUG: [10:7486788881930668087:2346] Assign stream quota to Shard 0, Quota 5, TxId 281474976715659 Reserved: 5 of 25, Queued: 0 2025-03-28T09:24:34.912425Z node 10 :READ_TABLE_API DEBUG: [10:7486788881930668087:2346] got stream part, size: 35, RU required: 128 rate limiter absent 2025-03-28T09:24:34.912775Z node 10 :READ_TABLE_API DEBUG: [10:7486788881930668087:2346] Starting inactivity timer for 600.000000s with tag 3 2025-03-28T09:24:34.912817Z node 10 :READ_TABLE_API NOTICE: [10:7486788881930668087:2346] Finish grpc stream, status: 400000 2025-03-28T09:24:34.924595Z node 10 :GRPC_SERVER DEBUG: [0x51a000108c80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-28T09:24:34.924837Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b7c80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-28T09:24:34.925088Z node 10 :GRPC_SERVER DEBUG: [0x51a00008fa80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-28T09:24:34.926345Z node 10 :GRPC_SERVER DEBUG: [0x51a000070280] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-28T09:24:34.926588Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f5480] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-28T09:24:34.926821Z node 10 :GRPC_SERVER DEBUG: [0x51a000025880] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:34.927021Z node 10 :GRPC_SERVER DEBUG: [0x51a000160280] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:34.927205Z node 10 :GRPC_SERVER DEBUG: [0x51a000025280] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:34.927404Z node 10 :GRPC_SERVER DEBUG: [0x51a000090680] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-03-28T09:24:34.927542Z node 10 :GRPC_SERVER DEBUG: [0x51a0000cc080] 
received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-28T09:24:34.927726Z node 10 :GRPC_SERVER DEBUG: [0x51a0000cd280] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-28T09:24:34.927894Z node 10 :GRPC_SERVER DEBUG: [0x51a000044a80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:34.928047Z node 10 :GRPC_SERVER DEBUG: [0x51a000021080] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:34.928243Z node 10 :GRPC_SERVER DEBUG: [0x51a000025e80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:34.928425Z node 10 :GRPC_SERVER DEBUG: [0x51a000160880] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-28T09:24:34.928620Z node 10 :GRPC_SERVER DEBUG: [0x51a000020a80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:34.929087Z node 10 :GRPC_SERVER DEBUG: [0x51a000021c80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 >> TGRpcLdapAuthentication::LdapAuthWithInvalidLogin [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword >> YdbTableBulkUpsert::Errors [GOOD] >> YdbTableBulkUpsert::Limits >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyHosts [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn >> KqpJoinOrder::CanonizedJoinOrderTPCH13 [GOOD] >> LocalityOperation::LocksFromAnotherTenants-UseSink [GOOD] >> YdbYqlClient::CreateAndAltertTableWithPartitioningByLoad [GOOD] >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter >> YdbYqlClient::TestTransactionQueryError [GOOD] >> YdbYqlClient::TestReadWrongTable >> YdbQueryService::TestCreateAndAttachSession [GOOD] >> YdbQueryService::TestForbidExecuteWithoutAttach >> TGRpcNewCoordinationClient::CreateAlter [GOOD] >> TGRpcNewCoordinationClient::BasicMethods >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestConstraintViolation [GOOD] Test command err: 2025-03-28T09:24:15.067226Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788801719430676:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:15.067274Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001434/r3tmp/tmpuo9rxa/pdisk_1.dat 2025-03-28T09:24:15.844525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:15.844599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:15.848413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:15.851117Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20856, node 1 2025-03-28T09:24:16.061965Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:16.062117Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:16.062272Z node 1 :NET_CLASSIFIER WARN: failed to 
initialize from file: (empty maybe) 2025-03-28T09:24:16.062597Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13801 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:16.375157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:16.403343Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:24:18.883403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788814604333630:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:18.883517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:19.203687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:19.373388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788818899301112:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:19.373485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:19.373730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788818899301117:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:19.377067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:19.395042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788818899301119:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:24:19.496201Z node 1 :TX_PROXY ERROR: Actor# [1:7486788818899301198:2818] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:19.720213Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe122sc7jfvacnqkyx74yt3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzVlNTA1MTktOGM0ODg5NmItNTk0Y2QyNDMtMjliMzdhZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:19.905426Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqe12355f9kheggeeekc2108, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzVlNTA1MTktOGM0ODg5NmItNTk0Y2QyNDMtMjliMzdhZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:21.476346Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788828875080376:2153];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:21.537710Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001434/r3tmp/tmp0Odsea/pdisk_1.dat 2025-03-28T09:24:21.695839Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:21.728119Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:21.728200Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:21.730945Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26646, node 4 2025-03-28T09:24:21.835248Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:21.835268Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:21.835276Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:21.835394Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8769 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:22.054627Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:24.468826Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788841759983219:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:24.468913Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:24.469145Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788841759983231:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:24.472323Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:24.510925Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486788841759983233:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:24:24.587836Z node 4 :TX_PROXY ERROR: Actor# [4:7486788841759983322:2694] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:26.318920Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788846498420534:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:26.319013Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001434/r3tmp/tmpLU6c5P/pdisk_1.dat 2025-03-28T09:24:26.608201Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:26.651286Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:26.651395Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:26.655932Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10274, node 7 2025-03-28T09:24:26.790129Z node 7 :NET_CLASSIFIER WARN: distributable config i ... ResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:29.725231Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:29.725661Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MzRlMDk0NmYtY2JmYzAxZjctNTY4Y2JkY2QtNGZiNjUwODk=, ActorId: [7:7486788859383323439:2335], ActorState: ExecuteState, TraceId: 01jqe12cw66bmhhrtnrtcrtzag, Reply query error, msg: Pending previous query completion proxyRequestId: 10 2025-03-28T09:24:29.725702Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MzRlMDk0NmYtY2JmYzAxZjctNTY4Y2JkY2QtNGZiNjUwODk=, ActorId: [7:7486788859383323439:2335], ActorState: ExecuteState, TraceId: 01jqe12cw66bmhhrtnrtcrtzag, Reply query error, msg: Pending previous query completion proxyRequestId: 11 2025-03-28T09:24:29.726269Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788859383323506:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:29.726351Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:29.729930Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MzRlMDk0NmYtY2JmYzAxZjctNTY4Y2JkY2QtNGZiNjUwODk=, ActorId: [7:7486788859383323439:2335], ActorState: ExecuteState, TraceId: 01jqe12cw66bmhhrtnrtcrtzag, Reply query error, msg: Pending previous query completion proxyRequestId: 12 2025-03-28T09:24:29.730855Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788859383323515:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:29.730919Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:29.744967Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486788859383323456:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:24:29.823706Z node 7 :TX_PROXY ERROR: Actor# [7:7486788859383323576:2688] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:31.667669Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788869046901875:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:31.667724Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001434/r3tmp/tmp0Lr5Sr/pdisk_1.dat 2025-03-28T09:24:31.856685Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:31.894146Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:31.894234Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:31.906827Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15225, node 10 2025-03-28T09:24:32.063353Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:32.063378Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:32.063387Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:32.063524Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5323 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:32.368138Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:32.384603Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:24:35.121841Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788886226772092:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:35.121927Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:35.140811Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:35.265237Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788886226772253:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:35.265368Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:35.265613Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788886226772258:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:35.270143Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:35.300689Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486788886226772260:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:35.363521Z node 10 :TX_PROXY ERROR: Actor# [10:7486788886226772331:2792] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:35.655793Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe12ja09msf451rmcwdhz6c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTEyMjBhMjgtZTU2OWY3NmUtYTk1OTBmYzItYTkzZDhiMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:35.662296Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe12ja09msf451rmcwdhz6c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTEyMjBhMjgtZTU2OWY3NmUtYTk1OTBmYzItYTkzZDhiMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:35.666222Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe12ja09msf451rmcwdhz6c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTEyMjBhMjgtZTU2OWY3NmUtYTk1OTBmYzItYTkzZDhiMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:35.925813Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe12jq9f8rb3y7hh14p37bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTEyMjBhMjgtZTU2OWY3NmUtYTk1OTBmYzItYTkzZDhiMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:35.931032Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe12jq9f8rb3y7hh14p37bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTEyMjBhMjgtZTU2OWY3NmUtYTk1OTBmYzItYTkzZDhiMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:35.952828Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7486788886226772444:2378], TxId: 281474976715665, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe12jq9f8rb3y7hh14p37bw. SessionId : ydb://session/3?node_id=10&id=ZTEyMjBhMjgtZTU2OWY3NmUtYTk1OTBmYzItYTkzZDhiMWQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
<main>: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:24:35.953802Z node 10 :KQP_COMPUTE ERROR: SelfId: [10:7486788886226772445:2379], TxId: 281474976715665, task: 2. Ctx: { TraceId : 01jqe12jq9f8rb3y7hh14p37bw. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=10&id=ZTEyMjBhMjgtZTU2OWY3NmUtYTk1OTBmYzItYTkzZDhiMWQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Handle abort execution event from: [10:7486788886226772441:2335], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:24:35.954287Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZTEyMjBhMjgtZTU2OWY3NmUtYTk1OTBmYzItYTkzZDhiMWQ=, ActorId: [10:7486788886226772066:2335], ActorState: ExecuteState, TraceId: 01jqe12jq9f8rb3y7hh14p37bw, Create QueryResponse for error on request, msg: 2025-03-28T09:24:35.955968Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqe12jq9f8rb3y7hh14p37bw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTEyMjBhMjgtZTU2OWY3NmUtYTk1OTBmYzItYTkzZDhiMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestDescribeDirectory [GOOD] Test command err: 2025-03-28T09:24:14.794530Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788797574816735:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:14.794578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00143b/r3tmp/tmptqsDPk/pdisk_1.dat 2025-03-28T09:24:15.497330Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:15.512515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:15.512610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:15.530221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16089, node 1 2025-03-28T09:24:15.850626Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:15.850650Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:15.850658Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:15.850779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19522 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:16.166428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:16.207391Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:24:18.496537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788814754686973:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:18.496621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788814754686964:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:18.496757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:18.501124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:18.550943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788814754686978:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:24:18.643692Z node 1 :TX_PROXY ERROR: Actor# [1:7486788814754687051:2696] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:20.683376Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788823578810478:2086];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:20.683539Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00143b/r3tmp/tmpdio1ve/pdisk_1.dat 2025-03-28T09:24:20.908998Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17285, node 4 2025-03-28T09:24:21.039676Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:21.039757Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:21.049009Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:21.075280Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:21.075303Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:21.075310Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:21.075464Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13151 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:21.273883Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:23.690318Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788836463713400:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:23.690448Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:23.690974Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788836463713413:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:23.694104Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-28T09:24:23.724024Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486788836463713415:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:24:23.799291Z node 4 :TX_PROXY ERROR: Actor# [4:7486788836463713485:2667] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:25.507027Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788843949964551:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:25.507077Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00143b/r3tmp/tmpILhARd/pdisk_1.dat 2025-03-28T09:24:25.750981Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26305, node 7 2025-03-28T09:24:25.835151Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:25.835269Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:25.908355Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:25.920921Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:25.920946Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:25.920953Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:25.921078Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65153 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:26.192062Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:28.682935Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:28.904620Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788856834867594:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:28.904707Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:28.905279Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788856834867606:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:28.909429Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-03-28T09:24:28.954961Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486788856834867617:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:29.056133Z node 7 :TX_PROXY ERROR: Actor# [7:7486788861129834994:2798] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:29.207636Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe12c36ag2psk175hsw334a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTdhZmNiLWVmYWYyYzExLWU3NGM5NGVhLTEzYWMyMGU4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:29.340994Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe12cds9a5sf4w3ngfjvew8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTdhZmNiLWVmYWYyYzExLWU3NGM5NGVhLTEzYWMyMGU4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:29.444310Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe12chedrskwjrj7n8wqe6m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTdhZmNiLWVmYWYyYzExLWU3NGM5NGVhLTEzYWMyMGU4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:29.549812Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe12cmf1dkkyfnsrgxq0vr8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTdhZmNiLWVmYWYyYzExLWU3NGM5NGVhLTEzYWMyMGU4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:29.647949Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe12cqt4nf1v0a5mcpznzys, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTdhZmNiLWVmYWYyYzExLWU3NGM5NGVhLTEzYWMyMGU4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:30.507307Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486788843949964551:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:30.507382Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:24:30.540461Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqe12cv31pn0rd4jpecasjw5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTdhZmNiLWVmYWYyYzExLWU3NGM5NGVhLTEzYWMyMGU4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:30.552033Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqe12cv31pn0rd4jpecasjw5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YTdhZmNiLWVmYWYyYzExLWU3NGM5NGVhLTEzYWMyMGU4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:24:32.226968Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788872778322922:2089];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:32.255572Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00143b/r3tmp/tmp6qSsvk/pdisk_1.dat 2025-03-28T09:24:32.466566Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:32.505927Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:32.506145Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:32.512638Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14752, node 10 2025-03-28T09:24:32.621849Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:32.621875Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:32.621884Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:32.622036Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17106 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:32.883768Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:35.952945Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788885663225852:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:35.953045Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:35.989010Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
>> TYqlDateTimeTests::TimestampKey [GOOD]
>> TYqlDateTimeTests::IntervalKey
>> TTableProfileTests::OverwriteExecutionPolicy [GOOD]
>> TTableProfileTests::OverwritePartitioningPolicy
>> YdbTableBulkUpsert::RetryOperationSync [GOOD]
>> YdbTableBulkUpsert::RetryOperation
>> YdbTableBulkUpsertOlap::UpsertCsvBug
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FiveWayJoinWithPreds+ColumnStore [GOOD]
Test command err:
Trying to start YDB, gRPC: 19241, MsgBus: 1854
2025-03-28T09:23:01.300029Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788483033603933:2203];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:23:01.300696Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016f1/r3tmp/tmpHEnYiD/pdisk_1.dat
2025-03-28T09:23:01.901196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:23:01.901295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:23:01.904442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:23:01.940951Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 19241, node 1
2025-03-28T09:23:02.091191Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:23:02.091214Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:23:02.091221Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:23:02.091311Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:1854
TClient is connected to server localhost:1854
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:23:02.968777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:05.471533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788500213473631:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:23:05.471645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:23:05.472046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788500213473643:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:23:05.477586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:23:05.493074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788500213473645:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:23:05.563399Z node 1 :TX_PROXY ERROR: Actor# [1:7486788500213473697:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:05.908655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:23:06.155955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788504508441280:2363];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:06.156149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788504508441280:2363];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:06.156410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788504508441280:2363];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:23:06.156554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788504508441280:2363];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:23:06.156658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788504508441280:2363];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:23:06.156784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788504508441280:2363];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:23:06.156883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788504508441280:2363];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:23:06.156975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788504508441280:2363];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:23:06.157088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788504508441280:2363];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:23:06.157185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788504508441280:2363];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:23:06.157318Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7486788504508441280:2363];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:23:06.157435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788504508441280:2363];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:23:06.159413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788504508441262:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:06.159461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788504508441262:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:06.159679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788504508441262:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:23:06.159804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788504508441262:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:23:06.159911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788504508441262:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:23:06.160005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788504508441262:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:23:06.160109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788504508441262:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:23:06.160203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788504508441262:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:23:06.160318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788504508441262:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:23:06.160477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788504508441262:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:23:06.160580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486788504508441262:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:23:06.160681Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7486788504508441262:2354];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:23:06.224925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788504508441272:2359];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:06.224988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788504508441272:2359];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:06.225181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_i ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.442465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.448247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.453705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.459787Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.465890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.468821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.472195Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.475169Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.480696Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.481191Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.485841Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.487207Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.492227Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.492475Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.498298Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.499313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.505172Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.505656Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.511857Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.511980Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.517291Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.520293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.527084Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.533092Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.535936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.538372Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.541481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.544647Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.547482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039419;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.550293Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.552966Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.555904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.559491Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.566772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.572486Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.572527Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.577599Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.583462Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.583499Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.589789Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.590237Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.596048Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.596871Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.602636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.609402Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:23.779222Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe117q13zbzs49mmjgs9td7", SessionId: ydb://session/3?node_id=1&id=YzY2N2Q5ZmItYTY5OWJhODktYjA5MjIwNzctNmYzOTg3Y2Y=, Slow query, duration: 32.128975s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:24:24.021303Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:24.021718Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:24.022009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486788792271305945:10876];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-28T09:24:24.022354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH16 [GOOD] Test command err: Trying to start YDB, gRPC: 4110, MsgBus: 14618 2025-03-28T09:22:57.151681Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788466994137166:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:57.189742Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001733/r3tmp/tmpKWQhMq/pdisk_1.dat 2025-03-28T09:22:57.900713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:57.900828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:57.901089Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:57.909784Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4110, node 1 2025-03-28T09:22:58.180244Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:58.180269Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:58.180278Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:58.180390Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14618 TClient is connected to server localhost:14618 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:59.181712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:01.361949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788484174006872:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:23:01.362061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:23:01.362435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788484174006884:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:23:01.366606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:23:01.381394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788484174006886:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:23:01.486648Z node 1 :TX_PROXY ERROR: Actor# [1:7486788484174006937:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:01.929440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.134780Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788466994137166:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:02.134845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:02.244692Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788488468974494:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:02.244857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788488468974494:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:02.245087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788488468974494:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:23:02.245199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788488468974494:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:23:02.245326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788488468974494:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:23:02.245429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788488468974494:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:23:02.245521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788488468974494:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:23:02.245622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788488468974494:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:23:02.245727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788488468974494:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:23:02.245829Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037903;self_id=[1:7486788488468974494:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:23:02.245942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788488468974494:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:23:02.246087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037903;self_id=[1:7486788488468974494:2351];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:23:02.251143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788488468974516:2358];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:02.251192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788488468974516:2358];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:02.251387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788488468974516:2358];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:23:02.251479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788488468974516:2358];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:23:02.251568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788488468974516:2358];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:23:02.251658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788488468974516:2358];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:23:02.251744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788488468974516:2358];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:23:02.251844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788488468974516:2358];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:23:02.251934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788488468974516:2358];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:23:02.252021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788488468974516:2358];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:23:02.252108Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486788488468974516:2358];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:23:02.252192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788488468974516:2358];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:23:02.325871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788488468974504:2356];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp ... oller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.636792Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.640354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.645694Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.645987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.651171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.661834Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.667267Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.672692Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.675601Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.682524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.685069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.691704Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.692908Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.698157Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.703848Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.709152Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.709151Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.714636Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.716780Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.721903Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.723817Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.727856Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.729368Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.733874Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.734948Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.739678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.740544Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.747752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.748376Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.753683Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.753739Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.760064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.760064Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.766159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.766165Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.772254Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039197;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.774656Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039415;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.778220Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.780640Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.784155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.791965Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.797937Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.804212Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.810221Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039293;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.826814Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:19.956024Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe111sv71che7jy5f5grnqw", SessionId: ydb://session/3?node_id=1&id=OWYzZjIzMDEtMzlkMTFhYS0zNTY2ZGFiNy1kNDk2NDlmZA==, Slow query, duration: 34.359789s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:24:20.231482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:20.231502Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:20.232011Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;self_id=[1:7486788686037506336:7805];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224038933;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039392;receive=72075186224039094; 2025-03-28T09:24:20.232461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> LocalityOperation::LocksFromAnotherTenants-UseSink [GOOD] Test command err: 2025-03-28T09:24:10.112389Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788778558776298:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:10.113914Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001445/r3tmp/tmpQvcOtR/pdisk_1.dat 2025-03-28T09:24:10.739373Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:10.740494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:10.740557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:10.749506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24951, node 1 2025-03-28T09:24:11.012016Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-03-28T09:24:11.012036Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:11.012045Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:11.012133Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17203 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:11.304768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:15.182943Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788799373435418:2218];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:15.235038Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001445/r3tmp/tmpWnBGnO/pdisk_1.dat 2025-03-28T09:24:15.395061Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:15.434041Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:15.434139Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:15.441478Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13422, node 4 2025-03-28T09:24:15.691518Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:15.691542Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:15.691548Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:15.691686Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20747 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:16.000220Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:16.019164Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:24:20.316903Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788824475187222:2143];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:20.316975Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001445/r3tmp/tmpnTEVxr/pdisk_1.dat 2025-03-28T09:24:20.813424Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:20.881793Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:20.881894Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:20.896149Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30118, node 7 2025-03-28T09:24:21.043436Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:21.043459Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:21.043468Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:21.043570Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63379 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:21.452477Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:63379 2025-03-28T09:24:21.776717Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:21.810068Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:22.339607Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486788833190916913:2143];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:22.340143Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_0/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:24:22.351172Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:22.351292Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:22.369128Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-03-28T09:24:22.376783Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:22.598526Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:22.659353Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
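For readability, the DDL embedded in the KQP_SLOW_LOG record above (TraceId "01jqe111sv71che7jy5f5grnqw", duration 34.359789s) is reproduced here with its escaped newlines expanded; the statements are copied verbatim from the log text, nothing is added:

  CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
  ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

  CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
  ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

  CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
  ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);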
2025-03-28T09:24:23.275292Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486788836525577158:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:23.364878Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:24:23.496930Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:23.497033Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:23.527380Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-03-28T09:24:23.547570Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:25.074094Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:24:25.317080Z node 7 :METADA ... ed to initialize from file: (empty maybe) 2025-03-28T09:24:30.143622Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:30.516616Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:13245 2025-03-28T09:24:30.952434Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:30.973039Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:31.527367Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:31.527444Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:31.578901Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-03-28T09:24:31.580065Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:31.623271Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:31.656580Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:32.175152Z node 11 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7486788876077454192:2246];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:32.234029Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:32.234146Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:32.241362Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 11 Cookie 11 2025-03-28T09:24:32.242386Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:32.266578Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:24:34.001029Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:24:34.117984Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:24:34.284344Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788882688920658:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:34.284449Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:34.284836Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788882688920670:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:34.289239Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-03-28T09:24:34.325773Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486788882688920672:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-03-28T09:24:34.427766Z node 10 :TX_PROXY ERROR: Actor# [10:7486788882688920754:3397] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:34.547969Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqe12hba01ngp6j07mc5m8vg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTQ0ODFmMzktYzM2YjI0NTEtYzliZTJjMzAtNWRkZmI5Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:34.676391Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqe12hmw84em0n8wtc4mrg0e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTQ0ODFmMzktYzM2YjI0NTEtYzliZTJjMzAtNWRkZmI5Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:34.840452Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jqe12hrn6tetvx8kt15mgawe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTQ0ODFmMzktYzM2YjI0NTEtYzliZTJjMzAtNWRkZmI5Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:34.869255Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7486788861214082139:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:34.869329Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:24:36.134899Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jqe12hrn6tetvx8kt15mgawe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTQ0ODFmMzktYzM2YjI0NTEtYzliZTJjMzAtNWRkZmI5Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:36.139804Z node 10 :KQP_EXECUTER ERROR: ActorId: [10:7486788891278855542:2360] TxId: 281474976710669. Ctx: { TraceId: 01jqe12hrn6tetvx8kt15mgawe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTQ0ODFmMzktYzM2YjI0NTEtYzliZTJjMzAtNWRkZmI5Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Handle TEvProposeTransactionResult: unable to select coordinator. Tx canceled, actorId: [10:7486788891278855542:2360], previously selected coordinator: 72075186224037888, coordinator selected at propose result: 72075186224037890 2025-03-28T09:24:36.140100Z node 10 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=10&id=ZTQ0ODFmMzktYzM2YjI0NTEtYzliZTJjMzAtNWRkZmI5Mzk=, ActorId: [10:7486788878393953206:2360], ActorState: ExecuteState, TraceId: 01jqe12hrn6tetvx8kt15mgawe, Create QueryResponse for error on request, msg: 2025-03-28T09:24:36.140831Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01jqe12hrn6tetvx8kt15mgawe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTQ0ODFmMzktYzM2YjI0NTEtYzliZTJjMzAtNWRkZmI5Mzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:24:36.153037Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-03-28T09:24:36.153567Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:24:36.153695Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2025-03-28T09:24:36.154005Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:24:36.527940Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:24:36.551423Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7486788893257323677:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:24:36.643095Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7486788893257323677:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:24:36.762285Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7486788893257323677:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:24:37.024932Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7486788893257323677:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:24:37.170956Z node 11 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7486788876077454192:2246];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:37.171033Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:24:37.533151Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_tenant_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:24:37.539763Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7486788893257323677:2321], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> YdbYqlClient::BuildInfo >> TGRpcClientLowTest::GrpcRequestProxyCheckTokenWhenItIsSpecified_Check [GOOD] >> TGRpcClientLowTest::MultipleSimpleRequests >> YdbTableBulkUpsert::ValidRetry [GOOD] >> YdbTableBulkUpsert::Types >> YdbOlapStore::LogLast50 >> YdbYqlClient::RetryOperationAsync >> YdbTableBulkUpsert::DataValidation [GOOD] >> YdbTableBulkUpsert::DecimalPK >> ClientStatsCollector::PrepareQuery [GOOD] >> ClientStatsCollector::CounterCacheMiss ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::CanonizedJoinOrderTPCH13 [GOOD] Test command err: Trying to start YDB, gRPC: 26924, MsgBus: 3045 2025-03-28T09:23:04.457339Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788495749389161:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:04.474418Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016ec/r3tmp/tmpT1gIst/pdisk_1.dat 2025-03-28T09:23:05.287816Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:05.287926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:05.299347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:05.338881Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26924, node 1 2025-03-28T09:23:05.608053Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:05.608072Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:05.608079Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:05.608184Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3045 TClient is connected to server localhost:3045 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:06.667165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:23:06.692353Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:23:09.069011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788517224226159:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:09.069165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:09.069611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788517224226171:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:09.074277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:23:09.096291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788517224226173:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:23:09.175273Z node 1 :TX_PROXY ERROR: Actor# [1:7486788517224226224:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:09.459177Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788495749389161:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:09.459264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:09.648110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:23:10.096881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788517224226507:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:10.097139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788517224226507:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:10.097420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788517224226507:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:23:10.097538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788517224226507:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:23:10.097646Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788517224226507:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:23:10.097760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788517224226507:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:23:10.097863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788517224226507:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:23:10.097993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788517224226507:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:23:10.098116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788517224226507:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:23:10.098237Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7486788517224226507:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:23:10.098338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788517224226507:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:23:10.098434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486788517224226507:2358];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:23:10.099503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788517224226485:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:10.099540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788517224226485:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:10.099741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788517224226485:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:23:10.099875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788517224226485:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:23:10.099978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788517224226485:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:23:10.100076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788517224226485:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:23:10.100175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788517224226485:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:23:10.100273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788517224226485:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:23:10.100366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788517224226485:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:23:10.100463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788517224226485:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:23:10.100557Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7486788517224226485:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:23:10.100642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788517224226485:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:23:10.188540Z node 1 :TX_C ... tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.264646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039319;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.268227Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.269484Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.272961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.274240Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.278415Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.279735Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.283270Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.284631Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039406;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.288080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.289384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.292976Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.294228Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.298588Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.299384Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.304209Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.304551Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.309623Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.309939Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.315354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.315464Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039417;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.320511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.324609Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.325732Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.330251Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.331198Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.335607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.338083Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.341077Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.343694Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039424;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.345607Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.348958Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.350273Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.354662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.355649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.360675Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.360987Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.366259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039397;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.366605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.370954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.372076Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.375675Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039387;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.380181Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039405;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.382026Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039423;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.386038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039309;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.388213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:24.520107Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe11avva2h7c7yn586v7etj", SessionId: ydb://session/3?node_id=1&id=ODZlODBkNGEtN2U1NDg5MTYtZjYwOGNhOTgtMTM3ZTIwMzM=, Slow query, duration: 29.644190s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:24:24.872752Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:24.872764Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:24.873326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> YdbMonitoring::SelfCheckWithNodesDying >> TGRpcYdbTest::CreateTableBadRequest2 [GOOD] >> TGRpcYdbTest::CreateTableBadRequest3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 21219, MsgBus: 7150 2025-03-28T09:23:04.737413Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788497695848887:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:04.737449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016e9/r3tmp/tmpejtJdI/pdisk_1.dat 2025-03-28T09:23:05.431730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:05.431827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:05.446743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:05.541864Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21219, node 1 2025-03-28T09:23:05.775767Z node 1 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:05.775807Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:05.775814Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:05.775936Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7150 TClient is connected to server localhost:7150 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:06.579533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:06.634816Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:23:09.070787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788519170686035:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:09.070865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:09.071065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788519170686047:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:09.075343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:23:09.090088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788519170686049:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:23:09.188876Z node 1 :TX_PROXY ERROR: Actor# [1:7486788519170686100:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:09.636240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:23:09.743097Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788497695848887:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:09.743166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:09.890645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788519170686322:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:09.890903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788519170686322:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:09.891142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788519170686322:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:23:09.891278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788519170686322:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:23:09.891400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788519170686322:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:23:09.891493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788519170686322:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:23:09.891595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788519170686322:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:23:09.891690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788519170686322:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:23:09.891807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788519170686322:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:23:09.891958Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7486788519170686322:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:23:09.892093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788519170686322:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:23:09.892195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788519170686322:2354];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:23:09.903412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788519170686312:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:09.903526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788519170686312:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:09.903765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788519170686312:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:23:09.903885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788519170686312:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:23:09.903983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788519170686312:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:23:09.904094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788519170686312:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:23:09.904206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788519170686312:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:23:09.904326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788519170686312:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:23:09.904435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788519170686312:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:23:09.904548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788519170686312:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:23:09.904656Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7486788519170686312:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:23:09.904756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788519170686312:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:23:09.945480Z node 1 :TX_C ... tablet_id=72075186224039315;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.283414Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039281;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.290375Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.293944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039325;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.294870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.298776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.305961Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.310639Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.312009Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.315634Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.316513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.321162Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.321285Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.326537Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.326978Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039207;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.333258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039279;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.333695Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.338882Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.339038Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039361;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.343764Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.344434Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.348215Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.348562Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.352731Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.357353Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.357935Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.362122Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.363425Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039265;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.366866Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039378;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.369736Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039297;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.371603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.375096Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.376138Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.380738Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.380839Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.386711Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.386989Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.392473Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.393104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.397530Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.398069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.402167Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.403040Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.406943Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:25.407972Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:24:25.411473Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:24:25.446403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039314;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714;
2025-03-28T09:24:25.552041Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe119sme4q2qrmmjrk39j8d", SessionId: ydb://session/3?node_id=1&id=YjQ1ODkwOWItMTUwMzM1ZDUtMjJhNmY4YzItZTUxMWJjZjI=, Slow query, duration: 31.770764s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b
2025-03-28T09:24:25.761845Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:24:25.761873Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
2025-03-28T09:24:25.762573Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716;
>> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids
>> YdbYqlClient::TestDoubleKey
>> YdbYqlClient::DiscoveryLocationOverride [GOOD]
>> YdbYqlClient::DeleteTableWithDeletedIndex
>> KqpJoinOrder::ShuffleEliminationOneJoin [GOOD]
>> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD]
>> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL]
Test command err: 2025-03-28T09:23:57.895925Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788722612118698:2078];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:23:57.895980Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001455/r3tmp/tmpUngz3u/pdisk_1.dat
2025-03-28T09:23:58.796859Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:23:58.828236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:23:58.828318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
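For reference, the statement text carried by the two KQP_SLOW_LOG records in the KqpJoinOrder::FourWayJoinWithPredsAndEquivAndLeft+ColumnStore output above (the 29.644190s and 31.770764s slow queries both report the same DDL batch), reproduced verbatim with the escaped \n sequences expanded and indentation normalized:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);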
2025-03-28T09:23:58.844340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11079, node 1 2025-03-28T09:23:59.279601Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:59.279631Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:59.279642Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:59.279785Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61073 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:59.834145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:61073 2025-03-28T09:24:02.321820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:02.667520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:02.667717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusNameConflict, reason: Check failed: path: '/Root/Foo/TimestampIndex/indexImplTable', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges), at schemeshard: 72057594046644480 2025-03-28T09:24:02.675787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusNameConflict, reason: Check failed: path: '/Root/Foo/TimestampIndex/indexImplTable', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges), operation: ALTER TABLE, path: Root/Foo/TimestampIndex/indexImplTable 2025-03-28T09:24:02.676108Z node 1 :TX_PROXY ERROR: Actor# [1:7486788744086956621:2918] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/Foo/TimestampIndex/indexImplTable\', error: path is not a common path (id: [OwnerId: 72057594046644480, LocalPathId: 4], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } Error 128: Administrative access denied TClient::Ls request: /Root/Foo/TimestampIndex/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153842542 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "Timestamp" Type: "Int64" TypeId: 3 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false ... 
(TRUNCATED) 2025-03-28T09:24:02.732116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex Propose, path: /Root/Foo/TimestampIndex, operationId: 281474976710660:0, transaction: WorkingDir: "/Root/Foo" OperationType: ESchemeOpAlterTableIndex AlterTableIndex { Name: "TimestampIndex" State: EIndexStateReady } Internal: false, at schemeshard: 72057594046644480 2025-03-28T09:24:02.732265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:02.732282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-03-28T09:24:02.732716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:02.732732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-03-28T09:24:02.735983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: root@builtin, status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo/TimestampIndex/indexImplTable waiting... 2025-03-28T09:24:02.779021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153842822, transactions count in step: 1, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo/TimestampIndex/indexImplTable 2025-03-28T09:24:02.795321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2025-03-28T09:24:02.795369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153842542 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "Timestamp" Type: "Int64" TypeId: 3 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false ... 
(TRUNCATED) 2025-03-28T09:24:02.803842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex Propose, path: /Root/Foo/TimestampIndex, operationId: 281474976710661:0, transaction: WorkingDir: "/Root/Foo" OperationType: ESchemeOpAlterTableIndex AlterTableIndex { Name: "TimestampIndex" State: EIndexStateReady } Internal: false, at schemeshard: 72057594046644480 2025-03-28T09:24:02.803950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:02.803967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo/TimestampIndex/indexImplTable, pathId: , opId: 281474976710661:1, at schemeshard: 72057594046644480 2025-03-28T09:24:02.804334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710661:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:02.804347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:1, at schemeshard: 72057594046644480 2025-03-28T09:24:02.806113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710661, database: /Root, subject: root@builtin, status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo/TimestampIndex/indexImplTable waiting... 2025-03-28T09:24:02.857556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153842899, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:24:02.875820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710661:0 2025-03-28T09:24:02.875869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710661:1 TClient::Ls request: /Root/Foo/TimestampIndex/indexImplTable TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153842542 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "Timestamp" Type: "Int64" TypeId: 3 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false ... (TRUNCATED) 2025-03-28T09:24:02.881402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTableIndex Propose, path: /Root/Foo/TimestampIndex, operationId: 281474976710662:0, transaction: WorkingDir: "/Root/Foo" OperationType: ESchemeOpAlterTableIndex AlterTableIndex { Name: "TimestampIndex" State: EIndexStateReady } Internal: false, at schemeshard: 72057594046644480 2025-03-28T09:24:02.881525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710662:2, propose status:StatusAccep ... 
T09:24:10.960163Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:10.960186Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:10.960195Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:10.960322Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6254 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:11.364225Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:11.514864Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:11.669593Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:24:11.896342Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-28T09:24:15.909640Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788801441752182:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:15.909696Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001455/r3tmp/tmpBeK5aF/pdisk_1.dat 2025-03-28T09:24:16.124908Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:16.139556Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:16.139639Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:16.142957Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31616, node 10 2025-03-28T09:24:16.311365Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-28T09:24:16.311415Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:16.311426Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:16.311595Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30018 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:16.616066Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:21.099000Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788825202794188:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:21.099095Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001455/r3tmp/tmp8J76Lk/pdisk_1.dat 2025-03-28T09:24:21.433002Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:21.473603Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:21.473693Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:21.478020Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61421, node 13 2025-03-28T09:24:21.559423Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:21.559446Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:21.559454Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:21.559577Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30141 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:21.820132Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:21.915674Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:41922" , at schemeshard: 72057594046644480 2025-03-28T09:24:21.916157Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:21.916187Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480 2025-03-28T09:24:21.920553Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976715658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:24:21.920707Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-03-28T09:24:21.920943Z node 13 :TX_PROXY ERROR: Actor# [13:7486788825202795147:2598] txid# 281474976715658, issues: { message: "Column stores are not supported" severity: 1 } assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: 
(PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) TBackTrace::Capture()+28 (0x1C67422C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1CB312E0) NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+8721 (0x1C1A6F71) std::__y1::__function::__func, void ()>::operator()()+280 (0x1C1CFE08) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1CB68306) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1CB37E59) NTestSuiteYdbLogStore::TCurrentTest::Execute()+1204 (0x1C1CEFD4) NUnitTest::TTestFactory::Execute()+2438 (0x1CB39726) NUnitTest::RunMain(int, char**)+5213 (0x1CB6287D) ??+0 (0x7FBB9FFF5D90) __libc_start_main+128 (0x7FBB9FFF5E40) _start+41 (0x1902A029) >> YdbScripting::BasicV1 [GOOD] >> YdbScripting::MultiResults >> KqpJoinOrder::TPCDS23+ColumnStore [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBaseDn [GOOD] >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn >> YdbImport::Simple >> YdbYqlClient::TestReadWrongTable [GOOD] >> YdbYqlClient::TestReadTableMultiShard [GOOD] >> YdbYqlClient::TestReadTableMultiShardUseSnapshot >> YdbQueryService::TestForbidExecuteWithoutAttach [GOOD] >> YdbQueryService::TestCreateDropAttachSession >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesCorrectCerts ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::LdapAuthWithInvalidPassword [GOOD] Test command err: 2025-03-28T09:24:17.988520Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788808475709766:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:17.988678Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00142a/r3tmp/tmpDTSbgP/pdisk_1.dat 2025-03-28T09:24:18.741423Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:18.755906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:18.756005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:18.761237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2232, node 1 2025-03-28T09:24:18.930990Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:18.931013Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:18.931023Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:18.931162Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18784 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:19.265651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:22.951032Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788829484989699:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:22.951077Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00142a/r3tmp/tmpvwgoMy/pdisk_1.dat 2025-03-28T09:24:23.180321Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:23.210524Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:23.210610Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:23.215177Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5745, node 4 2025-03-28T09:24:23.306049Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:23.306066Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:23.306073Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:23.306188Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16658 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:23.629103Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:27.566438Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788851267810051:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:27.566482Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00142a/r3tmp/tmpsPGoDq/pdisk_1.dat 2025-03-28T09:24:27.796738Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6988, node 7 2025-03-28T09:24:27.888283Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:27.888376Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:27.892112Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:27.938313Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:27.938333Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:27.938341Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:27.938479Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12329 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:28.232111Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:32.803758Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788872562272937:2165];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:32.803879Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00142a/r3tmp/tmpWRDACi/pdisk_1.dat 2025-03-28T09:24:33.080422Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3456, node 10 2025-03-28T09:24:33.259688Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:33.259781Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:33.327080Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:33.455443Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:33.455471Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:33.455477Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:33.455830Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6884 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:33.799729Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:37.987020Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788894489132777:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:37.987125Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00142a/r3tmp/tmpIezEtG/pdisk_1.dat 2025-03-28T09:24:38.174279Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:38.216766Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:38.216857Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:38.221819Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6445, node 13 2025-03-28T09:24:38.325569Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:38.325594Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:38.325601Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:38.325750Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24605 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:38.586641Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::SessionAcquireAcceptedCallback [GOOD] Test command err: 2025-03-28T09:24:17.199266Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788809892827974:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:17.199324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001432/r3tmp/tmpl5gmyu/pdisk_1.dat 2025-03-28T09:24:17.709727Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:17.719518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:17.719603Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:17.732948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3868, node 1 2025-03-28T09:24:18.069708Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:18.069730Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:18.069738Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:18.069866Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63576 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:18.516454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:18.641167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:18.768791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:18.869000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropKesus, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:24:18.884561Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-28T09:24:18.890340Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-03-28T09:24:22.352034Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788831387618086:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:22.352171Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001432/r3tmp/tmpA4kmXE/pdisk_1.dat 2025-03-28T09:24:22.752153Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:22.774092Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:22.774211Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:22.777810Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11632, node 4 2025-03-28T09:24:22.851385Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:22.851406Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:22.851412Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:22.851536Z node 4 :NET_CLASSIFIER 
ERROR: got bad distributable configuration TClient is connected to server localhost:20812 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:23.217670Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:27.621342Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788854847774148:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:27.621514Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001432/r3tmp/tmp2nD45s/pdisk_1.dat 2025-03-28T09:24:27.876939Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24876, node 7 2025-03-28T09:24:27.980051Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:27.980135Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:28.034765Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:28.143302Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:28.143336Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:28.143342Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:28.143466Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5709 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:28.391592Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:28.409666Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:24:28.504889Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:32.342443Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788873516360545:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:32.342507Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001432/r3tmp/tmplyCeoz/pdisk_1.dat 2025-03-28T09:24:32.678170Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:32.730202Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:32.730302Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:32.735887Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18667, node 10 2025-03-28T09:24:32.897938Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:32.897965Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:32.897973Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:32.898130Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22724 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:33.225921Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:33.356885Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:37.363198Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788897519799967:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:37.363293Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001432/r3tmp/tmptnMnvz/pdisk_1.dat 2025-03-28T09:24:37.564549Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:37.608894Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:37.609002Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:37.613969Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29530, node 13 2025-03-28T09:24:37.711462Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:37.711499Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:37.711509Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:37.711662Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25909 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:38.012245Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:38.134728Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480 >> TGRpcNewCoordinationClient::BasicMethods [GOOD] >> TYqlDateTimeTests::IntervalKey [GOOD] >> TYqlDateTimeTests::SimpleOperations >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideAnyCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadWrongTable [GOOD] Test command err: 2025-03-28T09:24:22.123026Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788831065123174:2214];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:22.124469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00141a/r3tmp/tmpZd0pus/pdisk_1.dat 2025-03-28T09:24:23.055763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:23.055837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:23.067696Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:23.085313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64336, node 1 2025-03-28T09:24:23.379303Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:23.379347Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:23.379355Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:23.379447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13816 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:23.737977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:23.763210Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:24:26.060847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788848244993240:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:26.060902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788848244993252:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:26.060928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:26.065470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:26.094158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788848244993254:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:24:26.187633Z node 1 :TX_PROXY ERROR: Actor# [1:7486788848244993330:2683] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:28.307612Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788857879219416:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:28.307676Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00141a/r3tmp/tmpV5E0m5/pdisk_1.dat 2025-03-28T09:24:28.460209Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:28.489621Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:28.489694Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:28.496981Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10439, node 4 2025-03-28T09:24:28.615266Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:28.615288Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:28.615296Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:28.615422Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3749 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:28.844569Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:31.245537Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788870764122300:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:31.245608Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788870764122311:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:31.245622Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:31.249258Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:31.291828Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486788870764122314:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:24:31.392042Z node 4 :TX_PROXY ERROR: Actor# [4:7486788870764122391:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:33.052145Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788876812804388:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:33.052714Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00141a/r3tmp/tmptIu32U/pdisk_1.dat 2025-03-28T09:24:33.296337Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:33.331174Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:33.331260Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:33.337222Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3138, node 7 2025-03-28T09:24:33.431473Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:33.431497Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:33.431506Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:33.431633Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14120 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:33.690029Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:36.141633Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788889697707310:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:36.141718Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:36.171637Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:36.322636Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788889697707478:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:36.322727Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:36.322776Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788889697707483:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:36.326110Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:36.342817Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486788889697707485:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:36.445061Z node 7 :TX_PROXY ERROR: Actor# [7:7486788889697707579:2807] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:36.676489Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe12kb1871p9xy6yybbf7ac, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NDQxODU1MDYtMmU1MjUzODgtOTVmNDVhNTktMjcxNWRmYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:36.684255Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe12kb1871p9xy6yybbf7ac, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NDQxODU1MDYtMmU1MjUzODgtOTVmNDVhNTktMjcxNWRmYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:36.687695Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe12kb1871p9xy6yybbf7ac, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NDQxODU1MDYtMmU1MjUzODgtOTVmNDVhNTktMjcxNWRmYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:36.816018Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe12kqy8j656r0ax17frcqc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NTViMzgxYTUtMjNkNmJlMDItY2UzZWUwOGQtMWU5MmRhYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:36.906733Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NDQxODU1MDYtMmU1MjUzODgtOTVmNDVhNTktMjcxNWRmYTQ=, ActorId: [7:7486788889697707307:2335], ActorState: ExecuteState, TraceId: 01jqe12kv56cx28jxnkdh055gh, Create QueryResponse for error on request, msg: 2025-03-28T09:24:38.516832Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788901135525972:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:38.517561Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00141a/r3tmp/tmpYNdC4q/pdisk_1.dat 2025-03-28T09:24:38.654713Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:38.690047Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:38.690140Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:38.693417Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20262, node 10 2025-03-28T09:24:38.833798Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:38.833819Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:38.833839Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:38.833981Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration 
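Note: the repeated KQP_WORKLOAD_SERVICE "Resource pool default not found" warnings above appear to be the expected first-query path rather than a test failure: the workload service looks up the "default" pool, gets NOT_FOUND, creates it under /Root/.metadata/workload_manager/pools/default, and the later TX_PROXY "path exist, request accepts it" message is the scheduled doublecheck racing the already-completed create. A pool of this kind can also be declared explicitly; a minimal YQL sketch, assuming a YDB build that supports the CREATE RESOURCE POOL statement (the pool name and settings below are illustrative assumptions, not taken from this log):

    CREATE RESOURCE POOL my_pool WITH (   -- my_pool: hypothetical name, not from this log
        CONCURRENT_QUERY_LIMIT = 10,      -- illustrative cap on concurrently running queries
        QUEUE_SIZE = 100                  -- illustrative bound on queued queries
    );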
TClient is connected to server localhost:28871 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:39.152124Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:39.234925Z node 10 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jqe12p5z3tj0qt0yqtwn92mn, sdkBuildInfo# ydb-cpp-sdk/3.2.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:46968, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.992802s 2025-03-28T09:24:39.262898Z node 10 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jqe12p6wcmw6wepdsm8wqadz, sdkBuildInfo# ydb-cpp-sdk/3.2.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:46980, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:41.669371Z node 10 :GRPC_SERVER DEBUG: Got grpc request# ReadTableRequest, traceId# 01jqe12rj52s8sn0djrfpgt4bj, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:46988, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:41.671011Z node 10 :TX_PROXY ERROR: [ReadTable [10:7486788914020428834:2336] TxId# 281474976715658] Navigate request failed for table 'Root/NoTable' 2025-03-28T09:24:41.671127Z node 10 :TX_PROXY ERROR: [ReadTable [10:7486788914020428834:2336] TxId# 281474976715658] RESPONSE Status# ResolveError shard: 0 table: Root/NoTable 2025-03-28T09:24:41.671577Z node 10 :READ_TABLE_API NOTICE: [10:7486788914020428833:2336] Finish grpc stream, status: 400070
: Error: Failed to resolve table Root/NoTable, code: 200400
: Error: Got ResolveError response from TxProxy
: Error: Failed to resolve table Root/NoTable 2025-03-28T09:24:41.678572Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d6880] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-28T09:24:41.678595Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b2280] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-28T09:24:41.678833Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d6e80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-28T09:24:41.678836Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b1c80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-28T09:24:41.679016Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d6280] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:41.679025Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d5c80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-28T09:24:41.679203Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b1680] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:41.679218Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b1080] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:41.679403Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b0a80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:41.679415Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b3480] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-03-28T09:24:41.679575Z node 10 :GRPC_SERVER DEBUG: [0x51a0000afe80] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-28T09:24:41.679575Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b2e80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-28T09:24:41.679737Z node 10 :GRPC_SERVER DEBUG: [0x51a0000af880] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:41.679745Z node 10 :GRPC_SERVER DEBUG: [0x51a0000af280] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:41.679922Z node 10 :GRPC_SERVER DEBUG: [0x51a000081080] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:41.679949Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b0480] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-28T09:24:41.680089Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b2880] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 >> YdbYqlClient::BuildInfo [GOOD] >> YdbYqlClient::AlterTableAddIndexWithDataColumn >> YdbTableBulkUpsertOlap::UpsertCsvBug [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::ShuffleEliminationOneJoin [GOOD] Test command err: Trying to start YDB, gRPC: 63778, MsgBus: 27685 2025-03-28T09:23:11.503928Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788527227942400:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:11.527010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016e5/r3tmp/tmpmDtsuT/pdisk_1.dat 
2025-03-28T09:23:12.156817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:12.156895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:12.167733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:12.208149Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63778, node 1 2025-03-28T09:23:12.415246Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:12.415288Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:12.415305Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:12.415424Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27685 TClient is connected to server localhost:27685 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:13.417672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:16.151879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788548702779402:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:16.152006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:16.154808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788548702779414:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:16.159270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:23:16.173421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788548702779416:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:23:16.275122Z node 1 :TX_PROXY ERROR: Actor# [1:7486788548702779467:2343] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:16.471167Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788527227942400:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:16.471255Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:16.735312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:23:16.971824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788548702779698:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:16.972070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788548702779698:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:16.972370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788548702779698:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:23:16.972496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788548702779698:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:23:16.972603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788548702779698:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:23:16.972732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788548702779698:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:23:16.972886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788548702779698:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:23:16.972997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788548702779698:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:23:16.973106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788548702779698:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:23:16.973221Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037902;self_id=[1:7486788548702779698:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:23:16.973325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788548702779698:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:23:16.973448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037902;self_id=[1:7486788548702779698:2352];tablet_id=72075186224037902;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:23:16.982659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788548702779736:2361];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:16.982851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788548702779736:2361];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:16.983116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788548702779736:2361];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:23:16.983253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788548702779736:2361];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:23:16.983370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788548702779736:2361];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:23:16.983485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788548702779736:2361];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:23:16.983627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788548702779736:2361];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:23:16.983765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788548702779736:2361];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:23:16.983893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788548702779736:2361];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:23:16.984023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788548702779736:2361];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:23:16.984129Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486788548702779736:2361];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:23:16.984240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788548702779736:2361];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:23:17.015427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788548702779702:2354];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.c ... tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.640564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039263;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.645884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.646391Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039203;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.651154Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039229;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.652213Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.657054Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039225;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.657772Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039295;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.664044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.664044Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039237;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.670280Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.670411Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039269;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.676482Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039255;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
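Note: the long run of TX_COLUMNSHARD_TX "finished_tx" records above (and continuing below) shows a single transaction, tx_id 281474976710714, being committed on each column-store tablet it touches. This fan-out is consistent with the KQP_SLOW_LOG record near the end of this test's output, whose DDL requests at least 240 partitions per table, so one CREATE TABLE reaches hundreds of tablets. The first of those statements, reproduced from the slow-query text with its escaped newlines expanded (whitespace reflowed only):

    CREATE TABLE t1 (
      id1 Int32 NOT NULL,
      PRIMARY KEY (id1)
    ) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);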
2025-03-28T09:24:31.676900Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039408;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.681370Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039261;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.682644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.687662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039205;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.688287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039201;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.694268Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039259;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.694481Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.698944Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039257;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.700484Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.705728Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039217;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.706620Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039331;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.709881Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039241;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.713020Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039223;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.715437Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.718798Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039249;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.720642Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039277;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.724361Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039233;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.724988Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.729136Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039209;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.730567Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.732548Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039211;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.736870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039243;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.736894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039301;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.742305Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039221;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.742531Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.747867Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039235;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.748081Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039303;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.753649Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039283;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.753979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.759549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.759632Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039239;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.765456Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039271;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.765463Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.771099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:31.962470Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe11gdpbmk8st3ay5817g5f", SessionId: ydb://session/3?node_id=1&id=OWE2Zjk3NTMtN2VlZGMwYTktZTIxNjVmZjUtYzJhMmI2NzU=, Slow query, duration: 31.395097s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:24:32.233582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:32.233708Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:32.234171Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcNewCoordinationClient::BasicMethods [GOOD] Test command err: 2025-03-28T09:24:17.314224Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788810806134326:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:17.314268Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00142b/r3tmp/tmp7ok48x/pdisk_1.dat 2025-03-28T09:24:18.029441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:18.029521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:18.041429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:18.078697Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28861, node 1 2025-03-28T09:24:18.439262Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:18.439285Z node 1 :NET_CLASSIFIER WARN: will try to initialize 
from file: (empty maybe) 2025-03-28T09:24:18.439292Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:18.439398Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:18.904845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:18.944511Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:24:21.361898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788827986004541:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:21.362065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:21.362389Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788827986004553:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:21.366354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:21.392819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788827986004555:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:24:21.476750Z node 1 :TX_PROXY ERROR: Actor# [1:7486788827986004627:2678] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:23.524136Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788837253538435:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:23.524181Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00142b/r3tmp/tmpb3dodP/pdisk_1.dat 2025-03-28T09:24:24.057088Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:24.116190Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:24.116275Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 15790, node 4 2025-03-28T09:24:24.237516Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:24.287536Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:24.287556Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:24.287564Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:24.287703Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22414 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:24.588494Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:24.603420Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:24:26.882818Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788850138441387:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:26.882901Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:26.908434Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:27.056415Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788854433408855:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:27.056485Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:27.056665Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788854433408860:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:27.060267Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:27.104851Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486788854433408862:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:27.195818Z node 4 :TX_PROXY ERROR: Actor# [4:7486788854433408935:2802] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:29.006394Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788862461739900:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:29.006457Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00142b/r3tmp/tmpoIIKcd/pdisk_1.dat 2025-03-28T09:24:29.150548Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:29.181998Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:29.182082Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:29.185936Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7687, node 7 2025-03-28T09:24:29.286402Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:29.286425Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:29.286446Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:29.286585Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26233 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:29.588810Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:29.644111Z node 7 :TX_PROXY ERROR: Actor# [7:7486788862461740838:2602] txid# 281474976715658, Access denied for bad@builtin on path /Root, with access CreateTable 2025-03-28T09:24:29.644252Z node 7 :TX_PROXY ERROR: Actor# [7:7486788862461740838:2602] txid# 281474976715658, issues: { message: "Access denied for bad@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-03-28T09:24:33.559615Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788877113243919:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:33.559740Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00142b/r3tmp/tmp3ud7de/pdisk_1.dat 2025-03-28T09:24:33.837151Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16911, node 10 2025-03-28T09:24:33.912452Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:33.912544Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:33.948934Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:34.015404Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:34.015428Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:34.015439Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:34.015566Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2837 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:34.355392Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:34.379348Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:24:34.474955Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:34.628434Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:34.683571Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:24:34.735961Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterKesus, opId: 281474976715661:0, at schemeshard: 72057594046644480 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00142b/r3tmp/tmpesWoSR/pdisk_1.dat 2025-03-28T09:24:39.265361Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:24:39.278458Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:39.336828Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:39.336926Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:39.340536Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24910, node 13 2025-03-28T09:24:39.527375Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:39.527403Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:39.527413Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:39.527562Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3847 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:24:39.813848Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:39.895268Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> YdbTableBulkUpsert::RetryOperation [GOOD] >> YdbTableBulkUpsert::Limits [GOOD] >> YdbTableBulkUpsert::Overload >> YdbTableBulkUpsert::Types [GOOD] >> YdbTableBulkUpsert::Uint8 >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateAndAltertTableWithKeyBloomFilter [GOOD] Test command err: 2025-03-28T09:24:14.406114Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788797471502580:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:14.406156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00143c/r3tmp/tmp9IztZD/pdisk_1.dat 2025-03-28T09:24:14.964857Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:14.975779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:14.975854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:14.984743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13586, node 1 2025-03-28T09:24:15.109976Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:15.109996Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:15.110002Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:15.110101Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22372 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:24:15.616967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:15.685251Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:47406 Call 2025-03-28T09:24:15.712377Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:47422 2025-03-28T09:24:17.999423Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, AllowYdbRequestsWithoutDatabase is off, there is no db provided from user, database: /Root, user: root@builtin, from ip: ipv6:[::1]:49148 Call Call 2025-03-28T09:24:18.067847Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, user is a admin, database: /Root, user: root@builtin, from ip: ipv6:[::1]:49172 2025-03-28T09:24:18.081568Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: Skip check permission connect db, user is a admin, database: /Root, user: root@builtin, from ip: ipv6:[::1]:49188 2025-03-28T09:24:18.082956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:19.831414Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788818448435361:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:19.831520Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00143c/r3tmp/tmp53Oaom/pdisk_1.dat 2025-03-28T09:24:20.012413Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:20.041673Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:20.041767Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:20.047652Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2743, node 4 2025-03-28T09:24:20.219021Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:20.219040Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:20.219048Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:20.219473Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16840 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:20.448037Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:20.463510Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:24:22.990221Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Table-1, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:22.991459Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:22.991484Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:22.993852Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Table-1 2025-03-28T09:24:23.093511Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153863136, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:24:23.174727Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715658:0 2025-03-28T09:24:23.198090Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-2, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:23.198705Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:23.205868Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715659, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, path: /Root/Table-2 2025-03-28T09:24:23.254579Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153863297, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:24:23.278483Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715659, done: 0, blocked: 1 2025-03-28T09:24:23.281300Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715659:0 2025-03-28T09:24:23.293672Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-3, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:24:23.294082Z 
node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:23.294105Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-4, opId: 281474976715660:1, at schemeshard: 72057594046644480 2025-03-28T09:24:23.294300Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:23.299708Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715660, subject: , status: StatusAccepted, operation: CREATE TABLE COPY FROM, dst path: /Root/Table-3, dst path: /Root/Table-4 2025-03-28T09:24:23.359184Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153863402, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:24:23.391194Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715660, done: 0, blocked: 2 2025-03-28T09:24:23.393459Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:0 2025-03-28T09:24:23.393548Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715660:1 2025-03-28T09:24:23.406454Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-5, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:24:23.406904Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:23.406919Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-6, opId: 281474976715661:1, at schemeshard: 72057594046644480 2025-03-28T09:24:23.407164Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:23.407176Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-7, opId: 281474976715661:2, at schemeshard: 72057594046644480 2025-03-28T09:24:23.407397Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715661:4, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:23.407408Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /Root/Table-8, opId: 281474976715661:3, at schemeshard: 720 ... VE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:27.160160Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:27.210892Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:27.251348Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:27.251370Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:27.251378Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:27.251516Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24112 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:27.519165Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:24112 2025-03-28T09:24:30.067591Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:24112 TClient::Ls request: Root/Test TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Test" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743153870164 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Test" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) 2025-03-28T09:24:30.356556Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:24112 TClient::Ls request: Root/Test TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Test" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743153870164 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Test" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... 
(TRUNCATED) 2025-03-28T09:24:32.547043Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788875594701904:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:32.547178Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00143c/r3tmp/tmpJuYBFF/pdisk_1.dat 2025-03-28T09:24:32.765282Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25253, node 10 2025-03-28T09:24:32.876397Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:32.876573Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:32.934523Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:33.006905Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:33.006931Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:33.006941Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:33.007099Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28493 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:33.395644Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:33.447778Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:24:36.364014Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:36.486230Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:36.545675Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:24:38.496046Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788901057561801:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:38.496146Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00143c/r3tmp/tmpYoM3A2/pdisk_1.dat 2025-03-28T09:24:38.706962Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23837, node 13 2025-03-28T09:24:38.814858Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:38.814981Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:38.846386Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:38.851158Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:38.851190Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:38.851201Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:38.851402Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:24:39.189562Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:42.592259Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:42.769634Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 >> TLocksTest::Range_IncorrectNullDot2 [GOOD] >> ClientStatsCollector::CounterCacheMiss [GOOD] >> ClientStatsCollector::CounterRetryOperation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS23+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 13080, MsgBus: 8027 2025-03-28T09:22:18.747336Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788298737258111:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:18.766630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00178e/r3tmp/tmp3kfvKN/pdisk_1.dat 2025-03-28T09:22:19.468393Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:19.468448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:19.470056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:22:19.502756Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13080, node 1 2025-03-28T09:22:19.699022Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:19.699039Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:19.699046Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:19.699145Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8027 TClient is connected to server localhost:8027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:22:20.494510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:20.510503Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:22:22.827397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788315917127943:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:22.827532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:22.831061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788315917127955:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:22:22.838899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:22:22.853789Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:22:22.854262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788315917127957:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:22:22.943778Z node 1 :TX_PROXY ERROR: Actor# [1:7486788315917128008:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:22:23.307782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:22:23.626261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788320212095565:2358];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:23.628287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788320212095561:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:23.628485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788320212095561:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:23.628709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788320212095561:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:23.628830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788320212095561:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:23.628922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788320212095561:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:23.629020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788320212095561:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:23.629113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788320212095561:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:23.629222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788320212095561:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:23.629363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788320212095561:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:23.629486Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7486788320212095561:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:23.629601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788320212095561:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:23.629713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788320212095561:2356];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:23.631845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788320212095565:2358];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:22:23.632040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788320212095565:2358];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:22:23.632132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788320212095565:2358];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:22:23.632218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788320212095565:2358];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:22:23.632320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788320212095565:2358];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:22:23.632416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788320212095565:2358];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:22:23.632505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788320212095565:2358];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:22:23.632591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788320212095565:2358];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:22:23.632672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788320212095565:2358];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:22:23.632789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486788320212095565:2358];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:22:23.632898Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7486788320212095565:2358];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:22:23.699487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486788320212095567:2359];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:22:23.699551Z node ... ller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.747395Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039422;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.749247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039414;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.752919Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039396;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.757288Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.760644Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.762821Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039421;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.768104Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039253;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.768286Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039369;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.774111Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.775400Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.778651Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039386;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.780912Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.782457Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039398;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.786829Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.793765Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039384;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.799032Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039353;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.804523Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039390;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.808318Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039402;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.809570Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039394;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.815087Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039400;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.820428Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039404;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.821247Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039343;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.826400Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039410;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.829668Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.839890Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.843313Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039411;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.849092Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.853488Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039409;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.858508Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039381;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.862378Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.871494Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039403;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.876059Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039416;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.881164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039351;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.884327Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.889483Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039399;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.893984Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039357;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.904139Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039401;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.907904Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039383;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.913582Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.917257Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.927331Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.930982Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.936605Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039412;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.940897Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:45.952153Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039232;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:23:46.230909Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe0zxsb37226ep25s0fw2qv", SessionId: ydb://session/3?node_id=1&id=MzBlNTY5ZDUtNjA2NzQ2YmItZmNmY2I2MTUtMWU1MTlkODk=, Slow query, duration: 37.511523s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:23:46.819355Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:46.819771Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:23:46.820034Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486788629449797232:11010];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224039094;receive=72075186224038933; 2025-03-28T09:23:46.820329Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; >> YdbTableBulkUpsert::DecimalPK [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldFail >> TGRpcYdbTest::CreateTableBadRequest3 [GOOD] >> TGRpcYdbTest::CreateTableWithIndex >> YdbYqlClient::TestDoubleKey [GOOD] >> YdbYqlClient::TestMultipleModifications ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::RetryOperation [GOOD] Test command err: 2025-03-28T09:24:14.335192Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788797259059936:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:14.340159Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001444/r3tmp/tmp0PyJDY/pdisk_1.dat 2025-03-28T09:24:14.921015Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:14.933198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:14.933257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:14.940310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1298, node 1 2025-03-28T09:24:15.239232Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2025-03-28T09:24:15.239285Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:15.239300Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:15.239494Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4111 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:15.766306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:17.997909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 SUCCESS 3 rows in 0.034197s 2025-03-28T09:24:18.828144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788814438931932:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:18.828258Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:18.828767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788814438931946:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:18.832459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:18.862026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788814438931948:2442], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:18.959007Z node 1 :TX_PROXY ERROR: Actor# [1:7486788814438932045:4123] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:19.303852Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788797259059936:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:19.303898Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:24:19.599852Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1228a9g10q16kf6gxj31p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI0OTY4MGYtNjRkNDJmLTIzY2EyYmY1LWZkMjBlOWRk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 3 rows 2025-03-28T09:24:21.394189Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788825453818467:2089];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001444/r3tmp/tmpnIEzcT/pdisk_1.dat 2025-03-28T09:24:21.451285Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:24:21.571240Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:21.604573Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:21.604647Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:21.609780Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28575, node 4 2025-03-28T09:24:21.743275Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:21.743294Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:21.743301Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:21.743406Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3631 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:22.045447Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:24.715568Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/ui8' Only async-indexed tables are supported by BulkUpsert
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' unknown table 2025-03-28T09:24:26.920027Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788847997375108:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:26.920084Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001444/r3tmp/tmpJiVadh/pdisk_1.dat 2025-03-28T09:24:27.168916Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26449, node 7 2025-03-28T09:24:27.253115Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:27.253271Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:27.273585Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:27.277885Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:27.277906Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:27.277915Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:27.278063Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3413 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:27.540471Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:30.173086Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 28147 ... upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 32 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 64 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 128 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 256 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 512 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 1024 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 2048 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 4096 usec
: Error: Bulk upsert to table '/Root/ui32' Deadline exceeded 8192 usec
: Error: Bulk upsert to table '/Root/ui32' Deadline exceeded 16384 usec
: Error: Bulk upsert to table '/Root/ui32' longTx ydb://long-tx/read-only timed out, duration: 0 sec 32768 usec 2025-03-28T09:24:32.735749Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788875284394191:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:32.735837Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001444/r3tmp/tmpEpfxZT/pdisk_1.dat 2025-03-28T09:24:33.028333Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:33.075100Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:33.075200Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:33.083968Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19447, node 10 2025-03-28T09:24:33.263376Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:33.263402Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:33.263412Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:33.263545Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7872 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:33.588155Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
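The doubling durations in the timeout errors above (32 usec, 64 usec, ... up to 32768 usec) reflect a per-attempt deadline that is doubled after every timed-out bulk upsert before retrying. A minimal, self-contained C++ sketch of that pattern follows; TryBulkUpsert here is a hypothetical stand-in for the real request call, not the test's actual code or the YDB SDK API:

    #include <chrono>
    #include <functional>

    // Hypothetical request: returns true if the bulk upsert finished
    // within the given per-attempt deadline (a stand-in, not the SDK API).
    using TTryBulkUpsert = std::function<bool(std::chrono::microseconds deadline)>;

    // Retry with an exponentially growing deadline, mirroring the
    // 32 usec -> 32768 usec progression in the timeout errors above.
    bool RetryWithGrowingDeadline(const TTryBulkUpsert& tryBulkUpsert) {
        using std::chrono::microseconds;
        for (microseconds deadline{32}; deadline <= microseconds{32768}; deadline *= 2) {
            if (tryBulkUpsert(deadline)) {
                return true;   // the request fit into the current deadline
            }
            // Timed out: double the deadline and try again.
        }
        return false;          // every deadline up to 32768 usec was exceeded
    }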
2025-03-28T09:24:36.415183Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Injecting ABORTED 10 times Result: ABORTED Injecting ABORTED 6 times Result: ABORTED Injecting ABORTED 5 times Result: SUCCESS Injecting ABORTED 3 times Result: SUCCESS Injecting ABORTED 0 times Result: SUCCESS Injecting OVERLOADED 10 times Result: OVERLOADED Injecting OVERLOADED 6 times Result: OVERLOADED Injecting OVERLOADED 5 times Result: SUCCESS Injecting OVERLOADED 3 times Result: SUCCESS Injecting OVERLOADED 0 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS Injecting UNAVAILABLE 10 times Result: UNAVAILABLE Injecting UNAVAILABLE 6 times Result: UNAVAILABLE Injecting UNAVAILABLE 5 times Result: SUCCESS Injecting UNAVAILABLE 3 times Result: SUCCESS Injecting UNAVAILABLE 0 times Result: SUCCESS Injecting BAD_SESSION 10 times Result: BAD_SESSION Injecting BAD_SESSION 6 times Result: BAD_SESSION Injecting BAD_SESSION 5 times Result: SUCCESS Injecting BAD_SESSION 3 times Result: SUCCESS Injecting BAD_SESSION 0 times Result: SUCCESS Injecting SESSION_BUSY 10 times Result: SESSION_BUSY Injecting SESSION_BUSY 6 times Result: SESSION_BUSY Injecting SESSION_BUSY 5 times Result: SUCCESS Injecting SESSION_BUSY 3 times Result: SUCCESS Injecting SESSION_BUSY 0 times Result: SUCCESS Injecting NOT_FOUND 10 times Result: NOT_FOUND Injecting NOT_FOUND 6 times Result: NOT_FOUND Injecting NOT_FOUND 5 times Result: SUCCESS Injecting NOT_FOUND 3 times Result: SUCCESS Injecting NOT_FOUND 0 times Result: SUCCESS Injecting UNDETERMINED 10 times Result: UNDETERMINED Injecting UNDETERMINED 6 times Result: UNDETERMINED Injecting UNDETERMINED 5 times Result: SUCCESS Injecting UNDETERMINED 3 times Result: SUCCESS Injecting UNDETERMINED 0 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 3 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS 2025-03-28T09:24:39.216827Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788904248249734:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:39.216881Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001444/r3tmp/tmp1qi6oL/pdisk_1.dat 2025-03-28T09:24:39.391945Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:39.416492Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:39.416583Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:39.420312Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2577, 
node 13 2025-03-28T09:24:39.554061Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:39.554091Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:39.554103Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:39.554266Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15450 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:39.857289Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:42.576056Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Injecting ABORTED 10 times Result: ABORTED Injecting ABORTED 6 times Result: ABORTED Injecting ABORTED 5 times Result: SUCCESS Injecting ABORTED 3 times Result: SUCCESS Injecting ABORTED 0 times Result: SUCCESS Injecting OVERLOADED 10 times Result: OVERLOADED Injecting OVERLOADED 6 times Result: OVERLOADED Injecting OVERLOADED 5 times Result: SUCCESS Injecting OVERLOADED 3 times Result: SUCCESS Injecting OVERLOADED 0 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 10 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 6 times Result: CLIENT_RESOURCE_EXHAUSTED Injecting CLIENT_RESOURCE_EXHAUSTED 5 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 3 times Result: SUCCESS Injecting CLIENT_RESOURCE_EXHAUSTED 0 times Result: SUCCESS Injecting UNAVAILABLE 10 times Result: UNAVAILABLE Injecting UNAVAILABLE 6 times Result: UNAVAILABLE Injecting UNAVAILABLE 5 times Result: SUCCESS Injecting UNAVAILABLE 3 times Result: SUCCESS Injecting UNAVAILABLE 0 times Result: SUCCESS Injecting BAD_SESSION 10 times Result: BAD_SESSION Injecting BAD_SESSION 6 times Result: BAD_SESSION Injecting BAD_SESSION 5 times Result: SUCCESS Injecting BAD_SESSION 3 times Result: SUCCESS Injecting BAD_SESSION 0 times Result: SUCCESS Injecting SESSION_BUSY 10 times Result: SESSION_BUSY Injecting SESSION_BUSY 6 times Result: SESSION_BUSY Injecting SESSION_BUSY 5 times Result: SUCCESS Injecting SESSION_BUSY 3 times Result: SUCCESS Injecting SESSION_BUSY 0 times Result: SUCCESS Injecting NOT_FOUND 10 times Result: NOT_FOUND Injecting NOT_FOUND 6 times Result: NOT_FOUND Injecting NOT_FOUND 5 times Result: SUCCESS Injecting 
NOT_FOUND 3 times Result: SUCCESS Injecting NOT_FOUND 0 times Result: SUCCESS Injecting UNDETERMINED 10 times Result: UNDETERMINED Injecting UNDETERMINED 6 times Result: UNDETERMINED Injecting UNDETERMINED 5 times Result: SUCCESS Injecting UNDETERMINED 3 times Result: SUCCESS Injecting UNDETERMINED 0 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 10 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 6 times Result: TRANSPORT_UNAVAILABLE Injecting TRANSPORT_UNAVAILABLE 5 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 3 times Result: SUCCESS Injecting TRANSPORT_UNAVAILABLE 0 times Result: SUCCESS >> YdbYqlClient::DeleteTableWithDeletedIndex [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitions >> TTableProfileTests::OverwritePartitioningPolicy [GOOD] >> TTableProfileTests::OverwriteStoragePolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectNullDot2 [GOOD] Test command err: 2025-03-28T09:24:02.006616Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788741958345163:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:02.007175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eff/r3tmp/tmpyRlAWH/pdisk_1.dat 2025-03-28T09:24:02.561431Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:02.567033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:02.567138Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:02.568825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6234 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:24:02.929988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:02.987939Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
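In both "Injecting <STATUS> N times ... Result" matrices above, five injected failures of a status still end in SUCCESS while six propagate the error, which suggests a budget of roughly six attempts per operation, with every listed status (including NOT_FOUND) treated as retryable by this test. A self-contained sketch of such a bounded retry budget, using a hypothetical Execute callable and status enum rather than the SDK's real types:

    #include <functional>

    enum class EStatus { SUCCESS, ABORTED, OVERLOADED, UNAVAILABLE, BAD_SESSION, NOT_FOUND };

    // Retry up to maxAttempts times; return the last status if the budget
    // runs out. With maxAttempts = 6, five injected failures still end in
    // SUCCESS and six do not, matching the matrices in the log above.
    EStatus RetryWithBudget(const std::function<EStatus()>& execute, int maxAttempts = 6) {
        EStatus last = EStatus::SUCCESS;
        for (int attempt = 0; attempt < maxAttempts; ++attempt) {
            last = execute();
            if (last == EStatus::SUCCESS) {
                break;  // stop retrying on the first successful attempt
            }
        }
        return last;
    }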
2025-03-28T09:24:03.007189Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:24:03.013914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:03.175119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:03.247666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:05.823569Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788758136127442:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:05.823636Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eff/r3tmp/tmp6jq6eK/pdisk_1.dat 2025-03-28T09:24:06.005076Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:06.014523Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:06.014585Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:06.019409Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24714 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:24:06.276293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:06.291479Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:06.310759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:06.379719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:06.440802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:09.573326Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486788775066639551:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:09.573445Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eff/r3tmp/tmpI0Mhnp/pdisk_1.dat 2025-03-28T09:24:09.766203Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:09.776870Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:09.776944Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:09.778187Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4017 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:24:09.975110Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:09.987501Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:10.005297Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:10.061749Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:10.117406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:13.738114Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788794767373763:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:13.738190Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eff/r3tmp/tmp37YK9M/pdisk_1.dat 2025-03-28T09:24:13.865539Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:13.865613Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:13.877153Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:13.880071Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14089 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { Scheme ... 37968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13737 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:24:27.648198Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:27.663723Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:27.675613Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:24:27.680615Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:27.767463Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:27.850463Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:32.091428Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486788873117727155:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:32.091552Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eff/r3tmp/tmpjchYEq/pdisk_1.dat 2025-03-28T09:24:32.234164Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:32.247676Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:32.247777Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:32.249393Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32535 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:24:32.469972Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:32.477447Z node 8 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:32.489895Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:32.555659Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:32.633183Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:36.503387Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486788890355036929:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:36.503478Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eff/r3tmp/tmp1RW5Lp/pdisk_1.dat 2025-03-28T09:24:36.706773Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:36.710125Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:36.710194Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:36.711382Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2322 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:24:37.033448Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:24:37.063482Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:37.138157Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:37.212628Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eff/r3tmp/tmpH8wSAU/pdisk_1.dat 2025-03-28T09:24:40.846936Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:24:40.854017Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:40.885550Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:40.885662Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:40.887555Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62025 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:24:41.241454Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:41.248041Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:41.261240Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:41.337106Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:24:41.393345Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
>> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindDn [GOOD]
>> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindPassword
>> YdbYqlClient::TestReadTableMultiShardUseSnapshot [GOOD]
>> YdbYqlClient::TestReadTableMultiShardOneRow
>> TResourceBroker::TestRealUsage
>> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD]
>> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts
>> TTabletPipeTest::TestSendWithoutWaitOpen
>> TPipeTrackerTest::TestSimpleAdd [GOOD]
>> TResourceBroker::TestAutoTaskId
>> BootstrapperTest::KeepExistingTablet
>> TTabletPipeTest::TestTwoNodesAndRebootOfProducer
>> TTabletResolver::TabletResolvePriority [GOOD]
>> TPipeTrackerTest::TestAddSameTabletTwice [GOOD]
>> TPipeTrackerTest::TestAddTwoTablets [GOOD]
>> TTabletPipeTest::TestConsumerSidePipeReset
>> TGRpcClientLowTest::MultipleSimpleRequests [GOOD]
>> TDatabaseQuotas::DisableWritesToDatabase [GOOD]
>> TGRpcAuthentication::InvalidPassword
>> YdbLogStore::LogStore [GOOD]
>> YdbLogStore::LogStoreNegative
>> YdbQueryService::TestCreateDropAttachSession [GOOD]
>> YdbQueryService::TestCreateAttachAndDropAttachedSession
|87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestAddTwoTablets [GOOD]
|87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::TabletResolvePriority [GOOD]
>> TTabletPipeTest::TestConsumerSidePipeReset [GOOD]
>> TTabletPipeTest::TestInterconnectSession
>> TResourceBroker::TestRealUsage [GOOD]
>> TResourceBroker::TestRandomQueue
>> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD]
>> TResourceBroker::TestAutoTaskId [GOOD]
>> YdbImport::Simple [GOOD]
>> YdbImport::EmptyData
>> YdbScripting::MultiResults [GOOD]
>> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD]
>> TResourceBroker::TestRandomQueue [GOOD]
|87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestAutoTaskId [GOOD]
|87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD]
>> TTabletPipeTest::TestKillClientBeforServerIdKnown
>> TTabletPipeTest::TestInterconnectSession [GOOD]
>> BootstrapperTest::KeepExistingTablet [GOOD]
>> BootstrapperTest::DuplicateNodes
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcClientLowTest::MultipleSimpleRequests [GOOD]
Test command err:
2025-03-28T09:24:20.644421Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788822288203115:2183];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:24:20.658913Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001422/r3tmp/tmpj62NHk/pdisk_1.dat
2025-03-28T09:24:21.425458Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:24:21.425547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:24:21.428949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:24:21.449829Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 13271, node 1
2025-03-28T09:24:21.574515Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-28T09:24:21.574576Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-28T09:24:21.671467Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:24:21.671488Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:24:21.671495Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:24:21.671594Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:62485
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:24:21.994111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-03-28T09:24:24.327379Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-28T09:24:24.349490Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-28T09:24:26.014054Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788848861861894:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:26.014102Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001422/r3tmp/tmpUUZysf/pdisk_1.dat 2025-03-28T09:24:26.229333Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:26.247996Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:26.249050Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:26.256163Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21478, node 4 2025-03-28T09:24:26.524298Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:26.524321Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:26.524328Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:26.524481Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19242 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:26.792125Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TestRequest(database="/Root", token="") => {STATUS_CODE_UNSPECIFIED, 16} TestRequest(database="/blabla", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-28T09:24:26.923388Z node 4 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 2025-03-28T09:24:26.942490Z node 4 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-28T09:24:30.611669Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788864230461973:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:30.611737Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001422/r3tmp/tmpvUiVj4/pdisk_1.dat 2025-03-28T09:24:30.750710Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:30.776387Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:30.776447Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:30.778653Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22018, node 7 2025-03-28T09:24:30.852481Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:30.852500Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:30.852507Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:30.852632Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18386 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:31.125257Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TestRequest(database="/Root", token="") => {SUCCESS, 0} TestRequest(database="/blabla", token="") => {SUCCESS, 0} TestRequest(database="blabla", token="") => {SUCCESS, 0} TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} 2025-03-28T09:24:34.075775Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-28T09:24:34.115972Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-28T09:24:34.128666Z node 7 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token TestRequest(database="/Root", token="invalid token") => {UNAUTHORIZED, 0} TestRequest(database="/blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-28T09:24:34.151491Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 2025-03-28T09:24:34.162745Z node 7 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-28T09:24:35.606507Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788886684670046:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:35.606588Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001422/r3tmp/tmpOtBcKp/pdisk_1.dat 2025-03-28T09:24:35.788215Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:35.806836Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:35.806924Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:35.808950Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2528, node 10 2025-03-28T09:24:35.893988Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:35.894011Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:35.894020Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:35.894167Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2357 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:36.045781Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TestRequest(database="/Root", token="") => {SUCCESS, 0} TestRequest(database="/blabla", token="") => {SUCCESS, 0} TestRequest(database="blabla", token="") => {SUCCESS, 0} TestRequest(database="/Root", token="root@builtin") => {SUCCESS, 0} TestRequest(database="/blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-28T09:24:39.096218Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="root@builtin") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-28T09:24:39.105286Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/Root", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-28T09:24:39.115917Z node 10 :TICKET_PARSER ERROR: Ticket **** (717F937C): Unknown token 2025-03-28T09:24:39.126048Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="/blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-28T09:24:39.165694Z node 10 :GRPC_SERVER WARN: SchemeBoardDelete /blabla Strong=1 TestRequest(database="blabla", token="invalid token") => {STATUS_CODE_UNSPECIFIED, 16} 2025-03-28T09:24:40.815086Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788908289516279:2084];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:40.823560Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001422/r3tmp/tmpGzNzOC/pdisk_1.dat 2025-03-28T09:24:41.071785Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:41.151760Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:41.151849Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:41.164093Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64474, node 13 2025-03-28T09:24:41.307421Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:41.307446Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:41.307453Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:41.307600Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31496 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:41.566198Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:45.815122Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7486788908289516279:2084];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:45.815187Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestRandomQueue [GOOD] Test command err: 2025-03-28T09:24:49.123131Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-1 (1 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.123230Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-1 (1 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.123294Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-2 (2 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.123372Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-4 (4 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.123536Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-10 (10 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.123614Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-13 (13 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.123686Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-16 (16 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.123827Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-22 (22 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.123885Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-24 (24 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.123976Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-28 (28 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.124017Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-29 (29 by [2:99:2134])' of unknown type 'wrong' 
to default queue 2025-03-28T09:24:49.124210Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-38 (38 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.124342Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-43 (43 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.124386Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-44 (44 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.124563Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-52 (52 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.124628Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-53 (53 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.124874Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-64 (64 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.124934Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-66 (66 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.125239Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-75 (75 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.125356Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-80 (80 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.125556Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-89 (89 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.125688Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-95 (95 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.125775Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-98 (98 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.125866Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-102 (102 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.126012Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-108 (108 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.126073Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-110 (110 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.127738Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-146 (146 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.128020Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-156 (156 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.128145Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-161 (161 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.128209Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-163 (163 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.128384Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-171 (171 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.128422Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-172 (172 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.128555Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-173 (173 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.128649Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-175 (175 by [2:99:2134])' of unknown type 'wrong' to default 
queue 2025-03-28T09:24:49.128889Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-185 (185 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.128971Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-188 (188 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.129051Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-191 (191 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.129151Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-194 (194 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.129191Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-195 (195 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.129249Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-197 (197 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.129358Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-202 (202 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.129402Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-203 (203 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.129444Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-204 (204 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.129488Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-205 (205 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.129526Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-206 (206 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.129646Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-211 (211 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.129686Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-212 (212 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.129764Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-214 (214 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.129860Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-217 (217 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.129923Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-218 (218 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.130138Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-229 (229 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.130202Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-231 (231 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.130245Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-232 (232 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.130291Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-233 (233 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.130373Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-236 (236 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.130429Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-238 (238 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.130495Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-239 (239 by [2:99:2134])' of unknown type 'wrong' 
to default queue 2025-03-28T09:24:49.130722Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-249 (249 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.130838Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-254 (254 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.130934Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-258 (258 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.131015Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-260 (260 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.131068Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-261 (261 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.131260Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-268 (268 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.131377Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-273 (273 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.131423Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-274 (274 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.131540Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-279 (279 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.131668Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-285 (285 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.131748Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-288 (288 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.132032Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-302 (302 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.132107Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-305 (305 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.132250Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-312 (312 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.132493Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-322 (322 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.132627Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-327 (327 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.132668Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-328 (328 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.132710Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-329 (329 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.132844Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-335 (335 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.132885Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-336 (336 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.132922Z node 2 :RESOURCE_BROKER ERROR: Assigning waiting task 'task-337 (337 by [2:99:2134])' of unk ... 
-03-28T09:24:49.172389Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-398 (398 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.172427Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-410 (410 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.172513Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-433 (433 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.172694Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-531 (531 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.172734Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-556 (556 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.172800Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-618 (618 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.172908Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-639 (639 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.172987Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-652 (652 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.173025Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-677 (677 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.173134Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-723 (723 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.173174Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-730 (730 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.173228Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-735 (735 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.173585Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-846 (846 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.173636Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-875 (875 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.173691Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-883 (883 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.173752Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-897 (897 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.173804Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-912 (912 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.173912Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-921 (921 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.173968Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-946 (946 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.174063Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-980 (980 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.174101Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-2 (2 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.174287Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-52 (52 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.174383Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-89 (89 by [2:99:2134])' of unknown type 'wrong' to default queue 
2025-03-28T09:24:49.174565Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-173 (173 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.174655Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-202 (202 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.174821Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-203 (203 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.174865Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-206 (206 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.174935Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-217 (217 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.174976Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-231 (231 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175033Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-249 (249 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175098Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-261 (261 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175242Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-302 (302 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175298Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-322 (322 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175347Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-327 (327 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175397Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-337 (337 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175434Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-354 (354 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175491Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-367 (367 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175526Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-370 (370 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175563Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-394 (394 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175602Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-397 (397 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175640Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-426 (426 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175682Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-439 (439 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175718Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-451 (451 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175753Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-453 (453 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175790Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-456 (456 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.175848Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-468 (468 by [2:99:2134])' of unknown type 'wrong' to default queue 
2025-03-28T09:24:49.175992Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-513 (513 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176099Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-541 (541 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176137Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-546 (546 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176225Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-582 (582 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176280Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-600 (600 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176324Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-605 (605 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176371Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-619 (619 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176445Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-625 (625 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176492Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-630 (630 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176543Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-641 (641 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176576Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-644 (644 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176636Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-660 (660 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176713Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-675 (675 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176740Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-688 (688 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176790Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-705 (705 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176828Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-710 (710 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176916Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-726 (726 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.176955Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-731 (731 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.177030Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-794 (794 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.177084Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-818 (818 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.177134Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-830 (830 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.177252Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-876 (876 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.177320Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-899 (899 by [2:99:2134])' of unknown type 'wrong' to default queue 
2025-03-28T09:24:49.177357Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-901 (901 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.177394Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-908 (908 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.177455Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-915 (915 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.177484Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-925 (925 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.177527Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-927 (927 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.177576Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-933 (933 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.177632Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-937 (937 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.177657Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-953 (953 by [2:99:2134])' of unknown type 'wrong' to default queue 2025-03-28T09:24:49.177712Z node 2 :RESOURCE_BROKER ERROR: Assigning in-fly task 'task-996 (996 by [2:99:2134])' of unknown type 'wrong' to default queue |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] >> YdbTableBulkUpsertOlap::UpsertCSV_DataShard [GOOD] >> YdbTableBulkUpsertOlap::UpsertMixed >> TTabletLabeledCountersAggregator::HeavyAggregation >> YdbYqlClient::AlterTableAddIndexWithDataColumn [GOOD] >> YdbYqlClient::CheckDefaultTableSettings1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] Test command err: 2025-03-28T09:24:49.731966Z node 1 :PIPE_SERVER DEBUG: [9437185] Detach 2025-03-28T09:24:49.745559Z node 1 :PIPE_SERVER DEBUG: [9437185] Activate 2025-03-28T09:24:49.752736Z node 1 :PIPE_SERVER DEBUG: [9437185] Activate 2025-03-28T09:24:49.755159Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:128:2154] 2025-03-28T09:24:49.755227Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:128:2154] 2025-03-28T09:24:49.755551Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:128:2154] 2025-03-28T09:24:49.755597Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:128:2154] 2025-03-28T09:24:49.755672Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:128:2154] 2025-03-28T09:24:49.755705Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:128:2154] 2025-03-28T09:24:49.755779Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:128:2154] 2025-03-28T09:24:49.755918Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:128:2154] Type# 269877249 Reason# ActorUnknown 2025-03-28T09:24:49.756053Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:131:2156] 2025-03-28T09:24:49.756076Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:131:2156] 2025-03-28T09:24:49.756131Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:131:2156] 2025-03-28T09:24:49.756173Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:131:2156] 2025-03-28T09:24:49.756219Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:131:2156] 
2025-03-28T09:24:49.756239Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:131:2156]
2025-03-28T09:24:49.756279Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:131:2156]
2025-03-28T09:24:49.756352Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:131:2156] Type# 269877249 Reason# ActorUnknown
2025-03-28T09:24:49.756479Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] ::Bootstrap [1:133:2158]
2025-03-28T09:24:49.756503Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] lookup [1:133:2158]
2025-03-28T09:24:49.756557Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] forward result local node, try to connect [1:133:2158]
2025-03-28T09:24:49.756582Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185]::SendEvent [1:133:2158]
2025-03-28T09:24:49.756613Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] poison pill while connecting [1:133:2158]
2025-03-28T09:24:49.756638Z node 1 :PIPE_CLIENT DEBUG: TClient[9437185] connect failed [1:133:2158]
2025-03-28T09:24:49.756710Z node 1 :PIPE_SERVER DEBUG: [9437185] Accept Connect Originator# [1:133:2158]
2025-03-28T09:24:49.756811Z node 1 :PIPE_SERVER INFO: [9437185] Undelivered Target# [1:133:2158] Type# 269877249 Reason# ActorUnknown
>> TPipeCacheTest::TestIdleRefresh
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbScripting::MultiResults [GOOD]
Test command err:
2025-03-28T09:24:19.099653Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788816307575434:2075];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:24:19.099693Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001429/r3tmp/tmpuqmir4/pdisk_1.dat
2025-03-28T09:24:19.658587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:24:19.658680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:24:19.663456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:24:19.681590Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 14638, node 1
2025-03-28T09:24:19.823445Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:24:19.823476Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:24:19.823496Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:24:19.823616Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:10145
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:24:20.133582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:24:22.442562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-03-28T09:24:23.218446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788833487447554:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:23.218579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:23.218938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788833487447566:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:23.222174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-03-28T09:24:23.268163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788833487447568:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking }
2025-03-28T09:24:23.349980Z node 1 :TX_PROXY ERROR: Actor# [1:7486788833487447672:4182] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:24:23.817920Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe126hgc0hvgz9e00g4gcgn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmRjM2I5NGYtOTEwYjQ5MDEtYTg5ZmZkZmYtZGE4NWZjYTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
SUCCESS
2025-03-28T09:24:25.694128Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788844753415310:2076];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:24:25.694189Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001429/r3tmp/tmpUm0j9G/pdisk_1.dat
2025-03-28T09:24:25.898298Z node 4 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 6111, node 4
2025-03-28T09:24:25.991027Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:24:25.991049Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:24:25.991057Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:24:25.991166Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:24:26.009579Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:24:26.009653Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:24:26.012527Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:14955
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:24:26.203826Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:24:28.529803Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-03-28T09:24:29.073098Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788861933287434:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:29.073088Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788861933287426:2436], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:29.073164Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:29.076217Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-03-28T09:24:29.097749Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486788861933287440:2440], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-03-28T09:24:29.156019Z node 4 :TX_PROXY ERROR: Actor# [4:7486788861933287511:4149] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:24:29.287558Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe12c8fe8vgjjc3v99w9arc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MzljY2RlODUtZTgwYmNmMDItOTJiODA5NmUtZjVkMGU3Zjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:24:29.357002Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480
TClient is connected to server localhost:14955
2025-03-28T09:24:31.491648Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788871960156978:2074];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:24:31.497584Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001429/r3tmp/tmpucKhep/pdisk_1.dat
2025-03-28T09:24:31.803127Z node 7 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 29012, node 7
2025-03-28T09:24:31.907200Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:24:31.907357Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:24:31.935114Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:24:31.957486Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:24:31.957512Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:24:31.957522Z node 7 :NET_CLASSIFIER WARN: ... SSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:24:37.651274Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:24:37.651283Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:24:37.651419Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:3298
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:24:37.807106Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:24:40.753116Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788910200423530:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:40.753231Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:40.818374Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-03-28T09:24:40.928793Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788910200423719:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:40.928919Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:40.929154Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788910200423724:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:40.932827Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-03-28T09:24:40.953410Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486788910200423726:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-03-28T09:24:41.017354Z node 10 :TX_PROXY ERROR: Actor# [10:7486788914495391097:2802] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:24:41.095457Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe12mvcdaaptc4rmywp1ckx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NWNiNjExN2QtZGIxNTQ0OWItNGQ0YmNjYWQtNzA3ODJhNjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:24:41.220266Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe12r0g8maz3d2yz9acga8w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MzdmYjY5M2ItZDE1YmU0M2EtMzA2MTA3ZjQtNWRjMzI2NTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:24:41.224783Z node 10 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153881259, txId: 281474976715662] shutting down
2025-03-28T09:24:43.199758Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788921919192442:2106];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:24:43.257228Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001429/r3tmp/tmpFKqOmR/pdisk_1.dat
2025-03-28T09:24:43.544523Z node 13 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 9514, node 13
2025-03-28T09:24:43.651864Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:24:43.651971Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:24:43.729277Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:24:43.747529Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:24:43.747572Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:24:43.747582Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:24:43.747736Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:32174
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:44.076896Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:46.748344Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486788934804095359:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:46.748456Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:47.052379Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:47.166824Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486788939099062822:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:47.166899Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:47.167176Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486788939099062827:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:47.170393Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:47.187868Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7486788939099062829:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:47.289025Z node 13 :TX_PROXY ERROR: Actor# [13:7486788939099062900:2789] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:47.369913Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe12v0kc1hrw9dshbg8pzgr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=OTlhNjNjMjktMTg3MGFhZDctMzM2ZGE3YjUtMzgxN2NiYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:47.467957Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe12v0kc1hrw9dshbg8pzgr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Mjk5ZDkzODItYzk3MWYwOGQtMzFkNWQ1YzItMjU3YTIxYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:47.545109Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe12v0kc1hrw9dshbg8pzgr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=Y2QxNmU2MzQtYmY2YzcxMjktZjdmMDM4MWItNDE5MDIyYmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:47.692155Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe12v0kc1hrw9dshbg8pzgr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjUzZGQ3NGUtODJlNGQ0YjMtYmFlZDM0NjktNGYzMTQwZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> KqpJoinOrder::TPCDS87+ColumnStore [GOOD] >> TPipeCacheTest::TestIdleRefresh [GOOD] >> TPipeCacheTest::TestTabletNode >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientWithCorrectCerts_EmptyAllowedSids [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer >> TResourceBrokerConfig::UpdateTasks [GOOD] >> TResourceBrokerInstant::Test >> BootstrapperTest::DuplicateNodes [GOOD] >> TColumnShardTestSchema::RebootOneColdTier [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed [GOOD] >> TTabletCountersAggregator::ColumnShardCounters >> TFlatMetrics::MaximumValue1 [GOOD] >> TFlatMetrics::MaximumValue2 [GOOD] >> BootstrapperTest::RestartUnavailableTablet >> TYqlDateTimeTests::SimpleOperations [GOOD] >> TYqlDecimalTests::DecimalKey >> TPipeCacheTest::TestTabletNode [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldFail [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldSucceed >> TTabletLabeledCountersAggregator::SimpleAggregation >> TTabletCountersAggregator::ColumnShardCounters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::DuplicateNodes [GOOD] Test command err: ... waiting for pipe to connect ... sleeping (original instance should be preserved) ... waiting for original instance to stop ... waiting for original instance to stop (done) ... 
waiting for pipe to connect 2025-03-28T09:24:50.003690Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-28T09:24:50.003807Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-28T09:24:50.004418Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-03-28T09:24:50.004461Z node 5 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 12552810490399048506 2025-03-28T09:24:50.004623Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2025-03-28T09:24:50.004649Z node 4 :BOOTSTRAPPER INFO: tablet:9437184, type: Dummy, begin new round, seed: 15249746964198841502 2025-03-28T09:24:50.005434Z node 5 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2025-03-28T09:24:50.005501Z node 5 :BOOTSTRAPPER NOTICE: tablet: 9437184, type: Dummy, boot 2025-03-28T09:24:50.005863Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2025-03-28T09:24:50.005895Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lost round, wait for 0.139961s 2025-03-28T09:24:50.172698Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2025-03-28T09:24:50.173312Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: OK, leader: [5:212:2095] 2025-03-28T09:24:50.173794Z node 4 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, connect: OK 2025-03-28T09:24:50.173837Z node 4 :BOOTSTRAPPER INFO: tablet: 9437184, type: Dummy, connected to leader, waiting >> TResourceBrokerInstant::Test [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor >> TFlatMetrics::TimeSeriesAvg16 [GOOD] >> TFlatMetrics::TimeSeriesAVG [GOOD] >> TTabletLabeledCountersAggregator::SimpleAggregation [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestTabletNode [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue2 [GOOD] >> KqpJoinOrder::TPCDS94+ColumnStore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143154427.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143154427.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154427.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123154427.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153227.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123153227.000000s;Name=;Codec=}; 2025-03-28T09:23:49.325297Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:23:49.481847Z 
node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:23:49.505341Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:23:49.505622Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:23:49.513772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:49.513988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:49.514268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:23:49.514431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:23:49.514561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:23:49.514672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:23:49.514880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:23:49.515012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:23:49.515127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:23:49.515234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:23:49.515349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:23:49.515462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:23:49.543717Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:23:49.544038Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 
2025-03-28T09:23:49.544099Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:23:49.544277Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:23:49.544422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:23:49.544521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:23:49.544571Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:23:49.544656Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:23:49.544713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:23:49.544752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:23:49.544781Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:23:49.544927Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:23:49.544982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:23:49.545019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:23:49.545046Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:23:49.545138Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:23:49.545201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:23:49.545254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:23:49.545315Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:23:49.545404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 
2025-03-28T09:23:49.545448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:23:49.545477Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:23:49.545525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:23:49.545571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:23:49.545604Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:23:49.546030Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-03-28T09:23:49.546113Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-03-28T09:23:49.546203Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=49; 2025-03-28T09:23:49.546284Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-03-28T09:23:49.546451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:23:49.546519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:23:49.546556Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:23:49.546796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:23:49.546845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:23:49.546876Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:23:49.547027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:23:49.547066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:23:49.547095Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:23:49.547286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... a.cpp:29;PRECHARGE:finishLoadingTime=22; 2025-03-28T09:24:50.799461Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=177; 2025-03-28T09:24:50.799488Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=22262; 2025-03-28T09:24:50.804456Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=4910; 2025-03-28T09:24:50.808520Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=3383; 2025-03-28T09:24:50.808601Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=4071; 2025-03-28T09:24:50.808732Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=76; 2025-03-28T09:24:50.808824Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=51; 2025-03-28T09:24:50.808907Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=55; 2025-03-28T09:24:50.808994Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=56; 2025-03-28T09:24:50.814642Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5589; 2025-03-28T09:24:50.821023Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=6265; 2025-03-28T09:24:50.821142Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=36; 2025-03-28T09:24:50.821205Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=27; 2025-03-28T09:24:50.821239Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2025-03-28T09:24:50.821270Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2025-03-28T09:24:50.821304Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-03-28T09:24:50.821363Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=28; 2025-03-28T09:24:50.821393Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2025-03-28T09:24:50.821455Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=37; 2025-03-28T09:24:50.821483Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=3; 2025-03-28T09:24:50.821524Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=18; 2025-03-28T09:24:50.821593Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=43; 2025-03-28T09:24:50.821764Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=144; 2025-03-28T09:24:50.821794Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=50676; 2025-03-28T09:24:50.821902Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:24:50.821983Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:24:50.822022Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:24:50.822067Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:24:50.830367Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-28T09:24:50.830532Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:24:50.830596Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:24:50.830669Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-03-28T09:24:50.830750Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:24:50.830793Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:24:50.830845Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:24:50.830884Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:24:50.830977Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:24:50.831635Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:24:50.831726Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1991:3891];tablet_id=9437184;parent=[1:1953:3860];fline=manager.cpp:82;event=ask_data;request=request_id=95;1={portions_count=11};; 2025-03-28T09:24:50.832490Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:24:50.833251Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:24:50.833290Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:24:50.833314Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:24:50.833372Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:24:50.833439Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:24:50.833496Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-03-28T09:24:50.833555Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:24:50.833598Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:24:50.833644Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:24:50.833680Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:24:50.833763Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:24:50.835288Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=11;path_id=1; 2025-03-28T09:24:50.836554Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::ColumnShardCounters [GOOD] >> YdbYqlClient::TestMultipleModifications [GOOD] >> YdbYqlClient::TestDescribeTableWithShardStats >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest 
>> TResourceBrokerInstant::Test [GOOD] >> TResourceBroker::TestOverusage >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitions [GOOD] >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAVG [GOOD] >> TGRpcYdbTest::CreateTableWithIndex [GOOD] >> TGRpcYdbTest::CreateDeleteYqlSession >> BootstrapperTest::RestartUnavailableTablet [GOOD] >> BootstrapperTest::UnavailableStateStorage >> TResourceBroker::TestOverusage [GOOD] >> TResourceBroker::TestNotifyActorDied |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> TRegisterNodeOverLegacyService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TTableProfileTests::DescribeTableWithPartitioningPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] Test command err: { LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" } >> TGRpcLdapAuthentication::LdapAuthSettingsWithEmptyBindPassword [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword >> TGRpcAuthentication::InvalidPassword [GOOD] >> TGRpcAuthentication::DisableLoginAuthentication >> TResourceBroker::TestNotifyActorDied [GOOD] >> YdbYqlClient::TestReadTableMultiShardOneRow [GOOD] >> YdbYqlClient::TestReadTableBatchLimits >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck >> ClientStatsCollector::CounterRetryOperation [GOOD] >> ClientStatsCollector::ExternalMetricRegistryByRawPtr >> YdbTableBulkUpsert::Overload [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] >> TTabletCountersPercentile::SingleBucket [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet >> BootstrapperTest::UnavailableStateStorage [GOOD] >> TResourceBrokerInstant::TestErrors >> YdbImport::EmptyData [GOOD] >> YdbImport::ImportFromS3ToExistingTable |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestNotifyActorDied [GOOD] >> TTabletPipeTest::TestSendAfterOpen >> TTabletPipeTest::TestOpen >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor >> YdbQueryService::TestCreateAttachAndDropAttachedSession [GOOD] >> YdbS3Internal::BadRequests ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS87+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 13423, MsgBus: 12106 2025-03-28T09:22:57.143359Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788466125973167:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:57.144524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001732/r3tmp/tmp4Gn6lw/pdisk_1.dat 2025-03-28T09:22:57.885026Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-28T09:22:57.919995Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:57.920073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:57.935321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13423, node 1 2025-03-28T09:22:58.210854Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:58.210871Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:58.210878Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:22:58.210967Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12106 TClient is connected to server localhost:12106 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:22:59.194064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:22:59.207435Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:23:01.702899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788483305842992:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:01.703045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:01.703427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788483305843004:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:01.707246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:23:01.718652Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788483305843006:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:23:01.818503Z node 1 :TX_PROXY ERROR: Actor# [1:7486788483305843057:2340] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:02.145822Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788466125973167:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:02.145876Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:02.394297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:23:02.711582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788487600810634:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:02.711760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788487600810634:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:02.712002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788487600810634:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:23:02.712105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788487600810634:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:23:02.712209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788487600810634:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:23:02.712306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788487600810634:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:23:02.712394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788487600810634:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:23:02.712491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788487600810634:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:23:02.712596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788487600810634:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:23:02.712695Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7486788487600810634:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:23:02.712783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788487600810634:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:23:02.712909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788487600810634:2363];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:23:02.716109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788487600810614:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:02.716200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788487600810614:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:02.716393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788487600810614:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:23:02.716505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788487600810614:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:23:02.716601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788487600810614:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:23:02.716694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788487600810614:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:23:02.716784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788487600810614:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:23:02.716885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788487600810614:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:23:02.717000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788487600810614:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:23:02.717113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788487600810614:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:23:02.717225Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7486788487600810614:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:23:02.717326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486788487600810614:2358];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:23:02.787010Z node 1 :T ... troller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.039825Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039413;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.043373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039352;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.046521Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039407;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.050259Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039332;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.053124Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039318;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.058013Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039366;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.063190Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039316;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.067465Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039298;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.068834Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039322;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.074140Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039348;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.075776Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039350;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.080307Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039292;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.081643Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039372;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.086988Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224039334;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.087455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039420;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.093583Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.093584Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039340;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.099884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039320;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.100348Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039185;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.105936Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039328;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.106478Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039266;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.112155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039368;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.112509Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039382;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.117840Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039358;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.118141Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039374;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.123884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039380;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.124512Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039314;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.129394Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039215;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.129828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039356;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.135788Z 
node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039326;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.135803Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039264;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.141759Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039336;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.141952Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039370;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.148072Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039189;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.148079Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039346;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.153256Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.157045Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039247;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:21.322371Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe113sr5yp1xw3gr00h81da", SessionId: ydb://session/3?node_id=1&id=N2UwZTgzZTgtMmNhODY2NmMtODVjNjA1YzctNzk1MDU4Mjg=, Slow query, duration: 33.681136s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:24:21.880628Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:21.880816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:21.881455Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:43.144073Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe12etee7f48yg2w6px32q5", SessionId: ydb://session/3?node_id=1&id=N2UwZTgzZTgtMmNhODY2NmMtODVjNjA1YzctNzk1MDU4Mjg=, Slow query, duration: 11.448590s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, 
text: "PRAGMA TablePathPrefix='/Root/test/ds';\n\n-- NB: Subquerys\n$bla1 = (select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from store_sales as store_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where store_sales.ss_sold_date_sk = date_dim.d_date_sk\n and store_sales.ss_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11);\n\n$bla2 = ((select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from catalog_sales as catalog_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk\n and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11)\n union all\n (select distinct\n COALESCE(c_last_name,'') as c_last_name,\n COALESCE(c_first_name,'') as c_first_name,\n COALESCE(cast(d_date as date), cast(0 as Date)) as d_date\n from web_sales as web_sales\n cross join date_dim as date_dim\n cross join customer as customer\n where web_sales.ws_sold_date_sk = date_dim.d_date_sk\n and web_sales.ws_bill_customer_sk = customer.c_customer_sk\n and d_month_seq between 1221 and 1221+11));\n\n-- start query 1 in stream 0 using template query87.tpl and seed 1819994127\nselect count(*)\nfrom $bla1 bla1 left only join $bla2 bla2 using (c_last_name, c_first_name, d_date)\n;\n\n-- end query 1 in stream 0 using template query87.tpl", parameters: 0b |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::SingleBucket [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::UnavailableStateStorage [GOOD] Test command err: ... waiting for pipe to connect ... waiting for blocked connect attempt ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR ... waiting for blocked connect attempt (done) ... disconnecting nodes 2 <-> 1 ... waiting for pipe to disconnect ... waiting for pipe to connect ... waiting for pipe to connect ... waiting for multiple state storage lookup attempts 2025-03-28T09:24:52.928048Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA 2025-03-28T09:24:52.928693Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, lookup: ERROR, leader: [0:0:0] 2025-03-28T09:24:52.928736Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, state storage unavailable, sleeping for 0.148014s 2025-03-28T09:24:53.060577Z node 6 :BOOTSTRAPPER DEBUG: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... 
disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA ... waiting for multiple state storage lookup attempts (done) >> TResourceBrokerInstant::TestErrors [GOOD] >> TResourceBrokerInstant::TestMerge >> TTabletPipeTest::TestSendAfterOpen [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular >> TFlatMetrics::TimeSeriesAvg4 [GOOD] >> TFlatMetrics::TimeSeriesKV [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD] >> TTabletPipeTest::TestOpen [GOOD] >> TFlatMetrics::TimeSeriesKV2 [GOOD] >> TPipeCacheTest::TestAutoConnect |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] >> YdbLogStore::LogStoreNegative [GOOD] >> YdbLogStore::Dirs >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] >> TResourceBrokerInstant::TestMerge [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpen [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesKV [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::Overload [GOOD] Test command err: 2025-03-28T09:23:42.903347Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788660435644824:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:42.903408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001479/r3tmp/tmpW7wVyF/pdisk_1.dat 2025-03-28T09:23:43.760986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:43.761082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:43.780909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:43.807207Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5147, node 1 2025-03-28T09:23:43.985531Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:23:43.985577Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:23:43.985591Z node 1 :GRPC_SERVER WARN: 
SchemeBoardDelete /Root Strong=0 2025-03-28T09:23:44.163859Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:44.163882Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:44.163889Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:44.164048Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4331 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:45.037982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:47.184545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 SUCCESS 2025-03-28T09:23:47.440461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788681910482537:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:47.440577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:47.440836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788681910482549:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:47.444739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:23:47.470904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788681910482551:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:23:47.552050Z node 1 :TX_PROXY ERROR: Actor# [1:7486788681910482622:2806] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:47.906810Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788660435644824:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:47.906879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:49.745132Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe113k7ah0jx3bgt5m4047k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTEzMGEzNjYtNjllMjEwYmMtOWU3ZTVkZDItODMzZGE2Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-03-28T09:23:50.454418Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqe116283recyja5vzsdqvr9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTEzMGEzNjYtNjllMjEwYmMtOWU3ZTVkZDItODMzZGE2Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-03-28T09:23:50.619391Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-28T09:23:50.672616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 SUCCESS 2025-03-28T09:23:51.280212Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqe116x5dtcdzsfzkabrmm1n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzhjZTRlOTQtNWZkMWU2ZjYtMzg3M2Q5NTQtOGYxY2NmN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-03-28T09:23:51.695861Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqe117bydz2rwyja8xra0d44, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzhjZTRlOTQtNWZkMWU2ZjYtMzg3M2Q5NTQtOGYxY2NmN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-03-28T09:23:51.799284Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-28T09:23:51.904988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 SUCCESS 2025-03-28T09:23:52.377100Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jqe1182d72xppcpj6zszawkw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjZkNjEyMmUtMjEwNDRhMS03ZTViMmM0Yy04MmEyMDFhOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-03-28T09:23:52.803810Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. 
Ctx: { TraceId: 01jqe118e4dt1k34qz8aef196j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjZkNjEyMmUtMjEwNDRhMS03ZTViMmM0Yy04MmEyMDFhOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-03-28T09:23:52.875173Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-03-28T09:23:52.917627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 SUCCESS 2025-03-28T09:23:53.375131Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqe1191j7p3fnpe8v6pkxckd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTYxZDIwNjgtNmM5MzhhNTYtODMwYmYyYTQtNjgxYmY3ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-03-28T09:23:53.801678Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jqe119dfbsbpcy0xa49q0x2c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTYxZDIwNjgtNmM5MzhhNTYtODMwYmYyYTQtNjgxYmY3ODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-03-28T09:23:53.903449Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-03-28T09:23:53.954434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 SUCCESS 2025-03-28T09:23:54.475715Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqe11a27bz3w493x4eqcrdcd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWRhYTQ2YzEtY2RiYjBmMmEtZTFmODY4YTEtYjQwMDRmYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-03-28T09:23:54.908117Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710678. Ctx: { TraceId: 01jqe11afx6b1sy54zfgd149c5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWRhYTQ2YzEtY2RiYjBmMmEtZTFmODY4YTEtYjQwMDRmYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-03-28T09:23:54.988607Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-03-28T09:23:55.011250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 SUCCESS 2025-03-28T09:23:55.507537Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jqe11b3b89zqwm4hpzshht6a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzBmOTNmOTEtNjU1ZGI2ZjgtYTk3OTNkMy1kMzgyY2RhYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 0 rows 2025-03-28T09:23:56.019815Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. 
Ctx: { TraceId: 01jqe11bg07h2taychch3t74hs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzBmOTNmOTEtNjU1ZGI2ZjgtYTk3OTNkMy1kMzgyY2RhYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 1 rows 2025-03-28T09:23:56.129256Z node ... 0 2025-03-28T09:24:52.030506Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.030531Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.031533Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037892 2025-03-28T09:24:52.033457Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.033482Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.033563Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-03-28T09:24:52.033586Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:24:52.034169Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.034198Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.034793Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037892 2025-03-28T09:24:52.034812Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-28T09:24:52.034946Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037891 2025-03-28T09:24:52.035816Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.035836Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.038060Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037891 2025-03-28T09:24:52.038084Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-28T09:24:52.039065Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.039089Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 .2025-03-28T09:24:52.043880Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.043917Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.046949Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.046974Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.057696Z node 13 :TX_DATASHARD DEBUG: 
CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.057801Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.060542Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.060573Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.067410Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.067439Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.069606Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-03-28T09:24:52.070081Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037890 2025-03-28T09:24:52.070386Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037891 2025-03-28T09:24:52.070753Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037892 2025-03-28T09:24:52.077741Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.077771Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.078002Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037892 2025-03-28T09:24:52.078026Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-28T09:24:52.078423Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.078437Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.078495Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-03-28T09:24:52.078511Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:24:52.078835Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.078848Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.079007Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037890 2025-03-28T09:24:52.079022Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:24:52.079459Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.079473Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.079532Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037891 2025-03-28T09:24:52.079551Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037891 .2025-03-28T09:24:52.129094Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.129123Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.130705Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.130736Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.132212Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2025-03-28T09:24:52.132236Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037890 2025-03-28T09:24:52.132668Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037892 2025-03-28T09:24:52.136827Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2025-03-28T09:24:52.136865Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:24:52.137547Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.137596Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.138545Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037890 2025-03-28T09:24:52.138577Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:24:52.138676Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037892 2025-03-28T09:24:52.138712Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-28T09:24:52.138828Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Execute: at tablet# 72075186224037891 2025-03-28T09:24:52.141966Z node 13 :TX_DATASHARD INFO: TTxDirectBase(36) Complete: at tablet# 72075186224037891 2025-03-28T09:24:52.141992Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-28T09:24:52.143111Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.143129Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.143588Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.143602Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.145107Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.145141Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.145578Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.145600Z node 13 :TX_DATASHARD DEBUG: 
ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 .2025-03-28T09:24:52.155282Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.155312Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.169236Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.169271Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.169724Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037892, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.169740Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037892, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.170422Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037890, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.170445Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037890, table# 1001, finished edge# 0, front# 0 2025-03-28T09:24:52.170847Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037891, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:24:52.170862Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037891, table# 1001, finished edge# 0, front# 0 >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedCert [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestOpen [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::TPCDS94+ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 24384, MsgBus: 22451 2025-03-28T09:22:58.350086Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788470947419716:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:58.350113Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00172e/r3tmp/tmpaE4IQC/pdisk_1.dat 2025-03-28T09:22:59.063510Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:22:59.065744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:22:59.065840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:22:59.070154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24384, node 1 2025-03-28T09:22:59.273475Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:22:59.273494Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:22:59.273502Z node 1 :NET_CLASSIFIER WARN: failed to initialize from 
file: (empty maybe) 2025-03-28T09:22:59.273614Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22451 TClient is connected to server localhost:22451 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:00.045306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:00.079079Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:23:02.836845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788488127289575:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:02.836998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:02.837348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788488127289587:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:02.841225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:23:02.853541Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:23:02.854591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788488127289589:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:23:02.957660Z node 1 :TX_PROXY ERROR: Actor# [1:7486788488127289640:2342] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:03.354799Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788470947419716:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:03.354857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:03.429657Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:23:03.726622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788492422257203:2353];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:03.731108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788492422257203:2353];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:03.731334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788492422257203:2353];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:23:03.731454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788492422257203:2353];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:23:03.731559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788492422257203:2353];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:23:03.731667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788492422257203:2353];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:23:03.731763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788492422257203:2353];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:23:03.731886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788492422257203:2353];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:23:03.731992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788492422257203:2353];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:23:03.732098Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[1:7486788492422257203:2353];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:23:03.732199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788492422257203:2353];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:23:03.732298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[1:7486788492422257203:2353];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:23:03.795461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788492422257199:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:23:03.795520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788492422257199:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:23:03.795726Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788492422257199:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:23:03.795845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788492422257199:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:23:03.795945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788492422257199:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:23:03.796041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788492422257199:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:23:03.796132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788492422257199:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:23:03.796231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788492422257199:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:23:03.796334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788492422257199:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:23:03.796437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788492422257199:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:23:03.796534Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037898;self_id=[1:7486788492422257199:2351];tablet_id=72075186224037898;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:23:03.796645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037898;self_id=[1:7486788492422257199:2351];tablet_id=72075186224037 ... 6710714; 2025-03-28T09:24:18.563687Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039313;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.563833Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039355;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.567594Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039321;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.569232Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039273;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.572511Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039287;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.575290Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039375;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.579300Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039335;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.582053Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039389;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.585263Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039363;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.588131Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039385;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.591024Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039285;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.594346Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039345;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.596374Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039379;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.600172Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039311;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.605520Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039307;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.611526Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039365;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.611765Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039267;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.617071Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039418;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.621315Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039359;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.627108Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039377;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.631860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039275;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.632127Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039291;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.641836Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039329;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.648137Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039373;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.652326Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039341;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.653435Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039391;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.658159Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039323;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.659352Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039395;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.663506Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039333;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.666979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039367;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 
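The finished_tx records above and below this point all carry the same tx_id=281474976710714 but come from distinct TX_COLUMNSHARD tablet_ids, which is consistent with each column shard acknowledging one schema transaction: the CREATE TABLE statement quoted (with \n escapes) in the KQP_SLOW_LOG entry further down declares every table with AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240, so on the order of 240 shards per table report completion. For readability, that logged DDL is reproduced here with the escapes expanded; the statements themselves are unchanged:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);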
2025-03-28T09:24:18.668524Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039393;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.672099Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039251;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.673953Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039349;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.678100Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039371;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.679863Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039305;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.684373Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039388;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.685461Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039337;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.689603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039347;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.692354Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039339;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710714;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710714; 2025-03-28T09:24:18.864780Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe114vqd34yes6wj2sy1pfr", SessionId: ydb://session/3?node_id=1&id=ODQ1NGY5NjktZWMzYTRlNjItZmZiNTg2ZDgtYTlkMTAxOWU=, Slow query, duration: 30.135985s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "CREATE TABLE t1 (\n id1 Int32 NOT NULL,\n PRIMARY KEY (id1)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t2 (\n id2 Int64 NOT NULL,\n t1_id1 Int64 NOT NULL,\n -- random_field2 Int32\n PRIMARY KEY (id2)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n\nCREATE TABLE t3 (\n id3 Int16 NOT NULL,\n -- random_field3 Int32\n PRIMARY KEY (id3)\n) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);\n", parameters: 0b 2025-03-28T09:24:19.387662Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;self_id=[1:7486788780185121519:10788];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224039392;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224038933;receive=72075186224039094; 2025-03-28T09:24:19.387822Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039094;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:19.388078Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224038933;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:19.388602Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224039392;tx_state=TTxProgressTx::Execute;tx_current=281474976710716;tx_id=281474976710716;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710716; 2025-03-28T09:24:43.200007Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe12dnpd3hqnnyjxrtqf7p7", SessionId: ydb://session/3?node_id=1&id=ODQ1NGY5NjktZWMzYTRlNjItZmZiNTg2ZDgtYTlkMTAxOWU=, Slow query, duration: 12.680670s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "pragma TablePathPrefix = \"/Root/test/ds/\";\n\n-- NB: Subquerys\n$bla1 = (select ws_order_number\n from web_sales\n group by ws_order_number\n having COUNT(DISTINCT ws_warehouse_sk) > 1);\n\n-- start query 1 in stream 0 using template query94.tpl and seed 2031708268\nselect\n count(distinct ws1.ws_order_number) as `order count`\n ,sum(ws_ext_ship_cost) as `total shipping cost`\n ,sum(ws_net_profit) as `total net profit`\nfrom\n web_sales ws1\n cross join date_dim\n cross join customer_address\n cross join web_site\n left semi join $bla1 bla1 on (ws1.ws_order_number = bla1.ws_order_number)\n left only join web_returns on (ws1.ws_order_number = web_returns.wr_order_number)\nwhere\n cast(d_date as date) between cast('1999-4-01' as date) and\n (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))\nand ws1.ws_ship_date_sk = d_date_sk\nand ws1.ws_ship_addr_sk = ca_address_sk\nand ca_state = 'NE'\nand ws1.ws_web_site_sk = web_site_sk\nand web_company_name = 'pri'\norder by `order count`\nlimit 100;\n", parameters: 0b |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestMerge [GOOD] Test command err: 2025-03-28T09:24:54.026769Z node 1 :RESOURCE_BROKER ERROR: FinishTaskInstant failed for task 2: cannot finish unknown task ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:117:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [1:118:2146] sender: [1:121:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:156:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:118:2146] sender: [1:158:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:118:2146] sender: [1:160:2057] recipient: [1:101:2136] Leader for TabletID 9437185 is [1:118:2146] sender: [1:163:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:118:2146] sender: [1:165:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:167:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:195:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:198:2057] recipient: 
[1:100:2135] Leader for TabletID 9437184 is [1:116:2145] sender: [1:201:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:202:2057] recipient: [1:200:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:204:2057] recipient: [1:200:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:232:2057] recipient: [1:14:2061] >> TResourceBroker::TestResubmitTask ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert [GOOD] Test command err: 2025-03-28T09:23:43.247083Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788665305772070:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:43.247143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001480/r3tmp/tmpteBV0M/pdisk_1.dat 2025-03-28T09:23:44.091895Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:44.091981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:44.110237Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:44.151881Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14627, node 1 2025-03-28T09:23:44.396056Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:23:44.396097Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:23:44.396184Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:23:44.599243Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:44.599263Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:44.599270Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:44.599393Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25693 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
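[Editor's note: the two KQP_SLOW_LOG records earlier in this output embed their statement text with escaped newlines, which makes them hard to read inline. The same statements are reproduced below, unescaped and lightly re-indented but otherwise verbatim (including the original "-- NB: Subquerys" comment and the '1999-4-01' date literals); nothing here is added or inferred.

First slow query (duration 30.135985s) — three column-store tables:

CREATE TABLE t1 (
    id1 Int32 NOT NULL,
    PRIMARY KEY (id1)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t2 (
    id2 Int64 NOT NULL,
    t1_id1 Int64 NOT NULL,
    -- random_field2 Int32
    PRIMARY KEY (id2)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

CREATE TABLE t3 (
    id3 Int16 NOT NULL,
    -- random_field3 Int32
    PRIMARY KEY (id3)
) WITH (STORE = COLUMN, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 240);

Second slow query (duration 12.680670s) — TPC-DS query94 in YQL:

pragma TablePathPrefix = "/Root/test/ds/";

-- NB: Subquerys
$bla1 = (select ws_order_number
         from web_sales
         group by ws_order_number
         having COUNT(DISTINCT ws_warehouse_sk) > 1);

-- start query 1 in stream 0 using template query94.tpl and seed 2031708268
select
    count(distinct ws1.ws_order_number) as `order count`
   ,sum(ws_ext_ship_cost) as `total shipping cost`
   ,sum(ws_net_profit) as `total net profit`
from
    web_sales ws1
    cross join date_dim
    cross join customer_address
    cross join web_site
    left semi join $bla1 bla1 on (ws1.ws_order_number = bla1.ws_order_number)
    left only join web_returns on (ws1.ws_order_number = web_returns.wr_order_number)
where
    cast(d_date as date) between cast('1999-4-01' as date) and
        (cast('1999-4-01' as date) + DateTime::IntervalFromDays(60))
and ws1.ws_ship_date_sk = d_date_sk
and ws1.ws_ship_addr_sk = ca_address_sk
and ca_state = 'NE'
and ws1.ws_web_site_sk = web_site_sk
and web_company_name = 'pri'
order by `order count`
limit 100;

End of editor's note; the test log resumes below.]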
2025-03-28T09:23:45.133978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:45.361097Z node 1 :TICKET_PARSER DEBUG: Ticket E9CCC3C3DDFA224FF7FE00F99E8BE711C47133961A7082541F69ABDA2299CDC2 (ipv6:[::1]:46574) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-28T09:23:45.554888Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:46594) has now valid token of root@builtin 2025-03-28T09:23:45.730993Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-28T09:23:45.731033Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:23:45.731046Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:23:45.731084Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001480/r3tmp/tmpDhSiGd/pdisk_1.dat 2025-03-28T09:23:50.007455Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:23:50.202669Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:50.229894Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:50.229967Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:50.237157Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21656, node 4 2025-03-28T09:23:50.445290Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:50.445314Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:50.445321Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:50.445440Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6290 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:23:50.703229Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:50.811516Z node 4 :TICKET_PARSER DEBUG: Ticket E9CCC3C3DDFA224FF7FE00F99E8BE711C47133961A7082541F69ABDA2299CDC2 (ipv6:[::1]:38376) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-28T09:23:50.942906Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:38402) has now valid token of root@builtin 2025-03-28T09:23:51.019303Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-28T09:23:51.019329Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:23:51.019337Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:23:51.019365Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-28T09:23:55.144087Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788715850504570:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:55.144134Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001480/r3tmp/tmpi8EcfL/pdisk_1.dat 2025-03-28T09:23:55.495285Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:55.546843Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:55.546926Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:55.562895Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32698, node 7 2025-03-28T09:23:55.807555Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:55.807580Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:55.807590Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:55.807720Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12934 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:23:56.108364Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:56.292171Z node 7 :TICKET_PARSER DEBUG: Ticket E416A262EDF656280FFA355F8830250DB0308FE9DC25ABF75213D193CD65F67C (ipv6:[::1]:57018) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-28T09:23:56.375124Z node 7 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:57026) has now valid token of root@builtin 2025-03-28T09:23:56.479032Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-28T09:23:56.479067Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:23:56.479076Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:23:56.479108Z node 7 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-28T09:24:00.663368Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788736223661469:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:00.663486Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001480/r3tmp/tmpyJVfHm/pdisk_1.dat 2025-03-28T09:24:01.024203Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:01.080209Z node 10 :HIVE WARN: HIVE#7 ... ent_certificate:certificate verify failed. E0328 09:24:23.648101850 417863 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:23.666174060 417590 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
2025-03-28T09:24:28.859534Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7486788858628859579:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:28.874747Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001480/r3tmp/tmpH3irD2/pdisk_1.dat 2025-03-28T09:24:29.187616Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:29.227593Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:29.227693Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:29.231452Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13513, node 25 2025-03-28T09:24:29.315445Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:29.315467Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:29.315477Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:29.315617Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:29.625539Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:33.862205Z node 25 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[25:7486788858628859579:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:33.862308Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0328 09:24:39.721774078 420546 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:39.754346798 426692 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
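[Editor's note: the repeated ssl_transport_security.cc handshake failures surrounding this point appear to be the behavior under test rather than infrastructure errors — the enclosing test, TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesExpiredCert, finishes [GOOD], consistent with the server being expected to reject the client's expired certificate.]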
E0328 09:24:39.790862341 420543 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:39.812752135 426692 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:39.869697270 420543 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:39.889985133 426733 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:39.919946312 420703 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:39.948709212 420703 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:39.989062754 420543 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:40.009035841 420703 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:40.041115891 420704 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:40.060312004 420704 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 2025-03-28T09:24:41.738308Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7486788912871659376:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:41.738431Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001480/r3tmp/tmpzXvHL7/pdisk_1.dat 2025-03-28T09:24:42.075374Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:42.137351Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:42.137483Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:42.144508Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18336, node 28 2025-03-28T09:24:42.309466Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:42.309497Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:42.309508Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:42.309710Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24874 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:42.820818Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:46.738791Z node 28 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[28:7486788912871659376:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:46.738880Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0328 09:24:52.943768184 427957 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:52.970522176 434287 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:53.003157167 434286 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:53.023693399 434287 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:53.095113116 428278 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:53.119943682 434360 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:53.151801154 428276 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:53.171956824 428276 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:53.205515715 428276 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:53.222602201 428276 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. 
E0328 09:24:53.265146672 428278 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. E0328 09:24:53.284647265 427957 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1417C086:SSL routines:tls_process_client_certificate:certificate verify failed. >> TPipeTrackerTest::TestShareTablet [GOOD] >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> YdbYqlClient::CheckDefaultTableSettings1 [GOOD] >> YdbYqlClient::CheckDefaultTableSettings2 >> TTabletPipeTest::TestConnectReject >> TFlatMetrics::MaximumValue3 [GOOD] >> TFlatMetrics::MaximumValue4 [GOOD] >> YdbTableBulkUpsertOlap::UpsertMixed [GOOD] >> YdbYqlClient::AlterTableAddIndex >> TResourceBroker::TestResubmitTask [GOOD] >> TResourceBroker::TestUpdateCookie |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> TTabletPipeTest::TestConnectReject [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue4 [GOOD] >> YdbYqlClient::RetryOperationAsync [GOOD] >> YdbYqlClient::QueryLimits >> TPipeCacheTest::TestAutoConnect [GOOD] >> TResourceBroker::TestUpdateCookie [GOOD] >> TTabletPipeTest::TestShutdown >> TTabletPipeTest::TestTwoNodes >> TFlatMetrics::TimeSeriesAvg16x60 [GOOD] >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> TTabletPipeTest::TestRewriteSameNode >> TBlockBlobStorageTest::DelayedErrorsNotIgnored >> TTabletPipeTest::TestPipeWithVersionInfo >> TResourceBroker::TestQueueWithConfigure >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestAutoConnect [GOOD] >> TTabletPipeTest::TestShutdown [GOOD] >> TResourceBrokerConfig::UpdateQueues [GOOD] >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestUpdateCookie [GOOD] >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] >> TTabletPipeTest::TestTwoNodes [GOOD] >> TTabletPipeTest::TestRewriteSameNode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] Test command err: 2025-03-28T09:24:56.505923Z node 3 :PIPE_SERVER ERROR: [9437185] NodeDisconnected NodeId# 2 |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestShutdown [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] >> TResourceBroker::TestQueueWithConfigure [GOOD] >> TResourceBroker::TestOverusageDifferentResources >> TGRpcYdbTest::CreateDeleteYqlSession [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodes [GOOD] >> YdbYqlClient::TestDescribeTableWithShardStats [GOOD] >> 
YdbYqlClient::TestExplicitPartitioning >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] >> TTabletPipeTest::TestSendAfterReboot |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRewriteSameNode [GOOD] >> TResourceBroker::TestOverusageDifferentResources [GOOD] >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestOverusageDifferentResources [GOOD] Test command err: 2025-03-28T09:24:57.158661Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'compaction1\' uses unknown queue \'queue_default1\'" 2025-03-28T09:24:57.158885Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'unknown\' is required" 2025-03-28T09:24:57.159068Z node 1 :RESOURCE_BROKER ERROR: Configure result: Success: false Message: "task \'unknown\' uses unknown queue \'queue_default\'" >> TTableProfileTests::OverwriteStoragePolicy [GOOD] >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword [GOOD] >> YdbTableBulkUpsert::Uint8 [GOOD] >> YdbTableBulkUpsert::ZeroRows >> TTabletPipeTest::TestSendAfterReboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] Test command err: 2025-03-28T09:24:50.391784Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 [1:7:2054] 2025-03-28T09:24:50.392137Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:8:2055] worker 0 2025-03-28T09:24:50.392191Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:9:2056] worker 1 2025-03-28T09:24:50.392236Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:10:2057] worker 2 2025-03-28T09:24:50.392265Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:11:2058] worker 3 2025-03-28T09:24:50.392302Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:12:2059] worker 4 2025-03-28T09:24:50.392337Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:13:2060] worker 5 2025-03-28T09:24:50.392362Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:14:2061] worker 6 2025-03-28T09:24:50.392393Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:15:2062] worker 7 2025-03-28T09:24:50.392432Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:16:2063] worker 8 2025-03-28T09:24:50.392466Z node 1 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [1:7:2054] self [1:17:2064] worker 9 Sending message to [1:9:2056] from [1:7:2054] id 1 Sending message to [1:10:2057] from [1:7:2054] id 2 Sending message to [1:11:2058] from [1:7:2054] id 3 Sending message to [1:12:2059] from [1:7:2054] id 4 Sending message to [1:13:2060] from [1:7:2054] id 5 Sending message to [1:14:2061] from [1:7:2054] id 6 Sending message to [1:15:2062] from [1:7:2054] id 7 Sending message to [1:16:2063] from [1:7:2054] id 8 Sending message to [1:17:2064] from [1:7:2054] id 9 Sending message to [1:8:2055] from [1:7:2054] id 10 2025-03-28T09:24:51.233534Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [1:14:2061] 2025-03-28T09:24:51.233629Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [1:15:2062] 
2025-03-28T09:24:51.233687Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [1:16:2063] 2025-03-28T09:24:51.233743Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [1:17:2064] 2025-03-28T09:24:51.234019Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [1:8:2055] 2025-03-28T09:24:51.234064Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [1:9:2056] 2025-03-28T09:24:51.234152Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [1:10:2057] 2025-03-28T09:24:51.234196Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [1:11:2058] 2025-03-28T09:24:51.234237Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [1:12:2059] 2025-03-28T09:24:51.234301Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [1:13:2060] 2025-03-28T09:24:51.234442Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [1:8:2055] 2025-03-28T09:24:51.235899Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [1:8:2055] 2025-03-28T09:24:51.266964Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:8:2055] Initiator [1:7:2054] 2025-03-28T09:24:51.290057Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [1:9:2056] 2025-03-28T09:24:51.292070Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [1:9:2056] 2025-03-28T09:24:51.340374Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:9:2056] Initiator [1:7:2054] 2025-03-28T09:24:51.360778Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [1:10:2057] 2025-03-28T09:24:51.362236Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [1:10:2057] 2025-03-28T09:24:51.389366Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:10:2057] Initiator [1:7:2054] 2025-03-28T09:24:51.412707Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [1:11:2058] 2025-03-28T09:24:51.414251Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [1:11:2058] 2025-03-28T09:24:51.436761Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:11:2058] Initiator [1:7:2054] 2025-03-28T09:24:51.456592Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [1:12:2059] 2025-03-28T09:24:51.457599Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [1:12:2059] 2025-03-28T09:24:51.488985Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:12:2059] Initiator [1:7:2054] 2025-03-28T09:24:51.511227Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [1:13:2060] 2025-03-28T09:24:51.512652Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [1:13:2060] 2025-03-28T09:24:51.541239Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:13:2060] Initiator [1:7:2054] 2025-03-28T09:24:51.564556Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [1:14:2061] 2025-03-28T09:24:51.566008Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [1:14:2061] 2025-03-28T09:24:51.592862Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:14:2061] Initiator [1:7:2054] 2025-03-28T09:24:51.611045Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [1:15:2062] 2025-03-28T09:24:51.612047Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [1:15:2062] 2025-03-28T09:24:51.639648Z node 1 :TABLET_AGGREGATOR INFO: aggregator request 
processed [1:15:2062] Initiator [1:7:2054] 2025-03-28T09:24:51.663286Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [1:16:2063] 2025-03-28T09:24:51.664719Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [1:16:2063] 2025-03-28T09:24:51.696396Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:16:2063] Initiator [1:7:2054] 2025-03-28T09:24:51.720435Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [1:17:2064] 2025-03-28T09:24:51.722003Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [1:17:2064] 2025-03-28T09:24:51.757260Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:17:2064] Initiator [1:7:2054] 2025-03-28T09:24:51.780825Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [1:7:2054] 2025-03-28T09:24:51.780957Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [1:7:2054] 2025-03-28T09:24:51.785593Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [1:7:2054] 2025-03-28T09:24:51.785686Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [1:7:2054] 2025-03-28T09:24:51.789320Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [1:7:2054] 2025-03-28T09:24:51.789448Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [1:7:2054] 2025-03-28T09:24:51.793720Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [1:7:2054] 2025-03-28T09:24:51.793809Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [1:7:2054] 2025-03-28T09:24:51.798026Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [1:7:2054] 2025-03-28T09:24:51.798123Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [1:7:2054] 2025-03-28T09:24:51.802262Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [1:7:2054] 2025-03-28T09:24:51.802381Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [1:7:2054] 2025-03-28T09:24:51.806248Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [1:7:2054] 2025-03-28T09:24:51.806336Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [1:7:2054] 2025-03-28T09:24:51.812222Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [1:7:2054] 2025-03-28T09:24:51.812379Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [1:7:2054] 2025-03-28T09:24:51.817591Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [1:7:2054] 2025-03-28T09:24:51.817727Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [1:7:2054] 2025-03-28T09:24:51.822734Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [1:7:2054] 2025-03-28T09:24:51.822874Z node 1 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [1:7:2054] 2025-03-28T09:24:51.827968Z node 1 :TABLET_AGGREGATOR INFO: aggregator request processed [1:7:2054] Initiator [1:6:2053] TEST 2 10 duration 1.594588s 2025-03-28T09:24:52.094106Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 [2:7:2054] 2025-03-28T09:24:52.094640Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:8:2055] worker 0 2025-03-28T09:24:52.094716Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:9:2056] worker 1 2025-03-28T09:24:52.094747Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:10:2057] 
worker 2 2025-03-28T09:24:52.094779Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:11:2058] worker 3 2025-03-28T09:24:52.094808Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:12:2059] worker 4 2025-03-28T09:24:52.094835Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:13:2060] worker 5 2025-03-28T09:24:52.094861Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:14:2061] worker 6 2025-03-28T09:24:52.094889Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:15:2062] worker 7 2025-03-28T09:24:52.094938Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:16:2063] worker 8 2025-03-28T09:24:52.094977Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:17:2064] worker 9 2025-03-28T09:24:52.095012Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:18:2065] worker 10 2025-03-28T09:24:52.095041Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:19:2066] worker 11 2025-03-28T09:24:52.095069Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:20:2067] worker 12 2025-03-28T09:24:52.095098Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:21:2068] worker 13 2025-03-28T09:24:52.095126Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:22:2069] worker 14 2025-03-28T09:24:52.095181Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:23:2070] worker 15 2025-03-28T09:24:52.095256Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:24:2071] worker 16 2025-03-28T09:24:52.095285Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:25:2072] worker 17 2025-03-28T09:24:52.095314Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:26:2073] worker 18 2025-03-28T09:24:52.095365Z node 2 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [2:7:2054] self [2:27:2074] worker 19 Sending message to [2:9:2056] from [2:7:2054] id 1 Sending message to [2:10:2057] from [2:7:2054] id 2 Sending message to [2:11:2058] from [2:7:2054] id 3 Sending message to [2:12:2059] from [2:7:2054] id 4 Sending message to [2:13:2060] from [2:7:2054] id 5 Sending message to [2:14:2061] from [2:7:2054] id 6 Sending message to [2:15:2062] from [2:7:2054] id 7 Sending message to [2:16:2063] from [2:7:2054] id 8 Sending message to [2:17:2064] from [2:7: ... 
response node 19 [2:7:2054] 2025-03-28T09:24:53.329874Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [2:7:2054] 2025-03-28T09:24:53.329895Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [2:7:2054] 2025-03-28T09:24:53.329935Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 11 [2:7:2054] 2025-03-28T09:24:53.329972Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 11 [2:7:2054] 2025-03-28T09:24:53.330024Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 12 [2:7:2054] 2025-03-28T09:24:53.330055Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 12 [2:7:2054] 2025-03-28T09:24:53.330119Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 13 [2:7:2054] 2025-03-28T09:24:53.330137Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 13 [2:7:2054] 2025-03-28T09:24:53.330177Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 14 [2:7:2054] 2025-03-28T09:24:53.330194Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 14 [2:7:2054] 2025-03-28T09:24:53.330218Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [2:7:2054] 2025-03-28T09:24:53.330300Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [2:7:2054] 2025-03-28T09:24:53.334648Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [2:7:2054] 2025-03-28T09:24:53.334782Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [2:7:2054] 2025-03-28T09:24:53.338172Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [2:7:2054] 2025-03-28T09:24:53.338267Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [2:7:2054] 2025-03-28T09:24:53.341481Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [2:7:2054] 2025-03-28T09:24:53.341577Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [2:7:2054] 2025-03-28T09:24:53.346963Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [2:7:2054] 2025-03-28T09:24:53.347099Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [2:7:2054] 2025-03-28T09:24:53.352366Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [2:7:2054] 2025-03-28T09:24:53.352502Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [2:7:2054] 2025-03-28T09:24:53.357860Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [2:7:2054] 2025-03-28T09:24:53.357998Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [2:7:2054] 2025-03-28T09:24:53.365361Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [2:7:2054] 2025-03-28T09:24:53.365515Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [2:7:2054] 2025-03-28T09:24:53.370872Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [2:7:2054] 2025-03-28T09:24:53.371014Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [2:7:2054] 2025-03-28T09:24:53.376440Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [2:7:2054] 2025-03-28T09:24:53.376572Z node 2 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [2:7:2054] 2025-03-28T09:24:53.381936Z node 2 :TABLET_AGGREGATOR INFO: aggregator request processed [2:7:2054] Initiator [2:6:2053] TEST 2 20 duration 1.444870s 2025-03-28T09:24:53.588113Z node 3 :TABLET_AGGREGATOR INFO: aggregator new 
request V2 [3:7:2054] 2025-03-28T09:24:53.588245Z node 3 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [3:7:2054] self [3:8:2055] worker 0 Sending message to [3:8:2055] from [3:7:2054] id 1 Sending message to [3:8:2055] from [3:7:2054] id 2 Sending message to [3:8:2055] from [3:7:2054] id 3 Sending message to [3:8:2055] from [3:7:2054] id 4 Sending message to [3:8:2055] from [3:7:2054] id 5 Sending message to [3:8:2055] from [3:7:2054] id 6 Sending message to [3:8:2055] from [3:7:2054] id 7 Sending message to [3:8:2055] from [3:7:2054] id 8 Sending message to [3:8:2055] from [3:7:2054] id 9 Sending message to [3:8:2055] from [3:7:2054] id 10 2025-03-28T09:24:54.424264Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [3:8:2055] 2025-03-28T09:24:54.424325Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [3:8:2055] 2025-03-28T09:24:54.424366Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [3:8:2055] 2025-03-28T09:24:54.424491Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [3:8:2055] 2025-03-28T09:24:54.424563Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [3:8:2055] 2025-03-28T09:24:54.424601Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [3:8:2055] 2025-03-28T09:24:54.424642Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [3:8:2055] 2025-03-28T09:24:54.424683Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [3:8:2055] 2025-03-28T09:24:54.424735Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [3:8:2055] 2025-03-28T09:24:54.424794Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [3:8:2055] 2025-03-28T09:24:54.425210Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [3:8:2055] 2025-03-28T09:24:54.426579Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [3:8:2055] 2025-03-28T09:24:54.458129Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [3:8:2055] 2025-03-28T09:24:54.459588Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [3:8:2055] 2025-03-28T09:24:54.491310Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [3:8:2055] 2025-03-28T09:24:54.492727Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [3:8:2055] 2025-03-28T09:24:54.526667Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [3:8:2055] 2025-03-28T09:24:54.528161Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [3:8:2055] 2025-03-28T09:24:54.559268Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [3:8:2055] 2025-03-28T09:24:54.560656Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [3:8:2055] 2025-03-28T09:24:54.600255Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [3:8:2055] 2025-03-28T09:24:54.601755Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [3:8:2055] 2025-03-28T09:24:54.632901Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [3:8:2055] 2025-03-28T09:24:54.634355Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [3:8:2055] 2025-03-28T09:24:54.666911Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [3:8:2055] 2025-03-28T09:24:54.668391Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [3:8:2055] 2025-03-28T09:24:54.699587Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor 
got response node 9 [3:8:2055] 2025-03-28T09:24:54.701027Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [3:8:2055] 2025-03-28T09:24:54.732716Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [3:8:2055] 2025-03-28T09:24:54.734209Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [3:8:2055] 2025-03-28T09:24:54.782518Z node 3 :TABLET_AGGREGATOR INFO: aggregator request processed [3:8:2055] Initiator [3:7:2054] 2025-03-28T09:24:55.112327Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor got response node 0 [3:7:2054] 2025-03-28T09:24:55.113076Z node 3 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 0 [3:7:2054] 2025-03-28T09:24:55.164815Z node 3 :TABLET_AGGREGATOR INFO: aggregator request processed [3:7:2054] Initiator [3:6:2053] TEST 2 1 duration 1.836505s 2025-03-28T09:24:55.477391Z node 4 :TABLET_AGGREGATOR INFO: aggregator new request V2 Initiator [4:6:2053] self [4:7:2054] worker 0 Sending message to [4:7:2054] from [4:7:2054] id 1 Sending message to [4:7:2054] from [4:7:2054] id 2 Sending message to [4:7:2054] from [4:7:2054] id 3 Sending message to [4:7:2054] from [4:7:2054] id 4 Sending message to [4:7:2054] from [4:7:2054] id 5 Sending message to [4:7:2054] from [4:7:2054] id 6 Sending message to [4:7:2054] from [4:7:2054] id 7 Sending message to [4:7:2054] from [4:7:2054] id 8 Sending message to [4:7:2054] from [4:7:2054] id 9 Sending message to [4:7:2054] from [4:7:2054] id 10 2025-03-28T09:24:56.212163Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 1 [4:7:2054] 2025-03-28T09:24:56.212228Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 2 [4:7:2054] 2025-03-28T09:24:56.212253Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 3 [4:7:2054] 2025-03-28T09:24:56.212277Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 4 [4:7:2054] 2025-03-28T09:24:56.212386Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 5 [4:7:2054] 2025-03-28T09:24:56.212427Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 6 [4:7:2054] 2025-03-28T09:24:56.212466Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 7 [4:7:2054] 2025-03-28T09:24:56.212501Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 8 [4:7:2054] 2025-03-28T09:24:56.212535Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 9 [4:7:2054] 2025-03-28T09:24:56.212571Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor request to node 10 [4:7:2054] 2025-03-28T09:24:56.212874Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 1 [4:7:2054] 2025-03-28T09:24:56.214573Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 1 [4:7:2054] 2025-03-28T09:24:56.244488Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 2 [4:7:2054] 2025-03-28T09:24:56.246085Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 2 [4:7:2054] 2025-03-28T09:24:56.281417Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 3 [4:7:2054] 2025-03-28T09:24:56.282914Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 3 [4:7:2054] 2025-03-28T09:24:56.317817Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 4 [4:7:2054] 2025-03-28T09:24:56.319417Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 4 [4:7:2054] 2025-03-28T09:24:56.349670Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 5 [4:7:2054] 
2025-03-28T09:24:56.351303Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 5 [4:7:2054] 2025-03-28T09:24:56.398826Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 6 [4:7:2054] 2025-03-28T09:24:56.400371Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 6 [4:7:2054] 2025-03-28T09:24:56.428333Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 7 [4:7:2054] 2025-03-28T09:24:56.429905Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 7 [4:7:2054] 2025-03-28T09:24:56.459293Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 8 [4:7:2054] 2025-03-28T09:24:56.460711Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 8 [4:7:2054] 2025-03-28T09:24:56.492295Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 9 [4:7:2054] 2025-03-28T09:24:56.493923Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 9 [4:7:2054] 2025-03-28T09:24:56.529283Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor got response node 10 [4:7:2054] 2025-03-28T09:24:56.531125Z node 4 :TABLET_AGGREGATOR INFO: aggregator actor merged response node 10 [4:7:2054] 2025-03-28T09:24:56.593065Z node 4 :TABLET_AGGREGATOR INFO: aggregator request processed [4:7:2054] Initiator [4:6:2053] TEST 2 1 duration 1.487990s >> TGRpcAuthentication::DisableLoginAuthentication [GOOD] >> TGRpcAuthentication::NoConnectRights >> TYqlDecimalTests::DecimalKey [GOOD] >> YdbImport::ImportFromS3ToExistingTable [GOOD] >> TYqlDecimalTests::SimpleUpsertSelect ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcYdbTest::CreateDeleteYqlSession [GOOD] Test command err: 2025-03-28T09:24:32.390900Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788873875849547:2238];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:32.391308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001408/r3tmp/tmpyRu7og/pdisk_1.dat 2025-03-28T09:24:32.904428Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:32.938461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:32.938591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:32.944192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3277, node 1 2025-03-28T09:24:33.170481Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:33.170506Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:33.170512Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:33.170595Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29832 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:33.592764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:37.057828Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788893727616123:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:37.058006Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001408/r3tmp/tmpQuLEql/pdisk_1.dat 2025-03-28T09:24:37.248617Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:37.286112Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:37.286207Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:37.290469Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20940, node 4 2025-03-28T09:24:37.379923Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:37.379948Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:37.379955Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:37.380096Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12630 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:37.664557Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:37.748717Z node 4 :TX_PROXY ERROR: Actor# [4:7486788893727617046:2610] txid# 281474976710658, issues: { message: "Unknown column \'BlaBla\' specified in key column list" severity: 1 } 2025-03-28T09:24:41.702081Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788913216991767:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:41.702214Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001408/r3tmp/tmpMlntYY/pdisk_1.dat 2025-03-28T09:24:41.999493Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:42.060061Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:42.060160Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:42.062573Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63701, node 7 2025-03-28T09:24:42.263451Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:42.263476Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:42.263483Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:42.263627Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23055 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:42.594293Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:46.764478Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788932990596239:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:46.764560Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001408/r3tmp/tmpj2cFEL/pdisk_1.dat 2025-03-28T09:24:46.881242Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:46.921024Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:46.921152Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:46.928828Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8176, node 10 2025-03-28T09:24:47.066428Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:47.066460Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:47.066468Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:47.066643Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10146 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:47.324189Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:47.394293Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:47.594617Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:50.081233Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788950170466927:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:50.081327Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:50.081676Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788950170466939:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:50.085614Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-28T09:24:50.111913Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486788950170466941:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-28T09:24:50.184008Z node 10 :TX_PROXY ERROR: Actor# [10:7486788950170467010:3068] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:51.069954Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe130rzc6mzmff4367dgpnn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MWY1OGVlYzYtNGYxYzIzZDUtYzljNzhiYmYtY2E1OTJkMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:51.096852Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe130rzc6mzmff4367dgpnn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MWY1OGVlYzYtNGYxYzIzZDUtYzljNzhiYmYtY2E1OTJkMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:51.105440Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe130rzc6mzmff4367dgpnn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=MWY1OGVlYzYtNGYxYzIzZDUtYzljNzhiYmYtY2E1OTJkMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:51.288083Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe131t21ktgk6785t27trxc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzJmN2Q0MTctNDQ1MGJmMmQtNjI4Mjk5OGItMjY0OTYzYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:52.926256Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788959911209569:2144];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001408/r3tmp/tmp2IOvkO/pdisk_1.dat 2025-03-28T09:24:53.052206Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:24:53.124227Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27567, node 13 2025-03-28T09:24:53.265911Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:53.266015Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:53.288719Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:53.288740Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:53.288749Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:53.288904Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:24:53.327562Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14455 WaitRootIsUp 'Root'... 
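The repeated "KQP_EXECUTER ERROR: ... Database not set, use /Root" lines above are the executer falling back to the root database because the client connection carries no database. On the client side the fix is one setting; a sketch reusing the NYdb driver from the first sketch (port is a placeholder):

    // Pass the database explicitly so KQP does not have to fall back to /Root.
    auto config = TDriverConfig()
        .SetEndpoint("localhost:8176")
        .SetDatabase("/Root");
    TDriver driver(config);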
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:53.575052Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning [GOOD] Test command err: 2025-03-28T09:24:37.223796Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788894673485853:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:37.223879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013f7/r3tmp/tmpmR4nux/pdisk_1.dat 2025-03-28T09:24:37.646011Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:37.654946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:37.655044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19498, node 1 2025-03-28T09:24:37.662283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:37.714957Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:24:37.714988Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:24:37.755628Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:37.755658Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:37.755672Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:37.755825Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28128 WaitRootIsUp 'Root'... 
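The test reported above, YdbYqlClient::CreateTableWithUniformPartitionsAndAutoPartitioning, exercises a table created with a fixed number of uniform shards plus automatic split-by-size. A sketch of that shape; the partitioning-settings builder and TPartitioningPolicy method names are taken from the public SDK headers and should be treated as assumptions:

    // Continues the first sketch; exact builder names assumed from SDK headers.
    auto desc = TTableBuilder()
        .AddNullableColumn("Key", EPrimitiveType::Uint64)
        .AddNullableColumn("Value", EPrimitiveType::Utf8)
        .SetPrimaryKeyColumn("Key")
        .BeginPartitioningSettings()
            .SetPartitioningBySize(true)   // let shards split as they grow
            .SetMinPartitionsCount(4)      // never merge below the initial count
        .EndPartitioningSettings()
        .Build();

    auto status = session.CreateTable("/Root/Table", std::move(desc),
        TCreateTableSettings().PartitioningPolicy(
            TPartitioningPolicy().UniformPartitions(4))).GetValueSync();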
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:38.054222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:42.265607Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788918846057784:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:42.265682Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013f7/r3tmp/tmpWUolDH/pdisk_1.dat 2025-03-28T09:24:42.574025Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:42.626202Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:42.626323Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:42.629226Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20324, node 4 2025-03-28T09:24:42.743716Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:42.743745Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:42.743753Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:42.743900Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16782 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:43.066087Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:45.684300Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:45.892318Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-03-28T09:24:45.939159Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2025-03-28T09:24:47.525986Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788940608399108:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:47.526064Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013f7/r3tmp/tmpFGBvdV/pdisk_1.dat 2025-03-28T09:24:47.732072Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:47.764601Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:47.764712Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:47.770586Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18620, node 7 2025-03-28T09:24:47.869991Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:47.870021Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:47.870029Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:47.870160Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31237 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:48.130366Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:50.740876Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:52.792224Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788961634286327:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:52.792287Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013f7/r3tmp/tmp3bh7pr/pdisk_1.dat 2025-03-28T09:24:53.021367Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:53.060065Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:53.060144Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:53.063057Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12432, node 10 2025-03-28T09:24:53.143105Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:53.143131Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:53.143139Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:53.143279Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29604 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:53.432399Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:53.447781Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:24:56.193511Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 >> ClientStatsCollector::ExternalMetricRegistryByRawPtr [GOOD] >> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsert::AsyncIndexShouldSucceed [GOOD] Test command err: 2025-03-28T09:24:30.499979Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788866704924757:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:30.500046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00140f/r3tmp/tmpW5dBVG/pdisk_1.dat 2025-03-28T09:24:31.008821Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:31.018155Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:31.018258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:31.028839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64854, node 1 2025-03-28T09:24:31.155326Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:31.155350Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:31.155359Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:31.155460Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13956 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:31.578125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:33.803832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788879589827597:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:33.803956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:34.316894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:34.539858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788883884795090:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:34.539951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:34.540188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788883884795095:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:34.544771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:34.586492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788883884795097:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:24:34.650878Z node 1 :TX_PROXY ERROR: Actor# [1:7486788883884795166:2803] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:34.729328Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe12ev1cwaw6qjym8v0hbgy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTFlNjJjNTQtOWMxNTUxYjctYjNmZWZkNjMtN2Y2ZjU1ZGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:34.845831Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqe12ht3fazrgmhq1mzaqe7v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODYwNDgzZjEtNDJmODNlNDUtNjE1YWZkZGMtY2VkMzVjMjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:34.871860Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153874882, txId: 281474976710662] shutting down 2025-03-28T09:24:36.523131Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788893139771992:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:36.524421Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00140f/r3tmp/tmpmIr0u6/pdisk_1.dat 2025-03-28T09:24:36.689968Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:36.722051Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:36.722132Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:36.726448Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4466, node 4 2025-03-28T09:24:36.811279Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:36.811301Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:36.811308Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:36.811449Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19041 WaitRootIsUp 'Root'... 
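The cluster of KQP_WORKLOAD_SERVICE warnings, the "Scheduled retry ... doublechecking" message, and the TX_PROXY "path exist, request accepts it" error above form the expected first-query bootstrap of the default resource pool: several actors race to create /Root/.metadata/workload_manager/pools/default, one wins, and the rest observe the existing path and proceed. From the SDK side the query still succeeds; a sketch, with the session from the first sketch:

    // First query on a fresh database: it triggers the default-pool bootstrap
    // logged above, then completes normally.
    auto result = session.ExecuteDataQuery(
        "SELECT 1;",
        TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
    ).GetValueSync();
    Y_ENSURE(result.IsSuccess(), result.GetIssues().ToString());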
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:37.035991Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:39.355435Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Decimal(22,9) value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Date value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Datetime value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Timestamp value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Interval value CLIENT_INTERNAL_ERROR
: Error: GRpc error: (13): Unable to parse request
: Error: Grpc error response on endpoint localhost:4466 BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Yson value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid Json value BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid JSON for JsonDocument provided: TAPE_ERROR: The JSON document has an improper structure: missing or superfluous commas, braces, missing keys, etc. BAD_REQUEST
: Error: Bulk upsert to table '/Root/TestInvalidData' Invalid DyNumber string representation 2025-03-28T09:24:41.299821Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788914143415250:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:41.299871Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00140f/r3tmp/tmpOlpBnz/pdisk_1.dat 2025-03-28T09:24:41.565276Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:41.604211Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:41.604271Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:41.607871Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10161, node 7 2025-03-28T09:24:41.835923Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:41.835953Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:41.835964Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:41.836144Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5213 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:42.152505Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:44.637915Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:44.765089Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788927028318317:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
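The run of BAD_REQUEST lines above comes from a test table, /Root/TestInvalidData, deliberately fed malformed Decimal, date/time, Yson, Json, JsonDocument and DyNumber cells; each bad batch is rejected with a per-request issue rather than a transport error (the lone CLIENT_INTERNAL_ERROR is the gRPC layer refusing a payload it cannot parse at all). A sketch of how such a rejection surfaces to an SDK caller, reusing `client` from the first sketch; column names here are placeholders:

    // A well-formed batch succeeds; one malformed cell turns the whole request
    // into BAD_REQUEST with an issue like "Invalid Decimal(22,9) value".
    TValueBuilder rows;
    rows.BeginList()
        .AddListItem().BeginStruct()
            .AddMember("Key").Uint64(1)
            .AddMember("Value").Utf8("ok")
        .EndStruct()
    .EndList();

    auto res = client.BulkUpsert("/Root/TestInvalidData", rows.Build()).GetValueSync();
    if (res.GetStatus() == EStatus::BAD_REQUEST) {
        Cerr << res.GetIssues().ToString();
    }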
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:44.765190Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788927028318325:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:44.765193Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:44.768620Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:44.791443Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486788927028318331:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:44.857145Z node 7 :TX_PROXY ERROR: Actor# [7:7486788927028318416:2801] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:45.064448Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe12vjscgtbdsd5xx8cben1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=ZTY0YjNjMmItYjM3NTJmMGYtMjNiYjFjMzktNmUxM2IwYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:46.745281Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788935956223009:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:46.745354Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00140f/r3tmp/tmpqCqYVE/pdisk_1.dat 2025-03-28T09:24:46.909801Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:46.944042Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:46.944137Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:46.947972Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18362, node 10 2025-03-28T09:24:47.007786Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:47.007811Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:47.007822Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:47.007980Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27855 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:47.357074Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:49.968423Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
: Error: Bulk upsert to table '/Root/ui8' Only async-indexed tables are supported by BulkUpsert
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' unknown table 2025-03-28T09:24:51.924715Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788956852229437:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:51.925862Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00140f/r3tmp/tmpwvqI3W/pdisk_1.dat 2025-03-28T09:24:52.125177Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:52.160729Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:52.160827Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:52.166387Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9217, node 13 2025-03-28T09:24:52.221867Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:52.221890Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:52.221900Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:52.222046Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13360 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:52.363953Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:55.466933Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:56.666106Z node 13 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill
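The pair of rejections around this point — "Only async-indexed tables are supported by BulkUpsert" for /Root/ui8, and "unknown table" for a direct write to the private /Root/ui8/Value_index/indexImplTable — pin down the BulkUpsert contract: it maintains async secondary indexes only, and index implementation tables are never client-addressable. A sketch of the accepted shape, assuming the AddAsyncSecondaryIndex builder method as found in recent SDK headers:

    // With an *async* index, BulkUpsert into the main table is accepted; a sync
    // index (AddSyncSecondaryIndex) yields the "Only async-indexed" error above.
    auto desc = TTableBuilder()
        .AddNullableColumn("Key", EPrimitiveType::Uint8)
        .AddNullableColumn("Value", EPrimitiveType::Utf8)
        .SetPrimaryKeyColumn("Key")
        .AddAsyncSecondaryIndex("Value_index", TVector<TString>{"Value"})
        .Build();
    auto st = session.CreateTable("/Root/ui8", std::move(desc)).GetValueSync();
    // client.BulkUpsert("/Root/ui8", ...) now succeeds; writing to
    // "/Root/ui8/Value_index/indexImplTable" stays forbidden either way.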
: Error: Bulk upsert to table '/Root/ui8/Value_index/indexImplTable' unknown table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterReboot [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:108:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:109:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:117:2057] recipient: [1:104:2137] Leader for TabletID 9437185 is [1:118:2146] sender: [1:121:2057] recipient: [1:105:2138] Leader for TabletID 9437184 is [1:116:2145] sender: [1:156:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:118:2146] sender: [1:158:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:118:2146] sender: [1:160:2057] recipient: [1:101:2136] Leader for TabletID 9437185 is [1:118:2146] sender: [1:163:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:118:2146] sender: [1:165:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:167:2057] recipient: [1:164:2176] Leader for TabletID 9437185 is [1:166:2177] sender: [1:195:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:198:2057] recipient: [1:100:2135] Leader for TabletID 9437184 is [1:116:2145] sender: [1:201:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:116:2145] sender: [1:202:2057] recipient: [1:200:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:204:2057] recipient: [1:200:2200] Leader for TabletID 9437184 is [1:203:2201] sender: [1:232:2057] recipient: [1:14:2061] >> TBlockBlobStorageTest::DelayedErrorsNotIgnored [GOOD] >> TFlatMetrics::DecayingAverageAvg [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts_AccessDenied >> TTableProfileTests::DescribeTableWithPartitioningPolicy [GOOD] >> TTableProfileTests::ExplicitPartitionsComplex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::DecayingAverageAvg [GOOD] Test command err: ... waiting for all block results ... passing block result OK for [1:101:2135] ... blocking block result NO_GROUP for [1:102:2135] ... blocking block result NO_GROUP for [1:103:2135] ... 
blocking block result NO_GROUP for [1:104:2135] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcLdapAuthentication::LdapAuthWithEmptyPassword [GOOD] Test command err: 2025-03-28T09:24:34.250400Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788884208730987:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:34.250437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001402/r3tmp/tmpzrRY6H/pdisk_1.dat 2025-03-28T09:24:34.808489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:34.808608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:34.813246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:34.844909Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29056, node 1 2025-03-28T09:24:34.939993Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:24:34.940013Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:24:34.976196Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:34.976212Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:34.976218Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:34.976324Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7622 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:35.287354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:38.413207Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788898772324970:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:38.413295Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001402/r3tmp/tmpx3Ef6D/pdisk_1.dat 2025-03-28T09:24:38.645792Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:38.699957Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:38.700051Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:38.708055Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64715, node 4 2025-03-28T09:24:38.812572Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:38.812595Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:38.812602Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:38.812729Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10267 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-28T09:24:39.053785Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:24:43.290729Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788919900916478:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:43.290795Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001402/r3tmp/tmpLO0NC5/pdisk_1.dat 2025-03-28T09:24:43.668987Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:43.705023Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:43.705115Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:43.707790Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17366, node 7 2025-03-28T09:24:43.800586Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:43.800611Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:43.800619Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:43.800754Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16876 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:44.121748Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:48.317432Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788944954937563:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:48.317512Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001402/r3tmp/tmpuA8PGx/pdisk_1.dat 2025-03-28T09:24:48.515280Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:48.564206Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:48.564313Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:48.566284Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1546, node 10 2025-03-28T09:24:48.667844Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:48.667875Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:48.667884Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:48.668021Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15425 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:48.987788Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:53.554707Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788963679253454:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:53.554777Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001402/r3tmp/tmp9Iqy9x/pdisk_1.dat 2025-03-28T09:24:53.796239Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:53.863071Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:53.863177Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:53.867959Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4930, node 13 2025-03-28T09:24:54.114863Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:54.114884Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:54.114892Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:54.115013Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6767 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:54.517328Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDecimalTests::DecimalKey [GOOD] Test command err: 2025-03-28T09:24:27.347053Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788850730016831:2119];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:27.350739Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001411/r3tmp/tmpZduht6/pdisk_1.dat 2025-03-28T09:24:27.917017Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:27.917158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:27.924305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:27.939185Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18678, node 1 2025-03-28T09:24:28.082867Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:28.085123Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:28.085157Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:28.085273Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65513 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:28.429082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:30.538663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:30.724446Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788863614919880:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:30.724454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788863614919869:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:30.724554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:30.728454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-03-28T09:24:30.748936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788863614919883:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking }
2025-03-28T09:24:30.829114Z node 1 :TX_PROXY ERROR: Actor# [1:7486788863614919956:2803] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:24:31.330508Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe12dw2cjgzg3pc7w651dst, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRjYWJiNDctMjU4ZDFlMTctNDZhNWU3NTEtOGM5M2RlMDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:24:31.503921Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqe12efqb5rv0z7mtqp7njsz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRjYWJiNDctMjU4ZDFlMTctNDZhNWU3NTEtOGM5M2RlMDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:24:31.630570Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710663. Ctx: { TraceId: 01jqe12emra9g2d9eb856awhpy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRjYWJiNDctMjU4ZDFlMTctNDZhNWU3NTEtOGM5M2RlMDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:24:31.742569Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jqe12erm2r0b2y0xfjsz61db, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRjYWJiNDctMjU4ZDFlMTctNDZhNWU3NTEtOGM5M2RlMDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:24:31.827948Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqe12ew49r9czbg3zx0jy64g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjRjYWJiNDctMjU4ZDFlMTctNDZhNWU3NTEtOGM5M2RlMDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}.
Database not set, use /Root 2025-03-28T09:24:33.577833Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788878768049631:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:33.577921Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001411/r3tmp/tmppv7LXC/pdisk_1.dat 2025-03-28T09:24:34.040175Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:34.083233Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:34.083308Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:34.097921Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63872, node 4 2025-03-28T09:24:34.235928Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:34.235965Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:34.235973Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:34.236112Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6807 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:34.483768Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:36.771532Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:36.842754Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788891652952701:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:36.842816Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:36.842985Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788891652952713:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:36.845978Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-03-28T09:24:36.863557Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486788891652952715:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:24:36.964441Z node 4 :TX_PROXY ERROR: Actor# [4:7486788891652952788:2786] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:37.041487Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe12kv97eahxmqt7319sk8a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MjYzYzNmYTMtY2NhMmI3YTktNjZiZDRlOTEtOGQ1Yzc1YTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:37.161040Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqe12m1rc60wsxgf25xqzm1s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MjYzYzNmYTMtY2NhMmI3YTktNjZiZDRlOTEtOGQ1Yzc1YTA=, CurrentExecutionId: , CustomerSuppliedI ... 03-28T09:24:48.886962Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqe12z550hhrqdj0ash3j13k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OGNjZTI1MDctOTFjMmFlNzItYmU3YzYxMGItZThhNWFmNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:49.340690Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqe12zm47s0h764zys5h7sa4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OGNjZTI1MDctOTFjMmFlNzItYmU3YzYxMGItZThhNWFmNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:49.352355Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqe12zm47s0h764zys5h7sa4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OGNjZTI1MDctOTFjMmFlNzItYmU3YzYxMGItZThhNWFmNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:49.470205Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jqe1302k10bdp4vd19f06gn5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OGNjZTI1MDctOTFjMmFlNzItYmU3YzYxMGItZThhNWFmNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:49.605456Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jqe1306836vxx21xqq2tfcsg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OGNjZTI1MDctOTFjMmFlNzItYmU3YzYxMGItZThhNWFmNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:49.759680Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01jqe130aadmmapcks0qk16zva, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OGNjZTI1MDctOTFjMmFlNzItYmU3YzYxMGItZThhNWFmNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:49.870125Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jqe130f721seekxrc8nj210s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OGNjZTI1MDctOTFjMmFlNzItYmU3YzYxMGItZThhNWFmNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:49.973221Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710672. 
Ctx: { TraceId: 01jqe130jj8y2e2mashav9trrv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OGNjZTI1MDctOTFjMmFlNzItYmU3YzYxMGItZThhNWFmNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:50.284242Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqe130nv9bw0dsec5wpwff4g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OGNjZTI1MDctOTFjMmFlNzItYmU3YzYxMGItZThhNWFmNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:50.287453Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jqe130nv9bw0dsec5wpwff4g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=OGNjZTI1MDctOTFjMmFlNzItYmU3YzYxMGItZThhNWFmNGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:51.918912Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788955621407281:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:51.918993Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001411/r3tmp/tmpUlKUgU/pdisk_1.dat 2025-03-28T09:24:52.042357Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:52.073576Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:52.073666Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:52.079326Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64699, node 13 2025-03-28T09:24:52.195427Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:52.195453Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:52.195462Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:52.195606Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9460 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:24:52.468013Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:55.167689Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:55.284108Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486788972801277639:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:55.284206Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486788972801277647:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:55.284263Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:24:55.288279Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-03-28T09:24:55.311474Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7486788972801277653:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:55.416388Z node 13 :TX_PROXY ERROR: Actor# [13:7486788972801277730:2795] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:55.533029Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe135vj8fz2hc4hraa4tfc0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2RkZmRjMTQtMTAxYmNkM2MtNjFkNWE4NjItMWU1ZjUyZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:55.671884Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1363xc2bd06zvvm82hrt7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2RkZmRjMTQtMTAxYmNkM2MtNjFkNWE4NjItMWU1ZjUyZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:55.800339Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe13686ajbamf02268agtm8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2RkZmRjMTQtMTAxYmNkM2MtNjFkNWE4NjItMWU1ZjUyZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:55.916897Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe136bz29dsq8c19a994x2q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2RkZmRjMTQtMTAxYmNkM2MtNjFkNWE4NjItMWU1ZjUyZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:56.057198Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe136fwezkbyt74s2an9ywd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2RkZmRjMTQtMTAxYmNkM2MtNjFkNWE4NjItMWU1ZjUyZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:56.202891Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqe136m0014jramynzq1tapw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2RkZmRjMTQtMTAxYmNkM2MtNjFkNWE4NjItMWU1ZjUyZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:56.340367Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqe136rg0dy57wvqf3fg68af, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2RkZmRjMTQtMTAxYmNkM2MtNjFkNWE4NjItMWU1ZjUyZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:56.625531Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqe136wvb7mtw17w2065bfq2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2RkZmRjMTQtMTAxYmNkM2MtNjFkNWE4NjItMWU1ZjUyZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:56.917246Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jqe1375v3k1jtbd8ybjb2rkf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2RkZmRjMTQtMTAxYmNkM2MtNjFkNWE4NjItMWU1ZjUyZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:24:56.919736Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7486788955621407281:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:56.920152Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:24:57.217750Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqe137eydncvn63ftz0dpdc8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=M2RkZmRjMTQtMTAxYmNkM2MtNjFkNWE4NjItMWU1ZjUyZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> YdbOlapStore::LogLast50 [GOOD] >> YdbOlapStore::LogLast50ByResource ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::OverwriteStoragePolicy [GOOD] Test command err: 2025-03-28T09:24:15.816771Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788800368545964:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:15.822878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001433/r3tmp/tmplJStmc/pdisk_1.dat 2025-03-28T09:24:16.365689Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:16.380856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:16.380939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:16.394075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25325, node 1 2025-03-28T09:24:16.623231Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:16.623251Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:16.623258Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:16.623353Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23955 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:24:17.203739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:17.239462Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:23955 2025-03-28T09:24:17.784796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:17.857956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:18.436000Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486788813647253482:2096];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:18.473512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:18.473609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:18.481505Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T09:24:18.482477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:18.500501Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; TClient is connected to server localhost:23955 2025-03-28T09:24:19.140271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:23955 TClient::Ls request: /Root/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743153859286 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-03-28T09:24:19.599631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:23955 TClient::Ls request: /Root/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1743153859720 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-03-28T09:24:20.046743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:23955 TClient::Ls request: /Root/table-3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-3" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710662 CreateStep: 1743153860133 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-03-28T09:24:20.464363Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-28T09:24:20.465187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:24:23.703353Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788835655795128:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:23.703423Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001433/r3tmp/tmpNlzUAK/pdisk_1.dat 2025-03-28T09:24:23.960504Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:23.985798Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:23.985878Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:23.996201Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6664, node 4 2025-03-28T09:24:24.155543Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:24.155575Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:24.155583Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:24.155740Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15710 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:24.420268Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:15710 2025-03-28T09:24:24.748318Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at ... 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001433/r3tmp/tmpyTAhoY/pdisk_1.dat 2025-03-28T09:24:48.592054Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:48.609346Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:48.609474Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:48.621095Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10195, node 13 2025-03-28T09:24:48.795490Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:48.795522Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:48.795532Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:48.795713Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16475 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:49.211431Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:16475 2025-03-28T09:24:49.791466Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:49.834901Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:50.345188Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7486788949720089309:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:50.345250Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:24:50.349803Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:50.349946Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:50.355610Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-03-28T09:24:50.404594Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16475 2025-03-28T09:24:50.886471Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:16475 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743153891360 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-03-28T09:24:52.038483Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:16475 TClient::Ls request: /Root/ydb_ut_tenant/table-2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-2" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710661 CreateStep: 1743153892550 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-03-28T09:24:53.126589Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:24:53.282943Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7486788942189422766:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:53.283010Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:16475 TClient::Ls request: /Root/ydb_ut_tenant/table-3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-3" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710662 CreateStep: 1743153893500 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-03-28T09:24:54.177610Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:16475 TClient::Ls request: /Root/ydb_ut_tenant/table-4 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-4" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710663 CreateStep: 1743153894660 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-4" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... 
(TRUNCATED) 2025-03-28T09:24:55.265088Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:24:55.377318Z node 15 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[15:7486788949720089309:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:55.377392Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=timeout; TClient is connected to server localhost:16475 TClient::Ls request: /Root/ydb_ut_tenant/table-5 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-5" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710664 CreateStep: 1743153895720 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-5" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) 2025-03-28T09:24:56.452183Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-03-28T09:24:56.462195Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected |87.5%| [TA] $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.5%| [TA] {RESULT} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> YdbYqlClient::TestReadTableBatchLimits [GOOD] >> DstCreator::GlobalConsistency >> DstCreator::NonExistentSrc >> DstCreator::SameOwner >> DstCreator::ExistingDst >> DstCreator::ReplicationModeMismatch >> DstCreator::WithIntermediateDir >> DstCreator::Basic >> YdbS3Internal::BadRequests [GOOD] >> YdbYqlClient::CheckDefaultTableSettings2 [GOOD] >> YdbYqlClient::CheckDefaultTableSettings3 >> YdbLogStore::Dirs [GOOD] >> YdbLogStore::LogTable >> TCmsTest::RequestRestartServicesMultipleNodes >> TDowntimeTest::AddDowntime [GOOD] >> TDowntimeTest::HasUpcomingDowntime [GOOD] >> TDowntimeTest::CleanupOldSegments [GOOD] >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction >> TMaintenanceApiTest::SingleCompositeActionGroup >> TCmsTest::ManagePermissions >> TDowntimeTest::SetIgnoredDowntimeGap [GOOD] >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup >> TCmsTest::ManageRequestsWrong >> TCmsTest::WalleRebootDownNode >> TCmsTest::StateStorageNodesFromOneRing >> TCmsTest::WalleTasks >> TCmsTest::StateRequestNode >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled >> TCmsTenatsTest::TestTenantLimit >> YdbYqlClient::QueryLimits [GOOD] >> YdbYqlClient::QueryStats >> TCmsTest::RequestReplaceDevices >> TCmsTenatsTest::TestClusterRatioLimit >> TRegisterNodeOverDiscoveryService::ServerWithIssuerVerification_ClientWithSameIssuer [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TDowntimeTest::CleanupOldSegments [GOOD] >> YdbYqlClient::AlterTableAddIndex [GOOD] >> YdbYqlClient::AlterTableAddIndexAsyncOp ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestReadTableBatchLimits [GOOD] Test command err: 2025-03-28T09:24:37.662597Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788897500805567:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:37.669486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013f5/r3tmp/tmpMqcQ55/pdisk_1.dat 2025-03-28T09:24:38.091437Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:38.103498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:38.103594Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:38.109399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16689, node 1 2025-03-28T09:24:38.191232Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:38.191266Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:38.191272Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:38.191364Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21438 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:38.517196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:38.675400Z node 1 :GRPC_SERVER INFO: Got grpc request# ListEndpointsRequest, traceId# 01jqe12nm9eevgkp5xyka667nf, sdkBuildInfo# ydb-cpp-sdk/3.2.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:58652, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# 9.978318s 2025-03-28T09:24:38.708743Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateSessionRequest, traceId# 01jqe12nn97jygvhwctbzt7zw3, sdkBuildInfo# ydb-cpp-sdk/3.2.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:58662, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:40.727340Z node 1 :GRPC_SERVER DEBUG: Got grpc request# CreateTableRequest, traceId# 01jqe12qmq8e8mkjqe9wnq64w8, sdkBuildInfo# ydb-cpp-sdk/3.2.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:58672, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:24:40.728132Z node 1 :TX_PROXY DEBUG: actor# [1:7486788897500805793:2141] Handle TEvProposeTransaction 2025-03-28T09:24:40.728168Z node 1 :TX_PROXY DEBUG: actor# [1:7486788897500805793:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T09:24:40.728220Z node 1 :TX_PROXY DEBUG: actor# [1:7486788897500805793:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486788910385708475:2625] 2025-03-28T09:24:40.786642Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788910385708475:2625] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Test" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Fk" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" KeyColumnNames: "Fk" UniformPartitionsCount: 16 PartitionConfig { } Temporary: false } CreateIndexedTable { } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:58672" 2025-03-28T09:24:40.786708Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788910385708475:2625] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:24:40.787005Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788910385708475:2625] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:24:40.787072Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788910385708475:2625] txid# 
281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:24:40.787255Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788910385708475:2625] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:24:40.787374Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788910385708475:2625] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:24:40.787405Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788910385708475:2625] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:24:40.787535Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788910385708475:2625] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T09:24:40.789086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:40.793528Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788910385708475:2625] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-03-28T09:24:40.793589Z node 1 :TX_PROXY DEBUG: Actor# [1:7486788910385708475:2625] txid# 281474976710658 SEND to# [1:7486788910385708474:2336] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-03-28T09:24:40.794575Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:40.794678Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:24:40.794784Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:40.794969Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:24:40.846591Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788910385708545:2693], Recipient [1:7486788910385708675:2354]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:40.846851Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788910385708537:2685], Recipient [1:7486788910385708671:2351]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:40.847626Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788910385708542:2690], Recipient [1:7486788910385708648:2342]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:40.848147Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788910385708550:2698], Recipient [1:7486788910385708643:2339]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:40.848695Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788910385708546:2694], Recipient [1:7486788910385708644:2340]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:40.849155Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788910385708536:2684], Recipient [1:7486788910385708652:2346]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:40.849576Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788910385708547:2695], Recipient [1:7486788910385708674:2353]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:40.850097Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender 
[1:7486788910385708540:2688], Recipient [1:7486788910385708659:2350]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:40.850616Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788910385708543:2691], Recipient [1:7486788910385708651:2345]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:40.851561Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788910385708535:2683], Recipient [1:7486788910385708653:2347]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:40.851966Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788910385708539:2687], Recipient [1:7486788910385708649:2343]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:40.852446Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788910385708538:2686], Recipient [1:7486788910385708658:2349]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:40.852912Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788910385708544:2692], Recipient [1:7486788910385708650:2344]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:40.853279Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788910385708541:2689], Recipient [1:7486788910385708656:2348]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:40.853573Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788910385708548:2696], Recipient [1:7486788910385708673:2352]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:40.853876Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:7486788910385708549:2697], Recipient [1:7486788910385708647:2341]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:24:40.856928Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7486788910385708546:2694], Recipient [1:7486788910385708644:2340]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:24:40.857443Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037896 actor [1:7486788910385708644:2340] 2025-03-28T09:24:40.857733Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7486788910385708548:2696], Recipient [1:7486788910385708673:2352]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:24:40.857869Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:24:40.858019Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037893 actor [1:7486788910385708673:2352] 2025-03-28T09:24:40.858215Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:24:40.871427Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7486788910385708547:2695], Recipient [1:7486788910385708674:2353]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:24:40.871806Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:7486788910385708543:2691], Recipient [1:7486788910385708651:2345]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:24:40.871897Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:7486788910385708674:2353] 2025-03-28T09:24:40.872074Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-2 ... 
] at 72075186224037897 on unit ReadTableScan 2025-03-28T09:24:59.243843Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715679] at 72075186224037897 is Continue 2025-03-28T09:24:59.243854Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:24:59.243862Z node 10 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037897 2025-03-28T09:24:59.243871Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-03-28T09:24:59.243879Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-03-28T09:24:59.243898Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-03-28T09:24:59.244048Z node 10 :READ_TABLE_API DEBUG: [10:7486788988667679616:2402] Adding quota request to queue ShardId: 0, TxId: 281474976715678 2025-03-28T09:24:59.244076Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [10:7486788988667679715:2254], Recipient [10:7486788975782775893:2342]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-28T09:24:59.244086Z node 10 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037897, TxId: 281474976715679, MessageQuota: 25 2025-03-28T09:24:59.244094Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-28T09:24:59.244125Z node 10 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037897, TxId: 281474976715679, MessageQuota: 25 2025-03-28T09:24:59.244168Z node 10 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037897 2025-03-28T09:24:59.244178Z node 10 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715679, at: 72075186224037897 2025-03-28T09:24:59.244233Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [10:7486788975782775893:2342], Recipient [10:7486788975782775893:2342]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:24:59.244246Z node 10 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:24:59.244272Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037897 2025-03-28T09:24:59.244282Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:24:59.244295Z node 10 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715679] at 72075186224037897 for ReadTableScan 2025-03-28T09:24:59.244305Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715679] at 72075186224037897 on unit ReadTableScan 2025-03-28T09:24:59.244316Z node 10 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715679] at 72075186224037897 error: , IsFatalError: 0 2025-03-28T09:24:59.244332Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715679] at 72075186224037897 is Executed 2025-03-28T09:24:59.244343Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715679] at 72075186224037897 executing on unit ReadTableScan 2025-03-28T09:24:59.244351Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715679] at 72075186224037897 to execution unit FinishPropose 2025-03-28T09:24:59.244362Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715679] at 72075186224037897 on unit FinishPropose 2025-03-28T09:24:59.244378Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715679] at 72075186224037897 is DelayComplete 
2025-03-28T09:24:59.244388Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715679] at 72075186224037897 executing on unit FinishPropose 2025-03-28T09:24:59.244397Z node 10 :TX_DATASHARD TRACE: Add [0:281474976715679] at 72075186224037897 to execution unit CompletedOperations 2025-03-28T09:24:59.244407Z node 10 :TX_DATASHARD TRACE: Trying to execute [0:281474976715679] at 72075186224037897 on unit CompletedOperations 2025-03-28T09:24:59.244429Z node 10 :TX_DATASHARD TRACE: Execution status for [0:281474976715679] at 72075186224037897 is Executed 2025-03-28T09:24:59.244440Z node 10 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715679] at 72075186224037897 executing on unit CompletedOperations 2025-03-28T09:24:59.244448Z node 10 :TX_DATASHARD TRACE: Execution plan for [0:281474976715679] at 72075186224037897 has finished 2025-03-28T09:24:59.244458Z node 10 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:24:59.244467Z node 10 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037897 2025-03-28T09:24:59.244476Z node 10 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037897 has no attached operations 2025-03-28T09:24:59.244484Z node 10 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037897 2025-03-28T09:24:59.244508Z node 10 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037897 2025-03-28T09:24:59.244518Z node 10 :TX_DATASHARD TRACE: Complete execution for [0:281474976715679] at 72075186224037897 on unit FinishPropose 2025-03-28T09:24:59.244548Z node 10 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715679 at tablet 72075186224037897 send to client, exec latency: 35 ms, propose latency: 35 ms, status: COMPLETE 2025-03-28T09:24:59.244587Z node 10 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037897 2025-03-28T09:24:59.244848Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7486788988667679617:2402], Recipient [10:7486788975782775906:2347]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743153899242 TxId: 281474976715678 2025-03-28T09:24:59.244979Z node 10 :READ_TABLE_API NOTICE: [10:7486788988667679616:2402] Finish grpc stream, status: 400000 2025-03-28T09:24:59.244994Z node 10 :READ_TABLE_API DEBUG: [10:7486788988667679616:2402] Send zero quota to Shard 0, TxId 281474976715678 2025-03-28T09:24:59.245008Z node 10 :READ_TABLE_API DEBUG: [10:7486788988667679616:2402] Send zero quota to Shard 0, TxId 281474976715678 2025-03-28T09:24:59.245019Z node 10 :READ_TABLE_API DEBUG: [10:7486788988667679616:2402] Send zero quota to Shard 0, TxId 281474976715678 2025-03-28T09:24:59.245028Z node 10 :READ_TABLE_API DEBUG: [10:7486788988667679616:2402] Send zero quota to Shard 0, TxId 281474976715678 2025-03-28T09:24:59.245033Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7486788988667679617:2402], Recipient [10:7486788975782775885:2340]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743153899242 TxId: 281474976715678 2025-03-28T09:24:59.245136Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7486788988667679617:2402], Recipient [10:7486788975782775904:2345]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743153899242 TxId: 281474976715678 
2025-03-28T09:24:59.245181Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7486788988667679617:2402], Recipient [10:7486788975782775914:2348]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743153899242 TxId: 281474976715678 2025-03-28T09:24:59.245295Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7486788988667679617:2402], Recipient [10:7486788975782775901:2343]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743153899242 TxId: 281474976715678 2025-03-28T09:24:59.245310Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7486788988667679617:2402], Recipient [10:7486788975782775903:2344]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743153899242 TxId: 281474976715678 2025-03-28T09:24:59.245412Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7486788988667679617:2402], Recipient [10:7486788975782775893:2342]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743153899242 TxId: 281474976715678 2025-03-28T09:24:59.245430Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7486788988667679617:2402], Recipient [10:7486788975782775888:2341]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743153899242 TxId: 281474976715678 2025-03-28T09:24:59.245538Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7486788988667679617:2402], Recipient [10:7486788975782775884:2339]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743153899242 TxId: 281474976715678 2025-03-28T09:24:59.245550Z node 10 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [10:7486788988667679617:2402], Recipient [10:7486788975782775905:2346]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1743153899242 TxId: 281474976715678 2025-03-28T09:24:59.310290Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d0e80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-28T09:24:59.310290Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f1e80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-28T09:24:59.310557Z node 10 :GRPC_SERVER DEBUG: [0x51a00002ee80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-28T09:24:59.310592Z node 10 :GRPC_SERVER DEBUG: [0x51a000078c80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-28T09:24:59.310772Z node 10 :GRPC_SERVER DEBUG: [0x51a00002b280] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:59.310807Z node 10 :GRPC_SERVER DEBUG: [0x51a00002be80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-28T09:24:59.310951Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f1880] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:59.310989Z node 10 :GRPC_SERVER DEBUG: [0x51a00007aa80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:59.311128Z node 10 :GRPC_SERVER DEBUG: [0x51a000076880] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:59.311176Z node 10 
:GRPC_SERVER DEBUG: [0x51a000078080] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-03-28T09:24:59.311332Z node 10 :GRPC_SERVER DEBUG: [0x51a0000f2480] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-28T09:24:59.311368Z node 10 :GRPC_SERVER DEBUG: [0x51a00018a880] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-28T09:24:59.311515Z node 10 :GRPC_SERVER DEBUG: [0x51a000084080] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-28T09:24:59.311566Z node 10 :GRPC_SERVER DEBUG: [0x51a00010b680] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:59.311695Z node 10 :GRPC_SERVER DEBUG: [0x51a00002e880] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-28T09:24:59.311736Z node 10 :GRPC_SERVER DEBUG: [0x51a0000ca280] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-28T09:24:59.311865Z node 10 :GRPC_SERVER DEBUG: [0x51a000192680] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbS3Internal::BadRequests [GOOD] Test command err:
2025-03-28T09:24:34.019418Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788884967350538:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:34.020041Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001406/r3tmp/tmpUxhI5m/pdisk_1.dat 2025-03-28T09:24:34.602502Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:34.625475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:34.625575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:34.634653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19823, node 1 2025-03-28T09:24:34.821280Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:34.821302Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:34.821310Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:34.821420Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64746 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:35.117058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:37.183105Z node 1 :KQP_PROXY WARN: Failed to parse session id: unknownSesson test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001406/r3tmp/tmpxzq6ts/pdisk_1.dat 2025-03-28T09:24:39.058894Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:24:39.157994Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:39.200905Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:39.200999Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:39.209269Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20618, node 4 2025-03-28T09:24:39.365810Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:39.365828Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:39.365833Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:39.365978Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30197 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:24:39.716174Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:44.189251Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788927152493207:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:44.189293Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001406/r3tmp/tmpBYH2d8/pdisk_1.dat 2025-03-28T09:24:44.447966Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24899, node 7 2025-03-28T09:24:44.540352Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:44.540433Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:44.556446Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:44.640939Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:44.640959Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:44.640965Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:44.641096Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8703 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:44.910182Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:49.078790Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788948103084888:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:49.079166Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001406/r3tmp/tmpFBIz7I/pdisk_1.dat 2025-03-28T09:24:49.239676Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:49.259768Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:49.259859Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:49.264922Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21393, node 10 2025-03-28T09:24:49.431536Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:49.431561Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:49.431569Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:49.431840Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64275 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:49.776436Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:52.558953Z node 10 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:24:52.560066Z node 10 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-03-28T09:24:52.561057Z node 10 :KQP_PROXY DEBUG: TraceId: "01jqe130j71y61gpchswecdbz2", Request has 18445000919816.990588s seconds to be completed 2025-03-28T09:24:52.563863Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTdjYTViNWMtYTFhY2IwMzYtYzFlMmM4LTVmMWNiYTFm, ActorId: [0:0:0], ActorState: unknown state, Create session actor with id OTdjYTViNWMtYTFhY2IwMzYtYzFlMmM4LTVmMWNiYTFm 2025-03-28T09:24:52.563938Z node 10 :KQP_PROXY DEBUG: TraceId: "01jqe130j71y61gpchswecdbz2", Created new session, sessionId: ydb://session/3?node_id=10&id=OTdjYTViNWMtYTFhY2IwMzYtYzFlMmM4LTVmMWNiYTFm, workerId: [10:7486788960987987792:2331], database: , longSession: 1, local sessions count: 1 2025-03-28T09:24:52.563965Z node 10 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-03-28T09:24:52.564133Z node 10 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jqe130j71y61gpchswecdbz2 2025-03-28T09:24:52.564205Z node 10 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-28T09:24:52.564608Z node 10 :KQP_PROXY DEBUG: Updated table service config. 2025-03-28T09:24:52.564638Z node 10 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:24:52.564688Z node 10 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-03-28T09:24:52.564745Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:24:52.564789Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:24:52.564868Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:24:52.565052Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTdjYTViNWMtYTFhY2IwMzYtYzFlMmM4LTVmMWNiYTFm, ActorId: [10:7486788960987987792:2331], ActorState: unknown state, session actor bootstrapped 2025-03-28T09:24:52.567348Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:24:52.567397Z node 10 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:24:52.588025Z node 10 :KQP_PROXY DEBUG: Received ping session request, has local session: ydb://session/3?node_id=10&id=OTdjYTViNWMtYTFhY2IwMzYtYzFlMmM4LTVmMWNiYTFm, rpc ctrl: [10:7486788960987987803:2333], sameNode: 1, trace_id: 2025-03-28T09:24:52.588069Z node 10 :KQP_PROXY TRACE: Attach local session: [10:7486788960987987792:2331] to rpc: [10:7486788960987987803:2333] on same node 2025-03-28T09:24:52.603999Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=OTdjYTViNWMtYTFhY2IwMzYtYzFlMmM4LTVmMWNiYTFm, ActorId: [10:7486788960987987792:2331], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:24:52.604057Z node 10 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=10&id=OTdjYTViNWMtYTFhY2IwMzYtYzFlMmM4LTVmMWNiYTFm, ActorId: [10:7486788960987987792:2331], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:24:52.604094Z node 10 :KQP_SESSION DEBUG: 
SessionId: ydb://session/3?node_id=10&id=OTdjYTViNWMtYTFhY2IwMzYtYzFlMmM4LTVmMWNiYTFm, ActorId: [10:7486788960987987792:2331], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:24:52.604129Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTdjYTViNWMtYTFhY2IwMzYtYzFlMmM4LTVmMWNiYTFm, ActorId: [10:7486788960987987792:2331], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:24:52.604214Z node 10 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=10&id=OTdjYTViNWMtYTFhY2IwMzYtYzFlMmM4LTVmMWNiYTFm, ActorId: [10:7486788960987987792:2331], ActorState: unknown state, Session actor destroyed 2025-03-28T09:24:52.604554Z node 10 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=10&id=OTdjYTViNWMtYTFhY2IwMzYtYzFlMmM4LTVmMWNiYTFm, workerId: [10:7486788960987987792:2331], local sessions count: 0 2025-03-28T09:24:52.616247Z node 10 :KQP_PROXY DEBUG: Received ping session request, request_id: 3, sender: [10:7486788960987987806:2335], trace_id: 2025-03-28T09:24:52.616421Z node 10 :KQP_PROXY NOTICE: Session not found: ydb://session/3?node_id=10&id=OTdjYTViNWMtYTFhY2IwMzYtYzFlMmM4LTVmMWNiYTFm 2025-03-28T09:24:52.616562Z node 10 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [10:7486788960987987806:2335], selfId: [10:7486788948103085093:2261], source: [10:7486788948103085093:2261] 2025-03-28T09:24:54.331687Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788966966478295:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:54.344985Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001406/r3tmp/tmplMYHCp/pdisk_1.dat 2025-03-28T09:24:54.529670Z node 13 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13689, node 13 2025-03-28T09:24:54.647931Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:54.648062Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:54.677946Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:54.712476Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:54.712504Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:54.712511Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:54.712640Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26160 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:54.990263Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:57.977946Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:58.471218Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486788984146350545:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:58.471222Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486788984146350552:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:58.471377Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:58.475081Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:58.498281Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7486788984146350559:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:58.578324Z node 13 :TX_PROXY ERROR: Actor# [13:7486788984146350693:4289] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:59.047086Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe138z5fq8avnvq6d9d6hye, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTdjMGQ4ZGUtNTQzZDZmZTMtZjIzZmUxMjEtZTdmZjNkNTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:59.320891Z node 13 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7486788966966478295:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:59.320981Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> TCmsTest::StateStorageNodesFromOneRing [GOOD]
>> TCmsTest::StateStorageTwoBrokenRings
>> TClusterInfoTest::DeviceId [GOOD]
>> TClusterInfoTest::FillInfo [GOOD]
>> TCmsTenatsTest::CollectInfo
>> YdbTableBulkUpsert::ZeroRows [GOOD]
>> YdbTableBulkUpsertOlap::ParquetImportBug
>> TCmsTest::RequestRestartServicesMultipleNodes [GOOD]
>> TCmsTest::RequestRestartServicesDryRun
>> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled [GOOD]
>> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction
>> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup [GOOD]
>> TMaintenanceApiTest::ActionReason
>> TCmsTest::ManageRequestsWrong [GOOD]
>> TCmsTest::ManageRequestsDry
>> TCmsTest::StateRequestNode [GOOD]
>> TCmsTest::StateRequestUnknownNode
>> TCmsTest::StateRequest
>> TCmsTenatsTest::TestNoneTenantPolicy
>> TMaintenanceApiTest::ManyActionGroupsWithSingleAction [GOOD]
>> TMaintenanceApiTest::CreateTime
>> TCmsTest::ManagePermissions [GOOD]
>> TCmsTest::ManagePermissionWrongRequest
>> ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD]
>> TGRpcAuthentication::NoConnectRights [GOOD]
>> TCmsTenatsTest::TestTenantLimit [GOOD]
>> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy
>> TCmsTest::WalleRebootDownNode [GOOD]
>> TCmsTest::WalleCleanupTest
>> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientWithCorrectCerts_AccessDenied [GOOD]
>> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts
>> TCmsTenatsTest::TestClusterRatioLimit [GOOD]
>> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode
>> TCmsTest::RequestReplaceDevices [GOOD]
>> TCmsTest::RequestReplaceDevicePDisk
>> DstCreator::NonExistentSrc [GOOD]
>> DstCreator::KeyColumnsSizeMismatch
>> DstCreator::ReplicationModeMismatch [GOOD]
>> DstCreator::ReplicationConsistencyLevelMismatch
>> DstCreator::ExistingDst [GOOD]
>> DstCreator::WithIntermediateDir [GOOD]
>> DstCreator::EmptyReplicationConfig
>> DstCreator::WithAsyncIndex
>> DstCreator::Basic [GOOD]
>> DstCreator::CannotFindColumn
>> DstCreator::SameOwner [GOOD]
>> DstCreator::SamePartitionCount
>> DstCreator::GlobalConsistency [GOOD]
>> DstCreator::KeyColumnNameMismatch
>> TYqlDecimalTests::SimpleUpsertSelect [GOOD]
>> TYqlDecimalTests::NegativeValues
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> 
ClientStatsCollector::ExternalMetricRegistryStdSharedPtr [GOOD] Test command err: 2025-03-28T09:24:36.203669Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788892012512791:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:36.206639Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013fb/r3tmp/tmpsukZoN/pdisk_1.dat 2025-03-28T09:24:36.610005Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:36.657696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:36.657825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:36.662022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32623, node 1 2025-03-28T09:24:36.752714Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:36.752740Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:36.752750Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:36.752866Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65073 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:37.054213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:39.159659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788904897415640:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:39.159732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788904897415629:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:39.159857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:39.163017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:39.201568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788904897415643:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:24:39.278517Z node 1 :TX_PROXY ERROR: Actor# [1:7486788904897415725:2689] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:39.651645Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710660. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=1&id=YjUxZmQxMmMtNWM0MGFjYTMtNDU2NDMxNWUtZTEyOWFkZjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-03-28T09:24:41.211475Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788911884784673:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:41.212198Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013fb/r3tmp/tmp7gIMa7/pdisk_1.dat 2025-03-28T09:24:41.436520Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20292, node 4 2025-03-28T09:24:41.542141Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:41.542260Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:41.548909Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:41.702385Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:41.702405Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:41.702414Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:41.702587Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8950 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:42.005066Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:44.554418Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788924769687586:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:44.554517Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788924769687578:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:44.554842Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:44.557698Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:44.593899Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486788924769687592:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:24:44.655861Z node 4 :TX_PROXY ERROR: Actor# [4:7486788924769687676:2683] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:46.324338Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788934155906583:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:46.324461Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013fb/r3tmp/tmpqw7RVY/pdisk_1.dat 2025-03-28T09:24:46.488398Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:46.519665Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:46.519757Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:46.522522Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8539, node 7 2025-03-28T09:24:46.680345Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:46.680374Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:46.680382Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:46.680506Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20110 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709 ... =, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:51.902022Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jqe132hw7e5cc47nz3ear9ma, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzA3OTdhNGItZDk3NTJiN2UtZTBkMTk0ZjItMzA0MGJmZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:51.903376Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NzA3OTdhNGItZDk3NTJiN2UtZTBkMTk0ZjItMzA0MGJmZGM=, ActorId: [7:7486788947040809502:2335], ActorState: ExecuteState, TraceId: 01jqe132hw7e5cc47nz3ear9ma, Create QueryResponse for error on request, msg: 2025-03-28T09:24:51.903908Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715680. 
Ctx: { TraceId: 01jqe132hw7e5cc47nz3ear9ma, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzA3OTdhNGItZDk3NTJiN2UtZTBkMTk0ZjItMzA0MGJmZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:52.100049Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqe132jg1gtgfzrzgca75ytx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzA3OTdhNGItZDk3NTJiN2UtZTBkMTk0ZjItMzA0MGJmZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:52.196736Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jqe132rz3meh4gtak2y4ehth, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NmEwYzI3NTMtZTBlM2U5Ni01NGNiNTJlMy1jMzNkODk5Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:52.227283Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jqe132vr36pd0xngzrafet2z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzA3OTdhNGItZDk3NTJiN2UtZTBkMTk0ZjItMzA0MGJmZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:52.229002Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=NzA3OTdhNGItZDk3NTJiN2UtZTBkMTk0ZjItMzA0MGJmZGM=, ActorId: [7:7486788947040809502:2335], ActorState: ExecuteState, TraceId: 01jqe132vr36pd0xngzrafet2z, Create QueryResponse for error on request, msg: 2025-03-28T09:24:52.229587Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jqe132vr36pd0xngzrafet2z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzA3OTdhNGItZDk3NTJiN2UtZTBkMTk0ZjItMzA0MGJmZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:53.882781Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788962418739769:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:53.882904Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013fb/r3tmp/tmpzd9iMe/pdisk_1.dat 2025-03-28T09:24:54.085945Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10919, node 10 2025-03-28T09:24:54.203918Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:54.204009Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:54.218362Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:54.218394Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:54.218402Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:54.218570Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:24:54.220443Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18234 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:24:54.515302Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:24:57.262628Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788979598610005:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:57.262714Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788979598610012:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:57.262759Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:57.267501Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:57.292564Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486788979598610019:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:24:57.389184Z node 10 :TX_PROXY ERROR: Actor# [10:7486788979598610095:2686] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:59.163143Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788992200472763:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:59.163228Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013fb/r3tmp/tmpooxs6P/pdisk_1.dat 2025-03-28T09:24:59.325513Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:59.356324Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:59.356421Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:59.362779Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12887, node 13 2025-03-28T09:24:59.425872Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:59.425897Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:59.425907Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:59.426048Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:59.695010Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:02.472011Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486789005085375725:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:02.472093Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486789005085375717:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:02.472153Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:02.475970Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:25:02.499007Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7486789005085375731:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:25:02.557641Z node 13 :TX_PROXY ERROR: Actor# [13:7486789005085375807:2678] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TGRpcAuthentication::NoConnectRights [GOOD] Test command err: 2025-03-28T09:24:25.413205Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788842867480226:2142];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:25.413311Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001418/r3tmp/tmp91yETZ/pdisk_1.dat 2025-03-28T09:24:25.951576Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:25.960270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:25.960418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:25.971587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5505, node 1 2025-03-28T09:24:26.177346Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:26.177373Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:26.177381Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:26.177480Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10061 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:26.610278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:26.634854Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:10061 TClient is connected to server localhost:10061 2025-03-28T09:24:27.243110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:29.055563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788860047350401:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:29.055655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:29.055749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788860047350413:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:29.060550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480 2025-03-28T09:24:29.090945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788860047350415:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2025-03-28T09:24:29.173687Z node 1 :TX_PROXY ERROR: Actor# [1:7486788860047350499:2713] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:10061 2025-03-28T09:24:29.696370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:24:34.199460Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:24:34.199759Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:24:34.199935Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001418/r3tmp/tmpyuGYPw/pdisk_1.dat 2025-03-28T09:24:34.797976Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:24:34.870072Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:34.870252Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:34.890506Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:35.176529Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [4:1003:2807], Recipient [4:555:2468]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:24:35.176643Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:24:35.176696Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:24:35.176839Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [4:1000:2805], Recipient [4:555:2468]: {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:24:35.176877Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:24:35.284542Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateSubDomain SubDomain { Name: "tenant" } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:24:35.284840Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /Root/tenant, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:24:35.284972Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: tenant, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-28T09:24:35.285145Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-03-28T09:24:35.285339Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:24:35.285478Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:35.285552Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715657:0, at 
schemeshard: 72057594046644480 2025-03-28T09:24:35.285659Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:24:35.285728Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:24:35.285795Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:24:35.288631Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-03-28T09:24:35.288825Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE DATABASE, path: /Root/tenant 2025-03-28T09:24:35.288900Z node 4 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:24:35.288952Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976715657:0 2025-03-28T09:24:35.289262Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [4:555:2468], Recipient [4:555:2468]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:24:35.289316Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:24:35.289511Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:24:35.289585Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:24:35.289778Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:24:35.289884Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:24:35.289935Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:776:2622], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-03-28T09:24:35.289996Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:776:2622], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 2 2025-03-28T09:24:35.290451Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:24:35.290530Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxCreateSubDomain, at tablet# 72057594046644480 2025-03-28T09:24:35.2905 ... 
ateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 1 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 0 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82488 } ShardState: 2 UserTablePartOwners: 72075186224037890 NodeId: 5 StartTime: 1458 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-28T09:24:47.074956Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [5:1643:2459], Recipient [4:555:2468]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037890 TableLocalId: 3 Generation: 1 Round: 2 TableStats { DataSize: 75 RowCount: 1 IndexSize: 0 InMemSize: 0 LastAccessTime: 2040 LastUpdateTime: 2040 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 1 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 0 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82488 } ShardState: 2 UserTablePartOwners: 72075186224037890 NodeId: 5 StartTime: 1458 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-28T09:24:47.075020Z node 4 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-28T09:24:47.075081Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 75 rowCount 1 cpuUsage 0 2025-03-28T09:24:47.075244Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] raw table stats: DataSize: 75 RowCount: 1 IndexSize: 0 InMemSize: 0 LastAccessTime: 2040 LastUpdateTime: 2040 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 1 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 1 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 2 HasLoanedParts: false Channels { Channel: 1 DataSize: 30 IndexSize: 0 } Channels { Channel: 2 DataSize: 45 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-28T09:24:47.075301Z node 4 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-28T09:24:47.152890Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe12xv88ppr97b112ry2mgy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZGQ1YWM1MjYtNTAxODUxOTctYWM0ZGFlMjUtN2NlODc3ZTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:24:48.813183Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486788943192930576:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:48.813238Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001418/r3tmp/tmphFuaGn/pdisk_1.dat 2025-03-28T09:24:48.935549Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:48.970167Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:48.970253Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:48.975074Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26453, node 6 2025-03-28T09:24:49.087973Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:49.087994Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:49.088002Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:49.088113Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8394 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:49.351013Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:8394 2025-03-28T09:24:53.511084Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486788965363646185:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:53.511155Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001418/r3tmp/tmpqMDksP/pdisk_1.dat 2025-03-28T09:24:53.857242Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:53.883772Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:53.883894Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:53.887933Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19725, node 9 2025-03-28T09:24:54.214536Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:54.214561Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:54.214577Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:54.214750Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3159 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:54.608734Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:58.933043Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7486788986021705607:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:58.933167Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001418/r3tmp/tmpORe8uv/pdisk_1.dat 2025-03-28T09:24:59.124144Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:59.161840Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:59.161938Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:59.168748Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24472, node 12 2025-03-28T09:24:59.248055Z node 12 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:59.248087Z node 12 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:59.248100Z node 12 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:59.248274Z node 12 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61912 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:59.576261Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:61912 >> TCmsTenatsTest::CollectInfo [GOOD] >> TCmsTenatsTest::RequestRestartServices >> TMaintenanceApiTest::SingleCompositeActionGroup [GOOD] >> TMaintenanceApiTest::SimplifiedMirror3DC >> TCmsTest::StateRequestUnknownNode [GOOD] >> TCmsTest::StateRequestUnknownMultipleNodes >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions >> TMaintenanceApiTest::ActionReason [GOOD] >> TCmsTest::StateRequest [GOOD] >> TCmsTest::ScheduledEmergencyDuringRollingRestart >> TCmsTest::StateStorageTwoBrokenRings [GOOD] >> TCmsTest::StateStorageRollingRestart >> YdbMonitoring::SelfCheckWithNodesDying [GOOD] >> YdbOlapStore::BulkUpsert >> TCmsTest::RequestRestartServicesDryRun [GOOD] >> TCmsTest::RequestReplacePDiskDoesntBreakGroup >> TCmsTest::ManageRequestsDry [GOOD] >> TCmsTest::Notifications >> TTableProfileTests::ExplicitPartitionsComplex [GOOD] >> TTableProfileTests::DescribeTableOptions >> TMaintenanceApiTest::CreateTime [GOOD] >> TMaintenanceApiTest::LastRefreshTime >> TCmsTest::ManagePermissionWrongRequest [GOOD] >> TCmsTest::ManageRequests |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::ActionReason [GOOD] >> TCmsTenatsTest::TestNoneTenantPolicy [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost >> TCmsTest::WalleCleanupTest [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartMode >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] >> TMaintenanceApiTest::RequestReplaceDevicePDisk >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientProvidesCorruptedPrivatekey [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled >> TCmsTest::RequestReplaceDevicePDisk [GOOD] >> TCmsTest::RequestReplaceDevicePDiskByPath >> TCmsTenatsTest::TestClusterLimit >> TCmsTest::TestKeepAvailableMode >> YdbYqlClient::QueryStats [GOOD] >> YdbYqlClient::RenameTables ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::CheckDefaultTableSettings3 [GOOD] Test command err: test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ec/r3tmp/tmpoj7vvN/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24918, node 1 TClient is connected to server localhost:6300 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. waiting... test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ec/r3tmp/tmpElUfnB/pdisk_1.dat 2025-03-28T09:24:45.161480Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:24:45.296390Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:45.338625Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:45.338749Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:45.342812Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4354, node 4 2025-03-28T09:24:45.447397Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:45.447425Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:45.447432Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:45.447594Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22598 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:45.731386Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:45.750987Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:24:48.093031Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788944028906602:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:48.093133Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:48.373732Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:48.568343Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788944028906784:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:48.568460Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:48.568980Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788944028906789:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:48.577049Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:48.601145Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486788944028906791:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:48.668169Z node 4 :TX_PROXY ERROR: Actor# [4:7486788944028906866:2807] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:48.816948Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe12z9q9zf3fts66vrdzz2f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZmIxMzFhNC1hM2Q0YTBkOS1kNmIyZWI0Zi0yYTdiOGQz, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:48.883782Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:24:49.032201Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-28T09:24:50.700359Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788953037376476:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:50.700413Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ec/r3tmp/tmp6pctWP/pdisk_1.dat 2025-03-28T09:24:51.011785Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:51.037172Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:51.037267Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:51.040780Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21708, node 7 2025-03-28T09:24:51.167350Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:51.167374Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:51.167382Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:51.167520Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6791 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:51.424257Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:54.123446Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:56.084323Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788978710476375:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:56.084403Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ec/r3tmp/tmpEKMDy7/pdisk_1.dat 2025-03-28T09:24:56.369438Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28894, node 10 2025-03-28T09:24:56.490919Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:56.491063Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:56.505775Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:56.544208Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:56.544233Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:56.544241Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:56.544385Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63502 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:56.812685Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:59.391943Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:25:01.166769Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486788999820205924:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:01.166851Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ec/r3tmp/tmp9R7NqA/pdisk_1.dat 2025-03-28T09:25:01.412067Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:01.448784Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:01.448873Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:01.460532Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24481, node 13 2025-03-28T09:25:01.558367Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:01.558386Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:01.558393Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:01.558499Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19059 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:01.845539Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:04.500918Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 >> TCmsTest::TestKeepAvailableModeScheduled >> TCmsTest::StateRequestUnknownMultipleNodes [GOOD] >> TCmsTest::StateStorageAvailabilityMode >> DstCreator::EmptyReplicationConfig [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] >> TCmsTest::ScheduledEmergencyDuringRollingRestart [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart >> TCmsTest::RequestReplacePDiskDoesntBreakGroup [GOOD] >> TCmsTest::RequestReplacePDiskConsecutiveWithDone >> DstCreator::SamePartitionCount [GOOD] >> TCmsTest::Notifications [GOOD] >> TCmsTest::Mirror3dcPermissions >> DstCreator::WithAsyncIndex [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode >> DstCreator::CannotFindColumn [GOOD] >> DstCreator::KeyColumnNameMismatch [GOOD] >> TMaintenanceApiTest::LastRefreshTime [GOOD] |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] >> TCmsTest::WalleTasks [GOOD] >> TCmsTest::WalleTasksWithNodeLimit >> DstCreator::KeyColumnsSizeMismatch [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientProvidesEmptyClientCerts [GOOD] >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] >> TCmsTest::ManageRequests [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::EmptyReplicationConfig [GOOD] Test command err: 2025-03-28T09:25:00.768450Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788993821498482:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:00.768597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: 
SetPath # /home/runner/.ya/build/build_root/2909/001a18/r3tmp/tmpOEmUHS/pdisk_1.dat 2025-03-28T09:25:01.239498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:01.239598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:01.252465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:01.327288Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:5560 TServer::EnableGrpc on GrpcPort 19909, node 1 2025-03-28T09:25:01.631374Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:01.631418Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:01.631430Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:01.631576Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5560 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:02.164933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:02.187578Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:25:02.192534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:02.338555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153902224 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153902441 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153902224 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153902441 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-03-28T09:25:02.408199Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:02.408335Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:02.408367Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:25:02.409043Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:25:03.850165Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle 
NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153902315, tx_id: 281474976710658 } } } 2025-03-28T09:25:03.850532Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:25:03.852267Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-28T09:25:03.853922Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153902441 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIop ... e 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:04.495431Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:04.495523Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:04.497274Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4411 TServer::EnableGrpc on GrpcPort 7017, node 2 2025-03-28T09:25:04.701626Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:04.701648Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:04.701654Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:04.701767Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4411 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:05.004630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:05.011695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:05.078826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153905052 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153905143 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153905052 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153905143 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-03-28T09:25:05.107004Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:05.107216Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:05.107255Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:25:05.107742Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:25:06.995192Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153905115, tx_id: 281474976715658 } } } 2025-03-28T09:25:06.995731Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:25:06.997093Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-28T09:25:06.998059Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153905143 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-28T09:25:06.998232Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Empty replication config >> TCmsTenatsTest::TestTenantLimitForceRestartMode 
[GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled >> TCmsTest::RequestReplaceDevicePDiskByPath [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::LastRefreshTime [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] Test command err: 2025-03-28T09:25:00.792063Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788993865046259:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:00.801216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001a70/r3tmp/tmpn1sOSy/pdisk_1.dat 2025-03-28T09:25:01.243185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:01.243298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:01.245120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:01.302973Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:11374 TServer::EnableGrpc on GrpcPort 9624, node 1 2025-03-28T09:25:01.631252Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:01.631291Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:01.631297Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:01.631424Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11374 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:02.164718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:02.192530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:02.321396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153902224 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153902392 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153902224 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153902392 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-03-28T09:25:02.353842Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:02.353976Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:02.354004Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:25:02.354626Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:25:03.753543Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153902315, tx_id: 281474976710658 } } } 2025-03-28T09:25:03.760039Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:25:03.762429Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-28T09:25:03.764208Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153902392 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_NONE ConsistencyLevel: CONSISTENCY_LEVEL_UNKNOWN } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 720575 ... 
7968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1256 TServer::EnableGrpc on GrpcPort 24820, node 2 2025-03-28T09:25:04.762401Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:04.762429Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:04.762434Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:04.762547Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1256 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:05.023141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:05.027458Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:25:05.030162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:05.065420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153905073 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153905136 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153905073 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153905136 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-03-28T09:25:05.097068Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:05.097202Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:05.097224Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:25:05.097632Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:25:07.067448Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153905101, tx_id: 281474976715658 } } } 2025-03-28T09:25:07.067752Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:25:07.069161Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-28T09:25:07.070121Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: 
"/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153905136 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } 
TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-28T09:25:07.070301Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication consistency level mismatch: expected: CONSISTENCY_LEVEL_ROW, got: 1 >> TCmsTenatsTest::TestTenantRatioLimit |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithAsyncIndex [GOOD] Test command err: 2025-03-28T09:25:00.757313Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788995342408123:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:00.757471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001a3b/r3tmp/tmpuegxJC/pdisk_1.dat 2025-03-28T09:25:01.205423Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:01.208899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:01.209025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:01.224659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22961 TServer::EnableGrpc on GrpcPort 19601, node 1 2025-03-28T09:25:01.631447Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:01.631473Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:01.631486Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:01.631634Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22961 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:02.180918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:02.214131Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:25:02.222848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153902336 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153902259 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153902336 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-03-28T09:25:02.378731Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:02.379020Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:02.379060Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:25:02.383330Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:25:03.753803Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153902336, tx_id: 281474976710658 } } } 2025-03-28T09:25:03.760046Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:25:03.762422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-03-28T09:25:03.763071Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-28T09:25:03.763116Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-03-28T09:25:03.792846Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-03-28T09:25:03.792876Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 4] TClient::Ls request: /Root/Dir/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153903834 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: 
false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-03-28T09:25:04.394508Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789011449144180:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:04.394582Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001a3b/r3tmp/tmpVS3vMo/pdisk_1.dat 2025-03-28T09:25:04.475056Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:04.525534Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:04.525610Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:04.526765Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18271 TServer::EnableGrpc on GrpcPort 22733, node 2 2025-03-28T09:25:04.685011Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:04.685032Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:04.685039Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:04.685144Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:04.961325Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:04.971012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743153905262 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyCo... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153905010 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743153905262 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 18446... (TRUNCATED) 2025-03-28T09:25:05.256051Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:05.256229Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:05.256240Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:25:05.256711Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:25:07.301790Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: 
Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153905262, tx_id: 281474976715658 } } } 2025-03-28T09:25:07.302071Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:25:07.303750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:25:07.304493Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-03-28T09:25:07.304516Z node 2 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2025-03-28T09:25:07.329889Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-03-28T09:25:07.329921Z node 2 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 5] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153907369 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key... 
(TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnNameMismatch [GOOD] Test command err: 2025-03-28T09:25:00.766933Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788994211884600:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:00.767310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001a32/r3tmp/tmpPC0MwF/pdisk_1.dat 2025-03-28T09:25:01.219291Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:01.248505Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:01.248657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:01.251029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16405 TServer::EnableGrpc on GrpcPort 28252, node 1 2025-03-28T09:25:01.638829Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:01.638858Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:01.638866Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:01.638999Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16405 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:02.177060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:02.205683Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:25:02.224445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153902329 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153902238 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153902329 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-03-28T09:25:02.353775Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:02.353917Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:02.353944Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:25:02.354513Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:25:04.002137Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: 
Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153902329, tx_id: 281474976710658 } } } 2025-03-28T09:25:04.002543Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:25:04.003992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:25:04.004591Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-28T09:25:04.004628Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-03-28T09:25:04.027441Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-03-28T09:25:04.027479Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153904072 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... 
(TRUNCATED) 2025-03-28T09:25:04.572517Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789009989145553:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:04.572583Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001a32/r3tmp/tmpGzZKNF/pdisk_1.dat 2025-03-28T09:25:04.677902Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:04.720619Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:04.720695Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:04.722310Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19833 TServer::EnableGrpc on GrpcPort 4787, node 2 2025-03-28T09:25:04.858104Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:04.858124Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:04.858129Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:04.858263Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19833 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:05.140683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:05.147574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:25:05.212195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
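The [DstCreator] traces in these dumps show the destination side of YDB async replication: the controller resolves the root path, fetches table profiles, describes the source table (TEvDescribeTableResponse), allocates a txid, proposes ESchemeOpCreateTable, subscribes to the transaction, and reports Success with the new dstPathId (here /Root/Replicated; note the __async_replica user attribute on such tables later in this log). A minimal YQL declaration that drives this flow would look roughly as follows; the endpoint and secret name are placeholders, not values from this run:

    CREATE ASYNC REPLICATION `/Root/replication`
    FOR `/Root/Table` AS `/Root/Replicated`
    WITH (
        -- placeholder endpoint and secret, not taken from this test run
        CONNECTION_STRING = 'grpcs://source.example:2135/?database=/Root',
        TOKEN_SECRET_NAME = 'replication_secret'
    );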
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153905192 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153905283 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153905192 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153905283 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-03-28T09:25:05.243441Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:05.243579Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:05.243595Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:25:05.244073Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:25:07.385599Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle 
NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153905255, tx_id: 281474976715658 } } } 2025-03-28T09:25:07.385842Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:25:07.387062Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-28T09:25:07.387885Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153905283 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-28T09:25:07.388077Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key column name mismatch: position: 0, expected: key, got: value >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::AlterTableAddIndexAsyncOp [GOOD] Test command err: 2025-03-28T09:24:39.628242Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788904052347277:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:39.631414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ed/r3tmp/tmpDfx6HF/pdisk_1.dat 2025-03-28T09:24:39.983522Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:40.032116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:40.032205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 11844, node 1 2025-03-28T09:24:40.063140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:40.111175Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:40.111196Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-03-28T09:24:40.111203Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:40.111306Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7222 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:40.379049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:42.592041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 SUCCESS 2025-03-28T09:24:42.828615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788916937250377:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:42.828687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:42.829168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788916937250389:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:42.833153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:42.862503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788916937250391:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:24:42.948442Z node 1 :TX_PROXY ERROR: Actor# [1:7486788916937250462:2822] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:43.331271Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqe12sp34gkr72epqeq21brm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjFmOTc5Y2EtNGNhMTQ5YTQtYzRkYTM2MzItNTFlMTZlM2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:43.343449Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153883373, txId: 281474976710661] shutting down 2025-03-28T09:24:43.397965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:24:43.407405Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found BAD_REQUEST 2025-03-28T09:24:43.577542Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-28T09:24:43.580923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 SUCCESS 2025-03-28T09:24:43.778117Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-03-28T09:24:45.225215Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788930448610279:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:45.225749Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ed/r3tmp/tmp8jvIhX/pdisk_1.dat 2025-03-28T09:24:45.393430Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:45.427604Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:45.427684Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:45.429626Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61901, node 4 2025-03-28T09:24:45.515629Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:45.515668Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:45.515680Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:45.515840Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15747 WaitRootIsUp 'Root'... 
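The StatusSchemeError in the DstCreator dump further above ("Key column name mismatch: position: 0, expected: key, got: value") is the expected outcome of a negative test: the pre-created /Root/Dst declares `value` as its primary key (KeyColumnNames: "value", KeyColumnIds: 2 in the describe result) while the source keys on `key`, and DstCreator compares key columns positionally before reusing an existing path. A hypothetical pair of YQL definitions reproducing that mismatch:

    CREATE TABLE `/Root/Src` (key Uint32, value Utf8, PRIMARY KEY (key));
    -- Same columns but keyed on `value`: position 0 of the destination key
    -- is `value` where the source expects `key`, hence StatusSchemeError.
    CREATE TABLE `/Root/Dst` (key Uint32, value Utf8, PRIMARY KEY (value));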
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:45.728726Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:48.122252Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:48.148213Z node 4 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [4:7486788943333513212:2338] 2025-03-28T09:24:48.148413Z node 4 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:24:48.159086Z node 4 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:24:48.159152Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:24:48.160520Z node 4 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:24:48.160562Z node 4 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:24:48.160596Z node 4 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:24:48.160852Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:24:48.160898Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:24:48.160922Z node 4 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [4:7486788943333513239:2338] in generation 1 2025-03-28T09:24:48.162914Z node 4 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:24:48.162961Z node 4 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:24:48.163042Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:24:48.163083Z node 4 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [4:7486788943333513242:2339] 2025-03-28T09:24:48.163094Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:24:48.163104Z node 4 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:24:48.163113Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:24:48.163208Z node 4 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:24:48.163285Z node 4 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to 
remove in 72075186224037888 2025-03-28T09:24:48.163305Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:24:48.163338Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:24:48.163359Z node 4 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:24:48.163376Z no ... 57594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18388 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:56.936206Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:56.956812Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:24:59.599146Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788991579203600:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:59.599239Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:59.637893Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:59.721540Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788991579203775:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:59.721634Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:59.721732Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486788991579203780:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:59.726924Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:59.742542Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486788991579203782:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:59.835120Z node 10 :TX_PROXY ERROR: Actor# [10:7486788991579203861:2796] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:59.933296Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe13a68bd50kr0rje55q32v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=YzViNDQ0MGYtMTlhY2U3ZTYtNDYzNDk0ZGYtOWM4OTg5Yg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:59.987149Z node 10 :TX_PROXY WARN: [AlterTableAddIndex [10:7486788991579203910:2370] TxId# 281474976715663] Access check failed 2025-03-28T09:25:00.015311Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:25:00.095315Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-28T09:25:00.223085Z node 10 :TX_PROXY ERROR: [AlterTableAddIndex [10:7486788995874171578:2384] TxId# 281474976715665] Unable to navigate: Root/WrongPath status: PathErrorUnknown 2025-03-28T09:25:00.302799Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037889 not found 2025-03-28T09:25:02.179044Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486789002087244504:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:02.179106Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ed/r3tmp/tmpJg4hsZ/pdisk_1.dat 2025-03-28T09:25:02.356472Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:02.398310Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:02.398411Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:02.401032Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9399, node 13 2025-03-28T09:25:02.473510Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:02.473536Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:02.473547Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:02.473691Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16903 WaitRootIsUp 'Root'... 
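The recurring sequence in these dumps — KQP_WORKLOAD_SERVICE warning "Resource pool default not found or you don't have access permissions", then ESchemeOpCreateResourcePool, a scheduled retry on "Transaction ... completed, doublechecking", and finally a TX_PROXY "path exist, request accepts it" error — is the lazy bootstrap of /Root/.metadata/workload_manager/pools/default on the first query against a fresh database; the final "path exist" message is benign because the create request is idempotent. On YDB releases that expose workload-manager DDL, a pool can also be declared explicitly along these lines (assumed syntax; the tests above rely on the implicit default pool instead):

    -- Assumed workload-manager DDL; names and limits are illustrative.
    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );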
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:02.775786Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:05.712667Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486789014972147451:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:05.712765Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:05.729771Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:25:05.834171Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486789014972147614:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:05.834258Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:05.834438Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486789014972147619:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:05.837790Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:25:05.854869Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7486789014972147621:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:25:05.949046Z node 13 :TX_PROXY ERROR: Actor# [13:7486789014972147694:2804] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:06.035017Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe13g59bp4v963zasmj93mp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=YmU2NGUwZTAtNTUzMzhmZTItZmIzZmEyYTgtMmY5NThhYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:06.080237Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:25:06.159247Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-28T09:25:06.303000Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 >> TCmsTest::TestKeepAvailableMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::SamePartitionCount [GOOD] >> TCmsTest::TestKeepAvailableModeDisconnects Test command err: 2025-03-28T09:25:00.783215Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788996031874732:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:00.784192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001a5a/r3tmp/tmpH7lIE0/pdisk_1.dat 2025-03-28T09:25:01.219671Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:01.224838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:01.224958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:01.228224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15385 TServer::EnableGrpc on GrpcPort 24833, node 1 2025-03-28T09:25:01.634325Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:01.634350Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:01.634363Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:01.634518Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15385 WaitRootIsUp 'Root'... 
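In the AlterTableAddIndexAsyncOp dumps above, the ESchemeOpInitiateBuildIndexImplTable / ESchemeOpFinalizeBuildIndexMainTable pair is the two-phase schemeshard operation behind an online index build: the index implementation table is created and backfilled first, then the main table is finalized to reference it. The YQL that triggers such an operation looks roughly like this (table and index names are illustrative, not from this run):

    -- Adds a global asynchronous secondary index online; schemeshard
    -- executes it as the Initiate/Finalize suboperation pair seen above.
    ALTER TABLE `/Root/Table` ADD INDEX value_index GLOBAL ASYNC ON (value);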
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:02.216087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:02.230318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:25:02.234317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153902343 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153902273 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153902343 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-03-28T09:25:02.353337Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:02.353540Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:02.353568Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:25:02.354126Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:25:03.754278Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153902343, tx_id: 281474976710659 } } } 2025-03-28T09:25:03.760036Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:25:03.761691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:25:03.762761Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710660} 2025-03-28T09:25:03.762774Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710660 TClient::Ls request: /Root/Replicated 2025-03-28T09:25:03.788571Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710660 2025-03-28T09:25:03.788603Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743153903827 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: 
false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) 2025-03-28T09:25:04.440457Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789011804500887:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:04.440513Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001a5a/r3tmp/tmpuIZ6AE/pdisk_1.dat 2025-03-28T09:25:04.551557Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:04.595691Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:04.595808Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:04.597175Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20452 TServer::EnableGrpc on GrpcPort 2819, node 2 2025-03-28T09:25:04.737499Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:04.737522Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:04.737529Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:04.737652Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20452 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:05.001410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:05.008052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
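DstCreator::SamePartitionCount, whose dump this is, verifies that the replica is created with the same partition count as the source (compare the TablePartitionVersion and PartitioningPolicy fields in the describe results). An explicitly partitioned source table could be declared like this — an assumed illustration; the tables in this run use default partitioning:

    -- Assumed illustration: DstCreator creates the replica with a
    -- partition count matching the source table's.
    CREATE TABLE `/Root/Table` (
        key Uint32,
        value Utf8,
        PRIMARY KEY (key)
    ) WITH (UNIFORM_PARTITIONS = 4);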
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743153905115 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153905052 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743153905115 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-03-28T09:25:05.082807Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:05.083051Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:05.083089Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:25:05.083635Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:25:07.142039Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: 
Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153905115, tx_id: 281474976715658 } } } 2025-03-28T09:25:07.143650Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:25:07.145709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:25:07.146525Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2025-03-28T09:25:07.146547Z node 2 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2025-03-28T09:25:07.210394Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2025-03-28T09:25:07.210422Z node 2 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1743153905115 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153907250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... 
(TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::CannotFindColumn [GOOD] Test command err: 2025-03-28T09:25:00.851015Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788995255845256:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:00.851153Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001a1f/r3tmp/tmp2QQr0c/pdisk_1.dat 2025-03-28T09:25:01.281335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:01.281469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:01.283275Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:01.288140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31245 TServer::EnableGrpc on GrpcPort 26920, node 1 2025-03-28T09:25:01.629594Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:01.629625Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:01.629642Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:01.629782Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:02.196137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:02.216271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
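The DstCreator::CannotFindColumn dump that begins above exercises the sibling negative case: a pre-created destination table that lacks one of the source columns, so DstCreator's column matching fails (the error record itself falls beyond the end of this excerpt). A hypothetical pair of YQL definitions that would trigger it:

    CREATE TABLE `/Root/Src` (key Uint32, value Utf8, PRIMARY KEY (key));
    -- Hypothetical destination lacking `value`: DstCreator cannot find the
    -- source column in the existing table and the test expects an error.
    CREATE TABLE `/Root/Dst` (key Uint32, PRIMARY KEY (key));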
TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153902329 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153902252 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153902329 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED) 2025-03-28T09:25:02.353620Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:02.353914Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:02.353970Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:25:02.354508Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:25:03.867815Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: 
Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153902329, tx_id: 281474976710658 } } } 2025-03-28T09:25:03.868196Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:25:03.869828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:25:03.870462Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-28T09:25:03.870490Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-03-28T09:25:03.893723Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-03-28T09:25:03.893753Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] TClient::Ls request: /Root/Replicated TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Replicated" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153903939 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "ke... (TRUNCATED) test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001a1f/r3tmp/tmpenTqJk/pdisk_1.dat 2025-03-28T09:25:04.554842Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:25:04.565266Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:04.605629Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:04.605692Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:04.607233Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19761 TServer::EnableGrpc on GrpcPort 4838, node 2 2025-03-28T09:25:04.773057Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:04.773078Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:04.773087Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:04.773197Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19761 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:05.028925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:05.035516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:05.064543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153905080 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153905129 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153905080 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153905129 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... 
(TRUNCATED) 2025-03-28T09:25:05.096162Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:05.096254Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:05.096272Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:25:05.096658Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:25:07.243058Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153905101, tx_id: 281474976715658 } } } 2025-03-28T09:25:07.243307Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:25:07.244698Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-28T09:25:07.245613Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153905129 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 
InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-28T09:25:07.245826Z node 2 :REPLICATION_CONTROLLER 
ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot find column: name: value >> TCmsTenatsTest::TestClusterLimit [GOOD] >> TCmsTenatsTest::RequestShutdownHost ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnsSizeMismatch [GOOD] Test command err: 2025-03-28T09:25:00.758020Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788994540301537:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:00.758723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001a52/r3tmp/tmpGZzSkR/pdisk_1.dat 2025-03-28T09:25:01.200143Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:01.205917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:01.206332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:01.224396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12188 TServer::EnableGrpc on GrpcPort 61447, node 1 2025-03-28T09:25:01.629247Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:01.629277Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:01.629285Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:01.629422Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12188 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:02.217826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153902273 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153902273 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version... (TRUNCATED) 2025-03-28T09:25:02.242707Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:02.242865Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:02.242959Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:25:02.243583Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:25:03.849561Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle 
NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { status: SCHEME_ERROR, issues: } } 2025-03-28T09:25:03.849644Z node 1 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot describe table: status: SCHEME_ERROR, issue: 2025-03-28T09:25:04.343571Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789009733194521:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:04.343653Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001a52/r3tmp/tmpY58jkv/pdisk_1.dat 2025-03-28T09:25:04.445473Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:04.470706Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:04.470810Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:04.472408Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11245 TServer::EnableGrpc on GrpcPort 27883, node 2 2025-03-28T09:25:04.659854Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:04.659879Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:04.659887Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:04.660006Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11245 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:04.924456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:04.933104Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:05.057043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153904975 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153905129 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743153904975 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153905129 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Src" PathId: 2 S... (TRUNCATED) 2025-03-28T09:25:05.088316Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:05.088507Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:25:05.088522Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:25:05.088993Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:25:07.677622Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle 
NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743153905052, tx_id: 281474976715658 } } } 2025-03-28T09:25:07.678049Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:25:07.679677Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeTable, state: EPathStateNoChanges)} 2025-03-28T09:25:07.681621Z node 2 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743153905129 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnNames: "value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 
TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-28T09:25:07.681860Z node 2 :REPLICATION_CONTROLLER ERROR: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key columns size mismatch: expected: 1, got: 2 >> TCmsTenatsTest::RequestRestartServices [GOOD] |87.6%| [TA] $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |87.6%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TCmsTest::CollectInfo >> TCmsTest::TestKeepAvailableModeScheduled [GOOD] >> TCmsTest::TestKeepAvailableModeScheduledDisconnects >> TCmsTest::RequestRestartServicesRejectSecond |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestRestartServices [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] >> TCmsTest::SamePriorityRequest2 >> TCmsTest::StateStorageAvailabilityMode [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbTableBulkUpsertOlap::ParquetImportBug [GOOD] Test command err: 2025-03-28T09:24:35.400650Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788887121302953:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:35.400773Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001401/r3tmp/tmpuoCVen/pdisk_1.dat 2025-03-28T09:24:35.770428Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:35.770510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:35.773993Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:35.779312Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16029, node 1 2025-03-28T09:24:35.890270Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:35.890297Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:35.890306Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:35.890423Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27356 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:36.171617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:38.216946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 CLIENT_DEADLINE_EXCEEDED 2025-03-28T09:24:38.833139Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788900006207531:2437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:38.833232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:38.834210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788900006207545:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:38.839928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:38.877923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788900006207549:2441], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:24:38.951298Z node 1 :TX_PROXY ERROR: Actor# [1:7486788900006207679:4172] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:39.467405Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe12nse3v08sdq7e8t7f1zw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjMwNjQxOWYtZjVjM2ZhMmQtZWMyYWVmZTEtNjM2NTVmOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001401/r3tmp/tmp62LPyN/pdisk_1.dat 2025-03-28T09:24:41.038340Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:24:41.093144Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:41.119910Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:41.119980Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:41.123679Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24463, node 4 2025-03-28T09:24:41.231256Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:41.231274Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:41.231282Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:41.231383Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11347 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:41.426343Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:43.885237Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:45.945527Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788929765177261:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:45.945577Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001401/r3tmp/tmp2QrwLD/pdisk_1.dat 2025-03-28T09:24:46.117191Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:46.149188Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:46.149264Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:46.151586Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11423, node 7 2025-03-28T09:24:46.226090Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:46.226109Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:46.226115Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:46.226235Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14252 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:46.516165Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:48.969415Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:50.947191Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486788929765177261:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:50.947258Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:24:58.783550Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788983961298192:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:58.783627Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initial ... ne=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=8996cd68-bb611f0-93295186-99b5a221; 2025-03-28T09:25:06.887104Z node 13 :TX_COLUMNSHARD DEBUG: external_task_id=8996cd68-bb611f0-93295186-99b5a221;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-03-28T09:25:06.892773Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486789020288156335:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:06.892878Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486789020288156329:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:06.893032Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:06.893467Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7486789020288156132:2339];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-03-28T09:25:06.895708Z node 13 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888 2025-03-28T09:25:06.896069Z node 13 :TX_COLUMNSHARD DEBUG: TxWriteIndex[5] (CS::INDEXATION) apply at tablet 72075186224037888 2025-03-28T09:25:06.897073Z node 13 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:2 Blob count: 1 2025-03-28T09:25:06.897122Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:25:06.897204Z node 13 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 2025-03-28T09:25:06.899546Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=8996cd68-bb611f0-93295186-99b5a221;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-03-28T09:25:06.899632Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=8996cd68-bb611f0-93295186-99b5a221;fline=with_appended.cpp:65;portions=1,;task_id=8996cd68-bb611f0-93295186-99b5a221; 2025-03-28T09:25:06.899920Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=8996cd68-bb611f0-93295186-99b5a221;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::8996cd68-bb611f0-93295186-99b5a221; 2025-03-28T09:25:06.899998Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=8996cd68-bb611f0-93295186-99b5a221;tablet_id=72075186224037888;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:25:06.900071Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=8996cd68-bb611f0-93295186-99b5a221;tablet_id=72075186224037888;fline=columnshard_impl.cpp:781;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=0; 2025-03-28T09:25:06.900158Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=8996cd68-bb611f0-93295186-99b5a221;tablet_id=72075186224037888;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-28T09:25:06.900622Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=8996cd68-bb611f0-93295186-99b5a221;tablet_id=72075186224037888;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:25:06.900698Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=8996cd68-bb611f0-93295186-99b5a221;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:25:06.900740Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=8996cd68-bb611f0-93295186-99b5a221;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:25:06.900881Z node 13 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;task_id=8996cd68-bb611f0-93295186-99b5a221;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:25:06.901242Z node 13 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Delete Blob DS:2181038080:[72075186224037888:1:1:3:0:3864:0] 2025-03-28T09:25:06.901302Z node 13 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:2 Blob count: 1 2025-03-28T09:25:06.901424Z node 13 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=8996cd68-bb611f0-93295186-99b5a221;mem=3380;cpu=0; 2025-03-28T09:25:06.901576Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:781;event=skip_indexation;reason=not_enough_data_and_too_frequency;insert_size=0; 2025-03-28T09:25:06.903023Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7486789020288156132:2339];ev=NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated;fline=columnshard_subdomain_path_id.cpp:90;notify_subdomain=[OwnerId: 72057594046644480, LocalPathId: 1]; 2025-03-28T09:25:06.918994Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7486789020288156132:2339];ev=NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated;fline=columnshard_subdomain_path_id.cpp:90;notify_subdomain=[OwnerId: 72057594046644480, LocalPathId: 1]; 2025-03-28T09:25:06.923266Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7486789020288156343:2413], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:25:07.026896Z node 13 :TX_PROXY ERROR: Actor# [13:7486789024583123794:2916] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:07.257691Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[13:7486789020288156132:2339];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-28T09:25:07.273337Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe13h676q5z45ytf6ek9qhe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MWQ4ZTIyMDMtNDFjN2FkMDAtYzJlMWYwYTQtYjQ5ZjZjYjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:07.314704Z node 13 :TX_COLUMNSHARD DEBUG: EvScan txId: 281474976715662 scanId: 1 version: {1743153907000:max} readable: {1743153907313:max} at tablet 72075186224037888 2025-03-28T09:25:07.314931Z node 13 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 281474976715662 scanId: 1 at tablet 72075186224037888 2025-03-28T09:25:07.315573Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7486789020288156132:2339];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715662;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1743153907000:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 5 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 1 } Columns { Id: 7 } Columns { Id: 2 } Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2025-03-28T09:25:07.439947Z node 13 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[13:7486789020288156132:2339];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715662;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1743153907000:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 5 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 1 } Columns { Id: 7 } Columns { Id: 2 } Columns { Id: 6 } } } Version: 5 Kernels: "O\002\020AsScalar\t\211\004\235\213\004\213\000?\000\000\235?\000\000\235?\000\0000BlockAsTuple\000\t\211\002?\006?\000\002\000\013?\000\001\t\211\002?\010?\000\002\000?\016\001\000/" ; 2025-03-28T09:25:07.441682Z node 13 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;self_id=[13:7486789020288156132:2339];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=281474976715662;scan_id=1;gen=1;table=/Root/OlapStore/OlapTable;snapshot={1743153907000:max};tablet=72075186224037888;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":8},{"from":12},{"from":4},{"from":6},{"from":2},{"from":14},{"from":10}]},{"owner_id":2,"inputs":[{"from":15}]},{"owner_id":4,"inputs":[{"from":15}]},{"owner_id":6,"inputs":[{"from":15}]},{"owner_id":8,"inputs":[{"from":15}]},{"owner_id":10,"inputs":[{"from":15}]},{"owner_id":12,"inputs":[{"from":15}]},{"owner_id":14,"inputs":[{"from":15}]},{"owner_id":15,"inputs":[]}],"nodes":{"15":{"p":{"p":{"data":[{"name":"stringToString","id":7},{"name":"id","id":1},{"name":"timestamp","id":2},{"name":"dateTimeS","id":3},{"name":"dateTimeU","id":4},{"name":"date","id":5},{"name":"utf8ToString","id":6}]},"o":"7,1,2,3,4,5,6","t":"FetchOriginalData"},"w":14,"id":15},"2":{"p":{"i":"5","p":{"address":{"name":"date","id":5}},"o":"5","t":"AssembleOriginalData"},"w":19,"id":2},"8":{"p":{"i":"1","p":{"address":{"name":"id","id":1}},"o":"1","t":"AssembleOriginalData"},"w":19,"id":8},"0":{"p":{"i":"5,3,4,1,7,2,6","t":"Projection"},"w":133,"id":0},"4":{"p":{"i":"3","p":{"address":{"name":"dateTimeS","id":3}},"o":"3","t":"AssembleOriginalData"},"w":19,"id":4},"14":{"p":{"i":"6","p":{"address":{"name":"utf8ToString","id":6}},"o":"6","t":"AssembleOriginalData"},"w":19,"id":14},"10":{"p":{"i":"7","p":{"address":{"name":"stringToString","id":7}},"o":"7","t":"AssembleOriginalData"},"w":19,"id":10},"6":{"p":{"i":"4","p":{"address":{"name":"dateTimeU","id":4}},"o":"4","t":"AssembleOriginalData"},"w":19,"id":6},"12":{"p":{"i":"2","p":{"address":{"name":"timestamp","id":2}},"o":"2","t":"AssembleOriginalData"},"w":19,"id":12}}}; 2025-03-28T09:25:07.448994Z node 13 :TX_COLUMNSHARD INFO: self_id=[13:7486789020288156158:2343];tablet_id=72075186224037888;parent=[13:7486789020288156132:2339];fline=manager.cpp:82;event=ask_data;request=request_id=3;3={portions_count=1};; 2025-03-28T09:25:07.451313Z node 13 :TX_COLUMNSHARD DEBUG: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-03-28T09:25:07.452650Z node 13 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-03-28T09:25:07.455254Z node 13 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 72075186224037888 2025-03-28T09:25:07.476759Z node 13 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743153907000, txId: 18446744073709551615] shutting down >> TCmsTest::RequestReplacePDiskConsecutiveWithDone [GOOD] >> TYqlDecimalTests::NegativeValues [GOOD] >> TMaintenanceApiTest::RequestReplaceDevicePDisk [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageAvailabilityMode [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplacePDiskConsecutiveWithDone [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::RequestReplaceDevicePDisk [GOOD] >> TCmsTest::RequestRestartServicesOk |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> 
TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] >> TCmsTest::StateStorageRollingRestart [GOOD] >> TCmsTest::StateStorageLockedNodes >> TCmsTenatsTest::TestTenantRatioLimit [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode >> TCmsTest::TestKeepAvailableModeDisconnects [GOOD] >> TCmsTest::TestForceRestartModeScheduled >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TYqlDecimalTests::NegativeValues [GOOD] Test command err: 2025-03-28T09:24:43.444302Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788919655316045:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:43.444380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013bb/r3tmp/tmpUSatAo/pdisk_1.dat 2025-03-28T09:24:43.950405Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:43.984869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:43.984953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:43.992081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9544, node 1 2025-03-28T09:24:44.142185Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:44.142216Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:44.142226Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:44.142366Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24273 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:44.650352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
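The TX_COLUMNSHARD program.cpp trace above (tx 281474976715662) shows how a scan over /Root/OlapStore/OlapTable is compiled: a FetchOriginalData node pulls the seven requested columns, one AssembleOriginalData node per column materializes it, and a final Projection node emits them in the requested order (5,3,4,1,7,2,6). A minimal client-side sketch that would trigger such a scan is below; only the table path and column names come from the log, while the endpoint and database are placeholder assumptions.

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <util/stream/output.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        // Placeholder connection settings -- not taken from the log.
        auto driver = TDriver(TDriverConfig()
            .SetEndpoint("grpc://localhost:2136")
            .SetDatabase("/Root"));
        TTableClient client(driver);

        // Scan over the OLAP table named in the trace; columns match ids 1..7.
        auto it = client.StreamExecuteScanQuery(
            "SELECT date, dateTimeS, dateTimeU, id, stringToString, timestamp, utf8ToString "
            "FROM `/Root/OlapStore/OlapTable`").GetValueSync();

        for (;;) {
            auto part = it.ReadNext().GetValueSync();
            if (!part.IsSuccess()) {
                if (!part.EOS()) {
                    Cerr << part.GetIssues().ToString() << Endl;  // real failure, not end-of-stream
                }
                break;
            }
            if (part.HasResultSet()) {
                auto rs = part.ExtractResultSet();  // one chunk of rows per stream part
            }
        }
        driver.Stop(true);
        return 0;
    }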
2025-03-28T09:24:46.862365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 SUCCESS 3 rows in 0.012789s 2025-03-28T09:24:47.047379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788936835186382:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:47.047458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788936835186393:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:47.047679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:47.051759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:47.074989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788936835186396:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:24:47.159748Z node 1 :TX_PROXY ERROR: Actor# [1:7486788936835186487:2801] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:47.825357Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe12xt434y2kx63hgxzvhez, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTY5OTAxMDMtYzU4Mjg3NTUtMmIyMDJlMTYtZWE2YTYyZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root SUCCESS count returned 3 rows 2025-03-28T09:24:49.358522Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788945478740803:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:49.358602Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013bb/r3tmp/tmpOKXpV1/pdisk_1.dat 2025-03-28T09:24:49.556012Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:49.584312Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:49.584389Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:49.592623Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29112, node 4 2025-03-28T09:24:49.713838Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:49.713859Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:49.713871Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:49.714002Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9654 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:49.926498Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
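The WARN/ERROR pair above is a benign startup race rather than a test failure: the first queries on a fresh database trigger auto-creation of the default workload-manager resource pool, the creator schedules a retry once the schema transaction reports "completed, doublechecking", and the losing concurrent attempt is absorbed with "path exist, request accepts it". As a sketch (the path comes from the log; the driver setup is assumed as in the previous snippet), the resulting entry can be inspected through the scheme client:

    #include <ydb/public/sdk/cpp/client/ydb_scheme/scheme.h>

    void CheckDefaultPool(NYdb::TDriver& driver) {
        NYdb::NScheme::TSchemeClient scheme(driver);
        auto res = scheme.DescribePath(
            "/Root/.metadata/workload_manager/pools/default").GetValueSync();
        if (res.IsSuccess()) {
            // After the doublechecking retry, both racers see the same entry.
            const auto& entry = res.GetEntry();
            Cout << entry.Name << " exists, type " << (int)entry.Type << Endl;
        }
    }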
2025-03-28T09:24:49.942877Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:24:52.296225Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:54.174441Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788970722307553:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:54.174502Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013bb/r3tmp/tmp6VTDHP/pdisk_1.dat 2025-03-28T09:24:54.393244Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62146, node 7 2025-03-28T09:24:54.523463Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:54.523548Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:54.543189Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:54.565943Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:54.565966Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:54.565974Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:54.566094Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26718 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:54.854876Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:57.290358Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:58.975579Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788984591936966:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:58.975685Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initializat ... N: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:59.290335Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:59.290347Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:59.290503Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4858 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:59.605422Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:02.213883Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:25:02.314099Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486789001771807345:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:02.314173Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486789001771807337:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:02.314505Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:02.318964Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:25:02.339205Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486789001771807351:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:25:02.434673Z node 10 :TX_PROXY ERROR: Actor# [10:7486789001771807424:2800] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:02.583586Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe13cq805vq9h5wf6krg4kd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzQzYzYzYS1kNGI1YTk4LTE4YmE4Njg2LWVlOTY2YWI1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:02.717376Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe13d02c7fw1xs1z72dgjcy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzQzYzYzYS1kNGI1YTk4LTE4YmE4Njg2LWVlOTY2YWI1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:02.874963Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe13d429rwxnkfkwrj6de55, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzQzYzYzYS1kNGI1YTk4LTE4YmE4Njg2LWVlOTY2YWI1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:02.980642Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe13d901xmxp0zkc6sptjas, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzQzYzYzYS1kNGI1YTk4LTE4YmE4Njg2LWVlOTY2YWI1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:03.136209Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe13dc99x2420g801ahxd8f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=NzQzYzYzYS1kNGI1YTk4LTE4YmE4Njg2LWVlOTY2YWI1, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:25:04.812756Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486789010709399302:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:04.812826Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013bb/r3tmp/tmpC4YOnj/pdisk_1.dat 2025-03-28T09:25:04.922757Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:04.953846Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:04.953922Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:04.957262Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10138, node 13 2025-03-28T09:25:05.013040Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:05.013077Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:05.013088Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:05.013257Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62339 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:05.355360Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:08.235538Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:25:08.316964Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486789027889269660:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:08.316964Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7486789027889269668:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:08.317051Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:08.320347Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:25:08.337938Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7486789027889269674:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:25:08.440455Z node 13 :TX_PROXY ERROR: Actor# [13:7486789027889269747:2792] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:08.556810Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe13jjv1rvfb2we1sh78jrm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZjE3MjdhNDktNjQyNzgyMzYtMzE2ODIwYTAtNTlmNDU2NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:08.670372Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe13jtnfsnhsta0jt1rw0yz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZjE3MjdhNDktNjQyNzgyMzYtMzE2ODIwYTAtNTlmNDU2NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:08.836522Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe13jy34htdbj8md2085z46, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZjE3MjdhNDktNjQyNzgyMzYtMzE2ODIwYTAtNTlmNDU2NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:08.933809Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe13k3b7gsk5vd9f70wnpb8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZjE3MjdhNDktNjQyNzgyMzYtMzE2ODIwYTAtNTlmNDU2NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:09.073567Z node 13 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe13k6aef5mnepn6eb79ww7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=ZjE3MjdhNDktNjQyNzgyMzYtMzE2ODIwYTAtNTlmNDU2NQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> TTableProfileTests::DescribeTableOptions [GOOD] >> TCmsTest::CollectInfo [GOOD] >> TCmsTest::DynamicConfig >> YdbOlapStore::ManyTables [GOOD] >> YdbOlapStore::LogWithUnionAllAscending |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst >> ReadOnlyVDisk::TestReads >> ReadOnlyVDisk::TestWrites >> TCmsTest::TestKeepAvailableModeScheduledDisconnects [GOOD] >> TCmsTest::TestLoadLog >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientProvidesCorrectCerts [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction >> TCmsTest::RequestRestartServicesRejectSecond [GOOD] >> TCmsTest::RequestRestartServicesWrongHost >> TCmsTest::SamePriorityRequest2 [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] >> TCmsTest::DynamicConfig [GOOD] >> TCmsTest::DisabledEvictVDisks >> YdbYqlClient::RenameTables [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest2 [GOOD] >> TPopulatorTest::Boot |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TTableProfileTests::DescribeTableOptions [GOOD] Test command err: 2025-03-28T09:24:44.123246Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788925182574549:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:44.123296Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013a9/r3tmp/tmpclF8Tn/pdisk_1.dat 2025-03-28T09:24:44.620150Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:44.652886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:44.653021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:44.656205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22284, node 1 2025-03-28T09:24:44.818810Z node 1 :NET_CLASSIFIER 
WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:44.818836Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:44.818844Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:44.818959Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13784 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:45.122472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1743161084557421 Nodes { NodeId: 1024 Host: "localhost" Port: 15090 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1743161084557421 } Nodes { NodeId: 1 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 2 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 3 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-03-28T09:24:48.677140Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788942334666056:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:48.677208Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013a9/r3tmp/tmpRW7bmz/pdisk_1.dat 2025-03-28T09:24:48.857338Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:48.889068Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:48.889171Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:48.895456Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14656, node 4 
2025-03-28T09:24:49.034280Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:49.034299Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:49.034305Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:49.034407Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13755 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:49.304117Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1743161088841847 Nodes { NodeId: 1024 Host: "localhost" Port: 29820 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1743161088841847 } Nodes { NodeId: 4 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 5 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 6 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-03-28T09:24:53.417128Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788966043838509:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:53.417188Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013a9/r3tmp/tmpyan7qs/pdisk_1.dat 2025-03-28T09:24:53.632247Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:53.659375Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:53.659461Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:53.668849Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 
TServer::EnableGrpc on GrpcPort 23903, node 7 2025-03-28T09:24:53.883584Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:53.883604Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:53.883614Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:53.883737Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8032 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:54.225271Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:54.241759Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 TClient is connected to server localhost:8032 2025-03-28T09:24:54.591018Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:54.634089Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:55.143713Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486788972399626647:2078];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:55.143820Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:24:55.167118Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:55.167197Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:55.172807Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2025-03-28T09:24:55.175960Z node 7 :HIVE WARN: HIVE#72057594037968897 Node( ... 8.797625Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:7486788985284529706:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:24:58.797831Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:24:59.810031Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788989591572691:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:59.810099Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013a9/r3tmp/tmpwcPf6Q/pdisk_1.dat 2025-03-28T09:24:59.961989Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:00.001280Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:00.001388Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:00.005046Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1487, node 10 2025-03-28T09:25:00.068181Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:00.068208Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:00.068217Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:00.068364Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10025 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:00.362543Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:10025 2025-03-28T09:25:00.689345Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:00.708811Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:01.213672Z node 12 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7486788997926392871:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:01.213744Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:01.262397Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:01.262529Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:01.267926Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2025-03-28T09:25:01.268865Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10025 2025-03-28T09:25:01.563552Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:10025 TClient::Ls request: /Root/ydb_ut_tenant/table-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "table-1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1743153901890 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key2" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "ke... 
(TRUNCATED) 2025-03-28T09:25:02.186975Z node 10 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2025-03-28T09:25:02.187892Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:25:05.993966Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486789017373354360:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:05.994059Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013a9/r3tmp/tmpBnwFO3/pdisk_1.dat 2025-03-28T09:25:06.125009Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:06.168304Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:06.168421Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:06.171445Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15764, node 13 2025-03-28T09:25:06.240222Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:06.240257Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:06.240270Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:06.240445Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:06.535896Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:65114 2025-03-28T09:25:06.859349Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:06.881466Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
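The TClient::Ls dump above is the scheme-board view of /Root/ydb_ut_tenant/table-1, cut off by the test harness mid-column-list. The same description (key1: Utf8, key2: Uint64, ...) can be fetched untruncated through the SDK; a sketch, reusing the assumed driver and client from the first snippet:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <ydb/public/sdk/cpp/client/ydb_value/value.h>

    void PrintColumns(NYdb::NTable::TTableClient& client) {
        auto session = client.GetSession().GetValueSync().GetSession();
        auto desc = session.DescribeTable("/Root/ydb_ut_tenant/table-1").GetValueSync();
        if (desc.IsSuccess()) {
            for (const auto& col : desc.GetTableDescription().GetColumns()) {
                // FormatType renders e.g. Utf8? for an optional Utf8 column.
                Cout << col.Name << ": " << NYdb::FormatType(col.Type) << Endl;
            }
        }
    }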
2025-03-28T09:25:07.386053Z node 15 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[15:7486789025854433068:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:07.386163Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:07.389946Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:07.390049Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:07.395523Z node 13 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2025-03-28T09:25:07.410228Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65114 2025-03-28T09:25:07.714231Z node 13 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 15 2025-03-28T09:25:07.714799Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:25:08.388378Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:25:09.388927Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:25:10.389789Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/ydb_ut_tenant/.metadata/initialization/migrations;error=incorrect path status: LookupError; |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> TCmsTest::TestLoadLog [GOOD] >> TCmsTest::TestLogOperationsRollback >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> TCmsTest::Mirror3dcPermissions [GOOD] >> TCmsTenatsTest::RequestShutdownHost [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy >> TCmsTest::RequestRestartServicesOk [GOOD] >> TCmsTest::RequestRestartServicesReject |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled >> TCmsTest::TestForceRestartModeScheduled [GOOD] >> TCmsTest::TestForceRestartModeScheduledDisconnects ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::RenameTables [GOOD] Test command err: 2025-03-28T09:24:41.288019Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788911305881808:2276];send_to=[0:7307199536658146131:7762515]; 
2025-03-28T09:24:41.288103Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013dd/r3tmp/tmp7i0ttm/pdisk_1.dat 2025-03-28T09:24:41.843545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:41.850606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:41.850722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:41.862185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29132, node 1 2025-03-28T09:24:42.164195Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:42.164219Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:42.164227Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:42.164340Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12688 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:42.521832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 2025-03-28T09:24:46.282805Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788911305881808:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:46.282877Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 2025-03-28T09:24:49.641112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788945665621152:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:49.641226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:49.641453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788945665621164:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:49.645113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:24:49.664552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788945665621166:2357], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:24:49.758579Z node 1 :TX_PROXY ERROR: Actor# [1:7486788945665621237:2722] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Previous query attempt was finished with unsuccessful status OVERLOADED: Sending retry attempt 1 of 5 Previous query attempt was finished with unsuccessful status CLIENT_RESOURCE_EXHAUSTED: Sending retry attempt 2 of 5 Previous query attempt was finished with unsuccessful status UNAVAILABLE: Sending retry attempt 3 of 5 Previous query attempt was finished with unsuccessful status BAD_SESSION: Sending retry attempt 4 of 5 Previous query attempt was finished with unsuccessful status SESSION_BUSY: Sending retry attempt 5 of 5 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status NOT_FOUND: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status UNDETERMINED: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 Previous query attempt was finished with unsuccessful status TRANSPORT_UNAVAILABLE: Sending retry attempt 1 of 1 2025-03-28T09:24:56.800976Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788976724622148:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:56.801098Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013dd/r3tmp/tmpT4miZR/pdisk_1.dat 2025-03-28T09:24:57.031429Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14282, node 4 2025-03-28T09:24:57.143680Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:57.143807Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:57.147763Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:57.148740Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:57.148750Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:57.148756Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:57.148866Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4615 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:57.408811Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:59.800452Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788989609525050:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:59.800558Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:59.835722Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:59.958991Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788989609525217:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:59.959082Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:59.959095Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788989609525222:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:59.962638Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:59.977987Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486788989609525224:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:25:00.034051Z node 4 :TX_PROXY ERROR: Actor# [4:7486788993904492591:2790] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', ... :25:10.957259Z node 10 :TX_PROXY DEBUG: actor# [10:7486789025282107067:2138] Cookie# 0 userReqId# "" txid# 281474976715672 SEND to# [10:7486789038167011004:3555] 2025-03-28T09:25:10.960159Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011004:3555] txid# 281474976715672 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "Root" OperationType: ESchemeOpDropTable Drop { Name: "Table-1" } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:57946" 2025-03-28T09:25:10.960202Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011004:3555] txid# 281474976715672 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:25:10.960280Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011004:3555] txid# 281474976715672 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:25:10.960613Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011004:3555] txid# 281474976715672 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:25:10.960728Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011004:3555] HANDLE EvNavigateKeySetResult, txid# 281474976715672 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:25:10.960780Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011004:3555] txid# 281474976715672 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715672 TabletId# 72057594046644480} 2025-03-28T09:25:10.960943Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011004:3555] txid# 281474976715672 HANDLE EvClientConnected 2025-03-28T09:25:10.961144Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-1, pathId: 0, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:25:10.961299Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715672:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:25:10.963335Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715672, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-1 2025-03-28T09:25:10.963421Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011004:3555] txid# 281474976715672 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715672} 2025-03-28T09:25:10.963474Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011004:3555] txid# 281474976715672 SEND to# [10:7486789038167011003:2391] Source {TEvProposeTransactionStatus txid# 281474976715672 Status# 53} 2025-03-28T09:25:10.964513Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:25:10.964552Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:25:10.964636Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:25:10.964639Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:25:10.972470Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153911016, transactions count in step: 
1, at schemeshard: 72057594046644480 2025-03-28T09:25:10.975181Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715672, done: 0, blocked: 1 2025-03-28T09:25:10.978008Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:25:10.978024Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:25:10.978119Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:25:10.978120Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:25:10.979189Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715672:0 2025-03-28T09:25:10.985770Z node 10 :GRPC_SERVER DEBUG: Got grpc request# DropTableRequest, traceId# 01jqe13n695zgyb82d9wzv3akq, sdkBuildInfo# ydb-cpp-sdk/3.2.2, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:57952, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:25:10.985904Z node 10 :TX_PROXY DEBUG: actor# [10:7486789025282107067:2138] Handle TEvProposeTransaction 2025-03-28T09:25:10.985923Z node 10 :TX_PROXY DEBUG: actor# [10:7486789025282107067:2138] TxId# 281474976715673 ProcessProposeTransaction 2025-03-28T09:25:10.985949Z node 10 :TX_PROXY DEBUG: actor# [10:7486789025282107067:2138] Cookie# 0 userReqId# "" txid# 281474976715673 SEND to# [10:7486789038167011084:3630] 2025-03-28T09:25:10.988371Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011084:3630] txid# 281474976715673 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "Root" OperationType: ESchemeOpDropTable Drop { Name: "Table-2" } } } DatabaseName: "" RequestType: "" PeerName: "ipv6:[::1]:57952" 2025-03-28T09:25:10.988411Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011084:3630] txid# 281474976715673 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:25:10.988491Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011084:3630] txid# 281474976715673 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:25:10.989030Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037890 not found 2025-03-28T09:25:10.989150Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011084:3630] txid# 281474976715673 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:25:10.989237Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011084:3630] HANDLE EvNavigateKeySetResult, txid# 281474976715673 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:25:10.989273Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011084:3630] txid# 281474976715673 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715673 TabletId# 72057594046644480} 2025-03-28T09:25:10.989407Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011084:3630] txid# 281474976715673 HANDLE EvClientConnected 2025-03-28T09:25:10.989642Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TDropTable Propose, path: Root/Table-2, pathId: 0, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:25:10.989764Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715673:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 
2025-03-28T09:25:10.990239Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-28T09:25:10.991556Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715673, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: Root/Table-2 2025-03-28T09:25:10.991626Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011084:3630] txid# 281474976715673 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715673} 2025-03-28T09:25:10.991717Z node 10 :TX_PROXY DEBUG: Actor# [10:7486789038167011084:3630] txid# 281474976715673 SEND to# [10:7486789038167011083:2395] Source {TEvProposeTransactionStatus txid# 281474976715673 Status# 53} 2025-03-28T09:25:10.992713Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:25:10.992724Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:25:10.992819Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:25:10.992819Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:25:11.001394Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153911044, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:25:11.004767Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976715673, done: 0, blocked: 1 2025-03-28T09:25:11.007886Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:25:11.007955Z node 10 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:25:11.008022Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:25:11.008038Z node 10 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:25:11.009709Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715673:0 2025-03-28T09:25:11.013754Z node 10 :GRPC_SERVER DEBUG: [0x51a0001b4e80] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-28T09:25:11.013757Z node 10 :GRPC_SERVER DEBUG: [0x51a000106880] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-28T09:25:11.014023Z node 10 :GRPC_SERVER DEBUG: [0x51a000108080] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-28T09:25:11.014042Z node 10 :GRPC_SERVER DEBUG: [0x51a000043880] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-28T09:25:11.014234Z node 10 :GRPC_SERVER DEBUG: [0x51a0001cbc80] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-28T09:25:11.014236Z node 10 :GRPC_SERVER DEBUG: [0x51a0000b0480] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-28T09:25:11.014437Z node 10 :GRPC_SERVER DEBUG: [0x51a000199880] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-28T09:25:11.014442Z node 10 :GRPC_SERVER DEBUG: [0x51a00011dc80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-28T09:25:11.014641Z node 10 :GRPC_SERVER DEBUG: [0x51a000169280] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-28T09:25:11.014646Z node 10 :GRPC_SERVER DEBUG: [0x51a000108c80] received request Name# BlobStorageConfig ok# false data# peer# current 
inflight# 0 2025-03-28T09:25:11.014853Z node 10 :GRPC_SERVER DEBUG: [0x51a000020480] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-28T09:25:11.014871Z node 10 :GRPC_SERVER DEBUG: [0x51a0000d0280] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-28T09:25:11.015052Z node 10 :GRPC_SERVER DEBUG: [0x51a000169e80] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-28T09:25:11.015085Z node 10 :GRPC_SERVER DEBUG: [0x51a00016aa80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-28T09:25:11.015271Z node 10 :GRPC_SERVER DEBUG: [0x51a00011e280] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-28T09:25:11.015284Z node 10 :GRPC_SERVER DEBUG: [0x51a000168680] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-28T09:25:11.015460Z node 10 :GRPC_SERVER DEBUG: [0x51a0001d5e80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-03-28T09:25:11.018514Z node 10 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 10, TabletId: 72075186224037889 not found 2025-03-28T09:25:11.020266Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> TPopulatorTest::Boot [GOOD] >> TCmsTest::StateStorageLockedNodes [GOOD] |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::Mirror3dcPermissions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverLegacyService::ServerWithCertVerification_ClientDoesNotProvideCorrectCerts [GOOD] Test command err: 2025-03-28T09:24:46.079229Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788935849269035:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:46.079448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00139e/r3tmp/tmpibwtoD/pdisk_1.dat 2025-03-28T09:24:46.499226Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:46.514955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:46.515035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:46.521682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6708, node 1 2025-03-28T09:24:46.630479Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:46.630502Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:46.630517Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:46.630644Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11726 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:46.927209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:47.146783Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:51872) has now valid token of root@builtin 2025-03-28T09:24:47.218026Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-28T09:24:47.218064Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:24:47.218079Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:24:47.218109Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-28T09:24:50.389570Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788950885386584:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:50.389645Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00139e/r3tmp/tmp0lXs4s/pdisk_1.dat 2025-03-28T09:24:50.718956Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:50.749630Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:50.749750Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:50.753059Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5835, node 4 2025-03-28T09:24:50.875225Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:50.875247Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:50.875254Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:50.875391Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4289 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:51.152165Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:51.316405Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:57038) has now valid token of root@builtin 2025-03-28T09:24:51.441909Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-28T09:24:51.441952Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:24:51.441964Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:24:51.441994Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-28T09:24:55.094633Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788973446246412:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:55.095116Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00139e/r3tmp/tmpugLNGY/pdisk_1.dat 2025-03-28T09:24:55.254015Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:55.284303Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:55.284372Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:55.289278Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18790, node 7 2025-03-28T09:24:55.423483Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:55.423504Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:55.423511Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:55.423636Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24695 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:55.694071Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1743161095251225 Nodes { NodeId: 1024 Host: "localhost" Port: 19791 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1743161095251225 } Nodes { NodeId: 7 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 8 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 9 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-03-28T09:24:59.476336Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788990965329117:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:59.476415Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00139e/r3tmp/tmp7dHJKm/pdisk_1.dat 2025-03-28T09:24:59.585884Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:59.617881Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:59.617966Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:59.620375Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3473, node 10 2025-03-28T09:24:59.674948Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:59.674977Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:59.674985Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:59.675131Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:10105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:59.903772Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: UNAUTHORIZED Reason: "Cannot authorize node. Access denied" } 2025-03-28T09:25:04.093860Z node 13 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7486789011618394771:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:04.093924Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00139e/r3tmp/tmpmfwwhY/pdisk_1.dat 2025-03-28T09:25:04.222934Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:04.261823Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:04.261919Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:04.264550Z node 13 :HIVE WARN: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14194, node 13 2025-03-28T09:25:04.333047Z node 13 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:04.333081Z node 13 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:04.333091Z node 13 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:04.333245Z node 13 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26042 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:04.649737Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node Register node result Status { Code: OK } NodeId: 1024 DomainPath: "Root" Expire: 1743161104211664 Nodes { NodeId: 1024 Host: "localhost" Port: 20581 ResolveHost: "localhost" Address: "localhost" Location { DataCenter: "DataCenter" Rack: "Rack" Unit: "Body" } Expire: 1743161104211664 } Nodes { NodeId: 13 Host: "::1" Port: 12001 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 49 RoomNum: 1 RackNum: 1 BodyNum: 1 DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Nodes { NodeId: 14 Host: "::1" Port: 12002 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 50 RoomNum: 2 RackNum: 2 BodyNum: 2 DataCenter: "2" Module: "2" Rack: "2" Unit: "2" } } Nodes { NodeId: 15 Host: "::1" Port: 12003 ResolveHost: "::1" Address: "::1" Location { DataCenterNum: 51 RoomNum: 3 RackNum: 3 BodyNum: 3 DataCenter: "3" Module: "3" Rack: "3" Unit: "3" } } 2025-03-28T09:25:08.596025Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7486789031000173965:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:08.596115Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00139e/r3tmp/tmpSCgWnC/pdisk_1.dat 2025-03-28T09:25:08.756322Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:08.799660Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:08.799760Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:08.802727Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14605, node 16 2025-03-28T09:25:08.858251Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:08.858277Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:08.858287Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:08.858452Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:17368 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:09.155393Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... Trying to register node 2025-03-28T09:25:09.331417Z node 16 :TICKET_PARSER ERROR: Ticket F57CBDFBA196499AB4AC2843BBE9F6977B33D13798AB14E20E6EBF838079CF95: Cannot create token from certificate. Client certificate failed verification Register node result Status { Code: ERROR Reason: "Cannot create token from certificate. Client certificate failed verification" } |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> TCmsTest::RequestRestartServicesWrongHost [GOOD] >> TCmsTest::RestartNodeInDownState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] Test command err: 2025-03-28T09:25:13.253746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:13.253817Z node 1 :IMPORT WARN: Table profiles were not loaded |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageLockedNodes [GOOD] |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_transfer_writer/unittest |87.8%| [TA] $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.8%| [TA] {RESULT} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.8%| [TA] $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} >> TCmsTest::WalleTasksWithNodeLimit [GOOD] >> TCmsTest::WalleTasksDifferentPriorities |87.8%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} >> TCmsTest::DisabledEvictVDisks [GOOD] >> TCmsTest::EmergencyDuringRollingRestart |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.8%| [TA] $(B)/ydb/core/tx/replication/service/ut_transfer_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.8%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/test-results/unittest/{meta.json ... results_accumulator.log} |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TCmsTest::RequestRestartServicesReject [GOOD] >> TCmsTest::RequestRestartServicesPartial |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] Test command err: RandomSeed# 4716430541683229535 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-03-28T09:25:13.246358Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-03-28T09:25:13.250628Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-03-28T09:25:13.254723Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-03-28T09:25:13.256650Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: 
Unavailable in read-only Sender# [1:5310:698] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2025-03-28T09:25:13.263391Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2025-03-28T09:25:13.265993Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2025-03-28T09:25:13.268824Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2025-03-28T09:25:13.271462Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting 
VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2025-03-28T09:25:14.736227Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] 2025-03-28T09:25:14.736351Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] 2025-03-28T09:25:14.736482Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] 2025-03-28T09:25:14.737272Z 1 00h05m30.160512s :BS_PROXY_PUT ERROR: [5e0c8c36c6df1b08] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2025-03-28T09:25:14.738875Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] 2025-03-28T09:25:14.739242Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] 2025-03-28T09:25:14.740386Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { 
OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2025-03-28T09:25:14.742125Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] 2025-03-28T09:25:14.742839Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] 2025-03-28T09:25:14.743660Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:14:0:0:131072:0] 2025-03-28T09:25:14.744798Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] 2025-03-28T09:25:14.745766Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] 2025-03-28T09:25:14.746353Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:14:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:15:0:0:32768:0] 2025-03-28T09:25:14.747404Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] 2025-03-28T09:25:14.747469Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] 2025-03-28T09:25:14.748043Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] TEvPutResult: 
TEvPutResult {Id# [1:1:15:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:15:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:16:0:0:131072:0] 2025-03-28T09:25:14.749240Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] 2025-03-28T09:25:14.749301Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] 2025-03-28T09:25:14.749962Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] TEvPutResult: TEvPutResult {Id# [1:1:16:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:16:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:17:0:0:32768:0] 2025-03-28T09:25:14.751148Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] 2025-03-28T09:25:14.751311Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] 2025-03-28T09:25:14.751367Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:17:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUEUUU } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUSU } { OrderNumber# 5 Situations# UUUUUS } { OrderNumber# 6 Situations# SUUUUU } { 
OrderNumber# 7 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:18:0:0:131072:0] 2025-03-28T09:25:14.753168Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] 2025-03-28T09:25:14.753357Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] 2025-03-28T09:25:14.753469Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:18:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 7 Situations# SUUUUU } { OrderNumber# 0 Situations# UEUUUU } { OrderNumber# 1 Situations# UUEUUU } { OrderNumber# 2 Situations# UUUEUU } { OrderNumber# 3 Situations# UUUUSU } { OrderNumber# 4 Situations# UUUUUS } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:19:0:0:32768:0] 2025-03-28T09:25:14.755217Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] 2025-03-28T09:25:14.755457Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] 2025-03-28T09:25:14.755538Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:19:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# UUEUUU } { OrderNumber# 1 Situations# UUUEUU } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } { OrderNumber# 4 Situations# UUSUUU } { OrderNumber# 5 Situations# UUUUSU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:20:0:0:131072:0] 2025-03-28T09:25:14.757242Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5310:698] 2025-03-28T09:25:14.757312Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5324:712] 2025-03-28T09:25:14.757384Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5317:705] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:20:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 
ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvGet with key [1:1:11:0:0:32768:0] 2025-03-28T09:25:14.761734Z 1 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5311:699] 2025-03-28T09:25:14.761865Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5318:706] 2025-03-28T09:25:14.761903Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5325:713] 2025-03-28T09:25:14.762413Z 1 00h05m30.160512s :BS_PROXY_GET ERROR: [7fe4e3e48708fd8e] Response# TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} Marker# BPG29 2025-03-28T09:25:14.762549Z 2 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5318:706] 2025-03-28T09:25:14.762600Z 3 00h05m30.160512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5325:713] TEvGetResult: TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTest::ActionIssue |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TCmsTest::RestartNodeInDownState [GOOD] >> TCmsTest::SamePriorityRequest |87.9%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestForceRestartModeScheduledDisconnects [GOOD] >> ReadOnlyVDisk::TestWrites [GOOD] |87.9%| [TA] $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |87.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites [GOOD] Test command err: RandomSeed# 2195110343597993998 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2025-03-28T09:25:13.201566Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:699] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2025-03-28T09:25:13.205972Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:699] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2025-03-28T09:25:13.210519Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:699] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2025-03-28T09:25:13.212907Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:699] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2025-03-28T09:25:13.219818Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:699] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2025-03-28T09:25:13.222066Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:699] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2025-03-28T09:25:13.224540Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in 
read-only Sender# [1:5309:699] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2025-03-28T09:25:13.226924Z 1 00h01m30.060512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:699] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2025-03-28T09:25:14.080372Z 1 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:699] 2025-03-28T09:25:14.080466Z 3 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:713] 2025-03-28T09:25:14.080586Z 2 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:706] 2025-03-28T09:25:14.081356Z 1 00h03m30.110512s :BS_PROXY_PUT ERROR: [cf60de055c31b63f] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part 
situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2025-03-28T09:25:14.082774Z 1 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:699] 2025-03-28T09:25:14.082923Z 2 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:706] 2025-03-28T09:25:14.083767Z 3 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:713] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2025-03-28T09:25:14.085258Z 1 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:699] 2025-03-28T09:25:14.085969Z 2 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:706] 2025-03-28T09:25:14.086607Z 3 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:713] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only 
mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:14:0:0:131072:0] 2025-03-28T09:25:14.087579Z 3 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:713] 2025-03-28T09:25:14.088347Z 1 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:0:0]: Unavailable in read-only Sender# [1:5309:699] 2025-03-28T09:25:14.088806Z 2 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:706] TEvPutResult: TEvPutResult {Id# [1:1:14:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:14:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:15:0:0:32768:0] 2025-03-28T09:25:14.089575Z 3 00h03m30.110512s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sende ... 
2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but the writes still go through === SEND TEvPut with key [1:1:21:0:0:32768:0] 2025-03-28T09:25:15.862188Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:713] 2025-03-28T09:25:15.862299Z 2 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:706] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:32768:0] Status# OK 
StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:22:0:0:131072:0] 2025-03-28T09:25:15.864831Z 2 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:706] 2025-03-28T09:25:15.865843Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:713] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:23:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:24:0:0:131072:0] 2025-03-28T09:25:15.869108Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:713] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:25:0:0:32768:0] 2025-03-28T09:25:15.870989Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:713] 2025-03-28T09:25:15.871058Z 2 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:706] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:26:0:0:131072:0] 2025-03-28T09:25:15.872909Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:713] 2025-03-28T09:25:15.872970Z 2 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:706] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:27:0:0:32768:0] 2025-03-28T09:25:15.874971Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:713] 2025-03-28T09:25:15.875032Z 2 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:706] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:28:0:0:131072:0] 2025-03-28T09:25:15.877028Z 2 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:706] 2025-03-28T09:25:15.877204Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:713] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:29:0:0:32768:0] 2025-03-28T09:25:15.879211Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:713] 2025-03-28T09:25:15.879284Z 2 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:706] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:30:0:0:131072:0] 2025-03-28T09:25:15.881316Z 3 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:2:0]: Unavailable in read-only Sender# [1:5323:713] 2025-03-28T09:25:15.881446Z 2 00h08m00.161536s :BS_SKELETON ERROR: VDISK[82000000:_:0:1:0]: Unavailable in read-only Sender# [1:5316:706] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:131072:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999963} === Read all 31 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:21:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:21:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:22:0:0:131072:0] 
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:22:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:23:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:23:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:24:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:24:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:25:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:25:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:26:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:26:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:27:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:27:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:28:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:28:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:29:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:29:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:30:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:30:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} >> TCmsTest::EmergencyDuringRollingRestart [GOOD] >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EmergencyDuringRollingRestart [GOOD] Test command err: 2025-03-28T09:25:12.497910Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } } } 2025-03-28T09:25:12.498259Z node 10 :CMS DEBUG: TTxUpdateConfig Execute 2025-03-28T09:25:12.525268Z node 10 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-03-28T09:25:12.525443Z node 10 :CMS DEBUG: Timestamp: 1970-01-01T00:02:00Z 2025-03-28T09:25:12.526881Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 17 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 
} Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 10 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 11 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 12 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 13 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 14 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 15 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } 
Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 16 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Timestamp: 120027000 } } 2025-03-28T09:25:12.528025Z node 10 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-17-17" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 17 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 10 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 11 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 12 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 13 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP 
Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 14 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 15 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120027000 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120027000 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120027000 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120027000 } Timestamp: 120027000 NodeId: 16 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Timestamp: 120027000 } 2025-03-28T09:25:12.528264Z node 10 :CMS DEBUG: [Sentinel] [Main] Config was updated in 120.003000s 2025-03-28T09:25:12.528320Z node 10 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-03-28T09:25:12.528422Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-03-28T09:25:12.528471Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-03-28T09:25:12.528502Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-03-28T09:25:12.528530Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-03-28T09:25:12.528558Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-03-28T09:25:12.528585Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-03-28T09:25:12.528611Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-03-28T09:25:12.528665Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:83883506 ... 
:CMS DEBUG: Running CleanupWalleTasks 2025-03-28T09:25:12.790873Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-03-28T09:25:12.790934Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-03-28T09:25:12.790963Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-03-28T09:25:12.790992Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-03-28T09:25:12.791025Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-03-28T09:25:12.791057Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-03-28T09:25:12.791101Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-03-28T09:25:12.791146Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2025-03-28T09:25:12.791364Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-03-28T09:25:12.792165Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-03-28T09:25:12.792282Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-03-28T09:25:12.792349Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-03-28T09:25:12.792411Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-03-28T09:25:12.792478Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-03-28T09:25:12.792543Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/17/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-03-28T09:25:12.792607Z node 10 :CMS DEBUG: [Sentinel] 
[StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240027 2025-03-28T09:25:12.792657Z node 10 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-03-28T09:25:12.792933Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# FAULTY, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 3 StateLimit# 1, dry run# 0 2025-03-28T09:25:12.793006Z node 10 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-03-28T09:25:12.793220Z node 10 :CMS DEBUG: TTxLogAndSend Execute 2025-03-28T09:25:12.793442Z node 10 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2025-03-28T09:25:12.793510Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 2025-03-28T09:25:12.806306Z node 10 :CMS DEBUG: TTxLogAndSend Complete 2025-03-28T09:25:12.822637Z node 10 :CMS DEBUG: TTxUpdateDowntimes Execute 2025-03-28T09:25:12.822772Z node 10 :CMS DEBUG: TTxUpdateDowntimes Complete 2025-03-28T09:25:12.822841Z node 10 :CMS DEBUG: Timestamp: 1970-01-01T00:04:00Z 2025-03-28T09:25:12.823916Z node 10 :CMS INFO: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-28T09:25:12.824032Z node 10 :CMS DEBUG: Checking action: Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } 2025-03-28T09:25:12.824088Z node 10 :CMS DEBUG: Result: ERROR (reason: Evict vdisks is disabled in Sentinel (self heal)) 2025-03-28T09:25:12.824235Z node 10 :CMS DEBUG: TTxStorePermissions Execute 2025-03-28T09:25:12.824396Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2025-03-28T09:25:12.836688Z node 10 :CMS DEBUG: TTxStorePermissions complete 2025-03-28T09:25:12.836941Z node 10 :CMS NOTICE: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ERROR Reason: "Evict vdisks is disabled in Sentinel (self heal)" } RequestId: "user-r-1" } 2025-03-28T09:25:12.837528Z node 10 :CMS DEBUG: TTxUpdateConfig Execute 2025-03-28T09:25:12.850061Z node 10 :CMS DEBUG: TTxUpdateConfig Complete 2025-03-28T09:25:12.850315Z node 10 :CMS DEBUG: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 
10000000 ChangeStatusRetries: 5 DefaultStateLimit: 1 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 } 2025-03-28T09:25:12.914446Z node 10 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-03-28T09:25:12.914523Z node 10 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-03-28T09:25:12.914654Z node 10 :CMS DEBUG: Running CleanupWalleTasks 2025-03-28T09:25:12.914941Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2025-03-28T09:25:12.915008Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2025-03-28T09:25:12.915040Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2025-03-28T09:25:12.915081Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2025-03-28T09:25:12.915129Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2025-03-28T09:25:12.915165Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2025-03-28T09:25:12.915193Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2025-03-28T09:25:12.915234Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 17, wbId# [17:8388350642965737326:1634689637] 2025-03-28T09:25:12.915556Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-03-28T09:25:12.916146Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-03-28T09:25:12.916345Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-03-28T09:25:12.916417Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 17, response# PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/17/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-03-28T09:25:12.916505Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-03-28T09:25:12.916578Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-03-28T09:25:12.916638Z 
node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-03-28T09:25:12.916707Z node 10 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300027 2025-03-28T09:25:12.916760Z node 10 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-03-28T09:25:12.917024Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2025-03-28T09:25:12.917097Z node 10 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2025-03-28T09:25:12.917320Z node 10 :CMS DEBUG: TTxLogAndSend Execute 2025-03-28T09:25:12.917546Z node 10 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3 2025-03-28T09:25:12.917595Z node 10 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithCertVerification_ClientDoesNotProvideClientCerts [GOOD] Test command err: 2025-03-28T09:24:24.775133Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788839364661084:2172];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001419/r3tmp/tmpjfcZY5/pdisk_1.dat 2025-03-28T09:24:24.958457Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:24:25.230268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:25.230351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:25.243538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:25.308959Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26562, node 1 2025-03-28T09:24:25.545107Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:24:25.545125Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:24:25.598910Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:25.598933Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:25.598939Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:25.599032Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13943 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:26.077163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:26.209537Z node 1 :TICKET_PARSER DEBUG: Ticket 48EE25891A5AE34098C438543351B827AD6B1042780371DEB8EADE7D6F2B96A7 (ipv6:[::1]:32912) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-03-28T09:24:26.210214Z node 1 :TICKET_PARSER ERROR: Ticket 48EE25891A5AE34098C438543351B827AD6B1042780371DEB8EADE7D6F2B96A7: Cannot create token from certificate. Client certificate failed verification 2025-03-28T09:24:26.336911Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:32930) has now valid token of root@builtin 2025-03-28T09:24:26.483500Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-28T09:24:26.483561Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:24:26.483572Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:24:26.483616Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-28T09:24:29.763605Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788861694452242:2285];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:29.763653Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001419/r3tmp/tmpuAn0rT/pdisk_1.dat 2025-03-28T09:24:29.901829Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:29.906129Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:29.906225Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:29.908212Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7627, node 4 2025-03-28T09:24:29.995372Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:29.995396Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:29.995403Z node 4 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:29.995537Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9643 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:30.206951Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:30.299057Z node 4 :TICKET_PARSER DEBUG: Ticket 48EE25891A5AE34098C438543351B827AD6B1042780371DEB8EADE7D6F2B96A7 (ipv6:[::1]:50542) has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-03-28T09:24:30.299541Z node 4 :TICKET_PARSER ERROR: Ticket 48EE25891A5AE34098C438543351B827AD6B1042780371DEB8EADE7D6F2B96A7: Cannot create token from certificate. 
Client certificate failed verification 2025-03-28T09:24:30.376127Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:50564) has now valid token of root@builtin 2025-03-28T09:24:30.447020Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-28T09:24:30.447044Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:24:30.447054Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:24:30.447084Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-28T09:24:34.268514Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788882509582813:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:34.269933Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001419/r3tmp/tmp4NwBy5/pdisk_1.dat 2025-03-28T09:24:34.533063Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:34.578506Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:34.578606Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:34.584308Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13597, node 7 2025-03-28T09:24:34.766518Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:34.766543Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:34.766550Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:34.766703Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3779 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:35.071679Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... E0328 09:24:35.131016791 423913 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. 
E0328 09:24:35.134601234 423801 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0328 09:24:35.148550271 423992 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0328 09:24:35.151669743 423801 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0328 09:24:35.166323726 423801 ssl_transport_security.cc:1431] Handshake failed with fatal error SSL_ERROR_SSL: error:1408F10B:SSL routines:ssl3_get_record:wrong version number. E0328 09:24:35.171202845 423996 ssl_transport_security.cc:1431] Handshake failed with fatal e ... 25:01.807215765 438482 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:10236'; Got args: {grpc.client_channel_factory=0x50200005c0b0, grpc.default_authority=localhost:10236, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x506000c3ffe0, grpc.internal.event_engine=0x5020001f8030, grpc.internal.subchannel_pool=0x504000776c50, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x5040002155d0, grpc.server_uri=dns:///localhost:10236} E0328 09:25:01.818960755 438482 ssl_transport_security.cc:791] Invalid private key. E0328 09:25:01.819147222 438482 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. E0328 09:25:01.819378465 438482 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:10236'; Got args: {grpc.client_channel_factory=0x50200005c0b0, grpc.default_authority=localhost:10236, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x5060003e7c20, grpc.internal.event_engine=0x5020000417b0, grpc.internal.subchannel_pool=0x504000776c50, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x5040002155d0, grpc.server_uri=dns:///localhost:10236} E0328 09:25:01.822390921 438482 ssl_transport_security.cc:791] Invalid private key. E0328 09:25:01.822567302 438482 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. 
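The ssl3_get_record:wrong version number records above are the classic server-side symptom of plaintext bytes arriving on a TLS listener: the test dials the TLS port without client-side TLS, so OpenSSL fails while parsing the very first record header. A minimal sketch of that mismatch using the gRPC C++ API visible in these logs (grpc-c++/1.54.3); the port is hypothetical, mirroring the TServer ports above:

    #include <grpcpp/grpcpp.h>
    #include <grpcpp/security/credentials.h>

    int main() {
      // The server (not shown) listens with grpc::SslServerCredentials.
      // Insecure channel credentials make the client speak plaintext HTTP/2,
      // which the server's TLS stack rejects with
      // "ssl3_get_record:wrong version number".
      auto channel = grpc::CreateChannel("localhost:26562",
                                         grpc::InsecureChannelCredentials());
      // Any RPC issued on this channel now fails the server-side handshake.
      return channel ? 0 : 1;
    }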
E0328 09:25:01.822807576 438482 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:10236'; Got args: {grpc.client_channel_factory=0x50200005c0b0, grpc.default_authority=localhost:10236, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x5060003e7c20, grpc.internal.event_engine=0x502000cd5130, grpc.internal.subchannel_pool=0x504000776c50, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x5040002155d0, grpc.server_uri=dns:///localhost:10236} E0328 09:25:01.828203488 438482 ssl_transport_security.cc:791] Invalid private key. E0328 09:25:01.828360937 438482 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. E0328 09:25:01.828594429 438482 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:10236'; Got args: {grpc.client_channel_factory=0x50200005c0b0, grpc.default_authority=localhost:10236, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x5060003e6120, grpc.internal.event_engine=0x50200079a310, grpc.internal.subchannel_pool=0x504000776c50, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x5040002155d0, grpc.server_uri=dns:///localhost:10236} E0328 09:25:01.830069195 438482 ssl_transport_security.cc:791] Invalid private key. E0328 09:25:01.830228727 438482 ssl_security_connector.cc:129] Handshaker factory creation failed with TSI_INVALID_ARGUMENT. 
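The paired Invalid private key / TSI_INVALID_ARGUMENT records fail before any bytes reach the wire: gRPC validates the PEM key material while building the TLS handshaker factory, and a malformed key aborts subchannel creation with the INTERNAL status dumped above (one final such record follows below). A hedged sketch of a client that reproduces this error family — the PEM strings are deliberate placeholders, the target mirrors the log's localhost:10236:

    #include <grpcpp/grpcpp.h>
    #include <grpcpp/security/credentials.h>

    int main() {
      grpc::SslCredentialsOptions opts;
      // Deliberately malformed key material -- placeholders, not real PEM.
      opts.pem_private_key = "not a valid PEM private key";
      opts.pem_cert_chain  = "not a valid PEM certificate chain";
      auto channel = grpc::CreateChannel("localhost:10236",
                                         grpc::SslCredentials(opts));
      // Channel creation is lazy and "succeeds"; each connection attempt then
      // logs "Invalid private key" and "Handshaker factory creation failed
      // with TSI_INVALID_ARGUMENT", matching the records above.
      return channel ? 0 : 1;
    }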
E0328 09:25:01.892547550 438482 chttp2_connector.cc:269] Failed to create channel args during subchannel creation: INTERNAL: Failed to create secure subchannel for secure name 'localhost:10236'; Got args: {grpc.client_channel_factory=0x50200005c0b0, grpc.default_authority=localhost:10236, grpc.default_compression_algorithm=0, grpc.http2.max_pings_without_data=0, grpc.internal.channel_credentials=0x5060003e6120, grpc.internal.event_engine=0x5020007eab90, grpc.internal.subchannel_pool=0x504000776c50, grpc.keepalive_permit_without_calls=0, grpc.keepalive_time_ms=1250, grpc.keepalive_timeout_ms=10000, grpc.max_receive_message_length=64000000, grpc.max_send_message_length=64000000, grpc.primary_user_agent=grpc-c++/1.54.3, grpc.resource_quota=0x5040002155d0, grpc.server_uri=dns:///localhost:10236} 2025-03-28T09:25:06.750550Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7486789019616617281:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:06.750641Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001419/r3tmp/tmpCHhCD7/pdisk_1.dat 2025-03-28T09:25:06.922319Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:06.950418Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:06.950539Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:06.958071Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19661, node 25 2025-03-28T09:25:07.022549Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:07.022570Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:07.022578Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:07.022731Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62337 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:07.413707Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
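Each chttp2_connector failure above also dumps the effective channel arguments: keepalive_time_ms=1250 with a 10 s keepalive timeout, pings without data disabled, and 64 MB send/receive limits. In gRPC C++ these correspond to grpc::ChannelArguments; a sketch of how a client could arrive at exactly the dumped values — the numbers are read off the log, while the setter calls are an assumption about how the test client was built:

    #include <grpcpp/grpcpp.h>
    #include <grpcpp/support/channel_arguments.h>

    grpc::ChannelArguments MakeLoggedArgs() {
      grpc::ChannelArguments args;
      args.SetInt(GRPC_ARG_KEEPALIVE_TIME_MS, 1250);            // grpc.keepalive_time_ms=1250
      args.SetInt(GRPC_ARG_KEEPALIVE_TIMEOUT_MS, 10000);        // grpc.keepalive_timeout_ms=10000
      args.SetInt(GRPC_ARG_KEEPALIVE_PERMIT_WITHOUT_CALLS, 0);  // grpc.keepalive_permit_without_calls=0
      args.SetInt(GRPC_ARG_HTTP2_MAX_PINGS_WITHOUT_DATA, 0);    // grpc.http2.max_pings_without_data=0
      args.SetMaxReceiveMessageSize(64000000);                  // grpc.max_receive_message_length
      args.SetMaxSendMessageSize(64000000);                     // grpc.max_send_message_length
      return args;
    }
    // Applied via grpc::CreateCustomChannel(target, creds, MakeLoggedArgs()).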
2025-03-28T09:25:07.552227Z node 25 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:59372) has now valid token of root@builtin 2025-03-28T09:25:07.609773Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-28T09:25:07.609805Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:25:07.609818Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:25:07.609876Z node 25 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-28T09:25:11.737838Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7486789042235598419:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:11.737907Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001419/r3tmp/tmpi7TGpH/pdisk_1.dat 2025-03-28T09:25:11.870295Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:11.901458Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:11.901540Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:11.904050Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7964, node 28 2025-03-28T09:25:11.955296Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:11.955342Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:11.955352Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:11.955493Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7018 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:12.260292Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
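Note that TICKET_PARSER never logs raw credentials: accepted tickets appear as **** (B6C6F477), a fixed mask plus a short hex digest, so the same ticket can still be correlated across records without being disclosed. A sketch of that masking style — the digest choice here is an assumption for illustration, not YDB's actual helper:

    #include <cstdio>
    #include <functional>
    #include <string>

    // Replace a credential with a constant mask plus a short, stable digest so
    // that repeated log records for the same ticket remain correlatable.
    std::string MaskTicket(const std::string& ticket) {
      const std::size_t h = std::hash<std::string>{}(ticket) & 0xFFFFFFFFu;
      char buf[32];
      std::snprintf(buf, sizeof(buf), "**** (%08zX)", h);
      return std::string(buf);
    }
    // MaskTicket("some-secret-token") yields "**** (XXXXXXXX)"-style output;
    // the exact digits depend on the hash implementation.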
2025-03-28T09:25:12.382285Z node 28 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:55960) has now valid token of root@builtin 2025-03-28T09:25:12.433284Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-28T09:25:12.433321Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:25:12.433336Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:25:12.433390Z node 28 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TCmsTest::RequestRestartServicesPartial [GOOD] >> TCmsTest::RequestRestartServicesNoUser >> YdbOlapStore::LogLast50ByResource [GOOD] >> YdbOlapStore::LogGrepNonExisting >> TCmsTest::SamePriorityRequest [GOOD] >> TPersQueueTest::BadTopic >> TPersQueueTest::SetupLockSession2 >> TPersQueueTest::WriteExisting >> TPersQueueTest::ReadFromSeveralPartitions >> TPersQueueTest::UpdatePartitionLocation >> TPersQueueTest::DirectReadPreCached >> DemoTx::Scenario_1 >> TPartitionWriterCacheActorTests::WriteReplyOrder >> TPersQueueTest::ReadFromSeveralPartitionsMigrated >> TopicService::OneConsumer_TheRangesDoNotOverlap |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest [GOOD] >> TCmsTest::TestLogOperationsRollback [GOOD] >> TPartitionWriterCacheActorTests::WriteReplyOrder [GOOD] >> TPartitionWriterCacheActorTests::DropOldWriter |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] >> TCmsTest::RequestRestartServicesNoUser [GOOD] >> TCmsTest::ActionIssue [GOOD] >> TPartitionWriterCacheActorTests::DropOldWriter [GOOD] >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] >> TBackupTests::ShouldSucceedOnLargeData[Raw] >> TBackupTests::BackupUuidColumn[Raw] >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted >> TBackupTests::BackupUuidColumn[Zstd] >> VectorIndexBuildTest::BaseCase >> TBackupTests::ShouldSucceedOnLargeData[Zstd] >> IndexBuildTest::ShadowDataNotAllowedByDefault >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] >> IndexBuildTest::WithFollowers |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] >> IndexBuildTest::CancellationNotEnoughRetries >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesNoUser [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ActionIssue [GOOD] >> IndexBuildTest::CheckLimitWithDroppedIndex |88.0%| [TA] 
$(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.0%| [TA] {RESULT} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ReadOnlyVDisk::TestReads [GOOD] >> IndexBuildTest::BaseCase >> IndexBuildTest::Lock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestReads [GOOD] Test command err: RandomSeed# 11275256914708450311 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #1 to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #2 to read-only === Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #1 === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk 
[82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #2 === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #3 === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #4 === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #5 === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #6 === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} >> IndexBuildTest::ShadowDataNotAllowedByDefault [GOOD] >> IndexBuildTest::ShadowDataEdgeCases >> IndexBuildTest::RejectsCreate >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] >> TBackupTests::BackupUuidColumn[Raw] [GOOD] |88.0%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} |88.0%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> IndexBuildTest::WithFollowers [GOOD] >> IndexBuildTest::Lock [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted >> IndexBuildTest::CheckLimitWithDroppedIndex [GOOD] >> IndexBuildTest::DropIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::WithFollowers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:20.751421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:20.751560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.751606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:20.751658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:20.752827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:20.752900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:20.753057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.753156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:20.754281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:20.846257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:20.846314Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:20.863786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:20.864047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:20.864236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:20.870333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:20.870522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:20.871252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.871444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.875709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885671Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:20.885961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.886015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:20.886109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.893061Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:21.034872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:21.035129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.035350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:21.035572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:21.035628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.039305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.039471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:21.039658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.039720Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:21.039769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:21.039807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:21.041728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.041789Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:21.041829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:21.043494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.043556Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2025-03-28T09:25:21.043616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.043673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.053471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:21.059070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:21.059302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:21.060503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.060651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:21.060716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.061008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:21.061062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.061233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:21.061320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:21.063533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:21.063597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:21.063813Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:21.063858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:21.064265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.064318Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:21.064413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.064447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.064485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.064537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.064574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:21.064619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.064654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:21.064685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:21.064755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:21.064818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:21.064855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:21.066751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.066866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.066903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... -28T09:25:21.834411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-03-28T09:25:21.834446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:1 progress is 2/3 2025-03-28T09:25:21.834477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2025-03-28T09:25:21.834513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: false 2025-03-28T09:25:21.835031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:21.835137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:21.835168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:25:21.835199Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-28T09:25:21.835232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:25:21.836458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:21.836536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:21.836563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:25:21.836607Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:25:21.836656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:21.837928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:21.837999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:21.838024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:25:21.838051Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-03-28T09:25:21.838083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:21.838977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:21.839083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:21.839112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:25:21.839807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:2, at schemeshard: 72057594046678944 2025-03-28T09:25:21.839873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:2 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:21.840204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:25:21.840341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 3/3 2025-03-28T09:25:21.840384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-28T09:25:21.840426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:2 progress is 3/3 2025-03-28T09:25:21.840453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-28T09:25:21.840483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: false 2025-03-28T09:25:21.840515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2025-03-28T09:25:21.840564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the 
parts is done, operation id: 104:0 2025-03-28T09:25:21.840606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:25:21.840695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:21.840736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:1 2025-03-28T09:25:21.840758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:1 2025-03-28T09:25:21.840789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:25:21.840810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:2 2025-03-28T09:25:21.840829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:2 2025-03-28T09:25:21.840867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:25:21.840892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 1 2025-03-28T09:25:21.840956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-03-28T09:25:21.841827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:21.841909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:21.841954Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:25:21.841994Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-28T09:25:21.842032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:25:21.842111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-03-28T09:25:21.842163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:335:2314] 2025-03-28T09:25:21.843179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:25:21.844928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:25:21.845017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:25:21.845066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:25:21.846935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:25:21.847020Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:25:21.847059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:696:2654] TestWaitNotification: OK eventTxId 104 2025-03-28T09:25:21.847775Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/WithFollowers" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:21.848038Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/WithFollowers" took 290us result status StatusSuccess 2025-03-28T09:25:21.848495Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/WithFollowers" PathDescription { Self { Name: "WithFollowers" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "WithFollowers" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "valueFloat" Type: "Float" TypeId: 33 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> IndexBuildTest::ShadowDataEdgeCases [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 
72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:20.650500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:20.650603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.650656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:20.650754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:20.651908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:20.651967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:20.652037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.652138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:20.653522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:20.756925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:20.756979Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:20.770219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:20.770390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:20.770514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:20.775905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:20.776109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:20.776659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.776823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.778415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.785399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.785483Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.786403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:20.786472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.786536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:20.786640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.794019Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:20.960235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:20.960603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.960861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:20.961940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:20.962053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.965160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.965333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:20.965520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.965610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:20.965658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:20.965704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:20.967612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.967672Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:20.967733Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:20.969248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.969292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.969330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.969411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.973413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2025-03-28T09:25:20.975224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:20.975525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:20.976609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.976779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:20.976832Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.977120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:20.977177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.977358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:20.977460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.979667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.979727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.979914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.979953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:20.980383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.980434Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:20.980538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:20.980572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.980610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:20.980677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.980716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:20.980755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.980809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 1:0 2025-03-28T09:25:20.980842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:20.980908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:20.980948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:20.980981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:20.982913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:20.983038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:20.983076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... rd: 72057594046678944 REQUEST: PUT /data_01.csv HTTP/1.1 HEADERS: Host: localhost:2391 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EBB320C4-B4ED-47D7-9FEC-B80DD390B535 amz-sdk-request: attempt=1 content-length: 11 content-md5: jsMhyzH+cyrvZpBm0dQVGQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv / / 11 2025-03-28T09:25:21.439705Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:2391 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3E84E081-F510-45A2-9E9E-2E3F9677DF91 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-03-28T09:25:21.439913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:21.439987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:25:21.440055Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:25:21.440108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:21.440208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-28T09:25:21.440450Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:479:2435], result# PutObjectResult { ETag: 8ec321cb31fe732aef669066d1d41519 } 2025-03-28T09:25:21.440504Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: 
self# [1:479:2435], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-28T09:25:21.440909Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:472:2430], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } 2025-03-28T09:25:21.452887Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:478:2434], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:2391 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 68BFE6E0-ADE7-4658-B99F-F77E703361E9 amz-sdk-request: attempt=1 content-length: 638 content-md5: Myp3UygaBNGp6+7AMgyRnQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 638 2025-03-28T09:25:21.463709Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:472:2430], result# PutObjectResult { ETag: 332a7753281a04d1a9ebeec0320c919d } 2025-03-28T09:25:21.463949Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:471:2429] 2025-03-28T09:25:21.464036Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:472:2430], sender# [1:471:2429], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2025-03-28T09:25:21.464437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:2391 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2C2DAEB2-2948-411C-95CE-128F9C15FC63 amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-03-28T09:25:21.469407Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:472:2430], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-03-28T09:25:21.469490Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:472:2430], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-28T09:25:21.469664Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:471:2429], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-28T09:25:21.490497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 318 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:25:21.490578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:25:21.490753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 
318 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:25:21.490850Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 318 RawX2: 4294969597 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:25:21.490930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.491068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:21.491541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:25:21.491581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-28T09:25:21.491690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:25:21.491786Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 322 RawX2: 4294969599 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:25:21.491824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.491853Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.491886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:25:21.491921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:25:21.491945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:25:21.492061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, 
LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:21.494637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.503705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.503961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.504008Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:25:21.504117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:25:21.504153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:21.504193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:25:21.504222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:21.504259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:25:21.504350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:370:2336] message: TxId: 102 2025-03-28T09:25:21.504411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:21.504458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:25:21.504507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:25:21.504658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:21.507026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:21.507067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:454:2413] TestWaitNotification: OK eventTxId 102
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:20.650922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:20.651018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.651069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:20.651106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:20.651981Z
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:20.652033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:20.652111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.652230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:20.655260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:20.750049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:20.750115Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:20.767317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:20.767597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:20.767839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:20.774040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:20.774293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:20.775058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.775274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.777827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.785409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.785491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.786386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:20.786458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.786564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:20.786673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.794001Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:20.948182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:20.948565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.948881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount 
reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:20.949218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:20.949296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.951844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.951973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:20.952188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.952281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:20.952352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:20.952401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:20.954236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.954301Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:20.954357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:20.955968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.956010Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.956066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.956124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.967677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:20.969684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:20.969973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:20.971056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.971216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { 
TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:20.971268Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.971578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:20.971641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.971815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:20.971919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.974569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.974629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.974815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.974868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:20.975276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.975330Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:20.975470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:20.975508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.975547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:20.975618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.975657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:20.975706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.975754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:20.975786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:20.975855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:20.975890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:20.975921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:20.984701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:20.984825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:20.984853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... SHARD INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:21.359817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-28T09:25:21.359964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:21.362027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-28T09:25:21.362192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-28T09:25:21.362554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.362666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:21.362740Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:25:21.362848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-28T09:25:21.362963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:21.377692Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:411:2382], attempt# 0 2025-03-28T09:25:21.404135Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:411:2382], sender# [1:410:2381] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-28T09:25:21.409771Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:21.409835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:21.410085Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:21.410131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:25:21.410812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 
72057594046678944 2025-03-28T09:25:21.410873Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:21.411408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:21.411505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:21.411539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:25:21.411582Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:25:21.411636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:21.411719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:25:21.415028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:20684 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 03833FB2-6D1C-4BB2-B1A2-FC2FB1534D7E amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-03-28T09:25:21.432952Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:20684 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 4ED460FD-0903-48F9-8B98-4BB7A0AD5DFB amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-03-28T09:25:21.440457Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2025-03-28T09:25:21.440580Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:410:2381] 2025-03-28T09:25:21.440692Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:411:2382], sender# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:20684 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 332BFC7D-DAB2-4CFF-8861-B210B0F41C9A amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== 
content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2025-03-28T09:25:21.443966Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2025-03-28T09:25:21.444037Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:411:2382], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-28T09:25:21.444212Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-28T09:25:21.454655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:25:21.454732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:25:21.454871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:25:21.455002Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:25:21.455069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.455108Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.455149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:25:21.455192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:25:21.455377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:21.458347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.458705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.458749Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:25:21.458844Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Part operation is done id#102:0 progress is 1/1 2025-03-28T09:25:21.458873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:21.458900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:25:21.458925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:21.458968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:25:21.459033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-03-28T09:25:21.459070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:21.459118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:25:21.459154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:25:21.459255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:21.461168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:21.461232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:396:2368] TestWaitNotification: OK eventTxId 102
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:20.650560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:20.650666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.650750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:20.650790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:20.651952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:20.651992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:20.652077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.652181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:20.653477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:25:20.757093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:20.757144Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:20.775558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:20.775830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:20.775971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:20.788187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:20.788416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:20.789174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.789376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.791594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.792980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.793077Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.793269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:20.793319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.793371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:20.793459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.811922Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:20.935503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:20.935788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.936145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:20.937356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:20.937442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.940637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.940808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:20.941007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.941148Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:20.941203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:20.941271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:20.944187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.944275Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:20.944316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:20.946082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.946126Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.946170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.946261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.951385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:20.955193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:20.955477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:20.956564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.956747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:20.956800Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.957732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:20.957803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.957967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-03-28T09:25:20.958051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.960118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.960165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.960334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.960375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:20.960888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.960936Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:20.961025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:20.961064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.961102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:20.961151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.961195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:20.961256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.961301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:20.961330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:20.961389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:20.961430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:20.961480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:20.963395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:20.963499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:20.963532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
EMESHARD INFO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:21.352217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-28T09:25:21.352379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:21.354105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-28T09:25:21.354248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-28T09:25:21.354718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.354823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:21.354871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:25:21.354997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-28T09:25:21.355116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:21.373015Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:411:2382], attempt# 0 2025-03-28T09:25:21.413599Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:411:2382], sender# [1:410:2381] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-28T09:25:21.417422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:21.417476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:21.417792Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:21.417844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:25:21.418466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.418523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:21.419045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:21.419145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:21.419187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:25:21.419228Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:25:21.419266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:21.419348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:25:21.422343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:8651 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E0521699-3B02-4EF4-9BC6-B0078B999338 amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-03-28T09:25:21.432977Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:8651 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F526F51F-5D2C-42DC-BE05-C6AA0E9A71E7 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-03-28T09:25:21.440366Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-03-28T09:25:21.440470Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:410:2381] 2025-03-28T09:25:21.440608Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:411:2382], sender# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:8651 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6ABFDCC1-2B0E-4BB4-A4A0-0C0B09B8D130 amz-sdk-request: attempt=1 content-length: 39 content-md5: GLX1nc5/cKhlAfxBHlykQA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 39 2025-03-28T09:25:21.443870Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleData 
TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 18b5f59dce7f70a86501fc411e5ca440 } 2025-03-28T09:25:21.443936Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:411:2382], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-28T09:25:21.444086Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-28T09:25:21.454806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-03-28T09:25:21.454869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:25:21.455048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-03-28T09:25:21.455167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-03-28T09:25:21.455233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.455269Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.455307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:25:21.455344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:25:21.455549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:21.458436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.458848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.458898Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:25:21.459046Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:25:21.459102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:21.459146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 
2025-03-28T09:25:21.459178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:21.459216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:25:21.459289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-03-28T09:25:21.459335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:21.459405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:25:21.459446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:25:21.459586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:21.461570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:21.461641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:396:2368]
TestWaitNotification: OK eventTxId 102
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Zstd] [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:20.650467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:20.650602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.650664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:20.650733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:20.651965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:20.652020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:20.652116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.652235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:20.653450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:20.744493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:20.744556Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:20.758945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:20.759228Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:20.759434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:20.766334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:20.766609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:20.770433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.771477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.778252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.785400Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.785500Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.786416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:20.786493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.786577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:20.786724Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.798390Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:20.945067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:20.945435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.945686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:20.945996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:20.946062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.948398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.948526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:20.948690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.948737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:20.948773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:20.948824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:20.950731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.950817Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:20.950868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:20.952681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.952746Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.952819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.952887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.956730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:20.958809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:20.959111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:20.960218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.960396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:20.960456Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.960767Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:20.960830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.961003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:20.961119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.963280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-03-28T09:25:20.963331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.963529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.963569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:20.963999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.964051Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:20.964159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:20.964191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.964229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:20.964276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.964331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:20.964390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.964432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:20.964460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:20.964518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:20.964579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:20.964625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:20.966477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:20.966593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:20.966626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
FO: TBackup TPropose, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:21.354883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-28T09:25:21.355035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:21.356629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-28T09:25:21.356735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-28T09:25:21.357104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.357203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:21.357252Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:25:21.357369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-28T09:25:21.357500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:21.377516Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [1:411:2382], attempt# 0 2025-03-28T09:25:21.404956Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:411:2382], sender# [1:410:2381] FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-28T09:25:21.410290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:21.410347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:21.410643Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:21.410712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:25:21.411299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.411373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:21.411852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:21.411956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:21.411994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:25:21.412033Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:25:21.412075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:21.412174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:25:21.415141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:29471 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1ED3E22D-5D6C-4D4C-981E-F8B9D0E4510A amz-sdk-request: attempt=1 content-length: 61 content-md5: 5ZuHSMjV1bVKZhThhMGD5g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 61 2025-03-28T09:25:21.433324Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: e59b8748c8d5d5b54a6614e184c183e6 } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:29471 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2D435E29-E5DD-4E57-AFCE-CF182F56B3CE amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2025-03-28T09:25:21.440086Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2025-03-28T09:25:21.440213Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:410:2381] 2025-03-28T09:25:21.441293Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:411:2382], sender# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:29471 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 59B5942B-F803-4D93-B4B0-9B2412FB975E amz-sdk-request: attempt=1 content-length: 40 content-md5: LXbLDYru8NmFsYXNSXjnpQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 40 2025-03-28T09:25:21.444629Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] 
HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:411:2382], result# PutObjectResult { ETag: 2d76cb0d8aeef0d985b185cd4978e7a5 } 2025-03-28T09:25:21.444693Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:411:2382], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2025-03-28T09:25:21.444865Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:410:2381], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-28T09:25:21.455881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-03-28T09:25:21.455945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:25:21.456153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-03-28T09:25:21.456265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2025-03-28T09:25:21.456330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.456368Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.456411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:25:21.456450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:25:21.456630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:21.458439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.458749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.458787Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:25:21.458880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:25:21.458920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:21.458949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 
progress is 1/1 2025-03-28T09:25:21.458975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:21.459009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:25:21.459070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-03-28T09:25:21.459126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:21.459181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:25:21.459221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:25:21.459341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:21.460892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:21.460942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:396:2368]
TestWaitNotification: OK eventTxId 102
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::ShadowDataEdgeCases [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:20.751410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:20.751546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.751605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:20.751650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:20.752856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:20.752914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:20.753005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.753095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:20.754281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:20.854470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:20.854532Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:20.869948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:25:20.870213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:20.870375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:20.876359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:20.876576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:20.877242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.877443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.879375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885513Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:20.885857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.885906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:20.885984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.892499Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:21.014156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:21.014454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.014662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:21.014915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:21.014994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.017250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.017379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:21.017562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.017680Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:21.017727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:21.017763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:21.019643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.019702Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:21.019738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:21.021256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.021297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.021346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.021394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.031281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:21.033429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:21.033606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:21.034766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.034899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:21.034972Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.035247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:21.035296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.035497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:21.035625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:21.037686Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:21.037746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:21.037915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:21.037970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:21.038333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.038408Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:21.038513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.038548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.038585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.038619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.038657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:21.038732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.038775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:21.038807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:21.038883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:21.038942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:21.038979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:21.040895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.041010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.041047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
bletId, TxId: 109, tablet: 72075186233409548, partId: 0 2025-03-28T09:25:22.730074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: PREPARED TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 PrepareArriveTime: 160500 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 155 } } 2025-03-28T09:25:22.730209Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId# 109:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: PREPARED TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 PrepareArriveTime: 160500 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 155 } } 2025-03-28T09:25:22.730255Z node 2 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-03-28T09:25:22.730342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 109:0, left await: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:22.730375Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 3 -> 128 2025-03-28T09:25:22.732461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-03-28T09:25:22.732593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2025-03-28T09:25:22.732632Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 109:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:22.732691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 109 ready parts: 1/1 2025-03-28T09:25:22.732798Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 109 MinStep: 5000008 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:22.734476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 109:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:109 msg type: 269090816 2025-03-28T09:25:22.734615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 109, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 109 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 109 at step: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 109 at step: 5000008 2025-03-28T09:25:22.735594Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000008, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:22.735710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 109 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 8589936749 } } Step: 5000008 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-03-28T09:25:22.735763Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 109:0 HandleReply TEvOperationPlan, operationId: 109:0, stepId: 5000008, at schemeshard: 72057594046678944 2025-03-28T09:25:22.735962Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 128 -> 129 2025-03-28T09:25:22.736086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000008 2025-03-28T09:25:22.740754Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:22.740834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 109, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:25:22.741065Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:22.741108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 109, path id: 4 FAKE_COORDINATOR: Erasing txId 109 2025-03-28T09:25:22.742124Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2025-03-28T09:25:22.742209Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 109:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:25:22.742865Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109 2025-03-28T09:25:22.742965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 4 PathOwnerId: 72057594046678944, cookie: 109 2025-03-28T09:25:22.742990Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 109 2025-03-28T09:25:22.743019Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 109, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 4 2025-03-28T09:25:22.743053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:25:22.743137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 109, ready parts: 0/1, is published: true 2025-03-28T09:25:22.748380Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 109 2025-03-28T09:25:22.749117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 965 } } 2025-03-28T09:25:22.749157Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2025-03-28T09:25:22.749275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 
5000008 OrderId: 109 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 965 } } 2025-03-28T09:25:22.749358Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409548 Status: COMPLETE TxId: 109 Step: 5000008 OrderId: 109 ExecLatency: 2 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409548 CpuTimeUsec: 965 } } 2025-03-28T09:25:22.750064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 675 RawX2: 8589937222 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-03-28T09:25:22.750108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 109, tablet: 72075186233409548, partId: 0 2025-03-28T09:25:22.750215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 109:0, at schemeshard: 72057594046678944, message: Source { RawX1: 675 RawX2: 8589937222 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-03-28T09:25:22.750255Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:25:22.750315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 109:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 675 RawX2: 8589937222 } Origin: 72075186233409548 State: 2 TxId: 109 Step: 0 Generation: 2 2025-03-28T09:25:22.750364Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 109:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:22.750398Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 109:0, at schemeshard: 72057594046678944 2025-03-28T09:25:22.750443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 109:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-03-28T09:25:22.750475Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 109:0 129 -> 240 2025-03-28T09:25:22.752900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-03-28T09:25:22.753144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 109:0, at schemeshard: 72057594046678944 2025-03-28T09:25:22.753356Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 109:0, at schemeshard: 72057594046678944 2025-03-28T09:25:22.753387Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 109:0 ProgressState 2025-03-28T09:25:22.753468Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2025-03-28T09:25:22.753519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-03-28T09:25:22.753557Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#109:0 progress is 1/1 2025-03-28T09:25:22.753604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-03-28T09:25:22.753634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToNotify, TxId: 109, ready parts: 1/1, is published: true 2025-03-28T09:25:22.753682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:336:2315] message: TxId: 109 2025-03-28T09:25:22.753722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 109 ready parts: 1/1 2025-03-28T09:25:22.753752Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 109:0 2025-03-28T09:25:22.753774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 109:0 2025-03-28T09:25:22.753904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:25:22.755291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-03-28T09:25:22.755331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:784:2729]
TestWaitNotification: OK eventTxId 109
>> IndexBuildTest::IndexPartitioningIsPersisted [GOOD]
>> IndexBuildTest::DropIndex [GOOD]
>> TableCreation::MultipleTablesCreation
>> KqpProxy::NoLocalSessionExecution
>> KqpProxy::InvalidSessionID
>> KqpProxy::CalcPeerStats [GOOD]
>> KqpProxy::CreatesScriptExecutionsTable
>> ScriptExecutionsTest::RunCheckLeaseStatus
>> TableCreation::ConcurrentTableCreation
>> KqpProxy::PassErrroViaSessionActor
>> IndexBuildTest::RejectsCreate [GOOD]
>> IndexBuildTest::RejectsDropIndex
>> KqpProxy::PingNotExistedSession
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::DropIndex [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:20.807600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:20.807712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.807779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:20.807822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:20.807864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:20.807893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:20.807953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.808020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:20.808361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG:
TxInitSchema.Execute 2025-03-28T09:25:20.894293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:20.894341Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:20.915718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:20.916031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:20.916231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:20.947164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:20.947431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:20.948069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.948309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.952054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.953409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.953468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.953625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:20.953675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.953718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:20.953797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.963667Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:21.083591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:21.083820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.084018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:21.084216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:21.084274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.086339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.086445Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:21.086604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.086660Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:21.086722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:21.086773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:21.088849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.088900Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:21.088932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:21.090621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.090664Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.090733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.090774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.094249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:21.096153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:21.096307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:21.097329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.097437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:21.097489Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.097731Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:21.097780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.097926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-03-28T09:25:21.098008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:21.099512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:21.099566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:21.099706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:21.099739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:21.100075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.100115Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:21.100210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.100240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.100273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.100301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.100333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:21.100366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.100415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:21.100442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:21.100508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:21.100546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:21.100569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:21.101776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.101874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.101898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
09550, at schemeshard: 72057594046678944 2025-03-28T09:25:23.799420Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 129 -> 240 2025-03-28T09:25:23.799948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:1, at schemeshard: 72057594046678944 2025-03-28T09:25:23.799984Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:1 ProgressState 2025-03-28T09:25:23.800080Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 1/3 2025-03-28T09:25:23.800113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/3 2025-03-28T09:25:23.800149Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:1 progress is 1/3 2025-03-28T09:25:23.800180Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/3 2025-03-28T09:25:23.800214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/3, is published: false 2025-03-28T09:25:23.800644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2025-03-28T09:25:23.800680Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 105:2 ProgressState at tablet: 72057594046678944 2025-03-28T09:25:23.800731Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 105:2, at schemeshard: 72057594046678944 2025-03-28T09:25:23.800754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 105:2, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-03-28T09:25:23.800778Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:2 129 -> 240 2025-03-28T09:25:23.801076Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:25:23.801169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:25:23.801207Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:25:23.801241Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-03-28T09:25:23.801277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 5 2025-03-28T09:25:23.801645Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:25:23.801715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:25:23.801737Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:25:23.801760Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 
72057594046678944, LocalPathId: 8], version: 18446744073709551615 2025-03-28T09:25:23.801785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 3 2025-03-28T09:25:23.802707Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:25:23.802778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:25:23.802800Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:25:23.802825Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2025-03-28T09:25:23.802848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:23.804027Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:25:23.804095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:25:23.804116Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:25:23.804802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:25:23.804847Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 105:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:23.805056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 4 2025-03-28T09:25:23.805163Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 2/3 2025-03-28T09:25:23.805199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2025-03-28T09:25:23.805234Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 2/3 2025-03-28T09:25:23.805266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2025-03-28T09:25:23.805300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: false 2025-03-28T09:25:23.805807Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:25:23.805872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 8 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:25:23.805899Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 
72057594046678944, txId: 105 2025-03-28T09:25:23.807012Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:25:23.807078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:25:23.807101Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:25:23.807125Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2025-03-28T09:25:23.807149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 4 2025-03-28T09:25:23.807232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: true 2025-03-28T09:25:23.808682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2025-03-28T09:25:23.808729Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 105:2 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:23.808904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-03-28T09:25:23.808989Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:2 progress is 3/3 2025-03-28T09:25:23.809015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-03-28T09:25:23.809072Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:2 progress is 3/3 2025-03-28T09:25:23.809095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-03-28T09:25:23.809120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 3/3, is published: true 2025-03-28T09:25:23.809181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:416:2373] message: TxId: 105 2025-03-28T09:25:23.809218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2025-03-28T09:25:23.809259Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-28T09:25:23.809292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-28T09:25:23.809381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-03-28T09:25:23.809416Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:1 2025-03-28T09:25:23.809434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:1 2025-03-28T09:25:23.809460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 2 2025-03-28T09:25:23.809479Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:2 2025-03-28T09:25:23.809496Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:2 2025-03-28T09:25:23.809528Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-03-28T09:25:23.810003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:25:23.810606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:25:23.811109Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:25:23.811160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:25:23.811246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:25:23.812870Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:25:23.813926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-28T09:25:23.813973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:939:2863]
TestWaitNotification: OK eventTxId 105
>> TableCreation::ConcurrentTableCreationWithDifferentVersions
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:21.448267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:21.448371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:21.448416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:21.448452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:21.448496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:21.448519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:21.448567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:21.448645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:21.448961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:21.533602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
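An aside for readers post-processing runs like this: the interleaved status markers follow one small, regular shape (`>> Suite::Test` when a test starts, `>> Suite::Test [VERDICT]` when it finishes, and `------- [TM] {...} <path> >> Suite::Test [VERDICT]` headers that open a `Test command err:` dump). The sketch below is a minimal verdict counter built on that observed shape; the regex is inferred from this log, not from any documented `ya` output format.

```python
import re
from collections import Counter

# Marker shape inferred from this log, not a documented `ya` contract:
#   ">> Suite::Test [GOOD]"  -- finished test with a verdict
#   ">> Suite::Test"         -- test started, no verdict yet
VERDICT_RE = re.compile(r">> (?P<test>\S+) \[(?P<verdict>[A-Z]+)\]")

def count_verdicts(log_text: str) -> Counter:
    """Tally verdicts (GOOD, FAIL, ...) over every ">> Suite::Test [VERDICT]" marker."""
    return Counter(m["verdict"] for m in VERDICT_RE.finditer(log_text))

# Fed this section's text, it would count the [GOOD] markers visible above.
```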
2025-03-28T09:25:21.533651Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:21.550160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:21.550423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:21.550596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:21.557752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:21.557975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:21.558587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.558856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:21.560704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:21.562007Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:21.562065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:21.562231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:21.562272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:21.562308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:21.562385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.569552Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:21.705129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:21.705341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.705556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:21.705797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:21.705852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.708044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.708176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
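In the same spirit, every trace entry in these `Test command err:` dumps shares one layout: an ISO-8601 timestamp, `node <N>`, a colon-prefixed component tag, and a severity. A rough sketch for splitting a flattened stream back into structured records, assuming that layout holds throughout (it is inferred from the entries above, not a documented contract):

```python
import re

# Shape inferred from the entries above:
#   "<ISO-ts>Z node <N> :<COMPONENT> <LEVEL>: <message>"
ENTRY_RE = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
    r"node (?P<node>\d+) :(?P<component>\w+) (?P<level>TRACE|DEBUG|INFO|NOTICE|WARN|ERROR):"
)

def split_entries(stream: str):
    """Yield (timestamp, node, component, level, message) tuples from a flattened log stream."""
    marks = list(ENTRY_RE.finditer(stream))
    for cur, nxt in zip(marks, marks[1:] + [None]):
        end = nxt.start() if nxt else len(stream)
        message = stream[cur.end():end].strip()
        yield cur["ts"], int(cur["node"]), cur["component"], cur["level"], message
```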
2025-03-28T09:25:21.708331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.708387Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:21.708429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:21.708459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:21.710203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.710253Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:21.710287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:21.711884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.711928Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.711972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.712012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.720988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:21.722781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:21.722947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:21.724032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.724169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:21.724226Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.724463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:21.724510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.724658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:21.724745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:21.726492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:21.726545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:21.726729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:21.726769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:21.727125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.727161Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:21.727243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.727271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.727303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.727328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.727375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:21.727426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.727466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:21.727493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:21.727562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:21.727599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:21.727630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:21.729233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.729333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.729372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
on: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:23.680576Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:25:23.680820Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index" took 258us result status StatusSuccess 2025-03-28T09:25:23.681560Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index" PathDescription { Self { Name: "Index" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 
PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "Index" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:23.682154Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:25:23.682522Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable" took 381us result status StatusSuccess 2025-03-28T09:25:23.683490Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Index/indexImplTable" 
PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "alice" } } Tuple { } } } SplitBoundary { KeyPrefix { Tuple { Optional { Text: "bob" } } Tuple { } } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\005\000\000\000alice\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TablePartitions { EndOfRangeKeyPrefix: "\002\000\003\000\000\000bob\000\000\000\200" IsPoint: false 
IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 3 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted [GOOD]
>> IndexBuildTest::RejectsDropIndex [GOOD]
>> TableCreation::SimpleTableCreation
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::VectorIndexDescriptionIsPersisted [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:20.751892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:20.752021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.752084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:20.752138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:20.752925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:20.752982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:20.753064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.753169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:20.754325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
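One recurring pattern worth calling out: the `Change state for txid A:B X -> Y` entries track each operation part through schemeshard's numeric operation states. In the traces above, the alter-subdomain part walks 2 -> 3 -> 128 -> 240, while table parts jump 129 -> 240 once every datashard has reported TEvSchemaChanged. Below is a small extractor for those chains, intended only for log reading; the numeric codes are internal to schemeshard and deliberately left uninterpreted here.

```python
import re
from collections import defaultdict

# Matches entries like "Change state for txid 1:0 128 -> 240" seen in this log.
STATE_RE = re.compile(r"Change state for txid (?P<op>\d+:\d+) (?P<src>\d+) -> (?P<dst>\d+)")

def transition_paths(log_text: str) -> dict:
    """Collect, per operation part, the ordered chain of numeric state transitions."""
    paths = defaultdict(list)
    for m in STATE_RE.finditer(log_text):
        paths[m["op"]].append((int(m["src"]), int(m["dst"])))
    return dict(paths)

# On the excerpts above this yields e.g. {"1:0": [(2, 3), (3, 128), (128, 240)], ...}
```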
2025-03-28T09:25:20.836156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:20.836213Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:20.856531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:20.856790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:20.856969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:20.865117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:20.866199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:20.869920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.871075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.875921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:20.885771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.885822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:20.885904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.892573Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:21.010334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:21.011907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.012992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:21.014527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:21.014590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.017244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.017381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:21.017571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.017637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:21.017677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:21.017710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:21.019492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.019546Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:21.019579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:21.021385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.021422Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.021470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.021522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.025997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:21.028017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:21.028228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:21.030611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.030775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:21.030836Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.032030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:21.032099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.032310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
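The `TTxPublishToSchemeBoard` / `TEvUpdateAck` exchange seen here and earlier is, at heart, a counting protocol: the schemeshard notes which (path, version) publications a transaction still awaits ("Publication still in progress ... publications: 1, subscribers: 0"), retires each one as a matching ack arrives ("AckPublish ... version: 3"), and only then notifies waiters. The toy model below exists purely to make those lines easier to follow; it is not YDB's actual implementation.

```python
class PublicationTracker:
    """Toy model of the ack counting implied by the log, not YDB's real code."""

    def __init__(self, tx_id, expected):
        self.tx_id = tx_id            # e.g. 1
        self.pending = set(expected)  # {(local_path_id, version), ...}
        self.subscribers = []         # callables awaiting completion

    def on_update_ack(self, path_id, version):
        # Mirrors "AckPublish ... pathId ... version: V" followed by
        # "DecrementPathDbRefCount reason remove publishing".
        self.pending.discard((path_id, version))
        if not self.pending:
            for waiter in self.subscribers:
                waiter(self.tx_id)    # analogous to TEvNotifyTxCompletionResult

tracker = PublicationTracker(tx_id=1, expected={(1, 3)})
tracker.on_update_ack(path_id=1, version=3)  # publication count drops to zero
```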
2025-03-28T09:25:21.032381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:21.035765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:21.035827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:21.036003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:21.036045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:21.036415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.036466Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:21.036577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.036614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.036648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.036677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.036713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:21.036748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.036778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:21.036805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:21.036861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:21.036913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:21.036948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:21.038959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.039087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.039122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
athId: 3] was 2 2025-03-28T09:25:24.625257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:24.625615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:24.625677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:24.626047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:24.626192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:24.626309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:24.626516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:24.626592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:24.626968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:24.627292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:24.627536Z node 1 :BUILD_INDEX DEBUG: AddShardStatus id# 102 shard 72057594046678944:11 range { From: -inf, To: inf } 2025-03-28T09:25:24.627625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:24.627697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:24.627757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:24.639760Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-28T09:25:24.639889Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: by_embedding, IndexColumn: embedding, DataColumns: covered, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 0, CreateSender: [0:0:0], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976720765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976720766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:25:24.639931Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 0 2025-03-28T09:25:24.646854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:24.646930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, 
at schemeshard: 72057594046678944 2025-03-28T09:25:24.647537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:24.647599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:24.647646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:24.647802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:4179:5637] sender: [1:4235:2058] recipient: [1:15:2062] 2025-03-28T09:25:24.691897Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/by_embedding" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:25:24.692180Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/by_embedding" took 319us result status StatusSuccess 2025-03-28T09:25:24.693309Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/by_embedding" PathDescription { Self { Name: "by_embedding" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "by_embedding" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 2 PathOwnerId: 
72057594046678944 DataColumnNames: "covered" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 
ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 3 MaxPartitionsCount: 3 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsDropIndex [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:22.222909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:22.223020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:22.223075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:22.223118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:22.223168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:22.223196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:22.223265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:22.223335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:22.223682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:22.301388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:22.301445Z node 1 :IMPORT WARN: Table profiles were not
loaded 2025-03-28T09:25:22.317837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:22.318116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:22.318316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:22.325352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:22.325602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:22.326267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:22.326476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:22.328320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:22.329629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:22.329679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:22.329849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:22.329896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:22.329937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:22.330026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:22.336879Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:22.461343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:22.461642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:22.461908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:22.462231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:22.462305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:22.464775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:22.464926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:22.465116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:22.465170Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:22.465212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:22.465248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:22.467222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:22.467279Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:22.467316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:22.469936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:22.469989Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:22.470035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:22.470066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:22.473828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:22.475907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:22.476087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:22.477186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:22.477321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:22.477378Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:22.477679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:22.477733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:22.477882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:22.477977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:25:22.480132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:22.480196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:22.480364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:22.480403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:22.480806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:22.480853Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:22.480939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:22.480972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:22.481003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:22.481034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:22.481067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:22.481107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:22.481154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:22.481183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:22.481256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:22.481294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:22.481329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:22.483251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:22.483409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:22.483447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 107 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 107 at step: 5000004 2025-03-28T09:25:25.142246Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:25.142349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 107 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 8589936749 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:25.142398Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TPropose operationId# 107:0 HandleReply TEvOperationPlan, step: 5000004, at schemeshard: 72057594046678944 2025-03-28T09:25:25.142462Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 128 -> 136 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-03-28T09:25:25.146173Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:25:25.146233Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-03-28T09:25:25.146285Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TWaitRenamedPathPublication operationId: 107:0 ProgressState, no renaming has been detected for this operation 2025-03-28T09:25:25.146319Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 136 -> 137 FAKE_COORDINATOR: Erasing txId 107 2025-03-28T09:25:25.147337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 659 } } 2025-03-28T09:25:25.147413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0 2025-03-28T09:25:25.147569Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 659 } } 2025-03-28T09:25:25.147693Z node 2 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 107 Step: 5000004 OrderId: 107 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 659 } } 2025-03-28T09:25:25.148493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 8589936900 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 Generation: 2 2025-03-28T09:25:25.148535Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409547, partId: 0 2025-03-28T09:25:25.148634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 325 RawX2: 8589936900 } Origin: 72075186233409547 State: 5 TxId: 107 Step: 0 Generation: 2 2025-03-28T09:25:25.148690Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-03-28T09:25:25.150230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:25:25.150288Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-03-28T09:25:25.150332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-28T09:25:25.150364Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-03-28T09:25:25.150432Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-03-28T09:25:25.150535Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-03-28T09:25:25.150632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:25.150705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:25.151317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:25:25.152670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:25:25.154154Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:25.154215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:25.154392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:25.154535Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:25.154584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 107, path id: 1 2025-03-28T09:25:25.154632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 107, path id: 2 2025-03-28T09:25:25.154718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:25:25.154783Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:25:25.154880Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:25:25.154922Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:25:25.154970Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-03-28T09:25:25.156116Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 107 2025-03-28T09:25:25.156222Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 107 2025-03-28T09:25:25.156260Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-03-28T09:25:25.156306Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-28T09:25:25.156357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:25.157792Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-03-28T09:25:25.157899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-03-28T09:25:25.157928Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-03-28T09:25:25.157961Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:25:25.157995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:25.158077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-03-28T09:25:25.160460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:25:25.160528Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:25.160826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:25.160957Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-03-28T09:25:25.160994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-28T09:25:25.161033Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-03-28T09:25:25.161064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-28T09:25:25.161118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-03-28T09:25:25.161190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send 
TEvNotifyTxCompletionResult to actorId: [2:378:2346] message: TxId: 107 2025-03-28T09:25:25.161239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-28T09:25:25.161287Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-03-28T09:25:25.161323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-03-28T09:25:25.161412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:25.162960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-28T09:25:25.163719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-28T09:25:25.164727Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-03-28T09:25:25.164781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:582:2542]
TestWaitNotification: OK eventTxId 107
>> KqpProxy::PassErrroViaSessionActor [GOOD]
>> KqpProxy::NodeDisconnectedTest
>> KqpProxy::InvalidSessionID [GOOD]
>> KqpProxy::LoadedMetadataAfterCompilationTimeout
>> TestKinesisHttpProxy::DifferentContentTypes
>> TestYmqHttpProxy::TestCreateQueue
>> TestKinesisHttpProxy::CreateStreamInIncorrectDb
>> TestKinesisHttpProxy::MissingAction
>> TestKinesisHttpProxy::TestPing
>> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl
>> TestYmqHttpProxy::TestSendMessage
>> KqpProxy::PingNotExistedSession [GOOD]
>> ScriptExecutionsTest::AttemptToUpdateDeletedLease
>> YdbYqlClient::TestExplicitPartitioning [GOOD]
>> TopicService::OneConsumer_TheRangesDoNotOverlap [GOOD]
>> TableCreation::MultipleTablesCreation [GOOD]
>> TableCreation::CreateOldTable
>> TableCreation::ConcurrentTableCreation [GOOD]
>> TableCreation::ConcurrentMultipleTablesCreation
>> KqpProxy::NoLocalSessionExecution [GOOD]
>> KqpProxy::NoUserAccessToScriptExecutionsTable
>> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD]
>> TableCreation::ConcurrentUpdateTable
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TestExplicitPartitioning [GOOD]
Test command err: 2025-03-28T09:24:42.182126Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788919380187616:2146];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:42.182175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013bf/r3tmp/tmpi2r2cP/pdisk_1.dat 2025-03-28T09:24:42.707082Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:42.731044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:42.731123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:42.744679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8078, node 1 2025-03-28T09:24:42.844774Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file:
(empty maybe) 2025-03-28T09:24:42.844804Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:42.844812Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:42.844941Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3591 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:43.123295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:43.147910Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:24:45.170986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788932265090448:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:45.171078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:45.475586Z node 1 :TX_PROXY ERROR: Actor# [1:7486788932265090471:2631] txid# 281474976710658, issues: { message: "Column Key has wrong key type Double" severity: 1 } 2025-03-28T09:24:46.947369Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788935732232932:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:46.947414Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013bf/r3tmp/tmpvdN4Fj/pdisk_1.dat 2025-03-28T09:24:47.178090Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:47.209054Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:47.209114Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:47.213905Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9256, node 4 2025-03-28T09:24:47.370729Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:47.370752Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:47.370757Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:47.370889Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13313 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:47.608824Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:50.540132Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788952912103170:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:50.540681Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:50.550758Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788952912103219:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:50.550840Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:50.561244Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:50.577447Z node 4 :TX_PROXY ERROR: Actor# [4:7486788952912103261:2661] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:24:50.577692Z node 4 :TX_PROXY ERROR: Actor# [4:7486788952912103272:2668] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:24:50.578348Z node 4 :TX_PROXY ERROR: Actor# [4:7486788952912103265:2664] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:24:50.578452Z node 4 :TX_PROXY ERROR: Actor# [4:7486788952912103263:2663] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:24:50.578614Z node 4 :TX_PROXY ERROR: Actor# [4:7486788952912103273:2669] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:24:50.578648Z node 4 :TX_PROXY ERROR: Actor# [4:7486788952912103262:2662] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:24:50.578829Z node 4 :TX_PROXY ERROR: Actor# [4:7486788952912103271:2667] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:24:50.584378Z node 4 :TX_PROXY ERROR: Actor# [4:7486788952912103318:2703] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:24:50.584498Z node 4 :TX_PROXY ERROR: Actor# [4:7486788952912103317:2702] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/Test\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:24:50.764802Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788952912103464:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:50.764879Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:50.765235Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788952912103469:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:50.769009Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at ... :50.883881Z node 4 :TX_PROXY ERROR: Actor# [4:7486788952912103542:2874] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:51.047997Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqe131ec5epfhpdyprc9r55p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NDRmNjdkMmUtOWExZjU3OTctZjFmNjQyNWEtOWM1MmYwZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:52.588862Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788958809905305:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:52.588924Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013bf/r3tmp/tmprbmSMv/pdisk_1.dat 2025-03-28T09:24:52.783833Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:52.812296Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:52.812366Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:52.814748Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16798, node 7 2025-03-28T09:24:52.845696Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:52.845720Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:52.845729Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:52.845856Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8024 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:24:53.276337Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:55.954319Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:56.066088Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788975989775711:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:56.066173Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:56.066345Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486788975989775723:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:56.069632Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:24:56.090013Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486788975989775725:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:24:56.145890Z node 7 :TX_PROXY ERROR: Actor# [7:7486788975989775800:2831] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:24:56.231340Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe136m0awr4pkqaj9fzfyw3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NzVjYWM0MjAtODdhODI0YjEtZTJhYmM0YjQtN2MxMjMzMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:24:58.023106Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788985312018097:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:58.024060Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013bf/r3tmp/tmpN4ZVpv/pdisk_1.dat 2025-03-28T09:24:58.189425Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:58.210212Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:58.210299Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:58.212553Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1783, node 10 2025-03-28T09:24:58.275385Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:58.275409Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:58.275417Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:58.275602Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31506 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:58.542877Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:01.469380Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:25:03.023163Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7486788985312018097:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:03.023250Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:13.159775Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:25:13.159811Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:27.100477Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486789109866071842:2540], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:27.100583Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7486789109866071831:2537], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:27.100941Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:27.104668Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:25:27.124809Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7486789109866071845:2541], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:25:27.228453Z node 10 :TX_PROXY ERROR: Actor# [10:7486789109866071920:3193] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:27.365318Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe144xt63mvm9xa8k6nmgsb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTJiNGJlNmQtYTZlZDUyYS0yMmU4ZDFkMi01MjcyZThkOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:27.834408Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe145707185kpz9d8a1gkyx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ZTJiNGJlNmQtYTZlZDUyYS0yMmU4ZDFkMi01MjcyZThkOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TableCreation::SimpleTableCreation [GOOD] >> TableCreation::SimpleUpdateTable >> DemoTx::Scenario_1 [GOOD] >> TopicService::OneConsumer_TheRangesOverlap >> ScriptExecutionsTest::RunCheckLeaseStatus [GOOD] >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring >> TPersQueueTest::BadTopic [GOOD] >> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable >> TPersQueueTest::WriteExisting [GOOD] >> TPersQueueTest::WriteExistingBigValue >> TPersQueueTest::UpdatePartitionLocation [GOOD] >> TPersQueueTest::TopicServiceCommitOffset >> DemoTx::Scenario_2 >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest >> TPersQueueTest::DirectReadPreCached [GOOD] >> TPersQueueTest::DirectReadNotCached >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> TPersQueueTest::SetupLockSession2 [GOOD] >> TPersQueueTest::SetupLockSession >> IndexBuildTest::BaseCase [GOOD] >> IndexBuildTest::CancelBuild >> StatisticsSaveLoad::Delete [GOOD] >> KqpProxy::CreatesScriptExecutionsTable [GOOD] >> KqpProxy::DatabasesCacheForServerless >> TableCreation::CreateOldTable [GOOD] >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] >> TableCreation::ConcurrentUpdateTable [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete [GOOD] Test command err: 2025-03-28T09:25:22.932252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:25:22.932471Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:25:22.932522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001617/r3tmp/tmp0C3jRs/pdisk_1.dat 2025-03-28T09:25:23.397610Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4909, node 1 2025-03-28T09:25:23.926358Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:23.926417Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:23.926451Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:23.926934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:25:23.936738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:25:24.035195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:24.035971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:24.051649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62223 2025-03-28T09:25:24.648848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:27.858347Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:25:27.908213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:27.908337Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:27.964702Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:25:27.967767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:28.214465Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:28.216405Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:28.216876Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:28.216994Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:28.217159Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:28.217212Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:28.217258Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:28.217310Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:28.217389Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:28.392541Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:28.392662Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:28.415979Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:28.583657Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:28.642655Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:25:28.643334Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:25:28.681436Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:25:28.689006Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:25:28.689260Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:25:28.689335Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:25:28.689398Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:25:28.689450Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:25:28.689507Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:25:28.689568Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:25:28.690184Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:25:28.728832Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1859:2587] 2025-03-28T09:25:28.729734Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:25:28.748946Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:25:28.749021Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:25:28.749092Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:25:28.762065Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:25:28.762195Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1895:2609], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:25:28.814200Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1935:2627] 2025-03-28T09:25:28.814659Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1935:2627], schemeshard id = 72075186224037897 2025-03-28T09:25:28.923671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:25:28.948711Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:25:28.948887Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:25:29.216162Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:25:29.518301Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:25:29.575450Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:25:30.308838Z node 1 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:25:30.309254Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:25:30.391382Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:25:30.406173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2261:3086], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:30.406317Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2274:3091], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:30.406409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:30.416773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72075186224037897 2025-03-28T09:25:30.481889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2278:3094], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:25:30.808988Z node 1 :TX_PROXY ERROR: Actor# [1:2371:3126] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:31.731187Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2393:3138]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:25:31.731551Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:25:31.731660Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2395:3140] 2025-03-28T09:25:31.738829Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2395:3140] 2025-03-28T09:25:31.739585Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2396:2843] 2025-03-28T09:25:31.739923Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2395:3140], server id = [2:2396:2843], tablet id = 72075186224037894, status = OK 2025-03-28T09:25:31.740141Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2396:2843], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:25:31.740221Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:25:31.740415Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:25:31.740541Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2393:3138], StatRequests.size() = 1 2025-03-28T09:25:32.418222Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=MmRkZjQwOWEtNTkyNmQ1Y2MtMTUxNjUxMWQtMzU1MTIyMTc=, TxId: 2025-03-28T09:25:32.418284Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=MmRkZjQwOWEtNTkyNmQ1Y2MtMTUxNjUxMWQtMzU1MTIyMTc=, TxId: 2025-03-28T09:25:32.419208Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:25:32.421194Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:25:32.450479Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2424:3161]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:25:32.450652Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:25:32.450717Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2424:3161], StatRequests.size() = 1 2025-03-28T09:25:32.646867Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ODliNWZlMTUtZjQ1YjgzNWYtMWQ4NGIyZmEtOTRiYzk2Njg=, TxId: 2025-03-28T09:25:32.646947Z node 1 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ODliNWZlMTUtZjQ1YjgzNWYtMWQ4NGIyZmEtOTRiYzk2Njg=, TxId: 2025-03-28T09:25:32.648150Z node 1 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:25:32.651097Z node 1 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2025-03-28T09:25:32.742219Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2456:3177]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:25:32.742434Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:25:32.742477Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:2456:3177], StatRequests.size() = 1 2025-03-28T09:25:32.930510Z node 1 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=MjQ3YmZmM2QtMzM4ZTczYjQtZDJlZTI4NTctOGViNjJkNTI=, TxId: 01jqe14ajc67gg95kvt7f3bjsm 2025-03-28T09:25:32.933913Z node 1 :STATISTICS WARN: [TQueryBase] Finish with BAD_REQUEST, Issues: {
: Error: No data }, SessionId: ydb://session/3?node_id=1&id=MjQ3YmZmM2QtMzM4ZTczYjQtZDJlZTI4NTctOGViNjJkNTI=, TxId: 01jqe14ajc67gg95kvt7f3bjsm >> TableCreation::SimpleUpdateTable [GOOD] >> TestKinesisHttpProxy::TestPing [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::CreateOldTable [GOOD] Test command err: 2025-03-28T09:25:24.341111Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789099639755618:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:24.341203Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ef4/r3tmp/tmpFgM9CO/pdisk_1.dat 2025-03-28T09:25:24.800549Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:24.803675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:24.803768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:24.813635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29711 TServer::EnableGrpc on GrpcPort 4483, node 1 2025-03-28T09:25:25.197577Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:25.197597Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:25.197608Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:25.197705Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:25:25.514205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:27.270565Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:27.272700Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:27.281189Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-28T09:25:27.281300Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2025-03-28T09:25:27.281339Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:27.281415Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:27.281496Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.281586Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.281633Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.281948Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.282949Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-03-28T09:25:27.282976Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-03-28T09:25:27.283008Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-03-28T09:25:27.283035Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-03-28T09:25:27.283041Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-03-28T09:25:27.283062Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-03-28T09:25:27.283263Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-03-28T09:25:27.283269Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-03-28T09:25:27.283305Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-03-28T09:25:27.287539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-03-28T09:25:27.292015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:25:27.293895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:25:27.300047Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-03-28T09:25:27.300058Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-03-28T09:25:27.300094Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710658 2025-03-28T09:25:27.300094Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710659 2025-03-28T09:25:27.300620Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-03-28T09:25:27.300654Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710660 2025-03-28T09:25:27.402028Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-03-28T09:25:27.429309Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-03-28T09:25:27.467429Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-03-28T09:25:27.487031Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-03-28T09:25:27.523496Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-03-28T09:25:27.560907Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-03-28T09:25:27.561355Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 7c5ce32b-6e36295b-5a87ef4f-d0aaa4c8, Bootstrap. Database: /dc-1 2025-03-28T09:25:27.570752Z node 1 :KQP_PROXY DEBUG: Request has 18445000919781.980894s seconds to be completed 2025-03-28T09:25:27.573470Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=YTAwM2YxODQtZTA3N2Q0ZDEtOWQyZjkxZDAtY2Q4Y2U1YjY=, workerId: [1:7486789112524658405:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-03-28T09:25:27.573588Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:27.574384Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 7c5ce32b-6e36295b-5a87ef4f-d0aaa4c8, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-03-28T09:25:27.574869Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=YTAwM2YxODQtZTA3N2Q0ZDEtOWQyZjkxZDAtY2Q4Y2U1YjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [1:7486789112524658405:2333] 2025-03-28T09:25:27.574898Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7486789112524658407:2470] 2025-03-28T09:25:27.576754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789112524658408:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:27.576764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789112524658416:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:27.576848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:27.579598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-03-28T09:25:27.589471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789112524658422:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:25:27.681920Z node 1 :TX_PROXY ERROR: Actor# [1:7486789112524658464:2502] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges) ... 2025-03-28T09:25:33.333196Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZWIwY2EyNS1iMTE2MmY0Ni1kYTM2YTQ1NS1hZGE5MWQ3OA==, workerId: [2:7486789135658237610:2349], database: dc-1, longSession: 1, local sessions count: 2 2025-03-28T09:25:33.333332Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:33.333604Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YTFjYmNiYTAtODljODQ3ZGMtZmZlZjM3YTgtZDY5YmFmZDk=, workerId: [2:7486789135658237500:2333], local sessions count: 1 2025-03-28T09:25:33.333754Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZWIwY2EyNS1iMTE2MmY0Ni1kYTM2YTQ1NS1hZGE5MWQ3OA==, CurrentExecutionId: 7fd66e91-ee3e1272-b525cf2c-754f43fd, CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [2:7486789135658237610:2349] 2025-03-28T09:25:33.333780Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 604800.000000s actor id: [2:7486789135658237613:2532] 2025-03-28T09:25:33.353027Z node 2 :KQP_PROXY DEBUG: TraceId: "01jqe14b18b49y8svdtn75akes", Request has 18445000919776.198619s seconds to be completed 2025-03-28T09:25:33.354904Z node 2 :KQP_PROXY DEBUG: TraceId: "01jqe14b18b49y8svdtn75akes", Created new session, sessionId: ydb://session/3?node_id=2&id=N2MwNGY1NS03YjA4ZjRiNC1iZWZhYTA1ZS0zNjBjZDE5, workerId: [2:7486789135658237626:2359], database: /dc-1, longSession: 1, local sessions count: 2 2025-03-28T09:25:33.355039Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jqe14b18b49y8svdtn75akes 2025-03-28T09:25:33.359979Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Describe result: PathErrorUnknown 2025-03-28T09:25:33.359993Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Creating table 2025-03-28T09:25:33.360021Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Full table path:/dc-1/.test/test_table 2025-03-28T09:25:33.362431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:1, at schemeshard: 72057594046644480 2025-03-28T09:25:33.365348Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715664 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2025-03-28T09:25:33.365377Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976715664 2025-03-28T09:25:33.385639Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 7fd66e91-ee3e1272-b525cf2c-754f43fd, Bootstrap. 
Database: /dc-1 2025-03-28T09:25:33.385851Z node 2 :KQP_PROXY DEBUG: Request has 18445000919776.165783s seconds to be completed 2025-03-28T09:25:33.387663Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=YThkOGMwZWUtN2YwNzZlYjMtNGYyNTA5ZTctZDYzN2I2MjE=, workerId: [2:7486789135658237695:2363], database: /dc-1, longSession: 1, local sessions count: 3 2025-03-28T09:25:33.387780Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:33.387855Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [2:7486789135658237497:2464], selfId: [2:7486789118478367447:2070], source: [2:7486789135658237610:2349] 2025-03-28T09:25:33.388028Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 7fd66e91-ee3e1272-b525cf2c-754f43fd, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-03-28T09:25:33.388311Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=YThkOGMwZWUtN2YwNzZlYjMtNGYyNTA5ZTctZDYzN2I2MjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 8, targetId: [2:7486789135658237695:2363] 2025-03-28T09:25:33.388339Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 8 timeout: 300.000000s actor id: [2:7486789135658237697:2574] 2025-03-28T09:25:33.450776Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: create. Transaction completed: 281474976715664. Doublechecking... 2025-03-28T09:25:33.530952Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-28T09:25:33.534934Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-28T09:25:33.566123Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7486789135658237696:2364], selfId: [2:7486789118478367447:2070], source: [2:7486789135658237695:2363] 2025-03-28T09:25:33.566316Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 7fd66e91-ee3e1272-b525cf2c-754f43fd, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YThkOGMwZWUtN2YwNzZlYjMtNGYyNTA5ZTctZDYzN2I2MjE=, TxId: 2025-03-28T09:25:33.566357Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 7fd66e91-ee3e1272-b525cf2c-754f43fd, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YThkOGMwZWUtN2YwNzZlYjMtNGYyNTA5ZTctZDYzN2I2MjE=, TxId: 2025-03-28T09:25:33.566583Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 7fd66e91-ee3e1272-b525cf2c-754f43fd, start saving rows range [0; 1) 2025-03-28T09:25:33.566656Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 7fd66e91-ee3e1272-b525cf2c-754f43fd, Bootstrap. 
Database: /dc-1 2025-03-28T09:25:33.566843Z node 2 :KQP_PROXY DEBUG: Request has 18445000919775.984792s seconds to be completed 2025-03-28T09:25:33.568808Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=OGE1MzE3ZTYtYWRkNzVlMWItMWY4NDRlM2MtZmNmNDU1MjI=, workerId: [2:7486789135658237758:2374], database: /dc-1, longSession: 1, local sessions count: 4 2025-03-28T09:25:33.568929Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:33.569419Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YThkOGMwZWUtN2YwNzZlYjMtNGYyNTA5ZTctZDYzN2I2MjE=, workerId: [2:7486789135658237695:2363], local sessions count: 3 2025-03-28T09:25:33.569562Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 7fd66e91-ee3e1272-b525cf2c-754f43fd, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional<Timestamp>; DECLARE $items AS List<Struct<row_id:Int64,result_set:String,accumulated_size:Int64>>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-03-28T09:25:33.569906Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=OGE1MzE3ZTYtYWRkNzVlMWItMWY4NDRlM2MtZmNmNDU1MjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7486789135658237758:2374] 2025-03-28T09:25:33.569932Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7486789135658237761:2617] 2025-03-28T09:25:33.583200Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=N2MwNGY1NS03YjA4ZjRiNC1iZWZhYTA1ZS0zNjBjZDE5, workerId: [2:7486789135658237626:2359], local sessions count: 2 2025-03-28T09:25:33.748664Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 10, sender: [2:7486789135658237760:2375], selfId: [2:7486789118478367447:2070], source: [2:7486789135658237758:2374] 2025-03-28T09:25:33.749187Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 7fd66e91-ee3e1272-b525cf2c-754f43fd, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGE1MzE3ZTYtYWRkNzVlMWItMWY4NDRlM2MtZmNmNDU1MjI=, TxId: 2025-03-28T09:25:33.749215Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 7fd66e91-ee3e1272-b525cf2c-754f43fd, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGE1MzE3ZTYtYWRkNzVlMWItMWY4NDRlM2MtZmNmNDU1MjI=, TxId: 2025-03-28T09:25:33.749332Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 7fd66e91-ee3e1272-b525cf2c-754f43fd, result part successfully saved 2025-03-28T09:25:33.749351Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 7fd66e91-ee3e1272-b525cf2c-754f43fd, reply SUCCESS, issues: 2025-03-28T09:25:33.749543Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=OGE1MzE3ZTYtYWRkNzVlMWItMWY4NDRlM2MtZmNmNDU1MjI=, workerId: [2:7486789135658237758:2374], local sessions count: 1 2025-03-28T09:25:33.749592Z node 2 :KQP_PROXY DEBUG: [TQueryBase] 
[TSaveScriptFinalStatusActor] TraceId: 7fd66e91-ee3e1272-b525cf2c-754f43fd, Bootstrap. Database: /dc-1 2025-03-28T09:25:33.749721Z node 2 :KQP_PROXY DEBUG: Request has 18445000919775.801920s seconds to be completed 2025-03-28T09:25:33.751532Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MTVlZWY1YWEtODc1YThmNGEtYzhlOWYwYmQtZDhlOTRiMWE=, workerId: [2:7486789135658237787:2385], database: /dc-1, longSession: 1, local sessions count: 2 2025-03-28T09:25:33.751654Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:33.751882Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 7fd66e91-ee3e1272-b525cf2c-754f43fd, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-03-28T09:25:33.752206Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MTVlZWY1YWEtODc1YThmNGEtYzhlOWYwYmQtZDhlOTRiMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 12, targetId: [2:7486789135658237787:2385] 2025-03-28T09:25:33.752236Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 12 timeout: 300.000000s actor id: [2:7486789135658237789:2630] >> TestKinesisHttpProxy::CreateStreamInIncorrectDb [GOOD] >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl [GOOD] >> TestKinesisHttpProxy::MissingAction [GOOD] >> TestYmqHttpProxy::TestCreateQueue [GOOD] |88.1%| [TA] $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.1%| [TA] {RESULT} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestYmqHttpProxy::TestSendMessage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentUpdateTable [GOOD] Test command err: 2025-03-28T09:25:24.784429Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789099179868919:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:24.784800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000eb7/r3tmp/tmpJSbrCz/pdisk_1.dat 2025-03-28T09:25:25.210479Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:25.233647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:25.233770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:25.236281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17403 TServer::EnableGrpc on GrpcPort 12359, node 1 2025-03-28T09:25:25.486923Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:25.486955Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:25.486966Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:25.487143Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:25:25.635429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:25.652918Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:25:27.646739Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:27.649022Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:27.652703Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-03-28T09:25:27.652723Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. 
Creating table 2025-03-28T09:25:27.652754Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-03-28T09:25:27.652858Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-03-28T09:25:27.652862Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-03-28T09:25:27.652884Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-03-28T09:25:27.656398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-03-28T09:25:27.659042Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-28T09:25:27.659522Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-03-28T09:25:27.659528Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-03-28T09:25:27.659552Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-03-28T09:25:27.661270Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-03-28T09:25:27.661296Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:27.661344Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:27.661653Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.661691Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.662467Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.662493Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.662843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:25:27.665630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:25:27.676570Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-03-28T09:25:27.676651Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710659 2025-03-28T09:25:27.678460Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-03-28T09:25:27.678497Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710658 2025-03-28T09:25:27.678638Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-03-28T09:25:27.678655Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. 
Subscribe on create table tx: 281474976710660 2025-03-28T09:25:27.808995Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-03-28T09:25:27.846407Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-03-28T09:25:27.858449Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-03-28T09:25:27.881397Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-03-28T09:25:27.915081Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-03-28T09:25:27.949006Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-03-28T09:25:27.949518Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 81277146-55b97cde-5ef4bee3-ed88f22c, Bootstrap. Database: /dc-1 2025-03-28T09:25:27.969957Z node 1 :KQP_PROXY DEBUG: Request has 18445000919781.581692s seconds to be completed 2025-03-28T09:25:27.981442Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=NmI1MjE0MjgtODdlMWY5ZDktMzZhOTNhMy04MjdhODA1ZQ==, workerId: [1:7486789112064771680:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-03-28T09:25:27.981536Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:27.982586Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 81277146-55b97cde-5ef4bee3-ed88f22c, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-03-28T09:25:27.983125Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NmI1MjE0MjgtODdlMWY5ZDktMzZhOTNhMy04MjdhODA1ZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7486789112064771680:2333] 2025-03-28T09:25:27.983169Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7486789112064771682:2467] 2025-03-28T09:25:27.994953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789112064771683:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:27.995080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:27.995177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789112064771694:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:28.001799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-03-28T09:25:28.012574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789112064771697:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:25:28.082614Z node 1 :TX_PROXY ERROR: Actor# [1:7486789116359739035:2499] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/p ... 480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2025-03-28T09:25:34.315075Z node 2 :TX_PROXY ERROR: Actor# [2:7486789140470846183:2633] txid# 281474976710669, issues: { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2025-03-28T09:25:34.315157Z node 2 :TX_PROXY ERROR: Actor# [2:7486789140470846186:2636] txid# 281474976710672, issues: { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2025-03-28T09:25:34.315230Z node 2 :TX_PROXY ERROR: Actor# [2:7486789140470846187:2637] txid# 281474976710673, issues: { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2025-03-28T09:25:34.315307Z node 2 :TX_PROXY ERROR: Actor# [2:7486789140470846185:2635] txid# 281474976710671, issues: { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2025-03-28T09:25:34.315378Z node 2 :TX_PROXY ERROR: Actor# [2:7486789140470846182:2632] txid# 281474976710668, issues: { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2025-03-28T09:25:34.315616Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710675 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-03-28T09:25:34.315625Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-03-28T09:25:34.315701Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710669 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-03-28T09:25:34.315707Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-03-28T09:25:34.315762Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710672 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-03-28T09:25:34.315766Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-03-28T09:25:34.315822Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710673 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-03-28T09:25:34.315829Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-03-28T09:25:34.315884Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710671 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-03-28T09:25:34.315890Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-03-28T09:25:34.315946Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710668 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-03-28T09:25:34.315951Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-03-28T09:25:34.316263Z node 2 :TX_PROXY ERROR: Actor# [2:7486789140470846188:2638] txid# 281474976710674, issues: { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2025-03-28T09:25:34.316383Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710674 Issues { message: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/.test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2025-03-28T09:25:34.316391Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Unable to subscribe to concurrent transaction, falling back 2025-03-28T09:25:34.324712Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: alter. Transaction completed: 281474976710666. Doublechecking... 2025-03-28T09:25:34.386292Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-28T09:25:34.388107Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-28T09:25:34.394907Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-28T09:25:34.396334Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-28T09:25:34.401003Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-28T09:25:34.401973Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-28T09:25:34.402439Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7486789140470846114:2364], selfId: [2:7486789123290975956:2146], source: [2:7486789140470846113:2363] 2025-03-28T09:25:34.402596Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 7ef2b684-e33b5a1a-1af581bf-345012a9, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjNkMjY0NzItZWZmMTM4YzItMmM1MTBiMWQtNGRjODVmNjY=, TxId: 2025-03-28T09:25:34.402625Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 7ef2b684-e33b5a1a-1af581bf-345012a9, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjNkMjY0NzItZWZmMTM4YzItMmM1MTBiMWQtNGRjODVmNjY=, TxId: 2025-03-28T09:25:34.402755Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 7ef2b684-e33b5a1a-1af581bf-345012a9, start saving rows range [0; 1) 2025-03-28T09:25:34.402840Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 7ef2b684-e33b5a1a-1af581bf-345012a9, Bootstrap. 
Database: /dc-1 2025-03-28T09:25:34.402953Z node 2 :KQP_PROXY DEBUG: Request has 18445000919775.148679s seconds to be completed 2025-03-28T09:25:34.404714Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MTE5MWQ1N2ItZjgyZWYzZWUtNTAyOTYyODQtZGQyMzU0YmE=, workerId: [2:7486789140470846275:2374], database: /dc-1, longSession: 1, local sessions count: 4 2025-03-28T09:25:34.404849Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:34.404908Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NjNkMjY0NzItZWZmMTM4YzItMmM1MTBiMWQtNGRjODVmNjY=, workerId: [2:7486789140470846113:2363], local sessions count: 3 2025-03-28T09:25:34.405195Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 7ef2b684-e33b5a1a-1af581bf-345012a9, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-03-28T09:25:34.405552Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MTE5MWQ1N2ItZjgyZWYzZWUtNTAyOTYyODQtZGQyMzU0YmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7486789140470846275:2374] 2025-03-28T09:25:34.405586Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7486789140470846277:2701] 2025-03-28T09:25:34.407637Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-28T09:25:34.408649Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-28T09:25:34.413265Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-28T09:25:34.421540Z node 2 :KQP_PROXY DEBUG: Table test_table updater. 
Column diff is empty, finishing 2025-03-28T09:25:34.452825Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NzUzNmJhZTMtNjcwOGZlNzQtMzllZTcwOC0yNTQzN2NkZA==, workerId: [2:7486789140470846046:2359], local sessions count: 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] Test command err: 2025-03-28T09:25:24.341038Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789097132224329:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:24.341074Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f21/r3tmp/tmp394jsN/pdisk_1.dat 2025-03-28T09:25:24.789049Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:24.794153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:24.794262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:24.804690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18317 TServer::EnableGrpc on GrpcPort 19236, node 1 2025-03-28T09:25:25.198475Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:25.198523Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:25.198543Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:25.198720Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:25:25.518337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:27.167835Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:27.169314Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:27.171866Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-28T09:25:27.171901Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2025-03-28T09:25:27.171918Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:27.171962Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:27.172019Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.172052Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.172434Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.172477Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.173747Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-03-28T09:25:27.173757Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-03-28T09:25:27.173768Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-03-28T09:25:27.173771Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-03-28T09:25:27.173792Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-03-28T09:25:27.173799Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-03-28T09:25:27.173922Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-03-28T09:25:27.173928Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-03-28T09:25:27.173939Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-03-28T09:25:27.187105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480 2025-03-28T09:25:27.189672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:25:27.191236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:25:27.196658Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-03-28T09:25:27.196663Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-03-28T09:25:27.196729Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710660 2025-03-28T09:25:27.196729Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710659 2025-03-28T09:25:27.196874Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-03-28T09:25:27.196913Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710658 2025-03-28T09:25:27.348260Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-03-28T09:25:27.388003Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-03-28T09:25:27.388594Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-03-28T09:25:27.414720Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-03-28T09:25:27.440895Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-03-28T09:25:27.441412Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-03-28T09:25:27.444300Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: e748f4fe-f284117c-8b1e99e4-84c301b5, Bootstrap. Database: /dc-1 2025-03-28T09:25:27.458369Z node 1 :KQP_PROXY DEBUG: Request has 18445000919782.093274s seconds to be completed 2025-03-28T09:25:27.462329Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=NDYyYmJmZmItOGM4YWMzN2QtOTU1ZWJmYmUtZDYyYjhhZjk=, workerId: [1:7486789110017127113:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-03-28T09:25:27.462435Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:27.471907Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: e748f4fe-f284117c-8b1e99e4-84c301b5, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-03-28T09:25:27.484203Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NDYyYmJmZmItOGM4YWMzN2QtOTU1ZWJmYmUtZDYyYjhhZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [1:7486789110017127113:2333] 2025-03-28T09:25:27.484259Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7486789110017127115:2469] 2025-03-28T09:25:27.488842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789110017127116:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:27.488928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789110017127124:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:27.488974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:27.499975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-03-28T09:25:27.507177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789110017127130:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:25:27.569835Z node 1 :TX_PROXY ERROR: Actor# [1:7486789110017127172:2501] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges ... equest: create. Transaction completed: 281474976710682. Doublechecking... 2025-03-28T09:25:33.917596Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976710682. Doublechecking... 2025-03-28T09:25:33.917621Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Request: create. Transaction completed: 281474976710682. Doublechecking... 2025-03-28T09:25:33.937043Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.937095Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.938502Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.944176Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.944255Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.945090Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.946124Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.951078Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.951143Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.955199Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.957506Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.961643Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.961734Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.961757Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.962507Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.966208Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.966246Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.968712Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:33.970774Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:33.971216Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:33.971727Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:33.972752Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:33.975926Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:33.977868Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. 
Column diff is empty, finishing 2025-03-28T09:25:33.980426Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:33.981912Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:33.983515Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.983520Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.985528Z node 2 :KQP_PROXY DEBUG: Table test_table0 updater. Column diff is empty, finishing 2025-03-28T09:25:33.989165Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:33.993426Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:33.993902Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:33.998938Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:33.999927Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:33.999944Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:34.001474Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:34.007578Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:34.013792Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:34.016328Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:34.020140Z node 2 :KQP_PROXY DEBUG: Table test_table1 updater. Column diff is empty, finishing 2025-03-28T09:25:34.054679Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7486789135073346295:2366], selfId: [2:7486789117893475545:2065], source: [2:7486789135073346294:2365] 2025-03-28T09:25:34.054881Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 330b5665-259d34b5-706ee654-179a3556, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGM2MzY3ZjMtNzFkYzliMzgtZjUzYTY2NWItZDdkY2FlNzI=, TxId: 2025-03-28T09:25:34.054912Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 330b5665-259d34b5-706ee654-179a3556, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGM2MzY3ZjMtNzFkYzliMzgtZjUzYTY2NWItZDdkY2FlNzI=, TxId: 2025-03-28T09:25:34.055068Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 330b5665-259d34b5-706ee654-179a3556, start saving rows range [0; 1) 2025-03-28T09:25:34.055129Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 330b5665-259d34b5-706ee654-179a3556, Bootstrap. 
Database: /dc-1 2025-03-28T09:25:34.055361Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=OGM2MzY3ZjMtNzFkYzliMzgtZjUzYTY2NWItZDdkY2FlNzI=, workerId: [2:7486789135073346294:2365], local sessions count: 2 2025-03-28T09:25:34.055400Z node 2 :KQP_PROXY DEBUG: Request has 18445000919775.496225s seconds to be completed 2025-03-28T09:25:34.057175Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=ZDhmOGZiMTEtYjk2YTJmNzQtNjAwODJjNzItOTFjZGVhMGE=, workerId: [2:7486789139368313698:2379], database: /dc-1, longSession: 1, local sessions count: 3 2025-03-28T09:25:34.057292Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:34.057738Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ODk1YjgwYzAtNmM1MTUyYTUtMjVhYTU3NTEtMTQ2MmMxOGY=, workerId: [2:7486789135073345741:2359], local sessions count: 2 2025-03-28T09:25:34.057888Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 330b5665-259d34b5-706ee654-179a3556, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-03-28T09:25:34.058326Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ZDhmOGZiMTEtYjk2YTJmNzQtNjAwODJjNzItOTFjZGVhMGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 10, targetId: [2:7486789139368313698:2379] 2025-03-28T09:25:34.058355Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7486789139368313702:3085] 2025-03-28T09:25:34.205205Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 10, sender: [2:7486789139368313701:2381], selfId: [2:7486789117893475545:2065], source: [2:7486789139368313698:2379] 2025-03-28T09:25:34.205766Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 330b5665-259d34b5-706ee654-179a3556, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDhmOGZiMTEtYjk2YTJmNzQtNjAwODJjNzItOTFjZGVhMGE=, TxId: 2025-03-28T09:25:34.205788Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 330b5665-259d34b5-706ee654-179a3556, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDhmOGZiMTEtYjk2YTJmNzQtNjAwODJjNzItOTFjZGVhMGE=, TxId: 2025-03-28T09:25:34.205901Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 330b5665-259d34b5-706ee654-179a3556, result part successfully saved 2025-03-28T09:25:34.205914Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 330b5665-259d34b5-706ee654-179a3556, reply SUCCESS, issues: 2025-03-28T09:25:34.206373Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ZDhmOGZiMTEtYjk2YTJmNzQtNjAwODJjNzItOTFjZGVhMGE=, workerId: [2:7486789139368313698:2379], local sessions count: 1 2025-03-28T09:25:34.206463Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 330b5665-259d34b5-706ee654-179a3556, Bootstrap. Database: /dc-1 2025-03-28T09:25:34.206543Z node 2 :KQP_PROXY DEBUG: Request has 18445000919775.345082s seconds to be completed 2025-03-28T09:25:34.208270Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MTEyNzcyOWYtMjAyZTQzZjktMjhmMjM5OTItMzkxNjk5Njk=, workerId: [2:7486789139368313726:2390], database: /dc-1, longSession: 1, local sessions count: 2 2025-03-28T09:25:34.208371Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:34.208605Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 330b5665-259d34b5-706ee654-179a3556, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-03-28T09:25:34.208846Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MTEyNzcyOWYtMjAyZTQzZjktMjhmMjM5OTItMzkxNjk5Njk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 12, targetId: [2:7486789139368313726:2390] 2025-03-28T09:25:34.208869Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 12 timeout: 300.000000s actor id: [2:7486789139368313728:3098] 2025-03-28T09:25:34.279001Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 >> TestKinesisHttpProxy::DifferentContentTypes [GOOD] >> TestKinesisHttpProxy::CreateStreamWithInvalidName >> TestKinesisHttpProxy::TestRequestBadJson >> TestYmqHttpProxy::TestSendMessageFifoQueue >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey >> TestYmqHttpProxy::TestReceiveMessage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] Test command err: 2025-03-28T09:25:24.461835Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789097750157122:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:24.461935Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:24.779631Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789098400723942:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:24.780056Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f07/r3tmp/tmp57aWqa/pdisk_1.dat 2025-03-28T09:25:25.276587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:25.276679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:25.279676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:25.279756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:25.281177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:25.289764Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:25:25.294830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:25.299063Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:23547 2025-03-28T09:25:28.059439Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:28.062380Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:28.065411Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:28.074600Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 
2025-03-28T09:25:28.074645Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:28.074705Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:28.075603Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-03-28T09:25:28.075632Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:28.075829Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:28.077241Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:28.082237Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=NDBhYjg3MDMtMjg3MmIyYWItYTdhZDQwYzItZjBiMDMwZmE=, workerId: [2:7486789115580593272:2308], database: , longSession: 1, local sessions count: 1 2025-03-28T09:25:28.082287Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:28.082458Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:28.082567Z node 2 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-28T09:25:28.082625Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-03-28T09:25:28.082647Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:28.082750Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:28.082857Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:28.082899Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:28.082963Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:28.082986Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:28.083412Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NDBhYjg3MDMtMjg3MmIyYWItYTdhZDQwYzItZjBiMDMwZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [2:8678280833929343339:121] 2025-03-28T09:25:28.083498Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 600.000000s actor id: [1:7486789114930027130:2481] 2025-03-28T09:25:28.083791Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:28.083932Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NDBhYjg3MDMtMjg3MmIyYWItYTdhZDQwYzItZjBiMDMwZmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [2:7486789115580593272:2308] 2025-03-28T09:25:28.083955Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [2:7486789115580593275:2118] 2025-03-28T09:25:28.087823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789114930027131:2313], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:28.088026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:28.097019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789115580593276:2309], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:28.097114Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:28.391037Z node 2 :KQP_PROXY DEBUG: TraceId: "01jqe145wm28qzkjscc0760xe1", Created new session, sessionId: ydb://session/3?node_id=2&id=Njk3MjNkMi1mNjlmNWE1Ny1lZmUzMzg2YS1jOTg1ODk1Nw==, workerId: [2:7486789115580593300:2311], database: , longSession: 0, local sessions count: 2 2025-03-28T09:25:28.391260Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jqe145wm28qzkjscc0760xe1, Database: , DatabaseId: , SessionId: ydb://session/3?node_id=2&id=Njk3MjNkMi1mNjlmNWE1Ny1lZmUzMzg2YS1jOTg1ODk1Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 4, targetId: [2:7486789115580593300:2311] 2025-03-28T09:25:28.391296Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 4 timeout: 300.000000s actor id: [2:7486789115580593302:2122] 2025-03-28T09:25:28.395028Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789115580593301:2312], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:28.395140Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:28.397595Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789115580593307:2315], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:28.405887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-28T09:25:28.485926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486789115580593309:2316], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-28T09:25:28.622168Z node 2 :TX_PROXY ERROR: Actor# [2:7486789115580593337:2132] txid# 281474976715658, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:28.679156Z node 2 :KQP_PROXY DEBUG: TraceId: "01jqe145wm28qzkjscc0760xe1", Forwarded response to sender actor, requestId: 4, sender: [2:7486789115580593299:2310], selfId: [2:7486789098400723798:2081], source: [2:7486789115580593300:2311] 2025-03-28T09:25:28.679462Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=Njk3MjNkMi1mNjlmNWE1Ny1lZmUzMzg2YS1jOTg1ODk1Nw==, workerId: [2:7486789115580593300:2311], local sessions count: 1 2025-03-28T09:25:28.689827Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7486789097750157361:2281], selfId: [2:7486789098400723798:2081], source: [2:7486789115580593272:2308] 2025-03-28T09:25:28.690847Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7486789102045125160:2447], selfId: [1:7486789097750157361:2281], source: [2:7486789098400723798:2081] 2025-03-28T09:25:29.081274Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:30.291020Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486789124087558084:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:30.291126Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f07/r3tmp/tmpEpMoye/pdisk_1.dat 2025-03-28T09:25:30.659845Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:30.702194Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:30.702290Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:30.709540Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6168, node 3 2025-03-28T09:25:30.923711Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:30.923737Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:30.923747Z node 3 :NET_CLASSIFIER WARN: failed to initialize from ... 9:25:33.613423Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-03-28T09:25:33.676877Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-03-28T09:25:33.680931Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710661. Doublechecking... 2025-03-28T09:25:33.682738Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-03-28T09:25:33.740168Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. 
Column diff is empty, finishing 2025-03-28T09:25:33.754922Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-03-28T09:25:33.755349Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: db25326a-8528d178-1c4b55bc-1b47155e, Bootstrap. Database: /Root 2025-03-28T09:25:33.755567Z node 3 :KQP_PROXY DEBUG: Request has 18445000919775.796064s seconds to be completed 2025-03-28T09:25:33.757585Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ZjVhYWE4ZjItOGU5YzIzZGQtMzIyNDk2ZWMtY2U2NThiZGE=, workerId: [3:7486789136972461475:2358], database: /Root, longSession: 1, local sessions count: 1 2025-03-28T09:25:33.757715Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:33.758978Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: db25326a-8528d178-1c4b55bc-1b47155e, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-03-28T09:25:33.759904Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ZjVhYWE4ZjItOGU5YzIzZGQtMzIyNDk2ZWMtY2U2NThiZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [3:7486789136972461475:2358] 2025-03-28T09:25:33.759937Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 300.000000s actor id: [3:7486789136972461478:2970] 2025-03-28T09:25:33.760332Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486789136972461477:2360], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:33.760428Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:33.760780Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486789136972461483:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:33.764491Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:2, at schemeshard: 72057594046644480 2025-03-28T09:25:33.795071Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486789136972461485:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:25:33.850769Z node 3 :TX_PROXY ERROR: Actor# [3:7486789136972461549:3023] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:34.054430Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [3:7486789136972461476:2359], selfId: [3:7486789124087558163:2145], source: [3:7486789136972461475:2358] 2025-03-28T09:25:34.054632Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: db25326a-8528d178-1c4b55bc-1b47155e, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=ZjVhYWE4ZjItOGU5YzIzZGQtMzIyNDk2ZWMtY2U2NThiZGE=, TxId: 2025-03-28T09:25:34.054669Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: db25326a-8528d178-1c4b55bc-1b47155e, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=ZjVhYWE4ZjItOGU5YzIzZGQtMzIyNDk2ZWMtY2U2NThiZGE=, TxId: 2025-03-28T09:25:34.054681Z node 3 :KQP_PROXY DEBUG: [ScriptExecutions] Create script execution operation. ExecutionId: db25326a-8528d178-1c4b55bc-1b47155e. Result: SUCCESS. Issues: 2025-03-28T09:25:34.057329Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ZGVhZDRiM2UtOTk3ZWI4NTctMjE0OGU3NTQtNWQ4ZDcwNzQ=, workerId: [3:7486789141267428924:2374], database: /Root, longSession: 1, local sessions count: 2 2025-03-28T09:25:34.057472Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:34.057567Z node 3 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=3&id=ZjVhYWE4ZjItOGU5YzIzZGQtMzIyNDk2ZWMtY2U2NThiZGE=, workerId: [3:7486789136972461475:2358], local sessions count: 1 2025-03-28T09:25:34.057687Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jqe14b3jbbn86rnrggd7y6w8, Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ZGVhZDRiM2UtOTk3ZWI4NTctMjE0OGU3NTQtNWQ4ZDcwNzQ=, CurrentExecutionId: db25326a-8528d178-1c4b55bc-1b47155e, CustomerSuppliedId: 01jqe14b3jbbn86rnrggd7y6w8, PoolId: }. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 7, targetId: [3:7486789141267428924:2374] 2025-03-28T09:25:34.057721Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 7 timeout: 604800.000000s actor id: [3:7486789141267428925:3069] 2025-03-28T09:25:34.077081Z node 3 :KQP_PROXY DEBUG: TraceId: "01jqe14bqw0jkzp39h2pvh16c7", Request has 18445000919775.474559s seconds to be completed 2025-03-28T09:25:34.079058Z node 3 :KQP_PROXY DEBUG: TraceId: "01jqe14bqw0jkzp39h2pvh16c7", Created new session, sessionId: ydb://session/3?node_id=3&id=MjY4YzVlMjctMTk2MDA4NjYtZGNjMDI1NDgtMjgwNzc1NGY=, workerId: [3:7486789141267428935:2380], database: /Root, longSession: 1, local sessions count: 2 2025-03-28T09:25:34.079202Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jqe14bqw0jkzp39h2pvh16c7 2025-03-28T09:25:34.091826Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jqe14brb1ce389qbs4fp3btr, Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=MjY4YzVlMjctMTk2MDA4NjYtZGNjMDI1NDgtMjgwNzc1NGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 9, targetId: [3:7486789141267428935:2380] 2025-03-28T09:25:34.091874Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 9 timeout: 300.000000s actor id: [3:7486789141267428938:3073] 2025-03-28T09:25:34.115691Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486789141267428942:3075], for# user@builtin, access# DescribeSchema 2025-03-28T09:25:34.115725Z node 3 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [3:7486789141267428942:3075], for# user@builtin, access# DescribeSchema 2025-03-28T09:25:34.127620Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486789141267428939:2382], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:25:34.128119Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjY4YzVlMjctMTk2MDA4NjYtZGNjMDI1NDgtMjgwNzc1NGY=, ActorId: [3:7486789141267428935:2380], ActorState: ExecuteState, TraceId: 01jqe14brb1ce389qbs4fp3btr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:25:34.128325Z node 3 :KQP_PROXY DEBUG: TraceId: "01jqe14brb1ce389qbs4fp3btr", Forwarded response to sender actor, requestId: 9, sender: [3:7486789141267428937:2381], selfId: [3:7486789124087558163:2145], source: [3:7486789141267428935:2380] 2025-03-28T09:25:34.132227Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: db25326a-8528d178-1c4b55bc-1b47155e, Bootstrap. Database: /Root 2025-03-28T09:25:34.132431Z node 3 :KQP_PROXY DEBUG: Request has 18445000919775.419197s seconds to be completed 2025-03-28T09:25:34.134283Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=Mjk0YmE4MTMtNmNjMGY5MzQtOTEyNDNhN2ItOTNlYmExNWY=, workerId: [3:7486789141267428951:2385], database: /Root, longSession: 1, local sessions count: 3 2025-03-28T09:25:34.134428Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:34.134525Z node 3 :KQP_PROXY DEBUG: TraceId: "01jqe14b3jbbn86rnrggd7y6w8", Forwarded response to sender actor, requestId: 7, sender: [3:7486789136972461472:2968], selfId: [3:7486789124087558163:2145], source: [3:7486789141267428924:2374] 2025-03-28T09:25:34.134872Z node 3 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: db25326a-8528d178-1c4b55bc-1b47155e, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-03-28T09:25:34.135162Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /Root, DatabaseId: , SessionId: ydb://session/3?node_id=3&id=Mjk0YmE4MTMtNmNjMGY5MzQtOTEyNDNhN2ItOTNlYmExNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 11, targetId: [3:7486789141267428951:2385] 2025-03-28T09:25:34.135190Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 11 timeout: 300.000000s actor id: [3:7486789141267428953:3079] 2025-03-28T09:25:34.320597Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::SimpleUpdateTable [GOOD] Test command err: 2025-03-28T09:25:25.767799Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789100322892715:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:25.768324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ea9/r3tmp/tmpnLMUc7/pdisk_1.dat 2025-03-28T09:25:26.156222Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:26.170668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:26.170798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:26.173762Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9411 TServer::EnableGrpc on GrpcPort 26437, node 1 2025-03-28T09:25:26.391976Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:26.392002Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:26.392008Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:26.392132Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:25:26.503366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:28.802642Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:28.808115Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:28.813294Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 
2025-03-28T09:25:28.813332Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-03-28T09:25:28.813350Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:28.813391Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:28.813472Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:28.813514Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:28.815092Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:28.815169Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:28.817690Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-03-28T09:25:28.817709Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-03-28T09:25:28.817736Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-03-28T09:25:28.817745Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-03-28T09:25:28.817747Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Full table path:/dc-1/.metadata/result_sets 2025-03-28T09:25:28.817766Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-03-28T09:25:28.817846Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-03-28T09:25:28.817850Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-03-28T09:25:28.817866Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-03-28T09:25:28.822350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480 2025-03-28T09:25:28.832682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:25:28.834394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:25:28.850585Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-03-28T09:25:28.850656Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710659 2025-03-28T09:25:28.851151Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-03-28T09:25:28.851171Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710658 2025-03-28T09:25:28.853654Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-03-28T09:25:28.853685Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710660 2025-03-28T09:25:29.016114Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-03-28T09:25:29.084180Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-03-28T09:25:29.084978Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-03-28T09:25:29.110986Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-03-28T09:25:29.163068Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-03-28T09:25:29.173561Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-03-28T09:25:29.174592Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: ab47c091-dbfe079c-a6c39d7f-531216df, Bootstrap. Database: /dc-1 2025-03-28T09:25:29.195411Z node 1 :KQP_PROXY DEBUG: Request has 18445000919780.356250s seconds to be completed 2025-03-28T09:25:29.198406Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=ZmVjNzc0NWEtZDUyYzM3MTItZWNhY2E3NWMtODUzZmUyOGY=, workerId: [1:7486789117502762796:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-03-28T09:25:29.198546Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:29.199929Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: ab47c091-dbfe079c-a6c39d7f-531216df, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-03-28T09:25:29.200580Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=ZmVjNzc0NWEtZDUyYzM3MTItZWNhY2E3NWMtODUzZmUyOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [1:7486789117502762796:2333] 2025-03-28T09:25:29.200605Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7486789117502762798:2463] 2025-03-28T09:25:29.202350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789117502762799:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:29.202440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:29.202893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789117502762811:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:29.206427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-03-28T09:25:29.215552Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-03-28T09:25:29.216542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789117502762813:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:25:29.320739Z node 1 :TX_PROXY ERROR: Actor# [1:7486789117502762856:2496] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/po ... : 5, targetId: [2:7486789140413963719:2349] 2025-03-28T09:25:34.484111Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 604800.000000s actor id: [2:7486789140413963721:2530] 2025-03-28T09:25:34.496451Z node 2 :KQP_PROXY DEBUG: TraceId: "01jqe14c500nwh31dvknkkc2fr", Request has 18445000919775.055191s seconds to be completed 2025-03-28T09:25:34.497975Z node 2 :KQP_PROXY DEBUG: TraceId: "01jqe14c500nwh31dvknkkc2fr", Created new session, sessionId: ydb://session/3?node_id=2&id=YWJhMzE5YjQtYTI0NTQwODItNzBhNTNjMjItYzFmMTA5NWE=, workerId: [2:7486789140413963735:2359], database: /dc-1, longSession: 1, local sessions count: 2 2025-03-28T09:25:34.498100Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 01jqe14c500nwh31dvknkkc2fr 2025-03-28T09:25:34.500182Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Describe result: PathErrorUnknown 2025-03-28T09:25:34.500197Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Creating table 2025-03-28T09:25:34.500236Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Full table path:/dc-1/.test/test_table 2025-03-28T09:25:34.502629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:1, at schemeshard: 72057594046644480 2025-03-28T09:25:34.504376Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715664 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 10 } 2025-03-28T09:25:34.504406Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976715664 2025-03-28T09:25:34.520176Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 8de7a82a-7f542d65-d03043b1-f01c8ae8, Bootstrap. Database: /dc-1 2025-03-28T09:25:34.520773Z node 2 :KQP_PROXY DEBUG: Request has 18445000919775.030859s seconds to be completed 2025-03-28T09:25:34.522142Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=NzkwNjI1NDYtNGJmMTllZDMtODkyZTQxYWEtZTA2MzMxYzk=, workerId: [2:7486789140413963805:2363], database: /dc-1, longSession: 1, local sessions count: 3 2025-03-28T09:25:34.522259Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:34.522332Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 5, sender: [2:7486789140413963606:2463], selfId: [2:7486789127529060835:2065], source: [2:7486789140413963719:2349] 2025-03-28T09:25:34.522421Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 8de7a82a-7f542d65-d03043b1-f01c8ae8, RunDataQuery: -- TSaveScriptExecutionResultMetaQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_metas AS JsonDocument; UPDATE `.metadata/script_executions` SET result_set_metas = $result_set_metas WHERE database = $database AND execution_id = $execution_id; 2025-03-28T09:25:34.522651Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NzkwNjI1NDYtNGJmMTllZDMtODkyZTQxYWEtZTA2MzMxYzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 8, targetId: [2:7486789140413963805:2363] 2025-03-28T09:25:34.522675Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 8 timeout: 300.000000s actor id: [2:7486789140413963808:2576] 2025-03-28T09:25:34.528029Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: create. Transaction completed: 281474976715664. Doublechecking... 2025-03-28T09:25:34.615049Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-28T09:25:34.615515Z node 2 :KQP_PROXY NOTICE: Table test_table updater. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2025-03-28T09:25:34.615556Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Full table path:/dc-1/.test/test_table 2025-03-28T09:25:34.616844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:25:34.617654Z node 2 :KQP_PROXY DEBUG: Table test_table updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715666 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 } 2025-03-28T09:25:34.617688Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Subscribe on create table tx: 281474976715666 2025-03-28T09:25:34.624895Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Request: alter. Transaction completed: 281474976715666. Doublechecking... 2025-03-28T09:25:34.688510Z node 2 :KQP_PROXY DEBUG: Table test_table updater. Column diff is empty, finishing 2025-03-28T09:25:34.690608Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 8, sender: [2:7486789140413963807:2364], selfId: [2:7486789127529060835:2065], source: [2:7486789140413963805:2363] 2025-03-28T09:25:34.690940Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 8de7a82a-7f542d65-d03043b1-f01c8ae8, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NzkwNjI1NDYtNGJmMTllZDMtODkyZTQxYWEtZTA2MzMxYzk=, TxId: 2025-03-28T09:25:34.690980Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultMetaQuery] TraceId: 8de7a82a-7f542d65-d03043b1-f01c8ae8, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzkwNjI1NDYtNGJmMTllZDMtODkyZTQxYWEtZTA2MzMxYzk=, TxId: 2025-03-28T09:25:34.691138Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 8de7a82a-7f542d65-d03043b1-f01c8ae8, start saving rows range [0; 1) 2025-03-28T09:25:34.691211Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 8de7a82a-7f542d65-d03043b1-f01c8ae8, Bootstrap. 
Database: /dc-1 2025-03-28T09:25:34.691545Z node 2 :KQP_PROXY DEBUG: Request has 18445000919774.860079s seconds to be completed 2025-03-28T09:25:34.693165Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=OTM2YjRjYzgtOTA0NTJkYTEtZTAyZjRjYS0zYTdmYWFhYQ==, workerId: [2:7486789140413963887:2374], database: /dc-1, longSession: 1, local sessions count: 4 2025-03-28T09:25:34.693285Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:34.693362Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NzkwNjI1NDYtNGJmMTllZDMtODkyZTQxYWEtZTA2MzMxYzk=, workerId: [2:7486789140413963805:2363], local sessions count: 3 2025-03-28T09:25:34.693575Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 8de7a82a-7f542d65-d03043b1-f01c8ae8, RunDataQuery: -- TSaveScriptExecutionResultQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $result_set_id AS Int32; DECLARE $expire_at AS Optional; DECLARE $items AS List>; UPSERT INTO `.metadata/result_sets` SELECT $database as database, $execution_id as execution_id, $result_set_id as result_set_id, T.row_id as row_id, $expire_at as expire_at, T.result_set as result_set, T.accumulated_size as accumulated_size FROM AS_TABLE($items) AS T; 2025-03-28T09:25:34.693883Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=OTM2YjRjYzgtOTA0NTJkYTEtZTAyZjRjYS0zYTdmYWFhYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 10, targetId: [2:7486789140413963887:2374] 2025-03-28T09:25:34.693910Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 10 timeout: 300.000000s actor id: [2:7486789140413963889:2636] 2025-03-28T09:25:34.704361Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=YWJhMzE5YjQtYTI0NTQwODItNzBhNTNjMjItYzFmMTA5NWE=, workerId: [2:7486789140413963735:2359], local sessions count: 2 2025-03-28T09:25:34.819434Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 10, sender: [2:7486789140413963888:2375], selfId: [2:7486789127529060835:2065], source: [2:7486789140413963887:2374] 2025-03-28T09:25:34.820162Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 8de7a82a-7f542d65-d03043b1-f01c8ae8, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTM2YjRjYzgtOTA0NTJkYTEtZTAyZjRjYS0zYTdmYWFhYQ==, TxId: 2025-03-28T09:25:34.820188Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptExecutionResultQuery] TraceId: 8de7a82a-7f542d65-d03043b1-f01c8ae8, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTM2YjRjYzgtOTA0NTJkYTEtZTAyZjRjYS0zYTdmYWFhYQ==, TxId: 2025-03-28T09:25:34.820300Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 8de7a82a-7f542d65-d03043b1-f01c8ae8, result part successfully saved 2025-03-28T09:25:34.820317Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TSaveScriptExecutionResultActor] ExecutionId: 8de7a82a-7f542d65-d03043b1-f01c8ae8, reply SUCCESS, issues: 2025-03-28T09:25:34.820762Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=OTM2YjRjYzgtOTA0NTJkYTEtZTAyZjRjYS0zYTdmYWFhYQ==, workerId: [2:7486789140413963887:2374], local sessions count: 1 2025-03-28T09:25:34.820835Z node 2 :KQP_PROXY DEBUG: [TQueryBase] 
[TSaveScriptFinalStatusActor] TraceId: 8de7a82a-7f542d65-d03043b1-f01c8ae8, Bootstrap. Database: /dc-1 2025-03-28T09:25:34.820915Z node 2 :KQP_PROXY DEBUG: Request has 18445000919774.730710s seconds to be completed 2025-03-28T09:25:34.822508Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MWNlNDNmZGEtOWFhMWY1YTgtZTU4N2ZjZC04MjJkNTYzNQ==, workerId: [2:7486789140413963918:2386], database: /dc-1, longSession: 1, local sessions count: 2 2025-03-28T09:25:34.822608Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:34.822893Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 8de7a82a-7f542d65-d03043b1-f01c8ae8, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-03-28T09:25:34.823201Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MWNlNDNmZGEtOWFhMWY1YTgtZTU4N2ZjZC04MjJkNTYzNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 12, targetId: [2:7486789140413963918:2386] 2025-03-28T09:25:34.823240Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 12 timeout: 300.000000s actor id: [2:7486789140413963921:2652] >> TRegisterNodeOverDiscoveryService::ServerWithOutCertVerification_ClientProvidesExpiredCert [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts >> TestKinesisHttpProxy::GoodRequestPutRecords >> TPersQueueTest::ReadFromSeveralPartitionsMigrated [GOOD] >> TPersQueueTest::SchemeshardRestart >> TPersQueueTest::ReadFromSeveralPartitions [GOOD] >> TPersQueueTest::Init ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] Test command err: 2025-03-28T09:25:24.724700Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789099215047759:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:24.724769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ec7/r3tmp/tmpYh8H7x/pdisk_1.dat 2025-03-28T09:25:25.163241Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9154, node 1 2025-03-28T09:25:25.211981Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:25:25.212018Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:25:25.212930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:25.212992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
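
A capture artifact worth flagging: in the TSaveScriptExecutionResultQuery logged just before this test's output (TraceId 8de7a82a-7f542d65-d03043b1-f01c8ae8), the declarations read `DECLARE $expire_at AS Optional;` and `DECLARE $items AS List>;` — the angle-bracketed type arguments were evidently stripped when the log was rendered, and the later TSaveScriptFinalStatusActor declarations (`$ast AS Optional;` and friends) show the same damage. A plausible reconstruction, with the struct fields inferred from the columns the UPSERT pulls out of AS_TABLE($items); the concrete types are assumptions, not read from the log:

    -- Reconstructed declarations; types are inferred, not authoritative.
    DECLARE $expire_at AS Optional<Timestamp>;  -- compared against CurrentUtcTimestamp() in later queries
    DECLARE $items AS List<Struct<
        row_id: Int64,             -- becomes result_sets.row_id
        result_set: String,        -- serialized rows payload
        accumulated_size: Int64    -- running size counter
    >>;

    -- The UPSERT itself survives intact in the log:
    UPSERT INTO `.metadata/result_sets`
    SELECT $database AS database, $execution_id AS execution_id,
           $result_set_id AS result_set_id, T.row_id AS row_id,
           $expire_at AS expire_at, T.result_set AS result_set,
           T.accumulated_size AS accumulated_size
    FROM AS_TABLE($items) AS T;
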
2025-03-28T09:25:25.249133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:25.255295Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:25.255320Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:25.255337Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:25.255503Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16267 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:25.681288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:27.626584Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:27.628986Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-03-28T09:25:27.630171Z node 1 :KQP_PROXY DEBUG: Received ping session request, request_id: 2, sender: [1:7486789103510016003:2318], trace_id: 01jqe143m28xj8mf01n5600d80 2025-03-28T09:25:27.639052Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 5.000000s actor id: [0:0:0] 2025-03-28T09:25:27.639116Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-28T09:25:27.639143Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
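
The "Table ... updater" sequences in this trace (seen earlier for TableCreation::SimpleUpdateTable, and repeated below for this test) all follow one bootstrap pattern: describe the path, get PathErrorUnknown, propose an ESchemeOpCreateTable, subscribe on the transaction, then "doublecheck" until the column diff is empty. The tables themselves are never printed whole; the sketch below reconstructs two of them from the columns referenced across the queries in this log. Column types and primary-key choices are assumptions — the proxy creates these tables internally, and nobody issues these CREATEs by hand:

    -- Hypothetical shape of the lease table (Text in the DECLAREs is the Utf8 alias).
    CREATE TABLE `.metadata/script_execution_leases` (
        database Utf8,
        execution_id Utf8,
        lease_deadline Timestamp,    -- read and bumped by TScriptLeaseUpdater
        lease_generation Int64,
        expire_at Timestamp,         -- rows past expire_at are filtered out by every query
        PRIMARY KEY (database, execution_id)
    );

    -- Hypothetical shape of the result-row store.
    CREATE TABLE `.metadata/result_sets` (
        database Utf8,
        execution_id Utf8,
        result_set_id Int32,
        row_id Int64,
        expire_at Timestamp,
        result_set String,
        accumulated_size Int64,
        PRIMARY KEY (database, execution_id, result_set_id, row_id)
    );
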
2025-03-28T09:25:27.639165Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:27.639219Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-03-28T09:25:27.639339Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.639375Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.641284Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.641325Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.647026Z node 1 :KQP_PROXY DEBUG: Session not found, targetId: [2:8678280833929343339:121] requestId: 2 2025-03-28T09:25:27.651475Z node 1 :KQP_PROXY DEBUG: TraceId: "01jqe143m28xj8mf01n5600d80", Forwarded response to sender actor, requestId: 2, sender: [1:7486789103510016003:2318], selfId: [1:7486789099215048008:2281], source: [1:7486789099215048008:2281] 2025-03-28T09:25:28.970166Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486789114756873617:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:28.970997Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ec7/r3tmp/tmpSZBQ2K/pdisk_1.dat 2025-03-28T09:25:29.133305Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:29.170357Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:29.170464Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:29.171692Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13356 TServer::EnableGrpc on GrpcPort 13287, node 4 2025-03-28T09:25:29.539290Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:29.539313Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:29.539321Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:29.539473Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-28T09:25:29.604657Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:32.228985Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:32.230010Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:32.231155Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:32.231217Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:32.231243Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:32.231971Z node 4 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-28T09:25:32.232002Z node 4 :KQP_PROXY DEBUG: Updated table service config. 2025-03-28T09:25:32.232018Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:32.233876Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-03-28T09:25:32.233879Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-03-28T09:25:32.233888Z node 4 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-03-28T09:25:32.233896Z node 4 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-03-28T09:25:32.233936Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-03-28T09:25:32.233938Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-03-28T09:25:32.234003Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-03-28T09:25:32.234007Z node 4 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-03-28T09:25:32.234019Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. 
Full table path:/dc-1/.metadata/result_sets 2025-03-28T09:25:32.241736Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480 2025-03-28T09:25:32.243692Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:25:32.245991Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:25:32.252733Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-03-28T09:25:32.252793Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976715660 2025-03-28T09:25:32.253655Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-03-28T09:25:32.253674Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976715659 2025-03-28T09:25:32.254762Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-03-28T09:25:32.254795Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976715658 2025-03-28T09:25:32.365806Z node 4 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976715660. Doublechecking... 2025-03-28T09:25:32.395599Z node 4 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976715659. Doublechecking... 2025-03-28T09:25:32.406373Z node 4 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 28147497671565 ... XY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:35.266139Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: e2f30bb0-73aa087a-77e22a27-9cf421cd, RunDataQuery: -- TSaveScriptFinalStatusActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, finalization_status, meta, customer_supplied_id, user_token, script_sinks, script_secret_names FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_generation FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-03-28T09:25:35.266371Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=NTUyMGQ1NDEtNTkyODAyZjgtZTJjOWU4ZjctNzNjNmE1OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 20, targetId: [4:7486789144821645960:2441] 2025-03-28T09:25:35.266397Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 20 timeout: 300.000000s actor id: [4:7486789144821645962:2665] 2025-03-28T09:25:35.273028Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 20, sender: [4:7486789144821645961:2442], selfId: [4:7486789114756873836:2274], source: [4:7486789144821645960:2441] 2025-03-28T09:25:35.273256Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: e2f30bb0-73aa087a-77e22a27-9cf421cd, State: Get operation info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=NTUyMGQ1NDEtNTkyODAyZjgtZTJjOWU4ZjctNzNjNmE1OTc=, TxId: 01jqe14cx34gtgatj0vykz6g1t 2025-03-28T09:25:35.273742Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: e2f30bb0-73aa087a-77e22a27-9cf421cd, State: Get operation info, RunDataQuery: -- TSaveScriptFinalStatusActor::FinishScriptExecution DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $operation_status AS Int32; DECLARE $execution_status AS Int32; DECLARE $finalization_status AS Int32; DECLARE $issues AS JsonDocument; DECLARE $plan AS JsonDocument; DECLARE $stats AS JsonDocument; DECLARE $ast AS Optional; DECLARE $ast_compressed AS Optional; DECLARE $ast_compression_method AS Optional; DECLARE $operation_ttl AS Interval; DECLARE $customer_supplied_id AS Text; DECLARE $user_token AS Text; DECLARE $script_sinks AS Optional; DECLARE $script_secret_names AS Optional; DECLARE $applicate_script_external_effect_required AS Bool; UPDATE `.metadata/script_executions` SET operation_status = $operation_status, execution_status = $execution_status, finalization_status = IF($applicate_script_external_effect_required, $finalization_status, NULL), issues = $issues, plan = $plan, end_ts = CurrentUtcTimestamp(), stats = $stats, ast = $ast, ast_compressed = $ast_compressed, ast_compression_method = $ast_compression_method, expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL), customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL), user_token = IF($applicate_script_external_effect_required, $user_token, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL) WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2025-03-28T09:25:35.274158Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=NTUyMGQ1NDEtNTkyODAyZjgtZTJjOWU4ZjctNzNjNmE1OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 21, targetId: [4:7486789144821645960:2441] 2025-03-28T09:25:35.274183Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 21 timeout: 300.000000s actor id: [4:7486789144821645983:2670] 2025-03-28T09:25:35.285441Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 21, sender: [4:7486789144821645982:2448], selfId: [4:7486789114756873836:2274], source: [4:7486789144821645960:2441] 2025-03-28T09:25:35.285662Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: e2f30bb0-73aa087a-77e22a27-9cf421cd, State: Update final status, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=NTUyMGQ1NDEtNTkyODAyZjgtZTJjOWU4ZjctNzNjNmE1OTc=, TxId: 2025-03-28T09:25:35.285739Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: e2f30bb0-73aa087a-77e22a27-9cf421cd, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=NTUyMGQ1NDEtNTkyODAyZjgtZTJjOWU4ZjctNzNjNmE1OTc=, TxId: 2025-03-28T09:25:35.285804Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] Finish script execution operation. ExecutionId: e2f30bb0-73aa087a-77e22a27-9cf421cd. UNAVAILABLE. Issues: {
: Error: Lease expired } 2025-03-28T09:25:35.285966Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: e2f30bb0-73aa087a-77e22a27-9cf421cd, successfully finalized script execution operation 2025-03-28T09:25:35.285985Z node 4 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: e2f30bb0-73aa087a-77e22a27-9cf421cd, reply success 2025-03-28T09:25:35.286284Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=NTUyMGQ1NDEtNTkyODAyZjgtZTJjOWU4ZjctNzNjNmE1OTc=, workerId: [4:7486789144821645960:2441], local sessions count: 1 2025-03-28T09:25:35.307506Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jqe14cyb1fcd23neabbxxm7e, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=NWRhOTY1OGItZGQ1N2U0MDYtMzc1ZmExMDItNzQyZGNhMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 22, targetId: [4:7486789136231711101:2360] 2025-03-28T09:25:35.307551Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 22 timeout: 300.000000s actor id: [4:7486789144821646008:2680] 2025-03-28T09:25:35.523371Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:35.778130Z node 4 :KQP_PROXY DEBUG: TraceId: "01jqe14cyb1fcd23neabbxxm7e", Forwarded response to sender actor, requestId: 22, sender: [4:7486789144821646007:2453], selfId: [4:7486789114756873836:2274], source: [4:7486789136231711101:2360] 2025-03-28T09:25:35.780671Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e2f30bb0-73aa087a-77e22a27-9cf421cd, Bootstrap. Database: /dc-1 2025-03-28T09:25:35.780836Z node 4 :KQP_PROXY DEBUG: Request has 18445000919773.770799s seconds to be completed 2025-03-28T09:25:35.782608Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=ZTBmOTg0YmItZGQ2MTgyYzEtOWVlZWM3YzMtNTFiMjMzOTU=, workerId: [4:7486789144821646057:2466], database: /dc-1, longSession: 1, local sessions count: 2 2025-03-28T09:25:35.782740Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:35.782955Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e2f30bb0-73aa087a-77e22a27-9cf421cd, RunDataQuery: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-03-28T09:25:35.783228Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=ZTBmOTg0YmItZGQ2MTgyYzEtOWVlZWM3YzMtNTFiMjMzOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 24, targetId: [4:7486789144821646057:2466] 2025-03-28T09:25:35.783263Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 24 timeout: 300.000000s actor id: [4:7486789144821646059:2704] 2025-03-28T09:25:35.985844Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 24, sender: [4:7486789144821646058:2467], selfId: [4:7486789114756873836:2274], source: [4:7486789144821646057:2466] 2025-03-28T09:25:35.986036Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e2f30bb0-73aa087a-77e22a27-9cf421cd, State: Get lease info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=ZTBmOTg0YmItZGQ2MTgyYzEtOWVlZWM3YzMtNTFiMjMzOTU=, TxId: 01jqe14dkc98ekkskxq1zayjfe 2025-03-28T09:25:35.986169Z node 4 :KQP_PROXY WARN: [TQueryBase] [TScriptLeaseUpdater] TraceId: e2f30bb0-73aa087a-77e22a27-9cf421cd, State: Get lease info, Finish with BAD_REQUEST, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=4&id=ZTBmOTg0YmItZGQ2MTgyYzEtOWVlZWM3YzMtNTFiMjMzOTU=, TxId: 01jqe14dkc98ekkskxq1zayjfe 2025-03-28T09:25:35.986216Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e2f30bb0-73aa087a-77e22a27-9cf421cd, State: Get lease info, Rollback transaction: 01jqe14dkc98ekkskxq1zayjfe 2025-03-28T09:25:35.986383Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=ZTBmOTg0YmItZGQ2MTgyYzEtOWVlZWM3YzMtNTFiMjMzOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 25, targetId: [4:7486789144821646057:2466] 2025-03-28T09:25:35.986415Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 25 timeout: 600.000000s actor id: [4:7486789144821646089:2719] 2025-03-28T09:25:35.988491Z node 4 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 25, sender: [4:7486789144821646088:2475], selfId: [4:7486789114756873836:2274], source: [4:7486789144821646057:2466] 2025-03-28T09:25:35.988590Z node 4 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: e2f30bb0-73aa087a-77e22a27-9cf421cd, State: Get lease info, RollbackTransactionResult: SUCCESS. Issues: 2025-03-28T09:25:35.988842Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=ZTBmOTg0YmItZGQ2MTgyYzEtOWVlZWM3YzMtNTFiMjMzOTU=, workerId: [4:7486789144821646057:2466], local sessions count: 1 2025-03-28T09:25:35.997402Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=NWRhOTY1OGItZGQ1N2U0MDYtMzc1ZmExMDItNzQyZGNhMTg=, workerId: [4:7486789136231711101:2360], local sessions count: 0 >> KqpProxy::NodeDisconnectedTest [GOOD] >> IndexBuildTest::CancelBuild [GOOD] >> TestYmqHttpProxy::TestGetQueueUrl >> VectorIndexBuildTest::BaseCase [GOOD] >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:21.468736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:21.468844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:21.468916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:21.468957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:21.468999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:21.469028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:21.469103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:21.469178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:21.469512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:21.554235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:21.554289Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:21.568868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:21.569138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:21.569317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:21.575207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:21.575431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:21.576092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.576316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:21.578240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:21.579598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:21.579667Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:21.579856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:21.579907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:21.579947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:21.580034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.586427Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:21.729831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:21.730044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.730239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:21.730482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:21.730537Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.732457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.732596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:21.732752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.732807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:21.732852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:21.732883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:21.734534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.734587Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:21.734621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:21.736240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.736277Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.736336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.736381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.740085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:21.741667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:21.741826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:21.742879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.743012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:21.743067Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.743334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:21.743403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.743561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:21.743635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:21.745322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:21.745380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:21.745517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:21.745554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:21.745906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.745966Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:21.746064Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.746100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.746132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.746161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.746192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:21.746228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.746259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:21.746286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:21.746363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:21.746406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:21.746438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:21.754415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.754535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.754588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-03-28T09:25:37.935018Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:37.935082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 8589936749 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:37.935131Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-03-28T09:25:37.935169Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-03-28T09:25:37.936759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-28T09:25:37.936799Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-03-28T09:25:37.936899Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-28T09:25:37.936926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-28T09:25:37.936971Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-28T09:25:37.937006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-28T09:25:37.937034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-03-28T09:25:37.937085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:122:2148] message: TxId: 281474976710760 2025-03-28T09:25:37.937116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-28T09:25:37.937158Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-03-28T09:25:37.937189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-03-28T09:25:37.937242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 13 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-03-28T09:25:37.944492Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-03-28T09:25:37.944570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-03-28T09:25:37.944636Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-03-28T09:25:37.944716Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1172:3025], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, 
AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:25:37.946650Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-28T09:25:37.946770Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1172:3025], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:25:37.946839Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2025-03-28T09:25:37.948591Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-28T09:25:37.948656Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancelled, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [2:1172:3025], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:25:37.948692Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-03-28T09:25:37.948850Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:37.948902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1266:3108] TestWaitNotification: OK 
eventTxId 102 2025-03-28T09:25:37.951128Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-03-28T09:25:37.951322Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 0 } 2025-03-28T09:25:37.954779Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:37.954992Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 228us result status StatusSuccess 2025-03-28T09:25:37.955377Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
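The trace above walks an index build through its cancellation path: the lock, initiate, apply, and unlock schema transactions each complete (LockTxDone, InitiateTxDone, ApplyTxDone, UnlockTxDone flip to 1), the builder moves from Cancellation_Unlocking to Cancelled, and the subsequent get request reports STATE_CANCELLED. A minimal Python sketch of that progression — state names are taken from the trace; the class and method names are invented for illustration and are not YDB code:

    # Hypothetical model of the index-build progression seen in the trace above.
    # Only the tail of the state machine is modeled; state strings mirror the log.
    class IndexBuild:
        def __init__(self):
            self.state = "Locking"
            self.cancel_requested = False
            self.done = {"lock": False, "initiate": False, "apply": False, "unlock": False}

        def request_cancel(self):
            self.cancel_requested = True

        def on_tx_done(self, phase):
            self.done[phase] = True
            if phase == "unlock":
                # Once the unlock tx completes, the terminal state depends on
                # whether cancellation was requested while the build ran.
                self.state = "Cancelled" if self.cancel_requested else "Done"
            elif self.cancel_requested:
                self.state = "Cancellation_Unlocking"

    cancelled = IndexBuild()
    cancelled.request_cancel()
    for phase in ("lock", "initiate", "apply", "unlock"):
        cancelled.on_tx_done(phase)
    assert cancelled.state == "Cancelled"  # matches STATE_CANCELLED in the get response

    happy = IndexBuild()
    for phase in ("lock", "initiate", "apply", "unlock"):
        happy.on_tx_done(phase)
    assert happy.state == "Done"  # matches "Change state from Unlocking to Done" further below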
2025-03-28T09:25:37.958336Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:25:37.958555Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 893us result status StatusPathDoesNotExist 2025-03-28T09:25:37.958727Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000005, drop txId: 281474976710759" Path: "/MyRoot/Table/index1" PathId: 3 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NodeDisconnectedTest [GOOD] Test command err: 2025-03-28T09:25:24.341082Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789098440433563:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:24.341152Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000edd/r3tmp/tmpQdnr8b/pdisk_1.dat 2025-03-28T09:25:24.794165Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:24.810849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:24.810941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:24.814116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20392 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
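In the KqpProxy::NodeDisconnectedTest output that continues below, the proxy schedules a timeout timer per request ("Scheduled timeout timer for requestId: N"), and when TEvPrivate::TEvOnRequestTimeout fires it either replies TIMEOUT or, if the request was already answered and removed, logs "Invalid request info while on request timeout handle". A rough sketch of that per-request watchdog pattern, assuming invented names throughout (this is not YDB's actor API):

    import threading
    import time

    class ProxyWatchdog:
        def __init__(self):
            self.inflight = {}  # requestId -> reply callback

        def send(self, request_id, timeout_s, reply):
            self.inflight[request_id] = reply
            t = threading.Timer(timeout_s, self.on_request_timeout, args=(request_id,))
            t.daemon = True
            t.start()

        def complete(self, request_id, result):
            # Normal completion removes the request before the timer fires.
            reply = self.inflight.pop(request_id, None)
            if reply:
                reply(result)

        def on_request_timeout(self, request_id):
            reply = self.inflight.pop(request_id, None)
            if reply is None:
                print(f"Invalid request info while on request timeout handle. RequestId: {request_id}")
            else:
                reply("TIMEOUT")

    w = ProxyWatchdog()
    w.send(2, 0.01, lambda status: print("requestId 2 ->", status))
    time.sleep(0.05)  # no complete() arrives, so the timer replies TIMEOUT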
2025-03-28T09:25:25.137245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:25.182981Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:25:27.123199Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:27.125184Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:27.141314Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=ZGZmMTA3NTQtMjgxMWY3YTAtOGY1NGYwYjctMWYxN2NiZTc=, workerId: [1:7486789111325336066:2309], database: , longSession: 0, local sessions count: 1 2025-03-28T09:25:27.141407Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:27.142457Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=1&id=ZGZmMTA3NTQtMjgxMWY3YTAtOGY1NGYwYjctMWYxN2NiZTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.010000s timeout: 0.010000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [1:7486789111325336066:2309] 2025-03-28T09:25:27.142492Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 0.010000s actor id: [0:0:0] 2025-03-28T09:25:27.142542Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-28T09:25:27.142576Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-03-28T09:25:27.142601Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:27.142647Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:27.142779Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.142862Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.142893Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.142915Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.142930Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.144666Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGZmMTA3NTQtMjgxMWY3YTAtOGY1NGYwYjctMWYxN2NiZTc=, ActorId: [1:7486789111325336066:2309], ActorState: ReadyState, Reply query error, msg:
: Error: SomeUniqTextForUt proxyRequestId: 2 2025-03-28T09:25:27.144945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789111325336067:2310], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:27.145061Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7486789102735401421:2283], selfId: [1:7486789098440433808:2278], source: [1:7486789111325336066:2309] 2025-03-28T09:25:27.145104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:27.152426Z node 1 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-03-28T09:25:27.152455Z node 1 :KQP_PROXY DEBUG: Invalid request info while on request timeout handle. RequestId: 2 2025-03-28T09:25:34.211543Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:25:34.212010Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:25:34.212150Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:25:34.213878Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:25:34.214166Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:25:34.214391Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000edd/r3tmp/tmpqjohAE/pdisk_1.dat 2025-03-28T09:25:34.546859Z node 2 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:24410 KQP PROXY1 [2:8678280833929343339:121] KQP PROXY2 [3:8678280833929343339:121] SENDER [2:1141:2687] 2025-03-28T09:25:34.871781Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ZGY3N2ZkNGItNDJmMmE4ZDAtZmJhMzc1MzktMzEwNWZhNGU=, workerId: [3:1142:2375], database: , longSession: 1, local sessions count: 1 2025-03-28T09:25:34.871970Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=ZGY3N2ZkNGItNDJmMmE4ZDAtZmJhMzc1MzktMzEwNWZhNGU= 2025-03-28T09:25:34.872625Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ZGY3N2ZkNGItNDJmMmE4ZDAtZmJhMzc1MzktMzEwNWZhNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [3:8678280833929343339:121] 2025-03-28T09:25:34.872688Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 2 timeout: 0.001000s actor id: [0:0:0] 2025-03-28T09:25:34.873052Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=ZGY3N2ZkNGItNDJmMmE4ZDAtZmJhMzc1MzktMzEwNWZhNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [3:1142:2375] 2025-03-28T09:25:34.873092Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 0.001000s actor id: [0:0:0] 2025-03-28T09:25:35.198505Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1143:2688], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:35.198797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:35.199174Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1144:2376], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:35.199298Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:35.222714Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(3) 2025-03-28T09:25:35.222846Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 3 sessionId: ydb://session/3?node_id=3&id=ZGY3N2ZkNGItNDJmMmE4ZDAtZmJhMzc1MzktMzEwNWZhNGU= status: TIMEOUT round: 0 2025-03-28T09:25:35.223031Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(2) 2025-03-28T09:25:35.223069Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 2 sessionId: ydb://session/3?node_id=3&id=ZGY3N2ZkNGItNDJmMmE4ZDAtZmJhMzc1MzktMzEwNWZhNGU= status: TIMEOUT round: 0 2025-03-28T09:25:35.223297Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZGY3N2ZkNGItNDJmMmE4ZDAtZmJhMzc1MzktMzEwNWZhNGU=, ActorId: [3:1142:2375], ActorState: ExecuteState, TraceId: 01jqe14cgs6j2styfah66tcx1y, Create QueryResponse for error on request, msg: 2025-03-28T09:25:35.225761Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-03-28T09:25:35.226007Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:206:2171], selfId: [3:236:2127], source: [3:1142:2375] 2025-03-28T09:25:35.226229Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 2 2025-03-28T09:25:35.229106Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=MWFiZTZlOC1jMWZjYTQ3OC02MmM5OTBlMS01N2U4MmQxYw==, workerId: [3:1165:2380], database: , longSession: 1, local sessions count: 2 2025-03-28T09:25:35.229275Z node 3 :KQP_PROXY D ... 6, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-03-28T09:25:36.850939Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 84, sender: [2:206:2171], selfId: [3:236:2127], source: [3:1389:2511] 2025-03-28T09:25:36.851134Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 56 2025-03-28T09:25:36.852913Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=Zjg1MjVkMS03ZmRmYzM5My00Nzk0MGFjMi03ZWNmOTM5ZA==, workerId: [3:1396:2515], database: , longSession: 1, local sessions count: 56 2025-03-28T09:25:36.853056Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:36.853385Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 57, sender: [2:1141:2687], trace_id: 2025-03-28T09:25:36.853492Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 57 timeout: 0.001000s actor id: [0:0:0] 2025-03-28T09:25:36.865809Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(57) 2025-03-28T09:25:36.865890Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 57 sessionId: ydb://session/3?node_id=3&id=Zjg1MjVkMS03ZmRmYzM5My00Nzk0MGFjMi03ZWNmOTM5ZA== status: TIMEOUT round: 0 2025-03-28T09:25:36.866032Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 57, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-03-28T09:25:36.867977Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=MWEwOWY2MmItYzI4MGY1MzEtM2U0ZGRmNmEtNWUyZGRmNjE=, workerId: [3:1397:2516], database: , longSession: 1, local sessions count: 57 2025-03-28T09:25:36.868138Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=MWEwOWY2MmItYzI4MGY1MzEtM2U0ZGRmNmEtNWUyZGRmNjE= 2025-03-28T09:25:36.868624Z node 2 :KQP_PROXY 
DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=MWEwOWY2MmItYzI4MGY1MzEtM2U0ZGRmNmEtNWUyZGRmNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 58, targetId: [3:8678280833929343339:121] 2025-03-28T09:25:36.868685Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 58 timeout: 0.001000s actor id: [0:0:0] 2025-03-28T09:25:36.869013Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=MWEwOWY2MmItYzI4MGY1MzEtM2U0ZGRmNmEtNWUyZGRmNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 87, targetId: [3:1397:2516] 2025-03-28T09:25:36.869048Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 87 timeout: 0.001000s actor id: [0:0:0] 2025-03-28T09:25:36.870307Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1398:2750], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:36.870448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:36.897464Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1399:2517], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:36.897629Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:36.921010Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(87) 2025-03-28T09:25:36.921106Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 87 sessionId: ydb://session/3?node_id=3&id=MWEwOWY2MmItYzI4MGY1MzEtM2U0ZGRmNmEtNWUyZGRmNjE= status: TIMEOUT round: 0 2025-03-28T09:25:36.921238Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(58) 2025-03-28T09:25:36.921275Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 58 sessionId: ydb://session/3?node_id=3&id=MWEwOWY2MmItYzI4MGY1MzEtM2U0ZGRmNmEtNWUyZGRmNjE= status: TIMEOUT round: 0 2025-03-28T09:25:36.921407Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MWEwOWY2MmItYzI4MGY1MzEtM2U0ZGRmNmEtNWUyZGRmNjE=, ActorId: [3:1397:2516], ActorState: ExecuteState, TraceId: 01jqe14ef59xd5jks0rgj8fp3x, Create QueryResponse for error on request, msg: 2025-03-28T09:25:36.923294Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 58, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-03-28T09:25:36.923531Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 87, sender: [2:206:2171], selfId: [3:236:2127], source: [3:1397:2516] 2025-03-28T09:25:36.923715Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 58 2025-03-28T09:25:36.925407Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=OTQ2NzUwZS1kMTNkYmRlMS1hN2FlZjVhNi0yNzI4NmI3NQ==, workerId: [3:1419:2523], database: , longSession: 1, local sessions count: 58 2025-03-28T09:25:36.925551Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:36.925886Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 59, sender: [2:1141:2687], trace_id: 2025-03-28T09:25:36.926012Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 59 timeout: 0.001000s actor id: [0:0:0] 2025-03-28T09:25:36.926128Z node 3 :KQP_PROXY DEBUG: Received ping session request, has local session: ydb://session/3?node_id=3&id=OTQ2NzUwZS1kMTNkYmRlMS1hN2FlZjVhNi0yNzI4NmI3NQ==, rpc ctrl: [0:0:0], sameNode: 0, trace_id: 2025-03-28T09:25:36.926293Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 59, sender: [2:1141:2687], selfId: [2:206:2171], source: [3:236:2127] 2025-03-28T09:25:36.927996Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=YzI4NTRjM2QtOTM3MTBkMi1iM2Y3ZDQ0MC03MDMzZmFkYg==, workerId: [3:1420:2524], database: , longSession: 1, local sessions count: 59 2025-03-28T09:25:36.928130Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=YzI4NTRjM2QtOTM3MTBkMi1iM2Y3ZDQ0MC03MDMzZmFkYg== 2025-03-28T09:25:36.928547Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=YzI4NTRjM2QtOTM3MTBkMi1iM2Y3ZDQ0MC03MDMzZmFkYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. 
Send request to target, requestId: 60, targetId: [3:8678280833929343339:121] 2025-03-28T09:25:36.928609Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 60 timeout: 0.001000s actor id: [0:0:0] 2025-03-28T09:25:36.929016Z node 3 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: , DatabaseId: , SessionId: ydb://session/3?node_id=3&id=YzI4NTRjM2QtOTM3MTBkMi1iM2Y3ZDQ0MC03MDMzZmFkYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 90, targetId: [3:1420:2524] 2025-03-28T09:25:36.929055Z node 3 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 90 timeout: 0.001000s actor id: [0:0:0] 2025-03-28T09:25:36.930198Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1421:2752], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:36.930349Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:36.957682Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1423:2525], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:36.957896Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:36.968416Z node 3 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(90) 2025-03-28T09:25:36.968494Z node 3 :KQP_PROXY DEBUG: Reply timeout: requestId 90 sessionId: ydb://session/3?node_id=3&id=YzI4NTRjM2QtOTM3MTBkMi1iM2Y3ZDQ0MC03MDMzZmFkYg== status: TIMEOUT round: 0 2025-03-28T09:25:36.968583Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(59) 2025-03-28T09:25:36.968609Z node 2 :KQP_PROXY DEBUG: Invalid request info while on request timeout handle. RequestId: 59 2025-03-28T09:25:36.968699Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzI4NTRjM2QtOTM3MTBkMi1iM2Y3ZDQ0MC03MDMzZmFkYg==, ActorId: [3:1420:2524], ActorState: ExecuteState, TraceId: 01jqe14eh1cjarx8e58egrgkab, Create QueryResponse for error on request, msg: 2025-03-28T09:25:36.970456Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(60) 2025-03-28T09:25:36.970514Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 60 sessionId: ydb://session/3?node_id=3&id=YzI4NTRjM2QtOTM3MTBkMi1iM2Y3ZDQ0MC03MDMzZmFkYg== status: TIMEOUT round: 0 2025-03-28T09:25:36.970650Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 60, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] 2025-03-28T09:25:36.970830Z node 3 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 90, sender: [2:206:2171], selfId: [3:236:2127], source: [3:1420:2524] 2025-03-28T09:25:36.971009Z node 2 :KQP_PROXY ERROR: Unknown sender for proxy response, requestId: 60 2025-03-28T09:25:36.972755Z node 3 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=3&id=ZmEwMjQwZTEtNjdhZjZjOTEtMTJmZTUzZGUtNzIwZWY1MzY=, workerId: [3:1427:2528], database: , longSession: 1, local sessions count: 60 2025-03-28T09:25:36.972903Z node 3 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:36.973289Z node 2 :KQP_PROXY DEBUG: Received ping session request, request_id: 61, sender: [2:1141:2687], trace_id: 2025-03-28T09:25:36.973397Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 61 timeout: 0.001000s actor id: [0:0:0] 2025-03-28T09:25:36.988706Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:36.988837Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:36.991920Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:36.992001Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:37.003842Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(61) 2025-03-28T09:25:37.003931Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 61 sessionId: ydb://session/3?node_id=3&id=ZmEwMjQwZTEtNjdhZjZjOTEtMTJmZTUzZGUtNzIwZWY1MzY= status: TIMEOUT round: 0 2025-03-28T09:25:37.004036Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 61, sender: [2:1141:2687], selfId: [2:206:2171], source: [2:206:2171] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::BaseCase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 
72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:20.751936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:20.752041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.752081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:20.752131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:20.753366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:20.753413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:20.753491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.753580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:20.756196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:20.849261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:20.849315Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:20.869042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:20.869318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:20.869508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:20.874926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:20.875123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:20.875786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.876003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.877678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:20.885739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.885811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:20.885894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.892133Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:21.010341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:21.011901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.013008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:21.014491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:21.014559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.017236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.017385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:21.017558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.017637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:21.017678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:21.017709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:21.019552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.019605Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:21.019648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:21.021510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.021549Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.021609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.021668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.037696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2025-03-28T09:25:21.039540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:21.039728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:21.040853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.040972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:21.041045Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.041359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:21.041412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.041573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:21.041656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:21.043696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:21.043765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:21.043943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:21.043983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:21.044336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.044672Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:21.044780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.044817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.044852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.044883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.044917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:21.044954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.044988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 1:0 2025-03-28T09:25:21.045015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:21.045091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:21.045156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:21.045189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:21.047014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.047134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.047175Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... X_DATASHARD TRACE: StateWork, received event# 269552133, Sender [1:3325:5064], Recipient [1:4583:6240]: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72075186233409561 State: 4 2025-03-28T09:25:37.861792Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvStateChangedResult 2025-03-28T09:25:37.861823Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186233409568 state Offline 2025-03-28T09:25:37.862159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72075186233409561 ShardLocalIdx: 7, at schemeshard: 72075186233409561 2025-03-28T09:25:37.862429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409561, LocalPathId: 6] was 1 Forgetting tablet 72075186233409567 2025-03-28T09:25:37.863079Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [1:4898:6541], Recipient [1:4583:6240]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186233409561 ClientId: [1:4898:6541] ServerId: [1:4900:6543] } 2025-03-28T09:25:37.863114Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-28T09:25:37.863388Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409561 ShardLocalIdx: 8 TxId_Deprecated: 8 TabletID: 72075186233409568 Forgetting tablet 72075186233409568 2025-03-28T09:25:37.863793Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 268829696, Sender [1:4573:6232], Recipient [1:4583:6240]: NKikimr::TEvTablet::TEvTabletDead 2025-03-28T09:25:37.864008Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186233409568 2025-03-28T09:25:37.864105Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186233409568 2025-03-28T09:25:37.865682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409561 2025-03-28T09:25:37.865724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409561, LocalPathId: 6], at schemeshard: 72075186233409561 2025-03-28T09:25:37.865781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409561, LocalPathId: 3] was 5 2025-03-28T09:25:37.866482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72075186233409561 ShardLocalIdx: 8, at schemeshard: 72075186233409561 2025-03-28T09:25:37.866730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409561, LocalPathId: 7] was 1 2025-03-28T09:25:37.870294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409561:7 2025-03-28T09:25:37.870359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409561:7 tabletId 72075186233409567 2025-03-28T09:25:37.871593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72075186233409561 2025-03-28T09:25:37.871807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409561 2025-03-28T09:25:37.871873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409561, LocalPathId: 7], at schemeshard: 72075186233409561 2025-03-28T09:25:37.871974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409561, LocalPathId: 3] was 4 2025-03-28T09:25:37.872314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409561:8 2025-03-28T09:25:37.872383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409561:8 tabletId 72075186233409568 2025-03-28T09:25:37.874998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409561 2025-03-28T09:25:37.934188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1900, transactions count in step: 1, at schemeshard: 72075186233409561 2025-03-28T09:25:37.934309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976735762 AckTo { RawX1: 0 RawX2: 0 } } Step: 1900 MediatorID: 72075186233409563 TabletID: 72075186233409561, at schemeshard: 72075186233409561 2025-03-28T09:25:37.934371Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409561] TDropLock TPropose opId# 281474976735762:0 HandleReply TEvOperationPlan: step# 1900 2025-03-28T09:25:37.934410Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976735762:0 128 -> 240 2025-03-28T09:25:37.936961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976735762:0, at schemeshard: 72075186233409561 2025-03-28T09:25:37.937017Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72075186233409561] TDone opId# 281474976735762:0 ProgressState 2025-03-28T09:25:37.937091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976735762:0 progress is 1/1 2025-03-28T09:25:37.937119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735762 ready parts: 1/1 2025-03-28T09:25:37.937152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976735762:0 progress is 1/1 2025-03-28T09:25:37.937175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735762 ready parts: 1/1 2025-03-28T09:25:37.937205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976735762, ready parts: 1/1, is published: true 2025-03-28T09:25:37.937255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:3325:5064] message: TxId: 
281474976735762 2025-03-28T09:25:37.937290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976735762 ready parts: 1/1 2025-03-28T09:25:37.937318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976735762:0 2025-03-28T09:25:37.937344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976735762:0 2025-03-28T09:25:37.937398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409561, LocalPathId: 2] was 4 2025-03-28T09:25:37.940607Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976735762 2025-03-28T09:25:37.940675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976735762 2025-03-28T09:25:37.940735Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976735762, buildInfoId: 115 2025-03-28T09:25:37.940813Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976735762, buildInfo: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409561, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409561, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:4197:5891], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:25:37.943018Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 115 2025-03-28T09:25:37.943108Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409561, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409561, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:4197:5891], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:25:37.943162Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-28T09:25:37.946675Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 115 
2025-03-28T09:25:37.946788Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 115, Uid: , DomainPathId: [OwnerId: 72075186233409561, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409561, LocalPathId: 2], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:4197:5891], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976735757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976735758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976735761, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976735762, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:25:37.946823Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 115, subscribers count# 1 2025-03-28T09:25:37.946973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2025-03-28T09:25:37.947013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:4331:6003] TestWaitNotification: OK eventTxId 115 2025-03-28T09:25:37.954715Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/CommonDB" IndexBuildId: 115 2025-03-28T09:25:37.954977Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 115 State: STATE_DONE Settings { source_path: "/MyRoot/CommonDB/Table" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] Test command err: 2025-03-28T09:25:24.341154Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789096457598056:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:24.341223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f37/r3tmp/tmpFbOJhg/pdisk_1.dat 2025-03-28T09:25:24.778230Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:24.800829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:24.801030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:24.804797Z node 1 
:HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27814 TServer::EnableGrpc on GrpcPort 8659, node 1 2025-03-28T09:25:25.199420Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:25.199442Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:25.199451Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:25.199578Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:25:25.526600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:25.543492Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:25:27.300783Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:27.303200Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:27.304158Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-28T09:25:27.304193Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-03-28T09:25:27.304216Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:27.304245Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:27.304289Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.304322Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.304730Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.304969Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:27.307810Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-03-28T09:25:27.307827Z node 1 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-03-28T09:25:27.307881Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. 
Full table path:/dc-1/.metadata/result_sets 2025-03-28T09:25:27.308068Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-03-28T09:25:27.308082Z node 1 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-03-28T09:25:27.308096Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-03-28T09:25:27.311503Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-03-28T09:25:27.311528Z node 1 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-03-28T09:25:27.311568Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-03-28T09:25:27.312018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480 2025-03-28T09:25:27.315485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:25:27.317025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:25:27.325636Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 3 } 2025-03-28T09:25:27.325646Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 4 } 2025-03-28T09:25:27.325700Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Subscribe on create table tx: 281474976710658 2025-03-28T09:25:27.325705Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Subscribe on create table tx: 281474976710659 2025-03-28T09:25:27.325778Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 5 } 2025-03-28T09:25:27.325796Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Subscribe on create table tx: 281474976710660 2025-03-28T09:25:27.454777Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Request: create. Transaction completed: 281474976710658. Doublechecking... 2025-03-28T09:25:27.489945Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Request: create. Transaction completed: 281474976710660. Doublechecking... 2025-03-28T09:25:27.501188Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Request: create. Transaction completed: 281474976710659. Doublechecking... 2025-03-28T09:25:27.536557Z node 1 :KQP_PROXY DEBUG: Table result_sets updater. Column diff is empty, finishing 2025-03-28T09:25:27.565433Z node 1 :KQP_PROXY DEBUG: Table script_execution_leases updater. Column diff is empty, finishing 2025-03-28T09:25:27.572375Z node 1 :KQP_PROXY DEBUG: Table script_executions updater. Column diff is empty, finishing 2025-03-28T09:25:27.572946Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 7edc69bf-9191c64d-7513005c-dbc64d05, Bootstrap. 
Database: /dc-1 2025-03-28T09:25:27.594638Z node 1 :KQP_PROXY DEBUG: Request has 18445000919781.957008s seconds to be completed 2025-03-28T09:25:27.597841Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=NWY0ZjgxYTktNGQ1NTc3ZjItZTE5ZDI1YWItZWNhZDJjYTc=, workerId: [1:7486789109342500842:2333], database: /dc-1, longSession: 1, local sessions count: 1 2025-03-28T09:25:27.597983Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:27.599014Z node 1 :KQP_PROXY DEBUG: [TQueryBase] [TCreateScriptOperationQuery] TraceId: 7edc69bf-9191c64d-7513005c-dbc64d05, RunDataQuery: -- TCreateScriptOperationQuery::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $run_script_actor_id AS Text; DECLARE $execution_status AS Int32; DECLARE $execution_mode AS Int32; DECLARE $query_text AS Text; DECLARE $syntax AS Int32; DECLARE $meta AS JsonDocument; DECLARE $lease_duration AS Interval; DECLARE $execution_meta_ttl AS Interval; UPSERT INTO `.metadata/script_executions` (database, execution_id, run_script_actor_id, execution_status, execution_mode, start_ts, query_text, syntax, meta, expire_at) VALUES ($database, $execution_id, $run_script_actor_id, $execution_status, $execution_mode, CurrentUtcTimestamp(), $query_text, $syntax, $meta, CurrentUtcTimestamp() + $execution_meta_ttl); UPSERT INTO `.metadata/script_execution_leases` (database, execution_id, lease_deadline, lease_generation, expire_at) VALUES ($database, $execution_id, CurrentUtcTimestamp() + $lease_duration, 1, CurrentUtcTimestamp() + $execution_meta_ttl); 2025-03-28T09:25:27.601361Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=NWY0ZjgxYTktNGQ1NTc3ZjItZTE5ZDI1YWItZWNhZDJjYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7486789109342500842:2333] 2025-03-28T09:25:27.601430Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7486789109342500844:2470] 2025-03-28T09:25:27.603433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789109342500845:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:27.603546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789109342500853:2338], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:27.603597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:27.607256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-03-28T09:25:27.616143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789109342500859:2339], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:25:27.711029Z node 1 :TX_PROXY ERROR: Actor# [1:7486789109342500901:2502] txid# 281474976710662, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/po ... supplied_id, NULL), user_token = IF($applicate_script_external_effect_required, $user_token, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL) WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2025-03-28T09:25:36.240591Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NDMzMTMxYWEtODI2NDNiMmYtNDYxZTAzNy1hMWZjODI0OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 16, targetId: [2:7486789145576124289:2391] 2025-03-28T09:25:36.240657Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 16 timeout: 300.000000s actor id: [2:7486789149871091651:2609] 2025-03-28T09:25:36.345138Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 25e765c0-985755a6-ddfdda5e-f3116131, Bootstrap. Database: /dc-1 2025-03-28T09:25:36.345394Z node 2 :KQP_PROXY DEBUG: Request has 18445000919773.206246s seconds to be completed 2025-03-28T09:25:36.347204Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=NjIxZjI4MWMtZmQwZGI5MWItNjVmMGQwNjctYjliMmM4ZTI=, workerId: [2:7486789149871091670:2416], database: /dc-1, longSession: 1, local sessions count: 4 2025-03-28T09:25:36.347351Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:36.347677Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 25e765c0-985755a6-ddfdda5e-f3116131, RunDataQuery: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-03-28T09:25:36.347968Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NjIxZjI4MWMtZmQwZGI5MWItNjVmMGQwNjctYjliMmM4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 18, targetId: [2:7486789149871091670:2416] 2025-03-28T09:25:36.348005Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 18 timeout: 300.000000s actor id: [2:7486789149871091672:2622] 2025-03-28T09:25:36.565774Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 18, sender: [2:7486789149871091671:2417], selfId: [2:7486789128396254224:2274], source: [2:7486789149871091670:2416] 2025-03-28T09:25:36.566010Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 25e765c0-985755a6-ddfdda5e-f3116131, State: Get lease info, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjIxZjI4MWMtZmQwZGI5MWItNjVmMGQwNjctYjliMmM4ZTI=, TxId: 01jqe14e5gf3ddt57c3wpfhz18 2025-03-28T09:25:36.566140Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 25e765c0-985755a6-ddfdda5e-f3116131, State: Get lease info, RunDataQuery: -- TScriptLeaseUpdater::OnGetLeaseInfo DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $lease_duration AS Interval; UPDATE `.metadata/script_execution_leases` SET lease_deadline=(CurrentUtcTimestamp() + $lease_duration) WHERE database = $database AND execution_id = $execution_id; 2025-03-28T09:25:36.566456Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=NjIxZjI4MWMtZmQwZGI5MWItNjVmMGQwNjctYjliMmM4ZTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 19, targetId: [2:7486789149871091670:2416] 2025-03-28T09:25:36.566497Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 19 timeout: 300.000000s actor id: [2:7486789149871091698:2634] 2025-03-28T09:25:36.600547Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 16, sender: [2:7486789149871091650:2409], selfId: [2:7486789128396254224:2274], source: [2:7486789145576124289:2391] 2025-03-28T09:25:36.600874Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 23ce9b77-d93b6717-df4f24d3-8cdb6f2c, State: Update final status, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDMzMTMxYWEtODI2NDNiMmYtNDYxZTAzNy1hMWZjODI0OQ==, TxId: 2025-03-28T09:25:36.600933Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TSaveScriptFinalStatusActor] TraceId: 23ce9b77-d93b6717-df4f24d3-8cdb6f2c, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDMzMTMxYWEtODI2NDNiMmYtNDYxZTAzNy1hMWZjODI0OQ==, TxId: 2025-03-28T09:25:36.600945Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] Finish script execution operation. ExecutionId: 23ce9b77-d93b6717-df4f24d3-8cdb6f2c. SUCCESS. 
Issues: 2025-03-28T09:25:36.601634Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=OTRhOTg3Y2MtZTlmNjM5OWEtMmJhMzE2NmEtYWEyOWZhODY=, workerId: [2:7486789145576124174:2349], local sessions count: 3 2025-03-28T09:25:36.602153Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NDMzMTMxYWEtODI2NDNiMmYtNDYxZTAzNy1hMWZjODI0OQ==, workerId: [2:7486789145576124289:2391], local sessions count: 2 2025-03-28T09:25:36.779132Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 19, sender: [2:7486789149871091697:2424], selfId: [2:7486789128396254224:2274], source: [2:7486789149871091670:2416] 2025-03-28T09:25:36.779329Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 25e765c0-985755a6-ddfdda5e-f3116131, State: Update lease, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NjIxZjI4MWMtZmQwZGI5MWItNjVmMGQwNjctYjliMmM4ZTI=, TxId: 2025-03-28T09:25:36.779398Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TScriptLeaseUpdater] TraceId: 25e765c0-985755a6-ddfdda5e-f3116131, State: Update lease, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NjIxZjI4MWMtZmQwZGI5MWItNjVmMGQwNjctYjliMmM4ZTI=, TxId: 2025-03-28T09:25:36.779788Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NjIxZjI4MWMtZmQwZGI5MWItNjVmMGQwNjctYjliMmM4ZTI=, workerId: [2:7486789149871091670:2416], local sessions count: 1 2025-03-28T09:25:36.786394Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jqe14ech7fr8gwy2cgzd8a3x, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=ODA0YmM5ZGYtNmM1ZGRjNWYtZjA3YmZiNDctYTMwNTM5M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 20, targetId: [2:7486789145576124190:2359] 2025-03-28T09:25:36.786441Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 20 timeout: 300.000000s actor id: [2:7486789149871091748:2652] 2025-03-28T09:25:36.794585Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486789128396253999:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:36.794647Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:37.295170Z node 2 :KQP_PROXY DEBUG: TraceId: "01jqe14ech7fr8gwy2cgzd8a3x", Forwarded response to sender actor, requestId: 20, sender: [2:7486789149871091747:2436], selfId: [2:7486789128396254224:2274], source: [2:7486789145576124190:2359] 2025-03-28T09:25:37.298991Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: 25e765c0-985755a6-ddfdda5e-f3116131, Bootstrap. Start TCheckLeaseStatusQueryActor 2025-03-28T09:25:37.299050Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 25e765c0-985755a6-ddfdda5e-f3116131, Bootstrap. 
Database: /dc-1 2025-03-28T09:25:37.300215Z node 2 :KQP_PROXY DEBUG: Request has 18445000919772.251417s seconds to be completed 2025-03-28T09:25:37.301806Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=MmU1OWE3YjUtOGU2YTVhOWEtYjhlNjEzZjQtZjUzMDI1NjM=, workerId: [2:7486789154166059096:2449], database: /dc-1, longSession: 1, local sessions count: 2 2025-03-28T09:25:37.301908Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:25:37.306817Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 25e765c0-985755a6-ddfdda5e-f3116131, RunDataQuery: -- TCheckLeaseStatusQueryActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT operation_status, execution_status, finalization_status, issues, run_script_actor_id FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2025-03-28T09:25:37.307169Z node 2 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=2&id=MmU1OWE3YjUtOGU2YTVhOWEtYjhlNjEzZjQtZjUzMDI1NjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 22, targetId: [2:7486789154166059096:2449] 2025-03-28T09:25:37.307197Z node 2 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 22 timeout: 300.000000s actor id: [2:7486789154166059098:2679] 2025-03-28T09:25:37.355046Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:37.787595Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 22, sender: [2:7486789154166059097:2450], selfId: [2:7486789128396254224:2274], source: [2:7486789154166059096:2449] 2025-03-28T09:25:37.788279Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 25e765c0-985755a6-ddfdda5e-f3116131, TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmU1OWE3YjUtOGU2YTVhOWEtYjhlNjEzZjQtZjUzMDI1NjM=, TxId: 2025-03-28T09:25:37.788380Z node 2 :KQP_PROXY DEBUG: [TQueryBase] [TCheckLeaseStatusQueryActor] TraceId: 25e765c0-985755a6-ddfdda5e-f3116131, Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmU1OWE3YjUtOGU2YTVhOWEtYjhlNjEzZjQtZjUzMDI1NjM=, TxId: 2025-03-28T09:25:37.788471Z node 2 :KQP_PROXY DEBUG: [ScriptExecutions] [TCheckLeaseStatusActor] ExecutionId: 25e765c0-985755a6-ddfdda5e-f3116131, reply success 2025-03-28T09:25:37.788673Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=MmU1OWE3YjUtOGU2YTVhOWEtYjhlNjEzZjQtZjUzMDI1NjM=, workerId: [2:7486789154166059096:2449], local sessions count: 1 2025-03-28T09:25:37.883702Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=ODA0YmM5ZGYtNmM1ZGRjNWYtZjA3YmZiNDctYTMwNTM5M2U=, workerId: [2:7486789145576124190:2359], local sessions count: 0 >> TestKinesisHttpProxy::TestRequestWithWrongRegion >> KqpProxy::LoadedMetadataAfterCompilationTimeout [GOOD] >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag >> TSchemeShardSubDomainTest::Redefine >> TSchemeShardSubDomainTest::Delete >> 
TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable >> TSchemeShardSubDomainTest::RmDir >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas >> TSchemeShardSubDomainTest::CreateWithNoEqualName >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::CreateAndWait >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution >> TSchemeShardSubDomainTest::RmDir [GOOD] >> TSchemeShardSubDomainTest::Redefine [GOOD] >> TSchemeShardSubDomainTest::Delete [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] >> TopicService::OneConsumer_TheRangesOverlap [GOOD] >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] >> TestKinesisHttpProxy::CreateStreamWithInvalidName [GOOD] >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] >> TestKinesisHttpProxy::TestRequestBadJson [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Redefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:41.220576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:41.220691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.220739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:41.220790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:41.221686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:41.221754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:41.221837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.221937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:41.223083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:41.298195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:41.298243Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:41.313631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:41.313896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:41.314084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:41.321920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:41.322784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:41.325719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.326637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:41.331086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.337125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.337249Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.337869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:41.337934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.337999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:41.338078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.344770Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:41.470932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:41.471153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.471343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:41.471631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:41.471695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.475830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.475979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:41.476207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.476269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:41.476307Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:41.476342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:41.478278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.478330Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:41.478377Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:41.480207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.480261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.480293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.480338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.483211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:41.484867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:41.485033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:41.485969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.486068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.486106Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.486316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:41.486352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.486511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:41.486589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:41.488403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.488454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.488642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.488682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:41.489027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.489072Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:41.489178Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.489208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.489243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.489266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.489292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:41.489328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.489359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:41.489379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:41.489425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:41.489466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:41.489491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:41.491089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.491220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.491249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
sReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:25:41.724482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-28T09:25:41.724508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:25:41.724536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:25:41.724564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:25:41.724686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:25:41.724726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-03-28T09:25:41.724764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-28T09:25:41.724794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-28T09:25:41.726100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:41.726166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:41.726196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:25:41.726230Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:25:41.726261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:41.727022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:41.727079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:41.727096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:25:41.727144Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:25:41.727161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:41.727210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-03-28T09:25:41.727669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:41.727703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 
72057594037968897 at ss 72057594046678944 2025-03-28T09:25:41.727719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:41.729031Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-28T09:25:41.729914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.730159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-03-28T09:25:41.730735Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-03-28T09:25:41.731155Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-28T09:25:41.731585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:25:41.731724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 2025-03-28T09:25:41.732527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 Forgetting tablet 72075186233409547 2025-03-28T09:25:41.732848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:41.732960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:41.733441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:41.733475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:41.733568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:41.733674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:25:41.734056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:41.734086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:41.734127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:41.737330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:25:41.737373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to 
deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:25:41.737446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:25:41.737462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-28T09:25:41.739024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:25:41.739068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:25:41.739228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:41.739308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-28T09:25:41.739551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-28T09:25:41.739584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-28T09:25:41.740130Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-28T09:25:41.740235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:25:41.740273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:571:2525] TestWaitNotification: OK eventTxId 104 2025-03-28T09:25:41.740895Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:41.741111Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 227us result status StatusPathDoesNotExist 2025-03-28T09:25:41.741284Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:41.741896Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:41.742042Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 167us result status StatusSuccess 2025-03-28T09:25:41.742282Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Delete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:41.220521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:41.220665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.220726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:41.220771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:41.221664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:41.221708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:41.221773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.221853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:41.223016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:41.308224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:41.308289Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-28T09:25:41.322793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:41.323070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:41.323248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:41.329267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:41.329461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:41.329974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.330131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:41.331811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.337259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.337342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.337918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:41.337998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.338084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:41.338208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.344990Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:41.448171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:41.450110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.451063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:41.452611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:41.452691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.459873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.460017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:41.460253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.460318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:41.460359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:41.460393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:41.462514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.462580Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:41.462621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:41.464630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.464682Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.464741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.464790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.469649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:41.471631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:41.471852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:41.473573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.473696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.473748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.475085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:41.475168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.475351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:41.475433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:25:41.477794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.477866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.478016Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.478056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:41.478453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.478517Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:41.478615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.478647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.478707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.478739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.478772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:41.478806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.478837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:41.478869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:41.478928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:41.478989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:41.479023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:41.481393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.481511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.481548Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:41.644031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:41.644062Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:25:41.644085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:41.644153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:25:41.646790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:41.646851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:41.646876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:41.648603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:41.649318Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-28T09:25:41.654264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.654768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:41.655072Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-03-28T09:25:41.655489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:25:41.655674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409546 2025-03-28T09:25:41.657641Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2025-03-28T09:25:41.658630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:41.658832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-03-28T09:25:41.659275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:41.659493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:41.659554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:41.659690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:41.660119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:41.660174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:41.660233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:41.663421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:25:41.663504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:25:41.663693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:25:41.663729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-28T09:25:41.667974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:25:41.668037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:25:41.668226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:41.668366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:25:41.668593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:25:41.668636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:25:41.668970Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:25:41.669034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:41.669068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:493:2447] TestWaitNotification: OK eventTxId 101 2025-03-28T09:25:41.669396Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:41.669551Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 180us result status StatusPathDoesNotExist 2025-03-28T09:25:41.669670Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: 
\'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:41.669999Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:41.670259Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 254us result status StatusSuccess 2025-03-28T09:25:41.670480Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted 2025-03-28T09:25:41.671648Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-03-28T09:25:41.673542Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2025-03-28T09:25:41.673582Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 2025-03-28T09:25:41.674858Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:41.675057Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 179us result status StatusSuccess 2025-03-28T09:25:41.675346Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:41.220571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:41.220700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.220759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:41.220832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:41.221676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:41.221719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:41.221791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.221869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:41.223073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:41.316841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:41.316892Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-03-28T09:25:41.332303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:41.332571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:41.332750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:41.339385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:41.339635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:41.340329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.340528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:41.342520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.343803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.343872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.344036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:41.344098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.344165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:41.344286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.351190Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:41.468776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:41.469009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.469199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:41.469387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:41.469431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.471499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.471635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:41.471799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.471846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:41.471882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:41.471911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:41.473671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.473727Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:41.473763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:41.475377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.475418Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.475468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.475510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.485439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:41.490154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:41.490388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:41.491485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.491586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.491618Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.491837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:41.491879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.491997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:41.492069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2025-03-28T09:25:41.494025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.494085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.494254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.494295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:41.494711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.494756Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:41.494865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.494904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.494938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.494966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.495002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:41.495037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.495072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:41.495101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:41.495166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:41.495219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:41.495250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:41.497060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.497197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.497236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944 2025-03-28T09:25:41.704678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-03-28T09:25:41.704809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:41.707792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-03-28T09:25:41.707940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-03-28T09:25:41.708290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.708415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.708461Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-28T09:25:41.708773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-03-28T09:25:41.708831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-28T09:25:41.708965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:41.709024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-03-28T09:25:41.709071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-03-28T09:25:41.711139Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.711205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.711386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:41.711514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.711561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-03-28T09:25:41.711602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to 
populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-03-28T09:25:41.711663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.711700Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-03-28T09:25:41.711789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-28T09:25:41.711854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:25:41.711908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-28T09:25:41.711938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:25:41.711970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-03-28T09:25:41.712032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:25:41.712078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-03-28T09:25:41.712110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-03-28T09:25:41.712348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-03-28T09:25:41.712393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-03-28T09:25:41.712424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-28T09:25:41.712462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-28T09:25:41.713396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:41.713519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:41.713562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-03-28T09:25:41.713595Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:25:41.713635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:41.714258Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:41.714361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:41.714389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-03-28T09:25:41.714433Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:25:41.714470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-03-28T09:25:41.714539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-03-28T09:25:41.714570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:566:2475] 2025-03-28T09:25:41.719774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-28T09:25:41.719907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-28T09:25:41.719973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-28T09:25:41.720007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:567:2476] TestWaitNotification: OK eventTxId 100 2025-03-28T09:25:41.720482Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:41.720675Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 206us result status StatusSuccess 2025-03-28T09:25:41.721080Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:25:41.723960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "USER_0" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-03-28T09:25:41.724113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/USER_0, pathId: 0, opId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.724264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-03-28T09:25:41.726658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathIsNotDirectory Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.726826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 2], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: DROP DIRECTORY, path: /MyRoot/USER_0 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:41.220538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:41.220661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.220707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:41.220751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:41.221628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:41.221666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:41.221738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.221808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:41.223033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:41.302479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:41.302538Z node 1 :IMPORT 
WARN: Table profiles were not loaded 2025-03-28T09:25:41.316224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:41.316514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:41.316699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:41.322427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:41.322950Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:41.325972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.326656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:41.331104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.337182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.337270Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.337891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:41.337949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.337997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:41.338078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.344265Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:41.461875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:41.462141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.462377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:41.462628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:41.462677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.464774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.464892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:41.465025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.465080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:41.465139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:41.465206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:41.467235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.467285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:41.467320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:41.468972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.469013Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.469071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.469108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.478990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:41.480862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:41.481032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:41.482067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.482179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.482224Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.482503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:41.482563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.482757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:41.482889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:41.484924Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.484983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.485158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.485191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:41.485617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.485662Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:41.485747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.485779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.485814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.485846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.485896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:41.485940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.485974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:41.486005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:41.486092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:41.486149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:41.486185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:41.487720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.487852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.487896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TDropForceUnsafe TPropose, operationId: 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:25:41.639990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.640031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:41.640148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2025-03-28T09:25:41.640364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:41.640418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:41.641254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:41.641335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:25:41.642879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.642945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.643048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:41.643183Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.643215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:25:41.643262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:25:41.643505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.643547Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-03-28T09:25:41.643649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:41.643681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:41.643716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:41.643749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:41.643797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:25:41.643833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:41.643864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:25:41.643891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:25:41.643951Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:41.643988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-28T09:25:41.644018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-28T09:25:41.644048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-28T09:25:41.644726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:41.644802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:41.644846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:41.644885Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:25:41.644921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:41.645443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:41.645533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:41.645561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:41.645586Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:25:41.645628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:41.645714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:25:41.650216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:41.650279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:41.650391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:41.651098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:41.651163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# 
[OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:41.651234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:41.653166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:41.655282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:41.655406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:41.655522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:25:41.655743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:25:41.655782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:25:41.656211Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:25:41.656289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:41.656334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:339:2330] TestWaitNotification: OK eventTxId 101 2025-03-28T09:25:41.656816Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:41.657123Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 212us result status StatusPathDoesNotExist 2025-03-28T09:25:41.657349Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:41.657833Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:41.658062Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 190us result status StatusSuccess 2025-03-28T09:25:41.658419Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:41.523014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:41.523139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.523190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:41.523235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:41.523289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:41.523315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:41.523368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.523433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:41.523765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:41.599696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:41.599747Z node 1 :IMPORT WARN: Table profiles were not loaded 
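
The schemeshard entries above all share one line shape: an ISO-8601 timestamp, a node id, a component tag such as :FLAT_TX_SCHEMESHARD or :SCHEMESHARD_DESCRIBE, a severity (DEBUG/INFO/NOTICE/WARN), and a free-form message. A minimal sketch for slicing these flattened blobs back into structured records follows; the regex is reverse-engineered from the samples in this report rather than taken from any official YDB log grammar, and the names `Entry` and `iter_entries` are invented here.

```python
import re
from typing import Iterator, NamedTuple

# Line shape observed in this report (an assumption derived from the samples):
#   2025-03-28T09:25:41.615684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: <message>
ENTRY_RE = re.compile(
    r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z) "
    r"node (?P<node>\d+) "
    r":(?P<component>[A-Z_]+) "
    r"(?P<severity>TRACE|DEBUG|INFO|NOTICE|WARN|ERROR): "
)

class Entry(NamedTuple):
    ts: str
    node: int
    component: str
    severity: str
    message: str

def iter_entries(text: str) -> Iterator[Entry]:
    # One entry's message runs up to the timestamp of the next entry,
    # which is what makes the flattened single-line form recoverable.
    matches = list(ENTRY_RE.finditer(text))
    for i, m in enumerate(matches):
        end = matches[i + 1].start() if i + 1 < len(matches) else len(text)
        yield Entry(m.group("ts"), int(m.group("node")), m.group("component"),
                    m.group("severity"), text[m.end():end].strip())
```
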
2025-03-28T09:25:41.615684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:41.615991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:41.616168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:41.626268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:41.626512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:41.627173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.627393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:41.629225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.630473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.630538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.630731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:41.630800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.630860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:41.630953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.637436Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:41.756780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:41.757005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.757203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:41.757476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:41.757543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.759730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.759866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:41.760037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.760096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:41.760127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:41.760157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:41.761928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.762015Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:41.762050Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:41.763604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.763647Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.763683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.763720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.766986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:41.768601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:41.768813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:41.769739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.769854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.769893Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.770145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:41.770213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.770372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:41.770448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:25:41.772201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.772261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.772428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.772474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:41.772823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.772864Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:41.772949Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.772986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.773030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.773057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.773094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:41.773127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.773158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:41.773183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:41.773238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:41.773286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:41.773322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:41.775034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.775136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.775169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.834429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-28T09:25:41.834667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-28T09:25:41.834726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-28T09:25:41.834865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:41.834913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:25:41.834954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:25:41.836640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.836687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:41.836807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:25:41.836897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.836935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:25:41.836976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-03-28T09:25:41.837265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.837303Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:25:41.837368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:41.837411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:41.837444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:41.837466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:41.837505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:25:41.837535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:41.837577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:25:41.837599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:25:41.837643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:25:41.837669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-28T09:25:41.837690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-28T09:25:41.837719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-03-28T09:25:41.838285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:41.838379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:41.838429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:41.838461Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-28T09:25:41.838488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:41.839084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:41.839152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:41.839179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:41.839214Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-28T09:25:41.839244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:25:41.839303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:25:41.842329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:41.842785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 100, wait until txId: 101 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-03-28T09:25:41.843065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-28T09:25:41.843121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-03-28T09:25:41.843211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:25:41.843232Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:25:41.843693Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-28T09:25:41.843771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-28T09:25:41.843804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:331:2322] 2025-03-28T09:25:41.843962Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:25:41.844036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:41.844057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:331:2322] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-03-28T09:25:41.844501Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:41.844669Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir/USER_0" took 182us result status StatusSuccess 2025-03-28T09:25:41.845064Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.845537Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:41.845684Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/dir" took 158us result status StatusSuccess 2025-03-28T09:25:41.845960Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: 
StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:41.220557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:41.220689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.220734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:41.220782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:41.221673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:41.221714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:41.221785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.221876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:41.223091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:41.298284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:41.298337Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-03-28T09:25:41.313694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:41.313973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:41.314129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:41.321597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:41.322754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:41.325719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.326608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:41.330920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.337136Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.337259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.337889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:41.337932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.337985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:41.338072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.344258Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:41.484920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:41.485172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.485402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:41.485641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:41.485694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.488564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.488695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:41.488863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.488908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:41.488944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:41.488975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:41.491010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.491080Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:41.491115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:41.492752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.492797Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.492855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.492904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.496458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:41.498276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:41.498429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:41.499488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.499617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.499664Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.499980Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:41.500030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.500184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:41.500251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2025-03-28T09:25:41.502340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.502396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.502560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.502597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:41.503009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.503075Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:41.503167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.503198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.503233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.503261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.503295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:41.503334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.503369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:41.503393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:41.503481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:41.503559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:41.503590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:41.505326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.505435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.505467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1404 } } 2025-03-28T09:25:41.908227Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 130 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1404 } } 2025-03-28T09:25:41.908479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:41.908566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:41.908596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:25:41.908629Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 6 2025-03-28T09:25:41.908664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:25:41.908971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 500 RawX2: 4294969748 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:25:41.909004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2025-03-28T09:25:41.909126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 500 RawX2: 4294969748 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:25:41.909162Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:25:41.909230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 500 RawX2: 4294969748 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:25:41.909276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.909303Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.909327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-03-28T09:25:41.909358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 
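
Across these tests every sub-operation is traced through numeric internal states by "Change state for txid N:P A -> B" lines: the create/alter operations walk 2 -> 3 -> 128 -> 240, the forced subdomain drop takes 128 -> 130, and the table create above finishes 129 -> 240. Below is a small sketch that recovers those chains per operation id; the state numbers are schemeshard-internal and treated as opaque here, and the terminal-state check is only a triage heuristic suggested by this particular run.

```python
import re
from collections import defaultdict

STATE_RE = re.compile(
    r"Change state for txid (?P<op>\d+:\d+) (?P<src>\d+) -> (?P<dst>\d+)"
)

def transition_chains(log_text: str) -> dict[str, list[tuple[int, int]]]:
    """Collect observed state transitions per operation id (e.g. '101:0')."""
    chains: dict[str, list[tuple[int, int]]] = defaultdict(list)
    for m in STATE_RE.finditer(log_text):
        chains[m.group("op")].append((int(m.group("src")), int(m.group("dst"))))
    return dict(chains)

def suspicious(chains: dict[str, list[tuple[int, int]]]) -> list[str]:
    # Triage heuristic from this run only: successful chains here end in 240
    # (TDone) and a forced drop legitimately stops at 130 before TDeleteParts,
    # so anything ending elsewhere deserves a manual look, not an auto-verdict.
    return [op for op, steps in chains.items() if steps[-1][1] not in (240, 130)]
```
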
2025-03-28T09:25:41.909703Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:41.909754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:41.909770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:25:41.909793Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-28T09:25:41.909813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:25:41.909854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-28T09:25:41.913330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.913469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:25:41.913947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.914366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.914415Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:25:41.914519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:25:41.914557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:41.914616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:25:41.914660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:41.914734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:25:41.914808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:308:2299] message: TxId: 102 2025-03-28T09:25:41.914865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:41.914919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:25:41.914951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:25:41.915073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:25:41.915395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:25:41.916859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:41.916918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:528:2472] TestWaitNotification: OK eventTxId 102 
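
One constant is worth calling out when reading the publication lines: a dropped path is acked at Version: 18446744073709551615, and StatusPathDoesNotExist replies carry the same value as PathId. That is 2^64 - 1, the maximal uint64, serving as a tombstone, whereas live paths carry small increasing versions (3, 5, 7 in the describes above). A two-line check, with the constant name invented here:

```python
# Max uint64; in these logs it appears both as the publication version of a
# dropped path and as the PathId in StatusPathDoesNotExist replies.
TOMBSTONE = 2**64 - 1  # == 18446744073709551615

def is_tombstone(version: int) -> bool:
    """Heuristic for reading this report: max-uint64 marks a dropped path."""
    return version == TOMBSTONE

assert TOMBSTONE == 18446744073709551615
assert is_tombstone(18446744073709551615) and not is_tombstone(7)
```
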
2025-03-28T09:25:41.917461Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:41.917685Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 253us result status StatusSuccess 2025-03-28T09:25:41.918053Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.918499Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:41.918660Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 164us result status StatusSuccess 2025-03-28T09:25:41.918967Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 130 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: 
false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:41.220514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:41.220645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.220683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:41.220724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:41.221700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:41.221772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:41.221862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.221946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:41.223082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:41.313655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:41.313706Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:41.329203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
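
Each embedded unittest log in this report is introduced by a separator of the form "------- [TM] {asan, default-linux-x86_64, release} <suite path>/unittest >> Suite::Test [GOOD]". A sketch that pulls the per-test verdicts out of the whole report is below; the separator regex is inferred from the handful of samples visible here and may need loosening for other verdict tags or build flavors.

```python
import re

# Separator shape observed in this report (an assumption from the samples):
#   ------- [TM] {asan, default-linux-x86_64, release} <path> >> Suite::Test [GOOD]
CASE_RE = re.compile(
    r"-{3,}\s+\[TM\]\s+\{[^}]*\}\s+(?P<path>\S+)\s+>>\s+"
    r"(?P<case>[\w:]+)\s+\[(?P<verdict>[A-Z]+)\]"
)

def summarize(report_text: str) -> dict[str, str]:
    """Map 'Suite::Test' -> verdict (e.g. GOOD) for every embedded test log."""
    return {m.group("case"): m.group("verdict")
            for m in CASE_RE.finditer(report_text)}
```
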
2025-03-28T09:25:41.329476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:41.329604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:41.335374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:41.335594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:41.336275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.336484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:41.338534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.339794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.339887Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.340041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:41.340107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.340188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:41.340274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.346673Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:41.459201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:41.459391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.459561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:41.459748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:41.459790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.461937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.462057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:41.462229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.462285Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:41.462320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:41.462350Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:41.464175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.464229Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:41.464263Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:41.465767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.465814Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.465848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.465888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.469495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:41.471181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:41.471370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:41.473602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.473728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.473777Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.475081Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:41.475148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.475345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:41.475427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:41.477449Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.477524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.477691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.477729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:41.478111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.478158Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:41.478246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.478278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.478312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.478343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.478376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:41.478423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.478462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:41.478488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:41.478544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:41.478603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:41.478633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:41.481219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.481354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.481392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
hemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:25:41.981838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.981990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.982304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.982368Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:25:41.982467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:25:41.982506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:41.982572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:25:41.982603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:41.982639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:25:41.982744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:627:2560] message: TxId: 102 2025-03-28T09:25:41.982794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:41.982851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:25:41.982889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:25:41.982995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:25:41.988161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:41.988215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:628:2561] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 108 2025-03-28T09:25:41.990704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "USER_3" } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:41.990905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/USER_3, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.991041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-03-28T09:25:41.993080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/USER_3\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 108 SchemeshardId: 72057594046678944 PathId: 5 PathCreateTxId: 106, at schemeshard: 72057594046678944 2025-03-28T09:25:41.993224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: 
/MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 5], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/USER_3 TestModificationResult got TxId: 108, wait until txId: 108 2025-03-28T09:25:41.993711Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:41.993933Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 169us result status StatusSuccess 2025-03-28T09:25:41.994234Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.994787Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:41.995017Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 238us result status StatusSuccess 2025-03-28T09:25:41.995346Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000005 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "USER_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 
1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.996042Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:41.996185Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 147us result status StatusSuccess 2025-03-28T09:25:41.996457Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_2" PathDescription { Self { Name: "USER_2" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 104 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.997029Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:41.997169Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_3" took 140us result status StatusSuccess 2025-03-28T09:25:41.997465Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_3" PathDescription { 
Self { Name: "USER_3" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 106 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 } DomainKey { SchemeShard: 72057594046678944 PathId: 5 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 5 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:41.777411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:41.777519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.777573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:41.777615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:41.777658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:41.777685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:41.777740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.777811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:41.778129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:41.863168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:41.863218Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:41.877017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-03-28T09:25:41.877256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:41.877416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:41.882530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:41.882752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:41.883378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.883607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:41.885259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.886394Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.886455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.886612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:41.886663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.886736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:41.886822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.892695Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:42.027796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:42.028026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:42.028233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:42.028468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:42.028521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:42.030862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:42.030997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:42.031170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:42.031219Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:42.031254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:42.031285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:42.033651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:42.033711Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:42.033743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:42.035813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:42.035857Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:42.035895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:42.035934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:42.040984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:42.043231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:42.043419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:42.044570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:42.044715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:42.044764Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:42.045004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:42.045040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:42.045193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:42.045270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:42.047215Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:42.047272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:42.047410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:42.047437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:42.047747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:42.047786Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:42.047878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:42.047909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:42.047943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:42.047966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:42.047990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:42.048023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:42.048044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:42.048076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:42.048147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:42.048213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:42.048248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:42.049956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:42.050085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:42.050125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:25:42.050164Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:25:42.050215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:42.050326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:25:42.053678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:25:42.054283Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-03-28T09:25:42.054983Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-28T09:25:42.072868Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-28T09:25:42.075439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { Coordinators: 1 Mediators: 1 Name: "USER_0" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:42.075718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:25:42.075894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, at schemeshard: 72057594046678944 2025-03-28T09:25:42.076685Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:25:42.080909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: plan resolution is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:42.081062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: plan resolution is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2025-03-28T09:25:42.081538Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-03-28T09:25:42.081780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-28T09:25:42.081877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-03-28T09:25:42.082250Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-28T09:25:42.082361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-28T09:25:42.082396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:282:2273] TestWaitNotification: OK eventTxId 100 2025-03-28T09:25:42.082827Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:42.083019Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 177us result status StatusPathDoesNotExist 2025-03-28T09:25:42.083197Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' 
(id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TestYmqHttpProxy::TestSendMessageFifoQueue [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName [GOOD] >> YdbOlapStore::LogGrepNonExisting [GOOD] >> YdbOlapStore::LogGrepExisting >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey [GOOD] >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions >> TestKinesisHttpProxy::TestConsumersEmptyNames >> DemoTx::Scenario_2 [GOOD] >> TestKinesisHttpProxy::GoodRequestPutRecords [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithEmptyName >> TopicService::DifferentConsumers_TheRangesOverlap >> TestYmqHttpProxy::TestSendMessageWithAttributes >> TestYmqHttpProxy::TestReceiveMessage [GOOD] >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey >> TestKinesisHttpProxy::DoubleCreateStream >> KqpIndexLookupJoin::CheckAllKeyTypesCast [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable >> TSchemeShardSubDomainTest::LS >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false >> TestYmqHttpProxy::TestReceiveMessageWithAttributes >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain >> DemoTx::Scenario_3 >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside >> TSchemeShardSubDomainTest::RedefineErrors >> TestYmqHttpProxy::TestGetQueueUrl [GOOD] >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] >> TSchemeShardSubDomainTest::LS [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpIndexLookupJoin::CheckAllKeyTypesCast [GOOD] Test command err: Trying to start YDB, gRPC: 17427, MsgBus: 28129 2025-03-28T09:22:59.405738Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788475539945786:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:22:59.406187Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016f2/r3tmp/tmpwz12OQ/pdisk_1.dat 2025-03-28T09:23:00.131463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:00.131584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:00.139666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:00.191004Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17427, node 1 2025-03-28T09:23:00.595839Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:23:00.596208Z node 1 :NET_CLASSIFIER WARN: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:00.596215Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:00.596220Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:00.596352Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28129 TClient is connected to server localhost:28129 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:01.451415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:01.476257Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:01.501207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:23:01.675581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:01.912717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:02.037885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:04.219655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788497014783929:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:04.219740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:04.390779Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788475539945786:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:04.390858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:04.602131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.661999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.747242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.789324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.861870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:23:04.925816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:23:05.002820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788501309751743:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:05.002887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:05.003072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788501309751748:2462], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:23:05.006848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:23:05.021924Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:23:05.022210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788501309751750:2463], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:23:05.114530Z node 1 :TX_PROXY ERROR: Actor# [1:7486788501309751805:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:23:06.468975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:23:06.523660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:23:06.605625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:23:06.670978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
<main>: Warning: Execution, code: 1060
:8:40: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 65059, MsgBus: 3136 2025-03-28T09:23:09.032434Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788512652925221:2202];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0016f2/r3tmp/tmpiQuD2q/pdisk_1.dat 2025-03-28T09:23:09.158772Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:23:09.257652Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:09.276272Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:09.276344Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:09.283551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65059, node 2 2025-03-28T09:23:09.527274Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:09.527291Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:09.527297Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:09.527386Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3136 TClient is connected to server localhost:3136 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 1844674407370 ... in>:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-28T09:25:03.748753Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWU0YTdjZWEtMzJmZjZhZGEtOTEzOGFiN2MtODJiMzNiZmI=, ActorId: [2:7486788542717698760:2489], ActorState: ExecuteState, TraceId: 01jqe13e3hfsvqfq6jd43d2dyc, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:25:07.092463Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789023754048392:5973], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-28T09:25:07.092714Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWU0YTdjZWEtMzJmZjZhZGEtOTEzOGFiN2MtODJiMzNiZmI=, ActorId: [2:7486788542717698760:2489], ActorState: ExecuteState, TraceId: 01jqe13hby1hqwqmwfcfgastcc, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:25:07.119679Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789023754048405:5979], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-28T09:25:07.120010Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWU0YTdjZWEtMzJmZjZhZGEtOTEzOGFiN2MtODJiMzNiZmI=, ActorId: [2:7486788542717698760:2489], ActorState: ExecuteState, TraceId: 01jqe13hcv7wfm08t28j5tdr3y, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:25:12.832496Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789045228885425:6125], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-28T09:25:12.832664Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWU0YTdjZWEtMzJmZjZhZGEtOTEzOGFiN2MtODJiMzNiZmI=, ActorId: [2:7486788542717698760:2489], ActorState: ExecuteState, TraceId: 01jqe13pzganx3ss0t130d30ja, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:25:15.592223Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789058113787589:6204], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-28T09:25:15.592514Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWU0YTdjZWEtMzJmZjZhZGEtOTEzOGFiN2MtODJiMzNiZmI=, ActorId: [2:7486788542717698760:2489], ActorState: ExecuteState, TraceId: 01jqe13snkf44h0f91j9j4q1hn, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:25:15.619177Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789058113787603:6210], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-28T09:25:15.619431Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWU0YTdjZWEtMzJmZjZhZGEtOTEzOGFiN2MtODJiMzNiZmI=, ActorId: [2:7486788542717698760:2489], ActorState: ExecuteState, TraceId: 01jqe13spe48t3r19phqyj1khv, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:25:15.648904Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789058113787616:6216], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:3:36: Error: At function: EquiJoin
:3:36: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-28T09:25:15.649195Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWU0YTdjZWEtMzJmZjZhZGEtOTEzOGFiN2MtODJiMzNiZmI=, ActorId: [2:7486788542717698760:2489], ActorState: ExecuteState, TraceId: 01jqe13sq90pqff8ttgwrkdczb, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:25:21.789768Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789083883591920:6362], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-28T09:25:21.790041Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWU0YTdjZWEtMzJmZjZhZGEtOTEzOGFiN2MtODJiMzNiZmI=, ActorId: [2:7486788542717698760:2489], ActorState: ExecuteState, TraceId: 01jqe13zq5a4wgerd829c9e72v, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:25:21.821485Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789083883591934:6368], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-28T09:25:21.821759Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWU0YTdjZWEtMzJmZjZhZGEtOTEzOGFiN2MtODJiMzNiZmI=, ActorId: [2:7486788542717698760:2489], ActorState: ExecuteState, TraceId: 01jqe13zr4d5ngjp27xyyfwdya, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:25:25.013455Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789096768494092:6444], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-28T09:25:25.016279Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWU0YTdjZWEtMzJmZjZhZGEtOTEzOGFiN2MtODJiMzNiZmI=, ActorId: [2:7486788542717698760:2489], ActorState: ExecuteState, TraceId: 01jqe142vv1640kg7gy6cweta0, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:25:25.051431Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789101063461401:6450], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional) 2025-03-28T09:25:25.053628Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWU0YTdjZWEtMzJmZjZhZGEtOTEzOGFiN2MtODJiMzNiZmI=, ActorId: [2:7486788542717698760:2489], ActorState: ExecuteState, TraceId: 01jqe142x57wm49v24g61y7px2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:25:32.241408Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789131128233047:6598], status: GENERIC_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
<main>:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional)
2025-03-28T09:25:32.241714Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWU0YTdjZWEtMzJmZjZhZGEtOTEzOGFiN2MtODJiMzNiZmI=, ActorId: [2:7486788542717698760:2489], ActorState: ExecuteState, TraceId: 01jqe149xnbhgyp0az600bzmk2, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
2025-03-28T09:25:35.852526Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789144013135209:6676], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional)
2025-03-28T09:25:35.852760Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWU0YTdjZWEtMzJmZjZhZGEtOTEzOGFiN2MtODJiMzNiZmI=, ActorId: [2:7486788542717698760:2489], ActorState: ExecuteState, TraceId: 01jqe14deqcms10ep2qxadwgfy, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
2025-03-28T09:25:35.883123Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789144013135222:6682], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional)
2025-03-28T09:25:35.885299Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWU0YTdjZWEtMzJmZjZhZGEtOTEzOGFiN2MtODJiMzNiZmI=, ActorId: [2:7486788542717698760:2489], ActorState: ExecuteState, TraceId: 01jqe14dfn17ad3j7676ybsrnh, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
2025-03-28T09:25:35.913320Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789144013135235:6688], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:3:34: Error: At function: EquiJoin
:3:34: Error: Cannot compare key columns (l.Key has type: Optional, r.Key has type: Optional)
2025-03-28T09:25:35.913611Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWU0YTdjZWEtMzJmZjZhZGEtOTEzOGFiN2MtODJiMzNiZmI=, ActorId: [2:7486788542717698760:2489], ActorState: ExecuteState, TraceId: 01jqe14dgm1mdypbnrkfnvpwy6, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD]
>> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::LS [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-28T09:25:44.707879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-28T09:25:44.707964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:25:44.707991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-28T09:25:44.708025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-28T09:25:44.708067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-28T09:25:44.708088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-28T09:25:44.708124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:25:44.708183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-28T09:25:44.708434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:25:44.792474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:25:44.792533Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:25:44.814738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:25:44.815003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-28T09:25:44.815202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-28T09:25:44.824449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-28T09:25:44.824671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-28T09:25:44.825173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-28T09:25:44.825339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-28T09:25:44.826819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:25:44.827793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:25:44.827841Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:25:44.827981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-28T09:25:44.828031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:25:44.828063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-28T09:25:44.828124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-28T09:25:44.833723Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-28T09:25:44.948198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-28T09:25:44.948446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:25:44.948652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-28T09:25:44.948892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-28T09:25:44.948939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:25:44.951340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-28T09:25:44.951496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-28T09:25:44.951725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:25:44.951772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-28T09:25:44.951806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-28T09:25:44.951839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-28T09:25:44.953883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:25:44.953950Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-28T09:25:44.953984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-28T09:25:44.955752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:25:44.955803Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:25:44.955839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:25:44.955883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-28T09:25:44.959563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-28T09:25:44.961405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-28T09:25:44.961616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-28T09:25:44.962666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-28T09:25:44.962849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:25:44.962901Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:25:44.963179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-28T09:25:44.963231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:25:44.963366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-28T09:25:44.963424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-28T09:25:44.965252Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:25:44.965309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:25:44.965488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:25:44.965522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
FAKE_COORDINATOR: Erasing txId 1
2025-03-28T09:25:44.965924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:25:44.965968Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-28T09:25:44.966063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:25:44.966104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:25:44.966140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:25:44.966166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:25:44.966197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-28T09:25:44.966235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:25:44.966265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-28T09:25:44.966291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-28T09:25:44.966366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-28T09:25:44.966418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-28T09:25:44.966450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-28T09:25:44.968180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-28T09:25:44.968284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-28T09:25:44.968316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... trongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816
2025-03-28T09:25:45.097047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 100 at step: 5000002
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002
2025-03-28T09:25:45.097354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-28T09:25:45.097458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:25:45.097506Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944
2025-03-28T09:25:45.097815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240
2025-03-28T09:25:45.097866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944
2025-03-28T09:25:45.098025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-28T09:25:45.098086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5
2025-03-28T09:25:45.098149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 100
2025-03-28T09:25:45.099896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:25:45.099945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:25:45.100096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-03-28T09:25:45.100181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:25:45.100221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1
2025-03-28T09:25:45.100258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2
2025-03-28T09:25:45.100569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944
2025-03-28T09:25:45.100661Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState
2025-03-28T09:25:45.100764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1
2025-03-28T09:25:45.100807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-03-28T09:25:45.100867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1
2025-03-28T09:25:45.100902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-03-28T09:25:45.100934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false
2025-03-28T09:25:45.100971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-03-28T09:25:45.101021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0
2025-03-28T09:25:45.101058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0
2025-03-28T09:25:45.101215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6
2025-03-28T09:25:45.101252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0
2025-03-28T09:25:45.101285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5
2025-03-28T09:25:45.101327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3
2025-03-28T09:25:45.101920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100
2025-03-28T09:25:45.101995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100
2025-03-28T09:25:45.102024Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100
2025-03-28T09:25:45.102060Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5
2025-03-28T09:25:45.102101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-28T09:25:45.102984Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100
2025-03-28T09:25:45.103066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100
2025-03-28T09:25:45.103102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100
2025-03-28T09:25:45.103129Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3
2025-03-28T09:25:45.103153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5
2025-03-28T09:25:45.103230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0
2025-03-28T09:25:45.106140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100
2025-03-28T09:25:45.106229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100
TestModificationResult got TxId: 100, wait until txId: 100
TestWaitNotification wait txId: 100
2025-03-28T09:25:45.106454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion
2025-03-28T09:25:45.106498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100
2025-03-28T09:25:45.106912Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944
2025-03-28T09:25:45.106998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult
2025-03-28T09:25:45.107034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:453:2407]
TestWaitNotification: OK eventTxId 100
2025-03-28T09:25:45.107507Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-28T09:25:45.107717Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 248us result status StatusSuccess
2025-03-28T09:25:45.108137Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:25:45.108692Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-28T09:25:45.108859Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 142us result status StatusSuccess
2025-03-28T09:25:45.109210Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD]
>> TSchemeShardSubDomainTest::RedefineErrors [GOOD]
>> TestKinesisHttpProxy::TestRequestWithWrongRegion [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-28T09:25:45.021428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-28T09:25:45.021550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:25:45.021588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-28T09:25:45.021635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-28T09:25:45.021696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-28T09:25:45.021723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-28T09:25:45.021776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:25:45.021853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-28T09:25:45.022182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:25:45.107330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:25:45.107385Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:25:45.121927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:25:45.122225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-28T09:25:45.122391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-28T09:25:45.127896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-28T09:25:45.128089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-28T09:25:45.128695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-28T09:25:45.128887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-28T09:25:45.130823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:25:45.132041Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:25:45.132097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:25:45.132294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-28T09:25:45.132353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:25:45.132395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-28T09:25:45.132461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-28T09:25:45.138707Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-28T09:25:45.265194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-28T09:25:45.265444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:25:45.265660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-28T09:25:45.265942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-28T09:25:45.266002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:25:45.275497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-28T09:25:45.275642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-28T09:25:45.275833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:25:45.275883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-28T09:25:45.275919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-28T09:25:45.275957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-28T09:25:45.290778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:25:45.290852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-28T09:25:45.290892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-28T09:25:45.297791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:25:45.297845Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:25:45.297886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:25:45.297939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-28T09:25:45.301671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-28T09:25:45.311959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-28T09:25:45.312254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-28T09:25:45.313405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-28T09:25:45.313547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:25:45.313616Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:25:45.313945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-28T09:25:45.314031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:25:45.314216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-28T09:25:45.314300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-28T09:25:45.318118Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:25:45.318190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:25:45.318379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:25:45.318420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
FAKE_COORDINATOR: Erasing txId 1
2025-03-28T09:25:45.318877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:25:45.318930Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-28T09:25:45.319090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:25:45.319133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:25:45.319171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:25:45.319203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:25:45.319241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-28T09:25:45.319284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:25:45.319322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-28T09:25:45.319369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-28T09:25:45.319450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-28T09:25:45.319525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-28T09:25:45.319565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-28T09:25:45.321467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-28T09:25:45.321610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-28T09:25:45.321650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 7594046316545
2025-03-28T09:25:45.397014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816
2025-03-28T09:25:45.397138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 100 at step: 5000002
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002
2025-03-28T09:25:45.397354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-28T09:25:45.397479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:25:45.397506Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944
2025-03-28T09:25:45.397815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240
2025-03-28T09:25:45.397866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944
2025-03-28T09:25:45.398025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-28T09:25:45.398086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-03-28T09:25:45.398149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 100
2025-03-28T09:25:45.400320Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:25:45.400377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:25:45.400534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-03-28T09:25:45.400668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:25:45.400704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1
2025-03-28T09:25:45.400747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2
2025-03-28T09:25:45.400801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944
2025-03-28T09:25:45.400838Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState
2025-03-28T09:25:45.400947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1
2025-03-28T09:25:45.400980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-03-28T09:25:45.401051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1
2025-03-28T09:25:45.401094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-03-28T09:25:45.401132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false
2025-03-28T09:25:45.401204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1
2025-03-28T09:25:45.401244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0
2025-03-28T09:25:45.401282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0
2025-03-28T09:25:45.401350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-03-28T09:25:45.401393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0
2025-03-28T09:25:45.401430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5
2025-03-28T09:25:45.401456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3
2025-03-28T09:25:45.402575Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100
2025-03-28T09:25:45.402678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100
2025-03-28T09:25:45.402770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100
2025-03-28T09:25:45.402813Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5
2025-03-28T09:25:45.402851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-28T09:25:45.403889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100
2025-03-28T09:25:45.403974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100
2025-03-28T09:25:45.404004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100
2025-03-28T09:25:45.404030Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3
2025-03-28T09:25:45.404059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2
2025-03-28T09:25:45.404170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0
2025-03-28T09:25:45.406413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100
2025-03-28T09:25:45.407432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100
TestModificationResult got TxId: 100, wait until txId: 100
TestWaitNotification wait txId: 100
2025-03-28T09:25:45.407689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion
2025-03-28T09:25:45.407735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100
2025-03-28T09:25:45.408144Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944
2025-03-28T09:25:45.408246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult
2025-03-28T09:25:45.408285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:309:2300]
TestWaitNotification: OK eventTxId 100
2025-03-28T09:25:45.408727Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-28T09:25:45.408949Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 199us result status StatusSuccess
2025-03-28T09:25:45.409339Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:25:45.409849Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-28T09:25:45.410028Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 190us result status StatusSuccess
2025-03-28T09:25:45.410375Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD]
Test command err:
2025-03-28T09:25:24.341831Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789099480119095:2067];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:25:24.341887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f4e/r3tmp/tmpwcbNau/pdisk_1.dat
2025-03-28T09:25:24.773434Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:25:24.793240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:25:24.794018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:25:24.804395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:3061
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-03-28T09:25:25.142407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:25:27.451649Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4
2025-03-28T09:25:27.453092Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty
2025-03-28T09:25:27.456061Z node 1 :KQP_PROXY WARN: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345
2025-03-28T09:25:27.458936Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1
2025-03-28T09:25:27.458990Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1
2025-03-28T09:25:27.459207Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes.
2025-03-28T09:25:27.459402Z node 1 :KQP_PROXY DEBUG: Updated table service config.
2025-03-28T09:25:27.459437Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4
2025-03-28T09:25:27.459678Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 2, sender: [1:7486789103775086949:2282], selfId: [1:7486789099480119336:2278], source: [1:7486789099480119336:2278]
2025-03-28T09:25:27.459717Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1
2025-03-28T09:25:27.460751Z node 1 :KQP_PROXY WARN: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345
2025-03-28T09:25:27.460826Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7486789103775086949:2282], selfId: [1:7486789099480119336:2278], source: [1:7486789099480119336:2278]
2025-03-28T09:25:27.463464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789112365021576:2310], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:25:27.463537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:25:27.467246Z node 1 :KQP_PROXY WARN: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq
2025-03-28T09:25:27.467344Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 4, sender: [1:7486789103775086949:2282], selfId: [1:7486789099480119336:2278], source: [1:7486789099480119336:2278]
2025-03-28T09:25:27.467656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789112365021606:2311], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:25:27.467739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:31.099080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:25:31.099631Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:25:31.099709Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f4e/r3tmp/tmp4SPXIE/pdisk_1.dat 2025-03-28T09:25:31.420049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:25:31.451993Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-03-28T09:25:31.452075Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:31.452338Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:31.475822Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:293:2338], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:25:31.477415Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:293:2338], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-03-28T09:25:31.477554Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:293:2338], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:610:2532] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:25:31.477693Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:293:2338], cacheItem# { Subscriber: { Subscriber: [2:610:2532] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:25:31.477855Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:293:2338], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/running_requests PathId: Strong: 1 } 
2025-03-28T09:25:31.477941Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [2:293:2338], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:611:2533] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:25:31.478029Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:293:2338], cacheItem# { Subscriber: { Subscriber: [2:611:2533] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:25:31.478226Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:624:2534], recipient# [2:302:2346], result# { ErrorCount: 2 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: Root/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:25:31.490384Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:31.490564Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:31.501976Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:31.583341Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [2:293:2338], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /Root PathId: [OwnerId: 72057594046644 ... 
Captured TEvents::TSystem::Wakeup to <service actor> [repeated periodic wakeups elided; targets included BLOB_CACHE_ACTOR, BS_SYNC_BROKER, NKikimr::NKqp::TSchedulerActor, TICKET_PARSER_ACTOR, EXT_COUNTERS_UPDATER_ACTOR, KQP_COMPILE_COMPUTATION_PATTERN_SERVICE, KQP_NODE_SERVICE, PROXY_SCHEME_CACHE, DS_PROXY_NODE_MON_ACTOR, TABLET_RESPONSIVENESS_PINGER, TABLET_COUNTERS_AGGREGATOR, BSC_STAT_PROCESSOR, NKikimr::NIcNodeCache::TIcNodeCacheServiceActor, NKikimr::NBsController::TBlobStorageController::TSelfHealActor] 2025-03-28T09:25:39.668010Z node 2 :KQP_PROXY DEBUG: Handle TEvPrivate::TEvOnRequestTimeout(20) 2025-03-28T09:25:39.668101Z node 2 :KQP_PROXY DEBUG: Reply timeout: requestId 20 sessionId: ydb://session/3?node_id=2&id=ODRhYzEwZTQtZWEwNDhmMDktODFlMWJjODEtZDMxYTQ3YzI= status: TIMEOUT round: 0 2025-03-28T09:25:39.668341Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODRhYzEwZTQtZWEwNDhmMDktODFlMWJjODEtZDMxYTQ3YzI=, ActorId: [2:1131:2934], ActorState: ExecuteState, TraceId: 01jqe14gbt0e32k9e9wxsznrt1, Create QueryResponse for error on request, msg:
Captured TEvents::TSystem::Wakeup to BS_SYNC_BROKER Captured TEvents::TSystem::Wakeup to BLOB_CACHE_ACTOR 2025-03-28T09:25:39.668648Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 20, sender: [2:593:2518], selfId: [2:57:2104], source: [2:1131:2934] Send scheduled evet back 2025-03-28T09:25:39.668810Z node 2 :KQP_COMPILE_ACTOR NOTICE: Compilation timeout, self: [2:1134:2937], cluster: db, database: , text: "SELECT * FROM `/Root/Table`;", startTime: 2025-03-28T09:25:38.810549Z 2025-03-28T09:25:39.668893Z node 2 :KQP_COMPILE_ACTOR DEBUG: Send response, self: [2:1134:2937], owner: [2:287:2332], status: TIMEOUT, issues:
: Error: Query compilation timed out. , uid: d814de9-8db62730-b7739a41-489af1d3 Send captured event back Send captured event back Send captured event back Send captured event back Send captured event back 2025-03-28T09:25:40.723998Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486789164400219015:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:40.724077Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f4e/r3tmp/tmpWPlobX/pdisk_1.dat 2025-03-28T09:25:40.883817Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:40.912996Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:40.913081Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:40.916420Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6612, node 3 2025-03-28T09:25:40.987435Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:40.987476Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:40.987487Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:40.987701Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:41.196592Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:43.735982Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:43.751087Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-03-28T09:25:43.751862Z node 3 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-28T09:25:43.751886Z node 3 :KQP_PROXY DEBUG: Updated table service config. 
2025-03-28T09:25:43.751906Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:43.751947Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /Root, empty 2025-03-28T09:25:43.752007Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:43.752049Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:43.766581Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:43.766639Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:45.087804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:45.087898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:45.087931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:45.087977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:45.088033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:45.088060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:45.088130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:45.088206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:45.088528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:45.169792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:45.169841Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:45.189601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:45.189845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:45.189995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:45.195050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:45.195262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear 
TempDirsState with owners number: 0 2025-03-28T09:25:45.195871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.196084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:45.197724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:45.198906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:45.198959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:45.199105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:45.199151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:45.199199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:45.199265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.205343Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:45.326033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:45.326313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.326536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:45.326837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:45.326903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.329154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.329306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:45.329511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.329581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:45.329632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:45.329670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:45.331738Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.331802Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:45.331840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:45.333705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.333756Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.333793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.333839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.337573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:45.339512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:45.339690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:45.340753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.340897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:45.340943Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.341215Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:45.341274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.341435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:45.341569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:45.343669Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:45.343743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:45.343954Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-03-28T09:25:45.343995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:45.344389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.344433Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:45.344530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:45.344562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.344616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:45.344661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.344697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:45.344733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.344766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:45.344793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:45.344865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:45.344923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:45.344956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:45.346740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:45.346866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:45.346902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
athId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:45.414080Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:25:45.414120Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:25:45.414150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:45.415860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:45.415946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:45.415972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:25:45.416021Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:25:45.416059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-03-28T09:25:45.416145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 1 2025-03-28T09:25:45.416174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:274:2265] 2025-03-28T09:25:45.418265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:45.418317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:45.418369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:45.418393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:45.418417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:45.418442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:45.419578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:25:45.422058Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 2025-03-28T09:25:45.422291Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-03-28T09:25:45.422389Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2025-03-28T09:25:45.422522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 
TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-03-28T09:25:45.422874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-28T09:25:45.423195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.423348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:25:45.423519Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 2025-03-28T09:25:45.423711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:25:45.423827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:45.423946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:25:45.424073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:45.424099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:275:2266] 2025-03-28T09:25:45.424153Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2025-03-28T09:25:45.424347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-03-28T09:25:45.424455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:45.424581Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2025-03-28T09:25:45.424663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:45.424776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:45.425060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-28T09:25:45.425158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:45.425452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:45.425487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 
2025-03-28T09:25:45.425606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:45.426036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:45.426074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:45.426152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:45.428830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-03-28T09:25:45.428974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:25:45.433189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:25:45.433462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-28T09:25:45.433727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:25:45.433838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-28T09:25:45.439208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:45.439958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-03-28T09:25:45.440629Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:45.440831Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 218us result status StatusPathDoesNotExist 2025-03-28T09:25:45.441015Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:45.441543Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:45.441760Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 269us result status StatusSuccess 2025-03-28T09:25:45.442104Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:45.296078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:45.296185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:45.296223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:45.296269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:45.296319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:45.296348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:45.296422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:45.296499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:45.296856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:45.388704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:45.388762Z node 1 :IMPORT 
WARN: Table profiles were not loaded 2025-03-28T09:25:45.413814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:45.414118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:45.414307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:45.421168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:45.421399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:45.422074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.422284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:45.424555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:45.425910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:45.425980Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:45.426178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:45.426243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:45.426287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:45.426365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.433665Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:45.554901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:45.555142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.555351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:45.555636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:45.555690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.560014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.560206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:45.560457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.560518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:45.560560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:45.560596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:45.562973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.563061Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:45.563102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:45.565338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.565391Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.565436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.565501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.577865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:45.580588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:45.580808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:45.581989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.582149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:45.582213Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.582518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:45.582579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.582816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:45.582924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:45.585413Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:45.585480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:45.585675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:45.585716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:45.586150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.586206Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:45.586306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:45.586340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.586379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:45.586411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.586449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:45.586496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.586534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:45.586564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:45.586629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:45.586706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:45.586745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:45.588696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:45.588821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:45.588858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
T09:25:45.671864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-03-28T09:25:45.672176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.672229Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 101:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:45.672286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-03-28T09:25:45.672414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:45.673123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:45.673233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:45.673271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:45.673311Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-28T09:25:45.673352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:45.674247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:45.674321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:45.674361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:45.674401Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-28T09:25:45.674434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:25:45.674502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-03-28T09:25:45.691340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-03-28T09:25:45.691562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at 
step: 5000003 2025-03-28T09:25:45.695159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.695330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:45.695397Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:25:45.695576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-28T09:25:45.695781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:45.695856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:25:45.696589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:45.697012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:25:45.699099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:45.699144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:45.699296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:25:45.699421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:45.699481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:25:45.699527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-03-28T09:25:45.699880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.699931Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:25:45.700060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:45.700106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:45.700155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:45.700206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:45.700246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:25:45.700297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:45.700336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 
2025-03-28T09:25:45.700369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:25:45.700451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:25:45.700493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-28T09:25:45.700532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-28T09:25:45.700566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-03-28T09:25:45.701432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:45.701551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:45.701596Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:45.701640Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-28T09:25:45.701683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:45.702357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:45.702453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:45.702484Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:45.702522Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-28T09:25:45.702610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:25:45.702695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:25:45.705697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:45.707045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-03-28T09:25:45.710638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0/dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId" } } TxId: 102 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:45.711244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.711369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/USER_0/dir/table_0, opId: 102:0, schema: Name: "table_0" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "RowId", at schemeshard: 72057594046678944 2025-03-28T09:25:45.711546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, at schemeshard: 72057594046678944 2025-03-28T09:25:45.714059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Inclusive subDomain do not support shared transactions" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:45.714277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot/USER_0, subject: , status: StatusNameConflict, reason: Inclusive subDomain do not support shared transactions, operation: CREATE TABLE, path: /MyRoot/USER_0/dir/table_0 TestModificationResult got TxId: 102, wait until txId: 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:45.232963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:45.233084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:45.233122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:45.233167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:45.233211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:45.233237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:45.233292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:45.233388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:45.233707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:45.331864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:45.331915Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-03-28T09:25:45.349077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:45.349328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:45.349480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:45.356806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:45.357062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:45.357781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.358038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:45.360365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:45.361846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:45.361913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:45.362127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:45.362196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:45.362252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:45.362346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.370173Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:45.497112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:45.497350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.497570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:45.497808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:45.497868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.499870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.499973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:45.500101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.500137Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:45.500167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:45.500192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:45.501673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.501715Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:45.501744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:45.503078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.503117Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.503141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.503173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.505964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:45.507324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:45.507518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:45.508515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.508645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:45.508690Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.508888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:45.508934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.509126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:45.509209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:25:45.511063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:45.511126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:45.511310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:45.511348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:45.511699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.511733Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:45.511820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:45.511850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.511912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:45.511940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.511972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:45.512009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.512047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:45.512074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:45.512137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:45.512188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:45.512213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:45.513627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:45.513751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:45.513810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
G: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:569:2523] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2025-03-28T09:25:45.754371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 2 Name: "USER_0" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:45.754529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /MyRoot/USER_0, opId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.754727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:45.754913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 108:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:45.754956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.756766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusAccepted TxId: 108 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:45.756913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: /MyRoot/USER_0 2025-03-28T09:25:45.757088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.757142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 108:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:45.757180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 108:0 ProgressState no shards to create, do next state 2025-03-28T09:25:45.757226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 2 -> 3 2025-03-28T09:25:45.758826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.758872Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 108:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:45.758923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 3 -> 128 2025-03-28T09:25:45.760563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.760613Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.760654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 108:0, at tablet# 72057594046678944 2025-03-28T09:25:45.760703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2025-03-28T09:25:45.760824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet 
{ TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:45.762346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2025-03-28T09:25:45.762464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000007 2025-03-28T09:25:45.762873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.762997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:45.763043Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-03-28T09:25:45.763292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 128 -> 240 2025-03-28T09:25:45.763352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2025-03-28T09:25:45.763541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:45.763615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 108 2025-03-28T09:25:45.765434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:45.765490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:45.765662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:45.765699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 108, path id: 2 2025-03-28T09:25:45.766036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.766082Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 108:0 ProgressState 2025-03-28T09:25:45.766170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-03-28T09:25:45.766202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-28T09:25:45.766254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-03-28T09:25:45.766295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-28T09:25:45.766331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 
2025-03-28T09:25:45.766369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-28T09:25:45.766403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-03-28T09:25:45.766445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-03-28T09:25:45.766544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:25:45.766596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 108, publications: 1, subscribers: 0 2025-03-28T09:25:45.766646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 2], 8 2025-03-28T09:25:45.767187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-03-28T09:25:45.767318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-03-28T09:25:45.767371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2025-03-28T09:25:45.767419Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-03-28T09:25:45.767485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:45.767562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2025-03-28T09:25:45.770206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-03-28T09:25:45.770505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-03-28T09:25:45.770543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-03-28T09:25:45.771076Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-03-28T09:25:45.771161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-03-28T09:25:45.771200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:594:2548] TestWaitNotification: OK eventTxId 108 2025-03-28T09:25:45.771815Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:45.771992Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 196us result status StatusSuccess 2025-03-28T09:25:45.772338Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 
SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 6 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 6 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:44.736083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:44.736216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:44.736262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:44.736327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:44.736379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:44.736411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:44.736488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:44.736586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:44.736967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:44.829831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:44.829892Z node 1 :IMPORT WARN: Table profiles were 
not loaded 2025-03-28T09:25:44.852837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:44.853156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:44.853345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:44.859706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:44.859939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:44.860634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:44.860871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:44.863694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:44.865112Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:44.865182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:44.865357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:44.865405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:44.865448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:44.865526Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:44.873366Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:45.048519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:45.048790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.049051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:45.049364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:45.049425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.051921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.052074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:45.052290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.052344Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:45.052389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:45.052467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:45.054640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.054724Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:45.054768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:45.056684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.056745Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.056787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.056840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.073859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:45.076192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:45.076472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:45.077552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.077709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:45.077759Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.078030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:45.078082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.078268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:45.078367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:25:45.080551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:45.080615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:45.080822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:45.080864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:45.081313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.081363Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:45.081471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:45.081508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.081550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:45.081582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.081620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:45.081666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.081706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:45.081742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:45.081811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:45.081874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:45.081910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:45.083799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:45.083920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:45.083959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-03-28T09:25:45.629300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-28T09:25:45.636594Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-28T09:25:45.637096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:45.637194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-28T09:25:45.637290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:25:45.637335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:25:45.637367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 Forgetting tablet 72075186233409552 2025-03-28T09:25:45.638435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.638677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:25:45.643711Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409546 2025-03-28T09:25:45.651397Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2025-03-28T09:25:45.651987Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-28T09:25:45.653212Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-03-28T09:25:45.653609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:25:45.653853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409548 2025-03-28T09:25:45.655719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-03-28T09:25:45.655917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 2025-03-28T09:25:45.656862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409549 2025-03-28T09:25:45.657267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:45.657437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:45.657768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:25:45.657979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-28T09:25:45.658117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:45.666865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:25:45.667208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:45.667285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:45.667444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:45.672771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-03-28T09:25:45.672857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-03-28T09:25:45.673360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-03-28T09:25:45.673408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-03-28T09:25:45.673494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:45.674001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:45.674062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:45.674143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:45.674439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:25:45.674484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:25:45.675748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:25:45.675784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-28T09:25:45.679830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-28T09:25:45.679894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted 
shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-03-28T09:25:45.680005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:25:45.680029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:25:45.680103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-28T09:25:45.680155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-28T09:25:45.680252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:45.683628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-28T09:25:45.683929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:25:45.683978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:25:45.684447Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:25:45.684566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:25:45.684607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:775:2667] TestWaitNotification: OK eventTxId 103 2025-03-28T09:25:45.685160Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:45.685363Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 202us result status StatusPathDoesNotExist 2025-03-28T09:25:45.685583Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:45.686068Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:45.686284Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 185us result status StatusSuccess 2025-03-28T09:25:45.686627Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:45.097601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:45.097714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:45.097762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:45.097815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:45.097860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:45.097891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:45.097973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:45.098047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:45.098418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:45.190079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:45.190140Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-03-28T09:25:45.207271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:45.207573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:45.207765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:45.216402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:45.216645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:45.217331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.217566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:45.219657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:45.220957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:45.221028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:45.221227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:45.221292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:45.221341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:45.221428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.227689Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:45.384900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:45.385171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.385415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:45.385701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:45.385765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.388292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.388443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:45.388647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.388704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:45.388733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:45.388762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:45.390938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.391007Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:45.391044Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:45.395772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.395830Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.395890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.395939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.400257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:45.403943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:45.404126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:45.405321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.405452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:45.405497Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.405796Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:45.405860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.406028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:45.406125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:25:45.412055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:45.412125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:45.412289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:45.412332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:45.412741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.412786Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:45.412880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:45.412916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.412973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:45.413005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.413041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:45.413080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.413117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:45.413146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:45.413227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:45.413279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:45.413315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:45.415392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:45.415536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:45.415573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
cookie: 103 2025-03-28T09:25:46.036945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:25:46.037139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:46.037255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:46.037369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:46.037405Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-28T09:25:46.037480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:25:46.037525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:25:46.037563Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:25:46.037588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:25:46.037620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-28T09:25:46.037686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:482:2437] message: TxId: 103 2025-03-28T09:25:46.037725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:25:46.037757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:25:46.037784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:25:46.037920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-28T09:25:46.040441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:25:46.040489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:483:2438] TestWaitNotification: OK eventTxId 103 2025-03-28T09:25:46.041009Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:46.041218Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 214us result status StatusSuccess 2025-03-28T09:25:46.041697Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: 
"root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:46.042297Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:46.042503Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 205us result status StatusSuccess 2025-03-28T09:25:46.042914Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 
AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:46.043454Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:46.043668Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 169us result status StatusSuccess 2025-03-28T09:25:46.044023Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:46.044520Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:46.045049Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 519us result status StatusSuccess 2025-03-28T09:25:46.045433Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 
IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TPersQueueTest::TopicServiceCommitOffset [GOOD]
>> TPersQueueTest::TopicServiceCommitOffsetBadOffsets
>> TestKinesisHttpProxy::TestRequestWithIAM
>> TPersQueueTest::CloseActiveWriteSessionOnClusterDisable [GOOD]
>> TPersQueueTest::Cache
>> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD]
>> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop
>> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:41.559778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:41.559883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.559920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:41.559964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:41.560016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:41.560053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:41.560113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.560192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:41.560517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:41.643669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:41.643721Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:41.670469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:41.670784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:41.670974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:41.677624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:41.677887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:41.678550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.678768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:41.684366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.685720Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.685805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.685977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:41.686025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.686076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:41.686160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.699756Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:41.833728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:41.833938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.834115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:41.834327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:41.834391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.836080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.836259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:41.836440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.836493Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:41.836529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:41.836567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:41.837930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.837969Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:41.837999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:41.839102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.839135Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.839164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.839195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.841752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:41.843052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:41.843207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:41.844028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.844125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.844164Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.844369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:41.844433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.844560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:41.844616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:41.846224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.846286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.846447Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.846491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:41.846946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.846995Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:41.847090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.847124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.847162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.847196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.847234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:41.847276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.847311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:41.847342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:41.847423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:41.847500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:41.847538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:41.849325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.849471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.849509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
ed, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 529 RawX2: 4294969769 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-03-28T09:25:46.756132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-03-28T09:25:46.756258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 529 RawX2: 4294969769 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-03-28T09:25:46.756307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-03-28T09:25:46.756831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:25:46.756893Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-03-28T09:25:46.756944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-28T09:25:46.756992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-03-28T09:25:46.757083Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-03-28T09:25:46.757207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-03-28T09:25:46.757343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-03-28T09:25:46.757408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-03-28T09:25:46.760010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:25:46.761453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:25:46.761698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-28T09:25:46.761741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-28T09:25:46.761926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-03-28T09:25:46.762099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-28T09:25:46.762138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-03-28T09:25:46.762177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-03-28T09:25:46.762432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, 
at schemeshard: 72075186233409546 2025-03-28T09:25:46.762481Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-03-28T09:25:46.762578Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:25:46.762619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-03-28T09:25:46.762675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-03-28T09:25:46.764121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-03-28T09:25:46.764259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-03-28T09:25:46.764299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-03-28T09:25:46.764353Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2025-03-28T09:25:46.764392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-03-28T09:25:46.765833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-03-28T09:25:46.765916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-03-28T09:25:46.765943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-03-28T09:25:46.765971Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:25:46.765998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-03-28T09:25:46.766066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-28T09:25:46.768825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:25:46.768941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-03-28T09:25:46.769380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-03-28T09:25:46.769570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:25:46.769605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:25:46.769657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 
progress is 1/1 2025-03-28T09:25:46.769692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:25:46.769725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-03-28T09:25:46.769790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:557:2493] message: TxId: 104 2025-03-28T09:25:46.769833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:25:46.769882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:25:46.769938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:25:46.770039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-03-28T09:25:46.770650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-28T09:25:46.770721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-28T09:25:46.771801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-03-28T09:25:46.772242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-03-28T09:25:46.773586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-28T09:25:46.773643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-03-28T09:25:46.773735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:25:46.773774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:756:2672] 2025-03-28T09:25:46.774487Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-03-28T09:25:46.775514Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-03-28T09:25:46.775719Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 227us result status StatusSuccess 2025-03-28T09:25:46.776164Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546
>> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD]
>> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse
>> TSchemeShardSubDomainTest::DeclareDefineAndDelete
>> TSchemeShardSubDomainTest::CreateDropSolomon
>> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop
>> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine
>> TSchemeShardSubDomainTest::DeleteAndRestart
>> TSchemeShardSubDomainTest::Create
>> TSchemeShardSubDomainTest::SimultaneousDeclare
>> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD]
>> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD]
>> TPersQueueTest::SchemeshardRestart [GOOD]
>> TPersQueueTest::SameOffset
>> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:48.149827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:48.149953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:48.149992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:48.150041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:48.150079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:48.150105Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:48.150157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:48.150223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:48.150542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:48.229099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:48.229158Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:48.255522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:48.255806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:48.255969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:48.275707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:48.275942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:48.276579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.276773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:48.278835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.280117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:48.280177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.280337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:48.280381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:48.280415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:48.280483Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.291259Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:48.414443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:48.414715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.414946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:48.415185Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:48.415236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.417472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.417615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:48.417818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.417862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:48.417897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:48.417937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:48.419987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.420045Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:48.420078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:48.421839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.421879Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.421923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.421975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.425502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:48.427356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:48.427533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:48.428614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.428751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:48.428796Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.429086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:48.429160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.429328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:48.429402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:48.431451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:48.431527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:48.431699Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.431738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:48.432148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.432207Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:48.432300Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:48.432332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.432366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:48.432395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.432425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:48.432460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.432490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:48.432517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:48.432585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:48.432645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:48.432689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:48.434450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:48.434580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 
Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:48.434647Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-03-28T09:25:48.486041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-03-28T09:25:48.486416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.486545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:48.486604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-28T09:25:48.486861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-03-28T09:25:48.486913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-28T09:25:48.487080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:48.487148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:48.487190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-03-28T09:25:48.489142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:48.489177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:48.489332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:48.489424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.489478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-03-28T09:25:48.489543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-03-28T09:25:48.489814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.489852Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-03-28T09:25:48.489953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-28T09:25:48.489989Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:25:48.490023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-28T09:25:48.490050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:25:48.490108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-03-28T09:25:48.490150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:25:48.490183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-03-28T09:25:48.490212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-03-28T09:25:48.490287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:48.490335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-03-28T09:25:48.490367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-28T09:25:48.490390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-28T09:25:48.491078Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:48.491165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:48.491196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-03-28T09:25:48.491253Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:25:48.491301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:48.491985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:48.492061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:48.492103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-03-28T09:25:48.492133Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:25:48.492171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:48.492263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-03-28T09:25:48.494884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-28T09:25:48.495181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-03-28T09:25:48.495357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-28T09:25:48.495399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-03-28T09:25:48.495509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:25:48.495528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:25:48.495858Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-28T09:25:48.496004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-28T09:25:48.496039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:309:2300] 2025-03-28T09:25:48.496177Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:25:48.496263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:48.496286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:309:2300] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-03-28T09:25:48.496731Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:48.496916Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 209us result status StatusSuccess 2025-03-28T09:25:48.497314Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 
DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:48.497843Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:48.498020Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 196us result status StatusPathDoesNotExist 2025-03-28T09:25:48.498162Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/USER_0\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/USER_0" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD]
>> TSchemeShardSubDomainTest::Create [GOOD]
>> TSchemeShardSubDomainTest::CreateAlterNbsChannels
>> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:47.753830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:47.753955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:47.754014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:47.754067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:47.754111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:47.754137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:47.754191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:47.754244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10,
DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:47.754510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:47.820035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:47.820078Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:47.834773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:47.835064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:47.835227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:47.844316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:47.844571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:47.845292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:47.845512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:47.847870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:47.849224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:47.849294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:47.849492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:47.849541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:47.849582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:47.849664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:47.859429Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:47.999856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:48.000140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.000412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:48.000715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:48.000792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.003372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose 
Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.003571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:48.003797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.003847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:48.003889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:48.003925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:48.006252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.006343Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:48.006385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:48.008377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.008429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.008487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.008559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.012190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:48.017388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:48.017626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:48.018753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.018910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:48.018954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.019239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:48.019292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.019462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:48.019555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:48.022233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:48.022305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:48.022536Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.022582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:48.023058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.023144Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:48.023250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:48.023299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.023337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:48.023364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.023401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:48.023439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.023513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:48.023549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:48.023637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:48.023700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:48.023732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:48.025599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:48.025716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:48.025753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
, LocalPathId: 2] was 6 2025-03-28T09:25:48.480505Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2025-03-28T09:25:48.481229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 Forgetting tablet 72075186233409546 2025-03-28T09:25:48.482092Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-28T09:25:48.482345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:25:48.482558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:48.483044Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409548 2025-03-28T09:25:48.483980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-03-28T09:25:48.484164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 2025-03-28T09:25:48.485556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:48.485730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:48.486105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-28T09:25:48.486271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409549 2025-03-28T09:25:48.495320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:48.495415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:48.495610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:48.497153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-03-28T09:25:48.497233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-03-28T09:25:48.497390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-03-28T09:25:48.497482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-03-28T09:25:48.497666Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409552, at schemeshard: 72057594046678944 2025-03-28T09:25:48.497848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:48.497928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:48.497974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:48.498054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:48.504247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:25:48.504314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:25:48.504421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:25:48.504446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-28T09:25:48.504546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-28T09:25:48.504574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-03-28T09:25:48.504629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:25:48.504665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:25:48.504731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-28T09:25:48.504766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-28T09:25:48.507163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:48.507295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-03-28T09:25:48.507548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:25:48.507616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-03-28T09:25:48.507724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:25:48.507746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:25:48.508286Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:25:48.508405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:48.508441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:719:2607] 2025-03-28T09:25:48.508596Z node 1 
:FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:25:48.508655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:48.508676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:719:2607] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-03-28T09:25:48.509073Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:48.509256Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 203us result status StatusPathDoesNotExist 2025-03-28T09:25:48.509413Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:48.509815Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:48.510030Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 191us result status StatusPathDoesNotExist 2025-03-28T09:25:48.510134Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:48.510463Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:48.510583Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 140us result status StatusSuccess 2025-03-28T09:25:48.510945Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::SimultaneousDefine
>> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD]
>> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:48.369882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:48.370005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:48.370047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:48.370093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:48.370137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:48.370166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:48.370221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:48.370299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:48.370640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:25:48.458805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:48.458864Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:48.474066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:48.474314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:48.474516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:48.480439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:48.480682Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:48.481376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.481585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:48.483377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.484696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:48.484759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.484941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:48.485003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:48.485052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:48.485137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.491792Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:48.631442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:48.631723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.631955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:48.632227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:48.632282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.634722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.634884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:48.635118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.635174Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:48.635218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:48.635253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:48.637410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.637477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:48.637532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:48.639464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.639533Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.639579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.639628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.649715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:48.652240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:48.652462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:48.653616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.653768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:48.653834Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.654131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:48.654192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.654388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-03-28T09:25:48.654471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:48.656830Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:48.656893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:48.657105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.657149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:48.657590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.657644Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:48.657744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:48.657782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.657820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:48.657852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.657887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:48.657933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.657974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:48.658004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:48.658082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:48.658143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:48.658179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:48.660155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:48.660285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:48.660323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:25:48.829298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:48.829344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:48.829466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:48.829610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.829647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-28T09:25:48.829699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:25:48.829986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.830045Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 102:0 ProgressState 2025-03-28T09:25:48.830111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:25:48.830148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:48.830185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:25:48.830219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:48.830255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-28T09:25:48.830292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:48.830336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:25:48.830377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:25:48.830538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:25:48.830594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-28T09:25:48.830645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-28T09:25:48.830709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-28T09:25:48.831463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:48.831558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:48.831608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at 
schemeshard: 72057594046678944, txId: 102 2025-03-28T09:25:48.831648Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:25:48.831685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:48.832461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:48.832536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:48.832569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:25:48.832596Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:25:48.832623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:48.832692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-28T09:25:48.834925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:48.834973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:48.834997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:48.835850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:25:48.836791Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-28T09:25:48.837100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.837431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-03-28T09:25:48.837850Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-03-28T09:25:48.838445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:25:48.838726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 2025-03-28T09:25:48.839761Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 
TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-28T09:25:48.840858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:48.841047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-03-28T09:25:48.841776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:48.841828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:48.841952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:48.842270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:25:48.842441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:48.842484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:48.842542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:48.847959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:25:48.848029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:25:48.848156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:25:48.848180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-28T09:25:48.848256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:25:48.848303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:25:48.848834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:48.848934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:25:48.849187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:25:48.849228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:25:48.849702Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:25:48.849798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:48.849833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: 
satisfy waiter [1:517:2471] TestWaitNotification: OK eventTxId 102 2025-03-28T09:25:48.850452Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:48.850675Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 207us result status StatusPathDoesNotExist 2025-03-28T09:25:48.850901Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:48.572832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:48.572967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:48.573010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:48.573058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:48.573106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:48.573133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:48.573188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:48.573262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:48.573620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:48.659410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:48.659466Z node 1 :IMPORT WARN: Table profiles were 
not loaded 2025-03-28T09:25:48.674290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:48.674541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:48.674727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:48.680470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:48.680708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:48.681387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.681608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:48.683640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.684965Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:48.685031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.685205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:48.685273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:48.685318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:48.685397Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.692423Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:48.836054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:48.836314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.836540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:48.836832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:48.836888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.843004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.843188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:48.843386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.843438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:48.843490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:48.843522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:48.846662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.846756Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:48.846799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:48.848551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.848604Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.848666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.848727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.852690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:48.854605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:48.854793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:48.855901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.856049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:48.856099Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.856366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:48.856421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.856591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:48.856685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:25:48.858678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:48.858757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:48.858933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.858974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:48.859376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.859438Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:48.859571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:48.859607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.859643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:48.859673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.859705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:48.859749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.859781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:48.859808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:48.859879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:48.859940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:48.859974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:48.868679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:48.868823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:48.868862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-28T09:25:49.109398Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409551 2025-03-28T09:25:49.111105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-03-28T09:25:49.111293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409547 2025-03-28T09:25:49.111887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:49.112069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409549 2025-03-28T09:25:49.112562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-28T09:25:49.112729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:49.113790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:49.113847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:49.113999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:49.116281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-03-28T09:25:49.116347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-03-28T09:25:49.116612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2025-03-28T09:25:49.116700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-03-28T09:25:49.119776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:25:49.119824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:25:49.119921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:49.120059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:49.120108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:49.120190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-03-28T09:25:49.120351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:25:49.120379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-28T09:25:49.120516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-28T09:25:49.120545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-03-28T09:25:49.120707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2025-03-28T09:25:49.123995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:25:49.124055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:25:49.124157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-28T09:25:49.124196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-28T09:25:49.124308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:49.124573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409549, at schemeshard: 72057594046678944 2025-03-28T09:25:49.126263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2025-03-28T09:25:49.126531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-28T09:25:49.126572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-03-28T09:25:49.126734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:25:49.126759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-03-28T09:25:49.126828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:25:49.126850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:25:49.127380Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-28T09:25:49.127668Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:25:49.127735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-28T09:25:49.127771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:631:2534] 2025-03-28T09:25:49.127906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:49.127923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:631:2534] 2025-03-28T09:25:49.127963Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:25:49.128037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:49.128052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:631:2534] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-03-28T09:25:49.128397Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:49.128544Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 175us result status StatusPathDoesNotExist 2025-03-28T09:25:49.128698Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:49.129047Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:49.129206Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 143us result status StatusPathDoesNotExist 2025-03-28T09:25:49.129288Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:49.129593Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:49.129737Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 115us result status StatusSuccess 2025-03-28T09:25:49.130022Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestYmqHttpProxy::TestCreateQueueWithEmptyName [GOOD] >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions [GOOD] >> TestKinesisHttpProxy::TestConsumersEmptyNames [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:48.822630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:48.824179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:48.824231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:48.824276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:48.824327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:48.824352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:48.824411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:48.824484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:48.824824Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:48.906554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:48.906592Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:48.935296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:48.935564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:48.935691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:48.954480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:48.954680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:48.955302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.955540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:48.957411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.958620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:48.958674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.959015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:48.959078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:48.959119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:48.959193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.964596Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:49.083288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:49.083533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.083742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:49.084049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:49.084116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.085979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:49.086112Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:49.086268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.086317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:49.086351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:49.086382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:49.088236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.088290Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:49.088322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:49.089896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.089940Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.089974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:49.090013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:49.098639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:49.100452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:49.100602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:49.101623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:49.101745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:49.101805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:49.102060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:49.102109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:49.102282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:49.102357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:49.104258Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:49.104320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:49.104489Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:49.104528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:49.104938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.104976Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:49.105067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:49.105096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:49.105130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:49.105160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:49.105191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:49.105227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:49.105258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:49.105284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:49.105347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:49.105397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:49.105429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:49.107144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:49.107268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:49.107302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
txid 100:0 3 -> 128 2025-03-28T09:25:49.144818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-28T09:25:49.145475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-28T09:25:49.145652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.145689Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.145730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944 2025-03-28T09:25:49.145794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-03-28T09:25:49.145919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:49.147611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-03-28T09:25:49.147743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-03-28T09:25:49.148049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:49.148155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:49.148213Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-28T09:25:49.148443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-03-28T09:25:49.148497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-28T09:25:49.148652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:49.148725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:49.148772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-03-28T09:25:49.150312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:49.150336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, 
path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:49.150441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:49.150516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:49.150543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-03-28T09:25:49.150592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-03-28T09:25:49.150803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.150838Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-03-28T09:25:49.150902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-28T09:25:49.150925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:25:49.150951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-28T09:25:49.150975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:25:49.150999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-03-28T09:25:49.151023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:25:49.151051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-03-28T09:25:49.151072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-03-28T09:25:49.151110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:49.151135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-03-28T09:25:49.151168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-28T09:25:49.151199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-28T09:25:49.151783Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:49.151864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:49.151894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-03-28T09:25:49.151941Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:25:49.151993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:49.152538Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:49.152626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:49.152673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-03-28T09:25:49.152700Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:25:49.152736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:49.152803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-03-28T09:25:49.156137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-28T09:25:49.156656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-03-28T09:25:49.156937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-28T09:25:49.156977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-03-28T09:25:49.157083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:25:49.157127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:25:49.157510Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-28T09:25:49.157638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-28T09:25:49.157690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:309:2300] 2025-03-28T09:25:49.157839Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:25:49.157951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:49.157979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:309:2300] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-03-28T09:25:49.158430Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:49.158606Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 201us result status StatusSuccess 
2025-03-28T09:25:49.159031Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestYmqHttpProxy::TestSendMessageWithAttributes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:48.964951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:48.965069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:48.965112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:48.965159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:48.965201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:48.965225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:48.965283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:48.965353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:48.965658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:49.038158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-03-28T09:25:49.038208Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:49.051095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:49.051272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:49.051400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:49.057794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:49.058033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:49.058597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:49.058795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:49.060652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:49.061896Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:49.061960Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:49.062088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:49.062144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:49.062173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:49.062226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.069275Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:49.195219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:49.195409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.195587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:49.195831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:49.195872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.197827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:49.197959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-03-28T09:25:49.198132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.198176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:49.198202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:49.198229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:49.203733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.203806Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:49.203855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:49.207592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.207648Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.207685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:49.207729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:49.211538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:49.216703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:49.216889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:49.217912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:49.218046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:49.218087Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:49.218352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:49.218408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:49.218569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:49.218651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:49.220868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:49.220931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:49.221107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:49.221144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:49.221520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.221561Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:49.221649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:49.221678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:49.221713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:49.221739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:49.221769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:49.221806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:49.221840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:49.221866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:49.221941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:49.221999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:49.222034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:49.223859Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:49.223980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:49.224012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
T_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2025-03-28T09:25:49.270368Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-28T09:25:49.270394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:49.270463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2025-03-28T09:25:49.272116Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:25:49.272944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.273015Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 100:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:49.273058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2025-03-28T09:25:49.275321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-28T09:25:49.275429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-28T09:25:49.277128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.277175Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.277231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 100:0, at tablet# 72057594046678944 2025-03-28T09:25:49.277280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-03-28T09:25:49.277416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:49.279278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-03-28T09:25:49.279455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-03-28T09:25:49.279809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:49.279950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:49.279999Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 
100:0, at tablet# 72057594046678944 2025-03-28T09:25:49.280224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-03-28T09:25:49.280270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-28T09:25:49.280445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:49.280502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:49.280546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-03-28T09:25:49.282716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:49.282755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:49.282909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:49.283006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:49.283071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-03-28T09:25:49.283121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-03-28T09:25:49.283333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.283363Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-03-28T09:25:49.283469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-28T09:25:49.283517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:25:49.283549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-28T09:25:49.283572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:25:49.283603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-03-28T09:25:49.283634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:25:49.283661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-03-28T09:25:49.283684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-03-28T09:25:49.283728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:49.283757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2025-03-28T09:25:49.283804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 
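The "Publication details" lines announce which (pathId, version) pairs transaction 100 still has to push to the scheme board, and each later AckPublish retires one pair; "Publication complete, notify & remove" appears only once the set drains. A small audit sketch under that reading — the message wording is copied from the log above, while the function shape is illustrative:

import re
from collections import defaultdict

DETAIL_RE = re.compile(
    r"Publication details: tx: (\d+), \[OwnerId: \d+, LocalPathId: (\d+)\], (\d+)")
ACK_RE = re.compile(
    r"AckPublish, at schemeshard: \d+, txId: (\d+), "
    r"pathId: \[OwnerId: \d+, LocalPathId: (\d+)\], version: (\d+)")

def unacked_publications(log_text: str):
    # (pathId, version) pairs announced per tx, minus those acked later.
    pending = defaultdict(set)
    for tx, path, ver in DETAIL_RE.findall(log_text):
        pending[tx].add((path, ver))
    for tx, path, ver in ACK_RE.findall(log_text):
        pending[tx].discard((path, ver))
    return {tx: pairs for tx, pairs in pending.items() if pairs}  # {} when all acked
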
2025-03-28T09:25:49.283830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-28T09:25:49.284477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:49.284559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:49.284587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-03-28T09:25:49.284627Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:25:49.284691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:49.285403Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:49.285478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:49.285502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-03-28T09:25:49.285530Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:25:49.285554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:49.285630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2025-03-28T09:25:49.285686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:273:2264] 2025-03-28T09:25:49.289213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-28T09:25:49.289349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-28T09:25:49.289440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-28T09:25:49.289483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:274:2265] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 2025-03-28T09:25:49.290045Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:49.290283Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 221us result status 
StatusSuccess 2025-03-28T09:25:49.290711Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts [GOOD] >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:48.786009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:48.786118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:48.786155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:48.786203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:48.786244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:48.786272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:48.786320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:48.786390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
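The SCHEMESHARD_DESCRIBE pairs in this output also carry per-request timing ("took 221us result status StatusSuccess" above, 207us and 260us further down). A sketch that collects those figures for a quick latency profile, assuming the 'took <N>us result status <Status>' wording is stable and the unit is always microseconds:

import re

DESCRIBE_RE = re.compile(
    r'describe path "([^"]+)" took (\d+)us result status (\w+)')

def describe_latencies(log_text: str):
    # (path, microseconds, status) triples in log order.
    return [(path, int(us), status)
            for path, us, status in DESCRIBE_RE.findall(log_text)]
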
2025-03-28T09:25:48.786729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:48.866120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:48.866178Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:48.880248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:48.880488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:48.880653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:48.885953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:48.886165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:48.886767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.886979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:48.888762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.890008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:48.890071Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.890246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:48.890305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:48.890347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:48.890440Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.896809Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:49.018189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:49.018407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.018628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:49.018954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:49.019014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.023793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-03-28T09:25:49.023929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:49.024118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.024166Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:49.024204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:49.024239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:49.026247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.026307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:49.026342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:49.028455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.028504Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.028543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:49.028588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:49.032859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:49.034849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:49.035014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:49.036025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:49.036154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:49.036197Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:49.036470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:49.036526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:49.036715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:49.036804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:49.039065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:49.039135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:49.039352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:49.039392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:49.039846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.039894Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:49.040225Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:49.040259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:49.040299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:49.040330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:49.040365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:49.040411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:49.040445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:49.040475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:49.040552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:49.040611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:49.040647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:49.042398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:49.042507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:49.042543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
678944 is [1:549:2483] sender: [1:608:2058] recipient: [1:15:2062] 2025-03-28T09:25:49.357404Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:25:49.357508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:49.357551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:606:2527] TestWaitNotification: OK eventTxId 101 2025-03-28T09:25:49.358036Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:49.358216Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 207us result status StatusPathDoesNotExist 2025-03-28T09:25:49.358379Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:49.359378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:549:2483] sender: [1:612:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:549:2483] sender: [1:615:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:549:2483] sender: [1:616:2058] recipient: [1:614:2532] Leader for TabletID 72057594046678944 is [1:617:2533] sender: [1:618:2058] recipient: [1:614:2532] 2025-03-28T09:25:49.398853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:49.398950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:49.398993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:49.399031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:49.399078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:49.399106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:49.399179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:49.399255Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:49.399605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:49.417402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:49.418964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:49.419101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:49.419191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:49.419215Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:49.419456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:49.420047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:49.420143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.420197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.420498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.420555Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-28T09:25:49.420685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.420762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.420846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.420923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.421015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.421163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.421351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.421440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.421694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.421753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.421934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.422062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.422146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.422282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, 
read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.422346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.422447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.422665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.422932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.422997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.423044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.429999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:49.430072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:49.430266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:49.430317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:49.430353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:49.431260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:617:2533] sender: [1:676:2058] recipient: [1:15:2062] 2025-03-28T09:25:49.466313Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:49.466585Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 260us result status StatusPathDoesNotExist 2025-03-28T09:25:49.466784Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:49.467380Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:49.467551Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 178us result status StatusSuccess 2025-03-28T09:25:49.467933Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes >> TestKinesisHttpProxy::TestListStreamConsumers >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey [GOOD] >> TestKinesisHttpProxy::CreateDeleteStream >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady [GOOD] >> IndexBuildTest::RejectsCancel >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] >> TestKinesisHttpProxy::DoubleCreateStream [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:44.849501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:44.849636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:44.849677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:44.849726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:44.849772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:44.849819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:44.849890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:44.849970Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:44.850336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:44.937647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:44.937701Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:44.954033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:44.954232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:44.954397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:44.964315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:44.964490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:44.965111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:44.965277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:44.967733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:44.968778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:44.968846Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:44.968997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:44.969040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:44.969075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:44.969158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:44.979117Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:45.122351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:45.122592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.122850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:45.123136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:45.123196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at 
schemeshard: 72057594046678944 2025-03-28T09:25:45.129219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.129368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:45.129583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.129640Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:45.129681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:45.129718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:45.135858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.135944Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:45.135990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:45.139471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.139532Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.139576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.139639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.143543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:45.147442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:45.147679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:45.148856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:45.148995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:45.149047Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.149323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 
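In these unit tests the coordinator tablet 72057594046316545 is replaced by the FAKE_COORDINATOR stub, whose trace above shows txId 1 being planned at step 5000001 and the plan then being sent back to the schemeshard. A sketch mapping each transaction to its plan step from those lines — the 'Add transaction: <tx> at step: <step>' wording is verbatim, the dict shape is an illustrative choice:

import re

PLAN_RE = re.compile(r"FAKE_COORDINATOR: Add transaction: (\d+) at step: (\d+)")

def plan_steps(log_text: str) -> dict:
    # Last planning wins if a txId ever shows up twice.
    return {int(tx): int(step) for tx, step in PLAN_RE.findall(log_text)}
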
2025-03-28T09:25:45.149378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:45.149584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:45.149681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:45.152282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:45.152359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:45.152575Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:45.152622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:45.153061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:45.153111Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:45.153214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:45.153250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.153289Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:45.153321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.153363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:45.153402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:45.153438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:45.153470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:45.153532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:45.153599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:45.153636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:45.155631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:45.155784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:45.155824Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
hemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 4294969694 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-03-28T09:25:49.988874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2025-03-28T09:25:49.989006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 4294969694 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-03-28T09:25:49.989058Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-03-28T09:25:49.989446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.989503Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-03-28T09:25:49.989545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-28T09:25:49.989575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-03-28T09:25:49.989668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-03-28T09:25:49.989786Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2025-03-28T09:25:49.989915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:49.989973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:25:49.992581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.993553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.993805Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:49.993870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:49.994042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:25:49.994202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:49.994247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-28T09:25:49.994431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-03-28T09:25:49.994513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 
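The TDropTable trace above uses a named barrier: "Set barrier, OperationId: 103:0, name: RenamePathBarrier ... parts count: 1" blocks progress until "All parts have reached barrier, tx: 103". A sketch pairing the two kinds of lines to spot barriers that were set but never released; the wording comes from the log, the rest is an assumption about how one might audit it:

import re

SET_RE = re.compile(r"Set barrier, OperationId: (\S+), name: (\w+)")
DONE_RE = re.compile(r"All parts have reached barrier, tx: (\d+)")

def barrier_status(log_text: str):
    # (operationId, barrier name, released?) per "Set barrier" line.
    released = set(DONE_RE.findall(log_text))
    return [(op, name, op.split(":")[0] in released)
            for op, name in SET_RE.findall(log_text)]
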
2025-03-28T09:25:49.994570Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:25:49.994654Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.994718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-03-28T09:25:49.994757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-03-28T09:25:49.996252Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:25:49.996360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:25:49.996406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:25:49.996445Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-03-28T09:25:49.996482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:49.998432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:25:49.998512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:25:49.998543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:25:49.998571Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-28T09:25:49.998603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:25:49.998673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-28T09:25:50.000755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:50.000807Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:50.001158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:25:50.001341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:25:50.001374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:25:50.001414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 
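Across both tests the suboperations walk the same numeric states: the AlterSubDomain path goes 2 -> 3 -> 128 -> 240, while the drop path above goes 137 -> 129 -> 240. The names attached below (CreateParts, ConfigureParts, Propose, Done) are inferred from the adjacent ProgressState messages, not from the codes themselves; a sketch that rebuilds each operation's chain from the "Change state for txid" lines:

import re
from collections import defaultdict

# Name mapping inferred from nearby messages, not authoritative:
#   2 ~ CreateParts, 3 ~ ConfigureParts, 128 ~ Propose, 240 ~ Done,
#   137 ~ DeleteTableBarrier, 129 ~ ProposedWaitParts.
STATE_RE = re.compile(r"Change state for txid (\S+) (\d+) -> (\d+)")

def state_chains(log_text: str):
    chains = defaultdict(list)
    for op_id, src, dst in STATE_RE.findall(log_text):
        if not chains[op_id] or chains[op_id][-1] != int(src):
            chains[op_id].append(int(src))  # keeps gaps visible if lines were cut
        chains[op_id].append(int(dst))
    return dict(chains)  # e.g. {"1:0": [2, 3, 128, 240], "103:0": [137, 129, 240]}
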
2025-03-28T09:25:50.001441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:25:50.001514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-28T09:25:50.001576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:403:2369] message: TxId: 103 2025-03-28T09:25:50.001614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:25:50.001650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:25:50.001680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:25:50.001763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:25:50.002235Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:50.002272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:50.003220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:25:50.005510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:25:50.006399Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:50.006449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-03-28T09:25:50.006840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:25:50.006896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:664:2597] 2025-03-28T09:25:50.007286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-03-28T09:25:50.008356Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:50.008599Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 217us result status StatusSuccess 2025-03-28T09:25:50.009047Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TestYmqHttpProxy::TestSetQueueAttributes
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:48.749369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:48.749480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:48.749525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:48.749562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:48.749605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:48.749628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:48.749679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:48.749769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:48.750099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:48.830270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:48.830314Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:48.851524Z node 1 :FLAT_TX_SCHEMESHARD
DEBUG: TxInitSchema.Complete 2025-03-28T09:25:48.851874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:48.852090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:48.871802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:48.872057Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:48.872638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.872850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:48.874540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.875738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:48.875796Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.876003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:48.876066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:48.876123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:48.876193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.881218Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:48.982248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:48.982424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.982576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:48.982790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:48.982830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.984600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.984719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:48.984845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-03-28T09:25:48.984878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:48.984906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:48.984927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:48.986257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.986318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:48.986353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:48.987516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.987544Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.987569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.987600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.994757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:48.996395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:48.996570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:48.997380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.997473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:48.997507Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.997712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:48.997750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.997896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:48.997954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:48.999576Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:48.999644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:48.999812Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.999866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:49.000207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.000240Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:49.000324Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:49.000367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:49.000401Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:49.000430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:49.000461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:49.000500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:49.000532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:49.000557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:49.000633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:49.000673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:49.000715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:49.002462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:49.002564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:49.002600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2025-03-28T09:25:50.246159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:25:50.246184Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:25:50.246211Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-28T09:25:50.246239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:25:50.247080Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:25:50.247145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:25:50.247169Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:25:50.247193Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:25:50.247232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:50.247290Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-03-28T09:25:50.249648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:50.249700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:50.249729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:50.249753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:50.250837Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:25:50.255736Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-28T09:25:50.256062Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-03-28T09:25:50.256458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:50.256734Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-03-28T09:25:50.257799Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:25:50.260296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:25:50.260881Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-28T09:25:50.261624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:50.261794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409548 2025-03-28T09:25:50.262254Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:25:50.262924Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-03-28T09:25:50.267635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-28T09:25:50.267861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:25:50.268902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:50.268964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:25:50.269032Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:50.269387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:50.269431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:50.269558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:50.269774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:25:50.272248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:25:50.272302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:25:50.272407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:25:50.272433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-28T09:25:50.272480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:2 2025-03-28T09:25:50.272502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:25:50.272891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-28T09:25:50.272935Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-28T09:25:50.274244Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:50.274371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:50.274425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:50.274497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:50.274701Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:50.278298Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-28T09:25:50.278575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-28T09:25:50.278619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-28T09:25:50.279096Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-03-28T09:25:50.279188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-28T09:25:50.279227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:646:2599] TestWaitNotification: OK eventTxId 105 2025-03-28T09:25:50.279824Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:50.279989Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 201us result status StatusPathDoesNotExist 2025-03-28T09:25:50.280148Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: 
EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:50.280724Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:50.280867Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 160us result status StatusPathDoesNotExist 2025-03-28T09:25:50.280980Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:48.493009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:48.493137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:48.493209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:48.493263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:48.493313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:48.493355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:48.493433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:48.493531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:48.493889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:48.572838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:48.572900Z node 1 :IMPORT
WARN: Table profiles were not loaded 2025-03-28T09:25:48.586109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:48.586318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:48.586474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:48.591914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:48.592094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:48.592588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.592756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:48.594236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.595304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:48.595357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.595491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:48.595536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:48.595579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:48.595640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.602382Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:48.748811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:48.749082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.749310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:48.749605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:48.749669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.755635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.755788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:48.756002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.756055Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:48.756095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:48.756141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:48.761977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.762054Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:48.762094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:48.769721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.769805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.769850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.769907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.773775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:48.778954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:48.779203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:48.780334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:48.780487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:48.780534Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.780812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:48.780865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:48.781038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:48.781141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:48.783571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:48.783642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:48.783867Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:48.783915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:48.784329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:48.784378Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:48.784486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:48.784523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.784562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:48.784594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.784654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:48.784699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:48.784734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:48.784773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:48.784847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:48.784901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:48.784939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:48.786920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:48.787049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:48.787102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
940928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:49.941045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:49.941190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:49.941243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-03-28T09:25:49.941278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-03-28T09:25:49.941675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:25:49.941735Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 104:0 ProgressState 2025-03-28T09:25:49.941864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:25:49.941910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:25:49.941955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:25:49.941989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:25:49.942027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-28T09:25:49.942069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:25:49.942117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:25:49.942153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:25:49.942308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:49.942352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-03-28T09:25:49.942386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-28T09:25:49.942427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-28T09:25:49.943093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:49.943180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:49.943211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:25:49.943250Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:25:49.943294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:49.944367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:49.944449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:49.944481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:25:49.944513Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:25:49.944543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:49.944619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-03-28T09:25:49.946664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:49.946749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:49.948023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:25:49.948617Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-28T09:25:49.948813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:49.949096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:49.949325Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 Forgetting tablet 72075186233409546 2025-03-28T09:25:49.950847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:49.951097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:49.952348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:49.952403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:49.952517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409547 2025-03-28T09:25:49.953628Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:49.953686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:49.953764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:49.954037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:25:49.956031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:25:49.956108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:25:49.956220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:25:49.956266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:25:49.958112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:49.958211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-28T09:25:49.958580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-28T09:25:49.958629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-28T09:25:49.959247Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-28T09:25:49.959345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:25:49.959386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:2091:3695] TestWaitNotification: OK eventTxId 104 2025-03-28T09:25:49.968460Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:49.968715Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 256us result status StatusPathDoesNotExist 2025-03-28T09:25:49.968902Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty 
ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:49.969789Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:49.969963Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 187us result status StatusPathDoesNotExist 2025-03-28T09:25:49.970117Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop
>> TSchemeShardSubDomainTest::SchemeLimitsCreatePq
>> KqpProxy::DatabasesCacheForServerless [GOOD]
>> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD]
>> TestKinesisHttpProxy::ListShards
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:50.001092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:50.001216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:50.001262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:50.001316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:50.001363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:50.001394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:50.001453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:50.001554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s,
IsManualStartup# false 2025-03-28T09:25:50.001896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:50.088939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:50.089007Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:50.111603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:50.111955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:50.112170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:50.127052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:50.127359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:50.128097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:50.128320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:50.133062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:50.134506Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:50.134577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:50.134801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:50.134873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:50.134924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:50.135007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:50.147463Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:50.291330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:50.291618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:50.291859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:50.292179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:50.292247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:50.294966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-03-28T09:25:50.295119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:50.295326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:50.295383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:50.295422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:50.295458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:50.297638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:50.297715Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:50.297757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:50.299618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:50.299669Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:50.299710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:50.299757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:50.309662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:50.312098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:50.312321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:50.313423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:50.313567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:50.313619Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:50.313934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:50.314006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:50.314188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:50.314281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:50.316694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:50.316761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:50.316939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:50.316985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:50.317413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:50.317466Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:50.317571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:50.317611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:50.317653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:50.317686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:50.317724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:50.317766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:50.317801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:50.317833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:50.317917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:50.317981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:50.318018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:50.319959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:50.320090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:50.320127Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
TxId: 101, partId: 0, tablet: 72075186233409548 2025-03-28T09:25:50.453148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-28T09:25:50.453317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409546 2025-03-28T09:25:50.453387Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-03-28T09:25:50.453452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409546 shardIdx# 72057594046678944:1 at schemeshard# 72057594046678944 2025-03-28T09:25:50.462846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:50.466443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-03-28T09:25:50.466587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409547 2025-03-28T09:25:50.466653Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-03-28T09:25:50.466713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409547 shardIdx# 72057594046678944:2 at schemeshard# 72057594046678944 2025-03-28T09:25:50.467466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409548, partId: 0 2025-03-28T09:25:50.467630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409548 2025-03-28T09:25:50.467672Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-03-28T09:25:50.467705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2025-03-28T09:25:50.467743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2025-03-28T09:25:50.470912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:50.471086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:50.471184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:50.471237Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:50.471297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-03-28T09:25:50.471358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-03-28T09:25:50.471506Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:50.473301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-03-28T09:25:50.473461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-03-28T09:25:50.473811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:50.473943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:50.473994Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-28T09:25:50.474266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-28T09:25:50.474325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-28T09:25:50.474502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:50.474594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:25:50.476770Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:50.476822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:50.476996Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:50.477035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:25:50.477358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:50.477420Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:25:50.477550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:50.477591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:50.477628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:50.477662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 
2025-03-28T09:25:50.477715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:25:50.477765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:50.477814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:25:50.477846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:25:50.478113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:25:50.478177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 1 2025-03-28T09:25:50.478213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2025-03-28T09:25:50.478790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:50.478912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:50.478959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:50.479001Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-28T09:25:50.479040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:50.479116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-03-28T09:25:50.479158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:308:2299] 2025-03-28T09:25:50.487000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:50.487122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:50.487158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:315:2306] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-03-28T09:25:50.487802Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:50.488050Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 261us result status StatusSuccess 2025-03-28T09:25:50.488457Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DiskSpaceUsage >> TestKinesisHttpProxy::GoodRequestGetRecords >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TRegisterNodeOverDiscoveryService::ServerWithoutCertVerification_ClientDoesNotProvideClientCerts [GOOD] Test command err: 2025-03-28T09:24:42.221873Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788918479065074:2083];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:42.221926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ce/r3tmp/tmpYtq8Qs/pdisk_1.dat 2025-03-28T09:24:42.750973Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:42.770397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:42.770451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:42.774925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15456, node 1 2025-03-28T09:24:43.080643Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:43.080666Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:43.080689Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:43.080794Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14523 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:43.450538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:43.603707Z node 1 :TICKET_PARSER DEBUG: Ticket 4F075641AD80A3947E470107A50E76F7C9BCA8A3BC4D565C5E89196EAF52C958 (ipv6:[::1]:47074) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-28T09:24:43.710024Z node 1 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:47096) has now valid token of root@builtin 2025-03-28T09:24:43.832679Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-28T09:24:43.832726Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:24:43.832742Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:24:43.832792Z node 1 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-28T09:24:46.956966Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788935997558215:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:46.957050Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ce/r3tmp/tmpc45ddA/pdisk_1.dat 2025-03-28T09:24:47.152504Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:47.191210Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:47.191311Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:47.198414Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27153, node 4 2025-03-28T09:24:47.262286Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:47.262311Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:47.262318Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:47.262465Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23000 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:47.518010Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:47.614188Z node 4 :TICKET_PARSER DEBUG: Ticket 4F075641AD80A3947E470107A50E76F7C9BCA8A3BC4D565C5E89196EAF52C958 (ipv6:[::1]:38570) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-28T09:24:47.688627Z node 4 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:38586) has now valid token of root@builtin 2025-03-28T09:24:47.757276Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-28T09:24:47.757312Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:24:47.757321Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:24:47.757353Z node 4 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-28T09:24:51.700646Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788957486393219:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:51.711851Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ce/r3tmp/tmp2SSmYJ/pdisk_1.dat 2025-03-28T09:24:51.925417Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:51.968581Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:51.968674Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:51.975893Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17539, node 7 2025-03-28T09:24:52.120719Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:52.120745Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:52.120753Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:52.120898Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27695 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:52.464653Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:52.598655Z node 7 :TICKET_PARSER DEBUG: Ticket 78374494293F9972686C9CC605DECB18A61FCF470D80550E992EAC1C12097ADE (ipv6:[::1]:48844) has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2025-03-28T09:24:52.701537Z node 7 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:48870) has now valid token of root@builtin 2025-03-28T09:24:52.783241Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-28T09:24:52.783272Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:24:52.783281Z node 7 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:24:52.783311Z node 7 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-28T09:24:56.871752Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486788979261527939:2083];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:56.885979Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ce/r3tmp/tmpDJmoPh/pdisk_1.dat 2025-03-28T09:24:57.087229Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22164, node 10 2025-03-28T09:24:57.196708Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:57.196813Z node 10 :HIVE WARN: HIVE#7205759403796889 ... schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:17.314171Z node 19 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[19:7486789047989118128:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:17.314255Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:22.996709Z node 19 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:54614) has now valid token of root@builtin 2025-03-28T09:25:23.052876Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-28T09:25:23.052909Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:25:23.052920Z node 19 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:25:23.052959Z node 19 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-28T09:25:24.673236Z node 22 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7486789097806565954:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:24.673318Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ce/r3tmp/tmpXNdG1c/pdisk_1.dat 2025-03-28T09:25:24.886906Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:24.926671Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:24.926805Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:24.933089Z node 22 :HIVE WARN: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22058, node 22 2025-03-28T09:25:25.063475Z node 22 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:25.063506Z node 22 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:25.063518Z node 22 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:25.063690Z node 22 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14466 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:25:25.458102Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:29.670790Z node 22 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[22:7486789097806565954:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:29.670884Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:35.681086Z node 22 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:56404) has now valid token of root@builtin 2025-03-28T09:25:35.749449Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-28T09:25:35.749487Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:25:35.749498Z node 22 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:25:35.749533Z node 22 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-28T09:25:37.464052Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7486789153257975827:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:37.464145Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ce/r3tmp/tmpBp9JtM/pdisk_1.dat 2025-03-28T09:25:37.718562Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:37.794203Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:37.794313Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:37.807135Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31601, node 25 2025-03-28T09:25:37.881572Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:37.881600Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:37.881612Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:37.881814Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4506 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:38.299214Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:38.494216Z node 25 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:57314) has now valid token of root@builtin 2025-03-28T09:25:38.587316Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-28T09:25:38.587361Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:25:38.587375Z node 25 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:25:38.587419Z node 25 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator 2025-03-28T09:25:43.650746Z node 28 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7486789178071699400:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:43.651500Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ce/r3tmp/tmpP5lgh7/pdisk_1.dat 2025-03-28T09:25:44.040092Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:44.088130Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:44.088255Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:44.092959Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25818, node 28 2025-03-28T09:25:44.450050Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:44.450089Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:44.450103Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:44.450288Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12972 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:44.892009Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:45.101028Z node 28 :TICKET_PARSER DEBUG: Ticket **** (B6C6F477) (ipv6:[::1]:44920) has now valid token of root@builtin 2025-03-28T09:25:45.207139Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db /Root, token db , DomainLoginOnly 1 2025-03-28T09:25:45.207186Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:25:45.207201Z node 28 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:25:45.207250Z node 28 :TICKET_PARSER ERROR: Ticket **** (0C093832): Could not find correct token validator >> TSchemeShardSubDomainTest::DeleteAdd >> TestYmqHttpProxy::TestReceiveMessageWithAttributes [GOOD] >> TSchemeShardSubDomainTest::CreateDropNbs >> TPersQueueTest::DirectReadNotCached [GOOD] >> TPersQueueTest::DirectReadBadCases >> TestYmqHttpProxy::TestGetQueueUrlWithIAM >> TSchemeShardSubDomainTest::RestartAtInFly >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] >> TSchemeShardSubDomainTest::CreateForceDropSolomon >> TPersQueueTest::Init [GOOD] >> TPersQueueTest::NoDecompressionMemoryLeaks >> TSchemeShardSubDomainTest::SchemeLimitsRejects >> TSchemeShardSubDomainTest::SetSchemeLimits >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::DatabasesCacheForServerless [GOOD] Test command err: 2025-03-28T09:25:24.685119Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789098520769846:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:24.685206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:24.799421Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789099278414396:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:24.799490Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:24.827033Z node 3 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486789096160839691:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:24.827079Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:24.885467Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486789098221256988:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:24.885507Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:25.009844Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486789103809524220:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:25.009968Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f66/r3tmp/tmppcmLLQ/pdisk_1.dat 2025-03-28T09:25:25.700779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:25:25.806141Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:25.813212Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:25:25.817235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:25.817328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:25.818826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:25.818889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:25.819066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:25.819142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:25.819261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:25.819339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:25.819446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:25.819499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:25.825859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:25.827742Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-03-28T09:25:25.827827Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 
3 2025-03-28T09:25:25.830252Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:25:25.831456Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:25:25.831503Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-03-28T09:25:25.831668Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:25.831918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:25.832858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:25.833053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63728 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:25:26.258204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:29.299861Z node 5 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:29.303784Z node 5 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:29.315268Z node 5 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-28T09:25:29.315314Z node 5 :KQP_PROXY DEBUG: Updated table service config. 2025-03-28T09:25:29.315334Z node 5 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:29.315388Z node 5 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:29.315542Z node 5 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:29.315577Z node 5 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:29.338709Z node 5 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:29.356036Z node 5 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-03-28T09:25:29.356046Z node 5 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-03-28T09:25:29.356089Z node 5 :KQP_PROXY DEBUG: Table result_sets updater. 
Full table path:/dc-1/.metadata/result_sets 2025-03-28T09:25:29.356092Z node 5 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:29.356235Z node 5 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-03-28T09:25:29.356241Z node 5 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-03-28T09:25:29.356264Z node 5 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-03-28T09:25:29.356345Z node 5 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-03-28T09:25:29.356354Z node 5 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-03-28T09:25:29.356368Z node 5 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-03-28T09:25:29.401214Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:29.421796Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:29.476636Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:29.486159Z node 3 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-28T09:25:29.489145Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. Describe result: PathErrorUnknown 2025-03-28T09:25:29.489154Z node 3 :KQP_PROXY NOTICE: Table script_executions updater. Creating table 2025-03-28T09:25:29.489185Z node 3 :KQP_PROXY DEBUG: Table script_executions updater. Full table path:/dc-1/.metadata/script_executions 2025-03-28T09:25:29.490782Z node 3 :KQP_PROXY DEBUG: Updated table service config. 2025-03-28T09:25:29.490812Z node 3 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:29.490872Z node 3 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:25:29.491440Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:29.491480Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:29.491583Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:29.491810Z node 3 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:25:29.491959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:1, at schemeshard: 72057594046644480 2025-03-28T09:25:29.497022Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Describe result: PathErrorUnknown 2025-03-28T09:25:29.497047Z node 3 :KQP_PROXY NOTICE: Table script_execution_leases updater. Creating table 2025-03-28T09:25:29.497092Z node 3 :KQP_PROXY DEBUG: Table script_execution_leases updater. Full table path:/dc-1/.metadata/script_execution_leases 2025-03-28T09:25:29.497218Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. Describe result: PathErrorUnknown 2025-03-28T09:25:29.497223Z node 3 :KQP_PROXY NOTICE: Table result_sets updater. Creating table 2025-03-28T09:25:29.497243Z node 3 :KQP_PROXY DEBUG: Table result_sets updater. 
Full table path:/dc-1/.metadata/result_sets 2025-03-28T09:25:29.521379Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:25:29.521510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2 ... anager/running_requests status PathErrorUnknown 2025-03-28T09:25:38.205478Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [6:7486789155675538022:2331], Successfully finished 2025-03-28T09:25:38.205531Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-28T09:25:38.236767Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:25:38.256582Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486789157042580145:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:38.257662Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:38.275032Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:38.275125Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:38.277693Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-03-28T09:25:38.278372Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:38.379414Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:38.379547Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:38.381402Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.381488Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.381531Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.381566Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.381624Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.381672Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.381730Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.381772Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.381818Z node 8 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.389929Z node 8 :HIVE WARN: HIVE#72075186224037888 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:38.508249Z node 8 :STATISTICS WARN: [72075186224037894] TTxInit::Complete. 
EnableColumnStatistics=false 2025-03-28T09:25:38.510312Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:38.769800Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:25:38.838457Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:38.838588Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:38.847574Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 7 Cookie 7 2025-03-28T09:25:38.851916Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:38.927145Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.927296Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.927419Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.927499Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.927570Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.927657Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.927736Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.928004Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.928083Z node 7 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:25:38.944764Z node 7 :HIVE WARN: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:38.944838Z node 7 :HIVE WARN: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:38.951588Z node 7 :HIVE WARN: HIVE#72075186224038889 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:39.105233Z node 7 :STATISTICS WARN: [72075186224038895] TTxInit::Complete. 
EnableColumnStatistics=false 2025-03-28T09:25:39.114337Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:39.260893Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:25:39.378068Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:39.491786Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7486789161092081307:2548], Database: /Root/test-serverless, Start database fetching 2025-03-28T09:25:39.491998Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7486789161092081307:2548], Database: /Root/test-serverless, Database info successfully fetched, serverless: 1 2025-03-28T09:25:39.768724Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486789138495668026:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:39.768820Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:42.107143Z node 8 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-28T09:25:42.107420Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7486789174222450098:2345], Start check tables existence, number paths: 2 2025-03-28T09:25:42.107779Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-28T09:25:42.107796Z node 8 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools were enabled 2025-03-28T09:25:42.108338Z node 8 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, node count: 3 2025-03-28T09:25:42.109148Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7486789174222450098:2345], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-28T09:25:42.109220Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7486789174222450098:2345], Describe table /Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-28T09:25:42.109265Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [8:7486789174222450098:2345], Successfully finished 2025-03-28T09:25:42.109312Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-28T09:25:42.834990Z node 7 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Started workload service initialization 2025-03-28T09:25:42.835893Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7486789173976983307:2371], Start check tables existence, number paths: 2 2025-03-28T09:25:42.836178Z node 7 :KQP_WORKLOAD_SERVICE TRACE: [WorkloadService] [Service] Updated node info, node count: 3 2025-03-28T09:25:42.836201Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Subscribed for config changes 2025-03-28T09:25:42.836222Z node 7 :KQP_WORKLOAD_SERVICE INFO: [WorkloadService] [Service] Resource pools were enabled 2025-03-28T09:25:42.838332Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7486789173976983307:2371],
Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2025-03-28T09:25:42.838399Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7486789173976983307:2371], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status PathErrorUnknown 2025-03-28T09:25:42.838427Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [TCleanupTablesActor] ActorId: [7:7486789173976983307:2371], Successfully finished 2025-03-28T09:25:42.838463Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2025-03-28T09:25:43.258210Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7486789157042580145:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:43.258305Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:49.496272Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-03-28T09:25:49.508053Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:25:49.508598Z node 6 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 7 2025-03-28T09:25:49.508913Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:25:49.522840Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NTQwNjdmYjMtNTM1OTAyOTctMzQ1N2YzZjktYjdkYzJlMTQ=, ActorId: [6:7486789155675538023:2332], ActorState: ReadyState, Session closed due to explicit close event 2025-03-28T09:25:49.522886Z node 6 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=6&id=NTQwNjdmYjMtNTM1OTAyOTctMzQ1N2YzZjktYjdkYzJlMTQ=, ActorId: [6:7486789155675538023:2332], ActorState: ReadyState, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:25:49.522915Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTQwNjdmYjMtNTM1OTAyOTctMzQ1N2YzZjktYjdkYzJlMTQ=, ActorId: [6:7486789155675538023:2332], ActorState: ReadyState, EndCleanup, isFinal: 1 2025-03-28T09:25:49.522934Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTQwNjdmYjMtNTM1OTAyOTctMzQ1N2YzZjktYjdkYzJlMTQ=, ActorId: [6:7486789155675538023:2332], ActorState: unknown state, Cleanup temp tables: 0 2025-03-28T09:25:49.522997Z node 6 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=6&id=NTQwNjdmYjMtNTM1OTAyOTctMzQ1N2YzZjktYjdkYzJlMTQ=, ActorId: [6:7486789155675538023:2332], ActorState: unknown state, Session actor destroyed >> TestKinesisHttpProxy::TestRequestWithIAM [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:51.665765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 
60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:51.665885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:51.665928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:51.665989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:51.666055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:51.666083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:51.666153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:51.666236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:51.666559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:51.760956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:51.761014Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:51.776492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:51.776762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:51.776959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:51.783761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:51.783988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:51.784585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:51.784804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:51.786608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:51.787898Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:51.787956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:51.788111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:51.788166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:51.788219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:51.788308Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:51.794361Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 
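The schemeshard trace below walks opId 1:0 through its internal TTxState values ("Change state for txid 1:0 2 -> 3", then "3 -> 128", then "128 -> 240"). A minimal sketch for recovering such state chains from a captured run log, assuming only the message format visible here; the script, its regex, and every name in it are illustrative helpers, not part of the test suite:

    import re
    import sys
    from collections import defaultdict

    # Matches schemeshard messages like:
    #   ... INFO: Change state for txid 1:0 2 -> 3
    STATE_RE = re.compile(r"Change state for txid (\d+:\d+) (\d+) -> (\d+)")

    def transitions(lines):
        # Ordered (src, dst) state pairs per operation id.
        chains = defaultdict(list)
        for line in lines:
            m = STATE_RE.search(line)
            if m:
                chains[m.group(1)].append((int(m.group(2)), int(m.group(3))))
        return chains

    if __name__ == "__main__":
        for op_id, steps in transitions(sys.stdin).items():
            chain = " -> ".join([str(steps[0][0])] + [str(dst) for _, dst in steps])
            print(f"{op_id}: {chain}")

Run as python3 txstates.py < ya_log.txt (the file name is hypothetical); on the fragment below it would print a line like "1:0: 2 -> 3 -> 128 -> 240", which makes a suboperation that stalls in an intermediate state easy to spot when a test such as SimultaneousCreateForceDrop hangs instead of finishing [GOOD].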
2025-03-28T09:25:51.913852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:51.914093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:51.914277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:51.914492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:51.914546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:51.916583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:51.916730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:51.916888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:51.916950Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:51.916987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:51.917017Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:51.918722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:51.918770Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:51.918803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:51.920327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:51.920368Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:51.920405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:51.920444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:51.929164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:51.934617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:51.934853Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:51.935964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:51.936116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:51.936165Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:51.936452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:51.936513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:51.936682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:51.936757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:51.940739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:51.940809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:51.940990Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:51.941065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:51.941509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:51.941559Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:51.941649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:51.941688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:51.941722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:51.941775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:51.941826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:51.941867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:51.941899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:51.941925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:51.942003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:51.942057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:51.942091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:51.943945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:51.944064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:51.944096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... AckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:52.174157Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2025-03-28T09:25:52.175020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-03-28T09:25:52.175384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-28T09:25:52.175962Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-28T09:25:52.176207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:52.176442Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-03-28T09:25:52.176649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.176858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:25:52.177367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:25:52.177556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409546 2025-03-28T09:25:52.178434Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 Forgetting tablet 72075186233409548 2025-03-28T09:25:52.180561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-03-28T09:25:52.180769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 
2] was 4 2025-03-28T09:25:52.181670Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-28T09:25:52.181817Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-03-28T09:25:52.181922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:52.182063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409551 2025-03-28T09:25:52.183635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-28T09:25:52.183765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2025-03-28T09:25:52.184503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:52.184552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:52.184696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409549 2025-03-28T09:25:52.185654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:52.185707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:52.185808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:52.187056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-03-28T09:25:52.187110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-03-28T09:25:52.187380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2025-03-28T09:25:52.189821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:25:52.189867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:25:52.189939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:25:52.189978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-28T09:25:52.190109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-28T09:25:52.190144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-03-28T09:25:52.190249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409551, at 
schemeshard: 72057594046678944 2025-03-28T09:25:52.192846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:25:52.192889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:25:52.193015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-28T09:25:52.193082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-28T09:25:52.193184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Failed to connect, to tablet: 72075186233409549, at schemeshard: 72057594046678944 2025-03-28T09:25:52.193370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:52.193463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-03-28T09:25:52.193726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-28T09:25:52.193771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-03-28T09:25:52.193883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:25:52.193906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:25:52.194350Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-28T09:25:52.194492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-28T09:25:52.194548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:618:2519] 2025-03-28T09:25:52.194731Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:25:52.194812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:52.194839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:618:2519] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-03-28T09:25:52.195345Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:52.195606Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 257us result status StatusPathDoesNotExist 2025-03-28T09:25:52.195887Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 
LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:52.196305Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:52.196468Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 172us result status StatusSuccess 2025-03-28T09:25:52.196914Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] >> TSchemeShardSubDomainTest::SchemeQuotas >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] |88.3%| [TA] $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [TA] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:52.036171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:52.036287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:52.036332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:52.036375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:52.036423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:52.036460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:52.036524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:52.036639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:52.036968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:52.119683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:52.119741Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:52.135379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:52.135599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:52.135707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:52.140625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:52.140792Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:52.141270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.141417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:52.142760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.143717Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:52.143756Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.143872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-03-28T09:25:52.143911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:52.143941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:52.144001Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.149310Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:52.265508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:52.265746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.265962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:52.266246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:52.266302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.273682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.273836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:52.274040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.274091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:52.274142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:52.274192Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:52.276761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.276832Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:52.276868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:52.278505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.278540Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.278567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-03-28T09:25:52.278599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.281666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:52.283735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:52.283868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:52.284615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.284737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:52.284782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:52.285016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:52.285052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:52.285174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:52.285270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:52.287180Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:52.287224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:52.287407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.287466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:52.287851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.287910Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:52.287999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:52.288029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.288063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
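In the entries that follow, an operation is only fully finished once every scheme-board publication it announced ("Publication still in progress, tx: 101, publications: 2") has been acknowledged via TEvUpdateAck and closed out with "Publication complete, notify & remove". A companion sketch, under the same log-format assumption as the script above, that flags transactions whose completion line never appears in a captured run:

    import re
    import sys

    # Announcement and completion messages as they appear in this trace.
    PENDING_RE = re.compile(r"Publication still in progress, tx: (\d+), publications: (\d+)")
    DONE_RE = re.compile(r"Publication complete, notify & remove, at schemeshard: \d+, txId: (\d+)")

    def unacknowledged(lines):
        # Pending publication counts per txId, dropped once completion is logged.
        pending = {}
        for line in lines:
            m = PENDING_RE.search(line)
            if m:
                pending[m.group(1)] = int(m.group(2))
                continue
            m = DONE_RE.search(line)
            if m:
                pending.pop(m.group(1), None)
        return pending

    if __name__ == "__main__":
        for tx, left in unacknowledged(sys.stdin).items():
            print(f"txId {tx}: announced {left} publication(s), completion never logged")

Here, for example, txId 101 announces two publications and its "Publication complete, notify & remove" line shows up a few entries later, so it would not be reported.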
2025-03-28T09:25:52.288091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.288139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:52.288186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.288222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:52.288266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:52.288333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:52.288384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:52.288418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:52.290089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:52.290187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:52.290234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... d: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:25:52.709155Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:52.709204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:52.709404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:25:52.709527Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.709574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:25:52.709618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-03-28T09:25:52.709935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.710000Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:25:52.710102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:52.710156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:52.710199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:52.710232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:52.710269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:25:52.710310Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:52.710349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:25:52.710383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:25:52.710621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2025-03-28T09:25:52.710665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2025-03-28T09:25:52.710727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2025-03-28T09:25:52.710758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-03-28T09:25:52.711604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:52.712938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:52.712990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:52.713059Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-03-28T09:25:52.713098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:52.713940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:52.714042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:52.714096Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:52.714130Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-28T09:25:52.714160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-03-28T09:25:52.714242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-03-28T09:25:52.714289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:903:2736] 2025-03-28T09:25:52.720151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:52.720538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:52.720608Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:52.720640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:904:2737] TestWaitNotification: OK eventTxId 101 2025-03-28T09:25:52.721206Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:52.721459Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_0" took 241us result status StatusSuccess 2025-03-28T09:25:52.721917Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:52.722456Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:52.722625Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_1" took 194us result status StatusSuccess 2025-03-28T09:25:52.722996Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_1" PathDescription { Self { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 
Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 4 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:52.723512Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:52.723688Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomains" took 189us result status StatusSuccess 2025-03-28T09:25:52.724062Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains" PathDescription { Self { Name: "SubDomains" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestKinesisHttpProxy::TestRequestNoAuthorization ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:51.614304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: 
Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:51.614419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:51.614459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:51.614505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:51.614544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:51.614572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:51.614625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:51.614838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:51.615170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:51.710273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:51.710335Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:51.728852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:51.729142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:51.729333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:51.735366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:51.735620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:51.736225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:51.736444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:51.738352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:51.739671Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:51.739730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:51.739890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:51.739956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:51.740002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:51.740079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:51.746634Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 
72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:51.893552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:51.893796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:51.894019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:51.894271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:51.894322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:51.896597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:51.896757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:51.896960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:51.897025Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:51.897059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:51.897091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:51.899051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:51.899104Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:51.899140Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:51.900872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:51.900912Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:51.900951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:51.900997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:51.904647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:51.906554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:51.906728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:51.907833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:51.907976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:51.908026Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:51.908286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:51.908341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:51.908504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:51.908591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:51.910499Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:51.910557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:51.910752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:51.910789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:51.911199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:51.911245Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:51.911334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:51.911365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:51.911412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:51.911452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:51.911503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:51.911543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:51.911580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:51.911607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:51.911674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:51.911727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:51.911758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:51.913395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:51.913499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:51.913532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 52.737936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.738071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:52.738128Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 5000003, at tablet: 72057594046678944 2025-03-28T09:25:52.738230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-03-28T09:25:52.738668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2025-03-28T09:25:52.738858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409550 TxId: 104 Status: OK 2025-03-28T09:25:52.738939Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409550 TxId: 104 Status: OK 2025-03-28T09:25:52.738989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-03-28T09:25:52.739043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-03-28T09:25:52.739363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-03-28T09:25:52.739434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409551 TxId: 104 Status: OK 2025-03-28T09:25:52.739467Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409551 TxId: 104 Status: OK 2025-03-28T09:25:52.739484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-03-28T09:25:52.739519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-28T09:25:52.745624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.746079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 FAKE_COORDINATOR: Erasing txId 104 2025-03-28T09:25:52.838643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2025-03-28T09:25:52.838847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2025-03-28T09:25:52.838925Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2025-03-28T09:25:52.838977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.839014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-03-28T09:25:52.839074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-28T09:25:52.839583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2025-03-28T09:25:52.839733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2025-03-28T09:25:52.839786Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2025-03-28T09:25:52.839828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.839852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-03-28T09:25:52.840023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-03-28T09:25:52.840182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:52.840254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-28T09:25:52.844199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 
72057594046678944 2025-03-28T09:25:52.844911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.845280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:52.845331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:52.845482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:25:52.845648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.845678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:335:2311], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-03-28T09:25:52.845732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:335:2311], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-03-28T09:25:52.846246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.846302Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-28T09:25:52.846405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:25:52.846438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:25:52.846480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:25:52.846510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:25:52.846550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-28T09:25:52.846618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:25:52.846659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:25:52.846721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:25:52.846898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-03-28T09:25:52.846946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2025-03-28T09:25:52.846981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-28T09:25:52.847004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-28T09:25:52.847896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:52.848016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:52.848047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 
72057594046678944, txId: 104 2025-03-28T09:25:52.848085Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:25:52.848120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:25:52.848615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:52.848688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:25:52.848708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:25:52.848734Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-28T09:25:52.848754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-28T09:25:52.848795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-03-28T09:25:52.858664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:25:52.859230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:52.233063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:52.233182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:52.233225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:52.233272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:52.233324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:52.233352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:52.233410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-03-28T09:25:52.233492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:52.233868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:52.323271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:52.323323Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:52.345816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:52.346084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:52.346251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:52.351960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:52.352168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:52.352882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.353090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:52.354963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.356285Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:52.356350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.356519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:52.356595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:52.356640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:52.356721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.363699Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:52.506725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:52.506966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.507199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:52.507445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:52.507518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.515019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.515187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:52.515420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.515485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:52.515539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:52.515574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:52.518182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.518252Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:52.518309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:52.523581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.523644Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.523711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:52.523767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.527558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:52.532874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:52.533099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:52.534261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.534418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:52.534487Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:52.534822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-03-28T09:25:52.534891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:52.535082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:52.535168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:52.547828Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:52.547913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:52.548120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.548164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:52.548601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.548672Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:52.548774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:52.548821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.548888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:52.548926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.548967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:52.549012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.549051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:52.549082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:52.549168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:52.549230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:52.549270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:52.551485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:52.551636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:52.551675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
6316545 cookie: 0:102 msg type: 269090816 2025-03-28T09:25:53.021791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000004 2025-03-28T09:25:53.022078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:53.022186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:53.022230Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-03-28T09:25:53.022600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-03-28T09:25:53.022670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2025-03-28T09:25:53.022873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:53.022969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-03-28T09:25:53.023026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:25:53.024941Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:53.024980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:53.025111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:25:53.025206Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.025233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-28T09:25:53.025276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-03-28T09:25:53.025321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.025357Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:25:53.025442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:25:53.025472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:53.025520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:25:53.025558Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:53.025595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-28T09:25:53.025636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:25:53.025677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:25:53.025713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:25:53.025921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2025-03-28T09:25:53.025972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-28T09:25:53.026029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-03-28T09:25:53.026071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-03-28T09:25:53.026878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:53.026965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:53.027000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:25:53.027047Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-28T09:25:53.027112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:53.028185Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:53.028273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:53.028315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:25:53.028344Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-28T09:25:53.028372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2025-03-28T09:25:53.028448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-28T09:25:53.031675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:25:53.032001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 
TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:25:53.032214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:25:53.032242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:25:53.032625Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:25:53.032730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:53.032777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:986:2809] TestWaitNotification: OK eventTxId 102 2025-03-28T09:25:53.033240Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:53.033464Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 255us result status StatusSuccess 2025-03-28T09:25:53.033797Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:53.034308Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:53.034446Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 150us result status StatusSuccess 2025-03-28T09:25:53.034792Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:52.455135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:52.455249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:52.455324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:52.455380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:52.455427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:52.455468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:52.455554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:52.455624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:52.455934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:52.537692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:52.537739Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-28T09:25:52.552743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:52.553006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:52.553162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:52.558859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:52.559069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:52.559690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.559899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:52.562512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.563851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:52.563912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.564088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:52.564150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:52.564202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:52.564285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.572039Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:52.693400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:52.693635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.693856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:52.694134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:52.694193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.696320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.696456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:52.696637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.696706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:52.696745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:52.696774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:52.698474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.698528Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:52.698559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:52.700217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.700265Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.700304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:52.700364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.703858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:52.705542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:52.705757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:52.706813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.706936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:52.706976Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:52.707220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:52.707270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:52.707433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:52.707542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:25:52.709334Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:52.709396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:52.709581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.709622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:52.710045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.710098Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:52.710179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:52.710216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.710250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:52.710280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.710312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:52.710353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.710386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:52.710415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:52.710476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:52.710535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:52.710567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:52.712376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:52.712478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:52.712512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2025-03-28T09:25:52.965132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:52.965155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:25:52.965180Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-28T09:25:52.965208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:25:52.966610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:52.966709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:25:52.966748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:25:52.966790Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:25:52.966822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:52.966889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-28T09:25:52.968527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:52.968574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:52.968610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:52.968639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:52.969920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:25:52.970452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:25:52.970666Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-03-28T09:25:52.971591Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2025-03-28T09:25:52.971811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.972098Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:52.972496Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-28T09:25:52.972656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:25:52.972868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:25:52.973264Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409547 2025-03-28T09:25:52.973401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:52.973532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409549 2025-03-28T09:25:52.974534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-28T09:25:52.974673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:25:52.976439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:52.976506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:25:52.976577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:52.978147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:52.978201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:52.978316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:52.978980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:25:52.986116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:25:52.986180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:25:52.986519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:25:52.986554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-28T09:25:52.991362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 
2025-03-28T09:25:52.991422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:25:52.991594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-28T09:25:52.991643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-28T09:25:52.992249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:52.992376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:52.992420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:52.992490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:52.992706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:52.995537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:25:52.995833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:25:52.995887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:25:52.996350Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:25:52.996445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:52.996477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:535:2489] TestWaitNotification: OK eventTxId 102 2025-03-28T09:25:53.013824Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:53.014015Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 220us result status StatusPathDoesNotExist 2025-03-28T09:25:53.014165Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: 
false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:53.015055Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:53.015227Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 146us result status StatusPathDoesNotExist 2025-03-28T09:25:53.015369Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:52.618995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:52.619105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:52.619155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:52.619201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:52.619245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:52.619271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:52.619335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:52.619413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:52.619730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:52.702625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:52.702675Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-28T09:25:52.716866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:52.717198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:52.717371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:52.723015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:52.723229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:52.723894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.724128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:52.727473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.728753Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:52.728822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.728983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:52.729039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:52.729080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:52.729148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.735014Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:52.870415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:52.870643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.870895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:52.871158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:52.871208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.874983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.875150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:52.875367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.875419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:52.875459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:52.875506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:52.877465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.877523Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:52.877557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:52.879140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.879185Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.879227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:52.879271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.883052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:52.884845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:52.885090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:52.886225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.886342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:52.886393Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:52.886653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:52.886722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:52.886884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:52.886991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:25:52.890847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:52.890916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:52.891087Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.891143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:52.891628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.891681Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:52.891768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:52.891801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.891837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:52.891865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.891905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:52.891941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.891974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:52.891999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:52.892060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:52.892129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:52.892173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:52.893865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:52.893971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:52.894005Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Seconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:53.046343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:53.046384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:53.046419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:53.046454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:53.046487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:53.046565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:53.046838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:53.047139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:53.062968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:53.064229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:53.064396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:53.064509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:53.064537Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:53.064776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:53.065433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:53.065548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:53.065619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.065708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.065899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-28T09:25:53.066185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.066267Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-28T09:25:53.066461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.066558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.066708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-03-28T09:25:53.066752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:53.066783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:53.066805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:53.066900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.067044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.067249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-03-28T09:25:53.067576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.067688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.068050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.068150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.068363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.068440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.068554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.068726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.068797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.068943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.069197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.069345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.069389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.069433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.078653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:53.078754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.079320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:53.079397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:53.079435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:53.082203Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 100 2025-03-28T09:25:53.134841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-28T09:25:53.134920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 Leader for TabletID 72057594046678944 is [1:458:2408] sender: [1:520:2058] recipient: [1:15:2062] 2025-03-28T09:25:53.135640Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-28T09:25:53.135745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-28T09:25:53.135800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:518:2455] TestWaitNotification: OK eventTxId 100 2025-03-28T09:25:53.136269Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:53.136470Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 219us result status StatusSuccess 2025-03-28T09:25:53.136877Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:53.137323Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:53.137465Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 146us result status StatusSuccess 2025-03-28T09:25:53.137759Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] >> TopicService::DifferentConsumers_TheRangesOverlap [GOOD] >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:53.361577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:53.361667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:53.361700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:53.361736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:53.361774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:53.361811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:53.361869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:53.361945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, 
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:53.362249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:53.445167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:53.445226Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:53.461325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:53.461635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:53.461812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:53.471618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:53.471897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:53.472597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:53.472824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:53.475101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.476449Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:53.476514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.476704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:53.476780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:53.476831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:53.476942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.484947Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:53.614018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:53.614257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.614455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:53.614665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:53.614740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.617080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:53.617229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:53.617427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.617478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:53.617524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:53.617557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:53.619630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.619692Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:53.619942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:53.621686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.621743Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.621781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:53.621829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:53.625633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:53.627620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:53.627799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:53.629004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:53.629154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:53.629208Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:53.629512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:53.629575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-03-28T09:25:53.629744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:53.629827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:53.636187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:53.636288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:53.636490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.636542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:53.636983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.637042Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:53.637146Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:53.637181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:53.637245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:53.637279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:53.637338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:53.637383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:53.637422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:53.637451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:53.637533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:53.637598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:53.637632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:53.639658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:53.639821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:53.639861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-03-28T09:25:53.868357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-03-28T09:25:53.869007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:53.869131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:53.869181Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-28T09:25:53.869489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-03-28T09:25:53.869541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2025-03-28T09:25:53.869716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:53.869782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:53.869845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 100 2025-03-28T09:25:53.872617Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:53.872679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:53.872856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:53.872985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.873020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:335:2311], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-03-28T09:25:53.873054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:335:2311], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-03-28T09:25:53.873352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.873387Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-03-28T09:25:53.873477Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-28T09:25:53.873509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:25:53.873572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part 
operation is done id#100:0 progress is 1/1 2025-03-28T09:25:53.873608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:25:53.873640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-03-28T09:25:53.873693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:25:53.873739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-03-28T09:25:53.873773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-03-28T09:25:53.873992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:53.874034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-03-28T09:25:53.874068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-28T09:25:53.874099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-28T09:25:53.876148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:53.876285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:53.876341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-03-28T09:25:53.876382Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:25:53.876422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:53.877272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:53.877358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:25:53.877388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-03-28T09:25:53.877425Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:25:53.877462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:53.877526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-03-28T09:25:53.882392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-28T09:25:53.882546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-03-28T09:25:53.882872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-28T09:25:53.882915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-03-28T09:25:53.883368Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-28T09:25:53.883490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-28T09:25:53.883560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:481:2429] TestWaitNotification: OK eventTxId 100 2025-03-28T09:25:53.884127Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:53.884331Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 228us result status StatusSuccess 2025-03-28T09:25:53.884705Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 3 ShardsInside: 2 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 DatabaseQuotas { data_stream_shards_quota: 3 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:53.885648Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:53.885810Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 213us result status StatusSuccess 2025-03-28T09:25:53.886148Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 3 ShardsInside: 0 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::DeclareAndDelete >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:53.689625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:53.689735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:53.689783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:53.689827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:53.689879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:53.689910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:53.689960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:53.690026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:53.690368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:53.773719Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:53.773772Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:53.790894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:53.791247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:53.791423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:53.798016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:53.798265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:53.799024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:53.799261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:53.802589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.803913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:53.803988Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.804155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:53.804204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:53.804248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:53.804323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.811525Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:53.951619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:53.951903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.952150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:53.952448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:53.952517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.959645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:53.959818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:53.960049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.960107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:53.960147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:53.960193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:53.962595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.962662Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:53.962746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:53.964696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.964748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.964787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:53.964836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:53.968749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:53.970758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:53.970968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:53.972172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:53.972321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:53.972382Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:53.972670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:53.972752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:53.972923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:53.973005Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:53.976125Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:53.976176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:53.976368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.976413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:53.976750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.976786Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:53.976865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:53.976893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:53.976920Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:53.976944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:53.976973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:53.977006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:53.977032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:53.977055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:53.977120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:53.977163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:53.977192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:53.978457Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:53.978561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:53.978589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
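[editor's note] The trace around this point shows schemeshard's publish/ack bookkeeping: every path touched by a transaction is announced to the scheme board, the "Publication in-flight, count: N" counter drops by one per TEvUpdateAck, and the operation is only retired on "Publication complete, notify & remove". Purely as an illustration of that counting pattern — the real logic lives in TTxAckPublishToSchemeBoard, and all types below are hypothetical simplifications — a minimal sketch:

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <vector>

    // Hypothetical, simplified model of the ack bookkeeping seen in the log:
    // a transaction stays "in flight" until every published path version is acked.
    struct TPathVersion { uint64_t PathId; uint64_t Version; };

    class TPublicationTracker {
    public:
        void StartPublication(uint64_t txId, const std::vector<TPathVersion>& paths) {
            InFlight[txId] = paths.size();  // "Publication still in progress, publications: N"
        }

        // Mirrors "Handle TEvUpdateAck": one ack per published (path, version) pair.
        void OnUpdateAck(uint64_t txId) {
            auto it = InFlight.find(txId);
            if (it == InFlight.end()) return;      // unknown or already completed tx
            if (--it->second > 0) {
                std::cout << "Publication in-flight, count: " << it->second << "\n";
                return;
            }
            InFlight.erase(it);                    // "Publication complete, notify & remove"
            std::cout << "Publication complete, txId: " << txId << "\n";
        }

    private:
        std::map<uint64_t, size_t> InFlight;       // txId -> outstanding acks
    };

    int main() {
        TPublicationTracker tracker;
        tracker.StartPublication(100, {{1, 5}, {2, 3}});  // two paths, as in the trace
        tracker.OnUpdateAck(100);  // count drops to 1
        tracker.OnUpdateAck(100);  // complete
    }

This is only the counting skeleton; the production code additionally persists the ack state and decrements path refcounts, as the surrounding DecrementPathDbRefCount lines show. [end note]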
:54.029978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:54.034695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.034772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.034828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-03-28T09:25:54.034880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-03-28T09:25:54.035102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:54.039859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-03-28T09:25:54.040038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-03-28T09:25:54.040427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:54.040547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:54.040601Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-28T09:25:54.040881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-28T09:25:54.040940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-28T09:25:54.041124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:54.041193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:54.041256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:25:54.043779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:54.043816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:54.043948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 
72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:54.044054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:54.044092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:25:54.044138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:25:54.044196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.044238Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:25:54.044346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:54.044381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:54.044426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:54.044462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:54.044521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:25:54.044567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:54.044625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:25:54.044660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:25:54.044743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:54.044798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-28T09:25:54.044838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-28T09:25:54.044868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-28T09:25:54.046154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:54.046256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:54.046310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:54.046350Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:25:54.046391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:54.047578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 
101 2025-03-28T09:25:54.047657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:54.047686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:54.047713Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:25:54.047743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:54.047830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:25:54.050324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:54.051216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-03-28T09:25:54.054115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:54.054320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: /MyRoot/SomeDatabase, opId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.054497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, at schemeshard: 72057594046678944 2025-03-28T09:25:54.058549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2" TxId: 102 SchemeshardId: 72057594046678944 PathId: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:54.058798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. 
Existing storage kinds are: pool-kind-1, pool-kind-2, operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-03-28T09:25:54.059175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:25:54.059239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-03-28T09:25:54.059358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:25:54.059401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:25:54.059823Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:25:54.059959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:54.060010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:313:2304] 2025-03-28T09:25:54.060166Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:25:54.060255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:54.060291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:313:2304] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:53.136730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:53.136853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:53.136894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:53.136958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:53.137003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:53.137030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:53.137104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:53.137186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 
604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:53.137535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:53.221550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:53.221606Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:53.235306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:53.235573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:53.235745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:53.241930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:53.242115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:53.242592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:53.242804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:53.244722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.245831Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:53.245886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.246004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:53.246044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:53.246074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:53.246132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.252392Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:53.376122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:53.376389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.376608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:53.376867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:53.376923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.379437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:53.379598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:53.379813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.379865Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:53.379900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:53.379942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:53.382186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.382248Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:53.382284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:53.384262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.384323Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.384365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:53.384409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:53.388331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:53.390331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:53.390508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:53.391710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:53.391858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:53.391925Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:53.392213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:53.392270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at 
tablet# 72057594046678944 2025-03-28T09:25:53.392435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:53.392510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:53.394551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:53.394608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:53.394810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.394848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:53.395279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.395321Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:53.395406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:53.395435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:53.395467Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:53.395511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:53.395545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:53.395584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:53.395619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:53.395655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:53.395720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:53.395773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:53.395810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:53.397593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:53.397704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:53.397734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
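[editor's note] The tail of this test's log (below) is the force-drop cleanup sweep: for each shard of the dropped subdomain the schemeshard forgets the tablet, erases its shardIdx record, and closes the pipe to it. A hypothetical sketch of that sweep, using made-up container types rather than the real schemeshard structures:

    #include <cstdint>
    #include <iostream>
    #include <map>

    // Hypothetical model of the force-drop sweep in the log: every shard owned by
    // the dropped subdomain is forgotten, its shardIdx record erased, its pipe closed.
    struct TShardInfo { uint64_t TabletId; };

    void ForceDropCleanup(std::map<uint64_t, TShardInfo>& shards /* shardIdx -> info */) {
        for (auto it = shards.begin(); it != shards.end(); ) {
            std::cout << "Forgetting tablet " << it->second.TabletId << "\n";
            std::cout << "Deleted shardIdx " << it->first << "\n";
            std::cout << "Close pipe to deleted shardIdx " << it->first
                      << " tabletId " << it->second.TabletId << "\n";
            it = shards.erase(it);  // local record removed; hive handles the tablet itself
        }
    }

    int main() {
        std::map<uint64_t, TShardInfo> shards{{1, {72075186233409546}}, {2, {72075186233409547}}};
        ForceDropCleanup(shards);
    }

Note the log interleaves these entries across shards in no particular order, since the acks arrive asynchronously; the sequential loop above is only the bookkeeping shape. [end note]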
ablet 72075186233409569 Forgetting tablet 72075186233409568 Forgetting tablet 72075186233409573 Forgetting tablet 72075186233409577 Forgetting tablet 72075186233409546 Forgetting tablet 72075186233409581 2025-03-28T09:25:54.347422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:9 2025-03-28T09:25:54.347473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2025-03-28T09:25:54.348230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:13 2025-03-28T09:25:54.348266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2025-03-28T09:25:54.348327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:17 2025-03-28T09:25:54.348347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2025-03-28T09:25:54.351429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:21 2025-03-28T09:25:54.351469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2025-03-28T09:25:54.351667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:26 2025-03-28T09:25:54.351692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2025-03-28T09:25:54.352024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:30 2025-03-28T09:25:54.352052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2025-03-28T09:25:54.352149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:34 2025-03-28T09:25:54.352170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2025-03-28T09:25:54.352973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:25:54.353006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-28T09:25:54.354525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:38 2025-03-28T09:25:54.354562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2025-03-28T09:25:54.354662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:8 2025-03-28T09:25:54.354702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:8 tabletId 72075186233409553 2025-03-28T09:25:54.354761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:12 2025-03-28T09:25:54.354783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:12 tabletId 72075186233409557 2025-03-28T09:25:54.354846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-03-28T09:25:54.354865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2025-03-28T09:25:54.354910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:16 2025-03-28T09:25:54.354935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409561 2025-03-28T09:25:54.355036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:25 2025-03-28T09:25:54.355058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 
72057594046678944:25 tabletId 72075186233409570 2025-03-28T09:25:54.355128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:20 2025-03-28T09:25:54.355155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:20 tabletId 72075186233409565 2025-03-28T09:25:54.355219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:29 2025-03-28T09:25:54.355239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:29 tabletId 72075186233409574 2025-03-28T09:25:54.355285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:33 2025-03-28T09:25:54.355303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:33 tabletId 72075186233409578 2025-03-28T09:25:54.355340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:25:54.355379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:25:54.358959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:37 2025-03-28T09:25:54.359003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:37 tabletId 72075186233409582 2025-03-28T09:25:54.359073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:42 2025-03-28T09:25:54.359093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:42 tabletId 72075186233409587 2025-03-28T09:25:54.359145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-28T09:25:54.359164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-03-28T09:25:54.359204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:11 2025-03-28T09:25:54.359225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:11 tabletId 72075186233409556 2025-03-28T09:25:54.359319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-03-28T09:25:54.359344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2025-03-28T09:25:54.359432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:19 2025-03-28T09:25:54.359453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2025-03-28T09:25:54.359521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-03-28T09:25:54.359553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-03-28T09:25:54.359627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-03-28T09:25:54.359648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-03-28T09:25:54.362879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:28 2025-03-28T09:25:54.362923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2025-03-28T09:25:54.363028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:32 2025-03-28T09:25:54.363052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2025-03-28T09:25:54.363160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 
2025-03-28T09:25:54.363187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:25:54.363263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:36 2025-03-28T09:25:54.363304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2025-03-28T09:25:54.363491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:54.363595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:54.363679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:54.363723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:54.363806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:54.366145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-28T09:25:54.366436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:25:54.366477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:25:54.366974Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:25:54.367072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:25:54.367111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:2054:3654] TestWaitNotification: OK eventTxId 103 2025-03-28T09:25:54.367621Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:54.367817Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 229us result status StatusPathDoesNotExist 2025-03-28T09:25:54.367999Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: 
false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:54.368505Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:54.368647Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 135us result status StatusPathDoesNotExist 2025-03-28T09:25:54.368765Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TPersQueueTest::WriteExistingBigValue [GOOD] >> TPersQueueTest::WriteEmptyData >> TStoragePoolsQuotasTest::DifferentQuotasInteraction >> TSchemeShardSubDomainTest::CopyRejects >> TSchemeShardSubDomainTest::ForceDropTwice >> TSchemeShardSubDomainTest::Restart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:41.220540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:41.220689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.220742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:41.220804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:41.221688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:41.221746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:41.221828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:41.221912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2025-03-28T09:25:41.223079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:41.312990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:41.313044Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:41.327534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:41.327752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:41.327898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:41.333147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:41.333378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:41.333930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.334095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:41.335807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.337186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.337237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.337883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:41.337931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.338013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:41.338085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.343941Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:41.456495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:41.456780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.457040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:41.457303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:41.457362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.459884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 
PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.460014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:41.460206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.460350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:41.460385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:41.460418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:41.462400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.462460Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:41.462499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:41.467529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.467580Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.467633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.467676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.471351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:41.473299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:41.473512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:41.474558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:41.474709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:41.474759Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.475077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:41.475132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:41.475339Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:41.475410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:41.477338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:41.477396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:41.477577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:41.477620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:41.478046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:41.478087Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:41.478169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.478205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.478239Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:41.478266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.478299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:41.478338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:41.478402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:41.478442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:41.478506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:41.478554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:41.478589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:41.483631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.483756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:41.483790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-03-28T09:25:54.594648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2025-03-28T09:25:54.594840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 4294969694 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-03-28T09:25:54.594925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-03-28T09:25:54.595577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.595648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-03-28T09:25:54.595698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-28T09:25:54.595747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-03-28T09:25:54.595847Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-03-28T09:25:54.595982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2025-03-28T09:25:54.596107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:54.596173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:25:54.599713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.601998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.602373Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:54.602424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:54.602623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:25:54.602894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:54.602949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-28T09:25:54.602999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-03-28T09:25:54.603637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.603698Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:25:54.603810Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.603851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-03-28T09:25:54.603895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-03-28T09:25:54.604902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:25:54.605022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:25:54.605062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:25:54.605104Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10 2025-03-28T09:25:54.605147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:54.606641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:25:54.606778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:25:54.606810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:25:54.606842Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-28T09:25:54.606875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:25:54.606955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-28T09:25:54.611255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.611327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:54.611739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:25:54.611952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:25:54.611997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:25:54.612039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:25:54.612074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:25:54.612116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-28T09:25:54.612195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:403:2369] message: TxId: 103 2025-03-28T09:25:54.612247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:25:54.612288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:25:54.612325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:25:54.612426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:25:54.613036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:54.613079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:54.613630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:25:54.615457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:25:54.619816Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:54.619877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-03-28T09:25:54.620210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:25:54.620256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1341:3266] 2025-03-28T09:25:54.620971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 11 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-03-28T09:25:54.625491Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:54.625747Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 297us result status StatusSuccess 2025-03-28T09:25:54.626218Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { 
Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects
>> TopicService::UnknownConsumer
>> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD]
>> IndexBuildTest::RejectsCancel [GOOD]
>> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD]
>> TSchemeShardSubDomainTest::SchemeQuotas [GOOD]
>> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:55.321121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:55.321253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:55.321295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:55.321355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:55.321408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:55.321436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:55.321511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:55.321593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-28T09:25:55.321943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:55.410331Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:55.410390Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:55.425947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:55.426247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:55.426443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:55.432856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:55.433099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:55.433833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:55.434065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:55.436209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:55.437609Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:55.437678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:55.437862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:55.437928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:55.437984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:55.438077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.445694Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:55.570558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:55.570833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.571046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:55.571327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:55.571388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.573519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-03-28T09:25:55.573654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:55.573827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.573888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:55.573922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:55.573958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:55.575771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.575830Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:55.575868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:55.577400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.577442Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.577489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:55.577539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:55.585724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:55.587333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:55.587492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:55.588314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:55.588430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:55.588478Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:55.588688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:55.588734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:55.588901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:55.589003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:55.590960Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:55.591023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:55.591208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:55.591248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:55.591688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.591740Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:55.591837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:55.591864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:55.591907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:55.591937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:55.591972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:55.592010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:55.592045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:55.592074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:55.592138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:55.592204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:55.592240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:55.593978Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:55.594091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:55.594129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
44, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:55.659322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:55.659356Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:25:55.659381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:55.659447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-03-28T09:25:55.666237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-03-28T09:25:55.666412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-03-28T09:25:55.667895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:55.668025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:55.668095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropSubdomain TPropose operationId# 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:25:55.668163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:55.668201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:55.668342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2025-03-28T09:25:55.668484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:55.668572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:55.669127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:55.669486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:25:55.673691Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:55.673728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:55.673842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:55.673968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:55.674016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:25:55.674062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:25:55.674384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.674436Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-03-28T09:25:55.674493Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:55.674530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:55.674587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:55.674633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:55.674673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:25:55.674739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:55.674779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:25:55.674828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:25:55.674911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:55.674955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-28T09:25:55.675007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-28T09:25:55.675044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-28T09:25:55.675780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:55.675866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:55.675898Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:55.675930Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:25:55.675976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:55.676469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:55.676546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:55.676569Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:55.676589Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:25:55.676606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:55.676655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:25:55.677950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:55.678022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:55.678095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:55.678417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:55.678452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:55.678507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:55.680392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:55.682903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:55.682996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:55.683075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:25:55.683342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:25:55.683384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:25:55.683925Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:25:55.684015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:55.684052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy 
waiter [1:340:2331] TestWaitNotification: OK eventTxId 101 2025-03-28T09:25:55.684591Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:55.684798Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 224us result status StatusPathDoesNotExist 2025-03-28T09:25:55.685005Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
>> TSchemeShardSubDomainTest::TableDiskSpaceQuotas
>> DemoTx::Scenario_3 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsCancel [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:20.755826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:20.755942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.756013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:20.756051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:20.756093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:20.756119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:20.756188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.756259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:20.756566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:20.844383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:20.844439Z node 1
:IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:20.863455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:20.863755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:20.863970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:20.870260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:20.870473Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:20.871239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.871465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.875670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885434Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885529Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:20.885779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.885847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:20.885935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.893126Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:21.056706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:21.056971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.057199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:21.057433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:21.057498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.059653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.059780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:21.059953Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.060014Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:21.060062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:21.060099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:21.062003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.062074Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:21.062116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:21.063763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.063811Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.063866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.063910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.067612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:21.071388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:21.071597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:21.072844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.072971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:21.073029Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.073321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:21.073376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.073537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:21.073618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:21.075572Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:21.075648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:21.075815Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:21.075863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:21.076275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.076322Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:21.076414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.076452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.076492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.076527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.076563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:21.076601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.076648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:21.076677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:21.076770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:21.076818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:21.076924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:21.078793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.078908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.078951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1172:3025], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:25:55.871987Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-28T09:25:55.873712Z node 2 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-28T09:25:55.873793Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1172:3025], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 101, upload bytes: 1818, read rows: 101, read bytes: 1818 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:25:55.873826Z node 2 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-03-28T09:25:55.873986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:55.874028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1264:3106] TestWaitNotification: OK eventTxId 102 2025-03-28T09:25:55.876404Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: DoExecute TxId: 105 DatabaseName: "/MyRoot" IndexBuildId: 102 2025-03-28T09:25:55.876536Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CANCEL_INDEX_BUILD: Reply TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } BUILDINDEX RESPONSE CANCEL: NKikimrIndexBuilder.TEvCancelResponse TxId: 105 Status: PRECONDITION_FAILED Issues { message: "Index build process with id <102> has been finished already" severity: 1 } 2025-03-28T09:25:55.878677Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2025-03-28T09:25:55.878904Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply 
Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_DONE Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_batch_rows: 2 max_batch_bytes: 8388608 max_shards_in_flight: 2 max_retries_upload_batch: 50 } Progress: 100 } 2025-03-28T09:25:55.881047Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:55.881270Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table" took 248us result status StatusSuccess 2025-03-28T09:25:55.881679Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:55.883938Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:25:55.884179Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 263us result status StatusSuccess 2025-03-28T09:25:55.884896Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/index1" PathDescription { Self { Name: "index1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "index1" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "index" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "index" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 
ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:55.257543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:55.257654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:55.257690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:55.257735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:55.257779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:55.257803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:55.257860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:55.257931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:55.258259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:55.339157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:55.339211Z node 1 :IMPORT WARN: Table 
profiles were not loaded 2025-03-28T09:25:55.353959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:55.354236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:55.354398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:55.360674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:55.360899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:55.361546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:55.361754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:55.364236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:55.365595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:55.365657Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:55.365838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:55.365900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:55.365945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:55.366033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.372860Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:55.507058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:55.507296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.507536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:55.507783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:55.507833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.511696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:55.511833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:55.512047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.512100Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:55.512135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:55.512169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:55.514611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.514662Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:55.514714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:55.516507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.516612Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.516649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:55.516689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:55.525874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:55.528107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:55.528286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:55.529353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:55.529481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:55.529555Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:55.529814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:55.529866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:55.530025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:55.530113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:55.532374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:55.532443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:55.532606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:55.532664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:55.533049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:55.533098Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:55.533187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:55.533218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:55.533277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:55.533307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:55.533339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:55.533374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:55.533404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:55.533450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:55.533533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:55.533582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:55.533617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:55.541452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:55.541623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:55.541659Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
rd: 72057594046678944, cookie: 103 2025-03-28T09:25:56.063309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.063416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.063486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.063527Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-28T09:25:56.063581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:25:56.063609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:25:56.063633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:25:56.063651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:25:56.063673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-28T09:25:56.063708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:482:2437] message: TxId: 103 2025-03-28T09:25:56.063733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:25:56.063754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:25:56.063770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:25:56.063843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-28T09:25:56.065175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:25:56.065206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:483:2438] TestWaitNotification: OK eventTxId 103 2025-03-28T09:25:56.065530Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:56.065660Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 140us result status StatusSuccess 2025-03-28T09:25:56.066238Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: 
EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:56.066729Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:56.066898Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 179us result status StatusSuccess 2025-03-28T09:25:56.067162Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 
AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:56.067543Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:56.067679Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 123us result status StatusSuccess 2025-03-28T09:25:56.067967Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:56.068287Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:56.068404Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 112us result status StatusSuccess 2025-03-28T09:25:56.068700Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 200 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 
IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] >> TSchemeShardSubDomainTest::Restart [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop >> TestKinesisHttpProxy::CreateDeleteStream [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:53.822375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:53.822492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:53.822531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:53.822579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:53.822623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:53.822654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:53.822735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:53.822810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:53.823184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
2025-03-28T09:25:53.897202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:53.897253Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:53.911980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:53.912230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:53.912368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:53.923360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:53.923619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:53.924219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:53.924439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:53.927411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.928728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:53.928787Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.928952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:53.929015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:53.929073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:53.929153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.935970Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:54.081275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:54.081503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.081738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:54.082009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:54.082061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.087189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:54.087334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:54.087538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.087587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:54.087623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:54.087656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:54.090617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.090671Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:54.090722Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:54.095699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.095756Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.095783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:54.095830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:54.103376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:54.107890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:54.108033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:54.108990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:54.109151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:54.109201Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:54.109456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:54.109511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:54.109670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-03-28T09:25:54.109775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:54.115967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:54.116035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:54.116238Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:54.116284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:54.116682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:54.116737Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:54.116845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:54.116876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:54.116911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:54.116938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:54.116971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:54.117010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:54.117045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:54.117076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:54.117144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:54.117205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:54.117237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:54.119069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:54.119184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:54.119220Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
bletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:56.231205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/USER_0/Table11, opId: 137:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.231296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/USER_0/Table11, opId: 137:0, schema: Name: "Table11" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key", at schemeshard: 72057594046678944 2025-03-28T09:25:56.231659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_0, child name: Table11, child id: [OwnerId: 72057594046678944, LocalPathId: 10], at schemeshard: 72057594046678944 2025-03-28T09:25:56.231706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2025-03-28T09:25:56.231738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 1 2025-03-28T09:25:56.231818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 2 2025-03-28T09:25:56.231865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 137:0 1 -> 2 2025-03-28T09:25:56.232370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 137:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:56.232425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 137:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.232518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 11 2025-03-28T09:25:56.232569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 3 2025-03-28T09:25:56.235944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 137, response: Status: StatusAccepted TxId: 137 SchemeshardId: 72057594046678944 PathId: 10, at schemeshard: 72057594046678944 2025-03-28T09:25:56.236146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 137, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2025-03-28T09:25:56.236381Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:56.236442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:56.236641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2025-03-28T09:25:56.236756Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.236797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:1018:2879], at schemeshard: 72057594046678944, txId: 137, path id: 2 2025-03-28T09:25:56.236840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: 
[1:1018:2879], at schemeshard: 72057594046678944, txId: 137, path id: 10 2025-03-28T09:25:56.237285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.237342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 137:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046678944 2025-03-28T09:25:56.237565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 137:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-03-28T09:25:56.238662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2025-03-28T09:25:56.238811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18 PathOwnerId: 72057594046678944, cookie: 137 2025-03-28T09:25:56.238854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2025-03-28T09:25:56.238890Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18 2025-03-28T09:25:56.238925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 12 2025-03-28T09:25:56.248274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2025-03-28T09:25:56.248408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 10 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2025-03-28T09:25:56.248443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2025-03-28T09:25:56.248481Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 10], version: 1 2025-03-28T09:25:56.248518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 4 2025-03-28T09:25:56.248625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 137, ready parts: 0/1, is published: true 2025-03-28T09:25:56.249978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:10 msg type: 268697601 2025-03-28T09:25:56.250124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72057594037968897 2025-03-28T09:25:56.250165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2025-03-28T09:25:56.251230Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 
72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 2 } ObjectId: 10 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 2 } 2025-03-28T09:25:56.251534Z node 1 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 10, type DataShard, boot OK, tablet id 72075186233409555 2025-03-28T09:25:56.251671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-03-28T09:25:56.251714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2025-03-28T09:25:56.251873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 137:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-03-28T09:25:56.251934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-03-28T09:25:56.252002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2025-03-28T09:25:56.252099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 137:0 2 -> 3 2025-03-28T09:25:56.253620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2025-03-28T09:25:56.254923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2025-03-28T09:25:56.257098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 137:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.257288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.257338Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TConfigureParts operationId# 137:0 ProgressState at tabletId# 72057594046678944 2025-03-28T09:25:56.257411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 seqNo: 4:5 2025-03-28T09:25:56.257747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 968 RawX2: 4294970136 } TxBody: "\n\236\004\n\007Table11\020\n\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 
\020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020\n:\004\010\004\020\005" TxId: 137 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } SubDomainPathId: 2 2025-03-28T09:25:56.261150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:10 msg type: 269549568 2025-03-28T09:25:56.261269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72075186233409555 TestModificationResult got TxId: 137, wait until txId: 137 >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets >> TSchemeShardSubDomainTest::CopyRejects [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects >> TestKinesisHttpProxy::TestListStreamConsumers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:56.134720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:56.134832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:56.134878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:56.134925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:56.134981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:56.135017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:56.135095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:56.135180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
[RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:56.135566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:56.213573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:56.213620Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:56.227457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:56.227724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:56.227898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:56.233433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:56.233640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:56.234305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.234503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:56.236343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.237637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:56.237698Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.237856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:56.237917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:56.237963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:56.238047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.245297Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:56.380354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:56.380590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.380828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:56.381082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:56.381140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
2025-03-28T09:25:56.383629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.383772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:56.383965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.384088Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:56.384135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:56.384171Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:56.391557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.391633Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:56.391678Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:56.393590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.393641Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.393685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:56.393756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.403785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:56.406792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:56.407038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:56.408316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.408463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:56.408517Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:56.408801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:56.408861Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:56.409047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:56.409124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:56.411224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:56.411295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:56.411467Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.411541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:56.411929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.411982Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:56.412098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:56.412134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.412173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:56.412214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.412259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:56.412301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.412337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:56.412367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:56.412431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:56.412483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:56.412517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:56.414404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:56.414527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:56.414564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
8944 2025-03-28T09:25:56.686131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:6 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:56.686173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:56.686209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:56.687676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:25:56.688924Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 Forgetting tablet 72075186233409550 2025-03-28T09:25:56.690143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-03-28T09:25:56.690498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-28T09:25:56.691248Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-28T09:25:56.691806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.692020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:25:56.692478Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409546 2025-03-28T09:25:56.693592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:25:56.694013Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186233409551 2025-03-28T09:25:56.694213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:25:56.694412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:56.695299Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-28T09:25:56.695954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-03-28T09:25:56.696160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409548 2025-03-28T09:25:56.698274Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting 
tablet 72075186233409551 2025-03-28T09:25:56.699319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:56.699504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2025-03-28T09:25:56.700572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-28T09:25:56.700771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:56.701711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:56.701781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:56.701910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:56.703084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-03-28T09:25:56.703154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-03-28T09:25:56.703298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:56.703340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:56.703480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:56.706526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:25:56.706573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:25:56.706654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:25:56.706676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-28T09:25:56.706768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-28T09:25:56.706789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2025-03-28T09:25:56.709406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:25:56.709472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:25:56.709554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-28T09:25:56.709590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-28T09:25:56.709821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at 
schemeshard: 72057594046678944 2025-03-28T09:25:56.709906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2025-03-28T09:25:56.710202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:25:56.710259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2025-03-28T09:25:56.710387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:25:56.710409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:25:56.710910Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:25:56.711021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:56.711059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:670:2570] 2025-03-28T09:25:56.711259Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:25:56.711333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:25:56.711362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:670:2570] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-03-28T09:25:56.711862Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:56.712043Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 215us result status StatusPathDoesNotExist 2025-03-28T09:25:56.712217Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:56.712730Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:56.712924Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 150us result status StatusSuccess 2025-03-28T09:25:56.713292Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Restart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:56.169939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:56.170049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:56.170082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:56.170127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:56.170170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:56.170202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:56.170252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:56.170317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:56.170624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:56.255340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:56.255396Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-28T09:25:56.273737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:56.274022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:56.274209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:56.280596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:56.280803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:56.281508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.281713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:56.284178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.285415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:56.285471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.285626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:56.285682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:56.285722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:56.285800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.293286Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:56.428730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:56.428956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.429173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:56.429401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:56.429450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.434936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.435079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:56.435288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.435343Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:56.435376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:56.435406Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:56.438919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.438998Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:56.439042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:56.441496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.441582Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.441662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:56.441730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.445796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:56.447647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:56.447815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:56.448876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.449039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:56.449089Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:56.449378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:56.449444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:56.449619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:56.449714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:25:56.451773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:56.451864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:56.452114Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.452156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:56.452539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.452603Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:56.452694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:56.452725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.452762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:56.452800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.452837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:56.452879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.452916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:56.452947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:56.453012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:56.453073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:56.453109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:56.454973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:56.455082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:56.455118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
4046678944 2025-03-28T09:25:56.577613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:459:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:462:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:463:2058] recipient: [1:461:2413] Leader for TabletID 72057594046678944 is [1:464:2414] sender: [1:465:2058] recipient: [1:461:2413] 2025-03-28T09:25:56.617748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:56.617859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:56.617904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:56.617946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:56.617983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:56.618016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:56.618090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:56.618202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:56.618563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:56.637377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:56.638723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:56.638894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:56.639016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:56.639060Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:56.639327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:56.640179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:56.640271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:56.640356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.640458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.640737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-28T09:25:56.641046Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.641132Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-28T09:25:56.641352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.641448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.641579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-03-28T09:25:56.641638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:56.641696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:56.641720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:56.641822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.641931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.642148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-03-28T09:25:56.642507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.642720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.643176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.643268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.643489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.643615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.643737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.643951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.644054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.644237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.644483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.644699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.644773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.644829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 
2025-03-28T09:25:56.651860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:56.651940Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.652212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:56.652270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:56.652316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:56.654983Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:464:2414] sender: [1:524:2058] recipient: [1:15:2062] 2025-03-28T09:25:56.708243Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:56.708472Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 243us result status StatusSuccess 2025-03-28T09:25:56.708843Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:56.709386Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:56.709537Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 164us result status StatusSuccess 2025-03-28T09:25:56.709915Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] >> TestYmqHttpProxy::BillingRecordsForJsonApi >> TestYmqHttpProxy::TestSetQueueAttributes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:56.210938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:56.211062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:56.211102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:56.211151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:56.211195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:56.211225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:56.211282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:56.211357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:56.211720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-03-28T09:25:56.297260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:56.297316Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:56.311570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:56.311811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:56.311983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:56.317309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:56.317530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:56.318203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.318416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:56.320197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.321487Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:56.321547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.321713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:56.321788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:56.321837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:56.321930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.328774Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:56.459175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:56.459416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.459647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:56.459943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:56.460004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.462230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.462364Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:56.462536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.462587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:56.462624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:56.462657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:56.464465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.464525Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:56.464558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:56.466281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.466321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.466362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:56.466405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.475959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:56.479622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:56.479849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:56.480908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.481046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:56.481095Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:56.481381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:56.481437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:56.481622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-03-28T09:25:56.481690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:56.483773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:56.483852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:56.484031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.484072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:56.484475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.484529Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:56.484620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:56.484663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.484710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:56.484742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.484778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:56.484818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.484852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:56.484880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:56.484951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:56.485005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:56.485040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:56.486887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:56.486999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:56.487036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
28T09:25:57.022715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 106 2025-03-28T09:25:57.022745Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-28T09:25:57.022774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:25:57.024279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 106 2025-03-28T09:25:57.024370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 106 2025-03-28T09:25:57.024398Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-03-28T09:25:57.024427Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:25:57.024476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:57.024550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-03-28T09:25:57.026371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:5 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:57.026433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:57.026457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:57.026494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:57.026528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:57.027857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-28T09:25:57.030565Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186233409550 2025-03-28T09:25:57.030889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-03-28T09:25:57.031292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186233409550 2025-03-28T09:25:57.032775Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2025-03-28T09:25:57.034391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 
ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:57.034630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:57.035935Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 2025-03-28T09:25:57.036097Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-28T09:25:57.036926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:25:57.037111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:25:57.037758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:57.037886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:57.038555Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2025-03-28T09:25:57.043744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-28T09:25:57.043957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409549 2025-03-28T09:25:57.044518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-28T09:25:57.045053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:57.045106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:25:57.045178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:57.045548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-28T09:25:57.045692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:57.045740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:57.045837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:57.047809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 
2025-03-28T09:25:57.047862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2025-03-28T09:25:57.047967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:25:57.047999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:25:57.048056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:25:57.048082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-28T09:25:57.050369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:25:57.050405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:25:57.050457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-28T09:25:57.050503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-28T09:25:57.050664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:57.050791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:57.050872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:57.050906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:57.050964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:57.052462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-28T09:25:57.052713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-03-28T09:25:57.052747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-03-28T09:25:57.053256Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-03-28T09:25:57.053349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-28T09:25:57.053403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:893:2792] TestWaitNotification: OK eventTxId 106 2025-03-28T09:25:57.053951Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:57.054104Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 181us result status StatusSuccess 2025-03-28T09:25:57.054404Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:53.316487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:53.316601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:53.316643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:53.316693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:53.316734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:53.316763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:53.316816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:53.316886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:53.317211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:53.403701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:53.403771Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-28T09:25:53.421438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:53.421692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:53.421866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:53.427760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:53.427998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:53.428453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:53.428620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:53.430090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.431342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:53.431402Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.431568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:53.431617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:53.431675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:53.431751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.438136Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:53.576108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:53.576342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.576543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:53.576805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:53.576858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.578900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:53.579047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:53.579234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.579284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:53.579334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:53.579364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:53.581094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.581146Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:53.581176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:53.582582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.582621Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.582656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:53.582735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:53.591575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:53.593491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:53.593642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:53.594606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:53.594752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:53.594798Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:53.595111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:53.595168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:53.595335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:53.595425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:25:53.597429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:53.597485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:53.597662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:53.597699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:53.598118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:53.598156Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:53.598223Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:53.598254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:53.598287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:53.598344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:53.598382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:53.598417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:53.598447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:53.598473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:53.598542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:53.598593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:53.598628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:53.600279Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:53.600396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:53.600421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
0667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 139 2025-03-28T09:25:56.940694Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 139, pathId: [OwnerId: 72057594046678944, LocalPathId: 16], version: 18446744073709551615 2025-03-28T09:25:56.940724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 4 2025-03-28T09:25:56.941912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 139 2025-03-28T09:25:56.941975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 139 2025-03-28T09:25:56.941994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 139 2025-03-28T09:25:56.942015Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 139, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:25:56.942034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:56.942091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 139, subscribers: 0 2025-03-28T09:25:56.943167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:56.943208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:15 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:56.943227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:14 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:56.943243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:56.943260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:16 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:25:56.944486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-03-28T09:25:56.945009Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-28T09:25:56.946289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.946521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-03-28T09:25:56.947171Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 15 TabletID: 72075186233409556 2025-03-28T09:25:56.947336Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 14 TabletID: 72075186233409555 
2025-03-28T09:25:56.948771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 15 ShardOwnerId: 72057594046678944 ShardLocalIdx: 15, at schemeshard: 72057594046678944 2025-03-28T09:25:56.949010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 3 Forgetting tablet 72075186233409556 2025-03-28T09:25:56.950379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 14 ShardOwnerId: 72057594046678944 ShardLocalIdx: 14, at schemeshard: 72057594046678944 2025-03-28T09:25:56.950566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 2 Forgetting tablet 72075186233409555 2025-03-28T09:25:56.951391Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-28T09:25:56.951674Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 16 TabletID: 72075186233409557 Forgetting tablet 72075186233409547 2025-03-28T09:25:56.954800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 Forgetting tablet 72075186233409557 2025-03-28T09:25:56.955305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:25:56.955522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:56.955892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 16 ShardOwnerId: 72057594046678944 ShardLocalIdx: 16, at schemeshard: 72057594046678944 2025-03-28T09:25:56.956070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 2025-03-28T09:25:56.956402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:56.956459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2025-03-28T09:25:56.956522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:56.956706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2025-03-28T09:25:56.956794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:56.956823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:56.956920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:56.959386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:1 2025-03-28T09:25:56.959440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:25:56.959564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:15 2025-03-28T09:25:56.959583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409556 2025-03-28T09:25:56.959635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:14 2025-03-28T09:25:56.959651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:14 tabletId 72075186233409555 2025-03-28T09:25:56.961846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:25:56.961881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:25:56.961936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:16 2025-03-28T09:25:56.961969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409557 2025-03-28T09:25:56.962186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:56.962293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:56.962342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:56.962374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:56.962438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:56.963905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 139, wait until txId: 139 TestWaitNotification wait txId: 139 2025-03-28T09:25:56.964615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: send EvNotifyTxCompletion 2025-03-28T09:25:56.964663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 139 2025-03-28T09:25:56.965455Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 139, at schemeshard: 72057594046678944 2025-03-28T09:25:56.965532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: got EvNotifyTxCompletionResult 2025-03-28T09:25:56.965567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: satisfy waiter [1:2288:4061] TestWaitNotification: OK eventTxId 139 2025-03-28T09:25:56.966812Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:56.966997Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 184us result status StatusSuccess 2025-03-28T09:25:56.967295Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 5 ShardsInside: 0 ShardsLimit: 6 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 20 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:57.031106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:57.031225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:57.031268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:57.031314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:57.031358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:57.031393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:57.031481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:57.031582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:57.031894Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:57.112664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:57.112721Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:57.128697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:57.128978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:57.129213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:57.137146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:57.137387Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:57.138006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:57.138224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:57.141363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:57.142680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:57.142777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:57.142958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:57.143027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:57.143071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:57.143154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.150119Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:57.265707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:57.265941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.266177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:57.266432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:57.266481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.269219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:57.269372Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:57.269571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.269621Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:57.269692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:57.269723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:57.271772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.271847Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:57.271881Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:57.273520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.273559Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.273594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:57.273637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:57.277226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:57.278943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:57.279103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:57.280125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:57.280245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:57.280290Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:57.280549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:57.280603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:57.280763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:57.280831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:57.282859Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:57.282926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:57.283094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:57.283131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:57.283567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.283615Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:57.283706Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:57.283735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:57.283771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:57.283799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:57.283834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:57.283885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:57.283929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:57.283959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:57.284019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:57.284067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:57.284115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:57.285810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:57.285911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:57.285942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:25:57.352316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:57.352357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:57.352476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:25:57.352538Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:57.352557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:25:57.352579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-03-28T09:25:57.352848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.352881Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:25:57.352964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:57.352994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:57.353020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:57.353042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:57.353081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:25:57.353124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:57.353153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:25:57.353183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:25:57.353243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:25:57.353276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-28T09:25:57.353300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-28T09:25:57.353324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-03-28T09:25:57.353793Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:57.353867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:57.353895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, 
txId: 101 2025-03-28T09:25:57.353924Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-28T09:25:57.353958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:57.354549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:57.354594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:57.354619Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:57.354647Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-28T09:25:57.354676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:25:57.354753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:25:57.357055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:57.357737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:25:57.357907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:25:57.357945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:25:57.358256Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:25:57.358351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:57.358385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:337:2328] TestWaitNotification: OK eventTxId 101 2025-03-28T09:25:57.358831Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:57.359020Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 207us result status StatusSuccess 2025-03-28T09:25:57.359401Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:57.359853Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:57.359988Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 156us result status StatusSuccess 2025-03-28T09:25:57.360260Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:57.360628Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/MyDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:57.360788Z 
node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/MyDir" took 138us result status StatusSuccess 2025-03-28T09:25:57.361047Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/MyDir" PathDescription { Self { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestYmqHttpProxy::TestGetQueueUrlWithIAM [GOOD] >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true >> TestKinesisHttpProxy::ListShards [GOOD] >> DemoTx::Scenario_4 >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] >> TestYmqHttpProxy::TestTagQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:57.564910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:57.565060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:57.565105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:57.565153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:57.565211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:57.565235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:57.565318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-03-28T09:25:57.565400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:57.565750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:57.647501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:57.647582Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:57.666551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:57.666887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:57.667068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:57.677727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:57.677994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:57.678664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:57.678920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:57.681152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:57.682451Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:57.682514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:57.682709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:57.682772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:57.682835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:57.682916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.692527Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:57.828126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:57.828317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.828481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:57.828690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:57.828726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.831391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:57.831547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:57.831714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.831782Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:57.831822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:57.831873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:57.833702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.833792Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:57.833839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:57.835441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.835482Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.835538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:57.835580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:57.839238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:57.841468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:57.841641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:57.842710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:57.842828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:57.842875Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:57.843123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-03-28T09:25:57.843178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:57.843328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:57.843423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:57.845438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:57.845498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:57.845651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:57.845686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:57.846060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.846102Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:57.846193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:57.846226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:57.846262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:57.846291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:57.846322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:57.846392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:57.846427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:57.846454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:57.846514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:57.846578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:57.846610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:57.848292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:57.848397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:57.848430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
O: TDropSubdomain TPropose operationId# 101:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:25:57.921728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:57.921762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:57.921851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 130 2025-03-28T09:25:57.921966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:57.922025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:57.922894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:57.922981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:25:57.924457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:57.924490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:57.924590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:57.924711Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:57.924742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:25:57.924775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:25:57.925005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.925041Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2025-03-28T09:25:57.925088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:57.925119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:57.925160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:57.925191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:57.925222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:25:57.925253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:57.925284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:25:57.925309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:25:57.925382Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:57.925421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-28T09:25:57.925450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-28T09:25:57.925493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-28T09:25:57.926129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:57.926197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:57.926227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:57.926261Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:25:57.926300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:57.926873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:57.926964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:57.926996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:57.927020Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:25:57.927044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:57.927114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:25:57.927924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:57.927971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:57.928074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:25:57.928628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:25:57.928675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# 
[OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:57.928729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:57.931375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:57.933294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:57.933378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:25:57.933439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:25:57.933665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:25:57.933723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:25:57.934158Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:25:57.934238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:57.934290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:339:2330] TestWaitNotification: OK eventTxId 101 2025-03-28T09:25:57.934816Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:57.934984Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 196us result status StatusPathDoesNotExist 2025-03-28T09:25:57.935158Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:57.935647Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:57.935828Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 171us result status StatusSuccess 2025-03-28T09:25:57.936174Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain >> TestYmqHttpProxy::TestGetQueueAttributes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:57.779691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:57.779814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:57.779890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:57.779939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:57.779984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:57.780011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:57.780068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:57.780160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:57.780546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:57.870763Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:57.870833Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:57.887255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:57.887600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:57.887817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:57.894511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:57.894820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:57.895606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:57.895883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:57.898146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:57.899663Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:57.899729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:57.899943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:57.899998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:57.900042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:57.900126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.908204Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:58.039661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:58.039925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:58.040144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:58.040437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:58.040498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:58.043624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:58.043781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:58.043992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:58.044049Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:58.044089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:58.044123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:58.046845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:58.046930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:58.046979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:58.049297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:58.049351Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:58.049411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:58.049460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:58.060425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:58.063331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:58.063591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:58.064702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:58.064862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:58.064914Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:58.065209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:58.065266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:58.065445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:58.065529Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:58.067993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:58.068062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:58.068254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:58.068295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:58.068725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:58.068775Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:58.068883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:58.068917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:58.068958Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:58.068993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:58.069033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:58.069076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:58.069132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:58.069167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:58.069251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:58.069316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:58.069363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:58.071243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:58.071362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:58.071399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:25:58.071441Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:25:58.071495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:58.071625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, 
txId: 1, subscribers: 0 2025-03-28T09:25:58.075147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:25:58.076040Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-03-28T09:25:58.076658Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-28T09:25:58.091540Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-28T09:25:58.094337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Name: "USER_1" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:58.094618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_1, opId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:25:58.094814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, at schemeshard: 72057594046678944 2025-03-28T09:25:58.095764Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:25:58.099428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with coordinators, but no mediators" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:58.099623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, operation: CREATE DATABASE, path: /MyRoot/USER_1 2025-03-28T09:25:58.100208Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 2025-03-28T09:25:58.103146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Mediators: 1 Name: "USER_2" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:58.103452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_2, opId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:58.103611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, at schemeshard: 72057594046678944 2025-03-28T09:25:58.106020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with mediators, but no 
coordinators" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:58.106190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, operation: CREATE DATABASE, path: /MyRoot/USER_2 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2025-03-28T09:25:58.106488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-28T09:25:58.106549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2025-03-28T09:25:58.106672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:25:58.106718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:25:58.107252Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-28T09:25:58.107379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-28T09:25:58.107434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:286:2277] 2025-03-28T09:25:58.107700Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:25:58.107758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:58.107788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:286:2277] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2025-03-28T09:25:58.108239Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:58.108412Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 180us result status StatusPathDoesNotExist 2025-03-28T09:25:58.108595Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:58.109449Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-03-28T09:25:58.109612Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 161us result status StatusPathDoesNotExist 2025-03-28T09:25:58.109781Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:25:58.110281Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:58.110469Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 160us result status StatusSuccess 2025-03-28T09:25:58.110885Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestKinesisHttpProxy::ListShardsEmptyFields >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId [GOOD] >> TPersQueueTest::SetupLockSession [GOOD] >> TPersQueueTest::StreamReadCreateAndDestroyMsgs >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateDelete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 
72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:56.099580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:56.099694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:56.099741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:56.099783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:56.099842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:56.099887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:56.099937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:56.100012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:56.100367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:56.183057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:56.183109Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:56.206491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:56.206742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:56.206926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:56.214460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:56.214652Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:56.215258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.215475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:56.217201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.218422Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:56.218473Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.218647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:56.218721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:56.218761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:56.218834Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.225142Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:56.352363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:56.352591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.352794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:56.353029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:56.353084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.355187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.355331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:56.355495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.355577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:56.355634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:56.355669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:56.357331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.357383Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:56.357413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:56.358963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.359003Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.359038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:56.359080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.367868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2025-03-28T09:25:56.372319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:56.372511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:56.373551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.373671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:56.373716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:56.373990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:56.374045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:56.374266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:56.374363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:56.379352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:56.379417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:56.379612Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.379654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:56.380075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.380117Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:56.380206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:56.380236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.380269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:56.380314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.380352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:56.380395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.380427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 1:0 2025-03-28T09:25:56.380456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:56.380533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:56.380573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:56.380603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:56.382307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:56.382428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:56.382466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 46678944 2025-03-28T09:25:58.492307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 106:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 431 RawX2: 8589936987 } Origin: 72075186233409548 State: 2 TxId: 106 Step: 0 Generation: 2 2025-03-28T09:25:58.492376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 106:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:58.492426Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:25:58.492470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-03-28T09:25:58.492515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 106:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-03-28T09:25:58.492569Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 129 -> 240 2025-03-28T09:25:58.494650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:25:58.495199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:25:58.495259Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 106:0ProgressState, operation type TxCopyTable 2025-03-28T09:25:58.495324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 106:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-03-28T09:25:58.495367Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 106, done: 0, blocked: 1 2025-03-28T09:25:58.495456Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 106:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 106 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-03-28T09:25:58.495499Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 240 -> 240 2025-03-28T09:25:58.497712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:25:58.497771Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-03-28T09:25:58.497909Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-28T09:25:58.497953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:25:58.498001Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-28T09:25:58.498041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:25:58.498081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-03-28T09:25:58.498181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:637:2560] message: TxId: 106 2025-03-28T09:25:58.498240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:25:58.498289Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-03-28T09:25:58.498324Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-03-28T09:25:58.498495Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-03-28T09:25:58.498563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:25:58.500635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-28T09:25:58.500718Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:813:2712] TestWaitNotification: OK eventTxId 106 2025-03-28T09:25:58.501478Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:58.501712Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table" took 273us result status StatusSuccess 2025-03-28T09:25:58.502137Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table" PathDescription { Self { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:58.502902Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:58.503109Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dst" took 231us result status StatusSuccess 2025-03-28T09:25:58.503458Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dst" PathDescription { Self { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "dst" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 
PQPartitionsLimit: 1000000 } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:58.504938Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:58.505117Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 198us result status StatusSuccess 2025-03-28T09:25:58.505566Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 200 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:58.806928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:58.807052Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:58.807096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:58.807157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:58.807207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:58.807253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:58.807333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:58.807422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:58.807814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:58.905967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:58.906030Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:58.925805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:58.926138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:58.926317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:58.933114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:58.933393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:58.934060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:58.934301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:58.936641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:58.938095Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:58.938167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:58.938362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:58.938438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:58.938493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:58.938585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:58.946964Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:59.088438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain 
{ Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:59.088699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.088948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:59.089250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:59.089314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.091916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:59.092062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:59.092266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.092321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:59.092364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:59.092399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:59.094580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.094645Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:59.094701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:59.096337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.096385Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.096431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:59.096481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:59.100232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:59.102201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:59.102393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:59.103570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:59.103708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:59.103760Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:59.104074Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:59.104139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:59.104334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:59.104413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:59.106558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:59.106653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:59.106854Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:59.106900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:59.107324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.107372Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:59.107466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:59.107502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:59.107559Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:59.107598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:59.107636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:59.107681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:59.107717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:59.107747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:59.107866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:59.107925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
2025-03-28T09:25:59.107987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:59.109918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:59.110043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:59.110085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... _TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-03-28T09:25:59.148711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:59.148822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:59.148868Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-28T09:25:59.149025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-28T09:25:59.149089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-28T09:25:59.149258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:59.149331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:59.149396Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no IsActiveChild, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:25:59.150112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:59.150697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:25:59.152161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:59.152221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:59.152363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:59.152458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:59.152486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to 
populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:25:59.152515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:25:59.152709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.152754Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:25:59.152853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:59.152885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:59.152914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:25:59.152939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:59.152970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:25:59.153009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:25:59.153039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:25:59.153063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:25:59.153118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:25:59.153151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-28T09:25:59.153180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-28T09:25:59.153203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-28T09:25:59.153804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:59.153907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:59.153948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:59.153987Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:25:59.154025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:59.154577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:59.154629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:25:59.154648Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:25:59.154739Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:25:59.154790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:25:59.154876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:25:59.157314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:25:59.157430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-03-28T09:25:59.160233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:59.160436Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } 2025-03-28T09:25:59.160476Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, path /MyRoot/SomeDatabase 2025-03-28T09:25:59.160642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2025-03-28T09:25:59.160720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2025-03-28T09:25:59.162905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. 
Existing storage kinds are: " TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:59.163080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2025-03-28T09:25:59.163424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:25:59.163459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2025-03-28T09:25:59.163583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:25:59.163607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:25:59.163953Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:25:59.164064Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:25:59.164097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:25:59.164122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:305:2296] 2025-03-28T09:25:59.164248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:25:59.164266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:305:2296] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 >> TestYmqHttpProxy::TestListQueues >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] >> TestKinesisHttpProxy::TestRequestNoAuthorization [GOOD] >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:55.953976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:55.954102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:55.954157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching 
config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:55.954218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:55.954265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:55.954317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:55.954377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:55.954456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:55.954831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:56.038030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:56.038088Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:56.053883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:56.054181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:56.054374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:56.061562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:56.061791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:56.062398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.062609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:56.064508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.065714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:56.065774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.065927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:56.065980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:56.066030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:56.066109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.072600Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:56.217207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-03-28T09:25:56.217444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.217663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:56.217963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:56.218015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.220224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.220354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:56.220524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.220571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:56.220598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:56.220627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:56.222607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.222670Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:56.222718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:56.224666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.224713Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.224750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:56.224795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.228326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:56.230069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:56.230225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:56.231231Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:56.231347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:56.231401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:56.231667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:56.231721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:56.231877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:56.231946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:56.233766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:56.233857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:56.233991Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:56.234026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:56.234405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:56.234448Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:56.234547Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:56.234576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.234609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:56.234640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.234663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:56.234707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:56.234732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:56.234760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:56.234860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:56.234919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:56.234943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:56.236272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:56.236359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:56.236390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 69694 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-03-28T09:25:59.713146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409548, partId: 0 2025-03-28T09:25:59.713291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 4294969694 } Origin: 72075186233409548 State: 5 TxId: 103 Step: 0 Generation: 2 2025-03-28T09:25:59.713344Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-03-28T09:25:59.715248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.715319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-03-28T09:25:59.715371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 103:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-28T09:25:59.715414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 103, done: 0, blocked: 1 2025-03-28T09:25:59.715495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 103:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 103 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-03-28T09:25:59.715634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 137 -> 129 2025-03-28T09:25:59.715762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:25:59.715826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:25:59.717768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.718807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.720241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:59.720306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:59.720499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:25:59.720675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:59.720719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-28T09:25:59.720789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-03-28T09:25:59.720867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.720916Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:25:59.721007Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.721046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-03-28T09:25:59.721093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-03-28T09:25:59.723033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:25:59.723154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:25:59.723198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:25:59.723250Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-03-28T09:25:59.723298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:25:59.724083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:25:59.724197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:25:59.724228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:25:59.724256Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-28T09:25:59.724290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:25:59.724371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-28T09:25:59.729435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.729514Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:59.729888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:25:59.730116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:25:59.730165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:25:59.730204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:25:59.730235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:25:59.730269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-28T09:25:59.730351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:403:2369] message: TxId: 103 2025-03-28T09:25:59.730402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:25:59.730438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:25:59.730468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:25:59.730560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:25:59.731134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:59.731172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:25:59.731731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:25:59.733208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:25:59.734571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:59.734621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-03-28T09:25:59.734710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:25:59.734750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:734:2668] 2025-03-28T09:25:59.735560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2025-03-28T09:25:59.737178Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:25:59.737464Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 300us result status StatusSuccess 2025-03-28T09:25:59.737943Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 2 SchemeshardId: 72057594046678944 
PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestKinesisHttpProxy::TestUnauthorizedPutRecords >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:26:00.036546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:26:00.036676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:26:00.036723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:26:00.036768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:26:00.036809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:26:00.036838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:26:00.036914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-03-28T09:26:00.036984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:26:00.037300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:26:00.128475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:26:00.128524Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:00.153595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:26:00.153920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:26:00.154110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:26:00.159858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:26:00.160077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:26:00.160821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:00.161038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:26:00.166171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:00.167696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:26:00.167793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:00.167988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:26:00.168057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:26:00.168129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:26:00.168222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.183815Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:26:00.353590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:26:00.353862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.354110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:26:00.354423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:26:00.354485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.357277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:00.357439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:26:00.357640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.357700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:26:00.357738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:26:00.357773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:26:00.359955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.360035Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:26:00.360076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:26:00.361960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.362008Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.362063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:26:00.362111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:26:00.366014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:26:00.368145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:26:00.368376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:26:00.369343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:00.369452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:26:00.369497Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:26:00.369740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-03-28T09:26:00.369787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:26:00.369951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:26:00.370010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:26:00.371880Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:26:00.371987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:26:00.372188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:00.372238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:26:00.372731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.372793Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:26:00.372904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:26:00.372952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:26:00.373008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:26:00.373052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:26:00.373095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:26:00.373161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:26:00.373207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:26:00.373287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:26:00.373362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:26:00.373444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:26:00.373486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:26:00.375673Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:26:00.375821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:26:00.375867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:26:00.375908Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 
72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:26:00.375969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:26:00.376105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:26:00.379250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:26:00.379722Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2025-03-28T09:26:00.380261Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-28T09:26:00.396101Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-28T09:26:00.398103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "USER_0" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:26:00.398340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateSubDomain Propose, path: /MyRoot/USER_0, opId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.398477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.399150Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:26:00.407985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: TimeCastBucketsPerMediator is 0" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:26:00.408155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: TimeCastBucketsPerMediator is 0, operation: CREATE DATABASE, path: /MyRoot/USER_0 2025-03-28T09:26:00.408568Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-03-28T09:26:00.408816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-28T09:26:00.408863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-03-28T09:26:00.409212Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-28T09:26:00.409316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-28T09:26:00.409348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:282:2273] TestWaitNotification: OK eventTxId 100 2025-03-28T09:26:00.409705Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:26:00.409859Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 175us result status StatusPathDoesNotExist 2025-03-28T09:26:00.410021Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:59.857609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:59.857750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:59.857797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:59.857850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:59.857915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:59.857979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:59.858060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:59.858154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:59.858568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:59.948595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:59.948660Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:59.975302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:59.975671Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:59.975857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:59.994716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:59.994973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:59.995638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:59.995856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:59.997918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:59.999307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:59.999376Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:59.999563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:59.999631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:59.999678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:59.999761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.009373Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:26:00.138896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:26:00.139135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.139337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:26:00.139553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:26:00.139599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.141578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:00.141694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:26:00.141845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.141888Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts 
opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:26:00.141936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:26:00.141965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:26:00.143835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.143899Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:26:00.143939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:26:00.145649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.145696Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.145736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:26:00.145782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:26:00.155314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:26:00.157361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:26:00.157608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:26:00.158760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:00.158912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:26:00.158966Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:26:00.159285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:26:00.159350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:26:00.159548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:26:00.159636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:26:00.161678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-03-28T09:26:00.161737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:26:00.161939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:00.161983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:26:00.162410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.162466Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:26:00.162567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:26:00.162603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:26:00.162646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:26:00.162701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:26:00.162765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:26:00.162811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:26:00.162871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:26:00.162908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:26:00.162983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:26:00.163043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:26:00.163078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:26:00.164973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:26:00.165104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:26:00.165148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
nReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.407920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409551, partId: 0 2025-03-28T09:26:00.408049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186233409551 2025-03-28T09:26:00.408097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 101:0 HandleReply TEvConfigureStatus operationId:101:0 at schemeshard:72057594046678944 2025-03-28T09:26:00.408131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409551 shardIdx# 72057594046678944:6 at schemeshard# 72057594046678944 2025-03-28T09:26:00.408164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 3 -> 128 2025-03-28T09:26:00.410446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.410647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.410721Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.410766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-03-28T09:26:00.410821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-03-28T09:26:00.410958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:26:00.412846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-03-28T09:26:00.412990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-03-28T09:26:00.413374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:00.413504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:26:00.413556Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-28T09:26:00.413917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-28T09:26:00.413976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-28T09:26:00.414142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:26:00.414211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-03-28T09:26:00.414266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:26:00.416772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:26:00.416853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:26:00.417084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:26:00.417251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:00.417328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:26:00.417393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:26:00.417492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.417550Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:26:00.417674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:26:00.417726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:26:00.417798Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:26:00.417852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:26:00.417905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:26:00.417966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:26:00.418106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:26:00.418171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:26:00.418458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-03-28T09:26:00.418509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2025-03-28T09:26:00.418545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-28T09:26:00.418580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-28T09:26:00.419723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 
2025-03-28T09:26:00.419843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:26:00.419887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:26:00.419958Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:26:00.420008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:26:00.420987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:26:00.421082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:26:00.421124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:26:00.421168Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:26:00.421201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-03-28T09:26:00.421274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2025-03-28T09:26:00.421314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:563:2472] 2025-03-28T09:26:00.424555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:26:00.425682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:26:00.425787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:26:00.425813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:564:2473] TestWaitNotification: OK eventTxId 101 2025-03-28T09:26:00.426221Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:26:00.426391Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 150us result status StatusSuccess 2025-03-28T09:26:00.426798Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD]
>> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD]
>> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:26:00.119652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:26:00.119779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:26:00.119827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:26:00.119874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:26:00.119921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:26:00.119967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:26:00.120027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:26:00.120104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:26:00.120481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:26:00.207188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:26:00.207239Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:00.225004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:26:00.225314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:26:00.225485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:26:00.230820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:26:00.231025Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:26:00.231641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:00.231842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:26:00.233864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:00.235106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:26:00.235176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:00.235344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:26:00.235403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:26:00.235446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:26:00.235533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.241307Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:26:00.360840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:26:00.361080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.361308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:26:00.361601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:26:00.361650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.364153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:00.364313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-03-28T09:26:00.364509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.364563Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:26:00.364601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:26:00.364636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:26:00.369427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.369494Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:26:00.369530Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:26:00.371450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.371511Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.371576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:26:00.371633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:26:00.375422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:26:00.377402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:26:00.377581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:26:00.378716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:00.378864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:26:00.378913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:26:00.379197Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:26:00.379250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:26:00.379410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:26:00.379560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:26:00.381701Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:26:00.381760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:26:00.381932Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:00.381970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:26:00.382389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.382435Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:26:00.382560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:26:00.382591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:26:00.382627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:26:00.382656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:26:00.382711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:26:00.382750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:26:00.382782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:26:00.382811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:26:00.382874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:26:00.382923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:26:00.382967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:26:00.384749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:26:00.384883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:26:00.384922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
rsion: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:26:00.811834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:26:00.811883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:26:00.811915Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-28T09:26:00.811941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:26:00.812001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-03-28T09:26:00.812837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1424 } } 2025-03-28T09:26:00.812877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2025-03-28T09:26:00.813028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1424 } } 2025-03-28T09:26:00.813124Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1424 } } 2025-03-28T09:26:00.815151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 622 RawX2: 4294969827 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:26:00.815202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2025-03-28T09:26:00.815382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 622 RawX2: 4294969827 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:26:00.815437Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:26:00.815515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 622 
RawX2: 4294969827 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:26:00.815605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:7, datashard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:00.815644Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.815694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2025-03-28T09:26:00.815739Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-28T09:26:00.816817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:26:00.818848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:26:00.818966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.820467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.820785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:26:00.820835Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:26:00.820933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:26:00.820983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:26:00.821036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:26:00.821086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:26:00.821119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-28T09:26:00.821209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:273:2264] message: TxId: 101 2025-03-28T09:26:00.821254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:26:00.821292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:26:00.821326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:26:00.821463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:26:00.823269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:26:00.823329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:274:2265] TestWaitNotification: OK eventTxId 101 2025-03-28T09:26:00.823848Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 
72057594046678944 2025-03-28T09:26:00.824085Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 237us result status StatusSuccess 2025-03-28T09:26:00.824482Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:26:00.825135Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:26:00.825328Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 190us result status StatusSuccess 2025-03-28T09:26:00.825753Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 
TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TestKinesisHttpProxy::GoodRequestGetRecords [GOOD]
>> TPersQueueTest::TopicServiceCommitOffsetBadOffsets [GOOD]
>> TPersQueueTest::TopicServiceReadBudget
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:59.132705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:59.132814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:59.132882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:59.132920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:59.132961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:59.132987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:59.133060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:59.133130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:59.133427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:59.215984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:59.216037Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:25:59.231037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:59.231258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:59.231419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:59.263612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:59.263862Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:59.264503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:59.264726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:59.271500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:59.272794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:59.272870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:59.273047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:59.273104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:59.273145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:59.273227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.283451Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:59.432260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:59.432493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.432703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:59.432946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:59.432996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.436288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:59.436422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:59.436612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.436686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:59.436720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:59.436754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:59.438520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.438590Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:59.438624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:59.440276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.440316Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.440352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:59.440393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:59.450855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:59.453017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:59.453203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:59.454257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:59.454392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:59.454442Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:59.454753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:59.454828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:59.454992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:59.455066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:25:59.457234Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:59.457302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:59.457474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:59.457510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:59.457923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.457971Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:59.458061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:59.458092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:59.458131Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:59.458163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:59.458195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:59.458232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:59.458263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:59.458312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:59.458399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:59.458442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:59.458484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:59.460230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:59.460369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:59.460407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3-28T09:26:01.326162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.326342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.326389Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TPropose operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:26:01.326461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2025-03-28T09:26:01.326603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:26:01.328253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2025-03-28T09:26:01.328351Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 108 at step: 5000004 2025-03-28T09:26:01.328862Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:01.328973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 8589936749 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:26:01.329077Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TPropose operationId# 108:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000004 2025-03-28T09:26:01.329220Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 128 -> 129 2025-03-28T09:26:01.329359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:26:01.329401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-28T09:26:01.329881Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409549;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=108;fline=tx_controller.cpp:211;event=finished_tx;tx_id=108; FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-03-28T09:26:01.331653Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:26:01.331700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:26:01.331864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-03-28T09:26:01.331984Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:01.332016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:338:2314], at schemeshard: 72057594046678944, txId: 108, path id: 1 2025-03-28T09:26:01.332047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:338:2314], at schemeshard: 72057594046678944, txId: 108, path id: 5 2025-03-28T09:26:01.332396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.332440Z node 2 :FLAT_TX_SCHEMESHARD INFO: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:26:01.332498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409549 2025-03-28T09:26:01.333296Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-03-28T09:26:01.333410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2025-03-28T09:26:01.333461Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-03-28T09:26:01.333501Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-03-28T09:26:01.333535Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:26:01.334277Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-03-28T09:26:01.334392Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2025-03-28T09:26:01.334420Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2025-03-28T09:26:01.334446Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2025-03-28T09:26:01.334472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-03-28T09:26:01.334534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2025-03-28T09:26:01.337638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 108:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275382275 2025-03-28T09:26:01.338074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2025-03-28T09:26:01.339498Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 FAKE_COORDINATOR: Erasing txId 108 
2025-03-28T09:26:01.351574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-03-28T09:26:01.351620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2025-03-28T09:26:01.351788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2025-03-28T09:26:01.351840Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 108:0 129 -> 240 2025-03-28T09:26:01.353549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.353706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.353805Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 108:0 ProgressState 2025-03-28T09:26:01.353897Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-03-28T09:26:01.353919Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-28T09:26:01.353947Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#108:0 progress is 1/1 2025-03-28T09:26:01.353969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-28T09:26:01.353993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2025-03-28T09:26:01.354043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:492:2442] message: TxId: 108 2025-03-28T09:26:01.354077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2025-03-28T09:26:01.354105Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 108:0 2025-03-28T09:26:01.354131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 108:0 2025-03-28T09:26:01.354212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-28T09:26:01.355861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-03-28T09:26:01.355898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:879:2793] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2025-03-28T09:26:01.358141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore1" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "comment2" Type: "Utf8" } } } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:26:01.358291Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterOlapStore Propose, path: /MyRoot/OlapStore1, opId: 109:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.358576Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusSchemeError, reason: Too many columns. new: 4. 
Limit: 3, at schemeshard: 72057594046678944 2025-03-28T09:26:01.360573Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusSchemeError Reason: "Too many columns. new: 4. Limit: 3" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:26:01.360716Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-03-28T09:26:01.361091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-03-28T09:26:01.361131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-03-28T09:26:01.361625Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-03-28T09:26:01.361725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-03-28T09:26:01.361763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:914:2828] TestWaitNotification: OK eventTxId 109
>> TestKinesisHttpProxy::GoodRequestGetRecordsCbor
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:20.650547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:20.650705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.650774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:20.650814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:20.651977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:20.652037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:20.652140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.652281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:20.653473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:20.737803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:25:20.737872Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:20.754260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:20.754758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:20.754984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:20.768537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:20.768732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:20.770435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.771450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.778062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.785509Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.785590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.786407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:20.786493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.786567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:20.786670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.803713Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:20.932829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:20.934943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.936139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:20.937338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:20.937453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.941734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.941903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-03-28T09:25:20.942114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.942182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:20.942246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:20.942286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:20.944188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.944268Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:20.944330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:20.946167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.946251Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.946289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.946336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.951435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:20.953478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:20.954813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:20.956182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.956357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:20.956411Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.957745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:20.957823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.958015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:20.958130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.960352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.960402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.960595Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.960667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:20.961142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.961205Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:20.961321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:20.961395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.961445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:20.961495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.961539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:20.961595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.961632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:20.961659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:20.961713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:20.961754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:20.961779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:20.963610Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:20.963721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:20.963757Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:25100 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1E181179-F1D2-4985-9EB8-3823E64C14D4 amz-sdk-request: attempt=1 content-length: 130 content-md5: rsyfbQ5vVOk4oQ1A/altew== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=99&uploadId=1 / 130 2025-03-28T09:26:01.202825Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5422], result# UploadPartResult { ETag: aecc9f6d0e6f54e938a10d40fda96d7b } 2025-03-28T09:26:01.203098Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3457:5421] 2025-03-28T09:26:01.203272Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3458:5422], sender# [1:3457:5421], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:25100 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 34E4D849-2087-4851-ADAD-9402635257FE amz-sdk-request: attempt=1 content-length: 130 content-md5: Wyd1w7MZYbbZucaVvuRDAw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=100&uploadId=1 / 130 2025-03-28T09:26:01.206629Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5422], result# UploadPartResult { ETag: 5b2775c3b31961b6d9b9c695bee44303 } 2025-03-28T09:26:01.206871Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3457:5421] 2025-03-28T09:26:01.206940Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3458:5422], sender# [1:3457:5421], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:25100 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 70E94319-CCE0-4898-98FA-338183EB97BC amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=101&uploadId=1 / 0 2025-03-28T09:26:01.210201Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5422], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-03-28T09:26:01.210254Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3458:5422], success# 1, error# , multipart# 1, uploadId# 1 2025-03-28T09:26:01.216559Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3458:5422], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[a59dd9a97cf3685e69093fb2d96653c6,bdbb215613239cb3a835fee1fe7e7ca3,cb38dbc776d5763f1926dfb22d508c87,3c430d66d07a0a4b1fa889f321fce197,43baf91083f286b60bf15e7786459cd9,90b5581bef612fa3bf9b38b336af405f,fd4869c26a12d22ee79256d778954d04,a9459bc28198b0b6bd67732c492fd740,697a3f8386ea1ff4e327de943224cb1a,614da0b4ec9464e69cd0c59909e80fbb,9b94eb3f67aa4c8a0bcbf546833ed966,fd45c3afacec641ad19e59d2b31aeba4,fd69678aecbc149601f58cf13c64d33e,90c09ab4923bc9f97f825d36e32bf362,c1586416a281a4cca2b2b4e333d9b079,f31908576272623f9f0a19bf774cde8e,6fe3b42388304d2af07c629aeb683581,7bc90eec21ca5bb3648e6a48e83c5730,8e1dda26de1af89bdffe2eefdcebea1d,14dc42d90caa1575bbfffa9dc8f21d66,92efb2368eecb32d4075c09294fde0b7,98efff5f7c7ecb42e7af65142ce05af9,6206c81807b3b9283b0173ee2c682100,616b431b91aedc9de4593321eb42ba96,9ae4762563ffdec596cc9ca4cb8913e1,946ebf2d95b4796ea2faee21f017be79,45834a9948bb4ab8b62d1894156d13ed,6ad3fe7286856927c1e00422bc8da697,ef89464d20eae46829e1bf557e4d04ce,f128e5de32097d205453080b01c94ac3,c13e650ee2cfcecfdf4f578a2e5b1c2d,fc26314711b25d20fc654cf59301b806,56f6f2c574fba86496a87a7dd5fab46c,c7951eace72cfe0f14f808173e07bc64,3d9ad3340e58b973eaf8d4f14ba3b0f9,fc41d6fdfb52389dda8b26d7a0a3a889,9974b6ae96ffd0b756acb67088e890f9,cde8a5604010abe8fccfa9492144036f,0364e048eaac35c26d48b0c5072b5255,aac5a84927124d6ae4931e2650c80d9f,eab068fe4ca35c2f3e35890bd727eb4f,bc3646bdbcbc7f97dcddf2202ea9421f,6d3f63d672eda4a4617c9e7589a68bfc,0401bade6c3031b5be872238520b993a,1c6405688f86423480173e3e316a20bd,52395f68e877cbb8d7115a247331b0a7,4b0673ac18058554d2c53bf9f99b34b2,87bc1b9e650b31e81a9ad2531e3ef9da,b29053c8cd093c8b92ad3954c42cb7be,faf1084f6b33b00e2e822d1d3c3f0083,eedec03ee8d7eda4654db7206ad0889e,be4469dd028d5519a67098055f25513f,a7afa9827ec27c565cff1ed505a06f4b,91fe8109d2ad934c4364d90c29aaba71,73b81ea00e11db12d66497d30eb48446,cce69ef69777afeab34eefa515abc7f4,4e4ac1a421353964356400b8be8e21da,32cd6083b12660bcd4062af08d89eb05,71957b9db37811c7680638b82dc6384b,a8787e692c423a2dfa07dd261e72790a,283838ab16206b27738ea6653110f833,88bf084fb3029f0d5c0705eece930d70,1ed2f9f7221f1718b81fdf2d846347dd,406706cfbc454922dcad50b9c534b8d1,dbb606c993d798974ed4f5c9ebf195ca,1a4a3868dc6fa26c6b019d237f9ea6f4,82660a3c6b576a1b3fea925f3c179a2e,d393db2749ae42e854e85eeec2ea3592,b42c92ad14ee0e5351fec7e5a045a91b,2c7af27f9dc77efbcbe71c2d7997d6e9,278aba62ab1d9e3ff16df2d82ac5f5c7,6b8380404a7e7ec95ad5f3941d5d404c,c9813b9fc1d6b5087e64849076edd0f8,160785e4dac02a91c43a497ee59eea06,db529a9ba22f60f404031cfe85e966e9,9b70af168e2d3769bd8bc4dffa3202ea,9ac39c3843b6621ace44acf430a59e06,4603ff564a46e93951f246ed18926071,66b85f35ee76a7f71f50e9aad56758de,1665c284ad04d6b893b69372bf8fc6b9,8c1c27ec88fb52f06de6e7516a392672,0a5f992db51277a05ec12f0d6459ef21,8debe3a6023155561cb0890fc05bd7fb,938ece258b7596f8eea7e82bc2b8f88c,767ca0dcf0b154fa3c818044bbfc58fd,914cc7165d994bb05824332ac120446f,ab0ece250f5959a510170ee07aa21b5d,8bf4b44d67f062026b0010a8a0b39cc0,e0aa13fa8246e68c18905d3abadfc44d,27b021b75b6a95f63ea27f7ec238c05f,673e661e4cfea1e431678dd9881c2a8c,f101b34943f1831ae8c0b46ffcb1c2d6,562b32a8142b29c1a88e507ab1981a6b,fdea4c6fc2befb44614992ca8bf34b21,b7c8ec6acc45b037978482996e910b75,aec72fbd2e171b798900b22897d00941,710ef5b5e8eba750b6acc9b32dff42a3,821c7e22ef9c22098171e7f837dcfcc8,aecc9f6d0e6f54e938a10d40fda96d7b,5b2775c3b31961b6d9b9c695bee44303,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:25100 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
53DEAFCB-1ABC-42DC-AF1A-ED9C8D559D27 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-03-28T09:26:01.226029Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3458:5422], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv ETag: 5d8c28efc812b445ddd02900ff3ee599 } 2025-03-28T09:26:01.226383Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3457:5421], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-28T09:26:01.242974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-28T09:26:01.243050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:26:01.243243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-28T09:26:01.243360Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-28T09:26:01.243428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:01.243469Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.243507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:26:01.243567Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:26:01.243739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:26:01.247955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.248594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.248659Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:26:01.248776Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Part operation is done id#102:0 progress is 1/1 2025-03-28T09:26:01.248817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:26:01.248858Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:26:01.248894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:26:01.248931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:26:01.249007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-03-28T09:26:01.249058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:26:01.249097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:26:01.249131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:26:01.249257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:26:01.253796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:26:01.253862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3442:5407] TestWaitNotification: OK eventTxId 102
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:20.651039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:20.651179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.651245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:20.651290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:20.652008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:20.652065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:20.652170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.652272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:20.653498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:25:20.742877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:20.742944Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:20.760867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:20.761133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:20.761311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:20.767961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:20.768186Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:20.770425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.771440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.778367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.785389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.785472Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.786796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:20.786871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.786922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:20.787016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.793707Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:20.963093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:20.963453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.963735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:20.964049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:20.964128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.967903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.968070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:20.968267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.968358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:20.968425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:20.968485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:20.971687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.971767Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:20.971807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:20.975607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.975653Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.975704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.975768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.979426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:20.981493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:20.981758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:20.982914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.983104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:20.983155Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.983449Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:20.983502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:20.983663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
2025-03-28T09:25:20.983786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.986397Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.986447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.986614Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.986653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:20.987093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.987147Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:20.987251Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:20.987282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.987319Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:20.987393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.987442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:20.987487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:20.987531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:20.987560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:20.987629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:20.987670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:20.987702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:20.989469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:20.989585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:20.989623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
: } REQUEST: PUT /data_00.csv.zst?partNumber=99&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:19185 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 756B5D37-CB51-4BD3-8F2D-0585A1C1A3E8 amz-sdk-request: attempt=1 content-length: 55 content-md5: Ry5TonSXZhxkAEEJCUBcTg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=99&uploadId=1 / 55 2025-03-28T09:26:01.108842Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5422], result# UploadPartResult { ETag: 472e53a27497661c6400410909405c4e } 2025-03-28T09:26:01.109066Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3457:5421] 2025-03-28T09:26:01.109284Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3458:5422], sender# [1:3457:5421], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:19185 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D57C6134-D5C9-485E-BB01-5D5EBFD2F70D amz-sdk-request: attempt=1 content-length: 55 content-md5: B5SOCmjwb1RI3tHamcoRHA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=100&uploadId=1 / 55 2025-03-28T09:26:01.112692Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5422], result# UploadPartResult { ETag: 07948e0a68f06f5448ded1da99ca111c } 2025-03-28T09:26:01.112933Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3457:5421] 2025-03-28T09:26:01.113111Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3458:5422], sender# [1:3457:5421], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:19185 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 41A04DAE-701F-486D-A0D7-B253CDBE3158 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=101&uploadId=1 / 0 2025-03-28T09:26:01.116006Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3458:5422], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2025-03-28T09:26:01.116070Z node 1 :DATASHARD_BACKUP INFO: [Export] [s3] Finish: self# [1:3458:5422], success# 1, error# , multipart# 1, uploadId# 1 2025-03-28T09:26:01.122243Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3458:5422], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[f8f51a1e4a70db44fa91cc2ab9680824,9eba675fd7f187274786dff2f47292df,921325fb6b8811df3d06a44dbe1f8523,4eeb6b90e8e61075275bd8a42f56bd69,2840a487abe8cb9502b3d9c8a8e1c942,607d8f6e3b235a360d63796efd3a51c2,ed22e08df7fb8840f7cabc779cc86885,efeff2c7731061edd9a39059cc078045,4af01cb3455932f28e3bba713dcd57c9,dc94d36ecf3b36d183d75c84b9b2fac6,e2ce425dd2bb582abcc13d0d714c3554,b71e46686939d2cdf046520dd2774281,ab731a82a161e5e044b24e895a1713d6,1df51aaec89711e13a6f95c13113e36c,b6066b2ed343831b1b0ee0076179981e,332d34d77adc2b024a33d87e07d4233f,cf0093cc99590a0e8f9c199ed6deca07,8cc923ec76224e69263ac93b7bfabd30,690d66897e0780f2dfe3614e5a659a22,7502aae0ec253663b1cbfdc8ede92ab9,7d2c6f728ee0c12097dfe5441970b946,5fc7b9b675e0a125eea67cf05f82627f,fc8c5faa99cc7f4ce7ca320f8e7adb58,8e305c5aca758683ff25407a7bbd9220,181bce9c6393e22a0ac359a7b45d8187,639677548f0a8b776a6db92f44d96505,390ff8f57cfa4c04bfbed0d7a63c90e8,3dd76756e6558fd6c8c918210f7dc136,a3f5254fdad3ded54edef910e704c151,e9186373f80dbaa55dd04d07621de277,8898b965060a431b499261ec0cd3cee3,3ed51c736e64defe04980ce328b17aa4,bb0e45971888796588c12ea1c1bec162,e2b3defa84005d3892986ca6894b811f,656c7c809c8c8485f6e91892591cd284,779c6827126f255bde25ae242bf4c8ff,8883fc9b073e683558f1231c5f2142d0,19390a0e3340bcb6ccfe866a790f05cb,305182d3e9745fba3aad1973bb1bfc93,002819d72a6dc7954ecc1bcd2bd20254,325c6bc3cdd6fd83083cf0126c606218,b86932903843b9626e80bd9ccb5d0571,b5054116537a7c467bdb488c9d67dee7,fc3a45bd17a00b147e4f9c55bc2493da,1118e2f41e8839211163250796a65dce,b403ff17c2c269a79201a03ce439dc2a,88f2692ee439cfadef1cd21d58aac8d3,e5bef12f89b101af84d52299a5867d99,ed613335180c53f69d450ef8b176a4d5,150fd7dcdc86eb38c7f821ff4698d8bc,a0c18bf08acc6ebecac04a2520efee9b,e8463d7ce8f502d1575a433c1b30a9af,f123e0fc879e2fdc2c3e2f698fc4176d,d7ab79d73e4648e0a2bf8dec3a19c019,4e74b82f6a8ea7fad8790ee7dfcdb76e,f72bb1d8aa0f5c9265bae10a3784d8e8,924b317371d16363a37962b17a2ae4bb,7214b458c7e25c791e54bd430b835a6e,e79dba1b56122372af3fe7b06ea91bda,6aae345b94d78fc7c1ed0b8697cf5e62,fd3636ed699facb5f0c12f81741cabc5,2c4a198408c3eb9577fcd339ca62c539,59fbf761f9b7574b65fa6877b167bb8c,14f9f5cfdf3a6c33c577a54429b19cb6,c6d078b3be9cd7943e8145fd982baeef,198f55ae25539fbd54a4a6075beac2d1,939123b44e362c76a151a85af0247fb7,0147f8bd741be7780cbc900b6f4b0899,43453200aeaf201420737354cd73cfe4,de26d1339779fe0c538d01d5963fd423,5c903650e719f959dc9f37ea360c6319,23607b3f36e0a2abae7f1ed8e38596f3,0db9af920c6d1cf868e470bf7a349747,aed6ac19c60d08500582eea9dadcdfee,3f4e37ddd3e2e56a725323fad4d85cf6,942b269af420b4277d025cea489dcb25,89eddc25ba615b6cf09b9cd9a11a16bb,1d8e7f0613dc1919ee90133c468380bd,8bf1e4c1266d8437c1bd85e0fca6640a,e9eabcf5b61cf257f530b156dbd77a88,411f1661ae7650d2144e8c6f8a33b28f,6706ec5b8771e555779d5cbeca41aa75,b3a33ef21a8224ddc78a52e8d7ca8357,58749d344f42c192e572eda4ee66fb01,381aeb5ee3014e2c0fd9b85bd59ce005,9aed2297cd10dce10d68de3ff1830b42,be88e095fc3a13708b714db03b1f2744,5628e81ee17fb22fc828ed1b2169578b,a1cfb563fa4af884fe02ced05c26c881,fc602b8ee2e9746fb52823f8fd1f0f28,a1de256e94c7baa9b8ab905c892d1a14,6bff895b0b5f3552ad4bdc61b0d24148,fcba1d258a8651d831767b42e010e439,bef6e3d7088e671809fe584531f96971,f0b489242271d11200dbdbc78e4ce715,372d2d6877fff7c04433e492ad4dbd45,32191cf1972dcccd59c0b5a8b53d4f23,25928b7997b97ac58f18fbbe589573e8,472e53a27497661c6400410909405c4e,07948e0a68f06f5448ded1da99ca111c,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv.zst?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:19185 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
84A9BC07-47B4-444A-B715-320C39B077B8 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv.zst / uploadId=1 2025-03-28T09:26:01.134960Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3458:5422], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv.zst ETag: c902b621cdd1ee89b9f1c4e6c36e6e45 } 2025-03-28T09:26:01.135484Z node 1 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3457:5421], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-28T09:26:01.181617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-28T09:26:01.181700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:26:01.181880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-28T09:26:01.181993Z node 1 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2025-03-28T09:26:01.182057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:01.182095Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.182135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:26:01.182183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:26:01.182365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:26:01.186571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.187265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.187325Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:26:01.187448Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:26:01.187490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:26:01.187550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:26:01.187589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:26:01.187630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:26:01.187713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-03-28T09:26:01.187765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:26:01.187815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:26:01.187850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:26:01.187977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:26:01.192115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:26:01.192171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3442:5407] TestWaitNotification: OK eventTxId 102 >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError >> BasicStatistics::TwoDatabases [GOOD] >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |88.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPersQueueTest::DirectReadBadCases [GOOD] >> TPersQueueTest::DirectReadStop >> TestProgram::Like >> TColumnEngineTestLogs::IndexWriteOverload >> GenericFederatedQuery::YdbManagedSelectConstant >> GenericFederatedQuery::ClickHouseManagedSelectConstant >> GenericFederatedQuery::YdbFilterPushdown |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:59.372925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:59.373049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:59.373092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:59.373142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:59.373188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:59.373366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:59.373430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:59.373527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:59.373877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:59.458222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:59.458288Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:59.481587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:59.481870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:59.482063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:59.488220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:59.488478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:59.489186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:59.489424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:59.492623Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:59.493912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:59.493970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:59.494139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:59.494203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:59.494250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:59.494328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.501958Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:59.623882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:59.624145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.624393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:59.624661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:59.624728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.627694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:59.627855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:59.628069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.628126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:59.628163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:59.628205Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:59.634047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.634127Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:59.634165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:59.642270Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.642337Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.642382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:59.642431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:59.646639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:59.649085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:59.649317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:59.650427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:59.650592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:59.650645Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:59.651008Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:59.651084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:59.651260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:59.651337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:59.653757Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:59.653821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:59.654043Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:59.654095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:59.654498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.654549Z node 1 
:FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:59.654648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:59.654701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:59.654747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:59.654781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:59.654818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:59.654876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:59.654935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:59.654968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:59.655049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:59.655111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:59.655151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:59.657125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:59.657254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:59.657292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
sage: Source { RawX1: 529 RawX2: 4294969769 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-03-28T09:26:03.188107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-03-28T09:26:03.188319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 529 RawX2: 4294969769 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-03-28T09:26:03.188367Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-03-28T09:26:03.190205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:26:03.190275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-03-28T09:26:03.190329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-28T09:26:03.190362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-03-28T09:26:03.190443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-03-28T09:26:03.190557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-03-28T09:26:03.190744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-03-28T09:26:03.190823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-03-28T09:26:03.192807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:26:03.193759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:26:03.195267Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-28T09:26:03.195319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-28T09:26:03.195597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-03-28T09:26:03.195768Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-28T09:26:03.195825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-03-28T09:26:03.220676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-03-28T09:26:03.221080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:26:03.221137Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-03-28T09:26:03.221221Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:26:03.221251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-03-28T09:26:03.221282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-03-28T09:26:03.222216Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-03-28T09:26:03.222306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409546, cookie: 104 2025-03-28T09:26:03.222352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-03-28T09:26:03.222394Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 9 2025-03-28T09:26:03.222433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-03-28T09:26:03.223632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-03-28T09:26:03.223708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-03-28T09:26:03.223727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-03-28T09:26:03.223747Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:26:03.223767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-03-28T09:26:03.223814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-28T09:26:03.226805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:26:03.226877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-03-28T09:26:03.227270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-03-28T09:26:03.227503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:26:03.227572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:26:03.227614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:26:03.227643Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:26:03.227678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-03-28T09:26:03.227750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:557:2493] message: TxId: 104 2025-03-28T09:26:03.227804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:26:03.227841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:26:03.227870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:26:03.227966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-03-28T09:26:03.228301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-28T09:26:03.228332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-28T09:26:03.229009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-03-28T09:26:03.230847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-03-28T09:26:03.232003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-28T09:26:03.232062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-03-28T09:26:03.232155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:26:03.232192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:828:2745] 2025-03-28T09:26:03.232922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-03-28T09:26:03.234497Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-03-28T09:26:03.234861Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 235us result status StatusSuccess 2025-03-28T09:26:03.235297Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> YdbLogStore::LogTable [GOOD] >> YdbMonitoring::SelfCheck >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N5(0):{\"p\":{\"v\":\"001\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N0(0):{\"p\":{\"v\":\"uid\"},\"o\":\"16\",\"t\":\"Const\"}\n"]; N2[shape=box, label="N3(17):{\"i\":\"7,16\",\"o\":\"17\",\"t\":\"Calculation\"}\nREMOVE:16"]; N1 -> N2[label="1"]; N4 -> N2[label="2"]; N3[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N4[shape=box, label="N2(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N3 -> N4[label="1"]; N5[shape=box, label="N6(17):{\"i\":\"7,15\",\"o\":\"18\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N5[label="1"]; N4 -> N5[label="2"]; N6[shape=box, label="N4(27):{\"i\":\"17\",\"o\":\"19\",\"t\":\"Calculation\"}\nREMOVE:17"]; N2 -> N6[label="1"]; N7[shape=box, label="N7(27):{\"i\":\"18\",\"o\":\"20\",\"t\":\"Calculation\"}\nREMOVE:18"]; N5 -> N7[label="1"]; N8[shape=box, label="N8(64):{\"i\":\"19,20\",\"o\":\"21\",\"t\":\"Calculation\"}\nREMOVE:19,20"]; N6 -> N8[label="1"]; N7 -> N8[label="2"]; N9[shape=box, label="N9(64):{\"i\":\"21\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N8 -> N9[label="1"]; N1->N3->N4->N2->N6->N0->N5->N7->N8->N9[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1},{"from":4}]},{"owner_id":3,"inputs":[]},{"owner_id":4,"inputs":[{"from":3}]},{"owner_id":5,"inputs":[{"from":0},{"from":4}]},{"owner_id":6,"inputs":[{"from":2}]},{"owner_id":7,"inputs":[{"from":5}]},{"owner_id":8,"inputs":[{"from":6},{"from":7}]},{"owner_id":9,"inputs":[{"from":8}]}],"nodes":{"1":{"p":{"p":{"v":"uid"},"o":"16","t":"Const"},"w":0,"id":1},"3":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":3},"8":{"p":{"i":"19,20","o":"21","t":"Calculation"},"w":64,"id":8},"2":{"p":{"i":"7,16","o":"17","t":"Calculation"},"w":17,"id":2},"0":{"p":{"p":{"v":"001"},"o":"15","t":"Const"},"w":0,"id":0},"5":{"p":{"i":"7,15","o":"18","t":"Calculation"},"w":17,"id":5},"9":{"p":{"i":"21","t":"Projection"},"w":64,"id":9},"7":{"p":{"i":"18","o":"20","t":"Calculation"},"w":27,"id":7},"4":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":4},"6":{"p":{"i":"17","o":"19","t":"Calculation"},"w":27,"id":6}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow11BooleanTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow11BooleanTypeE; |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel >> TestProgram::YqlKernel [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoDatabases [GOOD] Test command err: 2025-03-28T09:23:31.550092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:527:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:23:31.550343Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:23:31.550449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f95/r3tmp/tmpl9u8VA/pdisk_1.dat 2025-03-28T09:23:32.033819Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27772, node 1 2025-03-28T09:23:32.602854Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:32.602918Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:32.602971Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:32.603696Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:23:32.606255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:23:32.716359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:32.716498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:32.751148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4997 2025-03-28T09:23:33.619539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:23:37.591692Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-03-28T09:23:37.640930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:37.641061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:37.693429Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T09:23:37.695992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:38.020704Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:38.021452Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:38.022003Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:38.022173Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:38.022464Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:38.022559Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:38.022669Z node 3 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:38.022797Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:38.022877Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:38.236257Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:38.236388Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:38.258436Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:38.488635Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:38.569497Z node 3 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:23:38.569707Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:23:38.631283Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:23:38.632680Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:23:38.632912Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:23:38.632975Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:23:38.633028Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:23:38.633085Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:23:38.633190Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:23:38.633257Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:23:38.634254Z node 3 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:23:38.673072Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:23:38.673203Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:1947:2600], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:23:38.679275Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1957:2607] 2025-03-28T09:23:38.687662Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1993:2624] 2025-03-28T09:23:38.688309Z node 3 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:1993:2624], schemeshard id = 72075186224037897 2025-03-28T09:23:38.688759Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database1 2025-03-28T09:23:38.717713Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:23:38.717772Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:23:38.717847Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database1/.metadata/_statistics 2025-03-28T09:23:38.739098Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:23:38.747260Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:23:38.747446Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:23:38.975661Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:23:39.203929Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:23:39.291537Z node 3 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:23:40.657785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:23:45.023097Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:23:45.064402Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:45.064546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:45.133680Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:23:45.136201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:45.428964Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:45.429553Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:45.430230Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:45.430428Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:45.432278Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:45.432530Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:45.432668Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:45.432797Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:45.432907Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:45.559771Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:45.559888Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:45.580514Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:45.797340Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:45.853188Z node 2 :STATISTICS INFO: [72075186224038895] OnActivateExecutor 2025-03-28T09:23:45.853278Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Execute 2025-03-28T09:23:45.932057Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Complete 2025-03-28T09:23:45.933539Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Execute 2025-03-28T09:23:45.933775Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded BaseStatistics: schemeshard 
count# 0 2025-03-28T09:23:45.933833Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ColumnStatistics: column count# 0 2025-03-28T09:23:45.933886Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:23:45.933941Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:23:45.934001Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:23:45.934053Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Complete 2025-03-28T09:23:45.935898Z node 2 :STATISTICS INFO: [72075186224038895] Subscribed for config changes 2025-03-28T09:23:46.016392Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224038895, at schemeshard: 72075186224038898 2025-03-28T09:23:46.016502Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3222:2602], at schemeshard: 72075186224038898, StatisticsAggregatorId: 72075186224038895, at schemeshard: 72075186224038898 2025-03-28T09:23:46.026833Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3231:2609] 2025-03-28T09:23:46.037242Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3270:2628 ... path count: 2, at schemeshard: 72075186224038898 2025-03-28T09:25:57.271842Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 233.000000s, at schemeshard: 72075186224038898 2025-03-28T09:25:57.272276Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038898, stats size# 49 2025-03-28T09:25:57.296434Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Complete 2025-03-28T09:25:57.913029Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [3:10314:4780]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:25:57.913370Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-28T09:25:57.913416Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [3:10314:4780], StatRequests.size() = 1 2025-03-28T09:25:58.382633Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal 2025-03-28T09:25:58.382722Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. No force traversals. 2025-03-28T09:25:58.382775Z node 2 :STATISTICS DEBUG: [72075186224038895] IsColumnTable. Path [OwnerId: 72075186224038898, LocalPathId: 3] is data table. 2025-03-28T09:25:58.382815Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038898, LocalPathId: 3] 2025-03-28T09:25:58.383205Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database2 2025-03-28T09:25:58.386910Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:25:58.393319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:10341:4460], DatabaseId: /Root/Database2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:58.393453Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:10351:4465], DatabaseId: /Root/Database2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:58.393621Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database2, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:58.418182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730658:2, at schemeshard: 72075186224038898 2025-03-28T09:25:58.518550Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:10355:4468], DatabaseId: /Root/Database2, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730658 completed, doublechecking } 2025-03-28T09:25:58.589386Z node 2 :TX_PROXY ERROR: Actor# [2:10445:4517] txid# 281474976730659, issues: { message: "Check failed: path: \'/Root/Database2/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224038898, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:58.613644Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:10474:4532]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:25:58.614067Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:25:58.614197Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:10476:4534] 2025-03-28T09:25:58.614276Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:10476:4534] 2025-03-28T09:25:58.615016Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:10477:4535] 2025-03-28T09:25:58.615221Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:10476:4534], server id = [2:10477:4535], tablet id = 72075186224038895, status = OK 2025-03-28T09:25:58.615302Z node 2 :STATISTICS DEBUG: [72075186224038895] EvConnectNode, pipe server id = [2:10477:4535], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:25:58.615367Z node 2 :STATISTICS DEBUG: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-28T09:25:58.615577Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:25:58.615665Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:10474:4532], StatRequests.size() = 1 2025-03-28T09:25:58.776541Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjcyOTRmN2MtNTgwMDRiMjctNjBlNTQ5MTItOTBhNDU5ZA==, TxId: 2025-03-28T09:25:58.776637Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjcyOTRmN2MtNTgwMDRiMjctNjBlNTQ5MTItOTBhNDU5ZA==, TxId: 2025-03-28T09:25:58.777344Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-03-28T09:25:58.797419Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038898, LocalPathId: 3] 2025-03-28T09:25:58.797483Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:25:58.822494Z node 2 :STATISTICS DEBUG: [72075186224038895] EvFastPropagateCheck 2025-03-28T09:25:58.822564Z node 2 :STATISTICS DEBUG: [72075186224038895] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:25:58.870541Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:10476:4534], schemeshard count = 1 2025-03-28T09:25:59.495888Z node 3 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:25:59.496382Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:25:59.496431Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:25:59.496474Z node 3 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-28T09:25:59.496506Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. 
Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:25:59.497010Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database1 2025-03-28T09:25:59.499889Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:25:59.528064Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=M2FhNzdmMDMtNjM0N2RmMzUtNzc1MmU1ZGItNWYwMmVlMTE=, TxId: 2025-03-28T09:25:59.528136Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=M2FhNzdmMDMtNjM0N2RmMzUtNzc1MmU1ZGItNWYwMmVlMTE=, TxId: 2025-03-28T09:25:59.528826Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:25:59.556305Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:25:59.556352Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:25:59.606938Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 122 ], ReplyToActorId[ [3:10572:4820]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:25:59.607297Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 122 ] 2025-03-28T09:25:59.607341Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 122, ReplyToActorId = [3:10572:4820], StatRequests.size() = 1 2025-03-28T09:26:00.870786Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 123 ], ReplyToActorId[ [3:10643:4846]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:26:00.871246Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 123 ] 2025-03-28T09:26:00.871297Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 123, ReplyToActorId = [3:10643:4846], StatRequests.size() = 1 2025-03-28T09:26:01.216898Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal 2025-03-28T09:26:01.216963Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. No force traversals. 2025-03-28T09:26:01.217026Z node 2 :STATISTICS DEBUG: [72075186224038895] IsColumnTable. Path [OwnerId: 72075186224038898, LocalPathId: 4] is data table. 2025-03-28T09:26:01.217067Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038898, LocalPathId: 4] 2025-03-28T09:26:01.217426Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database2 2025-03-28T09:26:01.221058Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:26:01.243279Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjhjZjE0NGUtZjU1ZGFlODktNjQwMTE4YjMtM2Q4MjZhZDc=, TxId: 2025-03-28T09:26:01.243350Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjhjZjE0NGUtZjU1ZGFlODktNjQwMTE4YjMtM2Q4MjZhZDc=, TxId: 2025-03-28T09:26:01.243994Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-03-28T09:26:01.259761Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038898, LocalPathId: 4] 2025-03-28T09:26:01.259820Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:26:01.784667Z node 3 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2025-03-28T09:26:01.785095Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:26:01.785143Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:26:01.785772Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:26:01.786328Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-03-28T09:26:01.835311Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 124 ], ReplyToActorId[ [3:10727:4860]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:26:01.835750Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 124 ] 2025-03-28T09:26:01.835786Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 124, ReplyToActorId = [3:10727:4860], StatRequests.size() = 1 2025-03-28T09:26:01.836608Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:10729:4618]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:26:01.841015Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:26:01.841091Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:10729:4618], StatRequests.size() = 1 >> TPersQueueTest::SameOffset [GOOD] >> TPersQueueTest::SchemeOperationsTest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { 
Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(28):{\"i\":\"3,4\",\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:3,4"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"3\",\"p\":{\"address\":{\"name\":\"sum\",\"id\":3}},\"o\":\"3\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(28):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"3,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"3","p":{"address":{"name":"sum","id":3}},"o":"3","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"3,4","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":28,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"3,4","o":"15","t":"Calculation"},"w":28,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; >> TestProgram::YqlKernelContains >> TestProgram::JsonValueBinary >> TestProgram::YqlKernelContains [GOOD] >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag >> TestProgram::SimpleFunction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:52.071191Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:52.071310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:52.071352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:52.071400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:52.071447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:52.071472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:52.071560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:52.071631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:52.072010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:52.157546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:52.157611Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:52.173057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:52.173309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:52.173488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:52.179400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:52.179668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:52.180252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.180462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:52.187455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.188794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:52.188858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.189031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:52.189093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:52.189151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:52.189235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.198887Z node 1 :HIVE INFO: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:52.318406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:52.318632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.318870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:52.319100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:52.319151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.321368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.321513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:52.321691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.321741Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:52.321773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:52.321801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:52.323828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.323888Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:52.323921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:52.325735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.325782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.325841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:52.325883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.329445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:52.335794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:52.336014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:52.337114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:52.337274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:52.337318Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:52.337591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:52.337639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:52.337804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:52.337907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:52.340302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:52.340367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:52.340546Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:52.340587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:52.341033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:52.341093Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:52.341181Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:52.341213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.341249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:52.341281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.341313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:52.341350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:52.341389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:52.341413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 
1:0 2025-03-28T09:25:52.341493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:52.341546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:52.341578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:52.343354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:52.343464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:52.343515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... teStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:26:04.502789Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:481:2439] sender: [1:764:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:481:2439] sender: [1:767:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:481:2439] sender: [1:768:2058] recipient: [1:766:2685] Leader for TabletID 72057594046678944 is [1:769:2686] sender: [1:770:2058] recipient: [1:766:2685] 2025-03-28T09:26:04.544883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:26:04.545002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:26:04.545049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:26:04.545088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:26:04.545182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2025-03-28T09:26:04.545213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:26:04.545273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:26:04.545353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:26:04.545839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:26:04.562452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:26:04.563853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:26:04.563989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:26:04.564071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:26:04.564098Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:04.564378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:26:04.565001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-03-28T09:26:04.565083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:26:04.565120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Table2, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:26:04.565172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.565229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.565520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 2, at schemeshard: 72057594046678944 2025-03-28T09:26:04.565599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-28T09:26:04.565664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-03-28T09:26:04.565770Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-28T09:26:04.565938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944 2025-03-28T09:26:04.566062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.566149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2025-03-28T09:26:04.566185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:26:04.566206Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:26:04.566231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:26:04.566337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 3, at schemeshard: 72057594046678944 2025-03-28T09:26:04.566486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.566675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2025-03-28T09:26:04.566996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.567116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.567390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.567447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.567652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.567764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.567846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.567977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.568034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.568168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.568336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.568471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.568511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.568547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:26:04.576999Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:26:04.577090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:04.577435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:26:04.577483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:26:04.577515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:26:04.577638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:769:2686] sender: [1:823:2058] 
recipient: [1:15:2062] 2025-03-28T09:26:04.610161Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:26:04.610444Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 298us result status StatusSuccess 2025-03-28T09:26:04.610910Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestKinesisHttpProxy::TestListStreamConsumersWithToken >> TestProgram::YqlKernelEquals >> TestYmqHttpProxy::TestGetQueueAttributes [GOOD] >> TestKinesisHttpProxy::ListShardsEmptyFields [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(28):{\"i\":\"7,9\",\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(28):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":28,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","o":"15","t":"Calculation"},"w":28,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; >> TestProgram::YqlKernelEquals [GOOD] >> TestProgram::JsonValueBinary [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::SimpleFunction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N2(17):{\"i\":\"2\",\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N0(2):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N2[shape=box, label="N1(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N3(17):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"15","t":"Projection"},"w":17,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"2","o":"15","t":"Calculation"},"w":17,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> YdbOlapStore::LogWithUnionAllAscending [GOOD] >> YdbOlapStore::LogWithUnionAllDescending |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TPersQueueTest::Cache [GOOD] >> TPersQueueTest::CacheHead >> TestYmqHttpProxy::TestTagQueue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 
10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(28):{\"i\":\"10,11\",\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(28):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"10","p":{"address":{"name":"i16","id":10}},"o":"10","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"i16","id":10},{"name":"float","id":11}]},"o":"10,11","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":28,"id":5},"4":{"p":{"i":"11","p":{"address":{"name":"float","id":11}},"o":"11","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"10,11","o":"15","t":"Calculation"},"w":28,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9FloatTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9FloatTypeE; digraph program {N0[shape=box, label="N3(28):{\"i\":\"10,11\",\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(28):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; } FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D 
VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"6,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; 
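The program_parsed records carry the same graph as JSON: "edges" lists each node's input nodes and "nodes" carries per-node weights. A sketch, assuming that JSON object has been copied out of one such record, that recovers an execution order consistent with the red chain drawn in the digraph (e.g. N0->N2->N3->N1->N4 for the JsonValueBinary program):

    import json
    from graphlib import TopologicalSorter  # Python 3.9+

    # Hypothetical copy of the logged program_parsed JSON object.
    parsed = json.load(open("program_parsed.json"))

    # Dependency map: node id -> set of node ids it consumes from.
    deps = {e["owner_id"]: {i["from"] for i in e["inputs"]}
            for e in parsed["edges"]}
    order = list(TopologicalSorter(deps).static_order())
    print(order)  # one valid order, e.g. [0, 2, 3, 1, 4]
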
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T ... 21H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"6,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 
0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"6,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 
01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; >> TestYmqHttpProxy::TestDeleteQueue |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestYmqHttpProxy::TestUntagQueue |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestKinesisHttpProxy::TestUnauthorizedPutRecords [GOOD] >> TopicService::UnknownConsumer [GOOD] >> TestProgram::CountWithNulls >> KqpJoinOrder::Chain65Nodes [GOOD] >> TestProgram::CountWithNulls [GOOD] >> TColumnEngineTestLogs::IndexTtl >> TestProgram::JsonExistsBinary >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK >> TestProgram::JsonExistsBinary [GOOD] >> TColumnEngineTestLogs::IndexTtl [GOOD] >> TColumnEngineTestLogs::IndexReadWithPredicates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N2(17):{\"i\":\"2\",\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N0(2):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N2[shape=box, label="N1(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N3(17):{\"i\":\"10001\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Projection"},"w":17,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"2","o":"10001","t":"Calculation"},"w":17,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; 
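Each json_binary dump above lists the same six test rows twice: first as hex-encoded JSON text, then in YDB's internal JsonDocument binary serialization. A sketch decoding the text form (the binary form is an internal format and is left opaque here):

    rows = [
        "7B226B6579223A2276616C7565227D",
        "7B226B6579223A31307D",
        "7B226B6579223A302E317D",
        "7B226B6579223A66616C73657D",
        "7B22616E6F74686572223A2276616C7565227D",
        "5B5D",
    ]
    for h in rows:
        print(bytes.fromhex(h).decode("utf-8"))
    # {"key":"value"}  {"key":10}  {"key":0.1}
    # {"key":false}    {"another":"value"}  []
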
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestKinesisHttpProxy::TestWrongStream >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExistsBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"6,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"6,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; >> TColumnEngineTestLogs::IndexReadWithPredicates [GOOD] >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD] >> TestYmqHttpProxy::TestListQueues [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; 
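The JsonExistsBinary trace above applies the Json2.JsonDocumentSqlExists kernel with path "$.key" over those six rows and projects a boolean (UInt8) column. A rough model of the expected output, under the assumption that the path simply tests for the top-level member:

    import json

    docs = ['{"key":"value"}', '{"key":10}', '{"key":0.1}',
            '{"key":false}', '{"another":"value"}', '[]']

    # Approximation of JSON_EXISTS(doc, "$.key"): true iff "key" is a
    # top-level member of a JSON object.
    exists = [isinstance(json.loads(d), dict) and "key" in json.loads(d)
              for d in docs]
    print(exists)  # [True, True, True, True, False, False]
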
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; 
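In the IndexTtl trace, each upsert_portion's column_size matches the sum of the per-column serialize sizes logged just before it. A quick check of that relationship for the first portion:

    # Sizes logged for portion 1: _yql_plan_step, timestamp, _yql_tx_id,
    # resource_type, resource_id, uid, message.
    sizes = [232, 4200, 232, 8392, 8392, 8392, 8392]
    assert sum(sizes) == 38232  # column_size in the upsert_portion record
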
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=88432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8392;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING; ... ALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=496;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=496;columns=4; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1;tx_id=1;););visible=0;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;));); 
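The IsVisible decisions above follow the usual MVCC pattern: portions carrying remove_snapshot (plan_step=1, tx_id=1) are invisible to a read at snapshot (plan_step=3, tx_id=1), while portions without a remove_snapshot stay visible and are selected. A minimal sketch of that rule, assuming snapshots compare lexicographically by (plan_step, tx_id):

    def is_visible(min_snapshot, remove_snapshot, read_snapshot):
        # Visible iff created at or before the read snapshot
        # and not yet removed as of it.
        if min_snapshot > read_snapshot:
            return False
        return remove_snapshot is None or remove_snapshot > read_snapshot

    read = (3, 1)
    print(is_visible((1, 1), (1, 1), read))  # False -> matches visible=0
    print(is_visible((1, 1), None, read))    # True  -> matches visible=1
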
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38200;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38264;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:38232;index_size:0;meta:((produced=INSERTED;));); |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TopicService::UnknownTopic >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicatesStrPK [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););path_id=1; 
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING; ... t=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;)););
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexReadWithPredicates [GOOD]
Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););path_id=1; 
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING; ... t=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:6;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37072;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:7;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37136;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:8;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37120;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:9;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37160;index_size:0;meta:((produced=INSERTED;));); 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:10;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37088;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:11;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37560;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:12;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37488;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:13;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:14;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:15;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:16;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37624;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:17;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:18;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:19;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37616;index_size:0;meta:((produced=INSERTED;));); FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=3;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_skipped;pathId=1;portion=(portion_id:20;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37592;index_size:0;meta:((produced=INSERTED;)););
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadRead [GOOD]
Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=2; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=1072;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););
>> TestYmqHttpProxy::TestPurgeQueue
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteLoadReadStrPK [GOOD]
Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=2; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=3912;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=1072;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=760;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););path_id=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=0;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=0;snapshot=plan_step=1;tx_id=2;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=portion_info.h:377;event=IsVisible;analyze_portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););visible=1;snapshot=plan_step=2;tx_id=1;; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=334;fline=column_engine_logs.cpp:460;event=portion_selected;pathId=1;portion=(portion_id:1;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:6184;index_size:0;meta:((produced=INSERTED;)););
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/join/unittest >> KqpJoinOrder::Chain65Nodes [GOOD]
Test command err: Trying to start YDB, gRPC: 21284, MsgBus: 1318 2025-03-28T09:21:24.222311Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788066231481935:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:24.222395Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0017ff/r3tmp/tmpHnVmyi/pdisk_1.dat 2025-03-28T09:21:24.979232Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:21:25.017727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:21:25.017810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:21:25.023757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21284, node 1 2025-03-28T09:21:25.294000Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:21:25.294023Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:21:25.294029Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:21:25.294134Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1318 TClient is connected to server localhost:1318 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:21:26.374355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:21:26.396941Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:21:29.216846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788087706318857:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:29.216942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:29.223082Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788066231481935:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:21:29.223155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:21:29.555429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:21:29.702308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788087706318962:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:29.702371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
[... the same cycle — a TPoolFetcherActor NOT_FOUND warning, a Service NOT_FOUND warning, and a FLAT_TX_SCHEMESHARD "propose itself is undo unsafe" warning for the next ESchemeOpCreateTable — repeats for opIds 281474976710659 through 281474976710722 between 2025-03-28T09:21:29.71 and 2025-03-28T09:21:35.08; the repeated entries differ only in timestamps, actor ids, and minor interleaving, and the source log already elides part of this run ...]
2025-03-28T09:21:35.161455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788113476127903:2933], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:35.161526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:35.161742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788113476127908:2936], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:21:35.165828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710723:3, at schemeshard: 72057594046644480 2025-03-28T09:21:35.186838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486788113476127910:2937], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710723 completed, doublechecking } 2025-03-28T09:21:35.286162Z node 1 :TX_PROXY ERROR: Actor# [1:7486788113476127974:5807] txid# 281474976710724, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 70], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:21:39.961894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:21:39.961930Z node 1 :IMPORT WARN: Table profiles were not loaded
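Note: the NOT_FOUND noise above is the workload manager racing test bootstrap — every query tries to fetch the default resource pool before TPoolCreatorActor has materialized /Root/.metadata/workload_manager/pools/default, and the final "path exist, request accepts it" message shows the create completing idempotently, after which the warnings stop. A minimal YQL sketch of declaring a pool explicitly, assuming the current workload-manager syntax; the pool name and limits are illustrative, not taken from this run:

    CREATE RESOURCE POOL olap_tests WITH (
        CONCURRENT_QUERY_LIMIT = 10, -- queries allowed to execute at once
        QUEUE_SIZE = 100             -- queries allowed to wait for a free slot
    );

In this run nothing needs to be created by hand; the service retries until the automatic creation of the default pool lands at 09:21:35.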
: Warning: Execution, code: 1060
: Warning: Cost Based Optimizer could not be applied to this query: Enumeration is too large, use PRAGMA MaxDPHypDPTableSize='4294967295' to disable the limitation, code: 8000 >> TPersQueueTest::NoDecompressionMemoryLeaks [GOOD] >> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor [GOOD] >> TestYmqHttpProxy::BillingRecordsForJsonApi [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar >> TestProgram::YqlKernelEndsWithScalar [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith >> TPersQueueTest::WriteEmptyData [GOOD] >> TPersQueueTest::WriteNonExistingPartition |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"amet.\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"7,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"7,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"amet."},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName >> TestYmqHttpProxy::TestChangeMessageVisibility |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(28):{\"i\":\"7,9\",\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(28):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":28,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","o":"15","t":"Calculation"},"w":28,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; >> YdbMonitoring::SelfCheck [GOOD] |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> DemoTx::Scenario_4 [GOOD] >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] >> TestProgram::JsonValue |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:57.183078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:57.183212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:57.183259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:57.183307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:57.183349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:57.183378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:57.183431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:57.183526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:57.183852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:57.264145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:57.264199Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:57.276031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:57.276268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:57.276418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:57.282521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:57.282751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:57.283354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:57.283558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:57.285515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:57.286824Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
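The Cost Based Optimizer warning a few entries above (code 8000, "Enumeration is too large") names its own escape hatch. A minimal YQL sketch applying the suggested pragma; the query below is a hypothetical join-heavy shape with illustrative table and column names, and the pragma value is the one the warning itself suggests:

    PRAGMA MaxDPHypDPTableSize='4294967295';

    -- hypothetical multi-join query that would otherwise exceed the
    -- DPhyp enumeration limit and fall back to the non-CBO plan
    SELECT a.id
    FROM t1 AS a
    JOIN t2 AS b ON a.id = b.id
    JOIN t3 AS c ON b.id = c.id;

The pragma lifts the cap on the DPhyp join-order table, so the optimizer enumerates the full join order space instead of falling back.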
2025-03-28T09:25:57.286883Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:57.287046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:57.287118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:57.287167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:57.287242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.293590Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:57.412759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:57.412994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.413207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:57.413487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:57.413543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.415919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:57.416061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:57.416259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.416312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:57.416343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:57.416372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:57.418387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.418443Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:57.418475Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:57.420234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.420274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.420310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:57.420375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:57.432822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:57.434634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:57.434846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:57.435765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:57.435863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:57.435899Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:57.436091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:57.436134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:57.436251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:57.436305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:57.437912Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:57.437962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:57.438123Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:57.438169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:57.438516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:57.438547Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:57.438635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:57.438669Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:57.438715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:57.438737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:57.438763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:57.438800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:57.438832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:57.438863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:57.438928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:57.438985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:57.439018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:57.440687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:57.440791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:57.440825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 
94046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 763 RawX2: 4294969994 } Origin: 72075186233409549 State: 5 TxId: 107 Step: 0 Generation: 2 2025-03-28T09:26:10.797182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 0 2025-03-28T09:26:10.797284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Source { RawX1: 763 RawX2: 4294969994 } Origin: 72075186233409549 State: 5 TxId: 107 Step: 0 Generation: 2 2025-03-28T09:26:10.797358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72057594046678944 2025-03-28T09:26:10.797847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:26:10.797906Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 ProgressState, operation type: TxDropTable, at tablet# 72057594046678944 2025-03-28T09:26:10.797961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 107:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-28T09:26:10.797998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-03-28T09:26:10.798076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72057594046678944 2025-03-28T09:26:10.798183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-03-28T09:26:10.798292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:26:10.798359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:26:10.800842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:26:10.801663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:26:10.801939Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:26:10.801991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:26:10.802137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:26:10.802236Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:10.802268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 107, path id: 2 2025-03-28T09:26:10.802300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 107, path id: 4 2025-03-28T09:26:10.802585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 
72057594046678944 2025-03-28T09:26:10.802626Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:26:10.802701Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:26:10.802733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2025-03-28T09:26:10.802795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-03-28T09:26:10.803570Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2025-03-28T09:26:10.803694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2025-03-28T09:26:10.803742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-03-28T09:26:10.803800Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 14 2025-03-28T09:26:10.803838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:26:10.804658Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-03-28T09:26:10.804749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2025-03-28T09:26:10.804784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2025-03-28T09:26:10.804810Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-28T09:26:10.804840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:26:10.804906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-03-28T09:26:10.807800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:26:10.807858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:26:10.808188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:26:10.808346Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-03-28T09:26:10.808388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-28T09:26:10.808428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 
1/1 2025-03-28T09:26:10.808469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-28T09:26:10.808504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-03-28T09:26:10.808543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-28T09:26:10.808595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-03-28T09:26:10.808645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-03-28T09:26:10.808746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:26:10.809162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:26:10.809214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:26:10.809671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-28T09:26:10.811649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2025-03-28T09:26:10.812485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:10.812536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 0, path id: 2 2025-03-28T09:26:10.813230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 15 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification wait txId: 107 2025-03-28T09:26:10.813852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-03-28T09:26:10.813913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-03-28T09:26:10.814507Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-03-28T09:26:10.814619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-03-28T09:26:10.814663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:994:2918] TestWaitNotification: OK eventTxId 107 2025-03-28T09:26:10.815347Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:26:10.815584Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 242us result status StatusSuccess 2025-03-28T09:26:10.816030Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestProgram::JsonValue [GOOD] |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> YdbOlapStore::LogGrepExisting [GOOD] >> YdbOlapStore::LogExistingUserId >> TestProgram::JsonExists >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag [GOOD] >> TestProgram::JsonExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbMonitoring::SelfCheck [GOOD] Test command err: 2025-03-28T09:23:54.189091Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788710721480225:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:54.189231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00145a/r3tmp/tmpxWBd8h/pdisk_1.dat 2025-03-28T09:23:54.937358Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26877, node 1 2025-03-28T09:23:55.166770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:55.170765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:55.363902Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:55.379737Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:55.379771Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:55.379785Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:55.379962Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61768 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:23:55.729888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:56.099007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Uint8" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:37288" , at schemeshard: 72057594046644480 2025-03-28T09:23:56.099398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:23:56.111089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: LogStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-28T09:23:56.111144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-03-28T09:23:56.111199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:23:56.111260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:23:56.111295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-28T09:23:56.111347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, 
LocalPathId: 2] was 4 2025-03-28T09:23:56.111587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-03-28T09:23:56.113478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-03-28T09:23:56.113707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:23:56.113724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:23:56.113857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:23:56.113904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-03-28T09:23:56.117485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-03-28T09:23:56.117624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-03-28T09:23:56.117795Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:23:56.117806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:23:56.117940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:23:56.118012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:23:56.118030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486788715016448014:2380], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-03-28T09:23:56.118041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486788715016448014:2380], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-03-28T09:23:56.118070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:23:56.118115Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-03-28T09:23:56.118723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } 
BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-28T09:23:56.119249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } Binde ... 
4480 2025-03-28T09:25:59.616665Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:25:59.616742Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:25:59.616774Z node 64 :FLAT_TX_SCHEMESHARD INFO: TDropOlapStore TProposedDeleteParts operationId# 281474976715667:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:25:59.616832Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-03-28T09:25:59.617033Z node 64 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715667:0 progress is 1/1 2025-03-28T09:25:59.617049Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715667 ready parts: 1/1 2025-03-28T09:25:59.617072Z node 64 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715667:0 progress is 1/1 2025-03-28T09:25:59.617088Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715667 ready parts: 1/1 2025-03-28T09:25:59.617108Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715667, ready parts: 1/1, is published: true 2025-03-28T09:25:59.617166Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [64:7486789248269785153:2370] message: TxId: 281474976715667 2025-03-28T09:25:59.617191Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715667 ready parts: 1/1 2025-03-28T09:25:59.617217Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715667:0 2025-03-28T09:25:59.617228Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715667:0 2025-03-28T09:25:59.617393Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-03-28T09:25:59.619368Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:25:59.619395Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:25:59.619424Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:25:59.619440Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:25:59.629139Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-28T09:25:59.630238Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-03-28T09:25:59.630566Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-28T09:25:59.631540Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-28T09:25:59.631780Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 
TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-28T09:25:59.632592Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:25:59.632784Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-28T09:25:59.633606Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:25:59.633819Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:25:59.633853Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-28T09:25:59.633910Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:25:59.635148Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[64:7486789243974817193:2325];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:25:59.648830Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037889 not found 2025-03-28T09:25:59.648917Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037891 not found 2025-03-28T09:25:59.648951Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037888 not found 2025-03-28T09:25:59.649003Z node 64 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 64, TabletId: 72075186224037890 not found 2025-03-28T09:25:59.660075Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[64:7486789243974817198:2326];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:25:59.671623Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-28T09:25:59.671671Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-28T09:25:59.680501Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[64:7486789243974817218:2327];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:25:59.685117Z node 64 :TX_COLUMNSHARD DEBUG: Server pipe reset at tablet 72075186224037890 2025-03-28T09:25:59.686816Z node 64 :TX_COLUMNSHARD DEBUG: Server pipe reset at tablet 72075186224037890 2025-03-28T09:25:59.687223Z node 64 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[64:7486789243974817191:2324];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:25:59.688354Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-28T09:25:59.688381Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-28T09:25:59.750458Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-28T09:25:59.750505Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted 
shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-28T09:25:59.750587Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-28T09:25:59.750608Z node 64 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-28T09:25:59.750650Z node 64 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-28T09:26:04.711275Z node 67 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[67:7486789267784585650:2108];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:04.712619Z node 67 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00145a/r3tmp/tmpTXraUO/pdisk_1.dat 2025-03-28T09:26:04.880280Z node 67 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:04.921338Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:04.921449Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:04.924874Z node 67 :HIVE WARN: HIVE#72057594037968897 Node(67, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17441, node 67 2025-03-28T09:26:05.004963Z node 67 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:26:05.004992Z node 67 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:26:05.005004Z node 67 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:26:05.005152Z node 67 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30217 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:26:05.378069Z node 67 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
self_check_result: GOOD issue_log { id: "YELLOW-f489-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-1ba8-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1ba8-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-e9e2-1231c6b1-67" reason: "YELLOW-e9e2-1231c6b1-68" reason: "YELLOW-e9e2-1231c6b1-69" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-e9e2-1231c6b1-67" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 67 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-68" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 68 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-e9e2-1231c6b1-69" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 69 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } location { id: 67 host: "::1" port: 12001 } >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD] >> TestProgram::YqlKernelEndsWith |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"5,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"5,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"5,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape= ... 003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203B\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?6 VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"5,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"5,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", 
"{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"5,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"5,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", 
"[]" ] Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10DoubleTypeE; >> TestProgram::YqlKernelEndsWith [GOOD] >> TestProgram::CountUIDByVAT >> TestProgram::YqlKernelStartsWithScalar |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestInsertTable::TestInsertCommit [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=insert_table.cpp:43;event=commit_insertion;path_id=0;blob_range={ Blob: DS:0:[2222:1:1:2:100:1:0] Offset: 0 Size: 0 }; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=152;columns=1; >> TestProgram::CountUIDByVAT [GOOD] >> TestProgram::YqlKernelStartsWithScalar [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"5,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"5,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] >> TestKinesisHttpProxy::BadRequestUnknownMethod >> DemoTx::Scenario_5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(28):{\"i\":\"7,9\",\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N1(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(28):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"15","t":"Projection"},"w":28,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","o":"15","t":"Calculation"},"w":28,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N3(18):{\"i\":\"2,4\",\"o\":\"10001\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N1(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N4(27):{\"i\":\"10001,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N0(4):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2},{\"name\":\"vat\",\"id\":4}]},\"o\":\"2,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6->N4->N2->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":4},{"from":0}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"uid","id":2},{"name":"vat","id":4}]},"o":"2,4","t":"FetchOriginalData"},"w":4,"id":6},"5":{"p":{"i":"10001,4","t":"Projection"},"w":27,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"2,4","o":"10001","t":"Aggregation"},"w":18,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9Int32TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:33;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:102;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:154;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"Lorem\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N3(17):{\"i\":\"7,15\",\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N1(2):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N3[shape=box, label="N2(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N4(17):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N0->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"i":"7,15","o":"16","t":"Calculation"},"w":17,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"4":{"p":{"i":"16","t":"Projection"},"w":17,"id":4},"0":{"p":{"p":{"v":"Lorem"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:482;T=N5arrow9UInt8TypeE; >> TestKinesisHttpProxy::TestCounters >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId [GOOD] |88.7%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:59.128496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:59.128612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:59.128663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:59.128697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:59.128742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:59.128771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:59.128819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:59.128930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:59.129207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:59.212358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:59.212422Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:59.228207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:59.228490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:59.228679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:59.251860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:59.252117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:59.252748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:59.252974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:59.257674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:59.259104Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:59.259168Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:59.259368Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:59.259438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:59.259482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:59.259586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.278725Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:59.404400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:59.404635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.404842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:59.405085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:59.405143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.413168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:59.413329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:59.413533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.413584Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:59.413627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:59.413656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:59.416492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.416576Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:59.416612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:59.418539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.418584Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.418624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-03-28T09:25:59.418676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:59.422241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:59.424317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:59.424479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:59.425539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:59.425666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:59.425711Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:59.425963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:59.426017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:59.426178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:59.426259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:59.431763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:59.431818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:59.432011Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:59.432050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:59.432442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:59.432496Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:59.432603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:59.432651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:59.432688Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-03-28T09:25:59.432726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:59.432764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:59.432803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:59.432849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:59.432879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:59.432939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:59.432977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:59.433006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:59.434779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:59.434895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:59.434931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... : Source { RawX1: 529 RawX2: 4294969769 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-03-28T09:26:12.558852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-03-28T09:26:12.559036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72075186233409546, message: Source { RawX1: 529 RawX2: 4294969769 } Origin: 72075186233409549 State: 5 TxId: 104 Step: 0 Generation: 2 2025-03-28T09:26:12.559116Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvDataShard::TEvSchemaChanged, save it, at schemeshard: 72075186233409546 2025-03-28T09:26:12.562137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:26:12.562237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 ProgressState, operation type: TxDropTable, at tablet# 72075186233409546 2025-03-28T09:26:12.562295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 104:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-28T09:26:12.562349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 104, done: 0, blocked: 1 2025-03-28T09:26:12.562428Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 104:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 104 Name: RenamePathBarrier }, at tablet# 72075186233409546 2025-03-28T09:26:12.562568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 137 -> 129 2025-03-28T09:26:12.562720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 4 2025-03-28T09:26:12.562788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-03-28T09:26:12.565177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:26:12.565426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:26:12.567921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-28T09:26:12.567990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-28T09:26:12.568205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 2] 2025-03-28T09:26:12.568416Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-28T09:26:12.568460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 104, path id: 1 2025-03-28T09:26:12.568658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 104, path id: 2 2025-03-28T09:26:12.568794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:26:12.568870Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2025-03-28T09:26:12.568973Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:26:12.569021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2025-03-28T09:26:12.569083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-03-28T09:26:12.570771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2025-03-28T09:26:12.570892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72075186233409546, cookie: 104 2025-03-28T09:26:12.570929Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-03-28T09:26:12.570975Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 11 2025-03-28T09:26:12.571032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 5 2025-03-28T09:26:12.572060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-03-28T09:26:12.572156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 
Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2025-03-28T09:26:12.572184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2025-03-28T09:26:12.572216Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:26:12.572246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 4 2025-03-28T09:26:12.572325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-28T09:26:12.576881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2025-03-28T09:26:12.576957Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2025-03-28T09:26:12.577351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 3 2025-03-28T09:26:12.577588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:26:12.577629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:26:12.577672Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:26:12.577718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:26:12.577758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-03-28T09:26:12.577840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:557:2493] message: TxId: 104 2025-03-28T09:26:12.577890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:26:12.577933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:26:12.577968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:26:12.578077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 2] was 2 2025-03-28T09:26:12.578777Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-28T09:26:12.578822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-28T09:26:12.579932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-03-28T09:26:12.584130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2025-03-28T09:26:12.585895Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-28T09:26:12.585966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:447:2399], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-03-28T09:26:12.586443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 
2025-03-28T09:26:12.586515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1435:3343] 2025-03-28T09:26:12.587167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2025-03-28T09:26:12.592014Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2025-03-28T09:26:12.592272Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 295us result status StatusSuccess 2025-03-28T09:26:12.592795Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 2 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 >> TestScript::StepMerging [GOOD] >> TestKinesisHttpProxy::ListShardsTimestamp >> TPersQueueTest::StreamReadCreateAndDestroyMsgs [GOOD] >> TPersQueueTest::StreamReadCommitAndStatusMsgs >> TestYmqHttpProxy::TestUntagQueue [GOOD] |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestScript::StepMerging [GOOD] |88.8%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... 
results_accumulator.log} >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] >> GenericFederatedQuery::YdbFilterPushdown [GOOD] >> TestKinesisHttpProxy::TestWrongStream [GOOD] >> IndexBuildTest::CancellationNotEnoughRetries [GOOD] >> IndexBuildTest::CancellationNoTable >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] Test command err: Trying to start YDB, gRPC: 63523, MsgBus: 23723 2025-03-28T09:26:04.075638Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789268078368890:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:04.075754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ad0/r3tmp/tmpjSQMgJ/pdisk_1.dat 2025-03-28T09:26:04.556176Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:04.566444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:04.566528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:04.569442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63523, node 1 2025-03-28T09:26:04.743281Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:26:04.743300Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:26:04.743305Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:26:04.743396Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23723 TClient is connected to server localhost:23723 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:26:05.492730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:26:05.508486Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:26:07.315080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789280963271445:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:07.315251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:07.703736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-28T09:26:07.828972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789280963271568:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:07.829040Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:07.829357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789280963271573:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:07.833334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-28T09:26:07.843285Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T09:26:07.843869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789280963271575:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:26:07.943082Z node 1 :TX_PROXY ERROR: Actor# [1:7486789280963271615:2402] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:26:08.759609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:26:09.076636Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789268078368890:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:09.076950Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:26:09.195490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-03-28T09:26:09.658991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:26:10.205592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:26:10.666028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:26:11.188087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:26:11.233073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:26:13.161942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710709:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. 
GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> TPersQueueTest::DirectReadStop [GOOD] >> TPersQueueTest::DirectReadCleanCache ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 20057, MsgBus: 65490 2025-03-28T09:26:04.074057Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789268710818987:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:04.074158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000adb/r3tmp/tmpwHZ4YB/pdisk_1.dat 2025-03-28T09:26:04.527071Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:04.547106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:04.547768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:04.558746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20057, node 1 2025-03-28T09:26:04.747375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:26:04.747400Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:26:04.747408Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:26:04.747527Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65490 TClient is connected to server localhost:65490 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:26:05.480211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:26:07.266766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789281595721540:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:07.266956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:07.703740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-28T09:26:07.860632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789281595721662:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:07.860713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:07.861060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789281595721667:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:07.864419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-28T09:26:07.873484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789281595721669:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:26:07.972806Z node 1 :TX_PROXY ERROR: Actor# [1:7486789281595721710:2402] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:26:08.759172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:26:09.073913Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789268710818987:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:09.073985Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:26:09.216778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-03-28T09:26:09.731992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:26:10.222567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:26:10.792652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:26:11.264251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:26:11.305215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:26:13.210112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710709:0, at schemeshard: 72057594046644480 Call DescribeTable. data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. 
GRpcStatusCode: 0 schema { columns { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } columns { name: "data_column" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "data_column" type { optional_type { item { type_id: STRING } } } } } items { column { name: "filtered_column" type { optional_type { item { type_id: INT32 } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "filtered_column" } right_value { typed_value { type { type_id: INT32 } value { int32_value: 42 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> TestKinesisHttpProxy::TestWrongStream2 >> IndexBuildTest::CancellationNoTable [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectConstant [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TPersQueueTest::TopicServiceReadBudget [GOOD] >> TPersQueueTest::TopicServiceSimpleHappyWrites |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TColumnEngineTestLogs::IndexWriteOverload [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancellationNoTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:25:20.753306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:25:20.753459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.753501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:25:20.753531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:25:20.753566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:25:20.753586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:25:20.753642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:25:20.753705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 
604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:25:20.754316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:25:20.852743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:25:20.852799Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:20.865947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:25:20.866238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:25:20.866438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:25:20.872672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:25:20.872915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:25:20.873599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:20.873847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:20.875644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:20.885850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:25:20.885899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:20.885955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:25:20.886051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:25:20.894958Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:25:21.050833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:25:21.051102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.051315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:25:21.051554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:25:21.051613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.054119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.054260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:25:21.054441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.054495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:25:21.054538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:25:21.054573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:25:21.056696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.056752Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:25:21.056787Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:25:21.058654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.058724Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.058772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.058816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.062157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:21.065055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:25:21.065235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:25:21.066376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:25:21.066499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:25:21.066550Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:25:21.066916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:25:21.066981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at 
tablet# 72057594046678944 2025-03-28T09:25:21.067144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:25:21.067219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:25:21.070120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:25:21.070177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:25:21.070340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:25:21.070379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:25:21.070803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:25:21.070888Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:25:21.070974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.071008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.071045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:25:21.071076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.071125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:25:21.071163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:25:21.071199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:25:21.071227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:25:21.071312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:25:21.071349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:25:21.071398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:25:21.073190Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.073289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:25:21.073323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
less db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:26:15.840044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:26:15.840227Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:26:15.852010Z node 2 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [2:122:2148] sender: [2:240:2058] recipient: [2:15:2062] 2025-03-28T09:26:15.863245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:26:15.863464Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:15.863683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:26:15.863885Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:26:15.863938Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:15.866029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:15.866122Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:26:15.866289Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:15.866334Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:26:15.866367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:26:15.866397Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:26:15.868122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:15.868164Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:26:15.868195Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:26:15.869630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:15.869664Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:15.869706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:26:15.869752Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:26:15.869890Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:26:15.871297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:26:15.871464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:26:15.872239Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:15.872343Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 8589936749 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:26:15.872392Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:26:15.872616Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:26:15.872670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:26:15.872849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:26:15.872918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:26:15.874604Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:26:15.874646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:26:15.874844Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:15.874888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:26:15.875154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:15.875191Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:26:15.875285Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:26:15.875320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:26:15.875351Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:26:15.875394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:26:15.875430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:26:15.875463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:26:15.875491Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:26:15.875515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:26:15.875559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:26:15.875607Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:26:15.875635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:26:15.876168Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:26:15.876253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:26:15.876297Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:26:15.876342Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:26:15.876386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:26:15.876477Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:26:15.878800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:26:15.879183Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:15.879554Z node 2 :TX_PROXY DEBUG: actor# [2:270:2261] Bootstrap 2025-03-28T09:26:15.896646Z node 2 :TX_PROXY DEBUG: actor# [2:270:2261] Become StateWork (SchemeCache [2:275:2266]) 2025-03-28T09:26:15.897073Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: DoExecute TxId: 101 DatabaseName: "/MyRoot" Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { settings { } } } max_batch_rows: 2 max_shards_in_flight: 2 } 2025-03-28T09:26:15.897229Z node 2 :BUILD_INDEX NOTICE: TIndexBuilder::TXTYPE_CREATE_INDEX_BUILD: Reply TxId: 101 Status: BAD_REQUEST Issues { message: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" severity: 1 } SchemeStatus: 2 2025-03-28T09:26:15.897652Z node 2 :TX_PROXY DEBUG: actor# [2:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:26:15.900014Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 BUILDINDEX RESPONSE CREATE: NKikimrIndexBuilder.TEvCreateResponse TxId: 101 Status: BAD_REQUEST Issues { message: "Check 
failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" severity: 1 } SchemeStatus: 2 TestWaitNotification wait txId: 101 2025-03-28T09:26:15.900317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:26:15.900361Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:26:15.900761Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:26:15.900854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:26:15.900890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:282:2273] TestWaitNotification: OK eventTxId 101 2025-03-28T09:26:15.901246Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" PageSize: 100 PageToken: "" 2025-03-28T09:26:15.901317Z node 2 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: Reply Status: SUCCESS NextPageToken: "0" BUILDINDEX RESPONSE LIST: NKikimrIndexBuilder.TEvListResponse Status: SUCCESS NextPageToken: "0" >> TestYmqHttpProxy::TestPurgeQueue [GOOD] >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName [GOOD] >> TExtSubDomainTest::GenericCases ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TColumnEngineTestLogs::IndexWriteOverload [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=column_engine.h:144;event=RegisterTable;path_id=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64::0;records=1;size=8; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=38120;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=6840;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:1;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:33376;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8008;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:2;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36912;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4192;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8016;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:3;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36944;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8024;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:4;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:36976;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8040;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=332;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:5;path_id:1;records_count:1000;min_schema_snapshot:(plan_step=1;tx_id=1;);schema_version:0;level:0;column_size:37024;index_size:0;meta:((produced=INSERTED;)););path_id=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=testing;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64::0;records=1;count=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=152;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=41432;columns=5; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_plan_step;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=timestamp;fline=native.cpp:110;event=serialize;size=4200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=_yql_tx_id;fline=native.cpp:110;event=serialize;size=232;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_type;fline=native.cpp:110;event=serialize;size=8240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=resource_id;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=uid;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;field_name=message;fline=native.cpp:110;event=serialize;size=8056;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING; ... 
onent=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=432;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=448;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] >> TestYmqHttpProxy::TestSendMessageBatch >> TestYmqHttpProxy::TestChangeMessageVisibility [GOOD] >> TestKinesisHttpProxy::ErroneousRequestGetRecords |88.8%| [TA] $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.9%| [TA] $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbTableSplit::SplitByLoadWithDeletes >> YdbTableSplit::SplitByLoadWithReads >> YdbTableSplit::RenameTablesAndSplit |88.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch |88.9%| [TA] {RESULT} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:26:01.131479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:26:01.131620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:26:01.131679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:26:01.131729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:26:01.131781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:26:01.131807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:26:01.131863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:26:01.131948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:26:01.132310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:26:01.211950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:26:01.212012Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:01.226107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:26:01.226318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:26:01.226444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:26:01.233139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:26:01.233317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:26:01.233789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:01.233972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:26:01.235761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:01.237017Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:26:01.237078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:01.237247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 
2025-03-28T09:26:01.237316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:26:01.237365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:26:01.237444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.244558Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:26:01.354971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:26:01.355184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.355370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:26:01.355586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:26:01.355630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.357862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:01.357974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:26:01.358100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.358142Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:26:01.358168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:26:01.358193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:26:01.360020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.360083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:26:01.360121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:26:01.361653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.361708Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.361756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-03-28T09:26:01.361809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:26:01.365519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:26:01.367447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:26:01.367664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:26:01.368612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:01.368751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:26:01.368801Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:26:01.369007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:26:01.369045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:26:01.369207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:26:01.369340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:26:01.371291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:26:01.371349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:26:01.371498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:01.371555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:26:01.371931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:26:01.371982Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:26:01.372077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:26:01.372116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:26:01.372156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:26:01.372195Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:26:01.372232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:26:01.372275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:26:01.372311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:26:01.372341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:26:01.372411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:26:01.372472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:26:01.372508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:26:01.374315Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:26:01.374430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:26:01.374466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... Id 72075186233409548 2025-03-28T09:26:17.462287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-28T09:26:17.462342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2025-03-28T09:26:17.464083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 3150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:26:17.464238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 AckTo { RawX1: 0 RawX2: 0 } } Step: 3150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:26:17.464297Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropPQ TPropose, operationId: 103:0 HandleReply TEvOperationPlan, step: 3150, at schemeshard: 72057594046678944 2025-03-28T09:26:17.464474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:26:17.464557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-03-28T09:26:17.464765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:26:17.464828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:26:17.466542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:26:17.466585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:26:17.467080Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:26:17.467128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:26:17.467280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:26:17.467344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:26:17.467468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:26:17.467501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-28T09:26:17.467557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-28T09:26:17.467605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-03-28T09:26:17.468081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:26:17.468140Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-28T09:26:17.468250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:26:17.468287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:26:17.468334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:26:17.468374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:26:17.468413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-03-28T09:26:17.468456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:26:17.468493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:26:17.468525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:26:17.468664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:26:17.468709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-03-28T09:26:17.468744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 9 2025-03-28T09:26:17.468776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-03-28T09:26:17.469499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:26:17.469594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:26:17.469626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication 
in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:26:17.469670Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-28T09:26:17.469721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:26:17.470334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:26:17.470384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:26:17.470448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:26:17.470798Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2025-03-28T09:26:17.470964Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2025-03-28T09:26:17.471407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:26:17.471715Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:26:17.471787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:26:17.471814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:26:17.471851Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 9 2025-03-28T09:26:17.471887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:26:17.471952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-03-28T09:26:17.472845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-28T09:26:17.475492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:26:17.475604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:26:17.477433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:26:17.477531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:26:17.477602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:4 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-28T09:26:17.478020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:26:17.478068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:26:17.478510Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:26:17.478600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:26:17.478636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:772:2686] TestWaitNotification: OK eventTxId 103 2025-03-28T09:26:17.972574Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:26:17.972847Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 333us result status StatusSuccess 2025-03-28T09:26:17.973243Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TDatabaseResolverTests::Ydb_Dedicated >> TDatabaseResolverTests::Ydb_Serverless >> TDatabaseResolverTests::DataStreams_Dedicated >> TDatabaseResolverTests::ClickHouseNative >> TDatabaseResolverTests::DataStreams_Serverless >> TDatabaseResolverTests::PostgreSQL >> TDatabaseResolverTests::Ydb_Serverless_Timeout >> TestKinesisHttpProxy::BadRequestUnknownMethod [GOOD] >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError >> TDatabaseResolverTests::Greenplum_MasterNode |88.9%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> TDatabaseResolverTests::PostgreSQL [GOOD] >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] >> TDatabaseResolverTests::DataStreams_Serverless [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied >> TDatabaseResolverTests::PostgreSQL_PermissionDenied >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] >> TDatabaseResolverTests::ClickHouseNative [GOOD] >> TDatabaseResolverTests::ClickHouseHttp >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] |88.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} >> TestKinesisHttpProxy::TestCounters [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] Test command err: 2025-03-28T09:26:19.891861Z node 1 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Ydb database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': Connection timeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] Test command err: 2025-03-28T09:26:19.893581Z node 1 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgb1 via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgb1': Status: 404 Response body: {"message":"Database not found"} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 2025-03-28T09:26:20.008646Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-clickhouse.viewer`. 
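Note on the TDatabaseResolverTests errors above: they all exercise the same mechanism — the federated-query database resolver issues an HTTPS GET to a cloud management endpoint and maps the HTTP outcome (timeout, 404, 403) onto the error text shown. A minimal sketch of that request shape; the endpoint and URL are copied verbatim from the log, while the bearer-token header is an assumption about how the real resolver authenticates, not something the log shows:

```python
# Sketch only: endpoint/URL are taken from the resolver error text above;
# the IAM bearer-token header is an assumption, not confirmed by the log.
import urllib.request
import urllib.error

def resolve_clickhouse_hosts(cluster_id: str, iam_token: str) -> bytes:
    url = (f"https://mdb.api.cloud.yandex.net:443"
           f"/managed-clickhouse/v1/clusters/{cluster_id}/hosts")
    req = urllib.request.Request(url, headers={"Authorization": f"Bearer {iam_token}"})
    try:
        with urllib.request.urlopen(req, timeout=10) as resp:
            return resp.read()
    except urllib.error.HTTPError:
        # An HTTP 403 here is what the resolver reports above as "you have no
        # permission to resolve database id into database endpoint".
        raise
```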
|88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless [GOOD] >> TopicService::UnknownTopic [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2025-03-28T09:26:19.986281Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] Test command err: 2025-03-28T09:26:19.981234Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] Test command err: 2025-03-28T09:26:20.020208Z node 2 :FQ_DATABASE_RESOLVER ERROR: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed PostgreSQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-postgresql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-postgresql.viewer`. |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouseHttp [GOOD] >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] |88.9%| [TA] $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.9%| [TA] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadTableSnapshots::ReadTableSnapshot >> DataShardReadTableSnapshots::ReadTableDropColumn >> TestKinesisHttpProxy::TestEmptyHttpBody >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose >> DataShardReadTableSnapshots::ReadTableSplitBefore >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder >> TestKinesisHttpProxy::ListShardsTimestamp [GOOD] >> TReplicaTest::CommitWithoutHandshake >> TReplicaTest::UpdateWithoutHandshake >> TReplicaTest::Update |88.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> TopicService::UseDoubleSlashInTopicPath >> TReplicaTest::HandshakeWithStaleGeneration >> TReplicaTest::Subscribe >> TReplicaTest::Merge >> TReplicaTest::Handshake >> TReplicaTest::Unsubscribe |88.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... 
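Across the PermissionDenied cases above (ClickHouse, Greenplum, DataStreams, PostgreSQL), only the service kind, endpoint, URL, and the optional role hint vary; the message itself is one template. A hypothetical reconstruction of how such a string is composed — the function name is illustrative, not the actual YDB helper:

```python
# Hypothetical reconstruction of the shared error template visible in the
# TDatabaseResolverTests output above; resolver_error is an invented name.
def resolver_error(kind: str, db_id: str, endpoint: str, url: str, detail: str) -> str:
    return (
        f"Error while trying to resolve managed {kind} database with id {db_id} "
        f"via HTTP request to: endpoint '{endpoint}', url '{url}': {detail}"
    )

# Reproduces the Greenplum line from the log:
# resolver_error("Greenplum", "etn021us5r9rhld1vgbh",
#                "mdb.api.cloud.yandex.net:443",
#                "/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts",
#                "you have no permission to resolve database id into database endpoint.")
```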
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2025-03-28T09:25:28.620703Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789114202900850:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:28.620770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00083a/r3tmp/tmpNh1PZP/pdisk_1.dat 2025-03-28T09:25:29.251759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:29.251877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:29.254321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:29.363522Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2838, node 1 2025-03-28T09:25:29.541290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:29.541313Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:29.541320Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:29.541428Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6534 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:30.113630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:6534 2025-03-28T09:25:30.457650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:30.464939Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:25:30.466570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.483438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.655898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:30.717196Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-03-28T09:25:30.720918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:30.814790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.875152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.921642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.013618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.064700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.154806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.245528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:32.445572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789131382771455:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.445573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789131382771446:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.445657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.449529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-28T09:25:32.458756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789131382771460:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-28T09:25:32.512342Z node 1 :TX_PROXY ERROR: Actor# [1:7486789131382771511:2920] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:33.321495Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqe14a4v53s92a06jzvfpf4x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWRjNWJhMTEtYTdhOTk4NWMtN2JjNmU2NjMtYzE3ZGQ4OTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.341723Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqe14a4v53s92a06jzvfpf4x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWRjNWJhMTEtYTdhOTk4NWMtN2JjNmU2NjMtYzE3ZGQ4OTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.356644Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqe14a4v53s92a06jzvfpf4x, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWRjNWJhMTEtYTdhOTk4NWMtN2JjNmU2NjMtYzE3ZGQ4OTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.393529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.481170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.517478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.565284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.618395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:25:33.621317Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789114202900850:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:33.621378Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... waiting... 2025-03-28T09:25:33.654434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:25:33.698140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... waiting... 
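The KQP_WORKLOAD_SERVICE warnings above trace lazy creation of the default workload-manager resource pool: a fetch fails with NOT_FOUND, an ESchemeOpCreateResourcePool suboperation is proposed, a racing creator gets "path exist, request accepts it", and the TPoolCreatorActor retries after "Transaction ... completed, doublechecking". A rough sketch of that create-if-missing discipline, with entirely hypothetical names that do not exist in YDB:

```python
# Entirely hypothetical sketch of the create-if-missing pattern traced by the
# workload-service warnings above; fetch/create are caller-supplied callables.
class AlreadyExists(Exception):
    pass

def ensure_default_pool(fetch, create):
    pool = fetch("default")
    if pool is not None:
        return pool
    try:
        create("default")      # may race with another session doing the same
    except AlreadyExists:
        pass                   # log shows: "path exist, request accepts it"
    return fetch("default")    # doublecheck once the create transaction lands
```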
2025-03-28T09:25:33.734117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:25:33.806514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.895056Z node 1 :HTTP INFO: Listening on http://127.0.0.1:13442 2025-03-28T09:25:34.897575Z node 1 :SQS INFO: Start SQS service actor 2025-03-28T09:25:34.897727Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-03-28T09:25:34.898790Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-28T09:25:34.898969Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-03-28T09:25:34.899123Z node 1 :HTTP INFO: Listening on http://[::]:7112 2025-03-28T09:25:34.927449Z node 1 :SQS INFO: Request SQS users list 2025- ... nd: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-28T09:26:18.780637Z node 7 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 17ms 2025-03-28T09:26:18.781108Z node 7 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715688 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { 
Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-28T09:26:18.781146Z node 7 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-03-28T09:26:18.781262Z node 7 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 19ms 2025-03-28T09:26:18.781741Z node 7 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715687 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-28T09:26:18.781758Z node 7 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 19ms 2025-03-28T09:26:18.781851Z node 7 :SQS TRACE: Handle queues list: { Status: 48 TxId: 281474976715688 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: 
Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-28T09:26:18.781956Z node 7 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715687 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-28T09:26:18.781980Z node 7 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-03-28T09:26:18.782053Z node 7 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 20ms 2025-03-28T09:26:18.782324Z node 7 :SQS TRACE: Handle user settings: { Status: 48 TxId: 281474976715687 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-28T09:26:18.918945Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7486789328370031864:2450]: Pool not found 2025-03-28T09:26:18.919143Z node 7 :SQS DEBUG: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-03-28T09:26:19.338921Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7486789328370031850:2446]: Pool not found 2025-03-28T09:26:19.339086Z node 7 :SQS DEBUG: [cleanup removed queues] getting queues... 2025-03-28T09:26:19.341771Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486789332664999313:2472], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:19.341780Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7486789332664999314:2473], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-03-28T09:26:19.341852Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:19.674512Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7486789332664999311:2471]: Pool not found 2025-03-28T09:26:19.675206Z node 7 :SQS DEBUG: [cleanup removed queues] there are no queues to delete 2025-03-28T09:26:19.738994Z node 7 :HTTP DEBUG: (#37,[::1]:48574) incoming connection opened 2025-03-28T09:26:19.739078Z node 7 :HTTP DEBUG: (#37,[::1]:48574) -> (POST /Root) 2025-03-28T09:26:19.739213Z node 7 :HTTP_PROXY INFO: proxy service: incoming request from [b886:5300:6050:0:a086:5300:6050:0] request [UnknownMethodName] url [/Root] database [/Root] requestId: 9b8e11f7-df5c2ac5-2d439ce7-f0685550 2025-03-28T09:26:19.739490Z node 7 :HTTP_PROXY INFO: http request [UnknownMethodName] requestId [9b8e11f7-df5c2ac5-2d439ce7-f0685550] reply with status: UNSUPPORTED message: Missing method name UnknownMethodName 2025-03-28T09:26:19.739683Z node 7 :HTTP DEBUG: (#37,[::1]:48574) <- (400 InvalidAction) 2025-03-28T09:26:19.739735Z node 7 :HTTP DEBUG: (#37,[::1]:48574) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.UnknownMethodName X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 3 { } 0 2025-03-28T09:26:19.739774Z node 7 :HTTP DEBUG: (#37,[::1]:48574) Response: HTTP/1.1 400 InvalidAction Connection: close x-amzn-requestid: 9b8e11f7-df5c2ac5-2d439ce7-f0685550 x-amz-crc32: 139748724 Content-Type: application/x-amz-json-1.1 Content-Length: 76 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 2025-03-28T09:26:19.739862Z node 7 :HTTP DEBUG: (#37,[::1]:48574) connection closed Http output full {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 400 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} >> TestKinesisHttpProxy::ListShardsToken >> TReplicaTest::HandshakeWithStaleGeneration [GOOD] >> TReplicaTest::Handshake [GOOD] >> TReplicaTest::IdempotencyUpdatesAliveSubscriber >> TReplicaTest::DoubleUnsubscribe >> TReplicaTest::Merge [GOOD] >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers >> TReplicaTest::Update [GOOD] >> TReplicaTest::UnsubscribeWithoutSubscribe >> TReplicaTest::Unsubscribe [GOOD] >> TReplicaTest::UnsubscribeUnknownPath >> TReplicaTest::Subscribe [GOOD] >> TReplicaTest::SubscribeUnknownPath >> TReplicaTest::CommitWithoutHandshake [GOOD] >> TReplicaTest::CommitWithStaleGeneration >> TReplicaTest::UpdateWithoutHandshake [GOOD] >> TReplicaTest::UpdateWithStaleGeneration >> TReplicaTest::IdempotencyUpdatesAliveSubscriber [GOOD] >> TReplicaTest::DoubleUnsubscribe [GOOD] >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers [GOOD] >> TReplicaTest::StrongNotificationAfterCommit >> TReplicaTest::IdempotencyUpdatesVariant2 >> TReplicaTest::DoubleDelete >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] >> TReplicaTest::UnsubscribeUnknownPath [GOOD] >> TReplicaTest::SubscribeUnknownPath [GOOD] >> TReplicaTest::SyncVersion >> TestKinesisHttpProxy::TestWrongStream2 [GOOD] >> TReplicaTest::CommitWithStaleGeneration [GOOD] >> TReplicaTest::Delete >> TReplicaTest::UpdateWithStaleGeneration [GOOD] >> TReplicaTest::Delete [GOOD] >> TReplicaTest::DoubleDelete [GOOD] >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] >> TReplicaTest::StrongNotificationAfterCommit [GOOD] >> TReplicaTest::SyncVersion [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UpdateWithStaleGeneration [GOOD] Test command err: 2025-03-28T09:26:22.339611Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.339712Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:6:2053] Reject update from unknown populator: sender# [1:7:2054], owner# 1, generation# 1 2025-03-28T09:26:22.339822Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2025-03-28T09:26:22.339868Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path 2025-03-28T09:26:22.344540Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.344781Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:7:2054] 2025-03-28T09:26:22.348482Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# path 2025-03-28T09:26:22.348599Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:7:2054] 2025-03-28T09:26:22.348640Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.348712Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.348790Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:7:2054] 2025-03-28T09:26:22.348823Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.543233Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-03-28T09:26:22.543307Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.543448Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 0 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.543488Z node 2 :SCHEME_BOARD_REPLICA ERROR: [2:6:2053] Reject update from stale populator: sender# [2:7:2054], owner# 1, generation# 0, pending generation# 1 2025-03-28T09:26:22.543593Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2025-03-28T09:26:22.543665Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path 2025-03-28T09:26:22.543736Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.543832Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] 2025-03-28T09:26:22.543881Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# path 2025-03-28T09:26:22.543949Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:7:2054] 2025-03-28T09:26:22.544040Z node 2 
:SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.544118Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.544198Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:7:2054] 2025-03-28T09:26:22.544253Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# [OwnerId: 1, LocalPathId: 1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] Test command err: 2025-03-28T09:26:22.296338Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-28T09:26:22.296432Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.339584Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.339691Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:22.346010Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:22.346160Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2025-03-28T09:26:22.346241Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.346430Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:7:2054] 2025-03-28T09:26:22.348457Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# path 2025-03-28T09:26:22.348567Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:7:2054] 2025-03-28T09:26:22.348608Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.348697Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:7:2054] 2025-03-28T09:26:22.348730Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:7:2054], path# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.553411Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-03-28T09:26:22.553477Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.553570Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.553598Z node 2 :SCHEME_BOARD_REPLICA NOTICE: 
[2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:22.553653Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:22.553705Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeUnknownPath [GOOD] Test command err: 2025-03-28T09:26:22.315276Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-28T09:26:22.315361Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.315462Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2025-03-28T09:26:22.315491Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path 2025-03-28T09:26:22.344532Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.344787Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:9:2056] 2025-03-28T09:26:22.344860Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# path, domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.345051Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.345106Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:22.351842Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:22.353800Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:8:2055] 2025-03-28T09:26:22.353875Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Unsubscribe: subscriber# [1:8:2055], path# path 2025-03-28T09:26:22.353960Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-03-28T09:26:22.353989Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-03-28T09:26:22.354017Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.539102Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] Test command err: 
2025-03-28T09:26:22.296660Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:7:2054] 2025-03-28T09:26:22.296734Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 2 2025-03-28T09:26:22.296821Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-28T09:26:22.296854Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:6:2053] Reject handshake from stale populator: sender# [1:7:2054], owner# 1, generation# 1, pending generation# 2 2025-03-28T09:26:22.529766Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-03-28T09:26:22.529814Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.529908Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:8:2055] 2025-03-28T09:26:22.529932Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.530031Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.530184Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.530210Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:22.536388Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:22.536555Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-03-28T09:26:22.536581Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-03-28T09:26:22.536605Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.536690Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.536718Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:22.536740Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.536795Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.536841Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-03-28T09:26:22.536895Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, 
pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:22.536977Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:9:2056] 2025-03-28T09:26:22.537015Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.807729Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-03-28T09:26:22.807803Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.807888Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.807914Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:22.807957Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:22.808041Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.808072Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-03-28T09:26:22.808102Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.808155Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:22.808226Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-03-28T09:26:22.808251Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# true 2025-03-28T09:26:22.808287Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 2] 2025-03-28T09:26:22.808334Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.808371Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:22.808394Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.808437Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 
1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.808459Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-03-28T09:26:22.808479Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 2]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::Delete [GOOD]
Test command err:
2025-03-28T09:26:22.289262Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-28T09:26:22.289401Z node 1 :SCHEME_BOARD_REPLICA ERROR: [1:6:2053] Reject commit from unknown populator: sender# [1:7:2054], owner# 1, generation# 1 2025-03-28T09:26:22.294667Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-28T09:26:22.294793Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.526659Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 0 }: sender# [2:7:2054] 2025-03-28T09:26:22.527087Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 0 2025-03-28T09:26:22.527171Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-03-28T09:26:22.527204Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.527289Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2025-03-28T09:26:22.527320Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Commit generation: owner# 1, generation# 1 2025-03-28T09:26:22.527376Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 0 }: sender# [2:7:2054] 2025-03-28T09:26:22.527422Z node 2 :SCHEME_BOARD_REPLICA ERROR: [2:6:2053] Reject commit from stale populator: sender# [2:7:2054], owner# 1, generation# 0, pending generation# 1 2025-03-28T09:26:22.527474Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [2:7:2054] 2025-03-28T09:26:22.527508Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 2 2025-03-28T09:26:22.801137Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-03-28T09:26:22.801204Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.801348Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.801392Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# false 2025-03-28T09:26:22.808832Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 42, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:22.809007Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-03-28T09:26:22.809102Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.809285Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:9:2056] 2025-03-28T09:26:22.809335Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.809447Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-03-28T09:26:22.809499Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# true 2025-03-28T09:26:22.809540Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 42, LocalPathId: 1] 2025-03-28T09:26:22.809701Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:10:2057] 2025-03-28T09:26:22.809748Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:10:2057], path# path, domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.809867Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:11:2058] 2025-03-28T09:26:22.809920Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:11:2058], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.810027Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:12:2059] 2025-03-28T09:26:22.810068Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:12:2059], path# path, domainOwnerId# 0, capabilities#
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::DoubleDelete [GOOD]
Test command err:
2025-03-28T09:26:22.296361Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-28T09:26:22.296432Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.561073Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-03-28T09:26:22.561205Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.561394Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.561449Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:22.568874Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:22.569101Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2025-03-28T09:26:22.569216Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.569379Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] 2025-03-28T09:26:22.569455Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:7:2054], path# path 2025-03-28T09:26:22.569529Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:7:2054] 2025-03-28T09:26:22.872600Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-03-28T09:26:22.873694Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.873849Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-03-28T09:26:22.873892Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path 2025-03-28T09:26:22.873963Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.874087Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.874135Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:22.874202Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:22.874370Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-03-28T09:26:22.874406Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-03-28T09:26:22.874436Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.874601Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2025-03-28T09:26:22.874664Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.874769Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 40 2025-03-28T09:26:22.874802Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::SyncVersion [GOOD]
Test command err:
2025-03-28T09:26:22.315357Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-28T09:26:22.315438Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.339590Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.339691Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:22.345678Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:22.345848Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:7:2054] 2025-03-28T09:26:22.345950Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.346110Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-03-28T09:26:22.346149Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-03-28T09:26:22.346202Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.543636Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:7:2054] 2025-03-28T09:26:22.543705Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path 2025-03-28T09:26:22.543823Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.839939Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-03-28T09:26:22.840000Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.840098Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 76 2025-03-28T09:26:22.840123Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:22.840183Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 100500, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 32} 2025-03-28T09:26:22.840302Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:7:2054] 2025-03-28T09:26:22.840356Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:7:2054], path# path, domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.840437Z node 3 
:SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:7:2054], cookie# 1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::StrongNotificationAfterCommit [GOOD]
Test command err:
2025-03-28T09:26:22.314061Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2025-03-28T09:26:22.314168Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path 2025-03-28T09:26:22.344550Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.344786Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:9:2056] 2025-03-28T09:26:22.344847Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.344949Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:9:2056], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.345057Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-28T09:26:22.345102Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.345270Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.345333Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:22.352528Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:22.353883Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:7:2054], cookie# 0, event size# 40 2025-03-28T09:26:22.353967Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-03-28T09:26:22.354004Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.530797Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-03-28T09:26:22.530903Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.531049Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:8:2055] 2025-03-28T09:26:22.531091Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.531165Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.531291Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle
NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.531362Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:22.531430Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:22.531569Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-03-28T09:26:22.531620Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-03-28T09:26:22.531670Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.531775Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:8:2055] 2025-03-28T09:26:22.531839Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Unsubscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.531913Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.531951Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:22.531983Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:22.532048Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:22.532112Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-03-28T09:26:22.532179Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:22.532285Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:9:2056] 2025-03-28T09:26:22.532333Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2025-03-28T09:26:22.815687Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 1 }: sender# [3:8:2055] 2025-03-28T09:26:22.815761Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path 2025-03-28T09:26:22.815837Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 1, capabilities# 2025-03-28T09:26:22.815980Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 
Generation: 1 }: sender# [3:7:2054] 2025-03-28T09:26:22.816024Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:22.816114Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-03-28T09:26:22.816160Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Commit generation: owner# 1, generation# 1 2025-03-28T09:26:22.816266Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimr::NSchemeBoard::TReplica::TEvPrivate::TEvSendStrongNotifications { Owner: 1 }
>> TestYmqHttpProxy::TestDeleteQueue [GOOD]
>> TestKinesisHttpProxy::TestWrongRequest
>> TExtSubDomainTest::GenericCases [GOOD]
>> TestYmqHttpProxy::TestListDeadLetterSourceQueues
>> TPersQueueTest::WriteNonExistingPartition [GOOD]
>> TPersQueueTest::WriteNonExistingTopic
>> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD]
>> DemoTx::Scenario_5 [GOOD]
>> TPersQueueTest::PreferredCluster_TwoEnabledClustersAndWriteSessionsWithDifferentPreferredCluster_SessionWithMismatchedClusterDiesAndOthersAlive [GOOD]
>> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive
>> TReplicaTest::Commit
>> TPersQueueTest::SchemeOperationsTest [GOOD]
>> TPersQueueTest::SchemeOperationFirstClassCitizen
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::GenericCases [GOOD]
Test command err:
2025-03-28T09:26:18.578543Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789327996998083:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:18.578601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cd2/r3tmp/tmpjmTz3H/pdisk_1.dat 2025-03-28T09:26:19.031340Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:19.032051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:19.037577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:26:19.082666Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13622 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-03-28T09:26:19.256173Z node 1 :TX_PROXY DEBUG: actor# [1:7486789327996998315:2116] Handle TEvNavigate describe path dc-1 2025-03-28T09:26:19.256234Z node 1 :TX_PROXY DEBUG: Actor# [1:7486789332291966077:2432] HANDLE EvNavigateScheme dc-1 2025-03-28T09:26:19.257950Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486789327996998342:2131], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:26:19.258008Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7486789327996998342:2131], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-28T09:26:19.262250Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486789332291966078:2433][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:26:19.264336Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486789327996997993:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486789332291966082:2433] 2025-03-28T09:26:19.264375Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486789327996997996:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486789332291966083:2433] 2025-03-28T09:26:19.264443Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486789327996997993:2050] Subscribe: subscriber# [1:7486789332291966082:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:26:19.264451Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486789327996997996:2053] Subscribe: subscriber# [1:7486789332291966083:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:26:19.264504Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486789327996997999:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486789332291966084:2433] 2025-03-28T09:26:19.264525Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486789327996997999:2056] Subscribe: subscriber# [1:7486789332291966084:2433], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:26:19.264553Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486789332291966082:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486789327996997993:2050] 2025-03-28T09:26:19.264594Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486789332291966083:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486789327996997996:2053] 2025-03-28T09:26:19.264608Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486789327996997993:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486789332291966082:2433] 2025-03-28T09:26:19.264634Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486789327996997996:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486789332291966083:2433] 2025-03-28T09:26:19.264651Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486789332291966084:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# 
[1:7486789327996997999:2056] 2025-03-28T09:26:19.264694Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486789327996997999:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486789332291966084:2433] 2025-03-28T09:26:19.264699Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486789332291966078:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486789332291966079:2433] 2025-03-28T09:26:19.264740Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486789332291966078:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486789332291966080:2433] 2025-03-28T09:26:19.264789Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7486789332291966078:2433][/dc-1] Set up state: owner# [1:7486789327996998342:2131], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:26:19.266028Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486789332291966078:2433][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486789332291966081:2433] 2025-03-28T09:26:19.266125Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7486789332291966078:2433][/dc-1] Path was already updated: owner# [1:7486789327996998342:2131], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:26:19.266217Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486789332291966082:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486789332291966079:2433], cookie# 1 2025-03-28T09:26:19.266245Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486789332291966083:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486789332291966080:2433], cookie# 1 2025-03-28T09:26:19.266260Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486789332291966084:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486789332291966081:2433], cookie# 1 2025-03-28T09:26:19.266316Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486789327996997993:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486789332291966082:2433], cookie# 1 2025-03-28T09:26:19.266342Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486789327996997996:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486789332291966083:2433], cookie# 1 2025-03-28T09:26:19.266363Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486789327996997999:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486789332291966084:2433], cookie# 1 2025-03-28T09:26:19.266419Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486789332291966082:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486789327996997993:2050], cookie# 1 2025-03-28T09:26:19.266444Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [replica][1:7486789332291966083:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486789327996997996:2053], cookie# 1 2025-03-28T09:26:19.266457Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486789332291966084:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486789327996997999:2056], cookie# 1 2025-03-28T09:26:19.266496Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486789332291966078:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486789332291966079:2433], cookie# 1 2025-03-28T09:26:19.266517Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486789332291966078:2433][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:26:19.266539Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486789332291966078:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486789332291966080:2433], cookie# 1 2025-03-28T09:26:19.266557Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486789332291966078:2433][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:26:19.266577Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486789332291966078:2433][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486789332291966081:2433], cookie# 1 2025-03-28T09:26:19.266594Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486789332291966078:2433][/dc-1] Unexpected sync response: sender# [1:7486789332291966081:2433], cookie# 1 2025-03-28T09:26:19.316337Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486789327996998342:2131], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-28T09:26:19.317430Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486789327996998342:2131], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 
CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... scriber: { Subscriber: [1:7486789345176868767:3003] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:26:22.178746Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486789345176868768:3004][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:26:22.179141Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486789327996997993:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [1:7486789345176868778:3004] 2025-03-28T09:26:22.179161Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486789327996997993:2050] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-03-28T09:26:22.179217Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486789327996997993:2050] Subscribe: subscriber# [1:7486789345176868778:3004], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:26:22.179256Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486789327996997996:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [1:7486789345176868779:3004] 2025-03-28T09:26:22.179270Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486789327996997996:2053] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-03-28T09:26:22.179293Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486789327996997996:2053] Subscribe: subscriber# [1:7486789345176868779:3004], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:26:22.179373Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486789327996997999:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [1:7486789345176868780:3004] 2025-03-28T09:26:22.179387Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486789327996997999:2056] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2025-03-28T09:26:22.179407Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486789327996997999:2056] Subscribe: subscriber# [1:7486789345176868780:3004], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:26:22.179436Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486789345176868778:3004][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7486789327996997993:2050] 2025-03-28T09:26:22.179460Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486789345176868779:3004][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7486789327996997996:2053] 2025-03-28T09:26:22.179479Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486789345176868780:3004][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7486789327996997999:2056] 2025-03-28T09:26:22.179516Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486789345176868768:3004][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7486789345176868775:3004] 2025-03-28T09:26:22.179558Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486789345176868768:3004][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7486789345176868776:3004] 2025-03-28T09:26:22.179585Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7486789345176868768:3004][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [1:7486789327996998342:2131], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:26:22.179656Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486789345176868768:3004][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7486789345176868777:3004] 2025-03-28T09:26:22.179686Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7486789345176868768:3004][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7486789327996998342:2131], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:26:22.179712Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486789327996997993:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7486789345176868778:3004] 2025-03-28T09:26:22.179727Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486789327996997996:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7486789345176868779:3004] 2025-03-28T09:26:22.179743Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486789327996997999:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7486789345176868780:3004] 2025-03-28T09:26:22.179783Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486789327996998342:2131], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-03-28T09:26:22.179864Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486789327996998342:2131], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7486789345176868768:3004] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:26:22.179941Z node 1 
:TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486789327996998342:2131], cacheItem# { Subscriber: { Subscriber: [1:7486789345176868768:3004] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:26:22.180038Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486789345176868781:3005], recipient# [1:7486789345176868766:2338], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:26:22.601392Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486789327996998342:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:26:22.601513Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486789327996998342:2131], cacheItem# { Subscriber: { Subscriber: [1:7486789332291966169:2507] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:26:22.601603Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486789345176868783:3006], recipient# [1:7486789345176868782:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:26:23.159064Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486789327996998342:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] 
RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:26:23.159211Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486789327996998342:2131], cacheItem# { Subscriber: { Subscriber: [1:7486789345176868750:3001] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:26:23.159352Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486789349471836099:3010], recipient# [1:7486789349471836098:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TReplicaTest::Commit [GOOD] >> TReplicaTest::AckNotifications >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] >> TReplicaTest::AckNotifications [GOOD] >> TReplicaTest::AckNotificationsUponPathRecreation >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] |89.0%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |89.0%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log}
|89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest
|89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest
|89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest
>> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx
>> TestKinesisHttpProxy::ErroneousRequestGetRecords [GOOD]
>> TTxLocatorTest::TestAllocateAllByPieces
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD]
Test command err:
2025-03-28T09:26:25.287773Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-28T09:26:25.287844Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:25.287949Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:7:2054] 2025-03-28T09:26:25.287980Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 1, generation# 1 2025-03-28T09:26:25.288027Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:7:2054] 2025-03-28T09:26:25.288054Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 1, generation# 2 2025-03-28T09:26:25.577888Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2025-03-28T09:26:25.577959Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path 2025-03-28T09:26:25.578126Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-03-28T09:26:25.578270Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:7:2054] 2025-03-28T09:26:25.578325Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:25.578446Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:25.578484Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:25.584318Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:25.584533Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:8:2055] 2025-03-28T09:26:25.584669Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:7:2054], cookie# 0, event size# 40 2025-03-28T09:26:25.584707Z node 2 :SCHEME_BOARD_REPLICA NOTICE: [2:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2025-03-28T09:26:25.584740Z node 2 :SCHEME_BOARD_REPLICA INFO: [2:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1]
2025-03-28T09:26:25.584837Z node 2 :SCHEME_BOARD_REPLICA DEBUG: [2:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [2:8:2055] 2025-03-28T09:26:25.869953Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:7:2054] 2025-03-28T09:26:25.870019Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Successful handshake: owner# 1, generation# 1 2025-03-28T09:26:25.870154Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:25.870202Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:25.870266Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:25.870388Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2025-03-28T09:26:25.870459Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-03-28T09:26:25.870574Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:25.870608Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2025-03-28T09:26:25.870668Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 3, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:25.870887Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:7:2054], cookie# 0, event size# 72 2025-03-28T09:26:25.870923Z node 3 :SCHEME_BOARD_REPLICA NOTICE: [3:6:2053] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2025-03-28T09:26:25.870958Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2025-03-28T09:26:25.871174Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path 2025-03-28T09:26:25.871263Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2025-03-28T09:26:25.871319Z node 3 :SCHEME_BOARD_REPLICA INFO: [3:6:2053] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2025-03-28T09:26:25.871419Z node 3 :SCHEME_BOARD_REPLICA DEBUG: [3:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 3 }: sender# [3:8:2055]
>> TTxLocatorTest::TestAllocateAll
>> TTxLocatorTest::TestZeroRange
>> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD]
>> TTxLocatorTest::TestAllocateAllByPieces [GOOD]
>> TTxLocatorTest::TestAllocateAll [GOOD]
>> TTxLocatorTest::TestZeroRange [GOOD]
>> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD]
|89.0%| [TA] $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log}
|89.0%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log}
>> TFstClassSrcIdPQTest::TestTableCreated
------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD]
Test command err:
2025-03-28T09:25:28.585833Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789112973835867:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:28.590318Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a06/r3tmp/tmpbxiAV9/pdisk_1.dat 2025-03-28T09:25:29.226622Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:29.240201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:29.240311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:29.253319Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5872, node 1 2025-03-28T09:25:29.572224Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:29.572256Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:29.572264Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:29.572412Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18614 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:30.120376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting...
2025-03-28T09:25:30.155545Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:18614 2025-03-28T09:25:30.384996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.394236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:30.418016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:25:30.650123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:30.699729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:30.743879Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-03-28T09:25:30.752803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.805888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.850787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.893397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:30.966062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 2025-03-28T09:25:31.003646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:31.091004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:25:32.422308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789130153706494:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.422328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789130153706488:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.422439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.426863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-28T09:25:32.440941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789130153706502:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-28T09:25:32.521037Z node 1 :TX_PROXY ERROR: Actor# [1:7486789130153706553:2919] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:33.320018Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqe14a4342efr6bby2c8hzdb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjY0NzgyOTItZWQzYWE4ZGQtNDAzYTQyZGYtNDBhNmM4Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.340970Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqe14a4342efr6bby2c8hzdb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjY0NzgyOTItZWQzYWE4ZGQtNDAzYTQyZGYtNDBhNmM4Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.348367Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqe14a4342efr6bby2c8hzdb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjY0NzgyOTItZWQzYWE4ZGQtNDAzYTQyZGYtNDBhNmM4Nzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.425765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.465193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.499045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.569305Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789112973835867:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:33.569426Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:33.571934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:33.604603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:25:33.675001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.750020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:25:33.826616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:25:33.852577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.899500Z node 1 :HTTP INFO: Listening on http://127.0.0.1:9076 2025-03-28T09:25:34.901115Z node 1 :SQS INFO: Start SQS service actor 2025-03-28T09:25:34.901222Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: false EnableDeadLetterQueues: true } 2025-03-28T09:25:34.901278Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-28T09:25:34.901976Z node 1 :HTTP INFO: Listening on http://[::]:27437 2025-03-28T09:25:34.903462Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-03-28T09:25:34.936495Z node 1 :SQS INFO: Request SQS users list 20 ... { Optional { Bool: true } } } } } 2025-03-28T09:26:24.915526Z node 7 :SQS DEBUG: Request [1ab68875-4762aac4-2e2bbffc-ed4095d5] Query(idx=TAG_QUEUE_ID) Queue [cloud4/000000000000000301v0] Attempt 1 execution duration: 6ms 2025-03-28T09:26:24.915583Z node 7 :SQS TRACE: Request [d643f0bd-fd3c0787-8cb115e3-5c319e20] Query(idx=TAG_QUEUE_ID) Queue [cloud4/000000000000000301v0] HandleResponse { Status: 48 TxId: 281474976715931 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "updated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { Bool: true } } } } } 2025-03-28T09:26:24.915603Z node 7 :SQS DEBUG: Request [d643f0bd-fd3c0787-8cb115e3-5c319e20] Query(idx=TAG_QUEUE_ID) Queue [cloud4/000000000000000301v0] Attempt 1 execution duration: 6ms 2025-03-28T09:26:24.915606Z node 7 :SQS TRACE: Request [1ab68875-4762aac4-2e2bbffc-ed4095d5] Query(idx=TAG_QUEUE_ID) Queue [cloud4/000000000000000301v0] Sending mkql execution result: { Status: 48 TxId: 281474976715932 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "updated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { Bool: true } } } } } 2025-03-28T09:26:24.915640Z node 7 :SQS TRACE: Request [1ab68875-4762aac4-2e2bbffc-ed4095d5] Query(idx=TAG_QUEUE_ID) Queue [cloud4/000000000000000301v0] Minikql data response: {"updated": true} 2025-03-28T09:26:24.915677Z node 7 :SQS DEBUG: Request [1ab68875-4762aac4-2e2bbffc-ed4095d5] Query(idx=TAG_QUEUE_ID) Queue [cloud4/000000000000000301v0] execution duration: 9ms 2025-03-28T09:26:24.915706Z node 7 :SQS TRACE: Request [d643f0bd-fd3c0787-8cb115e3-5c319e20] Query(idx=TAG_QUEUE_ID) Queue [cloud4/000000000000000301v0] Sending mkql execution result: { Status: 48 TxId: 281474976715931 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "updated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { Bool: true } } } } } 2025-03-28T09:26:24.915728Z node 7 :SQS TRACE: Request [d643f0bd-fd3c0787-8cb115e3-5c319e20] Query(idx=TAG_QUEUE_ID) Queue [cloud4/000000000000000301v0] Minikql data response: 
{"updated": true} 2025-03-28T09:26:24.915756Z node 7 :SQS DEBUG: Request [1ab68875-4762aac4-2e2bbffc-ed4095d5] Sending executed reply 2025-03-28T09:26:24.915767Z node 7 :SQS DEBUG: Request [d643f0bd-fd3c0787-8cb115e3-5c319e20] Query(idx=TAG_QUEUE_ID) Queue [cloud4/000000000000000301v0] execution duration: 9ms 2025-03-28T09:26:24.915804Z node 7 :SQS DEBUG: Request [d643f0bd-fd3c0787-8cb115e3-5c319e20] Sending executed reply 2025-03-28T09:26:24.915845Z node 7 :SQS DEBUG: Request [d643f0bd-fd3c0787-8cb115e3-5c319e20] Sending clear attributes cache event for queue [cloud4/000000000000000301v0] 2025-03-28T09:26:24.915847Z node 7 :SQS DEBUG: Request [1ab68875-4762aac4-2e2bbffc-ed4095d5] Sending clear attributes cache event for queue [cloud4/000000000000000301v0] 2025-03-28T09:26:24.915880Z node 7 :SQS TRACE: Request [d643f0bd-fd3c0787-8cb115e3-5c319e20] SendReplyAndDie from action actor { UntagQueue { RequestId: "d643f0bd-fd3c0787-8cb115e3-5c319e20" } } 2025-03-28T09:26:24.915885Z node 7 :SQS TRACE: Request [1ab68875-4762aac4-2e2bbffc-ed4095d5] SendReplyAndDie from action actor { UntagQueue { RequestId: "1ab68875-4762aac4-2e2bbffc-ed4095d5" } } 2025-03-28T09:26:24.915935Z node 7 :SQS TRACE: Request [d643f0bd-fd3c0787-8cb115e3-5c319e20] Sending sqs response: { UntagQueue { RequestId: "d643f0bd-fd3c0787-8cb115e3-5c319e20" } RequestId: "d643f0bd-fd3c0787-8cb115e3-5c319e20" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2025-03-28T09:26:24.915945Z node 7 :SQS TRACE: Request [1ab68875-4762aac4-2e2bbffc-ed4095d5] Sending sqs response: { UntagQueue { RequestId: "1ab68875-4762aac4-2e2bbffc-ed4095d5" } RequestId: "1ab68875-4762aac4-2e2bbffc-ed4095d5" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2025-03-28T09:26:24.915986Z node 7 :SQS DEBUG: Executing queue [cloud4/000000000000000301v0] attributes cache request. Req id: 9e81cbd5-462ec5e0-c58fae4c-736b39e1 2025-03-28T09:26:24.916041Z node 7 :SQS TRACE: HandleSqsResponse UntagQueue { RequestId: "d643f0bd-fd3c0787-8cb115e3-5c319e20" } RequestId: "d643f0bd-fd3c0787-8cb115e3-5c319e20" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2025-03-28T09:26:24.916045Z node 7 :SQS DEBUG: Request [9e81cbd5-462ec5e0-c58fae4c-736b39e1] Sending execute request for query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) to queue leader 2025-03-28T09:26:24.916070Z node 7 :SQS DEBUG: Request [9e81cbd5-462ec5e0-c58fae4c-736b39e1] Executing compiled query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) 2025-03-28T09:26:24.916090Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7486789355873955313:2786]: UntagQueue { RequestId: "d643f0bd-fd3c0787-8cb115e3-5c319e20" } RequestId: "d643f0bd-fd3c0787-8cb115e3-5c319e20" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2025-03-28T09:26:24.916116Z node 7 :SQS DEBUG: Request [9e81cbd5-462ec5e0-c58fae4c-736b39e1] Starting executor actor for query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID). Mode: COMPILE_AND_EXEC 2025-03-28T09:26:24.916146Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7486789355873955339:5586]. Found: 1 2025-03-28T09:26:24.916155Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7486789355873955340:5587]. 
Found: 1 2025-03-28T09:26:24.916156Z node 7 :SQS TRACE: HandleSqsResponse UntagQueue { RequestId: "1ab68875-4762aac4-2e2bbffc-ed4095d5" } RequestId: "1ab68875-4762aac4-2e2bbffc-ed4095d5" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2025-03-28T09:26:24.916198Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7486789355873955312:2785]: UntagQueue { RequestId: "1ab68875-4762aac4-2e2bbffc-ed4095d5" } RequestId: "1ab68875-4762aac4-2e2bbffc-ed4095d5" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2025-03-28T09:26:24.916198Z node 7 :SQS TRACE: Request [9e81cbd5-462ec5e0-c58fae4c-736b39e1] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Serializing params: {"QUEUE_ID_NUMBER": 4, "QUEUE_ID_NUMBER_HASH": 2169371982377735806, "NAME": "000000000000000301v0", "USER_NAME": "cloud4"} 2025-03-28T09:26:24.916267Z node 7 :SQS TRACE: Request [d643f0bd-fd3c0787-8cb115e3-5c319e20] HandleResponse: { UntagQueue { RequestId: "d643f0bd-fd3c0787-8cb115e3-5c319e20" } RequestId: "d643f0bd-fd3c0787-8cb115e3-5c319e20" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true }, status: OK 2025-03-28T09:26:24.916322Z node 7 :SQS TRACE: Request [1ab68875-4762aac4-2e2bbffc-ed4095d5] HandleResponse: { UntagQueue { RequestId: "1ab68875-4762aac4-2e2bbffc-ed4095d5" } RequestId: "1ab68875-4762aac4-2e2bbffc-ed4095d5" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true }, status: OK 2025-03-28T09:26:24.916329Z node 7 :SQS DEBUG: Request [d643f0bd-fd3c0787-8cb115e3-5c319e20] Sending reply from proxy actor: { UntagQueue { RequestId: "d643f0bd-fd3c0787-8cb115e3-5c319e20" } RequestId: "d643f0bd-fd3c0787-8cb115e3-5c319e20" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2025-03-28T09:26:24.916381Z node 7 :SQS DEBUG: Request [1ab68875-4762aac4-2e2bbffc-ed4095d5] Sending reply from proxy actor: { UntagQueue { RequestId: "1ab68875-4762aac4-2e2bbffc-ed4095d5" } RequestId: "1ab68875-4762aac4-2e2bbffc-ed4095d5" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2025-03-28T09:26:24.916418Z node 7 :SQS TRACE: Request [9e81cbd5-462ec5e0-c58fae4c-736b39e1] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "\037,\nFlags\010Name\010Args\016Payload\022Parameter\022QueueName\010Tags\022SelectRow\004If2ContentBasedDeduplication\030DelaySeconds\014DlqArn\016DlqName\022FifoQueue\036MaxReceiveCount$MaximumMessageSize,MessageRetentionPeriod,ReceiveMessageWaitTime8ShowDetailedCountersDeadline\"VisibilityTimeout\014Exists\022SetResult\006\006\206\202\t\211\004\202\203\005@\203\014V\000\003?\002\026queueExists\t\211\002?\004\207\205\026\207\203\014\207\203\010\207\203\001H\207\203\001H\207\203\014\207\203\010\207\203\010\207\203\010\207\203\010\207\203\010\207\203\010&*.26:>BFJNR\000\t\211\n?8\203\005\004\200\205\026\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004\203\004&*.26:>BFJN\213\004\203\010\203\010\203\004\036\000\003?< 
\000\001\205\000\000\000\000\001\024\000\000\000\000\000\000\000?6\005?T\003?>\006\003?@\010\003?B\026\003?D\024\003?F\n\003?H\030\003?J\014\003?L\016\003?N\020\003?P\032\003?R\022\377\007\013?Z\t\351\000?V\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\200\003?\202(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?X\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\226\003?\230\036QUEUE_ID_NUMBER\003\022\000\000\003?\\\000\037\000\001\t\211\004\202\203\005@?8V\000\003?\260\nattrs?\252\001\t\211\004\202\203\005@\203\001HV\000\003?\270\010tags\t\211\006?\272\203\014?\272\203\001H\"\000\t\211\002?\300?\300\014Not\000\t\211\002?\300?8R\000?\252\000\000\003?\272\004{}\t\211\006?\302\203\014?\302\203\001H\"\000\t\211\006?\320\203\005@\203\001H?\322\030Invoke\000\003?\326\014Equals\003?\330\000\t\211\004?\322\207\203\001H?\322 Coalesce\000\t\211\004?\342\207\205\004\207\203\001H?\342\026\032\203\004\030Member\000\t\211\n?\354\203\005\004\200\205\004\203\004\203\004\026\032\213\004\203\001H\203\001H\203\004\036\000\003?\362 \000\001\205\000\000\000\000\001\003\000\000\000\000\000\000\000?\352\005?\370\003?\364\004\003?\366 \003\013?\376\t\351\000?\372\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?%\002\003?)\002\022USER_NAME\003\022\000\003?\374(000000000000000301v0\002\003?\001\002\000\037\003?\356\002\002\003?\322\004{}\002\003\003?\302\004{}?a\002\002\002\001\000/" } Params { Bin: "\037\000\005\205\010\203\001H\203\010\203\010\203\001H\020NAME> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] Test command err: 2025-03-28T09:26:26.842593Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-28T09:26:26.845360Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-28T09:26:26.848005Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-28T09:26:26.863538Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.870018Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-28T09:26:26.887144Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.887266Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.887386Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-28T09:26:26.887523Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.887596Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.888382Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-28T09:26:26.888556Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-28T09:26:26.891016Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2025-03-28T09:26:26.893219Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2025-03-28T09:26:26.897854Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2025-03-28T09:26:26.898115Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2025-03-28T09:26:26.898442Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2025-03-28T09:26:26.898588Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.898802Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.898844Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.898975Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2123] requested range size#100000 2025-03-28T09:26:26.899154Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#100000 2025-03-28T09:26:26.899284Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.899316Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.899467Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2025-03-28T09:26:26.899584Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.899615Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.899749Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.899817Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2025-03-28T09:26:26.899960Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.900069Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.900127Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2025-03-28T09:26:26.900250Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } 
ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.900356Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2025-03-28T09:26:26.900403Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2025-03-28T09:26:26.900557Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.900624Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.900703Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2025-03-28T09:26:26.900730Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 100000 to# 200000 2025-03-28T09:26:26.900807Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.900870Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2025-03-28T09:26:26.900899Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 200000 to# 300000 2025-03-28T09:26:26.901049Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.901112Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2025-03-28T09:26:26.901130Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 300000 to# 400000 2025-03-28T09:26:26.901175Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2025-03-28T09:26:26.901189Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 400000 to# 500000 2025-03-28T09:26:26.901256Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.901294Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.901317Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2025-03-28T09:26:26.901330Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:88:2123] TEvAllocateResult from# 500000 to# 600000 2025-03-28T09:26:26.901407Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.901441Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2025-03-28T09:26:26.901454Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 600000 to# 700000 2025-03-28T09:26:26.901506Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 
TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2025-03-28T09:26:26.901526Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 700000 to# 800000 2025-03-28T09:26:26.901712Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.901749Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2025-03-28T09:26:26.901767Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 800000 to# 900000 2025-03-28T09:26:26.901836Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.901886Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2025-03-28T09:26:26.901904Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-03-28T09:26:26.906168Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:119:2153] requested range size#100000 2025-03-28T09:26:26.906446Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:121:2155] requested range size#100000 2025-03-28T09:26:26.906769Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.906866Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:117:2151] requested range size#100000 2025-03-28T09:26:26.907179Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.907493Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.907641Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:123:2157] requested range size#100000 2025-03-28T09:26:26.907828Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.907943Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:125:2159] requested range size#100000 2025-03-28T09:26:26.908314Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:127:2161] requested range size#100000 2025-03-28T09:26:26.908468Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [720575940464476 ... 
0} 2025-03-28T09:26:26.962871Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8300000 Reserved to# 8400000 2025-03-28T09:26:26.962895Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:391:2425] TEvAllocateResult from# 8300000 to# 8400000 2025-03-28T09:26:26.962978Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:90:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.963079Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:90:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.963172Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8400000 Reserved to# 8500000 2025-03-28T09:26:26.963191Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:393:2427] TEvAllocateResult from# 8400000 to# 8500000 2025-03-28T09:26:26.963266Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.963305Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8500000 Reserved to# 8600000 2025-03-28T09:26:26.963319Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:395:2429] TEvAllocateResult from# 8500000 to# 8600000 2025-03-28T09:26:26.963360Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8600000 Reserved to# 8700000 2025-03-28T09:26:26.963375Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:397:2431] TEvAllocateResult from# 8600000 to# 8700000 2025-03-28T09:26:26.963487Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:91:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.963521Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:92:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.963595Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8700000 Reserved to# 8800000 2025-03-28T09:26:26.963649Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:399:2433] TEvAllocateResult from# 8700000 to# 8800000 2025-03-28T09:26:26.963720Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:92:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.963760Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8800000 Reserved to# 8900000 2025-03-28T09:26:26.963774Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:401:2435] TEvAllocateResult from# 8800000 to# 8900000 2025-03-28T09:26:26.963821Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8900000 Reserved to# 9000000 2025-03-28T09:26:26.963844Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:403:2437] TEvAllocateResult from# 8900000 to# 9000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2025-03-28T09:26:26.966916Z node 1 :TX_ALLOCATOR DEBUG: tablet# 
72057594046447617 HANDLE TEvAllocate Sender# [1:437:2471] requested range size#100000 2025-03-28T09:26:26.967282Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:439:2473] requested range size#100000 2025-03-28T09:26:26.967613Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:441:2475] requested range size#100000 2025-03-28T09:26:26.967808Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.967932Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:443:2477] requested range size#100000 2025-03-28T09:26:26.968064Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:93:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.968268Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:445:2479] requested range size#100000 2025-03-28T09:26:26.968360Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.968478Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:94:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.968634Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:447:2481] requested range size#100000 2025-03-28T09:26:26.968827Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.968974Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:449:2483] requested range size#100000 2025-03-28T09:26:26.969103Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:95:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.969207Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.969273Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:431:2465] requested range size#100000 2025-03-28T09:26:26.969362Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:96:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.969494Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.969705Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:433:2467] requested range size#100000 2025-03-28T09:26:26.969839Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:97:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.970079Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:435:2469] requested range size#100000 2025-03-28T09:26:26.970227Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.970409Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 
TTxReserve Complete Successed# 1 Reserved from# 9000000 Reserved to# 9100000 2025-03-28T09:26:26.970441Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:437:2471] TEvAllocateResult from# 9000000 to# 9100000 2025-03-28T09:26:26.970525Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:98:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.970600Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.970888Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2025-03-28T09:26:26.970923Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:439:2473] TEvAllocateResult from# 9100000 to# 9200000 2025-03-28T09:26:26.970979Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:99:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.971120Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2025-03-28T09:26:26.971162Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:441:2475] TEvAllocateResult from# 9200000 to# 9300000 2025-03-28T09:26:26.971227Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.971376Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2025-03-28T09:26:26.971406Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:443:2477] TEvAllocateResult from# 9300000 to# 9400000 2025-03-28T09:26:26.971527Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2025-03-28T09:26:26.971563Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:445:2479] TEvAllocateResult from# 9400000 to# 9500000 2025-03-28T09:26:26.971634Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:100:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.971681Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.971817Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2025-03-28T09:26:26.971862Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:447:2481] TEvAllocateResult from# 9500000 to# 9600000 2025-03-28T09:26:26.971937Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:101:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.972045Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2025-03-28T09:26:26.972073Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:449:2483] TEvAllocateResult from# 9600000 to# 9700000 2025-03-28T09:26:26.972126Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:1:24576:76:0] Status# OK 
StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.972279Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2025-03-28T09:26:26.972308Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:431:2465] TEvAllocateResult from# 9700000 to# 9800000 2025-03-28T09:26:26.972358Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:102:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.972463Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2025-03-28T09:26:26.972512Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:433:2467] TEvAllocateResult from# 9800000 to# 9900000 2025-03-28T09:26:26.972589Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2025-03-28T09:26:26.972612Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:435:2469] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] Test command err: 2025-03-28T09:26:26.842709Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-28T09:26:26.845387Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-28T09:26:26.848148Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-28T09:26:26.863491Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.870020Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-28T09:26:26.887143Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.887257Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.887381Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-28T09:26:26.887528Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.887591Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.888346Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-28T09:26:26.888548Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-28T09:26:26.890083Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#0 2025-03-28T09:26:26.893983Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.894125Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.894237Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 0 2025-03-28T09:26:26.894269Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 0 expected SUCCESS >> TestKinesisHttpProxy::GoodRequestCreateStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAll [GOOD] Test command err: 2025-03-28T09:26:26.843811Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-28T09:26:26.845404Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-28T09:26:26.847980Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-28T09:26:26.864372Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.872684Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-28T09:26:26.888764Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.888835Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.888914Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-28T09:26:26.888998Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.889029Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.889154Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-28T09:26:26.889258Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-28T09:26:26.890081Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#281474976710655 2025-03-28T09:26:26.894179Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.894325Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.894442Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 281474976710655 2025-03-28T09:26:26.894505Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 281474976710655 expected SUCCESS 2025-03-28T09:26:26.898865Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:75:2109] requested range size#1 2025-03-28T09:26:26.899048Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2025-03-28T09:26:26.899094Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:75:2109] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] Test command err: 2025-03-28T09:26:26.842593Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2025-03-28T09:26:26.845357Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2025-03-28T09:26:26.847974Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2025-03-28T09:26:26.863464Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.870013Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 OnActivateExecutor 2025-03-28T09:26:26.887144Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.887260Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.887385Z node 1 :TABLET_MAIN DEBUG: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2025-03-28T09:26:26.887523Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.887575Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.888363Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxSchema Complete 2025-03-28T09:26:26.888577Z node 1 :TABLET_MAIN INFO: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2025-03-28T09:26:26.890092Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:70:2105] requested range size#8796093022207 2025-03-28T09:26:26.893937Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.894081Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.894210Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 8796093022207 2025-03-28T09:26:26.894251Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:70:2105] TEvAllocateResult from# 0 to# 8796093022207 expected SUCCESS 2025-03-28T09:26:26.898105Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:75:2109] requested range size#8796093022207 2025-03-28T09:26:26.898618Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.898776Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.898887Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8796093022207 Reserved to# 17592186044414 2025-03-28T09:26:26.898939Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:75:2109] TEvAllocateResult from# 8796093022207 to# 17592186044414 expected SUCCESS 2025-03-28T09:26:26.899382Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:79:2113] requested range size#8796093022207 2025-03-28T09:26:26.899758Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.899831Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.899939Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 17592186044414 Reserved to# 26388279066621 2025-03-28T09:26:26.899982Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:79:2113] TEvAllocateResult from# 17592186044414 to# 26388279066621 expected SUCCESS 2025-03-28T09:26:26.900395Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:83:2117] requested range size#8796093022207 2025-03-28T09:26:26.900845Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.900914Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.901011Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 26388279066621 Reserved to# 35184372088828 2025-03-28T09:26:26.901071Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:83:2117] TEvAllocateResult from# 26388279066621 to# 35184372088828 expected 
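(Taken together, the TestZeroRange, TestAllocateAll and TestAllocateAllByPieces traces pin down the allocator's observable contract: grants are contiguous — each TEvAllocateResult's from# equals the previous grant's to# — a zero-size request yields the empty range from# 0 to# 0, the id space tops out at 281474976710655 = 2^48 - 1, and a request that would overflow it is refused with status IMPOSIBLE. A minimal sketch of that contract follows; TxRangeAllocator and Reserve are hypothetical names for illustration, not YDB's actual classes.)

#include <cstdint>
#include <optional>

struct Range { uint64_t from, to; };            // grant covers ids in (from, to]

class TxRangeAllocator {
    static constexpr uint64_t Capacity = 281474976710655ULL; // 2^48 - 1, per the log
    uint64_t reservedTo = 0;                    // highest id handed out so far
public:
    std::optional<Range> Reserve(uint64_t size) {
        if (size > Capacity - reservedTo)       // would overflow the id space -> IMPOSIBLE
            return std::nullopt;
        Range r{reservedTo, reservedTo + size}; // "from# X to# X+size" in the log
        reservedTo = r.to;                      // next grant chains at this boundary
        return r;                               // size 0 yields the empty range (X, X]
    }
};

(With this model, Reserve(281474976710655) consumes the whole space in one grant, and the subsequent Reserve(1) returns nullopt — matching the SUCCESS-then-IMPOSIBLE sequence in TestAllocateAll above.)
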
SUCCESS 2025-03-28T09:26:26.901483Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:87:2121] requested range size#8796093022207 2025-03-28T09:26:26.901903Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.901984Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.902072Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 35184372088828 Reserved to# 43980465111035 2025-03-28T09:26:26.902111Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:87:2121] TEvAllocateResult from# 35184372088828 to# 43980465111035 expected SUCCESS 2025-03-28T09:26:26.902559Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:91:2125] requested range size#8796093022207 2025-03-28T09:26:26.902965Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.903040Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.903145Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 43980465111035 Reserved to# 52776558133242 2025-03-28T09:26:26.903188Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:91:2125] TEvAllocateResult from# 43980465111035 to# 52776558133242 expected SUCCESS 2025-03-28T09:26:26.903657Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:95:2129] requested range size#8796093022207 2025-03-28T09:26:26.903982Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.904068Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.904164Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 52776558133242 Reserved to# 61572651155449 2025-03-28T09:26:26.904234Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:95:2129] TEvAllocateResult from# 52776558133242 to# 61572651155449 expected SUCCESS 2025-03-28T09:26:26.904834Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:99:2133] requested range size#8796093022207 2025-03-28T09:26:26.905147Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.905212Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.905308Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 61572651155449 Reserved to# 70368744177656 2025-03-28T09:26:26.905350Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:99:2133] TEvAllocateResult from# 61572651155449 to# 70368744177656 expected SUCCESS 
2025-03-28T09:26:26.905868Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:103:2137] requested range size#8796093022207 2025-03-28T09:26:26.906254Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.906362Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.906444Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 70368744177656 Reserved to# 79164837199863 2025-03-28T09:26:26.906483Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:103:2137] TEvAllocateResult from# 70368744177656 to# 79164837199863 expected SUCCESS 2025-03-28T09:26:26.906985Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:107:2141] requested range size#8796093022207 2025-03-28T09:26:26.907290Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.907362Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.907474Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 79164837199863 Reserved to# 87960930222070 2025-03-28T09:26:26.907534Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:107:2141] TEvAllocateResult from# 79164837199863 to# 87960930222070 expected SUCCESS 2025-03-28T09:26:26.908073Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:111:2145] requested range size#8796093022207 2025-03-28T09:26:26.908420Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.908529Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:13:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.908611Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 87960930222070 Reserved to# 96757023244277 2025-03-28T09:26:26.908655Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:111:2145] TEvAllocateResult from# 87960930222070 to# 96757023244277 expected SUCCESS 2025-03-28T09:26:26.909181Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:115:2149] requested range size#8796093022207 2025-03-28T09:26:26.909494Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.909568Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:14:0:0:69:0] Status# OK StatusFla ... 
e 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:151:2185] requested range size#8796093022207 2025-03-28T09:26:26.921373Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.921448Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:23:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.921561Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 175921860444140 Reserved to# 184717953466347 2025-03-28T09:26:26.921601Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:151:2185] TEvAllocateResult from# 175921860444140 to# 184717953466347 expected SUCCESS 2025-03-28T09:26:26.922374Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:155:2189] requested range size#8796093022207 2025-03-28T09:26:26.922741Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.922820Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:24:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.922948Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 184717953466347 Reserved to# 193514046488554 2025-03-28T09:26:26.922989Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:155:2189] TEvAllocateResult from# 184717953466347 to# 193514046488554 expected SUCCESS 2025-03-28T09:26:26.923716Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:159:2193] requested range size#8796093022207 2025-03-28T09:26:26.924059Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.924136Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:25:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.924221Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 193514046488554 Reserved to# 202310139510761 2025-03-28T09:26:26.924272Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:159:2193] TEvAllocateResult from# 193514046488554 to# 202310139510761 expected SUCCESS 2025-03-28T09:26:26.925023Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:163:2197] requested range size#8796093022207 2025-03-28T09:26:26.925341Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.925409Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.925506Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 202310139510761 Reserved to# 211106232532968 2025-03-28T09:26:26.925553Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:163:2197] TEvAllocateResult from# 202310139510761 to# 211106232532968 expected SUCCESS 
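(For reference, the piece size in this run is 8796093022207 = 2^43 - 1, and successive grants chain exactly: each TEvAllocateResult's from# equals the previous to#, so the reserved space stays gapless. A quick compile-time check of the chaining arithmetic, with values copied verbatim from the log above:)

static_assert(202310139510761ULL + 8796093022207ULL == 211106232532968ULL,
              "each grant's from# equals the previous grant's to#");
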
2025-03-28T09:26:26.926235Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:167:2201] requested range size#8796093022207 2025-03-28T09:26:26.926564Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.926715Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.926806Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 211106232532968 Reserved to# 219902325555175 2025-03-28T09:26:26.926846Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:167:2201] TEvAllocateResult from# 211106232532968 to# 219902325555175 expected SUCCESS 2025-03-28T09:26:26.927575Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:171:2205] requested range size#8796093022207 2025-03-28T09:26:26.927957Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.928042Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.928145Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 219902325555175 Reserved to# 228698418577382 2025-03-28T09:26:26.928179Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:171:2205] TEvAllocateResult from# 219902325555175 to# 228698418577382 expected SUCCESS 2025-03-28T09:26:26.928930Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:175:2209] requested range size#8796093022207 2025-03-28T09:26:26.929254Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:1:24576:73:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.929329Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.929412Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 228698418577382 Reserved to# 237494511599589 2025-03-28T09:26:26.929462Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:175:2209] TEvAllocateResult from# 228698418577382 to# 237494511599589 expected SUCCESS 2025-03-28T09:26:26.930222Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:179:2213] requested range size#8796093022207 2025-03-28T09:26:26.930541Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.930601Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.930728Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 237494511599589 Reserved to# 246290604621796 2025-03-28T09:26:26.930771Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:179:2213] TEvAllocateResult from# 237494511599589 to# 246290604621796 expected 
SUCCESS 2025-03-28T09:26:26.931544Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:183:2217] requested range size#8796093022207 2025-03-28T09:26:26.931935Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.931997Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.932073Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 246290604621796 Reserved to# 255086697644003 2025-03-28T09:26:26.932110Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:183:2217] TEvAllocateResult from# 246290604621796 to# 255086697644003 expected SUCCESS 2025-03-28T09:26:26.932876Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:187:2221] requested range size#8796093022207 2025-03-28T09:26:26.933253Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.933346Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:32:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.933447Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 255086697644003 Reserved to# 263882790666210 2025-03-28T09:26:26.933488Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:187:2221] TEvAllocateResult from# 255086697644003 to# 263882790666210 expected SUCCESS 2025-03-28T09:26:26.934303Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:191:2225] requested range size#8796093022207 2025-03-28T09:26:26.934634Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:1:24576:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.934764Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:33:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.934863Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 263882790666210 Reserved to# 272678883688417 2025-03-28T09:26:26.934922Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:191:2225] TEvAllocateResult from# 263882790666210 to# 272678883688417 expected SUCCESS 2025-03-28T09:26:26.935751Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:195:2229] requested range size#8796093022207 2025-03-28T09:26:26.936065Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.936140Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:34:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.936247Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 272678883688417 Reserved to# 281474976710624 2025-03-28T09:26:26.936286Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:195:2229] TEvAllocateResult from# 272678883688417 to# 281474976710624 
expected SUCCESS 2025-03-28T09:26:26.937160Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:199:2233] requested range size#31 2025-03-28T09:26:26.937519Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.937590Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046447617:2:35:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:26:26.937717Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 281474976710624 Reserved to# 281474976710655 2025-03-28T09:26:26.937752Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 Send to Sender# [1:199:2233] TEvAllocateResult from# 281474976710624 to# 281474976710655 expected SUCCESS 2025-03-28T09:26:26.938572Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:203:2237] requested range size#1 2025-03-28T09:26:26.938750Z node 1 :TX_ALLOCATOR DEBUG: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2025-03-28T09:26:26.938797Z node 1 :TX_ALLOCATOR ERROR: tablet# 72057594046447617 Send to Sender# [1:203:2237] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE >> TBlobStorageWardenTest::TestCreatePDiskAndGroup |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] Test command err: 2025-03-28T09:25:28.643645Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789112690715667:2240];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:28.647049Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0008cd/r3tmp/tmpyzquHd/pdisk_1.dat 2025-03-28T09:25:29.229100Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:29.239001Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:29.260723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:29.291103Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9254, node 1 2025-03-28T09:25:29.564770Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:29.564806Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:29.564815Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:29.564956Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:19204 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:30.116664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:19204 2025-03-28T09:25:30.419744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.431249Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:25:30.440103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.458707Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-28T09:25:30.467182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.630728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:30.717413Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-03-28T09:25:30.721877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:30.801747Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-03-28T09:25:30.807702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:30.841461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.876581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.928889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.001208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.093045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.177559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:32.175027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789129870586130:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.175282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789129870586142:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.175538Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.181138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-28T09:25:32.190945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789129870586144:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-28T09:25:32.251675Z node 1 :TX_PROXY ERROR: Actor# [1:7486789129870586195:2920] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:33.324558Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqe149wb3kjavfxxcz9xnfsg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NThhZGFkYjQtMzgyNDE3Yy1hNmY1MGUzZC02OGJjNjk4Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.340243Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqe149wb3kjavfxxcz9xnfsg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NThhZGFkYjQtMzgyNDE3Yy1hNmY1MGUzZC02OGJjNjk4Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.350868Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqe149wb3kjavfxxcz9xnfsg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NThhZGFkYjQtMzgyNDE3Yy1hNmY1MGUzZC02OGJjNjk4Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root waiting... 2025-03-28T09:25:33.405372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:25:33.457310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.495884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.538925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:33.589049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:25:33.612247Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789112690715667:2240];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:33.612308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:33.662666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.712971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:33.754554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.807012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.883153Z node 1 :HTTP INFO: Listening on http://127.0.0.1:12850 2025-03-28T09:25:34.896608Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-28T09:25:34.902776Z node 1 :SQS INFO: Start SQS service actor 2025-03-28T09:25:34.903095Z node 1 :HTTP INFO: ... c8" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "207b3f2d-96f8bd59-80ae1ff5-98eb640f" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "927d6abd-6c0be024-16439342-ba7e9e59" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2025-03-28T09:26:25.717558Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7486789360414484754:2512]: SendMessageBatch { RequestId: "927d6abd-6c0be024-16439342-ba7e9e59" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "d2cc4a5e-d29bf631-2d93c926-24d402c8" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "207b3f2d-96f8bd59-80ae1ff5-98eb640f" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "927d6abd-6c0be024-16439342-ba7e9e59" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2025-03-28T09:26:25.717641Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7486789360414484758:3652]. Found: 1 2025-03-28T09:26:25.717684Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Compile program: ( (let queueIdNumber (Parameter 'QUEUE_ID_NUMBER (DataType 'Uint64))) (let queueIdNumberHash (Parameter 'QUEUE_ID_NUMBER_HASH (DataType 'Uint64))) (let timeFrom (Parameter 'TIME_FROM (DataType 'Uint64))) (let sentTsIdx '/Root/SQS/.FIFO/SentTimestampIdx) (let sentIdxRange '( '('QueueIdNumberHash queueIdNumberHash queueIdNumberHash) '('QueueIdNumber queueIdNumber queueIdNumber) '('SentTimestamp timeFrom (Uint64 '18446744073709551615)) '('Offset (Uint64 '0) (Uint64 '18446744073709551615)))) (let sentIdxSelect '( 'SentTimestamp 'Offset)) (let selectResult (SelectRange sentTsIdx sentIdxRange sentIdxSelect '('('"ItemsLimit" (Uint64 '1))))) (let messages (Member selectResult 'List)) (return (Extend (AsList (SetResult 'messages messages)) )) ) 2025-03-28T09:26:25.717864Z node 7 :SQS TRACE: Request [927d6abd-6c0be024-16439342-ba7e9e59] HandleResponse: { SendMessageBatch { RequestId: "927d6abd-6c0be024-16439342-ba7e9e59" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "d2cc4a5e-d29bf631-2d93c926-24d402c8" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "207b3f2d-96f8bd59-80ae1ff5-98eb640f" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." 
ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "927d6abd-6c0be024-16439342-ba7e9e59" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true }, status: OK 2025-03-28T09:26:25.718001Z node 7 :SQS DEBUG: Request [927d6abd-6c0be024-16439342-ba7e9e59] Sending reply from proxy actor: { SendMessageBatch { RequestId: "927d6abd-6c0be024-16439342-ba7e9e59" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "d2cc4a5e-d29bf631-2d93c926-24d402c8" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "207b3f2d-96f8bd59-80ae1ff5-98eb640f" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "927d6abd-6c0be024-16439342-ba7e9e59" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } 2025-03-28T09:26:25.718581Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [927d6abd-6c0be024-16439342-ba7e9e59] Got succesfult GRPC response. 2025-03-28T09:26:25.718835Z node 7 :HTTP_PROXY INFO: http request [SendMessageBatch] requestId [927d6abd-6c0be024-16439342-ba7e9e59] reply ok 2025-03-28T09:26:25.718996Z node 7 :HTTP_PROXY DEBUG: http request [SendMessageBatch] requestId [927d6abd-6c0be024-16439342-ba7e9e59] Send metering event. HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 1063 ResponseSizeInBytes: 644 SourceAddress: 58a8:7900:6050:0:40a8:7900:6050:0 ResourceId: 000000000000000101v0 Action: SendMessageBatch 2025-03-28T09:26:25.719206Z node 7 :HTTP DEBUG: (#37,[::1]:38196) <- (200 ) 2025-03-28T09:26:25.719293Z node 7 :HTTP DEBUG: (#37,[::1]:38196) connection closed Http output full {"Successful":[{"SequenceNumber":"1","Id":"Id-0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MD5OfMessageAttributes":"3d778967e1fa431d626ffb890c486385","MessageId":"d2cc4a5e-d29bf631-2d93c926-24d402c8"},{"SequenceNumber":"2","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"207b3f2d-96f8bd59-80ae1ff5-98eb640f"}],"Failed":[{"Message":"No MessageGroupId parameter.","Id":"Id-2","Code":"MissingParameter","SenderFault":true}]} 2025-03-28T09:26:25.721318Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Compile program response: { Status: 48 MiniKQLCompileResults { CompiledProgram: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001\032\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? 
\006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } } 2025-03-28T09:26:25.721363Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] compilation duration: 3ms 2025-03-28T09:26:25.721411Z node 7 :SQS DEBUG: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) has been prepared 2025-03-28T09:26:25.721424Z node 7 :SQS DEBUG: Request [] Executing compiled query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) 2025-03-28T09:26:25.721515Z node 7 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID). Mode: COMPILE_AND_EXEC 2025-03-28T09:26:25.721590Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 0, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 12311263855443095412, "TIME_FROM": 0} 2025-03-28T09:26:25.721887Z node 7 :SQS TRACE: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001\032\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? \006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } Params { Bin: "\037\000\005\205\n\203\010\203\010\203\010\203\004\203\010> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] |89.1%| [TA] $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |89.1%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitBefore [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitFinished >> TPersQueueTest::DirectReadCleanCache [GOOD] >> TPersQueueTest::DirectReadRestartPQRB >> DataShardReadTableSnapshots::ReadTableSnapshot [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter >> DataShardReadTableSnapshots::ReadTableDropColumn [GOOD] >> DataShardReadTableSnapshots::CorruptedDyNumber >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD] >> DataShardReadTableSnapshots::ReadTableUUID ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] Test command err: 2025-03-28T09:25:28.607524Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789113154805742:2087];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:28.646935Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0008c8/r3tmp/tmpGHCmWp/pdisk_1.dat 2025-03-28T09:25:29.291329Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:29.327654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:29.327744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:29.331992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2163, node 1 2025-03-28T09:25:29.570290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:29.570314Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:29.570321Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:29.570457Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17163 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
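In the TestYmqHttpProxy::TestSendMessageBatch output above, each successful entry carries MD5OfMessageBody (and, where attributes are present, MD5OfMessageAttributes). For the body this follows the AWS SQS convention: the lowercase hex MD5 of the raw UTF-8 body bytes. A sketch with a hypothetical payload (the digests in the log belong to the test's own payloads, which are not shown):

```python
import hashlib

def md5_of_message_body(body: str) -> str:
    # AWS SQS convention: lowercase hex MD5 over the raw UTF-8 body bytes.
    return hashlib.md5(body.encode("utf-8")).hexdigest()

# Hypothetical payload for illustration; "test" has a well-known digest.
assert md5_of_message_body("test") == "098f6bcd4621d373cade4e832627b4f6"
```

MD5OfMessageAttributes is computed the same way but over a canonical binary encoding of the attribute names, types, and values, which is why it appears only on the entry that actually carried message attributes.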
2025-03-28T09:25:30.114618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.146541Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:17163 2025-03-28T09:25:30.419772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.427887Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:25:30.433749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.460032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.670323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:30.722946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:30.804316Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-03-28T09:25:30.809246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.850631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.913263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.986434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.042922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.093764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:31.137117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:32.456033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789130334676344:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.456167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.458763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789130334676353:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.462986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-28T09:25:32.475035Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789130334676358:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-28T09:25:32.573174Z node 1 :TX_PROXY ERROR: Actor# [1:7486789130334676409:2920] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:33.320093Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqe14a552qfk2chzymsdzk10, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTA2MGI1NTUtZDc0MjUxNTQtNjU3NDA3Y2EtM2Q1M2ZmYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.342841Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqe14a552qfk2chzymsdzk10, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTA2MGI1NTUtZDc0MjUxNTQtNjU3NDA3Y2EtM2Q1M2ZmYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.348332Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqe14a552qfk2chzymsdzk10, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTA2MGI1NTUtZDc0MjUxNTQtNjU3NDA3Y2EtM2Q1M2ZmYzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.401636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.450608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.479062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.514574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.550309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.583668Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789113154805742:2087];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:33.583718Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:33.588692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... waiting... 
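The WorkloadService warnings above show the bootstrap race around the default resource pool: queries report "Resource pool default not found" until TPoolCreatorActor creates /Root/.metadata/workload_manager/pools/default, and the subsequent TX_PROXY ERROR ("path exist, request accepts it") is the benign already-exists branch of an idempotent create. A generic sketch of that create-then-doublecheck pattern, with hypothetical helpers rather than the YDB SDK:

```python
import time

def ensure_default_pool(pool_exists, create_pool, attempts=10, delay_s=0.2):
    """Idempotent create with retry; mirrors the pattern visible in the log."""
    path = "/Root/.metadata/workload_manager/pools/default"
    for _ in range(attempts):
        if pool_exists(path):
            return True
        try:
            create_pool(path)        # concurrent creators may race here
        except FileExistsError:      # analogue of "path exist, request accepts it"
            pass                     # treat as success and re-check
        time.sleep(delay_s)          # "Scheduled retry", as TPoolCreatorActor logs
    return False
```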
2025-03-28T09:25:33.627168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:25:33.660886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.715188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.777988Z node 1 :HTTP INFO: Listening on http://127.0.0.1:20030 2025-03-28T09:25:34.781269Z node 1 :SQS INFO: Start SQS service actor 2025-03-28T09:25:34.781411Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-03-28T09:25:34.782878Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-28T09:25:34.784174Z node 1 :SQS DEBUG: Enable scheme board sc ... requests for [cloud4/000000000000000101v0/3]. ActiveMessageRequests: 1 2025-03-28T09:26:26.915902Z node 7 :SQS DEBUG: Request [889f9a7e-f002f01b-3d80fb0-5aa3214d] Sending execute request for query(idx=CHANGE_VISIBILITY_ID) to queue leader 2025-03-28T09:26:26.915921Z node 7 :SQS DEBUG: Request [889f9a7e-f002f01b-3d80fb0-5aa3214d] Executing compiled query(idx=CHANGE_VISIBILITY_ID) 2025-03-28T09:26:26.915988Z node 7 :SQS DEBUG: Request [889f9a7e-f002f01b-3d80fb0-5aa3214d] Starting executor actor for query(idx=CHANGE_VISIBILITY_ID). Mode: COMPILE_AND_EXEC 2025-03-28T09:26:26.916061Z node 7 :SQS TRACE: Request [889f9a7e-f002f01b-3d80fb0-5aa3214d] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 3, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 1902612799742245372, "NOW": 1743153986915, "GROUPS_READ_ATTEMPT_IDS_PERIOD": 300000, "KEYS": [{"LockTimestamp": 1743153986772, "Offset": 1, "NewVisibilityDeadline": 1743153987915}, {"LockTimestamp": 1743153986810, "Offset": 2, "NewVisibilityDeadline": 1743153988915}]} 2025-03-28T09:26:26.916370Z node 7 :SQS TRACE: Request [889f9a7e-f002f01b-3d80fb0-5aa3214d] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "O\034\014Exists*NewVisibilityDeadline\014Offset\006Arg\014Member\nFlags\010Name\010Args\016Payload\022Parameter\006And\032LockTimestamp$VisibilityDeadline\014Invoke\t\211\004\206\202?\000\206\202\030Extend\000\006\002?\000\t\211\004\202\203\005@\206\205\n\203\014\207\203\010\203\014\203\010?\020(ChangeConddCurrentVisibilityDeadline\002\006\n$SetResult\000\003?\006\014result\t\211\006?\024\206\205\006?\020?\020?\020.\006\n?\032?\0220MapParameter\000\t\351\000?\034\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?&\003?(\010KEYS\003&\000\t\251\000?\032\016\000\005?\022\t\211\004?\010\207\203\014?\010 Coalesce\000\t\211\004?<\207\203\014\207\203\014*\000\t\211\006?B\203\005@\203\010?\0146\000\003?J\026LessOrEqual\t\351\000?L\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?X\003?Z\006NOW\003&\000\t\211\004?\014\207\205\004\207\203\010?\014.2\203\004\022\000\t\211\n?n\203\005\004\200\205\004\203\004\203\004.2\213\010\203\010\203\010\203\004?\020\203\004$SelectRow\000\003?t 
\000\001\205\000\000\000\000\001\030\000\000\000\000\000\000\000?l\005?z\003?v\020\003?x\026\003\013?\202\t\351\000?|\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?\226\003?\230\277\004we\003?\004\337L\301\004we\003?\006\004\007\003\003?\014\337X\277\004we\003?\016\004\003?\020\177\374y\277\311Bpg\032\003?\022\177w\247k\263\235 {\362\003?\024\006\177/" } FlatMKQL: true } UserRequestId: "889f9a7e-f002f01b-3d80fb0-5aa3214d" } ExecTimeoutPeriod: 60000 }. Params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 3, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 1902612799742245372, "NOW": 1743153986915, "GROUPS_READ_ATTEMPT_IDS_PERIOD": 300000, "KEYS": [{"LockTimestamp": 1743153986772, "Offset": 1, "NewVisibilityDeadline": 1743153987915}, {"LockTimestamp": 1743153986810, "Offset": 2, "NewVisibilityDeadline": 1743153988915}]} 2025-03-28T09:26:26.924459Z node 7 :SQS TRACE: Request [889f9a7e-f002f01b-3d80fb0-5aa3214d] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] HandleResponse { Status: 48 TxId: 281474976710717 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "result" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ChangeCond" Type { Kind: Data Data { Scheme: 6 } } } Member { Name: "CurrentVisibilityDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Exists" Type { Kind: Data Data { Scheme: 6 } } } Member { Name: "NewVisibilityDeadline" Type { Kind: Data Data { Scheme: 4 } } } Member { Name: "Offset" Type { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Value { Struct { Optional { List { Struct { Bool: true } Struct { Optional { Uint64: 1743153987892 } } Struct { Bool: true } Struct { Uint64: 1743153987915 } Struct { Uint64: 1 } } List { Struct { Bool: true } Struct { Optional { Uint64: 1743154016810 } } Struct { Bool: true } Struct { Uint64: 1743153988915 } Struct { Uint64: 2 } } } } } } } 2025-03-28T09:26:26.924501Z node 7 :SQS DEBUG: Request [889f9a7e-f002f01b-3d80fb0-5aa3214d] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Attempt 1 execution duration: 7ms 2025-03-28T09:26:26.924778Z node 7 :SQS TRACE: Request [889f9a7e-f002f01b-3d80fb0-5aa3214d] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Sending mkql execution result: { Status: 48 TxId: 281474976710717 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "result" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ChangeCond" Type { Kind: Data Data { Scheme: 6 } } } Member { Name: "CurrentVisibilityDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Exists" Type { Kind: Data Data { Scheme: 6 } } } Member { Name: "NewVisibilityDeadline" Type { Kind: Data Data { Scheme: 4 } } } Member { Name: "Offset" Type { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Value { Struct { Optional { List { Struct { Bool: true } Struct { Optional { Uint64: 1743153987892 } } Struct { Bool: true } Struct { Uint64: 1743153987915 } Struct { Uint64: 1 } } List { Struct { Bool: true } Struct { Optional { Uint64: 1743154016810 } } Struct { Bool: true } Struct { Uint64: 1743153988915 } Struct { Uint64: 2 } } } } } } } 
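The CHANGE_VISIBILITY_ID exchange above makes the deadline arithmetic visible: the serialized params carry NOW in epoch milliseconds, and each KEYS entry gets a NewVisibilityDeadline of NOW plus the requested visibility timeout. The two entries differ from NOW by exactly 1000 ms and 2000 ms, consistent with timeouts of 1 s and 2 s. A sketch using only the values from the log:

```python
NOW_MS = 1743153986915  # "NOW" from the serialized CHANGE_VISIBILITY_ID params

def new_visibility_deadline(now_ms: int, visibility_timeout_s: int) -> int:
    # Deadlines are epoch milliseconds, like NOW itself.
    return now_ms + visibility_timeout_s * 1000

assert new_visibility_deadline(NOW_MS, 1) == 1743153987915  # Offset 1 in the log
assert new_visibility_deadline(NOW_MS, 2) == 1743153988915  # Offset 2 in the log
```

The MiniKQL response then confirms both rows with ChangeCond: true, i.e. the conditional update against CurrentVisibilityDeadline succeeded for both offsets.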
2025-03-28T09:26:26.924862Z node 7 :SQS TRACE: Request [889f9a7e-f002f01b-3d80fb0-5aa3214d] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Minikql data response: {"result": [{"ChangeCond": true, "CurrentVisibilityDeadline": 1743153987892, "Exists": true, "NewVisibilityDeadline": 1743153987915, "Offset": 1}, {"ChangeCond": true, "CurrentVisibilityDeadline": 1743154016810, "Exists": true, "NewVisibilityDeadline": 1743153988915, "Offset": 2}]} 2025-03-28T09:26:26.924988Z node 7 :SQS DEBUG: Request [889f9a7e-f002f01b-3d80fb0-5aa3214d] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] execution duration: 9ms 2025-03-28T09:26:26.925161Z node 7 :SQS DEBUG: Request [889f9a7e-f002f01b-3d80fb0-5aa3214d] Sending executed reply 2025-03-28T09:26:26.925343Z node 7 :SQS TRACE: Request [889f9a7e-f002f01b-3d80fb0-5aa3214d] Received reply from DB: ExecComplete 2025-03-28T09:26:26.925386Z node 7 :SQS TRACE: Decrement active message requests for [[cloud4/000000000000000101v0/3]. ActiveMessageRequests: 0 2025-03-28T09:26:26.925490Z node 7 :SQS TRACE: Request [889f9a7e-f002f01b-3d80fb0-5aa3214d] SendReplyAndDie from action actor { ChangeMessageVisibilityBatch { RequestId: "889f9a7e-f002f01b-3d80fb0-5aa3214d" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } } 2025-03-28T09:26:26.925594Z node 7 :SQS TRACE: Request [889f9a7e-f002f01b-3d80fb0-5aa3214d] Sending sqs response: { ChangeMessageVisibilityBatch { RequestId: "889f9a7e-f002f01b-3d80fb0-5aa3214d" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "889f9a7e-f002f01b-3d80fb0-5aa3214d" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-03-28T09:26:26.925689Z node 7 :SQS DEBUG: Request ChangeMessageVisibilityBatch working duration: 9ms 2025-03-28T09:26:26.925746Z node 7 :SQS TRACE: HandleSqsResponse ChangeMessageVisibilityBatch { RequestId: "889f9a7e-f002f01b-3d80fb0-5aa3214d" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "889f9a7e-f002f01b-3d80fb0-5aa3214d" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-03-28T09:26:26.925801Z node 7 :SQS TRACE: Sending answer to proxy actor [7:7486789363269060050:2562]: ChangeMessageVisibilityBatch { RequestId: "889f9a7e-f002f01b-3d80fb0-5aa3214d" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "889f9a7e-f002f01b-3d80fb0-5aa3214d" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2025-03-28T09:26:26.925855Z node 7 :SQS TRACE: Dec local leader ref for actor [7:7486789363269060051:3848]. Found: 1 2025-03-28T09:26:26.925942Z node 7 :SQS TRACE: Request [889f9a7e-f002f01b-3d80fb0-5aa3214d] HandleResponse: { ChangeMessageVisibilityBatch { RequestId: "889f9a7e-f002f01b-3d80fb0-5aa3214d" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "889f9a7e-f002f01b-3d80fb0-5aa3214d" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2025-03-28T09:26:26.926026Z node 7 :SQS DEBUG: Request [889f9a7e-f002f01b-3d80fb0-5aa3214d] Sending reply from proxy actor: { ChangeMessageVisibilityBatch { RequestId: "889f9a7e-f002f01b-3d80fb0-5aa3214d" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "889f9a7e-f002f01b-3d80fb0-5aa3214d" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2025-03-28T09:26:26.926208Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [889f9a7e-f002f01b-3d80fb0-5aa3214d] Got succesfult GRPC response. 
2025-03-28T09:26:26.926302Z node 7 :HTTP_PROXY INFO: http request [ChangeMessageVisibilityBatch] requestId [889f9a7e-f002f01b-3d80fb0-5aa3214d] reply ok 2025-03-28T09:26:26.926400Z node 7 :HTTP_PROXY DEBUG: http request [ChangeMessageVisibilityBatch] requestId [889f9a7e-f002f01b-3d80fb0-5aa3214d] Send metering event. HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 788 ResponseSizeInBytes: 221 SourceAddress: 78f4:9100:6050:0:60f4:9100:6050:0 ResourceId: 000000000000000101v0 Action: ChangeMessageVisibilityBatch 2025-03-28T09:26:26.926480Z node 7 :HTTP DEBUG: (#41,[::1]:46072) <- (200 ) 2025-03-28T09:26:26.926566Z node 7 :HTTP DEBUG: (#41,[::1]:46072) connection closed Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TFlatTest::SplitEmptyToMany [GOOD] >> TFlatTest::SplitEmptyTwice >> TPersQueueTest::StreamReadCommitAndStatusMsgs [GOOD] >> TPersQueueTest::StreamReadManyUpdateTokenAndRead ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] Test command err: Trying to start YDB, gRPC: 3226, MsgBus: 2939 2025-03-28T09:26:04.076533Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789271445891552:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:04.076622Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000af5/r3tmp/tmpJZbzal/pdisk_1.dat 2025-03-28T09:26:04.533016Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:04.556194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:04.556316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:04.558456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3226, node 1 2025-03-28T09:26:04.742049Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:26:04.742073Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:26:04.742085Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:26:04.742221Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2939 TClient is connected to server localhost:2939 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:26:05.478926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:26:07.142745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789284330793967:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:07.142923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:07.707978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-28T09:26:07.828798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789284330794089:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:07.828875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:07.829231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789284330794094:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:07.833306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-28T09:26:07.843056Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T09:26:07.846850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789284330794096:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:26:07.942880Z node 1 :TX_PROXY ERROR: Actor# [1:7486789284330794136:2401] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:26:08.759093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:26:09.076445Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789271445891552:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:09.076525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:26:09.201988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:1, at schemeshard: 72057594046644480 2025-03-28T09:26:09.654451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:26:10.150983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:26:10.650160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710681:0, at schemeshard: 72057594046644480 2025-03-28T09:26:11.123798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:26:11.197520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 2025-03-28T09:26:14.532725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710725:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 Trying to start YDB, gRPC: 7998, MsgBus: 7282 2025-03-28T09:26:16.333394Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789321239947641:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:16.333502Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000af5/r3tmp/tmpB1ojBE/pdisk_1.dat 2025-03-28T09:26:16.437924Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7998, node 2 2025-03-28T09:26:16.475397Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:16.475525Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:16.476866Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:26:16.505218Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:26:16.505246Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:26:16.505251Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:26:16.505345Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7282 TClient is connected to server localhost:7282 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:26:16.961529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:26:19.358325Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789334124850189:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:19.358413Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:19.376181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-03-28T09:26:19.414871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789334124850308:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:19.414985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:19.415187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789334124850313:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:19.419056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:2, at schemeshard: 72057594046644480 2025-03-28T09:26:19.438254Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486789334124850315:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:26:19.522147Z node 2 :TX_PROXY ERROR: Actor# [2:7486789334124850355:2396] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:26:20.036401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:26:20.581659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:1, at schemeshard: 72057594046644480 2025-03-28T09:26:21.264371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:26:21.334813Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486789321239947641:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:21.334900Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:26:21.840474Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-28T09:26:22.368713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-03-28T09:26:22.882499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:26:22.920230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-28T09:26:26.625866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715727:0, at schemeshard: 72057594046644480 Call DescribeTable. 
data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Expected: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } CRAB Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "col1" type { type_id: UINT16 } } columns { name: "col2" type { type_id: DOUBLE } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } CRAB Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } from { table: "example_1" } } ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL CRAB Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { } from { table: "example_1" } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. 
GRpcStatusCode: 0 >> TestKinesisHttpProxy::ListShardsToken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] Test command err: 2025-03-28T09:25:28.587970Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789115270750078:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:28.588769Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0009a0/r3tmp/tmpgeExQb/pdisk_1.dat 2025-03-28T09:25:29.283141Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:29.306530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:29.306642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:29.307907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7400, node 1 2025-03-28T09:25:29.538564Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:29.538584Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:29.538588Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:29.538665Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23572 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:30.112965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:23572 2025-03-28T09:25:30.385704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:30.395542Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:25:30.409729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.435388Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-28T09:25:30.453825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.622628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:30.688917Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-03-28T09:25:30.693625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:30.755009Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-03-28T09:25:30.760119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.816987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.863275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.939010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.004554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.078819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.119724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:32.225817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789132450620708:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.225908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789132450620699:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.226039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.230485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-28T09:25:32.241739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789132450620713:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-28T09:25:32.329622Z node 1 :TX_PROXY ERROR: Actor# [1:7486789132450620764:2918] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:33.319538Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqe149xx6k6jttzq0h35x98v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjE2ZjVmMjEtNTc1MDg0ODMtNzJkNWYzMDgtZjk0ZTA4MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.340633Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqe149xx6k6jttzq0h35x98v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjE2ZjVmMjEtNTc1MDg0ODMtNzJkNWYzMDgtZjk0ZTA4MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.348345Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqe149xx6k6jttzq0h35x98v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjE2ZjVmMjEtNTc1MDg0ODMtNzJkNWYzMDgtZjk0ZTA4MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.398311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:33.432586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:25:33.478848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.512805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.551088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.580654Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789115270750078:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:33.580709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:33.600290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.653130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:33.685410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.726502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.813260Z node 1 :HTTP INFO: Listening on http://127.0.0.1:30705 2025-03-28T09:25:34.815531Z node 1 :SQS INFO: Start SQS service actor 2025-03-28T09:25:34.815672Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-28T09:25:34.815980Z node 1 :SQS DEBUG: ... } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-28T09:26:27.092918Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Compile program response: { Status: 48 MiniKQLCompileResults { CompiledProgram: "O\022\nFlags\010Name\010Args\016Payload\022Parameter\016Account\nValue\014Member\022SetResult\t\211\004\206\202?\000\206\202\030Extend\000\006\002?\000\t\211\004\202\203\005@\206\205\006\207\203\001H\207\203\001H\207\203\001H\026\006\032\"\000\003?\006\020settings\t\211\004?\026\205\004?\026\203\014\020List$Truncated\203\004\036\000\t\211\026?\036\203\005\004\200\205\006\203\004\203\004\203\004\026\006\032\213\004\203\001H\203\001H\213\000\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?$ \000\001\205\000\000\000\000\001\r\000\000\000\000\000\000\000?\024\005?,\003?&\002\003?(\004\003?*\006\007\013?2\t\351\000?.\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?V\003?X\022FROM_USER\003\022\000\t\351\000?0\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?l\003?n\022FROM_NAME\003\022\000\000\013?4\003?6\000\t\351\000?8\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\210\003?\212\024BATCH_SIZE\003\022\000\003?:\000\003?<\000\006\004?@\003\203\014\000\003\203\014\000\003\003?B\000\277\007\003? 
\000\002\001\000\006\002?\002\t\211\004\202\203\005@?\034\"\000\003?\262\022truncated\t\211\004?\034?\036\203\004\036\000?\250\003?\270\002\002\001\000\003/" } } 2025-03-28T09:26:27.092956Z node 8 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] compilation duration: 26ms 2025-03-28T09:26:27.093064Z node 8 :SQS TRACE: Handle compiled user settings query: { Status: 48 MiniKQLCompileResults { CompiledProgram: "O\022\nFlags\010Name\010Args\016Payload\022Parameter\016Account\nValue\014Member\022SetResult\t\211\004\206\202?\000\206\202\030Extend\000\006\002?\000\t\211\004\202\203\005@\206\205\006\207\203\001H\207\203\001H\207\203\001H\026\006\032\"\000\003?\006\020settings\t\211\004?\026\205\004?\026\203\014\020List$Truncated\203\004\036\000\t\211\026?\036\203\005\004\200\205\006\203\004\203\004\203\004\026\006\032\213\004\203\001H\203\001H\213\000\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?$ \000\001\205\000\000\000\000\001\r\000\000\000\000\000\000\000?\024\005?,\003?&\002\003?(\004\003?*\006\007\013?2\t\351\000?.\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?V\003?X\022FROM_USER\003\022\000\t\351\000?0\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?l\003?n\022FROM_NAME\003\022\000\000\013?4\003?6\000\t\351\000?8\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\210\003?\212\024BATCH_SIZE\003\022\000\003?:\000\003?<\000\006\004?@\003\203\014\000\003\203\014\000\003\003?B\000\277\007\003? \000\002\001\000\006\002?\002\t\211\004\202\203\005@?\034\"\000\003?\262\022truncated\t\211\004?\034?\036\203\004\036\000?\250\003?\270\002\002\001\000\003/" } } 2025-03-28T09:26:27.093139Z node 8 :SQS DEBUG: Request [] Starting executor actor for query(idx=GET_USER_SETTINGS_ID). Mode: COMPILE_AND_EXEC 2025-03-28T09:26:27.093212Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Serializing params: {"FROM_USER": "", "FROM_NAME": "", "BATCH_SIZE": 1000} 2025-03-28T09:26:27.093446Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "O\022\nFlags\010Name\010Args\016Payload\022Parameter\016Account\nValue\014Member\022SetResult\t\211\004\206\202?\000\206\202\030Extend\000\006\002?\000\t\211\004\202\203\005@\206\205\006\207\203\001H\207\203\001H\207\203\001H\026\006\032\"\000\003?\006\020settings\t\211\004?\026\205\004?\026\203\014\020List$Truncated\203\004\036\000\t\211\026?\036\203\005\004\200\205\006\203\004\203\004\203\004\026\006\032\213\004\203\001H\203\001H\213\000\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?$ \000\001\205\000\000\000\000\001\r\000\000\000\000\000\000\000?\024\005?,\003?&\002\003?(\004\003?*\006\007\013?2\t\351\000?.\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?V\003?X\022FROM_USER\003\022\000\t\351\000?0\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?l\003?n\022FROM_NAME\003\022\000\000\013?4\003?6\000\t\351\000?8\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\210\003?\212\024BATCH_SIZE\003\022\000\003?:\000\003?<\000\006\004?@\003\203\014\000\003\203\014\000\003\003?B\000\277\007\003? 
\000\002\001\000\006\002?\002\t\211\004\202\203\005@?\034\"\000\003?\262\022truncated\t\211\004?\034?\036\203\004\036\000?\250\003?\270\002\002\001\000\003/" } Params { Bin: "\037\000\005\205\006\203\010\203\001H\203\001H(BATCH_SIZE$FROM_NAME$FROM_USER\003?\000\241\017\003?\002\000\003?\004\000\007/" } FlatMKQL: true } } ExecTimeoutPeriod: 60000 }. Params: {"FROM_USER": "", "FROM_NAME": "", "BATCH_SIZE": 1000} 2025-03-28T09:26:27.247489Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715688 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-28T09:26:27.247528Z node 8 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 152ms 2025-03-28T09:26:27.247838Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715688 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-28T09:26:27.247882Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-03-28T09:26:27.247971Z node 8 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 152ms 2025-03-28T09:26:27.248356Z node 8 :SQS TRACE: Handle user settings: { Status: 48 TxId: 281474976715688 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 
2025-03-28T09:26:27.257359Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7486789369956281581:2448]: Pool not found 2025-03-28T09:26:27.258382Z node 8 :SQS DEBUG: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-03-28T09:26:27.786076Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7486789369956281530:2443]: Pool not found 2025-03-28T09:26:27.786639Z node 8 :SQS DEBUG: [cleanup removed queues] getting queues... 2025-03-28T09:26:27.790103Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7486789369956281733:2472], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:27.790190Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7486789369956281734:2473], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-03-28T09:26:27.790243Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:28.042965Z node 8 :HTTP DEBUG: (#37,[::1]:56668) incoming connection opened 2025-03-28T09:26:28.043036Z node 8 :HTTP DEBUG: (#37,[::1]:56668) -> (POST /Root) 2025-03-28T09:26:28.043146Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [5812:1900:6050:0:4012:1900:6050:0] request [CreateStream] url [/Root] database [/Root] requestId: 65b346f1-ca29e71-2b9be080-6e8e6aaa 2025-03-28T09:26:28.043780Z node 8 :HTTP_PROXY INFO: http request [CreateStream] requestId [65b346f1-ca29e71-2b9be080-6e8e6aaa] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map 2025-03-28T09:26:28.044025Z node 8 :HTTP DEBUG: (#37,[::1]:56668) <- (400 MissingParameter) 2025-03-28T09:26:28.044071Z node 8 :HTTP DEBUG: (#37,[::1]:56668) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 4 null 0 2025-03-28T09:26:28.044111Z node 8 :HTTP DEBUG: (#37,[::1]:56668) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: 65b346f1-ca29e71-2b9be080-6e8e6aaa x-amz-crc32: 851558042 Content-Type: application/x-amz-json-1.1 Content-Length: 127 {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map"} 2025-03-28T09:26:28.044192Z node 8 :HTTP DEBUG: (#37,[::1]:56668) connection closed Http output full {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map"} 2025-03-28T09:26:28.081286Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7486789369956281731:2471]: Pool not found 2025-03-28T09:26:28.081576Z node 8 :SQS DEBUG: [cleanup removed queues] there are no queues to delete |89.1%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] >> TPersQueueTest::TopicServiceSimpleHappyWrites [GOOD] >> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode >> TestKinesisHttpProxy::TestWrongRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] Test command err: 2025-03-28T09:26:28.697811Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:26:28.697933Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:26:29.014305Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:26:29.014437Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:26:29.014521Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet |89.1%| [TA] $(B)/ydb/core/kqp/ut/join/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] Test command err: 2025-03-28T09:26:28.689954Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:26:28.690085Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:26:29.015739Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:0:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:26:29.015853Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:1:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2025-03-28T09:26:29.015961Z node 1 :BS_SYNCLOG WARN: VDISK[3e000000:_:0:2:0]: Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet >> TPersQueueTest::CacheHead [GOOD] >> TPersQueueTest::CheckACLForGrpcWrite >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects >> TGRpcStreamingTest::ReadFinish >> TGRpcStreamingTest::SimpleEcho >> TGRpcStreamingTest::ClientNeverWrites >> TGRpcStreamingTest::WritesDoneFromClient >> TGRpcStreamingTest::WriteAndFinishWorks |89.2%| [TA] $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError >> TestYmqHttpProxy::TestListQueueTags |89.2%| [TA] {RESULT} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TopicService::UseDoubleSlashInTopicPath [GOOD] >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] >> TPQCDTest::TestDiscoverClusters >> TPQCDTest::TestPrioritizeLocalDatacenter >> TPQCDTest::TestUnavailableWithoutNetClassifier >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] >> TPQCDTest::TestUnavailableWithoutClustersList >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] >> TPQCDTest::TestUnavailableWithoutBoth >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] >> TopicService::RelativePath >> TGRpcStreamingTest::ClientDisconnects [GOOD] >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] >> TGRpcStreamingTest::ReadFinish [GOOD] >> TGRpcStreamingTest::SimpleEcho [GOOD] >> Normalizers::PortionsNormalizer >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp >> TFlatTest::SplitEmptyTwice [GOOD] >> Backup::ProposeBackup >> TGRpcStreamingTest::ClientNeverWrites [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestWrongRequest [GOOD] Test command err: 2025-03-28T09:25:32.932611Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789130232251951:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:32.932754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007f0/r3tmp/tmpXhpVqj/pdisk_1.dat 2025-03-28T09:25:33.296979Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:33.318053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:33.318156Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:33.319208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21401, node 1 2025-03-28T09:25:33.401333Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:33.401357Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:33.401363Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:33.401451Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22619 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:33.700087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:22619 2025-03-28T09:25:33.916788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.928944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.947775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:34.076531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:34.128231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:34.171896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:34.213545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:25:34.245799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:34.281701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:34.325405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:34.359203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:34.398269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:36.002981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789147412122423:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:36.002981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789147412122415:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:36.003056Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:36.006368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480 2025-03-28T09:25:36.020777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789147412122429:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2025-03-28T09:25:36.082782Z node 1 :TX_PROXY ERROR: Actor# [1:7486789147412122480:2916] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:36.580409Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jqe14dm0fzzd7nx5j1rzgz56, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFkMjZmNTgtNjkyODYyYWMtNmE0NmIyOS00MjVmZTZjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:36.587741Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jqe14dm0fzzd7nx5j1rzgz56, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFkMjZmNTgtNjkyODYyYWMtNmE0NmIyOS00MjVmZTZjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:36.590982Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqe14dm0fzzd7nx5j1rzgz56, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDFkMjZmNTgtNjkyODYyYWMtNmE0NmIyOS00MjVmZTZjNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:36.621509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:36.669780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:36.697957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:36.729402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:36.767828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:36.800447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:36.838820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715684:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:36.873674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:36.909717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:36.982501Z node 1 :HTTP INFO: Listening on http://127.0.0.1:18428 2025-03-28T09:25:37.928963Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789130232251951:2207];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:37.929066Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:37.990114Z node 1 :SQS INFO: Start SQS service actor 2025-03-28T09:25:37.990246Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-03-28T09:25:37.991646Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-03-28T09:25:37.992868Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-28T09:25:37.993132Z node 1 :HTTP INFO: Listening on http://[::]:6386 2025-03-28T09:25:38.012506Z node 1 :SQS INFO: Request SQS users list 2025-03-28T09:25:38.012550Z node 1 :SQS DEBUG: Request SQS queues list 2025-03-28T09:25:38.014886Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables after 0.000000s 2025-03-28T09:25:38.014933Z node 1 :SQS DEBUG: [Node tracker] bootstrap on node=1 2025-03-28T09:25:38.018228Z node 1 :SQS DEBUG: Request [] Startin ... ptional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-28T09:26:29.928508Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715687 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-28T09:26:29.928512Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 30ms 2025-03-28T09:26:29.928526Z node 8 :SQS DEBUG: 
Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 30ms 2025-03-28T09:26:29.928762Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715687 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-28T09:26:29.928806Z node 8 :SQS TRACE: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2025-03-28T09:26:29.928879Z node 8 :SQS DEBUG: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 32ms 2025-03-28T09:26:29.928898Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715688 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-28T09:26:29.928924Z node 8 :SQS TRACE: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2025-03-28T09:26:29.929045Z node 8 :SQS DEBUG: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 32ms 2025-03-28T09:26:29.929358Z node 8 :SQS TRACE: Handle user settings: { 
Status: 48 TxId: 281474976715687 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-28T09:26:29.929816Z node 8 :SQS TRACE: Handle queues list: { Status: 48 TxId: 281474976715688 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2025-03-28T09:26:30.099843Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7486789377860164405:2448]: Pool not found 2025-03-28T09:26:30.100405Z node 8 :SQS DEBUG: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2025-03-28T09:26:30.546462Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7486789377860164407:2449]: Pool not found 2025-03-28T09:26:30.546586Z node 8 :SQS DEBUG: [cleanup removed queues] getting queues... 2025-03-28T09:26:30.549320Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7486789382155131862:2473], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2025-03-28T09:26:30.549341Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7486789382155131861:2472], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:30.549415Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:30.834865Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7486789382155131859:2471]: Pool not found 2025-03-28T09:26:30.835269Z node 8 :SQS DEBUG: [cleanup removed queues] there are no queues to delete 2025-03-28T09:26:30.860565Z node 8 :HTTP DEBUG: (#37,[::1]:57418) incoming connection opened 2025-03-28T09:26:30.860678Z node 8 :HTTP DEBUG: (#37,[::1]:57418) -> (POST /) 2025-03-28T09:26:30.860848Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [58f9:c700:6050:0:40f9:c700:6050:0] request [CreateStream] url [/] database [] requestId: 728a4906-5b8b9d5d-3b0d48ea-cab6c4c 2025-03-28T09:26:30.861617Z node 8 :HTTP_PROXY WARN: http request [CreateStream] requestId [728a4906-5b8b9d5d-3b0d48ea-cab6c4c] got new request with incorrect json from [58f9:c700:6050:0:40f9:c700:6050:0] database '' 2025-03-28T09:26:30.861867Z node 8 :HTTP_PROXY INFO: http request [CreateStream] requestId [728a4906-5b8b9d5d-3b0d48ea-cab6c4c] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName 2025-03-28T09:26:30.862629Z node 8 :HTTP DEBUG: (#37,[::1]:57418) <- (400 InvalidArgumentException) 2025-03-28T09:26:30.862732Z node 8 :HTTP DEBUG: (#37,[::1]:57418) Request: POST / HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked 57 { "ShardCount":5, "StreamName":"testtopic", "WrongStreamName":"WrongStreamName" } 0 2025-03-28T09:26:30.862778Z node 8 :HTTP DEBUG: (#37,[::1]:57418) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 728a4906-5b8b9d5d-3b0d48ea-cab6c4c x-amz-crc32: 3053902336 Content-Type: application/x-amz-json-1.1 Content-Length: 135 {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 2025-03-28T09:26:30.862866Z node 8 :HTTP DEBUG: (#37,[::1]:57418) connection closed Http output full {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 400 {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish [GOOD] Test command err: 2025-03-28T09:26:32.531358Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789388194900267:2181];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:32.531423Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e5a/r3tmp/tmpLUVYcZ/pdisk_1.dat 2025-03-28T09:26:32.980960Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:32.982360Z node 
1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:32.982445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:32.985800Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:26:33.054815Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:55434 2025-03-28T09:26:33.055292Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7486789392489867963:2258] peer# ipv6:[::1]:55434 2025-03-28T09:26:33.055336Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade read Name# Session peer# ipv6:[::1]:55434 2025-03-28T09:26:33.055446Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade finish Name# Session peer# ipv6:[::1]:55434 grpc status# (0) message# 2025-03-28T09:26:33.055833Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] read finished Name# Session ok# false data# peer# ipv6:[::1]:55434 2025-03-28T09:26:33.055874Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:55434 2025-03-28T09:26:33.055873Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2025-03-28T09:26:33.055902Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:55434 grpc status# (0) message# 2025-03-28T09:26:33.055948Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:55434 (finish done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] Test command err: 2025-03-28T09:26:25.685869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:26:25.686408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:26:25.686478Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a42/r3tmp/tmpFRtu7N/pdisk_1.dat 2025-03-28T09:26:26.141911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:26:26.204616Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:26.247207Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:26:26.248149Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:26:26.248471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:26.252939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:26.264509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:26:26.360213Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:26:26.360340Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:26:26.360539Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:26:26.483986Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:26:26.484103Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:26:26.484727Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:26:26.484837Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:26:26.485192Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:26:26.485410Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:26:26.485504Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:26:26.485819Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:26:26.487391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:26:26.488463Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:26:26.488530Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:26:26.538825Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:26:26.539929Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:26:26.540325Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:26:26.540605Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:26:26.595008Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:26:26.595856Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:26:26.596001Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:26:26.597838Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:26:26.597917Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:26:26.597983Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:26:26.598373Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:26:26.598508Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:26:26.598581Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:26:26.609420Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:26:26.637613Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:26:26.639769Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:26:26.640002Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:26:26.640195Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:26:26.640239Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:26:26.640277Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:26:26.640550Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
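
The TX_DATASHARD TRACE entries around this point follow fixed shapes — "Trying to execute [op] at <tablet> on unit <Unit>", "Execution status for [op] at <tablet> is <Status>", "Advance execution plan ... executing on unit <Unit>". A minimal Python sketch for recovering the per-operation unit pipeline from such a log; the two patterns below are copied from the trace lines above, and since entry boundaries in this dump can fall mid-line, it matches by byte position rather than per line (an assumption about how the log was flattened, not part of YDB itself):

    import re
    from collections import defaultdict

    # Patterns taken verbatim from the TX_DATASHARD TRACE lines above;
    # \s+ tolerates the line breaks this dump inserts mid-entry.
    TRYING = re.compile(r"Trying to execute\s+\[(\d+:\d+)\]\s+at\s+(\d+)\s+on unit\s+(\w+)")
    STATUS = re.compile(r"Execution status for\s+\[(\d+:\d+)\]\s+at\s+(\d+)\s+is\s+(\w+)")

    def unit_pipeline(log_text):
        """Map (op_id, tablet) -> ordered (unit, status) transitions."""
        events = [(m.start(), "try", m.groups()) for m in TRYING.finditer(log_text)]
        events += [(m.start(), "status", m.groups()) for m in STATUS.finditer(log_text)]
        events.sort()  # interleave both kinds in log order
        pipelines = defaultdict(list)
        pending = {}  # (op, tablet) -> unit currently being executed
        for _, kind, (op, tablet, value) in events:
            if kind == "try":
                pending[(op, tablet)] = value           # value is the unit name
            else:
                unit = pending.get((op, tablet), "?")
                pipelines[(op, tablet)].append((unit, value))  # value is the status
        return pipelines

    if __name__ == "__main__":
        import sys
        for key, steps in unit_pipeline(sys.stdin.read()).items():
            print(key, " -> ".join(f"{u}[{s}]" for u, s in steps))

Run against this log it reconstructs, e.g., CheckSchemeTx[ExecutedNoMoreRestarts] -> StoreSchemeTx[DelayCompleteNoMoreRestarts] -> FinishPropose[DelayComplete] for txid 281474976715657, which is often all that is needed when triaging a stuck operation in these unittest dumps.
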
2025-03-28T09:26:26.640650Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:26.642220Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:26:26.642342Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:26:26.643908Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:26:26.643971Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:26:26.644125Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:26:26.644183Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:26:26.644220Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:26:26.644253Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:26:26.644306Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:26:26.644449Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:26:26.644489Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:26:26.644538Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:26:26.644615Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:26:26.644658Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:26:26.644803Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:26:26.645164Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:26:26.645250Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:26:26.648765Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:26:26.648850Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:26:26.648896Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:26:26.648950Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:26:26.648989Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:26:26.649317Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:26:26.649363Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:26:26.649434Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:26:26.649470Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-28T09:26:26.649533Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:26:26.649562Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:26:26.649607Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:26:26.649638Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:26:26.649662Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:26:26.651278Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:26:26.651357Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:26:26.662091Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 186224037896 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:26:33.782483Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037896 2025-03-28T09:26:33.782523Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037896 has no attached operations 2025-03-28T09:26:33.782562Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037896 2025-03-28T09:26:33.782611Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-03-28T09:26:33.783016Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1360:3078], Recipient [2:1080:2858]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2025-03-28T09:26:33.783052Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1080:2858] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2025-03-28T09:26:33.783083Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1080:2858] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2025-03-28T09:26:33.783186Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1360:3078], Recipient [2:1259:2999]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-28T09:26:33.783217Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-28T09:26:33.783288Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-03-28T09:26:33.783578Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037896, TxId: 281474976715664, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:26:33.783714Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1360:3078], Recipient [2:1080:2858]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: RESPONSE_DATA TxId: 281474976715664 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\006\000\000\000b\005\035B\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\006\000\000\000" 2025-03-28T09:26:33.783748Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1080:2858] 
TxId# 281474976715663] Received stream data from ShardId# 72075186224037896 2025-03-28T09:26:33.783775Z node 2 :TX_PROXY TRACE: [ReadTable [2:1080:2858] TxId# 281474976715663] Sending TEvStreamDataAck to [2:1360:3078] ShardId# 72075186224037896 2025-03-28T09:26:33.783859Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037896, TxId: 281474976715664, PendingAcks: 0 2025-03-28T09:26:33.783926Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1360:3078], Recipient [2:1080:2858]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2025-03-28T09:26:33.783955Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1080:2858] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2025-03-28T09:26:33.784278Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:1079:2858], Recipient [2:1080:2858]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715663 MessageSizeLimit: 1 ReservedMessages: 1 2025-03-28T09:26:33.784316Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1080:2858] TxId# 281474976715663] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-28T09:26:33.784346Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1080:2858] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2025-03-28T09:26:33.784387Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-03-28T09:26:33.784441Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2025-03-28T09:26:33.784590Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:1360:3078], Recipient [2:1080:2858]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715664 ShardId: 72075186224037896 2025-03-28T09:26:33.784621Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1080:2858] TxId# 281474976715663] Received TEvStreamQuotaRelease from ShardId# 72075186224037896 2025-03-28T09:26:33.784647Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1080:2858] TxId# 281474976715663] Released quota 1 reserved messages from ShardId# 72075186224037896 2025-03-28T09:26:33.784694Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037896 2025-03-28T09:26:33.784722Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715664, at: 72075186224037896 2025-03-28T09:26:33.784841Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1259:2999], Recipient [2:1259:2999]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:33.784872Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:33.784915Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-03-28T09:26:33.784942Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:26:33.784971Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715664] at 72075186224037896 for ReadTableScan 2025-03-28T09:26:33.784996Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit ReadTableScan 2025-03-28T09:26:33.785025Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715664] at 72075186224037896 error: , IsFatalError: 0 2025-03-28T09:26:33.785061Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is Executed 
2025-03-28T09:26:33.785088Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit ReadTableScan 2025-03-28T09:26:33.785115Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037896 to execution unit FinishPropose 2025-03-28T09:26:33.785146Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit FinishPropose 2025-03-28T09:26:33.785178Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is DelayComplete 2025-03-28T09:26:33.785204Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit FinishPropose 2025-03-28T09:26:33.785229Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037896 to execution unit CompletedOperations 2025-03-28T09:26:33.785257Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037896 on unit CompletedOperations 2025-03-28T09:26:33.785295Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2025-03-28T09:26:33.785317Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit CompletedOperations 2025-03-28T09:26:33.785339Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037896 has finished 2025-03-28T09:26:33.785368Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:26:33.785391Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037896 2025-03-28T09:26:33.785416Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037896 has no attached operations 2025-03-28T09:26:33.785441Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037896 2025-03-28T09:26:33.785491Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-03-28T09:26:33.785522Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037896 on unit FinishPropose 2025-03-28T09:26:33.785553Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037896 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-28T09:26:33.785618Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-03-28T09:26:33.785819Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1259:2999], Recipient [2:1080:2858]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: COMPLETE TxId: 281474976715664 Step: 0 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037896 CpuTimeUsec: 313 } } 2025-03-28T09:26:33.785852Z node 2 :TX_PROXY DEBUG: [ReadTable [2:1080:2858] TxId# 281474976715663] Received stream complete from ShardId# 72075186224037896 2025-03-28T09:26:33.785908Z node 2 :TX_PROXY INFO: [ReadTable [2:1080:2858] TxId# 281474976715663] RESPONSE Status# ExecComplete prepare time: 0.017619s execute time: 0.628338s total time: 0.645957s 2025-03-28T09:26:33.786313Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1080:2858], Recipient [2:880:2710]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 
281474976715663 2025-03-28T09:26:33.786463Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1080:2858], Recipient [2:990:2792]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-03-28T09:26:33.786755Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1080:2858], Recipient [2:995:2794]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-03-28T09:26:33.787234Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1363:3081], Recipient [2:1144:2915]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:26:33.787271Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:26:33.787308Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1361:3079], serverId# [2:1363:3081], sessionId# [0:0:0] 2025-03-28T09:26:33.787376Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1080:2858], Recipient [2:1256:2997]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-03-28T09:26:33.787541Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1080:2858], Recipient [2:1259:2999]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-03-28T09:26:33.787780Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1080:2858], Recipient [2:1144:2915]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 2025-03-28T09:26:33.787912Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1364:3082], Recipient [2:1149:2917]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:26:33.787946Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:26:33.787978Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1362:3080], serverId# [2:1364:3082], sessionId# [0:0:0] 2025-03-28T09:26:33.788098Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:1080:2858], Recipient [2:1149:2917]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715663 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] Test command err: 2025-03-28T09:26:32.524322Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789388063160093:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:32.525439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e1f/r3tmp/tmp5Jy6eA/pdisk_1.dat 2025-03-28T09:26:32.930606Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:32.964806Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:32.964939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-03-28T09:26:32.970003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:26:33.060171Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:33910 2025-03-28T09:26:33.061201Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7486789392358127917:2258] peer# ipv6:[::1]:33910 2025-03-28T09:26:33.061253Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade read Name# Session peer# ipv6:[::1]:33910 2025-03-28T09:26:33.061417Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] read finished Name# Session ok# false data# peer# ipv6:[::1]:33910 2025-03-28T09:26:33.061514Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2025-03-28T09:26:33.061570Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade finish Name# Session peer# ipv6:[::1]:33910 grpc status# (9) message# Everything is A-OK 2025-03-28T09:26:33.061909Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:33910 2025-03-28T09:26:33.061967Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:33910 grpc status# (9) message# Everything is A-OK 2025-03-28T09:26:33.061999Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:33910 (finish done) 2025-03-28T09:26:33.062024Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] Test command err: 2025-03-28T09:26:32.547727Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789390900081276:2134];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:32.548535Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e38/r3tmp/tmpNARjb8/pdisk_1.dat 2025-03-28T09:26:32.995890Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:33.002868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:33.002953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:33.004823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:26:33.064223Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:47780 2025-03-28T09:26:33.064628Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7486789395195049032:2257] peer# ipv6:[::1]:47780 2025-03-28T09:26:33.064667Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade read Name# Session peer# ipv6:[::1]:47780 2025-03-28T09:26:33.064746Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade write Name# Session data# peer# ipv6:[::1]:47780 2025-03-28T09:26:33.065049Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade finish Name# Session peer# ipv6:[::1]:47780 grpc status# (0) message# 2025-03-28T09:26:33.065545Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] write finished Name# Session ok# true peer# ipv6:[::1]:47780 2025-03-28T09:26:33.065611Z node 1 :GRPC_SERVER 
DEBUG: Received TEvWriteFinished, success = 1 2025-03-28T09:26:33.065772Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] read finished Name# Session ok# false data# peer# ipv6:[::1]:47780 2025-03-28T09:26:33.065797Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:47780 2025-03-28T09:26:33.065829Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 0 2025-03-28T09:26:33.065833Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:47780 grpc status# (0) message# 2025-03-28T09:26:33.065841Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2025-03-28T09:26:33.065910Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:47780 (finish done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] Test command err: 2025-03-28T09:26:25.621277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:26:25.621701Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:26:25.621776Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a8a/r3tmp/tmpXx68xJ/pdisk_1.dat 2025-03-28T09:26:26.142899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:26:26.212123Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:26.249237Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:26:26.250168Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:26:26.250414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:26.250547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:26.263944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:26:26.358942Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:26:26.359071Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:26:26.360828Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:26:26.503223Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:26:26.503339Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:26:26.503950Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:26:26.504049Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:26:26.504381Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:26:26.504602Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:26:26.504713Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:26:26.505018Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:26:26.506478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:26:26.507513Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:26:26.507595Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:26:26.542171Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:26:26.543217Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:26:26.543688Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:26:26.543983Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:26:26.592236Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:26:26.593037Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:26:26.593212Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:26:26.595267Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:26:26.595343Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:26:26.595470Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:26:26.596892Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:26:26.597038Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:26:26.597113Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:26:26.609458Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:26:26.666142Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:26:26.666364Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:26:26.666482Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:26:26.666517Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:26:26.666552Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:26:26.666584Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:26:26.666860Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
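
The EvSchemeRequest record logged a few entries above spells out the test table completely: Name "table-1", Uint32 columns "key" and "value" with NotNull: false (i.e. optional), KeyColumnNames "key", UniformPartitionsCount 1. Purely as an illustrative mapping — the test drives the scheme shard through C++ internals, not this API — a hedged sketch of the same description via the public ydb Python SDK; the SDK names used (ydb.TableDescription, ydb.Column, ydb.OptionalType, ydb.PrimitiveType.Uint32, session.create_table) are assumptions about the public client, and the uniform-partitioning hint is omitted:

    import ydb

    # NotNull: false in the logged proto -> optional column types.
    description = (
        ydb.TableDescription()
        .with_column(ydb.Column("key", ydb.OptionalType(ydb.PrimitiveType.Uint32)))
        .with_column(ydb.Column("value", ydb.OptionalType(ydb.PrimitiveType.Uint32)))
        .with_primary_key("key")
    )

    def create_table_1(session):
        # Same path as in the trace: /Root/table-1.
        session.create_table("/Root/table-1", description)
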
2025-03-28T09:26:26.666911Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:26.667248Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:26:26.667340Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:26:26.667782Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:26:26.667829Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:26:26.667905Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:26:26.667955Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:26:26.667985Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:26:26.668017Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:26:26.668061Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:26:26.668174Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:26:26.668206Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:26:26.668248Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:26:26.668312Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:26:26.668348Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:26:26.668467Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:26:26.668714Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:26:26.668786Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:26:26.668881Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:26:26.668931Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:26:26.668967Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:26:26.669004Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:26:26.669036Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:26:26.669306Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:26:26.669344Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:26:26.669391Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:26:26.669423Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-28T09:26:26.669475Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:26:26.669506Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:26:26.669544Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:26:26.669574Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:26:26.669606Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:26:26.671796Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:26:26.671931Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:26:26.687338Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... planned 0 immediate 1 planned 0 2025-03-28T09:26:33.654466Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715663] at 72075186224037890 for WaitForStreamClearance 2025-03-28T09:26:33.654504Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit WaitForStreamClearance 2025-03-28T09:26:33.654609Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715663] at 72075186224037890 2025-03-28T09:26:33.654664Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-03-28T09:26:33.654708Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit WaitForStreamClearance 2025-03-28T09:26:33.654733Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037890 to execution unit ReadTableScan 2025-03-28T09:26:33.654761Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2025-03-28T09:26:33.654940Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Continue 2025-03-28T09:26:33.654964Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:26:33.654988Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-03-28T09:26:33.655021Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-03-28T09:26:33.655061Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-28T09:26:33.655101Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:26:33.655477Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1000:2804], Recipient [2:885:2712]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-28T09:26:33.655523Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-28T09:26:33.655605Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1000:2804], Recipient [2:970:2776]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2025-03-28T09:26:33.655636Z node 2 :TX_PROXY DEBUG: 
[ReadTable [2:970:2776] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-03-28T09:26:33.655690Z node 2 :TX_PROXY DEBUG: [ReadTable [2:970:2776] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... observed row limit of 2 rows at [2:1000:2804] 2025-03-28T09:26:33.655818Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2025-03-28T09:26:33.656155Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:26:33.656353Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1000:2804], Recipient [2:970:2776]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-03-28T09:26:33.656403Z node 2 :TX_PROXY DEBUG: [ReadTable [2:970:2776] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2025-03-28T09:26:33.656439Z node 2 :TX_PROXY TRACE: [ReadTable [2:970:2776] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1000:2804] ShardId# 72075186224037890 2025-03-28T09:26:33.656493Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2025-03-28T09:26:33.656570Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:1000:2804], Recipient [2:970:2776]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2025-03-28T09:26:33.656595Z node 2 :TX_PROXY DEBUG: [ReadTable [2:970:2776] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-03-28T09:26:33.656904Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:969:2776], Recipient [2:970:2776]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715662 MessageSizeLimit: 1 ReservedMessages: 1 2025-03-28T09:26:33.656936Z node 2 :TX_PROXY DEBUG: [ReadTable [2:970:2776] TxId# 281474976715662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-28T09:26:33.656962Z node 2 :TX_PROXY DEBUG: [ReadTable [2:970:2776] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 1 rows at [2:1000:2804] 2025-03-28T09:26:33.657013Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2025-03-28T09:26:33.657086Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:26:33.657260Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:1000:2804], Recipient [2:970:2776]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\005\000\000\000b\005\0357\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\005\000\000\000" 2025-03-28T09:26:33.657292Z node 2 :TX_PROXY DEBUG: [ReadTable [2:970:2776] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2025-03-28T09:26:33.657317Z node 2 :TX_PROXY TRACE: [ReadTable [2:970:2776] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1000:2804] ShardId# 72075186224037890 2025-03-28T09:26:33.657387Z node 2 :TX_PROXY INFO: [ReadTable [2:970:2776] TxId# 281474976715662] RESPONSE Status# ExecComplete prepare time: 0.017019s execute time: 0.186661s total time: 0.203680s 2025-03-28T09:26:33.657535Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2025-03-28T09:26:33.657574Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 0 2025-03-28T09:26:33.657743Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-03-28T09:26:33.657767Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715663, at: 72075186224037890 2025-03-28T09:26:33.658017Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:970:2776], Recipient [2:880:2710]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 2025-03-28T09:26:33.658312Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:885:2712], Recipient [2:885:2712]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:33.658333Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:33.658397Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:26:33.658426Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:26:33.658460Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715663] at 72075186224037890 for ReadTableScan 2025-03-28T09:26:33.658493Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2025-03-28T09:26:33.658519Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715663] at 72075186224037890 error: , IsFatalError: 0 2025-03-28T09:26:33.658544Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-03-28T09:26:33.658564Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit ReadTableScan 2025-03-28T09:26:33.658582Z node 2 :TX_DATASHARD TRACE: Add 
[0:281474976715663] at 72075186224037890 to execution unit FinishPropose 2025-03-28T09:26:33.658600Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit FinishPropose 2025-03-28T09:26:33.658622Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is DelayComplete 2025-03-28T09:26:33.658645Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit FinishPropose 2025-03-28T09:26:33.658670Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2025-03-28T09:26:33.659106Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037890 on unit CompletedOperations 2025-03-28T09:26:33.659164Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2025-03-28T09:26:33.659189Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2025-03-28T09:26:33.659216Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037890 has finished 2025-03-28T09:26:33.659245Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:26:33.659269Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-03-28T09:26:33.659295Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-03-28T09:26:33.659325Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-28T09:26:33.659376Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:26:33.659404Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037890 on unit FinishPropose 2025-03-28T09:26:33.659456Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-28T09:26:33.659524Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:26:33.659808Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549569, Sender [2:970:2776], Recipient [2:885:2712]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715663 2025-03-28T09:26:33.659848Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2025-03-28T09:26:33.659885Z node 2 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037890 txId 281474976715663 2025-03-28T09:26:33.659947Z node 2 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037890 txId 281474976715663 2025-03-28T09:26:33.660094Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287431, Sender [2:970:2776], Recipient [2:885:2712]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715663 2025-03-28T09:26:33.660137Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2025-03-28T09:26:33.660266Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:970:2776], Recipient [2:885:2712]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715662 ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] Test command err: 2025-03-28T09:26:32.526846Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789389051054521:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:32.530856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e39/r3tmp/tmpjuPIyf/pdisk_1.dat 2025-03-28T09:26:32.938915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:32.943009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:32.947190Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:32.948513Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:26:33.084883Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:47324 2025-03-28T09:26:33.085236Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7486789393346022342:2256] peer# ipv6:[::1]:47324 2025-03-28T09:26:33.085274Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade read Name# Session peer# ipv6:[::1]:47324 2025-03-28T09:26:33.088588Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] read finished Name# Session ok# true data# peer# ipv6:[::1]:47324 2025-03-28T09:26:33.088684Z node 1 :GRPC_SERVER DEBUG: Received TEvReadFinished, success = 1 2025-03-28T09:26:33.088745Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade write Name# Session data# peer# ipv6:[::1]:47324 2025-03-28T09:26:33.089075Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade finish Name# Session peer# ipv6:[::1]:47324 grpc status# (0) message# 2025-03-28T09:26:33.089092Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] write finished Name# Session ok# true peer# ipv6:[::1]:47324 2025-03-28T09:26:33.089350Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:47324 2025-03-28T09:26:33.089463Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:47324 grpc status# (0) message# 2025-03-28T09:26:33.089541Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:47324 (finish done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects [GOOD] Test command err: 2025-03-28T09:26:32.558975Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789391716082520:2135];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:32.559505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e68/r3tmp/tmpkQMR3F/pdisk_1.dat 2025-03-28T09:26:32.940403Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:32.997234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:32.997381Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:32.998551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:26:33.101623Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:48358 2025-03-28T09:26:33.101641Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:48358 2025-03-28T09:26:33.102939Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7486789396011050266:2258] peer# ipv6:[::1]:48358 2025-03-28T09:26:33.102977Z node 1 :GRPC_SERVER DEBUG: Received TEvNotifiedWhenDone 2025-03-28T09:26:33.103303Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# false peer# unknown grpc status# (1) message# Request abandoned 2025-03-28T09:26:33.103401Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# unknown (finish done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD] Test command err: 2025-03-28T09:25:28.574294Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789114154752954:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:28.575295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00094d/r3tmp/tmp4zZyzd/pdisk_1.dat 2025-03-28T09:25:29.242939Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:29.307710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:29.307824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:29.310106Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15722, node 1 2025-03-28T09:25:29.538386Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:29.538406Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:29.538411Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:29.538520Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12732 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:30.118641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.144453Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:12732 2025-03-28T09:25:30.445668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.467830Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:25:30.474167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.493176Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-28T09:25:30.499559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.649314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:30.722848Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.733432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.812269Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-03-28T09:25:30.817010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:30.907319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.960553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.003490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.088297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.125920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.185985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:32.454804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789131334623561:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.454810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789131334623569:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.454919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.458072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-28T09:25:32.467120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789131334623575:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-28T09:25:32.557043Z node 1 :TX_PROXY ERROR: Actor# [1:7486789131334623626:2918] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:33.319584Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqe14a53aq0s9hqvtws6w8aw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDg1ZDE2MWUtYjQ2MzlkY2YtZjk2NWJhNy1jNWEwZWZlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.343851Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqe14a53aq0s9hqvtws6w8aw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDg1ZDE2MWUtYjQ2MzlkY2YtZjk2NWJhNy1jNWEwZWZlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.348906Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqe14a53aq0s9hqvtws6w8aw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDg1ZDE2MWUtYjQ2MzlkY2YtZjk2NWJhNy1jNWEwZWZlMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.389807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.464516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.497749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.527485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.567707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.583932Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789114154752954:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:33.584101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:33.603080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.635236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:33.702796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.739328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.814725Z node 1 :HTTP INFO: Listening on http://127.0.0.1:19404 2025-03-28T09:25:34.816822Z node ... e DELETING 2025-03-28T09:26:30.054431Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] TxId 281474976710693, State DELETING 2025-03-28T09:26:30.054445Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] delete TxId 281474976710693 2025-03-28T09:26:30.054469Z node 8 :SQS DEBUG: [cleanup removed queues] there are no queues to delete 2025-03-28T09:26:30.054729Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] Registered with mediator time cast 2025-03-28T09:26:30.054744Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] Registered with mediator time cast 2025-03-28T09:26:30.054773Z node 8 :HTTP_PROXY INFO: http request [CreateStream] requestId [fe6f2b78-1a50db0-2784a487-7315e21] reply ok 2025-03-28T09:26:30.055061Z node 8 :HTTP DEBUG: (#37,[::1]:36616) <- (200 ) 2025-03-28T09:26:30.055150Z node 8 :HTTP DEBUG: (#37,[::1]:36616) connection closed 2025-03-28T09:26:30.055928Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:26:30.055945Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Try execute txs with state EXECUTED 2025-03-28T09:26:30.055959Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] TxId 281474976710693, State EXECUTED 2025-03-28T09:26:30.055976Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] TxId 281474976710693 State EXECUTED FrontTxId 281474976710693 2025-03-28T09:26:30.055995Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] TPersQueue::SendEvReadSetAckToSenders 2025-03-28T09:26:30.056011Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] TxId 281474976710693, NewState WAIT_RS_ACKS 2025-03-28T09:26:30.056026Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] TxId 281474976710693 moved from EXECUTED to WAIT_RS_ACKS 2025-03-28T09:26:30.056049Z node 8 :PERSQUEUE DEBUG: [TxId: 281474976710693] PredicateAcks: 0/0 2025-03-28T09:26:30.056057Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-28T09:26:30.056068Z node 8 :PERSQUEUE DEBUG: [TxId: 281474976710693] PredicateAcks: 0/0 2025-03-28T09:26:30.056085Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] add an TxId 281474976710693 to the list for deletion 2025-03-28T09:26:30.056104Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] TxId 281474976710693, NewState DELETING 2025-03-28T09:26:30.056125Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] delete key for TxId 281474976710693 2025-03-28T09:26:30.056173Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:26:30.057194Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:26:30.057210Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] Try execute txs with state DELETING 2025-03-28T09:26:30.057223Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] TxId 281474976710693, State DELETING 2025-03-28T09:26:30.057239Z node 8 :PERSQUEUE DEBUG: [PQ: 
72075186224037911] delete TxId 281474976710693 2025-03-28T09:26:30.107750Z node 8 :HTTP DEBUG: (#37,[::1]:36630) incoming connection opened 2025-03-28T09:26:30.107813Z node 8 :HTTP DEBUG: (#37,[::1]:36630) -> (POST /Root) 2025-03-28T09:26:30.107925Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [5879:6600:6050:0:4079:6600:6050:0] request [ListShards] url [/Root] database [/Root] requestId: ecededd-ee59716f-1ecc2059-294241c8 2025-03-28T09:26:30.108354Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [ecededd-ee59716f-1ecc2059-294241c8] got new request from [5879:6600:6050:0:4079:6600:6050:0] database '/Root' stream 'teststream' 2025-03-28T09:26:30.108916Z node 8 :HTTP_PROXY DEBUG: http request [ListShards] requestId [ecededd-ee59716f-1ecc2059-294241c8] [auth] Authorized successfully 2025-03-28T09:26:30.109009Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [ecededd-ee59716f-1ecc2059-294241c8] sending grpc request to '' database: '/Root' iam token size: 0 E0000 00:00:1743153990.109090 469859 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-03-28T09:26:30.112032Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7486789381790514610:2536], now have 1 active actors on pipe 2025-03-28T09:26:30.112113Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7486789381790514611:2537], now have 1 active actors on pipe 2025-03-28T09:26:30.112983Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [ecededd-ee59716f-1ecc2059-294241c8] reply ok 2025-03-28T09:26:30.113230Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7486789381790514610:2536] destroyed 2025-03-28T09:26:30.113261Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7486789381790514611:2537] destroyed 2025-03-28T09:26:30.113343Z node 8 :HTTP DEBUG: (#37,[::1]:36630) <- (200 ) Http output full {"NextToken":"COCTzODdMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-03-28T09:26:30.113426Z node 8 :HTTP DEBUG: (#37,[::1]:36630) connection closed 200 {"NextToken":"COCTzODdMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-03-28T09:26:30.114874Z node 8 :HTTP DEBUG: (#37,[::1]:36632) incoming connection opened 2025-03-28T09:26:30.114935Z node 8 :HTTP DEBUG: (#37,[::1]:36632) -> (POST /Root) 2025-03-28T09:26:30.115088Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [38b7:5700:6050:0:20b7:5700:6050:0] request [ListShards] url [/Root] database [/Root] requestId: 433ad2ce-4d6fc942-93011069-4bce8b98 2025-03-28T09:26:30.115506Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId 
[433ad2ce-4d6fc942-93011069-4bce8b98] got new request from [38b7:5700:6050:0:20b7:5700:6050:0] database '/Root' stream 'teststream' E0000 00:00:1743153990.116088 469860 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-03-28T09:26:30.115937Z node 8 :HTTP_PROXY DEBUG: http request [ListShards] requestId [433ad2ce-4d6fc942-93011069-4bce8b98] [auth] Authorized successfully 2025-03-28T09:26:30.116024Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [433ad2ce-4d6fc942-93011069-4bce8b98] sending grpc request to '' database: '/Root' iam token size: 0 2025-03-28T09:26:30.117130Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7486789381790514622:2541], now have 1 active actors on pipe 2025-03-28T09:26:30.117212Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7486789381790514623:2542], now have 1 active actors on pipe 2025-03-28T09:26:30.127256Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [433ad2ce-4d6fc942-93011069-4bce8b98] reply ok 2025-03-28T09:26:30.127596Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7486789381790514622:2541] destroyed 2025-03-28T09:26:30.127700Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7486789381790514623:2542] destroyed 2025-03-28T09:26:30.127856Z node 8 :HTTP DEBUG: (#37,[::1]:36632) <- (200 ) 2025-03-28T09:26:30.128025Z node 8 :HTTP DEBUG: (#37,[::1]:36632) connection closed Http output full {"NextToken":"COuTzODdMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"COuTzODdMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2025-03-28T09:26:30.129421Z node 8 :HTTP DEBUG: (#37,[::1]:36646) incoming connection opened 2025-03-28T09:26:30.129492Z node 8 :HTTP DEBUG: (#37,[::1]:36646) -> (POST /Root) 2025-03-28T09:26:30.129605Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [5820:3e00:6050:0:4020:3e00:6050:0] request [ListShards] url [/Root] database [/Root] requestId: 53e083ba-18c805e3-a96fee6a-857ed977 2025-03-28T09:26:30.130052Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [53e083ba-18c805e3-a96fee6a-857ed977] got new request from [5820:3e00:6050:0:4020:3e00:6050:0] database '/Root' stream 'teststream' 2025-03-28T09:26:30.130653Z node 8 :HTTP_PROXY DEBUG: http request [ListShards] requestId [53e083ba-18c805e3-a96fee6a-857ed977] [auth] Authorized successfully E0000 00:00:1743153990.130860 469859 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-03-28T09:26:30.130781Z node 8 :HTTP_PROXY INFO: http 
request [ListShards] requestId [53e083ba-18c805e3-a96fee6a-857ed977] sending grpc request to '' database: '/Root' iam token size: 0 2025-03-28T09:26:30.132053Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7486789381790514634:2546], now have 1 active actors on pipe 2025-03-28T09:26:30.132104Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7486789381790514635:2547], now have 1 active actors on pipe 2025-03-28T09:26:30.132997Z node 8 :HTTP_PROXY INFO: http request [ListShards] requestId [53e083ba-18c805e3-a96fee6a-857ed977] reply ok 2025-03-28T09:26:30.133346Z node 8 :HTTP DEBUG: (#37,[::1]:36646) <- (200 ) 2025-03-28T09:26:30.133422Z node 8 :HTTP DEBUG: (#37,[::1]:36646) connection closed 2025-03-28T09:26:30.133573Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7486789381790514634:2546] destroyed 2025-03-28T09:26:30.133611Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7486789381790514635:2547] destroyed Http output full {"NextToken":"CPSTzODdMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"CPSTzODdMhACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] Test command err: 2025-03-28T09:26:32.523184Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789391177917147:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:32.523235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e4d/r3tmp/tmpxGylht/pdisk_1.dat 2025-03-28T09:26:32.975179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:32.975263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:32.977612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:26:33.007271Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:33.065412Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream accepted Name# Session ok# true peer# ipv6:[::1]:47734 2025-03-28T09:26:33.065705Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade attach Name# Session actor# [1:7486789395472884971:2256] peer# ipv6:[::1]:47734 2025-03-28T09:26:33.065731Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade write Name# Session data# peer# ipv6:[::1]:47734 
2025-03-28T09:26:33.065976Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] facade write Name# Session data# peer# ipv6:[::1]:47734 grpc status# (0) message# 2025-03-28T09:26:33.066178Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] write finished Name# Session ok# true peer# ipv6:[::1]:47734 2025-03-28T09:26:33.066286Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2025-03-28T09:26:33.066463Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream done notification Name# Session ok# true peer# ipv6:[::1]:47734 2025-03-28T09:26:33.066568Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] write finished Name# Session ok# true peer# ipv6:[::1]:47734 2025-03-28T09:26:33.066635Z node 1 :GRPC_SERVER DEBUG: Received TEvWriteFinished, success = 1 2025-03-28T09:26:33.066668Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] stream finished Name# Session ok# true peer# ipv6:[::1]:47734 grpc status# (0) message# 2025-03-28T09:26:33.066806Z node 1 :GRPC_SERVER DEBUG: [0x51f00002a080] deregistering request Name# Session peer# ipv6:[::1]:47734 (finish done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] Test command err: 2025-03-28T09:26:25.932730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:26:25.933173Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:26:25.933223Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a55/r3tmp/tmpbcpE2C/pdisk_1.dat 2025-03-28T09:26:26.340198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:26:26.391953Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:26.431671Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:26:26.432817Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:26:26.433118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:26.433250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:26.446089Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:26:26.531035Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:26:26.531129Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:26:26.531313Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:26:26.655395Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:26:26.655510Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:26:26.656106Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:26:26.656221Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:26:26.656602Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:26:26.656837Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:26:26.656935Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:26:26.657255Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:26:26.658861Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:26:26.659991Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:26:26.660063Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:26:26.692647Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:26:26.693822Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:26:26.694312Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:26:26.694656Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:26:26.743675Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:26:26.744481Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:26:26.744607Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:26:26.746566Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:26:26.746654Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:26:26.746752Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:26:26.747185Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:26:26.747335Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:26:26.747424Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:26:26.758402Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:26:26.797129Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:26:26.797357Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:26:26.797497Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:26:26.797538Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:26:26.797575Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:26:26.797609Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:26:26.797837Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-28T09:26:26.797887Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:26.798236Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:26:26.798347Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:26:26.798811Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:26:26.798853Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:26:26.798915Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:26:26.798953Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:26:26.798984Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:26:26.799011Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:26:26.799051Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:26:26.799157Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:26:26.799189Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:26:26.799226Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:26:26.799289Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:26:26.799322Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:26:26.799448Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:26:26.799711Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:26:26.799774Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:26:26.799872Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:26:26.799919Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:26:26.799953Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:26:26.799993Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:26:26.800022Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:26:26.800262Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:26:26.800294Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:26:26.800355Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:26:26.800385Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-28T09:26:26.800436Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:26:26.800462Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:26:26.800497Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:26:26.800527Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:26:26.800549Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:26:26.801937Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:26:26.801991Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:26:26.812748Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... _DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-03-28T09:26:33.706106Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:860:2694], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715662 Cleared: true 2025-03-28T09:26:33.706138Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-03-28T09:26:33.706338Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:33.706369Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:33.706420Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:26:33.706460Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:26:33.706506Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for WaitForStreamClearance 2025-03-28T09:26:33.706556Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit WaitForStreamClearance 2025-03-28T09:26:33.706598Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715662] at 72075186224037888 2025-03-28T09:26:33.706631Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-03-28T09:26:33.706666Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit WaitForStreamClearance 2025-03-28T09:26:33.706710Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit ReadTableScan 2025-03-28T09:26:33.706739Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2025-03-28T09:26:33.706944Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Continue 2025-03-28T09:26:33.706973Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:26:33.707000Z node 2 :TX_DATASHARD TRACE: Check 
candidate unit PlanQueue at 72075186224037888 2025-03-28T09:26:33.707026Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:26:33.707058Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:26:33.707115Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:26:33.707578Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:872:2705], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-28T09:26:33.707623Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-28T09:26:33.707698Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-03-28T09:26:33.707980Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-28T09:26:33.708023Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-03-28T09:26:33.708277Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-03-28T09:26:33.708452Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:26:33.708549Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-03-28T09:26:33.708618Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-03-28T09:26:33.708669Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-03-28T09:26:33.709063Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-28T09:26:33.709098Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-03-28T09:26:33.709168Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-03-28T09:26:33.709265Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:26:33.709332Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-03-28T09:26:33.709430Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-03-28T09:26:33.709472Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-03-28T09:26:33.709728Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-28T09:26:33.709754Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-03-28T09:26:33.709787Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 
72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-03-28T09:26:33.709838Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:26:33.709882Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2025-03-28T09:26:33.709909Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-03-28T09:26:33.709929Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-03-28T09:26:33.710136Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-28T09:26:33.710164Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2025-03-28T09:26:33.710186Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-03-28T09:26:33.710237Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-03-28T09:26:33.710358Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2025-03-28T09:26:33.710389Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037888 2025-03-28T09:26:33.710433Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:26:33.710470Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037888 2025-03-28T09:26:33.710629Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:33.710675Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:33.710757Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:26:33.710794Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:26:33.710829Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2025-03-28T09:26:33.710861Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2025-03-28T09:26:33.710892Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715662] at 72075186224037888 error: , IsFatalError: 0 2025-03-28T09:26:33.710932Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-03-28T09:26:33.710963Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ReadTableScan 2025-03-28T09:26:33.710994Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:26:33.711018Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-03-28T09:26:33.711044Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is DelayComplete 
2025-03-28T09:26:33.711076Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:26:33.711110Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:26:33.711147Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:26:33.711190Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2025-03-28T09:26:33.711208Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:26:33.711236Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2025-03-28T09:26:33.711269Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:26:33.711296Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-28T09:26:33.711323Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:26:33.711354Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:26:33.711412Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:26:33.711440Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2025-03-28T09:26:33.711473Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-28T09:26:33.711529Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:26:33.711714Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037888 2025-03-28T09:26:33.711790Z node 2 :TX_PROXY INFO: [ReadTable [2:860:2694] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.013596s execute time: 0.109195s total time: 0.122791s 2025-03-28T09:26:33.712048Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:860:2694], Recipient [2:666:2570]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 >> YdbOlapStore::BulkUpsert [GOOD] >> YdbOlapStore::DuplicateRows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] Test command err: 2025-03-28T09:26:25.591483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:26:25.592049Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:26:25.592107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a75/r3tmp/tmpudcsp6/pdisk_1.dat 2025-03-28T09:26:26.140504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:26:26.200221Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:26.245970Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:26:26.250207Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:26:26.250483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:26.250616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:26.266960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:26:26.359482Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:26:26.359601Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:26:26.360947Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:26:26.533771Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:26:26.533844Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:26:26.534245Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:26:26.534328Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:26:26.534541Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:26:26.534670Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:26:26.534772Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:26:26.535023Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:26:26.536187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:26:26.537211Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:26:26.537303Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:26:26.567243Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:26:26.568124Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:26:26.568544Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:26:26.568757Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:26:26.606993Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:26:26.607843Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:26:26.607973Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:26:26.609732Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:26:26.609805Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:26:26.609859Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:26:26.610231Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:26:26.610355Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:26:26.610428Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:26:26.621234Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:26:26.694289Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:26:26.694502Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:26:26.694649Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:26:26.694701Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:26:26.694741Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:26:26.694771Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:26:26.695010Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-28T09:26:26.695059Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:26.695428Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:26:26.695534Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:26:26.696058Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:26:26.696113Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:26:26.696176Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:26:26.696212Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:26:26.696242Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:26:26.696270Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:26:26.696310Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:26:26.696409Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:26:26.696441Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:26:26.696481Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:26:26.696545Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:26:26.696580Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:26:26.696738Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:26:26.696961Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:26:26.697033Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:26:26.697145Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:26:26.697195Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:26:26.697230Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:26:26.697263Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:26:26.697292Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:26:26.697537Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:26:26.697570Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:26:26.697617Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:26:26.697648Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-28T09:26:26.697700Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:26:26.697729Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:26:26.697761Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:26:26.697787Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:26:26.697809Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:26:26.699219Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:26:26.699280Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:26:26.710215Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 281474976715659] at 72075186224037888 is Executed 2025-03-28T09:26:33.157887Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit MakeScanSnapshot 2025-03-28T09:26:33.157911Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit WaitForStreamClearance 2025-03-28T09:26:33.157933Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2025-03-28T09:26:33.158001Z node 2 :TX_DATASHARD TRACE: Requested stream clearance from [2:745:2626] for [0:281474976715659] at 72075186224037888 2025-03-28T09:26:33.158041Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2025-03-28T09:26:33.158104Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:26:33.158261Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287427, Sender [2:666:2570], Recipient [2:745:2626]: NKikimrTx.TEvStreamClearanceRequest TxId: 281474976715659 ShardId: 72075186224037888 KeyRange { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } 2025-03-28T09:26:33.158304Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvStreamClearanceRequest from ShardId# 72075186224037888 2025-03-28T09:26:33.158368Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Sending TEvStreamClearanceResponse to [2:666:2570] ShardId# 72075186224037888 2025-03-28T09:26:33.158769Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287942, Sender [2:745:2626], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715659 2025-03-28T09:26:33.158834Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2025-03-28T09:26:33.159026Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:745:2626], Recipient [2:666:2570]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715659 Cleared: true 2025-03-28T09:26:33.159082Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-03-28T09:26:33.159231Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient 
[2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:33.159261Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:33.159320Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:26:33.159356Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:26:33.159402Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2025-03-28T09:26:33.159443Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2025-03-28T09:26:33.159488Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [0:281474976715659] at 72075186224037888 2025-03-28T09:26:33.159530Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-03-28T09:26:33.159563Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit WaitForStreamClearance 2025-03-28T09:26:33.159596Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit ReadTableScan 2025-03-28T09:26:33.159628Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-03-28T09:26:33.159874Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2025-03-28T09:26:33.159906Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:26:33.159937Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-28T09:26:33.159968Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:26:33.159996Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:26:33.160060Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:26:33.160555Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:779:2647], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-28T09:26:33.160597Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-28T09:26:33.160707Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:779:2647], Recipient [2:745:2626]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715659 ShardId: 72075186224037888 2025-03-28T09:26:33.160750Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2025-03-28T09:26:33.161148Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:744:2626], Recipient [2:745:2626]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715658 MessageSizeLimit: 1 ReservedMessages: 1 2025-03-28T09:26:33.161201Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-28T09:26:33.161246Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Reserving quota 1 messages for ShardId# 72075186224037888 2025-03-28T09:26:33.161323Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 
72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-03-28T09:26:33.161455Z node 2 :TX_DATASHARD ERROR: Got scan fatal error: Invalid DyNumber binary representation 2025-03-28T09:26:33.161513Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2025-03-28T09:26:33.161696Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:26:33.161735Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715659, at: 72075186224037888 2025-03-28T09:26:33.161852Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:779:2647], Recipient [2:745:2626]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715659 ShardId: 72075186224037888 2025-03-28T09:26:33.161889Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2025-03-28T09:26:33.161936Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Released quota 1 reserved messages from ShardId# 72075186224037888 2025-03-28T09:26:33.162081Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:666:2570], Recipient [2:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:33.162147Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:33.162215Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:26:33.162261Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:26:33.162307Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2025-03-28T09:26:33.162348Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2025-03-28T09:26:33.162402Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715659] at 72075186224037888 error: Invalid DyNumber binary representation, IsFatalError: 1 2025-03-28T09:26:33.162463Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-03-28T09:26:33.162512Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit ReadTableScan 2025-03-28T09:26:33.162554Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:26:33.162590Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-03-28T09:26:33.162629Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is DelayComplete 2025-03-28T09:26:33.162659Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:26:33.162722Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715659] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:26:33.162759Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715659] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:26:33.162805Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2025-03-28T09:26:33.162830Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit CompletedOperations 
2025-03-28T09:26:33.162858Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715659] at 72075186224037888 has finished 2025-03-28T09:26:33.162899Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:26:33.162944Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-28T09:26:33.162982Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:26:33.163030Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:26:33.163124Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:26:33.163167Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715659] at 72075186224037888 on unit FinishPropose 2025-03-28T09:26:33.163219Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715659 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: EXEC_ERROR 2025-03-28T09:26:33.163282Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715659 at tablet 72075186224037888 status: EXEC_ERROR errors: PROGRAM_ERROR (Invalid DyNumber binary representation) | 2025-03-28T09:26:33.163367Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:26:33.163693Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:666:2570], Recipient [2:745:2626]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037888 Status: EXEC_ERROR Error { Kind: PROGRAM_ERROR Reason: "Invalid DyNumber binary representation" } TxId: 281474976715659 Step: 0 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 493 } } 2025-03-28T09:26:33.163748Z node 2 :TX_PROXY DEBUG: [ReadTable [2:745:2626] TxId# 281474976715658] Received TEvProposeTransactionResult Status# EXEC_ERROR ShardId# 72075186224037888 2025-03-28T09:26:33.163818Z node 2 :TX_PROXY ERROR: [ReadTable [2:745:2626] TxId# 281474976715658] RESPONSE Status# ExecError shard: 72075186224037888 table: /Root/Table 2025-03-28T09:26:33.164190Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:745:2626], Recipient [2:666:2570]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 1500 TxId: 281474976715658 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] Test command err: 2025-03-28T09:26:25.768916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:26:25.769439Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:26:25.769517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a18/r3tmp/tmpnkQee2/pdisk_1.dat 2025-03-28T09:26:26.158732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:26:26.214347Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:26.264571Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:26:26.265659Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:26:26.265977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:26.266131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:26.278264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:26:26.360939Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:26:26.361032Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:26:26.361196Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:26:26.471166Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:26:26.471297Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:26:26.471939Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:26:26.472054Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:26:26.472427Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:26:26.472754Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:26:26.472944Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:26:26.473293Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:26:26.480799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:26:26.482934Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:26:26.483025Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:26:26.536450Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:26:26.537626Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:26:26.539030Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:26:26.539310Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:26:26.593177Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:26:26.594009Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:26:26.594139Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:26:26.596441Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:26:26.596545Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:26:26.596609Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:26:26.597031Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:26:26.597186Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:26:26.597272Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:26:26.608165Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:26:26.670414Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:26:26.670652Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:26:26.670799Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:26:26.670838Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:26:26.670866Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:26:26.670925Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:26:26.671127Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-28T09:26:26.671169Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:26.671466Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:26:26.671553Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:26:26.671986Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:26:26.672041Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:26:26.672132Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:26:26.672271Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:26:26.672304Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:26:26.672338Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:26:26.672388Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:26:26.672508Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:26:26.672538Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:26:26.672574Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:26:26.672625Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:26:26.672658Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:26:26.672782Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:26:26.673018Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:26:26.673099Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:26:26.673216Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:26:26.673287Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:26:26.673337Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:26:26.673376Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:26:26.673415Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:26:26.673789Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:26:26.673826Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:26:26.673877Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:26:26.673907Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-28T09:26:26.673953Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:26:26.673978Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:26:26.674007Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:26:26.674033Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:26:26.674054Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:26:26.675534Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:26:26.675597Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:26:26.687252Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 86224037890 has no attached operations 2025-03-28T09:26:33.552149Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-28T09:26:33.552203Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:26:33.552641Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:997:2800], Recipient [2:860:2694]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-03-28T09:26:33.552678Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-03-28T09:26:33.552714Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-03-28T09:26:33.552797Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:997:2800], Recipient [2:899:2725]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-28T09:26:33.552828Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-28T09:26:33.552901Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-03-28T09:26:33.553174Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:26:33.553291Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:997:2800], Recipient [2:860:2694]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\003\000\000\000b\005\035!\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\003\000\000\000" 2025-03-28T09:26:33.553325Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2025-03-28T09:26:33.553356Z node 2 :TX_PROXY TRACE: [ReadTable [2:860:2694] TxId# 281474976715661] Sending TEvStreamDataAck to [2:997:2800] ShardId# 72075186224037890 
2025-03-28T09:26:33.553445Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:997:2800], Recipient [2:860:2694]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-03-28T09:26:33.553472Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-03-28T09:26:33.553512Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2025-03-28T09:26:33.553829Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:859:2694], Recipient [2:860:2694]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2025-03-28T09:26:33.553861Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-28T09:26:33.553891Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-03-28T09:26:33.553935Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-03-28T09:26:33.554018Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:26:33.554139Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:997:2800], Recipient [2:860:2694]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2025-03-28T09:26:33.554183Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2025-03-28T09:26:33.554212Z node 2 :TX_PROXY TRACE: [ReadTable [2:860:2694] TxId# 281474976715661] Sending TEvStreamDataAck to [2:997:2800] ShardId# 72075186224037890 2025-03-28T09:26:33.554280Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2025-03-28T09:26:33.554342Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287428, Sender [2:997:2800], Recipient [2:860:2694]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2025-03-28T09:26:33.554369Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2025-03-28T09:26:33.554622Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287941, Sender [2:859:2694], Recipient [2:860:2694]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2025-03-28T09:26:33.554651Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2025-03-28T09:26:33.554677Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2025-03-28T09:26:33.554742Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 
2025-03-28T09:26:33.554798Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2025-03-28T09:26:33.554958Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269287429, Sender [2:997:2800], Recipient [2:860:2694]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715662 ShardId: 72075186224037890 2025-03-28T09:26:33.554986Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037890 2025-03-28T09:26:33.555015Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037890 2025-03-28T09:26:33.555064Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-03-28T09:26:33.555092Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037890 2025-03-28T09:26:33.555226Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:899:2725], Recipient [2:899:2725]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:33.555257Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:26:33.555308Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:26:33.555344Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:26:33.555378Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037890 for ReadTableScan 2025-03-28T09:26:33.555406Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit ReadTableScan 2025-03-28T09:26:33.555440Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [0:281474976715662] at 72075186224037890 error: , IsFatalError: 0 2025-03-28T09:26:33.555480Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2025-03-28T09:26:33.555511Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit ReadTableScan 2025-03-28T09:26:33.555547Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037890 to execution unit FinishPropose 2025-03-28T09:26:33.555576Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit FinishPropose 2025-03-28T09:26:33.555613Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is DelayComplete 2025-03-28T09:26:33.555655Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit FinishPropose 2025-03-28T09:26:33.555685Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715662] at 72075186224037890 to execution unit CompletedOperations 2025-03-28T09:26:33.555713Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715662] at 72075186224037890 on unit CompletedOperations 2025-03-28T09:26:33.555754Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2025-03-28T09:26:33.555779Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit CompletedOperations 2025-03-28T09:26:33.555802Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715662] at 72075186224037890 has finished 2025-03-28T09:26:33.555827Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry 
run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:26:33.555853Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-03-28T09:26:33.555882Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-03-28T09:26:33.555907Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-28T09:26:33.555964Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:26:33.555998Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715662] at 72075186224037890 on unit FinishPropose 2025-03-28T09:26:33.556038Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715662 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-28T09:26:33.556143Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:26:33.556382Z node 2 :TX_PROXY TRACE: StateReadTable, received event# 269550080, Sender [2:899:2725], Recipient [2:860:2694]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715662 Step: 0 OrderId: 281474976715662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 328 } } 2025-03-28T09:26:33.556426Z node 2 :TX_PROXY DEBUG: [ReadTable [2:860:2694] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037890 2025-03-28T09:26:33.556488Z node 2 :TX_PROXY INFO: [ReadTable [2:860:2694] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.014234s execute time: 0.339062s total time: 0.353296s 2025-03-28T09:26:33.556835Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:860:2694], Recipient [2:666:2570]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-03-28T09:26:33.557062Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:860:2694], Recipient [2:897:2723]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-03-28T09:26:33.557289Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:860:2694], Recipient [2:899:2725]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] Test command err: 2025-03-28T09:25:28.613307Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789116834986658:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:28.613912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000966/r3tmp/tmpo78vua/pdisk_1.dat 2025-03-28T09:25:29.324146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:29.324241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-03-28T09:25:29.335318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:29.394821Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5484, node 1 2025-03-28T09:25:29.539529Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:29.539565Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:29.539574Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:29.539722Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24314 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:30.120544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.141069Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:24314 waiting... 2025-03-28T09:25:30.384974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:25:30.401579Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:25:30.403944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.414714Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.421211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:25:30.608282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:25:30.685797Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2025-03-28T09:25:30.690192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:30.738130Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2025-03-28T09:25:30.743048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.825809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.869204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.918527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:30.978464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.036326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:31.087266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:32.484180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789134014857161:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.484279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789134014857153:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.484417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:32.488764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-28T09:25:32.504943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789134014857167:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-28T09:25:32.604609Z node 1 :TX_PROXY ERROR: Actor# [1:7486789134014857218:2920] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:33.319611Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqe14a5z36rmer7bwws7cdbm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTZiNjAxMjEtYzdjZWRhYy1jMmJiOGM4MS04YTIxNzIwOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.340678Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqe14a5z36rmer7bwws7cdbm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTZiNjAxMjEtYzdjZWRhYy1jMmJiOGM4MS04YTIxNzIwOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.348820Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqe14a5z36rmer7bwws7cdbm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTZiNjAxMjEtYzdjZWRhYy1jMmJiOGM4MS04YTIxNzIwOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:33.413850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.458563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:33.495145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:25:33.568015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.591512Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789116834986658:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:33.591571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; waiting... 2025-03-28T09:25:33.602840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:25:33.642117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.683444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 
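The resource-pool warnings above trace a create-if-missing race: the pool fetch returns NOT_FOUND, a create is proposed, the creator schedules a retry on "Transaction ... completed, doublechecking", and a concurrent create then fails benignly with "path exist, request accepts it". A generic sketch of that pattern (function names and error strings are illustrative, not YDB's actual interfaces):

    import time

    def ensure_exists(fetch, create, retries=3, delay=0.1):
        """Create-if-missing with a doublecheck pass, tolerating concurrent creators."""
        for _ in range(retries):
            if fetch():                          # pool already visible: done
                return True
            err = create()                       # may race with another creator
            if err in (None, "ALREADY_EXISTS"):  # 'path exist, request accepts it'
                time.sleep(delay)                # doublecheck: wait for visibility
                continue
            raise RuntimeError(err)
        return fetch()
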
2025-03-28T09:25:33.722551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.827518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:33.882606Z node 1 :HTTP INFO: Listening on http://127.0.0.1:19602 2025-03-28T09:25:34.887989Z node ... t "Service1_id@as" 2025-03-28T09:26:34.927516Z node 8 :TICKET_PARSER DEBUG: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2025-03-28T09:26:34.930792Z node 8 :HTTP_PROXY DEBUG: http request [DescribeStream] requestId [9e7f79b-c7950f3-27b8dd71-8d1f91b5] [auth] Authorized successfully 2025-03-28T09:26:34.931010Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [9e7f79b-c7950f3-27b8dd71-8d1f91b5] sending grpc request to '' database: '/Root' iam token size: 0 2025-03-28T09:26:34.933648Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7486789399606344621:2535], now have 1 active actors on pipe 2025-03-28T09:26:34.933732Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server connected, pipe [8:7486789399606344622:2536], now have 1 active actors on pipe 2025-03-28T09:26:34.933825Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server connected, pipe [8:7486789399606344623:2537], now have 1 active actors on pipe 2025-03-28T09:26:34.933885Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [8:7486789399606344624:2538], now have 1 active actors on pipe 2025-03-28T09:26:34.933943Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7486789399606344625:2539], now have 1 active actors on pipe 2025-03-28T09:26:34.935274Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [8:7486789399606344624:2538] destroyed 2025-03-28T09:26:34.935305Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7486789399606344625:2539] destroyed 2025-03-28T09:26:34.935328Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7486789399606344621:2535] destroyed 2025-03-28T09:26:34.935359Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server disconnected, pipe [8:7486789399606344622:2536] destroyed 2025-03-28T09:26:34.935379Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [8:7486789399606344623:2537] destroyed 2025-03-28T09:26:34.936363Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [9e7f79b-c7950f3-27b8dd71-8d1f91b5] reply ok Http output full 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1743153995,"StorageLimitMb":0,"StreamName":"testtopic"}} 200 {"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1743153995,"StorageLimitMb":0,"StreamName":"testtopic"}} 2025-03-28T09:26:34.936654Z node 8 :HTTP DEBUG: (#37,[::1]:59644) <- (200 ) 2025-03-28T09:26:34.936753Z node 8 :HTTP DEBUG: (#37,[::1]:59644) connection closed 2025-03-28T09:26:34.939039Z node 8 :HTTP 
DEBUG: (#37,[::1]:59652) incoming connection opened 2025-03-28T09:26:34.939105Z node 8 :HTTP DEBUG: (#37,[::1]:59652) -> (POST /Root) 2025-03-28T09:26:34.939239Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [d85e:2e00:6050:0:c05e:2e00:6050:0] request [DescribeStreamSummary] url [/Root] database [/Root] requestId: f9bc71ea-8adbf44-b31f29e4-f19d7d74 2025-03-28T09:26:34.939595Z node 8 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [f9bc71ea-8adbf44-b31f29e4-f19d7d74] got new request from [d85e:2e00:6050:0:c05e:2e00:6050:0] database '/Root' stream 'testtopic' 2025-03-28T09:26:34.940072Z node 8 :HTTP_PROXY DEBUG: http request [DescribeStreamSummary] requestId [f9bc71ea-8adbf44-b31f29e4-f19d7d74] [auth] Authorized successfully 2025-03-28T09:26:34.940219Z node 8 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [f9bc71ea-8adbf44-b31f29e4-f19d7d74] sending grpc request to '' database: '/Root' iam token size: 0 2025-03-28T09:26:34.941584Z node 8 :HTTP_PROXY INFO: http request [DescribeStreamSummary] requestId [f9bc71ea-8adbf44-b31f29e4-f19d7d74] reply ok 2025-03-28T09:26:34.941911Z node 8 :HTTP DEBUG: (#37,[::1]:59652) <- (200 ) 2025-03-28T09:26:34.941990Z node 8 :HTTP DEBUG: (#37,[::1]:59652) connection closed Http output full {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1743153.994,"StreamName":"testtopic"}} 200 {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1743153.994,"StreamName":"testtopic"}} 2025-03-28T09:26:34.943173Z node 8 :HTTP DEBUG: (#40,[::1]:59654) incoming connection opened 2025-03-28T09:26:34.943232Z node 8 :HTTP DEBUG: (#40,[::1]:59654) -> (POST /Root) 2025-03-28T09:26:34.943357Z node 8 :HTTP_PROXY INFO: proxy service: incoming request from [3828:2600:6050:0:2028:2600:6050:0] request [DescribeStream] url [/Root] database [/Root] requestId: a6abce9e-c66d41e5-ed621925-cfd5a7fa 2025-03-28T09:26:34.943667Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [a6abce9e-c66d41e5-ed621925-cfd5a7fa] got new request from [3828:2600:6050:0:2028:2600:6050:0] database '/Root' stream 'testtopic' 2025-03-28T09:26:34.944241Z node 8 :HTTP_PROXY DEBUG: http request [DescribeStream] requestId [a6abce9e-c66d41e5-ed621925-cfd5a7fa] [auth] Authorized successfully 2025-03-28T09:26:34.944339Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [a6abce9e-c66d41e5-ed621925-cfd5a7fa] sending grpc request to '' database: '/Root' iam token size: 0 2025-03-28T09:26:34.946279Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [8:7486789399606344654:2550], now have 1 active actors on pipe 2025-03-28T09:26:34.946336Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server connected, pipe [8:7486789399606344655:2551], now have 1 active actors on pipe 2025-03-28T09:26:34.946369Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server connected, pipe [8:7486789399606344651:2547], now have 1 active actors on pipe 2025-03-28T09:26:34.946416Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server connected, pipe [8:7486789399606344652:2548], now have 1 active actors on pipe 2025-03-28T09:26:34.946449Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server connected, pipe [8:7486789399606344653:2549], now have 1 
active actors on pipe 2025-03-28T09:26:34.947846Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037911] server disconnected, pipe [8:7486789399606344655:2551] destroyed 2025-03-28T09:26:34.947885Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037907] server disconnected, pipe [8:7486789399606344651:2547] destroyed 2025-03-28T09:26:34.947905Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037908] server disconnected, pipe [8:7486789399606344652:2548] destroyed 2025-03-28T09:26:34.947936Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037909] server disconnected, pipe [8:7486789399606344653:2549] destroyed 2025-03-28T09:26:34.947956Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [8:7486789399606344654:2550] destroyed 2025-03-28T09:26:34.948042Z node 8 :HTTP_PROXY INFO: http request [DescribeStream] requestId [a6abce9e-c66d41e5-ed621925-cfd5a7fa] reply ok 2025-03-28T09:26:34.948249Z node 8 :HTTP DEBUG: (#40,[::1]:59654) <- (200 ) 2025-03-28T09:26:34.948314Z node 8 :HTTP DEBUG: (#40,[::1]:59654) connection closed Http output full {"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1743153995,"StorageLimitMb":0,"StreamName":"testtopic"}} >> Backup::ProposeBackup [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp |89.3%| [TA] $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> EvWrite::AbortInTransaction >> TPersQueueTest::WriteNonExistingTopic [GOOD] >> TColumnShardTestReadWrite::WriteReadNoCompression >> TPersQueueTest::WriteAfterAlter >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Backup::ProposeBackup [GOOD] Test command err: 2025-03-28T09:26:36.258746Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:36.383319Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:36.414114Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:36.414489Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:36.424425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:36.424696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:36.424959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:36.425123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:36.425273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:36.425419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:36.425553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:36.425677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:36.425795Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:36.425954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:36.426089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:36.426229Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:36.467379Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:36.467645Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:36.467707Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:36.467932Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:36.468130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:36.468202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:36.468244Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:36.468370Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:36.468447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:36.468512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:36.468553Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:36.468742Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:36.468819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:36.468867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:36.468914Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:36.469082Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:36.469176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:36.469232Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:36.469264Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:36.469341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:36.469376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:36.469405Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:36.469457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:36.469499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:36.469531Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:36.469916Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-03-28T09:26:36.469983Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-03-28T09:26:36.470049Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-03-28T09:26:36.470118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34; 2025-03-28T09:26:36.470253Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:36.470310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:36.470336Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:36.470468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:36.470508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:36.470528Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:36.470656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:36.470792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:36.470816Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:36.470950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:36.470974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:36.470995Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:36.471098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:36.471129Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:36.471166Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-03-28T09:26:37.492435Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:26:37.492584Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;);columns=7;rows=100; 2025-03-28T09:26:37.492680Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=4813;num_rows=100;batch_columns=key1,key2,field,_yql_plan_step,_yql_tx_id,_yql_write_id,_yql_delete_flag; 2025-03-28T09:26:37.492895Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:295:2313];bytes=4813;rows=100;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string _yql_plan_step: uint64 _yql_tx_id: uint64 _yql_write_id: uint64 _yql_delete_flag: bool; 2025-03-28T09:26:37.493052Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-03-28T09:26:37.493242Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-03-28T09:26:37.493405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-03-28T09:26:37.494310Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:0:0:1:3:2752:0]; 2025-03-28T09:26:37.508186Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:37.508414Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-03-28T09:26:37.508606Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-03-28T09:26:37.508672Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:301:2319] finished for tablet 9437184 2025-03-28T09:26:37.509295Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:295:2313];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.006},{"events":["f_processing","f_task_result"],"t":0.008},{"events":["f_ack"],"t":0.016},{"events":["l_task_result"],"t":0.042},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.121}],"full":{"a":1743153997387321,"name":"_full_task","f":1743153997387321,"d_finished":0,"c":0,"l":1743153997508753,"d":121432},"events":[{"name":"bootstrap","f":1743153997387651,"d_finished":5854,"c":1,"l":1743153997393505,"d":5854},{"a":1743153997508000,"name":"ack","f":1743153997403772,"d_finished":3598,"c":3,"l":1743153997493447,"d":4351},{"a":1743153997507976,"name":"processing","f":1743153997396171,"d_finished":19506,"c":27,"l":1743153997493451,"d":20283},{"name":"ProduceResults","f":1743153997390983,"d_finished":10160,"c":32,"l":1743153997508643,"d":10160},{"a":1743153997508651,"name":"Finish","f":1743153997508651,"d_finished":0,"c":0,"l":1743153997508753,"d":102},{"name":"task_result","f":1743153997396191,"d_finished":15429,"c":24,"l":1743153997430259,"d":15429}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-03-28T09:26:37.509420Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:295:2313];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:37.510019Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:295:2313];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.003},{"events":["l_bootstrap"],"t":0.006},{"events":["f_processing","f_task_result"],"t":0.008},{"events":["f_ack"],"t":0.016},{"events":["l_task_result"],"t":0.042},{"events":["l_ProduceResults","f_Finish"],"t":0.121},{"events":["l_ack","l_processing","l_Finish"],"t":0.122}],"full":{"a":1743153997387321,"name":"_full_task","f":1743153997387321,"d_finished":0,"c":0,"l":1743153997509471,"d":122150},"events":[{"name":"bootstrap","f":1743153997387651,"d_finished":5854,"c":1,"l":1743153997393505,"d":5854},{"a":1743153997508000,"name":"ack","f":1743153997403772,"d_finished":3598,"c":3,"l":1743153997493447,"d":5069},{"a":1743153997507976,"name":"processing","f":1743153997396171,"d_finished":19506,"c":27,"l":1743153997493451,"d":21001},{"name":"ProduceResults","f":1743153997390983,"d_finished":10160,"c":32,"l":1743153997508643,"d":10160},{"a":1743153997508651,"name":"Finish","f":1743153997508651,"d_finished":0,"c":0,"l":1743153997509471,"d":820},{"name":"task_result","f":1743153997396191,"d_finished":15429,"c":24,"l":1743153997430259,"d":15429}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;;); 2025-03-28T09:26:37.510131Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:37.386082Z;index_granules=0;index_portions=3;index_batches=3;committed_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13880;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13880;selected_rows=0; 2025-03-28T09:26:37.510204Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:37.510571Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:301:2319];TabletId=9437184;ScanId=1;TxId=1;ScanGen=9437184;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;program_input=(column_ids=1,2,3,4294967040,4294967041,4294967042,4294967043;column_names=_yql_delete_flag,_yql_plan_step,_yql_tx_id,_yql_write_id,field,key1,key2;);;; 2025-03-28T09:26:37.511115Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 1 at tablet 9437184 2025-03-28T09:26:37.541575Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::NOlap::NBackground::TEvExecuteGeneralLocalTransaction;method=TTxController::FinishProposeOnComplete;tx_id=115;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:244:2262]; 2025-03-28T09:26:37.541671Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::NOlap::NBackground::TEvExecuteGeneralLocalTransaction;method=TTxController::FinishProposeOnComplete;tx_id=115;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=115; >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes >> TColumnShardTestReadWrite::ReadWithProgramLike >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot >> TColumnShardTestReadWrite::ReadWithProgram >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 >> TColumnShardTestReadWrite::RebootWriteReadStandalone >> EvWrite::WriteWithLock >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 >> TColumnShardTestReadWrite::WriteReadStandalone >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] |89.3%| [TA] {RESULT} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueTest::SchemeOperationFirstClassCitizen [GOOD] >> TPersQueueTest::SchemeOperationsCheckPropValues |89.3%| [TA] $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] >> EvWrite::AbortInTransaction [GOOD] >> TColumnShardTestReadWrite::WriteStandalone >> TColumnShardTestReadWrite::WriteOverload >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] >> TestYmqHttpProxy::TestListQueueTags [GOOD] >> Normalizers::ColumnChunkNormalizer >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 >> Normalizers::PortionsNormalizer [GOOD] >> EvWrite::WriteWithLock [GOOD] >> TPersQueueTest::PreferredCluster_DisabledRemoteClusterAndWriteSessionsWithDifferentPreferredClusterAndLaterRemoteClusterEnabled_SessionWithMismatchedClusterDiesAfterPreferredClusterEnabledAndOtherSessionsAlive [GOOD] >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay |89.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/join/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithLock [GOOD] Test command err: 2025-03-28T09:26:38.934557Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:39.036819Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:39.063137Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:39.063462Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:39.071526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:39.071735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:39.071963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:39.072121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:39.072246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:39.072361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:39.072491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:39.072615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:39.072724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:39.072890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:39.073004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:39.073104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:39.103029Z node 1 :TX_COLUMNSHARD 
DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:39.103249Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:39.103323Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:39.103518Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:39.103716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:39.103789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:39.103831Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:39.103972Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:39.104053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:39.104116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:39.104158Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:39.104325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:39.104411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:39.104453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:39.104485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:39.104621Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:39.104686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:39.104725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:39.104753Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:39.104837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:39.104883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:39.104912Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:39.104961Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:39.104997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:39.105032Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:39.105437Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=39; 2025-03-28T09:26:39.105512Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-03-28T09:26:39.105627Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=63; 2025-03-28T09:26:39.105710Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-03-28T09:26:39.105884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:39.105963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:39.106002Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:39.106212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:39.106260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:39.106290Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:39.106442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-03-28T09:26:39.106491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:39.106533Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:39.106762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:39.106817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:39.106849Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:39.106975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:39.107020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:39.107086Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-28T09:26:40.139335Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:26:40.139747Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-28T09:26:40.139785Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-28T09:26:40.139833Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=1; 2025-03-28T09:26:40.139886Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=4096;merger=0;interval_id=1; 2025-03-28T09:26:40.139954Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-28T09:26:40.140059Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 
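Both columnshard bootstraps above (Backup::ProposeBackup and EvWrite::WriteWithLock) log the same fixed chain: TTxInitSchema registers 11 normalizers, then TTxUpdateSchema walks them strictly in seq_id order, emitting normalizer_init / normalizer_finished per stage and "0 chunks found" on a fresh tablet. A minimal model of that driver loop (seq_ids and names are copied from the log; the loop itself is an illustration, not the C++ implementation):

    NORMALIZERS = [  # (seq_id, CLASS_NAME) exactly as registered in the log above
        (1, "Granules"), (2, "Chunks"), (4, "TablesCleaner"), (6, "CleanGranuleId"),
        (8, "CleanInsertionDedup"), (9, "GCCountersNormalizer"),
        (10, "RestorePortionFromChunks"), (11, "SyncPortionFromChunks"),
        (13, "SyncMinSnapshotFromChunks"), (15, "RestoreV1Chunks_V2"),
        (16, "RestoreV2Chunks"),
    ]

    def run_normalizers(apply_one):
        assert len(NORMALIZERS) == 11        # matches normalizers_count=11 above
        for seq_id, name in NORMALIZERS:     # strictly ordered by seq_id
            chunks = apply_one(name)         # each stage reports chunks found
            print(f"normalizer_finished;CLASS_NAME={name};id={seq_id};chunks={chunks}")

    run_normalizers(lambda name: 0)          # a fresh tablet finds 0 chunks everywhere
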
2025-03-28T09:26:40.140092Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=4096;finished=1; 2025-03-28T09:26:40.140123Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:26:40.140333Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:40.140548Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:4096;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:40.140601Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:26:40.140709Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=4096; 2025-03-28T09:26:40.140773Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=458752;num_rows=4096;batch_columns=key,field; 2025-03-28T09:26:40.140890Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:305:2323];bytes=458752;rows=4096;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-03-28T09:26:40.141015Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:40.141131Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 
2025-03-28T09:26:40.141258Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:40.141674Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:40.141809Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:40.141911Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:40.141950Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:309:2327] finished for tablet 9437184 2025-03-28T09:26:40.142395Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:305:2323];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["l_task_result"],"t":0.063},{"events":["f_ack"],"t":0.064},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.065}],"full":{"a":1743154000076227,"name":"_full_task","f":1743154000076227,"d_finished":0,"c":0,"l":1743154000142000,"d":65773},"events":[{"name":"bootstrap","f":1743154000076547,"d_finished":3212,"c":1,"l":1743154000079759,"d":3212},{"a":1743154000141648,"name":"ack","f":1743154000140299,"d_finished":995,"c":1,"l":1743154000141294,"d":1347},{"a":1743154000141632,"name":"processing","f":1743154000083992,"d_finished":28784,"c":9,"l":1743154000141297,"d":29152},{"name":"ProduceResults","f":1743154000078512,"d_finished":2676,"c":12,"l":1743154000141936,"d":2676},{"a":1743154000141939,"name":"Finish","f":1743154000141939,"d_finished":0,"c":0,"l":1743154000142000,"d":61},{"name":"task_result","f":1743154000084032,"d_finished":27618,"c":8,"l":1743154000140162,"d":27618}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:40.142468Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:305:2323];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:40.143954Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:305:2323];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.007},{"events":["l_task_result"],"t":0.063},{"events":["f_ack"],"t":0.064},{"events":["l_ProduceResults","f_Finish"],"t":0.065},{"events":["l_ack","l_processing","l_Finish"],"t":0.066}],"full":{"a":1743154000076227,"name":"_full_task","f":1743154000076227,"d_finished":0,"c":0,"l":1743154000142519,"d":66292},"events":[{"name":"bootstrap","f":1743154000076547,"d_finished":3212,"c":1,"l":1743154000079759,"d":3212},{"a":1743154000141648,"name":"ack","f":1743154000140299,"d_finished":995,"c":1,"l":1743154000141294,"d":1866},{"a":1743154000141632,"name":"processing","f":1743154000083992,"d_finished":28784,"c":9,"l":1743154000141297,"d":29671},{"name":"ProduceResults","f":1743154000078512,"d_finished":2676,"c":12,"l":1743154000141936,"d":2676},{"a":1743154000141939,"name":"Finish","f":1743154000141939,"d_finished":0,"c":0,"l":1743154000142519,"d":580},{"name":"task_result","f":1743154000084032,"d_finished":27618,"c":8,"l":1743154000140162,"d":27618}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:40.144065Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:40.075588Z;index_granules=0;index_portions=1;index_batches=176;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=494016;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=494016;selected_rows=0; 2025-03-28T09:26:40.144131Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:40.144373Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:309:2327];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD]
Test command err:
2025-03-28T09:26:38.814234Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:38.920930Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:38.949737Z node 1 :TX_COLUMNSHARD INFO:
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:38.950098Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:38.959231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:38.959475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:38.959754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:38.959898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:38.960030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:38.960166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:38.960316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:38.960432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:38.960555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:38.960684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.960799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:38.960924Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:38.993308Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:38.993710Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:38.993793Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-03-28T09:26:38.994021Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.994202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:38.994312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:38.994368Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:38.994487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:38.994561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:38.994608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:38.994639Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:38.994853Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.994944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:38.994992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:38.995030Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:38.995175Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:38.995237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:38.995286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:38.995331Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:38.995409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:38.995448Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:38.995483Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:38.995536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:38.995573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:38.995609Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:38.996039Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-03-28T09:26:38.996171Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=72; 2025-03-28T09:26:38.996272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=50; 2025-03-28T09:26:38.996387Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=48; 2025-03-28T09:26:38.996574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:38.996637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:38.996690Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:38.996902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:38.996948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.996992Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.997177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:38.997228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:38.997260Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:38.997451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:38.997495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:38.997525Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:38.997684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:38.997737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:38.997791Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:26:39.953980Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-28T09:26:39.954011Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-28T09:26:39.954050Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=6; 2025-03-28T09:26:39.954095Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=10;merger=0;interval_id=6; 2025-03-28T09:26:39.954133Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-28T09:26:39.954204Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-28T09:26:39.954233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-03-28T09:26:39.954279Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:26:39.954474Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:39.954599Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:10;schema=message: string;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-28T09:26:39.954660Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:26:39.954778Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;);columns=1;rows=10; 2025-03-28T09:26:39.954828Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=61;num_rows=10;batch_columns=message; 2025-03-28T09:26:39.954944Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:303:2321];bytes=61;rows=10;faults=0;finished=0;fault=0;schema=message: string; 2025-03-28T09:26:39.955077Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-28T09:26:39.955208Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-28T09:26:39.955310Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-28T09:26:39.955446Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:39.955553Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-28T09:26:39.955650Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-28T09:26:39.955703Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:304:2322] finished for tablet 9437184 2025-03-28T09:26:39.956136Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:303:2321];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults"],"t":0.011},{"events":["l_ack","l_processing","f_Finish","l_Finish"],"t":0.012}],"full":{"a":1743153999943692,"name":"_full_task","f":1743153999943692,"d_finished":0,"c":0,"l":1743153999955752,"d":12060},"events":[{"name":"bootstrap","f":1743153999943857,"d_finished":2272,"c":1,"l":1743153999946129,"d":2272},{"a":1743153999955428,"name":"ack","f":1743153999954450,"d_finished":887,"c":1,"l":1743153999955337,"d":1211},{"a":1743153999955416,"name":"processing","f":1743153999947214,"d_finished":5573,"c":9,"l":1743153999955340,"d":5909},{"name":"ProduceResults","f":1743153999945034,"d_finished":2531,"c":12,"l":1743153999955690,"d":2531},{"a":1743153999955692,"name":"Finish","f":1743153999955692,"d_finished":0,"c":0,"l":1743153999955752,"d":60},{"name":"task_result","f":1743153999947229,"d_finished":4567,"c":8,"l":1743153999954330,"d":4567}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-28T09:26:39.956231Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:303:2321];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:39.956629Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:303:2321];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults"],"t":0.011},{"events":["l_ack","l_processing","f_Finish","l_Finish"],"t":0.012}],"full":{"a":1743153999943692,"name":"_full_task","f":1743153999943692,"d_finished":0,"c":0,"l":1743153999956276,"d":12584},"events":[{"name":"bootstrap","f":1743153999943857,"d_finished":2272,"c":1,"l":1743153999946129,"d":2272},{"a":1743153999955428,"name":"ack","f":1743153999954450,"d_finished":887,"c":1,"l":1743153999955337,"d":1735},{"a":1743153999955416,"name":"processing","f":1743153999947214,"d_finished":5573,"c":9,"l":1743153999955340,"d":6433},{"name":"ProduceResults","f":1743153999945034,"d_finished":2531,"c":12,"l":1743153999955690,"d":2531},{"a":1743153999955692,"name":"Finish","f":1743153999955692,"d_finished":0,"c":0,"l":1743153999956276,"d":584},{"name":"task_result","f":1743153999947229,"d_finished":4567,"c":8,"l":1743153999954330,"d":4567}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;); 2025-03-28T09:26:39.956714Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:39.943227Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-03-28T09:26:39.956751Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:39.956976Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:304:2322];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::PortionsNormalizer [GOOD]
Test command err:
2025-03-28T09:26:36.232406Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:36.390082Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:36.417155Z node 1
:TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:36.417517Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:36.426035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2025-03-28T09:26:36.426295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=LeakedBlobsNormalizer; 2025-03-28T09:26:36.426402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-03-28T09:26:36.426575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:36.426799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:36.426947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:36.427073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:36.427175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:36.427275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:36.427401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:36.427547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:36.427704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:36.427830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:36.427937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:36.461628Z 
node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:36.461911Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2025-03-28T09:26:36.461974Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-03-28T09:26:36.462250Z node 1 :TX_COLUMNSHARD CRIT: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:286;tasks_for_remove=0; 2025-03-28T09:26:36.462387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-03-28T09:26:36.462468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-03-28T09:26:36.462545Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-03-28T09:26:36.463031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-03-28T09:26:36.463135Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2025-03-28T09:26:36.463244Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=53; 2025-03-28T09:26:36.463337Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-03-28T09:26:36.463469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2025-03-28T09:26:36.463524Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-03-28T09:26:36.463561Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-03-28T09:26:36.463805Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:36.463998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:36.464050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:36.464082Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-03-28T09:26:36.464165Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:36.464216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:36.464263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:36.464300Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:36.464497Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:36.464560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:36.464594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:36.464629Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:36.464735Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:36.464790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:36.464825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:36.464852Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:36.464932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:36.464977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:36.465006Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:36.465061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:36.465097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:36.465127Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:36.465481Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=34; 2025-03-28T09:26:36.465574Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-03-28T09:26:36.465647Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2025-03-28T09:26:36.465736Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-03-28T09:26:36.465907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:36.465972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=a ... 09:26:39.687335Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:26:39.687426Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:26:39.687468Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-03-28T09:26:39.687512Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:26:39.687567Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:26:39.687630Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:26:39.687708Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-28T09:26:39.687775Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:26:39.687847Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:26:39.687892Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:26:39.687985Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-03-28T09:26:39.688045Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:26:39.877798Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 111 scanId: 0 version: {11:111} readable: {11:max} at tablet 9437184 2025-03-28T09:26:39.877997Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2025-03-28T09:26:39.878202Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-03-28T09:26:39.878286Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-03-28T09:26:39.879060Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4},{"from":6}]},{"owner_id":2,"inputs":[{"from":7}]},{"owner_id":4,"inputs":[{"from":7}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key1","id":1}},"o":"1","t":"AssembleOriginalData"},"w":11,"id":2},"6":{"p":{"i":"3","p":{"address":{"name":"field","id":3}},"o":"3","t":"AssembleOriginalData"},"w":11,"id":6},"7":{"p":{"p":{"data":[{"name":"key1","id":1},{"name":"key2","id":2},{"name":"field","id":3}]},"o":"1,2,3","t":"FetchOriginalData"},"w":6,"id":7},"4":{"p":{"i":"2","p":{"address":{"name":"key2","id":2}},"o":"2","t":"AssembleOriginalData"},"w":11,"id":4},"0":{"p":{"i":"1,2,3","t":"Projection"},"w":33,"id":0}}}; 2025-03-28T09:26:39.879191Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:26:39.879785Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:398:2412];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:471:2477];trace_detailed=; 2025-03-28T09:26:39.880441Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=1,2,3;column_names=field,key1,key2;);; 2025-03-28T09:26:39.880682Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; 2025-03-28T09:26:39.881041Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:39.881180Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:39.881314Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:39.881376Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:471:2477] finished for tablet 9437184 2025-03-28T09:26:39.881759Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:469:2476];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743153999879697,"name":"_full_task","f":1743153999879697,"d_finished":0,"c":0,"l":1743153999881433,"d":1736},"events":[{"name":"bootstrap","f":1743153999879894,"d_finished":954,"c":1,"l":1743153999880848,"d":954},{"a":1743153999881014,"name":"ack","f":1743153999881014,"d_finished":0,"c":0,"l":1743153999881433,"d":419},{"a":1743153999880995,"name":"processing","f":1743153999880995,"d_finished":0,"c":0,"l":1743153999881433,"d":438},{"name":"ProduceResults","f":1743153999880836,"d_finished":298,"c":2,"l":1743153999881357,"d":298},{"a":1743153999881361,"name":"Finish","f":1743153999881361,"d_finished":0,"c":0,"l":1743153999881433,"d":72}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:39.881848Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:469:2476];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:39.882199Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:469:2476];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743153999879697,"name":"_full_task","f":1743153999879697,"d_finished":0,"c":0,"l":1743153999881892,"d":2195},"events":[{"name":"bootstrap","f":1743153999879894,"d_finished":954,"c":1,"l":1743153999880848,"d":954},{"a":1743153999881014,"name":"ack","f":1743153999881014,"d_finished":0,"c":0,"l":1743153999881892,"d":878},{"a":1743153999880995,"name":"processing","f":1743153999880995,"d_finished":0,"c":0,"l":1743153999881892,"d":897},{"name":"ProduceResults","f":1743153999880836,"d_finished":298,"c":2,"l":1743153999881357,"d":298},{"a":1743153999881361,"name":"Finish","f":1743153999881361,"d_finished":0,"c":0,"l":1743153999881892,"d":531}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:39.882272Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:39.879147Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:26:39.882315Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:39.882414Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:471:2477];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0;
FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgram [GOOD]
Test command err:
2025-03-28T09:26:38.819076Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:38.908256Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:38.933849Z node 1
:TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:38.934167Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:38.942283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:38.942489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:38.942755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:38.942884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:38.943000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:38.943131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:38.943258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:38.943365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:38.943487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:38.943604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.943723Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:38.943862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:38.978020Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:38.978334Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:38.978387Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:38.978551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.978724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:38.978820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:38.978867Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:38.978980Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:38.979116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:38.979165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:38.979198Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:38.979358Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.979425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:38.979468Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:38.979506Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:38.979942Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:38.980008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:38.980057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:38.980121Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:38.980198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:38.980241Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:38.980271Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:38.980313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:38.980347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:38.980381Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:38.980771Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=38; 2025-03-28T09:26:38.980875Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=56; 2025-03-28T09:26:38.980962Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-03-28T09:26:38.981103Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=92; 2025-03-28T09:26:38.981329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:38.981388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:38.981426Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:38.981622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:38.981679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.981711Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.981891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:38.981939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:38.981973Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:38.982158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:38.982203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:38.982233Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:38.982417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:38.982471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:38.982515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... Gen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-28T09:26:39.835954Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:26:39.836054Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2025-03-28T09:26:39.836144Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=OPTIONAL_ASSEMBLER::LAST;details={columns=(column_ids=5,9;column_names=level,saved_at;);;};;scan_step_idx=7; 2025-03-28T09:26:39.836282Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=fetching.cpp:58;scan_step=name=BUILD_STAGE_RESULT;details={};;scan_step_idx=8; 2025-03-28T09:26:39.836462Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-28T09:26:39.836498Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2025-03-28T09:26:39.836536Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=interval.cpp:28;event=fetched;interval_idx=0; 2025-03-28T09:26:39.836592Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=interval.cpp:17;event=start_construct_result;interval_idx=0;interval_id=2;memory=8400226;count=1; 2025-03-28T09:26:39.837073Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:149;event=DoExecute;interval_idx=0; 2025-03-28T09:26:39.839881Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=source.cpp:50;event=source_ready;intervals_count=1;source_idx=0; 2025-03-28T09:26:39.840053Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-03-28T09:26:39.840121Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=0; 2025-03-28T09:26:39.840164Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:26:39.840382Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-28T09:26:39.840420Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-28T09:26:39.840460Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-03-28T09:26:39.840520Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=0;merger=0;interval_id=2; 2025-03-28T09:26:39.840588Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-28T09:26:39.840694Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-03-28T09:26:39.840888Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-03-28T09:26:39.841128Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:39.841269Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-03-28T09:26:39.841393Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-03-28T09:26:39.841442Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:286:2304] finished for tablet 9437184 2025-03-28T09:26:39.842018Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:285:2303];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish","l_task_result"],"t":0.014}],"full":{"a":1743153999826945,"name":"_full_task","f":1743153999826945,"d_finished":0,"c":0,"l":1743153999841520,"d":14575},"events":[{"name":"bootstrap","f":1743153999827173,"d_finished":2649,"c":1,"l":1743153999829822,"d":2649},{"a":1743153999841100,"name":"ack","f":1743153999841100,"d_finished":0,"c":0,"l":1743153999841520,"d":420},{"a":1743153999841084,"name":"processing","f":1743153999831240,"d_finished":6352,"c":9,"l":1743153999840974,"d":6788},{"name":"ProduceResults","f":1743153999828643,"d_finished":2327,"c":11,"l":1743153999841422,"d":2327},{"a":1743153999841427,"name":"Finish","f":1743153999841427,"d_finished":0,"c":0,"l":1743153999841520,"d":93},{"name":"task_result","f":1743153999831258,"d_finished":6220,"c":9,"l":1743153999840971,"d":6220}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-03-28T09:26:39.842115Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:285:2303];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:39.842646Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:285:2303];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_ProduceResults","f_Finish","l_task_result"],"t":0.014},{"events":["l_ack","l_processing","l_Finish"],"t":0.015}],"full":{"a":1743153999826945,"name":"_full_task","f":1743153999826945,"d_finished":0,"c":0,"l":1743153999842193,"d":15248},"events":[{"name":"bootstrap","f":1743153999827173,"d_finished":2649,"c":1,"l":1743153999829822,"d":2649},{"a":1743153999841100,"name":"ack","f":1743153999841100,"d_finished":0,"c":0,"l":1743153999842193,"d":1093},{"a":1743153999841084,"name":"processing","f":1743153999831240,"d_finished":6352,"c":9,"l":1743153999840974,"d":7461},{"name":"ProduceResults","f":1743153999828643,"d_finished":2327,"c":11,"l":1743153999841422,"d":2327},{"a":1743153999841427,"name":"Finish","f":1743153999841427,"d_finished":0,"c":0,"l":1743153999842193,"d":766},{"name":"task_result","f":1743153999831258,"d_finished":6220,"c":9,"l":1743153999840971,"d":6220}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;;); 2025-03-28T09:26:39.843000Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:39.826094Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-03-28T09:26:39.843060Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:39.843444Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:286:2304];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] Test command err: 2025-03-28T09:26:34.116558Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789399674392254:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:34.123712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2909/0018e9/r3tmp/tmpK5Lc8E/pdisk_1.dat 2025-03-28T09:26:34.527727Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:34.531421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:34.531541Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:34.541165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32692, node 1 2025-03-28T09:26:34.727401Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/0018e9/r3tmp/yandexYKC7DF.tmp 2025-03-28T09:26:34.727443Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/0018e9/r3tmp/yandexYKC7DF.tmp 2025-03-28T09:26:34.727610Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/0018e9/r3tmp/yandexYKC7DF.tmp 2025-03-28T09:26:34.727761Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2809 PQClient connected to localhost:32692 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:26:35.230349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:26:35.247746Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:26:35.258301Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:26:37.104240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789412559294863:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.104383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.104824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789412559294889:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.109936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:26:37.170073Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789412559294891:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:26:37.232971Z node 1 :TX_PROXY ERROR: Actor# [1:7486789412559294958:2393] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:26:37.570957Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789412559294973:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:26:37.577604Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjBiYjdjMTYtNWY0MjM1ODctMjRkN2U2MWMtYTgxODhhN2I=, ActorId: [1:7486789412559294859:2331], ActorState: ExecuteState, TraceId: 01jqe169938g73jkyjsq1dr5c8, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:26:37.590385Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:26:37.645721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:26:37.767508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:26:37.906429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:26:38.292120Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqe16a5s1k46ra7kzchqfn3b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWI3MTZhNjItNWM5NWU0ZDEtOTBlNzJmYTAtNzkzNWYwMmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:26:39.105592Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789399674392254:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:39.105689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] Test command err: 2025-03-28T09:26:34.092906Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789396768729654:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:34.095597Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001924/r3tmp/tmpdbvGe6/pdisk_1.dat 2025-03-28T09:26:34.543081Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10862, node 1 2025-03-28T09:26:34.591438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:34.591566Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:34.592728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:26:34.727424Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001924/r3tmp/yandexJSNaf0.tmp 2025-03-28T09:26:34.727452Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001924/r3tmp/yandexJSNaf0.tmp 2025-03-28T09:26:34.727622Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001924/r3tmp/yandexJSNaf0.tmp 2025-03-28T09:26:34.727747Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9213 PQClient connected to localhost:10862 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:26:35.215724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:26:35.239283Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:26:35.279483Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2025-03-28T09:26:37.038320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789409653632138:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.038325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789409653632151:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.038419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.042963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-03-28T09:26:37.055561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789409653632153:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-03-28T09:26:37.120428Z node 1 :TX_PROXY ERROR: Actor# [1:7486789409653632218:2392] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:26:37.570977Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789409653632228:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:26:37.578551Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGVmOGM1NzUtNTAyNmRjMGUtZGJiMzkzOGYtYWI1ZThiMDM=, ActorId: [1:7486789409653632136:2331], ActorState: ExecuteState, TraceId: 01jqe1696d19s25s7gxfnma0w2, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:26:37.581622Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:26:37.644228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:26:37.763355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:26:37.852246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:26:38.291976Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqe16a2tcjssych1brr4n0p9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWU3MjRiNWEtNzY0OTg4ZmYtM2QzMzI4NzUtNjgxMTk4NDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:26:39.092760Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789396768729654:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:39.092818Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] Test command err: 2025-03-28T09:26:34.086080Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789399351951470:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:34.096135Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00192a/r3tmp/tmppB8FY9/pdisk_1.dat 2025-03-28T09:26:34.547229Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:34.552183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:34.552295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:34.556576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5864, node 1 2025-03-28T09:26:34.723105Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00192a/r3tmp/yandex0hBsiR.tmp 2025-03-28T09:26:34.723125Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00192a/r3tmp/yandex0hBsiR.tmp 2025-03-28T09:26:34.724420Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00192a/r3tmp/yandex0hBsiR.tmp 2025-03-28T09:26:34.724550Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:26:37.017598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789412236854079:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.017597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789412236854046:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.017719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.035797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-03-28T09:26:37.059696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789412236854081:2370], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-03-28T09:26:37.187082Z node 1 :TX_PROXY ERROR: Actor# [1:7486789412236854144:2362] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:26:37.576558Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789412236854153:2377], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:26:37.578100Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGVhYmYwODgtNjI3ZmU5ZjgtMTNjODRhMTctY2U4ZTM4NmE=, ActorId: [1:7486789412236854039:2362], ActorState: ExecuteState, TraceId: 01jqe1696kf3gvv0y1gm7ge0ck, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:26:37.600200Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |89.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] Test command err: 2025-03-28T09:26:34.088535Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789397838354500:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:34.088613Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018cd/r3tmp/tmprfnj1Y/pdisk_1.dat 2025-03-28T09:26:34.493553Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:34.531849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:34.531938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:34.542530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5663, node 1 2025-03-28T09:26:34.725423Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:26:34.725458Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:26:34.725466Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:26:34.725682Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:26:37.134473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789410723256960:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.134490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789410723256975:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.134565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.140841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480 2025-03-28T09:26:37.161288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789410723256978:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2025-03-28T09:26:37.287296Z node 1 :TX_PROXY ERROR: Actor# [1:7486789410723257039:2362] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:26:37.620104Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789410723257053:2379], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:26:37.622003Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjJhZmI5ZGUtYzI1ZTg3YTQtNGI5NmUxYS1hYmY2ZTc4Mg==, ActorId: [1:7486789410723256954:2366], ActorState: ExecuteState, TraceId: 01jqe169aacxxh3am8jreb3bsv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:26:37.641993Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::AbortInTransaction [GOOD] Test command err: 2025-03-28T09:26:38.682157Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:38.799611Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:38.825102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:38.825454Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:38.833446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:38.833649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:38.833910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:38.834094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:38.834209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:38.834339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:38.834448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:38.834553Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:38.834657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:38.834832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.834956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:38.835059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:38.868284Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:38.868618Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:38.868687Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:38.868888Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.869117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:38.869214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:38.869260Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:38.869392Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:38.869479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:38.869548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:38.869593Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:38.869774Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.869850Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:38.869913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:38.869956Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:38.870119Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:38.870194Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:38.870252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:38.870289Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:38.870397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:38.870446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:38.870493Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:38.870549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:38.870592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:38.870625Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:38.871063Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=65; 2025-03-28T09:26:38.871157Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-03-28T09:26:38.871250Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=40; 2025-03-28T09:26:38.871352Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=54; 2025-03-28T09:26:38.871546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:38.871616Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:38.871692Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:38.871912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:38.871968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.872001Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.872158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:38.872213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:38.872245Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:38.872444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:38.872492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:38.872607Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:38.872779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:38.872829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:38.872878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
D DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:245;method=RegisterTable;path_id=1; 2025-03-28T09:26:39.467175Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:144;event=RegisterTable;path_id=1; 2025-03-28T09:26:39.473684Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-03-28T09:26:39.473860Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:211;event=finished_tx;tx_id=10; 2025-03-28T09:26:39.499221Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-03-28T09:26:39.499436Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=229592;columns=2; 2025-03-28T09:26:39.517209Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:138:2170];fline=actor.cpp:22;event=flush_writing;size=229592;count=1; 2025-03-28T09:26:39.521006Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-03-28T09:26:39.523001Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-03-28T09:26:39.537332Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-03-28T09:26:39.537503Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:26:39.537998Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:116;event=abort;tx_id=222;problem=finished; 2025-03-28T09:26:39.538070Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:134;event=abort;tx_id=222;problem=finished; 2025-03-28T09:26:39.538294Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 10 at tablet 9437184, mediator 0 2025-03-28T09:26:39.538352Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] execute at tablet 9437184 2025-03-28T09:26:39.538397Z node 1 :TX_COLUMNSHARD ERROR: TxPlanStep[5] Ignore old txIds [112] for step 10 last planned step 10 at tablet 9437184 2025-03-28T09:26:39.538443Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[5] complete at tablet 9437184 2025-03-28T09:26:39.538811Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {10:max} readable: {10:max} at tablet 9437184 2025-03-28T09:26:39.538929Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:26:39.541227Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-03-28T09:26:39.541328Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2025-03-28T09:26:39.542102Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"5":{"p":{"p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"1,2","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"2","p":{"address":{"name":"field","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,2","t":"Projection"},"w":18,"id":0}}}; 2025-03-28T09:26:39.542247Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:26:39.543849Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={10:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:258:2276];trace_detailed=; 2025-03-28T09:26:39.545103Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=1,2;column_names=field,key;);; 2025-03-28T09:26:39.545313Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; 2025-03-28T09:26:39.545676Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:39.545858Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:39.545962Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:39.546017Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan 
[1:258:2276] finished for tablet 9437184 2025-03-28T09:26:39.546410Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:252:2270];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743153999543783,"name":"_full_task","f":1743153999543783,"d_finished":0,"c":0,"l":1743153999546076,"d":2293},"events":[{"name":"bootstrap","f":1743153999543993,"d_finished":1501,"c":1,"l":1743153999545494,"d":1501},{"a":1743153999545652,"name":"ack","f":1743153999545652,"d_finished":0,"c":0,"l":1743153999546076,"d":424},{"a":1743153999545630,"name":"processing","f":1743153999545630,"d_finished":0,"c":0,"l":1743153999546076,"d":446},{"name":"ProduceResults","f":1743153999545458,"d_finished":308,"c":2,"l":1743153999545997,"d":308},{"a":1743153999546001,"name":"Finish","f":1743153999546001,"d_finished":0,"c":0,"l":1743153999546076,"d":75}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:39.546496Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:252:2270];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:39.546990Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:252:2270];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":1743153999543783,"name":"_full_task","f":1743153999543783,"d_finished":0,"c":0,"l":1743153999546589,"d":2806},"events":[{"name":"bootstrap","f":1743153999543993,"d_finished":1501,"c":1,"l":1743153999545494,"d":1501},{"a":1743153999545652,"name":"ack","f":1743153999545652,"d_finished":0,"c":0,"l":1743153999546589,"d":937},{"a":1743153999545630,"name":"processing","f":1743153999545630,"d_finished":0,"c":0,"l":1743153999546589,"d":959},{"name":"ProduceResults","f":1743153999545458,"d_finished":308,"c":2,"l":1743153999545997,"d":308},{"a":1743153999546001,"name":"Finish","f":1743153999546001,"d_finished":0,"c":0,"l":1743153999546589,"d":588}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:39.547086Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:39.542209Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:26:39.547135Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:39.547234Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:258:2276];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=uint64;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestListQueueTags [GOOD] Test command err: 2025-03-28T09:25:38.490256Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789157958579621:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:38.490319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000761/r3tmp/tmpUGwfpO/pdisk_1.dat 2025-03-28T09:25:38.857321Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15930, node 1 2025-03-28T09:25:38.913233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:38.913343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:38.955066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:39.039641Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:39.039670Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:39.039678Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:39.039867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14679 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:39.370469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:39.395573Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 TClient is connected to server localhost:14679 waiting... 2025-03-28T09:25:39.619475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:25:39.627426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:39.642130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:39.765156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:39.806018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:39.849153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:39.880039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:25:39.913443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:25:39.959264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:25:39.994336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:40.030289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:40.067207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:41.580284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789170843482963:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:41.580283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789170843482956:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:41.580376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:41.583428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480 2025-03-28T09:25:41.593670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789170843482970:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2025-03-28T09:25:41.661660Z node 1 :TX_PROXY ERROR: Actor# [1:7486789170843483021:2916] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 18], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:42.191827Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqe14k29chrx0cd1hd958t3c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjVmODM4OTktMTQxYWNjMjctMmNiODU5ZjUtMjQxMjA2MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:42.199624Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqe14k29chrx0cd1hd958t3c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjVmODM4OTktMTQxYWNjMjctMmNiODU5ZjUtMjQxMjA2MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:42.203558Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqe14k29chrx0cd1hd958t3c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjVmODM4OTktMTQxYWNjMjctMmNiODU5ZjUtMjQxMjA2MmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:25:42.242023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:42.273497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:42.304325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:42.332111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:42.360564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710682:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:42.388010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:42.417283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:42.446546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:42.494245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:42.557490Z node 1 :HTTP INFO: Listening on http://127.0.0.1:22162 2025-03-28T09:25:43.490782Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789157958579621:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:43.490841Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:43.561157Z node 1 :SQS INFO: Start SQS service actor 2025-03-28T09:25:43.561268Z node 1 :SQS DEBUG: SQS service config: { EnableSqs: true YandexCloudMode: true EnableDeadLetterQueues: true } 2025-03-28T09:25:43.562397Z node 1 :SQS DEBUG: Enable scheme board scheme cache 2025-03-28T09:25:43.563902Z node 1 :SQS INFO: Start SQS proxy service actor 2025-03-28T09:25:43.564170Z node 1 :HTTP INFO: Listening on http://[::]:1700 2025-03-28T09:25:43.582400Z node 1 :SQS INFO: Request SQS users list 2025-03-28T09:25:43.582431Z node 1 :SQS DEBUG: Request SQS queues list 2025-03-28T09:25:43.585850Z node 1 :SQS NOTICE: [Node tracker] schedule describe tables aft ... 3?R\022\377\007\013?Z\t\351\000?V\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\200\003?\202(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?X\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?\226\003?\230\036QUEUE_ID_NUMBER\003\022\000\000\003?\\\000\037\000\001\t\211\004\202\203\005@?8V\000\003?\260\nattrs?\252\001\t\211\004\202\203\005@\203\001HV\000\003?\270\010tags\t\211\006?\272\203\014?\272\203\001H\"\000\t\211\002?\300?\300\014Not\000\t\211\002?\300?8R\000?\252\000\000\003?\272\004{}\t\211\006?\302\203\014?\302\203\001H\"\000\t\211\006?\320\203\005@\203\001H?\322\030Invoke\000\003?\326\014Equals\003?\330\000\t\211\004?\322\207\203\001H?\322 Coalesce\000\t\211\004?\342\207\205\004\207\203\001H?\342\026\032\203\004\030Member\000\t\211\n?\354\203\005\004\200\205\004\203\004\203\004\026\032\213\004\203\001H\203\001H\203\004\036\000\003?\362 \000\001\205\000\000\000\000\001\003\000\000\000\000\000\000\000?\352\005?\370\003?\364\004\003?\366 \003\013?\376\t\351\000?\372\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?%\002\003?)\002\022USER_NAME\003\022\000\003?\374(000000000000000301v0\002\003?\001\002\000\037\003?\356\002\002\003?\322\004{}\002\003\003?\302\004{}?a\002\002\002\001\000/" } Params { Bin: "\037\000\005\205\010\203\001H\203\010\203\010\203\001H\020NAME> TFlatTest::SplitEmptyTwice [GOOD] Test command err: 2025-03-28T09:23:24.565920Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788581677916315:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:24.565970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f09/r3tmp/tmptaxr0h/pdisk_1.dat 2025-03-28T09:23:25.308150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:25.308235Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-03-28T09:23:25.309970Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:25.340526Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:8109 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:23:25.692929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:25.719274Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:23:25.735472Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:23:25.742847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:23:26.043119Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.011s,wait=0.003s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-28T09:23:26.049641Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-28T09:23:26.087412Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-28T09:23:26.095890Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153805897 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743153805897 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... (TRUNCATED) waiting... 
2025-03-28T09:23:29.568532Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788581677916315:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:23:29.568596Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:23:29.650465Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.68, eph 1} end=0, 2 blobs 291r (max 291), put Spent{time=0.017s,wait=0.011s,interrupts=1} Part{ 1 pk, lobs 0 +0, (84350 0 0)b }, ecr=1.000 2025-03-28T09:23:29.687077Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.69, eph 1} end=0, 2 blobs 873r (max 873), put Spent{time=0.038s,wait=0.029s,interrupts=1} Part{ 1 pk, lobs 0 +0, (57331 0 0)b }, ecr=1.000 2025-03-28T09:23:30.051423Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.516, eph 1} end=0, 2 blobs 3r (max 3), put Spent{time=0.010s,wait=0.009s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-03-28T09:23:30.194543Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.117, eph 2} end=0, 2 blobs 495r (max 584), put Spent{time=0.119s,wait=0.059s,interrupts=1} Part{ 1 pk, lobs 0 +0, (143258 0 0)b }, ecr=1.000 2025-03-28T09:23:30.237096Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.120, eph 1} end=0, 2 blobs 2r (max 2), put Spent{time=0.102s,wait=0.101s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-03-28T09:23:30.261175Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.121, eph 1} end=0, 2 blobs 2r (max 2), put Spent{time=0.125s,wait=0.125s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-03-28T09:23:30.321016Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.122, eph 1} end=0, 2 blobs 501r (max 501), put Spent{time=0.185s,wait=0.168s,interrupts=1} Part{ 1 pk, lobs 0 +0, (31966 0 0)b }, ecr=1.000 2025-03-28T09:23:30.353993Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.123, eph 1} end=0, 2 blobs 1500r (max 1500), put Spent{time=0.201s,wait=0.092s,interrupts=1} Part{ 1 pk, lobs 0 +0, (103070 0 0)b }, ecr=1.000 2025-03-28T09:23:30.385658Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.542, eph 1} end=0, 2 blobs 10001r (max 10001), put Spent{time=0.207s,wait=0.004s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2025-03-28T09:23:30.438548Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.135, eph 2} end=0, 2 blobs 1626r (max 1629), put Spent{time=0.135s,wait=0.010s,interrupts=1} Part{ 1 pk, lobs 0 +0, (106600 0 0)b }, ecr=1.000 2025-03-28T09:23:30.610600Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.194, eph 3} end=0, 2 blobs 747r (max 748), put Spent{time=0.035s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (216077 0 0)b }, ecr=1.000 2025-03-28T09:23:30.654499Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.207, eph 3} end=0, 2 blobs 2382r (max 2385), put Spent{time=0.042s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (156118 0 0)b }, ecr=1.000 2025-03-28T09:23:30.879817Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1026, eph 2} end=0, 2 blobs 3r (max 5), put Spent{time=0.023s,wait=0.004s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-03-28T09:23:30.990575Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.257, eph 2} end=0, 2 blobs 2r (max 3), put Spent{time=0.095s,wait=0.084s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-03-28T09:23:31.055878Z node 1 :OPS_COMPACT INFO: 
Compact{72057594037968897.2.258, eph 2} end=0, 2 blobs 2r (max 3), put Spent{time=0.160s,wait=0.149s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-03-28T09:23:31.091849Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.255, eph 4} end=0, 2 blobs 1005r (max 1006), put Spent{time=0.196s,wait=0.027s,interrupts=1} Part{ 1 pk, lobs 0 +0, (290573 0 0)b }, ecr=1.000 2025-03-28T09:23:31.131320Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.259, eph 2} end=0, 2 blobs 1005r (max 1005), put Spent{time=0.235s,wait=0.010s,interrupts=1} Part{ 1 pk, lobs 0 +0, (63918 0 0)b }, ecr=1.000 2025-03-28T09:23:31.283916Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.256, eph 2} end=0, 2 blobs 3012r (max 3012), put Spent{time=0.388s,wait=0.099s,interrupts=1} Part{ 1 pk, lobs 0 +0, (206768 0 0)b }, ecr=1.000 2025-03-28T09:23:31.285010Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.272, eph 4} end=0, 2 blobs 3144r (max 3147), put Spent{time=0.221s,wait=0.015s,interrupts=1} Part{ 1 pk, lobs 0 +0, (206026 0 0)b }, ecr=1.000 2025-03-28T09:23:31.348249Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1052, eph 2} end=0, 2 blobs 10001r (max 10504), put Spent{time=0.441s,wait=0.008s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2025-03-28T09:23:31.475399Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.331, eph 5} end=0, 2 blobs 1256r (max 1257), put Spent{time=0.071s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (363105 0 0)b }, ecr=1.000 2025-03-28T09:23:31.536305Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.346, eph 5} end=0, 2 blobs 3903r (max 3906), put Spent{time=0.043s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (255739 0 0)b }, ecr=1.000 2025-03-28T09:23:31.704807Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1534, eph 3} end=0, 2 blobs 3r (max 5), put Spent{time=0.009s,wait=0.005s,interrupts=1} Part{ 1 pk, lobs 0 +0, (187 0 0)b }, ecr=1.000 2025-03-28T09:23:31.759381Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.396, eph 3} end=0, 2 blobs 2r (max 3), put Spent{time=0.036s,wait=0.019s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2025-03-28T09:23:31.760470Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.397, eph 3} end=0, 2 blobs 2r (max 3), put Spent{time=0.029s,wait=0.020s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2025-03-28T09:23:31.842189Z node 1 :OPS_COMPACT I ... 
ARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715690:0, at schemeshard: 72057594046644480 2025-03-28T09:26:31.276799Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715690:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:26:31.277108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 9 2025-03-28T09:26:31.277277Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715690:0 progress is 1/1 2025-03-28T09:26:31.277298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-03-28T09:26:31.277342Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715690:0 progress is 1/1 2025-03-28T09:26:31.277360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-03-28T09:26:31.277379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715690, ready parts: 1/1, is published: true 2025-03-28T09:26:31.277424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7486789386876793685:2417] message: TxId: 281474976715690 2025-03-28T09:26:31.277451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715690 ready parts: 1/1 2025-03-28T09:26:31.277477Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715690:0 2025-03-28T09:26:31.277499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715690:0 2025-03-28T09:26:31.277630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 8 2025-03-28T09:26:31.277943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:26:31.279379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-28T09:26:31.279665Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 7 2025-03-28T09:26:31.280031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-28T09:26:31.280059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-28T09:26:31.281860Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-03-28T09:26:31.366959Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486789382581825657 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-03-28T09:26:31.367034Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:26:31.367232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, 
message: Source { RawX1: 7486789386876793466 RawX2: 4503608217307465 } TabletId: 72075186224037895 State: 4 2025-03-28T09:26:31.367250Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037895, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:26:31.367370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486789386876793474 RawX2: 4503608217307467 } TabletId: 72075186224037893 State: 4 2025-03-28T09:26:31.367389Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:26:31.367497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486789382581825971 RawX2: 4503608217307447 } TabletId: 72075186224037890 State: 4 2025-03-28T09:26:31.367522Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:26:31.367644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486789386876793470 RawX2: 4503608217307466 } TabletId: 72075186224037894 State: 4 2025-03-28T09:26:31.367662Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:26:31.367844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:26:31.367921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:8 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:26:31.367999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:26:31.368068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:26:31.368242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:26:31.369250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486789382581825968 RawX2: 4503608217307446 } TabletId: 72075186224037891 State: 4 2025-03-28T09:26:31.369299Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:26:31.369656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:26:31.370151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-28T09:26:31.370352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-03-28T09:26:31.370584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 8 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at 
schemeshard: 72057594046644480 2025-03-28T09:26:31.370731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-03-28T09:26:31.370871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-03-28T09:26:31.370974Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-28T09:26:31.371091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-28T09:26:31.371191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-28T09:26:31.371268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-03-28T09:26:31.371345Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-28T09:26:31.372896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-28T09:26:31.372925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-28T09:26:31.372956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:8 2025-03-28T09:26:31.372963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2025-03-28T09:26:31.372977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-03-28T09:26:31.372983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-03-28T09:26:31.373368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-28T09:26:31.373520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-28T09:26:31.373656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-28T09:26:31.373674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-28T09:26:31.373697Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-03-28T09:26:31.373702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-03-28T09:26:31.373746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:26:31.373765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-28T09:26:31.373819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 
2025-03-28T09:26:31.374476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-28T09:26:31.374508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-28T09:26:31.374559Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-28T09:26:31.376649Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-03-28T09:26:31.376682Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-28T09:26:31.376718Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-03-28T09:26:31.376765Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-03-28T09:26:31.376808Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037895 not found 2025-03-28T09:26:31.376845Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] Test command err: 2025-03-28T09:26:34.099851Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789399381814859:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:34.099957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018f9/r3tmp/tmpc5P9LP/pdisk_1.dat 2025-03-28T09:26:34.541745Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:34.602341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 8506, node 1 2025-03-28T09:26:34.602446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:34.604313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:26:34.723216Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:26:34.723243Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:26:34.723251Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:26:34.723389Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61476 PQClient connected to localhost:8506 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:26:35.212419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-28T09:26:37.256677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789412266717475:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.256688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789412266717444:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.256881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.261208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:26:37.283421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789412266717481:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:26:37.542847Z node 1 :TX_PROXY ERROR: Actor# [1:7486789412266717547:2394] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:26:37.644418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:26:37.684477Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789412266717562:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:26:37.686248Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTNmNjg0NWQtZDViY2U2YjMtMWUxYmUwMGYtYjViYzA4NmU=, ActorId: [1:7486789412266717441:2332], ActorState: ExecuteState, TraceId: 01jqe169dgbg21kpj4vzmn5n1y, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:26:37.689006Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:26:37.810786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:26:37.922288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:26:38.292100Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqe16a5df247tfqw748kcyav, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjkyMzJiN2MtMzNhYTU3YmQtZWRmYmJmNmYtOTAzYTRiNGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:39.095687Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789399381814859:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:39.095786Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.3%| [TA] $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQCDTest::TestDiscoverClusters [GOOD] |89.4%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot >> TColumnShardTestReadWrite::Write >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] >> Normalizers::ColumnChunkNormalizer [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot >> YdbOlapStore::LogExistingUserId [GOOD] >> TColumnShardTestReadWrite::ReadStale >> TColumnShardTestReadWrite::WriteReadZSTD >> TColumnShardTestReadWrite::ReadGroupBy >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] >> EvWrite::WriteInTransaction >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] >> TPersQueueTest::CheckACLForGrpcWrite [GOOD] >> TPersQueueTest::CheckACLForGrpcRead >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] >> TColumnShardTestReadWrite::ReadAggregate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] Test command err: 304 176 28 48 32 24 16 24 56 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::ColumnChunkNormalizer [GOOD] Test command err: 2025-03-28T09:26:39.905050Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:40.023631Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:40.053534Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:40.053868Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:40.064387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-03-28T09:26:40.064637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:40.066227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:40.066418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:40.066578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:40.066716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:40.066829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-03-28T09:26:40.066935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:40.067034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:40.067158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:40.067265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:40.067380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:40.107117Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:40.107344Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:40.107406Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-03-28T09:26:40.107627Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:40.107825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:40.107894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:40.107933Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-03-28T09:26:40.108002Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:40.108073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:40.108108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:40.108128Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:40.108279Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:40.108344Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:40.108428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:40.108519Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:40.108607Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:40.108690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:40.108749Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:40.108811Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:40.108911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:40.108951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:40.108976Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:40.109021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:40.109058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:40.109082Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:40.131019Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-03-28T09:26:40.131147Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-03-28T09:26:40.131271Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=73; 2025-03-28T09:26:40.131371Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-03-28T09:26:40.131650Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:40.131756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:40.131817Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:40.132026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:40.132099Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:40.132163Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:40.132318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:40.132364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:40.132398Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:40.132606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:40.132656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:40.132688Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:40.132797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:40.132836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normaliza ... 
Id=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:26:44.166422Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-28T09:26:44.166486Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-28T09:26:44.166546Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-03-28T09:26:44.166623Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-03-28T09:26:44.166680Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-28T09:26:44.166852Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:44.166907Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-03-28T09:26:44.166953Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:26:44.167216Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:44.167404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:44.167456Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:26:44.167598Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-03-28T09:26:44.167698Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-03-28T09:26:44.167839Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:479:2485];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-03-28T09:26:44.168007Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:44.168155Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:44.202286Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:44.203399Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:44.203611Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:44.203829Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:44.203890Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:481:2486] finished for tablet 9437184 2025-03-28T09:26:44.204404Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:479:2485];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.079},{"events":["f_ack","l_task_result"],"t":0.343},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.38}],"full":{"a":1743154003823782,"name":"_full_task","f":1743154003823782,"d_finished":0,"c":0,"l":1743154004203961,"d":380179},"events":[{"name":"bootstrap","f":1743154003824017,"d_finished":2602,"c":1,"l":1743154003826619,"d":2602},{"a":1743154004203360,"name":"ack","f":1743154004167173,"d_finished":35207,"c":1,"l":1743154004202380,"d":35808},{"a":1743154004203324,"name":"processing","f":1743154003903589,"d_finished":182269,"c":9,"l":1743154004202384,"d":182906},{"name":"ProduceResults","f":1743154003825688,"d_finished":37535,"c":12,"l":1743154004203870,"d":37535},{"a":1743154004203874,"name":"Finish","f":1743154004203874,"d_finished":0,"c":0,"l":1743154004203961,"d":87},{"name":"task_result","f":1743154003903625,"d_finished":146836,"c":8,"l":1743154004167011,"d":146836}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:44.204533Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:479:2485];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:44.205023Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:479:2485];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.079},{"events":["f_ack","l_task_result"],"t":0.343},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.38}],"full":{"a":1743154003823782,"name":"_full_task","f":1743154003823782,"d_finished":0,"c":0,"l":1743154004204583,"d":380801},"events":[{"name":"bootstrap","f":1743154003824017,"d_finished":2602,"c":1,"l":1743154003826619,"d":2602},{"a":1743154004203360,"name":"ack","f":1743154004167173,"d_finished":35207,"c":1,"l":1743154004202380,"d":36430},{"a":1743154004203324,"name":"processing","f":1743154003903589,"d_finished":182269,"c":9,"l":1743154004202384,"d":183528},{"name":"ProduceResults","f":1743154003825688,"d_finished":37535,"c":12,"l":1743154004203870,"d":37535},{"a":1743154004203874,"name":"Finish","f":1743154004203874,"d_finished":0,"c":0,"l":1743154004204583,"d":709},{"name":"task_result","f":1743154003903625,"d_finished":146836,"c":8,"l":1743154004167011,"d":146836}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:44.205136Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:43.823114Z;index_granules=0;index_portions=1;index_batches=939;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2589280;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2589280;selected_rows=0; 2025-03-28T09:26:44.205182Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:44.205489Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:481:2486];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] Test command err: 2025-03-28T09:26:38.742536Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:38.849415Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:38.874120Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:38.874454Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:38.880828Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:38.881072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:38.881317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:38.881469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:38.881593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:38.881747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:38.881857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:38.882012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:38.882128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:38.882242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.882376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:38.882492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:38.923468Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:38.923823Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:38.923877Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:38.924043Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.924229Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:38.924312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:38.924360Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:38.924462Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:38.924529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:38.924570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:38.924622Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:38.924779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.924840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:38.924876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:38.924907Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:38.925048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:38.925116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:38.925163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:38.925189Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:38.925283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:38.925319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:38.925348Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-03-28T09:26:38.925394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:38.925429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:38.925458Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:38.925801Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=36; 2025-03-28T09:26:38.925907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=49; 2025-03-28T09:26:38.925992Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=45; 2025-03-28T09:26:38.926092Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-03-28T09:26:38.926247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:38.926309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:38.926350Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:38.926538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:38.926580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.926608Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.926833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:38.926888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:38.926923Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:38.927103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:38.927164Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:38.927196Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:38.927337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:38.927383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:38.927426Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... umn_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:44.208330Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:26:44.208495Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-03-28T09:26:44.208606Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-03-28T09:26:44.208796Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:432:2447];bytes=2759;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-03-28T09:26:44.209015Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:44.209185Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:44.209326Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:44.209599Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:44.209819Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:44.209959Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:44.210005Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:433:2448] finished for tablet 9437184 2025-03-28T09:26:44.210666Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:432:2447];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["l_task_result"],"t":0.015},{"events":["f_ack"],"t":0.016},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.018}],"full":{"a":1743154004191790,"name":"_full_task","f":1743154004191790,"d_finished":0,"c":0,"l":1743154004210071,"d":18281},"events":[{"name":"bootstrap","f":1743154004192145,"d_finished":3596,"c":1,"l":1743154004195741,"d":3596},{"a":1743154004209570,"name":"ack","f":1743154004207974,"d_finished":1401,"c":1,"l":1743154004209375,"d":1902},{"a":1743154004209553,"name":"processing","f":1743154004197269,"d_finished":7233,"c":10,"l":1743154004209378,"d":7751},{"name":"ProduceResults","f":1743154004194150,"d_finished":3965,"c":13,"l":1743154004209989,"d":3965},{"a":1743154004209993,"name":"Finish","f":1743154004209993,"d_finished":0,"c":0,"l":1743154004210071,"d":78},{"name":"task_result","f":1743154004197293,"d_finished":5671,"c":9,"l":1743154004207770,"d":5671}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:44.210788Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:432:2447];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:44.211294Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:432:2447];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["l_task_result"],"t":0.015},{"events":["f_ack"],"t":0.016},{"events":["l_ProduceResults","f_Finish"],"t":0.018},{"events":["l_ack","l_processing","l_Finish"],"t":0.019}],"full":{"a":1743154004191790,"name":"_full_task","f":1743154004191790,"d_finished":0,"c":0,"l":1743154004210836,"d":19046},"events":[{"name":"bootstrap","f":1743154004192145,"d_finished":3596,"c":1,"l":1743154004195741,"d":3596},{"a":1743154004209570,"name":"ack","f":1743154004207974,"d_finished":1401,"c":1,"l":1743154004209375,"d":2667},{"a":1743154004209553,"name":"processing","f":1743154004197269,"d_finished":7233,"c":10,"l":1743154004209378,"d":8516},{"name":"ProduceResults","f":1743154004194150,"d_finished":3965,"c":13,"l":1743154004209989,"d":3965},{"a":1743154004209993,"name":"Finish","f":1743154004209993,"d_finished":0,"c":0,"l":1743154004210836,"d":843},{"name":"task_result","f":1743154004197293,"d_finished":5671,"c":9,"l":1743154004207770,"d":5671}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:44.211404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:44.191070Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13268;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13268;selected_rows=0; 2025-03-28T09:26:44.211453Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:44.211856Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestDiscoverClusters [GOOD] Test command err: 2025-03-28T09:26:34.087263Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789399200178010:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:34.087364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00190a/r3tmp/tmpCvZrdr/pdisk_1.dat 2025-03-28T09:26:34.543788Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:34.549126Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:34.549217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:34.552238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12845, node 1 2025-03-28T09:26:34.723260Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00190a/r3tmp/yandexhYU7ey.tmp 2025-03-28T09:26:34.723284Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00190a/r3tmp/yandexhYU7ey.tmp 2025-03-28T09:26:34.724787Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00190a/r3tmp/yandexhYU7ey.tmp 2025-03-28T09:26:34.724900Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12114 PQClient connected to localhost:12845 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:26:35.211959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:26:35.237648Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:26:35.267541Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:26:37.327442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789412085080612:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.327698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.328240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789412085080649:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:37.333156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:26:37.366883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789412085080651:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:26:37.622884Z node 1 :TX_PROXY ERROR: Actor# [1:7486789412085080717:2394] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:26:37.651826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:26:37.787386Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789412085080743:2346], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:26:37.787744Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTAwN2QzY2ItOGViMDEyNmYtOGUwYThmYjUtNThiZDZhYzg=, ActorId: [1:7486789412085080609:2330], ActorState: ExecuteState, TraceId: 01jqe169f14dp0981j748t17pv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:26:37.788884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:26:37.818418Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:26:37.875197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:26:38.291924Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqe16a3dbjtw5jvz5ttrfrhm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQ3MTFlYi1iNDdjOWE0Ni1hMzM3NGRlZi0zZjFjNDMzNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:39.090999Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789399200178010:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:39.091086Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:26:39.582367Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710668. Ctx: { TraceId: 01jqe16bfpez1daf5a79q6kg9j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjZjMzc0YzEtYWFmZTc1NTctOGEzM2VkZGMtYjFjMTY3MGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:39.589754Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jqe16bfpez1daf5a79q6kg9j, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjZjMzc0YzEtYWFmZTc1NTctOGEzM2VkZGMtYjFjMTY3MGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:40.794246Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jqe16cqr4rq3qx3r7g5cznw4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2U3NjEzYjYtZmI1MjM1NWQtYzkxZjhkMzctYjI3Y2NlM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:26:40.798525Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jqe16cqr4rq3qx3r7g5cznw4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2U3NjEzYjYtZmI1MjM1NWQtYzkxZjhkMzctYjI3Y2NlM2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:42.009967Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqe16dwa1jkyvwvpfb7a7erp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTEyM2EzNzMtNTA2OGEzNzQtZjdjYzI3MDYtYWRlYjAwZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:42.021550Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710677. Ctx: { TraceId: 01jqe16dwa1jkyvwvpfb7a7erp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTEyM2EzNzMtNTA2OGEzNzQtZjdjYzI3MDYtYWRlYjAwZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:43.404418Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710681. Ctx: { TraceId: 01jqe16f6tabrbtbszh390fjwq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmJiNWE0MTYtMmNiYjNkNzItYjgyNmIzNDctOTVhYThmZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:43.412164Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710682. Ctx: { TraceId: 01jqe16f6tabrbtbszh390fjwq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmJiNWE0MTYtMmNiYjNkNzItYjgyNmIzNDctOTVhYThmZjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] Test command err: 2025-03-28T09:26:38.826901Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:38.909741Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:38.934497Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:38.934820Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:38.942739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:38.942927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:38.943179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:38.943291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:38.943418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-03-28T09:26:38.943545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:38.943678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:38.943801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:38.943926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:38.944033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.944157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:38.944257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:38.975239Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:38.975446Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:38.975485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:38.975608Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.975759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:38.975830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:38.975862Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:38.975930Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:38.975974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:38.976000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 
2025-03-28T09:26:38.976020Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:38.976148Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.976187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:38.976211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:38.976232Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:38.976326Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:38.976362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:38.976397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:38.976417Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:38.976486Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:38.976515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:38.976535Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:38.976564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:38.976593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:38.976643Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:38.977041Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-03-28T09:26:38.977136Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=46; 2025-03-28T09:26:38.977231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=46; 
2025-03-28T09:26:38.977318Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-03-28T09:26:38.977511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:38.977588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:38.977628Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:38.977848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:38.977896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.977931Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.978095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:38.978146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:38.978193Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:38.978389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:38.978433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:38.978463Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:38.978599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:38.978643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:38.978733Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
timestamp[us] request_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:44.340523Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:26:44.340674Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-03-28T09:26:44.340774Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-03-28T09:26:44.340934Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:432:2447];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-03-28T09:26:44.341099Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:44.341253Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:44.341404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:44.341649Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:44.341817Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:44.341973Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:44.342036Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:433:2448] finished for tablet 9437184 2025-03-28T09:26:44.342579Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:432:2447];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.015},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.017}],"full":{"a":1743154004324862,"name":"_full_task","f":1743154004324862,"d_finished":0,"c":0,"l":1743154004342113,"d":17251},"events":[{"name":"bootstrap","f":1743154004325096,"d_finished":3531,"c":1,"l":1743154004328627,"d":3531},{"a":1743154004341624,"name":"ack","f":1743154004340183,"d_finished":1254,"c":1,"l":1743154004341437,"d":1743},{"a":1743154004341610,"name":"processing","f":1743154004330150,"d_finished":6771,"c":10,"l":1743154004341440,"d":7274},{"name":"ProduceResults","f":1743154004327018,"d_finished":3614,"c":13,"l":1743154004342017,"d":3614},{"a":1743154004342021,"name":"Finish","f":1743154004342021,"d_finished":0,"c":0,"l":1743154004342113,"d":92},{"name":"task_result","f":1743154004330170,"d_finished":5371,"c":9,"l":1743154004339983,"d":5371}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:44.342665Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:432:2447];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:44.343156Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:432:2447];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.015},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.017}],"full":{"a":1743154004324862,"name":"_full_task","f":1743154004324862,"d_finished":0,"c":0,"l":1743154004342735,"d":17873},"events":[{"name":"bootstrap","f":1743154004325096,"d_finished":3531,"c":1,"l":1743154004328627,"d":3531},{"a":1743154004341624,"name":"ack","f":1743154004340183,"d_finished":1254,"c":1,"l":1743154004341437,"d":2365},{"a":1743154004341610,"name":"processing","f":1743154004330150,"d_finished":6771,"c":10,"l":1743154004341440,"d":7896},{"name":"ProduceResults","f":1743154004327018,"d_finished":3614,"c":13,"l":1743154004342017,"d":3614},{"a":1743154004342021,"name":"Finish","f":1743154004342021,"d_finished":0,"c":0,"l":1743154004342735,"d":714},{"name":"task_result","f":1743154004330170,"d_finished":5371,"c":9,"l":1743154004339983,"d":5371}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:44.343258Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:44.324164Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-03-28T09:26:44.343331Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:44.343747Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TPQCDTest::TestRelatedServicesAreRunning >> TPersQueueTest::StreamReadManyUpdateTokenAndRead [GOOD] >> TPersQueueTest::SetupWriteSession >> TopicService::RelativePath [GOOD] >> TColumnShardTestReadWrite::ReadStale [GOOD] >> TColumnShardTestReadWrite::WriteStandalone 
[GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] Test command err: 2025-03-28T09:26:38.804454Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:38.897852Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:38.914255Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:38.914534Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:38.922828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:38.923049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:38.923285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:38.923435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:38.923552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:38.923695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:38.923810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:38.923914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:38.924063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:38.924179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.924285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:38.924407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:38.954127Z node 1 
:TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:38.954471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:38.954554Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:38.954779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.954984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:38.955073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:38.955122Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:38.955239Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:38.955323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:38.955369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:38.955412Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:38.955595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.955687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:38.955734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:38.955787Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:38.955958Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:38.956024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:38.956088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:38.956124Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:38.956212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:38.956251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:38.956283Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:38.956351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:38.956397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:38.956459Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:38.956846Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-03-28T09:26:38.956950Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=49; 2025-03-28T09:26:38.957068Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=54; 2025-03-28T09:26:38.957149Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-03-28T09:26:38.957336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:38.957395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:38.957430Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:38.957654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:38.957703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.957732Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.957898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-03-28T09:26:38.957947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:38.957979Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:38.958203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:38.958247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:38.958282Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:38.958427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:38.958479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:38.958531Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... :[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-03-28T09:26:44.660557Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; >> EvWrite::WriteInTransaction [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadStale [GOOD] Test command err: 2025-03-28T09:26:44.821852Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 
InFlightDataSize: 0 2025-03-28T09:26:44.915105Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:44.935535Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:44.935808Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:44.944420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:44.944660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:44.944902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:44.945040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:44.945149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:44.945286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:44.945419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:44.945536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:44.945649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:44.945760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.945868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:44.946009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:44.970028Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:44.970340Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:44.970404Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:44.970594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:44.970765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:44.970837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:44.970881Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:44.970971Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:44.971025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:44.971056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:44.971079Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:44.971218Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:44.971272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:44.971299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:44.971323Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:44.971453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:44.971494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:44.971529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:44.971547Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:44.971597Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:44.971632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:44.971657Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:44.971707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:44.971754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:44.971778Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:44.972081Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-03-28T09:26:44.972159Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-03-28T09:26:44.972255Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-03-28T09:26:44.972338Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-03-28T09:26:44.972493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:44.972542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:44.972575Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:44.972716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:44.972746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.972766Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.972909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:44.972953Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:44.972980Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:44.973134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:44.973162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:44.973181Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:44.973282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:44.973316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:44.973363Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... ock_id=1;broken=0; 2025-03-28T09:26:45.657550Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=77304; 2025-03-28T09:26:45.661310Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::c4771118-bb611f0-80d5cf8e-3f2ebcce; 2025-03-28T09:26:45.661383Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-03-28T09:26:45.661507Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:754;event=indexation;bytes=77304;blobs_count=9;max_limit=251658240;has_more=0;external_task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce; 2025-03-28T09:26:45.661739Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:620;event=start_changes;type=CS::INDEXATION;task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce; 2025-03-28T09:26:45.662142Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=69691;external_task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;type=CS::INDEXATION;priority=0;; 2025-03-28T09:26:45.662335Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=1;task=cpu=0;mem=69691;external_task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;type=CS::INDEXATION;priority=0;; 2025-03-28T09:26:45.662376Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;mem=69691;cpu=0; 2025-03-28T09:26:45.662535Z node 
1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;task_id=1;mem=69691;cpu=0; 2025-03-28T09:26:45.662669Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce; 2025-03-28T09:26:45.681531Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-03-28T09:26:45.681706Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-03-28T09:26:45.682303Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:239;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:26:45.682361Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:26:45.682451Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:769;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=77304;indexing_debug={task_ids=c4771118-bb611f0-80d5cf8e-3f2ebcce,;}; 2025-03-28T09:26:45.682561Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-28T09:26:45.682828Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:26:45.682883Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:26:45.682922Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:26:45.683009Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:26:45.683374Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 1 version: {640000:max} readable: {1000000:max} at tablet 9437184 2025-03-28T09:26:45.695678Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 1 at tablet 9437184 2025-03-28T09:26:45.695832Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=constructor.cpp:18;event=overriden_columns;ids=1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043; 2025-03-28T09:26:45.695906Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot build metadata with no ranges;details=Snapshot too old: {640000:max}. CS min read snapshot: {700000:max}. now: 2025-03-28T09:26:45.695886Z; 2025-03-28T09:26:45.713075Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:1:255:1:6824:0]; 2025-03-28T09:26:45.713189Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:1:255:2:6824:0]; 2025-03-28T09:26:45.722577Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-28T09:26:45.722892Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[8] (CS::INDEXATION) apply at tablet 9437184 2025-03-28T09:26:45.723967Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 7 2025-03-28T09:26:45.724116Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:26:45.724678Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {640000:max} readable: {1000000:max} at tablet 9437184 2025-03-28T09:26:45.742354Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-03-28T09:26:45.742429Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;fline=with_appended.cpp:65;portions=1,;task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce; 2025-03-28T09:26:45.742712Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::c4771118-bb611f0-80d5cf8e-3f2ebcce; 2025-03-28T09:26:45.742776Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:26:45.742846Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:26:45.742908Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-28T09:26:45.742976Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:26:45.743029Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:26:45.743072Z
node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:26:45.743156Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.993000s; 2025-03-28T09:26:45.743216Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:26:45.743382Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 7 2025-03-28T09:26:45.743585Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=c4771118-bb611f0-80d5cf8e-3f2ebcce;mem=69691;cpu=0; 2025-03-28T09:26:45.743730Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:26:45.743810Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:26:45.746507Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-03-28T09:26:45.746624Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2025-03-28T09:26:45.747567Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"5":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"1,6","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,6","t":"Projection"},"w":18,"id":0}}}; 2025-03-28T09:26:45.747678Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={640000:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot build metadata with no ranges;details=Snapshot too old: {640000:max}. CS min read snapshot: {700000:max}. now: 2025-03-28T09:26:45.747644Z;
>> TPersQueueTest::WhenDisableNodeAndCreateTopic_ThenAllPartitionsAreOnOtherNode [GOOD]
>> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteInTransaction [GOOD]
Test command err: 2025-03-28T09:26:44.889661Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:44.976321Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:44.995991Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:44.996299Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:45.004176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:45.004342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:45.004522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:45.004645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:45.004734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:45.004864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:45.004979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:45.005090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:45.005186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:45.005323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:45.005451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:45.005561Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:45.030064Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:45.030307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:45.030359Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:45.030557Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:45.030780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:45.030868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:45.030912Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:45.031038Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:45.031110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:45.031171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:45.031225Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:45.031397Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:45.031463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:45.031500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:45.031546Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:45.031696Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:45.031764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:45.031816Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:45.031850Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:45.031920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:45.031958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:45.031988Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:45.032038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:45.032074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:45.032121Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:45.032408Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=40; 2025-03-28T09:26:45.032480Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-03-28T09:26:45.032560Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-03-28T09:26:45.032660Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=52; 2025-03-28T09:26:45.032812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:45.032880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:45.032916Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:45.033080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:45.033113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:45.033137Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:45.033298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:45.033342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:45.033373Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:45.033552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:45.033593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:45.033619Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:45.033767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:45.033826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:45.033880Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:26:45.861986Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-28T09:26:45.862085Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-28T09:26:45.862142Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=1; 2025-03-28T09:26:45.862190Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=2048;merger=0;interval_id=1; 2025-03-28T09:26:45.862221Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-28T09:26:45.862283Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:45.862313Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=2048;finished=1; 2025-03-28T09:26:45.862346Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:26:45.862537Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:45.862671Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:2048;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:45.862726Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:26:45.862865Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=2048; 2025-03-28T09:26:45.862943Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=229376;num_rows=2048;batch_columns=key,field; 2025-03-28T09:26:45.863077Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:285:2303];bytes=229376;rows=2048;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-03-28T09:26:45.863194Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:45.863284Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:45.863401Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:45.863720Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:45.863848Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:45.863973Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:45.864036Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:289:2307] finished for tablet 9437184 2025-03-28T09:26:45.864392Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:285:2303];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.036},{"events":["l_ProduceResults","f_Finish"],"t":0.037},{"events":["l_ack","l_processing","l_Finish"],"t":0.038}],"full":{"a":1743154005826066,"name":"_full_task","f":1743154005826066,"d_finished":0,"c":0,"l":1743154005864090,"d":38024},"events":[{"name":"bootstrap","f":1743154005826357,"d_finished":3133,"c":1,"l":1743154005829490,"d":3133},{"a":1743154005863697,"name":"ack","f":1743154005862514,"d_finished":919,"c":1,"l":1743154005863433,"d":1312},{"a":1743154005863654,"name":"processing","f":1743154005829804,"d_finished":15156,"c":9,"l":1743154005863435,"d":15592},{"name":"ProduceResults","f":1743154005828256,"d_finished":2453,"c":12,"l":1743154005864019,"d":2453},{"a":1743154005864023,"name":"Finish","f":1743154005864023,"d_finished":0,"c":0,"l":1743154005864090,"d":67},{"name":"task_result","f":1743154005829827,"d_finished":14068,"c":8,"l":1743154005862389,"d":14068}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:45.864442Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:285:2303];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:45.864721Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:285:2303];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.036},{"events":["l_ProduceResults","f_Finish"],"t":0.037},{"events":["l_ack","l_processing","l_Finish"],"t":0.038}],"full":{"a":1743154005826066,"name":"_full_task","f":1743154005826066,"d_finished":0,"c":0,"l":1743154005864471,"d":38405},"events":[{"name":"bootstrap","f":1743154005826357,"d_finished":3133,"c":1,"l":1743154005829490,"d":3133},{"a":1743154005863697,"name":"ack","f":1743154005862514,"d_finished":919,"c":1,"l":1743154005863433,"d":1693},{"a":1743154005863654,"name":"processing","f":1743154005829804,"d_finished":15156,"c":9,"l":1743154005863435,"d":15973},{"name":"ProduceResults","f":1743154005828256,"d_finished":2453,"c":12,"l":1743154005864019,"d":2453},{"a":1743154005864023,"name":"Finish","f":1743154005864023,"d_finished":0,"c":0,"l":1743154005864471,"d":448},{"name":"task_result","f":1743154005829827,"d_finished":14068,"c":8,"l":1743154005862389,"d":14068}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:26:45.864809Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:45.825590Z;index_granules=0;index_portions=1;index_batches=82;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=238056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=238056;selected_rows=0; 2025-03-28T09:26:45.864851Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:45.865028Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:289:2307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;
>> YdbTableSplit::SplitByLoadWithDeletes [GOOD]
>> TColumnShardTestReadWrite::WriteExoticTypes
>> Normalizers::SchemaVersionsNormalizer
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandalone [GOOD]
Test command err: 2025-03-28T09:26:39.677193Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:39.769076Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:39.789896Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-28T09:26:39.790184Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:39.797788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:39.798003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:39.798243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:39.798385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:39.798488Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:39.798623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:39.798864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:39.798978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:39.799106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:39.799238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:39.799345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:39.799446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:39.846710Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:39.847096Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:39.847160Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:39.847358Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:39.847539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:39.847630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:39.847699Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:39.847801Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:39.847873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:39.847923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:39.847955Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:39.848125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:39.848212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:39.848264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:39.848297Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:39.848435Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:39.848491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:39.848529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:39.848556Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:39.848667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:39.848708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:39.848738Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:39.848784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:39.848819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:39.848847Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:39.849241Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-03-28T09:26:39.849354Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=57; 2025-03-28T09:26:39.849463Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=57; 2025-03-28T09:26:39.849555Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-03-28T09:26:39.849728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:39.849783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:39.849814Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:39.850015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:39.850063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:39.850094Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:39.850243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:39.850288Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:39.850320Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:39.850510Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:39.850582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:39.850614Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:39.851301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:39.851368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:39.851410Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... :[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-03-28T09:26:45.519132Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; >> TColumnShardTestReadWrite::CompactionGCFailingBs >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime >> TColumnShardTestReadWrite::CompactionInGranule_PKString >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] >> TopicService::AccessRights >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] >> TColumnShardTestReadWrite::CompactionGC ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogExistingUserId [GOOD] Test command err: 2025-03-28T09:24:41.218644Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788910978748276:2235];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:41.219021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013de/r3tmp/tmpik4tgl/pdisk_1.dat 2025-03-28T09:24:41.735484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:41.735622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:41.739958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:41.755300Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11323, node 1 2025-03-28T09:24:41.899522Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:24:41.902922Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:24:42.171095Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:42.171121Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:42.171128Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:42.171253Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6898 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:42.547505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:6898 2025-03-28T09:24:42.879061Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:24:43.051255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788915273716485:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:24:43.051554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788915273716485:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:24:43.052021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788915273716485:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:24:43.052263Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788915273716485:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:24:43.052429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788915273716485:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:24:43.052550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788915273716485:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:24:43.052637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788915273716485:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:24:43.052735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788915273716485:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:24:43.052861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788915273716485:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:24:43.053013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788915273716485:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:24:43.053179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788915273716485:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:24:43.053369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486788915273716485:2324];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:24:43.084941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788915273716510:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:24:43.085006Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7486788915273716510:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:24:43.085249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788915273716510:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:24:43.085383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788915273716510:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:24:43.085506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788915273716510:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:24:43.085622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788915273716510:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:24:43.085737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788915273716510:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:24:43.085863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788915273716510:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:24:43.085985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788915273716510:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:24:43.086112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788915273716510:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:24:43.086239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788915273716510:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:24:43.086349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486788915273716510:2327];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:24:43.123735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788915273716487:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:24:43.123808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788915273716487:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:24:43.124107Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486788915273716487:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:24:43.124277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788915273716487:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:24:43.124390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788915273716487:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:24:43.124518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788915273716487:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:24:43.124633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788915273716487:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:24:43.124735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486788915273716487:2325];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:24:43.124854Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037 ... actor: [28:7486789428786273343:3586], task: 65, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 11374 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 659 FinishTimeMs: 1743154002417 InputRows: 1 InputBytes: 310 OutputRows: 1 OutputBytes: 310 ResultRows: 1 ResultBytes: 310 ComputeCpuTimeUs: 287 BuildCpuTimeUs: 372 HostName: "ghrun-mdsjyjltvm" NodeId: 28 CreateTimeMs: 1743154001513 } MaxMemoryUsage: 1048576 } 2025-03-28T09:26:42.418942Z node 28 :KQP_EXECUTER INFO: TxId: 281474976710670. Ctx: { TraceId: 01jqe16cvz752zjd18yg81wxyv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=MTJjY2ExMTUtNWY2ZDkxNDAtZTJlZjIwNjYtMWMwNDQ1MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [28:7486789428786273343:3586] 2025-03-28T09:26:42.422980Z node 28 :KQP_EXECUTER INFO: ActorId: [28:7486789428786273257:3516] TxId: 281474976710670. Ctx: { TraceId: 01jqe16cvz752zjd18yg81wxyv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=MTJjY2ExMTUtNWY2ZDkxNDAtZTJlZjIwNjYtMWMwNDQ1MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Full stats: { CpuTimeUs: 494990 DurationUs: 1003001 Tables { TablePath: "/Root/OlapStore/log1" ReadRows: 50 ReadBytes: 16000 } ExecuterCpuTimeUs: 259927 StartTimeMs: 1743154001416 FinishTimeMs: 1743154002419 Stages { StageGuid: "e73d0299-3df0c964-2dcd21f0-dcbda1c2" Program: "(\n(return (lambda \'() (block \'(\n (let $1 (KqpTable \'\"/Root/OlapStore/log1\" \'\"72057594046644480:3\" \'\"\" \'1))\n (let $2 \'(\'\"json_payload\" \'\"level\" \'\"message\" \'\"resource_id\" \'\"resource_type\" \'\"timestamp\" \'\"uid\"))\n (let $3 (KqpWideReadOlapTableRanges $1 (Void) $2 \'() \'() (lambda \'($6) $6)))\n (let $4 (Bool \'false))\n (let $5 \'(\'(\'5 $4) \'(\'4 $4) \'(\'3 $4) \'(\'6 $4)))\n (return (FromFlow (WideTopSort (WideFilter $3 (lambda \'($7 $8 $9 $10 $11 $12 $13) (block \'(\n (let $14 (DataType \'Utf8))\n (let $15 (OptionalType $14))\n (let $16 \'((ResourceType \'\"JsonPath\")))\n (let $17 (DictType $14 (ResourceType \'\"JsonNode\")))\n (let $18 (CallableType \'() \'((VariantType (TupleType (TupleType (DataType \'Uint8) (DataType \'String)) $15))) \'((OptionalType (DataType \'JsonDocument))) $16 \'($17)))\n (let $19 (Udf \'\"Json2.JsonDocumentSqlValueConvertToUtf8\" (Void) (VoidType) \'\"\" $18 (VoidType) \'\"\" \'(\'(\'\"strict\"))))\n (let $20 (CallableType \'() $16 \'($14)))\n (let $21 (Udf \'\"Json2.CompilePath\" (Void) (VoidType) \'\"\" $20 (VoidType) \'\"\" \'()))\n (let $22 (Apply $19 $7 (Apply $21 (Utf8 \'\"$.auth.user.id\")) (Dict $17)))\n (let $23 (Visit $22 \'0 (lambda \'($24) (Nothing $15)) \'1 (lambda \'($25) $25)))\n (return (Coalesce (== $23 (String \'\"1000042\")) $4))\n )))) (Uint64 \'50) $5)))\n))))\n)\n" ComputeActors { CpuTimeUs: 3576 Tasks { TaskId: 60 CpuTimeUs: 1480 FinishTimeMs: 1743154002381 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 75 BuildCpuTimeUs: 1405 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-mdsjyjltvm" NodeId: 28 CreateTimeMs: 1743154001499 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743154002318 } Stages { StageId: 1 StageGuid: "c145c782-4030b7e5-d35ecba5-f2c2a568" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'50)) (lambda \'($2 $3 $4 $5 $6 $7 $8) (AsStruct \'(\'\"json_payload\" $2) \'(\'\"level\" $3) \'(\'\"message\" $4) \'(\'\"resource_id\" $5) \'(\'\"resource_type\" $6) \'(\'\"timestamp\" $7) \'(\'\"uid\" $8)))))))\n)\n" ComputeActors { CpuTimeUs: 11374 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 659 FinishTimeMs: 1743154002417 InputRows: 1 InputBytes: 310 OutputRows: 1 OutputBytes: 310 ResultRows: 1 ResultBytes: 310 ComputeCpuTimeUs: 287 BuildCpuTimeUs: 372 HostName: "ghrun-mdsjyjltvm" NodeId: 28 CreateTimeMs: 1743154001513 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743154002318 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":5,\"Plans\":[{\"Node Type\":\"ResultSet\",\"PlanNodeId\":4,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":2}],\"Limit\":\"50\",\"Name\":\"Limit\"}],\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":2,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"TopSort-Filter-TableFullScan\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Limit\":\"50\",\"Name\":\"TopSort\",\"TopSortBy\":\"[row.timestamp,row.resource_type,row.resource_id,row.uid]\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[{\"InternalOperatorId\":2}],\"Name\":\"Filter\",\"Predicate\":\"Visit == \\\"1000042\\\"\"},{\"E-Cost\":\"No 
estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/OlapStore\\/log1\",\"ReadColumns\":[\"json_payload\",\"level\",\"message\",\"resource_id\",\"resource_type\",\"timestamp\",\"uid\"],\"ReadRanges\":[\"timestamp (-∞, +∞)\",\"resource_type (-∞, +∞)\",\"resource_id (-∞, +∞)\",\"uid (-∞, +∞)\"],\"Scan\":\"Parallel\",\"SsaProgram\":{\"Command\":[{\"Projection\":{\"Columns\":[{\"Id\":2},{\"Id\":7},{\"Id\":1},{\"Id\":3},{\"Id\":6},{\"Id\":5},{\"Id\":4}]}}],\"Version\":5},\"Table\":\"OlapStore\\/log1\"}],\"PlanNodeId\":1,\"StageGuid\":\"e73d0299-3df0c964-2dcd21f0-dcbda1c2\",\"Stats\":{\"BaseTimeMs\":1743154002318,\"ComputeNodes\":[{\"CpuTimeUs\":3576,\"Tasks\":[{\"ComputeTimeUs\":75,\"FinishTimeMs\":1743154002381,\"Host\":\"ghrun-mdsjyjltvm\",\"NodeId\":28,\"TaskId\":60}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"OlapStore\\/log1\"]}],\"SortColumns\":[\"timestamp (Desc)\",\"resource_type (Desc)\",\"resource_id (Desc)\",\"uid (Desc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"c145c782-4030b7e5-d35ecba5-f2c2a568\",\"Stats\":{\"BaseTimeMs\":1743154002318,\"ComputeNodes\":[{\"CpuTimeUs\":11374,\"Tasks\":[{\"ComputeTimeUs\":287,\"FinishTimeMs\":1743154002417,\"Host\":\"ghrun-mdsjyjltvm\",\"InputBytes\":310,\"InputRows\":1,\"NodeId\":28,\"OutputBytes\":310,\"OutputRows\":1,\"ResultBytes\":310,\"ResultRows\":1,\"TaskId\":65}]}],\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 2135 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\002\022\014\010\206\023\020\356X\030\267\254\016 A" } } 2025-03-28T09:26:42.422989Z node 28 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710670;task_id=65;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:26:42.423109Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7486789428786273257:3516] TxId: 281474976710670. Ctx: { TraceId: 01jqe16cvz752zjd18yg81wxyv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=MTJjY2ExMTUtNWY2ZDkxNDAtZTJlZjIwNjYtMWMwNDQ1MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:26:42.423221Z node 28 :KQP_EXECUTER DEBUG: ActorId: [28:7486789428786273257:3516] TxId: 281474976710670. Ctx: { TraceId: 01jqe16cvz752zjd18yg81wxyv, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=28&id=MTJjY2ExMTUtNWY2ZDkxNDAtZTJlZjIwNjYtMWMwNDQ1MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.083331s ReadRows: 50 ReadBytes: 16000 ru: 55 rate limiter was not found force flag: 1 2025-03-28T09:26:42.423346Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=MTJjY2ExMTUtNWY2ZDkxNDAtZTJlZjIwNjYtMWMwNDQ1MA==, ActorId: [28:7486789424491305918:3516], ActorState: ExecuteState, TraceId: 01jqe16cvz752zjd18yg81wxyv, TEvTxResponse, CurrentTx: 1/1 response.status: SUCCESS 2025-03-28T09:26:42.423360Z node 28 :KQP_COMPUTE DEBUG: SelfId: [28:7486789428786273343:3586], TxId: 281474976710670, task: 65. Ctx: { SessionId : ydb://session/3?node_id=28&id=MTJjY2ExMTUtNWY2ZDkxNDAtZTJlZjIwNjYtMWMwNDQ1MA==. CustomerSuppliedId : . TraceId : 01jqe16cvz752zjd18yg81wxyv. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Send stats to executor actor [28:7486789428786273257:3516] TaskId: 65 Stats: CpuTimeUs: 11374 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 659 FinishTimeMs: 1743154002417 InputRows: 1 InputBytes: 310 OutputRows: 1 OutputBytes: 310 ResultRows: 1 ResultBytes: 310 ComputeCpuTimeUs: 287 BuildCpuTimeUs: 372 HostName: "ghrun-mdsjyjltvm" NodeId: 28 CreateTimeMs: 1743154001513 } MaxMemoryUsage: 1048576 2025-03-28T09:26:42.423980Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=MTJjY2ExMTUtNWY2ZDkxNDAtZTJlZjIwNjYtMWMwNDQ1MA==, ActorId: [28:7486789424491305918:3516], ActorState: ExecuteState, TraceId: 01jqe16cvz752zjd18yg81wxyv, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 1015.015 QueriesCount: 1 2025-03-28T09:26:42.424077Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=MTJjY2ExMTUtNWY2ZDkxNDAtZTJlZjIwNjYtMWMwNDQ1MA==, ActorId: [28:7486789424491305918:3516], ActorState: ExecuteState, TraceId: 01jqe16cvz752zjd18yg81wxyv, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-28T09:26:42.424202Z node 28 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=28&id=MTJjY2ExMTUtNWY2ZDkxNDAtZTJlZjIwNjYtMWMwNDQ1MA==, ActorId: [28:7486789424491305918:3516], ActorState: ExecuteState, TraceId: 01jqe16cvz752zjd18yg81wxyv, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:26:42.424268Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=MTJjY2ExMTUtNWY2ZDkxNDAtZTJlZjIwNjYtMWMwNDQ1MA==, ActorId: [28:7486789424491305918:3516], ActorState: ExecuteState, TraceId: 01jqe16cvz752zjd18yg81wxyv, EndCleanup, isFinal: 1 2025-03-28T09:26:42.424361Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=MTJjY2ExMTUtNWY2ZDkxNDAtZTJlZjIwNjYtMWMwNDQ1MA==, ActorId: [28:7486789424491305918:3516], ActorState: ExecuteState, TraceId: 01jqe16cvz752zjd18yg81wxyv, Sent query response back to 
proxy, proxyRequestId: 5, proxyId: [28:7486789372951694068:2280] 2025-03-28T09:26:42.424410Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=MTJjY2ExMTUtNWY2ZDkxNDAtZTJlZjIwNjYtMWMwNDQ1MA==, ActorId: [28:7486789424491305918:3516], ActorState: unknown state, TraceId: 01jqe16cvz752zjd18yg81wxyv, Cleanup temp tables: 0 2025-03-28T09:26:42.427760Z node 28 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154001001, txId: 18446744073709551615] shutting down 2025-03-28T09:26:42.427939Z node 28 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=28&id=MTJjY2ExMTUtNWY2ZDkxNDAtZTJlZjIwNjYtMWMwNDQ1MA==, ActorId: [28:7486789424491305918:3516], ActorState: unknown state, TraceId: 01jqe16cvz752zjd18yg81wxyv, Session actor destroyed RESULT: [[42000u;"nginx";"resource_6";"19";[2];["message"];["{\"auth\":{\"org_id\":7704,\"service\":{\"internal\":\"false\",\"ip\":\"258.258.258.258\"},\"type\":\"token\",\"user\":{\"id\":1000042,\"ip\":\"257.257.257.257\",\"is_cloud\":\"false\"}}}"]]] --------------------- STATS: total CPU: 1455 duration: 1003001 usec cpu: 494990 usec { name: "/Root/OlapStore/log1" reads { rows: 50 bytes: 16000 } } 2025-03-28T09:26:42.457151Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[28:7486789381541629538:2328];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-28T09:26:42.549913Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[28:7486789381541629534:2326];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-28T09:26:42.555058Z node 28 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[28:7486789381541629531:2325];fline=actor.cpp:33;event=skip_flush_writing; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] Test command err: 2025-03-28T09:26:38.854131Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:38.954144Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:38.978586Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:38.978976Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:38.988956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:38.989163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:38.989393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:38.989516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:38.989673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 
2025-03-28T09:26:38.989814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:38.989920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:38.990053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:38.990167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:38.990275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.990415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:38.990520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:39.013002Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:39.013221Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:39.013274Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:39.013424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:39.013560Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:39.013627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:39.013659Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:39.013733Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:39.013780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:39.013821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 
2025-03-28T09:26:39.013848Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:39.013962Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:39.014006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:39.014030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:39.014048Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:39.014165Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:39.014220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:39.014256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:39.014314Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:39.014385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:39.014423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:39.014457Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:39.014505Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:39.014545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:39.014593Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:39.015006Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-03-28T09:26:39.015107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2025-03-28T09:26:39.015174Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 
2025-03-28T09:26:39.015236Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=27; 2025-03-28T09:26:39.015395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:39.015456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:39.015487Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:39.015623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:39.015677Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:39.015699Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:39.015806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:39.015846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:39.015887Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:39.016032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:39.016066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:39.016084Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:39.016195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:39.016230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:39.016265Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
quest_id: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:46.816973Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:26:46.817086Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-03-28T09:26:46.817175Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-03-28T09:26:46.817308Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1064:2935];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-03-28T09:26:46.817431Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:46.817513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:46.817625Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:46.817804Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:46.817914Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:46.818027Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:46.818065Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1065:2936] finished for tablet 9437184 2025-03-28T09:26:46.818459Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1064:2935];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.012}],"full":{"a":1743154006805847,"name":"_full_task","f":1743154006805847,"d_finished":0,"c":0,"l":1743154006818104,"d":12257},"events":[{"name":"bootstrap","f":1743154006806011,"d_finished":2450,"c":1,"l":1743154006808461,"d":2450},{"a":1743154006817782,"name":"ack","f":1743154006816716,"d_finished":936,"c":1,"l":1743154006817652,"d":1258},{"a":1743154006817767,"name":"processing","f":1743154006809462,"d_finished":4847,"c":10,"l":1743154006817654,"d":5184},{"name":"ProduceResults","f":1743154006807388,"d_finished":2668,"c":13,"l":1743154006818051,"d":2668},{"a":1743154006818054,"name":"Finish","f":1743154006818054,"d_finished":0,"c":0,"l":1743154006818104,"d":50},{"name":"task_result","f":1743154006809478,"d_finished":3790,"c":9,"l":1743154006816568,"d":3790}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:46.818537Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1064:2935];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:46.818884Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1064:2935];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.012}],"full":{"a":1743154006805847,"name":"_full_task","f":1743154006805847,"d_finished":0,"c":0,"l":1743154006818575,"d":12728},"events":[{"name":"bootstrap","f":1743154006806011,"d_finished":2450,"c":1,"l":1743154006808461,"d":2450},{"a":1743154006817782,"name":"ack","f":1743154006816716,"d_finished":936,"c":1,"l":1743154006817652,"d":1729},{"a":1743154006817767,"name":"processing","f":1743154006809462,"d_finished":4847,"c":10,"l":1743154006817654,"d":5655},{"name":"ProduceResults","f":1743154006807388,"d_finished":2668,"c":13,"l":1743154006818051,"d":2668},{"a":1743154006818054,"name":"Finish","f":1743154006818054,"d_finished":0,"c":0,"l":1743154006818575,"d":521},{"name":"task_result","f":1743154006809478,"d_finished":3790,"c":9,"l":1743154006816568,"d":3790}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:46.818942Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:46.805336Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-03-28T09:26:46.818981Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:46.819230Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] Test command err: 2025-03-28T09:26:19.534823Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789332520733071:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:19.535411Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001554/r3tmp/tmpda87Pi/pdisk_1.dat 2025-03-28T09:26:19.968786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:19.969185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:19.975056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:26:20.007930Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61813, node 1 2025-03-28T09:26:20.046085Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:26:20.046108Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:26:20.203430Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:26:20.203457Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:26:20.203467Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:26:20.203618Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4121 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:26:20.804716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:4121 2025-03-28T09:26:22.873238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789345405635875:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:22.873358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:23.743776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:26:23.971031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789349700603366:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:23.971098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:23.995682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153983914 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153983914 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-28T09:26:24.226120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789353995570776:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.226193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.232633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789353995570785:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.232721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.232775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789353995570781:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.233154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789353995570792:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.233175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789353995570788:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.236165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789353995570802:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.236218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789353995570800:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.236456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.245289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789353995570837:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.245415Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.245826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789353995570841:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.246734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:26:24.246965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710662:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:26:24.246997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710662:1, at schemeshard: 72057594046644480 2025-03-28T09:26:24.247108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710662:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:26:24.247127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710662:2, at schemeshard: 72057594046644480 2025-03-28T09:26:24.247211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710662:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:26:24.247263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710662:3, path# /Root/.metadata/workload_manager/pools/default 2025-03-28T09:26:24.247572Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change ... set, use /Root 2025-03-28T09:26:44.088150Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723080. Ctx: { TraceId: 01jqe16g3ncz1377pzzm3azmvj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTY3ZmU3YTUtZGQyMjY2ZjctZGU3MzRjM2EtZDZjZmRjNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.088151Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723079. Ctx: { TraceId: 01jqe16g3nen6abtf4eyy1ftgs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM3OTI5MzItMWRmMWUzZTgtNGU1NzlhM2ItZGQyMDk3Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.089739Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723081. Ctx: { TraceId: 01jqe16g3nbqf5yr3151ds849n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg3MGEzZWItMjk2YTBmZTktYmVkOGI0ZmEtN2E5ZTgzMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.090191Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723082. Ctx: { TraceId: 01jqe16g3kbkvjfpxytz9ken00, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTliM2I4MTUtNjkwY2JhOWMtMWQ2ZGRiMDQtZDEzN2FhYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.091052Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723083. Ctx: { TraceId: 01jqe16g3n41b05d30a681x41r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjM1ZWNlYjUtMTUyODlkMDItYzA4ZGQ0NmEtMjJlYTVhMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.091367Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723084. Ctx: { TraceId: 01jqe16g3p5h2vpw053vvhbtbs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGFjMDY5NDUtMmRiMDc3ZDktOWY5MjBhMjgtMWJhYzM1N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.095754Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723085. 
Ctx: { TraceId: 01jqe16g3r7weze1n413bppjg3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTMzM2ZhZmQtMWIzMjYwYzQtNTgyZTZjYTEtOWRlMTJiZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.097021Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723087. Ctx: { TraceId: 01jqe16g3v1fm4djv8534n33vh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmFjYzQyMjItOWFiZGZlMmYtYjNjZGRjM2EtMjZjMTZmYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.097021Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723086. Ctx: { TraceId: 01jqe16g3v4s680mz5cqpw2jy5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWEwMDk2MzktODlmNDczYTMtYmMwMjYzOWEtMWQxNmI3ZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.097453Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723088. Ctx: { TraceId: 01jqe16g3v717c0w56ggpgwk5p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTU2OTc0MTAtZjkzOGE2MGUtNzRiZDA1MWUtOTMyNGJmYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.107549Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723089. Ctx: { TraceId: 01jqe16g481sxc6529fqqrywbn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM3OTI5MzItMWRmMWUzZTgtNGU1NzlhM2ItZGQyMDk3Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.115344Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723093. Ctx: { TraceId: 01jqe16g4fe2eq20yr6nzx6b4f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTU2OTc0MTAtZjkzOGE2MGUtNzRiZDA1MWUtOTMyNGJmYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.117630Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723091. Ctx: { TraceId: 01jqe16g4f2m122wbtxyssdn2s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTMzM2ZhZmQtMWIzMjYwYzQtNTgyZTZjYTEtOWRlMTJiZjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.117793Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723095. Ctx: { TraceId: 01jqe16g4f0e7jjpeazjad6mvg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTliM2I4MTUtNjkwY2JhOWMtMWQ2ZGRiMDQtZDEzN2FhYzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.118477Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723090. Ctx: { TraceId: 01jqe16g4ef3r6sjrjqbmq51n1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTY3ZmU3YTUtZGQyMjY2ZjctZGU3MzRjM2EtZDZjZmRjNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.118904Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723098. Ctx: { TraceId: 01jqe16g4ffv75yr09h3dha7jf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg3MGEzZWItMjk2YTBmZTktYmVkOGI0ZmEtN2E5ZTgzMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.119281Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723092. 
Ctx: { TraceId: 01jqe16g4f90038qfktks53dmz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGFjMDY5NDUtMmRiMDc3ZDktOWY5MjBhMjgtMWJhYzM1N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.119651Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723094. Ctx: { TraceId: 01jqe16g4fddxngypsvtyermre, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWEwMDk2MzktODlmNDczYTMtYmMwMjYzOWEtMWQxNmI3ZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.120068Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723096. Ctx: { TraceId: 01jqe16g4f3ybwzhvm0yqk00z3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmFjYzQyMjItOWFiZGZlMmYtYjNjZGRjM2EtMjZjMTZmYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.120448Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723097. Ctx: { TraceId: 01jqe16g4f76ebtgvy4k8z2tdy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjM1ZWNlYjUtMTUyODlkMDItYzA4ZGQ0NmEtMjJlYTVhMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.132717Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723099. Ctx: { TraceId: 01jqe16g4yd5xtk4ccwpw6yzy3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM3OTI5MzItMWRmMWUzZTgtNGU1NzlhM2ItZGQyMDk3Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.137539Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723100. Ctx: { TraceId: 01jqe16g562a710pdh22pycvxc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTY3ZmU3YTUtZGQyMjY2ZjctZGU3MzRjM2EtZDZjZmRjNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-03-28T09:26:44.143460Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723101. Ctx: { TraceId: 01jqe16g5b7hznm5535a3h591v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGFjMDY5NDUtMmRiMDc3ZDktOWY5MjBhMjgtMWJhYzM1N2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.145198Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723102. Ctx: { TraceId: 01jqe16g5b79rbq4n35e9xqnpp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjM1ZWNlYjUtMTUyODlkMDItYzA4ZGQ0NmEtMjJlYTVhMDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.145697Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723103. Ctx: { TraceId: 01jqe16g5c0ewr4m8qvaj7tqt3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTg3MGEzZWItMjk2YTBmZTktYmVkOGI0ZmEtN2E5ZTgzMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.148845Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976723104. Ctx: { TraceId: 01jqe16g5j1963m8zzr0anvc0t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzM3OTI5MzItMWRmMWUzZTgtNGU1NzlhM2ItZGQyMDk3Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153983914 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153983914 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-28T09:26:44.525085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037889 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 31936 rowCount 355 cpuUsage 0 2025-03-28T09:26:44.529578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Ready' dataSize 41472 rowCount 472 cpuUsage 0 2025-03-28T09:26:44.625766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-03-28T09:26:44.625955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 355, DataSize 31936 2025-03-28T09:26:44.626091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 472, DataSize 41472 2025-03-28T09:26:44.626260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 Table has 2 shards ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] Test command err: 2025-03-28T09:26:38.639397Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:38.750760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:38.777027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:38.777400Z node 1 :TX_COLUMNSHARD DEBUG: 
TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:38.786124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:38.786348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:38.786616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:38.786785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:38.786905Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:38.787040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:38.787177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:38.787316Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:38.787433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:38.787554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.787689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:38.787801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:38.827493Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:38.827907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:38.827990Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:38.828203Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 
2025-03-28T09:26:38.828395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:38.828493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:38.828550Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:38.828671Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:38.828748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:38.828793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:38.828837Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:38.829002Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.829064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:38.829104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:38.829134Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:38.829301Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:38.829378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:38.829491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:38.829530Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:38.829631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:38.829678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:38.829710Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:38.829766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:38.829807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:38.829839Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:38.830288Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=56; 2025-03-28T09:26:38.830406Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=53; 2025-03-28T09:26:38.830507Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-03-28T09:26:38.830601Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-03-28T09:26:38.830805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:38.830873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:38.830912Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:38.831145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:38.831196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.831229Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.831398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:38.831467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:38.831509Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:38.831733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 
2025-03-28T09:26:38.831784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:38.831815Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:38.831975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:38.832026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:38.832075Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:47.020613Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:26:47.020776Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-03-28T09:26:47.020872Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-03-28T09:26:47.021053Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1064:2935];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-03-28T09:26:47.021231Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:47.021362Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:47.021504Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:47.021745Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:47.021906Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:47.022054Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:47.022098Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1065:2936] finished for tablet 9437184 2025-03-28T09:26:47.022702Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1064:2935];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.016}],"full":{"a":1743154007005680,"name":"_full_task","f":1743154007005680,"d_finished":0,"c":0,"l":1743154007022162,"d":16482},"events":[{"name":"bootstrap","f":1743154007005899,"d_finished":3515,"c":1,"l":1743154007009414,"d":3515},{"a":1743154007021717,"name":"ack","f":1743154007020249,"d_finished":1283,"c":1,"l":1743154007021532,"d":1728},{"a":1743154007021699,"name":"processing","f":1743154007010829,"d_finished":6491,"c":10,"l":1743154007021534,"d":6954},{"name":"ProduceResults","f":1743154007007841,"d_finished":3438,"c":13,"l":1743154007022080,"d":3438},{"a":1743154007022083,"name":"Finish","f":1743154007022083,"d_finished":0,"c":0,"l":1743154007022162,"d":79},{"name":"task_result","f":1743154007010853,"d_finished":5055,"c":9,"l":1743154007020045,"d":5055}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:47.022796Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1064:2935];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:47.023280Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1064:2935];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.014},{"events":["l_ProduceResults","f_Finish"],"t":0.016},{"events":["l_ack","l_processing","l_Finish"],"t":0.017}],"full":{"a":1743154007005680,"name":"_full_task","f":1743154007005680,"d_finished":0,"c":0,"l":1743154007022850,"d":17170},"events":[{"name":"bootstrap","f":1743154007005899,"d_finished":3515,"c":1,"l":1743154007009414,"d":3515},{"a":1743154007021717,"name":"ack","f":1743154007020249,"d_finished":1283,"c":1,"l":1743154007021532,"d":2416},{"a":1743154007021699,"name":"processing","f":1743154007010829,"d_finished":6491,"c":10,"l":1743154007021534,"d":7642},{"name":"ProduceResults","f":1743154007007841,"d_finished":3438,"c":13,"l":1743154007022080,"d":3438},{"a":1743154007022083,"name":"Finish","f":1743154007022083,"d_finished":0,"c":0,"l":1743154007022850,"d":767},{"name":"task_result","f":1743154007010853,"d_finished":5055,"c":9,"l":1743154007020045,"d":5055}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:47.023364Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:47.005038Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-03-28T09:26:47.023412Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:47.023806Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> Normalizers::EmptyTablesNormalizer >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] >> 
TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot >> TFstClassSrcIdPQTest::TestTableCreated [GOOD] >> TFstClassSrcIdPQTest::NoMapping >> TColumnShardTestReadWrite::ReadSomePrograms >> TColumnShardTestReadWrite::ReadAggregate [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot >> TColumnShardTestReadWrite::WriteStandaloneOverload >> TColumnShardTestReadWrite::WriteReadExoticTypes >> TColumnShardTestReadWrite::Write [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2025-03-28T09:26:45.332992Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:45.441877Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:45.467748Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:45.468089Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:45.476507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:45.476740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:45.476967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:45.477078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:45.477233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:45.477346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:45.477471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:45.477595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:45.477704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:45.477824Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:45.477937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:45.478039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:45.508875Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:45.509223Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:45.509279Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:45.509433Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:45.509544Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:45.509606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:45.509635Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:45.509726Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:45.509774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:45.509853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:45.509875Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:45.509996Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:45.510036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:45.510060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:45.510076Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:45.510133Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:45.510163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:45.510197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:45.510219Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:45.510266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:45.510292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:45.510311Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:45.510339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:45.510362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:45.510379Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:45.510733Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=61; 2025-03-28T09:26:45.510800Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=23; 2025-03-28T09:26:45.510862Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=31; 2025-03-28T09:26:45.510913Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=23; 2025-03-28T09:26:45.511053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:45.511103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:45.511131Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:45.511297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-03-28T09:26:45.511348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:45.511369Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:45.511522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:45.511574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:45.511613Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:45.511773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:45.511823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:45.511851Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:45.511980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:45.512018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:45.512060Z node 1 :TX_COLUMNSHARD INFO: tablet_i ... 
2025-03-28T09:26:48.843686Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:26:48.843948Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-28T09:26:48.843988Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-28T09:26:48.844027Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=76; 2025-03-28T09:26:48.844074Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1;merger=0;interval_id=76; 2025-03-28T09:26:48.844115Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-28T09:26:48.844210Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-28T09:26:48.844257Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-03-28T09:26:48.844309Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:26:48.844835Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:48.844981Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-28T09:26:48.845020Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:26:48.845121Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;);columns=4;rows=1; 2025-03-28T09:26:48.845191Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=26;num_rows=1;batch_columns=100,101,102,103; 2025-03-28T09:26:48.845302Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[2:436:2454];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-03-28T09:26:48.845445Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-28T09:26:48.845566Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-28T09:26:48.845656Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-28T09:26:48.845956Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:48.846071Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-28T09:26:48.846170Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-28T09:26:48.846207Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: Scan [2:437:2455] finished for tablet 9437184 2025-03-28T09:26:48.846671Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[2:436:2454];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.014}],"full":{"a":1743154008831864,"name":"_full_task","f":1743154008831864,"d_finished":0,"c":0,"l":1743154008846261,"d":14397},"events":[{"name":"bootstrap","f":1743154008832093,"d_finished":2406,"c":1,"l":1743154008834499,"d":2406},{"a":1743154008845932,"name":"ack","f":1743154008844810,"d_finished":871,"c":1,"l":1743154008845681,"d":1200},{"a":1743154008845917,"name":"processing","f":1743154008834601,"d_finished":6846,"c":10,"l":1743154008845683,"d":7190},{"name":"ProduceResults","f":1743154008833409,"d_finished":2631,"c":13,"l":1743154008846193,"d":2631},{"a":1743154008846196,"name":"Finish","f":1743154008846196,"d_finished":0,"c":0,"l":1743154008846261,"d":65},{"name":"task_result","f":1743154008834618,"d_finished":5822,"c":9,"l":1743154008844363,"d":5822}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-28T09:26:48.846787Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[2:436:2454];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:48.847221Z node 2 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[2:436:2454];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.014}],"full":{"a":1743154008831864,"name":"_full_task","f":1743154008831864,"d_finished":0,"c":0,"l":1743154008846837,"d":14973},"events":[{"name":"bootstrap","f":1743154008832093,"d_finished":2406,"c":1,"l":1743154008834499,"d":2406},{"a":1743154008845932,"name":"ack","f":1743154008844810,"d_finished":871,"c":1,"l":1743154008845681,"d":1776},{"a":1743154008845917,"name":"processing","f":1743154008834601,"d_finished":6846,"c":10,"l":1743154008845683,"d":7766},{"name":"ProduceResults","f":1743154008833409,"d_finished":2631,"c":13,"l":1743154008846193,"d":2631},{"a":1743154008846196,"name":"Finish","f":1743154008846196,"d_finished":0,"c":0,"l":1743154008846837,"d":641},{"name":"task_result","f":1743154008834618,"d_finished":5822,"c":9,"l":1743154008844363,"d":5822}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;;); 2025-03-28T09:26:48.847324Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:48.831330Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=16001;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=16001;selected_rows=0; 2025-03-28T09:26:48.847363Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:48.847624Z node 2 :TX_COLUMNSHARD_SCAN INFO: SelfId=[2:437:2455];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; >> YdbTableSplit::SplitByLoadWithReads [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::Write [GOOD] Test command err: 2025-03-28T09:26:44.675011Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:44.757603Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:44.776880Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:44.777218Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:44.784542Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:44.784740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:44.784975Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:44.785115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:44.785233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:44.785338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:44.785418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:44.785487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:44.785577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:44.785678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.785741Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:44.785801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:44.815460Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:44.815890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:44.815957Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:44.816128Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:44.816295Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:44.816385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:44.816432Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:44.816541Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:44.816611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:44.816658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:44.816690Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:44.816858Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:44.816950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:44.817009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:44.817040Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:44.817188Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:44.817250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:44.817306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:44.817339Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:44.817416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:44.817452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:44.817480Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-03-28T09:26:44.817523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:44.817557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:44.817617Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:44.818020Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-03-28T09:26:44.818139Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=55; 2025-03-28T09:26:44.818228Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=47; 2025-03-28T09:26:44.818313Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-03-28T09:26:44.818493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:44.818545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:44.818578Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:44.818848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:44.818900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.818933Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.819113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:44.819161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:44.819190Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:44.819366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:44.819408Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:44.819436Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:44.819574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:44.819630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:44.819697Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... :[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-03-28T09:26:49.327207Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] >> Normalizers::SchemaVersionsNormalizer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] Test command err: 2025-03-28T09:26:49.355655Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:49.447453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:49.471799Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:49.472051Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:49.479399Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:49.479601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:49.479842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:49.479957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:49.480074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:49.480245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:49.480385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:49.480497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:49.480627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:49.480756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:49.480871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:49.480991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:49.516066Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:49.516414Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:49.516473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:49.516622Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:49.516772Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:49.516842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:49.516874Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:49.516949Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:49.516996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:49.517022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:49.517041Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:49.517168Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:49.517207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:49.517231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:49.517247Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:49.517362Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:49.517423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:49.517472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:49.517507Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:49.517585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:49.517632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:49.517666Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-03-28T09:26:49.517713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:49.517763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:49.517798Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:49.518165Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2025-03-28T09:26:49.518278Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=49; 2025-03-28T09:26:49.518361Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-03-28T09:26:49.518459Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-03-28T09:26:49.518633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:49.518777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:49.518821Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:49.519041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:49.519126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:49.519160Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:49.519328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:49.519392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:49.519425Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:49.519619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:49.519680Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:49.519727Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:49.519895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:49.519947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:49.519999Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... s_manager.cpp:245;method=RegisterTable;path_id=1; 2025-03-28T09:26:50.125686Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:144;event=RegisterTable;path_id=1; 2025-03-28T09:26:50.132728Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-03-28T09:26:50.132956Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tx_controller.cpp:211;event=finished_tx;tx_id=10; 2025-03-28T09:26:50.163964Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-03-28T09:26:50.164291Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3200;columns=5; 2025-03-28T09:26:50.180778Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=3200;count=1; 2025-03-28T09:26:50.182888Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-03-28T09:26:50.183238Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-03-28T09:26:50.196493Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-03-28T09:26:50.196647Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:26:50.209993Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 100 at tablet 9437184, mediator 0 2025-03-28T09:26:50.210071Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] execute at tablet 9437184 2025-03-28T09:26:50.210361Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=abstract.h:83;progress_tx_id=100;lock_id=1;broken=0; 2025-03-28T09:26:50.210549Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=tx_controller.cpp:211;event=finished_tx;tx_id=100; 2025-03-28T09:26:50.222505Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] complete at tablet 9437184 2025-03-28T09:26:50.222669Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=100;lock_id=1;broken=0; 
2025-03-28T09:26:50.222857Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=3384; 2025-03-28T09:26:50.226670Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::c72fadfc-bb611f0-9c080637-6b172707; 2025-03-28T09:26:50.226812Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-03-28T09:26:50.226938Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:754;event=indexation;bytes=3384;blobs_count=1;max_limit=251658240;has_more=0;external_task_id=c72fadfc-bb611f0-9c080637-6b172707; 2025-03-28T09:26:50.227170Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:620;event=start_changes;type=CS::INDEXATION;task_id=c72fadfc-bb611f0-9c080637-6b172707; 2025-03-28T09:26:50.227613Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=3035;external_task_id=c72fadfc-bb611f0-9c080637-6b172707;type=CS::INDEXATION;priority=0;; 2025-03-28T09:26:50.227829Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=1;task=cpu=0;mem=3035;external_task_id=c72fadfc-bb611f0-9c080637-6b172707;type=CS::INDEXATION;priority=0;; 2025-03-28T09:26:50.227869Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=c72fadfc-bb611f0-9c080637-6b172707;mem=3035;cpu=0; 2025-03-28T09:26:50.228060Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=c72fadfc-bb611f0-9c080637-6b172707;task_id=1;mem=3035;cpu=0; 2025-03-28T09:26:50.228218Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=c72fadfc-bb611f0-9c080637-6b172707;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=c72fadfc-bb611f0-9c080637-6b172707; 2025-03-28T09:26:50.232350Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=c72fadfc-bb611f0-9c080637-6b172707;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-03-28T09:26:50.232497Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-03-28T09:26:50.234893Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-28T09:26:50.235237Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[8] (CS::INDEXATION) apply at tablet 9437184 2025-03-28T09:26:50.235960Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-03-28T09:26:50.236057Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted 
{blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:26:50.236475Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:239;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:26:50.236520Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:26:50.236594Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:769;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=3384;indexing_debug={task_ids=c72fadfc-bb611f0-9c080637-6b172707,;}; 2025-03-28T09:26:50.236680Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-28T09:26:50.236905Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:26:50.236952Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:26:50.236985Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:26:50.237069Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:26:50.237494Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 100 scanId: 0 version: {100:100} readable: {100:max} at tablet 9437184 2025-03-28T09:26:50.249748Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c72fadfc-bb611f0-9c080637-6b172707;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-03-28T09:26:50.249849Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c72fadfc-bb611f0-9c080637-6b172707;fline=with_appended.cpp:65;portions=1,;task_id=c72fadfc-bb611f0-9c080637-6b172707; 2025-03-28T09:26:50.250063Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c72fadfc-bb611f0-9c080637-6b172707;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::c72fadfc-bb611f0-9c080637-6b172707; 2025-03-28T09:26:50.250133Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c72fadfc-bb611f0-9c080637-6b172707;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:26:50.250207Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c72fadfc-bb611f0-9c080637-6b172707;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:26:50.250278Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=c72fadfc-bb611f0-9c080637-6b172707;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-28T09:26:50.250367Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c72fadfc-bb611f0-9c080637-6b172707;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:26:50.250425Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c72fadfc-bb611f0-9c080637-6b172707;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:26:50.250479Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c72fadfc-bb611f0-9c080637-6b172707;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:26:50.250575Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c72fadfc-bb611f0-9c080637-6b172707;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.998500s; 2025-03-28T09:26:50.250643Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=c72fadfc-bb611f0-9c080637-6b172707;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:26:50.250776Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:3384:0] 2025-03-28T09:26:50.250836Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2025-03-28T09:26:50.250954Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=1;external_task_id=c72fadfc-bb611f0-9c080637-6b172707;mem=3035;cpu=0; 2025-03-28T09:26:50.251108Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:26:50.251271Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2025-03-28T09:26:50.251411Z node 1 :TX_COLUMNSHARD_SCAN WARN: tx_id=100;scan_id=0;gen=0;table=;snapshot={100:100};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:14;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: Can't parse TOlapProgram protobuf; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReads [GOOD] Test command err: 2025-03-28T09:26:19.527941Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789333742143822:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:19.530881Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001548/r3tmp/tmplEHJEC/pdisk_1.dat 2025-03-28T09:26:19.982043Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:19.983046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:19.983150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 28656, node 1 2025-03-28T09:26:20.032846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2025-03-28T09:26:20.041806Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:26:20.041834Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:26:20.201409Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:26:20.201438Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:26:20.201446Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:26:20.201584Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17968 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:26:20.800205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:17968 2025-03-28T09:26:22.821354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789346627046628:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:22.821560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:23.743880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:26:23.972500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789350922014120:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:23.972576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:23.996385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153983914 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 1 shards TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153983914 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) 2025-03-28T09:26:24.162100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789355216981528:2384], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.162171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.162357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789355216981539:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.162382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789355216981540:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.162401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789355216981541:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.164243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789355216981551:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.164361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789355216981554:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.164391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789355216981556:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.164432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.164497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789355216981569:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.169560Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789355216981573:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.169619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789355216981591:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.169907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.170610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789355216981610:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.170654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.172299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789355216981612:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:24.173665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:26:24.173904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:26:24.173930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkD ... : 01jqe16g4c7dyatr9mrd2m1nrg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWViNzdlMjYtY2VlY2RmY2EtYWFhZDcwMS03NTU2MTJh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.111218Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726133. Ctx: { TraceId: 01jqe16g4c283mrpz1fp1s707g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzIyYzIxMWQtNmExMzAyZTQtMTA1NDk4Y2UtZjRmZDNlNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.114271Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726134. Ctx: { TraceId: 01jqe16g4eawav8xnz498ddbzt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjY4OGZhZS00ZjVkNTc2Ni03NjVhNDIyMy0xZGNmNmEyMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.116389Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726135. Ctx: { TraceId: 01jqe16g4gck5ybk1xqq720k9s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODdkZGRhNmYtODdhMjk5NjgtNzRjMmYxYmItNTU3ZjJhN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.116653Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726136. Ctx: { TraceId: 01jqe16g4g9tmqpg277p20vqxe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjlhNWY0M2UtNzc5NmU4NjUtN2RkNGY1YTUtOTg4OGU2OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.117237Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726137. Ctx: { TraceId: 01jqe16g4gf6ajg80vj61xrms8, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhhMjE4NTEtM2ViZWYwYmYtZTBhZWRlMzQtOGVkZWI0M2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.123272Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726138. Ctx: { TraceId: 01jqe16g4nbffzza7235dzaqcc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNlMzVkMmItNDQ0OTQwZDAtY2U1OTc5OGEtODU4ODYxMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.124187Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726139. Ctx: { TraceId: 01jqe16g4n7z1msbv8hpzw4gj3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzdmNGE2YmYtNzllZDdjODgtYjhkYzgxMzItOGI5MjM5MTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.125297Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726140. Ctx: { TraceId: 01jqe16g4pd7rrfdg5dsy571wt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWViNzdlMjYtY2VlY2RmY2EtYWFhZDcwMS03NTU2MTJh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.127294Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726141. 
Ctx: { TraceId: 01jqe16g4s4tg4cnm5hc1vpcy7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjY4OGZhZS00ZjVkNTc2Ni03NjVhNDIyMy0xZGNmNmEyMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.133692Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726142. Ctx: { TraceId: 01jqe16g4wcfkg8xhw3b4gtq86, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzIyYzIxMWQtNmExMzAyZTQtMTA1NDk4Y2UtZjRmZDNlNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.134516Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726143. Ctx: { TraceId: 01jqe16g4zf9y6w9jxnb64b0f2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjlhNWY0M2UtNzc5NmU4NjUtN2RkNGY1YTUtOTg4OGU2OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.135166Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726144. Ctx: { TraceId: 01jqe16g515n6kw6frs60ja9m0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhhMjE4NTEtM2ViZWYwYmYtZTBhZWRlMzQtOGVkZWI0M2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls request: /Root/Foo 2025-03-28T09:26:44.141953Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726119, task: 1, CA Id [1:7486789441116433119:2372]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-28T09:26:44.141999Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726121, task: 1, CA Id [1:7486789441116433131:2373]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-28T09:26:44.142059Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726145. Ctx: { TraceId: 01jqe16g5684hxtcrz45xnrqkm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzNlMzVkMmItNDQ0OTQwZDAtY2U1OTc5OGEtODU4ODYxMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.143691Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726146. Ctx: { TraceId: 01jqe16g5959ysrefgc69qkrf2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzdmNGE2YmYtNzllZDdjODgtYjhkYzgxMzItOGI5MjM5MTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.144366Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726147. Ctx: { TraceId: 01jqe16g59bv2t7126f69ts6jk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWViNzdlMjYtY2VlY2RmY2EtYWFhZDcwMS03NTU2MTJh, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153983914 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... 
(TRUNCATED) 2025-03-28T09:26:44.150269Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726148. Ctx: { TraceId: 01jqe16g5f4kh0qnes7te1w8dn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjY4OGZhZS00ZjVkNTc2Ni03NjVhNDIyMy0xZGNmNmEyMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.152976Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726149. Ctx: { TraceId: 01jqe16g5f7dww0nymm9ta31dk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzIyYzIxMWQtNmExMzAyZTQtMTA1NDk4Y2UtZjRmZDNlNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.154540Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726150. Ctx: { TraceId: 01jqe16g5jejxybwbf21v33msx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjlhNWY0M2UtNzc5NmU4NjUtN2RkNGY1YTUtOTg4OGU2OTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.159048Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976726151. Ctx: { TraceId: 01jqe16g5qe9qt2wp02yvarctt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjhhMjE4NTEtM2ViZWYwYmYtZTBhZWRlMzQtOGVkZWI0M2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:26:44.177148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-03-28T09:26:44.177330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037889 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-28T09:26:44.177478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-28T09:26:44.177634Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-28T09:26:44.279862Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726119, task: 1, CA Id [1:7486789441116433119:2372]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-28T09:26:44.279867Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726121, task: 1, CA Id [1:7486789441116433131:2373]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-28T09:26:44.500091Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726119, task: 1, CA Id [1:7486789441116433119:2372]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-28T09:26:44.500126Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726121, task: 1, CA Id [1:7486789441116433131:2373]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-28T09:26:45.013458Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726121, task: 1, CA Id [1:7486789441116433131:2373]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-28T09:26:45.087208Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726119, task: 1, CA Id [1:7486789441116433119:2372]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-28T09:26:45.726062Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726119, task: 1, CA Id [1:7486789441116433119:2372]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-28T09:26:45.879357Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726121, task: 1, CA Id [1:7486789441116433131:2373]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-28T09:26:46.579668Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726119, task: 1, CA Id [1:7486789441116433119:2372]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-28T09:26:46.622182Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726121, task: 1, CA Id [1:7486789441116433131:2373]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-28T09:26:47.198712Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726121, task: 1, CA Id [1:7486789441116433131:2373]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2025-03-28T09:26:47.507877Z node 1 :KQP_COMPUTE WARN: TxId: 281474976726119, task: 1, CA Id [1:7486789441116433119:2372]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 TClient::Ls request: /Root/Foo TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Foo" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743153983914 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "Foo" Columns { Name: "NameHash" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Versio... (TRUNCATED) Table has 2 shards >> TColumnShardTestReadWrite::WriteReadDuplicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::SchemaVersionsNormalizer [GOOD] Test command err: 2025-03-28T09:26:47.051828Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:47.154047Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:47.180678Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:47.181032Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:47.188851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SchemaVersionCleaner; 2025-03-28T09:26:47.189110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-03-28T09:26:47.189260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:47.189389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:47.189526Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:47.189634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:47.189720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:47.189793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:47.189857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:47.189957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:47.190075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.190160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:47.190224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:47.213196Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:47.213399Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=SchemaVersionCleaner; 2025-03-28T09:26:47.213456Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-03-28T09:26:47.213734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SchemaVersionCleaner;id=NO_VALUE_OPTIONAL; 2025-03-28T09:26:47.213810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-03-28T09:26:47.213843Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-03-28T09:26:47.213967Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:47.214065Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:47.214108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:47.214138Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-03-28T09:26:47.214210Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:47.214249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:47.214295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:47.214349Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:47.214487Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:47.214554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:47.214608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:47.214637Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:47.214733Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:47.214789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:47.214829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:47.214876Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:47.214955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:47.215014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:47.215049Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:47.215114Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:47.215179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:47.215213Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:47.215586Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-03-28T09:26:47.215710Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=45; 2025-03-28T09:26:47.215813Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=46; 2025-03-28T09:26:47.215924Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-03-28T09:26:47.216089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:47.216134Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:47.216168Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:47.216373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:47.216424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.216455Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.216606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:47.216648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:47.216682Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:47.216846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;pr ... 
: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:26:50.684868Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:84;event=TEvTaskProcessedResult; 2025-03-28T09:26:50.684940Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-28T09:26:50.685000Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2; 2025-03-28T09:26:50.685063Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=20048;merger=0;interval_id=2; 2025-03-28T09:26:50.685119Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-28T09:26:50.685238Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:50.685284Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=20048;finished=1; 2025-03-28T09:26:50.685324Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:26:50.685560Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:50.685762Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:20048;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:50.685816Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:26:50.685953Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;);columns=3;rows=20048; 2025-03-28T09:26:50.686034Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=2405760;num_rows=20048;batch_columns=key1,key2,field; 2025-03-28T09:26:50.686180Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:457:2464];bytes=2405760;rows=20048;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2025-03-28T09:26:50.686339Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:50.686453Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:50.686585Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:50.687442Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:50.687578Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:50.687722Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:50.687767Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:459:2465] finished for tablet 9437184 2025-03-28T09:26:50.688207Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:457:2464];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.006},{"events":["f_ack","l_task_result"],"t":0.231},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.234}],"full":{"a":1743154010453645,"name":"_full_task","f":1743154010453645,"d_finished":0,"c":0,"l":1743154010687822,"d":234177},"events":[{"name":"bootstrap","f":1743154010453856,"d_finished":2798,"c":1,"l":1743154010456654,"d":2798},{"a":1743154010687414,"name":"ack","f":1743154010685530,"d_finished":1085,"c":1,"l":1743154010686615,"d":1493},{"a":1743154010687397,"name":"processing","f":1743154010459782,"d_finished":120052,"c":9,"l":1743154010686618,"d":120477},{"name":"ProduceResults","f":1743154010455546,"d_finished":2852,"c":12,"l":1743154010687752,"d":2852},{"a":1743154010687755,"name":"Finish","f":1743154010687755,"d_finished":0,"c":0,"l":1743154010687822,"d":67},{"name":"task_result","f":1743154010459804,"d_finished":118811,"c":8,"l":1743154010685377,"d":118811}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:50.688298Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:457:2464];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:50.688719Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:457:2464];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.006},{"events":["f_ack","l_task_result"],"t":0.231},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.234}],"full":{"a":1743154010453645,"name":"_full_task","f":1743154010453645,"d_finished":0,"c":0,"l":1743154010688357,"d":234712},"events":[{"name":"bootstrap","f":1743154010453856,"d_finished":2798,"c":1,"l":1743154010456654,"d":2798},{"a":1743154010687414,"name":"ack","f":1743154010685530,"d_finished":1085,"c":1,"l":1743154010686615,"d":2028},{"a":1743154010687397,"name":"processing","f":1743154010459782,"d_finished":120052,"c":9,"l":1743154010686618,"d":121012},{"name":"ProduceResults","f":1743154010455546,"d_finished":2852,"c":12,"l":1743154010687752,"d":2852},{"a":1743154010687755,"name":"Finish","f":1743154010687755,"d_finished":0,"c":0,"l":1743154010688357,"d":602},{"name":"task_result","f":1743154010459804,"d_finished":118811,"c":8,"l":1743154010685377,"d":118811}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:50.688816Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:50.453092Z;index_granules=0;index_portions=1;index_batches=953;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2589608;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2589608;selected_rows=0; 2025-03-28T09:26:50.688869Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:50.689116Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:459:2465];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> Normalizers::EmptyTablesNormalizer [GOOD] >> EvWrite::WriteWithSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] Test command err: 2025-03-28T09:26:45.688622Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789444053826642:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:45.688686Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018b2/r3tmp/tmpLegkaI/pdisk_1.dat 
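The scan-finish records a few lines up embed their statistics as a JSON blob after `stats=`. A hedged sketch — assuming only the stdlib and that the blob is a complete JSON value followed by the `;iterator=...` tail — that extracts it and totals the per-stage `d_finished` fields (the units appear to be microseconds, judging by the epoch-microsecond timestamps around them):

```python
import json

def extract_scan_stats(record):
    """Pull the JSON statistics blob that follows 'stats=' in a scan record."""
    start = record.find("stats=")
    if start == -1:
        return None
    # raw_decode stops at the end of the first complete JSON value, so the
    # ';iterator=...' tail of the log record is ignored automatically.
    stats, _ = json.JSONDecoder().raw_decode(record, start + len("stats="))
    return stats

def stage_totals_us(stats):
    """Sum d_finished per named stage; values look like microseconds."""
    totals = {}
    for event in stats.get("events", []):
        totals[event["name"]] = totals.get(event["name"], 0) + event.get("d_finished", 0)
    return totals
```

For the scan above this reports e.g. `processing` ≈ 120 ms and `ProduceResults` ≈ 2.9 ms against a ~234 ms full task, consistent with the `"t"` checkpoints in the `"p"` array.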
2025-03-28T09:26:46.014336Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:46.030554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:46.030677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:46.033245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30657, node 1 2025-03-28T09:26:46.115443Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/0018b2/r3tmp/yandexoGmF8I.tmp 2025-03-28T09:26:46.115470Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/0018b2/r3tmp/yandexoGmF8I.tmp 2025-03-28T09:26:46.115703Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/0018b2/r3tmp/yandexoGmF8I.tmp 2025-03-28T09:26:46.115920Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31153 PQClient connected to localhost:30657 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:26:46.397959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-28T09:26:48.602516Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789456938729243:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:48.602579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789456938729270:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:48.602630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:48.607062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:26:48.620216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789456938729272:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:26:48.835487Z node 1 :TX_PROXY ERROR: Actor# [1:7486789456938729339:2390] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:26:48.877711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:26:48.989973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:26:48.992960Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789456938729354:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:26:48.993271Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTFmNTk0MjctOTQ2MWJkNmUtOTIwYzBmNmMtYWMyYjYyNDA=, ActorId: [1:7486789456938729240:2329], ActorState: ExecuteState, TraceId: 01jqe16mgn1x2emzggb8f1ev1j, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:26:48.995453Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:26:49.077167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:26:49.315004Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqe16n1f2z96d93mf8h32v3a, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzQwYTFiYmEtMTUzNmY5NjUtOGFiYjU0OWItYmE1ODNiOTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |89.5%| [TA] $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TA] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::EmptyTablesNormalizer [GOOD] Test command err: 2025-03-28T09:26:48.337913Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:48.447812Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:48.477023Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:48.477377Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:48.486189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=PortionsCleaner; 2025-03-28T09:26:48.486446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2025-03-28T09:26:48.486630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:48.486881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:48.487040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:48.487165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:48.487312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:48.487446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:48.487572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:48.487748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:48.487904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:48.488024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 
2025-03-28T09:26:48.488139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:48.520527Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:48.520796Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=PortionsCleaner; 2025-03-28T09:26:48.520869Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2025-03-28T09:26:48.521311Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=56; 2025-03-28T09:26:48.521387Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=31; 2025-03-28T09:26:48.521478Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=48; 2025-03-28T09:26:48.521573Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=50; 2025-03-28T09:26:48.521745Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=PortionsCleaner;id=NO_VALUE_OPTIONAL; 2025-03-28T09:26:48.521869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2025-03-28T09:26:48.521919Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2025-03-28T09:26:48.522049Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:48.522180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:48.522249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:48.522289Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2025-03-28T09:26:48.522375Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:48.522461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:48.522503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:48.522537Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:48.522707Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:48.522769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:48.522817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:48.522850Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:48.522936Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:48.522987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:48.523025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:48.523060Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:48.523147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:48.523189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:48.523219Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:48.523254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:48.523354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:48.523382Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:48.523666Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=22; 2025-03-28T09:26:48.523764Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=26; 2025-03-28T09:26:48.523840Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=24; 2025-03-28T09:26:48.523908Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=27; 2025-03-28T09:26:48.524078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:48.524136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:48.524173Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:48.524373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:48.524417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:48.524441Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:48.524542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpda ... cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:26:51.509510Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:26:51.509854Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:26:51.509960Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:26:51.510003Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:26:51.510034Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:26:51.510088Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:26:51.510165Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:26:51.510231Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-28T09:26:51.510311Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:26:51.510381Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:26:51.510436Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:26:51.510534Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:26:51.680112Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 111 scanId: 0 version: {11:111} readable: {11:max} at tablet 9437184 2025-03-28T09:26:51.680283Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2025-03-28T09:26:51.680511Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-03-28T09:26:51.680613Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } } } ; 2025-03-28T09:26:51.681481Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4},{"from":6}]},{"owner_id":2,"inputs":[{"from":7}]},{"owner_id":4,"inputs":[{"from":7}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key1","id":1}},"o":"1","t":"AssembleOriginalData"},"w":11,"id":2},"6":{"p":{"i":"3","p":{"address":{"name":"field","id":3}},"o":"3","t":"AssembleOriginalData"},"w":11,"id":6},"7":{"p":{"p":{"data":[{"name":"key1","id":1},{"name":"key2","id":2},{"name":"field","id":3}]},"o":"1,2,3","t":"FetchOriginalData"},"w":6,"id":7},"4":{"p":{"i":"2","p":{"address":{"name":"key2","id":2}},"o":"2","t":"AssembleOriginalData"},"w":11,"id":4},"0":{"p":{"i":"1,2,3","t":"Projection"},"w":33,"id":0}}}; 2025-03-28T09:26:51.681606Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:26:51.682222Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:404:2417];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={11:111};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:465:2470];trace_detailed=; 2025-03-28T09:26:51.682968Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=1,2,3;column_names=field,key1,key2;);; 2025-03-28T09:26:51.683228Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; 2025-03-28T09:26:51.683615Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:51.683794Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:51.683928Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:51.683980Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:465:2470] finished for tablet 9437184 2025-03-28T09:26:51.684406Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:463:2469];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","f_ProduceResults","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743154011682141,"name":"_full_task","f":1743154011682141,"d_finished":0,"c":0,"l":1743154011684045,"d":1904},"events":[{"name":"bootstrap","f":1743154011682322,"d_finished":1079,"c":1,"l":1743154011683401,"d":1079},{"a":1743154011683585,"name":"ack","f":1743154011683585,"d_finished":0,"c":0,"l":1743154011684045,"d":460},{"a":1743154011683566,"name":"processing","f":1743154011683566,"d_finished":0,"c":0,"l":1743154011684045,"d":479},{"name":"ProduceResults","f":1743154011683383,"d_finished":322,"c":2,"l":1743154011683961,"d":322},{"a":1743154011683965,"name":"Finish","f":1743154011683965,"d_finished":0,"c":0,"l":1743154011684045,"d":80}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:51.684513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:463:2469];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:51.684905Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:463:2469];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743154011682141,"name":"_full_task","f":1743154011682141,"d_finished":0,"c":0,"l":1743154011684565,"d":2424},"events":[{"name":"bootstrap","f":1743154011682322,"d_finished":1079,"c":1,"l":1743154011683401,"d":1079},{"a":1743154011683585,"name":"ack","f":1743154011683585,"d_finished":0,"c":0,"l":1743154011684565,"d":980},{"a":1743154011683566,"name":"processing","f":1743154011683566,"d_finished":0,"c":0,"l":1743154011684565,"d":999},{"name":"ProduceResults","f":1743154011683383,"d_finished":322,"c":2,"l":1743154011683961,"d":322},{"a":1743154011683965,"name":"Finish","f":1743154011683965,"d_finished":0,"c":0,"l":1743154011684565,"d":600}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;;); 2025-03-28T09:26:51.685011Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:51.681569Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:26:51.685056Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:51.685165Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:465:2470];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=20048; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=20048; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] Test command err: 2025-03-28T09:26:46.955186Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:47.049618Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:47.074056Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:47.074399Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:47.082979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:47.083135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:47.083334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:47.083423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:47.083501Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:47.083615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:47.083707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:47.083776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:47.083868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:47.083958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.084018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:47.084076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:47.108109Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:47.108349Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:47.108385Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:47.108505Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:47.108648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:47.108714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:47.108746Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:47.108827Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:47.108885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:47.108916Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:47.108935Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:47.109037Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:47.109076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:47.109111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:47.109144Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:47.109331Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:47.109391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:47.109444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:47.109492Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:47.109589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:47.109626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:47.109653Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:47.109707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:47.109750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:47.109786Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:47.110172Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-03-28T09:26:47.110268Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2025-03-28T09:26:47.110362Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=51; 2025-03-28T09:26:47.110450Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-03-28T09:26:47.110612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:47.110663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:47.110715Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:47.110904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:47.110945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.110972Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.111119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:47.111161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:47.111191Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:47.111378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:47.111412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:47.111437Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:47.111563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:47.111608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:47.111651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
:[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000032;32;32;32;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000033;33;33;33;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000034;34;34;34;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000035;35;35;35;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000036;36;36;36;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000037;37;37;37;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000038;38;38;38;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000039;39;39;39;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000040;40;40;40;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000041;41;41;41;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000042;42;42;42;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000043;43;43;43;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000044;44;44;44;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000045;45;45;45;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000046;46;46;46;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000047;47;47;47;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000048;48;48;48;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000049;49;49;49;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000050;50;50;50;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000051;51;51;51;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000052;52;52;52;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000053;53;53;53;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000054;54;54;54;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000055;55;55;55;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000056;56;56;56;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000057;57;57;57;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000058;58;58;58;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000059;59;59;59;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000060;60;60;60;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000061;61;61;61;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000062;62;62;62;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000063;63;63;63;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000064;64;64;64;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000065;65;65;65;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000066;66;66;66;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000067;67;67;67;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000068;68;68;68;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000069;69;69;69;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000070;70;70;70;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000071;71;71;71;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000072;72;72;72;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000073;73;73;73;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000074;74;74;74;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000075;75;75;75;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000076;76;76;76;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000077;77;77;77;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000078;78;78;78;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000079;79;79;79;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000080;80;80;80;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000081;81;81;81;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000082;82;82;82;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000083;83;83;83;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000084;84;84;84;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000085;85;85;85;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000086;86;86;86;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000087;87;87;87;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000088;88;88;88;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000089;89;89;89;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000090;90;90;90;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 
00:00:00.000091;91;91;91;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000092;92;92;92;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000093;93;93;93;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000094;94;94;94;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000095;95;95;95;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000096;96;96;96;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000097;97;97;97;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000098;98;98;98;"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"1970-01-01 00:00:00.000099;99;99;99;"}}]}; 2025-03-28T09:26:52.423885Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=30;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] Test command err: 2025-03-28T09:26:44.866397Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:44.965786Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:44.988389Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:44.988731Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:44.996828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:44.997073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:44.997359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:44.997522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:44.997642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:44.997799Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:44.997930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:44.998067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:44.998187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:44.998330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.998456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:44.998590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:45.024379Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:45.024701Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:45.024755Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:45.024943Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:45.025139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:45.025239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:45.025290Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:45.025406Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:45.025489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:45.025534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:45.025565Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:45.025731Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:45.025789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:45.025830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:45.025881Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:45.026052Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:45.026127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:45.026197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:45.026230Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:45.026313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:45.026364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:45.026401Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:45.026460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:45.026500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:45.026534Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:45.026997Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-03-28T09:26:45.027124Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=61; 2025-03-28T09:26:45.027230Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=47; 2025-03-28T09:26:45.027347Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=54; 2025-03-28T09:26:45.027570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:45.027642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:45.027699Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:45.027934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:45.027993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:45.028029Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:45.028200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:45.028256Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:45.028293Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:45.028503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:45.028555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:45.028592Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:45.028760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:45.028819Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:45.028869Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:52.857445Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:26:52.857611Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-03-28T09:26:52.857742Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=1984;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-03-28T09:26:52.857921Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1064:2935];bytes=1984;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2025-03-28T09:26:52.858110Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:52.858262Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:52.858414Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:52.858676Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:52.858871Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:52.859029Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:52.859102Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1065:2936] finished for tablet 9437184 2025-03-28T09:26:52.859687Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1064:2935];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.015},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.017}],"full":{"a":1743154012841820,"name":"_full_task","f":1743154012841820,"d_finished":0,"c":0,"l":1743154012859172,"d":17352},"events":[{"name":"bootstrap","f":1743154012842058,"d_finished":3620,"c":1,"l":1743154012845678,"d":3620},{"a":1743154012858647,"name":"ack","f":1743154012857098,"d_finished":1351,"c":1,"l":1743154012858449,"d":1876},{"a":1743154012858626,"name":"processing","f":1743154012847250,"d_finished":6601,"c":10,"l":1743154012858452,"d":7147},{"name":"ProduceResults","f":1743154012844025,"d_finished":3534,"c":13,"l":1743154012859075,"d":3534},{"a":1743154012859080,"name":"Finish","f":1743154012859080,"d_finished":0,"c":0,"l":1743154012859172,"d":92},{"name":"task_result","f":1743154012847273,"d_finished":5092,"c":9,"l":1743154012856881,"d":5092}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:52.859793Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1064:2935];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:52.860286Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1064:2935];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.015},{"events":["l_ProduceResults","f_Finish"],"t":0.017},{"events":["l_ack","l_processing","l_Finish"],"t":0.018}],"full":{"a":1743154012841820,"name":"_full_task","f":1743154012841820,"d_finished":0,"c":0,"l":1743154012859846,"d":18026},"events":[{"name":"bootstrap","f":1743154012842058,"d_finished":3620,"c":1,"l":1743154012845678,"d":3620},{"a":1743154012858647,"name":"ack","f":1743154012857098,"d_finished":1351,"c":1,"l":1743154012858449,"d":2550},{"a":1743154012858626,"name":"processing","f":1743154012847250,"d_finished":6601,"c":10,"l":1743154012858452,"d":7821},{"name":"ProduceResults","f":1743154012844025,"d_finished":3534,"c":13,"l":1743154012859075,"d":3534},{"a":1743154012859080,"name":"Finish","f":1743154012859080,"d_finished":0,"c":0,"l":1743154012859846,"d":766},{"name":"task_result","f":1743154012847273,"d_finished":5092,"c":9,"l":1743154012856881,"d":5092}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:52.860377Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:52.841130Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=10308;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10308;selected_rows=0; 2025-03-28T09:26:52.860432Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:52.860840Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:1065:2936];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; >> TPersQueueTest::SchemeOperationsCheckPropValues [GOOD] >> TPersQueueTest::ReadRuleServiceType >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> 
TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] Test command err: 2025-03-28T09:26:49.802956Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:49.892772Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:49.916742Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:49.917069Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:49.925528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:49.925774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:49.926058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:49.926183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:49.926293Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:49.926433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:49.926545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:49.926667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:49.926990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:49.927115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:49.927260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:49.927372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:49.960049Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:49.960398Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:49.960463Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:49.960650Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:49.960839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:49.960921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:49.960964Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:49.961079Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:49.961154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:49.961227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:49.961261Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:49.961449Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:49.961529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:49.961580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:49.961610Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:49.961779Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:49.961863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:49.961918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:49.961947Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 
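The WARN/NOTICE/INFO entries above walk the tablet's normalizer chain in order: normalizer_switched announces the next CLASS_NAME, normalizer_init starts it under a seq_id, and normalizer_finished closes it ("0 chunks found" apparently meaning there was nothing to migrate). A minimal sketch for pulling that chain out of a dump like this one — Python 3 assumed; parse_kv and normalizer_timeline are hypothetical helper names, not part of the YDB tooling:

import re

def parse_kv(entry: str) -> dict:
    # Split a 'key=value;key=value;...' columnshard log token into a dict.
    # split('=', 1) keeps nested '=' intact, e.g. description=CLASS_NAME=Granules.
    fields = {}
    for part in entry.split(";"):
        if "=" in part:
            key, value = part.split("=", 1)
            fields[key.strip()] = value
    return fields

def normalizer_timeline(log_text: str):
    # Yield (event, normalizer_name) pairs in log order, reconstructing the
    # chain: Granules -> Chunks -> TablesCleaner -> CleanGranuleId -> ...
    for match in re.finditer(r"event=normalizer_\w+[^ ]*", log_text):
        fields = parse_kv(match.group(0))
        name = fields.get("description", fields.get("type", ""))
        yield fields["event"], name.replace("CLASS_NAME=", "")

Run over the entries above, this would yield pairs such as ('normalizer_init', 'CleanInsertionDedup') followed by ('normalizer_finished', 'CleanInsertionDedup'), which also makes the gaps in the seq_id sequence (1, 2, 4, 6, 8, 9, 10, ...) easy to spot.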
2025-03-28T09:26:49.962039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:49.962077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:49.962113Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:49.962160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:49.962205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:49.962238Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:49.962656Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-03-28T09:26:49.962838Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=53; 2025-03-28T09:26:49.962946Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=55; 2025-03-28T09:26:49.963037Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-03-28T09:26:49.963210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:49.963266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:49.963301Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:49.963514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:49.963563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:49.963593Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:49.963787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:49.963846Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:49.963880Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:49.964097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:49.964140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:49.964170Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:49.964292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:49.964336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:49.964403Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... timestamp[us] request_id: binary;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:55.487966Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:26:55.488120Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;);columns=10;rows=31; 2025-03-28T09:26:55.488224Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=2759;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2025-03-28T09:26:55.488388Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:432:2447];bytes=2759;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2025-03-28T09:26:55.488549Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:55.488726Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:55.488861Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:55.489097Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:26:55.489257Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:55.489404Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:55.489453Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:433:2448] finished for tablet 9437184 2025-03-28T09:26:55.489939Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:432:2447];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.018},{"events":["l_bootstrap"],"t":0.019},{"events":["f_processing","f_task_result"],"t":0.02},{"events":["f_ack","l_task_result"],"t":0.03},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.032}],"full":{"a":1743154015457406,"name":"_full_task","f":1743154015457406,"d_finished":0,"c":0,"l":1743154015489506,"d":32100},"events":[{"name":"bootstrap","f":1743154015457610,"d_finished":19350,"c":1,"l":1743154015476960,"d":19350},{"a":1743154015489075,"name":"ack","f":1743154015487658,"d_finished":1250,"c":1,"l":1743154015488908,"d":1681},{"a":1743154015489058,"name":"processing","f":1743154015478324,"d_finished":6375,"c":10,"l":1743154015488910,"d":6823},{"name":"ProduceResults","f":1743154015475423,"d_finished":3568,"c":13,"l":1743154015489432,"d":3568},{"a":1743154015489439,"name":"Finish","f":1743154015489439,"d_finished":0,"c":0,"l":1743154015489506,"d":67},{"name":"task_result","f":1743154015478343,"d_finished":4978,"c":9,"l":1743154015487471,"d":4978}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:55.490029Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:432:2447];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:26:55.490479Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:432:2447];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.018},{"events":["l_bootstrap"],"t":0.019},{"events":["f_processing","f_task_result"],"t":0.02},{"events":["f_ack","l_task_result"],"t":0.03},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.032}],"full":{"a":1743154015457406,"name":"_full_task","f":1743154015457406,"d_finished":0,"c":0,"l":1743154015490071,"d":32665},"events":[{"name":"bootstrap","f":1743154015457610,"d_finished":19350,"c":1,"l":1743154015476960,"d":19350},{"a":1743154015489075,"name":"ack","f":1743154015487658,"d_finished":1250,"c":1,"l":1743154015488908,"d":2246},{"a":1743154015489058,"name":"processing","f":1743154015478324,"d_finished":6375,"c":10,"l":1743154015488910,"d":7388},{"name":"ProduceResults","f":1743154015475423,"d_finished":3568,"c":13,"l":1743154015489432,"d":3568},{"a":1743154015489439,"name":"Finish","f":1743154015489439,"d_finished":0,"c":0,"l":1743154015490071,"d":632},{"name":"task_result","f":1743154015478343,"d_finished":4978,"c":9,"l":1743154015487471,"d":4978}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;;); 2025-03-28T09:26:55.490563Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:26:55.456786Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=13268;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=13268;selected_rows=0; 2025-03-28T09:26:55.490610Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:26:55.490996Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=11;SelfId=[1:433:2448];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 
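Each scan_finish/scan_finished entry above embeds a stats={...} JSON blob whose fields can be cross-checked against each other: in the first one, "full" has f=1743154012841820, l=1743154012859172 and d=17352, so f/l read as start/end timestamps in microseconds and d as their difference, while c looks like an invocation count — inferred from this log, not from YDB documentation. A short sketch for extracting and summarizing those blobs, Python 3 assumed and helper names hypothetical; json.JSONDecoder.raw_decode stops at the end of the first complete JSON value, so the ';iterator=...' tail that follows the blob in these lines needs no stripping:

import json

def extract_scan_stats(log_line: str):
    # Pull the embedded stats={...} JSON out of a scan_finish/scan_finished entry,
    # or return None if the line carries no stats blob.
    marker = log_line.find("stats=")
    if marker == -1:
        return None
    payload = log_line[marker + len("stats="):]
    stats, _ = json.JSONDecoder().raw_decode(payload)
    return stats

def summarize(stats: dict) -> None:
    # 'd' values are microseconds (l - f in the 'full' block), 'c' call counts.
    print(f'total: {stats["full"]["d"]} us')
    for event in sorted(stats["events"], key=lambda e: e["d"], reverse=True):
        print(f'{event["name"]:>15}: d={event["d"]:>6} us  c={event["c"]}')

Applied to the first scan above, this would report processing (d=7147, c=10), task_result (d=5092, c=9) and bootstrap (d=3620, c=1) as the dominant stages of the 17352 us total.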
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec
>> TPersQueueTest::WriteAfterAlter [GOOD]
>> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed
>> DataShardReadIteratorSysTables::ShouldRead
>> DataShardReadIterator::ShouldReverseReadMultipleKeys
>> DataShardReadIterator::ShouldReceiveErrorAfterSplit
>> DataShardReadIteratorBatchMode::SelectingColumns
>> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive
>> TopicService::AccessRights [GOOD]
>> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks
>> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndCloseClientSessionWithEnabledRemotePreferredClusterDelaySec_SessionDiesOnlyAfterDelay [GOOD]
>> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay
>> TopicService::ThereAreGapsInTheOffsetRanges
>> TPersQueueTest::CheckACLForGrpcRead [GOOD]
>> TPersQueueTest::CheckKillBalancer
>> TPersQueueTest::DirectReadRestartPQRB [GOOD]
>> TPersQueueTest::DirectReadRestartTablet
>> DataShardReadIteratorSysTables::ShouldRead [GOOD]
>> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsCellVec [GOOD]
>> DataShardReadIterator::ShouldRangeReadReverseLeftInclusive [GOOD]
>> DataShardReadIterator::ShouldNotReadAfterCancel
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow
>> DataShardReadIteratorBatchMode::SelectingColumns [GOOD]
>> DataShardReadIteratorBatchMode::ShouldHandleReadAck
>> TFstClassSrcIdPQTest::NoMapping [GOOD]
>> TFstClassSrcIdPQTest::ProperPartitionSelected
>> DataShardReadIterator::ShouldReceiveErrorAfterSplit [GOOD]
>> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted
>> DataShardReadIterator::ShouldReverseReadMultipleKeys [GOOD]
>> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne
>> DataShardReadIterator::ShouldReverseReadMultipleRangesOneByOneWithAcks [GOOD]
>> DataShardReadIterator::ShouldStopWhenNodeDisconnected
>> TPersQueueTest::SetupWriteSession [GOOD]
>> TPersQueueTest::StoreNoMoreThanXSourceIDs
>> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Compressed [GOOD]
>> TPersQueueTest::TestWriteStat
>> DataShardReadIteratorSysTables::ShouldNotReadUserTableUsingLocalTid [GOOD]
>> DataShardReadIteratorSysTables::ShouldNotAllowArrow
>> TColumnShardTestReadWrite::WriteOverload [GOOD]
>> DataShardReadIterator::ShouldReadRangeInclusiveEndsArrow [GOOD]
>> DataShardReadIterator::ShouldReadRangeReverse
>> DataShardReadIterator::ShouldNotReadAfterCancel [GOOD]
>> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100
>> DataShardReadIteratorBatchMode::ShouldHandleReadAck [GOOD]
>> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace
>> DataShardReadIterator::ShouldReceiveErrorAfterSplitWhenExhausted [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite
>> DataShardReadIterator::ShouldReverseReadMultipleKeysOneByOne [GOOD]
>> DataShardReadIterator::ShouldReverseReadMultipleRanges
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload [GOOD]
Test command err:
2025-03-28T09:26:39.825741Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-28T09:26:39.968591Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-28T09:26:39.995470Z node 1 :TX_COLUMNSHARD INFO:
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:39.995833Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:40.004059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:40.004299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:40.004551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:40.004689Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:40.004804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:40.004929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:40.005045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:40.005164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:40.005311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:40.005430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:40.005536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:40.005672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:40.035647Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:40.035993Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:40.036054Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-03-28T09:26:40.036217Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:40.036424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:40.036535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:40.036585Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:40.036704Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:40.036788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:40.036836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:40.036875Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:40.037026Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:40.037093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:40.037133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:40.037160Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:40.037335Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:40.037396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:40.037438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:40.037488Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:40.037567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:40.037608Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:40.037641Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:40.037688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:40.037737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:40.037791Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:40.038169Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-03-28T09:26:40.038265Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=44; 2025-03-28T09:26:40.038383Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=71; 2025-03-28T09:26:40.098905Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=86; 2025-03-28T09:26:40.099186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:40.099265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:40.099316Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:40.099547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:40.099607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:40.099642Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:40.099859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:40.099913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:40.099946Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:40.100123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:40.100172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:40.100201Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:40.100344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:40.100399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:40.100461Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 2 2025-03-28T09:27:05.041983Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:05.044257Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 5 at tablet 9437184 2025-03-28T09:27:05.101490Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 2 2025-03-28T09:27:05.121867Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 2 2025-03-28T09:27:05.122028Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:05.149404Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 6 at tablet 9437184 2025-03-28T09:27:05.211453Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 2 2025-03-28T09:27:05.236081Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 2 2025-03-28T09:27:05.236234Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=9;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:05.262936Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 7 at tablet 9437184 2025-03-28T09:27:05.311575Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 2 2025-03-28T09:27:05.331272Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 2 2025-03-28T09:27:05.331427Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=10;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 
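From here on the WriteOverload dump is one fixed cycle per writeId: Write (record) into pathId ... -> Save Batch GenStep (execute, then commit) -> RESEND TEvWrite, repeated until the flush_writing entry and the harness's CATCH TEvWrite, status OK. A quick way to count how many attempts a run needed — Python 3.9+ assumed, pattern copied from the entries here, resend_attempts a hypothetical helper:

import re

WRITE = re.compile(r"Write \(record\) into pathId (\d+) writeId (\d+) at tablet (\d+)")

def resend_attempts(log_text: str) -> list[int]:
    # writeIds attempted before the first 'CATCH TEvWrite, status OK'.
    before_ok, _, _ = log_text.partition("CATCH TEvWrite, status OK")
    return [int(write_id) for _, write_id, _ in WRITE.findall(before_ok)]

On this excerpt that gives writeIds 5 through 21 against tablet 9437184 (earlier attempts presumably sit behind the '...' elision above), with writeId 22 only landing after the flush.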
2025-03-28T09:27:05.333809Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 8 at tablet 9437184 2025-03-28T09:27:05.392876Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 2 2025-03-28T09:27:05.426338Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 2 2025-03-28T09:27:05.426460Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=11;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:05.428126Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 9 at tablet 9437184 2025-03-28T09:27:05.480949Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 2 2025-03-28T09:27:05.527808Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 2 2025-03-28T09:27:05.528012Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=12;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:05.553638Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 10 at tablet 9437184 2025-03-28T09:27:05.618137Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 2 2025-03-28T09:27:05.805030Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 2 2025-03-28T09:27:05.805211Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=13;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:05.956738Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 11 at tablet 9437184 2025-03-28T09:27:06.010627Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 2 2025-03-28T09:27:06.034183Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 2 2025-03-28T09:27:06.034342Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=14;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:06.054848Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 12 at tablet 9437184 2025-03-28T09:27:06.094680Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 2 2025-03-28T09:27:06.114642Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 2 2025-03-28T09:27:06.114817Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=15;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:06.116622Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 13 at tablet 9437184 2025-03-28T09:27:06.161145Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 2 2025-03-28T09:27:06.189823Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch 
GenStep: 2:13 Blob count: 2 2025-03-28T09:27:06.189970Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=16;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:06.191623Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 14 at tablet 9437184 2025-03-28T09:27:06.237218Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 2 2025-03-28T09:27:06.263481Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 2 2025-03-28T09:27:06.263668Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=17;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:06.277200Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 15 at tablet 9437184 2025-03-28T09:27:06.322513Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 2 2025-03-28T09:27:06.342744Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 2 2025-03-28T09:27:06.342952Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=18;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:06.345125Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 16 at tablet 9437184 2025-03-28T09:27:06.392922Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 2 2025-03-28T09:27:06.420095Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 2 2025-03-28T09:27:06.420319Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=19;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:06.422183Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 17 at tablet 9437184 2025-03-28T09:27:06.463259Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 2 2025-03-28T09:27:06.504845Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 2 2025-03-28T09:27:06.505041Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=20;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:06.507184Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 18 at tablet 9437184 2025-03-28T09:27:06.566601Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 2 2025-03-28T09:27:06.591917Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 2 2025-03-28T09:27:06.592116Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=21;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:06.605916Z node 1 :TX_COLUMNSHARD DEBUG: 
Write (record) into pathId 1 writeId 19 at tablet 9437184 2025-03-28T09:27:06.668753Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 2 2025-03-28T09:27:06.695625Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 2 2025-03-28T09:27:06.695817Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=22;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:06.697426Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 20 at tablet 9437184 2025-03-28T09:27:06.750360Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 2 2025-03-28T09:27:06.942865Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 2 2025-03-28T09:27:06.943049Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=23;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:07.031107Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 21 at tablet 9437184 2025-03-28T09:27:07.081942Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 2 2025-03-28T09:27:07.113139Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 2 2025-03-28T09:27:07.113328Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=24;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:27:08.287445Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=6330728;count=1; CATCH TEvWrite, status OK 2025-03-28T09:27:08.345685Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 22 at tablet 9437184 2025-03-28T09:27:08.404612Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 2 2025-03-28T09:27:08.427122Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 2 2025-03-28T09:27:08.427323Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=25;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; >> TPersQueueTest::ReadRuleServiceType [GOOD] >> TPersQueueTest::ReadRuleServiceTypeLimit >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] Test command err: 2025-03-28T09:26:51.661905Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:51.762321Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:51.790755Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:51.791134Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:51.801753Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:51.801986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:51.802213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:51.802337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:51.802443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:51.802556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:51.802674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:51.802822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:51.802949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:51.803054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:51.803154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:51.803265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:51.840059Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:51.840375Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:51.840424Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:51.840618Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:51.840806Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:51.840908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:51.840950Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:51.841039Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:51.841120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:51.841168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:51.841202Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:51.841339Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:51.841390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:51.841418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:51.841438Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:51.841583Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:51.841642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:51.841679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:51.841705Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:51.841769Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:51.841794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:51.841815Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-03-28T09:26:51.841876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:51.841909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:51.841962Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:51.842351Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-03-28T09:26:51.842435Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-03-28T09:26:51.842520Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=46; 2025-03-28T09:26:51.842614Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-03-28T09:26:51.842784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:51.842839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:51.842867Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:51.843022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:51.843057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:51.843076Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:51.843193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:51.843230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:51.843261Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:51.843449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:51.843523Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:51.843554Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:51.843750Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:51.843807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:51.843856Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... :6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=10;merger=0;interval_id=49; 2025-03-28T09:27:10.713255Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-28T09:27:10.713348Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:27:10.713383Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=10;finished=1; 2025-03-28T09:27:10.713431Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:27:10.714636Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:27:10.714846Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:10;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:27:10.714887Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:27:10.714983Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=10; 2025-03-28T09:27:10.715048Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=80;num_rows=10;batch_columns=timestamp; 2025-03-28T09:27:10.715192Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:4120:6132];bytes=80;rows=10;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-03-28T09:27:10.715299Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:27:10.715398Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:27:10.715480Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:27:10.715964Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:27:10.716067Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:27:10.716164Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:27:10.716205Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:4125:6137] finished for tablet 9437184 2025-03-28T09:27:10.716639Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:4120:6132];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing"],"t":0.003},{"events":["f_task_result"],"t":0.004},{"events":["l_task_result"],"t":0.016},{"events":["f_ack"],"t":0.018},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.019}],"full":{"a":1743154030696571,"name":"_full_task","f":1743154030696571,"d_finished":0,"c":0,"l":1743154030716256,"d":19685},"events":[{"name":"bootstrap","f":1743154030697112,"d_finished":2392,"c":1,"l":1743154030699504,"d":2392},{"a":1743154030715942,"name":"ack","f":1743154030714607,"d_finished":898,"c":1,"l":1743154030715505,"d":1212},{"a":1743154030715924,"name":"processing","f":1743154030700561,"d_finished":3469,"c":8,"l":1743154030715507,"d":3801},{"name":"ProduceResults","f":1743154030698284,"d_finished":2486,"c":11,"l":1743154030716190,"d":2486},{"a":1743154030716193,"name":"Finish","f":1743154030716193,"d_finished":0,"c":0,"l":1743154030716256,"d":63},{"name":"task_result","f":1743154030700579,"d_finished":2412,"c":7,"l":1743154030713482,"d":2412}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:27:10.716705Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:4120:6132];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:27:10.717097Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:4120:6132];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing"],"t":0.003},{"events":["f_task_result"],"t":0.004},{"events":["l_task_result"],"t":0.016},{"events":["f_ack"],"t":0.018},{"events":["l_ProduceResults","f_Finish"],"t":0.019},{"events":["l_ack","l_processing","l_Finish"],"t":0.02}],"full":{"a":1743154030696571,"name":"_full_task","f":1743154030696571,"d_finished":0,"c":0,"l":1743154030716743,"d":20172},"events":[{"name":"bootstrap","f":1743154030697112,"d_finished":2392,"c":1,"l":1743154030699504,"d":2392},{"a":1743154030715942,"name":"ack","f":1743154030714607,"d_finished":898,"c":1,"l":1743154030715505,"d":1699},{"a":1743154030715924,"name":"processing","f":1743154030700561,"d_finished":3469,"c":8,"l":1743154030715507,"d":4288},{"name":"ProduceResults","f":1743154030698284,"d_finished":2486,"c":11,"l":1743154030716190,"d":2486},{"a":1743154030716193,"name":"Finish","f":1743154030716193,"d_finished":0,"c":0,"l":1743154030716743,"d":550},{"name":"task_result","f":1743154030700579,"d_finished":2412,"c":7,"l":1743154030713482,"d":2412}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:27:10.717163Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:27:10.696055Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=2812;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2812;selected_rows=0; 2025-03-28T09:27:10.717201Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:27:10.717372Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:4125:6137];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TopicService::ThereAreGapsInTheOffsetRanges [GOOD] >> DataShardReadIterator::ShouldHandleReadAck >> TopicService::OnePartitionAndNoGapsInTheOffsets >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> DataShardReadIteratorSysTables::ShouldNotAllowArrow
[GOOD] >> ReadIteratorExternalBlobs::ExtBlobs >> DataShardReadIterator::ShouldReadRangeReverse [GOOD] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight >> DataShardReadIterator::ShouldLimitReadRangeChunk1Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 >> YdbOlapStore::LogWithUnionAllDescending [GOOD] >> YdbOlapStore::LogTsRangeDescending >> DataShardReadIterator::ShouldReverseReadMultipleRanges [GOOD] >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite >> DataShardReadIteratorConsistency::LocalSnapshotReadWithPlanQueueRace [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Compressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed >> DataShardReadIterator::ShouldStopWhenNodeDisconnected [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks >> DataShardReadIterator::ShouldHandleReadAck [GOOD] >> DataShardReadIterator::ShouldHandleOutOfOrderReadAck >> DataStreams::TestUpdateStream >> TFstClassSrcIdPQTest::ProperPartitionSelected [GOOD] >> TPQCompatTest::DiscoverTopics >> TPersQueueTest::PreferredCluster_NonExistentPreferredCluster_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay >> EvWrite::WriteWithSplit [GOOD] >> DataShardReadIterator::ShouldReadRangeInclusiveEndsMissingLeftRight [GOOD] >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds >> TColumnShardTestReadWrite::WriteStandaloneOverload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithSplit [GOOD] Test command err: 2025-03-28T09:26:52.289320Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:52.369040Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:52.391268Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:52.391606Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:52.399119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:52.399298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:52.399521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:52.399666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:52.399788Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:52.399900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:52.400004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:52.400096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:52.400188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:52.400323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:52.400427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:52.400523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:52.425610Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:52.425843Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:52.425913Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:52.426079Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:52.426295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:52.426377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:52.426419Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:52.426539Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:52.426603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:52.426662Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:52.426716Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:52.426881Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:52.426945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:52.427009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:52.427044Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:52.427176Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:52.427238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:52.427307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:52.427339Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:52.427412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:52.427447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:52.427475Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:52.427516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:52.427550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:52.427579Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:52.427992Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-03-28T09:26:52.428089Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=42; 2025-03-28T09:26:52.428175Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-03-28T09:26:52.428263Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=45; 2025-03-28T09:26:52.428447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:52.428525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:52.428562Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:52.428768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:52.428817Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:52.428847Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:52.429002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:52.429056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:52.429086Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:52.429267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:52.429307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:52.429333Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:52.429454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:52.429492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:52.429532Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
tId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:858:2875];bytes=3691800;rows=450;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-03-28T09:27:19.375078Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:27:19.375188Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:27:19.375226Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:27:19.375257Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:27:19.376619Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:27:19.376734Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:149;schema=key: uint64 field: string;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:27:19.376772Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:27:19.376870Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;);columns=2;rows=149; 2025-03-28T09:27:19.376943Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=1222396;num_rows=149;batch_columns=key,field; 2025-03-28T09:27:19.377065Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:858:2875];bytes=1222396;rows=149;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2025-03-28T09:27:19.377174Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:27:19.377282Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:27:19.377400Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:27:19.377951Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:27:19.378053Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:27:19.378137Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:27:19.378177Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:867:2884] finished for tablet 9437184 2025-03-28T09:27:19.378728Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:858:2875];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.007},{"events":["l_bootstrap"],"t":0.016},{"events":["f_processing","f_task_result"],"t":0.017},{"events":["l_task_result"],"t":0.711},{"events":["f_ack"],"t":18.376},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":18.39}],"full":{"a":1743154020987595,"name":"_full_task","f":1743154020987595,"d_finished":0,"c":0,"l":1743154039378232,"d":18390637},"events":[{"name":"bootstrap","f":1743154020988304,"d_finished":15723,"c":1,"l":1743154021004027,"d":15723},{"a":1743154039377928,"name":"ack","f":1743154039363840,"d_finished":7801,"c":9,"l":1743154039377432,"d":8105},{"a":1743154039377909,"name":"processing","f":1743154021004718,"d_finished":231857,"c":53,"l":1743154039377434,"d":232180},{"name":"ProduceResults","f":1743154020995552,"d_finished":17044,"c":64,"l":1743154039378157,"d":17044},{"a":1743154039378161,"name":"Finish","f":1743154039378161,"d_finished":0,"c":0,"l":1743154039378232,"d":71},{"name":"task_result","f":1743154021004745,"d_finished":222712,"c":44,"l":1743154021698833,"d":222712}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:27:19.378814Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:858:2875];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:27:19.379287Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:858:2875];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.007},{"events":["l_bootstrap"],"t":0.016},{"events":["f_processing","f_task_result"],"t":0.017},{"events":["l_task_result"],"t":0.711},{"events":["f_ack"],"t":18.376},{"events":["l_ProduceResults","f_Finish"],"t":18.39},{"events":["l_ack","l_processing","l_Finish"],"t":18.391}],"full":{"a":1743154020987595,"name":"_full_task","f":1743154020987595,"d_finished":0,"c":0,"l":1743154039378857,"d":18391262},"events":[{"name":"bootstrap","f":1743154020988304,"d_finished":15723,"c":1,"l":1743154021004027,"d":15723},{"a":1743154039377928,"name":"ack","f":1743154039363840,"d_finished":7801,"c":9,"l":1743154039377432,"d":8730},{"a":1743154039377909,"name":"processing","f":1743154021004718,"d_finished":231857,"c":53,"l":1743154039377434,"d":232805},{"name":"ProduceResults","f":1743154020995552,"d_finished":17044,"c":64,"l":1743154039378157,"d":17044},{"a":1743154039378161,"name":"Finish","f":1743154039378161,"d_finished":0,"c":0,"l":1743154039378857,"d":696},{"name":"task_result","f":1743154021004745,"d_finished":222712,"c":44,"l":1743154021698833,"d":222712}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;;); 2025-03-28T09:27:19.379373Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:27:00.985990Z;index_granules=0;index_portions=5;index_batches=2052;committed_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=17133336;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=17133336;selected_rows=0; 2025-03-28T09:27:19.379422Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:27:19.379695Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:867:2884];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit98 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandaloneOverload [GOOD] Test command err: 2025-03-28T09:26:49.724067Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-28T09:26:49.837226Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:49.862588Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:49.862946Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:49.871709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:49.871944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:49.872199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:49.872334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:49.872447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:49.872571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:49.872694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:49.872811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:49.872958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:49.873067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:49.873177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:49.873294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:49.903737Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:49.904096Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:49.904168Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:49.904377Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:49.904561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:49.904647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:49.904691Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:49.904799Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:49.904872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:49.904929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:49.904964Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:49.905154Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:49.905232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:49.905276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:49.905307Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:49.905476Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:49.905540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:49.905591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:49.905622Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:49.905699Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:49.905737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:49.905772Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:49.905823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:49.905901Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:49.905939Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:49.906353Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=56; 2025-03-28T09:26:49.906474Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=58; 2025-03-28T09:26:49.906574Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=47; 2025-03-28T09:26:49.906666Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-03-28T09:26:49.906898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:49.906957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:49.906991Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:49.907202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:49.907255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:49.907286Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:49.907446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:49.907497Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:49.907527Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:49.907777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:49.907824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:49.907861Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:49.907998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:49.908045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:49.908098Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 2 2025-03-28T09:27:16.180518Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:16.182810Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 5 at tablet 9437184 2025-03-28T09:27:16.231606Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 2 2025-03-28T09:27:16.252657Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 2 2025-03-28T09:27:16.252847Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:16.281959Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 6 at tablet 9437184 2025-03-28T09:27:16.340203Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 2 2025-03-28T09:27:16.360436Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:6 Blob count: 2 2025-03-28T09:27:16.360629Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=9;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:16.385367Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 7 at tablet 9437184 2025-03-28T09:27:16.446308Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 2 2025-03-28T09:27:16.466359Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 2 2025-03-28T09:27:16.466536Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;local_tx_no=10;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:16.468716Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 8 at tablet 9437184 2025-03-28T09:27:16.544252Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 2 2025-03-28T09:27:16.583098Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:8 Blob count: 2 2025-03-28T09:27:16.583294Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=11;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:16.585738Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 9 at tablet 9437184 2025-03-28T09:27:16.646093Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 2 2025-03-28T09:27:16.677537Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:9 Blob count: 2 2025-03-28T09:27:16.677728Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=12;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:16.718399Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 10 at tablet 9437184 2025-03-28T09:27:16.796548Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 2 2025-03-28T09:27:16.963866Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:10 Blob count: 2 2025-03-28T09:27:16.964034Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=13;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:17.075367Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 11 at tablet 9437184 2025-03-28T09:27:17.132883Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 2 2025-03-28T09:27:17.154393Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:11 Blob count: 2 2025-03-28T09:27:17.154575Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=14;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:17.167263Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 12 at tablet 9437184 2025-03-28T09:27:17.220288Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 2 2025-03-28T09:27:17.241191Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:12 Blob count: 2 2025-03-28T09:27:17.241362Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=15;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:17.243442Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 13 at tablet 9437184 2025-03-28T09:27:17.300079Z node 1 
:TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 2 2025-03-28T09:27:17.327701Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:13 Blob count: 2 2025-03-28T09:27:17.327896Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=16;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:17.330037Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 14 at tablet 9437184 2025-03-28T09:27:17.383039Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 2 2025-03-28T09:27:17.404501Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:14 Blob count: 2 2025-03-28T09:27:17.404657Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=17;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:17.419175Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 15 at tablet 9437184 2025-03-28T09:27:17.469244Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 2 2025-03-28T09:27:17.488822Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:15 Blob count: 2 2025-03-28T09:27:17.489020Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=18;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:17.491365Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 16 at tablet 9437184 2025-03-28T09:27:17.539706Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 2 2025-03-28T09:27:17.575964Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:16 Blob count: 2 2025-03-28T09:27:17.576174Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=19;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:17.578477Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 17 at tablet 9437184 2025-03-28T09:27:17.632811Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 2 2025-03-28T09:27:17.665356Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:17 Blob count: 2 2025-03-28T09:27:17.665544Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=20;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:17.676225Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 18 at tablet 9437184 2025-03-28T09:27:17.723532Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 2 2025-03-28T09:27:17.747851Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:18 Blob count: 2 2025-03-28T09:27:17.748077Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;local_tx_no=21;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:17.750408Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 19 at tablet 9437184 2025-03-28T09:27:17.808141Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 2 2025-03-28T09:27:17.843827Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:19 Blob count: 2 2025-03-28T09:27:17.843989Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=22;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:17.846182Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 20 at tablet 9437184 2025-03-28T09:27:17.893888Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 2 2025-03-28T09:27:18.078454Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:20 Blob count: 2 2025-03-28T09:27:18.078616Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=23;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; RESEND TEvWrite 2025-03-28T09:27:18.160030Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 21 at tablet 9437184 2025-03-28T09:27:18.213974Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 2 2025-03-28T09:27:18.241171Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:21 Blob count: 2 2025-03-28T09:27:18.241375Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=24;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:27:19.422396Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=6330728;count=1; CATCH TEvWrite, status OK 2025-03-28T09:27:19.485733Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 22 at tablet 9437184 2025-03-28T09:27:19.543082Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 2 2025-03-28T09:27:19.566616Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:22 Blob count: 2 2025-03-28T09:27:19.566868Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=25;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKey-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite >> DataShardReadIteratorConsistency::LocalSnapshotReadHasRequiredDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies >> DataShardReadIterator::ShouldReturnMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite >> DataStreams::TestNonChargeableUser >> DataStreams::TestStreamStorageRetention >> DataStreams::TestUpdateStream [GOOD] >> DataStreams::Test_AutoPartitioning_Describe >> 
DataShardReadIterator::ShouldHandleOutOfOrderReadAck [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead >> TopicService::OnePartitionAndNoGapsInTheOffsets [GOOD] >> TPersQueueTest::ReadRuleServiceTypeLimit [GOOD] >> TPersQueueTest::ReadRuleDisallowDefaultServiceType >> DataShardReadIterator::TryCommitLocksPrepared-Volatile-BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks >> DataShardReadIterator::ShouldReadRangeNonInclusiveEnds [GOOD] >> DataShardReadIterator::ShouldReadRangeLeftInclusive >> DataStreams::TestNonChargeableUser [GOOD] >> DataStreams::TestPutEmptyMessage >> TopicService::MultiplePartitionsAndNoGapsInTheOffsets >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit99 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 >> DataStreams::TestStreamStorageRetention [GOOD] >> DataStreams::TestStreamPagination >> DataShardReadIteratorConsistency::LocalSnapshotReadNoUnnecessaryDependencies [GOOD] >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite >> TPersQueueTest::CheckKillBalancer [GOOD] >> TPersQueueTest::CheckDeleteTopic >> DataStreams::Test_AutoPartitioning_Describe [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeRead [GOOD] >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse >> DataStreams::TestPutEmptyMessage [GOOD] >> DataStreams::TestListStreamConsumers >> DataShardReadIterator::ShouldReadRangeLeftInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeRightInclusive >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit100 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 >> ReadIteratorExternalBlobs::ExtBlobs [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithSpecificKeys >> DataShardReadIterator::TryCommitLocksPrepared+Volatile-BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenWriteInSeparateTransactions-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite >> DataShardReadIteratorConsistency::LocalSnapshotReadWithConcurrentWrites [GOOD] >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows >> DataStreams::TestStreamPagination [GOOD] >> DataStreams::TestShardPagination >> TPQCompatTest::DiscoverTopics [GOOD] >> TPQCompatTest::SetupLockSession ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] Test command err: 2025-03-28T09:27:19.281658Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789590387502139:2075];send_to=[0:7307199536658146131:7762515]; 
2025-03-28T09:27:19.281721Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001545/r3tmp/tmpNGRfXG/pdisk_1.dat 2025-03-28T09:27:19.766636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:19.767543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:19.777291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27195, node 1 2025-03-28T09:27:19.828650Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:19.839737Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:27:19.839767Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:27:20.004702Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:20.004729Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:20.004736Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:20.004898Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16232 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:20.513312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:20.610049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:16232 2025-03-28T09:27:20.816177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:27:21.336009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:27:21.606906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:27:24.197109Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486789614595179363:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:24.197249Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001545/r3tmp/tmp4Mgjog/pdisk_1.dat 2025-03-28T09:27:24.449570Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:24.476868Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:24.476967Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:24.480531Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64665, node 4 2025-03-28T09:27:24.570434Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:24.570460Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:24.570469Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:24.570628Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1156 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:24.824482Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:27:24.893803Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:1156 2025-03-28T09:27:25.088830Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } ALTER_SCHEME: { Name: "test-topic" Split { Partition: 1 SplitBoundary: "a" } } 2025-03-28T09:27:26.412351Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 107:0, at schemeshard: 72057594046644480 2025-03-28T09:27:27.650998Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:27:27.759889Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:27:27.927080Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:27:28.302455Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:27:30.062969Z node 7 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486789638331955223:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:30.063040Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001545/r3tmp/tmptdzFuD/pdisk_1.dat 2025-03-28T09:27:30.263703Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:30.296535Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:30.296641Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:30.304146Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11507, node 7 2025-03-28T09:27:30.419726Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:30.419753Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:30.419762Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:30.419932Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3687 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:30.771170Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:30.855894Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:3687 2025-03-28T09:27:31.091729Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:27:31.114876Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 >> DataShardReadIterator::ShouldHandleReadAckWhenExhaustedRangeReadReverse [GOOD] >> DataShardReadIterator::ShouldForbidDuplicatedReadId >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] >> TPersQueueTest::PreferredCluster_EnabledRemotePreferredClusterAndRemoteClusterEnabledDelaySec_SessionDiesOnlyAfterDelay [GOOD] >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay >> DataStreams::TestListStreamConsumers [GOOD] >> DataStreams::TestListShards1Shard >> DataStreams::TestGetShardIterator >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Compressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] Test command err: 2025-03-28T09:23:18.854271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:526:2414], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:23:18.854559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:23:18.854674Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fa7/r3tmp/tmpXqtXjO/pdisk_1.dat 2025-03-28T09:23:19.385634Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10324, node 1 2025-03-28T09:23:19.850540Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:23:19.850596Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:23:19.850652Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:23:19.851468Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:23:19.854895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:23:19.984788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:19.984928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:20.010774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4179 2025-03-28T09:23:20.760447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:23:24.700759Z node 3 :STATISTICS INFO: Subscribed for config changes on node 3 2025-03-28T09:23:24.749433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:24.749577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:24.808939Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T09:23:24.811549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:25.116729Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:25.117294Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:25.117804Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:25.117961Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:25.118170Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:25.118250Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:25.118336Z node 3 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:25.118448Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:25.118537Z node 3 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:25.334979Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:25.335117Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:25.350467Z node 3 :HIVE WARN: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:25.558931Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:25.622929Z node 3 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:23:25.623055Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:23:25.691244Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:23:25.692607Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:23:25.692834Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:23:25.692899Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:23:25.692962Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:23:25.693020Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:23:25.693139Z node 3 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:23:25.693199Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:23:25.693617Z node 3 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:23:25.726608Z node 3 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:23:25.726786Z node 3 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [3:1949:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:23:25.744364Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1962:2606] 2025-03-28T09:23:25.752234Z node 3 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [3:1980:2616] 2025-03-28T09:23:25.752460Z node 3 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:1980:2616], schemeshard id = 72075186224037897 2025-03-28T09:23:25.776443Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared1 2025-03-28T09:23:25.831885Z node 3 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:23:25.838938Z node 3 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:23:25.839050Z node 3 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared1/.metadata/_statistics 2025-03-28T09:23:25.873182Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:23:25.899492Z node 3 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:23:25.899681Z node 3 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:23:26.296116Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:23:26.521170Z node 3 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:23:26.596004Z node 3 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:23:27.725752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:23:32.467685Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:23:32.509759Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:32.509885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:32.554521Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:23:32.563926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:32.837322Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:32.837870Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:32.838540Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:32.839050Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:32.839417Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:32.839546Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:32.839693Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:32.839826Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:32.839962Z node 2 :HIVE WARN: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:23:32.986610Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:23:32.986761Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:23:33.002902Z node 2 :HIVE WARN: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:23:33.248436Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:23:33.310540Z node 2 :STATISTICS INFO: [72075186224038895] OnActivateExecutor 2025-03-28T09:23:33.310651Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Execute 2025-03-28T09:23:33.397117Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInitSchema::Complete 2025-03-28T09:23:33.398586Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Execute 2025-03-28T09:23:33.399152Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded BaseStatistics: schemeshard 
count# 0 2025-03-28T09:23:33.399249Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ColumnStatistics: column count# 0 2025-03-28T09:23:33.399333Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:23:33.399409Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:23:33.399489Z node 2 :STATISTICS DEBUG: [72075186224038895] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:23:33.399582Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxInit::Complete 2025-03-28T09:23:33.400364Z node 2 :STATISTICS INFO: [72075186224038895] Subscribed for config changes 2025-03-28T09:23:33.463195Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224038895, at schemeshard: 72075186224038898 2025-03-28T09:23:33.463325Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3223:2597], at schemeshard: 72075186224038898, StatisticsAggregatorId: 72075186224038895, at schemeshard: 72075186224038898 2025-03-28T09:23:33.469540Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3234:2607] 2025-03-28T09:23:33.475933Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:3256:2619] 20 ... erId: 72075186224038898, LocalPathId: 3] 2025-03-28T09:27:28.494290Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared2 2025-03-28T09:27:28.498542Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:27:28.504664Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:12612:5273], DatabaseId: /Root/Shared2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:27:28.504792Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:12622:5278], DatabaseId: /Root/Shared2, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:27:28.504868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared2, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:27:28.531183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976730658:2, at schemeshard: 72075186224038898 2025-03-28T09:27:28.630369Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:12626:5281], DatabaseId: /Root/Shared2, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976730658 completed, doublechecking } 2025-03-28T09:27:28.842633Z node 2 :TX_PROXY ERROR: Actor# [2:12717:5329] txid# 281474976730659, issues: { message: "Check failed: path: \'/Root/Shared2/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224038898, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:27:28.869417Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:12746:5344]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:27:28.869937Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:27:28.870045Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:12748:5346] 2025-03-28T09:27:28.870121Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:12748:5346] 2025-03-28T09:27:28.872002Z node 2 :STATISTICS DEBUG: [72075186224038895] EvServerConnected, pipe server id = [2:12749:5347] 2025-03-28T09:27:28.872297Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:12748:5346], server id = [2:12749:5347], tablet id = 72075186224038895, status = OK 2025-03-28T09:27:28.872398Z node 2 :STATISTICS DEBUG: [72075186224038895] EvConnectNode, pipe server id = [2:12749:5347], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:27:28.872467Z node 2 :STATISTICS DEBUG: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-28T09:27:28.872792Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:27:28.872865Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:12746:5344], StatRequests.size() = 1 2025-03-28T09:27:29.001390Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjA1ZWEzNWUtNDAzMzljYzktN2M3MDU4YmUtMTAzMzEyMzg=, TxId: 2025-03-28T09:27:29.001505Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjA1ZWEzNWUtNDAzMzljYzktN2M3MDU4YmUtMTAzMzEyMzg=, TxId: 2025-03-28T09:27:29.002304Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-03-28T09:27:29.019221Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038898, LocalPathId: 3] 2025-03-28T09:27:29.019273Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-03-28T09:27:29.075380Z node 2 :STATISTICS DEBUG: [72075186224038895] EvFastPropagateCheck 2025-03-28T09:27:29.075441Z node 2 :STATISTICS DEBUG: [72075186224038895] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:27:29.168240Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:12748:5346], schemeshard count = 1 2025-03-28T09:27:29.719857Z node 3 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-03-28T09:27:29.719931Z node 3 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 182.000000s, at schemeshard: 72075186224037899 2025-03-28T09:27:29.720271Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 26 2025-03-28T09:27:29.740482Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:27:30.082482Z node 3 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:27:30.095228Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:30.095287Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:30.095321Z node 3 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is data table. 2025-03-28T09:27:30.095351Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-28T09:27:30.095631Z node 3 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared1 2025-03-28T09:27:30.098240Z node 3 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:27:30.126343Z node 3 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=MTdhMTU5MWItNWYzODQ2NzAtNzlkOGE1MmYtNGNiYWZjNDY=, TxId: 2025-03-28T09:27:30.126397Z node 3 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=MTdhMTU5MWItNWYzODQ2NzAtNzlkOGE1MmYtNGNiYWZjNDY=, TxId: 2025-03-28T09:27:30.127221Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:27:30.149096Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-28T09:27:30.149149Z node 3 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-03-28T09:27:30.242453Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 119 ], ReplyToActorId[ [3:12851:5670]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:27:30.243076Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 119 ] 2025-03-28T09:27:30.243148Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 119, ReplyToActorId = [3:12851:5670], StatRequests.size() = 1 2025-03-28T09:27:32.563792Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 120 ], ReplyToActorId[ [3:12934:5704]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:27:32.564342Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 120 ] 2025-03-28T09:27:32.564402Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 120, ReplyToActorId = [3:12934:5704], StatRequests.size() = 1 2025-03-28T09:27:33.173911Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224038900 2025-03-28T09:27:33.173990Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 239.000000s, at schemeshard: 72075186224038900 2025-03-28T09:27:33.174541Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id# 72075186224038900, stats size# 26 2025-03-28T09:27:33.197256Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxSchemeShardStats::Complete 2025-03-28T09:27:33.481458Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal 2025-03-28T09:27:33.481528Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:33.481574Z node 2 :STATISTICS DEBUG: [72075186224038895] IsColumnTable. Path [OwnerId: 72075186224038900, LocalPathId: 2] is data table. 2025-03-28T09:27:33.481613Z node 2 :STATISTICS DEBUG: [72075186224038895] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224038900, LocalPathId: 2] 2025-03-28T09:27:33.482168Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared2 2025-03-28T09:27:33.485450Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:27:33.506287Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjAxMDcyMzYtMjg3OTRjYWUtODJlOTY0Y2ItZDM0NzM5MWQ=, TxId: 2025-03-28T09:27:33.506359Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjAxMDcyMzYtMjg3OTRjYWUtODJlOTY0Y2ItZDM0NzM5MWQ=, TxId: 2025-03-28T09:27:33.507191Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Execute 2025-03-28T09:27:33.524611Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224038900, LocalPathId: 2] 2025-03-28T09:27:33.524673Z node 2 :STATISTICS DEBUG: [72075186224038895] TTxFinishTraversal::Complete. No ActorId to send reply. 
2025-03-28T09:27:34.583729Z node 3 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 3 2025-03-28T09:27:34.584011Z node 3 :STATISTICS DEBUG: EvPropagateStatistics, node id = 3 2025-03-28T09:27:34.584763Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:27:34.596845Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:34.596908Z node 3 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:34.696846Z node 3 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 121 ], ReplyToActorId[ [3:13036:5724]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:27:34.697384Z node 3 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 121 ] 2025-03-28T09:27:34.697459Z node 3 :STATISTICS DEBUG: ReplySuccess(), request id = 121, ReplyToActorId = [3:13036:5724], StatRequests.size() = 1 2025-03-28T09:27:34.698564Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:13038:5447]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:27:34.705319Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:27:34.705869Z node 2 :STATISTICS DEBUG: [72075186224038895] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2025-03-28T09:27:34.705926Z node 2 :STATISTICS DEBUG: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-28T09:27:34.706225Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:27:34.706310Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:13038:5447], StatRequests.size() = 1 >> YdbOlapStore::DuplicateRows [GOOD] >> YdbOlapStore::LogExistingRequest >> DataShardReadIterator::ShouldReadRangeRightInclusive [GOOD] >> DataShardReadIterator::ShouldReadRangeOneByOne >> TopicService::MultiplePartitionsAndNoGapsInTheOffsets [GOOD] >> DataStreams::TestDeleteStream >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit101 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 >> DataStreams::TestShardPagination [GOOD] >> TTopicYqlTest::DropTopicYql >> DataShardReadIteratorConsistency::Bug_7674_IteratorDuplicateRows [GOOD] >> DataShardReadIteratorConsistency::LeaseConfirmationNotOutOfOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestShardPagination [GOOD] Test command err: 2025-03-28T09:27:23.500457Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789610252664418:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:23.500656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014f0/r3tmp/tmpMCCtoL/pdisk_1.dat 2025-03-28T09:27:23.891113Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:23.902774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:23.902900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:23.911040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on 
GrpcPort 22276, node 1 2025-03-28T09:27:24.006061Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:24.006093Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:24.006102Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:24.006243Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23710 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:24.300549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:24.450750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:23710 2025-03-28T09:27:24.651574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 40960, code: 500080 2025-03-28T09:27:24.953584Z node 1 :TX_PROXY ERROR: Actor# [1:7486789614547633887:3488] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/stream_TestStreamStorageRetention\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:27:28.105298Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486789630859635202:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:28.105355Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014f0/r3tmp/tmpOLZkaR/pdisk_1.dat 2025-03-28T09:27:28.347109Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22547, node 4 2025-03-28T09:27:28.476175Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:28.476260Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:28.534302Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:27:28.554129Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:28.554159Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:28.554168Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:28.554303Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2096 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:28.916636Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:27:29.045735Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:2096 2025-03-28T09:27:29.242487Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:33.105605Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486789630859635202:2206];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:33.105668Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:27:35.511928Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486789661832686804:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:35.512083Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014f0/r3tmp/tmpcopFQZ/pdisk_1.dat 2025-03-28T09:27:35.726530Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:35.753278Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:35.753387Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:35.762362Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15209, node 7 2025-03-28T09:27:35.838979Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:35.839002Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:35.839010Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:35.839581Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5921 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:27:36.116335Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:36.180090Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:5921 2025-03-28T09:27:36.409553Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRange-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefix-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite >> DataShardReadIterator::ShouldForbidDuplicatedReadId [GOOD] >> DataShardReadIterator::ShouldFailUknownColumns >> DataStreams::TestGetShardIterator [GOOD] >> DataStreams::TestGetRecordsWithoutPermission >> TPersQueueTest::ReadRuleDisallowDefaultServiceType [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigration >> DataShardReadIterator::TryCommitLocksPrepared-Volatile+BreakLocks [GOOD] >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks >> DataStreams::TestListShards1Shard [GOOD] >> DataStreams::TestGetRecordsStreamWithSingleShard ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestListShards1Shard [GOOD] Test command err: 2025-03-28T09:27:22.617739Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789604422844809:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:22.618807Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001536/r3tmp/tmpPm5VKJ/pdisk_1.dat 2025-03-28T09:27:23.051856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:23.051980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:23.081823Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:23.082095Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7407, node 1 2025-03-28T09:27:23.215396Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:23.215424Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:23.215432Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:23.215567Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11758 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:23.487082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:23.618773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:11758 2025-03-28T09:27:23.828911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:24.169675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480 encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" 
} records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } 2025-03-28T09:27:24.263496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:27:24.361799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:27:24.397181Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-28T09:27:24.397227Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-03-28T09:27:24.397248Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-28T09:27:24.408929Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-03-28T09:27:24.409005Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-03-28T09:27:24.409044Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1743154044032-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1743154044,"finish":1743154044},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743154044}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037888-1743154044032-2","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1743154044,"finish":1743154044},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743154044}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1743154044322-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1743154044,"finish":1743154044},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037890","source_wt":1743154044}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037890-1743154044322-4","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1743154044,"finish":1743154044},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037890","source_wt":1743154044}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1743154044322-5","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1743154044,"finish":1743154044},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743154044}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037888-1743154044322-6","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1743154044,"finish":1743154044},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743154044}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1743154044032-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1743154044,"finish":1743154044},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037888","source_wt":1743154044}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037888-1743154044032-2","schema":"ydb.serverless.v1","tags":{"ydb_size":0},"usage":{"quantity":0,"unit":"byte*second","start":1743154044,"finish":1743154044},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"1.0.0","source_id":"72075186224037888","source_wt":1743154044}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1743154044322-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1743154044,"finish":1743154044},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root"},"version":"v1","source_id":"72075186224037890","source_wt":1743154044}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"used_storage-root-72075186224037890-1743154044322-4","schema":"ydb.serverless.v1","tags":{"ydb_size ... ed: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:27:33.375331Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:33.473397Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:2530 2025-03-28T09:27:33.696215Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR E0000 00:00:1743154053.898165 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154053.898422 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154053.906831 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154053.906963 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154053.917396 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154053.917505 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154053.927385 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154053.927509 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-03-28T09:27:33.950134Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:27:34.039788Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 E0000 00:00:1743154054.123613 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154054.123693 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-03-28T09:27:34.133615Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 
281474976710663:0, at schemeshard: 72057594046644480 E0000 00:00:1743154054.197532 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154054.197676 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-03-28T09:27:34.207376Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480 E0000 00:00:1743154054.272360 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154054.272466 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154054.293688 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154054.293838 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-03-28T09:27:34.334947Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:27:34.356231Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037892 not found 2025-03-28T09:27:34.356257Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2025-03-28T09:27:34.356278Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037893 not found 2025-03-28T09:27:34.356297Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2025-03-28T09:27:34.356323Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2025-03-28T09:27:34.356736Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2025-03-28T09:27:34.368023Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 2025-03-28T09:27:34.368088Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-03-28T09:27:34.368117Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-03-28T09:27:34.368145Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2025-03-28T09:27:34.368217Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-03-28T09:27:34.368322Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found E0000 00:00:1743154054.385715 482931 message_lite.cc:131] Can't parse 
message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154054.385841 482931 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2025-03-28T09:27:37.763567Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486789669486320706:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:37.763665Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001536/r3tmp/tmpNcHEJk/pdisk_1.dat 2025-03-28T09:27:37.957840Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:38.000986Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:38.001085Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:38.012853Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2687, node 10 2025-03-28T09:27:38.188109Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:38.188136Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:38.188145Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:38.188300Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17744 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:38.477757Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:27:38.568310Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:17744 2025-03-28T09:27:38.828916Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... E0000 00:00:1743154059.047289 483800 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154059.064921 483800 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154059.076712 483800 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154059.088933 483800 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1743154059.098562 483800 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn >> DataStreams::TestDeleteStream [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlag >> DataShardReadIterator::ShouldReadRangeOneByOne [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk5 >> YdbOlapStore::LogTsRangeDescending [GOOD] >> YdbQueryService::TestAttachTwice >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit198 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 >> DataStreams::TestGetRecordsWithoutPermission [GOOD] >> DataStreams::TestGetRecordsWithCount >> DataShardReadIterator::ShouldFailUknownColumns [GOOD] >> DataShardReadIterator::ShouldFailWrongSchema >> DataStreams::TestGetRecordsStreamWithSingleShard [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite >> DataStreams::TestDeleteStreamWithEnforceFlag [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse >> DataShardReadIteratorConsistency::LeaseConfirmationNotOutOfOrder [GOOD] >> DataShardReadIteratorFastCancel::ShouldProcessFastCancel >> KqpErrors::ResolveTableError >> DataShardReadIterator::TryCommitLocksPrepared+Volatile+BreakLocks [GOOD] >> DataShardReadIterator::ShouldRollbackLocksWhenWrite >> YdbTableSplit::RenameTablesAndSplit [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk5 [GOOD] >> DataShardReadIterator::ShouldReadRangeChunk7 >> TColumnShardTestReadWrite::CompactionGC [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD] Test command err: 2025-03-28T09:26:19.525186Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789333414302121:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:19.525245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00155d/r3tmp/tmpKABE4N/pdisk_1.dat 2025-03-28T09:26:20.009064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:26:20.009184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:26:20.012387Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:26:20.014128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21953, node 1 2025-03-28T09:26:20.203371Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:26:20.203401Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:26:20.203408Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:26:20.203510Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3799 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:26:20.811608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:26:22.821335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789346299205068:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:22.821510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:23.740416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Foo, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:26:23.747775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:26:23.747819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:26:23.751646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Foo 2025-03-28T09:26:23.862037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153983900, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:26:23.924022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710658:0 2025-03-28T09:26:23.955034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789350594172611:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:23.955100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:26:23.992302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /Root/Foo, pathId: , opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:26:23.992868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:26:23.992903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:26:23.995423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Foo 2025-03-28T09:26:24.014398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153984061, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:26:24.025042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710659:0 Fast forward 1m 2025-03-28T09:26:24.525223Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789333414302121:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:26:24.525322Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; partitions 2 Fast forward 1m partitions 2 Fast forward 1m partitions 2 Fast forward 1m 2025-03-28T09:26:33.937747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:26:33.944550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:26:34.024180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-03-28T09:26:34.031067Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-28T09:26:34.031692Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-28T09:26:35.012337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:26:35.012367Z node 1 :IMPORT WARN: Table profiles were not loaded partitions 1 2025-03-28T09:26:36.090664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMoveTable Propose, from: /Root/Foo, to: /Root/Bar, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:26:36.090993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:26:36.093233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE RENAME, dst path: /Root/Foo, dst path: /Root/Bar 2025-03-28T09:26:36.107978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743154476154, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:26:36.113485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 281474976710660, done: 0, blocked: 1 
2025-03-28T09:26:36.123008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710660:0 2025-03-28T09:26:36.124871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 Fast forward 1m partitions 1 Fast forward 1m 2025-03-28T09:26:43.977438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6841088 rowCount 50000 cpuUsage 0.4973 2025-03-28T09:26:44.077490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T09:26:44.077621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:3 data size 6841088 row count 50000 2025-03-28T09:26:44.077675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037890 maps to shardIdx: 72057594046644480:3 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=Bar, is column=0, is olap=0 2025-03-28T09:26:44.077714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 3: RowCount 50000, DataSize 6841088 2025-03-28T09:26:44.077833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Requesting full stats from datashard 72075186224037890 2025-03-28T09:26:44.077945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-28T09:26:44.078156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Got partition histogram at tablet 72057594046644480 from datashard 72075186224037890 state: 'Ready' data size: 6841088 row count: 50000 2025-03-28T09:26:44.078232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPartitionHistogram::Execute partition histogram at tablet 72057594046644480 from datashard 72075186224037890 for pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6841088 rowCount 50000 dataSizeHistogram buckets 0 2025-03-28T09:26:44.078285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Failed to find proper split key (initially) for 'Split by size' of datashard 72075186224037890 partitions 1 Fast forward 1m 2025-03-28T09:26:48.977669Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037890 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6841088 rowCount 50000 cpuUsage 0.1245 2025-03-28T09:26:49.078183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T09:26:49.078326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:3 data size 6841088 row count 50000 2025-03-28T09:26:49.078382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037890 maps to shardIdx: 72057594046644480:3 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=Bar, is column=0, is olap=0 2025-03-28T09:26:49.078425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037890 followerId=0, pathId 3: RowCount 50000, DataSize 6841088 2025-03-28T09:26:49.078526Z node 1 :FLAT_TX ... 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:27:50.116811Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 ack parts [ [72075186224037890:1:119:1:12288:10920:0] ] return to tablet 72075186224037891 2025-03-28T09:27:50.118640Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 parts [ [72075186224037890:1:119:1:12288:10920:0] ] return ack processed 2025-03-28T09:27:50.118647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:27:50.118742Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ 281474976710664 ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:27:50.118799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2025-03-28T09:27:50.118836Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710664 datashard 72075186224037892 state PreOffline 2025-03-28T09:27:50.118844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2025-03-28T09:27:50.118868Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 Got TEvSchemaChangedResult from SS at 72075186224037892 2025-03-28T09:27:50.118913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:27:50.118945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976710664:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:27:50.119022Z node 1 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:27:50.119040Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710664 datashard 72075186224037891 state PreOffline 2025-03-28T09:27:50.119063Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2025-03-28T09:27:50.119224Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7486789724256331510:2797], serverId# [1:7486789724256331515:4825], sessionId# [0:0:0] 2025-03-28T09:27:50.119321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-03-28T09:27:50.119443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710664:0 progress is 1/1 2025-03-28T09:27:50.119453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710664 ready parts: 1/1 2025-03-28T09:27:50.119466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710664:0 progress is 1/1 2025-03-28T09:27:50.119477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710664 ready parts: 1/1 2025-03-28T09:27:50.119495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710664, ready parts: 1/1, is published: true 2025-03-28T09:27:50.119550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7486789724256331453:2794] message: TxId: 281474976710664 2025-03-28T09:27:50.119583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710664 ready parts: 1/1 
2025-03-28T09:27:50.119596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710664:0 2025-03-28T09:27:50.119610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710664:0 2025-03-28T09:27:50.119649Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037890:1:119:1:12288:10920:0] ] return ack processed 2025-03-28T09:27:50.119702Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:27:50.119748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-28T09:27:50.119795Z node 1 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-03-28T09:27:50.120001Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7486789724256331525:2799], serverId# [1:7486789724256331534:4842], sessionId# [0:0:0] 2025-03-28T09:27:50.120242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486789393543846001 RawX2: 4503603922340191 } TabletId: 72075186224037890 State: 4 2025-03-28T09:27:50.120304Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:27:50.122307Z node 1 :TX_DATASHARD DEBUG: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:27:50.122395Z node 1 :TX_DATASHARD INFO: 72075186224037892 Initiating switch from PreOffline to Offline state 2025-03-28T09:27:50.122454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:27:50.122832Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-28T09:27:50.122912Z node 1 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:27:50.123479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486789719961363809 RawX2: 4503603922340559 } TabletId: 72075186224037891 State: 4 2025-03-28T09:27:50.123544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:27:50.125985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:27:50.126122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-28T09:27:50.126267Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-03-28T09:27:50.126375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-28T09:27:50.126878Z node 1 :TX_DATASHARD INFO: 72075186224037892 
Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:27:50.126987Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-03-28T09:27:50.127041Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7486789406428748267:3259], serverId# [1:7486789406428748268:3260], sessionId# [0:0:0] 2025-03-28T09:27:50.127340Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-28T09:27:50.127417Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-03-28T09:27:50.127637Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-03-28T09:27:50.128513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486789719961363837 RawX2: 4503603922340560 } TabletId: 72075186224037892 State: 4 2025-03-28T09:27:50.128599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:27:50.129123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-28T09:27:50.129162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-28T09:27:50.129248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-28T09:27:50.129500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-28T09:27:50.130913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:27:50.130945Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-03-28T09:27:50.130976Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [1:7486789719961364133:4751], serverId# [1:7486789719961364134:4752], sessionId# [0:0:0] 2025-03-28T09:27:50.131039Z node 1 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-03-28T09:27:50.131303Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-03-28T09:27:50.131346Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-03-28T09:27:50.131581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-28T09:27:50.131603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-28T09:27:50.132153Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-03-28T09:27:50.134375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-03-28T09:27:50.134587Z node 1 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-03-28T09:27:50.134606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-28T09:27:50.134820Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:27:50.134851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-28T09:27:50.134892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-28T09:27:50.135055Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-03-28T09:27:50.135963Z node 1 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-03-28T09:27:50.136050Z node 1 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-03-28T09:27:50.136750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-03-28T09:27:50.136781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-03-28T09:27:50.136826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 >> TPQCompatTest::SetupLockSession [GOOD] >> TPQCompatTest::BadTopics |89.5%| [TA] $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGC [GOOD] Test command err: 2025-03-28T09:26:47.764933Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:47.857219Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:47.880669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:47.880973Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:47.887809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:47.888034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:47.888281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:47.888390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:47.888490Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:47.888622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:47.888747Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:47.888859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:47.888953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:47.889051Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.889157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:47.889278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:47.910121Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:47.910351Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:47.910390Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:47.910546Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:47.910714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:47.910785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:47.910823Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:47.910949Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:47.911024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:47.911065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:47.911096Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:47.911208Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:47.911249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:47.911275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:47.911295Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:47.911386Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:47.911421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:47.911459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:47.911501Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:47.911555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:47.911577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:47.911595Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:47.911638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:47.911663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:47.911707Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:47.911992Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=33; 2025-03-28T09:26:47.912057Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-03-28T09:26:47.912137Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=49; 2025-03-28T09:26:47.912199Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=28; 2025-03-28T09:26:47.912339Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:47.912378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:47.912400Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:47.912512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:47.912539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.912577Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.912684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:47.912715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:47.912735Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:47.912871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:47.912902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:47.912920Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:47.913013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:47.913047Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:47.913078Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
p_indexation;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=eb1ace86-bb611f0-8b585144-e97fceda,;}; 2025-03-28T09:27:51.067821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:831;event=skip_compaction;reason=disabled; 2025-03-28T09:27:51.067891Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-28T09:27:51.067968Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:27:51.068031Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:27:51.068082Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:27:51.068226Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.415500s; 2025-03-28T09:27:51.068294Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:27:51.333320Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=4735248;count=1; 2025-03-28T09:27:51.381240Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 45 at tablet 9437184 2025-03-28T09:27:51.381600Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:89 Blob count: 1 2025-03-28T09:27:51.394464Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:89 Blob count: 1 2025-03-28T09:27:51.394668Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=270;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:769;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=eb1ace86-bb611f0-8b585144-e97fceda,;}; 2025-03-28T09:27:51.400877Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:44:255:1:574112:0]; 2025-03-28T09:27:51.400999Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:44:255:2:592928:0]; GC for channel 2 deletes blobs: GC for channel 4 deletes blobs: GC for channel 3 deletes blobs: [9437184:2:85:3:0:5870200:0] Added portions: 151 152 2025-03-28T09:27:51.427915Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-28T09:27:51.428303Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[271] (CS::INDEXATION) apply at tablet 9437184 2025-03-28T09:27:51.432328Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save 
Batch GenStep: 2:88 Blob count: 2 2025-03-28T09:27:51.432509Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=92311612;raw_bytes=143732845;count=39;records=1462497} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7381080;raw_bytes=7369506;count=2;records=75000} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:27:51.448833Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=eb1ace86-bb611f0-8b585144-e97fceda;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-03-28T09:27:51.448914Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=eb1ace86-bb611f0-8b585144-e97fceda;fline=with_appended.cpp:65;portions=75,76,;task_id=eb1ace86-bb611f0-8b585144-e97fceda; 2025-03-28T09:27:51.449192Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=eb1ace86-bb611f0-8b585144-e97fceda;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::eb1ace86-bb611f0-8b585144-e97fceda; 2025-03-28T09:27:51.449254Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=eb1ace86-bb611f0-8b585144-e97fceda;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:27:51.449326Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=eb1ace86-bb611f0-8b585144-e97fceda;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:27:51.449389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;task_id=eb1ace86-bb611f0-8b585144-e97fceda;tablet_id=9437184;fline=columnshard_impl.cpp:831;event=skip_compaction;reason=disabled; 2025-03-28T09:27:51.449440Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=eb1ace86-bb611f0-8b585144-e97fceda;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-28T09:27:51.449506Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=eb1ace86-bb611f0-8b585144-e97fceda;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:27:51.449560Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=eb1ace86-bb611f0-8b585144-e97fceda;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:27:51.449606Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=eb1ace86-bb611f0-8b585144-e97fceda;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:27:51.449685Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=eb1ace86-bb611f0-8b585144-e97fceda;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.404000s; 2025-03-28T09:27:51.449740Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=eb1ace86-bb611f0-8b585144-e97fceda;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:27:51.449848Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:87:2:0:5870200:0] 2025-03-28T09:27:51.449910Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:88 Blob count: 2 2025-03-28T09:27:51.450968Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=45;external_task_id=eb1ace86-bb611f0-8b585144-e97fceda;mem=5963210;cpu=0; 2025-03-28T09:27:51.451688Z node 1 
:TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:27:51.452582Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 6080043 at tablet 9437184, mediator 0 2025-03-28T09:27:51.452665Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[274] execute at tablet 9437184 2025-03-28T09:27:51.453326Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1043;fline=abstract.h:83;progress_tx_id=1043;lock_id=1;broken=0; 2025-03-28T09:27:51.453561Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1043;fline=tx_controller.cpp:211;event=finished_tx;tx_id=1043; 2025-03-28T09:27:51.467084Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[274] complete at tablet 9437184 2025-03-28T09:27:51.467217Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=1043;lock_id=1;broken=0; 2025-03-28T09:27:51.467358Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=5870200; 2025-03-28T09:27:51.467513Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::ebb0d7d2-bb611f0-aa4e16af-17d0eb8a; 2025-03-28T09:27:51.467581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-03-28T09:27:51.467690Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:754;event=indexation;bytes=5870200;blobs_count=1;max_limit=251658240;has_more=0;external_task_id=ebb0d7d2-bb611f0-aa4e16af-17d0eb8a; 2025-03-28T09:27:51.467761Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:620;event=start_changes;type=CS::INDEXATION;task_id=ebb0d7d2-bb611f0-aa4e16af-17d0eb8a; 2025-03-28T09:27:51.467950Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=5963210;external_task_id=ebb0d7d2-bb611f0-aa4e16af-17d0eb8a;type=CS::INDEXATION;priority=0;; 2025-03-28T09:27:51.468237Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=46;task=cpu=0;mem=5963210;external_task_id=ebb0d7d2-bb611f0-aa4e16af-17d0eb8a;type=CS::INDEXATION;priority=0;; 2025-03-28T09:27:51.468288Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=ebb0d7d2-bb611f0-aa4e16af-17d0eb8a;mem=5963210;cpu=0; 2025-03-28T09:27:51.468332Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=ebb0d7d2-bb611f0-aa4e16af-17d0eb8a;task_id=46;mem=5963210;cpu=0; 2025-03-28T09:27:51.468498Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=ebb0d7d2-bb611f0-aa4e16af-17d0eb8a;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=ebb0d7d2-bb611f0-aa4e16af-17d0eb8a; 
Added portions: 153 154 2025-03-28T09:27:52.041491Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=ebb0d7d2-bb611f0-aa4e16af-17d0eb8a;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-03-28T09:27:52.041660Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; Compactions happened: 14 Indexations happened: 31 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |89.5%| [TA] {RESULT} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadIterator::ShouldLimitRead10RangesChunk99Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 >> TPersQueueTest::CheckDeleteTopic [GOOD] >> TPersQueueTest::CheckDecompressionTasksWithoutSession >> TPersQueueTest::StoreNoMoreThanXSourceIDs [GOOD] >> TPersQueueTest::SetupWriteSessionOnDisabledCluster >> KqpErrors::ProposeResultLost_RwTx+UseSink >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD] >> DataShardReadIterator::ShouldFailWrongSchema [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange >> DataStreams::TestDeleteStreamWithEnforceFlagFalse [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD] Test command err: 2025-03-28T09:26:47.073091Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:47.152770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:47.174704Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:47.175094Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:47.181838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:47.182006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:47.182195Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:47.182268Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:47.182332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:47.182413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:47.182508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:47.182595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:47.182666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:47.182792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.182872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:47.182936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:47.206332Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:47.206569Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:47.206622Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:47.206801Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:47.206927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:47.206984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:47.207013Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:47.207112Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 
chunks found; 2025-03-28T09:26:47.207172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:47.207199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:47.207218Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:47.207354Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:47.207401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:47.207427Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:47.207445Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:47.207540Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:47.207576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:47.207600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:47.207621Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:47.207683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:47.207717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:47.207736Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:47.207771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:47.207803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:47.207826Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:47.208116Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=29; 2025-03-28T09:26:47.208204Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=51; 2025-03-28T09:26:47.208275Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-03-28T09:26:47.208345Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=36; 2025-03-28T09:26:47.208492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:47.208529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:47.208551Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:47.208717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:47.208765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.208796Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.209015Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:47.209100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:47.209146Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:47.209380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:47.209438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:47.209472Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:47.209620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:47.209683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:47.209726Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... p_indexation;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=ec49b9d4-bb611f0-9ddc0d14-d789d911,;}; 2025-03-28T09:27:53.048372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:281:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:831;event=skip_compaction;reason=disabled; 2025-03-28T09:27:53.048422Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:281:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-28T09:27:53.048491Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:281:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:27:53.048545Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:281:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:27:53.048587Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:281:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:27:53.048667Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:281:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.408000s; 2025-03-28T09:27:53.048720Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:281:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:27:53.286338Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:281:2294];fline=actor.cpp:22;event=flush_writing;size=4735248;count=1; 2025-03-28T09:27:53.347333Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 45 at tablet 9437184 2025-03-28T09:27:53.347721Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 3:87 Blob count: 1 2025-03-28T09:27:53.364688Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 3:87 Blob count: 1 2025-03-28T09:27:53.364916Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=263;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:769;event=skip_indexation;reason=in_progress;count=1;insert_overload_size=5870200;indexing_debug={task_ids=ec49b9d4-bb611f0-9ddc0d14-d789d911,;}; 2025-03-28T09:27:53.372017Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:3:44:255:1:574112:0]; 2025-03-28T09:27:53.372196Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:3:44:255:2:592928:0]; GC for channel 3 deletes blobs: GC for channel 2 deletes blobs: GC for channel 4 deletes blobs: [9437184:3:83:4:0:5870200:0] Added portions: 151 152 2025-03-28T09:27:53.404928Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-28T09:27:53.405303Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[264] (CS::INDEXATION) apply 
at tablet 9437184 2025-03-28T09:27:53.410081Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 3:86 Blob count: 2 2025-03-28T09:27:53.410249Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=92311612;raw_bytes=143732845;count=39;records=1462497} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7381080;raw_bytes=7369506;count=2;records=75000} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:27:53.437131Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ec49b9d4-bb611f0-9ddc0d14-d789d911;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::INDEXATION;success=1; 2025-03-28T09:27:53.437238Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ec49b9d4-bb611f0-9ddc0d14-d789d911;fline=with_appended.cpp:65;portions=75,76,;task_id=ec49b9d4-bb611f0-9ddc0d14-d789d911; 2025-03-28T09:27:53.437553Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ec49b9d4-bb611f0-9ddc0d14-d789d911;fline=manager.cpp:15;event=unlock;process_id=CS::INDEXATION::ec49b9d4-bb611f0-9ddc0d14-d789d911; 2025-03-28T09:27:53.437635Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ec49b9d4-bb611f0-9ddc0d14-d789d911;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:27:53.437721Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ec49b9d4-bb611f0-9ddc0d14-d789d911;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:27:53.437790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;task_id=ec49b9d4-bb611f0-9ddc0d14-d789d911;tablet_id=9437184;fline=columnshard_impl.cpp:831;event=skip_compaction;reason=disabled; 2025-03-28T09:27:53.437860Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ec49b9d4-bb611f0-9ddc0d14-d789d911;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=0; 2025-03-28T09:27:53.437942Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ec49b9d4-bb611f0-9ddc0d14-d789d911;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:27:53.437997Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ec49b9d4-bb611f0-9ddc0d14-d789d911;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:27:53.438042Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ec49b9d4-bb611f0-9ddc0d14-d789d911;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:27:53.438120Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ec49b9d4-bb611f0-9ddc0d14-d789d911;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.397000s; 2025-03-28T09:27:53.438176Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=ec49b9d4-bb611f0-9ddc0d14-d789d911;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:27:53.438295Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:85:3:0:5870200:0] 2025-03-28T09:27:53.438361Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 3:86 Blob count: 2 2025-03-28T09:27:53.439528Z node 1 :TX_COLUMNSHARD DEBUG: 
fline=task.cpp:21;event=free_resources;task_id=45;external_task_id=ec49b9d4-bb611f0-9ddc0d14-d789d911;mem=5963210;cpu=0; 2025-03-28T09:27:53.440335Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:27:53.441187Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 6080043 at tablet 9437184, mediator 0 2025-03-28T09:27:53.441263Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[267] execute at tablet 9437184 2025-03-28T09:27:53.441626Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1043;fline=abstract.h:83;progress_tx_id=1043;lock_id=1;broken=0; 2025-03-28T09:27:53.441870Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=1043;fline=tx_controller.cpp:211;event=finished_tx;tx_id=1043; 2025-03-28T09:27:53.456872Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[267] complete at tablet 9437184 2025-03-28T09:27:53.457023Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=1043;lock_id=1;broken=0; 2025-03-28T09:27:53.457170Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=5870200; 2025-03-28T09:27:53.457331Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=manager.cpp:10;event=lock;process_id=CS::INDEXATION::ece07752-bb611f0-980c2c9d-471d3a12; 2025-03-28T09:27:53.457384Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=ro_controller.cpp:45;event=CS::INDEXATION;tablet_id=9437184; 2025-03-28T09:27:53.457482Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:754;event=indexation;bytes=5870200;blobs_count=1;max_limit=251658240;has_more=0;external_task_id=ece07752-bb611f0-980c2c9d-471d3a12; 2025-03-28T09:27:53.457550Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:620;event=start_changes;type=CS::INDEXATION;task_id=ece07752-bb611f0-980c2c9d-471d3a12; 2025-03-28T09:27:53.457712Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:281:2294];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=5963210;external_task_id=ece07752-bb611f0-980c2c9d-471d3a12;type=CS::INDEXATION;priority=0;; 2025-03-28T09:27:53.457976Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:281:2294];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=46;task=cpu=0;mem=5963210;external_task_id=ece07752-bb611f0-980c2c9d-471d3a12;type=CS::INDEXATION;priority=0;; 2025-03-28T09:27:53.458022Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:281:2294];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=ece07752-bb611f0-980c2c9d-471d3a12;mem=5963210;cpu=0; 2025-03-28T09:27:53.458069Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:281:2294];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=ece07752-bb611f0-980c2c9d-471d3a12;task_id=46;mem=5963210;cpu=0; 2025-03-28T09:27:53.458213Z node 1 :TX_COLUMNSHARD DEBUG: 
external_task_id=ece07752-bb611f0-980c2c9d-471d3a12;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=ece07752-bb611f0-980c2c9d-471d3a12; Added portions: 153 154 2025-03-28T09:27:54.059963Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=ece07752-bb611f0-980c2c9d-471d3a12;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-03-28T09:27:54.060147Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:281:2294];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; Compactions happened: 14 Indexations happened: 31 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 34 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] >> TTopicYqlTest::DropTopicYql [GOOD] >> TTopicYqlTest::CreateTopicYqlBackCompatibility >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] >> TPersQueueTest::AllEqual [GOOD] >> TPersQueueTest::BadSids >> KqpErrors::ProposeError >> YdbQueryService::TestAttachTwice [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite >> TPersQueueTest::PreferredCluster_RemotePreferredClusterEnabledWhileSessionInitializing_SessionDiesOnlyAfterInitializationAndDelay [GOOD] >> TPersQueueTest::PartitionsMapping >> TPersQueueTest::WhenTheTopicIsDeletedBeforeDataIsDecompressed_Uncompressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] Test command err: 2025-03-28T09:20:16.623066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:20:16.623353Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:20:16.623412Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000fd5/r3tmp/tmp7nEQuR/pdisk_1.dat 2025-03-28T09:20:17.019782Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23430, node 1 2025-03-28T09:20:17.398939Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:20:17.398996Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:20:17.399022Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:20:17.399572Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:20:17.402173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:20:17.513108Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:17.513222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:17.538562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23419 2025-03-28T09:20:18.252145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:20:23.061109Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:20:23.131741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:23.131885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:23.198147Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:20:23.205090Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:20:23.489628Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:20:23.490165Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:20:23.490659Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:20:23.490851Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:20:23.491101Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:20:23.491221Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:20:23.491350Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:20:23.491458Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:20:23.491581Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:20:23.666723Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:20:23.666847Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:20:23.688795Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:20:23.866600Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:20:23.970510Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:20:23.970630Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:20:24.004937Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:20:24.009435Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:20:24.009678Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:20:24.009736Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:20:24.009799Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:20:24.009852Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:20:24.009898Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:20:24.009949Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:20:24.010573Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:20:24.061391Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1859:2587] 2025-03-28T09:20:24.062234Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:20:24.077241Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:20:24.077314Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:20:24.077390Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:20:24.090007Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:20:24.090143Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1895:2609], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:20:24.110174Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1935:2627] 2025-03-28T09:20:24.110577Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1935:2627], schemeshard id = 72075186224037897 2025-03-28T09:20:24.163545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:20:24.178533Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:20:24.179620Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:20:24.375681Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:20:24.780236Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:20:24.868042Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:20:25.820037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2236:3070], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:25.820179Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:20:25.838472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:20:26.055099Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2865];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:26.055354Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2865];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:20:26.055689Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2865];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:20:26.055840Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2865];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:20:26.055964Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2865];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:20:26.056097Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2865];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:20:26.056224Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2865];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:20:26.056348Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2865];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:20:26.056479Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2865];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:20:26.056626Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2865];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:20:26.056785Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2865];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:20:26.056928Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2352:2865];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:20:26.109971Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2355:2867];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:20:26.110087Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2355:2867];tablet_id=72075186224037900;process= ... NextTraversal. No force traversals. 2025-03-28T09:26:41.248627Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:26:42.603793Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:26:42.603860Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:26:43.769801Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:26:43.769963Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:26:45.078369Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:26:45.078438Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:26:47.425651Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:26:47.504161Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:26:47.504238Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:26:50.137343Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:26:50.137618Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:26:50.211783Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:26:50.211852Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:26:52.693359Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:26:52.693428Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:26:53.893066Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:26:55.317791Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:26:55.317879Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:26:56.586729Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:26:56.586947Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:26:57.950366Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:26:57.950438Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:00.423452Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:27:00.484387Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:00.484447Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:03.241991Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:27:03.242159Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:27:03.300673Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:03.300741Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-03-28T09:27:05.861099Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:05.861172Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:07.048481Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:27:08.366136Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:08.366198Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:09.530261Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:27:09.530446Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:27:10.861925Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:10.862017Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:13.308076Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:27:13.366985Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:13.367066Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:14.606324Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-28T09:27:14.606402Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:27:14.606435Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:27:14.606465Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-28T09:27:16.182311Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:27:16.182532Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:27:16.243581Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:16.243671Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:18.793468Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:18.793538Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:19.958751Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:27:21.314303Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:21.314382Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:22.472783Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:27:22.472961Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:27:23.828633Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:23.828703Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:25.988581Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:27:26.071404Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:26.071470Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 
2025-03-28T09:27:28.500396Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:27:28.500584Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:27:28.598196Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:28.598261Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:31.211321Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:31.211386Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:32.425612Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:27:33.927364Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:33.927435Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:35.158153Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:27:35.158350Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:27:36.533452Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:36.533521Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:38.965013Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:27:39.048922Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:39.049020Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:41.484978Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:27:41.485203Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:27:41.578883Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:41.578957Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:43.947447Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:43.947520Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:45.132444Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:27:46.493600Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:46.493671Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:47.703033Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:27:47.703263Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:27:47.716577Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-28T09:27:47.716665Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 236.000000s, at schemeshard: 72075186224037897 2025-03-28T09:27:47.716903Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 53 ... waiting for TEvSchemeShardStats 2 (done) ... 
waiting for TEvPropagateStatistics 2025-03-28T09:27:47.733618Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:27:49.129747Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:49.129819Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:51.570893Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:27:51.654949Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:27:51.655014Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:27:52.974355Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-28T09:27:52.974448Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:27:52.974501Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:27:52.974553Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-28T09:27:54.611396Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:27:54.611649Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 ... waiting for TEvPropagateStatistics (done) 2025-03-28T09:27:54.612073Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:16121:9791]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:27:54.616232Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-03-28T09:27:54.616309Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [2:16121:9791], StatRequests.size() = 1 >> DataShardReadIteratorFastCancel::ShouldProcessFastCancel [GOOD] >> DataShardReadIteratorLatency::ReadSplitLatency |89.6%| [TA] $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardReadIterator::ShouldReadRangeChunk7 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix1 >> DataShardReadIterator::ShouldRollbackLocksWhenWrite [GOOD] >> DataShardReadIterator::TryWriteManyRows+Commit >> ReadIteratorExternalBlobs::ExtBlobsWithSpecificKeys [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbQueryService::TestAttachTwice [GOOD] Test command err: 2025-03-28T09:24:30.493897Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788863736990189:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:30.493940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00140d/r3tmp/tmpp11ZIH/pdisk_1.dat 2025-03-28T09:24:30.933990Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:30.961082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:30.961163Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:30.971700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4601, node 1 2025-03-28T09:24:31.143269Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:31.143303Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:31.143310Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:31.146148Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5791 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:31.495914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient is connected to server localhost:5791 2025-03-28T09:24:31.895634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "OlapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } Columns { Name: "request_id" Type: "Utf8" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:24:31.896094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/OlapStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:31.896606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: OlapStore, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-28T09:24:31.896648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-03-28T09:24:31.896734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:24:31.896813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:24:31.896851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-28T09:24:31.896922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-03-28T09:24:31.897188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-03-28T09:24:31.899323Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-03-28T09:24:31.899658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:24:31.899682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:31.899809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:24:31.899843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-03-28T09:24:31.905455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: 
Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-03-28T09:24:31.905614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE COLUMN STORE, path: /Root/OlapStore 2025-03-28T09:24:31.905829Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:24:31.905842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:24:31.906010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:24:31.906079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:24:31.906095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486788863736990781:2381], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2025-03-28T09:24:31.906116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486788863736990781:2381], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 2 2025-03-28T09:24:31.906149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:24:31.906179Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710658:0 ProgressState, operation type: TxCreateOlapStore, at tablet# 72057594046644480 2025-03-28T09:24:31.906900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 1 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } 
BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-28T09:24:31.907446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710658:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: ColumnShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 2 BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels { StoragePoolName: "hdd2" } BindedChannels ... 
\'\"PointPrefixLen\" \'0)))\n (let $7 (KqpWideReadOlapTableRanges $1 %kqp%tx_result_binding_0_0 $4 \'(\'(\'\"ItemsLimit\" $5) \'(\'\"Reverse\")) $6 (lambda \'($10) $10)))\n (let $8 (Bool \'false))\n (let $9 \'(\'(\'8 $8) \'(\'6 $8) \'(\'5 $8) \'(\'9 $8)))\n (return (FromFlow (WideTopSort $7 $5 $9)))\n))))\n)\n" ComputeActors { CpuTimeUs: 1527 Tasks { TaskId: 53 CpuTimeUs: 678 FinishTimeMs: 1743154064292 Tables { TablePath: "/Root/OlapStore/log1" } ComputeCpuTimeUs: 51 BuildCpuTimeUs: 627 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-mdsjyjltvm" NodeId: 22 CreateTimeMs: 1743154064168 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743154064252 } Stages { StageId: 1 StageGuid: "ae31d4c7-9de22c43-5743a9ea-89b5e640" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'50)) (lambda \'($2 $3 $4 $5 $6 $7 $8 $9 $10 $11) (AsStruct \'(\'\"ingested_at\" $2) \'(\'\"json_payload\" $3) \'(\'\"level\" $4) \'(\'\"message\" $5) \'(\'\"request_id\" $6) \'(\'\"resource_id\" $7) \'(\'\"resource_type\" $8) \'(\'\"saved_at\" $9) \'(\'\"timestamp\" $10) \'(\'\"uid\" $11)))))))\n)\n" ComputeActors { CpuTimeUs: 12015 Tasks { TaskId: 65 StageId: 1 CpuTimeUs: 510 FinishTimeMs: 1743154064331 ComputeCpuTimeUs: 126 BuildCpuTimeUs: 384 HostName: "ghrun-mdsjyjltvm" NodeId: 22 CreateTimeMs: 1743154064185 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743154064252 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":5,\"Plans\":[{\"Node Type\":\"ResultSet_1\",\"PlanNodeId\":4,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":2}],\"Limit\":\"50\",\"Name\":\"Limit\"}],\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":2,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"TopSort-TableRangeScan\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Limit\":\"50\",\"Name\":\"TopSort\",\"TopSortBy\":\"[row.timestamp,row.resource_type,row.resource_id,row.uid]\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TableRangeScan\",\"Path\":\"\\/Root\\/OlapStore\\/log1\",\"ReadColumns\":[\"ingested_at\",\"json_payload\",\"level\",\"message\",\"request_id\",\"resource_id\",\"resource_type\",\"saved_at\",\"timestamp\",\"uid\"],\"ReadLimit\":\"50\",\"ReadRanges\":[\"timestamp [4000000, 4093000]\"],\"ReadRangesExpectedSize\":1,\"ReadRangesKeys\":[\"timestamp\"],\"Reverse\":true,\"Scan\":\"Parallel\",\"SsaProgram\":{\"Command\":[{\"Projection\":{\"Columns\":[{\"Id\":8},{\"Id\":2},{\"Id\":7},{\"Id\":1},{\"Id\":10},{\"Id\":3},{\"Id\":6},{\"Id\":9},{\"Id\":5},{\"Id\":4}]}}],\"Version\":5},\"Table\":\"OlapStore\\/log1\"}],\"PlanNodeId\":1,\"StageGuid\":\"dedba665-46453191-a6c72b59-8272d156\",\"Stats\":{\"BaseTimeMs\":1743154064252,\"ComputeNodes\":[{\"CpuTimeUs\":1527,\"Tasks\":[{\"ComputeTimeUs\":51,\"FinishTimeMs\":1743154064292,\"Host\":\"ghrun-mdsjyjltvm\",\"NodeId\":22,\"TaskId\":53}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"OlapStore\\/log1\"]}],\"SortColumns\":[\"timestamp (Desc)\",\"resource_type (Desc)\",\"resource_id (Desc)\",\"uid 
(Desc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"ae31d4c7-9de22c43-5743a9ea-89b5e640\",\"Stats\":{\"BaseTimeMs\":1743154064252,\"ComputeNodes\":[{\"CpuTimeUs\":12015,\"Tasks\":[{\"ComputeTimeUs\":126,\"FinishTimeMs\":1743154064331,\"Host\":\"ghrun-mdsjyjltvm\",\"NodeId\":22,\"TaskId\":65}]}],\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 1964 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\002\022\014\010\271\t\020\357]\030\226\341\007 A" } } 2025-03-28T09:27:44.337830Z node 22 :KQP_EXECUTER DEBUG: ActorId: [22:7486789699249883656:3517] TxId: 281474976710670. Ctx: { TraceId: 01jqe18a360cx3v1f1p5d5817x, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=ZTU3OTU1YjAtMTE5NTVjNmUtYjUyYTY1ZDAtYjQ2MzhhZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:27:44.337921Z node 22 :KQP_EXECUTER DEBUG: ActorId: [22:7486789699249883656:3517] TxId: 281474976710670. Ctx: { TraceId: 01jqe18a360cx3v1f1p5d5817x, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=22&id=ZTU3OTU1YjAtMTE5NTVjNmUtYjUyYTY1ZDAtYjQ2MzhhZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.127126s ReadRows: 0 ReadBytes: 0 ru: 84 rate limiter was not found force flag: 1 2025-03-28T09:27:44.338057Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=ZTU3OTU1YjAtMTE5NTVjNmUtYjUyYTY1ZDAtYjQ2MzhhZTQ=, ActorId: [22:7486789694954916312:3517], ActorState: ExecuteState, TraceId: 01jqe18a360cx3v1f1p5d5817x, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-03-28T09:27:44.338665Z node 22 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=22&id=ZTU3OTU1YjAtMTE5NTVjNmUtYjUyYTY1ZDAtYjQ2MzhhZTQ=, ActorId: [22:7486789694954916312:3517], ActorState: ExecuteState, TraceId: 01jqe18a360cx3v1f1p5d5817x, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 235.501 QueriesCount: 1 2025-03-28T09:27:44.338779Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=ZTU3OTU1YjAtMTE5NTVjNmUtYjUyYTY1ZDAtYjQ2MzhhZTQ=, ActorId: [22:7486789694954916312:3517], ActorState: ExecuteState, TraceId: 01jqe18a360cx3v1f1p5d5817x, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-28T09:27:44.338923Z node 22 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=22&id=ZTU3OTU1YjAtMTE5NTVjNmUtYjUyYTY1ZDAtYjQ2MzhhZTQ=, ActorId: [22:7486789694954916312:3517], ActorState: ExecuteState, TraceId: 01jqe18a360cx3v1f1p5d5817x, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:27:44.338977Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=ZTU3OTU1YjAtMTE5NTVjNmUtYjUyYTY1ZDAtYjQ2MzhhZTQ=, ActorId: [22:7486789694954916312:3517], ActorState: ExecuteState, TraceId: 01jqe18a360cx3v1f1p5d5817x, EndCleanup, isFinal: 1 2025-03-28T09:27:44.339070Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=ZTU3OTU1YjAtMTE5NTVjNmUtYjUyYTY1ZDAtYjQ2MzhhZTQ=, ActorId: [22:7486789694954916312:3517], ActorState: ExecuteState, TraceId: 01jqe18a360cx3v1f1p5d5817x, Sent query response back to proxy, proxyRequestId: 5, proxyId: [22:7486789639120337066:2101] 2025-03-28T09:27:44.339119Z node 22 :KQP_SESSION DEBUG: SessionId: 
ydb://session/3?node_id=22&id=ZTU3OTU1YjAtMTE5NTVjNmUtYjUyYTY1ZDAtYjQ2MzhhZTQ=, ActorId: [22:7486789694954916312:3517], ActorState: unknown state, TraceId: 01jqe18a360cx3v1f1p5d5817x, Cleanup temp tables: 0 2025-03-28T09:27:44.342669Z node 22 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154064000, txId: 18446744073709551615] shutting down 2025-03-28T09:27:44.343027Z node 22 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=22&id=ZTU3OTU1YjAtMTE5NTVjNmUtYjUyYTY1ZDAtYjQ2MzhhZTQ=, ActorId: [22:7486789694954916312:3517], ActorState: unknown state, TraceId: 01jqe18a360cx3v1f1p5d5817x, Session actor destroyed RESULT: [] --------------------- STATS: total CPU: 2554 duration: 1181 usec cpu: 1181 usec duration: 224415 usec cpu: 245663 usec { name: "/Root/OlapStore/log1" } 2025-03-28T09:27:44.384170Z node 22 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037892 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-03-28T09:27:44.483320Z node 22 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T09:27:44.483569Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046644480:5 data size 0 row count 0 2025-03-28T09:27:44.483682Z node 22 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037892 maps to shardIdx: 72057594046644480:5 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], pathId map=oltp_log1, is column=0, is olap=0 2025-03-28T09:27:44.483731Z node 22 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037892 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-03-28T09:27:44.484271Z node 22 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-28T09:27:46.913927Z node 25 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[25:7486789709486956680:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:46.914104Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00140d/r3tmp/tmpkIXiD3/pdisk_1.dat 2025-03-28T09:27:47.261874Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:47.329979Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:47.330175Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:47.335326Z node 25 :HIVE WARN: HIVE#72057594037968897 Node(25, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30186, node 25 2025-03-28T09:27:47.479371Z node 25 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:47.479409Z node 25 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:47.479432Z node 25 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:47.479653Z node 25 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13150 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:48.151510Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:51.912717Z node 25 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[25:7486789709486956680:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:51.912917Z node 25 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TPersQueueTest::DirectReadRestartTablet [GOOD] >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit900 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> KqpErrors::ResolveTableError [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChange [GOOD] >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TPersQueueTest::ReadRuleServiceTypeMigration [GOOD] >> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ResolveTableError [GOOD] Test command err: 2025-03-28T09:27:57.661440Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:27:57.662847Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f4f/r3tmp/tmpomXjFX/pdisk_1.dat 2025-03-28T09:27:58.317902Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:58.574440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:27:58.668853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:58.669732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:58.676025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:58.676121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:58.693521Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:27:58.694262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:27:58.694667Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:27:59.050439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:28:00.977185Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2025-03-28T09:28:00.978128Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2025-03-28T09:28:00.978988Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-03-28T09:28:00.979064Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-03-28T09:28:00.980192Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-03-28T09:28:01.010376Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2025-03-28T09:28:01.065929Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 300.000000s, cancelAfter: (empty maybe) 2025-03-28T09:28:01.066028Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Begin literal execution, txs: 1 2025-03-28T09:28:01.066097Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-03-28T09:28:01.066156Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-03-28T09:28:01.066222Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-03-28T09:28:01.066942Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2025-03-28T09:28:01.070848Z node 1 :KQP_EXECUTER TRACE: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Bootstrap done, become ReadyState 2025-03-28T09:28:01.072238Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Executing physical tx, type: 2, stages: 1 2025-03-28T09:28:01.073511Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got request, become WaitResolveState 2025-03-28T09:28:01.073787Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-03-28T09:28:01.075891Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key sets: 1 2025-03-28T09:28:01.076128Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-28T09:28:01.076303Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2025-03-28T09:28:01.082150Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] will be executed on 1 shards. 2025-03-28T09:28:01.083364Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2025-03-28T09:28:01.086657Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Database not set, use /Root 2025-03-28T09:28:01.086764Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 1, snapshot: {0, 0} 2025-03-28T09:28:01.099863Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. datashard task: 1, proto: Id: 1 Executer { ActorId { RawX1: 1558 RawX2: 4294970243 } } Program { RuntimeVersion: 100000 Raw: "\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? 
\004\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/" Settings { LevelDataPrediction: 1 InputDataPrediction: 1 OutputDataPrediction: 1 NodesCount: 52 } } Parameters { key: "%kqp%tx_result_binding_0_0" value { TransportVersion: 20000 Raw: "\010\000\000\000\000\006\002\002\004\004\006\006" Chunks: 3 } } Outputs { Effects { } } Meta { [type.googleapis.com/NKikimrTxDataShard.TKqpTransaction.TDataTaskMeta] { Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\001\000\000\000" KeyPoints: "\001\000\004\000\000\000\002\000\000\000" KeyPoints: "\001\000\004\000\000\000\003\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } } } UseLlvm: false RequestContext { key: "CurrentExecutionId" value: "" } RequestContext { key: "CustomerSuppliedId" value: "" } RequestContext { key: "Database" value: "" } RequestContext { key: "DatabaseId" value: "/Root" } RequestContext { key: "PoolId" value: "" } RequestContext { key: "SessionId" value: "ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=" } RequestContext { key: "TraceId" value: "01jqe18tb1a5rbhp5r67v5cfmc" } EnableSpilling: false DisableMetering: true 2025-03-28T09:28:01.102883Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. State: WaitResolveState, Executing KQP transaction on shard: 72075186224037888, tasks: [1], lockTxId: (empty maybe), locks: , immediate: 1 2025-03-28T09:28:01.103061Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ExecuteDatashardTransaction traceId.verbosity: 0 2025-03-28T09:28:01.103235Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Total tasks: 1, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-28T09:28:01.103314Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Updating channels after the creation of compute actors 2025-03-28T09:28:01.103392Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. 
ActorState: WaitResolveState, datashard 72075186224037888 not finished yet: Executing 2025-03-28T09:28:01.103459Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), 2025-03-28T09:28:01.103524Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-03-28T09:28:01.174706Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Got propose result, shard: 72075186224037888, status: COMPLETE, error: 2025-03-28T09:28:01.174986Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. terminate execution. 2025-03-28T09:28:01.175061Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Terminate, become ZombieState 2025-03-28T09:28:01.175116Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1558:2947] TxId: 281474976715658. Ctx: { TraceId: 01jqe18tb1a5rbhp5r67v5cfmc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDgxN2NhOTEtMzM1MmEzYmUtYzUyNzM3MWEtOTk4ZDFlZTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-28T09:28:01.273336Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:1573:2966], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[/Root/table-1]
: Error: LookupError, code: 2005 2025-03-28T09:28:01.275098Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmYxN2I3YS1kN2NlMmIzZi05ZDQ3MjQ0My03ZWU3NzI4Mw==, ActorId: [1:1571:2964], ActorState: ExecuteState, TraceId: 01jqe18vcyfj10eq9afpy7vfwe, ReplyQueryCompileError, status UNAVAILABLE remove tx with tx_id: >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite >> DataShardReadIteratorLatency::ReadSplitLatency [GOOD] >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> KqpErrors::ProposeResultLost_RwTx+UseSink [GOOD] >> KqpErrors::ProposeResultLost_RwTx-UseSink >> TRtmrTest::CreateWithoutTimeCastBuckets >> DataShardReadIterator::ShouldReadRangePrefix1 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix2 >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:28:06.334041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:28:06.334230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:28:06.334276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:28:06.334307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:28:06.335710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:28:06.335786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:28:06.335863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:28:06.335980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:28:06.338479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:28:06.429160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:28:06.429220Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-28T09:28:06.447939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:28:06.448211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:28:06.448385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:28:06.457547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:28:06.457780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:28:06.462445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:06.463945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:28:06.470321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:28:06.479392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:28:06.479569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:28:06.480466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:28:06.480553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:28:06.480654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:28:06.480761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:28:06.493838Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:28:06.630943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:28:06.632825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:06.634641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:28:06.636073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:28:06.636179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:06.639288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:06.639437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:28:06.639647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:06.639793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:28:06.639830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:28:06.639857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:28:06.641983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:06.642040Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:28:06.642075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:28:06.643949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:06.644005Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:06.644049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:28:06.644111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:28:06.648603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:28:06.650863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:28:06.651908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:28:06.653090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:06.653270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:28:06.653328Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:28:06.655007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:28:06.655080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:28:06.655267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:28:06.655368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:28:06.657764Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:28:06.657816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:28:06.658024Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:28:06.658092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:28:06.659692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:06.659746Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:28:06.659844Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:28:06.659876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:28:06.659955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:28:06.659999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:28:06.660042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:28:06.660086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:28:06.660119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:28:06.660148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:28:06.660222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:28:06.660258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:28:06.660308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:28:06.662209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:28:06.662323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:28:06.662366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
NFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:28:06.707545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 1 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:28:06.707586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 100 2025-03-28T09:28:06.707612Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 1 2025-03-28T09:28:06.707636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:28:06.707697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 0/1, is published: true 2025-03-28T09:28:06.711146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:28:06.711204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateRTMR TConfigureParts ProgressState operationId# 100:0 at tablet72057594046678944 2025-03-28T09:28:06.711253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 3 -> 128 2025-03-28T09:28:06.712519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-28T09:28:06.713053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-28T09:28:06.714520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:28:06.714564Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:28:06.714651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 100 ready parts: 1/1 2025-03-28T09:28:06.714802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:28:06.716648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2025-03-28T09:28:06.716784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000002 2025-03-28T09:28:06.717148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:06.717301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-03-28T09:28:06.717363Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2025-03-28T09:28:06.717452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 100:0 128 -> 240 2025-03-28T09:28:06.717628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:28:06.717715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 100 2025-03-28T09:28:06.719730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:28:06.719770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:28:06.719929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:28:06.720029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:28:06.720073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 1 2025-03-28T09:28:06.720105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 100, path id: 2 2025-03-28T09:28:06.720160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2025-03-28T09:28:06.720206Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 100:0 ProgressState 2025-03-28T09:28:06.720306Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-28T09:28:06.720392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:28:06.720430Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#100:0 progress is 1/1 2025-03-28T09:28:06.720459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:28:06.720504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2025-03-28T09:28:06.720541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2025-03-28T09:28:06.720572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 100:0 2025-03-28T09:28:06.720622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 100:0 2025-03-28T09:28:06.720691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:28:06.720726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2025-03-28T09:28:06.720758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-28T09:28:06.720784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-28T09:28:06.721996Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, 
msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:28:06.722078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:28:06.722134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2025-03-28T09:28:06.722193Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:28:06.722237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:28:06.723506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:28:06.723601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2025-03-28T09:28:06.723641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2025-03-28T09:28:06.723686Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-28T09:28:06.723717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:28:06.723782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2025-03-28T09:28:06.726298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2025-03-28T09:28:06.727330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2025-03-28T09:28:06.727601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2025-03-28T09:28:06.727645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2025-03-28T09:28:06.729119Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-28T09:28:06.729229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-28T09:28:06.729267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:309:2300] TestWaitNotification: OK eventTxId 100 2025-03-28T09:28:06.729760Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:28:06.729986Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/rtmr1" took 224us 
result status StatusSuccess 2025-03-28T09:28:06.730407Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } RtmrVolumeDescription { Name: "rtmr1" PathId: 2 PartitionsCount: 0 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpErrors::ProposeError [GOOD] >> KqpErrors::ProposeErrorEvWrite >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1000 [GOOD] >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:116:2058] recipient: [1:110:2142] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:108:2140] Leader for TabletID 72057594046447617 is [1:131:2155] sender: [1:132:2058] recipient: [1:109:2141] Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:134:2058] recipient: [1:110:2142] 2025-03-28T09:24:11.432487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:24:11.432629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:24:11.432700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:24:11.432747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:24:11.436289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-03-28T09:24:11.436360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:24:11.436546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:24:11.436673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:24:11.439054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:24:11.570905Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:24:11.570985Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:131:2155] sender: [1:168:2058] recipient: [1:15:2062] 2025-03-28T09:24:11.587178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:24:11.587459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:24:11.587622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:24:11.643097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:24:11.643445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:24:11.647992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:24:11.649383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:24:11.660732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:24:11.671491Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:24:11.671586Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:24:11.671964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:24:11.672046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:24:11.672148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:24:11.676227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:216:2058] recipient: [1:214:2215] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:216:2058] recipient: [1:214:2215] Leader for TabletID 72057594037968897 is [1:220:2219] sender: [1:221:2058] recipient: [1:214:2215] 2025-03-28T09:24:11.682709Z node 1 :HIVE INFO: [72057594037968897] started, 
primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:241:2058] recipient: [1:15:2062] 2025-03-28T09:24:11.829055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:24:11.830752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:24:11.832524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:24:11.833864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:24:11.834006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:24:11.839970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:24:11.840123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:24:11.840337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:24:11.840388Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:24:11.840432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:24:11.840468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:24:11.842774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:24:11.842846Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:24:11.842893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:24:11.845255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:24:11.845302Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:24:11.845356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:24:11.845413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:24:11.850112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:24:11.853165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from 
tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:24:11.854232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:133:2156] sender: [1:256:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:24:11.855539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:24:11.855695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:24:11.855748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:24:11.857133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:24:11.857210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:24:11.858453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:24:11.858594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:24:11.862234Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:24:11.862301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:24:11.862510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:24:11.862557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:24:11.862988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:24:11.863122Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:24:11.863229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:24:11.863267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:24:11.863309Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:24:11.863340Z no ... 
uccess 2025-03-28T09:28:06.498802Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 
ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:28:06.505664Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: 
true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:28:06.506545Z node 103 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 907us result status StatusSuccess 2025-03-28T09:28:06.507526Z node 103 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } 
TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |89.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} >> YdbOlapStore::LogExistingRequest [GOOD] >> YdbOlapStore::LogCountByResource |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] >> TBSV::ShouldLimitBlockStoreVolumeDropRate >> DataShardReadIterator::ShouldFailReadNextAfterSchemeChangeExhausted [GOOD] >> DataShardReadIterator::NoErrorOnFinalACK |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] Test command err: 2025-03-28T09:27:40.102813Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789681994312096:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:40.102939Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014dc/r3tmp/tmprwn4mB/pdisk_1.dat 2025-03-28T09:27:40.543718Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:40.575028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:40.575131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:40.585615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61832, node 1 2025-03-28T09:27:40.814553Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:40.814578Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-03-28T09:27:40.814588Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:40.814729Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64155 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:41.136588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:41.244891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:64155 2025-03-28T09:27:41.427261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:27:41.663698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:27:41.686584Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-28T09:27:41.686605Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-03-28T09:27:41.686619Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-03-28T09:27:41.688171Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-28T09:27:41.693455Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-03-28T09:27:41.693528Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-03-28T09:27:41.693552Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-03-28T09:27:41.693585Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-03-28T09:27:45.000417Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486789698682052801:2245];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014dc/r3tmp/tmpeawRGY/pdisk_1.dat 2025-03-28T09:27:45.106349Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:27:45.223094Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:45.263267Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:45.263380Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:45.269003Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20787, node 4 2025-03-28T09:27:45.355901Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:45.355945Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:45.355960Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:45.356136Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6599 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:45.625542Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:45.728808Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:6599 2025-03-28T09:27:45.909802Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:46.144858Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:27:46.220653Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:27:46.254185Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-03-28T09:27:46.254217Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found 2025-03-28T09:27:46.254244Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2025-03-28T09:27:46.254264Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2025-03-28T09:27:50.210028Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486789722604384042:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:50.210191Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014dc/r3tmp/tmpSp5hW0/pdisk_1.dat 2025-03-28T09:27:50.617235Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:50.658980Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:50.659106Z node 7 :HIVE WARN: 
HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:50.664811Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16965, node 7 2025-03-28T09:27:50.875576Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:50.875604Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:50.875618Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:50.875773Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14935 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:51.201973Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:51.284203Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:14935 2025-03-28T09:27:51.508003Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:27:51.748359Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:27:51.831256Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:27:51.905804Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:27:51.927156Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2025-03-28T09:27:51.927998Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2025-03-28T09:27:51.928037Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2025-03-28T09:27:51.928051Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2025-03-28T09:27:51.932193Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-03-28T09:27:51.932252Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-03-28T09:27:51.932296Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-03-28T09:27:51.932357Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-03-28T09:27:55.785949Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486789744068518004:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:55.790867Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014dc/r3tmp/tmpgUXnYF/pdisk_1.dat 2025-03-28T09:27:56.046049Z node 10 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18950, node 10 2025-03-28T09:27:56.148875Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:56.148985Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:56.164977Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:27:56.174478Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:56.174511Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:56.174522Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:56.174701Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17061 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:56.466896Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:56.529285Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:17061 2025-03-28T09:27:56.744453Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:56.762913Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T09:28:00.789081Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7486789744068518004:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:00.789214Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TTopicYqlTest::CreateTopicYqlBackCompatibility [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:28:09.470896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:28:09.471051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2025-03-28T09:28:09.471097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:28:09.471136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:28:09.471196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:28:09.471226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:28:09.471289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:28:09.471364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:28:09.471731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:28:09.567381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:28:09.567445Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:09.584888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:28:09.585218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:28:09.585415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:28:09.599885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:28:09.600162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:28:09.600947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:09.601163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:28:09.604409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:28:09.616808Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:28:09.616931Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:28:09.617560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:28:09.617651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:28:09.617724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:28:09.617830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:28:09.625555Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:28:09.768635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" 
} } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:28:09.769917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:09.770855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:28:09.774186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:28:09.774330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:09.780580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:09.780778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:28:09.780993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:09.781129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:28:09.781174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:28:09.781208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:28:09.787730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:09.787816Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:28:09.787869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:28:09.790134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:09.790189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:09.790244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:28:09.790310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:28:09.794198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:28:09.799666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:28:09.800596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
1 at step: 5000001 2025-03-28T09:28:09.801911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:09.802067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:28:09.802123Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:28:09.803404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:28:09.803477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:28:09.803746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:28:09.803833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:28:09.808498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:28:09.808558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:28:09.808748Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:28:09.808805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:28:09.809209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:09.809268Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:28:09.809375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:28:09.809413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:28:09.809447Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:28:09.809473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:28:09.809506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:28:09.809548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:28:09.809584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:28:09.809611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:28:09.809680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:28:09.809719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:28:09.809777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 
3 2025-03-28T09:28:09.811816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:28:09.811956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:28:09.811988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... -28T09:28:10.063712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.063768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.063817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.076985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:28:10.082540Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:28:10.083055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:28:10.083116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:28:10.083287Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.083713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:28:10.083762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:28:10.083840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:28:10.089008Z node 1 :FLAT_TX_SCHEMESHARD WARN: TTxCleanBlockStoreVolumes Complete, done PersistRemoveBlockStoreVolume for 1 volumes, left 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.089091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:28:10.089132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:28:10.091793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:401:2377] sender: [1:465:2058] recipient: [1:15:2062] 2025-03-28T09:28:10.144828Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:28:10.145039Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 227us result status StatusPathDoesNotExist 2025-03-28T09:28:10.145215Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check 
failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:28:10.146184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:401:2377] sender: [1:466:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:401:2377] sender: [1:469:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:401:2377] sender: [1:470:2058] recipient: [1:468:2428] Leader for TabletID 72057594046678944 is [1:471:2429] sender: [1:472:2058] recipient: [1:468:2428] 2025-03-28T09:28:10.179183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:28:10.179253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:28:10.179279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:28:10.179303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:28:10.179338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:28:10.179364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:28:10.179423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:28:10.179486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:28:10.179823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:28:10.202536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:28:10.204179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:28:10.204398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:28:10.204592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:28:10.204662Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:10.204817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:28:10.205614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:10.205758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 
2025-03-28T09:28:10.205858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.206320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.206423Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-28T09:28:10.206761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.206934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.207085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.207211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.207368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.207641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.207983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.208135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.208618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.208735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.208984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.209139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.209242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.209484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.209588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.209755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.210070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.210273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.210340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.210400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:28:10.222460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:28:10.222557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:28:10.223041Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:28:10.223095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:28:10.223138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:28:10.223280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:471:2429] sender: [1:530:2058] recipient: [1:15:2062] 2025-03-28T09:28:10.255568Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:28:10.255793Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 205us result status StatusPathDoesNotExist 2025-03-28T09:28:10.255947Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TPQCompatTest::BadTopics [GOOD] >> TPQCompatTest::CommitOffsets >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] >> DataShardReadIteratorPageFaults::CancelPageFaultedReadThenDropTable [GOOD] >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeInvisibleRowSkips2-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite >> TBSV::ShardsNotLeftInShardsToDelete >> TPersQueueTest::CheckDecompressionTasksWithoutSession [GOOD] >> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:28:09.467973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:28:09.468101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:28:09.468143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: 
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:28:09.468179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:28:09.469225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:28:09.469271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:28:09.469351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:28:09.469451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:28:09.471037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:28:09.566250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:28:09.566303Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:09.585803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:28:09.586047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:28:09.586191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:28:09.594362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:28:09.594708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:28:09.598563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:09.598968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:28:09.604357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:28:09.616689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:28:09.616781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:28:09.617565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:28:09.617645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:28:09.617754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:28:09.617860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:28:09.625401Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:28:09.808382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2025-03-28T09:28:09.808643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:09.808880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:28:09.809125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:28:09.809187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:09.812527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:09.812670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:28:09.812854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:09.812913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:28:09.812951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:28:09.812990Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:28:09.815521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:09.815592Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:28:09.815644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:28:09.817964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:09.818036Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:09.818086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:28:09.818165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:28:09.821991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:28:09.824983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:28:09.825292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:28:09.826522Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:09.826718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:28:09.826789Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:28:09.827115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:28:09.827184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:28:09.827549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:28:09.827695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:28:09.830280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:28:09.830331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:28:09.830518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:28:09.830591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:28:09.831092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:09.831144Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:28:09.831250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:28:09.831294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:28:09.831351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:28:09.831384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:28:09.831420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:28:09.831465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:28:09.831500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:28:09.831535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:28:09.831633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:28:09.831684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:28:09.831734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:28:09.833771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:28:09.833894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:28:09.833933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 1/1 2025-03-28T09:28:11.027579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:28:11.028335Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 TabletID: 72075186233409569 2025-03-28T09:28:11.029003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2025-03-28T09:28:11.029232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 4 Forgetting tablet 72075186233409569 2025-03-28T09:28:11.029844Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 TabletID: 72075186233409568 2025-03-28T09:28:11.030088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2025-03-28T09:28:11.030255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 3 Forgetting tablet 72075186233409568 2025-03-28T09:28:11.031548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-03-28T09:28:11.032184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-03-28T09:28:11.032690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-03-28T09:28:11.032837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000028 2025-03-28T09:28:11.033209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000028, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:11.033333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000028 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:28:11.033389Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 129:0 HandleReply 
TEvOperationPlan, step: 5000028, at schemeshard: 72057594046678944 2025-03-28T09:28:11.033517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-03-28T09:28:11.033645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-03-28T09:28:11.033680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-03-28T09:28:11.033740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-03-28T09:28:11.033796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-03-28T09:28:11.033868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:28:11.033939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-03-28T09:28:11.033976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-03-28T09:28:11.034029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-03-28T09:28:11.034090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2025-03-28T09:28:11.034126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2025-03-28T09:28:11.034271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 2 2025-03-28T09:28:11.034317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-03-28T09:28:11.034360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 54 2025-03-28T09:28:11.034406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 13], 18446744073709551615 2025-03-28T09:28:11.038512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-03-28T09:28:11.038573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2025-03-28T09:28:11.038824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 2025-03-28T09:28:11.038875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2025-03-28T09:28:11.039304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:24 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:28:11.039359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:23 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:28:11.039541Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:28:11.039573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:28:11.039742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 13] 2025-03-28T09:28:11.039876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-03-28T09:28:11.039924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-03-28T09:28:11.039968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 129, path id: 13 FAKE_COORDINATOR: Erasing txId 129 2025-03-28T09:28:11.040461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-03-28T09:28:11.040555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 13 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2025-03-28T09:28:11.040589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-03-28T09:28:11.040627Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 13], version: 18446744073709551615 2025-03-28T09:28:11.040669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 1 2025-03-28T09:28:11.041131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:28:11.041175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 13], at schemeshard: 72057594046678944 2025-03-28T09:28:11.041235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:28:11.041593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2025-03-28T09:28:11.041656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 54 PathOwnerId: 72057594046678944, cookie: 129 2025-03-28T09:28:11.041683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-03-28T09:28:11.041711Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 54 2025-03-28T09:28:11.041736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:28:11.041798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-03-28T09:28:11.042014Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 24 2025-03-28T09:28:11.042298Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 23 2025-03-28T09:28:11.042562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 
TxId_Deprecated: 24 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2025-03-28T09:28:11.043459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 23 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2025-03-28T09:28:11.044839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-03-28T09:28:11.047325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:28:11.047437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-03-28T09:28:11.048065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:24 2025-03-28T09:28:11.049048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:23 TestModificationResult got TxId: 129, wait until txId: 129 TestWaitNotification wait txId: 129 2025-03-28T09:28:11.049744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: send EvNotifyTxCompletion 2025-03-28T09:28:11.049791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 129 2025-03-28T09:28:11.050512Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2025-03-28T09:28:11.050624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2025-03-28T09:28:11.050662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:1685:3555] TestWaitNotification: OK eventTxId 129 >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyPrefixRightBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite >> TPersQueueTest::BadSids [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheBeginning [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::CreateTopicYqlBackCompatibility [GOOD] Test command err: 2025-03-28T09:25:18.655573Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789073719997130:2218];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.656046Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:18.690218Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789071037596529:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.690338Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001216/r3tmp/tmpup9gwO/pdisk_1.dat 2025-03-28T09:25:18.885596Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:25:18.893205Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:25:19.077608Z node 1 :IMPORT WARN: Table profiles were 
not loaded 2025-03-28T09:25:19.103920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:19.103999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:19.107169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:19.107233Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:19.110320Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:25:19.110471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:19.111828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15037, node 1 2025-03-28T09:25:19.187386Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001216/r3tmp/yandexAGsbyG.tmp 2025-03-28T09:25:19.187414Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001216/r3tmp/yandexAGsbyG.tmp 2025-03-28T09:25:19.187578Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001216/r3tmp/yandexAGsbyG.tmp 2025-03-28T09:25:19.187717Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:25:19.313969Z INFO: TTestServer started on Port 62405 GrpcPort 15037 TClient is connected to server localhost:62405 PQClient connected to localhost:15037 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:19.638078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:19.736485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:20.022968Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:22.071160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789090899867325:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:22.071309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:22.071613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789090899867337:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:22.075968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:25:22.105623Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-03-28T09:25:22.105859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789090899867339:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:25:22.304602Z node 1 :TX_PROXY ERROR: Actor# [1:7486789090899867430:2805] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:22.336482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:25:22.424294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:25:22.442709Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789090899867440:2353], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
<main>:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:25:22.444852Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTllMmFmOGItZmRmZWE1NmYtNDFjOGZjYTktZTc4ZjYzNzc=, ActorId: [1:7486789090899867322:2341], ActorState: ExecuteState, TraceId: 01jqe1400adr3gqftxvcmd0dhk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:25:22.447278Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:25:22.510632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:25:22.781223Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqe140h13eekz7fzzbqew3nx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2JmZTk3Y2EtM2YwOTdlZjgtOTcwMjdlODgtZDI4MmMyNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486789090899867803:3089] 2025-03-28T09:25:23.644248Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789073719997130:2218];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:23.644319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:23.688864Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486789071037596529:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:23.688941Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList.
Ok 2025-03-28T09:25:29.127598Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486789073719997247:2136], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:25:29.127852Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486789073719997247:2136], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-03-28T09:25:29.127947Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486789073719997247:2136], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486789078014965014:2442] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743153919724 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:25:29.128038Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486789073719997247:2136], cacheItem# { Subscriber: { Subscriber: [1:7486789078014965014:2442] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743153919724 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId ... 
s { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } AllPartitions { PartitionId: 0 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } } BootstrapConfig { } SourceActor { RawX1: 7486789751688091209 RawX2: 107374184615 } Partitions { Partition { PartitionId: 0 } } 2025-03-28T09:28:08.155003Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:28:08.162982Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:28:08.163036Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Try execute txs with state EXECUTED 2025-03-28T09:28:08.163062Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710676, State EXECUTED 2025-03-28T09:28:08.163084Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710676 State EXECUTED FrontTxId 281474976710676 2025-03-28T09:28:08.163108Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TPersQueue::SendEvReadSetAckToSenders 2025-03-28T09:28:08.163130Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710676, NewState WAIT_RS_ACKS 2025-03-28T09:28:08.163149Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710676 moved from EXECUTED to WAIT_RS_ACKS 2025-03-28T09:28:08.163180Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710676] PredicateAcks: 0/0 2025-03-28T09:28:08.163191Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-28T09:28:08.163210Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710676] PredicateAcks: 0/0 2025-03-28T09:28:08.163228Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] add an TxId 281474976710676 to the list for deletion 2025-03-28T09:28:08.163257Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710676, NewState DELETING 2025-03-28T09:28:08.163291Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete key for TxId 281474976710676 2025-03-28T09:28:08.163366Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:28:08.163188Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:28:08.163218Z node 25 :PERSQUEUE DEBUG: [PQ: 
72075186224037892] Try execute txs with state EXECUTED 2025-03-28T09:28:08.175335Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:28:08.163233Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State EXECUTED 2025-03-28T09:28:08.175367Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Try execute txs with state DELETING 2025-03-28T09:28:08.175384Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976710676, State DELETING 2025-03-28T09:28:08.175409Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete TxId 281474976710676 2025-03-28T09:28:08.163250Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 State EXECUTED FrontTxId 281474976710676 2025-03-28T09:28:08.163267Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-03-28T09:28:08.163283Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState WAIT_RS_ACKS 2025-03-28T09:28:08.163299Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 moved from EXECUTED to WAIT_RS_ACKS 2025-03-28T09:28:08.163322Z node 25 :PERSQUEUE DEBUG: [TxId: 281474976710676] PredicateAcks: 0/0 2025-03-28T09:28:08.163330Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-28T09:28:08.163342Z node 25 :PERSQUEUE DEBUG: [TxId: 281474976710676] PredicateAcks: 0/0 2025-03-28T09:28:08.163355Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976710676 to the list for deletion 2025-03-28T09:28:08.163373Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState DELETING 2025-03-28T09:28:08.163395Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976710676 2025-03-28T09:28:08.163442Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:28:08.174629Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:28:08.174658Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-03-28T09:28:08.174679Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State DELETING 2025-03-28T09:28:08.175513Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976710676 TClient::Ls request: /Root/PQ/rt3.dc1--legacy--topic1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--legacy--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710676 CreateStep: 1743154088172 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037894 } PersQueueGroup { Name: "rt3.dc1--legacy--topic1" PathId: 13 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 10... 
(TRUNCATED) === PATH DESCRIPTION: Name: "rt3.dc1--legacy--topic1" PathId: 13 TotalGroupCount: 2 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } YdbDatabasePath: "/Root" PartitionStrategy { MinPartitionCount: 2 MaxPartitionCount: 5 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } Partitions { PartitionId: 0 TabletId: 72075186224037893 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186224037892 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037894 NextPartitionId: 2 2025-03-28T09:28:08.862820Z node 25 :KQP_COMPUTE WARN: TxId: 281474976710677, task: 1, CA Id [25:7486789803227700685:2478]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 0 2025-03-28T09:28:08.896087Z node 25 :KQP_COMPUTE WARN: TxId: 281474976710677, task: 1, CA Id [25:7486789803227700685:2478]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-03-28T09:28:08.944813Z node 25 :KQP_COMPUTE WARN: TxId: 281474976710677, task: 1, CA Id [25:7486789803227700685:2478]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-03-28T09:28:09.006785Z node 25 :KQP_COMPUTE WARN: TxId: 281474976710677, task: 1, CA Id [25:7486789803227700685:2478]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-03-28T09:28:09.089518Z node 25 :KQP_COMPUTE WARN: TxId: 281474976710677, task: 1, CA Id [25:7486789803227700685:2478]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-03-28T09:28:09.226034Z node 25 :KQP_COMPUTE WARN: TxId: 281474976710677, task: 1, CA Id [25:7486789803227700685:2478]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-03-28T09:28:09.549818Z node 25 :KQP_COMPUTE WARN: TxId: 281474976710677, task: 1, CA Id [25:7486789803227700685:2478]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-03-28T09:28:09.651617Z node 25 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710678. Failed to resolve tablet: 72075186224037891 after several retries. 2025-03-28T09:28:09.651828Z node 25 :KQP_EXECUTER WARN: ActorId: [25:7486789807522668038:2480] TxId: 281474976710678. 
Ctx: { TraceId: 01jqe192ytdtg6sdj9812bsym5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=25&id=YTk3OTQ1YjQtZDgzOGY1Y2ItN2VmNWUzZmMtZThjMDdlNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
<main>: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-03-28T09:28:09.652138Z node 25 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=25&id=YTk3OTQ1YjQtZDgzOGY1Y2ItN2VmNWUzZmMtZThjMDdlNTg=, ActorId: [25:7486789803227700699:2480], ActorState: ExecuteState, TraceId: 01jqe192ytdtg6sdj9812bsym5, Create QueryResponse for error on request, msg: 2025-03-28T09:28:09.653686Z node 25 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqe193g6aejpxp4q0j60qkgf" } } YdbStatus: UNAVAILABLE ConsumedRu: 364 } >> TPersQueueTest::PartitionsMapping [GOOD] >> TPersQueueTest::MessageMetadata >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix2 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:28:12.512026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:28:12.512142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:28:12.512177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:28:12.512323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:28:12.512374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:28:12.512397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:28:12.512447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:28:12.512505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:28:12.512777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:28:12.584898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:28:12.584946Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:12.598178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:28:12.598549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-28T09:28:12.598755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:28:12.605354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:28:12.605579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:28:12.606302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:12.606565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:28:12.608772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:28:12.609994Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:28:12.610072Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:28:12.610250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:28:12.610322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:28:12.610376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:28:12.610458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:28:12.617634Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:28:12.750951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:28:12.751178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:12.751386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:28:12.751646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:28:12.751701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:12.754418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:12.754628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:28:12.754846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:12.754907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:28:12.754949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:28:12.754979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:28:12.757162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:12.757220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:28:12.757255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:28:12.758884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:12.758924Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:12.758958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:28:12.759015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:28:12.762147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:28:12.764071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:28:12.764246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:28:12.765212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:12.765348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:28:12.765405Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:28:12.765653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:28:12.765701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:28:12.765861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:28:12.765940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:28:12.768385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-03-28T09:28:12.768435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:28:12.768610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:28:12.768687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:28:12.769097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:28:12.769154Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:28:12.769261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:28:12.769297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:28:12.769336Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:28:12.769366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:28:12.769400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:28:12.769437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:28:12.769470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:28:12.769497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:28:12.769572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:28:12.769625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:28:12.769674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:28:12.771597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:28:12.771707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:28:12.771741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
D DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2025-03-28T09:28:12.882897Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-28T09:28:12.883486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:28:12.883690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2025-03-28T09:28:12.884515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:28:12.885087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:28:12.885597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-28T09:28:12.885707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-03-28T09:28:12.886016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:12.886105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:28:12.886190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropBlockStoreVolume TPropose, operationId: 102:0 HandleReply TEvOperationPlan, step: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:28:12.886289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:28:12.886446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:28:12.886498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:28:12.886537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:28:12.886566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:28:12.886623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:28:12.886673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:28:12.886733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-28T09:28:12.886776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:28:12.886805Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:28:12.886834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:28:12.886933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:28:12.886985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-28T09:28:12.887025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-28T09:28:12.887078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2025-03-28T09:28:12.890047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:28:12.890095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:28:12.890207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:28:12.890264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:28:12.890488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:28:12.890518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:28:12.890752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:28:12.890782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:28:12.890904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:28:12.891020Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:28:12.891047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-28T09:28:12.891138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:28:12.891638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:28:12.891706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:28:12.891735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:28:12.891771Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:28:12.891825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:28:12.892245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:28:12.892284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:28:12.892341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:28:12.892549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:28:12.892622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:28:12.892649Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:28:12.892672Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:28:12.892695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:28:12.892764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-28T09:28:12.893099Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 2025-03-28T09:28:12.893277Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 2025-03-28T09:28:12.893470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:28:12.893882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:28:12.895628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:28:12.896670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:28:12.896783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:28:12.898258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:28:12.898335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:28:12.898616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:28:12.898655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:28:12.899062Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:28:12.899147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:28:12.899181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:392:2372] TestWaitNotification: OK eventTxId 102 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-03-28T09:28:12.899520Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-03-28T09:28:12.899608Z node 1 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 { Type { Kind: Struct Struct { Member { Name: "ShardsToDelete" Type { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "List" Type { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ShardIdx" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } Member { Name: "Truncated" Type { Kind: Data Data { Scheme: 6 } } } } } } } } } } Value { Struct { Optional { Struct { } Struct { Bool: false } } } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::BadSids [GOOD] Test command err: 2025-03-28T09:25:18.755619Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:25:18.755707Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:25:19.299704Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:25:19.299785Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info === Server->StartServer(false); 2025-03-28T09:25:19.807807Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486789074079792462:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:19.808065Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:19.837525Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486789077439489782:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:19.837589Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001227/r3tmp/tmpRm98dE/pdisk_1.dat 2025-03-28T09:25:20.052467Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:25:20.067797Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:25:20.361907Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:20.362036Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:20.394007Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown 
-> Disconnected 2025-03-28T09:25:20.394140Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:20.401098Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-03-28T09:25:20.401327Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:20.406655Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:20.414146Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11099, node 3 2025-03-28T09:25:20.490560Z node 3 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:25:20.490638Z node 3 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:25:20.551393Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001227/r3tmp/yandexl9puru.tmp 2025-03-28T09:25:20.551423Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001227/r3tmp/yandexl9puru.tmp 2025-03-28T09:25:20.551656Z node 3 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001227/r3tmp/yandexl9puru.tmp 2025-03-28T09:25:20.551808Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:25:20.619800Z INFO: TTestServer started on Port 64306 GrpcPort 11099 TClient is connected to server localhost:64306 PQClient connected to localhost:11099 === TenantModeEnabled() = 0 === Init PQ - start server on port 11099 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:25:21.050903Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:25:21.051179Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:21.051421Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:25:21.051672Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:25:21.051706Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:21.054101Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:25:21.054232Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:25:21.054395Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:21.054436Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:25:21.054481Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-28T09:25:21.054524Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-28T09:25:21.056846Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:25:21.056872Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-28T09:25:21.056888Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:25:21.058367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:21.058416Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:25:21.058435Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-28T09:25:21.060358Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:21.060414Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:21.060442Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:25:21.060482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:25:21.065506Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:21.067592Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-28T09:25:21.067709Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:25:21.070842Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153921117, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:25:21.071039Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743153921117 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:25:21.071074Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:25:21.071424Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-28T09:25:21.071461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:25:21.071645Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:25:21.071719Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:25:21.073917Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:25:21.073959Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:25:21.074152Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:25:21.074196Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7486789078374760444:2419], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-28T09:25:21.074243Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:21.074268Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-28T09:25:21.074371Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:25:21.074383Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:25:21.074408Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:25:21.074433Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 2814749767 ... ionChooser [21:7486789812085332836:2544] (SourceId=base64:aa, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-03-28T09:28:10.345268Z node 21 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [21:7486789812085332836:2544] (SourceId=base64:aa, PreferedPartition=(NULL)) ReplyResult: Partition=6, SeqNo=(NULL) 2025-03-28T09:28:10.345289Z node 21 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [21:7486789812085332836:2544] (SourceId=base64:aa, PreferedPartition=(NULL)) Start idle 2025-03-28T09:28:10.345333Z node 21 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 5 sessionId: partition: 6 expectedGeneration: (NULL) 2025-03-28T09:28:10.347074Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=6) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 22, Generation: 1 2025-03-28T09:28:10.349358Z node 21 :PQ_WRITE_PROXY INFO: session inited cookie: 5 partition: 6 MaxSeqNo: 0 sessionId: base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0 2025-03-28T09:28:10.346948Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [21:7486789812085332862:2544], now have 1 active actors on pipe 2025-03-28T09:28:10.350317Z :INFO: [] MessageGroupId [base64:aa] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743154090350 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:28:10.347253Z node 22 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-03-28T09:28:10.347285Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 6 2025-03-28T09:28:10.347390Z node 22 :PERSQUEUE INFO: new Cookie base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0 generated for partition 6 topic 'rt3.dc1--topic1' owner base64:aa 2025-03-28T09:28:10.347521Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 6 2025-03-28T09:28:10.347593Z node 22 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 6 messageNo: 0 requestId: cookie: 0 2025-03-28T09:28:10.348345Z node 22 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-03-28T09:28:10.348366Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 6 2025-03-28T09:28:10.348471Z node 22 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 6 messageNo: 0 requestId: cookie: 0 2025-03-28T09:28:10.350512Z :INFO: [] MessageGroupId [base64:aa] SessionId [] Write session established. Init response: session_id: "base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0" topic: "topic1" cluster: "dc1" partition_id: 6 supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-28T09:28:10.353724Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0] Write 1 messages with Id from 1 to 1 2025-03-28T09:28:10.353897Z :INFO: [] MessageGroupId [base64:aa] SessionId [base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0] Write session: close. Timeout = 18446744073709551 ms 2025-03-28T09:28:10.354460Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0] Write session: try to update token 2025-03-28T09:28:10.354550Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0] Send 1 message(s) (0 left), first sequence number is 1 2025-03-28T09:28:10.355559Z node 21 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-28T09:28:10.356205Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=6) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-03-28T09:28:10.357549Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=6) Received event: NActors::IEventHandle 2025-03-28T09:28:10.356708Z node 22 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-03-28T09:28:10.356755Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 6 2025-03-28T09:28:10.356865Z node 22 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 6 messageNo: 0 requestId: cookie: 1 2025-03-28T09:28:10.358056Z node 22 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-03-28T09:28:10.358086Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 6 2025-03-28T09:28:10.358528Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message topic: rt3.dc1--topic1 partition: 6 SourceId: '\0base64:aa' SeqNo: 1 partNo : 0 messageNo: 1 size 92 offset: -1 2025-03-28T09:28:10.358848Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] Topic 'rt3.dc1--topic1' partition 6 part blob processing sourceId '\0base64:aa' seqNo 1 partNo 0 2025-03-28T09:28:10.359811Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] Topic 'rt3.dc1--topic1' partition 6 part blob complete sourceId '\0base64:aa' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 169 count 1 nextOffset 1 batches 1 2025-03-28T09:28:10.360713Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] Add new write blob: topic 
'rt3.dc1--topic1' partition 6 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000006_00000000000000000000_00000_0000000001_00000| size 157 WTime 1743154090360 2025-03-28T09:28:10.360913Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:28:10.360960Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] --- delete ---------------- 2025-03-28T09:28:10.361002Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] [x0000000006, x0000000007) 2025-03-28T09:28:10.361049Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] --- write ----------------- 2025-03-28T09:28:10.361103Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] m0000000006pbase64:aa 2025-03-28T09:28:10.361119Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] d0000000006_00000000000000000000_00000_0000000001_00000| 2025-03-28T09:28:10.361134Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] i0000000006 2025-03-28T09:28:10.361174Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] --- rename ---------------- 2025-03-28T09:28:10.361220Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] =========================== 2025-03-28T09:28:10.361277Z node 22 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:28:10.361373Z node 22 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 6 offset 0 partNo 0 count 1 size 157 2025-03-28T09:28:10.371534Z node 22 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 6 offset 0 count 1 size 157 actorID [22:7486789800145598506:2403] 2025-03-28T09:28:10.371670Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 102 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:28:10.371728Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] TPartition::ReplyWrite. 
Partition: 6 2025-03-28T09:28:10.371796Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] Answering for message sourceid: '\0base64:aa', Topic: 'rt3.dc1--topic1', Partition: 6, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-03-28T09:28:10.374814Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=6) Received event: NActors::IEventHandle 2025-03-28T09:28:10.372043Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] Topic 'rt3.dc1--topic1' partition 6 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:28:10.372079Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] Topic 'rt3.dc1--topic1' partition 6 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-28T09:28:10.372175Z node 22 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 6 messageNo: 1 requestId: cookie: 1 2025-03-28T09:28:10.372322Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] read cookie 0 Topic 'rt3.dc1--topic1' partition 6 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-03-28T09:28:10.372356Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-03-28T09:28:10.372401Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-03-28T09:28:10.372425Z node 22 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:28:10.372505Z node 22 :PERSQUEUE DEBUG: Topic 'rt3.dc1--topic1' partition 6 user user readTimeStamp done, result 1743154090358 queuesize 0 startOffset 0 2025-03-28T09:28:10.372664Z node 22 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037892' partition 6 offset 0 partno 0 count 1 parts 0 size 157 2025-03-28T09:28:10.375818Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0] Write session got write response: sequence_numbers: 1 offsets: 0 already_written: false partition_id: 6 write_statistics { persist_duration_ms: 11 queued_in_partition_duration_ms: 1 } 2025-03-28T09:28:10.375918Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0] Write session: acknoledged message 1 2025-03-28T09:28:10.454083Z :INFO: [] MessageGroupId [base64:aa] SessionId [base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0] Write session will now close 2025-03-28T09:28:10.454206Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0] Write session: aborting 2025-03-28T09:28:10.454976Z :INFO: [] MessageGroupId [base64:aa] SessionId [base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:28:10.455044Z :DEBUG: [] MessageGroupId [base64:aa] SessionId [base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0] Write session: destroy 2025-03-28T09:28:10.456230Z node 21 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0 grpc read done: success: 0 data: 2025-03-28T09:28:10.456264Z node 21 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0 grpc read failed 2025-03-28T09:28:10.456305Z node 21 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0 grpc closed 2025-03-28T09:28:10.456331Z node 21 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: base64:aa|6d7df1de-f19b2f02-f8a770e8-384e581b_0 is DEAD 2025-03-28T09:28:10.457729Z node 21 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=6) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:28:10.458506Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [21:7486789812085332862:2544] destroyed 2025-03-28T09:28:10.458568Z node 22 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 6, State: StateIdle] TPartition::DropOwner. |89.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpRanges::UpdateMulti >> KqpKv::ReadRows_SpecificKey >> KqpRanges::IsNotNullInJsonValue >> KqpNewEngine::Select1 >> KqpNotNullColumns::CreateTableWithDisabledNotNullDataColumns >> KqpNotNullColumns::UpsertNotNull >> DataShardReadIterator::ShouldLimitRead10RangesChunk100Limit1001 [GOOD] >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower >> TPersQueueTest::EachMessageGetsExactlyOneAcknowledgementInCorrectOrder [GOOD] >> TPersQueueTest::Delete >> KqpNewEngine::SimpleUpsertSelect >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] >> KqpNewEngine::PkSelect1 >> DataShardReadIterator::NoErrorOnFinalACK [GOOD] >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] Test command err: 2025-03-28T09:28:00.485761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:28:00.486131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:28:00.486314Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:28:00.487214Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:28:00.487595Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:28:00.487859Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ea2/r3tmp/tmp8Dt13E/pdisk_1.dat 2025-03-28T09:28:00.940735Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:01.122023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:28:01.229688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:01.229875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:01.233471Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:01.233571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:01.248815Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:28:01.249478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:01.249934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:01.543397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:28:02.537630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1601:2963], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:02.537771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1611:2968], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:02.537853Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:02.546084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:28:03.158658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1615:2971], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:28:03.365177Z node 1 :TX_PROXY ERROR: Actor# [1:1769:3058] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:03.713859Z node 1 :KQP_EXECUTER TRACE: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1YjlkMzItMjhiMjFlMDMtZWIyMTRiNTYtYzUwNjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2025-03-28T09:28:03.714179Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1795:2961] TxId: 281474976715660. Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1YjlkMzItMjhiMjFlMDMtZWIyMTRiNTYtYzUwNjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-03-28T09:28:03.714375Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1795:2961] TxId: 281474976715660. Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1YjlkMzItMjhiMjFlMDMtZWIyMTRiNTYtYzUwNjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2025-03-28T09:28:03.714607Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-03-28T09:28:03.714918Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2025-03-28T09:28:03.715118Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-28T09:28:03.715297Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1795:2961] TxId: 281474976715660. Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1YjlkMzItMjhiMjFlMDMtZWIyMTRiNTYtYzUwNjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (Iterator (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3))))) )))) ) 2025-03-28T09:28:03.715448Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1795:2961] TxId: 281474976715660. Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1YjlkMzItMjhiMjFlMDMtZWIyMTRiNTYtYzUwNjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] create compute task: 1 2025-03-28T09:28:03.715612Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. 
Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1YjlkMzItMjhiMjFlMDMtZWIyMTRiNTYtYzUwNjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:28:03.715676Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1YjlkMzItMjhiMjFlMDMtZWIyMTRiNTYtYzUwNjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-28T09:28:03.719358Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1YjlkMzItMjhiMjFlMDMtZWIyMTRiNTYtYzUwNjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [1:1798:2961] 2025-03-28T09:28:03.719451Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1YjlkMzItMjhiMjFlMDMtZWIyMTRiNTYtYzUwNjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [1:1798:2961], channels: 0 2025-03-28T09:28:03.719545Z node 1 :KQP_EXECUTER INFO: ActorId: [1:1795:2961] TxId: 281474976715660. Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1YjlkMzItMjhiMjFlMDMtZWIyMTRiNTYtYzUwNjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 0, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-28T09:28:03.719598Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1795:2961] TxId: 281474976715660. Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1YjlkMzItMjhiMjFlMDMtZWIyMTRiNTYtYzUwNjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-03-28T09:28:03.719671Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1YjlkMzItMjhiMjFlMDMtZWIyMTRiNTYtYzUwNjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [1:1798:2961] 2025-03-28T09:28:03.719738Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1YjlkMzItMjhiMjFlMDMtZWIyMTRiNTYtYzUwNjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [1:1798:2961], channels: 0 2025-03-28T09:28:03.719845Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1795:2961] TxId: 281474976715660. Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1YjlkMzItMjhiMjFlMDMtZWIyMTRiNTYtYzUwNjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:1798:2961], 2025-03-28T09:28:03.719962Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1795:2961] TxId: 281474976715660. 
Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1YjlkMzItMjhiMjFlMDMtZWIyMTRiNTYtYzUwNjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1798:2961], 2025-03-28T09:28:03.720021Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1795:2961] TxId: 281474976715660. Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODI1YjlkMzItMjhiMjFlMDMtZWIyMTRiNTYtYzUwNjIyMjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-03-28T09:28:03.736259Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1795:2961] TxId: 281474976715660. Ctx: { TraceId: 01jqe18wq6f9t2a9abes2fcer6, Database: , DatabaseId: /Root, SessionId: ydb:// ... meUs: 755 Tasks { TaskId: 1 CpuTimeUs: 491 ComputeCpuTimeUs: 12 BuildCpuTimeUs: 479 HostName: "ghrun-mdsjyjltvm" NodeId: 4 CreateTimeMs: 1743154094353 } MaxMemoryUsage: 1048576 } 2025-03-28T09:28:14.357209Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1842:3105] TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [4:1852:2479], CA [3:1850:3105], 2025-03-28T09:28:14.357265Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1842:3105] TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:1852:2479], CA [3:1850:3105], 2025-03-28T09:28:14.368288Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1842:3105] TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got result, channelId: 2, shardId: 0, inputIndex: 0, from: [3:1851:3105], finished: 0 2025-03-28T09:28:14.368439Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1842:3105] TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send ack to channelId: 2, seqNo: 1, to: [3:1851:3105] 2025-03-28T09:28:14.376169Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1842:3105] TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, got execution state from compute actor: [4:1852:2479], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { CpuTimeUs: 1714 Tasks { TaskId: 1 CpuTimeUs: 684 OutputRows: 3 OutputBytes: 12 IngressRows: 3 ComputeCpuTimeUs: 205 BuildCpuTimeUs: 479 WaitInputTimeUs: 8790 HostName: "ghrun-mdsjyjltvm" NodeId: 4 StartTimeMs: 1743154094365 CreateTimeMs: 1743154094353 } MaxMemoryUsage: 1048576 } 2025-03-28T09:28:14.376290Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1842:3105] TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [4:1852:2479], CA [3:1850:3105], 2025-03-28T09:28:14.376353Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1842:3105] TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:1852:2479], CA [3:1850:3105], 2025-03-28T09:28:14.379122Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1842:3105] TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got result, channelId: 2, shardId: 0, inputIndex: 0, from: [3:1851:3105], finished: 1 2025-03-28T09:28:14.379214Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1842:3105] TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send ack to channelId: 2, seqNo: 2, to: [3:1851:3105] 2025-03-28T09:28:14.380471Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1842:3105] TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:1850:3105], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1754 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 974 FinishTimeMs: 1743154094380 InputRows: 3 InputBytes: 12 OutputRows: 3 OutputBytes: 12 ResultRows: 3 ResultBytes: 12 ComputeCpuTimeUs: 264 BuildCpuTimeUs: 710 HostName: "ghrun-mdsjyjltvm" NodeId: 3 CreateTimeMs: 1743154094352 } MaxMemoryUsage: 1048576 } 2025-03-28T09:28:14.380601Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:1850:3105] 2025-03-28T09:28:14.380730Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1842:3105] TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Waiting for: CA [4:1852:2479], 2025-03-28T09:28:14.380803Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1842:3105] TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [4:1852:2479], 2025-03-28T09:28:14.381300Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1842:3105] TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [4:1852:2479], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2127 DurationUs: 14000 Tasks { TaskId: 1 CpuTimeUs: 699 FinishTimeMs: 1743154094379 OutputRows: 3 OutputBytes: 12 Tables { TablePath: "/Root/table-1" ReadRows: 3 ReadBytes: 24 AffectedPartitions: 4 } IngressRows: 3 ComputeCpuTimeUs: 220 BuildCpuTimeUs: 479 WaitInputTimeUs: 11842 HostName: "ghrun-mdsjyjltvm" NodeId: 4 StartTimeMs: 1743154094365 CreateTimeMs: 1743154094353 } MaxMemoryUsage: 1048576 } 2025-03-28T09:28:14.381429Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [4:1852:2479] 2025-03-28T09:28:14.383497Z node 3 :KQP_EXECUTER INFO: ActorId: [3:1842:3105] TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Full stats: { CpuTimeUs: 10700 DurationUs: 1743154092837899 Tables { TablePath: "/Root/table-1" ReadRows: 3 ReadBytes: 24 AffectedPartitions: 4 } ExecuterCpuTimeUs: 6819 StartTimeMs: 1543 FinishTimeMs: 1743154094381 Stages { StageId: 1 StageGuid: "a6461e52-ac09b0fd-49d57532-a91c9880" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'\"1001\")) (lambda \'($2 $3) (AsStruct \'(\'\"key\" $2) \'(\'\"value\" $3)))))))\n)\n" ComputeActors { CpuTimeUs: 1754 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 974 FinishTimeMs: 1743154094380 InputRows: 3 InputBytes: 12 OutputRows: 3 OutputBytes: 12 ResultRows: 3 ResultBytes: 12 ComputeCpuTimeUs: 264 BuildCpuTimeUs: 710 HostName: "ghrun-mdsjyjltvm" NodeId: 3 CreateTimeMs: 1743154094352 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743154094365 } Stages { StageGuid: "167cbda3-25e06210-983ba1dc-aa0a2094" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($3) (Member $3 \'\"key\") (Member $3 \'\"value\")))\n (return (FromFlow (ExpandMap (Take (ToFlow $1) (Uint64 \'\"1001\")) $2)))\n))))\n)\n" BaseTimeMs: 1743154094365 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":6,\"Plans\":[{\"Node Type\":\"ResultSet\",\"PlanNodeId\":5,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":4,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":3,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Limit\":\"1001\",\"Name\":\"Limit\"}],\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"TableFullScan\",\"Operators\":[{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/table-1\",\"ReadColumns\":[\"key\",\"value\"],\"ReadLimit\":\"1001\",\"ReadRanges\":[\"key (-∞, +∞)\"],\"ReadRangesPointPrefixLen\":\"0\",\"Scan\":\"Sequential\",\"Table\":\"table-1\"}],\"PlanNodeId\":1,\"StageGuid\":\"\",\"Tables\":[\"table-1\"]}],\"StageGuid\":\"167cbda3-25e06210-983ba1dc-aa0a2094\",\"Stats\":{\"BaseTimeMs\":1743154094365,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"SortColumns\":[\"key (Asc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"a6461e52-ac09b0fd-49d57532-a91c9880\",\"Stats\":{\"BaseTimeMs\":1743154094365,\"ComputeNodes\":[{\"CpuTimeUs\":1754,\"Tasks\":[{\"ComputeTimeUs\":264,\"FinishTimeMs\":1743154094380,\"Host\":\"ghrun-mdsjyjltvm\",\"InputBytes\":12,\"InputRows\":3,\"NodeId\":3,\"OutputBytes\":12,\"OutputRows\":3,\"ResultBytes\":12,\"ResultRows\":3,\"TaskId\":2}]}],\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 1501 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\004\022\013\010\332\r\020\317\020\030\251\036 \002" } } 2025-03-28T09:28:14.383600Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1842:3105] TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:28:14.383688Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:1842:3105] TxId: 281474976715663. 
Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-03-28T09:28:14.383792Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:1842:3105] TxId: 281474976715663. Ctx: { TraceId: 01jqe1982x8an9t9are9xaj0x4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDdhNTgxY2EtNGNmM2JmZDgtYWM4MGQ1MS03NTQ5MjA0ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.003881s ReadRows: 3 ReadBytes: 24 ru: 3 rate limiter was not found force flag: 1 { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } } >> TPersQueueTest::SetupWriteSessionOnDisabledCluster [GOOD] >> TPersQueueTest::SetupReadSession >> TPersQueueTest::WhenTheTopicIsDeletedAfterDecompressingTheData_Uncompressed [GOOD] >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed >> KqpNewEngine::InShardsWrite >> KqpErrors::ProposeErrorEvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite >> DataShardReadIteratorPageFaults::LocksNotLostOnPageFault [GOOD] >> DataShardReadIteratorState::ShouldCalculateQuota [GOOD] >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion >> KqpNewEngine::Select1 [GOOD] >> KqpNewEngine::Replace >> KqpKv::ReadRows_SpecificKey [GOOD] >> KqpKv::ReadRows_UnknownTable >> KqpNotNullColumns::CreateTableWithDisabledNotNullDataColumns [GOOD] >> KqpNotNullColumns::AlterAddNotNullColumnPg >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite >> KqpNotNullColumns::UpsertNotNull [GOOD] >> KqpNotNullColumns::UpdateTable_DontChangeNotNull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeErrorEvWrite [GOOD] Test command err: 2025-03-28T09:28:02.244438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:28:02.244789Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:28:02.244962Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:28:02.245601Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:28:02.245911Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:28:02.246149Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e91/r3tmp/tmpTNTbQF/pdisk_1.dat 2025-03-28T09:28:02.682260Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:02.872936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:28:02.978435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:02.978584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:02.981809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:02.981916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:03.002655Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:28:03.003363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:03.003933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:03.308479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:28:04.289827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1601:2963], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:04.289938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1611:2968], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:04.290023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:04.294506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:28:04.880332Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1615:2971], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:28:05.082200Z node 1 :TX_PROXY ERROR: Actor# [1:1769:3058] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:05.424798Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution. Operation timeout: 0.000000s, cancelAfter: (empty maybe) 2025-03-28T09:28:05.424875Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Begin literal execution, txs: 1 2025-03-28T09:28:05.424946Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-03-28T09:28:05.425078Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-03-28T09:28:05.425148Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-03-28T09:28:05.428095Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. Execution is complete, results: 1 2025-03-28T09:28:05.437192Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe18ye09q6rvtnpr4q3c4e0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJkY2ExMjctOTNmNDQ0MzctYzJmNTllMGUtNzdkMjQ3ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution. Operation timeout: 299.440159s, cancelAfter: (empty maybe) 2025-03-28T09:28:05.437290Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe18ye09q6rvtnpr4q3c4e0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJkY2ExMjctOTNmNDQ0MzctYzJmNTllMGUtNzdkMjQ3ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Begin literal execution, txs: 1 2025-03-28T09:28:05.437365Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-03-28T09:28:05.437415Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe18ye09q6rvtnpr4q3c4e0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJkY2ExMjctOTNmNDQ0MzctYzJmNTllMGUtNzdkMjQ3ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) 2025-03-28T09:28:05.437473Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-03-28T09:28:05.438107Z node 1 :KQP_EXECUTER DEBUG: ActorId: [0:0:0] TxId: 0. 
Ctx: { TraceId: 01jqe18ye09q6rvtnpr4q3c4e0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJkY2ExMjctOTNmNDQ0MzctYzJmNTllMGUtNzdkMjQ3ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Execution is complete, results: 1 2025-03-28T09:28:05.438360Z node 1 :KQP_EXECUTER TRACE: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe18ye09q6rvtnpr4q3c4e0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJkY2ExMjctOTNmNDQ0MzctYzJmNTllMGUtNzdkMjQ3ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Bootstrap done, become ReadyState 2025-03-28T09:28:05.438656Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1795:2961] TxId: 281474976715660. Ctx: { TraceId: 01jqe18ye09q6rvtnpr4q3c4e0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJkY2ExMjctOTNmNDQ0MzctYzJmNTllMGUtNzdkMjQ3ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-03-28T09:28:05.438874Z node 1 :KQP_EXECUTER TRACE: ActorId: [1:1795:2961] TxId: 281474976715660. Ctx: { TraceId: 01jqe18ye09q6rvtnpr4q3c4e0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJkY2ExMjctOTNmNDQ0MzctYzJmNTllMGUtNzdkMjQ3ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got request, become WaitResolveState 2025-03-28T09:28:05.438981Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-03-28T09:28:05.439226Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key sets: 1 2025-03-28T09:28:05.439443Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 2] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-28T09:28:05.439592Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1795:2961] TxId: 281474976715660. Ctx: { TraceId: 01jqe18ye09q6rvtnpr4q3c4e0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJkY2ExMjctOTNmNDQ0MzctYzJmNTllMGUtNzdkMjQ3ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (declare %kqp%tx_result_binding_0_0 (ListType (StructType '('"key" (OptionalType (DataType 'Uint32))) '('"value" (OptionalType (DataType 'Uint32)))))) (return (lambda '() (block '( (let $1 (KqpTable '"/Root/table-1" '"72057594046644480:2" '"" '1)) (let $2 (OptionalType (DataType 'Uint32))) (return (KqpEffects (KqpUpsertRows $1 (Iterator %kqp%tx_result_binding_0_0) '('"key" '"value") '('('"Mode" '"upsert"))))) )))) ) 2025-03-28T09:28:05.439922Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1795:2961] TxId: 281474976715660. Ctx: { TraceId: 01jqe18ye09q6rvtnpr4q3c4e0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJkY2ExMjctOTNmNDQ0MzctYzJmNTllMGUtNzdkMjQ3ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] will be executed on 1 shards. 2025-03-28T09:28:05.440043Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:1795:2961] TxId: 281474976715660. 
Ctx: { TraceId: 01jqe18ye09q6rvtnpr4q3c4e0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJkY2ExMjctOTNmNDQ0MzctYzJmNTllMGUtNzdkMjQ3ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, stage: [0,0] create datashard task: 1, shard: 72075186224037888, meta: TTaskMeta{ ShardId: 72075186224037888, Reads: { none }, Writes: { ranges: TShardKeyRanges{ (Uint32 : 1), (Uint32 : 2), (Uint32 : 3), } } } 2025-03-28T09:28:05.440462Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe18ye09q6rvtnpr4q3c4e0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGJkY2ExMjctOTNmNDQ0MzctYzJmNTllMGUtNzdkMjQ3ZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:28:05.4 ... : ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Updating channels after the creation of compute actors 2025-03-28T09:28:17.975477Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [3:2057:3208] 2025-03-28T09:28:17.975548Z node 3 :KQP_EXECUTER DEBUG: TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [3:2057:3208], channels: 0 2025-03-28T09:28:17.975610Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2054:3208] TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [3:2057:3208], 2025-03-28T09:28:17.975671Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2054:3208] TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2057:3208], 2025-03-28T09:28:17.975727Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2054:3208] TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-03-28T09:28:17.976646Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2054:3208] TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:2057:3208], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-03-28T09:28:17.976715Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2054:3208] TxId: 281474976715672. 
Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [3:2057:3208], 2025-03-28T09:28:17.976774Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2054:3208] TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2057:3208], 2025-03-28T09:28:17.977721Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2054:3208] TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [3:2057:3208], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 667 Tasks { TaskId: 1 CpuTimeUs: 109 FinishTimeMs: 1743154097977 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 26 BuildCpuTimeUs: 83 HostName: "ghrun-mdsjyjltvm" NodeId: 3 CreateTimeMs: 1743154097976 } MaxMemoryUsage: 1048576 } 2025-03-28T09:28:17.977869Z node 3 :KQP_EXECUTER INFO: TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [3:2057:3208] 2025-03-28T09:28:17.977996Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2054:3208] TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send Commit to BufferActor=[3:2053:3208] 2025-03-28T09:28:17.978078Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2054:3208] TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000667s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-28T09:28:17.996780Z node 3 :KQP_COMPUTE WARN: SelfId: [3:2060:3208], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [3:2044:3208]Got OUT_OF_SPACE for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:2060:3208]. Ignored this error. 2025-03-28T09:28:17.996933Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:2053:3208], SessionActorId: [3:2044:3208], statusCode=OVERLOADED. Issue=
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 . sessionActorId=[3:2044:3208]. isRollback=0 2025-03-28T09:28:17.997276Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, ActorId: [3:2044:3208], ActorState: ExecuteState, TraceId: 01jqe19bqw4q06m97k6kmv7wfw, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [3:2054:3208] from: [3:2053:3208] 2025-03-28T09:28:17.997430Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2054:3208] TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got EvAbortExecution, status: OVERLOADED, message: {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 } 2025-03-28T09:28:17.997522Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:2054:3208] TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2006 } 2025-03-28T09:28:17.997616Z node 3 :KQP_EXECUTER INFO: ActorId: [3:2054:3208] TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-03-28T09:28:17.997902Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:2054:3208] TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ReplyErrorAndDie. Response: Status: OVERLOADED Issues { message: "Tablet 72075186224037888 is out of space. Table `/Root/table-1`." issue_code: 2006 severity: 1 } Result { Stats { CpuTimeUs: 667 Stages { StageGuid: "17c69bda-14838d64-e353fe12-692455ca" Program: "(\n(return (lambda \'() (block \'(\n (let $1 (Just (Uint32 \'5)))\n (return (Iterator (AsList (AsStruct \'(\'\"key\" $1) \'(\'\"value\" $1)))))\n))))\n)\n" ComputeActors { CpuTimeUs: 667 Tasks { TaskId: 1 CpuTimeUs: 109 FinishTimeMs: 1743154097977 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 26 BuildCpuTimeUs: 83 HostName: "ghrun-mdsjyjltvm" NodeId: 3 CreateTimeMs: 1743154097976 } MaxMemoryUsage: 1048576 } } } } , to ActorId: [3:2044:3208] 2025-03-28T09:28:17.997957Z node 3 :KQP_EXECUTER INFO: ActorId: [3:2054:3208] TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shutdown immediately - nothing to wait 2025-03-28T09:28:17.999028Z node 3 :KQP_EXECUTER INFO: ActorId: [3:2054:3208] TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Full stats: { CpuTimeUs: 3639 DurationUs: 1743154096431649 ExecuterCpuTimeUs: 2972 StartTimeMs: 1566 FinishTimeMs: 1743154097998 Stages { StageGuid: "17c69bda-14838d64-e353fe12-692455ca" Program: "(\n(return (lambda \'() (block \'(\n (let $1 (Just (Uint32 \'5)))\n (return (Iterator (AsList (AsStruct \'(\'\"key\" $1) \'(\'\"value\" $1)))))\n))))\n)\n" ComputeActors { CpuTimeUs: 667 Tasks { TaskId: 1 CpuTimeUs: 109 FinishTimeMs: 1743154097977 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 26 BuildCpuTimeUs: 83 HostName: "ghrun-mdsjyjltvm" NodeId: 3 CreateTimeMs: 1743154097976 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743154097977 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"Sink\",\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"ConstantExpr-Sink\",\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{key: 5,value: 5}]\",\"Name\":\"Iterator\"},{\"Inputs\":[],\"Name\":\"Upsert\",\"Path\":\"\\/Root\\/table-1\",\"SinkType\":\"KqpTableSink\",\"Table\":\"table-1\"}],\"PlanNodeId\":1,\"StageGuid\":\"17c69bda-14838d64-e353fe12-692455ca\",\"Stats\":{\"BaseTimeMs\":1743154097977,\"ComputeNodes\":[{\"CpuTimeUs\":667,\"Tasks\":[{\"ComputeTimeUs\":26,\"EgressBytes\":10,\"EgressRows\":1,\"FinishTimeMs\":1743154097977,\"Host\":\"ghrun-mdsjyjltvm\",\"NodeId\":3,\"TaskId\":1}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"table-1\"]}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 722 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\001\022\013\010\233\005\020\233\005\030\233\005 \001" } } 2025-03-28T09:28:17.999123Z node 3 :KQP_EXECUTER DEBUG: ActorId: [3:2054:3208] TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:28:17.999180Z node 3 :KQP_EXECUTER TRACE: ActorId: [3:2054:3208] TxId: 281474976715672. Ctx: { TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Terminate, become ZombieState 2025-03-28T09:28:17.999424Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NGJiMzY3ZmEtNmFmZjE4MzgtN2E3ZTc0YzUtZjcyNzBjNjU=, ActorId: [3:2044:3208], ActorState: ExecuteState, TraceId: 01jqe19bqw4q06m97k6kmv7wfw, Create QueryResponse for error on request, msg: >> KqpNewEngine::SimpleUpsertSelect [GOOD] >> KqpNewEngine::ShuffleWrite |89.7%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards >> DataShardReadIterator::ShouldReadRangePrefix3 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix4 >> KqpRanges::UpdateMulti [GOOD] >> KqpRanges::UpdateWhereInBigLiteralList >> KqpNewEngine::PkSelect1 [GOOD] >> KqpNewEngine::PkSelect2 >> KqpSort::TopSortParameter >> KqpRanges::IsNotNullInJsonValue [GOOD] >> KqpRanges::DuplicateKeyPredicateLiteral >> TPersQueueTest::ReadRuleServiceTypeMigrationWithDisallowDefault [GOOD] >> TPersQueueTest::ReadWithoutConsumerFederation >> DataShardReadIterator::ShouldNotReadFutureMvccFromFollower [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc+UseSink >> KqpKv::ReadRows_UnknownTable [GOOD] >> KqpKv::ReadRows_NonExistentKeys >> DataShardReadIterator::ShouldCancelMvccSnapshotFromFuture [GOOD] >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction >> KqpNewEngine::InShardsWrite [GOOD] >> KqpNewEngine::Join >> KqpNotNullColumns::UpdateTable_DontChangeNotNull [GOOD] >> KqpNotNullColumns::UpdateTable_DontChangeNotNullWithIndex >> KqpNotNullColumns::AlterAddNotNullColumnPg [GOOD] >> KqpNotNullColumns::AlterDropNotNullColumn >> KqpNewEngine::Replace [GOOD] >> KqpNewEngine::SelfJoin >> DataStreams::TestGetRecordsStreamWithMultipleShards [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno >> KqpNewEngine::ShuffleWrite [GOOD] >> KqpNewEngine::StaleRO >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheEnd [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeLeftBorder-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite >> KqpKv::ReadRows_NonExistentKeys [GOOD] >> KqpKv::ReadRows_NotFullPK >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinue-EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite >> KqpRanges::DuplicateKeyPredicateLiteral [GOOD] >> KqpRanges::DuplicateKeyPredicateParam >> KqpNewEngine::PkSelect2 [GOOD] >> KqpNewEngine::PkRangeSelect2 >> TPersQueueTest::Codecs_InitWriteSession_DefaultTopicSupportedCodecsInInitResponse [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorSysTables::ShouldForbidSchemaVersion [GOOD] Test command err: 2025-03-28T09:27:00.475985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:27:00.476587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:27:00.476663Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00107e/r3tmp/tmpLxJS0J/pdisk_1.dat 2025-03-28T09:27:00.909410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:27:00.964958Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:01.012617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:01.012758Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:01.028177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:27:01.140152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:27:01.248174Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:27:01.249385Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:27:01.249893Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:27:01.250142Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:27:01.333709Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:27:01.334568Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:27:01.334882Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:27:01.337051Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:27:01.337154Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:27:01.337216Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:27:01.337669Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:27:01.337815Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:27:01.337949Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:27:01.348820Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:27:01.403177Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:27:01.403461Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:27:01.403610Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:27:01.403654Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:27:01.403694Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:27:01.403752Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:27:01.404001Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:01.404071Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:01.404420Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:27:01.404519Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:27:01.405039Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:27:01.405093Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:27:01.405156Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:27:01.405211Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:27:01.405248Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:27:01.405283Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:27:01.405348Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:27:01.405486Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:01.405525Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:01.405574Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:27:01.405643Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:27:01.405681Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:27:01.405791Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:27:01.406031Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:27:01.406111Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:27:01.406233Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:27:01.406283Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:27:01.406327Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:27:01.406371Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-28T09:27:01.406408Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:27:01.407049Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:27:01.407115Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:27:01.407151Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:27:01.407205Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:27:01.407257Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:27:01.407291Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:27:01.407323Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:27:01.407436Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:27:01.407470Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:27:01.409129Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:27:01.409185Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:27:01.420132Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:27:01.420220Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:27:01.420272Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:27:01.420335Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:27:01.420435Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:27:01.581001Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:01.581071Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:01.581126Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:27:01.581611Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:27:01.581652Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:27:01.581785Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:27:01.581831Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:27:01.581897Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:27:01.581940Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:27:01.600804Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:27:01.600899Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:27:01.601772Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:01.601818Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:01.601909Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:27:0 ... -03-28T09:28:26.850938Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-03-28T09:28:26.850961Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit DropIndexNotice 2025-03-28T09:28:26.850981Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit MoveTable 2025-03-28T09:28:26.851006Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit MoveTable 2025-03-28T09:28:26.851030Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-03-28T09:28:26.851053Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit MoveTable 2025-03-28T09:28:26.851073Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit MoveIndex 2025-03-28T09:28:26.851096Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit MoveIndex 2025-03-28T09:28:26.851120Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-03-28T09:28:26.851144Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit MoveIndex 2025-03-28T09:28:26.851169Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CreateCdcStream 2025-03-28T09:28:26.851192Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CreateCdcStream 2025-03-28T09:28:26.851213Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-03-28T09:28:26.851238Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CreateCdcStream 2025-03-28T09:28:26.851260Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit AlterCdcStream 2025-03-28T09:28:26.851282Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit AlterCdcStream 2025-03-28T09:28:26.851314Z node 13 :TX_DATASHARD TRACE: Execution status 
for [2500:281474976715663] at 72075186224037890 is Executed 2025-03-28T09:28:26.851336Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit AlterCdcStream 2025-03-28T09:28:26.851358Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit DropCdcStream 2025-03-28T09:28:26.851381Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit DropCdcStream 2025-03-28T09:28:26.851403Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-03-28T09:28:26.851428Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit DropCdcStream 2025-03-28T09:28:26.851450Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CreateIncrementalRestoreSrc 2025-03-28T09:28:26.851478Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CreateIncrementalRestoreSrc 2025-03-28T09:28:26.851502Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-03-28T09:28:26.851523Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CreateIncrementalRestoreSrc 2025-03-28T09:28:26.851544Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CompleteOperation 2025-03-28T09:28:26.851566Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CompleteOperation 2025-03-28T09:28:26.851918Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is DelayComplete 2025-03-28T09:28:26.851988Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CompleteOperation 2025-03-28T09:28:26.852063Z node 13 :TX_DATASHARD TRACE: Add [2500:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2025-03-28T09:28:26.852117Z node 13 :TX_DATASHARD TRACE: Trying to execute [2500:281474976715663] at 72075186224037890 on unit CompletedOperations 2025-03-28T09:28:26.852159Z node 13 :TX_DATASHARD TRACE: Execution status for [2500:281474976715663] at 72075186224037890 is Executed 2025-03-28T09:28:26.852184Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [2500:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2025-03-28T09:28:26.852217Z node 13 :TX_DATASHARD TRACE: Execution plan for [2500:281474976715663] at 72075186224037890 has finished 2025-03-28T09:28:26.852286Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:28:26.852346Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-03-28T09:28:26.852408Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-03-28T09:28:26.852471Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-28T09:28:26.854831Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [13:24:2071], Recipient [13:978:2789]: {TEvRegisterTabletResult TabletId# 72075186224037890 Entry# 2000} 2025-03-28T09:28:26.854908Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 
2025-03-28T09:28:26.854981Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 2000 2025-03-28T09:28:26.855051Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:28:26.856171Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 2500} 2025-03-28T09:28:26.856288Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:28:26.856805Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [13:685:2581], Recipient [13:881:2712]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:28:26.856849Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:28:26.857296Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [13:685:2581], Recipient [13:978:2789]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:28:26.857331Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-28T09:28:26.857810Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [13:685:2581], Recipient [13:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:28:26.857845Z node 13 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:28:26.858468Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:28:26.858518Z node 13 :TX_DATASHARD TRACE: Complete execution for [2500:281474976715663] at 72075186224037890 on unit CreateTable 2025-03-28T09:28:26.858582Z node 13 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-28T09:28:26.858668Z node 13 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-03-28T09:28:26.858743Z node 13 :TX_DATASHARD TRACE: Complete execution for [2500:281474976715663] at 72075186224037890 on unit CompleteOperation 2025-03-28T09:28:26.858826Z node 13 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715663] from 72075186224037890 at tablet 72075186224037890 send result to client [13:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:28:26.858920Z node 13 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2025-03-28T09:28:26.859064Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:28:26.860235Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [13:24:2071], Recipient [13:978:2789]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2500 ReadStep# 2500 } 2025-03-28T09:28:26.860286Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-03-28T09:28:26.860354Z node 13 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 2500 2025-03-28T09:28:26.860667Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [13:1021:2823], Recipient [13:978:2789]: NKikimr::TEvTabletPipe::TEvClientConnected { 
TabletId: 72057594046644480 Status: OK ServerId: [13:1023:2825] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:28:26.860713Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T09:28:26.861695Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [13:409:2404], Recipient [13:978:2789]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715663 2025-03-28T09:28:26.861753Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-03-28T09:28:26.861821Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037890 state Ready 2025-03-28T09:28:26.861896Z node 13 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-03-28T09:28:26.867841Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1038:2834], Recipient [13:978:2789]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:28:26.867947Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:28:26.868026Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [13:1037:2833], serverId# [13:1038:2834], sessionId# [0:0:0] 2025-03-28T09:28:26.868143Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [13:1036:2832], Recipient [13:978:2789]: NKikimrTxDataShard.TEvGetInfoRequest 2025-03-28T09:28:26.869663Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:593:2518], Recipient [13:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72075186224037888 TableId: 2 SchemaVersion: 1111 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2025-03-28T09:28:26.869896Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:28:26.870035Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-28T09:28:26.870347Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1040:2836], Recipient [13:978:2789]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:28:26.870390Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:28:26.870464Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [13:1039:2835], serverId# [13:1040:2836], sessionId# [0:0:0] >> DataShardReadIterator::ShouldReadRangePrefix4 [GOOD] >> DataShardReadIterator::ShouldReadRangePrefix5 >> TPQCompatTest::CommitOffsets [GOOD] >> TPQCompatTest::LongProducerAndLongMessageGroupId >> KqpSort::TopSortParameter [GOOD] >> KqpSort::TopSortExpr >> KqpRanges::UpdateWhereInBigLiteralList [GOOD] >> KqpRanges::UpdateWhereInBigLiteralListPrefix >> TPersQueueTest::MessageMetadata [GOOD] >> TPersQueueTest::LOGBROKER_7820 >> KqpNotNullColumns::AlterDropNotNullColumn [GOOD] >> KqpNotNullColumns::CreateIndexedTableWithDisabledNotNullDataColumns >> KqpMergeCn::TopSortBy_PK_Uint64_Limit3 >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] >> KqpNewEngine::Join [GOOD] >> KqpNewEngine::JoinIdxLookup >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc+UseSink [GOOD] >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInOneTransaction [GOOD] >> 
DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions >> DataStreams::TestGetRecordsWithCount [GOOD] >> DataStreams::TestInvalidRetentionCombinations >> KqpNotNullColumns::UpdateTable_DontChangeNotNullWithIndex [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndex >> KqpNewEngine::SelfJoin [GOOD] >> KqpNewEngine::ScalarFunctions >> DataShardReadIterator::TryWriteManyRows+Commit [GOOD] >> DataShardReadIterator::TryWriteManyRows-Commit >> KqpKv::ReadRows_NotFullPK [GOOD] >> KqpKv::ReadRows_SpecificReturnValue ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] Test command err: 2025-03-28T09:27:44.591136Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789697810999662:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:44.591198Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014d3/r3tmp/tmp1KjuK2/pdisk_1.dat 2025-03-28T09:27:45.032461Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:45.041909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:45.042004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:45.053297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2705, node 1 2025-03-28T09:27:45.223800Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:45.223832Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:45.223855Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:45.223999Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6383 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:45.529041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:27:45.637378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:6383 2025-03-28T09:27:45.800498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:46.153507Z node 1 :PERSQUEUE ERROR: [PQ: 72075186224037888, Partition: 0, State: StateIdle] reading from too big offset - topic stream_TestGetRecordsStreamWithSingleShard partition 0 client $without_consumer EndOffset 30 offset 100000 2025-03-28T09:27:49.428470Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486789722319768392:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:49.428621Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014d3/r3tmp/tmp44NxY7/pdisk_1.dat 2025-03-28T09:27:49.700594Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21834, node 4 2025-03-28T09:27:49.824368Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:49.824457Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:49.906814Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:27:49.915636Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:49.915661Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:49.915674Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:49.915825Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22611 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:50.206984Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:27:50.343445Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:22611 2025-03-28T09:27:50.648152Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:54.428360Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486789722319768392:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:54.428448Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:28:04.650269Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:28:04.650299Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:21.531833Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486789856202106311:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:21.531876Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014d3/r3tmp/tmpPPdpD6/pdisk_1.dat 2025-03-28T09:28:21.687522Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:21.725796Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:21.725917Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:21.735167Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2808, node 7 2025-03-28T09:28:21.939878Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:21.939927Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:21.939937Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:21.940091Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62773 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:28:22.256405Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:22.329923Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:62773 2025-03-28T09:28:22.564380Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:26.871116Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486789880035949245:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:26.871223Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014d3/r3tmp/tmpF0U8wH/pdisk_1.dat 2025-03-28T09:28:27.196225Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:27.231816Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:27.231943Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:27.236946Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3100, node 10 2025-03-28T09:28:27.323252Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:27.323276Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:27.323285Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:27.323442Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:27.659198Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:28:27.748657Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:15069 2025-03-28T09:28:28.061430Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:28.075836Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 >> KqpNewEngine::StaleRO [GOOD] >> KqpNewEngine::SqlInFromCompact >> KqpSqlIn::KeySuffix >> TPersQueueTest::Delete [GOOD] >> TPersQueueTest::FetchRequest >> KqpNotNullColumns::CreateIndexedTableWithDisabledNotNullDataColumns [GOOD] >> KqpNotNullColumns::Describe >> KqpNewEngine::PkRangeSelect2 [GOOD] >> KqpNewEngine::PkRangeSelect3 >> KqpRanges::DuplicateKeyPredicateParam [GOOD] >> KqpRanges::DuplicateKeyPredicateMixed >> KqpSort::TopSortExpr [GOOD] >> KqpSort::TopSortExprPk >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips+EvWrite [GOOD] >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] >> KqpMergeCn::TopSortBy_PK_Uint64_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Int32_Limit3 >> KqpKv::ReadRows_SpecificReturnValue [GOOD] >> KqpKv::ReadRows_PgValue >> KqpRanges::UpdateWhereInBigLiteralListPrefix [GOOD] >> KqpRanges::UpdateWhereInFullScan+UseSink >> DataStreams::TestInvalidRetentionCombinations [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReadRangePrefix5 [GOOD] Test command err: 2025-03-28T09:27:00.534488Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:27:00.540091Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:27:00.540185Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001053/r3tmp/tmp8InzCD/pdisk_1.dat 2025-03-28T09:27:00.947907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:27:01.001396Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:01.048477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:01.048598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:01.063604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:27:01.163278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:27:01.208728Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:27:01.209980Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:27:01.210506Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:27:01.210997Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:27:01.274340Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:27:01.275325Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:27:01.275468Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:27:01.277280Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:27:01.277374Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:27:01.277436Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:27:01.277807Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:27:01.277922Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:27:01.278024Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:27:01.290810Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:27:01.367159Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:27:01.367411Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:27:01.367565Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:27:01.367634Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:27:01.367689Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:27:01.367743Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:27:01.368014Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:01.368084Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:01.368445Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:27:01.368579Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:27:01.369486Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:27:01.369542Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:27:01.369603Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:27:01.369669Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:27:01.369717Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:27:01.369749Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:27:01.369798Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:27:01.369946Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:01.370042Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:01.370094Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:27:01.370170Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:27:01.370233Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:27:01.370364Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:27:01.370629Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:27:01.370731Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:27:01.370859Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:27:01.370920Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:27:01.370957Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:27:01.370995Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
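The "Trying to execute … on unit" / "Advance execution plan" pairs around this point show the datashard execution-unit pipeline: an operation is pushed through a fixed chain of units (CheckSchemeTx → StoreSchemeTx → FinishPropose → WaitForPlan in this trace), and the status each unit returns decides whether the plan advances or the operation parks on the current unit. A compressed sketch of that loop, with unit and status names taken from the trace and all scaffolding illustrative rather than the real datashard code:

#include <iostream>
#include <string>
#include <vector>

enum class EStatus { Executed, ExecutedNoMoreRestarts, DelayComplete, DelayCompleteNoMoreRestarts, NotReady };

struct TUnit {
    std::string Name;
    EStatus (*Execute)();
};

int main() {
    // Unit chain for the schema tx, statuses as reported in the trace.
    std::vector<TUnit> plan = {
        {"CheckSchemeTx", [] { return EStatus::ExecutedNoMoreRestarts; }},
        {"StoreSchemeTx", [] { return EStatus::DelayCompleteNoMoreRestarts; }},
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::NotReady; }}, // parks until TEvPlanStep arrives
    };
    for (const auto& unit : plan) {
        std::cout << "Trying to execute on unit " << unit.Name << "\n";
        if (unit.Execute() == EStatus::NotReady) {
            std::cout << "not ready to execute on unit " << unit.Name << "\n";
            break; // operation stays queued here; Delay* units complete later
        }
        std::cout << "Advance execution plan, executing on unit " << unit.Name << "\n";
    }
    return 0;
}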
2025-03-28T09:27:01.371035Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:27:01.371372Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:27:01.371423Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:27:01.371457Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:27:01.371507Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:27:01.371559Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:27:01.371604Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:27:01.371642Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:27:01.371677Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:27:01.371704Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:27:01.373305Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:27:01.373365Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:27:01.386161Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:27:01.386248Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:27:01.386291Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:27:01.386336Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:27:01.386418Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:27:01.550923Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:01.550975Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:01.551021Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:27:01.551422Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:27:01.551461Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:27:01.551563Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:27:01.551616Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:27:01.551674Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:27:01.551719Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:27:01.556440Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:27:01.556524Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:27:01.557265Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:01.557307Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:01.557379Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:27:0 ... 4976715664] at 72075186224037889 executing on unit WaitForPlan 2025-03-28T09:28:36.918224Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit PlanQueue 2025-03-28T09:28:36.918397Z node 15 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715664 at step 3000 at tablet 72075186224037889 { Transactions { TxId: 281474976715664 AckTo { RawX1: 0 RawX2: 0 } } Step: 3000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-28T09:28:36.918434Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:28:36.918621Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:880:2712], Recipient [15:880:2712]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:28:36.918655Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:28:36.919168Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:28:36.919214Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:28:36.919249Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-28T09:28:36.919288Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2025-03-28T09:28:36.919320Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2025-03-28T09:28:36.919358Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-28T09:28:36.919401Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2025-03-28T09:28:36.919443Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2025-03-28T09:28:36.919483Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2025-03-28T09:28:36.919625Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2025-03-28T09:28:36.919669Z node 15 :TX_DATASHARD TRACE: 
Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-28T09:28:36.919711Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2025-03-28T09:28:36.919744Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-28T09:28:36.919776Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-28T09:28:36.919819Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2025-03-28T09:28:36.919866Z node 15 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2025-03-28T09:28:36.919908Z node 15 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2025-03-28T09:28:36.919973Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-28T09:28:36.919999Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-28T09:28:36.920026Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-03-28T09:28:36.920054Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2025-03-28T09:28:36.920158Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2025-03-28T09:28:36.920187Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-03-28T09:28:36.920227Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-03-28T09:28:36.920272Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2025-03-28T09:28:36.920298Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-28T09:28:36.920323Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-03-28T09:28:36.920347Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2025-03-28T09:28:36.920376Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-03-28T09:28:36.920522Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2025-03-28T09:28:36.920555Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2025-03-28T09:28:36.920596Z node 15 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2025-03-28T09:28:36.920636Z node 15 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2025-03-28T09:28:36.920669Z node 15 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-28T09:28:36.920695Z node 15 :TX_DATASHARD TRACE: Advance execution plan 
for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2025-03-28T09:28:36.920722Z node 15 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2025-03-28T09:28:36.920762Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:28:36.920801Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-28T09:28:36.920845Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-03-28T09:28:36.920885Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-03-28T09:28:36.943642Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2025-03-28T09:28:36.943813Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:28:36.943894Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-03-28T09:28:36.944015Z node 15 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1039:2837], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:28:36.944119Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:28:36.944522Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2025-03-28T09:28:36.944572Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:28:36.944603Z node 15 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-03-28T09:28:36.944648Z node 15 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1039:2837], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:28:36.944689Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:28:36.946422Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:593:2518], Recipient [15:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW RangesSize: 1 2025-03-28T09:28:36.946627Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:28:36.946810Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-03-28T09:28:36.946993Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-28T09:28:36.947074Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:28:36.947134Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:28:36.947194Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:28:36.947243Z node 15 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-03-28T09:28:36.947314Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 
2025-03-28T09:28:36.947348Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:28:36.947375Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:28:36.947403Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:28:36.947564Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW } 2025-03-28T09:28:36.948064Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2025-03-28T09:28:36.948141Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:593:2518], 1} after executionsCount# 1 2025-03-28T09:28:36.948228Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 1} sends rowCount# 2, bytes# 32, quota rows left# 18446744073709551613, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:28:36.948561Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 1} finished in read 2025-03-28T09:28:36.948681Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-28T09:28:36.948715Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:28:36.948746Z node 15 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:28:36.948775Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:28:36.948833Z node 15 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-28T09:28:36.948858Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:28:36.948896Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-03-28T09:28:36.948962Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:28:36.949150Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> KqpNewEngine::JoinIdxLookup [GOOD] >> KqpNewEngine::ItemsLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestInvalidRetentionCombinations [GOOD] Test command err: 2025-03-28T09:27:38.116697Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789672879507502:2224];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:38.116787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014e6/r3tmp/tmpOEinDt/pdisk_1.dat 2025-03-28T09:27:38.550432Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:38.564111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:38.564243Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:38.570031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8266, node 1 2025-03-28T09:27:38.786459Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:38.786481Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:38.786511Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:38.786627Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7170 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:39.173942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:39.312641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:7170 2025-03-28T09:27:39.512409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:27:42.973259Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486789689904760770:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:42.975763Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014e6/r3tmp/tmpDpgKMQ/pdisk_1.dat 2025-03-28T09:27:43.191662Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21965, node 4 2025-03-28T09:27:43.294625Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:43.296326Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:43.344641Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:43.344675Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:43.344681Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:43.344824Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:27:43.368652Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:43.650197Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:43.763889Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:24864 2025-03-28T09:27:43.988275Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
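The WaitRootIsUp blocks in these traces are a simple poll loop: the test client keeps issuing Ls requests for 'Root' until the response reports StatusCode: SUCCESS with CreateFinished: true, then prints the success line. A hedged sketch of such a loop; TrySchemeLs is a hypothetical stand-in for the client call, not the real TClient API:

#include <chrono>
#include <iostream>
#include <string>
#include <thread>

struct TLsResult { bool Success = false; bool CreateFinished = false; };

// Hypothetical stub; the real client issues an Ls RPC to the server.
TLsResult TrySchemeLs(const std::string&) { return {true, true}; }

bool WaitRootIsUp(const std::string& path, int attempts = 100) {
    std::cout << "WaitRootIsUp '" << path << "'...\n";
    for (int i = 0; i < attempts; ++i) {
        TLsResult r = TrySchemeLs(path);
        if (r.Success && r.CreateFinished) {
            std::cout << "WaitRootIsUp '" << path << "' success.\n";
            return true;
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }
    return false;
}

int main() { return WaitRootIsUp("Root") ? 0 : 1; }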
2025-03-28T09:27:44.228168Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:27:44.269710Z node 4 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [4:7486789698494696645:2834], for# user2@builtin, access# DescribeSchema 2025-03-28T09:27:44.281524Z node 4 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [4:7486789698494696648:2835], for# user2@builtin, access# DescribeSchema 2025-03-28T09:27:44.296292Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:27:47.780517Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486789713739755822:2165];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:47.780814Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014e6/r3tmp/tmptBI2TM/pdisk_1.dat 2025-03-28T09:27:48.016781Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:48.091902Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:48.091998Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:48.102932Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18127, node 7 2025-03-28T09:27:48.203474Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:27:48.203498Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:27:48.203509Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:27:48.203666Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17304 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:27:48.499890Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:27:48.596943Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:17304 2025-03-28T09:27:48.849091Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:27:48.888826Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-03-28T09:27:52.779011Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486789713739755822:2165];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:27:52.779182Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:28:02.977446Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:28:02.977476Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:33.027135Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486789908544472918:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:33.027180Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014e6/r3tmp/tmpEXUgVp/pdisk_1.dat 2025-03-28T09:28:33.391338Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:33.440848Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:33.440963Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:33.446516Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20876, node 10 2025-03-28T09:28:33.791024Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:33.791055Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:33.791065Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:33.791247Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3196 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:34.233909Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:34.374623Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:3196 2025-03-28T09:28:34.633201Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:34.658737Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480
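The four validation errors printed next state the stream limits exactly: retention must fit either { hours in [0, 24], storage = 0 } or { hours in [0, 168], storage in [51200, 1048576] MB }, and per-partition write_speed must be one of {131072, 524288, 1048576} bytes/s. Encoded directly from those messages (a standalone sketch, not the service's actual validator):

#include <cassert>
#include <cstdint>

bool RetentionOk(uint32_t hours, uint64_t storageMb) {
    // { hours : [0, 24], storage : [0, 0] }
    if (hours <= 24 && storageMb == 0) return true;
    // { hours : [0, 168], storage : [51200, 1048576] }
    if (hours <= 168 && storageMb >= 51200 && storageMb <= 1048576) return true;
    return false;
}

bool WriteSpeedOk(uint64_t bytesPerSec) {
    return bytesPerSec == 131072 || bytesPerSec == 524288 || bytesPerSec == 1048576;
}

int main() {
    assert(!RetentionOk(168, 10));  // "hours 168, storage 10" -> rejected below
    assert(!RetentionOk(144, 0));   // "hours 144, storage 0" -> rejected below
    assert(!WriteSpeedOk(130048));  // rejected below
    assert(!WriteSpeedOk(1049600)); // rejected below
    assert(WriteSpeedOk(524288));   // a value from the allowed set
    return 0;
}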
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 10, code: 500080
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 144, storage 0, code: 500080
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 130048, code: 500080
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 1049600, code: 500080 >> TPersQueueTest::WhenTheTopicIsDeletedAfterReadingTheData_Uncompressed [GOOD] >> TTopicYqlTest::CreateAndAlterTopicYql >> KqpNotNullColumns::Describe [GOOD] >> KqpNotNullColumns::CreateTableWithNotNullColumns |89.7%| [TA] $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [TA] {RESULT} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardReadIterator::ShouldCommitLocksWhenReadWriteInSeparateTransactions [GOOD] >> DataShardReadIterator::HandlePersistentSnapshotGoneInContinue [GOOD] >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] >> KqpRanges::IsNullPartial >> TColumnShardTestReadWrite::ReadGroupBy [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithDeletesInTheMiddle [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndex [GOOD] >> KqpNotNullColumns::UpdateTable_UniqIndexPg >> KqpNewEngine::MultiSelect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::HandleMvccGoneInContinue [GOOD] Test command err: 2025-03-28T09:27:15.884838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:27:15.885269Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:27:15.885321Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001019/r3tmp/tmp7G0QfL/pdisk_1.dat 2025-03-28T09:27:16.269498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:27:16.316461Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:16.360409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:16.360537Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:16.372275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:27:16.453889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:27:16.505800Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:27:16.506608Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:27:16.507154Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:27:16.507406Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:27:16.554041Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:27:16.554723Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:27:16.554862Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:27:16.556697Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:27:16.556825Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:27:16.556914Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:27:16.557303Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:27:16.557461Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:27:16.557543Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:27:16.571080Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:27:16.594470Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:27:16.594899Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:27:16.595036Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:27:16.595076Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:27:16.595106Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:27:16.595139Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:27:16.595342Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:16.595409Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:16.595683Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:27:16.595762Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:27:16.596178Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:27:16.596235Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:27:16.596327Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:27:16.596375Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:27:16.596407Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:27:16.596441Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:27:16.596483Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:27:16.596615Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:16.596650Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:16.596690Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:27:16.596751Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:27:16.596786Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:27:16.596899Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:27:16.597096Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:27:16.597169Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:27:16.597279Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:27:16.597332Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:27:16.597373Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:27:16.597406Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
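The records that follow show the two-phase shape of these transactions: the proposed tx is acknowledged with status: PREPARED, and only once a mediator plan step arrives (TEvPlanStep step# 1000) does it become the planned operation [1000:281474976715657] and enter the PlanQueue. Planned operations are identified and ordered by the (step, txId) pair, which is what keeps execution order consistent across tablets (both 72075186224037888 and 72075186224037889 execute [3000:281474976715664] above). A minimal illustration of that ordering (standalone, not the real scheduler):

#include <cassert>
#include <cstdint>
#include <set>
#include <utility>

// Planned operations are identified as [step:txId], e.g. [1000:281474976715657].
using TOpKey = std::pair<uint64_t /*step*/, uint64_t /*txId*/>;

int main() {
    std::set<TOpKey> planQueue;
    planQueue.insert({3000, 281474976715664ULL}); // inserted first
    planQueue.insert({1000, 281474976715657ULL}); // still executes first
    // The earlier plan step wins regardless of insertion order.
    assert(planQueue.begin()->first == 1000);
    return 0;
}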
2025-03-28T09:27:16.597436Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:27:16.597708Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:27:16.597740Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:27:16.597773Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:27:16.597824Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:27:16.597870Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:27:16.597899Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:27:16.597936Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:27:16.597964Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:27:16.597986Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:27:16.599445Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:27:16.599497Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:27:16.610244Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:27:16.610341Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:27:16.610376Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:27:16.610414Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:27:16.610480Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:27:16.763273Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:16.763334Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:16.763396Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:27:16.763838Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:27:16.763876Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:27:16.763976Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:27:16.764013Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:27:16.764052Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:27:16.764087Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:27:16.768315Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:27:16.768385Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:27:16.769158Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:16.769197Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:16.769262Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:27:1 ... de 13 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715665] at 72075186224037888 has finished 2025-03-28T09:28:40.404697Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:28:40.404757Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-28T09:28:40.404831Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:28:40.404890Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:28:40.405122Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [13:881:2712], Recipient [13:881:2712]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:28:40.405159Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:28:40.405226Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:28:40.405264Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:28:40.405295Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-28T09:28:40.405329Z node 13 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715665] in PlanQueue unit at 72075186224037889 2025-03-28T09:28:40.405360Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit PlanQueue 2025-03-28T09:28:40.405391Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-03-28T09:28:40.405420Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit PlanQueue 2025-03-28T09:28:40.405453Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit LoadTxDetails 2025-03-28T09:28:40.405485Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit LoadTxDetails 2025-03-28T09:28:40.405670Z node 13 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715665 keys extracted: 0 2025-03-28T09:28:40.405738Z node 13 :TX_DATASHARD TRACE: 
Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-03-28T09:28:40.405776Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit LoadTxDetails 2025-03-28T09:28:40.405807Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-28T09:28:40.405855Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-28T09:28:40.405904Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically complete end at 72075186224037889 2025-03-28T09:28:40.405943Z node 13 :TX_DATASHARD TRACE: Operation [3500:281474976715665] is the new logically incomplete end at 72075186224037889 2025-03-28T09:28:40.405985Z node 13 :TX_DATASHARD TRACE: Activated operation [3500:281474976715665] at 72075186224037889 2025-03-28T09:28:40.406029Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-03-28T09:28:40.406054Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-28T09:28:40.406092Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-03-28T09:28:40.406121Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CreateVolatileSnapshot 2025-03-28T09:28:40.406231Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is ExecutedNoMoreRestarts 2025-03-28T09:28:40.406260Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-03-28T09:28:40.406296Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-03-28T09:28:40.406333Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit DropVolatileSnapshot 2025-03-28T09:28:40.406356Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-03-28T09:28:40.406380Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-03-28T09:28:40.406407Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompleteOperation 2025-03-28T09:28:40.406432Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2025-03-28T09:28:40.406583Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is DelayComplete 2025-03-28T09:28:40.406627Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715665] at 72075186224037889 executing on unit CompleteOperation 2025-03-28T09:28:40.406668Z node 13 :TX_DATASHARD TRACE: Add [3500:281474976715665] at 72075186224037889 to execution unit CompletedOperations 2025-03-28T09:28:40.406813Z node 13 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715665] at 72075186224037889 on unit CompletedOperations 2025-03-28T09:28:40.406850Z node 13 :TX_DATASHARD TRACE: Execution status for [3500:281474976715665] at 72075186224037889 is Executed 2025-03-28T09:28:40.406873Z node 13 :TX_DATASHARD TRACE: Advance execution plan 
for [3500:281474976715665] at 72075186224037889 executing on unit CompletedOperations 2025-03-28T09:28:40.406902Z node 13 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715665] at 72075186224037889 has finished 2025-03-28T09:28:40.406937Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:28:40.406973Z node 13 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-28T09:28:40.407012Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-03-28T09:28:40.407049Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-03-28T09:28:40.418147Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-03-28T09:28:40.418310Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:28:40.418417Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037888 on unit CompleteOperation 2025-03-28T09:28:40.418522Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037888 at tablet 72075186224037888 send result to client [13:1073:2868], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:28:40.418614Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:28:40.418992Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-03-28T09:28:40.419045Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:28:40.419079Z node 13 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715665] at 72075186224037889 on unit CompleteOperation 2025-03-28T09:28:40.419124Z node 13 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [13:1073:2868], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:28:40.419162Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:28:40.420925Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [13:593:2518], Recipient [13:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-03-28T09:28:40.421123Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:28:40.421226Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2025-03-28T09:28:40.421433Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-03-28T09:28:40.421509Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:28:40.421584Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:28:40.421635Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:28:40.421683Z node 13 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2025-03-28T09:28:40.421746Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 
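[Editor's note, for readers following the trace: each datashard operation above is advanced through a fixed sequence of execution units (PlanQueue, LoadTxDetails, BuildAndWaitDependencies, CreateVolatileSnapshot, DropVolatileSnapshot, CompleteOperation, CompletedOperations), and the per-unit status — Executed, ExecutedNoMoreRestarts, DelayComplete — decides whether the plan advances immediately or defers work to the transaction's Complete phase. The following is a minimal, self-contained C++ sketch of that pipeline shape; the unit and status names are copied from the trace, but the code is illustrative only and is not YDB's actual TPipeline implementation.]

    // Minimal sketch (assumption: illustrative names, not YDB's real classes).
    // An operation walks an ordered list of execution units; units that return
    // DelayComplete have their completion run later, after the tx commits --
    // mirroring the "Complete execution ... on unit CompleteOperation" lines.
    #include <cstdio>
    #include <deque>
    #include <functional>
    #include <string>
    #include <vector>

    enum class EStatus { Executed, ExecutedNoMoreRestarts, DelayComplete };

    struct TUnit {
        std::string Name;
        std::function<EStatus()> Execute;
    };

    int main() {
        std::vector<TUnit> plan = {
            {"PlanQueue",                []{ return EStatus::Executed; }},
            {"LoadTxDetails",            []{ return EStatus::Executed; }},
            {"BuildAndWaitDependencies", []{ return EStatus::Executed; }},
            {"CreateVolatileSnapshot",   []{ return EStatus::ExecutedNoMoreRestarts; }},
            {"DropVolatileSnapshot",     []{ return EStatus::Executed; }},
            {"CompleteOperation",        []{ return EStatus::DelayComplete; }},
            {"CompletedOperations",      []{ return EStatus::Executed; }},
        };
        std::deque<std::string> delayed; // completions deferred past commit
        for (const auto& unit : plan) {
            EStatus st = unit.Execute();
            std::printf("executing on unit %s\n", unit.Name.c_str());
            if (st == EStatus::DelayComplete)
                delayed.push_back(unit.Name);
        }
        for (const auto& name : delayed)
            std::printf("Complete execution on unit %s\n", name.c_str());
    }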
2025-03-28T09:28:40.421773Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:28:40.421796Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:28:40.421818Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:28:40.421977Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715665 } ResultFormat: FORMAT_ARROW } 2025-03-28T09:28:40.422748Z node 13 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715665 2025-03-28T09:28:40.422856Z node 13 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[13:593:2518], 1} after executionsCount# 1 2025-03-28T09:28:40.422943Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:593:2518], 1} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:28:40.423201Z node 13 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[13:593:2518], 1} finished in read 2025-03-28T09:28:40.423303Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-03-28T09:28:40.423333Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:28:40.423378Z node 13 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:28:40.423408Z node 13 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:28:40.423821Z node 13 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-03-28T09:28:40.423857Z node 13 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:28:40.423905Z node 13 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-03-28T09:28:40.423986Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:28:40.424179Z node 13 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> KqpNewEngine::SqlInFromCompact [GOOD] >> KqpNewEngine::SqlInAsScalar >> KqpNewEngine::PkRangeSelect3 [GOOD] >> KqpNewEngine::PkRangeSelect4 >> KqpRanges::DuplicateKeyPredicateMixed [GOOD] >> KqpRanges::DuplicateCompositeKeyPredicate >> KqpSort::TopSortExprPk [GOOD] >> KqpSort::TopSortTableExpr ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; -- group by key: 0 2025-03-28T09:26:44.842134Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:44.946642Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:44.966910Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:44.967215Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:44.975582Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:44.975809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:44.976019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:44.976124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:44.976277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:44.976393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:44.976516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:44.976623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:44.976725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:44.976848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.976958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:44.977086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:45.005490Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:45.005812Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:45.005868Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2025-03-28T09:26:45.006036Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:45.006200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:45.006291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:45.006356Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:45.006502Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:45.006575Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:45.006680Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:45.006746Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:45.006938Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:45.007016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:45.007063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:45.007095Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:45.007196Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:45.007247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:45.007291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:45.007323Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:45.007398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:45.007441Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:45.007489Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:45.007543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:45.007588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:45.007644Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:45.008092Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=64; 2025-03-28T09:26:45.008187Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-03-28T09:26:45.008293Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=52; 2025-03-28T09:26:45.008381Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-03-28T09:26:45.008563Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:45.008629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:45.008671Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:45.008883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:45.008934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:45.008966Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:45.009167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:45.009229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:45.009278Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:45.009478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:45.009521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:45.009568Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:45.009742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:45.009797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:45.009858Z node 1 :TX_COLUMNS ... D_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=merge.cpp:74;event=DoApply;interval_idx=0; 2025-03-28T09:28:41.086271Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:21;event=interval_result_received;interval_idx=0;intervalId=2052; 2025-03-28T09:28:41.086335Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:47;event=interval_result;interval_idx=0;count=1;merger=0;interval_id=2052; 2025-03-28T09:28:41.086393Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=scanner.cpp:65;event=intervals_finished; 2025-03-28T09:28:41.086535Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-03-28T09:28:41.086591Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=1;count=1;finished=1; 2025-03-28T09:28:41.086638Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:28:41.087358Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:28:41.087565Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: 
uint64;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-03-28T09:28:41.087626Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:28:41.087764Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;);columns=4;rows=1; 2025-03-28T09:28:41.087858Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=26;num_rows=1;batch_columns=100,101,102,103; 2025-03-28T09:28:41.088022Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[54:434:2452];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2025-03-28T09:28:41.088185Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-03-28T09:28:41.088348Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-03-28T09:28:41.088481Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-03-28T09:28:41.088826Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:28:41.088976Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-03-28T09:28:41.089110Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-03-28T09:28:41.089170Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: Scan [54:435:2453] finished for tablet 9437184 2025-03-28T09:28:41.089868Z node 54 :TX_COLUMNSHARD_SCAN INFO: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[54:434:2452];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.017},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.019}],"full":{"a":1743154121069684,"name":"_full_task","f":1743154121069684,"d_finished":0,"c":0,"l":1743154121089237,"d":19553},"events":[{"name":"bootstrap","f":1743154121069911,"d_finished":3201,"c":1,"l":1743154121073112,"d":3201},{"a":1743154121088798,"name":"ack","f":1743154121087326,"d_finished":1194,"c":1,"l":1743154121088520,"d":1633},{"a":1743154121088780,"name":"processing","f":1743154121074851,"d_finished":9633,"c":10,"l":1743154121088523,"d":10090},{"name":"ProduceResults","f":1743154121071551,"d_finished":3392,"c":13,"l":1743154121089147,"d":3392},{"a":1743154121089153,"name":"Finish","f":1743154121089153,"d_finished":0,"c":0,"l":1743154121089237,"d":84},{"name":"task_result","f":1743154121074871,"d_finished":8259,"c":9,"l":1743154121086725,"d":8259}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-03-28T09:28:41.089972Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[54:434:2452];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:28:41.090603Z node 54 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[54:434:2452];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.017},{"events":["l_ProduceResults","f_Finish"],"t":0.019},{"events":["l_ack","l_processing","l_Finish"],"t":0.02}],"full":{"a":1743154121069684,"name":"_full_task","f":1743154121069684,"d_finished":0,"c":0,"l":1743154121090028,"d":20344},"events":[{"name":"bootstrap","f":1743154121069911,"d_finished":3201,"c":1,"l":1743154121073112,"d":3201},{"a":1743154121088798,"name":"ack","f":1743154121087326,"d_finished":1194,"c":1,"l":1743154121088520,"d":2424},{"a":1743154121088780,"name":"processing","f":1743154121074851,"d_finished":9633,"c":10,"l":1743154121088523,"d":10881},{"name":"ProduceResults","f":1743154121071551,"d_finished":3392,"c":13,"l":1743154121089147,"d":3392},{"a":1743154121089153,"name":"Finish","f":1743154121089153,"d_finished":0,"c":0,"l":1743154121090028,"d":875},{"name":"task_result","f":1743154121074871,"d_finished":8259,"c":9,"l":1743154121086725,"d":8259}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;;); 2025-03-28T09:28:41.090738Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:28:41.069118Z;index_granules=0;index_portions=1;index_batches=2;committed_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=16001;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=16001;selected_rows=0; 2025-03-28T09:28:41.090803Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:28:41.091191Z node 54 :TX_COLUMNSHARD_SCAN INFO: SelfId=[54:435:2453];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldProperlyOrderConflictingTransactionsMvcc-UseSink [GOOD] Test command err: 2025-03-28T09:27:00.281568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:27:00.282141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:27:00.282198Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00105e/r3tmp/tmpjFq8vc/pdisk_1.dat 2025-03-28T09:27:00.897925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:27:00.972894Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:01.020397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:01.020523Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:01.032456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:27:01.136765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:27:01.217287Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:27:01.218584Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:27:01.219156Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:27:01.219459Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:27:01.262856Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:27:01.263703Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:27:01.263884Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:27:01.269701Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:27:01.269820Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:27:01.269882Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:27:01.271938Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:27:01.272116Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:27:01.272244Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:27:01.286000Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:27:01.328978Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:27:01.329210Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:27:01.329377Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:27:01.329424Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:27:01.329466Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:27:01.329505Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:27:01.329753Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:01.329823Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:01.330197Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:27:01.330320Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:27:01.331215Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:27:01.331274Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:27:01.331338Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:27:01.331396Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:27:01.331437Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:27:01.331475Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:27:01.331532Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:27:01.331673Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:01.331716Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:01.331788Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:27:01.331863Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:27:01.331906Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:27:01.332034Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:27:01.332451Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:27:01.332549Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:27:01.332678Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:27:01.332744Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:27:01.332791Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:27:01.332833Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
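[Editor's note: the bracketed markers such as [1000:281474976715657] in this trace, and the Snapshot { Step ... TxId ... } fields later on, are (plan step, transaction id) pairs — "Planned transaction txId 281474976715657 at step 1000" becomes operation [1000:281474976715657]. Such versions order lexicographically, step first. A hedged sketch of that comparison follows; TRowVersion is used here as an illustrative name and the code is not presented as YDB's actual type.]

    // Illustrative sketch (assumption: TRowVersion is a stand-in name).
    // Versions compare by (Step, TxId), so a snapshot read at an earlier
    // step cannot observe writes planned at a later step.
    #include <cassert>
    #include <cstdint>
    #include <tuple>

    struct TRowVersion {
        uint64_t Step;
        uint64_t TxId;
        friend bool operator<(const TRowVersion& a, const TRowVersion& b) {
            return std::tie(a.Step, a.TxId) < std::tie(b.Step, b.TxId);
        }
    };

    int main() {
        TRowVersion planned{3500, 281474976715665ull};
        // A snapshot at {Step: 3000, TxId: 18446744073709551615} (max uint64,
        // as printed in the trace) sorts before anything planned at step 3500.
        TRowVersion snapshot{3000, UINT64_MAX};
        assert(snapshot < planned);
    }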
2025-03-28T09:27:01.332869Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:27:01.333189Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:27:01.333229Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:27:01.333267Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:27:01.333321Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:27:01.333379Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:27:01.333409Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:27:01.333451Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:27:01.333485Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:27:01.333512Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:27:01.335630Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:27:01.335715Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:27:01.350621Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:27:01.350722Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:27:01.350762Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:27:01.350809Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:27:01.350892Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:27:01.514832Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:01.514916Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:01.514978Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:27:01.515494Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:27:01.515546Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:27:01.515680Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:27:01.515754Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:27:01.515801Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:27:01.515849Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:27:01.523820Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:27:01.523910Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:27:01.524839Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:01.524891Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:01.524969Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:27:0 ... TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:28:41.337261Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-03-28T09:28:41.337349Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:28:41.337419Z node 15 :TX_DATASHARD DEBUG: Found ready candidate operation [0:8] at 72075186224037888 for ExecuteRead 2025-03-28T09:28:41.337781Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [15:61:2108], Recipient [15:1060:2855]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 15 Status: STATUS_NOT_FOUND 2025-03-28T09:28:41.337941Z node 15 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 6, sender: [15:593:2518], selfId: [15:57:2104], source: [15:1160:2929] 2025-03-28T09:28:41.338073Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:666:2570], Recipient [15:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:28:41.338106Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:28:41.338149Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:28:41.338181Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:28:41.338211Z node 15 :TX_DATASHARD DEBUG: Return cached ready operation [0:8] at 72075186224037888 2025-03-28T09:28:41.338256Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:28:41.338368Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 2, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-03-28T09:28:41.338778Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-03-28T09:28:41.338822Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:593:2518], 3} after 
executionsCount# 2 2025-03-28T09:28:41.338864Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:28:41.339012Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 3} finished in read 2025-03-28T09:28:41.339076Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-03-28T09:28:41.339104Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:28:41.339132Z node 15 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:28:41.339161Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:28:41.339217Z node 15 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-03-28T09:28:41.339257Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:28:41.339299Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-03-28T09:28:41.339331Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:28:41.339415Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-28T09:28:41.339488Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:28:41.339555Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:28:41.340168Z node 15 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=15&id=YmE1OTY4MDktMmFjMDQxMzctOTNlMTI4ZGItYmFlY2JmYWQ=, workerId: [15:1160:2929], local sessions count: 0 2025-03-28T09:28:41.341658Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:593:2518], Recipient [15:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-03-28T09:28:41.341854Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:28:41.341961Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CheckRead 2025-03-28T09:28:41.342098Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-03-28T09:28:41.342165Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:28:41.342235Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:28:41.342307Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:28:41.342372Z node 15 :TX_DATASHARD TRACE: Activated operation [0:9] at 72075186224037888 2025-03-28T09:28:41.342438Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-03-28T09:28:41.342465Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit BuildAndWaitDependencies 
2025-03-28T09:28:41.342489Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:28:41.342513Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:28:41.342669Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 4 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW } 2025-03-28T09:28:41.343137Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2025-03-28T09:28:41.343236Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:593:2518], 4} after executionsCount# 1 2025-03-28T09:28:41.343338Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 4} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:28:41.343624Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 4} finished in read 2025-03-28T09:28:41.343745Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-03-28T09:28:41.343776Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:28:41.343803Z node 15 :TX_DATASHARD TRACE: Add [0:9] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:28:41.343833Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:9] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:28:41.343882Z node 15 :TX_DATASHARD TRACE: Execution status for [0:9] at 72075186224037888 is Executed 2025-03-28T09:28:41.343924Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:9] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:28:41.343972Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:9] at 72075186224037888 has finished 2025-03-28T09:28:41.344071Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:28:41.345152Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:593:2518], Recipient [15:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW KeysSize: 1 2025-03-28T09:28:41.345331Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:28:41.345459Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CheckRead 2025-03-28T09:28:41.345593Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed 2025-03-28T09:28:41.345663Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:28:41.345727Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:28:41.345780Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:28:41.345837Z node 15 :TX_DATASHARD TRACE: Activated operation [0:10] at 72075186224037888 
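[Editor's note: the read-iterator lines above report quota bookkeeping — after sending one row of 16 bytes the trace shows "quota rows left# 18446744073709551614" and "quota bytes left# 18446744073709551599", i.e. max uint64 minus 1 and minus 16. This suggests that with no explicit quota the iterator starts from an effectively unlimited budget and subtracts what each result block consumes. A rough sketch under that assumption; the struct and event names here are illustrative, not YDB's API.]

    // Rough sketch (assumption: quota starts at max uint64 when unspecified).
    // Consume() refuses a block that would exceed the remaining budget, at
    // which point a real iterator would pause until the client acknowledges.
    #include <cstdint>
    #include <cstdio>

    struct TReadQuota {
        uint64_t RowsLeft  = UINT64_MAX;
        uint64_t BytesLeft = UINT64_MAX;
        bool Consume(uint64_t rows, uint64_t bytes) {
            if (rows > RowsLeft || bytes > BytesLeft)
                return false; // budget exhausted: stop sending
            RowsLeft  -= rows;
            BytesLeft -= bytes;
            return true;
        }
    };

    int main() {
        TReadQuota q;
        q.Consume(1, 16); // one 16-byte row, as in the trace above
        std::printf("rows left %llu, bytes left %llu\n",
                    (unsigned long long)q.RowsLeft,
                    (unsigned long long)q.BytesLeft);
        // prints 18446744073709551614 and 18446744073709551599,
        // matching the quota counters in the log
    }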
2025-03-28T09:28:41.345902Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed
2025-03-28T09:28:41.345931Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit BuildAndWaitDependencies
2025-03-28T09:28:41.345959Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit ExecuteRead
2025-03-28T09:28:41.345982Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit ExecuteRead
2025-03-28T09:28:41.346154Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 5 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_ARROW }
2025-03-28T09:28:41.346630Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615
2025-03-28T09:28:41.346752Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:593:2518], 5} after executionsCount# 1
2025-03-28T09:28:41.346850Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 5} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0
2025-03-28T09:28:41.347121Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 5} finished in read
2025-03-28T09:28:41.347241Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed
2025-03-28T09:28:41.347271Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit ExecuteRead
2025-03-28T09:28:41.347304Z node 15 :TX_DATASHARD TRACE: Add [0:10] at 72075186224037888 to execution unit CompletedOperations
2025-03-28T09:28:41.347333Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:10] at 72075186224037888 on unit CompletedOperations
2025-03-28T09:28:41.347382Z node 15 :TX_DATASHARD TRACE: Execution status for [0:10] at 72075186224037888 is Executed
2025-03-28T09:28:41.347420Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:10] at 72075186224037888 executing on unit CompletedOperations
2025-03-28T09:28:41.347462Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:10] at 72075186224037888 has finished
2025-03-28T09:28:41.347531Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888
>> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD]
>> KqpNewEngine::DeleteOn+UseSink
>> TPersQueueTest::ReadWithoutConsumerFederation [GOOD]
>> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen
>> KqpMergeCn::TopSortBy_Int32_Limit3 [GOOD]
>> KqpMergeCn::TopSortBy_Float_Limit4
>> YdbOlapStore::LogCountByResource [GOOD]
>> KqpRanges::IsNull
>> KqpSqlIn::KeySuffix [GOOD]
>> KqpSqlIn::KeySuffix_OnlyTail
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD]

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD]
Test command err:
2025-03-28T09:26:40.527515Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-28T09:26:40.632389Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-28T09:26:40.658746Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-28T09:26:40.659057Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-28T09:26:40.666061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-28T09:26:40.666325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-28T09:26:40.666571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-28T09:26:40.666719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-28T09:26:40.666830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-28T09:26:40.666967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-28T09:26:40.667112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-28T09:26:40.667232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-28T09:26:40.667370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-28T09:26:40.667497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-28T09:26:40.667604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-28T09:26:40.667751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-28T09:26:40.694765Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-03-28T09:26:40.695109Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-03-28T09:26:40.695168Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-03-28T09:26:40.695369Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-28T09:26:40.695535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-03-28T09:26:40.695627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-03-28T09:26:40.695690Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-03-28T09:26:40.695803Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-03-28T09:26:40.695875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-03-28T09:26:40.695919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-03-28T09:26:40.695954Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-03-28T09:26:40.696127Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-28T09:26:40.696192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-03-28T09:26:40.696243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-03-28T09:26:40.696291Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-03-28T09:26:40.696455Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-03-28T09:26:40.696511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-03-28T09:26:40.696558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-03-28T09:26:40.696588Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-03-28T09:26:40.696675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-03-28T09:26:40.696713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-03-28T09:26:40.696767Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-03-28T09:26:40.696824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-03-28T09:26:40.696861Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-03-28T09:26:40.696906Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-03-28T09:26:40.697290Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=37;
2025-03-28T09:26:40.697394Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=50;
2025-03-28T09:26:40.697488Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=47;
2025-03-28T09:26:40.697563Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=34;
2025-03-28T09:26:40.697743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-03-28T09:26:40.697799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-03-28T09:26:40.697831Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-03-28T09:26:40.698032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-03-28T09:26:40.698108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-03-28T09:26:40.698143Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-03-28T09:26:40.698306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-03-28T09:26:40.698350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-03-28T09:26:40.698384Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-03-28T09:26:40.698549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-03-28T09:26:40.698585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-03-28T09:26:40.698614Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks;
2025-03-28T09:26:40.699106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-03-28T09:26:40.699175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-03-28T09:26:40.699258Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish;
...
9Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:117:2768:0];
2025-03-28T09:28:42.683047Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:118:2768:0];
2025-03-28T09:28:42.683105Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:119:2768:0];
2025-03-28T09:28:42.683163Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:120:2768:0];
2025-03-28T09:28:42.683225Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:121:2768:0];
2025-03-28T09:28:42.683282Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:122:2768:0];
2025-03-28T09:28:42.683337Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:123:2768:0];
2025-03-28T09:28:42.683391Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:124:2768:0];
2025-03-28T09:28:42.683447Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:125:2768:0];
2025-03-28T09:28:42.683506Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:126:2768:0];
2025-03-28T09:28:42.683567Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:127:2768:0];
2025-03-28T09:28:42.683630Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:128:2696:0];
2025-03-28T09:28:42.683691Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:129:2696:0];
2025-03-28T09:28:42.683754Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:130:2696:0];
2025-03-28T09:28:42.683810Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:131:2696:0];
2025-03-28T09:28:42.683866Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:132:8920:0];
2025-03-28T09:28:42.683922Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:133:2776:0];
2025-03-28T09:28:42.684000Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:134:2776:0];
2025-03-28T09:28:42.684060Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:135:2776:0];
2025-03-28T09:28:42.684118Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:136:2776:0];
2025-03-28T09:28:42.684179Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:137:2776:0];
2025-03-28T09:28:42.684240Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:138:2768:0];
2025-03-28T09:28:42.684298Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:139:2768:0];
2025-03-28T09:28:42.684359Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:140:2768:0];
2025-03-28T09:28:42.684427Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:141:2768:0];
2025-03-28T09:28:42.684491Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:142:2768:0];
2025-03-28T09:28:42.684558Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:143:2768:0];
2025-03-28T09:28:42.684624Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:144:2768:0];
2025-03-28T09:28:42.684684Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:145:2768:0];
2025-03-28T09:28:42.684745Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:146:2768:0];
2025-03-28T09:28:42.684807Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:147:2768:0];
2025-03-28T09:28:42.684892Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:148:2768:0];
2025-03-28T09:28:42.684956Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:149:2768:0];
2025-03-28T09:28:42.685017Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:150:2768:0];
2025-03-28T09:28:42.685080Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:151:2768:0];
2025-03-28T09:28:42.685140Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:152:2768:0];
2025-03-28T09:28:42.685198Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:153:2768:0];
2025-03-28T09:28:42.685263Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:154:2768:0];
2025-03-28T09:28:42.685327Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:155:2768:0];
2025-03-28T09:28:42.685388Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:156:2768:0];
2025-03-28T09:28:42.685452Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:157:2768:0];
2025-03-28T09:28:42.685512Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:158:2768:0];
2025-03-28T09:28:42.685571Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:159:2768:0];
2025-03-28T09:28:42.685632Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:160:2768:0];
2025-03-28T09:28:42.685694Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:161:2768:0];
2025-03-28T09:28:42.685760Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:162:2768:0];
2025-03-28T09:28:42.685823Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:163:2768:0];
2025-03-28T09:28:42.685885Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:164:2768:0];
2025-03-28T09:28:42.685952Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:165:2768:0];
2025-03-28T09:28:42.686014Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:166:2768:0];
2025-03-28T09:28:42.686079Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:167:2768:0];
2025-03-28T09:28:42.686139Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:168:2768:0];
2025-03-28T09:28:42.686201Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:169:2768:0];
2025-03-28T09:28:42.686256Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:170:2768:0];
2025-03-28T09:28:42.686324Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:171:2768:0];
2025-03-28T09:28:42.686386Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:172:2696:0];
2025-03-28T09:28:42.686444Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:173:2696:0];
2025-03-28T09:28:42.686509Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:174:2696:0];
2025-03-28T09:28:42.686573Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:175:2696:0];
2025-03-28T09:28:42.686636Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:176:8904:0];
2025-03-28T09:28:43.276481Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184
2025-03-28T09:28:43.277460Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184
2025-03-28T09:28:43.387224Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 747
2025-03-28T09:28:43.395793Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2213112;raw_bytes=2475629;count=1;records=26059} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7587944;raw_bytes=7088522;count=3;records=75200} inactive {blob_bytes=100101848;raw_bytes=103700153;count=42;records=1100341} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184
2025-03-28T09:28:43.755058Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=90649c0-bb711f0-82e00061-7f896af7;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1;
2025-03-28T09:28:43.755148Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=90649c0-bb711f0-82e00061-7f896af7;fline=with_appended.cpp:65;portions=47,;task_id=90649c0-bb711f0-82e00061-7f896af7;
2025-03-28T09:28:43.756023Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=90649c0-bb711f0-82e00061-7f896af7;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::90649c0-bb711f0-82e00061-7f896af7;
2025-03-28T09:28:43.756139Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=90649c0-bb711f0-82e00061-7f896af7;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:1;path_id:1;size:7587944;portions_count:47;);
2025-03-28T09:28:43.756195Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=90649c0-bb711f0-82e00061-7f896af7;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-03-28T09:28:43.756270Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=90649c0-bb711f0-82e00061-7f896af7;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-03-28T09:28:43.756337Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=90649c0-bb711f0-82e00061-7f896af7;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=8;
2025-03-28T09:28:43.756416Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=90649c0-bb711f0-82e00061-7f896af7;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102;
2025-03-28T09:28:43.756465Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=90649c0-bb711f0-82e00061-7f896af7;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-03-28T09:28:43.756520Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=90649c0-bb711f0-82e00061-7f896af7;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes;
2025-03-28T09:28:43.756566Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=90649c0-bb711f0-82e00061-7f896af7;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-03-28T09:28:43.756661Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=90649c0-bb711f0-82e00061-7f896af7;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.944000s;
2025-03-28T09:28:43.756719Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=90649c0-bb711f0-82e00061-7f896af7;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
2025-03-28T09:28:43.756925Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 747
2025-03-28T09:28:43.758665Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=39;external_task_id=90649c0-bb711f0-82e00061-7f896af7;mem=11009198;cpu=0;
2025-03-28T09:28:43.760466Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
>> KqpNewEngine::KeyColumnOrder
>> KqpNotNullColumns::CreateTableWithNotNullColumns [GOOD]
>> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD]

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadRangeRightBorder-EvWrite [GOOD]
Test command err:
2025-03-28T09:27:00.522835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-28T09:27:00.523334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:27:00.523398Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001068/r3tmp/tmpd5zIiW/pdisk_1.dat
2025-03-28T09:27:00.923418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-28T09:27:00.978980Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:27:01.036492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:27:01.036634Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:27:01.050955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:27:01.136513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-28T09:27:01.228923Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot
2025-03-28T09:27:01.230082Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored
2025-03-28T09:27:01.230590Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570]
2025-03-28T09:27:01.230878Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:27:01.293081Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-03-28T09:27:01.293813Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:27:01.293957Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-03-28T09:27:01.296027Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-03-28T09:27:01.296125Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-03-28T09:27:01.296181Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-03-28T09:27:01.296615Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-03-28T09:27:01.296768Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-03-28T09:27:01.296882Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1
2025-03-28T09:27:01.310661Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-03-28T09:27:01.344390Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-03-28T09:27:01.344618Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-03-28T09:27:01.344777Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580]
2025-03-28T09:27:01.344821Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-03-28T09:27:01.344890Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-03-28T09:27:01.344940Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-28T09:27:01.345209Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-03-28T09:27:01.345268Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-03-28T09:27:01.345609Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-03-28T09:27:01.345729Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-03-28T09:27:01.346224Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-28T09:27:01.346274Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-03-28T09:27:01.346336Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888
2025-03-28T09:27:01.346395Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-03-28T09:27:01.346431Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-03-28T09:27:01.346466Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-03-28T09:27:01.346512Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-28T09:27:01.346639Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-28T09:27:01.346677Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-28T09:27:01.346762Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0]
2025-03-28T09:27:01.346838Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574]
2025-03-28T09:27:01.346879Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-03-28T09:27:01.346985Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-03-28T09:27:01.347221Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx
2025-03-28T09:27:01.347299Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-03-28T09:27:01.347415Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-03-28T09:27:01.347465Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts
2025-03-28T09:27:01.347502Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx
2025-03-28T09:27:01.347544Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx
2025-03-28T09:27:01.347580Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-03-28T09:27:01.347938Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts
2025-03-28T09:27:01.348035Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx
2025-03-28T09:27:01.348073Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose
2025-03-28T09:27:01.348138Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-03-28T09:27:01.348199Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete
2025-03-28T09:27:01.348230Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose
2025-03-28T09:27:01.348268Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan
2025-03-28T09:27:01.348301Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-03-28T09:27:01.348330Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan
2025-03-28T09:27:01.356272Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated
2025-03-28T09:27:01.356346Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-28T09:27:01.367436Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-03-28T09:27:01.367518Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-03-28T09:27:01.367557Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-03-28T09:27:01.367607Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED
2025-03-28T09:27:01.367683Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-03-28T09:27:01.531391Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-28T09:27:01.531448Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-28T09:27:01.531507Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0]
2025-03-28T09:27:01.531988Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888}
2025-03-28T09:27:01.532037Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-03-28T09:27:01.532162Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-03-28T09:27:01.532209Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed
2025-03-28T09:27:01.532250Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan
2025-03-28T09:27:01.532290Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue
2025-03-28T09:27:01.536879Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-03-28T09:27:01.536962Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-28T09:27:01.537753Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-03-28T09:27:01.537801Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-03-28T09:27:01.537888Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-28T09:27:0 ... planned 0 immediate 0 planned 0
2025-03-28T09:28:44.879182Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888
2025-03-28T09:28:44.879256Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-03-28T09:28:44.879320Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-03-28T09:28:44.879575Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:880:2712], Recipient [15:880:2712]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-03-28T09:28:44.879616Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-03-28T09:28:44.879670Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889
2025-03-28T09:28:44.879706Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1
2025-03-28T09:28:44.879741Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889
2025-03-28T09:28:44.879808Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889
2025-03-28T09:28:44.879845Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue
2025-03-28T09:28:44.879883Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed
2025-03-28T09:28:44.879915Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue
2025-03-28T09:28:44.879947Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails
2025-03-28T09:28:44.879996Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails
2025-03-28T09:28:44.880146Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0
2025-03-28T09:28:44.880192Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed
2025-03-28T09:28:44.880220Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit LoadTxDetails
2025-03-28T09:28:44.880250Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies
2025-03-28T09:28:44.880278Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit BuildAndWaitDependencies
2025-03-28T09:28:44.880488Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889
2025-03-28T09:28:44.880541Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889
2025-03-28T09:28:44.880588Z node 15 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889
2025-03-28T09:28:44.880635Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed
2025-03-28T09:28:44.880666Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies
2025-03-28T09:28:44.880697Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot
2025-03-28T09:28:44.880730Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot
2025-03-28T09:28:44.880840Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts
2025-03-28T09:28:44.880872Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot
2025-03-28T09:28:44.880914Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot
2025-03-28T09:28:44.880957Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot
2025-03-28T09:28:44.880989Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed
2025-03-28T09:28:44.881021Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot
2025-03-28T09:28:44.881050Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation
2025-03-28T09:28:44.881083Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation
2025-03-28T09:28:44.881231Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete
2025-03-28T09:28:44.881265Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation
2025-03-28T09:28:44.881305Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations
2025-03-28T09:28:44.881346Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations
2025-03-28T09:28:44.881409Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed
2025-03-28T09:28:44.881442Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations
2025-03-28T09:28:44.881488Z node 15 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished
2025-03-28T09:28:44.881532Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-03-28T09:28:44.881569Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889
2025-03-28T09:28:44.881609Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations
2025-03-28T09:28:44.881651Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889
2025-03-28T09:28:44.892928Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500}
2025-03-28T09:28:44.893093Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-28T09:28:44.893174Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation
2025-03-28T09:28:44.893287Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1075:2869], exec latency: 0 ms, propose latency: 0 ms
2025-03-28T09:28:44.893373Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-28T09:28:44.893547Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500}
2025-03-28T09:28:44.893596Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889
2025-03-28T09:28:44.893626Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation
2025-03-28T09:28:44.893670Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1075:2869], exec latency: 0 ms, propose latency: 0 ms
2025-03-28T09:28:44.893703Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889
2025-03-28T09:28:44.895767Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:593:2518], Recipient [15:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1
2025-03-28T09:28:44.895987Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0
2025-03-28T09:28:44.896105Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead
2025-03-28T09:28:44.896253Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed
2025-03-28T09:28:44.896334Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead
2025-03-28T09:28:44.896402Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies
2025-03-28T09:28:44.896468Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies
2025-03-28T09:28:44.896522Z node 15 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888
2025-03-28T09:28:44.896587Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed
2025-03-28T09:28:44.896623Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies
2025-03-28T09:28:44.896652Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead
2025-03-28T09:28:44.896678Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead
2025-03-28T09:28:44.896827Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 3 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW }
2025-03-28T09:28:44.897761Z node 15 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2]
2025-03-28T09:28:44.897850Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666
2025-03-28T09:28:44.897930Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:593:2518], 3} after executionsCount# 1
2025-03-28T09:28:44.898020Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 3} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0
2025-03-28T09:28:44.898314Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 3} finished in read
2025-03-28T09:28:44.898435Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed
2025-03-28T09:28:44.898469Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead
2025-03-28T09:28:44.898503Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations
2025-03-28T09:28:44.898535Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations
2025-03-28T09:28:44.898592Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed
2025-03-28T09:28:44.898620Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations
2025-03-28T09:28:44.898655Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished
2025-03-28T09:28:44.898744Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888
2025-03-28T09:28:44.898944Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888

------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::CreateTableWithNotNullColumns [GOOD]
Test command err:
Trying to start YDB, gRPC: 13102, MsgBus: 20350
2025-03-28T09:28:14.912787Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789828330023535:2069];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:28:14.912843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ae5/r3tmp/tmpipmbaW/pdisk_1.dat
2025-03-28T09:28:15.346681Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:28:15.368461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:28:15.368568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:28:15.390302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 13102, node 1
2025-03-28T09:28:15.543200Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:28:15.543225Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:28:15.543232Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:28:15.543371Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:20350
TClient is connected to server localhost:20350
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:28:16.340253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:28:18.159613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789845509893373:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:28:18.159827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:28:18.678764Z node 1 :TX_PROXY ERROR: Actor# [1:7486789845509893394:2309] txid# 281474976710658, issues: { message: "It is not allowed to create not null data column: Value" severity: 1 }
2025-03-28T09:28:18.719080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789845509893402:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:28:18.719150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:28:18.737209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 21070, MsgBus: 19474
2025-03-28T09:28:19.805029Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789850331600512:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:28:19.805085Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ae5/r3tmp/tmp85eViV/pdisk_1.dat
2025-03-28T09:28:20.024699Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:28:20.040258Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:28:20.040332Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:28:20.043425Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 21070, node 2
2025-03-28T09:28:20.126369Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:28:20.126392Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:28:20.126403Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:28:20.126515Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19474
TClient is connected to server localhost:19474
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:28:20.557308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:28:20.565544Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:28:20.577382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:28:20.661084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:28:20.873072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:28:20.961448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:28:23.079195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789867511471453:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:28:23.079306Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:28:23.140647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:28:23.168006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:28:23.199067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:28:23.233635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:28:23.279539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:28:23.317580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:28:23.406283Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789867511471968:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:28:23.406405Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:28:23.406637Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789867511471973:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:28:23.413763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:28:23.430037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486789867511471975:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:28:23.514563Z node 2 :TX_PROXY ERROR: Actor# [2:74867898675 ... 4046644480
Trying to start YDB, gRPC: 10852, MsgBus: 13746
2025-03-28T09:28:30.711058Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486789894942113944:2056];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:28:30.711107Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ae5/r3tmp/tmpYPt8lp/pdisk_1.dat
2025-03-28T09:28:30.957431Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:28:30.981319Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:28:30.981413Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:28:30.987388Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 10852, node 4
2025-03-28T09:28:31.071311Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:28:31.071342Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:28:31.071351Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:28:31.071508Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:13746
TClient is connected to server localhost:13746
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:28:31.674838Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:28:31.681962Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:28:34.780009Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486789912121983792:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:34.780095Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:34.804490Z node 4 :TX_PROXY ERROR: Actor# [4:7486789912121983813:2306] txid# 281474976710658, issues: { message: "It is not allowed to create not null data column: Value" severity: 1 } 2025-03-28T09:28:34.816827Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486789912121983821:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:34.816909Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:34.852310Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 32432, MsgBus: 15849 2025-03-28T09:28:35.951415Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486789917965526341:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:35.951459Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ae5/r3tmp/tmpCTehGw/pdisk_1.dat 2025-03-28T09:28:36.225045Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:36.226881Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:36.226972Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:36.228709Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32432, node 5 2025-03-28T09:28:36.285701Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:36.285728Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:36.285738Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:36.285877Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15849 TClient is connected to server localhost:15849 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:36.896392Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:36.903665Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:39.899620Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486789935145396177:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:39.899738Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:39.923602Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21217, MsgBus: 14172 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ae5/r3tmp/tmpT3IQEd/pdisk_1.dat 2025-03-28T09:28:41.002945Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486789937529991421:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:41.053400Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:28:41.142296Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:41.144484Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:41.144578Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:41.146965Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21217, node 6 2025-03-28T09:28:41.241370Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:41.241401Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:41.241413Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:41.241560Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14172 TClient is connected to server localhost:14172 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:41.873173Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:28:41.906825Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:45.005440Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 >> TPersQueueTest::Codecs_WriteMessageWithDefaultCodecs_MessagesAreAcknowledged [GOOD] >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError >> KqpRanges::UpdateWhereInFullScan+UseSink [GOOD] >> KqpRanges::UpdateWhereInFullScan-UseSink >> KqpNotNullColumns::UpdateNotNullPk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIterator::ShouldReturnBrokenLockWhenReadKeyWithContinueInvisibleRowSkips-EvWrite [GOOD] Test command err: 2025-03-28T09:27:00.494835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:27:00.495164Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:27:00.495224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001046/r3tmp/tmpZeDuZN/pdisk_1.dat 2025-03-28T09:27:00.900346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:27:00.952326Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:01.006872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:01.008465Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:01.023392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:27:01.140003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:27:01.204905Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:27:01.206175Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:27:01.207347Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:27:01.207676Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:27:01.266378Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:27:01.267142Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:27:01.267250Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:27:01.269066Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:27:01.269158Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:27:01.269202Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:27:01.270990Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:27:01.271201Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:27:01.271320Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:27:01.283394Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:27:01.308109Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:27:01.314810Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:27:01.315095Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:27:01.315139Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:27:01.315179Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:27:01.315237Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:27:01.315504Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:01.315571Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:01.319261Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:27:01.319436Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:27:01.327087Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:27:01.327175Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:27:01.327250Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:27:01.327294Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:27:01.327334Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:27:01.327373Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:27:01.327452Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:27:01.327654Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:01.327701Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:01.327767Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:27:01.327906Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:27:01.327951Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:27:01.328088Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:27:01.335029Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:27:01.335126Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:27:01.335258Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:27:01.335317Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:27:01.335364Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:27:01.335403Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-28T09:27:01.335477Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:27:01.335848Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:27:01.335901Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:27:01.335939Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:27:01.335988Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:27:01.336064Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:27:01.336102Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:27:01.336143Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:27:01.336203Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:27:01.336235Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:27:01.337879Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:27:01.337934Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:27:01.348757Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:27:01.348841Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:27:01.348880Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:27:01.348936Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:27:01.349023Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:27:01.509440Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:01.509514Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:01.509558Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:27:01.510034Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:27:01.510076Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:27:01.510240Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:27:01.510292Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:27:01.510329Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:27:01.510388Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:27:01.515337Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:27:01.515414Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:27:01.518924Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:01.518982Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:01.519051Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:27:0 ... ed 0 immediate 0 planned 0 2025-03-28T09:28:45.788927Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-28T09:28:45.788994Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:28:45.789060Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:28:45.789338Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [15:880:2712], Recipient [15:880:2712]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:28:45.789382Z node 15 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:28:45.789440Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:28:45.789477Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:28:45.789517Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-28T09:28:45.790087Z node 15 :TX_DATASHARD DEBUG: Found ready operation [3500:281474976715666] in PlanQueue unit at 72075186224037889 2025-03-28T09:28:45.790132Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit PlanQueue 2025-03-28T09:28:45.790170Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-03-28T09:28:45.790205Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit PlanQueue 2025-03-28T09:28:45.790244Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit LoadTxDetails 2025-03-28T09:28:45.790282Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit LoadTxDetails 2025-03-28T09:28:45.790450Z node 15 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3500:281474976715666 keys extracted: 0 2025-03-28T09:28:45.790509Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-03-28T09:28:45.790544Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit 
LoadTxDetails 2025-03-28T09:28:45.790578Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-28T09:28:45.790614Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-28T09:28:45.790662Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically complete end at 72075186224037889 2025-03-28T09:28:45.790733Z node 15 :TX_DATASHARD TRACE: Operation [3500:281474976715666] is the new logically incomplete end at 72075186224037889 2025-03-28T09:28:45.790784Z node 15 :TX_DATASHARD TRACE: Activated operation [3500:281474976715666] at 72075186224037889 2025-03-28T09:28:45.790831Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-03-28T09:28:45.790861Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-28T09:28:45.790893Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-03-28T09:28:45.790927Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CreateVolatileSnapshot 2025-03-28T09:28:45.791048Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is ExecutedNoMoreRestarts 2025-03-28T09:28:45.791082Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-03-28T09:28:45.791128Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-03-28T09:28:45.791166Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit DropVolatileSnapshot 2025-03-28T09:28:45.791222Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-03-28T09:28:45.791260Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-03-28T09:28:45.791305Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompleteOperation 2025-03-28T09:28:45.791343Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-03-28T09:28:45.791540Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is DelayComplete 2025-03-28T09:28:45.791584Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompleteOperation 2025-03-28T09:28:45.791634Z node 15 :TX_DATASHARD TRACE: Add [3500:281474976715666] at 72075186224037889 to execution unit CompletedOperations 2025-03-28T09:28:45.791679Z node 15 :TX_DATASHARD TRACE: Trying to execute [3500:281474976715666] at 72075186224037889 on unit CompletedOperations 2025-03-28T09:28:45.791721Z node 15 :TX_DATASHARD TRACE: Execution status for [3500:281474976715666] at 72075186224037889 is Executed 2025-03-28T09:28:45.791751Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [3500:281474976715666] at 72075186224037889 executing on unit CompletedOperations 2025-03-28T09:28:45.791787Z node 15 :TX_DATASHARD TRACE: Execution plan for [3500:281474976715666] at 72075186224037889 has finished 
2025-03-28T09:28:45.791837Z node 15 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:28:45.791882Z node 15 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-28T09:28:45.791926Z node 15 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-03-28T09:28:45.791998Z node 15 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-03-28T09:28:45.803336Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2025-03-28T09:28:45.803533Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:28:45.803622Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037888 on unit CompleteOperation 2025-03-28T09:28:45.803741Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037888 at tablet 72075186224037888 send result to client [15:1075:2869], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:28:45.803847Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:28:45.804262Z node 15 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3500} 2025-03-28T09:28:45.804321Z node 15 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:28:45.804353Z node 15 :TX_DATASHARD TRACE: Complete execution for [3500:281474976715666] at 72075186224037889 on unit CompleteOperation 2025-03-28T09:28:45.804397Z node 15 :TX_DATASHARD DEBUG: Complete [3500 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [15:1075:2869], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:28:45.804442Z node 15 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:28:45.806357Z node 15 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [15:593:2518], Recipient [15:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW KeysSize: 1 2025-03-28T09:28:45.806553Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:28:45.806675Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-03-28T09:28:45.806941Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-28T09:28:45.807027Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:28:45.807093Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:28:45.807175Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:28:45.807232Z node 15 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-03-28T09:28:45.807298Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-28T09:28:45.807333Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:28:45.807364Z node 15 
:TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:28:45.807399Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:28:45.807589Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 10 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3500 TxId: 281474976715666 } LockTxId: 1011121314 ResultFormat: FORMAT_ARROW } 2025-03-28T09:28:45.808127Z node 15 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 1011121314, counter# 18446744073709551615 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:28:45.808222Z node 15 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/281474976715666 2025-03-28T09:28:45.808327Z node 15 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[15:593:2518], 10} after executionsCount# 1 2025-03-28T09:28:45.808425Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 10} sends rowCount# 1, bytes# 16, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551599, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:28:45.808708Z node 15 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[15:593:2518], 10} finished in read 2025-03-28T09:28:45.808831Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-28T09:28:45.808864Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:28:45.808892Z node 15 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:28:45.808922Z node 15 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:28:45.808978Z node 15 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-28T09:28:45.809006Z node 15 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:28:45.809051Z node 15 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-03-28T09:28:45.809132Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:28:45.809343Z node 15 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> TPersQueueTest::SetupReadSession [GOOD] >> TPersQueueTest::TestBigMessage >> KqpRanges::IsNullPartial [GOOD] >> KqpRanges::LiteralOr >> KqpNotNullColumns::InsertNotNullPk >> KqpRanges::NullInKey ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbOlapStore::LogCountByResource [GOOD] Test command err: 2025-03-28T09:24:41.571774Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788912701809299:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:41.571832Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:24:41.685031Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486788914576668661:2077];send_to=[0:7307199536658146131:7762515]; 
2025-03-28T09:24:41.685413Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:24:41.691803Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486788913114655260:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:41.691840Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013cf/r3tmp/tmptfU8wk/pdisk_1.dat 2025-03-28T09:24:42.598868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:24:42.739455Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:24:42.741803Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:42.757700Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:24:42.769999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:42.770117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:42.773562Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:42.773631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:42.777423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:42.777469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:42.798439Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:24:42.798481Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T09:24:42.798617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:42.799450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:24:42.799589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24359, node 1 2025-03-28T09:24:43.301858Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:43.301876Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:43.301882Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:43.302072Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5182 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:43.903932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:46.572102Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486788912701809299:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:46.572950Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:24:46.685252Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486788914576668661:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:46.685368Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:24:46.694782Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486788913114655260:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:46.694852Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Killing node 1 Killing node 2 2025-03-28T09:24:57.714821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:24:57.714850Z node 1 :IMPORT WARN: Table profiles were not loaded Killing node 3 2025-03-28T09:25:05.885685Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486789016665470888:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:05.885751Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013cf/r3tmp/tmpRhR2BM/pdisk_1.dat 2025-03-28T09:25:06.040962Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:06.061328Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:06.061500Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:06.064074Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> 
Connected TServer::EnableGrpc on GrpcPort 9734, node 5 2025-03-28T09:25:06.164576Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:25:06.164600Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:25:06.164607Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:25:06.164746Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23707 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:06.372970Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... TClient is connected to server localhost:23707 2025-03-28T09:25:06.704590Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "Root" DiffACL: "\n\033\010\000\022\027\010\001\020\200\200\002\032\ralice@builtin \003" } } TxId: 281474976715658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:25:06.704769Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: //Root, operationId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:25:06.704884Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 1] name: Root type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:25:06.704900Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:25:06.705043Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715658:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-03-28T09:25:06.705069Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:25:06.705132Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-03-28T09:25:06.705167Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-03-28T09:25:06.705198Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715658:0 progress is 1/1 2025-03-28T09:25:06.705208Z node 5 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToDone TxId: 281474976715658 ready parts: 1/1 2025-03-28T09:25:06.705247Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:25:06.705295Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715658, ready parts: 1/1, is published: false 2025-03-28T09:25:06.705314Z node 5 :FLAT_TX_ ... 4123265 } MaxMemoryUsage: 1048576 } 2025-03-28T09:28:43.267193Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7486789953167274359:3530] TxId: 281474976710674. Ctx: { TraceId: 01jqe1a2b7b7ayavq6529d279g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [50:7486789953167274362:3816], 2025-03-28T09:28:43.267253Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=, ActorId: [50:7486789944577338927:3530], ActorState: ExecuteState, TraceId: 01jqe1a2b7b7ayavq6529d279g, Forwarded TEvStreamData to [50:7486789944577338925:3529] 2025-03-28T09:28:43.267284Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7486789953167274362:3816], TxId: 281474976710674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=. CustomerSuppliedId : . TraceId : 01jqe1a2b7b7ayavq6529d279g. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646926 2025-03-28T09:28:43.267398Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7486789953167274362:3816], TxId: 281474976710674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=. CustomerSuppliedId : . TraceId : 01jqe1a2b7b7ayavq6529d279g. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 SrcEndpoint { ActorId { RawX1: 7486789953167274362 RawX2: 4503814375739112 } } DstEndpoint { ActorId { RawX1: 7486789953167274359 RawX2: 4503814375738826 } } InMemory: true } 2025-03-28T09:28:43.267455Z node 50 :KQP_COMPUTE DEBUG: TxId: 281474976710674, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2025-03-28T09:28:43.267983Z node 50 :KQP_EXECUTER DEBUG: TxId: 281474976710674, send ack to channelId: 1, seqNo: 1, enough: 0, freeSpace: 8388572, to: [50:7486789953167274363:3816] 2025-03-28T09:28:43.268045Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7486789953167274362:3816], TxId: 281474976710674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=. CustomerSuppliedId : . TraceId : 01jqe1a2b7b7ayavq6529d279g. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:28:43.268072Z node 50 :KQP_COMPUTE DEBUG: TxId: 281474976710674, task: 1. Tasks execution finished 2025-03-28T09:28:43.268094Z node 50 :KQP_COMPUTE DEBUG: SelfId: [50:7486789953167274362:3816], TxId: 281474976710674, task: 1. Ctx: { SessionId : ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=. CustomerSuppliedId : . TraceId : 01jqe1a2b7b7ayavq6529d279g. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. 
All channels and sinks finished 2025-03-28T09:28:43.268193Z node 50 :KQP_COMPUTE DEBUG: TxId: 281474976710674, task: 1. pass away 2025-03-28T09:28:43.268292Z node 50 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710674;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:28:43.268577Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7486789953167274359:3530] TxId: 281474976710674. Ctx: { TraceId: 01jqe1a2b7b7ayavq6529d279g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [50:7486789953167274362:3816], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 1082 Tasks { TaskId: 1 CpuTimeUs: 198 FinishTimeMs: 1743154123268 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 82 BuildCpuTimeUs: 116 HostName: "ghrun-mdsjyjltvm" NodeId: 50 CreateTimeMs: 1743154123265 } MaxMemoryUsage: 1048576 } 2025-03-28T09:28:43.268636Z node 50 :KQP_EXECUTER INFO: TxId: 281474976710674. Ctx: { TraceId: 01jqe1a2b7b7ayavq6529d279g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [50:7486789953167274362:3816] 2025-03-28T09:28:43.269767Z node 50 :KQP_EXECUTER INFO: ActorId: [50:7486789953167274359:3530] TxId: 281474976710674. Ctx: { TraceId: 01jqe1a2b7b7ayavq6529d279g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 4254 DurationUs: 4078 ExecuterCpuTimeUs: 3172 StartTimeMs: 1743154123264 FinishTimeMs: 1743154123268 Stages { StageGuid: "ac1435e9-156d9524-6472b653-aec86cfb" Program: "(\n(declare %kqp%tx_result_binding_0_0 (ListType (StructType \'(\'\"column0\" (DataType \'Uint64)))))\n(return (lambda \'() (Iterator %kqp%tx_result_binding_0_0)))\n)\n" ComputeActors { CpuTimeUs: 1082 Tasks { TaskId: 1 CpuTimeUs: 198 FinishTimeMs: 1743154123268 OutputRows: 1 OutputBytes: 3 ResultRows: 1 ResultBytes: 3 ComputeCpuTimeUs: 82 BuildCpuTimeUs: 116 HostName: "ghrun-mdsjyjltvm" NodeId: 50 CreateTimeMs: 1743154123265 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743154123266 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":8,\"Plans\":[{\"Node Type\":\"ResultSet_1\",\"PlanNodeId\":7,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"CTE Name\":\"precompute_0_0\",\"Node Type\":\"ConstantExpr\",\"Operators\":[{\"Inputs\":[],\"Iterator\":\"precompute_0_0\",\"Name\":\"Iterator\"}],\"PlanNodeId\":6,\"StageGuid\":\"ac1435e9-156d9524-6472b653-aec86cfb\",\"Stats\":{\"BaseTimeMs\":1743154123266,\"ComputeNodes\":[{\"CpuTimeUs\":1082,\"Tasks\":[{\"ComputeTimeUs\":82,\"FinishTimeMs\":1743154123268,\"Host\":\"ghrun-mdsjyjltvm\",\"NodeId\":50,\"OutputBytes\":3,\"OutputRows\":1,\"ResultBytes\":3,\"ResultRows\":1,\"TaskId\":1}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 686 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\022\013\010\272\010\020\272\010\030\272\010 \001" } } 2025-03-28T09:28:43.269803Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7486789953167274359:3530] TxId: 281474976710674. Ctx: { TraceId: 01jqe1a2b7b7ayavq6529d279g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 
2025-03-28T09:28:43.269843Z node 50 :KQP_EXECUTER DEBUG: ActorId: [50:7486789953167274359:3530] TxId: 281474976710674. Ctx: { TraceId: 01jqe1a2b7b7ayavq6529d279g, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.001082s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-28T09:28:43.269915Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=, ActorId: [50:7486789944577338927:3530], ActorState: ExecuteState, TraceId: 01jqe1a2b7b7ayavq6529d279g, TEvTxResponse, CurrentTx: 2/2 response.status: SUCCESS 2025-03-28T09:28:43.270293Z node 50 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=, ActorId: [50:7486789944577338927:3530], ActorState: ExecuteState, TraceId: 01jqe1a2b7b7ayavq6529d279g, txInfo Status: Active Kind: ReadOnly TotalDuration: 0 ServerDuration: 1287.198 QueriesCount: 1 2025-03-28T09:28:43.270350Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=, ActorId: [50:7486789944577338927:3530], ActorState: ExecuteState, TraceId: 01jqe1a2b7b7ayavq6529d279g, Create QueryResponse for action: QUERY_ACTION_EXECUTE with SUCCESS status 2025-03-28T09:28:43.270439Z node 50 :KQP_SESSION INFO: SessionId: ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=, ActorId: [50:7486789944577338927:3530], ActorState: ExecuteState, TraceId: 01jqe1a2b7b7ayavq6529d279g, Cleanup start, isFinal: 1 CleanupCtx: 0 TransactionsToBeAborted.size(): 0 WorkerId: [0:0:0] WorkloadServiceCleanup: 0 2025-03-28T09:28:43.270473Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=, ActorId: [50:7486789944577338927:3530], ActorState: ExecuteState, TraceId: 01jqe1a2b7b7ayavq6529d279g, EndCleanup, isFinal: 1 2025-03-28T09:28:43.270531Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=, ActorId: [50:7486789944577338927:3530], ActorState: ExecuteState, TraceId: 01jqe1a2b7b7ayavq6529d279g, Sent query response back to proxy, proxyRequestId: 5, proxyId: [50:7486789880152825133:2280] 2025-03-28T09:28:43.270562Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=, ActorId: [50:7486789944577338927:3530], ActorState: unknown state, TraceId: 01jqe1a2b7b7ayavq6529d279g, Cleanup temp tables: 0 2025-03-28T09:28:43.272122Z node 50 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154122000, txId: 18446744073709551615] shutting down 2025-03-28T09:28:43.272282Z node 50 :KQP_SESSION DEBUG: SessionId: ydb://session/3?node_id=50&id=MmQ1ODkzNjUtNGNkNjgxMWUtZWJiOThlZjgtNDVkNjUxMmM=, ActorId: [50:7486789944577338927:3530], ActorState: unknown state, TraceId: 01jqe1a2b7b7ayavq6529d279g, Session actor destroyed RESULT: [[3u]] --------------------- STATS: total CPU: 1418 duration: 1269333 usec cpu: 934605 usec { name: "/Root/OlapStore/log1" reads { rows: 2 bytes: 16 } } duration: 4078 usec cpu: 4254 usec 2025-03-28T09:28:43.327284Z node 50 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037891;self_id=[50:7486789888742760611:2326];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2025-03-28T09:28:43.329266Z node 50 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;self_id=[50:7486789888742760607:2325];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; 2025-03-28T09:28:43.515224Z node 50 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;self_id=[50:7486789888742760647:2327];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2025-03-28T09:28:43.516534Z node 50 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;self_id=[50:7486789888742760604:2324];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2025-03-28T09:28:43.542890Z node 50 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037889;parent=[50:7486789888742760647:2327];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-28T09:28:43.542973Z node 50 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037890;parent=[50:7486789888742760607:2325];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-28T09:28:43.554869Z node 50 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;parent=[50:7486789888742760604:2324];fline=actor.cpp:33;event=skip_flush_writing; 2025-03-28T09:28:43.554962Z node 50 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037891;parent=[50:7486789888742760611:2326];fline=actor.cpp:33;event=skip_flush_writing; >> KqpNewEngine::MultiSelect [GOOD] >> KqpNewEngine::MultiOutput >> KqpNotNullColumns::InsertNotNullPkPg+useSink >> KqpNewEngine::PkRangeSelect4 [GOOD] >> KqpNewEngine::PrecomputeKey >> KqpKv::ReadRows_PgValue [GOOD] >> KqpKv::ReadRows_PgKey >> KqpNewEngine::ScalarFunctions [GOOD] >> KqpNewEngine::ScalarMultiUsage >> TPQCompatTest::LongProducerAndLongMessageGroupId [GOOD] >> TPQCompatTest::ReadWriteSessions >> KqpSort::TopSortTableExpr [GOOD] >> KqpSort::TopSortResults >> KqpNewEngine::DeleteOn+UseSink [GOOD] >> KqpNewEngine::DeleteOn-UseSink >> KqpMergeCn::TopSortBy_Utf8_Limit2 >> KqpMergeCn::TopSortBy_Float_Limit4 [GOOD] >> KqpMergeCn::TopSortBy_String_Limit3 >> KqpNewEngine::SqlInAsScalar [GOOD] >> KqpNewEngine::SequentialReadsPragma-Enabled >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] >> KqpNewEngine::ItemsLimit [GOOD] >> KqpNewEngine::JoinDictWithPure >> KqpNewEngine::KeyColumnOrder [GOOD] >> KqpNewEngine::KeyColumnOrder2 >> KqpRanges::IsNull [GOOD] >> KqpRanges::IsNotNullSecondComponent >> KqpNotNullColumns::UpdateNotNullPk [GOOD] >> KqpNotNullColumns::UpdateNotNullPkPg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] Test command err: 2025-03-28T09:26:44.707638Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:44.791941Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:44.814125Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:44.814445Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:44.822741Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:44.822966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:44.823198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:44.823271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:44.823336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:44.823443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:44.823540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:44.823645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:44.823751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:44.823816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.823880Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:44.823963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:44.846402Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:44.846732Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:44.846783Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:44.846927Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:44.847073Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:44.847147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:44.847183Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:44.847271Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:44.847334Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:44.847369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:44.847427Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:44.847545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:44.847585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:44.847640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:44.847671Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:44.847810Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:44.847851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:44.847899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:44.847921Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:44.847982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:44.848013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:44.848036Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-03-28T09:26:44.848076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:44.848100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:44.848124Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:44.848425Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=35; 2025-03-28T09:26:44.848513Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-03-28T09:26:44.848598Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=38; 2025-03-28T09:26:44.848810Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=174; 2025-03-28T09:26:44.848934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:44.848974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:44.849002Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:44.849162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:44.849208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.849232Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.849343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:44.849374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:44.849393Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:44.849522Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:44.849555Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:44.849576Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:44.849683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:44.849731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:44.849789Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 0.716692Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:81:2696:0]; 2025-03-28T09:28:50.716748Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:82:8528:0]; 2025-03-28T09:28:50.716808Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:83:2776:0]; 2025-03-28T09:28:50.716872Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:84:2768:0]; 2025-03-28T09:28:50.716930Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:85:2768:0]; 2025-03-28T09:28:50.717009Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:86:2768:0]; 2025-03-28T09:28:50.717085Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:87:2768:0]; 2025-03-28T09:28:50.717157Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:88:2768:0]; 2025-03-28T09:28:50.717222Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:89:2768:0]; 2025-03-28T09:28:50.717276Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:90:2768:0]; 2025-03-28T09:28:50.717345Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:91:2768:0]; 2025-03-28T09:28:50.717416Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:92:2768:0]; 2025-03-28T09:28:50.717479Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:93:2768:0]; 2025-03-28T09:28:50.717553Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:94:2768:0]; 2025-03-28T09:28:50.717625Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:95:2768:0]; 2025-03-28T09:28:50.717701Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:96:2768:0]; 2025-03-28T09:28:50.717769Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:97:2768:0]; 2025-03-28T09:28:50.717865Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:98:2768:0]; 2025-03-28T09:28:50.717933Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:99:2768:0]; 2025-03-28T09:28:50.718007Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:100:2768:0]; 2025-03-28T09:28:50.718068Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:101:2768:0]; 2025-03-28T09:28:50.718138Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:102:2768:0]; 2025-03-28T09:28:50.718225Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:103:2768:0]; 2025-03-28T09:28:50.718296Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:104:2768:0]; 2025-03-28T09:28:50.718363Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:105:2768:0]; 2025-03-28T09:28:50.718417Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:106:2768:0]; 2025-03-28T09:28:50.718492Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:107:2768:0]; 2025-03-28T09:28:50.718567Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:108:2768:0]; 2025-03-28T09:28:50.718629Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:109:2768:0]; 2025-03-28T09:28:50.718840Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:110:2768:0]; 2025-03-28T09:28:50.718923Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:111:2768:0]; 2025-03-28T09:28:50.719000Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:112:2768:0]; 2025-03-28T09:28:50.719066Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:113:2768:0]; 2025-03-28T09:28:50.719125Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:114:2768:0]; 2025-03-28T09:28:50.719182Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:115:2768:0]; 2025-03-28T09:28:50.719293Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:116:2768:0]; 2025-03-28T09:28:50.719375Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:117:2768:0]; 2025-03-28T09:28:50.719429Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:118:2768:0]; 2025-03-28T09:28:50.719497Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:119:2696:0]; 2025-03-28T09:28:50.719568Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:120:2696:0]; 2025-03-28T09:28:50.719657Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:121:2696:0]; 2025-03-28T09:28:50.719733Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:122:2696:0]; 2025-03-28T09:28:50.719801Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:123:8528:0]; 2025-03-28T09:28:50.719880Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:124:2768:0]; 2025-03-28T09:28:50.719938Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:125:2768:0]; 2025-03-28T09:28:50.720006Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:126:2768:0]; 2025-03-28T09:28:50.720080Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:127:2768:0]; 2025-03-28T09:28:50.720163Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:128:2768:0]; 2025-03-28T09:28:50.720228Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:129:2768:0]; 2025-03-28T09:28:50.720287Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:130:2768:0]; 2025-03-28T09:28:50.720347Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:131:2768:0]; 2025-03-28T09:28:50.720402Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:132:2768:0]; 2025-03-28T09:28:50.720455Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:133:2768:0]; 2025-03-28T09:28:50.720512Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:134:2768:0]; 2025-03-28T09:28:50.720570Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:135:2768:0]; 2025-03-28T09:28:50.720629Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:136:2768:0]; 2025-03-28T09:28:50.720686Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:137:2768:0]; 2025-03-28T09:28:50.720746Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:138:2768:0]; 2025-03-28T09:28:50.720802Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:139:2768:0]; 2025-03-28T09:28:50.720889Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:140:2768:0]; 2025-03-28T09:28:50.720968Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:141:2768:0]; 2025-03-28T09:28:50.721040Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:142:2768:0]; 2025-03-28T09:28:50.721125Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:143:2768:0]; 2025-03-28T09:28:50.721200Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:144:2768:0]; 2025-03-28T09:28:50.721270Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:145:2768:0]; 2025-03-28T09:28:50.721335Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:146:2768:0]; 2025-03-28T09:28:50.721404Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:147:2768:0]; 2025-03-28T09:28:50.721463Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:148:2768:0]; 2025-03-28T09:28:50.721514Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:149:2768:0]; 2025-03-28T09:28:50.721565Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:150:2768:0]; 2025-03-28T09:28:50.721630Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:151:2768:0]; 2025-03-28T09:28:50.721696Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:152:2768:0]; 2025-03-28T09:28:50.721759Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:153:2768:0]; 2025-03-28T09:28:50.721817Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:154:2768:0]; 2025-03-28T09:28:50.721870Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:155:2768:0]; 2025-03-28T09:28:50.721931Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:156:2768:0]; 2025-03-28T09:28:50.721987Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:157:2768:0]; 2025-03-28T09:28:50.722044Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:158:2768:0]; 2025-03-28T09:28:50.722117Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:159:2768:0]; 2025-03-28T09:28:50.722187Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:160:2696:0]; 2025-03-28T09:28:50.722243Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:161:2696:0]; 2025-03-28T09:28:50.722300Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:162:2696:0]; 2025-03-28T09:28:50.722355Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:163:2696:0]; 2025-03-28T09:28:50.722412Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:2:255:164:8528:0]; 2025-03-28T09:28:51.399821Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-28T09:28:51.400640Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[4] (CS::GENERAL) apply at tablet 9437184 2025-03-28T09:28:51.537738Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:2 Blob count: 692 2025-03-28T09:28:51.545053Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2078720;raw_bytes=2324579;count=1;records=24469} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7587944;raw_bytes=7088522;count=3;records=75200} inactive {blob_bytes=100419184;raw_bytes=104021253;count=42;records=1103721} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpRanges::DuplicateCompositeKeyPredicate [GOOD] >> KqpRanges::DeleteNotFullScan-UseSink >> KqpNotNullColumns::InsertNotNullPk [GOOD] >> KqpNotNullColumns::InsertNotNull >> KqpSqlIn::KeySuffix_OnlyTail [GOOD] >> KqpSqlIn::KeySuffix_NotPointPrefix >> KqpNotNullColumns::InsertNotNullPkPg+useSink [GOOD] >> KqpNotNullColumns::InsertNotNullPkPg-useSink >> KqpNewEngine::MultiOutput [GOOD] >> KqpNewEngine::MultiStatement >> KqpNotNullColumns::ReplaceNotNull >> KqpKv::ReadRows_PgKey [GOOD] >> KqpRanges::LiteralOr [GOOD] >> KqpRanges::LiteralOrCompisite >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloaded [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot >> KqpRanges::UpdateWhereInFullScan-UseSink [GOOD] >> KqpRanges::ScanKeyPrefix >> KqpRanges::NullInKey [GOOD] >> KqpRanges::NullInKeySuffix >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] >> KqpNewEngine::DeleteOn-UseSink [GOOD] >> KqpNewEngine::DeleteWithBuiltin+UseSink >> KqpNotNullColumns::UpdateTable_UniqIndexPg [GOOD] >> KqpNotNullColumns::UpdateTable_Immediate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpKv::ReadRows_PgKey [GOOD] Test command err: Trying to start YDB, gRPC: 29321, MsgBus: 29288 
2025-03-28T09:28:14.924811Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789829850439832:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:14.926408Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000aef/r3tmp/tmpoTRfCw/pdisk_1.dat 2025-03-28T09:28:15.391393Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:15.396177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:15.396280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29321, node 1 2025-03-28T09:28:15.398998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:15.537453Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:15.537477Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:15.537487Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:15.537678Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29288 TClient is connected to server localhost:29288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:16.446888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:16.479521Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:18.201006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789847030309681:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.201126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.674348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS Trying to start YDB, gRPC: 32555, MsgBus: 9250 2025-03-28T09:28:19.820789Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789848679203656:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:19.822778Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000aef/r3tmp/tmp189iDB/pdisk_1.dat 2025-03-28T09:28:19.957623Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:19.991599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:19.991675Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:19.995125Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32555, node 2 2025-03-28T09:28:20.077348Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:20.077394Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:20.077402Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:20.077553Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9250 TClient is connected to server localhost:9250 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:20.560277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:20.569804Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:28:23.038833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789865859073499:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:23.038925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:23.055863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:28:23.110963Z node 2 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Unknown table '/Root/WrongTable' Trying to start YDB, gRPC: 63322, MsgBus: 6982 2025-03-28T09:28:23.919938Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486789867213202615:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:23.919989Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000aef/r3tmp/tmpIINK5w/pdisk_1.dat 2025-03-28T09:28:24.125133Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:24.154807Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:24.154890Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:24.156159Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63322, node 3 2025-03-28T09:28:24.337638Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:24.337665Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:24.337674Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:24.337796Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6982 TClient is connected to server localhost:6982 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:24.924748Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:28:24.932147Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:27.444466Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486789884393072441:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:27.444567Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:27.466361Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 IsSuccess(): 1 GetStatus(): SUCCESS [] IsSuccess(): 1 GetStatus(): ... #72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037920 not found 2025-03-28T09:28:47.190123Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715724:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.326202Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037921 not found 2025-03-28T09:28:47.337409Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715726:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.445727Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037922 not found 2025-03-28T09:28:47.450268Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715728:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.571043Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037923 not found 2025-03-28T09:28:47.576119Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715730:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.686283Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715732:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.691279Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037924 not found 2025-03-28T09:28:47.773169Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037925 not found 2025-03-28T09:28:47.780045Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715734:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.898382Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037926 not found 2025-03-28T09:28:47.900654Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715736:0, at schemeshard: 72057594046644480 2025-03-28T09:28:48.018881Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037927 not found 2025-03-28T09:28:48.032452Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715738:0, at schemeshard: 72057594046644480 2025-03-28T09:28:48.124580Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037928 not found 2025-03-28T09:28:48.130254Z node 6 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715740:0, at schemeshard: 72057594046644480 2025-03-28T09:28:48.240448Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715742:0, at schemeshard: 72057594046644480 2025-03-28T09:28:48.240834Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037929 not found 2025-03-28T09:28:48.353420Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037930 not found 2025-03-28T09:28:48.356804Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715744:0, at schemeshard: 72057594046644480 2025-03-28T09:28:48.499768Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037931 not found 2025-03-28T09:28:48.509276Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715746:0, at schemeshard: 72057594046644480 2025-03-28T09:28:48.658038Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037932 not found 2025-03-28T09:28:48.665350Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715748:0, at schemeshard: 72057594046644480 2025-03-28T09:28:48.828455Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037933 not found 2025-03-28T09:28:48.838554Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715750:0, at schemeshard: 72057594046644480 2025-03-28T09:28:48.943130Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037934 not found 2025-03-28T09:28:48.948339Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715752:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.086020Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037935 not found 2025-03-28T09:28:49.094773Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715754:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.192996Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037936 not found 2025-03-28T09:28:49.195349Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715756:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.304071Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037937 not found 2025-03-28T09:28:49.307506Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715758:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.462602Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037938 not found 2025-03-28T09:28:49.466752Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715760:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.633056Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037939 not found Trying to start YDB, gRPC: 26659, MsgBus: 27478 2025-03-28T09:28:50.896181Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486789981216592715:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:50.897682Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000aef/r3tmp/tmpczcccV/pdisk_1.dat 2025-03-28T09:28:51.079475Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:51.081757Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:51.081840Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:51.083186Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26659, node 7 2025-03-28T09:28:51.159309Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:51.159334Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:51.159346Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:51.159510Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27478 TClient is connected to server localhost:27478 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:51.905935Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:28:51.920462Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:55.418659Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.560366Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2025-03-28T09:28:55.568285Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.681720Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.687449Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2025-03-28T09:28:55.756263Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486789981216592715:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:55.756392Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:28:55.822833Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found >> KqpNewEngine::ScalarMultiUsage [GOOD] >> KqpNewEngine::SequentialReadsPragma+Enabled >> KqpNotNullColumns::UpdateNotNullPkPg [GOOD] >> KqpNotNullColumns::UpdateNotNull >> TTopicYqlTest::CreateAndAlterTopicYql [GOOD] >> TTopicYqlTest::BadRequests >> TPersQueueTest::LOGBROKER_7820 [GOOD] >> TPersQueueTest::InflightLimit >> KqpNewEngine::KeyColumnOrder2 [GOOD] >> KqpNewEngine::LocksEffects >> TPersQueueTest::FetchRequest [GOOD] >> TPersQueueTest::EventBatching >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] >> KqpNewEngine::PrecomputeKey [GOOD] >> KqpNewEngine::PrimaryView >> KqpMergeCn::TopSortBy_Utf8_Limit2 [GOOD] >> KqpNewEngine::Aggregate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] Test command err: 2025-03-28T09:26:53.739168Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:53.820303Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:53.845857Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:53.846185Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:53.854844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:53.855076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:53.855318Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:53.855435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:53.855530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:53.855670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:53.855807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:53.855934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:53.856050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:53.856147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:53.856270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:53.856379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:53.886460Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:53.886793Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:53.886842Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:53.887019Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:53.887167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:53.887257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:53.887302Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:53.887396Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:53.887473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:53.887514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:53.887560Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:53.887740Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:53.887802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:53.887840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:53.887864Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:53.887991Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:53.888054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:53.888102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:53.888150Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:53.888221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:53.888252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:53.888280Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:53.888324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:53.888361Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:53.888394Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:53.888742Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-03-28T09:26:53.888845Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=52; 2025-03-28T09:26:53.888948Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=50; 2025-03-28T09:26:53.889051Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=50; 2025-03-28T09:26:53.889234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:53.889299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:53.889332Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:53.889518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:53.889562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:53.889591Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:53.889751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:53.889801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:53.889828Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:53.889994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:53.890039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:53.890101Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:53.890241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:53.890284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:53.890332Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... APPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:117:2768:0]; 2025-03-28T09:28:56.015349Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:118:2768:0]; 2025-03-28T09:28:56.015445Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:119:2768:0]; 2025-03-28T09:28:56.015511Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:120:2768:0]; 2025-03-28T09:28:56.015591Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:121:2768:0]; 2025-03-28T09:28:56.015653Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:122:2768:0]; 2025-03-28T09:28:56.015714Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:123:2768:0]; 2025-03-28T09:28:56.015781Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:124:2768:0]; 2025-03-28T09:28:56.015858Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:125:2768:0]; 2025-03-28T09:28:56.015925Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:126:2768:0]; 2025-03-28T09:28:56.016000Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:127:2768:0]; 2025-03-28T09:28:56.016086Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:128:2696:0]; 2025-03-28T09:28:56.016149Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:129:2696:0]; 2025-03-28T09:28:56.016212Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:130:2696:0]; 2025-03-28T09:28:56.016277Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:131:2696:0]; 2025-03-28T09:28:56.016363Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:132:8920:0]; 2025-03-28T09:28:56.016428Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:133:2776:0]; 2025-03-28T09:28:56.016488Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:134:2776:0]; 2025-03-28T09:28:56.016554Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:135:2776:0]; 2025-03-28T09:28:56.016616Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:136:2776:0]; 2025-03-28T09:28:56.016680Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:137:2776:0]; 2025-03-28T09:28:56.016770Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:138:2768:0]; 2025-03-28T09:28:56.016837Z node 1 
:S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:139:2768:0]; 2025-03-28T09:28:56.016901Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:140:2768:0]; 2025-03-28T09:28:56.016993Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:141:2768:0]; 2025-03-28T09:28:56.017061Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:142:2768:0]; 2025-03-28T09:28:56.017129Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:143:2768:0]; 2025-03-28T09:28:56.017197Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:144:2768:0]; 2025-03-28T09:28:56.017267Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:145:2768:0]; 2025-03-28T09:28:56.017329Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:146:2768:0]; 2025-03-28T09:28:56.017431Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:147:2768:0]; 2025-03-28T09:28:56.017501Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:148:2768:0]; 2025-03-28T09:28:56.017567Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:149:2768:0]; 2025-03-28T09:28:56.017638Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:150:2768:0]; 2025-03-28T09:28:56.017709Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:151:2768:0]; 2025-03-28T09:28:56.017777Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:152:2768:0]; 2025-03-28T09:28:56.017866Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:153:2768:0]; 2025-03-28T09:28:56.017933Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:154:2768:0]; 2025-03-28T09:28:56.017998Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:155:2768:0]; 2025-03-28T09:28:56.018061Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:156:2768:0]; 2025-03-28T09:28:56.018129Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:157:2768:0]; 2025-03-28T09:28:56.018193Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:158:2768:0]; 2025-03-28T09:28:56.018262Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:159:2768:0]; 2025-03-28T09:28:56.018340Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:160:2768:0]; 2025-03-28T09:28:56.018414Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:161:2768:0]; 2025-03-28T09:28:56.018484Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:162:2768:0]; 2025-03-28T09:28:56.018572Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:163:2768:0]; 2025-03-28T09:28:56.018640Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:164:2768:0]; 2025-03-28T09:28:56.018819Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:165:2768:0]; 2025-03-28T09:28:56.018892Z node 1 :S3_WRAPPER 
DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:166:2768:0]; 2025-03-28T09:28:56.018954Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:167:2768:0]; 2025-03-28T09:28:56.019021Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:168:2768:0]; 2025-03-28T09:28:56.019087Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:169:2768:0]; 2025-03-28T09:28:56.019149Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:170:2768:0]; 2025-03-28T09:28:56.019218Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:171:2768:0]; 2025-03-28T09:28:56.019280Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:172:2696:0]; 2025-03-28T09:28:56.019342Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:173:2696:0]; 2025-03-28T09:28:56.019404Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:174:2696:0]; 2025-03-28T09:28:56.019469Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:175:2696:0]; 2025-03-28T09:28:56.019534Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:4:1:255:176:8904:0]; 2025-03-28T09:28:56.622953Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-28T09:28:56.623887Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[2] (CS::GENERAL) apply at tablet 9437184 2025-03-28T09:28:56.740771Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 747 2025-03-28T09:28:56.748651Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2213112;raw_bytes=2475629;count=1;records=26059} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=7587944;raw_bytes=7088522;count=3;records=75200} inactive {blob_bytes=100101848;raw_bytes=103700153;count=42;records=1100341} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:28:57.138245Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1115efd0-bb711f0-b330f8c9-277174a8;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-03-28T09:28:57.138319Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1115efd0-bb711f0-b330f8c9-277174a8;fline=with_appended.cpp:65;portions=47,;task_id=1115efd0-bb711f0-b330f8c9-277174a8; 2025-03-28T09:28:57.139381Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1115efd0-bb711f0-b330f8c9-277174a8;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::1115efd0-bb711f0-b330f8c9-277174a8; 2025-03-28T09:28:57.139480Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1115efd0-bb711f0-b330f8c9-277174a8;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:1;path_id:1;size:7587944;portions_count:47;); 2025-03-28T09:28:57.139532Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1115efd0-bb711f0-b330f8c9-277174a8;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:28:57.139604Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1115efd0-bb711f0-b330f8c9-277174a8;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:28:57.139671Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;task_id=1115efd0-bb711f0-b330f8c9-277174a8;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=8; 2025-03-28T09:28:57.139745Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1115efd0-bb711f0-b330f8c9-277174a8;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=102; 2025-03-28T09:28:57.139793Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1115efd0-bb711f0-b330f8c9-277174a8;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=8;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:28:57.139847Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1115efd0-bb711f0-b330f8c9-277174a8;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:28:57.139890Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1115efd0-bb711f0-b330f8c9-277174a8;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:28:57.139996Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1115efd0-bb711f0-b330f8c9-277174a8;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.944000s; 2025-03-28T09:28:57.140053Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;task_id=1115efd0-bb711f0-b330f8c9-277174a8;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:28:57.140259Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 4:1 Blob count: 747 2025-03-28T09:28:57.142072Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=39;external_task_id=1115efd0-bb711f0-b330f8c9-277174a8;mem=11009198;cpu=0; 2025-03-28T09:28:57.143803Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; >> KqpNotNullColumns::InsertNotNull [GOOD] >> KqpNotNullColumns::InsertFromSelect >> KqpMergeCn::TopSortBy_String_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Timestamp_Limit2 >> KqpReturning::ReturningWorksIndexedDeleteV2-QueryService >> KqpNewEngine::SequentialReadsPragma-Enabled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] Test command err: 2025-03-28T09:26:36.232376Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:36.395639Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:36.422559Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:36.423015Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:36.431321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:36.431573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
2025-03-28T09:26:36.431846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:36.431992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:36.432116Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:36.432251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:36.432382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:36.432513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:36.432642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:36.432765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:36.432879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:36.432997Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:36.479107Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:36.479464Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:36.479516Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:36.479700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:36.479833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:36.479898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:36.479929Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:36.480015Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:36.480078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:36.480108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:36.480131Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:36.480272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:36.480315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:36.480338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:36.480373Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:36.480470Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:36.480510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:36.480545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:36.480565Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:36.480613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:36.480636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:36.480663Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:36.480714Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:36.480748Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:36.480773Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:36.481068Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=40; 2025-03-28T09:26:36.481161Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=52; 2025-03-28T09:26:36.481231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=36; 2025-03-28T09:26:36.481290Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=25; 2025-03-28T09:26:36.481424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:36.481478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:36.481505Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:36.481636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:36.481660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:36.481682Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:36.481816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:36.481846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:36.481865Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:36.481992Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:36.482021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:36.482045Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:36.482138Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:36.482166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:36.482216Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;
blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;b
lob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;bl
ob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:44;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););(portion_id:48;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:49;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-03-28T09:28:58.418228Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5927:7919];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-03-28T09:28:58.419839Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5927:7919];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpNotNullColumns::InsertNotNullPkPg-useSink [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk+StreamLookup >> KqpNewEngine::JoinDictWithPure [GOOD] >> KqpNewEngine::IdxLookupExtractMembers >> KqpNotNullColumns::ReplaceNotNull [GOOD] >> KqpNotNullColumns::ReplaceNotNullPg >> KqpRanges::UpdateWhereInNoFullScan+UseSink >> KqpRanges::IsNotNullSecondComponent [GOOD] >> KqpRanges::IsNullInValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::SequentialReadsPragma-Enabled [GOOD] Test command err: Trying to start YDB, gRPC: 62082, MsgBus: 11621 2025-03-28T09:28:15.754918Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789832947211336:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:15.754977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ace/r3tmp/tmpO7QChl/pdisk_1.dat 2025-03-28T09:28:16.274545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:16.278447Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:16.278549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:16.282737Z node 1 :HIVE WARN: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62082, node 1 2025-03-28T09:28:16.390293Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:16.390318Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:16.390326Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:16.390456Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11621 TClient is connected to server localhost:11621 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:17.041976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:17.059481Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:19.185672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789850127081124:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:19.185757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:19.496408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:28:19.631815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789850127081227:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:19.631972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:19.632312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789850127081232:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:19.650927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:28:19.664026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789850127081234:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:28:19.750238Z node 1 :TX_PROXY ERROR: Actor# [1:7486789850127081286:2401] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21218, MsgBus: 63067 2025-03-28T09:28:21.035443Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789859166994960:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:21.035523Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ace/r3tmp/tmp5rNXFe/pdisk_1.dat 2025-03-28T09:28:21.166061Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21218, node 2 2025-03-28T09:28:21.201956Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:21.202033Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:21.206298Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:21.330144Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:21.330166Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:21.330170Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:21.330257Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63067 TClient is connected to server localhost:63067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:21.727983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:21.740997Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting...
2025-03-28T09:28:21.812397Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:21.978587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:28:22.047805Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:28:24.394281Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789872051898488:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:24.394401Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:24.456146Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:24.499519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:24.537412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:24.574717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:24.655875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:24.694810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:24.753762Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789872051899003:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:24.753851Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: ...
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:47.659803Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:47.728360Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.802798Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.847180Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.896631Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.935718Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:48.011876Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:48.123252Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486789972988167775:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:48.123358Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:48.123624Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486789972988167780:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:48.128547Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:48.154528Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486789972988167782:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:28:48.216186Z node 5 :TX_PROXY ERROR: Actor# [5:7486789972988167837:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:48.282579Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486789951513329103:2138];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:48.282655Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19087, MsgBus: 61297 2025-03-28T09:28:52.619022Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486789992175179290:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:52.619141Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ace/r3tmp/tmpt4MgfK/pdisk_1.dat 2025-03-28T09:28:52.845832Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:52.878608Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:52.878732Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:52.881033Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19087, node 6 2025-03-28T09:28:52.978853Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:52.978878Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:52.978887Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:52.979030Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61297 TClient is connected to server localhost:61297 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:28:53.853620Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:53.861560Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:53.881916Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:54.011878Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:54.265436Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:54.359049Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:57.151930Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790013650017537:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:57.152063Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:57.195756Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:57.274618Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:57.322479Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:57.362299Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:57.433143Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:57.485596Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:57.533405Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790013650018057:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:57.533503Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:57.533597Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790013650018062:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:57.537793Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:57.550679Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790013650018064:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:28:57.606410Z node 6 :TX_PROXY ERROR: Actor# [6:7486790013650018117:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:57.622797Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486789992175179290:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:57.622875Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] >> KqpNewEngine::DuplicatedResults >> KqpNewEngine::MultiStatement [GOOD] >> KqpNewEngine::MultiStatementMixPure >> KqpRanges::DeleteNotFullScan-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] Test command err: 2025-03-28T09:26:38.800742Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:38.899247Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:38.921646Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:38.922010Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:38.930333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:38.930607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:38.930995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:38.931136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:38.931248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:38.931407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:38.931531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:38.931693Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:38.931828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:38.931942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.932057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:38.932204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:38.962595Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:38.963077Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:38.963142Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:38.963339Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.963497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:38.963564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:38.963607Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:38.963735Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:38.963808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:38.963865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:38.963900Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:38.964104Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.964215Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:38.964262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:38.964308Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:38.964450Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:38.964511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:38.964558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:38.964592Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:38.964685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:38.964727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:38.964758Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:38.964801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:38.964836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:38.964884Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:38.965343Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-03-28T09:26:38.965449Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=46; 2025-03-28T09:26:38.965536Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=43; 2025-03-28T09:26:38.965698Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=111; 2025-03-28T09:26:38.965871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:38.965925Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:38.965962Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:38.966169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:38.966213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.966241Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.966402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:38.966449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:38.966477Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:38.966674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:38.966765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:38.966802Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:38.966966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:38.967039Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:38.967096Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;c
olumn_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;co
lumn_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:44;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););(portion_id:48;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;in
dex_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:49;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-03-28T09:29:01.586562Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5928:7920];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-03-28T09:29:01.588121Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5928:7920];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpNotNullColumns::UpdateNotNull [GOOD] >> KqpNotNullColumns::UpdateNotNullPg >> KqpSort::TopSortResults [GOOD] >> KqpSort::TopParameterFilter >> KqpReturning::ReturningWorks+QueryService >> KqpNewEngine::DeleteWithBuiltin+UseSink [GOOD] >> KqpNewEngine::DeleteWithBuiltin-UseSink >> KqpRanges::NullInKeySuffix [GOOD] >> KqpRanges::NullInPredicate >> KqpSqlIn::KeySuffix_NotPointPrefix [GOOD] >> KqpSqlIn::KeyTypeMissmatch_Int ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::DeleteNotFullScan-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20123, MsgBus: 2711 2025-03-28T09:28:14.913441Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789827845726705:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:14.913648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ae3/r3tmp/tmpjwIhmR/pdisk_1.dat 2025-03-28T09:28:15.335023Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:15.340862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:15.343075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:15.347589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20123, node 1 2025-03-28T09:28:15.539432Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:15.539461Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:15.539473Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:15.539598Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2711 TClient is connected to server localhost:2711 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:16.489675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:16.504409Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:16.510302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:16.681062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:16.864282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:16.951228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:18.392938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789845025597665:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.393096Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.768931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:18.805259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:18.837755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:18.874144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:18.914589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:18.987601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:19.090379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789849320565484:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:19.090532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:19.090753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789849320565489:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:19.095227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:19.109149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789849320565491:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:28:19.195177Z node 1 :TX_PROXY ERROR: Actor# [1:7486789849320565546:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:19.918806Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789827845726705:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:19.918868Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:28:20.349403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:28:20.624596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:28:20.909567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:28:21.101124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:28:21.416554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14935, MsgBus: 9636 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ae3/r3tmp/tmpV9FkKD/pdisk_1.dat 2025-03-28T09:28:22.804532Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:28:22.878251Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:22.911749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:22.912791Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:22.914930Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14935, node 2 2025-03-28T09:28:23.003242Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:23.003272Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:23.003280Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:23.003400Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9636 TClient is connected to server localhost:9636 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:23.426582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715 ... emeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:48.544117Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486789973381785032:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:48.544257Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:48.544750Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486789973381785037:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:48.552087Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:48.571200Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486789973381785039:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:28:48.597739Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486789951906946243:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:48.597810Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:28:48.670397Z node 5 :TX_PROXY ERROR: Actor# [5:7486789973381785098:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:50.239140Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:28:50.625062Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:28:50.868761Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-28T09:28:51.089863Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-03-28T09:28:51.545184Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 13165, MsgBus: 17997 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ae3/r3tmp/tmpEzN1ox/pdisk_1.dat 2025-03-28T09:28:54.814058Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:54.825712Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:28:54.888908Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:54.889019Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:54.891635Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13165, node 6 2025-03-28T09:28:54.987080Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:54.987104Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:54.987115Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:54.987275Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17997 TClient is connected to server localhost:17997 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:55.676090Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:55.698374Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:55.813353Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:56.027536Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:56.129936Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:58.993309Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790014622547167:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:58.993417Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:59.117547Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:59.205785Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:59.265380Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:59.319301Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:59.407252Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:59.525901Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:59.656457Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790018917514993:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:59.656663Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:59.657210Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790018917514998:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:59.662350Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:59.691573Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790018917515001:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:28:59.790724Z node 6 :TX_PROXY ERROR: Actor# [6:7486790018917515058:3458] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Join2"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","Stats":{"ComputeNodes":[{"Tasks":[{"NodeId":6,"FinishTimeMs":1743154141894,"TaskId":1,"Host":"ghrun-mdsjyjltvm","ComputeTimeUs":135}],"CpuTimeUs":774}],"UseLlvm":"undefined","Tasks":1,"FinishedTasks":0,"PhysicalStageId":0,"StageDurationUs":0,"Table":[{"Path":"\/Root\/Join2"}],"BaseTimeMs":1743154141894,"NodesScanShards":[],"CpuTimeUs":{"Count":1,"Sum":774,"Max":774,"Min":774}},"CTE Name":"precompute_0_0"}],"Node Type":"Effect"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":558033,"CpuTimeUs":547343},"ProcessCpuTimeUs":1998,"TotalDurationUs":575022,"ResourcePoolId":"default","QueuedTimeUs":847},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-SelfCpu":0.774,"A-Cpu":0.774,"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2"}],"Node Type":"Delete"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query"}} >> KqpRanges::ScanKeyPrefix [GOOD] >> KqpNotNullColumns::InsertFromSelect [GOOD] >> KqpNotNullColumns::InsertNotNullPg+useSink >> KqpRanges::LiteralOrCompisite [GOOD] >> KqpRanges::LiteralOrCompisiteCollision >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [GOOD] >> KqpNewEngine::PureExpr >> KqpNewEngine::LocksEffects [GOOD] >> KqpNewEngine::LeftSemiJoin >> KqpNotNullColumns::ReplaceNotNullPg [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns+StreamLookup >> KqpNewEngine::Aggregate [GOOD] >> KqpNewEngine::AggregateTuple >> TPersQueueTest::Codecs_WriteMessageWithNonDefaultCodecThatHasToBeConfiguredAdditionally_SessionClosedWithBadRequestError [GOOD] >> TPersQueueTest::CreateTopicWithMeteringMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::ScanKeyPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 21381, MsgBus: 27732 2025-03-28T09:28:14.915827Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789827876121051:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:14.915860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000af0/r3tmp/tmp5AJx5q/pdisk_1.dat 2025-03-28T09:28:15.355930Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:15.360131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:15.360254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:15.363066Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21381, node 1 2025-03-28T09:28:15.537979Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:15.538005Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:15.538013Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:15.538146Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27732 TClient is connected to server localhost:27732 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:16.378265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:16.417086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:16.588830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:16.774986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:16.862803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:18.309662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789845055991939:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.309805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.675884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:18.717330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:18.753823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:18.797492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:18.834316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:18.872035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:18.929439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789845055992452:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.929537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.929749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789845055992457:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.935269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:18.949838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789845055992459:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:28:19.031473Z node 1 :TX_PROXY ERROR: Actor# [1:7486789849350959808:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:19.918929Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789827876121051:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:19.918986Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12317, MsgBus: 25032 2025-03-28T09:28:21.848782Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789858802797985:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:21.848820Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000af0/r3tmp/tmpZaAn8G/pdisk_1.dat 2025-03-28T09:28:21.958339Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12317, node 2 2025-03-28T09:28:21.999532Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:21.999665Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:22.004701Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:22.053614Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:22.053640Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:22.053653Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:22.053780Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25032 TClient is connected to server localhost:25032 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
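Note: the recurring NOT_FOUND / "Scheduled retry" / "path exist, request accepts it" sequence above is the workload manager lazily bootstrapping the default resource pool at /Root/.metadata/workload_manager/pools/default: several actors race to create it, one CREATE wins, and the losers' proposals are accepted as no-ops against the already-existing path. A minimal YQL sketch of creating such a pool explicitly is below; the CREATE RESOURCE POOL syntax and the setting names are assumptions based on current YDB YQL, not taken from this log, and the values are illustrative.

    -- Hypothetical: pre-create the pool that these tests otherwise bootstrap lazily.
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed setting name, illustrative value
        QUEUE_SIZE = 100              -- assumed setting name, illustrative value
    );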
2025-03-28T09:28:22.524256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:22.553612Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:22.615720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:22.770915Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:22.883123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:25.234625Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService ... MESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:52.402630Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:52.505957Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486789989472688770:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:52.506058Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:52.506275Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486789989472688775:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:52.510152Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:52.529609Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486789989472688777:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:28:52.621444Z node 5 :TX_PROXY ERROR: Actor# [5:7486789989472688833:3457] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:52.742141Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486789967997850150:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:52.742283Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:28:54.048699Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:28:54.476753Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:28:54.693994Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-28T09:28:54.905486Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.302340Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:1:44: Warning: At function: Coalesce
:1:58: Warning: At function: SqlIn
:1:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 23399, MsgBus: 29629 2025-03-28T09:28:56.910325Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790008773297863:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:56.910410Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000af0/r3tmp/tmpfrGQ0W/pdisk_1.dat 2025-03-28T09:28:57.144792Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:57.171570Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:57.171707Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:57.174298Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23399, node 6 2025-03-28T09:28:57.244931Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:57.244967Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:57.244979Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:57.245195Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29629 TClient is connected to server localhost:29629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:57.918336Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:57.931546Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:58.027715Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 
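Note: the SqlIn warning above (code 1108) means YQL is applying its legacy IN semantics, which can surprise when the left-hand side or the collection is nullable. The suggested fix is the pragma named verbatim in the warning; a minimal sketch follows, where the table path and column are hypothetical, not from this log.

    PRAGMA AnsiInForEmptyOrNullableItemsCollections;
    -- With the pragma enabled, IN over nullable operands follows ANSI SQL
    -- three-valued logic (roughly: NULL IN (...) yields NULL rather than false).
    SELECT * FROM `/Root/Test` WHERE OptionalKey IN (1, 2, 3);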
2025-03-28T09:28:58.231595Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:28:58.313001Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:01.583966Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790030248136121:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:01.584094Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:01.648037Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:01.695696Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:01.754655Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:01.807120Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:01.865017Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:01.909807Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790008773297863:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:01.910200Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:01.927027Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:02.030366Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790034543103932:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:02.030489Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:02.031045Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790034543103937:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:02.036453Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:02.050552Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790034543103939:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:02.147000Z node 6 :TX_PROXY ERROR: Actor# [6:7486790034543103994:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] >> KqpMergeCn::TopSortByDesc_Double_Limit3 >> KqpNewEngine::SequentialReadsPragma+Enabled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::ReadWithoutConsumerFirstClassCitizen [GOOD] Test command err: 2025-03-28T09:25:18.529760Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789070751790656:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.529954Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:18.553992Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789070940499802:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.554037Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001255/r3tmp/tmpEmSd6W/pdisk_1.dat 2025-03-28T09:25:18.697149Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:25:18.697198Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:25:18.917238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:18.917642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:18.919738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:18.919802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:18.919923Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:18.930923Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:25:18.931099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:18.932441Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15592, node 1 2025-03-28T09:25:19.128625Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001255/r3tmp/yandexlqUaY7.tmp 2025-03-28T09:25:19.128665Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001255/r3tmp/yandexlqUaY7.tmp 2025-03-28T09:25:19.128909Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001255/r3tmp/yandexlqUaY7.tmp 2025-03-28T09:25:19.129095Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:25:19.313948Z 
INFO: TTestServer started on Port 19257 GrpcPort 15592 TClient is connected to server localhost:19257 PQClient connected to localhost:15592 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:19.605936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:19.687036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:25:21.930236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789083825402090:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:21.930240Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789083825402067:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:21.930347Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:21.930224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789083636693637:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:21.930309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:21.930713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789083636693645:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:21.934016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:25:21.938038Z node 2 :TX_PROXY ERROR: Actor# [2:7486789083825402097:2172] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:25:21.952341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789083636693651:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:25:21.953832Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486789083825402096:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:25:22.027796Z node 2 :TX_PROXY ERROR: Actor# [2:7486789088120369419:2178] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:22.057187Z node 1 :TX_PROXY ERROR: Actor# [1:7486789087931661047:2766] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:22.233901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:25:22.235868Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789087931661060:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:25:22.236153Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWRlMWVmNjktNDEzZjRkN2YtOTM1ODkxMGQtZjU2NGZlZjM=, ActorId: [1:7486789083636693634:2337], ActorState: ExecuteState, TraceId: 01jqe13zvk7r86exxyw0mmp54p, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:25:22.238521Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:25:22.238916Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789088120369433:2321], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:25:22.239154Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTBkZmYzMzItOWZjNDNmYjAtMjA5ODgzMzYtODcyNGE1Zjc=, ActorId: [2:7486789083825402065:2312], ActorState: ExecuteState, TraceId: 01jqe13zvgarhp6mcy5sbhdkp6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:25:22.239589Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:25:22.316341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:25:22.409468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:25:22.636953Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { ... sFromCache: 0 SizeLag: 372 RealReadOffset: 37 WaitQuotaTimeMs: 0 EndOffset: 40 StartOffset: 0 } Cookie: 35 } 2025-03-28T09:29:03.172157Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3) ready for read with readOffset 38 endOffset 40 2025-03-28T09:29:03.172180Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3) EndOffset 40 ReadOffset 38 ReadGuid 822dd5d8-f6c53f32-cc6c7a02-32e889f6 has messages 1 2025-03-28T09:29:03.172252Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_13550737154759575412_v1 read done: guid# 46c7b5ea-b4922458-d2e0c4cd-2dd710d2, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3), size# 352 2025-03-28T09:29:03.172281Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_13550737154759575412_v1 response to read: guid# 46c7b5ea-b4922458-d2e0c4cd-2dd710d2 2025-03-28T09:29:03.172545Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_13550737154759575412_v1 Process answer. 
Aval parts: 0 2025-03-28T09:29:03.172698Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3), readOffset# 38, endOffset# 40, WTime# 1743154142757, sizeLag# 372 2025-03-28T09:29:03.172722Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1TEvPartitionReady. Aval parts: 0 2025-03-28T09:29:03.172771Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 read done: guid# 822dd5d8-f6c53f32-cc6c7a02-32e889f6, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3), size# 522 2025-03-28T09:29:03.172795Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 response to read: guid# 822dd5d8-f6c53f32-cc6c7a02-32e889f6 2025-03-28T09:29:03.172931Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 Process answer. Aval parts: 1 Bytes readed: 522 Offset: 35 from session 3 Offset: 36 from session 3 Offset: 37 from session 3 2025-03-28T09:29:03.173647Z node 24 :PQ_READ_PROXY DEBUG: session cookie 1 consumer session _24_1_13550737154759575412_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 3 offsets { end: 39 } } } } 2025-03-28T09:29:03.173715Z node 24 :PQ_READ_PROXY INFO: session cookie 1 consumer session _24_1_13550737154759575412_v1 closed with error: reason# can't commit when reading without a consumer 2025-03-28T09:29:03.173967Z node 24 :PQ_READ_PROXY INFO: session cookie 1 consumer session _24_1_13550737154759575412_v1 is DEAD 2025-03-28T09:29:03.175328Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_13550737154759575412_v1 2025-03-28T09:29:03.175391Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7486790032257410186:2595] destroyed 2025-03-28T09:29:03.175412Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_13550737154759575412_v1 2025-03-28T09:29:03.175430Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7486790032257410185:2594] destroyed 2025-03-28T09:29:03.175447Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_13550737154759575412_v1 2025-03-28T09:29:03.175466Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7486790032257410184:2593] destroyed 2025-03-28T09:29:03.175481Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_13550737154759575412_v1 2025-03-28T09:29:03.175499Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7486790032257410183:2592] destroyed 2025-03-28T09:29:03.175517Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_1_13550737154759575412_v1 2025-03-28T09:29:03.175535Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7486790032257410182:2591] destroyed 2025-03-28T09:29:03.175624Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_13550737154759575412_v1 2025-03-28T09:29:03.175645Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_13550737154759575412_v1 2025-03-28T09:29:03.175662Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: 
_24_1_13550737154759575412_v1 2025-03-28T09:29:03.175679Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_13550737154759575412_v1 2025-03-28T09:29:03.175696Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_1_13550737154759575412_v1 2025-03-28T09:29:03.177211Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 grpc read done: success# 1, data# { read_request { bytes_size: 400 } } 2025-03-28T09:29:03.177471Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 got read request: guid# c4cc7809-38fc5772-852c876a-4eb15ec2 2025-03-28T09:29:03.177526Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 performing read request: guid# ca1a6846-2f3f66da-550f3807-85043938, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3), count# 2, size# 170, partitionsAsked# 1, maxTimeLag# 0ms 2025-03-28T09:29:03.177609Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3)maxCount 2 maxSize 170 maxTimeLagMs 0 readTimestampMs 0 readOffset 38 EndOffset 40 ClientCommitOffset 0 committedOffset 0 Guid ca1a6846-2f3f66da-550f3807-85043938 2025-03-28T09:29:03.179089Z node 25 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-03-28T09:29:03.179156Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 2 2025-03-28T09:29:03.179319Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 2, State: StateIdle] read cookie 34 Topic 'rt3.dc1--topic1' partition 2 user $without_consumer offset 38 count 2 size 170 endOffset 40 max time lag 0ms effective offset 38 2025-03-28T09:29:03.179371Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 2, State: StateIdle] read cookie 34 added 0 blobs, size 0 count 0 last offset 38, current partition end offset: 40 2025-03-28T09:29:03.179726Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 2, State: StateIdle] Reading cookie 34. All data is from uncompacted head. 2025-03-28T09:29:03.179759Z node 25 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:29:03.179903Z node 25 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 2 messageNo: 0 requestId: cookie: 38 2025-03-28T09:29:03.187647Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3) initDone 1 event { CmdReadResult { MaxOffset: 40 Result { Offset: 38 Data: "... 94 bytes ..." SourceId: "\000source" SeqNo: 40 WriteTimestampMS: 1743154142778 CreateTimestampMS: 1743154142771 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } Result { Offset: 39 Data: "... 94 bytes ..." 
SourceId: "\000source" SeqNo: 41 WriteTimestampMS: 1743154142798 CreateTimestampMS: 1743154142796 UncompressedSize: 6 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 40 RealReadOffset: 39 WaitQuotaTimeMs: 0 EndOffset: 40 StartOffset: 0 } Cookie: 38 } 2025-03-28T09:29:03.187942Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3) wait data in partition inited, cookie 1 from offset40 2025-03-28T09:29:03.188026Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3) EndOffset 40 ReadOffset 40 ReadGuid ca1a6846-2f3f66da-550f3807-85043938 has messages 1 2025-03-28T09:29:03.188214Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 read done: guid# ca1a6846-2f3f66da-550f3807-85043938, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:3), size# 352 2025-03-28T09:29:03.188250Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 response to read: guid# ca1a6846-2f3f66da-550f3807-85043938 2025-03-28T09:29:03.188514Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 Process answer. Aval parts: 0 Bytes readed: 352 Offset: 38 from session 3 Offset: 39 from session 3 2025-03-28T09:29:03.191509Z node 24 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _24_2_5351447854448259575_v1 grpc read done: success# 1, data# { commit_offset_request { commit_offsets { partition_session_id: 3 offsets { end: 39 } } } } 2025-03-28T09:29:03.191542Z node 24 :PQ_READ_PROXY INFO: session cookie 2 consumer session _24_2_5351447854448259575_v1 closed with error: reason# can't commit when reading without a consumer 2025-03-28T09:29:03.191779Z node 24 :PQ_READ_PROXY INFO: session cookie 2 consumer session _24_2_5351447854448259575_v1 is DEAD 2025-03-28T09:29:03.196905Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_5351447854448259575_v1 2025-03-28T09:29:03.196997Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7486790032257410198:2602] destroyed 2025-03-28T09:29:03.197033Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_5351447854448259575_v1 2025-03-28T09:29:03.197053Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7486790032257410196:2600] destroyed 2025-03-28T09:29:03.197070Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_5351447854448259575_v1 2025-03-28T09:29:03.197089Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7486790032257410195:2599] destroyed 2025-03-28T09:29:03.197105Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_5351447854448259575_v1 2025-03-28T09:29:03.197122Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7486790032257410194:2598] destroyed 2025-03-28T09:29:03.197197Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_5351447854448259575_v1 2025-03-28T09:29:03.197217Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_5351447854448259575_v1 2025-03-28T09:29:03.197242Z node 25 :PQ_READ_PROXY DEBUG: Direct 
read cache: server session deregistered: _24_2_5351447854448259575_v1 2025-03-28T09:29:03.197259Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_5351447854448259575_v1 2025-03-28T09:29:03.222814Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _24_2_5351447854448259575_v1 2025-03-28T09:29:03.222904Z node 25 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [24:7486790032257410197:2601] destroyed 2025-03-28T09:29:03.222983Z node 25 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _24_2_5351447854448259575_v1 >> KqpNotNullColumns::UpdateTable_Immediate [GOOD] >> DataShardReadIterator::TryWriteManyRows-Commit [GOOD] >> DataShardReadIteratorBatchMode::RangeFull ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] Test command err: 2025-03-28T09:26:38.920790Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:39.018242Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:39.043602Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:39.044208Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:39.052466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:39.052684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:39.052952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:39.053076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:39.053187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:39.053321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:39.053452Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:39.053562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:39.053683Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:39.053797Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:39.053891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:39.053995Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:39.087312Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:39.087705Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:39.087798Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:39.087996Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:39.088165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:39.088243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:39.088287Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:39.088392Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:39.088475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:39.088521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:39.088552Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:39.088727Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:39.088793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:39.088830Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:39.088858Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:39.089004Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:39.089072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:39.089124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:39.089167Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:39.089242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:39.089277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:39.089308Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:39.089351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:39.089412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:39.089456Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:39.089821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=37; 2025-03-28T09:26:39.089927Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=54; 2025-03-28T09:26:39.090021Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=45; 2025-03-28T09:26:39.090107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=47; 2025-03-28T09:26:39.090272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:39.090326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:39.090360Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:39.090546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:39.090593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:39.090620Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:39.091014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:39.091069Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:39.091100Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:39.091270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:39.091306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:39.091332Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:39.091492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:39.091535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:39.091586Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
lumn_id:8;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;col
umn_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;colu
mn_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:44;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:46;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:210
9896;index_size:20;meta:((produced=INSERTED;)););(portion_id:51;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););; 2025-03-28T09:29:05.905252Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:6068:8060];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-03-28T09:29:05.906806Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6068:8060];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpNewEngine::Update+UseSink >> KqpNotNullColumns::JoinBothTablesWithNotNullPk+StreamLookup [GOOD] >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::UpdateTable_Immediate [GOOD] Test command err: Trying to start YDB, gRPC: 62981, MsgBus: 31640 2025-03-28T09:28:15.020670Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789832147569377:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:15.020700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ad9/r3tmp/tmpZnpxko/pdisk_1.dat 2025-03-28T09:28:15.472901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:15.472968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:15.475161Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:15.476674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62981, node 1 2025-03-28T09:28:15.562598Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:15.562621Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:15.562627Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:15.562796Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31640 TClient is connected to server localhost:31640 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:16.336582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:18.106191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789845032471931:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.106309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.674329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:28:18.830840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789845032472034:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.830983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.831441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789845032472039:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.835428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:28:18.849681Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T09:28:18.850156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789845032472041:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:28:18.950436Z node 1 :TX_PROXY ERROR: Actor# [1:7486789845032472093:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:19.307079Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789849327439433:2358], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2025-03-28T09:28:19.308472Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTdmMzM0ODQtMjg1Y2M3OTQtZmUxMDlkOTAtM2U3NzAyMzY=, ActorId: [1:7486789845032471926:2328], ActorState: ExecuteState, TraceId: 01jqe19d2e2ses5z8xtq0g6x0c, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-28T09:28:19.339662Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789849327439442:2362], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:45: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:45: Error: Failed to convert 'Value': Null to String
:1:45: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-28T09:28:19.340989Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTdmMzM0ODQtMjg1Y2M3OTQtZmUxMDlkOTAtM2U3NzAyMzY=, ActorId: [1:7486789845032471926:2328], ActorState: ExecuteState, TraceId: 01jqe19d3wem2ybd640r47271f, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 8569, MsgBus: 25720 2025-03-28T09:28:20.156579Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789854851522450:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:20.156691Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ad9/r3tmp/tmptBdIHZ/pdisk_1.dat 2025-03-28T09:28:20.296200Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8569, node 2 2025-03-28T09:28:20.339425Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:20.339508Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:20.341867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:20.404281Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:20.404301Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:20.404308Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:20.404444Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25720 TClient is connected to server localhost:25720 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:20.940671Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:28:20.947813Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:28:23.386776Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789867736424999:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:23.386861Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:23.416504Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:28:23.476369Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789867736425099:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:23.476457Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:23.476675Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789867736425104:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permission ... State: Disconnected -> Connecting 2025-03-28T09:28:42.592055Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2872, node 5 2025-03-28T09:28:42.791299Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:42.791325Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:42.791334Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:42.791461Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14180 TClient is connected to server localhost:14180 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:43.399504Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:43.408770Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:28:46.558274Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486789963147783869:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:46.558373Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:46.558824Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486789963147783881:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:46.564398Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:28:46.581959Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486789963147783883:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:28:46.655527Z node 5 :TX_PROXY ERROR: Actor# [5:7486789963147783934:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:46.767879Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.414802Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486789945967914232:2268];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:47.467752Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:28:55.421905Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe1aej46wj86pvahxzxzvj9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=MTYwMTdiMjMtYzJlYzZmNWMtZDZhOGZjMzUtNTUwMzdiYjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:28:55.422238Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=MTYwMTdiMjMtYzJlYzZmNWMtZDZhOGZjMzUtNTUwMzdiYjk=, ActorId: [5:7486789993212555933:2536], ActorState: ExecuteState, TraceId: 01jqe1aej46wj86pvahxzxzvj9, Create QueryResponse for error on request, msg: 2025-03-28T09:28:57.105122Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe1agc9ddk4rjt8vfe090jj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=M2YzNDE4NGEtZjRjODU0NjEtYWQ4MjQ5OWMtNDcyMzg2ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, TKqpEnsure failed. 
2025-03-28T09:28:57.105379Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=M2YzNDE4NGEtZjRjODU0NjEtYWQ4MjQ5OWMtNDcyMzg2ZTk=, ActorId: [5:7486790001802490627:2561], ActorState: ExecuteState, TraceId: 01jqe1agc9ddk4rjt8vfe090jj, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29036, MsgBus: 4206 2025-03-28T09:28:58.201706Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790017552793114:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:58.201771Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ad9/r3tmp/tmpwVVfOX/pdisk_1.dat 2025-03-28T09:28:58.340706Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:58.370753Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:58.370859Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:58.372611Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29036, node 6 2025-03-28T09:28:58.467353Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:58.467380Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:58.467390Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:58.467527Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4206 TClient is connected to server localhost:4206 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:59.244924Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:59.265091Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:02.669518Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790034732662948:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:02.669653Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:02.687160Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:02.809044Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790034732663099:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:02.809164Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:02.809705Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790034732663104:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:02.814180Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:29:02.830102Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790034732663106:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:29:02.905877Z node 6 :TX_PROXY ERROR: Actor# [6:7486790034732663157:2433] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:03.202062Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790017552793114:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:03.202142Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::SequentialReadsPragma+Enabled [GOOD] Test command err: Trying to start YDB, gRPC: 13923, MsgBus: 30281 2025-03-28T09:28:14.920392Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789827481458218:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:14.920472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ada/r3tmp/tmpYzbmXD/pdisk_1.dat 2025-03-28T09:28:15.313416Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:15.340916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:15.343062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:15.349949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13923, node 1 2025-03-28T09:28:15.539419Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:15.539444Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:15.539450Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:15.539569Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30281 TClient is connected to server localhost:30281 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:28:16.380173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:16.419412Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:18.185818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789844661327940:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.185911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789844661327949:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.186512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:18.192829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:28:18.202937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789844661327954:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:28:18.270092Z node 1 :TX_PROXY ERROR: Actor# [1:7486789844661328005:2341] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 10458, MsgBus: 20891 2025-03-28T09:28:19.794728Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789849417743084:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:19.794782Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ada/r3tmp/tmpSEhDFy/pdisk_1.dat 2025-03-28T09:28:20.023582Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:20.044187Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:20.044273Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:20.045475Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10458, node 2 2025-03-28T09:28:20.123170Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:20.123203Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:20.123212Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:20.123355Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20891 TClient is connected to server localhost:20891 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:20.588966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:20.606204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:28:20.750094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:20.925250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:21.001919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:22.962065Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789862302646727:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:22.962175Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:23.043356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:23.081872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:23.152105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:23.185431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:23.254497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:23.293133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:23.351615Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789866597614538:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:23.351723Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:23.351767Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789866597614543:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:23.355469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:23.369350Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486789866597614545:2460], DatabaseId: /Root, ... WorkloadService] [TPoolFetcherActor] ActorId: [5:7486789997635421554:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:54.924257Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:54.992020Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.037343Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.075720Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.114780Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.184427Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.283914Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.382931Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790001930389377:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:55.383115Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:55.383649Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790001930389382:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:55.390468Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:55.412341Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790001930389384:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:28:55.501883Z node 5 :TX_PROXY ERROR: Actor# [5:7486790001930389439:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:55.943488Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486789980455550772:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:55.951566Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14566, MsgBus: 22350 2025-03-28T09:28:58.320991Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790014856479074:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:58.321053Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ada/r3tmp/tmpjQElXE/pdisk_1.dat 2025-03-28T09:28:58.518029Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:58.518122Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:58.520073Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:58.520321Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14566, node 6 2025-03-28T09:28:58.607425Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:58.607447Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:58.607455Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:58.607635Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22350 TClient is connected to server localhost:22350 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:28:59.578468Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:59.595629Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:59.692964Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:59.968991Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:00.079840Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:03.321428Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790014856479074:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:03.321513Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:03.365634Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790036331317324:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:03.365762Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:03.425776Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:03.470959Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:03.535173Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:03.594979Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:03.644459Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:03.716332Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:03.866925Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790036331317843:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:03.867094Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:03.867621Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790036331317848:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:03.873718Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:03.894303Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790036331317850:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:03.969465Z node 6 :TX_PROXY ERROR: Actor# [6:7486790036331317904:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpRanges::UpdateWhereInNoFullScan+UseSink [GOOD] >> KqpRanges::UpdateWhereInNoFullScan-UseSink >> KqpMergeCn::TopSortBy_Timestamp_Limit2 [GOOD] >> KqpMergeCn::TopSortBy_Interval_Limit3 >> KqpNewEngine::DuplicatedResults [GOOD] >> KqpNewEngine::FlatmapLambdaMutiusedConnections >> KqpNewEngine::LocksSingleShard >> KqpNotNullColumns::UpdateNotNullPg [GOOD] >> KqpNotNullColumns::UpdateOnNotNull >> KqpReturning::ReturningWorksIndexedDeleteV2-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedInsert+QueryService >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] >> KqpNewEngine::MultiStatementMixPure [GOOD] >> KqpNewEngine::MultiEffectsOnSameTable >> KqpRanges::IsNullInValue [GOOD] >> KqpRanges::IsNullInJsonValue >> KqpNewEngine::IdxLookupExtractMembers [GOOD] >> KqpNewEngine::FullScanCount >> KqpNotNullColumns::ReplaceNotNullPk >> KqpNotNullColumns::InsertNotNullPg+useSink [GOOD] >> KqpNotNullColumns::InsertNotNullPg-useSink >> KqpNewEngine::StreamLookupWithView >> KqpNewEngine::PrimaryView [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumption-UseSink >> TPQCompatTest::ReadWriteSessions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] Test command err: 2025-03-28T09:26:46.302242Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:46.408702Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:46.436274Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:46.436827Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:46.446831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:46.447252Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:46.447506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:46.447632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:46.447755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:46.447894Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:46.448052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:46.448192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:46.448322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:46.448435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:46.448565Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:46.448682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:46.482607Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:46.482959Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:46.483017Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:46.483227Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:46.483405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:46.483496Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:46.483544Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:46.483657Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:46.483759Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:46.483811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:46.483843Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:46.484044Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:46.484131Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:46.484176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:46.484205Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:46.484351Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:46.484408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:46.484453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:46.484502Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:46.484584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:46.484628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:46.484659Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:46.484708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:46.484746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:46.484785Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:46.485235Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-03-28T09:26:46.485347Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=56; 2025-03-28T09:26:46.485463Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=54; 2025-03-28T09:26:46.485557Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-03-28T09:26:46.485747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:46.485809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:46.485865Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:46.486118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:46.486163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:46.486194Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:46.486363Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:46.486412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:46.486449Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:46.486662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:46.486727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:46.486761Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:46.486941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:46.486986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:46.487048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;
column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;
column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:44;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););(portion_id:48;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;
index_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:49;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-03-28T09:29:09.065078Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:5927:7919];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-03-28T09:29:09.066387Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:5927:7919];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpNewEngine::DeleteWithBuiltin-UseSink [GOOD] >> KqpNewEngine::DeleteON ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PrimaryView [GOOD] Test command err: Trying to start YDB, gRPC: 8967, MsgBus: 62013 2025-03-28T09:28:16.109846Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789836413364501:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:16.109886Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ac6/r3tmp/tmpu8VjdS/pdisk_1.dat 2025-03-28T09:28:16.600656Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:16.618772Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:16.618858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:16.620945Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8967, node 1 2025-03-28T09:28:16.751275Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:16.751297Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:16.751308Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:16.751414Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62013 TClient is connected to server localhost:62013 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:17.282158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:17.311808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:17.435198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:17.589933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:17.661607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:19.351644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789849298268165:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:19.351764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:19.715464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:19.758008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:19.788996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:19.864508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:19.911539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:19.963965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:20.018999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789853593235976:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:20.019078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:20.022443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789853593235981:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:20.026636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:20.040026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789853593235983:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:28:20.098865Z node 1 :TX_PROXY ERROR: Actor# [1:7486789853593236037:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:21.115106Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789836413364501:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:21.115155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10605, MsgBus: 16990 2025-03-28T09:28:22.349140Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789863541650378:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:22.349200Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ac6/r3tmp/tmpfF3vM0/pdisk_1.dat 2025-03-28T09:28:22.514611Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:22.516577Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:22.516661Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:22.518382Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10605, node 2 2025-03-28T09:28:22.647447Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:22.647475Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:22.647483Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:22.647845Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16990 TClient is connected to server localhost:16990 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:28:23.135901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:23.143642Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:28:23.150206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:23.206353Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:23.395368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:28:23.466889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESc ... permissions } 2025-03-28T09:28:55.336754Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:55.405623Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.450194Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.491633Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.539039Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.620100Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.672388Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.704556Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486789982479969323:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:55.704610Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:28:55.776565Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790003954808091:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:55.776668Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:55.777251Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790003954808096:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:55.782173Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:55.798193Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790003954808098:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:28:55.875343Z node 6 :TX_PROXY ERROR: Actor# [6:7486790003954808154:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 24177, MsgBus: 9567 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ac6/r3tmp/tmpbXYknt/pdisk_1.dat 2025-03-28T09:28:59.422168Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:28:59.555513Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:59.582312Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:59.582425Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:59.584218Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24177, node 7 2025-03-28T09:28:59.751322Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:59.751348Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:59.751362Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:59.751499Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9567 TClient is connected to server localhost:9567 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:00.910973Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:00.919156Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:00.933148Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:29:01.019867Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:01.268144Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:01.421985Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:03.875349Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790040180436378:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:03.875473Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:03.939528Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:04.009228Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:04.063331Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:04.121572Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:04.209345Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:04.288542Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:04.404099Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790044475404197:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:04.404206Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:04.404268Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790044475404202:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:04.410092Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:04.427330Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486790044475404204:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:04.506240Z node 7 :TX_PROXY ERROR: Actor# [7:7486790044475404262:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:06.149417Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:06.316890Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:29:06.376042Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpSort::TopParameterFilter [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithFirstRowPreloadedWithReboot [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns >> KqpNewEngine::PureExpr [GOOD] >> KqpNewEngine::PrunePartitionsByLiteral >> KqpRanges::NullInPredicate [GOOD] >> KqpRanges::NullInPredicateRow >> KqpNotNullColumns::JoinRightTableWithNotNullColumns+StreamLookup [GOOD] >> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup >> KqpNewEngine::AggregateTuple [GOOD] >> KqpNewEngine::AsyncIndexUpdate ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPQCompatTest::ReadWriteSessions [GOOD] Test command err: 2025-03-28T09:25:18.618071Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789069735922532:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.618208Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:18.679841Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789070047351740:2103];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.680470Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:18.858552Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011e1/r3tmp/tmpJVdy4F/pdisk_1.dat 2025-03-28T09:25:18.860865Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:25:19.086035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:19.086162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:19.087032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:19.087096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:19.093807Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 
Cookie 2 2025-03-28T09:25:19.093967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:19.094452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2023, node 1 2025-03-28T09:25:19.175413Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:25:19.175569Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:25:19.203199Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:19.205127Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/0011e1/r3tmp/yandexOnEaRo.tmp 2025-03-28T09:25:19.205148Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/0011e1/r3tmp/yandexOnEaRo.tmp 2025-03-28T09:25:19.205312Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/0011e1/r3tmp/yandexOnEaRo.tmp 2025-03-28T09:25:19.205424Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:25:19.313977Z INFO: TTestServer started on Port 63339 GrpcPort 2023 TClient is connected to server localhost:63339 PQClient connected to localhost:2023 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:19.655710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:19.728314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:25:22.158277Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789087227221279:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:22.158281Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486789087227221296:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:22.158354Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:22.162948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-28T09:25:22.186103Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486789087227221300:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-28T09:25:22.257312Z node 2 :TX_PROXY ERROR: Actor# [2:7486789087227221327:2178] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:22.576416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:25:22.577476Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789087227221341:2321], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:25:22.577754Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTBkYjY2Yi0zYTM3NmU2MC1mY2ZlNjE0Ny1iMmQzZDA2OA==, ActorId: [2:7486789087227221269:2312], ActorState: ExecuteState, TraceId: 01jqe1403b6v19zgbra21gh59p, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:25:22.578001Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789086915792873:2344], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:25:22.579557Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWIyZDY0M2YtNjgwOWMyYjUtZWU2YjUyMGQtOTZjYTU5Y2M=, ActorId: [1:7486789086915792832:2337], ActorState: ExecuteState, TraceId: 01jqe1407pd1s8h5n4xtvkeh0d, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:25:22.581095Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:25:22.586104Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:25:22.660254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:25:22.753775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:25:22.953922Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqe140r39nkq3waey8bzkhx7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTFmNWY1NWQtOGMwNGY1MzUtYjk3ZmIxYWEtNTBlZjI2ODg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7486789086915793311:3067] 2025-03-28T09:25:23.617834Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789069735922532:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:23.617895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:23.680283Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486789070047351740:2103];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:23.680335Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-28T09:25:29.288272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710679:0, at schemeshard: 72057594046644480 CreateTopicNoLegacy: /Root/PQ/rt3.dc1--demo Create topic: /Root/PQ/rt3.dc1--demo AddTopic: /Root/PQ/rt3.dc1--demo ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (na ... -mirrored-from-dc2" } consumer: "user" } } 2025-03-28T09:29:08.662771Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_542781807485038641_v1 read init: from# ipv6:[::1]:56820, request# { init_request { topics_read_settings { path: "account/topic2-mirrored-from-dc2" } consumer: "user" } } 2025-03-28T09:29:08.663223Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session shared/user_27_5_8090742265335577815_v1 2025-03-28T09:29:08.663291Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [27:7486790060226413099:2667] destroyed 2025-03-28T09:29:08.663358Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_27_5_8090742265335577815_v1 2025-03-28T09:29:08.663151Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_542781807485038641_v1 auth for : user 2025-03-28T09:29:08.663189Z node 27 :PQ_METACACHE DEBUG: Handle describe topics 2025-03-28T09:29:08.663206Z node 27 :PQ_METACACHE DEBUG: SendSchemeCacheRequest 2025-03-28T09:29:08.663283Z node 27 :PQ_METACACHE DEBUG: send request for 1 topics, got 1 requests infly, db = "Root/LbCommunal" 2025-03-28T09:29:08.663896Z node 27 :PQ_METACACHE DEBUG: Handle SchemeCache response: result# { ErrorCount: 0 DatabaseName: Root/LbCommunal DomainOwnerId: 0 Instant: 12 ResultSet [{ Path: Root/LbCommunal/account/topic2-mirrored-from-dc2 TableId: [72057594046644480:18:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTopic DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:29:08.664040Z node 27 :PQ_METACACHE DEBUG: Got describe topics SC response 2025-03-28T09:29:08.664093Z node 27 :PQ_READ_PROXY DEBUG: session 
cookie 6 consumer shared/user session shared/user_27_6_542781807485038641_v1 Handle describe topics response 2025-03-28T09:29:08.664302Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_542781807485038641_v1 auth is DEAD 2025-03-28T09:29:08.664429Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_542781807485038641_v1 auth ok: topics# 1, initDone# 0 2025-03-28T09:29:08.665514Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_542781807485038641_v1 register session: topic# rt3.dc2--account--topic2 2025-03-28T09:29:08.666036Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7486790060226413106:2668] connected; active server actors: 1 2025-03-28T09:29:08.666163Z node 27 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037899][topic2-mirrored-from-dc2] consumer "user" register session for pipe [27:7486790060226413106:2668] session shared/user_27_6_542781807485038641_v1 2025-03-28T09:29:08.666241Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user register readable partition 0 2025-03-28T09:29:08.666344Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user family created family=1 (Status=Free, Partitions=[0]) 2025-03-28T09:29:08.666446Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] consumer user register reading session ReadingSession "shared/user_27_6_542781807485038641_v1" (Sender=[27:7486790060226413102:2668], Pipe=[27:7486790060226413106:2668], Partitions=[], ActiveFamilyCount=0) 2025-03-28T09:29:08.666507Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user rebalancing was scheduled 2025-03-28T09:29:08.666600Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-03-28T09:29:08.666743Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_27_6_542781807485038641_v1" (Sender=[27:7486790060226413102:2668], Pipe=[27:7486790060226413106:2668], Partitions=[], ActiveFamilyCount=0) 2025-03-28T09:29:08.666852Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] consumer user family 1 status Active partitions [0] session "shared/user_27_6_542781807485038641_v1" sender [27:7486790060226413102:2668] lock partition 0 for ReadingSession "shared/user_27_6_542781807485038641_v1" (Sender=[27:7486790060226413102:2668], Pipe=[27:7486790060226413106:2668], Partitions=[], ActiveFamilyCount=1) generation 1 step 3 2025-03-28T09:29:08.666933Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-03-28T09:29:08.666986Z node 27 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037899][topic2-mirrored-from-dc2] consumer user balancing duration: 0.000340s ===Got response: status: SUCCESS init_response { session_id: "shared/user_27_6_542781807485038641_v1" } 2025-03-28T09:29:08.667769Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_542781807485038641_v1 assign: record# { Partition: 0 TabletId: 72075186224037898 Topic: "topic2-mirrored-from-dc2" Generation: 1 Step: 3 Session: "shared/user_27_6_542781807485038641_v1" ClientId: "user" PipeClient { RawX1: 7486790060226413106 RawX2: 4503715591490156 } Path: "/Root/LbCommunal/account/topic2-mirrored-from-dc2" } 2025-03-28T09:29:08.667886Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_542781807485038641_v1 INITING TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) 2025-03-28T09:29:08.668555Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server connected, pipe [27:7486790060226413109:2671], now have 1 active actors on pipe 2025-03-28T09:29:08.668741Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_542781807485038641_v1 TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037898 Generation: 1, pipe: [27:7486790060226413109:2671] 2025-03-28T09:29:08.669120Z node 28 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic2-mirrored-from-dc2' requestId: 2025-03-28T09:29:08.669175Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] got client message batch for topic 'rt3.dc2--account--topic2' partition 0 2025-03-28T09:29:08.669227Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Created session shared/user_27_6_542781807485038641_v1 on pipe: [27:7486790060226413109:2671] 2025-03-28T09:29:08.669316Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/user_27_6_542781807485038641_v1:1 with generation 1 2025-03-28T09:29:08.669481Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] Topic 'rt3.dc2--account--topic2' partition 0 user user session is set to 0 (startOffset 0) session shared/user_27_6_542781807485038641_v1 2025-03-28T09:29:08.669692Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:29:08.669737Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:29:08.669772Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:29:08.669815Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:29:08.669838Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-28T09:29:08.669854Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-28T09:29:08.669901Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:29:08.669963Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:29:08.670025Z node 28 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-28T09:29:08.676426Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:29:08.676521Z node 28 :PERSQUEUE DEBUG: Answer ok topic: 'topic2-mirrored-from-dc2' partition: 0 messageNo: 0 requestId: cookie: 18446744073709551615 2025-03-28T09:29:08.677229Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_542781807485038641_v1 TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 0 WriteTimestampMS: 1743154148572 CreateTimestampMS: 1743154148572 SizeLag: 0 WriteTimestampEstimateMS: 0 } Cookie: 18446744073709551615 } 2025-03-28T09:29:08.677299Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_542781807485038641_v1 INIT DONE TopicId: Topic topic2-mirrored-from-dc2 in dc dc2 in database: Root, partition 0(assignId:1) EndOffset 0 readOffset 0 committedOffset 0 2025-03-28T09:29:08.677408Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_542781807485038641_v1 sending to client partition status ===Got response: status: SUCCESS start_partition_session_request { partition_session { partition_session_id: 1 path: "account/topic2-mirrored-from-dc2" } partition_offsets { } } 2025-03-28T09:29:08.681527Z node 27 :PQ_READ_PROXY DEBUG: session cookie 6 consumer shared/user session shared/user_27_6_542781807485038641_v1 grpc read done: success# 0, data# { } 2025-03-28T09:29:08.681555Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_542781807485038641_v1 grpc read failed 2025-03-28T09:29:08.681576Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_542781807485038641_v1 grpc closed 2025-03-28T09:29:08.681606Z node 27 :PQ_READ_PROXY INFO: session cookie 6 consumer shared/user session shared/user_27_6_542781807485038641_v1 is DEAD 2025-03-28T09:29:08.683477Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] Destroy direct read session shared/user_27_6_542781807485038641_v1 2025-03-28T09:29:08.683533Z node 28 :PERSQUEUE DEBUG: [PQ: 72075186224037898] server disconnected, pipe [27:7486790060226413109:2671] destroyed 2025-03-28T09:29:08.683593Z node 28 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_27_6_542781807485038641_v1 2025-03-28T09:29:08.683828Z node 27 :PERSQUEUE_READ_BALANCER INFO: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7486790060226413106:2668] disconnected; active server actors: 1 2025-03-28T09:29:08.683875Z node 27 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037899][topic2-mirrored-from-dc2] pipe [27:7486790060226413106:2668] client user disconnected session shared/user_27_6_542781807485038641_v1 2025-03-28T09:29:08.688886Z node 27 :PQ_METACACHE DEBUG: HandleClustersUpdate 2025-03-28T09:29:08.688908Z node 27 :PQ_METACACHE DEBUG: HandleClustersUpdate LocalCluster !LocalCluster.empty() 2025-03-28T09:29:09.318814Z node 27 :PQ_METACACHE DEBUG: Check version rescan 2025-03-28T09:29:09.328995Z node 27 :PQ_METACACHE DEBUG: Metacache: reset >> KqpSort::ReverseFirstKeyOptimized >> KqpNewEngine::LeftSemiJoin [GOOD] >> KqpNewEngine::LocksInRoTx >> KqpReturning::ReturningWorks+QueryService [GOOD] >> KqpReturning::ReturningWorks-QueryService >> KqpRanges::LiteralOrCompisiteCollision 
[GOOD] >> KqpRanges::MergeRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::TopParameterFilter [GOOD] Test command err: Trying to start YDB, gRPC: 22244, MsgBus: 28566 2025-03-28T09:28:22.660271Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789862454683574:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:22.663302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ab6/r3tmp/tmpkS6YD2/pdisk_1.dat 2025-03-28T09:28:23.139695Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:23.156388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:23.156517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:23.158425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22244, node 1 2025-03-28T09:28:23.245852Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:23.245898Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:23.245910Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:23.246034Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28566 TClient is connected to server localhost:28566 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:23.829223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:23.875379Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:23.887795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
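For orientation, the per-test bootstrap shown above (start a single-node server, wait for /Root, seed tables) maps onto a handful of SDK calls. The following is a minimal sketch with the ydb Python SDK (the same SDK used by the pytest suites later in this log), assuming its v3 query service and the tuple form of parameter passing; the endpoint reuses the GrpcPort printed just above (22244), and the statements reuse the "=== Init DC" upsert and the DECLARE-style topic registration quoted earlier. Values are illustrative, not taken from the test sources.

import ydb

# Sketch only: endpoint/database mirror the log above; values are illustrative.
with ydb.Driver(endpoint="grpc://localhost:22244", database="/Root") as driver:
    driver.wait(timeout=5)  # rough analogue of the WaitRootIsUp step
    pool = ydb.QuerySessionPool(driver)
    # Same statement the PQ suite issues at its "=== Init DC" step:
    pool.execute_with_retries("""
        UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight)
        VALUES ("dc1", "localhost", true, true, 1000),
               ("dc2", "dc2.logbroker.yandex.net", false, true, 1000);
    """)
    # Topic registration is parameterized like the truncated "===Run query"
    # above ($version elided here for brevity):
    pool.execute_with_retries(
        "DECLARE $path AS Utf8; DECLARE $cluster AS Utf8; "
        "UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster);",
        {"$path": ("rt3.dc1--demo", ydb.PrimitiveType.Utf8),
         "$cluster": ("dc1", ydb.PrimitiveType.Utf8)},
    )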
2025-03-28T09:28:24.031406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:24.239682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:24.347723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:26.221033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789879634554386:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:26.221151Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:26.692985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:26.754492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:26.797537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:26.835054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:26.876157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:26.955894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:27.036181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789883929522201:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:27.036271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:27.036448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789883929522206:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:27.041550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:27.055393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789883929522208:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:28:27.161024Z node 1 :TX_PROXY ERROR: Actor# [1:7486789883929522264:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:27.658502Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789862454683574:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:27.658570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16888, MsgBus: 21927 2025-03-28T09:28:29.769648Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789891050046865:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:29.769985Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ab6/r3tmp/tmpaTxSda/pdisk_1.dat 2025-03-28T09:28:29.907041Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:29.929979Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:29.930069Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:29.931722Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16888, node 2 2025-03-28T09:28:30.028998Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:30.029023Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:30.029033Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:30.029143Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21927 TClient is connected to server localhost:21927 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:28:30.480713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:30.486388Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:28:30.499581Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:30.587615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:30.758957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting ... PoolFetcherActor] ActorId: [5:7486790010054701390:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:56.428782Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:56.429186Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790010054701396:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:56.433616Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:56.451068Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790010054701398:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:28:56.522027Z node 5 :TX_PROXY ERROR: Actor# [5:7486790010054701453:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:56.595117Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486789988579862703:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:56.595257Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:28:57.762998Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10164, MsgBus: 31068 2025-03-28T09:29:04.100725Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790042521868350:2208];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ab6/r3tmp/tmpfwswqz/pdisk_1.dat 2025-03-28T09:29:04.148304Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:29:04.244868Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:04.266487Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:04.266605Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:04.269263Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10164, node 6 2025-03-28T09:29:04.338787Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:04.338811Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:04.338825Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:04.338971Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31068 TClient is connected to server localhost:31068 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:05.005075Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:05.015497Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:05.037864Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:05.150471Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:29:05.359133Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:29:05.472009Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:08.428194Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790059701739141:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:08.428301Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:08.491916Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:08.535223Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:08.584484Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:08.637364Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:08.685007Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:08.800460Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:08.878406Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790059701739660:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:08.878519Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:08.879258Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790059701739665:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:08.883625Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:08.906560Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790059701739667:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:08.975514Z node 6 :TX_PROXY ERROR: Actor# [6:7486790059701739723:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:09.042884Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790042521868350:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:09.042941Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ( (declare $limit (DataType 'Uint64)) (declare $value (DataType 'Int32)) (let $1 (KqpTable '"/Root/TwoShard" '"72057594046644480:2" '"" '1)) (let $2 '('"Key" '"Value1" '"Value2")) (let $3 (KqpRowsSourceSettings $1 $2 '() (Void) '())) (let $4 (DataType 'Int32)) (let $5 (Min (Uint64 '"1001") $limit)) (let $6 (StructType '('"Key" (OptionalType (DataType 'Uint32))) '('"Value1" (OptionalType (DataType 'String))) '('"Value2" (OptionalType $4)))) (let $7 '('('"_logical_id" '497) '('"_id" '"f0c981e3-e4fb0c1a-81b04eb-7356929b") '('"_wide_channels" $6))) (let $8 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $3)) (lambda '($12) (block '( (let $13 (lambda '($16) (block '( (let $17 (Member $16 '"Value2")) (return (Member $16 '"Key") (Member $16 '"Value1") $17 (Coalesce (!= $17 $value) (Bool 'false))) )))) (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) (lambda '($18 $19 $20 $21) $21) $5)) (let $15 (lambda '($22 $23 $24 $25) $22 $23 $24)) (return (FromFlow (WideMap $14 $15))) ))) $7)) (let $9 (DqCnUnionAll (TDqOutput $8 '"0"))) (let $10 (DqPhyStage '($9) (lambda '($26) (FromFlow (NarrowMap (Take (ToFlow $26) $5) (lambda '($27 $28 $29) (AsStruct '('"Key" $27) '('"Value1" $28) '('"Value2" $29)))))) '('('"_logical_id" '510) '('"_id" '"ab4941f4-d53e9c6c-31d859dc-2873787d")))) (let $11 (DqCnResult (TDqOutput $10 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($8 $10) '($11) '('('"$limit") '('"$value")) '('('"type" '"data")))) '((KqpTxResultBinding (ListType $6) '"0" '"0")) '('('"type" '"data_query")))) ) >> KqpNewEngine::JoinWithParams >> KqpMergeCn::TopSortByDesc_Double_Limit3 [GOOD] >> KqpMergeCn::TopSortBy_Date_Limit4 >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] >> KqpNotNullColumns::UpdateOnNotNull [GOOD] >> KqpNotNullColumns::UpdateOnNotNullPg >> KqpNewEngine::Update+UseSink [GOOD] >> KqpNewEngine::Update-UseSink >> KqpSqlIn::KeyTypeMissmatch_Int [GOOD] >> KqpSqlIn::KeyTypeMissmatch_Str >> TTopicYqlTest::BadRequests [GOOD] >> KqpSort::ReverseOptimized >> KqpNotNullColumns::ReplaceNotNullPk [GOOD] >> KqpNotNullColumns::ReplaceNotNullPkPg >> TPersQueueTest::TestBigMessage [GOOD] >> TPersQueueTest::SetMeteringMode ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change 2025-03-28 09:28:57,746 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-28 09:28:58,406 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
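A note on the S-expression printed above for KqpSort::TopParameterFilter: it is the compiled physical plan, a KqpReadRangesSource full scan feeding a WideFilter on Value2, with the row budget computed as Min(1001, $limit) before the final Take. A query of roughly the following shape reproduces that plan; this is a reconstruction from the plan's KqpTable/StructType nodes, not the test's verbatim SQL, and the parameter-passing form is an assumption about the ydb Python SDK's query service.

import ydb

def run_top_filter(pool: ydb.QuerySessionPool, limit: int, value: int):
    # Filter plus parameterized LIMIT: the shape KQP compiles into
    # WideFilter + Take(Min(1001, $limit)) in the plan shown above.
    return pool.execute_with_retries(
        """
        DECLARE $limit AS Uint64;
        DECLARE $value AS Int32;
        SELECT Key, Value1, Value2
        FROM TwoShard
        WHERE Value2 != $value
        LIMIT $limit;
        """,
        {"$limit": (limit, ydb.PrimitiveType.Uint64),
         "$value": (value, ydb.PrimitiveType.Int32)},
    )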
Process tree before termination: pid rss ref pdirt 355104 646M 646M 564M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/2909/000c16/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu 356763 5.6G 5.5G 5.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2909/000c16/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_de 359427 395M 395M 361M └─ moto_server s3 --port 5863 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in 
_multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 117, in test_data_unchanged_after_ttl_change self.ydb_client.query(""" File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 200, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...nner/.ya/build/build_root/2909/000c16/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2909/000c16/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2909/000c16', '--source-root', '/home/runner/.ya/build/build_root/2909/000c16/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2909/000c16/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', 
'--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...nner/.ya/build/build_root/2909/000c16/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2909/000c16/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2909/000c16', '--source-root', '/home/runner/.ya/build/build_root/2909/000c16/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2909/000c16/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout",), {}) >> KqpNotNullColumns::JoinBothTablesWithNotNullPk-StreamLookup [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk+StreamLookup >> KqpNotNullColumns::InsertNotNullPg-useSink [GOOD] >> KqpNotNullColumns::FailedMultiEffects >> KqpReturning::ReturningWorksIndexedUpsert+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] Test command err: 2025-03-28T09:26:47.118231Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:47.213729Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:47.236994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 
2025-03-28T09:26:47.237359Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:47.245963Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:47.246182Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:47.246429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:47.246518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:47.246618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:47.246778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:47.246903Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:47.247018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:47.247092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:47.247199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.247311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:47.247444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:47.274307Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:47.274625Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:47.274710Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:47.274956Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:47.275158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:47.275249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:47.275301Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:47.275424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:47.275525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:47.275578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:47.275633Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:47.275830Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:47.275904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:47.275949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:47.275982Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:47.276169Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:47.276235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:47.276324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:47.276358Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:47.276438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:47.276479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:47.276512Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:47.276568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:47.276622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:47.276701Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:47.277109Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-03-28T09:26:47.277234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=62; 2025-03-28T09:26:47.277350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=57; 2025-03-28T09:26:47.277446Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-03-28T09:26:47.277623Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:47.277684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:47.277729Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:47.277934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:47.277980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.278010Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.278198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:47.278247Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:47.278276Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:47.278411Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:47.278446Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:47.278489Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:47.278642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:47.279602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:47.279658Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... lumn_id:8;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_ran
ge:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_
range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[
NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:44;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:46;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););(portion_id:51;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););; 2025-03-28T09:29:13.720764Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:6068:8060];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=3; 2025-03-28T09:29:13.722134Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:6068:8060];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> KqpNewEngine::LocksSingleShard [GOOD] >> KqpNewEngine::LocksMultiShard >> KqpNewEngine::FlatmapLambdaMutiusedConnections [GOOD] >> KqpNewEngine::EmptyMapWithBroadcast >> DataShardReadIteratorBatchMode::RangeFull [GOOD] >> DataShardReadIteratorBatchMode::RangeFromInclusive >> KqpRanges::UpdateWhereInNoFullScan-UseSink [GOOD] >> KqpRanges::UpdateWhereInWithNull ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TTopicYqlTest::BadRequests [GOOD] Test command err: 2025-03-28T09:25:18.579936Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789073798729602:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.579989Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:18.620179Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789073501901028:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.620318Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001209/r3tmp/tmp9lxB5H/pdisk_1.dat 2025-03-28T09:25:18.822657Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:25:18.824446Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:25:19.095204Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:19.103051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:19.103137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:19.104431Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:19.104498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:19.111763Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:25:19.111887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:19.114404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64334, node 1 2025-03-28T09:25:19.209309Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001209/r3tmp/yandexq3sK02.tmp 2025-03-28T09:25:19.209345Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001209/r3tmp/yandexq3sK02.tmp 2025-03-28T09:25:19.209531Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001209/r3tmp/yandexq3sK02.tmp 2025-03-28T09:25:19.209660Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:25:19.314530Z INFO: TTestServer started on Port 21014 GrpcPort 64334 TClient is connected to server localhost:21014 PQClient connected to localhost:64334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:25:19.737549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-28T09:25:19.811509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
waiting...
2025-03-28T09:25:22.158832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789090978599882:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:22.158906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789090978599873:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:22.158960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:22.162907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:25:22.180444Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789090978599890:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:25:22.445279Z node 1 :TX_PROXY ERROR: Actor# [1:7486789090978599979:2754] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:22.470964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:25:22.473451Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789090681770620:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:25:22.473728Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTA2M2UzNGItMTdiNmRjNDQtZGZhZmRhZjgtODY1NWYyNA==, ActorId: [2:7486789090681770581:2312], ActorState: ExecuteState, TraceId: 01jqe140630kq2d0crqzn4sd70, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:25:22.474042Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789090978599996:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:25:22.474240Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2QwM2Q5YjctYzQzNjg2Y2EtMjkyMmMzZTgtMTg2NjQ3MzQ=, ActorId: [1:7486789090978599870:2337], ActorState: ExecuteState, TraceId: 01jqe14034807m2gkwmrp56erj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:25:22.476679Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:25:22.476679Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:25:22.554923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:25:22.679139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:25:22.944949Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqe140np90ct9es1b6x0fbqc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzZkYTIzZjgtOTNjMmQzMDctYTRlZGIzMmEtM2NhZjRkYmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7486789090978600432:3088] 2025-03-28T09:25:23.579669Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789073798729602:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:23.579749Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:23.625863Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486789073501901028:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:23.625912Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic1 with 2 partitions CallPersQueueGRPC request to localhost:64334 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } 2025-03-28T09:25:29.009110Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:64334 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic1" NumPartitions: 2 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSi ... } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } } BootstrapConfig { } SourceActor { RawX1: 7486790019187386210 RawX2: 107374184608 } Partitions { Partition { PartitionId: 0 } } 2025-03-28T09:29:11.890380Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:29:11.901215Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:29:11.901265Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state CALCULATED 2025-03-28T09:29:11.901292Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State CALCULATED 2025-03-28T09:29:11.901322Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 State CALCULATED FrontTxId 281474976710676 2025-03-28T09:29:11.901349Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState WAIT_RS 2025-03-28T09:29:11.901398Z node 26 :PERSQUEUE DEBUG: 
[PQ: 72075186224037892] TxId 281474976710676 moved from CALCULATED to WAIT_RS 2025-03-28T09:29:11.901464Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 2025-03-28T09:29:11.901509Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveParticipantsDecision 1 2025-03-28T09:29:11.901583Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState EXECUTING 2025-03-28T09:29:11.901616Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 moved from WAIT_RS to EXECUTING 2025-03-28T09:29:11.901643Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 0, Expected 1 2025-03-28T09:29:11.901751Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1743154151914, TxId 281474976710676 2025-03-28T09:29:11.902375Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:29:11.902409Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:29:11.902444Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:29:11.902483Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:29:11.902500Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:29:11.902518Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cc1 2025-03-28T09:29:11.902534Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uc1 2025-03-28T09:29:11.902550Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cc2 2025-03-28T09:29:11.902566Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uc2 2025-03-28T09:29:11.902582Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] _config_0 2025-03-28T09:29:11.902614Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:29:11.902646Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:29:11.902711Z node 26 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-28T09:29:11.906763Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvTxCommitDone Step 1743154151914, TxId 281474976710676, Partition 0 2025-03-28T09:29:11.906819Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTING 2025-03-28T09:29:11.906850Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State EXECUTING 2025-03-28T09:29:11.906876Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 State EXECUTING FrontTxId 281474976710676 2025-03-28T09:29:11.906890Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:29:11.906896Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-03-28T09:29:11.906933Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId: 281474976710676 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-03-28T09:29:11.906991Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] complete TxId 281474976710676 2025-03-28T09:29:11.907614Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } 2025-03-28T09:29:11.907799Z node 26 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:29:11.907981Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete partitions for TxId 281474976710676 2025-03-28T09:29:11.908040Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState EXECUTED 2025-03-28T09:29:11.908095Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 moved from EXECUTING to EXECUTED 2025-03-28T09:29:11.908722Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710676] save tx TxId: 281474976710676 State: EXECUTED MinStep: 1743154151732 MaxStep: 18446744073709551615 Step: 1743154151914 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { 
MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--legacy--topic1" Version: 0 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "legacy" Ident: "legacy" Topic: "topic1" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--legacy--topic1" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } ReadRuleGenerations: 0 ReadRuleGenerations: 0 AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "c1" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } Consumers { Name: "c2" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 } } BootstrapConfig { } SourceActor { RawX1: 7486790019187386210 RawX2: 107374184608 } Partitions { Partition { PartitionId: 0 } } 2025-03-28T09:29:11.909040Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:29:11.918216Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:29:11.918289Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-03-28T09:29:11.918332Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State EXECUTED 2025-03-28T09:29:11.918358Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 State EXECUTED FrontTxId 281474976710676 2025-03-28T09:29:11.918394Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-03-28T09:29:11.918424Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState WAIT_RS_ACKS 2025-03-28T09:29:11.918446Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676 moved from EXECUTED to WAIT_RS_ACKS 2025-03-28T09:29:11.918478Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710676] PredicateAcks: 0/0 2025-03-28T09:29:11.918488Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-28T09:29:11.918509Z node 26 :PERSQUEUE DEBUG: [TxId: 281474976710676] PredicateAcks: 0/0 2025-03-28T09:29:11.918534Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976710676 to the list for deletion 2025-03-28T09:29:11.918560Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, NewState DELETING 2025-03-28T09:29:11.918593Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976710676 2025-03-28T09:29:11.918666Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:29:11.922778Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle 
TEvKeyValue::TEvResponse (WRITE_TX_COOKIE)
2025-03-28T09:29:11.922832Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING
2025-03-28T09:29:11.922858Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710676, State DELETING
2025-03-28T09:29:11.922881Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976710676
>> KqpNewEngine::MultiEffectsOnSameTable [GOOD]
>> KqpNewEngine::MultiUsagePrecompute
>> KqpSqlIn::TableSource
>> TPersQueueTest::EventBatching [GOOD]
>> TPersQueueTest::DisableWrongSettings
>> KqpNewEngine::DeleteWithInputMultiConsumption-UseSink [GOOD]
>> KqpNewEngine::DeleteWithInputMultiConsumptionLimit+UseSink
>> KqpMergeCn::TopSortBy_Interval_Limit3 [GOOD]
>> KqpMergeCn::TopSortBy_Decimal_Limit5
>> KqpRanges::WhereInSubquery
>> KqpRanges::IsNullInJsonValue [GOOD]
>> KqpRanges::IsNotNullInValue
>> KqpSqlIn::SecondaryIndex_PgKey
>> KqpNewEngine::DeleteON [GOOD]
>> KqpNewEngine::DeleteByKey
>> KqpNewEngine::PrunePartitionsByLiteral [GOOD]
>> KqpNewEngine::PrunePartitionsByExpr
>> KqpReturning::ReturningWorksIndexedInsert+QueryService [GOOD]
>> KqpReturning::ReturningWorksIndexedInsert-QueryService
>> KqpNotNullColumns::ReplaceNotNullPkPg [GOOD]
>> KqpNotNullColumns::SelectNotNullColumns
>> KqpNewEngine::StreamLookupWithView [GOOD]
>> KqpNewEngine::Truncated
>> KqpSort::ReverseFirstKeyOptimized [GOOD]
>> KqpSort::ReverseLimitOptimized
>> KqpNewEngine::FullScanCount [GOOD]
>> KqpNotNullColumns::UpdateOnNotNullPg [GOOD]
>> KqpNewEngine::Update-UseSink [GOOD]
>> KqpNewEngine::UpdateFromParams
>> KqpNotNullColumns::JoinRightTableWithNotNullColumns-StreamLookup [GOOD]
>> KqpNotNullColumns::OptionalParametersDataQuery
>> KqpNewEngine::AsyncIndexUpdate [GOOD]
>> KqpNewEngine::AutoChooseIndex
>> KqpNewEngine::JoinWithParams [GOOD]
>> KqpNewEngine::JoinIdxLookupWithPredicate
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test
2025-03-28 09:28:57,952 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-28 09:28:58,459 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination: pid rss ref pdirt 355184 710M 712M 629M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/2909/000c11/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu 356798 2.7G 2.7G 2.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2909/000c11/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_m 359459 458M 458M 425M └─ moto_server s3 --port 12739 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in 
_multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test if not self.wait_for( File "ydb/tests/olap/ttl_tiering/base.py", line 68, in wait_for if condition_func(): File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 172, in lambda: get_rows_in_portion(bucket1_path) == self.row_count File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 160, in get_rows_in_portion logger.info(f"portions: {portions_stat}, blobs: {table.get_blob_stat_by_tier()}") File "ydb/tests/olap/common/column_table_helper.py", line 41, in get_blob_stat_by_tier results = self.ydb_client.query(stmt) File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 200, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...apture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2909/000c11/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/ytest.report.trace', '--build-root', 
'/home/runner/.ya/build/build_root/2909/000c11', '--source-root', '/home/runner/.ya/build/build_root/2909/000c11/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2909/000c11/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...apture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2909/000c11/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2909/000c11', '--source-root', '/home/runner/.ya/build/build_root/2909/000c11/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2909/000c11/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout",), {}) >> KqpMergeCn::TopSortBy_Date_Limit4 [GOOD] >> KqpMergeCn::TopSortByDesc_Datetime_Limit3 >> KqpRanges::NullInPredicateRow [GOOD] >> KqpRanges::NoFullScanAtScanQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::FullScanCount [GOOD] Test command err: Trying to start YDB, gRPC: 9483, MsgBus: 24718 2025-03-28T09:28:19.170626Z node 1 :METADATA_PROVIDER 
WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789850467523197:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:19.170658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ac5/r3tmp/tmpMDCjes/pdisk_1.dat 2025-03-28T09:28:19.652406Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:19.682680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:19.682813Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:19.684705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9483, node 1 2025-03-28T09:28:19.755242Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:19.755284Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:19.755296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:19.755402Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24718 TClient is connected to server localhost:24718 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:20.330910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:20.349806Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:20.359384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:28:20.496085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:28:20.660006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:28:20.746198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:28:22.450605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789863352426876:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:22.450805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:22.784231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:22.824486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:22.876525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:22.932501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:22.966328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:23.020580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:23.113880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789867647394687:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:23.113953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:23.114024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789867647394692:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:23.117716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:23.130163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789867647394694:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:28:23.209038Z node 1 :TX_PROXY ERROR: Actor# [1:7486789867647394752:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:24.175314Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789850467523197:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:24.175377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18483, MsgBus: 1663 2025-03-28T09:28:25.513508Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789873033902489:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:25.513556Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ac5/r3tmp/tmpacuEKe/pdisk_1.dat 2025-03-28T09:28:25.613719Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18483, node 2 2025-03-28T09:28:25.684813Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:25.684910Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:25.715410Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:25.743236Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:25.743261Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:25.743268Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:25.743382Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1663 TClient is connected to server localhost:1663 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:28:26.248109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:26.255279Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:28:26.275995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:26.365679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:26.532478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2 ... hemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:06.013063Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790030129235844:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:06.013156Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:06.029089Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:06.087101Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:06.165937Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:06.279790Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790051604074612:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:06.279899Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:06.280212Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790051604074617:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:06.285560Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:06.306259Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790051604074619:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:06.407091Z node 6 :TX_PROXY ERROR: Actor# [6:7486790051604074675:3462] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 25417, MsgBus: 25356 2025-03-28T09:29:09.859868Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486790066000561321:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:09.859923Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ac5/r3tmp/tmpu4gUyp/pdisk_1.dat 2025-03-28T09:29:10.132107Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:10.134517Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:10.134607Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:10.137545Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25417, node 7 2025-03-28T09:29:10.215356Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:10.215386Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:10.215398Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:10.215555Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25356 TClient is connected to server localhost:25356 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:11.065723Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:29:11.073193Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:11.084964Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:11.244575Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:11.486394Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:11.581665Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:14.421255Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790087475399584:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:14.421366Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:14.492954Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:14.547915Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:14.595397Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:14.655234Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:14.715621Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:14.789764Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:14.861295Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486790066000561321:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:14.861461Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:14.904678Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790087475400101:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:14.904770Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:14.904945Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790087475400106:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:14.908631Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:14.923789Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486790087475400108:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:15.021832Z node 7 :TX_PROXY ERROR: Actor# [7:7486790091770367462:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Warning: Type annotation, code: 1030
:3:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:3:33: Warning: At function: Filter, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
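The warning block above (and its verbatim repeat below, emitted by the second run of the same query) is advisory: under YQL's legacy IN semantics, an IN over nullable arguments or a collection containing NULL can evaluate to NULL rather than false, silently dropping rows from the enclosing Filter. The pragma named in the warning opts the query into ANSI behavior. A minimal sketch of the suggested fix; the table and column names here are hypothetical, not taken from the test:

    -- Opt into ANSI semantics for IN over empty or nullable collections,
    -- as suggested by warning code 1108.
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    SELECT Key, Value
    FROM SomeTable             -- hypothetical table with a nullable Value column
    WHERE Value IN ("a", "b"); -- nullable argument: comparison now follows ANSI rules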
: Warning: Type annotation, code: 1030
:3:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:3:33: Warning: At function: Filter, At function: Coalesce
:3:58: Warning: At function: SqlIn
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
>> KqpNotNullColumns::FailedMultiEffects [GOOD]
>> KqpNewEngine::LocksInRoTx [GOOD]
>> KqpNewEngine::JoinWithPrecompute
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::UpdateOnNotNullPg [GOOD]
Test command err: Trying to start YDB, gRPC: 3617, MsgBus: 15901
2025-03-28T09:28:48.307020Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789974897120550:2065];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:28:48.307069Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a6e/r3tmp/tmpn0sZ4K/pdisk_1.dat
2025-03-28T09:28:48.901920Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:28:48.921915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:28:48.922005Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:28:48.924697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 3617, node 1
2025-03-28T09:28:49.014019Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:28:49.014046Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:28:49.014054Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:28:49.014211Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:15901
TClient is connected to server localhost:15901
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:28:49.619374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:28:49.639561Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:51.735740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789987782023095:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:51.735849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:52.032883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:28:52.188245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789992076990495:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:52.188355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:52.188718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789992076990500:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:52.193592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:28:52.208639Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T09:28:52.209011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789992076990502:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:28:52.303831Z node 1 :TX_PROXY ERROR: Actor# [1:7486789992076990554:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:52.753830Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789992076990620:2364], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:61: Error: At function: KiUpdateTable!
:1:61: Error: Cannot update primary key column: Key 2025-03-28T09:28:52.754993Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjRlN2FlZDktZGViNTZjZDItYjllYTMxYy1iYzYwMTA5, ActorId: [1:7486789987782023092:2329], ActorState: ExecuteState, TraceId: 01jqe1adqg5b736b3dyh3w1eq9, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:28:52.786969Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789992076990629:2368], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiUpdateTable!
:1:63: Error: Cannot update primary key column: Key 2025-03-28T09:28:52.787365Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjRlN2FlZDktZGViNTZjZDItYjllYTMxYy1iYzYwMTA5, ActorId: [1:7486789987782023092:2329], ActorState: ExecuteState, TraceId: 01jqe1ads3brxcq5mtf8s9347w, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 19563, MsgBus: 14742 2025-03-28T09:28:53.718586Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789994329007238:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:53.719310Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a6e/r3tmp/tmpRBLAbN/pdisk_1.dat 2025-03-28T09:28:53.927334Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19563, node 2 2025-03-28T09:28:54.056605Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:54.056688Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:54.057534Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:54.101078Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:54.101106Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:54.101113Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:54.101220Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14742 TClient is connected to server localhost:14742 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:54.572770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:54.580041Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:28:57.130435Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790011508877075:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:57.130553Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:57.144134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:28:57.198833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790011508877175:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:57.198921Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:57.199135Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790011508877180:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:57.203419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe ... ode 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:09.499355Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:09.499476Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29056 TClient is connected to server localhost:29056 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:29:10.188689Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:29:12.986873Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790078835533067:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:12.986953Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.002605Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:13.084973Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790083130500466:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.085076Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.086034Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790083130500471:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.089800Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:29:13.103578Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790083130500473:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:29:13.189456Z node 5 :TX_PROXY ERROR: Actor# [5:7486790083130500524:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:13.591565Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7486790083130500599:2366], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:63: Error: At function: KiWriteTable!
:1:45: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:45: Error: Failed to convert 'Value': Null to String
:1:45: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-28T09:29:13.592962Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=OTkxNTc1NTgtNWIyNzBjYmEtMTQ3Yjc4Yi1mMjdjYmI1YQ==, ActorId: [5:7486790078835533064:2329], ActorState: ExecuteState, TraceId: 01jqe1b238ejm434gk5vkqs9y5, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 6800, MsgBus: 29698 2025-03-28T09:29:14.496562Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790087122879969:2236];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:14.496813Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a6e/r3tmp/tmpgYoIcY/pdisk_1.dat 2025-03-28T09:29:14.618042Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:14.633603Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:14.633697Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:14.635343Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6800, node 6 2025-03-28T09:29:14.691908Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:14.691928Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:14.691940Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:14.692078Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29698 TClient is connected to server localhost:29698 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:15.301654Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
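The GENERIC_ERROR compile failures in this test case show two distinct rejections. First, KiUpdateTable refuses any UPDATE that assigns to a primary-key column (Cannot update primary key column: Key): changing a key effectively moves the row, so it has to be written as a delete plus an insert of the new key. Second, the type annotator cannot convert a bare NULL literal into the non-optional String column (Failed to convert 'Value': Null to String). A hedged YQL sketch of both fixes; the table name and exact schema are assumptions reconstructed from the error messages:

    -- Rejected: UPDATE TestTable SET Key = 2 WHERE Key = 1;
    -- A key change is expressed as delete + insert in one transaction instead:
    DELETE FROM TestTable WHERE Key = 1;
    INSERT INTO TestTable (Key, Value) VALUES (2u, "moved row");

    -- Rejected: UPSERT INTO TestTable (Key, Value) VALUES (1, NULL);
    -- A bare NULL literal has no conversion to the non-optional String column,
    -- so a concrete value (or a correctly typed parameter) is required:
    UPSERT INTO TestTable (Key, Value) VALUES (1u, "some value");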
2025-03-28T09:29:15.322918Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:18.467327Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790104302749635:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.467561Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.493985Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:18.603170Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790104302749740:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.603302Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.603645Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790104302749745:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.608886Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:29:18.625242Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790104302749747:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:29:18.694493Z node 6 :TX_PROXY ERROR: Actor# [6:7486790104302749798:2396] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:19.362656Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:29:19.377414Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7486790108597717168:2366], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Value, code: 2031
2025-03-28T09:29:19.378329Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=MzE1YmFiNzEtZTU5NDZiNWMtNDYwMDdmZGYtYThlYmZlZjk=, ActorId: [6:7486790104302749617:2329], ActorState: ExecuteState, TraceId: 01jqe1b7gfa40erwb35hz5abks, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id:
2025-03-28T09:29:19.442912Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790087122879969:2236];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:29:19.442991Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
>> KqpSort::ReverseOptimized [GOOD]
>> KqpSort::ReverseOptimizedWithPredicate
>> KqpNewEngine::LocksMultiShard [GOOD]
>> KqpNewEngine::LocksMultiShardOk
>> KqpRanges::MergeRanges [GOOD]
>> KqpRanges::Like
>> KqpReturning::ReturningWorks-QueryService [GOOD]
>> KqpReturning::ReturningWorksIndexedDelete+QueryService
>> KqpSqlIn::KeyTypeMissmatch_Str [GOOD]
>> KqpSqlIn::InWithCast
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::FailedMultiEffects [GOOD]
Test command err: Trying to start YDB, gRPC: 7947, MsgBus: 31986
2025-03-28T09:28:49.437883Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789976341166899:2129];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:28:49.440281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a64/r3tmp/tmppDGSoB/pdisk_1.dat
2025-03-28T09:28:49.919385Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:28:49.921610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:28:49.921680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:28:49.933770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 7947, node 1
2025-03-28T09:28:50.091278Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:28:50.091299Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:28:50.091306Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:28:50.091404Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:31986
TClient is connected to server localhost:31986
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:50.830027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:50.860740Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:52.998857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789989226069382:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:52.999002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:53.264155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:28:53.434842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789993521036782:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:53.434943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:53.435113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789993521036787:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:53.439390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:28:53.464962Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2025-03-28T09:28:53.465561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789993521036789:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:28:53.570452Z node 1 :TX_PROXY ERROR: Actor# [1:7486789993521036843:2400] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:53.917012Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789993521036909:2363], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestInsertNotNullPk, code: 2029 2025-03-28T09:28:53.918248Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDUzNzVhMTgtNzA2YmMxYzktYmE2Y2UzNDktZDdhYzQwNDk=, ActorId: [1:7486789989226069363:2329], ActorState: ExecuteState, TraceId: 01jqe1aew2fjvm3xmn0f10ny44, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-03-28T09:28:53.949535Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789993521036918:2367], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:47: Error: Failed to convert 'Key': Null to Uint64
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-28T09:28:53.950911Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDUzNzVhMTgtNzA2YmMxYzktYmE2Y2UzNDktZDdhYzQwNDk=, ActorId: [1:7486789989226069363:2329], ActorState: ExecuteState, TraceId: 01jqe1aexa9k4a9xkps454t9hw, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 8505, MsgBus: 17274 2025-03-28T09:28:54.771468Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789998028844123:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:54.771495Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a64/r3tmp/tmp8DJpMm/pdisk_1.dat 2025-03-28T09:28:55.059980Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:55.061916Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:55.061986Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:55.063690Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8505, node 2 2025-03-28T09:28:55.191061Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:55.191087Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:55.191095Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:55.191206Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17274 TClient is connected to server localhost:17274 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:55.632231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
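The two compile errors above are the mirror-image constraints on the primary key of /Root/TestInsertNotNullPk: an insert must mention every key column (Missing key column in input: Key, code 2029, PRECONDITION_FAILED), and the key value it does supply cannot be NULL (Failed to convert 'Key': Null to Uint64, code 2031, GENERIC_ERROR). A sketch under the schema implied by the error message (Key Uint64 primary key, Value String?):

    -- code 2029: key column absent from the input row
    -- INSERT INTO TestInsertNotNullPk (Value) VALUES ("v");

    -- code 2031: key column present but NULL, which Uint64 cannot hold
    -- INSERT INTO TestInsertNotNullPk (Key, Value) VALUES (NULL, "v");

    -- Accepted: the primary key is fully specified with a non-NULL value.
    INSERT INTO TestInsertNotNullPk (Key, Value) VALUES (1u, "v");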
2025-03-28T09:28:55.640646Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:28:58.081518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790015208713951:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:58.081675Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:58.088826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:28:58.170899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790015208714054:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:58.170984Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:58.171357Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790015208714059:2343], Databas ... d: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:29:11.144280Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:29:11.150018Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:13.955965Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790079875507698:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.956096Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.985321Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:14.048085Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790084170475095:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:14.048191Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:14.048329Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790084170475100:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:14.084342Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:29:14.098651Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790084170475102:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:29:14.157630Z node 5 :TX_PROXY ERROR: Actor# [5:7486790084170475153:2394] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:14.508846Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7486790084170475221:2363], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2025-03-28T09:29:14.510474Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=ZWE2YjE2OWMtYWNmYzJjZDgtOWFhYjQzMTItY2M5ZTE3NTc=, ActorId: [5:7486790079875507679:2329], ActorState: ExecuteState, TraceId: 01jqe1b300batz3r5esxk0rq4n, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-28T09:29:14.741201Z node 5 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:29:14.752225Z node 5 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [5:7486790084170475230:2367], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Value, code: 2031 2025-03-28T09:29:14.753945Z node 5 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=5&id=ZWE2YjE2OWMtYWNmYzJjZDgtOWFhYjQzMTItY2M5ZTE3NTc=, ActorId: [5:7486790079875507679:2329], ActorState: ExecuteState, TraceId: 01jqe1b30n7dg95v0q5atty5pm, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: Trying to start YDB, gRPC: 26620, MsgBus: 14075 2025-03-28T09:29:15.799019Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790089748917043:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:15.799106Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a64/r3tmp/tmpfWc59i/pdisk_1.dat 2025-03-28T09:29:15.977169Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:15.999164Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:15.999265Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:16.000962Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26620, node 6 2025-03-28T09:29:16.095334Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:16.095357Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:16.095366Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:16.095505Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14075 TClient is connected to server localhost:14075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:16.740178Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
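[editor's note] The BAD_REQUEST issues above are YDB's NOT NULL column checks surfacing at two different stages: code 2032 ("Missing not null column in input") is raised during type annotation when a write omits a NOT NULL column entirely, while code 2031 ("Tried to insert NULL value into NOT NULL column") fires when an explicit NULL reaches such a column (here via TKqpEnsure in the literal executer). The node-6 GENERIC_ERROR further down is the same constraint caught during input type conversion, when a Struct<'Key':Int32,'Value':Null> literal cannot be converted to the table's Struct<'Key':Uint64,'Value':String>. A minimal sketch that would reproduce both failures with the ydb Python SDK, assuming a reachable local cluster; the endpoint, database, and table name are hypothetical:

```python
import ydb

# Hypothetical connection parameters; the table mirrors the shape used by
# the test: an optional Key and a NOT NULL Value column.
driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/Root")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

pool.retry_operation_sync(lambda s: s.execute_scheme(
    "CREATE TABLE demo_not_null (Key Uint64, Value String NOT NULL, PRIMARY KEY (Key));"
))

def run(session, query):
    session.transaction(ydb.SerializableReadWrite()).execute(query, commit_tx=True)

# Code 2032: the NOT NULL column is absent from the input columns, so
# compilation fails with "Missing not null column in input: Value".
try:
    pool.retry_operation_sync(run, None, "UPSERT INTO demo_not_null (Key) VALUES (1);")
except ydb.Error as err:
    print("expected:", err)

# Code 2031: an explicit NULL aimed at the NOT NULL column; the write is
# rejected, either at execution or already during input type conversion.
try:
    pool.retry_operation_sync(
        run, None, "UPSERT INTO demo_not_null (Key, Value) VALUES (2, NULL);")
except ydb.Error as err:
    print("expected:", err)
```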
2025-03-28T09:29:16.751701Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:20.027873Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790111223754009:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:20.027982Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:20.047428Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:20.148371Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790111223754112:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:20.148475Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:20.148839Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790111223754117:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:20.153689Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:29:20.168134Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790111223754119:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:29:20.226368Z node 6 :TX_PROXY ERROR: Actor# [6:7486790111223754170:2396] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:20.363942Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7486790111223754209:2357], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:55: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64,'Value':String>
:3:55: Error: Failed to convert 'Value': Null to String
:3:55: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-28T09:29:20.365915Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NzUyZWRlMDgtMzNlNGEzYzUtMmFmMDVkNDMtODM4NjExMGI=, ActorId: [6:7486790106928786695:2330], ActorState: ExecuteState, TraceId: 01jqe1b8p4dqxmz9v5pv7rt5cq, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:29:20.399101Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill >> KqpNotNullColumns::UpsertNotNullPk >> KqpExtractPredicateLookup::SimpleRange >> KqpNewEngine::PkRangeSelect1 >> KqpNewEngine::EmptyMapWithBroadcast [GOOD] >> KqpNewEngine::FlatMapLambdaInnerPrecompute >> KqpNotNullColumns::SelectNotNullColumns [GOOD] >> KqpNotNullColumns::SecondaryKeyWithNotNullColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_retry.py::TestRetry::test_low_rate[kikimr0] 2025-03-28 09:28:49,616 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-28 09:28:49,887 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 351359 697M 699M 534M ydb-tests-fq-multi_plane --basetemp /home/runner/.ya/build/build_root/2909/0010ba/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module 397421 455M 22.0M 356M ├─ ydb-tests-fq-multi_plane --basetemp /home/runner/.ya/build/build_root/2909/0010ba/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest- 477329 1.4G 1.4G 1.0G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2909/0010ba/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_ret Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File 
"contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/fq/multi_plane/test_retry.py", line 96, in test_low_rate kikimr.compute_plane.stop() File "ydb/tests/tools/fq_runner/kikimr_runner.py", line 62, in stop self.kikimr_cluster.stop(kill=False) File "ydb/tests/library/harness/kikimr_runner.py", line 487, in stop thread.join() File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock() File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout): File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Thread 0x00007f25e8a52640 (most recent call first): File "ydb/tests/library/common/wait_for.py", line 19 in wait_for File "ydb/tests/library/harness/daemon.py", line 193 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 208 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 462 in __stop_node File "ydb/tests/library/harness/kikimr_runner.py", line 476 in stop_node File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007f25e6412640 (most recent call first): File "ydb/tests/library/common/wait_for.py", line 19 in wait_for File "ydb/tests/library/harness/daemon.py", line 193 in stop File 
"ydb/tests/library/harness/kikimr_runner.py", line 208 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 462 in __stop_node File "ydb/tests/library/harness/kikimr_runner.py", line 476 in stop_node File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007f25e7732640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/tools/python3/Lib/queue.py", line 180 in get File "contrib/python/ydb/py3/ydb/_sp_impl.py", line 376 in events_loop File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007f25ec3b2640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007f25e9d72640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/tools/python3/Lib/queue.py", line 180 in get File "contrib/python/ydb/py3/ydb/_sp_impl.py", line 376 in events_loop File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007f25f1938640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007f261d3aa940 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 1169 in _wait_for_tstate_lock File "contrib/tools/python3/Lib/threading.py", line 1149 in join File "ydb/tests/library/harness/kikimr_runner.py", line 487 in stop File "ydb/tests/tools/fq_runner/kikimr_runner.py", line 62 in stop File "ydb/tests/tools/fq_runner/kikimr_runner.py", line 743 in stop File "ydb/tests/fq/multi_plane/test_retry.py", line 51 in kikimr File "contrib/python/pytest/py3/_pytest/fixtures.py", line 911 in _teardown_yield_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1024 in finish File "contrib/python/pytest/py3/_pytest/fixtures.py", line 701 in File "contrib/python/pytest/py3/_pytest/runner.py", line 526 in teardown_exact File "contrib/python/pytest/py3/_pytest/runner.py", line 108 in pytest_sessionfinish File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 306 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 318 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169 in main File 
"library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...etemp', '/home/runner/.ya/build/build_root/2909/0010ba/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2909/0010ba/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_retry/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2909/0010ba', '--source-root', '/home/runner/.ya/build/build_root/2909/0010ba/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2909/0010ba/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_retry/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/fq/multi_plane', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/fq/multi_plane', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_retry.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...etemp', '/home/runner/.ya/build/build_root/2909/0010ba/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2909/0010ba/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_retry/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2909/0010ba', '--source-root', '/home/runner/.ya/build/build_root/2909/0010ba/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2909/0010ba/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_retry/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/fq/multi_plane', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/fq/multi_plane', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 
'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_retry.py']' stopped by 600 seconds timeout",), {}) 2025-03-28 09:29:24,234 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-03-28 09:29:24,234 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores >> DataShardReadIteratorBatchMode::RangeFromInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeFromNonInclusive >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit+UseSink [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit-UseSink >> KqpRanges::UpdateWhereInWithNull [GOOD] >> KqpRanges::UpdateWhereInMultipleUpdate >> KqpNotNullColumns::JoinLeftTableWithNotNullPk+StreamLookup [GOOD] >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup >> KqpNewEngine::PrunePartitionsByExpr [GOOD] >> KqpNewEngine::PruneWritePartitions+UseSink >> KqpRanges::WhereInSubquery [GOOD] >> KqpReturning::ReturningTwice >> KqpNewEngine::PureTxMixedWithDeferred >> KqpReturning::ReturningWorksIndexedUpsert+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedUpsert-QueryService |89.9%| [TA] $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} |89.9%| [TA] {RESULT} $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpNewEngine::MultiUsagePrecompute [GOOD] >> KqpNewEngine::MultiUsageInnerConnection >> KqpNewEngine::Truncated [GOOD] >> KqpNewEngine::StaleRO_Immediate >> KqpSort::ReverseLimitOptimized [GOOD] >> KqpSort::ReverseEightShardOptimized >> KqpSort::TopSortTableExprOffset >> KqpNewEngine::DeleteByKey [GOOD] >> KqpNewEngine::DeleteWithInputMultiConsumption+UseSink >> KqpSqlIn::TableSource [GOOD] >> KqpSqlIn::SimpleKey_Negated >> KqpMergeCn::TopSortBy_Decimal_Limit5 [GOOD] >> KqpRanges::IsNotNullInValue [GOOD] >> KqpRanges::IsNotNullInJsonValue2 >> KqpNewEngine::UpdateFromParams [GOOD] >> KqpNewEngine::UpsertEmptyInput >> KqpNewEngine::JoinIdxLookupWithPredicate [GOOD] >> KqpNewEngine::JoinPure >> KqpNotNullColumns::UpsertNotNullPk [GOOD] >> KqpNotNullColumns::UpsertNotNullPkPg >> KqpSort::ReverseOptimizedWithPredicate [GOOD] >> KqpSort::ReverseMixedOrderNotOptimized >> KqpSqlIn::SecondaryIndex_PgKey [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey >> KqpNotNullColumns::OptionalParametersDataQuery [GOOD] >> KqpNotNullColumns::OptionalParametersScanQuery >> KqpMergeCn::TopSortByDesc_Datetime_Limit3 [GOOD] >> KqpMergeCn::TopSortByDesc_Bool_And_PKUint64_Limit4 >> KqpNewEngine::LocksMultiShardOk [GOOD] >> KqpNewEngine::LocksNoMutations ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::TopSortBy_Decimal_Limit5 [GOOD] Test command err: Trying to start YDB, gRPC: 2399, MsgBus: 18261 2025-03-28T09:28:31.197687Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789902158707343:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:31.197733Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ab0/r3tmp/tmpj6lhWi/pdisk_1.dat 2025-03-28T09:28:31.699210Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:31.728965Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:31.729050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:31.732051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2399, node 1 2025-03-28T09:28:31.823472Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:31.823492Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:31.823504Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:31.823638Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18261 TClient is connected to server localhost:18261 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:32.542648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:32.560618Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:32.574644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:32.770429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:32.937325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:33.014327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:34.764610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789915043610884:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:34.764720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:35.051120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:35.089530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:35.119311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:35.150634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:35.220968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:35.289712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:35.339010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789919338578698:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:35.339088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:35.339162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789919338578703:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:35.342564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:35.357077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789919338578705:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:28:35.455044Z node 1 :TX_PROXY ERROR: Actor# [1:7486789919338578761:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:36.197884Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789902158707343:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:36.197955Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:28:36.565787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:28:37.544531Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154117565, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 19127, MsgBus: 64760 2025-03-28T09:28:38.362805Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789929217622608:2150];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:38.405085Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ab0/r3tmp/tmpJ52PVO/pdisk_1.dat 2025-03-28T09:28:38.493583Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:38.524574Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:38.524653Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19127, node 2 2025-03-28T09:28:38.525839Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:38.570652Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:38.570675Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:38.570701Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:38.570804Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64760 TClient is connected to server localhost:64760 WaitRootIsUp 'Root'... 
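[editor's note] This block is the output of KqpMergeCn::TopSortBy_Decimal_Limit5, one of the TopSortBy_* cases listed above. Judging by the test names, they exercise the top-sort shape: an ORDER BY that matches an already-sorted read order, combined with LIMIT, lets the engine merge per-shard streams and stop early instead of materializing a full sort. A hedged sketch of that query shape with the ydb Python SDK; table and column names are hypothetical, and whether the merge optimization actually applies depends on the table's sort order and server version:

```python
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/Root")  # hypothetical
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

# Top-sort shape: ORDER BY ... LIMIT n. If rows arrive from shards already
# ordered by Dec, sorted streams can be merged and cut off after 5 rows.
TOP_SORT = """
SELECT Key, Dec
FROM demo_sorted      -- assume a Decimal(22,9) column named Dec
ORDER BY Dec DESC
LIMIT 5;
"""

rows = pool.retry_operation_sync(
    lambda s: s.transaction(ydb.SerializableReadWrite()).execute(TOP_SORT, commit_tx=True)
)[0].rows
for row in rows:
    print(row.Key, row.Dec)
```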
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:39.105879Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:39.151895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:39.289355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 wai ... :29:13.203319Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:13.270978Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:13.349670Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:13.429932Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:13.519114Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790080885520738:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.519222Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.522830Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790080885520743:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.527733Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:13.544415Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790080885520745:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:13.638335Z node 6 :TX_PROXY ERROR: Actor# [6:7486790080885520800:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:14.090798Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790063705649248:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:14.101240Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:15.190384Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:16.409471Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154156443, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 26223, MsgBus: 16692 2025-03-28T09:29:17.747068Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486790096778123274:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:17.747189Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ab0/r3tmp/tmpKK22MA/pdisk_1.dat 2025-03-28T09:29:17.908124Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:17.940290Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:17.940440Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:17.942607Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26223, node 7 2025-03-28T09:29:18.118221Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:18.118250Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:18.118264Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:18.118439Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16692 TClient is connected to server localhost:16692 WaitRootIsUp 'Root'... 
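[editor's note] The sequence just above repeats throughout this log and is bootstrap noise rather than a failure: the workload service looks up the default resource pool before it exists (NOT_FOUND), TPoolCreatorActor creates `.metadata/workload_manager/pools/default` and "doublechecks" once the transaction completes, and racing creators get "path exist, request accepts it", i.e. the create is treated as idempotent success. Pools other than default are created explicitly in YQL; a hedged sketch, where the pool name and WITH options are hypothetical, the accepted settings depend on the server version, and on some versions this DDL may need the query service rather than execute_scheme:

```python
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/Root")  # hypothetical
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def create_resource_pool(session):
    # YQL counterpart of the ESchemeOpCreateResourcePool suboperation in the log.
    session.execute_scheme(
        "CREATE RESOURCE POOL demo_pool WITH ("
        "    CONCURRENT_QUERY_LIMIT = 10,"
        "    QUEUE_SIZE = 100"
        ");"
    )

pool.retry_operation_sync(create_resource_pool)
```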
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:18.948542Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:18.963897Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:18.986332Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:29:19.104275Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:29:19.416091Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:19.543610Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:22.395741Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790118252961497:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:22.395822Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:22.464021Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.555438Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.608128Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.654463Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.736368Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.757158Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486790096778123274:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:22.767741Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:22.816247Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.912636Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790118252962024:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:22.912770Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:22.913107Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790118252962029:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:22.918443Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:22.933780Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486790118252962031:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:22.998391Z node 7 :TX_PROXY ERROR: Actor# [7:7486790118252962087:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:24.732358Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:26.050503Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154166068, txId: 281474976710673] shutting down >> KqpReturning::ReturningWorksIndexedInsert-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedReplace+QueryService >> KqpNotNullColumns::SecondaryKeyWithNotNullColumn [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumn >> KqpNewEngine::AutoChooseIndex [GOOD] >> KqpNewEngine::AutoChooseIndexOrderByLimit >> KqpRanges::Like [GOOD] >> KqpNewEngine::PkRangeSelect1 [GOOD] >> KqpNewEngine::OnlineRO_Consistent >> KqpSqlIn::InWithCast [GOOD] >> KqpNewEngine::PureTxMixedWithDeferred [GOOD] >> KqpNewEngine::ReadAfterWrite >> KqpSqlIn::SimpleKey >> KqpNewEngine::PruneWritePartitions+UseSink [GOOD] >> KqpNewEngine::PruneWritePartitions-UseSink >> KqpNewEngine::DeleteWithInputMultiConsumptionLimit-UseSink [GOOD] >> KqpNewEngine::DependentSelect >> KqpReturning::ReturningTwice [GOOD] >> KqpReturning::ReplaceSerial >> KqpNewEngine::JoinWithPrecompute [GOOD] >> KqpNewEngine::LiteralKeys ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::Like [GOOD] Test command err: Trying to start YDB, gRPC: 1086, MsgBus: 24800 2025-03-28T09:28:41.387023Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789945391789245:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:41.387134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a92/r3tmp/tmpcsthEu/pdisk_1.dat 2025-03-28T09:28:41.778982Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:41.799259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:41.799396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:41.801015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1086, node 1 2025-03-28T09:28:41.901724Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:41.901773Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:41.901783Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:41.901976Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24800 TClient is connected to server localhost:24800 WaitRootIsUp 'Root'... 
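[editor's note] The TClient::Ls request/response pairs scattered through this log (another follows immediately) are the test client describing the database root: PathDescription.Self carries the path type and version counters, and Children lists entries such as the virtual `.sys` directory. Roughly the same view is available from outside through the SDK's scheme client; a hedged sketch, assuming ydb.SchemeClient's describe_path/list_directory and a hypothetical endpoint:

```python
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/Root")  # hypothetical
driver.wait(timeout=5)

scheme = ydb.SchemeClient(driver)

# Rough equivalent of PathDescription.Self in the Ls output.
entry = scheme.describe_path("/Root")
print(entry.name, entry.type, entry.owner)

# Rough equivalent of the Children list; ".sys" shows up here too.
for child in scheme.list_directory("/Root").children:
    print(child.name, child.type)
```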
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:42.646363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:42.689482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:42.869351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:43.045308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:43.120904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:44.963865Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789958276692902:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:44.964014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:45.348464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:45.390508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:45.442068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:45.483018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:45.541506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:45.581274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:45.678965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789962571660717:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:45.679065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:45.680064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789962571660722:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:45.686533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:45.699288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789962571660724:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:28:45.766952Z node 1 :TX_PROXY ERROR: Actor# [1:7486789962571660779:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:46.390842Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789945391789245:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:46.390914Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:28:47.032711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.303349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.516383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.693416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.987071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19080, MsgBus: 1334 2025-03-28T09:28:49.186879Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789978189267974:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:49.186944Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a92/r3tmp/tmph50Qil/pdisk_1.dat 2025-03-28T09:28:49.305953Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19080, node 2 2025-03-28T09:28:49.330347Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:49.330430Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:49.344243Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:49.406421Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:49.406444Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:49.406451Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:49.406580Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1334 TClient is connected to 
server localhost:1334 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:49.887731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type ... ol default not found or you don't have access permissions } 2025-03-28T09:29:18.108033Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.164922Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:18.223467Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:18.302784Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:18.350490Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:18.426882Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:18.511385Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:18.576101Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790101606071414:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.576198Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.576429Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790101606071419:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.581268Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:18.594603Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790101606071421:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:18.661034Z node 5 :TX_PROXY ERROR: Actor# [5:7486790101606071473:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:18.773759Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790080131232668:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:18.773814Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:20.118703Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:20.798418Z node 5 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154160825, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 65076, MsgBus: 16760 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a92/r3tmp/tmpzcZvYb/pdisk_1.dat 2025-03-28T09:29:22.494835Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:29:22.604482Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65076, node 6 2025-03-28T09:29:22.702085Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:22.702581Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:22.705080Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:22.725856Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:22.725885Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:22.725896Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:22.726047Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16760 TClient is connected to server localhost:16760 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:23.312307Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:23.322975Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:23.331211Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:23.419154Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:23.663215Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:23.774770Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:26.845842Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790136105759654:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:26.845952Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:26.909920Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:26.965482Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.021514Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.127163Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.206574Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.297860Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.384336Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790140400727474:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:27.384456Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:27.384713Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790140400727479:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:27.390368Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:27.453353Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790140400727481:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:27.532310Z node 6 :TX_PROXY ERROR: Actor# [6:7486790140400727539:3460] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:29.145320Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 >> KqpNewEngine::StaleRO_Immediate [GOOD] >> KqpNewEngine::UnionAllPure >> KqpReturning::ReturningWorksIndexedDelete+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedDelete-QueryService >> KqpNewEngine::FlatMapLambdaInnerPrecompute [GOOD] >> KqpNewEngine::DqSourceLiteralRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::InWithCast [GOOD] Test command err: Trying to start YDB, gRPC: 13798, MsgBus: 22587 2025-03-28T09:28:35.458449Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789917503139036:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:35.463925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a9f/r3tmp/tmpHXrigp/pdisk_1.dat 2025-03-28T09:28:35.988493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:35.988582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:35.991279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:36.014977Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13798, node 1 2025-03-28T09:28:36.081463Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:36.081500Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:36.081521Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:36.081635Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22587 TClient is connected to server localhost:22587 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:36.608303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:36.637018Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:36.656480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:36.831533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:37.030775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:37.139765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:38.981001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789930388042673:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:38.981101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:39.382622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:39.456851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:39.489432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:39.517963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:39.555182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:39.605575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:39.661147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789934683010487:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:39.661223Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:39.661648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789934683010492:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:39.665212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:39.678859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789934683010494:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:28:39.745775Z node 1 :TX_PROXY ERROR: Actor# [1:7486789934683010547:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:40.459348Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789917503139036:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:40.459417Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:28:40.713093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:28:40.761318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:28:40.833231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:49: Warning: At function: And
:7:41: Warning: At function: SqlIn
:7:41: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 7279, MsgBus: 19043 2025-03-28T09:28:45.821154Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789962448234873:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a9f/r3tmp/tmpBdw4YX/pdisk_1.dat 2025-03-28T09:28:45.872135Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:28:45.974975Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:46.009820Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:46.009907Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:46.019557Z node 2 :HIVE WARN: HIVE#720575940 ... peration type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:19.146251Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:19.197377Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:19.244596Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:19.315374Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:19.418279Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:19.480808Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790107781788766:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
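[editorial annotation] The repeated warning blocks above (YQL code 1108, one block per executed query) flag that IN over nullable operands uses legacy YQL semantics, so a NULL in the item list or in the tested value can make rows drop out of a filter unexpectedly. A minimal YQL sketch of the remediation the compiler itself suggests — the table path and values here are illustrative, not taken from this run's schema:

  PRAGMA AnsiInForEmptyOrNullableItemsCollections;  -- opt in to ANSI semantics for empty/nullable IN lists

  SELECT Key, Value
  FROM `/Root/TestTable`                 -- hypothetical table, not the test's actual one
  WHERE Value IN ("One", "Two", NULL);   -- with the pragma enabled, warning 1108 is no longer emitted
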
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:19.480875Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:19.481136Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790107781788771:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:19.485363Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:19.503828Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790107781788773:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:19.556452Z node 5 :TX_PROXY ERROR: Actor# [5:7486790107781788825:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:19.775058Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790086306949982:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:19.775123Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:4:17: Warning: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:5:22: Warning: At function: Filter, At function: Coalesce
:6:23: Warning: At function: SqlIn
:6:23: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 11857, MsgBus: 23122 2025-03-28T09:29:22.772098Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790121064576388:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:22.775347Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a9f/r3tmp/tmpMB1n2q/pdisk_1.dat 2025-03-28T09:29:23.051090Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:23.056118Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:23.056231Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:23.059625Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11857, node 6 2025-03-28T09:29:23.147330Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:23.147363Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:23.147375Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:23.147812Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23122 TClient is connected to server localhost:23122 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:23.869714Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:23.880990Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:23.890978Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:29:23.987110Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:24.255652Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:24.355164Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:27.565599Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790142539414641:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:27.565715Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:27.621571Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.717343Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.767642Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790121064576388:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:27.767733Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:27.792413Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.835430Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.914564Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.975305Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:28.080624Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790146834382466:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:28.080761Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:28.081182Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790146834382471:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:28.085701Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:28.097484Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790146834382473:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:28.200863Z node 6 :TX_PROXY ERROR: Actor# [6:7486790146834382528:3461] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSort::ReverseEightShardOptimized [GOOD] >> KqpSort::PassLimit >> KqpNotNullColumns::UpsertNotNullPkPg [GOOD] >> KqpNotNullColumns::UpsertNotNullPg >> DataShardReadIteratorBatchMode::RangeFromNonInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeToInclusive >> TPersQueueTest::TestWriteStat [GOOD] >> TPersQueueTest::TestWriteSessionsConflicts >> KqpSort::TopSortTableExprOffset [GOOD] >> KqpSort::UnionAllSortLimit >> KqpRanges::UpdateWhereInMultipleUpdate [GOOD] >> KqpRanges::ValidatePredicates >> KqpReturning::ReturningWorksIndexedUpsert-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedReplace-QueryService >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup [GOOD] >> KqpNewEngine::UpsertEmptyInput [GOOD] >> KqpNotNullColumns::AlterAddNotNullColumn >> TPersQueueTest::DisableWrongSettings [GOOD] >> TPersQueueTest::DisableDeduplication >> KqpNewEngine::JoinPure [GOOD] >> KqpNewEngine::JoinPureUncomparableKeys >> KqpSort::ReverseMixedOrderNotOptimized [GOOD] >> KqpSort::ReverseRangeOptimized >> KqpNewEngine::MultiUsageInnerConnection [GOOD] >> KqpSqlIn::SimpleKey_Negated [GOOD] >> KqpSqlIn::TupleParameter >> KqpNewEngine::LocksNoMutations [GOOD] >> KqpNewEngine::LocksNoMutationsSharded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::JoinLeftTableWithNotNullPk-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 24482, MsgBus: 3019 2025-03-28T09:28:50.582166Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789983528308267:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:50.582206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a48/r3tmp/tmpn3RZJl/pdisk_1.dat 2025-03-28T09:28:51.142044Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:51.147494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:51.147615Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:51.151324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24482, node 1 2025-03-28T09:28:51.295492Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:51.295521Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:51.295537Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:51.295716Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3019 TClient is connected to server localhost:3019 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:51.896933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:54.181137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790000708178116:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:54.181237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:54.214872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:28:54.374907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790000708178218:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:54.375009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:54.375709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790000708178223:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:54.379933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:28:54.391818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790000708178225:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:28:54.468136Z node 1 :TX_PROXY ERROR: Actor# [1:7486790000708178277:2397] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:54.715450Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790000708178333:2329], TxId: 281474976710662, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZWFlMWQ1OTctNDg5NTIxYzgtNDUzMzFjZmUtZmNjMDA2OWM=. TraceId : 01jqe1afjff0va8cfnd2nrfwvx. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: key, code: 2031 }. 2025-03-28T09:28:54.716029Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWFlMWQ1OTctNDg5NTIxYzgtNDUzMzFjZmUtZmNjMDA2OWM=, ActorId: [1:7486790000708178098:2329], ActorState: ExecuteState, TraceId: 01jqe1afjff0va8cfnd2nrfwvx, Create QueryResponse for error on request, msg: 2025-03-28T09:28:54.828523Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790000708178350:2329], TxId: 281474976710663, task: 1. Ctx: { TraceId : 01jqe1afp59ed609z4nb7bb9dr. SessionId : ydb://session/3?node_id=1&id=ZWFlMWQ1OTctNDg5NTIxYzgtNDUzMzFjZmUtZmNjMDA2OWM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: key, code: 2031 }. 2025-03-28T09:28:54.828888Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWFlMWQ1OTctNDg5NTIxYzgtNDUzMzFjZmUtZmNjMDA2OWM=, ActorId: [1:7486790000708178098:2329], ActorState: ExecuteState, TraceId: 01jqe1afp59ed609z4nb7bb9dr, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 2570, MsgBus: 19267 2025-03-28T09:28:55.690743Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790004204317522:2091];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:55.690844Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a48/r3tmp/tmpyBMEhk/pdisk_1.dat 2025-03-28T09:28:55.900382Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:55.901067Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:55.901132Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:55.916592Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2570, node 2 2025-03-28T09:28:55.995186Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:55.995213Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:55.995220Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:55.995322Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19267 TClient is connected to server localhost:19267 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:56.442340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:58.625566Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790017089220035:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
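[editorial annotation] The two KQP_COMPUTE InternalError entries above show the failure the KqpNotNullColumns tests deliberately provoke: writing NULL into a NOT NULL column is rejected with KIKIMR_BAD_COLUMN_TYPE (code 2031) and the session answers the request with an error instead of committing. A minimal YQL sketch of the statement class involved — the DDL and values are illustrative, not the test's actual schema:

  CREATE TABLE `/Root/TestNotNull` (
      key Uint64 NOT NULL,   -- NOT NULL column, matching the column named in the error
      value String,
      PRIMARY KEY (key)
  );

  UPSERT INTO `/Root/TestNotNull` (key, value)
  VALUES (CAST(NULL AS Uint64), "foo");  -- fails: Tried to insert NULL value into NOT NULL column: key
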
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:58.625736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:58.634092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:28:58.725393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790017089220141:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:58.725471Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:58.725846Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790017089220146:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:58.728917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025- ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:20.387778Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:20.444760Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:20.490673Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:20.566640Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790110158664742:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:20.566767Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:20.567076Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790110158664747:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:20.571038Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:20.589348Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790110158664749:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:20.671152Z node 5 :TX_PROXY ERROR: Actor# [5:7486790110158664804:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:20.770803Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790088683826093:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:20.770990Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:22.020123Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.252098Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21045, MsgBus: 21304 2025-03-28T09:29:24.771828Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790127396129307:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:24.771910Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a48/r3tmp/tmpTPLHau/pdisk_1.dat 2025-03-28T09:29:25.062347Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:25.062453Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:25.063453Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:25.080622Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21045, node 6 2025-03-28T09:29:25.223312Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:25.223339Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:25.223356Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:25.223503Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21304 TClient is connected to server localhost:21304 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:25.943825Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:25.951706Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:25.960364Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:26.066814Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:26.321216Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:26.422553Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:29.302315Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790148870967569:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:29.302536Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:29.357758Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.403490Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.478298Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.521245Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.570666Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.659208Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.768924Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790148870968090:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:29.769051Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:29.769354Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790148870968095:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:29.772309Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790127396129307:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:29.772392Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:29.774198Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:29.788777Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790148870968097:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:29.842905Z node 6 :TX_PROXY ERROR: Actor# [6:7486790148870968151:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:31.263649Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:31.469508Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumn [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg >> KqpNewEngine::DeleteWithInputMultiConsumption+UseSink [GOOD] >> KqpMergeCn::TopSortByDesc_Bool_And_PKUint64_Limit4 [GOOD] >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 >> KqpRanges::IsNotNullInJsonValue2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::MultiUsageInnerConnection [GOOD] Test command err: Trying to start YDB, gRPC: 1241, MsgBus: 10541 2025-03-28T09:28:42.537722Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789949741798367:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:42.537794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a8d/r3tmp/tmp7W4hBZ/pdisk_1.dat 2025-03-28T09:28:43.072409Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:43.074724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:43.074820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:43.082750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1241, node 1 2025-03-28T09:28:43.251212Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:43.251258Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:43.251267Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:43.251364Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10541 TClient is connected to server localhost:10541 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:43.820406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:43.844140Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:43.861189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:28:44.020566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:28:44.201325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:28:44.299020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:28:46.258470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789966921669163:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:46.258570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:46.711751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:46.775609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:46.811744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:46.865016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:46.923152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:46.974225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.068357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789971216636976:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:47.068493Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:47.069086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789971216636981:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:47.073365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:47.094351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789971216636983:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:28:47.192168Z node 1 :TX_PROXY ERROR: Actor# [1:7486789971216637039:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:47.516993Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789949741798367:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:47.517053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29794, MsgBus: 8487 2025-03-28T09:28:49.767746Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789978134317343:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:49.767794Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a8d/r3tmp/tmpAFhEd9/pdisk_1.dat 2025-03-28T09:28:49.921112Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:49.941365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:49.941448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29794, node 2 2025-03-28T09:28:49.945424Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:50.051488Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:50.051515Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:50.051523Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:50.051642Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8487 TClient is connected to server localhost:8487 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:28:50.578912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:50.595105Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:28:50.613229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:50.671061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:50.800921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2 ...
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:21.094377Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:21.159024Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:21.210891Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:21.281991Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:21.326938Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:21.367248Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:21.457063Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:21.529349Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790117162279020:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:21.529529Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:21.530131Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790117162279026:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:21.535449Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:21.550181Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790117162279028:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:21.630348Z node 6 :TX_PROXY ERROR: Actor# [6:7486790117162279083:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:21.938657Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790095687440252:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:22.050240Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10243, MsgBus: 12543 2025-03-28T09:29:25.563815Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486790133876008282:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:25.658233Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a8d/r3tmp/tmpzzZ0Cp/pdisk_1.dat 2025-03-28T09:29:25.863358Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:25.874640Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:25.874788Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:25.879959Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10243, node 7 2025-03-28T09:29:26.027281Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:26.027309Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:26.027320Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:26.027495Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12543 TClient is connected to server localhost:12543 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:26.877738Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:26.885506Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:26.895348Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:26.984633Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:27.218254Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:27.312785Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:30.285540Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790155350846506:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:30.285635Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:30.361586Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:30.426813Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:30.491106Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:30.546581Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:30.551859Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486790133876008282:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:30.552420Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:30.665375Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:30.731052Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:30.799100Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790155350847025:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:30.799248Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:30.803039Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790155350847030:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:30.810284Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:30.826505Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486790155350847032:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:30.906455Z node 7 :TX_PROXY ERROR: Actor# [7:7486790155350847089:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpNewEngine::OnlineRO_Consistent [GOOD] >> KqpNewEngine::OnlineRO_Inconsistent >> KqpNotNullColumns::OptionalParametersScanQuery [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsMultipleColumns [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows >> KqpNewEngine::ReadAfterWrite [GOOD] >> KqpNewEngine::ReadRangeWithParams >> KqpSqlIn::SecondaryIndex_SimpleKey [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DeleteWithInputMultiConsumption+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22119, MsgBus: 21629 2025-03-28T09:28:45.189746Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789959262343375:2264];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:45.189792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a82/r3tmp/tmpzqngHL/pdisk_1.dat 2025-03-28T09:28:45.743410Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22119, node 1 2025-03-28T09:28:45.771639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:45.771745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:45.775839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:45.903316Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:45.903333Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:45.903340Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:45.903452Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21629 TClient is connected to server localhost:21629 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:46.702144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:46.730833Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:46.752640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:46.929570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:47.196930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:47.291827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:48.839210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789972147246858:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:48.839340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:49.137607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.168660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.193505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.222731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.259090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.307343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.428187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789976442214673:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:49.428259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:49.428495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789976442214678:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:49.432357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:49.446993Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:28:49.448261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789976442214680:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:28:49.520272Z node 1 :TX_PROXY ERROR: Actor# [1:7486789976442214734:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:50.190814Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789959262343375:2264];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:50.209354Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7343, MsgBus: 14411 2025-03-28T09:28:51.962777Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789985078413890:2218];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a82/r3tmp/tmp6gJxo5/pdisk_1.dat 2025-03-28T09:28:52.094952Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:28:52.158180Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:52.158262Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:52.173228Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:52.174196Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7343, node 2 2025-03-28T09:28:52.354187Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:52.354213Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:52.354223Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:52.354327Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14411 TClient is connected to server localhost:14411 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:28:52.873552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:52.903358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:52.970113Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:53.134477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting.. ... {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:22.614973Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:22.727456Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.779213Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.829061Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.880189Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.923895Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:23.007928Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:23.074055Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790125971093283:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:23.074185Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:23.079119Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790125971093288:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:23.084443Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:23.099151Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790125971093290:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:23.169931Z node 6 :TX_PROXY ERROR: Actor# [6:7486790125971093344:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:23.438804Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790104496254535:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:23.438882Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10266, MsgBus: 4964 2025-03-28T09:29:26.932573Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486790137412017688:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:26.932611Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a82/r3tmp/tmpUH3Z49/pdisk_1.dat 2025-03-28T09:29:27.250908Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:27.288242Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:27.288341Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:27.290616Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10266, node 7 2025-03-28T09:29:27.382503Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:27.382533Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:27.382546Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:27.382708Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4964 TClient is connected to server localhost:4964 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:28.072943Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:28.078651Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:28.093762Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:28.179962Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:28.412034Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:29:28.527965Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:29:31.932865Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486790137412017688:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:31.932951Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:31.998445Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790158886855961:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:31.998582Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:32.033744Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:32.091694Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:32.144598Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:32.221044Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:32.278272Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:32.369272Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:32.490952Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790163181823779:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:32.491083Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:32.492859Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790163181823784:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:32.498912Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:32.519850Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486790163181823786:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:32.576628Z node 7 :TX_PROXY ERROR: Actor# [7:7486790163181823840:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpNotNullColumns::UpsertNotNullPg [GOOD] >> KqpRanges::DateKeyPredicate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::IsNotNullInJsonValue2 [GOOD] Test command err: Trying to start YDB, gRPC: 5073, MsgBus: 21260 2025-03-28T09:28:45.739289Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789959117500868:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:45.739817Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a81/r3tmp/tmpjOutCI/pdisk_1.dat 2025-03-28T09:28:46.188364Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:46.213629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:46.213733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:46.214930Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5073, node 1 2025-03-28T09:28:46.351381Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:46.351403Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:46.351414Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:46.351520Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21260 TClient is connected to server localhost:21260 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:47.010026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:28:47.031560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:47.195819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:28:47.365184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:28:47.443441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:49.218158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789976297371685:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:49.218305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:49.571353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.609272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.645719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.695492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.726917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.808194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:49.897141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789976297372205:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:49.897269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:49.897722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789976297372210:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:49.901179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:49.917000Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789976297372212:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:28:50.010100Z node 1 :TX_PROXY ERROR: Actor# [1:7486789980592339563:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:50.741304Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789959117500868:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:50.741372Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:28:51.068557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:28:51.364255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:28:51.571943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:28:51.746605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:28:52.152691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17341, MsgBus: 6545 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a81/r3tmp/tmpG34eJm/pdisk_1.dat 2025-03-28T09:28:53.678894Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:28:53.694412Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:53.697201Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:53.697268Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:53.699698Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17341, node 2 2025-03-28T09:28:53.963177Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:53.963197Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:53.963204Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:53.963306Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6545 TClient is connected to server localhost:6545 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:54.516001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:54.522467Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txI ... } 2025-03-28T09:29:22.659558Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790121868468520:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:22.665159Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:22.684526Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790121868468522:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:22.783193Z node 5 :TX_PROXY ERROR: Actor# [5:7486790121868468577:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:23.198097Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790104688597039:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:23.198154Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:24.268447Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:24.644715Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:29:24.864358Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-28T09:29:25.140523Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-03-28T09:29:25.655696Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 29675, MsgBus: 24285 2025-03-28T09:29:27.462143Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790140363174478:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:27.462198Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a81/r3tmp/tmps9wyPj/pdisk_1.dat 2025-03-28T09:29:27.687318Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:27.700163Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:27.700267Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:27.702329Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29675, node 6 2025-03-28T09:29:27.761720Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:27.761748Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:27.761759Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:27.761895Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24285 TClient is connected to 
server localhost:24285 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:28.429675Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:28.438311Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:28.471200Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:28.606493Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:28.895015Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:28.996982Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:31.805325Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790157543045439:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:31.805439Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:31.850384Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:31.889960Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:31.930195Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:31.980902Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:32.021960Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:32.115797Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:32.224576Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790161838013256:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:32.224674Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:32.224840Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790161838013261:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:32.229290Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:32.249395Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790161838013263:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:32.330395Z node 6 :TX_PROXY ERROR: Actor# [6:7486790161838013318:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:32.462482Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790140363174478:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:32.462613Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:33.948574Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:34.306723Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:29:34.568432Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480 2025-03-28T09:29:34.799648Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-03-28T09:29:35.268350Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::OptionalParametersScanQuery [GOOD] Test command err: Trying to start YDB, gRPC: 17365, MsgBus: 20723 2025-03-28T09:28:56.185036Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790009135075934:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:56.185070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0009c4/r3tmp/tmpcUuYmX/pdisk_1.dat 2025-03-28T09:28:56.638222Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:56.638313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:56.651902Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:56.654261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17365, node 1 2025-03-28T09:28:56.721267Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:56.721289Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:56.721296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-03-28T09:28:56.721401Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20723 TClient is connected to server localhost:20723 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:57.289732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:57.300543Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:59.338210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790022019978468:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:59.338331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:59.665706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:28:59.823638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790022019978569:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:59.823735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:59.824008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790022019978574:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:59.828316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:28:59.844582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790022019978576:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:28:59.947681Z node 1 :TX_PROXY ERROR: Actor# [1:7486790022019978628:2396] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:00.135819Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790026314945963:2356], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing not null column in input: Value. All not null columns should be initialized, code: 2032 2025-03-28T09:29:00.137156Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGY3YTMwZDktNGU4N2NiMGItOTQyNmIwMTQtNDgwZjI5OTI=, ActorId: [1:7486790022019978464:2329], ActorState: ExecuteState, TraceId: 01jqe1amyk2gjh6gxsa7r8hq51, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-28T09:29:00.178788Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790026314945972:2360], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Int32,'Value':Null> to Struct<'Key':Uint64?,'Value':String>
:1:47: Error: Failed to convert 'Value': Null to String
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-28T09:29:00.178956Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGY3YTMwZDktNGU4N2NiMGItOTQyNmIwMTQtNDgwZjI5OTI=, ActorId: [1:7486790022019978464:2329], ActorState: ExecuteState, TraceId: 01jqe1amzp8jr5rnhhtcqhpb50, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 27962, MsgBus: 5178 2025-03-28T09:29:01.171927Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790027678743047:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:01.193080Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0009c4/r3tmp/tmp2UAmrJ/pdisk_1.dat 2025-03-28T09:29:01.360744Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:01.394069Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:01.394144Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:01.399657Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27962, node 2 2025-03-28T09:29:01.567124Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:01.567162Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:01.567174Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:01.567302Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5178 TClient is connected to server localhost:5178 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:02.188514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
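The two compilation failures above (codes 2032 and 2031) show the KiWriteTable not-null checks from two angles: the first upsert omits a NOT NULL column entirely, the second supplies an explicit Null for it. A minimal YQL sketch that reproduces both, assuming a hypothetical table whose schema is inferred from the error's target type Struct<'Key':Uint64?,'Value':String> (the table name is made up, not taken from the test):

    -- Assumed schema, inferred from the target struct type in the error:
    -- CREATE TABLE `/Root/TestNotNull` (Key Uint64, Value String NOT NULL, PRIMARY KEY (Key));

    -- Fails with code 2032: "Missing not null column in input: Value"
    UPSERT INTO `/Root/TestNotNull` (Key) VALUES (1);

    -- Fails with code 2031 via "Failed to convert 'Value': Null to String"
    UPSERT INTO `/Root/TestNotNull` (Key, Value) VALUES (1, NULL);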
2025-03-28T09:29:02.196077Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:04.451756Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790040563645432:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:04.451840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:04.464324Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:04.547637Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790040563645535:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:04.547690Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790040563645540:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:04.547736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions ... LAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:24.735283Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:24.775074Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:24.815196Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:24.875277Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:24.934775Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:25.041556Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790133889046673:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:25.041680Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:25.041960Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790133889046678:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:25.047410Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:25.068527Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790133889046680:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:25.159792Z node 5 :TX_PROXY ERROR: Actor# [5:7486790133889046736:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:25.640466Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790112414207901:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:25.640537Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:26.372524Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6557, MsgBus: 24818 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0009c4/r3tmp/tmpcjDQqj/pdisk_1.dat 2025-03-28T09:29:28.756567Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:29:28.859676Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:28.873348Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:28.873445Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:28.875649Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6557, node 6 2025-03-28T09:29:28.975284Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:28.975313Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:28.975324Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:28.975456Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24818 TClient is connected to server localhost:24818 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:29.661326Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:29.678110Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:29.689253Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:29.788406Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:29.995534Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:30.082663Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:33.092649Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790168018401704:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:33.092755Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:33.148492Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:33.194623Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:33.264381Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:33.319934Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:33.369164Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:33.430037Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:33.502845Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790168018402215:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:33.502968Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:33.503279Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790168018402220:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:33.509069Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:33.531224Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790168018402222:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:33.590953Z node 6 :TX_PROXY ERROR: Actor# [6:7486790168018402275:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:35.068715Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:35.576253Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154175602, txId: 281474976715673] shutting down 2025-03-28T09:29:35.827185Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154175861, txId: 281474976715675] shutting down 2025-03-28T09:29:36.030409Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154176057, txId: 281474976715677] shutting down >> KqpReturning::ReplaceSerial [GOOD] >> KqpReturning::ReturningSerial >> KqpNewEngine::PruneWritePartitions-UseSink [GOOD] >> KqpNewEngine::PruneEffectPartitions+UseSink >> KqpNewEngine::DependentSelect [GOOD] >> KqpNewEngine::DqSourceCount >> KqpNewEngine::UnionAllPure [GOOD] >> KqpNewEngine::StreamLookupForDataQuery+StreamLookupJoin >> KqpNewEngine::AutoChooseIndexOrderByLimit [GOOD] >> KqpNewEngine::AutoChooseIndexOrderByLambda >> KqpReturning::ReturningWorksIndexedReplace+QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedOperationsWithDefault+QueryService |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> KqpSort::UnionAllSortLimit [GOOD] >> KqpSqlIn::CantRewrite >> KqpSort::PassLimit [GOOD] >> KqpSort::OffsetPk >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] >> Cache::Test3 [GOOD] >> Cache::Test4 [GOOD] >> EscapingBasics::HideSecretsShouldWork [GOOD] >> EntityId::Distinct [GOOD] >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> EntityId::MaxId [GOOD] >> EscapingBasics::EncloseSecretShouldWork [GOOD] >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] >> KqpNewEngine::DqSourceLiteralRange [GOOD] >> KqpNewEngine::DqSourceSequentialLimit |90.0%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPersQueueTest::CreateTopicWithMeteringMode [GOOD] >> TPersQueueTest::DefaultMeteringMode >> KqpNewEngine::LiteralKeys [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test4 [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::MaxId [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] >> KqpSqlIn::SimpleKey [GOOD] >> KqpSqlIn::SelectNotAllElements >> KqpNewEngine::JoinPureUncomparableKeys [GOOD] >> KqpNewEngine::JoinProjectMulti >> KqpNotNullColumns::AlterAddNotNullColumn [GOOD] >> KqpNotNullColumns::AlterAddIndex >> DataShardReadIteratorBatchMode::RangeToInclusive [GOOD] >> DataShardReadIteratorBatchMode::RangeToNonInclusive >> Cache::Test5 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::LiteralKeys [GOOD] Test command err: Trying to start YDB, gRPC: 28752, MsgBus: 20518 2025-03-28T09:28:46.034291Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789961653181875:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:46.034552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a76/r3tmp/tmpGNlZhs/pdisk_1.dat 2025-03-28T09:28:46.580711Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:46.580799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:46.583787Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:46.584009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28752, node 1 2025-03-28T09:28:46.667177Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:46.667196Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:46.667202Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:46.667303Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20518 TClient is connected to server localhost:20518 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:47.397901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:47.427350Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:28:47.438121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:47.593823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:47.758040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:47.854274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:49.690288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789978833052711:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:49.690402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:50.064877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:50.101552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:50.151868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:50.199698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:50.245038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:50.296161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:50.374815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789983128020522:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:50.374893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:50.375170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789983128020527:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:50.380056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:50.397611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789983128020529:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:28:50.499072Z node 1 :TX_PROXY ERROR: Actor# [1:7486789983128020585:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:51.032131Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789961653181875:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:51.032257Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:28:51.713104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28770, MsgBus: 19822 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a76/r3tmp/tmpKmajJ1/pdisk_1.dat 2025-03-28T09:28:53.113121Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:28:53.236811Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:28:53.252102Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:53.252402Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:53.254392Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28770, node 2 2025-03-28T09:28:53.360284Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:53.360306Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:53.360314Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:53.360414Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19822 TClient is connected to server localhost:19822 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-28T09:28:53.853056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:28:53.872271Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:28:53.885003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:53.969672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:54.146358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 720 ...
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:26.202070Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:26.264047Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:26.312351Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:26.362234Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:26.416602Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:26.485745Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:26.536295Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:26.639819Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790117164572197:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:26.639924Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:26.653477Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790138639410845:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:26.653576Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:26.653976Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790138639410850:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:26.659637Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:26.682634Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790138639410852:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:26.782860Z node 6 :TX_PROXY ERROR: Actor# [6:7486790138639410910:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 25739, MsgBus: 21202 2025-03-28T09:29:32.363660Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486790162735366788:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:32.364814Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a76/r3tmp/tmpUJxr0k/pdisk_1.dat 2025-03-28T09:29:32.717156Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:32.733804Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:32.733911Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:32.735295Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25739, node 7 2025-03-28T09:29:32.843313Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:32.843343Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:32.843356Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:32.843529Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21202 TClient is connected to server localhost:21202 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:33.667508Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
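The recurring sequence above is the workload service lazily provisioning the default resource pool: TPoolFetcherActor reports NOT_FOUND for PoolId default, TPoolCreatorActor schedules the create and rechecks after "Transaction ... completed, doublechecking", and the follow-up TX_PROXY message "path exist, request accepts it" (severity 1) means a concurrent actor created the pool first and the request treats the existing path as success — none of these lines indicate a test failure. A hedged YQL sketch of an explicit pool definition; CREATE RESOURCE POOL is the workload-manager DDL, but the option names below are an assumption from that feature and the pool name is hypothetical, neither appears in this log:

CREATE RESOURCE POOL example_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,  -- assumed option name; caps in-flight queries in the pool
    QUEUE_SIZE = 100              -- assumed option name; bounds the pending-query queue
);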
2025-03-28T09:29:33.678251Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:33.697662Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:29:33.788948Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:29:34.027606Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:34.225943Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:37.365398Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486790162735366788:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:37.365485Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:37.530867Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790184210205019:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:37.530985Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:37.609428Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:37.652074Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:37.699605Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:37.757520Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:37.812414Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:37.923531Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:38.002083Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790188505172829:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:38.002200Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:38.002654Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790188505172834:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:38.012614Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:38.026978Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486790188505172836:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:38.126603Z node 7 :TX_PROXY ERROR: Actor# [7:7486790188505172891:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSort::ReverseRangeOptimized [GOOD] >> KqpSort::ReverseRangeLimitOptimized >> KqpNewEngine::LocksNoMutationsSharded [GOOD] >> KqpNewEngine::MultiEffects >> KqpNewEngine::OnlineRO_Inconsistent [GOOD] >> KqpNewEngine::Nondeterministic >> Cache::Test5 [GOOD] >> EntityId::CheckId [GOOD] >> KqpNewEngine::ReadRangeWithParams [GOOD] >> KqpNewEngine::ReadDifferentColumns >> KqpReturning::ReturningWorksIndexedDelete-QueryService [GOOD] >> KqpReturning::ReturningWorksIndexedDeleteV2+QueryService >> JsonProtoConversion::ProtoMapToJson [GOOD] >> JsonProtoConversion::JsonToProtoSingleValue |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::CheckId [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] >> JsonProtoConversion::JsonToProtoArray [GOOD] >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] >> KqpReturning::ReturningWorksIndexedReplace-QueryService [GOOD] >> KqpSort::ComplexPkExclusiveSecondOptionalPredicate |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] |90.0%| [TA] $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TA] {RESULT} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoArray [GOOD] >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg [GOOD] |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> KqpMergeCn::TopSortBy_Date_And_Datetime_Limit4 [GOOD] >> KqpMergeCn::SortBy_PK_Uint64_Desc >> KqpSqlIn::TupleParameter [GOOD] >> KqpSqlIn::TupleLiteral |90.0%| [TA] $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.1%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpRanges::DateKeyPredicate [GOOD] >> KqpRanges::DeleteNotFullScan+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::SecondaryIndexWithNotNullDataColumnPg [GOOD] Test command err: Trying to start YDB, gRPC: 12318, MsgBus: 8100 2025-03-28T09:29:10.102413Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790068524085906:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:10.102474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007a9/r3tmp/tmphDNNMp/pdisk_1.dat 2025-03-28T09:29:10.649767Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:10.659829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:10.659926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:10.661663Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12318, node 1 2025-03-28T09:29:10.791275Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:10.791296Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:10.791302Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:10.791413Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8100 TClient is connected to server localhost:8100 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:11.484828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:13.384982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790081408988428:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.385105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.608944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:13.741087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790081408988531:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.741154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.741397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790081408988536:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.745534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:29:13.758936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790081408988538:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:29:13.837892Z node 1 :TX_PROXY ERROR: Actor# [1:7486790081408988589:2398] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:14.092048Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790085703955926:2356], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing key column in input: Key for table: /Root/TestReplaceNotNullPk, code: 2029 2025-03-28T09:29:14.092353Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTExZDNiYWItYjUwMjk2NjgtNTA3NmJmZDEtNjhkODYwZGI=, ActorId: [1:7486790081408988425:2329], ActorState: ExecuteState, TraceId: 01jqe1b2jpf230n3bhbsd0fc1e, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-03-28T09:29:14.127368Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790085703955935:2360], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:49: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:49: Error: Failed to convert 'Key': Null to Uint64
:1:49: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-28T09:29:14.128616Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTExZDNiYWItYjUwMjk2NjgtNTA3NmJmZDEtNjhkODYwZGI=, ActorId: [1:7486790081408988425:2329], ActorState: ExecuteState, TraceId: 01jqe1b2kq3z0pca8wjqqf483d, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 11857, MsgBus: 6015 2025-03-28T09:29:14.946902Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790085701588644:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:14.947717Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007a9/r3tmp/tmpYY0wlH/pdisk_1.dat 2025-03-28T09:29:15.093777Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:15.110926Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:15.111015Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:15.112093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11857, node 2 2025-03-28T09:29:15.191253Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:15.191276Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:15.191288Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:15.191391Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6015 TClient is connected to server localhost:6015 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:15.548415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:17.995604Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790098586491152:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:17.995815Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.002080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:18.061178Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790102881458549:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.061256Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.061421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790102881458554:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.065326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:29:18.077394Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPo ... :36.880518Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11068, node 6 2025-03-28T09:29:36.934027Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:36.934043Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:36.934052Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:36.934150Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29613 TClient is connected to server localhost:29613 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:37.556473Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:37.574975Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:40.308499Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790197333294553:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:40.308622Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:40.347617Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:40.504280Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790197333294707:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:40.504396Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:40.504875Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790197333294712:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:40.510496Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:29:40.532008Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790197333294714:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:29:40.597982Z node 6 :TX_PROXY ERROR: Actor# [6:7486790197333294765:2430] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:41.574809Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790180153424877:2221];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:41.574888Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:41.868989Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7486790201628262226:2382], TxId: 281474976715664, task: 1. Ctx: { TraceId : 01jqe1bx91faymx36rx6c97p48. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=6&id=ZDg4MGE4ZS05N2M5MmUxYS04ZTU2YjA4OC05MmZmODhi. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: BAD_REQUEST KIKIMR_BAD_COLUMN_TYPE: {
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 }. 2025-03-28T09:29:41.869805Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7486790201628262227:2383], TxId: 281474976715664, task: 2. Ctx: { SessionId : ydb://session/3?node_id=6&id=ZDg4MGE4ZS05N2M5MmUxYS04ZTU2YjA4OC05MmZmODhi. TraceId : 01jqe1bx91faymx36rx6c97p48. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [6:7486790201628262222:2329], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-03-28T09:29:41.870116Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7486790201628262228:2384], TxId: 281474976715664, task: 3. Ctx: { SessionId : ydb://session/3?node_id=6&id=ZDg4MGE4ZS05N2M5MmUxYS04ZTU2YjA4OC05MmZmODhi. CustomerSuppliedId : . TraceId : 01jqe1bx91faymx36rx6c97p48. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [6:7486790201628262222:2329], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-03-28T09:29:41.870246Z node 6 :KQP_COMPUTE ERROR: SelfId: [6:7486790201628262229:2385], TxId: 281474976715664, task: 4. Ctx: { TraceId : 01jqe1bx91faymx36rx6c97p48. SessionId : ydb://session/3?node_id=6&id=ZDg4MGE4ZS05N2M5MmUxYS04ZTU2YjA4OC05MmZmODhi. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [6:7486790201628262222:2329], status: BAD_REQUEST, reason: {
: Error: Terminate execution } 2025-03-28T09:29:41.872748Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDg4MGE4ZS05N2M5MmUxYS04ZTU2YjA4OC05MmZmODhi, ActorId: [6:7486790197333294550:2329], ActorState: ExecuteState, TraceId: 01jqe1bx91faymx36rx6c97p48, Create QueryResponse for error on request, msg: 2025-03-28T09:29:41.916620Z node 6 :READ_TABLE_API WARN: ForgetAction occurred, send TEvPoisonPill 2025-03-28T09:29:41.946988Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7486790201628262254:2389], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-03-28T09:29:41.948942Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDg4MGE4ZS05N2M5MmUxYS04ZTU2YjA4OC05MmZmODhi, ActorId: [6:7486790197333294550:2329], ActorState: ExecuteState, TraceId: 01jqe1bxrz1pv71e4q191k4zb7, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-28T09:29:41.979951Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7486790201628262273:2397], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-03-28T09:29:41.981939Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDg4MGE4ZS05N2M5MmUxYS04ZTU2YjA4OC05MmZmODhi, ActorId: [6:7486790197333294550:2329], ActorState: ExecuteState, TraceId: 01jqe1bxta3kfq8xsjztnr37pf, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-28T09:29:42.023481Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7486790205923229589:2405], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:14: Error: At function: KiWriteTable!
:1:14: Error: Missing not null column in input: Index1. All not null columns should be initialized, code: 2032 2025-03-28T09:29:42.025327Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDg4MGE4ZS05N2M5MmUxYS04ZTU2YjA4OC05MmZmODhi, ActorId: [6:7486790197333294550:2329], ActorState: ExecuteState, TraceId: 01jqe1bxvkfm6da4zt64ayjrjf, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-28T09:29:42.508209Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:29:42.513528Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7486790205923229609:2413], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-03-28T09:29:42.514937Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDg4MGE4ZS05N2M5MmUxYS04ZTU2YjA4OC05MmZmODhi, ActorId: [6:7486790197333294550:2329], ActorState: ExecuteState, TraceId: 01jqe1bxwwc5jerzgh6nqmy33w, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-28T09:29:43.142500Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:29:43.149055Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7486790205923229640:2423], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-03-28T09:29:43.149371Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDg4MGE4ZS05N2M5MmUxYS04ZTU2YjA4OC05MmZmODhi, ActorId: [6:7486790197333294550:2329], ActorState: ExecuteState, TraceId: 01jqe1byc1fw6h4v3h3rhqsdm1, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-28T09:29:43.911511Z node 6 :KQP_EXECUTER ERROR: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , DatabaseId: , SessionId: , CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TKqpLiteralExecuter, TKqpEnsure failed. 2025-03-28T09:29:43.916894Z node 6 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [6:7486790210218196966:2434], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
: Error: Tried to insert NULL value into NOT NULL column: Index1, code: 2031 2025-03-28T09:29:43.917146Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=ZDg4MGE4ZS05N2M5MmUxYS04ZTU2YjA4OC05MmZmODhi, ActorId: [6:7486790197333294550:2329], ActorState: ExecuteState, TraceId: 01jqe1byzw60jv1reh1mmw3w5e, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: >> KqpNewEngine::PruneEffectPartitions+UseSink [GOOD] >> KqpNewEngine::PruneEffectPartitions-UseSink >> KqpNewEngine::DqSourceCount [GOOD] >> KqpNewEngine::DqSource |90.1%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} >> TPersQueueTest::InflightLimit [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And [GOOD] >> KqpNewEngine::StreamLookupForDataQuery+StreamLookupJoin [GOOD] >> KqpSort::OffsetPk [GOOD] >> TPersQueueTest::SetMeteringMode [GOOD] >> KqpRm::NotEnoughExecutionUnits >> KqpReturning::ReturningWorksIndexedOperationsWithDefault+QueryService [GOOD] >> KqpRm::ManyTasks >> KqpRm::SingleSnapshotByExchanger >> KqpRm::SingleTask >> KqpRm::Reduce >> KqpRm::DisonnectNodes >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService >> KqpRm::NotEnoughMemory >> KqpRm::SnapshotSharingByExchanger >> KqpRm::NodesMembershipByExchanger >> KqpNewEngine::DqSourceSequentialLimit [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And_In >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin >> KqpSort::OffsetTopSort >> KqpSort::ReverseRangeLimitOptimized [GOOD] >> KqpNewEngine::JoinProjectMulti [GOOD] >> KqpNotNullColumns::AlterAddIndex [GOOD] >> KqpSqlIn::CantRewrite [GOOD] >> KqpSqlIn::SelectNotAllElements [GOOD] >> KqpNewEngine::Nondeterministic [GOOD] >> KqpNewEngine::DqSourceLocksEffects >> KqpRm::NotEnoughExecutionUnits [GOOD] >> KqpRm::SingleTask [GOOD] >> KqpSort::TopParameter >> TPersQueueTest::TClusterTrackerTest >> KqpRm::ManyTasks [GOOD] >> KqpNewEngine::JoinMultiConsumer >> KqpRm::Reduce [GOOD] >> KqpRm::NotEnoughMemory [GOOD] >> KqpSqlIn::SimpleKey_In_And_In >> KqpSqlIn::ComplexKey >> KqpNewEngine::OrderedScalarContext >> KqpNewEngine::AutoChooseIndexOrderByLambda [GOOD] >> KqpReturning::ReturningSerial [GOOD] >> KqpNewEngine::ReadDifferentColumns [GOOD] >> KqpNewEngine::MultiEffects [GOOD] >> KqpRm::SingleSnapshotByExchanger [GOOD] >> KqpRm::NodesMembershipByExchanger [GOOD] >> KqpRm::DisonnectNodes [GOOD] >> KqpReturning::ReturningColumnsOrder >> KqpNewEngine::ReadDifferentColumnsPk >> KqpNewEngine::LookupColumns >> DataShardReadIteratorBatchMode::RangeToNonInclusive [GOOD] >> TPersQueueTest::DisableDeduplication [GOOD] >> DataShardReadIteratorBatchMode::MultipleRanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD] Test command err: 2025-03-28T09:29:49.176781Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:29:49.177299Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/000d4b/r3tmp/tmplifjKb/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:29:49.177887Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/000d4b/r3tmp/tmplifjKb/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/000d4b/r3tmp/tmplifjKb/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6455342015582047422 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:29:49.238665Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-28T09:29:49.238998Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-28T09:29:49.260906Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:462:2340] with ResourceBroker at [1:433:2321] 2025-03-28T09:29:49.261039Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:464:2341] 2025-03-28T09:29:49.261245Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:463:2100] with ResourceBroker at [2:434:2099] 2025-03-28T09:29:49.261330Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101] 2025-03-28T09:29:49.261458Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:29:49.261498Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-28T09:29:49.261535Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:29:49.261556Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-03-28T09:29:49.261729Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.279718Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.279935Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.280049Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.280319Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.280556Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.280777Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.280853Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.280963Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.281162Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.281189Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.281248Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.281700Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-03-28T09:29:49.281810Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.282223Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.282773Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.282905Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-03-28T09:29:49.283075Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-28T09:29:49.283315Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:29:49.283601Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:29:49.283733Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get 
resources info from node: 2 2025-03-28T09:29:49.286572Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:462:2340]) priority=0 resources={0, 100} 2025-03-28T09:29:49.286638Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.286705Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:462:2340]) from queue queue_kqp_resource_manager 2025-03-28T09:29:49.286747Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.286789Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:462:2340])) 2025-03-28T09:29:49.290961Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:49.291228Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-2-1 (1 by [1:462:2340]) (release resources {0, 100}) 2025-03-28T09:29:49.291279Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.000000 (remove task kqp-1-2-1 (1 by [1:462:2340])) 2025-03-28T09:29:49.291331Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks [GOOD] Test command err: 2025-03-28T09:29:49.188821Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:29:49.189320Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/000e20/r3tmp/tmpYk4IpE/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:29:49.190007Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/000e20/r3tmp/tmpYk4IpE/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/000e20/r3tmp/tmpYk4IpE/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17143767140104234359 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:29:49.242276Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-28T09:29:49.242549Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-28T09:29:49.260771Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:462:2340] with ResourceBroker at [1:433:2321] 2025-03-28T09:29:49.260878Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:464:2341] 2025-03-28T09:29:49.261066Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:463:2100] with ResourceBroker at [2:434:2099] 2025-03-28T09:29:49.261145Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101] 2025-03-28T09:29:49.261266Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:29:49.261306Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-28T09:29:49.261339Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:29:49.261359Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-03-28T09:29:49.261523Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.276893Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.277103Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.277203Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.277460Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.277692Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.277933Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.278007Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.278115Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.278340Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.278366Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.278437Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.278919Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-03-28T09:29:49.279062Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.279477Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.279925Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.280055Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-03-28T09:29:49.280150Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-28T09:29:49.280362Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:29:49.280541Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:29:49.280615Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get 
resources info from node: 2 2025-03-28T09:29:49.283661Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:462:2340]) priority=0 resources={0, 100} 2025-03-28T09:29:49.283723Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.283773Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:462:2340]) from queue queue_kqp_resource_manager 2025-03-28T09:29:49.283808Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.283851Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:462:2340])) 2025-03-28T09:29:49.288075Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:49.288467Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-2 (2 by [1:462:2340]) priority=0 resources={0, 100} 2025-03-28T09:29:49.288524Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-2 (2 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.288572Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-2 (2 by [1:462:2340]) from queue queue_kqp_resource_manager 2025-03-28T09:29:49.288605Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-2 (2 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.288637Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-1-2-2 (2 by [1:462:2340])) 2025-03-28T09:29:49.288669Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:49.288787Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-3-3 (3 by [1:462:2340]) priority=0 resources={0, 100} 2025-03-28T09:29:49.288811Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-3-3 (3 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.288852Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-3-3 (3 by [1:462:2340]) from queue queue_kqp_resource_manager 2025-03-28T09:29:49.288875Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-3-3 (3 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.288899Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.750000 (insert task kqp-1-3-3 (3 by [1:462:2340])) 2025-03-28T09:29:49.288924Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 3. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:49.289032Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-4-4 (4 by [1:462:2340]) priority=0 resources={0, 100} 2025-03-28T09:29:49.289086Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-4-4 (4 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.289144Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-4-4 (4 by [1:462:2340]) from queue queue_kqp_resource_manager 2025-03-28T09:29:49.289186Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-4-4 (4 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.289225Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.750000 to 1.000000 (insert task kqp-1-4-4 (4 by [1:462:2340])) 2025-03-28T09:29:49.289261Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 4. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:49.289364Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-5-5 (5 by [1:462:2340]) priority=0 resources={0, 100} 2025-03-28T09:29:49.289391Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-5-5 (5 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.289429Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-5-5 (5 by [1:462:2340]) from queue queue_kqp_resource_manager 2025-03-28T09:29:49.289450Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-5-5 (5 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.289471Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.000000 to 1.250000 (insert task kqp-1-5-5 (5 by [1:462:2340])) 2025-03-28T09:29:49.289496Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 5. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:49.289588Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-6-6 (6 by [1:462:2340]) priority=0 resources={0, 100} 2025-03-28T09:29:49.289613Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-6-6 (6 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.289636Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-6-6 (6 by [1:462:2340]) from queue queue_kqp_resource_manager 2025-03-28T09:29:49.289661Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-6-6 (6 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.289683Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.250000 to 1.500000 (insert task kqp-1-6-6 (6 by [1:462:2340])) 2025-03-28T09:29:49.289703Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 6. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:49.289783Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-7-7 (7 by [1:462:2340]) priority=0 resources={0, 100} 2025-03-28T09:29:49.289808Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-7-7 (7 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.289855Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-7-7 (7 by [1:462:2340]) from queue queue_kqp_resource_manager 2025-03-28T09:29:49.289896Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-7-7 (7 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.289918Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.500000 to 1.750000 (insert task kqp-1-7-7 (7 by [1:462:2340])) 2025-03-28T09:29:49.289942Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 7. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:49.290029Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-8-8 (8 by [1:462:2340]) priority=0 resources={0, 100} 2025-03-28T09:29:49.290053Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-8-8 (8 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.290078Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-8-8 (8 by [1:462:2340]) from queue queue_kqp_resource_manager 2025-03-28T09:29:49.290111Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-8-8 (8 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.290140Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 1.750000 to 2.000000 (insert task kqp-1-8-8 (8 by [1:462:2340])) 2025-03-28T09:29:49.290162Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 8. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:49.290246Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-9-9 (9 by [1:462:2340]) priority=0 resources={0, 100} 2025-03-28T09:29:49.290270Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-9-9 (9 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.290292Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-9-9 (9 by [1:462:2340]) from queue queue_kqp_resource_manager 2025-03-28T09:29:49.290311Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-9-9 (9 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.290335Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 2.000000 to 2.250000 (insert task kqp-1-9-9 (9 by [1:462:2340])) 2025-03-28T09:29:49.290354Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 9. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:49.290473Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [1:462:2340]) (release resources {0, 100}) 2025-03-28T09:29:49.290522Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 2.250000 to 2.000000 (remove task kqp-1-1-1 (1 by [1:462:2340])) 2025-03-28T09:29:49.290562Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD]
Test command err:
2025-03-28T09:29:49.182304Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
2025-03-28T09:29:49.182934Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/000df5/r3tmp/tmpJqTO22/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000
2025-03-28T09:29:49.183434Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/000df5/r3tmp/tmpJqTO22/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/000df5/r3tmp/tmpJqTO22/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10802707030517956099 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-03-28T09:29:49.236952Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-28T09:29:49.237264Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-28T09:29:49.268784Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:462:2340] with ResourceBroker at [1:433:2321]
2025-03-28T09:29:49.268906Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:464:2341]
2025-03-28T09:29:49.269399Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:463:2100] with ResourceBroker at [2:434:2099]
2025-03-28T09:29:49.269509Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101]
2025-03-28T09:29:49.269673Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-03-28T09:29:49.269719Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher.
2025-03-28T09:29:49.269755Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:29:49.269798Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-28T09:29:49.269956Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.292644Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.292868Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.292963Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.293220Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.293455Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.293672Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.293746Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.293850Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.294026Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.294049Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.294116Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.294665Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-03-28T09:29:49.294808Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.295262Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.295699Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.295822Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-03-28T09:29:49.295936Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 
2025-03-28T09:29:49.296171Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-03-28T09:29:49.296387Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-03-28T09:29:49.296454Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD]
Test command err:
2025-03-28T09:29:49.174104Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
2025-03-28T09:29:49.174998Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/000d93/r3tmp/tmpk6Ijyh/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000
2025-03-28T09:29:49.175588Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/000d93/r3tmp/tmpk6Ijyh/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/000d93/r3tmp/tmpk6Ijyh/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14154706554864962330 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-03-28T09:29:49.236775Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-28T09:29:49.237214Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-28T09:29:49.258461Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:462:2340] with ResourceBroker at [1:433:2321]
2025-03-28T09:29:49.258569Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:464:2341]
2025-03-28T09:29:49.258775Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:463:2100] with ResourceBroker at [2:434:2099]
2025-03-28T09:29:49.258854Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101]
2025-03-28T09:29:49.259001Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher
2025-03-28T09:29:49.259041Z
node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-28T09:29:49.259067Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:29:49.259084Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-28T09:29:49.260172Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.270423Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.270705Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.270803Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.271125Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.271349Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.271558Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.271645Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.271739Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.271900Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.271922Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.272014Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.273846Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-03-28T09:29:49.273984Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.274399Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.274953Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.275088Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 
2025-03-28T09:29:49.275176Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-03-28T09:29:49.275390Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-03-28T09:29:49.275568Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-03-28T09:29:49.275628Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD]
Test command err:
2025-03-28T09:29:49.271145Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
2025-03-28T09:29:49.271670Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/000cc5/r3tmp/tmpRd73ho/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000
2025-03-28T09:29:49.272318Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/000cc5/r3tmp/tmpRd73ho/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/000cc5/r3tmp/tmpRd73ho/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8965588862859333869 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000
2025-03-28T09:29:49.314356Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-28T09:29:49.314663Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-28T09:29:49.334564Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:462:2340] with ResourceBroker at [1:433:2321]
2025-03-28T09:29:49.334717Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:464:2341]
2025-03-28T09:29:49.334923Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:463:2100] with ResourceBroker at [2:434:2099]
2025-03-28T09:29:49.335016Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101]
2025-03-28T09:29:49.335166Z node 2
:KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:29:49.335215Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-28T09:29:49.335247Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:29:49.335264Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-28T09:29:49.335388Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.353917Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.354131Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.354225Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.354507Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.354872Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.355080Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.355153Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.355257Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.355419Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.355448Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.355505Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.355973Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-03-28T09:29:49.356124Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.356555Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.357029Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.357143Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2
2025-03-28T09:29:49.357245Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-03-28T09:29:49.357470Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-03-28T09:29:49.357661Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1
2025-03-28T09:29:49.357746Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2
2025-03-28T09:29:50.468903Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request
2025-03-28T09:29:50.468982Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request
2025-03-28T09:29:50.469514Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1
2025-03-28T09:29:50.737757Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request
>> KqpMergeCn::SortBy_PK_Uint64_Desc [GOOD]
>> KqpRanges::DeleteNotFullScan+UseSink [GOOD]
>> KqpSort::ComplexPkExclusiveSecondOptionalPredicate [GOOD]
>> TPersQueueTest::TestWriteSessionsConflicts [GOOD]
>> KqpReturning::ReturningWorksIndexedDeleteV2+QueryService [GOOD]
>> KqpRanges::NoFullScanAtScanQuery [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD]
Test command err:
2025-03-28T09:29:49.201887Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000
2025-03-28T09:29:49.202409Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/000dda/r3tmp/tmptIXGSJ/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000
2025-03-28T09:29:49.203119Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/000dda/r3tmp/tmptIXGSJ/pdisk_1.dat": unknown reason, errno# 0.
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/000dda/r3tmp/tmptIXGSJ/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 5565541649493028426 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:29:49.247944Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-28T09:29:49.248261Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-28T09:29:49.263424Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:462:2340] with ResourceBroker at [1:433:2321] 2025-03-28T09:29:49.263543Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:464:2341] 2025-03-28T09:29:49.263744Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:463:2100] with ResourceBroker at [2:434:2099] 2025-03-28T09:29:49.263841Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101] 2025-03-28T09:29:49.264001Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:29:49.264060Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-28T09:29:49.264096Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:29:49.264119Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-03-28T09:29:49.264257Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.274653Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.274923Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.275022Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.275276Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.275501Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.275698Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.275785Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.275889Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.276120Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.276147Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.276215Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.276678Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-03-28T09:29:49.276781Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.277159Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.277570Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.277693Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-03-28T09:29:49.277817Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-28T09:29:49.278013Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:29:49.278195Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:29:49.278275Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get 
resources info from node: 2 2025-03-28T09:29:49.281474Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:462:2340]) priority=0 resources={0, 100} 2025-03-28T09:29:49.281541Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.281588Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:462:2340]) from queue queue_kqp_resource_manager 2025-03-28T09:29:49.281623Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.281664Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:462:2340])) 2025-03-28T09:29:49.288100Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:49.288241Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-1-2 (2 by [1:462:2340]) priority=0 resources={0, 100} 2025-03-28T09:29:49.288283Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-1-2 (2 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.288340Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:462:2340]) from queue queue_kqp_resource_manager 2025-03-28T09:29:49.288376Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-1-2 (2 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.288417Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:462:2340])) 2025-03-28T09:29:49.288500Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:49.288739Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.288886Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-03-28T09:29:49.289176Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:29:50.417679Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-28T09:29:50.417823Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-2-1 (1 by [1:462:2340]) (release resources {0, 100}) 2025-03-28T09:29:50.417902Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.300200 (remove task kqp-1-2-1 (1 by [1:462:2340])) 2025-03-28T09:29:50.417944Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.100400 2025-03-28T09:29:50.418009Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 
2025-03-28T09:29:50.418217Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-2-1-2 (2 by [1:462:2340]) (release resources {0, 100}) 2025-03-28T09:29:50.418280Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.300200 to 0.100400 (remove task kqp-2-1-2 (2 by [1:462:2340])) 2025-03-28T09:29:50.418393Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-03-28T09:29:50.418605Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:50.418787Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154190 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:50.419364Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:29:50.696596Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request >> KqpRm::SnapshotSharingByExchanger [GOOD] >> KqpSqlIn::TupleLiteral [GOOD] >> KqpNewEngine::DqSource [GOOD] >> KqpNewEngine::PruneEffectPartitions-UseSink [GOOD] >> KqpSort::OffsetTopSort [GOOD] >> KqpMergeCn::SortBy_Int32 >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin [GOOD] >> KqpNewEngine::OrderedScalarContext [GOOD] >> KqpRanges::CastKeyBounds >> KqpSort::ComplexPkInclusiveSecondOptionalPredicate >> TPersQueueTest::TestReadRuleServiceTypePassword >> KqpReturning::ReturningTypes >> KqpRanges::NoFullScanAtDNFPredicate >> KqpSqlIn::TupleSelect >> KqpNewEngine::PagingNoPredicateExtract >> KqpNewEngine::DqSourceLimit >> KqpNewEngine::ReadDifferentColumnsPk [GOOD] >> KqpNewEngine::JoinMultiConsumer [GOOD] >> KqpSqlIn::SecondaryIndex_SimpleKey_In_And_In [GOOD] >> KqpSort::TopParameter [GOOD] >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService [GOOD] >> KqpNewEngine::DqSourceLocksEffects [GOOD] >> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput >> KqpSqlIn::SecondaryIndex_ComplexKey_In_And_In >> KqpNewEngine::JoinSameKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNotNullColumns::AlterAddIndex [GOOD] Test command err: Trying to start YDB, gRPC: 27243, MsgBus: 11441 2025-03-28T09:29:08.395580Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790061416120951:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:08.395615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007fd/r3tmp/tmpSc5MP6/pdisk_1.dat 2025-03-28T09:29:08.843590Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:08.847747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:08.847865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:08.850554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27243, node 1 2025-03-28T09:29:08.922794Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-28T09:29:08.922819Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:08.922827Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:08.922968Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11441 TClient is connected to server localhost:11441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:09.529240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:09.554821Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:09.580583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:09.735639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:29:09.884740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:29:09.972641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:11.676512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790074301024632:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:11.676624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:11.999255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:12.033368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:12.062897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:12.098227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:12.136852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:12.171286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:12.233896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790078595992444:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:12.234009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:12.234339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790078595992449:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:12.237733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:12.250104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790078595992451:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:12.324191Z node 1 :TX_PROXY ERROR: Actor# [1:7486790078595992504:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:13.401963Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790061416120951:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:13.402018Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25747, MsgBus: 62565 2025-03-28T09:29:14.511709Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790083502066701:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:14.511757Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007fd/r3tmp/tmpddIRDO/pdisk_1.dat 2025-03-28T09:29:14.718404Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:14.730399Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:14.730485Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:14.731812Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25747, node 2 2025-03-28T09:29:14.791181Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:14.791206Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:14.791213Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:14.791316Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62565 TClient is connected to server localhost:62565 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:15.279760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:15.285546Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:15.294527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:15.393060Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:15.555562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting ... FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:39.195599Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:39.240483Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:39.318789Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:39.359089Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:39.431449Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:39.527227Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790193957766470:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:39.527447Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:39.528849Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790193957766476:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:39.533332Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:39.550530Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790193957766479:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:39.636897Z node 5 :TX_PROXY ERROR: Actor# [5:7486790193957766533:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:41.065548Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 17750, MsgBus: 63058 2025-03-28T09:29:42.454208Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790207749691477:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:42.454284Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007fd/r3tmp/tmp3sYFLE/pdisk_1.dat 2025-03-28T09:29:42.592166Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:42.631893Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:42.632030Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:42.634161Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17750, node 6 2025-03-28T09:29:42.698244Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:42.698272Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:42.698288Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:42.698467Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63058 TClient is connected to server localhost:63058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:43.246796Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:29:43.260747Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:43.380807Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:43.601643Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:43.681790Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:46.435894Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790224929562417:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:46.436052Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:46.513152Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:46.548911Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:46.580992Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:46.620050Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:46.659272Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:46.700597Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:46.753716Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790224929562928:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:46.753813Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:46.753884Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790224929562933:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:46.759269Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:46.771253Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790224929562935:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:46.843927Z node 6 :TX_PROXY ERROR: Actor# [6:7486790224929562988:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:47.454678Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790207749691477:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:47.454787Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:48.102253Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:48.194434Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:29:48.256835Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2025-03-28T09:29:49.183359Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:29:49.183889Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/000e25/r3tmp/tmpvgwmhz/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:29:49.184469Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/000e25/r3tmp/tmpvgwmhz/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/000e25/r3tmp/tmpvgwmhz/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3418369074042044946 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:29:49.240150Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-28T09:29:49.240487Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-28T09:29:49.268459Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:462:2340] with ResourceBroker at [1:433:2321] 2025-03-28T09:29:49.268587Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:464:2341] 2025-03-28T09:29:49.268790Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:463:2100] with ResourceBroker at [2:434:2099] 2025-03-28T09:29:49.268923Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101] 2025-03-28T09:29:49.269060Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:29:49.269103Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-28T09:29:49.269137Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:29:49.269178Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-03-28T09:29:49.269316Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.280583Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.280777Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.280890Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.281171Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.281391Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.281607Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.281675Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.281785Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.281956Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.281979Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.282038Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.282450Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-03-28T09:29:49.282552Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.283020Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.283486Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.283605Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-03-28T09:29:49.283694Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-28T09:29:49.283908Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:29:49.284108Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:29:49.284199Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get 
resources info from node: 2 2025-03-28T09:29:50.401710Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-28T09:29:50.401828Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-28T09:29:50.416441Z node 1 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 2 2025-03-28T09:29:50.416598Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-03-28T09:29:50.417090Z node 1 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 2 2025-03-28T09:29:50.417835Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:83:2074] ServerId# [1:353:2271] TabletId# 72057594037932033 PipeClientId# [2:83:2074] 2025-03-28T09:29:50.418240Z node 2 :TX_PROXY WARN: actor# [2:145:2087] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-03-28T09:29:50.418388Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-03-28T09:29:50.418570Z node 2 :KQP_RESOURCE_MANAGER INFO: Subcriber is not available for info exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:50.418862Z node 2 :KQP_RESOURCE_MANAGER INFO: Kill previous info exchanger subscriber for 'kqpexch+/dc-1' at [2:469:2103], reason: tenant updated 2025-03-28T09:29:50.419245Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:50.422132Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:50.422291Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:50.759676Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request |90.1%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD] Test command err: 2025-03-28T09:29:49.182050Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:29:49.182413Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/000e42/r3tmp/tmpNe3T0u/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:29:49.182937Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/000e42/r3tmp/tmpNe3T0u/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/000e42/r3tmp/tmpNe3T0u/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12444879158911494377 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:29:49.254816Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-28T09:29:49.255112Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-28T09:29:49.279365Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:462:2340] with ResourceBroker at [1:433:2321] 2025-03-28T09:29:49.279478Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:464:2341] 2025-03-28T09:29:49.279720Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:463:2100] with ResourceBroker at [2:434:2099] 2025-03-28T09:29:49.279820Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101] 2025-03-28T09:29:49.280043Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:29:49.280085Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-28T09:29:49.280122Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:29:49.280143Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-03-28T09:29:49.280264Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.297810Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.297999Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.298087Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.298363Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.298577Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.299064Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.299148Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.299252Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.299418Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.299441Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.299502Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.299940Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-03-28T09:29:49.300057Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.300565Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.300999Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.301111Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-03-28T09:29:49.301199Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-28T09:29:49.301404Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:29:49.301573Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:29:49.301630Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get 
resources info from node: 2 2025-03-28T09:29:49.304521Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:462:2340]) priority=0 resources={0, 100} 2025-03-28T09:29:49.304586Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.304631Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:462:2340]) from queue queue_kqp_resource_manager 2025-03-28T09:29:49.304664Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.304705Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:462:2340])) 2025-03-28T09:29:49.304904Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:49.305115Z node 1 :RESOURCE_BROKER DEBUG: Update task kqp-1-1-1 (1 by [1:462:2340]) (priority=0 type=kqp_query resources={0, 30} resubmit=0) 2025-03-28T09:29:49.305156Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:49.305191Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.075000 (insert task kqp-1-1-1 (1 by [1:462:2340])) 2025-03-28T09:29:49.305258Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 70, Free Tier: 0, ExecutionUnits: 0. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD] Test command err: 2025-03-28T09:29:49.195843Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:29:49.196447Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/000c56/r3tmp/tmpXFvFw2/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:29:49.197132Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/000c56/r3tmp/tmpXFvFw2/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/000c56/r3tmp/tmpXFvFw2/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 301819175357120834 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:29:49.235920Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-28T09:29:49.236226Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-28T09:29:49.258368Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:462:2340] with ResourceBroker at [1:433:2321] 2025-03-28T09:29:49.258475Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:464:2341] 2025-03-28T09:29:49.258671Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:463:2100] with ResourceBroker at [2:434:2099] 2025-03-28T09:29:49.261709Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101] 2025-03-28T09:29:49.261911Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:29:49.261951Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-28T09:29:49.261983Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:29:49.262002Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-03-28T09:29:49.262116Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.275741Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.275921Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.276010Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.276282Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.276540Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:29:49.276695Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.276762Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.276856Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.277013Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:29:49.277036Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:49.277097Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154189 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:49.277550Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-03-28T09:29:49.277637Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.277997Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.278432Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:29:49.278541Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-03-28T09:29:49.278629Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-28T09:29:49.279433Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:29:49.279636Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:29:49.279718Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get 
resources info from node: 2 2025-03-28T09:29:50.419251Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-28T09:29:50.419331Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-28T09:29:50.419474Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [1:462:2340]) priority=0 resources={0, 100} 2025-03-28T09:29:50.419526Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:50.419573Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:462:2340]) from queue queue_kqp_resource_manager 2025-03-28T09:29:50.419610Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:50.419650Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:462:2340])) 2025-03-28T09:29:50.419896Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:50.419986Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-1-2 (2 by [1:462:2340]) priority=0 resources={0, 100} 2025-03-28T09:29:50.420054Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-1-2 (2 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:50.420101Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:462:2340]) from queue queue_kqp_resource_manager 2025-03-28T09:29:50.420139Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-1-2 (2 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:29:50.420193Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:462:2340])) 2025-03-28T09:29:50.420269Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:50.420328Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:50.420459Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154190 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-03-28T09:29:50.420821Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:29:50.709041Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-28T09:29:50.709234Z node 2 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-1-1 (1 by [2:463:2100]) priority=0 resources={0, 100} 2025-03-28T09:29:50.709304Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-1-1 (1 by [2:463:2100]) to queue queue_kqp_resource_manager 2025-03-28T09:29:50.709350Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [2:463:2100]) from queue queue_kqp_resource_manager 2025-03-28T09:29:50.709397Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-1-1 (1 by [2:463:2100]) to queue queue_kqp_resource_manager 2025-03-28T09:29:50.709459Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [2:463:2100])) 2025-03-28T09:29:50.709579Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:50.709660Z node 2 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-2-2-2 (2 by [2:463:2100]) priority=0 resources={0, 100} 2025-03-28T09:29:50.709696Z node 2 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-2-2-2 (2 by [2:463:2100]) to queue queue_kqp_resource_manager 2025-03-28T09:29:50.709733Z node 2 :RESOURCE_BROKER DEBUG: Allocate resources {0, 100} for task kqp-2-2-2 (2 by [2:463:2100]) from queue queue_kqp_resource_manager 2025-03-28T09:29:50.709767Z node 2 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-2-2-2 (2 by [2:463:2100]) to queue queue_kqp_resource_manager 2025-03-28T09:29:50.709802Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-2-2 (2 by [2:463:2100])) 2025-03-28T09:29:50.709870Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2025-03-28T09:29:50.709965Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:50.710114Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154191 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2025-03-28T09:29:50.710448Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-28T09:29:50.994125Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-28T09:29:50.994268Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [1:462:2340]) (release resources {0, 100}) 2025-03-28T09:29:50.994327Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350100 (remove task kqp-1-1-1 (1 by [1:462:2340])) 2025-03-28T09:29:50.994363Z node 1 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200200 2025-03-28T09:29:50.994407Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-03-28T09:29:50.994454Z node 1 :RESOURCE_BROKER DEBUG: Finish task kqp-2-1-2 (2 by [1:462:2340]) (release resources {0, 100}) 2025-03-28T09:29:50.994489Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350100 to 0.200200 (remove task kqp-2-1-2 (2 by [1:462:2340])) 2025-03-28T09:29:50.994522Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-03-28T09:29:50.994577Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:50.994721Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154192 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:50.995165Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:29:51.271885Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-28T09:29:51.272003Z node 2 :RESOURCE_BROKER DEBUG: Finish task kqp-1-1-1 (1 by [2:463:2100]) (release resources {0, 100}) 2025-03-28T09:29:51.272080Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [2:463:2100])) 2025-03-28T09:29:51.272119Z node 2 :RESOURCE_BROKER DEBUG: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2025-03-28T09:29:51.272165Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-03-28T09:29:51.272205Z node 2 :RESOURCE_BROKER DEBUG: Finish task kqp-2-2-2 (2 by [2:463:2100]) (release resources {0, 100}) 2025-03-28T09:29:51.272240Z node 2 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-2-2 (2 by [2:463:2100])) 2025-03-28T09:29:51.272272Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 2, taskId: 2. 
Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2025-03-28T09:29:51.272391Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:29:51.272535Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154193 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:29:51.272833Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-28T09:29:54.113907Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::OffsetTopSort [GOOD] Test command err: Trying to start YDB, gRPC: 10909, MsgBus: 65413 2025-03-28T09:29:13.102134Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790080541981868:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:13.102623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000732/r3tmp/tmp7HjWil/pdisk_1.dat 2025-03-28T09:29:13.526476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:13.526563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:13.530602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:13.531081Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10909, node 1 2025-03-28T09:29:13.638821Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:13.638841Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:13.638848Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:13.638949Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65413 TClient is connected to server localhost:65413 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:14.293444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:14.323323Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:14.338156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:14.501343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:14.684117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:14.765850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:16.479704Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790093426885518:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:16.479795Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:16.882593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:16.922136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:17.011046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:17.045716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:17.093402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:17.168650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:17.239053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790097721853336:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:17.239135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:17.239347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790097721853341:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:17.243426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:17.264061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790097721853343:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:17.327659Z node 1 :TX_PROXY ERROR: Actor# [1:7486790097721853396:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:18.102526Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790080541981868:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:18.102584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18163, MsgBus: 15176 2025-03-28T09:29:19.622530Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790105496892988:2219];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000732/r3tmp/tmpChSiC0/pdisk_1.dat 2025-03-28T09:29:19.682124Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:29:19.753716Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:19.767663Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:19.767740Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:19.772643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18163, node 2 2025-03-28T09:29:19.931352Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:19.931383Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:19.931392Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:19.931520Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15176 TClient is connected to server localhost:15176 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-28T09:29:20.414949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:29:20.451520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:20.508409Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:20.661572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:20.744892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCre ...
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:44.919990Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:45.017113Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:45.061119Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:45.096588Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:45.141683Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:45.184110Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:45.225058Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:45.288168Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790218004770582:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:45.288278Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:45.288619Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790218004770587:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:45.294049Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:45.306035Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790218004770589:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:45.390005Z node 5 :TX_PROXY ERROR: Actor# [5:7486790218004770642:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:45.497039Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790196529931944:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:45.497122Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16370, MsgBus: 23729 2025-03-28T09:29:48.082057Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790229684814902:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:48.082106Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000732/r3tmp/tmpoh9VPq/pdisk_1.dat 2025-03-28T09:29:48.249501Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:48.263571Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:48.263667Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:48.265294Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16370, node 6 2025-03-28T09:29:48.323235Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:48.323260Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:48.323270Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:48.323410Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23729 TClient is connected to server localhost:23729 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:48.970273Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:48.984636Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:49.001184Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:49.097106Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:29:49.290841Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:49.375616Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:29:51.929700Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790242569718541:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:51.929833Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:51.962556Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-28T09:29:51.992334Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-28T09:29:52.023392Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-28T09:29:52.055918Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-28T09:29:52.088268Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-28T09:29:52.125403Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-28T09:29:52.170083Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790246864686347:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:52.170166Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:52.170248Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790246864686352:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:52.173981Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-28T09:29:52.184220Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790246864686354:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-28T09:29:52.282362Z node 6 :TX_PROXY ERROR: Actor# [6:7486790246864686408:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:29:53.082858Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790229684814902:2061];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:29:53.082936Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::PruneEffectPartitions-UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 27784, MsgBus: 12870
2025-03-28T09:29:05.615273Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790047732042294:2066];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:29:05.615312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00083d/r3tmp/tmp73B0ja/pdisk_1.dat
2025-03-28T09:29:06.200835Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:29:06.207847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:29:06.208010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:29:06.209978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 27784, node 1
2025-03-28T09:29:06.355315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:29:06.355351Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:29:06.355362Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:29:06.355533Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:12870
TClient is connected to server localhost:12870
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:29:07.031488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:07.063314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:07.212176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:07.447208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:07.529648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:09.340288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790064911913254:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:09.340435Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:09.744853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:29:09.832362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:29:09.876342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:29:09.914108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:29:10.008791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:29:10.059054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:29:10.110806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790069206881066:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:10.110878Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:10.114580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790069206881071:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:10.118989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:29:10.135225Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480
2025-03-28T09:29:10.135784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790069206881073:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:29:10.204966Z node 1 :TX_PROXY ERROR: Actor# [1:7486790069206881126:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:29:10.618083Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790047732042294:2066];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:29:10.618158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 1404, MsgBus: 32396
2025-03-28T09:29:12.347616Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790075557080387:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:29:12.347699Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00083d/r3tmp/tmp8q6wiJ/pdisk_1.dat
2025-03-28T09:29:12.505753Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:29:12.521715Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:29:12.521797Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:29:12.523791Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 1404, node 2
2025-03-28T09:29:12.578916Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:29:12.578939Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:29:12.578947Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:29:12.579055Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:32396
TClient is connected to server localhost:32396
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:29:12.993197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:13.002121Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-03-28T09:29:13.014957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:13.092984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:13.252639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting.. ...
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:43.729715Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:43.794186Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-28T09:29:43.836923Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-28T09:29:43.900744Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-28T09:29:43.947715Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-28T09:29:43.996001Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-28T09:29:44.050612Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-28T09:29:44.116181Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790214397081207:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:44.116307Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:44.116626Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790214397081212:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:44.121911Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-28T09:29:44.137080Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790214397081214:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-28T09:29:44.195155Z node 6 :TX_PROXY ERROR: Actor# [6:7486790214397081267:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:29:44.470797Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790192922242438:2059];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:29:44.470868Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
Trying to start YDB, gRPC: 28970, MsgBus: 22538
2025-03-28T09:29:46.975996Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486790220988458426:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:29:46.976104Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00083d/r3tmp/tmpeRVwty/pdisk_1.dat
2025-03-28T09:29:47.109300Z node 7 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:29:47.142782Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:29:47.142890Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:29:47.144396Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 28970, node 7
2025-03-28T09:29:47.197556Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:29:47.197581Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:29:47.197593Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:29:47.197744Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:22538
TClient is connected to server localhost:22538
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:29:47.850456Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:47.860210Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:29:47.871012Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:47.993020Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:48.216306Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:48.373914Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:51.662316Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790242463296682:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:51.662450Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:51.715047Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:29:51.785183Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:29:51.822371Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:29:51.855377Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:29:51.889636Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:29:51.927698Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:29:51.974848Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790242463297195:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:51.974984Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:51.975082Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790242463297200:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:51.976374Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486790220988458426:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:29:51.976457Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:29:51.979739Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:29:52.000856Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486790242463297202:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:29:52.059036Z node 7 :TX_PROXY ERROR: Actor# [7:7486790246758264555:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::AutoChooseIndexOrderByLambda [GOOD]
Test command err: Trying to start YDB, gRPC: 21039, MsgBus: 16490
2025-03-28T09:28:52.005569Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789988316442323:2059];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:28:52.006133Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a31/r3tmp/tmp6UFdf4/pdisk_1.dat
2025-03-28T09:28:52.601597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:28:52.601684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:28:52.608211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:28:52.640105Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 21039, node 1
2025-03-28T09:28:52.743320Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:28:52.743341Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:28:52.743355Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:28:52.743478Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:16490
TClient is connected to server localhost:16490
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:28:53.313292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:28:53.331674Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:28:53.353967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:28:53.523626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:28:53.726638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:28:53.822626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:28:55.648466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790005496313292:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:28:55.648552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:28:55.985816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:28:56.063986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:28:56.116625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:28:56.150805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:28:56.188431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:28:56.234492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:28:56.341049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790009791281109:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:28:56.341137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:28:56.341381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790009791281114:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:28:56.345962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:28:56.366207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790009791281116:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:28:56.424741Z node 1 :TX_PROXY ERROR: Actor# [1:7486790009791281171:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:28:57.008164Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789988316442323:2059];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:28:57.008230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:28:57.464405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:28:58.309688Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154138341, txId: 281474976710673] shutting down
Trying to start YDB, gRPC: 28551, MsgBus: 14380
2025-03-28T09:28:59.249641Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790020313565834:2210];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a31/r3tmp/tmphtg6zY/pdisk_1.dat
2025-03-28T09:28:59.291746Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-28T09:28:59.390616Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:28:59.417934Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:28:59.418011Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:28:59.424767Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 28551, node 2
2025-03-28T09:28:59.547257Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:28:59.547282Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:28:59.547289Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:28:59.547394Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:14380
TClient is connected to server localhost:14380
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:29:00.140327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:00.159260Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-03-28T09:29:00.178479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:00.296505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Opera ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-28T09:29:35.706609Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-28T09:29:35.786662Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-28T09:29:35.866619Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-28T09:29:35.957155Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790152937977954:2070];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:29:35.957331Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:29:35.970341Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-28T09:29:36.082336Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-28T09:29:36.162089Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790178707783995:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:36.162223Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:36.162473Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790178707784000:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:36.167544Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-28T09:29:36.180467Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790178707784002:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-28T09:29:36.280494Z node 6 :TX_PROXY ERROR: Actor# [6:7486790178707784057:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:29:37.602873Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
Trying to start YDB, gRPC: 22589, MsgBus: 15697
2025-03-28T09:29:40.066495Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486790197176423951:2058];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:29:40.066551Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a31/r3tmp/tmpREal9f/pdisk_1.dat
2025-03-28T09:29:40.303849Z node 7 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:29:40.335739Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:29:40.335858Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:29:40.337308Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 22589, node 7
2025-03-28T09:29:40.435349Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:29:40.435381Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:29:40.435393Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:29:40.435577Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:15697
TClient is connected to server localhost:15697
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:29:41.323829Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:41.331753Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:41.352156Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-03-28T09:29:41.436809Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:41.679578Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:41.787567Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:44.864032Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790214356294901:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:44.864139Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:44.929757Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-28T09:29:44.982170Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-28T09:29:45.039496Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-28T09:29:45.069128Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486790197176423951:2058];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:29:45.069189Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:29:45.086836Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-28T09:29:45.137392Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-28T09:29:45.225605Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-28T09:29:45.308193Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790218651262720:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:45.308284Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:45.308638Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790218651262725:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:45.314612Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-28T09:29:45.329977Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486790218651262727:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-28T09:29:45.410059Z node 7 :TX_PROXY ERROR: Actor# [7:7486790218651262783:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:29:46.784270Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::StreamLookupForDataQuery-StreamLookupJoin [GOOD]
Test command err: Trying to start YDB, gRPC: 14721, MsgBus: 8876
2025-03-28T09:29:10.532840Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790067295305605:2070];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:29:10.533580Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00074e/r3tmp/tmpCLuGD3/pdisk_1.dat
2025-03-28T09:29:11.041469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:29:11.041561Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:29:11.044833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:29:11.054891Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 14721, node 1
2025-03-28T09:29:11.143214Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:29:11.143246Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:29:11.143268Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:29:11.143379Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:8876
TClient is connected to server localhost:8876
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:29:11.641334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:29:11.659927Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:29:13.717266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790080180208142:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:13.717368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:29:14.000191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-03-28T09:29:14.142948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480
2025-03-28T09:29:14.219428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480
2025-03-28T09:29:14.255984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715762:2, at schemeshard: 72057594046644480
2025-03-28T09:29:14.304469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715763:0, at schemeshard: 72057594046644480
2025-03-28T09:29:14.333452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-03-28T09:29:14.371024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715766:2, at schemeshard: 72057594046644480
2025-03-28T09:29:14.409537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715767:0, at schemeshard: 72057594046644480
2025-03-28T09:29:14.444540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715770:2, at schemeshard: 72057594046644480
2025-03-28T09:29:14.516262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715771:0, at schemeshard: 72057594046644480
2025-03-28T09:29:14.548399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715774:2, at schemeshard: 72057594046644480
2025-03-28T09:29:14.592371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715775:0, at schemeshard: 72057594046644480
2025-03-28T09:29:14.616163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:29:14.693462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715778:2, at schemeshard: 72057594046644480
2025-03-28T09:29:14.728838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715779:0, at schemeshard: 72057594046644480
2025-03-28T09:29:14.779034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715782:2, at schemeshard: 72057594046644480
2025-03-28T09:29:14.822186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715783:0, at schemeshard: 72057594046644480
2025-03-28T09:29:14.867575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790084475176789:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:14.867636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:15.077190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790088770144383:2448], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:15.077294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:15.077604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790088770144389:2451], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:15.081166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-03-28T09:29:15.094861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790088770144391:2452], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-03-28T09:29:15.160453Z node 1 :TX_PROXY ERROR: Actor# [1:7486790088770144447:3521] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 23], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:15.533412Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790067295305605:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:15.533464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; [] Trying to start YDB, gRPC: 19564, MsgBus: 3864 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00074e/r3tmp/tmpaAsL0L/pdisk_1.dat 2025-03-28T09:29:19.658885Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:29:19.672484Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:19.687278Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:19.687355Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:19.688765Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19564, node 2 2025-03-28T09:29:19.763217Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:19.763238Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:19.763250Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:19.763364Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3864 TClient is connected to server localhost:3864 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS Sc ... {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:43.835650Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:43.900980Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:43.939865Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:44.017516Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:44.053239Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:44.102381Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:44.149409Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:44.208243Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790215907853485:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:44.208373Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:44.208696Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790215907853490:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:44.213521Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:44.231170Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790215907853492:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:44.301525Z node 5 :TX_PROXY ERROR: Actor# [5:7486790215907853546:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:44.964256Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790194433014781:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:44.964322Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16228, MsgBus: 7871 2025-03-28T09:29:47.941699Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790225787871115:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:47.941784Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00074e/r3tmp/tmpVToSBv/pdisk_1.dat 2025-03-28T09:29:48.161697Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:48.197827Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:48.197924Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:48.199983Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16228, node 6 2025-03-28T09:29:48.268541Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:48.268569Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:48.268578Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:48.268727Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7871 TClient is connected to server localhost:7871 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
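[editor's note] The repeating WARN/ERROR sequence above (TPoolFetcherActor NOT_FOUND, TPoolCreatorActor "Scheduled retry ... doublechecking", then TX_PROXY "path exist, request accepts it") is the workload service racing to auto-create /Root/.metadata/workload_manager/pools/default on the first query; the final "path exist" issue shows the pool was created concurrently, so the sequence is benign. A minimal sketch of pre-creating such a pool explicitly, assuming current YQL resource-pool syntax and with purely illustrative limit values not taken from this log:

    -- Hedged sketch; limit values are hypothetical.
    CREATE RESOURCE POOL `default` WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed cap on in-flight queries
        QUEUE_SIZE = 100              -- assumed pending-query queue depth
    );

With the pool already present, the fetch succeeds on first try and none of the warnings above are emitted.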
2025-03-28T09:29:49.073948Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:49.108583Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:49.124003Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:49.222222Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:49.421914Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:49.618364Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:52.458827Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790247262709375:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:52.458932Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:52.508039Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:52.546965Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:52.584597Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:52.621046Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:52.692476Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:52.729775Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:52.806740Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790247262709891:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:52.806840Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:52.807032Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790247262709896:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:52.811820Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:52.822299Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790247262709898:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:52.908334Z node 6 :TX_PROXY ERROR: Actor# [6:7486790247262709953:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:52.942075Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790225787871115:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:52.942156Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::InflightLimit [GOOD] Test command err: 2025-03-28T09:25:18.565344Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789070996302623:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.565425Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:18.631750Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789072136617355:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.631823Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:18.755790Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:25:18.770193Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00120a/r3tmp/tmpbM9aKR/pdisk_1.dat 2025-03-28T09:25:18.999104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:18.999187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:19.001880Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:25:19.002985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:19.007833Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:19.009218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:19.009268Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:19.027583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13567, node 1 2025-03-28T09:25:19.161980Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00120a/r3tmp/yandexxGYJ8S.tmp 2025-03-28T09:25:19.162005Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00120a/r3tmp/yandexxGYJ8S.tmp 2025-03-28T09:25:19.162204Z node 1 :NET_CLASSIFIER WARN: successfully 
initialized from file: /home/runner/.ya/build/build_root/2909/00120a/r3tmp/yandexxGYJ8S.tmp 2025-03-28T09:25:19.162319Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:25:19.314714Z INFO: TTestServer started on Port 61769 GrpcPort 13567 TClient is connected to server localhost:61769 PQClient connected to localhost:13567 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:19.643986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:19.686945Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:25:19.720351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:25:21.926815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789083881205665:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:21.926914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789083881205673:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:21.926965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:21.930343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:25:21.959242Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-03-28T09:25:21.960364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789083881205679:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:25:22.214447Z node 1 :TX_PROXY ERROR: Actor# [1:7486789088176173064:2796] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:22.239996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:25:22.241864Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789088176173077:2353], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:25:22.242066Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjNlYzY3YS04ZDgxZDJiNS00ODJmMGM1ZS03YjdiODY0Mg==, ActorId: [1:7486789083881205662:2341], ActorState: ExecuteState, TraceId: 01jqe13zvv6dyma6ky2mnxmfxb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:25:22.238717Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789089316486877:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:25:22.240363Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Nzc1ZTMxZjctODM4NTk3ZC05MzZmMGZlMi1kMTI1YWIyNA==, ActorId: [2:7486789089316486837:2308], ActorState: ExecuteState, TraceId: 01jqe140103c6pe6n79m3w39ss, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:25:22.242403Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:25:22.242286Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:25:22.323711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:25:22.405572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:25:22.628569Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqe140dcfr6mrv42sxxvhwp0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM3ZDBmMjctY2FiZGVkNDUtZTA1NGQwY2UtZjMyZGQ2Ng==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
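[editor's note] The SCHEME_ERROR above ("Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]'") fires because the cluster-tracker query runs before the test's "=== Init DC:" step has created the config table it reads; the tracker then retries until the table exists. A minimal sketch of the table that the UPSERT above implies, with the column set inferred from its VALUES list (the types and primary key are assumptions, not taken from the test source):

    -- Hedged sketch; schema inferred from the UPSERT shown above.
    CREATE TABLE `/Root/PQ/Config/V2/Cluster` (
        name Utf8,
        balancer Utf8,
        local Bool,
        enabled Bool,
        weight Int64,
        PRIMARY KEY (name)
    );

Once the table is populated, the retried list-clusters query succeeds, matching the "=== CheckClustersList. Ok" further down.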
Subcribe to ClusterTracker from [1:7486789088176173434:3074] 2025-03-28T09:25:23.564908Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789070996302623:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:23.564969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:23.631422Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486789072136617355:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:23.631477Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic1 with 10 partitions CallPersQueueGRPC request to localhost:13567 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } 2025-03-28T09:25:29.024582Z node 1 :PQ_METACACHE DEBUG: HandleDescribeAllTopics 2025-03-28T09:25:29.024617Z node 1 :PQ_METACACHE DEBUG: ProcessDescribeAllTopics 2025-03-28T09:25:29.024626Z node 1 :PQ_METACACHE DEBUG: Describ ... session _27_3_5404828289331218279_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-03-28T09:29:34.923696Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_5404828289331218279_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 4 2025-03-28T09:29:34.923781Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_5404828289331218279_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 4, WTime# 1743154171005, sizeLag# 82536 2025-03-28T09:29:34.923799Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_5404828289331218279_v1TEvPartitionReady. 
Aval parts: 1 2025-03-28T09:29:34.924291Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_5404828289331218279_v1 grpc read done: success# 1, data# { read_request { bytes_size: 1048576 } } 2025-03-28T09:29:34.924453Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_5404828289331218279_v1 got read request: guid# ebb270ca-5cf18f09-ce9c0128-e27db7bd 2025-03-28T09:29:34.924502Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_5404828289331218279_v1 performing read request: guid# ac0311ca-7ede7c92-41ee6c0d-8d9585f3, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 4, size# 99043, partitionsAsked# 1, maxTimeLag# 0ms 2025-03-28T09:29:34.924597Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_5404828289331218279_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 4 maxSize 99043 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid ac0311ca-7ede7c92-41ee6c0d-8d9585f3 2025-03-28T09:29:34.925121Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-03-28T09:29:34.925185Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-03-28T09:29:38.707905Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 size 99043 endOffset 4 max time lag 0ms effective offset 0 2025-03-28T09:29:38.707990Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 4 2025-03-28T09:29:38.708149Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-03-28T09:29:38.708198Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:29:38.708384Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:29:38.709010Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_17831374738897474292_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1743154171005 CreateTimestampMS: 1743154171001 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1743154171019 CreateTimestampMS: 1743154171019 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1743154171075 CreateTimestampMS: 1743154171066 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." 
SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1743154171088 CreateTimestampMS: 1743154171086 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 3793 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2025-03-28T09:29:38.709348Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_17831374738897474292_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2025-03-28T09:29:38.709410Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_17831374738897474292_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid cb41e275-d6d9db27-721d4f63-3e694557 has messages 1 2025-03-28T09:29:38.709536Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_17831374738897474292_v1 read done: guid# cb41e275-d6d9db27-721d4f63-3e694557, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82607 2025-03-28T09:29:38.709564Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_17831374738897474292_v1 response to read: guid# cb41e275-d6d9db27-721d4f63-3e694557 2025-03-28T09:29:38.709904Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_17831374738897474292_v1 Process answer. Aval parts: 0 Bytes readed: 82607 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2025-03-28T09:29:38.718957Z node 27 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session _27_2_17831374738897474292_v1 grpc read done: success# 0, data# { } 2025-03-28T09:29:38.718995Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_17831374738897474292_v1 grpc read failed 2025-03-28T09:29:38.719021Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_17831374738897474292_v1 grpc closed 2025-03-28T09:29:38.719050Z node 27 :PQ_READ_PROXY INFO: session cookie 2 consumer session _27_2_17831374738897474292_v1 is DEAD 2025-03-28T09:29:38.720308Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _27_2_17831374738897474292_v1 2025-03-28T09:29:38.720371Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [27:7486790171164086811:2574] destroyed 2025-03-28T09:29:38.720439Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _27_2_17831374738897474292_v1 2025-03-28T09:29:42.694701Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic1] TPersQueueReadBalancer::HandleWakeup 2025-03-28T09:29:42.694805Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186224037892 Cookie: 1 2025-03-28T09:29:42.697390Z node 26 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ LifetimeSeconds: 86400 LowWatermark: 1048584 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10240 BurstSize: 10240 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } 2025-03-28T09:29:42.698469Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic1] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 1 DataSize: 0 UsedReserveSize: 0 2025-03-28T09:29:42.699224Z node 25 :PERSQUEUE_READ_BALANCER DEBUG: 
[72075186224037893][rt3.dc1--topic1] ProcessPendingStats. PendingUpdates size 1 2025-03-28T09:29:42.713858Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 Topic 'rt3.dc1--topic1' partition 0 user $without_consumer offset 0 count 4 size 99043 endOffset 4 max time lag 0ms effective offset 0 2025-03-28T09:29:42.713932Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 3 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 4 2025-03-28T09:29:42.714138Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 3. All data is from uncompacted head. 2025-03-28T09:29:42.714179Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:29:42.714405Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 0 Bytes readed: 82607 Offset: 0 from session 1 Offset: 1 from session 1 Offset: 2 from session 1 Offset: 3 from session 1 2025-03-28T09:29:42.715104Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_5404828289331218279_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 2 WriteTimestampMS: 1743154171005 CreateTimestampMS: 1743154171001 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 3 WriteTimestampMS: 1743154171019 CreateTimestampMS: 1743154171019 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 4 WriteTimestampMS: 1743154171075 CreateTimestampMS: 1743154171066 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 20570 bytes ..." SourceId: "\000source" SeqNo: 5 WriteTimestampMS: 1743154171088 CreateTimestampMS: 1743154171086 UncompressedSize: 20480 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 88 RealReadOffset: 3 WaitQuotaTimeMs: 7790 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2025-03-28T09:29:42.715587Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_5404828289331218279_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset4 2025-03-28T09:29:42.715661Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_5404828289331218279_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid ac0311ca-7ede7c92-41ee6c0d-8d9585f3 has messages 1 2025-03-28T09:29:42.715804Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_5404828289331218279_v1 read done: guid# ac0311ca-7ede7c92-41ee6c0d-8d9585f3, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 82607 2025-03-28T09:29:42.715837Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_5404828289331218279_v1 response to read: guid# ac0311ca-7ede7c92-41ee6c0d-8d9585f3 2025-03-28T09:29:42.716204Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_5404828289331218279_v1 Process answer. 
Aval parts: 0 2025-03-28T09:29:42.723553Z node 27 :PQ_READ_PROXY DEBUG: session cookie 3 consumer session _27_3_5404828289331218279_v1 grpc read done: success# 0, data# { } 2025-03-28T09:29:42.723583Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_5404828289331218279_v1 grpc read failed 2025-03-28T09:29:42.723617Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_5404828289331218279_v1 closed 2025-03-28T09:29:42.724178Z node 27 :PQ_READ_PROXY INFO: session cookie 3 consumer session _27_3_5404828289331218279_v1 is DEAD 2025-03-28T09:29:42.724507Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session _27_3_5404828289331218279_v1 2025-03-28T09:29:42.724570Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [27:7486790171164086814:2575] destroyed 2025-03-28T09:29:42.724636Z node 27 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: _27_3_5404828289331218279_v1 >> KqpReturning::ReturningColumnsOrder [GOOD] >> KqpReturning::Random >> KqpSqlIn::ComplexKey [GOOD] >> KqpSqlIn::Dict >> KqpNewEngine::LookupColumns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::TopParameter [GOOD] Test command err: Trying to start YDB, gRPC: 17186, MsgBus: 6350 2025-03-28T09:29:15.037323Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790091086485425:2156];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:15.050118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006e2/r3tmp/tmpfBbGOo/pdisk_1.dat 2025-03-28T09:29:15.457561Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17186, node 1 2025-03-28T09:29:15.487078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:15.487197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:15.488867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:15.575336Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:15.575360Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:15.575367Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:15.575490Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6350 TClient is connected to server localhost:6350 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:16.214763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:16.242247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:16.441935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:16.709343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:16.808235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:18.452852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790103971389001:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.452986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.786125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:18.835805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:18.871698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:18.917237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:18.971156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:19.043166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:19.105512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790108266356816:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:19.105586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:19.106084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790108266356822:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:19.109177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:19.122260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790108266356824:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:19.217095Z node 1 :TX_PROXY ERROR: Actor# [1:7486790108266356880:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:20.038803Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790091086485425:2156];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:20.038873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6387, MsgBus: 21387 2025-03-28T09:29:21.727353Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790117332272993:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:21.727391Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006e2/r3tmp/tmpKRK3JF/pdisk_1.dat 2025-03-28T09:29:21.941521Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:21.942983Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:21.943059Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:21.944402Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6387, node 2 2025-03-28T09:29:22.026128Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:22.026156Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:22.026185Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:22.026301Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21387 TClient is connected to server localhost:21387 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
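[editor's note] The block above is the cluster bring-up for the ydb/core/kqp/ut/opt test KqpSort::TopParameter. As a rough illustration only (an assumption about what a "top with parameter" test exercises, not the actual test body), a parameterized top-N query in YQL looks like:

    -- Hedged sketch; table and parameter names are hypothetical.
    DECLARE $limit AS Uint64;
    SELECT Key, Value
    FROM `/Root/TestTable`
    ORDER BY Value DESC
    LIMIT $limit;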
2025-03-28T09:29:22.522751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:22.535358Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:22.548955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:22.643091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:22.812396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:22.869559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTa ... {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:46.767737Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:46.820217Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:46.858084Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:46.897339Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:46.944275Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:46.987407Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:47.031330Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:47.090054Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790229085850363:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:47.090155Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:47.090540Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790229085850368:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:47.094392Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:47.103939Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-28T09:29:47.104516Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790229085850370:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:47.172714Z node 5 :TX_PROXY ERROR: Actor# [5:7486790229085850423:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:47.734782Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790207611011603:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:47.734851Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21614, MsgBus: 8039 2025-03-28T09:29:49.943591Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790236472406939:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:49.944296Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006e2/r3tmp/tmp8lMUXl/pdisk_1.dat 2025-03-28T09:29:50.103900Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:50.149035Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:50.149156Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:50.153110Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21614, node 6 2025-03-28T09:29:50.205586Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:50.205614Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:50.205626Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:50.205789Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8039 TClient is connected to server localhost:8039 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:50.801836Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:50.825343Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:50.904940Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:51.143962Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:51.236039Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:53.850814Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790253652277876:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:53.850922Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:53.925282Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:53.964760Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:54.002378Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:54.039464Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:54.075722Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:54.109965Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:54.155924Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790257947245685:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:54.156010Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:54.156012Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790257947245690:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:54.159965Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:54.172076Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790257947245692:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:54.240304Z node 6 :TX_PROXY ERROR: Actor# [6:7486790257947245745:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:54.943690Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790236472406939:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:54.953368Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::ReturningWorksIndexedOperationsWithDefault-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 11981, MsgBus: 28257 2025-03-28T09:29:00.424793Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790024518975029:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:00.425193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00098d/r3tmp/tmpCvSyDw/pdisk_1.dat 2025-03-28T09:29:01.109647Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:01.116688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:01.116771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:01.119223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11981, node 1 2025-03-28T09:29:01.327194Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:01.327218Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:01.327226Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:01.327322Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28257 TClient is connected to server localhost:28257 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:02.005849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:02.042929Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:02.053092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:02.182334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:02.341954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:02.437016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:04.168372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790041698845840:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:04.168531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:04.463468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:04.497630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:04.528185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:04.599524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:04.630394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:04.677323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:04.741181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790041698846355:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:04.741236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:04.741381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790041698846360:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:04.744291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:04.753294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790041698846362:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:04.841029Z node 1 :TX_PROXY ERROR: Actor# [1:7486790041698846417:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:05.390233Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790024518975029:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:05.391345Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:05.871899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:05.969582Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:29:06.027144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 19616, MsgBus: 17347 2025-03-28T09:29:09.513708Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790064684944678:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:09.513757Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00098d/r3tmp/tmpxL5MUK/pdisk_1.dat 2025-03-28T09:29:09.711114Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:09.721202Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:09.721275Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19616, node 2 2025-03-28T09:29:09.724704Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:09.835257Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:09.835277Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:09.835284Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:09.835378Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17347 TClient is connected to server localhost:17347 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:29:10.256527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:29:10.271287Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0 ... t not found or you don't have access permissions } 2025-03-28T09:29:44.350249Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:44.393732Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:44.437173Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:44.483085Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:44.576177Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:44.673739Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:44.817118Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790216386408096:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:44.817275Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:44.817601Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790216386408101:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:44.822077Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:44.842914Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790216386408103:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:44.903816Z node 5 :TX_PROXY ERROR: Actor# [5:7486790216386408158:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:45.064722Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790199206536616:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:45.064796Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:46.141811Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6004, MsgBus: 15202 2025-03-28T09:29:48.387698Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790232800593009:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:48.387745Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00098d/r3tmp/tmplOIHhM/pdisk_1.dat 2025-03-28T09:29:48.581348Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:48.632106Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:48.632222Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:48.633681Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6004, node 6 2025-03-28T09:29:48.703322Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:48.703351Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:48.703360Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:48.703528Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15202 TClient is connected to server localhost:15202 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:49.416471Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:49.434573Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:49.545693Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:49.756438Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:49.860793Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:52.511253Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790249980463961:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:52.511349Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:52.569610Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:52.602636Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:52.633129Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:52.666238Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:52.700636Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:52.789333Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:52.869877Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790249980464479:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:52.869972Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:52.870019Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790249980464484:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:52.873801Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:52.883011Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790249980464486:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:52.947811Z node 6 :TX_PROXY ERROR: Actor# [6:7486790249980464538:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:53.388190Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790232800593009:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:53.388275Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:54.415976Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DqSourceLocksEffects [GOOD] Test command err: Trying to start YDB, gRPC: 10614, MsgBus: 16915 2025-03-28T09:29:02.703252Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790033333845516:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:02.703309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0008aa/r3tmp/tmp1uqZKY/pdisk_1.dat 2025-03-28T09:29:03.231363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:03.231442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:03.235827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:03.249628Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10614, node 1 2025-03-28T09:29:03.391391Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:03.391415Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:03.391429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:03.391566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16915 TClient is connected to server localhost:16915 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:04.073922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:04.107027Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:04.133449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:04.263585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:04.425116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:04.498381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:06.118140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790050513716482:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:06.118263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:06.430478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:06.468552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:06.540568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:06.587039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:06.625386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:06.674677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:06.752983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790050513716999:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:06.753053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:06.753247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790050513717004:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:06.756973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:06.769512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790050513717006:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:06.861977Z node 1 :TX_PROXY ERROR: Actor# [1:7486790050513717061:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:07.703893Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790033333845516:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:07.703947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16029, MsgBus: 27721 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0008aa/r3tmp/tmpefpRby/pdisk_1.dat 2025-03-28T09:29:09.238752Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:29:09.350671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:09.350764Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:09.351317Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:09.361065Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16029, node 2 2025-03-28T09:29:09.441202Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:09.441226Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:09.441234Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:09.441359Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27721 TClient is connected to server localhost:27721 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:10.028150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:29:10.035508Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:10.053200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:10.142316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:10.331057Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:10.403907Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at scheme ... ode 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:45.501290Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:45.561761Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:45.601771Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:45.648932Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:45.715314Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:45.751398Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790195535997795:2149];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:45.751466Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:45.800251Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790217010836466:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:45.800409Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:45.800706Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790217010836471:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:45.806317Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:45.818347Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790217010836473:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:45.911817Z node 6 :TX_PROXY ERROR: Actor# [6:7486790217010836530:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 11457, MsgBus: 5064 2025-03-28T09:29:48.803993Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486790232655984405:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:48.804086Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0008aa/r3tmp/tmpO7zew8/pdisk_1.dat 2025-03-28T09:29:49.031007Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:49.070778Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:49.070902Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:49.072468Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11457, node 7 2025-03-28T09:29:49.141235Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:49.141266Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:49.141278Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:49.141427Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5064 TClient is connected to server localhost:5064 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:49.896265Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:29:49.923814Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:49.941551Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:50.046323Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:50.289736Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:50.379703Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:53.069936Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790254130822677:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:53.070043Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:53.105342Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:53.146157Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:53.183598Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:53.220455Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:53.259265Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:53.333503Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:53.413061Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790254130823195:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:53.413190Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790254130823200:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:53.413216Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:53.417559Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:53.429109Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486790254130823202:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:53.526640Z node 7 :TX_PROXY ERROR: Actor# [7:7486790254130823258:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:53.804021Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486790232655984405:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:53.804120Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:55.475473Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=ZDg5ODhhMWYtZmQ5MTQxZmUtMTEyZTFiLWIyODQzYjJl, ActorId: [7:7486790262720758119:2492], ActorState: ExecuteState, TraceId: 01jqe1caxa09mzpy4g1ckdh9yw, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`, code: 2001 >> KqpSqlIn::SimpleKey_In_And_In [GOOD] >> KqpSqlIn::SecondaryIndex_TupleParameter >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] >> KqpRanges::CastKeyBounds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::LookupColumns [GOOD] Test command err: Trying to start YDB, gRPC: 24858, MsgBus: 28009 2025-03-28T09:29:09.186551Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790064824906337:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:09.186653Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007d5/r3tmp/tmpUnJDLW/pdisk_1.dat 2025-03-28T09:29:09.595039Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24858, node 1 2025-03-28T09:29:09.681110Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:09.683740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:09.719700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:09.793367Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:09.793391Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:09.793404Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:09.793494Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28009 TClient is connected to server localhost:28009 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:10.499816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:29:10.524537Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:10.530952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:29:10.682617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:29:10.858421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:10.937674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:12.777597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790077709809980:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:12.777731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.112810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:13.195611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:13.225796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:13.257628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:13.286314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:13.331824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:13.430459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790082004777799:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.430533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.430798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790082004777804:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:13.434364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:13.444061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790082004777806:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:13.502900Z node 1 :TX_PROXY ERROR: Actor# [1:7486790082004777860:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:14.187034Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790064824906337:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:14.205811Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3094, MsgBus: 62262 2025-03-28T09:29:15.899240Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790090315006487:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:15.899280Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007d5/r3tmp/tmpXp96zC/pdisk_1.dat 2025-03-28T09:29:16.098814Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:16.098885Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:16.101571Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:16.113687Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3094, node 2 2025-03-28T09:29:16.203009Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:16.203030Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:16.203036Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:16.203135Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62262 TClient is connected to server localhost:62262 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
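The recurring pattern above — several actors log "Resource pool default not found", one then creates /Root/.metadata/workload_manager/pools/default, and the TX_PROXY line reports "path exist, request accepts it" — is a create-if-missing race at database bootstrap: whichever actor loses the race treats "already exists" as success. A small sketch of that idempotent-create pattern, in plain Python; create_path and AlreadyExistsError are illustrative stand-ins, not YDB scheme-shard calls.

import threading

class AlreadyExistsError(Exception):
    """Hypothetical 'path exist' error from a create call."""

_created = set()
_lock = threading.Lock()

def create_path(path):
    # Toy scheme-shard: the first creator wins, the rest see AlreadyExistsError.
    with _lock:
        if path in _created:
            raise AlreadyExistsError(path)
        _created.add(path)

def ensure_default_pool(path=".metadata/workload_manager/pools/default"):
    # Idempotent create: losing the race is fine as long as the pool exists.
    try:
        create_path(path)
        return "created"
    except AlreadyExistsError:
        return "already existed"

if __name__ == "__main__":
    results = []
    threads = [threading.Thread(target=lambda: results.append(ensure_default_pool()))
               for _ in range(4)]
    for t in threads: t.start()
    for t in threads: t.join()
    print(sorted(results))  # one "created", three "already existed"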
2025-03-28T09:29:16.638845Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:16.647729Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:16.656930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:16.759989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:16.930557Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting.. ... [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790225125412200:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:47.458281Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:47.513672Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:47.582580Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:47.624523Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:47.700802Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:47.736849Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:47.789910Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:47.879726Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790225125412719:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:47.879844Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:47.880307Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790225125412724:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:47.885068Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:47.899003Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790225125412726:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:47.978332Z node 6 :TX_PROXY ERROR: Actor# [6:7486790225125412782:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:48.364564Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790207945541240:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:48.364641Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7378, MsgBus: 20673 2025-03-28T09:29:50.968234Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486790240690168077:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:50.968294Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007d5/r3tmp/tmp72xFLY/pdisk_1.dat 2025-03-28T09:29:51.126969Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:51.169468Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:51.169617Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:51.172126Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7378, node 7 2025-03-28T09:29:51.235281Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:51.235311Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:51.235321Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:51.235459Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20673 TClient is connected to server localhost:20673 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
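The "Scheduled retry for error: Transaction ... completed, doublechecking" lines above show the follow-up step to that create: the transaction has committed, but the pool-creator actor keeps re-reading until the new path is actually visible before declaring the pool ready. A sketch of that poll-until-visible step in plain Python; the 300 ms visibility delay is invented for the demo.

import time

def wait_until_visible(exists, timeout=5.0, interval=0.1):
    # After 'Transaction ... completed, doublechecking': the create committed,
    # but we still re-read until the new path is actually observable.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if exists():
            return True
        time.sleep(interval)
    return False

if __name__ == "__main__":
    visible_at = time.monotonic() + 0.3  # becomes visible after 300 ms
    print(wait_until_visible(lambda: time.monotonic() >= visible_at))  # True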
2025-03-28T09:29:51.831410Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:51.849216Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:51.916810Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:52.143020Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:52.215701Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:55.157698Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790262165006342:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:55.157821Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:55.212737Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:55.248507Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:55.282548Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:55.316439Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:55.350079Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:55.387363Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:55.436230Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790262165006849:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:55.436354Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:55.436388Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790262165006854:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:55.440934Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:55.451720Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486790262165006856:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:55.520231Z node 7 :TX_PROXY ERROR: Actor# [7:7486790262165006910:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:55.968727Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486790240690168077:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:55.968797Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TKqpScanData::UnboxedValueSize >> TComputeScheduler::ResourceWeight [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources >> TKqpScanData::UnboxedValueSize [GOOD] >> KqpSort::ComplexPkInclusiveSecondOptionalPredicate [GOOD] >> KqpSort::Offset ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TComputeScheduler::ResourceWeight [GOOD] Test command err: 510 500 1510 1500 990 1000 1000 1000 |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::UnboxedValueSize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::CastKeyBounds [GOOD] Test command err: Trying to start YDB, gRPC: 14169, MsgBus: 5932 2025-03-28T09:29:23.075401Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790126087288625:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:23.083071Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006a8/r3tmp/tmpUyXtid/pdisk_1.dat 2025-03-28T09:29:23.563890Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:23.578019Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:23.578136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:23.580540Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14169, node 1 2025-03-28T09:29:23.731651Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:23.731680Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:23.731689Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:23.731806Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5932 TClient is connected to server localhost:5932 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:24.526923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:24.559220Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:26.739530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790138972191180:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:26.739616Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:27.067126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.206522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790143267158581:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:27.206605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:27.206956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790143267158586:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:27.211092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:29:27.222290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790143267158588:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:29:27.315837Z node 1 :TX_PROXY ERROR: Actor# [1:7486790143267158639:2399] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:27.481648Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790143267158678:2357], status: PRECONDITION_FAILED, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:13: Error: Missing key column in input: Key for table: /Root/TestUpsertNotNullPk, code: 2029 2025-03-28T09:29:27.483155Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWEzMzlkZTktOWI0MmE4MjktNjI1NmUzOTgtYTQxYmU4OTQ=, ActorId: [1:7486790138972191167:2328], ActorState: ExecuteState, TraceId: 01jqe1bfn32fj8ahyvpf40ssd3, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id: 2025-03-28T09:29:27.512063Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790143267158688:2361], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:13: Error: At function: KiWriteTable!
:1:47: Error: Failed to convert type: Struct<'Key':Null,'Value':String> to Struct<'Key':Uint64,'Value':String?>
:1:47: Error: Failed to convert 'Key': Null to Uint64
:1:47: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-28T09:29:27.512754Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWEzMzlkZTktOWI0MmE4MjktNjI1NmUzOTgtYTQxYmU4OTQ=, ActorId: [1:7486790138972191167:2328], ActorState: ExecuteState, TraceId: 01jqe1bfp9erk2e84n79fn6zyx, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 16622, MsgBus: 27925 2025-03-28T09:29:28.319214Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790146929677600:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:28.320102Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006a8/r3tmp/tmpz95Xi7/pdisk_1.dat 2025-03-28T09:29:28.514068Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16622, node 2 2025-03-28T09:29:28.643249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:28.643337Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:28.651382Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:28.711246Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:28.711267Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:28.711275Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:28.711382Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27925 TClient is connected to server localhost:27925 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:29.315912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
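The two compile failures above are schema-level input errors: code 2029 when the input row omits the table's key column entirely, and code 2031 when the key is present but its value (Null) cannot be converted to the column type (Uint64). A client-side pre-check that catches both, as a sketch in plain Python; the error codes are quoted from the log, while the validator itself is hypothetical.

def validate_upsert_row(row, key_columns=("Key",), column_types=None):
    # Mirrors the two errors above: code 2029 (key column absent from the
    # input row) and code 2031 (key present but Null / not convertible).
    column_types = column_types or {"Key": int}
    for col in key_columns:
        if col not in row:
            raise ValueError(f"Missing key column in input: {col}, code: 2029")
        value = row[col]
        expected = column_types.get(col)
        if value is None or (expected is not None and not isinstance(value, expected)):
            raise ValueError(f"Failed to convert '{col}': {value!r}, code: 2031")

if __name__ == "__main__":
    validate_upsert_row({"Key": 1, "Value": "ok"})          # passes silently
    for bad in ({"Value": "no key"}, {"Key": None, "Value": "x"}):
        try:
            validate_upsert_row(bad)
        except ValueError as err:
            print(err)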
2025-03-28T09:29:29.330919Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:31.779900Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790159814580117:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:31.779998Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:31.797229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:31.846312Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790159814580218:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:31.846385Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:31.846542Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790159814580223:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permission ... UND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:50.567222Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:50.567678Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790241409055784:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:50.571346Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:50.586069Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790241409055786:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:50.661636Z node 5 :TX_PROXY ERROR: Actor# [5:7486790241409055841:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:51.367718Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790224229184312:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:51.367803Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Join2"],"PlanNodeId":1,"Operators":[{"Inputs":[],"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2","SinkType":"KqpTableSink"}],"Node Type":"Stage-Sink","Stats":{"ComputeNodes":[{"Tasks":[{"EgressRows":3,"NodeId":5,"FinishTimeMs":1743154191980,"EgressBytes":39,"TaskId":1,"Host":"ghrun-mdsjyjltvm","ComputeTimeUs":253}],"PeakMemoryUsageBytes":196608,"CpuTimeUs":2582}],"UseLlvm":"undefined","MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[1,1048576]},"Tasks":1,"FinishedTasks":1,"Egress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":39,"Max":39,"Min":39,"History":[1,39]}},"Name":"KqpTableSink","Egress":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Splits":{"Count":1,"Sum":1,"Max":1,"Min":1},"Bytes":{"Count":1,"Sum":39,"Max":39,"Min":39}},"Push":{"Chunks":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":39,"Max":39,"Min":39,"History":[1,39]}}}],"PhysicalStageId":0,"StageDurationUs":0,"EgressRows":{"Count":1,"Sum":3,"Max":3,"Min":3},"BaseTimeMs":1743154191980,"EgressBytes":{"Count":1,"Sum":39,"Max":39,"Min":39},"CpuTimeUs":{"Count":1,"Sum":2177,"Max":2177,"Min":2177,"History":[1,2177]}}}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":322977,"CpuTimeUs":319913},"ProcessCpuTimeUs":376,"TotalDurationUs":332481,"ResourcePoolId":"default","QueuedTimeUs":418},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-SelfCpu":2.177,"A-Cpu":2.177,"Path":"\/Root\/Join2","Name":"Delete","Table":"Join2","SinkType":"KqpTableSink"}],"Node Type":"Delete"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query"}} Trying to start YDB, gRPC: 10715, MsgBus: 21511 2025-03-28T09:29:52.939867Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790249914273270:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:52.939942Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006a8/r3tmp/tmpFbOijx/pdisk_1.dat 2025-03-28T09:29:53.031561Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10715, node 6 2025-03-28T09:29:53.076966Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-03-28T09:29:53.077074Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:53.078505Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:53.107036Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:53.107060Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:53.107070Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:53.107207Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21511 TClient is connected to server localhost:21511 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:53.625826Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:53.631583Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:53.644498Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:53.718894Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:53.890613Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:53.972184Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:56.517322Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790267094144222:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:56.517423Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:56.560135Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:56.592819Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:56.624786Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:56.656964Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:56.687937Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:56.721932Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:56.770440Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790267094144732:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:56.770543Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:56.770590Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790267094144737:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:56.774988Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:56.785423Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790267094144739:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:56.859432Z node 6 :TX_PROXY ERROR: Actor# [6:7486790267094144793:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:57.940293Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790249914273270:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:57.940363Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> DataShardReadIteratorBatchMode::MultipleRanges [GOOD] Test command err: 2025-03-28T09:27:01.569874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:27:01.570312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:27:01.570369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00103d/r3tmp/tmpeVNGPJ/pdisk_1.dat 2025-03-28T09:27:01.953362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:27:01.999203Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:27:02.048282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:27:02.048403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:27:02.060304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:27:02.142620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:27:02.182345Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:27:02.183477Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:27:02.183979Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:27:02.184216Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:27:02.232532Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:27:02.233219Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:27:02.233322Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:27:02.235026Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:27:02.235105Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:27:02.235159Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:27:02.235505Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:27:02.235633Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:27:02.235727Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:27:02.246571Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:27:02.280402Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:27:02.280624Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:27:02.280756Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:27:02.280792Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:27:02.280824Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:27:02.280857Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:27:02.281078Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:02.281182Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:02.281497Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:27:02.281614Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:27:02.282087Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:27:02.282198Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:27:02.282254Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:27:02.282310Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:27:02.282343Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:27:02.282380Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:27:02.282427Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:27:02.282555Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:02.282592Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:02.282632Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:27:02.282715Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:27:02.282756Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:27:02.282865Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:27:02.283081Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:27:02.283176Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:27:02.283299Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:27:02.283348Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:27:02.283382Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:27:02.283418Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-28T09:27:02.283451Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:27:02.283759Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:27:02.283801Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:27:02.283834Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:27:02.283889Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:27:02.283940Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:27:02.283965Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:27:02.283999Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:27:02.284042Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:27:02.284082Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:27:02.285488Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:27:02.285558Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:27:02.296179Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:27:02.296242Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:27:02.296265Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:27:02.296292Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:27:02.296366Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:27:02.447765Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:02.447824Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:27:02.447874Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:27:02.448341Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:27:02.448401Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:27:02.448531Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:27:02.448576Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:27:02.448621Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:27:02.448672Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:27:02.454545Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:27:02.454640Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:27:02.458236Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:02.458299Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:27:02.458378Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:27:0 ... ssTransaction::Execute at 72075186224037889 2025-03-28T09:29:58.616852Z node 16 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:29:58.616906Z node 16 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-28T09:29:58.616951Z node 16 :TX_DATASHARD DEBUG: Found ready operation [3000:281474976715664] in PlanQueue unit at 72075186224037889 2025-03-28T09:29:58.616990Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit PlanQueue 2025-03-28T09:29:58.617033Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-28T09:29:58.617072Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit PlanQueue 2025-03-28T09:29:58.617119Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit LoadTxDetails 2025-03-28T09:29:58.617160Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit LoadTxDetails 2025-03-28T09:29:58.617319Z node 16 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 3000:281474976715664 keys extracted: 0 2025-03-28T09:29:58.617374Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-28T09:29:58.617411Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit LoadTxDetails 2025-03-28T09:29:58.617459Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-28T09:29:58.617490Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-28T09:29:58.617545Z node 16 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically complete end at 72075186224037889 2025-03-28T09:29:58.617598Z node 16 :TX_DATASHARD TRACE: Operation [3000:281474976715664] is the new logically incomplete end at 72075186224037889 2025-03-28T09:29:58.617646Z node 16 :TX_DATASHARD TRACE: Activated operation [3000:281474976715664] at 72075186224037889 2025-03-28T09:29:58.617700Z node 16 
:TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-28T09:29:58.617728Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-28T09:29:58.617757Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CreateVolatileSnapshot 2025-03-28T09:29:58.617787Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CreateVolatileSnapshot 2025-03-28T09:29:58.617934Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is ExecutedNoMoreRestarts 2025-03-28T09:29:58.617969Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CreateVolatileSnapshot 2025-03-28T09:29:58.618015Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit DropVolatileSnapshot 2025-03-28T09:29:58.618075Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit DropVolatileSnapshot 2025-03-28T09:29:58.618105Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-28T09:29:58.618131Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit DropVolatileSnapshot 2025-03-28T09:29:58.618157Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompleteOperation 2025-03-28T09:29:58.618183Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-03-28T09:29:58.618368Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is DelayComplete 2025-03-28T09:29:58.618402Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompleteOperation 2025-03-28T09:29:58.618449Z node 16 :TX_DATASHARD TRACE: Add [3000:281474976715664] at 72075186224037889 to execution unit CompletedOperations 2025-03-28T09:29:58.618505Z node 16 :TX_DATASHARD TRACE: Trying to execute [3000:281474976715664] at 72075186224037889 on unit CompletedOperations 2025-03-28T09:29:58.618545Z node 16 :TX_DATASHARD TRACE: Execution status for [3000:281474976715664] at 72075186224037889 is Executed 2025-03-28T09:29:58.618569Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [3000:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2025-03-28T09:29:58.618599Z node 16 :TX_DATASHARD TRACE: Execution plan for [3000:281474976715664] at 72075186224037889 has finished 2025-03-28T09:29:58.618639Z node 16 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:29:58.618678Z node 16 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-28T09:29:58.618735Z node 16 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-03-28T09:29:58.618776Z node 16 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-03-28T09:29:58.641635Z node 16 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3000} 2025-03-28T09:29:58.641812Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:29:58.641924Z 
node 16 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-03-28T09:29:58.642071Z node 16 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [16:1040:2837], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:29:58.642179Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:29:58.642717Z node 16 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 3000} 2025-03-28T09:29:58.642781Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:29:58.642814Z node 16 :TX_DATASHARD TRACE: Complete execution for [3000:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-03-28T09:29:58.642860Z node 16 :TX_DATASHARD DEBUG: Complete [3000 : 281474976715664] from 72075186224037889 at tablet 72075186224037889 send result to client [16:1040:2837], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:29:58.642906Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:29:58.644885Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [16:593:2518], Recipient [16:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 RangesSize: 3 2025-03-28T09:29:58.645112Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:29:58.645253Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-03-28T09:29:58.645475Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-28T09:29:58.645552Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:29:58.645623Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:29:58.645687Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:29:58.645738Z node 16 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-03-28T09:29:58.645831Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-28T09:29:58.645867Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:29:58.645894Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:29:58.645925Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:29:58.646114Z node 16 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 Columns: 4 Snapshot { Step: 3000 TxId: 281474976715664 } ResultFormat: FORMAT_ARROW Hints: 1 } 2025-03-28T09:29:58.646197Z node 16 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/281474976715664 2025-03-28T09:29:58.646512Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is 
Executed 2025-03-28T09:29:58.646543Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:29:58.646581Z node 16 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:29:58.646627Z node 16 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:29:58.646704Z node 16 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-28T09:29:58.646729Z node 16 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:29:58.717907Z node 16 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished 2025-03-28T09:29:58.718067Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:29:58.718293Z node 16 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-28T09:29:58.719944Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553236, Sender [16:1061:2855], Recipient [16:666:2570]: NKikimr::TEvDataShard::TEvReadScanStarted 2025-03-28T09:29:58.720196Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269553237, Sender [16:1061:2855], Recipient [16:666:2570]: NKikimr::TEvDataShard::TEvReadScanFinished 2025-03-28T09:29:58.720470Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [16:666:2570], Recipient [16:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:29:58.720521Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:29:58.720633Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:29:58.720712Z node 16 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:29:58.720801Z node 16 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-28T09:29:58.720873Z node 16 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:29:58.720935Z node 16 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:29:58.721019Z node 16 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:29:58.721117Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 |90.2%| [TA] $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [TA] {RESULT} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] >> KqpNewEngine::DqSourceLimit [GOOD] >> KqpMergeCn::SortBy_Int32 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] Test command err: 2025-03-28T09:30:00.338942Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:30:00.339545Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/000c12/r3tmp/tmprqIZpC/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-03-28T09:30:00.340276Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/000c12/r3tmp/tmprqIZpC/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/000c12/r3tmp/tmprqIZpC/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4234608986223074022 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:30:00.380689Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-28T09:30:00.381020Z node 2 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap 2025-03-28T09:30:00.397376Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [1:462:2340] with ResourceBroker at [1:433:2321] 2025-03-28T09:30:00.397508Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [1:464:2341] 2025-03-28T09:30:00.397713Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceManagerActor at [2:463:2100] with ResourceBroker at [2:434:2099] 2025-03-28T09:30:00.397805Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Start KqpResourceInfoExchangerActor at [2:465:2101] 2025-03-28T09:30:00.397978Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:30:00.398044Z node 2 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 2025-03-28T09:30:00.398082Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher 2025-03-28T09:30:00.398102Z node 1 :KQP_RESOURCE_MANAGER CRIT: Failed to deliver subscription request to config dispatcher. 
2025-03-28T09:30:00.398246Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:30:00.417624Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154200 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:30:00.417852Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:30:00.417949Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154200 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2025-03-28T09:30:00.418250Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:30:00.418526Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2025-03-28T09:30:00.418781Z node 2 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:30:00.418858Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:30:00.418974Z node 2 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1743154200 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2025-03-28T09:30:00.419176Z node 1 :KQP_RESOURCE_MANAGER INFO: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2025-03-28T09:30:00.419208Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Don't set KqpProxySharedResources 2025-03-28T09:30:00.419281Z node 1 :KQP_RESOURCE_MANAGER INFO: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1743154200 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2025-03-28T09:30:00.419750Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2025-03-28T09:30:00.419853Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:30:00.420273Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:30:00.420789Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2025-03-28T09:30:00.420910Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2025-03-28T09:30:00.420997Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-28T09:30:00.421208Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:30:00.421392Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 1 2025-03-28T09:30:00.421478Z node 1 
:KQP_RESOURCE_MANAGER DEBUG: Get resources info from node: 2 2025-03-28T09:30:00.424493Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-1 (1 by [1:462:2340]) priority=0 resources={0, 1000} 2025-03-28T09:30:00.424556Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-1 (1 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:30:00.424604Z node 1 :RESOURCE_BROKER DEBUG: Allocate resources {0, 1000} for task kqp-1-2-1 (1 by [1:462:2340]) from queue queue_kqp_resource_manager 2025-03-28T09:30:00.424637Z node 1 :RESOURCE_BROKER DEBUG: Assigning in-fly task kqp-1-2-1 (1 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:30:00.424676Z node 1 :RESOURCE_BROKER DEBUG: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 2.500000 (insert task kqp-1-2-1 (1 by [1:462:2340])) 2025-03-28T09:30:00.424895Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 1000ExternalMemory: 0 } 2025-03-28T09:30:00.424988Z node 1 :RESOURCE_BROKER DEBUG: Submitted new kqp_query task kqp-1-2-2 (2 by [1:462:2340]) priority=0 resources={0, 100000} 2025-03-28T09:30:00.425026Z node 1 :RESOURCE_BROKER DEBUG: Assigning waiting task kqp-1-2-2 (2 by [1:462:2340]) to queue queue_kqp_resource_manager 2025-03-28T09:30:00.425071Z node 1 :RESOURCE_BROKER DEBUG: Not enough resources to start task kqp-1-2-2 (2 by [1:462:2340]) 2025-03-28T09:30:00.425107Z node 1 :RESOURCE_BROKER DEBUG: Removing task kqp-1-2-2 (2 by [1:462:2340]) 2025-03-28T09:30:00.425190Z node 1 :KQP_RESOURCE_MANAGER NOTICE: TxId: 1, taskId: 2. Not enough memory for query, requested: 100000. TxResourcesInfo { TxId: 1, Database: , tx initially granted memory: 0B, tx total memory allocations: 1000B, tx largest successful memory allocation: 1000B, tx last failed memory allocation: 0B, tx total execution units: 0, started at: 2025-03-28T09:30:00.424423Z } |90.2%| [TA] $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [TA] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSinkTx::SnapshotRO >> KqpSnapshotRead::TestSnapshotExpiration-withSink >> KqpSnapshotIsolation::TReadOnlyOltp >> KqpTx::TooManyTx >> KqpSnapshotIsolation::TConflictWriteOltpNoSink >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite >> KqpSnapshotIsolation::TConflictWriteOltp >> KqpSnapshotIsolation::TSimpleOltpNoSink >> KqpSinkTx::LocksAbortOnCommit >> KqpSinkMvcc::OlapNamedStatement >> KqpSnapshotRead::TestReadOnly+withSink >> KqpSinkLocks::TInvalidate >> KqpSinkTx::SnapshotROInteractive2 >> KqpTx::CommitRequired >> KqpReturning::ReturningTypes [GOOD] >> KqpSnapshotIsolation::TConflictReadWriteOltp >> KqpTx::SnapshotRO >> KqpSinkTx::OlapSnapshotROInteractive1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::DqSourceLimit [GOOD] Test command err: Trying to start YDB, gRPC: 65030, MsgBus: 13922 2025-03-28T09:29:10.789965Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790068795005370:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:10.790493Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000747/r3tmp/tmpnl36nH/pdisk_1.dat 2025-03-28T09:29:11.211705Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:11.216710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:11.216844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:11.219362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65030, node 1 2025-03-28T09:29:11.335061Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:11.335082Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:11.335088Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:11.335198Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13922 TClient is connected to server localhost:13922 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:11.843913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:11.859541Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:11.875756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:29:12.011238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:29:12.181252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:12.265706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:14.100792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790085974876186:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:14.100890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:14.440829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:14.481328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:14.516246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:14.553200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:14.591956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:14.638380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:14.690993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790085974876699:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:14.691095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:14.694985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790085974876704:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:14.702632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:14.715776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790085974876706:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:14.800284Z node 1 :TX_PROXY ERROR: Actor# [1:7486790085974876759:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:15.790841Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790068795005370:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:15.801206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29384, MsgBus: 4040 2025-03-28T09:29:17.717659Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790096698799643:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:17.722501Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000747/r3tmp/tmpzGmjIF/pdisk_1.dat 2025-03-28T09:29:17.834188Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29384, node 2 2025-03-28T09:29:17.863175Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:17.863272Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:17.866060Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:17.967255Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:17.967279Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:17.967291Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:17.967410Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4040 TClient is connected to server localhost:4040 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:18.400902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:18.415752Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:18.437024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:18.507164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:18.727675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... ... WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790246115981511:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:51.316324Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:51.376917Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:51.446833Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:51.485797Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:51.524765Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:51.563985Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:51.634496Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:51.743936Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790246115982037:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:51.744013Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790246115982042:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:51.744021Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:51.747491Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:51.755963Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790246115982044:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:51.823028Z node 6 :TX_PROXY ERROR: Actor# [6:7486790246115982098:3455] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:52.030660Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790228936110547:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:52.030766Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15464, MsgBus: 30811 2025-03-28T09:29:54.601854Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486790258180420088:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:54.601938Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000747/r3tmp/tmpWYZs7d/pdisk_1.dat 2025-03-28T09:29:54.753938Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:54.754059Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:54.754197Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:54.774037Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15464, node 7 2025-03-28T09:29:54.839376Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:54.839403Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:54.839414Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:54.839566Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30811 TClient is connected to server localhost:30811 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:55.464155Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:55.481441Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:55.557010Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:55.746765Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:55.819245Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:58.481907Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790275360291036:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:58.482026Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:58.521830Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:58.565535Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:58.604925Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:58.643031Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:58.681173Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:58.760326Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:58.812575Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790275360291552:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:58.812695Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:58.812710Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790275360291557:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:58.818452Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:58.831173Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486790275360291559:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:58.895612Z node 7 :TX_PROXY ERROR: Actor# [7:7486790275360291612:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:59.602099Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486790258180420088:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:59.687077Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpMergeCn::SortBy_Int32 [GOOD] Test command err: Trying to start YDB, gRPC: 14661, MsgBus: 19119 2025-03-28T09:29:06.934293Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790052182508841:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:06.934352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000829/r3tmp/tmpkqzf32/pdisk_1.dat 2025-03-28T09:29:07.532694Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:07.536565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:07.536653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:07.539859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14661, node 1 2025-03-28T09:29:07.615216Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:07.615238Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:07.615244Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:07.615367Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19119 TClient is connected to server localhost:19119 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:08.210435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:08.258351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:08.391565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:08.585508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:08.658447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:10.283911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790069362379809:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:10.284045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:10.679129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:10.711527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:10.745965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:10.775424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:10.816443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:10.869971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:10.983634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790069362380328:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:10.983712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:10.984077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790069362380333:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:10.987869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:11.010365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790069362380335:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:11.094393Z node 1 :TX_PROXY ERROR: Actor# [1:7486790073657347686:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:11.934966Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790052182508841:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:11.935060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:12.171616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:13.029587Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154153062, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 19095, MsgBus: 14235 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000829/r3tmp/tmpQXBtXy/pdisk_1.dat 2025-03-28T09:29:14.069804Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790079051930871:2091];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:14.069911Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:29:14.107681Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:14.119501Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:14.119580Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19095, node 2 2025-03-28T09:29:14.122194Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:14.204064Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:14.204085Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:14.204093Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:14.204202Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14235 TClient is connected to server localhost:14235 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:14.675846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:14.687073Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:14.708570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:14.782898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 w ... 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:49.355251Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:49.398796Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:49.499275Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:49.596338Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790234014155467:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:49.596464Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:49.596796Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790234014155472:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:49.601901Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:49.624902Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-28T09:29:49.625131Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790234014155474:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:49.717120Z node 6 :TX_PROXY ERROR: Actor# [6:7486790234014155529:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:50.192507Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790216834284021:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:50.192747Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:51.242034Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:52.219857Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154192255, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 9447, MsgBus: 20435 2025-03-28T09:29:53.124196Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486790254437416162:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:53.124333Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000829/r3tmp/tmpQ0V85O/pdisk_1.dat 2025-03-28T09:29:53.247036Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:53.274050Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:53.274156Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:53.275715Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9447, node 7 2025-03-28T09:29:53.333342Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:53.333372Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:53.333384Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:53.333546Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20435 TClient is connected to server localhost:20435 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:53.907745Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:53.913937Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:53.926040Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:29:54.000393Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:29:54.225803Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:54.303739Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:57.551127Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790271617287118:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:57.551223Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:57.613481Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:57.655628Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:57.751456Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:57.800688Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:57.873197Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:57.929728Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:57.992247Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790271617287633:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:57.992371Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:57.992678Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790271617287638:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:57.997031Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:58.011082Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486790271617287640:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:58.082093Z node 7 :TX_PROXY ERROR: Actor# [7:7486790275912254989:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:58.124744Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486790254437416162:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:58.124853Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:59.401415Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:30:00.304885Z node 7 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154200340, txId: 281474976715673] shutting down >> KqpSnapshotRead::TestReadOnly-withSink >> KqpNewEngine::PagingNoPredicateExtract [GOOD] >> KqpNewEngine::MultipleBroadcastJoin >> KqpSinkLocks::DifferentKeyUpdate >> KqpRanges::NoFullScanAtDNFPredicate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::ReturningTypes [GOOD] Test command err: Trying to start YDB, gRPC: 24917, MsgBus: 3197 2025-03-28T09:29:04.402280Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790041549410970:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:04.402340Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00085b/r3tmp/tmpxEjDhN/pdisk_1.dat 2025-03-28T09:29:04.843826Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:04.845928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:04.845997Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:04.850137Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24917, node 1 2025-03-28T09:29:04.953231Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:04.953250Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:04.953263Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:04.953392Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3197 TClient is connected to server localhost:3197 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:05.639433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:05.667087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:05.843973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:06.038267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:06.118053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:07.806253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790054434314571:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:07.806411Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:08.109593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:08.138811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:08.177323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:08.211138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:08.243928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:08.316624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:08.414222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790058729282389:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:08.414291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:08.414426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790058729282394:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:08.418199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:08.435907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790058729282396:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:08.510929Z node 1 :TX_PROXY ERROR: Actor# [1:7486790058729282453:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:09.406799Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790041549410970:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:09.407013Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:09.556384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:09.600472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:29:09.680241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 25868, MsgBus: 22444 2025-03-28T09:29:13.208352Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790082978719733:2125];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:13.208391Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00085b/r3tmp/tmpbk80jr/pdisk_1.dat 2025-03-28T09:29:13.398752Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:13.424090Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:13.424175Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:13.426917Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25868, node 2 2025-03-28T09:29:13.482201Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:13.482222Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:13.482230Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:13.482336Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22444 TClient is connected to server localhost:22444 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:13.976443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:13.984760Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:14.001499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, a ... 4976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:48.177633Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:48.223768Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:48.299130Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:48.349434Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:48.436975Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:48.546836Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790231916003401:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:48.546982Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:48.550820Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790231916003406:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:48.564508Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:48.585302Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790231916003408:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:48.651412Z node 5 :TX_PROXY ERROR: Actor# [5:7486790231916003463:3458] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:48.889729Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790210441164627:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:48.889816Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:50.227657Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:50.318903Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:29:50.385656Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22891, MsgBus: 28386 2025-03-28T09:29:54.422785Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790255907714595:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:54.422867Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00085b/r3tmp/tmpIGBzCJ/pdisk_1.dat 2025-03-28T09:29:54.556608Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:54.556720Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:54.558400Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:54.558946Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22891, node 6 2025-03-28T09:29:54.633123Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:54.633151Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:54.633161Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:54.633304Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28386 TClient is connected to server localhost:28386 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:55.204464Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:55.222390Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:55.290150Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:55.557792Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:55.654341Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:58.459079Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790273087585554:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:58.459187Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:58.521418Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:58.560573Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:58.596310Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:58.630822Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:58.667820Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:58.707982Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:58.790272Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790273087586072:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:58.790379Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:58.790452Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790273087586077:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:58.794566Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:58.806991Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790273087586079:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:58.904350Z node 6 :TX_PROXY ERROR: Actor# [6:7486790273087586135:3456] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:59.423084Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790255907714595:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:59.423187Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSqlIn::TupleSelect [GOOD] >> KqpSqlIn::TupleNotOnlyOfKeys >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink >> KqpSnapshotIsolation::TSimpleOltp >> KqpNewEngine::PushFlatmapInnerConnectionsToStageInput [GOOD] >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage >> KqpReturning::Random [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::NoFullScanAtDNFPredicate [GOOD] Test command err: Trying to start YDB, gRPC: 21846, MsgBus: 23116 2025-03-28T09:28:49.506894Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789979296295194:2182];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:49.508947Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a4f/r3tmp/tmpInpkRB/pdisk_1.dat 2025-03-28T09:28:49.927083Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21846, node 1 2025-03-28T09:28:50.014947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:50.018794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:50.065218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:50.115136Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:50.115155Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:50.115159Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:50.115238Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23116 TClient is connected to server localhost:23116 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:50.813592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:50.869665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:51.066982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:51.240727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:51.316881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:28:52.800357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789992181198739:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:52.800472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:53.130603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:28:53.170641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:28:53.244016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:28:53.287602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:28:53.329701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:28:53.414524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:28:53.475219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789996476166554:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:53.475335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:53.475414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789996476166559:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:28:53.479501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:28:53.521382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789996476166561:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:28:53.610576Z node 1 :TX_PROXY ERROR: Actor# [1:7486789996476166619:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:28:54.496641Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789979296295194:2182];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:54.508442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:28:54.852845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.100918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.343653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.542159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:28:55.921977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20006, MsgBus: 12147 2025-03-28T09:28:57.235340Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790012375987133:2171];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:28:57.257692Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a4f/r3tmp/tmp6qYOQe/pdisk_1.dat 2025-03-28T09:28:57.325864Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20006, node 2 2025-03-28T09:28:57.363914Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:28:57.364070Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:28:57.366366Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:28:57.416827Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:28:57.416861Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:28:57.416875Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:28:57.416995Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12147 TClient is connected to 
server localhost:12147 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:28:57.908340Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... ol info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:51.111598Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:51.111836Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7486790244036469220:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:51.117109Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:51.130493Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7486790244036469222:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:51.205532Z node 8 :TX_PROXY ERROR: Actor# [8:7486790244036469277:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:51.250240Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7486790222561630438:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:51.250339Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:52.713742Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:53.563501Z node 8 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154193599, txId: 281474976715673] shutting down ---------QUERY---------- --!syntax_v1 SELECT * FROM `/Root/TableWithIntKey` WHERE Key1 IN (1, 2, 100, 101, 102, 200, 201, 201, 1000, 1001, 1002, 2000, 2001, 2002) AND (Key1 > 2000) ORDER BY Key1; ---------RESULT--------- [[[2001];#];[[2002];[2]]] ------------------------ Trying to start YDB, gRPC: 18924, MsgBus: 20511 2025-03-28T09:29:54.553693Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486790257698007843:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:54.553962Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a4f/r3tmp/tmpRtAXAU/pdisk_1.dat 2025-03-28T09:29:54.730746Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:54.731861Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:54.731978Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:54.747992Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18924, node 9 2025-03-28T09:29:54.803521Z node 9 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:54.803557Z node 9 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:54.803573Z node 9 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:54.803738Z node 9 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20511 TClient is connected to server localhost:20511 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:55.479132Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:55.485557Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:55.497041Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:55.615901Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:55.856738Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:55.935197Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:59.101259Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7486790279172846093:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:59.101339Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:59.147432Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:59.188242Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:59.227099Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:59.263886Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:59.299664Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:59.337292Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:59.382018Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7486790279172846602:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:59.382124Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:59.382127Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [9:7486790279172846607:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:59.386013Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:59.395558Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7486790279172846609:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:59.464762Z node 9 :TX_PROXY ERROR: Actor# [9:7486790279172846663:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:59.554117Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7486790257698007843:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:59.554202Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:00.851016Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.587266Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154201621, txId: 281474976715673] shutting down ---------QUERY---------- --!syntax_v1 SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND (Key2 = 100 OR Key2 = 300) ORDER BY Value; ---------RESULT--------- [[[5u]];[[9u]]] ------------------------ 2025-03-28T09:30:02.217097Z node 9 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154202251, txId: 281474976715675] shutting down ---------QUERY---------- --!syntax_v1 SELECT Value FROM `/Root/TestDNF` WHERE Key1 = 1 AND Key2 IN (100, 300, 400) ORDER BY Value; ---------RESULT--------- [[[5u]];[[9u]];[[10u]]] ------------------------ >> KqpNewEngine::JoinSameKey [GOOD] >> KqpTx::LocksAbortOnCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpReturning::Random [GOOD] Test command err: Trying to start YDB, gRPC: 21234, MsgBus: 2929 2025-03-28T09:29:17.832171Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790097985454325:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:17.832683Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006b6/r3tmp/tmpVUudQM/pdisk_1.dat 2025-03-28T09:29:18.230271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:18.230333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:18.242857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:18.266639Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21234, node 1 2025-03-28T09:29:18.413156Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:18.413182Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:18.413189Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:18.413805Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected 
to server localhost:2929 TClient is connected to server localhost:2929 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:19.089581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:19.115400Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:19.135784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:19.300825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:19.474435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:19.565734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:21.464854Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790115165325139:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:21.465404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:21.788578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:21.842152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:21.917363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:21.972807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.041105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.091130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.174841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790119460292954:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:22.174930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:22.175314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790119460292959:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:22.179806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:22.198428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790119460292961:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:22.262461Z node 1 :TX_PROXY ERROR: Actor# [1:7486790119460293016:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:22.822873Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790097985454325:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:22.822936Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Warning: Type annotation, code: 1030
:4:13: Warning: At function: RemovePrefixMembers, At function: RemoveSystemMembers, At function: PersistableRepr, At function: SqlProject
:4:27: Warning: At function: Filter, At function: Coalesce
:4:50: Warning: At function: SqlIn
:4:50: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:4:13: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 14397, MsgBus: 13535 2025-03-28T09:29:24.887029Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790129545827631:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:24.887479Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006b6/r3tmp/tmpcJGXaR/pdisk_1.dat 2025-03-28T09:29:25.067022Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:25.086356Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:25.086444Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:25.087506Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14397, node 2 2025-03-28T09:29:25.167250Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:25.167273Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:25.167282Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:25.167397Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13535 TClient is connected to server localhost:13535 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:25.853500Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:25.878940Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:25.905950Z node 2 :FLAT_TX_SCH ... 
cess permissions } 2025-03-28T09:29:54.101260Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:54.131546Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:54.166822Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:54.199883Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:54.231778Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:54.305055Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:54.350911Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790256288985734:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:54.351066Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:54.351439Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790256288985739:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:54.355849Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:54.367043Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790256288985741:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:54.443813Z node 5 :TX_PROXY ERROR: Actor# [5:7486790256288985794:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:55.370844Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790239109114256:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:55.370931Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:55.627054Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 [[[2];["321"]];[["111"];[2]]] Trying to start YDB, gRPC: 61674, MsgBus: 11954 2025-03-28T09:29:57.632379Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790269165582852:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:57.632458Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006b6/r3tmp/tmpeLBh0l/pdisk_1.dat 2025-03-28T09:29:57.856941Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:57.879693Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:57.879834Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:57.884018Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61674, node 6 2025-03-28T09:29:57.953763Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:57.953794Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:57.953806Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:57.953972Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11954 TClient is connected to server localhost:11954 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:58.476411Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:58.494486Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:58.567788Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:58.799775Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:58.876820Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:01.533882Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790286345453799:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:01.534028Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:01.600435Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.647376Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.687698Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.728777Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.772609Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.817014Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.866600Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790286345454313:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:01.866616Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790286345454318:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:01.866777Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:01.870483Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:01.882466Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790286345454320:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:01.956392Z node 6 :TX_PROXY ERROR: Actor# [6:7486790286345454373:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:02.654454Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790269165582852:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.654577Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:03.477494Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::JoinSameKey [GOOD] Test command err: Trying to start YDB, gRPC: 31902, MsgBus: 8274 2025-03-28T09:29:13.869396Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790080061079987:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:13.869455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00070f/r3tmp/tmpehhimz/pdisk_1.dat 2025-03-28T09:29:14.408504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:14.408601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:14.409946Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:14.412865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31902, node 1 2025-03-28T09:29:14.526450Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:14.526471Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:14.526478Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:14.526608Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8274 TClient is connected to server localhost:8274 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:15.049805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:15.070122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:15.226563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:15.420872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:15.510628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:17.460542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790097240950967:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:17.460634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:17.748271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:17.778190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:17.815721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:17.846867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:17.889729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:17.963575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:18.047782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790101535918778:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.047919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.048195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790101535918784:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:18.052087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:18.068053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790101535918786:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:18.153717Z node 1 :TX_PROXY ERROR: Actor# [1:7486790101535918840:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:18.874821Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790080061079987:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:18.874883Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28017, MsgBus: 13388 2025-03-28T09:29:20.650744Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790111784176287:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:20.651000Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00070f/r3tmp/tmp8PI5pb/pdisk_1.dat 2025-03-28T09:29:20.890249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:20.890340Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:20.891761Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:20.892809Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28017, node 2 2025-03-28T09:29:21.045768Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:21.045793Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:21.045800Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:21.045917Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13388 TClient is connected to server localhost:13388 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:21.595741Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:21.620280Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:21.640857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:21.751874Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:21.942491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:22.026004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreate ... [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790254313440232:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:53.607554Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:53.665174Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:53.699633Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:53.738275Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:53.773991Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:53.810591Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:53.851929Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:53.900819Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790254313440746:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:53.900907Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:53.900965Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790254313440751:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:53.905423Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:53.916011Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790254313440753:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:54.014525Z node 6 :TX_PROXY ERROR: Actor# [6:7486790258608408102:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:54.346856Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790237133569271:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:54.347041Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7601, MsgBus: 21874 2025-03-28T09:29:56.866575Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486790264080018266:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:56.866663Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00070f/r3tmp/tmpx2UHcX/pdisk_1.dat 2025-03-28T09:29:57.001325Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:57.034151Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:57.034250Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:57.035758Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7601, node 7 2025-03-28T09:29:57.088864Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:57.088901Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:57.088913Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:57.089079Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21874 TClient is connected to server localhost:21874 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:57.683411Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:57.703951Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:57.777033Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:57.972580Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:29:58.056750Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.034911Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790285554856530:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:01.035034Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:01.086042Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.119516Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.154225Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.191405Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.227904Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.302592Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.370593Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790285554857046:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:01.370708Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:01.370772Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790285554857051:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:01.376108Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:01.391693Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486790285554857053:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:01.488636Z node 7 :TX_PROXY ERROR: Actor# [7:7486790285554857108:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:01.866612Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486790264080018266:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:01.866713Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSqlIn::SecondaryIndex_ComplexKey_In_And_In [GOOD] >> KqpSqlIn::PhasesCount >> KqpTx::CommitRoTx >> KqpSqlIn::Dict [GOOD] >> KqpSqlIn::Delete >> KqpSort::Offset [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 >> KqpTx::InteractiveTx >> KqpTx::CommitRequired [GOOD] >> KqpTx::CommitPrepared >> KqpSqlIn::SecondaryIndex_TupleParameter [GOOD] >> KqpSqlIn::SecondaryIndex_TupleLiteral >> KqpTx::TooManyTx [GOOD] >> KqpTx::SnapshotROInteractive2 >> KqpSnapshotRead::TestReadOnly+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink >> KqpTx::SnapshotRO [GOOD] >> KqpTx::SnapshotROInteractive1 >> KqpSnapshotRead::TestReadOnly-withSink [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration+withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSort::Offset [GOOD] Test command err: Trying to start YDB, gRPC: 10106, MsgBus: 22469 2025-03-28T09:29:15.827387Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790087879419237:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:15.827934Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006db/r3tmp/tmp5OpLol/pdisk_1.dat 2025-03-28T09:29:16.333289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:16.333427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:16.336927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:16.379674Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10106, node 1 2025-03-28T09:29:16.482478Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:16.482503Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:16.482509Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:16.482625Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22469 TClient is connected to server localhost:22469 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:17.037670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:17.063468Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:17.079459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:17.220977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:17.379805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:17.461226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:19.012590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790105059290193:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:19.012719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:19.425048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:19.479490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:19.547015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:19.577157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:19.606951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:19.678901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:19.727697Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790105059290712:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:19.727809Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:19.728161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790105059290717:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:19.731285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:19.739199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790105059290719:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:19.828483Z node 1 :TX_PROXY ERROR: Actor# [1:7486790105059290775:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:20.828519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:20.830906Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790087879419237:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:20.830938Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:20.929933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:29:20.983740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2379, MsgBus: 25958 2025-03-28T09:29:25.025628Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790127767980498:2082];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:25.025703Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006db/r3tmp/tmpFIhitr/pdisk_1.dat 2025-03-28T09:29:25.181724Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:25.194294Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:25.194402Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:25.196133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2379, node 2 2025-03-28T09:29:25.352396Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:25.352420Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:25.352427Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:25.352517Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25958 TClient is connected to server localhost:25958 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:25.942468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:25.955224Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:25.968889Z nod ... [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790265168992594:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:56.283105Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:56.325897Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:56.354155Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:56.383015Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:56.412423Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:56.442521Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:56.511401Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:56.558998Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790265168993109:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:56.559084Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:56.559163Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790265168993114:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:56.562596Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:56.572142Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790265168993116:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:56.631860Z node 5 :TX_PROXY ERROR: Actor# [5:7486790265168993169:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:57.509919Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790247989121609:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:57.509999Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1710, MsgBus: 22436 2025-03-28T09:30:00.077727Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790282234802419:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:00.077827Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006db/r3tmp/tmp4SqEw3/pdisk_1.dat 2025-03-28T09:30:00.212014Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:00.240779Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:00.240898Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:00.242414Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1710, node 6 2025-03-28T09:30:00.294136Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:00.294171Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:00.294180Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:00.294341Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22436 TClient is connected to server localhost:22436 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:30:00.929512Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:00.947790Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:01.008983Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:01.206619Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:01.294165Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.591364Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790299414673383:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:04.591498Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:04.659639Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:04.699481Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:04.746391Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:04.795799Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:04.842098Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:04.922495Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:04.992067Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790299414673900:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:04.992183Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:04.993280Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790299414673905:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:04.998465Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:05.013307Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790299414673907:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:05.078180Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790282234802419:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:05.078262Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:05.082084Z node 6 :TX_PROXY ERROR: Actor# [6:7486790303709641257:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DisableDeduplication [GOOD] Test command err: === Server->StartServer(false); 2025-03-28T09:25:18.484799Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789071484310108:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.484882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011f3/r3tmp/tmpvUmska/pdisk_1.dat 2025-03-28T09:25:18.681505Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:25:18.865081Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:18.892579Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:18.892715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:18.894514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11943, node 1 2025-03-28T09:25:19.125895Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/0011f3/r3tmp/yandexBFSW4f.tmp 2025-03-28T09:25:19.125928Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/0011f3/r3tmp/yandexBFSW4f.tmp 2025-03-28T09:25:19.127206Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/0011f3/r3tmp/yandexBFSW4f.tmp 2025-03-28T09:25:19.127378Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:25:19.314212Z INFO: TTestServer started on Port 1475 GrpcPort 11943 TClient is connected to server localhost:1475 PQClient connected to localhost:11943 === TenantModeEnabled() = 0 === Init PQ - start server on port 11943 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:19.699891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:25:19.700083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.700261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:25:19.701785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:25:19.701845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.703466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:25:19.703600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:25:19.703733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.703814Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:25:19.703840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-28T09:25:19.703854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-03-28T09:25:19.704360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.704413Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:25:19.704426Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-28T09:25:19.704818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.704846Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.704866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:25:19.704915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:25:19.708999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:19.709324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-28T09:25:19.709529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 waiting... 2025-03-28T09:25:19.711768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153919759, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:25:19.711911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743153919759 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:25:19.711952Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:25:19.712231Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-28T09:25:19.712257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:25:19.712442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:25:19.712986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:25:19.713475Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:25:19.713493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:25:19.713664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:25:19.713702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486789071484310613:2243], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-28T09:25:19.713751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.713770Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone 
opId# 281474976710657:0 ProgressState 2025-03-28T09:25:19.713855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:25:19.713872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:25:19.713897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:25:19.713906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:25:19.713920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-28T09:25:19.713937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:25:19.713950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-28T09:25:19.713958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-03-28T09:25:19.713991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-28T09:25:19.714031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-03-28T09:25:19.714042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-03-28T09:25:19.717490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-03-28T09:25:19.717601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710657 2025-03-28T09:25:19.717613Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2025-03-28T09:25:19.717644Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-03-28T09:25:19.717669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:25:19.717770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281 ... 
::Pointer &> /-S/contrib/libs/cxxsupp/libcxx/include/variant:715:1 #7 0x1a182e6d in __union<2UL, const grpc_core::ChannelArgs::Pointer &> /-S/contrib/libs/cxxsupp/libcxx/include/variant:715:1 #8 0x1a182e6d in construct_at >, grpc_core::ChannelArgs::Pointer>, const std::__y1::in_place_index_t<2UL> &, const grpc_core::ChannelArgs::Pointer &, std::__y1::__variant_detail::__union<(std::__y1::__variant_detail::_Trait)1, 0UL, int, TBasicString >, grpc_core::ChannelArgs::Pointer> *> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/construct_at.h:41:46 #9 0x1a182e6d in __construct_at >, grpc_core::ChannelArgs::Pointer>, const std::__y1::in_place_index_t<2UL> &, const grpc_core::ChannelArgs::Pointer &, std::__y1::__variant_detail::__union<(std::__y1::__variant_detail::_Trait)1, 0UL, int, TBasicString >, grpc_core::ChannelArgs::Pointer> *> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/construct_at.h:49:10 #10 0x1a182e6d in operator() &> /-S/contrib/libs/cxxsupp/libcxx/include/variant:825:13 #11 0x1a182e6d in __invoke<(lambda at /-S/contrib/libs/cxxsupp/libcxx/include/variant:824:11), const std::__y1::__variant_detail::__alt<2UL, grpc_core::ChannelArgs::Pointer> &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #12 0x1a182e6d in decltype(auto) std::__y1::__variant_detail::__visitation::__base::__dispatcher<2ul>::__dispatch[abi:fe190000]>, grpc_core::ChannelArgs::Pointer>>::__generic_construct[abi:fe190000]>, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> const&>(std::__y1::__variant_detail::__ctor>, grpc_core::ChannelArgs::Pointer>>&, std::__y1::__variant_detail::__copy_constructor>, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> const&)::'lambda'(std::__y1::__variant_detail::__copy_constructor>, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> const&)&&, std::__y1::__variant_detail::__base<(std::__y1::__variant_detail::_Trait)1, int, TBasicString>, grpc_core::ChannelArgs::Pointer> const&>(std::__y1::__variant_detail::__copy_constructor>, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> const&, std::__y1::__variant_detail::__base<(std::__y1::__variant_detail::_Trait)1, int, TBasicString>, grpc_core::ChannelArgs::Pointer> const&) /-S/contrib/libs/cxxsupp/libcxx/include/variant:540:14 #13 0x1a17e5c8 in __visit_alt_at<(lambda at /-S/contrib/libs/cxxsupp/libcxx/include/variant:824:11), const std::__y1::__variant_detail::__copy_constructor >, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> &> /-S/contrib/libs/cxxsupp/libcxx/include/variant:502:12 #14 0x1a17e5c8 in __generic_construct >, grpc_core::ChannelArgs::Pointer>, (std::__y1::__variant_detail::_Trait)1> &> /-S/contrib/libs/cxxsupp/libcxx/include/variant:822:7 #15 0x1a17e5c8 in __copy_constructor /-S/contrib/libs/cxxsupp/libcxx/include/variant:897:1 #16 0x1a17e5c8 in __assignment /-S/contrib/libs/cxxsupp/libcxx/include/variant:909:28 #17 0x1a17e5c8 in __move_assignment /-S/contrib/libs/cxxsupp/libcxx/include/variant:995:1 #18 0x1a17e5c8 in __copy_assignment /-S/contrib/libs/cxxsupp/libcxx/include/variant:1025:1 #19 0x1a17e5c8 in __impl /-S/contrib/libs/cxxsupp/libcxx/include/variant:1045:25 #20 0x1a17e5c8 in variant /-S/contrib/libs/cxxsupp/libcxx/include/variant:1192:35 #21 0x1a17e5c8 in grpc_core::AVL>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Rebalance(TBasicString>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>, std::__y1::shared_ptr>, std::__y1::variant>, 
grpc_core::ChannelArgs::Pointer>>::Node> const&, std::__y1::shared_ptr>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Node> const&) /-S/contrib/libs/grpc/src/core/lib/avl/avl.h:252:30 #22 0x1a17d6ea in grpc_core::AVL>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::AddKey(std::__y1::shared_ptr>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Node> const&, TBasicString>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>) /-S/contrib/libs/grpc/src/core/lib/avl/avl.h:265:14 #23 0x1a17d6b9 in grpc_core::AVL>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::AddKey(std::__y1::shared_ptr>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Node> const&, TBasicString>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>) /-S/contrib/libs/grpc/src/core/lib/avl/avl.h:266:24 #24 0x1a1755c7 in grpc_core::AVL>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>>::Add(TBasicString>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>) const /-S/contrib/libs/grpc/src/core/lib/avl/avl.h:36:16 #25 0x1a174faa in grpc_core::ChannelArgs::Set(std::__y1::basic_string_view>, std::__y1::variant>, grpc_core::ChannelArgs::Pointer>) const /-S/contrib/libs/grpc/src/core/lib/channel/channel_args.cc:158:28 #26 0x1a174675 in grpc_core::ChannelArgs::Set(std::__y1::basic_string_view>, grpc_core::ChannelArgs::Pointer) const /-S/contrib/libs/grpc/src/core/lib/channel/channel_args.cc:150:10 #27 0x1a1debe6 in grpc_core::Channel::Create(char const*, grpc_core::ChannelArgs, grpc_channel_stack_type, grpc_transport*) /-S/contrib/libs/grpc/src/core/lib/surface/channel.cc:218:19 #28 0x1a5652d1 in CreateChannel /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/client/chttp2_connector.cc:323:10 #29 0x1a5652d1 in grpc_channel_create /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/client/chttp2_connector.cc:365:14 #30 0x1adc8c60 in grpc::(anonymous namespace)::InsecureChannelCredentialsImpl::CreateChannelWithInterceptors(TBasicString> const&, grpc::ChannelArguments const&, std::__y1::vector>, std::__y1::allocator>>>) /-S/contrib/libs/grpc/src/cpp/client/insecure_credentials.cc:55:13 #31 0x1adc8a3b in grpc::(anonymous namespace)::InsecureChannelCredentialsImpl::CreateChannelImpl(TBasicString> const&, grpc::ChannelArguments const&) /-S/contrib/libs/grpc/src/cpp/client/insecure_credentials.cc:40:12 #32 0x1adc11f4 in grpc::CreateCustomChannel(TBasicString> const&, std::__y1::shared_ptr const&, grpc::ChannelArguments const&) /-S/contrib/libs/grpc/src/cpp/client/create_channel.cc:50:25 #33 0x18d0bc46 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TDirectReadTestSetup::Connect(NKikimr::NPersQueueTests::TPersQueueV1TestServer&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:824:23 #34 0x18a07e12 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TDirectReadTestSetup::TDirectReadTestSetup(NKikimr::NPersQueueTests::TPersQueueV1TestServer&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:806:13 #35 0x18a1cd16 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TTestCaseDirectReadBadCases::Execute_(NUnitTest::TTestContext&) /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:1192:30 #36 0x18cea907 in operator() /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1 #37 0x18cea907 in __invoke<(lambda at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #38 0x18cea907 in __call<(lambda at /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1) &> 
/-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #39 0x18cea907 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #40 0x18cea907 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10 #41 0x19788ca5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #42 0x19788ca5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #43 0x19788ca5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #44 0x197587f8 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #45 0x18ce98b3 in NKikimr::NPersQueueTests::NTestSuiteTPersQueueTest::TCurrentTest::Execute() /-S/ydb/services/persqueue_v1/persqueue_ut.cpp:134:1 #46 0x1975a0c5 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #47 0x1978321c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #48 0x7f58f2709d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) SUMMARY: AddressSanitizer: 5611308 byte(s) leaked in 1394 allocation(s). >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink >> TPersQueueTest::TClusterTrackerTest [GOOD] >> TPersQueueTest::SrcIdCompatibility >> KqpSinkLocks::EmptyRangeOlap >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink >> KqpTx::LocksAbortOnCommit [GOOD] >> KqpTx::MixEnginesOldNew >> TPersQueueTest::DefaultMeteringMode [GOOD] >> KqpNewEngine::PushPureFlatmapInnerConnectionsToStage [GOOD] >> KqpSinkMvcc::SnapshotExpiration >> KqpTx::CommitRoTx [GOOD] >> KqpTx::CommitRoTx_TLI >> KqpNewEngine::MultipleBroadcastJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DefaultMeteringMode [GOOD] Test command err: 2025-03-28T09:25:18.567482Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789073288580043:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.568843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:18.609299Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789069937099896:2143];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.610313Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:18.734380Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:25:18.745084Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00121a/r3tmp/tmpcYeieG/pdisk_1.dat 2025-03-28T09:25:19.017665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:19.017760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:19.018435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:19.018479Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:19.019209Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:19.024909Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:25:19.025041Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:19.027154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1774, node 1 2025-03-28T09:25:19.142717Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00121a/r3tmp/yandexszTHWa.tmp 2025-03-28T09:25:19.142753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00121a/r3tmp/yandexszTHWa.tmp 2025-03-28T09:25:19.142962Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00121a/r3tmp/yandexszTHWa.tmp 2025-03-28T09:25:19.143117Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:25:19.314054Z INFO: TTestServer started on Port 25339 GrpcPort 1774 TClient is connected to server localhost:25339 PQClient connected to localhost:1774 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:19.830558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:25:19.910214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:25:22.048048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789090468450281:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:22.048197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:22.048779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486789090468450316:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:25:22.053120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:25:22.082962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486789090468450318:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:25:22.351277Z node 1 :TX_PROXY ERROR: Actor# [1:7486789090468450402:2743] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:25:22.377667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:25:22.380448Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486789087116969417:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:25:22.380754Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yjk2N2MyMzgtMzE3NGYyMDgtNDBlYjFlYzMtNDJiNDQyNmM=, ActorId: [2:7486789087116969378:2312], ActorState: ExecuteState, TraceId: 01jqe1404sf1z21yypg56mx911, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:25:22.383114Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486789090468450412:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:25:22.383349Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTMwNDIyY2ItNDBjYjAwMTgtYWYxYmY1OGQtYTI1ZTcxYzY=, ActorId: [1:7486789090468450277:2336], ActorState: ExecuteState, TraceId: 01jqe13zzg5qpbvbxzd3h0amjr, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:25:22.383377Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:25:22.383719Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:25:22.459752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:25:22.551035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:25:22.788052Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqe140hy33xct1gb885bv889, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Nzk1MDRjNGYtNzE5M2Y2NzMtZDkyZGJlZDUtNDFkZWI5MWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7486789090468450842:3067] 2025-03-28T09:25:23.567726Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486789073288580043:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:23.567782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:25:23.608670Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486789069937099896:2143];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:23.608756Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic with 1 partitions CallPersQueueGRPC request to localhost:1774 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2025-03-28T09:25:28.955556Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:1774 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000 ... ute txs with state CALCULATING 2025-03-28T09:30:10.601283Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, State CALCULATING 2025-03-28T09:30:10.601329Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 State CALCULATING FrontTxId 281474976715672 2025-03-28T09:30:10.601356Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-03-28T09:30:10.601405Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState CALCULATED 2025-03-28T09:30:10.601443Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 moved from CALCULATING to CALCULATED 2025-03-28T09:30:10.602011Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976715672] save tx TxId: 281474976715672 State: CALCULATED MinStep: 1743154210504 MaxStep: 18446744073709551615 Step: 1743154210630 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ttt" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/ttt" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 
Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7486790270289302239 RawX2: 124554053789 } Partitions { Partition { PartitionId: 0 } } 2025-03-28T09:30:10.602221Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:30:10.612904Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:30:10.612963Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state CALCULATED 2025-03-28T09:30:10.613070Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, State CALCULATED 2025-03-28T09:30:10.613107Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 State CALCULATED FrontTxId 281474976715672 2025-03-28T09:30:10.613148Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState WAIT_RS 2025-03-28T09:30:10.613200Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 moved from CALCULATED to WAIT_RS 2025-03-28T09:30:10.613289Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 0 senders. 2025-03-28T09:30:10.613360Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveParticipantsDecision 1 2025-03-28T09:30:10.613466Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState EXECUTING 2025-03-28T09:30:10.613500Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1743154210630, TxId 281474976715672 2025-03-28T09:30:10.613529Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 moved from WAIT_RS to EXECUTING 2025-03-28T09:30:10.613552Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 0, Expected 1 2025-03-28T09:30:10.614056Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:30:10.614109Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:30:10.614150Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:30:10.614208Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:30:10.614226Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:30:10.614242Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] _config_0 2025-03-28T09:30:10.614292Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:30:10.614342Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:30:10.614399Z node 30 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-28T09:30:10.618935Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvPQ::TEvTxCommitDone Step 1743154210630, TxId 281474976715672, Partition 0 2025-03-28T09:30:10.618992Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTING 2025-03-28T09:30:10.619023Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:30:10.619032Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, State EXECUTING 2025-03-28T09:30:10.619082Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 State EXECUTING FrontTxId 281474976715672 2025-03-28T09:30:10.619113Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Received 1, Expected 1 2025-03-28T09:30:10.619157Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId: 281474976715672 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-03-28T09:30:10.619208Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] complete TxId 281474976715672 2025-03-28T09:30:10.619664Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ttt" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/ttt" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } 2025-03-28T09:30:10.619765Z node 30 :PERSQUEUE NOTICE: [PQ: 72075186224037892] metering mode METERING_MODE_REQUEST_UNITS 2025-03-28T09:30:10.619917Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete partitions for TxId 281474976715672 2025-03-28T09:30:10.619954Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState EXECUTED 2025-03-28T09:30:10.619996Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 moved from EXECUTING to EXECUTED 2025-03-28T09:30:10.620501Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976715672] save tx TxId: 281474976715672 State: EXECUTED MinStep: 1743154210504 MaxStep: 18446744073709551615 Step: 1743154210630 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { 
PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "ttt" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/ttt" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7486790270289302239 RawX2: 124554053789 } Partitions { Partition { PartitionId: 0 } } 2025-03-28T09:30:10.620853Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:30:10.632170Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:30:10.632233Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state EXECUTED 2025-03-28T09:30:10.632271Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, State EXECUTED 2025-03-28T09:30:10.632307Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 State EXECUTED FrontTxId 281474976715672 2025-03-28T09:30:10.632342Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-03-28T09:30:10.632374Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState WAIT_RS_ACKS 2025-03-28T09:30:10.632412Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672 moved from EXECUTED to WAIT_RS_ACKS 2025-03-28T09:30:10.632466Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976715672] PredicateAcks: 0/0 2025-03-28T09:30:10.632484Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-28T09:30:10.632512Z node 30 :PERSQUEUE DEBUG: [TxId: 281474976715672] PredicateAcks: 0/0 2025-03-28T09:30:10.632551Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976715672 to the list for deletion 2025-03-28T09:30:10.632600Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, NewState DELETING 2025-03-28T09:30:10.632657Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976715672 2025-03-28T09:30:10.632767Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:30:10.639663Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:30:10.639723Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-03-28T09:30:10.639757Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976715672, State DELETING 2025-03-28T09:30:10.639793Z node 30 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976715672 2025-03-28T09:30:10.645827Z node 29 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-03-28T09:30:10.645930Z node 29 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request operation_params { } path: "/Root/PQ/ttt" 2025-03-28T09:30:10.646011Z node 29 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/PQ/ttt ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> 
KqpNewEngine::PushPureFlatmapInnerConnectionsToStage [GOOD] Test command err: Trying to start YDB, gRPC: 14981, MsgBus: 19635 2025-03-28T09:29:24.915665Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790126440541682:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:24.915688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000628/r3tmp/tmpHfy9yV/pdisk_1.dat 2025-03-28T09:29:25.504844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:25.504929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:25.525900Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:25.550563Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14981, node 1 2025-03-28T09:29:25.687395Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:25.687421Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:25.687428Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:25.687551Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19635 TClient is connected to server localhost:19635 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:26.432430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:26.445836Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:26.455509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:26.640143Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:29:26.821012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:26.902927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:28.616673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790143620412650:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:28.616799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:28.946203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.018293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.053011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.085703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.128836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.213629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.338995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790147915380472:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:29.339079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:29.339437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790147915380477:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:29.343783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:29.356106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790147915380479:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:29.456983Z node 1 :TX_PROXY ERROR: Actor# [1:7486790147915380535:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:29.918792Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790126440541682:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:29.918850Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 64850, MsgBus: 11994 2025-03-28T09:29:31.659294Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790156795675404:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:31.660709Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000628/r3tmp/tmpVRXeRr/pdisk_1.dat 2025-03-28T09:29:31.800720Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:31.833225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:31.833306Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:31.835657Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64850, node 2 2025-03-28T09:29:31.915261Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:31.915288Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:31.915297Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:31.915419Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11994 TClient is connected to server localhost:11994 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:32.614778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:32.629973Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:32.637850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:32.719205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:32.905193Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting ... WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790283414368658:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:00.682189Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:00.734754Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:00.771053Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:00.811655Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:00.846151Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:00.880609Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:00.952543Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.002736Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790287709336465:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:01.002839Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:01.002925Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790287709336470:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:01.007231Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:01.017676Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790287709336472:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:01.081988Z node 6 :TX_PROXY ERROR: Actor# [6:7486790287709336525:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:01.547867Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790266234497690:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:01.547949Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27444, MsgBus: 20002 2025-03-28T09:30:04.654158Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486790298999917282:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:04.654234Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000628/r3tmp/tmpJ5BuM5/pdisk_1.dat 2025-03-28T09:30:04.860605Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:04.865172Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:04.865295Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:04.868952Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27444, node 7 2025-03-28T09:30:04.979224Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:04.979250Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:04.979260Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:04.979375Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20002 TClient is connected to server localhost:20002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:30:05.628175Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:05.648240Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:05.730597Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:05.931792Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:06.037969Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:09.354879Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790320474755529:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:09.355038Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:09.410720Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.489499Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.544598Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.598031Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.647892Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.656992Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486790298999917282:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:09.657057Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:09.711284Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.808418Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790320474756046:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:09.808540Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:09.808558Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790320474756051:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:09.814018Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:09.843591Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486790320474756053:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:09.925096Z node 7 :TX_PROXY ERROR: Actor# [7:7486790320474756108:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpTx::CommitPrepared [GOOD] >> KqpTx::InteractiveTx [GOOD] >> KqpTx::InvalidateOnError >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] >> KqpTx::SnapshotROInteractive2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpNewEngine::MultipleBroadcastJoin [GOOD] Test command err: Trying to start YDB, gRPC: 64525, MsgBus: 9177 2025-03-28T09:29:23.722723Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790123879592208:2244];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:23.723129Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000645/r3tmp/tmpjAPGud/pdisk_1.dat 2025-03-28T09:29:24.267740Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:24.284864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:24.284943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:24.288171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64525, node 1 2025-03-28T09:29:24.422519Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:24.422539Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:24.422549Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:24.422659Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9177 TClient is connected to server localhost:9177 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:25.203934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:25.227342Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:25.243496Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:25.420706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:25.638135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:25.765801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:27.627646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790141059462973:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:27.627742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:28.022879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:28.057054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:28.091896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:28.142505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:28.183532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:28.215823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:28.272351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790145354430781:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:28.272430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:28.272815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790145354430786:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:28.276907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:28.290957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790145354430788:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:28.367235Z node 1 :TX_PROXY ERROR: Actor# [1:7486790145354430841:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:28.722848Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790123879592208:2244];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:28.722915Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8582, MsgBus: 6487 2025-03-28T09:29:30.929553Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790152352944150:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:30.929605Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000645/r3tmp/tmpSVVERf/pdisk_1.dat 2025-03-28T09:29:31.119949Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:31.132300Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:31.132381Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:31.137994Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8582, node 2 2025-03-28T09:29:31.212262Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:31.212281Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:31.212288Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:31.212398Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6487 TClient is connected to server localhost:6487 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:31.660493Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:29:31.676238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:31.751848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:31.927143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:32.000312Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable ... undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:59.644896Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:59.677531Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:59.708702Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:59.743772Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:59.801696Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790278684127395:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:59.801813Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:59.801867Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790278684127400:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:59.806061Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:59.816069Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790278684127402:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:59.917519Z node 6 :TX_PROXY ERROR: Actor# [6:7486790278684127458:3453] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:00.978924Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790261504255904:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:00.979010Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26069, MsgBus: 9453 2025-03-28T09:30:03.111916Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486790296342346344:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:03.111979Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000645/r3tmp/tmp0jrVjk/pdisk_1.dat 2025-03-28T09:30:03.334078Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:03.346304Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:03.346417Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:03.352046Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26069, node 7 2025-03-28T09:30:03.490325Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:03.490354Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:03.490364Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:03.490493Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9453 TClient is connected to server localhost:9453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:30:04.192187Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.201558Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.213608Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:30:04.317810Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.517798Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.605525Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:07.801607Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790313522217293:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:07.801740Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:07.863160Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:07.926140Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:07.976465Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.025733Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.080766Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.120151Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486790296342346344:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:08.120223Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:08.174143Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.246943Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790317817185104:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:08.247083Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:08.247450Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790317817185109:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:08.253069Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:08.279516Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486790317817185111:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:08.344803Z node 7 :TX_PROXY ERROR: Actor# [7:7486790317817185166:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:10.136789Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:30:10.219885Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:30:10.302580Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 [] >> KqpSinkTx::LocksAbortOnCommit [GOOD] >> KqpSinkTx::InvalidateOnError >> KqpSinkTx::SnapshotRO [GOOD] >> KqpSinkTx::SnapshotROInteractive1 >> KqpSinkLocks::TInvalidate [GOOD] >> KqpSinkLocks::OlapUncommittedRead >> KqpSinkTx::SnapshotROInteractive2 [GOOD] >> KqpSnapshotIsolation::TConflictReadWriteOlap >> KqpTx::SnapshotROInteractive1 [GOOD] >> KqpExtractPredicateLookup::SimpleRange [GOOD] >> KqpExtractPredicateLookup::PointJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitPrepared [GOOD] Test command err: Trying to start YDB, gRPC: 7654, MsgBus: 6240 2025-03-28T09:30:02.146436Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790291435045889:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.146473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015f8/r3tmp/tmpTVjAt8/pdisk_1.dat 2025-03-28T09:30:02.723722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:02.723852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:02.744598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:02.746816Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7654, node 1 2025-03-28T09:30:02.932284Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:02.932308Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:02.932341Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:02.932470Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6240 TClient is connected to server localhost:6240 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:03.833487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:03.875090Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:03.901483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.087786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.322783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.401650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:05.884992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790304319949554:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.885133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.215245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.241442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.296841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.329173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.375816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.495817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.554270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790308614917370:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.554344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.554504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790308614917375:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.559045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:06.574017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790308614917377:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:06.644887Z node 1 :TX_PROXY ERROR: Actor# [1:7486790308614917432:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:07.150789Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790291435045889:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.150849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27087, MsgBus: 29980 2025-03-28T09:30:09.052087Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790319660448720:2200];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015f8/r3tmp/tmpAPv4cs/pdisk_1.dat 2025-03-28T09:30:09.200768Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:30:09.239545Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:09.259774Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:09.259850Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:09.261046Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27087, node 2 2025-03-28T09:30:09.343159Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:09.343179Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:09.343188Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:09.343282Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29980 TClient is connected to server localhost:29980 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-28T09:30:09.791101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.812795Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:09.891871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.043278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.124637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:12.278017Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790332545352213:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.278112Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.325238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.364101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.401366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.468074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.507029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.546498Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.637972Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790332545352737:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.638056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.638234Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790332545352742:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.642132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:12.653673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790332545352744:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:12.744882Z node 2 :TX_PROXY ERROR: Actor# [2:7486790332545352800:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:14.045811Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790319660448720:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:14.045882Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpLocks::DifferentKeyUpdate >> KqpSinkTx::OlapDeferredEffects >> KqpSinkLocks::DifferentKeyUpdate [GOOD] >> KqpSinkLocks::DifferentKeyUpdateOlap >> KqpSqlIn::TupleNotOnlyOfKeys [GOOD] >> KqpSqlIn::PhasesCount [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 25909, MsgBus: 1828 2025-03-28T09:30:02.161483Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790291233358452:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.163073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015e7/r3tmp/tmpmxRmvp/pdisk_1.dat 2025-03-28T09:30:02.700020Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:02.727726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:02.727808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:02.735572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25909, node 1 2025-03-28T09:30:02.925895Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:02.925913Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:02.925918Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:02.926002Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1828 TClient is connected to server localhost:1828 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:03.811763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:03.850746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.036927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.241929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.349690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:05.769504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790304118261962:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.769601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.221843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.274021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.342914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.375325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.410442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.487525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.590009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790308413229787:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.590078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.590114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790308413229792:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.593535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:06.607632Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:30:06.607906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790308413229794:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:06.711794Z node 1 :TX_PROXY ERROR: Actor# [1:7486790308413229850:3458] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:07.147948Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790291233358452:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.161020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20912, MsgBus: 17075 2025-03-28T09:30:09.620307Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790323027313414:2134];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:09.620523Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015e7/r3tmp/tmpJAV57D/pdisk_1.dat 2025-03-28T09:30:09.786180Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20912, node 2 2025-03-28T09:30:09.805957Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:09.806037Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:09.815181Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:09.864824Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:09.864860Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:09.864870Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:09.864969Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17075 TClient is connected to server localhost:17075 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:30:10.336653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.342583Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:10.352026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.433477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.588156Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.661039Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:12.635591Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790335912216984:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.635651Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.679895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.706811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.738902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.769030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.803236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.832009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.915952Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790335912217497:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.916030Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.916254Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790335912217502:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.919589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:12.929514Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790335912217504:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:13.024833Z node 2 :TX_PROXY ERROR: Actor# [2:7486790340207184855:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:14.620709Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790323027313414:2134];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:14.620797Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:14.683321Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Mzk0OWI4OTAtYTE5YjcwNTYtZWZiZmM3YTQtNWZiMmEwOTc=, ActorId: [2:7486790340207185112:2488], ActorState: ExecuteState, TraceId: 01jqe1cxpdb99n6h3tq391z04a, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 7199, MsgBus: 17570 2025-03-28T09:30:02.139029Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790291316238630:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.139112Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001605/r3tmp/tmpoYLl2q/pdisk_1.dat 2025-03-28T09:30:02.810195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:02.810309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:02.813275Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:02.817553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7199, node 1 2025-03-28T09:30:02.931174Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:02.931197Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:02.931206Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:02.931317Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17570 TClient is connected to server localhost:17570 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:04.036602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.071359Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:04.083891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.253275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.430772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.539342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:06.086501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790308496109582:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.086637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.379882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.433395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.482710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.544191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.601599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.657568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.719681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790308496110093:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.719828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.720393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790308496110098:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.724231Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:06.736663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790308496110100:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:06.803006Z node 1 :TX_PROXY ERROR: Actor# [1:7486790308496110153:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:07.142152Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790291316238630:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.142220Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:07.977211Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjdhY2NmMDItYjVlODE1OS04MGVjNTNkYi1jM2ViMzgwMQ==, ActorId: [1:7486790312791077711:2489], ActorState: ReadyState, TraceId: 01jqe1cq6e639vy06esxn5vz0d, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:855: Too many transactions, current active: 2 MaxTxPerSession: 2 Trying to start YDB, gRPC: 17320, MsgBus: 3625 2025-03-28T09:30:09.145381Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790322380043106:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:09.145700Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001605/r3tmp/tmpYciXIg/pdisk_1.dat 2025-03-28T09:30:09.342387Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:09.363479Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:09.363555Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:09.364593Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17320, node 2 2025-03-28T09:30:09.423353Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:09.423378Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:09.423392Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:09.423508Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3625 TClient is connected to server localhost:3625 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:09.867427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:09.872250Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:09.880411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:09.981491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.146038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.221811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:12.228140Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790335264946613:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.228230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.278793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.311127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.361466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.393136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.420647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.497461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.551473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790335264947128:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.551568Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.551883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790335264947133:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.555350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:12.568289Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790335264947135:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:12.643574Z node 2 :TX_PROXY ERROR: Actor# [2:7486790335264947188:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:14.145585Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790322380043106:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:14.145642Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive1 [GOOD] Test command err: Trying to start YDB, gRPC: 5069, MsgBus: 24563 2025-03-28T09:30:02.222907Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790291034525706:2252];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.223312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015dc/r3tmp/tmp8WuuR0/pdisk_1.dat 2025-03-28T09:30:02.779934Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:02.783809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:02.783918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:02.787798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5069, node 1 2025-03-28T09:30:02.938491Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:02.938509Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:02.938515Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:02.938677Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24563 TClient is connected to server localhost:24563 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:30:03.842175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:03.888504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.061815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:30:04.242150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:04.328285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:05.817196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790303919429182:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.817305Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.179641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.268860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.302385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.335097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.375612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.446255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.517980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790308214396999:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.518055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.518268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790308214397004:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.522827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:06.537423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790308214397006:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:06.630284Z node 1 :TX_PROXY ERROR: Actor# [1:7486790308214397061:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:07.217004Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790291034525706:2252];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.217069Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:08.996943Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjY1YjFhMmEtMmViOTI0MmQtMWE5NjA5ODktN2QyNzBmMjk=, ActorId: [1:7486790312509364619:2490], ActorState: ExecuteState, TraceId: 01jqe1cr3t3ja7q5jaqjzr96tx, Create QueryResponse for error on request, msg:
:3:25: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 Trying to start YDB, gRPC: 24955, MsgBus: 21932 2025-03-28T09:30:09.899399Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790321696616571:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:09.899453Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015dc/r3tmp/tmpSlfBa7/pdisk_1.dat 2025-03-28T09:30:10.051877Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24955, node 2 2025-03-28T09:30:10.089860Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:10.089947Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:10.090896Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:10.171215Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:10.171238Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:10.171246Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:10.171351Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21932 TClient is connected to server localhost:21932 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:10.575693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.585076Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:10.591600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.660006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:30:10.805682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.865547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:13.213619Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790338876487499:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:13.213747Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:13.260643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.295754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.329609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.364000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.395122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.436327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.524044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790338876488011:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:13.524150Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:13.524406Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790338876488016:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:13.527661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:13.540461Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-28T09:30:13.541960Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790338876488018:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:13.601580Z node 2 :TX_PROXY ERROR: Actor# [2:7486790338876488073:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:14.902905Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790321696616571:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:14.902996Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpTx::RollbackManyTx >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] >> KqpSqlIn::Delete [GOOD] >> KqpSinkLocks::TInvalidateOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::TupleNotOnlyOfKeys [GOOD] Test command err: Trying to start YDB, gRPC: 24757, MsgBus: 31381 2025-03-28T09:29:17.502902Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790098583404069:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:17.502969Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006cd/r3tmp/tmpPgAIPg/pdisk_1.dat 2025-03-28T09:29:17.964714Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:17.983426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:17.983499Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:17.985364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24757, node 1 2025-03-28T09:29:18.132080Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:18.132109Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:18.132116Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:18.132259Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31381 TClient is connected to server localhost:31381 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:18.988688Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:19.026826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:19.156613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:19.318352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:19.411986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:21.194241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790115763275003:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:21.194363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:21.523606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:21.599979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:21.644042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:21.678809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:21.720270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:21.804076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:21.868644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790115763275521:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:21.868720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:21.868908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790115763275526:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:21.872467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:21.886988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790115763275528:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:21.977268Z node 1 :TX_PROXY ERROR: Actor# [1:7486790115763275583:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:22.502961Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790098583404069:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:22.503019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:23.279041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:23.362810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:29:23.424621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
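The repeated NOT_FOUND warnings above come from the workload service probing the `default` resource pool before its lazy auto-creation has finished, and the later TX_PROXY "path exist, request accepts it" message is the idempotent create racing with itself; both are expected noise in a fresh test database. A pool can also be created explicitly up front. The sketch below does that through the YDB Python SDK; the endpoint, the pool settings, and the assumption that execute_scheme accepts this DDL come from YDB's workload-manager documentation, not from this log.

    import ydb

    # Hypothetical connection parameters for a local single-node setup.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def create_default_pool(session):
        # CREATE RESOURCE POOL and these setting names follow YDB's
        # workload-manager docs; treat them as assumptions here.
        session.execute_scheme("""
            CREATE RESOURCE POOL `default` WITH (
                CONCURRENT_QUERY_LIMIT = 10,
                QUEUE_SIZE = 100
            );
        """)

    pool.retry_operation_sync(create_default_pool)

If the auto-created pool already exists, the create fails the same way the log's txid 281474976710669 does, which is why the server treats "path exist" as acceptable for this request.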
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:26: Warning: At function: Filter, At function: Coalesce
:5:49: Warning: At function: SqlIn
:5:49: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:4:21: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001
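Warning code 1108 flags YQL's legacy `IN` semantics: with a nullable column or a NULL in the item list, `IN` can return results that differ from the SQL standard's three-valued logic. The pragma named in the warning opts a query into ANSI behaviour. A minimal sketch follows, reusing the session pool from the previous snippet; the table and column names are hypothetical. (The code-8001 warning is separate and benign here: the cost-based optimizer had no table statistics to load in this freshly created test database.)

    ANSI_IN_QUERY = """
        PRAGMA AnsiInForEmptyOrNullableItemsCollections;

        SELECT Key, Value
        FROM `/Root/Test`              -- hypothetical table
        WHERE Value IN (1, 2, NULL);   -- with the pragma, a NULL item follows
                                       -- three-valued logic instead of the
                                       -- legacy YQL behaviour
    """

    def run_ansi_in(session):
        return session.transaction(ydb.SerializableReadWrite()).execute(
            ANSI_IN_QUERY, commit_tx=True
        )

    pool.retry_operation_sync(run_ansi_in)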
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:26: Warning: At function: Filter, At function: Coalesce
:5:49: Warning: At function: SqlIn
:5:49: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:4:21: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 30146, MsgBus: 13903 2025-03-28T09:29:27.100821Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790143331496922:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:27.100870Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006cd/r3tmp/tmp2CVBOs/pdisk_1.dat 2025-03-28T09:29:27.308855Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:27.323784Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:27.323880Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:27.325834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30146, node 2 2025-03-28T09:29:27.440722Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:27.440745Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:27.440754Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:27.440895Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13903 TClient is connected to server localhost:13903 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL ... cheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:29:58.838486Z node 5 :TX_PROXY ERROR: Actor# [5:7486790274772787961:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:59.881198Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790257592916462:2098];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:59.881791Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:00.007348Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:30:00.047058Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:30:00.088451Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:17: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:22: Warning: At function: Filter, At function: Coalesce
:7:31: Warning: At function: SqlIn
:7:31: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:5:17: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 Trying to start YDB, gRPC: 31218, MsgBus: 15084 2025-03-28T09:30:04.123684Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790298687641927:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:04.123738Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006cd/r3tmp/tmpweZcpT/pdisk_1.dat 2025-03-28T09:30:04.319672Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:04.410130Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:04.410243Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:04.411770Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31218, node 6 2025-03-28T09:30:04.533666Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:04.533695Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:04.533707Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:04.533888Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15084 TClient is connected to server localhost:15084 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:05.213699Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:05.225634Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:05.245612Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:05.330100Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:30:05.547729Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:30:05.639172Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.819954Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790315867512910:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:08.820066Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:08.885627Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.941664Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.999558Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.046454Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.093585Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.124560Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790298687641927:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:09.124617Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:09.148580Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.241216Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790320162480724:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:09.241298Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:09.241485Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790320162480729:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:09.246979Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:09.261759Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790320162480731:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:09.357793Z node 6 :TX_PROXY ERROR: Actor# [6:7486790320162480787:3454] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:11.026227Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:30:11.082008Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:30:11.141398Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:37: Warning: At function: SqlIn
:7:37: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:26: Warning: At function: Filter, At function: Coalesce
:7:37: Warning: At function: SqlIn
:7:37: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::PhasesCount [GOOD] Test command err: Trying to start YDB, gRPC: 29334, MsgBus: 24002 2025-03-28T09:29:18.396989Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790104389535494:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:18.397068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006b2/r3tmp/tmpg5cGQD/pdisk_1.dat 2025-03-28T09:29:18.935686Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:18.960865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:18.960950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:18.963305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29334, node 1 2025-03-28T09:29:19.119734Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:19.119755Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:19.119761Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:19.119876Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24002 TClient is connected to server localhost:24002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:19.741860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:19.759447Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:29:19.769135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:19.913750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:20.069102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:20.153268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:21.840819Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790117274439158:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:21.840956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:22.202587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.233966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.267908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.312209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.387445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.445107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:22.540707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790121569406972:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:22.540793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:22.541108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790121569406977:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:22.545167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:22.559638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790121569406979:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:22.636789Z node 1 :TX_PROXY ERROR: Actor# [1:7486790121569407035:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:23.398075Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790104389535494:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:23.398123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:23.749698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:23.838956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:29:23.894438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:29:23.973553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:72: Warning: At function: Filter, At function: Coalesce
:5:84: Warning: At function: SqlIn
:5:84: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:72: Warning: At function: Filter, At function: Coalesce
:5:84: Warning: At function: SqlIn
:5:84: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 24360, MsgBus: 10620 2025-03-28T09:29:28.446070Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790146700163579:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:28.446129Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006b2/r3tmp/tmpyYdFYb/pdisk_1.dat 2025-03-28T09:29:28.739705Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24360, node 2 2025-03-28T09:29:28.859369Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:28.859481Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:28.935010Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:28.947585Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:28.947614Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:28.947622Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:28.947763Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10620 TClient is connected to server localhost:10620 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathS ... is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:00.376634Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:00.414971Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:00.452840Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:00.500359Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:00.580335Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790284989365000:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:00.580410Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790284989365005:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:00.580433Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:00.584351Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:00.595696Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790284989365007:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:00.689553Z node 5 :TX_PROXY ERROR: Actor# [5:7486790284989365062:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:01.783675Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790267809493537:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:01.783732Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:01.801583Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.850365Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:30:01.969789Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20702, MsgBus: 6219 2025-03-28T09:30:07.339117Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790312035609222:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.339197Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006b2/r3tmp/tmpenmwE7/pdisk_1.dat 2025-03-28T09:30:07.614416Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:07.617376Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:07.617913Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:07.648423Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20702, node 6 2025-03-28T09:30:07.811359Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:07.811389Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:07.811399Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:07.811542Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6219 TClient is connected to server localhost:6219 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:08.716295Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:08.723435Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:08.737670Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:08.852468Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:09.115616Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:09.240710Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:12.340114Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790312035609222:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:12.340882Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:12.388380Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790333510447496:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.388496Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.449875Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.498285Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.544423Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.597112Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.646328Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.760365Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.816247Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790333510448015:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.816336Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.816552Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790333510448020:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.822861Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:12.846058Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790333510448022:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:12.910625Z node 6 :TX_PROXY ERROR: Actor# [6:7486790333510448077:3459] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] >> KqpTx::MixEnginesOldNew [GOOD] >> KqpSinkLocks::EmptyRange >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 23723, MsgBus: 23788 2025-03-28T09:30:02.141320Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790292921015779:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.151364Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015f9/r3tmp/tmpKA0PLi/pdisk_1.dat 2025-03-28T09:30:02.748584Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:02.750487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:02.750565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:02.761221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23723, node 1 2025-03-28T09:30:02.927471Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:02.927500Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:02.927507Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:02.927855Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23788 TClient is connected to server localhost:23788 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:30:03.786612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:03.814204Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:03.829146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.036183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:30:04.216456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.306337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:05.743760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790305805919436:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.743871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.177820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.209267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.252304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.279764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.311451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.357761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.434191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790310100887250:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.434277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.434729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790310100887255:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.439612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:06.457997Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790310100887257:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:06.550660Z node 1 :TX_PROXY ERROR: Actor# [1:7486790310100887312:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:07.142410Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790292921015779:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.142477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:10.837963Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmI4MWJmNDUtY2NmOGRhMmUtNzRlZWMzZmItZWYxZGU2MzY=, ActorId: [1:7486790314395854871:2490], ActorState: ExecuteState, TraceId: 01jqe1csrj3xmdbbcj7k6s4w78, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken Trying to start YDB, gRPC: 4192, MsgBus: 19137 2025-03-28T09:30:11.663078Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790328185730525:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:11.663146Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015f9/r3tmp/tmpTWtgxl/pdisk_1.dat 2025-03-28T09:30:11.767339Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4192, node 2 2025-03-28T09:30:11.798438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:11.798531Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:11.799897Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:11.834066Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:11.834092Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:11.834105Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:11.834225Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19137 TClient is connected to server localhost:19137 WaitRootIsUp 'Root'... 
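The KQP_SESSION warning above, "tx has deferred effects, but locks are broken", is the server aborting an optimistic transaction whose read set was invalidated by a concurrent write: exactly the situation these ReadWriteTxFailsOnConcurrentWrite tests provoke. Clients observe it as a retryable ABORTED status. A sketch of the standard client-side response, again via the Python SDK's retrying session pool from the first snippet; the table, key, and value are hypothetical.

    def read_modify_write(session):
        tx = session.transaction(ydb.SerializableReadWrite())
        # The read acquires an optimistic lock on the row.
        tx.execute("SELECT Value FROM `/Root/Test` WHERE Key = 1;")
        # Commit fails with ABORTED if a concurrent write broke that lock.
        tx.execute(
            "UPSERT INTO `/Root/Test` (Key, Value) VALUES (1, 42);",
            commit_tx=True,
        )

    # retry_operation_sync re-runs the whole callee on retryable statuses,
    # so the transaction restarts against a fresh snapshot.
    pool.retry_operation_sync(read_modify_write)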
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:30:12.309770Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.317214Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:12.337377Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:12.415587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:12.598590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:12.687715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:14.816725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790341070634179:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:14.816822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:14.843086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:14.876063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:14.906864Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:14.936352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:14.967016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:14.997068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:15.043738Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790345365601982:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:15.043808Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:15.043855Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790345365601987:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:15.047137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:15.056977Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790345365601989:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:15.113299Z node 2 :TX_PROXY ERROR: Actor# [2:7486790345365602042:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:16.666791Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790328185730525:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:16.666882Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:16.739075Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjdjMzE4OTEtYzg1MjYzNDUtYzY3MzUyNjAtOGMzYmI2NjI=, ActorId: [2:7486790345365602334:2496], ActorState: ExecuteState, TraceId: 01jqe1czqta9qwf5c2m3jak8k1, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] >> KqpTx::CommitRoTx_TLI [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::Delete [GOOD] Test command err: Trying to start YDB, gRPC: 27391, MsgBus: 22208 2025-03-28T09:29:26.725865Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790138144102089:2085];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:26.726059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005f6/r3tmp/tmpATZCPW/pdisk_1.dat 2025-03-28T09:29:27.290810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:27.290911Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:27.293703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27391, node 1 2025-03-28T09:29:27.320900Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:27.481334Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:27.481356Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:27.481362Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:27.481465Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22208 TClient is connected to server localhost:22208 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:28.112584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:28.125240Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:29:28.134818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:28.322244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:28.494273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:28.576847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:30.315530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790155323973019:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:30.315642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:30.741001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:30.792358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:30.876975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:30.913167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:30.954089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:31.027036Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:31.117739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790159618940839:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:31.117841Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:31.118334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790159618940844:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:31.123410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:31.136300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790159618940846:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:31.237163Z node 1 :TX_PROXY ERROR: Actor# [1:7486790159618940901:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:31.726626Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790138144102089:2085];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:31.726679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10764, MsgBus: 14163 2025-03-28T09:29:34.324106Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790170265483716:2139];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:34.327175Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005f6/r3tmp/tmpWquonc/pdisk_1.dat 2025-03-28T09:29:34.476439Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:34.499036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:34.499119Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 10764, node 2 2025-03-28T09:29:34.500292Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:34.607175Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:34.607196Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:34.607203Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:34.607308Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14163 TClient is connected to server localhost:14163 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:29:35.047659Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:35.054385Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:29:35.072826Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:29:35.151269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:29:35.339430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting ... {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:01.428483Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790285885546467:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:01.431995Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:01.444382Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790285885546469:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:01.532430Z node 5 :TX_PROXY ERROR: Actor# [5:7486790285885546524:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:02.979830Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790268705674978:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.979916Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:03.008233Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:30:03.083189Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:30:03.140220Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
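(Editorial note, not part of the captured output: the recurring "Resource pool default not found" / "Scheduled retry" / "path exist, request accepts it" triples above are the workload service racing to auto-create the default resource pool on first use. A minimal sketch of creating such a pool explicitly, assuming the YQL workload-manager DDL available in recent YDB releases; the pool name and limits are illustrative, not taken from this log:

    CREATE RESOURCE POOL default_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- illustrative limit, not from this log
        QUEUE_SIZE = 100              -- illustrative queue depth
    );

Pre-creating the pool would avoid the NOT_FOUND fetch and the subsequent "path exist" race seen here.)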
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
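(Editorial note, not part of the captured output: the warning block above refers to YQL's legacy IN semantics for nullable or empty collections. A minimal sketch of applying the suggested pragma, assuming a hypothetical table and values; only the pragma name comes from the warning itself:

    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    SELECT Key, Value
    FROM `/Root/Test`             -- hypothetical table
    WHERE Value IN (1, 2, NULL);  -- with the pragma, a NULL item follows ANSI three-valued logic

With the pragma enabled, a NULL in the IN list makes the predicate NULL rather than producing the legacy, possibly unexpected, result the warning describes.)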
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 12464, MsgBus: 28231 2025-03-28T09:30:07.680843Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790314998602118:2154];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.683685Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005f6/r3tmp/tmp7TvrTJ/pdisk_1.dat 2025-03-28T09:30:07.839891Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:07.875025Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:07.875138Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:07.877335Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12464, node 6 2025-03-28T09:30:08.013017Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:08.013044Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:08.013054Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:08.013216Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28231 TClient is connected to server localhost:28231 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:30:08.856616Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.886561Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:09.008755Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:30:09.282632Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:30:09.384716Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.007087Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790336473440275:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.007185Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.079785Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.130163Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.169201Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.244215Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.326158Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.381508Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.437420Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790336473440792:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.437520Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.437701Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790336473440797:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.441299Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:12.452724Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790336473440799:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:12.508999Z node 6 :TX_PROXY ERROR: Actor# [6:7486790336473440853:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:12.680313Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790314998602118:2154];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:12.680405Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:13.900319Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.945728Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.993271Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 19247, MsgBus: 29658 2025-03-28T09:30:02.139876Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790292693414033:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.139964Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00164a/r3tmp/tmpDSdzgJ/pdisk_1.dat 2025-03-28T09:30:02.685797Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:02.721087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:02.721195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:02.732047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19247, node 1 2025-03-28T09:30:02.932645Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:02.932676Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:02.932684Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:02.932779Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29658 TClient is connected to server localhost:29658 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:03.902007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:03.926540Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:03.938659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.085144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.279909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.365252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:05.706619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790305578317692:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.706729Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.180523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.209199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.240717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.268669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.300696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.375741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.467895Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790309873285508:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.467982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.468240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790309873285513:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.472323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:06.485220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790309873285515:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:06.587960Z node 1 :TX_PROXY ERROR: Actor# [1:7486790309873285570:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:07.143295Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790292693414033:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.143351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:07.971681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.030835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.097831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8180, MsgBus: 22054 2025-03-28T09:30:12.227127Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790335818948948:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:12.227301Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00164a/r3tmp/tmprLJySk/pdisk_1.dat 2025-03-28T09:30:12.361406Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:12.384702Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:12.384760Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:12.387611Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8180, node 2 2025-03-28T09:30:12.457507Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:12.457534Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:12.457542Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:12.457671Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22054 TClient is connected to server localhost:22054 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:12.944274Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:12.955458Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:12.964610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:13.043897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:13.254510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:13.340128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:15.658168Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790348703852583:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:15.658322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:15.704885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:15.739142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:15.779614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:15.814994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:15.849148Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:15.886944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:15.934102Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790348703853090:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:15.934179Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:15.934332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790348703853095:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:15.939247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:15.948694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790348703853097:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:16.034439Z node 2 :TX_PROXY ERROR: Actor# [2:7486790352998820447:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:17.242005Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790335818948948:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:17.242045Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:17.247596Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715673; 2025-03-28T09:30:17.256287Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486790357293788143:2496], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [2:7486790352998820736:2496]Got LOCKS BROKEN for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[2:7486790357293788143:2496].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-28T09:30:17.256980Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486790357293788135:2496], SessionActorId: [2:7486790352998820736:2496], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7486790352998820736:2496]. isRollback=0 2025-03-28T09:30:17.257268Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Zjc1MzI0OGUtNzE1MjdjMGYtYzI5YzdhMDAtZGM2MWZhYTA=, ActorId: [2:7486790352998820736:2496], ActorState: ExecuteState, TraceId: 01jqe1d06kdyxk84yj462hz5w6, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7486790357293788136:2496] from: [2:7486790357293788135:2496] 2025-03-28T09:30:17.257378Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7486790357293788136:2496] TxId: 281474976715673. Ctx: { TraceId: 01jqe1d06kdyxk84yj462hz5w6, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Zjc1MzI0OGUtNzE1MjdjMGYtYzI5YzdhMDAtZGM2MWZhYTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/TwoShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-28T09:30:17.258514Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Zjc1MzI0OGUtNzE1MjdjMGYtYzI5YzdhMDAtZGM2MWZhYTA=, ActorId: [2:7486790352998820736:2496], ActorState: ExecuteState, TraceId: 01jqe1d06kdyxk84yj462hz5w6, Create QueryResponse for error on request, msg: >> KqpSqlIn::SecondaryIndex_TupleLiteral [GOOD] >> KqpSqlIn::SecondaryIndex_TupleSelect >> KqpSinkTx::ExplicitTcl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::MixEnginesOldNew [GOOD] Test command err: Trying to start YDB, gRPC: 20628, MsgBus: 17000 2025-03-28T09:30:05.773553Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790305693637815:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:05.773796Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015cc/r3tmp/tmpVv6Vdi/pdisk_1.dat 2025-03-28T09:30:06.127257Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20628, node 1 2025-03-28T09:30:06.165105Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:06.165291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:06.167258Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:06.203724Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:06.203747Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:06.203754Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:06.203877Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17000 TClient is connected to server localhost:17000 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:06.902064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:30:06.923238Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:06.930676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:30:07.118386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:07.303070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:07.388272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:09.176582Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790322873508781:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:09.176688Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:09.503291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.570170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.653056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.693905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.723725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.810920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.906800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790322873509311:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:09.906918Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:09.907177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790322873509316:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:09.910576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:09.923970Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790322873509318:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:09.985132Z node 1 :TX_PROXY ERROR: Actor# [1:7486790322873509372:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:10.774787Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790305693637815:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:10.774851Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:11.583360Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTViNGFlMjMtZTI4Nzg1YTktM2JjNzExZGYtMTEzY2E4NmQ=, ActorId: [1:7486790327168476924:2489], ActorState: ExecuteState, TraceId: 01jqe1ctm5d01r9b94td8cfz5w, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-03-28T09:30:11.593358Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTViNGFlMjMtZTI4Nzg1YTktM2JjNzExZGYtMTEzY2E4NmQ=, ActorId: [1:7486790327168476924:2489], ActorState: ReadyState, TraceId: 01jqe1ctr89z142af1qyxerpxc, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 25611, MsgBus: 14448 2025-03-28T09:30:12.433833Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790334379136343:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:12.433909Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015cc/r3tmp/tmpE2dqUH/pdisk_1.dat 2025-03-28T09:30:12.590595Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25611, node 2 2025-03-28T09:30:12.635113Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:12.635221Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:12.638417Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:12.678894Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:12.678915Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:12.678923Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:12.679026Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14448 TClient is connected to server localhost:14448 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:13.119336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:13.125677Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:13.137508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:13.211064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:13.367088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:30:13.457024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:15.663834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790347264040016:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:15.663975Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:15.719388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:15.752980Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:15.789084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:15.824772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:15.860112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:15.932686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:15.993002Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790347264040531:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:15.993106Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:15.993406Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790347264040536:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:15.997295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:16.011593Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790347264040538:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:16.078571Z node 2 :TX_PROXY ERROR: Actor# [2:7486790351559007886:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:17.434172Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790334379136343:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:17.434232Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitRoTx_TLI [GOOD] Test command err: Trying to start YDB, gRPC: 22961, MsgBus: 24553 2025-03-28T09:30:07.597912Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790312489773989:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.598357Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015cb/r3tmp/tmp1ippHN/pdisk_1.dat 2025-03-28T09:30:08.234150Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:08.237574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:08.237684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:08.249275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22961, node 1 2025-03-28T09:30:08.423279Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:08.423302Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:08.423315Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:08.423430Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24553 TClient is connected to server localhost:24553 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:30:09.028538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:09.051210Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:09.058732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:09.196676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:30:09.364576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.444848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:11.126268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790329669644802:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:11.126397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:11.412827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:11.448177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:11.482337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:11.515050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:11.543813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:11.610751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:11.705460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790329669645325:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:11.705554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:11.705747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790329669645330:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:11.709788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:11.720989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790329669645332:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:11.819002Z node 1 :TX_PROXY ERROR: Actor# [1:7486790329669645388:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:12.585164Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790312489773989:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:12.585265Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 61916, MsgBus: 30032 2025-03-28T09:30:13.615998Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790340850101935:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:13.616055Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015cb/r3tmp/tmpyzuPNG/pdisk_1.dat 2025-03-28T09:30:13.715127Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61916, node 2 2025-03-28T09:30:13.743537Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:13.743621Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:13.745351Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:13.755093Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:13.755113Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:13.755121Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:13.755229Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30032 TClient is connected to server localhost:30032 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:30:14.128764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:14.144908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:30:14.198223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:30:14.334152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:14.403069Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:16.339837Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790353735005594:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:16.339924Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:16.382080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:16.448051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:16.471634Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:16.498518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:16.529116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:16.599447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:16.682555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790353735006119:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:16.682623Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:16.683223Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790353735006124:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:16.686301Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:16.695995Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790353735006126:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:16.790629Z node 2 :TX_PROXY ERROR: Actor# [2:7486790353735006181:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 8624, MsgBus: 17225 2025-03-28T09:30:04.577090Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790300133367767:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:04.577147Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015d5/r3tmp/tmpBcwn23/pdisk_1.dat 2025-03-28T09:30:05.093091Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:05.102301Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:05.102403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 8624, node 1 2025-03-28T09:30:05.107992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:05.167212Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:05.167258Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:05.167271Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:05.167456Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17225 TClient is connected to server localhost:17225 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:05.747352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
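The recurring pattern in the output above — `Resource pool default not found or you don't have access permissions`, a `TPoolCreatorActor` retry (`Scheduled retry for error`, `doublechecking`), and finally a TX_PROXY record saying `path exist, request accepts it` — is the lazy bootstrap of `/Root/.metadata/workload_manager/pools/default`: several actors race to create the pool on first use, one CREATE wins, and the losers accept the already-existing path as success. A minimal sketch of that create-if-absent convergence in std-only C++; the names here (TSchemeShard, EnsureDefaultPool) are illustrative stand-ins, not YDB's real classes:

    #include <iostream>
    #include <mutex>
    #include <set>
    #include <string>

    enum class EStatus { Success, AlreadyExists };

    // Stand-in for the schemeshard: the single authority on which paths exist.
    class TSchemeShard {
        std::mutex Lock;
        std::set<std::string> Paths;
    public:
        EStatus Create(const std::string& path) {
            std::lock_guard<std::mutex> g(Lock);
            return Paths.insert(path).second ? EStatus::Success
                                             : EStatus::AlreadyExists;
        }
    };

    // Every racing caller converges on "pool exists", whoever actually created it.
    EStatus EnsureDefaultPool(TSchemeShard& ss) {
        const std::string path = "/Root/.metadata/workload_manager/pools/default";
        EStatus st = ss.Create(path);
        if (st == EStatus::AlreadyExists) {
            // Mirrors "path exist, request accepts it" in the log above:
            // the loser of the race double-checks and reports success.
            return EStatus::Success;
        }
        return st;
    }

    int main() {
        TSchemeShard ss;
        std::cout << (EnsureDefaultPool(ss) == EStatus::Success) << '\n'; // creator
        std::cout << (EnsureDefaultPool(ss) == EStatus::Success) << '\n'; // race loser
    }

Because `AlreadyExists` is folded into success, these WARN/ERROR records are benign, which is consistent with the surrounding tests still finishing `[GOOD]`.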
2025-03-28T09:30:05.771013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:05.950781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:30:06.115462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.181079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:07.973424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790313018271462:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:07.973602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:08.381742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.417719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.460333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.495748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.542882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.595265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.682057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790317313239274:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:08.682138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:08.682368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790317313239279:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:08.686353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:08.702823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790317313239281:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:08.798484Z node 1 :TX_PROXY ERROR: Actor# [1:7486790317313239337:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:09.582801Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790300133367767:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:09.582878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:10.425974Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjJjNzdmNy03ZTBlMzcwZC02YWNiYjQ5ZS03YjFlYTIyOA==, ActorId: [1:7486790321608206886:2488], ActorState: ExecuteState, TraceId: 01jqe1csh04crnm2q3fb0r8cps, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 27963, MsgBus: 11683 2025-03-28T09:30:11.270091Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790329068400677:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:11.270147Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015d5/r3tmp/tmp5rjDwr/pdisk_1.dat 2025-03-28T09:30:11.383545Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27963, node 2 2025-03-28T09:30:11.433280Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:11.433365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:11.439518Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:11.453924Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:11.453944Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:11.453951Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:11.454075Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11683 TClient is connected to server localhost:11683 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:11.824885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:11.836408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:11.914191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:12.057158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:12.121545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:14.396712Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790341953304333:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:14.396787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:14.436345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:14.503292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:14.535676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:14.569264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:14.599950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:14.645151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:14.720537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790341953304851:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:14.720605Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:14.720650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790341953304856:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:14.723782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:14.733550Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790341953304858:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:14.790587Z node 2 :TX_PROXY ERROR: Actor# [2:7486790341953304911:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:16.274875Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790329068400677:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:16.274930Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:17.974839Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715673; 2025-03-28T09:30:17.976027Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486790354838207327:2496], Table: `/Root/EightShard` ([72057594046644480:3:1]), SessionActorId: [2:7486790346248272492:2496]Got LOCKS BROKEN for table `/Root/EightShard`. ShardID=72075186224037891, Sink=[2:7486790354838207327:2496].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-28T09:30:17.976565Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486790354838207308:2496], SessionActorId: [2:7486790346248272492:2496], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/EightShard`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[2:7486790346248272492:2496]. isRollback=0 2025-03-28T09:30:17.976828Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTU4MTdkMDYtOTQ5YTQzYzUtMTYyMDg1ZGMtM2EyODQzYTk=, ActorId: [2:7486790346248272492:2496], ActorState: ExecuteState, TraceId: 01jqe1d0rta8khpr8jtbp46mcq, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:7486790354838207309:2496] from: [2:7486790354838207308:2496] 2025-03-28T09:30:17.976913Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7486790354838207309:2496] TxId: 281474976715673. Ctx: { TraceId: 01jqe1d0rta8khpr8jtbp46mcq, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NTU4MTdkMDYtOTQ5YTQzYzUtMTYyMDg1ZGMtM2EyODQzYTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/EightShard`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-28T09:30:17.977167Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTU4MTdkMDYtOTQ5YTQzYzUtMTYyMDg1ZGMtM2EyODQzYTk=, ActorId: [2:7486790346248272492:2496], ActorState: ExecuteState, TraceId: 01jqe1d0rta8khpr8jtbp46mcq, Create QueryResponse for error on request, msg: 2025-03-28T09:30:17.977699Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715673; 2025-03-28T09:30:17.977833Z node 2 :TX_DATASHARD ERROR: Complete volatile write [1743154218022 : 281474976715673] from 72075186224037888 at tablet 72075186224037888, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } >> KqpSnapshotRead::TestSnapshotExpiration-withSink [GOOD] >> KqpTx::BeginTransactionBadMode >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] >> KqpSnapshotIsolation::TReadOnlyOlap >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink >> KqpSnapshotIsolation::TSimpleOltp [FAIL] >> KqpSnapshotIsolation::TSimpleOlap >> KqpTx::InvalidateOnError [GOOD] >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] >> KqpSnapshotIsolation::TConflictWriteOlap >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] >> KqpSnapshotIsolation::TReadOnlyOltpNoSink >> KqpSinkTx::OlapSnapshotROInteractive1 [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 19042, MsgBus: 31170 2025-03-28T09:30:08.928854Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790315899807434:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:08.928918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015ca/r3tmp/tmpgXJhjE/pdisk_1.dat 2025-03-28T09:30:09.419076Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:09.421271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:09.421328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:09.423150Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19042, node 1 2025-03-28T09:30:09.555814Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:09.555830Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:09.555834Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:09.555939Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31170 TClient is connected to server localhost:31170 WaitRootIsUp 'Root'... 
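The `STATUS_LOCKS_BROKEN` sequence above ends in `Transaction locks invalidated. Table: /Root/EightShard, code: 2001`: the reading transaction took optimistic locks (the row versions it observed), a concurrent write committed first, and the commit-time validation failed, so the whole transaction was aborted for the client to retry — which is what the ReadWriteTxFailsOnConcurrentWrite* tests appear designed to provoke. A std-only C++ sketch of that commit-time check under a simple single-versioned row model (illustrative, not the actual datashard implementation):

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <string>

    struct TRow { uint64_t Version = 0; int Value = 0; };
    using TTable = std::map<std::string, TRow>;

    // A transaction records the version of every row it reads ("takes a lock").
    struct TTx {
        std::map<std::string, uint64_t> ReadLocks;

        int Read(const TTable& table, const std::string& key) {
            const TRow& row = table.at(key);
            ReadLocks[key] = row.Version;      // remember what we saw
            return row.Value;
        }

        // Commit-time validation: if any row we read has changed since,
        // the locks are broken and the transaction must abort.
        bool Commit(const TTable& table) const {
            for (const auto& [key, seen] : ReadLocks)
                if (table.at(key).Version != seen)
                    return false;              // "locks are not valid"
            return true;
        }
    };

    int main() {
        TTable table{{"k", {0, 1}}};
        TTx tx;
        tx.Read(table, "k");                   // tx observes version 0

        table["k"] = {1, 2};                   // concurrent writer commits first

        std::cout << (tx.Commit(table) ? "COMMITTED"
                                       : "ABORTED: locks invalidated") << '\n';
    }

In this model, as in the log, the abort surfaces at commit rather than at read time, and retrying the whole transaction is the only recovery.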
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:10.268627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.300514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.463823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.625163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.706748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:12.481797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790333079678394:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.481893Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.790991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.861562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.890728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.971446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.001390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.037299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.122181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790337374646213:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:13.122273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:13.122439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790337374646218:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:13.126726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:13.135401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790337374646220:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:13.207749Z node 1 :TX_PROXY ERROR: Actor# [1:7486790337374646276:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:13.929069Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790315899807434:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:13.929145Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6031, MsgBus: 14593 2025-03-28T09:30:15.290286Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790348622474334:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:15.290373Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015ca/r3tmp/tmpKzlft9/pdisk_1.dat 2025-03-28T09:30:15.386578Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6031, node 2 2025-03-28T09:30:15.427643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:15.427743Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:15.441495Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:15.471118Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:15.471155Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:15.471164Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:15.471309Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14593 TClient is connected to server localhost:14593 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-28T09:30:15.920249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:15.934065Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:30:16.020736Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:16.178180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:16.267910Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:18.277834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790361507378006:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:18.277926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:18.314058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:18.353253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:18.392017Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:18.426330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:18.460780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:18.533442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:18.585582Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790361507378525:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:18.585670Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:18.586026Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790361507378530:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:18.589867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:18.599868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790361507378532:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:18.673870Z node 2 :TX_PROXY ERROR: Actor# [2:7486790361507378585:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:19.900930Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486790365802346193:2500], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe1d2km2vmy3a8fwt1ct3y4. SessionId : ydb://session/3?node_id=2&id=YWQxZjkzYjctZDU4NGRlOTgtMzI3YTk1Zi01NmVhMjgzMQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:30:19.901414Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486790365802346195:2501], TxId: 281474976715672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=YWQxZjkzYjctZDU4NGRlOTgtMzI3YTk1Zi01NmVhMjgzMQ==. TraceId : 01jqe1d2km2vmy3a8fwt1ct3y4. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7486790365802346190:2488], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:30:19.901861Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWQxZjkzYjctZDU4NGRlOTgtMzI3YTk1Zi01NmVhMjgzMQ==, ActorId: [2:7486790365802346141:2488], ActorState: ExecuteState, TraceId: 01jqe1d2km2vmy3a8fwt1ct3y4, Create QueryResponse for error on request, msg: 2025-03-28T09:30:19.976646Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWQxZjkzYjctZDU4NGRlOTgtMzI3YTk1Zi01NmVhMjgzMQ==, ActorId: [2:7486790365802346141:2488], ActorState: ExecuteState, TraceId: 01jqe1d2w77bd8ejvmvxbezdbb, Create QueryResponse for error on request, msg: >> KqpLocks::DifferentKeyUpdate [GOOD] >> KqpLocks::EmptyRange >> KqpSinkMvcc::OlapNamedStatement [GOOD] >> KqpSinkMvcc::OlapMultiSinks >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpRanges::ValidatePredicates [GOOD] >> KqpRanges::ValidatePredicatesDataQuery |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple >> KqpTx::BeginTransactionBadMode [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::BeginTransactionBadMode [GOOD] Test command err: Trying to start YDB, gRPC: 1896, MsgBus: 2068 2025-03-28T09:30:02.165028Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790290702149604:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.165090Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015e9/r3tmp/tmpvMUPmg/pdisk_1.dat 2025-03-28T09:30:02.814820Z node 1 :IMPORT WARN: Table profiles were not loaded 
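The `KIKIMR_CONSTRAINT_VIOLATION` in the `KqpTx::InvalidateOnError` output above (`Conflict with existing key., code: 2012`) is INSERT semantics: unlike a blind UPSERT, INSERT enforces primary-key uniqueness and aborts the statement on a duplicate, after which the session answers the query with an error (`Create QueryResponse for error on request`) and invalidates the open transaction, as the test name suggests. A std-only C++ sketch contrasting the two write modes (illustrative; not YDB's storage code):

    #include <iostream>
    #include <map>
    #include <stdexcept>
    #include <string>

    using TTable = std::map<int, std::string>;

    // INSERT semantics: a second row with the same primary key is a
    // constraint violation and aborts the statement.
    void Insert(TTable& table, int key, const std::string& value) {
        if (!table.emplace(key, value).second)
            throw std::runtime_error("Conflict with existing key., code: 2012");
    }

    // UPSERT semantics: a blind write; no uniqueness check, existing row replaced.
    void Upsert(TTable& table, int key, const std::string& value) {
        table[key] = value;
    }

    int main() {
        TTable table;
        Insert(table, 1, "a");
        try {
            Insert(table, 1, "b");             // duplicate key -> aborted
        } catch (const std::exception& e) {
            std::cout << e.what() << '\n';     // Conflict with existing key., code: 2012
        }
        Upsert(table, 1, "b");                 // overwrite succeeds
        std::cout << table[1] << '\n';         // prints: b
    }

Here `emplace` either inserts or reports the clash, which is all the uniqueness check needs in this model; the blind `operator[]` write needs no check at all.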
2025-03-28T09:30:02.815423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:02.815518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:02.820197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1896, node 1 2025-03-28T09:30:02.928687Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:02.928706Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:02.928714Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:02.928816Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2068 TClient is connected to server localhost:2068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:03.790021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:30:03.831687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:30:04.044477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.234605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.323292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:06.006808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790307882020559:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.006969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.261830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.297748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.331381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.366089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.409253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.468794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.554270Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790307882021072:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.554378Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.554667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790307882021077:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.559028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:06.574595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790307882021079:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:06.679066Z node 1 :TX_PROXY ERROR: Actor# [1:7486790307882021135:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:07.166378Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790290702149604:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.166451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:17.809417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:17.809465Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:19.574967Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790363716596853:2634], TxId: 281474976710681, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTBmNzIwNDgtZTIyOWUxMDctMTc1MTY4YzYtODczNDJlZmI=. TraceId : 01jqe1d2af84a91vt584wt713y. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1743154208000/18446744073709551615 shard 72075186224037888 with lowWatermark v1743154208467/18446744073709551615 (node# 1 state# Ready) } } 2025-03-28T09:30:19.575495Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790363716596853:2634], TxId: 281474976710681, task: 1. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTBmNzIwNDgtZTIyOWUxMDctMTc1MTY4YzYtODczNDJlZmI=. TraceId : 01jqe1d2af84a91vt584wt713y. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1743154208000/18446744073709551615 shard 72075186224037888 with lowWatermark v1743154208467/18446744073709551615 (node# 1 state# Ready) } }. 2025-03-28T09:30:19.575854Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790363716596854:2635], TxId: 281474976710681, task: 2. Ctx: { TraceId : 01jqe1d2af84a91vt584wt713y. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTBmNzIwNDgtZTIyOWUxMDctMTc1MTY4YzYtODczNDJlZmI=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486790363716596849:2489], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:30:19.576231Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTBmNzIwNDgtZTIyOWUxMDctMTc1MTY4YzYtODczNDJlZmI=, ActorId: [1:7486790312176988690:2489], ActorState: ExecuteState, TraceId: 01jqe1d2af84a91vt584wt713y, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 11030, MsgBus: 20879 2025-03-28T09:30:20.322264Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790370729116482:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:20.322323Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015e9/r3tmp/tmp25VcIV/pdisk_1.dat 2025-03-28T09:30:20.447943Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:20.470952Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:20.471040Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:20.475820Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11030, node 2 2025-03-28T09:30:20.534233Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:20.534260Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:20.534272Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:20.534422Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20879 TClient is connected to server localhost:20879 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:21.000426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:21.008508Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:21.017053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
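
Note on the read abort earlier in this run: "Table id 2 has no snapshot at v1743154208000/... shard 72075186224037888 with lowWatermark v1743154208467/..." is the shard refusing to serve a read at a snapshot version older than its low watermark, because compaction may already have discarded the row versions that snapshot would need. A minimal, self-contained C++ sketch of the idea follows; the names (Shard, AdvanceWatermark) and the single-map layout are illustrative assumptions, not YDB's actual datashard implementation.

#include <cstdint>
#include <iostream>
#include <iterator>
#include <map>
#include <optional>
#include <string>

struct Version {
    uint64_t step = 0, txId = 0;
    bool operator<(const Version& o) const {
        return step != o.step ? step < o.step : txId < o.txId;
    }
};

class Shard {
    // key -> (version -> value); compaction trims old versions
    std::map<std::string, std::map<Version, std::string>> rows_;
    Version lowWatermark_{};

public:
    void Put(const std::string& key, Version v, std::string value) {
        rows_[key][v] = std::move(value);
    }

    // Compaction advances the watermark and drops versions below it,
    // keeping the newest version at or below it so reads at the
    // watermark itself can still be served.
    void AdvanceWatermark(Version w) {
        lowWatermark_ = w;
        for (auto& kv : rows_) {
            auto& versions = kv.second;
            auto it = versions.upper_bound(w);
            if (it != versions.begin()) --it;      // survivor: newest <= w
            versions.erase(versions.begin(), it);
        }
    }

    // A snapshot below the watermark cannot be trusted: the versions it
    // needs may be gone, so the shard must refuse ("has no snapshot at
    // v... with lowWatermark v..." in the log) instead of guessing.
    std::optional<std::string> Read(const std::string& key, Version snap) const {
        if (snap < lowWatermark_) return std::nullopt;  // -> read aborted
        auto row = rows_.find(key);
        if (row == rows_.end()) return std::nullopt;
        auto it = row->second.upper_bound(snap);        // newest version <= snap
        if (it == row->second.begin()) return std::nullopt;
        return std::prev(it)->second;
    }
};

int main() {
    Shard shard;
    shard.Put("k", Version{100, 1}, "old");
    shard.Put("k", Version{200, 1}, "new");
    shard.AdvanceWatermark(Version{150, 0});
    std::cout << shard.Read("k", Version{160, 0}).value_or("<aborted>") << "\n"; // old
    std::cout << shard.Read("k", Version{120, 0}).value_or("<aborted>") << "\n"; // <aborted>
}
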
2025-03-28T09:30:21.105626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:21.287883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:21.370926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:23.444479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790383614020129:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:23.444574Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:23.495512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:23.528808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:23.565851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:23.601151Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:23.632105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:23.663479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:23.739711Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790383614020644:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:23.739802Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:23.739917Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790383614020649:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:23.742663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:23.754652Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790383614020651:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:23.841637Z node 2 :TX_PROXY ERROR: Actor# [2:7486790383614020706:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSinkTx::SnapshotROInteractive1 [GOOD] >> TPersQueueTest::TestReadRuleServiceTypePassword [GOOD] >> TPersQueueTest::TestReadPartitionByGroupId >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] >> KqpSinkTx::InvalidateOnError [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpTx::RollbackManyTx [GOOD] >> KqpTx::RollbackRoTx |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpLocks::EmptyRange [GOOD] >> KqpLocks::EmptyRangeAlreadyBroken ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 16818, MsgBus: 22004 2025-03-28T09:30:02.708343Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790291100365833:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.731415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015d7/r3tmp/tmpMD0d8B/pdisk_1.dat 2025-03-28T09:30:03.199389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:03.199490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:03.199730Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:03.207426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16818, node 1 2025-03-28T09:30:03.451337Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:03.451361Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:03.451370Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:03.451528Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22004 TClient is connected to server localhost:22004 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:04.166340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.181042Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:04.198500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.389850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:04.591901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:30:04.682807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.255573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790308280236631:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.255682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.626234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.656914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.686021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.725151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.753233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.784586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.854458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790308280237140:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.854557Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.854855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790308280237148:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.862365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:06.874372Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:30:06.874930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790308280237150:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:06.976565Z node 1 :TX_PROXY ERROR: Actor# [1:7486790308280237205:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:07.688241Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790291100365833:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.688306Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10626, MsgBus: 2859 2025-03-28T09:30:09.938126Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790322420003016:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:09.938175Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015d7/r3tmp/tmpFnwQcA/pdisk_1.dat 2025-03-28T09:30:10.130087Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:10.130167Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:10.131550Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:10.143338Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10626, node 2 2025-03-28T09:30:10.259565Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:10.259594Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:10.259602Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:10.259716Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2859 TClient is connected to server localhost:2859 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
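
Note on the recurring "Resource pool default not found" / "Scheduled retry ... doublechecking" / "path exist, request accepts it" sequence above: it is lazy, idempotent creation of the default resource pool. A fetch that misses triggers a create, and a create that loses the race to a concurrent session treats "path exists" as success. A small C++ sketch of that pattern; the names (SchemeShard, EnsurePool) are illustrative stand-ins, not YDB's actual actors.

#include <iostream>
#include <mutex>
#include <set>
#include <string>

enum class Status { Success, NotFound, AlreadyExists };

// Stand-in for the schemeshard's path registry.
class SchemeShard {
    std::set<std::string> paths_;
    std::mutex mu_;

public:
    Status Fetch(const std::string& path) {
        std::lock_guard<std::mutex> l(mu_);
        return paths_.count(path) ? Status::Success : Status::NotFound;
    }
    Status Create(const std::string& path) {
        std::lock_guard<std::mutex> l(mu_);
        return paths_.insert(path).second ? Status::Success : Status::AlreadyExists;
    }
};

// Lazily ensure the pool exists. Two sessions may race to create it, so
// losing the race ("path exist, request accepts it") is also success.
Status EnsurePool(SchemeShard& ss, const std::string& path) {
    if (ss.Fetch(path) == Status::Success) return Status::Success;
    Status st = ss.Create(path);
    return st == Status::AlreadyExists ? Status::Success : st;
}

int main() {
    SchemeShard ss;
    const std::string pool = "/Root/.metadata/workload_manager/pools/default";
    std::cout << static_cast<int>(EnsurePool(ss, pool)) << "\n"; // 0: created
    std::cout << static_cast<int>(EnsurePool(ss, pool)) << "\n"; // 0: already there
}
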
2025-03-28T09:30:10.717688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.725883Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:10.745290Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:10.825964Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:11.010155Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:30:11.094413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.272147Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790339599873976:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:13.272237Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:13.320758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.354518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.435925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.469800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.504651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.538466Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.582185Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790339599874488:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:13.582242Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:13.582284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790339599874494:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:13.585597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:13.597345Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790339599874496:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:13.683467Z node 2 :TX_PROXY ERROR: Actor# [2:7486790339599874550:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:14.938670Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790322420003016:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:14.938741Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:25.115323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:25.115359Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:26.191181Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486790395434450275:2640], TxId: 281474976715681, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe1d8r86ehszs9zsahb16a4. SessionId : ydb://session/3?node_id=2&id=NjdmNGJlNzgtN2EzYjQzZTQtZjlmOWY2NjktYTJiZmI4Mw==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1743154214606/18446744073709551615 shard 72075186224037888 with lowWatermark v1743154214914/18446744073709551615 (node# 2 state# Ready) } } 2025-03-28T09:30:26.191839Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486790395434450275:2640], TxId: 281474976715681, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe1d8r86ehszs9zsahb16a4. SessionId : ydb://session/3?node_id=2&id=NjdmNGJlNzgtN2EzYjQzZTQtZjlmOWY2NjktYTJiZmI4Mw==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 2 has no snapshot at v1743154214606/18446744073709551615 shard 72075186224037888 with lowWatermark v1743154214914/18446744073709551615 (node# 2 state# Ready) } }. 2025-03-28T09:30:26.192409Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486790395434450276:2641], TxId: 281474976715681, task: 2. Ctx: { TraceId : 01jqe1d8r86ehszs9zsahb16a4. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=NjdmNGJlNzgtN2EzYjQzZTQtZjlmOWY2NjktYTJiZmI4Mw==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7486790395434450271:2496], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:30:26.192825Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjdmNGJlNzgtN2EzYjQzZTQtZjlmOWY2NjktYTJiZmI4Mw==, ActorId: [2:7486790343894842139:2496], ActorState: ExecuteState, TraceId: 01jqe1d8r86ehszs9zsahb16a4, Create QueryResponse for error on request, msg: >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::SnapshotROInteractive1 [GOOD] Test command err: Trying to start YDB, gRPC: 25190, MsgBus: 19999 2025-03-28T09:30:02.139893Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790291464255796:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.142415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001626/r3tmp/tmpJYGLXM/pdisk_1.dat 2025-03-28T09:30:02.673701Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:02.715498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:02.715610Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:02.717455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25190, node 1 2025-03-28T09:30:02.927284Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:02.927304Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:02.927327Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:02.927426Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19999 TClient is connected to server localhost:19999 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:03.833490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
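
Note on the ABORTED failure above: the session turns the error into a QueryResponse, and ABORTED is conventionally a retryable status, so the client side is expected to rerun the whole transaction on a fresh snapshot. A hedged sketch of such a retry loop in generic C++; this models the pattern only and is not a specific SDK API.

#include <chrono>
#include <iostream>
#include <stdexcept>
#include <thread>

struct Aborted : std::runtime_error {
    using std::runtime_error::runtime_error;
};

// Rerun the whole transaction body on ABORTED, with linear backoff.
// Each attempt must start from scratch (fresh transaction, fresh snapshot).
template <typename F>
auto RetryTx(F tx, int maxAttempts = 5) {
    for (int attempt = 1;; ++attempt) {
        try {
            return tx();
        } catch (const Aborted&) {
            if (attempt == maxAttempts) throw;
            std::this_thread::sleep_for(std::chrono::milliseconds(50 * attempt));
        }
    }
}

int main() {
    int calls = 0;
    int result = RetryTx([&] {
        // Simulate the failure mode from the log: the first attempts hit
        // an expired snapshot and are aborted; a later retry succeeds.
        if (++calls < 3) throw Aborted("Read request aborted: snapshot expired");
        return 42;
    });
    std::cout << "succeeded on attempt " << calls << ", result " << result << "\n";
}
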
2025-03-28T09:30:03.877679Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:05.582960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790304349158367:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.585764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790304349158355:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.585875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.589102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:05.603584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790304349158369:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:05.681930Z node 1 :TX_PROXY ERROR: Actor# [1:7486790304349158420:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:06.186953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.306602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:07.178220Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790291464255796:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.182729Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:07.426002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.702300Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTMyYmQ3NmItZTMyZTk3MjItNTEwMzgwY2MtYzFiNTQyODM=, ActorId: [1:7486790317234068780:2971], ActorState: ExecuteState, TraceId: 01jqe1crtn2hhwyg8jk3zh5g1q, Create QueryResponse for error on request, msg:
:3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 28459, MsgBus: 6443 2025-03-28T09:30:15.621812Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790348027512853:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:15.621888Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001626/r3tmp/tmpSbrGZs/pdisk_1.dat 2025-03-28T09:30:15.743288Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28459, node 2 2025-03-28T09:30:15.783398Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:15.783477Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:15.784962Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:15.833869Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:15.833893Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:15.833903Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:15.834020Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6443 TClient is connected to server localhost:6443 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:16.236578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:18.522973Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790360912415361:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
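
Note on the code-2008 error above ("Operation 'Upsert' can't be performed in read only transaction"): it states the contract directly, namely that a snapshot read-only interactive transaction serves reads but rejects any write. A tiny C++ model of that check; TxMode and the method names are illustrative, not YDB's API surface.

#include <iostream>
#include <map>
#include <stdexcept>
#include <string>

enum class TxMode { SerializableRW, SnapshotRO };

class Tx {
    TxMode mode_;
    std::map<std::string, std::string>& db_;

public:
    Tx(TxMode m, std::map<std::string, std::string>& db) : mode_(m), db_(db) {}

    std::string Select(const std::string& k) const {
        auto it = db_.find(k);
        return it == db_.end() ? std::string() : it->second;
    }

    // Writes are checked against the transaction mode up front; a
    // read-only snapshot transaction rejects them (code 2008 in the log).
    void Upsert(const std::string& k, const std::string& v) {
        if (mode_ == TxMode::SnapshotRO)
            throw std::runtime_error(
                "Operation 'Upsert' can't be performed in read only transaction");
        db_[k] = v;
    }
};

int main() {
    std::map<std::string, std::string> db{{"k", "v"}};
    Tx ro(TxMode::SnapshotRO, db);
    std::cout << ro.Select("k") << "\n";   // reads succeed
    try {
        ro.Upsert("k", "v2");              // writes are rejected
    } catch (const std::exception& e) {
        std::cout << e.what() << "\n";
    }
}
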
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:18.523056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:18.523493Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790360912415396:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:18.527738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:18.539725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790360912415398:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:30:18.613858Z node 2 :TX_PROXY ERROR: Actor# [2:7486790360912415449:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:18.664468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:18.703866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:19.687573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:20.678642Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790348027512853:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:20.680257Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 3709, MsgBus: 61156 2025-03-28T09:30:02.154597Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790289726319465:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.154675Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015ed/r3tmp/tmpI1DvSv/pdisk_1.dat 2025-03-28T09:30:02.710047Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:02.712316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:02.712403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:02.725025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3709, node 1 2025-03-28T09:30:02.931782Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:02.931809Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:02.931816Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:02.931926Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61156 TClient is connected to server localhost:61156 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:03.832733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:03.850841Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:05.699459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790302611222010:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.699469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790302611221983:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.699605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.703104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:05.714241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790302611222021:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:05.823166Z node 1 :TX_PROXY ERROR: Actor# [1:7486790302611222072:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:06.187685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.295301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:07.333825Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790289726319465:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.337051Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:07.602295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.687299Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710667; 2025-03-28T09:30:09.716461Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790319791100263:2971], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [1:7486790319791099652:2971]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[1:7486790319791100263:2971].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-28T09:30:09.717049Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790319791100245:2971], SessionActorId: [1:7486790319791099652:2971], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7486790319791099652:2971]. isRollback=0 2025-03-28T09:30:09.718529Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTY3ZDk5NjctNDc3M2EyZjYtMzVmYWVhZjYtOWJmMDQxNTM=, ActorId: [1:7486790319791099652:2971], ActorState: ExecuteState, TraceId: 01jqe1crv44td4p3jb59e5ggwe, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7486790319791100246:2971] from: [1:7486790319791100245:2971] 2025-03-28T09:30:09.718611Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486790319791100246:2971] TxId: 281474976710667. Ctx: { TraceId: 01jqe1crv44td4p3jb59e5ggwe, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTY3ZDk5NjctNDc3M2EyZjYtMzVmYWVhZjYtOWJmMDQxNTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-28T09:30:09.732446Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTY3ZDk5NjctNDc3M2EyZjYtMzVmYWVhZjYtOWJmMDQxNTM=, ActorId: [1:7486790319791099652:2971], ActorState: ExecuteState, TraceId: 01jqe1crv44td4p3jb59e5ggwe, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 10724, MsgBus: 3542 2025-03-28T09:30:15.556989Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790349268761188:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:15.557064Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015ed/r3tmp/tmp8H1fxp/pdisk_1.dat 2025-03-28T09:30:15.690274Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:15.721051Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:15.721150Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:15.722650Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10724, node 2 2025-03-28T09:30:15.787239Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:15.787262Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:15.787272Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:15.787389Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3542 TClient is connected to server localhost:3542 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:16.231431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
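
Note on the LOCKS BROKEN / code-2001 failure above: it is optimistic lock validation. Reads record the row version they observed, and commit re-checks those versions, aborting with "Transaction locks invalidated" when a concurrent commit changed them first. A compact C++ sketch under those assumptions; the names are illustrative, not YDB's implementation.

#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <utility>
#include <vector>

struct Row {
    std::string value;
    uint64_t version = 0;
};
using Table = std::map<std::string, Row>;

class OptimisticTx {
    Table& t_;
    std::vector<std::pair<std::string, uint64_t>> readSet_;   // key, seen version
    std::vector<std::pair<std::string, std::string>> writeSet_;

public:
    explicit OptimisticTx(Table& t) : t_(t) {}

    std::string Read(const std::string& k) {
        Row& r = t_[k];
        readSet_.emplace_back(k, r.version);  // remember what we saw
        return r.value;
    }

    void Write(const std::string& k, std::string v) {
        writeSet_.emplace_back(k, std::move(v));  // buffered until commit
    }

    // Commit validates the read set first; any version bump by a
    // concurrent committer means our "locks" are broken and we abort.
    bool Commit() {
        for (auto& rs : readSet_)
            if (t_[rs.first].version != rs.second) return false;
        for (auto& ws : writeSet_) {
            t_[ws.first].value = ws.second;
            ++t_[ws.first].version;
        }
        return true;
    }
};

int main() {
    Table t{{"k", Row{"v0", 1}}};
    OptimisticTx a(t), b(t);
    a.Read("k");
    b.Read("k");
    a.Write("k", "fromA");
    b.Write("k", "fromB");
    std::cout << a.Commit() << "\n";  // 1: first committer wins
    std::cout << b.Commit() << "\n";  // 0: locks invalidated -> abort/retry
}
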
2025-03-28T09:30:16.239308Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-03-28T09:30:18.724671Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790362153663711:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:18.724797Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:18.725192Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790362153663747:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:18.729608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-28T09:30:18.739086Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790362153663749:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-28T09:30:18.822466Z node 2 :TX_PROXY ERROR: Actor# [2:7486790362153663800:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:30:18.870709Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
2025-03-28T09:30:18.945422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
2025-03-28T09:30:20.020373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-28T09:30:20.832539Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790349268761188:2060];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:30:20.891924Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:30:21.366431Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3;
2025-03-28T09:30:21.366700Z node 2 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037889 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 }
2025-03-28T09:30:21.366870Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037889 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 }
2025-03-28T09:30:21.367030Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486790375038574125:2969], Table: `/Root/KV` ([72057594046644480:7:1]), SessionActorId: [2:7486790375038574041:2969]Got CONSTRAINT VIOLATION for table `/Root/KV`. ShardID=72075186224037889, Sink=[2:7486790375038574125:2969].{
: Error: Duplicate keys have been found., code: 2012 }
2025-03-28T09:30:21.367117Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486790375038574117:2969], SessionActorId: [2:7486790375038574041:2969], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7486790375038574041:2969]. isRollback=0
2025-03-28T09:30:21.367299Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjdiNzBiODgtYzRjNzNhYTktZjA3OTdlMjAtZjZmNWY0MWE=, ActorId: [2:7486790375038574041:2969], ActorState: ExecuteState, TraceId: 01jqe1d47d8m9qr66kjfsp3t3f, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7486790375038574118:2969] from: [2:7486790375038574117:2969]
2025-03-28T09:30:21.367392Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7486790375038574118:2969] TxId: 281474976715664. Ctx: { TraceId: 01jqe1d47d8m9qr66kjfsp3t3f, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NjdiNzBiODgtYzRjNzNhYTktZjA3OTdlMjAtZjZmNWY0MWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/KV`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } }
2025-03-28T09:30:21.368233Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjdiNzBiODgtYzRjNzNhYTktZjA3OTdlMjAtZjZmNWY0MWE=, ActorId: [2:7486790375038574041:2969], ActorState: ExecuteState, TraceId: 01jqe1d47d8m9qr66kjfsp3t3f, Create QueryResponse for error on request, msg:
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Duplicate keys have been found., code: 2012
2025-03-28T09:30:21.415064Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjdiNzBiODgtYzRjNzNhYTktZjA3OTdlMjAtZjZmNWY0MWE=, ActorId: [2:7486790375038574041:2969], ActorState: ExecuteState, TraceId: 01jqe1d49xdmr2w88swbk35y8c, Create QueryResponse for error on request, msg:
: Error: Transaction not found: 01jqe1d4780076fpvr05azc798, code: 2015
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
WAIT_INDEXATION: 0
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
>> KqpSinkLocks::EmptyRange [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBroken
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
>> KqpSinkLocks::EmptyRangeOlap [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
>> KqpSqlIn::SecondaryIndex_TupleSelect [GOOD]
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
|90.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 25643, MsgBus: 1967
2025-03-28T09:30:02.175586Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790291114045822:2140];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:30:02.180312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015e1/r3tmp/tmpwQPwhK/pdisk_1.dat
2025-03-28T09:30:02.675296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:30:02.675403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:30:02.682109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:30:02.730783Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 25643, node 1
2025-03-28T09:30:02.923907Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:30:02.923933Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:30:02.923949Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:30:02.924116Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:1967
TClient is connected to server localhost:1967
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:30:03.856385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:30:05.585371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790303998948293:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:05.586600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790303998948275:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:05.586740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:05.589099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:30:05.605392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790303998948297:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-03-28T09:30:05.699711Z node 1 :TX_PROXY ERROR: Actor# [1:7486790303998948348:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:30:06.196309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-28T09:30:06.305319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-03-28T09:30:07.262395Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790291114045822:2140];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:30:07.267814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:30:07.551497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:30:09.374699Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGRkOTI5ZTMtOGQ5NzBkZjktNmYwMTVmMjQtNDk5ZjAwMmE=, ActorId: [1:7486790321178825992:2970], ActorState: ExecuteState, TraceId: 01jqe1crcg27cwk962jfqbg77h, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables.
2025-03-28T09:30:17.714848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T09:30:17.714879Z node 1 :IMPORT WARN: Table profiles were not loaded
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18D6BF57
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18D62F3A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F94FE303D8F
18. ??:0: ?? @ 0x7F94FE303E3F
19. ??:0: ?? @ 0x16366028
Trying to start YDB, gRPC: 12805, MsgBus: 1405
2025-03-28T09:30:20.924191Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790369945704531:2069];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:30:20.924314Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015e1/r3tmp/tmpZR2D5O/pdisk_1.dat
2025-03-28T09:30:21.133332Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:30:21.159651Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:30:21.159738Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:30:21.165369Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 12805, node 2
2025-03-28T09:30:21.205177Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:30:21.205206Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:30:21.205218Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:30:21.205352Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:1405
TClient is connected to server localhost:1405
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:30:21.608893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:30:21.626526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:30:21.703160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:30:21.869037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-28T09:30:21.939381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
2025-03-28T09:30:24.163890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790387125575479:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:24.163958Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:24.229974Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-28T09:30:24.280369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-28T09:30:24.314030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-28T09:30:24.348411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-28T09:30:24.387950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-28T09:30:24.436244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-28T09:30:24.502238Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790387125575988:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:24.502322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:24.504647Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790387125575993:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:24.507824Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-28T09:30:24.517139Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790387125575995:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-28T09:30:24.578033Z node 2 :TX_PROXY ERROR: Actor# [2:7486790387125576048:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:30:25.926795Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790369945704531:2069];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:30:25.948552Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
|90.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL]
Test command err:
Trying to start YDB, gRPC: 21805, MsgBus: 19835
2025-03-28T09:30:02.151409Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790293296179364:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:30:02.151610Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00164e/r3tmp/tmpVUu1y4/pdisk_1.dat
2025-03-28T09:30:02.648564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:30:02.654076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:30:02.657073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:30:02.735838Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 21805, node 1
2025-03-28T09:30:02.927213Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:30:02.927237Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:30:02.927243Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:30:02.927356Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19835
TClient is connected to server localhost:19835
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:30:03.893142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:30:03.915671Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:30:05.728897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790306181081898:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:05.728917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790306181081911:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:05.729009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:05.732560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:30:05.743680Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790306181081927:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-03-28T09:30:05.834863Z node 1 :TX_PROXY ERROR: Actor# [1:7486790306181081978:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:30:06.193684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-28T09:30:06.317291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-03-28T09:30:07.241400Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790293296179364:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:30:07.249585Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:30:07.509594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:30:09.102883Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjZjOTc1NTEtMWJiOTY4N2QtMzA2NTExZDctOTE0YzhmMDg=, ActorId: [1:7486790319065992221:2971], ActorState: ExecuteState, TraceId: 01jqe1cr67cpkd03kcn38w9kgm, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables.
2025-03-28T09:30:17.697769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T09:30:17.697802Z node 1 :IMPORT WARN: Table profiles were not loaded
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18D79E87
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18D63A12
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7EFDBB8D1D8F
18. ??:0: ?? @ 0x7EFDBB8D1E3F
19. ??:0: ?? @ 0x16366028
Trying to start YDB, gRPC: 22763, MsgBus: 26332
2025-03-28T09:30:20.667870Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790368844399873:2063];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:30:20.668292Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00164e/r3tmp/tmp55TZb7/pdisk_1.dat
2025-03-28T09:30:20.809599Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:30:20.835915Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:30:20.836010Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:30:20.837390Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 22763, node 2
2025-03-28T09:30:20.931233Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:30:20.931263Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:30:20.931276Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:30:20.931405Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:26332
TClient is connected to server localhost:26332
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
2025-03-28T09:30:21.390052Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-28T09:30:23.624405Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790381729302423:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:23.624549Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790381729302414:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:23.624731Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:23.629595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-28T09:30:23.643533Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790381729302428:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-28T09:30:23.737698Z node 2 :TX_PROXY ERROR: Actor# [2:7486790381729302481:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:30:23.786641Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
2025-03-28T09:30:23.826744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
2025-03-28T09:30:25.079025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-28T09:30:25.913842Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790368844399873:2063];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:30:25.966350Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:30:26.603553Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWMxYTFkMmUtY2VjMjI4NDItM2Y0ZGQwYjMtZjA3NmU1Y2M=, ActorId: [2:7486790394614212811:2968], ActorState: ExecuteState, TraceId: 01jqe1d9b0ehj5608qj416wc22, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables.
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18D79E87
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18D63C3A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7EFDBB8D1D8F
18. ??:0: ?? @ 0x7EFDBB8D1E3F
19. ??:0: ?? @ 0x16366028
|90.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL]
Test command err:
Trying to start YDB, gRPC: 1713, MsgBus: 3925
2025-03-28T09:30:02.141378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790289799715000:2126];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:30:02.141442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015e3/r3tmp/tmpDsiTwm/pdisk_1.dat
2025-03-28T09:30:02.710644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:30:02.710739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:30:02.710908Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:30:02.723946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 1713, node 1
2025-03-28T09:30:02.931360Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:30:02.931389Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:30:02.931397Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:30:02.931520Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:3925
TClient is connected to server localhost:3925
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:30:03.847535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:30:05.731957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790302684617496:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:05.732070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790302684617485:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:05.732348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:05.736822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T09:30:05.747328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790302684617499:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking }
2025-03-28T09:30:05.842569Z node 1 :TX_PROXY ERROR: Actor# [1:7486790302684617550:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:30:06.186930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-28T09:30:06.327266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
2025-03-28T09:30:07.148868Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790289799715000:2126];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:30:07.171331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:30:07.410569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:30:09.212780Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTE2MWYxYWMtMzE3MDY0MjUtOTg5NmQ2ZmQtZDZiMjlhYzM=, ActorId: [1:7486790315569527813:2971], ActorState: ExecuteState, TraceId: 01jqe1cr5z4y9f4k0ad19n4q6h, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables.
2025-03-28T09:30:17.648845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T09:30:17.648884Z node 1 :IMPORT WARN: Table profiles were not loaded
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18D81843
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18D64092
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7FC58236FD8F
18. ??:0: ?? @ 0x7FC58236FE3F
19. ??:0: ?? @ 0x16366028
Trying to start YDB, gRPC: 10716, MsgBus: 14001
2025-03-28T09:30:20.923044Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790370450041379:2059];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:30:20.923151Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015e3/r3tmp/tmp1hMOuE/pdisk_1.dat
2025-03-28T09:30:21.090977Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:30:21.101153Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:30:21.101299Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:30:21.103187Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 10716, node 2
2025-03-28T09:30:21.176839Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:30:21.176869Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:30:21.176876Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:30:21.176983Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:14001
TClient is connected to server localhost:14001
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
waiting...
2025-03-28T09:30:21.588525Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-28T09:30:24.108155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790387629911200:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:24.108386Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:24.108782Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790387629911235:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:30:24.113636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-28T09:30:24.123180Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790387629911237:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-28T09:30:24.210007Z node 2 :TX_PROXY ERROR: Actor# [2:7486790387629911288:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:30:24.264355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
2025-03-28T09:30:24.345730Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
2025-03-28T09:30:25.655871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-28T09:30:26.526269Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790370450041379:2059];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:30:26.642103Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:30:27.314759Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTU0NGVkNDktMWZkNjhiNDctYzJlZGVmZTctOTc1ZTQ1NA==, ActorId: [2:7486790400514821411:2968], ActorState: ExecuteState, TraceId: 01jqe1d9xtdbg64b8xaqwdzmxv, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables.
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18D81843
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18D642BA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7FC58236FD8F
18. ??:0: ?? @ 0x7FC58236FE3F
19. ??:0: ?? @ 0x16366028
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpSqlIn::SecondaryIndex_TupleSelect [GOOD]
Test command err:
Trying to start YDB, gRPC: 63064, MsgBus: 32182
2025-03-28T09:29:31.939644Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790159667590146:2208];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:29:31.942228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005d2/r3tmp/tmpOHwQl7/pdisk_1.dat
2025-03-28T09:29:32.511638Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:29:32.513536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:29:32.513598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:29:32.517448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 63064, node 1
2025-03-28T09:29:32.705361Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:29:32.705389Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:29:32.705396Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:29:32.705545Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:32182
TClient is connected to server localhost:32182
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:33.545802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:29:33.601371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:33.768767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:33.970429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:29:34.058794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:29:35.737188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790176847460932:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:35.737277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:36.107073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:36.147035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:36.180254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:36.212278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:36.243105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:36.294858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:36.343437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790181142428741:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:36.343547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:36.343577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790181142428746:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:36.347651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:36.363043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790181142428748:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:36.463377Z node 1 :TX_PROXY ERROR: Actor# [1:7486790181142428803:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:36.939617Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790159667590146:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:36.939692Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:37.565360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:37.657533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:29:37.701249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
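[Editor's annotation] The repeated NOT_FOUND warnings above are the workload service lazily provisioning the 'default' resource pool under /Root/.metadata/workload_manager/pools on first query, and the TX_PROXY "path exist, request accepts it" message is the benign race when two actors create it concurrently. For reference, a hedged sketch of what such a pool looks like in YQL, assuming the CREATE RESOURCE POOL syntax of recent YDB releases with illustrative setting values (neither the statement nor these values appears verbatim in this log):

    -- Assumption: CREATE RESOURCE POOL as in recent YDB releases; the test
    -- cluster creates the 'default' pool automatically, so this only
    -- illustrates the kind of object TPoolCreatorActor provisions.
    CREATE RESOURCE POOL my_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- illustrative concurrency cap
        QUEUE_SIZE = 100              -- illustrative queue depth
    );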
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
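[Editor's annotation] The code 1108 warning above comes from YQL's legacy, non-ANSI IN semantics over nullable or empty collections. A minimal sketch of the fix the warning itself suggests, assuming a hypothetical table "t" with a nullable column "val" (both names are illustrative, not taken from this test suite):

    -- Switch IN to ANSI semantics for empty or nullable item collections,
    -- exactly as the code 1108 warning recommends.
    PRAGMA AnsiInForEmptyOrNullableItemsCollections;

    SELECT *
    FROM t                       -- hypothetical table
    WHERE val IN (1, 2, NULL);   -- without the pragma, a NULL item can make
                                 -- this predicate behave unexpectedly

With the pragma enabled, IN over a collection containing NULL follows ANSI three-valued logic instead of the legacy behavior, which is what the type annotation pass flags here.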
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:5:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:44: Warning: At function: Filter, At function: Coalesce
:5:67: Warning: At function: SqlIn
:5:67: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 5029, MsgBus: 11972 2025-03-28T09:29:41.652452Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790202663689697:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:41.667740Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005d2/r3tmp/tmpfb6vzA/pdisk_1.dat 2025-03-28T09:29:41.828977Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:41.854358Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:41.854438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:41.855937Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5029, node 2 2025-03-28T09:29:41.909803Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:41.909819Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:41.909826Z node 2 :NET_CL ... cheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:13.434857Z node 5 :TX_PROXY ERROR: Actor# [5:7486790339157375817:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:14.092799Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790321977504283:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:14.092885Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:14.757361Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:30:14.794024Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:30:14.837669Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:60: Warning: At function: Filter, At function: Coalesce
:6:33: Warning: At function: SqlIn
:6:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Type annotation, code: 1030
:4:21: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:5:60: Warning: At function: Filter, At function: Coalesce
:6:33: Warning: At function: SqlIn
:6:33: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 64256, MsgBus: 22487 2025-03-28T09:30:19.592366Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790364051305904:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:19.592444Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005d2/r3tmp/tmpKCtvxi/pdisk_1.dat 2025-03-28T09:30:19.732622Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:19.761711Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:19.761829Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:19.763531Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64256, node 6 2025-03-28T09:30:19.827698Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:19.827731Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:19.827746Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:19.827932Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22487 TClient is connected to server localhost:22487 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:20.516345Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:20.529114Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:20.545204Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:30:20.629358Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:20.835897Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:20.934018Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:24.153126Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790385526144168:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:24.153262Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:24.213511Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:24.251779Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:24.293194Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:24.375377Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:24.417438Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:24.498043Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:24.550459Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790385526144685:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:24.550559Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:24.550614Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7486790385526144690:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:24.556802Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:24.586920Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:7486790385526144692:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:24.594800Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486790364051305904:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:24.594881Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:24.646480Z node 6 :TX_PROXY ERROR: Actor# [6:7486790385526144748:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:26.318639Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:30:26.382230Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:30:26.523376Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480
: Warning: Type annotation, code: 1030
:5:17: Warning: At function: RemovePrefixMembers, At function: Sort, At function: PersistableRepr, At function: SqlProject
:6:56: Warning: At function: Filter, At function: Coalesce
:7:29: Warning: At function: SqlIn
:7:29: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108
: Warning: Execution, code: 1060
:5:17: Warning: Cost Based Optimizer could not be applied to this query: couldn't load statistics, code: 8001 |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::SrcIdCompatibility [GOOD] Test command err: === Start server === Server->StartServer(false); 2025-03-28T09:25:18.534004Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789070160124869:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.534098Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:18.559473Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789073270518024:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.559499Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:18.729315Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:25:18.738780Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001233/r3tmp/tmpA2ruuP/pdisk_1.dat 2025-03-28T09:25:18.951548Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:18.980818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:18.980962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:18.986463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:18.987692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:18.987745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:18.991619Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:25:18.995535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
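[Editor's annotation] The code 8001 warning at the top of this block ("Cost Based Optimizer could not be applied to this query: couldn't load statistics") means the optimizer fell back to heuristics because column statistics were unavailable. A hedged sketch of collecting them, assuming YDB's ANALYZE statement for column tables (an assumption; the statement does not appear anywhere in this log, and the table path is illustrative):

    -- Assumption: ANALYZE gathers the column-table statistics the CBO loads;
    -- the path below is hypothetical.
    ANALYZE `/Root/olap_table`;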
TServer::EnableGrpc on GrpcPort 18981, node 1 2025-03-28T09:25:19.134260Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001233/r3tmp/yandexwzMBGN.tmp 2025-03-28T09:25:19.134288Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001233/r3tmp/yandexwzMBGN.tmp 2025-03-28T09:25:19.134480Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001233/r3tmp/yandexwzMBGN.tmp 2025-03-28T09:25:19.134613Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:25:19.314075Z INFO: TTestServer started on Port 18969 GrpcPort 18981 TClient is connected to server localhost:18969 PQClient connected to localhost:18981 === TenantModeEnabled() = 0 === Init PQ - start server on port 18981 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:25:19.784370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:25:19.784569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.784822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:25:19.785068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:25:19.785108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.788148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:25:19.788282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:25:19.788419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.788464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:25:19.788480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-28T09:25:19.788492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-03-28T09:25:19.790292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.790319Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:25:19.790333Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-28T09:25:19.794248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.794291Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.794314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:25:19.794357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:25:19.798021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:19.815556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:25:19.815586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-28T09:25:19.815631Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:25:19.817177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-28T09:25:19.817324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:25:19.820313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153919864, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:25:19.820447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743153919864 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:25:19.820465Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:25:19.820686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-28T09:25:19.820709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:25:19.820812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:25:19.820836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:25:19.823051Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:25:19.823104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, 
path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:25:19.823319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:25:19.823382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486789070160125523:2383], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-28T09:25:19.823443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.823498Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-28T09:25:19.823603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:25:19.823657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:25:19.823689Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:25:19.823699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:25:19.823732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-28T09:25:19.823763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:25:19.823782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-28T09:25:19.823801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-03-28T09:25:19.823868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [O ... sion cookie: 5 sessionId: partition: 7 expectedGeneration: (NULL) 2025-03-28T09:30:29.334870Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) TEvClientConnected Status OK, TabletId: 72075186224037910, NodeId 27, Generation: 1 2025-03-28T09:30:29.334942Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server connected, pipe [27:7486790406927741275:2750], now have 1 active actors on pipe 2025-03-28T09:30:29.335120Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-03-28T09:30:29.335161Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-03-28T09:30:29.335293Z node 27 :PERSQUEUE INFO: new Cookie test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0 generated for partition 7 topic 'rt3.dc1--account--topic100' owner test-src-id-compat2 2025-03-28T09:30:29.335438Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 7 2025-03-28T09:30:29.338906Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743154229338 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:30:29.335517Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 0 2025-03-28T09:30:29.335843Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-03-28T09:30:29.335868Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-03-28T09:30:29.335974Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 0 2025-03-28T09:30:29.336184Z node 27 :PQ_WRITE_PROXY INFO: session inited cookie: 5 partition: 7 MaxSeqNo: 0 sessionId: test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0 2025-03-28T09:30:29.339102Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [] Write session established. Init response: session_id: "test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0" topic: "account/topic100" cluster: "dc1" partition_id: 7 supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-28T09:30:29.339581Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0] Write 1 messages with Id from 1 to 1 2025-03-28T09:30:29.340229Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0] Write session: try to update token 2025-03-28T09:30:29.340317Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0] Send 1 message(s) (0 left), first sequence number is 1 2025-03-28T09:30:29.341075Z node 27 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-28T09:30:29.341474Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-03-28T09:30:29.341761Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-03-28T09:30:29.341802Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-03-28T09:30:29.341907Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 0 requestId: cookie: 1 2025-03-28T09:30:29.341979Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:30:29.342288Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--account--topic100' requestId: 2025-03-28T09:30:29.342315Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message batch for topic 'rt3.dc1--account--topic100' partition 7 2025-03-28T09:30:29.342384Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] got client message topic: rt3.dc1--account--topic100 partition: 7 SourceId: '\0test-src-id-compat2' SeqNo: 1 partNo : 0 messageNo: 1 size 102 offset: -1 2025-03-28T09:30:29.342663Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 
'rt3.dc1--account--topic100' partition 7 part blob processing sourceId '\0test-src-id-compat2' seqNo 1 partNo 0 2025-03-28T09:30:29.343541Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 part blob complete sourceId '\0test-src-id-compat2' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 189 count 1 nextOffset 1 batches 1 2025-03-28T09:30:29.344731Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Add new write blob: topic 'rt3.dc1--account--topic100' partition 7 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000007_00000000000000000000_00000_0000000001_00000| size 177 WTime 1743154229343 2025-03-28T09:30:29.344975Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:30:29.345006Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] --- delete ---------------- 2025-03-28T09:30:29.345029Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] [x0000000007, x0000000008) 2025-03-28T09:30:29.345059Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] --- write ----------------- 2025-03-28T09:30:29.345089Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] m0000000007ptest-src-id-compat2 2025-03-28T09:30:29.345105Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] d0000000007_00000000000000000000_00000_0000000001_00000| 2025-03-28T09:30:29.345123Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] i0000000007 2025-03-28T09:30:29.345152Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] --- rename ---------------- 2025-03-28T09:30:29.345179Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] =========================== 2025-03-28T09:30:29.345229Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:30:29.345362Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 7 offset 0 partNo 0 count 1 size 177 2025-03-28T09:30:29.359259Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0] Write session got write response: sequence_numbers: 1 offsets: 0 already_written: false partition_id: 7 write_statistics { persist_duration_ms: 12 queued_in_partition_duration_ms: 4 } 2025-03-28T09:30:29.359351Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0] Write session: acknoledged message 1 2025-03-28T09:30:29.359644Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0] Write session: close. 
Timeout = 0 ms 2025-03-28T09:30:29.359710Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0] Write session will now close 2025-03-28T09:30:29.359783Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0] Write session: aborting 2025-03-28T09:30:29.360373Z :INFO: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:30:29.360442Z :DEBUG: [] MessageGroupId [test-src-id-compat2] SessionId [test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0] Write session: destroy 2025-03-28T09:30:29.355919Z node 27 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 7 offset 0 count 1 size 177 actorID [27:7486790389747870425:2501] 2025-03-28T09:30:29.356120Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 122 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:30:29.356186Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::ReplyWrite. Partition: 7 2025-03-28T09:30:29.356257Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Answering for message sourceid: '\0test-src-id-compat2', Topic: 'rt3.dc1--account--topic100', Partition: 7, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-03-28T09:30:29.356551Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:30:29.356589Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Topic 'rt3.dc1--account--topic100' partition 7 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-28T09:30:29.356713Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--account--topic100' partition: 7 messageNo: 1 requestId: cookie: 1 2025-03-28T09:30:29.356891Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:30:29.357436Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] read cookie 0 Topic 'rt3.dc1--account--topic100' partition 7 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-03-28T09:30:29.357472Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-03-28T09:30:29.357523Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-03-28T09:30:29.357550Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:30:29.357634Z node 27 :PERSQUEUE DEBUG: Topic 'rt3.dc1--account--topic100' partition 7 user user readTimeStamp done, result 1743154229338 queuesize 0 startOffset 0 2025-03-28T09:30:29.359423Z node 27 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037910' partition 7 offset 0 partno 0 count 1 parts 0 size 177 2025-03-28T09:30:29.361758Z node 27 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 5 sessionId: test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0 grpc read done: success: 0 data: 2025-03-28T09:30:29.361783Z node 27 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0 grpc read failed 2025-03-28T09:30:29.361831Z node 27 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0 grpc closed 2025-03-28T09:30:29.361861Z node 27 :PQ_WRITE_PROXY INFO: session v1 cookie: 5 sessionId: test-src-id-compat2|aea1397d-fb4b637a-2554e4-9fbf52ea_0 is DEAD 2025-03-28T09:30:29.363532Z node 27 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037910 (partition=7) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:30:29.365183Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910] server disconnected, pipe [27:7486790406927741275:2750] destroyed 2025-03-28T09:30:29.365235Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037910, Partition: 7, State: StateIdle] TPartition::DropOwner. >> KqpTx::RollbackRoTx [GOOD] |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> KqpSinkLocks::OlapUncommittedRead [GOOD] >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackRoTx [GOOD] Test command err: Trying to start YDB, gRPC: 27102, MsgBus: 8330 2025-03-28T09:30:17.332219Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790354089306388:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:17.332760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015b9/r3tmp/tmpP8YBde/pdisk_1.dat 2025-03-28T09:30:17.615216Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27102, node 1 2025-03-28T09:30:17.685073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:17.685208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:17.687111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:17.699179Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:17.699206Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:17.699216Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:17.699346Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8330 TClient is connected to server localhost:8330 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:18.198270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:18.227682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:18.374272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:30:18.548079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:18.619826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:20.328261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790366974210040:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:20.328373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:20.566858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:20.598972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:20.627267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:20.654044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:20.686820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:20.723249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:20.810829Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790366974210556:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:20.810936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:20.811220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790366974210561:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:20.814792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:20.828523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790366974210563:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:20.913788Z node 1 :TX_PROXY ERROR: Actor# [1:7486790366974210618:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:22.334792Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790354089306388:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:22.334866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25411, MsgBus: 6892 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015b9/r3tmp/tmpNxgJmw/pdisk_1.dat 2025-03-28T09:30:28.185349Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:30:28.269518Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:28.280658Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:28.280746Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:28.282239Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25411, node 2 2025-03-28T09:30:28.359262Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:28.359289Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:28.359304Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:28.359439Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6892 TClient is connected to server localhost:6892 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:28.888552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:30:28.911531Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:29.020697Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:29.255387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:29.335227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:31.535873Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790417788482405:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:31.535980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:31.577721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:31.613796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:31.643954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:31.673764Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:31.702729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:31.770277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:31.822105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790417788482917:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:31.822190Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:31.822228Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790417788482922:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:31.825648Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:31.838533Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790417788482924:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:31.895338Z node 2 :TX_PROXY ERROR: Actor# [2:7486790417788482977:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:33.069016Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZDgzZmE0MGYtYWMxM2IyZGYtODJjMjkzOTMtYzVjYTVhYWI=, ActorId: [2:7486790422083450533:2488], ActorState: ReadyState, TraceId: 01jqe1dfq4cxssvg2q491fk07e, Create QueryResponse for error on request, msg: >> KqpSinkTx::OlapDeferredEffects [GOOD] >> KqpSinkTx::OlapExplicitTcl >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] >> TBlobStorageProxyTest::TestProxyPutSingleTimeout >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe >> TBlobStorageProxyTest::TestDoubleGroups >> TBlobStorageProxyTest::TestQuadrupleGroups >> TBlobStorageProxyTest::TestProxyGetSingleTimeout >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock >> TBlobStorageProxyTest::TestBlock >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 >> TBlobStorageProxyTest::TestGetMultipart >> TBlobStorageProxyTest::TestPartialGetBlock >> TBlobStorageProxyTest::TestVPutVGet >> TBlobStorageProxyTest::TestInFlightPuts >> TBlobStorageProxyTest::TestBlockPersistence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapUncommittedRead [GOOD] Test command err: Trying to start YDB, gRPC: 21793, MsgBus: 25271 2025-03-28T09:30:02.141452Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790291061817035:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.141497Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015ee/r3tmp/tmpDAu8q6/pdisk_1.dat 2025-03-28T09:30:02.788233Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:02.797799Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:02.801988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:02.808480Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21793, node 1 2025-03-28T09:30:02.927117Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:02.927142Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:02.927147Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:02.927227Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25271 TClient is connected to server localhost:25271 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:03.818123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:03.846862Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:05.738010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790303946719584:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.738089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790303946719595:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.738137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.742068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:05.750394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790303946719605:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:05.836675Z node 1 :TX_PROXY ERROR: Actor# [1:7486790303946719656:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:06.188872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.331981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:07.313455Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790291061817035:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.341388Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:07.633054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.506429Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2025-03-28T09:30:09.506603Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-28T09:30:09.506749Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-28T09:30:09.506943Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790321126597533:2971], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7486790321126597214:2971]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7486790321126597533:2971].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-28T09:30:09.507476Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790321126597519:2971], SessionActorId: [1:7486790321126597214:2971], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7486790321126597214:2971]. isRollback=0 2025-03-28T09:30:09.507698Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTM1MjNjNC1kZjdmMzg2Yi0zMmE2OTM5MS05ZWIzNTNkZg==, ActorId: [1:7486790321126597214:2971], ActorState: ExecuteState, TraceId: 01jqe1crn9323bcb1t1mmd3pwn, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7486790321126597520:2971] from: [1:7486790321126597519:2971] 2025-03-28T09:30:09.507785Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486790321126597520:2971] TxId: 281474976710665. Ctx: { TraceId: 01jqe1crn9323bcb1t1mmd3pwn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM1MjNjNC1kZjdmMzg2Yi0zMmE2OTM5MS05ZWIzNTNkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-28T09:30:09.509249Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTM1MjNjNC1kZjdmMzg2Yi0zMmE2OTM5MS05ZWIzNTNkZg==, ActorId: [1:7486790321126597214:2971], ActorState: ExecuteState, TraceId: 01jqe1crn9323bcb1t1mmd3pwn, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 30824, MsgBus: 14413 2025-03-28T09:30:15.650802Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790348905532507:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:15.650859Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015ee/r3tmp/tmpJJxemg/pdisk_1.dat 2025-03-28T09:30:15.817298Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:15.831939Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:15.832024Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:15.834517Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30824, node 2 2025-03-28T09:30:15.908367Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:15.908392Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:15.908403Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:15.908517Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14413 TClient is connected to server localhost:14413 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:16.352415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:18.852536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790361790435053:2333], DatabaseId ... 
SHARD WARN: tablet_id=72075186224038044;self_id=[2:7486790387560245301:3373];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.547520Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038056;self_id=[2:7486790383265277998:3370];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038056;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.547764Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7486790383265277830:3351];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.548079Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7486790387560245378:3387];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.548457Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7486790387560245378:3387];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.549446Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7486790387560245454:3395];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.549891Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7486790383265277880:3361];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.550112Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7486790387560245354:3380];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.550426Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7486790387560245354:3380];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.551249Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7486790387560245440:3390];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.551718Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7486790383265277869:3357];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.554003Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7486790387560245309:3376];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.554400Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7486790387560245309:3376];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:27.555128Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7486790387560245363:3383];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.555559Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7486790387560245363:3383];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.555961Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7486790387560245442:3391];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.556276Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7486790387560245454:3395];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.556787Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7486790387560245471:3397];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.557190Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7486790387560245471:3397];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.557431Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7486790387560245359:3382];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038048;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.557818Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7486790387560245359:3382];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.568069Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7486790387560245307:3375];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.569372Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7486790387560245307:3375];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.575047Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7486790383265277420:3261];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038072;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.575508Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7486790383265277420:3261];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038072;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.575745Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7486790383265277797:3336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:27.581459Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7486790383265277462:3292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.581871Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7486790383265277462:3292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.583410Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[2:7486790383265277143:3205];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038093;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.583876Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[2:7486790383265277143:3205];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038093;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.591754Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7486790383265277797:3336];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2025-03-28T09:30:28.182623Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037896;self_id=[2:7486790366085402622:2351];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037896;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:28.182782Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037897;self_id=[2:7486790366085402607:2347];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037897;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:28.182860Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037888;self_id=[2:7486790366085402611:2349];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037888;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:28.182918Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037894;self_id=[2:7486790366085402610:2348];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037894;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:28.182960Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037891;self_id=[2:7486790366085402593:2344];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:28.182990Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037895;self_id=[2:7486790366085402606:2346];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037895;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0;
2025-03-28T09:30:28.183027Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037892;self_id=[2:7486790366085402624:2352];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037892;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:28.183088Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037893;self_id=[2:7486790366085402620:2350];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037893;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:28.183811Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037889;self_id=[2:7486790366085402595:2345];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037889;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:28.183886Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037890;self_id=[2:7486790366085402644:2353];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037890;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-28T09:30:30.797808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:30.797833Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] Test command err: Trying to start YDB, gRPC: 23045, MsgBus: 25758 2025-03-28T09:30:03.602794Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790295707391081:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:03.603291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015d6/r3tmp/tmpfN7KKo/pdisk_1.dat 2025-03-28T09:30:04.093602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:04.093723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:04.096020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:04.104223Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23045, node 1 2025-03-28T09:30:04.266663Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:04.266704Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:04.266712Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:04.266834Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25758 TClient is connected to server localhost:25758 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:04.870121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:06.811523Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790308592293474:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.811656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.812007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790308592293510:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:06.816092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:06.831077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790308592293512:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:06.928915Z node 1 :TX_PROXY ERROR: Actor# [1:7486790308592293563:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:07.235257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:07.366187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.578934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.589355Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790295707391081:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:08.589448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 6558, MsgBus: 1749 test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015d6/r3tmp/tmp0tbyNd/pdisk_1.dat 2025-03-28T09:30:16.201638Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:30:16.202737Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6558, node 2 2025-03-28T09:30:16.240012Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:16.240113Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:16.244135Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:16.291142Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:16.291161Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:16.291168Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:16.291269Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1749 TClient is connected to server localhost:1749 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:16.750239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:19.198869Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790362982810399:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:19.198976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:19.199648Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790362982810434:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:19.203975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:19.214631Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790362982810436:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:30:19.289154Z node 2 :TX_PROXY ERROR: Actor# [2:7486790362982810489:2338] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:19.342066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:19.508987Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[2:7486790362982810671:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:19.508987Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7486790362982810683:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:19.509215Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7486790362982810683:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:19.509514Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7486790362982810683:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:30:19.509639Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7486790362982810683:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:30:19.509694Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075 ... 
NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.528189Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7486790380162685950:3381];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.528493Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[2:7486790380162685873:3348];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.528662Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7486790380162685929:3373];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038029;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.530270Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7486790380162685927:3372];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.531007Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7486790380162685901:3360];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.539550Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7486790380162686025:3391];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.539970Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7486790380162686025:3391];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.540715Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7486790380162685939:3377];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038051;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.541156Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7486790380162685899:3359];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.542728Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[2:7486790380162685927:3372];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.542958Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7486790380162685901:3360];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.543783Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7486790380162685877:3349];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038050;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:27.544187Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7486790380162685947:3380];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.544581Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7486790380162685877:3349];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038050;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.544744Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7486790380162685939:3377];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038051;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.544954Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7486790380162685899:3359];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.546507Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7486790380162686016:3388];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.547516Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7486790380162686016:3388];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.547696Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7486790380162685947:3380];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.548085Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7486790380162686020:3389];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.548428Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[2:7486790380162686020:3389];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.550423Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7486790380162685932:3374];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.550869Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7486790380162685932:3374];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.554140Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7486790380162685936:3376];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.554529Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7486790380162685936:3376];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:27.554800Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7486790380162685895:3357];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.555212Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7486790380162685895:3357];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.556811Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7486790380162685893:3356];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038048;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.557288Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7486790380162685961:3385];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.559294Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7486790380162685893:3356];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.559993Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7486790380162685961:3385];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.561902Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7486790380162685886:3353];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.562274Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7486790380162685886:3353];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.564360Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7486790380162685906:3362];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.566839Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7486790380162685906:3362];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.587023Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7486790380162685912:3365];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.587757Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7486790380162685912:3365];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.967401Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667;
2025-03-28T09:30:27.969064Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.971505Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[2:7486790367277780169:2691];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037934;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:27.972392Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037934;self_id=[2:7486790367277780169:2691];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037934;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:28.008924Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7486790362982810681:2346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:28.009360Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[2:7486790362982810681:2346];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037889;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2025-03-28T09:30:31.193407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:31.193445Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 14911, MsgBus: 18688 2025-03-28T09:30:16.066434Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790353002904071:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:16.066533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015bf/r3tmp/tmpPBEvk5/pdisk_1.dat 2025-03-28T09:30:16.476683Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:16.490161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:16.490257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:16.493445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14911, node 1 2025-03-28T09:30:16.570014Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:16.570041Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:16.570050Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:16.570191Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18688 TClient is connected to server localhost:18688 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:17.051156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:17.079016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:17.227152Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:17.378546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:17.443734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:19.256679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790365887807738:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:19.256815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:19.545810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:19.611598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:19.639308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:19.710370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:19.748290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:19.822376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:19.907568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790365887808264:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:19.907689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:19.907980Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790365887808269:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:19.911911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:19.922644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790365887808271:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:19.998587Z node 1 :TX_PROXY ERROR: Actor# [1:7486790365887808327:3457] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:21.066580Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790353002904071:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:21.066677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30119, MsgBus: 21023 2025-03-28T09:30:22.031138Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790377313648571:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:22.031219Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015bf/r3tmp/tmpzz3yT7/pdisk_1.dat 2025-03-28T09:30:22.168545Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30119, node 2 2025-03-28T09:30:22.206076Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:22.206164Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:22.218185Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:22.241208Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:22.241233Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:22.241241Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:22.241422Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21023 TClient is connected to server localhost:21023 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
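The recurring sequence above — "Resource pool default not found", then ESchemeOpCreateResourcePool, then "Scheduled retry ... doublechecking", then "path exist, request accepts it" — is an idempotent create-if-absent race: several actors provision /Root/.metadata/workload_manager/pools/default concurrently and the losers accept the already-existing path as success. A minimal, self-contained C++ sketch of that pattern (illustrative only; the names model the behavior visible in this log, not the actual TPoolCreatorActor API):

#include <atomic>
#include <cstdio>
#include <thread>
#include <vector>

enum class ECreateStatus { Created, AlreadyExists };

std::atomic<bool> PoolExists{false};   // stands in for the scheme path

// Create the default pool; treat "path exist" as success (idempotent).
ECreateStatus EnsureDefaultPool() {
    bool expected = false;
    if (PoolExists.compare_exchange_strong(expected, true)) {
        return ECreateStatus::Created;       // this racer won the creation
    }
    return ECreateStatus::AlreadyExists;     // "path exist, request accepts it"
}

int main() {
    std::vector<std::thread> racers;
    for (int i = 0; i < 3; ++i) {
        racers.emplace_back([i] {
            auto st = EnsureDefaultPool();
            std::printf("racer %d: %s\n", i,
                        st == ECreateStatus::Created ? "Created" : "AlreadyExists");
        });
    }
    for (auto& t : racers) {
        t.join();
    }
}

Every racer ends in the same state, which is why the TX_PROXY "path exist, request accepts it" issue is benign in these runs and the tests proceed normally after it.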
2025-03-28T09:30:22.696336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:22.702817Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:22.709965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:22.795454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:22.949256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:23.042796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCre ... 644480 2025-03-28T09:30:25.452174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:25.497630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:25.576731Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:25.643060Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790390198552742:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:25.643191Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:25.643478Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790390198552747:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:25.648337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:25.659684Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715668, at schemeshard: 72057594046644480 2025-03-28T09:30:25.660123Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790390198552749:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:30:25.757526Z node 2 :TX_PROXY ERROR: Actor# [2:7486790390198552805:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:27.030959Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790377313648571:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:27.031024Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:27.348026Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWI3NGQ3NDItYzkwYWMwOGMtMmY4ZjViNzctZGU0ZGQzZmY=, ActorId: [2:7486790394493520358:2488], ActorState: ExecuteState, TraceId: 01jqe1da1qb78e57wmnbsse771, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 11052, MsgBus: 20916 2025-03-28T09:30:28.540099Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486790404272311746:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:28.540233Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015bf/r3tmp/tmpthMryf/pdisk_1.dat 2025-03-28T09:30:28.823167Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11052, node 3 2025-03-28T09:30:28.899033Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:28.899149Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:28.905963Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:28.927291Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:28.927316Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:28.927326Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:28.927470Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20916 TClient is connected to server localhost:20916 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:29.433958Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:29.444702Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:29.461825Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:29.555702Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:30:29.722428Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:29.793278Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:32.219422Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486790421452182693:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:32.219495Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:32.266875Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:32.296264Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:32.322406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:32.360673Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:32.399416Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:32.468867Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:32.522260Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486790421452183210:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:32.522366Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:32.522620Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486790421452183215:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:32.527081Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:32.544243Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486790421452183217:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:32.598341Z node 3 :TX_PROXY ERROR: Actor# [3:7486790421452183271:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:33.532229Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486790404272311746:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:33.532312Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:34.131222Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDVmNDczMzctZjNhZGUxMWEtNWI1NDNjODgtNmYyOTAyMzE=, ActorId: [3:7486790425747150827:2488], ActorState: ExecuteState, TraceId: 01jqe1dgj3fm810fmqhhb6z67w, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken >> TBlobStorageProxyTest::TestProxyLongTailDiscover >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> TBlobStorageProxyTest::TestPutGetMany >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 64813, MsgBus: 27460 2025-03-28T09:30:02.147521Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790289801091863:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.154475Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00163b/r3tmp/tmpYcuwGc/pdisk_1.dat 2025-03-28T09:30:02.713559Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:02.720920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:02.721062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:02.726421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64813, node 1 2025-03-28T09:30:02.927028Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:02.927056Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:02.927062Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:02.927172Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27460 TClient is connected to server localhost:27460 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:03.793137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:30:03.819490Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:05.582979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790302685994280:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.583500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790302685994268:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.583612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.588739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:05.600667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790302685994282:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:05.672425Z node 1 :TX_PROXY ERROR: Actor# [1:7486790302685994333:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:06.187215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.307872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:07.159377Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790289801091863:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.166994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:07.403476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 8974, MsgBus: 20908 2025-03-28T09:30:15.632283Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790345486189187:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:15.632407Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00163b/r3tmp/tmpu8R4qr/pdisk_1.dat 2025-03-28T09:30:15.787068Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:15.789585Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:15.789664Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:15.791160Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8974, node 2 2025-03-28T09:30:15.831004Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:15.831029Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:15.831036Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:15.831139Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20908 TClient is connected to server localhost:20908 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:16.334278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:16.339208Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:18.823384Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790358371091702:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:18.823572Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:18.823882Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790358371091738:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:18.828034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:18.838093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790358371091740:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:30:18.913392Z node 2 :TX_PROXY ERROR: Actor# [2:7486790358371091792:2338] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:18.978662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:19.145083Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[2:7486790362666059293:2352];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:19.145083Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7486790362666059289:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:19.145331Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[2:7486790362666059289:2350];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:19.145749Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075 ... 6224037932;self_id=[2:7486790366961028118:2543];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037932;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:31.983764Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7486790379845933338:3120];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:31.984206Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7486790379845933338:3120];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:31.999183Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7486790379845933329:3119];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:31.999481Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7486790379845933291:3110];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:31.999772Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7486790379845933329:3119];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:31.999951Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7486790379845933291:3110];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.001014Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038040;self_id=[2:7486790379845933279:3107];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.001192Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7486790379845933279:3107];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.001347Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7486790379845933304:3114];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.001484Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7486790379845933304:3114];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.001642Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[2:7486790379845933207:3104];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038000;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.001777Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038000;self_id=[2:7486790379845933207:3104];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038000;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.008785Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7486790379845933298:3112];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.009031Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[2:7486790379845933298:3112];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.010239Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7486790379845933282:3108];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.010450Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7486790379845933282:3108];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.025597Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7486790379845933140:3099];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038006;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.025604Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7486790379845933327:3118];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.025868Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037999;self_id=[2:7486790379845933204:3103];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037999;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.025970Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[2:7486790379845933140:3099];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038006;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.026021Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037999;self_id=[2:7486790379845933204:3103];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037999;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.026110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7486790379845933327:3118];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.032319Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7486790379845933488:3188];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.032344Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7486790379845933311:3117];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.032567Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7486790379845933488:3188];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.032575Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7486790379845933311:3117];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.032743Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7486790379845933310:3116];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.032759Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[2:7486790379845933132:3097];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038002;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.032883Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7486790379845933310:3116];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.032899Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038002;self_id=[2:7486790379845933132:3097];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038002;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.033031Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7486790379845933403:3159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.033166Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7486790379845933403:3159];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.037945Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7486790379845933350:3124];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.038311Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7486790379845933350:3124];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.038394Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7486790379845933476:3182];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18D7C798
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18D63E6A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F489EA31D8F
18. ??:0: ?? @ 0x7F489EA31E3F
19. ??:0: ??
@ 0x16366028 >> TBlobStorageProxyTest::TestProxyPutSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock >> KqpSinkLocks::TInvalidateOlap [GOOD] >> KqpSinkLocks::UncommittedRead >> TBlobStorageProxyTest::TestVPutVGet [GOOD] >> TBlobStorageProxyTest::TestVPutVGetLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 15959, MsgBus: 21749 2025-03-28T09:30:08.907405Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790318367306153:2123];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:08.907664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015c5/r3tmp/tmpnxtalH/pdisk_1.dat 2025-03-28T09:30:09.401892Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:09.404244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:09.404323Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:09.409996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15959, node 1 2025-03-28T09:30:09.513449Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:09.513488Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:09.513497Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:09.513613Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21749 TClient is connected to server localhost:21749 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:10.230101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
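The assertion above fails because the OLAP conflict test expects SUCCESS while the commit is rejected with "tablet lock have another internal generation counter: 18446744073709551615 != 0". Under optimistic locking, a reader records a lock generation and the tablet revalidates it at commit time; any mismatch means a conflicting write broke the lock, so the transaction aborts with issue code 2001. A self-contained sketch of that check (field and function names are assumptions for illustration, not YDB's actual datashard code):

#include <cstdint>
#include <cstdio>

struct TLockSnapshot {
    uint64_t LockId = 0;
    uint64_t Generation = 0;   // generation recorded when the read took the lock
};

// At commit the tablet compares the generation the reader recorded with its
// current one; a mismatch means the lock was broken by a concurrent write.
bool IsLockStillValid(const TLockSnapshot& taken, uint64_t currentGeneration) {
    return taken.Generation == currentGeneration;
}

int main() {
    TLockSnapshot taken{42, 0};
    uint64_t broken = ~0ull;   // 18446744073709551615, as printed in the log
    std::printf("lock valid: %s\n",
                IsLockStillValid(taken, broken) ? "yes" : "no -> ABORTED (2001)");
}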
2025-03-28T09:30:10.253903Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:12.225889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790335547175948:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.225966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790335547175942:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.226311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:12.237667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:12.258183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790335547175956:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:12.325574Z node 1 :TX_PROXY ERROR: Actor# [1:7486790335547176007:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:12.600503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:12.725505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:13.718666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:14.382984Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790318367306153:2123];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:14.468249Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:17.424263Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710666; 2025-03-28T09:30:17.425431Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790357022021513:2969], SessionActorId: [1:7486790344137119026:2969], Got LOCKS BROKEN for table. ShardID=72075186224037989, Sink=[1:7486790357022021513:2969].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-28T09:30:17.425962Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790357022021513:2969], SessionActorId: [1:7486790344137119026:2969], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7486790344137119026:2969]. isRollback=0 2025-03-28T09:30:17.426279Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTU2YTA4MTAtNjAwNTM5ZDYtMjBiNjFjYjMtOGMxZGJhODE=, ActorId: [1:7486790344137119026:2969], ActorState: ExecuteState, TraceId: 01jqe1d07x637yq3cc5xsvgrma, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7486790357022021514:2969] from: [1:7486790357022021513:2969] 2025-03-28T09:30:17.426357Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486790357022021514:2969] TxId: 281474976710666. Ctx: { TraceId: 01jqe1d07x637yq3cc5xsvgrma, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTU2YTA4MTAtNjAwNTM5ZDYtMjBiNjFjYjMtOGMxZGJhODE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-28T09:30:17.426546Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTU2YTA4MTAtNjAwNTM5ZDYtMjBiNjFjYjMtOGMxZGJhODE=, ActorId: [1:7486790344137119026:2969], ActorState: ExecuteState, TraceId: 01jqe1d07x637yq3cc5xsvgrma, Create QueryResponse for error on request, msg: 2025-03-28T09:30:17.427353Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710666; 2025-03-28T09:30:17.427534Z node 1 :TX_DATASHARD ERROR: Complete volatile write [1743154217469 : 281474976710666] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 23446, MsgBus: 29332 2025-03-28T09:30:23.383067Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790380482052665:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:23.383139Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015c5/r3tmp/tmpHAF4nb/pdisk_1.dat 2025-03-28T09:30:23.483840Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23446, node 2 2025-03-28T09:30:23.525703Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:23.525782Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:23.528930Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:23.550424Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:23.550443Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:23.550449Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:23.550570Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29332 TClient is connected to server localhost:29332 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:30:23.995083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:24.003249Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:26.560807Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790393366955210:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:26.560870Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790393366955200:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:26.561367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:26.564979Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:26.575504Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790393366955229:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:30:26.685748Z node 2 :TX_PROXY ERROR: Actor# [2:7486790393366955280:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:26.756297Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:26.825452Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:28.007160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:28.940604Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790380482052665:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:29.010792Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:29.997985Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDJiMjNmOWMtNTkzZDg1MGUtYWJhMjkzOTktYTI3Zjk3Y2I=, ActorId: [2:7486790406251865454:2969], ActorState: ExecuteState, TraceId: 01jqe1dcnn52xt3c6y73c92dgm, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TBlobStorageProxyTest::TestPartialGetBlock [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror >> TBlobStorageProxyTest::TestProxyPutInvalidSize >> TBlobStorageProxyTest::TestBlock [GOOD] >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob >> TBlobStorageProxyTest::TestSingleFailureMirror >> TBlobStorageProxyTest::TestDoubleFailure >> KqpSinkMvcc::SnapshotExpiration [GOOD] >> KqpSinkTx::DeferredEffects >> TBlobStorageProxyTest::TestGetMultipart [GOOD] >> TBlobStorageProxyTest::TestGetFail >> TBlobStorageProxyTest::TestProxyGetSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout >> TBlobStorageProxyTest::TestInFlightPuts [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage >> TBlobStorageProxyTest::TestNormal >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] >> TBlobStorageProxyTest::TestCollectGarbagePersistence >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> TBlobStorageProxyTest::TestProxyPutInvalidSize [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] >> KqpSnapshotIsolation::TSimpleOlap [GOOD] >> TBlobStorageProxyTest::TestBlockPersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage >> KqpSnapshotIsolation::TReadOnlyOlap [GOOD] |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] 
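"Transaction locks invalidated" (issue code 2001) and "tx has deferred effects, but locks are broken" surface to clients as an ABORTED status, which is normally handled by rerunning the whole transaction from the top rather than asserting SUCCESS on the first attempt, as the failing tests above do. A minimal, self-contained sketch of that client-side retry discipline (generic shapes only; this is not the YDB SDK API):

#include <chrono>
#include <cstdio>
#include <functional>
#include <thread>

enum class EStatus { SUCCESS, ABORTED, OTHER_ERROR };

// Rerun the transaction body while it keeps failing with ABORTED
// (e.g. broken optimistic locks); back off exponentially between attempts.
EStatus RunWithRetry(const std::function<EStatus()>& txnBody, int maxAttempts = 5) {
    using namespace std::chrono_literals;
    auto delay = 20ms;
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        EStatus st = txnBody();            // each attempt is a fresh transaction
        if (st != EStatus::ABORTED) {
            return st;                     // SUCCESS, or a non-retryable error
        }
        std::this_thread::sleep_for(delay);
        delay *= 2;                        // exponential backoff before retrying
    }
    return EStatus::ABORTED;               // still conflicting after maxAttempts
}

int main() {
    int calls = 0;
    // Simulate a body whose locks are broken twice before the commit succeeds.
    EStatus st = RunWithRetry([&] {
        return ++calls < 3 ? EStatus::ABORTED : EStatus::SUCCESS;
    });
    std::printf("finished after %d attempts: %s\n", calls,
                st == EStatus::SUCCESS ? "SUCCESS" : "ABORTED");
}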
|90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TBlobStorageProxyTest::TestGetFail [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe >> TBlobStorageProxyTest::TestDoubleGroups [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 5702, MsgBus: 4621 2025-03-28T09:30:02.138330Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790290756344275:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.138387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015f4/r3tmp/tmpsmrXiB/pdisk_1.dat 2025-03-28T09:30:02.800879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:02.800999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:02.807532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:02.811207Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5702, node 1 2025-03-28T09:30:02.941795Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:02.941821Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:02.941826Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:02.941969Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4621 TClient is connected to server localhost:4621 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:03.821962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:03.837086Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:05.664849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790303641246816:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.665121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.665436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790303641246837:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.669804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:05.679211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790303641246839:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:05.754725Z node 1 :TX_PROXY ERROR: Actor# [1:7486790303641246890:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:06.187691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.298094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:07.175563Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790290756344275:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.185754Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:07.461016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.032772Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjcyNTdlZjAtYjI2MTgwNTEtMzlmNTMzMzItZjFkMmY0Y2M=, ActorId: [1:7486790316526157125:2971], ActorState: ExecuteState, TraceId: 01jqe1cr5536acsczevcdj0abf, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T09:30:17.806800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:17.806848Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18D724E7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18D63392 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7FFAE5DA0D8F 18. ??:0: ?? @ 0x7FFAE5DA0E3F 19. ??:0: ?? @ 0x16366028 Trying to start YDB, gRPC: 1402, MsgBus: 15924 2025-03-28T09:30:20.764893Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790370423456513:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:20.771766Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015f4/r3tmp/tmpwtkWM7/pdisk_1.dat 2025-03-28T09:30:20.959031Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:20.964114Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:20.964201Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:20.966229Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1402, node 2 2025-03-28T09:30:21.023488Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:21.023522Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:21.023534Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:21.023663Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15924 TClient is connected to server localhost:15924 WaitRootIsUp 'Root'... 
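The PRECONDITION_FAILED above comes from the suite's OLTP variant: SnapshotRW is rejected for row-oriented tables and is only permitted for column (OLAP) tables. A hedged sketch of declaring such a table through the C++ SDK, using the illustrative Group/Name/Amount schema these KQP suites conventionally use (treat the path and schema as assumptions):

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

using namespace NYdb;
using namespace NYdb::NTable;

TStatus CreateColumnStoreTable(TTableClient& client) {
    return client.RetryOperationSync([](TSession session) {
        return session.ExecuteSchemeQuery(R"(
            CREATE TABLE `/Root/Test` (
                Group  Uint32 NOT NULL,  -- column tables require NOT NULL key columns
                Name   String NOT NULL,
                Amount Uint64,
                PRIMARY KEY (Group, Name)
            ) WITH (STORE = COLUMN);     -- OLAP store; row tables omit this clause
        )").GetValueSync();
    });
}
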
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:30:21.494912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:30:21.499987Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:23.899062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790383308359037:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource p ... 281474976715665;tx_id=281474976715669;d=2.001696s; 2025-03-28T09:30:35.551380Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[2:7486790391898295724:2589];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037950;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.551460Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;self_id=[2:7486790391898295186:2504];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037980;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.551559Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;self_id=[2:7486790391898295840:2611];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037941;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.551645Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037948;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.551739Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037997;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.551746Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[2:7486790391898295833:2610];ev=NActors::TEvents::TEvWakeup;fline=sync.h:19;event=tx_timeout;lock=281474976715665;tx_id=281474976715669;d=2.001000s; 2025-03-28T09:30:35.551840Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037950;self_id=[2:7486790391898295724:2589];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037950;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.551841Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[2:7486790391898295833:2610];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037922;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.551952Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;self_id=[2:7486790391898295186:2504];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037980;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.551996Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[2:7486790391898295833:2610];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037922;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.552058Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037941;self_id=[2:7486790391898295840:2611];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037941;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.552628Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;self_id=[2:7486790391898295974:2635];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037918;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.552670Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037990;self_id=[2:7486790391898295190:2506];ev=NActors::TEvents::TEvWakeup;fline=sync.h:19;event=tx_timeout;lock=281474976715665;tx_id=281474976715669;d=2.000950s; 2025-03-28T09:30:35.552750Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037990;self_id=[2:7486790391898295190:2506];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037990;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.552801Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;self_id=[2:7486790391898295974:2635];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037918;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.552936Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037990;self_id=[2:7486790391898295190:2506];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037990;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.553157Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[2:7486790391898295654:2573];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037930;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.553211Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[2:7486790391898295749:2595];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037932;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.553337Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037932;self_id=[2:7486790391898295749:2595];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037932;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.553364Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037930;self_id=[2:7486790391898295654:2573];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037930;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.553802Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037980;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-28T09:30:35.554025Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-28T09:30:35.554142Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037967;self_id=[2:7486790391898295456:2521];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037967;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.554176Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;self_id=[2:7486790391898295221:2511];ev=NActors::TEvents::TEvWakeup;fline=sync.h:19;event=tx_timeout;lock=281474976715665;tx_id=281474976715669;d=2.001405s; 2025-03-28T09:30:35.554284Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037938;self_id=[2:7486790391898295514:2527];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037938;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.554334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037967;self_id=[2:7486790391898295456:2521];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037967;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.554442Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037938;self_id=[2:7486790391898295514:2527];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037938;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.554543Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037980;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.554738Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037990;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-28T09:30:35.555484Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.555543Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037983;tx_state=TTxProgressTx::Execute;tx_current=281474976715669;tx_id=281474976715669;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715669; 2025-03-28T09:30:35.555688Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;self_id=[2:7486790391898295221:2511];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037983;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.555765Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[2:7486790391898295742:2593];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037928;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.555869Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;self_id=[2:7486790391898295221:2511];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037983;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.555884Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[2:7486790391898295742:2593];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037928;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.556201Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037990;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.556683Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[2:7486790391898295883:2624];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037910;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.557078Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037910;self_id=[2:7486790391898295883:2624];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037910;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.557150Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037983;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. 
/tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18D74DF8 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18D637EA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7FFAE5DA0D8F 18. ??:0: ?? @ 0x7FFAE5DA0E3F 19. ??:0: ?? @ 0x16366028 >> TBlobStorageProxyTest::TestProxyLongTailDiscover [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi >> TBlobStorageProxyTest::TestSingleFailureMirror [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetMany [GOOD] |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestGetFail [GOOD] >> TBlobStorageProxyTest::TestVPutVGetPersistence >> TBlobStorageProxyTest::TestPersistence >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestDoubleFailure [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestNormal [GOOD] >> TBlobStorageProxyTest::TestNormalMirror ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] Test command err: 2025-03-28T09:30:38.413488Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001cdf/r3tmp/tmpiyiMuR//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-03-28T09:30:38.440541Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOlap [GOOD] Test command err: Trying to start YDB, gRPC: 61814, MsgBus: 26637 2025-03-28T09:30:04.611202Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790300856777599:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:04.611582Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015d0/r3tmp/tmpmKzfTK/pdisk_1.dat 2025-03-28T09:30:05.050588Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:05.081267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:05.081377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 61814, node 1 2025-03-28T09:30:05.083774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:05.147197Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:05.147255Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:05.147268Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:05.147386Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26637 TClient is connected to server localhost:26637 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:05.652756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:07.771869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790313741680134:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:07.771992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:07.772679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790313741680161:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:07.778254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:07.798442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790313741680163:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:07.879134Z node 1 :TX_PROXY ERROR: Actor# [1:7486790313741680214:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:08.219399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:08.347145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.559481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.611347Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790300856777599:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:09.611444Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:10.988491Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2IzOTNjMDMtMzU4ZTkwMC1mODY0NjkxNi05YzE1YTU5NQ==, ActorId: [1:7486790326626590499:2970], ActorState: ExecuteState, TraceId: 01jqe1csz32f0q53wvbchbaajs, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18D6BF57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18D62D12 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7EFE1A320D8F 18. ??:0: ?? @ 0x7EFE1A320E3F 19. ??:0: ?? @ 0x16366028 Trying to start YDB, gRPC: 22670, MsgBus: 1610 2025-03-28T09:30:20.715271Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790370381990749:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:20.715355Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015d0/r3tmp/tmpJa1V6J/pdisk_1.dat 2025-03-28T09:30:20.838155Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:20.867509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:20.867647Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:20.868756Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22670, node 2 2025-03-28T09:30:20.935186Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:20.935212Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:20.935220Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:20.935324Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1610 TClient is connected to server localhost:1610 WaitRootIsUp 'Root'... 
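The ", with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)" lines recurring through these dumps are produced by library/cpp/testing/unittest, which on a failed equality assertion prints both values plus a compact character-level diff of their printed forms. A minimal self-contained sketch reproducing that output shape (not the suite's actual assertion, though the stack traces above show the same RaiseError path):

#include <library/cpp/testing/unittest/registar.h>

Y_UNIT_TEST_SUITE(StatusDiffExample) {
    Y_UNIT_TEST(PrintsCharLevelDiff) {
        const TString expected = "SUCCESS";
        const TString actual = "PRECONDITION_FAILED";
        // On mismatch the framework reports (PRECONDITION_FAILED != SUCCESS)
        // together with the char-level diff (PRE|SUC)C(ONDITION_FAIL|)E(D|SS),
        // matching the failures logged above.
        UNIT_ASSERT_VALUES_EQUAL_C(actual, expected, "status mismatch");
    }
}
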
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:21.389726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:21.396884Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:23.813104Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790383266893290:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:23.813160Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790383266893302:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found o ... 186224038056;self_id=[2:7486790409036703367:3388];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038056;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.795861Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7486790409036703434:3398];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.796032Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;self_id=[2:7486790409036703128:3353];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038096;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.796221Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7486790409036703572:3421];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.796403Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7486790409036703614:3428];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.796627Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7486790409036703545:3413];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.796783Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[2:7486790409036703363:3386];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.796946Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7486790409036703296:3374];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.797124Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7486790409036703358:3384];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.797295Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7486790409036703361:3385];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.797506Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;self_id=[2:7486790409036703218:3365];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038084;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.797629Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7486790409036703439:3399];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.797777Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7486790409036703462:3403];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.797942Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038022;self_id=[2:7486790409036703541:3412];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.798113Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7486790409036703351:3382];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.798339Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[2:7486790409036703475:3407];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.798499Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7486790409036703374:3391];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.798673Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7486790409036703369:3389];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.799279Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7486790409036703548:3414];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.799465Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7486790409036703479:3409];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.799630Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7486790409036703471:3405];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.799831Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7486790409036703353:3383];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038051;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.799844Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7486790409036703564:3418];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.800010Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7486790409036703560:3417];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.800023Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7486790409036703570:3420];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.800310Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7486790409036703552:3415];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.800475Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038047;self_id=[2:7486790409036703240:3367];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.800635Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038036;self_id=[2:7486790409036703586:3426];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038036;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.800787Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7486790409036703580:3424];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.800896Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7486790409036703287:3373];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038050;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.800942Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7486790409036703590:3427];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.801075Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7486790409036703532:3411];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.801078Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[2:7486790409036703584:3425];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.801190Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[2:7486790409036703473:3406];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.801233Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7486790409036703441:3400];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.801375Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[2:7486790409036703469:3404];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038035;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.801538Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7486790409036703422:3394];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.802137Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7486790409036703578:3423];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.802438Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7486790409036703433:3397];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.802464Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038040;self_id=[2:7486790409036703575:3422];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.802611Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[2:7486790409036703566:3419];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.805933Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7486790409036703680:3429];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.974473Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667; 2025-03-28T09:30:32.976074Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-28T09:30:35.821827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:35.821858Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOlap [GOOD] Test command err: Trying to start YDB, gRPC: 2493, MsgBus: 4058 2025-03-28T09:30:02.152199Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790289481817700:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.157824Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015ff/r3tmp/tmpjDL1By/pdisk_1.dat 2025-03-28T09:30:02.739347Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:02.767653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:02.767748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:02.770038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2493, node 1 2025-03-28T09:30:02.927423Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:02.927443Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:02.927456Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:02.929418Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4058 TClient is connected to server localhost:4058 WaitRootIsUp 'Root'... 
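KqpSnapshotIsolation::TReadOnlyOlap, reported [GOOD] above, exercises snapshot read-only semantics. In the public C++ SDK the corresponding client-side mode is TTxSettings::SnapshotRO(), which pins every read of the transaction to one consistent point-in-time snapshot, unaffected by concurrent committed writers. A minimal sketch, again against the illustrative `/Root/Test` schema:

#include <ydb/public/sdk/cpp/client/ydb_table/table.h>

using namespace NYdb;
using namespace NYdb::NTable;

TDataQueryResult ReadUnderSnapshot(TSession session) {
    return session.ExecuteDataQuery(
        "SELECT Group, Name, Amount FROM `/Root/Test` WHERE Group = 1;",
        // SnapshotRO: all reads in this transaction observe one snapshot,
        // so repeated SELECTs are mutually consistent.
        TTxControl::BeginTx(TTxSettings::SnapshotRO()).CommitTx())
        .GetValueSync();
}
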
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:03.848453Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:03.881130Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:05.649011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790302366720252:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.649108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790302366720260:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.649211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.653305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:05.663111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790302366720266:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:05.744864Z node 1 :TX_PROXY ERROR: Actor# [1:7486790302366720317:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:06.188065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.298493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:07.253271Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790289481817700:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:07.259115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:07.511850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:09.106664Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGFmNGI1NGMtYjk3MzcyMjUtZTk5Yjk3MTItYTA5ZDhlNzQ=, ActorId: [1:7486790315251630582:2970], ActorState: ExecuteState, TraceId: 01jqe1cr70aygshqm9zgm6nmh9, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T09:30:17.731349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:17.731377Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18D724E7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18D635BA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F19F8E21D8F 18. ??:0: ?? @ 0x7F19F8E21E3F 19. ??:0: ?? @ 0x16366028 Trying to start YDB, gRPC: 64175, MsgBus: 5774 2025-03-28T09:30:20.716360Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790370285099431:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:20.716422Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015ff/r3tmp/tmpx2qPPz/pdisk_1.dat 2025-03-28T09:30:20.873126Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:20.892676Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:20.892773Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:20.894285Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64175, node 2 2025-03-28T09:30:20.983255Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:20.983280Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:20.983288Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:20.983400Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5774 TClient is connected to server localhost:5774 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:30:21.411132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:30:23.876202Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790383170001961:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:23.876506Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ... EvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038089;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.568823Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7486790408939812518:3481];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.569024Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[2:7486790408939812455:3454];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038065;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.569149Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[2:7486790408939812518:3481];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.569372Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[2:7486790408939812455:3454];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038065;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.569378Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7486790408939812428:3440];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038092;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.569672Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038086;self_id=[2:7486790408939812389:3426];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038086;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.569699Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7486790408939812428:3440];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038092;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.569908Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7486790408939812525:3486];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038050;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.570036Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038086;self_id=[2:7486790408939812389:3426];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038086;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.570176Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7486790408939812525:3486];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038050;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.570333Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7486790408939812445:3449];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038090;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.570481Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038077;self_id=[2:7486790408939812403:3427];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038077;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.570679Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[2:7486790408939812445:3449];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038090;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.570950Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[2:7486790408939812721:3506];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038068;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.570995Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[2:7486790408939812403:3427];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038077;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.571255Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[2:7486790408939812467:3461];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038064;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.571298Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[2:7486790408939812721:3506];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038068;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.571527Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[2:7486790408939812467:3461];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038064;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.571558Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[2:7486790408939812482:3466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.571735Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[2:7486790408939812460:3456];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038062;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.571858Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[2:7486790408939812482:3466];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.571990Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[2:7486790408939812460:3456];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038062;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.572316Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[2:7486790391759938652:2628];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037969;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.572377Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7486790408939812574:3501];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038070;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.572603Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[2:7486790391759938652:2628];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037969;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.572727Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038070;self_id=[2:7486790408939812574:3501];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038070;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.572856Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[2:7486790408939812430:3441];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038074;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.572973Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7486790408939812550:3495];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.573171Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[2:7486790408939812430:3441];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038074;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.573306Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[2:7486790408939812550:3495];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.722193Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037997;self_id=[2:7486790391759938386:2570];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037997;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.722407Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037997;self_id=[2:7486790391759938386:2570];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037997;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.729309Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715667;
2025-03-28T09:30:32.730544Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;commit_tx_id=281474976715667;commit_lock_id=281474976715666;fline=manager.cpp:94;broken_lock_id=281474976715665;
2025-03-28T09:30:32.730792Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.805112Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037964;self_id=[2:7486790391759938678:2638];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037964;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.810565Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037964;self_id=[2:7486790391759938678:2638];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037964;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:32.822625Z node 2 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037978;self_id=[2:7486790391759938495:2609];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037978;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.823013Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037978;self_id=[2:7486790391759938495:2609];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037978;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.823202Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037958;self_id=[2:7486790391759938707:2641];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037958;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.823341Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037958;self_id=[2:7486790391759938707:2641];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037958;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.851014Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;self_id=[2:7486790391759938519:2612];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037981;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:32.851428Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037981;self_id=[2:7486790391759938519:2612];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037981;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-28T09:30:35.842796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:35.842830Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 16463, MsgBus: 2727 2025-03-28T09:30:18.351392Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790359674236620:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:18.351465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015b5/r3tmp/tmpDJxymz/pdisk_1.dat 2025-03-28T09:30:18.689213Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16463, node 1 2025-03-28T09:30:18.724662Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:18.724844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:18.726031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:18.739852Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:18.739888Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:18.739905Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:18.740084Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:2727 TClient is connected to server localhost:2727 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:19.237443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:21.385047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790372559139154:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:21.385321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790372559139173:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:21.385451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:21.390461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:21.400231Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790372559139184:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:21.484138Z node 1 :TX_PROXY ERROR: Actor# [1:7486790372559139235:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:21.725909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:21.819491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:22.691943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:23.433253Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790359674236620:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:23.439591Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:24.265501Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2025-03-28T09:30:24.275854Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-28T09:30:24.276037Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2025-03-28T09:30:24.276312Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790385444049931:2969], Table: `/Root/Test` ([72057594046644480:6:1]), SessionActorId: [1:7486790381149082331:2969]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7486790385444049931:2969].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2025-03-28T09:30:24.276909Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790385444049924:2969], SessionActorId: [1:7486790381149082331:2969], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7486790381149082331:2969]. isRollback=0 2025-03-28T09:30:24.277154Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWM4ZGZkYzAtODNhMzA2ZGEtMTdhMTViYy1kMjA5M2Q1ZA==, ActorId: [1:7486790381149082331:2969], ActorState: ExecuteState, TraceId: 01jqe1d72gf3nwhhhenqtqgw2m, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7486790385444049925:2969] from: [1:7486790385444049924:2969] 2025-03-28T09:30:24.277234Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486790385444049925:2969] TxId: 281474976710667. Ctx: { TraceId: 01jqe1d72gf3nwhhhenqtqgw2m, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OWM4ZGZkYzAtODNhMzA2ZGEtMTdhMTViYy1kMjA5M2Q1ZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } 2025-03-28T09:30:24.278137Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWM4ZGZkYzAtODNhMzA2ZGEtMTdhMTViYy1kMjA5M2Q1ZA==, ActorId: [1:7486790381149082331:2969], ActorState: ExecuteState, TraceId: 01jqe1d72gf3nwhhhenqtqgw2m, Create QueryResponse for error on request, msg:
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 12833, MsgBus: 16241 2025-03-28T09:30:30.345074Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790413911164478:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:30.345131Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015b5/r3tmp/tmpbCssLC/pdisk_1.dat 2025-03-28T09:30:30.449373Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12833, node 2 2025-03-28T09:30:30.485749Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:30.485835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:30.490834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:30.523235Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:30.523272Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:30.523282Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:30.523388Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16241 TClient is connected to server localhost:16241 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:30.938781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:30.944685Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:33.125881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790426796066995:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:33.125952Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790426796067013:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:33.126046Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:33.129398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:33.137452Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790426796067033:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:30:33.235806Z node 2 :TX_PROXY ERROR: Actor# [2:7486790426796067084:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:33.279849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:33.316563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:34.306446Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:35.345673Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790413911164478:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:35.345742Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:35.847629Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=M2FjYjU0OGUtNWQ1YjZiODAtMjA0NDk2YTMtNWM1ZmIzNzE=, ActorId: [2:7486790435386010142:2968], ActorState: ExecuteState, TraceId: 01jqe1djatb6bgdt3b90re9qhf, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> KqpSinkMvcc::OlapMultiSinks [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] >> KqpSinkTx::Interactive [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 28940, MsgBus: 23008 2025-03-28T09:30:02.415492Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790290189449627:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.431295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015db/r3tmp/tmpMg4xhW/pdisk_1.dat 2025-03-28T09:30:03.026110Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:03.029669Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:03.029765Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:03.033003Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28940, node 1 2025-03-28T09:30:03.123277Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:03.123328Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:03.123344Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:03.123628Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23008 TClient is connected to server localhost:23008 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:30:03.845807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:03.873370Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:05.809731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790303074352038:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.809813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790303074352048:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.809886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.814356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:05.827190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790303074352053:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:05.935980Z node 1 :TX_PROXY ERROR: Actor# [1:7486790303074352104:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:06.262659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.416875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790307369319610:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:06.416875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790307369319597:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:06.417050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790307369319597:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:06.417124Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790307369319610:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:06.417319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790307369319610:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:30:06.417319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790307369319597:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:30:06.417483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790307369319597:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:30:06.417506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790307369319610:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:30:06.417615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790307369319597:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:30:06.417624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790307369319610:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:30:06.417811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790307369319610:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-03-28T09:30:06.417935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790307369319610:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:30:06.418048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790307369319610:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:30:06.418162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790307369319610:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:30:06.418276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790307369319610:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:30:06.418379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790307369319610:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:30:06.418499Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790307369319610:2349];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:30:06.427161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790307369319597:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:30:06.427362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790307369319597:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:30:06.427477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790307369319597:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:30:06.427594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790307369319597:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:30:06.427697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790307369319597:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:30:06.427839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790307369319597:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:30:06.427969Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7486790307369319597:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:30:06.479083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486790307369319592:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:06.479132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486790307369319592:2344];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abs ... 9:30:35.702968Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7486790417362671132:3307];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.703054Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7486790417362671297:3323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.703913Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7486790417362671188:3312];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038051;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.704057Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7486790417362671289:3321];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038029;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.704125Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7486790417362671188:3312];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038051;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.704290Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[2:7486790417362671289:3321];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038029;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.704331Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7486790417362671347:3329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.704506Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7486790417362671347:3329];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.704727Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7486790417362671359:3332];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.704882Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7486790417362671359:3332];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.705041Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038028;self_id=[2:7486790417362671345:3328];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.705212Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7486790417362671345:3328];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.706863Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7486790417362671377:3341];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.707049Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7486790417362671377:3341];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.707924Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[2:7486790417362671079:3299];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.708139Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[2:7486790417362671079:3299];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.711098Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7486790417362671174:3311];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.711335Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[2:7486790417362671174:3311];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.712249Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7486790417362671366:3335];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.712436Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[2:7486790417362671366:3335];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.712624Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7486790417362671388:3345];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.712811Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7486790417362671388:3345];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.713020Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7486790417362671384:3344];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.713182Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[2:7486790417362671384:3344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.714069Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7486790417362671350:3330];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.714269Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7486790417362671350:3330];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.714468Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7486790417362671390:3346];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.714657Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[2:7486790417362671390:3346];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.715123Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7486790417362671361:3333];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.715289Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[2:7486790417362671361:3333];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.715462Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7486790417362671334:3326];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.715671Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7486790417362671364:3334];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.715850Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7486790417362671364:3334];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.716155Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7486790417362671446:3355];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.716320Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7486790417362671446:3355];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.716803Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7486790417362671397:3348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
2025-03-28T09:30:35.717038Z node 2 :TX_COLUMNSHARD
WARN: tablet_id=72075186224038003;self_id=[2:7486790417362671397:3348];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.721175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7486790417362671489:3356];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.721410Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7486790417362671489:3356];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.722430Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7486790417362671373:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.722636Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[2:7486790417362671373:3340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.743527Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7486790417362671322:3325];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:35.748435Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7486790417362671162:3309];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-28T09:30:37.064528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:37.064560Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] >> TBlobStorageProxyTest::TestSingleFailure |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] Test command err: 2025-03-28T09:30:39.028773Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001cd4/r3tmp/tmpuj2UBK//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-03-28T09:30:39.032227Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001cd4/r3tmp/tmpuj2UBK//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-03-28T09:30:39.049656Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# 
true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:30:39.050006Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, 
errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:30:42.714415Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001cd4/r3tmp/tmpg6vxPJ//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-03-28T09:30:42.716942Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001cd4/r3tmp/tmpg6vxPJ//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-03-28T09:30:42.754511Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:30:42.759492Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] Test command err: 2025-03-28T09:30:41.606444Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001d12/r3tmp/tmpngsq6R//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-03-28T09:30:41.606997Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001d12/r3tmp/tmpngsq6R//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-03-28T09:30:41.623549Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# 
true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:30:41.627501Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, 
errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] >> TBlobStorageProxyTest::TestPersistence [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 64608, MsgBus: 30116 2025-03-28T09:30:19.698472Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790366416535351:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:19.702912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015aa/r3tmp/tmpc22oWg/pdisk_1.dat 2025-03-28T09:30:20.087802Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64608, node 1 2025-03-28T09:30:20.152278Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:20.152671Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:20.173224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:20.223259Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:20.223278Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:20.223284Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:20.223381Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30116 TClient is connected to server localhost:30116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:30:20.720838Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:22.866758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790379301437905:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:22.866875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:22.867106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790379301437917:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:22.871769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:22.881022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790379301437919:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:22.946533Z node 1 :TX_PROXY ERROR: Actor# [1:7486790379301437970:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:23.237580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:23.361822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:24.374728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:25.139565Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790366416535351:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:25.208394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:26.087985Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDBkNmNhNjUtNTljN2FhMGUtNmNlMDg4M2EtOWFlNDAxYmM=, ActorId: [1:7486790392186348410:2969], ActorState: ReadyState, TraceId: 01jqe1d8wregr96e89gar4dgt7, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 61325, MsgBus: 13244 2025-03-28T09:30:32.045693Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790418533900572:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:32.045760Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015aa/r3tmp/tmpsSzb7R/pdisk_1.dat 2025-03-28T09:30:32.147983Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61325, node 2 2025-03-28T09:30:32.181072Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:32.181169Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:32.194462Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:32.215424Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:32.215446Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:32.215455Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:32.215571Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13244 TClient is connected to server localhost:13244 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:32.634655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:35.459587Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790431418803116:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:35.459639Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790431418803125:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:35.459683Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:35.463706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:35.472618Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790431418803130:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:30:35.529279Z node 2 :TX_PROXY ERROR: Actor# [2:7486790431418803181:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:35.572714Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:35.630591Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:36.763382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:37.520138Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790418533900572:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:37.627603Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 15912, MsgBus: 20550 2025-03-28T09:30:02.149838Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790289485139581:2086];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.149945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00163d/r3tmp/tmpdRGXa1/pdisk_1.dat 2025-03-28T09:30:02.693904Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:02.697646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:02.697760Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:02.724470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15912, node 1 2025-03-28T09:30:02.930427Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:02.930457Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:02.930465Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:02.930559Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20550 TClient is 
connected to server localhost:20550 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:03.855623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:05.804637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790302370042103:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.804734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790302370042084:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.804896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.808940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:05.821093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790302370042113:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:05.899072Z node 1 :TX_PROXY ERROR: Actor# [1:7486790302370042164:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:06.205592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.381168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790306665009671:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:06.381454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790306665009671:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:06.381704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790306665009671:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:30:06.381856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790306665009671:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:30:06.381958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790306665009671:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:30:06.382097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790306665009671:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:30:06.382212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790306665009671:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:30:06.382346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790306665009671:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:30:06.382478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790306665009671:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:30:06.382580Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790306665009671:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:30:06.382726Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486790306665009671:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:30:06.382846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790306665009671:2352];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:30:06.385408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790306665009685:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:06.385471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790306665009685:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:06.385697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790306665009685:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:30:06.385800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790306665009685:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:30:06.385926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790306665009685:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:30:06.386041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790306665009685:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:30:06.386173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790306665009685:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:30:06.386315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790306665009685:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:30:06.386451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790306665009685:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:30:06.386571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790306665009685:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:30:06.386704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790306665009685:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:30:06.386818Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7486790306665009685:2353];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:30:06.443824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790306665009662:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:06.443889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790306665009662:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:06.444117Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;sel ... blet_id=72075186224038078;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.270886Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[2:7486790419609684600:3282];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.274985Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[2:7486790419609684590:3277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038079;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.275754Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7486790419609684469:3261];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038092;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.275967Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038092;self_id=[2:7486790419609684469:3261];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038092;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.276122Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[2:7486790419609684590:3277];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038079;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.277341Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7486790419609684695:3299];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.277589Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038058;self_id=[2:7486790419609684702:3302];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038058;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.277660Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;task_id=4e7315f6-bb711f0-a24eed9c-ab7b843a;tablet_id=72075186224037993;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.278015Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037993;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.278922Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[2:7486790419609684695:3299];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.279107Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038058;self_id=[2:7486790419609684702:3302];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038058;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.279750Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[2:7486790419609684559:3266];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.279984Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[2:7486790419609684559:3266];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.283069Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7486790419609684739:3309];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.289381Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[2:7486790419609684739:3309];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.295055Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7486790419609684372:3255];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038095;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.295324Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[2:7486790419609684372:3255];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038095;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.296064Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;self_id=[2:7486790419609684731:3306];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038084;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.296267Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038084;self_id=[2:7486790419609684731:3306];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038084;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.296438Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;self_id=[2:7486790419609684611:3287];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038080;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.296587Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038080;self_id=[2:7486790419609684611:3287];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038080;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.296637Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7486790423904652259:3329];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.296827Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7486790423904652259:3329];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.298681Z node 2 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7486790419609684686:3293];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038081;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.298921Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7486790419609684686:3293];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038081;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.299090Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7486790419609684797:3323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.299247Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7486790419609684797:3323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.304276Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7486790423904652390:3363];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.304612Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7486790419609684573:3267];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038072;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.306064Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7486790419609684602:3283];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.306453Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[2:7486790419609684475:3262];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038091;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.307210Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[2:7486790419609684741:3310];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.307216Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[2:7486790419609684609:3286];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.307455Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[2:7486790419609684698:3301];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038071;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.307633Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7486790423904652390:3363];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.307790Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[2:7486790419609684573:3267];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038072;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 
2025-03-28T09:30:37.307923Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038076;self_id=[2:7486790419609684602:3283];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038076;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.308062Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[2:7486790419609684475:3262];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038091;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2025-03-28T09:30:37.312452Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038071;self_id=[2:7486790419609684698:3301];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038071;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.312790Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7486790423904652403:3368];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.313013Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[2:7486790419609684582:3272];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038074;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.313173Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7486790423904652301:3337];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:37.313391Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038087;self_id=[2:7486790419609684477:3263];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038087;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:38.281617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:38.281649Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD] Test command err: Trying to start YDB, gRPC: 12943, MsgBus: 63414 2025-03-28T09:30:02.139012Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790292477621539:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:02.139144Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015e2/r3tmp/tmpaD2L2U/pdisk_1.dat 2025-03-28T09:30:02.782652Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:02.791215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:02.791315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:02.795254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12943, node 1 2025-03-28T09:30:02.927207Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2025-03-28T09:30:02.927232Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:02.927238Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:02.927365Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63414 TClient is connected to server localhost:63414 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:03.826828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:03.840089Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:05.667064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790305362524066:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.667286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.667653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790305362524101:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:05.671711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:05.696683Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790305362524103:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:05.798458Z node 1 :TX_PROXY ERROR: Actor# [1:7486790305362524154:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:06.208258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:06.382063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790309657491643:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:06.382354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790309657491643:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:06.382678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790309657491643:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:30:06.382834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790309657491643:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:30:06.383003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790309657491643:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:30:06.383136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790309657491643:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:30:06.383269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790309657491643:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:30:06.383433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790309657491643:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:30:06.383619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790309657491643:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:30:06.383752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790309657491643:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:30:06.383870Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7486790309657491643:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:30:06.384003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790309657491643:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:30:06.396978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790309657491645:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:06.397113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790309657491645:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:06.397386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790309657491645:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:30:06.397506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790309657491645:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:30:06.397658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790309657491645:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:30:06.397758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790309657491645:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:30:06.397921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790309657491645:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:30:06.398042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790309657491645:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:30:06.398180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790309657491645:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:30:06.398306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790309657491645:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:30:06.398469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486790309657491645:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:30:06.398592Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486790309657491645:2346];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:30:06.452792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486790309657491649:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:06.452864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486790309657491649:2347];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abs ... 8;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165257Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037971;self_id=[2:7486790402092483723:2562];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037971;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165258Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037964;self_id=[2:7486790402092483764:2584];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037964;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165317Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037977;self_id=[2:7486790402092483655:2552];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037977;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165322Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037973;self_id=[2:7486790402092483745:2572];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037973;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165375Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037963;self_id=[2:7486790402092483731:2567];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037963;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165386Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037972;self_id=[2:7486790402092483922:2665];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037972;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165438Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037986;self_id=[2:7486790402092483643:2548];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037986;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165447Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037979;self_id=[2:7486790402092483012:2492];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037979;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165504Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037987;self_id=[2:7486790402092483576:2534];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037987;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165509Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037956;self_id=[2:7486790402092483806:2606];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037956;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165578Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037965;self_id=[2:7486790402092483784:2594];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037965;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165586Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037996;self_id=[2:7486790402092483017:2494];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037996;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165662Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037966;self_id=[2:7486790402092483852:2619];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037966;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165668Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037970;self_id=[2:7486790402092483856:2621];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037970;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165724Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037967;self_id=[2:7486790402092483743:2571];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037967;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165726Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037988;self_id=[2:7486790402092483626:2536];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037988;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165784Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037974;self_id=[2:7486790402092483928:2667];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037974;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165813Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037969;self_id=[2:7486790402092483769:2586];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037969;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165863Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037980;self_id=[2:7486790402092483695:2561];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037980;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only 
single operation is supported;tx_id=0; 2025-03-28T09:30:37.165911Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037968;self_id=[2:7486790402092483840:2612];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037968;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.165941Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037983;self_id=[2:7486790402092483616:2535];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037983;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.166011Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037961;self_id=[2:7486790402092483780:2592];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037961;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.166025Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037981;self_id=[2:7486790402092483663:2555];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037981;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.166087Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037985;self_id=[2:7486790402092483653:2551];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037985;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.166101Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037975;self_id=[2:7486790402092483930:2668];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037975;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.166171Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037992;self_id=[2:7486790402092483025:2498];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037992;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.166181Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037984;self_id=[2:7486790402092483555:2528];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037984;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.166249Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037982;self_id=[2:7486790402092483627:2537];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037982;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.166252Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037993;self_id=[2:7486790402092483019:2495];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037993;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.166320Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037989;self_id=[2:7486790402092483557:2529];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037989;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.166326Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037994;self_id=[2:7486790402092483032:2503];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037994;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.166394Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037990;self_id=[2:7486790402092483550:2525];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037990;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.166396Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037991;self_id=[2:7486790402092482905:2487];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037991;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.166459Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037995;self_id=[2:7486790402092483023:2497];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037995;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:37.166844Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037997;self_id=[2:7486790402092483015:2493];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037997;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; WAIT_INDEXATION: 0 2025-03-28T09:30:38.487077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:38.487110Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TBlobStorageProxyTest::TestNormalMirror [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false >> TSchemeShardServerLess::Fake [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] Test command err: 2025-03-28T09:30:37.588523Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} 
PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001d06/r3tmp/tmp6v2uui//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-03-28T09:30:37.592744Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. 
Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:30:40.567809Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001d06/r3tmp/tmp6v2uui//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-03-28T09:30:40.574900Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 
RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:30:42.038377Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001d06/r3tmp/tmp6v2uui//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-03-28T09:30:42.041483Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 
HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:30:43.464900Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001d06/r3tmp/tmp6v2uui//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-03-28T09:30:43.477737Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 
GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:30:44.827058Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001d06/r3tmp/tmp6v2uui//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-03-28T09:30:44.834531Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 
BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:30:46.680552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:30:46.680747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:30:46.680789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:30:46.680821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:30:46.681413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:30:46.681452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:30:46.681523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:30:46.681604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:30:46.682987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:30:46.772350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-03-28T09:30:46.772409Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:46.787760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:30:46.788040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:30:46.788269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:30:46.794982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:30:46.795261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:30:46.796042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:30:46.796321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:30:46.800081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:30:46.807981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:30:46.808063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:30:46.809211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:30:46.809322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:30:46.809381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:30:46.809492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.816617Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:30:46.967933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:30:46.968184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.968391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:30:46.968624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:30:46.968691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.970960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:30:46.971100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-03-28T09:30:46.971265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.971318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:30:46.971357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:30:46.971399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:30:46.973410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.973467Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:30:46.973503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:30:46.975343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.975411Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.975451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:30:46.975501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:30:46.979323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:30:46.981293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:30:46.981491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:30:46.982648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:30:46.982825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:30:46.982877Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:30:46.983254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:30:46.983319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:30:46.983486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:30:46.983561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:30:46.985653Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:30:46.985703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:30:46.985870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:30:46.985939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:30:46.986364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.986411Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:30:46.986513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:30:46.986549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:30:46.986586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:30:46.986618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:30:46.986654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:30:46.986718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:30:46.986756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:30:46.986790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:30:46.986870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:30:46.986915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:30:46.986969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:30:46.988932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:30:46.989034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:30:46.989071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409550 2025-03-28T09:30:47.313279Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 104:0 HandleReply TEvConfigureStatus operationId:104:0 at schemeshard:72057594046678944 2025-03-28T09:30:47.313330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 104:0 Got OK TEvConfigureStatus from tablet# 72075186234409550 shardIdx# 72057594046678944:6 at schemeshard# 72057594046678944 2025-03-28T09:30:47.320033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:30:47.320473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:30:47.321693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186234409551, partId: 0 2025-03-28T09:30:47.321809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409551 2025-03-28T09:30:47.321873Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 104:0 HandleReply TEvConfigureStatus operationId:104:0 at schemeshard:72057594046678944 2025-03-28T09:30:47.321913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 104:0 Got OK TEvConfigureStatus from tablet# 72075186234409551 shardIdx# 72057594046678944:7 at schemeshard# 72057594046678944 2025-03-28T09:30:47.321963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 3 -> 128 2025-03-28T09:30:47.325825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:30:47.325956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:30:47.325994Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:30:47.326058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 104:0, at tablet# 72057594046678944 2025-03-28T09:30:47.326112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2025-03-28T09:30:47.326237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:30:47.327780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-03-28T09:30:47.327888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000005 2025-03-28T09:30:47.328160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:30:47.328254Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:30:47.328295Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-03-28T09:30:47.328478Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-03-28T09:30:47.328519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2025-03-28T09:30:47.328601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-28T09:30:47.328685Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 3], Generation: 2, ActorId:[1:610:2538], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2025-03-28T09:30:47.330384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:30:47.330423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:30:47.330557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:30:47.330591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-03-28T09:30:47.330877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:30:47.330930Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2025-03-28T09:30:47.330968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 240 -> 240 2025-03-28T09:30:47.331661Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:30:47.331769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:30:47.331806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:30:47.331841Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4 2025-03-28T09:30:47.331895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2025-03-28T09:30:47.331973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-28T09:30:47.334420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:30:47.334471Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-28T09:30:47.334580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:30:47.334614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:30:47.334665Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:30:47.334732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:30:47.334780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-03-28T09:30:47.334820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:30:47.334863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:30:47.334899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:30:47.335112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-28T09:30:47.335474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-28T09:30:47.337096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-28T09:30:47.337143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-28T09:30:47.337535Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-28T09:30:47.337621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:30:47.337663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:763:2645] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2025-03-28T09:30:47.339995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:30:47.340180Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } 2025-03-28T09:30:47.340228Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/ServerLess0 2025-03-28T09:30:47.340348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 105:0, 
explain: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2025-03-28T09:30:47.340391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2025-03-28T09:30:47.344201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:30:47.344327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 105, wait until txId: 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:30:46.680543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:30:46.680694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:30:46.680745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:30:46.680780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:30:46.681409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:30:46.681495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:30:46.681577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:30:46.681681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:30:46.683021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:30:46.759309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:30:46.759368Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:46.777887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:30:46.778186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 
2025-03-28T09:30:46.778394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:30:46.786276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:30:46.786513Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:30:46.791889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:30:46.793160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:30:46.799990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:30:46.807936Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:30:46.808019Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:30:46.809196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:30:46.809278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:30:46.809374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:30:46.809501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.816631Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:30:46.964814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:30:46.965039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.965248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:30:46.965466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:30:46.965549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.967831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:30:46.967955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:30:46.968145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.968215Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:30:46.968272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:30:46.968310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:30:46.970136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.970187Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:30:46.970224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:30:46.972319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.972377Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.972417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:30:46.972463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:30:46.982871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:30:46.985010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:30:46.985194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:30:46.986216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:30:46.986356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:30:46.986409Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:30:46.986670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:30:46.986753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:30:46.986944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:30:46.987099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:30:46.988877Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-03-28T09:30:46.988916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:30:46.989093Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:30:46.989139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:30:46.989493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.989540Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:30:46.989629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:30:46.989662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:30:46.989697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:30:46.989727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:30:46.989763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:30:46.989800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:30:46.989834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:30:46.989867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:30:46.989922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:30:46.989959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:30:46.990010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:30:46.991876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:30:46.991993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:30:46.992029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ecute, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:30:47.682598Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-03-28T09:30:47.682723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-28T09:30:47.682759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:30:47.682795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-28T09:30:47.682827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:30:47.682875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-03-28T09:30:47.682917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:30:47.682953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-03-28T09:30:47.683001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-03-28T09:30:47.683147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-28T09:30:47.684904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-28T09:30:47.685161Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186234409546 2025-03-28T09:30:47.686145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:30:47.689123Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186234409548 2025-03-28T09:30:47.689323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-03-28T09:30:47.689610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 Forgetting tablet 72075186234409546 2025-03-28T09:30:47.690006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-03-28T09:30:47.690184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:30:47.691166Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186234409547 Forgetting tablet 72075186234409548 2025-03-28T09:30:47.692975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-03-28T09:30:47.693141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186234409547 2025-03-28T09:30:47.694616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 
2025-03-28T09:30:47.694791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:30:47.694847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:30:47.695038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:30:47.695716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:30:47.695763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:30:47.695821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:30:47.699558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-03-28T09:30:47.699615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409546 2025-03-28T09:30:47.699718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-03-28T09:30:47.699733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409548 2025-03-28T09:30:47.699928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-28T09:30:47.699975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409547 2025-03-28T09:30:47.700232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:30:47.700291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-28T09:30:47.700536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-03-28T09:30:47.700572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-03-28T09:30:47.700897Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-03-28T09:30:47.700965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-28T09:30:47.701004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:925:2787] TestWaitNotification: OK eventTxId 106 2025-03-28T09:30:47.701504Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:30:47.701677Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 173us result status StatusPathDoesNotExist 2025-03-28T09:30:47.701826Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:30:47.702276Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:30:47.702429Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 160us result status StatusPathDoesNotExist 2025-03-28T09:30:47.702551Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:30:47.702987Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:30:47.703157Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 156us result status StatusSuccess 2025-03-28T09:30:47.703440Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 
} DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted 2025-03-28T09:30:47.705526Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409550 2025-03-28T09:30:47.705574Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409551 2025-03-28T09:30:47.705600Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409552 2025-03-28T09:30:47.705635Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409553 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 >> TBlobStorageProxyTest::TestSingleFailure [GOOD] >> DataShardVolatile::DistributedWrite >> DataShardVolatile::DistributedWriteThenImmediateUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:30:46.680553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:30:46.680691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:30:46.680733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:30:46.680783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:30:46.681430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:30:46.681471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:30:46.681563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:30:46.681639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:30:46.682997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-03-28T09:30:46.760223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:30:46.760271Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:46.779240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:30:46.779584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:30:46.779788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:30:46.786178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:30:46.786413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:30:46.791905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:30:46.793186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:30:46.803643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:30:46.807945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:30:46.808031Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:30:46.809202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:30:46.809295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:30:46.809355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:30:46.809469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.816549Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:30:46.955384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:30:46.957090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.957944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:30:46.959337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:30:46.959419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.962584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:30:46.962770Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:30:46.962936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.963047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:30:46.963082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:30:46.963112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:30:46.964878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.964930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:30:46.964968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:30:46.966832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.966881Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.966929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:30:46.966978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:30:46.971892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:30:46.973872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:30:46.974971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:30:46.976236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:30:46.976389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:30:46.976444Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:30:46.978599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:30:46.978719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:30:46.978923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-03-28T09:30:46.979017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:30:46.981347Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:30:46.981404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:30:46.981642Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:30:46.981710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:30:46.982884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.982936Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:30:46.983040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:30:46.983073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:30:46.983126Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:30:46.983158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:30:46.983193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:30:46.983231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:30:46.983267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:30:46.983298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:30:46.983411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:30:46.983450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:30:46.983501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:30:46.985361Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:30:46.985476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:30:46.985516Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ecute, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:30:47.682879Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-03-28T09:30:47.682991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-28T09:30:47.683033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:30:47.683066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-28T09:30:47.683112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:30:47.683166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2025-03-28T09:30:47.683203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:30:47.683230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-03-28T09:30:47.683259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-03-28T09:30:47.683397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-28T09:30:47.684413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-28T09:30:47.684693Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 5 TabletID: 72075186234409549 2025-03-28T09:30:47.685506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186234409549 2025-03-28T09:30:47.688629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 5 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2025-03-28T09:30:47.688928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:30:47.691415Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 7 TabletID: 72075186234409551 2025-03-28T09:30:47.692574Z node 1 :HIVE INFO: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 6 TabletID: 72075186234409550 2025-03-28T09:30:47.692939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 7 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2025-03-28T09:30:47.693207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186234409551 2025-03-28T09:30:47.694283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 6 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2025-03-28T09:30:47.694482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186234409550 2025-03-28T09:30:47.695028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 
2025-03-28T09:30:47.695096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:30:47.695227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:30:47.695550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-28T09:30:47.695676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:30:47.695718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:30:47.695788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:30:47.698191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:5 2025-03-28T09:30:47.698249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409549 2025-03-28T09:30:47.700774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:7 2025-03-28T09:30:47.700818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409551 2025-03-28T09:30:47.700891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:6 2025-03-28T09:30:47.700925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409550 2025-03-28T09:30:47.700999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:30:47.701143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-28T09:30:47.701516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-03-28T09:30:47.701562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-03-28T09:30:47.702055Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-03-28T09:30:47.702162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-28T09:30:47.702213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:930:2794] TestWaitNotification: OK eventTxId 106 2025-03-28T09:30:47.702872Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:30:47.703073Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 223us result status StatusPathDoesNotExist 2025-03-28T09:30:47.703249Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme 
DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:30:47.703810Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:30:47.704032Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 194us result status StatusPathDoesNotExist 2025-03-28T09:30:47.704187Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:30:47.704774Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:30:47.704971Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 224us result status StatusSuccess 2025-03-28T09:30:47.705350Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { 
SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186234409549 is deleted wait until 72075186234409550 is deleted wait until 72075186234409551 is deleted wait until 72075186234409552 is deleted 2025-03-28T09:30:47.705996Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409549 2025-03-28T09:30:47.706091Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409550 2025-03-28T09:30:47.706152Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409551 2025-03-28T09:30:47.706217Z node 1 :HIVE INFO: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409552 Deleted tabletId 72075186234409549 Deleted tabletId 72075186234409550 Deleted tabletId 72075186234409551 Deleted tabletId 72075186234409552 |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageProxyTest::TestCollectGarbagePersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] Test command err: 2025-03-28T09:30:45.860679Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001d10/r3tmp/tmpWjcETD//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-03-28T09:30:45.866583Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TGroupMapperTest::MapperSequentialCalls >> TGroupMapperTest::ReassignGroupTest3dc >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] >> TGroupMapperTest::Mirror3dc >> KqpSinkLocks::UncommittedRead [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterDifferentSlotsPerDisk [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] >> TPersQueueTest::TestReadPartitionByGroupId [GOOD] >> TPersQueueTest::TestReadPartitionStatus >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::CheckNotToBreakFailModel [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> KqpSinkTx::DeferredEffects [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] >> TGroupMapperTest::MonteCarlo >> TGroupMapperTest::SanitizeGroupTest3dc >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::UncommittedRead [GOOD] Test command err: Trying to start YDB, gRPC: 63559, MsgBus: 12690 2025-03-28T09:30:17.738451Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790355182084738:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:17.738527Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015b7/r3tmp/tmpc2QqH9/pdisk_1.dat 2025-03-28T09:30:18.070802Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63559, node 1 2025-03-28T09:30:18.135079Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:18.135118Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:18.135128Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:18.135279Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:30:18.135687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:18.135870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2025-03-28T09:30:18.137844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12690 TClient is connected to server localhost:12690 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:18.638008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:20.663724Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790368066987272:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:20.663866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:20.664379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790368066987299:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:20.667936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:20.680685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790368066987301:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:20.763244Z node 1 :TX_PROXY ERROR: Actor# [1:7486790368066987352:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:21.051841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:21.238651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790372361954843:2346];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:21.238660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486790372361954841:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:21.238866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486790372361954841:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:21.239142Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486790372361954841:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:30:21.239268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486790372361954841:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:30:21.239401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486790372361954841:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:30:21.239400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790372361954843:2346];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:21.239616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486790372361954841:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:30:21.239660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790372361954843:2346];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:30:21.239756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486790372361954841:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:30:21.239824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790372361954843:2346];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-03-28T09:30:21.239889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486790372361954841:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:30:21.239950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790372361954843:2346];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:30:21.240013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486790372361954841:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:30:21.240080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790372361954843:2346];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:30:21.240166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486790372361954841:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:30:21.240200Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790372361954843:2346];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:30:21.240322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486790372361954841:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:30:21.240329Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790372361954843:2346];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:30:21.240428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486790372361954841:2345];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:30:21.240479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790372361954843:2346];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:30:21.240604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790372361954843:2346];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:30:21.240739Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790372361954843:2346];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:30:21.240846Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037890;self_id=[1:7486790372361954843:2346];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:30:21.277266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486790372361954851:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:21.277328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486790372361954851:2350];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:21.277543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;sel ... ;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.403980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[1:7486790393836797533:3392];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.404248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[1:7486790393836797762:3408];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.404564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[1:7486790393836797762:3408];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.404717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038030;self_id=[1:7486790393836797452:3369];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038030;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.404994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038030;self_id=[1:7486790393836797452:3369];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038030;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.405640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[1:7486790393836797611:3401];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.405868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[1:7486790393836797457:3371];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.406180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[1:7486790393836797611:3401];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.406297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[1:7486790393836797457:3371];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.406424Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038006;self_id=[1:7486790393836797563:3398];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038006;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.406869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[1:7486790393836797493:3380];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038055;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.407064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[1:7486790393836797563:3398];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038006;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.407360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038055;self_id=[1:7486790393836797493:3380];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038055;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.411696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[1:7486790393836797540:3394];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.412111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[1:7486790393836797540:3394];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.412946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[1:7486790393836797632:3404];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.413348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[1:7486790393836797632:3404];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.413916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[1:7486790393836797568:3399];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.414176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[1:7486790393836797568:3399];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.447461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[1:7486790393836797473:3377];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:30.448152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038004;self_id=[1:7486790393836797473:3377];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038004;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-28T09:30:33.059909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:33.059943Z node 1 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 9576, MsgBus: 29376 
2025-03-28T09:30:37.187322Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790443553987860:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:37.187481Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015b7/r3tmp/tmppMY9IY/pdisk_1.dat 2025-03-28T09:30:37.356494Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:37.388016Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:37.388123Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:37.391758Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9576, node 2 2025-03-28T09:30:37.447319Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:37.447343Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:37.447351Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:37.447492Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29376 TClient is connected to server localhost:29376 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:37.879594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:40.649473Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790456438890317:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:40.649572Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:40.649939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790456438890352:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:40.654841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:40.669698Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790456438890354:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:30:40.732458Z node 2 :TX_PROXY ERROR: Actor# [2:7486790456438890405:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:40.781927Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:40.870360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:41.819536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:42.619997Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790443553987860:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:42.725560Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] >> TGroupMapperTest::MakeDisksNonoperational [GOOD] >> KqpRanges::ValidatePredicatesDataQuery [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge >> TGroupMapperTest::Block42_1disk |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MakeDisksNonoperational [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformClusterMirror3dcWithUnusableDomain [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesTrivial [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::DeferredEffects [GOOD] Test command err: Trying to start YDB, gRPC: 63924, MsgBus: 19917 2025-03-28T09:30:13.254329Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790337976822275:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:13.254368Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015c0/r3tmp/tmppLoLGA/pdisk_1.dat 2025-03-28T09:30:13.707919Z node 1 
:IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:13.712198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:13.712320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:13.715180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63924, node 1 2025-03-28T09:30:13.782604Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:13.782625Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:13.782639Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:13.782928Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19917 TClient is connected to server localhost:19917 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:14.297841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:16.180409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790350861724821:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:16.180498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790350861724798:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:16.180840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:16.187377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:16.205161Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790350861724827:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:16.307104Z node 1 :TX_PROXY ERROR: Actor# [1:7486790350861724880:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:16.587707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:16.707135Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:17.585220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:18.323957Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790337976822275:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:18.330165Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:28.682825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:28.682879Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:32.464187Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790419581211631:3278], TxId: 281474976710678, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe1df4a3xpy9pg8t02k2x5w. SessionId : ydb://session/3?node_id=1&id=YzRjNGVjZjAtZDAyNGQ0MTQtMTIzZjEwZjMtYTlkYTRkYQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 7 has no snapshot at v1743154219000/18446744073709551615 shard 72075186224037889 with lowWatermark v1743154219044/18446744073709551615 (node# 1 state# Ready) } } 2025-03-28T09:30:32.464646Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790419581211631:3278], TxId: 281474976710678, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe1df4a3xpy9pg8t02k2x5w. SessionId : ydb://session/3?node_id=1&id=YzRjNGVjZjAtZDAyNGQ0MTQtMTIzZjEwZjMtYTlkYTRkYQ==. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 7 has no snapshot at v1743154219000/18446744073709551615 shard 72075186224037889 with lowWatermark v1743154219044/18446744073709551615 (node# 1 state# Ready) } }. 2025-03-28T09:30:32.465525Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790419581211632:3279], TxId: 281474976710678, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzRjNGVjZjAtZDAyNGQ0MTQtMTIzZjEwZjMtYTlkYTRkYQ==. TraceId : 01jqe1df4a3xpy9pg8t02k2x5w. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486790419581211627:2969], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:30:32.465857Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790419581211633:3280], TxId: 281474976710678, task: 3. Ctx: { TraceId : 01jqe1df4a3xpy9pg8t02k2x5w. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzRjNGVjZjAtZDAyNGQ0MTQtMTIzZjEwZjMtYTlkYTRkYQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486790419581211627:2969], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:30:32.466195Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzRjNGVjZjAtZDAyNGQ0MTQtMTIzZjEwZjMtYTlkYTRkYQ==, ActorId: [1:7486790359451667988:2969], ActorState: ExecuteState, TraceId: 01jqe1df4a3xpy9pg8t02k2x5w, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 Trying to start YDB, gRPC: 2647, MsgBus: 21729 2025-03-28T09:30:38.460662Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790446928410280:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:38.460784Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015c0/r3tmp/tmpjswdQ5/pdisk_1.dat 2025-03-28T09:30:38.601795Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:38.632541Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:38.632639Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:38.634910Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2647, node 2 2025-03-28T09:30:38.695297Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:38.695320Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:38.695328Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:38.695503Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21729 TClient is connected to server localhost:21729 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:39.175878Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:30:39.199181Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:30:41.521638Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790459813312803:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:41.521745Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:41.523527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790459813312839:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:41.528441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:41.563676Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790459813312841:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:30:41.655760Z node 2 :TX_PROXY ERROR: Actor# [2:7486790459813312892:2335] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:41.730124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:41.785624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:30:43.026330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:43.899285Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790446928410280:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:43.960833Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TGroupMapperTest::Mirror3dc3Nodes [GOOD] >> TMultiversionObjectMap::MonteCarlo >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] >> TGroupMapperTest::NonUniformCluster2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] Test command err: Trying to start YDB, gRPC: 9385, MsgBus: 8806 2025-03-28T09:30:12.157615Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790333118278396:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:12.158603Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015c4/r3tmp/tmpAw9CEu/pdisk_1.dat 2025-03-28T09:30:12.529929Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:12.547591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:12.547818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:12.550255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9385, node 1 2025-03-28T09:30:12.699290Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:12.699315Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:12.699322Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:12.699451Z node 1 
:NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8806 TClient is connected to server localhost:8806 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:13.244066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:13.259299Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:15.178176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790346003180932:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:15.178182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790346003180942:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:15.178274Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:15.181923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:15.192138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790346003180947:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:15.290632Z node 1 :TX_PROXY ERROR: Actor# [1:7486790346003180998:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:15.610007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:15.760721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790346003181185:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:15.760984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790346003181185:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:15.761292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790346003181185:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:30:15.761454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790346003181185:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:30:15.761574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790346003181185:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:30:15.761776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790346003181185:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:30:15.761929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790346003181185:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:30:15.762049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790346003181185:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:30:15.762199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790346003181185:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:30:15.762382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790346003181185:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:30:15.762518Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7486790346003181185:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:30:15.762535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790346003181197:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:15.762599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790346003181197:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:15.762658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790346003181185:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:30:15.762775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790346003181197:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:30:15.763075Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790346003181197:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:30:15.763240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790346003181197:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:30:15.763377Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790346003181197:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:30:15.763507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790346003181197:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:30:15.763632Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790346003181197:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:30:15.763768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790346003181197:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:30:15.763896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790346003181197:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:30:15.764003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790346003181197:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:30:15.764179Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7486790346003181197:2348];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:30:15.801491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790346003181317:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:15.801567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037890;self_id=[1:7486790346003181317:2353];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract ... 1056Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7486790445425611618:3180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.391353Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7486790445425611616:3179];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.396307Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038027;self_id=[2:7486790445425611618:3180];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038027;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.396552Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[2:7486790445425611616:3179];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.403008Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7486790445425611634:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.403269Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038005;self_id=[2:7486790445425611634:3192];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038005;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.403514Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7486790445425611678:3215];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.403700Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[2:7486790445425611629:3188];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.405691Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7486790445425611585:3154];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.405930Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7486790445425611640:3195];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.408445Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038012;self_id=[2:7486790445425611651:3202];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.408639Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[2:7486790445425611651:3202];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.408810Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7486790445425611809:3220];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.408979Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[2:7486790445425611809:3220];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.409205Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7486790445425611783:3217];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.409349Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038011;self_id=[2:7486790445425611783:3217];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038011;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.410365Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[2:7486790445425611640:3195];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.411253Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[2:7486790445425611678:3215];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.411380Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7486790445425611655:3204];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.411465Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7486790445425611664:3210];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.411531Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7486790445425611655:3204];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.411624Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[2:7486790445425611664:3210];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.412288Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[2:7486790445425611585:3154];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.412468Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038028;self_id=[2:7486790445425611608:3173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.412590Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[2:7486790445425611608:3173];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.415389Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7486790445425611666:3211];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.415641Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[2:7486790445425611666:3211];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.426175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7486790445425611696:3216];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.426409Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038021;self_id=[2:7486790445425611696:3216];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038021;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.430121Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[2:7486790449720580106:3349];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038082;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.430445Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7486790449720579792:3313];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.430602Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7486790449720579792:3313];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.430761Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7486790449720579803:3315];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.430919Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[2:7486790449720579795:3314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038061;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.431165Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[2:7486790449720580106:3349];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038082;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.431299Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7486790445425611807:3219];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2025-03-28T09:30:44.431933Z 
node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7486790445425611797:3218];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038014;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.432358Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[2:7486790449720579803:3315];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.432605Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[2:7486790449720579795:3314];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038061;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.432773Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[2:7486790445425611807:3219];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.432905Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[2:7486790445425611797:3218];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038014;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.435386Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7486790445425611623:3183];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:44.435770Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[2:7486790445425611623:3183];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; WAIT_INDEXATION: 0 2025-03-28T09:30:45.581670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:45.581731Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Mirror3dc3Nodes [GOOD] >> TGroupMapperTest::InterlacedRacksWithoutInterlacedNodes [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesHuge [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::when_one_server_per_rack_in_4_racks_then_can_construct_group_with_4_domains_and_one_small_node [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpRanges::ValidatePredicatesDataQuery [GOOD] Test command err: Trying to start YDB, gRPC: 22088, MsgBus: 8180 2025-03-28T09:29:01.381231Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790031042820220:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:01.381286Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00091a/r3tmp/tmpvzn1O1/pdisk_1.dat 2025-03-28T09:29:01.901131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-03-28T09:29:01.901204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:01.903647Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:01.904999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22088, node 1 2025-03-28T09:29:02.098450Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:02.098477Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:02.098484Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:02.098585Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8180 TClient is connected to server localhost:8180 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:02.818337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:02.835453Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:02.849156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:29:03.033594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:03.219275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:03.313681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:04.928976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790043927723890:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:04.929092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:05.320726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:05.359689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:05.411409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:05.439043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:05.470553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:05.517066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:05.594485Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790048222691700:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:05.594575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:05.594786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790048222691705:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:05.599227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:05.612069Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:29:05.612633Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790048222691707:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:05.686466Z node 1 :TX_PROXY ERROR: Actor# [1:7486790048222691762:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:06.381364Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790031042820220:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:06.381428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:06.624076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:06.910841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:29:07.082103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:29:07.216048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:29:07.591929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480
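The recurring "Resource pool default not found" / "Scheduled retry" / "path exist, request accepts it" sequence above is the workload manager lazily creating the default resource pool on first use: the create is retried after the NOT_FOUND, and an already-existing pool is treated as success, so the ERROR line is benign. A minimal sketch of the same pool-creation step, assuming a YDB version with workload-manager DDL (the pool name and limit values here are illustrative, not taken from the test):

    -- illustrative: create a resource pool like the implicit "default" one
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- illustrative limit
        QUEUE_SIZE = 100              -- illustrative limit
    );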
: Warning: Type annotation, code: 1030
:1:44: Warning: At function: Coalesce
:1:58: Warning: At function: SqlIn
:1:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 26321, MsgBus: 24327 2025-03-28T09:29:08.876209Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790058940833892:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:08.876280Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00091a/r3tmp/tmpiGgT9i/pdisk_1.dat 2025-03-28T09:29:08.980429Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26321, node 2 2025-03-28T09:29:09.021818Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:09.021927Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:09.024395Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:29:09.061716Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:09.061739Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:09.061746Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:09.061851Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24327 TClient is connected to server localhost:24327 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 S ... 
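The code-1108 warning above comes from SqlIn: with YQL's legacy semantics, IN over an empty collection or one containing NULLs can return a result that differs from ANSI three-valued logic. The fix the warning itself suggests, sketched against the test table (the literal IN list is illustrative):

    PRAGMA AnsiInForEmptyOrNullableItemsCollections;
    SELECT `Value` FROM `/Root/TestPredicates`
    WHERE Key1 IN (1, 2, NULL);  -- under ANSI semantics a non-matching list with NULL yields NULL, not false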
78700Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154220906, txId: 281474976715785] shutting down 2025-03-28T09:30:21.164941Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154221193, txId: 281474976715787] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 < 9000 OR Key3 IS NULL ORDER BY `Value`; 2025-03-28T09:30:21.448223Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154221480, txId: 281474976715789] shutting down 2025-03-28T09:30:21.758714Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154221788, txId: 281474976715791] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[6u]];[[7u]];[[8u]];[[9u]];[[10u]];[[11u]];[[12u]];[[13u]];[[14u]];[[15u]];[[16u]];[[17u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Value = 20 ORDER BY `Value`; 2025-03-28T09:30:21.994312Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154222026, txId: 281474976715793] shutting down 2025-03-28T09:30:22.188705Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154222222, txId: 281474976715795] shutting down EXPECTED: [[[20u]]] RECEIVED: [[[20u]]] Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE (Key1 <= 1000) OR (Key1 > 2000 AND Key1 < 5000) OR (Key1 >= 8000) ORDER BY `Value`; 2025-03-28T09:30:22.663124Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154222691, txId: 281474976715797] shutting down EXPECTED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[11u]];[[12u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] RECEIVED: [[[1u]];[[2u]];[[3u]];[[4u]];[[5u]];[[11u]];[[12u]];[[16u]];[[17u]];[[18u]];[[19u]];[[20u]]] 2025-03-28T09:30:23.092495Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154223125, txId: 281474976715799] shutting down Execute query SELECT `Value` FROM `/Root/TestPredicates` WHERE Key1 < NULL ORDER BY `Value`; 2025-03-28T09:30:23.219338Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154223214, txId: 281474976715801] shutting down 2025-03-28T09:30:23.350471Z node 6 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154223346, txId: 281474976715803] shutting down EXPECTED: [] RECEIVED: [] 2025-03-28T09:30:23.404383Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037919 not found 2025-03-28T09:30:23.404430Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037920 not found 2025-03-28T09:30:23.404453Z node 6 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 6, TabletId: 72075186224037921 not found Trying to start YDB, gRPC: 
5153, MsgBus: 21422 2025-03-28T09:30:24.507918Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486790384593169638:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:24.507969Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00091a/r3tmp/tmpZLdP8r/pdisk_1.dat 2025-03-28T09:30:24.733529Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:24.828264Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:24.828411Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:24.830147Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5153, node 7 2025-03-28T09:30:24.971995Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:24.972020Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:24.972037Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:24.972208Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21422 TClient is connected to server localhost:21422 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:30:25.764195Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:30:25.790073Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:25.898354Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:26.182594Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
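Among the predicate-validation queries in the node 6 output above, the "Key1 < NULL" case shows why both EXPECTED and RECEIVED are empty: any comparison with NULL evaluates to NULL, and WHERE keeps only rows where the predicate is true, so every row is filtered out. The query, verbatim from the log, annotated:

    SELECT `Value` FROM `/Root/TestPredicates`
    WHERE Key1 < NULL     -- comparison with NULL => NULL => row filtered out
    ORDER BY `Value`;     -- EXPECTED: []  RECEIVED: []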
2025-03-28T09:30:26.304211Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:29.510811Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486790384593169638:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:29.510907Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:30:29.695534Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790406068007887:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:29.695825Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:29.722559Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:29.805487Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:30:29.890209Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:30:29.925771Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:30.006356Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:30:30.059589Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:30:30.130452Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790410362975703:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:30.130605Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:30.130900Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486790410362975709:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:30.135931Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:30:30.148914Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486790410362975711:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:30:30.218592Z node 7 :TX_PROXY ERROR: Actor# [7:7486790410362975764:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:31.698668Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:30:39.719695Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:39.719730Z node 7 :IMPORT WARN: Table profiles were not loaded >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::InterlacedRacksWithoutInterlacedNodes [GOOD] >> TGroupMapperTest::NonUniformCluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] Test command err: 2025-03-28T09:30:43.685023Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001ce4/r3tmp/tmpZhZiD3//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1 2025-03-28T09:30:43.712683Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 
LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:30:45.284581Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001ce4/r3tmp/tmpZhZiD3//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 2 2025-03-28T09:30:45.294378Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:1:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 
LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:30:46.919897Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001ce4/r3tmp/tmpZhZiD3//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 3 2025-03-28T09:30:46.924308Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:2:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:30:48.446965Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001ce4/r3tmp/tmpZhZiD3//vdisk_bad_3/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 4 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 4 2025-03-28T09:30:48.449684Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:3:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] 
StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:30:50.076505Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001ce4/r3tmp/tmpZhZiD3//vdisk_bad_4/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 5 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 5 2025-03-28T09:30:50.082671Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:4:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 
HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:30:51.769567Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:413} PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001ce4/r3tmp/tmpZhZiD3//vdisk_bad_5/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 6 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 6 2025-03-28T09:30:51.770658Z :BS_LOCALRECOVERY CRIT: VDISK[0:_:0:5:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 
TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TBlobStorageControllerGrouperTest::TestGroupFromCandidatesEmpty [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] >> KqpSinkTx::OlapExplicitTcl [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] >> TGroupMapperTest::MapperSequentialCalls [GOOD] >> DataShardVolatile::DistributedWriteThenImmediateUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenSplit |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter >> TConsoleTests::TestGetUnknownTenantStatus |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> DataShardVolatile::DistributedWrite [GOOD] >> DataShardVolatile::DistributedWriteBrokenLock >> TGroupMapperTest::NonUniformCluster2 [GOOD] >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_DOMAIN [GOOD] >> 
TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription >> TUserAccountServiceTest::Get >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain >> TConsoleTests::TestCreateTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapExplicitTcl [GOOD] Test command err: Trying to start YDB, gRPC: 6696, MsgBus: 32466 2025-03-28T09:30:16.237146Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790351662927121:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:16.237316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015bd/r3tmp/tmpTFty3I/pdisk_1.dat 2025-03-28T09:30:16.640247Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:16.646087Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:16.646200Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:16.649527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6696, node 1 2025-03-28T09:30:16.715304Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:16.715325Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:16.715340Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:16.715480Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32466 TClient is connected to server localhost:32466 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:30:17.195780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:17.228654Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:19.120553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790364547829576:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:19.120639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:19.120740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790364547829606:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:19.125079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:19.137591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790364547829612:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:19.219235Z node 1 :TX_PROXY ERROR: Actor# [1:7486790364547829663:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:19.524271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:19.701548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486790364547829868:2348];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:19.701554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790364547829862:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:19.701814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486790364547829868:2348];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:19.701815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790364547829862:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:19.702166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790364547829862:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:30:19.702198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486790364547829868:2348];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:30:19.702430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790364547829862:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:30:19.702485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486790364547829868:2348];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:30:19.702591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486790364547829868:2348];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:30:19.702592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790364547829862:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:30:19.702768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486790364547829868:2348];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
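[Note on the normalizer_register records above] During TTxInitSchema::Execute each column shard registers a fixed sequence of metadata-normalization passes (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, ...), and the same chain repeats once per tablet_id, which is why these WARN lines come in near-identical bursts. A minimal sketch of that ordered-registration pattern follows; all class and function names are illustrative stand-ins, not YDB's actual interfaces.

// Sketch only: an ordered chain of one-shot metadata repair passes,
// logged at registration time the way the records above are.
#include <cstdio>
#include <memory>
#include <string>
#include <vector>

struct INormalizer {
    virtual ~INormalizer() = default;
    virtual std::string ClassName() const = 0;
    virtual void Apply() = 0;  // one repair/migration pass over tablet metadata
};

struct TGranulesNormalizer : INormalizer {
    std::string ClassName() const override { return "Granules"; }
    void Apply() override { /* repair granule metadata (illustrative no-op) */ }
};

class TNormalizerChain {
    std::vector<std::unique_ptr<INormalizer>> Chain;
public:
    void Register(std::unique_ptr<INormalizer> n) {
        // Mirrors the per-tablet "event=normalizer_register" records:
        // each pass is appended in a fixed order during schema init.
        std::printf("normalizer_register;CLASS_NAME=%s\n", n->ClassName().c_str());
        Chain.push_back(std::move(n));
    }
    void RunAll() {
        for (auto& n : Chain) {
            n->Apply();  // passes execute in registration order
        }
    }
};

int main() {
    TNormalizerChain chain;
    chain.Register(std::make_unique<TGranulesNormalizer>());
    chain.RunAll();
}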
2025-03-28T09:30:19.702794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790364547829862:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:30:19.702907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486790364547829868:2348];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:30:19.702925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790364547829862:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:30:19.703298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486790364547829868:2348];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:30:19.703457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486790364547829868:2348];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:30:19.703576Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486790364547829868:2348];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:30:19.703693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486790364547829868:2348];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:30:19.703792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486790364547829868:2348];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:30:19.706637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790364547829862:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:30:19.706812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790364547829862:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:30:19.706915Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790364547829862:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:30:19.707021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486790364547829862:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:30:19.707132Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7486790364547829862:2346];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:30:19.752917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790364547829874:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:19.752991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486790364547829874:2350];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstr ... COLUMNSHARD WARN: tablet_id=72075186224038049;self_id=[2:7486790471288585326:3416];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038049;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.687962Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[2:7486790471288584802:3344];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.688244Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[2:7486790471288585430:3442];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.688449Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[2:7486790471288584691:3322];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038077;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.691369Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7486790471288585416:3439];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.691423Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7486790471288585386:3428];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.691595Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038040;self_id=[2:7486790471288585416:3439];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038040;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.691617Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[2:7486790471288585386:3428];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.691735Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[2:7486790471288585430:3442];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.691902Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[2:7486790471288584691:3322];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038077;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.693303Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038068;self_id=[2:7486790471288584900:3369];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038068;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.694307Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038068;self_id=[2:7486790471288584900:3369];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038068;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.694794Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7486790471288585417:3440];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.694993Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[2:7486790471288585417:3440];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.697078Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7486790471288585493:3467];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.698265Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[2:7486790471288585493:3467];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.698378Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7486790471288585004:3412];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038051;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.698957Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[2:7486790471288585004:3412];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038051;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.700987Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7486790471288585450:3457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.702313Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[2:7486790471288584437:3292];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038091;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.707421Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[2:7486790471288585450:3457];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.707679Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[2:7486790471288584437:3292];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038091;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.708343Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7486790471288584902:3370];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.719637Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038060;self_id=[2:7486790471288584966:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.719951Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7486790471288585016:3414];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038050;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.720546Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038059;self_id=[2:7486790471288584902:3370];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038059;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.720783Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7486790471288584987:3409];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038052;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.721009Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[2:7486790471288584409:3278];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038094;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.721334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[2:7486790471288584966:3400];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.721462Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038052;self_id=[2:7486790471288584987:3409];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038052;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.721477Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038050;self_id=[2:7486790471288585016:3414];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038050;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.721609Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[2:7486790471288584409:3278];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038094;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.721820Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[2:7486790471288584516:3300];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038074;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.721971Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038074;self_id=[2:7486790471288584516:3300];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038074;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.722110Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[2:7486790471288584661:3316];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.722226Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[2:7486790471288584661:3316];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.722287Z node 2 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038066;self_id=[2:7486790471288584875:3350];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038066;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.722481Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038066;self_id=[2:7486790471288584875:3350];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038066;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.723251Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7486790471288584619:3302];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038081;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.723432Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7486790471288584998:3411];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.723756Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[2:7486790471288584998:3411];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.723936Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224038081;self_id=[2:7486790471288584619:3302];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038081;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T09:30:48.753359Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDdmMjE5ZDItMzVjYjVhMzAtMTI3M2NmNjUtNTdkZmEwYmU=, ActorId: [2:7486790484173489788:3897], ActorState: ReadyState, TraceId: 01jqe1dz121q0r2cr1khzy4p4r, Create QueryResponse for error on request, msg: WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 2025-03-28T09:30:50.226184Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:50.226219Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> FolderServiceTest::TFolderService >> FolderServiceTest::TFolderServiceTransitional >> TAccessServiceTest::PassRequestId |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MapperSequentialCalls [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster2 [GOOD] >> TServiceAccountServiceTest::Get [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::AAARangeFullScan >> KqpCost::IndexLookupJoin-StreamLookupJoin >> KqpCost::IndexLookup+useSink >> KqpCost::OltpWriteRow+isSink >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached [GOOD] >> TConfigsCacheTests::TestFullConfigurationRestore >> KqpCost::OlapWriteRow >> KqpCost::OlapRangeFullScan >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription 
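[Note on the WAIT_INDEXATION lines above] The repeated "WAIT_INDEXATION: 0" entries interleaved with per-tablet "skip_indexation;reason=disabled" warnings suggest the test polls an indexation counter that never advances because indexation is switched off for this run. A plausible shape for such a polling loop is sketched below; CountIndexedPortions() is a hypothetical helper, not a real YDB call.

// Sketch only: poll an indexation counter once per second until it
// moves or a deadline passes, printing the same line the log shows.
#include <chrono>
#include <cstdio>
#include <thread>

int CountIndexedPortions() {
    return 0;  // stub: indexation is disabled in this run, so it stays 0
}

bool WaitIndexation(std::chrono::seconds timeout) {
    const auto deadline = std::chrono::steady_clock::now() + timeout;
    while (std::chrono::steady_clock::now() < deadline) {
        const int n = CountIndexedPortions();
        std::printf("WAIT_INDEXATION: %d\n", n);
        if (n > 0) {
            return true;  // indexation progressed
        }
        std::this_thread::sleep_for(std::chrono::seconds(1));
    }
    return false;  // counter never moved; expected here since indexation is off
}

int main() {
    return WaitIndexation(std::chrono::seconds(3)) ? 0 : 1;
}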
|91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification >> KqpCost::OltpWriteRow-isSink |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions >> TUserAccountServiceTest::Get [GOOD] >> TServiceAccountServiceTest::IssueToken [GOOD] >> FolderServiceTest::TFolderServiceAdapter [GOOD] >> TConfigsCacheTests::TestConfigurationSaveOnNotification [GOOD] >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification >> FolderServiceTest::TFolderService [GOOD] >> TAccessServiceTest::PassRequestId [GOOD] >> TConsoleTests::TestGetUnknownTenantStatus [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions >> FolderServiceTest::TFolderServiceTransitional [GOOD] >> KqpCost::QuerySeviceRangeFullScan >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: 2025-03-28T09:30:56.463519Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790521978503567:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:56.463600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e23/r3tmp/tmpB0CbGa/pdisk_1.dat 2025-03-28T09:30:56.780399Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:56.811322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:56.811455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:56.813113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3734 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:56.997784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2025-03-28T09:30:56.157246Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790521622703577:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:56.157566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e08/r3tmp/tmpyAsxEc/pdisk_1.dat 2025-03-28T09:30:56.428334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:56.428473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:56.430371Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:56.448908Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:6440 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:30:56.670229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:56.741815Z node 1 :GRPC_CLIENT DEBUG: [517000071288] Connect to grpc://localhost:24864 2025-03-28T09:30:56.755944Z node 1 :GRPC_CLIENT DEBUG: [517000071288] Request ListFoldersRequest { id: "i_am_exists" } 2025-03-28T09:30:56.801243Z node 1 :GRPC_CLIENT DEBUG: [517000071288] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2025-03-28T09:30:56.802338Z node 1 :GRPC_CLIENT DEBUG: [517000062808] Connect to grpc://localhost:21856 2025-03-28T09:30:56.803269Z node 1 :GRPC_CLIENT DEBUG: [517000062808] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-03-28T09:30:56.811615Z node 1 :GRPC_CLIENT DEBUG: [517000062808] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2025-03-28T09:30:56.812296Z node 1 :GRPC_CLIENT DEBUG: [517000062808] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-03-28T09:30:56.818827Z node 1 :GRPC_CLIENT DEBUG: [517000062808] Status 5 Not Found 2025-03-28T09:30:56.819277Z node 1 :GRPC_CLIENT DEBUG: [517000071288] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-03-28T09:30:56.821279Z node 1 :GRPC_CLIENT DEBUG: [517000071288] Status 5 Not Found >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> KqpCost::IndexLookupAndTake+useSink >> KqpCost::ScanScriptingRangeFullScan+SourceRead ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command err: 2025-03-28T09:30:56.797603Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790521566717203:2218];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:56.798301Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e5d/r3tmp/tmp4ouOun/pdisk_1.dat 2025-03-28T09:30:57.114463Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:57.131596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:57.131690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:57.134334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14116 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:57.385522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:57.412544Z node 1 :GRPC_CLIENT DEBUG: [51700007c508]{trololo} Connect to grpc://localhost:27427 2025-03-28T09:30:57.414541Z node 1 :GRPC_CLIENT DEBUG: [51700007c508]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2025-03-28T09:30:57.423821Z node 1 :GRPC_CLIENT DEBUG: [51700007c508]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2025-03-28T09:30:56.610741Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790525563325699:2267];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:56.610822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e17/r3tmp/tmptDmdhL/pdisk_1.dat 2025-03-28T09:30:57.007426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:57.007604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:57.009577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:57.036722Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:9795 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:57.270078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:57.284492Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Connect to grpc://localhost:15356 2025-03-28T09:30:57.295132Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-03-28T09:30:57.303577Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:15356: Failed to connect to remote host: Connection refused 2025-03-28T09:30:57.307601Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-03-28T09:30:57.308279Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:15356: Failed to connect to remote host: Connection refused 2025-03-28T09:30:58.309125Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2025-03-28T09:30:58.313482Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 5 Not Found 2025-03-28T09:30:58.314149Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2025-03-28T09:30:58.317448Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2025-03-28T09:30:56.698221Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790524047821991:2269];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:56.698880Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ea4/r3tmp/tmpLvLfyu/pdisk_1.dat 2025-03-28T09:30:57.044632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:57.044745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
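[Note on the GRPC_CLIENT exchange above] The TFolderService sequence shows the client treating transport errors ("failed to connect to all addresses") as retriable, repeating the ResolveFoldersRequest at roughly one-second spacing, while "Status 5 Not Found" is accepted as a final answer. A minimal sketch of that retry policy is given below with an illustrative status enum rather than the real gRPC status type; the exact backoff schedule is an assumption inferred from the timestamps.

// Sketch only: retry transport-level failures with ~1s spacing,
// return definitive statuses (Ok / NotFound) immediately.
#include <chrono>
#include <functional>
#include <thread>

enum class EStatus { Ok, Unavailable, NotFound };

EStatus CallWithRetry(const std::function<EStatus()>& rpc, int maxAttempts) {
    for (int attempt = 1; ; ++attempt) {
        const EStatus st = rpc();
        // Only transport failures are retriable; NotFound is a real
        // answer from the service and is passed through as-is.
        if (st != EStatus::Unavailable || attempt >= maxAttempts) {
            return st;
        }
        std::this_thread::sleep_for(std::chrono::seconds(1));  // ~1s spacing, as in the log
    }
}

int main() {
    int calls = 0;
    // Fake RPC: fails twice with a transport error, then returns NotFound,
    // matching the connect-refused / Not Found progression above.
    auto rpc = [&] { return ++calls < 3 ? EStatus::Unavailable : EStatus::NotFound; };
    return CallWithRetry(rpc, 5) == EStatus::NotFound ? 0 : 1;
}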
2025-03-28T09:30:57.046657Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:57.076302Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:1753 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:57.355873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:57.375730Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Connect to grpc://localhost:64947 2025-03-28T09:30:57.390608Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-03-28T09:30:57.397152Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:64947: Failed to connect to remote host: Connection refused 2025-03-28T09:30:57.398899Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-03-28T09:30:57.399367Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:64947: Failed to connect to remote host: Connection refused 2025-03-28T09:30:58.399834Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-03-28T09:30:58.400547Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:64947: Failed to connect to remote host: Connection refused 2025-03-28T09:30:59.401052Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ListFoldersRequest { id: "i_am_not_exists" } 2025-03-28T09:30:59.403956Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Status 5 Not Found 2025-03-28T09:30:59.404565Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Request ListFoldersRequest { id: "i_am_exists" } 2025-03-28T09:30:59.406894Z node 1 :GRPC_CLIENT DEBUG: [51700007c508] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions >> TConsoleTests::TestCreateTenant [GOOD] >> TConsoleTests::TestCreateTenantExtSubdomain >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain [GOOD] >> TConsoleTests::TestSetDefaultStorageUnitsQuota >> KqpCost::OlapPointLookup >> DataShardVolatile::DistributedWriteThenSplit [GOOD] >> DataShardVolatile::DistributedWriteThenReadIterator >> TConfigsCacheTests::TestConfigurationChangeSensor 
[GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotification |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 30659, MsgBus: 25910 2025-03-28T09:30:18.451126Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790358687597777:2224];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:18.451791Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015b2/r3tmp/tmp5tFQe3/pdisk_1.dat 2025-03-28T09:30:18.799922Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30659, node 1 2025-03-28T09:30:18.870240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:18.870907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:18.875198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:18.902565Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:18.902594Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:18.902602Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:18.902751Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25910 TClient is connected to server localhost:25910 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:19.432959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:21.204418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790371572500140:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:21.204524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:21.205664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790371572500173:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:21.209016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:30:21.217875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790371572500177:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:30:21.316014Z node 1 :TX_PROXY ERROR: Actor# [1:7486790371572500228:2336] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:21.614813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:30:21.745736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790371572500421:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:21.745971Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790371572500421:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:21.746287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790371572500421:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:30:21.746433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790371572500421:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:30:21.746590Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790371572500421:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:30:21.746941Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790371572500421:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:30:21.747113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790371572500421:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:30:21.747234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790371572500421:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:30:21.747395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790371572500421:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:30:21.747554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790371572500421:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:30:21.747784Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7486790371572500421:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:30:21.747950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486790371572500421:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:30:21.750268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790371572500423:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:21.750304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790371572500423:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:21.750521Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790371572500423:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:30:21.750782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790371572500423:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:30:21.750894Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790371572500423:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:30:21.751100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790371572500423:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:30:21.751219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790371572500423:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:30:21.751644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790371572500423:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:30:21.751788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790371572500423:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:30:21.751914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790371572500423:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:30:21.752025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486790371572500423:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:30:21.752730Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:7486790371572500423:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:30:21.778448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486790371572500438:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:30:21.778493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486790371572500438:2351];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:30:21.778655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;sel ... 9;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837226Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037959;self_id=[2:7486790473600358078:2492];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037959;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837239Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037991;self_id=[2:7486790473600357765:2438];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037991;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837288Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037986;self_id=[2:7486790473600357852:2449];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037986;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837321Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037960;self_id=[2:7486790473600357963:2468];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037960;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837352Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037992;self_id=[2:7486790473600357780:2439];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037992;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837390Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037961;self_id=[2:7486790473600357864:2455];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037961;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837407Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037970;self_id=[2:7486790473600357870:2457];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037970;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837446Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037963;self_id=[2:7486790473600357630:2430];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037963;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837476Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037946;self_id=[2:7486790473600358220:2521];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037946;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837502Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037971;self_id=[2:7486790473600357873:2458];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037971;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837548Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037947;self_id=[2:7486790473600358155:2499];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037947;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837557Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037932;self_id=[2:7486790473600358331:2539];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037932;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837638Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037972;self_id=[2:7486790473600357947:2461];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037972;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837653Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037989;self_id=[2:7486790473600357729:2435];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037989;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837692Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037948;self_id=[2:7486790473600358073:2490];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037948;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837713Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037949;self_id=[2:7486790473600358134:2497];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037949;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837768Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037964;self_id=[2:7486790473600357857:2452];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037964;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837810Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037965;self_id=[2:7486790473600358137:2498];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037965;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only 
single operation is supported;tx_id=0; 2025-03-28T09:30:53.837901Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037980;self_id=[2:7486790473600357860:2453];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037980;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.837983Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037966;self_id=[2:7486790473600357866:2456];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037966;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.838054Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037981;self_id=[2:7486790473600357838:2441];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037981;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.838118Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037967;self_id=[2:7486790473600357854:2450];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037967;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.838182Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037982;self_id=[2:7486790473600357845:2444];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037982;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.838248Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037984;self_id=[2:7486790473600357840:2442];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037984;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.838330Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037983;self_id=[2:7486790473600357862:2454];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037983;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.838402Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037993;self_id=[2:7486790473600357722:2433];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037993;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.838466Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037987;self_id=[2:7486790473600357731:2436];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037987;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.838528Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037978;self_id=[2:7486790473600358016:2470];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037978;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.838593Z node 2 :TX_COLUMNSHARD_WRITE WARN: 
tablet_id=72075186224037988;self_id=[2:7486790473600357761:2437];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037988;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.838706Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037979;self_id=[2:7486790473600357842:2443];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037979;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.839501Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037985;self_id=[2:7486790473600357836:2440];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037985;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.839580Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037994;self_id=[2:7486790473600357626:2428];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037994;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.839625Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037996;self_id=[2:7486790473600357624:2427];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037996;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.839687Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037995;self_id=[2:7486790473600357628:2429];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037995;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:53.839736Z node 2 :TX_COLUMNSHARD_WRITE WARN: tablet_id=72075186224037997;self_id=[2:7486790473600357669:2431];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037997;event=TEvWrite;fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=only single operation is supported;tx_id=0; 2025-03-28T09:30:54.542528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:30:54.542561Z node 2 :IMPORT WARN: Table profiles were not loaded WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 WAIT_INDEXATION: 0 >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> DataShardVolatile::DistributedWriteBrokenLock [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink >> KqpCost::IndexLookup-useSink >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] >> KqpCost::IndexLookupAndTake-useSink >> TConfigsDispatcherTests::TestSubscriptionNotification [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain [GOOD] >> TConsoleTests::TestRestartConsoleAndPools >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate |91.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::SanitizeGroupTest3dc [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2025-03-28T09:30:55.942199Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790517915696061:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:55.942376Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e75/r3tmp/tmpJYc0gF/pdisk_1.dat 2025-03-28T09:30:56.260557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:56.260655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:56.262084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:56.262220Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:13184 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:56.500363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.030476Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790534512367512:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:59.030519Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e75/r3tmp/tmpt5HmFR/pdisk_1.dat 2025-03-28T09:30:59.194034Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:59.218846Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:59.218952Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:59.220250Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3996 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:59.397102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] Test command err: 2025-03-28T09:30:56.215394Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790525514667120:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:56.215509Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e2f/r3tmp/tmp8jP2qp/pdisk_1.dat 2025-03-28T09:30:56.548951Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:56.595917Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:56.596042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:56.598820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22944 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:56.828408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:30:59.198833Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790538322404631:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:59.207866Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e2f/r3tmp/tmp78suDZ/pdisk_1.dat 2025-03-28T09:30:59.328675Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:59.375077Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:59.375162Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:59.376450Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17803 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:59.525319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.534195Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 >> KqpCost::AAARangeFullScan [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead |91.2%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.2%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpCost::OltpWriteRow+isSink [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscription |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup+useSink [GOOD] |91.2%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] >> KqpCost::OlapRangeFullScan [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscription [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess |91.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::AAARangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 30264, MsgBus: 19103 2025-03-28T09:30:57.517360Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790526701185395:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:57.518188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001659/r3tmp/tmpxu8575/pdisk_1.dat 2025-03-28T09:30:57.901121Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:57.911009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:57.911197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:57.913088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30264, node 1 2025-03-28T09:30:58.026240Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:58.026283Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:58.026296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:58.026406Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19103 TClient is connected to server localhost:19103 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:30:58.674615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:30:58.712319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:30:58.838495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.004891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.069417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:00.468924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790539586088984:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:00.469782Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:00.984248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.063616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.127624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.157471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.198049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.236965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.319342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790543881056803:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.319410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.319576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790543881056808:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.323523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:01.334393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790543881056810:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:01.405985Z node 1 :TX_PROXY ERROR: Actor# [1:7486790543881056866:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:02.517583Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790526701185395:2129];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:02.517656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; PONOS {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Group (-∞, +∞)","Name (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Test","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":1}],"E-Rows":"No estimate","Predicate":"item.Amount \u003C 5000","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Limit-Filter","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"FirstMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"Bytes":{"Count":1,"Sum":19,"Max":19,"Min":19,"History":[12,19]}},"Name":"4","Push":{"LastMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"FirstMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"PauseMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"WaitTimeUs":{"Count":1,"Sum":5605,"Max":5605,"Min":5605,"History":[12,5605]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":11,"Min":5}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[12,1048576]},"Tasks":1,"OutputRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"FinishedTasks":1,"IngressRows":{"Count":1,"Sum":3,"Max":3,"Min":3},"PhysicalStageId":0,"StageDurationUs":0,"Table":[{"Path":"\/Root\/Test","ReadRows":{"Count":1,"Sum":1,"Max":1,"Min":1},"ReadBytes":{"Count":1,"Sum":20,"Max":20,"Min":20}}],"BaseTimeMs":1743154262733,"OutputBytes":{"Count":1,"Sum":19,"Max":19,"Min":19},"CpuTimeUs":{"Count":1,"Sum":8122,"Max":8122,"Min":8122,"History":[12,8122]},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"LastMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"FirstMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"Bytes":{"Count":1,"Sum":192,"Max":192,"Min":192,"History":[12,192]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"Rows":{"Count":1,"Sum":3,"Max":3,"Min":3},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"FirstMessageMs":{"Count":1,"Sum":11,"Max":11,"
Min":11},"Bytes":{"Count":1,"Sum":192,"Max":192,"Min":192,"History":[12,192]},"PauseMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"WaitTimeUs":{"Count":1,"Sum":5607,"Max":5607,"Min":5607,"History":[12,5607]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":11,"Min":5}}}]}}],"Node Type":"Merge","SortColumns":["Group (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"FirstMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"Bytes":{"Count":1,"Sum":19,"Max":19,"Min":19,"History":[13,19]}},"Name":"RESULT","Push":{"WaitTimeUs":{"Count":1,"Sum":10543,"Max":10543,"Min":10543,"History":[13,10543]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"Rows":{"Count":1,"Sum":1,"Max":1,"Min":1},"LastMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11},"FirstMessageMs":{"Count":1,"Sum":11,"Max":11,"Min":11}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min" ... 27234 table_access { name: "/Root/Test" reads { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 20372 affected_shards: 1 } compilation { duration_us: 296674 cpu_time_us: 293458 } process_cpu_time_us: 309 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"Test\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/Test\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"Test\",\"ReadColumns\":[\"Amount\",\"Comment\",\"Group\",\"Name\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"},{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"E-Rows\":\"No estimate\",\"Predicate\":\"item.Amount \\u003C 5000\",\"Name\":\"Filter\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node 
Type\":\"Limit-Filter\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"FirstMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[12,19]}},\"Name\":\"4\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"FirstMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"PauseMessageMs\":{\"Count\":1,\"Sum\":5,\"Max\":5,\"Min\":5},\"WaitTimeUs\":{\"Count\":1,\"Sum\":5605,\"Max\":5605,\"Min\":5605,\"History\":[12,5605]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":11,\"Min\":5}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[12,1048576]},\"Tasks\":1,\"OutputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FinishedTasks\":1,\"IngressRows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/Test\",\"ReadRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ReadBytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20}}],\"BaseTimeMs\":1743154262733,\"OutputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"CpuTimeUs\":{\"Count\":1,\"Sum\":8122,\"Max\":8122,\"Min\":8122,\"History\":[12,8122]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"LastMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"FirstMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192,\"History\":[12,192]}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"FirstMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192,\"History\":[12,192]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":5,\"Max\":5,\"Min\":5},\"WaitTimeUs\":{\"Count\":1,\"Sum\":5607,\"Max\":5607,\"Min\":5607,\"History\":[12,5607]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":11,\"Min\":5}}}]}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Group (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node 
Type\":\"Limit\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"FirstMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[13,19]}},\"Name\":\"RESULT\",\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":10543,\"Max\":10543,\"Min\":10543,\"History\":[13,10543]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"FirstMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[13,1048576]},\"DurationUs\":{\"Count\":1,\"Sum\":1000,\"Max\":1000,\"Min\":1000},\"InputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"ResultRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Tasks\":1,\"ResultBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"OutputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FinishedTasks\":1,\"InputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"PhysicalStageId\":1,\"StageDurationUs\":1000,\"BaseTimeMs\":1743154262733,\"OutputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"CpuTimeUs\":{\"Count\":1,\"Sum\":2740,\"Max\":2740,\"Min\":2740,\"History\":[12,2740]},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"FirstMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[12,19]}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"FirstMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[12,19]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":10481,\"Max\":10481,\"Min\":10481,\"History\":[12,10481]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":296674,\"CpuTimeUs\":293458},\"ProcessCpuTimeUs\":309,\"TotalDurationUs\":339492,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":2838},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"No estimate\",\"ReadRanges\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\"],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/Test\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"No estimate\",\"Table\":\"Test\",\"ReadColumns\":[\"Amount\",\"Comment\",\"Group\",\"Name\"],\"E-Cost\":\"No estimate\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"E-Rows\":\"No 
estimate\",\"Predicate\":\"item.Amount \\u003C 5000\",\"Name\":\"Filter\",\"E-Size\":\"No estimate\",\"E-Cost\":\"No estimate\"}],\"Node Type\":\"Filter\"}],\"Operators\":[{\"A-Rows\":1,\"A-SelfCpu\":8.122,\"A-Cpu\":8.122,\"A-Size\":19,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"A-Rows\":1,\"A-SelfCpu\":2.74,\"A-Cpu\":10.862,\"A-Size\":19,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/Test\" \'\"72057594046644480:9\" \'\"\" \'1))\n(let $2 \'(\'\"Amount\" \'\"Comment\" \'\"Group\" \'\"Name\"))\n(let $3 (KqpRowsSourceSettings $1 $2 \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $4 (Uint64 \'1))\n(let $5 (OptionalType (DataType \'String)))\n(let $6 (StructType \'(\'\"Amount\" (OptionalType (DataType \'Uint64))) \'(\'\"Comment\" $5) \'(\'\"Group\" (OptionalType (DataType \'Uint32))) \'(\'\"Name\" $5)))\n(let $7 \'(\'(\'\"_logical_id\" \'559) \'(\'\"_id\" \'\"491d49d9-8f9a9fff-8532b8ff-e9bc8e62\") \'(\'\"_wide_channels\" $6)))\n(let $8 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $3)) (lambda \'($12) (block \'(\n (let $13 (lambda \'($16) (block \'(\n (let $17 (Member $16 \'\"Amount\"))\n (return $17 (Member $16 \'\"Comment\") (Member $16 \'\"Group\") (Member $16 \'\"Name\") (Coalesce (< $17 (Uint64 \'\"5000\")) (Bool \'false)))\n ))))\n (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) (lambda \'($18 $19 $20 $21 $22) $22) $4))\n (let $15 (lambda \'($23 $24 $25 $26 $27) $23 $24 $25 $26))\n (return (FromFlow (WideMap $14 $15)))\n))) $7))\n(let $9 (DqCnMerge (TDqOutput $8 \'0) \'(\'(\'\"2\" \'\"Asc\"))))\n(let $10 (DqPhyStage \'($9) (lambda \'($28) (FromFlow (NarrowMap (Take (ToFlow $28) $4) (lambda \'($29 $30 $31 $32) (AsStruct \'(\'\"Amount\" $29) \'(\'\"Comment\" $30) \'(\'\"Group\" $31) \'(\'\"Name\" $32)))))) \'(\'(\'\"_logical_id\" \'572) \'(\'\"_id\" \'\"82918d21-3302dc77-9f896702-ff9ca4f4\"))))\n(let $11 (DqCnResult (TDqOutput $10 \'0) \'()))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($8 $10) \'($11) \'() \'(\'(\'\"type\" \'\"data\")))) \'((KqpTxResultBinding (ListType $6) \'0 \'0)) \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 339492 total_cpu_time_us: 314139 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Test\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":9},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Amount\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Comment\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Group\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Name\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Group\\\",\\\"Name\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1743154262\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"aa3c79c7-7d10a12d-5a31420b-ab611199\",\"version\":\"1.0\"}" |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OltpWriteRow+isSink [GOOD] Test command err: Trying to start YDB, gRPC: 16529, MsgBus: 14687 2025-03-28T09:30:57.515944Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790527395868916:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:57.516909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00167a/r3tmp/tmpq8MrAv/pdisk_1.dat 2025-03-28T09:30:57.869513Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:57.910466Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:57.911861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:57.915709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16529, node 1 2025-03-28T09:30:58.031322Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:58.031348Z node 1 :NET_CLASSIFIER WARN: will try to 
initialize from file: (empty maybe) 2025-03-28T09:30:58.031355Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:58.031470Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14687 TClient is connected to server localhost:14687 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:58.713327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:58.737627Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:58.751677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:58.879112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.028414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:30:59.110888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:31:00.571222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790540280772573:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:00.571395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:00.978702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.021760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.062568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.099018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.134743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.183218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.244142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790544575740382:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.244217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.244370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790544575740387:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.249465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:01.260253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790544575740389:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:01.338724Z node 1 :TX_PROXY ERROR: Actor# [1:7486790544575740442:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:02.424274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:02.516256Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790527395868916:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:02.516351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; query_phases { duration_us: 4199 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1535 affected_shards: 1 } compilation { duration_us: 54138 cpu_time_us: 51572 } process_cpu_time_us: 529 total_duration_us: 62913 total_cpu_time_us: 53636 query_phases { duration_us: 4047 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1248 affected_shards: 1 } compilation { duration_us: 49584 cpu_time_us: 47086 } process_cpu_time_us: 537 total_duration_us: 55808 total_cpu_time_us: 48871 2025-03-28T09:31:02.755369Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=5; 2025-03-28T09:31:02.765938Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 5 at tablet 72075186224037919 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:31:02.766127Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 5 at tablet 72075186224037919 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:31:02.766375Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790548870708235:2496], Table: `/Root/TestTable` ([72057594046644480:16:1]), SessionActorId: [1:7486790548870708037:2496]Got CONSTRAINT VIOLATION for table `/Root/TestTable`. ShardID=72075186224037919, Sink=[1:7486790548870708235:2496].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-28T09:31:02.766923Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790548870708228:2496], SessionActorId: [1:7486790548870708037:2496], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7486790548870708037:2496]. isRollback=0 2025-03-28T09:31:02.767156Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Zjc1MDZiOS03YWU3ZTMzLWFmODc5NzBkLTEwYWRjZWE4, ActorId: [1:7486790548870708037:2496], ActorState: ExecuteState, TraceId: 01jqe1ecn70jhmpcp3wh7e56gz, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7486790548870708229:2496] from: [1:7486790548870708228:2496] 2025-03-28T09:31:02.767241Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486790548870708229:2496] TxId: 281474976710675. Ctx: { TraceId: 01jqe1ecn70jhmpcp3wh7e56gz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjc1MDZiOS03YWU3ZTMzLWFmODc5NzBkLTEwYWRjZWE4, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:31:02.768165Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Zjc1MDZiOS03YWU3ZTMzLWFmODc5NzBkLTEwYWRjZWE4, ActorId: [1:7486790548870708037:2496], ActorState: ExecuteState, TraceId: 01jqe1ecn70jhmpcp3wh7e56gz, Create QueryResponse for error on request, msg: query_phases { duration_us: 21086 cpu_time_us: 1011 affected_shards: 1 } compilation { duration_us: 47371 cpu_time_us: 44863 } process_cpu_time_us: 507 total_duration_us: 72271 total_cpu_time_us: 46381 query_phases { duration_us: 4234 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1540 affected_shards: 1 } compilation { duration_us: 51880 cpu_time_us: 48634 } process_cpu_time_us: 442 total_duration_us: 58665 total_cpu_time_us: 50616 query_phases { duration_us: 2407 cpu_time_us: 1435 affected_shards: 1 } compilation { duration_us: 82784 cpu_time_us: 80345 } process_cpu_time_us: 443 total_duration_us: 86969 total_cpu_time_us: 82223 query_phases { duration_us: 4329 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1453 affected_shards: 1 } compilation { duration_us: 55492 cpu_time_us: 53077 } process_cpu_time_us: 459 total_duration_us: 61654 total_cpu_time_us: 54989 query_phases { duration_us: 3904 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1553 affected_shards: 1 } compilation { duration_us: 69800 cpu_time_us: 67689 } process_cpu_time_us: 489 total_duration_us: 75512 total_cpu_time_us: 69731 query_phases { duration_us: 3867 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1344 affected_shards: 1 } compilation { duration_us: 47122 cpu_time_us: 44606 } process_cpu_time_us: 394 total_duration_us: 54174 total_cpu_time_us: 46344 |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OltpWriteRow-isSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 7894, MsgBus: 1272 2025-03-28T09:30:57.517789Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790525908393488:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:57.517849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001664/r3tmp/tmp6SLkis/pdisk_1.dat 2025-03-28T09:30:57.864075Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7894, node 1 2025-03-28T09:30:57.919274Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:57.919372Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:57.920950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:58.026223Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:58.026242Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: 
(empty maybe) 2025-03-28T09:30:58.026254Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:58.026397Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1272 TClient is connected to server localhost:1272 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:58.674587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:58.697867Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:58.712035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:58.858265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:58.998645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.077913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:00.502755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790538793297015:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:00.502899Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:00.977922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.055500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.087386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.120905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.150633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.182254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.243730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790543088264827:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.243802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.243945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790543088264832:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.249495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:01.257654Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790543088264834:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:01.320637Z node 1 :TX_PROXY ERROR: Actor# [1:7486790543088264888:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:02.394210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:02.518044Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790525908393488:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:02.518142Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification >> KqpCost::ScanScriptingRangeFullScan-SourceRead >> KqpCost::IndexLookupJoin+StreamLookupJoin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 27961, MsgBus: 1160 2025-03-28T09:30:58.381545Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790530953824796:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:58.381621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001638/r3tmp/tmpWoPYLx/pdisk_1.dat 2025-03-28T09:30:58.759564Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:58.799526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:58.799625Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 27961, node 1 2025-03-28T09:30:58.800860Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:58.899851Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:58.899888Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:58.899903Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:58.900058Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1160 TClient is connected to server localhost:1160 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:59.395610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.410552Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:59.420146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.542895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.693535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.744209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:01.431932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790543838728470:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.432067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.728097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.796559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.824650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.850512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.879295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.922734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.969453Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790543838728985:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.969550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.969620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790543838728990:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.973082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:01.983294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790543838728992:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:02.038056Z node 1 :TX_PROXY ERROR: Actor# [1:7486790548133696341:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:02.908852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.381620Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790530953824796:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:03.381697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 3605, MsgBus: 3776 2025-03-28T09:30:57.884291Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790527274656850:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:57.884579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001639/r3tmp/tmpEE2XN5/pdisk_1.dat 2025-03-28T09:30:58.221607Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3605, node 1 2025-03-28T09:30:58.294789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:58.295036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:58.297333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:58.315342Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:58.315368Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:58.315375Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:58.315555Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3776 TClient is connected to server localhost:3776 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:58.849348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:58.866949Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:30:58.872682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.009309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.165235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.228238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:00.916753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790540159560331:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:00.916844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.230884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.266235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.293664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.315713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.342399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.376219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.419027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790544454528141:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.419103Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.419158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790544454528146:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.422766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:01.437343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790544454528148:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:01.537619Z node 1 :TX_PROXY ERROR: Actor# [1:7486790544454528203:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:02.471759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:02.630388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548749495916:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:31:02.630645Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548749495916:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:31:02.631003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548749495916:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:31:02.631243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548749495916:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:31:02.631393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548749495916:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:31:02.631550Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548749495916:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:31:02.631700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548749495916:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:31:02.631814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548749495916:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:31:02.631926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548749495916:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:31:02.632030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548749495916:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:31:02.632173Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037922;self_id=[1:7486790548749495916:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:31:02.632307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548749495916:2499];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:31:02.664307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7486790548749495930:2503];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:31:02.664359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7486790548749495930:2503];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:31:02.664585Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;self_id=[1:7486790548749495930:2503];tablet_id=72075186224037924;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:31:02.664686 ... cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:31:02.811783Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:31:02.811812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:31:02.811857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:31:02.811893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:31:02.812520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:31:02.812559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:31:02.812718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:31:02.812747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:31:02.812889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:31:02.812917Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:31:02.813091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:31:02.813135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:31:02.813249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:31:02.813274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:31:02.837254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:31:02.837317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:31:02.837408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:31:02.837435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:31:02.837615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:31:02.837641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:31:02.837733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:31:02.837802Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:31:02.837865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:31:02.837886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:31:02.837934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:31:02.837962Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:31:02.838406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:31:02.838448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:31:02.838663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:31:02.838734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:31:02.838907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:31:02.838935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:31:02.839141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:31:02.839174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:31:02.839319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:31:02.839349Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:31:02.873436Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:02.874258Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:02.878908Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:02.879705Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:02.882726Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:02.885537Z node 1 
:TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:02.887147Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:02.890886Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:02.892564Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:02.894403Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:02.895082Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790527274656850:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:02.895143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:31:03.043315Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-28T09:31:03.043330Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-28T09:31:03.043646Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;self_id=[1:7486790548749495911:2497];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037923;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037927;receive=72075186224037928; 2025-03-28T09:31:03.043979Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; query_phases { duration_us: 241786 table_access { name: "/Root/TestTable" reads { rows: 2 bytes: 72 } } cpu_time_us: 89733 } compilation { duration_us: 361262 cpu_time_us: 357811 } process_cpu_time_us: 329 total_duration_us: 610591 total_cpu_time_us: 447873 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 10761, MsgBus: 20011 2025-03-28T09:30:57.517920Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790526851792554:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:57.518487Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00166e/r3tmp/tmpHkSH82/pdisk_1.dat 2025-03-28T09:30:57.883887Z node 1 
:IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10761, node 1 2025-03-28T09:30:57.931679Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:57.931771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:57.935782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:58.027222Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:58.027239Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:58.027244Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:58.027336Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20011 TClient is connected to server localhost:20011 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:58.674777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:58.710963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:58.858060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.010376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.087376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:00.469100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790539736696207:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:00.469770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:00.977954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.028620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.071044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.144896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.187203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.233825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.284365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790544031664019:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.284450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.284641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790544031664024:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.288385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:01.299645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790544031664026:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:01.376639Z node 1 :TX_PROXY ERROR: Actor# [1:7486790544031664080:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:02.448066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:02.478544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:02.510140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:02.517797Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790526851792554:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:02.517865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; /Root/Join1_2 1 19 /Root/Join1_1 8 136 >> KqpCost::ScanQueryRangeFullScan-SourceRead >> KqpCost::QuerySeviceRangeFullScan [GOOD] >> TConsoleTests::TestSetDefaultStorageUnitsQuota [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] >> TConsoleTests::TestCreateTenantExtSubdomain [GOOD] >> TConsoleTests::TestCreateSharedTenant >> KqpCost::OlapRange >> KqpCost::Range ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OltpWriteRow-isSink [GOOD] Test command err: Trying to start YDB, gRPC: 27622, MsgBus: 12059 2025-03-28T09:30:58.617904Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790532135424546:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:58.617992Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001608/r3tmp/tmpA9BRf0/pdisk_1.dat 2025-03-28T09:30:58.992875Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:59.020118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:59.020243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:59.021677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27622, node 1 2025-03-28T09:30:59.091216Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:59.091237Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-03-28T09:30:59.091244Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:59.091366Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12059 TClient is connected to server localhost:12059 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:59.590196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.611570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.733476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.874560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:30:59.951320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:01.532736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790545020328201:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.532845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:01.816562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.838750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.863562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.890730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:01.928386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:02.004637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:02.088660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790549315296019:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:02.088766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:02.088999Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790549315296024:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:02.093350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:02.103062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790549315296027:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:02.180850Z node 1 :TX_PROXY ERROR: Actor# [1:7486790549315296078:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:03.199717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 query_phases { duration_us: 567 cpu_time_us: 567 } query_phases { duration_us: 2911 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1102 affected_shards: 1 } compilation { duration_us: 104192 cpu_time_us: 100748 } process_cpu_time_us: 869 total_duration_us: 109276 total_cpu_time_us: 103286 query_phases { duration_us: 601 cpu_time_us: 601 } query_phases { duration_us: 3703 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1063 affected_shards: 1 } compilation { duration_us: 88074 cpu_time_us: 84768 } process_cpu_time_us: 998 total_duration_us: 94993 total_cpu_time_us: 87430 2025-03-28T09:31:03.618100Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790532135424546:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:03.618205Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:31:03.819413Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790553610263866:2527], TxId: 281474976710676, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe1edg94y7rrjed46k6q18b. SessionId : ydb://session/3?node_id=1&id=MTMxMmIwMDYtYzI3YTg4NzktNTA4OWY0M2QtYjMzNmU1ZmE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:31:03.819855Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486790553610263867:2528], TxId: 281474976710676, task: 2. Ctx: { TraceId : 01jqe1edg94y7rrjed46k6q18b. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MTMxMmIwMDYtYzI3YTg4NzktNTA4OWY0M2QtYjMzNmU1ZmE=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486790553610263862:2488], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:31:03.820304Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTMxMmIwMDYtYzI3YTg4NzktNTA4OWY0M2QtYjMzNmU1ZmE=, ActorId: [1:7486790553610263634:2488], ActorState: ExecuteState, TraceId: 01jqe1edg94y7rrjed46k6q18b, Create QueryResponse for error on request, msg: query_phases { duration_us: 985 cpu_time_us: 985 } query_phases { duration_us: 8833 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 10347 affected_shards: 1 } query_phases { duration_us: 25495 cpu_time_us: 25968 } compilation { duration_us: 217044 cpu_time_us: 208318 } process_cpu_time_us: 1906 total_duration_us: 258487 total_cpu_time_us: 247524 query_phases { duration_us: 801 cpu_time_us: 801 } query_phases { duration_us: 2366 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 2571 affected_shards: 1 } query_phases { duration_us: 1227 cpu_time_us: 1659 } query_phases { duration_us: 3106 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1137 affected_shards: 1 } compilation { duration_us: 231910 cpu_time_us: 226848 } process_cpu_time_us: 1521 total_duration_us: 243840 total_cpu_time_us: 234537 query_phases { duration_us: 688 cpu_time_us: 688 } query_phases { duration_us: 4538 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 4183 affected_shards: 1 } query_phases { duration_us: 1072 cpu_time_us: 679 affected_shards: 1 } compilation { duration_us: 224349 cpu_time_us: 220138 } process_cpu_time_us: 1448 total_duration_us: 234229 total_cpu_time_us: 227136 query_phases { duration_us: 687 cpu_time_us: 687 } query_phases { duration_us: 3688 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 2994 affected_shards: 1 } query_phases { duration_us: 4658 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1678 affected_shards: 1 } compilation { duration_us: 203342 cpu_time_us: 198788 } process_cpu_time_us: 1470 total_duration_us: 216571 total_cpu_time_us: 205617 query_phases { duration_us: 551 cpu_time_us: 551 } query_phases { duration_us: 3593 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 956 affected_shards: 1 } compilation { duration_us: 94692 cpu_time_us: 89593 } process_cpu_time_us: 893 total_duration_us: 102616 total_cpu_time_us: 91993 query_phases { duration_us: 513 cpu_time_us: 513 } query_phases { duration_us: 3018 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 926 affected_shards: 1 } compilation { duration_us: 64224 cpu_time_us: 60672 } process_cpu_time_us: 909 total_duration_us: 69595 total_cpu_time_us: 63020 >> KqpCost::IndexLookupAndTake+useSink [GOOD] >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD] >> KqpCost::PointLookup >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist [GOOD] >> TConfigsDispatcherTests::TestYamlEndToEnd ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 8566, MsgBus: 4871 2025-03-28T09:31:00.137909Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790542072223222:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:00.137991Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015ef/r3tmp/tmpRPGw7f/pdisk_1.dat 2025-03-28T09:31:00.421160Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8566, node 1 2025-03-28T09:31:00.476565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:00.476674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:00.478829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:00.503877Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:00.503905Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:00.503920Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:00.504060Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4871 TClient is connected to server localhost:4871 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:00.986680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:01.011972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:01.165753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:01.329612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:01.402452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:31:03.277587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790554957126878:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:03.277744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:03.578443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.606335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.629285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.657728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.684182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.756246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.802832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790554957127396:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:03.802905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:03.803085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790554957127401:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:03.808156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:03.819410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790554957127403:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:03.909829Z node 1 :TX_PROXY ERROR: Actor# [1:7486790554957127456:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:05.138409Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790542072223222:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:05.139010Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 32671, MsgBus: 22692 2025-03-28T09:31:00.701500Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790538998554917:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:00.701676Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015e4/r3tmp/tmpiPbHjv/pdisk_1.dat 2025-03-28T09:31:01.066517Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:01.103081Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:01.103183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:01.105214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32671, node 1 2025-03-28T09:31:01.150787Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:01.150807Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:01.150814Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:01.150935Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22692 TClient is connected to server localhost:22692 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:31:01.632935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:01.654645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:01.810228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:01.947184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:02.027089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:03.475888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790551883458588:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:03.476029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:03.774258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.806428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.833563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.863345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.931272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.974275Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:04.023205Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790556178426404:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:04.023281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:04.024564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790556178426409:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:04.028774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:04.062484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790556178426411:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:04.118469Z node 1 :TX_PROXY ERROR: Actor# [1:7486790556178426464:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:05.324802Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154265335, txId: 281474976710671] shutting down >> DataShardVolatile::DistributedWriteThenReadIterator [GOOD] >> DataShardVolatile::DistributedWriteThenReadIteratorStream |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 17279, MsgBus: 31976 2025-03-28T09:31:00.586419Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790542551674989:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:00.586508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015eb/r3tmp/tmp81u1Jo/pdisk_1.dat 2025-03-28T09:31:00.957614Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17279, node 1 2025-03-28T09:31:01.010537Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:01.010569Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:01.010583Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:01.010702Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:31:01.024247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:01.024368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:01.025795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31976 TClient is connected to server localhost:31976 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:31:01.476649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:01.496278Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:01.505335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:01.685091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:01.830232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:01.910819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:03.371759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790555436578655:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:03.371911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:03.626009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.655341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.684807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.713137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.745829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.813633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.895432Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790555436579177:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:03.895536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:03.895712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790555436579182:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:03.899401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:03.910027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790555436579184:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:03.970813Z node 1 :TX_PROXY ERROR: Actor# [1:7486790555436579236:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:04.941543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:05.586129Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790542551674989:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:05.586209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 9856, MsgBus: 4465 2025-03-28T09:31:00.612964Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790540485871625:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:00.613156Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015e6/r3tmp/tmp7Vghi3/pdisk_1.dat 2025-03-28T09:31:00.966729Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:00.988154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:00.988250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:00.989814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9856, node 1 2025-03-28T09:31:01.081114Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:01.081141Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:01.081149Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:01.081294Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4465 TClient is connected to server localhost:4465 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:01.578227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:01.611566Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:01.786641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:01.935035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:31:02.004005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:03.634160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790553370775296:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:03.634646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:03.893618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.926388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.998197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:04.033535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:04.071896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:04.113386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:04.154604Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790557665743105:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:04.154728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:04.154794Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790557665743110:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:04.158516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:04.171544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790557665743112:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:04.246023Z node 1 :TX_PROXY ERROR: Actor# [1:7486790557665743166:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:05.173124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:05.619996Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790540485871625:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:05.620168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 >> TConsoleTests::TestRestartConsoleAndPools [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem >> KqpCost::OlapPointLookup [GOOD] >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] >> TConsoleConfigHelpersTests::TestConfigCourier >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink >> TPersQueueTest::TestReadPartitionStatus [GOOD] >> TPersQueueTest::TxCounters >> KqpCost::IndexLookupAndTake-useSink [GOOD] >> TConsoleConfigHelpersTests::TestConfigCourier [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriber ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapPointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 18191, MsgBus: 12299 2025-03-28T09:31:01.688626Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790544879172249:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:01.688684Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015e0/r3tmp/tmpsmPIUT/pdisk_1.dat 2025-03-28T09:31:02.063167Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18191, node 1 2025-03-28T09:31:02.107034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:02.107176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:02.111478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:02.147212Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:02.147235Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:02.147241Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:02.147371Z node 1 :NET_CLASSIFIER ERROR: got bad distributable 
configuration TClient is connected to server localhost:12299 TClient is connected to server localhost:12299 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:02.657095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:02.687913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:02.829233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:02.984079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:03.059020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:04.609778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790557764075906:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:04.609900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:04.890963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:04.916319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:04.941858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:04.967525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:04.991300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:05.029591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:05.066530Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790562059043710:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:05.066590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790562059043715:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:05.066620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:05.069848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:05.079998Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790562059043717:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:05.152182Z node 1 :TX_PROXY ERROR: Actor# [1:7486790562059043771:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:06.079043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:06.217930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790566354011493:2501];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:31:06.218152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790566354011493:2501];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:31:06.218397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790566354011493:2501];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:31:06.218540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790566354011493:2501];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:31:06.218663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790566354011493:2501];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:31:06.218810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790566354011493:2501];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:31:06.218978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790566354011493:2501];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:31:06.219101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790566354011493:2501];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:31:06.219230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790566354011493:2501];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:31:06.219364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790566354011493:2501];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:31:06.219486Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037926;self_id=[1:7486790566354011493:2501];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:31:06.219591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790566354011493:2501];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:31:06.231720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7486790566354011482:2499];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:31:06.231825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7486790566354011482:2499];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:31:06.231978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7486790566354011482:2499];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:31:06.232107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7486790566354011482:2499];tablet_id=72075186224037920;process=TTxInitSchema::Execute; ... malizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:31:06.368564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:31:06.368679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:31:06.368707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:31:06.368791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:31:06.368827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:31:06.368869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:31:06.368893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:31:06.370291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:31:06.370343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2025-03-28T09:31:06.370543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:31:06.370569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:31:06.370743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:31:06.370770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:31:06.370967Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:31:06.371019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:31:06.371139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:31:06.371171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:31:06.372342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:31:06.372378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:31:06.372475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:31:06.372498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:31:06.372718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:31:06.372747Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:31:06.372830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:31:06.372864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:31:06.372947Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:31:06.372973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:31:06.373008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:31:06.373037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:31:06.373598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:31:06.373673Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:31:06.373859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:31:06.373885Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:31:06.374040Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:31:06.374082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:31:06.374358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:31:06.374398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:31:06.374539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:31:06.374570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:31:06.418975Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:06.419155Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:06.423908Z node 1 :TX_COLUMNSHARD_TX 
WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:06.425404Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:06.428369Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:06.431194Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:06.432647Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:06.436959Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:06.437101Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:06.442858Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:06.562301Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-28T09:31:06.562302Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-28T09:31:06.563073Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-28T09:31:06.688645Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790544879172249:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:06.688770Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2 >> KqpCost::IndexLookup-useSink [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 24337, MsgBus: 17142 2025-03-28T09:31:02.533927Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790548211844990:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:02.533994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015d9/r3tmp/tmpMo1MPc/pdisk_1.dat 2025-03-28T09:31:02.857705Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:02.877004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:02.877135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:02.880295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24337, node 1 2025-03-28T09:31:02.950041Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:02.950074Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:02.950081Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:02.950219Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17142 TClient is connected to server localhost:17142 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:03.460325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:03.474950Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:31:03.479695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:03.643592Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:03.812671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:03.877774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
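
Each "waiting..." marker above corresponds to the test client blocking until a proposed scheme transaction (the ESchemeOpCreateTable operations) completes; the schemeshard may first answer "NotifyTxCompletion, unknown transaction" if it has not yet registered the txId. A simplified, self-contained sketch of that propose-then-poll pattern follows; ProposeCreateTable and IsTxComplete are hypothetical stand-ins for the real testlib client calls, not YDB API.

// Simplified sketch of the "propose then wait" pattern behind the repeated
// "waiting..." lines above. All function names here are hypothetical.
#include <chrono>
#include <cstdint>
#include <iostream>
#include <string>
#include <thread>

uint64_t ProposeCreateTable(const std::string& path) {
    (void)path;                 // hypothetical: would submit an ESchemeOpCreateTable proposal
    return 281474976715658ULL;  // txId, as the schemeshard would report it
}

bool IsTxComplete(uint64_t txId) {
    (void)txId;                 // hypothetical: would query the schemeshard for tx status
    static int polls = 0;
    return ++polls > 3;         // pretend the tx completes after a few polls
}

int main() {
    const uint64_t txId = ProposeCreateTable("/Root/SecondaryKeys");
    while (!IsTxComplete(txId)) {
        std::cout << "waiting...\n";  // the progress marker seen in the log
        std::this_thread::sleep_for(std::chrono::milliseconds(50));
    }
    return 0;
}
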
2025-03-28T09:31:05.424496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790561096748657:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:05.424644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:05.740782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-28T09:31:05.768623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480
2025-03-28T09:31:05.794973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-28T09:31:05.822322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480
2025-03-28T09:31:05.852294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480
2025-03-28T09:31:05.919568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480
2025-03-28T09:31:05.958467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790561096749173:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:05.958545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:05.958588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790561096749178:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:05.962548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480
2025-03-28T09:31:05.972191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790561096749180:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:06.069821Z node 1 :TX_PROXY ERROR: Actor# [1:7486790565391716529:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:07.043579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:07.534032Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790548211844990:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:07.534116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 25105, MsgBus: 65421 2025-03-28T09:31:02.502488Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790551460380524:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:02.507579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015d8/r3tmp/tmpJWROTl/pdisk_1.dat 2025-03-28T09:31:02.889715Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25105, node 1 2025-03-28T09:31:02.925886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:02.926007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:02.927841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:02.969392Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:02.969416Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:02.969427Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:02.969567Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:65421 TClient is connected to server localhost:65421 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:03.466761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:03.486119Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:03.499367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:03.652461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:03.811514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:31:03.889973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:05.514859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790564345284047:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:05.514964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:05.752854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:31:05.779485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:31:05.805997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:31:05.832817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:31:05.860065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:31:05.930071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:31:05.963439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790564345284560:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:05.963504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:05.963580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790564345284565:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:05.966852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:31:05.974448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790564345284567:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:06.069969Z node 1 :TX_PROXY ERROR: Actor# [1:7486790568640251917:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:07.101164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:07.497459Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790551460380524:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:07.497565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 >> TConsoleTests::TestSetDefaultComputationalUnitsQuota [GOOD] >> TConsoleTests::TestTenantConfigConsistency >> TConsoleTests::TestCreateSharedTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenant >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 29565, MsgBus: 1415 2025-03-28T09:31:04.006299Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790557503871566:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:04.006403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015d2/r3tmp/tmpZoyQNg/pdisk_1.dat 2025-03-28T09:31:04.368544Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29565, node 1 2025-03-28T09:31:04.386929Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:31:04.387015Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:31:04.400058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:04.400175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:04.401651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:04.416835Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:04.416863Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:04.416872Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:04.417046Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1415 TClient is connected to server localhost:1415 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:04.915689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:04.950671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:05.090358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:05.236853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:05.311033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:07.288106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790570388775250:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:07.288209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:07.657423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:31:07.685507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:31:07.712809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:31:07.779514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:31:07.812307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:31:07.882854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:31:07.949269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790570388775769:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:07.949334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:07.949391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790570388775774:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:07.952871Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:31:07.962171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790570388775776:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:08.034643Z node 1 :TX_PROXY ERROR: Actor# [1:7486790574683743127:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:08.833424Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 2025-03-28T09:31:10.014956Z, after 1.181854s 2025-03-28T09:31:08.833568Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:31:08.869116Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2025-03-28T09:31:09.006320Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790557503871566:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:09.006413Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:31:09.035404Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7486790578978710712:2488] 2025-03-28T09:31:09.035443Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7486790574683743381:2488] 2025-03-28T09:31:09.040588Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: snapshot 1743154269087:281474976710671 created 2025-03-28T09:31:09.040792Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790578978710722:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ejn3892j5d2yp61ghckp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 3, stages: 2 2025-03-28T09:31:09.040949Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2025-03-28T09:31:09.040981Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2025-03-28T09:31:09.041088Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key sets: 1 2025-03-28T09:31:09.041212Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-28T09:31:09.041259Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790578978710722:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ejn3892j5d2yp61ghckp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Start resolving tablets nodes... (1) 2025-03-28T09:31:09.041344Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710672. 
Shard resolve complete, resolved shards: 1 2025-03-28T09:31:09.041369Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790578978710722:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ejn3892j5d2yp61ghckp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-03-28T09:31:09.041405Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790578978710722:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ejn3892j5d2yp61ghckp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037914] 2025-03-28T09:31:09.041436Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-28T09:31:09.041501Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790578978710722:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ejn3892j5d2yp61ghckp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmY ... : 01jqe1ejn3892j5d2yp61ghckp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:31:09.051943Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486790578978710727:2496], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=. CustomerSuppliedId : . TraceId : 01jqe1ejn3892j5d2yp61ghckp. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:31:09.051955Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished 2025-03-28T09:31:09.051965Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486790578978710727:2496], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=. CustomerSuppliedId : . TraceId : 01jqe1ejn3892j5d2yp61ghckp. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-28T09:31:09.052093Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. pass away 2025-03-28T09:31:09.052212Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486790578978710728:2497], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=. TraceId : 01jqe1ejn3892j5d2yp61ghckp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:31:09.052244Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710672;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:31:09.052402Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790578978710722:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ejn3892j5d2yp61ghckp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7486790578978710727:2496], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 9310 Tasks { TaskId: 1 CpuTimeUs: 1102 FinishTimeMs: 1743154269051 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 1 ReadBytes: 20 AffectedPartitions: 1 } IngressRows: 3 ComputeCpuTimeUs: 130 BuildCpuTimeUs: 972 HostName: "ghrun-mdsjyjltvm" NodeId: 1 StartTimeMs: 1743154269051 CreateTimeMs: 1743154269042 } MaxMemoryUsage: 1048576 } 2025-03-28T09:31:09.052423Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-03-28T09:31:09.052498Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jqe1ejn3892j5d2yp61ghckp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7486790578978710727:2496] 2025-03-28T09:31:09.052558Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790578978710722:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ejn3892j5d2yp61ghckp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7486790578978710728:2497], 2025-03-28T09:31:09.052732Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790578978710722:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ejn3892j5d2yp61ghckp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [1:7486790574683743381:2488], seqNo: 1, nRows: 1 2025-03-28T09:31:09.054713Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7486790578978710730:2497] 2025-03-28T09:31:09.054786Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486790578978710728:2497], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=. TraceId : 01jqe1ejn3892j5d2yp61ghckp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. CA StateFunc 271646922 2025-03-28T09:31:09.054829Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-03-28T09:31:09.054837Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished 2025-03-28T09:31:09.054845Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486790578978710728:2497], TxId: 281474976710672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=. TraceId : 01jqe1ejn3892j5d2yp61ghckp. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. 
All channels and sinks finished 2025-03-28T09:31:09.054906Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. pass away 2025-03-28T09:31:09.054981Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710672;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:31:09.055040Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790578978710722:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ejn3892j5d2yp61ghckp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7486790578978710728:2497], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 2770 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 721 FinishTimeMs: 1743154269054 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 221 BuildCpuTimeUs: 500 HostName: "ghrun-mdsjyjltvm" NodeId: 1 CreateTimeMs: 1743154269047 } MaxMemoryUsage: 1048576 } 2025-03-28T09:31:09.055090Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jqe1ejn3892j5d2yp61ghckp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7486790578978710728:2497] 2025-03-28T09:31:09.055097Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-03-28T09:31:09.056844Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7486790578978710722:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ejn3892j5d2yp61ghckp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 19178 DurationUs: 14278 Tables { TablePath: "/Root/Test" ReadRows: 1 ReadBytes: 20 AffectedPartitions: 1 } ExecuterCpuTimeUs: 7098 StartTimeMs: 1743154269040 FinishTimeMs: 1743154269055 Stages { StageId: 1 StageGuid: "d0c077ce-533c52cc-a0592c3e-9832ee3a" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'1)) (lambda \'($2 $3 $4 $5) (AsStruct \'(\'\"Amount\" $2) \'(\'\"Comment\" $3) \'(\'\"Group\" $4) \'(\'\"Name\" $5)))))))\n)\n" ComputeActors { CpuTimeUs: 2770 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 721 FinishTimeMs: 1743154269054 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 221 BuildCpuTimeUs: 500 HostName: "ghrun-mdsjyjltvm" NodeId: 1 CreateTimeMs: 1743154269047 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743154269051 } Stages { StageGuid: "ab53dcc4-32c1c6ca-6e5545e6-8ccca73f" Program: "(\n(return (lambda \'($1) (block \'(\n (let $2 (lambda \'($5) (block \'(\n (let $6 (Member $5 \'\"Amount\"))\n (return $6 (Member $5 \'\"Comment\") (Member $5 \'\"Group\") (Member $5 \'\"Name\") (Coalesce (< $6 (Uint64 \'\"5000\")) (Bool \'false)))\n ))))\n (let $3 (WideFilter (ExpandMap (ToFlow $1) $2) (lambda \'($7 $8 $9 $10 $11) $11) (Uint64 \'1)))\n (let $4 (lambda \'($12 $13 $14 $15 $16) $12 $13 $14 $15))\n (return (FromFlow (WideMap $3 $4)))\n))))\n)\n" BaseTimeMs: 1743154269051 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":6,\"Plans\":[{\"Node Type\":\"ResultSet\",\"PlanNodeId\":5,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Limit\":\"1\",\"Name\":\"Limit\"}],\"PlanNodeId\":4,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":3,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node 
Type\":\"Limit-Filter\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Limit\":\"1\",\"Name\":\"Limit\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Name\":\"Filter\",\"Predicate\":\"item.Amount \\u003C 5000\"}],\"PlanNodeId\":2,\"Plans\":[{\"Node Type\":\"TableFullScan\",\"Operators\":[{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/Test\",\"ReadColumns\":[\"Amount\",\"Comment\",\"Group\",\"Name\"],\"ReadRanges\":[\"Group (-∞, +∞)\",\"Name (-∞, +∞)\"],\"ReadRangesPointPrefixLen\":\"0\",\"Scan\":\"Parallel\",\"Table\":\"Test\"}],\"PlanNodeId\":1,\"StageGuid\":\"\",\"Tables\":[\"Test\"]}],\"StageGuid\":\"ab53dcc4-32c1c6ca-6e5545e6-8ccca73f\",\"Stats\":{\"BaseTimeMs\":1743154269051,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"SortColumns\":[\"Group (Asc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"d0c077ce-533c52cc-a0592c3e-9832ee3a\",\"Stats\":{\"BaseTimeMs\":1743154269051,\"ComputeNodes\":[{\"CpuTimeUs\":2770,\"Tasks\":[{\"ComputeTimeUs\":221,\"FinishTimeMs\":1743154269054,\"Host\":\"ghrun-mdsjyjltvm\",\"InputBytes\":19,\"InputRows\":1,\"NodeId\":1,\"OutputBytes\":19,\"OutputRows\":1,\"ResultBytes\":19,\"ResultRows\":1,\"TaskId\":2}]}],\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 1677 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\001\022\013\010\322\025\020\336H\030\260^ \002" } } 2025-03-28T09:31:09.056899Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790578978710722:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ejn3892j5d2yp61ghckp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:31:09.056940Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790578978710722:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ejn3892j5d2yp61ghckp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmIzZjg2NTktNDJkYWRmMzUtZjc5MTFmYjItMTAwMTI5NTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.012080s ReadRows: 1 ReadBytes: 20 ru: 8 rate limiter was not found force flag: 1 2025-03-28T09:31:09.057785Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154269087, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] Test command err: 2025-03-28T09:26:44.732954Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:44.821637Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:44.844148Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:44.844882Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:44.852478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:44.852661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:44.852852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:44.852973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:44.853082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:44.853191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:44.853312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:44.853416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:44.853518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:44.853644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.853743Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:44.853856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:44.880686Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:44.881006Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:44.881069Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:44.881312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:44.881497Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:44.881609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:44.881668Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:44.881770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:44.881836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:44.882021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:44.882055Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:44.882229Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:44.882314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:44.882358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:44.882390Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:44.882620Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:44.882702Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:44.882763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:44.882798Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:44.882873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:44.882909Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:44.882940Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:44.882990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:44.883028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:44.883059Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:44.883442Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=40; 2025-03-28T09:26:44.883543Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=48; 2025-03-28T09:26:44.883647Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=61; 2025-03-28T09:26:44.883770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-03-28T09:26:44.883932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:44.883981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:44.884031Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:44.884204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:44.884247Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.884268Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.884398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:44.884448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:44.884477Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:44.884643Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:44.884679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:44.884705Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:44.884832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:44.884875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:44.884919Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:8568];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:74;blob_ra
nge:[NO_BLOB:0:2664];;column_id:9;chunk_idx:75;blob_range:[NO_BLOB:0:8552];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:8488];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:54;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:55;blob_range:[NO_BLOB:0:8472];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:8;blob_range:[NO_BL
OB:0:2688];;column_id:1;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:1;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:1;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:1;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:1;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:1;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:8248];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:35;blob_range:[NO_BLOB:0:8240];;;;switched=(portion_id:55;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2117976;index_
size:24;meta:((produced=INSERTED;)););(portion_id:54;path_id:1;records_count:25666;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2549080;index_size:24;meta:((produced=SPLIT_COMPACTED;)););; 2025-03-28T09:31:08.852552Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11195:12822];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-28T09:31:08.854720Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11195:12822];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> TConsoleConfigHelpersTests::TestConfigSubscriber [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope >> KqpCost::Range [GOOD] >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] >> ReadSessionImplTest::ReconnectOnTmpError >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 29676, MsgBus: 20436 2025-03-28T09:31:05.503858Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790560426431040:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:05.504067Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015be/r3tmp/tmpjdQTrS/pdisk_1.dat 2025-03-28T09:31:05.786177Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29676, node 1 2025-03-28T09:31:05.848964Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:05.848990Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:05.848997Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:05.849139Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:31:05.856618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-03-28T09:31:05.856776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:05.858980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20436 TClient is connected to server localhost:20436 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:06.365310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:06.395578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:06.545639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:06.694699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:06.754035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:08.253198Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790573311334724:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:08.253313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:08.563738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:08.589578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:08.615458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:08.643040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:08.669312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:08.740400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:08.777363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790573311335237:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:08.777424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:08.777536Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790573311335242:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:08.781060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:08.789964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790573311335244:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:08.881986Z node 1 :TX_PROXY ERROR: Actor# [1:7486790573311335298:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:09.841712Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154269864, txId: 281474976710671] shutting down >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> ReadSessionImplTest::ForcefulDestroyPartitionStream >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> KqpCost::PointLookup [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> TMultiversionObjectMap::MonteCarlo [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 23715, MsgBus: 20291 2025-03-28T09:31:05.986749Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790561859626722:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:05.986866Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015ba/r3tmp/tmpFC1Rsd/pdisk_1.dat 2025-03-28T09:31:06.295589Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23715, node 1 2025-03-28T09:31:06.370826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:06.373570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:31:06.377684Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:06.406032Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:06.406057Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:06.406063Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:06.406202Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20291 TClient is connected to server localhost:20291 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:06.980430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:06.997895Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:07.012497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:07.133843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:07.254575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:31:07.332329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:08.818522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790574744530400:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:08.818639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:09.034042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:09.056424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:09.081472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:09.107533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:09.135672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:09.164448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:09.198736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790579039498206:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:09.198801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:09.198803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790579039498211:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:09.201723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:09.209727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790579039498213:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:09.294199Z node 1 :TX_PROXY ERROR: Actor# [1:7486790579039498267:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:10.082680Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 2025-03-28T09:31:12.001481Z, after 1.919123s 2025-03-28T09:31:10.082828Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:31:10.128848Z node 1 :KQP_GATEWAY DEBUG: Load table metadata from cache by path, request Path: /Root/Test 2025-03-28T09:31:10.279353Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Start KqpSnapshotManager at [1:7486790583334465854:2488] 2025-03-28T09:31:10.279388Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: got snapshot request from [1:7486790583334465820:2488] 2025-03-28T09:31:10.284023Z node 1 :KQP_RESOURCE_MANAGER DEBUG: KqpSnapshotManager: snapshot 1743154270326:281474976710671 created 2025-03-28T09:31:10.284258Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790583334465864:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ekw20yvc8d0bwd2qcww8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 3, stages: 2 2025-03-28T09:31:10.284434Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2025-03-28T09:31:10.284471Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,1], InputsCount: 1, OutputsCount: 1 2025-03-28T09:31:10.284642Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key sets: 1 2025-03-28T09:31:10.284835Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 9] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL, String : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-28T09:31:10.284909Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790583334465864:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ekw20yvc8d0bwd2qcww8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Start resolving tablets nodes... (1) 2025-03-28T09:31:10.285005Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710672. Shard resolve complete, resolved shards: 1 2025-03-28T09:31:10.285042Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790583334465864:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ekw20yvc8d0bwd2qcww8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Shards nodes resolved, success: 1, failed: 0 2025-03-28T09:31:10.285080Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790583334465864:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ekw20yvc8d0bwd2qcww8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037914] 2025-03-28T09:31:10.285105Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Schedule Snapshot request 2025-03-28T09:31:10.285160Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790583334465864:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ekw20yvc8d0bwd2qcww8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '() (block '( (let $1 (KqpTable '"/Root/Test" '"72057594046644480:9" '"" '1)) (let $2 '('"Amount" '"Comment" '"Group" '"Name")) (let $3 (KqpWideReadTableRanges $1 (Void) $2 '() '())) (return (FromFlow (WideFilter $3 (lambda '($4 $5 $6 $7) (Coalesce (< $4 (Uint64 '"5000")) (Bool 'false))) ... 01jqe1ekw20yvc8d0bwd2qcww8. SessionId : ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-03-28T09:31:10.298733Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. Tasks execution finished 2025-03-28T09:31:10.298786Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486790583334465869:2495], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==. TraceId : 01jqe1ekw20yvc8d0bwd2qcww8. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Compute state finished. All channels and sinks finished 2025-03-28T09:31:10.298925Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 1. pass away 2025-03-28T09:31:10.298965Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486790583334465871:2496], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jqe1ekw20yvc8d0bwd2qcww8. SessionId : ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2025-03-28T09:31:10.299027Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710672;task_id=1;success=1;message={
<main>: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:31:10.299092Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790583334465864:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ekw20yvc8d0bwd2qcww8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7486790583334465869:2495], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 3536 Tasks { TaskId: 1 CpuTimeUs: 1078 FinishTimeMs: 1743154270298 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } ComputeCpuTimeUs: 150 BuildCpuTimeUs: 928 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-mdsjyjltvm" NodeId: 1 CreateTimeMs: 1743154270286 } MaxMemoryUsage: 1048576 } 2025-03-28T09:31:10.299174Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jqe1ekw20yvc8d0bwd2qcww8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7486790583334465869:2495] 2025-03-28T09:31:10.299287Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790583334465864:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ekw20yvc8d0bwd2qcww8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [1:7486790583334465871:2496], 2025-03-28T09:31:10.299362Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-03-28T09:31:10.299430Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790583334465864:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ekw20yvc8d0bwd2qcww8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Send TEvStreamData to [1:7486790583334465820:2488], seqNo: 1, nRows: 1 2025-03-28T09:31:10.300961Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710672, send ack to channelId: 2, seqNo: 1, enough: 0, freeSpace: 8388488, to: [1:7486790583334465873:2496] 2025-03-28T09:31:10.301029Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486790583334465871:2496], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jqe1ekw20yvc8d0bwd2qcww8. SessionId : ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. CA StateFunc 271646922 2025-03-28T09:31:10.301067Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2025-03-28T09:31:10.301078Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. Tasks execution finished 2025-03-28T09:31:10.301089Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486790583334465871:2496], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jqe1ekw20yvc8d0bwd2qcww8. SessionId : ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Compute state finished. 
All channels and sinks finished 2025-03-28T09:31:10.301174Z node 1 :KQP_COMPUTE DEBUG: TxId: 281474976710672, task: 2. pass away 2025-03-28T09:31:10.301247Z node 1 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976710672;task_id=2;success=1;message={
<main>: Error: COMPUTE_STATE_FINISHED }; 2025-03-28T09:31:10.301352Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790583334465864:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ekw20yvc8d0bwd2qcww8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [1:7486790583334465871:2496], task: 2, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 9761 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 844 FinishTimeMs: 1743154270301 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 214 BuildCpuTimeUs: 630 HostName: "ghrun-mdsjyjltvm" NodeId: 1 CreateTimeMs: 1743154270287 } MaxMemoryUsage: 1048576 } 2025-03-28T09:31:10.301427Z node 1 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976710672, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-03-28T09:31:10.301429Z node 1 :KQP_EXECUTER INFO: TxId: 281474976710672. Ctx: { TraceId: 01jqe1ekw20yvc8d0bwd2qcww8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [1:7486790583334465871:2496] 2025-03-28T09:31:10.303504Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7486790583334465864:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ekw20yvc8d0bwd2qcww8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Full stats: { CpuTimeUs: 23408 DurationUs: 17115 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } ExecuterCpuTimeUs: 10111 StartTimeMs: 1743154270284 FinishTimeMs: 1743154270301 Stages { StageGuid: "a67aa357-682e1582-22ca0da4-f2463862" Program: "(\n(return (lambda \'() (block \'(\n (let $1 (KqpTable \'\"/Root/Test\" \'\"72057594046644480:9\" \'\"\" \'1))\n (let $2 \'(\'\"Amount\" \'\"Comment\" \'\"Group\" \'\"Name\"))\n (let $3 (KqpWideReadTableRanges $1 (Void) $2 \'() \'()))\n (return (FromFlow (WideFilter $3 (lambda \'($4 $5 $6 $7) (Coalesce (< $4 (Uint64 \'\"5000\")) (Bool \'false))) (Uint64 \'1))))\n))))\n)\n" ComputeActors { CpuTimeUs: 3536 Tasks { TaskId: 1 CpuTimeUs: 1078 FinishTimeMs: 1743154270298 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } ComputeCpuTimeUs: 150 BuildCpuTimeUs: 928 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-mdsjyjltvm" NodeId: 1 CreateTimeMs: 1743154270286 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743154270298 } Stages { StageId: 1 StageGuid: "50aa1b0e-fc63b860-b7c209b1-93246adb" Program: "(\n(return (lambda \'($1) (FromFlow (NarrowMap (Take (ToFlow $1) (Uint64 \'1)) (lambda \'($2 $3 $4 $5) (AsStruct \'(\'\"Amount\" $2) \'(\'\"Comment\" $3) \'(\'\"Group\" $4) \'(\'\"Name\" $5)))))))\n)\n" ComputeActors { CpuTimeUs: 9761 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 844 FinishTimeMs: 1743154270301 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 214 BuildCpuTimeUs: 630 HostName: "ghrun-mdsjyjltvm" NodeId: 1 CreateTimeMs: 1743154270287 } MaxMemoryUsage: 1048576 } BaseTimeMs: 1743154270298 } TxPlansWithStats: "{\"Node Type\":\"Phase\",\"PlanNodeId\":5,\"Plans\":[{\"Node 
Type\":\"ResultSet\",\"PlanNodeId\":4,\"PlanNodeType\":\"ResultSet\",\"Plans\":[{\"Node Type\":\"Limit\",\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":2}],\"Limit\":\"1\",\"Name\":\"Limit\"}],\"PlanNodeId\":3,\"Plans\":[{\"Node Type\":\"Merge\",\"PlanNodeId\":2,\"PlanNodeType\":\"Connection\",\"Plans\":[{\"Node Type\":\"Limit-Filter-TableFullScan\",\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Limit\":\"1\",\"Name\":\"Limit\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[{\"InternalOperatorId\":2}],\"Name\":\"Filter\",\"Predicate\":\"item.Amount \\u003C 5000\"},{\"E-Cost\":\"No estimate\",\"E-Rows\":\"No estimate\",\"E-Size\":\"No estimate\",\"Inputs\":[],\"Name\":\"TableFullScan\",\"Path\":\"\\/Root\\/Test\",\"ReadColumns\":[\"Amount\",\"Comment\",\"Group\",\"Name\"],\"ReadRanges\":[\"Group (-∞, +∞)\",\"Name (-∞, +∞)\"],\"Scan\":\"Parallel\",\"Table\":\"Test\"}],\"PlanNodeId\":1,\"StageGuid\":\"a67aa357-682e1582-22ca0da4-f2463862\",\"Stats\":{\"BaseTimeMs\":1743154270298,\"ComputeNodes\":[{\"CpuTimeUs\":3536,\"Tasks\":[{\"ComputeTimeUs\":150,\"FinishTimeMs\":1743154270298,\"Host\":\"ghrun-mdsjyjltvm\",\"NodeId\":1,\"OutputBytes\":19,\"OutputRows\":1,\"TaskId\":1}]}],\"FinishedTasks\":0,\"PhysicalStageId\":0,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"},\"Tables\":[\"Test\"]}],\"SortColumns\":[\"Group (Asc)\"],\"StageGuid\":\"\"}],\"StageGuid\":\"50aa1b0e-fc63b860-b7c209b1-93246adb\",\"Stats\":{\"BaseTimeMs\":1743154270298,\"ComputeNodes\":[{\"CpuTimeUs\":9761,\"Tasks\":[{\"ComputeTimeUs\":214,\"FinishTimeMs\":1743154270301,\"Host\":\"ghrun-mdsjyjltvm\",\"InputBytes\":19,\"InputRows\":1,\"NodeId\":1,\"OutputBytes\":19,\"OutputRows\":1,\"ResultBytes\":19,\"ResultRows\":1,\"TaskId\":2}]}],\"FinishedTasks\":0,\"PhysicalStageId\":1,\"StageDurationUs\":0,\"Tasks\":0,\"UseLlvm\":\"undefined\"}}],\"StageGuid\":\"\"}],\"StageGuid\":\"\"}" StatConvertBytes: 1753 Extra { type_url: "type.googleapis.com/NKqpProto.TKqpExecutionExtraStats" value: "\010\001\022\013\010\320\033\020\241L\030\361g \002" } } 2025-03-28T09:31:10.303568Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790583334465864:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ekw20yvc8d0bwd2qcww8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:31:10.303623Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486790583334465864:2488] TxId: 281474976710672. Ctx: { TraceId: 01jqe1ekw20yvc8d0bwd2qcww8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDBhMzdjMDMtYjk4MTA4YTktZTcwM2EwYS1lMTVkNGFkZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.013297s ReadRows: 3 ReadBytes: 96 ru: 8 rate limiter was not found force flag: 1 2025-03-28T09:31:10.304661Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154270326, txId: 281474976710671] shutting down >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-03-28T09:31:11.687922Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.687951Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.687983Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.688458Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.689137Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:11.698607Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.699032Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:11.700297Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.700317Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.700340Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.700682Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.701160Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:11.701324Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.701506Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:11.701799Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-03-28T09:31:11.702595Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.702611Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.702628Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.702980Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-28T09:31:11.703596Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:11.703705Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.703863Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:11.708997Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.711895Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:31:11.712074Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:11.712144Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-03-28T09:31:11.713401Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.713438Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.713474Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.713840Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.714315Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:11.714440Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.714652Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2025-03-28T09:31:11.715668Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:31:11.715924Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-03-28T09:31:11.716187Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-03-28T09:31:11.716411Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-03-28T09:31:11.716538Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:11.716584Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-28T09:31:11.716620Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-28T09:31:11.716773Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). 
Partition stream id: 1 GOT RANGE 0 3 Getting new event 2025-03-28T09:31:11.716903Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-28T09:31:11.716926Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-03-28T09:31:11.716948Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-28T09:31:11.717110Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2025-03-28T09:31:11.717179Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-03-28T09:31:11.717197Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-03-28T09:31:11.717217Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-28T09:31:11.717289Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2025-03-28T09:31:11.717385Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-03-28T09:31:11.717407Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-03-28T09:31:11.717434Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-28T09:31:11.717535Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). 
Partition stream id: 1 GOT RANGE 7 9 2025-03-28T09:31:11.718864Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.718902Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.718924Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.719340Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.719803Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:11.719937Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.720144Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-03-28T09:31:11.721062Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:31:11.721329Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-03-28T09:31:11.721642Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-03-28T09:31:11.721827Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-03-28T09:31:11.721962Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:11.721995Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-28T09:31:11.722016Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-28T09:31:11.722041Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-03-28T09:31:11.722083Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-28T09:31:11.722312Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 5). 
Partition stream id: 1 GOT RANGE 0 5 Getting new event 2025-03-28T09:31:11.722393Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-03-28T09:31:11.722413Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-03-28T09:31:11.722433Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-03-28T09:31:11.722451Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-03-28T09:31:11.722472Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-28T09:31:11.722637Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 GOT RANGE 5 9 2025-03-28T09:31:11.723902Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.723932Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.723990Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.724327Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.724715Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:11.724833Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.725029Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:11.726062Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:31:11.726912Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:31:11.727162Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-03-28T09:31:11.727261Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-28T09:31:11.727352Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-03-28T09:31:11.727386Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-28T09:31:11.727409Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-03-28T09:31:11.727425Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-03-28T09:31:11.727461Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-03-28T09:31:11.727482Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-03-28T09:31:11.727642Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 end_offset: 3 } } RANGE 0 3 2025-03-28T09:31:11.727766Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 12). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 start_offset: 3 end_offset: 12 } } RANGE 3 12 >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-03-28T09:31:11.528305Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.528343Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.528365Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.529363Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-03-28T09:31:11.529438Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.529476Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.542294Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006279s 2025-03-28T09:31:11.543041Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.543525Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-28T09:31:11.543643Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.544781Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.544809Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.544836Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.545122Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-28T09:31:11.545157Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.545181Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.545229Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006373s 2025-03-28T09:31:11.545618Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.545968Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-28T09:31:11.546051Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.546891Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.546926Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.546943Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.547241Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-03-28T09:31:11.547277Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.547291Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.547340Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.167573s 2025-03-28T09:31:11.547684Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.547992Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-28T09:31:11.548076Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.548883Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.548916Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.548934Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.549203Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-03-28T09:31:11.549238Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.549252Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.549307Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.196039s 2025-03-28T09:31:11.549620Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.550022Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-28T09:31:11.550106Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.553480Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.553507Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.553525Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.553788Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.554136Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:11.562611Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.563286Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-03-28T09:31:11.563328Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.563347Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.563431Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.200103s 2025-03-28T09:31:11.563615Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-03-28T09:31:11.564955Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.564989Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.565016Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.565286Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.565623Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:11.565745Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.566130Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:11.666972Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.667247Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-28T09:31:11.667334Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:11.667392Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-03-28T09:31:11.667497Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-03-28T09:31:11.767840Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-03-28T09:31:11.768064Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-03-28T09:31:11.768962Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.768984Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.769022Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.769322Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.769719Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:11.769821Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.770208Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:11.871263Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.871514Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-28T09:31:11.871560Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:11.871585Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-03-28T09:31:11.871640Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-03-28T09:31:11.871780Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-03-28T09:31:11.871863Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-03-28T09:31:11.871994Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-03-28T09:31:11.872600Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 27788, MsgBus: 10042 2025-03-28T09:31:05.545521Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790563347500682:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:05.545688Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015bc/r3tmp/tmpbyJv2K/pdisk_1.dat 2025-03-28T09:31:05.845703Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27788, node 1 2025-03-28T09:31:05.918385Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:05.918420Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:05.918427Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:05.918555Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:31:05.924578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:05.924682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:05.926421Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10042 TClient is connected to server localhost:10042 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
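The read-session traces above commit strictly contiguous half-open offset ranges: [0, 3), [3, 5), [5, 7), [7, 9), each acknowledged by the server as "GOT RANGE begin end", and a batched commit may cover a wider range (RANGE 3 12) as long as it starts exactly where the previous one ended. Below is a minimal sketch of that bookkeeping; TCommitTracker is a hypothetical helper written for illustration, not the SDK's actual implementation.

#include <cstdint>
#include <iostream>
#include <stdexcept>
#include <utility>
#include <vector>

// Hypothetical bookkeeping for the half-open commit ranges [begin, end) seen
// above: every new range has to start exactly where the previous one ended,
// so the committed prefix of the partition stays gap-free.
class TCommitTracker {
public:
    // Returns the new committed end offset, mirroring "GOT RANGE begin end".
    uint64_t Commit(uint64_t begin, uint64_t end) {
        if (begin != CommittedEnd_ || end <= begin) {
            throw std::runtime_error("non-contiguous or empty commit range");
        }
        CommittedEnd_ = end;
        return CommittedEnd_;
    }

private:
    uint64_t CommittedEnd_ = 0;
};

int main() {
    TCommitTracker tracker;
    // The sequence observed in the trace: [0, 3), [3, 5), [5, 7), [7, 9).
    const std::vector<std::pair<uint64_t, uint64_t>> ranges{{0, 3}, {3, 5}, {5, 7}, {7, 9}};
    for (const auto& [begin, end] : ranges) {
        std::cout << "GOT RANGE " << begin << " " << tracker.Commit(begin, end) << "\n";
    }
    return 0;
}

Compiled as a standalone C++17 program, this prints the same "GOT RANGE 0 3" through "GOT RANGE 7 9" sequence as the trace above.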
2025-03-28T09:31:06.451072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:06.467377Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:06.478622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:31:06.619448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:06.772498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:06.842661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:08.524585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790576232404357:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:08.524717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:08.762587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:08.786627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:08.814142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:08.841452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:08.869943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:08.900188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:08.943063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790576232404866:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:08.943176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790576232404871:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:08.943197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:08.948673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:08.959797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790576232404873:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:09.050180Z node 1 :TX_PROXY ERROR: Actor# [1:7486790580527372224:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:10.060289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:10.086326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:10.118406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:10.555542Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790563347500682:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:10.555614Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; /Root/Join1_2 1 19 /Root/Join1_1 8 136 >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range [GOOD] Test command err: Trying to start YDB, gRPC: 16972, MsgBus: 31528 2025-03-28T09:31:06.461378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790566844304555:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:06.461454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015b4/r3tmp/tmpzUfWEJ/pdisk_1.dat 2025-03-28T09:31:06.763359Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16972, node 1 2025-03-28T09:31:06.830583Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:06.830739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:06.832875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:06.848751Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:06.848770Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:06.848777Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:06.848901Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31528 TClient is connected to server localhost:31528 WaitRootIsUp 'Root'... 
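Two retry mechanisms are visible in this section: the workload service reschedules pool creation ("Scheduled retry for error: ... Transaction ... completed, doublechecking") and the read session reconnects with growing delays (0.000000s, 0.006279s, 0.167573s, 0.196039s, 0.200103s in the SimpleDataHandlersWithCommit trace earlier in this section). The exact retry policies are internal to YDB; the sketch below is only an illustrative model of capped exponential backoff with jitter that yields delays of the same shape. The base and cap values and all names are invented for the example.

#include <algorithm>
#include <chrono>
#include <cmath>
#include <iostream>
#include <random>

// Illustrative capped exponential backoff with jitter. This is NOT the SDK's
// actual retry policy -- just a model that produces growing, capped delays of
// the same flavor as the trace above.
std::chrono::duration<double> NextDelay(int attempt, std::mt19937& rng,
                                        double baseSeconds = 0.005,
                                        double capSeconds = 0.2) {
    const double raw = baseSeconds * std::pow(2.0, attempt);  // exponential growth
    const double capped = std::min(raw, capSeconds);          // never above the cap
    std::uniform_real_distribution<double> jitter(0.5, 1.0);  // spread retries out
    return std::chrono::duration<double>(capped * jitter(rng));
}

int main() {
    std::mt19937 rng(42);  // fixed seed so the example is reproducible
    for (int attempt = 0; attempt < 6; ++attempt) {
        std::cout << "attempt " << attempt << ": retry in "
                  << NextDelay(attempt, rng).count() << "s\n";
    }
    return 0;
}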
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:07.343939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:07.371195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:07.502511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:07.645328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:07.721310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:09.429476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790579729208211:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:09.429599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:09.649555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:09.674571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:09.699101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:09.722400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:09.748014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:09.813402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:09.850803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790579729208725:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:09.850868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:09.850992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790579729208730:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:09.854858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:09.864064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790579729208732:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:09.954720Z node 1 :TX_PROXY ERROR: Actor# [1:7486790579729208786:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-03-28T09:31:11.528287Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.528325Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.528352Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.529406Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.534184Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-28T09:31:11.534263Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.536066Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.536085Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.536109Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.536411Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.536701Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-28T09:31:11.536766Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.537601Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.537624Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.537640Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.537910Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-28T09:31:11.537944Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.537967Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.538097Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-03-28T09:31:11.538868Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.538887Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.538919Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.539276Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-03-28T09:31:11.539320Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.539337Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.539403Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-03-28T09:31:11.541785Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-03-28T09:31:11.541808Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-03-28T09:31:11.541827Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.542144Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.542605Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:11.552975Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-03-28T09:31:11.553467Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:11.553758Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2025-03-28T09:31:11.559088Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-03-28T09:31:11.559341Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:11.559377Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-28T09:31:11.559394Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-28T09:31:11.559422Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-03-28T09:31:11.559439Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-03-28T09:31:11.559451Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-03-28T09:31:11.559472Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-03-28T09:31:11.559491Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-03-28T09:31:11.559537Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-03-28T09:31:11.559556Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-03-28T09:31:11.559575Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-03-28T09:31:11.559609Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-03-28T09:31:11.559630Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-03-28T09:31:11.559667Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-03-28T09:31:11.559695Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-03-28T09:31:11.559723Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-03-28T09:31:11.559764Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-03-28T09:31:11.559785Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-03-28T09:31:11.559805Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-03-28T09:31:11.559820Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-03-28T09:31:11.559830Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-03-28T09:31:11.559846Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-03-28T09:31:11.559858Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-03-28T09:31:11.559870Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-03-28T09:31:11.559881Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-03-28T09:31:11.559908Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-03-28T09:31:11.559925Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-03-28T09:31:11.559942Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-03-28T09:31:11.559959Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-03-28T09:31:11.559986Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-03-28T09:31:11.560011Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-03-28T09:31:11.560022Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-03-28T09:31:11.560094Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-03-28T09:31:11.560129Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-03-28T09:31:11.560147Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-03-28T09:31:11.560169Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-03-28T09:31:11.560200Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-03-28T09:31:11.560216Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-03-28T09:31:11.560279Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-03-28T09:31:11.560301Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-03-28T09:31:11.560316Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-03-28T09:31:11.560332Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-03-28T09:31:11.560349Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-03-28T09:31:11.560366Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-03-28T09:31:11.560384Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-03-28T09:31:11.560402Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-03-28T09:31:11.560419Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-03-28T09:31:11.560436Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-03-28T09:31:11.560453Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-03-28T09:31:11.560469Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-03-28T09:31:11.560526Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-03-28T09:31:11.562952Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-03-28T09:31:11.563115Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-03-28T09:31:11.563157Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-03-28T09:31:11.563183Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-03-28T09:31:11.563212Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-03-28T09:31:11.563238Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-03-28T09:31:11.563255Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-03-28T09:31:11.563284Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-03-28T09:31:11.563313Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-03-28T09:31:11.563354Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-03-28T09:31:11.563374Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-03-28T09:31:11.563416Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-03-28T09:31:11.563435Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-03-28T09:31:11.563452Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-03-28T09:31:11.563463Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-03-28T09:31:11.563474Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-03-28T09:31:11.563485Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-03-28T09:31:11.563507Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-03-28T09:31:11.563521Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-03-28T09:31:11.563532Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-03-28T09:31:11.563550Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-03-28T09:31:11.563567Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-03-28T09:31:11.563578Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-03-28T09:31:11.563590Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-03-28T09:31:11.563600Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-03-28T09:31:11.563608Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-03-28T09:31:11.563618Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-03-28T09:31:11.563636Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-03-28T09:31:11.563660Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-03-28T09:31:11.563675Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-03-28T09:31:11.563684Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-03-28T09:31:11.563699Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-03-28T09:31:11.563709Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-03-28T09:31:11.563752Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-03-28T09:31:11.563775Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-03-28T09:31:11.563792Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-03-28T09:31:11.563811Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-03-28T09:31:11.563843Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-03-28T09:31:11.563861Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-03-28T09:31:11.563879Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-03-28T09:31:11.563891Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-03-28T09:31:11.563900Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-03-28T09:31:11.563914Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-03-28T09:31:11.563929Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-03-28T09:31:11.563945Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-03-28T09:31:11.563959Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-03-28T09:31:11.563967Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-03-28T09:31:11.563977Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-03-28T09:31:11.564070Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-03-28T09:31:11.564097Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-03-28T09:31:11.564116Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-03-28T09:31:11.564180Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-03-28T09:31:11.564320Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-03-28T09:31:11.565265Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.565287Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.565319Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.565601Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-28T09:31:11.566114Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:11.566260Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.566711Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:11.667615Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.667872Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-28T09:31:11.667936Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:11.667964Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-03-28T09:31:11.668037Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-03-28T09:31:11.868384Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-03-28T09:31:11.969364Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-03-28T09:31:11.969550Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-03-28T09:31:11.969740Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-03-28T09:31:11.970767Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.970784Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.970803Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.971187Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.971753Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:11.971893Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.972265Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:12.073311Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.074863Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-28T09:31:12.074929Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:12.074969Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-03-28T09:31:12.075055Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2025-03-28T09:31:12.075148Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-03-28T09:31:12.075279Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-03-28T09:31:12.075362Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-03-28T09:31:12.075457Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 8027, MsgBus: 8420 2025-03-28T09:31:06.847061Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790567498701480:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:06.847164Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015b0/r3tmp/tmpldyhkt/pdisk_1.dat 2025-03-28T09:31:07.122294Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8027, node 1 2025-03-28T09:31:07.198083Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:07.198110Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:07.198119Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:07.198465Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:31:07.199680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:07.199824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:07.201865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8420 TClient is connected to server localhost:8420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:07.691464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:07.729054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 
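In the traces above, every delivered message is logged as "Take Data. Partition N. Read: {batch, index} (offset-offset)": the first field appears to be the decompressed batch, the second the message's position inside it, and the parenthesised value the absolute offset. Partition 1, for example, walks {0, 0}..{0, 21} for offsets 1-22 and then {1, 0}..{1, 27} for offsets 23-50. The short sketch below reproduces that numbering from the batch sizes visible in the trace (22 and 28 messages); it is a reconstruction from the log, not SDK code.

#include <cstddef>
#include <iostream>
#include <vector>

// Reconstructs the "Take Data. Partition 1. Read: {batch, index} (offset-offset)"
// numbering seen above: field one is the decompressed batch, field two the
// message's position inside it, and the parenthesised value the absolute
// offset. Batch sizes (22 and 28 messages) are taken from the trace.
int main() {
    const std::vector<std::size_t> batchSizes{22, 28};
    std::size_t offset = 1;  // the trace starts at offset 1
    for (std::size_t batch = 0; batch < batchSizes.size(); ++batch) {
        for (std::size_t index = 0; index < batchSizes[batch]; ++index, ++offset) {
            std::cout << "Take Data. Partition 1. Read: {" << batch << ", "
                      << index << "} (" << offset << "-" << offset << ")\n";
        }
    }
    return 0;
}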
2025-03-28T09:31:07.889495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:31:08.052534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:08.133464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:09.694044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790580383605149:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:09.694145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:10.040663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:10.069125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:10.097237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:10.125031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:10.153466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:10.187789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:10.246423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790584678572958:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:10.246492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790584678572963:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:10.246504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:10.249995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:10.258986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790584678572965:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:10.316504Z node 1 :TX_PROXY ERROR: Actor# [1:7486790584678573018:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpCost::OlapRange [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TMultiversionObjectMap::MonteCarlo [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits [GOOD] >> TConsoleTests::TestListTenants >> DataShardVolatile::DistributedWriteThenReadIteratorStream [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery >> TConsoleTests::TestTenantConfigConsistency [GOOD] >> TConsoleTests::TestSetConfig >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain >> CompressExecutor::TestReorderedExecutor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRange [GOOD] Test command err: Trying to start YDB, gRPC: 24134, MsgBus: 63238 2025-03-28T09:31:06.442330Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790566327915481:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:06.442452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015b3/r3tmp/tmpHX2bus/pdisk_1.dat TServer::EnableGrpc on GrpcPort 24134, node 1 2025-03-28T09:31:06.786979Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:31:06.787006Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:31:06.806540Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:06.813162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:06.813259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:06.815289Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:06.817885Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:06.817902Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:06.817910Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:06.818030Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63238 TClient is connected to server localhost:63238 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:07.280086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:07.306563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:07.434740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:07.589621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:07.649153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:09.567542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790579212819154:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:09.567649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:09.854593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:09.922558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:09.949151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:09.975638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:10.002872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:10.069860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:10.146065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790583507786969:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:10.146123Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:10.146170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790583507786974:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:10.149242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:10.157520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790583507786976:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:10.254634Z node 1 :TX_PROXY ERROR: Actor# [1:7486790583507787030:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:11.127868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:11.288738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790587802754744:2499];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:31:11.288738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7486790587802754732:2497];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:31:11.288928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7486790587802754732:2497];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:31:11.289228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7486790587802754732:2497];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:31:11.289382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7486790587802754732:2497];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:31:11.289527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7486790587802754732:2497];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:31:11.289536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790587802754744:2499];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:31:11.289670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7486790587802754732:2497];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:31:11.289704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790587802754744:2499];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:31:11.289838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790587802754744:2499];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:31:11.289881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7486790587802754732:2497];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-03-28T09:31:11.289962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790587802754744:2499];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:31:11.290009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7486790587802754732:2497];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:31:11.290067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486790587802754744:2499];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:31:11.290146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037920;self_id=[1:7486790587802754732:2497];tablet_id=72075186224037920;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:31:11 ... D WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:31:11.445774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:31:11.445839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:31:11.445867Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:31:11.445920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:31:11.445956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:31:11.447401Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:31:11.447456Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:31:11.447622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:31:11.447664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:31:11.447780Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:31:11.447814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:31:11.447989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:31:11.448026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:31:11.448139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:31:11.448166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037923;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:31:11.448691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:31:11.448731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:31:11.448807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:31:11.448838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:31:11.449002Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:31:11.449037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:31:11.449105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:31:11.449143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:31:11.449205Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:31:11.449233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:31:11.449274Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:31:11.449303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:31:11.449724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:31:11.449768Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:31:11.449928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:31:11.449965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:31:11.450093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:31:11.450149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:31:11.450324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:31:11.450366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:31:11.450467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:31:11.450495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:31:11.515822Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790566327915481:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:11.516253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:31:11.535781Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:11.536176Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:11.541057Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:11.543844Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:11.545123Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:11.549867Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:11.552470Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:11.553816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:11.558063Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:11.560316Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T09:31:11.687884Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;self_id=[1:7486790587802754769:2501];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037923;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037927;receive=72075186224037928; 2025-03-28T09:31:11.687913Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-28T09:31:11.688520Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-28T09:31:11.688816Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-03-28T09:31:11.534489Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.534535Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.534575Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.535039Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-28T09:31:11.535651Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:11.547905Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.548989Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:11.553547Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:31:11.553969Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:31:11.554172Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-03-28T09:31:11.554306Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:31:11.554400Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:11.554434Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-03-28T09:31:11.554466Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-28T09:31:11.554492Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-28T09:31:11.555727Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.555768Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.555796Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.556106Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.556476Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:11.556614Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.556802Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-03-28T09:31:11.557821Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:31:11.558055Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-03-28T09:31:11.558344Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-03-28T09:31:11.558510Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-03-28T09:31:11.558620Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:11.558652Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-28T09:31:11.558697Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-28T09:31:11.558955Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2025-03-28T09:31:11.560143Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-28T09:31:11.560170Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-03-28T09:31:11.560198Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-28T09:31:11.560346Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2025-03-28T09:31:11.560419Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-03-28T09:31:11.560439Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-03-28T09:31:11.560456Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-28T09:31:11.560535Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2025-03-28T09:31:11.560598Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-03-28T09:31:11.560617Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-03-28T09:31:11.560640Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-28T09:31:11.560754Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2025-03-28T09:31:11.562042Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.562072Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.562106Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:11.562440Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:11.562920Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:11.563029Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:11.563197Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2025-03-28T09:31:11.564121Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:31:11.564322Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-03-28T09:31:11.564614Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-03-28T09:31:11.564851Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-03-28T09:31:11.564956Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:11.564991Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-28T09:31:11.565106Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 GOT RANGE 0 2 Getting new event 2025-03-28T09:31:11.565177Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-28T09:31:11.565196Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-28T09:31:11.565261Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 GOT RANGE 2 3 Getting new event 2025-03-28T09:31:11.565308Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-28T09:31:11.565338Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-28T09:31:11.565412Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 GOT RANGE 3 4 Getting new event 2025-03-28T09:31:11.565473Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-03-28T09:31:11.565504Z :DEBUG: [db] [sessionid] [cluster] The application data ... er". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-28T09:31:13.584180Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 201). Partition stream id: 1 GOT RANGE 0 201 2025-03-28T09:31:13.668783Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-03-28T09:31:13.668984Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-03-28T09:31:13.669015Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:13.669439Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:13.670191Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:13.670569Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-03-28T09:31:13.670936Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-03-28T09:31:13.764094Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-03-28T09:31:13.765022Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:13.766756Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-28T09:31:13.769226Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-28T09:31:13.769938Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-03-28T09:31:13.773946Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-03-28T09:31:13.774638Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-03-28T09:31:13.775427Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-03-28T09:31:13.776296Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-03-28T09:31:13.783550Z :DEBUG: Take Data. Partition 1. 
Read: {1, 2} (9-9) 2025-03-28T09:31:13.784296Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-03-28T09:31:13.784358Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-03-28T09:31:13.784511Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-28T09:31:13.793017Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 11). Partition stream id: 1 GOT RANGE 0 11 2025-03-28T09:31:13.795487Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:13.795514Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:13.795562Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:13.795850Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:13.796346Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:13.796536Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:13.796718Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:13.797030Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-03-28T09:31:13.797817Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:13.797841Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:13.797874Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:13.798110Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:13.798506Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:13.798622Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:13.799070Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:13.799208Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:31:13.799311Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:13.799354Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-28T09:31:13.799456Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem >> TConsoleTests::TestCreateServerlessTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow Test command err: Trying to start YDB, gRPC: 5577, MsgBus: 20682 2025-03-28T09:30:57.871418Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790526678072306:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:30:57.871477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001648/r3tmp/tmpwdUhz8/pdisk_1.dat 2025-03-28T09:30:58.255507Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:58.291856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:58.291991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 5577, node 1 2025-03-28T09:30:58.293469Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:58.336595Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:58.336623Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:58.336632Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:58.336736Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20682 TClient is connected to server localhost:20682 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:30:58.892908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:30:58.913313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:30:59.046201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:30:59.244475Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:30:59.325908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:31:00.807418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790539562975955:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:00.807552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:01.114575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:31:01.144920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:31:01.173524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:31:01.205953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:31:01.239403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:31:01.269817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:31:01.325863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790543857943765:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:01.325976Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:01.326874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790543857943770:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:01.330133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:31:01.342183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790543857943772:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:31:01.445009Z node 1 :TX_PROXY ERROR: Actor# [1:7486790543857943828:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:31:02.445535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
2025-03-28T09:31:02.578863Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548152911543:2507];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-28T09:31:02.578864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486790548152911536:2505];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-28T09:31:02.579078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486790548152911536:2505];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-28T09:31:02.579408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486790548152911536:2505];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-28T09:31:02.579557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486790548152911536:2505];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-28T09:31:02.579573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548152911543:2507];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-28T09:31:02.579765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486790548152911536:2505];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-28T09:31:02.579776Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548152911543:2507];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-28T09:31:02.579882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548152911543:2507];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-28T09:31:02.579912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486790548152911536:2505];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-28T09:31:02.580105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548152911543:2507];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-28T09:31:02.580187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486790548152911536:2505];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-28T09:31:02.580241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548152911543:2507];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-28T09:31:02.580319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037919;self_id=[1:7486790548152911536:2505];tablet_id=72075186224037919;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-28T09:31:02.580345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037922;self_id=[1:7486790548152911543:2507];tablet_id=72075186224037922;process=TTxInitSchema::Execute;fline=abstract. ... 791848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-03-28T09:31:02.792020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-03-28T09:31:02.792066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-03-28T09:31:02.792168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-03-28T09:31:02.792229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-03-28T09:31:02.792307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-03-28T09:31:02.792343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-03-28T09:31:02.792385Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-03-28T09:31:02.792419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-03-28T09:31:02.792977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-03-28T09:31:02.793022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-03-28T09:31:02.793217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-03-28T09:31:02.793251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-03-28T09:31:02.793386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-03-28T09:31:02.793430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-03-28T09:31:02.793627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-03-28T09:31:02.793665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-03-28T09:31:02.793789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-03-28T09:31:02.793823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-03-28T09:31:02.827274Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-28T09:31:02.827479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-28T09:31:02.833603Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-28T09:31:02.840080Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-28T09:31:02.841381Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-28T09:31:02.846487Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-28T09:31:02.847167Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-28T09:31:02.850795Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-28T09:31:02.851954Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-28T09:31:02.856223Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-28T09:31:02.871827Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790526678072306:2074];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:31:02.871887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:31:03.042149Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;self_id=[1:7486790548152911579:2509];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037928;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037923;receive=72075186224037927;
2025-03-28T09:31:03.042287Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673;
2025-03-28T09:31:03.042872Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673;
2025-03-28T09:31:03.042879Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673;
2025-03-28T09:31:03.111993Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710675;tx_id=281474976710675;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710675;
query_phases { duration_us: 6478 cpu_time_us: 1932 affected_shards: 1 } query_phases { duration_us: 6148 cpu_time_us: 277 affected_shards: 1 } compilation { duration_us: 49047 cpu_time_us: 46747 } process_cpu_time_us: 580 total_duration_us: 63855 total_cpu_time_us: 49536
AddressSanitizer:DEADLYSIGNAL
=================================================================
==543407==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018b1ca4d bp 0x7ffd8304ae80 sp 0x7ffd8304ace0 T0)
==543407==The signal is caused by a READ memory access.
==543407==Hint: address points to the zero page.
2025-03-28T09:31:13.251615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T09:31:13.251646Z node 1 :IMPORT WARN: Table profiles were not loaded
    #0 0x18b1ca4d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31
    #1 0x18b1ca4d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32
    #2 0x18b1ca4d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31
    #3 0x18b1ca4d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10
    #4 0x18b1ca4d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13
    #5 0x18b415d7 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
    #6 0x18b415d7 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
    #7 0x18b415d7 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
    #8 0x18b415d7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
    #9 0x18b415d7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
    #10 0x194936d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
    #11 0x194936d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
    #12 0x194936d5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
    #13 0x19463228 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
    #14 0x18b40483 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
    #15 0x19464af5 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19
    #16 0x1948dc4c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
    #17 0x7f1f6e011d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
    #18 0x7f1f6e011e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
    #19 0x162b8028 in _start (/home/runner/.ya/build/build_root/2909/001648/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x162b8028) (BuildId: 42cc14b09b8e1be640cc9d032c9c2bf606ecb0c4)
AddressSanitizer can not provide additional info.
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler>
==543407==ABORTING
>> TOlapNaming::AlterColumnTableOk
>> TOlap::CreateStoreWithDirs
>> TOlap::CustomDefaultPresets
>> TOlapNaming::AlterColumnStoreOk
>> TOlap::CreateTableWithNullableKeysNotAllowed
>> TOlapNaming::CreateColumnTableOk
>> TOlap::CreateDropStandaloneTable
|91.4%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log}
|91.4%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ...
results_accumulator.log} >> TGroupMapperTest::NonUniformCluster [GOOD] >> TConsoleTests::TestListTenants [GOOD] >> TConsoleTests::TestListTenantsExtSubdomain >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser [GOOD] >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates >> TOlap::CreateTableWithNullableKeysNotAllowed [GOOD] >> TOlap::CreateTableWithNullableKeys |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::NonUniformCluster [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TOlap::CustomDefaultPresets [GOOD] >> TOlap::Decimal >> TOlap::CreateStoreWithDirs [GOOD] >> TOlap::CreateTable >> Viewer::JsonAutocompleteStartOfDatabaseName >> Viewer::PDiskMerging >> TOlapNaming::AlterColumnStoreOk [GOOD] >> TOlapNaming::AlterColumnStoreFailed >> TConsoleTests::TestSetConfig [GOOD] >> TConsoleTests::TestTenantGeneration >> Viewer::Cluster10000Tablets >> Viewer::TabletMerging >> Viewer::JsonAutocompleteEmpty |91.4%| [TA] $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.4%| [TA] {RESULT} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> Viewer::PDiskMerging [GOOD] >> Viewer::SelectStringWithBase64Encoding >> Viewer::FuzzySearcherLimit3OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit4OutOf4 [GOOD] >> Viewer::FuzzySearcherLongWord [GOOD] >> Viewer::FuzzySearcherPriority [GOOD] >> Viewer::JsonAutocompleteColumns >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient >> Viewer::LevenshteinDistance [GOOD] >> Viewer::JsonStorageListingV2 >> TOlap::CreateDropStandaloneTable [GOOD] >> TOlap::AlterStore >> TOlap::Decimal [GOOD] >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] >> TOlap::CreateTableWithNullableKeys [GOOD] >> TOlap::CreateTable [GOOD] >> TOlap::CreateTableTtl >> TOlapNaming::AlterColumnStoreFailed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableWithNullableKeys [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:31:16.842173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:31:16.842282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:31:16.842347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:31:16.842381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:31:16.843062Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:31:16.843104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:31:16.843167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:31:16.843240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:31:16.844648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:31:16.913686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:16.913754Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:16.926863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:31:16.927085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:31:16.927233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:31:16.933061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:31:16.933198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:31:16.936734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:16.937526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:31:16.941954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:16.948476Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:31:16.948599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:16.949586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:31:16.949626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:31:16.949728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:31:16.949801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:31:16.955203Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:31:17.046297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:31:17.048332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.050464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:31:17.051734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:31:17.051821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.054423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:17.054539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:31:17.054664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.054802Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:31:17.054828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:31:17.054851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:31:17.056433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.056492Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:31:17.056524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:31:17.057816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.057855Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.057887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:17.057952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.066224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:31:17.067911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:31:17.068638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:31:17.069373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:17.069462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:31:17.069504Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:17.070524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:31:17.070565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:17.070714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:31:17.070771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:31:17.072559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:31:17.072593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:31:17.072712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:17.072744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:31:17.073438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.073470Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:31:17.073535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:31:17.073557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.073582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:31:17.073601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.073625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:31:17.073652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.073687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:31:17.073711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:31:17.073751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:31:17.073795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:31:17.073823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:31:17.075109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:31:17.075181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:31:17.075204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 78944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:31:18.198229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:31:18.198387Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:18.198438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 104, path id: 1 2025-03-28T09:31:18.198484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-03-28T09:31:18.198509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 104, path id: 4 2025-03-28T09:31:18.198922Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:31:18.198976Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedWaitParts operationId# 104:0 ProgressState at schemeshard: 72057594046678944 2025-03-28T09:31:18.199043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TDropColumnTable TProposedWaitParts operationId# 104:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409547 2025-03-28T09:31:18.199706Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:31:18.199832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:31:18.199873Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:31:18.199950Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-28T09:31:18.199998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:31:18.200627Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:31:18.200703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:31:18.200730Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:31:18.200760Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-28T09:31:18.200802Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:31:18.202384Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:31:18.202446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:31:18.202478Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:31:18.202499Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-03-28T09:31:18.202520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:31:18.202572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-28T09:31:18.204233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 104:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 275382275 2025-03-28T09:31:18.208545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:31:18.208651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:31:18.209404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:31:18.221819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 104 2025-03-28T09:31:18.221890Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409547, partId: 0 2025-03-28T09:31:18.222060Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409547 TxId: 104 2025-03-28T09:31:18.222120Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 130 FAKE_COORDINATOR: Erasing txId 104 2025-03-28T09:31:18.224219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:31:18.224387Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:31:18.224449Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropColumnTable TProposedDeleteParts operationId# 104:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:31:18.224540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:31:18.224666Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:31:18.224710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:31:18.224756Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:31:18.224803Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:31:18.224859Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-03-28T09:31:18.224930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:372:2351] message: TxId: 104 2025-03-28T09:31:18.225005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:31:18.225047Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:31:18.225084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:31:18.225203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:31:18.227179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:31:18.227298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:31:18.227344Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:609:2569] 2025-03-28T09:31:18.227846Z node 2 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-28T09:31:18.228519Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186233409547;self_id=[2:475:2444];ev=NKikimr::TEvTablet::TEvTabletDead;fline=columnshard_impl.cpp:1153;event=tablet_die; Forgetting tablet 72075186233409547 2025-03-28T09:31:18.232375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:31:18.233467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-28T09:31:18.234020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:31:18.234072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-28T09:31:18.234148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:31:18.237112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:31:18.237203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2025-03-28T09:31:18.237681Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-03-28T09:31:18.238240Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/MyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:31:18.238439Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/MyDir/MyTable" took 
234us result status StatusPathDoesNotExist 2025-03-28T09:31:18.238615Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/MyTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 3])" Path: "/MyRoot/MyDir/MyTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 3 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:31:18.239261Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 4 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2025-03-28T09:31:18.239351Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 4 took 97us result status StatusPathDoesNotExist 2025-03-28T09:31:18.239431Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:31:16.842189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:31:16.842318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:31:16.842369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:31:16.842406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:31:16.843124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:31:16.843167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:31:16.843238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:31:16.843320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:31:16.844651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:31:16.931943Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
Cannot subscribe to console configs 2025-03-28T09:31:16.932018Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:16.946929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:31:16.947156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:31:16.947284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:31:16.953184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:31:16.953341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:31:16.953819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:16.953980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:31:16.955487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:16.956610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:31:16.956665Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:16.956784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:31:16.956816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:31:16.956849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:31:16.956926Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:31:16.961837Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:31:17.056324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:31:17.056554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.056719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:31:17.056912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:31:17.056961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.059054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:17.059166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, 
path: //MyRoot 2025-03-28T09:31:17.059322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.059378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:31:17.059411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:31:17.059444Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:31:17.061243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.061314Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:31:17.061354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:31:17.062888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.062942Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.062982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:17.063037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.066995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:31:17.068740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:31:17.068911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:31:17.069885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:17.070006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:31:17.070058Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:17.070642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:31:17.070717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:17.070868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:31:17.070965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:31:17.072874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:31:17.072922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:31:17.073082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:17.073124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:31:17.073484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.073531Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:31:17.073621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:31:17.073655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.073691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:31:17.073721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.073780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:31:17.073832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.073880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:31:17.073914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:31:17.073971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:31:17.074010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:31:17.074048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:31:17.075962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:31:17.076082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:31:17.076120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
9:31:18.155401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-28T09:31:18.156822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816
2025-03-28T09:31:18.156956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 101 at step: 5000002
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002
FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002
2025-03-28T09:31:18.157326Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-28T09:31:18.157433Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 8589936749 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:31:18.157487Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TPropose operationId# 101:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000002
2025-03-28T09:31:18.157650Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 129
2025-03-28T09:31:18.157779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-28T09:31:18.157845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-03-28T09:31:18.158700Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tx_controller.cpp:211;event=finished_tx;tx_id=101;
2025-03-28T09:31:18.160216Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:31:18.160284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:31:18.160439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-03-28T09:31:18.160570Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:31:18.160628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 1
2025-03-28T09:31:18.160675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 101, path id: 2
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002
2025-03-28T09:31:18.161252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944
2025-03-28T09:31:18.161305Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944
2025-03-28T09:31:18.161367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546
2025-03-28T09:31:18.161841Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101
2025-03-28T09:31:18.161953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101
2025-03-28T09:31:18.161996Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101
2025-03-28T09:31:18.162041Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5
2025-03-28T09:31:18.162084Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-28T09:31:18.162993Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101
2025-03-28T09:31:18.163072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101
2025-03-28T09:31:18.163100Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101
2025-03-28T09:31:18.163143Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3
2025-03-28T09:31:18.163179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
2025-03-28T09:31:18.163252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true
2025-03-28T09:31:18.165658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275
2025-03-28T09:31:18.166736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-03-28T09:31:18.167080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101
2025-03-28T09:31:18.189214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101
2025-03-28T09:31:18.189276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0
2025-03-28T09:31:18.189404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101
FAKE_COORDINATOR: Erasing txId 101
2025-03-28T09:31:18.191074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944
2025-03-28T09:31:18.191216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944
2025-03-28T09:31:18.191258Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState
2025-03-28T09:31:18.191372Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1
2025-03-28T09:31:18.191413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-03-28T09:31:18.191456Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1
2025-03-28T09:31:18.191505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-03-28T09:31:18.191554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true
2025-03-28T09:31:18.191617Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:342:2321] message: TxId: 101
2025-03-28T09:31:18.191668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1
2025-03-28T09:31:18.191715Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0
2025-03-28T09:31:18.191753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0
2025-03-28T09:31:18.191878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-03-28T09:31:18.193419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult
2025-03-28T09:31:18.193472Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:343:2322]
TestWaitNotification: OK eventTxId 101
2025-03-28T09:31:18.193933Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-28T09:31:18.194147Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 236us result status StatusSuccess
2025-03-28T09:31:18.194775Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Decimal(35,9)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TOlap::AlterStore [GOOD]
>> TOlap::AlterTtl
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD]
Test command err:
2025-03-28T09:30:56.838007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:30:56.838082Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:30:56.914312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-28T09:30:57.875787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:30:57.875850Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:30:57.914896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-28T09:30:58.731068Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:30:58.731136Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:30:58.797438Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-28T09:30:59.764718Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:30:59.764788Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:30:59.803905Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-28T09:31:01.020563Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:31:01.020638Z node 5 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:31:01.089744Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-28T09:31:08.991978Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:31:08.992080Z node 15 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:31:09.037595Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-28T09:31:10.140087Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:31:10.140180Z node 16 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:31:10.189594Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-28T09:31:14.240963Z node 21 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:31:14.241060Z node 21 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:31:14.286765Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-28T09:31:15.384096Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:31:15.384204Z node 22 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:31:15.455868Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-28T09:31:16.563270Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:31:16.563380Z node 23 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:31:16.616671Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
2025-03-28T09:31:17.718569Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:31:17.718644Z node 24 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:31:17.764667Z node 24 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944
>> TConsoleTests::TestCreateServerlessTenantWrongSharedDb [GOOD]
>> TConsoleTests::TestCreateTenantWrongName
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnStoreFailed [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-28T09:31:16.842151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-28T09:31:16.842317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:31:16.842362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-28T09:31:16.842393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-28T09:31:16.843097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-28T09:31:16.843132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-28T09:31:16.843206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:31:16.843275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-28T09:31:16.844646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:31:16.913686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:31:16.913751Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:31:16.927434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:31:16.927611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-28T09:31:16.927702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-28T09:31:16.932973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-28T09:31:16.933194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-28T09:31:16.936746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-28T09:31:16.937525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-28T09:31:16.941955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:31:16.948633Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:31:16.948714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:31:16.949637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-28T09:31:16.949692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:31:16.949740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-28T09:31:16.949829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-28T09:31:16.955834Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-28T09:31:17.056574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-28T09:31:17.056741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:31:17.056897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-28T09:31:17.057072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-28T09:31:17.057123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:31:17.058851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-28T09:31:17.058944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-28T09:31:17.059063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:31:17.059108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-28T09:31:17.059138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-28T09:31:17.059162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-28T09:31:17.060574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:31:17.060634Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-28T09:31:17.060667Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-28T09:31:17.062030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:31:17.062079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:31:17.062118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:31:17.062167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-28T09:31:17.071358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-28T09:31:17.072937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-28T09:31:17.073123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-28T09:31:17.074005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-28T09:31:17.074114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:31:17.074179Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:31:17.074395Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-28T09:31:17.074446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:31:17.074596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-28T09:31:17.074704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-28T09:31:17.076579Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:31:17.076620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:31:17.076784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:31:17.076822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
FAKE_COORDINATOR: Erasing txId 1
2025-03-28T09:31:17.077084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:31:17.077127Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-28T09:31:17.077214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:31:17.077244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:31:17.077282Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:31:17.077308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:31:17.077339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-28T09:31:17.077377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:31:17.077445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-28T09:31:17.077476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-28T09:31:17.077536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-28T09:31:17.077588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-28T09:31:17.077621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-28T09:31:17.079490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-28T09:31:17.079591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-28T09:31:17.079642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
RD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816
2025-03-28T09:31:18.347200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 102 at step: 5000003
FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003
FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003
2025-03-28T09:31:18.347641Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-28T09:31:18.347784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 8589936749 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:31:18.347855Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TPropose operationId# 102:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000003
2025-03-28T09:31:18.348117Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129
2025-03-28T09:31:18.348242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3
2025-03-28T09:31:18.348301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-03-28T09:31:18.353039Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=tx_controller.cpp:211;event=finished_tx;tx_id=102;
FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003
2025-03-28T09:31:18.354673Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:31:18.354744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-03-28T09:31:18.354924Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3]
2025-03-28T09:31:18.355056Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:31:18.355118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 2
2025-03-28T09:31:18.355170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 102, path id: 3
2025-03-28T09:31:18.355474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944
2025-03-28T09:31:18.355521Z node 2 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944
2025-03-28T09:31:18.355579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546
2025-03-28T09:31:18.356252Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102
2025-03-28T09:31:18.356352Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102
2025-03-28T09:31:18.356385Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102
2025-03-28T09:31:18.356437Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5
2025-03-28T09:31:18.356495Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4
2025-03-28T09:31:18.357050Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102
2025-03-28T09:31:18.357121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 4 PathOwnerId: 72057594046678944, cookie: 102
2025-03-28T09:31:18.357144Z node 2 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102
2025-03-28T09:31:18.357169Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 4
2025-03-28T09:31:18.357212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3
2025-03-28T09:31:18.357279Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true
2025-03-28T09:31:18.359480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275
2025-03-28T09:31:18.359554Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:3 msg type: 268697639
2025-03-28T09:31:18.359621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897
2025-03-28T09:31:18.359936Z node 2 :HIVE INFO: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0
2025-03-28T09:31:18.360158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944
2025-03-28T09:31:18.360366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0
2025-03-28T09:31:18.361255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-03-28T09:31:18.361435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102
2025-03-28T09:31:18.362314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944
2025-03-28T09:31:18.374286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102
2025-03-28T09:31:18.374363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0
2025-03-28T09:31:18.374515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102
FAKE_COORDINATOR: Erasing txId 102
2025-03-28T09:31:18.376274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944
2025-03-28T09:31:18.376410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944
2025-03-28T09:31:18.376450Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState
2025-03-28T09:31:18.376562Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-03-28T09:31:18.376598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-03-28T09:31:18.376661Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1
2025-03-28T09:31:18.376699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-03-28T09:31:18.376736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true
2025-03-28T09:31:18.376799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:342:2321] message: TxId: 102
2025-03-28T09:31:18.376846Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1
2025-03-28T09:31:18.376887Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0
2025-03-28T09:31:18.376943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0
2025-03-28T09:31:18.377067Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2
2025-03-28T09:31:18.378954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-03-28T09:31:18.379019Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:403:2375]
TestWaitNotification: OK eventTxId 102
TestModificationResults wait txId: 103
2025-03-28T09:31:18.382845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "mess age" Type: "Utf8" } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-28T09:31:18.383096Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterOlapStore Propose, path: /MyRoot/OlapStore, opId: 103:0, at schemeshard: 72057594046678944
2025-03-28T09:31:18.383440Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusSchemeError,
reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944
2025-03-28T09:31:18.385747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:31:18.385916Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore
TestModificationResult got TxId: 103, wait until txId: 103
TestWaitNotification wait txId: 103
2025-03-28T09:31:18.386228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion
2025-03-28T09:31:18.386281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103
2025-03-28T09:31:18.386762Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944
2025-03-28T09:31:18.386897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult
2025-03-28T09:31:18.386949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:439:2411]
TestWaitNotification: OK eventTxId 103
>> Viewer::JsonAutocompleteSimilarDatabaseName
>> TOlap::CreateTableTtl [GOOD]
>> DataShardVolatile::DistributedWriteThenScanQuery [GOOD]
>> DataShardVolatile::DistributedWriteWithAsyncIndex
|91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableTtl [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-28T09:31:16.842160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-28T09:31:16.842271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:31:16.842307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-28T09:31:16.842343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
InflightLimit# 10
2025-03-28T09:31:16.843288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-28T09:31:16.844626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:31:16.913552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:31:16.913611Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:31:16.926824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:31:16.927027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-28T09:31:16.927146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-28T09:31:16.933443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-28T09:31:16.933637Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-28T09:31:16.936751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-28T09:31:16.937532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-28T09:31:16.941869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:31:16.948532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:31:16.948612Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:31:16.949608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-28T09:31:16.949656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:31:16.949702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-28T09:31:16.949770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-28T09:31:16.955856Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-28T09:31:17.056699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-28T09:31:17.056903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:31:17.057105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-28T09:31:17.057282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-28T09:31:17.057335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:31:17.059603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-28T09:31:17.059740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-28T09:31:17.059886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:31:17.059966Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-28T09:31:17.060001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-28T09:31:17.060035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-28T09:31:17.061685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:31:17.061746Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-28T09:31:17.061773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-28T09:31:17.062999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:31:17.063056Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:31:17.063095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:31:17.063138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-28T09:31:17.065909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-28T09:31:17.067572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-28T09:31:17.068625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-28T09:31:17.069529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-28T09:31:17.069643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:31:17.069684Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:31:17.070566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-28T09:31:17.070616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:31:17.070769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-28T09:31:17.070847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-28T09:31:17.072587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:31:17.072628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:31:17.072785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:31:17.072817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
FAKE_COORDINATOR: Erasing txId 1
2025-03-28T09:31:17.073477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:31:17.073518Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-28T09:31:17.073591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:31:17.073618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:31:17.073646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:31:17.073675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:31:17.073710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-28T09:31:17.073746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:31:17.073785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-28T09:31:17.073820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-28T09:31:17.073873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-28T09:31:17.073899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-28T09:31:17.073932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-28T09:31:17.075372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-28T09:31:17.075482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-28T09:31:17.075514Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
SHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944
2025-03-28T09:31:19.527506Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState
2025-03-28T09:31:19.527638Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1
2025-03-28T09:31:19.527689Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1
2025-03-28T09:31:19.527735Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1
2025-03-28T09:31:19.527773Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1
2025-03-28T09:31:19.527817Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true
2025-03-28T09:31:19.527890Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:341:2320] message: TxId: 105
2025-03-28T09:31:19.527956Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1
2025-03-28T09:31:19.528006Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0
2025-03-28T09:31:19.528056Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0
2025-03-28T09:31:19.528198Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2
2025-03-28T09:31:19.529910Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult
2025-03-28T09:31:19.529965Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:524:2495]
TestWaitNotification: OK eventTxId 105
2025-03-28T09:31:19.530626Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-28T09:31:19.530940Z node 3 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/Table3" took 334us result status StatusSuccess
2025-03-28T09:31:19.531448Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/Table3" PathDescription { Self { Name: "Table3" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 105 CreateStep: 5000006 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "Table3" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 360 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } Version: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 2 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
TestModificationResults wait txId: 106
2025-03-28T09:31:19.535482Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "Table4" TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 3600000000 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } } ColumnShardCount: 1 } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-28T09:31:19.535847Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/OlapStore/Table4, opId: 106:0, at schemeshard: 72057594046678944
2025-03-28T09:31:19.536275Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: OlapStore, child name: Table4, child id: [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944
2025-03-28T09:31:19.536350Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 0
2025-03-28T09:31:19.536582Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1
2025-03-28T09:31:19.536908Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-28T09:31:19.536958Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 106:0, at schemeshard: 72057594046678944
2025-03-28T09:31:19.537088Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6
2025-03-28T09:31:19.537152Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2
2025-03-28T09:31:19.539446Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusAccepted TxId: 106 SchemeshardId: 72057594046678944 PathId: 7, at schemeshard: 72057594046678944
2025-03-28T09:31:19.539632Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/
2025-03-28T09:31:19.539873Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:31:19.539920Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 2]
2025-03-28T09:31:19.540160Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 7]
2025-03-28T09:31:19.540270Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:31:19.540315Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:206:2208], at schemeshard: 72057594046678944, txId: 106, path id: 2
2025-03-28T09:31:19.540365Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:206:2208], at schemeshard: 72057594046678944, txId: 106, path id: 7
2025-03-28T09:31:19.540433Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944
2025-03-28T09:31:19.540484Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState at tabletId# 72057594046678944
2025-03-28T09:31:19.540673Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546
2025-03-28T09:31:19.541969Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 106
2025-03-28T09:31:19.542072Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 10 PathOwnerId: 72057594046678944, cookie: 106
2025-03-28T09:31:19.542108Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106
2025-03-28T09:31:19.542159Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 10
2025-03-28T09:31:19.542216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7
2025-03-28T09:31:19.543402Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 1 PathOwnerId: 72057594046678944, cookie: 106
2025-03-28T09:31:19.543496Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 1 PathOwnerId: 72057594046678944, cookie: 106
2025-03-28T09:31:19.543526Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106
2025-03-28T09:31:19.543557Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 1
2025-03-28T09:31:19.543588Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3
2025-03-28T09:31:19.543662Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true
2025-03-28T09:31:19.545146Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382272
2025-03-28T09:31:19.545289Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546
2025-03-28T09:31:19.546421Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106
2025-03-28T09:31:19.547528Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106
TestModificationResult got TxId: 106, wait until txId: 106
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546
>> TOlap::AlterTtl [GOOD]
>> DataShardVolatile::DistributedWriteThenScanQuery [GOOD]
>> DataShardVolatile::DistributedWriteWithAsyncIndex
|91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
>> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink [GOOD]
>> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink
|91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
|91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest
>> S3SettingsConversion::StyleDeduction [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::AlterTtl [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-28T09:31:16.842220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-28T09:31:16.842376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:31:16.842427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-28T09:31:16.842468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-28T09:31:16.843137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-28T09:31:16.843192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-28T09:31:16.843278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:31:16.843392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:31:16.844694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:31:16.937280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:16.937345Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:16.953130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:31:16.953396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:31:16.953574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:31:16.959957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:31:16.960176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:31:16.960804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:16.961013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:31:16.962863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:16.964134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:31:16.964197Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:16.964356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:31:16.964400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:31:16.964447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:31:16.964524Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:31:16.970921Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:31:17.111835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:31:17.112089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.112338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:31:17.112600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:31:17.112662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.114713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:17.114839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:31:17.115012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.115078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:31:17.115119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:31:17.115153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:31:17.116911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.116977Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:31:17.117016Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:31:17.118578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.118635Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.118704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:17.118758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.122523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:31:17.124356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:31:17.124562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:31:17.125568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:17.125687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:31:17.125736Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:17.126026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change 
state for txid 1:0 128 -> 240 2025-03-28T09:31:17.126082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:17.126257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:31:17.126327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:31:17.128364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:31:17.128416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:31:17.128597Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:17.128641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:31:17.129004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.129054Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:31:17.129149Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:31:17.129183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.129227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:31:17.129276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.129316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:31:17.129376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.129434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:31:17.129466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:31:17.129533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:31:17.129569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:31:17.129601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:31:17.131486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:31:17.131594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:31:17.131632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-03-28T09:31:20.097717Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 106, at schemeshard: 72057594046678944 2025-03-28T09:31:20.097761Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-03-28T09:31:20.097805Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 106, at schemeshard: 72057594046678944 2025-03-28T09:31:20.131029Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 106 MinStep: 0 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2025-03-28T09:31:20.131097Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-03-28T09:31:20.131252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Status: PREPARED TxKind: TX_KIND_SCHEMA Origin: 72075186233409546 TxId: 106 MinStep: 0 MaxStep: 18446744073709551615 DomainCoordinators: 72057594046316545 2025-03-28T09:31:20.131298Z node 3 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-03-28T09:31:20.131354Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 106:0, left await: 0, at schemeshard: 72057594046678944 2025-03-28T09:31:20.131394Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 3 -> 128 2025-03-28T09:31:20.133367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:31:20.133521Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:31:20.133571Z node 3 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId# 106:0 HandleReply ProgressState at tablet: 72057594046678944 2025-03-28T09:31:20.133646Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 106 ready parts: 1/1 2025-03-28T09:31:20.133797Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 106 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:31:20.135360Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:106 msg type: 269090816 2025-03-28T09:31:20.135485Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 106, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 106 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 106 at step: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 106 at step: 5000007 2025-03-28T09:31:20.145585Z node 3 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=106;fline=tx_controller.cpp:211;event=finished_tx;tx_id=106; 2025-03-28T09:31:20.145994Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:20.146127Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 106 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 12884904045 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:31:20.146190Z node 3 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TPropose operationId# 106:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000007 2025-03-28T09:31:20.147005Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 106:0 128 -> 129 2025-03-28T09:31:20.147254Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:31:20.147319Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000007 2025-03-28T09:31:20.150677Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:31:20.150747Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:31:20.150968Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:31:20.151122Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:20.151169Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:206:2208], at schemeshard: 72057594046678944, txId: 106, path id: 2 2025-03-28T09:31:20.151216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:206:2208], at schemeshard: 72057594046678944, txId: 106, path id: 3 2025-03-28T09:31:20.151773Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:31:20.151827Z node 3 :FLAT_TX_SCHEMESHARD INFO: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:31:20.151893Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2025-03-28T09:31:20.152901Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2025-03-28T09:31:20.153000Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2025-03-28T09:31:20.153038Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-03-28T09:31:20.153077Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 8 2025-03-28T09:31:20.153137Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:31:20.154066Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2025-03-28T09:31:20.154143Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2025-03-28T09:31:20.154166Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2025-03-28T09:31:20.154195Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 14 2025-03-28T09:31:20.154223Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:31:20.154289Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2025-03-28T09:31:20.156901Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2025-03-28T09:31:20.158422Z node 3 :TX_TIERING ERROR: fline=manager.cpp:158;error=cannot_read_secrets;reason=Can't read access key: No such secret: SId:secret; 2025-03-28T09:31:20.158779Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-28T09:31:20.158871Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2025-03-28T09:31:20.171071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2025-03-28T09:31:20.171153Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2025-03-28T09:31:20.171275Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 FAKE_COORDINATOR: Erasing txId 106 2025-03-28T09:31:20.173169Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:31:20.173327Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:31:20.173366Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 106:0 ProgressState 2025-03-28T09:31:20.173490Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-28T09:31:20.173527Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:31:20.173570Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-28T09:31:20.173606Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:31:20.173647Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is 
published: true 2025-03-28T09:31:20.173723Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:341:2320] message: TxId: 106 2025-03-28T09:31:20.173799Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:31:20.173842Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-03-28T09:31:20.173874Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 106:0 2025-03-28T09:31:20.174004Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:31:20.175874Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-28T09:31:20.175937Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:550:2521] TestWaitNotification: OK eventTxId 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 >> TConsoleTests::TestListTenantsExtSubdomain [GOOD] >> TConsoleTests::TestModifyUsedZoneKind >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::StyleDeduction [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TConsoleTests::TestTenantGeneration [GOOD] >> TConsoleTests::TestTenantGenerationExtSubdomain >> ColumnShardTiers::TieringUsage |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs >> S3SettingsConversion::FoldersStyleDeduction [GOOD] >> S3SettingsConversion::Port [GOOD] >> TConsoleTests::TestCreateTenantWrongName [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpExtractPredicateLookup::PointJoin [GOOD] >> KqpExtractPredicateLookup::SqlInJoin |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStyleDeduction [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Port [GOOD] >> TOlapNaming::CreateColumnTableOk [GOOD] >> TOlapNaming::CreateColumnTableFailed |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest 
>> TOlapNaming::CreateColumnTableFailed [GOOD] >> TOlapNaming::AlterColumnTableOk [GOOD] >> TOlapNaming::AlterColumnTableFailed |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TGroupMapperTest::ReassignGroupTest3dc [GOOD] >> Viewer::JsonAutocompleteEmpty [GOOD] >> Viewer::JsonAutocompleteEndOfDatabaseName >> TPersQueueTest::TxCounters [GOOD] |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> Viewer::JsonAutocompleteColumns [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer >> Viewer::JsonAutocompleteStartOfDatabaseName [GOOD] >> Viewer::JsonStorageListingV1 >> ColumnShardTiers::DSConfigsWithQueryServiceDdl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnTableFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:31:16.842174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:31:16.842303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:31:16.842347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:31:16.842381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:31:16.843080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:31:16.843132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:31:16.843200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:31:16.843292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:31:16.844666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:31:16.915450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:16.915505Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:16.927620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:31:16.927818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:31:16.927944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:31:16.933459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:31:16.933604Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear 
TempDirsState with owners number: 0 2025-03-28T09:31:16.936745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:16.937535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:31:16.942040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:16.948622Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:31:16.948712Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:16.949625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:31:16.949675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:31:16.949727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:31:16.949810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:31:16.955864Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:31:17.055656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:31:17.055831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.056167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:31:17.056374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:31:17.056418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.058347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:17.058466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:31:17.058611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.058649Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:31:17.058672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:31:17.058719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:31:17.060418Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.060502Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:31:17.060557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:31:17.062133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.062184Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.062223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:17.062268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.065906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:31:17.067693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:31:17.068637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:31:17.069621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:17.069723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:31:17.069773Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:17.070621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:31:17.070699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:17.070848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:31:17.070919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:31:17.072839Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:31:17.072882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:31:17.073033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-03-28T09:31:17.073071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:31:17.073514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.073565Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:31:17.073648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:31:17.073678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.073716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:31:17.073747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.073780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:31:17.073824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.073877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:31:17.073911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:31:17.073966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:31:17.074013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:31:17.074059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:31:17.075746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:31:17.075846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:31:17.075879Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
State, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:31:23.612823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:31:23.612858Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:31:23.614828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:23.614898Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:31:23.614941Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:31:23.616648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:23.616696Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:23.616745Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:23.616803Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:31:23.616950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:31:23.618460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:31:23.618644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:31:23.619541Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:23.619673Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 8589936749 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:31:23.619723Z node 2 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:23.619998Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:31:23.620069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:23.620250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:31:23.620327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:31:23.622118Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-03-28T09:31:23.622168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:31:23.622350Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:23.622393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:31:23.622779Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:23.622831Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:31:23.622940Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:31:23.622983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:23.623034Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:31:23.623071Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:23.623112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:31:23.623159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:23.623203Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:31:23.623236Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:31:23.623306Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:31:23.623349Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:31:23.623385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:31:23.623897Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:31:23.623998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:31:23.624035Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:31:23.624091Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:31:23.624130Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:31:23.624225Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:31:23.627028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:31:23.627534Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 
1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:31:23.628378Z node 2 :TX_PROXY DEBUG: actor# [2:270:2261] Bootstrap 2025-03-28T09:31:23.646236Z node 2 :TX_PROXY DEBUG: actor# [2:270:2261] Become StateWork (SchemeCache [2:275:2266]) 2025-03-28T09:31:23.649151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "mess age" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:31:23.649499Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:23.649742Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2025-03-28T09:31:23.650979Z node 2 :TX_PROXY DEBUG: actor# [2:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:31:23.653189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:31:23.653342Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-03-28T09:31:23.654599Z node 2 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:31:23.654852Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:31:23.654898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:31:23.655287Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:31:23.655395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:31:23.655437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:285:2276] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-28T09:31:23.658445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "~!@#$%^&*()+=asdfa" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:31:23.658735Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:31:23.658917Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', at schemeshard: 72057594046678944 
2025-03-28T09:31:23.661007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Invalid name for column \'~!@#$%^&*()+=asdfa\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:31:23.661131Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:31:23.661382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:31:23.661419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:31:23.661738Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:31:23.661818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:31:23.661848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:292:2283] TestWaitNotification: OK eventTxId 102 >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::ReassignGroupTest3dc [GOOD] >> TConsoleTests::TestModifyUsedZoneKind [GOOD] >> TConsoleTests::TestMergeConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumns [GOOD] Test command err: 2025-03-28T09:31:22.458921Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:22.459315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:22.459384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 14261, node 1 TClient is connected to server localhost:3062 >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongPool |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsStub |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TConsoleTests::TestTenantGenerationExtSubdomain [GOOD] >> TConsoleTests::TestSchemeShardErrorForwarding ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::TxCounters [GOOD] Test command err: === Server->StartServer(false); 2025-03-28T09:25:18.589927Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486789072930532278:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.590016Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:25:18.626243Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486789070273439420:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:25:18.626358Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001246/r3tmp/tmp7SEqVe/pdisk_1.dat 2025-03-28T09:25:18.803869Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:25:18.809780Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:25:19.036233Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:25:19.044665Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:19.044796Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:19.045915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:25:19.045971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:25:19.055755Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:25:19.055856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:25:19.057712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24782, node 1 2025-03-28T09:25:19.183387Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: /home/runner/.ya/build/build_root/2909/001246/r3tmp/yandexojmRU6.tmp 2025-03-28T09:25:19.183421Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001246/r3tmp/yandexojmRU6.tmp 2025-03-28T09:25:19.183583Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001246/r3tmp/yandexojmRU6.tmp 2025-03-28T09:25:19.183698Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:25:19.313998Z INFO: TTestServer started on Port 65133 GrpcPort 24782 TClient is connected to server localhost:65133 PQClient connected to localhost:24782 === TenantModeEnabled() = 0 === Init PQ - start server on port 24782 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:25:19.864384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:25:19.864648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.864907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:25:19.865160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:25:19.865205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.871473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:25:19.871612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:25:19.871785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.871925Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 
2025-03-28T09:25:19.871949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-28T09:25:19.871962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 2025-03-28T09:25:19.875718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.875846Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:25:19.875867Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-28T09:25:19.880421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:25:19.880449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-28T09:25:19.880472Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:25:19.880884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.880929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.880982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:25:19.881051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:25:19.908273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:25:19.923301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-28T09:25:19.923442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:25:19.931709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743153919969, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:25:19.931854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743153919969 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:25:19.931883Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:25:19.932188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-28T09:25:19.932221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:25:19.932375Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:25:19.932442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:25:19.935916Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:25:19.935947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:25:19.936120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:25:19.936136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486789077225500219:2393], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-28T09:25:19.936195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:25:19.936217Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-28T09:25:19.936316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:25:19.936327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:25:19.936345Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:25:19.936353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:25:19.936371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-28T09:25:19.936388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:25:19.936399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-28T09:25:19.936426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-03-28T09:25:19.936491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 720575940 ... dateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-28T09:31:23.046147Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [32:7486790637760730504:2497] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-03-28T09:31:23.046201Z node 32 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:31:23.047252Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-03-28T09:31:23.047431Z node 32 :PERSQUEUE INFO: new Cookie 123|5ca69d2e-1ac99dae-aa09c90b-878a5912_0 generated for partition 0 topic 'topic' owner 123 2025-03-28T09:31:23.048085Z node 32 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 123|5ca69d2e-1ac99dae-aa09c90b-878a5912_0 2025-03-28T09:31:23.058048Z node 32 :PQ_READ_PROXY DEBUG: new Describe partition request 2025-03-28T09:31:23.058300Z node 32 :PQ_READ_PROXY DEBUG: TDescribePartitionActor for request path: "/Root/topic" include_location: true 2025-03-28T09:31:23.058426Z node 32 :PQ_READ_PROXY DEBUG: TDescribePartitionActor[32:7486790637760730510:2499]: Bootstrap 2025-03-28T09:31:23.061947Z node 32 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [32:7486790637760730510:2499]: Request location 2025-03-28T09:31:23.062505Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7486790637760730519:2500] connected; active server actors: 1 2025-03-28T09:31:23.062783Z node 32 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 32, Generation 1 2025-03-28T09:31:23.062893Z node 32 :PQ_READ_PROXY DEBUG: DescribeTopicImpl [32:7486790637760730510:2499]: Got location 2025-03-28T09:31:23.063195Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7486790637760730519:2500] disconnected; active server actors: 1 2025-03-28T09:31:23.063248Z node 32 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][topic] pipe [32:7486790637760730519:2500] disconnected no session 2025-03-28T09:31:23.065572Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 123|5ca69d2e-1ac99dae-aa09c90b-878a5912_0 grpc read done: success: 0 data: 2025-03-28T09:31:23.065620Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|5ca69d2e-1ac99dae-aa09c90b-878a5912_0 grpc read failed 2025-03-28T09:31:23.066437Z node 32 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 2 sessionId: 123|5ca69d2e-1ac99dae-aa09c90b-878a5912_0 2025-03-28T09:31:23.066493Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 123|5ca69d2e-1ac99dae-aa09c90b-878a5912_0 is DEAD 2025-03-28T09:31:23.067269Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:31:23.076410Z node 32 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-28T09:31:23.076460Z node 32 :PQ_WRITE_PROXY DEBUG: new session created cookie 3 2025-03-28T09:31:23.077677Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: grpc read done: success: 1 data: init_request { path: "topic" producer_id: "123" partition_with_generation { generation: 1 } } 2025-03-28T09:31:23.077884Z node 32 :PQ_WRITE_PROXY INFO: session request cookie: 3 path: "topic" producer_id: "123" partition_with_generation { generation: 1 } from ipv6:[::1]:34022 2025-03-28T09:31:23.077919Z node 32 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="topic server" ip=ipv6:[::1]:34022 proto=topic topic=topic durationSec=0 2025-03-28T09:31:23.077935Z node 32 :PQ_WRITE_PROXY INFO: init check schema 2025-03-28T09:31:23.077986Z node 32 :PQ_WRITE_PROXY INFO: session to partition: 0, generation: 1 2025-03-28T09:31:23.079671Z node 32 :PQ_WRITE_PROXY 
INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-03-28T09:31:23.079953Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-28T09:31:23.079988Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:31:23.080019Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-28T09:31:23.080059Z node 32 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [32:7486790637760730524:2502] (SourceId=123, PreferedPartition=0) ReplyResult: Partition=0, SeqNo=0 2025-03-28T09:31:23.080081Z node 32 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 3 sessionId: partition: 0 expectedGeneration: 1 2025-03-28T09:31:23.081222Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-03-28T09:31:23.081551Z node 32 :PERSQUEUE INFO: new Cookie 123|3a6c0975-a6cb230a-ce0bad04-1ce65bcc_0 generated for partition 0 topic 'topic' owner 123 2025-03-28T09:31:23.082231Z node 32 :PQ_WRITE_PROXY INFO: session inited cookie: 3 partition: 0 MaxSeqNo: 0 sessionId: 123|3a6c0975-a6cb230a-ce0bad04-1ce65bcc_0 2025-03-28T09:31:23.084210Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|3a6c0975-a6cb230a-ce0bad04-1ce65bcc_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-28T09:31:23.085936Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|3a6c0975-a6cb230a-ce0bad04-1ce65bcc_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-28T09:31:23.086183Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|3a6c0975-a6cb230a-ce0bad04-1ce65bcc_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-28T09:31:23.086432Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|3a6c0975-a6cb230a-ce0bad04-1ce65bcc_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-28T09:31:23.086712Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 32, Generation: 1 2025-03-28T09:31:23.091222Z node 32 :PQ_WRITE_PROXY DEBUG: SessionId: ydb://session/3?node_id=32&id=ODdhOWQwMy04OWIyOWFkYi02YWNlYjllMi0xZTNlODA3NA== TxId: 01jqe1f0d59z0ntne84mwmgf3e WriteId: {32, 281474976710673} 2025-03-28T09:31:23.100275Z node 32 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: {0, {32, 281474976710673}, 100000}, State: StateInit] bootstrapping {0, {32, 
281474976710673}, 100000} [32:7486790637760730536:2504] 2025-03-28T09:31:23.103462Z node 32 :PERSQUEUE INFO: [topic:{0, {32, 281474976710673}, 100000}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:31:23.103554Z node 32 :PERSQUEUE INFO: [PQ: 72075186224037892, Partition: {0, {32, 281474976710673}, 100000}, State: StateInit] init complete for topic 'topic' partition {0, {32, 281474976710673}, 100000} generation 1 [32:7486790637760730536:2504] 2025-03-28T09:31:23.103995Z node 32 :PERSQUEUE INFO: new Cookie 123|a19be062-91b31bd9-65fe1546-dbb5717e_0 generated for partition {0, {32, 281474976710673}, 100000} topic 'topic' owner 123 2025-03-28T09:31:23.106090Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:31:23.106142Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:31:23.106165Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:31:23.106192Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:31:23.110905Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:31:23.115831Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:31:23.115870Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:31:23.115890Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:31:23.191763Z node 32 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 3 sessionId: 123|3a6c0975-a6cb230a-ce0bad04-1ce65bcc_0 grpc read done: success: 0 data: 2025-03-28T09:31:23.191800Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|3a6c0975-a6cb230a-ce0bad04-1ce65bcc_0 grpc read failed 2025-03-28T09:31:23.191871Z node 32 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 3 sessionId: 123|3a6c0975-a6cb230a-ce0bad04-1ce65bcc_0 2025-03-28T09:31:23.191889Z node 32 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: 123|3a6c0975-a6cb230a-ce0bad04-1ce65bcc_0 is DEAD 2025-03-28T09:31:23.192426Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:31:23.192489Z node 32 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison Counters: ================================
name=api.grpc.topic.stream_write.bytes: 20796
name=api.grpc.topic.stream_write.messages: 4
name=api.grpc.topic.stream_write.uncommitted_bytes: 20796
name=api.grpc.topic.stream_write.uncommitted_messages: 4
name=topic.write.bytes: 20796
name=topic.write.discarded_bytes: 0
name=topic.write.discarded_messages: 0
name=topic.write.messages: 4
name=topic.write.uncommitted_bytes: 20796
name=topic.write.uncommitted_messages: 4
name=topic.write.uncompressed_bytes: 16
name=topic.write.lag_milliseconds:
    bin=100: 0
    bin=1000: 0
    bin=10000: 0
    bin=180000: 0
    bin=200: 0
    bin=2000: 3
    bin=30000: 0
    bin=500: 0
    bin=5000: 1
    bin=60000: 0
    bin=999999: 0
name=topic.write.message_size_bytes:
    bin=1024: 1
    bin=10240: 2
    bin=102400: 0
    bin=1048576: 0
    bin=10485760: 0
    bin=20480: 1
    bin=204800: 0
    bin=2097152: 0
    bin=5120: 0
    bin=51200: 0
    bin=524288: 0
    bin=5242880: 0
    bin=67108864: 0
    bin=99999999: 0
name=topic.write.partition_throttled_milliseconds:
    bin=0: 4
    bin=1: 0
    bin=10: 0
    bin=100: 0
    bin=1000: 0
    bin=10000: 0
    bin=20: 0
    bin=2500: 0
    bin=5: 0
    bin=50: 0
    bin=500: 0
    bin=5000: 0
    bin=999999: 0
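The name=... counters above come in two shapes: plain scalars (e.g. topic.write.messages: 4) and histograms whose bin=N lines give per-bucket counts with N as the bucket upper bound. A quick consistency check is that each write histogram's bins sum to the message count — here message_size_bytes gives 1 + 2 + 1 = 4 and lag_milliseconds gives 3 + 1 = 4, both matching topic.write.messages. A minimal sketch of that check, assuming the dump format is exactly the name=/bin= layout printed above:

    def histogram_total(dump: str, counter: str) -> int:
        # Sum the per-bin counts of one histogram counter from a text dump
        # shaped like the block above.
        total, in_block = 0, False
        for line in dump.splitlines():
            line = line.strip()
            if line.startswith("name="):
                # Histogram headers end with a bare colon; scalars carry a value.
                in_block = (line == f"name={counter}:")
            elif in_block and line.startswith("bin="):
                total += int(line.split(":", 1)[1])
        return total

    # histogram_total(dump, "topic.write.message_size_bytes") == 4
    # histogram_total(dump, "topic.write.lag_milliseconds")   == 4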
2025-03-28T09:31:23.241728Z node 32 :PERSQUEUE WARN: [PQ: 72075186224037892] Unknown transaction 281474976710674 >> DataShardVolatile::DistributedWriteWithAsyncIndex [GOOD] >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> Viewer::JsonAutocompleteSimilarDatabaseName [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink [GOOD] >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Basic >> S3SettingsConversion::Basic [GOOD] |91.6%| [TA] $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] Test command err: 2025-03-28T09:26:38.850947Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:38.949789Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:38.975902Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:38.976238Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:38.984917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:38.985163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:38.985402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:38.985523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:38.985655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:38.985794Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:38.985935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:38.986052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:38.986172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:38.986276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.986411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:38.986528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:39.022096Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:39.022404Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:39.022463Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:39.022661Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:39.022929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:39.023030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:39.023084Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:39.023198Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:39.023290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:39.023352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:39.023394Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:39.023600Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:39.023685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:39.023743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:39.023775Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:39.023941Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:39.024014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:39.024061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:39.024093Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:39.024165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:39.024215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:39.024248Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:39.024299Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:39.024339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:39.024377Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:39.024770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=57; 2025-03-28T09:26:39.024888Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=54; 2025-03-28T09:26:39.024986Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=46; 2025-03-28T09:26:39.025092Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=46; 2025-03-28T09:26:39.025286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:39.025355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:39.025393Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:39.025583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:39.025626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:39.025656Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:39.025821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:39.025869Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:39.025899Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:39.026112Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:39.026169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:39.026204Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:39.026342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:39.026392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:39.026439Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
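The TTxUpdateSchema trace above (and its near-identical twin at the end of this section) walks the columnshard's normalizer chain in a fixed order — Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, SyncPortionFromChunks, SyncMinSnapshotFromChunks, RestoreV1Chunks_V2, RestoreV2Chunks — with several of them reporting "0 chunks found" before handing off to the next. When eyeballing long dumps like this, a small helper can recover that order from the raw text; a sketch assuming the description=CLASS_NAME=... fields appear exactly as printed:

    import re

    def normalizer_order(log: str) -> list[str]:
        # 'normalizer_switched' records each handoff to the next normalizer,
        # so the match order reproduces the chain shown in the trace
        # (the first normalizer appears only via 'normalizer_init').
        return re.findall(
            r"event=normalizer_switched;description=CLASS_NAME=([A-Za-z0-9_]+)",
            log,
        )

    # For the trace above: ['Chunks', 'TablesCleaner', 'CleanGranuleId', ...]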
B:0:2688];;column_id:8;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB
:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:
0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:60;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_
size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:56;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););; 2025-03-28T09:31:26.126042Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11572:13199];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-28T09:31:26.127924Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11572:13199];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> ColumnShardTiers::TTLUsage |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Basic [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.7%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStrictStyle [GOOD] >> CompressExecutor::TestReorderedExecutor [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TConsoleTests::TestCreateTenantWrongPool [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStrictStyle [GOOD] >> TConsoleTests::TestMergeConfig [GOOD] >> TConsoleTests::TestRemoveTenant >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TConsoleTests::TestSchemeShardErrorForwarding [GOOD] >> TConsoleTests::TestScaleRecommenderPolicies |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TOlapNaming::AlterColumnTableFailed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] Test command err: 2025-03-28T09:26:44.729877Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:44.833401Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:44.860058Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:44.860424Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:44.870161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:44.870378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:44.870670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:44.870812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:44.870934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:44.871054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:44.871254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:44.871355Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:44.871463Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:44.871581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.871698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:44.871807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:44.901877Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:44.902214Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:44.902312Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:44.902532Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:44.902731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:44.902825Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:44.902886Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:44.903007Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:44.903086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:44.903132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:44.903175Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:44.903423Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:44.903492Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:44.903532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:44.903563Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:44.903727Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:44.903790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:44.903839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:44.903903Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:44.903985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:44.904031Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:44.904068Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:44.904128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:44.904167Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:44.904205Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:44.904639Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=44; 2025-03-28T09:26:44.904745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=52; 2025-03-28T09:26:44.904838Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=49; 2025-03-28T09:26:44.904945Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=51; 2025-03-28T09:26:44.905087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:44.905131Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:44.905159Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:44.905332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:44.905362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.905381Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:44.905536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:44.905595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:44.905632Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:44.905832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:44.905873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:44.905903Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:44.906063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:44.906125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:44.906176Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
LOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BL
OB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLO
B:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:61;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_
size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:57;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-03-28T09:31:28.679548Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11489:13116];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-28T09:31:28.681446Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11489:13116];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TSchemeShardServerLess::StorageBilling [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnTableFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:31:16.842146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:31:16.842316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:31:16.842360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:31:16.842391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:31:16.843097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:31:16.843131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:31:16.843204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:31:16.843295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:31:16.844650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:31:16.952350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2025-03-28T09:31:16.952438Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:16.970844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:31:16.971143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:31:16.971351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:31:16.978258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:31:16.978518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:31:16.979390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:16.979644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:31:16.981806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:16.983463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:31:16.983612Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:16.983838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:31:16.983907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:31:16.983977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:31:16.984107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:31:16.992625Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:31:17.168323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:31:17.168607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.168866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:31:17.169127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:31:17.169195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.171494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:17.171627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: 
//MyRoot 2025-03-28T09:31:17.171797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.171856Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:31:17.171897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:31:17.171930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:31:17.173825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.173897Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:31:17.173934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:31:17.175419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.175477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.175528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:17.175593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.179235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:31:17.181050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:31:17.181238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:31:17.182227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:31:17.182345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:31:17.182397Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:17.182664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:31:17.182741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:31:17.182910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:31:17.182991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant 
no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:31:17.184929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:31:17.184996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:31:17.185164Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:31:17.185205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:31:17.185599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:31:17.185650Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:31:17.185741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:31:17.185782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.185819Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:31:17.185854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.185895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:31:17.185933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:31:17.185993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:31:17.186033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:31:17.186113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:31:17.186157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:31:17.186203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:31:17.194310Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:31:17.194454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:31:17.194499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
d: 0 2025-03-28T09:31:29.720979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409609 TxId: 101 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:31:29.725835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.726049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.726199Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.726374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.726520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.726640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.727038Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.727275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.737039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.737238Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.737355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.737458Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.737550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.737653Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.737723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.737840Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 
72057594046678944 2025-03-28T09:31:29.741113Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.741296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.741404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.741538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.741637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.741740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.741873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.742424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.744410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.744488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.744550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.744597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.744640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.744684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.744748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.744794Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.744833Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.744916Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.744948Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:31:29.745058Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:31:29.745092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:31:29.745135Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:31:29.745165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:31:29.745194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-28T09:31:29.745259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:2779:4045] message: TxId: 101 2025-03-28T09:31:29.745307Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:31:29.745383Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:31:29.745422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:31:29.746128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-03-28T09:31:29.749078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:31:29.749121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:2780:4046] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-28T09:31:29.751927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TestTable" AlterSchema { AddColumns { Name: "New Column" Type: "Int32" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:31:29.752124Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TestTable, opId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:31:29.752338Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: update parse error: Invalid name for column 'New Column'. in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2025-03-28T09:31:29.754652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "update parse error: Invalid name for column \'New Column\'. in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:31:29.754812Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: update parse error: Invalid name for column 'New Column'. 
in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TestTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:31:29.755123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:31:29.755157Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:31:29.755491Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:31:29.755582Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:31:29.755609Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:3580:4779] TestWaitNotification: OK eventTxId 102 >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer [GOOD] >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:30:46.680556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:30:46.680700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:30:46.680740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:30:46.680768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:30:46.681420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:30:46.681472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:30:46.681536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:30:46.681602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:30:46.682996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:30:46.768457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:30:46.768539Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:46.781835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:30:46.782102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:30:46.782251Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:30:46.788356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:30:46.788585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:30:46.791882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:30:46.793130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:30:46.799856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:30:46.807914Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:30:46.808078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:30:46.809200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:30:46.809291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:30:46.809378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:30:46.809481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.816548Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:30:46.961326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:30:46.961574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.961784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:30:46.962015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:30:46.962075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.964542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:30:46.964676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:30:46.964863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.964935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2025-03-28T09:30:46.964970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:30:46.965007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:30:46.966839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.966898Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:30:46.966934Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:30:46.968565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.968612Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.968657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:30:46.968702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:30:46.972504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:30:46.974276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:30:46.974971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:30:46.976186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:30:46.976354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:30:46.976406Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:30:46.978600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:30:46.978669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:30:46.978862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:30:46.978939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:30:46.981370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:30:46.981436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:30:46.981606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:30:46.981671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:30:46.982891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:30:46.982942Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:30:46.983073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:30:46.983139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:30:46.983188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:30:46.983225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:30:46.983263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:30:46.983311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:30:46.983350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:30:46.983382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:30:46.983451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:30:46.983502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:30:46.983556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:30:46.985556Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:30:46.985659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:30:46.985695Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
:0, name: RenamePathBarrier, done: 0, blocked: 1, parts count: 1 2025-03-28T09:31:30.532609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 107, done: 0, blocked: 1 2025-03-28T09:31:30.532664Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TDeleteTableBarrier operationId: 107:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 107 Name: RenamePathBarrier }, at tablet# 72075186233409549 2025-03-28T09:31:30.532830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 137 -> 129 2025-03-28T09:31:30.532941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2025-03-28T09:31:30.532987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-03-28T09:31:30.534396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2025-03-28T09:31:30.534521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72075186233409549 2025-03-28T09:31:30.535560Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409549 2025-03-28T09:31:30.535598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 107, path id: [OwnerId: 72075186233409549, LocalPathId: 1] 2025-03-28T09:31:30.535751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409549, txId: 107, path id: [OwnerId: 72075186233409549, LocalPathId: 2] 2025-03-28T09:31:30.535871Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409549 2025-03-28T09:31:30.535899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:659:2571], at schemeshard: 72075186233409549, txId: 107, path id: 1 2025-03-28T09:31:30.535928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:659:2571], at schemeshard: 72075186233409549, txId: 107, path id: 2 2025-03-28T09:31:30.536203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-03-28T09:31:30.536240Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72075186233409549 2025-03-28T09:31:30.536301Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72075186233409549 2025-03-28T09:31:30.536327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 107:0, datashard: 72075186233409552, at schemeshard: 72075186233409549 2025-03-28T09:31:30.536355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 107:0 129 -> 240 2025-03-28T09:31:30.537142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2025-03-28T09:31:30.537226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186233409549, cookie: 107 2025-03-28T09:31:30.537254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 
72075186233409549, txId: 107 2025-03-28T09:31:30.537282Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 1], version: 9 2025-03-28T09:31:30.537323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 5 2025-03-28T09:31:30.537890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-03-28T09:31:30.537946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2025-03-28T09:31:30.537962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2025-03-28T09:31:30.537981Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:31:30.537998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 4 2025-03-28T09:31:30.538040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2025-03-28T09:31:30.539851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2025-03-28T09:31:30.539886Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72075186233409549 2025-03-28T09:31:30.540143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 3 2025-03-28T09:31:30.540238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-03-28T09:31:30.540262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-28T09:31:30.540288Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#107:0 progress is 1/1 2025-03-28T09:31:30.540313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-28T09:31:30.540346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2025-03-28T09:31:30.540404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:798:2679] message: TxId: 107 2025-03-28T09:31:30.540436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2025-03-28T09:31:30.540468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 107:0 2025-03-28T09:31:30.540497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 107:0 2025-03-28T09:31:30.540579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 2 2025-03-28T09:31:30.541392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2025-03-28T09:31:30.542246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72075186233409549, cookie: 107 2025-03-28T09:31:30.543267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-03-28T09:31:30.543304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:2188:4034] TestWaitNotification: OK eventTxId 107 2025-03-28T09:31:30.560091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72075186233409549, message: Source { RawX1: 770 RawX2: 4294969955 } TabletId: 72075186233409552 State: 4 2025-03-28T09:31:30.560181Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: Offline, at schemeshard: 72075186233409549 2025-03-28T09:31:30.561814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72075186233409549:4 hive 72057594037968897 at ss 72075186233409549 2025-03-28T09:31:30.562202Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409552 2025-03-28T09:31:30.564334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72075186233409549 ShardLocalIdx: 4, at schemeshard: 72075186233409549 2025-03-28T09:31:30.564593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409549, LocalPathId: 2] was 1 2025-03-28T09:31:30.565123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409549 2025-03-28T09:31:30.565177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409549, LocalPathId: 2], at schemeshard: 72075186233409549 2025-03-28T09:31:30.565236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 4 2025-03-28T09:31:30.567546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72075186233409549:4 2025-03-28T09:31:30.567602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72075186233409549:4 tabletId 72075186233409552 2025-03-28T09:31:30.567949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409549 2025-03-28T09:31:30.701125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2025-03-28T09:31:30.701245Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2025-03-28T09:31:30.701318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2025-03-28T09:31:30.701399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2025-03-28T09:31:30.701459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2025-03-28T09:31:30.701492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2025-03-28T09:31:30.701538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-03-28T09:31:30.701566Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-28T09:31:30.701595Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-28T09:31:30.767849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:31:30.768153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":1600452180,"quantity":59,"finish":1600452239,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":0},"id":"72057594046678944-3-1600452180-1600452239-0","cloud_id":"CLOUD_ID_VAL","source_wt":1600452240,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 3], now: 2020-09-18T18:04:00.027000Z, LastBillTime: 2020-09-18T18:02:00.000000Z, lastBilled: 2020-09-18T18:02:00.000000Z--2020-09-18T18:02:59.000000Z, toBill: 2020-09-18T18:03:00.000000Z--2020-09-18T18:03:59.000000Z, next retry at: 2020-09-18T18:05:00.000000Z 2025-03-28T09:31:30.770056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete grabMeteringMessage has happened 2025-03-28T09:31:30.770232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TFakeMetering got TEvMetering::TEvWriteMeteringJson |91.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |91.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} >> Viewer::Cluster10000Tablets [GOOD] >> Viewer::FuzzySearcherLimit1OutOf4 [GOOD] >> Viewer::FuzzySearcherLimit2OutOf4 [GOOD] >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation >> Viewer::JsonAutocompleteEndOfDatabaseName [GOOD] >> Viewer::JsonAutocompleteScheme >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] Test command err: 2025-03-28T09:26:44.918740Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:45.004494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:45.027457Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:45.027744Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:45.035140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:45.035341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:45.035559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:45.035671Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:45.035810Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:45.035942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:45.036057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:45.036161Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:45.036257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:45.036370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:45.036502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:45.036598Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:45.066622Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:45.066987Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:45.067040Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:45.067227Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:45.067423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:45.067510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:45.067572Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:45.067697Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:45.067764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:45.067822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:45.067860Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:45.068032Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:45.068118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:45.068156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:45.068189Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:45.068324Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:45.068379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:45.068425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:45.068458Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:45.068528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:45.068564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:45.068593Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:45.068640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:45.068675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:45.068706Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:45.069080Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=70; 2025-03-28T09:26:45.069183Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=53; 2025-03-28T09:26:45.069274Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=48; 2025-03-28T09:26:45.069372Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=43; 2025-03-28T09:26:45.069552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:45.069613Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:45.069644Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:45.069846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:45.069890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:45.069915Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:45.070071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:45.070136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:45.070164Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:45.070344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:45.070390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:45.070418Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:45.070554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:45.070599Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:45.070643Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... LOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];
;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;
column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;c
olumn_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:61;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:57;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-03-28T09:31:30.712995Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11490:13117];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-28T09:31:30.715527Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11490:13117];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink >> KqpQueryPerf::UpdateOn-QueryService+UseSink >> KqpQueryPerf::IndexReplace-QueryService-UseSink >> KqpQueryPerf::RangeLimitRead+QueryService >> KqpQueryPerf::IdxLookupJoin+QueryService >> KqpQueryPerf::Upsert+QueryService+UseSink >> KqpWorkload::STOCK >> KqpQueryPerf::Upsert+QueryService-UseSink >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink >> KqpQueryPerf::IndexInsert-QueryService-UseSink >> KqpQueryPerf::MultiRead+QueryService >> KqpQueryPerf::ComputeLength+QueryService >> KqpQueryPerf::Delete-QueryService-UseSink >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs [GOOD] >> TBlobStorageProxyTest::TestEmptyRange >> KqpWorkload::KV >> KqpQueryPerf::Delete+QueryService-UseSink >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries >> Viewer::JsonAutocompleteSimilarDatabaseNameWithLimit [GOOD] >> Viewer::JsonAutocompleteSimilarDatabaseNamePOST >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink >> TConsoleTests::TestScaleRecommenderPolicies [GOOD] >> TConsoleTests::TestScaleRecommenderPoliciesValidation >> TGroupMapperTest::Block42_1disk [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] >> TBlobStorageProxyTest::TestEmptyRange [GOOD] |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::Block42_1disk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] Test command err: 2025-03-28T09:26:48.640066Z node 1 
:BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:48.726806Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:48.748677Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:48.749167Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:48.756732Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:48.756957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:48.757156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:48.757246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:48.757335Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:48.757434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:48.757516Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:48.757584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:48.757656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:48.757728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:48.757828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:48.757910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:48.784145Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:48.784475Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:48.784531Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:48.784730Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:48.784930Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:48.785022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:48.785073Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:48.785206Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:48.785275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:48.785333Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:48.785373Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:48.785541Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:48.785612Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:48.785658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:48.785695Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:48.785842Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:48.785898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:48.785954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:48.786005Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:48.786077Z node 
1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:48.786118Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:48.786150Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:48.786213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:48.786254Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:48.786290Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:48.786660Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=40; 2025-03-28T09:26:48.786798Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=66; 2025-03-28T09:26:48.786919Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-03-28T09:26:48.787033Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=62; 2025-03-28T09:26:48.787226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:48.787283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:48.787316Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:48.787523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:48.787570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:48.787621Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:48.787804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:48.787880Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:48.787916Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:48.788120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:48.788171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:48.788203Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:48.788360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:48.788414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:48.788468Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... LOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:70;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_r
ange:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:8464];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:70;bl
ob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:71;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:72;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:73;blob_range:[NO_BLOB:0:8448];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:9040];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:50;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:51;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:52;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:53;blob_range:[NO_BLOB:0:9024];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:8;blob_range:[
NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:9456];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:33;blob_range:[NO_BLOB:0:9448];;;;switched=(portion_id:61;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2586528;index_size:28;meta:((produced=SPLIT_COMPACTED;)););(portion_id:57;path_id:1;records_count:25002;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2167032;index_size:28;meta:((produced=INSERTED;)););; 2025-03-28T09:31:34.516889Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11489:13116];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-28T09:31:34.519583Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11489:13116];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain |91.8%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |91.8%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQueryPerf::Upsert-QueryService-UseSink >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression >> TConsoleTests::TestRemoveTenant [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSingle >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink >> Viewer::ExecuteQueryDoesntExecuteSchemeOperationsInsideTransation [GOOD] >> Viewer::FloatPointJsonQuery >> KqpQueryPerf::RangeRead-QueryService >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] >> KqpQueryPerf::MultiRead+QueryService [GOOD] >> KqpQueryPerf::ComputeLength+QueryService [GOOD] >> KqpQueryPerf::ComputeLength-QueryService >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] >> KqpQueryPerf::Delete-QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete-QueryService+UseSink >> KqpQueryPerf::IdxLookupJoin+QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoin-QueryService >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] >> Initializer::Simple [GOOD] >> KqpQueryPerf::Delete+QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8974, MsgBus: 16560 2025-03-28T09:31:33.221531Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790682001850586:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:33.222341Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001265/r3tmp/tmp47YaeD/pdisk_1.dat 2025-03-28T09:31:33.665861Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:33.669892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:33.670009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:33.673725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8974, node 1 2025-03-28T09:31:33.857268Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:33.857291Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:33.857305Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:33.857485Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16560 TClient is connected to server localhost:16560 WaitRootIsUp 'Root'... 
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:31:34.573701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:31:34.603642Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:31:34.614640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:31:34.764532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-28T09:31:34.950414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-28T09:31:35.034538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:31:36.356292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790694886754246:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:36.356399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:36.710982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T09:31:36.740314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T09:31:36.767839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T09:31:36.794220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T09:31:36.826304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T09:31:36.860638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T09:31:36.939511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790694886754760:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:36.939613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:36.939665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790694886754765:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:36.943122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T09:31:36.952276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790694886754767:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:31:37.016631Z node 1 :TX_PROXY ERROR: Actor# [1:7486790699181722116:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:31:38.221895Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790682001850586:2063];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:31:38.221976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD]
Test command err: Trying to start YDB, gRPC: 15004, MsgBus: 4380
2025-03-28T09:31:33.219733Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790680875390537:2063];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:31:33.222334Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00125b/r3tmp/tmp3VXV8A/pdisk_1.dat
2025-03-28T09:31:33.814599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:31:33.814721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:31:33.823054Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:31:33.835394Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 15004, node 1
2025-03-28T09:31:33.998778Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:31:33.998805Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:31:33.998816Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:31:33.998934Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:4380
TClient is connected to server localhost:4380
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:31:34.547210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.586046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.757921Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.934507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:35.045493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:36.617828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790693760294198:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.617951Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.896555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.927501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.994267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:37.020948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:37.049239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:37.084683Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:37.165116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790698055262013:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.165226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.165284Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790698055262018:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.169732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:37.179630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790698055262020:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:37.242975Z node 1 :TX_PROXY ERROR: Actor# [1:7486790698055262073:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:38.219812Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790680875390537:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:38.219932Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14889, MsgBus: 9813 2025-03-28T09:31:33.219178Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790681993355750:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:33.219318Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00126a/r3tmp/tmp5Cw2kg/pdisk_1.dat 2025-03-28T09:31:33.659138Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:33.677417Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:33.677496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:33.680854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14889, node 1 2025-03-28T09:31:33.860409Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:33.860438Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:33.860447Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:33.860559Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9813 TClient is connected to server localhost:9813 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:31:34.541686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.586613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.760580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:34.934857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:31:35.021991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:36.506830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790694878259436:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.506954Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.807500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.874979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.905760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.969021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.995829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:37.024513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:37.108981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790699173227249:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.109082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.109409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790699173227254:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.113383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:37.124623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790699173227256:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:37.213693Z node 1 :TX_PROXY ERROR: Actor# [1:7486790699173227312:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:38.219235Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790681993355750:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:38.219310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 31655, MsgBus: 25452 2025-03-28T09:31:33.220288Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790681324493371:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:33.220390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001279/r3tmp/tmpduAflx/pdisk_1.dat 2025-03-28T09:31:33.632893Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:33.671966Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:33.672074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:33.673470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31655, node 1 2025-03-28T09:31:33.858803Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:33.858823Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:33.858832Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:33.858929Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25452 TClient is connected to server localhost:25452 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:31:34.629705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:34.683426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:31:34.853416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:35.016724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:31:35.107044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:36.235847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790694209397032:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.235959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.711391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.739584Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.766633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.792901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.816110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.846528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.928752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790694209397546:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.928836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.929087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790694209397551:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.932731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:36.944843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790694209397553:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:37.029638Z node 1 :TX_PROXY ERROR: Actor# [1:7486790698504364905:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:38.220458Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790681324493371:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:38.220537Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallback ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 18052, MsgBus: 27368 2025-03-28T09:31:33.219120Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790683960881968:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:33.219183Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001263/r3tmp/tmpJinQyE/pdisk_1.dat 2025-03-28T09:31:33.709649Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:33.709774Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:33.714922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:33.722415Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18052, node 1 2025-03-28T09:31:33.875216Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:33.875237Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:33.875248Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:33.875380Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27368 TClient is connected to server localhost:27368 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:34.597240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.623126Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:34.638648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.804003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.968459Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:35.033382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:36.516505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790696845785628:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.516707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.804033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.832343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.859998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.884018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.916305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.948523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.986656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790696845786138:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.986752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.986785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790696845786143:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.989914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:36.998106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790696845786145:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:37.070516Z node 1 :TX_PROXY ERROR: Actor# [1:7486790701140753495:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:38.219557Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790683960881968:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:38.219707Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> Viewer::SelectStringWithBase64Encoding [GOOD] >> Viewer::QueryExecuteScript ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple [GOOD] Test command err: 2025-03-28T09:30:28.759711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:30:28.760302Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:30:28.760377Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ada/r3tmp/tmplgnzOR/pdisk_1.dat TServer::EnableGrpc on GrpcPort 15074, node 1 TClient is connected to server localhost:5307 2025-03-28T09:30:30.193099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:30:30.260816Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:30.272685Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:30:30.272749Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:30:30.272795Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:30:30.273027Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:30:30.311979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:30.312151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:30.324792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:40.507476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:692:2579], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:40.507649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:683:2574], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:40.509887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:30:40.525224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-28T09:30:40.625937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:697:2582], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-28T09:30:40.735215Z node 1 :TX_PROXY ERROR: Actor# [1:769:2623] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:30:41.214437Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:779:2632], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:30:41.224166Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWQyZWQ4M2UtYWQxMWQyZi0zZDYxNzA0YS00YzFlOWFmNA==, ActorId: [1:679:2571], ActorState: ExecuteState, TraceId: 01jqe1dpzm96tx0fqbdahg4rkx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=0 2025-03-28T09:30:41.335742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-03-28T09:30:43.469839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:30:43.906454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:30:44.811339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715670:0, at schemeshard: 72057594046644480 Initialization finished 2025-03-28T09:30:55.817237Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jqe1e5vp5yxgt1m7fd2ggcec, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWJiNDMyMzYtYzFmMTBjOGUtYmZlZjY3NGEtYzZhN2RhZmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=1 REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-03-28T09:31:06.874538Z node 1 :TX_PROXY ERROR: Actor# [1:1352:3066] txid# 281474976715678, Access denied for root@builtin on path /Root/.metadata/test, with access RemoveSchema 2025-03-28T09:31:06.874781Z node 1 :TX_PROXY ERROR: Actor# [1:1352:3066] txid# 281474976715678, issues: { message: "Access denied for root@builtin on path /Root/.metadata/test" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/test`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/test, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2025-03-28T09:31:17.399788Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqe1ety3850betzktng12vfs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzZhNDhiODMtOTYxOTYyZC1jMjY0YmYzOC00NGMzZjhmMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;RESULT=
: Fatal: ydb/core/kqp/host/kqp_host.cpp:977 ExecuteDataQuery(): requirement false failed, message: Unexpected query type for execute script action: Ddl, code: 1 ;EXPECTATION=0 FINISHED_REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 2025-03-28T09:31:38.657580Z node 1 :TX_PROXY ERROR: Actor# [1:1549:3213] txid# 281474976715686, Access denied for root@builtin on path /Root/.metadata/initialization/migrations, with access RemoveSchema 2025-03-28T09:31:38.657768Z node 1 :TX_PROXY ERROR: Actor# [1:1549:3213] txid# 281474976715686, issues: { message: "Access denied for root@builtin on path /Root/.metadata/initialization/migrations" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/initialization/migrations, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] Test command err: 2025-03-28T09:26:49.553932Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:49.651325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:49.671379Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:49.671750Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:49.680690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:49.680922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:49.681209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:49.681322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:49.681428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:49.681564Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:49.681693Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:49.681801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:49.681934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:49.682034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:49.682159Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:49.682261Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-28T09:26:49.719514Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-03-28T09:26:49.719867Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-03-28T09:26:49.719927Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-03-28T09:26:49.720149Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-28T09:26:49.720323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-03-28T09:26:49.720414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-03-28T09:26:49.720461Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-03-28T09:26:49.720578Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-03-28T09:26:49.720679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-03-28T09:26:49.720731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-03-28T09:26:49.720783Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-03-28T09:26:49.720983Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-28T09:26:49.721058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-03-28T09:26:49.721104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-03-28T09:26:49.721168Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-03-28T09:26:49.721342Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-03-28T09:26:49.721418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-03-28T09:26:49.721470Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-03-28T09:26:49.721503Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-03-28T09:26:49.721579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-03-28T09:26:49.721620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-03-28T09:26:49.721670Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-03-28T09:26:49.721730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-03-28T09:26:49.721773Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-03-28T09:26:49.721814Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-03-28T09:26:49.722232Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46;
2025-03-28T09:26:49.722350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=53;
2025-03-28T09:26:49.722454Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=49;
2025-03-28T09:26:49.722568Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=56;
2025-03-28T09:26:49.722805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-03-28T09:26:49.722870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-03-28T09:26:49.722908Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-03-28T09:26:49.723166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-03-28T09:26:49.723217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-03-28T09:26:49.723252Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-03-28T09:26:49.723419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-03-28T09:26:49.723474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-03-28T09:26:49.723510Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-03-28T09:26:49.723756Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-03-28T09:26:49.723821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-03-28T09:26:49.723863Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks;
2025-03-28T09:26:49.724014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-03-28T09:26:49.724067Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-03-28T09:26:49.724118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ...
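[Editor's note] The records above trace TTxUpdateSchema walking an ordered chain of schema normalizers (Granules, Chunks, TablesCleaner, ..., RestoreV2Chunks), emitting normalizer_init, normalizer_finished, and normalizer_switched for each stage until normalization_finished. As a reading aid, here is a minimal C++ sketch of that sequential-pipeline pattern; the type and function names are hypothetical stand-ins (only the event names come from the log, the real classes live under ydb/core/tx/columnshard):

```cpp
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Hypothetical stand-in for one schema normalizer (see abstract.cpp in the
// log for the real dispatcher). Run() returns true on success.
struct TNormalizer {
    std::string Name;           // e.g. "Granules", "Chunks", "RestoreV2Chunks"
    std::function<bool()> Run;  // performs one on-disk migration/repair step
};

// Runs the chain in order, mirroring the normalizer_init ->
// normalizer_finished -> normalizer_switched sequence seen above.
bool RunNormalizerChain(const std::vector<TNormalizer>& chain) {
    for (size_t i = 0; i < chain.size(); ++i) {
        std::cerr << "event=normalizer_init;type=" << chain[i].Name << ";\n";
        if (!chain[i].Run()) {
            return false;  // a failed normalizer aborts tablet initialization
        }
        std::cerr << "event=normalizer_finished;description=CLASS_NAME="
                  << chain[i].Name << ";\n";
        if (i + 1 < chain.size()) {
            std::cerr << "event=normalizer_switched;description=CLASS_NAME="
                      << chain[i + 1].Name << ";\n";
        }
    }
    std::cerr << "event=normalization_finished;\n";
    return true;
}

int main() {
    std::vector<TNormalizer> chain = {
        {"Granules",        [] { return true; }},
        {"Chunks",          [] { return true; }},
        {"RestoreV2Chunks", [] { return true; }},
    };
    return RunNormalizerChain(chain) ? 0 : 1;
}
```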
B:0:2688];;column_id:8;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:8;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:8;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:8;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:8;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:8;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:8;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:7;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:8;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:9;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:10;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:11;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:12;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:13;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:14;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:15;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:20;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:21;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:22;blob_range:[NO_BLOB
:0:2688];;column_id:9;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:28;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:30;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:31;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:32;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:33;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:34;blob_range:[NO_BLOB:0:8352];;column_id:9;chunk_idx:35;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:36;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:37;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:38;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:39;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:40;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:41;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:42;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:43;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:44;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:45;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:46;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:47;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:48;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:49;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:50;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:51;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:52;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:53;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:54;blob_range:[NO_BLOB:0:2696];;column_id:9;chunk_idx:55;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:56;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:57;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:58;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:59;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:60;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:61;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:62;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:63;blob_range:[NO_BLOB:0:2688];;column_id:9;chunk_idx:64;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:65;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:66;blob_range:[NO_BLOB:0:2680];;column_id:9;chunk_idx:67;blob_range:[NO_BLOB:0:2672];;column_id:9;chunk_idx:68;blob_range:[NO_BLOB:0:2664];;column_id:9;chunk_idx:69;blob_range:[NO_BLOB:0:8336];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:1;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:2;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:3;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:4;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:5;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:6;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:7;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:8;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:9;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:10;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:11;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:12;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:13;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:14;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:15;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:16;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:17;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:18;blob_range:[NO_BLOB:
0:2752];;column_id:7;chunk_idx:19;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:20;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:21;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:22;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:23;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:24;blob_range:[NO_BLOB:0:10208];;column_id:7;chunk_idx:25;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:26;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:27;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:28;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:29;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:30;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:31;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:32;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:33;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:34;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:35;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:36;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:37;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:38;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:39;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:40;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:41;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:42;blob_range:[NO_BLOB:0:2760];;column_id:7;chunk_idx:43;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:44;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:45;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:46;blob_range:[NO_BLOB:0:2752];;column_id:7;chunk_idx:47;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:48;blob_range:[NO_BLOB:0:2744];;column_id:7;chunk_idx:49;blob_range:[NO_BLOB:0:10208];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:1;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:2;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:3;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:4;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:5;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:6;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:7;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:8;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:9;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:10;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:11;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:12;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:13;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:14;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:15;blob_range:[NO_BLOB:0:9400];;column_id:5;chunk_idx:16;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:17;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:18;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:19;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:20;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:21;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:22;blob_range:[NO_BLOB:0:2696];;column_id:5;chunk_idx:23;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:24;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:25;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:26;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:27;blob_range:[NO_BLOB:0:2688];;column_id:5;chunk_idx:28;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:29;blob_range:[NO_BLOB:0:2680];;column_id:5;chunk_idx:30;blob_range:[NO_BLOB:0:2672];;column_id:5;chunk_idx:31;blob_range:[NO_BLOB:0:9392];;;;switched=(portion_id:60;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2507632;index_
size:20;meta:((produced=SPLIT_COMPACTED;)););(portion_id:56;path_id:1;records_count:23698;min_schema_snapshot:(plan_step=10;tx_id=10;);schema_version:1;level:0;column_size:2109896;index_size:20;meta:((produced=INSERTED;)););;
2025-03-28T09:31:38.636583Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:11573:13200];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2;
2025-03-28T09:31:38.638944Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:11573:13200];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=;
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager;
>> KqpQueryPerf::IndexReplace-QueryService-UseSink [GOOD]
>> KqpQueryPerf::IndexReplace-QueryService+UseSink
>> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink [GOOD]
>> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink
>> KqpQueryPerf::IndexInsert-QueryService-UseSink [GOOD]
>> KqpQueryPerf::IndexInsert-QueryService+UseSink
>> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink [GOOD]
>> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink
>> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink [GOOD]
>> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink
>> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain [GOOD]
>> TConsoleTests::TestCreateSubSubDomain
>> Viewer::JsonAutocompleteScheme [GOOD]
>> Viewer::JsonAutocompleteEmptyColumns
|91.8%| [TA] $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|91.8%| [TA] {RESULT} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TConsoleTxProcessorTests::TestTxProcessorSingle [GOOD]
>> TConsoleTxProcessorTests::TestTxProcessorSubProcessor
>> KqpQueryPerf::Replace+QueryService+UseSink
>> KqpQueryPerf::IndexReplace+QueryService-UseSink
>> ReadSessionImplTest::DataReceivedCallbackReal [GOOD]
>> KqpQueryPerf::Replace+QueryService-UseSink
>> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink
>> KqpQueryPerf::DeleteOn+QueryService-UseSink
>> ReadSessionImplTest::DataReceivedCallback [GOOD]
>> ReadSessionImplTest::CommonHandler [GOOD]
>> KqpQueryPerf::UpdateOn+QueryService+UseSink
>> KqpQueryPerf::DeleteOn-QueryService-UseSink
>> Viewer::JsonAutocompleteSimilarDatabaseNamePOST [GOOD]
>> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase
>> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD]
>> PersQueueSdkReadSessionTest::ReadSessionWithClose
>> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD]
Test command err: 2025-03-28T09:31:12.406342Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.406362Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.406379Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:12.406832Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:12.419234Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:12.419410Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.419700Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:12.420173Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.420316Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:31:12.420421Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:12.420449Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-03-28T09:31:12.421038Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.421054Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.421083Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:12.421335Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:12.421882Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:12.421982Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.422120Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1.
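[Editor's note] Every columnshard record in this trace uses the same semicolon-delimited `key=value;` field layout (tablet_id, process, fline, event, ...). When post-processing a log like this one, a small helper is enough to pull those fields out; the field names below are taken from the records above, everything else is illustrative:

```cpp
#include <iostream>
#include <map>
#include <sstream>
#include <string>

// Splits one "key=value;key=value;..." log record into a field map.
// Values may themselves contain '=' (e.g. description=CLASS_NAME=Granules)
// or ':' (e.g. fline=abstract.cpp:42), so we split each token on the
// first '=' only.
std::map<std::string, std::string> ParseRecord(const std::string& record) {
    std::map<std::string, std::string> fields;
    std::istringstream in(record);
    std::string token;
    while (std::getline(in, token, ';')) {
        auto eq = token.find('=');
        if (eq != std::string::npos) {
            fields[token.substr(0, eq)] = token.substr(eq + 1);
        }
    }
    return fields;
}

int main() {
    auto f = ParseRecord(
        "tablet_id=9437184;process=TTxUpdateSchema::Execute;"
        "fline=abstract.cpp:45;event=normalization_finished;");
    // Prints: normalization_finished at abstract.cpp:45
    std::cout << f["event"] << " at " << f["fline"] << "\n";
}
```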
Read offset: (NULL) 2025-03-28T09:31:12.422458Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.422539Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:31:12.422608Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:12.422636Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-03-28T09:31:12.423305Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.423325Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.423345Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:12.423546Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:12.424034Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:12.424141Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.424332Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:12.424924Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.425053Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:31:12.425121Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:12.425151Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-03-28T09:31:12.425816Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.425833Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.425846Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:12.426159Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:12.426751Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:12.426882Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.427025Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:12.428348Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.428780Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:31:12.428855Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:12.428884Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2025-03-28T09:31:12.429539Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.429555Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.429574Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:12.429831Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:12.430290Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:12.430381Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.430650Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:12.431038Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.431168Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:31:12.431285Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:12.431327Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-03-28T09:31:12.431883Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.431985Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.432003Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:12.432311Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:12.432723Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:12.432827Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.432979Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:12.433275Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.433393Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:31:12.433470Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:12.433500Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-03-28T09:31:12.434243Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.434261Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.434309Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:12.434613Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-28T09:31:12.435210Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:12.435344Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.435517Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:12.436170Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.436327Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:31:12.436401Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:12.436432Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-03-28T09:31:12.437087Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.437135Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.437147Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:12.437595Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:12.438085Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:12.438184Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.438381Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:12.440247Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.440597Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:31:12.440672Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:12.440701Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-03-28T09:31:12.444206Z :ReadSession INFO: Random seed for debugging is 1743154272444188 2025-03-28T09:31:12.697889Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790592857217320:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:12.697975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:31:12.750007Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790593353083856:2274];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:12.750103Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error ... Data: "... 79 bytes ..." 
SourceId: "\000test-message-group-id" SeqNo: 3 WriteTimestampMS: 1743154290482 CreateTimestampMS: 1743154290481 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 20 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 2 } 2025-03-28T09:31:30.487936Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 4 from offset3 2025-03-28T09:31:30.487975Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid af065dd7-2b3d83a3-b88285c6-e6fd143d has messages 1 2025-03-28T09:31:30.488049Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 read done: guid# af065dd7-2b3d83a3-b88285c6-e6fd143d, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 200 2025-03-28T09:31:30.488078Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 response to read: guid# af065dd7-2b3d83a3-b88285c6-e6fd143d 2025-03-28T09:31:30.488264Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 Process answer. Aval parts: 0 2025-03-28T09:31:30.488633Z :DEBUG: [/Root] [/Root] [c5276cd9-278d3b31-7885d54-bdf8cad6] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:30.488789Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2025-03-28T09:31:30.488859Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) GOT MESSAGE: Message { Data: "message3" Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-03-28T09:31:30.481000Z WriteTime: 2025-03-28T09:31:30.482000Z Ip: "ipv6:[::1]:44560" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:44560" } } } 2025-03-28T09:31:30.489097Z :DEBUG: [/Root] [/Root] [c5276cd9-278d3b31-7885d54-bdf8cad6] [dc1] Commit offsets [2, 3). Partition stream id: 1 2025-03-28T09:31:30.489076Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 grpc read done: success# 1, data# { read { } } 2025-03-28T09:31:30.489141Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 got read request: guid# ac58a4d0-c3e8896c-c6482e5a-eeea2f69 2025-03-28T09:31:30.489342Z :DEBUG: [/Root] [/Root] [c5276cd9-278d3b31-7885d54-bdf8cad6] [dc1] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2025-03-28T09:31:30.489552Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 grpc read done: success# 1, data# { commit { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } } 2025-03-28T09:31:30.489708Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 4 2025-03-28T09:31:30.489805Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:31:30.489828Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:31:30.489951Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_15255108614432574062_v1 2025-03-28T09:31:30.490062Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:31:30.490082Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:31:30.490094Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:31:30.490109Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:31:30.490121Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-28T09:31:30.490131Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-28T09:31:30.490143Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:31:30.490157Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:31:30.490184Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-28T09:31:30.492460Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:31:30.492505Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:31:30.492504Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 4 2025-03-28T09:31:30.492600Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 4 } 2025-03-28T09:31:30.492646Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 4 2025-03-28T09:31:30.492656Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 replying for commits: assignId# 1, from# 4, to# 4, offset# 3 2025-03-28T09:31:30.493100Z :DEBUG: [/Root] [/Root] [c5276cd9-278d3b31-7885d54-bdf8cad6] [dc1] Committed response: { offset_ranges { assign_id: 1 start_offset: 2 end_offset: 3 } } 2025-03-28T09:31:30.581874Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|24a1000b-e0a21da-be5bfe4e-4b93c8af_0] Write session will now close 2025-03-28T09:31:30.581940Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|24a1000b-e0a21da-be5bfe4e-4b93c8af_0] Write session: aborting 2025-03-28T09:31:30.582680Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|24a1000b-e0a21da-be5bfe4e-4b93c8af_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:31:30.582717Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|24a1000b-e0a21da-be5bfe4e-4b93c8af_0] Write session is aborting and will not restart 2025-03-28T09:31:30.582774Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|24a1000b-e0a21da-be5bfe4e-4b93c8af_0] Write session: destroy 2025-03-28T09:31:30.582975Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|24a1000b-e0a21da-be5bfe4e-4b93c8af_0 grpc read done: success: 0 data: 2025-03-28T09:31:30.583004Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|24a1000b-e0a21da-be5bfe4e-4b93c8af_0 grpc read failed 2025-03-28T09:31:30.583027Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|24a1000b-e0a21da-be5bfe4e-4b93c8af_0 grpc closed 2025-03-28T09:31:30.583061Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|24a1000b-e0a21da-be5bfe4e-4b93c8af_0 is DEAD 2025-03-28T09:31:30.583773Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:31:30.583905Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7486790670166631977:2663] destroyed 2025-03-28T09:31:30.583982Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
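[Editor's note] The PQ_READ_PROXY and PERSQUEUE records above are the server side of a client reading message3 and committing offsets [2, 3). On the client side this corresponds to a read-session event loop that confirms partition sessions, consumes data events, and commits them. A hedged sketch against the NYdb::NTopic C++ SDK follows; the endpoint, database, consumer, and topic names are taken from this test log, but header paths and optional-vs-TMaybe return types vary between SDK versions, so treat it as a sketch rather than the test's actual code:

```cpp
// Sketch of the client-side flow behind the records above: read a batch,
// hand it to the application, commit the offset range.
#include <ydb-cpp-sdk/client/topic/client.h>  // older SDKs: ydb/public/sdk/cpp/client/ydb_topic/topic.h

#include <iostream>
#include <variant>

int main() {
    NYdb::TDriverConfig cfg;
    cfg.SetEndpoint("localhost:11080")  // endpoint and database as in the test log
       .SetDatabase("/Root");
    NYdb::TDriver driver(cfg);
    NYdb::NTopic::TTopicClient client(driver);

    auto settings = NYdb::NTopic::TReadSessionSettings()
        .ConsumerName("user")           // the consumer named in the session id above
        .AppendTopics(std::string("test-topic"));
    auto session = client.CreateReadSession(settings);

    for (;;) {
        auto event = session->GetEvent(/*block=*/true);
        if (!event) {
            continue;
        }
        if (auto* start = std::get_if<NYdb::NTopic::TReadSessionEvent::TStartPartitionSessionEvent>(&*event)) {
            start->Confirm();           // "Confirm partition stream create" in the log
        } else if (auto* data = std::get_if<NYdb::NTopic::TReadSessionEvent::TDataReceivedEvent>(&*event)) {
            for (const auto& msg : data->GetMessages()) {
                std::cout << msg.GetData() << "\n";
            }
            data->Commit();             // produces the "commit { offset_ranges ... }" request above
        } else if (std::get_if<NYdb::NTopic::TSessionClosedEvent>(&*event)) {
            break;                      // mirrors "Closing read session" later in the trace
        }
    }
    driver.Stop(true);
}
```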
2025-03-28T09:31:33.198304Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2025-03-28T09:31:40.488537Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2025-03-28T09:31:40.586760Z :INFO: [/Root] [/Root] [c5276cd9-278d3b31-7885d54-bdf8cad6] Closing read session. Close timeout: 0.000000s 2025-03-28T09:31:40.586838Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-03-28T09:31:40.586875Z :INFO: [/Root] [/Root] [c5276cd9-278d3b31-7885d54-bdf8cad6] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16436 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:31:40.586976Z :NOTICE: [/Root] [/Root] [c5276cd9-278d3b31-7885d54-bdf8cad6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-03-28T09:31:40.587030Z :DEBUG: [/Root] [/Root] [c5276cd9-278d3b31-7885d54-bdf8cad6] [dc1] Abort session to cluster 2025-03-28T09:31:40.587533Z :NOTICE: [/Root] [/Root] [c5276cd9-278d3b31-7885d54-bdf8cad6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:31:40.590875Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 grpc read done: success# 0, data# { } 2025-03-28T09:31:40.590911Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 grpc read failed 2025-03-28T09:31:40.590953Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 grpc closed 2025-03-28T09:31:40.591117Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_15255108614432574062_v1 is DEAD 2025-03-28T09:31:40.614207Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_15255108614432574062_v1 2025-03-28T09:31:40.614288Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7486790644396827504:2549] destroyed 2025-03-28T09:31:40.614363Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_15255108614432574062_v1 2025-03-28T09:31:40.609991Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7486790644396827502:2546] disconnected; active server actors: 1 2025-03-28T09:31:40.610045Z node 2 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7486790644396827502:2546] client user disconnected session shared/user_1_1_15255108614432574062_v1
------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD]
Test command err: 2025-03-28T09:31:11.554087Z :SpecifyClustersExplicitly INFO: Random seed for debugging is 1743154271554059 2025-03-28T09:31:11.886235Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790587772061151:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:11.886295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:31:11.937148Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790588071374266:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:12.059785Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001b8a/r3tmp/tmpT2Nrk8/pdisk_1.dat 2025-03-28T09:31:12.065056Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:31:12.087350Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:31:12.321755Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:12.324750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:12.324846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:12.326279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:12.326341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:12.331763Z
node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:31:12.331901Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:12.332485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11080, node 1 2025-03-28T09:31:12.454022Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001b8a/r3tmp/yandexDja5mY.tmp 2025-03-28T09:31:12.454058Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001b8a/r3tmp/yandexDja5mY.tmp 2025-03-28T09:31:12.455710Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001b8a/r3tmp/yandexDja5mY.tmp 2025-03-28T09:31:12.455867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:31:12.645314Z INFO: TTestServer started on Port 62987 GrpcPort 11080 TClient is connected to server localhost:62987 PQClient connected to localhost:11080 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:12.918865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-28T09:31:14.752821Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790600956276354:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:14.753536Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790600956276329:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:14.753633Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:14.760786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-28T09:31:14.779700Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790600956276358:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-28T09:31:14.855344Z node 2 :TX_PROXY ERROR: Actor# [2:7486790600956276386:2129] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:15.257032Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790600656964123:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:31:15.258624Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486790600956276401:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:31:15.258795Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmIwOWExNDAtZmFhMzgzZDctNDM3ZTczYTItMjRmZjFlMWI=, ActorId: [1:7486790600656964080:2335], ActorState: ExecuteState, TraceId: 01jqe1erj67q5x8jxy3hyxd23j, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:31:15.258935Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGJjOGMxYTctOWU4NDZmNC02M2ZmYWI5OS0yZmVlOWIxYg==, ActorId: [2:7486790600956276327:2307], ActorState: ExecuteState, TraceId: 01jqe1erdx9gqrvfbzb9z39jhj, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:31:15.260385Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:31:15.260405Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:31:15.350535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:31:15.479508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:15.606060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:11080", true, true, 1000); 2025-03-28T09:31:15.996056Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jqe1esb4avtsafg5ns62bmy9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWQwZDdkM2YtYmM4Njk3MDgtZWZmYWEwYzAtOWExZDZkMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7486790609246899109:2955] 2025-03-28T09:31:16.886432Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790587772061151:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:16.886523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:31:16.931904Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790588071374266:2190];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:16.931962Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-28T09:31:21.615994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:11080 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-28T09:31:21.678235Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:11080 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSe ... al parts: 0 2025-03-28T09:31:38.442265Z :DEBUG: [/Root] [/Root] [cf9acdce-d88c8c14-cf571360-f0b0e439] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:38.442913Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2025-03-28T09:31:38.443097Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2025-03-28T09:31:38.443173Z :DEBUG: [/Root] [/Root] [cf9acdce-d88c8c14-cf571360-f0b0e439] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-03-28T09:31:37.298000Z WriteTime: 2025-03-28T09:31:37.300000Z Ip: "ipv6:[::1]:46244" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:46244" } } } } 2025-03-28T09:31:38.443349Z :INFO: [/Root] [/Root] [cf9acdce-d88c8c14-cf571360-f0b0e439] Closing read session. Close timeout: 3.000000s 2025-03-28T09:31:38.443396Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-03-28T09:31:38.443437Z :INFO: [/Root] [/Root] [cf9acdce-d88c8c14-cf571360-f0b0e439] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1372 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:31:38.443905Z :INFO: [/Root] [/Root] [cf9acdce-d88c8c14-cf571360-f0b0e439] Closing read session. 
Close timeout: 0.000000s 2025-03-28T09:31:38.443943Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-03-28T09:31:38.443975Z :INFO: [/Root] [/Root] [cf9acdce-d88c8c14-cf571360-f0b0e439] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1373 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:31:38.444080Z :NOTICE: [/Root] [/Root] [cf9acdce-d88c8c14-cf571360-f0b0e439] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:31:38.449852Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_14253911929427375325_v1 2025-03-28T09:31:38.446820Z node 3 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_3_1_14253911929427375325_v1 grpc read done: success# 1, data# { read { } } 2025-03-28T09:31:38.449900Z node 4 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [3:7486790699662948486:2541] destroyed 2025-03-28T09:31:38.449947Z node 4 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_3_1_14253911929427375325_v1 2025-03-28T09:31:38.446871Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_14253911929427375325_v1 grpc closed 2025-03-28T09:31:38.446906Z node 3 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_3_1_14253911929427375325_v1 is DEAD 2025-03-28T09:31:38.451372Z node 3 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486790699662948483:2538] disconnected; active server actors: 1 2025-03-28T09:31:38.451391Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [3:7486790699662948483:2538] client user disconnected session shared/user_3_1_14253911929427375325_v1 2025-03-28T09:31:40.083246Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:40.083317Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:40.083368Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:40.083769Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:40.084788Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:40.084996Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:40.085562Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-03-28T09:31:40.087297Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:40.087341Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:40.087379Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:40.087679Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:40.088168Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:40.088347Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:40.088563Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2025-03-28T09:31:40.089657Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:31:40.095604Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-03-28T09:31:40.095740Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-03-28T09:31:40.095932Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:40.095987Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-28T09:31:40.096016Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-28T09:31:40.096084Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-03-28T09:31:40.098407Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:40.098452Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:40.098483Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:40.098807Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:40.099254Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:40.099396Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:40.099613Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:40.100206Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:40.100472Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:31:40.100574Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:40.100629Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-28T09:31:40.100703Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). 
Partition stream id: 1 2025-03-28T09:31:40.102244Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:40.102292Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:40.102323Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:40.102623Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:40.103182Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:40.103351Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:40.103660Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:31:40.104330Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:31:40.104773Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:31:40.104971Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-03-28T09:31:40.105056Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:31:40.105117Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:40.105162Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-03-28T09:31:40.105328Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-28T09:31:40.105376Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-28T09:31:42.109946Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:42.109993Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:42.110043Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:42.110759Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:31:42.111130Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:31:42.111328Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:42.112304Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:42.112570Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:31:42.112650Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:31:42.112730Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4422, MsgBus: 2871 2025-03-28T09:31:37.364031Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790698326005340:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:37.364108Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00124b/r3tmp/tmpnJG6uC/pdisk_1.dat 2025-03-28T09:31:37.706215Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4422, node 1 2025-03-28T09:31:37.763360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:37.763690Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:37.768022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:37.790789Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:37.790813Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:37.790822Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:37.790954Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2871 TClient is connected to server localhost:2871 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:38.318212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:38.331408Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:38.342308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:31:38.487818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:38.653070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:38.726346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:40.275846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790711210908995:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:40.275959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:40.573308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:40.602877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:40.634956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:40.664244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:40.696722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:40.731977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:40.779291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790711210909507:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:40.779373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:40.779413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790711210909512:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:40.782635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:40.791840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790711210909514:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:40.871770Z node 1 :TX_PROXY ERROR: Actor# [1:7486790711210909568:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::RangeRead-QueryService [GOOD] >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] >> Viewer::FloatPointJsonQuery [GOOD] >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes >> KqpQueryPerf::ComputeLength-QueryService [GOOD] >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorTemporary ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 16080, MsgBus: 31685 2025-03-28T09:31:38.434183Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790701864175357:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:38.434274Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001244/r3tmp/tmp7QP4TB/pdisk_1.dat 2025-03-28T09:31:38.842602Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16080, node 1 2025-03-28T09:31:38.858593Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:31:38.862557Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:31:38.875955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:38.876921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:38.878852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:38.899869Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:38.899904Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:38.899915Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:38.900027Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31685 TClient is connected to server localhost:31685 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:39.409214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:39.428537Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:39.448270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:39.592194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:39.762196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:39.831076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.388599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790714749079017:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:41.388748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:41.697991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.764156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.798922Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.825924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.857636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.889464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.946840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790714749079530:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:41.946925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:41.947156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790714749079535:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:41.951591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:41.965064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790714749079537:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:42.066584Z node 1 :TX_PROXY ERROR: Actor# [1:7486790719044046889:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:43.435737Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790701864175357:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:43.435893Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService >> TConsoleTests::TestRemoveTenantExtSubdomain [GOOD] >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants >> KqpQueryPerf::Update+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3959, MsgBus: 4212 2025-03-28T09:31:33.279984Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790681923197358:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:33.280072Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001270/r3tmp/tmpKdzsmF/pdisk_1.dat 2025-03-28T09:31:33.653626Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:33.675051Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:33.675139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:33.676623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3959, node 1 2025-03-28T09:31:33.859392Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:33.859424Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:33.859446Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:33.859592Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4212 TClient is connected to server localhost:4212 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:34.569832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.607339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.790465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.970059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:35.051225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:36.257410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790694808101031:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.257495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.712998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.748761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.780684Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.806511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.831910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.899677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.945884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790694808101546:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.945953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.946032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790694808101551:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.949446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:36.958805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790694808101553:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:37.059727Z node 1 :TX_PROXY ERROR: Actor# [1:7486790699103068902:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:38.280254Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790681923197358:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:38.283007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25683, MsgBus: 15807 2025-03-28T09:31:39.124650Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790708932851230:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:39.124704Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001270/r3tmp/tmpqurNCX/pdisk_1.dat 2025-03-28T09:31:39.264177Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:39.277324Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:39.277409Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:39.281608Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25683, node 2 2025-03-28T09:31:39.327269Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:39.327297Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:39.327305Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:39.327417Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15807 TClient is connected to server localhost:15807 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-28T09:31:39.783721Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:31:39.799428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:39.874092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:31:40.010079Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:40.064810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:42.251750Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790721817754887:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.251840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.297367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.327763Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.356918Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.386923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.414830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.490033Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.553514Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790721817755402:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.553588Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.553623Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790721817755407:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.557664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:42.567890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790721817755409:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:42.621647Z node 2 :TX_PROXY ERROR: Actor# [2:7486790721817755461:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink >> KqpQueryPerf::Replace-QueryService-UseSink >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 16957, MsgBus: 8049 2025-03-28T09:31:33.239956Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790680365186778:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:33.241209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00126e/r3tmp/tmpH1gJke/pdisk_1.dat 2025-03-28T09:31:33.696618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:33.696687Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:33.700701Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:33.706770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16957, node 1 2025-03-28T09:31:33.854436Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:33.854460Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:33.854476Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:33.854607Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8049 TClient is connected to server localhost:8049 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:31:34.587624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.610381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.774837Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.948888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:35.037020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:36.290376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790693250090459:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.290487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.711048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.782127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.814156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.845426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.873641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.904616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.943766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790693250090970:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.943831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.943985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790693250090975:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.947413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:36.957362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790693250090977:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:37.016730Z node 1 :TX_PROXY ERROR: Actor# [1:7486790697545058327:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:38.239924Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790680365186778:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:38.240003Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19021, MsgBus: 2590 2025-03-28T09:31:39.036886Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790707627666430:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:39.036965Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00126e/r3tmp/tmp2ALl1p/pdisk_1.dat 2025-03-28T09:31:39.140065Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19021, node 2 2025-03-28T09:31:39.187412Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:39.187503Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:39.189878Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:39.207214Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:39.207241Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:39.207249Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:39.207352Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2590 TClient is connected to server localhost:2590 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:31:39.597425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:39.632519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:39.714512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:39.849562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:39.913683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:42.095966Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790720512570077:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.096092Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.147989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.183367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.213821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.254392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.286499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.324243Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.365607Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790720512570589:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.365700Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.365878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790720512570594:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.368971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:42.379156Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790720512570596:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:42.443580Z node 2 :TX_PROXY ERROR: Actor# [2:7486790720512570649:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:44.036928Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790707627666430:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:44.036990Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18714, MsgBus: 2734 2025-03-28T09:31:33.624950Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790683555656289:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:33.627619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001250/r3tmp/tmplTAXLx/pdisk_1.dat 2025-03-28T09:31:34.112264Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:34.115331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:34.115450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:34.121875Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18714, node 1 2025-03-28T09:31:34.211200Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:34.211222Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:34.211229Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:34.211313Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2734 TClient is connected to server localhost:2734 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:34.802613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.834110Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.850797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:31:35.021084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:35.220370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:31:35.294621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:36.892710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790696440559810:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.892813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.196368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:37.220120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:37.245595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:37.270555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:37.296383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:37.329968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:37.384320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790700735527616:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.384394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.384395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790700735527621:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.388015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:37.400685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790700735527623:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:37.481983Z node 1 :TX_PROXY ERROR: Actor# [1:7486790700735527677:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:38.616935Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790683555656289:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:38.616995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12735, MsgBus: 15153 2025-03-28T09:31:39.809435Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790709193280273:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:39.809520Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001250/r3tmp/tmpQp6Q5j/pdisk_1.dat 2025-03-28T09:31:39.909208Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12735, node 2 2025-03-28T09:31:39.941931Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:39.942010Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:39.943393Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:39.956195Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:39.956218Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:39.956226Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:39.956330Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15153 TClient is connected to server localhost:15153 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:31:40.396723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:40.416713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:40.471123Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:40.622587Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:40.726859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:42.801643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790722078183925:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.801721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.855564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.888160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.920516Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.975580Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:43.009564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:43.047357Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:43.094282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790726373151735:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:43.094359Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:43.094612Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790726373151740:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:43.097899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:43.111206Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790726373151742:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:43.175307Z node 2 :TX_PROXY ERROR: Actor# [2:7486790726373151795:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 3509, MsgBus: 30216 2025-03-28T09:31:33.220730Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790683610676433:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:33.220791Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00125f/r3tmp/tmpLB1waL/pdisk_1.dat 2025-03-28T09:31:33.752138Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:33.760350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:33.760472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:33.764821Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3509, node 1 2025-03-28T09:31:33.867833Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:33.867862Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:33.867872Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:33.867994Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30216 TClient is connected to server localhost:30216 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:34.599226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:31:34.637009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.808658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.964150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:35.064171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:36.369624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790696495580119:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.369786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.710853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.741637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.764866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.835028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.865281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.897785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.947196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790696495580632:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.947319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.947649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790696495580637:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.950820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:36.959357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790696495580639:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:37.058358Z node 1 :TX_PROXY ERROR: Actor# [1:7486790700790547988:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:38.220713Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790683610676433:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:38.220781Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 65311, MsgBus: 16668 2025-03-28T09:31:39.425888Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790706325880836:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:39.427135Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00125f/r3tmp/tmp1URg6N/pdisk_1.dat 2025-03-28T09:31:39.600711Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:39.622619Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:39.622814Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:39.623987Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65311, node 2 2025-03-28T09:31:39.671275Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:39.671296Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:39.671303Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:39.671419Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16668 TClient is connected to server localhost:16668 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:31:40.068625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:40.075775Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:31:40.086028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:40.134249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:40.301593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:40.365547Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:42.726050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790719210784497:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.726133Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.767180Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.801294Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.831954Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.898955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.926610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:43.002415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:43.085105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790723505752314:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:43.085176Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:43.085334Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790723505752319:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:43.088962Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:43.099518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790723505752321:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:43.169058Z node 2 :TX_PROXY ERROR: Actor# [2:7486790723505752377:3452] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:44.426074Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790706325880836:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:44.426117Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Replace-QueryService+UseSink >> TConsoleTests::TestCreateSubSubDomain [GOOD] >> TConsoleTests::TestCreateSubSubDomainExtSubdomain >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] >> KqpQueryPerf::RangeLimitRead-QueryService >> KqpQueryPerf::DeleteOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn+QueryService+UseSink >> KqpQueryPerf::AggregateToScalar+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8810, MsgBus: 17366 2025-03-28T09:31:34.585494Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790688223386048:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:34.587215Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00124c/r3tmp/tmpx4FEhl/pdisk_1.dat 2025-03-28T09:31:35.077010Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:35.100510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:35.100606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:35.108835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8810, node 1 2025-03-28T09:31:35.306915Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:35.306938Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:35.306950Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:35.307084Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17366 TClient is connected to server localhost:17366 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:35.769933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:35.796763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:35.930509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:36.060621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:36.133358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:37.894984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790701108289701:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.895100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:38.215565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:38.245363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:38.274618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:38.309878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:38.340719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:38.411205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:38.468282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790705403257515:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:38.468339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:38.468463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790705403257520:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:38.472346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:38.483046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790705403257522:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:38.545617Z node 1 :TX_PROXY ERROR: Actor# [1:7486790705403257575:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:39.586349Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790688223386048:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:39.599398Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26212, MsgBus: 28723 2025-03-28T09:31:40.926991Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790714612970722:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:40.927212Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00124c/r3tmp/tmpADQMOG/pdisk_1.dat 2025-03-28T09:31:41.054952Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26212, node 2 2025-03-28T09:31:41.077685Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:41.077800Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:41.080682Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:41.131025Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:41.131051Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:41.131062Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:41.131184Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28723 TClient is connected to server localhost:28723 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:31:41.551779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.556891Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:31:41.571073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.631050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.806412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.869101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.829829Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790727497874359:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:43.829906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:43.869834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:43.906794Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:43.951406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:43.977327Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.002434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.034578Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.073569Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790731792842165:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.073635Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.073914Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790731792842170:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.076862Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:44.084693Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790731792842172:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:44.167317Z node 2 :TX_PROXY ERROR: Actor# [2:7486790731792842225:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::UpdateOn+QueryService-UseSink >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] >> KqpQueryPerf::DeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn-QueryService+UseSink >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29232, MsgBus: 9223 2025-03-28T09:31:41.870112Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790718729103983:2224];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:41.870375Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001242/r3tmp/tmpF6ERwA/pdisk_1.dat TServer::EnableGrpc on GrpcPort 29232, node 1 2025-03-28T09:31:42.286517Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:31:42.286994Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:42.288165Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:31:42.312593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:42.312712Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:42.314735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:42.331969Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:42.332051Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:42.332059Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:42.332209Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9223 TClient is connected to server localhost:9223 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:42.866055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:42.888899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.028157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.193372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.279124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:44.920920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790731614007494:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.921017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.174994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.207631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.236910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.279705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.309409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.344379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.438449Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790735908975303:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.438534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.438900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790735908975308:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.443508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:45.456577Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790735908975310:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:45.518389Z node 1 :TX_PROXY ERROR: Actor# [1:7486790735908975365:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TConsoleTxProcessorTests::TestTxProcessorTemporary [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorRandom >> KqpQueryPerf::Insert+QueryService-UseSink >> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5041, MsgBus: 15420 2025-03-28T09:31:42.081364Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790722652853410:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:42.081525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001238/r3tmp/tmp81oVKF/pdisk_1.dat 2025-03-28T09:31:42.478773Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:42.522362Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:42.522449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:42.523841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5041, node 1 2025-03-28T09:31:42.578930Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:42.578950Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:42.578957Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:42.579074Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15420 TClient is connected to server localhost:15420 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:43.090040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:43.123716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:31:43.252905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.438267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.501311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:45.217723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790735537757073:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.217884Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.537483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.608365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.632955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.663271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.696826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.769578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.816818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790735537757587:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.816922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.817482Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790735537757592:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.822863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:45.832590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790735537757594:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:45.919740Z node 1 :TX_PROXY ERROR: Actor# [1:7486790735537757647:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:47.079502Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790722652853410:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:47.079626Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20662, MsgBus: 32288 2025-03-28T09:31:42.635602Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790722724361311:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:42.636414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00122d/r3tmp/tmp2ZRmns/pdisk_1.dat 2025-03-28T09:31:43.011829Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20662, node 1 2025-03-28T09:31:43.094884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:43.100868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:43.114613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:43.131158Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:43.131197Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:43.131205Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:43.131311Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32288 TClient is connected to server localhost:32288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:31:43.646399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.684948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.791356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.945552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:44.009340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:45.635145Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790735609264971:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.635224Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.941813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.971279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.994052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.019192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.050318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.119987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.159150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790739904232783:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:46.159219Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:46.159421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790739904232788:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:46.163532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:46.176100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790739904232790:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:46.257148Z node 1 :TX_PROXY ERROR: Actor# [1:7486790739904232843:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13982, MsgBus: 7435 2025-03-28T09:31:33.219109Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790680611778140:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:33.219160Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00126c/r3tmp/tmpqKHlyD/pdisk_1.dat 2025-03-28T09:31:33.661785Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:33.667023Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:33.667575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:33.675504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13982, node 1 2025-03-28T09:31:33.855274Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:33.855307Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:33.855323Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:33.855441Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7435 TClient is connected to server localhost:7435 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:34.560367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:31:34.575443Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:34.586297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.798313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:34.972947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:31:35.094598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:36.463885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790693496681804:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.464057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.772819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.804759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.832726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.869772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.899249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.968830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:37.009599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790697791649615:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.009682Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.009723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790697791649620:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.013189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:37.021595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790697791649622:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:37.110723Z node 1 :TX_PROXY ERROR: Actor# [1:7486790697791649675:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:38.094975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:38.131070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:38.168283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:38.219480Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790680611778140:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:38.219534Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7986, MsgBus: 12358 2025-03-28T09:31:40.875207Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790711526350635:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:40.879347Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00126c/r3tmp/tmpbCQ126/pdisk_1.dat 2025-03-28T09:31:40.989614Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:41.004013Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:41.004107Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:41.005843Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7986, node 2 2025-03-28T09:31:41.059030Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:41.059052Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:41.059060Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:41.059155Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12358 TClient is connected to server localhost:12358 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:41.462288Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.469351Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:31:41.477202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.549387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.690469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.763373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:44.146230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790728706221563:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.146332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.192644Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.223693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.250999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.281950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.323418Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.390802Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.428979Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790728706222075:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.429089Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790728706222080:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.429097Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.432939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:44.441793Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790728706222082:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:44.510328Z node 2 :TX_PROXY ERROR: Actor# [2:7486790728706222135:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:45.353503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.397422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.439785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.878578Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790711526350635:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:45.920540Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexReplace+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9942, MsgBus: 23323 2025-03-28T09:31:33.241003Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790683215143366:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:33.241060Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00125d/r3tmp/tmp1t9MXb/pdisk_1.dat 2025-03-28T09:31:33.740597Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:33.740709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:33.742959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:33.765069Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9942, node 1 2025-03-28T09:31:33.888303Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:33.888323Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:33.888330Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:33.888444Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23323 TClient is connected to server localhost:23323 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:34.614302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.642480Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:34.658006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.806228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.982733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:35.059468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:36.424199Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790696100047006:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.424352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.710835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.735180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.757392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.785702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.813639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.843611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.900441Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790696100047514:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.900526Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.900797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790696100047519:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.905200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:36.918460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790696100047521:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:37.014747Z node 1 :TX_PROXY ERROR: Actor# [1:7486790700395014873:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:38.042460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:38.075968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:38.111136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:38.241163Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790683215143366:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:38.241242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24363, MsgBus: 27197 2025-03-28T09:31:41.024567Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790715913348083:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:41.024676Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00125d/r3tmp/tmpISxExt/pdisk_1.dat 2025-03-28T09:31:41.191786Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24363, node 2 2025-03-28T09:31:41.216167Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:41.216252Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:41.217453Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:41.242667Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:41.242704Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:41.242713Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:41.242838Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27197 TClient is connected to server localhost:27197 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:41.625292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.641751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.721563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.882073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.957228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:44.272688Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790728798251760:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.272821Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.321189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.356102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.382521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.412975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.444029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.478095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.519208Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790728798252271:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.519340Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790728798252276:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.519345Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.524030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:44.534293Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790728798252278:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:44.630242Z node 2 :TX_PROXY ERROR: Actor# [2:7486790728798252332:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:45.742922Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.791914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.865983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.024508Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790715913348083:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:46.024629Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21897, MsgBus: 22814 2025-03-28T09:31:33.219960Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790680792360809:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:33.220115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001254/r3tmp/tmpkpw7V5/pdisk_1.dat 2025-03-28T09:31:33.690060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:33.690148Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:33.693029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:33.697252Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21897, node 1 2025-03-28T09:31:33.856085Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:33.856100Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:33.856131Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:33.856228Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22814 TClient is connected to server localhost:22814 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:34.589925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.631549Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:34.653758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.800906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:35.046460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:35.134199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:36.581898Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790693677264462:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.582084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.878470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.907293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.940079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.966737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:36.992258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:37.022728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:37.058995Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790697972232264:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.059071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790697972232269:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.059100Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.062468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:37.071313Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790697972232271:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:37.168925Z node 1 :TX_PROXY ERROR: Actor# [1:7486790697972232326:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:38.041751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:38.123790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:38.166812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:38.219351Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790680792360809:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:38.219466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5546, MsgBus: 10892 2025-03-28T09:31:41.130358Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790717701207380:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:41.130610Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001254/r3tmp/tmpBab42D/pdisk_1.dat 2025-03-28T09:31:41.248179Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:41.271550Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:41.271643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 5546, node 2 2025-03-28T09:31:41.273138Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:41.306613Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:41.306637Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:41.306645Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:41.306777Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10892 TClient is connected to server localhost:10892 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:41.717733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.731304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.807998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.952842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:42.031020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.337398Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790730586111030:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.337497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.379179Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.408412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.437664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.468122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.496007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.530040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.581168Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790730586111539:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.581271Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.581337Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790730586111544:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.584643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:44.594174Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790730586111546:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:44.661190Z node 2 :TX_PROXY ERROR: Actor# [2:7486790730586111600:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:45.509887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.584615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.631978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.130786Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790717701207380:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:46.130853Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Update-QueryService-UseSink >> KqpQueryPerf::RangeRead+QueryService >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService >> Viewer::JsonAutocompleteEmptyColumns [GOOD] >> Viewer::JsonAutocompleteColumnsPOST >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] >> KqpQueryPerf::UpdateOn-QueryService-UseSink >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService >> KqpQueryPerf::IndexUpsert+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7836, MsgBus: 11610 2025-03-28T09:31:45.401781Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790735561814224:2271];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:45.401827Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001225/r3tmp/tmp7niT2s/pdisk_1.dat 2025-03-28T09:31:45.787352Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:45.787613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:45.787688Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:45.791750Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> 
Connected TServer::EnableGrpc on GrpcPort 7836, node 1 2025-03-28T09:31:45.879247Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:45.879266Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:45.879287Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:45.879406Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11610 TClient is connected to server localhost:11610 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:46.401212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:46.430659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:46.579368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:46.740244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:46.822844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:48.437009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790748446717658:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:48.437723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:48.756092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:48.789338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:48.826167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:48.855781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:48.886856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:48.957780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:49.006394Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790752741685468:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:49.006475Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:49.006628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790752741685473:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:49.009718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:49.022544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790752741685475:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:49.079275Z node 1 :TX_PROXY ERROR: Actor# [1:7486790752741685528:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink >> Viewer::JsonAutocompleteSimilarDatabaseNameLowerCase [GOOD] >> Viewer::JsonAutocompleteSchemePOST >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD] >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21960, MsgBus: 32445 2025-03-28T09:31:45.769898Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790733842626085:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:45.769944Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001220/r3tmp/tmpgSb6VR/pdisk_1.dat 2025-03-28T09:31:46.145640Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21960, node 1 2025-03-28T09:31:46.176172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:46.176256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:46.178007Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:46.223259Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:46.223289Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:46.223298Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:46.223420Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32445 TClient is connected to server localhost:32445 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:31:46.722788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:46.739520Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:46.746643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:46.905240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:47.073698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:47.139406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:31:48.939787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790746727529756:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:48.939916Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:49.229999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:49.258189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:49.286074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:49.314021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:49.379177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:49.447173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:49.490982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790751022497569:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:49.491061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:49.491275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790751022497574:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:49.494559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:49.502658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790751022497576:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:49.602319Z node 1 :TX_PROXY ERROR: Actor# [1:7486790751022497629:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:50.770783Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790733842626085:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:50.770891Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> BasicUsage::BrokenCredentialsProvider [GOOD] >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] >> TConsoleTests::TestCreateSubSubDomainExtSubdomain [GOOD] >> TConsoleTests::TestAuthorization >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::AuthorizeYdbTokenWithDatabaseAttributes [GOOD] Test command err: 2025-03-28T09:31:28.182825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:732:2428], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:28.183254Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:28.183327Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:31:28.185298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:729:2372], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:28.185464Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:28.185638Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:31:28.735713Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:28.879104Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-28T09:31:28.904267Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-28T09:31:29.300090Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 24440, node 1 TClient is connected to server localhost:6444 2025-03-28T09:31:29.513743Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:29.513794Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:29.513818Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:29.514225Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:31:32.212905Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486790678702107207:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:32.213037Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:31:32.325172Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:32.355301Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:32.355397Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:32.356963Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27016, node 3 2025-03-28T09:31:32.399846Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:32.399883Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:32.399895Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:32.400070Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22954 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:32.692715Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:32.717038Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:31:32.719297Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:34.947546Z node 3 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:34.947616Z node 3 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:35.536343Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486790691587009785:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:35.536416Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486790691587009793:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:35.536526Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:35.540719Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-03-28T09:31:35.550299Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486790691587009799:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-03-28T09:31:35.652889Z node 3 :TX_PROXY ERROR: Actor# [3:7486790691587009850:2356] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:35.991442Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZWQyYTFkNzQtMjM3M2YyMy0xMzM5ODJjNC1mNTQ4YzMwNQ==, ActorId: [3:7486790691587009783:2340], ActorState: ExecuteState, TraceId: 01jqe1fcqe6j1r3mez93kf5v45, Create QueryResponse for error on request, msg: Scheme operations cannot be executed inside transaction 2025-03-28T09:31:38.377096Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486790703434512301:2098];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:38.378949Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:31:38.505717Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:38.533312Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:38.533401Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:38.534955Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23965, node 4 2025-03-28T09:31:38.641334Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:38.641368Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:38.641377Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:38.641511Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23878 2025-03-28T09:31:39.042045Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:31:39.047975Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:31:39.053211Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2025-03-28T09:31:41.327433Z node 4 :GRPC_SERVER DEBUG: Got grpc request# request auth and check internal request, traceId# undef, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# /Root, peer# , grpcInfo# undef, timeout# 9.999790s 2025-03-28T09:31:41.327578Z node 4 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:41.327609Z node 4 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:42.341095Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486790720614382122:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.341222Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.453944Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486790720614382137:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.454047Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:42.454353Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: ... false data# peer# 2025-03-28T09:31:49.160931Z node 5 :GRPC_SERVER DEBUG: [0x51b000477a80] received request Name# Coordination/CreateNode ok# false data# peer# 2025-03-28T09:31:49.161100Z node 5 :GRPC_SERVER DEBUG: [0x51b000477380] received request Name# Coordination/AlterNode ok# false data# peer# 2025-03-28T09:31:49.161109Z node 5 :GRPC_SERVER DEBUG: [0x51b000476c80] received request Name# Coordination/DropNode ok# false data# peer# 2025-03-28T09:31:49.161258Z node 5 :GRPC_SERVER DEBUG: [0x51b000476580] received request Name# Coordination/DescribeNode ok# false data# peer# 2025-03-28T09:31:49.161338Z node 5 :GRPC_SERVER DEBUG: [0x51b000475e80] received request Name# CreateDatabase ok# false data# peer# 2025-03-28T09:31:49.161420Z node 5 :GRPC_SERVER DEBUG: [0x51b000526380] received request Name# GetDatabaseStatus ok# false data# peer# 2025-03-28T09:31:49.161568Z node 5 :GRPC_SERVER DEBUG: [0x51b000475780] received request Name# AlterDatabase ok# false data# peer# 2025-03-28T09:31:49.161754Z node 5 :GRPC_SERVER DEBUG: [0x51b000525580] received request Name# ListDatabases ok# false data# peer# 2025-03-28T09:31:49.161802Z node 5 :GRPC_SERVER DEBUG: [0x51b000524780] received request Name# RemoveDatabase ok# false data# peer# 2025-03-28T09:31:49.161924Z node 5 :GRPC_SERVER DEBUG: [0x51b000524080] received request Name# DescribeDatabaseOptions ok# false data# peer# 2025-03-28T09:31:49.162039Z node 5 :GRPC_SERVER DEBUG: [0x51b000523980] received request Name# GetScaleRecommendation ok# false data# peer# 2025-03-28T09:31:49.162110Z node 5 :GRPC_SERVER DEBUG: [0x51b00051fa80] received request Name# ListEndpoints ok# false data# peer# 2025-03-28T09:31:49.162311Z node 5 :GRPC_SERVER DEBUG: [0x51b000522480] received request Name# WhoAmI ok# false data# peer# 2025-03-28T09:31:49.162321Z node 5 :GRPC_SERVER DEBUG: [0x51b000520880] received request Name# NodeRegistration ok# false data# peer# 2025-03-28T09:31:49.162500Z node 5 :GRPC_SERVER DEBUG: [0x51b00051f380] received request Name# Scan ok# false data# peer# 2025-03-28T09:31:49.162569Z node 5 :GRPC_SERVER DEBUG: [0x51b000574880] received request Name# GetShardLocations ok# false data# peer# 2025-03-28T09:31:49.162670Z node 5 :GRPC_SERVER DEBUG: [0x51b00051ec80] received request Name# DescribeTable ok# false data# peer# 2025-03-28T09:31:49.162865Z node 5 :GRPC_SERVER DEBUG: [0x51b00057aa80] received request Name# CreateSnapshot ok# false data# peer# 2025-03-28T09:31:49.163004Z node 5 :GRPC_SERVER DEBUG: [0x51b000578780] received request Name# RefreshSnapshot ok# false data# peer# 2025-03-28T09:31:49.163117Z node 5 :GRPC_SERVER DEBUG: [0x51b000571080] received request Name# DiscardSnapshot ok# false data# peer# 2025-03-28T09:31:49.163246Z node 5 :GRPC_SERVER DEBUG: [0x51b000572580] received request Name# List ok# false data# peer# 2025-03-28T09:31:49.163357Z node 5 :GRPC_SERVER DEBUG: [0x51b000573380] received request Name# RateLimiter/CreateResource ok# false data# peer# 2025-03-28T09:31:49.163496Z node 5 :GRPC_SERVER DEBUG: [0x51b000521680] received request Name# RateLimiter/AlterResource ok# false data# peer# 2025-03-28T09:31:49.163612Z node 5 :GRPC_SERVER DEBUG: [0x51b00001e380] received request Name# RateLimiter/DropResource ok# false data# peer# 2025-03-28T09:31:49.163750Z node 5 :GRPC_SERVER DEBUG: [0x51b00052fd80] received request Name# 
RateLimiter/ListResources ok# false data# peer# 2025-03-28T09:31:49.163863Z node 5 :GRPC_SERVER DEBUG: [0x51b000532780] received request Name# RateLimiter/DescribeResource ok# false data# peer# 2025-03-28T09:31:49.163990Z node 5 :GRPC_SERVER DEBUG: [0x51b00052e880] received request Name# RateLimiter/AcquireResource ok# false data# peer# 2025-03-28T09:31:49.164138Z node 5 :GRPC_SERVER DEBUG: [0x51b00052d380] received request Name# CreateStream ok# false data# peer# 2025-03-28T09:31:49.164257Z node 5 :GRPC_SERVER DEBUG: [0x51b000533c80] received request Name# ListStreams ok# false data# peer# 2025-03-28T09:31:49.164405Z node 5 :GRPC_SERVER DEBUG: [0x51b000538980] received request Name# DeleteStream ok# false data# peer# 2025-03-28T09:31:49.164512Z node 5 :GRPC_SERVER DEBUG: [0x51b000537480] received request Name# DescribeStream ok# false data# peer# 2025-03-28T09:31:49.164673Z node 5 :GRPC_SERVER DEBUG: [0x51b00053a580] received request Name# ListShards ok# false data# peer# 2025-03-28T09:31:49.164752Z node 5 :GRPC_SERVER DEBUG: [0x51b000511380] received request Name# SetWriteQuota ok# false data# peer# 2025-03-28T09:31:49.164933Z node 5 :GRPC_SERVER DEBUG: [0x51b000548580] received request Name# UpdateStream ok# false data# peer# 2025-03-28T09:31:49.165006Z node 5 :GRPC_SERVER DEBUG: [0x51b00052a980] received request Name# PutRecord ok# false data# peer# 2025-03-28T09:31:49.165191Z node 5 :GRPC_SERVER DEBUG: [0x51b00052c580] received request Name# PutRecords ok# false data# peer# 2025-03-28T09:31:49.165274Z node 5 :GRPC_SERVER DEBUG: [0x51b00052b080] received request Name# GetRecords ok# false data# peer# 2025-03-28T09:31:49.165441Z node 5 :GRPC_SERVER DEBUG: [0x51b00052da80] received request Name# GetShardIterator ok# false data# peer# 2025-03-28T09:31:49.165510Z node 5 :GRPC_SERVER DEBUG: [0x51b00052ef80] received request Name# SubscribeToShard ok# false data# peer# 2025-03-28T09:31:49.165690Z node 5 :GRPC_SERVER DEBUG: [0x51b000530b80] received request Name# DescribeLimits ok# false data# peer# 2025-03-28T09:31:49.165750Z node 5 :GRPC_SERVER DEBUG: [0x51b000532e80] received request Name# DescribeStreamSummary ok# false data# peer# 2025-03-28T09:31:49.165952Z node 5 :GRPC_SERVER DEBUG: [0x51b000534380] received request Name# DecreaseStreamRetentionPeriod ok# false data# peer# 2025-03-28T09:31:49.165992Z node 5 :GRPC_SERVER DEBUG: [0x51b000535880] received request Name# IncreaseStreamRetentionPeriod ok# false data# peer# 2025-03-28T09:31:49.166174Z node 5 :GRPC_SERVER DEBUG: [0x51b000535f80] received request Name# UpdateShardCount ok# false data# peer# 2025-03-28T09:31:49.166213Z node 5 :GRPC_SERVER DEBUG: [0x51b000536d80] received request Name# UpdateStreamMode ok# false data# peer# 2025-03-28T09:31:49.166405Z node 5 :GRPC_SERVER DEBUG: [0x51b000539e80] received request Name# RegisterStreamConsumer ok# false data# peer# 2025-03-28T09:31:49.166449Z node 5 :GRPC_SERVER DEBUG: [0x51b00053b380] received request Name# DeregisterStreamConsumer ok# false data# peer# 2025-03-28T09:31:49.166658Z node 5 :GRPC_SERVER DEBUG: [0x51b00053c180] received request Name# DescribeStreamConsumer ok# false data# peer# 2025-03-28T09:31:49.166714Z node 5 :GRPC_SERVER DEBUG: [0x51b00053d680] received request Name# ListStreamConsumers ok# false data# peer# 2025-03-28T09:31:49.166957Z node 5 :GRPC_SERVER DEBUG: [0x51b000540080] received request Name# AddTagsToStream ok# false data# peer# 2025-03-28T09:31:49.166971Z node 5 :GRPC_SERVER DEBUG: [0x51b000540780] received request Name# DisableEnhancedMonitoring ok# 
false data# peer# 2025-03-28T09:31:49.167185Z node 5 :GRPC_SERVER DEBUG: [0x51b000542a80] received request Name# EnableEnhancedMonitoring ok# false data# peer# 2025-03-28T09:31:49.167216Z node 5 :GRPC_SERVER DEBUG: [0x51b000543f80] received request Name# ListTagsForStream ok# false data# peer# 2025-03-28T09:31:49.167399Z node 5 :GRPC_SERVER DEBUG: [0x51b000544680] received request Name# MergeShards ok# false data# peer# 2025-03-28T09:31:49.167445Z node 5 :GRPC_SERVER DEBUG: [0x51b000508780] received request Name# RemoveTagsFromStream ok# false data# peer# 2025-03-28T09:31:49.167619Z node 5 :GRPC_SERVER DEBUG: [0x51b000545480] received request Name# SplitShard ok# false data# peer# 2025-03-28T09:31:49.167700Z node 5 :GRPC_SERVER DEBUG: [0x51b000546980] received request Name# StartStreamEncryption ok# false data# peer# 2025-03-28T09:31:49.167854Z node 5 :GRPC_SERVER DEBUG: [0x51b000511a80] received request Name# StopStreamEncryption ok# false data# peer# 2025-03-28T09:31:49.167970Z node 5 :GRPC_SERVER DEBUG: [0x51b000510580] received request Name# SelfCheck ok# false data# peer# 2025-03-28T09:31:49.168106Z node 5 :GRPC_SERVER DEBUG: [0x51b00050f780] received request Name# NodeCheck ok# false data# peer# 2025-03-28T09:31:49.168256Z node 5 :GRPC_SERVER DEBUG: [0x51b00050c680] received request Name# CreateSession ok# false data# peer# 2025-03-28T09:31:49.168364Z node 5 :GRPC_SERVER DEBUG: [0x51b00050b880] received request Name# DeleteSession ok# false data# peer# 2025-03-28T09:31:49.168547Z node 5 :GRPC_SERVER DEBUG: [0x51b00050b180] received request Name# AttachSession ok# false data# peer# 2025-03-28T09:31:49.168621Z node 5 :GRPC_SERVER DEBUG: [0x51b000509580] received request Name# BeginTransaction ok# false data# peer# 2025-03-28T09:31:49.168827Z node 5 :GRPC_SERVER DEBUG: [0x51b000507980] received request Name# CommitTransaction ok# false data# peer# 2025-03-28T09:31:49.168912Z node 5 :GRPC_SERVER DEBUG: [0x51b000504f80] received request Name# RollbackTransaction ok# false data# peer# 2025-03-28T09:31:49.169098Z node 5 :GRPC_SERVER DEBUG: [0x51b00050e980] received request Name# ExecuteQuery ok# false data# peer# 2025-03-28T09:31:49.169182Z node 5 :GRPC_SERVER DEBUG: [0x51b00050e280] received request Name# ExecuteScript ok# false data# peer# 2025-03-28T09:31:49.169373Z node 5 :GRPC_SERVER DEBUG: [0x51b00050d480] received request Name# FetchScriptResults ok# false data# peer# 2025-03-28T09:31:49.169450Z node 5 :GRPC_SERVER DEBUG: [0x51b000503a80] received request Name# ExecuteTabletMiniKQL ok# false data# peer# 2025-03-28T09:31:49.169653Z node 5 :GRPC_SERVER DEBUG: [0x51b000503380] received request Name# ChangeTabletSchema ok# false data# peer# 2025-03-28T09:31:49.169696Z node 5 :GRPC_SERVER DEBUG: [0x51b000506b80] received request Name# RestartTablet ok# false data# peer# 2025-03-28T09:31:49.169918Z node 5 :GRPC_SERVER DEBUG: [0x51b000507280] received request Name# CreateLogStore ok# false data# peer# 2025-03-28T09:31:49.169946Z node 5 :GRPC_SERVER DEBUG: [0x51b000501e80] received request Name# DescribeLogStore ok# false data# peer# 2025-03-28T09:31:49.170206Z node 5 :GRPC_SERVER DEBUG: [0x51b000500980] received request Name# AlterLogStore ok# false data# peer# 2025-03-28T09:31:49.170218Z node 5 :GRPC_SERVER DEBUG: [0x51b000501780] received request Name# DropLogStore ok# false data# peer# 2025-03-28T09:31:49.170463Z node 5 :GRPC_SERVER DEBUG: [0x51b0004fe680] received request Name# CreateLogTable ok# false data# peer# 2025-03-28T09:31:49.170472Z node 5 :GRPC_SERVER DEBUG: [0x51b0004fd880] 
received request Name# DescribeLogTable ok# false data# peer# 2025-03-28T09:31:49.170743Z node 5 :GRPC_SERVER DEBUG: [0x51b0004fca80] received request Name# DropLogTable ok# false data# peer# 2025-03-28T09:31:49.170744Z node 5 :GRPC_SERVER DEBUG: [0x51b000527f80] received request Name# AlterLogTable ok# false data# peer# 2025-03-28T09:31:49.170978Z node 5 :GRPC_SERVER DEBUG: [0x51b000526a80] received request Name# Login ok# false data# peer# 2025-03-28T09:31:49.171012Z node 5 :GRPC_SERVER DEBUG: [0x51b000525c80] received request Name# DescribeReplication ok# false data# peer# 2025-03-28T09:31:49.171200Z node 5 :GRPC_SERVER DEBUG: [0x51b000524e80] received request Name# DescribeView ok# false data# peer# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22368, MsgBus: 28928 2025-03-28T09:31:42.037538Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790719953977256:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:42.037757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001239/r3tmp/tmpAVes6I/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22368, node 1 2025-03-28T09:31:42.428683Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:31:42.428696Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:31:42.441854Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:42.442098Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:42.453143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:42.454300Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:42.511116Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:42.511139Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:42.511152Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:42.511278Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28928 TClient is connected to server localhost:28928 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:43.047499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:43.066808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:31:43.204544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.376910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.442518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:44.945094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790728543913602:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:44.945210Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.284760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.313567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.344003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.379416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.412006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.457011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.514769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790732838881411:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.514848Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.515011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790732838881416:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.518243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:45.531215Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:31:45.531593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790732838881418:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:45.600751Z node 1 :TX_PROXY ERROR: Actor# [1:7486790732838881471:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 7308, MsgBus: 5479 2025-03-28T09:31:47.463853Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790743734592043:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:47.463906Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001239/r3tmp/tmp9r7QR3/pdisk_1.dat 2025-03-28T09:31:47.576614Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7308, node 2 2025-03-28T09:31:47.635114Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:47.635221Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:47.640318Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:47.667211Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:47.667232Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:47.667240Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:47.667354Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5479 TClient is connected to server localhost:5479 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:31:48.078308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:31:48.093616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:31:48.171628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:48.307422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:48.390245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.322268Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790756619495708:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:50.322368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:50.347987Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.385219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.412384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.440016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.481016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.522738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.617273Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790756619496225:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:50.617374Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:50.617650Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790756619496230:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:50.621066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:50.633348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790756619496232:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:50.729159Z node 2 :TX_PROXY ERROR: Actor# [2:7486790756619496287:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] >> KqpQueryPerf::AggregateToScalar+QueryService [GOOD] >> KqpQueryPerf::AggregateToScalar-QueryService >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsWithCompactingMiddleRows [GOOD] >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1093, MsgBus: 21187 2025-03-28T09:31:47.106320Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790741189106299:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:47.108151Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00121f/r3tmp/tmp4QZh19/pdisk_1.dat 2025-03-28T09:31:47.489791Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1093, node 1 2025-03-28T09:31:47.538810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:47.539000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:47.545121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:47.585484Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:47.585508Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:47.585522Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:47.585642Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21187 TClient is connected to server localhost:21187 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:31:48.090440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:48.121490Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:48.129413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:48.273960Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:48.446191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:48.528234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.087190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790754074009963:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:50.087396Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:50.368171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.437376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.467822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.503043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.535662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.570524Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.625088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790754074010481:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:50.625181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:50.625356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790754074010486:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:50.629497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:50.656128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790754074010488:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:50.737515Z node 1 :TX_PROXY ERROR: Actor# [1:7486790754074010543:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::Upsert-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 19376, MsgBus: 24426 2025-03-28T09:31:47.381756Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790741177830234:2238];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:47.381936Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00121e/r3tmp/tmp1GEybm/pdisk_1.dat 2025-03-28T09:31:47.703686Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:47.705729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:47.705812Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:47.710717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19376, node 1 2025-03-28T09:31:47.814277Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:47.814316Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:47.814325Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:47.814445Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24426 TClient is connected to server localhost:24426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:48.286989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:31:48.314094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:48.485750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:48.650067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:48.728376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.420859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790754062733740:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:50.420991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:50.702007Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.735020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.803501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.879318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.945020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.978731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.074060Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790758357701563:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:51.074132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:51.074187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790758357701568:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:51.077834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:51.087749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790758357701570:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:51.143679Z node 1 :TX_PROXY ERROR: Actor# [1:7486790758357701624:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:52.378710Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790741177830234:2238];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:52.378751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2025-03-28T09:31:11.555073Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1743154271555049 2025-03-28T09:31:11.886757Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790586569809538:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:11.887870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:31:11.925667Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790587247818574:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:11.926117Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:31:12.058635Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:31:12.075706Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001bac/r3tmp/tmpgUQDP2/pdisk_1.dat 2025-03-28T09:31:12.283696Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:12.283807Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:12.287693Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:31:12.288407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:12.288693Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:12.288733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:12.294012Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:12.313598Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24682, node 1 2025-03-28T09:31:12.341868Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:31:12.342972Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:31:12.453998Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, 
broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001bac/r3tmp/yandex5dkix3.tmp 2025-03-28T09:31:12.454024Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001bac/r3tmp/yandex5dkix3.tmp 2025-03-28T09:31:12.455714Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001bac/r3tmp/yandex5dkix3.tmp 2025-03-28T09:31:12.455912Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:31:12.645314Z INFO: TTestServer started on Port 14018 GrpcPort 24682 TClient is connected to server localhost:14018 PQClient connected to localhost:24682 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:12.917757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-28T09:31:14.815101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790599454712445:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:14.815101Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790599454712419:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:14.815206Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:14.819021Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:31:14.835983Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790599454712448:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:31:14.918526Z node 1 :TX_PROXY ERROR: Actor# [1:7486790599454712539:2684] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:15.275607Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790599454712556:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:31:15.275793Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486790600132720794:2313], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:31:15.276012Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTNkOThkZmItYzJkMThlN2MtZDhkNDkwNzUtZTJiZDczNWQ=, ActorId: [2:7486790600132720754:2307], ActorState: ExecuteState, TraceId: 01jqe1erhy91c03vtv303qbyxk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:31:15.277033Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjNmMGZmYWYtNGM1MjM0NjYtNGE4YWRkY2UtMjY2ZGY2NTA=, ActorId: [1:7486790599454712416:2335], ActorState: ExecuteState, TraceId: 01jqe1erfp6yfhmjgy870hadz4, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:31:15.277889Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:31:15.277889Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:31:15.350281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:15.509111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:15.639502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:24682", true, true, 1000); 2025-03-28T09:31:15.996063Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqe1esc71wt5cwg6whsth1xb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzY1ZDQxZGYtNGU3ZjQ0YzUtYzZkODJhYTAtNmMzZDVkMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7486790608044647598:3026] 2025-03-28T09:31:16.886199Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790586569809538:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:16.886266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:31:16.925670Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790587247818574:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:16.925729Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-28T09:31:21.809378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:24682 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-28T09:31:21.942319Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:24682 MetaRequest { CmdCreateTopic { Topic ... 1 server" ip=ipv6:[::1]:54290 proto=v1 topic=test-topic durationSec=0 2025-03-28T09:31:50.516639Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2025-03-28T09:31:50.518036Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-03-28T09:31:50.518148Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-03-28T09:31:50.518158Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:31:50.518167Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-28T09:31:50.518182Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7486790755143069790:2521] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-03-28T09:31:50.520427Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7486790755143069790:2521] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-03-28T09:31:50.700231Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7486790755143069790:2521] 
(SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-03-28T09:31:50.700597Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7486790755143069832:2521] connected; active server actors: 1 2025-03-28T09:31:50.700701Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7486790755143069790:2521] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-03-28T09:31:50.700721Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7486790755143069790:2521] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-03-28T09:31:50.702265Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7486790755143069832:2521] disconnected; active server actors: 1 2025-03-28T09:31:50.702291Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7486790755143069832:2521] disconnected no session 2025-03-28T09:31:50.831045Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7486790755143069790:2521] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-03-28T09:31:50.831079Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7486790755143069790:2521] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-03-28T09:31:50.831093Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7486790755143069790:2521] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-03-28T09:31:50.831115Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:31:50.831909Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2025-03-28T09:31:50.834860Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7486790755143069852:2521], now have 1 active actors on pipe 2025-03-28T09:31:50.834939Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:31:50.834967Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:31:50.835056Z node 6 :PERSQUEUE INFO: new Cookie src|7ae45b95-3008e139-a39af209-71db43b8_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-03-28T09:31:50.835149Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-03-28T09:31:50.835198Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:31:50.837047Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:31:50.837079Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:31:50.837208Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:31:50.837500Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|7ae45b95-3008e139-a39af209-71db43b8_0 2025-03-28T09:31:50.838415Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743154310838 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:31:50.838533Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|7ae45b95-3008e139-a39af209-71db43b8_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-28T09:31:50.838741Z :INFO: [] MessageGroupId [src] SessionId [src|7ae45b95-3008e139-a39af209-71db43b8_0] Write session: close. Timeout = 0 ms 2025-03-28T09:31:50.838800Z :INFO: [] MessageGroupId [src] SessionId [src|7ae45b95-3008e139-a39af209-71db43b8_0] Write session will now close 2025-03-28T09:31:50.838844Z :DEBUG: [] MessageGroupId [src] SessionId [src|7ae45b95-3008e139-a39af209-71db43b8_0] Write session: aborting 2025-03-28T09:31:50.839295Z :INFO: [] MessageGroupId [src] SessionId [src|7ae45b95-3008e139-a39af209-71db43b8_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:31:50.839338Z :DEBUG: [] MessageGroupId [src] SessionId [src|7ae45b95-3008e139-a39af209-71db43b8_0] Write session: destroy 2025-03-28T09:31:50.842828Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|7ae45b95-3008e139-a39af209-71db43b8_0 grpc read done: success: 0 data: 2025-03-28T09:31:50.842856Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|7ae45b95-3008e139-a39af209-71db43b8_0 grpc read failed 2025-03-28T09:31:50.842893Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|7ae45b95-3008e139-a39af209-71db43b8_0 grpc closed 2025-03-28T09:31:50.842913Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|7ae45b95-3008e139-a39af209-71db43b8_0 is DEAD 2025-03-28T09:31:50.844168Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:31:50.844544Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7486790755143069852:2521] destroyed 2025-03-28T09:31:50.844606Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-03-28T09:31:50.866348Z :INFO: [/Root] [/Root] [7186a5d0-2154bf44-7aea70dd-9c3387b5] Starting read session 2025-03-28T09:31:50.866428Z :DEBUG: [/Root] [/Root] [7186a5d0-2154bf44-7aea70dd-9c3387b5] Starting session to cluster null (localhost:25780) 2025-03-28T09:31:50.868548Z :DEBUG: [/Root] [/Root] [7186a5d0-2154bf44-7aea70dd-9c3387b5] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:50.868598Z :DEBUG: [/Root] [/Root] [7186a5d0-2154bf44-7aea70dd-9c3387b5] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:50.868641Z :DEBUG: [/Root] [/Root] [7186a5d0-2154bf44-7aea70dd-9c3387b5] [null] Reconnecting session to cluster null in 0.000000s 2025-03-28T09:31:50.870105Z :ERROR: [/Root] [/Root] [7186a5d0-2154bf44-7aea70dd-9c3387b5] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2025-03-28T09:31:50.870166Z :DEBUG: [/Root] [/Root] [7186a5d0-2154bf44-7aea70dd-9c3387b5] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:50.870200Z :DEBUG: [/Root] [/Root] [7186a5d0-2154bf44-7aea70dd-9c3387b5] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:50.870331Z :INFO: [/Root] [/Root] [7186a5d0-2154bf44-7aea70dd-9c3387b5] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2025-03-28T09:31:50.870496Z :NOTICE: [/Root] [/Root] [7186a5d0-2154bf44-7aea70dd-9c3387b5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:31:50.870531Z :DEBUG: [/Root] [/Root] [7186a5d0-2154bf44-7aea70dd-9c3387b5] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2025-03-28T09:31:50.870621Z :INFO: [/Root] [/Root] [7186a5d0-2154bf44-7aea70dd-9c3387b5] Closing read session. Close timeout: 0.000000s 2025-03-28T09:31:50.870664Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-28T09:31:50.870730Z :INFO: [/Root] [/Root] [7186a5d0-2154bf44-7aea70dd-9c3387b5] Counters: { Errors: 1 CurrentSessionLifetimeMs: 4 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:31:50.870850Z :NOTICE: [/Root] [/Root] [7186a5d0-2154bf44-7aea70dd-9c3387b5] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:31:50.987740Z node 5 :KQP_COMPUTE WARN: TxId: 281474976710689, task: 1, CA Id [5:7486790755143069873:2535]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-03-28T09:31:51.019419Z node 5 :KQP_COMPUTE WARN: TxId: 281474976710689, task: 1, CA Id [5:7486790755143069873:2535]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-28T09:31:51.075568Z node 5 :KQP_COMPUTE WARN: TxId: 281474976710689, task: 1, CA Id [5:7486790755143069873:2535]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-28T09:31:51.142788Z node 5 :KQP_COMPUTE WARN: TxId: 281474976710689, task: 1, CA Id [5:7486790755143069873:2535]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-28T09:31:51.224566Z node 5 :KQP_COMPUTE WARN: TxId: 281474976710689, task: 1, CA Id [5:7486790755143069873:2535]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-28T09:31:51.364309Z node 5 :KQP_COMPUTE WARN: TxId: 281474976710689, task: 1, CA Id [5:7486790755143069873:2535]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService >> KqpQueryPerf::Insert+QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4495, MsgBus: 29005 2025-03-28T09:31:42.666595Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790721276288048:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:42.666631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001231/r3tmp/tmpfTmkfM/pdisk_1.dat 2025-03-28T09:31:43.016596Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:43.016725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:43.019473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4495, node 1 2025-03-28T09:31:43.059414Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:43.114466Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:43.114488Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:43.114495Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:43.114596Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29005 TClient is connected to server localhost:29005 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:43.627278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.640914Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:43.648259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.786895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.936906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.998212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:45.728550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790734161191711:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.728665Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.995144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.025252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.051195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.081727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.112338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.149980Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.238178Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790738456159526:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:46.238242Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:46.238647Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790738456159531:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:46.242950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:46.252867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790738456159533:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:46.307085Z node 1 :TX_PROXY ERROR: Actor# [1:7486790738456159586:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 20750, MsgBus: 12116 2025-03-28T09:31:48.122362Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790745543348105:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:48.122432Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001231/r3tmp/tmpMbu1SE/pdisk_1.dat 2025-03-28T09:31:48.228223Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20750, node 2 2025-03-28T09:31:48.268525Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:48.268611Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:48.286886Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:48.310077Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:48.310100Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:48.310108Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:48.310216Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12116 TClient is connected to server localhost:12116 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:48.765094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:48.771164Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:31:48.783187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:31:48.855329Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:49.010363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:31:49.089567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:51.145940Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790758428251751:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:51.146040Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:51.193427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.226265Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.256431Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.295029Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.327574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.363058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.410546Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790758428252263:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:51.410622Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:51.410731Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790758428252268:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:51.413971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:51.423925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790758428252270:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:51.490664Z node 2 :TX_PROXY ERROR: Actor# [2:7486790758428252323:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3705, MsgBus: 28254 2025-03-28T09:31:38.284909Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790706067939672:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:38.287064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00124a/r3tmp/tmpBfMJPk/pdisk_1.dat 2025-03-28T09:31:38.667334Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3705, node 1 2025-03-28T09:31:38.705292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:38.705385Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:38.709168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:38.746268Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:38.746304Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:38.746315Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:38.746471Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28254 TClient is connected to server localhost:28254 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:39.300183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:31:39.324992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:39.458787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:39.624994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:39.702570Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:41.318443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790718952843321:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:41.318550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:41.603227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.634940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.670415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.701486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.729321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.761548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.838977Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790718952843835:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:41.839054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:41.839391Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790718952843840:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:41.843495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:41.871266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790718952843842:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:41.928334Z node 1 :TX_PROXY ERROR: Actor# [1:7486790718952843895:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:42.969202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:43.011668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:43.087999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:43.285135Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790706067939672:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:43.285234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5925, MsgBus: 27130 2025-03-28T09:31:45.954201Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790735222550002:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:45.954274Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00124a/r3tmp/tmpYXDpiL/pdisk_1.dat 2025-03-28T09:31:46.078247Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5925, node 2 2025-03-28T09:31:46.113012Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:46.113094Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:46.113859Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:46.159966Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:46.159987Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:46.159998Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:46.160134Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27130 TClient is connected to server localhost:27130 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:46.577810Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:46.585449Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:46.605326Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:46.675041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:46.822408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:46.891019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:49.062881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790752402420944:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:49.062961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:49.112080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:49.148981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:49.175839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:49.211530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:49.247403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:49.281462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:49.352606Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790752402421456:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:49.352689Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:49.352717Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790752402421461:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:49.356835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:49.366558Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790752402421463:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:49.466544Z node 2 :TX_PROXY ERROR: Actor# [2:7486790752402421518:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:50.324628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.401797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.448336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.956327Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790735222550002:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:50.956387Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8547, MsgBus: 22304 2025-03-28T09:31:47.712471Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790740700570320:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:47.712544Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00120b/r3tmp/tmplT85Es/pdisk_1.dat 2025-03-28T09:31:48.091209Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8547, node 1 2025-03-28T09:31:48.107410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:48.107518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:48.111352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:48.170132Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:48.170158Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:48.170170Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:48.170305Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22304 TClient is connected to server localhost:22304 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:48.722737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:48.769146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:48.938020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:49.094792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:49.159010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.803960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790753585473977:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:50.804067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:51.068707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.093682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.164765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.233725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.265797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.336701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.421642Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790757880441798:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:51.421718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:51.421837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790757880441803:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:51.425362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:51.438805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790757880441805:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:51.504537Z node 1 :TX_PROXY ERROR: Actor# [1:7486790757880441861:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:52.712859Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790740700570320:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:52.712931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Insert-QueryService-UseSink >> Viewer::QueryExecuteScript [FAIL] >> Viewer::Plan2SvgOK >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] >> KqpQueryPerf::MultiRead-QueryService >> KqpQueryPerf::IndexUpsert-QueryService-UseSink >> KqpQueryPerf::KvRead+QueryService >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink >> KqpQueryPerf::RangeRead+QueryService [GOOD] >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] >> KqpQueryPerf::Update-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21941, MsgBus: 22856 2025-03-28T09:31:49.439646Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790750530171828:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:49.439834Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011fa/r3tmp/tmp17u877/pdisk_1.dat 2025-03-28T09:31:49.735764Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21941, node 1 2025-03-28T09:31:49.790538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:49.791209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:49.794379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:49.813223Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:49.813243Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:49.813256Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:49.813399Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22856 TClient is connected to server localhost:22856 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:50.350807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.364532Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:50.381328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.555558Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.725727Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.789996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:52.477483Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790763415075500:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:52.477765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:52.718675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.754443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.784742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.815261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.842017Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.879177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.925919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790763415076013:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:52.925996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:52.926182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790763415076018:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:52.930673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:52.939851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790763415076020:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:53.017694Z node 1 :TX_PROXY ERROR: Actor# [1:7486790763415076073:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:54.439566Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790750530171828:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:54.439697Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexInsert+QueryService-UseSink >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19529, MsgBus: 11820 2025-03-28T09:31:50.390879Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790755547788305:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:50.391335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011f8/r3tmp/tmpec8hLj/pdisk_1.dat 2025-03-28T09:31:50.736278Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:50.778076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:50.778165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19529, node 1 2025-03-28T09:31:50.783187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:50.822083Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:50.822110Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:50.822121Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:50.822242Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11820 TClient is connected to server localhost:11820 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:51.365489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:51.392048Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:51.400392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:51.537627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:51.715444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:51.794450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:53.498429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790768432691830:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:53.498661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:53.801863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:53.832855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:53.860229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:53.888119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:53.916646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:53.948806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.024695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790772727659639:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.024769Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.024793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790772727659644:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.028273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:54.053306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790772727659646:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:54.129593Z node 1 :TX_PROXY ERROR: Actor# [1:7486790772727659701:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:55.385871Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790755547788305:2202];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:55.385955Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Update+QueryService+UseSink >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 31500, MsgBus: 17799 2025-03-28T09:31:50.821421Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790755240426956:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:50.821829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011ef/r3tmp/tmp3wBB4b/pdisk_1.dat 2025-03-28T09:31:51.216793Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:51.246069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:51.246160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:51.248189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31500, node 1 2025-03-28T09:31:51.333083Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:51.333107Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:51.333115Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:51.333244Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17799 TClient is connected to server localhost:17799 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:51.835627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:51.861840Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:51.868100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.008019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:52.174648Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:52.266642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:53.796014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790768125330601:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:53.796125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.088494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.112955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.136276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.159803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.184624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.211691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.247181Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790772420298405:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.247278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.247292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790772420298410:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.250335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:54.259006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790772420298412:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:54.318010Z node 1 :TX_PROXY ERROR: Actor# [1:7486790772420298465:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18907, MsgBus: 8443 2025-03-28T09:31:42.023195Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790720060711292:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:42.023266Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00123e/r3tmp/tmpctGczK/pdisk_1.dat 2025-03-28T09:31:42.392996Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18907, node 1 2025-03-28T09:31:42.462487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:42.464349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:42.466845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:42.487190Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:42.487212Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:42.487223Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:42.487312Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8443 TClient is connected to server localhost:8443 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:43.078605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:31:43.120108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.238864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.418371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.490055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:45.145212Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790732945614952:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.145343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.475209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.503161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.535845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.567763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.598257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.638878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.686746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790732945615463:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.686817Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790732945615468:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.686823Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.690326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:45.699429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790732945615470:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:45.781784Z node 1 :TX_PROXY ERROR: Actor# [1:7486790732945615524:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:46.755216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.797184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.864437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:47.023194Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790720060711292:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:47.023265Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14541, MsgBus: 8984 2025-03-28T09:31:49.661222Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790749835257018:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:49.661394Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00123e/r3tmp/tmpTRyMwu/pdisk_1.dat 2025-03-28T09:31:49.781060Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:49.809146Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:49.809241Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:49.810584Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14541, node 2 2025-03-28T09:31:49.857231Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:49.857258Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:49.857266Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:49.857393Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8984 TClient is connected to server localhost:8984 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:31:50.250886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.262866Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.336896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.496501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:50.573408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.811135Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790762720160669:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:52.811236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:52.855926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.887861Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.921430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.945502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.972683Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:53.004171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:53.073422Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790767015128474:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:53.073500Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:53.073749Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790767015128479:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:53.077095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:53.086904Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790767015128481:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:53.175460Z node 2 :TX_PROXY ERROR: Actor# [2:7486790767015128537:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:54.004977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.042267Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.077503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.661728Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790749835257018:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:54.661793Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 32051, MsgBus: 2992 2025-03-28T09:31:51.520352Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790760333802561:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:51.520578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011e4/r3tmp/tmpJDSWs1/pdisk_1.dat 2025-03-28T09:31:51.899559Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:51.903291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:51.903366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:51.907335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32051, node 1 2025-03-28T09:31:51.983460Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:51.983479Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:51.983485Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:51.983580Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2992 TClient is connected to server localhost:2992 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:52.536198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:52.552223Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:52.570254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:52.730196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:52.874734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:52.952426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.407583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790773218706236:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.407714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.702650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.733009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.760369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.782378Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.808538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.840610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.881386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790773218706745:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.881452Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.881661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790773218706750:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.884870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:54.894822Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:31:54.894979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790773218706752:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:54.978606Z node 1 :TX_PROXY ERROR: Actor# [1:7486790773218706807:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 25812, MsgBus: 12446 2025-03-28T09:31:45.204282Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790732621195722:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:45.206552Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00122b/r3tmp/tmpuWadL9/pdisk_1.dat 2025-03-28T09:31:45.587197Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25812, node 1 2025-03-28T09:31:45.617516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:45.617604Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:45.620194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:45.678370Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:45.678408Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:45.678425Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:45.678584Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12446 TClient is connected to server localhost:12446 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
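Interleaved with the node logs are test verdict markers of the form '>> Suite::TestName [STATUS]' (only GOOD appears above; a queued test is announced with the same '>>' prefix but no bracketed status), plus '-------' headers introducing each test's captured stderr. A small sketch that extracts the verdicts, grounded only in the marker shape visible in this log (statuses other than GOOD would need to be confirmed against the harness):

# Hypothetical sketch: pull ">> Suite::Test [STATUS]" verdicts out of this log.
# Pending tests (">>" with no trailing "[...]") are deliberately not matched.
import re
import sys

VERDICT_RE = re.compile(r">> ([\w:+-]+) \[([A-Z]+)\]")

def verdicts(path):
    seen = {}
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            for m in VERDICT_RE.finditer(line):
                seen[m.group(1)] = m.group(2)  # a later mention overrides an earlier one
    return seen

if __name__ == "__main__":
    for name, status in sorted(verdicts(sys.argv[1]).items()):
        print(f"{status:8} {name}")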
2025-03-28T09:31:46.137962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:46.155831Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:46.168942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:46.296314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.453274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:46.515994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:48.324623Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790745506099296:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:48.324759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:48.621062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:48.674481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:48.704276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:48.733401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:48.764746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:48.797095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:48.879037Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790745506099811:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:48.879119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:48.879194Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790745506099816:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:48.883072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:48.893180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790745506099818:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:48.965359Z node 1 :TX_PROXY ERROR: Actor# [1:7486790745506099873:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:50.199854Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790732621195722:2140];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:50.199925Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15904, MsgBus: 62287 2025-03-28T09:31:51.582603Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790758085138412:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:51.582654Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00122b/r3tmp/tmpa0fz8O/pdisk_1.dat 2025-03-28T09:31:51.674662Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:51.719790Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:51.719864Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:51.722365Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15904, node 2 2025-03-28T09:31:51.764202Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:51.764223Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:51.764229Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:51.764322Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62287 TClient is connected to server localhost:62287 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
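The TClient::Ls responses above are protobuf text-format dumps of the path description, cut off by the harness at '(TRUNCATED)'. When only a few scalar fields are needed from such a dump, a regex scrape of flat 'Key: value' pairs can be enough; the sketch below is a hypothetical illustration, not how the tests themselves consume the response, and it ignores message nesting and the truncated tail:

# Hypothetical sketch: scrape scalar fields from a protobuf text-format dump
# like the TClient::Ls response above. Only simple `Key: value` pairs are
# recovered; nested structure and anything past the truncation are ignored.
import re

DUMP = 'Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true }'

FIELD_RE = re.compile(r'(\w+): ("(?:[^"\\]|\\.)*"|\S+)')

fields = dict(FIELD_RE.findall(DUMP))
print(fields["Name"], fields["PathId"], fields["PathType"])
# -> "Root" 1 EPathTypeDir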
2025-03-28T09:31:52.206145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:52.212304Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:31:52.221375Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:52.289827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:52.486424Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:52.550154Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.483095Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790770970042064:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.483165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.519369Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.547338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.574098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.608054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.638046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.670567Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.718697Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790770970042575:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.718772Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.719090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790770970042580:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.722164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:54.731996Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790770970042582:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:54.789445Z node 2 :TX_PROXY ERROR: Actor# [2:7486790770970042635:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:56.583790Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790758085138412:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:56.583875Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TConsoleTests::TestAuthorization [GOOD] >> TConsoleTests::TestAuthorizationExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12527, MsgBus: 19802 2025-03-28T09:31:41.933310Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790716177932691:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:41.933346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00123a/r3tmp/tmp9WappV/pdisk_1.dat 2025-03-28T09:31:42.287512Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12527, node 1 2025-03-28T09:31:42.338616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:42.339211Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:42.352547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:42.382728Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:42.382751Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:42.382759Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:42.382875Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19802 TClient is connected to server localhost:19802 WaitRootIsUp 'Root'... 
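The METADATA_PROVIDER entries above carry a semicolon-delimited payload ('fline=...;actor=...;event=...;') rather than free text. A tiny sketch that splits such a payload into key/value pairs (hypothetical; the field names are taken only from the lines shown in this log):

# Hypothetical sketch: split the semicolon-delimited "fline=...;key=value;"
# payload used by the METADATA_PROVIDER entries above into a dict.
def parse_fline(msg: str) -> dict:
    parts = [p for p in msg.strip().rstrip(";").split(";") if p]
    out = {}
    for p in parts:
        key, _, value = p.partition("=")
        out[key] = value
    return out

sample = "fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;"
print(parse_fline(sample))
# {'fline': 'table_exists.cpp:59', 'actor': 'TTableExistsActor', 'event': 'timeout'}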
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:42.898083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:42.939182Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:42.957458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.090721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.258854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:43.338726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:45.114738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790733357803648:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.114846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.380369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.408357Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.436128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.475892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.518444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.590046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.672440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790733357804170:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.672491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.672630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790733357804175:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.675420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:45.683529Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790733357804177:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:45.750014Z node 1 :TX_PROXY ERROR: Actor# [1:7486790733357804230:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:46.729417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.767399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.806340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:46.933533Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790716177932691:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:46.933586Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 64481, MsgBus: 5949 2025-03-28T09:31:49.832385Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790750442925256:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:49.832452Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00123a/r3tmp/tmpfA3ZH2/pdisk_1.dat 2025-03-28T09:31:49.915078Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64481, node 2 2025-03-28T09:31:49.966962Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:49.967047Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:49.968666Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:49.980334Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:49.980362Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:49.980369Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:49.980485Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5949 TClient is connected to server localhost:5949 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:50.400190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.412271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.491304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.643941Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.702172Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:53.102332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790767622796195:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:53.102451Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:53.128058Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:53.156271Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:53.187590Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:53.219817Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:53.249344Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:53.279053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:53.329630Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790767622796703:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:53.329732Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:53.329947Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790767622796708:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:53.334202Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:53.344646Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790767622796710:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:53.424969Z node 2 :TX_PROXY ERROR: Actor# [2:7486790767622796764:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:54.207092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.279626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.312901Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.832671Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790750442925256:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:54.832732Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] >> KqpQueryPerf::IndexUpsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert+QueryService+UseSink >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 6565, MsgBus: 28425 2025-03-28T09:31:47.676383Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790741816801177:2272];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:47.676429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001210/r3tmp/tmp2VMycV/pdisk_1.dat 2025-03-28T09:31:48.063232Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:48.065734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:48.065779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:48.069798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6565, node 1 2025-03-28T09:31:48.136165Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:48.136191Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:48.136200Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-03-28T09:31:48.136328Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28425 TClient is connected to server localhost:28425 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:48.697764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:48.712027Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:48.718822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:48.877176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:49.040479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:31:49.122654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.652240Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790754701704617:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:50.652383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:50.974122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.008797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.035295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.059335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.088787Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.128394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.173975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790758996672430:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:51.174057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:51.174381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790758996672435:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:51.179080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:51.197701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790758996672437:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:51.263171Z node 1 :TX_PROXY ERROR: Actor# [1:7486790758996672491:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:52.676383Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790741816801177:2272];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:52.676485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10364, MsgBus: 63902 2025-03-28T09:31:53.476386Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790770243242614:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:53.476448Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001210/r3tmp/tmplRVF8O/pdisk_1.dat 2025-03-28T09:31:53.609219Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:53.629752Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:53.629843Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:53.631374Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10364, node 2 2025-03-28T09:31:53.671207Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:53.671229Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:53.671236Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:53.671337Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63902 TClient is connected to server localhost:63902 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:31:54.089703Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:54.097695Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:54.165844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:54.305615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:54.367084Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:56.307787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790783128146273:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:56.307866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:56.343018Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:56.368965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:56.444109Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:56.469356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:56.498560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:56.537072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:56.589676Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790783128146786:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:56.589757Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:56.590066Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790783128146791:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:56.593159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:56.604410Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790783128146793:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:56.657483Z node 2 :TX_PROXY ERROR: Actor# [2:7486790783128146846:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9136, MsgBus: 30060 2025-03-28T09:31:53.934367Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790767044620927:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:53.934499Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011c9/r3tmp/tmpIh2NjU/pdisk_1.dat 2025-03-28T09:31:54.261424Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9136, node 1 2025-03-28T09:31:54.321601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:54.321713Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:54.323285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:54.340618Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:54.340642Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:54.340650Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:54.340792Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30060 TClient is connected to server localhost:30060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:54.827585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:31:54.856634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:54.998265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:55.172133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:55.256146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:56.925358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790779929524593:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:56.925478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:57.173050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.198358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.222637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.250863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.275145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.304026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.354371Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790784224492399:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:57.354431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:57.354463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790784224492404:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:57.357692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:57.368052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790784224492406:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:57.433673Z node 1 :TX_PROXY ERROR: Actor# [1:7486790784224492460:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::Insert-QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert-QueryService+UseSink >> KqpQueryPerf::MultiRead-QueryService [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21328, MsgBus: 27954 2025-03-28T09:31:48.845230Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790746792213752:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:48.846273Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001208/r3tmp/tmpTyhHNN/pdisk_1.dat 2025-03-28T09:31:49.196861Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:49.213899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:49.214036Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:49.217122Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21328, node 1 2025-03-28T09:31:49.291285Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:49.291314Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:49.291323Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:49.291484Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27954 TClient is connected to server localhost:27954 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:49.789944Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:31:49.814478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:49.974252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.126340Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:50.194629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:51.785701Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790759677117410:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:51.785792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:52.151603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.185926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.215111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.280778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.348539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.424146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:52.505087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790763972085237:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:52.505184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:52.505226Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790763972085242:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:52.508440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:52.517770Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790763972085244:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:52.597002Z node 1 :TX_PROXY ERROR: Actor# [1:7486790763972085300:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:53.845462Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790746792213752:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:53.845567Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21221, MsgBus: 21304 2025-03-28T09:31:54.423226Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790772668263900:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:54.423370Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001208/r3tmp/tmpTVDhbs/pdisk_1.dat 2025-03-28T09:31:54.520781Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21221, node 2 2025-03-28T09:31:54.568062Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:54.568145Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:54.571582Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:54.579655Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:54.579673Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:54.579678Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:54.579750Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21304 TClient is connected to server localhost:21304 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-28T09:31:54.917403Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.935381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:55.005742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:55.155087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:55.226605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.522763Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790785553167543:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:57.522838Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:57.566222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.595454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.624005Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.663520Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.693198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.760394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.807857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790785553168060:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:57.807930Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:57.808282Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790785553168065:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:57.811464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:57.821440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790785553168067:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:31:57.881585Z node 2 :TX_PROXY ERROR: Actor# [2:7486790785553168120:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK >> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK >> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK >> ReadIteratorExternalBlobs::ExtBlobsEmptyTable [GOOD] >> ReadIteratorExternalBlobs::NotExtBlobs >> TopicAutoscaling::ControlPlane_CreateAlterDescribe >> TopicAutoscaling::CommitTopPast_BeforeAutoscaleAwareSDK >> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK >> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK >> KqpQueryPerf::KvRead+QueryService [GOOD] >> KqpQueryPerf::KvRead-QueryService >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] >> TopicAutoscaling::PartitionSplit_PQv1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 20658, MsgBus: 21771 2025-03-28T09:31:55.192253Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790776772138268:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:55.192276Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011ba/r3tmp/tmpfv86IG/pdisk_1.dat 2025-03-28T09:31:55.561009Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20658, node 1 2025-03-28T09:31:55.605787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:55.605909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:55.624025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:55.659285Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:55.659309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:55.659323Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:55.659486Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21771 TClient is connected to server localhost:21771 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:56.096018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:56.123104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:56.243863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:31:56.380246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:56.444256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:58.051832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790789657041951:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:58.051950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:58.389614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.417966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.443769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.470355Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.497428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.532388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.609228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790789657042466:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:58.609324Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:58.609429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790789657042471:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:58.612884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:58.621852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790789657042473:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:58.717535Z node 1 :TX_PROXY ERROR: Actor# [1:7486790789657042528:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> Viewer::JsonAutocompleteColumnsPOST [GOOD] >> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] >> Viewer::JsonAutocompleteSchemePOST [GOOD] >> Viewer::Plan2SvgOK [GOOD] >> Viewer::Plan2SvgBad >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 30531, MsgBus: 10858 2025-03-28T09:31:56.173327Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790780499327694:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:56.173383Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001198/r3tmp/tmpWvMGVV/pdisk_1.dat 2025-03-28T09:31:56.527857Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30531, node 1 2025-03-28T09:31:56.575857Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:56.576069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:56.577761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:56.597810Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:56.597831Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:56.597839Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:56.597948Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10858 TClient is connected to server localhost:10858 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:31:57.131111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.164253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.306119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.457434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.538155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:59.128108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790793384231357:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.128207Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.459557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.487105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.513966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.538091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.562417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.590543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.666200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790793384231872:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.666280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.666296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790793384231877:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.669258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:59.677679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790793384231879:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:59.756873Z node 1 :TX_PROXY ERROR: Actor# [1:7486790793384231935:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::IndexUpsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24859, MsgBus: 10669 2025-03-28T09:31:55.972369Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790775333972372:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:55.974315Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011a5/r3tmp/tmpSTBp8M/pdisk_1.dat 2025-03-28T09:31:56.302204Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24859, node 1 2025-03-28T09:31:56.364203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:56.364279Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:56.367172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:56.416656Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:56.416677Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:56.416687Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:56.416813Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10669 TClient is connected to server localhost:10669 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:56.905481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:31:56.919127Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:56.926035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.047770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.193978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.253389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:58.897764Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790788218875974:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:58.897859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.219483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.247465Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.279579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.304350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.333131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.364205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.443093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790792513843786:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.443152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.443252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790792513843791:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.446531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:59.454993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790792513843793:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:59.552620Z node 1 :TX_PROXY ERROR: Actor# [1:7486790792513843848:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:00.973848Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790775333972372:2130];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:00.973920Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> Balancing::Balancing_OneTopic_TopicApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28629, MsgBus: 14535 2025-03-28T09:31:57.187363Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790785950059180:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:57.187612Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00118f/r3tmp/tmpWVSm7M/pdisk_1.dat 2025-03-28T09:31:57.564976Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:57.572554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:57.572652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:57.575607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28629, node 1 2025-03-28T09:31:57.627936Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:57.627955Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:57.627962Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:57.628086Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14535 TClient is connected to server localhost:14535 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:58.103192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:58.124119Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:58.129033Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:58.253169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.397888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:58.447028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:59.928635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790794539995547:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.928749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:00.200182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:32:00.227406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:32:00.255103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:00.280729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:00.345294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:32:00.395255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:32:00.474072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790798834963363:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:00.474154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:00.474213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790798834963368:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:00.477435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:32:00.488214Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790798834963370:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:32:00.591200Z node 1 :TX_PROXY ERROR: Actor# [1:7486790798834963426:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteColumnsPOST [GOOD] Test command err: 2025-03-28T09:31:22.246621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:22.246908Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:22.246983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 61329, node 1 TClient is connected to server localhost:15401 2025-03-28T09:31:30.581509Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:319:2362], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:30.581792Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:30.581970Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 11227, node 2 TClient is connected to server localhost:25327 2025-03-28T09:31:38.829303Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:337:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:38.829736Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:38.829813Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 12592, node 3 TClient is connected to server localhost:20137 2025-03-28T09:31:48.180843Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:48.181291Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:48.181456Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 12974, node 4 TClient is connected to server localhost:2635 2025-03-28T09:31:58.757169Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:58.757536Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:31:58.757694Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 2278, node 5 TClient is connected to server localhost:24379 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 10124, MsgBus: 11456 2025-03-28T09:31:50.984414Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790755362419698:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:50.984477Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011eb/r3tmp/tmpz1FFAg/pdisk_1.dat 2025-03-28T09:31:51.372178Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10124, node 1 2025-03-28T09:31:51.403623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:51.403717Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:51.405281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:51.447171Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:51.447191Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:51.447198Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:51.447328Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11456 TClient is connected to server localhost:11456 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
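The repeated "WaitRootIsUp 'Root'" / "TClient::Ls" exchange above is a readiness poll: the test client re-issues a describe request until the root path answers with StatusCode: SUCCESS. A minimal sketch of that loop under stated assumptions — lsProbe is an invented callback standing in for the real TClient describe request:

#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

// Sketch of the "WaitRootIsUp 'Root'" handshake seen above: poll an
// Ls-style probe until the root entry reports SUCCESS. lsProbe is an
// invented stand-in; the real TClient talks to the SchemeShard.
bool WaitRootIsUp(const std::function<bool()>& lsProbe,
                  std::chrono::milliseconds timeout) {
    using Clock = std::chrono::steady_clock;
    const auto deadline = Clock::now() + timeout;
    while (Clock::now() < deadline) {
        if (lsProbe()) {                      // Status: 1 StatusCode: SUCCESS
            std::cout << "WaitRootIsUp 'Root' success.\n";
            return true;
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(50));
    }
    return false;                             // root never became visible
}

The timeout and 50 ms poll interval here are illustrative choices, not values taken from the test harness.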
2025-03-28T09:31:51.979218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:52.023602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:52.153600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:52.323120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:52.398633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:54.069448Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790772542290648:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.069567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.270080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.295439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.323375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.349987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.378674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.410962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:54.453289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790772542291157:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.453344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.453500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790772542291162:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.456705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:54.465792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790772542291164:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:54.570842Z node 1 :TX_PROXY ERROR: Actor# [1:7486790772542291219:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:55.985331Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790755362419698:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:55.985485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 19355, MsgBus: 7159 2025-03-28T09:31:56.778606Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790779882882418:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:56.779186Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011eb/r3tmp/tmpd7ERPk/pdisk_1.dat 2025-03-28T09:31:56.892130Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:56.907337Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:56.907427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:56.909193Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19355, node 2 2025-03-28T09:31:56.951536Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:56.951559Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:56.951575Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:56.951725Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7159 TClient is connected to server localhost:7159 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
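The block above records a recurring startup sequence: TPoolFetcherActor reports the default resource pool as NOT_FOUND, TPoolCreatorActor creates it and schedules a retry ("Transaction ... completed, doublechecking"), and TX_PROXY treats a racing creation as acceptable ("path exist, request accepts it"). A synchronous sketch of that observable fetch -> create -> doublecheck behaviour, assuming invented fetch/create callbacks in place of the real actor protocol:

#include <chrono>
#include <functional>
#include <thread>

// Possible outcomes, mirroring the NOT_FOUND / "path exist" statuses above.
enum class EStatus { Success, NotFound, AlreadyExists };

// Sketch of the fetch -> create -> doublecheck sequence the log records for
// the default resource pool. The real flow is actor-based (TPoolFetcherActor,
// TPoolCreatorActor); this loop only mirrors the observable behaviour, and
// both callbacks are invented stand-ins.
EStatus EnsureDefaultPool(const std::function<EStatus()>& fetch,
                          const std::function<EStatus()>& create,
                          int maxAttempts = 5) {
    auto delay = std::chrono::milliseconds(100);
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        if (fetch() == EStatus::Success)
            return EStatus::Success;               // pool is visible, done
        const EStatus created = create();
        // "path exist, request accepts it": losing a creation race is fine;
        // the pool still has to show up on the next fetch (doublechecking).
        if (created != EStatus::Success && created != EStatus::AlreadyExists)
            return created;
        std::this_thread::sleep_for(delay);        // scheduled retry
        delay *= 2;
    }
    return EStatus::NotFound;
}

Treating AlreadyExists as success is what makes the creation idempotent across the concurrent test actors seen in the log.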
2025-03-28T09:31:57.364279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.370943Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:31:57.376758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.454875Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:57.615594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.692742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:59.857165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790792767786072:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.857316Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.908739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.938001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.967303Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.992748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:00.018342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:32:00.043221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:32:00.076901Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790797062753874:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:00.076980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:00.077011Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790797062753879:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:00.079961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:32:00.087826Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790797062753881:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:32:00.158491Z node 2 :TX_PROXY ERROR: Actor# [2:7486790797062753935:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:01.779081Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790779882882418:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:01.779164Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexInsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert+QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonAutocompleteSchemePOST [GOOD] Test command err: 2025-03-28T09:31:24.576269Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:24.576651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:24.576746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 13120, node 1 TClient is connected to server localhost:22054 2025-03-28T09:31:32.330501Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:319:2362], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:32.330850Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:32.331036Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 18016, node 2 TClient is connected to server localhost:19145 2025-03-28T09:31:40.274653Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:337:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:40.275096Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:40.275167Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 24616, node 3 TClient is connected to server localhost:21218 2025-03-28T09:31:49.622890Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:49.623278Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:49.623417Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 8022, node 4 TClient is connected to server localhost:21347 2025-03-28T09:31:59.189702Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:59.190016Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:31:59.190151Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 13952, node 5 TClient is connected to server localhost:6254 >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRemoveServerlessTenant >> TConsoleTests::TestAuthorizationExtSubdomain [GOOD] >> TConsoleTests::TestAttributesExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage Test command err: 2025-03-28T09:31:30.826673Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:30.827019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:30.827081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001bb2/r3tmp/tmp37aK1f/pdisk_1.dat TServer::EnableGrpc on GrpcPort 18480, node 1 TClient is connected to server localhost:15687 2025-03-28T09:31:31.339568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:31:31.387143Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:31.391227Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:31.391287Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:31.391318Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:31.391551Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:31:31.427396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:31.427532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:31.439071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:31.555015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-03-28T09:31:31.662199Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:31:31.663890Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:31:31.664250Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:746:2626];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:31:31.688107Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:746:2626];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:31:31.688442Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-03-28T09:31:31.696405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:31:31.696667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:31:31.696943Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:31:31.697064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:31:31.697169Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:31:31.697296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:31:31.697437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:31:31.697555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:31:31.697672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:31:31.697807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:31:31.697928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:31:31.698052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:31:31.720611Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:31:31.722403Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-03-28T09:31:31.722669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:31:31.722746Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:31:31.722940Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:31:31.723075Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:31:31.723147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:31:31.723187Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:31:31.723308Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:31:31.723375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:31:31.723421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:31:31.723451Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:31:31.723626Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:31:31.723685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:31:31.723727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:31:31.723760Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:31:31.723855Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:31:31.723907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:31:31.723945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:31:31.723971Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:31:31.724088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:31:31.724133Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:31:31.724179Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:31:31.724244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:31:31.724283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:31:31.724314Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:31:31.724770Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=57; 2025-03-28T09:31:31.724858Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-03-28T09:31:31.724966Z node 1 :TX_COLUMNSHARD INFO: tablet_id ... LUMNSHARD TRACE: StateWork, received event# 2146435073, Sender [1:1315:3116], Recipient [1:746:2626]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2025-03-28T09:32:00.103922Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888 2025-03-28T09:32:00.104239Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[31] (CS::GENERAL) apply at tablet 72075186224037888 2025-03-28T09:32:00.108849Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:21 Blob count: 2 2025-03-28T09:32:00.109057Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3720408;raw_bytes=123937532;count=3;records=105525} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=17463040;raw_bytes=589051848;count=6;records=494475} inactive {blob_bytes=23493144;raw_bytes=787383134;count=15;records=667575} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 TEvBlobStorage::TEvPut 
tId=72075186224037888;c=1;:73/0:size=4045;count=18;;1:size=53108;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445360;count=1;;8:size=1445448;count=1;;9:size=1445400;count=1;;10:size=1445920;count=1;;11:size=4137072;count=5;;12:size=1445744;count=1;;13:size=1445928;count=1;;14:size=1445608;count=1;;15:size=1445528;count=1;;16:size=1445360;count=1;;17:size=1168928;count=1;;18:size=1402768;count=1;;19:size=1403096;count=1;;20:size=1402840;count=1;;21:size=1445376;count=1;;22:size=808584;count=1;;23:size=1496088;count=2;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut tId=72075186224037888;c=0;:73/0:size=4114;count=19;;1:size=53108;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445360;count=1;;8:size=1445448;count=1;;9:size=1445400;count=1;;10:size=1445920;count=1;;11:size=4137072;count=5;;12:size=1445744;count=1;;13:size=1445928;count=1;;14:size=1445608;count=1;;15:size=1445528;count=1;;16:size=1445360;count=1;;17:size=1168928;count=1;;18:size=1402768;count=1;;19:size=1403096;count=1;;20:size=1402840;count=1;;21:size=1445376;count=1;;22:size=808584;count=1;;23:size=1496088;count=2;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-03-28T09:32:00.121384Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-03-28T09:32:00.121478Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;fline=with_appended.cpp:65;portions=25,26,;task_id=7f260492-bb711f0-ae9b45c7-b907183d; 2025-03-28T09:32:00.121792Z node 1 :TX_COLUMNSHARD TRACE: 
tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:25;path_id:3;records_count:52716;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:1857384;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-03-28T09:32:00.121999Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/547285.000000s;; 2025-03-28T09:32:00.122142Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:26;path_id:3;records_count:52714;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:1857112;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-03-28T09:32:00.122277Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/599999.000000s;; 2025-03-28T09:32:00.122357Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::7f260492-bb711f0-ae9b45c7-b907183d; 2025-03-28T09:32:00.122449Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:3;path_id:3;size:21186720;portions_count:26;); 2025-03-28T09:32:00.122519Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;tablet_id=72075186224037888;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:32:00.122598Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:32:00.122727Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;tablet_id=72075186224037888;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-03-28T09:32:00.122810Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;tablet_id=72075186224037888;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000; 2025-03-28T09:32:00.122869Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;tablet_id=72075186224037888;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:32:00.122932Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:32:00.122986Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:32:00.123113Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;tablet_id=72075186224037888;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.600000s; 2025-03-28T09:32:00.123182Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:32:00.123410Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:21 Blob count: 2 VERIFY failed (2025-03-28T09:32:00.123613Z): tablet_id=72075186224037888;task_id=7f260492-bb711f0-ae9b45c7-b907183d;verification=CompactionsLimit.Dec() >= 0;fline=ro_controller.cpp:39; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+873 (0x18B7DF99) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x18B6C22B) NActors::TVerifyFormattedRecordWriter::~TVerifyFormattedRecordWriter()+326 (0x19E83CD6) NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+4577 (0x48517211) NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+4797 (0x30555E9D) NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+899 (0x1E8660F3) NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+3856 (0x1E749AD0) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+3444 (0x1E58FEC4) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+2821 (0x1E52CC45) NActors::IActor::Receive(TAutoPtr&)+237 (0x19DB54AD) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+3557 (0x3585FE75) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+12602 (0x358586EA) NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration)+1076 (0x35862A64) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration)+292 (0x35A2FBE4) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration)+419 (0x35A2ED03) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+307 (0x35A26F63) NActors::TTestActorRuntime::SimulateSleep(TDuration)+1115 (0x35A26B3B) NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+4702 (0x1875C99E) std::__y1::__function::__func, void ()>::operator()()+280 (0x1876E558) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1902ACB6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x18FFA7E9) NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+1204 (0x1876D504) NUnitTest::TTestFactory::Execute()+2438 (0x18FFC0B6) NUnitTest::RunMain(int, char**)+5213 (0x1902522D) ??+0 (0x7F652F0ABD90) __libc_start_main+128 (0x7F652F0ABE40) _start+41 (0x160E8029) >> DataShardVolatile::DistributedWriteAsymmetricExecute [GOOD] >> 
DataShardVolatile::DistributedWriteThenDropTable >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 28901, MsgBus: 16643 2025-03-28T09:31:54.331905Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790770844890996:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:54.332143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011c4/r3tmp/tmp9CFraN/pdisk_1.dat 2025-03-28T09:31:54.683010Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28901, node 1 2025-03-28T09:31:54.747778Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:54.747869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:54.749558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:54.758237Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:54.758261Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:54.758276Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:54.758421Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16643 TClient is connected to server localhost:16643 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:55.246149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
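The ColumnShardTiers::TTLUsage failure above ends with "VERIFY failed ... requirement false failed" raised from TVerifyFormattedRecordWriter in ydb/library/actors/core/log.cpp: a record of semicolon-delimited key=value fields is emitted and the process aborts, producing the stack trace shown. A rough illustration of that shape, using an invented SKETCH_VERIFY macro rather than the actual ydb/library/actors implementation:

#include <cstdlib>
#include <iostream>
#include <sstream>

// Illustrative stand-in for a VERIFY-style macro that, like the
// TVerifyFormattedRecordWriter seen in the trace, prints key=value
// fields plus the failed requirement and then hard-stops the binary.
// The macro name and field string are invented for this sketch.
#define SKETCH_VERIFY(cond, fields)                                        \
    do {                                                                   \
        if (!(cond)) {                                                     \
            std::ostringstream os;                                         \
            os << "VERIFY failed: " << fields                              \
               << ";requirement " #cond " failed;fline=" << __FILE__       \
               << ":" << __LINE__;                                         \
            std::cerr << os.str() << std::endl;                            \
            std::abort(); /* mirrors the test binary's hard stop */        \
        }                                                                  \
    } while (false)

int main() {
    long compactionsLimit = 0;
    // Mirrors the failing requirement in the log: CompactionsLimit.Dec() >= 0.
    SKETCH_VERIFY(--compactionsLimit >= 0,
                  "tablet_id=72075186224037888;task_id=<elided>");
    return 0;
}

Run as-is this aborts, which is exactly why the read-only controller's check in ro_controller.cpp:39 terminates the TTLUsage test above.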
2025-03-28T09:31:55.262796Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:31:55.276988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:55.424815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:55.577031Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:55.643064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.322275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790783729794674:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:57.322370Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:57.637278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.703282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.729486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.753918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.777601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.807012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.848104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790783729795186:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:57.848191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:57.848349Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790783729795191:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:57.851818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:57.860958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790783729795193:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:57.964349Z node 1 :TX_PROXY ERROR: Actor# [1:7486790783729795248:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 7288, MsgBus: 26002 2025-03-28T09:31:59.746423Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790795335970547:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:59.746564Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011c4/r3tmp/tmpRYhSKv/pdisk_1.dat 2025-03-28T09:31:59.838247Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7288, node 2 2025-03-28T09:31:59.876769Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:59.876849Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:59.878559Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:59.893658Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:59.893682Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:59.893688Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:59.893795Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26002 TClient is connected to server localhost:26002 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:00.269514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:00.274142Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:32:00.286529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:00.334256Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:00.480571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:00.554134Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:02.656237Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790808220874210:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:02.656329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:02.703755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:32:02.736706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:32:02.771699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:02.802126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:02.877224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:32:02.947608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:32:03.024835Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790812515842030:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.024920Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.025088Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790812515842035:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.028068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:32:03.035946Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790812515842037:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:32:03.109599Z node 2 :TX_PROXY ERROR: Actor# [2:7486790812515842092:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSchemeShardTopicSplitMergeTest::Boot >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition >> TSchemeShardTopicSplitMergeTest::MargePartitions >> KqpQueryPerf::KvRead-QueryService [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31381, MsgBus: 10081 2025-03-28T09:31:54.926425Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790771941913212:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:54.926555Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011c2/r3tmp/tmpbEXkHL/pdisk_1.dat 2025-03-28T09:31:55.298946Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31381, node 1 2025-03-28T09:31:55.332710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:55.332814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:55.334143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:55.375902Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:55.375926Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:55.375934Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:55.376023Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10081 TClient is connected to server localhost:10081 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:55.870179Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:55.894101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:56.027186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:56.170230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:56.238082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.828114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790784826816886:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:57.828227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:58.113398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.145198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.172795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.204663Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.235271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.305878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.347896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790789121784700:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:58.347968Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:58.348065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790789121784705:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:58.351322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:58.365712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790789121784707:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:58.456508Z node 1 :TX_PROXY ERROR: Actor# [1:7486790789121784760:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 30229, MsgBus: 23324 2025-03-28T09:32:00.506736Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790799185048440:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:00.506790Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011c2/r3tmp/tmpRZaZ7p/pdisk_1.dat 2025-03-28T09:32:00.597522Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30229, node 2 2025-03-28T09:32:00.629437Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:00.629540Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:00.631583Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:00.661161Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:00.661185Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:00.661197Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:00.661291Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23324 TClient is connected to server localhost:23324 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:01.120229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:01.139753Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:01.207545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:01.393164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:01.454622Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:03.542287Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790812069952085:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.542365Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.591145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:32:03.623165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:32:03.648892Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:03.675834Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:03.705259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:32:03.778421Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:32:03.833007Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790812069952602:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.833092Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.833112Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790812069952607:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.837176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:32:03.850008Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790812069952609:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:32:03.927633Z node 2 :TX_PROXY ERROR: Actor# [2:7486790812069952662:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::KvRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 12685, MsgBus: 26922 2025-03-28T09:31:55.973824Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790775766870899:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:55.973922Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011ae/r3tmp/tmp9Ek9Ct/pdisk_1.dat 2025-03-28T09:31:56.308329Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12685, node 1 2025-03-28T09:31:56.384175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:56.384304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:56.385986Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:56.397741Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:56.397759Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:56.397764Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:56.397862Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26922 TClient is connected to server localhost:26922 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:56.918725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:31:56.942534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.062433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.216904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.283631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:58.851176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790788651774574:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:58.851292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.161763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.191713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.219353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.246198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.274270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.302887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.345089Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790792946742379:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.345165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.345360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790792946742384:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.349332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:59.359451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790792946742386:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:59.415583Z node 1 :TX_PROXY ERROR: Actor# [1:7486790792946742440:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 5956, MsgBus: 17235 2025-03-28T09:32:00.995693Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790798310202051:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:00.995877Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011ae/r3tmp/tmp7qdLoj/pdisk_1.dat 2025-03-28T09:32:01.159519Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:01.173229Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:01.173310Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:01.176742Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5956, node 2 2025-03-28T09:32:01.231478Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:01.231506Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:01.231518Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:01.231643Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17235 TClient is connected to server localhost:17235 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:01.656575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:01.670968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:01.720107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:01.836534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:01.908784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:03.941794Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790811195105730:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.941896Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.985733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.015526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.053314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.084712Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.151844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.227953Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.288146Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790815490073548:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.288253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.288478Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790815490073553:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.291975Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:32:04.301808Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790815490073555:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:32:04.382789Z node 2 :TX_PROXY ERROR: Actor# [2:7486790815490073608:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19218, MsgBus: 21186 2025-03-28T09:31:51.948746Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790760314918736:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:51.948988Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011d7/r3tmp/tmpQsl4jf/pdisk_1.dat 2025-03-28T09:31:52.394745Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:52.399611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:52.399735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 19218, node 1 2025-03-28T09:31:52.401647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:52.456971Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:52.456992Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:52.457000Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:52.457115Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21186 TClient is connected to server localhost:21186 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:52.922178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:52.940981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:53.088147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:53.249447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:53.315886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:54.931239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790773199822419:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.931373Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:55.238803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:55.273557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:55.313091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:55.342422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:55.373924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:55.413282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:55.499756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790777494790230:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:55.499846Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:55.500119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790777494790235:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:55.504724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:55.514191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790777494790237:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:55.602045Z node 1 :TX_PROXY ERROR: Actor# [1:7486790777494790292:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:56.517670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:56.553909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:56.592299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:56.948151Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790760314918736:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:56.948230Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6969, MsgBus: 15492 2025-03-28T09:31:59.288497Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790793776358782:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:59.288580Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011d7/r3tmp/tmpQc892a/pdisk_1.dat 2025-03-28T09:31:59.377701Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6969, node 2 2025-03-28T09:31:59.423552Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:59.423643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:59.425206Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:59.434818Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:59.434846Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:59.434853Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:59.434964Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15492 TClient is connected to server localhost:15492 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:59.810362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:59.818167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:59.887773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:59.996771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:00.044026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:02.279003Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790806661262434:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:02.279106Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:02.304470Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:32:02.346129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:32:02.386489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:02.467992Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:02.511371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:32:02.564382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:32:02.634976Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790806661262951:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:02.635057Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:02.635309Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790806661262956:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:02.638345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:32:02.648368Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790806661262958:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:32:02.707742Z node 2 :TX_PROXY ERROR: Actor# [2:7486790806661263011:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:03.532251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:32:03.570231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:32:03.615561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.288907Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790793776358782:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:04.288982Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16392, MsgBus: 9883 2025-03-28T09:31:52.136192Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790762946060108:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:52.136238Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011d9/r3tmp/tmpSyM6Qq/pdisk_1.dat 2025-03-28T09:31:52.530902Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16392, node 1 2025-03-28T09:31:52.558955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:52.559203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:52.567719Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:52.592602Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:52.592635Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:52.592649Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:52.592780Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9883 
TClient is connected to server localhost:9883 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:53.085107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:53.110005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:53.226188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:53.375073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:53.430072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:54.933967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790771535996468:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:54.934093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:55.301946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:55.337279Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:55.368024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:55.410733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:55.479755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:55.549420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:55.595038Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790775830964286:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:55.595114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:55.595309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790775830964291:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:55.598519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:55.612218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790775830964293:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:55.676930Z node 1 :TX_PROXY ERROR: Actor# [1:7486790775830964346:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:56.669016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:56.706521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:56.742091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:57.136736Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790762946060108:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:57.136849Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25270, MsgBus: 12021 2025-03-28T09:31:59.715303Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790792172309861:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:59.715354Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011d9/r3tmp/tmpKbfHwf/pdisk_1.dat 2025-03-28T09:31:59.796371Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25270, node 2 2025-03-28T09:31:59.843722Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:59.843794Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:59.844979Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:59.855103Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:59.855126Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:59.855133Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:59.855243Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12021 TClient is connected to server localhost:12021 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:00.271037Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:00.287624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:00.335236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:32:00.470415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:00.527638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:02.834132Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790805057213519:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:02.834239Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:02.870247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:32:02.905165Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:32:02.934283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:02.964282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:02.994177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:32:03.026461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:32:03.068764Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790809352181324:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.068822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790809352181329:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.069090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.072593Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:32:03.082186Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790809352181331:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:32:03.160096Z node 2 :TX_PROXY ERROR: Actor# [2:7486790809352181385:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:04.035098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.115091Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.153886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.715796Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790792172309861:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:04.715869Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:06.713744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:06.713870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:06.713909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:06.713941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:06.714554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:06.714594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:06.714774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:06.714855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:06.715840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
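Note: the ut_topic_splitmerge tests that follow all drive the same entry point — an ESchemeOpAlterPersQueueGroup transaction with a Split description proposed directly to schemeshard. Here the test asks to split partition 7 of a topic that has no such partition and expects StatusInvalidParameter (see the AUDIT line for txId 105 further down). The request shape, copied from the "TTxOperationPropose Execute" line below and embedded as a C++ raw string for reference:

    // Request taken verbatim from the propose line below; partition 7 does not
    // exist, so the propose is rejected at IgniteOperation time.
    const char* kBadSplitRequest = R"pb(
      WorkingDir: "/MyRoot/USER_1"
      OperationType: ESchemeOpAlterPersQueueGroup
      AlterPersQueueGroup {
        Name: "Topic1"
        PQTabletConfig { PartitionConfig { } }
        Split { Partition: 7 SplitBoundary: "W" }
      }
    )pb";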
2025-03-28T09:32:06.802186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:06.802252Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:06.819512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:06.819819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:06.820002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:06.832879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:06.833061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:06.835409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.835594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:06.841034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.846557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:06.846620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.847217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:06.847304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:06.847358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:06.847456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.853848Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:06.989434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:06.989648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.989910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:06.990154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:06.990213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.992393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.992526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:06.992686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.992750Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:06.992781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:06.992823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:06.994714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.994764Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:06.994801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:06.996484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.996531Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.996566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:06.996610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:07.000146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:07.001913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:07.002093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:07.003241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:07.003374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:07.003425Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:07.003666Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:07.003727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:07.003899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
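Note: the trace above is the generic schemeshard operation pipeline that every test in this file replays for txId 1 (ALTER DATABASE on //MyRoot): the propose is accepted, TCreateParts has no shards to create and advances the sub-operation, ConfigureParts advances it again, the coordinator plans the step, and TPropose completes it. The state numbers in the "Change state for txid" lines map out as follows; the labels are inferred from the adjacent log lines, not the exact YDB enum:

    // States observed for txid 1:0 in the trace above.
    enum ETxStateSketch {
        CreateParts    = 2,    // "TCreateParts opId# 1:0 ProgressState"
        ConfigureParts = 3,    // "NSubDomainState::TConfigureParts ... ProgressState"
        Propose        = 128,  // waits for TEvOperationPlan from the coordinator
        Done           = 240,  // "[...] TDone opId# 1:0 ProgressState"
    };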
2025-03-28T09:32:07.003997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:07.006029Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:07.006075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:07.006229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:07.006301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:07.006786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.006842Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:07.006933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:07.006967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:07.007015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:07.007046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:07.007080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:07.007116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:07.007147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:07.007178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:07.007237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:07.007273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:07.007332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:07.009194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:07.009318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:07.009360Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
57594046678944 message# TabletId: 72075186233409548 TxId: 104 Status: OK 2025-03-28T09:32:07.222618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-03-28T09:32:07.222652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-28T09:32:07.224520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-28T09:32:07.224737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-28T09:32:07.224776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-28T09:32:07.225120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2025-03-28T09:32:07.225171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-28T09:32:07.225226Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2025-03-28T09:32:07.259001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:07.259128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 0 RawX2: 0 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:07.259195Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2025-03-28T09:32:07.259263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-28T09:32:07.285977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-03-28T09:32:07.286131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-03-28T09:32:07.286201Z node 1 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-03-28T09:32:07.286245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.286280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-03-28T09:32:07.286416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-03-28T09:32:07.286587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:32:07.286705Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:32:07.288852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.289287Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:07.289330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:32:07.289461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:32:07.289627Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:07.289692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-03-28T09:32:07.289737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-03-28T09:32:07.290117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.290162Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-28T09:32:07.290242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:32:07.290270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:32:07.290302Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:32:07.290329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:32:07.290365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-28T09:32:07.290399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:32:07.290433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:32:07.290459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:32:07.290617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-28T09:32:07.290678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2025-03-28T09:32:07.290729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-28T09:32:07.290758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-28T09:32:07.291723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:32:07.291814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 
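Note: once an operation reaches Done, schemeshard publishes every touched path to the scheme board and holds the waiter until all acks arrive; the "Publication still in progress ... publications: 2" and "Publication in-flight, count: ..." lines around this point are that countdown for txId 104, which publishes LocalPathId 2 at version 5 and LocalPathId 3 at version 2. A minimal sketch of the ack counting, assuming nothing beyond what the log shows:

    #include <cstdint>
    #include <cstdio>

    struct TPublicationSketch { uint64_t TxId; int InFlight; };

    // One TEvUpdateAck per published path; the waiter is satisfied at zero.
    void OnUpdateAck(TPublicationSketch& pub) {
        if (--pub.InFlight == 0)
            std::printf("Publication complete, notify & remove, txId: %llu\n",
                        (unsigned long long)pub.TxId);
    }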
2025-03-28T09:32:07.291848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:32:07.291890Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-28T09:32:07.291929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:32:07.293208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:32:07.293270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:32:07.293293Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:32:07.293318Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-28T09:32:07.293355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:32:07.293418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-03-28T09:32:07.293458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:403:2369] 2025-03-28T09:32:07.296114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:32:07.299078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:32:07.299160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:32:07.299196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:542:2475] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } TestModificationResults wait txId: 105 2025-03-28T09:32:07.306362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:07.306547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.306744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 7, at schemeshard: 72057594046678944 2025-03-28T09:32:07.309394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 7" 
TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:07.309538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 7, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-28T09:32:07.309768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-28T09:32:07.309803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-28T09:32:07.310141Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-03-28T09:32:07.310230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-28T09:32:07.310277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:638:2560] TestWaitNotification: OK eventTxId 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:06.714452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:06.714561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:06.714609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:06.714649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:06.714725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:06.714759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:06.714825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:06.714905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:06.715860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:06.803532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:06.803597Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:06.819387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:06.819688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-03-28T09:32:06.819852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:06.832960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:06.833157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:06.835385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.835584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:06.841047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.846547Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:06.846608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.847209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:06.847285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:06.847340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:06.847416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.853402Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:06.974316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:06.974525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.974764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:06.975020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:06.975070Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.977251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.977382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:06.977542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.977596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:06.977633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:06.977670Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:06.979359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.979411Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:06.979446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:06.980972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.981014Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.981048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:06.981097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:06.984642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:06.986335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:06.986497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:06.987529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.987667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:06.987712Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:06.987965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:06.988021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:06.988202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:06.988279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:06.990161Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
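Note: the FAKE_COORDINATOR lines above ("Add transaction: 1 at step: 5000001", "advance", "Send Plan") come from the unit-test harness, which stands in for the real coordinator tablet and plans every proposed transaction immediately. A sketch of that stand-in behaviour, assuming a single-tablet test setup:

    #include <cstdint>

    // The fake coordinator just assigns the next step to any proposed tx
    // and sends the plan straight back to schemeshard.
    struct TFakeCoordinatorSketch {
        uint64_t Step = 5000000;
        uint64_t Plan(uint64_t /*txId*/) { return ++Step; }  // txId 1 -> step 5000001
    };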
2025-03-28T09:32:06.990206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:06.990352Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.990410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:06.990853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.990918Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:06.991014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:06.991049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:06.991084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:06.991113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:06.991146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:06.991181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:06.991213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:06.991245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:06.991299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:06.991333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:06.991379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:06.993230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:06.993323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:06.993359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
chemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:07.309494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary is empty, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-28T09:32:07.309749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-28T09:32:07.309786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-28T09:32:07.310136Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-03-28T09:32:07.310239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-28T09:32:07.310291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:638:2560] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } TestModificationResults wait txId: 106 2025-03-28T09:32:07.312617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\001" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:07.312752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.313791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2025-03-28T09:32:07.315523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'01\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:07.315656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '01' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-28T09:32:07.315850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-03-28T09:32:07.315885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-03-28T09:32:07.316234Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-03-28T09:32:07.316302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-28T09:32:07.316324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 106: satisfy waiter [1:645:2567] TestWaitNotification: OK eventTxId 106 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } TestModificationResults wait txId: 107 2025-03-28T09:32:07.318547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "UUUUUUUUUUUUUUUT" } } } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:07.318719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 107:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.318865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 107:1, propose status:StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', at schemeshard: 72057594046678944 2025-03-28T09:32:07.320510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 107, response: Status: StatusInvalidParameter Reason: "Split boundary less or equals FromBound of partition: \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\' <= \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\'" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:07.320621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 107, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary less or equals FromBound of partition: '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54' <= '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54', operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2025-03-28T09:32:07.320828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2025-03-28T09:32:07.320870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2025-03-28T09:32:07.321202Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2025-03-28T09:32:07.321257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2025-03-28T09:32:07.321279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:652:2574] TestWaitNotification: OK eventTxId 107 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } TestModificationResults wait txId: 108 2025-03-28T09:32:07.323439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\255" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:07.323579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 108:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.323713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 
108:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2025-03-28T09:32:07.325305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AD\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:07.325415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 108, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AD' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2025-03-28T09:32:07.325613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2025-03-28T09:32:07.325641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2025-03-28T09:32:07.325964Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2025-03-28T09:32:07.326020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2025-03-28T09:32:07.326044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:659:2581] TestWaitNotification: OK eventTxId 108 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } TestModificationResults wait txId: 109 2025-03-28T09:32:07.328021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:07.328180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 109:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.328330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), at schemeshard: 72057594046678944 2025-03-28T09:32:07.329854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "Split boundary greate or equals ToBound of partition: \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' >= \'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\' (FromBound is \'55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54\')" TxId: 109 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:07.329968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 109, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split boundary greate or equals ToBound of partition: 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' >= 'AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9' (FromBound is '55 55 55 55 55 55 55 55 55 55 55 55 55 55 55 54'), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2025-03-28T09:32:07.330164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2025-03-28T09:32:07.330190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2025-03-28T09:32:07.330485Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2025-03-28T09:32:07.330564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2025-03-28T09:32:07.330593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:666:2588] TestWaitNotification: OK eventTxId 109 >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:06.713738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:06.713859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:06.713905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:06.713944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:06.714576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:06.714628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:06.714725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:06.714809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:06.715905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:06.806230Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
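The five rejected proposals above (txIds 105 through 109) pin down the validation applied to a split boundary: it must be non-empty, strictly greater than the partition's FromBound, and strictly less than its ToBound, with bounds compared as raw bytes. A minimal sketch of that check follows; TPartitionRange and ValidateSplitBoundary are illustrative names, not the actual schemeshard API, and the message strings are quoted from the log as-is (including their original spelling).

    #include <optional>
    #include <string>

    // Illustrative stand-in for a partition's key range. An unset bound means
    // the partition starts at the beginning (FromBound) or runs to the end
    // (ToBound) of the key space.
    struct TPartitionRange {
        std::optional<std::string> FromBound;
        std::optional<std::string> ToBound;
    };

    // Mirrors the three StatusInvalidParameter reasons logged for txIds 105-109.
    // std::string comparison is lexicographic on raw bytes, which matches the
    // hex dumps in the log: '01' <= '55 55 .. 54' and 'AD' >= 'AA AA .. A9'.
    std::optional<std::string> ValidateSplitBoundary(const TPartitionRange& range,
                                                     const std::string& boundary) {
        if (boundary.empty()) {
            return "Split boundary is empty";                              // txId 105
        }
        if (range.FromBound && boundary <= *range.FromBound) {
            return "Split boundary less or equals FromBound of partition"; // 106, 107
        }
        if (range.ToBound && boundary >= *range.ToBound) {
            return "Split boundary greate or equals ToBound of partition"; // 108, 109
        }
        return std::nullopt; // strictly inside (FromBound, ToBound): accepted
    }

Under this rule the only boundaries accepted for partition 1 lie strictly between 0x55..54 and 0xAA..A9; the successful split later in this output uses "\177" (0x7F), which sits inside that interval.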
2025-03-28T09:32:06.806296Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:06.821301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:06.821549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:06.821753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:06.833208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:06.833412Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:06.835445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.835638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:06.840944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.846504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:06.846567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.847205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:06.847266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:06.847321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:06.847413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.852953Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:06.953372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:06.954491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.955612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:06.956927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:06.957000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.960043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.960170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
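The txId 1 bootstrap below walks the schemeshard suboperation state machine, logged as "Change state for txid 1:0 2 -> 3", "3 -> 128" and "128 -> 240". A compact sketch of that progression; the enumerator names are inferred from the surrounding ProgressState messages (TCreateParts, TConfigureParts, TPropose, TDone) and may not match the real definitions. Only the numeric codes are taken from the log.

    #include <cstdint>

    // State codes as they appear in the "Change state for txid ..." lines.
    enum class ETxState : uint8_t {
        CreateParts    = 2,   // create shards ("no shards to create" here)
        ConfigureParts = 3,   // push configuration to the affected tablets
        Propose        = 128, // propose to the coordinator, wait for the plan step
        Done           = 240, // publish paths, notify subscribers, drop tx state
    };

    // Advance one step in the order observed for txId 1 in this log.
    ETxState Next(ETxState s) {
        switch (s) {
            case ETxState::CreateParts:    return ETxState::ConfigureParts;
            case ETxState::ConfigureParts: return ETxState::Propose;
            case ETxState::Propose:        return ETxState::Done; // on TEvOperationPlan
            case ETxState::Done:           return ETxState::Done; // terminal
        }
        return ETxState::Done; // unreachable
    }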
2025-03-28T09:32:06.960313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.960439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:06.960470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:06.960503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:06.962370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.962414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:06.962441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:06.963981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.964021Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.964072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:06.964114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:06.972822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:06.974895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:06.975039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:06.975830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.975959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:06.976039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:06.976359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:06.976429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:06.976592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:06.976650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:06.978333Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:06.978381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:06.978544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.978606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:06.979030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.979074Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:06.979167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:06.979195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:06.979228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:06.979254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:06.979284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:06.979312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:06.979337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:06.979373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:06.979428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:06.979463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:06.979506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:06.980923Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:06.981010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:06.981045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
y Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Inactive ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active ParentPartitionIds: 1 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\177" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:07.428337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with 
owners number: 0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:756:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:759:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:760:2058] recipient: [1:758:2665] Leader for TabletID 72057594046678944 is [1:761:2666] sender: [1:762:2058] recipient: [1:758:2665] 2025-03-28T09:32:07.471018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:07.471109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:07.471140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:07.471173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:07.471206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:07.471247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:07.471303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:07.471366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:07.471623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:07.485349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:07.486508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:07.486631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:07.486802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:07.486850Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:07.486945Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:07.487539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-03-28T09:32:07.487607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:32:07.487639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:32:07.487705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.487756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 
72057594046678944 2025-03-28T09:32:07.487942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:32:07.488217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.488297Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-28T09:32:07.488444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.488521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.488605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-03-28T09:32:07.488646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:32:07.488680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:32:07.488701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-03-28T09:32:07.488721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:32:07.488784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.488862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.489048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-03-28T09:32:07.489194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:32:07.489462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.489555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.489962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.490042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.490359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.490432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.490528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.490730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.490813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.491048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 
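The restart in progress here (new leader [1:761:2666]) shows the recovery pattern: a single TTxInit re-reads every persisted table (Paths, Shards, ChannelsBinding, and so on), re-attaches children to parents, and re-counts path references before the schemeshard serves requests again. A schematic sketch of that shape, with all names hypothetical; only the logged "TTxInit for <Table>, read records: N" pattern is real.

    #include <cstdio>
    #include <functional>
    #include <string>
    #include <vector>

    // One restore step per persisted table; Load() reads the rows, rebuilds
    // the corresponding in-memory state, and returns how many records it saw.
    struct TRestoreStep {
        std::string Table;
        std::function<size_t()> Load;
    };

    void RunInit(const std::vector<TRestoreStep>& steps) {
        for (const auto& step : steps) {
            size_t n = step.Load();
            std::printf("TTxInit for %s, read records: %zu\n",
                        step.Table.c_str(), n);
        }
    }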
2025-03-28T09:32:07.491236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.491356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.491407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.491444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.496533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:07.496588Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:07.497167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:07.497219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:07.497256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:07.498103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:06.718116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:06.718232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:06.718300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:06.718360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:06.718408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:06.718457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:06.718538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:06.718624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:06.718998Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:06.808637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:06.808705Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:06.824907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:06.825221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:06.825404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:06.832957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:06.833167Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:06.835487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.835717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:06.841095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.846645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:06.846891Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.847241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:06.847315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:06.847375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:06.847474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.854502Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:07.001273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:07.001537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.001788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:07.002051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:07.002117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.004605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:07.004757Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:07.004949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.005012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:07.005052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:07.005097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:07.007116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.007179Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:07.007219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:07.008972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.009025Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.009065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:07.009128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:07.013246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:07.015043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:07.015211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:07.016374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:07.016506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:07.016556Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:07.016823Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:07.016888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:07.017066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:07.017149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:07.019146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:07.019210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:07.019398Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:07.019449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:07.019878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.019930Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:07.020055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:07.020106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:07.020153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:07.020190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:07.020228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:07.020280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:07.020340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:07.020376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:07.020440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:07.020497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:07.020537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:07.022578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:07.022703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:07.022746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
678944 is [2:122:2148] sender: [2:639:2058] recipient: [2:15:2062] Leader for TabletID 72057594046678944 is [2:122:2148] sender: [2:640:2058] recipient: [2:638:2560] Leader for TabletID 72057594046678944 is [2:641:2561] sender: [2:642:2058] recipient: [2:638:2560] 2025-03-28T09:32:07.853523Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:07.853648Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:07.853686Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:07.853720Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:07.853755Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:07.853784Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:07.853836Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:07.853906Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:07.854186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:07.870806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:07.872317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:07.872480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:07.872597Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:07.872635Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:07.872834Z node 2 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:07.873465Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-03-28T09:32:07.873550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:32:07.873600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:32:07.873662Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.873723Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.873917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 
72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:32:07.874161Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.874243Z node 2 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-28T09:32:07.874412Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.874490Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.874589Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-03-28T09:32:07.874625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:32:07.874653Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:32:07.874676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-03-28T09:32:07.874733Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:32:07.874812Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.874883Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.875076Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-03-28T09:32:07.875273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:32:07.875578Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.875691Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.876104Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.876172Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.876373Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.876452Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.876538Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.876688Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.876766Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.876936Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.877144Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 
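Every TestWaitNotification exchange earlier in this output (txIds 105 through 109) follows the same handshake: the test sends EvNotifyTxCompletion, and because the schemeshard no longer tracks the txId (the rejected propose left no transaction state behind), it answers with EvNotifyTxCompletionResult immediately, logging the "unknown transaction" warning, after which the subscriber satisfies its waiter. A simplified sketch of the schemeshard side, with the actor plumbing omitted and the function names illustrative:

    #include <cstdint>
    #include <functional>
    #include <map>
    #include <vector>

    // Transactions still in flight, each with subscribers to notify on completion.
    std::map<uint64_t, std::vector<std::function<void()>>> InFlight;

    // On EvNotifyTxCompletion: reply at once for an unknown (already finished)
    // transaction, as in the WARN lines above; otherwise park the subscriber.
    void HandleNotifyTxCompletion(uint64_t txId, std::function<void()> reply) {
        auto it = InFlight.find(txId);
        if (it == InFlight.end()) {
            reply(); // EvNotifyTxCompletionResult right away
            return;
        }
        it->second.push_back(std::move(reply)); // notified when the tx completes
    }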
2025-03-28T09:32:07.877297Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.877352Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.877394Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.884318Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:07.884387Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:07.884651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:07.884722Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:07.884762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:07.884962Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:641:2561] sender: [2:699:2058] recipient: [2:15:2062] 2025-03-28T09:32:07.938329Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:32:07.938601Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 313us result status StatusSuccess 2025-03-28T09:32:07.939210Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 1 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 1024 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 
GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:06.713746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:06.713858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:06.713896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:06.713941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:06.714587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:06.714619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:06.714680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:06.714793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
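The key ranges in the DescribeScheme result above are not arbitrary: splitting a 16-byte key space evenly into three partitions puts the cut points at one third and two thirds, which is exactly 0x5555..5554 (ASCII "UUUUUUUUUUUUUUUT", 'U' = 0x55, 'T' = 0x54) and 0xAAAA..AAA9 ("\252" repeated with a final "\251"). The sketch below reproduces those byte patterns; the formula floor(i * 2^128 / n) - 1 is inferred from the logged values, not quoted from YDB's code.

    #include <array>
    #include <cstdint>

    // Computes floor(i * 2^128 / n) - 1 as a 16-byte big-endian key via
    // schoolbook long division in base 256.
    std::array<uint8_t, 16> SplitBound(unsigned i, unsigned n) {
        std::array<uint8_t, 16> key{};
        unsigned rem = i; // numerator is i followed by sixteen zero bytes
        for (auto& byte : key) {
            unsigned cur = rem * 256;
            byte = static_cast<uint8_t>(cur / n);
            rem = cur % n;
        }
        for (int p = 15; p >= 0; --p) { // subtract 1, borrowing right to left
            if (key[p]-- != 0) {
                break; // no further borrow needed
            }
        }
        return key;
    }

    // SplitBound(1, 3) yields 55 55 .. 55 54 and SplitBound(2, 3) yields
    // AA AA .. AA A9, matching the FromBound/ToBound values printed above.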
2025-03-28T09:32:06.715862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:06.799136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:06.799194Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:06.819451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:06.819726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:06.819973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:06.832736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:06.832897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:06.835366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.835601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:06.841025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.846532Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:06.846606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.847204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:06.847269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:06.847365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:06.847442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.853439Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:06.953388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:06.954497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.955520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:06.956949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:06.957015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.960060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-03-28T09:32:06.960184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:06.960359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.960437Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:06.960470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:06.960510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:06.962475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.962526Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:06.962560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:06.964215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.964262Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.964302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:06.964352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:06.979409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:06.981171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:06.981333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:06.982334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.982442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:06.982484Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:06.982743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:06.982792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:06.982984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:06.983059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:06.984910Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:06.984977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:06.985149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.985190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:06.985566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.985610Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:06.985698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:06.985733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:06.985772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:06.985800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:06.985833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:06.985870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:06.985901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:06.985933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:06.986010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:06.986069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:06.986104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:06.987974Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:06.988116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:06.988158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
essage# TabletId: 72075186233409548 TxId: 104 Status: OK 2025-03-28T09:32:08.259153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-03-28T09:32:08.259179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-28T09:32:08.260117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-28T09:32:08.260255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-28T09:32:08.260293Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-28T09:32:08.260536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 104, at schemeshard: 72057594046678944 2025-03-28T09:32:08.260562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2025-03-28T09:32:08.260589Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 104, at schemeshard: 72057594046678944 2025-03-28T09:32:08.293943Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 150, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:08.294049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 AckTo { RawX1: 0 RawX2: 0 } } Step: 150 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:08.294090Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvOperationPlan, step: 150, at tablet: 72057594046678944 2025-03-28T09:32:08.294151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-28T09:32:08.311955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-03-28T09:32:08.312102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-03-28T09:32:08.312166Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 150 2025-03-28T09:32:08.312205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.312237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-03-28T09:32:08.312357Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-03-28T09:32:08.312473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:32:08.312518Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:32:08.314331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.314657Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:08.314726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:32:08.314848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:32:08.314959Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:08.314989Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-03-28T09:32:08.315021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 104, path id: 3 2025-03-28T09:32:08.315062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.315090Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-28T09:32:08.315186Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:32:08.315211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:32:08.315237Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:32:08.315262Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:32:08.315291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-28T09:32:08.315320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:32:08.315347Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:32:08.315368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:32:08.315454Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-28T09:32:08.315485Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2025-03-28T09:32:08.315513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-28T09:32:08.315531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-28T09:32:08.316241Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:32:08.316318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 
2025-03-28T09:32:08.316363Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:32:08.316388Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-28T09:32:08.316413Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:32:08.316962Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:32:08.317007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:32:08.317022Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:32:08.317039Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-28T09:32:08.317055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:32:08.317094Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2025-03-28T09:32:08.317122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:406:2373] 2025-03-28T09:32:08.320936Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:32:08.321072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:32:08.321129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:32:08.321156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:542:2477] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } TestModificationResults wait txId: 105 2025-03-28T09:32:08.328174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Merge { Partition: 0 AdjacentPartition: 2 } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:08.328320Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.328448Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, at schemeshard: 72057594046678944 2025-03-28T09:32:08.329731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "You cannot merge non-contiguous 
partitions" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:08.329848Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: You cannot merge non-contiguous partitions, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-28T09:32:08.330018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-28T09:32:08.330043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-28T09:32:08.330343Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-03-28T09:32:08.330405Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-28T09:32:08.330429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:638:2562] TestWaitNotification: OK eventTxId 105
>> Viewer::Plan2SvgBad [GOOD]
>> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:06.713717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:06.713839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:06.713889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:06.713949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:06.714565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:06.714602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:06.714706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:06.714800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:06.716004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:06.811330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:06.811402Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:06.827718Z node 1 :FLAT_TX_SCHEMESHARD
DEBUG: TxInitSchema.Complete 2025-03-28T09:32:06.828004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:06.828206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:06.834405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:06.834594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:06.835392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.835636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:06.841097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.846638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:06.846729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.847224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:06.847301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:06.847361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:06.847453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.854121Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:07.012388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:07.012624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.012847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:07.013096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:07.013154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.015307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:07.015468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:07.015634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-03-28T09:32:07.015695Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:07.015734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:07.015779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:07.017607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.017659Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:07.017711Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:07.019387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.019439Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.019481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:07.019536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:07.023492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:07.025342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:07.025519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:07.026552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:07.026671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:07.026748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:07.027033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:07.027094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:07.027269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:07.027345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:07.029306Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:07.029374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:07.029562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:07.029615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:07.030011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.030068Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:07.030183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:07.030223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:07.030262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:07.030297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:07.030335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:07.030375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:07.030408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:07.030444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:07.030505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:07.030561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:07.030614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:07.032588Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:07.032709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:07.032753Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
2057594046678944 2025-03-28T09:32:08.422446Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose ProgressState, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.422545Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2025-03-28T09:32:08.422712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72075186233409546 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 151 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409546 2025-03-28T09:32:08.424093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:0 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:3 msg type: 269550082 2025-03-28T09:32:08.424171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 0:105 msg type: 269090816 2025-03-28T09:32:08.424245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72075186233409546 2025-03-28T09:32:08.424479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-03-28T09:32:08.424615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409548 TxId: 105 Status: OK 2025-03-28T09:32:08.424688Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409548 TxId: 105 Status: OK 2025-03-28T09:32:08.424735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2025-03-28T09:32:08.424778Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-28T09:32:08.426264Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-28T09:32:08.426493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-28T09:32:08.426544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-28T09:32:08.426986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-03-28T09:32:08.427035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-03-28T09:32:08.427080Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-03-28T09:32:08.460711Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:08.460845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at 
schemeshard: 72057594046678944 2025-03-28T09:32:08.460907Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2025-03-28T09:32:08.460960Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-28T09:32:08.487015Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-03-28T09:32:08.487192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-03-28T09:32:08.487268Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-03-28T09:32:08.487323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.487362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-03-28T09:32:08.487562Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-03-28T09:32:08.487742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:32:08.489975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.490370Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:08.490418Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:32:08.490723Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:08.490785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-03-28T09:32:08.491359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.491409Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-03-28T09:32:08.491515Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-28T09:32:08.491552Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:32:08.491597Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-28T09:32:08.491630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:32:08.491669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-03-28T09:32:08.491713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:32:08.491756Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-28T09:32:08.491791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-28T09:32:08.491931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-28T09:32:08.491993Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-03-28T09:32:08.492050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-03-28T09:32:08.492714Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:32:08.492825Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:32:08.492864Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:32:08.492900Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-28T09:32:08.492957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:32:08.493037Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-03-28T09:32:08.493072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:406:2373] 2025-03-28T09:32:08.496318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:32:08.496412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-28T09:32:08.496471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:667:2590] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2025-03-28T09:32:08.505822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:08.506038Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.506223Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 2025-03-28T09:32:08.508172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 
72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:08.508332Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-28T09:32:08.508637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-03-28T09:32:08.508685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-03-28T09:32:08.509066Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-03-28T09:32:08.509162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-28T09:32:08.509200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:758:2669] TestWaitNotification: OK eventTxId 106
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:07.832128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:07.832210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:07.832240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:07.832267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:07.832333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:07.832361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:07.832417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:07.832486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:07.832753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:07.910270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:07.910339Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:07.924357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:07.924616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-28T09:32:07.924810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:07.930474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:07.930717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:07.931317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:07.931540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:07.933437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:07.934721Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:07.934774Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:07.934932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:07.934995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:07.935040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:07.935139Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.941925Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:08.051837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:08.052113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.052372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:08.052639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:08.052700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.055404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:08.055582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:08.055802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.055878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: 
TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:08.055931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:08.055971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:08.058540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.058610Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:08.058655Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:08.061055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.061114Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.061169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:08.061234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:08.065895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:08.070193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:08.070374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:08.071546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:08.071709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:08.071767Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:08.072142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:08.072227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:08.072403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:08.072486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:08.074679Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-03-28T09:32:08.074754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:08.074929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:08.074985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:08.075387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.075429Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:08.075525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:08.075562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:08.075600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:08.075627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:08.075698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:08.075736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:08.075767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:08.075798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:08.075860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:08.075893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:08.075937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:08.077899Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:08.078010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:08.078034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
} } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:08.563095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:738:2058] recipient: [1:102:2137] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:741:2058] recipient: [1:15:2062] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:742:2058] recipient: [1:740:2653] Leader for TabletID 72057594046678944 is [1:743:2654] sender: [1:744:2058] recipient: [1:740:2653] 2025-03-28T09:32:08.605526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:08.605639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:08.605682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:08.605717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:08.605749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:08.605778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:08.605843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:08.605906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:08.606192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:08.622121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:08.623451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:08.623621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:08.623759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:08.623790Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:08.623992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:08.624698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 3, at schemeshard: 72057594046678944 2025-03-28T09:32:08.624791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_1, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:32:08.624844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 2], parent name: USER_1, child name: Topic1, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:32:08.624957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at 
schemeshard: 72057594046678944 2025-03-28T09:32:08.625021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.625221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:32:08.625484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.625585Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-28T09:32:08.625767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.625867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.625993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2025-03-28T09:32:08.626035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:32:08.626061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:32:08.626079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-03-28T09:32:08.626097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:32:08.626195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.626283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.626500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2025-03-28T09:32:08.626675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:32:08.627170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.627291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.627671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.627750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.627981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.628076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.628186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.628375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.628473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 
72057594046678944 2025-03-28T09:32:08.628650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.628867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.629024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.629081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.629144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:08.637153Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:08.637229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:08.637354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:08.637399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:08.637442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:08.637654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:743:2654] sender: [1:800:2058] recipient: [1:15:2062] 2025-03-28T09:32:08.701416Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:32:08.701757Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 320us result status StatusSuccess 2025-03-28T09:32:08.702481Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } 
YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3534, MsgBus: 28997 2025-03-28T09:31:55.369845Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790775296858613:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:55.371155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011b0/r3tmp/tmpPCAw6D/pdisk_1.dat 2025-03-28T09:31:55.708412Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3534, node 1 2025-03-28T09:31:55.767554Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:55.767701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:55.770595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:55.792210Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:55.792241Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:55.792250Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:55.792385Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28997 TClient is connected to server localhost:28997 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:56.275540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:56.300418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:56.456410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:56.655079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:56.721628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:58.259942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790788181762134:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:58.260099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:58.550131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.574259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.597738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.620741Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.645000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.673074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:58.748110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790788181762648:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:58.748176Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:58.748190Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790788181762653:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:58.751490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:58.760919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790788181762655:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:58.818930Z node 1 :TX_PROXY ERROR: Actor# [1:7486790788181762709:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:59.855103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.924561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.959705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:32:00.367492Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790775296858613:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:00.367586Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7733, MsgBus: 26435 2025-03-28T09:32:02.825425Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790807570413719:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:02.825506Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0011b0/r3tmp/tmp326E9b/pdisk_1.dat 2025-03-28T09:32:02.919507Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7733, node 2 2025-03-28T09:32:02.953826Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:02.953910Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:02.956993Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:02.998630Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:02.998666Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:02.998676Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:02.998831Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26435 TClient is connected to server localhost:26435 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:03.435943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:03.442917Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:32:03.452454Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:03.500419Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:03.634451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:03.711814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:05.611694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790820455317367:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:05.611771Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:05.641739Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:32:05.669808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:32:05.697694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:05.726214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:05.750653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:32:05.777934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:32:05.814801Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790820455317874:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:05.814892Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:05.814893Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790820455317879:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:05.817821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:32:05.825877Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790820455317881:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:32:05.879431Z node 2 :TX_PROXY ERROR: Actor# [2:7486790820455317934:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:06.623539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:32:06.667356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:32:06.705502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:32:07.825789Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790807570413719:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:07.825848Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TConsoleTests::TestAttributesExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotas >> TConsoleTests::TestRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRegisterComputationalUnitsForPending ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:06.713728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:06.713843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:06.713882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:06.713932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:06.714577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:06.714622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:06.714701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:06.714776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 
604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:06.715842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:06.797778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:06.797855Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:06.819443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:06.819718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:06.819915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:06.832375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:06.832642Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:06.835368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.835605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:06.840987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.846410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:06.846466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.847163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:06.847201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:06.847254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:06.847378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.852620Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:06.997337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:06.997498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.997655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:06.997817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:06.997851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.999583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.999685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:06.999803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.999844Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:06.999885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:06.999944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:07.001560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.001603Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:07.001629Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:07.002895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.002926Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.002955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:07.003002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:07.005583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:07.006977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:07.007116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:07.007870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:07.007960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:07.007992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:07.008202Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:07.008244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at 
tablet# 72057594046678944 2025-03-28T09:32:07.008360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:07.008410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:07.009827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:07.009872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:07.009977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:07.010013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:07.010265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:07.010293Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:07.010353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:07.010379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:07.010413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:07.010443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:07.010471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:07.010498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:07.010521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:07.010545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:07.010599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:07.010656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:07.010719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:07.012219Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:07.012309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:07.012363Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
arts: 1/1 2025-03-28T09:32:08.938749Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-28T09:32:08.938788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:32:08.938832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-03-28T09:32:08.938876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:32:08.938920Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-28T09:32:08.938957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-28T09:32:08.939096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-28T09:32:08.939138Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-03-28T09:32:08.939175Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-03-28T09:32:08.940289Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:32:08.940371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:32:08.940403Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:32:08.940448Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-28T09:32:08.940489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:32:08.940570Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-03-28T09:32:08.944784Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-28T09:32:08.952201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-28T09:32:08.952250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-28T09:32:08.952685Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-03-28T09:32:08.952767Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-28T09:32:08.952808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:753:2666] TestWaitNotification: OK eventTxId 105 2025-03-28T09:32:09.520926Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:09.521183Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe 
pathId 3 took 280us result status StatusSuccess 2025-03-28T09:32:09.521673Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:09.594051Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:32:09.594345Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 327us result status StatusSuccess 2025-03-28T09:32:09.594962Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 
72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } TestModificationResults wait txId: 106 2025-03-28T09:32:09.598122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:09.598347Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2025-03-28T09:32:09.598474Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046678944 
2025-03-28T09:32:09.600791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:09.600952Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2025-03-28T09:32:09.601244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2025-03-28T09:32:09.601286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2025-03-28T09:32:09.601844Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2025-03-28T09:32:09.601933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2025-03-28T09:32:09.601967Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:768:2680] TestWaitNotification: OK eventTxId 106 >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18808, MsgBus: 64239 2025-03-28T09:31:56.318903Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790781318119576:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:56.318950Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001194/r3tmp/tmp7aMi4G/pdisk_1.dat 2025-03-28T09:31:56.681381Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:56.695002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:56.695113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:56.697348Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18808, node 1 2025-03-28T09:31:56.765375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:56.765399Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:56.765411Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:56.765545Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64239 TClient is connected to server localhost:64239 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:57.234063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.262209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.397673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:57.558086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:31:57.615860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.090124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790794203023109:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.090236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.410522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.440510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.463680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.487632Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.510636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.579103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:31:59.617032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790794203023626:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.617098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.617195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790794203023631:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:59.620639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:31:59.628920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790794203023633:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:31:59.732348Z node 1 :TX_PROXY ERROR: Actor# [1:7486790794203023687:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:00.735851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:32:00.772457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:32:00.806086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:32:01.319314Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790781318119576:2193];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:01.319363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 31752, MsgBus: 11313 2025-03-28T09:32:03.727896Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790809479020469:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:03.727960Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001194/r3tmp/tmpz8whZU/pdisk_1.dat 2025-03-28T09:32:03.825422Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31752, node 2 2025-03-28T09:32:03.871036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:03.871166Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:03.874913Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:03.901486Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:03.901504Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:03.901509Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:03.901588Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11313 TClient is connected to server localhost:11313 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:04.329750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:04.345936Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:04.402491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:04.547137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:04.605618Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:06.552918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790822363924138:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:06.553024Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:06.621560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:32:06.654627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:32:06.686010Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:06.717105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:06.748766Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:32:06.781636Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:32:06.819578Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790822363924653:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:06.819709Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790822363924658:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:06.819773Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:06.822643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:32:06.831832Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790822363924660:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:32:06.906847Z node 2 :TX_PROXY ERROR: Actor# [2:7486790822363924713:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:07.631002Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:32:07.703568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:32:07.744406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:32:08.728396Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790809479020469:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:08.728460Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TopicAutoscaling::ControlPlane_CreateAlterDescribe [GOOD] >> TopicAutoscaling::ControlPlane_DisableAutoPartitioning |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> Viewer::TabletMerging [GOOD] >> Viewer::StorageGroupOutputWithoutFilterNoDepends >> ObjectStorageListingTest::FilterListing >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> ObjectStorageListingTest::ListingNoFilter >> DataShardVolatile::DistributedWriteThenDropTable [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:09.238822Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:09.238931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:09.238972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:09.239021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:09.239070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:09.239097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:09.239153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:09.239240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:09.239583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:09.324952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:09.325024Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:09.339917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:09.340210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:09.340371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:09.345884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:09.346083Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:09.346749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:09.346958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:09.348704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:09.349907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:09.349986Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:09.350220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:09.350288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:09.350338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:09.350422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:09.356373Z node 1 :HIVE INFO: 
[72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:09.458058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:09.458232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:09.458404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:09.458582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:09.458623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:09.460462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:09.460560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:09.460687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:09.460723Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:09.460750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:09.460776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:09.462453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:09.462506Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:09.462549Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:09.464194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:09.464241Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:09.464283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:09.464338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:09.467696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:09.469194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg 
operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:09.469320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:09.470153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:09.470259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:09.470295Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:09.470521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:09.470574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:09.470725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:09.470790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:09.472294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:09.472347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:09.472498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:09.472539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:09.472854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:09.472899Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:09.472987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:09.473015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:09.473052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:09.473082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:09.473110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:09.473139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:09.473164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:09.473190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 
1:0 2025-03-28T09:32:09.473244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:09.473290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:09.473316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:09.474654Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:09.474763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:09.474794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... iber for txId 105: send EvNotifyTxCompletion 2025-03-28T09:32:10.676262Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-28T09:32:10.676623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2025-03-28T09:32:10.676664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2025-03-28T09:32:10.676705Z node 2 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2025-03-28T09:32:10.710256Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 200, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:10.710397Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:10.710456Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 200, at tablet: 72057594046678944 2025-03-28T09:32:10.710504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-28T09:32:10.742078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2025-03-28T09:32:10.742329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-03-28T09:32:10.742410Z node 2 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 200 2025-03-28T09:32:10.742466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:10.742510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-03-28T09:32:10.742718Z 
node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-03-28T09:32:10.742949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:32:10.748082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:32:10.748762Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:10.748823Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:32:10.749135Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:10.749195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-03-28T09:32:10.749294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:32:10.749347Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-03-28T09:32:10.749460Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-28T09:32:10.749500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:32:10.749542Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-28T09:32:10.749575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:32:10.749620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-03-28T09:32:10.749669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:32:10.749714Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-28T09:32:10.749751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-28T09:32:10.749898Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-28T09:32:10.749953Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2025-03-28T09:32:10.749992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-03-28T09:32:10.750867Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:32:10.750953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:32:10.750983Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:32:10.751017Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-28T09:32:10.751049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount 
reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:32:10.751116Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2025-03-28T09:32:10.751149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:406:2373] 2025-03-28T09:32:10.754546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:32:10.754630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-28T09:32:10.754664Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:673:2594] TestWaitNotification: OK eventTxId 105 2025-03-28T09:32:10.760116Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:32:10.760312Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 240us result status StatusSuccess 2025-03-28T09:32:10.760830Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 
PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:06.713746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:06.713849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:06.713890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:06.713960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:06.714581Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:06.714634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:06.714731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:06.714818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:06.715912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:06.802014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:06.802083Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:06.819410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:06.819671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:06.819887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:06.832959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:06.833184Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:06.835383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.835637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:06.840979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.846427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:06.846484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.847175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:06.847231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:06.847274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:06.847341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.853245Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:06.953389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:06.954502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.955529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:06.956936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:06.957009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.960063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.960186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:06.960376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.960454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:06.960497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:06.960538Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:06.962501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.962560Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:06.962615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:06.964301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.964345Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.964382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:06.964431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:06.967939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:06.969457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:06.970394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:06.971219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:06.971362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:06.971403Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:06.972615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:06.972681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:06.972851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:06.972914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:06.974730Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:06.974776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:06.974917Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:06.974954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:06.975246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:06.975281Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:06.975353Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:06.975381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:06.975423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:06.975447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:06.975472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:06.975510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:06.975550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:06.975582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:06.975628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:06.975667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:06.975694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:06.976991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:06.977084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:06.977116Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... AT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-03-28T09:32:10.015492Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 105:0 128 -> 240 2025-03-28T09:32:10.015633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:32:10.017163Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:32:10.017626Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:10.017657Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:32:10.017844Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:10.017877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:207:2209], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-03-28T09:32:10.017983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:32:10.018026Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 105:0 ProgressState 2025-03-28T09:32:10.018141Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-28T09:32:10.018183Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:32:10.018246Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-28T09:32:10.018281Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:32:10.018323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-03-28T09:32:10.018372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:32:10.018413Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-28T09:32:10.018445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-28T09:32:10.018600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-28T09:32:10.018643Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2025-03-28T09:32:10.018679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-03-28T09:32:10.019991Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:32:10.020094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 
2025-03-28T09:32:10.020130Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105
2025-03-28T09:32:10.020180Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3
2025-03-28T09:32:10.020221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4
2025-03-28T09:32:10.020314Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0
2025-03-28T09:32:10.023770Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105
TestModificationResult got TxId: 105, wait until txId: 105
TestWaitNotification wait txId: 105
2025-03-28T09:32:10.030416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion
2025-03-28T09:32:10.030461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105
2025-03-28T09:32:10.030814Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944
2025-03-28T09:32:10.030889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult
2025-03-28T09:32:10.030925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:759:2670]
TestWaitNotification: OK eventTxId 105
2025-03-28T09:32:10.610732Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:32:10.611013Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 333us result status StatusSuccess
2025-03-28T09:32:10.611646Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:32:10.684221Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944
2025-03-28T09:32:10.684464Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 282us result status StatusSuccess
2025-03-28T09:32:10.684940Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false BalancerTabletID: 72075186233409549 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>>>>> Verify partition 0
>>>>> Verify partition 1
>>>>> Verify partition 2
|92.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log}
|92.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log}
|92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest
|92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest
>> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink [GOOD]
>> DataShardVolatile::VolatileTxAbortedOnSplit
|92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest
|92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> TFlatTest::LargeProxyReply
>> TFlatTest::CrossRW
|92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest
>> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::List [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD]
>> CellsFromTupleTest::CellsFromTupleSuccess [GOOD]
>> CellsFromTupleTest::CellsFromTupleSuccessPg
>> TLocksTest::CK_Range_BrokenLock
>> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::Void [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::Void [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD]
>> ConvertTableDescription::StorageSettings [GOOD]
>> ConvertTableDescription::ColumnFamilies [GOOD]
>> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD]
>> CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD]
>> CellsFromTupleTest::CellsFromTupleFails [GOOD]
>> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD]
>> CompressionTests::Zstd [GOOD]
>> CompressionTests::Unsupported [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::DecimalType
>> TVPatchTests::FindingPartsWhenSeveralPartsExist
>> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD]
>> TopicAutoscaling::Simple_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::Simple_AutoscaleAwareSDK
|92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD]
|92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/spilling/unittest
|92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD]
>> TVPatchTests::FindingPartsWhenSeveralPartsExist [GOOD]
>> TVPatchTests::FindingPartsWithTimeout
|92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD]
|92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD]
|92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD]
|92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD]
|92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD]
>> TConsoleTests::TestDatabaseQuotas [GOOD]
>> TVPatchTests::FindingPartsWithTimeout [GOOD]
>> TConsoleTests::TestDatabaseQuotasBadOverallQuota
|92.2%| [TA] $(B)/ydb/core/kqp/ut/spilling/test-results/unittest/{meta.json ... results_accumulator.log}
|92.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/spilling/test-results/unittest/{meta.json ... results_accumulator.log}
|92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWithTimeout [GOOD]
Test command err:
Recv 65537
2025-03-28T09:32:14.676305Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z
Send NKikimr::TEvBlobStorage::TEvVGet
Recv NKikimr::TEvBlobStorage::TEvVGetResult
2025-03-28T09:32:14.677247Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1
2025-03-28T09:32:14.677318Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1 2] Status# OK
Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts
Recv NKikimr::TEvBlobStorage::TEvVPatchDiff
2025-03-28T09:32:14.677544Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes
2025-03-28T09:32:14.677613Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason#
Send NKikimr::TEvBlobStorage::TEvVPatchResult
2025-03-28T09:32:14.677672Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying;
Send NKikimr::TEvVPatchDyingRequest
Recv NKikimr::TEvVPatchDyingConfirm
Recv 65537
2025-03-28T09:32:14.946914Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z
Send NKikimr::TEvBlobStorage::TEvVGet
Recv NActors::TEvents::TEvWakeup
2025-03-28T09:32:14.957629Z node 2 :BS_VDISK_PATCH ERROR: {BSVSP11@skeleton_vpatch_actor.cpp:734} [0:1:0:0:0] TEvVPatch: the vpatch actor died due to a deadline, before receiving diff;
2025-03-28T09:32:14.957719Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR
Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts
2025-03-28T09:32:14.957839Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:727} [0:1:0:0:0] NotifySkeletonAboutDying;
Send NKikimr::TEvVPatchDyingRequest
Recv NKikimr::TEvVPatchDyingConfirm
>> TConsoleTests::TestRegisterComputationalUnitsForPending [GOOD]
>> TConsoleTests::TestNotifyOperationCompletion
>> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD]
>> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::SimpleBool [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::OptionalString [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::OptionalEmpty [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty2 [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::List [GOOD]
>> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD]
>> TColumnShardTestSchema::RebootHotTiersTtlWithStat
>> TFlatTest::CrossRW [GOOD]
>> TFlatTest::GetTabletCounters
|92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD]
|92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD]
>> TColumnShardTestSchema::HotTiersWithStat
>> ObjectStorageListingTest::FilterListing [GOOD]
>> TFlatTest::WriteSplitByPartialKeyAndRead
>> ObjectStorageListingTest::ListingNoFilter [GOOD]
>> TopicAutoscaling::CommitTopPast_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::ControlPlane_BackCompatibility
>> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1
>> TLocksTest::Range_IncorrectDot1
>> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::List [GOOD]
>> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD]
>> TFlatTest::RejectByPerShardReadSize
|92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter [GOOD]
Test command err:
2025-03-28T09:32:14.401850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-28T09:32:14.402387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:32:14.402463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001b03/r3tmp/tmporRaYW/pdisk_1.dat
2025-03-28T09:32:14.897976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-28T09:32:14.962064Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:32:15.009682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:32:15.014745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:32:15.030406Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:32:15.138433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-28T09:32:15.199101Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570]
2025-03-28T09:32:15.199362Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:32:15.258437Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:32:15.258586Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-03-28T09:32:15.261260Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-03-28T09:32:15.261363Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-03-28T09:32:15.261465Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-03-28T09:32:15.261914Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-03-28T09:32:15.262056Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-03-28T09:32:15.262132Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1
2025-03-28T09:32:15.272904Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-03-28T09:32:15.304822Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-03-28T09:32:15.305025Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-03-28T09:32:15.305216Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580]
2025-03-28T09:32:15.305264Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-03-28T09:32:15.305302Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-03-28T09:32:15.305344Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-28T09:32:15.305805Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-03-28T09:32:15.305903Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-03-28T09:32:15.306333Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-28T09:32:15.306410Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-03-28T09:32:15.306475Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-03-28T09:32:15.306523Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-28T09:32:15.306654Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0]
2025-03-28T09:32:15.306837Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-03-28T09:32:15.307111Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-03-28T09:32:15.307198Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-03-28T09:32:15.309026Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-28T09:32:15.319813Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-03-28T09:32:15.319936Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-03-28T09:32:15.472371Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0]
2025-03-28T09:32:15.477332Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-03-28T09:32:15.477420Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-28T09:32:15.478245Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-28T09:32:15.478319Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1
2025-03-28T09:32:15.478396Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888
2025-03-28T09:32:15.478667Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0
2025-03-28T09:32:15.478879Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1
2025-03-28T09:32:15.479764Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-28T09:32:15.479859Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1
2025-03-28T09:32:15.482510Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2
2025-03-28T09:32:15.484113Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-03-28T09:32:15.485385Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0
2025-03-28T09:32:15.485424Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-28T09:32:15.486004Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000}
2025-03-28T09:32:15.486058Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-28T09:32:15.486949Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-28T09:32:15.487000Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-03-28T09:32:15.487063Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888
2025-03-28T09:32:15.487155Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms
2025-03-28T09:32:15.487228Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0
2025-03-28T09:32:15.487664Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-28T09:32:15.491336Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-28T09:32:15.493702Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready
2025-03-28T09:32:15.493776Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888
2025-03-28T09:32:15.494048Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000
2025-03-28T09:32:15.523507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:32:15.524354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:32:15.524455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:32:15.534550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-28T09:32:15.541277Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-28T09:32:15.710014Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-28T09:32:15.719820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-28T09:32:15.813205Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:32:16.523254Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1gks1082dnnj4f0yded3k, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjVlZDM2MDQtNGZlMDYwNWItNmI4MTcxMDUtYTRiNWY0MTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:32:16.533932Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:852:2688], serverId# [1:853:2689], sessionId# [0:0:0]
2025-03-28T09:32:16.535121Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-03-28T09:32:16.552275Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-03-28T09:32:16.552429Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-28T09:32:16.561514Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:860:2695], serverId# [1:861:2696], sessionId# [0:0:0]
2025-03-28T09:32:16.561803Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0
2025-03-28T09:32:16.562064Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 3 common prefixes: 2
2025-03-28T09:32:16.562279Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:860:2695], serverId# [1:861:2696], sessionId# [0:0:0]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [GOOD]
Test command err:
2025-03-28T09:32:14.401874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-28T09:32:14.402387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:32:14.402465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001a93/r3tmp/tmpkanKX9/pdisk_1.dat
2025-03-28T09:32:14.897356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-28T09:32:14.957951Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:32:15.009636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:32:15.014706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:32:15.029752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:32:15.138744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-28T09:32:15.202909Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570]
2025-03-28T09:32:15.203271Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:32:15.255016Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:32:15.255172Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-03-28T09:32:15.258217Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-03-28T09:32:15.258327Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-03-28T09:32:15.258470Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-03-28T09:32:15.260930Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-03-28T09:32:15.261123Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-03-28T09:32:15.261219Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1
2025-03-28T09:32:15.272139Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-03-28T09:32:15.298021Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-03-28T09:32:15.299459Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-03-28T09:32:15.299640Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580]
2025-03-28T09:32:15.299678Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-03-28T09:32:15.299716Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-03-28T09:32:15.299753Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-28T09:32:15.301143Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-03-28T09:32:15.301254Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-03-28T09:32:15.301721Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-28T09:32:15.301795Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-03-28T09:32:15.301905Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-03-28T09:32:15.301958Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-28T09:32:15.302101Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0]
2025-03-28T09:32:15.302260Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-03-28T09:32:15.302662Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-03-28T09:32:15.302790Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-03-28T09:32:15.304891Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-28T09:32:15.315780Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-03-28T09:32:15.315904Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-03-28T09:32:15.478603Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0]
2025-03-28T09:32:15.484527Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-03-28T09:32:15.484632Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-28T09:32:15.485511Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-28T09:32:15.485592Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1
2025-03-28T09:32:15.485685Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888
2025-03-28T09:32:15.485978Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0
2025-03-28T09:32:15.486165Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1
2025-03-28T09:32:15.487127Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-28T09:32:15.487227Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1
2025-03-28T09:32:15.496115Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2
2025-03-28T09:32:15.496645Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0
2025-03-28T09:32:15.498350Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0
2025-03-28T09:32:15.498405Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-28T09:32:15.499454Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000}
2025-03-28T09:32:15.499534Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-28T09:32:15.500673Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-28T09:32:15.500731Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-03-28T09:32:15.500813Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888
2025-03-28T09:32:15.500905Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms
2025-03-28T09:32:15.500969Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0
2025-03-28T09:32:15.501060Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-28T09:32:15.505217Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-28T09:32:15.507942Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready
2025-03-28T09:32:15.508041Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888
2025-03-28T09:32:15.508328Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000
2025-03-28T09:32:15.523365Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:32:15.524263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:32:15.524367Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:32:15.534559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-28T09:32:15.541533Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-28T09:32:15.702134Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-28T09:32:15.706292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-28T09:32:15.789286Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:32:16.523309Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1gks09tat92aeftjb06xw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjkzODQ4NDMtNGJiNGZjMjUtNGRhYTIwYTAtYzA2YzQ4ZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:32:16.534080Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:852:2688], serverId# [1:853:2689], sessionId# [0:0:0]
2025-03-28T09:32:16.535190Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-03-28T09:32:16.552300Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-03-28T09:32:16.552455Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-28T09:32:16.561864Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:860:2695], serverId# [1:861:2696], sessionId# [0:0:0]
2025-03-28T09:32:16.562202Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0
2025-03-28T09:32:16.562495Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 1
2025-03-28T09:32:16.562781Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:860:2695], serverId# [1:861:2696], sessionId# [0:0:0]
2025-03-28T09:32:16.565722Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:866:2701], serverId# [1:867:2702], sessionId# [0:0:0]
2025-03-28T09:32:16.565976Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0
2025-03-28T09:32:16.566200Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 1 common prefixes: 1
2025-03-28T09:32:16.566538Z node 1 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [1:866:2701], serverId# [1:867:2702], sessionId# [0:0:0]
>> TopicAutoscaling::PartitionSplit_AutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK
>> Viewer::JsonStorageListingV2 [GOOD]
>> Viewer::JsonStorageListingV2GroupIdFilter
|92.3%| [TA] $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|92.3%| [TA] {RESULT} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TopicAutoscaling::PartitionSplit_PQv1 [GOOD]
>> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK
|92.3%| [TA] $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log}
|92.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log}
>> TopicAutoscaling::PartitionSplit_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK
>> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD]
>> BasicUsage::TWriteSession_WriteEncoded
>> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD]
>> TConsoleTests::TestDatabaseQuotasBadStorageQuota
>> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition
>> TLocksTest::Range_BrokenLock0
>> TopicAutoscaling::ReadingAfterSplitTest_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit
>> TFlatTest::LargeProxyReply [GOOD]
>> TFlatTest::LargeProxyReplyRW
|92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
>> Balancing::Balancing_OneTopic_TopicApi [GOOD]
>> Balancing::Balancing_OneTopic_PQv1
>> TExportToS3Tests::ShouldRestartOnScanErrors
>> TExportToS3Tests::RebootDuringAbortion
>> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed
>> DataShardVolatile::VolatileTxAbortedOnSplit [GOOD]
>> DataShardVolatile::VolatileTxAbortedOnDrop
>> TFlatTest::WriteSplitByPartialKeyAndRead [GOOD]
>> TFlatTest::WriteSplitAndReadFromFollower
>> DataShardVolatile::DistributedWriteThenCopyTable [GOOD]
>> DataShardVolatile::DistributedWriteThenBulkUpsert
>> TFlatTest::GetTabletCounters [GOOD]
>> TExportToS3Tests::DropSourceTableBeforeTransferring
>> TExportToS3Tests::RebootDuringCompletion
>> TopicAutoscaling::ControlPlane_DisableAutoPartitioning [GOOD]
>> TopicAutoscaling::ControlPlane_PauseAutoPartitioning
>> ReadIteratorExternalBlobs::NotExtBlobs [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_iterator/unittest >> ReadIteratorExternalBlobs::NotExtBlobs [GOOD]
Test command err:
2025-03-28T09:27:00.409018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-28T09:27:00.409468Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:27:00.409525Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00106e/r3tmp/tmpqV7WJX/pdisk_1.dat
2025-03-28T09:27:00.895805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-28T09:27:00.951164Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:27:01.006798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:27:01.008011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:27:01.022169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:27:01.136556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-28T09:27:01.218808Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot
2025-03-28T09:27:01.220025Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored
2025-03-28T09:27:01.220520Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570]
2025-03-28T09:27:01.220779Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:27:01.273516Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-03-28T09:27:01.274398Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:27:01.274532Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-03-28T09:27:01.276608Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888
2025-03-28T09:27:01.276709Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888
2025-03-28T09:27:01.276769Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888
2025-03-28T09:27:01.277238Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-03-28T09:27:01.277381Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-03-28T09:27:01.277492Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1
2025-03-28T09:27:01.288682Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-03-28T09:27:01.336813Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888
2025-03-28T09:27:01.337037Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params
2025-03-28T09:27:01.337186Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580]
2025-03-28T09:27:01.337228Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888
2025-03-28T09:27:01.337266Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme
2025-03-28T09:27:01.337301Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-28T09:27:01.337538Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-03-28T09:27:01.337664Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-03-28T09:27:01.337998Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888
2025-03-28T09:27:01.338092Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888
2025-03-28T09:27:01.338570Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-28T09:27:01.338627Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0
2025-03-28T09:27:01.338955Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888
2025-03-28T09:27:01.339024Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations
2025-03-28T09:27:01.339061Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888
2025-03-28T09:27:01.339095Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0
2025-03-28T09:27:01.339148Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888
2025-03-28T09:27:01.339303Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-28T09:27:01.339340Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-28T09:27:01.339384Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0]
2025-03-28T09:27:01.339452Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574]
2025-03-28T09:27:01.339491Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction
2025-03-28T09:27:01.339629Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888
2025-03-28T09:27:01.339883Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx
2025-03-28T09:27:01.339964Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1
2025-03-28T09:27:01.340079Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888
2025-03-28T09:27:01.340131Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts
2025-03-28T09:27:01.340166Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx
2025-03-28T09:27:01.340203Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx
2025-03-28T09:27:01.340238Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-03-28T09:27:01.340545Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts
2025-03-28T09:27:01.340590Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx
2025-03-28T09:27:01.340680Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose
2025-03-28T09:27:01.340733Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-03-28T09:27:01.340790Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete
2025-03-28T09:27:01.340820Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose
2025-03-28T09:27:01.340877Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan
2025-03-28T09:27:01.340912Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-03-28T09:27:01.340942Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan
2025-03-28T09:27:01.342442Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated
2025-03-28T09:27:01.342515Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888
2025-03-28T09:27:01.355234Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888
2025-03-28T09:27:01.355329Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx
2025-03-28T09:27:01.355370Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose
2025-03-28T09:27:01.355416Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED
2025-03-28T09:27:01.355505Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme
2025-03-28T09:27:01.516565Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected
2025-03-28T09:27:01.516622Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected
2025-03-28T09:27:01.516676Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0]
2025-03-28T09:27:01.517145Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888}
2025-03-28T09:27:01.517195Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep
2025-03-28T09:27:01.517328Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan
2025-03-28T09:27:01.517380Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed
2025-03-28T09:27:01.517441Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan
2025-03-28T09:27:01.517487Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue
2025-03-28T09:27:01.529902Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 }
2025-03-28T09:27:01.529993Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
2025-03-28T09:27:01.531383Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-03-28T09:27:01.531434Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction
2025-03-28T09:27:01.531520Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888
2025-03-28T09:27:0 ... HARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-28T09:31:59.507424Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:744:2625], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:59.507553Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [17:755:2630], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:59.507711Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:59.514373Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-28T09:31:59.682986Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [17:758:2633], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-28T09:31:59.718865Z node 17 :TX_PROXY ERROR: Actor# [17:832:2676] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:31:59.992022Z node 17 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1g44g1d4qwweej6ve2vpd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=17&id=ZmJjNGUxNzQtZDc3MDY2ZGUtYTQyNmUwNGYtOTM1YzczMDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:32:05.048107Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:306:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-28T09:32:05.048408Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:32:05.048473Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00106e/r3tmp/tmp7ZECsw/pdisk_1.dat
2025-03-28T09:32:05.373461Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-28T09:32:05.413553Z node 18 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:32:05.453647Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:32:05.453829Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:32:05.465633Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:32:05.548883Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-28T09:32:05.834200Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [18:740:2622], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:32:05.834327Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [18:749:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:32:05.834430Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:32:05.841579Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-28T09:32:06.005793Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [18:754:2630], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-28T09:32:06.041193Z node 18 :TX_PROXY ERROR: Actor# [18:828:2673] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:32:06.780806Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1gaa70rcg4mm65rxxtbw4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=NmM3NGI1N2QtYTVjZTE3OTgtMTQxMDcwYWYtYTI2NmQ2Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:32:07.506157Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1gbah3tanfbrb47jxpwf7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=NmJlOTdkOGUtNGUyNjFkNWYtNWE1N2M5NTItMmJlMDY4Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:32:08.270618Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1gc1k1jaejzc0xhaddf3r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=N2NiZjhmOWMtNjE3MWE2ZjQtYmFiNWNiMTItM2QxODg0ODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:32:08.829874Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1gcs1ehbqhrmmjzq4nhxm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=NWM2NWVkMjQtNjE3ODAxZTgtMmFiMjE5MDUtZjRkMDQxZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:32:09.353080Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1gd940whh6sp5fy1yd4j9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=YWY4ZDc1MmQtMmFiNDk5MzQtZDRjMTJkYjYtMTRhODZiOGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:32:09.885850Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe1gds664p2sn74q6g7d3hz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=OWM0NTBjNWEtNWM4NzQ3OGItNGVjYWZkZmItODhmNDg5MDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:32:10.423879Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqe1gea816862x2bgpyj9q6q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=MWNmNTMxZTAtYjQxNzE2NGEtNWFkMTA0ZTAtMjZkYWY5NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:32:11.063513Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqe1getx4qq1fzsaxqvwp99p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=YWYzNDM4MmMtZmZlMTk2ZjktYzgyNThiZTQtZWUxMjE0YWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:32:11.679404Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqe1gff22z585zw2bfcpb6vb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=MjE0Y2MyZGEtYmNhNmFlNjEtOTE4ZjZhMDktOTIzNWQ4YTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:32:12.360142Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715669. 
Ctx: { TraceId: 01jqe1gg23bshb09c9zwzadb3h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=NmJmZjNmZGYtNWI3ZTY5ZjEtMTU0NDY4NTgtMjBlNGQ0YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for stats after upsert 2025-03-28T09:32:14.516906Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:32:14.516995Z node 18 :IMPORT WARN: Table profiles were not loaded Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { DataSize: 10487312 RowCount: 10 IndexSize: 0 InMemSize: 10487312 LastAccessTime: 1547 LastUpdateTime: 1547 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 9126 Memory: 17425464 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 ... waiting for stats after compaction Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 1 TableStats { DataSize: 10487312 RowCount: 10 IndexSize: 0 InMemSize: 10487312 LastAccessTime: 1547 LastUpdateTime: 1547 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 3447 Memory: 124948 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 2 TableStats { DataSize: 10486220 RowCount: 10 IndexSize: 0 InMemSize: 0 LastAccessTime: 1547 LastUpdateTime: 1547 ImmediateTxCompleted: 10 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 10 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 10486220 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 3447 Memory: 124948 Storage: 10486554 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 18 StartTime: 450 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-28T09:32:19.045027Z node 18 :KQP_EXECUTER ERROR: TxId: 281474976715670. Ctx: { TraceId: 01jqe1gpz2cnwxy7mfm9zv4677, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=18&id=OWJiNjY0LTE4ZGY1YmVkLThmNjY0ZjI4LTE1YWQxOGJk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> TColumnShardTestSchema::RebootColdTiers >> TConsoleTests::TestNotifyOperationCompletion [GOOD] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::GetTabletCounters [GOOD] Test command err: 2025-03-28T09:32:13.881857Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790855161414486:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:13.881919Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef9/r3tmp/tmpo7PFUg/pdisk_1.dat 2025-03-28T09:32:14.211920Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:14.279815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:14.279924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:14.281898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9266 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:14.453393Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:14.470298Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:14.491403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:16.871308Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790865721111732:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:16.871434Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef9/r3tmp/tmpwMbe6N/pdisk_1.dat 2025-03-28T09:32:16.986655Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:17.012415Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:17.012532Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:17.014249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2137 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:17.226381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:17.248850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743154337351 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::Plan2SvgBad [GOOD] Test command err: 2025-03-28T09:31:18.146041Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790619254664321:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:18.147236Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:31:18.462223Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:18.498862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:18.498955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:18.502044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28701, node 1 2025-03-28T09:31:18.663267Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:18.663293Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:18.663303Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:18.663454Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28944 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:19.123314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:19.162133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:31:19.164696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:21.014131Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790632139566907:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:21.014140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790632139566919:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:21.014227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:21.021110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:31:21.030607Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790632139566921:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:31:21.122638Z node 1 :TX_PROXY ERROR: Actor# [1:7486790632139566972:2357] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:23.718290Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790640864860330:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:23.718395Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:31:23.822880Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21554, node 2 2025-03-28T09:31:23.865357Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:23.865438Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:23.867171Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:23.918469Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:23.918493Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:23.918501Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:23.918618Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2149 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:24.172895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:31:24.195355Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:31:24.200672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:24.206792Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2025-03-28T09:31:26.777555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790653749762921:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:26.777595Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790653749762904:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:26.777727Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:26.781335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:31:26.789915Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790653749762927:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:31:26.859826Z node 2 :TX_PROXY ERROR: Actor# [2:7486790653749762978:2355] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:28.612435Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486790662653623510:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:28.612519Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:31:28.775386Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:28.792175Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:28.792268Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:28.794889Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29820, node 3 2025-03-28T09:31:28.835364Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:28.835389Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:28.835397Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:28.835533Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4427 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateF ... lf is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.129664Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.133807Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:44.467263Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:44.467348Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:45.263977Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790736039977061:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.264075Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7486790736039977043:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.264609Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:45.270662Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480 2025-03-28T09:31:45.287370Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7486790736039977065:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2025-03-28T09:31:45.369986Z node 5 :TX_PROXY ERROR: Actor# [5:7486790736039977116:2359] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:45.446091Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.489707Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486790714565139871:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:45.489792Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:31:45.558332Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:45.558375Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:46.103941Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:46.103989Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:46.361276Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:46.361330Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:46.619240Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:46.619293Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:46.944358Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:46.944404Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:47.214727Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:47.214780Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:47.539579Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:47.539634Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:47.815280Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:47.815348Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:48.088370Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:48.088431Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:48.382568Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:48.382628Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:48.663028Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:48.663088Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:48.947584Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: 
test_ydb_token /Root 1 2025-03-28T09:31:48.947640Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:49.245884Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:49.245949Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:49.470210Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:49.470267Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:49.767136Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:49.767191Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:50.066826Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:50.066883Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T09:31:50.075443Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715709:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.077168Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715711:0, at schemeshard: 72057594046644480 2025-03-28T09:31:50.078647Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715710:0, at schemeshard: 72057594046644480 2025-03-28T09:31:51.607230Z node 5 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T09:31:51.607289Z node 5 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success assertion failed at ydb/core/viewer/viewer_ut.cpp:1948, virtual void NTestSuiteViewer::TTestCaseQueryExecuteScript::Execute_(NUnitTest::TTestContext &): (json.GetMap().contains("metadata")) {} TBackTrace::Capture()+28 (0x18D9F0BC) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1925AF80) NTestSuiteViewer::TTestCaseQueryExecuteScript::Execute_(NUnitTest::TTestContext&)+8947 (0x189326D3) std::__y1::__function::__func, void ()>::operator()()+280 (0x18948448) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x19291FA6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19261AF9) NTestSuiteViewer::TCurrentTest::Execute()+1204 (0x189472F4) NUnitTest::TTestFactory::Execute()+2438 (0x192633C6) NUnitTest::RunMain(int, char**)+5213 (0x1928C51D) ??+0 (0x7FD3414ABD90) __libc_start_main+128 (0x7FD3414ABE40) _start+41 (0x16247029) 2025-03-28T09:31:55.064926Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486790775430820690:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:55.065020Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:31:55.249688Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:55.272603Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:55.272723Z node 6 
:HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:55.275899Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20096, node 6 2025-03-28T09:31:55.355525Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:55.355566Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:55.355577Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:55.355758Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13785 2025-03-28T09:31:59.576986Z node 6 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator 2025-03-28T09:32:02.258787Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486790806064672376:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:02.258909Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:32:02.484166Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:02.484306Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:02.489440Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:02.493981Z node 7 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9532, node 7 2025-03-28T09:32:02.609859Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:02.609891Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:02.609904Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:02.610125Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13139 2025-03-28T09:32:06.895577Z node 7 :TICKET_PARSER ERROR: Ticket **** (8C3E2D8D): Could not find correct token validator >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TExportToS3Tests::DropCopiesBeforeTransferring2 >> TExportToS3Tests::RebootDuringAbortion [GOOD] >> TExportToS3Tests::ExportStartTime >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentExport >> TExportToS3Tests::RebootDuringCompletion [GOOD] >> TExportToS3Tests::SchemaMapping |92.3%| [TA] $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... results_accumulator.log} >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] |92.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> TExportToS3Tests::EnableChecksumsPersistance >> TExportToS3Tests::ExportStartTime [GOOD] >> TExportToS3Tests::ExportPartitioningSettings ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] Test command err: 2025-03-28T09:30:56.933519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:30:56.933585Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:56.986629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:30:58.131432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-03-28T09:30:58.291387Z node 5 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:30:58.292000Z node 5 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpdRYXa1/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:30:58.293309Z node 5 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpdRYXa1/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpdRYXa1/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14935616270616405638 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:30:58.325720Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:30:58.326580Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice 
initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpdRYXa1/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:30:58.326795Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpdRYXa1/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpdRYXa1/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6248238898065188030 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:30:58.370445Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:30:58.370970Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpdRYXa1/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:30:58.371244Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpdRYXa1/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpdRYXa1/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10979333190895893259 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:30:58.444004Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:30:58.444484Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpdRYXa1/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:30:58.444716Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpdRYXa1/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpdRYXa1/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 762690440855404370 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:30:58.449457Z node 3 :BS_LOCALRECOVERY CRIT: VDISK[80000001:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file 
"/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpdRYXa1/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:30:58.455289Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2 ... LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpAsecKm/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:32:12.243451Z node 152 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:32:12.243926Z node 152 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpAsecKm/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:32:12.244152Z node 152 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpAsecKm/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpAsecKm/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8994589331405411480 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:32:12.293638Z node 149 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:32:12.294140Z node 149 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpAsecKm/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:32:12.294392Z node 149 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpAsecKm/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpAsecKm/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 18201032863649751399 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:32:12.336267Z node 151 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:32:12.336812Z node 151 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpAsecKm/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:32:12.337247Z node 151 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpAsecKm/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpAsecKm/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13836198510676290232 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:32:12.603931Z node 145 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:12.604059Z node 145 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:12.702188Z node 145 :STATISTICS WARN: [72075186233409554] TTxInit::Complete. EnableColumnStatistics=false 2025-03-28T09:32:12.813580Z node 150 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:32:12.814250Z node 150 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpAsecKm/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:32:12.814576Z node 150 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpAsecKm/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001ce8/r3tmp/tmpAsecKm/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 18340672523020556178 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:32:15.814458Z node 154 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:15.814573Z node 154 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:15.899277Z node 154 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:32:19.427017Z node 163 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:19.427126Z node 163 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:19.493974Z node 163 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 >> TFlatTest::CopyTableAndReturnPartAfterCompaction >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentImport >> TExportToS3Tests::SchemaMapping [GOOD] >> TExportToS3Tests::SchemaMappingEncryption >> TExportToS3Tests::CheckItemProgress >> TExportToS3Tests::UidAsIdempotencyKey >> TExportToS3Tests::EnableChecksumsPersistance [GOOD] >> TExportToS3Tests::EncryptedExport >> TExportToS3Tests::ExportPartitioningSettings [GOOD] >> TExportToS3Tests::ExportIndexTablePartitioningSettings >> TFlatTest::LargeProxyReplyRW [GOOD] >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] >> TExportToS3Tests::CheckItemProgress [GOOD] >> TExportToS3Tests::CancelledExportEndTime >> TLocksTest::BrokenLockUpdate >> TExportToS3Tests::SchemaMappingEncryption [GOOD] >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> TExportToS3Tests::UserSID >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> 
PersQueueSdkReadSessionTest::SettingsValidation >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed >> YdbYqlClient::SimpleColumnFamilies [GOOD] >> YdbYqlClient::TableKeyRangesSinglePartition >> Viewer::StorageGroupOutputWithoutFilterNoDepends [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeProxyReplyRW [GOOD] Test command err: 2025-03-28T09:32:13.787213Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790854778779645:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:13.787267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001efb/r3tmp/tmptrTH2D/pdisk_1.dat 2025-03-28T09:32:14.101206Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:14.163387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:14.163472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:14.165252Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3064 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:14.385714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:14.424311Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:18.658811Z node 1 :TX_PROXY ERROR: Actor# [1:7486790876253619702:4139] txid# 281474976711010 MergeResult Result too large TDataReq marker# P18 2025-03-28T09:32:18.658874Z node 1 :TX_PROXY ERROR: Actor# [1:7486790876253619702:4139] txid# 281474976711010 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. 
(71692241 > 50331648) proxy error code: ExecResultUnavailable 2025-03-28T09:32:18.790811Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790854778779645:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:18.790943Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:32:19.222863Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790881666091189:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:19.222938Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001efb/r3tmp/tmpgqVcBH/pdisk_1.dat 2025-03-28T09:32:19.356368Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:19.375604Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:19.375720Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:19.378156Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13119 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:19.544197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:19.550757Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:19.558494Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:32:19.563199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:23.664865Z node 2 :TX_PROXY ERROR: Actor# [2:7486790898845963945:4140] txid# 281474976716011 MergeResult Result too large TDataReq marker# P18 2025-03-28T09:32:23.664935Z node 2 :TX_PROXY ERROR: Actor# [2:7486790898845963945:4140] txid# 281474976716011 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. (71692241 > 50331648) proxy error code: ExecResultUnavailable >> TConsoleTxProcessorTests::TestTxProcessorRandom [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization >> TExportToS3Tests::EncryptedExport [GOOD] >> TExportToS3Tests::UserSID [GOOD] >> TExportToS3Tests::TablePermissions >> TFlatTest::CopyTableAndReturnPartAfterCompaction [GOOD] >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] Test command err: 2025-03-28T09:32:17.213387Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790871629717120:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:17.213617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef5/r3tmp/tmpv3I6YZ/pdisk_1.dat 2025-03-28T09:32:17.507172Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:17.613641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:17.613740Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:17.615556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23348 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:17.803959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:17.833679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743154337939 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Key2" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "Va... (TRUNCATED) 2025-03-28T09:32:17.994743Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:32:17.996196Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:32:17.996225Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:32:18.069548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } } } TxId: 281474976710668 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:32:18.069696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:32:18.069949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-28T09:32:18.069978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-28T09:32:18.070197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-28T09:32:18.070265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710668:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:32:18.071112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710668, response: Status: StatusAccepted TxId: 281474976710668 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:32:18.071199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710668, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-03-28T09:32:18.071365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:32:18.071405Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 waiting... 
2025-03-28T09:32:18.071754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-28T09:32:18.071948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-28T09:32:18.072326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:2 msg type: 268697601 2025-03-28T09:32:18.072413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-03-28T09:32:18.072468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710668, partId: 0, tablet: 72057594037968897 2025-03-28T09:32:18.072487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2025-03-28T09:32:18.072522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2025-03-28T09:32:18.072685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:32:18.072698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710668, ready parts: 0/1, is published: true 2025-03-28T09:32:18.072725Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:32:18.075127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-03-28T09:32:18.075181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2025-03-28T09:32:18.075448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-03-28T09:32:18.075484Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-03-28T09:32:18.075553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2025-03-28T09:32:18.075796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle 
TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-03-28T09:32:18.075822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2025-03-28T09:32:18.075882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-03-28T09:32:18.075901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-03-28T09:32:18.075932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-03-28T09:32:18.075965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710668:0 2 -> 3 2025-03-28T09:32:18.076631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:32:18.076696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:32:18.076767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:32:18.076793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:32:18.076873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037889 splitOp: 281474976710668:0 alterVersion: 1 at tablet: 72057594046644480 2025-03-28T09:32:18.077058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Initializing scheme on dst datashard: 72075186224037890 ... 
tId: 72075186224037891 State: 4 2025-03-28T09:32:21.606665Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:32:21.607581Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:32:21.607723Z node 3 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:32:21.608006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:32:21.608007Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-03-28T09:32:21.608653Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-03-28T09:32:21.609186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486790887234770598 RawX2: 4503612512274685 } TabletId: 72075186224037892 State: 4 2025-03-28T09:32:21.609223Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:32:21.609337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486790887234770598 RawX2: 4503612512274685 } TabletId: 72075186224037892 State: 4 2025-03-28T09:32:21.609361Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:32:21.609505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-28T09:32:21.609669Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-28T09:32:21.609799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-03-28T09:32:21.609994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-28T09:32:21.610148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-28T09:32:21.610273Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-03-28T09:32:21.610364Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-28T09:32:21.610772Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-03-28T09:32:21.610868Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-03-28T09:32:21.610933Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-28T09:32:21.610977Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-03-28T09:32:21.611000Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 
2025-03-28T09:32:21.611017Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-03-28T09:32:21.611045Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-28T09:32:21.611261Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-03-28T09:32:21.611752Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2025-03-28T09:32:21.612093Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486790887234770597 RawX2: 4503612512274684 } TabletId: 72075186224037890 State: 4 2025-03-28T09:32:21.612143Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:32:21.612285Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-28T09:32:21.612369Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-03-28T09:32:21.612970Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:32:21.613036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:32:21.613110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-28T09:32:21.613286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-28T09:32:21.613500Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-03-28T09:32:21.614011Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-03-28T09:32:21.614193Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-03-28T09:32:21.614221Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-03-28T09:32:21.614224Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2025-03-28T09:32:21.614253Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-03-28T09:32:21.612030Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-03-28T09:32:21.612149Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-03-28T09:32:21.613513Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-03-28T09:32:21.613531Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2025-03-28T09:32:21.613573Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-03-28T09:32:21.614528Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-03-28T09:32:21.616153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-28T09:32:21.616179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-28T09:32:21.616215Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-28T09:32:21.616223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-28T09:32:21.616288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:32:21.616342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-28T09:32:21.616357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-28T09:32:21.616612Z node 3 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-28T09:32:21.616966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-03-28T09:32:21.617132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-28T09:32:21.617283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-03-28T09:32:21.618058Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-03-28T09:32:21.618087Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-03-28T09:32:21.618101Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2025-03-28T09:32:21.617071Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037892 reason = ReasonStop 2025-03-28T09:32:21.617134Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-03-28T09:32:21.618103Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-03-28T09:32:21.618281Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-03-28T09:32:21.618958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-28T09:32:21.619306Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037892 2025-03-28T09:32:21.620077Z node 3 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-03-28T09:32:21.620345Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-28T09:32:21.620413Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-03-28T09:32:21.619112Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-28T09:32:21.619255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:32:21.619269Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-28T09:32:21.619311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:32:21.619984Z node 2 :TX_DATASHARD INFO: OnTabletDead: 
72075186224037890 2025-03-28T09:32:21.620148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-03-28T09:32:21.620161Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-03-28T09:32:21.620193Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-03-28T09:32:21.620436Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-03-28T09:32:21.620453Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-03-28T09:32:21.620955Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2025-03-28T09:32:21.621559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-28T09:32:21.621590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-28T09:32:21.621643Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-28T09:32:21.657258Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:20.363181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:20.363339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:20.363387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:20.363425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:20.364112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:20.364172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:20.364263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:20.364359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:20.365507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:20.453027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:20.453083Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-28T09:32:20.471920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:20.472178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:20.472326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:20.478175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:20.478368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:20.479037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:20.479225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:20.480943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:20.491933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:20.492110Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:20.492323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:20.492374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:20.492428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:20.492593Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.498497Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:20.617034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:20.618868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.620948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:20.622249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:20.622354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.625301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:20.625445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:20.625627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.625746Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:20.625803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:20.625849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:20.628312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.628370Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:20.628407Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:20.630158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.630203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.630244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:20.630316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:20.638979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:20.642321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:20.643431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:20.644531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:20.644648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:20.644704Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:20.646072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:20.646133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:20.646342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:20.646404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:32:20.648490Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:20.648540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:20.648690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:20.648720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:20.649093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.649138Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:20.649208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:20.649233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:20.649260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:20.649285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:20.649314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:20.649361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:20.649408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:20.649435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:20.649483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:20.649520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:20.649548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:20.651153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:20.651283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:20.651337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
Id: 72075186233409548 CpuTimeUsec: 259 } } 2025-03-28T09:32:24.933024Z node 4 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-03-28T09:32:24.933132Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409548, shardIdx: 72057594046678944:3, operationId: 281474976710759:0, left await: 0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.933174Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2025-03-28T09:32:24.934926Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.935058Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.935109Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:24.935197Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2025-03-28T09:32:24.935333Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409548 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:24.936681Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-03-28T09:32:24.936786Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409548 for txId: 281474976710759 at step: 5000005 2025-03-28T09:32:24.937284Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:24.937379Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 17179871338 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:24.937435Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-03-28T09:32:24.937552Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-03-28T09:32:24.937677Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: REQUEST: PUT /metadata.json HTTP/1.1 5000005 HEADERS: Host: localhost:29776 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 933B2D5C-A7CC-4C3C-A447-DBFC40213693 amz-sdk-request: attempt=1 
content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 2025-03-28T09:32:24.970495Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:24.970543Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 6] 2025-03-28T09:32:24.970771Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:24.970796Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 6 2025-03-28T09:32:24.971040Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.971082Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 281474976710759 2025-03-28T09:32:24.972095Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-03-28T09:32:24.972160Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-03-28T09:32:24.972181Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-03-28T09:32:24.972204Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 3 2025-03-28T09:32:24.972229Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 4 2025-03-28T09:32:24.972293Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:29776 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D9669D5E-196E-4875-9F37-4FF0CB5650DD amz-sdk-request: attempt=1 content-length: 602 content-md5: GgrERoUcI3sF1n0Je2MTCQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 602 2025-03-28T09:32:24.974744Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:29776 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 13E7EC7B-C093-4028-B33B-A1ECAAE17D66 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2025-03-28T09:32:24.989488Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 507 RawX2: 17179871649 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-03-28T09:32:24.989551Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409548, partId: 0 2025-03-28T09:32:24.989675Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 507 RawX2: 17179871649 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-03-28T09:32:24.989785Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 507 RawX2: 17179871649 } Origin: 72075186233409548 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-03-28T09:32:24.989850Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:3, datashard: 72075186233409548, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:24.989895Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.989937Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2025-03-28T09:32:24.989981Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-03-28T09:32:24.990150Z node 4 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:24.992196Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.992484Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.992522Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-03-28T09:32:24.992645Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-03-28T09:32:24.992676Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-28T09:32:24.992723Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-03-28T09:32:24.992758Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-28T09:32:24.992797Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-03-28T09:32:24.992853Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:133:2156] message: TxId: 281474976710759 2025-03-28T09:32:24.992887Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-28T09:32:24.992917Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-03-28T09:32:24.992940Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-03-28T09:32:24.993032Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-03-28T09:32:24.994807Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-03-28T09:32:24.994863Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-03-28T09:32:24.996510Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:32:24.996563Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:536:2486]
TestWaitNotification: OK eventTxId 102
>> TExportToS3Tests::CancelledExportEndTime [GOOD]
>> TExportToS3Tests::Checksums
>> TExportToS3Tests::TablePermissions [GOOD]
>> TImmediateControlsConfiguratorTests::TestControlsInitialization [GOOD]
>> TImmediateControlsConfiguratorTests::TestModifiedControls
>> TConsoleTests::TestNotifyOperationCompletionExtSubdomain [GOOD]
>> TConsoleTests::TestRemoveAttributes
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::EncryptedExport [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:20.555853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:20.555952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:20.555989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:20.556039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:20.556090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:20.556117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:20.556173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:20.556384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:20.556796Z node 1
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:20.637226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:20.637281Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:20.651099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:20.651320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:20.651496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:20.657640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:20.657811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:20.658509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:20.658716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:20.660455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:20.661749Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:20.661804Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:20.661956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:20.661997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:20.662041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:20.662157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.669007Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:20.777991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:20.778208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.778432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:20.778723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:20.778792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.781316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:20.781452Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:20.781653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.781720Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:20.781767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:20.781809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:20.783967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.784039Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:20.784075Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:20.785857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.785897Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.785941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:20.786016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:20.789558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:20.791345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:20.791602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:20.792576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:20.792692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:20.792765Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:20.793055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:20.793141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:20.793303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:20.793390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:20.795421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:20.795471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:20.795632Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:20.795672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:20.796089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.796133Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:20.796222Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:20.796279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:20.796321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:20.796351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:20.796391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:20.796445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:20.796496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:20.796540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:20.796604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:20.796649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:20.796682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:20.798574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:20.798704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:20.798751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
4046678944 2025-03-28T09:32:25.706321Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-03-28T09:32:25.706381Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2025-03-28T09:32:25.706508Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:25.707036Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:25.707176Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:25.707207Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-03-28T09:32:25.707249Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-03-28T09:32:25.707304Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-28T09:32:25.707855Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:25.707944Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:25.707972Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-03-28T09:32:25.708000Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-03-28T09:32:25.708041Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:32:25.708112Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-03-28T09:32:25.710899Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-28T09:32:25.710991Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-03-28T09:32:25.711110Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000010 
2025-03-28T09:32:25.711559Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-03-28T09:32:25.711606Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-03-28T09:32:25.711684Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-03-28T09:32:25.712302Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:25.712414Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 17179871338 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:25.712475Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000010, at schemeshard: 72057594046678944 2025-03-28T09:32:25.712587Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-03-28T09:32:25.712647Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-03-28T09:32:25.712686Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-03-28T09:32:25.712731Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-03-28T09:32:25.712767Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-03-28T09:32:25.712819Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:32:25.712889Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:32:25.712947Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-03-28T09:32:25.713015Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-03-28T09:32:25.713057Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-03-28T09:32:25.713095Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-03-28T09:32:25.713170Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:32:25.713217Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-03-28T09:32:25.713259Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-03-28T09:32:25.713310Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-03-28T09:32:25.714096Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:25.714228Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710763 2025-03-28T09:32:25.715776Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:25.715817Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:25.715962Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:32:25.716095Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:25.716126Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-03-28T09:32:25.716161Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-03-28T09:32:25.716895Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:25.717003Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:25.717043Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-03-28T09:32:25.717087Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-03-28T09:32:25.717136Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-28T09:32:25.717759Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:25.717841Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:25.717866Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-03-28T09:32:25.717897Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-28T09:32:25.717940Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:32:25.718029Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-03-28T09:32:25.718076Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:133:2156] 2025-03-28T09:32:25.720927Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:25.721261Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:25.721337Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-03-28T09:32:25.721383Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-03-28T09:32:25.721422Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-03-28T09:32:25.721448Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-03-28T09:32:25.721477Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-03-28T09:32:25.723039Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-28T09:32:25.723116Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:32:25.723165Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:1126:3003]
TestWaitNotification: OK eventTxId 103
>> DataShardVolatile::VolatileTxAbortedOnDrop [GOOD]
>> DataShardVolatile::UpsertNoLocksArbiter+UseSink
>> TopicAutoscaling::ControlPlane_BackCompatibility [GOOD]
>> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:20.708249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:20.708359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:20.708392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:20.708422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:20.708466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:20.708492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:20.708538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:20.708608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-28T09:32:20.708947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:20.791614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:20.791667Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:20.806171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:20.806404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:20.806568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:20.813927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:20.814195Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:20.814922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:20.815134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:20.817326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:20.818610Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:20.818674Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:20.818865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:20.818911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:20.818966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:20.819084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.825863Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:20.953421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:20.953664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.953884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:20.954125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:20.954203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.958337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-03-28T09:32:20.958490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:20.958714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.958781Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:20.958825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:20.958869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:20.963465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.963524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:20.963562Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:20.965225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.965272Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.965309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:20.965372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:20.969126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:20.970786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:20.971012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:20.971923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:20.972055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:20.972116Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:20.972392Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:20.972466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:20.972610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:20.972679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:20.974463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:20.974512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:20.974662Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:20.974721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:20.975115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.975160Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:20.975249Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:20.975286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:20.975322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:20.975354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:20.975386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:20.975436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:20.975483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:20.975514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:20.975590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:20.975632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:20.975666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:20.977463Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:20.977568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:20.977608Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
46Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-03-28T09:32:25.946298Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-03-28T09:32:25.946346Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-28T09:32:25.947133Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-03-28T09:32:25.947216Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-03-28T09:32:25.947243Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710758 2025-03-28T09:32:25.947273Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-03-28T09:32:25.947307Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:32:25.947374Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-03-28T09:32:25.949320Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-28T09:32:25.949457Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2025-03-28T09:32:25.949501Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2025-03-28T09:32:25.949551Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2025-03-28T09:32:25.950859Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2025-03-28T09:32:25.950998Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000005 2025-03-28T09:32:25.952596Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-03-28T09:32:25.952972Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:25.953119Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 17179871338 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:25.953172Z node 4 :FLAT_TX_SCHEMESHARD INFO: 
TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000005, at schemeshard: 72057594046678944 2025-03-28T09:32:25.953284Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2025-03-28T09:32:25.953350Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-03-28T09:32:25.953392Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-03-28T09:32:25.953442Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710758:0 progress is 1/1 2025-03-28T09:32:25.953479Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-03-28T09:32:25.953541Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:32:25.953608Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-28T09:32:25.953646Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2025-03-28T09:32:25.953697Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2025-03-28T09:32:25.953750Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710758:0 2025-03-28T09:32:25.953790Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710758:0 2025-03-28T09:32:25.953857Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:32:25.953901Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2025-03-28T09:32:25.953943Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-03-28T09:32:25.953978Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-03-28T09:32:25.954372Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-03-28T09:32:25.955848Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:25.955890Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:25.956050Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:32:25.956181Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:25.956213Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2025-03-28T09:32:25.956249Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710758 2025-03-28T09:32:25.956921Z 
node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-03-28T09:32:25.956999Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-03-28T09:32:25.957031Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2025-03-28T09:32:25.957078Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-03-28T09:32:25.957122Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-28T09:32:25.957750Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-03-28T09:32:25.957827Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2025-03-28T09:32:25.957854Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2025-03-28T09:32:25.957882Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-28T09:32:25.957912Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-28T09:32:25.957992Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2025-03-28T09:32:25.958038Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:133:2156] 2025-03-28T09:32:25.958455Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:32:25.958509Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-28T09:32:25.958583Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:32:25.960899Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-03-28T09:32:25.961502Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2025-03-28T09:32:25.961934Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2025-03-28T09:32:25.962015Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710758 2025-03-28T09:32:25.962080Z node 4 :EXPORT DEBUG: 
TExport::TTxProgress: DoExecute 2025-03-28T09:32:25.962122Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-03-28T09:32:25.962170Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 103, itemIdx# 4294967295 2025-03-28T09:32:25.963234Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:32:25.965577Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete
TestWaitNotification wait txId: 103
2025-03-28T09:32:25.965809Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:32:25.965860Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:32:25.966294Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:32:25.966386Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:32:25.966430Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [4:551:2510]
TestWaitNotification: OK eventTxId 103
>> TExportToS3Tests::Checksums [GOOD]
>> TExportToS3Tests::Changefeeds
>> DataShardVolatile::DistributedWriteThenBulkUpsert [GOOD]
>> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc
>> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed
>> TImmediateControlsConfiguratorTests::TestModifiedControls [GOOD]
>> TImmediateControlsConfiguratorTests::TestResetToDefault
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TablePermissions [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:24.372998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:24.373063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:24.373090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:24.373113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:24.373148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:24.373167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:24.373203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:24.373261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s,
DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:24.373499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:24.431744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:24.431786Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:24.442566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:24.442737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:24.442892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:24.447800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:24.448063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:24.448619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:24.448785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:24.450319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:24.451294Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:24.451336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:24.451432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:24.451471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:24.451520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:24.451600Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.456905Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:24.567914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:24.568096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.568254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:24.568415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:24.568466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.570050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: 
StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:24.570144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:24.570273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.570305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:24.570355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:24.570385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:24.571684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.571723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:24.571745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:24.572885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.572916Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.572942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:24.572978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:24.584579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:24.586112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:24.586291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:24.586954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:24.587042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:24.587083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:24.587280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:24.587316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2025-03-28T09:32:24.587416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:24.587463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:24.588866Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:24.588896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:24.589003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:24.589037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:24.589294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.589323Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:24.589378Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:24.589398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:24.589422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:24.589440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:24.589459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:24.589500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:24.589565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:24.589587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:24.589626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:24.589650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:24.589670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:24.590808Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:24.590883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:24.590918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
HEMESHARD INFO: Change state for txid 281474976710759:0 3 -> 128 2025-03-28T09:32:26.711625Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:26.711795Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:26.711837Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:26.711919Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710759 ready parts: 1/1 2025-03-28T09:32:26.712070Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710759 MinStep: 5000005 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:26.714012Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710759 msg type: 269090816 2025-03-28T09:32:26.714139Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710759 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000005 2025-03-28T09:32:26.714497Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:26.714608Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 12884904045 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:26.714669Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000005, at schemeshard: 72057594046678944 2025-03-28T09:32:26.714821Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 128 -> 129 2025-03-28T09:32:26.714966Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:32:26.745219Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:26.745278Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:32:26.745467Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:26.745493Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:204:2206], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 4 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000005 2025-03-28T09:32:26.745978Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:26.746038Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:26.746601Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-03-28T09:32:26.746704Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2025-03-28T09:32:26.746746Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2025-03-28T09:32:26.746792Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-03-28T09:32:26.746830Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:32:26.746909Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710759 2025-03-28T09:32:26.750149Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:13946 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 94F4DE44-0B72-4F1B-9065-27AA9E371B78 amz-sdk-request: attempt=1 content-length: 73 content-md5: q/ySd5GvS6I/qOVxS/4Thg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /metadata.json / / 73 REQUEST: PUT /permissions.pb HTTP/1.1 HEADERS: Host: localhost:13946 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 59D9911E-72B3-4252-A4C0-DF1CF5E1EE55 amz-sdk-request: attempt=1 content-length: 137 content-md5: WeIr3D5bqIjvqMGEjx2JrA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /permissions.pb / / 137 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:13946 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8E9D79CD-19BF-4A4B-925E-F92AE8E891C7 amz-sdk-request: attempt=1 content-length: 355 content-md5: 4DhJNWgTpoG3PVvZ0uCHUA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /scheme.pb / / 355 REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:13946 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7DD17E70-7633-4352-B0D8-C69EF93234EF amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / / 0 2025-03-28T09:32:26.779995Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 448 RawX2: 12884904305 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-03-28T09:32:26.780058Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-03-28T09:32:26.780169Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 448 RawX2: 12884904305 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-03-28T09:32:26.780253Z node 3 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 448 RawX2: 12884904305 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 } 2025-03-28T09:32:26.780306Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:26.780338Z node 3 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:26.780370Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:32:26.780407Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-03-28T09:32:26.780520Z node 3 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:26.782048Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:26.782147Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:26.782173Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-03-28T09:32:26.782258Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-03-28T09:32:26.782277Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-28T09:32:26.782303Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-03-28T09:32:26.782321Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-28T09:32:26.782352Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-03-28T09:32:26.782398Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:124:2150] message: TxId: 281474976710759 2025-03-28T09:32:26.782425Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-28T09:32:26.782448Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-03-28T09:32:26.782465Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-03-28T09:32:26.782548Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:32:26.784445Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-03-28T09:32:26.784500Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-03-28T09:32:26.786650Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:32:26.786748Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:478:2439] TestWaitNotification: OK eventTxId 103
>> TColumnShardTestSchema::RebootOneTier
>> TExportToS3Tests::ShouldSucceedOnSingleShardTable
>> TopicAutoscaling::Simple_AutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::Simple_PQv1
>> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD]
>> TFlatTest::SelectRangeItemsLimit
>> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD]
>> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed
>> TImmediateControlsConfiguratorTests::TestResetToDefault [GOOD]
>> TImmediateControlsConfiguratorTests::TestMaxLimit
>> TExportToS3Tests::Changefeeds [GOOD]
>> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD]
>> TExportToS3Tests::ShouldSucceedOnMultiShardTable
>> TLocksTest::CK_Range_BrokenLock [GOOD]
>> TLocksTest::CK_Range_BrokenLockInf
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD]
Test command err:
2025-03-28T09:32:23.283166Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790896686990251:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:23.283235Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eed/r3tmp/tmpdDh2dr/pdisk_1.dat 2025-03-28T09:32:23.564914Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:23.567257Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:23.567370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:23.570827Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15254 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:23.766626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:23.806425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:23.936319Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-28T09:32:23.940622Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-28T09:32:23.965409Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1605 647 6413)b }, ecr=1.000 2025-03-28T09:32:23.969029Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2358 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743154343917 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) Copy TableOld to Table 2025-03-28T09:32:24.067695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 100000 InMemStepsToSnapshot: 2 InMemForceStepsToSnapshot: 3 InMemForceSizeToSnapshot: 1000000 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 200000 ReadAheadLoThreshold: 100000 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 10000 CountToCompact: 2 ForceCountToCompact: 2 ForceSizeToCompact: 20000 CompactionBrokerQueue: 1 KeepInCache: true } } ColumnFamilies { Id: 0 ColumnCache: ColumnCacheNone Storage: ColumnStorageTest_1_2_1k } } CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:32:24.068195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:32:24.068567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-03-28T09:32:24.068598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-03-28T09:32:24.068623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-28T09:32:24.068679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-03-28T09:32:24.068689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-03-28T09:32:24.068808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-03-28T09:32:24.068891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:32:24.069453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:32:24.069485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-03-28T09:32:24.070026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-03-28T09:32:24.070117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2025-03-28T09:32:24.070251Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:32:24.070271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 
2025-03-28T09:32:24.070362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-03-28T09:32:24.070431Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:32:24.070456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486790896686990769:2245], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 2 2025-03-28T09:32:24.070473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486790896686990769:2245], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 4 waiting... 2025-03-28T09:32:24.070521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:32:24.070598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-03-28T09:32:24.070891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-28T09:32:24.071014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-28T09:32:24.083535Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-28T09:32:24.083637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-28T09:32:24.083656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-03-28T09:32:24.083684Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-03-28T09:32:24.083703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-28T09:32:24.083931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-28T09:32:24.083965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 
LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-28T09:32:24.083989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-03-28T09:32:24.084001Z node 1 :FLAT_TX_SCHEM ... ble TProposedDeletePart operationId: 281474976715784:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:32:27.361337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-03-28T09:32:27.361436Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715784:0 progress is 1/1 2025-03-28T09:32:27.361446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-03-28T09:32:27.361460Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715784:0 progress is 1/1 2025-03-28T09:32:27.361469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-03-28T09:32:27.361482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715784, ready parts: 1/1, is published: true 2025-03-28T09:32:27.361515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7486790914668311507:2687] message: TxId: 281474976715784 2025-03-28T09:32:27.361531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715784 ready parts: 1/1 2025-03-28T09:32:27.361545Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715784:0 2025-03-28T09:32:27.361553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715784:0 2025-03-28T09:32:27.361632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-03-28T09:32:27.361660Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715784 datashard 72075186224037891 state PreOffline 2025-03-28T09:32:27.361680Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2025-03-28T09:32:27.361770Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715784 datashard 72075186224037890 state PreOffline 2025-03-28T09:32:27.361786Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-03-28T09:32:27.361992Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:32:27.362131Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 parts [ [72075186224037889:1:16:1:12288:306:0] [72075186224037889:1:23:1:12288:253:0] ] return ack processed 2025-03-28T09:32:27.362157Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:32:27.362225Z node 2 :TX_DATASHARD INFO: 72075186224037891 Initiating switch from PreOffline to Offline state 2025-03-28T09:32:27.363309Z node 2 :TX_DATASHARD INFO: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:32:27.363503Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7486790914668311531:2689], serverId# [2:7486790914668311536:3449], sessionId# [0:0:0] Check that tablet 72075186224037888 was 
deleted 2025-03-28T09:32:27.363680Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486790910373342504 RawX2: 4503608217307385 } TabletId: 72075186224037889 State: 4 2025-03-28T09:32:27.363729Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:32:27.363759Z node 2 :TX_DATASHARD DEBUG: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:32:27.363829Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state 2025-03-28T09:32:27.363867Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486790910373342797 RawX2: 4503608217307443 } TabletId: 72075186224037891 State: 4 2025-03-28T09:32:27.363895Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:32:27.364048Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-03-28T09:32:27.364145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:32:27.364287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 Check that tablet 72075186224037889 was deleted 2025-03-28T09:32:27.364732Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) 2025-03-28T09:32:27.364897Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-03-28T09:32:27.364921Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline Check that tablet 72075186224037890 was deleted 2025-03-28T09:32:27.365037Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:32:27.365403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486790910373342795 RawX2: 4503608217307442 } TabletId: 72075186224037890 State: 4 2025-03-28T09:32:27.365444Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:32:27.365615Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-03-28T09:32:27.365617Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-28T09:32:27.365776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-28T09:32:27.365887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-28T09:32:27.365895Z node 2 :TX_DATASHARD 
INFO: OnTabletDead: 72075186224037889 2025-03-28T09:32:27.365951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-03-28T09:32:27.365963Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-03-28T09:32:27.365996Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:32:27.366005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-28T09:32:27.366026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:32:27.366369Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-28T09:32:27.366594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:32:27.366629Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-28T09:32:27.366636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-28T09:32:27.366902Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-28T09:32:27.366917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-28T09:32:27.366959Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-28T09:32:27.367036Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-03-28T09:32:27.367085Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7486790914668311378:3312], serverId# [2:7486790914668311379:3313], sessionId# [0:0:0] 2025-03-28T09:32:27.367102Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7486790910373342881:2553], serverId# [2:7486790910373342884:2556], sessionId# [0:0:0] 2025-03-28T09:32:27.367125Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-28T09:32:27.367160Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7486790910373342821:2507], serverId# [2:7486790910373342824:2509], sessionId# [0:0:0] 2025-03-28T09:32:27.367280Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-03-28T09:32:27.367347Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-03-28T09:32:27.367395Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-03-28T09:32:27.368085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-28T09:32:27.368254Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-03-28T09:32:27.368405Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at 
schemeshard: 72057594046644480 2025-03-28T09:32:27.368423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-03-28T09:32:27.368451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:32:27.368456Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-03-28T09:32:27.368737Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-03-28T09:32:27.368845Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-28T09:32:27.368868Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-28T09:32:27.368891Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-28T09:32:27.369105Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-28T09:32:27.369151Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-03-28T09:32:27.672615Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-03-28T09:32:27.673775Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891)
>> TImmediateControlsConfiguratorTests::TestMaxLimit [GOOD]
>> TImmediateControlsConfiguratorTests::TestDynamicMap
>> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD]
>> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD]
>> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed
>> TopicAutoscaling::ControlPlane_PauseAutoPartitioning [GOOD]
>> TopicAutoscaling::ControlPlane_CDC_Enable
>> TExportToS3Tests::ShouldOmitNonStrictStorageSettings
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::Changefeeds [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:24.193367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:24.193448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:24.193478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:24.193509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:24.193546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:24.193572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type
TxSplitTablePartition, limit 10000 2025-03-28T09:32:24.193616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:24.193686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:24.193995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:24.266134Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:24.266196Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:24.276134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:24.276357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:24.276473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:24.280667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:24.280830Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:24.281261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:24.281401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:24.282924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:24.284124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:24.284178Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:24.284336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:24.284378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:24.284414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:24.284518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.290358Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:24.391346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:24.391550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.391746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:24.391947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:24.392035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.394184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:24.394305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:24.394471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.394517Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:24.394561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:24.394601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:24.396529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.396580Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:24.396614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:24.398179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.398220Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.398263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:24.398314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:24.401778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:24.403482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:24.403722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:24.404578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:24.404719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 
2025-03-28T09:32:24.404772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:24.405002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:24.405055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:24.405190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:24.405257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:24.407097Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:24.407145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:24.407298Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:24.407350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:24.407705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:24.407749Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:24.407833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:24.407862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:24.407893Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:24.407934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:24.407973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:24.408043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:24.408091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:24.408121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:24.408184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:24.408219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:24.408246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:24.409963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:24.410085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 
2025-03-28T09:32:24.410121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 7594046678944 2025-03-28T09:32:29.217024Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-03-28T09:32:29.217097Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710761 ready parts: 1/1 2025-03-28T09:32:29.217225Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710761 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:29.217788Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-03-28T09:32:29.217874Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-03-28T09:32:29.217898Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-03-28T09:32:29.217924Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-03-28T09:32:29.217955Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:32:29.218966Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-03-28T09:32:29.219034Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-03-28T09:32:29.219065Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2025-03-28T09:32:29.219098Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 7 2025-03-28T09:32:29.219122Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-03-28T09:32:29.219180Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-03-28T09:32:29.221158Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-28T09:32:29.221687Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2025-03-28T09:32:29.221731Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2025-03-28T09:32:29.221769Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2025-03-28T09:32:29.222800Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg 
operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2025-03-28T09:32:29.222925Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000010 2025-03-28T09:32:29.223529Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-03-28T09:32:29.223686Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000010, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:29.223788Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 17179871338 } } Step: 5000010 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:29.223841Z node 4 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000010, at schemeshard: 72057594046678944 2025-03-28T09:32:29.223936Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2025-03-28T09:32:29.224024Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-03-28T09:32:29.224059Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-03-28T09:32:29.224102Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710761:0 progress is 1/1 2025-03-28T09:32:29.224138Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-03-28T09:32:29.224195Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:29.224258Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-03-28T09:32:29.224293Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2025-03-28T09:32:29.224353Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2025-03-28T09:32:29.224408Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710761:0 2025-03-28T09:32:29.224445Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710761:0 2025-03-28T09:32:29.224510Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 3 2025-03-28T09:32:29.224550Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2025-03-28T09:32:29.224587Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2025-03-28T09:32:29.224616Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 9], 18446744073709551615 2025-03-28T09:32:29.225210Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-03-28T09:32:29.226723Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:29.226774Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:29.226901Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 9] 2025-03-28T09:32:29.227003Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:29.227032Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2025-03-28T09:32:29.227065Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 9 FAKE_COORDINATOR: Erasing txId 281474976710761 2025-03-28T09:32:29.227822Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-03-28T09:32:29.227896Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-03-28T09:32:29.227953Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2025-03-28T09:32:29.227997Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-03-28T09:32:29.228066Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:32:29.228476Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-03-28T09:32:29.228546Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 9 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2025-03-28T09:32:29.228568Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2025-03-28T09:32:29.228592Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 9], version: 18446744073709551615 2025-03-28T09:32:29.228616Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 2 2025-03-28T09:32:29.228676Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 
2025-03-28T09:32:29.228730Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:133:2156] 2025-03-28T09:32:29.231372Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-03-28T09:32:29.232181Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2025-03-28T09:32:29.232269Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2025-03-28T09:32:29.232316Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710761 2025-03-28T09:32:29.232353Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-03-28T09:32:29.232376Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2025-03-28T09:32:29.232402Z node 4 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2025-03-28T09:32:29.233961Z node 4 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-28T09:32:29.234046Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-28T09:32:29.234086Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [4:1387:3176] TestWaitNotification: OK eventTxId 105 >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::ShouldSucceedOnManyTables >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD] >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> TExportToS3Tests::ShouldPreserveIncrBackupFlag >> TFlatTest::RejectByPerShardReadSize [GOOD] >> TFlatTest::RejectByPerRequestSize >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK >> TLocksTest::Range_IncorrectDot1 [GOOD] >> TLocksTest::Range_IncorrectDot2 >> TFlatTest::SelectRangeItemsLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs4 >> TLocksTest::BrokenLockErase ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbYqlClient::TableKeyRangesSinglePartition [GOOD] Test command err: 2025-03-28T09:24:19.255510Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486788817020649969:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:19.258964Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001423/r3tmp/tmp9epCqu/pdisk_1.dat 2025-03-28T09:24:19.779552Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:19.784440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:19.784733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:19.790918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9164, node 1 2025-03-28T09:24:20.025203Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2025-03-28T09:24:20.025239Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:20.025261Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:20.025403Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12661 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:20.369701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:22.668009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788829905552884:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:22.668118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:22.939085Z node 1 :TX_PROXY ERROR: Actor# [1:7486788829905552911:2632] txid# 281474976710658, Access denied for badguy@builtin on path /Root, with access CreateTable 2025-03-28T09:24:22.939264Z node 1 :TX_PROXY ERROR: Actor# [1:7486788829905552911:2632] txid# 281474976710658, issues: { message: "Access denied for badguy@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-03-28T09:24:23.074974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486788834200520231:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:23.075048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:23.088849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:24.882845Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486788840129576102:2144];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:24.883518Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001423/r3tmp/tmp41PVU7/pdisk_1.dat 2025-03-28T09:24:25.043135Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:25.083535Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:25.083608Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:25.086910Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4491, node 4 2025-03-28T09:24:25.227749Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:25.227801Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:25.227811Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:25.228260Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9978 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:25.376082Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:24:28.050916Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788857309446233:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:28.051014Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:28.064961Z node 4 :TX_PROXY ERROR: Actor# [4:7486788857309446254:2628] txid# 281474976715658, Access denied for badguy@builtin on path /Root, with access CreateTable 2025-03-28T09:24:28.065078Z node 4 :TX_PROXY ERROR: Actor# [4:7486788857309446254:2628] txid# 281474976715658, issues: { message: "Access denied for badguy@builtin on path /Root" issue_code: 200000 severity: 1 } 2025-03-28T09:24:28.150813Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486788857309446266:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:28.150896Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:24:28.159872Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:24:29.632791Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486788861110910866:2217];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:24:29.693964Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001423/r3tmp/tmpjblyLd/pdisk_1.dat 2025-03-28T09:24:29.796119Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:24:29.835262Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:24:29.835373Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:24:29.840467Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2211, node 7 2025-03-28T09:24:29.934176Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:24:29.934200Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:24:29.934207Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:24:29.934340Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27009 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:24:30.167168Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but ... 4976719643. Ctx: { TraceId: 01jqe1gsyw7875snd232ra38e0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:22.040669Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719644. Ctx: { TraceId: 01jqe1gt213fpyreqjn08wpcj6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:22.127941Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719645. 
Ctx: { TraceId: 01jqe1gt546qcbt21x6gfhzmgv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:22.239398Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719646. Ctx: { TraceId: 01jqe1gt7wffta31tvmaq0shdg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:22.334214Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719647. Ctx: { TraceId: 01jqe1gtbbb05afzjtwf7amxyk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:22.439001Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719648. Ctx: { TraceId: 01jqe1gtea3qttsqs37qwd48ss, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:22.543243Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719649. Ctx: { TraceId: 01jqe1gthm4q3f9sbg7ff7f3fd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:22.630207Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719650. Ctx: { TraceId: 01jqe1gtms9qjnaghxejn12673, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:22.719025Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719651. Ctx: { TraceId: 01jqe1gtqh0yxxfgnw050tyx4e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:22.804890Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719652. Ctx: { TraceId: 01jqe1gtt92wm796vx5ewf6h6h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:22.896317Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719653. Ctx: { TraceId: 01jqe1gtx0et3r5sp3jr7yp3bh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:23.002760Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719654. Ctx: { TraceId: 01jqe1gtztbjaxvjhf8c78vzv2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:23.090229Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719655. 
Ctx: { TraceId: 01jqe1gv35bvec38e4gvbmn0zt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:23.175783Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719656. Ctx: { TraceId: 01jqe1gv5y1dmjsaxrfz24z51m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:23.258399Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719657. Ctx: { TraceId: 01jqe1gv8j8zhx7j4h4tr7hahh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:23.358396Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719658. Ctx: { TraceId: 01jqe1gvb492td0691j58eebyw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:23.454979Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719659. Ctx: { TraceId: 01jqe1gve88gnjhzjvr96vttzy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:23.577742Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719660. Ctx: { TraceId: 01jqe1gvh9dn0k667hf5j97x1s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:23.661023Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719661. Ctx: { TraceId: 01jqe1gvn3fwcew72wv04cy6a7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:23.749097Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719662. Ctx: { TraceId: 01jqe1gvqq5mp0wszwe6swdf4q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:23.844456Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719663. Ctx: { TraceId: 01jqe1gvtf8kbjbk2djn8xsz57, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:23.932694Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719664. Ctx: { TraceId: 01jqe1gvxec76hq5ytq5gsw3sq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:24.021314Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719665. 
Ctx: { TraceId: 01jqe1gw074qd3zp5yhsr1vfe7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:24.105821Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719666. Ctx: { TraceId: 01jqe1gw2z7ej621yspcw9sfqv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:24.193099Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719667. Ctx: { TraceId: 01jqe1gw5ma9ny4sp56f59x29t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:24.278714Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976719668. Ctx: { TraceId: 01jqe1gw8b1n644v1pd6wsty5s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=OTliN2VjNzgtMzdjZmRlY2UtMmYwNWVjYjAtZDBkOTFmODA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:32:24.286767Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 9 2025-03-28T09:32:24.287523Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:32:25.878491Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486790907683595058:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:25.878575Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001423/r3tmp/tmpM3McrI/pdisk_1.dat 2025-03-28T09:32:26.014424Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:26.057675Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:26.057789Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:26.060807Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3768, node 10 2025-03-28T09:32:26.138849Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:26.138879Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:26.138896Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:26.139087Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6305 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:26.405002Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:29.843000Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 >> Viewer::JsonStorageListingV1 [GOOD] >> Viewer::JsonStorageListingV1GroupIdFilter >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] >> KqpExtractPredicateLookup::SqlInJoin [GOOD] >> KqpKv::BulkUpsert >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs >> TConsoleTests::TestRemoveAttributes [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain >> KikimrProvider::TestFillAuthPropertiesNone [GOOD] >> KikimrProvider::TestFillAuthPropertiesServiceAccount [GOOD] >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> KikimrIcGateway::TestCreateSameExternalTable >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD] Test command err: 2025-03-28T09:30:56.921333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:30:56.921419Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:56.983961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:30:58.126413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-03-28T09:30:58.273130Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:30:58.273679Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001cae/r3tmp/tmp9KUoiY/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-03-28T09:30:58.274396Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001cae/r3tmp/tmp9KUoiY/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001cae/r3tmp/tmp9KUoiY/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16397144828057658799 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:30:58.306896Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:30:58.307246Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001cae/r3tmp/tmp9KUoiY/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:30:58.307491Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001cae/r3tmp/tmp9KUoiY/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001cae/r3tmp/tmp9KUoiY/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 3296670342577325687 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:30:58.310277Z node 2 :BS_LOCALRECOVERY CRIT: VDISK[80000000:_:0:0:0]: LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file 
"/home/runner/.ya/build/build_root/2909/001cae/r3tmp/tmp9KUoiY/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2025-03-28T09:30:58.443940Z node 9 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:30:58.444490Z node 9 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001cae/r3tmp/tmp9KUoiY/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:30:58.444795Z node 9 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001cae/r3tmp/tmp9KUoiY/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001cae/r3tmp/tmp9KUoiY/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16973514160211088192 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:30:58.493107Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:30:58.493654Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001cae/r3tmp/tmp9KUoiY/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:30:58.493922Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001cae/r3tmp/tmp9KUoiY/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001cae/r3tmp/tmp9KUoiY/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16897575537687876974 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:30:58.501926Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 ... istered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerRequestDataSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerShardReadSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.PerShardIncomingReadSetSizeLimit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TxLimitControls.DefaultTimeoutMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.EnableLeaderLeases was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.MinLeaderLeaseDurationUs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.VolatilePlanLeaseMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.PlanAheadTimeShiftMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control CoordinatorControls.MinPlanResolutionMs was registered before TImmediateControlsConfigurator creation. 
A default value may have been used before it was configured. WARNING: immediate control SchemeShardControls.ForceShardSplitDataSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control SchemeShardControls.DisableForceShardSplit was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.ProfileSamplingRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.GuardedSamplingRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.PageCacheTargetSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TCMallocControls.PageCacheReleaseRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.EnableLocalSyncLogDataCutting was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.EnableSyncLogChunkCompressionHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.EnableSyncLogChunkCompressionSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.MaxSyncLogChunksInFlightHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.MaxSyncLogChunksInFlightSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.BurstThresholdNsHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.BurstThresholdNsSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.BurstThresholdNsNVME was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DiskTimeAvailableScaleHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DiskTimeAvailableScaleSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DiskTimeAvailableScaleNVME was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.DefaultHugeGarbagePerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
WARNING: immediate control VDiskControls.HugeDefragFreeSpaceBorderPerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.MaxChunksToDefragInflight was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingDryRun was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMinLevel0SstCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMaxLevel0SstCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMinInplacedSizeHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMaxInplacedSizeHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMinInplacedSizeSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMaxInplacedSizeSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMinOccupancyPerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMaxOccupancyPerMille was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMinLogChunkCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.ThrottlingMaxLogChunkCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control VDiskControls.MaxInProgressSyncCount was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TabletControls.MaxCommitRedoMB was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.SlowDiskThreshold was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.PredictedDelayMultiplier was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.LongRequestThresholdMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
WARNING: immediate control DSProxyControls.MaxNumOfSlowDisks was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.SlowDiskThresholdHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.PredictedDelayMultiplierHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.MaxNumOfSlowDisksHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.SlowDiskThresholdSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.PredictedDelayMultiplierSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.MaxNumOfSlowDisksSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.RequestReportingSettings.BucketSize was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.RequestReportingSettings.LeakDurationMs was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control DSProxyControls.RequestReportingSettings.LeakRate was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control PDiskControls.MaxCommonLogChunksHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control PDiskControls.MaxCommonLogChunksSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control PDiskControls.UseNoopSchedulerHDD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control PDiskControls.UseNoopSchedulerSSD was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control BlobStorageControllerControls.EnableSelfHealWithDegraded was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TableServiceControls.EnableMergeDatashardReads was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. WARNING: immediate control TestShardControls.DisableWrites was registered before TImmediateControlsConfigurator creation. A default value may have been used before it was configured. 
|92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> KqpWorkload::STOCK [GOOD] >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> TExportToS3Tests::AuditCompletedExport >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] >> KikimrProvider::TestFillAuthPropertiesBasic [GOOD] >> KikimrProvider::TestFillAuthPropertiesAws [GOOD] >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter-UseSink >> TLocksTest::Range_BrokenLock0 [GOOD] >> TLocksTest::Range_BrokenLock1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::STOCK [GOOD] Test command err: Trying to start YDB, gRPC: 31581, MsgBus: 8947 2025-03-28T09:31:33.223638Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790683155526736:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:33.223724Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001268/r3tmp/tmpPKeVRD/pdisk_1.dat 2025-03-28T09:31:33.759450Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:33.766784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:33.766863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:33.768958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31581, node 1 2025-03-28T09:31:33.866330Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:33.866358Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:33.866364Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:33.866472Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8947 TClient is connected to server localhost:8947 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:31:34.541447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:31:34.569766Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480
2025-03-28T09:31:36.323217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790696040429294:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:36.323364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:36.711322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-03-28T09:31:36.842252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
2025-03-28T09:31:37.363356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-28T09:31:37.740630Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790700335400669:2633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:37.740718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:37.740967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790700335400674:2636], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: { <main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:31:37.744542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480
2025-03-28T09:31:37.757014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790700335400676:2637], DatabaseId: /Root, PoolId: default, Scheduled retry for error: { <main>: Error: Transaction 281474976710661 completed, doublechecking }
2025-03-28T09:31:37.838718Z node 1 :TX_PROXY ERROR: Actor# [1:7486790700335400808:5135] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:31:38.226674Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790683155526736:2060];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:31:38.227324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:31:48.747732Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T09:31:48.747779Z node 1 :IMPORT WARN: Table profiles were not loaded
took: 0.578791s
took: 0.594873s
took: 0.596441s
took: 0.600603s
took: 0.606946s
took: 0.612908s
took: 0.613865s
took: 0.615621s
took: 0.620400s
took: 0.625035s
took: 5.519044s
took: 5.519674s
took: 5.532403s
took: 5.537212s
took: 5.541634s
took: 5.544522s
took: 5.547198s
took: 5.547332s
took: 5.547933s
took: 5.563362s
took: 5.913041s
2025-03-28T09:32:32.085293Z node 1 :TX_DATASHARD ERROR: Complete [1743154352079 : 281474976711337] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) |
2025-03-28T09:32:32.085572Z node 1 :TX_DATASHARD ERROR: Complete [1743154352080 : 281474976711339] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) |
2025-03-28T09:32:32.089241Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmNhMWIyNWItYzA1NzQwNjYtMjQ2MGUyYTQtOTFhYTA2OTU=, ActorId: [1:7486790910788811684:5378], ActorState: ExecuteState, TraceId: 01jqe1gyrn5bx7wdk2n4p6w6c4, Create QueryResponse for error on request, msg:
2025-03-28T09:32:32.099095Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGI0MGI0YjEtNDU3ZjRkNC02ODBiODA5NS1lMGIxZDRjZQ==, ActorId: [1:7486790910788811683:5377], ActorState: ExecuteState, TraceId: 01jqe1gyr83k9sdq9h0jz2wz6z, Create QueryResponse for error on request, msg:
2025-03-28T09:32:32.100427Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGViNDhkMjEtYWFhNmJkNTYtZjIwZjgzMTMtZDJhYmRkNzc=, ActorId: [1:7486790910788811679:5373], ActorState: ExecuteState, TraceId: 01jqe1gysw9p3bkt714v7yc1ch, Create QueryResponse for error on request, msg:
2025-03-28T09:32:32.101486Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MmY2NzYwYmEtNDBiMTk4MjAtYmYwM2FhMGMtZWEzMjBjZTE=, ActorId: [1:7486790910788811687:5381], ActorState: ExecuteState, TraceId: 01jqe1gysv21egk8q3qefaz2m4, Create QueryResponse for error on request, msg:
2025-03-28T09:32:32.102170Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTk3ZjA0ZGYtNzVhZjkxODMtZDlmZmFlOWUtMmJmZWIyYmU=, ActorId: [1:7486790910788811681:5375], ActorState: ExecuteState, TraceId: 01jqe1gyrna2zr3nwd86mw7y50, Create QueryResponse for error on request, msg:
2025-03-28T09:32:32.103019Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmUzMGIxYTMtYmI0MjhmZWQtY2EyM2ZhOWEtMjJkZDE5ZWI=, ActorId: [1:7486790910788811678:5372], ActorState: ExecuteState, TraceId: 01jqe1gysee12j49wvx7hp3r5m, Create QueryResponse for error on request, msg:
2025-03-28T09:32:32.103866Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODVkNzY0N2UtYjk2YTQzMmMtNGJkZTdhMDQtOWY3ZmIzZWU=, ActorId: [1:7486790910788811685:5379], ActorState: ExecuteState, TraceId: 01jqe1gysp3432mf8ehnt0bbpn, Create QueryResponse for error on request, msg:
2025-03-28T09:32:32.107023Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGQzNmE4NDUtZTNlN2U4YTAtY2FiY2JlOWQtZWJhODQ4YzE=, ActorId: [1:7486790910788811686:5380], ActorState: ExecuteState, TraceId: 01jqe1gysfe3281rn6g1mnzzaa, Create QueryResponse for error on request, msg:
2025-03-28T09:32:32.108603Z node 1 :TX_DATASHARD ERROR: Complete [1743154352123 : 281474976711343] from 72075186224037921 at tablet 72075186224037921, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) |
2025-03-28T09:32:32.108797Z node 1 :TX_DATASHARD ERROR: Complete [1743154352087 : 281474976711340] from 72075186224037900 at tablet 72075186224037900, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) |
2025-03-28T09:32:32.109101Z node 1 :TX_DATASHARD ERROR: Complete [1743154352079 : 281474976711337] from 72075186224037890 at tablet 72075186224037890, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) |
2025-03-28T09:32:32.109639Z node 1 :TX_DATASHARD ERROR: Complete [1743154352080 : 281474976711339] from 72075186224037911 at tablet 72075186224037911, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) |
2025-03-28T09:32:32.110305Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzA3YjQ5NjQtYWM0NDMzMjYtYjFkYmMyY2ItMmUwNDliNGU=, ActorId: [1:7486790910788811680:5374], ActorState: ExecuteState, TraceId: 01jqe1gyrx6240r2xp4bpa37b8, Create QueryResponse for error on request, msg:
2025-03-28T09:32:32.111195Z node 1 :TX_DATASHARD ERROR: Complete [1743154352122 : 281474976711341] from 72075186224037928 at tablet 72075186224037928, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) |
2025-03-28T09:32:32.111837Z node 1 :TX_DATASHARD ERROR: Complete [1743154352087 : 281474976711340] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) |
2025-03-28T09:32:32.111911Z node 1 :TX_DATASHARD ERROR: Complete [1743154352094 : 281474976711338] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) |
2025-03-28T09:32:32.111947Z node 1 :TX_DATASHARD ERROR: Complete [1743154352114 : 281474976711342] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_C ...
9:32:33.150380Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found
2025-03-28T09:32:33.150394Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found
2025-03-28T09:32:33.153163Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found
2025-03-28T09:32:33.153197Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found
2025-03-28T09:32:33.153216Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found
2025-03-28T09:32:33.153229Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found
2025-03-28T09:32:33.153242Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found
2025-03-28T09:32:33.153262Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found
2025-03-28T09:32:33.153280Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found
2025-03-28T09:32:33.153294Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found
2025-03-28T09:32:33.153309Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found
2025-03-28T09:32:33.153325Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found
2025-03-28T09:32:33.156524Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found
2025-03-28T09:32:33.156546Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found
2025-03-28T09:32:33.156555Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found
2025-03-28T09:32:33.156564Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found
2025-03-28T09:32:33.158826Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found
2025-03-28T09:32:33.158842Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found
2025-03-28T09:32:33.158856Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found
2025-03-28T09:32:33.158872Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found
2025-03-28T09:32:33.163567Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found
2025-03-28T09:32:33.163590Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found
2025-03-28T09:32:33.163608Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found
2025-03-28T09:32:33.163622Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found
2025-03-28T09:32:33.163638Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found
2025-03-28T09:32:33.163650Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found
2025-03-28T09:32:33.163659Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found
2025-03-28T09:32:33.163667Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found
2025-03-28T09:32:33.163675Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found
2025-03-28T09:32:33.163683Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found
2025-03-28T09:32:33.163691Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found
2025-03-28T09:32:33.167166Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found
2025-03-28T09:32:33.167195Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found
2025-03-28T09:32:33.167210Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found
2025-03-28T09:32:33.167232Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found
2025-03-28T09:32:33.167252Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found
2025-03-28T09:32:33.168669Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found
2025-03-28T09:32:33.179106Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found
2025-03-28T09:32:33.300568Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037966 not found
2025-03-28T09:32:33.300609Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037945 not found
2025-03-28T09:32:33.300625Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037954 not found
2025-03-28T09:32:33.300645Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037935 not found
2025-03-28T09:32:33.300659Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found
2025-03-28T09:32:33.300674Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037938 not found
2025-03-28T09:32:33.300689Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037940 not found
2025-03-28T09:32:33.300703Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037961 not found
2025-03-28T09:32:33.300750Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037937 not found
2025-03-28T09:32:33.300800Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037950 not found
2025-03-28T09:32:33.300819Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037968 not found
2025-03-28T09:32:33.300835Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found
2025-03-28T09:32:33.300881Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037936 not found
2025-03-28T09:32:33.300896Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037969 not found
2025-03-28T09:32:33.300912Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037947 not found
2025-03-28T09:32:33.300932Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037965 not found
2025-03-28T09:32:33.301990Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037953 not found
2025-03-28T09:32:33.302097Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037962 not found
2025-03-28T09:32:33.308265Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037952 not found
2025-03-28T09:32:33.311171Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037948 not found
2025-03-28T09:32:33.314602Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037963 not found
2025-03-28T09:32:33.314634Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037964 not found
2025-03-28T09:32:33.314649Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037967 not found
2025-03-28T09:32:33.314713Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037956 not found
2025-03-28T09:32:33.314734Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037955 not found
2025-03-28T09:32:33.314749Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037949 not found
2025-03-28T09:32:33.314819Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037944 not found
2025-03-28T09:32:33.314863Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037958 not found
2025-03-28T09:32:33.314879Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037943 not found
2025-03-28T09:32:33.314894Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037960 not found
2025-03-28T09:32:33.314909Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found
2025-03-28T09:32:33.314952Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037951 not found
2025-03-28T09:32:33.314971Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found
2025-03-28T09:32:33.314985Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037959 not found
2025-03-28T09:32:33.315000Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037942 not found
2025-03-28T09:32:33.315014Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found
2025-03-28T09:32:33.315029Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found
2025-03-28T09:32:33.315044Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037946 not found
2025-03-28T09:32:33.315088Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037957 not found
2025-03-28T09:32:33.315111Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037939 not found
|92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-28T09:32:29.013141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-28T09:32:29.013225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:32:29.013260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-28T09:32:29.013297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-28T09:32:29.013345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-28T09:32:29.013373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-28T09:32:29.013461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:32:29.013551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-28T09:32:29.013928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:32:29.097432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:32:29.097495Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:32:29.111991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:32:29.112250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-28T09:32:29.112406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-28T09:32:29.117694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-28T09:32:29.117911Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-28T09:32:29.118560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-28T09:32:29.118793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-28T09:32:29.120948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:32:29.122152Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:32:29.122222Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:32:29.122393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-28T09:32:29.122437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:32:29.122485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-28T09:32:29.122566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-28T09:32:29.130638Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062]
2025-03-28T09:32:29.236275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-28T09:32:29.236516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:32:29.236780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-28T09:32:29.237036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-28T09:32:29.237110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:32:29.239646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-28T09:32:29.239790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-28T09:32:29.240037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:32:29.240094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-28T09:32:29.240147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-28T09:32:29.240199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-28T09:32:29.242500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:32:29.242576Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-28T09:32:29.242628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-28T09:32:29.244525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:32:29.244576Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:32:29.244618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:32:29.244713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-28T09:32:29.248799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-28T09:32:29.250850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-28T09:32:29.251123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-28T09:32:29.252195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-28T09:32:29.252339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:32:29.252399Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:32:29.252710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-28T09:32:29.252777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:32:29.252975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-28T09:32:29.253060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-28T09:32:29.255226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:32:29.255284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:32:29.255478Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:32:29.255525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1
FAKE_COORDINATOR: Erasing txId 1
2025-03-28T09:32:29.255925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:32:29.255976Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-28T09:32:29.256115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:32:29.256179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:32:29.256227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:32:29.256264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:32:29.256314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-28T09:32:29.256373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:32:29.256417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-28T09:32:29.256450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-28T09:32:29.256524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-28T09:32:29.256565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-28T09:32:29.256600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-28T09:32:29.258615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-28T09:32:29.258756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-28T09:32:29.258797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
ardId: 72075186233409547 CpuTimeUsec: 214 } }
2025-03-28T09:32:34.186238Z node 4 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944
2025-03-28T09:32:34.186341Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409547, shardIdx: 72057594046678944:2, operationId: 281474976710760:0, left await: 0, at schemeshard: 72057594046678944
2025-03-28T09:32:34.186386Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 3 -> 128
2025-03-28T09:32:34.188452Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710760:0, at schemeshard: 72057594046678944
2025-03-28T09:32:34.188610Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944
2025-03-28T09:32:34.188671Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710760:0 ProgressState, at schemeshard: 72057594046678944
2025-03-28T09:32:34.188748Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1
2025-03-28T09:32:34.188896Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409547 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 5000006 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-28T09:32:34.190609Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816
2025-03-28T09:32:34.190753Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006
FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006
FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710760 at step: 5000006
2025-03-28T09:32:34.191285Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-28T09:32:34.191392Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 17179871338 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:32:34.191453Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TPropose, opId: 281474976710760:0 HandleReply TEvOperationPlan, stepId: 5000006, at schemeshard: 72057594046678944
2025-03-28T09:32:34.191592Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 129
2025-03-28T09:32:34.191725Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000006
2025-03-28T09:32:34.219989Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:32:34.220060Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710760, path id: [OwnerId: 72057594046678944, LocalPathId: 4]
2025-03-28T09:32:34.220324Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:32:34.220365Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:207:2209], at schemeshard: 72057594046678944, txId: 281474976710760, path id: 4
REQUEST: PUT /metadata.json HTTP/1.1
HEADERS:
Host: localhost:6946
Accept: */*
Connection: Upgrade, HTTP2-Settings
Upgrade: h2c
HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA
amz-sdk-invocation-id: 05EE4337-206B-4C89-AC9D-8A298F72B854
amz-sdk-request: attempt=1
content-length: 73
content-md5: fOPVvXe4lXvxEe0jvYDDBA==
content-type: binary/octet-stream
user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8
S3_MOCK::HttpServeWrite: /metadata.json / / 73
2025-03-28T09:32:34.220631Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944
2025-03-28T09:32:34.220688Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710760:0 ProgressState, at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 281474976710760
2025-03-28T09:32:34.221391Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710760
2025-03-28T09:32:34.221475Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710760
2025-03-28T09:32:34.221505Z node 4 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710760
2025-03-28T09:32:34.221538Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710760, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3
2025-03-28T09:32:34.221571Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4
2025-03-28T09:32:34.221651Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true
REQUEST: PUT /scheme.pb HTTP/1.1
HEADERS:
Host: localhost:6946
Accept: */*
Connection: Upgrade, HTTP2-Settings
Upgrade: h2c
HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA
amz-sdk-invocation-id: CDD743C4-72AA-4459-A49F-809DD9C1EFBF
amz-sdk-request: attempt=1
content-length: 465
content-md5: I8OyVo4ze5n8ehz5iBQr/Q==
content-type: binary/octet-stream
user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8
S3_MOCK::HttpServeWrite: /scheme.pb / / 465
2025-03-28T09:32:34.225022Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710760
REQUEST: PUT /data_00.csv HTTP/1.1
HEADERS:
Host: localhost:6946
Accept: */*
Connection: Upgrade, HTTP2-Settings
Upgrade: h2c
HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA
amz-sdk-invocation-id: A4F1C668-BF8E-4998-9350-2F750D9B2FA4
amz-sdk-request: attempt=1
content-length: 0
content-md5: 1B2M2Y8AsgTpgAmY7PhCfg==
content-type: binary/octet-stream
user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8
S3_MOCK::HttpServeWrite: /data_00.csv / / 0
2025-03-28T09:32:34.250771Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 490 RawX2: 17179871643 } Origin: 72075186233409547 State: 2 TxId: 281474976710760 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 }
2025-03-28T09:32:34.250836Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710760, tablet: 72075186233409547, partId: 0
2025-03-28T09:32:34.250974Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944, message: Source { RawX1: 490 RawX2: 17179871643 } Origin: 72075186233409547 State: 2 TxId: 281474976710760 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 }
2025-03-28T09:32:34.251098Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710760:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 490 RawX2: 17179871643 } Origin: 72075186233409547 State: 2 TxId: 281474976710760 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 0 RowsProcessed: 0 }
2025-03-28T09:32:34.251170Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710760:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944
2025-03-28T09:32:34.251217Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710760:0, at schemeshard: 72057594046678944
2025-03-28T09:32:34.251260Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710760:0, datashard: 72075186233409547, at schemeshard: 72057594046678944
2025-03-28T09:32:34.251308Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 129 -> 240
2025-03-28T09:32:34.251472Z node 4 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710760:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:32:34.253443Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710760:0, at schemeshard: 72057594046678944
2025-03-28T09:32:34.253748Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944
2025-03-28T09:32:34.253794Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState
2025-03-28T09:32:34.253904Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1
2025-03-28T09:32:34.253937Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1
2025-03-28T09:32:34.253973Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1
2025-03-28T09:32:34.253999Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1
2025-03-28T09:32:34.254033Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true
2025-03-28T09:32:34.254095Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:133:2156] message: TxId: 281474976710760
2025-03-28T09:32:34.254140Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1
2025-03-28T09:32:34.254171Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0
2025-03-28T09:32:34.254199Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0
2025-03-28T09:32:34.254304Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3
2025-03-28T09:32:34.256178Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760
2025-03-28T09:32:34.256241Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760
2025-03-28T09:32:34.257921Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult
2025-03-28T09:32:34.257987Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:520:2481]
TestWaitNotification: OK eventTxId 102
>> TFlatTest::SelectRangeForbidNullArgs4 [GOOD]
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD
>> TExportToS3Tests::AuditCompletedExport [GOOD]
>> TExportToS3Tests::AuditCancelledExport
>> TopicAutoscaling::ReBalancingAfterSplit_sessionsWithPartition [GOOD]
>> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK
>> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD]
>> Balancing::Balancing_OneTopic_PQv1 [GOOD]
>> Balancing::Balancing_ManyTopics_TopicApi
>> TFlatTest::RejectByPerRequestSize [GOOD]
>> TVectorIndexTests::CreateTableCoveredEmbedding
>> KikimrIcGateway::TestCreateSameExternalTable [GOOD]
>> KikimrIcGateway::TestDropExternalTable
>> TopicAutoscaling::PartitionSplit_ManySession_BeforeAutoscaleAwareSDK [GOOD]
>> TopicAutoscaling::PartitionSplit_ManySession_PQv1
>> TColumnShardTestSchema::RebootDrop
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD]
Test command err:
2025-03-28T09:32:29.452977Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790921622889797:2194];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:32:29.453448Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee9/r3tmp/tmpLZP9Ei/pdisk_1.dat
2025-03-28T09:32:29.794258Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:32:29.798507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:32:29.798557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:32:29.802016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:8753
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-03-28T09:32:30.031886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-28T09:32:30.066209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:32:32.875176Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790937540422834:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:32:32.875207Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee9/r3tmp/tmpKV7yn6/pdisk_1.dat
2025-03-28T09:32:32.961697Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:32:33.007004Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:32:33.007085Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:32:33.013243Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:2797
WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'dc-1' success.
2025-03-28T09:32:33.190571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
waiting...
2025-03-28T09:32:33.216258Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480
2025-03-28T09:32:33.222616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
>> TExportToS3Tests::AuditCancelledExport [GOOD]
>> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer
>> TopicAutoscaling::ReadingAfterSplitTest_AutoscaleAwareSDK_AutoCommit [GOOD]
>> TopicAutoscaling::ReadingAfterSplitTest_PQv1
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141]
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141]
2025-03-28T09:32:20.363164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-28T09:32:20.363335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:32:20.363382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-28T09:32:20.363445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-28T09:32:20.364112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-28T09:32:20.364159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-28T09:32:20.364325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:32:20.364433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-28T09:32:20.365546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:32:20.452476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:32:20.452543Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:32:20.463054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:32:20.463281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-28T09:32:20.463440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-28T09:32:20.475442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-28T09:32:20.475650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-28T09:32:20.476354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-28T09:32:20.476971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-28T09:32:20.481150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:32:20.492302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:32:20.492396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:32:20.493842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute
2025-03-28T09:32:20.493910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:32:20.493969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete
2025-03-28T09:32:20.494176Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944
2025-03-28T09:32:20.502363Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0
Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062]
2025-03-28T09:32:20.637687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944
2025-03-28T09:32:20.637942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:32:20.638134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0
2025-03-28T09:32:20.638324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944
2025-03-28T09:32:20.638375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:32:20.640655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944
2025-03-28T09:32:20.640841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot
2025-03-28T09:32:20.641128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:32:20.641187Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944
2025-03-28T09:32:20.641238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state
2025-03-28T09:32:20.641269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3
2025-03-28T09:32:20.642954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:32:20.643003Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944
2025-03-28T09:32:20.643028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128
2025-03-28T09:32:20.644871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:32:20.644924Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:32:20.644964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:32:20.645013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1
2025-03-28T09:32:20.647399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545
2025-03-28T09:32:20.649047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816
2025-03-28T09:32:20.649206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545
FAKE_COORDINATOR: Add transaction: 1 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001
2025-03-28T09:32:20.649970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-28T09:32:20.650075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944
2025-03-28T09:32:20.650141Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:32:20.650376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-28T09:32:20.650421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944
2025-03-28T09:32:20.650578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1
2025-03-28T09:32:20.650657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
FAKE_COORDINATOR: Erasing txId 1
2025-03-28T09:32:20.652189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944
2025-03-28T09:32:20.652223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1]
2025-03-28T09:32:20.652354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:32:20.652496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1
2025-03-28T09:32:20.652546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:32:20.652577Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState
2025-03-28T09:32:20.652645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:32:20.652681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:32:20.652734Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:32:20.652756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:32:20.652784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false
2025-03-28T09:32:20.652809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1
2025-03-28T09:32:20.652841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0
2025-03-28T09:32:20.652862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0
2025-03-28T09:32:20.652909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2
2025-03-28T09:32:20.652950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0
2025-03-28T09:32:20.652979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3
2025-03-28T09:32:20.654886Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-28T09:32:20.654998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1
2025-03-28T09:32:20.655039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
94046678944 2025-03-28T09:32:36.020548Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-03-28T09:32:36.020611Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710763 ready parts: 1/1 2025-03-28T09:32:36.020728Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:36.021323Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:36.021391Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:36.021414Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-03-28T09:32:36.021435Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2025-03-28T09:32:36.021465Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-28T09:32:36.021817Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:36.021872Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:36.021900Z node 3 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710763 2025-03-28T09:32:36.021936Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 11 2025-03-28T09:32:36.021970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:32:36.022029Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-03-28T09:32:36.024355Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-28T09:32:36.024436Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710763, at schemeshard: 72057594046678944 2025-03-28T09:32:36.024457Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 0/1, is published: true 2025-03-28T09:32:36.024479Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710763, at schemeshard: 72057594046678944 2025-03-28T09:32:36.024951Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710763:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:281474976710763 msg type: 269090816 2025-03-28T09:32:36.025033Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710763, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710763 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710763 at step: 5000009 2025-03-28T09:32:36.025285Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000009, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:36.025355Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710763 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 12884904044 } } Step: 5000009 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:36.025392Z node 3 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976710763:0, step: 5000009, at schemeshard: 72057594046678944 2025-03-28T09:32:36.025479Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976710763:0, at schemeshard: 72057594046678944 2025-03-28T09:32:36.025528Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-03-28T09:32:36.025563Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-03-28T09:32:36.025597Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710763:0 progress is 1/1 2025-03-28T09:32:36.025620Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-03-28T09:32:36.025657Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:32:36.025700Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:32:36.025738Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710763, ready parts: 1/1, is published: false 2025-03-28T09:32:36.025787Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710763 ready parts: 1/1 2025-03-28T09:32:36.025819Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710763:0 2025-03-28T09:32:36.025853Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710763:0 2025-03-28T09:32:36.025902Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:32:36.025930Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710763, publications: 2, subscribers: 1 2025-03-28T09:32:36.025954Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2025-03-28T09:32:36.025988Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710763, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-03-28T09:32:36.026494Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:36.026550Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 281474976710763 2025-03-28T09:32:36.027926Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:36.027968Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:36.028084Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710763, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:32:36.028149Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:36.028170Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:207:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 1 2025-03-28T09:32:36.028208Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:207:2209], at schemeshard: 72057594046678944, txId: 281474976710763, path id: 4 FAKE_COORDINATOR: Erasing txId 281474976710763 2025-03-28T09:32:36.028680Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:36.028763Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:36.028799Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710763 2025-03-28T09:32:36.028863Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2025-03-28T09:32:36.028902Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-28T09:32:36.029371Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:36.029460Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:36.029486Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710763 2025-03-28T09:32:36.029512Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710763, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-28T09:32:36.029535Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:32:36.029637Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710763, subscribers: 1 2025-03-28T09:32:36.029677Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send 
TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:126:2152] 2025-03-28T09:32:36.032155Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:36.032407Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710763 2025-03-28T09:32:36.032454Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710763 2025-03-28T09:32:36.032492Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710763 2025-03-28T09:32:36.032526Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-03-28T09:32:36.032562Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763 2025-03-28T09:32:36.032580Z node 3 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976710763, id# 103, itemIdx# 4294967295 2025-03-28T09:32:36.033790Z node 3 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-28T09:32:36.033872Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:32:36.033911Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:772:2707] TestWaitNotification: OK eventTxId 103 >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByPerRequestSize [GOOD] Test command err: 2025-03-28T09:32:17.819464Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790870878708422:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:17.819590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef1/r3tmp/tmpvqlidL/pdisk_1.dat 2025-03-28T09:32:18.105019Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:18.172416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:18.172493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:18.174273Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5568 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:18.401103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:18.427492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:22.819730Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790870878708422:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:22.819794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:32:24.723939Z node 1 :TX_DATASHARD ERROR: Transaction read size 51002181 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760 2025-03-28T09:32:24.724082Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002181 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760) | 2025-03-28T09:32:24.724241Z node 1 :TX_PROXY ERROR: Actor# [1:7486790900943481142:2932] txid# 281474976715760 RESPONSE Status# WrongRequest marker# P13c 2025-03-28T09:32:25.223206Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790907311145231:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:25.223304Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef1/r3tmp/tmpxz01t4/pdisk_1.dat 2025-03-28T09:32:25.322719Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:25.365064Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:25.365227Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:25.367657Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23125 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:25.495169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:25.511404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:30.223278Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790907311145231:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:30.223343Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:32:32.127037Z node 2 :TX_DATASHARD ERROR: Transaction read size 51002261 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760 2025-03-28T09:32:32.127130Z node 2 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002261 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760) | 2025-03-28T09:32:32.128002Z node 2 :TX_PROXY ERROR: Actor# [2:7486790937375917949:2933] txid# 281474976715760 RESPONSE Status# WrongRequest marker# P13c 2025-03-28T09:32:32.739038Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486790933770314586:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:32.739130Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef1/r3tmp/tmpdFOO3q/pdisk_1.dat 2025-03-28T09:32:32.841283Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:32.869820Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:32.869925Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:32.871440Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65283 WaitRootIsUp 'dc-1'... 
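[editor's note] Both READ_SIZE_EXECEEDED failures above (nodes 1 and 2) come from the same guard: the datashard rejects a proposed transaction whose declared read size exceeds the configured limit (10000 bytes in this test). A self-contained sketch of such a check, with made-up names — the real check lives in the datashard proposal path and answers with status BAD_REQUEST as seen in the log:

    #include <cstdint>
    #include <string>

    // Hypothetical result type standing in for the real propose response.
    struct TProposeResult {
        bool Ok;
        std::string Error;
    };

    // Reject a proposed transaction whose read size exceeds the per-shard limit.
    TProposeResult CheckReadSize(uint64_t readSize, uint64_t limit,
                                 uint64_t tabletId, uint64_t txId) {
        if (readSize > limit) {
            return {false,
                    "Transaction read size " + std::to_string(readSize) +
                    " exceeds limit " + std::to_string(limit) +
                    " at tablet " + std::to_string(tabletId) +
                    " txId " + std::to_string(txId)};
        }
        return {true, {}};
    }

    // Usage mirroring the node-1 failure above:
    //   CheckReadSize(51002181, 10000, 72075186224037888, 281474976715760)
    //   -> Ok == false, Error matches the logged READ_SIZE_EXECEEDED message.
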
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:33.023594Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:33.046953Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:35.921988Z node 3 :TX_PROXY DEBUG: actor# [3:7486790933770314806:2093] Handle TEvProposeTransaction 2025-03-28T09:32:35.922018Z node 3 :TX_PROXY DEBUG: actor# [3:7486790933770314806:2093] TxId# 281474976715700 ProcessProposeTransaction 2025-03-28T09:32:35.922067Z node 3 :TX_PROXY DEBUG: actor# [3:7486790933770314806:2093] Cookie# 0 userReqId# "" txid# 281474976715700 SEND to# [3:7486790946655217568:2611] DataReq marker# P0 2025-03-28T09:32:35.922112Z node 3 :TX_PROXY DEBUG: Actor# [3:7486790946655217568:2611] Cookie# 0 txid# 281474976715700 HANDLE TDataReq marker# P1 2025-03-28T09:32:35.922571Z node 3 :TX_PROXY DEBUG: Actor [3:7486790946655217568:2611] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2025-03-28T09:32:35.922589Z node 3 :TX_PROXY DEBUG: Actor [3:7486790946655217568:2611] txid 281474976715700 disallow followers cause of operation 2 read target mode 0 2025-03-28T09:32:35.922613Z node 3 :TX_PROXY DEBUG: Actor# [3:7486790946655217568:2611] txid# 281474976715700 SEND to# [3:7486790933770314848:2113] TSchemeCache with 2 scheme entries. 
DataReq marker# P2 2025-03-28T09:32:35.922775Z node 3 :TX_PROXY DEBUG: Actor# [3:7486790946655217568:2611] txid# 281474976715700 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2025-03-28T09:32:35.923895Z node 3 :TX_PROXY DEBUG: Actor# [3:7486790946655217568:2611] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037888 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-03-28T09:32:35.924131Z node 3 :TX_PROXY DEBUG: Actor# [3:7486790946655217568:2611] txid# 281474976715700 SEND TEvProposeTransaction to datashard 72075186224037889 with 734 bytes program affected shards 2 followers disallowed marker# P4 2025-03-28T09:32:35.924175Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:32:35.925253Z node 3 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037888 2025-03-28T09:32:35.925550Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:32:35.926416Z node 3 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976715700 at tablet 72075186224037889 2025-03-28T09:32:35.926886Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:32:35.926989Z node 3 :TX_PROXY DEBUG: Actor# [3:7486790946655217568:2611] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037888 read size 17000855 out readset size 0 marker# P6 2025-03-28T09:32:35.927575Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:32:35.927635Z node 3 :TX_PROXY DEBUG: Actor# [3:7486790946655217568:2611] txid# 281474976715700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037889 read size 9000463 out readset size 0 marker# P6 2025-03-28T09:32:35.927689Z node 3 :TX_PROXY ERROR: Actor# [3:7486790946655217568:2611] txid# 281474976715700 FailProposedRequest: Transaction total read size 26001318 exceeded limit 10000 Status# ExecError 2025-03-28T09:32:35.927752Z node 3 :TX_PROXY ERROR: Actor# [3:7486790946655217568:2611] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c 2025-03-28T09:32:35.927822Z node 3 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037888 txId 281474976715700 2025-03-28T09:32:35.927869Z node 3 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037888 txId 281474976715700 2025-03-28T09:32:35.928275Z node 3 :TX_DATASHARD DEBUG: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037889 txId 281474976715700 2025-03-28T09:32:35.928308Z node 3 :TX_DATASHARD DEBUG: Start TTxCancelTransactionProposal at tablet 72075186224037889 txId 281474976715700 >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] Test command err: Recv 65537 2025-03-28T09:32:37.584060Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2025-03-28T09:32:37.584905Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; 
OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2025-03-28T09:32:37.584963Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2025-03-28T09:32:37.585165Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:674} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2025-03-28T09:32:37.585300Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:577} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 5 PatchedPartId# 5 XorReceiver# yes ParityPart# yes ForceEnd# no 2025-03-28T09:32:37.585350Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:100:0] PullingPart# 5 Send NKikimr::TEvBlobStorage::TEvVGet >> TopicAutoscaling::ControlPlane_AutoscalingWithStorageSizeRetention [GOOD] >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad >> TopicAutoscaling::PartitionMerge_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AuditCancelledExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:32:28.657847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:28.657956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:28.657995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:28.658038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:28.658086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:28.658113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:28.658217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:28.658288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:28.658635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 
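[editor's note] The TVPatchTests output earlier in this block exercises VDisk blob patching with an XOR diff against a parity part. The core arithmetic is simple: XOR the stored bytes with the diff at the changed offsets; for XOR-based erasure codes this keeps parity consistent after a patch. A toy sketch (not the YDB implementation), assuming a flat byte-buffer representation:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Apply an XOR diff to a stored part buffer at a given offset.
    void ApplyXorDiff(std::vector<uint8_t>& part,
                      const std::vector<uint8_t>& diff,
                      size_t offset) {
        for (size_t i = 0; i < diff.size() && offset + i < part.size(); ++i) {
            part[offset + i] ^= diff[i];
        }
    }

With an empty diff buffer the loop is a no-op, which is plausibly the edge case PatchPartFastXorDiffWithEmptyDiffBuffer is probing.
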
2025-03-28T09:32:28.739567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:28.739626Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:28.750518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:28.750771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:28.750938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:28.761398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:28.761635Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:28.762231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:28.762907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:28.767273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:28.768921Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:28.768977Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:28.769545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:28.769607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:28.769662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:28.769883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:28.779745Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:32:28.935377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:28.935621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:28.935882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:28.936154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:28.936214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:28.938593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:28.938809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, 
subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:28.939102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:28.939156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:28.939204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:28.939240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:28.941368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:28.941432Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:28.941474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:28.943144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:28.943188Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:28.943232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:28.943287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:28.947169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:28.948919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:28.949097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:28.950086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:28.950220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:28.950279Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:28.950545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:28.950598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:28.950780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 
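[editor's note] The Increment/DecrementPathDbRefCount pairs throughout these entries track how many in-flight entities (transactions, publications) still reference a path; the "was N" suffix logs the previous count. A toy sketch of that bookkeeping with assumed names — the real schemeshard keys on (OwnerId, LocalPathId) pairs and records a reason string:

    #include <cstdint>
    #include <unordered_map>

    // Toy path refcount table keyed by local path id only.
    class TPathRefCounts {
    public:
        // Returns the previous value, matching the "was N" in the log.
        uint64_t Increment(uint64_t pathId) { return Counts[pathId]++; }

        uint64_t Decrement(uint64_t pathId) {
            auto it = Counts.find(pathId);
            if (it == Counts.end() || it->second == 0) {
                return 0; // underflow guarded; the real code would assert
            }
            return it->second--; // previous value
        }

        // A path is only eligible for cleanup once nothing references it.
        bool IsFree(uint64_t pathId) const {
            auto it = Counts.find(pathId);
            return it == Counts.end() || it->second == 0;
        }

    private:
        std::unordered_map<uint64_t, uint64_t> Counts;
    };
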
2025-03-28T09:32:28.950883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:28.953553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:28.953596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:28.953794Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:28.953848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:32:28.953929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:28.953968Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:28.954066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:28.954102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:28.954160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:28.954193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:28.954227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:28.954265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:28.954304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:28.954331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:28.954391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:28.954435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:28.954488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:28.957022Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:28.957132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:28.957165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
s: 0 S3Settings { Endpoint: "localhost:62269" Scheme: HTTP Bucket: "" ObjectKeyPattern: "" AccessKey: "" SecretKey: "" StorageClass: STORAGE_CLASS_UNSPECIFIED UseVirtualAddressing: true } Table { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false 
LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } NeedToBill: true SnapshotStep: 0 SnapshotTxId: 0 EnableChecksums: false EnablePermissions: false } Internal: true } TxId: 281474976710759 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:37.128637Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TBackup Propose, path: /MyRoot/export-102/0, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:37.128781Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:32:37.129156Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710759:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:37.129218Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpBackup, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:37.130280Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion export in-flight, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:32:37.130343Z node 4 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:32:37.132027Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710759, response: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:37.132281Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710759, database: /MyRoot, subject: , status: StatusAccepted, operation: BACKUP TABLE, path: /MyRoot/export-102/0 2025-03-28T09:32:37.132543Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710759, status# StatusAccepted 2025-03-28T09:32:37.132602Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710759 SchemeshardId: 72057594046678944 2025-03-28T09:32:37.132920Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:37.132976Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710759:0 ProgressState, operation type: TxBackup, at tablet# 72057594046678944 2025-03-28T09:32:37.133022Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710759:0 ProgressState no shards to create, do next state 2025-03-28T09:32:37.133053Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 2 -> 3 2025-03-28T09:32:37.135359Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Execute, at schemeshard: 72057594046678944, message: TargetTxId: 281474976710759 TxId: 102 2025-03-28T09:32:37.135419Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: Execute cancel tx: opId# 102:0, target opId# 281474976710759:0 2025-03-28T09:32:37.135781Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:37.135832Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:37.135982Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-03-28T09:32:37.138060Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProposeCancelTx Complete, at schemeshard: 72057594046678944 2025-03-28T09:32:37.138220Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:37.138258Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TBackup TConfigurePart ProgressState, opId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:32:37.138380Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Propose backup to datashard 72075186233409547 txid 281474976710759:0 at schemeshard 72057594046678944 2025-03-28T09:32:37.138924Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvCancelTxResult: Cookie: 102, at schemeshard: 72057594046678944 2025-03-28T09:32:37.139042Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted Result: "Cancelled at SchemeShard" TargetTxId: 281474976710759 TxId: 102 2025-03-28T09:32:37.140413Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-03-28T09:32:37.140558Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710759, partId: 0, tablet: 72075186233409547 2025-03-28T09:32:37.142360Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710759:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-03-28T09:32:37.143841Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:32:37.143899Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:563:2522] TestWaitNotification: OK eventTxId 102 AUDIT LOG buffer(7): 2025-03-28T09:32:36.268369Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-03-28T09:32:36.321757Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE TABLE, paths=[/MyRoot/Table], status=SUCCESS, detailed_status=StatusAccepted 2025-03-28T09:32:36.753223Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT START, status=SUCCESS, detailed_status=SUCCESS, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none} 2025-03-28T09:32:36.763775Z: component=schemeshard, tx_id=281474976710757, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE DIRECTORY, paths=[/MyRoot/export-102], status=SUCCESS, detailed_status=StatusAccepted 2025-03-28T09:32:36.797336Z: component=schemeshard, 
tx_id=281474976710758, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=CREATE TABLE COPY FROM, paths=[/MyRoot/export-102/0], status=SUCCESS, detailed_status=StatusAccepted 2025-03-28T09:32:37.132191Z: component=schemeshard, tx_id=281474976710759, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=BACKUP TABLE, paths=[/MyRoot/export-102/0], status=SUCCESS, detailed_status=StatusAccepted 2025-03-28T09:32:37.139310Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}, start_time=2025-03-28T09:32:36.316253Z, end_time=2025-03-28T09:33:06.366253Z AUDIT LOG checked line: 2025-03-28T09:32:37.139310Z: component=schemeshard, id=102, uid=foo, remote_address=127.0.0.1, subject=user@builtin, sanitized_token={none}, database=/MyRoot, operation=EXPORT END, status=ERROR, detailed_status=CANCELLED, reason=Cancelled manually, export_type=s3, export_item_count=1, export_s3_bucket={none}, export_s3_prefix={none}, start_time=2025-03-28T09:32:36.316253Z, end_time=2025-03-28T09:33:06.366253Z |92.4%| [TA] $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableCoveredEmbedding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:37.290053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:37.290140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:37.290180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:37.290218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:37.290252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:37.290274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:37.290342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:37.290410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:37.290641Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:37.369623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:37.369686Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:37.384491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:37.384802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:37.384990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:37.391406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:37.391650Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:37.392292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:37.392485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:37.394387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:37.395570Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:37.395639Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:37.395786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:37.395833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:37.395907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:37.396034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:37.402267Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:37.503702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:37.503905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:37.504093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:37.504282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:37.504338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:37.506453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:37.506563Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:37.506725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:37.506765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:37.506791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:37.506831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:37.508433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:37.508476Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:37.508512Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:37.509767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:37.509797Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:37.509838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:37.509870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:37.517945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:37.520157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:37.520347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:37.521620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:37.521748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:37.521792Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:37.522095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:37.522147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:37.522314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:37.522392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:37.524885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:37.524948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:37.525135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:37.525169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:37.525613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:37.525662Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:37.525756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:37.525787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:37.525822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:37.525871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:37.525910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:37.525970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:37.526014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:37.526053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:37.526131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:37.526167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:37.526196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:37.527921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:37.528032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:37.528059Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Children { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "idx_vector" LocalPathId: 3 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "embedding" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 
DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:37.828182Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:32:37.828387Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplLevelTable" took 213us result status StatusSuccess 2025-03-28T09:32:37.828702Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplLevelTable" PathDescription { Self { Name: "indexImplLevelTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 
5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplLevelTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: true IsBuildInProgress: false } Columns { Name: "__ydb_centroid" Type: "String" TypeId: 4097 Id: 3 NotNull: true IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "__ydb_id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:37.829216Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:32:37.829413Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector/indexImplPostingTable" took 161us result status StatusSuccess 2025-03-28T09:32:37.829663Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/vectors/idx_vector/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "id" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "embedding" Type: "String" TypeId: 4097 Id: 3 
NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "id" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpExplain::ExplainStream >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestExecutorMemUsage >> KikimrIcGateway::TestLoadTableMetadata >> KikimrIcGateway::TestLoadExternalTable >> TFlatTest::ShardUnfreezeNonFrozen |92.4%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> ReadAttributesUtils::ReplaceAttributesEmpty [GOOD] >> TLocksTest::Range_BrokenLockMax >> KikimrIcGateway::TestDropExternalTable [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> KikimrIcGateway::TestDropExternalDataSource |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] Test command err: 2025-03-28T09:30:56.849505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:30:56.849554Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:56.921675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:00.228104Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:00.228201Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:00.290956Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:03.409552Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:03.409618Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:03.468117Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:04.486028Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944 2025-03-28T09:31:04.641493Z node 27 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:31:04.642030Z node 27 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001cf9/r3tmp/tmpRRJrHl/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:31:04.643043Z node 27 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001cf9/r3tmp/tmpRRJrHl/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001cf9/r3tmp/tmpRRJrHl/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 65581815738005971 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:31:04.685300Z node 23 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:31:04.685879Z node 23 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001cf9/r3tmp/tmpRRJrHl/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:31:04.686118Z node 23 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001cf9/r3tmp/tmpRRJrHl/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001cf9/r3tmp/tmpRRJrHl/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15682166327281778993 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:31:04.779890Z node 22 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:31:04.780435Z node 22 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001cf9/r3tmp/tmpRRJrHl/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:31:04.780670Z node 22 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001cf9/r3tmp/tmpRRJrHl/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001cf9/r3tmp/tmpRRJrHl/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7019458797049780587 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:31:04.825228Z node 21 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:31:04.825740Z node 21 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001cf9/r3tmp/tmpRRJrHl/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:31:04.826037Z node 21 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001cf9/r3tmp/tmpRRJrHl/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001cf9/r3tmp/tmpRRJrHl/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1340040011774179418 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:31:04.832526Z node 24 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:31:04.832999Z node 24 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/001cf9/r3tmp/tmpRRJrHl/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:31:04.833257Z node 24 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/001cf9/r3tmp/tmpRRJrHl/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/001cf9/r3tmp/tmpRRJrHl/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15150978173755702408 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 M ... 
node 163 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715661:0 progress is 1/1 2025-03-28T09:32:38.015140Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-03-28T09:32:38.015239Z node 163 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715661:0 progress is 1/1 2025-03-28T09:32:38.015323Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-03-28T09:32:38.015419Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 11 2025-03-28T09:32:38.015496Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715661, ready parts: 1/1, is published: false 2025-03-28T09:32:38.015664Z node 163 :FLAT_TX_SCHEMESHARD INFO: Send TEvUpdateTenantSchemeShard, to actor: [163:1382:2617], msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3, at schemeshard: 72057594046578944 2025-03-28T09:32:38.015720Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715661 ready parts: 1/1 2025-03-28T09:32:38.015808Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715661:0 2025-03-28T09:32:38.015886Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715661:0 2025-03-28T09:32:38.015997Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 12 2025-03-28T09:32:38.016089Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715661, publications: 1, subscribers: 1 2025-03-28T09:32:38.016167Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715661, [OwnerId: 72057594046578944, LocalPathId: 3], 7 2025-03-28T09:32:38.019441Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046578944 2025-03-28T09:32:38.019483Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046578944, txId: 281474976715661, path id: [OwnerId: 72057594046578944, LocalPathId: 3] 2025-03-28T09:32:38.019642Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046578944 2025-03-28T09:32:38.019673Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [163:673:2234], at schemeshard: 72057594046578944, txId: 281474976715661, path id: 3 2025-03-28T09:32:38.019804Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186233409546, msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3 2025-03-28T09:32:38.019879Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046578944 Generation: 2 UserAttributes { Key: "name1" Value: "value1" } UserAttributesVersion: 3, at schemeshard: 72075186233409546 2025-03-28T09:32:38.020149Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: Cannot publish paths for unknown operation id#0 2025-03-28T09:32:38.020979Z node 163 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-03-28T09:32:38.021067Z node 163 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 for mediator 72057594046382081 tablet 
72057594046578944 removed=1 2025-03-28T09:32:38.021093Z node 163 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 for mediator 72057594046382081 acknowledged 2025-03-28T09:32:38.021117Z node 163 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:21] persistent tx 281474976715661 acknowledged 2025-03-28T09:32:38.021555Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046578944, cookie: 281474976715661 2025-03-28T09:32:38.021629Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046578944, msg: Owner: 72057594046578944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046578944, cookie: 281474976715661 2025-03-28T09:32:38.021671Z node 163 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046578944, txId: 281474976715661 2025-03-28T09:32:38.021750Z node 163 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046578944, txId: 281474976715661, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], version: 7 2025-03-28T09:32:38.021829Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046578944, LocalPathId: 3] was 11 2025-03-28T09:32:38.021976Z node 163 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046578944, txId: 281474976715661, subscribers: 1 2025-03-28T09:32:38.022055Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046578944, to actorId: [163:1923:2384] 2025-03-28T09:32:38.025084Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046578944, msg: DomainSchemeShard: 72057594046578944 DomainPathId: 3 TabletID: 72075186233409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 3 TenantHive: 18446744073709551615 TenantSysViewProcessor: 72075186233409553 TenantRootACL: "" TenantStatisticsAggregator: 72075186233409554 TenantGraphShard: 18446744073709551615 2025-03-28T09:32:38.025149Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], at schemeshard: 72057594046578944 2025-03-28T09:32:38.025209Z node 163 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046578944, LocalPathId: 3], Generation: 2, ActorId:[163:1382:2617], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 3, TenantHive: 18446744073709551615, TenantSysViewProcessor: 72075186233409553, TenantStatisticsAggregator: 72075186233409554, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 3, tenantHive: 18446744073709551615, tenantSysViewProcessor: 72075186233409553, at schemeshard: 72057594046578944 2025-03-28T09:32:38.025269Z node 163 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2025-03-28T09:32:38.025297Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2025-03-28T09:32:38.025402Z node 163 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2025-03-28T09:32:38.025434Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [163:1588:2764], at schemeshard: 72075186233409546, txId: 0, path id: 1 2025-03-28T09:32:38.026820Z node 163 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72075186233409546, cookie: 0 2025-03-28T09:32:38.027706Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046578944, cookie: 281474976715661 2025-03-28T09:32:38.027776Z node 163 :FLAT_TX_SCHEMESHARD DEBUG: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046578944, LocalPathId: 3], at schemeshard: 72057594046578944 Reply: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "tenant-1" PathId: 3 SchemeshardId: 72057594046578944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } SecurityState { } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 3 PathOwnerId: 72057594046578944 Reply: Status: StatusSuccess Path: "/dc-1/users/tenant-1" PathDescription { Self { Name: "tenant-1" PathId: 3 SchemeshardId: 72057594046578944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046578944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 10 Coordinators: 72075186233409547 Coordinators: 72075186233409548 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 Mediators: 72075186233409551 Mediators: 72075186233409552 SchemeShard: 72075186233409546 SysViewProcessor: 72075186233409553 StatisticsAggregator: 72075186233409554 } DomainKey { SchemeShard: 72057594046578944 PathId: 3 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } 
StoragePools { Name: "/dc-1/users/tenant-1:hdd-1" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 9 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046578944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { } SecurityState { } } UserAttributes { Key: "name1" Value: "value1" } } PathId: 3 PathOwnerId: 72057594046578944 >> KikimrIcGateway::TestListPath >> KqpKv::BulkUpsert [GOOD] >> KqpKv::ReadRows_ExternalBlobs+UseExtBlobsPrecharge >> TLocksTest::BrokenLockUpdate [GOOD] >> TLocksTest::BrokenNullLock >> DataShardVolatile::UpsertNoLocksArbiter-UseSink [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Enable [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood >> TopicAutoscaling::ControlPlane_CDC_Disable >> TFlatTest::ShardUnfreezeNonFrozen [GOOD] >> TFlatTest::ShardFreezeUnfreezeRejectScheme |92.5%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::ExternalTTL_Types >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnVDiskSpaceStatus [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage >> TFlatTest::AutoSplitBySize >> KikimrIcGateway::TestDropExternalDataSource [GOOD] >> KikimrIcGateway::TestLoadTableMetadata [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata >> TopicAutoscaling::Simple_PQv1 [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropExternalDataSource [GOOD] Test command err: Trying to start YDB, gRPC: 25356, MsgBus: 11353 2025-03-28T09:32:33.907757Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790939961272496:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:33.907917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001477/r3tmp/tmpp6yMFy/pdisk_1.dat 2025-03-28T09:32:34.261016Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:34.301878Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:34.301993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:34.303754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25356, node 1 2025-03-28T09:32:34.466970Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:34.466990Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:34.467001Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:34.467150Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11353 TClient is connected to server localhost:11353 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:35.121413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:35.163653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480 2025-03-28T09:32:35.182376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:32:35.198949Z node 1 :TX_PROXY ERROR: Actor# [1:7486790948551207785:2344] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/f1/f2/external_table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:35.202828Z node 1 :KQP_GATEWAY ERROR: Unexpected error on scheme request, TxId: 281474976710660, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges)
: Error: Scheme operation failed, status: ExecComplete, reason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeExternalTable, state: EPathStateNoChanges) Trying to start YDB, gRPC: 6309, MsgBus: 27952 2025-03-28T09:32:36.971690Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790954251179026:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:36.971762Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001477/r3tmp/tmpRThTvf/pdisk_1.dat 2025-03-28T09:32:37.059394Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6309, node 2 2025-03-28T09:32:37.097715Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:37.097867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:37.099482Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:37.123282Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:37.123307Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:37.123316Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:37.123431Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27952 TClient is connected to server localhost:27952 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:37.495213Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
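For context on the "path exist" failure above: the gateway test first creates an external data source and an external table (the ESchemeOpCreateExternalDataSource / ESchemeOpCreateExternalTable operations in this log), then re-issues the create and expects the scheme error. A minimal YQL sketch of the DDL these scheme operations correspond to — the paths come from the log above, but the column names and the WITH option values are illustrative assumptions, not taken from the test:

    -- Hypothetical source definition; SOURCE_TYPE/LOCATION/AUTH_METHOD values are assumptions.
    CREATE EXTERNAL DATA SOURCE `/Root/f1/f2/external_data_source` WITH (
        SOURCE_TYPE = "ObjectStorage",
        LOCATION = "https://storage.example.net/bucket/",
        AUTH_METHOD = "NONE"
    );

    -- Hypothetical columns; path matches the one reported in the error above.
    CREATE EXTERNAL TABLE `/Root/f1/f2/external_table` (
        key Utf8,
        value Utf8
    ) WITH (
        DATA_SOURCE = "/Root/f1/f2/external_data_source",
        LOCATION = "/folder/",
        FORMAT = "csv_with_names"
    );

Re-running the CREATE EXTERNAL TABLE statement against the existing path is what produces the "path exist, request accepts it" check failure seen in the log; dropping the objects (external table first, then the data source it references) is the cleanup path exercised by the TestDropExternalTable / TestDropExternalDataSource cases.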
2025-03-28T09:32:37.513352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-03-28T09:32:37.527105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 22226, MsgBus: 5278 2025-03-28T09:32:40.306803Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486790972362582560:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:40.306923Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001477/r3tmp/tmp1XqgUL/pdisk_1.dat 2025-03-28T09:32:40.420133Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:40.448314Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:40.448398Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:40.450031Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22226, node 3 2025-03-28T09:32:40.494030Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:40.494051Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:40.494056Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:40.494158Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5278 TClient is connected to server localhost:5278 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
2025-03-28T09:32:40.913099Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:32:40.933398Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480 >> KikimrIcGateway::TestLoadExternalTable [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata >> KikimrIcGateway::TestListPath [GOOD] >> KikimrIcGateway::TestDropTable >> DataShardVolatile::DistributedWriteLostPlanThenDrop [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenSplit >> KqpExplain::ExplainStream [GOOD] >> KqpExplain::ExplainScanQueryWithParams >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] >> TColumnShardTestSchema::RebootOneTier [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneTier [GOOD] Test command err: 2025-03-28T09:32:28.531254Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:32:28.628835Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:32:28.633245Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:32:28.633640Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:32:28.658346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:32:28.658699Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:32:28.666725Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:32:28.666959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:32:28.667186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:32:28.667295Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:32:28.667389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:32:28.667520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:32:28.667639Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:32:28.667755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:32:28.667887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:32:28.668028Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:32:28.668164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:32:28.668284Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:32:28.694082Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:32:28.702065Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:32:28.702428Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:32:28.702484Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:32:28.702671Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:32:28.702846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:32:28.702920Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:32:28.702973Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:32:28.703068Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:32:28.703122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:32:28.703165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:32:28.703193Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:32:28.703347Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:32:28.703415Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:32:28.703465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:32:28.703500Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:32:28.703597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:32:28.703660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:32:28.703703Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:32:28.703730Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:32:28.703790Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:32:28.703826Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:32:28.703867Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:32:28.703919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:32:28.703970Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:32:28.704001Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:32:28.704391Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=36; 2025-03-28T09:32:28.704472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=32; 2025-03-28T09:32:28.704551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-03-28T09:32:28.704627Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=35; 2025-03-28T09:32:28.704828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:32:28.704881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:32:28.704916Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:32:28.705098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:32:28.705141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:32:28.705180Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:32:28.705314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:32:28.705369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:32:28.705400Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:32:28.705584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:32:28.705628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:32:28.705659Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T0 ... 
1:1033:3026];bytes=350080;rows=43760;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-03-28T09:32:45.942672Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:32:45.942837Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:32:45.942893Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:32:45.942946Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:32:45.943119Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:32:45.943306Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:32:45.943352Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:32:45.943482Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=36240; 2025-03-28T09:32:45.943546Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=289920;num_rows=36240;batch_columns=timestamp; 2025-03-28T09:32:45.943725Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1033:3026];bytes=289920;rows=36240;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-03-28T09:32:45.943865Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:32:45.943985Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:32:45.944123Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:32:45.944267Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:32:45.944350Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:32:45.944422Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:32:45.944455Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1038:3031] finished for tablet 9437184 2025-03-28T09:32:45.944982Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1033:3026];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.69},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.693}],"full":{"a":1743154365251126,"name":"_full_task","f":1743154365251126,"d_finished":0,"c":0,"l":1743154365944515,"d":693389},"events":[{"name":"bootstrap","f":1743154365251435,"d_finished":2851,"c":1,"l":1743154365254286,"d":2851},{"a":1743154365944248,"name":"ack","f":1743154365941848,"d_finished":2195,"c":2,"l":1743154365944155,"d":2462},{"a":1743154365944234,"name":"processing","f":1743154365256080,"d_finished":216524,"c":16,"l":1743154365944157,"d":216805},{"name":"ProduceResults","f":1743154365252800,"d_finished":5136,"c":20,"l":1743154365944440,"d":5136},{"a":1743154365944443,"name":"Finish","f":1743154365944443,"d_finished":0,"c":0,"l":1743154365944515,"d":72},{"name":"task_result","f":1743154365256120,"d_finished":213832,"c":14,"l":1743154365941638,"d":213832}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:32:45.945064Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1033:3026];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:32:45.945572Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1033:3026];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.69},{"events":["l_ProduceResults","f_Finish"],"t":0.693},{"events":["l_ack","l_processing","l_Finish"],"t":0.694}],"full":{"a":1743154365251126,"name":"_full_task","f":1743154365251126,"d_finished":0,"c":0,"l":1743154365945128,"d":694002},"events":[{"name":"bootstrap","f":1743154365251435,"d_finished":2851,"c":1,"l":1743154365254286,"d":2851},{"a":1743154365944248,"name":"ack","f":1743154365941848,"d_finished":2195,"c":2,"l":1743154365944155,"d":3075},{"a":1743154365944234,"name":"processing","f":1743154365256080,"d_finished":216524,"c":16,"l":1743154365944157,"d":217418},{"name":"ProduceResults","f":1743154365252800,"d_finished":5136,"c":20,"l":1743154365944440,"d":5136},{"a":1743154365944443,"name":"Finish","f":1743154365944443,"d_finished":0,"c":0,"l":1743154365945128,"d":685},{"name":"task_result","f":1743154365256120,"d_finished":213832,"c":14,"l":1743154365941638,"d":213832}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:32:45.945660Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:32:45.250259Z;index_granules=0;index_portions=2;index_batches=1721;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=5175704;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5175704;selected_rows=0; 2025-03-28T09:32:45.945707Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:32:45.945999Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1038:3031];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] Test command err: 2025-03-28T09:32:39.730419Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790966552549235:2266];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:39.730633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2909/001ee5/r3tmp/tmpTfLiWk/pdisk_1.dat 2025-03-28T09:32:40.122102Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:40.124835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:40.124920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:40.129109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20277 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:40.410049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:40.430876Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:32:40.437107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:40.601351Z node 1 :TX_PROXY ERROR: Actor# [1:7486790970847517003:2364] txid# 281474976710659, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2025-03-28T09:32:43.029837Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790985013037363:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:43.029936Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee5/r3tmp/tmpzewlVq/pdisk_1.dat 2025-03-28T09:32:43.136536Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:43.174542Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:43.174712Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:43.176208Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26156 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:43.323471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:43.331368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:43.393988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:43.410529Z node 2 :TX_PROXY ERROR: Actor# [2:7486790985013038085:2392] txid# 281474976715660, issues: { message: "Table is frozen. Only unfreeze alter is allowed" severity: 1 } Error 128: Table is frozen. Only unfreeze alter is allowed 2025-03-28T09:32:43.412797Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:43.426221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 waiting... 
>> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] >> KikimrIcGateway::TestDropTable [GOOD] >> KikimrIcGateway::TestDropResourcePool >> TopicAutoscaling::CDC_PartitionSplit_AutosplitByLoad [GOOD] >> TopicAutoscaling::ControlPlane_CDC >> KqpKv::ReadRows_ExternalBlobs+UseExtBlobsPrecharge [GOOD] >> KqpKv::ReadRows_ExternalBlobs-UseExtBlobsPrecharge >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TFlatTest::WriteSplitKillRead >> TLocksTest::BrokenLockErase [GOOD] >> TLocksTest::BrokenDupLock >> TCertificateCheckerTest::CheckSubjectDns >> KqpExplain::ExplainScanQueryWithParams [GOOD] >> KqpExplain::FewEffects+UseSink >> TLocksTest::CK_Range_BrokenLockInf [GOOD] |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TColumnShardTestSchema::RebootDrop [GOOD] >> TCertificateCheckerTest::CheckSubjectDns [GOOD] >> TopicAutoscaling::ControlPlane_CDC_Disable [GOOD] >> TopicAutoscaling::MidOfRange [GOOD] >> TFlatTest::AutoSplitBySize [GOOD] >> TFlatTest::AutoMergeBySize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootDrop [GOOD] Test command err: 2025-03-28T09:32:37.383716Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:32:37.480566Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:32:37.506943Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:32:37.507301Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:32:37.515160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:32:37.515387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:32:37.515631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:32:37.515779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:32:37.515893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:32:37.516021Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:32:37.516135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:32:37.516291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:32:37.516426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:32:37.516561Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:32:37.516705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:32:37.516815Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:32:37.543693Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:32:37.543996Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:32:37.544058Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:32:37.544192Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:32:37.544331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:32:37.544442Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:32:37.544478Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:32:37.544551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:32:37.544604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:32:37.544634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:32:37.544655Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:32:37.544781Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:32:37.544841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:32:37.544876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:32:37.544893Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:32:37.544947Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:32:37.544978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:32:37.545012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:32:37.545040Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:32:37.545093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:32:37.545119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:32:37.545136Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:32:37.545163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:32:37.545201Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:32:37.545225Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:32:37.545525Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=33; 2025-03-28T09:32:37.545583Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=24; 2025-03-28T09:32:37.545641Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=23; 2025-03-28T09:32:37.545687Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=22; 2025-03-28T09:32:37.545792Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:32:37.545844Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:32:37.545872Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:32:37.546011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:32:37.546044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:32:37.546079Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:32:37.546229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:32:37.546258Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:32:37.546298Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:32:37.546438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:32:37.546466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:32:37.546485Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:32:37.546553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:32:37.546595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:32:37.546629Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 55Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:32:51.077505Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:32:51.077540Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:32:51.077603Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:32:51.077691Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:32:51.077767Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-03-28T09:32:51.077850Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700004;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:32:51.077910Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:32:51.077972Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:32:51.078025Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:32:51.078128Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-03-28T09:32:51.078200Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:32:51.078647Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=8;path_id=1; 2025-03-28T09:32:51.079622Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-03-28T09:32:51.266258Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000003:max} readable: {1000000004:max} at tablet 9437184 2025-03-28T09:32:51.266477Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:32:51.271466Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 9 } } } ; 2025-03-28T09:32:51.271602Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 9 } } } ; 2025-03-28T09:32:51.272371Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"saved_at","id":9}]},"o":"9","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"9","p":{"address":{"name":"saved_at","id":9}},"o":"9","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"9","t":"Projection"},"w":7,"id":0}}}; 2025-03-28T09:32:51.272534Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:32:51.273945Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tablet_id=9437184;self_id=[1:889:2890];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:949:2942];trace_detailed=; 2025-03-28T09:32:51.275453Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=9;column_names=saved_at;);; 2025-03-28T09:32:51.275804Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; 2025-03-28T09:32:51.276444Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:949:2942];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:32:51.276603Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:949:2942];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:32:51.276766Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:949:2942];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:32:51.276827Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:949:2942] finished for tablet 9437184 2025-03-28T09:32:51.277351Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:949:2942];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:942:2936];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":1743154371273855,"name":"_full_task","f":1743154371273855,"d_finished":0,"c":0,"l":1743154371276898,"d":3043},"events":[{"name":"bootstrap","f":1743154371274182,"d_finished":1804,"c":1,"l":1743154371275986,"d":1804},{"a":1743154371276407,"name":"ack","f":1743154371276407,"d_finished":0,"c":0,"l":1743154371276898,"d":491},{"a":1743154371276379,"name":"processing","f":1743154371276379,"d_finished":0,"c":0,"l":1743154371276898,"d":519},{"name":"ProduceResults","f":1743154371275960,"d_finished":346,"c":2,"l":1743154371276805,"d":346},{"a":1743154371276810,"name":"Finish","f":1743154371276810,"d_finished":0,"c":0,"l":1743154371276898,"d":88}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:32:51.277426Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:949:2942];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:942:2936];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:32:51.277751Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:949:2942];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:942:2936];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults","l_ProduceResults","f_Finish"],"t":0.002},{"events":["l_ack","l_processing","l_Finish"],"t":0.003}],"full":{"a":1743154371273855,"name":"_full_task","f":1743154371273855,"d_finished":0,"c":0,"l":1743154371277466,"d":3611},"events":[{"name":"bootstrap","f":1743154371274182,"d_finished":1804,"c":1,"l":1743154371275986,"d":1804},{"a":1743154371276407,"name":"ack","f":1743154371276407,"d_finished":0,"c":0,"l":1743154371277466,"d":1059},{"a":1743154371276379,"name":"processing","f":1743154371276379,"d_finished":0,"c":0,"l":1743154371277466,"d":1087},{"name":"ProduceResults","f":1743154371275960,"d_finished":346,"c":2,"l":1743154371276805,"d":346},{"a":1743154371276810,"name":"Finish","f":1743154371276810,"d_finished":0,"c":0,"l":1743154371277466,"d":656}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:32:51.277832Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:949:2942];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:32:51.272478Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:32:51.277889Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:949:2942];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:32:51.277980Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:949:2942];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_BrokenLockInf [GOOD] Test command err: 2025-03-28T09:32:14.470542Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790860226405140:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:14.470677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef7/r3tmp/tmpq2vQGz/pdisk_1.dat 2025-03-28T09:32:14.778733Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-28T09:32:14.834065Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:14.834158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:14.835968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15616 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:15.010148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:32:15.045225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:32:15.164669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:15.220389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:17.581781Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790873126330454:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:17.581861Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef7/r3tmp/tmpzFzY1W/pdisk_1.dat 2025-03-28T09:32:17.690890Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:17.728731Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:17.728862Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:17.730433Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2829 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:17.860983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:17.882473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:17.955016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:17.995955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:20.809268Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486790884389909211:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:20.809332Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef7/r3tmp/tmpjXefvR/pdisk_1.dat 2025-03-28T09:32:20.914538Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:20.950033Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:20.950120Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:20.951357Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30140 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:21.079587Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:21.101707Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:21.177622Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:21.252982Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:32:23.713883Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486790895539762263:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:23.713951Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef7/r3tmp/tmpdkxCrM/pdisk_1.dat 2025-03-28T09:32:23.785981Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:23.843529Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:23.843606Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:23.844939Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26361 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:23.969357Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:23.986325Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waitin ... 9/001ef7/r3tmp/tmp7OXqFF/pdisk_1.dat 2025-03-28T09:32:34.747418Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:34.780736Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:34.780836Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:34.782156Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6858 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:35.023137Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:35.049037Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:35.121196Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:35.191655Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:38.501351Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486790961918948328:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:38.501487Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef7/r3tmp/tmpTm1szL/pdisk_1.dat 2025-03-28T09:32:38.669324Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:38.688844Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:38.688952Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:38.691377Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4463 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:38.909197Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:38.923385Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:38.994302Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:39.047311Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:42.980907Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486790980817311983:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:42.980998Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef7/r3tmp/tmpkrLIqL/pdisk_1.dat 2025-03-28T09:32:43.090863Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:43.121652Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:43.121750Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:43.123133Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26761 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:43.351696Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:43.369496Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:43.446397Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:43.505731Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:47.136455Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486791001857539287:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:47.136537Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef7/r3tmp/tmp5zhaAm/pdisk_1.dat 2025-03-28T09:32:47.256971Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:47.288420Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:47.288513Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:47.289828Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3114 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:47.514639Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:47.536109Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:47.614626Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:47.666669Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> KikimrIcGateway::TestDropResourcePool [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 >> Balancing::Balancing_ManyTopics_TopicApi [GOOD] >> Balancing::Balancing_ManyTopics_PQv1 >> TFlatTest::WriteSplitKillRead [GOOD] >> TFlatTest::WriteSplitWriteSplit >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::MidOfRange [GOOD] Test command err: 2025-03-28T09:32:00.877054Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790799340566090:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:00.877110Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001369/r3tmp/tmp00bf5j/pdisk_1.dat 2025-03-28T09:32:01.100439Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:32:01.333008Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:01.337723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:01.337824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:01.349630Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61698, node 1 2025-03-28T09:32:01.803388Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001369/r3tmp/yandexw7muWG.tmp 2025-03-28T09:32:01.803413Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001369/r3tmp/yandexw7muWG.tmp 2025-03-28T09:32:01.803653Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001369/r3tmp/yandexw7muWG.tmp 2025-03-28T09:32:01.803788Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:32:02.036093Z INFO: TTestServer started on Port 63966 GrpcPort 61698 TClient is connected to server localhost:63966 PQClient connected to localhost:61698 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:32:02.329868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:02.367296Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:02.384701Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:32:02.390198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:02.543673Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:03.950408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790812225468793:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.950586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.953355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790812225468805:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.958338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:32:03.974387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790812225468808:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:32:04.211804Z node 1 :TX_PROXY ERROR: Actor# [1:7486790816520436168:2453] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:04.247361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.294460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.349159Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790816520436183:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:32:04.350893Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGYyNDU1ZDUtZWNiZDUzYzAtNGZlNDAxNWEtYjgyOTk1Yjk=, ActorId: [1:7486790812225468790:2337], ActorState: ExecuteState, TraceId: 01jqe1g8ebezarap4qt094ma8h, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:32:04.369247Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:32:04.370746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7486790816520436462:2635] 2025-03-28T09:32:05.877127Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790799340566090:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:05.877186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-03-28T09:32:10.505605Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-28T09:32:10.517493Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-28T09:32:10.518480Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7486790842290240527:2792], Recipient [1:7486790803635533789:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:10.518509Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:10.518520Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:32:10.518543Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7486790842290240523:2789], Recipient [1:7486790803635533789:2184]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-03-28T09:32:10.518553Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:32:10.617837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "autoscalit-topic" TotalGroupCount: 5 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 5 MaxPartitionCount: 10 ScaleThresholdSeconds: 500 ScaleUpPartitionWriteSpeedThresholdPercent: 80 ScaleDownPartitionWriteSpeedThresholdPercent: 20 PartitionStrategyType: CAN_SPLIT } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:32:10.618123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/autoscalit-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:32:10.618347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: autoscalit-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-03-28T09:32:10.618378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-03-28T09:32:10.618418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-03-28T09:32:10.618464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created fo ... 
.828448Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715673 2025-03-28T09:32:51.828459Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715673, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], version: 2 2025-03-28T09:32:51.828469Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 15] was 4 2025-03-28T09:32:51.828516Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715673, subscribers: 1 2025-03-28T09:32:51.828532Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [5:7486791016173944467:2470] 2025-03-28T09:32:51.828560Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:32:51.828937Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:32:51.829065Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715673 2025-03-28T09:32:51.829081Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:32:51.829106Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715673 2025-03-28T09:32:51.829112Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:32:51.829158Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Send to actor: [5:7486791016173944467:2470] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715673 at schemeshard: 72057594046644480 2025-03-28T09:32:51.829165Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [5:7486791016173944495:2472], Recipient [5:7486791016173944495:2472]: NKikimrClient.TResponse Status: 1 Cookie: 5 WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } WriteResult { Status: 0 StatusFlags: 1 } 2025-03-28T09:32:51.829186Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2025-03-28T09:32:51.829204Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:32:51.829224Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Try execute txs with state EXECUTED 2025-03-28T09:32:51.829240Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715673, State EXECUTED 2025-03-28T09:32:51.829259Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715673 State EXECUTED FrontTxId 281474976715673 2025-03-28T09:32:51.829275Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TPersQueue::SendEvReadSetAckToSenders 2025-03-28T09:32:51.829293Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715673, NewState WAIT_RS_ACKS 2025-03-28T09:32:51.829307Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715673 moved from EXECUTED to WAIT_RS_ACKS 2025-03-28T09:32:51.829331Z node 5 :PERSQUEUE DEBUG: [TxId: 281474976715673] PredicateAcks: 0/0 2025-03-28T09:32:51.829340Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-28T09:32:51.829353Z node 5 :PERSQUEUE 
DEBUG: [TxId: 281474976715673] PredicateAcks: 0/0 2025-03-28T09:32:51.829372Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] add an TxId 281474976715673 to the list for deletion 2025-03-28T09:32:51.829389Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715673, NewState DELETING 2025-03-28T09:32:51.829416Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete key for TxId 281474976715673 2025-03-28T09:32:51.829468Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:32:51.829521Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [5:7486791016173944495:2472], Recipient [5:7486791016173944495:2472]: NKikimr::TEvKeyValue::TEvCollect 2025-03-28T09:32:51.829655Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794752, Sender [5:7486791016173944495:2472], Recipient [5:7486791016173944495:2472]: NKikimrClient.TKeyValueRequest Cookie: 5 CmdDeleteRange { Range { From: "tx_00000281474976715673" IncludeFrom: true To: "tx_00000281474976715673" IncludeTo: true } } CmdWrite { Key: "_txinfo" Value: "\020\200\272\343\340\3352\030\231\247\200\200\200\200@(\240\215\0060\200\272\343\340\33528\231\247\200\200\200\200@" StorageChannel: INLINE } 2025-03-28T09:32:51.829772Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794753, Sender [5:7486791016173944620:2472], Recipient [5:7486791016173944495:2472]: NKikimr::TEvKeyValue::TEvIntermediate 2025-03-28T09:32:51.829830Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [5:7486791016173944474:2829], Recipient [5:7486790977519237639:2159]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:32:51.829855Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:32:51.829870Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-03-28T09:32:51.830000Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [5:7486791016173944619:2482], Recipient [5:7486791016173944495:2472]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-03-28T09:32:51.830342Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270795264, Sender [5:7486791016173944495:2472], Recipient [5:7486791016173944495:2472]: NKikimrClient.TResponse Status: 1 Cookie: 5 DeleteRangeResult { Status: 0 } WriteResult { Status: 0 StatusFlags: 1 } 2025-03-28T09:32:51.830367Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvKeyValue::TEvResponse 2025-03-28T09:32:51.830380Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:32:51.830396Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] Try execute txs with state DELETING 2025-03-28T09:32:51.830408Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] TxId 281474976715673, State DELETING 2025-03-28T09:32:51.830427Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037893] delete TxId 281474976715673 2025-03-28T09:32:51.830542Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794756, Sender [5:7486791016173944495:2472], Recipient [5:7486791016173944495:2472]: NKikimr::TEvKeyValue::TEvCollect 2025-03-28T09:32:51.830806Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 270794760, Sender [5:7486791016173944623:2483], Recipient [5:7486791016173944495:2472]: NKikimr::TEvKeyValue::TEvCompleteGC 2025-03-28T09:32:51.844551Z node 5 :PQ_READ_PROXY DEBUG: new alter topic request 2025-03-28T09:32:51.846890Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received 
event# 269877761, Sender [5:7486791016173944632:2919], Recipient [5:7486790977519237639:2159]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:51.846946Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:51.846972Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:32:51.847058Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [5:7486791016173944629:2917], Recipient [5:7486790977519237639:2159]: {TEvModifySchemeTransaction txid# 281474976715674 TabletId# 72057594046644480} 2025-03-28T09:32:51.847087Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:32:51.850440Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "Root/origin/feed" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "streamImpl" PathId: 15 TotalGroupCount: 3 PartitionPerTablet: 2 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "feed" TopicPath: "/Root/origin/feed/streamImpl" YdbDatabasePath: "/Root" MeteringMode: METERING_MODE_REQUEST_UNITS PartitionStrategy { MinPartitionCount: 3 MaxPartitionCount: 107 ScaleThresholdSeconds: 30 PartitionStrategyType: DISABLED } } Partitions { PartitionId: 0 TabletId: 72075186224037893 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037894 NextPartitionId: 1 } ApplyIf { PathId: 15 PathVersion: 2 } AllowAccessToPrivatePaths: true } TxId: 281474976715674 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:32:51.850796Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TAlterPQ Propose, path: Root/origin/feed/streamImpl, pathId: [OwnerId: 72057594046644480, LocalPathId: 15], opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-28T09:32:51.850927Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715674:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046644480 2025-03-28T09:32:51.851134Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:32:51.851631Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715674, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 281474976715674 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:32:51.851800Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715674, database: /Root, subject: root@builtin, status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: Root/origin/feed/streamImpl 2025-03-28T09:32:51.851834Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:32:51.852098Z node 5 :TX_PROXY ERROR: Actor# [5:7486791016173944629:2917] txid# 281474976715674, issues: { message: "Can`t disable auto partitioning." 
severity: 1 } 2025-03-28T09:32:51.852310Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [5:7486791016173944632:2919], Recipient [5:7486790977519237639:2159]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:32:51.852344Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:32:51.852355Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046644480 2025-03-28T09:32:51.920837Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791016173944495:2472], Partition 0, Sender [0:0:0], Recipient [5:7486791016173944579:2478], Cookie: 0 2025-03-28T09:32:51.920909Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791016173944579:2478]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:32:51.920938Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:32:51.920983Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:32:51.921048Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:32:51.921091Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:32:51.921127Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> TColumnShardTestSchema::RebootHotTiers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropResourcePool [GOOD] Test command err: Trying to start YDB, gRPC: 6092, MsgBus: 13846 2025-03-28T09:32:41.367071Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790976664672893:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:41.367143Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013a1/r3tmp/tmp15AuC1/pdisk_1.dat 2025-03-28T09:32:41.708238Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6092, node 1 2025-03-28T09:32:41.770502Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:41.770825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:41.772555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:41.787014Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:41.787040Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:41.787047Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:41.787190Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13846 TClient is connected to server localhost:13846 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:42.294372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:42.308557Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:32:44.325971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790989549575488:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:44.326174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:44.538420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:32:44.653725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:32:44.681251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:32:44.710483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:32:44.753296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790989549575802:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:44.753416Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:44.753738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790989549575807:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:44.757510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-03-28T09:32:44.767108Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790989549575809:2369], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-03-28T09:32:44.857802Z node 1 :TX_PROXY ERROR: Actor# [1:7486790989549575862:2567] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21266, MsgBus: 5223 2025-03-28T09:32:45.696382Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790989946355138:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:45.696434Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013a1/r3tmp/tmpMRnzsM/pdisk_1.dat 2025-03-28T09:32:45.788866Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21266, node 2 2025-03-28T09:32:45.823893Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:45.823983Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:45.826142Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:45.855311Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:45.855332Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:45.855339Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:45.855458Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5223 TClient is connected to server localhost:5223 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:32:46.300118Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:32:48.422364Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486791002831257718:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:48.422458Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:48.441464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:32:48.471836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:32:48.499385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:32:48.525354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:32:48.559616Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486791002831258029:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:48.559728Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:48.559765Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486791002831258034:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:48.563533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-03-28T09:32:48.571722Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486791002831258036:2368], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2025-03-28T09:32:48.637459Z node 2 :TX_PROXY ERROR: Actor# [2:7486791002831258088:2563] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:48.832384Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found
: Info: Success, code: 4 Trying to start YDB, gRPC: 12283, MsgBus: 16060 2025-03-28T09:32:49.491008Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486791006997896451:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:49.491109Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013a1/r3tmp/tmp9loQV1/pdisk_1.dat 2025-03-28T09:32:49.613752Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12283, node 3 2025-03-28T09:32:49.640162Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:49.640256Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:49.645119Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:49.668938Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:49.668963Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:49.668969Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:49.669092Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16060 TClient is connected to server localhost:16060 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:50.036151Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
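The node 1 and node 2 runs above show the same bootstrap race each time: pool fetches fail with NOT_FOUND, a creator actor proposes the default pool, a concurrent proposal answers "path exist, request accepts it", and a scheduled retry doublechecks once the transaction completes. As a rough standalone C++ sketch of that ensure-exists-then-doublecheck idea (the status enum, in-memory store, and function names here are hypothetical stand-ins, not YDB's workload service internals):

#include <cstdio>
#include <mutex>
#include <set>
#include <string>

// Toy status codes standing in for the scheme statuses seen in the log.
enum class EStatus { Success, AlreadyExists, NotFound };

std::mutex g_lock;
std::set<std::string> g_paths;  // stand-in for the scheme board

EStatus CreatePool(const std::string& path) {
    std::lock_guard<std::mutex> guard(g_lock);
    return g_paths.insert(path).second ? EStatus::Success
                                       : EStatus::AlreadyExists;
}

EStatus CheckPool(const std::string& path) {
    std::lock_guard<std::mutex> guard(g_lock);
    return g_paths.count(path) ? EStatus::Success : EStatus::NotFound;
}

// Ensure the pool exists: try to create it, treat AlreadyExists as success
// ("path exist, request accepts it"), then doublecheck visibility.
bool EnsureDefaultPool(const std::string& path) {
    EStatus st = CreatePool(path);
    if (st != EStatus::Success && st != EStatus::AlreadyExists) {
        return false;
    }
    // "Transaction ... completed, doublechecking": verify it is visible.
    return CheckPool(path) == EStatus::Success;
}

int main() {
    const std::string path = "/Root/.metadata/workload_manager/pools/default";
    // Two racing callers: the loser sees AlreadyExists and still succeeds.
    std::printf("first=%d second=%d\n",
                EnsureDefaultPool(path) ? 1 : 0,
                EnsureDefaultPool(path) ? 1 : 0);
    return 0;
}

Under such a scheme the loser of the race still reports success, which is consistent with the log recording these collisions at WARN/ERROR level while the test continues and passes.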
2025-03-28T09:32:50.062637Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 >> DataShardVolatile::DistributedWriteLostPlanThenSplit [GOOD] >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TLocksTest::Range_IncorrectDot2 [GOOD] |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TopicAutoscaling::PartitionSplit_ManySession_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation >> TLocksTest::Range_Pinhole >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PQv1 [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TLocksFatTest::PointSetBreak ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectDot2 [GOOD] Test command err: 2025-03-28T09:32:17.366152Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790872402465910:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:17.366645Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef3/r3tmp/tmpxgMByw/pdisk_1.dat 2025-03-28T09:32:17.672055Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:17.719686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:17.719791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:17.721468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24876 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:17.907466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:17.930903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:18.039281Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:18.082971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:20.045222Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790885190440240:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:20.045279Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef3/r3tmp/tmp2PcXmo/pdisk_1.dat 2025-03-28T09:32:20.176381Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:20.222303Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:20.222407Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:20.223936Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2096 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:20.373835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:20.395629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:20.467477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:32:20.537640Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:23.041243Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486790896824753293:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:23.041320Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef3/r3tmp/tmpTjZwtZ/pdisk_1.dat 2025-03-28T09:32:23.136144Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:23.176722Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:23.176805Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:23.178180Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6493 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:23.348748Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:23.370525Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:23.436956Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:23.474288Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:25.716705Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486790907573271582:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:25.716787Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef3/r3tmp/tmprTjffM/pdisk_1.dat 2025-03-28T09:32:25.805895Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:25.847308Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:25.847401Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:25.851616Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11314 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:26.023918Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:26.028720Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:26.044666Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation p ... ,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:36.888094Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19014 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:37.143831Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:37.170509Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:37.230077Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:37.301832Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:40.613914Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486790969901340296:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:40.614017Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef3/r3tmp/tmpODcJJK/pdisk_1.dat 2025-03-28T09:32:40.775254Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:40.805115Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:40.805225Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:40.807559Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24080 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-28T09:32:41.038940Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:41.060069Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:41.132139Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:41.209723Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:45.275211Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486790993551622208:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:45.275307Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef3/r3tmp/tmptmZOFl/pdisk_1.dat 2025-03-28T09:32:45.417520Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:45.446800Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:45.446886Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:45.448534Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14505 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:45.702444Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:45.721922Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:45.797192Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:45.902848Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:49.759275Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486791007260027430:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:49.759329Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ef3/r3tmp/tmp8ZhLU5/pdisk_1.dat 2025-03-28T09:32:49.909121Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:49.929497Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:49.929614Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:49.931246Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18198 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:50.197738Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:50.205020Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:50.216615Z node 10 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:50.224344Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:32:50.301082Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:50.390376Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
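Each run in the TLocksTest block above gates on WaitRootIsUp 'dc-1' before creating tables. A minimal poll-until-ready helper in standard C++ captures the shape of that loop; the name WaitUntilReady and its signature are assumptions for illustration, not the actual test_client.cpp API:

#include <chrono>
#include <cstdio>
#include <functional>
#include <thread>

// Poll a probe until it succeeds or a deadline passes. The probe stands in
// for one TClient::Ls attempt that reports StatusCode: SUCCESS.
bool WaitUntilReady(const std::function<bool()>& probe,
                    std::chrono::milliseconds timeout,
                    std::chrono::milliseconds interval) {
    const auto deadline = std::chrono::steady_clock::now() + timeout;
    for (;;) {
        if (probe()) {
            return true;  // analogous to "WaitRootIsUp 'dc-1' success."
        }
        if (std::chrono::steady_clock::now() + interval >= deadline) {
            return false;  // give up; the test would fail here
        }
        std::this_thread::sleep_for(interval);
    }
}

int main() {
    int attempts = 0;
    // Stand-in probe: the root becomes visible on the third attempt.
    bool ok = WaitUntilReady([&] { return ++attempts >= 3; },
                             std::chrono::milliseconds(1000),
                             std::chrono::milliseconds(10));
    std::printf("ready=%d after %d attempts\n", ok ? 1 : 0, attempts);
    return ok ? 0 : 1;
}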
>> TColumnShardTestSchema::HotTiers ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2025-03-28T09:31:14.034286Z :TestReorderedExecutor INFO: Random seed for debugging is 1743154274034252 2025-03-28T09:31:14.313045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790601075665996:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:14.313207Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:31:14.335072Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790598900879188:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:14.335139Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:31:14.508440Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:31:14.513414Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001acd/r3tmp/tmplTOogt/pdisk_1.dat 2025-03-28T09:31:14.708711Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:14.731841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:14.731940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:14.737187Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:31:14.739574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:14.753181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:14.753259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:14.755868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6429, node 1 2025-03-28T09:31:14.819592Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001acd/r3tmp/yandexSF3ImP.tmp 2025-03-28T09:31:14.819628Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001acd/r3tmp/yandexSF3ImP.tmp 2025-03-28T09:31:14.831208Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001acd/r3tmp/yandexSF3ImP.tmp 2025-03-28T09:31:14.831484Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:31:14.871701Z INFO: TTestServer started on Port 13079 GrpcPort 6429 TClient is connected to server localhost:13079 PQClient connected to localhost:6429 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:15.111099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-28T09:31:17.422322Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790613960568896:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:17.422414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790613960568904:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:17.422479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:17.425878Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790611785781394:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:17.425948Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486790611785781378:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:17.425874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:31:17.426367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:17.436436Z node 2 :TX_PROXY ERROR: Actor# [2:7486790611785781408:2124] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:31:17.437922Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790613960568942:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:17.438259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:17.445154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790613960568910:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:31:17.445305Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486790611785781407:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:31:17.501785Z node 2 :TX_PROXY ERROR: Actor# [2:7486790611785781435:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:17.533698Z node 1 :TX_PROXY ERROR: Actor# [1:7486790613960569000:2696] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:17.629897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:17.631931Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790613960569019:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:31:17.632224Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzNlYjM5NTctYThiZGE5M2MtZTU2Yzk4YTQtNWE2ZmFlOWY=, ActorId: [1:7486790613960568893:2336], ActorState: ExecuteState, TraceId: 01jqe1ev1c856wyjrk0c4emtsf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:31:17.634466Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:31:17.635556Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486790611785781450:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:31:17.635804Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmRmZTFlY2UtZDViOWQ4OWUtOWU3ZjhhMTQtY2VlNjlmMDM=, ActorId: [2:7486790611785781376:2308], ActorState: ExecuteState, TraceId: 01jqe1ev1gbbqe1cdw8b2gh5ke, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:31:17.636210Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:31:17.743860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:17.880257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part prop ... 2:52.341161Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-03-28T09:32:52.341266Z node 13 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:54546 2025-03-28T09:32:52.341280Z node 13 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:54546 proto=v1 topic=test-topic durationSec=0 2025-03-28T09:32:52.341288Z node 13 :PQ_WRITE_PROXY INFO: init check schema 2025-03-28T09:32:52.342560Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-03-28T09:32:52.342654Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-03-28T09:32:52.342667Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:32:52.342676Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-28T09:32:52.342705Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7486791022386034554:2503] (SourceId=src, 
PreferedPartition=(NULL)) StartKqpSession 2025-03-28T09:32:52.344966Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7486791022386034554:2503] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-03-28T09:32:52.541132Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7486791022386034554:2503] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-03-28T09:32:52.541900Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7486791022386034554:2503] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-03-28T09:32:52.541943Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7486791022386034554:2503] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-03-28T09:32:52.541573Z node 14 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7486791022386034616:2503] connected; active server actors: 1 2025-03-28T09:32:52.542349Z node 14 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7486791022386034616:2503] disconnected; active server actors: 1 2025-03-28T09:32:52.542375Z node 14 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [13:7486791022386034616:2503] disconnected no session 2025-03-28T09:32:52.699621Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7486791022386034554:2503] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-03-28T09:32:52.699668Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7486791022386034554:2503] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-03-28T09:32:52.699692Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7486791022386034554:2503] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-03-28T09:32:52.699725Z node 13 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:32:52.700875Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 13, Generation: 1 2025-03-28T09:32:52.700936Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [13:7486791022386034639:2503], now have 1 active actors on pipe 2025-03-28T09:32:52.700967Z node 13 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:32:52.700995Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:32:52.701098Z node 13 :PERSQUEUE INFO: new Cookie src|a531a270-56b97475-47b7225d-d542e67e_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-03-28T09:32:52.701210Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-03-28T09:32:52.701284Z node 13 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:32:52.710459Z node 13 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:32:52.710502Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:32:52.710627Z node 13 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:32:52.710812Z node 13 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|a531a270-56b97475-47b7225d-d542e67e_0 2025-03-28T09:32:52.711855Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743154372711 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:32:52.711960Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|a531a270-56b97475-47b7225d-d542e67e_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-28T09:32:52.712293Z :INFO: [] MessageGroupId [src] SessionId [src|a531a270-56b97475-47b7225d-d542e67e_0] Write session: close. Timeout = 0 ms 2025-03-28T09:32:52.712335Z :INFO: [] MessageGroupId [src] SessionId [src|a531a270-56b97475-47b7225d-d542e67e_0] Write session will now close 2025-03-28T09:32:52.712372Z :DEBUG: [] MessageGroupId [src] SessionId [src|a531a270-56b97475-47b7225d-d542e67e_0] Write session: aborting 2025-03-28T09:32:52.712712Z :INFO: [] MessageGroupId [src] SessionId [src|a531a270-56b97475-47b7225d-d542e67e_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:32:52.712750Z :DEBUG: [] MessageGroupId [src] SessionId [src|a531a270-56b97475-47b7225d-d542e67e_0] Write session: destroy 2025-03-28T09:32:52.716545Z node 13 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|a531a270-56b97475-47b7225d-d542e67e_0 grpc read done: success: 0 data: 2025-03-28T09:32:52.716577Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a531a270-56b97475-47b7225d-d542e67e_0 grpc read failed 2025-03-28T09:32:52.716613Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a531a270-56b97475-47b7225d-d542e67e_0 grpc closed 2025-03-28T09:32:52.716635Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|a531a270-56b97475-47b7225d-d542e67e_0 is DEAD 2025-03-28T09:32:52.717216Z node 13 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:32:52.717574Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [13:7486791022386034639:2503] destroyed 2025-03-28T09:32:52.717635Z node 13 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-28T09:32:52.770930Z :INFO: [/Root] [/Root] [5fc4185-9ad2605e-f664a7cc-df830c61] Starting read session 2025-03-28T09:32:52.770986Z :DEBUG: [/Root] [/Root] [5fc4185-9ad2605e-f664a7cc-df830c61] Starting cluster discovery 2025-03-28T09:32:52.771256Z :INFO: [/Root] [/Root] [5fc4185-9ad2605e-f664a7cc-df830c61] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17501: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:17501
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:17501. " 2025-03-28T09:32:52.771305Z :DEBUG: [/Root] [/Root] [5fc4185-9ad2605e-f664a7cc-df830c61] Restart cluster discovery in 0.009246s 2025-03-28T09:32:52.780914Z :DEBUG: [/Root] [/Root] [5fc4185-9ad2605e-f664a7cc-df830c61] Starting cluster discovery 2025-03-28T09:32:52.781288Z :INFO: [/Root] [/Root] [5fc4185-9ad2605e-f664a7cc-df830c61] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17501: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:17501
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:17501. " 2025-03-28T09:32:52.781345Z :DEBUG: [/Root] [/Root] [5fc4185-9ad2605e-f664a7cc-df830c61] Restart cluster discovery in 0.019183s 2025-03-28T09:32:52.802406Z :DEBUG: [/Root] [/Root] [5fc4185-9ad2605e-f664a7cc-df830c61] Starting cluster discovery 2025-03-28T09:32:52.802644Z :INFO: [/Root] [/Root] [5fc4185-9ad2605e-f664a7cc-df830c61] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17501: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:17501
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:17501. " 2025-03-28T09:32:52.802681Z :DEBUG: [/Root] [/Root] [5fc4185-9ad2605e-f664a7cc-df830c61] Restart cluster discovery in 0.022731s 2025-03-28T09:32:52.825861Z :DEBUG: [/Root] [/Root] [5fc4185-9ad2605e-f664a7cc-df830c61] Starting cluster discovery 2025-03-28T09:32:52.826139Z :NOTICE: [/Root] [/Root] [5fc4185-9ad2605e-f664a7cc-df830c61] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17501: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:17501
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:17501. " } 2025-03-28T09:32:52.826343Z :NOTICE: [/Root] [/Root] [5fc4185-9ad2605e-f664a7cc-df830c61] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:17501: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:17501
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:17501. " } 2025-03-28T09:32:52.826476Z :INFO: [/Root] [/Root] [5fc4185-9ad2605e-f664a7cc-df830c61] Closing read session. Close timeout: 0.000000s 2025-03-28T09:32:52.826587Z :NOTICE: [/Root] [/Root] [5fc4185-9ad2605e-f664a7cc-df830c61] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> TFlatTest::WriteSplitWriteSplit [GOOD] >> TLocksTest::Range_BrokenLock1 [GOOD] |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TLocksTest::Range_BrokenLockMax [GOOD] >> TLocksTest::Range_CorrectDot >> KqpExplain::FewEffects+UseSink [GOOD] >> KqpExplain::FewEffects-UseSink >> TObjectStorageListingTest::CornerCases >> KqpKv::ReadRows_ExternalBlobs-UseExtBlobsPrecharge [GOOD] >> KqpKv::ReadRows_Decimal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitWriteSplit [GOOD] Test command err: 2025-03-28T09:32:50.259312Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791011643464844:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:50.259483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee0/r3tmp/tmppT0Nsg/pdisk_1.dat 2025-03-28T09:32:50.743927Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:50.744042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:50.746993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:50.749706Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22501 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:51.042063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:51.081686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
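In the PersQueueSdkReadSessionTest output above, cluster discovery fails against localhost:17501 and is restarted after small, growing, apparently jittered delays (0.009246s, 0.019183s, 0.022731s) before the session aborts with TRANSPORT_UNAVAILABLE. Below is a standalone sketch of that retry shape, assuming a jittered doubling delay and a fixed attempt budget; both are assumptions for illustration, since the SDK's exact policy is not visible in the log:

#include <chrono>
#include <cstdio>
#include <random>
#include <thread>

// Retry a discovery call with a small randomized, growing delay, then give
// up after a few attempts. Names and the backoff parameters are illustrative.
bool DiscoverWithRetries(int maxAttempts) {
    std::mt19937 rng(std::random_device{}());
    double baseMs = 10.0;  // nominal first delay, jittered below
    for (int attempt = 1; attempt <= maxAttempts; ++attempt) {
        bool ok = false;  // stand-in for the gRPC discovery request failing
        if (ok) {
            return true;
        }
        if (attempt == maxAttempts) {
            break;  // abort the session: TRANSPORT_UNAVAILABLE
        }
        std::uniform_real_distribution<double> jitter(0.5, 1.5);
        double delayMs = baseMs * jitter(rng);
        std::printf("Restart cluster discovery in %.6fs\n", delayMs / 1000.0);
        std::this_thread::sleep_for(
            std::chrono::duration<double, std::milli>(delayMs));
        baseMs *= 2.0;  // grow the nominal delay between attempts
    }
    return false;
}

int main() {
    return DiscoverWithRetries(4) ? 0 : 1;
}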
2025-03-28T09:32:51.252155Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-28T09:32:51.258274Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-28T09:32:51.284530Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-28T09:32:51.288737Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-03-28T09:32:51.314179Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 4 blobs 8r (max 9), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743154371217 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) 2025-03-28T09:32:51.432763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } } } TxId: 281474976710680 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:32:51.433002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:32:51.433285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-28T09:32:51.433330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-28T09:32:51.433369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-03-28T09:32:51.433617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-03-28T09:32:51.433697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710680:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:32:51.434726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710680, response: Status: StatusAccepted TxId: 281474976710680 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:32:51.434829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710680, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2025-03-28T09:32:51.435023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:32:51.435065Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710680:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 waiting... 
2025-03-28T09:32:51.435422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-28T09:32:51.435636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-28T09:32:51.435729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 5 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-28T09:32:51.436196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710680, at schemeshard: 72057594046644480 2025-03-28T09:32:51.436218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710680, ready parts: 0/1, is published: true 2025-03-28T09:32:51.436255Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710680, at schemeshard: 72057594046644480 2025-03-28T09:32:51.436339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-03-28T09:32:51.436430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-03-28T09:32:51.436467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:5 msg type: 268697601 2025-03-28T09:32:51.436540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710680, partId: 0, tablet: 72057594037968897 2025-03-28T09:32:51.436578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:3, partId: 0 2025-03-28T09:32:51.436589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:4, partId: 0 2025-03-28T09:32:51.436596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:5, partId: 0 2025-03-28T09:32:51.439865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 
72057594037968897 2025-03-28T09:32:51.439899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:3, partId: 0 2025-03-28T09:32:51.440061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710680:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-03-28T09:32:51.440099Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710680:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2025-03-28T09:32:51.440161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710680:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2025-03-28T09:32:51.440520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 4 TabletID: 72075186224037891 Origin: 72057594037968897 2025-03-28T09:32:51.440531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRela ... 9 2025-03-28T09:32:54.397888Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715694:0 progress is 1/1 2025-03-28T09:32:54.397896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715694 ready parts: 1/1 2025-03-28T09:32:54.397910Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715694:0 progress is 1/1 2025-03-28T09:32:54.397917Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715694 ready parts: 1/1 2025-03-28T09:32:54.397930Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715694, ready parts: 1/1, is published: true 2025-03-28T09:32:54.397963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7486791030839209105:2423] message: TxId: 281474976715694 2025-03-28T09:32:54.397976Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715694 ready parts: 1/1 2025-03-28T09:32:54.397991Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715694:0 2025-03-28T09:32:54.397999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715694:0 2025-03-28T09:32:54.398126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 8 2025-03-28T09:32:54.399152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486791026544241109 RawX2: 4503608217307387 } TabletId: 72075186224037888 State: 4 2025-03-28T09:32:54.399197Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:32:54.399460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 TClient::Ls request: /dc-1/Dir/TableOld 2025-03-28T09:32:54.400875Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-28T09:32:54.401085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 
72057594046644480, LocalPathId: 3] was 7 2025-03-28T09:32:54.402061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486791030839208739 RawX2: 4503608217307454 } TabletId: 72075186224037891 State: 4 2025-03-28T09:32:54.402096Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:32:54.402654Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-03-28T09:32:54.402950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-28T09:32:54.402969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-28T09:32:54.403359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:32:54.404519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-28T09:32:54.404700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 6 2025-03-28T09:32:54.405234Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-28T09:32:54.405252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-28T09:32:54.406439Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486791030839208742 RawX2: 4503608217307455 } TabletId: 72075186224037892 State: 4 2025-03-28T09:32:54.406474Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:32:54.406982Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-03-28T09:32:54.407047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-03-28T09:32:54.408560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 5 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2025-03-28T09:32:54.408726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-03-28T09:32:54.409394Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2025-03-28T09:32:54.409416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-03-28T09:32:54.409434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-03-28T09:32:54.411205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486791026544241108 RawX2: 
4503608217307386 } TabletId: 72075186224037889 State: 4 2025-03-28T09:32:54.411242Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:32:54.411375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486791030839208953 RawX2: 4503608217307483 } TabletId: 72075186224037893 State: 4 2025-03-28T09:32:54.411393Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:32:54.411470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486791030839208738 RawX2: 4503608217307453 } TabletId: 72075186224037890 State: 4 2025-03-28T09:32:54.411486Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:32:54.411551Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486791030839208948 RawX2: 4503608217307482 } TabletId: 72075186224037894 State: 4 2025-03-28T09:32:54.411566Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:32:54.411808Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:32:54.411927Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:32:54.411983Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:32:54.412037Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:32:54.416386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-28T09:32:54.416540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-28T09:32:54.416649Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 6 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2025-03-28T09:32:54.416714Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-28T09:32:54.416761Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-28T09:32:54.416835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-28T09:32:54.416897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 
72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-03-28T09:32:54.416961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-28T09:32:54.417017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:32:54.417025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-28T09:32:54.417049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:32:54.418107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-28T09:32:54.418124Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-28T09:32:54.418149Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-28T09:32:54.418160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-03-28T09:32:54.418160Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2025-03-28T09:32:54.418166Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-03-28T09:32:54.418167Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2025-03-28T09:32:54.418176Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-03-28T09:32:54.418277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-28T09:32:54.418285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-28T09:32:54.418308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-03-28T09:32:54.418315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-03-28T09:32:54.418331Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink >> TFlatTest::Ls ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock1 [GOOD] Test command err: 2025-03-28T09:32:19.056441Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790878708257306:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:19.056529Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eef/r3tmp/tmpvjREBG/pdisk_1.dat 2025-03-28T09:32:19.409785Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:19.456625Z node 1 :HIVE 
WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:19.456754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:19.458705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29627 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:19.660890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:19.690843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:19.822181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:19.897004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:22.001114Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790892057761066:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:22.001155Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eef/r3tmp/tmpdYXf8c/pdisk_1.dat 2025-03-28T09:32:22.130500Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:22.155834Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:22.155938Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:22.157528Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29589 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:22.321694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:22.326061Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:22.344378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:22.422733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:22.467977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:32:25.114937Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486790906724425550:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:25.115001Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eef/r3tmp/tmpKrC55w/pdisk_1.dat 2025-03-28T09:32:25.211216Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:25.251274Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:25.251357Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:25.252996Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23677 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:25.395132Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:25.416082Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:25.463394Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:25.532440Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:28.241194Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486790918343936872:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:28.241299Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eef/r3tmp/tmpS2Z3jQ/pdisk_1.dat 2025-03-28T09:32:28.327111Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:28.377769Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:28.377852Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:28.379348Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12865 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:28.544206Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:28.548995Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, ... : HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:39.602370Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:39.603556Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27667 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-28T09:32:39.821760Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:32:39.831402Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:39.849746Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:39.925482Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:40.005957Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:43.751724Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486790984533311387:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:43.751804Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eef/r3tmp/tmpAg6AKF/pdisk_1.dat 2025-03-28T09:32:43.852728Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:43.888790Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:43.888881Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:43.890403Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20585 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:44.099743Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:44.120529Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:44.196438Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:44.244536Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:48.043977Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486791004957228336:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:48.044058Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eef/r3tmp/tmpjjizmA/pdisk_1.dat 2025-03-28T09:32:48.184006Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:48.196135Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:48.196208Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:48.197943Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17116 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:48.485054Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:48.508043Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:48.570316Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:48.628074Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:52.374823Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486791020388618415:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:52.374943Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eef/r3tmp/tmp5r1iHc/pdisk_1.dat 2025-03-28T09:32:52.494237Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:52.525512Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:52.525590Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:52.527259Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7587 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:52.794374Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:52.820534Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:52.897153Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:52.982707Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... |92.6%| [TA] $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {RESULT} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TopicAutoscaling::ReadFromTimestamp_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK >> TopicAutoscaling::PartitionMerge_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 >> TopicAutoscaling::ControlPlane_CDC [GOOD] >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] >> TObjectStorageListingTest::CornerCases [GOOD] >> TObjectStorageListingTest::Decimal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestSecretsExistingValidation [GOOD] Test command err: Trying to start YDB, gRPC: 62461, MsgBus: 27269 2025-03-28T09:32:39.333641Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790967040940506:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:39.333832Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013d1/r3tmp/tmpp0HyY2/pdisk_1.dat TServer::EnableGrpc on GrpcPort 62461, node 1 2025-03-28T09:32:39.659970Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:32:39.660297Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:32:39.669949Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:39.708736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:39.708920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:39.731544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:39.759587Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:39.759616Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:39.759643Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:39.759808Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27269 TClient is connected to server localhost:27269 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:40.271497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:42.257078Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790979925843104:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:42.257196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:42.791853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:32:42.917937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:32:42.948607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:32:43.017011Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:32:43.069430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790984220810719:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:43.069519Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:43.069537Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790984220810724:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:43.073546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480 2025-03-28T09:32:43.083463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790984220810726:2369], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2025-03-28T09:32:43.171758Z node 1 :TX_PROXY ERROR: Actor# [1:7486790984220810778:2570] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 11], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13193, MsgBus: 21459 2025-03-28T09:32:44.323825Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790989224287897:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:44.323951Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013d1/r3tmp/tmpg7SwdN/pdisk_1.dat 2025-03-28T09:32:44.429430Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13193, node 2 2025-03-28T09:32:44.463448Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:44.463556Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:44.465764Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:44.478348Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:44.478376Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:44.478386Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:44.478496Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21459 TClient is connected to server localhost:21459 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:44.851761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:44.856570Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:32:44.872171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:44.955117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:45.097315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:45.162338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:47.199785Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486791002109191541:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:47.199872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:47.239244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:32:47.267296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:32:47.295458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:47.323771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:47.354720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation par ... _TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:32:47.438978Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486791002109192052:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:47.439070Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486791002109192057:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:47.439100Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:47.441827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:32:47.450157Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486791002109192059:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:32:47.515099Z node 2 :TX_PROXY ERROR: Actor# [2:7486791002109192112:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:48.450521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-03-28T09:32:49.141378Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-28T09:32:49.325773Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790989224287897:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:49.332856Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:32:49.620928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2025-03-28T09:32:50.145263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-03-28T09:32:50.664944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-03-28T09:32:51.140112Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-03-28T09:32:51.587895Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:32:51.663760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-28T09:32:53.840319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715714:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 18572, MsgBus: 9105 2025-03-28T09:32:54.725091Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486791030015083383:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:54.725147Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013d1/r3tmp/tmpoiXr42/pdisk_1.dat 2025-03-28T09:32:54.845279Z node 3 :IMPORT WARN: Table profiles were not loaded 
2025-03-28T09:32:54.869631Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:54.869722Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:54.871520Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18572, node 3 2025-03-28T09:32:54.913502Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:54.913525Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:54.913534Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:54.913681Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9105 TClient is connected to server localhost:9105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:55.442808Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:55.454940Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:55.531328Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:55.689228Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:55.770302Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:58.035846Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486791047194954341:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:58.035978Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:58.083808Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:32:58.135202Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:32:58.164420Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:58.195964Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:58.268844Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:32:58.303758Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:32:58.348767Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486791047194954854:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:58.348861Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:58.348905Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486791047194954859:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:58.352743Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:32:58.363039Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486791047194954861:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:32:58.436120Z node 3 :TX_PROXY ERROR: Actor# [3:7486791047194954913:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TFlatTest::Ls [GOOD] >> TFlatTest::LsPathId ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2025-03-28T09:31:11.555073Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1743154271555048 2025-03-28T09:31:11.894001Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790588727313384:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:11.894047Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:31:11.929955Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790589996380268:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:11.930511Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:31:12.082154Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:31:12.085639Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001b98/r3tmp/tmp51W1PQ/pdisk_1.dat 2025-03-28T09:31:12.292210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:12.292320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:12.292885Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:12.292932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:12.294555Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:12.299292Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:31:12.299476Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:12.299678Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13829, node 1 2025-03-28T09:31:12.454053Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001b98/r3tmp/yandexnMjPzD.tmp 2025-03-28T09:31:12.454109Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001b98/r3tmp/yandexnMjPzD.tmp 2025-03-28T09:31:12.455727Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001b98/r3tmp/yandexnMjPzD.tmp 2025-03-28T09:31:12.455892Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration 2025-03-28T09:31:12.645248Z INFO: TTestServer started on Port 7768 GrpcPort 13829 TClient is connected to server localhost:7768 PQClient connected to localhost:13829 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:12.947661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:12.984063Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:31:14.695337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790601612216275:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:14.695345Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790601612216280:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:14.696436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:14.702994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:31:14.723241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790601612216290:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:31:14.790745Z node 1 :TX_PROXY ERROR: Actor# [1:7486790601612216373:2672] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:15.236248Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486790607176249791:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:31:15.236621Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDVjMDA2OWUtZjg3Y2Q0MTUtZDBlYWJkZDEtZjc1ZWRlZjA=, ActorId: [2:7486790607176249751:2308], ActorState: ExecuteState, TraceId: 01jqe1ert276n6pr4sys6509ec, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:31:15.236355Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790601612216383:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:31:15.240070Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:31:15.248323Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWUxMmMxZGMtNzRkOWY1ZDMtYjM4NTNmMjQtMzMyODlmNzk=, ActorId: [1:7486790601612216273:2335], ActorState: ExecuteState, TraceId: 01jqe1erc5apgs2edtky116yaf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:31:15.248684Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:31:15.350396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:15.482075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:15.606506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:13829", true, true, 1000); 2025-03-28T09:31:15.996069Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqe1esb659z6jer9mwnyeaq5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODE4YTUwMzYtYWJiODNhZWEtZGJhZGRkM2ItMjhiMTkzMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7486790610202151375:2975] 2025-03-28T09:31:16.894198Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790588727313384:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:16.894281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:31:16.925377Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486790589996380268:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:16.925454Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-28T09:31:21.651805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:13829 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-28T09:31:21.720542Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:13829 MetaRequest { CmdCreateTopic { ... st { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-03-28T09:32:57.668224Z node 13 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:33214 2025-03-28T09:32:57.668248Z node 13 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:33214 proto=v1 topic=test-topic durationSec=0 2025-03-28T09:32:57.668263Z node 13 :PQ_WRITE_PROXY INFO: init check schema 2025-03-28T09:32:57.670248Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-03-28T09:32:57.670410Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-03-28T09:32:57.670429Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:32:57.670445Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-28T09:32:57.670469Z node 13 :PQ_PARTITION_CHOOSER 
DEBUG: TPartitionChooser [13:7486791042365872376:2574] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2025-03-28T09:32:57.674731Z node 13 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [13:7486791042365872376:2574] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2025-03-28T09:32:57.844602Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710697. Failed to resolve tablet: 72075186224037891 after several retries. 2025-03-28T09:32:57.844737Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7486791042365872389:2576] TxId: 281474976710697. Ctx: { TraceId: 01jqe1hwybcv9cb2a9wcryrf18, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=NjIxYzE3OTYtYjA2MDg5YTgtYWU5ZWJiNmQtY2NhYjFjYTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-03-28T09:32:57.844920Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=NjIxYzE3OTYtYjA2MDg5YTgtYWU5ZWJiNmQtY2NhYjFjYTc=, ActorId: [13:7486791042365872377:2576], ActorState: ExecuteState, TraceId: 01jqe1hwybcv9cb2a9wcryrf18, Create QueryResponse for error on request, msg: 2025-03-28T09:32:57.846318Z node 13 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [13:7486791042365872376:2574] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=NjIxYzE3OTYtYjA2MDg5YTgtYWU5ZWJiNmQtY2NhYjFjYTc=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqe1hwybcv9cb2a9wdp3azxs" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2025-03-28T09:32:57.846469Z node 13 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=NjIxYzE3OTYtYjA2MDg5YTgtYWU5ZWJiNmQtY2NhYjFjYTc=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqe1hwybcv9cb2a9wdp3azxs" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2025-03-28T09:32:57.846901Z node 13 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2025-03-28T09:32:57.847373Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7fce8d88-d5f44b0b-5099146c-5c0f841b_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=13&id=NjIxYzE3OTYtYjA2MDg5YTgtYWU5ZWJiNmQtY2NhYjFjYTc=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqe1hwybcv9cb2a9wdp3azxs" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2025-03-28T09:32:57.847415Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7fce8d88-d5f44b0b-5099146c-5c0f841b_0] Write session will restart in 2.000000s 2025-03-28T09:32:57.847561Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7fce8d88-d5f44b0b-5099146c-5c0f841b_0] Write session: Do CDS request 2025-03-28T09:32:57.847601Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7fce8d88-d5f44b0b-5099146c-5c0f841b_0] Do schedule cds request after 2000 ms 2025-03-28T09:32:58.137040Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720682. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-28T09:32:58.137158Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7486791041688936971:2499] TxId: 281474976720682. Ctx: { TraceId: 01jqe1hx735w8y3vrnt72hdddc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=N2VkMTVkMzMtZWExMTk2MWMtNjIxNjc5NmItYTRmMDE0Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-28T09:32:58.137368Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=N2VkMTVkMzMtZWExMTk2MWMtNjIxNjc5NmItYTRmMDE0Yzc=, ActorId: [14:7486791041688936968:2499], ActorState: ExecuteState, TraceId: 01jqe1hx735w8y3vrnt72hdddc, Create QueryResponse for error on request, msg: 2025-03-28T09:32:58.138429Z node 14 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqe1hx735w8y3vrntaam7vrj" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-03-28T09:32:58.202408Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710699. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-28T09:32:58.202536Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7486791046660839756:2585] TxId: 281474976710699. Ctx: { TraceId: 01jqe1hx964h8sewbp6aeke0zj, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MTU2NzMxNjEtMjJjZDYyYmUtMWNjZDZjNzgtM2I5Y2JmM2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-28T09:32:58.202754Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=MTU2NzMxNjEtMjJjZDYyYmUtMWNjZDZjNzgtM2I5Y2JmM2M=, ActorId: [13:7486791046660839753:2585], ActorState: ExecuteState, TraceId: 01jqe1hx964h8sewbp6aeke0zj, Create QueryResponse for error on request, msg: 2025-03-28T09:32:58.203890Z node 13 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqe1hx97771h1wnpq44rmmwx" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-03-28T09:32:58.558215Z node 13 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976710701. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-28T09:32:58.558350Z node 13 :KQP_EXECUTER WARN: ActorId: [13:7486791046660839803:2579] TxId: 281474976710701. Ctx: { TraceId: 01jqe1hx5tedtkr5spsphf0q7d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=13&id=MjcwZGUxMWYtNTViNzZmOGEtMzg0NmEzYS03YjZmOTg1MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-28T09:32:58.558574Z node 13 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=13&id=MjcwZGUxMWYtNTViNzZmOGEtMzg0NmEzYS03YjZmOTg1MQ==, ActorId: [13:7486791042365872438:2579], ActorState: ExecuteState, TraceId: 01jqe1hx5tedtkr5spsphf0q7d, Create QueryResponse for error on request, msg: 2025-03-28T09:32:58.559900Z node 13 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqe1hxmee5240rrk9kfbzr9h" } } YdbStatus: UNAVAILABLE ConsumedRu: 308 } 2025-03-28T09:32:58.572119Z node 14 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720684. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-28T09:32:58.572238Z node 14 :KQP_EXECUTER WARN: ActorId: [14:7486791045983904319:2497] TxId: 281474976720684. Ctx: { TraceId: 01jqe1hx6rctg3kxkyybc6xk1a, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=14&id=ZDdiODM1OWYtMjAzOTYzMjMtMjRlYTFjYTAtNThjODIyZTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-28T09:32:58.572447Z node 14 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=14&id=ZDdiODM1OWYtMjAzOTYzMjMtMjRlYTFjYTAtNThjODIyZTA=, ActorId: [14:7486791041688936964:2497], ActorState: ExecuteState, TraceId: 01jqe1hx6rctg3kxkyybc6xk1a, Create QueryResponse for error on request, msg: 2025-03-28T09:32:58.573976Z node 14 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqe1hxmt25v7j5d8mxfy27gq" } } YdbStatus: UNAVAILABLE ConsumedRu: 295 } 2025-03-28T09:32:58.658156Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7fce8d88-d5f44b0b-5099146c-5c0f841b_0] Write session: close. Timeout = 0 ms 2025-03-28T09:32:58.658228Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7fce8d88-d5f44b0b-5099146c-5c0f841b_0] Write session will now close 2025-03-28T09:32:58.658305Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7fce8d88-d5f44b0b-5099146c-5c0f841b_0] Write session: aborting 2025-03-28T09:32:58.659186Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7fce8d88-d5f44b0b-5099146c-5c0f841b_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-03-28T09:32:58.659240Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|7fce8d88-d5f44b0b-5099146c-5c0f841b_0] Write session: destroy >> TLocksTest::BrokenNullLock [GOOD] >> TColumnShardTestSchema::RebootHotTiersRevCompression >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds >> TSubscriberTest::ReconnectOnFailure >> TLocksFatTest::PointSetBreak [GOOD] >> TLocksFatTest::LocksLimit >> TSubscriberTest::ReconnectOnFailure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::ReconnectOnFailure [GOOD] Test command err: 2025-03-28T09:33:02.424188Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:33:02.430802Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-03-28T09:33:02.431055Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-03-28T09:33:02.432043Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-03-28T09:33:02.432108Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-03-28T09:33:02.432203Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 
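Besides the UNAVAILABLE handling (the write session logs "Got error. Status: UNAVAILABLE" and restarts itself after 2.000000s), this excerpt also shows the partition chooser's metadata queries: TTableHelper selects Partition, CreateTime, AccessTime and SeqNo from /Root/PQ/SourceIdMeta2 by (Hash, Topic, SourceId), upserts the row on first use, and touches AccessTime afterwards, so a given message_group_id keeps writing to the same partition. The sketch below is a toy in-memory version of that pinning idea only; how YDB actually picks a partition on a miss is not shown in this log, so the hash fallback, the class name TPartitionChooserSketch, and the map standing in for the table are all assumptions.

    // Toy sketch of SourceId -> partition pinning, with an in-memory map in
    // place of /Root/PQ/SourceIdMeta2; the hash fallback on a miss is an
    // assumption for the example, not YDB's actual choice logic.
    #include <cstdint>
    #include <functional>
    #include <string>
    #include <unordered_map>

    class TPartitionChooserSketch {
    public:
        explicit TPartitionChooserSketch(uint32_t partitions)
            : Partitions(partitions) {}

        uint32_t Choose(const std::string& sourceId) {
            // Hit: analogous to SELECT Partition FROM SourceIdMeta2 WHERE
            // Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId.
            auto it = Pinned.find(sourceId);
            if (it != Pinned.end()) {
                return it->second;
            }
            // Miss: pick a partition (here: plain hash) and persist the
            // choice, mirroring the UPSERT above, so the same
            // message_group_id keeps its write ordering on one partition.
            const auto p = static_cast<uint32_t>(
                std::hash<std::string>{}(sourceId) % Partitions);
            Pinned.emplace(sourceId, p);
            return p;
        }

    private:
        uint32_t Partitions;
        std::unordered_map<std::string, uint32_t> Pinned;
    };

    int main() {
        TPartitionChooserSketch chooser(10);
        // Stable pinning: repeated calls for one SourceId agree.
        return chooser.Choose("test-message-group-id") ==
                       chooser.Choose("test-message-group-id")
                   ? 0
                   : 1;
    }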
2025-03-28T09:33:02.432246Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:34:2065][path] Set up state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:02.432350Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-03-28T09:33:02.432386Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:02.434919Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-03-28T09:33:02.435039Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:02.435107Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-03-28T09:33:02.435148Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:02.435186Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-03-28T09:33:02.435213Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:02.446776Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:45:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-03-28T09:33:02.446979Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:35:2065] 2025-03-28T09:33:02.447073Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:02.447180Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:46:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-03-28T09:33:02.447321Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:36:2065] 2025-03-28T09:33:02.447361Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: 
AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:02.447419Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:47:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-03-28T09:33:02.447506Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2065] 2025-03-28T09:33:02.447538Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:34:2065][path] Ignore empty state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:02.452495Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:45:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-03-28T09:33:02.452661Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:35:2065] 2025-03-28T09:33:02.452739Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:34:2065][path] Update to strong state: owner# [2:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } >> TopicAutoscaling::PartitionSplit_PreferedPartition_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenNullLock [GOOD] Test command err: 2025-03-28T09:32:25.211591Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790905494574070:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:25.211651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eeb/r3tmp/tmpIsGVm8/pdisk_1.dat 2025-03-28T09:32:25.551772Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:25.608047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:25.608180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:25.609846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19780 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:25.813173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:25.823744Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:25.842635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:25.981882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:26.019882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:28.139601Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790920406661677:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:28.139707Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eeb/r3tmp/tmp54n2tD/pdisk_1.dat 2025-03-28T09:32:28.232855Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:28.270873Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:28.270961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:28.273132Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19712 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. waiting... 2025-03-28T09:32:28.431533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:28.453853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:28.512532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:28.563925Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:32:31.438375Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486790929638438774:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:31.438456Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eeb/r3tmp/tmpv1qSxQ/pdisk_1.dat 2025-03-28T09:32:31.541697Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:31.579192Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:31.579269Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:31.580835Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13254 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:31.720118Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:31.742770Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:31.789635Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:31.832619Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:34.773730Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486790946180055986:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:34.773796Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eeb/r3tmp/tmp97Szwc/pdisk_1.dat 2025-03-28T09:32:34.891129Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:34.905790Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:34.905879Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:34.907557Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15714 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:35.048574Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:35.066801Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation ... (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:45.614735Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2965 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:45.851644Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:45.867427Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:45.924782Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:45.984823Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
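Each test bootstrap above repeats the same readiness probe: the client issues TClient::Ls for the root path and loops ("WaitRootIsUp 'dc-1'...") until the response carries StatusCode: SUCCESS. A hedged sketch of such a poll-until-ready helper follows; TryLs is a placeholder for the real scheme describe call, and the deadline and poll interval are invented for the example.

    // Sketch of the poll-until-ready probe, assuming tryLs stands in for the
    // real Ls/describe call; deadline and interval are illustrative values.
    #include <chrono>
    #include <functional>
    #include <thread>

    bool WaitRootIsUp(const std::function<bool()>& tryLs,
                      std::chrono::seconds deadline = std::chrono::seconds(30),
                      std::chrono::milliseconds interval = std::chrono::milliseconds(200)) {
        const auto start = std::chrono::steady_clock::now();
        while (std::chrono::steady_clock::now() - start < deadline) {
            if (tryLs()) {
                return true;   // Ls answered StatusCode: SUCCESS for the root
            }
            std::this_thread::sleep_for(interval);  // not up yet, poll again
        }
        return false;          // deadline exceeded, bootstrap failed
    }

    int main() {
        int attempts = 0;
        // Simulated client: the root becomes visible on the third Ls call.
        auto tryLs = [&attempts]() { return ++attempts >= 3; };
        return WaitRootIsUp(tryLs) ? 0 : 1;
    }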
2025-03-28T09:32:49.070007Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486791008960572352:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:49.070087Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eeb/r3tmp/tmpkvQL7M/pdisk_1.dat 2025-03-28T09:32:49.216053Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:49.232229Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:49.232339Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:49.233817Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30287 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:49.474008Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:49.499915Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:49.562530Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:49.674176Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
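The METADATA_PROVIDER lines in each bootstrap follow one recurring pattern: the TTableExistsActor in table_exists.cpp first reports event=undelivered (the scheme cache is not up yet, hence scheme_cache_undelivered_message) and later event=timeout, and in both cases the accessor degrades to "cannot detect path existence" rather than failing hard. The snippet below sketches that degrade-on-timeout idea with a std::future standing in for the actor reply; CheckTableExists, EExists, and the 50 ms budget are hypothetical names and values, not YDB's actor API.

    // Sketch of degrading to "existence unknown" on timeout, assuming a
    // std::future in place of the actor reply channel.
    #include <chrono>
    #include <future>
    #include <iostream>
    #include <optional>

    enum class EExists { Yes, No };

    std::optional<EExists> CheckTableExists(std::future<EExists>& reply,
                                            std::chrono::milliseconds timeout) {
        if (reply.wait_for(timeout) == std::future_status::ready) {
            return reply.get();   // the scheme cache answered in time
        }
        return std::nullopt;      // event=timeout: existence is unknown
    }

    int main() {
        std::promise<EExists> never;   // simulates an undelivered request
        auto reply = never.get_future();
        const auto res = CheckTableExists(reply, std::chrono::milliseconds(50));
        std::cout << (res ? "answered"
                          : "cannot detect path existence;error=timeout")
                  << "\n";
        return 0;
    }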
2025-03-28T09:32:53.267628Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486791026785291514:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:53.267702Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eeb/r3tmp/tmpywJied/pdisk_1.dat 2025-03-28T09:32:53.417032Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:53.455780Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:53.455891Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:53.457923Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24365 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:53.829001Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:53.836884Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:53.847003Z node 9 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:32:53.852512Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:53.927377Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:53.990141Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:57.672562Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486791044733837918:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:57.672624Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eeb/r3tmp/tmpyiZf7f/pdisk_1.dat 2025-03-28T09:32:57.795739Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:57.817980Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:57.818083Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:57.819881Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:64046 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:58.068750Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:58.093482Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:58.162531Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:32:58.244188Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:20.363163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:20.363345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:20.363390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:20.363425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:20.364108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:20.364159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:20.364256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:20.364363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:20.365506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:20.447929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:20.447984Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:20.463080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:20.463249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:20.463378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:20.469647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:20.469863Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:20.472129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:20.472878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:20.483390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:20.491951Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:20.492072Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:20.492251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:32:20.492318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:20.492421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:20.492615Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.499906Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:20.665399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:20.665627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.665857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:20.666106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:20.666190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.668645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:20.668780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:20.668960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.669021Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:20.669062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:20.669104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:20.671037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.671088Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:20.671122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:20.672983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.673026Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 
72057594046678944 2025-03-28T09:32:20.673063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:20.673128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:20.681787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:20.686910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:20.687195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:20.688228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:20.688397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:20.688467Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:20.688746Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:20.688808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:20.688945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:20.689015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:20.691252Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:20.691298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:20.691446Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:20.691484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:20.691887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:20.691930Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:20.692011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:20.692058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:20.692094Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:20.692121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:20.692153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:20.692207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:20.692263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:20.692300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:20.692372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:20.692406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:20.692433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:20.694120Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:20.694226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:20.694266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 33409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2025-03-28T09:32:51.854830Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2025-03-28T09:32:51.854983Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-28T09:32:51.855032Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2025-03-28T09:32:51.855064Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2025-03-28T09:32:51.865512Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:32:55.468742Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0022 2025-03-28T09:32:55.490758Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.002 2025-03-28T09:32:55.524564Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-03-28T09:32:55.524904Z node 4 :FLAT_TX_SCHEMESHARD 
DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-03-28T09:32:55.524999Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2025-03-28T09:32:55.525054Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2025-03-28T09:32:55.525194Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-28T09:32:55.525255Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2025-03-28T09:32:55.525288Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2025-03-28T09:32:55.535727Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:32:59.080729Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 70 rowCount 2 cpuUsage 0.0019 2025-03-28T09:32:59.102408Z node 4 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0017 2025-03-28T09:32:59.147577Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-03-28T09:32:59.147788Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 70 row count 2 2025-03-28T09:32:59.147865Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=Table, is column=0, is olap=0 2025-03-28T09:32:59.147908Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 2, DataSize 70 2025-03-28T09:32:59.148027Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-28T09:32:59.148077Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=0, is column=0, is olap=0 2025-03-28T09:32:59.148102Z node 4 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 4: RowCount 0, DataSize 0, with borrowed parts 2025-03-28T09:32:59.158460Z node 4 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:33:01.917399Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Bootstrap: self# [4:577:2535], attempt# 1 2025-03-28T09:33:01.940137Z 
node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvReset: self# [4:576:2533] 2025-03-28T09:33:01.948734Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvReady: self# [4:577:2535], sender# [4:576:2533] 2025-03-28T09:33:01.948903Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [4:576:2533] 2025-03-28T09:33:01.949152Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [4:577:2535], sender# [4:576:2533], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } 2025-03-28T09:33:01.949376Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [4:577:2535], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: [6e3e0a41fdab8add833862f1bd2954c3,1d8dd09e584ce6a47582a31b591900e2,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:12139 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 92E4D532-69F1-4D62-8F4E-CAF0058CA373 amz-sdk-request: attempt=1 content-length: 459 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2025-03-28T09:33:01.953959Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [4:577:2535], result# 2025-03-28T09:33:01.954171Z node 4 :DATASHARD_BACKUP DEBUG: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [4:576:2533], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2025-03-28T09:33:01.967892Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 447 RawX2: 17179871600 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:33:01.967970Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2025-03-28T09:33:01.968167Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 447 RawX2: 17179871600 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:33:01.968305Z node 4 :FLAT_TX_SCHEMESHARD INFO: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 447 RawX2: 17179871600 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2025-03-28T09:33:01.968395Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:01.968447Z node 4 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:33:01.968487Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:33:01.968539Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710759:0 129 -> 240 2025-03-28T09:33:01.968730Z node 4 :FLAT_TX_SCHEMESHARD WARN: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:01.971124Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:33:01.971554Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2025-03-28T09:33:01.971616Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2025-03-28T09:33:01.971773Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-03-28T09:33:01.971815Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-28T09:33:01.971865Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710759:0 progress is 1/1 2025-03-28T09:33:01.971898Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-28T09:33:01.971943Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2025-03-28T09:33:01.972047Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:133:2156] message: TxId: 281474976710759 2025-03-28T09:33:01.972110Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2025-03-28T09:33:01.972153Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710759:0 2025-03-28T09:33:01.972191Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710759:0 2025-03-28T09:33:01.972319Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:33:01.974318Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2025-03-28T09:33:01.974410Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710759 2025-03-28T09:33:01.976371Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:33:01.976433Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [4:595:2550] TestWaitNotification: OK eventTxId 102 >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase >> TFlatTest::CopyTableAndRead >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad >> TObjectStorageListingTest::Decimal [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> TExportToS3Tests::ShouldCheckQuotas >> KqpExplain::FewEffects-UseSink [GOOD] >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit >> 
TFlatTest::LsPathId [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad >> KqpWorkload::KV [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex >> EraseRowsTests::ConditionalEraseRowsShouldNotErase >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::FewEffects-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12323, MsgBus: 22791 2025-03-28T09:32:39.102397Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790967961256502:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:39.102451Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014a2/r3tmp/tmpcvm7Wn/pdisk_1.dat 2025-03-28T09:32:39.484925Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:39.507111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:39.508360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:39.510131Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12323, node 1 2025-03-28T09:32:39.775305Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:39.775348Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:39.775359Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:39.775505Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22791 TClient is connected to server localhost:22791 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:40.505633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:40.556581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:40.744992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:40.895863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:32:40.964157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:42.087605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790980846160056:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:42.087742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:42.706486Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:32:42.739209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:32:42.767223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:42.799934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:42.831574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:32:42.904317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:32:42.998459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790980846160576:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:42.998541Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:42.998598Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790980846160581:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:43.006028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:32:43.015346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790980846160583:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:32:43.111584Z node 1 :TX_PROXY ERROR: Actor# [1:7486790985141127936:3459] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:44.102588Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790967961256502:2195];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:44.102662Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":8,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":4}],"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"InternalOperatorId":3}],"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Aggregate","Phase":"Final"},{"Inputs":[{"InternalOperatorId":2}],"Name":"Limit","Limit":"1"},{"Inputs":[{"ExternalPlanNodeId":6}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate-Limit-Aggregate"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 
+∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Data)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.Data = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"Name":"Limit","Limit":"1"}],"Node Type":"Limit"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 20893, MsgBus: 4725 2025-03- ... de 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6186 TClient is connected to server localhost:6186 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:58.213517Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:58.227674Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:32:58.242592Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:58.343374Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:58.517276Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:58.591601Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:01.222404Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486791062365806897:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:01.222525Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:01.274527Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:01.312262Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:01.349481Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:01.381574Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:01.417752Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:01.455254Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:01.498779Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486791062365807407:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:01.498852Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:01.498933Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486791062365807412:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:01.502740Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:01.511765Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486791062365807414:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:33:01.611277Z node 4 :TX_PROXY ERROR: Actor# [4:7486791062365807468:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:02.591453Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486791045185935944:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:02.591535Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":26,"Plans":[{"Tables":["EightShard"],"PlanNodeId":25,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Delete","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_5_0","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","CTE Name":"precompute_5_0"}],"Node Type":"Effect"},{"PlanNodeId":23,"Plans":[{"PlanNodeId":22,"Plans":[{"PlanNodeId":21,"Plans":[{"PlanNodeId":20,"Plans":[{"Tables":["EightShard"],"PlanNodeId":19,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key (350, +∞)"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_5_0","Node Type":"Precompute_5","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":17,"Plans":[{"Tables":["EightShard"],"PlanNodeId":16,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_3_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_3_0"}],"Node Type":"Effect"},{"PlanNodeId":14,"Plans":[{"PlanNodeId":13,"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"Tables":["EightShard"],"PlanNodeId":10,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","ReadRangesPointPrefixLen":"1","ReadRangesKeys":["Key"],"Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_3_0","Node Type":"Precompute_3","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":8,"Plans":[{"Tables":["EightShard"],"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"Effect"},{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No 
estimate","ReadRangesPointPrefixLen":"0","Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_0_0","Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"},{"columns":["Key"],"scan_by":["Key (350, +∞)"],"type":"Scan"},{"columns":["Data","Key"],"scan_by":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"type":"Scan"}],"writes":[{"columns":["Data","Key"],"type":"MultiUpsert"},{"columns":["Data","Key"],"type":"MultiUpsert"},{"type":"MultiErase"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Path":"\/Root\/EightShard","Name":"Delete","Table":"EightShard"}],"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRange":["Key (350, +∞)"],"Name":"TableRangeScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Delete"}],"Node Type":"Effect"},{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Operators":[{"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":16,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key [100, 100]","Key [200, 200]","Key [300, 300]"],"Name":"TableRangeScan","Path":"\/Root\/EightShard","ReadRangesPointPrefixLen":"1","E-Rows":"No estimate","ReadRangesKeys":["Key"],"Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"},{"PlanNodeId":17,"Plans":[{"PlanNodeId":18,"Operators":[{"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":24,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Decimal [GOOD] Test command err: 2025-03-28T09:32:58.077995Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791045484394044:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:58.079270Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eda/r3tmp/tmp8NWZJq/pdisk_1.dat 2025-03-28T09:32:58.402252Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13150, node 1 2025-03-28T09:32:58.456149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:58.456253Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:58.462659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:58.486892Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:58.486920Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:58.486929Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:58.487078Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18538 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:58.778320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:58.818818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:33:01.163977Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791061823687878:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:01.164061Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eda/r3tmp/tmp0AhVes/pdisk_1.dat 2025-03-28T09:33:01.260574Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28348, node 2 2025-03-28T09:33:01.295931Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:01.296095Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:01.298079Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:01.316801Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:01.316830Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:01.316838Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:01.316947Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21038 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:01.526756Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:01.548423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
>> KqpKv::ReadRows_Decimal [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LsPathId [GOOD] Test command err: 2025-03-28T09:32:58.903300Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791046009009204:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:58.903428Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ed8/r3tmp/tmpoi9TSu/pdisk_1.dat 2025-03-28T09:32:59.190802Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:59.255955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:59.256101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:59.257775Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18292 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:59.447790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: // TClient::Ls response: Status: 128 StatusCode: ERROR Issues { message: "Default error" severity: 1 } SchemeStatus: 7 ErrorReason: "Invalid path" TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743154379505 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePo... (TRUNCATED) TClient::Ls request: /dc-11 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" TClient::Ls request: /dc-2 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Root not found" waiting... TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743154379505 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743154379519 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depr... 
(TRUNCATED) TClient::Ls request: /dc-1/Berkanavt TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743154379519 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10000 Shard... (TRUNCATED) 2025-03-28T09:32:59.481204Z node 1 :TX_PROXY ERROR: Actor# [1:7486791050303977118:2323] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Berkanavt\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/dc-1/Berkanavt', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743154379505 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743154379519 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depr... (TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" waiting... TClient::Ls request: /dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743154379505 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Berkanavt" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743154379519 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "arcadia" Path... 
(TRUNCATED) TClient::Ls request: /dc-1/arcadia TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "arcadia" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743154379533 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsI... (TRUNCATED) 2025-03-28T09:33:01.722846Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791060148171102:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:01.722930Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ed8/r3tmp/tmpwsZk5k/pdisk_1.dat 2025-03-28T09:33:01.827552Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:01.868793Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:01.868909Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:01.870397Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24251 TClient::Ls request: / TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "/" PathId: 1 SchemeshardId: 0 PathType: EPathTypeDir CreateFinished: true } Children { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true } } Path: "/" WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:02.050919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 
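The TFlatTest::LsPathId responses above pin down the lookup contract: "/" and "/dc-1" answer SUCCESS, an empty request and "//" answer ERROR with "Invalid path", a wrong root such as "/dc-11" or "/dc-2" answers PATH_NOT_EXIST with "Root not found", and a missing child such as "/dc-1/arcadia" answers PATH_NOT_EXIST with "Path not found" until it is created. A minimal standalone sketch of that decision order follows; TPathStatus, SplitPath and ClassifyPath are hypothetical names, not YDB's resolver.

    #include <iostream>
    #include <set>
    #include <string>
    #include <vector>

    // Hypothetical result type; 1 = SUCCESS, 128 = error family (as in the log).
    struct TPathStatus {
        int StatusCode;
        std::string ErrorReason; // empty on success
    };

    // Split "/a/b" into {"a", "b"}; an empty component signals "//"-style input.
    static std::vector<std::string> SplitPath(const std::string& path) {
        std::vector<std::string> parts;
        std::string cur;
        for (size_t i = 1; i <= path.size(); ++i) {
            if (i == path.size() || path[i] == '/') {
                parts.push_back(cur);
                cur.clear();
            } else {
                cur += path[i];
            }
        }
        return parts;
    }

    // Mirrors the order of checks visible in the log: syntax first, then the
    // root ("dc-1"), then children of the root.
    static TPathStatus ClassifyPath(const std::string& path,
                                    const std::string& root,
                                    const std::set<std::string>& children) {
        if (path.empty() || path[0] != '/')
            return {128, "Invalid path"};    // empty request
        if (path == "/")
            return {1, ""};
        std::vector<std::string> parts = SplitPath(path);
        for (const std::string& p : parts)
            if (p.empty())
                return {128, "Invalid path"}; // e.g. "//"
        if (parts[0] != root)
            return {128, "Root not found"};   // e.g. "/dc-11", "/dc-2"
        if (parts.size() == 1)
            return {1, ""};                   // "/dc-1"
        if (parts.size() == 2 && children.count(parts[1]))
            return {1, ""};                   // "/dc-1/Berkanavt" once created
        return {128, "Path not found"};       // "/dc-1/arcadia" before create
    }

    int main() {
        std::set<std::string> children = {"Berkanavt"};
        for (const std::string& p :
             {"/", "//", "/dc-1", "/dc-11", "/dc-1/Berkanavt", "/dc-1/arcadia"}) {
            TPathStatus s = ClassifyPath(p, "dc-1", children);
            std::cout << p << " -> " << s.StatusCode
                      << (s.ErrorReason.empty() ? "" : " " + s.ErrorReason) << "\n";
        }
    }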
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::KV [GOOD] Test command err: Trying to start YDB, gRPC: 13887, MsgBus: 62350 2025-03-28T09:31:33.317047Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790680815220828:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:33.320865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001256/r3tmp/tmpGJyBKX/pdisk_1.dat 2025-03-28T09:31:33.749064Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:33.767460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:33.767534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:33.785864Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13887, node 1 2025-03-28T09:31:33.905405Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:33.905426Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:33.905432Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:33.905540Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62350 TClient is connected to server localhost:62350 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:34.547470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:31:36.297800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790693700123212:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.297903Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.713503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:31:37.157347Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790697995092093:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.157410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.157471Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790697995092098:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.161539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:31:37.171191Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790697995092100:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:31:37.259165Z node 1 :TX_PROXY ERROR: Actor# [1:7486790697995092188:3410] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:38.330567Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790680815220828:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:38.330809Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:31:48.721652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:31:48.721690Z node 1 :IMPORT WARN: Table profiles were not loaded took: 0.121542s took: 0.122015s took: 0.122242s took: 0.122937s took: 0.123325s took: 0.124569s took: 0.124606s took: 0.132720s took: 0.131756s took: 0.136458s took: 0.368926s took: 0.379995s took: 0.379907s took: 0.382084s took: 0.382770s took: 0.383385s took: 0.383990s took: 0.386450s took: 0.391634s took: 0.392763s took: 0.184568s took: 0.186115s took: 0.187466s took: 0.191527s took: 0.192366s took: 0.192247s took: 0.193438s took: 0.192560s took: 0.196267s took: 0.194842s took: 0.050931s took: 0.057750s took: 0.063375s took: 0.057142s took: 0.058842s took: 0.063774s took: 0.057578s took: 0.066244s took: 0.071110s took: 0.071150s took: 0.147342s took: 0.146272s took: 0.148577s took: 0.148852s took: 0.157593s took: 0.162827s took: 0.167851s took: 0.162664s took: 0.170910s took: 0.170962s 2025-03-28T09:33:04.039617Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2025-03-28T09:33:04.039651Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found 2025-03-28T09:33:04.039667Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2025-03-28T09:33:04.039681Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2025-03-28T09:33:04.039695Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2025-03-28T09:33:04.039710Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2025-03-28T09:33:04.039728Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2025-03-28T09:33:04.039744Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2025-03-28T09:33:04.039759Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2025-03-28T09:33:04.039776Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2025-03-28T09:33:04.040653Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2025-03-28T09:33:04.061919Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2025-03-28T09:33:04.061963Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 2025-03-28T09:33:04.061981Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2025-03-28T09:33:04.061998Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2025-03-28T09:33:04.062016Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2025-03-28T09:33:04.062034Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2025-03-28T09:33:04.062052Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2025-03-28T09:33:04.062068Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2025-03-28T09:33:04.062086Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2025-03-28T09:33:04.062101Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2025-03-28T09:33:04.062118Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2025-03-28T09:33:04.062135Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2025-03-28T09:33:04.062151Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2025-03-28T09:33:04.062168Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2025-03-28T09:33:04.064468Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2025-03-28T09:33:04.064588Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2025-03-28T09:33:04.064605Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2025-03-28T09:33:04.064623Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2025-03-28T09:33:04.064641Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2025-03-28T09:33:04.064658Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2025-03-28T09:33:04.067946Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found 2025-03-28T09:33:04.067997Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2025-03-28T09:33:04.068016Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 2025-03-28T09:33:04.068033Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2025-03-28T09:33:04.068052Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-28T09:33:04.068071Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2025-03-28T09:33:04.068089Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2025-03-28T09:33:04.068108Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2025-03-28T09:33:04.068125Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] |92.6%| [TA] $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTest::CopyTableAndRead [GOOD] >> TFlatTest::CopyTableAndDropOriginal >> TExportToS3Tests::ShouldCheckQuotas [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/opt/unittest >> KqpKv::ReadRows_Decimal [GOOD] Test command err: Trying to start YDB, gRPC: 5246, MsgBus: 2154 2025-03-28T09:29:23.391480Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790123975321479:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:23.391708Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00067c/r3tmp/tmpRmNstI/pdisk_1.dat 2025-03-28T09:29:24.037935Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:24.045890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:24.045977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:24.050708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5246, node 1 2025-03-28T09:29:24.204375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:24.204416Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:24.204425Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:24.204577Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2154 TClient is connected to server localhost:2154 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:29:25.008643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:25.070876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:25.305552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:25.561377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:25.654754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:29:27.380627Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790141155192433:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:27.380747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:27.709970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.749462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.789324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.818847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.848249Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.923046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:29:27.982032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790141155192951:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:27.982128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:27.982372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790141155192956:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:29:27.986444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:29:27.999216Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790141155192958:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:29:28.064274Z node 1 :TX_PROXY ERROR: Actor# [1:7486790145450160307:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:29:28.454091Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790123975321479:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:28.461312Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:29:29.289724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.365038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.413681Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.455338Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.530259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.560525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.594621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.628370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.663449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:29:29.706370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 6402, MsgBus: 25459 2025-03-28T09:29:32.164953Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790164316947830:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:29:32.165038Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00067c/r3tmp/tmpwNpFgF/pdisk_1.dat 2025-03-28T09:29:32.327454Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:29:32.348507Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:29:32.348589Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:29:32.351217Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6402, node 2 2025-03-28T09:29:32.438112Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:29:32.438132Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:29:32.438141Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:29:32.438247Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25459 TClient is connected to server localhost:25459 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescripti ... type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 18834, MsgBus: 20075 2025-03-28T09:32:41.548509Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7486790976119121300:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:41.548603Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00067c/r3tmp/tmpwP3zNL/pdisk_1.dat 2025-03-28T09:32:41.738799Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:41.777619Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:41.777739Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:41.779467Z node 18 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18834, node 18 2025-03-28T09:32:41.837071Z node 18 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:41.837101Z node 18 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:41.837118Z node 18 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:41.837277Z node 18 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20075 TClient is connected to server localhost:20075 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:42.885731Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:42.894103Z node 18 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:32:46.549049Z node 18 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7486790976119121300:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:46.549187Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:32:48.376878Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 14457, MsgBus: 13038 2025-03-28T09:32:50.099267Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7486791014797130798:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:50.099341Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00067c/r3tmp/tmpOfw8ai/pdisk_1.dat 2025-03-28T09:32:50.298172Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:50.327687Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:50.327847Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:50.332272Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14457, node 19 2025-03-28T09:32:50.413811Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:50.413866Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:50.413886Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:50.414124Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13038 TClient is 
connected to server localhost:13038 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:51.545982Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:55.099532Z node 19 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[19:7486791014797130798:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:55.099649Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:32:56.400359Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 15224, MsgBus: 31269 2025-03-28T09:32:58.172717Z node 20 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7486791045489444180:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:58.172820Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00067c/r3tmp/tmpLc0Enn/pdisk_1.dat 2025-03-28T09:32:58.380527Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:58.428031Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:58.428190Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:58.430047Z node 20 :HIVE WARN: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15224, node 20 2025-03-28T09:32:58.514587Z node 20 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:58.514619Z node 20 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:58.514640Z node 20 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:58.514868Z node 20 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31269 TClient is connected to server localhost:31269 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:59.445778Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:03.172652Z node 20 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7486791045489444180:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:03.172784Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:04.827857Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7486791071259248631:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:04.828049Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:04.876585Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:33:05.050547Z node 20 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Type mismatch, got type Uint64 for column Key22, but expected Decimal(22,9) 2025-03-28T09:33:05.065611Z node 20 :RPC_REQUEST ERROR: TReadRowsRPC ReplyWithError: Type mismatch, got type Decimal(35,10) for column Key22, but expected Decimal(22,9) >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata [GOOD] Test command err: Trying to start YDB, gRPC: 24826, MsgBus: 11434 2025-03-28T09:32:39.533337Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790963930511149:2225];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:39.533803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013bd/r3tmp/tmpVH80tm/pdisk_1.dat 2025-03-28T09:32:39.947643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:39.947773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:39.951192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:39.978768Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24826, node 1 2025-03-28T09:32:40.043048Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:40.043070Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:40.043076Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:40.043231Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11434 TClient is connected to server localhost:11434 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:40.555054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:40.587896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:40.763879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:40.919749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:40.991239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:32:42.610075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790976815414641:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:42.610200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:42.882532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:32:42.914674Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:32:42.941708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:42.967442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:42.998405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:32:43.072553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:32:43.153912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790981110382456:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:43.153984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:43.154036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790981110382461:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:43.157813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:32:43.165805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790981110382463:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:32:43.265096Z node 1 :TX_PROXY ERROR: Actor# [1:7486790981110382518:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:44.181762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:32:44.188756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28237, MsgBus: 19122 2025-03-28T09:32:44.883446Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790987016211197:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:44.883532Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013bd/r3tmp/tmpOxeAhl/pdisk_1.dat 2025-03-28T09:32:44.991554Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28237, node 2 2025-03-28T09:32:45.042735Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:45.042796Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:45.043976Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:45.057945Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:45.057976Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:45.057985Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:45.058096Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19122 TClient is connected to server localhost:19122 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:32:45.454078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:45.461476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:45.532948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:45.671434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:45.741416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025- ... h existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:32:50.094159Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2025-03-28T09:32:50.582835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-03-28T09:32:51.085468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-03-28T09:32:51.566713Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-03-28T09:32:52.032658Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:32:52.071565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-28T09:32:54.073367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715716:0, at schemeshard: 72057594046644480 2025-03-28T09:32:54.083374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715717:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 23008, MsgBus: 20405 2025-03-28T09:32:55.103728Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486791034024954231:2145];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013bd/r3tmp/tmpxcfD06/pdisk_1.dat 2025-03-28T09:32:55.105577Z node 3 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:32:55.201129Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:55.229566Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:55.229660Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:55.231821Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23008, node 3 2025-03-28T09:32:55.279455Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:55.279494Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:55.279509Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:55.279656Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20405 TClient is connected to server localhost:20405 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:32:55.757694Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:55.775248Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:32:55.850489Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:56.004367Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:56.074931Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:58.031927Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486791046909857795:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:58.032075Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:58.065649Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:32:58.095578Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:32:58.134166Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:58.166132Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:58.199622Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:32:58.246373Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:32:58.302117Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486791046909858309:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:58.302202Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:58.302497Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486791046909858314:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:58.305882Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:32:58.319029Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486791046909858316:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:32:58.398269Z node 3 :TX_PROXY ERROR: Actor# [3:7486791046909858369:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:59.299903Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:1, at schemeshard: 72057594046644480 2025-03-28T09:32:59.800061Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715674:0, at schemeshard: 72057594046644480 2025-03-28T09:33:00.103565Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486791034024954231:2145];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:00.103659Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:00.315727Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480 2025-03-28T09:33:00.803691Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715682:0, at schemeshard: 72057594046644480 2025-03-28T09:33:01.297791Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480 2025-03-28T09:33:01.749522Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-03-28T09:33:02.227700Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:33:02.260796Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-28T09:33:05.881393Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715735:0, at schemeshard: 72057594046644480 >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx |92.7%| [TA] $(B)/ydb/core/kqp/ut/opt/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TA] $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:32:31.340352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:32:31.340451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:31.340487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:32:31.340537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:32:31.340592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:32:31.340624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:32:31.340685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:32:31.340777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:32:31.341095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:32:31.407628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:32:31.407678Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:31.421124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:32:31.421325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:32:31.421454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:32:31.427454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:32:31.427681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:32:31.428397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:31.428600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:31.430623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:31.432063Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:31.432129Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:31.432316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 
2025-03-28T09:32:31.432364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:31.432405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:32:31.432489Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:32:31.438665Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:32:31.574814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:32:31.575043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:31.575278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:32:31.575542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:32:31.575600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:31.577953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:31.578110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:32:31.578293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:31.578366Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:32:31.578419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:32:31.578461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:32:31.580485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:31.580541Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:32:31.580578Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:32:31.582323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:31.582367Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:31.582409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2025-03-28T09:32:31.582472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:32:31.586131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:32:31.587971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:32:31.588215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:32:31.589197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:32:31.589331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:32:31.589474Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:31.589764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:32:31.589817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:32:31.589968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:32:31.590118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:32:31.592092Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:32:31.592142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:32:31.592312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:32:31.592354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:32:31.592752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:32:31.592797Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:32:31.592895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:31.592932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:31.592985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:32:31.593028Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:31.593069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:32:31.593112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:32:31.593168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:32:31.593203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:32:31.593269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:32:31.593308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:32:31.593340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:32:31.595250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:31.595362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:32:31.595396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 57594046678944 2025-03-28T09:33:07.015969Z node 5 :FLAT_TX_SCHEMESHARD INFO: TRmDir ProgressState, opId: 281474976720762:0, at schemeshard: 72057594046678944 2025-03-28T09:33:07.016069Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976720762 ready parts: 1/1 2025-03-28T09:33:07.016220Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976720762 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:33:07.016686Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-03-28T09:33:07.016769Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-03-28T09:33:07.016799Z node 5 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2025-03-28T09:33:07.016829Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-03-28T09:33:07.016859Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:33:07.017444Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-03-28T09:33:07.017514Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard 
Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-03-28T09:33:07.017539Z node 5 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976720762 2025-03-28T09:33:07.017569Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-03-28T09:33:07.017595Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:33:07.017655Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2025-03-28T09:33:07.020002Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-28T09:33:07.020130Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976720762, at schemeshard: 72057594046678944 2025-03-28T09:33:07.020177Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 0/1, is published: true 2025-03-28T09:33:07.020222Z node 5 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976720762, at schemeshard: 72057594046678944 2025-03-28T09:33:07.020523Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976720762:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976720762 msg type: 269090816 2025-03-28T09:33:07.020686Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976720762, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976720762 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976720762 at step: 5000007 2025-03-28T09:33:07.021376Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000007, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:07.021474Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976720762 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 21474838635 } } Step: 5000007 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:07.021517Z node 5 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 281474976720762:0, step: 5000007, at schemeshard: 72057594046678944 2025-03-28T09:33:07.021626Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 281474976720762:0, at schemeshard: 72057594046678944 2025-03-28T09:33:07.021689Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1 2025-03-28T09:33:07.021739Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-03-28T09:33:07.021796Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720762:0 progress is 1/1 2025-03-28T09:33:07.021833Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-03-28T09:33:07.021894Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:33:07.021962Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:33:07.022002Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720762, ready parts: 1/1, is published: false 2025-03-28T09:33:07.022046Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720762 ready parts: 1/1 2025-03-28T09:33:07.022088Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720762:0 2025-03-28T09:33:07.022144Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976720762:0 2025-03-28T09:33:07.022209Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:33:07.022255Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720762, publications: 2, subscribers: 1 2025-03-28T09:33:07.022307Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-03-28T09:33:07.022350Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720762, [OwnerId: 72057594046678944, LocalPathId: 3], 18446744073709551615 2025-03-28T09:33:07.023179Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-03-28T09:33:07.023280Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-03-28T09:33:07.024643Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:07.024681Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:07.024817Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720762, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:33:07.024919Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:07.024951Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:335:2311], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 1 2025-03-28T09:33:07.024985Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:335:2311], at schemeshard: 72057594046678944, txId: 281474976720762, path id: 3 FAKE_COORDINATOR: Erasing txId 281474976720762 2025-03-28T09:33:07.025658Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-03-28T09:33:07.025733Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-03-28T09:33:07.025766Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720762 2025-03-28T09:33:07.025816Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], 
version: 11 2025-03-28T09:33:07.025862Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:33:07.026477Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-03-28T09:33:07.026547Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720762 2025-03-28T09:33:07.026577Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720762 2025-03-28T09:33:07.026604Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720762, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-28T09:33:07.026634Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:33:07.026716Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720762, subscribers: 1 2025-03-28T09:33:07.026768Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [5:288:2275] 2025-03-28T09:33:07.029147Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-03-28T09:33:07.029458Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720762 2025-03-28T09:33:07.029524Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976720762 2025-03-28T09:33:07.029570Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976720762 2025-03-28T09:33:07.029610Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoExecute 2025-03-28T09:33:07.029636Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762 2025-03-28T09:33:07.029675Z node 5 :EXPORT DEBUG: TExport::TTxProgress: OnNotifyResult: txId# 281474976720762, id# 102, itemIdx# 4294967295 2025-03-28T09:33:07.031121Z node 5 :EXPORT DEBUG: TExport::TTxProgress: DoComplete 2025-03-28T09:33:07.031212Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:33:07.031259Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:706:2646] TestWaitNotification: OK eventTxId 102 |92.7%| [TA] {RESULT} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/opt/test-results/unittest/{meta.json ... results_accumulator.log} >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors |92.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Balancing::Balancing_ManyTopics_PQv1 [GOOD] >> TPersQueueMirrorer::TestBasicRemote >> TLocksFatTest::LocksLimit [GOOD] |92.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} >> EraseRowsTests::ConditionalEraseRowsShouldNotErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds >> TFlatTest::CopyTableAndDropOriginal [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::LocksLimit [GOOD] Test command err: 2025-03-28T09:32:56.235804Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791040837869882:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:56.235914Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001edc/r3tmp/tmpCJ2RyQ/pdisk_1.dat 2025-03-28T09:32:56.592107Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:56.633894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:56.634072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:56.636057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27929 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:56.780919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:56.810008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:56.916075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:56.966958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:01.236007Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791040837869882:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:01.236078Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:02.332077Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791065701446223:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:02.332128Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001edc/r3tmp/tmpmBjM8R/pdisk_1.dat 2025-03-28T09:33:02.434770Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:02.479408Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:02.479497Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:02.481096Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2798 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:02.637081Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:02.653943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:33:02.702269Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:02.746886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:05.705455Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486791078594705800:2203];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:05.706364Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001edc/r3tmp/tmp2CpFyO/pdisk_1.dat 2025-03-28T09:33:05.873037Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:05.901452Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:05.901552Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:05.903035Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2914 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:06.100860Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:33:06.119644Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:33:06.177756Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:06.276362Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> TLocksTest::Range_Pinhole [GOOD] >> TLocksTest::SetBreakSetEraseBreak >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds >> TLocksTest::BrokenDupLock [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropOriginal [GOOD] Test command err: 2025-03-28T09:33:03.698814Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791067590362380:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:03.698857Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ed6/r3tmp/tmp4OrY2z/pdisk_1.dat 2025-03-28T09:33:04.051442Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:04.114111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:04.114220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:04.115943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7008 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:04.265494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:04.294908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:33:04.451884Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-28T09:33:04.457347Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-28T09:33:04.497651Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-03-28T09:33:04.502236Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.007s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 Copy TableOld to Table 2025-03-28T09:33:04.624504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:33:04.624716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:33:04.625063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 2], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-03-28T09:33:04.625094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 0 2025-03-28T09:33:04.625116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-28T09:33:04.625137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-03-28T09:33:04.625147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-03-28T09:33:04.625245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 3 2025-03-28T09:33:04.625327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:33:04.625858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:33:04.625889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 4 2025-03-28T09:33:04.626545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2025-03-28T09:33:04.626748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 
2025-03-28T09:33:04.627038Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:33:04.627064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:33:04.627208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 4] 2025-03-28T09:33:04.627302Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:33:04.627328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486791071885330192:2243], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 2 waiting... 2025-03-28T09:33:04.627351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486791071885330192:2243], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 4 2025-03-28T09:33:04.627418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:33:04.627460Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2025-03-28T09:33:04.627894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-28T09:33:04.628064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 4 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2025-03-28T09:33:04.630045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-28T09:33:04.630177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-28T09:33:04.630200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-03-28T09:33:04.630220Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 6 2025-03-28T09:33:04.630236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-28T09:33:04.630521Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at 
schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-28T09:33:04.630579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 4 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2025-03-28T09:33:04.630592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2025-03-28T09:33:04.630623Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 4], version: 1 2025-03-28T09:33:04.630639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 5 2025-03-28T09:33:04.630706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710676, ready parts: 0/1, is published: true 2025-03-28T09:33:04.630853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2025-03-28T09:33:04.630965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710676:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2025-03-28T09:33:04.631059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710676, partId: 0, tablet: 72057594037968897 2025-03-28T09:33:04.631084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:3, partId: 0 2025-03-28T09:33:04.631098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 281474976710676, shardIdx: 72057594046644480:4, partId: 0 2025-03-28T09:33:04.631216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710676, at schemeshard: 72057594046644480 2025-03-28T09:33:04.631247Z nod ... 
ueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:33:07.923914Z node 2 :TX_DATASHARD INFO: 72075186224037890 Initiating switch from PreOffline to Offline state Check that tablet 72075186224037888 was deleted 2025-03-28T09:33:07.924658Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7486791086675529407:2374], serverId# [2:7486791086675529413:2673], sessionId# [0:0:0] 2025-03-28T09:33:07.924723Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:33:07.924751Z node 2 :TX_DATASHARD INFO: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:33:07.924798Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7486791086675529411:2376], serverId# [2:7486791086675529415:2675], sessionId# [0:0:0] 2025-03-28T09:33:07.924919Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486791086675528889 RawX2: 4503608217307387 } TabletId: 72075186224037889 State: 4 2025-03-28T09:33:07.924962Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:33:07.925110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486791086675529191 RawX2: 4503608217307446 } TabletId: 72075186224037891 State: 4 2025-03-28T09:33:07.925135Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:33:07.925221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486791086675528888 RawX2: 4503608217307386 } TabletId: 72075186224037888 State: 4 2025-03-28T09:33:07.925242Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:33:07.925323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486791086675529190 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-03-28T09:33:07.925345Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:33:07.925429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486791086675529190 RawX2: 4503608217307445 } TabletId: 72075186224037890 State: 4 2025-03-28T09:33:07.925460Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:33:07.925628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:33:07.925694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:33:07.925735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 
72057594037968897 at ss 72057594046644480 2025-03-28T09:33:07.925766Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-03-28T09:33:07.925774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:33:07.925795Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2025-03-28T09:33:07.925802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:33:07.925805Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2025-03-28T09:33:07.925814Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-28T09:33:07.925821Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2025-03-28T09:33:07.926279Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037891 reason = ReasonStop 2025-03-28T09:33:07.926393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-28T09:33:07.926620Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037891 2025-03-28T09:33:07.926621Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-28T09:33:07.926778Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-03-28T09:33:07.926809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-28T09:33:07.926933Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2025-03-28T09:33:07.926959Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 2 2025-03-28T09:33:07.927220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-28T09:33:07.927248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-28T09:33:07.927302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-28T09:33:07.927431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-28T09:33:07.927567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-28T09:33:07.927666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 4] was 1 2025-03-28T09:33:07.927755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-28T09:33:07.927877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:33:07.927896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 4], at schemeshard: 72057594046644480 2025-03-28T09:33:07.927935Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:33:07.927956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-28T09:33:07.927984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:33:07.928074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-28T09:33:07.928089Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-28T09:33:07.928452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-28T09:33:07.928476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-28T09:33:07.928656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-28T09:33:07.928704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-28T09:33:07.928741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-28T09:33:07.928774Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-28T09:33:07.952833Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-03-28T09:33:07.952895Z node 2 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037891 from 72075186224037889 is reset 2025-03-28T09:33:07.952928Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7486791086675529010:2392], serverId# [2:7486791086675529011:2393], sessionId# [0:0:0] 2025-03-28T09:33:07.952959Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037888 reason = ReasonStop 2025-03-28T09:33:07.952980Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7486791086675529000:2385], serverId# [2:7486791086675529001:2386], sessionId# [0:0:0] 2025-03-28T09:33:07.952995Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037890 reason = ReasonStop 2025-03-28T09:33:07.953013Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7486791086675529373:2640], serverId# [2:7486791086675529375:2642], sessionId# [0:0:0] 2025-03-28T09:33:07.953234Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-28T09:33:07.953326Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-03-28T09:33:07.953372Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-28T09:33:07.953395Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-03-28T09:33:07.953541Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 
72075186224037890 not found 2025-03-28T09:33:07.954267Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7486791086675528941:2352], serverId# [2:7486791086675528944:2355], sessionId# [0:0:0] 2025-03-28T09:33:07.954430Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-03-28T09:33:07.954473Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-03-28T09:33:07.955636Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037890 2025-03-28T09:33:07.955688Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037890 2025-03-28T09:33:08.225024Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-03-28T09:33:08.225565Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-03-28T09:33:08.226086Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-03-28T09:33:08.226497Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891)
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds
>> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD]
>> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64
>> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldErase
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD]
Test command err:
2025-03-28T09:33:05.186465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:05.187062Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:05.187124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001652/r3tmp/tmpKJW9yI/pdisk_1.dat 2025-03-28T09:33:05.708258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:33:05.748945Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:05.795064Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:05.795787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:05.809123Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:05.905395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:05.964828Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:33:05.965126Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:06.010932Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:06.011027Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:06.013421Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:33:06.013496Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:33:06.013534Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:33:06.014761Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:06.014897Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:06.014966Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:33:06.025529Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:06.055681Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:33:06.057177Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:06.057357Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:33:06.057400Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:06.057441Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:33:06.057482Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:06.058993Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:33:06.059130Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:33:06.069802Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:06.069892Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:06.070014Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:06.070246Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:06.070418Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:06.070596Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:06.071703Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:06.071849Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:06.073900Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:06.084746Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:06.085915Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:06.236450Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:06.240714Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:06.240828Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:06.241767Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:06.241844Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:06.241939Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:06.242229Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:06.242372Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:06.243127Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:06.243218Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:06.249599Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:06.251423Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-28T09:33:06.253380Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:06.253442Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:06.254482Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:06.254565Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:06.255680Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:06.255744Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:06.255820Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:06.255900Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:06.255969Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:06.256193Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:06.261584Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:06.264579Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:06.264681Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:06.264944Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:06.293601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:06.293789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:06.293873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:06.302154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:06.308112Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:06.460269Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:06.464055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:06.520310Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:07.220099Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1j5bh8pxhje4xpvdrn2ww, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Zjk1MmRlZjAtNGQ4NjYzYWUtNzQwODMxYTItYTQyNmMzZGM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:07.230004Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:852:2688], serverId# [1:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:07.231090Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:07.250367Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09 ... 86224037888 2025-03-28T09:33:11.161864Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:11.161908Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:11.161968Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:11.162040Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:662:2567], serverId# [2:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:11.162174Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:11.162380Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:11.162469Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:11.164569Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:11.175336Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:11.175481Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:11.325516Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:704:2594], serverId# [2:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:11.326723Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:11.326784Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.327455Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:11.327528Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:11.327581Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:11.327866Z node 2 
:TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:11.328031Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:11.328315Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:11.328387Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:11.328893Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:11.329346Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:11.330945Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:11.330999Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.331864Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:11.331955Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:11.332972Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:11.333026Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:11.333074Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:11.333160Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:11.333226Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:11.333318Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.334311Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:11.336188Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:11.336288Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:11.337184Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:11.345484Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:11.345585Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:11.345679Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:11.351541Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:11.358219Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:11.510054Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:11.513545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:11.548483Z node 2 :TX_PROXY ERROR: Actor# [2:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:11.646398Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1ja9fd9v3jdkapm2x68w6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTk5MDNiZGEtMTgzN2E2YzctYzY2MzMxMjUtNGRhNmUyNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:11.647038Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:852:2688], serverId# [2:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:11.647277Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:11.659626Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:11.659776Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.663886Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:860:2695], serverId# [2:861:2696], sessionId# [0:0:0] 2025-03-28T09:33:11.665026Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-28T09:33:11.676291Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-28T09:33:11.676383Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.676734Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:11.676779Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-03-28T09:33:11.676926Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:860:2695], serverId# [2:861:2696], sessionId# [0:0:0] 2025-03-28T09:33:11.677052Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:11.677102Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:11.677151Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:11.677207Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:11.678199Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:11.678527Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:11.678761Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:11.678809Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:11.678855Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-03-28T09:33:11.679087Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:11.679151Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-03-28T09:33:11.679719Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-03-28T09:33:11.679980Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:33:11.680140Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-03-28T09:33:11.680190Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-03-28T09:33:11.707141Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:11.707212Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-03-28T09:33:11.707394Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:11.707435Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:11.707478Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-03-28T09:33:11.707610Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:11.707676Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:11.707728Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenDupLock [GOOD]
Test command err:
2025-03-28T09:32:33.107141Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790938472353938:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:33.107240Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee7/r3tmp/tmpuUGrLI/pdisk_1.dat 2025-03-28T09:32:33.477494Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:33.525569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:33.525695Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:33.527256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62112 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:33.760818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:33.783185Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:33.788383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:32:33.906556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:33.978392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:36.324338Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790953030342453:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:36.324374Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee7/r3tmp/tmpPJhAe6/pdisk_1.dat 2025-03-28T09:32:36.428924Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:36.454959Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:36.455053Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:36.456812Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23687 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:36.592067Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:36.614522Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:36.689830Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:36.731534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:39.362933Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486790964232523236:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:39.362986Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee7/r3tmp/tmpmSXWla/pdisk_1.dat 2025-03-28T09:32:39.503761Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:39.519189Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:39.519275Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:39.521186Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24567 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:39.708566Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:39.716200Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:39.738955Z node 3 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:32:39.743086Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:39.823751Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:39.881896Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:42.747225Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486790980593340995:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:42.747279Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee7/r3tmp/tmpckp8a5/pdisk_1.dat 2025-03-28T09:32:42.844707Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:42.882331Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:42.882418Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:42.886488Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22192 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 20 ... : HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:54.532792Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:54.534558Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14360 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:54.775574Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:54.786953Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:54.807196Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:54.876976Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:54.931980Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:58.517272Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486791047478605402:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:58.517360Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee7/r3tmp/tmpQK72qC/pdisk_1.dat 2025-03-28T09:32:58.666252Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:58.683683Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:58.683764Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:58.685287Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26570 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:58.909043Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:58.933661Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:58.998480Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:59.052362Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:33:03.056192Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486791070813918128:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:03.056296Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee7/r3tmp/tmpKrnDiu/pdisk_1.dat 2025-03-28T09:33:03.171525Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:03.199933Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:03.200042Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:03.201575Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2257 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:03.434519Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:03.453373Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:03.514179Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:03.590494Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:33:07.566224Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486791085114979417:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:07.566343Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee7/r3tmp/tmpORDRJl/pdisk_1.dat 2025-03-28T09:33:07.719584Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:07.748504Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:07.748627Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:07.750216Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20364 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:08.037803Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:08.059768Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:08.130214Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:33:08.251817Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
>> ColumnShardTiers::DSConfigsStub [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard >> TopicAutoscaling::PartitionSplit_ManySession_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsStub [GOOD] Test command err: 2025-03-28T09:31:28.537775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:28.538063Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:28.538123Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001c00/r3tmp/tmp4RXpus/pdisk_1.dat TServer::EnableGrpc on GrpcPort 12322, node 1 TClient is connected to server localhost:7769 2025-03-28T09:31:29.006228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:31:29.045354Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:29.049235Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:29.049295Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:29.049325Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:29.049563Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:31:29.087309Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:29.087413Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:29.098793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:29.224037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-03-28T09:31:29.370641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:31:29.370944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:31:29.371228Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:31:29.371361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:31:29.371475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:31:29.371588Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:31:29.371724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:31:29.371868Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:31:29.371996Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:31:29.372177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:31:29.372310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:31:29.372420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:31:29.395697Z node 1 :TX_TIERING INFO: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-03-28T09:31:29.397907Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:31:29.398009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:31:29.398162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:31:29.398210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:31:29.398402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:31:29.398447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:31:29.398551Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:31:29.398603Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:31:29.398710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:31:29.398748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:31:29.398806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:31:29.398846Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:31:29.400290Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:31:29.400384Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:31:29.400603Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:31:29.400651Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:31:29.400829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:31:29.400878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:31:29.401080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:31:29.401122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:31:29.401260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:31:29.401309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:31:29.431375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:31:29.431467Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:31:29.431701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:31:29.431835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:31:29.431955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:31:29.432097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:31:29.432220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:750:2629];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=norma ... -03-28T09:32:26.411034Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=1; 2025-03-28T09:32:26.411080Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-03-28T09:32:26.411113Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 0 2025-03-28T09:32:26.411162Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-03-28T09:32:26.411221Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-03-28T09:32:26.411279Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; Initialization finished REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=0;WAITING=1 2025-03-28T09:32:37.873779Z node 1 :TX_PROXY ERROR: Actor# [1:3700:4863] txid# 281474976715759, issues: { message: "Other entities depend on this data source, please remove them at the beginning: /Root/olapStore/olapTable" severity: 1 } REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=
<main>: Error: Execution, code: 1060
<main>:1:27: Error: Executing DROP OBJECT EXTERNAL_DATA_SOURCE
<main>: Error:
<main>: Error: Other entities depend on this data source, please remove them at the beginning: /Root/olapStore/olapTable, code: 2003 , code: 2003 ;EXPECTATION=0 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1 2025-03-28T09:32:49.336309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropTable, opId: 281474976715770:0, at schemeshard: 72057594046644480 2025-03-28T09:32:50.064678Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037889;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715770;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715770; 2025-03-28T09:32:50.065069Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037888;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715770;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715770; 2025-03-28T09:32:50.065513Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037890;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715770;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715770; REQUEST=DROP TABLE `/Root/olapStore/olapTable`;RESULT=
<main>: Info: Execution, code: 1060
<main>:1:12: Info: Executing DROP TABLE
: Info: Success, code: 4 ;EXPECTATION=1 FINISHED_REQUEST=DROP TABLE `/Root/olapStore/olapTable`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2025-03-28T09:33:00.549513Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-28T09:33:00.550189Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-28T09:33:00.550253Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-28T09:33:00.550291Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-28T09:33:00.550465Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-28T09:33:00.550524Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-03-28T09:33:00.550584Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-28T09:33:00.550655Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 0 2025-03-28T09:33:00.550734Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-03-28T09:33:00.550822Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-03-28T09:33:00.550898Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:00.551066Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-28T09:33:00.551106Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037888;has_config=0; 2025-03-28T09:33:00.551145Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037888 2025-03-28T09:33:00.551181Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 72075186224037888 2025-03-28T09:33:00.551212Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037888 2025-03-28T09:33:00.551258Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 72075186224037888 2025-03-28T09:33:00.551306Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:00.551347Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-28T09:33:00.551377Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037889;has_config=0; 2025-03-28T09:33:00.551412Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037889 2025-03-28T09:33:00.551445Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 72075186224037889 2025-03-28T09:33:00.551473Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037889 2025-03-28T09:33:00.551509Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 72075186224037889 2025-03-28T09:33:00.551552Z node 1 :TX_TIERING DEBUG: 
fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:00.551587Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-28T09:33:00.551614Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037890;has_config=0; 2025-03-28T09:33:00.551644Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037890 2025-03-28T09:33:00.551677Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier2' at tablet 72075186224037890 2025-03-28T09:33:00.551704Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037890 2025-03-28T09:33:00.551742Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 72075186224037890 2025-03-28T09:33:00.551781Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:00.552202Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037888;self_id=[1:746:2626];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-03-28T09:33:00.552318Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037889;self_id=[1:750:2629];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-03-28T09:33:00.552398Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037890;self_id=[1:758:2635];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2025-03-28T09:33:11.814397Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-28T09:33:11.814795Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-28T09:33:11.814844Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-28T09:33:11.814878Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-28T09:33:11.815338Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-28T09:33:11.815403Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-03-28T09:33:11.815458Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-03-28T09:33:11.815539Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:11.816149Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-28T09:33:11.816194Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037888;has_config=0; 2025-03-28T09:33:11.816229Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped 
at tablet 72075186224037888 2025-03-28T09:33:11.816282Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:11.816333Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-28T09:33:11.816371Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037889;has_config=0; 2025-03-28T09:33:11.816402Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037889 2025-03-28T09:33:11.816444Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:11.816479Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-28T09:33:11.816504Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037890;has_config=0; 2025-03-28T09:33:11.816531Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037890 2025-03-28T09:33:11.816573Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:11.817178Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037888;self_id=[1:746:2626];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-03-28T09:33:11.817264Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037889;self_id=[1:750:2629];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-03-28T09:33:11.817397Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037890;self_id=[1:758:2635];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] Test command err: 2025-03-28T09:33:08.280315Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:08.280752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:08.280804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00162c/r3tmp/tmpsGc0vS/pdisk_1.dat 2025-03-28T09:33:08.654744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:33:08.695631Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:08.734633Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:08.734783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:08.746490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:08.827898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:08.866145Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:33:08.866468Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:08.913596Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:08.913719Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:08.915415Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:33:08.915496Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:33:08.915553Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:33:08.915918Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:08.916056Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:08.916140Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:33:08.926884Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:08.958058Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:33:08.958254Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:08.958368Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:33:08.958400Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:08.958432Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:33:08.958470Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:08.958927Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:33:08.959044Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:33:08.959442Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:08.959491Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:08.959525Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:08.959564Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:08.959659Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:08.959794Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:08.960053Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:08.960149Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:08.961801Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:08.972478Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:08.972593Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:09.121672Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:09.126192Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:09.126264Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:09.127001Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:09.127052Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:09.127115Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:09.127373Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:09.127521Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:09.128320Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:09.128400Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:09.130350Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:09.130781Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-28T09:33:09.132246Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:09.132296Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:09.133063Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:09.133125Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:09.133987Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:09.134031Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:09.134085Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:09.134151Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:09.134204Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:09.134284Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:09.138218Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:09.140592Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:09.140674Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:09.140913Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:09.149426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:09.149590Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:09.149695Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:09.154845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:09.161097Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:09.309674Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:09.312926Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:09.373621Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:09.771460Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1j84vbt828y8x5f349fk9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTk1ODRhMjQtOTMzYzA0ZmItYjZlMzJlMy1mYjRjNDQyZQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:09.777919Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:852:2688], serverId# [1:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:09.778163Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:09.790681Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09 ... 86224037888 2025-03-28T09:33:13.408125Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:13.408165Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:13.408224Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:13.408293Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:662:2567], serverId# [2:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:13.408420Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:13.408598Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:13.408680Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:13.410549Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:13.421217Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:13.421323Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:13.572942Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:704:2594], serverId# [2:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:13.574024Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:13.574080Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:13.574655Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:13.574899Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:13.574945Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:13.575175Z node 2 
:TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:13.575334Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:13.575600Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:13.575667Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:13.576147Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:13.576547Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:13.577906Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:13.577954Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:13.578915Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:13.579014Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:13.579886Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:13.579925Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:13.580007Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:13.580073Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:13.580121Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:13.580223Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:13.581070Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:13.582942Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:13.583005Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:13.583887Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:13.591922Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:13.592042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:13.592109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:13.598043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:13.606165Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:13.755576Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:13.758208Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:13.795719Z node 2 :TX_PROXY ERROR: Actor# [2:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:13.888671Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1jcfp9s2r072fyz1bwkbr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YmE4NmZiZjQtMWNhZDExNzAtZTQwMjViMTUtYWE1Zjk1ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:13.889116Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:852:2688], serverId# [2:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:13.889287Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:13.900867Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:13.900998Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:13.904188Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:860:2695], serverId# [2:861:2696], sessionId# [0:0:0] 2025-03-28T09:33:13.904876Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-28T09:33:13.915676Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-28T09:33:13.915736Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:13.916002Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:13.916034Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-03-28T09:33:13.916125Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:860:2695], serverId# [2:861:2696], sessionId# [0:0:0] 2025-03-28T09:33:13.916210Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:13.916241Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:13.916287Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:13.916334Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:13.917003Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:13.917253Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:13.917370Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:13.917407Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:13.917442Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-03-28T09:33:13.917611Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:13.917658Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-03-28T09:33:13.918132Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-03-28T09:33:13.918361Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:33:13.918495Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-03-28T09:33:13.918541Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-03-28T09:33:13.942746Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:13.942800Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-03-28T09:33:13.942957Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:13.942989Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:13.943028Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-03-28T09:33:13.943134Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:13.943185Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:13.943225Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK >> TPQCachingProxyTest::OutdatedSession [GOOD] >> TPQCachingProxyTest::TestDeregister [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> EraseRowsTests::EraseRowsFromReplicatedTable >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_BeforeAutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession [GOOD] Test command err: 2025-03-28T09:33:15.398065Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:33:15.398218Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:33:15.418121Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:33:15.418502Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-03-28T09:33:15.418620Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-03-28T09:33:15.418664Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-03-28T09:33:15.418782Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 2, killed existing session with older generation ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister [GOOD] Test command err: 2025-03-28T09:33:15.398262Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:33:15.398355Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:33:15.417666Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:33:15.418471Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with generation 1 2025-03-28T09:33:15.418535Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session2:1 with generation 1 2025-03-28T09:33:15.418636Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: session1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] Test command err: 2025-03-28T09:33:07.858304Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:07.858619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:07.858658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001634/r3tmp/tmpCMIvgf/pdisk_1.dat 2025-03-28T09:33:08.207324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:33:08.259393Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:08.307184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:08.307346Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:08.319128Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:08.412445Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:08.455278Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:33:08.455586Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:08.503883Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:08.503992Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:08.505350Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:33:08.505406Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:33:08.505448Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:33:08.505719Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:08.505817Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:08.505890Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:33:08.516624Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:08.543402Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:33:08.543593Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:08.543702Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:33:08.543730Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:08.543757Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:33:08.543788Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:08.544171Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:33:08.544253Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:33:08.544588Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:08.544626Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:08.544655Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:08.544719Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:08.544795Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:08.544887Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:08.545104Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:08.545184Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:08.546862Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:08.557583Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:08.557706Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:08.709844Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:08.713939Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:08.714013Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:08.714563Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:08.714611Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:08.714668Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:08.714910Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:08.715076Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:08.715695Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:08.715768Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:08.717297Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:08.717659Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-28T09:33:08.718777Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:08.718819Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:08.719500Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:08.719558Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:08.720337Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:08.720374Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:08.720432Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:08.720485Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:08.720528Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:08.720599Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:08.724157Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:08.726639Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:08.726758Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:08.727064Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:08.734302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:08.734424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:08.734479Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:08.738738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:08.743457Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:08.893688Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:08.897354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:08.959831Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:09.283121Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1j7qw2tvr14vnkwcx09d7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NWZlNWUzNWEtZDlmMDk4NWItZTg3YTYxMmMtNGJlMjBmMzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:09.287792Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:852:2688], serverId# [1:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:09.288036Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:09.300109Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09 ... 24037893 2025-03-28T09:33:14.404864Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1250:3030], serverId# [2:1251:3031], sessionId# [0:0:0] 2025-03-28T09:33:14.405182Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1250:3030], serverId# [2:1251:3031], sessionId# [0:0:0] 2025-03-28T09:33:14.407022Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1255:3035], serverId# [2:1256:3036], sessionId# [0:0:0] 2025-03-28T09:33:14.407227Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1255:3035], serverId# [2:1256:3036], sessionId# [0:0:0] 2025-03-28T09:33:14.409115Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037893, clientId# [2:1260:3040], serverId# [2:1261:3041], sessionId# [0:0:0] 2025-03-28T09:33:14.409339Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1260:3040], serverId# [2:1261:3041], sessionId# [0:0:0] 2025-03-28T09:33:14.411491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:14.414537Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-03-28T09:33:14.414625Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:33:14.414802Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-28T09:33:14.414882Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-03-28T09:33:14.414919Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:14.415043Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 
2025-03-28T09:33:14.451772Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [2:1283:3060] 2025-03-28T09:33:14.452006Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:14.460146Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:14.460240Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:14.461251Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2025-03-28T09:33:14.461296Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037894 2025-03-28T09:33:14.461324Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037894 2025-03-28T09:33:14.461600Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:14.461679Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:14.461720Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037894 persisting started state actor id [2:1299:3060] in generation 1 2025-03-28T09:33:14.483191Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:14.483254Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037894 2025-03-28T09:33:14.483318Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:14.483384Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037894, actorId: [2:1301:3070] 2025-03-28T09:33:14.483406Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2025-03-28T09:33:14.483424Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037894, state: WaitScheme 2025-03-28T09:33:14.483455Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-28T09:33:14.483765Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037894 2025-03-28T09:33:14.483849Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2025-03-28T09:33:14.483923Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-03-28T09:33:14.483952Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:14.484008Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037894 TxInFly 0 2025-03-28T09:33:14.484034Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-03-28T09:33:14.484398Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1282:3059], serverId# [2:1288:3062], sessionId# [0:0:0] 2025-03-28T09:33:14.484546Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-28T09:33:14.484697Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037894 txId 281474976715663 ssId 72057594046644480 seqNo 2:7 2025-03-28T09:33:14.484788Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715663 at tablet 72075186224037894 2025-03-28T09:33:14.485153Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-03-28T09:33:14.495831Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-28T09:33:14.495936Z node 2 :TX_DATASHARD DEBUG: 
72075186224037894 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:14.623933Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1311:3080], serverId# [2:1313:3082], sessionId# [0:0:0] 2025-03-28T09:33:14.624887Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715663 at step 4000 at tablet 72075186224037894 { Transactions { TxId: 281474976715663 AckTo { RawX1: 0 RawX2: 0 } } Step: 4000 MediatorID: 72057594046382081 TabletID: 72075186224037894 } 2025-03-28T09:33:14.624932Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-28T09:33:14.625184Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-03-28T09:33:14.625208Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:14.625235Z node 2 :TX_DATASHARD DEBUG: Found ready operation [4000:281474976715663] in PlanQueue unit at 72075186224037894 2025-03-28T09:33:14.625423Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037894 loaded tx from db 4000:281474976715663 keys extracted: 0 2025-03-28T09:33:14.625537Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:14.625949Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037894 2025-03-28T09:33:14.626029Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037894 tableId# [OwnerId: 72057594046644480, LocalPathId: 8] schema version# 1 2025-03-28T09:33:14.626352Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037894 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:14.626600Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037894 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:14.628006Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037894 time 3500 2025-03-28T09:33:14.628064Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-28T09:33:14.629136Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037894 step# 4000} 2025-03-28T09:33:14.629194Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-03-28T09:33:14.629932Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037894 2025-03-28T09:33:14.629962Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037894 2025-03-28T09:33:14.630002Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037894 2025-03-28T09:33:14.630065Z node 2 :TX_DATASHARD DEBUG: Complete [4000 : 281474976715663] from 72075186224037894 at tablet 72075186224037894 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:14.630116Z node 2 :TX_DATASHARD INFO: 72075186224037894 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2025-03-28T09:33:14.630205Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-28T09:33:14.631113Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2025-03-28T09:33:14.631169Z node 2 :TX_DATASHARD DEBUG: 
Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:33:14.631205Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2025-03-28T09:33:14.631430Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-28T09:33:14.631526Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-03-28T09:33:14.631576Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-03-28T09:33:14.631617Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:14.631955Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037894 coordinator 72057594046316545 last step 0 next step 4000 2025-03-28T09:33:14.632673Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037894 state Ready 2025-03-28T09:33:14.632719Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-03-28T09:33:14.636334Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1340:3103], serverId# [2:1341:3104], sessionId# [0:0:0] 2025-03-28T09:33:14.636546Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1340:3103], serverId# [2:1341:3104], sessionId# [0:0:0] 2025-03-28T09:33:14.637623Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1345:3108], serverId# [2:1346:3109], sessionId# [0:0:0] 2025-03-28T09:33:14.637781Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1345:3108], serverId# [2:1346:3109], sessionId# [0:0:0] 2025-03-28T09:33:14.639261Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:1350:3113], serverId# [2:1351:3114], sessionId# [0:0:0] 2025-03-28T09:33:14.639482Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1350:3113], serverId# [2:1351:3114], sessionId# [0:0:0] >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::WithDir_PartitionSplit_AutosplitByLoad [GOOD] Test command err: 2025-03-28T09:32:00.874543Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790800117699685:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:00.875224Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:32:01.110613Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001357/r3tmp/tmpv0xYSS/pdisk_1.dat 2025-03-28T09:32:01.319811Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-28T09:32:01.341670Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:01.341764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:01.352614Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23933, node 1 2025-03-28T09:32:01.800184Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001357/r3tmp/yandexrmSTkn.tmp 2025-03-28T09:32:01.800209Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001357/r3tmp/yandexrmSTkn.tmp 2025-03-28T09:32:01.801461Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001357/r3tmp/yandexrmSTkn.tmp 2025-03-28T09:32:01.801595Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:32:02.035147Z INFO: TTestServer started on Port 8171 GrpcPort 23933 TClient is connected to server localhost:8171 PQClient connected to localhost:23933 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:02.362591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:02.383356Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:02.400841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:02.539804Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:03.900602Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790813002602376:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.900747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.900832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790813002602405:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.904443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:32:03.920300Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790813002602407:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:32:04.145428Z node 1 :TX_PROXY ERROR: Actor# [1:7486790817297569767:2451] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:04.205719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.240913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.256494Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790817297569775:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:32:04.258048Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWM0NDFlZmYtMmQwNzhjZDAtOTRjMmQxNWYtNDc2MTRhNzQ=, ActorId: [1:7486790813002602373:2336], ActorState: ExecuteState, TraceId: 01jqe1g8dh1zf49j99zwmyzaxh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:32:04.260369Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:32:04.324075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7486790817297570062:2634] 2025-03-28T09:32:05.874725Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790800117699685:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:05.874787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-03-28T09:32:10.559868Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-28T09:32:10.572608Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-28T09:32:10.573912Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7486790843067374126:2790], Recipient [1:7486790804412667408:2197]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:10.573947Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:10.573962Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:32:10.573986Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7486790843067374122:2787], Recipient [1:7486790804412667408:2197]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-03-28T09:32:10.573998Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:32:10.606809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:32:10.607156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:32:10.607386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-03-28T09:32:10.607433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-03-28T09:32:10.607466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-03-28T09:32:10.607510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13 ... StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791110605542597:2462]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:14.401431Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:14.401474Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:14.401547Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:14.401576Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:14.401605Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:14.465452Z node 5 :PQ_READ_PROXY DEBUG: new Describe topic request 2025-03-28T09:33:14.465544Z node 5 :PQ_READ_PROXY DEBUG: TDescribeTopicActor for request operation_params { } path: "/Root/dir/origin" 2025-03-28T09:33:14.465689Z node 5 :PQ_READ_PROXY DEBUG: Describe topic actor for path /Root/dir/origin 2025-03-28T09:33:14.469188Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [5:7486791114900510155:2983], Recipient [5:7486791067655868442:2144]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:33:14.469229Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:33:14.469252Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:33:14.469636Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [5:7486791071950836048:2317], Recipient [5:7486791067655868442:2144]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 3 Generation: 1 Round: 0 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { Memory: 82488 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 5 StartTime: 1743154384449 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-28T09:33:14.469663Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-28T09:33:14.469694Z node 5 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2025-03-28T09:33:14.469793Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 
SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-28T09:33:14.469830Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.099993s, queue# 1 2025-03-28T09:33:14.485305Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [5:7486791110605542667:2478], Partition 1, Sender [5:7486791110605542751:2488], Recipient [5:7486791110605542745:2484], Cookie: 0 2025-03-28T09:33:14.485394Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [5:7486791110605542751:2488], Recipient [5:7486791110605542745:2484]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2025-03-28T09:33:14.485419Z node 5 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2025-03-28T09:33:14.485786Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435094, Sender [0:0:0], Recipient [5:7486791067655868442:2144]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-03-28T09:33:14.485828Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-03-28T09:33:14.485852Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T09:33:14.485865Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-28T09:33:14.485921Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-03-28T09:33:14.486292Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435094, Sender [0:0:0], Recipient [5:7486791067655868442:2144]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-03-28T09:33:14.486326Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-03-28T09:33:14.486354Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-28T09:33:14.487371Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188544 (NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated), Tablet [5:7486791110605542666:2477], Partition 2, Sender [5:7486791110605542765:2492], Recipient [5:7486791110605542749:2486], Cookie: 0 2025-03-28T09:33:14.487484Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188544, Sender [5:7486791110605542765:2492], Recipient [5:7486791110605542749:2486]: NKikimr::NPQ::NReadQuoterEvents::TEvQuotaCountersUpdated 2025-03-28T09:33:14.487543Z node 5 :PERSQUEUE TRACE: StateIdle, processing event NReadQuoterEvents::TEvQuotaCountersUpdated 2025-03-28T09:33:14.489844Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791110605542667:2478], Partition 1, Sender [0:0:0], Recipient [5:7486791110605542745:2484], Cookie: 0 2025-03-28T09:33:14.489909Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791110605542745:2484]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:14.489930Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:14.489996Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:14.490086Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:14.490110Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:14.490140Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037895, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:14.497102Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791110605542666:2477], Partition 2, Sender [0:0:0], Recipient [5:7486791110605542749:2486], Cookie: 0 2025-03-28T09:33:14.497189Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791110605542749:2486]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:14.497211Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:14.497260Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:14.497331Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:14.497363Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:14.497421Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037894, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:14.501651Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791110605542530:2458], Partition 0, Sender [0:0:0], Recipient [5:7486791110605542597:2462], Cookie: 0 2025-03-28T09:33:14.501726Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791110605542597:2462]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:14.501751Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:14.501797Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:14.501869Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:14.501899Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:14.501928Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-28T09:33:14.569763Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [5:7486791067655868442:2144]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T09:33:14.569801Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T09:33:14.569822Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T09:33:14.569865Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-28T09:33:14.569879Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-28T09:33:14.569942Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 0 row count 0 2025-03-28T09:33:14.569990Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=SourceIdMeta2, is column=0, is olap=0 2025-03-28T09:33:14.570012Z node 5 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 0, DataSize 0 2025-03-28T09:33:14.570029Z node 5 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 0 2025-03-28T09:33:14.570089Z node 5 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:1 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-03-28T09:33:14.570203Z node 5 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:33:14.570296Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [5:7486791067655868442:2144]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T09:33:14.570313Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T09:33:14.570324Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp >> TPQCachingProxyTest::TestPublishAndForget >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate >> TPQCachingProxyTest::TestPublishAndForget [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] >> TLocksTest::Range_CorrectDot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2025-03-28T09:33:17.417029Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:33:17.417142Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:33:17.432699Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:33:17.432835Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: session1:1 with 
generation 1 2025-03-28T09:33:17.432985Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: staged direct read id 1 for session: session1 2025-03-28T09:33:17.433028Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: publish read: 1 for session session1, Generation: 1 2025-03-28T09:33:17.433123Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: forget read: 1 for session session1 >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [GOOD] |92.8%| [TA] $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.8%| [TA] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::RebootHotTiersTtlWithStat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] Test command err: 2025-03-28T09:33:10.979241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:10.979709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:10.979761Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00161b/r3tmp/tmp8sC8iB/pdisk_1.dat 2025-03-28T09:33:11.355500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:33:11.388848Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:11.426907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:11.427058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:11.438551Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:11.519081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:11.556751Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:33:11.557054Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:11.593562Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:11.593647Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:11.595097Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:33:11.595177Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:33:11.595237Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:33:11.595596Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:11.595718Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:11.595810Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:33:11.606572Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:11.639664Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:33:11.639873Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:11.640007Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:33:11.640040Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:11.640070Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:33:11.640096Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.640486Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:33:11.640600Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:33:11.641067Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:11.641109Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:11.641146Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:11.641187Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:11.641307Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:11.641444Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:11.641717Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:11.641810Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:11.643425Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:11.654025Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:11.654142Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:11.807566Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:11.812594Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:11.812671Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.813462Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:11.813514Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:11.813572Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:11.813872Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:11.814052Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:11.814886Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:11.814956Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:11.817134Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:11.817527Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-28T09:33:11.819293Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:11.819342Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.820134Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:11.820202Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:11.821209Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:11.821253Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:11.821317Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:11.821385Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:11.821442Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:11.821541Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.825777Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:11.828545Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:11.828614Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:11.828881Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:11.838202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:11.838363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:11.838445Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:11.843954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:11.850013Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:12.002003Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:12.005340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:12.077219Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:12.393954Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1jarw488szdbd7x0cry8t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGRiY2VjYmQtYzAxZTZiNzYtYjk5YjJiNDEtM2Q2YTU5NzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:12.399942Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:852:2688], serverId# [1:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:12.400150Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:12.412205Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09 ... 86224037888 2025-03-28T09:33:16.662775Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:16.662834Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:16.662888Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:16.662963Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:662:2567], serverId# [2:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:16.663101Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:16.663323Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:16.663414Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:16.665489Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:16.676181Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:16.676300Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:16.825680Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:704:2594], serverId# [2:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:16.826841Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:16.826911Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:16.827558Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:16.827607Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:16.827648Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:16.827894Z node 2 
:TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:16.828063Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:16.828352Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:16.828418Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:16.828910Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:16.829311Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:16.830800Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:16.830853Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:16.831723Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:16.831798Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:16.832755Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:16.832799Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:16.832847Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:16.832907Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:16.832961Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:16.833079Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:16.833991Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:16.835693Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:16.835761Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:16.836683Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:16.844823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:16.844933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:16.845005Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:16.850374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:16.856566Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:17.011266Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:17.014212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:17.049377Z node 2 :TX_PROXY ERROR: Actor# [2:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:17.164420Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1jfnb9z6sxm8zwtya01bj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWNlYWFmZGItZDJiOWViNWQtNWI5MTVkMjktYmIzNjUxNjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:17.164985Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:852:2688], serverId# [2:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:17.165228Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:17.177411Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:17.177535Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:17.181206Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:860:2695], serverId# [2:861:2696], sessionId# [0:0:0] 2025-03-28T09:33:17.182238Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-28T09:33:17.193383Z node 2 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-28T09:33:17.193466Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:17.193748Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:17.193782Z node 2 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-03-28T09:33:17.193892Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:860:2695], serverId# [2:861:2696], sessionId# [0:0:0] 2025-03-28T09:33:17.193984Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:17.194046Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:17.194086Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:17.194135Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:17.194942Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:17.195221Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:17.195362Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:17.195396Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:17.195428Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-03-28T09:33:17.195594Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:17.195653Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-03-28T09:33:17.196117Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-03-28T09:33:17.196305Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:33:17.196423Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-03-28T09:33:17.196458Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-03-28T09:33:17.214497Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:17.214555Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-03-28T09:33:17.214721Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:17.214748Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:17.214784Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-03-28T09:33:17.214889Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:17.214950Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:17.214990Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
>> YdbProxy::RemoveDirectory
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64
>> YdbProxy::CreateTopic
>> YdbProxy::DropTable
>> PartitionEndWatcher::EmptyPartition [GOOD]
>> PartitionEndWatcher::AfterCommit [GOOD]
>> YdbProxy::AlterTable
>> YdbProxy::DescribePath
>> YdbProxy::ReadTopic
>> YdbProxy::ListDirectory
>> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD]
>> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest
>> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD]
Test command err: 2025-03-28T09:33:06.197810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:06.198155Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:06.198193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001643/r3tmp/tmprO3JPi/pdisk_1.dat 2025-03-28T09:33:06.539987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:33:06.582158Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:06.629214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:06.629389Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:06.644070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:06.726560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:06.779462Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:688:2586] 2025-03-28T09:33:06.779691Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:06.812338Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:06.812481Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:06.813825Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:33:06.813903Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:33:06.813961Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:33:06.814246Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:06.814567Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:06.814617Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:717:2586] in generation 1 2025-03-28T09:33:06.815713Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:690:2588] 2025-03-28T09:33:06.815931Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:06.826199Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:06.826446Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:06.827759Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:33:06.827816Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-28T09:33:06.827869Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:33:06.828171Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:06.828609Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2590] 2025-03-28T09:33:06.828820Z node 1 
:TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:06.837418Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:06.837504Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:735:2588] in generation 1 2025-03-28T09:33:06.838941Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:06.839046Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:06.840361Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-03-28T09:33:06.840418Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-03-28T09:33:06.840457Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-03-28T09:33:06.840771Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:06.840866Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:06.840921Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:740:2590] in generation 1 2025-03-28T09:33:06.851968Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:06.881310Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:33:06.881505Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:06.881629Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:744:2618] 2025-03-28T09:33:06.881665Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:06.881698Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:33:06.881758Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:06.882095Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:06.882129Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-28T09:33:06.882186Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:06.882264Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:745:2619] 2025-03-28T09:33:06.882287Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:33:06.882312Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-28T09:33:06.882333Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:33:06.882747Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:06.882778Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-03-28T09:33:06.882823Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:06.882871Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:746:2620] 2025-03-28T09:33:06.882918Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-28T09:33:06.882942Z node 1 :TX_DATASHARD INFO: Cannot 
activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-03-28T09:33:06.882979Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:33:06.883140Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:33:06.883238Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:33:06.883864Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:06.883921Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:06.883963Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:06.884017Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:06.884075Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-28T09:33:06.884145Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-28T09:33:06.884235Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:712:2601], sessionId# [0:0:0] 2025-03-28T09:33:06.884306Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:33:06.884333Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:06.884355Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-28T09:33:06.884381Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:33:06.884444Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-03-28T09:33:06.884504Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-03-28T09:33:06.884658Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:06.884904Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:06.885022Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:06.885486Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:713:2602], sessionId# [0:0:0] 2025-03-28T09:33:06.885545Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:33:06.885574Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:06.885601Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-03-28T09:33:06.885645Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:33:06.885892Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:33:06.886043Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:33:06.886095Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-28T09:33:06.886489Z node 1 :TX_DATASHARD DEBUG: Server connected at 
leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:719:2605], sessionId# [0:0:0] 2025-03-28T09:33:06.886612Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-03-28T09:33:06.886790Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-03-28T09:33:06.886841Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-03-28T09:33:06.888741Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:06.888823Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:33:06.888866Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-28T09:33:06.899893Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:06.900036Z nod ... 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2025-03-28T09:33:17.749645Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2025-03-28T09:33:17.749882Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 ack init split/merge destination OpId 281474976715664 2025-03-28T09:33:17.749942Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state SplitDstReceivingSnapshot 2025-03-28T09:33:17.750587Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 ack init split/merge destination OpId 281474976715664 2025-03-28T09:33:17.750630Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 not sending time cast registration request in state SplitDstReceivingSnapshot 2025-03-28T09:33:17.751897Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 received split OpId 281474976715664 at state Ready 2025-03-28T09:33:17.762832Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 starting snapshot for split OpId 281474976715664 2025-03-28T09:33:17.763222Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 CancelReadIterators#0 2025-03-28T09:33:17.765097Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 3, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:33:17.765144Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 3, finished edge# 0, front# 0 2025-03-28T09:33:17.765548Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 4, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:33:17.765577Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 4, finished edge# 0, front# 0 2025-03-28T09:33:17.767888Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 7, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:33:17.767923Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 7, finished edge# 0, front# 0 2025-03-28T09:33:17.804280Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 8, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:33:17.804330Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, 
table# 8, finished edge# 0, front# 0 2025-03-28T09:33:17.805135Z node 3 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:33:17.805172Z node 3 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2025-03-28T09:33:17.805555Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 snapshot complete for split OpId 281474976715664 2025-03-28T09:33:17.805769Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 12 for split OpId 281474976715664 2025-03-28T09:33:17.805831Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 24 for split OpId 281474976715664 2025-03-28T09:33:17.805863Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 36 for split OpId 281474976715664 2025-03-28T09:33:17.805891Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 48 for split OpId 281474976715664 2025-03-28T09:33:17.806106Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 146 total snapshot size is 194 for split OpId 281474976715664 2025-03-28T09:33:17.806284Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 206 for split OpId 281474976715664 2025-03-28T09:33:17.806315Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 218 for split OpId 281474976715664 2025-03-28T09:33:17.806344Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 230 for split OpId 281474976715664 2025-03-28T09:33:17.806402Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 242 for split OpId 281474976715664 2025-03-28T09:33:17.806512Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 155 total snapshot size is 397 for split OpId 281474976715664 2025-03-28T09:33:17.807100Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Sending snapshots from src for split OpId 281474976715664 2025-03-28T09:33:17.807282Z node 3 :TX_DATASHARD DEBUG: Sending snapshot for split opId 281474976715664 from datashard 72075186224037889 to datashard 72075186224037892 size 221 2025-03-28T09:33:17.807407Z node 3 :TX_DATASHARD DEBUG: Sending snapshot for split opId 281474976715664 from datashard 72075186224037889 to datashard 72075186224037891 size 215 2025-03-28T09:33:17.807701Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037892, clientId# [3:1229:2945], serverId# [3:1231:2947], sessionId# [0:0:0] 2025-03-28T09:33:17.807819Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037891, clientId# [3:1230:2946], serverId# [3:1232:2948], sessionId# [0:0:0] 2025-03-28T09:33:17.807881Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 Received snapshot for split/merge TxId 281474976715664 from tabeltId 72075186224037889 2025-03-28T09:33:17.808726Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 Received snapshot for split/merge TxId 281474976715664 from tabeltId 72075186224037889 2025-03-28T09:33:17.810190Z node 3 :TX_DATASHARD DEBUG: 72075186224037892 ack snapshot OpId 281474976715664 2025-03-28T09:33:17.810330Z node 3 
:TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037892 2025-03-28T09:33:17.810426Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:17.810517Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-03-28T09:33:17.810584Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037892, actorId: [3:1235:2951] 2025-03-28T09:33:17.810617Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2025-03-28T09:33:17.810664Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2025-03-28T09:33:17.810719Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-28T09:33:17.810832Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715664 2025-03-28T09:33:17.811363Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2025-03-28T09:33:17.811404Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-28T09:33:17.811692Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-28T09:33:17.811725Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:17.811754Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2025-03-28T09:33:17.811785Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-03-28T09:33:17.811925Z node 3 :TX_DATASHARD DEBUG: 72075186224037891 ack snapshot OpId 281474976715664 2025-03-28T09:33:17.812018Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037891 2025-03-28T09:33:17.812078Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:17.812127Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037891 2025-03-28T09:33:17.812172Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [3:1237:2953] 2025-03-28T09:33:17.812193Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-03-28T09:33:17.812220Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037891 2025-03-28T09:33:17.812241Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-28T09:33:17.812292Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [3:1229:2945], serverId# [3:1231:2947], sessionId# [0:0:0] 2025-03-28T09:33:17.812385Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976715664 2025-03-28T09:33:17.812867Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 2000 2025-03-28T09:33:17.812899Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-28T09:33:17.812943Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-03-28T09:33:17.812967Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active 
planned 0 immediate 0 planned 0 2025-03-28T09:33:17.812990Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037891 TxInFly 0 2025-03-28T09:33:17.813017Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-03-28T09:33:17.813253Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1230:2946], serverId# [3:1232:2948], sessionId# [0:0:0] 2025-03-28T09:33:17.813300Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 1500 next step 2000 2025-03-28T09:33:17.813353Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-03-28T09:33:17.813655Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 1500 next step 2000 2025-03-28T09:33:17.813689Z node 3 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037891: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-03-28T09:33:17.835043Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715664 2025-03-28T09:33:17.838157Z node 3 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715664, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-03-28T09:33:17.840886Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-03-28T09:33:17.840943Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-03-28T09:33:17.841176Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1098:2843], serverId# [3:1099:2844], sessionId# [0:0:0] 2025-03-28T09:33:17.841261Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:33:17.841300Z node 3 :TX_DATASHARD INFO: Progress tx at non-ready tablet 72075186224037889 state 5 2025-03-28T09:33:17.841465Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715664 2025-03-28T09:33:17.841532Z node 3 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:33:17.841582Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> TColumnShardTestSchema::RebootHotTiersTtlWithStat [GOOD]
Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143154939.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143154939.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143154939.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143154939.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143154939.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123154939.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=143154939.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143154939.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123153739.000000s;Name=;Codec=}; 2025-03-28T09:32:19.695662Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:32:19.771945Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:32:19.795807Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:32:19.796135Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:32:19.803727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:32:19.803913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:32:19.804153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:32:19.804237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:32:19.804302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:32:19.804378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:32:19.804444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:32:19.804523Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:32:19.804600Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:32:19.804690Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:32:19.804814Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:32:19.804935Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:32:19.835195Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:32:19.835497Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:32:19.835556Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:32:19.835700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:32:19.835841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:32:19.835914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:32:19.835947Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:32:19.836031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:32:19.836073Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:32:19.836102Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:32:19.836121Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:32:19.836255Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:32:19.836317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:32:19.836352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:32:19.836376Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 
2025-03-28T09:32:19.836443Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:32:19.836483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:32:19.836512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:32:19.836532Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:32:19.836584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:32:19.836611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:32:19.836631Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:32:19.836665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:32:19.836695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:32:19.836720Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:32:19.837053Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-03-28T09:32:19.837123Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=25; 2025-03-28T09:32:19.837186Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29; 2025-03-28T09:32:19.837255Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=29; 2025-03-28T09:32:19.837393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:32:19.837433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:32:19.837474Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:32:19.837654Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:32:19.837690Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:32:19.837711Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:32:19.837847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... nes;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=518; 2025-03-28T09:33:17.843953Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=39509; 2025-03-28T09:33:17.855649Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=11589; 2025-03-28T09:33:17.864804Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=8001; 2025-03-28T09:33:17.864900Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=9148; 2025-03-28T09:33:17.865034Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=78; 2025-03-28T09:33:17.865125Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=53; 2025-03-28T09:33:17.865250Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=90; 2025-03-28T09:33:17.865341Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=55; 2025-03-28T09:33:17.872552Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=7156; 2025-03-28T09:33:17.882879Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10229; 2025-03-28T09:33:17.882990Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=31; 2025-03-28T09:33:17.883049Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=22; 2025-03-28T09:33:17.883089Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-03-28T09:33:17.883125Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-03-28T09:33:17.883162Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-03-28T09:33:17.883221Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=32; 2025-03-28T09:33:17.883258Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-03-28T09:33:17.883330Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=43; 2025-03-28T09:33:17.883363Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-03-28T09:33:17.883411Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=23; 2025-03-28T09:33:17.883475Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=38; 2025-03-28T09:33:17.883733Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=226; 2025-03-28T09:33:17.883773Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=84680; 2025-03-28T09:33:17.883908Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=36397736;raw_bytes=56295575;count=22;records=560000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:33:17.884015Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2548:4457];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:33:17.884060Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2548:4457];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:33:17.884114Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2548:4457];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:33:17.898880Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2548:4457];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-28T09:33:17.899013Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:33:17.899061Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:33:17.899126Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-28T09:33:17.899185Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-03-28T09:33:17.899218Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:33:17.899253Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:33:17.899281Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:33:17.899351Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:33:17.899934Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:33:17.900018Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:2598:4500];tablet_id=9437184;parent=[1:2548:4457];fline=manager.cpp:82;event=ask_data;request=request_id=120;1={portions_count=22};; 2025-03-28T09:33:17.900859Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2548:4457];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:33:17.901169Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2548:4457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:33:17.901198Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:33:17.901220Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:33:17.901268Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2548:4457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:33:17.901318Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2548:4457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:33:17.901364Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2548:4457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-28T09:33:17.901411Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2548:4457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-03-28T09:33:17.901444Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2548:4457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:33:17.901480Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2548:4457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:33:17.901510Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2548:4457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:33:17.901573Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2548:4457];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:33:17.902873Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2548:4457];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=22;path_id=1; 2025-03-28T09:33:17.903659Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2548:4457];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 80000/5203352 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::BalancingAfterSplit_sessionsWithPartition [GOOD] 
Test command err: 2025-03-28T09:32:00.889021Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790798927552632:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:00.893095Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:32:01.091305Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00134d/r3tmp/tmpB77nUx/pdisk_1.dat 2025-03-28T09:32:01.346626Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:01.357149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:01.357253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:01.362104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10199, node 1 2025-03-28T09:32:01.803476Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00134d/r3tmp/yandexcIn2r2.tmp 2025-03-28T09:32:01.803507Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00134d/r3tmp/yandexcIn2r2.tmp 2025-03-28T09:32:01.803671Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00134d/r3tmp/yandexcIn2r2.tmp 2025-03-28T09:32:01.803784Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:32:02.035204Z INFO: TTestServer started on Port 24040 GrpcPort 10199 TClient is connected to server localhost:24040 PQClient connected to localhost:10199 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:02.338161Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:02.355848Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:02.371338Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:32:02.390125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:32:03.952148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790811812455320:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.952491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.952576Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790811812455346:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.957164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:32:03.971935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790811812455349:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:32:04.154732Z node 1 :TX_PROXY ERROR: Actor# [1:7486790816107422709:2453] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:04.205747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.276060Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790816107422724:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:32:04.276128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.276340Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2IyY2YyNmUtMTgzZmFiZjItNjc1ZTU5MGEtZDg3MjY1MjQ=, ActorId: [1:7486790811812455308:2337], ActorState: ExecuteState, TraceId: 01jqe1g8etbxfhx63w8p4tbhnq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:32:04.279072Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:32:04.341202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7486790816107423004:2635] 2025-03-28T09:32:05.886978Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790798927552632:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:05.887054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-03-28T09:32:10.388212Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-28T09:32:10.404798Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-28T09:32:10.412854Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7486790841877227068:2791], Recipient [1:7486790803222520336:2188]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:10.412899Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:10.412924Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:32:10.412967Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7486790841877227064:2788], Recipient [1:7486790803222520336:2188]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-03-28T09:32:10.412985Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:32:10.462821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:32:10.463245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:32:10.463454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-03-28T09:32:10.463489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-03-28T09:32:10.463522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-03-28T09:32:10.463566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13 ... t [6:7486791095707230619:2455]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:16.710201Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:16.710255Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [6:7486791117182068239:2734], Recipient [6:7486791117182068158:2726]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:16.710270Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:16.710357Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 3 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-03-28T09:33:16.710402Z node 6 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [6:7486791117182068243:2737], Recipient [6:7486791117182068164:2727]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:16.710414Z node 6 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:16.710461Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [6:7486791117182068164:2727], Partition 1, Sender [6:7486791117182068164:2727], Recipient [6:7486791117182068243:2737], Cookie: 0 2025-03-28T09:33:16.710502Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [6:7486791117182068164:2727], Recipient [6:7486791117182068243:2737]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-03-28T09:33:16.710517Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2025-03-28T09:33:16.710852Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 7 DataSize: 0 UsedReserveSize: 0 2025-03-28T09:33:16.710996Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] ProcessPendingStats. 
PendingUpdates size 3 2025-03-28T09:33:16.711235Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [6:7486791095707230618:2454], Recipient [6:7486791057052523834:2137]: NKikimrPQ.TEvPeriodicTopicStats PathId: 13 Generation: 1 Round: 7 DataSize: 0 UsedReserveSize: 0 SubDomainOutOfSpace: false 2025-03-28T09:33:16.711265Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-03-28T09:33:16.711284Z node 6 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046644480, LocalPathId: 13] DataSize 0 UsedReserveSize 0 2025-03-28T09:33:16.711308Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.099996s, queue# 1 2025-03-28T09:33:16.716728Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [6:7486791095707230618:2454], Recipient [6:7486791057052523834:2137]: NKikimrSchemeOp.TDescribePath PathId: 13 SchemeshardId: 72057594046644480 2025-03-28T09:33:16.716773Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-28T09:33:16.759177Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7486791117182068158:2726], Partition 2, Sender [0:0:0], Recipient [6:7486791117182068239:2734], Cookie: 0 2025-03-28T09:33:16.759274Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7486791117182068239:2734]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:16.759308Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:16.759363Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:16.759447Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:16.759505Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:16.759547Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-28T09:33:16.763131Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7486791117182068164:2727], Partition 1, Sender [0:0:0], Recipient [6:7486791117182068243:2737], Cookie: 0 2025-03-28T09:33:16.763195Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7486791095707230619:2455], Partition 0, Sender [0:0:0], Recipient [6:7486791095707230680:2458], Cookie: 0 2025-03-28T09:33:16.763212Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7486791117182068243:2737]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:16.763240Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:16.763264Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7486791095707230680:2458]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:16.763290Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:16.763310Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:16.763364Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:16.763372Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:16.763395Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:16.763425Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:16.763446Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:16.763479Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:16.763519Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-28T09:33:16.811331Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435094, Sender [0:0:0], Recipient [6:7486791057052523834:2137]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-03-28T09:33:16.811388Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-03-28T09:33:16.811415Z node 6 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T09:33:16.811427Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-28T09:33:16.811495Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-03-28T09:33:16.811837Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435094, Sender [0:0:0], Recipient [6:7486791057052523834:2137]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-03-28T09:33:16.811862Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-03-28T09:33:16.811876Z node 6 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-28T09:33:16.859404Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7486791117182068158:2726], Partition 2, Sender [0:0:0], Recipient [6:7486791117182068239:2734], Cookie: 0 2025-03-28T09:33:16.859481Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7486791117182068239:2734]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:16.859512Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:16.859562Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:16.859636Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:16.859660Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:16.859690Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-28T09:33:16.863490Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7486791117182068164:2727], Partition 1, Sender [0:0:0], Recipient [6:7486791117182068243:2737], Cookie: 0 2025-03-28T09:33:16.863490Z node 6 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [6:7486791095707230619:2455], Partition 0, Sender [0:0:0], Recipient [6:7486791095707230680:2458], Cookie: 0 2025-03-28T09:33:16.863550Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7486791095707230680:2458]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:16.863560Z node 6 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [6:7486791117182068243:2737]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:16.863578Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:16.863582Z node 6 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:16.863630Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:16.863643Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:16.863696Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:16.863708Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:16.863716Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:16.863728Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:16.863743Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:16.863759Z node 6 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_CorrectDot [GOOD] Test command err: 2025-03-28T09:32:40.571763Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790968128540582:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:40.574909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee4/r3tmp/tmpZ4RNoB/pdisk_1.dat 2025-03-28T09:32:40.938434Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:40.956825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:40.956913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:40.959015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7992 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:41.205099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:41.217840Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:41.238747Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:41.354265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:41.426595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:43.784205Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790985237288696:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:43.784277Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee4/r3tmp/tmplaE3Dj/pdisk_1.dat 2025-03-28T09:32:43.889041Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:43.920926Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:43.921019Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:43.922548Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11341 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:44.034317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:44.054669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:44.105356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:44.145711Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:46.851811Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486790994567355994:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:46.851895Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee4/r3tmp/tmp9FLigO/pdisk_1.dat 2025-03-28T09:32:46.947939Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:46.990012Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:46.990090Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:46.991601Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9555 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:47.109869Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:47.143129Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:47.194253Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:47.268165Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:32:49.938526Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486791007361882314:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:49.938579Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee4/r3tmp/tmpKrLRhT/pdisk_1.dat 2025-03-28T09:32:50.086815Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:50.111538Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:50.111629Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:50.112793Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16710 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:50.365415Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:50.375603Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, un ... /001ee4/r3tmp/tmpNnJqJA/pdisk_1.dat 2025-03-28T09:33:01.476497Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:01.507050Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:01.507155Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:01.508835Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8728 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:01.740782Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:01.757778Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:01.828578Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:01.899091Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:05.097779Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486791077273303678:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:05.097831Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee4/r3tmp/tmps31oea/pdisk_1.dat 2025-03-28T09:33:05.213522Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:05.243168Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:05.243277Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:05.246865Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20332 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:05.438970Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:05.455654Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:05.528814Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:05.587263Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:09.235312Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486791093965239147:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:09.235466Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee4/r3tmp/tmpCkFDDD/pdisk_1.dat 2025-03-28T09:33:09.366901Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:09.373998Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:09.374093Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:09.375735Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27906 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:09.624629Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:09.647863Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:09.719336Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:09.781985Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:13.740935Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486791113309108005:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:13.741031Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee4/r3tmp/tmp6lldcZ/pdisk_1.dat 2025-03-28T09:33:13.866064Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:13.900346Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:13.900449Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:13.902469Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1032 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:14.142559Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:14.164391Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:14.249470Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:14.315482Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 >> YdbProxy::CreateTable >> TopicAutoscaling::PartitionMerge_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_AutosplitByLoad >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows >> TLocksTest::SetBreakSetEraseBreak [GOOD] >> Viewer::StorageGroupOutputWithSpaceCheckDependsOnUsage [GOOD] >> Viewer::SimpleFeatureFlags >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] Test command err: 2025-03-28T09:33:13.824433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:13.824797Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:13.824839Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015fd/r3tmp/tmpJGw2tY/pdisk_1.dat 2025-03-28T09:33:14.160791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:33:14.205882Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:14.250835Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:14.250962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:14.262266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:14.348914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:14.383865Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:33:14.384095Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:14.423063Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:14.423156Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:14.424769Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:33:14.424850Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:33:14.424919Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:33:14.425286Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:14.425426Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:14.425510Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:33:14.436419Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:14.466273Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:33:14.466529Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:14.466651Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:33:14.466709Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:14.466746Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:33:14.466778Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:14.467664Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:33:14.467792Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:33:14.468263Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:14.468320Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:14.468359Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:14.468397Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:14.468505Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:14.468648Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:14.468901Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:14.468999Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:14.470883Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:14.481623Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:14.481735Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:14.631883Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:14.642279Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:14.642365Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:14.643213Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:14.643270Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:14.643327Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:14.643611Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:14.643798Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:14.644752Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:14.644833Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:14.646930Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:14.647355Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-28T09:33:14.649002Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:14.649054Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:14.649970Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:14.650046Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:14.651160Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:14.651205Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:14.651270Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:14.651344Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:14.651408Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:14.651508Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:14.655244Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:14.657927Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:14.658022Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:14.658300Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:14.667267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:14.667426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:14.667496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:14.672866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:14.679175Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:14.828812Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:14.832374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:14.912849Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:15.272328Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1jdh9673j8v8x0cxkhme3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDRkZTEyZDQtMTA4M2NmOTUtMjYzODQ4MDUtZTdlYmNmZGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:15.276782Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:852:2688], serverId# [1:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:15.277001Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:15.289142Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09 ... 3-28T09:33:15.381699Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:15.381736Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:15.381774Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for WaitForStreamClearance 2025-03-28T09:33:15.382030Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:15.382102Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:15.382842Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2025-03-28T09:33:15.383048Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:33:15.383164Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2025-03-28T09:33:15.383210Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 0 2025-03-28T09:33:15.384752Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:15.384796Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715662, at: 72075186224037888 2025-03-28T09:33:15.385150Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:15.385184Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:15.385217Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2025-03-28T09:33:15.385325Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:15.385388Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:15.385443Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:18.311052Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], 
Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:18.311362Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:18.311415Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015fd/r3tmp/tmp4bFQ1R/pdisk_1.dat 2025-03-28T09:33:18.585556Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:33:18.613723Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:18.650225Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:18.650330Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:18.661776Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:18.743499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:18.765331Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:666:2570] 2025-03-28T09:33:18.765561Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:18.814897Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:18.815005Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:18.816697Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:33:18.816797Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:33:18.816884Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:33:18.817188Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:18.817308Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:18.817381Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:682:2570] in generation 1 2025-03-28T09:33:18.828104Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:18.828191Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:33:18.828307Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:18.828389Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:684:2580] 2025-03-28T09:33:18.828429Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:18.828467Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:33:18.828506Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:18.828815Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:33:18.828926Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:33:18.829044Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:18.829086Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:18.829135Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:18.829189Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:18.829253Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:662:2567], serverId# [2:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:18.829379Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:18.829597Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:18.829691Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:18.831687Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:18.842341Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:18.842441Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:18.991575Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:704:2594], serverId# [2:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:18.992779Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:18.992838Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:18.993461Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:18.993520Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:18.993564Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:18.993830Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:18.993968Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:18.994200Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:18.994256Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:18.994779Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:18.995203Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-28T09:33:18.996729Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:18.996779Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:18.997624Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:18.997717Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:18.998724Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:18.998767Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:18.998813Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:18.998883Z node 2 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:18.998940Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:18.999027Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:19.000061Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:19.001896Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:19.001980Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:19.002945Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:19.008718Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] 2025-03-28T09:33:19.008835Z node 2 :TX_DATASHARD NOTICE: Rejecting erase request on datashard: tablet# 72075186224037888, error# Can't execute erase at replicated table 2025-03-28T09:33:19.009053Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [2:739:2621], serverId# [2:740:2622], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] Test command err: 2025-03-28T09:33:05.374943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:05.375539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:05.375611Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001654/r3tmp/tmpclxTz4/pdisk_1.dat 2025-03-28T09:33:05.741476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:33:05.793208Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:05.833636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:05.833784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:05.845430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:05.929297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:05.983324Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:688:2586] 2025-03-28T09:33:05.983599Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:06.031509Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:06.031718Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:06.033461Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:33:06.033545Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:33:06.033619Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:33:06.034007Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:06.034475Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:06.034544Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:717:2586] in generation 1 2025-03-28T09:33:06.036157Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:690:2588] 2025-03-28T09:33:06.036370Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:06.046892Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:06.047140Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:06.048486Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:33:06.048550Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-28T09:33:06.048596Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:33:06.048879Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:06.049295Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2590] 2025-03-28T09:33:06.049484Z node 1 
:TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:06.057819Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:06.057891Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:735:2588] in generation 1 2025-03-28T09:33:06.059262Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:06.059343Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:06.060664Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-03-28T09:33:06.060727Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-03-28T09:33:06.060767Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-03-28T09:33:06.061039Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:06.061134Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:06.061188Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:740:2590] in generation 1 2025-03-28T09:33:06.072265Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:06.102227Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:33:06.102433Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:06.102559Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:744:2618] 2025-03-28T09:33:06.102594Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:06.102645Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:33:06.102707Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:06.103053Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:06.103089Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-28T09:33:06.103145Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:06.103227Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:745:2619] 2025-03-28T09:33:06.103251Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:33:06.103277Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-28T09:33:06.103299Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:33:06.103652Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:06.103696Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-03-28T09:33:06.103743Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:06.103790Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:746:2620] 2025-03-28T09:33:06.103833Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-28T09:33:06.103854Z node 1 :TX_DATASHARD INFO: Cannot 
activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-03-28T09:33:06.103876Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:33:06.104055Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:33:06.104162Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:33:06.104784Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:06.104845Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:06.104887Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:06.104927Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:06.104986Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-28T09:33:06.105050Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-28T09:33:06.105137Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:712:2601], sessionId# [0:0:0] 2025-03-28T09:33:06.105207Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:33:06.105231Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:06.105252Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-28T09:33:06.105282Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:33:06.105339Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-03-28T09:33:06.105396Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-03-28T09:33:06.105566Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:06.105784Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:06.105871Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:06.106297Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:713:2602], sessionId# [0:0:0] 2025-03-28T09:33:06.106351Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:33:06.106379Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:06.106403Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-03-28T09:33:06.106447Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:33:06.106720Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:33:06.106889Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:33:06.106997Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-28T09:33:06.107275Z node 1 :TX_DATASHARD DEBUG: Server connected at 
leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:719:2605], sessionId# [0:0:0] 2025-03-28T09:33:06.107393Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-03-28T09:33:06.107503Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-03-28T09:33:06.107555Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-03-28T09:33:06.109498Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:06.109581Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:33:06.109625Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-28T09:33:06.120698Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:06.120819Z nod ... egularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-28T09:33:18.802177Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1103:2847] Handle TEvDataShard::TEvEraseRowsRequest 2025-03-28T09:33:18.802330Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1103:2847] Propose tx: txId# 281474976715663, shard# 72075186224037890, keys# 3, dependents# 0, dependencies# 1 2025-03-28T09:33:18.802452Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1103:2847] Propose tx: txId# 281474976715663, shard# 72075186224037888, keys# 3, dependents# 0, dependencies# 1 2025-03-28T09:33:18.802532Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1103:2847] Propose tx: txId# 281474976715663, shard# 72075186224037889, keys# 3, dependents# 2, dependencies# 0 2025-03-28T09:33:18.802836Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-03-28T09:33:18.803075Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715663 at tablet 72075186224037890 2025-03-28T09:33:18.803564Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:18.803675Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715663 at tablet 72075186224037888 2025-03-28T09:33:18.803945Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:33:18.804073Z node 3 :TX_DATASHARD DEBUG: Prepared DistributedErase transaction txId 281474976715663 at tablet 72075186224037889 2025-03-28T09:33:18.815425Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-03-28T09:33:18.815514Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:18.815623Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2025-03-28T09:33:18.815686Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037890 source 
72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2025-03-28T09:33:18.815739Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1103:2847] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037888, status# 1 2025-03-28T09:33:18.815844Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:33:18.815890Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1103:2847] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037889, status# 1 2025-03-28T09:33:18.815993Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-28T09:33:18.816017Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-03-28T09:33:18.816060Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1103:2847] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037890, status# 1 2025-03-28T09:33:18.816084Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1103:2847] Register plan: txId# 281474976715663, minStep# 1504, maxStep# 31504 2025-03-28T09:33:18.816189Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 2025-03-28T09:33:18.816236Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 2025-03-28T09:33:18.829103Z node 3 :TX_DATASHARD INFO: OnDetach: 72075186224037888 2025-03-28T09:33:18.829887Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-03-28T09:33:18.832438Z node 3 :TX_DATASHARD ERROR: [DistEraser] [3:1103:2847] Reply: txId# 281474976715663, status# SHARD_UNKNOWN, error# Tx state unknown: reason# lost pipe while waiting for reply (plan), txId# 281474976715663, shard# 72075186224037888 2025-03-28T09:33:18.832671Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037889 is reset 2025-03-28T09:33:18.832727Z node 3 :TX_DATASHARD DEBUG: Client pipe to tablet 72075186224037888 from 72075186224037890 is reset 2025-03-28T09:33:18.833425Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-03-28T09:33:18.833478Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-03-28T09:33:18.833800Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:33:18.833876Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:18.833924Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 1 2025-03-28T09:33:18.833987Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:33:18.834325Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1098:2843], serverId# [3:1099:2844], sessionId# [0:0:0] 2025-03-28T09:33:18.857307Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:1115:2858] 2025-03-28T09:33:18.857634Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:18.863065Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:18.864309Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:18.866946Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: 
QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:33:18.867033Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:33:18.867123Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:33:18.867587Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:18.867913Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:18.867996Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [3:1130:2858] in generation 2 2025-03-28T09:33:18.880411Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:18.880532Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037888 2025-03-28T09:33:18.880667Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:18.880991Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:1133:2866] 2025-03-28T09:33:18.881042Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:18.881094Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:18.881147Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:18.881453Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-03-28T09:33:18.881654Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-03-28T09:33:18.882853Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:33:18.882988Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:33:18.883081Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:18.883308Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1503 2025-03-28T09:33:18.883355Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:18.883497Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:18.883543Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:18.883594Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 1 2025-03-28T09:33:18.883643Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:18.883732Z node 3 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-03-28T09:33:18.883778Z node 3 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715662 2025-03-28T09:33:18.883848Z node 3 :TX_DATASHARD DEBUG: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715662 2025-03-28T09:33:18.884064Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 1500 next step 1503 2025-03-28T09:33:18.884188Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 
281474976715662 2025-03-28T09:33:18.884286Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 1503 txid# 281474976715662 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-28T09:33:18.884368Z node 3 :TX_DATASHARD NOTICE: Outdated readset for 1503:281474976715662 at 72075186224037889 2025-03-28T09:33:18.884445Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-03-28T09:33:18.884511Z node 3 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037889 {TEvReadSet step# 1503 txid# 281474976715662 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-28T09:33:18.884638Z node 3 :TX_DATASHARD DEBUG: Start TTxProgressResendRS at tablet 72075186224037888 2025-03-28T09:33:18.884667Z node 3 :TX_DATASHARD INFO: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715662 2025-03-28T09:33:18.884696Z node 3 :TX_DATASHARD DEBUG: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715662 2025-03-28T09:33:18.884844Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715662 2025-03-28T09:33:18.885014Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715662 2025-03-28T09:33:18.885068Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 1503 txid# 281474976715662 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-03-28T09:33:18.885098Z node 3 :TX_DATASHARD NOTICE: Outdated readset for 1503:281474976715662 at 72075186224037890 2025-03-28T09:33:18.885130Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-28T09:33:18.885181Z node 3 :TX_DATASHARD DEBUG: Send RS Ack at 72075186224037890 {TEvReadSet step# 1503 txid# 281474976715662 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2025-03-28T09:33:18.885281Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] Test command err: 2025-03-28T09:33:10.018437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:10.018913Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:10.018967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001628/r3tmp/tmpMLi2QS/pdisk_1.dat 2025-03-28T09:33:10.395026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:33:10.438338Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:10.482955Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:10.483044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:10.494183Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:10.573811Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:10.608447Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:33:10.608686Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:10.643229Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:10.643316Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:10.644772Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:33:10.644843Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:33:10.644912Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:33:10.645255Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:10.645374Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:10.645444Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:33:10.656195Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:10.682284Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:33:10.682432Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:10.682513Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:33:10.682539Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:10.682571Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:33:10.682610Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:10.683013Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:33:10.683111Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:33:10.683503Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:10.683553Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:10.683589Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:10.683625Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:10.683743Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:10.683873Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:10.684120Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:10.684192Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:10.685462Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:10.696054Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:10.696164Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:10.843916Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:10.847779Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:10.847850Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:10.848484Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:10.848525Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:10.848568Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:10.848766Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:10.848885Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:10.849519Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:10.849580Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:10.855367Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:10.855836Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-28T09:33:10.856927Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:10.856963Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:10.857566Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:10.857619Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:10.858459Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:10.858499Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:10.858555Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:10.858617Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:10.858661Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:10.858769Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:10.861438Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:10.863243Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:10.863300Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:10.863499Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:10.870335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:10.870410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:10.870461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:10.873976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:10.878483Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:11.025548Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:11.028749Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:11.101127Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:11.382770Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1j9tm175eh9h9qc6xzd03, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjE4NTA5OTAtNjQ3ZDRhNGMtOTVjOWY2MzAtZGQxNDI4MjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:11.387028Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:852:2688], serverId# [1:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:11.387220Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:11.399168Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09 ... 86224037888 2025-03-28T09:33:19.063516Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:19.063557Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:19.063608Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:19.063950Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:19.064066Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:19.064249Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:19.064340Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:19.065701Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:19.076540Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:19.076671Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:19.228390Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:19.228827Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:19.228894Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:19.229850Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:19.229907Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:19.229960Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:19.230216Z node 3 
:TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:19.230399Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:19.231056Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:19.231137Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:19.231634Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:19.232081Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:19.233830Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:19.233892Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:19.235107Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:19.235193Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:19.236274Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:19.236326Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:19.236381Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:19.236449Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:19.236513Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:19.236619Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:19.237970Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:19.240441Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:19.240515Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:19.240739Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:19.249178Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:19.249271Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:19.249328Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:19.253464Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:19.258474Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:19.417554Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:19.421234Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:19.457297Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:19.554783Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1jj0f3tdvz3rfw3gy85aq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OTgxN2M3MjYtNDY3ZWJlMjQtNmU1ZjRlZjYtOWViOTA5MzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:19.555350Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:852:2688], serverId# [3:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:19.555629Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:19.568643Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:19.568794Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:19.572822Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:860:2695], serverId# [3:861:2696], sessionId# [0:0:0] 2025-03-28T09:33:19.573891Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-28T09:33:19.587453Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-28T09:33:19.587537Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:19.587785Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:19.587831Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-03-28T09:33:19.588123Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:19.588185Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:19.588253Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:19.588331Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:19.588485Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:860:2695], serverId# [3:861:2696], sessionId# [0:0:0] 2025-03-28T09:33:19.589562Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:19.589914Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:19.590107Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:19.590148Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:19.590196Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-03-28T09:33:19.590443Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:19.590508Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-03-28T09:33:19.591206Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-03-28T09:33:19.591485Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 37, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:33:19.591628Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-03-28T09:33:19.591671Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-03-28T09:33:19.642491Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:19.642566Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-03-28T09:33:19.643269Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:19.643314Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:19.643355Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-03-28T09:33:19.643478Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:19.643546Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:19.643587Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> YdbProxy::MakeDirectory >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] >> TopicAutoscaling::ReadFromTimestamp_AutoscaleAwareSDK [GOOD] >> TopicAutoscaling::ReadFromTimestamp_PQv1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetBreakSetEraseBreak [GOOD] Test command err: 2025-03-28T09:32:54.913718Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791029158160235:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:54.913789Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ede/r3tmp/tmpdzzXO9/pdisk_1.dat 2025-03-28T09:32:55.296979Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:55.324978Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:55.325075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:55.327592Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2883 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:55.551518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:55.578572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:55.714046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:55.787577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:32:58.067943Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791049042090626:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:58.068028Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ede/r3tmp/tmpfAdmRq/pdisk_1.dat 2025-03-28T09:32:58.230501Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:58.235697Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:58.235772Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:58.241636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25493 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:58.424443Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:58.447511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:58.520886Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:58.601744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:01.138223Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486791059850589600:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:01.138360Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ede/r3tmp/tmpOYtd7k/pdisk_1.dat 2025-03-28T09:33:01.238078Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:01.275762Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:01.275835Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:01.277096Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30037 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:01.458027Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:01.478507Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:01.547281Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:01.586021Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:04.364170Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486791071438182734:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:04.364288Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ede/r3tmp/tmpOeeEVa/pdisk_1.dat 2025-03-28T09:33:04.454218Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:04.495283Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:04.495371Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:04.497317Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5612 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:04.687033Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:33:04.706530Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:33:04.783954Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:04.857409Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:08.003262Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486791089358194855:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:08.003358Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ede/r3tmp/tmpYL5FB0/pdisk_1.dat 2025-03-28T09:33:08.112868Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:08.140035Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:08.140127Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:08.141446Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23096 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:08.338187Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:08.343287Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:08.358989Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:08.418016Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:08.459467Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:11.698396Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486791101532342698:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:11.698484Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ede/r3tmp/tmp66Y7V0/pdisk_1.dat 2025-03-28T09:33:11.832962Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:11.863156Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:11.863248Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:11.864718Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17583 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:12.101550Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:12.118402Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:12.172006Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:12.224456Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:15.681361Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486791121844210884:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:15.681463Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ede/r3tmp/tmpNqf4Xb/pdisk_1.dat 2025-03-28T09:33:15.847072Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:15.853688Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:15.853800Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:15.855517Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2312 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:16.083981Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:16.091201Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:16.102595Z node 7 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2025-03-28T09:33:16.107035Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:16.180452Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:16.254817Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] >> YdbProxy::DescribePath [GOOD] >> YdbProxy::DescribeTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] Test command err: 2025-03-28T09:33:10.597610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:10.598053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:10.598118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00162a/r3tmp/tmpfdZvI0/pdisk_1.dat 2025-03-28T09:33:10.980817Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:33:11.015115Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:11.053724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:11.053859Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:11.065109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:11.145362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:11.188588Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:33:11.188899Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:11.236615Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:11.236707Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:11.238269Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:33:11.238373Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:33:11.238436Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:33:11.238816Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:11.238944Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:11.239032Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:33:11.249744Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:11.265904Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:33:11.266066Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:11.266177Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:33:11.266211Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:11.266238Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:33:11.266270Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.266656Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:33:11.266772Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:33:11.267125Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:11.267167Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:11.267200Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:11.267257Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:11.267338Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:11.267436Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:11.267626Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:11.267700Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:11.268969Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:11.279655Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:11.279777Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:11.428400Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:11.433311Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:11.433394Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.434172Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:11.434220Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:11.434280Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:11.434536Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:11.434709Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:11.435457Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:11.435523Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:11.437683Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:11.438110Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-28T09:33:11.439599Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:11.439654Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.440353Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:11.440437Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:11.441353Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:11.441392Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:11.441452Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:11.441519Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:11.441574Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:11.441658Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.445345Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:11.447708Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:11.447768Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:11.448019Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:11.456917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:11.457045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:11.457115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:11.462310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:11.468002Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:11.618242Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:11.621522Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:11.699623Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:12.041371Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1jacyae410nmx7gtw8mtj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2ZiYjA5NjEtODA0YTY0OC01NDk2NTgyZi05M2YwNWMwOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:12.057305Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:852:2688], serverId# [1:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:12.057551Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:12.069985Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09 ... 86224037888 2025-03-28T09:33:20.232108Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:20.232153Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:20.232202Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:20.232611Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:20.232742Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:20.232962Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:20.233037Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:20.235060Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:20.246471Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:20.246595Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:20.407661Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:20.408280Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:20.408358Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:20.409470Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:20.409532Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:20.409582Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:20.409896Z node 3 
:TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:20.410083Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:20.411025Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:20.411123Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:20.411731Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:20.412347Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:20.414118Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:20.414181Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:20.416086Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:20.416204Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:20.417512Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:20.417613Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:20.417700Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:20.417790Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:20.417858Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:20.417976Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:20.419692Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:20.421879Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:20.421949Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:20.422140Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:20.432008Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:20.432131Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:20.432221Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:20.438608Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:20.446991Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:20.605525Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:20.609243Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:20.646182Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:20.808488Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1jk5d9wgwq92ypkv944mv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OWYzODc5YWEtMzJkNDA0ZGEtNTUzMzNiYzAtOGYwMzAzOTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:20.809142Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:852:2688], serverId# [3:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:20.809376Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:20.822586Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:20.822770Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:20.827303Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:860:2695], serverId# [3:861:2696], sessionId# [0:0:0] 2025-03-28T09:33:20.829410Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-28T09:33:20.840766Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-28T09:33:20.840836Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:20.841068Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:20.841104Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-03-28T09:33:20.841327Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:20.841378Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:20.841447Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:20.841517Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:20.841665Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:860:2695], serverId# [3:861:2696], sessionId# [0:0:0] 2025-03-28T09:33:20.842482Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:20.842796Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:20.843018Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:20.843055Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:20.843097Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-03-28T09:33:20.843359Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:20.843413Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-03-28T09:33:20.843986Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-03-28T09:33:20.844236Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 43, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:33:20.844335Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-03-28T09:33:20.844368Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-03-28T09:33:20.898190Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:20.898297Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-03-28T09:33:20.898801Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:20.898847Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:20.898888Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-03-28T09:33:20.899012Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:20.899068Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:20.899116Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] Test command err: 2025-03-28T09:33:10.415711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:10.416200Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:10.416263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00161c/r3tmp/tmpiAlWhz/pdisk_1.dat 2025-03-28T09:33:10.782720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:33:10.816681Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:10.854038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:10.854157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:10.865496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:10.945934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:10.982866Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:33:10.983230Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:11.022120Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:11.022235Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:11.023962Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:33:11.024075Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:33:11.024134Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:33:11.024486Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:11.024604Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:11.024674Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:33:11.035466Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:11.059742Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:33:11.059931Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:11.060051Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:33:11.060082Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:11.060113Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:33:11.060155Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.060680Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:33:11.060793Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:33:11.061233Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:11.061284Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:11.061318Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:11.061379Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:11.061472Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:11.061593Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:11.061803Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:11.061887Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:11.063454Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:11.074138Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:11.074262Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:11.222642Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:11.227578Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:11.227651Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.228487Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:11.228535Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:11.228580Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:11.228825Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:11.228971Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:11.229752Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:11.229816Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:11.231813Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:11.232245Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-28T09:33:11.233745Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:11.233799Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.234551Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:11.234632Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:11.235707Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:11.235747Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:11.235806Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:11.235876Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:11.235935Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:11.236049Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:11.239635Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:11.242001Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:11.242063Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:11.242308Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:11.250984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:11.251109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:11.251184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:11.256478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:11.262612Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:11.410007Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:11.413351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:11.475214Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:11.872707Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1ja6gfr653qtr6et6ewv9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjdjZWY3MGQtNGRhNWQ3YjktYTY2N2E4ZjItYzAzZTYzMDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:11.878297Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:852:2688], serverId# [1:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:11.878513Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:11.891728Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09 ... 86224037888 2025-03-28T09:33:20.560371Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:20.560418Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:20.560465Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:20.560887Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:20.561007Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:20.561220Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:20.561303Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:20.563680Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:20.578954Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:20.579079Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:20.738186Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:20.738749Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:20.738817Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:20.739751Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:20.739816Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:20.739870Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:20.740166Z node 3 
:TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:20.740328Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:20.741012Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:20.741095Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:20.741603Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:20.742094Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:20.744243Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:20.744306Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:20.745158Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:20.745242Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:20.746272Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:20.746331Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:20.746388Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:20.746459Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:20.746520Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:20.746624Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:20.755650Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:20.757892Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:20.757967Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:20.758144Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:20.766079Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:20.766179Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:20.766252Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:20.770672Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:20.776340Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:20.925137Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:20.927495Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:20.961995Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:21.081947Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1jkfw5xfcw5ys94r9e43d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzZhNjY4ZWYtNzE5MzA4YmYtY2RjMzBjYzItMjJlYzlhODQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:21.082596Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:852:2688], serverId# [3:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:21.082827Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:21.095188Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:21.095333Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:21.099509Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:860:2695], serverId# [3:861:2696], sessionId# [0:0:0] 2025-03-28T09:33:21.100756Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-28T09:33:21.112187Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-28T09:33:21.112289Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:21.112584Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:21.112631Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-03-28T09:33:21.112869Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:21.112926Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:21.112978Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:21.113041Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:21.113204Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:860:2695], serverId# [3:861:2696], sessionId# [0:0:0] 2025-03-28T09:33:21.114220Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:21.114601Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:21.114846Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:21.114897Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:21.114946Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-03-28T09:33:21.115209Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:21.115273Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-03-28T09:33:21.115894Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-03-28T09:33:21.116200Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 48, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:33:21.116319Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-03-28T09:33:21.116383Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-03-28T09:33:21.166866Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:21.166935Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-03-28T09:33:21.167367Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:21.167404Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:21.167442Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-03-28T09:33:21.167589Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:21.167654Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:21.167701Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
>> YdbProxy::RemoveDirectory [GOOD]
>> YdbProxy::StaticCreds
>> YdbProxy::CreateTopic [GOOD]
>> YdbProxy::DescribeConsumer
>> YdbProxy::ListDirectory [GOOD]
>> YdbProxy::DropTopic
>> YdbProxy::DropTable [GOOD]
>> YdbProxy::DescribeTopic
>> YdbProxy::AlterTable [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD]
Test command err: 2025-03-28T09:33:12.079253Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:12.079617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:12.079672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00160b/r3tmp/tmprNdWrG/pdisk_1.dat 2025-03-28T09:33:12.398165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:33:12.433856Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:12.472519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:12.472639Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:12.483846Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:12.564519Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:12.614838Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:688:2586] 2025-03-28T09:33:12.615022Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:12.649363Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:12.649515Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:12.650898Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:33:12.650960Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:33:12.651008Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:33:12.651284Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:12.651584Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:12.651627Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:717:2586] in generation 1 2025-03-28T09:33:12.652837Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:690:2588] 2025-03-28T09:33:12.653015Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:12.661189Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:12.661351Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:12.662239Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:33:12.662281Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-28T09:33:12.662305Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:33:12.662477Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:12.662771Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2590] 2025-03-28T09:33:12.662896Z node 1 
:TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:12.668482Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:12.668528Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:735:2588] in generation 1 2025-03-28T09:33:12.669431Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:12.669487Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:12.670351Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-03-28T09:33:12.670389Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-03-28T09:33:12.670414Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-03-28T09:33:12.670587Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:12.670650Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:12.670703Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:740:2590] in generation 1 2025-03-28T09:33:12.681462Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:12.703506Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:33:12.703658Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:12.703737Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:744:2618] 2025-03-28T09:33:12.703762Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:12.703785Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:33:12.703809Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:12.704059Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:12.704088Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-28T09:33:12.704118Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:12.704191Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:745:2619] 2025-03-28T09:33:12.704218Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:33:12.704252Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-28T09:33:12.704265Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:33:12.704516Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:12.704542Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-03-28T09:33:12.704576Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:12.704604Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:746:2620] 2025-03-28T09:33:12.704617Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-28T09:33:12.704628Z node 1 :TX_DATASHARD INFO: Cannot 
activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-03-28T09:33:12.704643Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:33:12.704743Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:33:12.704810Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:33:12.705224Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:12.705268Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:12.705306Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:12.705340Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:12.705373Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-28T09:33:12.705445Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-28T09:33:12.705508Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:712:2601], sessionId# [0:0:0] 2025-03-28T09:33:12.705541Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:33:12.705555Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:12.705567Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-28T09:33:12.705581Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:33:12.705627Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-03-28T09:33:12.705677Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-03-28T09:33:12.705807Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:12.705998Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:12.706088Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:12.706479Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:713:2602], sessionId# [0:0:0] 2025-03-28T09:33:12.706512Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:33:12.706532Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:12.706548Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-03-28T09:33:12.706591Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:33:12.706794Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:33:12.706891Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:33:12.706927Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-28T09:33:12.707103Z node 1 :TX_DATASHARD DEBUG: Server connected at 
leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:719:2605], sessionId# [0:0:0] 2025-03-28T09:33:12.707169Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-03-28T09:33:12.707274Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-03-28T09:33:12.707308Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-03-28T09:33:12.708593Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:12.708646Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:33:12.708672Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-28T09:33:12.719611Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:12.719738Z nod ... 5887Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:20.965922Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:20.965957Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:20.966296Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:20.966402Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:20.966550Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:20.966612Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:20.967926Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:20.978655Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:20.978773Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:21.127614Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:21.128026Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:21.128081Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:21.128927Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:21.128996Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:21.129044Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:21.129275Z node 3 :TX_DATASHARD DEBUG: 
LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:21.129420Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:21.129953Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:21.130031Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:21.130486Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:21.130895Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:21.132385Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:21.132435Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:21.133083Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:21.133151Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:21.133966Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:21.134007Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:21.134052Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:21.134121Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:21.134175Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:21.134264Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:21.135433Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:21.137581Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:21.137650Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:21.137826Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:21.145350Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:21.145451Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:21.145536Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:21.150652Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:21.156468Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:21.307666Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:21.334518Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:21.368317Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:21.434864Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1jkvqaa8kdk4d88vzhy0b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZmJiODMzYTAtM2U5Nzg5M2ItM2E0ZjUzNzMtMzUwMWI2YWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:21.435402Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:852:2688], serverId# [3:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:21.435599Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:21.447691Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:21.447840Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:21.621005Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1jm5n6e2ccbf6zccggk1m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjU1MGU5YTgtMmQ5ODg2NS0xOTUwYWFiZS03YTgwZjJj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:21.624112Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] { items { uint64_value: 0 } } 2025-03-28T09:33:21.631470Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:891:2719], serverId# [3:892:2720], sessionId# [0:0:0] 2025-03-28T09:33:21.632450Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-28T09:33:21.644803Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-28T09:33:21.644886Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:21.644948Z node 3 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-03-28T09:33:21.645560Z node 3 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-03-28T09:33:21.645619Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:21.645812Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:21.645855Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037888 2025-03-28T09:33:21.646074Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:21.646120Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:21.646166Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:21.646214Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:21.646343Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:891:2719], serverId# [3:892:2720], sessionId# [0:0:0] 
2025-03-28T09:33:21.715973Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1jmbe7n2mwy5p0srd27zj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjU1MGU5YTgtMmQ5ODg2NS0xOTUwYWFiZS03YTgwZjJj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:21.716522Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:21.728115Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:21.728233Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:21.737695Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=MjU1MGU5YTgtMmQ5ODg2NS0xOTUwYWFiZS03YTgwZjJj, ActorId: [3:859:2694], ActorState: ExecuteState, TraceId: 01jqe1jmbe7n2mwy5p0srd27zj, Create QueryResponse for error on request, msg: 2025-03-28T09:33:21.738700Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1jmbe7n2mwy5p0srd27zj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MjU1MGU5YTgtMmQ5ODg2NS0xOTUwYWFiZS03YTgwZjJj, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:21.739076Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:21.739501Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:21.739555Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888
>> YdbProxy::CreateTable [GOOD]
>> YdbProxy::CreateCdcStream
>> ColumnShardTiers::DSConfigs [GOOD]
>> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTable [GOOD]
Test command err: 2025-03-28T09:33:19.352022Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791136143480274:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:19.352158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e13/r3tmp/tmpUlux3C/pdisk_1.dat 2025-03-28T09:33:19.776051Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:19.798949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:19.799069Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:19.801939Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1321 TServer::EnableGrpc on GrpcPort 21159, node 1 2025-03-28T09:33:20.083255Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:20.083280Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:20.083290Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:20.083394Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1321 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:20.603641Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:20.626119Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:22.205470Z node 1 :TX_PROXY ERROR: Actor# [1:7486791149028382799:2305] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-03-28T09:33:22.218092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:33:22.338182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:33:22.356163Z node 1 :TX_PROXY ERROR: Actor# [1:7486791149028382915:2384] txid# 281474976710661, issues: { message: "Can\'t drop unknown column: \'extra\'" severity: 1 }
>> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb
>> TMiniKQLEngineFlatTest::TestEmptyProgram
>> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey
>> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists
>> TMiniKQLEngineFlatTest::TestPureProgram
>> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb
>> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns
>> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportListTypeYdb
>> TMiniKQLEngineFlatHostTest::ShardId [GOOD]
>> TMiniKQLEngineFlatHostTest::Basic [GOOD]
>> TMiniKQLEngineFlatTest::TestAbort
>> TMiniKQLEngineFlatTest::TestEmptyProgram [GOOD]
>> TMiniKQLEngineFlatTest::TestEraseRow
>> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists
>> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDataTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb
>> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems
>> TMiniKQLEngineFlatTest::TestPureProgram [GOOD]
>> YdbProxy::MakeDirectory [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExists [GOOD]
>> YdbProxy::OAuthToken
>> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive
>> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb
>> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue
>> TMiniKQLProtoTestYdb::TestExportListTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportIntegralYdb
>> TMiniKQLEngineFlatTest::TestAbort [GOOD]
>> TMiniKQLEngineFlatTest::TestCASBoth2Fail1
>> TMiniKQLEngineFlatTest::TestEraseRow [GOOD]
>> TMiniKQLEngineFlatTest::TestEraseRowNullKey
>> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists
>> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDictTypeYdb
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull
>> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive
>> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportStructTypeYdb
>> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload
>> TMiniKQLProtoTestYdb::TestExportIntegralYdb [GOOD]
>> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 [GOOD]
>> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb [GOOD]
>> TMiniKQLEngineFlatTest::TestCASBoth2Fail2
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb
>> TMiniKQLEngineFlatTest::TestEraseRowNullKey [GOOD]
>> TMiniKQLEngineFlatTest::TestEraseRowManyShards
>> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowPayload [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey
>> TMiniKQLProtoTestYdb::TestExportDictTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportBoolYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDoubleYdb
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeNullNull
>> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo
>> TMiniKQLProtoTestYdb::TestExportStructTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb [GOOD]
>> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload [GOOD]
>> TMiniKQLProtoTestYdb::TestExportOptionalYdb
>> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload
>> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 [GOOD]
>> TMiniKQLEngineFlatTest::TestBug998
>> TMiniKQLEngineFlatTest::TestEraseRowManyShards [GOOD]
>> TMiniKQLEngineFlatTest::TestCASBoth2Success [GOOD]
>> TMiniKQLEngineFlatTest::TestEraseRowNoShards
>> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive
>> TMiniKQLProtoTestYdb::TestExportDoubleYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDecimalYdb
>> TMiniKQLEngineFlatTest::TestSelectRangeNullNull [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeNoShards
>> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo [GOOD]
>> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportVoidYdb
>> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo
>> TMiniKQLProtoTestYdb::TestExportOptionalYdb [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload [GOOD]
>> TMiniKQLEngineFlatTest::TestBug998 [GOOD]
>> TMiniKQLEngineFlatTest::TestAcquireLocks
>> TMiniKQLEngineFlatTest::TestUpdateRowManyShards
>> TMiniKQLProtoTestYdb::TestExportListYdb
>> TMiniKQLEngineFlatTest::TestEraseRowNoShards [GOOD]
>> TMiniKQLEngineFlatTest::TestDiagnostics
>> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowManyShards
>> TMiniKQLProtoTestYdb::TestExportDecimalYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb
>> TMiniKQLEngineFlatTest::TestSelectRangeNoShards [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions
>> TMiniKQLProtoTestYdb::TestExportVoidYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportStringYdb
>> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo
>> TMiniKQLProtoTestYdb::TestExportListYdb [GOOD]
>> TMiniKQLEngineFlatTest::TestAcquireLocks [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowManyShards [GOOD]
>> TMiniKQLEngineFlatTest::TestUpdateRowNoShards [GOOD]
>> TMiniKQLEngineFlatTest::TestTopSortPushdownPk
>> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers
>> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb
>> TMiniKQLEngineFlatTest::TestDiagnostics [GOOD]
>> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown
>> TMiniKQLEngineFlatTest::TestSelectRowManyShards [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRowNoShards
>> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1
>> TMiniKQLProtoTestYdb::TestExportStringYdb [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo [GOOD]
>> TMiniKQLProtoTestYdb::TestExportUuidYdb [GOOD]
>> TMiniKQLEngineFlatTest::TestMapsPushdown
>> TMiniKQLProtoTestYdb::TestExportTupleYdb
>> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb
>> TMiniKQLEngineFlatTest::TestTopSortPushdownPk [GOOD]
>> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers [GOOD]
>> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda [GOOD]
>> TMiniKQLEngineFlatTest::NoOrderedMapPushdown
>> TMiniKQLEngineFlatTest::TestTopSortPushdown
>> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown
>> TMiniKQLEngineFlatTest::TestSelectRowNoShards [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems
>> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportDictYdb
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 [GOOD]
>> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3
>> TMiniKQLProtoTestYdb::TestExportTupleYdb [GOOD]
>> TMiniKQLProtoTestYdb::TestExportStructYdb
>> TMiniKQLEngineFlatTest::TestMapsPushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown [GOOD]
>> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs [GOOD]
Test command err: 2025-03-28T09:31:25.634746Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:25.635246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:25.635319Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001c4e/r3tmp/tmp21FepO/pdisk_1.dat TServer::EnableGrpc on GrpcPort 13584, node 1 TClient is connected to server localhost:7600 2025-03-28T09:31:26.200704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:31:26.237649Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:26.245689Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:26.245748Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:26.245782Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:26.246044Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:31:26.281915Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:26.282052Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:26.293985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-03-28T09:31:38.261200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:751:2629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:38.261377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:38.264848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-03-28T09:31:38.427058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:868:2707], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:38.427143Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:38.427351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:873:2712], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:38.430629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-03-28T09:31:38.554669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:875:2714], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:31:38.819401Z node 1 :TX_PROXY ERROR: Actor# [1:971:2781] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:39.425828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:39.919000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-03-28T09:31:40.620235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.393394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.894004Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.948286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:31:43.252109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-28T09:31:58.390255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715708:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 2025-03-28T09:31:59.948600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:31:59.948667Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", 
AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-28T09:32:00.254124Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:215;event=skip_tier_manager_start;tier=/Root/tier1;has_secrets=1;tier_config=0; 2025-03-28T09:32:00.254206Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-03-28T09:32:00.254270Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={}; 2025-03-28T09:32:00.254347Z node 1 :TX_TIERING INFO: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-03-28T09:32:00.254593Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:154;event=watch_scheme_objects;names=/Root/tier1; 2025-03-28T09:32:00.254739Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:62;event=TEvRefreshSubscriberData;snapshot=secrets; 2025-03-28T09:32:00.254786Z node 1 :TX_TIERING INFO: fline=manager.cpp:271;event=update_secrets;tablet=0; 2025-03-28T09:32:00.254839Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-03-28T09:32:00.254904Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:32:00.256648Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:111;component=TSchemeObjectWatcher;event=NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult;path=Root/tier1; 2025-03-28T09:32:00.257882Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:140;event=object_fetched;path=/Root/tier1; 2025-03-28T09:32:00.258036Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:75;component=tiering_manager;event=object_updated;path=/Root/tier1; 2025-03-28T09:32:00.258159Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=1; 2025-03-28T09:32:00.258240Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-03-28T09:32:00.258301Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-28T09:32:11.589173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715723:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION=" ... 
:Tier '/Root/tier2' stopped at tablet 0 2025-03-28T09:32:59.977151Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-03-28T09:32:59.977189Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2025-03-28T09:33:10.998899Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-28T09:33:10.999029Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-28T09:33:10.999059Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-28T09:33:10.999102Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-28T09:33:10.999141Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-28T09:33:10.999488Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-28T09:33:10.999537Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037892;has_config=0; 2025-03-28T09:33:10.999595Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 2025-03-28T09:33:10.999649Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037892 2025-03-28T09:33:10.999702Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-03-28T09:33:10.999754Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037892 2025-03-28T09:33:10.999813Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:10.999854Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-28T09:33:10.999875Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037893;has_config=0; 2025-03-28T09:33:10.999898Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 2025-03-28T09:33:10.999920Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037893 2025-03-28T09:33:10.999936Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-03-28T09:33:10.999959Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037893 2025-03-28T09:33:11.000009Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:11.000039Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-28T09:33:11.000065Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037894;has_config=0; 2025-03-28T09:33:11.000095Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037894 2025-03-28T09:33:11.000126Z node 1 :TX_TIERING DEBUG: manager.cpp:142 
:Restarting tier '/Root/tier1' at tablet 72075186224037894 2025-03-28T09:33:11.000150Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-03-28T09:33:11.000175Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037894 2025-03-28T09:33:11.000204Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:11.000275Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-28T09:33:11.000303Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-03-28T09:33:11.000332Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-03-28T09:33:11.000362Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-03-28T09:33:11.000381Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-28T09:33:11.000404Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-03-28T09:33:11.000428Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:11.000906Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-28T09:33:11.000934Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-03-28T09:33:11.000957Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-03-28T09:33:11.000980Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-03-28T09:33:11.001000Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-28T09:33:11.001026Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-03-28T09:33:11.001053Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:11.001520Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3099:4405];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-03-28T09:33:11.001631Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3105:4408];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-03-28T09:33:11.001757Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3114:4414];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2025-03-28T09:33:22.062847Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-28T09:33:22.062990Z node 1 :TX_TIERING DEBUG: 
fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-28T09:33:22.063030Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-28T09:33:22.063089Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-28T09:33:22.063135Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-28T09:33:22.063292Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-28T09:33:22.063349Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-03-28T09:33:22.063406Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-28T09:33:22.063491Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:22.063752Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-28T09:33:22.064313Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-28T09:33:22.064352Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-03-28T09:33:22.064386Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-28T09:33:22.064434Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:22.064520Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-28T09:33:22.064555Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2025-03-28T09:33:22.064592Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-03-28T09:33:22.064644Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:22.064677Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-28T09:33:22.064704Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2025-03-28T09:33:22.064733Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-03-28T09:33:22.064776Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:22.064935Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-28T09:33:22.064967Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2025-03-28T09:33:22.064996Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-03-28T09:33:22.065036Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:22.065712Z node 1 :TX_TIERING DEBUG: 
fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-28T09:33:22.065750Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-03-28T09:33:22.065782Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-28T09:33:22.065827Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:22.066208Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3099:4405];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-03-28T09:33:22.066299Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3105:4408];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-03-28T09:33:22.066353Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3114:4414];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable >> TMiniKQLEngineFlatTest::TestTopSortPushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestLengthPushdown >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes >> TMiniKQLProtoTestYdb::TestExportDictYdb [GOOD] >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns >> TMiniKQLProtoTestYdb::TestExportStructYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantYdb >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestAcquireLocks [GOOD] >> TMiniKQLProgramBuilderTest::TestDiagnostics >> TMiniKQLEngineFlatTest::TestLengthPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestInternalResult >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSomePushDown >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] Test command err: 2025-03-28T09:33:12.492561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:12.492983Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:12.493035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001609/r3tmp/tmpW1aRhP/pdisk_1.dat 2025-03-28T09:33:12.794205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:33:12.834543Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:12.874426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:12.874573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:12.886214Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:12.974398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:13.017333Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:33:13.017650Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:13.050976Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:13.051087Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:13.052531Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:33:13.052600Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:33:13.052647Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:33:13.052940Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:13.053037Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:13.053107Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:33:13.063806Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:13.103571Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:33:13.103778Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:13.103933Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:33:13.103993Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:13.104036Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:33:13.104097Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:13.104562Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:33:13.104684Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:33:13.105153Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:13.105209Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:13.105247Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:13.105291Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:13.105409Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:13.105556Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:13.105817Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:13.105918Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:13.107680Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:13.118409Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:13.118563Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:13.269458Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:13.274585Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:13.274671Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:13.275592Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:13.275673Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:13.275744Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:13.276018Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:13.276221Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:13.277136Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:13.277220Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:13.279362Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:13.279841Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-28T09:33:13.281424Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:13.281476Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:13.282399Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:13.282487Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:13.283651Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:13.283701Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:13.283763Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:13.283841Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:13.283906Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:13.284031Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:13.288013Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:13.290814Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:13.290883Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:13.291145Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:13.301088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:13.301204Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:13.301302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:13.307165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:13.313942Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:13.467083Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:13.470806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:13.546258Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:13.934098Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1jc6j4waxafsxv1wcnya0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDNlMGJmMDEtOTU4ZDU0MzktODY0OTQzYTgtODdlMGEwODI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:13.940521Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:852:2688], serverId# [1:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:13.940789Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:13.953790Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09 ... 86224037888 2025-03-28T09:33:22.846867Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:22.846913Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:22.846965Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:22.847391Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-03-28T09:33:22.847519Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:22.847726Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:22.847823Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:22.849575Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:22.860402Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:22.860532Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:33:23.010870Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-03-28T09:33:23.011299Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:33:23.011359Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:23.012290Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:23.012348Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:33:23.012398Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:33:23.012659Z node 3 
:TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:33:23.012810Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:23.013424Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:23.013494Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:33:23.013941Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:33:23.014379Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:23.015903Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:33:23.015971Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:23.016706Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:33:23.016786Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:23.017665Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:23.017715Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:23.017763Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:33:23.017830Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:33:23.017883Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:33:23.017998Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:23.019266Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:23.021638Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:33:23.021707Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:33:23.021899Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:33:23.030485Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:23.030599Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:23.030677Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:23.036534Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:23.042944Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:23.193426Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:23.196094Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:23.231125Z node 3 :TX_PROXY ERROR: Actor# [3:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:23.333288Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1jnpm5zfsqg87mxjpad38, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ODFkODlkZDUtY2UzM2M5MTctNjJkYzQyNzAtZmI4Y2MxNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:23.333917Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:852:2688], serverId# [3:853:2689], sessionId# [0:0:0] 2025-03-28T09:33:23.334199Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:23.347245Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:23.347388Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:23.353902Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:860:2695], serverId# [3:861:2696], sessionId# [0:0:0] 2025-03-28T09:33:23.355007Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2025-03-28T09:33:23.366400Z node 3 :TX_DATASHARD INFO: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2025-03-28T09:33:23.366485Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:23.366799Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:23.366848Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 3, at: 72075186224037888 2025-03-28T09:33:23.367114Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:23.367165Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:23.367219Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:23.367283Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:23.367451Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037888, clientId# [3:860:2695], serverId# [3:861:2696], sessionId# [0:0:0] 2025-03-28T09:33:23.368481Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:23.368839Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:23.369023Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:23.369068Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:23.369116Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2025-03-28T09:33:23.369337Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:23.369398Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 
2025-03-28T09:33:23.370072Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2025-03-28T09:33:23.370299Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:33:23.370420Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2025-03-28T09:33:23.370465Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2025-03-28T09:33:23.418845Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:23.418921Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037888 2025-03-28T09:33:23.419353Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:23.419391Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:23.419429Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2025-03-28T09:33:23.419554Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:23.419619Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:23.419665Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::TestInternalResult [GOOD] >> TMiniKQLEngineFlatTest::TestIndependentSelects >> TMiniKQLEngineFlatTest::TestSomePushDown [GOOD] >> TMiniKQLEngineFlatTest::TestTakePushdown >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectRow >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] >> TMiniKQLEngineFlatTest::TestIndependentSelects [GOOD] >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] >> TMiniKQLEngineFlatTest::TestTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown >> TMiniKQLProgramBuilderTest::TestSelectRow [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> 
TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] Test command err: PrepareShardPrograms (491): too many shard readsets (2 > 1), src tables: [200:301:0], dst tables: [200:301:0] Type { Kind: Struct } >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterName >> TopicAutoscaling::PartitionSplit_PreferedPartition_PQv1 [GOOD] >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] Test command err: SetProgram (370): ydb/core/engine/mkql_engine_flat.cpp:183: ExtractResultType(): requirement !label.StartsWith(TxInternalResultPrefix) failed. Label can't be used in SetResult as it's reserved for internal purposes: __cantuse PrepareShardPrograms (491): too many shard readsets (1 > 0), src tables: [200:301:0], dst tables: [200:302:0] Type { Kind: Struct } >> TMiniKQLProgramBuilderTest::TestInvalidParameterName [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType >> YdbProxy::DescribeConsumer [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> YdbProxy::DescribeTopic [GOOD] >> TPersQueueMirrorer::TestBasicRemote [GOOD] >> TPersQueueMirrorer::ValidStartStream >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] >> YdbProxy::StaticCreds [GOOD] >> YdbProxy::DescribeTable [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> YdbProxy::DropTopic [GOOD] >> TFlatTest::AutoMergeBySize [GOOD] >> TFlatTest::AutoSplitMergeQueue >> TGRpcConsoleTest::SimpleConfigTest [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> YdbProxy::CreateCdcStream [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |92.9%| [TA] $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] Test command err: 2025-03-28T09:33:19.269748Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791139603136671:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:19.269878Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dbb/r3tmp/tmpRvnjwO/pdisk_1.dat 2025-03-28T09:33:19.741704Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:19.764877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:19.765004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:19.766879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5383 TServer::EnableGrpc on GrpcPort 17457, node 1 2025-03-28T09:33:20.083394Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:20.083429Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:20.083447Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:20.083573Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5383 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:20.638264Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:33:20.699666Z node 1 :TX_PROXY ERROR: Actor# [1:7486791143898104589:2297] txid# 281474976710658, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } 2025-03-28T09:33:22.708421Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791150296264872:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:22.708545Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dbb/r3tmp/tmpVlDcti/pdisk_1.dat 2025-03-28T09:33:22.801893Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:22.837631Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:22.837727Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:22.839389Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11233 TServer::EnableGrpc on GrpcPort 4406, node 2 2025-03-28T09:33:23.008016Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:23.008049Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:23.008057Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:23.008182Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11233 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:23.253438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |92.9%| [TA] {RESULT} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTopic [GOOD] Test command err: 2025-03-28T09:33:19.269673Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791135869097214:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:19.269765Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dfd/r3tmp/tmpV1b8TO/pdisk_1.dat 2025-03-28T09:33:19.699593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:19.700339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:19.707489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:19.722598Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22768 TServer::EnableGrpc on GrpcPort 61837, node 1 2025-03-28T09:33:20.087288Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:20.087314Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:20.087322Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:20.087465Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22768 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:20.609955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:33:22.418717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:33:22.545817Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2025-03-28T09:33:22.547452Z node 1 :TX_PROXY ERROR: Actor# [1:7486791148753999897:2401] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-03-28T09:33:23.038388Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791155973234396:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:23.038473Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dfd/r3tmp/tmpjCguFG/pdisk_1.dat 2025-03-28T09:33:23.125045Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:23.168371Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:23.168451Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:23.169898Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11243 TServer::EnableGrpc on GrpcPort 22102, node 2 2025-03-28T09:33:23.336591Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:23.336610Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:23.336616Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:23.336726Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11243 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:23.612153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] Test command err: 2025-03-28T09:33:07.828202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:07.828677Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:07.828743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001644/r3tmp/tmpksy4rF/pdisk_1.dat 2025-03-28T09:33:08.187360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:33:08.229290Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:08.268985Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:08.269119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:08.280618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:08.364296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:08.418077Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:688:2586] 2025-03-28T09:33:08.418373Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:08.468591Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:08.468780Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:08.470501Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:33:08.470570Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:33:08.470650Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:33:08.471165Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:08.471580Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:08.471647Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:717:2586] in generation 1 2025-03-28T09:33:08.473073Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:690:2588] 2025-03-28T09:33:08.473264Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:08.483959Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:08.484216Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:08.485548Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:33:08.485610Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-28T09:33:08.485666Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:33:08.485935Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:08.486344Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:693:2590] 2025-03-28T09:33:08.486543Z node 1 
:TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:08.495467Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:08.495542Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:735:2588] in generation 1 2025-03-28T09:33:08.496850Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:08.496943Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:33:08.498214Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-03-28T09:33:08.498271Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-03-28T09:33:08.498312Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-03-28T09:33:08.498584Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:33:08.498711Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:33:08.498770Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:740:2590] in generation 1 2025-03-28T09:33:08.509758Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:08.531508Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:33:08.531672Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:08.531761Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:744:2618] 2025-03-28T09:33:08.531790Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:33:08.531832Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:33:08.531862Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:08.532121Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:08.532157Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-28T09:33:08.532193Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:08.532252Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:745:2619] 2025-03-28T09:33:08.532268Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:33:08.532285Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-28T09:33:08.532300Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:33:08.532547Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:33:08.532566Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-03-28T09:33:08.532595Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:33:08.532670Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:746:2620] 2025-03-28T09:33:08.532689Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-28T09:33:08.532709Z node 1 :TX_DATASHARD INFO: Cannot 
activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-03-28T09:33:08.532727Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:33:08.532833Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:33:08.532894Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:33:08.533350Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:08.533393Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:08.533424Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:33:08.533454Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:08.533489Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-28T09:33:08.533537Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-28T09:33:08.533604Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:678:2581], serverId# [1:712:2601], sessionId# [0:0:0] 2025-03-28T09:33:08.533653Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:33:08.533677Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:08.533691Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-28T09:33:08.533709Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:33:08.533753Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037890 2025-03-28T09:33:08.533787Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037890 2025-03-28T09:33:08.533905Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:08.534121Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:33:08.534206Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:33:08.534538Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:679:2582], serverId# [1:713:2602], sessionId# [0:0:0] 2025-03-28T09:33:08.534567Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:33:08.534584Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:08.534602Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037890 TxInFly 0 2025-03-28T09:33:08.534660Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:33:08.534864Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:33:08.534963Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:33:08.534999Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-28T09:33:08.535233Z node 1 :TX_DATASHARD DEBUG: Server connected at 
leader tablet# 72075186224037890, clientId# [1:680:2583], serverId# [1:719:2605], sessionId# [0:0:0] 2025-03-28T09:33:08.535327Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-03-28T09:33:08.535399Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037890 txId 281474976715657 ssId 72057594046644480 seqNo 2:3 2025-03-28T09:33:08.535444Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037890 2025-03-28T09:33:08.536805Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:33:08.536870Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:33:08.536899Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-28T09:33:08.548436Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:08.548543Z nod ... [2000:281474976715663] at 72075186224037888 for LoadAndWaitInRS 2025-03-28T09:33:25.288390Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:25.288670Z node 3 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715663 2025-03-28T09:33:25.288730Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2000 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 19 Seqno# 6 Flags# 0} 2025-03-28T09:33:25.288788Z node 3 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-28T09:33:25.289107Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:33:25.289155Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:33:25.289187Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [2000:281474976715663] at 72075186224037890 for LoadAndWaitInRS 2025-03-28T09:33:25.289434Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:25.300701Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:25.300795Z node 3 :TX_DATASHARD DEBUG: Complete [2000 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [3:1103:2847], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:33:25.300879Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 2000 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 5} 2025-03-28T09:33:25.300939Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:25.301046Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 2025-03-28T09:33:25.301141Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 
2025-03-28T09:33:25.301206Z node 3 :TX_DATASHARD DEBUG: Complete [2000 : 281474976715663] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1103:2847], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:33:25.301253Z node 3 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037890 {TEvReadSet step# 2000 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 6} 2025-03-28T09:33:25.301279Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:33:25.301348Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1103:2847] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037888, status# 2 2025-03-28T09:33:25.301400Z node 3 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715663 2025-03-28T09:33:25.301432Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1103:2847] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715663, shard# 72075186224037890, status# 2 2025-03-28T09:33:25.301472Z node 3 :TX_DATASHARD DEBUG: [DistEraser] [3:1103:2847] Reply: txId# 281474976715663, status# OK, error# 2025-03-28T09:33:25.301752Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-03-28T09:33:25.301793Z node 3 :TX_DATASHARD DEBUG: Conditional erase complete: cookie: 4, at: 72075186224037889 2025-03-28T09:33:25.302091Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:33:25.302126Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:25.302158Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-28T09:33:25.302225Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:33:25.302326Z node 3 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1098:2843], serverId# [3:1099:2844], sessionId# [0:0:0] 2025-03-28T09:33:25.303618Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:33:25.303986Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:33:25.304162Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:33:25.304205Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:25.304251Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037889 for WaitForStreamClearance 2025-03-28T09:33:25.304524Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:25.304586Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:33:25.305238Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037889, TxId: 281474976715666, MessageQuota: 1 2025-03-28T09:33:25.305479Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037889, TxId: 281474976715666, Size: 70, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:33:25.305592Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037889, TxId: 281474976715666, PendingAcks: 0 
2025-03-28T09:33:25.305633Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037889, TxId: 281474976715666, MessageQuota: 0 2025-03-28T09:33:25.307226Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037889 2025-03-28T09:33:25.307276Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037889 2025-03-28T09:33:25.307724Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:33:25.307758Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:25.307796Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715666] at 72075186224037889 for ReadTableScan 2025-03-28T09:33:25.307905Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:25.307988Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:33:25.308035Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:33:25.332124Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:33:25.332508Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:33:25.332710Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:25.332757Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:25.332806Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715667] at 72075186224037888 for WaitForStreamClearance 2025-03-28T09:33:25.333045Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:25.333113Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:25.333736Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715667, MessageQuota: 1 2025-03-28T09:33:25.334001Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037888, TxId: 281474976715667, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:33:25.334145Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715667, PendingAcks: 0 2025-03-28T09:33:25.334193Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037888, TxId: 281474976715667, MessageQuota: 0 2025-03-28T09:33:25.335738Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037888 2025-03-28T09:33:25.335790Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715667, at: 72075186224037888 2025-03-28T09:33:25.336209Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:33:25.336247Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:25.336283Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715667] at 72075186224037888 for ReadTableScan 2025-03-28T09:33:25.336421Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:25.336483Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:33:25.336527Z node 3 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:33:25.367914Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-03-28T09:33:25.368285Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-03-28T09:33:25.368465Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:33:25.368509Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:25.368551Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037890 for WaitForStreamClearance 2025-03-28T09:33:25.368745Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:25.368811Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:33:25.369352Z node 3 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2025-03-28T09:33:25.369551Z node 3 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:33:25.369676Z node 3 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2025-03-28T09:33:25.369726Z node 3 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2025-03-28T09:33:25.370961Z node 3 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-03-28T09:33:25.371001Z node 3 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715668, at: 72075186224037890 2025-03-28T09:33:25.371335Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:33:25.371395Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2025-03-28T09:33:25.371431Z node 3 :TX_DATASHARD DEBUG: Found ready candidate operation [0:281474976715668] at 72075186224037890 for ReadTableScan 2025-03-28T09:33:25.371530Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:33:25.371572Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:33:25.371605Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::StaticCreds [GOOD] Test command err: 2025-03-28T09:33:19.273805Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791135777471929:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:19.282814Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e01/r3tmp/tmpMfoXCX/pdisk_1.dat 2025-03-28T09:33:19.736160Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:19.736269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:19.745752Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:19.758051Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:22100 TServer::EnableGrpc on GrpcPort 8732, node 1 2025-03-28T09:33:20.081229Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:20.081251Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:20.081259Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:20.081368Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22100 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:20.620330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:33:20.695404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:33:20.722679Z node 1 :TX_PROXY ERROR: Actor# [1:7486791140072439887:2328] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2025-03-28T09:33:22.622586Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791150137210764:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:22.622665Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e01/r3tmp/tmpsV3bZu/pdisk_1.dat 2025-03-28T09:33:22.706493Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:22.753264Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:22.753392Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:22.754450Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3977 TServer::EnableGrpc on GrpcPort 11285, node 2 2025-03-28T09:33:22.916885Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:22.916909Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:22.916915Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:22.917035Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3977 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:23.146253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:33:23.164238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480 TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743154403193 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743154403193 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "user1" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 ... (TRUNCATED) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] Test command err: 2025-03-28T09:31:27.944973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:27.945384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:27.945436Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001c18/r3tmp/tmpd6o5un/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6102, node 1 TClient is connected to server localhost:21995 2025-03-28T09:31:28.501925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:31:28.543906Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:28.547630Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:28.547681Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:28.547713Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:28.547935Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:31:28.582916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:28.583040Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:28.594425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-03-28T09:31:40.273867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:753:2633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:40.273978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:763:2638], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:40.274053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:40.279920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-28T09:31:40.299331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:767:2641], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-28T09:31:40.358868Z node 1 :TX_PROXY ERROR: Actor# [1:818:2673] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:40.661509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-03-28T09:31:41.633306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.079999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-03-28T09:31:42.924185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-03-28T09:31:43.719366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:44.095625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-28T09:31:45.314461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:31:45.644094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-28T09:32:01.572238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715708:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 2025-03-28T09:32:02.113967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:32:02.114048Z node 1 :IMPORT WARN: Table profiles 
were not loaded FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-28T09:32:02.459067Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:215;event=skip_tier_manager_start;tier=/Root/tier1;has_secrets=1;tier_config=0; 2025-03-28T09:32:02.459158Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-03-28T09:32:02.459229Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={}; 2025-03-28T09:32:02.459349Z node 1 :TX_TIERING INFO: fline=manager.cpp:128;event=start_subscribing_metadata; 2025-03-28T09:32:02.459654Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:154;event=watch_scheme_objects;names=/Root/tier1; 2025-03-28T09:32:02.459919Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:62;event=TEvRefreshSubscriberData;snapshot=secrets; 2025-03-28T09:32:02.459967Z node 1 :TX_TIERING INFO: fline=manager.cpp:271;event=update_secrets;tablet=0; 2025-03-28T09:32:02.460022Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-03-28T09:32:02.460117Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:32:02.461788Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:111;component=TSchemeObjectWatcher;event=NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult;path=Root/tier1; 2025-03-28T09:32:02.490315Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:140;event=object_fetched;path=/Root/tier1; 2025-03-28T09:32:02.490507Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:75;component=tiering_manager;event=object_updated;path=/Root/tier1; 2025-03-28T09:32:02.490665Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=1; 2025-03-28T09:32:02.490816Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-03-28T09:32:02.490892Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-28T09:32:14.053846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715723:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc2", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", 
AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-28T09:32:15.246657Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:215;event=skip_tier_manager_start;tier=/Root/tier2;has_secrets=1;tier_config=0; 2025-03-28T09:32:15.246796Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:215;event=skip_tier_manager_start;tier=/Root/tier1;has_secrets=1;tier_config=0; 2025-03-28T09:32:15.246842Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:196;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2025-03-28T09:32:15.246885 ... :Tier '/Root/tier2' stopped at tablet 0 2025-03-28T09:33:02.129457Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier2' started at tablet 0 2025-03-28T09:33:02.129512Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2025-03-28T09:33:13.175876Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-28T09:33:13.176122Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-28T09:33:13.176169Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-28T09:33:13.176204Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-28T09:33:13.176248Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2025-03-28T09:33:13.176917Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-28T09:33:13.176973Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-03-28T09:33:13.177029Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-03-28T09:33:13.177085Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-03-28T09:33:13.177128Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-28T09:33:13.177196Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-03-28T09:33:13.177259Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:13.177515Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-28T09:33:13.177552Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037892;has_config=0; 2025-03-28T09:33:13.177585Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 2025-03-28T09:33:13.177617Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037892 2025-03-28T09:33:13.177642Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-03-28T09:33:13.177678Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037892 2025-03-28T09:33:13.177722Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:13.177754Z node 
1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-28T09:33:13.177778Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037893;has_config=0; 2025-03-28T09:33:13.177805Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 2025-03-28T09:33:13.177832Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037893 2025-03-28T09:33:13.177854Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-03-28T09:33:13.177886Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037893 2025-03-28T09:33:13.177926Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:13.178067Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-28T09:33:13.178094Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=72075186224037894;has_config=0; 2025-03-28T09:33:13.178121Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037894 2025-03-28T09:33:13.178147Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 72075186224037894 2025-03-28T09:33:13.178173Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-03-28T09:33:13.178204Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 72075186224037894 2025-03-28T09:33:13.178237Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:13.178316Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2025-03-28T09:33:13.178341Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2025-03-28T09:33:13.178365Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 2025-03-28T09:33:13.178390Z node 1 :TX_TIERING DEBUG: manager.cpp:142 :Restarting tier '/Root/tier1' at tablet 0 2025-03-28T09:33:13.178412Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-28T09:33:13.178442Z node 1 :TX_TIERING DEBUG: manager.cpp:162 :Tier '/Root/tier1' started at tablet 0 2025-03-28T09:33:13.178473Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:13.179335Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3148:4451];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-03-28T09:33:13.179424Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3158:4454];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2025-03-28T09:33:13.179488Z node 1 :TX_TIERING DEBUG: 
tablet_id=72075186224037894;self_id=[1:3165:4460];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2025-03-28T09:33:24.272820Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-28T09:33:24.272961Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-28T09:33:24.273002Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-28T09:33:24.273119Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-28T09:33:24.273172Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2025-03-28T09:33:24.273231Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 2025-03-28T09:33:24.273332Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:24.273382Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-28T09:33:24.273411Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2025-03-28T09:33:24.273438Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 2025-03-28T09:33:24.273480Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:24.273511Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-28T09:33:24.273552Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-28T09:33:24.273595Z node 1 :TX_TIERING DEBUG: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2025-03-28T09:33:24.273657Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-28T09:33:24.273683Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-03-28T09:33:24.273710Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-28T09:33:24.273753Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:24.273916Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037892;self_id=[1:3148:4451];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-03-28T09:33:24.273992Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037893;self_id=[1:3158:4454];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2025-03-28T09:33:24.274026Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-28T09:33:24.274053Z node 1 
:TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2025-03-28T09:33:24.274082Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037894 2025-03-28T09:33:24.274123Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:24.274159Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-28T09:33:24.274183Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-03-28T09:33:24.274211Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-28T09:33:24.274247Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:24.274281Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:94;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2025-03-28T09:33:24.274304Z node 1 :TX_TIERING INFO: fline=manager.cpp:279;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2025-03-28T09:33:24.274329Z node 1 :TX_TIERING DEBUG: manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 2025-03-28T09:33:24.274364Z node 1 :TX_TIERING DEBUG: fline=manager.cpp:205;event=configs_updated;configs=TIERS=;SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2025-03-28T09:33:24.274460Z node 1 :TX_TIERING DEBUG: tablet_id=72075186224037894;self_id=[1:3165:4460];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:242;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 >> YdbProxy::ReadTopic [GOOD] >> YdbProxy::ReadNonExistentTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTable [GOOD] Test command err: 2025-03-28T09:33:19.291202Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791136869135637:2256];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:19.291565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ddf/r3tmp/tmpYYIt7A/pdisk_1.dat 2025-03-28T09:33:19.717211Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:19.731037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:19.731134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:19.740241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23111 TServer::EnableGrpc on GrpcPort 30638, node 1 2025-03-28T09:33:20.083326Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:20.083343Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:20.083359Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:20.083480Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23111 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:20.605544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:20.623491Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:22.390439Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791149061467018:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:22.390482Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ddf/r3tmp/tmpmpDHbW/pdisk_1.dat 2025-03-28T09:33:22.481550Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:22.522672Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:22.522755Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:22.524470Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14297 TServer::EnableGrpc on GrpcPort 13048, node 2 2025-03-28T09:33:22.673689Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:22.673711Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:22.673717Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:22.673802Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14297 WaitRootIsUp 'Root'... 
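The TX_TIERING trace earlier in this block is the teardown half of the tiering test: dropping the external data source `/Root/tier2` restarts `/Root/tier1` on every column shard (tablets 72075186224037892..894 plus the node-wide manager at tablet 0), and dropping `/Root/tier1` leaves TIERS empty. For orientation only, a minimal client-side sketch of that lifecycle with the ydb Python SDK; the endpoint, database, and every WITH parameter of the CREATE statement are assumptions (the run's real tier configs are not shown in this log), while the DROP statement is the one visible verbatim above.

    import ydb

    # Assumed connection details; the test itself runs on ephemeral ports.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def create_tier(session):
        # Hypothetical tier definition. The SECRETS={...accessKey;...secretKey}
        # entries above suggest secrets named accessKey/secretKey, but the
        # SOURCE_TYPE/LOCATION/AUTH values here are illustrative guesses.
        session.execute_scheme("""
            CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH (
                SOURCE_TYPE = "ObjectStorage",
                LOCATION = "http://storage.example.net/bucket",
                AUTH_METHOD = "AWS",
                AWS_ACCESS_KEY_ID_SECRET_NAME = "accessKey",
                AWS_SECRET_ACCESS_KEY_SECRET_NAME = "secretKey",
                AWS_REGION = "ru-central1"
            );
        """)

    def drop_tier(session):
        # The statement the test issues verbatim above
        # (REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`).
        session.execute_scheme("DROP EXTERNAL DATA SOURCE `/Root/tier1`;")

    pool.retry_operation_sync(create_tier)
    pool.retry_operation_sync(drop_tier)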
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:22.941220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:25.551220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DropTopic [GOOD] Test command err: 2025-03-28T09:33:19.434362Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791137623980061:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:19.436141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dcd/r3tmp/tmpZPkrnf/pdisk_1.dat 2025-03-28T09:33:19.861069Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:19.879894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:19.880035Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:19.881922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5541 TServer::EnableGrpc on GrpcPort 24648, node 1 2025-03-28T09:33:20.124641Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:20.124664Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:20.124689Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:20.124826Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5541 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:20.604685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:22.802726Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791150369644982:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:22.803309Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dcd/r3tmp/tmpLykL3w/pdisk_1.dat 2025-03-28T09:33:22.903129Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:22.932375Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:22.932492Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:22.933937Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25446 TServer::EnableGrpc on GrpcPort 26994, node 2 2025-03-28T09:33:23.121821Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:23.121848Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:23.121852Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:23.121940Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25446 WaitRootIsUp 'Root'... 
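In the YdbProxy::DropTopic run continuing below, dropping the topic removes its persistent-queue group (the HIVE warnings about tablets 72075186224037888/889 "not found" are the partition and balancer tablets going away), after which a follow-up operation on the removed path fails with "Path does not exist". A hedged SDK equivalent, assuming the topic client API and assuming the failing follow-up is a second drop (the log does not show which call produced txid 281474976715660):

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed
    driver.wait(timeout=5)

    topic_path = "/Root/topic"  # hypothetical name; the test's own path is not shown
    driver.topic_client.create_topic(topic_path)
    driver.topic_client.drop_topic(topic_path)

    try:
        driver.topic_client.drop_topic(topic_path)  # path no longer exists
    except ydb.issues.SchemeError as err:
        print("expected:", err)  # mirrors the 'Path does not exist' issue in the log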
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:23.396898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:23.529105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:33:23.537723Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-03-28T09:33:23.538042Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-28T09:33:23.538415Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-03-28T09:33:23.538469Z node 2 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2025-03-28T09:33:23.561652Z node 2 :TX_PROXY ERROR: Actor# [2:7486791154664613040:2396] txid# 281474976715660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } >> YdbProxy::OAuthToken [GOOD] |93.0%| [TA] $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::CreateCdcStream [GOOD] Test command err: 2025-03-28T09:33:19.795887Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791136766910900:2265];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:19.795975Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001da9/r3tmp/tmpv7p1O2/pdisk_1.dat 2025-03-28T09:33:20.185770Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:20.215870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:20.215951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:20.219718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12754 TServer::EnableGrpc on GrpcPort 11949, node 1 2025-03-28T09:33:20.427173Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:20.427206Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:20.427216Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:20.427354Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12754 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:20.757823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:33:22.366600Z node 1 :TX_PROXY ERROR: Actor# [1:7486791149651813245:2302] txid# 281474976710658, issues: { message: "Column key has wrong key type Float" severity: 1 } 2025-03-28T09:33:22.377349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:33:22.500668Z node 1 :TX_PROXY ERROR: Actor# [1:7486791149651813333:2362] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:23.154227Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791156659199530:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:23.155122Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001da9/r3tmp/tmptpZgd1/pdisk_1.dat 2025-03-28T09:33:23.237386Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:23.283847Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:23.283918Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:23.285171Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5000 TServer::EnableGrpc on GrpcPort 10446, node 2 2025-03-28T09:33:23.487216Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:23.487239Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:23.487245Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:23.487360Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5000 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:23.760539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
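Two expected failures frame the YdbProxy::CreateCdcStream run: on node 1 above, a CREATE TABLE with a Float key column is rejected ("Column key has wrong key type Float"; YDB primary-key columns must be orderable non-floating types), and on node 2 below, adding a changefeed that already exists at /Root/table/updates fails with "path exist". A sketch of the happy path follows; the column layout and the changefeed MODE/FORMAT are assumptions, and only the table path and stream name `updates` come from the log:

    import ydb

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")  # assumed
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)

    def create_table_with_feed(session):
        # Declaring `key` as Float here would reproduce the node-1 error;
        # an integer key type is accepted.
        session.execute_scheme(
            "CREATE TABLE `/Root/table` (key Uint64, value Utf8, PRIMARY KEY (key));")
        # The changefeed lands at /Root/table/updates, matching the path in
        # the node-2 error; MODE and FORMAT are illustrative.
        session.execute_scheme(
            "ALTER TABLE `/Root/table` ADD CHANGEFEED `updates` "
            "WITH (MODE = 'UPDATES', FORMAT = 'JSON');")

    pool.retry_operation_sync(create_table_with_feed)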
2025-03-28T09:33:25.972430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:33:26.099276Z node 2 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][2:7486791169544102267:2345] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-28T09:33:26.146932Z node 2 :TX_PROXY ERROR: Actor# [2:7486791169544102323:2449] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/table/updates\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 3], type: EPathTypeCdcStream, state: EPathStateNoChanges)" severity: 1 } >> Viewer::SimpleFeatureFlags [GOOD] |93.0%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] Test command err: 2025-03-28T09:32:35.909318Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790948698151825:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:35.909410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001575/r3tmp/tmpyTfkgl/pdisk_1.dat TServer::EnableGrpc on GrpcPort 27032, node 1 2025-03-28T09:32:36.223481Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:32:36.223571Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:32:36.236265Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:36.249190Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:36.249231Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:36.249240Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:36.249381Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:32:36.253510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:36.253733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:36.256161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:36.398813Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:32:36.402706Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:32:36.402751Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:32:36.403540Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:21942, port: 21942 2025-03-28T09:32:36.404250Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:32:36.409396Z node 1 
:LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:32:36.451727Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****MpzQ (D75D9D42) () has now valid token of ldapuser@ldap 2025-03-28T09:32:39.046695Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790966218668987:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:39.046745Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001575/r3tmp/tmpsxxrmz/pdisk_1.dat 2025-03-28T09:32:39.181504Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:39.195735Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:39.195818Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:39.197582Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3784, node 2 2025-03-28T09:32:39.234284Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:39.234308Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:39.234315Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:39.234437Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:32:39.370815Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:32:39.373969Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:32:39.373998Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:32:39.374762Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:1174, port: 1174 2025-03-28T09:32:39.374836Z node 2 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:32:39.391156Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:32:39.435369Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:32:39.436014Z node 2 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:1174 return no entries 2025-03-28T09:32:39.437727Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****-86g (F02B20E6) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:1174 return no entries)' 2025-03-28T09:32:42.343091Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486790979637697108:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:42.343184Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001575/r3tmp/tmpowUTW0/pdisk_1.dat 2025-03-28T09:32:42.457243Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3717, node 3 2025-03-28T09:32:42.493099Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:42.493526Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:42.498204Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:42.514246Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:42.514282Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:42.514288Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:42.514403Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:32:42.868553Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:32:42.872444Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:32:42.872483Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:32:42.873212Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:13888, port: 13888 2025-03-28T09:32:42.873281Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:32:42.884511Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:32:42.927166Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:32:42.972705Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:32:42.975424Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:32:42.975487Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:32:43.023142Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:32:43.067086Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 
2025-03-28T09:32:43.068346Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****OQ_A (9CA81573) () has now valid token of ldapuser@ldap 2025-03-28T09:32:47.339311Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486790979637697108:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:47.339400Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:32:48.379667Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****OQ_A (9CA81573) 2025-03-28T09:32:48.379809Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:13888, port: 13888 2025-03-28T09:32:48.379918Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:32:48.390312Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:32:48.439189Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:32:48.483045Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:32:48.487270Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:32:48.487345Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:32:48.531031Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:32:48.575085Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:32:48.576352Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****OQ_A (9CA81573) () has now valid token of ldapuser@ldap 2025-03-28T09:32:52.385578Z node 3 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****OQ_A (9CA81573) 2025-03-28T09:32:52.385681Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:13888, port: 13888 2025-03-28T09:32:52.385740Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:32:52.395055Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:32:52.439940Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:32:52.483068Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:32:52.483748Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:32:52.483788Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entry ... 
DAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:33:05.287752Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:33:05.331175Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:33:05.382741Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:33:05.384029Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****wcug (D407F507) () has now valid token of ldapuser@ldap 2025-03-28T09:33:08.888826Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****wcug (D407F507) 2025-03-28T09:33:08.891547Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:23150, port: 23150 2025-03-28T09:33:08.891698Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:33:08.902973Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:33:08.947149Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:33:08.947688Z node 5 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:23150 return no entries 2025-03-28T09:33:08.948202Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****wcug (D407F507) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:23150 return no entries)' 2025-03-28T09:33:09.887264Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486791072980651230:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:09.887361Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:11.890186Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****wcug (D407F507) 2025-03-28T09:33:15.978864Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486791122277327736:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:15.978907Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001575/r3tmp/tmpCExBir/pdisk_1.dat 2025-03-28T09:33:16.106905Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:16.140850Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:16.140954Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:16.143215Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2596, node 6 2025-03-28T09:33:16.200882Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:16.200908Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:16.200935Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:16.201066Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:33:16.321068Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:33:16.324596Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:33:16.324627Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:33:16.325318Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:2925, port: 2925 2025-03-28T09:33:16.325394Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:33:16.337434Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:33:16.379184Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:33:16.379718Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:2925. Server is busy 2025-03-28T09:33:16.380263Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****fOMQ (87C4DA07) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:2925. 
Server is busy)' 2025-03-28T09:33:16.380578Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:33:16.380611Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:33:16.381603Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:2925, port: 2925 2025-03-28T09:33:16.381676Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:33:16.392176Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:33:16.439195Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:33:16.439694Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:2925. Server is busy 2025-03-28T09:33:16.440142Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****fOMQ (87C4DA07) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:2925. Server is busy)' 2025-03-28T09:33:17.991134Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****fOMQ (87C4DA07) 2025-03-28T09:33:17.991547Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:33:17.991582Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:33:17.992735Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:2925, port: 2925 2025-03-28T09:33:17.992881Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:33:18.006526Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:33:18.051265Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:33:18.051773Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:2925. Server is busy 2025-03-28T09:33:18.052397Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****fOMQ (87C4DA07) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:2925. 
Server is busy)' 2025-03-28T09:33:20.979256Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486791122277327736:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:20.979363Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:21.994633Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****fOMQ (87C4DA07) 2025-03-28T09:33:21.995067Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:33:21.995096Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:33:21.995991Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:2925, port: 2925 2025-03-28T09:33:21.996167Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:33:22.004004Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:33:22.051087Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:33:22.098939Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:33:22.099522Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:33:22.099583Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:33:22.147078Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:33:22.191048Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:33:22.192123Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****fOMQ (87C4DA07) () has now valid token of ldapuser@ldap 2025-03-28T09:33:25.997592Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****fOMQ (87C4DA07) 2025-03-28T09:33:25.997677Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:2925, port: 2925 2025-03-28T09:33:25.997757Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:33:26.008656Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:33:26.051137Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:33:26.095082Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:33:26.095689Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:33:26.095740Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, 
filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:33:26.139105Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:33:26.187155Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:33:26.188397Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****fOMQ (87C4DA07) () has now valid token of ldapuser@ldap |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |93.0%| [TA] $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.0%| [TA] {RESULT} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::OAuthToken [GOOD] Test command err: 2025-03-28T09:33:21.535458Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791146152695965:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:21.535549Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001da2/r3tmp/tmpYa2iNr/pdisk_1.dat 2025-03-28T09:33:21.826813Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:21.899379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:21.899538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:21.901747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61627 TServer::EnableGrpc on GrpcPort 10413, node 1 2025-03-28T09:33:22.031474Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:22.031515Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:22.031527Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:22.031952Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61627 WaitRootIsUp 'Root'... 
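The LdapAuthProviderTest_StartTls trace that ends above is a full nested-group resolution: StartTLS, bind as cn=robouser,dc=search,dc=yandex,dc=net, a subtree search for uid=ldapuser requesting memberOf, one attempt with the Active Directory matching-rule-in-chain OID 1.2.840.113556.1.4.1941, then a manual tree traversal over entryDn filters up to cn=people. A minimal sketch of the same sequence using the ldap3 Python library; host, port, and password are placeholders, and ldap3 merely stands in for the C LDAP client the provider actually links:

    from ldap3 import Server, Connection, SUBTREE

    server = Server("ldap://localhost:2925")  # port taken from one run above; varies per run
    conn = Connection(server,
                      user="cn=robouser,dc=search,dc=yandex,dc=net",
                      password="robouser-password")  # password is an assumption
    conn.open()
    conn.start_tls()  # the provider logs 'start TLS' before binding
    conn.bind()

    # Direct groups of the user, as in the 'uid=ldapuser ... memberOf' search.
    conn.search("dc=search,dc=yandex,dc=net", "(uid=ldapuser)",
                search_scope=SUBTREE, attributes=["memberOf"])

    # One-shot nested expansion where the server supports the chain matching
    # rule; '1.1' requests no attributes, exactly as logged.
    conn.search("dc=search,dc=yandex,dc=net",
                "(member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net)",
                search_scope=SUBTREE, attributes=["1.1"])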
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:22.341395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:24.265052Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791159569619450:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:24.265135Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001da2/r3tmp/tmpme2gVj/pdisk_1.dat 2025-03-28T09:33:24.368576Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:24.401430Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:24.401538Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:24.402959Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25012 TServer::EnableGrpc on GrpcPort 23440, node 2 2025-03-28T09:33:24.574765Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:24.574793Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:24.574800Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:24.574895Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25012 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:24.854237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink >> TTopicWriterTests::TestEnterMessage_ZeroSymbol_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadOneMessage >> YdbProxy::ReadNonExistentTopic [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited >> TTopicWriterTests::TestTopicWriterParams_Format_NewlineDelimited [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] >> TTopicWriterTests::TestTopicWriterParams_No_Delimiter [GOOD] >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_No_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_EmptyInput [GOOD] >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] >> TTopicWriterTests::TestEnterMessage_OnlyDelimiters [GOOD] >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> 
TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::ReadNonExistentTopic [GOOD] Test command err: 2025-03-28T09:33:19.270188Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791138255501629:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:19.270291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001df0/r3tmp/tmpZnnpR1/pdisk_1.dat 2025-03-28T09:33:19.711664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:19.711749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:19.720126Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:19.739776Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3682 TServer::EnableGrpc on GrpcPort 3999, node 1 2025-03-28T09:33:20.081294Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:20.081321Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:20.081329Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:20.081455Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3682 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:20.606137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:33:20.622924Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:20.882500Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480 2025-03-28T09:33:22.375455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791151140404459:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:33:22.375561Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791151140404458:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:33:22.375592Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791151140404442:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:33:22.375784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:22.379488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:2, at schemeshard: 72057594046644480 2025-03-28T09:33:22.388582Z node 1 :TX_PROXY ERROR: Actor# [1:7486791151140404464:2451] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:33:22.391327Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-03-28T09:33:22.391408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791151140404462:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking }
2025-03-28T09:33:22.391490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791151140404463:2375], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:33:22.459839Z node 1 :TX_PROXY ERROR: Actor# [1:7486791151140404511:2482] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:22.468697Z node 1 :TX_PROXY ERROR: Actor# [1:7486791151140404529:2490] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:23.695746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:33:24.176599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:33:24.270547Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791138255501629:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:24.270631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:24.621940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 2025-03-28T09:33:25.049849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480 2025-03-28T09:33:25.501400Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710684:0, at schemeshard: 72057594046644480 2025-03-28T09:33:27.333583Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791172761029516:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:27.333708Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001df0/r3tmp/tmp6bpFvv/pdisk_1.dat 2025-03-28T09:33:27.495618Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:27.510115Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:27.510197Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:27.511833Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12155 TServer::EnableGrpc on GrpcPort 7342, node 2 2025-03-28T09:33:27.719528Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
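The KQP_WORKLOAD_SERVICE records above show several TPoolCreatorActor instances racing to create /Root/.metadata/workload_manager/pools/default: one proposal wins, the others hit "path exists but creating right now" and retry with "doublechecking" until the path settles into EPathStateNoChanges. A generic sketch of that create-or-verify retry loop, with assumed callback types standing in for the real schemeshard calls:

    #include <chrono>
    #include <functional>
    #include <thread>

    enum class ECreateResult { Success, AlreadyExists, CreatingRightNow };

    // Generic create-or-verify loop matching the "Scheduled retry ...
    // doublechecking" pattern above. The callbacks are assumptions, not
    // the actual workload-manager actor API.
    bool EnsurePathExists(const std::function<ECreateResult()>& create,
                          const std::function<bool()>& exists,
                          int maxRetries = 10) {
        auto delay = std::chrono::milliseconds(20);
        for (int attempt = 0; attempt < maxRetries; ++attempt) {
            switch (create()) {
                case ECreateResult::Success:
                case ECreateResult::AlreadyExists:
                    return true;                 // we won, or a peer already did
                case ECreateResult::CreatingRightNow:
                    // A concurrent creation is in flight and may still abort,
                    // so wait and doublecheck instead of trusting one answer.
                    std::this_thread::sleep_for(delay);
                    delay *= 2;
                    if (exists()) {
                        return true;
                    }
                    break;
            }
        }
        return false;
    }

The doublecheck matters because "path exists but creating right now" can resolve either way: the in-flight transaction may commit or roll back, so only a later existence check is authoritative.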
2025-03-28T09:33:27.719557Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:27.719566Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:27.719691Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12155 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:27.999750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... >> TColumnShardTestSchema::HotTiersWithStat [GOOD] |93.1%| [TA] $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] >> Compression::WriteRAW >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> ReadSessionImplTest::ProperlyOrdersDecompressedData >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::SimpleFeatureFlags [GOOD] Test command err: BASE_PERF = 3.241803058 2025-03-28T09:32:15.718820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:15.718980Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:15.719115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 17207, node 1 TClient is connected to server localhost:64582 2025-03-28T09:32:23.716553Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:319:2362], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:23.716918Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:23.717056Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 18845, node 2 TClient is connected to server localhost:9191 2025-03-28T09:32:31.721866Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:337:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:31.722300Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:31.722376Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 30570, node 3 TClient is connected to server localhost:4826 2025-03-28T09:32:40.485054Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:40.485476Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:40.485626Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 19491, node 4 TClient is connected to server localhost:6684 2025-03-28T09:32:52.060050Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:52.060358Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:52.060485Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 27233, node 5 TClient is connected to server localhost:25837 2025-03-28T09:33:03.542464Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:337:2380], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:03.542876Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:03.543005Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 11645, node 6 TClient is connected to server localhost:11989 2025-03-28T09:33:16.088582Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:16.089225Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:16.089340Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 23415, node 7 TClient is connected to server localhost:20189 2025-03-28T09:33:20.168099Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486791140534791535:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:20.168210Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:33:20.458610Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:20.488600Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:20.488779Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:20.492798Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12655, node 8 2025-03-28T09:33:20.563231Z node 8 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:20.563261Z node 8 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:20.563278Z node 8 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:20.563454Z node 8 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17910 >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> 
ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> TStorageTenantTest::CreateTableInsideSubDomain2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143154939.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143154939.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143154939.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154939.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143154939.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143154939.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153739.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123154939.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123154939.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153739.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123153739.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123153739.000000s;Name=;Codec=}; 2025-03-28T09:32:20.108107Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:32:20.206177Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:32:20.226536Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:32:20.226840Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:32:20.232950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:32:20.233106Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:32:20.233277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:32:20.233345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:32:20.233423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:32:20.233484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:32:20.233542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:32:20.233624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:32:20.233713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:32:20.233787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:32:20.233862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:32:20.233922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:32:20.259514Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:32:20.259877Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:32:20.259954Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:32:20.260135Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:32:20.260309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:32:20.260386Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:32:20.260438Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:32:20.260527Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:32:20.260587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:32:20.260628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:32:20.260663Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:32:20.260836Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:32:20.260893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:32:20.260931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:32:20.260963Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:32:20.261043Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:32:20.261092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:32:20.261132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:32:20.261164Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:32:20.261231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:32:20.261286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:32:20.261320Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:32:20.261366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
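The TTxUpdateSchema records here step through a fixed chain of normalizers (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, ...): each one is initialized with a sequence id, reports how many chunks it found, finishes, and the executor switches to the next. A hedged sketch of that sequential pipeline; the interface below is illustrative, not the actual NKikimr normalizer classes:

    #include <cstdio>
    #include <memory>
    #include <vector>

    // Illustrative normalizer pipeline matching the init/finished/switched
    // records above; not the actual NKikimr interface.
    struct INormalizer {
        virtual ~INormalizer() = default;
        virtual const char* Name() const = 0;
        virtual size_t Normalize() = 0;   // returns number of chunks touched
    };

    void RunNormalizerChain(const std::vector<std::unique_ptr<INormalizer>>& chain) {
        for (size_t seq = 0; seq < chain.size(); ++seq) {
            INormalizer& n = *chain[seq];
            std::printf("event=normalizer_init;seq_id=%zu;type=%s\n", seq, n.Name());
            size_t chunks = n.Normalize();   // "0 chunks found" = nothing to repair
            std::printf("normalizer=%s;message=%zu chunks found\n", n.Name(), chunks);
            std::printf("event=normalizer_finished;description=CLASS_NAME=%s\n", n.Name());
        }
    }

Running the whole chain unconditionally at tablet init keeps each migration idempotent: a normalizer with nothing to fix simply reports zero chunks and hands off to the next one, which is exactly the pattern in the log.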
2025-03-28T09:32:20.261404Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:32:20.261442Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:32:20.261831Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-03-28T09:32:20.261914Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-03-28T09:32:20.261988Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-03-28T09:32:20.262064Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-03-28T09:32:20.262227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:32:20.262280Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:32:20.262314Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:32:20.262494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:32:20.262533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:32:20.262562Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... 
D DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-28T09:33:31.071280Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:33:31.071336Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:33:31.071401Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:33:31.071449Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:33:31.071563Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:33:31.071839Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-03-28T09:33:31.071991Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:33:31.072166Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:33:31.072234Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:33:31.072734Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-28T09:33:31.072833Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:33:31.073360Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1984:3989];trace_detailed=; 2025-03-28T09:33:31.073794Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-28T09:33:31.074035Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-28T09:33:31.074220Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:33:31.074367Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:33:31.074796Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:33:31.074916Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:33:31.075054Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:33:31.075103Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1984:3989] finished for tablet 9437184 2025-03-28T09:33:31.075547Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1983:3988];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743154411073286,"name":"_full_task","f":1743154411073286,"d_finished":0,"c":0,"l":1743154411075171,"d":1885},"events":[{"name":"bootstrap","f":1743154411073493,"d_finished":908,"c":1,"l":1743154411074401,"d":908},{"a":1743154411074768,"name":"ack","f":1743154411074768,"d_finished":0,"c":0,"l":1743154411075171,"d":403},{"a":1743154411074744,"name":"processing","f":1743154411074744,"d_finished":0,"c":0,"l":1743154411075171,"d":427},{"name":"ProduceResults","f":1743154411074137,"d_finished":517,"c":2,"l":1743154411075086,"d":517},{"a":1743154411075089,"name":"Finish","f":1743154411075089,"d_finished":0,"c":0,"l":1743154411075171,"d":82}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:33:31.075637Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1983:3988];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:33:31.076061Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1983:3988];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743154411073286,"name":"_full_task","f":1743154411073286,"d_finished":0,"c":0,"l":1743154411075688,"d":2402},"events":[{"name":"bootstrap","f":1743154411073493,"d_finished":908,"c":1,"l":1743154411074401,"d":908},{"a":1743154411074768,"name":"ack","f":1743154411074768,"d_finished":0,"c":0,"l":1743154411075688,"d":920},{"a":1743154411074744,"name":"processing","f":1743154411074744,"d_finished":0,"c":0,"l":1743154411075688,"d":944},{"name":"ProduceResults","f":1743154411074137,"d_finished":517,"c":2,"l":1743154411075086,"d":517},{"a":1743154411075089,"name":"Finish","f":1743154411075089,"d_finished":0,"c":0,"l":1743154411075688,"d":599}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1984:3989]->[1:1983:3988] 2025-03-28T09:33:31.076162Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:33:31.072802Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:33:31.076213Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:33:31.076324Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2025-03-28T09:33:31.932484Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.932519Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.932616Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.933014Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.933765Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.946027Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.947068Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:31.950276Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.950302Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.950342Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.950612Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-28T09:33:31.951066Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.951215Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.951571Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:31.951941Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-03-28T09:33:31.954155Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.954194Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.954223Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.954520Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.955185Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.955359Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.956033Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:31.961503Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.964329Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:33:31.964484Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:31.964544Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-03-28T09:33:31.965808Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.965839Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.965877Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.966316Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.966814Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.967020Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.967518Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2025-03-28T09:33:31.968822Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:33:31.969021Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-03-28T09:33:31.969396Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-03-28T09:33:31.969635Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (1-4) 2025-03-28T09:33:31.969779Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:31.969819Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-28T09:33:31.969855Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-28T09:33:31.970013Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-03-28T09:33:31.970058Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-28T09:33:31.970078Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-03-28T09:33:31.970098Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-28T09:33:31.970229Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-03-28T09:33:31.970329Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-03-28T09:33:31.970349Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-03-28T09:33:31.970371Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-28T09:33:31.970471Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). 
Partition stream id: 1 Getting new event 2025-03-28T09:33:31.970501Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-03-28T09:33:31.970523Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-03-28T09:33:31.970551Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-28T09:33:31.970652Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-03-28T09:33:31.971998Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.972026Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.972051Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.972527Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.973005Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.973141Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.973412Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-03-28T09:33:31.974568Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:33:31.974824Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-03-28T09:33:31.975144Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-03-28T09:33:31.975371Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-03-28T09:33:31.975491Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:31.975569Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-28T09:33:31.975594Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-28T09:33:31.975612Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-03-28T09:33:31.975651Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-28T09:33:31.975878Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 5). Partition stream id: 1 Getting new event 2025-03-28T09:33:31.975979Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-03-28T09:33:31.975999Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-03-28T09:33:31.976019Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-03-28T09:33:31.976037Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-03-28T09:33:31.976062Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-28T09:33:31.976211Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). 
Partition stream id: 1 2025-03-28T09:33:31.977516Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.977561Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.977586Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.977921Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.978413Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.978555Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.978772Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:31.979927Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:33:31.980961Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:33:31.981281Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2025-03-28T09:33:31.981415Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-28T09:33:31.981532Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:31.981567Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-28T09:33:31.981591Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2025-03-28T09:33:31.981612Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2025-03-28T09:33:31.981648Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2025-03-28T09:33:31.981688Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-03-28T09:33:31.981837Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). 
Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2025-03-28T09:33:31.981965Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet >> TStorageTenantTest::GenericCases >> TStorageTenantTest::LsLs >> TStorageTenantTest::CreateTableInsideSubDomain >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore >> TStorageTenantTest::CreateSolomonInsideSubDomain >> TStorageTenantTest::Boot ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2025-03-28T09:33:31.914643Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.914676Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.914719Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.915172Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-28T09:33:31.915229Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.915283Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.916517Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007272s 2025-03-28T09:33:31.917380Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.917892Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-28T09:33:31.918005Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.919074Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.919099Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.919116Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.919458Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-28T09:33:31.919496Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.919524Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.919566Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007711s 2025-03-28T09:33:31.920051Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-28T09:33:31.920614Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-28T09:33:31.920720Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.922146Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.922212Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.922229Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.922645Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-03-28T09:33:31.922724Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.922749Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.922829Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.254778s 2025-03-28T09:33:31.923406Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.923902Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-28T09:33:31.923988Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.924872Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.924891Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.924907Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.925269Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-03-28T09:33:31.925310Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.925331Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.925388Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.263915s 2025-03-28T09:33:31.925870Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.926343Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-28T09:33:31.926429Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.927326Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.927347Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.927376Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.927724Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.928187Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.939308Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.939787Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2025-03-28T09:33:31.939827Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.939846Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.939957Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.180108s 2025-03-28T09:33:31.940273Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-03-28T09:33:31.943096Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.943117Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.943140Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.943499Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.943967Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.944509Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.944965Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:32.046476Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:32.046797Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-28T09:33:32.046870Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:32.046923Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-03-28T09:33:32.047018Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-03-28T09:33:32.147649Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-03-28T09:33:32.147834Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-03-28T09:33:32.148978Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:32.149026Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:32.149054Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:32.149665Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:32.150355Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:32.150551Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:32.150990Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:32.255662Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:32.255896Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-28T09:33:32.255969Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:32.256013Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2025-03-28T09:33:32.256194Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-03-28T09:33:32.256287Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-03-28T09:33:32.256642Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-03-28T09:33:32.256829Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-03-28T09:33:32.256931Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2025-03-28T09:33:31.881493Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.881527Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.881616Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.882726Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.888270Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-28T09:33:31.888368Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.891849Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.891878Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.891900Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.892228Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.892606Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2025-03-28T09:33:31.892687Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.893583Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.893617Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.893650Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.893973Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-28T09:33:31.894018Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.894063Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.895055Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2025-03-28T09:33:31.896041Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.896067Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.896091Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.896422Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2025-03-28T09:33:31.896462Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.896493Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.896554Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2025-03-28T09:33:31.898614Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-03-28T09:33:31.898656Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-03-28T09:33:31.898701Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.899059Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.899759Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.914831Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-03-28T09:33:31.916261Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:31.919654Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2025-03-28T09:33:31.925894Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2025-03-28T09:33:31.926168Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:31.926253Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-28T09:33:31.926278Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-28T09:33:31.926298Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-03-28T09:33:31.926327Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-03-28T09:33:31.926349Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-03-28T09:33:31.926368Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2025-03-28T09:33:31.926397Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2025-03-28T09:33:31.926433Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2025-03-28T09:33:31.926452Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2025-03-28T09:33:31.926469Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2025-03-28T09:33:31.926498Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2025-03-28T09:33:31.926518Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2025-03-28T09:33:31.926552Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2025-03-28T09:33:31.926578Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2025-03-28T09:33:31.926595Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2025-03-28T09:33:31.926635Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2025-03-28T09:33:31.926655Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2025-03-28T09:33:31.926674Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2025-03-28T09:33:31.926706Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2025-03-28T09:33:31.926725Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2025-03-28T09:33:31.926745Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2025-03-28T09:33:31.926769Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2025-03-28T09:33:31.926803Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2025-03-28T09:33:31.926824Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2025-03-28T09:33:31.926841Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2025-03-28T09:33:31.926856Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2025-03-28T09:33:31.926871Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2025-03-28T09:33:31.926887Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2025-03-28T09:33:31.926918Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2025-03-28T09:33:31.926938Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2025-03-28T09:33:31.926955Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2025-03-28T09:33:31.927023Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2025-03-28T09:33:31.927054Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2025-03-28T09:33:31.927084Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2025-03-28T09:33:31.927102Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2025-03-28T09:33:31.927119Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2025-03-28T09:33:31.927145Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2025-03-28T09:33:31.927159Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2025-03-28T09:33:31.927185Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2025-03-28T09:33:31.927205Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2025-03-28T09:33:31.927222Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2025-03-28T09:33:31.927238Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2025-03-28T09:33:31.927271Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2025-03-28T09:33:31.927289Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2025-03-28T09:33:31.927306Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2025-03-28T09:33:31.927323Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2025-03-28T09:33:31.927340Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2025-03-28T09:33:31.927357Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2025-03-28T09:33:31.927373Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2025-03-28T09:33:31.927426Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-03-28T09:33:31.929804Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2025-03-28T09:33:31.929993Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2025-03-28T09:33:31.930036Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2025-03-28T09:33:31.930061Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2025-03-28T09:33:31.930082Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2025-03-28T09:33:31.930117Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2025-03-28T09:33:31.930142Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2025-03-28T09:33:31.930159Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2025-03-28T09:33:31.930199Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2025-03-28T09:33:31.930248Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2025-03-28T09:33:31.930271Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2025-03-28T09:33:31.930312Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2025-03-28T09:33:31.930332Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2025-03-28T09:33:31.930348Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2025-03-28T09:33:31.930372Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2025-03-28T09:33:31.930390Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2025-03-28T09:33:31.930410Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2025-03-28T09:33:31.930448Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2025-03-28T09:33:31.930470Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2025-03-28T09:33:31.930486Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2025-03-28T09:33:31.930502Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2025-03-28T09:33:31.930528Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2025-03-28T09:33:31.930553Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2025-03-28T09:33:31.930569Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2025-03-28T09:33:31.930584Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2025-03-28T09:33:31.930600Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2025-03-28T09:33:31.930615Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2025-03-28T09:33:31.930647Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2025-03-28T09:33:31.930665Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2025-03-28T09:33:31.930700Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2025-03-28T09:33:31.930723Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2025-03-28T09:33:31.930741Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2025-03-28T09:33:31.930757Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2025-03-28T09:33:31.930824Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2025-03-28T09:33:31.930844Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2025-03-28T09:33:31.930859Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2025-03-28T09:33:31.930884Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2025-03-28T09:33:31.930916Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2025-03-28T09:33:31.930939Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2025-03-28T09:33:31.930958Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2025-03-28T09:33:31.930975Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2025-03-28T09:33:31.930994Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2025-03-28T09:33:31.931015Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2025-03-28T09:33:31.931045Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2025-03-28T09:33:31.931063Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2025-03-28T09:33:31.931079Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2025-03-28T09:33:31.931097Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2025-03-28T09:33:31.931113Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2025-03-28T09:33:31.931197Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2025-03-28T09:33:31.931217Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2025-03-28T09:33:31.931238Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2025-03-28T09:33:31.931284Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2025-03-28T09:33:31.931400Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2025-03-28T09:33:31.932695Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.932715Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.932750Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.933089Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-28T09:33:31.933546Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.933687Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.934046Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:32.035637Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:32.035912Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-28T09:33:32.036002Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:32.036044Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-03-28T09:33:32.036122Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-03-28T09:33:32.239717Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-03-28T09:33:32.342996Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-03-28T09:33:32.345756Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2025-03-28T09:33:32.346425Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2025-03-28T09:33:32.348631Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:32.348671Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:32.348707Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:32.349135Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:32.352391Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:32.352583Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:32.353668Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:32.454447Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:32.454657Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2025-03-28T09:33:32.454737Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:32.454795Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2025-03-28T09:33:32.454893Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2025-03-28T09:33:32.454983Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2025-03-28T09:33:32.455182Z :DEBUG: [db] [sessionid] [cluster] Committed response: { cookies { assign_id: 1 partition_cookie: 1 } } 2025-03-28T09:33:32.455258Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2025-03-28T09:33:32.455354Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster >> TColumnShardTestSchema::RebootColdTiers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ReadNotEmptyPartitions_AutoscaleAwareSDK [GOOD] Test command err: 2025-03-28T09:32:00.889636Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790799881568008:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:00.889716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:32:01.122715Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00138c/r3tmp/tmpGL9g6H/pdisk_1.dat 2025-03-28T09:32:01.347666Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:01.392636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:01.392741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:01.394397Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22704, node 1 2025-03-28T09:32:01.803331Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00138c/r3tmp/yandexAVwXLT.tmp 2025-03-28T09:32:01.803373Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00138c/r3tmp/yandexAVwXLT.tmp 2025-03-28T09:32:01.803534Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00138c/r3tmp/yandexAVwXLT.tmp 2025-03-28T09:32:01.803656Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:32:02.038912Z INFO: TTestServer started on Port 24946 GrpcPort 22704 TClient is connected to server localhost:24946 PQClient connected to localhost:22704 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:02.331553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:32:02.389046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:02.546832Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:02.567390Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2025-03-28T09:32:04.031472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790817061438022:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.031525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790817061438034:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.031608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.036783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:32:04.044755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790817061438067:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.044930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.055054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790817061438036:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:32:04.268259Z node 1 :TX_PROXY ERROR: Actor# [1:7486790817061438092:2451] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:04.293650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.329387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.401191Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790817061438108:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:32:04.403332Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Njc3MzAwNTctZjEzMjcxZDMtZTQ4MjgyZDEtNTFmODgwNWY=, ActorId: [1:7486790817061438019:2337], ActorState: ExecuteState, TraceId: 01jqe1g8hecrhfg8aq2d7cazrc, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:32:04.405914Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:32:04.416120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7486790817061438391:2634] 2025-03-28T09:32:05.887467Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790799881568008:2057];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:05.887548Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-03-28T09:32:10.567667Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-28T09:32:10.578750Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-28T09:32:10.579787Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7486790842831242446:2781], Recipient [1:7486790804176535742:2198]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:10.579817Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:10.579832Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:32:10.579866Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7486790842831242442:2778], Recipient [1:7486790804176535742:2198]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-03-28T09:32:10.579882Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:32:10.652870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:32:10.653236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:32:10.653450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPat ... 
test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-03-28T09:33:30.820357Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:30.820390Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:30.820455Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [5:7486791179199445435:2735], Partition 2, Sender [5:7486791179199445435:2735], Recipient [5:7486791179199445517:2744], Cookie: 0 2025-03-28T09:33:30.820456Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7486791179199445520:2746], Recipient [5:7486791179199445436:2736]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:30.820475Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:30.820510Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [5:7486791179199445435:2735], Recipient [5:7486791179199445517:2744]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-03-28T09:33:30.820526Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2025-03-28T09:33:30.820573Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7486791157724607923:2457], Recipient [5:7486791157724607863:2453]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:30.820597Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:30.820655Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7486791179199445517:2744], Recipient [5:7486791179199445435:2735]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:30.820681Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:30.821004Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 6 DataSize: 0 UsedReserveSize: 0 2025-03-28T09:33:30.821133Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] ProcessPendingStats. 
PendingUpdates size 3 2025-03-28T09:33:30.821304Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [5:7486791157724607867:2454], Recipient [5:7486791119069901074:2144]: NKikimrPQ.TEvPeriodicTopicStats PathId: 13 Generation: 1 Round: 6 DataSize: 0 UsedReserveSize: 0 SubDomainOutOfSpace: false 2025-03-28T09:33:30.821333Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-03-28T09:33:30.821356Z node 5 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046644480, LocalPathId: 13] DataSize 0 UsedReserveSize 0 2025-03-28T09:33:30.821372Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.099996s, queue# 1 2025-03-28T09:33:30.821452Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [5:7486791157724607867:2454], Recipient [5:7486791119069901074:2144]: NKikimrSchemeOp.TDescribePath PathId: 13 SchemeshardId: 72057594046644480 2025-03-28T09:33:30.821470Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-28T09:33:30.866178Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791157724607863:2453], Partition 0, Sender [0:0:0], Recipient [5:7486791157724607923:2457], Cookie: 0 2025-03-28T09:33:30.866255Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791157724607923:2457]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.866279Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.866323Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:30.866410Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:30.866433Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:30.866461Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:30.888657Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791179199445435:2735], Partition 2, Sender [0:0:0], Recipient [5:7486791179199445517:2744], Cookie: 0 2025-03-28T09:33:30.888720Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791179199445517:2744]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.888739Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.888772Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:30.888832Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:30.888863Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:30.888893Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-28T09:33:30.889091Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791179199445436:2736], Partition 1, Sender [0:0:0], Recipient [5:7486791179199445520:2746], Cookie: 0 2025-03-28T09:33:30.889121Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791179199445520:2746]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.889137Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.889154Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:30.889176Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:30.889197Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:30.889213Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:30.921467Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435094, Sender [0:0:0], Recipient [5:7486791119069901074:2144]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-03-28T09:33:30.921513Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-03-28T09:33:30.921535Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T09:33:30.921548Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-28T09:33:30.921603Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-03-28T09:33:30.921950Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435094, Sender [0:0:0], Recipient [5:7486791119069901074:2144]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTopicStats 2025-03-28T09:33:30.921973Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTopicStats 2025-03-28T09:33:30.921988Z node 5 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-28T09:33:30.966564Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791157724607863:2453], Partition 0, Sender [0:0:0], Recipient [5:7486791157724607923:2457], Cookie: 0 2025-03-28T09:33:30.966651Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791157724607923:2457]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.966697Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.966749Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:30.966838Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:30.966870Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:30.966901Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-28T09:33:30.989085Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791179199445435:2735], Partition 2, Sender [0:0:0], Recipient [5:7486791179199445517:2744], Cookie: 0 2025-03-28T09:33:30.989179Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791179199445517:2744]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.989208Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.989256Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:30.989351Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:30.989385Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:30.989424Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:30.989535Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791179199445436:2736], Partition 1, Sender [0:0:0], Recipient [5:7486791179199445520:2746], Cookie: 0 2025-03-28T09:33:30.989614Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791179199445520:2746]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.989645Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.989686Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:30.989744Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:30.989774Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:30.989804Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143154942.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143154942.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143154942.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154942.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143154942.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143154942.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153742.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123154942.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123154942.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153742.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123153742.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123153742.000000s;Name=;Codec=}; 2025-03-28T09:32:23.013029Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:32:23.092717Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:32:23.107704Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:32:23.107951Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:32:23.114789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:32:23.115000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:32:23.115212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:32:23.115320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:32:23.115422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:32:23.115523Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:32:23.115625Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:32:23.115742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:32:23.115862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:32:23.115976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:32:23.116101Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:32:23.116197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:32:23.139250Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:32:23.139509Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:32:23.139556Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:32:23.139673Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:32:23.139796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:32:23.139853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:32:23.139883Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:32:23.139964Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:32:23.140007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:32:23.140049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:32:23.140068Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:32:23.140165Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:32:23.140203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:32:23.140227Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:32:23.140245Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:32:23.140310Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:32:23.140343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:32:23.140369Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:32:23.140385Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:32:23.140426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:32:23.140453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:32:23.140478Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:32:23.140519Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:32:23.140547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:32:23.140568Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:32:23.140837Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=36; 2025-03-28T09:32:23.140890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=23; 2025-03-28T09:32:23.140939Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=22; 2025-03-28T09:32:23.141004Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=30; 2025-03-28T09:32:23.141136Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:32:23.141184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:32:23.141211Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:32:23.141357Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:32:23.141405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:32:23.141432Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:32:23.141530Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:32:23.141553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... ata.cpp:29;EXECUTE:finishLoadingTime=368; 2025-03-28T09:33:32.588752Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=33653; 2025-03-28T09:33:32.595177Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=6333; 2025-03-28T09:33:32.602198Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5994; 2025-03-28T09:33:32.602329Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7038; 2025-03-28T09:33:32.602501Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=98; 2025-03-28T09:33:32.602616Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=65; 2025-03-28T09:33:32.602786Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=120; 2025-03-28T09:33:32.602918Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=80; 
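The TTxUpdateSchema trace above walks the columnshard's normalizer chain in seq_id order (normalizers_count=11; the registration lines earlier name all eleven, from Granules through RestoreV2Chunks, and the excerpt shows each one initialized and finished in turn before the output is truncated). A minimal sketch for recovering that order from a saved log, assuming only the `event=normalizer_init;last=0;seq_id=N;type=NAME;` shape visible here — a hypothetical stdin filter, not part of the test or of YDB:

```cpp
// Reconstruct the normalizer execution order from TTxUpdateSchema trace lines.
// Relies only on the "event=normalizer_init;last=0;seq_id=N;type=NAME;" shape
// seen in the log above. Reads the log from stdin.
#include <iostream>
#include <regex>
#include <string>
#include <utility>
#include <vector>

int main() {
    // Matches e.g. "event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;"
    const std::regex init_re(R"(event=normalizer_init;last=\d+;seq_id=(\d+);type=(\w+);)");
    std::vector<std::pair<int, std::string>> chain;
    std::string line;
    while (std::getline(std::cin, line)) {
        for (std::sregex_iterator it(line.begin(), line.end(), init_re), end; it != end; ++it) {
            chain.emplace_back(std::stoi((*it)[1].str()), (*it)[2].str());
        }
    }
    for (const auto& [seq, name] : chain) {
        std::cout << "seq_id=" << seq << " " << name << "\n";
    }
    // On the excerpt above this prints Granules(1), Chunks(2), TablesCleaner(4),
    // CleanGranuleId(6), CleanInsertionDedup(8), GCCountersNormalizer(9), ...
}
```

The sparse seq_id numbering (1, 2, 4, 6, 8, 9, 10, 11, 13) comes straight out of the trace; the sketch makes no assumption about why some ids are skipped.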
2025-03-28T09:33:32.611615Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8624; 2025-03-28T09:33:32.622714Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10971; 2025-03-28T09:33:32.622856Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=39; 2025-03-28T09:33:32.622927Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=27; 2025-03-28T09:33:32.622987Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-03-28T09:33:32.623043Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-03-28T09:33:32.623092Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=10; 2025-03-28T09:33:32.623170Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=42; 2025-03-28T09:33:32.623218Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-03-28T09:33:32.623311Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=54; 2025-03-28T09:33:32.623355Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-03-28T09:33:32.623420Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=29; 2025-03-28T09:33:32.623527Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=56; 2025-03-28T09:33:32.623919Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=343; 2025-03-28T09:33:32.623980Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=76550; 2025-03-28T09:33:32.624140Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=31203592;raw_bytes=48253350;count=18;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:33:32.624250Z node 
1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:33:32.624309Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:33:32.624373Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:33:32.643556Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-28T09:33:32.643730Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:33:32.643794Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:33:32.643877Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-03-28T09:33:32.643994Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:33:32.644045Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:33:32.644096Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:33:32.644138Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:33:32.644233Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:33:32.644612Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2212:4085];tablet_id=9437184;parent=[1:2172:4052];fline=manager.cpp:82;event=ask_data;request=request_id=128;1={portions_count=18};; 2025-03-28T09:33:32.644946Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:33:32.645474Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:33:32.645861Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:33:32.645895Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
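The `Index: tables 1 ...` stats line above reports, for the inactive bucket, blob_bytes=31203592, raw_bytes=48253350, count=18 and records=480000. Assuming blob_bytes is the stored (compressed) size and raw_bytes the uncompressed size, as the field names suggest, the implied ratios are easy to check:

```cpp
// Sanity-check the "inactive" bucket from the stats line above.
// Assumption: blob_bytes = stored size, raw_bytes = uncompressed size.
#include <cstdint>
#include <cstdio>

int main() {
    // Values copied verbatim from the log.
    const std::uint64_t blob_bytes = 31203592;
    const std::uint64_t raw_bytes  = 48253350;
    const std::uint64_t records    = 480000;

    std::printf("compression ratio raw/blob: %.2f\n",
                double(raw_bytes) / double(blob_bytes));   // ~1.55
    std::printf("stored bytes per record:    %.1f\n",
                double(blob_bytes) / double(records));     // ~65.0
}
```

That is roughly a 1.55x compression ratio and about 65 stored bytes per record across the 18 inactive portions.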
2025-03-28T09:33:32.645921Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:33:32.645962Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:33:32.646024Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:33:32.646095Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-03-28T09:33:32.646161Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:33:32.646219Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:33:32.646271Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:33:32.646311Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:33:32.646400Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:33:32.650415Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=18;path_id=1; 2025-03-28T09:33:32.651423Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2172:4052];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0 >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime 
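Verdict markers of the form `>> TestName [GOOD]` are interleaved with raw test output throughout this log (RebootColdTiers and the ReadSessionImplTest cases nearby are examples). A small stdin filter that tallies them, assuming only that marker shape — a hypothetical helper, not part of ya or the test suite:

```cpp
// Tally ">> TestName [VERDICT]" markers from a ya test log read on stdin.
#include <iostream>
#include <map>
#include <regex>
#include <string>

int main() {
    // Matches e.g. ">> TColumnShardTestSchema::RebootColdTiers [GOOD]".
    // Test names in this log may contain ':', '+' and '-' (e.g. "...-UseSink").
    const std::regex status_re(R"(>> ([\w:+\-]+) \[(\w+)\])");
    std::map<std::string, int> totals;
    std::string line;
    while (std::getline(std::cin, line)) {
        for (std::sregex_iterator it(line.begin(), line.end(), status_re), end; it != end; ++it) {
            ++totals[(*it)[2].str()];  // verdict: GOOD, FAIL, TIMEOUT, ...
        }
    }
    for (const auto& [verdict, n] : totals) {
        std::cout << verdict << ": " << n << "\n";
    }
}
```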
>> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ManySession_existed_AutoscaleAwareSDK [GOOD] Test command err: 2025-03-28T09:32:01.956173Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790803772078497:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:01.956243Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:32:02.159879Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001348/r3tmp/tmpHVYDiB/pdisk_1.dat 2025-03-28T09:32:02.376786Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:02.380926Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:02.381050Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:02.401076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15256, node 1 2025-03-28T09:32:02.454391Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001348/r3tmp/yandex0i1yGS.tmp 2025-03-28T09:32:02.454427Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001348/r3tmp/yandex0i1yGS.tmp 2025-03-28T09:32:02.454723Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001348/r3tmp/yandex0i1yGS.tmp 2025-03-28T09:32:02.454879Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:32:02.507807Z INFO: TTestServer started on Port 9032 GrpcPort 15256 TClient is connected to server localhost:9032 PQClient connected to localhost:15256 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:02.774796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
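The PERSQUEUE TRACE blocks that recur in this log (around the TEvUpdateAvailableSize events earlier, and again in the session-teardown output below) repeat one idle housekeeping cycle per partition: receive TEvUpdateAvailableSize, find 0 items to delete, run TPartition::ProcessReserveRequests, then TPartition::AnswerCurrentWrites with Responses.size()=0. A minimal counter for those cycles, keyed on the `[PQ: <tablet>, Partition: <n>, State: StateIdle]` prefix exactly as it appears in the trace — a hypothetical log filter, not YDB code:

```cpp
// Count idle housekeeping cycles per (tablet, partition) from PERSQUEUE
// TRACE lines read on stdin. One cycle is counted per
// "TPartition::AnswerCurrentWrites" line; note that line wrapping in this
// log sometimes splits "AnswerCurrentWrites." from "Responses.size()=0",
// so the size suffix is deliberately not matched.
#include <iostream>
#include <map>
#include <regex>
#include <string>
#include <utility>

int main() {
    const std::regex cycle_re(
        R"(\[PQ: (\d+), Partition: (\d+), State: StateIdle\] TPartition::AnswerCurrentWrites)");
    std::map<std::pair<std::string, std::string>, int> cycles;
    std::string line;
    while (std::getline(std::cin, line)) {
        for (std::sregex_iterator it(line.begin(), line.end(), cycle_re), end; it != end; ++it) {
            ++cycles[{(*it)[1].str(), (*it)[2].str()}];
        }
    }
    for (const auto& [key, n] : cycles) {
        std::cout << "tablet " << key.first << " partition " << key.second
                  << ": " << n << " idle cycles\n";
    }
}
```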
2025-03-28T09:32:02.801425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:32:04.915637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790816656981205:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.915640Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790816656981213:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.915702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.920133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:32:04.921008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790816656981249:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.921071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.931236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790816656981219:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:32:05.076818Z node 1 :TX_PROXY ERROR: Actor# [1:7486790820951948573:2449] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:05.098843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:05.125406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:05.174840Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790820951948589:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:32:05.175754Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGUzZGFiMTEtZDZiMjM4YjEtZGJkN2Q0NDAtZjc5Y2I5NDM=, ActorId: [1:7486790816656981202:2336], ActorState: ExecuteState, TraceId: 01jqe1g9db3m0b98n39asy755v, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:32:05.177409Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:32:05.181874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7486790820951948868:2630] 2025-03-28T09:32:06.956322Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790803772078497:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:06.956393Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-03-28T09:32:11.329116Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-28T09:32:11.340371Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-28T09:32:11.341615Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7486790846721752924:2778], Recipient [1:7486790808067046217:2191]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:11.341649Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:11.341668Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:32:11.341697Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7486790846721752920:2775], Recipient [1:7486790808067046217:2191]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-03-28T09:32:11.341710Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:32:11.398293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:32:11.398789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:32:11.399074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-03-28T09:32:11.399134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-03-28T09:32:11.399174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new ... 
oducer-1|76113b56-62ceaaf2-cef60b2a-84e3fc46_0] PartitionId [0] Generation [1] Write session: aborting 2025-03-28T09:33:30.588634Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7486791186760915217:3524], Recipient [5:7486791156696142696:2454]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:33:30.588665Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:33:30.588682Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:33:30.588698Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_5_2_3285395475926888238_v1 2025-03-28T09:33:30.588810Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7486791186760915216:2818] destroyed 2025-03-28T09:33:30.588868Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_2_3285395475926888238_v1 2025-03-28T09:33:30.589536Z :INFO: [/Root] TraceId [] SessionId [producer-1|76113b56-62ceaaf2-cef60b2a-84e3fc46_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-03-28T09:33:30.589640Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|76113b56-62ceaaf2-cef60b2a-84e3fc46_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-03-28T09:33:30.589695Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2025-03-28T09:33:30.589730Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|76113b56-62ceaaf2-cef60b2a-84e3fc46_0] PartitionId [0] Generation [1] Write session is aborting and will not restart 2025-03-28T09:33:30.589720Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: producer-1|76113b56-62ceaaf2-cef60b2a-84e3fc46_0 grpc read done: success: 0 data: 2025-03-28T09:33:30.589807Z :DEBUG: [/Root] TraceId [] SessionId [producer-1|76113b56-62ceaaf2-cef60b2a-84e3fc46_0] PartitionId [0] Generation [1] Write session: destroy 2025-03-28T09:33:30.589750Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|76113b56-62ceaaf2-cef60b2a-84e3fc46_0 grpc read failed 2025-03-28T09:33:30.589780Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|76113b56-62ceaaf2-cef60b2a-84e3fc46_0 grpc closed 2025-03-28T09:33:30.589794Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: producer-1|76113b56-62ceaaf2-cef60b2a-84e3fc46_0 is DEAD 2025-03-28T09:33:30.590804Z node 5 :PQ_PARTITION_CHOOSER TRACE: StateIdle, received event# 65543, Sender [5:7486791178170980204:2735], Recipient [5:7486791178170980206:2735]: NActors::TEvents::TEvPoison 2025-03-28T09:33:30.590875Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:33:30.591084Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7486791178170980236:3311], Recipient [5:7486791156696142696:2454]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:33:30.591144Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:33:30.591164Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:33:30.591201Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7486791178170980235:2735] destroyed 2025-03-28T09:33:30.591282Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188506 
(NKikimr::TEvPQ::TEvPipeDisconnected), Tablet [5:7486791156696142696:2454], Partition 0, Sender [5:7486791156696142696:2454], Recipient [5:7486791156696142756:2458], Cookie: 0 2025-03-28T09:33:30.591349Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188506, Sender [5:7486791156696142696:2454], Recipient [5:7486791156696142756:2458]: NKikimr::TEvPQ::TEvPipeDisconnected 2025-03-28T09:33:30.591374Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPipeDisconnected 2025-03-28T09:33:30.591415Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-28T09:33:30.591454Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessChangeOwnerRequests. 2025-03-28T09:33:30.591495Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:30.591567Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:30.591596Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:30.591623Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:30.667169Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791156696142696:2454], Partition 0, Sender [0:0:0], Recipient [5:7486791156696142756:2458], Cookie: 0 2025-03-28T09:33:30.667245Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791156696142756:2458]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.667270Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.667312Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:30.667376Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:30.667406Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:30.667435Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:30.667480Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791182465947640:2765], Partition 2, Sender [0:0:0], Recipient [5:7486791182465947723:2774], Cookie: 0 2025-03-28T09:33:30.667522Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791182465947723:2774]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.667543Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.667567Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:30.667598Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:30.667618Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:30.667636Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:30.667677Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791182465947641:2766], Partition 1, Sender [0:0:0], Recipient [5:7486791182465947727:2776], Cookie: 0 2025-03-28T09:33:30.667719Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791182465947727:2776]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.667741Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.667764Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:30.667794Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:30.667814Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:30.667831Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:30.767718Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791156696142696:2454], Partition 0, Sender [0:0:0], Recipient [5:7486791156696142756:2458], Cookie: 0 2025-03-28T09:33:30.767800Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791156696142756:2458]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.767825Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.767869Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:30.767938Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:30.767984Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:30.768014Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:30.768082Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791182465947640:2765], Partition 2, Sender [0:0:0], Recipient [5:7486791182465947723:2774], Cookie: 0 2025-03-28T09:33:30.768125Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791182465947723:2774]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.768144Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.768168Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:30.768197Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. 
Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:30.768211Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:30.768230Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:30.768272Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791182465947641:2766], Partition 1, Sender [0:0:0], Recipient [5:7486791182465947727:2776], Cookie: 0 2025-03-28T09:33:30.768310Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791182465947727:2776]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.768328Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:30.768347Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:30.768376Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:30.768391Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:30.768408Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2025-03-28T09:33:31.907319Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.907372Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.907409Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.908181Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.908776Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.919765Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.920758Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:31.921583Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:33:31.921995Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:33:31.922743Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-03-28T09:33:31.922885Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:33:31.923006Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:31.923045Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (2-2) 2025-03-28T09:33:31.923091Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-28T09:33:31.923117Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-28T09:33:31.924734Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.924765Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.924828Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.925221Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.925701Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.925866Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.926110Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2025-03-28T09:33:31.927058Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:33:31.927270Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-03-28T09:33:31.927553Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-03-28T09:33:31.927790Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-03-28T09:33:31.927894Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:31.927940Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-28T09:33:31.927983Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-28T09:33:31.928146Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2025-03-28T09:33:31.928180Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-28T09:33:31.928201Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-03-28T09:33:31.928222Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-28T09:33:31.928342Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2025-03-28T09:33:31.928421Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2025-03-28T09:33:31.928490Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2025-03-28T09:33:31.928513Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-28T09:33:31.928599Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2025-03-28T09:33:31.928628Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2025-03-28T09:33:31.928649Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2025-03-28T09:33:31.928670Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-28T09:33:31.928758Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2025-03-28T09:33:31.929962Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.929991Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.930013Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.930265Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-28T09:33:31.930761Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.930907Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.931094Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2025-03-28T09:33:31.931993Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:33:31.932215Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2025-03-28T09:33:31.932585Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2025-03-28T09:33:31.932901Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2025-03-28T09:33:31.933063Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:31.933103Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-28T09:33:31.933224Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 Getting new event 2025-03-28T09:33:31.933272Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-28T09:33:31.933295Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-28T09:33:31.933348Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 Getting new event 2025-03-28T09:33:31.933373Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-28T09:33:31.933400Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2025-03-28T09:33:31.933478Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 Getting new event 2025-03-28T09:33:31.933517Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2025-03-28T09:33:31.933537Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStream ... 
tream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-28T09:33:34.266472Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2025-03-28T09:33:34.349296Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-03-28T09:33:34.349349Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-03-28T09:33:34.349384Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:34.349761Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:34.354455Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:34.354668Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2025-03-28T09:33:34.359003Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2025-03-28T09:33:34.500706Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2025-03-28T09:33:34.501712Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:34.503580Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-28T09:33:34.506350Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-28T09:33:34.507231Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2025-03-28T09:33:34.511916Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2025-03-28T09:33:34.512760Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2025-03-28T09:33:34.513591Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2025-03-28T09:33:34.514439Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2025-03-28T09:33:34.523094Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2025-03-28T09:33:34.523989Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2025-03-28T09:33:34.524056Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2025-03-28T09:33:34.524235Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2025-03-28T09:33:34.535926Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2025-03-28T09:33:34.539272Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:34.539311Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:34.539356Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:34.539625Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-28T09:33:34.540033Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:34.540144Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:34.547012Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:34.547472Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2025-03-28T09:33:34.548706Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:34.548724Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:34.548741Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:34.560089Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:34.570950Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:34.571146Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:34.579301Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:34.582874Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:33:34.586831Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:34.586898Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-28T09:33:34.590946Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK [GOOD] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull >> KqpUserConstraint::KqpReadNull+UploadNull >> TStorageTenantTest::Boot [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK [GOOD] Test command err: 2025-03-28T09:32:00.884544Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790796498695496:2137];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:00.885111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:32:01.096868Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00135d/r3tmp/tmpsIKx51/pdisk_1.dat 2025-03-28T09:32:01.327853Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:01.349703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:01.349828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:01.354492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20100, node 1 2025-03-28T09:32:01.803313Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00135d/r3tmp/yandexjm4iyX.tmp 2025-03-28T09:32:01.803345Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00135d/r3tmp/yandexjm4iyX.tmp 2025-03-28T09:32:01.803491Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00135d/r3tmp/yandexjm4iyX.tmp 2025-03-28T09:32:01.803593Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:32:02.037954Z INFO: TTestServer started on Port 17565 GrpcPort 20100 TClient is connected to server localhost:17565 PQClient connected to localhost:20100 WaitRootIsUp 'Root'... 
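This dump is from TopicAutoscaling::ReadingAfterSplitTest_PreferedPartition_AutoscaleAwareSDK; further down it shows the topic being proposed with PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ... PartitionStrategyType: CAN_SPLIT }, i.e. one initial partition that is allowed to split into up to a hundred. A rough equivalent with the public NYdb::NTopic API is sketched below purely as an illustration — the test provisions the topic through a raw ESchemeOpCreatePersQueueGroup scheme transaction, and its internal scale-threshold knobs are richer than what this surface shows.

    NYdb::NTopic::TTopicClient client(driver);

    auto settings = NYdb::NTopic::TCreateTopicSettings()
        // 1 initial partition, splittable up to 100, mirroring
        // MinPartitionCount / MaxPartitionCount in the dump.
        .PartitioningSettings(/*minActivePartitions=*/1, /*maxActivePartitions=*/100)
        .BeginAddConsumer("test-consumer").EndAddConsumer();  // Consumers { Name: "test-consumer" }

    auto status = client.CreateTopic("test-topic", settings).GetValueSync();
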
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:02.358116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:02.398141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:32:03.707672Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790809383598118:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.707686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790809383598126:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.707834Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.714894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:32:03.729148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790809383598133:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:32:03.808909Z node 1 :TX_PROXY ERROR: Actor# [1:7486790809383598197:2451] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:04.147059Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790809383598205:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:32:04.148715Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWFhNTMyNGEtOTM5YzA5OTctNzQxOGVhMWEtYTBjZjhhODE=, ActorId: [1:7486790809383598116:2336], ActorState: ExecuteState, TraceId: 01jqe1g87e8f88hv9ksdyxe7yt, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:32:04.151729Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:32:04.205455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.276665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.351918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7486790813678565787:2635] 2025-03-28T09:32:05.883931Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790796498695496:2137];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:05.883999Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-03-28T09:32:10.398630Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-28T09:32:10.410623Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-28T09:32:10.412824Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7486790839448369849:2790], Recipient [1:7486790800793663118:2185]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:10.412860Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:10.412874Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:32:10.412906Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7486790839448369845:2787], Recipient [1:7486790800793663118:2185]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-03-28T09:32:10.412921Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:32:10.461468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:32:10.461921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:32:10.463320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-03-28T09:32:10.463376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-03-28T09:32:10.463418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-03-28T09:32:10.463479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-03-28T09:32:10.463539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-03-28T09:32:10.464791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710673:1, propose status:StatusAccepted, reason: , at sche ... 647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 5 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-03-28T09:33:35.931035Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [5:7486791187477863851:2746], Partition 2, Sender [5:7486791187477863851:2746], Recipient [5:7486791187477863940:2757], Cookie: 0 2025-03-28T09:33:35.931080Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [5:7486791187477863851:2746], Recipient [5:7486791187477863940:2757]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-03-28T09:33:35.931097Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2025-03-28T09:33:35.931139Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188491 (NKikimr::TEvPQ::TEvPartitionStatus), Tablet [5:7486791196067798901:2833], Partition 3, Sender [5:7486791196067798901:2833], Recipient [5:7486791196067798989:2841], Cookie: 0 2025-03-28T09:33:35.931171Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188491, Sender [5:7486791196067798901:2833], Recipient [5:7486791196067798989:2841]: NKikimr::TEvPQ::TEvPartitionStatus 2025-03-28T09:33:35.931182Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPartitionStatus 2025-03-28T09:33:35.931336Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 5 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-03-28T09:33:35.931919Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188491 (NKikimr::TEvPQ::TEvPartitionStatus), Tablet [5:7486791196067798900:2832], Partition 4, Sender [5:7486791196067798900:2832], Recipient [5:7486791196067799013:2849], Cookie: 0 2025-03-28T09:33:35.931988Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188491, Sender [5:7486791196067798900:2832], Recipient 
[5:7486791196067799013:2849]: NKikimr::TEvPQ::TEvPartitionStatus 2025-03-28T09:33:35.932003Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPartitionStatus 2025-03-28T09:33:35.932170Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 5 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-03-28T09:33:35.932275Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [5:7486791196067798900:2832], Partition 4, Sender [5:7486791196067798900:2832], Recipient [5:7486791196067799013:2849], Cookie: 0 2025-03-28T09:33:35.932318Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [5:7486791196067798900:2832], Recipient [5:7486791196067799013:2849]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-03-28T09:33:35.932331Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2025-03-28T09:33:35.932373Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188491 (NKikimr::TEvPQ::TEvPartitionStatus), Tablet [5:7486791161708058927:2453], Partition 0, Sender [5:7486791161708058927:2453], Recipient [5:7486791161708058988:2457], Cookie: 0 2025-03-28T09:33:35.932403Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188491, Sender [5:7486791161708058927:2453], Recipient [5:7486791161708058988:2457]: NKikimr::TEvPQ::TEvPartitionStatus 2025-03-28T09:33:35.932415Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPartitionStatus 2025-03-28T09:33:35.932575Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 5 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-03-28T09:33:35.932649Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [5:7486791161708058927:2453], Partition 0, Sender [5:7486791161708058927:2453], Recipient [5:7486791161708058988:2457], Cookie: 0 2025-03-28T09:33:35.932683Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender 
[5:7486791161708058927:2453], Recipient [5:7486791161708058988:2457]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-03-28T09:33:35.932696Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2025-03-28T09:33:35.932735Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188491 (NKikimr::TEvPQ::TEvPartitionStatus), Tablet [5:7486791187477863852:2747], Partition 1, Sender [5:7486791187477863852:2747], Recipient [5:7486791187477863943:2759], Cookie: 0 2025-03-28T09:33:35.932763Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188491, Sender [5:7486791187477863852:2747], Recipient [5:7486791187477863943:2759]: NKikimr::TEvPQ::TEvPartitionStatus 2025-03-28T09:33:35.932774Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvPartitionStatus 2025-03-28T09:33:35.932914Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 5 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2025-03-28T09:33:35.932999Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [5:7486791187477863852:2747], Partition 1, Sender [5:7486791187477863852:2747], Recipient [5:7486791187477863943:2759], Cookie: 0 2025-03-28T09:33:35.933035Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [5:7486791187477863852:2747], Recipient [5:7486791187477863943:2759]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-03-28T09:33:35.933048Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus 2025-03-28T09:33:35.933098Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7486791187477863940:2757], Recipient [5:7486791187477863851:2746]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:35.933117Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:35.933238Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7486791196067798989:2841], Recipient [5:7486791196067798901:2833]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:35.933250Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:35.933323Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7486791196067799013:2849], Recipient [5:7486791196067798900:2832]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:35.933335Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:35.933438Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 271188503, Sender [5:7486791187477863943:2759], Recipient [5:7486791187477863852:2747]: NKikimr::TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:35.933449Z node 5 
:PERSQUEUE TRACE: HandleHook, processing event TEvPQ::TEvPartitionLabeledCounters 2025-03-28T09:33:35.933846Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] Send TEvPeriodicTopicStats PathId: 13 Generation: 1 StatsReportRound: 10 DataSize: 0 UsedReserveSize: 0 2025-03-28T09:33:35.933976Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][test-topic] ProcessPendingStats. PendingUpdates size 5 2025-03-28T09:33:35.934316Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [5:7486791161708058928:2454], Recipient [5:7486791123053352152:2146]: NKikimrPQ.TEvPeriodicTopicStats PathId: 13 Generation: 1 Round: 10 DataSize: 0 UsedReserveSize: 0 SubDomainOutOfSpace: false 2025-03-28T09:33:35.934342Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-03-28T09:33:35.934359Z node 5 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046644480, LocalPathId: 13] DataSize 0 UsedReserveSize 0 2025-03-28T09:33:35.934388Z node 5 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.099996s, queue# 1 2025-03-28T09:33:35.934631Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188536 (NKikimr::TEvPQ::TEvSubDomainStatus), Tablet [5:7486791196067798901:2833], Partition 3, Sender [5:7486791196067798901:2833], Recipient [5:7486791196067798989:2841], Cookie: 0 2025-03-28T09:33:35.934678Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188536, Sender [5:7486791196067798901:2833], Recipient [5:7486791196067798989:2841]: NKikimrPQ.TEvSubDomainStatus SubDomainOutOfSpace: false 2025-03-28T09:33:35.934712Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvSubDomainStatus >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink >> TSchemeShardLoginTest::BanUnbanUser >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false >> TWebLoginService::AuditLogEmptySIDsLoginSuccess >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] >> TStorageTenantTest::LsLs [GOOD] >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] >> TStorageTenantTest::GenericCases [GOOD] >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] >> TPersQueueMirrorer::ValidStartStream [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] Test command err: 2025-03-28T09:33:33.269236Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791197512661012:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:33.269432Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013db/r3tmp/tmph1mMnR/pdisk_1.dat 2025-03-28T09:33:33.894938Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:33.928484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:33.928590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:33.939607Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20353 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:33:34.232976Z node 1 :TX_PROXY DEBUG: actor# [1:7486791197512661248:2141] Handle TEvNavigate describe path dc-1 2025-03-28T09:33:34.233035Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791201807628977:2433] HANDLE EvNavigateScheme dc-1 2025-03-28T09:33:34.233190Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791197512661291:2157], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:34.233240Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7486791197512661291:2157], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-28T09:33:34.233443Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201807628978:2434][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:33:34.235607Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197512660888:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486791201807628982:2434] 2025-03-28T09:33:34.235672Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791197512660888:2051] Subscribe: subscriber# [1:7486791201807628982:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:34.235739Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197512660891:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486791201807628983:2434] 2025-03-28T09:33:34.235773Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791197512660891:2054] Subscribe: subscriber# [1:7486791201807628983:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:34.235812Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197512660894:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486791201807628984:2434] 2025-03-28T09:33:34.235829Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791197512660894:2057] Subscribe: subscriber# [1:7486791201807628984:2434], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:34.235874Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201807628982:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791197512660888:2051] 2025-03-28T09:33:34.235898Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7486791201807628983:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791197512660891:2054] 2025-03-28T09:33:34.235917Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201807628984:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791197512660894:2057] 2025-03-28T09:33:34.235991Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201807628978:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791201807628979:2434] 2025-03-28T09:33:34.236031Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201807628978:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791201807628980:2434] 2025-03-28T09:33:34.236398Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197512660888:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486791201807628982:2434] 2025-03-28T09:33:34.236423Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197512660891:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486791201807628983:2434] 2025-03-28T09:33:34.236461Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197512660894:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486791201807628984:2434] 2025-03-28T09:33:34.237157Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7486791201807628978:2434][/dc-1] Set up state: owner# [1:7486791197512661291:2157], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:34.237354Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201807628978:2434][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791201807628981:2434] 2025-03-28T09:33:34.237414Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7486791201807628978:2434][/dc-1] Path was already updated: owner# [1:7486791197512661291:2157], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:34.237470Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201807628982:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201807628979:2434], cookie# 1 2025-03-28T09:33:34.237489Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201807628983:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201807628980:2434], cookie# 1 2025-03-28T09:33:34.237503Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201807628984:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201807628981:2434], cookie# 1 2025-03-28T09:33:34.241254Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197512660888:2051] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201807628982:2434], cookie# 1 2025-03-28T09:33:34.241303Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197512660891:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201807628983:2434], cookie# 1 2025-03-28T09:33:34.241322Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197512660894:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201807628984:2434], cookie# 1 2025-03-28T09:33:34.241371Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201807628982:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791197512660888:2051], cookie# 1 2025-03-28T09:33:34.241409Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201807628983:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791197512660891:2054], cookie# 1 2025-03-28T09:33:34.241425Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201807628984:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791197512660894:2057], cookie# 1 2025-03-28T09:33:34.241457Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201807628978:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791201807628979:2434], cookie# 1 2025-03-28T09:33:34.241487Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201807628978:2434][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:33:34.241506Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201807628978:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791201807628980:2434], cookie# 1 2025-03-28T09:33:34.241533Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201807628978:2434][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:33:34.241554Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201807628978:2434][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791201807628981:2434], cookie# 1 2025-03-28T09:33:34.241569Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201807628978:2434][/dc-1] Unexpected sync response: sender# [1:7486791201807628981:2434], cookie# 1 2025-03-28T09:33:34.309243Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486791197512661291:2157], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 
10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-28T09:33:34.309660Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486791197512661291:2157], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-28T09:33:35.831423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-28T09:33:35.831428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-28T09:33:35.831446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:6 2025-03-28T09:33:35.831452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2025-03-28T09:33:35.831481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-28T09:33:35.831490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-28T09:33:35.831507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:8 2025-03-28T09:33:35.831513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2025-03-28T09:33:35.831530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:5 2025-03-28T09:33:35.831543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2025-03-28T09:33:35.831588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046644480 2025-03-28T09:33:35.831655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-28T09:33:35.831691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:33:35.831716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-28T09:33:35.831794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:33:35.936971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 
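The SCHEME_BOARD_SUBSCRIBER exchange above shows how one path resolution is synced against the three scheme-board replicas: each replica answers a versioned sync request, progress is counted as "size# 3, half# 1, successes# N", the sync completes on the second success (a strict majority), and the third reply is then logged as "Unexpected sync response" and discarded. A minimal sketch of that quorum rule follows; the failure branch is an assumption inferred from the counters, not read from the implementation.

    #include <cstdint>

    struct TSubscriberSync {
        uint32_t Size = 3;       // replicas queried
        uint32_t Half = 3 / 2;   // "half# 1" in the log
        uint32_t Successes = 0;
        uint32_t Failures = 0;

        // Returns true once the sync outcome is decided; later replies are ignored.
        bool OnReply(bool ok) {
            (ok ? Successes : Failures) += 1;
            // "Sync is done" fires at successes# 2 of 3; completing early on a
            // majority of failures is assumed symmetric.
            return Successes > Half || Failures > Half;
        }
    };
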
2025-03-28T09:33:36.101356Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791206804253992:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:36.101488Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791206804253992:2107], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:36.101509Z node 3 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [3:7486791206804253992:2107], path# /dc-1/USER_0, domainOwnerId# 72057594046644480 2025-03-28T09:33:36.107409Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486791211099221611:2305][/dc-1/USER_0] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:33:36.124622Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197512660888:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0 DomainOwnerId: 72057594046644480 }: sender# [3:7486791211099221615:2305] 2025-03-28T09:33:36.124681Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791197512660888:2051] Subscribe: subscriber# [3:7486791211099221615:2305], path# /dc-1/USER_0, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:36.124724Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197512660891:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0 DomainOwnerId: 72057594046644480 }: sender# [3:7486791211099221616:2305] 2025-03-28T09:33:36.124741Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791197512660891:2054] Subscribe: subscriber# [3:7486791211099221616:2305], path# /dc-1/USER_0, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:36.124774Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197512660894:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/USER_0 DomainOwnerId: 72057594046644480 }: sender# [3:7486791211099221617:2305] 2025-03-28T09:33:36.124795Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791197512660894:2057] Subscribe: subscriber# [3:7486791211099221617:2305], path# /dc-1/USER_0, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:36.128804Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7486791211099221615:2305][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:7486791197512660888:2051] 2025-03-28T09:33:36.129124Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7486791211099221616:2305][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:7486791197512660891:2054] 2025-03-28T09:33:36.129154Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:7486791211099221617:2305][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 
18446744073709551615 }: sender# [1:7486791197512660894:2057] 2025-03-28T09:33:36.129194Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486791211099221611:2305][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [3:7486791211099221612:2305] 2025-03-28T09:33:36.129516Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486791211099221611:2305][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [3:7486791211099221613:2305] 2025-03-28T09:33:36.129564Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:7486791211099221611:2305][/dc-1/USER_0] Set up state: owner# [3:7486791206804253992:2107], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:36.129603Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:7486791211099221611:2305][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [3:7486791211099221614:2305] 2025-03-28T09:33:36.129648Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:7486791211099221611:2305][/dc-1/USER_0] Path was already updated: owner# [3:7486791206804253992:2107], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:36.129704Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7486791206804253992:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Strong: 1 } 2025-03-28T09:33:36.130863Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7486791206804253992:2107], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7486791211099221611:2305] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:33:36.175039Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791206804253992:2107], cacheItem# { Subscriber: { Subscriber: [3:7486791211099221611:2305] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:36.199004Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791211099221618:2306], recipient# [3:7486791211099221610:2304], result# { ErrorCount: 1 
DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:36.199122Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791206804253992:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:36.199224Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791211099221619:2307], recipient# [3:7486791211099221609:2318], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:37.203595Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791206804253992:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:37.203719Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791215394188918:2309], recipient# [3:7486791215394188917:2319], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD]
Test command err:
2025-03-28T09:33:33.183635Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791197871440960:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:33.183700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013cc/r3tmp/tmp4Z62Yy/pdisk_1.dat 2025-03-28T09:33:33.709073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:33.714888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:33.720949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:33.770181Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server
localhost:61183 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:33:34.024995Z node 1 :TX_PROXY DEBUG: actor# [1:7486791197871441193:2120] Handle TEvNavigate describe path dc-1 2025-03-28T09:33:34.025079Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791202166408966:2445] HANDLE EvNavigateScheme dc-1 2025-03-28T09:33:34.025955Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791197871441229:2139], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:34.026004Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7486791197871441229:2139], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-28T09:33:34.026294Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202166408967:2446][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:33:34.028601Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197871440869:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486791202166408971:2446] 2025-03-28T09:33:34.028692Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791197871440869:2051] Subscribe: subscriber# [1:7486791202166408971:2446], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:34.028750Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197871440872:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486791202166408972:2446] 2025-03-28T09:33:34.028764Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791197871440872:2054] Subscribe: subscriber# [1:7486791202166408972:2446], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:34.028819Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197871440875:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486791202166408973:2446] 2025-03-28T09:33:34.028835Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791197871440875:2057] Subscribe: subscriber# [1:7486791202166408973:2446], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:34.028906Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202166408971:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791197871440869:2051] 2025-03-28T09:33:34.028960Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202166408972:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791197871440872:2054] 2025-03-28T09:33:34.028981Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202166408973:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791197871440875:2057] 2025-03-28T09:33:34.029027Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202166408967:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791202166408968:2446] 2025-03-28T09:33:34.029117Z node 1 :SCHEME_BOARD_SUBSCRIBER 
DEBUG: [main][1:7486791202166408967:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791202166408969:2446] 2025-03-28T09:33:34.029178Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7486791202166408967:2446][/dc-1] Set up state: owner# [1:7486791197871441229:2139], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:34.033358Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202166408967:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791202166408970:2446] 2025-03-28T09:33:34.033432Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7486791202166408967:2446][/dc-1] Path was already updated: owner# [1:7486791197871441229:2139], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:34.033469Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202166408971:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202166408968:2446], cookie# 1 2025-03-28T09:33:34.033480Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202166408972:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202166408969:2446], cookie# 1 2025-03-28T09:33:34.035790Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202166408973:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202166408970:2446], cookie# 1 2025-03-28T09:33:34.035879Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197871440869:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486791202166408971:2446] 2025-03-28T09:33:34.035913Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197871440869:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202166408971:2446], cookie# 1 2025-03-28T09:33:34.035937Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197871440872:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486791202166408972:2446] 2025-03-28T09:33:34.035969Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197871440872:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202166408972:2446], cookie# 1 2025-03-28T09:33:34.035986Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197871440875:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486791202166408973:2446] 2025-03-28T09:33:34.036000Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197871440875:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202166408973:2446], cookie# 1 2025-03-28T09:33:34.037549Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202166408971:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791197871440869:2051], cookie# 1 2025-03-28T09:33:34.037577Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202166408972:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791197871440872:2054], cookie# 1 2025-03-28T09:33:34.037593Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202166408973:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791197871440875:2057], cookie# 1 2025-03-28T09:33:34.037629Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202166408967:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791202166408968:2446], cookie# 1 2025-03-28T09:33:34.037655Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202166408967:2446][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:33:34.037671Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202166408967:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791202166408969:2446], cookie# 1 2025-03-28T09:33:34.037699Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202166408967:2446][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:33:34.037723Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202166408967:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791202166408970:2446], cookie# 1 2025-03-28T09:33:34.037738Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202166408967:2446][/dc-1] Unexpected sync response: sender# [1:7486791202166408970:2446], cookie# 1 2025-03-28T09:33:34.146597Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486791197871441229:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-28T09:33:34.147992Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486791197871441229:2139], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir 
CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... DEBUG: [main][1:7486791210756344096:2855][/dc-1/USER_0/SimpleTable] Unexpected sync response: sender# [1:7486791210756344098:2855], cookie# 1 2025-03-28T09:33:36.152759Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791210756344095:2854] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:33:36.152820Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791210756344095:2854] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0/SimpleTable" Options { ShowPrivateTable: true } 2025-03-28T09:33:36.152844Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197871440872:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 3 }: sender# [1:7486791210756344101:2855] 2025-03-28T09:33:36.153590Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791210756344095:2854] Handle TEvDescribeSchemeResult Forward to# [1:7486791210756344094:2853] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 52 Record# Status: StatusSuccess Path: "/dc-1/USER_0/SimpleTable" PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743154416050 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-28T09:33:36.194837Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791197871441229:2139], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:36.194989Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486791197871441229:2139], cacheItem# { Subscriber: { Subscriber: [1:7486791202166408982:2453] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:36.195082Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486791210756344106:2858], recipient# [1:7486791210756344105:2311], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 
ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:36.266986Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197871440869:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7486791205742051095:2100] 2025-03-28T09:33:36.267051Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791197871440869:2051] Unsubscribe: subscriber# [3:7486791205742051095:2100], path# /dc-1/USER_0 2025-03-28T09:33:36.267088Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197871440872:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7486791205742051096:2100] 2025-03-28T09:33:36.267100Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791197871440872:2054] Unsubscribe: subscriber# [3:7486791205742051096:2100], path# /dc-1/USER_0 2025-03-28T09:33:36.267119Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197871440875:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7486791205742051099:2100] 2025-03-28T09:33:36.267129Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791197871440875:2057] Unsubscribe: subscriber# [3:7486791205742051099:2100], path# /dc-1/USER_0 2025-03-28T09:33:36.267466Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-28T09:33:36.274747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:33:37.075031Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791205742051108:2107], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:37.075169Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791205742051108:2107], cacheItem# { Subscriber: { Subscriber: [3:7486791210037018734:2310] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:37.075260Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791214331986209:2358], recipient# [3:7486791214331986208:2325], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:38.079153Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791205742051108:2107], request# { ErrorCount: 0 DatabaseName: 
/dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:38.079249Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791205742051108:2107], cacheItem# { Subscriber: { Subscriber: [3:7486791210037018734:2310] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:38.079626Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791218626953507:2359], recipient# [3:7486791218626953506:2326], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] }
>> TWebLoginService::AuditLogEmptySIDsLoginSuccess [GOOD]
>> TWebLoginService::AuditLogLdapLoginBadPassword
>> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false [GOOD]
>> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true
>> TSchemeShardLoginTest::BanUnbanUser [GOOD]
>> TSchemeShardLoginTest::BanUserWithWaiting
>> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false [GOOD]
>> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::LsLs [GOOD]
Test command err:
2025-03-28T09:33:33.377398Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791197707839360:2220];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:33.381508Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:33.537203Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791199214613455:2266];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:33.537417Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ff/r3tmp/tmpsDCPuW/pdisk_1.dat 2025-03-28T09:33:34.442499Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:34.522185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:34.522297Z node 1 :HIVE WARN: HIVE#72057594037968897
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:34.526569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:34.526666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:34.528154Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:34.535246Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:33:34.537082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:34.539987Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:34.579508Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:30158 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:33:34.822862Z node 1 :TX_PROXY DEBUG: actor# [1:7486791197707839441:2137] Handle TEvNavigate describe path dc-1 2025-03-28T09:33:34.822900Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791202002807217:2465] HANDLE EvNavigateScheme dc-1 2025-03-28T09:33:34.822995Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791197707839470:2154], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:34.823077Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202002807024:2310][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486791197707839470:2154], cookie# 1 2025-03-28T09:33:34.824642Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202002807039:2310][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202002807036:2310], cookie# 1 2025-03-28T09:33:34.824680Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202002807040:2310][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202002807037:2310], cookie# 1 2025-03-28T09:33:34.824697Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202002807041:2310][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202002807038:2310], cookie# 1 2025-03-28T09:33:34.824736Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197707839085:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202002807039:2310], cookie# 1 2025-03-28T09:33:34.824761Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197707839088:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202002807040:2310], cookie# 1 2025-03-28T09:33:34.824779Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791197707839091:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202002807041:2310], cookie# 1 2025-03-28T09:33:34.824817Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202002807039:2310][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791197707839085:2052], cookie# 1 
2025-03-28T09:33:34.824834Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202002807040:2310][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791197707839088:2055], cookie# 1 2025-03-28T09:33:34.824845Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202002807041:2310][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791197707839091:2058], cookie# 1 2025-03-28T09:33:34.824891Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202002807024:2310][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791202002807036:2310], cookie# 1 2025-03-28T09:33:34.824917Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202002807024:2310][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:33:34.824931Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202002807024:2310][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791202002807037:2310], cookie# 1 2025-03-28T09:33:34.824947Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202002807024:2310][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:33:34.824968Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202002807024:2310][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791202002807038:2310], cookie# 1 2025-03-28T09:33:34.824981Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202002807024:2310][/dc-1] Unexpected sync response: sender# [1:7486791202002807038:2310], cookie# 1 2025-03-28T09:33:34.825024Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486791197707839470:2154], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:33:34.832591Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486791197707839470:2154], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486791202002807024:2310] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:33:34.832723Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486791197707839470:2154], cacheItem# { Subscriber: { Subscriber: [1:7486791202002807024:2310] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-28T09:33:34.835089Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486791202002807218:2466], recipient# [1:7486791202002807217:2465], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: 
[72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:33:34.835155Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791202002807217:2465] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:33:34.872109Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791202002807217:2465] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:33:34.876955Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791202002807217:2465] Handle TEvDescribeSchemeResult Forward to# [1:7486791202002807216:2464] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:33:34.895734Z node 1 :TX_PROXY DEBUG: actor# [1:7486791197707839441:2137] Handle TEvProposeTransaction 202 ... 
ue ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:38.055191Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7486791220689450055:2124], recipient# [2:7486791216394482742:2308], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:38.055620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7486791216394482742:2308], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:38.229123Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7486791199214613496:2108], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:38.229252Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7486791199214613496:2108], cacheItem# { Subscriber: { Subscriber: [2:7486791216394482744:2121] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:38.229302Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7486791199214613496:2108], cacheItem# { Subscriber: { Subscriber: [2:7486791216394482745:2122] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:38.229430Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7486791220689450056:2125], recipient# [2:7486791216394482742:2308], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:38.229847Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7486791216394482742:2308], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:38.447026Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7486791199214613496:2108], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:38.447161Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7486791199214613496:2108], cacheItem# { Subscriber: { Subscriber: [2:7486791216394482744:2121] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:38.447232Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7486791199214613496:2108], cacheItem# { Subscriber: { Subscriber: [2:7486791216394482745:2122] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:38.447337Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7486791220689450057:2126], recipient# [2:7486791216394482742:2308], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:38.447769Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7486791216394482742:2308], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:38.518783Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486791199214613455:2266];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:38.518857Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:38.563010Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7486791199214613496:2108], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:38.563137Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7486791199214613496:2108], cacheItem# { Subscriber: { Subscriber: [2:7486791207804548119:2115] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:38.563243Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7486791220689450059:2127], recipient# [2:7486791220689450058:2309], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:38.997097Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7486791199214613496:2108], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:38.997231Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7486791199214613496:2108], cacheItem# { Subscriber: { Subscriber: [2:7486791216394482728:2119] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:38.997331Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7486791220689450065:2128], 
recipient# [2:7486791220689450064:2310], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:38.997538Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;

------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD]
Test command err:
2025-03-28T09:33:33.230491Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791198169095913:2084];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:33.230571Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013e9/r3tmp/tmp69nP0u/pdisk_1.dat 2025-03-28T09:33:33.784928Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:33.797425Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:33.802779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:33.822374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9728 WaitRootIsUp 'dc-1'...
TClient::Ls request: dc-1 2025-03-28T09:33:34.230938Z node 1 :TX_PROXY DEBUG: actor# [1:7486791198169096134:2122] Handle TEvNavigate describe path dc-1 2025-03-28T09:33:34.231131Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791202464063901:2441] HANDLE EvNavigateScheme dc-1 2025-03-28T09:33:34.231295Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791198169096164:2138], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:34.231326Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7486791198169096164:2138], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-28T09:33:34.231599Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202464063902:2442][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:33:34.242178Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791198169095806:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486791202464063906:2442] 2025-03-28T09:33:34.242245Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791198169095806:2051] Subscribe: subscriber# [1:7486791202464063906:2442], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:34.242320Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791198169095812:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486791202464063908:2442] 2025-03-28T09:33:34.242342Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791198169095812:2057] Subscribe: subscriber# [1:7486791202464063908:2442], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:34.242438Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791198169096164:2138], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:34.242515Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202464063906:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791198169095806:2051] 2025-03-28T09:33:34.242551Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202464063908:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791198169095812:2057] 2025-03-28T09:33:34.242594Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202464063902:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791202464063903:2442] 2025-03-28T09:33:34.242647Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202464063902:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791202464063905:2442] 2025-03-28T09:33:34.242723Z node 1 :SCHEME_BOARD_SUBSCRIBER 
NOTICE: [main][1:7486791202464063902:2442][/dc-1] Set up state: owner# [1:7486791198169096164:2138], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:34.242785Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791198169095809:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486791202464063907:2442] 2025-03-28T09:33:34.242851Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791198169095809:2054] Subscribe: subscriber# [1:7486791202464063907:2442], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:34.242855Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202464063906:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202464063903:2442], cookie# 1 2025-03-28T09:33:34.242873Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202464063907:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202464063904:2442], cookie# 1 2025-03-28T09:33:34.242888Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202464063908:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202464063905:2442], cookie# 1 2025-03-28T09:33:34.242950Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791198169095806:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486791202464063906:2442] 2025-03-28T09:33:34.246829Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791198169095809:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202464063907:2442], cookie# 1 2025-03-28T09:33:34.247018Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791198169095812:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486791202464063908:2442] 2025-03-28T09:33:34.247064Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791198169095812:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202464063908:2442], cookie# 1 2025-03-28T09:33:34.250818Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791198169095806:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791202464063906:2442], cookie# 1 2025-03-28T09:33:34.250946Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202464063907:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791198169095809:2054] 2025-03-28T09:33:34.250993Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202464063907:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791198169095809:2054], cookie# 1 2025-03-28T09:33:34.251012Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202464063908:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791198169095812:2057], cookie# 1 2025-03-28T09:33:34.251028Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791202464063906:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791198169095806:2051], cookie# 1 2025-03-28T09:33:34.251067Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202464063902:2442][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 
PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791202464063904:2442] 2025-03-28T09:33:34.251133Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7486791202464063902:2442][/dc-1] Path was already updated: owner# [1:7486791198169096164:2138], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:34.251191Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202464063902:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791202464063904:2442], cookie# 1 2025-03-28T09:33:34.251222Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202464063902:2442][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, failures# 0 2025-03-28T09:33:34.251245Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202464063902:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791202464063905:2442], cookie# 1 2025-03-28T09:33:34.251268Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202464063902:2442][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-03-28T09:33:34.251293Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202464063902:2442][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791202464063903:2442], cookie# 1 2025-03-28T09:33:34.251307Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791202464063902:2442][/dc-1] Unexpected sync response: sender# [1:7486791202464063903:2442], cookie# 1 2025-03-28T09:33:34.251343Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791198169095809:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486791202464063907:2442] 2025-03-28T09:33:34.349068Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486791198169096164:2138], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 
PathOwnerId: 72057594046644480 } 2025-03-28T09:33:34.349588Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486791198169096164:2138], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathI ... :2384] Ack for unknown update (already acked?): sender# [1:7486791198169096539:2387], cookie# 281474976710660 2025-03-28T09:33:35.336343Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:7486791198169096536:2384] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 18446744073709551615 }: sender# [1:7486791198169096539:2387], cookie# 281474976710660 2025-03-28T09:33:35.336355Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: [1:7486791198169096536:2384] Ack for unknown update (already acked?): sender# [1:7486791198169096539:2387], cookie# 281474976710660 2025-03-28T09:33:35.336443Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710660 2025-03-28T09:33:35.336509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710660 2025-03-28T09:33:35.336519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710660 2025-03-28T09:33:35.336534Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2025-03-28T09:33:35.336555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-28T09:33:35.336782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710660 2025-03-28T09:33:35.336825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976710660 2025-03-28T09:33:35.336831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710660 2025-03-28T09:33:35.336840Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710660, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:33:35.336850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 6 2025-03-28T09:33:35.336887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710660, subscribers: 1 2025-03-28T09:33:35.336898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7486791206759031686:2317] 2025-03-28T09:33:35.338880Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791198169095806:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 
18446744073709551615 }: sender# [3:7486791205027666648:2221] 2025-03-28T09:33:35.338925Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791198169095809:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7486791205027666649:2221] 2025-03-28T09:33:35.338948Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791198169095812:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7486791205027666650:2221] 2025-03-28T09:33:35.341122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:33:35.341148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:33:35.341159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:33:35.341169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:33:35.341251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710660 2025-03-28T09:33:35.341279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710660 2025-03-28T09:33:35.346808Z node 1 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2025-03-28T09:33:35.351908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-28T09:33:35.352267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2025-03-28T09:33:35.352541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2025-03-28T09:33:35.352703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2025-03-28T09:33:35.352805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-28T09:33:35.352947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-28T09:33:35.353055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-28T09:33:35.353160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:33:35.353276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:33:35.353363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-28T09:33:35.353485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for 
pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:33:35.353630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:33:35.353645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-28T09:33:35.353687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:33:35.357648Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037890 2025-03-28T09:33:35.361340Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791198169096164:2138], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:35.361447Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486791198169096164:2138], cacheItem# { Subscriber: { Subscriber: [1:7486791202464063916:2449] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:35.361672Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037891 2025-03-28T09:33:35.361774Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037888 2025-03-28T09:33:35.363979Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486791206759031754:2863], recipient# [1:7486791206759031743:2319], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:35.365130Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037889 2025-03-28T09:33:35.375714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-28T09:33:35.375756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-28T09:33:35.375819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:4 2025-03-28T09:33:35.375828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2025-03-28T09:33:35.375850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-28T09:33:35.375863Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-28T09:33:35.375889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-28T09:33:35.375903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-28T09:33:35.375933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-28T09:33:35.375977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 TabletID: 72075186224037888 Status: OK Info { TabletID: 72075186224037888 Channels { Channel: 0 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } Channels { Channel: 1 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } Channels { Channel: 2 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } TabletType: Coordinator Version: 1 TenantIdOwner: 72057594046644480 TenantIdLocalId: 2 } 2025-03-28T09:33:35.378843Z node 1 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] Test command err: 2025-03-28T09:33:33.239964Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791196887057321:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:33.240009Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013f4/r3tmp/tmpUmVA7V/pdisk_1.dat 2025-03-28T09:33:33.863226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:33.863338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:33.868777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:33.910385Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:30632 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:33:34.194047Z node 1 :TX_PROXY DEBUG: actor# [1:7486791196887057508:2135] Handle TEvNavigate describe path dc-1 2025-03-28T09:33:34.194102Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791201182025255:2445] HANDLE EvNavigateScheme dc-1 2025-03-28T09:33:34.194239Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791196887057578:2164], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:34.194276Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7486791196887057578:2164], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-28T09:33:34.194467Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201182025256:2446][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:33:34.208485Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196887057156:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486791201182025260:2446] 2025-03-28T09:33:34.208503Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196887057159:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486791201182025261:2446] 2025-03-28T09:33:34.208564Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791196887057156:2051] Subscribe: subscriber# [1:7486791201182025260:2446], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:34.208565Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791196887057159:2054] Subscribe: subscriber# [1:7486791201182025261:2446], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:34.208632Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196887057162:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486791201182025262:2446] 2025-03-28T09:33:34.208650Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791196887057162:2057] Subscribe: subscriber# [1:7486791201182025262:2446], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:34.208743Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201182025261:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791196887057159:2054] 2025-03-28T09:33:34.208777Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201182025260:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791196887057156:2051] 2025-03-28T09:33:34.208797Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196887057159:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486791201182025261:2446] 2025-03-28T09:33:34.208797Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201182025262:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791196887057162:2057] 2025-03-28T09:33:34.208841Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201182025256:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 
72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791201182025258:2446] 2025-03-28T09:33:34.209141Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196887057156:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486791201182025260:2446] 2025-03-28T09:33:34.209159Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196887057162:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486791201182025262:2446] 2025-03-28T09:33:34.210009Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201182025256:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791201182025257:2446] 2025-03-28T09:33:34.210403Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7486791201182025256:2446][/dc-1] Set up state: owner# [1:7486791196887057578:2164], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:34.210591Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201182025256:2446][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791201182025259:2446] 2025-03-28T09:33:34.210647Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7486791201182025256:2446][/dc-1] Path was already updated: owner# [1:7486791196887057578:2164], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:34.210713Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201182025260:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201182025257:2446], cookie# 1 2025-03-28T09:33:34.210739Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201182025261:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201182025258:2446], cookie# 1 2025-03-28T09:33:34.210757Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201182025262:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201182025259:2446], cookie# 1 2025-03-28T09:33:34.211250Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196887057156:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201182025260:2446], cookie# 1 2025-03-28T09:33:34.211297Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196887057159:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201182025261:2446], cookie# 1 2025-03-28T09:33:34.211316Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196887057162:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201182025262:2446], cookie# 1 2025-03-28T09:33:34.211469Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201182025260:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791196887057156:2051], cookie# 1 2025-03-28T09:33:34.211486Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[replica][1:7486791201182025261:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791196887057159:2054], cookie# 1 2025-03-28T09:33:34.211507Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201182025262:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791196887057162:2057], cookie# 1 2025-03-28T09:33:34.211542Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201182025256:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791201182025257:2446], cookie# 1 2025-03-28T09:33:34.211576Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201182025256:2446][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, failures# 0 2025-03-28T09:33:34.211590Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201182025256:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791201182025258:2446], cookie# 1 2025-03-28T09:33:34.211609Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201182025256:2446][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-03-28T09:33:34.211647Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201182025256:2446][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791201182025259:2446], cookie# 1 2025-03-28T09:33:34.211669Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201182025256:2446][/dc-1] Unexpected sync response: sender# [1:7486791201182025259:2446], cookie# 1 2025-03-28T09:33:34.286327Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486791196887057578:2164], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-28T09:33:34.286858Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486791196887057578:2164], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVer ... [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743154417050 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "k... (TRUNCATED) 2025-03-28T09:33:37.138351Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486791214066927724:2869], recipient# [1:7486791214066927723:2868], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/SimpleTable TableId: [72057594046644480:3:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:33:37.138385Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791214066927723:2868] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:33:37.138431Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791214066927723:2868] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0/SimpleTable" Options { ShowPrivateTable: true } 2025-03-28T09:33:37.139361Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791214066927723:2868] Handle TEvDescribeSchemeResult Forward to# [1:7486791214066927722:2867] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 52 Record# Status: StatusSuccess Path: "/dc-1/USER_0/SimpleTable" PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710660 CreateStep: 1743154417050 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" 
TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046644480 2025-03-28T09:33:37.220350Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2025-03-28T09:33:37.220396Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196887057156:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7486791206805483194:2104] 2025-03-28T09:33:37.220420Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791196887057156:2051] Unsubscribe: subscriber# [3:7486791206805483194:2104], path# /dc-1/USER_0 2025-03-28T09:33:37.220453Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196887057159:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7486791206805483195:2104] 2025-03-28T09:33:37.220463Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791196887057159:2054] Unsubscribe: subscriber# [3:7486791206805483195:2104], path# /dc-1/USER_0 2025-03-28T09:33:37.220493Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196887057162:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [3:7486791206805483196:2104] 2025-03-28T09:33:37.220502Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791196887057162:2057] Unsubscribe: subscriber# [3:7486791206805483196:2104], path# /dc-1/USER_0 2025-03-28T09:33:37.221581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T09:33:37.302849Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791196887057578:2164], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:37.303014Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486791196887057578:2164], cacheItem# { Subscriber: { Subscriber: [1:7486791201182025272:2452] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:37.303113Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486791214066927743:2884], recipient# [1:7486791214066927742:2313], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:37.914531Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791206805483216:2111], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: 
true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:37.914650Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791206805483216:2111], cacheItem# { Subscriber: { Subscriber: [3:7486791206805483241:2125] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:37.914783Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791215395418210:2368], recipient# [3:7486791215395418209:2325], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::GenericCases [GOOD] Test command err: 2025-03-28T09:33:33.157949Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791196844997269:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:33.158015Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00140a/r3tmp/tmpMQbrzc/pdisk_1.dat 2025-03-28T09:33:33.842968Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:33.851434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:33.851531Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:33.857734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18577 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T09:33:34.240832Z node 1 :TX_PROXY DEBUG: actor# [1:7486791196844997507:2117] Handle TEvNavigate describe path dc-1 2025-03-28T09:33:34.240875Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791201139965299:2447] HANDLE EvNavigateScheme dc-1 2025-03-28T09:33:34.240993Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791196844997530:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:34.241084Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201139965281:2441][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486791196844997530:2130], cookie# 1 2025-03-28T09:33:34.242525Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201139965285:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201139965282:2441], cookie# 1 2025-03-28T09:33:34.242566Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201139965286:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201139965283:2441], cookie# 1 2025-03-28T09:33:34.242584Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201139965287:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201139965284:2441], cookie# 1 2025-03-28T09:33:34.242652Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196844997182:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201139965285:2441], cookie# 1 2025-03-28T09:33:34.242678Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196844997185:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201139965286:2441], cookie# 1 2025-03-28T09:33:34.242716Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196844997188:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791201139965287:2441], cookie# 1 2025-03-28T09:33:34.242747Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201139965285:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791196844997182:2050], cookie# 1 2025-03-28T09:33:34.242762Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201139965286:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791196844997185:2053], cookie# 1 2025-03-28T09:33:34.242775Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791201139965287:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791196844997188:2056], cookie# 1 2025-03-28T09:33:34.242813Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201139965281:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791201139965282:2441], cookie# 1 2025-03-28T09:33:34.242835Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201139965281:2441][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, failures# 0 2025-03-28T09:33:34.242849Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201139965281:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# 
[1:7486791201139965283:2441], cookie# 1 2025-03-28T09:33:34.242877Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201139965281:2441][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2025-03-28T09:33:34.242903Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201139965281:2441][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791201139965284:2441], cookie# 1 2025-03-28T09:33:34.242917Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791201139965281:2441][/dc-1] Unexpected sync response: sender# [1:7486791201139965284:2441], cookie# 1 2025-03-28T09:33:34.242971Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486791196844997530:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:33:34.252871Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486791196844997530:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486791201139965281:2441] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:33:34.252997Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486791196844997530:2130], cacheItem# { Subscriber: { Subscriber: [1:7486791201139965281:2441] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-28T09:33:34.255562Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486791201139965300:2448], recipient# [1:7486791201139965299:2447], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:33:34.255634Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791201139965299:2447] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:33:34.300170Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791201139965299:2447] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2025-03-28T09:33:34.304289Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791201139965299:2447] Handle TEvDescribeSchemeResult Forward to# [1:7486791201139965298:2446] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult 
PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:33:34.331226Z node 1 :TX_PROXY DEBUG: actor# [1:7486791196844997507:2117] Handle TEvProposeTransaction 2025-03-28T09:33:34.331254Z node 1 :TX_PROXY DEBUG: actor# [1:7486791196844997507:2117] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:33:34.331374Z node 1 :TX_PROXY DEBUG: actor# [1:7486791196844997507:2117] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486791201139965307:2454] 2025-03-28T09:33:34.403461Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791201139965307:2454] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-28T09:33:34.403516Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791201139965307:2454] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:33:34.403585Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791201139965307:2454] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:33:34.403671Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791196844997530:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:1844674407370955161 ... _BOARD_SUBSCRIBER INFO: [main][1:7486791214024868116:3149][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [1:7486791196844997530:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:37.340571Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791214024868127:3150][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7486791196844997182:2050] 2025-03-28T09:33:37.340581Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791214024868128:3150][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7486791196844997185:2053] 2025-03-28T09:33:37.340592Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791214024868129:3150][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7486791196844997188:2056] 2025-03-28T09:33:37.340607Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791214024868117:3150][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7486791214024868124:3150] 2025-03-28T09:33:37.340618Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791214024868117:3150][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7486791214024868125:3150] 2025-03-28T09:33:37.340644Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7486791214024868117:3150][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# 
[1:7486791196844997530:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:37.340657Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791214024868117:3150][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7486791214024868126:3150] 2025-03-28T09:33:37.340668Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7486791214024868117:3150][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7486791196844997530:2130], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:37.340682Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196844997182:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7486791214024868121:3149] 2025-03-28T09:33:37.340691Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196844997182:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7486791214024868127:3150] 2025-03-28T09:33:37.340699Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196844997185:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7486791214024868122:3149] 2025-03-28T09:33:37.340709Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196844997185:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7486791214024868128:3150] 2025-03-28T09:33:37.340720Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196844997188:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7486791214024868123:3149] 2025-03-28T09:33:37.340737Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791196844997188:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7486791214024868129:3150] 2025-03-28T09:33:37.340766Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486791196844997530:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2025-03-28T09:33:37.340803Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486791196844997530:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7486791214024868116:3149] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:33:37.340866Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486791196844997530:2130], cacheItem# { Subscriber: { Subscriber: [1:7486791214024868116:3149] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:37.340882Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# 
[1:7486791196844997530:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2025-03-28T09:33:37.340913Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486791196844997530:2130], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7486791214024868117:3150] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:33:37.340939Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486791196844997530:2130], cacheItem# { Subscriber: { Subscriber: [1:7486791214024868117:3150] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:37.340995Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486791214024868130:3151], recipient# [1:7486791214024868115:2339], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:38.158783Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791196844997269:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:38.158845Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:38.191609Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791196844997530:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:38.191731Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486791196844997530:2130], cacheItem# { Subscriber: { Subscriber: [1:7486791201139965289:2443] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations 
TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:38.191802Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486791218319835448:3157], recipient# [1:7486791218319835447:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:38.342260Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791196844997530:2130], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:38.342370Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486791196844997530:2130], cacheItem# { Subscriber: { Subscriber: [1:7486791214024868107:3147] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:38.342440Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486791218319835450:3158], recipient# [1:7486791218319835449:2341], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginBadUser >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false [GOOD] >> 
TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] >> TWebLoginService::AuditLogLdapLoginBadBind ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TPersQueueMirrorer::ValidStartStream [GOOD] Test command err: 2025-03-28T09:32:03.380382Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790810911045832:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:03.380450Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:32:03.542411Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001342/r3tmp/tmpBZGvHa/pdisk_1.dat 2025-03-28T09:32:03.699059Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31808, node 1 2025-03-28T09:32:03.778496Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:03.778601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:03.813544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:03.860246Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001342/r3tmp/yandexAnMGk6.tmp 2025-03-28T09:32:03.860269Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001342/r3tmp/yandexAnMGk6.tmp 2025-03-28T09:32:03.860506Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001342/r3tmp/yandexAnMGk6.tmp 2025-03-28T09:32:03.860665Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:32:03.906954Z INFO: TTestServer started on Port 20922 GrpcPort 31808 TClient is connected to server localhost:20922 PQClient connected to localhost:31808 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:04.164149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:32:04.198801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:04.331783Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:05.982362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790819500981246:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:05.982583Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:05.982915Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790819500981259:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:05.987081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:32:05.988904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790819500981293:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:05.989027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:05.998277Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790819500981261:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:32:06.253442Z node 1 :TX_PROXY ERROR: Actor# [1:7486790823795948613:2448] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:06.278010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:06.346204Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:06.371048Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790823795948629:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:32:06.372372Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTIwZTVlM2QtNzhmNTNmODktN2IzZTMzZWMtODU5MjlmYzM=, ActorId: [1:7486790819500981228:2336], ActorState: ExecuteState, TraceId: 01jqe1gae1fwvcjphg1dj1ek07, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:32:06.374586Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:32:06.409384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subscribe to ClusterTracker from [1:7486790823795948914:2631] 2025-03-28T09:32:08.380550Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790810911045832:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:08.380631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-03-28T09:32:12.567924Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-28T09:32:12.581740Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-28T09:32:12.582885Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7486790849565752982:2791], Recipient [1:7486790810911046255:2192]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:12.582917Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:12.582936Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:32:12.582980Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7486790849565752978:2788], Recipient [1:7486790810911046255:2192]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-03-28T09:32:12.583025Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:32:12.643555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 10 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:32:12.644062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:32:12.644355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-03-28T09:32:12.644399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-03-28T09:32:12.644433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, Loca ... 
927Z node 8 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-03-28T09:33:38.183986Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 0 2025-03-28T09:33:38.184108Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 0 user user offset 5 count 6 size 1484 endOffset 10 max time lag 0ms effective offset 5 2025-03-28T09:33:38.184142Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 5, current partition end offset: 10 2025-03-28T09:33:38.184291Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-03-28T09:33:38.184329Z node 8 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:33:38.184494Z node 8 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 0 messageNo: 0 requestId: cookie: 5 2025-03-28T09:33:38.181606Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 got read request: guid# 8c42c86-7d43b39f-6d692a87-27b28e9 2025-03-28T09:33:38.181619Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 1 EndOffset: 10 WriteTimestampMS: 1743154418025 CreateTimestampMS: 1743154418001 SizeLag: 1237 WriteTimestampEstimateMS: 1743154418164 } Cookie: 18446744073709551615 } 2025-03-28T09:33:38.181667Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 1 committedOffset 1 2025-03-28T09:33:38.181752Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 sending to client partition status 2025-03-28T09:33:38.186467Z :DEBUG: [] [] [a73808c7-3f023bee-1b44b43e-1d6fa048] [] Got ReadResponse, serverBytesSize = 795, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428005 2025-03-28T09:33:38.183058Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 read_offset: 5 } } 2025-03-28T09:33:38.183245Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, commitOffset# (empty maybe) 2025-03-28T09:33:38.183306Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 readOffset 1 committedOffset 1 clientCommitOffset (empty maybe) clientReadOffset 5 2025-03-28T09:33:38.183337Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 5 endOffset 10 2025-03-28T09:33:38.183388Z node 7 
:PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 5, endOffset# 10, WTime# 1743154418025, sizeLag# 1237 2025-03-28T09:33:38.183402Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1TEvPartitionReady. Aval parts: 1 2025-03-28T09:33:38.186627Z :DEBUG: [] [] [a73808c7-3f023bee-1b44b43e-1d6fa048] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428005 2025-03-28T09:33:38.183447Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 performing read request: guid# 471724c4-155f605-cb68ae54-cf9bc981, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 6, size# 1484, partitionsAsked# 1, maxTimeLag# 0ms 2025-03-28T09:33:38.183526Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 6 maxSize 1484 maxTimeLagMs 0 readTimestampMs 0 readOffset 5 EndOffset 10 ClientCommitOffset 1 committedOffset 1 Guid 471724c4-155f605-cb68ae54-cf9bc981 2025-03-28T09:33:38.185209Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 10 Result { Offset: 5 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 6 WriteTimestampMS: 1743154418061 CreateTimestampMS: 1743154418056 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 6 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 7 WriteTimestampMS: 1743154418069 CreateTimestampMS: 1743154418056 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 7 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 8 WriteTimestampMS: 1743154418070 CreateTimestampMS: 1743154418056 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 8 Data: "... 94 bytes ..." SourceId: "\000src-id-test" SeqNo: 9 WriteTimestampMS: 1743154418070 CreateTimestampMS: 1743154418057 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } Result { Offset: 9 Data: "... 94 bytes ..." 
SourceId: "\000src-id-test" SeqNo: 10 WriteTimestampMS: 1743154418074 CreateTimestampMS: 1743154418057 UncompressedSize: 10 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 23 RealReadOffset: 9 WaitQuotaTimeMs: 0 EndOffset: 10 StartOffset: 0 } Cookie: 5 } 2025-03-28T09:33:38.185463Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset10 2025-03-28T09:33:38.185504Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 10 ReadOffset 10 ReadGuid 471724c4-155f605-cb68ae54-cf9bc981 has messages 1 2025-03-28T09:33:38.185641Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 read done: guid# 471724c4-155f605-cb68ae54-cf9bc981, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 795 2025-03-28T09:33:38.185671Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 response to read: guid# 471724c4-155f605-cb68ae54-cf9bc981 2025-03-28T09:33:38.185908Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 Process answer. Aval parts: 0 2025-03-28T09:33:38.187102Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (5-9) 2025-03-28T09:33:38.187161Z :DEBUG: [] [] [a73808c7-3f023bee-1b44b43e-1d6fa048] [] Returning serverBytesSize = 795 to budget 2025-03-28T09:33:38.187201Z :DEBUG: [] [] [a73808c7-3f023bee-1b44b43e-1d6fa048] [] In ContinueReadingDataImpl, ReadSizeBudget = 795, ReadSizeServerDelta = 52428005 2025-03-28T09:33:38.187517Z :DEBUG: [] [] [a73808c7-3f023bee-1b44b43e-1d6fa048] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-03-28T09:33:38.187698Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (5-5) 2025-03-28T09:33:38.187791Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (6-6) 2025-03-28T09:33:38.187844Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (7-7) 2025-03-28T09:33:38.187876Z :DEBUG: [] Take Data. Partition 0. Read: {2, 1} (8-8) 2025-03-28T09:33:38.187918Z :DEBUG: [] Take Data. Partition 0. Read: {3, 0} (9-9) 2025-03-28T09:33:38.187997Z :DEBUG: [] [] [a73808c7-3f023bee-1b44b43e-1d6fa048] [] The application data is transferred to the client. Number of messages 5, size 115 bytes 2025-03-28T09:33:38.187749Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 grpc read done: success# 1, data# { read_request { bytes_size: 795 } } 2025-03-28T09:33:38.188049Z :DEBUG: [] [] [a73808c7-3f023bee-1b44b43e-1d6fa048] [] Returning serverBytesSize = 0 to budget 2025-03-28T09:33:38.187881Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 got read request: guid# 1c78c169-f4acca10-6a1b9e36-e90fd8da 2025-03-28T09:33:38.188249Z :INFO: [] [] [a73808c7-3f023bee-1b44b43e-1d6fa048] Closing read session. 
Close timeout: 0.000000s 2025-03-28T09:33:38.188300Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:9:1 2025-03-28T09:33:38.188353Z :INFO: [] [] [a73808c7-3f023bee-1b44b43e-1d6fa048] Counters: { Errors: 0 CurrentSessionLifetimeMs: 28 BytesRead: 115 MessagesRead: 5 BytesReadCompressed: 115 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:33:38.188468Z :NOTICE: [] [] [a73808c7-3f023bee-1b44b43e-1d6fa048] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-03-28T09:33:38.188526Z :DEBUG: [] [] [a73808c7-3f023bee-1b44b43e-1d6fa048] [] Abort session to cluster 2025-03-28T09:33:38.189752Z :NOTICE: [] [] [a73808c7-3f023bee-1b44b43e-1d6fa048] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:33:38.190934Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_2_18098051494537531916_v1 2025-03-28T09:33:38.190979Z node 8 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7486791218534749424:2522] destroyed 2025-03-28T09:33:38.191032Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_2_18098051494537531916_v1 2025-03-28T09:33:38.189541Z node 7 :PQ_READ_PROXY DEBUG: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 grpc read done: success# 0, data# { } 2025-03-28T09:33:38.189564Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 grpc read failed 2025-03-28T09:33:38.191472Z :DEBUG: [] MessageGroupId [src-id-test] SessionId [src-id-test|406e9199-9145b4ff-a90b133a-29f2e30d_0] Write session: destroy 2025-03-28T09:33:38.189592Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 closed 2025-03-28T09:33:38.190108Z node 7 :PQ_READ_PROXY INFO: session cookie 2 consumer shared/user session shared/user_7_2_18098051494537531916_v1 is DEAD 2025-03-28T09:33:38.190629Z node 7 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [7:7486791218534749421:2519] disconnected; active server actors: 1 2025-03-28T09:33:38.190655Z node 7 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe [7:7486791218534749421:2519] client user disconnected session shared/user_7_2_18098051494537531916_v1 |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true >> KqpSystemView::Join |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinTables >> KqpSystemView::FailNavigate >> KqpSysColV1::StreamInnerJoinTables >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true >> KqpSystemView::PartitionStatsFollower >> KqpSysColV1::StreamSelectRange >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange3 >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] >> KqpSysColV1::StreamInnerJoinSelect >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup [GOOD] >> TSchemeShardLoginTest::FailedLoginWithInvalidUser >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for 
TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:33:39.776136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:33:39.776247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:33:39.776292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:33:39.776332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:33:39.777265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:33:39.777311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:33:39.777403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:33:39.777483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:33:39.777869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:39.874289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:33:39.874377Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:39.886127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:39.886375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:33:39.886602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:33:39.899186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:33:39.899433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:33:39.900100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:39.900747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:33:39.906642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:39.914961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:39.915064Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:39.919289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:33:39.919373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:39.919465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:33:39.919780Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:33:39.930430Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:33:40.060501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:33:40.060721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.060933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:33:40.061189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:33:40.061262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.064094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:40.064282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:33:40.064528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.064582Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:33:40.064618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:33:40.064662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:33:40.066725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.066780Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:33:40.066814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:33:40.068799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.068852Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.068913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.068979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.072488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 
72057594046316545 2025-03-28T09:33:40.074247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:33:40.074471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:33:40.075632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:40.075763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:40.075812Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.076093Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:33:40.076147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.076297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:33:40.076548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:33:40.078350Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:40.078409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:40.078562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:40.078596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:33:40.078664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.078724Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:33:40.078807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:40.078840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.078875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:40.078903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.078934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:33:40.078997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.079046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is 
done, operation id: 1:0 2025-03-28T09:33:40.079079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:33:40.079151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:33:40.079200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:33:40.079232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:33:40.088142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:40.088266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:40.088299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:42.310211Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:42.310350Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:33:42.310396Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:42.310435Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:33:42.310543Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:33:42.316992Z node 4 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [4:125:2151] sender: [4:238:2058] recipient: [4:15:2062] 2025-03-28T09:33:42.327567Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:33:42.327755Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:42.327972Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:33:42.328165Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:33:42.328210Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:42.330325Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:42.330486Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: 
StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:33:42.330721Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:42.330779Z node 4 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:33:42.330815Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:33:42.330852Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:33:42.332767Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:42.332825Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:33:42.332861Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:33:42.335583Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:42.335638Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:42.335687Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:42.335739Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:33:42.335876Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:33:42.337467Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:33:42.337639Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:33:42.338484Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:42.338610Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 17179871342 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:42.338659Z node 4 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:42.338946Z node 4 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:33:42.339002Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:42.339180Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:33:42.339249Z 
node 4 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:33:42.341122Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:42.341172Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:42.341360Z node 4 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:42.341400Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [4:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:33:42.341746Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:42.341797Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:33:42.341930Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:42.341970Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:42.342010Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:42.342046Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:42.342086Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:33:42.342129Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:42.342170Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:33:42.342203Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:33:42.342272Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:33:42.342313Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:33:42.342351Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:33:42.342873Z node 4 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:42.342973Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:42.343011Z node 4 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:33:42.343050Z node 4 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:33:42.343091Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:33:42.343181Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 
72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:33:42.346603Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:33:42.347076Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:42.348276Z node 4 :TX_PROXY DEBUG: actor# [4:268:2259] Bootstrap 2025-03-28T09:33:42.367828Z node 4 :TX_PROXY DEBUG: actor# [4:268:2259] Become StateWork (SchemeCache [4:276:2267]) 2025-03-28T09:33:42.368090Z node 4 :HTTP WARN: 127.0.0.1:0 POST /login 2025-03-28T09:33:42.368390Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:63954, port: 63954 2025-03-28T09:33:42.368482Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:33:42.371786Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:63954. Invalid credentials 2025-03-28T09:33:42.372373Z node 4 :HTTP ERROR: Login fail for user1@ldap: Could not login via LDAP 2025-03-28T09:33:42.372743Z node 4 :TX_PROXY DEBUG: actor# [4:268:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:33:42.375646Z node 4 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 AUDIT LOG buffer(2): 2025-03-28T09:33:42.330445Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2025-03-28T09:33:42.372087Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:63954. Invalid credentials, login_user=user1@ldap, sanitized_token={none} AUDIT LOG checked line: 2025-03-28T09:33:42.372087Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=ERROR, detailed_status=UNAUTHORIZED, reason=Could not login via LDAP: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:63954. 
Invalid credentials, login_user=user1@ldap, sanitized_token={none} >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] >> KqpSysColV1::InnerJoinSelectAsterisk >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] Test command err: 2025-03-28T09:33:33.056390Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791199043428451:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:33.056472Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001416/r3tmp/tmpIuxRRi/pdisk_1.dat 2025-03-28T09:33:33.727801Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:33.740325Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:33.740424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:33.755903Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65293 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:33:34.059041Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791199043428731:2127], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:34.059156Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791199043428731:2127], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:34.059182Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: Create subscriber: self# [1:7486791199043428731:2127], path# /dc-1, domainOwnerId# 72057594046644480 2025-03-28T09:33:34.059348Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791203338396479:2437][/dc-1] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:33:34.076488Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791199043428388:2049] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486791203338396483:2437] 2025-03-28T09:33:34.076565Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791199043428388:2049] Subscribe: subscriber# [1:7486791203338396483:2437], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:34.076679Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791199043428394:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: 
/dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486791203338396485:2437] 2025-03-28T09:33:34.076701Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791199043428394:2055] Subscribe: subscriber# [1:7486791203338396485:2437], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:34.077011Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791203338396483:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791199043428388:2049] 2025-03-28T09:33:34.077038Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791203338396485:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791199043428394:2055] 2025-03-28T09:33:34.077075Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791203338396479:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791203338396480:2437] 2025-03-28T09:33:34.077111Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791203338396479:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791203338396482:2437] 2025-03-28T09:33:34.077155Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:7486791203338396479:2437][/dc-1] Set up state: owner# [1:7486791199043428731:2127], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:34.077277Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791199043428388:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486791203338396483:2437] 2025-03-28T09:33:34.077296Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791199043428394:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486791203338396485:2437] 2025-03-28T09:33:34.077457Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791199043428391:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [1:7486791203338396484:2437] 2025-03-28T09:33:34.077500Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791199043428391:2052] Subscribe: subscriber# [1:7486791203338396484:2437], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:34.077568Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791203338396484:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791199043428391:2052] 2025-03-28T09:33:34.077624Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791203338396479:2437][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791203338396481:2437] 2025-03-28T09:33:34.077696Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:7486791203338396479:2437][/dc-1] Path was already updated: owner# [1:7486791199043428731:2127], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, 
LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:34.077726Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791199043428391:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [1:7486791203338396484:2437] 2025-03-28T09:33:34.103359Z node 1 :TX_PROXY DEBUG: actor# [1:7486791199043428708:2114] Handle TEvNavigate describe path dc-1 2025-03-28T09:33:34.103401Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791203338396487:2439] HANDLE EvNavigateScheme dc-1 2025-03-28T09:33:34.192035Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486791199043428731:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 } 2025-03-28T09:33:34.192441Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486791199043428731:2127], notify# NKikimr::TSchemeBoardEvents::TEvNotifyUpdate { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DescribeSchemeResult: Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [1:7486791203338396479:2437] DomainOwnerId: 72057594046644480 
Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:33:34.192698Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486791199043428731:2127], cacheItem# { Subscriber: { Subscriber: [1:7486791203338396479:2437] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:34.192901Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486791203338396491:2443], recipient# [1:7486791203338396478:2436], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 72057594046644480 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:33:34.192959Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791199043428731:2127], request# { ErrorCount: 0 DatabaseNam ... 
251:2775] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:33:41.500711Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7486791230492587252:2776][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7486791230492587261:2776] 2025-03-28T09:33:41.500740Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][4:7486791230492587252:2776][/dc-1/USER_0/.metadata/workload_manager/running_requests] Set up state: owner# [4:7486791217607684231:2120], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:41.500759Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][4:7486791230492587252:2776][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7486791230492587262:2776] 2025-03-28T09:33:41.500778Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: [main][4:7486791230492587252:2776][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7486791217607684231:2120], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:41.500780Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486791217607684231:2120], cacheItem# { Subscriber: { Subscriber: [4:7486791230492587251:2775] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:41.500812Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [4:7486791217607684231:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests PathId: Strong: 0 } 2025-03-28T09:33:41.500899Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [4:7486791217607684231:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests PathId: Strong: 0 }, by path# { Subscriber: { Subscriber: [4:7486791230492587252:2776] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:33:41.500950Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486791217607684231:2120], cacheItem# { Subscriber: { Subscriber: [4:7486791230492587252:2776] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests 
TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:41.501020Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:33:41.501044Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7486791230492587266:2778], recipient# [4:7486791230492587244:2373], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:41.501732Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7486791230492587244:2373], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:41.582980Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7486791217607684231:2120], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:41.583126Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486791217607684231:2120], cacheItem# { Subscriber: { Subscriber: [4:7486791230492587251:2775] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:41.583192Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486791217607684231:2120], cacheItem# { Subscriber: { Subscriber: [4:7486791230492587252:2776] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:41.583290Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7486791230492587267:2779], recipient# [4:7486791230492587244:2373], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:41.583673Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7486791230492587244:2373], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:41.783008Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7486791217607684231:2120], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:41.783145Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486791217607684231:2120], cacheItem# { Subscriber: { Subscriber: [4:7486791230492587251:2775] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:41.783194Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [4:7486791217607684231:2120], cacheItem# { Subscriber: { Subscriber: [4:7486791230492587252:2776] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:41.783314Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [4:7486791230492587268:2780], recipient# [4:7486791230492587244:2373], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:41.783739Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7486791230492587244:2373], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } >> KqpSysColV1::StreamSelectRowAsterisk >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] >> KqpSystemView::PartitionStatsRange2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:125:2058] recipient: [1:108:2140] 2025-03-28T09:33:39.775713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:33:39.775828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:33:39.775885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:33:39.775920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:33:39.776655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:33:39.776713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:33:39.776792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:33:39.776861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:33:39.778932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:39.871276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:33:39.871337Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:39.888300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:39.888452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:33:39.888587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:33:39.894040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:33:39.894498Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:33:39.897155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:39.897504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:33:39.908189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:39.910331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-03-28T09:33:39.910404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:39.910512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:33:39.910575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:39.910741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:33:39.911707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:33:39.918274Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:240:2058] recipient: [1:15:2062] 2025-03-28T09:33:40.040456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:33:40.042016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.042906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:33:40.044530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:33:40.044635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.047719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:40.047847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:33:40.048077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.048193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:33:40.048227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:33:40.048267Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:33:40.050228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.050274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:33:40.050325Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:33:40.052136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.052186Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.052237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.052293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.056529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:33:40.058458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:33:40.059226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:33:40.060225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:40.060360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 4294969452 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:40.060401Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.061623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:33:40.061685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.061882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:33:40.061955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:33:40.063853Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:40.063909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:40.064108Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:40.064160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:33:40.065001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.065050Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:33:40.065136Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:40.065165Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.065198Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:40.065225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.065259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:33:40.065294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.065322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:33:40.065348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:33:40.065418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:33:40.065461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:33:40.065503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:33:40.067107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:40.067221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:40.067254Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 6678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:33:43.137452Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:43.137621Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:43.137678Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-03-28T09:33:43.137732Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-03-28T09:33:43.138427Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:33:43.138565Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:33:43.138613Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:33:43.138665Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-28T09:33:43.138737Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:33:43.139352Z node 5 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:33:43.139438Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:33:43.139471Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:33:43.139503Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-03-28T09:33:43.139533Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:33:43.139620Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-03-28T09:33:43.141911Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:33:43.144178Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 2025-03-28T09:33:43.144759Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:33:43.144957Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 231us result status StatusSuccess 2025-03-28T09:33:43.145318Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 2 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-03-28T09:33:43.148738Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveGroup { Group: "group1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:33:43.149209Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS 
visit path id [OwnerId: 72057594046678944, LocalPathId: 1] name: MyRoot type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:43.149257Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:43.149309Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:43.149346Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:33:43.149577Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-03-28T09:33:43.149695Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-28T09:33:43.149743Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:33:43.149792Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-28T09:33:43.149862Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:33:43.149929Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:33:43.149998Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-03-28T09:33:43.150047Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:33:43.150092Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-03-28T09:33:43.150139Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-03-28T09:33:43.150178Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-03-28T09:33:43.152821Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:43.152946Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2025-03-28T09:33:43.153193Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:43.153252Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:43.153449Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:43.153502Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-03-28T09:33:43.154067Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-03-28T09:33:43.154190Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-03-28T09:33:43.154237Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-03-28T09:33:43.154288Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-28T09:33:43.154336Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:33:43.154461Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-03-28T09:33:43.157048Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-03-28T09:33:43.157772Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:33:43.157981Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 245us result status StatusSuccess 2025-03-28T09:33:43.158440Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] Test command err: 2025-03-28T09:33:40.423467Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:40.424178Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:40.424268Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001374/r3tmp/tmpzKPnMj/pdisk_1.dat 2025-03-28T09:33:40.973961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:33:41.026923Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:41.083608Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:41.084061Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:41.101170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:41.202576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:41.701885Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:865:2713], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:41.702458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:875:2718], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:41.702549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:41.712933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:41.867611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:879:2721], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:41.932398Z node 1 :TX_PROXY ERROR: Actor# [1:961:2772] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:42.852182Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1k7y33trd1vj6gwvxbqbg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjA5MjU3ZGUtNWFmODAzNTMtNTUxZDNkYzAtNTg1NWU2ZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] Test command err: 2025-03-28T09:33:40.424104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:40.424621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:40.424706Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001388/r3tmp/tmpwIGWLI/pdisk_1.dat 2025-03-28T09:33:40.973913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:33:41.026065Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:41.088681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:41.088818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:41.101396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:41.202053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:41.702733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:865:2713], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:41.702843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:875:2718], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:41.702937Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:41.712964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:33:41.865719Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:879:2721], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:33:41.942263Z node 1 :TX_PROXY ERROR: Actor# [1:961:2772] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:42.851822Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1k7y584s3r2v6nvexj8sz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0OTMxMWYtOTc4ZThjZDUtMTkwMGU2NTgtYTc5ZmI5Y2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:42.969351Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:992:2793], TxId: 281474976715660, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NTM0OTMxMWYtOTc4ZThjZDUtMTkwMGU2NTgtYTc5ZmI5Y2U=. CustomerSuppliedId : . TraceId : 01jqe1k7y584s3r2v6nvexj8sz. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. Source[0] fatal error: {
<main>: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 } 2025-03-28T09:33:42.972218Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:992:2793], TxId: 281474976715660, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=NTM0OTMxMWYtOTc4ZThjZDUtMTkwMGU2NTgtYTc5ZmI5Y2U=. CustomerSuppliedId : . TraceId : 01jqe1k7y584s3r2v6nvexj8sz. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. InternalError: INTERNAL_ERROR KIKIMR_CONSTRAINT_VIOLATION: {
<main>: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 }. 2025-03-28T09:33:42.986961Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:993:2794], TxId: 281474976715660, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=NTM0OTMxMWYtOTc4ZThjZDUtMTkwMGU2NTgtYTc5ZmI5Y2U=. CustomerSuppliedId : . TraceId : 01jqe1k7y584s3r2v6nvexj8sz. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
<main>: Error: Terminate execution }. 2025-03-28T09:33:42.999441Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTM0OTMxMWYtOTc4ZThjZDUtMTkwMGU2NTgtYTc5ZmI5Y2U=, ActorId: [1:863:2711], ActorState: ExecuteState, TraceId: 01jqe1k7y584s3r2v6nvexj8sz, Create QueryResponse for error on request, msg: 2025-03-28T09:33:43.005415Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1k7y584s3r2v6nvexj8sz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTM0OTMxMWYtOTc4ZThjZDUtMTkwMGU2NTgtYTc5ZmI5Y2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:33:39.775712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:33:39.775822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:33:39.775894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:33:39.775947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:33:39.776668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:33:39.776716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:33:39.776789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:33:39.776872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:33:39.777842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:39.874801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:33:39.874856Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:39.892382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:39.892613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:33:39.892770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:33:39.899291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:33:39.899506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:33:39.900434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:39.900635Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:33:39.902818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:39.910410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:39.910488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:39.911626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:33:39.911715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:39.911774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:33:39.911915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:33:39.919428Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:33:40.063374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:33:40.064036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.064262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:33:40.064516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:33:40.064586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.066748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:40.066884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:33:40.067074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.067126Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:33:40.067161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:33:40.067199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:33:40.069185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.069261Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts 
operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:33:40.069314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:33:40.070934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.070983Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.071018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.071064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.074362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:33:40.076022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:33:40.076190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:33:40.077180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:40.077271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:40.077306Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.077598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:33:40.077681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.077846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:33:40.077919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:33:40.080033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:40.080094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:40.080259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:40.080302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 
1 2025-03-28T09:33:40.080629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.080669Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:33:40.080759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:40.080797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.080840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:40.080870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.080904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:33:40.080936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.080967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:33:40.080993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:33:40.081056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:33:40.081094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:33:40.081138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:33:40.082965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:40.083064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:40.083098Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:33:43.575823Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-28T09:33:43.575861Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:33:43.575904Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-28T09:33:43.575957Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:33:43.576031Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-03-28T09:33:43.576099Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:33:43.576135Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-03-28T09:33:43.576181Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:33:43.576218Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:33:43.576259Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-28T09:33:43.576295Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2025-03-28T09:33:43.576335Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2025-03-28T09:33:43.576368Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 3], 5 2025-03-28T09:33:43.579635Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 105, response: Status: StatusSuccess TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:43.579837Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /MyRoot/Dir1/DirSub1, set owner:user2 2025-03-28T09:33:43.580098Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:43.580147Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:33:43.580278Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:33:43.580400Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:43.580446Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 105, path id: 3 2025-03-28T09:33:43.580495Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-03-28T09:33:43.581081Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:33:43.581183Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:33:43.581224Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:33:43.581265Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 5 2025-03-28T09:33:43.581309Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:33:43.581753Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:33:43.581824Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:33:43.581850Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:33:43.581877Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-03-28T09:33:43.581906Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:33:43.581977Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-03-28T09:33:43.586453Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:33:43.587338Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2025-03-28T09:33:43.589969Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "user1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:33:43.590359Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-03-28T09:33:43.590458Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-28T09:33:43.590512Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:33:43.590556Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-28T09:33:43.590589Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:33:43.590650Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:33:43.590742Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is 
published: false 2025-03-28T09:33:43.590785Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:33:43.590822Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-03-28T09:33:43.590857Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-03-28T09:33:43.590899Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2025-03-28T09:33:43.592954Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:43.593058Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE USER, path: /MyRoot 2025-03-28T09:33:43.593245Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:43.593290Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:43.593477Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:43.593520Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-03-28T09:33:43.594000Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-03-28T09:33:43.594100Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2025-03-28T09:33:43.594136Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-03-28T09:33:43.594176Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-03-28T09:33:43.594219Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:33:43.594314Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-03-28T09:33:43.596253Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-03-28T09:33:43.596711Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1/DirSub1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:33:43.596921Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir1/DirSub1" took 227us result status StatusSuccess 2025-03-28T09:33:43.597232Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1/DirSub1" PathDescription { Self 
{ Name: "DirSub1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "user2" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:43.597670Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-28T09:33:43.597770Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Cannot find user: user1", at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:33:39.844149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:33:39.844226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:33:39.844261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:33:39.844286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:33:39.844322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:33:39.844344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:33:39.844381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:33:39.844436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:33:39.844648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:39.915912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:33:39.915974Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-28T09:33:39.928872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:39.929100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:33:39.929251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:33:39.935074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:33:39.935298Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:33:39.935910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:39.936141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:33:39.937912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:39.939102Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:39.939158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:39.939316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:33:39.939362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:39.939405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:33:39.939509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:33:39.946119Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:33:40.071480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:33:40.071684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.071887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:33:40.072125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:33:40.072195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.075582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:40.075699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:33:40.075867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.075953Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:33:40.075986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:33:40.076027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:33:40.078067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.078119Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:33:40.078151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:33:40.079828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.079870Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.079908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.079970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.083920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:33:40.085476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:33:40.085594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:33:40.086353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:40.086435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:40.086477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.086720Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:33:40.086770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.086915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:33:40.086989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:33:40.089094Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:40.089151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:40.089319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:40.089362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:33:40.089707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.089753Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:33:40.089845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:40.089878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.089932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:40.089962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.089993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:33:40.090026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.090053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:33:40.090079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:33:40.090140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:33:40.090184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:33:40.090216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:33:40.091813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:40.091895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:40.091927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
tablet# 72057594046678944 2025-03-28T09:33:43.374591Z node 5 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:33:43.374653Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:43.374869Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:33:43.374958Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:33:43.376850Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:43.376903Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:43.377088Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:43.377143Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:33:43.377391Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:43.377440Z node 5 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:33:43.377570Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:43.377614Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:43.377667Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:43.377707Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:43.377749Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:33:43.377794Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:43.377836Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:33:43.377871Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:33:43.377944Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:33:43.377995Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:33:43.378034Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:33:43.379006Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:43.379119Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:43.379166Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 
2025-03-28T09:33:43.379208Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:33:43.379253Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:33:43.379351Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:33:43.381943Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:33:43.382460Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:43.382941Z node 5 :TX_PROXY DEBUG: actor# [5:268:2259] Bootstrap 2025-03-28T09:33:43.404652Z node 5 :TX_PROXY DEBUG: actor# [5:268:2259] Become StateWork (SchemeCache [5:273:2264]) 2025-03-28T09:33:43.405181Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:33:43.405377Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 231us result status StatusSuccess 2025-03-28T09:33:43.405761Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:43.406099Z node 5 :TX_PROXY DEBUG: actor# [5:268:2259] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:33:43.408680Z node 5 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 
0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944 2025-03-28T09:33:43.409468Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-28T09:33:43.409517Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-03-28T09:33:43.552549Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Cannot find user: user1", at schemeshard: 72057594046678944 2025-03-28T09:33:43.552683Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:43.552731Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:43.552935Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:43.552983Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-03-28T09:33:43.553517Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 0 2025-03-28T09:33:43.553910Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:33:43.554089Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 202us result status StatusSuccess 2025-03-28T09:33:43.554525Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU3Nb461BDcTtt7xZUlG\n11PiveegGy7G3iKNqR3Pt5yXoInnXhLX9HGRR3n/X4GklEyxxBhbQj59X9ImFteu\n/PNw4z3alWSLTlCEkxTnAaDdvp7mrbIbwDUR1H0f4oEk51Nh4ANHYAco/wQZoqc6\nnHaWT013vdApb+r8BT3kgvguJb3TIDH+iqoLKksJF+tpaRW0UWPx+gJk97f8Arc+\nvXW3J3nlNoOg7SORU7ZNcU2TiDBY40wy0i9oY6rxORipzh7ABqKSXSv6aZREh+Tf\nTv/5YcgtvOasg9pfGXQS5h6A25yw1OzCJwMHKT13zpMwY2fOgTOJ890D78XMJfU9\nwQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743240823549 } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpSysColV0::InnerJoinSelect >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:33:39.804228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:33:39.804352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:33:39.804407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:33:39.804450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:33:39.804521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:33:39.804555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:33:39.804622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:33:39.804715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:33:39.805035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:39.896466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:33:39.896529Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:39.910928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:39.911161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:33:39.911331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:33:39.917226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:33:39.917431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:33:39.918124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:39.918358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:33:39.920272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:39.921576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:39.921635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:39.921797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:33:39.921857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:39.921918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:33:39.922029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:33:39.928475Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:33:40.050352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:33:40.050582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.050828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:33:40.051080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:33:40.051142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.054287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:40.054412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:33:40.054580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.054655Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:33:40.054729Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:33:40.054772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:33:40.057137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.057194Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:33:40.057226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:33:40.058798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.058847Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.058884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.058935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.062646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:33:40.064421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:33:40.064607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:33:40.065688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:40.065820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:40.065865Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.066134Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:33:40.066188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.066357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:33:40.066441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:33:40.068289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:40.068353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:40.068525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:40.068572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:33:40.068903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.068946Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:33:40.069034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:40.069068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.069108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:40.069148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.069193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:33:40.069227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.069258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:33:40.069283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:33:40.069347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:33:40.069388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:33:40.069420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:33:40.071187Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:40.071292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:40.071328Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
6678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:33:43.805330Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:43.805462Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:43.805508Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 105, path id: 2 2025-03-28T09:33:43.805553Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-03-28T09:33:43.806123Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:33:43.806233Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:33:43.806272Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:33:43.806320Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-28T09:33:43.806363Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:33:43.806888Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:33:43.806957Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:33:43.806984Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:33:43.807012Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2025-03-28T09:33:43.807038Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:33:43.807103Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-03-28T09:33:43.808996Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:33:43.809829Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 2025-03-28T09:33:43.810288Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2025-03-28T09:33:43.810460Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 202us result status StatusSuccess 2025-03-28T09:33:43.810797Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2025-03-28T09:33:43.813532Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveGroup { Group: "group1" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:33:43.813703Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 1] name: MyRoot type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:43.813740Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:43.813786Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 2] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:43.813815Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:33:43.814002Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2025-03-28T09:33:43.814103Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-28T09:33:43.814140Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:33:43.814185Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#106:0 progress is 1/1 2025-03-28T09:33:43.814221Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:33:43.814284Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:33:43.814340Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2025-03-28T09:33:43.814383Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 106 ready parts: 1/1 2025-03-28T09:33:43.814420Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 106:0 2025-03-28T09:33:43.814460Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2025-03-28T09:33:43.814497Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-03-28T09:33:43.817079Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:43.817190Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2025-03-28T09:33:43.817383Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:43.817424Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:43.817575Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:43.817619Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 106, path id: 1 2025-03-28T09:33:43.818085Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-03-28T09:33:43.818188Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2025-03-28T09:33:43.818228Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2025-03-28T09:33:43.818277Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-28T09:33:43.818319Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:33:43.818414Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2025-03-28T09:33:43.820255Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2025-03-28T09:33:43.820779Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:33:43.820975Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 220us result status StatusSuccess 2025-03-28T09:33:43.821407Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 
5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: "Dir1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.2%| [TA] $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardLoginTest::BanUserWithWaiting [GOOD] >> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> TTopicReaderTests::TestRun_ReadOneMessage [GOOD] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 >> KqpSysColV0::SelectRowAsterisk >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] Test command err: 2025-03-28T09:33:34.633708Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791203965310856:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:34.633744Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:34.758654Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791203168568629:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:34.758824Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013cb/r3tmp/tmppe0HMI/pdisk_1.dat 2025-03-28T09:33:35.315932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-03-28T09:33:35.316084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:35.317971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:35.318042Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:35.324201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:35.325960Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:33:35.327725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:35.470419Z node 1 :IMPORT WARN: Table profiles were not loaded TClient is connected to server localhost:63113 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:33:35.801632Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791203965310726:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [2:7486791207463536185:2110] 2025-03-28T09:33:35.805541Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7486791207463536185:2110][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791203965310726:2052] 2025-03-28T09:33:35.801698Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791203965310726:2052] Subscribe: subscriber# [2:7486791207463536185:2110], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:35.801896Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791203965310732:2058] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [2:7486791207463536187:2110] 2025-03-28T09:33:35.802802Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791203965310729:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1 DomainOwnerId: 72057594046644480 }: sender# [2:7486791207463536186:2110] 2025-03-28T09:33:35.802858Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791203965310729:2055] Subscribe: subscriber# [2:7486791207463536186:2110], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:35.805063Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:7486791203965310732:2058] Subscribe: subscriber# [2:7486791207463536187:2110], path# /dc-1, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2025-03-28T09:33:35.805742Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7486791207463536186:2110][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791203965310729:2055] 2025-03-28T09:33:35.812050Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791203965310726:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [2:7486791207463536185:2110] 2025-03-28T09:33:35.812056Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791203965310729:2055] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [2:7486791207463536186:2110] 2025-03-28T09:33:35.812497Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][2:7486791207463536187:2110][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [1:7486791203965310732:2058] 2025-03-28T09:33:35.812579Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
[main][2:7486791207463536181:2110][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [2:7486791207463536182:2110] 2025-03-28T09:33:35.812611Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7486791207463536181:2110][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [2:7486791207463536183:2110] 2025-03-28T09:33:35.812648Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][2:7486791207463536181:2110][/dc-1] Set up state: owner# [2:7486791207463536164:2107], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:35.812770Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][2:7486791207463536181:2110][/dc-1] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] Version: 2 }: sender# [2:7486791207463536184:2110] 2025-03-28T09:33:35.812831Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: [main][2:7486791207463536181:2110][/dc-1] Path was already updated: owner# [2:7486791207463536164:2107], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 2) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:33:35.813668Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791203965310732:2058] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [2:7486791207463536187:2110] 2025-03-28T09:33:35.844573Z node 1 :TX_PROXY DEBUG: actor# [1:7486791203965311088:2142] Handle TEvNavigate describe path dc-1 2025-03-28T09:33:35.844627Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791208260278868:2472] HANDLE EvNavigateScheme dc-1 2025-03-28T09:33:35.844766Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791203965311111:2155], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:35.844854Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791208260278845:2461][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486791203965311111:2155], cookie# 1 2025-03-28T09:33:35.844916Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791208260278849:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791208260278846:2461], cookie# 1 2025-03-28T09:33:35.844932Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791208260278850:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791208260278847:2461], cookie# 1 2025-03-28T09:33:35.844944Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791208260278851:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791208260278848:2461], cookie# 1 2025-03-28T09:33:35.844967Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791203965310726:2052] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791208260278849:2461], cookie# 1 2025-03-28T09:33:35.845006Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791203965310729:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791208260278850:2461], cookie# 1 2025-03-28T09:33:35.845030Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791203965310732:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791208260278851:2461], cookie# 1 2025-03-28T09:33:35.845066Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791208260278849:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791203965310726:2052], cookie# 1 2025-03-28T09:33:35.845080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791208260278850:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791203965310729:2055], cookie# 1 2025-03-28T09:33:35.845094Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791208260278851:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791203965310732:2058], cookie# 1 2025-03-28T09:33:35.845127Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791208260278845:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791208260278846:2461], cookie# 1 2025-03-28T09:33:35.845155Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791208260278845:2461][/dc-1] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:33:35.845171Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791208260278845:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791208260278847:2461], cookie# 1 2025-03-28T09:33:35.845188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791208260278845:2461][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:33:35.845247Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791208260278845:2461][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791208260278848:2461], cookie# 1 2025-03-28T09:33:35.845262Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791208260278845:2461][/dc-1] Unexpected sync response: sender# [1:7486791208260278848:2461], cookie# 1 2025-03-28T09:33:35.845310Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486791203965311111:2155], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:33:35.845406Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486791203965311111:2155], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486791208260278845:2461] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:33:35.845482Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486791203965311111:2155], cacheItem# { Subscriber: { Subscriber: [1:7486791208260278845:2461] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 
Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 Tabl ... em# { Subscriber: { Subscriber: [3:7486791228867188089:2211] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:42.943135Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791224572220512:2176], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:42.943192Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791224572220512:2176], cacheItem# { Subscriber: { Subscriber: [3:7486791228867188088:2210] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:42.943253Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791237457122717:2226], recipient# [3:7486791237457122714:2546], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:42.943315Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791237457122718:2227], recipient# [3:7486791237457122715:2547], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:42.943376Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791237457122719:2228], recipient# [3:7486791237457122716:2548], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] 
RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:43.051553Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791224572220512:2176], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:43.051662Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791224572220512:2176], cacheItem# { Subscriber: { Subscriber: [3:7486791228867188074:2208] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:43.051705Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791224572220512:2176], cacheItem# { Subscriber: { Subscriber: [3:7486791228867188075:2209] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:43.051823Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791241752090027:2229], recipient# [3:7486791241752090026:2557], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:43.599118Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7486791207463536164:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:43.599351Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7486791207463536164:2107], cacheItem# { Subscriber: { Subscriber: [2:7486791224643405400:2117] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:43.599485Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7486791241823274628:2132], recipient# [2:7486791241823274627:2319], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:43.771029Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7486791207463536164:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:43.771151Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7486791207463536164:2107], cacheItem# { Subscriber: { Subscriber: [2:7486791207463536189:2112] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:43.771248Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7486791241823274630:2133], recipient# [2:7486791241823274629:2320], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:43.931140Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7486791207463536164:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: 
OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:43.931256Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7486791207463536164:2107], cacheItem# { Subscriber: { Subscriber: [2:7486791207463536189:2112] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:43.931335Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7486791241823274632:2134], recipient# [2:7486791241823274631:2321], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [GOOD] >> KqpSystemView::PartitionStatsRange1 >> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters [GOOD] >> TSchemeShardLoginTest::ChangeAccountLockoutParameters >> KqpSysColV0::SelectRange >> KqpSystemView::PartitionStatsRanges >> Viewer::JsonStorageListingV2GroupIdFilter [GOOD] >> Viewer::JsonStorageListingV2NodeIdFilter >> KqpSysColV1::SelectRange >> Compression::WriteRAW [GOOD] >> Compression::WriteGZIP >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] Test command err: 2025-03-28T09:33:33.230726Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791199314727385:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:33.232931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013fe/r3tmp/tmp91GwBU/pdisk_1.dat 2025-03-28T09:33:34.169324Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:34.169422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:34.194982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:34.221854Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:34.234594Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; TClient is connected to server localhost:1936 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T09:33:34.489530Z node 1 :TX_PROXY DEBUG: actor# [1:7486791199314727615:2117] Handle TEvNavigate describe path dc-1 2025-03-28T09:33:34.489559Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791203609695388:2436] HANDLE EvNavigateScheme dc-1 2025-03-28T09:33:34.489638Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791199314727638:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:34.489728Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791203609695326:2392][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7486791199314727638:2130], cookie# 1 2025-03-28T09:33:34.490987Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791203609695330:2392][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791203609695327:2392], cookie# 1 2025-03-28T09:33:34.491060Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791203609695331:2392][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791203609695328:2392], cookie# 1 2025-03-28T09:33:34.491078Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791203609695332:2392][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791203609695329:2392], cookie# 1 2025-03-28T09:33:34.491102Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791199314727294:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791203609695331:2392], cookie# 1 2025-03-28T09:33:34.491099Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791199314727291:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791203609695330:2392], cookie# 1 2025-03-28T09:33:34.491147Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:7486791199314727297:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7486791203609695332:2392], cookie# 1 2025-03-28T09:33:34.491170Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791203609695330:2392][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791199314727291:2051], cookie# 1 2025-03-28T09:33:34.491188Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791203609695331:2392][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791199314727294:2054], cookie# 1 2025-03-28T09:33:34.491202Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:7486791203609695332:2392][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791199314727297:2057], cookie# 1 2025-03-28T09:33:34.491257Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791203609695326:2392][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791203609695327:2392], cookie# 1 2025-03-28T09:33:34.491282Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791203609695326:2392][/dc-1] Sync is in progress: cookie# 1, size# 3, 
half# 1, successes# 1, faulires# 0 2025-03-28T09:33:34.491299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791203609695326:2392][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791203609695328:2392], cookie# 1 2025-03-28T09:33:34.491339Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791203609695326:2392][/dc-1] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:33:34.491370Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791203609695326:2392][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:7486791203609695329:2392], cookie# 1 2025-03-28T09:33:34.491383Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:7486791203609695326:2392][/dc-1] Unexpected sync response: sender# [1:7486791203609695329:2392], cookie# 1 2025-03-28T09:33:34.491438Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486791199314727638:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2025-03-28T09:33:34.501874Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486791199314727638:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486791203609695326:2392] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:33:34.502069Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486791199314727638:2130], cacheItem# { Subscriber: { Subscriber: [1:7486791203609695326:2392] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2025-03-28T09:33:34.516115Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [1:7486791203609695391:2439], recipient# [1:7486791203609695388:2436], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:33:34.516206Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791203609695388:2436] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:33:34.561287Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791203609695388:2436] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 
2025-03-28T09:33:34.564328Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791203609695388:2436] Handle TEvDescribeSchemeResult Forward to# [1:7486791203609695387:2435] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 Pa... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T09:33:34.601979Z node 1 :TX_PROXY DEBUG: actor# [1:7486791199314727615:2117] Handle TEvProposeTransaction 2025-03-28T09:33:34.602015Z node 1 :TX_PROXY DEBUG: actor# [1:7486791199314727615:2117] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T09:33:34.602123Z node 1 :TX_PROXY DEBUG: actor# [1:7486791199314727615:2117] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486791203609695396:2443] 2025-03-28T09:33:34.700527Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791203609695396:2443] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2025-03-28T09:33:34.700572Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791203609695396:2443] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:33:34.700626Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791203609695396:2443] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:33:34.700720Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handl ... Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:44.748507Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791244199477345:5660], recipient# [3:7486791244199477344:4785], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:44.806905Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791205544761519:2232], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:44.807051Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791205544761519:2232], cacheItem# { Subscriber: { Subscriber: [3:7486791218429663540:2301] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:44.807144Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791244199477347:5661], recipient# [3:7486791244199477346:4786], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:44.831075Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7486791227204207244:2239], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:44.831223Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7486791227204207244:2239], cacheItem# { Subscriber: { Subscriber: [2:7486791240089109253:2306] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:44.831337Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7486791244384076657:2342], recipient# [2:7486791244384076656:2572], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:44.839329Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791205544761519:2232], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:44.839460Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791205544761519:2232], cacheItem# { Subscriber: { Subscriber: [3:7486791218429663554:2307] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:44.839541Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791244199477349:5662], recipient# [3:7486791244199477348:4787], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:44.863055Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791205544761519:2232], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:44.863183Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791205544761519:2232], cacheItem# { Subscriber: { Subscriber: [3:7486791218429663540:2301] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:44.863271Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791244199477351:5663], recipient# [3:7486791244199477350:4788], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:44.863294Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7486791227204207244:2239], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:44.863406Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7486791227204207244:2239], cacheItem# { Subscriber: { Subscriber: [2:7486791240089109253:2306] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:44.863494Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7486791244384076659:2343], recipient# [2:7486791244384076658:2573], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:44.867280Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7486791227204207244:2239], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:44.867403Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [2:7486791227204207244:2239], cacheItem# { Subscriber: { Subscriber: [2:7486791240089109296:2314] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:33:44.867484Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [2:7486791244384076661:2344], recipient# [2:7486791244384076660:2574], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:33:45.062990Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486791227204206543:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:45.063092Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/initialization/migrations;error=timeout; >> TopicAutoscaling::ReadFromTimestamp_PQv1 [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_ReadEmptyPartitions_AutoscaleAwareSDK [GOOD] Test command err: 2025-03-28T09:32:01.634403Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790804822859879:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:01.634473Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00134c/r3tmp/tmpt31kho/pdisk_1.dat 2025-03-28T09:32:01.828975Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:32:01.996175Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on 
GrpcPort 30147, node 1 2025-03-28T09:32:02.037838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:02.037982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:02.039700Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:32:02.100821Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00134c/r3tmp/yandexsr0md6.tmp 2025-03-28T09:32:02.100855Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00134c/r3tmp/yandexsr0md6.tmp 2025-03-28T09:32:02.101095Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00134c/r3tmp/yandexsr0md6.tmp 2025-03-28T09:32:02.101245Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:32:02.159891Z INFO: TTestServer started on Port 4304 GrpcPort 30147 TClient is connected to server localhost:4304 PQClient connected to localhost:30147 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:02.533260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:02.569027Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:32:02.580208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:32:04.535052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790817707762597:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.535051Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790817707762590:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.535132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.539670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:32:04.542548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790817707762637:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.543106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.550859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790817707762605:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:32:04.811548Z node 1 :TX_PROXY ERROR: Actor# [1:7486790817707762661:2448] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:04.837305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.865629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.931836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.940528Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790817707762679:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:32:04.942254Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODdjN2RjOGQtNzNkNjk3NTMtOTRiZjEwMTUtODkxZmIwNWE=, ActorId: [1:7486790817707762573:2336], ActorState: ExecuteState, TraceId: 01jqe1g90ze2cemmh5twgxbxjv, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:32:04.944666Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7486790822002730254:2630] 2025-03-28T09:32:06.634561Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790804822859879:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:06.634629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-03-28T09:32:11.104177Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-28T09:32:11.117768Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-28T09:32:11.119194Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7486790847772534335:2803], Recipient [1:7486790804822860307:2190]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:11.119232Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:11.119247Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:32:11.119285Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7486790847772534331:2800], Recipient [1:7486790804822860307:2190]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-03-28T09:32:11.119302Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:32:11.150926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:32:11.151378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:32:11.151666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-03-28T09:32:11.151710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path ... 
:INFO: [/Root] [/Root] [72fd24fc-f559e99d-11968458-208ceffe] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2118 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:33:42.824969Z :INFO: [/Root] [/Root] [72fd24fc-f559e99d-11968458-208ceffe] Closing read session. Close timeout: 0.000000s 2025-03-28T09:33:42.825022Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:1:3:0:0 -:test-topic:2:2:0:0 -:test-topic:0:1:0:0 2025-03-28T09:33:42.825066Z :INFO: [/Root] [/Root] [72fd24fc-f559e99d-11968458-208ceffe] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2119 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:33:42.825163Z :NOTICE: [/Root] [/Root] [72fd24fc-f559e99d-11968458-208ceffe] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >>>>> Session-0 Received TSessionClosedEvent message SessionClosed { Status: SUCCESS Issues: "
: Error: Session was gracefully closed " } 2025-03-28T09:33:42.830815Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer test-consumer session test-consumer_5_1_10169822760568320870_v1 grpc read done: success# 0, data# { } 2025-03-28T09:33:42.830863Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_10169822760568320870_v1 grpc read failed 2025-03-28T09:33:42.830914Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_10169822760568320870_v1 grpc closed 2025-03-28T09:33:42.830966Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer test-consumer session test-consumer_5_1_10169822760568320870_v1 is DEAD 2025-03-28T09:33:42.832039Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791230131144158:2742], Partition 1, Sender [0:0:0], Recipient [5:7486791230131144239:2748], Cookie: 0 2025-03-28T09:33:42.832132Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791230131144239:2748]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:42.832163Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:42.832206Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:42.832268Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:42.832291Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:42.832316Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:42.832363Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791230131144157:2741], Partition 2, Sender [0:0:0], Recipient [5:7486791230131144240:2749], Cookie: 0 2025-03-28T09:33:42.832396Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791230131144240:2749]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:42.832409Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:42.832429Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:42.832456Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:42.832470Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:42.832486Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-28T09:33:42.832527Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791204361339284:2456], Partition 0, Sender [0:0:0], Recipient [5:7486791204361339342:2459], Cookie: 0 2025-03-28T09:33:42.832557Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791204361339342:2459]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:42.832568Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:42.832598Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:42.832628Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:42.832643Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:42.832661Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:42.832866Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][test-topic] pipe [5:7486791225836176742:2711] disconnected; active server actors: 1 2025-03-28T09:33:42.832891Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][test-topic] pipe [5:7486791225836176742:2711] client test-consumer disconnected session test-consumer_5_1_10169822760568320870_v1 2025-03-28T09:33:42.833042Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7486791230131144284:3423], Recipient [5:7486791230131144158:2742]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:33:42.833067Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:33:42.833082Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897] Handle TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:33:42.833097Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] Destroy direct read session test-consumer_5_1_10169822760568320870_v1 2025-03-28T09:33:42.833128Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037897] server disconnected, pipe [5:7486791230131144282:2760] destroyed 2025-03-28T09:33:42.833211Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7486791230131144283:3422], Recipient [5:7486791230131144157:2741]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:33:42.833226Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:33:42.833236Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896] Handle TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:33:42.833249Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Destroy direct read session test-consumer_5_1_10169822760568320870_v1 2025-03-28T09:33:42.833269Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037896] server disconnected, pipe [5:7486791230131144281:2759] destroyed 2025-03-28T09:33:42.833312Z node 5 :PERSQUEUE TRACE: HandleHook, received event# 269877764, Sender [5:7486791225836176746:3290], Recipient [5:7486791204361339284:2456]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:33:42.833324Z node 5 :PERSQUEUE TRACE: HandleHook, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:33:42.833334Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892] Handle TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:33:42.833347Z node 
5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session test-consumer_5_1_10169822760568320870_v1 2025-03-28T09:33:42.833364Z node 5 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7486791225836176745:2714] destroyed 2025-03-28T09:33:42.833405Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_10169822760568320870_v1 2025-03-28T09:33:42.833421Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_10169822760568320870_v1 2025-03-28T09:33:42.833437Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: test-consumer_5_1_10169822760568320870_v1 2025-03-28T09:33:42.934837Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791230131144158:2742], Partition 1, Sender [0:0:0], Recipient [5:7486791230131144239:2748], Cookie: 0 2025-03-28T09:33:42.934914Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791230131144239:2748]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:42.934936Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:42.934982Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:42.935058Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:42.935082Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:42.935109Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:42.935178Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791230131144157:2741], Partition 2, Sender [0:0:0], Recipient [5:7486791230131144240:2749], Cookie: 0 2025-03-28T09:33:42.935211Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791230131144240:2749]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:42.935223Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:42.935244Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:42.935270Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:42.935285Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:42.935300Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-28T09:33:42.935337Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791204361339284:2456], Partition 0, Sender [0:0:0], Recipient [5:7486791204361339342:2459], Cookie: 0 2025-03-28T09:33:42.935370Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791204361339342:2459]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:42.935381Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:42.935403Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:42.935429Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:42.935441Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:42.935456Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD] >> KqpSystemView::PartitionStatsParametricRanges >> KqpSystemView::QueryStatsSimple >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation >> KqpSystemView::ReadSuccess |93.3%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |93.3%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:33:39.775717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:33:39.775830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:33:39.775887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:33:39.775929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:33:39.776639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:33:39.776683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:33:39.776764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2025-03-28T09:33:39.776859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:33:39.777863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:33:39.892363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:33:39.892421Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:39.913575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:33:39.913824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:33:39.914003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:33:39.935573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:33:39.935761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:33:39.936502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:39.936702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:33:39.939056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:39.940241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:39.940299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:39.940459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:33:39.940498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:39.940530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:33:39.940603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:33:39.947589Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:33:40.076995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:33:40.077163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.077364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:33:40.077568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:33:40.077611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.079525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:40.079647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:33:40.079810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.079858Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:33:40.079904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:33:40.079982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:33:40.081756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.081805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:33:40.081836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:33:40.083449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.083493Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.083531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.083579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.087559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:33:40.089689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:33:40.089854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:33:40.090871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:40.091013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:40.091061Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.091316Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 1:0 128 -> 240 2025-03-28T09:33:40.091371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.091527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:33:40.091639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:33:40.093380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:40.093441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:40.093637Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:40.093679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:33:40.094070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.094114Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:33:40.094204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:40.094234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.094271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:40.094299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.094332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:33:40.094371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.094420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:33:40.094450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:33:40.094522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:33:40.094568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:33:40.094617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:33:40.102247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:40.102372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:40.102492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
28T09:33:43.520133Z node 5 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:33:43.520177Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:33:43.520245Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:33:43.520308Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:33:43.520350Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:33:43.520393Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:33:43.520455Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2025-03-28T09:33:43.520494Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2025-03-28T09:33:43.527682Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:43.527817Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2025-03-28T09:33:43.528059Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:43.528129Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:43.528325Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:43.528376Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:33:43.528901Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:33:43.529009Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:33:43.529051Z node 5 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:33:43.529098Z node 5 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-03-28T09:33:43.529153Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:33:43.529268Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:33:43.539511Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2025-03-28T09:33:43.540009Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 
2025-03-28T09:33:43.540065Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-03-28T09:33:43.676338Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-03-28T09:33:43.676464Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:43.676513Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:43.676718Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:43.676769Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:205:2207], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-03-28T09:33:43.677302Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2025-03-28T09:33:43.677629Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-28T09:33:43.687608Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-03-28T09:33:43.687991Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-28T09:33:43.694832Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-03-28T09:33:43.695189Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-28T09:33:43.706148Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-03-28T09:33:43.706565Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-28T09:33:43.706722Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "User user1 is not permitted to log in", at schemeshard: 72057594046678944 2025-03-28T09:33:43.708429Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-28T09:33:43.708554Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "User user1 is not permitted to log in", at schemeshard: 72057594046678944 2025-03-28T09:33:43.709007Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:33:43.709228Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 249us result status StatusSuccess 2025-03-28T09:33:43.709660Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false 
} DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAlRVBuKIBHNZKyUGuQM+N\ny4VPReBbqq8R/PSnOngkCKMwVwqQhlMRKW95ajxxjUcIkEYa5Qz+XSJ2PNh1dtvu\ngT73RkVbgv8Kr7qAJbEMgCZR6BSroATJKPZ9NnnB2G5S2FZ7IPuHGMlEExFqqypD\narHP8iGH5hmw1AxcPIKVo4qtCGM+o3i86AM3VWXnmeSB2dFVZVZF+DdhmJPtaTjI\nxm2aOv4+Z2PKpfIresWYjsdB1TrOpQKANnDFurHd0G6WkJC3NxvAG3fYsaBDa3Vf\npYRFGTyjcA0feBanbR6zMDy5uKySKZ+01/ajyPvQU1Q0meoECxrQgpSkRmi8Htsf\n0wIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743240823665 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:47.710592Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-28T09:33:47.720470Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "Invalid password", at schemeshard: 72057594046678944 2025-03-28T09:33:47.721049Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-28T09:33:47.732529Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk3NjI3LCJpYXQiOjE3NDMxNTQ0MjcsInN1YiI6InVzZXIxIn0.bqe5eezsC4EM7ZmHdDW2x_VX_PPsHrivNOe6_ApRRzN5qHJvP0obawzSZj-a475SgBnKoqx5fuvd2Jp0DaRnEq1igE5d3tNSpqBZmNkEQ_eEeVEXQbjyXW5rEXcBRLH3EKxlHhOjqNq16OSb_lYF7KCSnbuwxlY4W92gasrhVXM5vt1TNKh9vciUgOnXysb3yb2WvP_yCdl1KdDbDEEiaEGF3xta_xcLaN6hVUN3a-arW1TEmhQeU77hWaM0cvdEQErWkyR3MVWZRPsSV2yb8E3ytYRssZpDxMGBuadhOviSc_-U9ZqAh40xbJIXEqfP7VFaaZzEBID6mvygWXeQvg" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk3NjI3LCJpYXQiOjE3NDMxNTQ0MjcsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-03-28T09:33:47.733163Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:33:47.733390Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 249us result status StatusSuccess 2025-03-28T09:33:47.733866Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false 
} DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAlRVBuKIBHNZKyUGuQM+N\ny4VPReBbqq8R/PSnOngkCKMwVwqQhlMRKW95ajxxjUcIkEYa5Qz+XSJ2PNh1dtvu\ngT73RkVbgv8Kr7qAJbEMgCZR6BSroATJKPZ9NnnB2G5S2FZ7IPuHGMlEExFqqypD\narHP8iGH5hmw1AxcPIKVo4qtCGM+o3i86AM3VWXnmeSB2dFVZVZF+DdhmJPtaTjI\nxm2aOv4+Z2PKpfIresWYjsdB1TrOpQKANnDFurHd0G6WkJC3NxvAG3fYsaBDa3Vf\npYRFGTyjcA0feBanbR6zMDy5uKySKZ+01/ajyPvQU1Q0meoECxrQgpSkRmi8Htsf\n0wIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743240823665 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> ColumnShardTiers::TieringUsage [GOOD] >> KqpSystemView::FailNavigate [GOOD] >> KqpSysColV1::StreamSelectRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::ReadFromTimestamp_PQv1 [GOOD] Test command err: 2025-03-28T09:32:00.876408Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790800001390889:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:00.876566Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:32:01.090149Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001378/r3tmp/tmpNBRlcp/pdisk_1.dat 2025-03-28T09:32:01.355580Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:01.378595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:01.378715Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:01.380259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1991, node 1 2025-03-28T09:32:01.803294Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001378/r3tmp/yandexZBjNO6.tmp 2025-03-28T09:32:01.803323Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001378/r3tmp/yandexZBjNO6.tmp 2025-03-28T09:32:01.803486Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001378/r3tmp/yandexZBjNO6.tmp 2025-03-28T09:32:01.803596Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:32:02.034870Z INFO: TTestServer started on Port 1109 GrpcPort 1991 TClient is connected to server localhost:1109 PQClient connected to localhost:1991 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:02.357730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:02.423976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:32:04.038009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790817181260758:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.038165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.038566Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790817181260770:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:04.045753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:32:04.060043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790817181260772:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:32:04.248389Z node 1 :TX_PROXY ERROR: Actor# [1:7486790817181260836:2453] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:04.290546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.325794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.392929Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790817181260844:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:32:04.394866Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzNlYTU2NzgtNThlNGUwMzItZjk5ZDE5YTctNzI2YjY2Zjc=, ActorId: [1:7486790817181260755:2337], ActorState: ExecuteState, TraceId: 01jqe1g8hk873rm2jf7t235wc3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:32:04.397353Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:32:04.408994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7486790817181261132:2636] 2025-03-28T09:32:05.876266Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790800001390889:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:05.876342Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-03-28T09:32:10.452579Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-28T09:32:10.463723Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-28T09:32:10.464993Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7486790842951065193:2789], Recipient [1:7486790804296358490:2200]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:10.465026Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:10.465040Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:32:10.465070Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7486790842951065189:2786], Recipient [1:7486790804296358490:2200]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-03-28T09:32:10.465084Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:32:10.566612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:32:10.567090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:32:10.567378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-03-28T09:32:10.567435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-03-28T09:32:10.567471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-03-28T09:32:10.567529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId 
[OwnerId: 72057594046644480, LocalPathId: 13] was 2 2025-03-28T09:32:10.567587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 3 2025-03-28T09:32:10.567767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710673:1, propose status:StatusAccepted, reason: , at schemes ... PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:45.901118Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:45.901179Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791241561043367:2869], Partition 1, Sender [0:0:0], Recipient [5:7486791241561043472:2885], Cookie: 0 2025-03-28T09:33:45.901215Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791241561043472:2885]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:45.901228Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:45.901252Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:45.901281Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:45.901296Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:45.901313Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:45.922813Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791194316401516:2470], Partition 0, Sender [0:0:0], Recipient [5:7486791194316401577:2474], Cookie: 0 2025-03-28T09:33:45.922885Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791194316401577:2474]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:45.922908Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:45.922951Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:45.923019Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:45.923042Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:45.923070Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-28T09:33:46.002912Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791241561043368:2870], Partition 2, Sender [0:0:0], Recipient [5:7486791241561043470:2883], Cookie: 0 2025-03-28T09:33:46.002981Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791241561043470:2883]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:46.003002Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:46.003042Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:46.003106Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:46.003129Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:46.003156Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:46.003201Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791241561043367:2869], Partition 1, Sender [0:0:0], Recipient [5:7486791241561043472:2885], Cookie: 0 2025-03-28T09:33:46.003234Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791241561043472:2885]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:46.003247Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:46.003269Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:46.003298Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:46.003313Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:46.003329Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:46.026770Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791194316401516:2470], Partition 0, Sender [0:0:0], Recipient [5:7486791194316401577:2474], Cookie: 0 2025-03-28T09:33:46.026856Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791194316401577:2474]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:46.026881Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:46.026933Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:46.027002Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:46.027025Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:46.027052Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-28T09:33:46.103922Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791241561043368:2870], Partition 2, Sender [0:0:0], Recipient [5:7486791241561043470:2883], Cookie: 0 2025-03-28T09:33:46.104028Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791241561043470:2883]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:46.104057Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:46.104112Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:46.104190Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:46.104236Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:46.104268Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:46.104353Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791241561043367:2869], Partition 1, Sender [0:0:0], Recipient [5:7486791241561043472:2885], Cookie: 0 2025-03-28T09:33:46.104398Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791241561043472:2885]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:46.104416Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:46.104441Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:46.104476Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:46.104495Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:46.104514Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:46.126988Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791194316401516:2470], Partition 0, Sender [0:0:0], Recipient [5:7486791194316401577:2474], Cookie: 0 2025-03-28T09:33:46.127064Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791194316401577:2474]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:46.127084Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:46.127123Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:46.127183Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:46.127205Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:46.127227Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 2025-03-28T09:33:46.204315Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791241561043367:2869], Partition 1, Sender [0:0:0], Recipient [5:7486791241561043472:2885], Cookie: 0 2025-03-28T09:33:46.204400Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791241561043472:2885]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:46.204429Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:46.204480Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:46.204557Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:46.204587Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:46.204612Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791241561043368:2870], Partition 2, Sender [0:0:0], Recipient [5:7486791241561043470:2883], Cookie: 0 2025-03-28T09:33:46.204620Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:46.204678Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791241561043470:2883]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:46.204701Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:46.204746Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:46.204798Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:46.204837Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:46.204864Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. 
Responses.size()=0 >> KqpSysColV1::StreamInnerJoinTables [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange3 [GOOD] >> KqpSysColV1::StreamInnerJoinSelect [GOOD] >> KqpSysColV0::InnerJoinTables [GOOD] >> KqpSystemView::NodesSimple >> KqpSystemView::PartitionStatsSimple ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailNavigate [GOOD] Test command err: Trying to start YDB, gRPC: 3171, MsgBus: 4367 2025-03-28T09:33:42.499018Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791235574783232:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:42.499174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f92/r3tmp/tmpq56f38/pdisk_1.dat 2025-03-28T09:33:43.036967Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:43.089510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:43.089591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:43.097883Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3171, node 1 2025-03-28T09:33:43.357843Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:43.357866Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:43.357873Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:43.357997Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4367 TClient is connected to server localhost:4367 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:44.207352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
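For reference, the sys-view read that KqpSystemView::FailNavigate exercises looks roughly like the following YQL. This is a minimal sketch: the concrete statement and session setup are assumptions; only the view path `/Root/.sys/partition_stats`, the user `user0@builtin`, and the SCHEME_ERROR outcome are taken from the log records below.

-- Executed under a user that has no DescribeSchema right on the database
-- (user0@builtin in this run); the scheme cache denies navigation, so the
-- compiler reports the view as missing or inaccessible:
SELECT *
FROM `/Root/.sys/partition_stats`;
-- Expected result here: SCHEME_ERROR,
-- "Cannot find table 'db.[/Root/.sys/partition_stats]' because it does not
--  exist or you do not have access permissions."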
2025-03-28T09:33:44.256227Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:33:44.266269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.477071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.688320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.774131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:46.129500Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791252754654120:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.131593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.710436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.757473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.789307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.830855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.866514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.963965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:33:47.024528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791257049621932:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.024620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.025628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791257049621937:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.030649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-03-28T09:33:47.045931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791257049621939:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-03-28T09:33:47.145984Z node 1 :TX_PROXY ERROR: Actor# [1:7486791257049621997:3456] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:47.498402Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791235574783232:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:47.498481Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:48.337944Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7486791261344589567:3669], for# user0@builtin, access# DescribeSchema 2025-03-28T09:33:48.337978Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7486791261344589567:3669], for# user0@builtin, access# DescribeSchema 2025-03-28T09:33:48.350487Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486791261344589564:2495], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/.sys/partition_stats]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:33:48.352019Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWQ5ZmI2MzctNDQ3YmIwYzUtMTE0Y2E5ZWQtMmQ5ZmE3OTc=, ActorId: [1:7486791261344589557:2491], ActorState: ExecuteState, TraceId: 01jqe1kec05fcydcjz1y3n5dve, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 9708, MsgBus: 29529 2025-03-28T09:33:42.643386Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791237393583463:2252];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:42.643559Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f64/r3tmp/tmppvP94V/pdisk_1.dat 2025-03-28T09:33:43.168370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:43.170586Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:43.172933Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:43.190938Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9708, node 1 2025-03-28T09:33:43.357970Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:43.358001Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:43.358010Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:43.358105Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29529 TClient is connected to server localhost:29529 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:44.291970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:33:44.331720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.541839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.744436Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.830625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:46.313720Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791254573454215:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.313820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.709122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.747575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.820023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.856223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.890557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.971285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:47.025334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791258868422031:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.025399Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.025517Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791258868422036:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.029192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:47.038539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791258868422038:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:47.140243Z node 1 :TX_PROXY ERROR: Actor# [1:7486791258868422093:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:47.642987Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791237393583463:2252];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:47.643079Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:48.372852Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154428379, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage [GOOD] Test command err: 2025-03-28T09:31:25.188014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:25.188523Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:25.188615Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001c5d/r3tmp/tmpCLflz5/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9954, node 1 TClient is connected to server localhost:8505 2025-03-28T09:31:26.159528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:31:26.213823Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:26.225440Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:31:26.225505Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:31:26.225538Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:31:26.225773Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:31:26.265578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:26.266182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:26.278999Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2025-03-28T09:31:36.928290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:679:2569], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:36.928420Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.044451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-03-28T09:31:37.404717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:822:2661], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.404835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.405177Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:827:2666], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:37.411610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-03-28T09:31:37.547328Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:829:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:31:38.111953Z node 1 :TX_PROXY ERROR: Actor# [1:924:2734] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:38.831480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:31:39.275236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-03-28T09:31:39.939450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-03-28T09:31:40.686401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:31:41.166267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-28T09:31:42.515945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:31:42.840222Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-28T09:31:58.282439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715708:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( 
SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-28T09:32:10.133193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715715:0, at schemeshard: 72057594046644480 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier2` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2025-03-28T09:32:12.726995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715732:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715732 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 15 2025-03-28T09:32:13.014903Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037892;self_id=[1:2925:4263];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:32:13.043942Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037892;self_id=[1:2925:4263];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:32:13.044412Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037892 2025-03-28T09:32:13.054057Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2925:4263];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:32:13.054381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2925:4263];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:32:13.054746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2925:4263];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:32:13.054911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2925:4263];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:32:13.055068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2925:4263];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:32:13.055214Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:2925:4263];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:32:13.055376Z node 1 ... 
9:33:48.263336Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;task_id=c0533aca-bb711f0-9c939fd5-3babe327;tablet_id=72075186224037893;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:33:48.263462Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037893 Delete Blob DS:2181038080:[72075186224037893:1:13:15:0:1136:0] 2025-03-28T09:33:48.263529Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037893 Delete Blob DS:2181038080:[72075186224037893:1:12:14:0:1520:0] 2025-03-28T09:33:48.263584Z node 1 :TX_COLUMNSHARD DEBUG: fline=task.cpp:21;event=free_resources;task_id=19;external_task_id=c0533aca-bb711f0-9c939fd5-3babe327;mem=504;cpu=0; 2025-03-28T09:33:48.263674Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:33:48.263807Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2932:4266];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=504;external_task_id=c05b86d0-bb711f0-ab93cc48-8bc3acad;type=CS::TTL;priority=0;; 2025-03-28T09:33:48.264908Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;self_id=[1:2932:4266];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;path_id=16;fline=storage.cpp:87;event=granule_compaction_weight;priority=(10,19999998864); 2025-03-28T09:33:48.265017Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;self_id=[1:2932:4266];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;path_id=16;fline=optimizer.h:894;stop_instant=NO_VALUE_OPTIONAL;size=2656;next=;count=2;info={bytes=1136;count=1;records=1};event=start_optimization;stop_point=;main_portion=19; 2025-03-28T09:33:48.265232Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2932:4266];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=manager.cpp:10;event=lock;process_id=CS::GENERAL::c05bd748-bb711f0-b78f0494-6be6b362; 2025-03-28T09:33:48.265363Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2932:4266];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=5382;external_task_id=c05bd748-bb711f0-b78f0494-6be6b362;type=CS::GENERAL;priority=0;; 2025-03-28T09:33:48.266098Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2932:4266];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:67;event=granule_locked;path_id=16;lock_id=CS::GENERAL::c05bd748-bb711f0-b78f0494-6be6b362; 2025-03-28T09:33:48.266149Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2932:4266];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=storage.cpp:82;event=no_granules; 2025-03-28T09:33:48.266187Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2932:4266];ev=NKikimr::NColumnShard::TEvPrivate::TEvStartCompaction;fline=column_engine_logs.cpp:206;event=no granules for start compaction; 2025-03-28T09:33:48.266217Z node 1 :TX_COLUMNSHARD DEBUG: Compaction not started: cannot prepare compaction at tablet 72075186224037893 2025-03-28T09:33:48.266304Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037893;parent=[1:2932:4266];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=20;task=cpu=0;mem=504;external_task_id=c05b86d0-bb711f0-ab93cc48-8bc3acad;type=CS::TTL;priority=0;; 2025-03-28T09:33:48.266345Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2932:4266];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=c05b86d0-bb711f0-ab93cc48-8bc3acad;mem=504;cpu=0; 2025-03-28T09:33:48.266388Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2932:4266];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=c05b86d0-bb711f0-ab93cc48-8bc3acad;task_id=20;mem=504;cpu=0; 2025-03-28T09:33:48.266670Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2932:4266];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=21;task=cpu=0;mem=5382;external_task_id=c05bd748-bb711f0-b78f0494-6be6b362;type=CS::GENERAL;priority=0;; 2025-03-28T09:33:48.266762Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2932:4266];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=c05bd748-bb711f0-b78f0494-6be6b362;mem=5382;cpu=0; 2025-03-28T09:33:48.266819Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent=[1:2932:4266];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=c05bd748-bb711f0-b78f0494-6be6b362;task_id=21;mem=5382;cpu=0; 2025-03-28T09:33:48.266902Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:2978:4298];tablet_id=72075186224037893;parent=[1:2932:4266];fline=manager.cpp:82;event=ask_data;request=request_id=41;16={portions_count=2};; 2025-03-28T09:33:48.267086Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:2978:4298];tablet_id=72075186224037893;parent=[1:2932:4266];fline=columnshard_impl.cpp:1035;background=cleanup;changes_info=type=CS::CLEANUP::PORTIONS;details=(drop 2 portions(portion_id:18;path_id:16;records_count:1;min_schema_snapshot:(plan_step=9500;tx_id=281474976715735;);schema_version:1;level:0;column_size:1136;index_size:0;meta:((produced=INSERTED;));remove_snapshot:(plan_step=1734516167500;tx_id=18446744073709551615;);)(portion_id:17;path_id:16;records_count:1;min_schema_snapshot:(plan_step=9500;tx_id=281474976715735;);schema_version:1;level:0;column_size:1520;index_size:0;meta:((produced=SPLIT_COMPACTED;));remove_snapshot:(plan_step=1734516167500;tx_id=18446744073709551615;);));; 2025-03-28T09:33:48.267358Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037893 2025-03-28T09:33:48.267521Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[45] (CS::CLEANUP::PORTIONS) apply at tablet 72075186224037893 2025-03-28T09:33:48.268172Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3792;raw_bytes=49660;count=2;records=44} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=109336;raw_bytes=3566557;count=2;records=2967} inactive {blob_bytes=2656;raw_bytes=2178;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037893 2025-03-28T09:33:48.268398Z node 1 :TX_COLUMNSHARD INFO: 
self_id=[1:2978:4298];tablet_id=72075186224037893;parent=[1:2932:4266];fline=manager.cpp:82;event=ask_data;request=request_id=42;16={portions_count=2};; 2025-03-28T09:33:48.268571Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:2978:4298];tablet_id=72075186224037893;parent=[1:2932:4266];fline=columnshard_impl.cpp:881;event=compaction;external_task_id=c05bd748-bb711f0-b78f0494-6be6b362; 2025-03-28T09:33:48.268643Z node 1 :TX_COLUMNSHARD DEBUG: self_id=[1:2978:4298];tablet_id=72075186224037893;parent=[1:2932:4266];fline=columnshard_impl.cpp:620;event=start_changes;type=CS::GENERAL;task_id=c05bd748-bb711f0-b78f0494-6be6b362; 2025-03-28T09:33:48.268916Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=c05bd748-bb711f0-b78f0494-6be6b362;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2025-03-28T09:33:48.269950Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=c05bd748-bb711f0-b78f0494-6be6b362; 2025-03-28T09:33:48.275191Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;parent_id=[1:2932:4266];fline=general_compaction.cpp:133;event=blobs_created_diff;appended=0;;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:0:264];;column_id:3;chunk_idx:0;blob_range:[NO_BLOB:264:256];;column_id:2;chunk_idx:0;blob_range:[NO_BLOB:520:232];;column_id:4294967040;chunk_idx:0;blob_range:[NO_BLOB:752:192];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:944:192];;column_id:4294967041;chunk_idx:0;blob_range:[NO_BLOB:1136:192];;column_id:4;chunk_idx:0;blob_range:[NO_BLOB:1328:192];;;;switched=(portion_id:20;path_id:16;records_count:1;min_schema_snapshot:(plan_step=9500;tx_id=281474976715735;);schema_version:1;level:0;column_size:1136;index_size:0;meta:((produced=INSERTED;)););(portion_id:19;path_id:16;records_count:1;min_schema_snapshot:(plan_step=9500;tx_id=281474976715735;);schema_version:1;level:0;column_size:1520;index_size:0;meta:((produced=SPLIT_COMPACTED;)););; 2025-03-28T09:33:48.275272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037893;parent_id=[1:2932:4266];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-28T09:33:48.275509Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037893;self_id=[1:2932:4266];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-03-28T09:33:48.275891Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037893 2025-03-28T09:33:48.276151Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[47] (CS::GENERAL) apply at tablet 72075186224037893 2025-03-28T09:33:48.277823Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037893 Save Batch GenStep: 1:18 Blob count: 1 2025-03-28T09:33:48.277969Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3792;raw_bytes=49660;count=2;records=44} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=109336;raw_bytes=3566557;count=2;records=2967} inactive {blob_bytes=2656;raw_bytes=2178;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037893 Cleaning waiting... 
Fake storage clean FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor with no CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/Root/tier1' stopped at tablet 72075186224037893 >> KqpSysColV1::StreamSelectRowById ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange3 [GOOD] Test command err: Trying to start YDB, gRPC: 24654, MsgBus: 8558 2025-03-28T09:33:42.855162Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791235267124281:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:42.856073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f5f/r3tmp/tmpmq776K/pdisk_1.dat 2025-03-28T09:33:43.372196Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:43.372302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:43.375414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:43.394451Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24654, node 1 2025-03-28T09:33:43.454828Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:43.454849Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:43.454855Z node 1 
:NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:43.454965Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8558 TClient is connected to server localhost:8558 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:44.219032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.265957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.524977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.687336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.765671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:46.556978Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791252446995246:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.557106Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.943987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.978750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:47.047225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:47.076074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:47.124846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:47.169686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:47.236010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791256741963059:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.236086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.236424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791256741963064:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.242018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:47.254556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791256741963066:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:47.320265Z node 1 :TX_PROXY ERROR: Actor# [1:7486791256741963120:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:47.856020Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791235267124281:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:47.856132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:48.718649Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154428683, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 18252, MsgBus: 26709 2025-03-28T09:33:42.537295Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791234801796450:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:42.537365Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f8c/r3tmp/tmpI43fwC/pdisk_1.dat 2025-03-28T09:33:43.166713Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:43.185117Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:43.185266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:43.188363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18252, node 1 2025-03-28T09:33:43.359276Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:43.359319Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:43.359326Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:43.359424Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26709 TClient is connected to server localhost:26709 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:44.316354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.352762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.545598Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.710820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.799126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:46.365120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791251981667252:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.365237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.712567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.758659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.789434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.821560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.861702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.939655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.998192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791251981667769:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.998339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.998554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791251981667774:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.001633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:47.013152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791251981667776:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:47.092202Z node 1 :TX_PROXY ERROR: Actor# [1:7486791256276635125:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:47.533502Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791234801796450:2210];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:47.533574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:48.720036Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154428736, txId: 281474976710671] shutting down [[[108u];["One"];[8];["Value5"];[108u];["One"];#;["Value31"]]] >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] >> KqpSystemView::PartitionStatsRange2 [GOOD] >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 26168, MsgBus: 12205 2025-03-28T09:33:42.519050Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791237742339907:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:42.520271Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f7a/r3tmp/tmpRewtyD/pdisk_1.dat 2025-03-28T09:33:43.056189Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:43.099746Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:43.099829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:43.102863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26168, node 1 2025-03-28T09:33:43.356721Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:43.356753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:43.356769Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:43.356891Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12205 TClient is connected to server localhost:12205 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:44.346241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.385543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.570924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.746606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.834466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:46.183421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791254922210861:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.183505Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.712497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.763742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.801245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.834808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.878431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.921621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.989677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791254922211372:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.989767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.990167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791254922211377:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.999730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:47.010321Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791254922211379:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:47.109052Z node 1 :TX_PROXY ERROR: Actor# [1:7486791259217178731:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:47.517713Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791237742339907:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:47.517784Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 12589, MsgBus: 6298 2025-03-28T09:33:42.981833Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791237370550171:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:42.991821Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f62/r3tmp/tmpDnZgyj/pdisk_1.dat 2025-03-28T09:33:43.488040Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:43.492497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:43.492581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:43.495977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12589, node 1 2025-03-28T09:33:43.681412Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:43.681449Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:43.681460Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:43.681608Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6298 TClient is connected to server localhost:6298 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:33:44.473530Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.512284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.736202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.901028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.989228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:46.753473Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791254550420972:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.753591Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.034446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:47.063816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:47.096259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:47.127695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:47.159532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:47.191658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:47.274677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791258845388783:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.274765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.274849Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791258845388788:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.278677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:47.290406Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791258845388790:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:47.385488Z node 1 :TX_PROXY ERROR: Actor# [1:7486791258845388845:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:47.968566Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791237370550171:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:47.968630Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:49.055620Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154429079, txId: 281474976710671] shutting down |93.3%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.3%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] >> KqpSysColV0::InnerJoinSelect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 23850, MsgBus: 9673 2025-03-28T09:33:44.468171Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791246698005470:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:44.468218Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f53/r3tmp/tmpQeew2P/pdisk_1.dat 2025-03-28T09:33:44.911018Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:44.912246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:44.912351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:44.917157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23850, node 1 2025-03-28T09:33:45.038762Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:45.038790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:45.038798Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:45.038914Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9673 TClient is connected to server localhost:9673 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:45.697407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:45.730438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:45.880645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:46.024758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:46.110263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:47.891831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791259582909143:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.892048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:48.224991Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.269899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.321920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.371064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.450544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.512209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.574752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791263877876958:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:48.574840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:48.575011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791263877876963:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:48.579055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:48.592405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791263877876965:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:48.647724Z node 1 :TX_PROXY ERROR: Actor# [1:7486791263877877018:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:49.470807Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791246698005470:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:49.479039Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:49.903581Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154429887, txId: 281474976710671] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 11913, MsgBus: 62709 2025-03-28T09:33:44.225440Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791245358908297:2270];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:44.225503Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f59/r3tmp/tmpeAisKc/pdisk_1.dat 2025-03-28T09:33:44.706491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:44.706587Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:44.708015Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11913, node 1 2025-03-28T09:33:44.750884Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:44.805457Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:33:44.806502Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:33:44.887313Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:44.887339Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:44.887346Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:44.887438Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62709 TClient is connected to server localhost:62709 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:45.589801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:45.618646Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:45.650932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:45.810156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:45.988526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:46.061099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:47.857656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791258243811735:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.857803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:48.289428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.362965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.402645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.444268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.515113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.550028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.604022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791262538779552:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:48.604116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:48.604509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791262538779557:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:48.607875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:48.621565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791262538779559:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:48.726508Z node 1 :TX_PROXY ERROR: Actor# [1:7486791262538779614:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:49.230266Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791245358908297:2270];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:49.230648Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:49.893209Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154429926, txId: 281474976710671] shutting down >> KqpSysColV0::SelectRowAsterisk [GOOD] >> KqpSystemView::NodesRange2 |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 26158, MsgBus: 14955 2025-03-28T09:33:44.133584Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791243272752245:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:44.133649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f58/r3tmp/tmpkJhRG6/pdisk_1.dat 2025-03-28T09:33:44.666880Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:44.666993Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:44.668975Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:44.724892Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26158, node 1 2025-03-28T09:33:44.782702Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:44.782737Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:44.782744Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:44.782882Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14955 TClient is connected to server localhost:14955 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:45.430464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:45.461628Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:45.482792Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:33:45.661187Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:45.874207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:45.962522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:47.703859Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791256157655669:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.704015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:48.118945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.171032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.234274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.288442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.330047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.404988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.459001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791260452623483:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:48.459076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:48.460498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791260452623488:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:48.464441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:48.480847Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791260452623490:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:48.550127Z node 1 :TX_PROXY ERROR: Actor# [1:7486791260452623543:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:49.135734Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791243272752245:2276];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:49.135790Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSysColV1::SelectRowAsterisk >> KqpSysColV1::UpdateAndDelete >> KqpSystemView::PartitionStatsRange1 [GOOD] >> KqpSysColV0::UpdateAndDelete |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange [GOOD] >> KqpSystemView::Sessions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 6920, MsgBus: 62039 2025-03-28T09:33:44.893422Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791244273128157:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:44.893463Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f4d/r3tmp/tmp87vTga/pdisk_1.dat 2025-03-28T09:33:45.476645Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:45.476723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:45.500981Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:45.515905Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6920, node 1 2025-03-28T09:33:45.622245Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:45.622281Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:45.622298Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:45.622439Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62039 TClient is connected to server localhost:62039 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:46.159797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:46.188982Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:46.198806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:46.332008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:46.513573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.602101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:48.420981Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791261452999113:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:48.421090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:48.774613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.812997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.848704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.908973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.941082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.980858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:49.071033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791265747966928:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:49.071128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:49.071539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791265747966933:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:49.075997Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:49.095592Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:33:49.096308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791265747966935:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:49.159234Z node 1 :TX_PROXY ERROR: Actor# [1:7486791265747966991:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:49.893542Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791244273128157:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:49.893619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSystemView::QueryStatsScan >> KqpSystemView::PartitionStatsRanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 12717, MsgBus: 11976 2025-03-28T09:33:46.177851Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791253084961407:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:46.178210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f47/r3tmp/tmpNq7mwT/pdisk_1.dat 2025-03-28T09:33:46.620212Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:46.623867Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:46.624002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:46.626525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12717, node 1 2025-03-28T09:33:46.803275Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:46.803303Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:46.803310Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:46.803429Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11976 TClient is connected to server localhost:11976 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:47.412303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:47.452715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:47.583911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:47.753022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:47.834455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:49.512754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791265969864952:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:49.512888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:49.841100Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:49.874448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:49.910072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:49.942324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:49.972446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.043118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.125031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791270264832767:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:50.125137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:50.125532Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791270264832772:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:50.129521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:50.143597Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791270264832774:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:50.218429Z node 1 :TX_PROXY ERROR: Actor# [1:7486791270264832831:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:51.169224Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791253084961407:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:51.169328Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/ut_with_sdk/unittest >> TopicAutoscaling::PartitionSplit_AutosplitByLoad [GOOD] Test command err: 2025-03-28T09:32:00.898062Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790797673677470:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:00.899464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001354/r3tmp/tmpuEjLLz/pdisk_1.dat 2025-03-28T09:32:01.107974Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:32:01.327847Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:01.339736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:01.339839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:01.356068Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18891, node 1 2025-03-28T09:32:01.800299Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001354/r3tmp/yandexk1umqB.tmp 2025-03-28T09:32:01.800325Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001354/r3tmp/yandexk1umqB.tmp 2025-03-28T09:32:01.801447Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001354/r3tmp/yandexk1umqB.tmp 2025-03-28T09:32:01.801598Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:32:02.034979Z INFO: TTestServer started on Port 24971 GrpcPort 18891 TClient is connected to server localhost:24971 PQClient connected to localhost:18891 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:32:02.433313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:02.467453Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:32:02.481082Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:32:02.494420Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:32:03.803804Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790810558580045:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.803807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790810558580039:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.803950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:32:03.809919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:32:03.823923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790810558580054:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:32:03.882279Z node 1 :TX_PROXY ERROR: Actor# [1:7486790810558580118:2451] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:32:04.137465Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790810558580133:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:32:04.148155Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGZiNGRiNjktYjM4Y2VjNmQtNWI2NDljZDUtOTQ3MDU5ODI=, ActorId: [1:7486790810558580022:2336], ActorState: ExecuteState, TraceId: 01jqe1g89m7hy4zv9w6c5gqqy1, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:32:04.151935Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:32:04.204615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.240166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:32:04.324193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7486790814853547710:2634] 2025-03-28T09:32:05.896906Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486790797673677470:2201];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:05.896987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList.
Ok 2025-03-28T09:32:10.444315Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2025-03-28T09:32:10.454209Z node 1 :PQ_READ_PROXY DEBUG: new create topic request 2025-03-28T09:32:10.455479Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7486790840623351774:2790], Recipient [1:7486790801968645032:2184]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:10.455514Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:32:10.455528Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:32:10.455559Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7486790840623351770:2787], Recipient [1:7486790801968645032:2184]: {TEvModifySchemeTransaction txid# 281474976710673 TabletId# 72057594046644480} 2025-03-28T09:32:10.455574Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:32:10.496172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "test-topic" TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 100 ScaleThresholdSeconds: 300 ScaleUpPartitionWriteSpeedThresholdPercent: 90 ScaleDownPartitionWriteSpeedThresholdPercent: 30 PartitionStrategyType: CAN_SPLIT } Consumers { Name: "test-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 } } } } TxId: 281474976710673 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:32:10.496651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /Root/test-topic, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:32:10.496883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: test-topic, child id: [OwnerId: 72057594046644480, LocalPathId: 13], at schemeshard: 72057594046644480 2025-03-28T09:32:10.496926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 0 2025-03-28T09:32:10.496972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 13] was 1 2025-03-28T09:32:10.497003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId
[OwnerId: 72057594046644480, LocalPathId: 13 ... eIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.159775Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:50.159849Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:50.159880Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:50.159911Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:50.246849Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791249294915147:2981], Partition 4, Sender [0:0:0], Recipient [5:7486791249294915239:2988], Cookie: 0 2025-03-28T09:33:50.246935Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791249294915239:2988]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.246961Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.247013Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:50.247085Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:50.247111Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:50.247138Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:50.247265Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791184870403574:2457], Partition 0, Sender [0:0:0], Recipient [5:7486791184870403642:2461], Cookie: 0 2025-03-28T09:33:50.247301Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791184870403642:2461]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.247315Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.247343Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:50.247375Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:50.247391Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:50.247410Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites.
Responses.size()=0 2025-03-28T09:33:50.247576Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791227820078027:2832], Partition 1, Sender [0:0:0], Recipient [5:7486791227820078108:2839], Cookie: 0 2025-03-28T09:33:50.247611Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791227820078108:2839]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.247626Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.247650Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:50.247682Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:50.247698Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:50.247716Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:50.247756Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791249294915148:2982], Partition 3, Sender [0:0:0], Recipient [5:7486791249294915243:2992], Cookie: 0 2025-03-28T09:33:50.247787Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791249294915243:2992]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.247801Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.247825Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:50.247853Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:50.247867Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:50.247886Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites.
Responses.size()=0 2025-03-28T09:33:50.250730Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [5:7486791141920729506:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:33:50.250772Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:33:50.250836Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [5:7486791141920729506:2148], Recipient [5:7486791141920729506:2148]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:33:50.250855Z node 5 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:33:50.283968Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791227820078025:2831], Partition 2, Sender [0:0:0], Recipient [5:7486791227820078113:2842], Cookie: 0 2025-03-28T09:33:50.284049Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791227820078113:2842]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.284074Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.284119Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:50.284191Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:50.284215Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:50.284246Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037896, Partition: 2, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:50.350518Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791249294915147:2981], Partition 4, Sender [0:0:0], Recipient [5:7486791249294915239:2988], Cookie: 0 2025-03-28T09:33:50.350610Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791249294915239:2988]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.350637Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.350700Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:50.350773Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:50.350805Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:50.350836Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037898, Partition: 4, State: StateIdle] TPartition::AnswerCurrentWrites.
Responses.size()=0 2025-03-28T09:33:50.350900Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791184870403574:2457], Partition 0, Sender [0:0:0], Recipient [5:7486791184870403642:2461], Cookie: 0 2025-03-28T09:33:50.350937Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791184870403642:2461]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.350951Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.350978Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:50.351009Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:50.351023Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:50.351040Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:50.351082Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791227820078027:2832], Partition 1, Sender [0:0:0], Recipient [5:7486791227820078108:2839], Cookie: 0 2025-03-28T09:33:50.351113Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791227820078108:2839]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.351126Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.351150Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:50.351179Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:50.351193Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:50.351209Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037897, Partition: 1, State: StateIdle] TPartition::AnswerCurrentWrites. Responses.size()=0 2025-03-28T09:33:50.351250Z node 5 :PERSQUEUE TRACE: StateIdle event# 271188505 (NKikimr::TEvPQ::TEvUpdateAvailableSize), Tablet [5:7486791249294915148:2982], Partition 3, Sender [0:0:0], Recipient [5:7486791249294915243:2992], Cookie: 0 2025-03-28T09:33:50.351285Z node 5 :PERSQUEUE TRACE: StateIdle, received event# 271188505, Sender [0:0:0], Recipient [5:7486791249294915243:2992]: NKikimr::TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.351299Z node 5 :PERSQUEUE TRACE: StateIdle, processing event TEvPQ::TEvUpdateAvailableSize 2025-03-28T09:33:50.351322Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete old stuff 2025-03-28T09:33:50.351351Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] Have 0 items to delete all stuff. Delete command NKikimrClient.TKeyValueRequest 2025-03-28T09:33:50.351369Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::ProcessReserveRequests. 2025-03-28T09:33:50.351385Z node 5 :PERSQUEUE TRACE: [PQ: 72075186224037899, Partition: 3, State: StateIdle] TPartition::AnswerCurrentWrites.
Responses.size()=0 >> KqpSysColV0::SelectRowById >> KqpSysColV1::SelectRange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 8238, MsgBus: 8878 2025-03-28T09:33:46.666378Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791252816151120:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:46.666427Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f3c/r3tmp/tmpSJQe2q/pdisk_1.dat 2025-03-28T09:33:47.089799Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:47.123443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 8238, node 1 2025-03-28T09:33:47.123627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:47.125313Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:47.180850Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:47.180870Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:47.180879Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:47.180987Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8878 TClient is connected to server localhost:8878 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:47.740715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:47.758402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:47.905676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting...
2025-03-28T09:33:48.102352Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:48.186979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:49.980238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791265701054797:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:49.980353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:50.293069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.334862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.375855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.411735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.446733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.480426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.569761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791269996022613:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:50.569857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:50.569936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791269996022618:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:50.573735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:50.588393Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 2025-03-28T09:33:50.588694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791269996022620:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:50.654736Z node 1 :TX_PROXY ERROR: Actor# [1:7486791269996022675:3454] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:51.666702Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791252816151120:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:51.701519Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:51.876282Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154431862, txId: 281474976710671] shutting down >> KqpSysColV0::InnerJoinSelectAsterisk >> KqpSystemView::FailResolve ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 13278, MsgBus: 24049 2025-03-28T09:33:47.010127Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791254172325269:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:47.012462Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f30/r3tmp/tmpDIEqDz/pdisk_1.dat 2025-03-28T09:33:47.390595Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:47.393770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:47.393877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:47.397823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13278, node 1 2025-03-28T09:33:47.470603Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:47.470632Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:47.470638Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:47.470751Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24049 TClient is connected to server localhost:24049 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:48.029192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:48.058926Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:48.067667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:48.248110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:48.434654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:48.538145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:50.201124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791271352196085:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:50.201271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:50.497625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.531860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.569086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.621772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.656092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.692158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.773912Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791271352196601:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:50.773996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:50.774125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791271352196606:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:50.776797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:50.801308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791271352196608:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:50.858038Z node 1 :TX_PROXY ERROR: Actor# [1:7486791271352196663:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:52.004796Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791254172325269:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:52.004871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpSystemView::PartitionStatsParametricRanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRanges [GOOD] Test command err: Trying to start YDB, gRPC: 17299, MsgBus: 20945 2025-03-28T09:33:47.088732Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791256751763067:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:47.088810Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f32/r3tmp/tmpNOThpP/pdisk_1.dat 2025-03-28T09:33:47.440594Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17299, node 1 2025-03-28T09:33:47.502520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:47.502706Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:47.534226Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:47.563192Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:47.563225Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:47.563241Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:47.566394Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20945 TClient is connected to server localhost:20945 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:48.209140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:48.224970Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:48.234895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:48.413913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:33:48.592366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:48.676704Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:50.419955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791269636666712:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:50.420112Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:50.778218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.813116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.840557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.878236Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.908395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.980965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:51.064655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791273931634529:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:51.064732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:51.064942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791273931634534:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:51.068345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:51.082136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791273931634536:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:51.172356Z node 1 :TX_PROXY ERROR: Actor# [1:7486791273931634593:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:52.088318Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791256751763067:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:52.088433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:52.563383Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154432551, txId: 281474976710671] shutting down >> KqpSystemView::ReadSuccess [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 28400, MsgBus: 29407 2025-03-28T09:33:47.495045Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791257278138191:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:47.499036Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f28/r3tmp/tmpfgXTJx/pdisk_1.dat 2025-03-28T09:33:47.909412Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:47.947983Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:47.948109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:47.954216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28400, node 1 2025-03-28T09:33:48.071330Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:48.071366Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:48.071375Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:48.071497Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29407 TClient is connected to server localhost:29407 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:48.952685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:48.967292Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:48.981472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:49.206342Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:49.390305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:49.464079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:51.259892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791274458009013:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:51.260014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:51.580620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:51.614740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:51.640360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:51.665228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:51.697317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:51.741945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:51.836362Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791274458009532:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:51.836442Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:51.836649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791274458009537:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:51.840205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:51.851718Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791274458009539:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:51.933987Z node 1 :TX_PROXY ERROR: Actor# [1:7486791274458009593:3455] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:52.486805Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791257278138191:2197];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:52.486865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |93.4%| [TA] $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess [GOOD] Test command err: Trying to start YDB, gRPC: 24152, MsgBus: 5125 2025-03-28T09:33:48.418772Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791263617532264:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:48.439157Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f10/r3tmp/tmpuOacfX/pdisk_1.dat 2025-03-28T09:33:48.927140Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:48.946179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:48.946270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:48.950195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24152, node 1 2025-03-28T09:33:49.186419Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:49.186446Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:49.186454Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:49.186563Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5125 TClient is connected to server localhost:5125 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:49.858092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:49.884428Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:49.905449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:50.050897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:50.229132Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:33:50.301829Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:51.989685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791276502435782:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:51.989856Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:52.341090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:52.376919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:52.446739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:52.475689Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:52.505642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:52.577932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:52.625637Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791280797403599:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:52.625740Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:52.626092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791280797403604:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:52.629492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:52.639024Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791280797403606:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:52.737394Z node 1 :TX_PROXY ERROR: Actor# [1:7486791280797403660:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:53.372756Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791263617532264:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:53.372822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:53.719573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:33:53.853698Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqe1kkpe3d314kqjm4401byx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTBiYjdmM2YtNDFiNDdhYTgtZDA5YTEwMi1hYmI2ODU3MQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:33:53.862837Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154433851, txId: 281474976710672] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsParametricRanges [GOOD] Test command err: Trying to start YDB, gRPC: 14074, MsgBus: 14634 2025-03-28T09:33:48.235547Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791262600958704:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:48.235581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f18/r3tmp/tmp3BCbus/pdisk_1.dat 2025-03-28T09:33:48.732164Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:48.748681Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:48.748763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:48.752432Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14074, node 1 2025-03-28T09:33:48.868918Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:48.868943Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:48.868970Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:48.869103Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14634 TClient is connected to server localhost:14634 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:49.445903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:49.482920Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:49.498844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:49.616193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:49.761615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:49.851973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:51.524739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791275485862376:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:51.524822Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:51.878527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:51.910288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:51.938571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:51.969864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:52.006931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:52.075312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:52.141634Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791279780830188:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:52.141694Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:52.141803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791279780830193:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:52.145564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:52.158451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791279780830195:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:52.241742Z node 1 :TX_PROXY ERROR: Actor# [1:7486791279780830247:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:53.235926Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791262600958704:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:53.236015Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:53.736838Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154433721, txId: 281474976710671] shutting down >> KqpSystemView::PartitionStatsSimple [GOOD] |93.4%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes+UseSink >> KqpSysColV1::StreamSelectRowById [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 65243, MsgBus: 16491 2025-03-28T09:33:50.071346Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791271787009733:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:50.071390Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f00/r3tmp/tmph29dH8/pdisk_1.dat 2025-03-28T09:33:50.441457Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65243, node 1 2025-03-28T09:33:50.490931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:50.491247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:50.511725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:50.559818Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:50.559842Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:50.559848Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:50.559969Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16491 TClient is connected to server localhost:16491 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:51.174071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:51.191563Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:51.197354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:51.353333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:51.534491Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:51.614242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:53.311864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791284671913404:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:53.312034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:53.589246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:53.622332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:53.651513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:53.717574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:53.744999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:53.816024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:53.856686Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791284671913919:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:53.856750Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:53.857019Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791284671913924:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:53.861018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:53.871220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791284671913926:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:53.944189Z node 1 :TX_PROXY ERROR: Actor# [1:7486791284671913979:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:55.071569Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791271787009733:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:55.071627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:55.137315Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154435128, txId: 281474976710671] shutting down >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 14577, MsgBus: 24549 2025-03-28T09:33:50.758462Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791269120759136:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:50.759021Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000eff/r3tmp/tmpRx4oYj/pdisk_1.dat 2025-03-28T09:33:51.169264Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:51.207631Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:51.207749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:51.210320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14577, node 1 2025-03-28T09:33:51.411301Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:51.411352Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:51.411367Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:51.411485Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24549 TClient is connected to server 
localhost:24549 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:51.921451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:51.941010Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:52.087874Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:52.242379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:33:52.322956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:53.989992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791282005662807:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:53.990115Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:54.328851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:54.402559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:54.435038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:54.467242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:54.501729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:54.571358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:54.613667Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791286300630619:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:54.613730Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:54.613738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791286300630624:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:54.617717Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:54.630128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791286300630626:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:54.721957Z node 1 :TX_PROXY ERROR: Actor# [1:7486791286300630679:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:55.758537Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791269120759136:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:55.758640Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:55.898506Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154435932, txId: 281474976710671] shutting down |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService >> KqpSysColV1::SelectRowAsterisk [GOOD] >> KqpSystemView::NodesSimple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 20567, MsgBus: 1192 2025-03-28T09:33:51.456837Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791275784154177:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:51.456870Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ef5/r3tmp/tmpg2ota8/pdisk_1.dat 2025-03-28T09:33:51.836371Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:51.848909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:51.849011Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:51.851422Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20567, node 1 2025-03-28T09:33:51.939217Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:51.939237Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:51.939243Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:51.939351Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1192 TClient is connected to server localhost:1192 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:52.481702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:52.514700Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:52.628335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:52.809963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:52.888784Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:54.544208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791288669057830:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:54.544350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:54.843910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:54.874607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:54.903096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:54.968911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:54.994343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:55.027878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:55.073352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791292964025640:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:55.073451Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:55.073562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791292964025645:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:55.076766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:55.087417Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791292964025647:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:55.185537Z node 1 :TX_PROXY ERROR: Actor# [1:7486791292964025700:3441] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:56.457172Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791275784154177:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:56.457236Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:56.681900Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154436716, txId: 281474976710671] shutting down |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> DataShardVolatile::DistributedUpsertRestartAfterPlan [GOOD] >> DataShardVolatile::CompactedVolatileChangesCommit >> KqpSystemView::Join [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 21136, MsgBus: 29436 2025-03-28T09:33:52.436910Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791279971396281:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:52.437035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000eea/r3tmp/tmp0rliZ2/pdisk_1.dat 2025-03-28T09:33:52.809413Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:52.830556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:52.830677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:52.833459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21136, node 1 2025-03-28T09:33:52.950182Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:52.950212Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:52.950219Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:52.950347Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29436 TClient is connected to server localhost:29436 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:53.494168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:53.514613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:53.664020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:33:53.817212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:53.885178Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:55.681116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791292856299954:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:55.681229Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:55.954254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.021728Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.049856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.076591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.105849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.136002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.174645Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791297151267762:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.174734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.174768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791297151267767:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.177715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:56.185202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791297151267769:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:56.258948Z node 1 :TX_PROXY ERROR: Actor# [1:7486791297151267823:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:57.436731Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791279971396281:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:57.436783Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpSystemView::PartitionStatsFollower [GOOD] >> KqpSysColV0::SelectRowById [GOOD] >> KqpSystemView::FailResolve [GOOD] >> KqpSysColV1::UpdateAndDelete [GOOD] >> KqpSystemView::QueryStatsScan [GOOD] >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesSimple [GOOD] Test command err: Trying to start YDB, gRPC: 3025, MsgBus: 25424 2025-03-28T09:33:50.384461Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791269472592147:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:50.384766Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:50.469828Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791271332921018:2165];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:50.471290Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:50.480471Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486791271987377164:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:50.480583Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2909/000f08/r3tmp/tmpgrmzZo/pdisk_1.dat 2025-03-28T09:33:51.185707Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:51.210833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:51.210940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:51.211497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:51.211542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:51.211652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:51.211694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:51.220548Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T09:33:51.220614Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:33:51.220943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:51.222032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:51.225223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3025, node 1 2025-03-28T09:33:51.530748Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:51.530772Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:51.530780Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:51.530929Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25424 TClient is connected to server localhost:25424 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:52.240514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:33:52.289413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:52.614576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:52.885260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:53.026395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:54.869540Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791286652463131:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:54.869691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:55.134055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:55.191659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:55.251421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:55.336614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:55.380118Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791269472592147:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:55.380190Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:55.409203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:55.466380Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486791271332921018:2165];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:55.466440Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:55.470200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:55.478229Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486791271987377164:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:55.478281Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:55.540802Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791290947431117:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:55.540882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:55.541279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791290947431122:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:55.545315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:55.567423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791290947431124:2405], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:55.711079Z node 1 :TX_PROXY ERROR: Actor# [1:7486791290947431197:4110] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:56.883395Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154436839, txId: 281474976710671] shutting down 2025-03-28T09:33:57.020272Z node 2 :BS_PROXY_PUT ERROR: [cd9fec28c44a9624] Result# TEvPutResult {Id# [72075186224037915:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037915:1:17:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-28T09:33:57.020252Z node 3 :BS_PROXY_PUT ERROR: [6d7efd59dd1bbe97] Result# TEvPutResult {Id# [72075186224037895:1:17:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037895:1:17:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Join [GOOD] Test command err: Trying to start YDB, gRPC: 28533, MsgBus: 1169 2025-03-28T09:33:42.554513Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791234595999272:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:42.554556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f80/r3tmp/tmpeYYfKa/pdisk_1.dat 2025-03-28T09:33:43.086962Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28533, node 1 2025-03-28T09:33:43.356938Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:43.356968Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:43.356977Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:43.357129Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:33:43.362166Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:43.362282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:43.363694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1169 TClient is 
connected to server localhost:1169 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:44.325534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.354771Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:44.374469Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.628942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.823450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.903476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:46.186828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791251775870203:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.186923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.709840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.747977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.786125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.859266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.958330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.998801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:47.054213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791256070838018:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.054290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.054556Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791256070838023:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:47.057699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:47.066044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791256070838025:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:47.167880Z node 1 :TX_PROXY ERROR: Actor# [1:7486791256070838083:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:47.554953Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791234595999272:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:47.555057Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:48.402290Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154428379, txId: 281474976710671] shutting down waiting... 2025-03-28T09:33:49.566127Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154429547, txId: 281474976710673] shutting down waiting... 2025-03-28T09:33:50.721807Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154430713, txId: 281474976710675] shutting down waiting... 2025-03-28T09:33:51.946855Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154431925, txId: 281474976710677] shutting down waiting... 2025-03-28T09:33:53.154349Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154433118, txId: 281474976710679] shutting down waiting... 2025-03-28T09:33:54.312606Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154434301, txId: 281474976710681] shutting down waiting... 2025-03-28T09:33:55.495226Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154435486, txId: 281474976710683] shutting down waiting... 2025-03-28T09:33:56.625542Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154436619, txId: 281474976710685] shutting down waiting... 
2025-03-28T09:33:57.814226Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154437803, txId: 281474976710687] shutting down 2025-03-28T09:33:58.083754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:33:58.083786Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:58.209387Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154438178, txId: 281474976710689] shutting down |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpSysColV0::UpdateAndDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsFollower [GOOD] Test command err: Trying to start YDB, gRPC: 32627, MsgBus: 12033 2025-03-28T09:33:42.595233Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791236064960595:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:42.595267Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f6f/r3tmp/tmp8OwDEx/pdisk_1.dat 2025-03-28T09:33:43.068017Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:43.083547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:43.086892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:43.098848Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 32627, node 1 2025-03-28T09:33:43.381747Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:43.381774Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:43.381784Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:43.381897Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12033 TClient is connected to server localhost:12033 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:33:44.282024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:44.311623Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:45.102776Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7486791236064961022:2204]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:33:45.102831Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:33:45.102921Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7486791236064961022:2204], Recipient [1:7486791236064961022:2204]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:33:45.102951Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:33:46.103632Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7486791236064961022:2204]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:33:46.103671Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:33:46.103729Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7486791236064961022:2204], Recipient [1:7486791236064961022:2204]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:33:46.103741Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:33:46.238736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791253244830434:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.238913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:46.710991Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7486791253244830461:2313], Recipient [1:7486791236064961022:2204]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:33:46.711030Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:33:46.711046Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:33:46.711104Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:7486791253244830457:2310], Recipient [1:7486791236064961022:2204]: {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:33:46.711117Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:33:46.777150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:33:46.777582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.777711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, schema: Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false, at schemeshard: 72057594046644480 2025-03-28T09:33:46.778143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Followers, child id: [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2025-03-28T09:33:46.778180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 0 2025-03-28T09:33:46.778213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:33:46.778332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:33:46.778347Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710658:0 1 -> 2 2025-03-28T09:33:46.778772Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSetPartitioning: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] path /Root/Followers ShardIndices size 1 
2025-03-28T09:33:46.779494Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TCreateTable Propose creating new table opId# 281474976710658:0 path# /Root/Followers pathId# [OwnerId: 72057594046644480, LocalPathId: 2] schemeshard# 72057594046644480 tx# WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } FailOnExist: false 2025-03-28T09:33:46.779607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:33:46.779639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:33:46.779729Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T09:33:46.779771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:33:46.779800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2025-03-28T09:33:46.780312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 2, at schemeshard: 72057594046644480 2025-03-28T09:33:46.780432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Followers 2025-03-28T09:33:46.780458Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:33:46.780471Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 281474976710658:0 2025-03-28T09:33:46.780632Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:7486791236064961022:2204], Recipient [1:7486791236064961022:2204]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T09:33:46.780647Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T09:33:46.780683Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:33:46.780695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:33:46.780795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, Lo ... 
rkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7486791253244830524:2341]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:33:56.887037Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:33:56.887065Z node 1 :TX_DATASHARD DEBUG: SendPeriodicTableStats register new pipe at datashard 72075186224037888 FollowerId 1, TableInfos size = 1 2025-03-28T09:33:56.887091Z node 1 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 1, tableId 2 2025-03-28T09:33:56.887373Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:7486791296194503804:2532], Recipient [1:7486791236064961022:2204]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:33:56.887399Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:33:56.887411Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T09:33:56.887577Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269877760, Sender [1:7486791296194503803:2413], Recipient [1:7486791253244830522:2339]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [1:7486791296194503804:2532] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:33:56.887611Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T09:33:56.887666Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269553162, Sender [1:7486791253244830522:2339], Recipient [1:7486791236064961022:2204]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 1 Round: 0 TableStats { ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { } ShardState: 3 NodeId: 1 StartTime: 1743154426866 TableOwnerId: 72057594046644480 FollowerId: 1 2025-03-28T09:33:56.887682Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-28T09:33:56.887709Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 1 pathId [OwnerId: 72057594046644480, LocalPathId: 2] state 'Readonly' dataSize 0 rowCount 0 cpuUsage 0 2025-03-28T09:33:56.887772Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 1 pathId [OwnerId: 72057594046644480, LocalPathId: 2] raw table stats: ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-28T09:33:56.894938Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [1:7486791253244830523:2340]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:33:56.894973Z node 1 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:33:56.962859Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient 
[1:7486791236064961022:2204]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T09:33:56.962897Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T09:33:56.962948Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2025-03-28T09:33:56.962996Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 2 2025-03-28T09:33:56.963009Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2025-03-28T09:33:56.963055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046644480:1 data size 800 row count 4 2025-03-28T09:33:56.963109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], pathId map=Followers, is column=0, is olap=0 2025-03-28T09:33:56.963137Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 2: RowCount 4, DataSize 800 2025-03-28T09:33:56.963149Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 0 2025-03-28T09:33:56.963219Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:1 with partCount# 0, rowCount# 4, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-03-28T09:33:56.963277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046644480:1 data size 0 row count 0 2025-03-28T09:33:56.963294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=1, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], pathId map=Followers, is column=0, is olap=0 2025-03-28T09:33:56.963303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=1, pathId 2: RowCount 0, DataSize 0 2025-03-28T09:33:56.963309Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037888, followerId 1 2025-03-28T09:33:56.963352Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T09:33:56.964000Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [1:7486791236064961022:2204]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T09:33:56.964021Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T09:33:56.964034Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-28T09:33:56.966873Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] shardIdx 72057594046644480 1 followerId 0 stats DataSize: 800 RowCount: 4 IndexSize: 0 CPUCores: 0 TabletId: 72075186224037888 NodeId: 1 StartTime: 1743154426791 AccessTime: 1743154427752 UpdateTime: 1743154427496 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 RangeReadRows: 0 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 
FollowerId: 0 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-28T09:33:56.967032Z node 1 :SYSTEM_VIEWS TRACE: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 2] shardIdx 72057594046644480 1 followerId 1 stats DataSize: 0 RowCount: 0 IndexSize: 0 CPUCores: 0 TabletId: 72075186224037888 NodeId: 1 StartTime: 1743154426866 AccessTime: 0 UpdateTime: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 1 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-28T09:33:57.113548Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7486791236064961022:2204]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:33:57.113574Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:33:57.113607Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7486791236064961022:2204], Recipient [1:7486791236064961022:2204]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:33:57.113618Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:33:58.060003Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:7486791236064961022:2204]: NKikimr::NSchemeShard::TEvPrivate::TEvConsoleConfigsTimeout 2025-03-28T09:33:58.060044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:33:58.060057Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:58.114070Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7486791236064961022:2204]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:33:58.114112Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:33:58.114177Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7486791236064961022:2204], Recipient [1:7486791236064961022:2204]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:33:58.114193Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ... 
SELECT from partition_stats, attempt 2 2025-03-28T09:33:59.012752Z node 1 :SYSTEM_VIEWS INFO: Scan started, actor: [1:7486791309079405718:2423], owner: [1:7486791309079405714:2421], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-03-28T09:33:59.013231Z node 1 :SYSTEM_VIEWS INFO: Scan prepared, actor: [1:7486791309079405718:2423], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2025-03-28T09:33:59.013443Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274595843, Sender [1:7486791309079405718:2423], Recipient [1:7486791236064961022:2204]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2025-03-28T09:33:59.013472Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2025-03-28T09:33:59.013612Z node 1 :SYSTEM_VIEWS DEBUG: Sending scan batch, actor: [1:7486791309079405718:2423], row count: 2, finished: 1 2025-03-28T09:33:59.013656Z node 1 :SYSTEM_VIEWS INFO: Scan finished, actor: [1:7486791309079405718:2423], owner: [1:7486791309079405714:2421], scan id: 0, table id: [72057594046644480:1:0:partition_stats] 2025-03-28T09:33:59.025277Z node 1 :SYSTEM_VIEWS TRACE: Collect query stats: service id# [1:7486791236064960703:2173], database# /Root, query hash# 14960494650040056739, cpu time# 254452 2025-03-28T09:33:59.114520Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7486791236064961022:2204]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:33:59.114559Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T09:33:59.114608Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:7486791236064961022:2204], Recipient [1:7486791236064961022:2204]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:33:59.114625Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailResolve [GOOD] Test command err: Trying to start YDB, gRPC: 4228, MsgBus: 26403 2025-03-28T09:33:54.216849Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791289247283717:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:54.257145Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ea0/r3tmp/tmpz0yEjr/pdisk_1.dat 2025-03-28T09:33:54.682982Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-03-28T09:33:54.687280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:54.687368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:54.689591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4228, node 1 2025-03-28T09:33:54.766895Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:54.766916Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:54.766924Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:54.767046Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26403 TClient is connected to server localhost:26403 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:55.277757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:55.313336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:55.461451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:55.611428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:55.690077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:57.398792Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791302132187373:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.398902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.723055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.758323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.789241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.820022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.848334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.889451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.954579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791302132187885:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.954666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.954840Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791302132187890:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.958366Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:57.967225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791302132187892:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:58.038517Z node 1 :TX_PROXY ERROR: Actor# [1:7486791306427155241:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:59.015244Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:33:59.137743Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:7486791310722122840:3679], for# user0@builtin, access# SelectRow 2025-03-28T09:33:59.137878Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 1] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint64 : NULL, Uint64 : NULL, Uint64 : NULL, Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-28T09:33:59.146005Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Mjg4ZDk0YTUtNDAzMmU4NjEtMzk1ZDczMzItYjc4ZmExMGQ=, ActorId: [1:7486791310722122820:2492], ActorState: ExecuteState, TraceId: 01jqe1krvx9qtyg3e12yef98k9, Create QueryResponse for error on request, msg: 2025-03-28T09:33:59.146390Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154439136, txId: 281474976710672] shutting down 2025-03-28T09:33:59.146680Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. Ctx: { TraceId: 01jqe1krvx9qtyg3e12yef98k9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg4ZDk0YTUtNDAzMmU4NjEtMzk1ZDczMzItYjc4ZmExMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:33:59.214910Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791289247283717:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:59.215007Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 11319, MsgBus: 2417 2025-03-28T09:33:54.048419Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791289101131495:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:54.048621Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000eb0/r3tmp/tmpbhSj59/pdisk_1.dat 2025-03-28T09:33:54.388895Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:54.443046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:54.443152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:54.444744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11319, node 1 2025-03-28T09:33:54.515141Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:54.515166Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:54.515173Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:54.515316Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2417 TClient is connected to server localhost:2417 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
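The KqpSystemView::FailResolve block above (before the InnerJoinSelectAsterisk run that starts here) exercises the negative path: following the ESchemeOpModifyACL operation, key resolution for the sys-view read is denied (access# SelectRow for user0@builtin), the KQP executer reports Status: AccessDenied for the table entry, and the session returns an error response without running the scan. Reconstructed from the log rather than from the test source, the denied read looks roughly like this:

    -- assumed: executed in a session authenticated as user0@builtin
    SELECT * FROM `/Root/.sys/partition_stats`;
    -- key resolution fails with Status: AccessDenied before any scan is started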
2025-03-28T09:33:54.980232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:55.009828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:55.141796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:55.302790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:55.367369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.935936Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791297691067850:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.936048Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.223406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.257382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.291210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.360260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.387978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.457484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.506193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791301986035671:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.506268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791301986035676:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.506280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.509990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:57.519581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791301986035678:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:57.581218Z node 1 :TX_PROXY ERROR: Actor# [1:7486791301986035731:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:59.048409Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791289101131495:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:59.048510Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 23851, MsgBus: 11425 2025-03-28T09:33:53.799093Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791283032245595:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:53.804797Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000eab/r3tmp/tmp7ixpzr/pdisk_1.dat 2025-03-28T09:33:54.147909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:54.148047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:54.149982Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:54.172609Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23851, node 1 2025-03-28T09:33:54.274963Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:54.274984Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:54.274992Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:54.275136Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11425 TClient is connected to server localhost:11425 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:54.789487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:54.815293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:54.958162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:55.107956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:55.189706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:57.263872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791300212116414:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.264011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.569332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.604162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.635503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.667360Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.698256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.769220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.824791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791300212116929:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.824864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.825020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791300212116934:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.830824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:57.840025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791300212116936:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:57.901593Z node 1 :TX_PROXY ERROR: Actor# [1:7486791300212116989:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:58.807451Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791283032245595:2196];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:58.807712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 5414, MsgBus: 11512 2025-03-28T09:33:52.701767Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791279242841793:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:52.701840Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000edc/r3tmp/tmpibwA7V/pdisk_1.dat 2025-03-28T09:33:53.079989Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5414, node 1 2025-03-28T09:33:53.145227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:53.145331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:53.152276Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:53.195300Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:53.195331Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:53.195352Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:53.195512Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11512 TClient is connected to server localhost:11512 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
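Every test in this batch logs the same warm-up pattern: [TPoolFetcherActor] and [Service] lookups fail with NOT_FOUND until the workload service has created /Root/.metadata/workload_manager/pools/default itself, and the later TX_PROXY error ("path exist, request accepts it") is just the doublechecking retry observing the pool it has already created, so neither message indicates a test failure. For comparison, a user-defined pool would be declared roughly as in the sketch below; the statement shape follows YDB's resource-pool DDL, and the WITH parameters are illustrative values, not settings taken from this run:

    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- assumed value
        QUEUE_SIZE = 100              -- assumed value
    );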
2025-03-28T09:33:53.742975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:53.768990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:53.895383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:54.063032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:54.140454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:55.915744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791292127745461:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:55.915867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.204818Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.232841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.254370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.277349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.299870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.329310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.404761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791296422713271:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.404811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.404928Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791296422713276:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.408183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:56.416595Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791296422713278:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:56.497563Z node 1 :TX_PROXY ERROR: Actor# [1:7486791296422713334:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:57.701661Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791279242841793:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:57.701717Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsScan [GOOD] Test command err: Trying to start YDB, gRPC: 22685, MsgBus: 9916 2025-03-28T09:33:53.163961Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791281968467482:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:53.166111Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000eb5/r3tmp/tmp3UBawc/pdisk_1.dat TServer::EnableGrpc on GrpcPort 22685, node 1 2025-03-28T09:33:53.590752Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:53.590850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:53.592826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:53.607023Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:53.661667Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:53.661696Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:53.661707Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:53.661838Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9916 TClient is connected to server localhost:9916 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:54.298381Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:54.332050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:54.498101Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:54.654364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:54.733224Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:56.445439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791294853371121:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.445562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.775099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.803725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.832270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.863443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.890935Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.958502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:57.043887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791299148338937:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.044002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.044392Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791299148338942:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.047749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:57.062608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791299148338944:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:57.133409Z node 1 :TX_PROXY ERROR: Actor# [1:7486791299148338998:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:58.163470Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791281968467482:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:58.163574Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:59.072587Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154438536, txId: 281474976710671] shutting down 2025-03-28T09:33:59.184915Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154439178, txId: 281474976710674] shutting down >> Secret::Validation >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 25529, MsgBus: 25731 2025-03-28T09:33:52.751178Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791277670810053:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:52.751446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ed1/r3tmp/tmpCIo1F6/pdisk_1.dat 2025-03-28T09:33:53.228447Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:53.247828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:53.248079Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:53.251449Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25529, node 1 2025-03-28T09:33:53.335293Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:53.335312Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:53.335321Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:53.335419Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25731 TClient is connected to server localhost:25731 WaitRootIsUp 'Root'... 
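KqpSystemView::QueryStatsScan, whose run above ends with two scan snapshots being discarded ("KqpSnapshotManager: discarding snapshot ... shutting down"), reads aggregated query statistics back through system views. A sketch of that kind of read is below; the view name follows YDB's documented one-minute top-queries naming and is an assumption here, since the exact view queried is not visible in this log:

    -- assumed view name; one row per ranked query per one-minute interval
    SELECT IntervalEnd, Rank, Duration, QueryText
    FROM `/Root/.sys/top_queries_by_duration_one_minute`;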
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:53.869685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:53.909763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:54.075757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:54.241106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:54.318622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:56.067384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791294850681013:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.067533Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.358167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.383678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.409323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.435610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.462003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.529395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.605574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791294850681531:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.605648Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.605781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791294850681536:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.609164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:56.617944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791294850681538:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:56.678728Z node 1 :TX_PROXY ERROR: Actor# [1:7486791294850681590:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:57.754799Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791277670810053:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:57.754874Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> BasicUsage::BrokenCredentialsProvider >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService >> KqpSystemView::Sessions [GOOD] >> Secret::SimpleQueryService >> KqpSystemView::NodesRange2 [GOOD] >> TSchemeShardLoginTest::ChangeAccountLockoutParameters [GOOD] >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions [GOOD] Test command err: Trying to start YDB, gRPC: 19743, MsgBus: 27247 2025-03-28T09:33:53.048405Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791283896065437:2264];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:53.048526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ec4/r3tmp/tmpyedh0j/pdisk_1.dat 2025-03-28T09:33:53.419444Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:53.437936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:53.438046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:53.439744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19743, node 1 2025-03-28T09:33:53.556735Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:53.556755Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:53.556762Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:53.556862Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27247 TClient is connected to server localhost:27247 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:54.101798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:54.119504Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:54.126128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:33:54.130174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:54.286309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
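KqpSystemView::NodesRange2, reported [GOOD] a few lines above, exercises the nodes system view with range predicates over node ids, and the Sessions test whose setup surrounds this point appears to read session metadata through the same sys-view mechanism. A minimal sketch of the range-style query; the `.sys/nodes` path and its columns are assumed from YDB's standard system views, not read from the test source:

    -- assumed view and columns; filters on the NodeId key range
    SELECT NodeId, Host
    FROM `/Root/.sys/nodes`
    WHERE NodeId BETWEEN 1 AND 5;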
2025-03-28T09:33:54.449501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:54.518273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:56.169993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791296780968892:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.170098Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.424456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.451772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.477659Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.504271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.529796Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.560889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480 2025-03-28T09:33:56.637644Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791296780969405:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.637747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791296780969410:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.637833Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:56.641825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710669:3, at schemeshard: 72057594046644480 2025-03-28T09:33:56.650924Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791296780969412:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710669 completed, doublechecking } 2025-03-28T09:33:56.754395Z node 1 :TX_PROXY ERROR: Actor# [1:7486791296780969468:3453] txid# 281474976710670, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:58.048283Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791283896065437:2264];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:58.048349Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 1 ydb-cpp-sdk/3.2.2 2025-03-28T09:34:02.058094Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154442052, txId: 281474976710684] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 62544, MsgBus: 24634 2025-03-28T09:33:52.690282Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791278279614287:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:52.690433Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:52.751101Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791280681670139:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:52.751156Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:52.828266Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486791277508279056:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:52.830892Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:52.844624Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486791277885034818:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:52.845743Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ede/r3tmp/tmpfTywKI/pdisk_1.dat 2025-03-28T09:33:53.296570Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:53.835634Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:53.840217Z node 3 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:53.853657Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:53.858584Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:53.917156Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:53.946570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:53.946764Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:53.949091Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:53.949195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:53.949789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:53.949874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:53.950478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:53.950535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:53.950783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:53.950805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:53.961108Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2025-03-28T09:33:53.961155Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-03-28T09:33:53.961181Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T09:33:53.961225Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:33:53.961508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:53.965149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:53.965361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:53.965721Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:53.965984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 62544, node 1 2025-03-28T09:33:54.147087Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:54.147116Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:54.147124Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-03-28T09:33:54.147261Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24634 TClient is connected to server localhost:24634 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:55.072501Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:55.106931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:55.306884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:55.602856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:55.706756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:57.686882Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791278279614287:2277];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:57.690863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:57.758491Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486791280681670139:2079];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:57.758787Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:57.772818Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791299754452422:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.772953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:57.821788Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486791277508279056:2141];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:57.821852Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:57.844826Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486791277885034818:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:57.844885Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:58.066839Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:58.165200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:58.214142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:58.328364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:58.401203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:58.553535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:58.706761Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791304049420386:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:58.706876Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:58.708428Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791304049420393:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:58.713039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:58.745440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791304049420395:2394], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:33:58.848729Z node 1 :TX_PROXY ERROR: Actor# [1:7486791304049420472:4015] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:34:00.094797Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154440083, txId: 281474976710671] shutting down 2025-03-28T09:34:00.335965Z node 2 :BS_PROXY_PUT ERROR: [e1c0cdff771d11e8] Result# TEvPutResult {Id# [72075186224037895:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037895:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-28T09:34:00.345096Z node 3 :BS_PROXY_PUT ERROR: [9609f2a8321f81a5] Result# TEvPutResult {Id# [72075186224037896:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037896:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-28T09:34:00.363082Z node 4 :BS_PROXY_PUT ERROR: [31bb809246fe42ab] Result# TEvPutResult {Id# [72075186224037891:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037891:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-28T09:34:00.368060Z node 5 :BS_PROXY_PUT ERROR: [f33aa87045349b4a] Result# TEvPutResult {Id# [72075186224037913:1:18:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037913:1:18:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> 
TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] >> ReadSessionImplTest::DataReceivedCallback [GOOD] |93.8%| [TA] $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TA] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown >> TBoardSubscriberTest::SimpleSubscriber >> TBoardSubscriberTest::ReconnectReplica >> TBoardSubscriberTest::DropByDisconnect >> TBoardSubscriberTest::ManySubscribersManyPublisher |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> DataShardVolatile::NotCachingAbortingDeletes+UseSink [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes-UseSink >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] >> TFlatTest::AutoSplitMergeQueue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::DataReceivedCallback [GOOD] Test command err: 2025-03-28T09:33:31.923046Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.923086Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.923139Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.923549Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.937083Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.937278Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.937626Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:31.938138Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.938262Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:33:31.938337Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:31.938382Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2025-03-28T09:33:31.939017Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.939044Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.939064Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.939358Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.939939Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.940077Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.940237Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:31.940608Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.940704Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:33:31.940828Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:31.940877Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-03-28T09:33:31.943047Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.943094Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.943124Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.943421Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.944081Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.944500Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.944679Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:31.945476Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.950990Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:33:31.951181Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:31.951252Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-03-28T09:33:31.954471Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.954529Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.954566Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.954860Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2025-03-28T09:33:31.955960Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.956103Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.956286Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:31.961495Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.964499Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:33:31.964637Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:31.964688Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2025-03-28T09:33:31.965796Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.965823Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.965877Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.966309Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.967114Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.967280Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.967489Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:31.967885Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.970410Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:33:31.970532Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:31.970591Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-03-28T09:33:31.971412Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.971440Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.971455Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.971697Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.972574Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.972685Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.972962Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:31.973374Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.973477Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:33:31.973569Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2025-03-28T09:33:31.973619Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-03-28T09:33:31.974322Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.974337Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.974352Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.974613Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.975145Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.975254Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.975500Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:31.976250Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.976384Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:33:31.976485Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:31.976530Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2025-03-28T09:33:31.977447Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.977469Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.977492Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.977795Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:33:31.978333Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:33:31.978475Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.978704Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:33:31.980206Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.980592Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:33:31.980681Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:33:31.980725Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2025-03-28T09:33:31.985948Z :ReadSession INFO: Random seed for debugging is 1743154411985922 2025-03-28T09:33:32.384417Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791194345393690:2279];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:32.384497Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:32.445509Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791194791121803:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:32.445666Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error ... 03-28T09:33:52.023903Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11180891916693740127_v1 commit request from 3 to 3 in TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) 2025-03-28T09:33:52.023948Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11180891916693740127_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) committing to position 3 prev 2 end 3 by cookie 3 2025-03-28T09:33:52.024294Z node 2 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:33:52.024320Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:33:52.024393Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user offset is set to 3 (startOffset 0) session shared/user_1_1_11180891916693740127_v1 2025-03-28T09:33:52.024479Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:33:52.024494Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:33:52.024512Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:33:52.024526Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:33:52.024533Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-28T09:33:52.024539Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-28T09:33:52.024547Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:33:52.024556Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:33:52.024577Z node 2 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-28T09:33:52.028854Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:33:52.028895Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:33:52.028933Z node 2 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 3 2025-03-28T09:33:52.029253Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11180891916693740127_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { Cookie: 3 } 2025-03-28T09:33:52.029300Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11180891916693740127_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) commit done to position 3 endOffset 3 with cookie 3 2025-03-28T09:33:52.029346Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11180891916693740127_v1 replying for commits: assignId# 1, from# 3, to# 3, offset# 3 2025-03-28T09:33:52.033288Z :DEBUG: [/Root] [/Root] [8db10b3-59e5e316-7dacd324-4162b96] [dc1] Committed response: { cookies { assign_id: 1 partition_cookie: 3 } } 2025-03-28T09:33:52.110775Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|234377ec-381bb54c-64b61a4b-5bd35d87_0] Write session will now close 2025-03-28T09:33:52.110860Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|234377ec-381bb54c-64b61a4b-5bd35d87_0] Write session: aborting 2025-03-28T09:33:52.111408Z :INFO: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|234377ec-381bb54c-64b61a4b-5bd35d87_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:33:52.111451Z :DEBUG: [] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|234377ec-381bb54c-64b61a4b-5bd35d87_0] Write session: destroy 2025-03-28T09:33:52.112086Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: test-message-group-id|234377ec-381bb54c-64b61a4b-5bd35d87_0 grpc read done: success: 0 data: 2025-03-28T09:33:52.112112Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|234377ec-381bb54c-64b61a4b-5bd35d87_0 grpc read failed 2025-03-28T09:33:52.112138Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|234377ec-381bb54c-64b61a4b-5bd35d87_0 grpc closed 2025-03-28T09:33:52.112154Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: test-message-group-id|234377ec-381bb54c-64b61a4b-5bd35d87_0 is DEAD 2025-03-28T09:33:52.112750Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:33:52.113476Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7486791275949775092:2633] destroyed 2025-03-28T09:33:52.113493Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-03-28T09:33:54.626501Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11180891916693740127_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 5 from offset3 2025-03-28T09:34:02.018218Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11180891916693740127_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset3 2025-03-28T09:34:02.115798Z :INFO: [/Root] [/Root] [8db10b3-59e5e316-7dacd324-4162b96] Closing read session. Close timeout: 0.000000s 2025-03-28T09:34:02.115885Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2025-03-28T09:34:02.115938Z :INFO: [/Root] [/Root] [8db10b3-59e5e316-7dacd324-4162b96] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16545 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:34:02.116073Z :NOTICE: [/Root] [/Root] [8db10b3-59e5e316-7dacd324-4162b96] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-03-28T09:34:02.116123Z :DEBUG: [/Root] [/Root] [8db10b3-59e5e316-7dacd324-4162b96] [dc1] Abort session to cluster 2025-03-28T09:34:02.116648Z :NOTICE: [/Root] [/Root] [8db10b3-59e5e316-7dacd324-4162b96] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:34:02.117604Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_1_1_11180891916693740127_v1 grpc read done: success# 0, data# { } 2025-03-28T09:34:02.117647Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_11180891916693740127_v1 grpc read failed 2025-03-28T09:34:02.117693Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_11180891916693740127_v1 closed 2025-03-28T09:34:02.118573Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_1_1_11180891916693740127_v1 is DEAD 2025-03-28T09:34:02.119913Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [1:7486791250179970817:2542] disconnected; active server actors: 1 2025-03-28T09:34:02.119943Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [1:7486791250179970817:2542] client user disconnected session shared/user_1_1_11180891916693740127_v1 2025-03-28T09:34:02.122559Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_11180891916693740127_v1 2025-03-28T09:34:02.122646Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7486791250179970820:2545] destroyed 2025-03-28T09:34:02.122717Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_1_1_11180891916693740127_v1 2025-03-28T09:34:02.715800Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486791323194415984:2745] TxId: 281474976710724. Ctx: { TraceId: 01jqe1kw3z8b0kk0vmjfd456s0, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODcxOGE0OTYtZGJiODE3NmYtZWM4ZGNkNDgtZjA3NDczNDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 2 2025-03-28T09:34:02.716611Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486791323194415996:2755], TxId: 281474976710724, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=ODcxOGE0OTYtZGJiODE3NmYtZWM4ZGNkNDgtZjA3NDczNDI=. CustomerSuppliedId : . TraceId : 01jqe1kw3z8b0kk0vmjfd456s0. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486791323194415984:2745], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-03-28T09:34:02.716631Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486791323194415997:2756], TxId: 281474976710724, task: 4. Ctx: { TraceId : 01jqe1kw3z8b0kk0vmjfd456s0. SessionId : ydb://session/3?node_id=1&id=ODcxOGE0OTYtZGJiODE3NmYtZWM4ZGNkNDgtZjA3NDczNDI=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486791323194415984:2745], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-03-28T09:34:03.665737Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:03.665814Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:03.665851Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:34:03.666203Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:34:03.666574Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:34:03.666766Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:03.666969Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:34:03.667656Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:34:03.668113Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:34:03.668299Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2025-03-28T09:34:03.668396Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:34:03.668451Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:34:03.668490Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2025-03-28T09:34:03.668653Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-28T09:34:03.668725Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes >> TBoardSubscriberTest::ReconnectReplica [GOOD] >> TBoardSubscriberTest::SimpleSubscriber [GOOD] >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] |93.9%| [TA] $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TA] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::AutoSplitMergeQueue [GOOD] Test command err: 2025-03-28T09:32:43.658579Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790981429889068:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:32:43.658709Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ee2/r3tmp/tmpcokmaZ/pdisk_1.dat 2025-03-28T09:32:43.942420Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:44.017209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:32:44.017328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:32:44.018981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17832 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:32:44.198543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:32:44.225339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743154364329 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "String" TypeId: 4097 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) A-0 B-0 2025-03-28T09:32:44.626444Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.10, eph 1} end=0, 2 blobs 1r (max 1), put Spent{time=0.016s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-03-28T09:32:44.631386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2025-03-28T09:32:44.643984Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.12, eph 1} end=0, 2 blobs 1r (max 1), put Spent{time=0.014s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-03-28T09:32:44.647762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2025-03-28T09:32:44.732389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T09:32:44.732553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 6291502 row count 1 2025-03-28T09:32:44.732643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-03-28T09:32:44.732679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 1, DataSize 6291502 2025-03-28T09:32:44.732932Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-1 2025-03-28T09:32:44.955083Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 2 blobs 1r (max 1), put Spent{time=0.018s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-03-28T09:32:44.971228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2025-03-28T09:32:44.992808Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.18, eph 2} end=0, 3 blobs 2r (max 2), put Spent{time=0.022s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (12583126 0 0)b }, ecr=1.000 2025-03-28T09:32:45.071391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T09:32:45.071511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 12583004 row count 2 2025-03-28T09:32:45.071555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-03-28T09:32:45.071579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 2, DataSize 12583004 2025-03-28T09:32:45.073689Z node 1 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 3], datashard# 72075186224037888, 
compactionInfo# {72057594046644480:1, SH# 1, Rows# 2, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 0.000000s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046644480 2025-03-28T09:32:45.073892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Operation queue set wakeup after delta# 599 seconds 2025-03-28T09:32:45.074086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-28T09:32:45.075686Z node 1 :FLAT_TX_SCHEMESHARD INFO: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 3], datashard# 72075186224037888, shardIdx# 72057594046644480:1 in# 3 ms, with status# 1, next wakeup in# 599.996754s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 2025-03-28T09:32:45.118456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2025-03-28T09:32:45.218771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T09:32:45.218868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 12583004 row count 2 2025-03-28T09:32:45.218923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-03-28T09:32:45.218952Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 2, DataSize 12583004 2025-03-28T09:32:45.219299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 B-1 2025-03-28T09:32:45.230073Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=0, 2 blobs 1r (max 1), put Spent{time=0.013s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2025-03-28T09:32:45.250901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 18874506 rowCount 3 cpuUsage 0 2025-03-28T09:32:45.280732Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.23, eph 3} end=0, 4 blobs 3r (max 3), put Spent{time=0.030s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (18874672 0 0)b }, ecr=1.000 2025-03-28T09:32:45.351501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T09:32:45.351607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 18874506 row count 3 2025-03-28T09:32:45.351661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-03-28T09:32:45.351691Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 3, 
DataSize 18874506 2025-03-28T09:32:45.351860Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-28T09:32:45.371509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 3] state 'Ready' dataSize 18874522 rowCount 3 cpuUsage 0 A-2 2025-03-28T09:32:45.474200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T09:32:45.474371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046644480:1 data size 18874522 row count 3 2025-03-28T09:32:45.474413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], pathId map=T1, is column=0, is olap=0 2025-03-28T09:32:45.474439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037888 followerId=0, pathId 3: RowCount 3, DataSize 18874522 2025-03-28T09:32:45 ... renExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743154406952 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 17 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 17 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 15 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) 2025-03-28T09:34:00.337545Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037908 not found 2025-03-28T09:34:00.337592Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037905 not found 2025-03-28T09:34:00.396807Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037913 not found 2025-03-28T09:34:00.399420Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037906 not found 2025-03-28T09:34:00.564124Z node 3 :OPS_COMPACT ERROR: Compact{72075186224037915.1.6, eph 472} end=2, 0 blobs 0r (max 15), put Spent{time=0.001s,wait=0.000s,interrupts=0} 2025-03-28T09:34:00.564225Z node 3 :TABLET_EXECUTOR ERROR: Leader{72075186224037915:1:10} Compact 1 on TGenCompactionParams{1001: gen 255 epoch 0, 1 parts} step 6, product {0 parts epoch 0} thrown 2025-03-28T09:34:00.565305Z node 3 :OPS_COMPACT ERROR: Compact{72075186224037916.1.6, eph 474} end=2, 0 blobs 0r (max 17), put Spent{time=0.001s,wait=0.000s,interrupts=0} 2025-03-28T09:34:00.565358Z node 3 :TABLET_EXECUTOR ERROR: Leader{72075186224037916:1:10} Compact 1 on TGenCompactionParams{1001: gen 255 epoch 0, 2 parts} step 6, product {0 parts epoch 0} thrown 2025-03-28T09:34:00.582540Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037909 not found 2025-03-28T09:34:00.781927Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037916 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743154406952 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 21 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 21 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 19 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) 2025-03-28T09:34:01.670575Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037915 not found 2025-03-28T09:34:01.670940Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037914 not found 2025-03-28T09:34:01.830145Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037917 not found 2025-03-28T09:34:01.830185Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037919 not found 2025-03-28T09:34:01.836713Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037918 not found 2025-03-28T09:34:01.836754Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037911 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743154406952 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 24 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 24 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 22 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743154406952 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 24 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 24 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 22 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743154406952 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 24 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 24 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 22 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743154406952 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 24 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 24 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 22 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) 2025-03-28T09:34:05.812213Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037920 not found 2025-03-28T09:34:05.812257Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037921 not found 2025-03-28T09:34:05.813761Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037912 not found TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743154406952 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 25 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 25 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 23 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... (TRUNCATED) TClient::Ls request: /dc-1/Dir/T1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "T1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743154406952 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 25 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 25 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 23 } ChildrenExist: false } Table { Name: "T1" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyCol... 
(TRUNCATED)
>> DataShardVolatile::CompactedVolatileChangesCommit [GOOD]
>> DataShardVolatile::CompactedVolatileChangesAbort
>> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD]
>> Viewer::JsonStorageListingV1GroupIdFilter [GOOD]
>> Viewer::JsonStorageListingV1NodeIdFilter
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140]
Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-28T09:33:39.775710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2025-03-28T09:33:39.775845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:33:39.775918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2025-03-28T09:33:39.775989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration
2025-03-28T09:33:39.776665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000
2025-03-28T09:33:39.776725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2025-03-28T09:33:39.776809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:33:39.776895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false
2025-03-28T09:33:39.783338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:33:39.874595Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:33:39.874647Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:33:39.890659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:33:39.890914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute
2025-03-28T09:33:39.891066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944
2025-03-28T09:33:39.896721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-28T09:33:39.896935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0
2025-03-28T09:33:39.897604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944
2025-03-28T09:33:39.897807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944
2025-03-28T09:33:39.905415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944
2025-03-28T09:33:39.911700Z node 1 :FLAT_TX_SCHEMESHARD INFO:
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:39.911791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:39.912017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:33:39.912072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:39.912122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:33:39.912208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:33:39.938285Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:33:40.089658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:33:40.089899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.090137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:33:40.090387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:33:40.090447Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.092842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:40.092978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:33:40.093156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.093232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:33:40.093301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:33:40.093348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:33:40.095058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.095116Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:33:40.095153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:33:40.096588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.096621Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.096654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.096697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.099689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:33:40.101255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:33:40.101425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:33:40.102590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:33:40.102730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:33:40.102781Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.103069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:33:40.103128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:33:40.103327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:33:40.103406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:33:40.105036Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:33:40.105110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:33:40.105291Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:33:40.105328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:33:40.105760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:33:40.105808Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:33:40.105895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is 
done id#1:0 progress is 1/1 2025-03-28T09:33:40.105922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.105957Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:33:40.105984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.106015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:33:40.106058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:33:40.106086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:33:40.106110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:33:40.106178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:33:40.106248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:33:40.106287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:33:40.107772Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:40.107873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:33:40.107915Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
0.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:06.202002Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:06.202061Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:06.202100Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:06.202138Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:06.202175Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:06.202223Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:06.202290Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:06.202582Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:06.219442Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:06.220553Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:06.220686Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:06.220813Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:06.220843Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:06.220926Z node 5 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:06.221616Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:06.221717Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.221783Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.222100Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.222170Z node 5 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-28T09:34:06.222331Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.222430Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.222519Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.222611Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.222677Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.222829Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 
2025-03-28T09:34:06.223084Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.223183Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.223497Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.223572Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.223708Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.223785Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.223861Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.224092Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.224175Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.224298Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.224491Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.224646Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.224714Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.224759Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:06.230453Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:06.230514Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:06.230968Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:06.231037Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:06.231083Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:06.232659Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:369:2338] sender: [5:426:2058] recipient: [5:15:2062] 2025-03-28T09:34:06.275152Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-28T09:34:06.275209Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin RotateKeys at schemeshard: 72057594046678944 2025-03-28T09:34:06.298391Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Error: "User user1 is not permitted to log in", at schemeshard: 72057594046678944 2025-03-28T09:34:06.298506Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:06.298540Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2025-03-28T09:34:06.298702Z node 5 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:06.298742Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [5:417:2375], at schemeshard: 72057594046678944, txId: 0, path id: 1 2025-03-28T09:34:06.299181Z node 5 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 0 2025-03-28T09:34:08.299997Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Execute at schemeshard: 72057594046678944 2025-03-28T09:34:08.310543Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: TTxLogin Complete, result: Token: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk3NjQ4LCJpYXQiOjE3NDMxNTQ0NDgsInN1YiI6InVzZXIxIn0.k-VmLoIav6HSZDfc6fqGgdRvJRMDgiWvbK2ij4pzE9Jvf4wp2ylMlKWuDdBF0mO5hr-xbqC5T5uzufAYiO0l5uTmUL5YZw0FoHKS4HxabbU2oo5Hq36-XQ5GMr1KuIIr-jb4PezFXB7Q63I1R9BDL1gS5G9H8tATxmk12y-ibzra5WlybPpBUQZtkJAt6P0lWs8vBZHK0gDw9bcb2Wxd-_1fYpda0uyJPTblWkzpqJEiFmQzkOk3Ijci3-PJlck4lvdu7N35ioxie6Uoujh1UBwxb8cpIc2Vp-OrlgAHmWdCWVT7tk-ddUllNq0sf_yJzeAjeoJn9aUvds-ReYhGcQ" SanitizedToken: "eyJhbGciOiJQUzI1NiIsImtpZCI6IjMifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzQzMTk3NjQ4LCJpYXQiOjE3NDMxNTQ0NDgsInN1YiI6InVzZXIxIn0.**" IsAdmin: true, at schemeshard: 72057594046678944 2025-03-28T09:34:08.311235Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:08.311469Z node 5 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 239us result status StatusSuccess 2025-03-28T09:34:08.311960Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 4 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArc1JHwgTcS+Y0JSudn7T\n0M7YMWpjJjeuJTaqnrickgVpRH/y1jsk8JLVBOGVd3XRq6k63eNzFlM2P2+5uzmj\nIIgBLIzDPLMkg4vrDw3RnUBn1LItYXNBt/5YsxTSj9HZcWdLGZ+QKGQVieN/qtFF\nMGrGTJhUY4QVW+zfTsAW7h9jy/ddzTpJrD0XSQv6RCA7vglVG+OYDhXil07p5CAr\n50VHCleIf1ppofpJ+Z2bS3yZmokxSEtThjnCWvFVN4++rWwMplambybaemKVQ0ve\nvPY39PIg1QoWR8vydonGz1NAJgfsjRt4g4LnUxyEmgkW72iYSVwGvDmIpN5go/IU\nTQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743240843891 } PublicKeys { KeyId: 2 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAw5x7nv8yvtdbvA1BpdK5\n3XH2bvibStR61/B+ht9OvKuClVVe5MZNaYtrNl4ahPMJI4+su2XbF8QKZLuTFpNO\nMonPnmPvIXkSqHISEsg6c0umdFIM/Cu5SfEs3bcDkVZRNz6ftU/Ykus36JRPdza0\nZ0NI2qoiQ8bbJg1zQ2OE7Czge9rPUJHtFXUhmM4rTm7VGKMIj4AMp4ZeFAKM20a2\nlwKWi4rxjsRR3N0+BAz06aWwmFfmAyAMNtBFS5K5vzohA9BCajNlKXg5Wrzspi55\n24nRf+mx9A7fTtrDmZ1guhBNyey6lLPvcR+de10biEBvJZ+rJV6OGKIuO7pYYIjL\nFQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743240844151 } PublicKeys { KeyId: 3 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzsRdE6bYQxIzcs51FD6h\nUoyd3nereUIi12T4nGzoCyJQXVN6JbhYisonKj3UAbVL8DuNBEw5N/A9lKJE5RXE\nxIyYLqPfJRs5fN1E/rclmgsLB6yIMRTD4ItQAyB4UMTqDBciZuN+Fzqn6AHoayaP\n/H8jIWKoITo4NyRdaMW4ZneEzM5oNOcOKJCQJC/G/KpPAM/Y8C7qodQ87d5+3Ca+\nk6Lh/4QrSJLzvq71/c0Q9B7dLGJH7FoVcVs58+N0XJ0eRKXvpTTnlpjWHpcZGx51\nAgxPBUG7FLUMKTIco/+WyFztvqjpKNVsFvdu7e54rF3EnTKpcSxBTm/B/heWwYhL\nyQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1743240846296 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... 
results_accumulator.log}
>> TColumnShardTestSchema::HotTiers [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD]
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeInvalid [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypePersQueueGroup [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeKesus [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeFileStore [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeReplication [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalTable [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeDir [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExtSubDomain [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD]
>> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic
>> BackupRestore::RestoreViewQueryText
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout [GOOD]
>> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout [GOOD]
>> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout [GOOD]
>> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiers [GOOD]
Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143154979.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143154979.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143154979.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154979.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143154979.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143154979.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153779.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123154979.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123154979.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153779.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123153779.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123153779.000000s;Name=;Codec=};
2025-03-28T09:32:59.711073Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-28T09:32:59.794330Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-28T09:32:59.817927Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-28T09:32:59.818225Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-28T09:32:59.826056Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-28T09:32:59.826283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-28T09:32:59.826512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-28T09:32:59.826630Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:32:59.826762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:32:59.826883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:32:59.826993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:32:59.827127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:32:59.827244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:32:59.827367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:32:59.827484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:32:59.827593Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:32:59.856020Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:32:59.856339Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:32:59.856447Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:32:59.856619Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:32:59.856784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:32:59.856872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:32:59.856918Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:32:59.857008Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-03-28T09:32:59.857065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:32:59.857108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:32:59.857140Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:32:59.857291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:32:59.857364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:32:59.857408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:32:59.857441Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:32:59.857521Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:32:59.857570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:32:59.857615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:32:59.857644Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:32:59.857719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:32:59.857760Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:32:59.857790Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:32:59.857842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:32:59.857882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:32:59.857919Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:32:59.858300Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-03-28T09:32:59.858382Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-03-28T09:32:59.858479Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=49; 2025-03-28T09:32:59.858566Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=41; 2025-03-28T09:32:59.858775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:32:59.858832Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:32:59.858866Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:32:59.859074Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:32:59.859122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:32:59.859154Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... D DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-28T09:34:09.242258Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:34:09.242300Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:34:09.242351Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:09.242389Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:09.242480Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:34:09.242753Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: 
{1000000009:max} at tablet 9437184 2025-03-28T09:34:09.242864Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:34:09.243015Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:34:09.243088Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:34:09.243503Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-28T09:34:09.243588Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:34:09.244030Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1984:3989];trace_detailed=; 2025-03-28T09:34:09.244530Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-28T09:34:09.244746Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-28T09:34:09.244898Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:34:09.245012Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:188;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:34:09.245349Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:34:09.245449Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:34:09.245575Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:34:09.245623Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1984:3989] finished for tablet 9437184 2025-03-28T09:34:09.246021Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1983:3988];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743154449243974,"name":"_full_task","f":1743154449243974,"d_finished":0,"c":0,"l":1743154449245694,"d":1720},"events":[{"name":"bootstrap","f":1743154449244247,"d_finished":792,"c":1,"l":1743154449245039,"d":792},{"a":1743154449245323,"name":"ack","f":1743154449245323,"d_finished":0,"c":0,"l":1743154449245694,"d":371},{"a":1743154449245299,"name":"processing","f":1743154449245299,"d_finished":0,"c":0,"l":1743154449245694,"d":395},{"name":"ProduceResults","f":1743154449244830,"d_finished":437,"c":2,"l":1743154449245603,"d":437},{"a":1743154449245606,"name":"Finish","f":1743154449245606,"d_finished":0,"c":0,"l":1743154449245694,"d":88}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:34:09.246087Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1983:3988];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:34:09.246483Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1983:3988];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743154449243974,"name":"_full_task","f":1743154449243974,"d_finished":0,"c":0,"l":1743154449246138,"d":2164},"events":[{"name":"bootstrap","f":1743154449244247,"d_finished":792,"c":1,"l":1743154449245039,"d":792},{"a":1743154449245323,"name":"ack","f":1743154449245323,"d_finished":0,"c":0,"l":1743154449246138,"d":815},{"a":1743154449245299,"name":"processing","f":1743154449245299,"d_finished":0,"c":0,"l":1743154449246138,"d":839},{"name":"ProduceResults","f":1743154449244830,"d_finished":437,"c":2,"l":1743154449245603,"d":437},{"a":1743154449245606,"name":"Finish","f":1743154449245606,"d_finished":0,"c":0,"l":1743154449246138,"d":532}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1984:3989]->[1:1983:3988] 2025-03-28T09:34:09.246583Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:34:09.243562Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:34:09.246637Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:34:09.246779Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName >> 
TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition 
[GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "No clientId specified in CmdGetReadSessionsInfo" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-03-28T09:34:10.839264Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:34:10.847605Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:34:10.847953Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-28T09:34:10.848016Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:34:10.848057Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-28T09:34:10.848104Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:34:10.848170Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:10.848257Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-28T09:34:10.848936Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:262:2254], now have 1 active actors on pipe 2025-03-28T09:34:10.849039Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:34:10.868210Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:34:10.874511Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:34:10.874784Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:10.875735Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:34:10.875877Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:34:10.883340Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:34:10.885153Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:270:2260] 2025-03-28T09:34:10.888521Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-03-28T09:34:10.888607Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:270:2260] 2025-03-28T09:34:10.888673Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:34:10.888727Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:34:10.891585Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:273:2262], now have 1 active actors on pipe 2025-03-28T09:34:10.937509Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:34:10.940708Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:34:10.940979Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-03-28T09:34:10.941020Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:34:10.941061Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-03-28T09:34:10.941094Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:34:10.941130Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:10.941211Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-03-28T09:34:10.941882Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:408:2363], now have 1 active actors on pipe 2025-03-28T09:34:10.942007Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:34:10.942221Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:34:10.944534Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:34:10.944670Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:10.945654Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:34:10.945781Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:34:10.946117Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:34:10.946339Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [2:416:2369] 2025-03-28T09:34:10.948304Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-03-28T09:34:10.948389Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:416:2369] 2025-03-28T09:34:10.948454Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:34:10.948510Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-28T09:34:10.949153Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:419:2371], now have 1 active actors on pipe 2025-03-28T09:34:10.965696Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:34:10.969492Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:34:10.969781Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-28T09:34:10.969829Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:34:10.969875Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-03-28T09:34:10.969916Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:34:10.969978Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:10.970073Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-28T09:34:10.970836Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:468:2408], now have 1 active actors on pipe 2025-03-28T09:34:10.970965Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:34:10.971165Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 3(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:34:10.975628Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:34:10.975750Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:10.976463Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config 
applied version 3 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:34:10.976622Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:34:10.977069Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:34:10.977319Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:476:2414] 2025-03-28T09:34:10.978617Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-28T09:34:10.978664Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:476:2414] 2025-03-28T09:34:10.978764Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:34:10.978823Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-28T09:34:10.979470Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:479:2416], now have 1 active actors on pipe REQUEST MetaRequest { CmdGetReadSessionsInfo { ClientId: "client_id" Topic: "rt3.dc1--topic1" Topic: "rt3.dc1--topic2" } } Ticket: "client_id@builtin" 2025-03-28T09:34:10.987967Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:488:2419], now have 1 active actors on pipe 2025-03-28T09:34:10.988355Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [2:491:2420], now have 1 active actors on pipe 2025-03-28T09:34:10.988541Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:492:2420], now have 1 active actors on pipe 2025-03-28T09:34:10.989008Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:488:2419] destroyed 2025-03-28T09:34:10.989419Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [2:491:2420] destroyed 2025-03-28T09:34:10.989519Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:492:2420] destroyed RESULT Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } PartitionResult { Partition: 2 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2025-03-28T09:34:10.833843Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:34:10.844734Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:34:10.845063Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-28T09:34:10.845788Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:34:10.845843Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-28T09:34:10.845885Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:34:10.845993Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:10.846068Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-28T09:34:10.846869Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:262:2254], now have 1 active actors on pipe 2025-03-28T09:34:10.846973Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:34:10.868144Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:34:10.870996Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:34:10.871204Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:10.875183Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:34:10.875346Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:34:10.883325Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:34:10.886893Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [2:270:2260] 2025-03-28T09:34:10.889354Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-03-28T09:34:10.889428Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:270:2260] 2025-03-28T09:34:10.889493Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:34:10.889549Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:34:10.891487Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:273:2262], now have 1 active actors on pipe 2025-03-28T09:34:10.938441Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:34:10.941704Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:34:10.941928Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-28T09:34:10.941962Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:34:10.941985Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-03-28T09:34:10.942012Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:34:10.942040Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:10.942077Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-28T09:34:10.942494Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:407:2362], now have 1 active actors on pipe 2025-03-28T09:34:10.942563Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:34:10.942677Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 2(current 0) received from actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:34:10.945594Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:34:10.945710Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:10.946336Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 2 actor [2:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:34:10.946417Z node 2 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:34:10.946631Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:34:10.946834Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [2:415:2368] 2025-03-28T09:34:10.948300Z node 2 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-28T09:34:10.948353Z node 2 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:415:2368] 2025-03-28T09:34:10.948396Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:34:10.948430Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-28T09:34:10.948987Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:418:2370], now have 1 active actors on pipe 2025-03-28T09:34:10.953795Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [2:424:2373], now have 1 active actors on pipe 2025-03-28T09:34:10.954000Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [2:426:2374], now have 1 active actors on pipe 2025-03-28T09:34:10.955562Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:34:10.956066Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:34:10.956248Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [2:424:2373] destroyed 2025-03-28T09:34:10.958722Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server disconnected, pipe [2:426:2374] destroyed 2025-03-28T09:34:11.421047Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:34:11.424666Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:34:11.424970Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-28T09:34:11.425014Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:34:11.425053Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-28T09:34:11.425102Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:34:11.425148Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:11.425204Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-28T09:34:11.425800Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:261:2253], now have 1 active actors on pipe 2025-03-28T09:34:11.425898Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:34:11.426081Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 3(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:34:11.428485Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:34:11.428647Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:11.429315Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 3 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 L ... txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:34:11.591572Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:34:11.592027Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:34:11.592308Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:534:2457] 2025-03-28T09:34:11.595221Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2025-03-28T09:34:11.595302Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:534:2457] 2025-03-28T09:34:11.595368Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:34:11.595438Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-28T09:34:11.596530Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:537:2459], now have 1 active actors on pipe 2025-03-28T09:34:11.598062Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:543:2462], now have 1 active actors on pipe 2025-03-28T09:34:11.598325Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:34:11.598496Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:544:2463], now have 1 active actors on pipe 2025-03-28T09:34:11.598899Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:34:11.598976Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:545:2463], now have 1 active actors on pipe 2025-03-28T09:34:11.599234Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:34:11.610525Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [3:553:2470], now have 1 active actors on pipe 2025-03-28T09:34:11.641978Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:34:11.645466Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:34:11.645956Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-28T09:34:11.646034Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:34:11.646198Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:34:11.647154Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:11.647229Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-28T09:34:11.647357Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:34:11.647815Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:34:11.648134Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [3:610:2515] 2025-03-28T09:34:11.651238Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-03-28T09:34:11.653188Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-03-28T09:34:11.653623Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-03-28T09:34:11.654102Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-03-28T09:34:11.654457Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-03-28T09:34:11.654899Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:34:11.654960Z node 3 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:34:11.655013Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-28T09:34:11.655071Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:610:2515] 2025-03-28T09:34:11.655140Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:34:11.655214Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-03-28T09:34:11.656411Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [3:544:2463] destroyed 2025-03-28T09:34:11.656520Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [3:543:2462] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 79 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 79 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 93 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 93 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 39 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 39 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 
0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } >> BuildStatsHistogram::Three_Mixed_Small_2_Levels >> BuildStatsHistogram::Three_Mixed_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_1_Level >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName >> BuildStatsHistogram::Three_Mixed_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_0_Levels >> TRowVersionRangesTest::SimpleInserts [GOOD] >> TRowVersionRangesTest::MergeHoleOuter [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorOrder [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorLowerBound [GOOD] >> TS3FIFOCache::Touch [GOOD] >> TS3FIFOCache::Touch_MainQueue [GOOD] >> TS3FIFOCache::EvictNext [GOOD] >> TS3FIFOCache::UpdateLimit [GOOD] >> TS3FIFOCache::Erase [GOOD] >> TS3FIFOCache::Random >> BuildStatsHistogram::Three_Mixed_Small_0_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels >> Bloom::Conf [GOOD] >> Bloom::Hashes >> TS3FIFOCache::Random [GOOD] >> TS3FIFOGhostQueue::Basics [GOOD] >> TScheme::Shapshot [GOOD] >> TScheme::Delta [GOOD] >> TScheme::Policy [GOOD] >> TScreen::Cuts [GOOD] >> TScreen::Join [GOOD] >> TScreen::Sequential >> BuildStatsHistogram::Three_Serial_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Serial_Small_1_Level ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2025-03-28T09:34:11.132466Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:34:11.136164Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:34:11.136471Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] doesn't have tx info 2025-03-28T09:34:11.136545Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:34:11.136583Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] no config, start with empty partitions and default config 2025-03-28T09:34:11.136631Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:34:11.136684Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:11.136750Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] doesn't have tx writes info 2025-03-28T09:34:11.137355Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:262:2254], now have 1 active actors on pipe 2025-03-28T09:34:11.137461Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:34:11.156401Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Config update version 1(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:34:11.159360Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:34:11.159501Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:11.160315Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037] Config applied version 1 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:34:11.160461Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:34:11.160853Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:34:11.161162Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] bootstrapping 0 [3:270:2260] 2025-03-28T09:34:11.163511Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2025-03-28T09:34:11.163573Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928037, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [3:270:2260] 2025-03-28T09:34:11.163627Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:34:11.163675Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:34:11.164498Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [3:273:2262], now have 1 active actors on pipe 2025-03-28T09:34:11.215965Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:34:11.219685Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:34:11.220018Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] doesn't have tx info 2025-03-28T09:34:11.220068Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:34:11.220102Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] no config, start with empty partitions and default config 2025-03-28T09:34:11.220158Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:34:11.220222Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:11.220279Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137] doesn't have tx writes info 2025-03-28T09:34:11.220925Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:407:2362], now have 1 active actors on pipe 2025-03-28T09:34:11.221038Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:34:11.221218Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Config update version 2(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:34:11.223617Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:34:11.223766Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:11.224542Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137] Config applied version 2 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2025-03-28T09:34:11.224648Z node 3 :PERSQUEUE DEBUG: 
[rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:34:11.225026Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:34:11.225243Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] bootstrapping 0 [3:415:2368] 2025-03-28T09:34:11.227251Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2025-03-28T09:34:11.227316Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928137, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [3:415:2368] 2025-03-28T09:34:11.227370Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:34:11.227413Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:34:11.228211Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928137] server connected, pipe [3:418:2370], now have 1 active actors on pipe 2025-03-28T09:34:11.243362Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:34:11.246557Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:34:11.246795Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] doesn't have tx info 2025-03-28T09:34:11.246846Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:34:11.246886Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-03-28T09:34:11.246921Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:34:11.246971Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:11.247019Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-03-28T09:34:11.247612Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [3:467:2407], now have 1 active actors on pipe 2025-03-28T09:34:11.247665Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:34:11.247792Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 3(current 0) received from actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:34:11.249575Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:34:11.249666Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:11.250255Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138] Config 
applied version 3 actor [3:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:34:11.250345Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:34:11.250572Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:34:11.250745Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [3:475:2413] 2025-03-28T09:34:11.251853Z node 3 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-03-28T09:34:11.251945Z node 3 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [3:475:2413] 2025-03-28T09:34:11.251999Z node 3 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 b ... 38] doesn't have tx info 2025-03-28T09:34:11.914173Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:34:11.914219Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] no config, start with empty partitions and default config 2025-03-28T09:34:11.914261Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:34:11.914312Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:11.914373Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] doesn't have tx writes info 2025-03-28T09:34:11.915076Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:465:2405], now have 1 active actors on pipe 2025-03-28T09:34:11.915193Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:34:11.915393Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Config update version 7(current 0) received from actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:34:11.917587Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:34:11.917721Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:11.918403Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138] Config applied version 7 actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 
SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 7 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2025-03-28T09:34:11.918531Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:34:11.918901Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:34:11.919107Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] bootstrapping 1 [4:473:2411] 2025-03-28T09:34:11.921406Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2025-03-28T09:34:11.921484Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928138, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [4:473:2411] 2025-03-28T09:34:11.921542Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:34:11.921594Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-28T09:34:11.922448Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:476:2413], now have 1 active actors on pipe 2025-03-28T09:34:11.941703Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:34:11.945435Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:34:11.945788Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-28T09:34:11.945843Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:34:11.945897Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] no config, start with empty partitions and default config 2025-03-28T09:34:11.945944Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:34:11.945998Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:11.946064Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-28T09:34:11.946958Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:525:2450], now have 1 active actors on pipe 2025-03-28T09:34:11.947079Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:34:11.947280Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Config update version 8(current 0) received from actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:34:11.949722Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 
2025-03-28T09:34:11.949861Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:11.950712Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] Config applied version 8 actor [4:99:2134] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2025-03-28T09:34:11.950860Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:34:11.951327Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:34:11.951579Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:533:2456] 2025-03-28T09:34:11.953773Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-28T09:34:11.953848Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:533:2456] 2025-03-28T09:34:11.953923Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:34:11.953999Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. Count 0 2025-03-28T09:34:11.954859Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:536:2458], now have 1 active actors on pipe 2025-03-28T09:34:11.956509Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server connected, pipe [4:544:2461], now have 1 active actors on pipe 2025-03-28T09:34:11.956621Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server connected, pipe [4:545:2462], now have 1 active actors on pipe 2025-03-28T09:34:11.956720Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:546:2462], now have 1 active actors on pipe 2025-03-28T09:34:11.967797Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] server connected, pipe [4:551:2466], now have 1 active actors on pipe 2025-03-28T09:34:11.996350Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:34:11.999941Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:34:12.000315Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] doesn't have tx info 2025-03-28T09:34:12.000372Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:34:12.000523Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:34:12.001362Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:12.001426Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139] doesn't have tx writes info 2025-03-28T09:34:12.001546Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:34:12.001879Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:34:12.002062Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] bootstrapping 2 [4:608:2511] 2025-03-28T09:34:12.005176Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2025-03-28T09:34:12.006931Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2025-03-28T09:34:12.007301Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2025-03-28T09:34:12.007648Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2025-03-28T09:34:12.007969Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2025-03-28T09:34:12.008020Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:34:12.008110Z node 4 :PERSQUEUE INFO: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:34:12.008161Z node 4 :PERSQUEUE DEBUG: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2025-03-28T09:34:12.008218Z node 4 :PERSQUEUE INFO: [PQ: 72057594037928139, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:608:2511] 2025-03-28T09:34:12.008276Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:34:12.008330Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928139, Partition: 2, State: StateIdle] Process pending events. 
Count 0 2025-03-28T09:34:12.009209Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928138] server disconnected, pipe [4:545:2462] destroyed 2025-03-28T09:34:12.009270Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037928037] server disconnected, pipe [4:544:2461] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 4 ErrorCode: OK } PartitionLocation { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "Tablet for that partition is not running" } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 4 ErrorCode: OK } ErrorCode: OK } } } >> TPartBtreeIndexIteration::NoNodes_History >> TIterator::Single >> TBtreeIndexBuilder::NoNodes [GOOD] >> TBtreeIndexBuilder::OneNode >> BuildStatsHistogram::Three_Serial_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Serial_Small_0_Levels >> Bloom::Hashes [GOOD] >> Bloom::Rater >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> DataCleanup::CleanupDataNoTables >> TBtreeIndexBuilder::OneNode [GOOD] >> TBtreeIndexBuilder::FewNodes [GOOD] >> TBtreeIndexBuilder::SplitBySize [GOOD] >> TBtreeIndexNode::TIsNullBitmap [GOOD] >> TBtreeIndexNode::CompareTo [GOOD] >> TBtreeIndexNode::Basics [GOOD] >> TBtreeIndexNode::Group [GOOD] >> TBtreeIndexNode::History [GOOD] >> TBtreeIndexNode::OneKey [GOOD] >> TBtreeIndexNode::Reusable [GOOD] >> TBtreeIndexNode::CutKeys [GOOD] >> TBtreeIndexTPart::Conf [GOOD] >> TBtreeIndexTPart::NoNodes [GOOD] >> TBtreeIndexTPart::OneNode [GOOD] >> TBtreeIndexTPart::FewNodes [GOOD] >> TBtreeIndexTPart::Erases [GOOD] >> TBtreeIndexTPart::Groups [GOOD] >> TBtreeIndexTPart::History >> TChargeBTreeIndex::NoNodes_Groups >> BuildStatsHistogram::Three_Serial_Small_0_Levels [GOOD] >> BuildStatsMixedIndex::Single >> DataCleanup::CleanupDataNoTables [GOOD] >> DataCleanup::CleanupDataNoTablesWithRestart [GOOD] >> DataCleanup::CleanupDataLog >> BuildStatsMixedIndex::Single [GOOD] >> BuildStatsMixedIndex::Single_Slices >> DataCleanup::CleanupDataLog [GOOD] >> DataCleanup::CleanupData [GOOD] >> DataCleanup::CleanupDataMultipleFamilies >> TBtreeIndexTPart::History [GOOD] >> TBtreeIndexTPart::External >> BuildStatsMixedIndex::Single_Slices [GOOD] >> BuildStatsMixedIndex::Single_History >> Bloom::Rater [GOOD] >> Bloom::Dipping >> DataCleanup::CleanupDataMultipleFamilies [GOOD] >> DataCleanup::CleanupDataMultipleTables [GOOD] >> DataCleanup::CleanupDataWithFollowers [GOOD] >> DataCleanup::CleanupDataMultipleTimes >> TScreen::Sequential [GOOD] >> TScreen::Random >> DataCleanup::CleanupDataMultipleTimes [GOOD] >> DataCleanup::CleanupDataEmptyTable [GOOD] >> DataCleanup::CleanupDataWithRestarts >> TBtreeIndexTPart::External [GOOD] >> TChargeBTreeIndex::NoNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'/Root/PQ/\', Marker# PQ17" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetReadSessionsInfo request" ErrorCode: BAD_REQUEST } >> DataCleanup::CleanupDataWithRestarts [GOOD] >> DataCleanup::CleanupDataRetryWithNotGreaterGenerations 
[GOOD] >> DBase::Select [GOOD] >> DBase::Subsets [GOOD] >> DBase::WideKey >> TChargeBTreeIndex::NoNodes [GOOD] >> TChargeBTreeIndex::FewNodes >> BuildStatsMixedIndex::Single_History [GOOD] >> BuildStatsMixedIndex::Single_History_Slices >> BuildStatsMixedIndex::Single_History_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups >> DBase::WideKey [GOOD] >> DBase::VersionBasics [GOOD] >> DBase::VersionPureMem >> TChargeBTreeIndex::NoNodes_Groups [GOOD] >> BuildStatsMixedIndex::Single_Groups [GOOD] >> TChargeBTreeIndex::NoNodes_History >> BuildStatsMixedIndex::Single_Groups_Slices |93.9%| [TA] $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TA] {RESULT} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TIterator::Single [GOOD] >> TIterator::SingleReverse >> TChargeBTreeIndex::FewNodes [GOOD] >> TChargeBTreeIndex::FewNodes_Groups >> BuildStatsMixedIndex::Single_Groups_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups_History >> TChargeBTreeIndex::NoNodes_History [GOOD] >> TChargeBTreeIndex::NoNodes_Groups_History >> BuildStatsMixedIndex::Single_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices >> Bloom::Dipping [GOOD] >> Bloom::Basics [GOOD] >> Bloom::Stairs >> TScreen::Random [GOOD] >> TScreen::Shrink [GOOD] >> TScreen::Cook [GOOD] >> TSharedPageCache::Limits >> DBase::VersionPureMem [GOOD] >> DBase::VersionPureParts >> BuildStatsMixedIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsMixedIndex::Mixed >> Bloom::Stairs [GOOD] >> BuildStatsBTreeIndex::Single >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] >> BuildStatsMixedIndex::Mixed [GOOD] >> BuildStatsMixedIndex::Mixed_Groups >> BuildStatsBTreeIndex::Single [GOOD] >> BuildStatsBTreeIndex::Single_Slices >> BuildStatsMixedIndex::Mixed_Groups [GOOD] >> BuildStatsMixedIndex::Mixed_Groups_History >> BuildStatsBTreeIndex::Single_Slices [GOOD] >> BuildStatsBTreeIndex::Single_History >> TIterator::SingleReverse [GOOD] >> TIterator::Mixed >> TPartBtreeIndexIteration::NoNodes_History [GOOD] >> TPartBtreeIndexIteration::OneNode >> TSharedPageCache::Limits [GOOD] >> TSharedPageCache::Limits_Config >> BuildStatsBTreeIndex::Single_History [GOOD] >> BuildStatsBTreeIndex::Single_History_Slices >> Secret::DeactivatedQueryService [GOOD] >> BuildStatsMixedIndex::Mixed_Groups_History [GOOD] >> BuildStatsMixedIndex::Serial >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] >> BuildStatsMixedIndex::Serial [GOOD] >> BuildStatsMixedIndex::Serial_Groups >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeDir [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlockStoreVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnStore [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeColumnTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream >> BuildStatsBTreeIndex::Single_History_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups >> BuildStatsMixedIndex::Serial_Groups [GOOD] >> BuildStatsMixedIndex::Serial_Groups_History >> BuildStatsBTreeIndex::Single_Groups [GOOD] >> BuildStatsBTreeIndex::Single_Groups_Slices |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Basic [GOOD] >> DBase::VersionPureParts [GOOD] >> DBase::VersionCompactedMem >> TLocalTests::TestRemoveTenantWhileResolving >> BuildStatsBTreeIndex::Single_Groups_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History >> 
TSharedPageCache::Limits_Config [GOOD]
>> TSharedPageCache::ThreeLeveledLRU
>> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD]
>> PersQueueSdkReadSessionTest::StopResumeReadingData
>> TDynamicNameserverTest::CacheMissSimpleDeadline
|93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> BackupRestore::RestoreViewQueryText [GOOD]
>> BackupRestore::RestoreViewReferenceTable
>> BuildStatsMixedIndex::Serial_Groups_History [GOOD]
>> BuildStatsMixedIndex::Single_LowResolution
>> TDynamicNameserverTest::CacheMissNoDeadline
>> TLocalTests::TestAddTenantWhileResolving
>> TNodeBrokerTest::TestListNodesEpochDeltas
>> TTicketParserTest::TicketFromCertificateCheckIssuerGood
>> BuildStatsMixedIndex::Single_LowResolution [GOOD]
>> BuildStatsMixedIndex::Single_Slices_LowResolution
>> BuildStatsBTreeIndex::Single_Groups_History [GOOD]
>> BuildStatsBTreeIndex::Single_Groups_History_Slices
>> TTicketParserTest::AuthenticationWithUserAccount
>> TTicketParserTest::AuthorizationRetryError
>> BuildStatsMixedIndex::Single_Slices_LowResolution [GOOD]
>> BuildStatsMixedIndex::Single_Groups_LowResolution
>> BasicUsage::BrokenCredentialsProvider [GOOD]
|94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TSlotIndexesPoolTest::Basic [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService [GOOD]
Test command err:
2025-03-28T09:34:01.043346Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:301:2345], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e78/r3tmp/tmpte4bgJ/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 16227, node 1
TClient is connected to server localhost:19022
2025-03-28T09:34:02.102164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-28T09:34:02.162639Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:34:02.162740Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:34:02.162779Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:34:02.163141Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:34:02.165640Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:34:02.204676Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:34:02.205497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:34:02.218503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
Initialization finished
REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1
2025-03-28T09:34:13.941259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:683:2575], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:13.941869Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:692:2580], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:13.944564Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:13.955127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480
2025-03-28T09:34:13.975144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:697:2583], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking }
2025-03-28T09:34:14.035381Z node 1 :TX_PROXY ERROR: Actor# [1:748:2615] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:34:14.479134Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:758:2624], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled
2025-03-28T09:34:14.485161Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjFmNmE0MWYtOGI3YzZkMzktZWJlMTJmZWQtNDRlY2Q2OGQ=, ActorId: [1:681:2573], ActorState: ExecuteState, TraceId: 01jqe1m7d0c5capszczkmrmsgp, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled
;EXPECTATION=0
FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1
>> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD]
>> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages
>> BuildStatsBTreeIndex::Single_Groups_History_Slices [GOOD]
>> BuildStatsBTreeIndex::Mixed
>> TChargeBTreeIndex::FewNodes_Groups [GOOD]
>> TChargeBTreeIndex::FewNodes_History
------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD]
Test command err:
=== Starting PQ server ===
Server->StartServer(false);
2025-03-28T09:33:30.695002Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791185189095389:2074];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:33:30.695068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-28T09:33:30.766786Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791183355951641:2073];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:33:30.766856Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-28T09:33:30.931067Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
2025-03-28T09:33:30.934266Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001bfd/r3tmp/tmp0NzSXm/pdisk_1.dat
2025-03-28T09:33:31.190357Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:33:31.192967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:33:31.193096Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:33:31.194331Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:33:31.194407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:33:31.200419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:33:31.201013Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2
2025-03-28T09:33:31.217658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 16105, node 1
2025-03-28T09:33:31.437220Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001bfd/r3tmp/yandexkRop1h.tmp
2025-03-28T09:33:31.437255Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001bfd/r3tmp/yandexkRop1h.tmp
2025-03-28T09:33:31.438392Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001bfd/r3tmp/yandexkRop1h.tmp
2025-03-28T09:33:31.438626Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:33:31.667129Z INFO: TTestServer started on Port 15217 GrpcPort 16105
TClient is
connected to server localhost:15217 PQClient connected to localhost:16105 === TenantModeEnabled() = 0 === Init PQ - start server on port 16105 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:32.075526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:33:32.075778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:32.076034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:33:32.076289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:33:32.076362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:32.078909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:33:32.079079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:33:32.079314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:32.079377Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:33:32.079404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-28T09:33:32.079418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-28T09:33:32.081606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:32.081653Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:33:32.081668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-28T09:33:32.082645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:32.082665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-28T09:33:32.082710Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:32.083368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:32.083416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:32.083485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:33:32.083556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:33:32.088445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:33:32.090723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-28T09:33:32.090886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:33:32.093496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743154412139, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:33:32.093704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154412139 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:33:32.093742Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:33:32.094004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-28T09:33:32.094075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:33:32.094286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:33:32.094399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:33:32.096765Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:33:32.096796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:33:32.096961Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:33:32.097003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486791189484063354:2409], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-28T09:33:32.097051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:32.097081Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-28T09:33:32.097193Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:33:32.097210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:33:32.097307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:33:32.097324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:33:32.097344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-28T09:33:32.097371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:33:32.097389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-28T09:33:32.097401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-03-28T09:33:32.097450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pat ... ive partitions [0] session "shared/cli_5_1_11245170127835541256_v1" sender [5:7486791363497522106:2646] lock partition 0 for ReadingSession "shared/cli_5_1_11245170127835541256_v1" (Sender=[5:7486791363497522106:2646], Pipe=[5:7486791363497522109:2646], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-03-28T09:34:12.854571Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-03-28T09:34:12.854607Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing duration: 0.000224s 2025-03-28T09:34:12.854533Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 grpc read done: success# 1, data# { read_request { bytes_size: 52428800 } } 2025-03-28T09:34:12.854726Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 got read request: guid# a7713416-5a548bc1-ff0615b3-e308e81a 2025-03-28T09:34:12.856390Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_11245170127835541256_v1" ClientId: "cli" PipeClient { RawX1: 7486791363497522109 RawX2: 4503621102209622 } Path: "/Root/PQ/rt3.dc1--topic1" } 2025-03-28T09:34:12.856502Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-03-28T09:34:12.856919Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7486791363497522111:2649] 2025-03-28T09:34:12.857108Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_11245170127835541256_v1:1 with generation 1 2025-03-28T09:34:12.861431Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1743154452839 } Cookie: 18446744073709551615 } 2025-03-28T09:34:12.861499Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2025-03-28T09:34:12.861584Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 sending to client partition status 2025-03-28T09:34:12.862432Z :INFO: [] [] [172ebe81-ee846fb4-301d4fd3-ec1433f2] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (NULL) 2025-03-28T09:34:12.863103Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-03-28T09:34:12.863296Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-03-28T09:34:12.863353Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-03-28T09:34:12.863388Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2025-03-28T09:34:12.863460Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2025-03-28T09:34:12.863482Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1TEvPartitionReady. Aval parts: 1 2025-03-28T09:34:12.863531Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 performing read request: guid# 883e1a1f-8f6418a7-d9101211-d577f194, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2025-03-28T09:34:12.863662Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid 883e1a1f-8f6418a7-d9101211-d577f194 2025-03-28T09:34:12.865037Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1743154452736 CreateTimestampMS: 1743154452734 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1743154452741 CreateTimestampMS: 1743154452735 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1743154452797 CreateTimestampMS: 1743154452735 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2025-03-28T09:34:12.865232Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2025-03-28T09:34:12.865284Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 883e1a1f-8f6418a7-d9101211-d577f194 has messages 1 2025-03-28T09:34:12.865378Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 read done: guid# 883e1a1f-8f6418a7-d9101211-d577f194, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 490 2025-03-28T09:34:12.865413Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 response to read: guid# 883e1a1f-8f6418a7-d9101211-d577f194 2025-03-28T09:34:12.865660Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 Process answer. Aval parts: 0 2025-03-28T09:34:12.867661Z :DEBUG: [] [] [172ebe81-ee846fb4-301d4fd3-ec1433f2] [] Got ReadResponse, serverBytesSize = 490, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2025-03-28T09:34:12.867826Z :DEBUG: [] [] [172ebe81-ee846fb4-301d4fd3-ec1433f2] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428310 2025-03-28T09:34:12.868139Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2025-03-28T09:34:12.868212Z :DEBUG: [] [] [172ebe81-ee846fb4-301d4fd3-ec1433f2] [] Returning serverBytesSize = 490 to budget 2025-03-28T09:34:12.868263Z :DEBUG: [] [] [172ebe81-ee846fb4-301d4fd3-ec1433f2] [] In ContinueReadingDataImpl, ReadSizeBudget = 490, ReadSizeServerDelta = 52428310 2025-03-28T09:34:12.868549Z :DEBUG: [] [] [172ebe81-ee846fb4-301d4fd3-ec1433f2] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-03-28T09:34:12.868828Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-03-28T09:34:12.868897Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-03-28T09:34:12.868933Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-03-28T09:34:12.868989Z :DEBUG: [] [] [172ebe81-ee846fb4-301d4fd3-ec1433f2] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2025-03-28T09:34:12.869041Z :DEBUG: [] [] [172ebe81-ee846fb4-301d4fd3-ec1433f2] [] Returning serverBytesSize = 0 to budget 2025-03-28T09:34:12.869258Z :INFO: [] [] [172ebe81-ee846fb4-301d4fd3-ec1433f2] Closing read session. 
Close timeout: 0.000000s 2025-03-28T09:34:12.869325Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2025-03-28T09:34:12.869372Z :INFO: [] [] [172ebe81-ee846fb4-301d4fd3-ec1433f2] Counters: { Errors: 0 CurrentSessionLifetimeMs: 28 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:34:12.869495Z :NOTICE: [] [] [172ebe81-ee846fb4-301d4fd3-ec1433f2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-03-28T09:34:12.869446Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 grpc read done: success# 1, data# { read_request { bytes_size: 490 } } 2025-03-28T09:34:12.869546Z :DEBUG: [] [] [172ebe81-ee846fb4-301d4fd3-ec1433f2] [] Abort session to cluster 2025-03-28T09:34:12.869575Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 got read request: guid# 8df9d95d-f1c470f4-544ce26b-48a26421 2025-03-28T09:34:12.870372Z :NOTICE: [] [] [172ebe81-ee846fb4-301d4fd3-ec1433f2] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:34:12.870518Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 grpc read done: success# 0, data# { } 2025-03-28T09:34:12.870542Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 grpc read failed 2025-03-28T09:34:12.870573Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 closed 2025-03-28T09:34:12.871016Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_11245170127835541256_v1 is DEAD 2025-03-28T09:34:12.871260Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_11245170127835541256_v1 2025-03-28T09:34:12.871827Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7486791363497522109:2646] disconnected; active server actors: 1 2025-03-28T09:34:12.871863Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7486791363497522109:2646] client cli disconnected session shared/cli_5_1_11245170127835541256_v1 >> BuildStatsMixedIndex::Single_Groups_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution >> BuildStatsBTreeIndex::Mixed [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> TChargeBTreeIndex::NoNodes_Groups_History [GOOD] >> TChargeBTreeIndex::OneNode >> DBase::VersionCompactedMem [GOOD] >> DBase::VersionCompactedParts >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_LowResolution >> BuildStatsBTreeIndex::Mixed_Groups [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups_History >> TPartBtreeIndexIteration::OneNode [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] >> TLocalTests::TestAddTenantWhileResolving [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution >> TChargeBTreeIndex::OneNode [GOOD] >> TChargeBTreeIndex::OneNode_Groups >> TDynamicNameserverTest::CacheMissNoDeadline [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Single ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2025-03-28T09:33:30.694963Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791183810888128:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:30.695115Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:30.767158Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791183663645800:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:30.767700Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:30.932810Z node 1 :PQ_READ_PROXY 
DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001bcd/r3tmp/tmpk4qzHC/pdisk_1.dat 2025-03-28T09:33:30.955848Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:33:31.188016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:31.188136Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:31.189434Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:31.189493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:31.197935Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:31.198447Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:33:31.200317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:31.237367Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4128, node 1 2025-03-28T09:33:31.270525Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:33:31.271317Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:33:31.437566Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001bcd/r3tmp/yandex37S9W5.tmp 2025-03-28T09:33:31.437599Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001bcd/r3tmp/yandex37S9W5.tmp 2025-03-28T09:33:31.438360Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001bcd/r3tmp/yandex37S9W5.tmp 2025-03-28T09:33:31.438562Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:33:31.662350Z INFO: TTestServer started on Port 16049 GrpcPort 4128 TClient is connected to server localhost:16049 PQClient connected to localhost:4128 === TenantModeEnabled() = 0 === Init PQ - start server on port 4128 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:33:31.982151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:33:31.982971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:31.984866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:33:31.987181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:33:31.987268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:31.990027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:33:31.991147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:33:31.991379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:31.991443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:33:31.991547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-28T09:33:31.991582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-28T09:33:31.992579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:31.992614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-28T09:33:31.992716Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:33:31.993708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:31.993754Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:33:31.993770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-28T09:33:31.995656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:31.995679Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:31.995695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:33:31.995723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:33:32.023893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:33:32.026049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-28T09:33:32.027204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:33:32.029109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743154412076, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:33:32.029277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154412076 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:33:32.029303Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:33:32.030408Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-28T09:33:32.030455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:33:32.030707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:33:32.030778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:33:32.032878Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:33:32.032909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:33:32.033149Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:33:32.033203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486791188105856119:2412], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-28T09:33:32.033253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:33:32.033274Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-28T09:33:32.033432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:33:32.033445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:33:32.033464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:33:32.033480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:33:32.033501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-28T09:33:32.033518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:33:32.033538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-28T09:33:32.033557Z node 1 :FLAT_TX_SCHEMESHARD D ... ALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/cli_5_1_13236553665077694130_v1" (Sender=[5:7486791371157257334:2648], Pipe=[5:7486791371157257337:2648], Partitions=[], ActiveFamilyCount=0) 2025-03-28T09:34:13.370758Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] consumer cli family 1 status Active partitions [0] session "shared/cli_5_1_13236553665077694130_v1" sender [5:7486791371157257334:2648] lock partition 0 for ReadingSession "shared/cli_5_1_13236553665077694130_v1" (Sender=[5:7486791371157257334:2648], Pipe=[5:7486791371157257337:2648], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-03-28T09:34:13.370821Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-03-28T09:34:13.370856Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037898][rt3.dc1--topic1] consumer cli balancing duration: 0.000212s 2025-03-28T09:34:13.371877Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/cli_5_1_13236553665077694130_v1" ClientId: "cli" PipeClient { RawX1: 7486791371157257337 RawX2: 4503621102209624 } Path: "/Root/PQ/rt3.dc1--topic1" } 2025-03-28T09:34:13.371989Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2025-03-28T09:34:13.372318Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7486791371157257339:2651] 2025-03-28T09:34:13.372496Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: shared/cli_5_1_13236553665077694130_v1:1 with generation 1 2025-03-28T09:34:13.377036Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 SizeLag: 409 WriteTimestampEstimateMS: 1743154453364 } Cookie: 18446744073709551615 } 2025-03-28T09:34:13.377098Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2025-03-28T09:34:13.377189Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 sending to client partition status 2025-03-28T09:34:13.377978Z :INFO: [] [] [efc7b6bd-d202a3c3-9b630b95-9f037248] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: (NULL) 2025-03-28T09:34:13.378474Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2025-03-28T09:34:13.378603Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2025-03-28T09:34:13.378663Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-03-28T09:34:13.378721Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2025-03-28T09:34:13.378794Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 0, sizeLag# 409 2025-03-28T09:34:13.378816Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1TEvPartitionReady. Aval parts: 1 2025-03-28T09:34:13.378860Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 performing read request: guid# fed34b2f-f3e0af6d-3b53765b-c5041de3, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2025-03-28T09:34:13.378959Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid fed34b2f-f3e0af6d-3b53765b-c5041de3 2025-03-28T09:34:13.379789Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1743154453260 CreateTimestampMS: 1743154453258 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1743154453264 CreateTimestampMS: 1743154453258 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." 
SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1743154453293 CreateTimestampMS: 1743154453258 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 43 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2025-03-28T09:34:13.379979Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset3 2025-03-28T09:34:13.380028Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid fed34b2f-f3e0af6d-3b53765b-c5041de3 has messages 1 2025-03-28T09:34:13.380117Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 read done: guid# fed34b2f-f3e0af6d-3b53765b-c5041de3, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 485 2025-03-28T09:34:13.380161Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 response to read: guid# fed34b2f-f3e0af6d-3b53765b-c5041de3 2025-03-28T09:34:13.380377Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 Process answer. Aval parts: 0 2025-03-28T09:34:13.380720Z :DEBUG: [] [] [efc7b6bd-d202a3c3-9b630b95-9f037248] [] Got ReadResponse, serverBytesSize = 485, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428315 2025-03-28T09:34:13.380869Z :DEBUG: [] [] [efc7b6bd-d202a3c3-9b630b95-9f037248] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428315 2025-03-28T09:34:13.381164Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2025-03-28T09:34:13.381234Z :DEBUG: [] [] [efc7b6bd-d202a3c3-9b630b95-9f037248] [] Returning serverBytesSize = 485 to budget 2025-03-28T09:34:13.381276Z :DEBUG: [] [] [efc7b6bd-d202a3c3-9b630b95-9f037248] [] In ContinueReadingDataImpl, ReadSizeBudget = 485, ReadSizeServerDelta = 52428315 2025-03-28T09:34:13.381659Z :DEBUG: [] [] [efc7b6bd-d202a3c3-9b630b95-9f037248] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-03-28T09:34:13.381856Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-03-28T09:34:13.381918Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-03-28T09:34:13.381951Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2025-03-28T09:34:13.381903Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 grpc read done: success# 1, data# { read_request { bytes_size: 485 } } 2025-03-28T09:34:13.382002Z :DEBUG: [] [] [efc7b6bd-d202a3c3-9b630b95-9f037248] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2025-03-28T09:34:13.382040Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 got read request: guid# 41ecb04c-97db1b9d-6747b6fc-5685b29a 2025-03-28T09:34:13.382065Z :DEBUG: [] [] [efc7b6bd-d202a3c3-9b630b95-9f037248] [] Returning serverBytesSize = 0 to budget 2025-03-28T09:34:13.382286Z :INFO: [] [] [efc7b6bd-d202a3c3-9b630b95-9f037248] Closing read session. 
Close timeout: 0.000000s 2025-03-28T09:34:13.382335Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2025-03-28T09:34:13.382376Z :INFO: [] [] [efc7b6bd-d202a3c3-9b630b95-9f037248] Counters: { Errors: 0 CurrentSessionLifetimeMs: 21 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:34:13.382481Z :NOTICE: [] [] [efc7b6bd-d202a3c3-9b630b95-9f037248] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-03-28T09:34:13.382523Z :DEBUG: [] [] [efc7b6bd-d202a3c3-9b630b95-9f037248] [] Abort session to cluster 2025-03-28T09:34:13.383285Z :NOTICE: [] [] [efc7b6bd-d202a3c3-9b630b95-9f037248] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:34:13.383456Z node 5 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 grpc read done: success# 0, data# { } 2025-03-28T09:34:13.383480Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 grpc read failed 2025-03-28T09:34:13.383509Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 closed 2025-03-28T09:34:13.383594Z node 5 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/cli session shared/cli_5_1_13236553665077694130_v1 is DEAD 2025-03-28T09:34:13.383784Z node 5 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/cli_5_1_13236553665077694130_v1 2025-03-28T09:34:13.384289Z node 6 :PERSQUEUE_READ_BALANCER INFO: [72075186224037898][rt3.dc1--topic1] pipe [5:7486791371157257337:2648] disconnected; active server actors: 1 2025-03-28T09:34:13.384356Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037898][rt3.dc1--topic1] pipe [5:7486791371157257337:2648] client cli disconnected session shared/cli_5_1_13236553665077694130_v1 >> TSharedPageCache::ThreeLeveledLRU [GOOD] >> TSharedPageCache::S3FIFO >> Secret::Deactivated [GOOD] >> BuildStatsFlatIndex::Single [GOOD] >> BuildStatsFlatIndex::Single_Slices >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution [GOOD] >> Charge::Lookups [GOOD] >> Charge::ByKeysBasics >> BuildStatsFlatIndex::Single_Slices [GOOD] >> BuildStatsFlatIndex::Single_History >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTable [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeRtmrVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD] >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex >> DBase::VersionCompactedParts [GOOD] >> DBase::UncommittedChangesVisibility [GOOD] >> DBase::UncommittedChangesCommitWithUpdates [GOOD] >> DBase::ReplayNewTable [GOOD] >> DBase::SnapshotNewTable [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] |94.0%| [TA] $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Memtable::Basics [GOOD] >> Memtable::BasicsReverse [GOOD] >> Memtable::Markers [GOOD] >> Memtable::Overlap [GOOD] >> Memtable::Wreck >> Charge::ByKeysBasics [GOOD] >> Charge::ByKeysGroups [GOOD] >> Charge::ByKeysGroupsLimits [GOOD] >> Charge::ByKeysLimits [GOOD] >> Charge::ByKeysReverse [GOOD] >> Charge::ByKeysHistory [GOOD] >> Charge::ByKeysIndex >> Memtable::Wreck [GOOD] >> Memtable::Erased >> Charge::ByKeysIndex [GOOD] >> TNodeBrokerTest::NodeNameReuseRestart >> Charge::ByRows [GOOD] >> Charge::ByRowsReverse [GOOD] >> Charge::ByRowsLimits [GOOD] >> Charge::ByRowsLimitsReverse [GOOD] >> DBase::Basics [GOOD] >> DBase::Defaults [GOOD] >> DBase::Garbage [GOOD] >> DBase::Affects [GOOD] >> DBase::Annex [GOOD] >> DBase::AnnexRollbackChanges [GOOD] >> DBase::Outer [GOOD] >> DBase::KIKIMR_15506_MissingSnapshotKeys [GOOD] >> DBase::EraseCacheWithUncommittedChanges [GOOD] >> DBase::EraseCacheWithUncommittedChangesCompacted [GOOD] >> DBase::AlterAndUpsertChangesVisibility [GOOD] >> DBase::DropModifiedTable [GOOD] >> DBase::KIKIMR_15598_Many_MemTables >> DataShardVolatile::NotCachingAbortingDeletes-UseSink [GOOD] >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck >> TTenantPoolTests::TestSensorsConfigForStaticSlot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSimpleDeadline [GOOD] Test command err: 2025-03-28T09:34:16.193754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:16.194592Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissNoDeadline [GOOD] Test command err: 2025-03-28T09:34:16.201604Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:16.201683Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... waiting for cache miss (done) ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from to NODE_BROKER_ACTOR |94.0%| [TA] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BuildStatsFlatIndex::Single_History [GOOD] >> BuildStatsFlatIndex::Single_History_Slices >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTable [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSubDomain [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSolomonVolume [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> BasicUsage::BrokenCredentialsProvider [GOOD] Test command err: 2025-03-28T09:33:31.874700Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1743154411874645 2025-03-28T09:33:32.330073Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791193937019650:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:32.330346Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:32.417366Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791193278772664:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:32.417420Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:32.662528Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:33:32.668170Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cbb/r3tmp/tmpR7dBfN/pdisk_1.dat 2025-03-28T09:33:33.087619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:33.087755Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:33.089111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:33.089172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:33.104232Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:33:33.104416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:33.104780Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:33.106577Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9831, node 1 2025-03-28T09:33:33.547329Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001cbb/r3tmp/yandexyhKClV.tmp 2025-03-28T09:33:33.547353Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001cbb/r3tmp/yandexyhKClV.tmp 2025-03-28T09:33:33.547497Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001cbb/r3tmp/yandexyhKClV.tmp 2025-03-28T09:33:33.547619Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:33:33.690030Z INFO: TTestServer started on Port 19672 GrpcPort 9831 TClient is connected to server 
localhost:19672 PQClient connected to localhost:9831 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:34.259966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:34.295822Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:33:36.805283Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791211116889707:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:36.805382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:36.806855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791211116889734:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:36.811510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:33:36.913338Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791211116889736:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:33:37.194861Z node 1 :TX_PROXY ERROR: Actor# [1:7486791211116889822:2687] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:37.233773Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486791215411857130:2351], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:33:37.239496Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzljZDNjMDctNjMzMjJlMjMtNWVkOTIyN2MtOWExN2M5OWI=, ActorId: [1:7486791211116889697:2337], ActorState: ExecuteState, TraceId: 01jqe1k349amrjpabh97wbwgek, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:33:37.241953Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:33:37.244348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:37.245883Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486791210458642063:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:33:37.246593Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTFhMmYyYzYtZjY4YjBjNjktMzM3M2ZmMDEtZWIyZjE4ZWE=, ActorId: [2:7486791210458642023:2309], ActorState: ExecuteState, TraceId: 01jqe1k37re9d2avsmbzm7r6c6, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:33:37.246981Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:33:37.326903Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791193937019650:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:37.327584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:37.418807Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486791193278772664:2194];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:37.418886Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:37.454909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:37.628764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:9831", true, true, 1000); 2025-03-28T09:33:38.036423Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710666. Ctx: { TraceId: 01jqe1k41qcjwgjsae53sdds3g, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODgzMTBkYy0yZGNkNzc0My05ZDhlNjBhNC04YmZjMjFiMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486791219706824893:3041] === CheckClustersList. Ok 2025-03-28T09:33:44.065349Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:9831 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-28T09:33:44.193244Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:9831 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-top ... e token 2025-03-28T09:34:13.770268Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-03-28T09:34:13.770312Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:24819 2025-03-28T09:34:13.775590Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "test-topic" message_group_id: "src" } 2025-03-28T09:34:13.775958Z node 5 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-28T09:34:13.775985Z node 5 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-03-28T09:34:13.776949Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "test-topic" message_group_id: "src" } 2025-03-28T09:34:13.777077Z node 5 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "test-topic" message_group_id: "src" from ipv6:[::1]:42566 2025-03-28T09:34:13.777101Z node 5 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:42566 proto=v1 topic=test-topic durationSec=0 2025-03-28T09:34:13.777111Z node 5 :PQ_WRITE_PROXY INFO: init check schema 2025-03-28T09:34:13.778507Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-03-28T09:34:13.778618Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-03-28T09:34:13.778636Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:34:13.778644Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-28T09:34:13.778662Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7486791371180943121:2522] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-03-28T09:34:13.780924Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7486791371180943121:2522] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-03-28T09:34:13.959297Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7486791371180943121:2522] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-03-28T09:34:13.959775Z node 
5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7486791371180943160:2522] connected; active server actors: 1 2025-03-28T09:34:13.959830Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7486791371180943121:2522] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-03-28T09:34:13.959848Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7486791371180943121:2522] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-03-28T09:34:13.960199Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7486791371180943160:2522] disconnected; active server actors: 1 2025-03-28T09:34:13.960230Z node 5 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [5:7486791371180943160:2522] disconnected no session 2025-03-28T09:34:14.083110Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7486791371180943121:2522] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-03-28T09:34:14.083170Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7486791371180943121:2522] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-03-28T09:34:14.083206Z node 5 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [5:7486791371180943121:2522] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-03-28T09:34:14.083234Z node 5 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:34:14.084024Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [5:7486791375475910476:2522], now have 1 active actors on pipe 2025-03-28T09:34:14.084130Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 6, Generation: 1 2025-03-28T09:34:14.084277Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:34:14.084320Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:34:14.084421Z node 6 :PERSQUEUE INFO: new Cookie src|6ccfb7e4-46fbfd-33bedceb-a382c18a_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-03-28T09:34:14.084535Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-03-28T09:34:14.084598Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:34:14.085173Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:34:14.085200Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:34:14.085326Z node 6 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:34:14.085666Z node 5 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|6ccfb7e4-46fbfd-33bedceb-a382c18a_0 2025-03-28T09:34:14.086423Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743154454086 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:34:14.086546Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|6ccfb7e4-46fbfd-33bedceb-a382c18a_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-28T09:34:14.086768Z :INFO: [] MessageGroupId [src] SessionId [src|6ccfb7e4-46fbfd-33bedceb-a382c18a_0] Write session: close. Timeout = 0 ms 2025-03-28T09:34:14.086843Z :INFO: [] MessageGroupId [src] SessionId [src|6ccfb7e4-46fbfd-33bedceb-a382c18a_0] Write session will now close 2025-03-28T09:34:14.086890Z :DEBUG: [] MessageGroupId [src] SessionId [src|6ccfb7e4-46fbfd-33bedceb-a382c18a_0] Write session: aborting 2025-03-28T09:34:14.087629Z :INFO: [] MessageGroupId [src] SessionId [src|6ccfb7e4-46fbfd-33bedceb-a382c18a_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:34:14.087643Z :DEBUG: [] MessageGroupId [src] SessionId [src|6ccfb7e4-46fbfd-33bedceb-a382c18a_0] Write session is aborting and will not restart 2025-03-28T09:34:14.087700Z :DEBUG: [] MessageGroupId [src] SessionId [src|6ccfb7e4-46fbfd-33bedceb-a382c18a_0] Write session: destroy 2025-03-28T09:34:14.088133Z node 5 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|6ccfb7e4-46fbfd-33bedceb-a382c18a_0 grpc read done: success: 0 data: 2025-03-28T09:34:14.088154Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|6ccfb7e4-46fbfd-33bedceb-a382c18a_0 grpc read failed 2025-03-28T09:34:14.088186Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|6ccfb7e4-46fbfd-33bedceb-a382c18a_0 grpc closed 2025-03-28T09:34:14.088202Z node 5 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|6ccfb7e4-46fbfd-33bedceb-a382c18a_0 is DEAD 2025-03-28T09:34:14.089723Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [5:7486791375475910476:2522] destroyed 2025-03-28T09:34:14.089223Z node 5 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:34:14.089769Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
2025-03-28T09:34:14.099338Z :INFO: [/Root] [/Root] [d7836aad-174340f3-8af691b-5ca186f9] Starting read session 2025-03-28T09:34:14.099388Z :DEBUG: [/Root] [/Root] [d7836aad-174340f3-8af691b-5ca186f9] Starting session to cluster null (localhost:24819) 2025-03-28T09:34:14.100859Z :DEBUG: [/Root] [/Root] [d7836aad-174340f3-8af691b-5ca186f9] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:14.100922Z :DEBUG: [/Root] [/Root] [d7836aad-174340f3-8af691b-5ca186f9] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:14.100951Z :DEBUG: [/Root] [/Root] [d7836aad-174340f3-8af691b-5ca186f9] [null] Reconnecting session to cluster null in 0.000000s 2025-03-28T09:34:14.102045Z :ERROR: [/Root] [/Root] [d7836aad-174340f3-8af691b-5ca186f9] [null] Got error. Status: CLIENT_UNAUTHENTICATED. Description:
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation 2025-03-28T09:34:14.102090Z :DEBUG: [/Root] [/Root] [d7836aad-174340f3-8af691b-5ca186f9] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:14.102111Z :DEBUG: [/Root] [/Root] [d7836aad-174340f3-8af691b-5ca186f9] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:14.102227Z :INFO: [/Root] [/Root] [d7836aad-174340f3-8af691b-5ca186f9] [null] Closing session to cluster: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " } Get event on client 2025-03-28T09:34:14.102351Z :NOTICE: [/Root] [/Root] [d7836aad-174340f3-8af691b-5ca186f9] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:34:14.102376Z :DEBUG: [/Root] [/Root] [d7836aad-174340f3-8af691b-5ca186f9] [null] Abort session to cluster Got close event: SessionClosed { Status: CLIENT_UNAUTHENTICATED Issues: "
: Error: Failed to establish connection to server "" ( cluster null). Attempts done: 1
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp:451: exception during creation " }2025-03-28T09:34:14.102446Z :INFO: [/Root] [/Root] [d7836aad-174340f3-8af691b-5ca186f9] Closing read session. Close timeout: 0.000000s 2025-03-28T09:34:14.102476Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2025-03-28T09:34:14.102510Z :INFO: [/Root] [/Root] [d7836aad-174340f3-8af691b-5ca186f9] Counters: { Errors: 1 CurrentSessionLifetimeMs: 3 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:34:14.102613Z :NOTICE: [/Root] [/Root] [d7836aad-174340f3-8af691b-5ca186f9] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >> Memtable::Erased [GOOD] >> NFwd_TBlobs::MemTableTest [GOOD] >> NFwd_TBlobs::Lower [GOOD] >> NFwd_TBlobs::Sieve [GOOD] >> NFwd_TBlobs::SieveFiltered [GOOD] >> NFwd_TBlobs::Basics [GOOD] >> NFwd_TBlobs::Simple [GOOD] >> NFwd_TBlobs::Shuffle [GOOD] >> NFwd_TBlobs::Grow [GOOD] >> NFwd_TBlobs::Trace [GOOD] >> NFwd_TBlobs::Filtered [GOOD] >> NFwd_TBTreeIndexCache::Basics [GOOD] >> NFwd_TBTreeIndexCache::IndexPagesLocator [GOOD] >> NFwd_TBTreeIndexCache::GetTwice [GOOD] >> NFwd_TBTreeIndexCache::ForwardTwice [GOOD] >> NFwd_TBTreeIndexCache::Forward_OnlyUsed [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done_None [GOOD] >> NFwd_TBTreeIndexCache::Skip_Keep [GOOD] >> NFwd_TBTreeIndexCache::Skip_Wait [GOOD] >> NFwd_TBTreeIndexCache::Trace_BTree [GOOD] >> NFwd_TBTreeIndexCache::Trace_Data [GOOD] >> NFwd_TBTreeIndexCache::End [GOOD] >> NFwd_TBTreeIndexCache::Slices [GOOD] >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] >> TChargeBTreeIndex::FewNodes_History [GOOD] >> TChargeBTreeIndex::FewNodes_Sticky >> BuildStatsFlatIndex::Single_History_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups >> TDynamicNameserverTest::TestCacheUsage >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder >> BuildStatsFlatIndex::Single_Groups [GOOD] >> BuildStatsFlatIndex::Single_Groups_Slices >> TNodeBrokerTest::NoEffectBeforeCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated [GOOD] Test command err: 2025-03-28T09:34:03.705054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:301:2345], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e3c/r3tmp/tmpihWC8C/pdisk_1.dat TServer::EnableGrpc on GrpcPort 25964, node 1 TClient is connected to server localhost:11235 2025-03-28T09:34:04.181639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:04.219529Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:04.219630Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:04.219667Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:04.220033Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:04.221427Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:04.256741Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:04.256868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:04.268424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-03-28T09:34:16.200923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:691:2581], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:16.201136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 >> BuildStatsFlatIndex::Single_Groups_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups_History >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] >> TSharedPageCache::S3FIFO [GOOD] >> TSharedPageCache::ClockPro >> BuildStatsFlatIndex::Single_Groups_History [GOOD] >> BuildStatsFlatIndex::Single_Groups_History_Slices >> TNodeBrokerTest::ResolveScopeIdForServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeExternalDataSource [GOOD] Test command err: 2025-03-28T09:34:10.854304Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791358353297490:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:10.854896Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00173f/r3tmp/tmpjIJiQb/pdisk_1.dat 2025-03-28T09:34:11.385008Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:11.406967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:11.407099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:11.418149Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10714, node 1 2025-03-28T09:34:11.642439Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:11.642462Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:11.642473Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:11.642583Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27295 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:12.290069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:34:13.881002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791371238200429:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:13.881104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:14.272650Z node 1 :TX_PROXY DEBUG: actor# [1:7486791358353297743:2141] Handle TEvProposeTransaction 2025-03-28T09:34:14.272683Z node 1 :TX_PROXY DEBUG: actor# [1:7486791358353297743:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T09:34:14.272736Z node 1 :TX_PROXY DEBUG: actor# [1:7486791358353297743:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486791375533167752:2634] 2025-03-28T09:34:14.321498Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167752:2634] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-03-28T09:34:14.321549Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167752:2634] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:34:14.321950Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167752:2634] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:34:14.322019Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167752:2634] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:34:14.322221Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167752:2634] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:34:14.322354Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167752:2634] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:34:14.322408Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167752:2634] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:34:14.322592Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167752:2634] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T09:34:14.323730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:34:14.325815Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167752:2634] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-03-28T09:34:14.325865Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167752:2634] txid# 281474976710658 SEND to# [1:7486791375533167751:2343] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-03-28T09:34:14.466450Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791375533167894:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:14.466521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:14.523260Z node 1 :TX_PROXY DEBUG: actor# [1:7486791358353297743:2141] Handle TEvProposeTransaction 2025-03-28T09:34:14.523315Z node 1 :TX_PROXY DEBUG: actor# [1:7486791358353297743:2141] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T09:34:14.523379Z node 1 :TX_PROXY DEBUG: actor# [1:7486791358353297743:2141] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7486791375533167921:2752] 2025-03-28T09:34:14.525833Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167921:2752] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateCdcStream CreateCdcStream { TableName: "table" StreamDescription { Name: "a" Mode: ECdcStreamModeUpdate Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" } } } } UserToken: "" DatabaseName: "" PeerName: "" 2025-03-28T09:34:14.525879Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167921:2752] txid# 281474976710659 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:34:14.525958Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167921:2752] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:34:14.526295Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167921:2752] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:34:14.526434Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167921:2752] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:34:14.526476Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167921:2752] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-03-28T09:34:14.526660Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167921:2752] txid# 281474976710659 HANDLE EvClientConnected 2025-03-28T09:34:14.534888Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167921:2752] txid# 281474976710659 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-28T09:34:14.534937Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533167921:2752] txid# 281474976710659 SEND to# [1:7486791375533167920:2355] Source {TEvProposeTransactionStatus txid# 281474976710659 Status# 53} 2025-03-28T09:34:14.617476Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7486791375533168101:2363] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-28T09:34:14.651009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791375533168195:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:14.651088Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:14.659591Z node 1 :TX_PROXY DEBUG: actor# [1:7486791358353297743:2141] Handle TEvProposeTransaction 2025-03-28T09:34:14.659622Z node 1 :TX_PROXY DEBUG: actor# [1:7486791358353297743:2141] TxId# 281474976710660 ProcessProposeTransaction 2025-03-28T09:34:14.659660Z node 1 :TX_PROXY DEBUG: actor# [1:7486791358353297743:2141] Cookie# 0 userReqId# "" txid# 281474976710660 SEND to# [1:7486791375533168207:2959] 2025-03-28T09:34:14.661322Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533168207:2959] txid# 281474976710660 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateCdcStream CreateCdcStream { TableName: "table" StreamDescription { Name: "b" Mode: ECdcStreamModeUpdate Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" } } } } UserToken: "" DatabaseName: "" PeerName: "" 2025-03-28T09:34:14.661362Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533168207:2959] txid# 281474976710660 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:34:14.661394Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533168207:2959] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:34:14.661662Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791375533168207:2959] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:34:14.661759Z node 1 :TX_PROXY DEBUG: ... hId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715766 2025-03-28T09:34:15.631160Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-28T09:34:15.634704Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-28T09:34:15.634732Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976715766, status# StatusAccepted 2025-03-28T09:34:15.634817Z node 1 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Subscribed WaitTxId: 281474976715766 Issue: '' } 2025-03-28T09:34:15.639425Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-28T09:34:15.667786Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][1:7486791379828137674:2486] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:17:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-28T09:34:15.681133Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-28T09:34:15.681167Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715766 2025-03-28T09:34:15.681259Z node 1 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, 
LocalPathId: 11] State: CreateChangefeed SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-03-28T09:34:15.682837Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-28T09:34:15.682916Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-28T09:34:15.682927Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976715767, id# 281474976710664 2025-03-28T09:34:15.682971Z node 1 :IMPORT INFO: TImport::TTxProgress: CreateConsumers propose: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976715767 2025-03-28T09:34:15.683265Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-28T09:34:15.683745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715767:0, at schemeshard: 72057594046644480 2025-03-28T09:34:15.685407Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-28T09:34:15.685431Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976715767, status# StatusAccepted 2025-03-28T09:34:15.685494Z node 1 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976710664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/table' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 11] State: CreateChangefeed SubState: Subscribed WaitTxId: 281474976715767 Issue: '' } 2025-03-28T09:34:15.687175Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-28T09:34:15.708407Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-28T09:34:15.708433Z node 1 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976715767 2025-03-28T09:34:15.709855Z node 1 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-28T09:34:15.799560Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7486791379828137851:2498] [0] Resolve database: name# /Root 2025-03-28T09:34:15.800051Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7486791379828137851:2498] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:34:15.800079Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7486791379828137851:2498] [0] Send request: schemeShardId# 72057594046644480 2025-03-28T09:34:15.800718Z node 1 :TX_PROXY DEBUG: [GetImport] [1:7486791379828137851:2498] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976710664 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:26373" scheme: HTTP bucket: "test_bucket" items { source_prefix: "table" destination_path: "/Root/table" } } 
StartTime { seconds: 1743154455 } EndTime { seconds: 1743154455 } } 2025-03-28T09:34:15.807152Z node 1 :TX_PROXY DEBUG: actor# [1:7486791358353297743:2141] Handle TEvNavigate describe path /Root/table 2025-03-28T09:34:15.807211Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791379828137857:4862] HANDLE EvNavigateScheme /Root/table 2025-03-28T09:34:15.807481Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791379828137857:4862] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:34:15.807573Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791379828137857:4862] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table" Options { ShowPrivateTable: false } 2025-03-28T09:34:15.808753Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791379828137857:4862] Handle TEvDescribeSchemeResult Forward to# [1:7486791379828137855:2499] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table" PathDescription { Self { Name: "table" PathId: 11 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715760 CreateStep: 1743154455427 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "table" Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "Key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 
BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } } TableSchemaVersion: 4 IsBackup: false CdcStreams { Name: "a" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046644480 LocalId: 14 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } CdcStreams { Name: "b" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046644480 LocalId: 16 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } CdcStreams { Name: "c" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046644480 LocalId: 12 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 9 PathsLimit: 10000 ShardsInside: 8 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 11 PathOwnerId: 72057594046644480 2025-03-28T09:34:15.854589Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791358353297490:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:15.854664Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TChargeBTreeIndex::FewNodes_Sticky [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder [GOOD] >> BuildStatsFlatIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsFlatIndex::Mixed >> TChargeBTreeIndex::OneNode_Groups [GOOD] >> TChargeBTreeIndex::OneNode_History >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] >> TNodeBrokerTest::NodeNameExpiration >> BuildStatsFlatIndex::Mixed [GOOD] >> BuildStatsFlatIndex::Mixed_Groups |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> 
NFwd_TBTreeIndexCache::ManyApplies [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:34:12.940567Z 00000.029 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.035 II| FAKE_ENV: Starting storage for BS group 0 00000.036 II| FAKE_ENV: Starting storage for BS group 1 00000.036 II| FAKE_ENV: Starting storage for BS group 2 00000.036 II| FAKE_ENV: Starting storage for BS group 3 00000.060 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.061 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.061 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.061 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {146b, 4} 00000.061 II| FAKE_ENV: DS.1 gone, left {105b, 3}, put {105b, 3} 00000.061 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.061 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.061 II| FAKE_ENV: All BS storage groups are stopped 00000.062 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.062 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:34:13.007051Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.015 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.015 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.015 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.015 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {292b, 8} 00000.015 II| FAKE_ENV: DS.1 gone, left {210b, 6}, put {210b, 6} 00000.015 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.015 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.015 II| FAKE_ENV: All BS storage groups are stopped 00000.015 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.015 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:34:13.027327Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.039 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.039 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 76b} miss {0 0b} 00000.039 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.040 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1181b, 13} 00000.040 II| FAKE_ENV: DS.1 gone, left {909b, 3}, put {1913b, 12} 00000.040 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {132b, 2} 00000.040 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {116b, 2} 00000.040 II| FAKE_ENV: All BS storage groups are stopped 00000.040 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.040 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:34:13.070999Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 
AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.025 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.025 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} 00000.025 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.025 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11} 00000.025 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 00000.025 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.025 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.025 II| FAKE_ENV: All BS storage groups are stopped 00000.025 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.025 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:34:13.101263Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.064 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.066 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.066 NN| TABLET_SAUSAGECACHE: Poison cache serviced 11 reqs hit {18 513007b} miss {0 0b} 00000.067 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.067 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {2095b, 23} 00000.067 II| FAKE_ENV: DS.1 gone, left {774b, 4}, put {210604b, 21} 00000.067 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {205178b, 4} 00000.067 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {102690b, 4} 00000.067 II| FAKE_ENV: All BS storage groups are stopped 00000.067 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 15.00s 00000.067 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:34:13.172406Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.005 II| FAKE_ENV: Starting storage for BS group 1 00000.005 II| FAKE_ENV: Starting storage for BS group 2 00000.005 II| FAKE_ENV: Starting storage for BS group 3 00000.041 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.042 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {3 307329b} miss {0 0b} 00000.042 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.042 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1826b, 23} 00000.042 II| FAKE_ENV: DS.1 gone, left {1247b, 3}, put {311467b, 22} 00000.042 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.042 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.042 II| FAKE_ENV: All BS storage groups are stopped 00000.042 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.042 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:34:13.219967Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 
II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.027 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 5 actors 00000.028 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4 reqs hit {8 307836b} miss {0 0b} 00000.028 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.028 II| FAKE_ENV: DS.0 gone, left {57b, 2}, put {1436b, 31} 00000.028 II| FAKE_ENV: DS.1 gone, left {629b, 3}, put {310476b, 16} 00000.028 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.028 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.028 II| FAKE_ENV: All BS storage groups are stopped 00000.028 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.028 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:34:13.252192Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.034 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.034 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 194646b} miss {0 0b} 00000.034 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.034 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1768b, 27} 00000.034 II| FAKE_ENV: DS.1 gone, left {732b, 6}, put {197813b, 24} 00000.035 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.035 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.035 II| FAKE_ENV: All BS storage groups are stopped 00000.035 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.035 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:34:13.290604Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.005 II| FAKE_ENV: Starting storage for BS group 1 00000.005 II| FAKE_ENV: Starting storage for BS group 2 00000.005 II| FAKE_ENV: Starting storage for BS group 3 00000.009 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.010 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} 00000.010 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.010 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {326b, 7} 00000.010 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.010 II| FAKE_ENV: DS.1 gone, left {418b, 4}, put {453b, 5} 00000.010 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.010 II| FAKE_ENV: All BS storage groups are stopped 00000.010 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.010 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:34:13.304402Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.053 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.054 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 
actors 00000.055 NN| TABLET_SAUSAGECACHE: Poison cache serviced 6 reqs hit {8 410030b} miss {0 0b} 00000.055 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.055 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1492b, 23} 00000.055 II| FAKE_ENV: DS.1 gone, left {504b, 4}, put {310786b, 20} 00000.055 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.055 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.055 II| FAKE_ENV: All BS storage groups are stopped 00000.055 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 15.00s 00000.055 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:34:13.364222Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.016 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.017 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} 00000.017 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.017 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11} 00000.017 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 0 ... t32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} 
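For anyone decoding the Part/BTreeIndex dumps printed by these tablet_flat tests: each BTreeIndex entry carries the running RowCount/DataSize up to that child page, and the Rows{} entries show 2 rows of 25 bytes per data page (40 rows, 1000b total). The standalone sketch below is not part of the test output; it only reproduces that cumulative bookkeeping under constants read off the dump itself (rowsPerLeaf, bytesPerRow and the fanout of 3 are inferred, not taken from the test source).

#include <cstdio>

int main() {
    // Constants inferred from the dump: 40 rows across 20 leaves, 25b per row,
    // inner B-tree nodes fanning out over 3 children.
    const int leaves = 20, rowsPerLeaf = 2, bytesPerRow = 25, fanout = 3;
    for (int i = 1; i <= leaves; ++i) {
        const int rowCount = i * rowsPerLeaf;          // running row total
        const int dataSize = rowCount * bytesPerRow;   // running byte total
        printf("leaf %2d -> RowCount: %d DataSize: %d%s\n",
               i - 1, rowCount, dataSize,
               i % fanout == 0 ? "  <- inner node closes here" : "");
    }
    // The final iteration prints RowCount: 40 DataSize: 1000, matching the root
    // BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000} entry in the dump.
    return 0;
}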
Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 
1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | 
PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> BuildStatsFlatIndex::Mixed_Groups [GOOD] >> BuildStatsFlatIndex::Mixed_Groups_History ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder [GOOD] Test command err: 2025-03-28T09:34:18.059678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:18.059738Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... 
waiting for cache miss (done) >> BuildStatsFlatIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Serial ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] Test command err: 2025-03-28T09:34:17.564027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:17.564143Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:17.617235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 >> TNodeBrokerTest::ConfigPipelining >> TDynamicNameserverTest::TestCacheUsage [GOOD] >> BuildStatsFlatIndex::Serial [GOOD] >> BuildStatsFlatIndex::Serial_Groups >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] >> TSharedPageCache::ClockPro [GOOD] >> TSharedPageCache::ReplacementPolicySwitch >> TEnumerationTest::TestPublish [GOOD] >> TChargeBTreeIndex::OneNode_History [GOOD] >> TChargeBTreeIndex::OneNode_Groups_History >> BuildStatsFlatIndex::Serial_Groups [GOOD] >> BuildStatsFlatIndex::Serial_Groups_History >> TTicketParserTest::AuthenticationWithUserAccount [GOOD] >> TTicketParserTest::AuthenticationUnsupported >> TSharedPageCache::ReplacementPolicySwitch [GOOD] >> TSharedPageCache::BigCache_BTreeIndex >> TTicketParserTest::TicketFromCertificateCheckIssuerGood [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerBad >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] >> BuildStatsFlatIndex::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Single >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TEnumerationTest::TestPublish [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::TestCacheUsage [GOOD] Test command err: 2025-03-28T09:34:18.027265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:18.027337Z node 1 :IMPORT WARN: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] Test command err: 2025-03-28T09:34:18.686032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:18.686103Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:18.763027Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:34:18.801715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 >> TSharedPageCache::BigCache_BTreeIndex [GOOD] >> TSharedPageCache::BigCache_FlatIndex ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] Test command err: 2025-03-28T09:30:51.990044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:30:51.990488Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:30:51.990545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000afb/r3tmp/tmpW86uZy/pdisk_1.dat 2025-03-28T09:30:52.434642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:30:52.489295Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:52.530940Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:30:52.532062Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:30:52.532315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:52.532462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:52.543959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:52.723054Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:30:52.723137Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:30:52.724698Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:645:2553] 2025-03-28T09:30:52.834332Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:30:52.834450Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:30:52.835910Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:30:52.836088Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:30:52.836516Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:30:52.836809Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:30:52.836997Z node 1 :TX_PROXY DEBUG: Actor# 
[1:645:2553] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:30:52.839845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:30:52.840952Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:30:52.842333Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:30:52.842428Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:30:52.882793Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:30:52.883908Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:30:52.884535Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:670:2574] 2025-03-28T09:30:52.884837Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:30:52.938674Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:30:52.939461Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:30:52.939604Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:30:52.942919Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:30:52.942999Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:30:52.943045Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:30:52.944926Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:30:52.945095Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:30:52.945206Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:686:2574] in generation 1 2025-03-28T09:30:52.956118Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:30:52.991624Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:30:52.992766Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:30:52.992970Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:688:2584] 2025-03-28T09:30:52.993014Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:30:52.993056Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:30:52.993104Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:30:52.993361Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:670:2574], Recipient [1:670:2574]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
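The TX_DATASHARD trace above walks one scheme transaction through the datashard's execution-unit pipeline: CheckSchemeTx, then StoreSchemeTx, then FinishPropose, then parking on WaitForPlan until the coordinator's plan step arrives. Below is a deliberately simplified model of that loop, using only the unit names and outcome labels visible in the trace; the real pipeline lives in ydb/core/tx/datashard and has many more units plus restart semantics, so treat this as a sketch of the shape, not the implementation.

#include <cstdio>
#include <string>
#include <vector>

// Simplified model of the execution-unit walk traced above. Unit names and
// status labels are copied from the log; the control flow is an assumption.
enum class EStatus { ExecutedNoMoreRestarts, DelayComplete, NotReady };

struct TUnit {
    std::string Name;
    EStatus Result;
};

int main() {
    const std::vector<TUnit> plan = {
        {"CheckSchemeTx", EStatus::ExecutedNoMoreRestarts},
        {"StoreSchemeTx", EStatus::DelayComplete},   // persisted, completion deferred
        {"FinishPropose", EStatus::DelayComplete},   // reply sent when the tx commits
        {"WaitForPlan",   EStatus::NotReady},        // parks until the plan step arrives
    };
    for (const auto& unit : plan) {
        printf("Trying to execute tx on unit %s\n", unit.Name.c_str());
        if (unit.Result == EStatus::NotReady) {
            printf("Operation is not ready to execute on unit %s\n", unit.Name.c_str());
            break;  // proposal phase ends here; planning resumes the walk later
        }
        printf("Advance execution plan past unit %s\n", unit.Name.c_str());
    }
    return 0;
}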
2025-03-28T09:30:52.993955Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:30:52.994935Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:30:52.995052Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:30:52.995171Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:30:52.995244Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:30:52.995303Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:30:52.995352Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:30:52.995387Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:30:52.995422Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:30:52.995467Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:30:52.997105Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:677:2578], Recipient [1:670:2574]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:30:52.997168Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:30:52.997215Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2571], serverId# [1:677:2578], sessionId# [0:0:0] 2025-03-28T09:30:52.997329Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:677:2578] 2025-03-28T09:30:52.997377Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:30:52.997519Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:30:52.997856Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:30:52.997923Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:30:52.998711Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:30:52.998778Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:30:52.998836Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:30:52.998996Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:30:52.999051Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:30:52.999385Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:30:52.999423Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:30:52.999458Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:30:52.999493Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-28T09:30:52.999561Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:30:52.999591Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:30:52.999624Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:30:52.999672Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:30:52.999722Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:30:53.001361Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:689:2585], Recipient [1:670:2574]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:30:53.001413Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:30:53.012187Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Compl ... 28T09:34:16.748598Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] shutdown pipe due to pending shutdown request [26:973:2789] 2025-03-28T09:34:16.748690Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] notify reset [26:973:2789] 2025-03-28T09:34:16.748950Z node 26 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [26:974:2790], Recipient [26:697:2585]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:34:16.749079Z node 26 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:34:16.749194Z node 26 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [26:973:2789], serverId# [26:974:2790], sessionId# [0:0:0] 2025-03-28T09:34:16.749524Z node 26 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [26:972:2788], Recipient [26:697:2585]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-03-28T09:34:16.749705Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} queued, type NKikimr::NDataShard::TDataShard::TTxCompactTable 2025-03-28T09:34:16.749860Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:34:16.750015Z node 26 :TABLET_EXECUTOR DEBUG: TCompactionLogic PrepareForceCompaction for 72075186224037888 table 1001, mode Full, forced state None, forced mode Full 2025-03-28T09:34:16.750220Z node 26 :TX_DATASHARD INFO: Started background compaction# 1 of 72075186224037888 tableId# 2 localTid# 1001, requested from [26:972:2788], partsCount# 0, memtableSize# 656, memtableWaste# 3952, memtableRows# 2 2025-03-28T09:34:16.750393Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:34:16.750542Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:21} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:34:16.750900Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888: task 
1, edge 9223372036854775807/0, generation 0 2025-03-28T09:34:16.751001Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:21} starting compaction 2025-03-28T09:34:16.751445Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:22} starting Scan{1 on 1001, Compact{72075186224037888.1.21, eph 1}} 2025-03-28T09:34:16.751632Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:22} started compaction 1 2025-03-28T09:34:16.751723Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888 started compaction 1 generation 0 ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR 2025-03-28T09:34:16.755664Z node 26 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:22} Compact 1 on TGenCompactionParams{1001: gen 0 epoch +inf, 0 parts} step 21, product {tx status + 1 parts epoch 2} done 2025-03-28T09:34:16.756073Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CompactionFinished for 72075186224037888: compaction 1, generation 0 2025-03-28T09:34:16.756253Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 1, state Free, final id 0, final level 0 2025-03-28T09:34:16.756334Z node 26 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 3, state Free, final id 0, final level 0 2025-03-28T09:34:16.756870Z node 26 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:01.530705Z 2025-03-28T09:34:16.757187Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} queued, type NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs 2025-03-28T09:34:16.757346Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:34:16.757486Z node 26 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2025-03-28T09:34:16.757612Z node 26 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [26:972:2788]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:34:16.758479Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} hope 1 -> done Change{17, redo 83b alter 0b annex 0, ~{ 27 } -{ }, 0 gb} 2025-03-28T09:34:16.758648Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:23} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} release 4194304b of static, Memory{0 dyn 0} ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR ========= Starting an immediate read ========= 2025-03-28T09:34:16.947975Z node 26 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1ma5z10szjcrq5d4dcrk5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=26&id=NzcxODQ4ODYtOGM2ZTEwZDQtNDVkYjQ2MWItOTFkZjk3OTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:34:16.949797Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] send [26:910:2734] 2025-03-28T09:34:16.949911Z node 26 :PIPE_CLIENT DEBUG: TClient[72075186224037888] push event to server [26:910:2734] 2025-03-28T09:34:16.950354Z node 26 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [26:998:2796], Recipient [26:697:2585]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-03-28T09:34:16.950638Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-03-28T09:34:16.950844Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:34:16.951052Z node 26 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:34:16.951167Z node 26 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1531/281474976715662 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:34:16.951264Z node 26 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v1531/18446744073709551615 2025-03-28T09:34:16.951399Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2025-03-28T09:34:16.951589Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-28T09:34:16.951687Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:34:16.951795Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:34:16.951872Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:34:16.951962Z node 26 :TX_DATASHARD TRACE: Activated operation [0:5] at 72075186224037888 2025-03-28T09:34:16.952046Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed 2025-03-28T09:34:16.952082Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:34:16.952111Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:34:16.952138Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:34:16.952324Z node 26 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-03-28T09:34:16.952634Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is DelayComplete 2025-03-28T09:34:16.952696Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:34:16.952793Z node 26 :TX_DATASHARD TRACE: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:34:16.952871Z node 26 :TX_DATASHARD TRACE: Trying to execute [0:5] at 
72075186224037888 on unit CompletedOperations
2025-03-28T09:34:16.952935Z node 26 :TX_DATASHARD TRACE: Execution status for [0:5] at 72075186224037888 is Executed
2025-03-28T09:34:16.952964Z node 26 :TX_DATASHARD TRACE: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations
2025-03-28T09:34:16.953016Z node 26 :TX_DATASHARD TRACE: Execution plan for [0:5] at 72075186224037888 has finished
2025-03-28T09:34:16.953107Z node 26 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888
2025-03-28T09:34:16.953276Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{18, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb}
2025-03-28T09:34:16.953429Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:24} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0}
2025-03-28T09:34:17.007574Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} commited cookie 8 for step 20
2025-03-28T09:34:17.039310Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:22} commited cookie 8 for step 21
2025-03-28T09:34:17.060354Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep
2025-03-28T09:34:17.060593Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0}
2025-03-28T09:34:17.060997Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{12, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb}
2025-03-28T09:34:17.061225Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} Tx{20, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0}
2025-03-28T09:34:17.062003Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:19} commited cookie 1 for step 18
2025-03-28T09:34:17.062296Z node 26 :PIPE_CLIENT DEBUG: TClient[72057594046382081] send [26:557:2486]
2025-03-28T09:34:17.062401Z node 26 :PIPE_CLIENT DEBUG: TClient[72057594046382081] push event to server [26:557:2486]
2025-03-28T09:34:17.087306Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:23} commited cookie 8 for step 22
2025-03-28T09:34:17.118962Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:24} commited cookie 8 for step 23
2025-03-28T09:34:17.151405Z node 26 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:25} commited cookie 8 for step 24
>> TNodeBrokerTest::SingleDomainModeBannedIds
>> TIterator::Mixed [GOOD]
>> TIterator::MixedReverse
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestart [GOOD]
Test command err:
2025-03-28T09:34:17.782627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:17.782718Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:34:17.799196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944
FAKE_COORDINATOR: Add transaction: 101 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001
FAKE_COORDINATOR: Erasing txId 101
|94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NoEffectBeforeCommit [GOOD]
>> TSharedPageCache::BigCache_FlatIndex [GOOD]
>> TSharedPageCache::MiddleCache_BTreeIndex
>> TSlotIndexesPoolTest::Expansion [GOOD]
>> TDynamicNameserverTest::CacheMissSameDeadline
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD]
Test command err:
2025-03-28T09:34:16.310773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:16.310875Z node 1 :IMPORT WARN: Table profiles were not loaded
>> GracefulShutdown::TTxGracefulShutdown
>> TSlotIndexesPoolTest::Ranges [GOOD]
|94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TPartBtreeIndexIteration::OneNode_Groups [GOOD]
>> TPartBtreeIndexIteration::OneNode_History
>> TSharedPageCache::MiddleCache_BTreeIndex [GOOD]
>> TSharedPageCache::MiddleCache_FlatIndex
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NoEffectBeforeCommit [GOOD]
Test command err:
2025-03-28T09:34:18.382035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:18.382102Z node 1 :IMPORT WARN: Table profiles were not loaded
... waiting for commit
... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR
... waiting for commit (done)
... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR
|94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Expansion [GOOD]
|94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Ranges [GOOD]
>> TSharedPageCache::MiddleCache_FlatIndex [GOOD]
>> TSharedPageCache::ZeroCache_BTreeIndex
>> TDynamicNameserverTest::CacheMissSameDeadline [GOOD]
|94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TNodeBrokerTest::MinDynamicNodeIdShifted
>> DBase::KIKIMR_15598_Many_MemTables [GOOD]
>> TNodeBrokerTest::RegistrationPipelining
>> TNodeBrokerTest::ConfigPipelining [GOOD]
>> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline [GOOD]
Test command err:
2025-03-28T09:34:20.696130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:20.696207Z node 1 :IMPORT WARN: Table profiles were not loaded
... waiting for cache miss
... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR
... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR
... waiting for cache miss (done)
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeCdcStream [GOOD]
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBlobDepot [GOOD]
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeBackupCollection [GOOD]
>> BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD]
>> TNodeBrokerTest::LoadStateMoveEpoch
>> TNodeBrokerTest::RegistrationPipeliningNodeName
>> TSharedPageCache::ZeroCache_BTreeIndex [GOOD]
>> TSharedPageCache::ZeroCache_FlatIndex
>> TDynamicNameserverTest::ListNodesCacheWhenNoChanges
>> BackupRestore::RestoreViewReferenceTable [GOOD]
>> BackupRestore::RestoreViewToDifferentDatabase
|94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD]
Test command err:
2025-03-28T09:34:20.311196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:20.311267Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:34:20.419740Z node 1 :NODE_BROKER ERROR: Cannot register node host3:1001: ERROR_TEMP: No free node IDs
2025-03-28T09:34:20.447574Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs
2025-03-28T09:34:20.461199Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node ID is banned
2025-03-28T09:34:21.308545Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs
2025-03-28T09:34:21.330839Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ConfigPipelining [GOOD]
Test command err:
2025-03-28T09:34:19.616193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:19.616290Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:34:19.703607Z node 1 :NODE_BROKER ERROR: Cannot register node host1:1001: ERROR_TEMP: No free node IDs
... waiting for commit
... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR
... waiting for commit (done)
... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR
|94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TSharedPageCache::ZeroCache_FlatIndex [GOOD]
>> TSwitchableCache::Touch [GOOD]
>> TSwitchableCache::Erase [GOOD]
>> TSwitchableCache::EvictNext [GOOD]
>> TSwitchableCache::UpdateLimit [GOOD]
>> TSwitchableCache::Switch_Touch_RotatePages_All [GOOD]
>> TSwitchableCache::Switch_Touch_RotatePages_Parts [GOOD]
>> TSwitchableCache::Switch_RotatePages_Force [GOOD]
>> TSwitchableCache::Switch_RotatePages_Evicts [GOOD]
>> TSwitchableCache::Switch_Touch [GOOD]
>> TSwitchableCache::Switch_Erase [GOOD]
>> TSwitchableCache::Switch_EvictNext [GOOD]
>> TSwitchableCache::Switch_UpdateLimit [GOOD]
>> TVersions::WreckHead
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD]
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence
>> TTicketParserTest::AuthenticationUnsupported [GOOD]
>> TTicketParserTest::AuthenticationUnknown
|94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TNodeBrokerTest::FixedNodeId
>> TDynamicNameserverTest::ListNodesCacheWhenNoChanges [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllIndexTypes-EIndexTypeInvalid [GOOD]
Test command err:
2025-03-28T09:34:10.927223Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791357552223384:2078];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:34:10.927313Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001754/r3tmp/tmpV4rJp8/pdisk_1.dat
2025-03-28T09:34:11.440810Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:34:11.464521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:34:11.464626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:34:11.470585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 7042, node 1
2025-03-28T09:34:11.647546Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:34:11.647589Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:34:11.647602Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:34:11.647721Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:27229
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:12.284274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... Backup "/Root" to "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmpmgr7Xk/"Create temporary directory "/Root/~backup_20250328T093412" in databaseProcess "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmpmgr7Xk/dir"Create directory "/Root/~backup_20250328T093412/dir" in databaseWrite ACL into "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmpmgr7Xk/dir/permissions.pb"Remove directory "/Root/~backup_20250328T093412/dir"2025-03-28T09:34:12.646153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710661:0, at schemeshard: 72057594046644480 Remove temporary directory "/Root/~backup_20250328T093412" in database2025-03-28T09:34:12.690120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710662:0, at schemeshard: 72057594046644480 Backup completed successfully2025-03-28T09:34:12.713757Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710663:0, at schemeshard: 72057594046644480 Restore "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmpmgr7Xk/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmpmgr7Xk/" to "/Root"Process "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmpmgr7Xk/dir"Restore empty directory "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmpmgr7Xk/dir" to "/Root/dir"Restore ACL "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmpmgr7Xk/dir" to "/Root/dir"Read ACL from "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmpmgr7Xk/dir/permissions.pb"2025-03-28T09:34:12.803193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-03-28T09:34:15.552273Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486791376714332929:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:15.555259Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001754/r3tmp/tmpeVnCcd/pdisk_1.dat 2025-03-28T09:34:15.718446Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:15.759549Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:15.759675Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:15.764363Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10487, node 4 2025-03-28T09:34:15.819342Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:15.819364Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:15.819373Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:15.819557Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15042 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:16.085808Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:18.735408Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486791389599235717:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:18.735564Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:18.953525Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:34:19.102351Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486791393894203197:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:19.102453Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:19.233889Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7486791393894203378:2359] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:4:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-28T09:34:19.285055Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486791393894203480:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:19.285135Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:19.336102Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7486791393894203660:2380] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:6:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-28T09:34:19.360584Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486791393894203764:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:19.360679Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:19.412640Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][4:7486791393894203936:2400] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:8:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } GetChangefeedAndTopicDescriptions: Backup "/Root" to "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/"Create temporary directory "/Root/~backup_20250328T093419" in databaseProcess "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250328T093419/table" }Backup table "/Root/~backup_20250328T093419/table" to "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table"Describe table "/Root/~backup_20250328T093419/table"Write scheme into "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/scheme.pb"Describe table "/Root/table"Process "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/a"Write changefeed into "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/a/changefeed_description.pb"Write topic into "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/a/topic_description.pb"Process "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/b"Write changefeed into "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/b/changefeed_description.pb"Write topic into "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/b/topic_description.pb"Process "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/c"Write changefeed into "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/c/changefeed_description.pb"Write topic into "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/c/topic_description.pb"Write ACL into "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/permissions.pb"Read table "/Root/~backup_20250328T093419/table"Write data into "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/data_00.csv"Drop table "/Root/~backup_20250328T093419/table"2025-03-28T09:34:19.825851Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037895 not found Remove temporary directory "/Root/~backup_20250328T093419" in database2025-03-28T09:34:19.848860Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480 Backup completed successfully2025-03-28T09:34:19.872128Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486791393894204490:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:19.872202Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:19.884380Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710668:2, at schemeshard: 72057594046644480 2025-03-28T09:34:19.914182Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2025-03-28T09:34:19.914221Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2025-03-28T09:34:19.914234Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037893 not found 2025-03-28T09:34:19.914246Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037892 not found 2025-03-28T09:34:19.914272Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037894 not found 2025-03-28T09:34:19.914292Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found 2025-03-28T09:34:19.924960Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 2025-03-28T09:34:19.925090Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2025-03-28T09:34:19.925127Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2025-03-28T09:34:19.925162Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2025-03-28T09:34:19.925198Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2025-03-28T09:34:19.925229Z node 4 :HIVE WARN: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found 2025-03-28T09:34:19.953269Z node 4 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found Restore "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/" to "/Root"Process "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table"Read scheme from "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table" to "/Root/table"2025-03-28T09:34:20.006132Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/data_00.csv"Process "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/c"Read changefeed from "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/c/changefeed_description.pb"Read topic from "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/c/topic_description.pb"2025-03-28T09:34:20.118385Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][4:7486791398189172345:2480] Failed entry at 'ResolveCdcStream': entry# { Path: TableId: [72057594046644480:12:0] RequestType: 
ByTableId Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } Created "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/c"Process "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/a"Read changefeed from "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/a/changefeed_description.pb"Read topic from "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/a/topic_description.pb"2025-03-28T09:34:20.176845Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][4:7486791398189172605:2495] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:15:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } Created "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/a"Process "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/b"Read changefeed from "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/b/changefeed_description.pb"Read topic from "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/b/topic_description.pb"2025-03-28T09:34:20.277152Z node 4 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037896:1][4:7486791398189172875:2510] Failed entry at 'ResolveCdcStream': entry# { Path: TableId: [72057594046644480:16:0] RequestType: ByTableId Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } Created "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/b"Restore ACL "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/2909/001754/r3tmp/tmptZI2zi/table/permissions.pb"2025-03-28T09:34:20.301016Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710673:0, at schemeshard: 72057594046644480 Restore completed successfully >> TNodeBrokerTest::NodeNameExpiration [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTableIndex [GOOD] >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence >> TTicketParserTest::TicketFromCertificateCheckIssuerBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationBad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> DBase::KIKIMR_15598_Many_MemTables [GOOD] Test command err: 3 parts: [0:0:1:0:0:0:0] 150 rows, 7 pages, 1 levels: (286, 103) (607, 210) (811, 278) (1315, 446) (1540, 521) [0:0:2:0:0:0:0] 197 rows, 9 pages, 2 levels: (253, 92) (577, 200) (742, 255) (1156, 393) (1594, 539) [0:0:3:0:0:0:0] 153 rows, 7 pages, 1 levels: (199, 74) (514, 179) (769, 264) (1291, 438) (1555, 526) Checking BTree: Touched 100% bytes, 5 pages RowCountHistogram: 2% (actual 0%) key = (10, 11) value = 12 (actual 1 - 2% error) 2% (actual 0%) key = (16, 13) value = 24 (actual 2 - 4% error) 5% (actual 11%) key = (199, 74) value = 49 (actual 61 - -2% error) 4% (actual 2%) key = (253, 92) value = 73 (actual 74 - 0% error) 4% (actual 2%) key = (286, 103) value = 97 (actual 84 - 2% error) 4% (actual 8%) key = (418, 147) value = 120 (actual 125 - -1% error) 4% (actual 5%) key = (514, 179) value = 144 (actual 154 - -2% error) 5% (actual 4%) key = (577, 200) value = 169 (actual 174 - -1% error) 
4% (actual 1%) key = (607, 210) value = 192 (actual 183 - 1% error) 4% (actual 8%) key = (742, 255) value = 214 (actual 226 - -2% error) 5% (actual 1%) key = (769, 264) value = 239 (actual 235 - 0% error) 4% (actual 2%) key = (811, 278) value = 262 (actual 248 - 2% error) 4% (actual 9%) key = (958, 327) value = 286 (actual 293 - -1% error) 5% (actual 5%) key = (1054, 359) value = 311 (actual 322 - -2% error) 4% (actual 2%) key = (1087, 370) value = 334 (actual 332 - 0% error) 4% (actual 4%) key = (1156, 393) value = 358 (actual 354 - 0% error) 4% (actual 8%) key = (1291, 438) value = 381 (actual 394 - -2% error) 4% (actual 1%) key = (1315, 446) value = 404 (actual 401 - 0% error) 4% (actual 3%) key = (1375, 466) value = 426 (actual 419 - 1% error) 4% (actual 10%) key = (1540, 521) value = 449 (actual 469 - -4% error) 3% (actual 1%) key = (1555, 526) value = 465 (actual 474 - -1% error) 3% (actual 2%) key = (1594, 539) value = 482 (actual 484 - 0% error) 1% (actual 2%) key = (1636, 553) value = 491 (actual 497 - -1% error) 1% (actual 0%) key = (1639, 554) value = 496 (actual 498 - 0% error) 0% (actual 0%) DataSizeHistogram: 2% (actual 13%) key = (10, 11) value = 950 (actual 5800 - -11% error) 2% (actual 0%) key = (16, 13) value = 1933 (actual 5800 - -9% error) 4% (actual 0%) key = (199, 74) value = 3866 (actual 5800 - -4% error) 4% (actual 9%) key = (253, 92) value = 5849 (actual 9821 - -9% error) 4% (actual 4%) key = (286, 103) value = 7810 (actual 11827 - -9% error) 4% (actual 4%) key = (418, 147) value = 9825 (actual 13848 - -9% error) 4% (actual 4%) key = (514, 179) value = 11834 (actual 15888 - -9% error) 4% (actual 4%) key = (577, 200) value = 13865 (actual 17883 - -9% error) 4% (actual 4%) key = (607, 210) value = 15865 (actual 19876 - -9% error) 4% (actual 4%) key = (742, 255) value = 17859 (actual 21881 - -9% error) 4% (actual 4%) key = (769, 264) value = 19882 (actual 23918 - -9% error) 4% (actual 0%) key = (811, 278) value = 21897 (actual 23918 - -4% error) 4% (actual 9%) key = (958, 327) value = 23894 (actual 27913 - -9% error) 4% (actual 4%) key = (1054, 359) value = 25915 (actual 29895 - -9% error) 4% (actual 0%) key = (1087, 370) value = 27901 (actual 29895 - -4% error) 4% (actual 4%) key = (1156, 393) value = 29881 (actual 31850 - -4% error) 4% (actual 4%) key = (1291, 438) value = 31821 (actual 33747 - -4% error) 4% (actual 4%) key = (1315, 446) value = 33794 (actual 35739 - -4% error) 4% (actual 9%) key = (1375, 466) value = 35737 (actual 39763 - -9% error) 4% (actual 4%) key = (1540, 521) value = 37749 (actual 41447 - -8% error) 3% (actual 0%) key = (1555, 526) value = 39198 (actual 41447 - -5% error) 3% (actual 1%) key = (1594, 539) value = 40605 (actual 42020 - -3% error) 1% (actual 0%) key = (1636, 553) value = 41344 (actual 42020 - -1% error) 0% (actual 0%) key = (1639, 554) value = 41733 (actual 42020 - 0% error) 0% (actual 0%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 4% (actual 0%) key = (10, 11) value = 24 (actual 1 - 4% error) 5% (actual 0%) key = (16, 13) value = 49 (actual 2 - 9% error) 5% (actual 11%) key = (199, 74) value = 74 (actual 61 - 2% error) 4% (actual 2%) key = (253, 92) value = 97 (actual 74 - 4% error) 4% (actual 2%) key = (286, 103) value = 120 (actual 84 - 7% error) 4% (actual 8%) key = (418, 147) value = 144 (actual 125 - 3% error) 5% (actual 5%) key = (514, 179) value = 169 (actual 154 - 3% error) 5% (actual 4%) key = (577, 200) value = 194 (actual 174 - 4% error) 4% (actual 1%) key = (607, 210) value = 216 (actual 183 - 6% 
error) 4% (actual 8%) key = (742, 255) value = 237 (actual 226 - 2% error) 5% (actual 1%) key = (769, 264) value = 262 (actual 235 - 5% error) 5% (actual 2%) key = (811, 278) value = 287 (actual 248 - 7% error) 4% (actual 9%) key = (958, 327) value = 311 (actual 293 - 3% error) 4% (actual 5%) key = (1054, 359) value = 335 (actual 322 - 2% error) 4% (actual 2%) key = (1087, 370) value = 358 (actual 332 - 5% error) 4% (actual 4%) key = (1156, 393) value = 382 (actual 354 - 5% error) 4% (actual 8%) key = (1291, 438) value = 404 (actual 394 - 2% error) 4% (actual 1%) key = (1315, 446) value = 426 (actual 401 - 5% error) 4% (actual 3%) key = (1375, 466) value = 448 (actual 419 - 5% error) 4% (actual 10%) key = (1540, 521) value = 472 (actual 469 - 0% error) 2% (actual 1%) key = (1555, 526) value = 483 (actual 474 - 1% error) 2% (actual 2%) key = (1594, 539) value = 493 (actual 484 - 1% error) 1% (actual 3%) DataSizeHistogram: 4% (actual 13%) key = (10, 11) value = 1900 (actual 5800 - -9% error) 4% (actual 0%) key = (16, 13) value = 3867 (actual 5800 - -4% error) 4% (actual 0%) key = (199, 74) value = 5800 (actual 5800 - 0% error) 4% (actual 9%) key = (253, 92) value = 7798 (actual 9821 - -4% error) 4% (actual 4%) key = (286, 103) value = 9821 (actual 11827 - -4% error) 4% (actual 4%) key = (418, 147) value = 11827 (actual 13848 - -4% error) 4% (actual 4%) key = (514, 179) value = 13848 (actual 15888 - -4% error) 4% (actual 4%) key = (577, 200) value = 15888 (actual 17883 - -4% error) 4% (actual 4%) key = (607, 210) value = 17883 (actual 19876 - -4% error) 4% (actual 4%) key = (742, 255) value = 19876 (actual 21881 - -4% error) 4% (actual 4%) key = (769, 264) value = 21881 (actual 23918 - -4% error) 4% (actual 0%) key = (811, 278) value = 23918 (actual 23918 - 0% error) 4% (actual 9%) key = (958, 327) value = 25907 (actual 27913 - -4% error) 4% (actual 4%) key = (1054, 359) value = 27913 (actual 29895 - -4% error) 4% (actual 0%) key = (1087, 370) value = 29895 (actual 29895 - 0% error) 4% (actual 4%) key = (1156, 393) value = 31850 (actual 31850 - 0% error) 4% (actual 4%) key = (1291, 438) value = 33747 (actual 33747 - 0% error) 4% (actual 4%) key = (1315, 446) value = 35739 (actual 35739 - 0% error) 4% (actual 9%) key = (1375, 466) value = 37727 (actual 39763 - -4% error) 4% (actual 4%) key = (1540, 521) value = 39763 (actual 41447 - -4% error) 2% (actual 0%) key = (1555, 526) value = 40669 (actual 41447 - -1% error) 1% (actual 1%) key = (1594, 539) value = 41447 (actual 42020 - -1% error) 1% (actual 0%) Checking Mixed: Touched 100% bytes, 5 pages RowCountHistogram: 14% (actual 12%) key = (199, 74) value = 74 (actual 61 - 2% error) 4% (actual 2%) key = (253, 92) value = 97 (actual 74 - 4% error) 4% (actual 2%) key = (286, 103) value = 120 (actual 84 - 7% error) 4% (actual 8%) key = (418, 147) value = 144 (actual 125 - 3% error) 5% (actual 5%) key = (514, 179) value = 169 (actual 154 - 3% error) 5% (actual 4%) key = (577, 200) value = 194 (actual 174 - 4% error) 4% (actual 1%) key = (607, 210) value = 216 (actual 183 - 6% error) 4% (actual 8%) key = (742, 255) value = 237 (actual 226 - 2% error) 5% (actual 1%) key = (769, 264) value = 262 (actual 235 - 5% error) 5% (actual 2%) key = (811, 278) value = 287 (actual 248 - 7% error) 4% (actual 9%) key = (958, 327) value = 311 (actual 293 - 3% error) 4% (actual 5%) key = (1054, 359) value = 335 (actual 322 - 2% error) 4% (actual 2%) key = (1087, 370) value = 358 (actual 332 - 5% error) 4% (actual 4%) key = (1156, 393) value = 382 (actual 354 - 5% 
error) 4% (actual 8%) key = (1291, 438) value = 404 (actual 394 - 2% error) 4% (actual 1%) key = (1315, 446) value = 426 (actual 401 - 5% error) 4% (actual 3%) key = (1375, 466) value = 448 (actual 419 - 5% error) 4% (actual 10%) key = (1540, 521) value = 472 (actual 469 - 0% error) 2% (actual 1%) key = (1555, 526) value = 483 (actual 474 - 1% error) 2% (actual 2%) key = (1594, 539) value = 493 (actual 484 - 1% error) 1% (actual 3%) DataSizeHistogram: 13% (actual 13%) key = (199, 74) value = 5800 (actual 5800 - 0% error) 4% (actual 9%) key = (253, 92) value = 7798 (actual 9821 - -4% error) 4% (actual 4%) key = (286, 103) value = 9821 (actual 11827 - -4% error) 4% (actual 4%) key = (418, 147) value = 11827 (actual 13848 - -4% error) 4% (actual 4%) key = (514, 179) value = 13848 (actual 15888 - -4% error) 4% (actual 4%) key = (577, 200) value = 15888 (actual 17883 - -4% error) 4% (actual 4%) key = (607, 210) value = 17883 (actual 19876 - -4% error) 4% (actual 4%) key = (742, 255) value = 19876 (actual 21881 - -4% error) 4% (actual 4%) key = (769, 264) value = 21881 (actual 23918 - -4% error) 4% (actual 0%) key = (811, 278) value = 23918 (actual 23918 - 0% error) 4% (actual 9%) key = (958, 327) value = 25907 (actual 27913 - -4% error) 4% (actual 4%) key = (1054, 359) value = 27913 (actual 29895 - -4% error) 4% (actual 0%) key = (1087, 370) value = 29895 (actual 29895 - 0% error) 4% (actual 4%) key = (1156, 393) value = 31850 (actual 31850 - 0% error) 4% (actual 4%) key = (1291, 438) value = 33747 (actual 33747 - 0% error) 4% (actual 4%) key = (1315, 446) value = 35739 (actual 35739 - 0% error) 4% (actual 9%) key = (1375, 466) value = 37727 (actual 39763 - -4% error) 4% (actual 4%) key = (1540, 521) value = 39763 (actual 41447 - -4% error) 2% (actual 0%) key = (1555, 526) value = 40669 (actual 41447 - -1% error) 1% (actual 1%) key = (1594, 539) value = 41447 (actual 42020 - -1% error) 1% (actual 0%) 3 parts: [0:0:1:0:0:0:0] 150 rows, 7 pages, 1 levels: (286, 103) (607, 210) (811, 278) (1315, 446) (1540, 521) [0:0:2:0:0:0:0] 197 rows, 9 pages, 2 levels: (253, 92) (577, 200) (742, 255) (1156, 393) (1594, 539) [0:0:3:0:0:0:0] 153 rows, 7 pages, 1 levels: (199, 74) (514, 179) (769, 264) (1291, 438) (1555, 526) Checking BTree: Touched 100% bytes, 5 pages RowCountHistogram: 19% (actual 16%) key = (286, 103) value = 97 (actual 84 - 2% error) 19% (actual 19%) key = (607, 210) value = 192 (actual 183 - 1% error) 18% (actual 22%) key = (958, 327) value = 286 (actual 293 - -1% error) 19% (actual 20%) key = (1291, 438) value = 381 (actual 394 - -2% error) 23% (actual 21%) DataSizeHistogram: 18% (actual 28%) key = (286, 103) value = 7810 (actual 11827 - -9% error) 19% (actual 19%) key = (607, 210) value = 15865 (actual 19876 - -9% error) 19% (actual 19%) key = (958, 327) value ... 
85 - 0% error)
4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error)
4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error)
4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error)
4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error)
4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error)
4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error)
4% (actual 4%) key = (1087, 370) value = 27765 (actual 27678 - 0% error)
4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error)
4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error)
4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error)
4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error)
4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error)
4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error)
4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error)
0% (actual 0%)
Checking Flat: Touched 100% bytes, 3 pages
RowCountHistogram:
5% (actual 5%) key = (91, 38) value = 25 (actual 25 - 0% error)
5% (actual 5%) key = (166, 63) value = 50 (actual 50 - 0% error)
4% (actual 4%) key = (253, 92) value = 74 (actual 74 - 0% error)
4% (actual 4%) key = (325, 116) value = 96 (actual 96 - 0% error)
4% (actual 4%) key = (394, 139) value = 119 (actual 119 - 0% error)
5% (actual 5%) key = (481, 168) value = 144 (actual 144 - 0% error)
4% (actual 4%) key = (556, 193) value = 167 (actual 167 - 0% error)
4% (actual 4%) key = (631, 218) value = 191 (actual 191 - 0% error)
4% (actual 4%) key = (709, 244) value = 215 (actual 215 - 0% error)
3% (actual 3%) key = (766, 263) value = 234 (actual 234 - 0% error)
5% (actual 5%) key = (853, 292) value = 261 (actual 261 - 0% error)
4% (actual 4%) key = (934, 319) value = 285 (actual 285 - 0% error)
4% (actual 4%) key = (1006, 343) value = 309 (actual 309 - 0% error)
4% (actual 4%) key = (1087, 370) value = 332 (actual 332 - 0% error)
0% (actual 0%) key = (1090, 371) value = 333 (actual 333 - 0% error)
4% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error)
5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error)
4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error)
4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error)
4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error)
4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error)
5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error)
0% (actual 0%)
DataSizeHistogram:
4% (actual 4%) key = (91, 38) value = 1974 (actual 1974 - 0% error)
4% (actual 4%) key = (166, 63) value = 3992 (actual 3992 - 0% error)
4% (actual 4%) key = (253, 92) value = 5889 (actual 5889 - 0% error)
4% (actual 4%) key = (325, 116) value = 7868 (actual 7868 - 0% error)
4% (actual 4%) key = (394, 139) value = 9910 (actual 9910 - 0% error)
4% (actual 4%) key = (481, 168) value = 11938 (actual 11938 - 0% error)
4% (actual 4%) key = (556, 193) value = 13685 (actual 13685 - 0% error)
4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error)
4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error)
4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error)
4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error)
4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error)
4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error)
4% (actual 4%) key = (1087, 370) value = 27678 (actual 27678 - 0% error)
0% (actual 0%) key = (1090, 371) value = 27765 (actual 27765 - 0% error)
4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error)
4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error)
4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error)
4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error)
4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error)
4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error)
4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error)
0% (actual 0%)
Checking Mixed: Touched 100% bytes, 3 pages
RowCountHistogram:
14% (actual 5%) key = (91, 38) value = 70 (actual 25 - 9% error)
5% (actual 5%) key = (166, 63) value = 95 (actual 50 - 9% error)
4% (actual 4%) key = (253, 92) value = 119 (actual 74 - 9% error)
4% (actual 4%) key = (325, 116) value = 141 (actual 96 - 9% error)
4% (actual 4%) key = (394, 139) value = 164 (actual 119 - 9% error)
5% (actual 5%) key = (481, 168) value = 189 (actual 144 - 9% error)
4% (actual 9%) key = (631, 218) value = 212 (actual 191 - 4% error)
4% (actual 4%) key = (709, 244) value = 236 (actual 215 - 4% error)
3% (actual 3%) key = (766, 263) value = 255 (actual 234 - 4% error)
5% (actual 5%) key = (853, 292) value = 282 (actual 261 - 4% error)
4% (actual 4%) key = (934, 319) value = 306 (actual 285 - 4% error)
4% (actual 4%) key = (1006, 343) value = 330 (actual 309 - 4% error)
4% (actual 4%) key = (1087, 370) value = 353 (actual 332 - 4% error)
0% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error)
5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error)
4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error)
4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error)
4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error)
4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error)
5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error)
0% (actual 0%)
DataSizeHistogram:
14% (actual 4%) key = (91, 38) value = 5939 (actual 1974 - 9% error)
4% (actual 4%) key = (166, 63) value = 7957 (actual 3992 - 9% error)
4% (actual 4%) key = (253, 92) value = 9854 (actual 5889 - 9% error)
4% (actual 4%) key = (325, 116) value = 11833 (actual 7868 - 9% error)
4% (actual 4%) key = (394, 139) value = 13875 (actual 9910 - 9% error)
4% (actual 4%) key = (481, 168) value = 15903 (actual 11938 - 9% error)
4% (actual 8%) key = (631, 218) value = 17650 (actual 15674 - 4% error)
4% (actual 4%) key = (709, 244) value = 19685 (actual 17709 - 4% error)
4% (actual 4%) key = (766, 263) value = 21640 (actual 19664 - 4% error)
4% (actual 4%) key = (853, 292) value = 23649 (actual 21673 - 4% error)
4% (actual 4%) key = (934, 319) value = 25688 (actual 23712 - 4% error)
4% (actual 4%) key = (1006, 343) value = 27663 (actual 25687 - 4% error)
4% (actual 4%) key = (1087, 370) value = 29654 (actual 27678 - 4% error)
0% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error)
4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error)
4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error)
4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error)
4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error)
4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error)
4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error)
0% (actual 0%)
3 parts:
[0:0:1:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () ()
[0:0:2:0:0:0:0] 166 rows, 1 pages, 0 levels: () () () () ()
[0:0:3:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () ()
Checking BTree: Touched 0% bytes, 0 pages
RowCountHistogram:
33% (actual 33%) key = (553, 192) value = 167 (actual 166 - 0% error)
33% (actual 33%) key = (1087, 370) value = 333 (actual 332 - 0% error)
33% (actual 33%)
DataSizeHistogram:
32% (actual 32%) key = (553, 192) value = 13565 (actual 13565 - 0% error)
33% (actual 33%) key = (1087, 370) value = 27505 (actual 27505 - 0% error)
33% (actual 33%)
Checking Flat: Touched 100% bytes, 3 pages
RowCountHistogram:
33% (actual 33%) key = (556, 193) value = 167 (actual 167 - 0% error)
33% (actual 33%) key = (1090, 371) value = 333 (actual 333 - 0% error)
33% (actual 33%)
DataSizeHistogram:
32% (actual 32%) key = (556, 193) value = 13565 (actual 13565 - 0% error)
33% (actual 33%) key = (1090, 371) value = 27505 (actual 27505 - 0% error)
33% (actual 33%)
Checking Mixed: Touched 0% bytes, 0 pages
RowCountHistogram: 100% (actual 100%)
DataSizeHistogram: 100% (actual 100%)
Got : 24000 2106439 49449 38 44
Expected: 24000 2106439 49449 38 44
{ [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) }
Got : 12816 1121048 49449 20 23
Expected: 12816 1121048 49449 20 23
Got : 24000 3547100 81694 64 44
Expected: 24000 3547100 81694 64 44
{ [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) }
Got : 9582 1425198 81694 26 17
Expected: 9582 1425198 81694 26 17
Got : 24000 2460139 23760 42 41
Expected: 24000 2460139 23760 42 41
{ [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) }
Got : 10440 1060798 23760 18 18
Expected: 10440 1060798 23760 18 18
Got : 24000 4054050 46562 68 43
Expected: 24000 4054050 46562 68 43
{ [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) }
Got : 13570 2277890 46562 38 24
Expected: 13570 2277890 46562 38 24
Got : 24000 2106459 49449 38 44
Expected: 24000 2106459 49449 38 44
Got : 24000 2460219 23555 41 41
Expected: 24000 2460219 23555 41 41
Got : 24000 4054270 46543 66 43
Expected: 24000 4054270 46543 66 43
Got : 24000 2106479 49555 38 44
Expected: 24000 2106479 49555 38 44
Got : 24000 2460259 23628 41 41
Expected: 24000 2460259 23628 41 41
Got : 24000 4054290 46640 65 43
Expected: 24000 4054290 46640 65 43
Got : 24000 2106439 66674 3 4
Expected: 24000 2106439 66674 3 4
{ [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) }
Got : 12816 1121048 66674 2 2
Expected: 12816 1121048 66674 2 2
Got : 24000 2460139 33541 4 4
Expected: 24000 2460139 33541 4 4
{ [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) }
Got : 10440 1060798 33541 1 1
Expected: 10440 1060798 33541 1 1
Got : 24000 4054050 64742 7 4
Expected: 24000 4054050 64742 7 4
{ [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) }
Got : 13570 2234982 64742 4 2
Expected: 13570 2234982 64742 4 2
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> GracefulShutdown::TTxGracefulShutdown [GOOD]
>> TChargeBTreeIndex::FewNodes_Groups_History [GOOD]
>> TChargeBTreeIndex::FewNodes_Groups_History_Sticky
>> TChargeBTreeIndex::OneNode_Groups_History [GOOD]
>> TClockProCache::Touch [GOOD]
>> TClockProCache::Lifecycle [GOOD]
>> TClockProCache::EvictNext [GOOD]
>> TClockProCache::UpdateLimit [GOOD]
>> TClockProCache::Erase [GOOD]
>> TClockProCache::Random [GOOD]
>> TCompaction::OneMemtable [GOOD]
>> TCompaction::ManyParts
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges [GOOD]
Test command err:
2025-03-28T09:34:22.244379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:22.244437Z node 1 :IMPORT WARN: Table profiles were not loaded
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> GracefulShutdown::TTxGracefulShutdown [GOOD]
Test command err:
2025-03-28T09:34:21.041946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:21.042022Z node 1 :IMPORT WARN: Table profiles were not loaded
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameExpiration [GOOD]
Test command err:
2025-03-28T09:34:19.317212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:19.317278Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:34:19.335221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944
FAKE_COORDINATOR: Add transaction: 101 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001
FAKE_COORDINATOR: Erasing txId 101
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TPartBtreeIndexIteration::OneNode_History [GOOD]
>> TPartBtreeIndexIteration::OneNode_Slices
>> TNodeBrokerTest::ExtendLeaseRestartRace
>> BuildStatsHistogram::Single [GOOD]
>> BuildStatsHistogram::Single_Slices
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TNodeBrokerTest::RegistrationPipelining [GOOD]
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TCompaction::ManyParts [GOOD]
>> TCompaction::BootAbort
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipelining [GOOD]
Test command err:
2025-03-28T09:34:21.937633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:21.937688Z node 1 :IMPORT WARN: Table profiles were not loaded
... waiting for commit ...
blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR
... waiting for commit (done)
... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR
>> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD]
>> TCompaction::BootAbort [GOOD]
>> TCompaction::Defaults [GOOD]
>> TCompaction::Merges [GOOD]
>> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD]
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TCompactionMulti::ManyParts
>> TNodeBrokerTest::NodeNameWithDifferentTenants
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD]
Test command err:
2025-03-28T09:34:22.280213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:22.280276Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:34:22.297914Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944
FAKE_COORDINATOR: Add transaction: 101 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001
FAKE_COORDINATOR: Erasing txId 101
2025-03-28T09:34:22.336044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944
FAKE_COORDINATOR: Add transaction: 102 at step: 5000002
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002
FAKE_COORDINATOR: Erasing txId 102
... waiting for commit
... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR
... waiting for commit (done) ...
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR
>> TPartBtreeIndexIteration::OneNode_Slices [GOOD]
>> TPartBtreeIndexIteration::OneNode_Groups_Slices
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TIterator::MixedReverse [GOOD]
>> TIterator::Serial
>> TLocalTests::TestAddTenant
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD]
Test command err:
2025-03-28T09:34:21.856302Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:21.856364Z node 1 :IMPORT WARN: Table profiles were not loaded
>> TCompactionMulti::ManyParts [GOOD]
>> TCompactionMulti::MainPageCollectionEdge
>> TNodeBrokerTest::LoadStateMoveEpoch [GOOD]
>> TNodeBrokerTest::FixedNodeId [GOOD]
>> TDynamicNameserverTest::CacheMissPipeDisconnect
>> TNodeBrokerTest::BasicFunctionality
>> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId
>> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD]
Test command err:
2025-03-28T09:34:22.253149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:22.253214Z node 1 :IMPORT WARN: Table profiles were not loaded
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId [GOOD]
Test command err:
2025-03-28T09:34:23.142931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:23.143015Z node 1 :IMPORT WARN: Table profiles were not loaded
>> TTicketParserTest::AuthenticationUnknown [GOOD]
>> TTicketParserTest::Authorization
>> TLocalTests::TestAlterTenant
>> TLocalTests::TestAddTenant [GOOD]
>> TDynamicNameserverTest::CacheMissPipeDisconnect [GOOD]
|94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TNodeBrokerTest::ExtendLeasePipelining
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenant [GOOD]
Test command err:
2025-03-28T09:34:25.348323Z node 1 :LOCAL ERROR: TDomainLocal(dc-1): Receive TEvDescribeSchemeResult with bad status StatusPathDoesNotExist reason is <> while resolving subdomain dc-1
2025-03-28T09:34:25.348568Z node 1 :LOCAL ERROR: Unknown domain dc-3
|94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TNodeBrokerTest::TestRandomActions
>> TTicketParserTest::TicketFromCertificateWithValidationBad [GOOD]
>> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissPipeDisconnect [GOOD]
Test command err:
2025-03-28T09:34:25.327160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:25.327217Z node 1 :IMPORT WARN: Table profiles were not loaded
... waiting for cache miss
... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR
...
waiting for cache miss (done)
|94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TDynamicNameserverTest::BasicFunctionality
>> TNodeBrokerTest::TestListNodes
>> TSlotIndexesPoolTest::Init [GOOD]
>> TLocalTests::TestAlterTenant [GOOD]
>> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD]
|94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Init [GOOD]
>> TTenantPoolTests::TestStateStatic
>> TPartBtreeIndexIteration::OneNode_Groups_Slices [GOOD]
>> TPartBtreeIndexIteration::OneNode_History_Slices
|94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
|94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAlterTenant [GOOD]
>> TCompactionMulti::MainPageCollectionEdge [GOOD]
>> TCompactionMulti::MainPageCollectionEdgeMany
>> BuildStatsHistogram::Single_Slices [GOOD]
>> BuildStatsHistogram::Single_History
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD]
Test command err:
2025-03-28T09:34:24.956615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:24.956683Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:34:24.974190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944
FAKE_COORDINATOR: Add transaction: 101 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001
FAKE_COORDINATOR: Erasing txId 101
2025-03-28T09:34:25.014375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944
FAKE_COORDINATOR: Add transaction: 102 at step: 5000002
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002
FAKE_COORDINATOR: Erasing txId 102
>> TIterator::Serial [GOOD]
>> TIterator::SerialReverse
>> TNodeBrokerTest::UpdateEpochPipelining
>> TTenantPoolTests::TestStateStatic [GOOD]
>> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD]
>> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD]
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD]
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeResourcePool [GOOD]
>> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD]
>> TColumnShardTestSchema::RebootHotTiers [GOOD]
|94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestStateStatic [GOOD]
>> TCompactionMulti::MainPageCollectionEdge [GOOD]
>> TCompactionMulti::MainPageCollectionEdgeMany
|94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest
>> TCompactionMulti::MainPageCollectionOverflow [GOOD]
>> TCompactionMulti::MainPageCollectionOverflowSmallRefs [GOOD]
>> TCompactionMulti::MainPageCollectionOverflowLargeRefs [GOOD]
>> TExecutorDb::RandomOps
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeSequence [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD]
Test command err:
2025-03-28T09:34:24.045589Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:24.045666Z node 1 :IMPORT WARN: Table profiles were not loaded
... rebooting node broker
... OnActivateExecutor tabletId# 72057594037936129
... captured cache request
... sending extend lease request
... captured cache request
... waiting for response
... waiting for epoch update
>> TChargeBTreeIndex::FewNodes_Groups_History_Sticky [GOOD]
>> NFwd_TFlatIndexCache::Basics [GOOD]
>> NFwd_TFlatIndexCache::IndexPagesLocator [GOOD]
>> NFwd_TFlatIndexCache::GetTwice [GOOD]
>> NFwd_TFlatIndexCache::ForwardTwice [GOOD]
>> NFwd_TFlatIndexCache::Skip_Done [GOOD]
>> NFwd_TFlatIndexCache::Skip_Done_None [GOOD]
>> NFwd_TFlatIndexCache::Skip_Keep [GOOD]
>> NFwd_TFlatIndexCache::Skip_Wait [GOOD]
>> NFwd_TFlatIndexCache::Trace [GOOD]
>> NFwd_TFlatIndexCache::End [GOOD]
>> NFwd_TFlatIndexCache::Slices [GOOD]
>> NFwd_TLoadedPagesCircularBuffer::Basics [GOOD]
>> NOther::Blocks [GOOD]
>> NPage::Encoded [GOOD]
>> NPage::ABI_002
>> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD]
>> BackupRestore::RestoreViewToDifferentDatabase [GOOD]
>> BackupRestore::RestoreViewDependentOnAnotherView
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD]
Test command err:
2025-03-28T09:34:11.005190Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791362640499946:2076];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:34:11.005246Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001739/r3tmp/tmp32zxoJ/pdisk_1.dat
2025-03-28T09:34:11.469208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:34:11.469342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:34:11.476694Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:34:11.507307Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 6805, node 1
2025-03-28T09:34:11.563547Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-28T09:34:11.563573Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-28T09:34:11.643349Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:34:11.643375Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:34:11.643381Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:34:11.643492Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:6097
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:34:12.242670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:34:14.086985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791375525402873:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:14.087087Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:14.310720Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-03-28T09:34:14.465974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791375525403038:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:14.466063Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:14.466175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791375525403043:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:14.469840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-03-28T09:34:14.487254Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791375525403045:2351], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking }
2025-03-28T09:34:14.550810Z node 1 :TX_PROXY ERROR: Actor# [1:7486791375525403120:2811] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:34:14.817821Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe1m7y11az1b7bfhcc3d724, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDllMTQxODYtMTdlMjc1MWUtZmI4N2VmZTktNzQ4MmU1NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:34:15.042005Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqe1m89y9r9neaw63z3e5q03, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDllMTQxODYtMTdlMjc1MWUtZmI4N2VmZTktNzQ4MmU1NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
Backup "/Root" to "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmptm5MuW/"
Create temporary directory "/Root/~backup_20250328T093415" in database
Process "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmptm5MuW/table"
Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250328T093415/table" }
Backup table "/Root/~backup_20250328T093415/table" to "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmptm5MuW/table"
Describe table "/Root/~backup_20250328T093415/table"
Write scheme into "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmptm5MuW/table/scheme.pb"
Describe table "/Root/table"
Write ACL into "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmptm5MuW/table/permissions.pb"
Read table "/Root/~backup_20250328T093415/table"
Write data into "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmptm5MuW/table/data_00.csv"
Drop table "/Root/~backup_20250328T093415/table"
2025-03-28T09:34:15.417597Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found
Remove temporary directory "/Root/~backup_20250328T093415" in database
2025-03-28T09:34:15.431934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480
Backup completed successfully
2025-03-28T09:34:15.489445Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found
Restore "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmptm5MuW/" to "/Root"
Resolved db base path: "/Root"
Restore folder "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmptm5MuW/" to "/Root"
Process "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmptm5MuW/table"
Read scheme from "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmptm5MuW/table/scheme.pb"
Restore table "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmptm5MuW/table" to "/Root/table"
2025-03-28T09:34:15.528993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480
Created "/Root/table"
Read data from "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmptm5MuW/table/data_00.csv"
2025-03-28T09:34:15.717857Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jqe1m913aae7dt11nrb934ns, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODMzOTY0MTAtNmZkYWNlYTMtYjQ4ZTBkZDUtOTc0ZmEwYTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
Restore ACL "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmptm5MuW/table" to "/Root/table"
Read ACL from "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmptm5MuW/table/permissions.pb"
2025-03-28T09:34:15.741171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480
Restore completed successfully
2025-03-28T09:34:15.858315Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqe1m9695xkwtanna34brwfz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDllMTQxODYtMTdlMjc1MWUtZmI4N2VmZTktNzQ4MmU1NzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:34:16.005795Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791362640499946:2076];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:34:16.005873Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T09:34:17.290644Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486791384970686572:2076];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:34:17.290759Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpglDl7P/pdisk_1.dat
2025-03-28T09:34:17.462570Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:34:17.480376Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:34:17.480468Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:34:17.486493Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 4554, node 4
2025-03-28T09:34:17.579256Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:34:17.579277Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:34:17.579287Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:34:17.579408Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:19532
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoCh ...
/scheme.pb"Restore table "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpiHAOLu/table" to "/Root/table"2025-03-28T09:34:20.588221Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpiHAOLu/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-03-28T09:34:20.671498Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:34:20.737113Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 Restore ACL "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpiHAOLu/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpiHAOLu/table/permissions.pb"2025-03-28T09:34:21.003345Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-03-28T09:34:22.478522Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486791406445931488:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:22.478581Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpfWZOII/pdisk_1.dat 2025-03-28T09:34:22.569345Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:22.600251Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:22.600348Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:22.603019Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 64517, node 7 2025-03-28T09:34:22.656285Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:22.656308Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:22.656317Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:22.656458Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19369 WaitRootIsUp 'Root'... 
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:34:22.913959Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:34:25.469760Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486791419330834374:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:25.469861Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:25.486822Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-03-28T09:34:25.597181Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486791419330834604:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:25.597244Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7486791419330834609:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:25.597311Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:25.601153Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-03-28T09:34:25.620822Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7486791419330834611:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-03-28T09:34:25.697610Z node 7 :TX_PROXY ERROR: Actor# [7:7486791419330834699:2835] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:34:25.838561Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1mjswb80qtgfzywe6qppx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MjE5ZDI0ODItOWUxNzIwNjktZThhOTg1OTQtZTI5OGU3ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:34:25.851999Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1mjswb80qtgfzywe6qppx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MjE5ZDI0ODItOWUxNzIwNjktZThhOTg1OTQtZTI5OGU3ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:34:26.001862Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1mk2a7dzsgk61y4xxvse7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MjE5ZDI0ODItOWUxNzIwNjktZThhOTg1OTQtZTI5OGU3ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
Backup "/Root" to "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpgCMfBf/"
Create temporary directory "/Root/~backup_20250328T093426" in database
Process "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpgCMfBf/table"
Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250328T093426/table" }
2025-03-28T09:34:26.066154Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715665:1, at schemeshard: 72057594046644480
Backup table "/Root/~backup_20250328T093426/table" to "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpgCMfBf/table"
Describe table "/Root/~backup_20250328T093426/table"
Write scheme into "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpgCMfBf/table/scheme.pb"
Describe table "/Root/table"
Write ACL into "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpgCMfBf/table/permissions.pb"
Read table "/Root/~backup_20250328T093426/table"
Write data into "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpgCMfBf/table/data_00.csv"
Drop table "/Root/~backup_20250328T093426/table"
2025-03-28T09:34:26.340516Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715668:1, at schemeshard: 72057594046644480
2025-03-28T09:34:26.382915Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found
Remove temporary directory "/Root/~backup_20250328T093426" in database
2025-03-28T09:34:26.405318Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715669:0, at schemeshard: 72057594046644480
Backup completed successfully
2025-03-28T09:34:26.444594Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715670:1, at schemeshard: 72057594046644480
Restore
"/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpgCMfBf/" to "/Root"2025-03-28T09:34:26.496151Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpgCMfBf/" to "/Root"Process "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpgCMfBf/table"Read scheme from "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpgCMfBf/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpgCMfBf/table" to "/Root/table"2025-03-28T09:34:26.536179Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpgCMfBf/table/data_00.csv"2025-03-28T09:34:26.705002Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqe1mkshast3gebb73s3zhza, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=YmM3YTAxODktODgxNDg0OWEtZWJiOGEzZmUtODdiYTIzNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Restore ACL "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpgCMfBf/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/2909/001739/r3tmp/tmpgCMfBf/table/permissions.pb"2025-03-28T09:34:26.729556Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 Restore completed successfully2025-03-28T09:34:26.893959Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jqe1mky58j7dtvmgrp8yz8ee, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MjE5ZDI0ODItOWUxNzIwNjktZThhOTg1OTQtZTI5OGU3ZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root >> TPartBtreeIndexIteration::OneNode_History_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices >> NPage::ABI_002 [GOOD] >> NPage::GroupIdEncoding [GOOD] >> NPageCollection::Align [GOOD] >> NPageCollection::Meta >> TTicketParserTest::Authorization [GOOD] >> TTicketParserTest::AuthorizationModify ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143154976.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143154976.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143154976.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154976.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143154976.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143154976.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153776.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123154976.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123154976.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153776.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123153776.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123153776.000000s;Name=;Codec=}; 2025-03-28T09:32:57.222759Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:32:57.310916Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:32:57.337350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:32:57.337663Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:32:57.346267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:32:57.346503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:32:57.346763Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-28T09:32:57.346898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-28T09:32:57.347019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-28T09:32:57.347140Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-28T09:32:57.347255Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-28T09:32:57.347398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-28T09:32:57.347537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-28T09:32:57.347672Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-28T09:32:57.347824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-28T09:32:57.347946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-28T09:32:57.380265Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-03-28T09:32:57.380615Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-03-28T09:32:57.380685Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-03-28T09:32:57.380865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-28T09:32:57.381049Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-03-28T09:32:57.381143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-03-28T09:32:57.381192Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-03-28T09:32:57.381291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-03-28T09:32:57.381352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-03-28T09:32:57.381399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-03-28T09:32:57.381429Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-03-28T09:32:57.381590Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-28T09:32:57.381653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-03-28T09:32:57.381699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-03-28T09:32:57.381730Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-03-28T09:32:57.381832Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-03-28T09:32:57.381889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-03-28T09:32:57.381931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-03-28T09:32:57.381961Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-03-28T09:32:57.382054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-03-28T09:32:57.382094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-03-28T09:32:57.382124Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-03-28T09:32:57.382174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-03-28T09:32:57.382213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-03-28T09:32:57.382280Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-03-28T09:32:57.382742Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=73;
2025-03-28T09:32:57.382828Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=35;
2025-03-28T09:32:57.382931Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=51;
2025-03-28T09:32:57.383027Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=45;
2025-03-28T09:32:57.383202Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-03-28T09:32:57.383259Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-03-28T09:32:57.383293Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-03-28T09:32:57.383525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-03-28T09:32:57.383573Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-03-28T09:32:57.383605Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ...
n_data.cpp:29;EXECUTE:finishLoadingTime=342;
2025-03-28T09:34:27.585723Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=38792;
2025-03-28T09:34:27.593553Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=7754;
2025-03-28T09:34:27.601576Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=7183;
2025-03-28T09:34:27.601685Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=8034;
2025-03-28T09:34:27.601817Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=78;
2025-03-28T09:34:27.601915Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=58;
2025-03-28T09:34:27.602041Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=79;
2025-03-28T09:34:27.602139Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=58;
2025-03-28T09:34:27.610807Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8594;
2025-03-28T09:34:27.622356Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=11413;
2025-03-28T09:34:27.622475Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=35;
2025-03-28T09:34:27.622540Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=28;
2025-03-28T09:34:27.622577Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5;
2025-03-28T09:34:27.622613Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7;
2025-03-28T09:34:27.622648Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5;
2025-03-28T09:34:27.622739Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=63;
2025-03-28T09:34:27.622781Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5;
2025-03-28T09:34:27.622854Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=43;
2025-03-28T09:34:27.622894Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6;
2025-03-28T09:34:27.622950Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=26;
2025-03-28T09:34:27.623027Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=44;
2025-03-28T09:34:27.623328Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=269;
2025-03-28T09:34:27.623366Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=82293;
2025-03-28T09:34:27.623498Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184
2025-03-28T09:34:27.623594Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork;
2025-03-28T09:34:27.623634Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive;
2025-03-28T09:34:27.623684Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL;
2025-03-28T09:34:27.639123Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1;
2025-03-28T09:34:27.639308Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-03-28T09:34:27.639415Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-03-28T09:34:27.639498Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7;
2025-03-28T09:34:27.639564Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003;
2025-03-28T09:34:27.639608Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-03-28T09:34:27.639654Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes;
2025-03-28T09:34:27.639709Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-03-28T09:34:27.639807Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
2025-03-28T09:34:27.640306Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-03-28T09:34:27.640396Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2654:4528];tablet_id=9437184;parent=[1:2612:4493];fline=manager.cpp:82;event=ask_data;request=request_id=155;1={portions_count=29};;
2025-03-28T09:34:27.640954Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184;
2025-03-28T09:34:27.641265Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;
2025-03-28T09:34:27.641289Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats.
2025-03-28T09:34:27.641307Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184
2025-03-28T09:34:27.641343Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-03-28T09:34:27.641389Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-03-28T09:34:27.641432Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7;
2025-03-28T09:34:27.641475Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003;
2025-03-28T09:34:27.641507Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-03-28T09:34:27.641542Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes;
2025-03-28T09:34:27.641572Z node 1 :TX_COLUMNSHARD DEBUG:
TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-03-28T09:34:27.641644Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
2025-03-28T09:34:27.642043Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1;
2025-03-28T09:34:27.643575Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184
240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0
>> NPageCollection::Meta [GOOD]
>> NPageCollection::PagesToBlobsConverter [GOOD]
>> NPageCollection::Grow [GOOD]
>> NPageCollection::Groups [GOOD]
>> NPageCollection::Chop [GOOD]
>> NPageCollection::CookieAllocator [GOOD]
>> NProto::LargeGlobId [GOOD]
>> Redo::ABI_008 [GOOD]
>> Self::Literals [GOOD]
>> TIterator::SerialReverse [GOOD]
>> TIterator::GetKey
>> TIterator::GetKey [GOOD]
>> TIterator::GetKeyWithEraseCache [GOOD]
>> TIterator::GetKeyWithVersionSkips [GOOD]
>> TLegacy::IndexIter
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD]
Test command err:
2025-03-28T09:30:51.860628Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:30:51.861222Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:30:51.861282Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b10/r3tmp/tmp2quA8g/pdisk_1.dat 2025-03-28T09:30:52.436662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:30:52.484470Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:52.525737Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:30:52.528083Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:30:52.528517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:30:52.529082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:30:52.541677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:30:52.723031Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:30:52.723126Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:30:52.724456Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:645:2553] 2025-03-28T09:30:52.853631Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:30:52.853748Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:30:52.854293Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:30:52.854414Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:30:52.854829Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:30:52.855023Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 
RedirectRequired# true 2025-03-28T09:30:52.855135Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:30:52.857016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:30:52.857606Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:30:52.858206Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:30:52.858294Z node 1 :TX_PROXY DEBUG: Actor# [1:645:2553] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:30:52.890957Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:30:52.892118Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:30:52.892580Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:670:2574] 2025-03-28T09:30:52.892970Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:30:52.944043Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:661:2568], Recipient [1:670:2574]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:30:52.944924Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:30:52.945075Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:30:52.946934Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:30:52.947021Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:30:52.947077Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:30:52.947484Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:30:52.947620Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:30:52.947725Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:686:2574] in generation 1 2025-03-28T09:30:52.958570Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:30:53.001227Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:30:53.001451Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:30:53.001624Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:688:2584] 2025-03-28T09:30:53.001690Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:30:53.001731Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:30:53.001777Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:30:53.002024Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:670:2574], Recipient [1:670:2574]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:30:53.002088Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:30:53.002439Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:30:53.002557Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:30:53.002666Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:30:53.002728Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:30:53.002807Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:30:53.002849Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:30:53.002886Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:30:53.002917Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:30:53.002960Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:30:53.003394Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:677:2578], Recipient [1:670:2574]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:30:53.003472Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:30:53.003522Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:666:2571], serverId# [1:677:2578], sessionId# [0:0:0] 2025-03-28T09:30:53.003638Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:677:2578] 2025-03-28T09:30:53.003680Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:30:53.003799Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:30:53.004060Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:30:53.004140Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:30:53.004243Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:30:53.004293Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:30:53.004378Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:30:53.004424Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:30:53.004475Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:30:53.004808Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:30:53.004857Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:30:53.004905Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:30:53.004947Z node 1 
:TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:30:53.005005Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:30:53.005042Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:30:53.005076Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:30:53.005129Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:30:53.005164Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:30:53.006639Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:689:2585], Recipient [1:670:2574]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:30:53.006709Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:30 ... p: 1515 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2025-03-28T09:34:26.805490Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-03-28T09:34:26.805538Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:34:26.805648Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-03-28T09:34:26.805703Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037889 on unit CheckRead 2025-03-28T09:34:26.805764Z node 28 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037889 is Executed 2025-03-28T09:34:26.805790Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037889 executing on unit CheckRead 2025-03-28T09:34:26.805815Z node 28 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-28T09:34:26.805841Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-28T09:34:26.805882Z node 28 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037889 2025-03-28T09:34:26.805916Z node 28 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037889 is Executed 2025-03-28T09:34:26.805940Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-28T09:34:26.805962Z node 28 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037889 to execution unit ExecuteRead 2025-03-28T09:34:26.805987Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037889 on unit ExecuteRead 2025-03-28T09:34:26.806085Z node 28 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1515 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2025-03-28T09:34:26.806289Z node 28 :TX_DATASHARD 
TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v1515/18446744073709551615 2025-03-28T09:34:26.806330Z node 28 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[28:1102:2884], 1} after executionsCount# 1 2025-03-28T09:34:26.806367Z node 28 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[28:1102:2884], 1} sends rowCount# 1, bytes# 32, quota rows left# 999, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:34:26.806426Z node 28 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[28:1102:2884], 1} finished in read 2025-03-28T09:34:26.806472Z node 28 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037889 is Executed 2025-03-28T09:34:26.806496Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037889 executing on unit ExecuteRead 2025-03-28T09:34:26.806520Z node 28 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037889 to execution unit CompletedOperations 2025-03-28T09:34:26.806548Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037889 on unit CompletedOperations 2025-03-28T09:34:26.806588Z node 28 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037889 is Executed 2025-03-28T09:34:26.806611Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037889 executing on unit CompletedOperations 2025-03-28T09:34:26.806637Z node 28 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037889 has finished 2025-03-28T09:34:26.806664Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-03-28T09:34:26.806763Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:34:26.806815Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:21} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:34:26.806868Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-03-28T09:34:26.807598Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037889] send [28:955:2767] 2025-03-28T09:34:26.807636Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037889] push event to server [28:955:2767] 2025-03-28T09:34:26.808003Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] ::Bootstrap [28:1105:2887] 2025-03-28T09:34:26.808104Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] lookup [28:1105:2887] 2025-03-28T09:34:26.808257Z node 28 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [28:1102:2884], Recipient [28:705:2590]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-03-28T09:34:26.808304Z node 28 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } 2025-03-28T09:34:26.808419Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] queue send [28:1105:2887] 2025-03-28T09:34:26.808562Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] forward result local node, try to connect [28:1105:2887] 2025-03-28T09:34:26.808606Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890]::SendEvent [28:1105:2887] 2025-03-28T09:34:26.808782Z node 28 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [28:1106:2888], Recipient [28:1058:2856]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:34:26.808818Z node 28 :TX_DATASHARD TRACE: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2025-03-28T09:34:26.808853Z node 28 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [28:1105:2887], serverId# [28:1106:2888], sessionId# [0:0:0] 2025-03-28T09:34:26.808895Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] connected with status OK role: Leader [28:1105:2887] 2025-03-28T09:34:26.808931Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] send queued [28:1105:2887] 2025-03-28T09:34:26.808961Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] push event to server [28:1105:2887] 2025-03-28T09:34:26.809139Z node 28 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [28:1102:2884], Recipient [28:1058:2856]: NKikimrTxDataShard.TEvRead ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1515 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-03-28T09:34:26.809226Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-03-28T09:34:26.809276Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:34:26.809344Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2025-03-28T09:34:26.809399Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2025-03-28T09:34:26.809456Z node 28 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-03-28T09:34:26.809481Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2025-03-28T09:34:26.809506Z node 28 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-03-28T09:34:26.809535Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2025-03-28T09:34:26.809582Z node 28 :TX_DATASHARD TRACE: Activated operation [0:1] at 72075186224037890 2025-03-28T09:34:26.809643Z node 28 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-03-28T09:34:26.809672Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-03-28T09:34:26.809694Z node 28 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2025-03-28T09:34:26.809719Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-03-28T09:34:26.809831Z node 28 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1515 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-03-28T09:34:26.810040Z node 28 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037890 promoting UnprotectedReadEdge to v1515/18446744073709551615 2025-03-28T09:34:26.810091Z node 28 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[28:1102:2884], 2} after executionsCount# 1 2025-03-28T09:34:26.810134Z node 28 :TX_DATASHARD TRACE: 72075186224037890 read 
iterator# {[28:1102:2884], 2} sends rowCount# 1, bytes# 32, quota rows left# 998, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0
2025-03-28T09:34:26.810211Z node 28 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[28:1102:2884], 2} finished in read
2025-03-28T09:34:26.810265Z node 28 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed
2025-03-28T09:34:26.810295Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead
2025-03-28T09:34:26.810324Z node 28 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit CompletedOperations
2025-03-28T09:34:26.810353Z node 28 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations
2025-03-28T09:34:26.810409Z node 28 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed
2025-03-28T09:34:26.810436Z node 28 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations
2025-03-28T09:34:26.810460Z node 28 :TX_DATASHARD TRACE: Execution plan for [0:1] at 72075186224037890 has finished
2025-03-28T09:34:26.810486Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890
2025-03-28T09:34:26.810570Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{17, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb}
2025-03-28T09:34:26.810620Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0}
2025-03-28T09:34:26.810654Z node 28 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890
2025-03-28T09:34:26.811231Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] send [28:1105:2887]
2025-03-28T09:34:26.811266Z node 28 :PIPE_CLIENT DEBUG: TClient[72075186224037890] push event to server [28:1105:2887]
2025-03-28T09:34:26.811377Z node 28 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [28:1102:2884], Recipient [28:1058:2856]: NKikimrTxDataShard.TEvReadCancel ReadId: 2
2025-03-28T09:34:26.811415Z node 28 :TX_DATASHARD TRACE: 72075186224037890 ReadCancel: { ReadId: 2 }
{ items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 11 } items { uint32_value: 111 } }, { items { uint32_value: 21 } items { uint32_value: 21 } }
>> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD]
>> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions [GOOD]
>> TTicketParserTest::NebiusAuthorizationUnavailable
>> TNodeBrokerTest::ExtendLeasePipelining [GOOD]
>> TLegacy::IndexIter [GOOD]
>> TLegacy::ScreenedIndexIter [GOOD]
>> TLegacy::StatsIter
>> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD]
>> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery
>> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD]
>> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD]
>> TKesusTest::TestAttachOutOfSequence
>> TKesusTest::TestAcquireUpgrade
>> TKesusTest::TestQuoterHDRRParametersValidation
>> TDynamicNameserverTest::BasicFunctionality [GOOD]
>> TKesusTest::TestAcquireLocks
>> TKesusTest::TestUnregisterProxy
>> TTicketParserTest::AuthorizationRetryError [GOOD]
>> TTicketParserTest::AuthorizationRetryErrorImmediately
>> TKesusTest::TestQuoterAccountResourcesBurst
>> TKesusTest::TestAttachNewSessions
>> TLegacy::StatsIter [GOOD]
>> TPageHandleTest::Uninitialized [GOOD]
>> TPageHandleTest::NormalUse [GOOD]
>> TPageHandleTest::HandleRef [GOOD]
>> TPageHandleTest::PinnedRef [GOOD]
>> TPageHandleTest::PinnedRefPure [GOOD]
>> TPart::State [GOOD]
>> TPart::Trivials [GOOD]
>> TPart::Basics [GOOD]
>> TPart::BasicColumnGroups [GOOD]
>> TPart::CellDefaults [GOOD]
>> TPart::Matter [GOOD]
>> TPart::External [GOOD]
>> TPart::Outer [GOOD]
>> TPart::MassCheck
>> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD]
>> RetryPolicy::TWriteSession_SwitchBackToLocalCluster
>> Compression::WriteWithMixedCodecs [GOOD]
>> PersQueueSdkReadSessionTest::ReadSessionWithAbort
>> TNodeBrokerTest::BasicFunctionality [GOOD]
|94.3%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log}
|94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD]
>> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD]
>> TPart::MassCheck [GOOD]
>> TPart::WreckPart
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD]
Test command err:
2025-03-28T09:34:26.022379Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:26.022433Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:34:26.038867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944
FAKE_COORDINATOR: Add transaction: 101 at step: 5000001
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001
FAKE_COORDINATOR: Erasing txId 101
|94.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log}
|94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeasePipelining [GOOD]
Test command err:
2025-03-28T09:34:26.715576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:26.715649Z node 1 :IMPORT WARN: Table profiles were not loaded
... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR
... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::BasicFunctionality [GOOD]
Test command err:
2025-03-28T09:34:26.889878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:26.889937Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:34:26.961980Z node 1 :NODE_BROKER ERROR: [Dirty] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s
2025-03-28T09:34:26.974156Z node 1 :NODE_BROKER ERROR: [Committed] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s
>> TKesusTest::TestQuoterHDRRParametersValidation [GOOD]
>> TKesusTest::TestQuoterAccountResourcesOnDemand
>> TKesusTest::TestUnregisterProxy [GOOD]
>> TKesusTest::TestUnregisterProxyBadGeneration
>> TKesusTest::TestAttachNewSessions [GOOD]
>> TKesusTest::TestAttachMissingSession
>> TKesusTest::TestAttachOutOfSequence [GOOD]
>> TKesusTest::TestAttachOutOfSequenceInTx
>> TKesusTest::TestAcquireUpgrade [GOOD]
>> TKesusTest::TestAcquireTimeout
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::BasicFunctionality [GOOD]
Test command err:
2025-03-28T09:34:25.946385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:34:25.946461Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:34:27.161768Z node 1 :NODE_BROKER ERROR: Cannot register node host1:1001: WRONG_REQUEST: Another location is registered for host1:1001
2025-03-28T09:34:27.175166Z node 1 :NODE_BROKER ERROR: Cannot register node host4:1001: ERROR_TEMP: No free node IDs
2025-03-28T09:34:27.175534Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired
2025-03-28T09:34:27.175966Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> Self::Literals [GOOD]
Test command err:
+ BTreeIndex{PageId: 0 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385, 13 rev 1, 683b}
| PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30
| > {0, a, false, 0}
| PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61
| > {1, b, true, 10}
| PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93
| > {2, c, false, 20}
| PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126
| > {3, d, true, 30}
| PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160
| > {4, e, false, 40}
| PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195
| > {5, f, true, 50}
| PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231
| > {6, g, false, 60}
| PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268
| > {7, h, true, 70}
| PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306
| > {8, i, false, 80}
| PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345
| > {9, j, true, 90}
| PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385
+ BTreeIndex{PageId: 9 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 116b}
| + BTreeIndex{PageId: 5 RowCount: 936 DataSize: 9036 GroupDataSize: 
18036 ErasedRowCount: 306, 13 rev 1, 179b} | | + BTreeIndex{PageId: 0 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93, 13 rev 1, 179b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, a, false, 0} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, b, true, 10} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | > {2, c, false, 20} | | + BTreeIndex{PageId: 1 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195, 13 rev 1, 179b} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, d, true, 30} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, e, false, 40} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | > {5, f, true, 50} | | + BTreeIndex{PageId: 2 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, g, false, 60} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, h, true, 70} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | + BTreeIndex{PageId: 8 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 242b} | | + BTreeIndex{PageId: 3 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 179b} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, j, true, 90} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, k, false, 100} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, l, true, 110} | | + BTreeIndex{PageId: 4 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555, 13 rev 1, 179b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, m, false, 120} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, n, true, 130} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | > {14, o, false, 140} | | + BTreeIndex{PageId: 6 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693, 13 rev 1, 179b} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, p, true, 150} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, q, false, 160} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | > {17, r, true, 170} | | + BTreeIndex{PageId: 7 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 179b} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, s, false, 180} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, t, true, 190} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 + BTreeIndex{PageId: 15 RowCount: 15150 DataSize: 106050 GroupDataSize: 207050 ErasedRowCount: 8080, 13 rev 1, 174b} | + BTreeIndex{PageId: 12 RowCount: 9078 DataSize: 
70278 GroupDataSize: 138278 ErasedRowCount: 4318, 13 rev 1, 690b} | | + BTreeIndex{PageId: 0 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 702b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, x, NULL, NULL} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, xx, NULL, NULL} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | | > {2, xxx, NULL, NULL} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, xxxx, NULL, NULL} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, xxxxx, NULL, NULL} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | | > {5, xxxxxx, NULL, NULL} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, xxxxxxx, NULL, NULL} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, xxxxxxxx, NULL, NULL} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | | | > {8, xxxxxxxxx, NULL, NULL} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, xxxxxxxxxx, NULL, NULL} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 1 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891, 13 rev 1, 683b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | | > {14, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | | > {17, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 | | | > {20, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10021 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891 | | > {21, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 2 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395, 13 rev 1, 689b} | | | PageId: 10022 RowCount: 2553 DataSize: 23253 GroupDataSize: 46253 ErasedRowCount: 943 | | | > {22, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10023 RowCount: 2676 DataSize: 24276 GroupDataSize: 48276 ErasedRowCount: 996 | | | > {23, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10024 RowCount: 2800 DataSize: 25300 GroupDataSize: 50300 ErasedRowCount: 1050 | | | 
> {24, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10025 RowCount: 2925 DataSize: 26325 GroupDataSize: 52325 ErasedRowCount: 1105 | | | > {25, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10026 RowCount: 3051 DataSize: 27351 GroupDataSize: 54351 ErasedRowCount: 1161 | | | > {26, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10027 RowCount: 3178 DataSize: 28378 GroupDataSize: 56378 ErasedRowCount: 1218 | | | > {27, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10028 RowCount: 3306 DataSize: 29406 GroupDataSize: 58406 ErasedRowCount: 1276 | | | > {28, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10029 RowCount: 3435 DataSize: 30435 GroupDataSize: 60435 ErasedRowCount: 1335 | | | > {29, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10030 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395 | | > {30, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 3 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911, 13 rev 1, 669b} | | | PageId: 10031 RowCount: 3696 DataSize: 32496 GroupDataSize: 64496 ErasedRowCount: 1456 | | | > {31, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10032 RowCount: 3828 DataSize: 33528 GroupDataSize: 66528 ErasedRowCount: 1518 | | | > {32, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10033 RowCount: 3961 DataSize: 34561 GroupDataSize: 68561 ErasedRowCount: 1581 | | | > {33, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10034 RowCount: 4095 DataSize: 35595 GroupDataSize: 70595 ErasedRowCount: 1645 | | | > {34, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10035 RowCount: 4230 DataSize: 36630 GroupDataSize: 72630 ErasedRowCount: 1710 | | | > {35, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10036 RowCount: 4366 DataSize: 37666 GroupDataSize: 74666 ErasedRowCount: 1776 | | | > {36, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10037 RowCount: 4503 DataSize: 38703 GroupDataSize: 76703 ErasedRowCount: 1843 | | | > {37, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10038 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911 | | > {38, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 4 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491, 13 rev 1, 725b} | | | PageId: 10039 RowCount: 4780 DataSize: 40780 GroupDataSize: 80780 ErasedRowCount: 1980 | | | > {39, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10040 RowCount: 4920 DataSize: 41820 GroupDataSize: 82820 ErasedRowCount: 2050 | | | > {40, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10041 RowCount: 5061 DataSize: 42861 GroupDataSize: 84861 ErasedRowCount: 2121 | | | > {41, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10042 RowCount: 5203 DataSize: 43903 GroupDataSize: 86903 ErasedRowCount: 2193 | | | > {42, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10043 RowCount: 5346 DataSize: 44946 GroupDataSize: 88946 ErasedRowCount: 2266 | | | > {43, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10044 RowCount: 5490 DataSize: 45990 GroupDataSize: 90990 ErasedRowCount: 2340 | | | > {44, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10045 RowCount: 5635 DataSize: 47035 GroupDataSize: 93035 ErasedRowCount: 2415 | | | > {45, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10046 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491 | | > {46, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 5 RowCount: 6831 DataSize: 55431 GroupDataSize: 109431 ErasedRowCount: 3051, 13 ... 
3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 
Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 
: 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] Test command err: 2025-03-28T09:34:25.919107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:25.919179Z node 1 :IMPORT WARN: Table profiles were not loaded >> TKesusTest::TestUnregisterProxyBadGeneration [GOOD] >> TKesusTest::TestSessionTimeoutAfterUnregister >> TKesusTest::TestAttachMissingSession [GOOD] >> TKesusTest::TestAttachOldGeneration >> TKesusTest::TestAttachOutOfSequenceInTx [GOOD] >> TKesusTest::TestAttachThenReRegister >> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] Test command err: 2025-03-28T09:26:47.148148Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:47.223165Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:47.246852Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:47.247137Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:47.253864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:47.254048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:47.254226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:47.254305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:47.254368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:47.254478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:47.254546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:47.254620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:47.254722Z node 1
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:47.254836Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.254926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:47.255012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:47.279498Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:47.279822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:47.279875Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:47.280093Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:47.280238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:47.280328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:47.280375Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:47.280482Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:47.280554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:47.280599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:47.280628Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:47.280803Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:47.280876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:47.280912Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:47.280942Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:47.281086Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:47.281141Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:47.281179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:47.281229Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:47.281340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:47.281381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:47.281410Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:47.281455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:47.281494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:47.281525Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:47.281911Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-03-28T09:26:47.282014Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2025-03-28T09:26:47.282096Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2025-03-28T09:26:47.282182Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-03-28T09:26:47.282350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:47.282413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:47.282447Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:47.282618Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:47.282653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.282677Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:47.282856Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:47.282913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:47.282941Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:47.283119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:47.283166Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:47.283195Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:47.283364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:47.283416Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:47.283461Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
3-28T09:34:27.344836Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10504:12465];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-03-28T09:34:28.022737Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T09:34:28.022822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=10; 2025-03-28T09:34:28.023297Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=400; 2025-03-28T09:34:28.023346Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=471; 2025-03-28T09:34:28.028433Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T09:34:28.028526Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=13; 2025-03-28T09:34:28.040987Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=12353; 2025-03-28T09:34:28.053884Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11710; 2025-03-28T09:34:28.054011Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=12922; 2025-03-28T09:34:28.054157Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=87; 2025-03-28T09:34:28.054302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=85; 2025-03-28T09:34:28.054464Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=113; 2025-03-28T09:34:28.054595Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=95; 2025-03-28T09:34:28.054794Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=144; 2025-03-28T09:34:28.054838Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=26253; 2025-03-28T09:34:28.058303Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T09:34:28.058387Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=12; 2025-03-28T09:34:28.060842Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2375; 2025-03-28T09:34:28.082967Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=22031; 2025-03-28T09:34:28.083106Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=50; 2025-03-28T09:34:28.083184Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=30; 2025-03-28T09:34:28.083238Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-03-28T09:34:28.083313Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=14; 2025-03-28T09:34:28.083376Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2025-03-28T09:34:28.083464Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=46; 2025-03-28T09:34:28.083523Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=11; 2025-03-28T09:34:28.083625Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=55; 2025-03-28T09:34:28.083671Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2025-03-28T09:34:28.083744Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2025-03-28T09:34:28.083842Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=52; 2025-03-28T09:34:28.083969Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=54; 2025-03-28T09:34:28.084005Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=25566; 2025-03-28T09:34:28.084182Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=113961708;raw_bytes=176366876;count=47;records=1845000} inactive {blob_bytes=174125508;raw_bytes=270077548;count=81;records=2819164} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:34:28.085100Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10504:12465];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:34:28.085167Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10504:12465];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:34:28.085260Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:10504:12465];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:34:28.085331Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10504:12465];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-03-28T09:34:28.085516Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:34:28.085576Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:34:28.085761Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-03-28T09:34:28.085840Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-28T09:34:28.085886Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:34:28.085935Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:28.085993Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:28.086085Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:34:28.090459Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:34:28.094217Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10504:12465];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:34:28.096077Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10504:12465];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:34:28.096129Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:34:28.096156Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:34:28.096207Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10504:12465];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:34:28.096271Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10504:12465];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:34:28.096523Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10504:12465];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-03-28T09:34:28.096593Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10504:12465];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-28T09:34:28.096641Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10504:12465];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:34:28.096720Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10504:12465];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:28.096783Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10504:12465];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:28.096884Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10504:12465];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-03-28T09:34:28.096971Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10504:12465];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] >> TKesusTest::TestAttachThenReRegister [GOOD] >> TKesusTest::TestAttachTimeoutTooBig >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] >> TKesusTest::TestAttachOldGeneration [GOOD] >> TKesusTest::TestAttachFastPath >> TPart::WreckPart [GOOD] >> TPart::PageFailEnv >> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange >> TKesusTest::TestAttachTimeoutTooBig [GOOD] >> TKesusTest::TestCreateSemaphore >> TKesusTest::TestAttachFastPath [GOOD] >> TKesusTest::TestAttachFastPathBlocked ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] Test command err: 2025-03-28T09:34:28.117544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to 
console configs 2025-03-28T09:34:28.117622Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:29.253083Z node 1 :NODE_BROKER ERROR: Cannot register node host2:1001: ERROR_TEMP: No free node IDs ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR >> TKesusTest::TestAttachFastPathBlocked [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] >> TKesusTest::TestQuoterAccountResourcesBurst [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients >> TKesusTest::TestCreateSemaphore [GOOD] >> TTicketParserTest::AuthorizationModify [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2025-03-28T09:33:31.885301Z :ReadSession INFO: Random seed for debugging is 1743154411885276 2025-03-28T09:33:32.318749Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791194053659905:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:32.320032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:32.441825Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791193026645316:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:32.673163Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:33:32.688117Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cf8/r3tmp/tmpu5sdWO/pdisk_1.dat 2025-03-28T09:33:32.810894Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:33.137376Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:33.137468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:33.138367Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:33.138445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:33.148094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:33.148894Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:33:33.150062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:33.182964Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7118, node 1 2025-03-28T09:33:33.274525Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:33:33.275868Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:33:33.488059Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001cf8/r3tmp/yandex6dhQ9y.tmp 2025-03-28T09:33:33.488086Z node 1 :NET_CLASSIFIER WARN: will try to initialize from 
file: /home/runner/.ya/build/build_root/2909/001cf8/r3tmp/yandex6dhQ9y.tmp 2025-03-28T09:33:33.488236Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001cf8/r3tmp/yandex6dhQ9y.tmp 2025-03-28T09:33:33.488359Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:33:33.674939Z INFO: TTestServer started on Port 19540 GrpcPort 7118 TClient is connected to server localhost:19540 PQClient connected to localhost:7118 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:34.082182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:34.128082Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:33:36.547271Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486791210206514681:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:36.547361Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486791210206514692:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:36.547418Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:36.558734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-03-28T09:33:36.604980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486791210206514695:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-03-28T09:33:36.720361Z node 2 :TX_PROXY ERROR: Actor# [2:7486791210206514723:2130] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:37.154429Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486791211233530186:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:33:37.155184Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486791210206514738:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:33:37.156689Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NTk0N2VmNGQtNzE3YmM1ZmQtZWIzMDA3M2YtMjBlMjQ5ZTk=, ActorId: [2:7486791210206514664:2308], ActorState: ExecuteState, TraceId: 01jqe1k2wpakqjr4ewywnbkgfy, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:33:37.156530Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWU5ZmNlYWQtMTUzMjgzZWYtMjgyMjdkYjAtNmNmOTBhNjg=, ActorId: [1:7486791211233530137:2337], ActorState: ExecuteState, TraceId: 01jqe1k31g1yz6jc0xvgh8dahb, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:33:37.159255Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:33:37.162910Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:33:37.171804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:33:37.293689Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791194053659905:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:37.293794Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:37.375075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:37.426205Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486791193026645316:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:37.426270Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:37.550005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:7118", true, true, 1000); 2025-03-28T09:33:38.036639Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1k40y3xeprbzbng95tsee, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTZhYTVjZTYtZjdiNzBiYTMtNjUwMzZmZC03NGYwY2VlYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486791219823465193:2980] === CheckClustersList. Ok 2025-03-28T09:33:44.006676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:7118 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC 2025-03-28T09 ... 
: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 2 } 2025-03-28T09:34:29.775363Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_9101993786284959700_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 3 from offset3 2025-03-28T09:34:29.775412Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_9101993786284959700_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid b406c4fb-649e399f-57f80286-bb064536 has messages 1 2025-03-28T09:34:29.775517Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_9101993786284959700_v1 read done: guid# b406c4fb-649e399f-57f80286-bb064536, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 220 2025-03-28T09:34:29.775558Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_9101993786284959700_v1 response to read: guid# b406c4fb-649e399f-57f80286-bb064536 2025-03-28T09:34:29.775792Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_9101993786284959700_v1 Process answer. Aval parts: 0 2025-03-28T09:34:29.776146Z :DEBUG: [/Root] [/Root] [9350b828-4eb1513b-863be08b-21134501] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:29.776384Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2025-03-28T09:34:29.776547Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2025-03-28T09:34:29.776489Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_9101993786284959700_v1 grpc read done: success# 1, data# { read { } } 2025-03-28T09:34:29.776604Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_9101993786284959700_v1 got read request: guid# cc7a7fa0-759ad62d-c1196f37-132ab41b 2025-03-28T09:34:29.776629Z :DEBUG: [/Root] [/Root] [9350b828-4eb1513b-863be08b-21134501] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2025-03-28T09:34:28.662000Z WriteTime: 2025-03-28T09:34:28.663000Z Ip: "ipv6:[::1]:50862" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:50862" } } } } 2025-03-28T09:34:29.776834Z :INFO: [/Root] [/Root] [9350b828-4eb1513b-863be08b-21134501] Closing read session. Close timeout: 3.000000s 2025-03-28T09:34:29.776876Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-03-28T09:34:29.776917Z :INFO: [/Root] [/Root] [9350b828-4eb1513b-863be08b-21134501] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1329 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:34:29.777715Z :INFO: [/Root] [/Root] [9350b828-4eb1513b-863be08b-21134501] Closing read session. 
Close timeout: 0.000000s 2025-03-28T09:34:29.777758Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2025-03-28T09:34:29.777802Z :INFO: [/Root] [/Root] [9350b828-4eb1513b-863be08b-21134501] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1330 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:34:29.777791Z node 7 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/user session shared/user_7_1_9101993786284959700_v1 grpc read done: success# 0, data# { } 2025-03-28T09:34:29.777813Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_9101993786284959700_v1 grpc read failed 2025-03-28T09:34:29.777835Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_9101993786284959700_v1 grpc closed 2025-03-28T09:34:29.777874Z node 7 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/user session shared/user_7_1_9101993786284959700_v1 is DEAD 2025-03-28T09:34:29.777909Z :NOTICE: [/Root] [/Root] [9350b828-4eb1513b-863be08b-21134501] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:34:29.778155Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/user_7_1_9101993786284959700_v1 2025-03-28T09:34:29.778198Z node 7 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [7:7486791434458898217:2563] destroyed 2025-03-28T09:34:29.778249Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/user_7_1_9101993786284959700_v1 2025-03-28T09:34:29.778779Z node 8 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [7:7486791434458898215:2560] disconnected; active server actors: 1 2025-03-28T09:34:29.778826Z node 8 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--test-topic] pipe [7:7486791434458898215:2560] client user disconnected session shared/user_7_1_9101993786284959700_v1 2025-03-28T09:34:31.201209Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:31.201247Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:31.201303Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:34:31.201682Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:34:31.202207Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:34:31.202414Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:31.202856Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2025-03-28T09:34:31.204526Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:31.204559Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:31.204600Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:34:31.204980Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:34:31.205356Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:34:31.205480Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:31.205706Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:34:31.206979Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2025-03-28T09:34:31.207537Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2025-03-28T09:34:31.207623Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2025-03-28T09:34:31.207797Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:34:31.207850Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2025-03-28T09:34:31.207894Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2025-03-28T09:34:31.207943Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 3, size 16 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2025-03-28T09:34:31.210820Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:31.210884Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:31.210924Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:34:31.211252Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:34:31.211749Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:34:31.211925Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:31.212176Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2025-03-28T09:34:31.213061Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:31.213273Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:34:31.213447Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:34:31.213538Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2025-03-28T09:34:31.213643Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). Partition stream id: 1 2025-03-28T09:34:31.215427Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:31.215483Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:31.215518Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:34:31.215889Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2025-03-28T09:34:31.216294Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2025-03-28T09:34:31.216450Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:31.217234Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:31.217384Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (1-1) 2025-03-28T09:34:31.217471Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2025-03-28T09:34:31.217560Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] Test command err: 2025-03-28T09:34:30.068442Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:30.068600Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:30.107740Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:30.107804Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:30.125248Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:30.125949Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=6453976913541598031, session=0, seqNo=0) 2025-03-28T09:34:30.126195Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:34:30.149572Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=6453976913541598031, session=1) 2025-03-28T09:34:30.150004Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=16160171780796812530, session=0, seqNo=0) 2025-03-28T09:34:30.150156Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:34:30.162126Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=16160171780796812530, session=2) 2025-03-28T09:34:30.598624Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:30.598859Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:30.616630Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:30.616767Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:30.640389Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:30.640807Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=13023552569819203828, session=1, seqNo=0) 2025-03-28T09:34:30.654828Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=13023552569819203828, session=1) 2025-03-28T09:34:31.026903Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:31.027022Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:31.046331Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:31.046454Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:31.060519Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:31.061398Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=10921599619793828177, session=0, seqNo=0) 2025-03-28T09:34:31.061564Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:34:31.083975Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=10921599619793828177, session=1) 2025-03-28T09:34:31.427379Z node 4 :KESUS_TABLET INFO: 
OnActivateExecutor: 72057594037927937 2025-03-28T09:34:31.427499Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:31.447250Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:31.447872Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:31.472352Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:31.472720Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[4:132:2158], cookie=11050006932547294386, path="") 2025-03-28T09:34:31.486373Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[4:132:2158], cookie=11050006932547294386, status=SUCCESS) 2025-03-28T09:34:31.487592Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:141:2165], cookie=16844552017005136970, session=0, seqNo=0) 2025-03-28T09:34:31.487777Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:34:31.500059Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:141:2165], cookie=16844552017005136970, session=1) 2025-03-28T09:34:31.500930Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[4:142:2166], cookie=111, session=0, seqNo=0) 2025-03-28T09:34:31.501071Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:34:31.501265Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Fast-path attach session=1 to sender=[4:142:2166], cookie=222, seqNo=0 2025-03-28T09:34:31.513699Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[4:142:2166], cookie=111, session=2) 2025-03-28T09:34:31.828435Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:31.828526Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:31.847273Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:31.847738Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:31.871739Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:31.872090Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Execute (sender=[5:132:2158], cookie=10308890605099381494, path="") 2025-03-28T09:34:31.884035Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxConfigSet::Complete (sender=[5:132:2158], cookie=10308890605099381494, status=SUCCESS) 2025-03-28T09:34:31.885008Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:141:2165], cookie=13969616261504589472, session=0, seqNo=0) 2025-03-28T09:34:31.885135Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:34:31.897164Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:141:2165], cookie=13969616261504589472, session=1) 2025-03-28T09:34:31.897924Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:141:2165], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-28T09:34:31.898074Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-28T09:34:31.898167Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:34:31.898501Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:142:2166], 
cookie=111, session=0, seqNo=0) 2025-03-28T09:34:31.898580Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:34:31.898749Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:142:2166], cookie=222, session=1, seqNo=0) 2025-03-28T09:34:31.910433Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:141:2165], cookie=123) 2025-03-28T09:34:31.910498Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:142:2166], cookie=111, session=2) 2025-03-28T09:34:31.910535Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:142:2166], cookie=222, session=1)
>> TPart::PageFailEnv [GOOD]
>> TPart::ForwardEnv
>> TNodeBrokerTest::TestListNodes [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD]
Test command err:
2025-03-28T09:34:31.056765Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:31.056849Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:31.069417Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:31.069507Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:31.093927Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:31.094604Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=6010856022685286313, session=0, seqNo=0) 2025-03-28T09:34:31.094753Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:34:31.106396Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=6010856022685286313, session=1) 2025-03-28T09:34:31.106623Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2157], cookie=2079201685588355804, session=0, seqNo=0) 2025-03-28T09:34:31.106717Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:34:31.118548Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2157], cookie=2079201685588355804, session=2) 2025-03-28T09:34:31.119678Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-28T09:34:31.119822Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-28T09:34:31.119923Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:34:31.131777Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-03-28T09:34:31.132122Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=112, session=1, semaphore="Lock2" count=1) 2025-03-28T09:34:31.132267Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-03-28T09:34:31.132333Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-03-28T09:34:31.144415Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=112) 2025-03-28T09:34:31.144854Z node 1 :KESUS_TABLET DEBUG:
[72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:130:2156], cookie=333, name="Lock1") 2025-03-28T09:34:31.144953Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-03-28T09:34:31.145208Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2157], cookie=222, session=2, semaphore="Lock1" count=1) 2025-03-28T09:34:31.145307Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 3 "Lock1" 2025-03-28T09:34:31.145373Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-03-28T09:34:31.145494Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:131:2157], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-03-28T09:34:31.157442Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:130:2156], cookie=333) 2025-03-28T09:34:31.157520Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2157], cookie=222) 2025-03-28T09:34:31.157568Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:131:2157], cookie=223) 2025-03-28T09:34:31.157920Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:130:2156], cookie=334, name="Lock2") 2025-03-28T09:34:31.158001Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-03-28T09:34:31.158053Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-03-28T09:34:31.170150Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:130:2156], cookie=334) 2025-03-28T09:34:31.170771Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:159:2183], cookie=10553836458166195035, name="Lock1") 2025-03-28T09:34:31.170880Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:159:2183], cookie=10553836458166195035) 2025-03-28T09:34:31.171331Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:162:2186], cookie=6271876385926176355, name="Lock2") 2025-03-28T09:34:31.171392Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:162:2186], cookie=6271876385926176355) 2025-03-28T09:34:31.185760Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:31.185854Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:31.186402Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:31.187005Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:31.224661Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:31.224865Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-03-28T09:34:31.224904Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2025-03-28T09:34:31.225307Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:202:2216], cookie=14137650351018207276, name="Lock1") 2025-03-28T09:34:31.225379Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:202:2216], 
cookie=14137650351018207276) 2025-03-28T09:34:31.225900Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:210:2223], cookie=11255189504984491772, name="Lock2") 2025-03-28T09:34:31.225983Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:210:2223], cookie=11255189504984491772) 2025-03-28T09:34:31.648765Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:31.648860Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:31.668050Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:31.668258Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:31.692241Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:31.692969Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=15287840777845175527, session=0, seqNo=0) 2025-03-28T09:34:31.693111Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:34:31.705262Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=15287840777845175527, session=1) 2025-03-28T09:34:31.705578Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2159], cookie=340280153977310940, session=0, seqNo=0) 2025-03-28T09:34:31.705696Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:34:31.717801Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2159], cookie=340280153977310940, session=2) 2025-03-28T09:34:31.718840Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-28T09:34:31.718974Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-28T09:34:31.719061Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:34:31.731096Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=111) 2025-03-28T09:34:31.731431Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=112, session=1, semaphore="Lock2" count=1) 2025-03-28T09:34:31.731554Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-03-28T09:34:31.731631Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-03-28T09:34:31.743743Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=112) 2025-03-28T09:34:31.744182Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=333, session=1, semaphore="Lock1" count=1) 2025-03-28T09:34:31.744405Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=222, session=2, semaphore="Lock1" count=1) 2025-03-28T09:34:31.744495Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-03-28T09:34:31.744594Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=223, session=2, semaphore="Lock2" 
count=18446744073709551615) 2025-03-28T09:34:31.756627Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=333) 2025-03-28T09:34:31.756705Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=222) 2025-03-28T09:34:31.756734Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=223) 2025-03-28T09:34:31.757311Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:159:2183], cookie=17866366540406852428, name="Lock1") 2025-03-28T09:34:31.757394Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:159:2183], cookie=17866366540406852428) 2025-03-28T09:34:31.757899Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:162:2186], cookie=1028940058592612019, name="Lock2") 2025-03-28T09:34:31.757964Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:162:2186], cookie=1028940058592612019) 2025-03-28T09:34:31.758359Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:165:2189], cookie=3099982287592620144, name="Lock1") 2025-03-28T09:34:31.758418Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:165:2189], cookie=3099982287592620144) 2025-03-28T09:34:31.758871Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:168:2192], cookie=7958805667141626529, name="Lock2") 2025-03-28T09:34:31.758937Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:168:2192], cookie=7958805667141626529) 2025-03-28T09:34:31.759166Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=444, session=2, semaphore="Lock2" count=1) 2025-03-28T09:34:31.759282Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-03-28T09:34:31.771319Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=444) 2025-03-28T09:34:31.771948Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:173:2197], cookie=2717777824806321314, name="Lock2") 2025-03-28T09:34:31.772035Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:173:2197], cookie=2717777824806321314) 2025-03-28T09:34:31.772453Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:176:2200], cookie=17641959797934298524, name="Lock2") 2025-03-28T09:34:31.772514Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:176:2200], cookie=17641959797934298524) 2025-03-28T09:34:31.785115Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:31.785209Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:31.785737Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:31.786303Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:31.843481Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:31.843616Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:34:31.843658Z node 2 :KESUS_TABLET DEBUG: 
[72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-03-28T09:34:31.843699Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-03-28T09:34:31.843724Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-03-28T09:34:31.844019Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:216:2230], cookie=13577527948435668404, name="Lock1") 2025-03-28T09:34:31.844092Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:216:2230], cookie=13577527948435668404) 2025-03-28T09:34:31.844649Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:224:2237], cookie=5779799999828764383, name="Lock2") 2025-03-28T09:34:31.844716Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:224:2237], cookie=5779799999828764383)
>> TKesusTest::TestQuoterAccountResourcesOnDemand [GOOD]
>> TKesusTest::TestQuoterAccountResourcesPaced
>> TPart::ForwardEnv [GOOD]
>> TPart::WreckPartColumnGroups
>> BuildStatsHistogram::Single_History [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD]
Test command err:
2025-03-28T09:34:30.068436Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:30.068598Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:30.107440Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:30.107549Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:30.126309Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:30.127303Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=17457492031861853213, session=0, seqNo=222) 2025-03-28T09:34:30.127481Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:34:30.150085Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=17457492031861853213, session=1) 2025-03-28T09:34:30.150495Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:131:2157], cookie=2375519711854479983, session=1, seqNo=111) 2025-03-28T09:34:30.162604Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:131:2157], cookie=2375519711854479983, session=1) 2025-03-28T09:34:30.595619Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:30.595714Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:30.616017Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:30.616229Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:30.640549Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:30.641085Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=111, session=0, seqNo=42) 2025-03-28T09:34:30.641235Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:34:30.641456Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=222, session=1, seqNo=41) 2025-03-28T09:34:30.653617Z node 2
:KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=111, session=1) 2025-03-28T09:34:30.653724Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=222, session=1) 2025-03-28T09:34:30.991631Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:30.991740Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:31.010121Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:31.010219Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:31.023578Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:31.024033Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=13239597936502121544, session=0, seqNo=0) 2025-03-28T09:34:31.024135Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:34:31.046346Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=13239597936502121544, session=1) 2025-03-28T09:34:31.048294Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:149:2173], cookie=9590858765190101311) 2025-03-28T09:34:31.048371Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:149:2173], cookie=9590858765190101311) 2025-03-28T09:34:31.418708Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:31.418808Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:31.437279Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:31.437862Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:31.461670Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:31.809442Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:31.809534Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:31.827944Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:31.828424Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:31.852185Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:31.852628Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=13669816569869422923, session=0, seqNo=0) 2025-03-28T09:34:31.852762Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:34:31.864946Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=13669816569869422923, session=1) 2025-03-28T09:34:31.865270Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-28T09:34:31.865416Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-28T09:34:31.865524Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:34:31.877559Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-03-28T09:34:31.878418Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreCreate::Execute (sender=[5:146:2170], cookie=5384926946732625005, name="Sem1", limit=42) 2025-03-28T09:34:31.878544Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 2 "Sem1" 2025-03-28T09:34:31.890817Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:146:2170], cookie=5384926946732625005) 2025-03-28T09:34:31.891331Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:151:2175], cookie=681155435154784607, name="Sem1", limit=42) 2025-03-28T09:34:31.903643Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:151:2175], cookie=681155435154784607) 2025-03-28T09:34:31.904180Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:156:2180], cookie=17311550621550548659, name="Sem1", limit=51) 2025-03-28T09:34:31.916468Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:156:2180], cookie=17311550621550548659) 2025-03-28T09:34:31.917063Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:161:2185], cookie=6740827424490725030, name="Lock1", limit=42) 2025-03-28T09:34:31.929365Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:161:2185], cookie=6740827424490725030) 2025-03-28T09:34:31.929942Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:166:2190], cookie=15136305989904991420, name="Lock1", limit=18446744073709551615) 2025-03-28T09:34:31.942238Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:166:2190], cookie=15136305989904991420) 2025-03-28T09:34:31.942843Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:171:2195], cookie=13301862152675514996, name="Sem1") 2025-03-28T09:34:31.942947Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:171:2195], cookie=13301862152675514996) 2025-03-28T09:34:31.943436Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:174:2198], cookie=7423177595896593419, name="Sem2") 2025-03-28T09:34:31.943519Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:174:2198], cookie=7423177595896593419) 2025-03-28T09:34:31.957599Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:31.957698Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:31.958252Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:31.958945Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:31.996414Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:31.996563Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:34:31.996946Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:214:2228], cookie=9279369038763906334, name="Sem1") 2025-03-28T09:34:31.997011Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:214:2228], cookie=9279369038763906334) 2025-03-28T09:34:31.997642Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:221:2234], cookie=214578163126865582, name="Sem2") 2025-03-28T09:34:31.997698Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSemaphoreDescribe::Complete (sender=[5:221:2234], cookie=214578163126865582)
>> BuildStatsHistogram::Single_History_Slices
>> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD]
>> TTicketParserTest::AuthorizationRetryErrorImmediately [GOOD]
>> TTicketParserTest::AuthorizationWithRequiredPermissions
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodes [GOOD]
Test command err:
2025-03-28T09:34:27.034959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:27.035032Z node 1 :IMPORT WARN: Table profiles were not loaded
>> TPartBtreeIndexIteration::OneNode_Groups_History_Slices [GOOD]
>> TPartGroupBtreeIndexIter::NoNodes [GOOD]
>> TPartGroupBtreeIndexIter::OneNode
>> TPartGroupBtreeIndexIter::OneNode [GOOD]
>> TPartGroupBtreeIndexIter::FewNodes
------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationModify [GOOD]
Test command err:
2025-03-28T09:34:16.083185Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791383016083191:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:16.097605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ead/r3tmp/tmptIsyAN/pdisk_1.dat 2025-03-28T09:34:16.465493Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:16.491623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:16.492234Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:16.524692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3673, node 1 2025-03-28T09:34:16.660237Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:16.660260Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:16.660268Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:16.660396Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23697 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success.
2025-03-28T09:34:17.197068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:17.229347Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:34:17.229423Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:34:17.230259Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:34:17.230796Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthentication 2025-03-28T09:34:17.230957Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:12255 2025-03-28T09:34:17.236204Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2025-03-28T09:34:17.251921Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:34:17.252992Z node 1 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-03-28T09:34:17.254425Z node 1 :GRPC_CLIENT DEBUG: [517000010788] Connect to grpc://localhost:62747 2025-03-28T09:34:17.255419Z node 1 :GRPC_CLIENT DEBUG: [517000010788] Request GetUserAccountRequest { user_account_id: "user1" } 2025-03-28T09:34:17.281397Z node 1 :GRPC_CLIENT DEBUG: [517000010788] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-03-28T09:34:17.281778Z node 1 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-03-28T09:34:19.385943Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791395955947702:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:19.385996Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ead/r3tmp/tmpPIFVAJ/pdisk_1.dat 2025-03-28T09:34:19.511925Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:19.548508Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:19.548590Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:19.550131Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4104, node 2 2025-03-28T09:34:19.613966Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:19.614007Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:19.614015Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:19.614137Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9585 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:19.829247Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:19.848945Z node 2 :TICKET_PARSER ERROR: Ticket **** (8E120919): Token is not supported 2025-03-28T09:34:22.388851Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486791409477889570:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:22.388903Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ead/r3tmp/tmpXeF2ma/pdisk_1.dat 2025-03-28T09:34:22.490495Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27078, node 3 2025-03-28T09:34:22.532358Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:22.532454Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:22.534081Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:22.574984Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:22.575016Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:22.575025Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:22.575158Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27600 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:22.766534Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:22.773601Z node 3 :TICKET_PARSER ERROR: Ticket **** (8E120919): Unknown token 2025-03-28T09:34:25.663142Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486791421561197451:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:25.663222Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ead/r3tmp/tmpnC9ed3/pdisk_1.dat 2025-03-28T09:34:25.781269Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:25.813800Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:25.813887Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:25.815564Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3197, node 4 2025-03-28T09:34:25.853121Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:25.853148Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:25.853158Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:25.853306Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32653 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" Path ... 
16 Access Denied 2025-03-28T09:34:26.171037Z node 4 :TICKET_PARSER TRACE: Ticket **** (E2D1584C) permission something.read now has a permanent error "Access Denied" retryable:0 2025-03-28T09:34:26.171054Z node 4 :TICKET_PARSER DEBUG: Ticket **** (E2D1584C) () has now permanent error message 'Access Denied' 2025-03-28T09:34:26.171435Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:34:26.171445Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:34:26.171451Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:34:26.171467Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:34:26.171582Z node 4 :GRPC_CLIENT DEBUG: [51700002b288] Request AuthorizeRequest { iam_token: "**** (BE2EA0D0)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:26.173360Z node 4 :GRPC_CLIENT DEBUG: [51700002b288] Status 16 Access Denied 2025-03-28T09:34:26.173436Z node 4 :TICKET_PARSER TRACE: Ticket **** (BE2EA0D0) permission something.read now has a permanent error "Access Denied" retryable:0 2025-03-28T09:34:26.173454Z node 4 :TICKET_PARSER DEBUG: Ticket **** (BE2EA0D0) () has now permanent error message 'Access Denied' 2025-03-28T09:34:26.173880Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:34:26.173895Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:34:26.173900Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:34:26.173912Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:34:26.174034Z node 4 :GRPC_CLIENT DEBUG: [51700002b288] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2025-03-28T09:34:26.175438Z node 4 :GRPC_CLIENT DEBUG: [51700002b288] Status 16 Access Denied 2025-03-28T09:34:26.175499Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2025-03-28T09:34:26.175512Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-03-28T09:34:26.175904Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:34:26.175919Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:34:26.175924Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:34:26.175936Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:34:26.176021Z node 4 :GRPC_CLIENT DEBUG: [51700002b288] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:26.177168Z node 4 :GRPC_CLIENT DEBUG: [51700002b288] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:34:26.177243Z node 4 :TICKET_PARSER TRACE: 
Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-03-28T09:34:26.177326Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-28T09:34:26.178536Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:34:26.178566Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:34:26.178580Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:34:26.178593Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:34:26.178675Z node 4 :GRPC_CLIENT DEBUG: [51700002b288] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2025-03-28T09:34:26.180525Z node 4 :GRPC_CLIENT DEBUG: [51700002b288] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:34:26.180596Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-03-28T09:34:26.180669Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-28T09:34:26.181050Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:34:26.181071Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:34:26.181076Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:34:26.181092Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(monitoring.view) 2025-03-28T09:34:26.181179Z node 4 :GRPC_CLIENT DEBUG: [51700002b288] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "monitoring.view" resource_path { id: "gizmo" type: "iam.gizmo" } } 2025-03-28T09:34:26.182356Z node 4 :GRPC_CLIENT DEBUG: [51700002b288] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:34:26.182428Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission monitoring.view now has a valid subject "user1@as" 2025-03-28T09:34:26.182468Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-28T09:34:28.976607Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486791433292107384:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:28.976688Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ead/r3tmp/tmplo9n1y/pdisk_1.dat 2025-03-28T09:34:29.060235Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22686, node 5 2025-03-28T09:34:29.108492Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:29.108586Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:29.110198Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:29.124980Z node 5 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:29.124998Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:29.125004Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:29.125097Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10483 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:29.401080Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:29.408518Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:34:29.408556Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:34:29.408566Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:34:29.408596Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:34:29.408664Z node 5 :GRPC_CLIENT DEBUG: [51700002bd08] Connect to grpc://localhost:15665 2025-03-28T09:34:29.409530Z node 5 :GRPC_CLIENT DEBUG: [51700002bd08] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:29.417409Z node 5 :GRPC_CLIENT DEBUG: [51700002bd08] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:34:29.417578Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-03-28T09:34:29.417680Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-28T09:34:29.418210Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:34:29.418240Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:34:29.418249Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:34:29.418275Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:34:29.418322Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-03-28T09:34:29.418477Z node 5 :GRPC_CLIENT DEBUG: 
[51700002bd08] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:29.419071Z node 5 :GRPC_CLIENT DEBUG: [51700002bd08] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:29.420525Z node 5 :GRPC_CLIENT DEBUG: [51700002bd08] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:34:29.420670Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-03-28T09:34:29.421294Z node 5 :GRPC_CLIENT DEBUG: [51700002bd08] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:34:29.421388Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-03-28T09:34:29.421475Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as
>> TPartGroupBtreeIndexIter::FewNodes [GOOD]
>> TPartMulti::Basics [GOOD]
>> TPartMulti::BasicsReverse [GOOD]
>> TPartSlice::TrivialMerge [GOOD]
>> TPartSlice::SimpleMerge [GOOD]
>> TPartSlice::ComplexMerge [GOOD]
>> TPartSlice::LongTailMerge [GOOD]
>> TPartSlice::CutSingle [GOOD]
>> TPartSlice::CutMulti [GOOD]
>> TPartSlice::LookupBasics [GOOD]
>> TPartSlice::LookupFull [GOOD]
>> TPartSlice::EqualByRowId [GOOD]
>> TPartSlice::SupersetByRowId [GOOD]
>> TPartSlice::Subtract [GOOD]
>> TPartSlice::ParallelCompactions [GOOD]
>> TPartSlice::UnsplitBorrow [GOOD]
>> TPartSliceLoader::RestoreMissingSlice
>> TPartSliceLoader::RestoreMissingSlice [GOOD]
>> TPartSliceLoader::RestoreOneSlice [GOOD]
>> TPartSliceLoader::RestoreMissingSliceFullScreen [GOOD]
>> TPartSliceLoader::RestoreFromScreenIndexKeys [GOOD]
>> TPartSliceLoader::RestoreFromScreenDataKeys
>> TPartSliceLoader::RestoreFromScreenDataKeys [GOOD]
>> TRowVersionRangesTest::MergeFailLeft [GOOD]
>> TRowVersionRangesTest::MergeFailRight [GOOD]
>> TRowVersionRangesTest::MergeFailOuter [GOOD]
>> TRowVersionRangesTest::MergeFailInner [GOOD]
>> TRowVersionRangesTest::MergeExtendLeft [GOOD]
>> TRowVersionRangesTest::MergeExtendLeftInner [GOOD]
>> TRowVersionRangesTest::MergeExtendLeftComplete [GOOD]
>> TRowVersionRangesTest::MergeExtendRight [GOOD]
>> TRowVersionRangesTest::MergeExtendRightInner [GOOD]
>> TRowVersionRangesTest::MergeExtendRightComplete [GOOD]
>> TRowVersionRangesTest::MergeExtendBoth [GOOD]
>> TRowVersionRangesTest::MergeHoleExact [GOOD]
>> TRowVersionRangesTest::MergeHoleInner [GOOD]
>> TRowVersionRangesTest::MergeAllOuter [GOOD]
>> TRowVersionRangesTest::MergeAllInner [GOOD]
>> TRowVersionRangesTest::MergeAllEdges [GOOD]
>> TRowVersionRangesTest::ContainsEmpty [GOOD]
>> TRowVersionRangesTest::ContainsNonEmpty [GOOD]
>> TRowVersionRangesTest::ContainsInvalid [GOOD]
>> TRowVersionRangesTest::AdjustDown [GOOD]
>> TRowVersionRangesTest::AdjustDownSnapshot [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeView [GOOD]
>> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD]
Test command err:
2025-03-28T09:34:16.438074Z node 1 :METADATA_PROVIDER WARN:
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791381204386147:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:16.438138Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ea1/r3tmp/tmpOPwvUp/pdisk_1.dat 2025-03-28T09:34:16.822275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:16.822416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:16.830197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:16.882527Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12621, node 1 2025-03-28T09:34:16.975132Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:16.975170Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:16.975184Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:16.975325Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23366 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:17.250292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:34:17.270174Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:34:17.274106Z node 1 :TICKET_PARSER DEBUG: Ticket DC697F4560BC3EBD416953DAA39E2584C3319FF6374ADB9AA88A6CB686571280 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ea1/r3tmp/tmptp4jfs/pdisk_1.dat 2025-03-28T09:34:19.848224Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:19.849902Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:19.880703Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:19.880804Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:19.882665Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4261, node 2 2025-03-28T09:34:19.951286Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:19.951311Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:19.951318Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:19.951422Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5064 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:20.163107Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:20.170448Z node 2 :TICKET_PARSER DEBUG: Ticket 0EA18CD9BFD590CF237EA6376BE9C0C5274241039C87235AE64D6FE106048812 () has now permanent error message 'Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers' 2025-03-28T09:34:20.171124Z node 2 :TICKET_PARSER ERROR: Ticket 0EA18CD9BFD590CF237EA6376BE9C0C5274241039C87235AE64D6FE106048812: Cannot create token from certificate. 
Client`s certificate and server`s certificate have different issuers 2025-03-28T09:34:23.093814Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486791413638082114:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:23.093885Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ea1/r3tmp/tmpME84k8/pdisk_1.dat 2025-03-28T09:34:23.201245Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:23.239584Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:23.239675Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:23.241174Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3833, node 3 2025-03-28T09:34:23.280701Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:23.280725Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:23.280741Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:23.280885Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28572 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:23.491690Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:23.499357Z node 3 :TICKET_PARSER DEBUG: Ticket B63AC694A35B1198A99FD5DD163D8901B3A8645DD1B238F1B67705DA9D07E848 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2025-03-28T09:34:23.499965Z node 3 :TICKET_PARSER ERROR: Ticket B63AC694A35B1198A99FD5DD163D8901B3A8645DD1B238F1B67705DA9D07E848: Cannot create token from certificate. 
Client certificate failed verification 2025-03-28T09:34:26.226488Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486791424953335854:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:26.226573Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ea1/r3tmp/tmpD3YhbL/pdisk_1.dat 2025-03-28T09:34:26.380357Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:26.401521Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:26.401624Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:26.402811Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17805, node 4 2025-03-28T09:34:26.463450Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:26.463483Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:26.463492Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:26.463643Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2660 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:26.693256Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
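The node 4 run below sends one AuthorizeRequest carrying a map of two checks (something.read and something.write against the same container and resource path) and gets per-key results back; the first pass still yields a valid token for user1@as despite the denied write, while the second pass turns the denial into the permanent error 'something.write for aaaa1234 bbbb4554 - PERMISSION_DENIED'. A sketch of how such per-check results might fold into one ticket outcome, assuming a `Required` flag distinguishes the two passes — the struct and enum names here are illustrative, not YDB's actual types:

```cpp
#include <cstddef>
#include <map>
#include <string>
#include <vector>

// Hypothetical mirror of the per-check result codes seen in the
// AuthorizeResponse records below.
enum class EResultCode { Ok, PermissionDenied, Unavailable };

struct TCheck {
    std::string Permission;  // e.g. "something.write"
    bool Required = false;   // a denied *required* check is a hard error
};

struct TOutcome {
    bool Valid = false;      // ticket usable
    bool Retryable = false;  // transient failure, ask again later
    std::string Error;
};

// Folds keyed results into one outcome: transport failures are retryable,
// a denied required permission is permanent, and a denied optional
// permission merely drops that permission from the resulting token.
TOutcome FoldAuthorizeResults(const std::vector<TCheck>& checks,
                              const std::map<size_t, EResultCode>& results) {
    for (size_t i = 0; i < checks.size(); ++i) {
        auto it = results.find(i);
        EResultCode rc = (it == results.end()) ? EResultCode::Unavailable
                                               : it->second;
        if (rc == EResultCode::Unavailable) {
            return {false, true, "Service Unavailable"};
        }
        if (rc == EResultCode::PermissionDenied && checks[i].Required) {
            return {false, false, checks[i].Permission + " - PERMISSION_DENIED"};
        }
    }
    return {true, false, ""};
}
```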
2025-03-28T09:34:26.702526Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:34:26.702559Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:34:26.702569Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:34:26.704175Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-03-28T09:34:26.704243Z node 4 :GRPC_CLIENT DEBUG: [51700004d808] Connect to grpc://localhost:17857 2025-03-28T09:34:26.708208Z node 4 :GRPC_CLIENT DEBUG: [51700004d808] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-03-28T09:34:26.723638Z node 4 :GRPC_CLIENT DEBUG: [51700004d808] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } } 2025-03-28T09:34:26.723908Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2025-03-28T09:34:26.724055Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of user1@as 2025-03-28T09:34:26.724808Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:34:26.724841Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:34:26.724850Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:34:26.724908Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-03-28T09:34:26.725183Z node 4 :GRPC_CLIENT DEBUG: [51700004d808] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } 0: "OK" 2025-03-28T09:34:26.728341Z node 4 :GRPC_CLIENT DEBUG: [51700004d808] 
Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } } } results { key: 1 value { resultCode: PERMISSION_DENIED } } } 2025-03-28T09:34:26.728630Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write access denied for subject "" 2025-03-28T09:34:26.728689Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'something.write for aaaa1234 bbbb4554 - PERMISSION_DENIED' 2025-03-28T09:34:29.546391Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486791436247979067:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:29.546455Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ea1/r3tmp/tmpOZdVKK/pdisk_1.dat 2025-03-28T09:34:29.640634Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4999, node 5 2025-03-28T09:34:29.679536Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:29.679687Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:29.682182Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:29.706233Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:29.706263Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:29.706273Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:29.706424Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24288 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:29.956643Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
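The node 5 run that follows exercises the retryable path: gRPC status 14 (UNAVAILABLE, "Service Unavailable") and status 1 (CANCELLED) are both marked "retryable: 1" and the parser immediately re-issues the authorization. A minimal sketch of that control flow with bounded exponential backoff — the real ticket parser schedules refreshes through actor-system timers rather than a blocking sleep, so this shows only the shape of the loop:

```cpp
#include <algorithm>
#include <chrono>
#include <functional>
#include <string>
#include <thread>

struct TAuthResult {
    bool Ok = false;
    bool Retryable = false;  // e.g. gRPC UNAVAILABLE (14) or CANCELLED (1)
    std::string Message;
};

// Re-issues the authorize call while the error stays retryable, doubling
// the pause each attempt up to a cap. Permanent errors such as
// PERMISSION_DENIED are returned immediately.
TAuthResult AuthorizeWithRetry(const std::function<TAuthResult()>& call,
                               int maxAttempts = 5) {
    using namespace std::chrono;
    auto delay = milliseconds(50);
    const auto cap = duration_cast<milliseconds>(seconds(5));
    TAuthResult last;
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        last = call();
        if (last.Ok || !last.Retryable) {
            return last;
        }
        std::this_thread::sleep_for(delay);
        delay = std::min(cap, delay * 2);
    }
    return last;  // budget exhausted: surface the last retryable error
}
```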
2025-03-28T09:34:29.962664Z node 5 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:34:29.964868Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:34:29.964898Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:34:29.964906Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:34:29.964981Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-03-28T09:34:29.965046Z node 5 :GRPC_CLIENT DEBUG: [51700003ab08] Connect to grpc://localhost:6097 2025-03-28T09:34:29.966489Z node 5 :GRPC_CLIENT DEBUG: [51700003ab08] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } } } 14: "Service Unavailable" 2025-03-28T09:34:29.975822Z node 5 :GRPC_CLIENT DEBUG: [51700003ab08] Status 14 Service Unavailable 2025-03-28T09:34:29.976006Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2025-03-28T09:34:29.976037Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2025-03-28T09:34:29.976074Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-28T09:34:29.976159Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2025-03-28T09:34:29.976486Z node 5 :GRPC_CLIENT DEBUG: [51700003ab08] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } container_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } 2025-03-28T09:34:29.979418Z node 5 :GRPC_CLIENT DEBUG: [51700003ab08] Status 1 CANCELLED 2025-03-28T09:34:29.979559Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2025-03-28T09:34:29.979579Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2025-03-28T09:34:29.979609Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' >> TPart::WreckPartColumnGroups [GOOD] >> TPart::PageFailEnvColumnGroups >> BackupRestore::RestoreViewDependentOnAnotherView [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal >> 
AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes >> Cdc::UuidExchange[PqRunner] >> Cdc::DocApi[PqRunner] >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] Test command err: Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b {0, 1} | 1 39 2466b {5, 7} + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 
String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{20} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 6 12 122b {1, 8} | 7 14 122b {2, NULL} | 8 16 122b {2, 4} | 9 18 122b {2, 7} | 10 20 122b {2, 10} | 11 22 122b {3, 3} | 12 24 122b {3, 6} | 13 26 122b {3, 8} | 14 28 122b {4, NULL} | 15 30 122b {4, 4} | 16 32 122b {4, 7} | 17 34 122b {4, 10} | 18 36 122b {5, 3} | 19 38 122b {5, 6} | 19 39 122b {5, 7} + BTreeIndex{PageId: 21 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > {0, 4} | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > {0, 7} | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > {0, 10} | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > {1, 3} | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > {1, 6} | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > {1, 8} | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > {2, NULL} | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > {2, 4} | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > {2, 10} | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > {3, 3} | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > {3, 6} | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > {3, 8} | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > {4, NULL} | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > {4, 4} | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > {4, 7} | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > {4, 10} | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > {5, 3} | PageId: 18 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | > {5, 6} | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + 
Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 
String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 4910b 40r} data 6206b + FlatIndex{26} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 50b {0, 1} | 1 2 50b {0, 4} | 2 4 50b {0, 7} | 3 6 50b {0, 10} | 4 8 50b {1, 3} | 5 10 50b {1, 6} | 6 12 50b {1, 8} | 7 14 50b {2, NULL} | 8 16 50b {2, 4} | 10 18 50b {2, 7} | 11 20 50b {2, 10} | 12 22 50b {3, 3} | 13 24 50b {3, 6} | 15 26 50b {3, 8} | 16 28 50b {4, NULL} | 17 30 50b {4, 4} | 18 32 50b {4, 7} | 19 34 50b {4, 10} | 21 36 50b {5, 3} | 22 38 50b {5, 6} | 22 39 50b {5, 7} + BTreeIndex ... xxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 
122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4441b + FlatIndex{26} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 7 12 122b {1, 8} | 8 14 122b {2, NULL} | 9 16 122b {2, 4} | 11 18 122b {2, 7} | 12 20 122b {2, 10} | 13 22 122b {3, 3} | 15 24 122b {3, 6} | 16 26 122b {3, 8} | 17 28 122b {4, NULL} | 19 30 122b {4, 4} | 20 32 122b {4, 7} | 21 34 122b {4, 10} | 24 36 122b {5, 3} | 25 38 122b {5, 6} | 25 39 122b {5, 7} + BTreeIndex{PageId: 29 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 360 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 722 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 7 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 8 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 9 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 1454 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 11 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 12 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 13 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 1820 
ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 15 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 16 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 17 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 19 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 20 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 21 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 24 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 25 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{7} Label{74 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{8} Label{84 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{9} Label{94 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{11} Label{114 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{12} Label{124 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{13} Label{134 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{15} Label{154 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 
24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{16} Label{164 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{17} Label{174 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{19} Label{194 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{20} Label{204 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{21} Label{214 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{24} Label{244 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{25} Label{254 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} >> TKesusTest::TestQuoterAccountResourcesPaced [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestoreS3::TestAllSchemeObjectTypes-EPathTypeTransfer [GOOD] Test command err: 2025-03-28T09:34:10.913928Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791356340938813:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:10.914000Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001755/r3tmp/tmpdCBFLk/pdisk_1.dat 2025-03-28T09:34:11.384577Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:11.395370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:11.395467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:11.406184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12784, node 1 2025-03-28T09:34:11.642393Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:11.642421Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:11.642429Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:11.642559Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:16909 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:12.242573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:13.792965Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791369225841719:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:13.793084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:14.272731Z node 1 :TX_PROXY DEBUG: actor# [1:7486791356340939045:2141] Handle TEvProposeTransaction 2025-03-28T09:34:14.272767Z node 1 :TX_PROXY DEBUG: actor# [1:7486791356340939045:2141] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T09:34:14.272816Z node 1 :TX_PROXY DEBUG: actor# [1:7486791356340939045:2141] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486791373520809044:2628] 2025-03-28T09:34:14.323179Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809044:2628] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table" Columns { Name: "Key" Type: "Uint32" NotNull: false } Columns { Name: "Value" Type: "Utf8" NotNull: false } KeyColumnNames: "Key" PartitionConfig { } Temporary: false } } } UserToken: "" DatabaseName: "" 2025-03-28T09:34:14.323236Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809044:2628] txid# 281474976710658 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:34:14.323631Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809044:2628] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:34:14.323696Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809044:2628] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:34:14.323980Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809044:2628] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:34:14.324104Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809044:2628] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:34:14.324173Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809044:2628] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T09:34:14.324322Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809044:2628] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T09:34:14.325628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:34:14.327659Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809044:2628] txid# 281474976710658 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710658} 2025-03-28T09:34:14.327725Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809044:2628] txid# 281474976710658 SEND to# [1:7486791373520809043:2343] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 53} 2025-03-28T09:34:14.469209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791373520809186:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:14.469268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:14.469299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791373520809191:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:14.469586Z node 1 :TX_PROXY DEBUG: actor# [1:7486791356340939045:2141] Handle TEvProposeTransaction 2025-03-28T09:34:14.469608Z node 1 :TX_PROXY DEBUG: actor# [1:7486791356340939045:2141] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T09:34:14.469642Z node 1 :TX_PROXY DEBUG: actor# [1:7486791356340939045:2141] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7486791373520809194:2745] 2025-03-28T09:34:14.474003Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809194:2745] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/Root" 2025-03-28T09:34:14.474059Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809194:2745] txid# 281474976710659 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:34:14.474096Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809194:2745] txid# 281474976710659 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 1 2025-03-28T09:34:14.475371Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809194:2745] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:34:14.475444Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809194:2745] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:34:14.475573Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809194:2745] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:34:14.475709Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809194:2745] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:34:14.475760Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809194:2745] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2025-03-28T09:34:14.475926Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809194:2745] txid# 281474976710659 HANDLE EvClientConnected 2025-03-28T09:34:14.477073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:34:14.479217Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809194:2745] txid# 
281474976710659 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-28T09:34:14.479254Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809194:2745] txid# 281474976710659 SEND to# [1:7486791373520809193:2354] Source {TEvProposeTransactionStatus txid# 281474976710659 Status# 53} 2025-03-28T09:34:14.491863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791373520809193:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:34:14.544602Z node 1 :TX_PROXY DEBUG: actor# [1:7486791356340939045:2141] Handle TEvProposeTransaction 2025-03-28T09:34:14.544634Z node 1 :TX_PROXY DEBUG: actor# [1:7486791356340939045:2141] TxId# 281474976710660 ProcessProposeTransaction 2025-03-28T09:34:14.544676Z node 1 :TX_PROXY DEBUG: actor# [1:7486791356340939045:2141] Cookie# 0 userReqId# "" txid# 281474976710660 SEND to# [1:7486791373520809277:2793] 2025-03-28T09:34:14.547117Z node 1 :TX_PROXY DEBUG: Actor# [1:7486791373520809277:2793] txid# 281474976710660 Bootstrap EvSchemeRequest record: Transaction ... HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 54B29FFC-CABA-4E00-9CF8-229A28EEA6E1 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250328/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;range;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=fda15eb25cb0a026e44c19ff34680caaee21794c6fd43376fecede4184e318c3 content-type: application/xml range: bytes=0-30 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250328T093432Z S3_MOCK::HttpServeRead: /test_bucket/view/metadata.json / 31 2025-03-28T09:34:32.321575Z node 10 :IMPORT DEBUG: HandleMetadata TEvExternalStorage::TEvGetObjectResponse: self# [10:7486791450048560975:2194], result# 3486cb59549c9f2b1f64d3c0399801a0 REQUEST: HEAD /test_bucket/view/scheme.pb HTTP/1.1 HEADERS: Host: localhost:22551 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 887E51CD-2D62-49E1-850A-F4A5D815B385 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250328/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=d33148ecae25c06bb8551f8dc461fcbc9084ee693bde2bcca9cefd51d0b53f15 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250328T093432Z 2025-03-28T09:34:32.361574Z node 10 :IMPORT DEBUG: HandleScheme TEvExternalStorage::TEvHeadObjectResponse: self# [10:7486791450048560975:2194], result# No response body. 
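Each object in the exported bucket is fetched the same way in these records: a HEAD first, to learn ContentLength (and the ETag used as a checksum), then a GET whose Range header covers exactly that many bytes — metadata.json above was 31 bytes and read with `range: bytes=0-30`, and create_view.sql below reports `ContentLength: 165` and is read with `range: bytes=0-164`. A string-level sketch of that HEAD/ranged-GET pairing; `THttpRequest` is a hypothetical stand-in, not the SDK's request type:

```cpp
#include <cstdint>
#include <string>

struct THttpRequest {
    std::string Method;
    std::string Path;
    std::string RangeHeader;  // empty for HEAD
};

THttpRequest MakeHead(const std::string& bucket, const std::string& key) {
    return {"HEAD", "/" + bucket + "/" + key, ""};
}

THttpRequest MakeRangedGet(const std::string& bucket, const std::string& key,
                           uint64_t contentLength) {
    // HTTP Range is inclusive on both ends, hence contentLength - 1.
    return {"GET", "/" + bucket + "/" + key,
            "bytes=0-" + std::to_string(contentLength - 1)};
}

// Mirroring the log: HEAD reported ContentLength: 165, so the GET for
// view/create_view.sql carries "range: bytes=0-164".
// auto get = MakeRangedGet("test_bucket", "view/create_view.sql", 165);
```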
REQUEST: HEAD /test_bucket/view/create_view.sql HTTP/1.1 HEADERS: Host: localhost:22551 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 8971DA32-A00C-4AF8-9DD1-9AB52D80B187 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250328/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=b902ee13ea6c005804963a88ca3bdd0a13f85c448b83e3ed8e61a86fda90494e content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250328T093432Z S3_MOCK::HttpServeRead: /test_bucket/view/create_view.sql / 165 2025-03-28T09:34:32.365473Z node 10 :IMPORT DEBUG: HandleScheme TEvExternalStorage::TEvHeadObjectResponse: self# [10:7486791450048560975:2194], result# HeadObjectResult { ETag: 54623f53d68141118383b3390c4965d5 ContentLength: 165 } REQUEST: GET /test_bucket/view/create_view.sql HTTP/1.1 HEADERS: Host: localhost:22551 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3ED5310C-0DB6-47C5-BDBD-A393A4887033 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250328/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;range;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=ec6c837d5032f6a50383756a78de0e3e02330096788d8080bad93deb08214179 content-type: application/xml range: bytes=0-164 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250328T093432Z S3_MOCK::HttpServeRead: /test_bucket/view/create_view.sql / 165 2025-03-28T09:34:32.369439Z node 10 :IMPORT DEBUG: HandleScheme TEvExternalStorage::TEvGetObjectResponse: self# [10:7486791450048560975:2194], result# 54623f53d68141118383b3390c4965d5 REQUEST: HEAD /test_bucket/view/permissions.pb HTTP/1.1 HEADERS: Host: localhost:22551 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 58228275-5F15-4F55-B08D-8AAADB0EF919 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250328/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=6fbda564425cc39968901b1596679422e039b284efea9fa9e8a8ed0d3f5a6860 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250328T093432Z 2025-03-28T09:34:32.398639Z node 10 :IMPORT DEBUG: HandlePermissions TEvExternalStorage::TEvHeadObjectResponse: self# [10:7486791450048560975:2194], result# No response body. 
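The TImport::TTxProgress records below step through one item's restore: the fetched scheme is handed off (OnSchemeResult), the reconstructed CREATE VIEW query is compiled (OnSchemeQueryPreparation), a scheme txId is allocated (OnAllocateResult), and finally the prepared query is executed (ExecutePreparedQuery). A hypothetical reduction of that progression to a linear state machine — the enum values paraphrase the log's SubStates and are not YDB's actual identifiers:

```cpp
#include <string>

enum class ESubState { FetchScheme, CompileQuery, AllocateTxId, ExecuteQuery, Done };

// Advances one import item on a successful step; on failure the real code
// records an Issue on the item and either retries or aborts the import.
ESubState Next(ESubState s, bool success, std::string& issue) {
    if (!success) {
        issue = "step failed";  // placeholder for the recorded Issue text
        return s;
    }
    switch (s) {
        case ESubState::FetchScheme:  return ESubState::CompileQuery;  // OnSchemeResult
        case ESubState::CompileQuery: return ESubState::AllocateTxId;  // OnSchemeQueryPreparation
        case ESubState::AllocateTxId: return ESubState::ExecuteQuery;  // OnAllocateResult
        case ESubState::ExecuteQuery: return ESubState::Done;          // ExecutePreparedQuery done
        case ESubState::Done:         return ESubState::Done;
    }
    return s;
}
```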
REQUEST: GET /test_bucket?prefix=view HTTP/1.1 HEADERS: Host: localhost:22551 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1CE8EBE7-68AD-4C3D-BAF0-E7F8E6D5A480 amz-sdk-request: attempt=1 authorization: AWS4-HMAC-SHA256 Credential=test_key/20250328/us-east-1/s3/aws4_request, SignedHeaders=amz-sdk-invocation-id;amz-sdk-request;content-type;host;x-amz-api-version;x-amz-content-sha256;x-amz-date, Signature=53256e076ac5ae114ac01c2aee871de117cb93fa110bc8737b6ce62a0bef1050 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-124-generic x86_64 Clang/18.1.8 x-amz-api-version: 2006-03-01 x-amz-content-sha256: e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 x-amz-date: 20250328T093432Z S3_MOCK::HttpServeList: view 2025-03-28T09:34:32.432558Z node 10 :IMPORT DEBUG: HandleChangefeeds TEvExternalStorage::TEvListObjectResponse: self# [10:7486791450048560975:2194], result# ListObjectsResult { } 2025-03-28T09:34:32.432601Z node 10 :IMPORT INFO: Reply: self# [10:7486791450048560975:2194], success# 1, error# 2025-03-28T09:34:32.432687Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-28T09:34:32.432698Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeResult: id# 281474976715664, itemIdx# 0, success# 1 2025-03-28T09:34:32.447776Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-28T09:34:32.479461Z node 10 :IMPORT DEBUG: TSchemeQueryExecutor HandleCompileResponse, self: [10:7486791450048560984:2814], status: SUCCESS 2025-03-28T09:34:32.479529Z node 10 :IMPORT INFO: TSchemeQueryExecutor Reply, self: [10:7486791450048560984:2814], status: SUCCESS 2025-03-28T09:34:32.479766Z node 10 :IMPORT DEBUG: TSchemeQueryExecutor Reply, self: [10:7486791450048560984:2814], status: SUCCESS, prepared query: "WorkingDir: \"/Root\" OperationType: ESchemeOpCreateView FailedOnAlreadyExists: false CreateView { Name: \"view\" QueryText: \"SELECT 1 AS Key UNION SELECT 2 AS Key UNION SELECT 3 AS Key\" CapturedContext { PathPrefix: \"/Root\" SyntaxVersion: 1 AnsiLexer: false PgParser: false Pragmas: \"AnsiInForEmptyOrNullableItemsCollections\" Pragmas: \"AnsiLike\" Pragmas: \"FlexibleTypes\" Pragmas: \"AnsiCurrentRow\" Pragmas: \"WarnOnAnsiAliasShadowing\" Pragmas: \"AnsiOptionalAs\" Pragmas: \"EmitAggApply\" } }" 2025-03-28T09:34:32.479961Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-28T09:34:32.479998Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnSchemeQueryPreparation: id# 281474976715664, itemIdx# 0, status# SUCCESS, error# 2025-03-28T09:34:32.480186Z node 10 :IMPORT INFO: TImport::TTxProgress: Allocate txId: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/view' DstPathId: State: CreateSchemeObject SubState: AllocateTxId WaitTxId: 0 Issue: '' } 2025-03-28T09:34:32.481980Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-28T09:34:32.482112Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-28T09:34:32.482130Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnAllocateResult: txId# 281474976710758, id# 281474976715664 2025-03-28T09:34:32.482191Z node 10 :IMPORT INFO: TImport::TTxProgress: ExecutePreparedQuery: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 
DstPathName: '/Root/view' DstPathId: State: CreateSchemeObject SubState: Proposed WaitTxId: 0 Issue: '' }, txId# 281474976710758 2025-03-28T09:34:32.482274Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-28T09:34:32.484664Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-28T09:34:32.484690Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnModifyResult: txId# 281474976710758, status# StatusAccepted 2025-03-28T09:34:32.484837Z node 10 :IMPORT INFO: TImport::TTxProgress: Wait for completion: info# { Id: 281474976715664 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046644480, LocalPathId: 1] UserSID: '(empty maybe)' State: Waiting Issue: '' Items: 1 }, item# { Idx: 0 DstPathName: '/Root/view' DstPathId: [OwnerId: 72057594046644480, LocalPathId: 8] State: CreateSchemeObject SubState: Subscribed WaitTxId: 281474976710758 Issue: '' } 2025-03-28T09:34:32.486219Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-28T09:34:32.492609Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoExecute 2025-03-28T09:34:32.492636Z node 10 :IMPORT DEBUG: TImport::TTxProgress: OnNotifyResult: txId# 281474976710758 2025-03-28T09:34:32.493899Z node 10 :IMPORT DEBUG: TImport::TTxProgress: DoComplete 2025-03-28T09:34:32.519143Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7486791450048561028:2369] [0] Resolve database: name# /Root 2025-03-28T09:34:32.519473Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7486791450048561028:2369] [0] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:34:32.519501Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7486791450048561028:2369] [0] Send request: schemeShardId# 72057594046644480 2025-03-28T09:34:32.520032Z node 10 :TX_PROXY DEBUG: [GetImport] [10:7486791450048561028:2369] [0] Handle TEvImport::TEvGetImportResponse: record# Entry { Id: 281474976715664 Status: SUCCESS Progress: PROGRESS_DONE ImportFromS3Settings { endpoint: "localhost:22551" scheme: HTTP bucket: "test_bucket" items { source_prefix: "view" destination_path: "/Root/view" } } StartTime { seconds: 1743154472 } EndTime { seconds: 1743154472 } } 2025-03-28T09:34:32.632707Z node 10 :TX_PROXY DEBUG: actor# [10:7486791432868690711:2138] Handle TEvExecuteKqpTransaction 2025-03-28T09:34:32.632747Z node 10 :TX_PROXY DEBUG: actor# [10:7486791432868690711:2138] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-03-28T09:34:32.633204Z node 10 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe1msjc0z0nkwnngzdjady6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=10&id=ODc4NGYwZjAtODE5MDY5N2YtMzQzYjJiNGEtYTg0NjllMWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root
|94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest
|94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest
|94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest
>> TColumnShardTestSchema::RebootHotTiersRevCompression [GOOD]
>> TPart::PageFailEnvColumnGroups [GOOD]
>> TPart::ForwardEnvColumnGroups
>> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError
>> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError
>> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule
>> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD]
>> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase
>> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit
>> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC
>> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest
>> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3
>> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit
>> TPart::ForwardEnvColumnGroups [GOOD]
>> TPart::Versions [GOOD]
>> TPart::ManyVersions [GOOD]
>> TPart::ManyDeltas [GOOD]
>> TPart::CutKeys_Lz4 [GOOD]
>> TPart::CutKeys_Seek [GOOD]
>> TPart::CutKeys_SeekPages
>> TPart::CutKeys_SeekPages [GOOD]
>> TPart::CutKeys_SeekSlices [GOOD]
>> TPart::CutKeys_CutString [GOOD]
>> TPart::CutKeys_CutUtf8String [GOOD]
>> TPartBtreeIndexIteration::NoNodes
|94.5%| [TA] $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log}
|94.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log}
>> TTicketParserTest::AuthorizationWithRequiredPermissions [GOOD]
>> TTicketParserTest::AuthorizationWithUserAccount
>> TVersions::WreckHead [GOOD]
>> TVersions::WreckHeadReverse
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> TColumnShardTestSchema::RebootHotTiersRevCompression [GOOD]
Test command err:
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10;
WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143154985.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143154985.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143154985.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154985.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143154985.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143154985.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153785.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123154985.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123154985.000000s;Name=;Codec=};
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153785.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123153785.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123153785.000000s;Name=;Codec=};
2025-03-28T09:33:05.601019Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-28T09:33:05.706031Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-28T09:33:05.739339Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-28T09:33:05.739660Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-28T09:33:05.748686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-28T09:33:05.748933Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-28T09:33:05.749197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-28T09:33:05.749341Z node 1 :TX_COLUMNSHARD
WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:33:05.749466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:33:05.749583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:33:05.749702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:33:05.749848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:33:05.749991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:33:05.750123Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:33:05.750271Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:33:05.750389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:33:05.784375Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:33:05.784733Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:33:05.784808Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:33:05.785006Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:33:05.785170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:33:05.785257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:33:05.785301Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:33:05.785395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 
2025-03-28T09:33:05.785460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:33:05.785506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:33:05.785563Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:33:05.785731Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:33:05.785805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:33:05.785850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:33:05.785887Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:33:05.785981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:33:05.786038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:33:05.786084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:33:05.786113Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:33:05.786193Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:33:05.786235Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:33:05.786269Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:33:05.786323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:33:05.786368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:33:05.786413Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:33:05.786888Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55; 2025-03-28T09:33:05.786983Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=39; 2025-03-28T09:33:05.787066Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=34; 2025-03-28T09:33:05.787161Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=40; 2025-03-28T09:33:05.787339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:33:05.787400Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:33:05.787437Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:33:05.787648Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:33:05.787697Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:33:05.787732Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... _data.cpp:29;EXECUTE:finishLoadingTime=352; 2025-03-28T09:34:34.698833Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=34891; 2025-03-28T09:34:34.705994Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=7073; 2025-03-28T09:34:34.713586Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=6718; 2025-03-28T09:34:34.713684Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7586; 2025-03-28T09:34:34.713870Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=117; 2025-03-28T09:34:34.713976Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=61; 2025-03-28T09:34:34.714075Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=68; 2025-03-28T09:34:34.714157Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=53; 
2025-03-28T09:34:34.721589Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=7375; 2025-03-28T09:34:34.731791Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10099; 2025-03-28T09:34:34.731927Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=34; 2025-03-28T09:34:34.731994Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=27; 2025-03-28T09:34:34.732033Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-03-28T09:34:34.732069Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-03-28T09:34:34.732102Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2025-03-28T09:34:34.732163Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=34; 2025-03-28T09:34:34.732202Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-03-28T09:34:34.732275Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=45; 2025-03-28T09:34:34.732308Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-03-28T09:34:34.732365Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=26; 2025-03-28T09:34:34.732434Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=40; 2025-03-28T09:34:34.732717Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=254; 2025-03-28T09:34:34.732745Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=74347; 2025-03-28T09:34:34.732883Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:34:34.732973Z node 1 
:TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:34:34.733013Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:34:34.733061Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:34:34.747502Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-28T09:34:34.747654Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:34:34.747704Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:34:34.747757Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-28T09:34:34.747805Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:34:34.747836Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:34:34.747890Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:34.747927Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:34.748012Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:34:34.748387Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:34:34.748451Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2654:4528];tablet_id=9437184;parent=[1:2612:4493];fline=manager.cpp:82;event=ask_data;request=request_id=155;1={portions_count=29};; 2025-03-28T09:34:34.749125Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:34:34.749442Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:34:34.749466Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:34:34.749485Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:34:34.749517Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:34:34.749563Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:34:34.749606Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-28T09:34:34.749668Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:34:34.749707Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:34:34.749743Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:34.749773Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:34.749853Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:34:34.750807Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-03-28T09:34:34.751934Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> TKesusTest::TestQuoterAccountLabels >> TPartBtreeIndexIteration::NoNodes 
[GOOD] >> TPartBtreeIndexIteration::NoNodes_Groups >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> BuildStatsHistogram::Single_History_Slices [GOOD] >> BuildStatsHistogram::Ten_Mixed >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Cdc::UuidExchange[PqRunner] [GOOD] >> Cdc::UuidExchange[YdsRunner] >> TKesusTest::TestQuoterAccountLabels [GOOD] >> TKesusTest::TestPassesUpdatedPropsToSession >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount [GOOD] >> TTicketParserTest::AuthorizationUnavailable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] Test command err: 2025-03-28T09:26:48.320557Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:48.412790Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:48.440391Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:48.440767Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:48.449882Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:48.450110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:48.450372Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:48.450513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:48.450641Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:48.450803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:48.450929Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:48.451045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:26:48.451197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:48.451320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:48.451435Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:48.451568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:48.484284Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:48.484659Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:48.484723Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:48.484944Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:48.485126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:48.485231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:48.485284Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:48.485395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:48.485478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:48.485528Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:48.485567Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:48.485755Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:48.485830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:48.485876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:48.485911Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:48.486065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:48.486126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:48.486174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:48.486208Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:48.486302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:48.486348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:48.486387Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:48.486444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:48.486485Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:48.486522Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:48.486976Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=52; 2025-03-28T09:26:48.487106Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=50; 2025-03-28T09:26:48.487220Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=59; 2025-03-28T09:26:48.487328Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-03-28T09:26:48.487528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:48.487621Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:48.487667Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:48.487908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:48.487966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:48.488008Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:48.488180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:48.488236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:48.488270Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:48.488492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:48.488549Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:48.488583Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:48.488735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:48.488793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:48.488852Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
8T09:34:35.616291Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10505:12466];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-03-28T09:34:36.311554Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T09:34:36.311661Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=14; 2025-03-28T09:34:36.312312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=564; 2025-03-28T09:34:36.312368Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=650; 2025-03-28T09:34:36.318265Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T09:34:36.318337Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=9; 2025-03-28T09:34:36.337987Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=19553; 2025-03-28T09:34:36.356450Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=16865; 2025-03-28T09:34:36.356584Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=18482; 2025-03-28T09:34:36.356781Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=123; 2025-03-28T09:34:36.356935Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=94; 2025-03-28T09:34:36.357149Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=145; 2025-03-28T09:34:36.357334Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=124; 2025-03-28T09:34:36.357573Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=183; 2025-03-28T09:34:36.357623Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=39241; 2025-03-28T09:34:36.362358Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T09:34:36.362456Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=13; 2025-03-28T09:34:36.367545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4989; 2025-03-28T09:34:36.396618Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=28943; 2025-03-28T09:34:36.396773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=48; 2025-03-28T09:34:36.396860Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=34; 2025-03-28T09:34:36.396917Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2025-03-28T09:34:36.396977Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2025-03-28T09:34:36.397040Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=11; 2025-03-28T09:34:36.397141Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=51; 2025-03-28T09:34:36.397201Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=10; 2025-03-28T09:34:36.397322Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=74; 2025-03-28T09:34:36.397385Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=11; 2025-03-28T09:34:36.397469Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=40; 2025-03-28T09:34:36.397591Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=72; 2025-03-28T09:34:36.397699Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=64; 2025-03-28T09:34:36.397750Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=35231; 2025-03-28T09:34:36.397962Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=113961708;raw_bytes=176366876;count=47;records=1845000} inactive {blob_bytes=174125508;raw_bytes=270077548;count=81;records=2819164} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:34:36.399264Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10505:12466];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:34:36.399350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:10505:12466];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:34:36.399447Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;self_id=[1:10505:12466];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:34:36.399512Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10505:12466];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-03-28T09:34:36.399781Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:34:36.399893Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:34:36.400141Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-03-28T09:34:36.400235Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-28T09:34:36.400295Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:34:36.400353Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:36.400405Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:36.400530Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:34:36.405854Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:34:36.409316Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10505:12466];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:34:36.411442Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10505:12466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:34:36.411500Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:34:36.411538Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:34:36.411603Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10505:12466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:34:36.411692Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10505:12466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:34:36.411997Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10505:12466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=20; 2025-03-28T09:34:36.412087Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10505:12466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-28T09:34:36.412152Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10505:12466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=20;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:34:36.412220Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10505:12466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:36.412275Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10505:12466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:36.412414Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10505:12466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-03-28T09:34:36.412489Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:10505:12466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] Test command err: 2025-03-28T09:34:30.068447Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:30.068602Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:30.108021Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:30.108116Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:30.136039Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:30.141674Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:130:2156], cookie=14107879037314766271, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-03-28T09:34:30.142857Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-28T09:34:30.155253Z node 1 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:130:2156], cookie=14107879037314766271) 2025-03-28T09:34:30.156131Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:140:2164], cookie=17523927762826896090, path="/Root/Res", config={ }) 2025-03-28T09:34:30.156379Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-03-28T09:34:30.169708Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:140:2164], cookie=17523927762826896090) 2025-03-28T09:34:30.179319Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:145:2169]. Cookie: 14117268062463484905. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-03-28T09:34:30.179412Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[1:145:2169], cookie=14117268062463484905) 2025-03-28T09:34:30.180138Z node 1 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [1:145:2169]. Cookie: 14428422263637821954. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 28000 } } 2025-03-28T09:34:30.180189Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[1:145:2169], cookie=14428422263637821954) 2025-03-28T09:34:32.329561Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:32.329673Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:32.348891Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:32.349110Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:32.373158Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:32.373477Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:132:2158], cookie=5788219632817819857, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-03-28T09:34:32.373707Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-28T09:34:32.385450Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:132:2158], cookie=5788219632817819857) 2025-03-28T09:34:32.386125Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:142:2166]. Cookie: 12070433743387103014. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-03-28T09:34:32.386167Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:142:2166], cookie=12070433743387103014) 2025-03-28T09:34:32.386642Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:142:2166]. Cookie: 10986535951457379935. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-03-28T09:34:32.386708Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:142:2166], cookie=10986535951457379935) 2025-03-28T09:34:32.387138Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:142:2166]. Cookie: 15067291603639979502. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-03-28T09:34:32.387177Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:142:2166], cookie=15067291603639979502) 2025-03-28T09:34:32.387460Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:142:2166]. Cookie: 435143569480529377. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-03-28T09:34:32.387501Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:142:2166], cookie=435143569480529377) 2025-03-28T09:34:34.312990Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:34.313071Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:34.326191Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:34.326388Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:34.340459Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:34.340909Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:132:2158], cookie=884103767247690519, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-03-28T09:34:34.341215Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-28T09:34:34.363789Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:132:2158], cookie=884103767247690519) 2025-03-28T09:34:34.364383Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:142:2166], cookie=18107431478480269390, path="/Root/Res1", config={ }) 2025-03-28T09:34:34.364603Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2025-03-28T09:34:34.376455Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:142:2166], cookie=18107431478480269390) 2025-03-28T09:34:34.377069Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:147:2171], cookie=8149150010389356262, path="/Root/Res2", config={ }) 2025-03-28T09:34:34.377296Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2025-03-28T09:34:34.389511Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:147:2171], cookie=8149150010389356262) 2025-03-28T09:34:34.390358Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:152:2176]. Cookie: 18290086218736286600. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-03-28T09:34:34.390448Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:152:2176], cookie=18290086218736286600) 2025-03-28T09:34:34.391100Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:152:2176]. Cookie: 15406771371282377824. 
Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-03-28T09:34:34.391156Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:152:2176], cookie=15406771371282377824) 2025-03-28T09:34:34.391730Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:152:2176]. Cookie: 1678212405530229093. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1020500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1020500 } } 2025-03-28T09:34:34.391797Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:152:2176], cookie=1678212405530229093) 2025-03-28T09:34:36.585187Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:36.585275Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:36.597187Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:36.597628Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:36.622017Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:36.622359Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:132:2158], cookie=11740731717658946169, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-03-28T09:34:36.622629Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-28T09:34:36.634869Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:132:2158], cookie=11740731717658946169) 2025-03-28T09:34:36.635919Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:142:2166]. Cookie: 16532973736972539651. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { Enabled: true BillingPeriodSec: 2 Labels { key: "k1" value: "v1" } Labels { key: "k2" value: "v2" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-28T09:34:36.635974Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:142:2166], cookie=16532973736972539651) 2025-03-28T09:34:36.636369Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:142:2166]. Cookie: 17638170665244846485. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 27500 } } 2025-03-28T09:34:36.636478Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:142:2166], cookie=17638170665244846485) 2025-03-28T09:34:38.827287Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:38.827384Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:38.845363Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:38.846187Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:38.870474Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:38.870895Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=5103043893079867346, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2025-03-28T09:34:38.871147Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-28T09:34:38.883218Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=5103043893079867346) 2025-03-28T09:34:38.883757Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:141:2165], cookie=18286942976590158732, path="/Root/Res", config={ }) 2025-03-28T09:34:38.883985Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-03-28T09:34:38.896279Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:141:2165], cookie=18286942976590158732) 2025-03-28T09:34:38.897190Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:146:2170]. Cookie: 12031654700017963978. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-28T09:34:38.897252Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:146:2170], cookie=12031654700017963978) 2025-03-28T09:34:38.897822Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:150:2174], cookie=6201305531025481078, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2025-03-28T09:34:38.897996Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Updated quoter resource 1 "Root" 2025-03-28T09:34:38.898161Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [5:146:2170]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2025-03-28T09:34:38.910638Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:150:2174], cookie=6201305531025481078) 2025-03-28T09:34:38.911377Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:146:2170]. Cookie: 18055667831140168473. Data: { } 2025-03-28T09:34:38.911437Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Update quoter resources consumption state (sender=[5:146:2170], cookie=18055667831140168473) >> TPartBtreeIndexIteration::NoNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobal [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase [GOOD] >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError >> TPartBtreeIndexIteration::FewNodes [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex >> BuildStatsHistogram::Ten_Mixed [GOOD] >> BuildStatsHistogram::Ten_Serial >> Cdc::DocApi[PqRunner] [GOOD] >> Cdc::DocApi[YdsRunner] >> Cdc::UuidExchange[YdsRunner] [GOOD] >> Cdc::UuidExchange[TopicRunner] >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload 
[GOOD] >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] >> TTicketParserTest::AuthorizationUnavailable [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> KqpSystemView::QueryStatsSimple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:34:41.549043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:41.549174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:41.549224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:41.549261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:41.550523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:41.550606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:41.550722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:41.550816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:41.554105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:41.646086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:41.646138Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:41.661190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:41.661447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:41.661566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:41.668296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:41.669272Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:41.672973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.674084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:41.680925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.690202Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:41.690382Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.691325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:41.691380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:41.691428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:41.691522Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.697854Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:34:41.814724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:41.817452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.818928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:41.820054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:41.820167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.823114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.823242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:41.823397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.823515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:41.823570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:41.823603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:41.825174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.825218Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:41.825253Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:41.827920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.827972Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.828010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.828054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.832631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:41.834254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:41.834427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:41.836491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.836604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:41.836649Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.838488Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:41.838561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.838755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:41.838832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:41.840581Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:41.840763Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:41.841161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.841213Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:41.841301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is 
done id#1:0 progress is 1/1 2025-03-28T09:34:41.841332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.841365Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:41.841392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.841435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:41.841469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.841503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:41.841542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:41.841591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:41.841621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:41.841652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:41.843354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:41.843438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:41.843471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
schemeshard: 72057594046678944 2025-03-28T09:34:42.292196Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-28T09:34:42.292392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.292500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.292624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944 2025-03-28T09:34:42.292687Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: PersQueue, at schemeshard: 72057594046678944 2025-03-28T09:34:42.292733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-28T09:34:42.292770Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: PersQueueReadBalancer, at schemeshard: 72057594046678944 2025-03-28T09:34:42.292792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:34:42.292881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.292946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.293122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 8, at schemeshard: 72057594046678944 2025-03-28T09:34:42.293286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:34:42.293623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.293749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.294121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.294225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.294458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.294537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.294632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.294825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.294916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.295135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.295375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 
0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.295563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.295643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.295703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:42.295959Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:34:42.300118Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:34:42.300281Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-28T09:34:42.301338Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435082, Sender [1:562:2491], Recipient [1:562:2491]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-03-28T09:34:42.301387Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-03-28T09:34:42.302766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:42.302840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:42.303318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:42.303373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:42.303422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:42.303471Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:34:42.305068Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:598:2491], Recipient [1:562:2491]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-03-28T09:34:42.305114Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-03-28T09:34:42.305157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:562:2491] sender: [1:619:2058] recipient: [1:15:2062] 2025-03-28T09:34:42.358218Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:618:2535], Recipient [1:562:2491]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-28T09:34:42.358298Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-28T09:34:42.358424Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:42.358662Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 227us result status StatusSuccess 2025-03-28T09:34:42.359128Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { 
Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:42.359786Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [1:620:2536], Recipient [1:562:2491]: NKikimrPQ.TEvPeriodicTopicStats PathId: 2 Generation: 1 Round: 96 DataSize: 19 UsedReserveSize: 7 2025-03-28T09:34:42.359853Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-03-28T09:34:42.359896Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 19 UsedReserveSize 7 2025-03-28T09:34:42.359958Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-28T09:34:42.360017Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2025-03-28T09:34:42.360245Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:621:2537], Recipient [1:562:2491]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-28T09:34:42.360285Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-28T09:34:42.360370Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:42.360522Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 157us result status StatusSuccess 2025-03-28T09:34:42.360900Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:34:41.549042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:41.549151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:41.549240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:41.549280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:41.550522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:41.550575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:41.550703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:41.550807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:41.552343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:41.654382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:41.654442Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-03-28T09:34:41.668532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:41.668852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:41.668990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:41.675331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:41.675568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:41.676321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.676502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:41.680801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.690262Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:41.690348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.691328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:41.691405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:41.691461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:41.691551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.699137Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:34:41.831366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:41.831622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.831820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:41.832072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:41.832165Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.834327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.834460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:41.834622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.834726Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:41.834783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:41.834820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:41.836707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.836766Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:41.836803Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:41.838647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.838728Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.838780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.838824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.842666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:41.844413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:41.844607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:41.845561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.845676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:41.845724Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.846010Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:41.846078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.846257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:41.846343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:34:41.848117Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:41.848171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:41.848341Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.848389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:41.848758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.848817Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:41.848906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:41.848940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.848979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:41.849013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.849049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:41.849090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.849125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:41.849155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:41.849209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:41.849245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:41.849292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:41.857541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:41.857689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:41.857732Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
rk, received event# 269877760, Sender [1:1022:2881], Recipient [1:283:2270]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037968897 Status: OK ServerId: [1:1024:2883] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:34:42.532641Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T09:34:42.532733Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594037968897, status: OK, at schemeshard: 72057594046678944 2025-03-28T09:34:42.533139Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 2025-03-28T09:34:42.533254Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 2025-03-28T09:34:42.533350Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268698118, Sender [1:216:2215], Recipient [1:283:2270]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 2025-03-28T09:34:42.533381Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvHive::TEvDeleteTabletReply 2025-03-28T09:34:42.533434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:34:42.533896Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268698118, Sender [1:216:2215], Recipient [1:283:2270]: NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 2025-03-28T09:34:42.533937Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvHive::TEvDeleteTabletReply 2025-03-28T09:34:42.534005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-28T09:34:42.535172Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409551][Topic3] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409551 2025-03-28T09:34:42.535276Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2025-03-28T09:34:42.535908Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:34:42.537988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:34:42.538025Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:34:42.538120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:34:42.538505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:34:42.538538Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:34:42.539721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:34:42.539821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 
72057594046678944:4 2025-03-28T09:34:42.540013Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877763, Sender [1:1022:2881], Recipient [1:283:2270]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [1:1022:2881] ServerId: [1:1024:2883] } 2025-03-28T09:34:42.540048Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-28T09:34:42.540081Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-28T09:34:42.540449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-28T09:34:42.540491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-28T09:34:42.540957Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:1038:2897], Recipient [1:283:2270]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:34:42.541014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:34:42.541049Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T09:34:42.541185Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:546:2479], Recipient [1:283:2270]: NKikimrScheme.TEvNotifyTxCompletion TxId: 104 2025-03-28T09:34:42.541218Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-28T09:34:42.541284Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-28T09:34:42.541378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:34:42.541419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1036:2895] 2025-03-28T09:34:42.541596Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [1:1038:2897], Recipient [1:283:2270]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:34:42.541630Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:34:42.541664Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2025-03-28T09:34:42.542254Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1039:2898], Recipient [1:283:2270]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-28T09:34:42.542304Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-28T09:34:42.542391Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:42.542571Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 170us result status StatusSuccess 2025-03-28T09:34:42.543013Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 31 UsedReserveSize: 31 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:42.543814Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271188001, Sender [1:1040:2899], Recipient [1:283:2270]: NKikimrPQ.TEvPeriodicTopicStats PathId: 4 Generation: 1 Round: 6 DataSize: 151 UsedReserveSize: 151 2025-03-28T09:34:42.543879Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2025-03-28T09:34:42.543918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 4] DataSize 151 UsedReserveSize 151 2025-03-28T09:34:42.543954Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-28T09:34:42.544386Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1041:2900], Recipient [1:283:2270]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-28T09:34:42.544435Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-28T09:34:42.544524Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:42.546411Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 155us result status StatusSuccess 2025-03-28T09:34:42.546865Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 182 UsedReserveSize: 182 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> TExecutorDb::RandomOps [GOOD] >> TExecutorDb::FullScan >> TExternalTableTest::ReplaceExternalTableIfNotExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationUnavailable [GOOD] Test command err: 2025-03-28T09:34:16.078264Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791384458831757:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:16.078308Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e8b/r3tmp/tmpQnmtEg/pdisk_1.dat 2025-03-28T09:34:16.454016Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:16.505210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:16.505339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:16.509609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11312, node 1 2025-03-28T09:34:16.660471Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:16.660493Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:16.660499Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:16.660675Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14269 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:17.179468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:17.210350Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:34:17.220009Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:34:17.221726Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Connect to grpc://localhost:8616 2025-03-28T09:34:17.227628Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:17.250306Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-03-28T09:34:17.250497Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-03-28T09:34:17.250555Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-28T09:34:17.250600Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:34:17.252069Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:17.255704Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-03-28T09:34:17.255843Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-03-28T09:34:17.255886Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-28T09:34:18.099972Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-03-28T09:34:18.100024Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:34:18.100308Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { 
access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:18.106841Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Status 14 Service Unavailable 2025-03-28T09:34:18.106975Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-03-28T09:34:18.107009Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-28T09:34:20.101787Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket AKIA****MPLE (B3EDC139) 2025-03-28T09:34:20.101844Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:34:20.102046Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:20.103888Z node 1 :GRPC_CLIENT DEBUG: [517000010088] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:34:20.104307Z node 1 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a valid subject "user1@as" 2025-03-28T09:34:20.104419Z node 1 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-03-28T09:34:21.078248Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791384458831757:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:21.078342Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e8b/r3tmp/tmpl8Xbv2/pdisk_1.dat 2025-03-28T09:34:29.783133Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:29.835998Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7796, node 2 2025-03-28T09:34:29.861566Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:29.861693Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:29.863617Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:29.896706Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:29.896732Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:29.896740Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:29.896890Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22770 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:30.125315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:30.132902Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:34:30.133007Z node 2 :GRPC_CLIENT DEBUG: [517000098888] Connect to grpc://localhost:10501 2025-03-28T09:34:30.134192Z node 2 :GRPC_CLIENT DEBUG: [517000098888] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:30.144530Z node 2 :GRPC_CLIENT DEBUG: [517000098888] Status 14 Service Unavailable 2025-03-28T09:34:30.144678Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2025-03-28T09:34:30.144714Z node 2 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2025-03-28T09:34:30.144740Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:34:30.145076Z node 2 :GRPC_CLIENT DEBUG: [517000098888] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:30.146977Z node 2 :GRPC_CLIENT DEBUG: [517000098888] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:34:30.147099Z node 2 :TICKET_PARSER TRACE: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a valid subject "user1@as" 2025-03-28T09:34:30.147213Z node 2 :TICKET_PARSER DEBUG: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2025-03-28T09:34:32.935094Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;s ... ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:34:36.278787Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:36.301755Z node 4 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:34:36.307504Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:34:36.307533Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:34:36.307541Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:34:36.307573Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:34:36.307641Z node 4 :GRPC_CLIENT DEBUG: [517000041e88] Connect to grpc://localhost:4411 2025-03-28T09:34:36.308513Z node 4 :GRPC_CLIENT DEBUG: [517000041e88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:36.322487Z node 4 :GRPC_CLIENT DEBUG: [517000041e88] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:34:36.322793Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-03-28T09:34:36.322832Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-03-28T09:34:36.324434Z node 4 :GRPC_CLIENT DEBUG: [517000006308] Connect to grpc://localhost:4513 2025-03-28T09:34:36.325444Z node 4 :GRPC_CLIENT DEBUG: [517000006308] Request GetUserAccountRequest { user_account_id: "user1" } 2025-03-28T09:34:36.335758Z node 4 :GRPC_CLIENT DEBUG: [517000006308] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2025-03-28T09:34:36.337466Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-03-28T09:34:36.342200Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:34:36.342231Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:34:36.342239Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:34:36.342274Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-03-28T09:34:36.342502Z node 4 :GRPC_CLIENT DEBUG: [517000041e88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:36.352901Z node 4 :GRPC_CLIENT DEBUG: [517000041e88] Status 16 Access Denied 2025-03-28T09:34:36.354806Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Access Denied" retryable:0 2025-03-28T09:34:36.354862Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2025-03-28T09:34:36.359098Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:34:36.359128Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 
2025-03-28T09:34:36.359144Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:34:36.359186Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:34:36.359254Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-03-28T09:34:36.359425Z node 4 :GRPC_CLIENT DEBUG: [517000041e88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:36.360136Z node 4 :GRPC_CLIENT DEBUG: [517000041e88] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:36.362725Z node 4 :GRPC_CLIENT DEBUG: [517000041e88] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:34:36.362824Z node 4 :GRPC_CLIENT DEBUG: [517000041e88] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:34:36.366822Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-03-28T09:34:36.366913Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2025-03-28T09:34:36.366934Z node 4 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for UserAccount(user1@as) 2025-03-28T09:34:36.367116Z node 4 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now valid token of login1@passport 2025-03-28T09:34:39.476543Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486791480137806822:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:39.476653Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e8b/r3tmp/tmpefIPey/pdisk_1.dat 2025-03-28T09:34:39.581610Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20253, node 5 2025-03-28T09:34:39.615314Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:39.615399Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:39.619815Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:39.639381Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:39.639409Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:39.639416Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:39.639549Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1665 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:39.895436Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:39.900713Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2025-03-28T09:34:39.900739Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:34:39.900746Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, database /Root, A6 error 2025-03-28T09:34:39.900774Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:34:39.900813Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-03-28T09:34:39.900888Z node 5 :GRPC_CLIENT DEBUG: [5170000a2988] Connect to grpc://localhost:11720 2025-03-28T09:34:39.901725Z node 5 :GRPC_CLIENT DEBUG: [5170000a2988] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:39.901969Z node 5 :GRPC_CLIENT DEBUG: [5170000a2988] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:39.907887Z node 5 :GRPC_CLIENT DEBUG: [5170000a2988] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2025-03-28T09:34:39.908013Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2025-03-28T09:34:39.908132Z node 5 :GRPC_CLIENT DEBUG: [5170000a2988] Status 14 Service Unavailable 2025-03-28T09:34:39.908178Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "Service Unavailable" retryable:1 2025-03-28T09:34:39.908230Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2025-03-28T09:34:39.908260Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2025-03-28T09:34:39.908325Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2025-03-28T09:34:39.908554Z node 5 :GRPC_CLIENT DEBUG: [5170000a2988] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" 
resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:39.909126Z node 5 :GRPC_CLIENT DEBUG: [5170000a2988] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2025-03-28T09:34:39.912194Z node 5 :GRPC_CLIENT DEBUG: [5170000a2988] Status 1 CANCELLED 2025-03-28T09:34:39.912319Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" 2025-03-28T09:34:39.912412Z node 5 :GRPC_CLIENT DEBUG: [5170000a2988] Status 1 CANCELLED 2025-03-28T09:34:39.912497Z node 5 :TICKET_PARSER TRACE: Ticket **** (8E120919) permission something.write now has a permanent error "CANCELLED" retryable:1 2025-03-28T09:34:39.912542Z node 5 :TICKET_PARSER DEBUG: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' >> TExternalTableTest::DropExternalTable >> TExternalTableTest::ReadOnlyMode >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] |94.6%| [TA] $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {RESULT} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 3986, MsgBus: 16092 2025-03-28T09:33:48.691907Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791263036825690:2213];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:48.692024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:48.823034Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791261101891119:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:48.823079Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:48.866874Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486791261223996864:2072];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f1d/r3tmp/tmpTp1BzS/pdisk_1.dat 2025-03-28T09:33:49.332185Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:49.725520Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:49.845458Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:49.845878Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:49.860463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2025-03-28T09:33:49.860623Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:49.862056Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:49.862118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:49.890098Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2025-03-28T09:33:49.890134Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:33:49.892115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:49.892556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:49.945287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:49.945392Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:49.950338Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3986, node 1 2025-03-28T09:33:50.086035Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:33:50.086097Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:33:50.086108Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:33:50.086286Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16092 TClient is connected to server localhost:16092 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:50.959803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:51.026171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:33:51.391555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:51.624121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:51.729577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:53.580558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791284511664108:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:53.580709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:53.689680Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791263036825690:2213];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:53.689768Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:53.823178Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486791261101891119:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:53.823250Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:53.847764Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486791261223996864:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:53.847827Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:53.908301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:53.976388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 2025-03-28T09:33:54.018768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720664:0, at schemeshard: 72057594046644480 2025-03-28T09:33:54.114815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720665:0, at schemeshard: 72057594046644480 2025-03-28T09:33:54.209049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720666:0, at schemeshard: 72057594046644480 2025-03-28T09:33:54.305899Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720667:0, at schemeshard: 72057594046644480 2025-03-28T09:33:54.421298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791288806632111:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:54.421368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:54.421503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791288806632116:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:54.426305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720668:3, at schemeshard: 72057594046644480 2025-03-28T09:33:54.463513Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791288806632118:2408], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720668 completed, doublechecking } 2025-03-28T09:33:54.569286Z node 1 :TX_PROXY ERROR: Actor# [1:7486791288806632201:4187] txid# 281474976720669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:56.039206Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154436018, txId: 281474976720673] shutting down 2025-03-28T09:33:56.293106Z node 2 :BS_PROXY_PUT ERROR: [bbb80b27cfa29f17] Result# TEvPutResult {Id# [72075186224037895:1:20:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037895:1:20:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-28T09:33:56.293711Z node 3 :BS_PROXY_PUT ERROR: [f044d89cf875863c] Result# TEvPutResult {Id# [72075186224037897:1:20:0:0:41:0] Status# ERR ... s: 20406 2025-03-28T09:34:33.335715Z node 16 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7486791455213583646:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:33.335784Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:33.361168Z node 17 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[17:7486791455620017865:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:33.361255Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:33.365633Z node 18 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[18:7486791456308265057:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:33.365723Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000f1d/r3tmp/tmpirK9du/pdisk_1.dat 2025-03-28T09:34:33.533557Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:33.579678Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:33.579782Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:33.580487Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:33.580563Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:33.581635Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:33.581709Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-03-28T09:34:33.584025Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 17 Cookie 17 2025-03-28T09:34:33.584180Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:33.584864Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:33.585287Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 18 Cookie 18 2025-03-28T09:34:33.586128Z node 16 :HIVE WARN: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61846, node 16 2025-03-28T09:34:33.631874Z node 16 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:33.631903Z node 16 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:33.631915Z node 16 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:33.632124Z node 16 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20406 TClient is connected to server localhost:20406 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:34.172610Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:34.190098Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:34.279131Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:34.401357Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:34.486598Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:34:37.916545Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7486791472393454824:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:37.916678Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:37.991901Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:34:38.061918Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:34:38.129014Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:34:38.201820Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:34:38.261770Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:34:38.325816Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:34:38.335960Z node 16 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7486791455213583646:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:38.336042Z node 16 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:34:38.361656Z node 17 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[17:7486791455620017865:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:38.361736Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:34:38.366832Z node 18 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[18:7486791456308265057:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:38.366914Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:34:38.573691Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7486791476688422805:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:38.573801Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7486791476688422810:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:38.573861Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:38.578588Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:34:38.608124Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7486791476688422812:2404], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:34:38.688758Z node 16 :TX_PROXY ERROR: Actor# [16:7486791476688422897:4133] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:34:40.713818Z node 16 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154480703, txId: 281474976715673] shutting down 2025-03-28T09:34:41.073343Z node 18 :BS_PROXY_PUT ERROR: [b8ae9f565ed9159a] Result# TEvPutResult {Id# [72075186224037900:1:21:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037900:1:21:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 16 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 2025-03-28T09:34:41.075340Z node 17 :BS_PROXY_PUT ERROR: [0483715bd9627fa0] Result# TEvPutResult {Id# [72075186224037893:1:21:0:0:41:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [72075186224037893:1:21:0:0:41:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 16 ErrorReasons# [ "BS_QUEUE: event undelivered", ] } ] Part situations# [ { OrderNumber# 0 Situations# E } ] " ApproximateFreeSpaceShare# 0} GroupId# 2181038080 Marker# BPP12 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] Test command err: 2025-03-28T09:34:30.068436Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:30.068685Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:30.110777Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:30.110909Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:30.127372Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:30.138629Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:130:2156], cookie=5887211649492370082, path="/Res", config={ MaxUnitsPerSecond: -100 }) 2025-03-28T09:34:30.138855Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:130:2156], cookie=5887211649492370082) 2025-03-28T09:34:30.139511Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:136:2161], cookie=13572228277575884664, path="/ResWithoutMaxUnitsPerSecond", config={ }) 2025-03-28T09:34:30.139631Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:136:2161], cookie=13572228277575884664) 2025-03-28T09:34:30.140191Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:139:2164], cookie=11024376044992539914, path="/ResWithMaxUnitsPerSecond", config={ MaxUnitsPerSecond: 1 }) 2025-03-28T09:34:30.140431Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "ResWithMaxUnitsPerSecond" 2025-03-28T09:34:30.163352Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete 
(sender=[1:139:2164], cookie=11024376044992539914) 2025-03-28T09:34:30.163953Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:145:2169], cookie=6919139594715855849, path="/ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond", config={ }) 2025-03-28T09:34:30.164179Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond" 2025-03-28T09:34:30.176434Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:145:2169], cookie=6919139594715855849) 2025-03-28T09:34:30.554583Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:30.554664Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:30.568939Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:30.569100Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:30.593146Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:30.593566Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:132:2158], cookie=11241425590565842125, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2025-03-28T09:34:30.593957Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-28T09:34:30.605842Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:132:2158], cookie=11241425590565842125) 2025-03-28T09:34:30.606418Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:142:2166], cookie=10024611090535302047, path="/Root/Res", config={ }) 2025-03-28T09:34:30.606639Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-03-28T09:34:30.618634Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:142:2166], cookie=10024611090535302047) 2025-03-28T09:34:30.620624Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:147:2171]. Cookie: 11000458080526551515. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 2 Version: "version" Schema: "schema" CloudId: "cloud" FolderId: "folder" ResourceId: "resource" SourceId: "source" Tags { key: "key" value: "value" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-28T09:34:30.620706Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[2:147:2171], cookie=11000458080526551515) 2025-03-28T09:34:30.621253Z node 2 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [2:147:2171]. Cookie: 12157559343116973932. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2025-03-28T09:34:30.621303Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[2:147:2171], cookie=12157559343116973932) 2025-03-28T09:34:32.797697Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:32.797827Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:32.816720Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:32.816860Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:32.830946Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:32.831435Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:132:2158], cookie=7598394360071329375, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-03-28T09:34:32.831746Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-28T09:34:32.854164Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:132:2158], cookie=7598394360071329375) 2025-03-28T09:34:32.854592Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:142:2166], cookie=2890438696753148787, path="/Root/Res", config={ }) 2025-03-28T09:34:32.854804Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 2 "Root/Res" 2025-03-28T09:34:32.866906Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:142:2166], cookie=2890438696753148787) 2025-03-28T09:34:32.867804Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:147:2171]. Cookie: 7221973506852942520. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-03-28T09:34:32.867884Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[3:147:2171], cookie=7221973506852942520) 2025-03-28T09:34:32.868455Z node 3 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [3:147:2171]. Cookie: 12841641924855881003. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019000 } } 2025-03-28T09:34:32.868511Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[3:147:2171], cookie=12841641924855881003) 2025-03-28T09:34:35.042515Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:35.042605Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:35.056563Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:35.057144Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:35.080921Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:35.081349Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:132:2158], cookie=1409990493805149492, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-03-28T09:34:35.081689Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-28T09:34:35.093737Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:132:2158], cookie=1409990493805149492) 2025-03-28T09:34:35.094599Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:142:2166]. Cookie: 12472132138746472890. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-03-28T09:34:35.094663Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:142:2166], cookie=12472132138746472890) 2025-03-28T09:34:35.095244Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:142:2166]. Cookie: 403064274973625589. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-03-28T09:34:35.095300Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:142:2166], cookie=403064274973625589) 2025-03-28T09:34:35.095722Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [4:142:2166]. Cookie: 6981004237937857994. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2025-03-28T09:34:35.095773Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[4:142:2166], cookie=6981004237937857994) 2025-03-28T09:34:37.371548Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:37.371666Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:37.392139Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:37.392700Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:37.417653Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:37.418089Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:132:2158], cookie=6536464358604821155, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2025-03-28T09:34:37.418459Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-28T09:34:37.431173Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:132:2158], cookie=6536464358604821155) 2025-03-28T09:34:37.432173Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:142:2166]. Cookie: 6125867771537804072. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-03-28T09:34:37.432235Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:142:2166], cookie=6125867771537804072) 2025-03-28T09:34:37.432772Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:142:2166]. Cookie: 11855913107748030012. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 3000000 } } 2025-03-28T09:34:37.432821Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:142:2166], cookie=11855913107748030012) 2025-03-28T09:34:39.864826Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:173:2190]. Cookie: 4819323202551902206. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-03-28T09:34:39.864886Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:173:2190], cookie=4819323202551902206) 2025-03-28T09:34:39.865304Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:173:2190]. Cookie: 16574825121617531780. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 9000000 } } 2025-03-28T09:34:39.865331Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:173:2190], cookie=16574825121617531780) 2025-03-28T09:34:41.912256Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:200:2216]. Cookie: 13170484783252364705. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2025-03-28T09:34:41.912335Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[5:200:2216], cookie=13170484783252364705) 2025-03-28T09:34:41.912789Z node 5 :KESUS_TABLET TRACE: [72057594037927937] Send TEvAccountResourcesAck to [5:200:2216]. Cookie: 2503380977385831754. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 15000000 } } 2025-03-28T09:34:41.912828Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Account quoter resources (sender=[5:200:2216], cookie=2503380977385831754)
>> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD]
>> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD]
>> TExternalTableTest::CreateExternalTable
|94.6%| [TA] $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log}
|94.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log}
>> TExternalTableTest::ParallelCreateSameExternalTable
>> Worker::Basic [GOOD]
>> TTransferTests::Create_Disabled
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD]
>> TExternalTableTest::DropExternalTable [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:34:41.549004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:41.549097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:41.549144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:41.549179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:41.550496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:41.550542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:41.550727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:41.550819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:41.552314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:41.644096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:41.644155Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:41.660294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:41.660644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:41.660773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:41.668695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:41.669280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:41.672790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.673870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:41.680795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.690226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:41.690300Z node 1 :FLAT_TX_SCHEMESHARD
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.691304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:41.691366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:41.691436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:41.691550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.697763Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:34:41.837845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:41.838034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.838197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:41.838440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:41.838494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:41.840751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840822Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:41.840879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:41.840908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:41.842484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.842529Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:41.842564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:41.844106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.844153Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-03-28T09:34:41.844191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.844228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.847509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:41.849055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:41.849192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:41.850151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.850263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:41.850305Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.850560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:41.850620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.850825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:41.850953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:41.852645Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:41.852692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:41.852851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.852900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:41.853256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.853315Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:41.853400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:41.853428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 
ready parts: 1/1 2025-03-28T09:34:41.853461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:41.853491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.853525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:41.853557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.853587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:41.853614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:41.853680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:41.853715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:41.853744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:41.855419Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:41.855519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:41.855558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:43.680345Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-03-28T09:34:43.680417Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2025-03-28T09:34:43.681037Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 0 2025-03-28T09:34:43.681138Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2025-03-28T09:34:43.681354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2025-03-28T09:34:43.698992Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:34:43.709398Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:43.709525Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 156us result status StatusSuccess 2025-03-28T09:34:43.709836Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:44.315976Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-03-28T09:34:44.316087Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2025-03-28T09:34:44.316855Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 4 DataSize: 16975298 UsedReserveSize: 0 2025-03-28T09:34:44.316967Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2025-03-28T09:34:44.317200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 0 2025-03-28T09:34:44.330878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:34:44.341341Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:44.341538Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 214us result status StatusSuccess 2025-03-28T09:34:44.342053Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:44.384104Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:44.384308Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 259us result status StatusSuccess 2025-03-28T09:34:44.384746Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false 
BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:44.385509Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186233409547][Topic1] pipe [1:629:2551] connected; active server actors: 1 2025-03-28T09:34:44.399702Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] BALANCER INIT DONE for Topic1: (0, 72075186233409546) (1, 72075186233409546) (2, 72075186233409546) 2025-03-28T09:34:44.400240Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2025-03-28T09:34:44.402080Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TEvClientConnected TabletId 72057594046678944, NodeId 1, Generation 3 2025-03-28T09:34:44.402184Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:44.402310Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 130us result status StatusSuccess 2025-03-28T09:34:44.402629Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams 
{ Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:44.402995Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TEvClientConnected TabletId 72075186233409546, NodeId 1, Generation 2 2025-03-28T09:34:44.436368Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186233409547][Topic1] pipe [1:676:2586] connected; active server actors: 1
>> TExternalTableTest::Decimal
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:34:41.550879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:41.550958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:41.550991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:41.551015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:41.551044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:41.551064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:41.551143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:41.551218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:41.554969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:41.645437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:41.645497Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:41.660278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:41.660599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:41.660780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:41.668729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete
2025-03-28T09:34:41.669276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:41.672754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.673858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:41.680795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.690226Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:41.690301Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.691304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:41.691362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:41.691442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:41.691541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.697744Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:34:41.814749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:41.817137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.818930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:41.820038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:41.820105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.823325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.823476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:41.823680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.823767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:41.823824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:41.823877Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:41.825825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.825884Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:41.825922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:41.827784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.827860Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.827907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.827971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.832614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:41.834542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:41.834721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:41.836498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.836631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:41.836676Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.837681Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:41.837760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.837930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:41.838027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:41.839874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:41.839921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:41.840084Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:41.840535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840609Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:41.840698Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:41.840734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.840776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:41.840806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.840844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:41.840887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.840923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:41.840953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:41.841011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:41.841069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:41.841103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:41.843029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:41.843135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:41.843177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
7594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:43.683281Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-03-28T09:34:43.683361Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2025-03-28T09:34:43.683721Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-03-28T09:34:43.683822Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-03-28T09:34:43.683959Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-03-28T09:34:43.684285Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 2 DataSize: 16975298 UsedReserveSize: 16975298 2025-03-28T09:34:43.684396Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2025-03-28T09:34:43.684595Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2025-03-28T09:34:43.697916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:34:43.708451Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:43.708640Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 232us result status StatusSuccess 2025-03-28T09:34:43.709210Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:44.316037Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2025-03-28T09:34:44.316134Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2025-03-28T09:34:44.316517Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-03-28T09:34:44.316604Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 2, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-03-28T09:34:44.316669Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186233409546, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 16975298 
UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2025-03-28T09:34:44.317048Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 2 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 16975298 2025-03-28T09:34:44.317162Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2025-03-28T09:34:44.317385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 2] DataSize 16975298 UsedReserveSize 16975298 2025-03-28T09:34:44.331066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:34:44.341504Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 2 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:44.341686Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 2 took 179us result status StatusSuccess 2025-03-28T09:34:44.342253Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:44.384332Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:44.384571Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 276us result status StatusSuccess 2025-03-28T09:34:44.385040Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: 
Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes_History >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TExternalTableTest::ReadOnlyMode [GOOD] >> TExternalTableTest::CreateExternalTable [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:34:44.540339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:44.540467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:44.540510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:44.540552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:44.541266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
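A note on the numbers that repeat through the balancer rounds above: under METERING_MODE_RESERVED_CAPACITY, the per-partition ReserveSize of 45532800 is exactly LifetimeSeconds × WriteSpeedInBytesPerSecond (2678400 × 17), the topic-level Topics.ReserveSize of 136598400 is three such partitions, and UsedReserveSize tracks min(PartitionSize, ReserveSize). The sketch below checks that consistency; the field names are copied from the trace, but the formulas are inferred from these values, not taken from the PersQueue sources.

```cpp
#include <algorithm>
#include <cassert>
#include <cstdint>

// Illustrative only: names mirror fields seen in the trace, not YDB's real API.
struct TPartitionConfig {
    uint64_t LifetimeSeconds;
    uint64_t WriteSpeedInBytesPerSecond;
};

// Per-partition reserve, as suggested by "ReserveSize: 45532800" above.
uint64_t ReserveSize(const TPartitionConfig& cfg) {
    return cfg.LifetimeSeconds * cfg.WriteSpeedInBytesPerSecond;
}

// Reserve actually consumed by stored data, capped by the reserve itself.
uint64_t UsedReserveSize(uint64_t partitionSize, const TPartitionConfig& cfg) {
    return std::min(partitionSize, ReserveSize(cfg));
}

int main() {
    TPartitionConfig cfg{2678400, 17};          // values from the trace
    assert(ReserveSize(cfg) == 45532800);       // per-partition ReserveSize
    assert(3 * ReserveSize(cfg) == 136598400);  // topic-level Topics.ReserveSize
    // Partition 0 holds 16975298 bytes; partitions 1 and 2 are empty.
    uint64_t total = UsedReserveSize(16975298, cfg)
                   + UsedReserveSize(0, cfg)
                   + UsedReserveSize(0, cfg);
    assert(total == 16975298);                  // Topics.UsedReserveSize
    return 0;
}
```

Read this way, the trace is self-consistent across all three StatsReportRound values, and it would also explain why AccountSize equals ReserveSize here: with reserved-capacity metering the topic is accounted for the full retention window, not for the 16975298 bytes partition 0 actually holds.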
2025-03-28T09:34:44.541311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:44.541383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:44.541465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:44.542603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:44.627321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:34:44.627383Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:44.633894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:44.634239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:44.634391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:44.648974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:44.649159Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:44.649820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:44.650004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:44.656320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:44.660082Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:44.660176Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:44.661074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:44.661133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:44.661204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:44.661864Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.668283Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 2025-03-28T09:34:44.764746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: 
"pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:44.766223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.767148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:44.768082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:44.768151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.770953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:44.771078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:44.771235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.771348Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:44.771389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:44.771416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:44.773239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.773281Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:44.773312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:44.774537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.774569Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.774598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:44.774674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:44.778236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:44.783426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:44.784136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to 
tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:44.785178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:44.785299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:44.785346Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:44.786423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:44.786477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:44.786628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:44.786724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:44.789156Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:44.789196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:44.789326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:44.789432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:34:44.789505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.789547Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:44.789639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:44.789666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:44.789697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:44.789723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:44.789749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:44.789785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:44.789824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:44.789850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:44.789894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:44.789922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:44.789950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 
72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:44.792273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... on: 8 2025-03-28T09:34:44.885889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:34:44.886903Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:34:44.886992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:34:44.887033Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:34:44.887063Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-28T09:34:44.887085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:34:44.887170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-28T09:34:44.888238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-03-28T09:34:44.888335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-03-28T09:34:44.889033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:44.889122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:44.889160Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TAlterExternalTable TPropose, operationId: 103:0 HandleReply TEvOperationPlan: step# 5000004 2025-03-28T09:34:44.889234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 240 2025-03-28T09:34:44.889347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:44.889389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:34:44.890567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:34:44.891036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 FAKE_COORDINATOR: Erasing txId 103 2025-03-28T09:34:44.891968Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2025-03-28T09:34:44.892003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:44.892112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:34:44.892159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:34:44.892383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:44.892420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-03-28T09:34:44.892450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-03-28T09:34:44.892480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-03-28T09:34:44.892540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.892589Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-28T09:34:44.892680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:34:44.892705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:34:44.892735Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:34:44.892758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:34:44.892788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-03-28T09:34:44.892816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:34:44.892843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:34:44.892867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:34:44.892910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:34:44.892936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:34:44.892972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2025-03-28T09:34:44.892997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-03-28T09:34:44.893028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2025-03-28T09:34:44.893795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:34:44.893852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:34:44.893875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:34:44.893913Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-28T09:34:44.893948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:34:44.894921Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:34:44.895018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:34:44.895048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:34:44.895083Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2025-03-28T09:34:44.895117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:34:44.895180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-03-28T09:34:44.896799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:34:44.897538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-28T09:34:44.897713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:34:44.897778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:34:44.898100Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:34:44.898172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:34:44.898194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:363:2354] TestWaitNotification: OK eventTxId 103 2025-03-28T09:34:44.898578Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:44.898807Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 248us result status StatusSuccess 2025-03-28T09:34:44.899092Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 2 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] >> TExternalTableTest::Decimal [GOOD] >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] >> TExternalTableTest::SchemeErrors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:34:44.540342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:44.540488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:44.540523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:44.540554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:44.541243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:44.541280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:44.541363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:44.541447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:44.542595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:44.626653Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:34:44.626753Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:44.633270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:44.633607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:44.633761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:44.649142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:44.649316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:44.649920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:44.650089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:44.656496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:44.660074Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:44.660152Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:44.661064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:44.661123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:44.661216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:44.661846Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.668324Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 2025-03-28T09:34:44.809046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:44.809365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: 
//MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.809603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:44.809865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:44.809933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.812540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:44.812686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:44.812897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.812981Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:44.813024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:44.813061Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:44.814996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.815057Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:44.815097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:44.816902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.816942Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.816980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:44.817082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:44.821120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:44.823175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:44.823388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:44.824540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at 
schemeshard: 72057594046678944 2025-03-28T09:34:44.824707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:44.824785Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:44.825072Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:44.825139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:44.825308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:44.825407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:44.827523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:44.827574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:44.827739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:44.827800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:34:44.827901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.827958Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:44.828062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:44.828098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:44.828157Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:44.828201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:44.828237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:44.828279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:44.828316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:44.828351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:44.828432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:44.828478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:44.828512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:44.837028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
HEMESHARD NOTICE: IgniteOperation, opId: 129:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:45.238168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-28T09:34:45.238214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-03-28T09:34:45.240132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusAccepted TxId: 129 SchemeshardId: 72057594046678944 PathId: 5, at schemeshard: 72057594046678944 2025-03-28T09:34:45.240247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /MyRoot/SubDirBBBB 2025-03-28T09:34:45.240456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:45.240485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:45.240605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-03-28T09:34:45.240675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:45.240705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:487:2445], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-03-28T09:34:45.240741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:487:2445], at schemeshard: 72057594046678944, txId: 129, path id: 5 2025-03-28T09:34:45.241069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.241119Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 129:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:45.241172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2025-03-28T09:34:45.241298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:45.241902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-03-28T09:34:45.241997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 129 2025-03-28T09:34:45.242029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-03-28T09:34:45.242057Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2025-03-28T09:34:45.242097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-03-28T09:34:45.242618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-03-28T09:34:45.242700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2025-03-28T09:34:45.242721Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2025-03-28T09:34:45.242740Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 2 2025-03-28T09:34:45.242762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-03-28T09:34:45.242806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2025-03-28T09:34:45.244582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2025-03-28T09:34:45.244716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000005 2025-03-28T09:34:45.245941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-03-28T09:34:45.246177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:45.246254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:45.246292Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000005, at schemeshard: 72057594046678944 2025-03-28T09:34:45.246383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 129:0 128 -> 240 2025-03-28T09:34:45.246516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-28T09:34:45.246557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-03-28T09:34:45.246826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Erasing txId 129 2025-03-28T09:34:45.248370Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:45.248421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:45.248573Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2025-03-28T09:34:45.248643Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:45.248674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:487:2445], at schemeshard: 72057594046678944, txId: 129, path id: 1 2025-03-28T09:34:45.248707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:487:2445], at schemeshard: 72057594046678944, txId: 129, path id: 5 2025-03-28T09:34:45.248902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.248944Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 129:0 ProgressState 2025-03-28T09:34:45.249069Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-03-28T09:34:45.249128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-03-28T09:34:45.249170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#129:0 progress is 1/1 2025-03-28T09:34:45.249233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-03-28T09:34:45.249272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2025-03-28T09:34:45.249311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2025-03-28T09:34:45.249345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 129:0 2025-03-28T09:34:45.249384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 129:0 2025-03-28T09:34:45.249451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-03-28T09:34:45.249489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2025-03-28T09:34:45.249524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-03-28T09:34:45.249558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 5], 3 2025-03-28T09:34:45.250270Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-03-28T09:34:45.250351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 129 2025-03-28T09:34:45.250381Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2025-03-28T09:34:45.250421Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-03-28T09:34:45.250464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2025-03-28T09:34:45.251266Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 
LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-03-28T09:34:45.251349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 5 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2025-03-28T09:34:45.251380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2025-03-28T09:34:45.251413Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 3 2025-03-28T09:34:45.251444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 2025-03-28T09:34:45.251508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2025-03-28T09:34:45.253912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2025-03-28T09:34:45.254998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 >> BuildStatsHistogram::Ten_Serial [GOOD] >> BuildStatsHistogram::Ten_Crossed >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic [GOOD] Test command err: 2025-03-28T09:34:37.987869Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791472619434285:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:37.987977Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001081/r3tmp/tmpg9IDOa/pdisk_1.dat 2025-03-28T09:34:38.368926Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:38.409828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:38.410345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:38.415521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29081 TServer::EnableGrpc on GrpcPort 5742, node 1 2025-03-28T09:34:38.688095Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:38.688123Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:38.688131Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:38.688249Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29081 WaitRootIsUp 'Root'... 
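The ReadOnlyMode trace above closes with the schemeshard's publication protocol for txId 129: the operation publishes two (path, version) pairs, [OwnerId: 72057594046678944, LocalPathId: 1] at version 11 and [LocalPathId: 5] at version 3, and every TEvUpdateAck decrements an in-flight counter ("count: 2", then "count: 1", then zero) until "Publication complete, notify & remove" fires. Below is a toy model of that countdown, assuming only what the log shows; the real schemeshard bookkeeping is richer (it also tracks subscribers and per-path versions).

```cpp
#include <cassert>
#include <cstdint>
#include <map>

// Hypothetical reconstruction of the publication bookkeeping visible above for
// txId 129; names follow the log lines, the structure itself is an assumption.
struct TPublicationTracker {
    // txId -> number of (pathId, version) publications still awaiting TEvUpdateAck.
    std::map<uint64_t, int> InFlight;

    void Publish(uint64_t txId, int publications) {
        InFlight[txId] = publications;  // "Publication still in progress, publications: N"
    }

    // Returns true on the last ack ("Publication complete, notify & remove").
    bool AckPublish(uint64_t txId) {
        return --InFlight[txId] == 0;
    }
};

int main() {
    TPublicationTracker t;
    t.Publish(129, 2);              // paths [1] v11 and [5] v3, per the trace
    bool done = t.AckPublish(129);  // ack for LocalPathId 1, version 11 -> count: 1
    assert(!done);
    done = t.AckPublish(129);       // ack for LocalPathId 5, version 3 -> complete
    assert(done);
    return 0;
}
```

The same countdown appears earlier in this section for txId 1 (publications: 1) and txId 103 (publications: 2); only the counts and versions differ.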
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:39.139462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:39.317502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1743154479430 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... 
(TRUNCATED) 2025-03-28T09:34:39.434391Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486791481209369561:2421] Handshake: worker# [1:7486791481209369560:2421] 2025-03-28T09:34:39.434521Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Handshake: worker# [1:7486791481209369560:2421] 2025-03-28T09:34:39.435331Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:3:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:34:39.435795Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 3] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-28T09:34:39.435825Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Send handshake: worker# [1:7486791481209369560:2421] 2025-03-28T09:34:39.435919Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7486791481209369560:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-03-28T09:34:39.435937Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7486791481209369560:2421] Handshake with writer: sender# [1:7486791481209369562:2421] 2025-03-28T09:34:39.441000Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486791481209369561:2421] Create read session: session# [1:7486791481209369565:2293] 2025-03-28T09:34:39.441076Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7486791481209369560:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-03-28T09:34:39.441098Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7486791481209369560:2421] Handshake with reader: sender# [1:7486791481209369561:2421] 2025-03-28T09:34:39.441135Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486791481209369561:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-28T09:34:39.514290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:2, at schemeshard: 72057594046644480 2025-03-28T09:34:40.587890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7486791485504337054:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:40.587908Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791485504337038:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:40.587994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791485504337055:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:40.588082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:40.592214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:2, at schemeshard: 72057594046644480 2025-03-28T09:34:40.598770Z node 1 :TX_PROXY ERROR: Actor# [1:7486791485504337060:2508] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:34:40.606158Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791485504337058:2378], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:34:40.606159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791485504337059:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:34:40.669228Z node 1 :TX_PROXY ERROR: Actor# [1:7486791485504337107:2539] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:34:40.694860Z node 1 :TX_PROXY ERROR: Actor# [1:7486791485504337125:2547] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:34:41.734671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 2025-03-28T09:34:42.159713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:34:42.571216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710675:0, at schemeshard: 72057594046644480 2025-03-28T09:34:42.987859Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791472619434285:2199];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:42.987953Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:34:42.999594Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710680:0, at schemeshard: 72057594046644480 2025-03-28T09:34:43.430228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480 2025-03-28T09:34:44.279680Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486791481209369561:2421] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-03-28T09:34:44.261000Z MessageGroupId: producer ProducerId: producer }] } } 2025-03-28T09:34:44.279774Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7486791481209369560:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-03-28T09:34:44.261000Z MessageGroupId: producer ProducerId: producer }] } 2025-03-28T09:34:44.279901Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2025-03-28T09:34:44.261000Z MessageGroupId: producer ProducerId: producer }] } 2025-03-28T09:34:44.280037Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 36 }] } 2025-03-28T09:34:44.281114Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7486791502684206975:2421] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2025-03-28T09:34:44.281173Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-03-28T09:34:44.281274Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7486791502684206975:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-03-28T09:34:44.282947Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7486791502684206975:2421] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-28T09:34:44.282983Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-03-28T09:34:44.283020Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0] } 2025-03-28T09:34:44.283076Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7486791481209369560:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-28T09:34:44.283116Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486791481209369561:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-28T09:34:44.414832Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486791481209369561:2421] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-03-28T09:34:44.409000Z MessageGroupId: producer ProducerId: producer }] } } 2025-03-28T09:34:44.414899Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7486791481209369560:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-03-28T09:34:44.409000Z MessageGroupId: producer ProducerId: producer }] } 2025-03-28T09:34:44.414951Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2025-03-28T09:34:44.409000Z MessageGroupId: producer ProducerId: producer }] } 2025-03-28T09:34:44.415040Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 }] } 2025-03-28T09:34:44.415134Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7486791502684206975:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 
Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-03-28T09:34:44.416580Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7486791502684206975:2421] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-28T09:34:44.416625Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-03-28T09:34:44.416658Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2025-03-28T09:34:44.416697Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7486791481209369560:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-28T09:34:44.416741Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486791481209369561:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-28T09:34:44.574625Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486791481209369561:2421] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-03-28T09:34:44.569000Z MessageGroupId: producer ProducerId: producer }] } } 2025-03-28T09:34:44.574768Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7486791481209369560:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-03-28T09:34:44.569000Z MessageGroupId: producer ProducerId: producer }] } 2025-03-28T09:34:44.574825Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2025-03-28T09:34:44.569000Z MessageGroupId: producer ProducerId: producer }] } 2025-03-28T09:34:44.574961Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 36 }] } 2025-03-28T09:34:44.575068Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7486791502684206975:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2025-03-28T09:34:44.576834Z node 1 :REPLICATION_SERVICE DEBUG: [TablePartitionWriter][72057594046644480:3:1][72075186224037890][1:7486791502684206975:2421] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2025-03-28T09:34:44.576898Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2025-03-28T09:34:44.576946Z node 1 :REPLICATION_SERVICE DEBUG: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 3][1:7486791481209369562:2421] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } 
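Aside: the three delivery cycles above show the change-exchange record lifecycle end to end: the writer asks for a record by order (TEvRequestRecords), the partition writer applies it, STATUS_OK triggers removal of the delivered record (TEvRemoveRecords), and the worker polls the topic reader for the next message (TEvPoll). Below is a minimal single-threaded model of that loop; the names are invented stand-ins for the actor messages, not the real event types.

#include <cstdint>
#include <deque>
#include <iostream>
#include <string>

struct Record { uint64_t order; std::string body; };

bool ApplyToPartition(const Record& r) {
    // Stand-in for TEvApplyReplicationChangesResult; assume STATUS_OK here.
    std::cout << "applied order=" << r.order
              << " body=" << r.body.size() << "b\n";
    return true;
}

int main() {
    std::deque<Record> pending{{0, std::string(36, 'x')},
                               {1, std::string(36, 'x')},
                               {2, std::string(36, 'x')}};
    while (!pending.empty()) {
        Record r = pending.front();   // TEvRequestRecords { Order, BodySize }
        if (ApplyToPartition(r)) {    // TEvRecords -> STATUS_OK
            pending.pop_front();      // TEvRemoveRecords { Records [order] }
            std::cout << "poll for next batch\n";  // TEvPoll { SkipCommit: 0 }
        }                             // on failure the record stays queued
    }
}

A record that fails to apply stays at the front of the queue and is redelivered, which is consistent with the at-least-once delivery that the Offset/SeqNo bookkeeping visible in the log appears to support.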
2025-03-28T09:34:44.576999Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7486791481209369560:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-28T09:34:44.577048Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486791481209369561:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2025-03-28T09:34:44.692901Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486791481209369561:2421] Handle NKikimr::NReplication::TEvYdbProxy::TEvTopicReaderGone { Result: { status: UNAVAILABLE, issues: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } } 2025-03-28T09:34:44.692934Z node 1 :REPLICATION_SERVICE INFO: [RemoteTopicReader][/Root/topic][0][1:7486791481209369561:2421] Leave 2025-03-28T09:34:44.692994Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7486791481209369560:2421] Reader has gone: sender# [1:7486791481209369561:2421] 2025-03-28T09:34:44.693051Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486791502684207130:2421] Handshake: worker# [1:7486791481209369560:2421] 2025-03-28T09:34:44.693926Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486791502684207130:2421] Create read session: session# [1:7486791502684207131:2293] 2025-03-28T09:34:44.693969Z node 1 :REPLICATION_SERVICE DEBUG: [Worker][1:7486791481209369560:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2025-03-28T09:34:44.693981Z node 1 :REPLICATION_SERVICE INFO: [Worker][1:7486791481209369560:2421] Handshake with reader: sender# [1:7486791502684207130:2421] 2025-03-28T09:34:44.694005Z node 1 :REPLICATION_SERVICE DEBUG: [RemoteTopicReader][/Root/topic][0][1:7486791502684207130:2421] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TTransferTests::Create_Disabled [GOOD] >> TTransferTests::CreateWithoutCredentials ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:34:45.373421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:45.373524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:45.373573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:45.373608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:45.373650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:45.373676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:45.373729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:45.373801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# 
false 2025-03-28T09:34:45.374088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:45.456668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:34:45.456738Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:45.463612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:45.464189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:45.464371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:45.481080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:45.481265Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:45.481929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:45.482119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:45.489038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:45.492059Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:45.492140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:45.492420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:45.492467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:45.492502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:45.493029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.500902Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 2025-03-28T09:34:45.646650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:45.646925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.647108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:45.647342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at 
schemeshard: 72057594046678944 2025-03-28T09:34:45.647391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.649407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:45.649518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:45.649690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.649752Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:45.649798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:45.649825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:45.651463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.651514Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:45.651543Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:45.653018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.653053Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.653088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:45.653154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:45.656657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:45.658363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:45.658508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:45.659474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:45.659614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:45.659670Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:45.659964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:45.660012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:45.660150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:45.660209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:45.661998Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:45.662047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:45.662199Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:45.662247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:34:45.662317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.662352Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:45.662480Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:45.662532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:45.662581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:45.662608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:45.662637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:45.662673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:45.662750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:45.662780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:45.662831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:45.662863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:45.662889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:45.665250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
t reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:34:45.742537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 125, subscribers: 0 2025-03-28T09:34:45.744533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2025-03-28T09:34:45.745725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 2025-03-28T09:34:45.745842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 125 TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 TestModificationResult got TxId: 127, wait until txId: 127 2025-03-28T09:34:45.746338Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:45.746540Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 224us result status StatusSuccess 2025-03-28T09:34:45.746852Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:45.747342Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:45.747476Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 105us result status StatusSuccess 2025-03-28T09:34:45.747734Z 
node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2025-03-28T09:34:45.748008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2025-03-28T09:34:45.748058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2025-03-28T09:34:45.748130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2025-03-28T09:34:45.748147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2025-03-28T09:34:45.748197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2025-03-28T09:34:45.748212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2025-03-28T09:34:45.748663Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2025-03-28T09:34:45.748765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2025-03-28T09:34:45.748798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:342:2333] 2025-03-28T09:34:45.749062Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-03-28T09:34:45.749124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-03-28T09:34:45.749161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:342:2333] 2025-03-28T09:34:45.749312Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-03-28T09:34:45.749358Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-03-28T09:34:45.749389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:342:2333] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-03-28T09:34:45.749902Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:45.750054Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 171us result status StatusSuccess 2025-03-28T09:34:45.750360Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2025-03-28T09:34:45.754435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:45.754707Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-03-28T09:34:45.754782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable 
Propose: opId# 128:0, path# /MyRoot/NilNoviSubLuna 2025-03-28T09:34:45.754896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-03-28T09:34:45.756962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 125, at schemeshard: 72057594046678944 2025-03-28T09:34:45.757092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalAsync [GOOD] >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalUnique [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:34:44.540339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:44.540506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:44.540549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:44.540588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:44.541248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:44.541286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:44.541364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:44.541466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:44.542588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:44.612354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new 
config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:34:44.612457Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:44.621570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:44.621848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:44.621969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:44.636066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:44.636245Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:44.638795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:44.639944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:44.651321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:44.660598Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:44.660708Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:44.661175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:44.661244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:44.661312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:44.662172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.670662Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 2025-03-28T09:34:44.828307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:44.828601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.828795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:44.829051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:44.829104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.831434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:44.831572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:44.831763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.831851Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:44.831895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:44.831931Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:44.833789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.833844Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:44.833883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:44.835634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.835694Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.835742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:44.835818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:44.840091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:44.842083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:44.842280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:44.843348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:44.843493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:44.843572Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:44.843925Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Change state for txid 1:0 128 -> 240 2025-03-28T09:34:44.843990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:44.844162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:44.844242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:44.846224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:44.846275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:44.846463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:44.846524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:34:44.846604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:44.846647Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:44.846773Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:44.846829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:44.846872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:44.846903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:44.846956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:44.846995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:44.847031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:44.847065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:44.847148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:44.847188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:44.847238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:44.849942Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
TOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000003 2025-03-28T09:34:45.767002Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:45.767153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 8589936750 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:45.767230Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TCreateExternalTable TPropose, operationId: 101:0 HandleReply TEvOperationPlan: step# 5000003 2025-03-28T09:34:45.767389Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-28T09:34:45.767602Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:45.767690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:34:45.767760Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:34:45.768824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:34:45.769205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:34:45.770869Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:45.770921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:45.771083Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:34:45.771217Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:34:45.771305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:34:45.771434Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:45.771483Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:208:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:34:45.771534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:208:2210], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-03-28T09:34:45.771574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:208:2210], at schemeshard: 72057594046678944, txId: 101, path id: 3 2025-03-28T09:34:45.771608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [2:208:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:34:45.771909Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at 
schemeshard: 72057594046678944 2025-03-28T09:34:45.771968Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:34:45.772109Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:34:45.772161Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:34:45.772218Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:34:45.772261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:34:45.772311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:34:45.772367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:34:45.772416Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:34:45.772462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:34:45.772547Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:34:45.772603Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:34:45.772655Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 3, subscribers: 0 2025-03-28T09:34:45.772722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-28T09:34:45.772769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-28T09:34:45.772798Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-28T09:34:45.774589Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:34:45.774738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:34:45.774788Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:34:45.774841Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:34:45.774953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:34:45.776069Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:34:45.776164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:34:45.776203Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 
72057594046678944, txId: 101 2025-03-28T09:34:45.776285Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-28T09:34:45.776333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:34:45.777294Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:34:45.777390Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:34:45.777428Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:34:45.777464Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-28T09:34:45.777500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:34:45.777606Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:34:45.779338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:34:45.781354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:34:45.781496Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:34:45.781742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:34:45.781811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:34:45.782361Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:34:45.782497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:34:45.782555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:333:2324] TestWaitNotification: OK eventTxId 101 2025-03-28T09:34:45.783222Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:45.783612Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 373us result status StatusSuccess 2025-03-28T09:34:45.784075Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable 
CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Decimal(35,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.6%| [TA] $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] |94.6%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Cdc::UuidExchange[TopicRunner] [GOOD] >> Cdc::UpdatesLog[PqRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:34:41.549398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:41.549519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:41.549569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:41.549611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:41.550996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:41.551047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:41.551140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:41.551227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:41.554371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:41.646651Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:41.646726Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:41.665341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:41.665630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:41.665758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:41.671562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:41.671766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:41.672847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.673867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:41.680884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.690254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:41.690332Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-03-28T09:34:41.691302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:41.691385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:41.691443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:41.691525Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.697948Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:34:41.837498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:41.837701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.837897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:41.838115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:41.838181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:41.840487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840558Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:41.840611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:41.840663Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:41.842270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.842319Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:41.842356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:41.843966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.844021Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.844061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.844109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.854293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:41.856150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:41.856317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:41.857376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.857484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:41.857540Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.857820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:41.857890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.858063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:41.858151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:41.859930Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:41.859977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:41.860124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.860162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:41.860555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.860615Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:41.860710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:41.860739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.860773Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:41.860802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.860840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:41.860880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.860913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:41.860941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:41.860997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:41.861032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:41.861060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:41.862883Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:41.862992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:41.863036Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, coun ... 0s, InflightLimit# 10 2025-03-28T09:34:45.918957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:45.919009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:45.919046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:45.919069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:45.919124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:45.919187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:45.919488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:45.934395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:45.935854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:45.936016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:45.936124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:45.936155Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:45.936470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:45.937124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Paths, read records: 2, at schemeshard: 72057594046678944 2025-03-28T09:34:45.937199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as 
only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SomeTable, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:34:45.937265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.937334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.937727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:45.937874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-28T09:34:45.937946Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-28T09:34:45.938183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-03-28T09:34:45.938327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.938409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:45.938443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:34:45.938551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:45.938726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:45.939010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-03-28T09:34:45.939321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.939435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.939848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.939939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.940115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.940206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.940288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.940456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.940543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.940686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.940904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 
2025-03-28T09:34:45.941074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.941137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.941178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.948145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:45.948221Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:45.949010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:45.949067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:45.949108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:45.950532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:757:2711] sender: [1:811:2058] recipient: [1:15:2062] 2025-03-28T09:34:45.996240Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:45.996514Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SomeTable" took 297us result status StatusSuccess 2025-03-28T09:34:45.996932Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeTable" PathDescription { Self { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SomeTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 Family: 1 FamilyName: "alternative" NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 4140 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1020 IndexSize: 0 } PoolsUsage { PoolKind: "pool-kind-2" DataSize: 3120 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82488 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:45.999212Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:45.999398Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 208us result status StatusSuccess 2025-03-28T09:34:45.999793Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SomeTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExternalTableTest::SchemeErrors [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: 
[1:109:2141] 2025-03-28T09:34:45.087940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:45.088000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:45.088024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:45.088047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:45.088083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:45.088102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:45.088141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:45.088199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:45.088408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:45.162101Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:34:45.162167Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:45.167751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:45.168079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:45.168321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:45.181857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:45.182023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:45.182583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:45.182796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:45.188390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:45.190488Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:45.190549Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:45.190851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:45.190899Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:45.190949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:45.191485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.197014Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 2025-03-28T09:34:45.314640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:45.314947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.315138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:45.315352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:45.315418Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.317499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:45.317625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:45.317827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.317885Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:45.317937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:45.317969Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:45.319619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.319688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:45.319726Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:45.321311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.321349Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.321384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:45.321447Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:45.330034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:45.332150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:45.332314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:45.333274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:45.333414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:45.333471Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:45.333723Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:45.333784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:45.333944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:45.334023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:45.336018Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:45.336066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:45.336232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:45.336288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:34:45.336370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.336417Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:45.336501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:45.336533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:45.336566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:45.336598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:45.336639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:45.336680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:45.336710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:45.336740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:45.336798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:45.336830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:45.336873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:45.339421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2025-03-28T09:34:46.042843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-28T09:34:46.042863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-28T09:34:46.043929Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:34:46.044004Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:34:46.044058Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:34:46.044096Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:34:46.044133Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:34:46.045179Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:34:46.045259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:34:46.045289Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:34:46.045315Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-28T09:34:46.045342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:34:46.046095Z node 2 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:34:46.046169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:34:46.046198Z node 2 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:34:46.046222Z node 2 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-28T09:34:46.046245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:34:46.046304Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-28T09:34:46.047701Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:34:46.048824Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:34:46.049679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:34:46.049866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:34:46.049908Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:34:46.050209Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:34:46.050274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:34:46.050307Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:337:2328] TestWaitNotification: OK eventTxId 102 2025-03-28T09:34:46.050726Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:46.050871Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 187us result status StatusSuccess 2025-03-28T09:34:46.051147Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey 
{ SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-03-28T09:34:46.053882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:46.054153Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2025-03-28T09:34:46.054241Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 103:0, path# /MyRoot/ExternalTable 2025-03-28T09:34:46.054356Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2025-03-28T09:34:46.056381Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 3 PathCreateTxId: 102, at schemeshard: 72057594046678944 2025-03-28T09:34:46.056526Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 3], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-28T09:34:46.056787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:34:46.056821Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:34:46.057210Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown 
transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:34:46.057308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:34:46.057344Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:345:2336] TestWaitNotification: OK eventTxId 103 2025-03-28T09:34:46.057850Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:46.057997Z node 2 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 172us result status StatusSuccess 2025-03-28T09:34:46.058228Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:34:45.775535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:45.775628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:45.775668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 
0.010000s 2025-03-28T09:34:45.775705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:45.775749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:45.775826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:45.775905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:45.776012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:45.776342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:45.844090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:34:45.844149Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:45.849456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:45.849739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:45.849871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:45.862199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:45.862386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:45.863073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:45.863299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:45.868861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:45.871141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:45.871220Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:45.871554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:45.871611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:45.871658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:45.872327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.878407Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 
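For orientation: the external-table suites in this run drive ESchemeOpCreateExternalDataSource / ESchemeOpCreateExternalTable directly against schemeshard, but the same objects can be expressed as YQL. The sketch below is reconstructed from the describe results in this log (SourceType "ObjectStorage", Location "https://s3.cloud.net/my_bucket", Auth None, a single Uint64 column, table Location "/"); the backticked paths and WITH options follow the public YQL syntax and are illustrative, not taken from the test source.

    -- Hedged YQL sketch of the objects these tests create (assumed syntax, not from the test code):
    CREATE EXTERNAL DATA SOURCE `/MyRoot/ExternalDataSource` WITH (
        SOURCE_TYPE = "ObjectStorage",
        LOCATION = "https://s3.cloud.net/my_bucket",
        AUTH_METHOD = "NONE"
    );

    -- One nullable Uint64 column, matching the ExternalTableDescription in the describe output.
    CREATE EXTERNAL TABLE `/MyRoot/ExternalTable` (
        key Uint64
    ) WITH (
        DATA_SOURCE = "/MyRoot/ExternalDataSource",
        LOCATION = "/"
    );

The ReplaceIfExists: true request seen below (txId 102) is the CREATE OR REPLACE form of the same statement; as the StatusPreconditionFailed response shows, it is accepted only when the EnableReplaceIfExistsForExternalEntities feature flag is on (this node 1 run has the flag at 0, while the node 2 run earlier in the log has it at 1).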
2025-03-28T09:34:45.982827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:45.983086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.983285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:45.983528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:45.983586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.985762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:45.985879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:45.986054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.986128Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:45.986168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:45.986206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:45.988045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.988107Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:45.988148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:45.989815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.989870Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.989916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:45.989989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:45.993342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:45.994756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:45.994883Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:45.995584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:45.995705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:45.995750Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:45.995956Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:45.995993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:45.996103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:45.996171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:45.997681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:45.997717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:45.997843Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:45.997891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:34:45.997950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:45.997991Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:45.998071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:45.998097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:45.998125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:45.998150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:45.998177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:45.998212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:45.998243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:45.998283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:45.998327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:45.998352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:45.998374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:46.000370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... ode 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:46.037588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:34:46.037642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:34:46.037671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:34:46.037962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:34:46.038004Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:34:46.038077Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:34:46.038101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:34:46.038132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:34:46.038174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:34:46.038202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:34:46.038232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:34:46.038257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:34:46.038284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:34:46.038328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:34:46.038365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-28T09:34:46.038393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-28T09:34:46.038418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 2 2025-03-28T09:34:46.038935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:34:46.039030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:34:46.039060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:34:46.039109Z node 1 
:FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:34:46.039158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:46.039740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:34:46.039791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:34:46.039809Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:34:46.039845Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-28T09:34:46.039883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:34:46.039933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:34:46.041985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:34:46.042808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:34:46.042993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:34:46.043022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:34:46.043376Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:34:46.043460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:34:46.043488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:306:2297] TestWaitNotification: OK eventTxId 101 2025-03-28T09:34:46.043851Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:46.044025Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 177us result status StatusSuccess 2025-03-28T09:34:46.044266Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 2 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2025-03-28T09:34:46.046636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:46.046951Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2025-03-28T09:34:46.047013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-03-28T09:34:46.047064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2025-03-28T09:34:46.048733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:46.048863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:34:46.049074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:34:46.049114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:34:46.049483Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:34:46.049552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:34:46.049591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:314:2305] TestWaitNotification: OK eventTxId 102 2025-03-28T09:34:46.049909Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:46.050079Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 170us result status StatusPathDoesNotExist 2025-03-28T09:34:46.050203Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TExternalTableTest::ParallelCreateExternalTable ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] Test command err: === Server->StartServer(false); 2025-03-28T09:34:35.782421Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791465389633585:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:35.782469Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:35.829456Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791466270685929:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:35.830343Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:36.002958Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001559/r3tmp/tmpupvvuy/pdisk_1.dat 2025-03-28T09:34:36.006505Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:34:36.261327Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:36.268655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.268757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2025-03-28T09:34:36.271133Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.271191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.281646Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:36.299728Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:36.300873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19218, node 1 2025-03-28T09:34:36.519285Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001559/r3tmp/yandexqnJnSx.tmp 2025-03-28T09:34:36.519309Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001559/r3tmp/yandexqnJnSx.tmp 2025-03-28T09:34:36.519460Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001559/r3tmp/yandexqnJnSx.tmp 2025-03-28T09:34:36.519578Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:36.711101Z INFO: TTestServer started on Port 1699 GrpcPort 19218 TClient is connected to server localhost:1699 PQClient connected to localhost:19218 === TenantModeEnabled() = 1 === Init PQ - start server on port 19218 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
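A note on the schemeshard state numbers that recur throughout this log: the "Change state for txid N:0 2 -> 3 / 3 -> 128 / 128 -> 240" lines (txId 1 above, txId 281474976710657 below) can be read directly against the adjacent messages:

    2   TCreateParts    - create the shards the operation part needs ("no shards to create, do next state")
    3   TConfigureParts - configure the created parts
    128 TPropose        - wait for the coordinator plan step (TEvOperationPlan)
    240 TDone           - publish to the scheme board and satisfy waiters ("Part operation is done")

so each operation part in these tests walks 2 -> 3 -> 128 -> 240 from proposal to completion.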
2025-03-28T09:34:37.089809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:34:37.090068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.090268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:34:37.090537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:34:37.090578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.094709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:34:37.094995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:34:37.095200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.095284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:34:37.095301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-28T09:34:37.095314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-03-28T09:34:37.097402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.097477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:34:37.097499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-28T09:34:37.099481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.099521Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.099539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.099558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.104143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:37.106269Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-28T09:34:37.106429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 waiting... 2025-03-28T09:34:37.109423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743154477155, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:34:37.109595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154477155 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:34:37.109630Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.109909Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-28T09:34:37.109982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.110171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:34:37.110270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:34:37.111539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:34:37.111562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: false 2025-03-28T09:34:37.111581Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:34:37.112188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:34:37.112207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:34:37.112342Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:34:37.112359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486791469684601541:2397], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-28T09:34:37.112391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.112411Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-28T09:34:37.112492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:34:37.112513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.112553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:34:37.113594Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.113625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-28T09:34:37.113642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.113655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-28T09:34:37.113666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-03-28T09:34:37.113700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 7205759404 ... 4 ready parts: 2/2 2025-03-28T09:34:44.534207Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:0 2025-03-28T09:34:44.534216Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715664:0 2025-03-28T09:34:44.534252Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 3 2025-03-28T09:34:44.534262Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:1 2025-03-28T09:34:44.534266Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715664:1 2025-03-28T09:34:44.534329Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 5 2025-03-28T09:34:44.534342Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715664, publications: 2, subscribers: 1 2025-03-28T09:34:44.534351Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715664, [OwnerId: 72057594046644480, LocalPathId: 11], 5 2025-03-28T09:34:44.534359Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715664, [OwnerId: 72057594046644480, LocalPathId: 12], 2 2025-03-28T09:34:44.534791Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-03-28T09:34:44.534880Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 11 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-03-28T09:34:44.534903Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715664 2025-03-28T09:34:44.534918Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 11], version: 5 2025-03-28T09:34:44.534932Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 11] was 2 2025-03-28T09:34:44.535120Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-03-28T09:34:44.535166Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 12 Version: 2 
PathOwnerId: 72057594046644480, cookie: 281474976715664 2025-03-28T09:34:44.535182Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715664 2025-03-28T09:34:44.535193Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715664, pathId: [OwnerId: 72057594046644480, LocalPathId: 12], version: 2 2025-03-28T09:34:44.535205Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 12] was 4 2025-03-28T09:34:44.535240Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715664, subscribers: 1 2025-03-28T09:34:44.535253Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7486791502551080097:2370] 2025-03-28T09:34:44.536361Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715664 2025-03-28T09:34:44.536408Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715664 Create topic result: 1 === EnablePQLogs === CreateChannel === NewStub === InitializeWritePQService === InitializeWritePQService start iteration === InitializeWritePQService create streamingWriter === InitializeWritePQService Write 2025-03-28T09:34:44.641528Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-28T09:34:44.641564Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 1 2025-03-28T09:34:44.641939Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "Root/acc/topic1" message_group_id: "12345678" } 2025-03-28T09:34:44.642045Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 1 topic: "Root/acc/topic1" message_group_id: "12345678" from ipv6:[::1]:51180 2025-03-28T09:34:44.642066Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:51180 proto=v1 topic=Root/acc/topic1 durationSec=0 2025-03-28T09:34:44.642075Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-28T09:34:44.644420Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: describe result for acl check 2025-03-28T09:34:44.644547Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-28T09:34:44.644563Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:34:44.644571Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE 
`//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-28T09:34:44.644599Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486791502551080340:2381] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-03-28T09:34:44.644627Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:34:44.645088Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-03-28T09:34:44.645153Z node 3 :PERSQUEUE INFO: new Cookie 12345678|a22a088d-631a2472-123bbcda-4df1b2fe_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-03-28T09:34:44.645545Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|a22a088d-631a2472-123bbcda-4df1b2fe_0 2025-03-28T09:34:44.646314Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|a22a088d-631a2472-123bbcda-4df1b2fe_0 grpc read done: success: 0 data: 2025-03-28T09:34:44.646333Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|a22a088d-631a2472-123bbcda-4df1b2fe_0 grpc read failed 2025-03-28T09:34:44.646472Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|a22a088d-631a2472-123bbcda-4df1b2fe_0 2025-03-28T09:34:44.646492Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|a22a088d-631a2472-123bbcda-4df1b2fe_0 is DEAD Finish: 0 === InitializeWritePQService done === PersQueueClient 2025-03-28T09:34:44.646784Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed 2025-03-28T09:34:44.655318Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-28T09:34:44.655341Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-03-28T09:34:44.655618Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "12345678" } 2025-03-28T09:34:44.655706Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "topic1" message_group_id: "12345678" from ipv6:[::1]:51180 2025-03-28T09:34:44.655729Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:51180 proto=v1 topic=topic1 durationSec=0 2025-03-28T09:34:44.655750Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-28T09:34:44.657303Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-03-28T09:34:44.657395Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-28T09:34:44.657408Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, 
$AccessTime, $Partition, $SeqNo); 2025-03-28T09:34:44.657415Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-28T09:34:44.657442Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486791502551080360:2390] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-03-28T09:34:44.657458Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:34:44.657769Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-03-28T09:34:44.657869Z node 3 :PERSQUEUE INFO: new Cookie 12345678|5cac0d2-b61471bf-546b2724-33bb1ce3_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-03-28T09:34:44.658159Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 12345678|5cac0d2-b61471bf-546b2724-33bb1ce3_0 2025-03-28T09:34:44.660002Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: 12345678|5cac0d2-b61471bf-546b2724-33bb1ce3_0 grpc read done: success: 0 data: 2025-03-28T09:34:44.660021Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|5cac0d2-b61471bf-546b2724-33bb1ce3_0 grpc read failed 2025-03-28T09:34:44.660049Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|5cac0d2-b61471bf-546b2724-33bb1ce3_0 grpc closed 2025-03-28T09:34:44.660071Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: 12345678|5cac0d2-b61471bf-546b2724-33bb1ce3_0 is DEAD 2025-03-28T09:34:44.660768Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] Test command err: === Server->StartServer(false); 2025-03-28T09:34:35.812475Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791462158618929:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:35.812524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:35.888348Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791464873648761:2086];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:35.888474Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00154c/r3tmp/tmpiJUAzJ/pdisk_1.dat 2025-03-28T09:34:36.068202Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:34:36.071614Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:34:36.393493Z 
node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.393629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.399478Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.399555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.406314Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:36.406442Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:36.407542Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:36.436852Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23595, node 1 2025-03-28T09:34:36.454119Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:34:36.455437Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:34:36.518239Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00154c/r3tmp/yandex3VrDSu.tmp 2025-03-28T09:34:36.518268Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00154c/r3tmp/yandex3VrDSu.tmp 2025-03-28T09:34:36.519178Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00154c/r3tmp/yandex3VrDSu.tmp 2025-03-28T09:34:36.519352Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:36.711099Z INFO: TTestServer started on Port 24325 GrpcPort 23595 TClient is connected to server localhost:24325 PQClient connected to localhost:23595 === TenantModeEnabled() = 1 === Init PQ - start server on port 23595 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
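A side note on the PQ write path captured above: before a write session is bound to a partition, TPartitionChooser runs three YQL statements against the internal //Root/.metadata/TopicPartitionsMapping table. They appear twice in this log crammed onto single lines; the same statements are reflowed here for readability, with comments added and the per-statement DECLARE blocks merged into one:

    --!syntax_v1
    DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8;
    DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64;
    DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64;

    -- SelectQuery: look up an existing producer-to-partition binding.
    SELECT Partition, CreateTime, AccessTime, SeqNo
    FROM `//Root/.metadata/TopicPartitionsMapping`
    WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId;

    -- UpdateQuery: record (or refresh) the binding chosen for this producer.
    UPSERT INTO `//Root/.metadata/TopicPartitionsMapping`
        (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo)
    VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo);

    -- UpdateAccessTimeQuery: touch AccessTime when an existing binding is reused.
    UPDATE `//Root/.metadata/TopicPartitionsMapping`
    SET AccessTime = $AccessTime
    WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition;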
2025-03-28T09:34:37.080238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:34:37.080453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.080626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:34:37.080830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:34:37.080864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.093428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:34:37.093715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:34:37.094136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.094281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:34:37.094326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-28T09:34:37.094364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-28T09:34:37.099116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.099185Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:34:37.099214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-28T09:34:37.101120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.101189Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.101213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.101237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.106248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:37.107255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:34:37.107281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-28T09:34:37.107302Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:34:37.111526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-28T09:34:37.111724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:34:37.116034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743154477162, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:34:37.116245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154477162 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:34:37.116292Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.116541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-28T09:34:37.116578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.116803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:34:37.116881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:34:37.120260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:34:37.120287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:34:37.120531Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:34:37.120544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486791466453586968:2438], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-28T09:34:37.120598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.120632Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-28T09:34:37.120763Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:34:37.120783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.120802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:34:37.120832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.120848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-28T09:34:37.120891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.120919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-28T09:34:37.120929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for ... UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-28T09:34:44.632177Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486791501947102260:2381] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-03-28T09:34:44.632200Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:34:44.632808Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-03-28T09:34:44.632860Z node 3 :PERSQUEUE INFO: new Cookie 12345678|f1c60694-7a0e6bad-43a0da7c-c8744307_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-03-28T09:34:44.633351Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|f1c60694-7a0e6bad-43a0da7c-c8744307_0 2025-03-28T09:34:44.634273Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|f1c60694-7a0e6bad-43a0da7c-c8744307_0 grpc read done: success: 0 data: 2025-03-28T09:34:44.634296Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|f1c60694-7a0e6bad-43a0da7c-c8744307_0 grpc read failed 2025-03-28T09:34:44.634459Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|f1c60694-7a0e6bad-43a0da7c-c8744307_0 2025-03-28T09:34:44.634481Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|f1c60694-7a0e6bad-43a0da7c-c8744307_0 is DEAD Finish: 0 === InitializeWritePQService done === PersQueueClient 2025-03-28T09:34:44.634832Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed BEFORE MODIFY PERMISSIONS 2025-03-28T09:34:44.649778Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_0@builtin \003\n\031\010\001\022\025\032\023test_user_1@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_1@builtin \003\n\031\010\001\022\025\032\023test_user_2@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_2@builtin \003" } } TxId: 281474976720665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:57196" , at schemeshard: 72057594046644480 2025-03-28T09:34:44.649982Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976720665:0, at schemeshard: 72057594046644480 2025-03-28T09:34:44.650172Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-03-28T09:34:44.650192Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-03-28T09:34:44.650365Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976720665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-03-28T09:34:44.650415Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976720665:0, at schemeshard: 72057594046644480 2025-03-28T09:34:44.650511Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720665:0 progress is 1/1 2025-03-28T09:34:44.650535Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720665 ready parts: 1/1 2025-03-28T09:34:44.650559Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976720665:0 progress is 1/1 
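[Annotation] The TTableHelper queries above imply a simple protocol around `//Root/.metadata/TopicPartitionsMapping`: SELECT by (Hash, Topic, ProducerId); on a miss, pick a partition and UPSERT the row; on a hit, only UPDATE AccessTime — which is why the chooser replies "Partition=0, SeqNo=0" for a fresh SourceId. A minimal in-memory C++ model of that protocol; the hash function and the partition-assignment policy (hash % partition_count) are illustrative assumptions, not YDB's actual choice logic:

    #include <cstdint>
    #include <functional>
    #include <map>
    #include <string>
    #include <tuple>
    #include <utility>

    struct Row { uint32_t partition; uint64_t create_time, access_time, seq_no; };

    class PartitionChooserModel {
        // Key mirrors the WHERE clause: Hash, Topic, ProducerId.
        std::map<std::tuple<uint64_t, std::string, std::string>, Row> table_;
        uint32_t partition_count_;
    public:
        explicit PartitionChooserModel(uint32_t partitions) : partition_count_(partitions) {}

        // Returns (partition, seq_no), mirroring "ReplyResult: Partition=0, SeqNo=0".
        std::pair<uint32_t, uint64_t> Choose(const std::string& topic,
                                             const std::string& source_id,
                                             uint64_t now) {
            const uint64_t hash = std::hash<std::string>{}(source_id); // stand-in hash
            auto key = std::make_tuple(hash, topic, source_id);
            auto it = table_.find(key);
            if (it == table_.end()) {
                // Miss: assign a partition and UPSERT the mapping row.
                Row row{static_cast<uint32_t>(hash % partition_count_), now, now, 0};
                table_.emplace(key, row);
                return {row.partition, row.seq_no};
            }
            it->second.access_time = now; // Hit: UPDATE ... SET AccessTime only.
            return {it->second.partition, it->second.seq_no};
        }
    };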
2025-03-28T09:34:44.650574Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720665 ready parts: 1/1 2025-03-28T09:34:44.650617Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-03-28T09:34:44.650675Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976720665, ready parts: 1/1, is published: false 2025-03-28T09:34:44.650723Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-03-28T09:34:44.650737Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976720665 ready parts: 1/1 2025-03-28T09:34:44.650754Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976720665:0 2025-03-28T09:34:44.650773Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976720665, publications: 1, subscribers: 0 2025-03-28T09:34:44.650785Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976720665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2025-03-28T09:34:44.652535Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976720665, response: Status: StatusSuccess TxId: 281474976720665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:34:44.652832Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976720665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user_0@builtin, add access: +W:test_user_1@builtin, add access: +W:test_user_2@builtin, remove access: -():test_user_0@builtin:-, remove access: -():test_user_1@builtin:-, remove access: -():test_user_2@builtin:- 2025-03-28T09:34:44.652995Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:34:44.653013Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976720665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-03-28T09:34:44.653210Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:34:44.653234Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7486791489062199329:2387], at schemeshard: 72057594046644480, txId: 281474976720665, path id: 10 2025-03-28T09:34:44.653632Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976720665 2025-03-28T09:34:44.653712Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976720665 2025-03-28T09:34:44.653733Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976720665 2025-03-28T09:34:44.653749Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2025-03-28T09:34:44.653771Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046644480, LocalPathId: 10] was 4 2025-03-28T09:34:44.653849Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976720665, subscribers: 0 2025-03-28T09:34:44.655326Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720665 2025-03-28T09:34:44.655358Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-28T09:34:44.655370Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-03-28T09:34:44.655561Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2025-03-28T09:34:44.655629Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:57176 2025-03-28T09:34:44.655649Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:57176 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-03-28T09:34:44.655654Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-28T09:34:44.656397Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-03-28T09:34:44.656575Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-28T09:34:44.656593Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:34:44.656601Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-28T09:34:44.656631Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486791501947102290:2392] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-03-28T09:34:44.656649Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:34:44.656987Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-03-28T09:34:44.657069Z node 3 :PERSQUEUE INFO: new Cookie test-group-id|4428cf52-96260801-8689a190-20b0627_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2025-03-28T09:34:44.657326Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|4428cf52-96260801-8689a190-20b0627_0 2025-03-28T09:34:44.658088Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|4428cf52-96260801-8689a190-20b0627_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-03-28T09:34:44.658219Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|4428cf52-96260801-8689a190-20b0627_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-03-28T09:34:44.658252Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: got another 'update_token_request' while previous still in progress, only single token update is allowed at a time sessionId: test-group-id|4428cf52-96260801-8689a190-20b0627_0 2025-03-28T09:34:44.658412Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|4428cf52-96260801-8689a190-20b0627_0 is DEAD 2025-03-28T09:34:44.658654Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] Test command err: === Server->StartServer(false); 2025-03-28T09:34:35.812379Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791464100450701:2208];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:35.812822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:35.832218Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791465867719913:2076];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:35.832282Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001542/r3tmp/tmpRrRylF/pdisk_1.dat 2025-03-28T09:34:36.063267Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:34:36.066910Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:34:36.360923Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:36.420838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.420946Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.423571Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.423638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.425951Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:36.426073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:36.426462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24660, node 1 2025-03-28T09:34:36.518395Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001542/r3tmp/yandexs4tDdF.tmp 2025-03-28T09:34:36.518426Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001542/r3tmp/yandexs4tDdF.tmp 2025-03-28T09:34:36.519302Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001542/r3tmp/yandexs4tDdF.tmp 2025-03-28T09:34:36.519454Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:36.714790Z INFO: TTestServer started on Port 14850 GrpcPort 24660 TClient is connected to server localhost:14850 PQClient connected to localhost:24660 === TenantModeEnabled() = 1 === Init PQ - start server on port 24660 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:37.114185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:34:37.114381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.114544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:34:37.114817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:34:37.114882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
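[Annotation] The HIVE warnings earlier in this block show each node's connectivity walking Unknown -> Disconnected -> Connecting -> Connected during server start. A small editorial sketch of that transition order as logged; the Connected -> Disconnected edge is an assumption (a disconnect path is never exercised in this log):

    #include <cassert>

    enum class EVolatileState { Unknown, Disconnected, Connecting, Connected };

    // Sketch of the transitions observed in the HIVE log lines; not Hive's code.
    static bool CanTransition(EVolatileState from, EVolatileState to) {
        switch (from) {
            case EVolatileState::Unknown:      return to == EVolatileState::Disconnected;
            case EVolatileState::Disconnected: return to == EVolatileState::Connecting;
            case EVolatileState::Connecting:   return to == EVolatileState::Connected;
            case EVolatileState::Connected:    return to == EVolatileState::Disconnected; // assumed
        }
        return false;
    }

    int main() {
        // The exact sequence logged for nodes 1 and 2 during StartServer.
        assert(CanTransition(EVolatileState::Unknown,      EVolatileState::Disconnected));
        assert(CanTransition(EVolatileState::Disconnected, EVolatileState::Connecting));
        assert(CanTransition(EVolatileState::Connecting,   EVolatileState::Connected));
        return 0;
    }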
2025-03-28T09:34:37.116898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:34:37.117013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:34:37.117168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.117204Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:34:37.117226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-28T09:34:37.117238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-03-28T09:34:37.119542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.119586Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:34:37.119598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-28T09:34:37.121152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.121178Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.121200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.121229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.125739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:37.126008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:34:37.126023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-28T09:34:37.126043Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:34:37.130056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-28T09:34:37.130240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:34:37.135112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743154477176, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:34:37.135264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154477176 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:34:37.135290Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.135566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-28T09:34:37.135601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.135795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:34:37.135870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:34:37.138406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:34:37.138429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:34:37.138562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:34:37.138583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486791468395418514:2384], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-28T09:34:37.138623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.138652Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-28T09:34:37.138766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:34:37.138779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.138797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:34:37.138809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.138822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-28T09:34:37.138844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.138861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-28T09:34:37.138906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-03-28T09:34:37.138947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 720575940 ... 
ER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-28T09:34:44.645096Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486791502098519818:2381] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-03-28T09:34:44.645114Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:34:44.645579Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-03-28T09:34:44.645672Z node 3 :PERSQUEUE INFO: new Cookie 12345678|b177b06a-ddcdc287-92c7a4ef-ce14dcd4_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2025-03-28T09:34:44.646007Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|b177b06a-ddcdc287-92c7a4ef-ce14dcd4_0 2025-03-28T09:34:44.647077Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: 12345678|b177b06a-ddcdc287-92c7a4ef-ce14dcd4_0 grpc read done: success: 0 data: 2025-03-28T09:34:44.647113Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|b177b06a-ddcdc287-92c7a4ef-ce14dcd4_0 grpc read failed 2025-03-28T09:34:44.647320Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: 12345678|b177b06a-ddcdc287-92c7a4ef-ce14dcd4_0 2025-03-28T09:34:44.647347Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: 12345678|b177b06a-ddcdc287-92c7a4ef-ce14dcd4_0 is DEAD Finish: 0 === InitializeWritePQService done === PersQueueClient 2025-03-28T09:34:44.647612Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed BEFORE MODIFY PERMISSIONS 2025-03-28T09:34:44.661408Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\027\010\001\022\023\032\021test_user@builtin\n\037\010\000\022\033\010\001\020\366\213\001\032\021test_user@builtin \003" } } TxId: 281474976715665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:41834" , at schemeshard: 72057594046644480 2025-03-28T09:34:44.661575Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:34:44.661686Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-03-28T09:34:44.661700Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-03-28T09:34:44.661812Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-03-28T09:34:44.661921Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 
281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:34:44.661992Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-03-28T09:34:44.662008Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-03-28T09:34:44.662027Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-03-28T09:34:44.662036Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-03-28T09:34:44.662071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-03-28T09:34:44.662117Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2025-03-28T09:34:44.662145Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-03-28T09:34:44.662156Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-03-28T09:34:44.662167Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-03-28T09:34:44.662178Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2025-03-28T09:34:44.662187Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715665, [OwnerId: 72057594046644480, LocalPathId: 10], 3 2025-03-28T09:34:44.663968Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:34:44.664145Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user@builtin, remove access: -():test_user@builtin:- 2025-03-28T09:34:44.664278Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:34:44.664296Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715665, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-03-28T09:34:44.664459Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:34:44.664475Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7486791489213616872:2372], at schemeshard: 72057594046644480, txId: 281474976715665, path id: 10 2025-03-28T09:34:44.664939Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-03-28T09:34:44.665078Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2025-03-28T09:34:44.665105Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2025-03-28T09:34:44.665120Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at 
schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 3 2025-03-28T09:34:44.665141Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-03-28T09:34:44.665206Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 2025-03-28T09:34:44.666067Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-28T09:34:44.666107Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-03-28T09:34:44.666352Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-message-group" } 2025-03-28T09:34:44.666434Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-message-group" from ipv6:[::1]:41814 2025-03-28T09:34:44.666467Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:41814 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-03-28T09:34:44.666481Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-28T09:34:44.666745Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2025-03-28T09:34:44.667186Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-03-28T09:34:44.667327Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-28T09:34:44.667340Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:34:44.667348Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-28T09:34:44.667376Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486791502098519848:2392] (SourceId=test-message-group, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-03-28T09:34:44.667391Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:34:44.667728Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-03-28T09:34:44.667804Z node 3 :PERSQUEUE INFO: new Cookie test-message-group|d9dccadb-a8cc0191-58515608-12f27025_0 generated for partition 0 topic 'acc/topic1' owner test-message-group 2025-03-28T09:34:44.668144Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group|d9dccadb-a8cc0191-58515608-12f27025_0 2025-03-28T09:34:44.668885Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-message-group|d9dccadb-a8cc0191-58515608-12f27025_0 grpc read done: success: 1 data: update_token_request [content omitted] 2025-03-28T09:34:44.669100Z node 3 :PQ_WRITE_PROXY INFO: updating token 2025-03-28T09:34:44.669140Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-28T09:34:44.669673Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|d9dccadb-a8cc0191-58515608-12f27025_0 describe result for acl check 2025-03-28T09:34:44.669755Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_2@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-message-group|d9dccadb-a8cc0191-58515608-12f27025_0 2025-03-28T09:34:44.669946Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-message-group|d9dccadb-a8cc0191-58515608-12f27025_0 is DEAD 2025-03-28T09:34:44.670247Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] >> TTransferTests::CreateWithoutCredentials [GOOD] >> TTransferTests::CreateWrongConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:34:46.321847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:46.321941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:46.321981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:46.322016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:46.322063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:46.322092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:46.322146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:46.322243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:46.322577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:46.413890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:34:46.413991Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:46.421699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:46.422124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:46.422307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:46.438660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:46.438924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:46.439669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:46.439911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:46.447504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:46.450314Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:46.450406Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:46.450809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:46.450870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:46.450923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:46.451633Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:46.459329Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 2025-03-28T09:34:46.611184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:46.611526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 
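[Annotation] Every TTxOperationPropose Complete in this log is followed by an "AUDIT:" record with a fixed field order: txId, database (omitted in the earliest bootstrap records), subject (empty when unauthenticated), status, operation, path. A sketch of that record shape as reconstructed from the log text; the struct and formatter are illustrative, not schemeshard's actual audit code:

    #include <cstdio>
    #include <string>

    struct AuditRecord {
        unsigned long long tx_id;
        std::string database;   // e.g. "/Root"; absent in early bootstrap records
        std::string subject;    // empty when no authenticated subject
        std::string status;     // StatusAccepted, StatusSuccess, StatusSchemeError, ...
        std::string operation;  // "ALTER DATABASE", "MODIFY ACL", "CREATE EXTERNAL TABLE", ...
        std::string path;
    };

    static void Print(const AuditRecord& r) {
        std::printf("AUDIT: txId: %llu, database: %s, subject: %s, status: %s, "
                    "operation: %s, path: %s\n",
                    r.tx_id, r.database.c_str(), r.subject.c_str(),
                    r.status.c_str(), r.operation.c_str(), r.path.c_str());
    }

    int main() {
        // Reproduces the MODIFY ACL audit record seen above.
        Print({281474976720665, "/Root", "", "StatusSuccess", "MODIFY ACL",
               "/Root/acc/topic1"});
        return 0;
    }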
2025-03-28T09:34:46.611758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:46.612027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:46.612092Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:46.614853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:46.615001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:46.615219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:46.615317Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:46.615359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:46.615396Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:46.617806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:46.617873Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:46.617916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:46.620160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:46.620231Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:46.620289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:46.620359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:46.624324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:46.626703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:46.626915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:46.628059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:46.628211Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:46.628290Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:46.628626Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:46.628692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:46.628860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:46.628944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:46.631289Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:46.631350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:46.631535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:46.631592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:34:46.631700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:46.631758Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:46.631878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:46.631931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:46.631982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:46.632017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:46.632055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:46.632101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:46.632138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:46.632176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:46.632262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:46.632304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:46.632344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:46.635364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 3 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 126 2025-03-28T09:34:46.721201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:46.721615Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "BlaBlaType" } } 2025-03-28T09:34:46.721718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 126:0, path# /MyRoot/DirA/Table2 2025-03-28T09:34:46.722037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, at schemeshard: 72057594046678944 2025-03-28T09:34:46.724737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Type \'BlaBlaType\' specified for column \'RowId\' is not supported by storage" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:46.724932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2025-03-28T09:34:46.728111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:46.728544Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } 2025-03-28T09:34:46.728649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 127:0, path# /MyRoot/DirA/Table2 2025-03-28T09:34:46.728816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Columns cannot 
have an empty name, at schemeshard: 72057594046678944 2025-03-28T09:34:46.731242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Columns cannot have an empty name" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:46.731463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Columns cannot have an empty name, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2025-03-28T09:34:46.734640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:46.735025Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } 2025-03-28T09:34:46.735152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/DirA/Table2 2025-03-28T09:34:46.735325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, at schemeshard: 72057594046678944 2025-03-28T09:34:46.737875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Cannot set TypeId for column \'RowId\', use Type" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:46.738154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2025-03-28T09:34:46.741447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:46.741901Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } 2025-03-28T09:34:46.742007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 129:0, path# /MyRoot/DirA/Table2 2025-03-28T09:34:46.742172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Missing Type for column 'RowId', at schemeshard: 72057594046678944 2025-03-28T09:34:46.744960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Missing Type for column \'RowId\'" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:46.745182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Missing Type for column 'RowId', operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 129, wait until txId: 129 TestModificationResults wait txId: 130 2025-03-28T09:34:46.748564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } } TxId: 130 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:46.748965Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 130:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } 2025-03-28T09:34:46.749082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 130:0, path# /MyRoot/DirA/Table2 2025-03-28T09:34:46.749317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 130:1, propose status:StatusSchemeError, reason: Duplicate column id: 2, at schemeshard: 72057594046678944 2025-03-28T09:34:46.752036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 130, response: Status: StatusSchemeError Reason: "Duplicate column id: 2" TxId: 130 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:46.752213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 130, database: /MyRoot, subject: , status: StatusSchemeError, reason: Duplicate column id: 2, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 130, wait until txId: 130 TestModificationResults wait txId: 131 2025-03-28T09:34:46.755529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } } TxId: 131 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:46.755923Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] CreateNewExternalTable, opId 131:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" 
} } 2025-03-28T09:34:46.756029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateExternalTable Propose: opId# 131:0, path# /MyRoot/DirA/Table2 2025-03-28T09:34:46.756191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 131:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2025-03-28T09:34:46.758870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 131, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 131 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:46.759049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 131, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 131, wait until txId: 131
------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD]
Test command err: === Server->StartServer(false); 2025-03-28T09:34:35.782345Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791464829277539:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:35.782384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:35.835484Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791464700109332:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:35.835519Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:36.020123Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:34:36.027095Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00153a/r3tmp/tmp9AywgO/pdisk_1.dat 2025-03-28T09:34:36.296071Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.296152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.296763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.296809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.304941Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:36.305058Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:36.305973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2025-03-28T09:34:36.306167Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1129, node 1 2025-03-28T09:34:36.520333Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00153a/r3tmp/yandex1alenx.tmp 2025-03-28T09:34:36.520360Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00153a/r3tmp/yandex1alenx.tmp 2025-03-28T09:34:36.520490Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00153a/r3tmp/yandex1alenx.tmp 2025-03-28T09:34:36.520603Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:36.711056Z INFO: TTestServer started on Port 14931 GrpcPort 1129 TClient is connected to server localhost:14931 PQClient connected to localhost:1129 === TenantModeEnabled() = 1 === Init PQ - start server on port 1129 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
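Note: txIds 126-130 in the schemeshard log above enumerate the column checks for CREATE EXTERNAL TABLE (the type must be one storage supports, column names must be non-empty, Type is required and TypeId is rejected, column ids must be unique), and txId 131 shows the referenced data-source path being resolved. A minimal YQL sketch of a definition that passes all of these checks, assuming the documented external data source / external table syntax; the paths and bucket URL are taken from the test scenario above, and the FORMAT value is illustrative rather than part of the test:

    CREATE EXTERNAL DATA SOURCE `/MyRoot/ExternalDataSource` WITH (
        SOURCE_TYPE = "ObjectStorage",                -- matches SourceType in ExternalDataSourceDescription above
        LOCATION = "https://s3.cloud.net/my_bucket",  -- bucket from the description above
        AUTH_METHOD = "NONE"                          -- corresponds to Auth { None { } }
    );

    CREATE EXTERNAL TABLE `/MyRoot/DirA/Table2` (
        RowId Uint64,   -- a supported type name; 'BlaBlaType' was rejected in txId 126
        Value Utf8      -- non-empty name (txId 127), Type rather than TypeId (txId 128)
    ) WITH (
        DATA_SOURCE = "/MyRoot/ExternalDataSource",   -- must resolve, unlike ExternalDataSource1 in txId 131
        LOCATION = "/",
        FORMAT = "csv_with_names"                     -- illustrative; not taken from the test
    );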
2025-03-28T09:34:37.137017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:34:37.137264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.137542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:34:37.137851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:34:37.137964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.143999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:34:37.144202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:34:37.144784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.144870Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:34:37.144892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-28T09:34:37.144907Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-03-28T09:34:37.150726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:34:37.150866Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:34:37.150894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-28T09:34:37.155968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.155997Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.156031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.156070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.160712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:37.161034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:34:37.161062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-28T09:34:37.161079Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:34:37.163866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-28T09:34:37.164150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:34:37.167113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743154477211, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:34:37.167258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154477211 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:34:37.167293Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.167742Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-28T09:34:37.167772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.168067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:34:37.168113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:34:37.177596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046644480 2025-03-28T09:34:37.177633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:34:37.177827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:34:37.177853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486791469124245498:2404], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-28T09:34:37.177888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.177911Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-28T09:34:37.178045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:34:37.178066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.178090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:34:37.178105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.178121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-28T09:34:37.178171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.178192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-28T09:34:37.178207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-03-28T09:34:37.178256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 7205759404664 ... : 0 MessagesInflight: 0 } 2025-03-28T09:34:45.107528Z :NOTICE: [/Root] [/Root] [f8b5a4b1-446656d2-c2222000-8a99b5a4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:34:45.107780Z :INFO: [/Root] [/Root] [67486b26-4a2a87a6-bf3001b9-e742592] Starting read session 2025-03-28T09:34:45.107818Z :DEBUG: [/Root] [/Root] [67486b26-4a2a87a6-bf3001b9-e742592] Starting session to cluster null (localhost:61270) 2025-03-28T09:34:45.107928Z :DEBUG: [/Root] [/Root] [67486b26-4a2a87a6-bf3001b9-e742592] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:45.107954Z :DEBUG: [/Root] [/Root] [67486b26-4a2a87a6-bf3001b9-e742592] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:45.107984Z :DEBUG: [/Root] [/Root] [67486b26-4a2a87a6-bf3001b9-e742592] [null] Reconnecting session to cluster null in 0.000000s 2025-03-28T09:34:45.110852Z :DEBUG: [/Root] [/Root] [67486b26-4a2a87a6-bf3001b9-e742592] [null] Successfully connected. Initializing session 2025-03-28T09:34:45.111242Z node 3 :PQ_READ_PROXY DEBUG: new grpc connection 2025-03-28T09:34:45.111271Z node 3 :PQ_READ_PROXY DEBUG: new session created cookie 2 2025-03-28T09:34:45.111608Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-03-28T09:34:45.111768Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 read init: from# ipv6:[::1]:33432, request# { init_request { topics_read_settings { topic: "/Root/account1/write_topic" } read_only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2025-03-28T09:34:45.111842Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 auth for : consumer_aba 2025-03-28T09:34:45.112311Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 Handle describe topics response 2025-03-28T09:34:45.112402Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 auth is DEAD 2025-03-28T09:34:45.112440Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 auth ok: topics# 1, initDone# 0 2025-03-28T09:34:45.113536Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 register session: topic# /Root/account1/write_topic 2025-03-28T09:34:45.113883Z :INFO: [/Root] [/Root] [67486b26-4a2a87a6-bf3001b9-e742592] [null] Server session id: consumer_aba_3_2_109532043444006642_v1 2025-03-28T09:34:45.114132Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7486791507276630346:2415] connected; active server actors: 1 2025-03-28T09:34:45.114214Z :DEBUG: [/Root] [/Root] [67486b26-4a2a87a6-bf3001b9-e742592] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:34:45.114464Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][write_topic] consumer "consumer_aba" register session for pipe [3:7486791507276630346:2415] session consumer_aba_3_2_109532043444006642_v1 2025-03-28T09:34:45.114490Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 grpc read done: success# 1, data# { read { } } 2025-03-28T09:34:45.114546Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: 
[72075186224037894][write_topic] consumer consumer_aba register readable partition 0 2025-03-28T09:34:45.114623Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba family created family=1 (Status=Free, Partitions=[0]) 2025-03-28T09:34:45.114677Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba register reading session ReadingSession "consumer_aba_3_2_109532043444006642_v1" (Sender=[3:7486791507276630343:2415], Pipe=[3:7486791507276630346:2415], Partitions=[], ActiveFamilyCount=0) 2025-03-28T09:34:45.114672Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 got read request: guid# b02a0c5d-b720cc38-92499305-a9d7fbfc 2025-03-28T09:34:45.114728Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba rebalancing was scheduled 2025-03-28T09:34:45.114807Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing. Sessions=1, Families=1, UnradableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2025-03-28T09:34:45.114874Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "consumer_aba_3_2_109532043444006642_v1" (Sender=[3:7486791507276630343:2415], Pipe=[3:7486791507276630346:2415], Partitions=[], ActiveFamilyCount=0) 2025-03-28T09:34:45.114956Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] consumer consumer_aba family 1 status Active partitions [0] session "consumer_aba_3_2_109532043444006642_v1" sender [3:7486791507276630343:2415] lock partition 0 for ReadingSession "consumer_aba_3_2_109532043444006642_v1" (Sender=[3:7486791507276630343:2415], Pipe=[3:7486791507276630346:2415], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2025-03-28T09:34:45.115059Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2025-03-28T09:34:45.115095Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037894][write_topic] consumer consumer_aba balancing duration: 0.000260s 2025-03-28T09:34:45.118053Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 assign: record# { Partition: 0 TabletId: 72075186224037893 Topic: "write_topic" Generation: 1 Step: 1 Session: "consumer_aba_3_2_109532043444006642_v1" ClientId: "consumer_aba" PipeClient { RawX1: 7486791507276630346 RawX2: 4503612512274799 } Path: "/Root/account1/write_topic" } 2025-03-28T09:34:45.118185Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 INITING TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) 2025-03-28T09:34:45.118434Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037893 Generation: 1, pipe: [3:7486791507276630348:2418] 2025-03-28T09:34:45.118493Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: registered server session: consumer_aba_3_2_109532043444006642_v1:1 with generation 1 2025-03-28T09:34:45.142342Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 1 WriteTimestampMS: 1743154484995 CreateTimestampMS: 1743154484994 SizeLag: 165 WriteTimestampEstimateMS: 1743154484995 } Cookie: 18446744073709551615 } 2025-03-28T09:34:45.142399Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 INIT DONE TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) EndOffset 1 readOffset 0 committedOffset 0 2025-03-28T09:34:45.142513Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 sending to client partition status Got new read session event: CreatePartitionStream { PartitionStreamId: 1 TopicPath: account1/write_topic Cluster: PartitionId: 0 CommittedOffset: 0 EndOffset: 1 } 2025-03-28T09:34:45.143375Z :INFO: [/Root] [/Root] [67486b26-4a2a87a6-bf3001b9-e742592] Closing read session. Close timeout: 0.000000s 2025-03-28T09:34:45.143435Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account1/write_topic:0:1:0:0 2025-03-28T09:34:45.143480Z :INFO: [/Root] [/Root] [67486b26-4a2a87a6-bf3001b9-e742592] Counters: { Errors: 0 CurrentSessionLifetimeMs: 35 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:34:45.143564Z :NOTICE: [/Root] [/Root] [67486b26-4a2a87a6-bf3001b9-e742592] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-03-28T09:34:45.143603Z :DEBUG: [/Root] [/Root] [67486b26-4a2a87a6-bf3001b9-e742592] [null] Abort session to cluster 2025-03-28T09:34:45.144349Z :NOTICE: [/Root] [/Root] [67486b26-4a2a87a6-bf3001b9-e742592] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:34:45.144389Z node 3 :PQ_READ_PROXY DEBUG: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 grpc read done: success# 0, data# { } 2025-03-28T09:34:45.144428Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 grpc read failed 2025-03-28T09:34:45.144460Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 grpc closed 2025-03-28T09:34:45.144503Z node 3 :PQ_READ_PROXY INFO: session cookie 2 consumer consumer_aba session consumer_aba_3_2_109532043444006642_v1 is DEAD 2025-03-28T09:34:45.144745Z node 3 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: consumer_aba_3_2_109532043444006642_v1 2025-03-28T09:34:45.145784Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72075186224037894][write_topic] pipe [3:7486791507276630346:2415] disconnected; active server actors: 1 2025-03-28T09:34:45.145830Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037894][write_topic] pipe [3:7486791507276630346:2415] client consumer_aba disconnected session consumer_aba_3_2_109532043444006642_v1 2025-03-28T09:34:45.650797Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486791507276630371:2425], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:34:45.650996Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZmE5NzU3ODktMzEyZWYzMzYtYTdlMjk3NTYtZjcxM2Y2NGQ=, ActorId: [3:7486791507276630364:2421], ActorState: ExecuteState, TraceId: 01jqe1n6bw9jyf93je7zbjabbg, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:34:45.651353Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 }
>> TVersions::WreckHeadReverse [GOOD]
>> TVersions::Wreck2
------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD]
Test command err: === Server->StartServer(false); 2025-03-28T09:34:35.794135Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791465705165002:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:35.794268Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:35.856017Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791463811041365:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:35.856076Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:36.018313Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:34:36.024323Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00156b/r3tmp/tmpsaCVYN/pdisk_1.dat 2025-03-28T09:34:36.266632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.267182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.267937Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.267991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.300082Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:36.300503Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:36.300874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
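Note: the recurring SCHEME_ERROR from PERSQUEUE_CLUSTER_TRACKER (seen just above and again later in this log) is the cluster tracker trying to list clusters from the legacy config table /Root/PQ/Config/V2/Cluster, which these tenant-mode tests never create. A hedged reconstruction of the failing read; the issue positions point at row 3, column 16 of a larger query, so this is the shape of the statement, not its verbatim text:

    --!syntax_v1
    -- failing read, roughly: the table below does not exist in this database
    SELECT * FROM `/Root/PQ/Config/V2/Cluster`;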
2025-03-28T09:34:36.303024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 9573, node 1 2025-03-28T09:34:36.519324Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00156b/r3tmp/yandexWfeZ2J.tmp 2025-03-28T09:34:36.519353Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00156b/r3tmp/yandexWfeZ2J.tmp 2025-03-28T09:34:36.519508Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00156b/r3tmp/yandexWfeZ2J.tmp 2025-03-28T09:34:36.519668Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:36.711274Z INFO: TTestServer started on Port 28508 GrpcPort 9573 TClient is connected to server localhost:28508 PQClient connected to localhost:9573 === TenantModeEnabled() = 1 === Init PQ - start server on port 9573 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
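Note: the write-session logs that follow target /Root/acc/topic1; the test first writes with a valid token, then drops test_user_0@builtin's access via ModifyACL (audit line 'remove access: -():test_user_0@builtin:-'), after which the session's next ACL re-check fails with 'no WriteTopic rights' and the session dies with UNAUTHORIZED. A hedged YQL sketch of the equivalent steps, assuming the documented CREATE TOPIC and GRANT/REVOKE syntax; the test itself performs these through the scheme API, not through YQL:

    CREATE TOPIC `/Root/acc/topic1`;

    -- grant, write, then revoke; after revocation the still-open write session
    -- is closed with UNAUTHORIZED on its next ACL check, as in the log below
    GRANT ALL ON `/Root/acc/topic1` TO `test_user_0@builtin`;
    REVOKE ALL ON `/Root/acc/topic1` FROM `test_user_0@builtin`;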
2025-03-28T09:34:37.128929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:34:37.129168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.129385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:34:37.129643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:34:37.129714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.131988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:34:37.132239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:34:37.132395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.132464Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:34:37.132480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-28T09:34:37.132491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 2025-03-28T09:34:37.134801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.134855Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:34:37.134873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-28T09:34:37.136474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.136499Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.136530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.136549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.140816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:37.142507Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 waiting... 2025-03-28T09:34:37.142864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:34:37.145517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743154477190, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:34:37.145715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154477190 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:34:37.145763Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.146043Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-28T09:34:37.146085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.146306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:34:37.146389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:34:37.148303Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:34:37.148334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:34:37.148456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:34:37.148470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486791470000132953:2394], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-28T09:34:37.148501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.148516Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-28T09:34:37.148589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:34:37.148601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.148638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:34:37.148650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.148665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-28T09:34:37.148679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.148697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 
281474976710657:0 2025-03-28T09:34:37.148727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-03-28T09:34:37.148780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2025-03-28T09:34:37.148791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2025-03-28T09:34:37.148801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976710657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-03-28T09:34:37.155372Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 7205759404664448 ... PublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2025-03-28T09:34:44.501421Z node 3 :PQ_WRITE_PROXY DEBUG: new grpc connection 2025-03-28T09:34:44.501450Z node 3 :PQ_WRITE_PROXY DEBUG: new session created cookie 2 2025-03-28T09:34:44.501736Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2025-03-28T09:34:44.501821Z node 3 :PQ_WRITE_PROXY INFO: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:54494 2025-03-28T09:34:44.501840Z node 3 :PQ_WRITE_PROXY INFO: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:54494 proto=v1 topic=/Root/acc/topic1 durationSec=0 2025-03-28T09:34:44.501889Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-28T09:34:44.502589Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: describe result for acl check 2025-03-28T09:34:44.502764Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-28T09:34:44.502781Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:34:44.502807Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-28T09:34:44.502846Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486791502430714111:2392] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-03-28T09:34:44.502866Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:34:44.503261Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2025-03-28T09:34:44.503430Z node 3 :PERSQUEUE INFO: new Cookie test-group-id|aa520d16-c1ecfe75-4da9d171-b6139da9_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2025-03-28T09:34:44.503882Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|aa520d16-c1ecfe75-4da9d171-b6139da9_0 ===Assert streaming op1 ===Assert streaming op2 2025-03-28T09:34:44.504712Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 2 sessionId: test-group-id|aa520d16-c1ecfe75-4da9d171-b6139da9_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-28T09:34:44.504968Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-03-28T09:34:44.505221Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:34:44.509026Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse ===ModifyAcl BEFORE MODIFY PERMISSIONS 2025-03-28T09:34:44.528393Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin" } } TxId: 281474976715666 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:54526" , at schemeshard: 72057594046644480 2025-03-28T09:34:44.528587Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:34:44.528717Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 10] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 9] 2025-03-28T09:34:44.528740Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-03-28T09:34:44.528880Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715666:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2025-03-28T09:34:44.528917Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:34:44.528999Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715666:0 progress is 1/1 2025-03-28T09:34:44.529027Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-03-28T09:34:44.529045Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715666:0 progress is 1/1 2025-03-28T09:34:44.529055Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-03-28T09:34:44.529093Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 3 2025-03-28T09:34:44.529138Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715666, 
ready parts: 1/1, is published: false 2025-03-28T09:34:44.529155Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 10], at schemeshard: 72057594046644480 2025-03-28T09:34:44.529166Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715666 ready parts: 1/1 2025-03-28T09:34:44.529180Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715666:0 2025-03-28T09:34:44.529191Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715666, publications: 1, subscribers: 0 2025-03-28T09:34:44.529203Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715666, [OwnerId: 72057594046644480, LocalPathId: 10], 4 2025-03-28T09:34:44.531083Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715666, response: Status: StatusSuccess TxId: 281474976715666 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:34:44.531306Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715666, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, remove access: -():test_user_0@builtin:- 2025-03-28T09:34:44.531460Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:34:44.531494Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715666, path id: [OwnerId: 72057594046644480, LocalPathId: 10] 2025-03-28T09:34:44.531742Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:34:44.531771Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7486791489545811131:2371], at schemeshard: 72057594046644480, txId: 281474976715666, path id: 10 2025-03-28T09:34:44.532251Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715666 2025-03-28T09:34:44.532339Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 10 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715666 2025-03-28T09:34:44.532375Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715666 2025-03-28T09:34:44.532404Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715666, pathId: [OwnerId: 72057594046644480, LocalPathId: 10], version: 4 2025-03-28T09:34:44.532431Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 10] was 4 2025-03-28T09:34:44.532537Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715666, subscribers: 0 ===Wait for session created with token with removed ACE to die2025-03-28T09:34:44.534359Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715666 2025-03-28T09:34:45.189395Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486791506725681434:2397], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:34:45.189569Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=ZDlmNDM2NTktM2ViZjI1ZjEtMTkzZDViOGQtOGIwNDQ5OGI=, ActorId: [3:7486791506725681432:2396], ActorState: ExecuteState, TraceId: 01jqe1n5xn92hsj1tg3g7et72a, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:34:45.189899Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:34:45.504579Z node 3 :PQ_WRITE_PROXY INFO: init check schema 2025-03-28T09:34:45.505346Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|aa520d16-c1ecfe75-4da9d171-b6139da9_0 describe result for acl check 2025-03-28T09:34:45.505476Z node 3 :PQ_WRITE_PROXY INFO: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_0@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-group-id|aa520d16-c1ecfe75-4da9d171-b6139da9_0 2025-03-28T09:34:45.505826Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 2 sessionId: test-group-id|aa520d16-c1ecfe75-4da9d171-b6139da9_0 is DEAD status: UNAUTHORIZED issues { message: "access to topic \'Topic /Root/acc/topic1 in database: /Root\' denied for \'test_user_0@builtin\' due to \'no WriteTopic rights\', Marker# PQ1125" issue_code: 500018 severity: 1 } 2025-03-28T09:34:45.506177Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison
>> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC [GOOD]
>> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC
>> TTransferTests::CreateWrongConfig [GOOD]
>> TTransferTests::CreateWrongBatchSize
>> TPartBtreeIndexIteration::FewNodes_History [GOOD]
>> TPartBtreeIndexIteration::FewNodes_Sticky
>> TExternalTableTest::ParallelCreateExternalTable [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD]
Test command err: === Server->StartServer(false); 2025-03-28T09:34:35.794938Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791462401009093:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:35.795333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:35.842177Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791463669233176:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:35.842849Z node 2 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:35.993348Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:34:36.019888Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001533/r3tmp/tmpKuD98i/pdisk_1.dat 2025-03-28T09:34:36.284743Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:36.289030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.289129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.292521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.292578Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.298591Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:36.301936Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:36.303359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 12449, node 1 2025-03-28T09:34:36.523362Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001533/r3tmp/yandexcFcJvg.tmp 2025-03-28T09:34:36.523389Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001533/r3tmp/yandexcFcJvg.tmp 2025-03-28T09:34:36.523571Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001533/r3tmp/yandexcFcJvg.tmp 2025-03-28T09:34:36.523697Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:36.711127Z INFO: TTestServer started on Port 9465 GrpcPort 12449 TClient is connected to server localhost:9465 PQClient connected to localhost:12449 === TenantModeEnabled() = 1 === Init PQ - start server on port 12449 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:34:37.112953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:34:37.113165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.113362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:34:37.113610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:34:37.113654Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.126807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:34:37.127031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:34:37.127294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.127435Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:34:37.127467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-28T09:34:37.127485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-28T09:34:37.135713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.135778Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:34:37.135797Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-28T09:34:37.136783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:34:37.136800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-28T09:34:37.136821Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:34:37.143613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.143652Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.143675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.143732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.161237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:37.173063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-28T09:34:37.173221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:34:37.176513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743154477218, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:34:37.176672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154477218 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:34:37.176713Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.176997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-28T09:34:37.177044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.177203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:34:37.177275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:34:37.187396Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:34:37.187441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:34:37.190942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:34:37.190977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486791466695976903:2397], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-28T09:34:37.191102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.191137Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-28T09:34:37.191304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:34:37.191335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.191354Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:34:37.191363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.191379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-28T09:34:37.191395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.191409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-28T09:34:37.191417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710657:0 2025-03-28T09:34:37.191469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046 ... ionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-28T09:34:45.760737Z node 3 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [3:7486791505289361318:2428] (SourceId=123, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2025-03-28T09:34:45.760759Z node 3 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 4 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:34:45.761349Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037893, NodeId 3, Generation: 1 2025-03-28T09:34:45.761366Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server connected, pipe [3:7486791505289361321:2428], now have 1 active actors on pipe 2025-03-28T09:34:45.761394Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:34:45.761413Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-03-28T09:34:45.761482Z node 3 :PERSQUEUE INFO: new Cookie 123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0 generated for partition 0 topic 'PQ/account/topic' owner 123 2025-03-28T09:34:45.761573Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-03-28T09:34:45.761640Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:34:45.761744Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:34:45.761769Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-03-28T09:34:45.761833Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:34:45.761916Z node 3 :PQ_WRITE_PROXY INFO: session inited cookie: 4 partition: 0 MaxSeqNo: 2 sessionId: 123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0 2025-03-28T09:34:45.762545Z :INFO: [] MessageGroupId [123] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743154485762 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:34:45.762634Z :INFO: [] MessageGroupId [123] SessionId [] Write session established. Init response: last_sequence_number: 2 session_id: "123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0" topic: "PQ/account/topic" 2025-03-28T09:34:45.762870Z :DEBUG: [] MessageGroupId [123] SessionId [123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0] Write 1 messages with Id from 1 to 1 2025-03-28T09:34:45.763003Z :DEBUG: [] MessageGroupId [123] SessionId [123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0] Write session: try to update token 2025-03-28T09:34:45.763041Z :DEBUG: [] MessageGroupId [123] SessionId [123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0] Send 1 message(s) (0 left), first sequence number is 3 2025-03-28T09:34:45.763265Z :INFO: [] MessageGroupId [123] SessionId [123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0] Write session: close. Timeout = 10000 ms 2025-03-28T09:34:45.763532Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0 grpc read done: success: 1 data: write_request[data omitted] 2025-03-28T09:34:45.763863Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-03-28T09:34:45.764005Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:34:45.764051Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-03-28T09:34:45.764158Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-03-28T09:34:45.764222Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:34:45.764417Z node 3 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:34:45.764448Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message batch for topic 'PQ/account/topic' partition 0 2025-03-28T09:34:45.764492Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] got client message topic: PQ/account/topic partition: 0 SourceId: '\000123' SeqNo: 3 partNo : 0 messageNo: 1 size 372 offset: -1 2025-03-28T09:34:45.764585Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account/topic". Partition: 0. Amount: 376. Cookie: 3 2025-03-28T09:34:45.764677Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account/topic". 
Partition: 0: Cookie: 3 2025-03-28T09:34:45.764844Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob processing sourceId '\000123' seqNo 3 partNo 0 2025-03-28T09:34:45.766538Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Topic 'PQ/account/topic' partition 0 part blob complete sourceId '\000123' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 443 count 1 nextOffset 3 batches 1 2025-03-28T09:34:45.767119Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account/topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 431 WTime 1743154485766 2025-03-28T09:34:45.767234Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:34:45.767271Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:34:45.767290Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:34:45.767305Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:34:45.767321Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] m0000000000p123 2025-03-28T09:34:45.767335Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-03-28T09:34:45.767344Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:34:45.767360Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:34:45.767379Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:34:45.767421Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:34:45.767483Z node 3 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 431 2025-03-28T09:34:45.769706Z node 3 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 431 actorID [3:7486791505289361021:2401] 2025-03-28T09:34:45.769809Z node 3 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037893' partition 0 offset 2 partno 0 count 1 parts 0 size 431 2025-03-28T09:34:45.769835Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 376 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:34:45.769886Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-28T09:34:45.769939Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] Answering for message sourceid: '\000123', Topic: 'PQ/account/topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-03-28T09:34:45.769960Z node 3 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-03-28T09:34:45.770045Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:34:45.770677Z :DEBUG: [] MessageGroupId [123] SessionId [123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0] Write session got write response: sequence_numbers: 3 offsets: 2 already_written: false write_statistics { persist_duration_ms: 3 queued_in_partition_duration_ms: 2 } 2025-03-28T09:34:45.770856Z :DEBUG: [] MessageGroupId [123] SessionId [123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0] Write session: acknowledged message 1 2025-03-28T09:34:45.863388Z :INFO: [] MessageGroupId [123] SessionId [123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0] Write session will now close 2025-03-28T09:34:45.863447Z :DEBUG: [] MessageGroupId [123] SessionId [123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0] Write session: aborting 2025-03-28T09:34:45.864217Z :INFO: [] MessageGroupId [123] SessionId [123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:34:45.864239Z :DEBUG: [] MessageGroupId [123] SessionId [123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0] Write session is aborting and will not restart 2025-03-28T09:34:45.864292Z :DEBUG: [] MessageGroupId [123] SessionId [123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0] Write session: destroy 2025-03-28T09:34:45.864522Z node 3 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 4 sessionId: 123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0 grpc read done: success: 0 data: 2025-03-28T09:34:45.864540Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0 grpc read failed 2025-03-28T09:34:45.864838Z node 3 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 4 sessionId: 123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0 2025-03-28T09:34:45.864855Z node 3 :PQ_WRITE_PROXY INFO: session v1 cookie: 4 sessionId: 123|7f6a7f44-b0ca3367-2fee0639-e31acd3b_0 is DEAD 2025-03-28T09:34:45.865109Z node 3 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037893 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:34:45.865297Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893] server disconnected, pipe [3:7486791505289361321:2428] destroyed 2025-03-28T09:34:45.865341Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037893, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-28T09:34:46.043531Z node 3 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [3:7486791509584328640:2436], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:34:46.043758Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=YzQzMmRiMDktOTcxZTY1ZmQtY2QzOWM2ZjktZjQ2YWVkYjg=, ActorId: [3:7486791509584328633:2432], ActorState: ExecuteState, TraceId: 01jqe1n6r668e648kyvq47v8m7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:34:46.044218Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:126:2058] recipient: [1:109:2141] 2025-03-28T09:34:47.414888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:47.414987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:47.415026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:47.415058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:47.415102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:47.415146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:47.415216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:47.415293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:47.415573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:47.505953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" 
AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:34:47.506026Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:47.513799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:47.514160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:47.514336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:47.529621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:47.529842Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:47.530480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:47.530710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:47.537932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:47.540378Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:47.540485Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:47.540799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:47.540854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:47.540906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:47.541561Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:47.548476Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:124:2150] sender: [1:242:2058] recipient: [1:15:2062] 2025-03-28T09:34:47.667267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:47.667513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:47.667660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:47.667861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:47.667907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:47.669813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-03-28T09:34:47.669919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:47.670054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:47.670111Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:47.670139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:47.670164Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:47.671736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:47.671781Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:47.671807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:47.673053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:47.673083Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:47.673122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:47.673165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:47.680461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:47.682352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:47.682479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:47.683314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:47.683411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 134 RawX2: 4294969453 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:47.683452Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:47.683639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:47.683673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:47.683772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:47.683849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:47.685271Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:47.685302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:47.685415Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:47.685448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:208:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:34:47.685497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:47.685522Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:47.685591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:47.685618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:47.685644Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:47.685668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:47.685722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:47.685780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:47.685828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:47.685858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:47.685914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:47.685946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:47.685975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:47.688291Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 720575940466789 ... 
satisfy waiter [1:378:2369] 2025-03-28T09:34:47.772633Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2025-03-28T09:34:47.772686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2025-03-28T09:34:47.772699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:378:2369] 2025-03-28T09:34:47.772746Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2025-03-28T09:34:47.772821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2025-03-28T09:34:47.772834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:378:2369] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2025-03-28T09:34:47.773164Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:47.773336Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 156us result status StatusSuccess 2025-03-28T09:34:47.773570Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:47.774083Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:47.774219Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/DirA/ExternalTable2" took 132us result status StatusSuccess 2025-03-28T09:34:47.774444Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:47.774971Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:47.775075Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 114us result status StatusSuccess 2025-03-28T09:34:47.775316Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 125 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 
Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:47.775644Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:47.775754Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 114us result status StatusSuccess 2025-03-28T09:34:47.775934Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000005 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 4 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:47.776341Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:47.776471Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 114us result status StatusSuccess 2025-03-28T09:34:47.776652Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 5 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTransferTests::CreateWrongBatchSize [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsSmall >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 |94.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... 
results_accumulator.log} |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterSubdomain >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true >> TSchemeShardAuditSettings::CreateExtSubdomain >> TSchemeShardAuditSettings::CreateSubdomain >> TTransferTests::CreateWrongFlushIntervalIsSmall [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsBig >> TPartBtreeIndexIteration::FewNodes_Sticky [GOOD] >> TPartBtreeIndexIteration::FewNodes_Slices >> TExecutorDb::FullScan [GOOD] >> TExecutorDb::CoordinatorSimulation >> BuildStatsHistogram::Ten_Crossed [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log >> Cdc::UpdatesLog[PqRunner] [GOOD] >> Cdc::UpdatesLog[YdsRunner] >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] >> Cdc::DocApi[YdsRunner] [GOOD] >> Cdc::DocApi[TopicRunner] >> TPartBtreeIndexIteration::FewNodes_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_Slices ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:34:41.549054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:41.549184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:41.549238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:41.549276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:41.550537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:41.550592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:41.550728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:41.550815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:41.554024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:41.644117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:41.644180Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:41.660374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 
2025-03-28T09:34:41.660727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:41.660879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:41.669446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:41.669668Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:41.672794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.673861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:41.680959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.690250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:41.690327Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.691317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:41.691375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:41.691442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:41.691529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.697753Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:34:41.814798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:41.817131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.819193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:41.820327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:41.820410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.827973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.828126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:41.828293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.828365Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:41.828417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:41.828455Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:41.831580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.831650Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:41.831687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:41.833591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.833649Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.833704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.833747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.837176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:41.839137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:41.839301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:41.840384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840568Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.840827Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:41.840891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.841057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:41.841141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:41.842714Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:41.842760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:41.842907Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.842940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:41.843280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.843351Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:41.843429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:41.843459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.843490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:41.843518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.843553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:41.843589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.843618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:41.843642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:41.843691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:41.843721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:41.843766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:41.845385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:41.845473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:41.845504Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Simple, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:34:49.598353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.598410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.598815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:49.598917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-28T09:34:49.598991Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-28T09:34:49.599195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-03-28T09:34:49.599328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.599420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:49.599462Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-03-28T09:34:49.599501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:34:49.599639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:49.599762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.599983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-03-28T09:34:49.600286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.600385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.600724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.600793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.600991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.601076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.601151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.601384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.601486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 
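The records just above show the schemeshard tablet rebooting and replaying its persisted state: one TTxInit pass per local-DB table (Tables, Columns, Shards, TablePartitions, ChannelsBinding, and so on), each reporting how many records it recovered. A minimal sketch of that restore pattern, with hypothetical names standing in for YDB's actual local-DB API and the counts taken from the log above:

    #include <cstdio>
    #include <string>
    #include <utility>
    #include <vector>

    int main() {
        // Counts mirror the surrounding log; a real tablet would obtain
        // them from local-DB range reads during TTxInit, not literals.
        const std::vector<std::pair<std::string, int>> restored = {
            {"Tables", 1},          {"Columns", 2},
            {"Shards", 1},          {"TablePartitions", 1},
            {"ChannelsBinding", 3}, {"TableIndexes", 0},
        };
        for (const auto& [table, records] : restored) {
            // One NOTICE line per table, as in "TTxInit for Tables, read records: 1".
            std::printf("TTxInit for %s, read records: %d\n", table.c_str(), records);
        }
        return 0;
    }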
2025-03-28T09:34:49.601623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.601818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.601971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.602029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.602115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.602328Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:34:49.608434Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:34:49.608578Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-28T09:34:49.609799Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435082, Sender [1:1015:2959], Recipient [1:1015:2959]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-03-28T09:34:49.609846Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-03-28T09:34:49.611686Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:49.611760Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:49.612247Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1015:2959], Recipient [1:1015:2959]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:34:49.612308Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:34:49.613117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:49.613174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:49.613219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:49.613249Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:34:49.613682Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1051:2959], Recipient [1:1015:2959]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-03-28T09:34:49.613717Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-03-28T09:34:49.613750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1015:2959] sender: [1:1071:2058] recipient: [1:15:2062] 2025-03-28T09:34:49.654218Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1070:3003], Recipient [1:1015:2959]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-03-28T09:34:49.654283Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-28T09:34:49.654395Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:34:49.654732Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 246us result status StatusSuccess 2025-03-28T09:34:49.655499Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } 
TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82488 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:34:45.875562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:45.875747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:45.875803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:45.875855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:45.876610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:45.876646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:45.876706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:45.876779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:45.877922Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:45.960561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:45.960622Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:45.974904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:45.975095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:45.975233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:45.982216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:45.982413Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:45.986242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:45.987556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:45.993912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:46.001053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:46.001191Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:46.001942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:46.001986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:46.002048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:46.002135Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:46.007675Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:34:46.120237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:46.122182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:46.124158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:46.125234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:46.125288Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:46.127907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:46.128045Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:46.128212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:46.128299Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:46.128325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:46.128357Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:46.130077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:46.130141Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:46.130172Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:46.131798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:46.131855Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:46.131902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:46.131944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:46.136208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:46.138123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:46.139043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:46.139926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:46.140019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:46.140057Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:46.141349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:46.141399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:46.141543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:46.141602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:46.143970Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:46.144042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:46.144224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:46.144292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:46.144740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:46.144796Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:46.144902Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:46.144934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:46.144968Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:46.144996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:46.145028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:46.145065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:46.145097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:46.145127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:46.145206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:46.145252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:46.145285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:46.152260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:46.152416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:46.152468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
7594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:49.637656Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:49.637841Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:49.638535Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:49.638635Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 135 RawX2: 25769805933 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:49.638676Z node 6 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:49.638914Z node 6 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:49.638958Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:49.639109Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:49.639167Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:49.640580Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:49.640628Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:49.640776Z node 6 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:49.640814Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [6:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:34:49.641028Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.641072Z node 6 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:49.641164Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:49.641201Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.641234Z node 6 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:49.641266Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.641297Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 
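The repeated "Change state for txid 1:0 2 -> 3", "3 -> 128" and "128 -> 240" transitions trace a per-suboperation state machine: create parts, configure parts, propose to the coordinator, done. A sketch of that lifecycle, with the state names and numeric values inferred from this log rather than copied from YDB's headers:

    #include <cstdio>

    // Values inferred from the "Change state for txid" lines above; treat
    // them as an illustration of the log, not as YDB's canonical enum.
    enum class ETxState : int {
        CreateParts    = 2,   // "TCreateParts ... ProgressState"
        ConfigureParts = 3,   // "NSubDomainState::TConfigureParts ProgressState"
        Propose        = 128, // "NSubDomainState::TPropose ProgressState"
        Done           = 240, // "TDone opId# 1:0 ProgressState"
    };

    int main() {
        // Replay the transitions an ESchemeOpAlterSubDomain suboperation
        // with "no shards to create" goes through in the log.
        const ETxState path[] = {ETxState::CreateParts, ETxState::ConfigureParts,
                                 ETxState::Propose, ETxState::Done};
        for (int i = 0; i + 1 < 4; ++i) {
            std::printf("Change state for txid 1:0 %d -> %d\n",
                        static_cast<int>(path[i]), static_cast<int>(path[i + 1]));
        }
        return 0;
    }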
2025-03-28T09:34:49.641340Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.641381Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:49.641410Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:49.641457Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:49.641493Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:49.641525Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:49.641990Z node 6 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:49.642082Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:49.642119Z node 6 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:34:49.642151Z node 6 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:34:49.642183Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:49.642278Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:34:49.644497Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:34:49.644856Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:34:49.645501Z node 6 :TX_PROXY DEBUG: actor# [6:270:2261] Bootstrap 2025-03-28T09:34:49.662407Z node 6 :TX_PROXY DEBUG: actor# [6:270:2261] Become StateWork (SchemeCache [6:275:2266]) 2025-03-28T09:34:49.662799Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [6:274:2265], Recipient [6:123:2149]: {TEvModifySchemeTransaction txid# 101 TabletId# 72057594046678944} 2025-03-28T09:34:49.662859Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T09:34:49.664879Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTransfer Replication { Name: "Transfer" Config { TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot2/Table" } Batching { FlushIntervalMilliSeconds: 86400001 } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:49.665084Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: [72057594046678944] TCreateReplication Propose: opId# 101:0, path# /MyRoot/Transfer 2025-03-28T09:34:49.665156Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, 
at schemeshard: 72057594046678944 2025-03-28T09:34:49.665353Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:34:49.666053Z node 6 :TX_PROXY DEBUG: actor# [6:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:34:49.668512Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Flush interval must be less than or equal to 24 hours" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:49.668643Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, operation: CREATE TRANSFER, path: /MyRoot/Transfer 2025-03-28T09:34:49.668683Z node 6 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:34:49.668973Z node 6 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:34:49.669134Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:34:49.669170Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:34:49.669461Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [6:287:2278], Recipient [6:123:2149]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:34:49.669501Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:34:49.669534Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T09:34:49.669600Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [6:284:2275], Recipient [6:123:2149]: NKikimrScheme.TEvNotifyTxCompletion TxId: 101 2025-03-28T09:34:49.669622Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-28T09:34:49.669676Z node 6 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:34:49.669750Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:34:49.669779Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [6:285:2276] 2025-03-28T09:34:49.669898Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877764, Sender [6:287:2278], Recipient [6:123:2149]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:34:49.669921Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:34:49.669954Z node 6 :FLAT_TX_SCHEMESHARD TRACE: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2025-03-28T09:34:49.670210Z node 6 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [6:288:2279], Recipient [6:123:2149]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2025-03-28T09:34:49.670247Z node 6 :FLAT_TX_SCHEMESHARD 
TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-28T09:34:49.670349Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:34:49.670499Z node 6 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Transfer" took 164us result status StatusPathDoesNotExist 2025-03-28T09:34:49.670627Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Transfer\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Transfer" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice |94.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:34:49.646839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:49.647047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:49.647101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:49.647148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:49.647617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:49.647661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:49.647732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:49.647821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 
604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:49.648826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:49.741064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:49.741118Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:49.757263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:49.757521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:49.757650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:49.763161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:49.763329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:49.763959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:49.764140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:49.767413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:49.773165Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:49.773233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:49.773772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:49.773845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:49.773910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:49.773985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.780312Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:34:49.912474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:49.912711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.912952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:49.913182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:49.913256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.915510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, 
response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:49.915648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:49.915803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.915894Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:49.915948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:49.915987Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:49.917759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.917809Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:49.917845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:49.919459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.919503Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.919556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:49.919600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.923474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:49.925213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:49.925383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:49.926350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:49.926485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:49.926531Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:49.926829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:49.926887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at 
tablet# 72057594046678944 2025-03-28T09:34:49.927047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:49.927111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:49.928942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:49.928993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:49.929158Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:49.929237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:49.929603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.929651Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:49.929744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:49.929775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.929812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:49.929841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.929880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:49.929915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.929947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:49.929982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:49.930049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:49.930084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:49.930138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:49.931988Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:49.932089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:49.932132Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
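Here, as at several earlier points, the pattern is the same: TTxPublishToSchemeBoard describes the changed path to the populator, the transaction stays "in-flight" until a TEvUpdateAck comes back carrying at least the published path version, and only then is the publication retired. A hypothetical sketch of that bookkeeping; the structure and names below are illustrative, not YDB's:

    #include <cstdint>
    #include <cstdio>
    #include <map>

    struct PendingPublication {
        uint64_t localPathId;
        uint64_t version; // as in "Publication details: tx: 1, [... LocalPathId: 1], 3"
    };

    // Mirrors "AckPublish ... version: N": an ack retires the publication
    // only if it confirms at least the version that was published.
    bool HandleUpdateAck(std::map<uint64_t, PendingPublication>& inflight,
                         uint64_t txId, uint64_t ackedVersion) {
        auto it = inflight.find(txId);
        if (it == inflight.end() || ackedVersion < it->second.version) {
            return false; // still "Publication in-flight"
        }
        inflight.erase(it); // "Publication complete, notify & remove"
        return true;
    }

    int main() {
        std::map<uint64_t, PendingPublication> inflight{{1, {1, 3}}};
        std::printf("tx 1 complete: %s\n",
                    HandleUpdateAck(inflight, 1, 3) ? "yes" : "no"); // yes
        return 0;
    }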
944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:34:50.207941Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-03-28T09:34:50.207971Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2025-03-28T09:34:50.207996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:50.208800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:34:50.208846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:34:50.208866Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-03-28T09:34:50.208885Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-03-28T09:34:50.208926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-03-28T09:34:50.208972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-03-28T09:34:50.209580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2025-03-28T09:34:50.209675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000013 2025-03-28T09:34:50.210289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000013, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:50.210352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000013 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:50.210383Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000013, at schemeshard: 72057594046678944 2025-03-28T09:34:50.210439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 7] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:50.210471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-03-28T09:34:50.210496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 128 -> 134 2025-03-28T09:34:50.211265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-28T09:34:50.212299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 FAKE_COORDINATOR: Erasing txId 112 2025-03-28T09:34:50.213048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-03-28T09:34:50.213078Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 112:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:50.213151Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 134 -> 135 2025-03-28T09:34:50.213263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:50.213309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-03-28T09:34:50.214425Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:50.214453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:50.214537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-03-28T09:34:50.214601Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:50.214622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-03-28T09:34:50.214664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 7 2025-03-28T09:34:50.214823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-03-28T09:34:50.214849Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2025-03-28T09:34:50.214868Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 135 -> 240 2025-03-28T09:34:50.215506Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:34:50.215569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:34:50.215586Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-03-28T09:34:50.215605Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 27 2025-03-28T09:34:50.215634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:50.216138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:34:50.216212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:34:50.216238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-03-28T09:34:50.216268Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-03-28T09:34:50.216294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-03-28T09:34:50.216335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-03-28T09:34:50.218247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-03-28T09:34:50.218304Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 112:0 ProgressState 2025-03-28T09:34:50.218382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-03-28T09:34:50.218410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-03-28T09:34:50.218441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-03-28T09:34:50.218484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-03-28T09:34:50.218519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: true 2025-03-28T09:34:50.218566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-03-28T09:34:50.218596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-03-28T09:34:50.218622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-03-28T09:34:50.218701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-03-28T09:34:50.218988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:34:50.219024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-03-28T09:34:50.219073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-03-28T09:34:50.219419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:34:50.219457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-03-28T09:34:50.219504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:50.220292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 
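When the dropped subdomain's path is published above, its version jumps to 18446744073709551615 — by all appearances a tombstone sentinel rather than a counted version, and the same value that appears as PathId: 18446744073709551615 in the earlier StatusPathDoesNotExist describe result. A one-line check of that reading (an assumption drawn from the log, not confirmed against YDB's sources):

    #include <cstdint>
    #include <cstdio>

    int main() {
        // The version published for the dropped path in the log above.
        const uint64_t droppedPathVersion = 18446744073709551615ULL;
        // Assumption: this is uint64's maximum, used as a "dropped" marker.
        std::printf("matches UINT64_MAX: %s\n",
                    droppedPathVersion == UINT64_MAX ? "yes" : "no"); // prints "yes"
        return 0;
    }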
2025-03-28T09:34:50.221442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-28T09:34:50.223678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:34:50.223780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-03-28T09:34:50.224107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-03-28T09:34:50.224141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-03-28T09:34:50.224627Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-03-28T09:34:50.224699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-03-28T09:34:50.224751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:659:2650] TestWaitNotification: OK eventTxId 112 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:34:49.646838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:49.647006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:49.647052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:49.647095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:49.647599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:49.647646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:49.647727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:49.647812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:49.648817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:49.741103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:49.741152Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-03-28T09:34:49.754862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:49.755110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:49.755265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:49.760677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:49.760847Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:49.761419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:49.762364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:49.767454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:49.773224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:49.773295Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:49.773788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:49.773843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:49.773892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:49.773955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.779379Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:34:49.908212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:49.908439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.908612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:49.908793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:49.908846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.910867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:49.910974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:49.911113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.911193Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:49.911222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:49.911250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:49.912951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.913000Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:49.913030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:49.914486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.914528Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.914579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:49.914615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.917253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:49.918616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:49.918773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:49.919612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:49.919713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:49.919755Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:49.919964Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:49.920004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:49.920142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:49.920234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:49.921766Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:49.921804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:49.921942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:49.921993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:49.922277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.922311Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:49.922399Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:49.922422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.922450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:49.922484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.922512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:49.922541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.922574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:49.922611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:49.922675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:49.922738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:49.922777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:49.924165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:49.924246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:49.924284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:34:50.219653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 26 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:34:50.219683Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-03-28T09:34:50.219711Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 26 2025-03-28T09:34:50.219741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:50.220520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:34:50.220586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 3 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:34:50.220614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2025-03-28T09:34:50.220658Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 3 2025-03-28T09:34:50.220692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-03-28T09:34:50.220755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2025-03-28T09:34:50.222975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2025-03-28T09:34:50.223106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000013 2025-03-28T09:34:50.223455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000013, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:50.223557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000013 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:50.223599Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000013, at schemeshard: 72057594046678944 2025-03-28T09:34:50.223634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 7] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:50.223661Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-03-28T09:34:50.223769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 112:0 128 -> 130 2025-03-28T09:34:50.223940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:50.224002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-03-28T09:34:50.224233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-28T09:34:50.225266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 FAKE_COORDINATOR: Erasing txId 112 2025-03-28T09:34:50.226539Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:50.226572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:50.226673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 7] 2025-03-28T09:34:50.226798Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:50.226833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-03-28T09:34:50.226867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 7 2025-03-28T09:34:50.226918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2025-03-28T09:34:50.226957Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2025-03-28T09:34:50.227051Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-03-28T09:34:50.227086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-03-28T09:34:50.227117Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#112:0 progress is 1/1 2025-03-28T09:34:50.227157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-03-28T09:34:50.227187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2025-03-28T09:34:50.227215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-03-28T09:34:50.227241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-03-28T09:34:50.227264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-03-28T09:34:50.227312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 3 2025-03-28T09:34:50.227339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 2, subscribers: 0 2025-03-28T09:34:50.227365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 27 
2025-03-28T09:34:50.227389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 7], 18446744073709551615 2025-03-28T09:34:50.228238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:34:50.228328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 27 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:34:50.228362Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2025-03-28T09:34:50.228391Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 27 2025-03-28T09:34:50.228420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:50.229532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:34:50.229607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 7 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:34:50.229639Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2025-03-28T09:34:50.229665Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 7], version: 18446744073709551615 2025-03-28T09:34:50.229691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 2 2025-03-28T09:34:50.229761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2025-03-28T09:34:50.230146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:34:50.230183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-03-28T09:34:50.230269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 1 2025-03-28T09:34:50.230767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:34:50.230804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 7], at schemeshard: 72057594046678944 2025-03-28T09:34:50.230852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:50.232143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-28T09:34:50.233976Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-28T09:34:50.234059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:34:50.234142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-03-28T09:34:50.234445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-03-28T09:34:50.234478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-03-28T09:34:50.234989Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-03-28T09:34:50.235065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-03-28T09:34:50.235094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:659:2650] TestWaitNotification: OK eventTxId 112 |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode >> AnalyzeColumnshard::AnalyzeMultiOperationId ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2025-03-28T09:34:35.790319Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791464367357014:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:35.790410Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:35.877946Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791465090743861:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:36.041519Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001547/r3tmp/tmphRG4su/pdisk_1.dat 2025-03-28T09:34:36.045816Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : 
Created 2025-03-28T09:34:36.077288Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:36.350384Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.350512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.351543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.351605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.355352Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:36.364709Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:36.368300Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:36.395460Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8968, node 1 2025-03-28T09:34:36.430653Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:34:36.430699Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:34:36.521717Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001547/r3tmp/yandexLSqRQA.tmp 2025-03-28T09:34:36.521744Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001547/r3tmp/yandexLSqRQA.tmp 2025-03-28T09:34:36.521919Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001547/r3tmp/yandexLSqRQA.tmp 2025-03-28T09:34:36.522053Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:36.711547Z INFO: TTestServer started on Port 24358 GrpcPort 8968 TClient is connected to server localhost:24358 PQClient connected to localhost:8968 === TenantModeEnabled() = 1 === Init PQ - start server on port 8968 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:34:37.093004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:34:37.093372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.093663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:34:37.094013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:34:37.095159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.107399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:34:37.110760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:34:37.111151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.111229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:34:37.111247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-28T09:34:37.111261Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-28T09:34:37.115509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.115576Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:34:37.115638Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-28T09:34:37.119340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:34:37.120378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-28T09:34:37.120413Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:34:37.121275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.121338Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.121372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.121395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.126369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:37.129985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-28T09:34:37.130163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:34:37.137367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743154477183, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:34:37.138773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154477183 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:34:37.138813Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.139210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-28T09:34:37.139251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:34:37.139412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:34:37.139477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:34:37.143571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:34:37.143592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:34:37.144042Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:34:37.144059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:7486791468662325052:2447], at schemeshard: 72057594046644480, txId: 281474976710657, path id: 1 2025-03-28T09:34:37.144114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.144136Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976710657:0 ProgressState 2025-03-28T09:34:37.144229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:34:37.144241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.144257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710657:0 progress is 1/1 2025-03-28T09:34:37.144267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.144279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 1/1, is published: false 2025-03-28T09:34:37.144292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:34:37.144305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710657:0 2025-03-28T09:34:37.144316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid ... 
72075186224037899 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2025-03-28T09:34:48.696697Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:34:48.696729Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2025-03-28T09:34:48.696798Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 1 2025-03-28T09:34:48.696910Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:34:48.697071Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:34:48.697093Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message batch for topic 'PQ/account3/folder1/folder2/topic' partition 0 2025-03-28T09:34:48.697536Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 0 messageNo: 1 size: 511961 2025-03-28T09:34:48.698001Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 1 messageNo: 1 size: 511961 2025-03-28T09:34:48.698202Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client PART message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size: 176151 2025-03-28T09:34:48.698228Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] got client message topic: PQ/account3/folder1/folder2/topic partition: 0 SourceId: '\0001236' SeqNo: 1 partNo : 2 messageNo: 1 size 176151 offset: -1 2025-03-28T09:34:48.698506Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Send write quota request. Topic: "PQ/account3/folder1/folder2/topic". Partition: 0. Amount: 1200088. Cookie: 7 2025-03-28T09:34:48.748602Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486791520201935349:2625], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:34:48.748833Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjNmMTBiOWQtN2UyYWFmMjAtYWMyMmZiNWUtMmMwMmQxZQ==, ActorId: [1:7486791520201935347:2624], ActorState: ExecuteState, TraceId: 01jqe1n9cs8qa9myt38xm0k1fm, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:34:48.749241Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:34:49.472595Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Got quota. Topic: "PQ/account3/folder1/folder2/topic". Partition: 0: Cookie: 7 2025-03-28T09:34:49.472779Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 0 2025-03-28T09:34:49.472824Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 1 2025-03-28T09:34:49.472844Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob processing sourceId '\0001236' seqNo 1 partNo 2 2025-03-28T09:34:49.479351Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Topic 'PQ/account3/folder1/folder2/topic' partition 0 part blob complete sourceId '\0001236' seqNo 1 partNo 2 FormedBlobsCount 0 NewHead: Offset 6 PartNo 0 PackedSize 1200285 count 1 nextOffset 7 batches 3 2025-03-28T09:34:49.480039Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Add new write blob: topic 'PQ/account3/folder1/folder2/topic' partition 0 compactOffset 6,1 HeadOffset 6 endOffset 6 curOffset 7 d0000000000_00000000000000000006_00000_0000000001_00002| size 1200275 WTime 1743154489479 2025-03-28T09:34:49.480936Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:34:49.480960Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:34:49.480972Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:34:49.480985Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:34:49.480995Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] m0000000000p1236 2025-03-28T09:34:49.481003Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] d0000000000_00000000000000000006_00000_0000000001_00002| 2025-03-28T09:34:49.481011Z node 1 :PERSQUEUE 
DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:34:49.481021Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:34:49.481032Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:34:49.481109Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:34:49.481199Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 6 partNo 0 count 1 size 1200275 2025-03-28T09:34:49.486124Z node 1 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 6 count 1 size 1200275 actorID [1:7486791511612000286:2556] 2025-03-28T09:34:49.486196Z node 1 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037899' partition 0 offset 6 partno 0 count 1 parts 2 size 1200275 2025-03-28T09:34:49.486212Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1200088 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:34:49.486246Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:34:49.486278Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 6 is stored on disk 2025-03-28T09:34:49.486304Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:34:49.486325Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 1, Offset: 6 is stored on disk 2025-03-28T09:34:49.486362Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-28T09:34:49.486394Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] Answering for message sourceid: '\0001236', Topic: 'PQ/account3/folder1/folder2/topic', Partition: 0, SeqNo: 1, partNo: 2, Offset: 6 is stored on disk 2025-03-28T09:34:49.486396Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-03-28T09:34:49.486446Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:34:49.487264Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|ce177ce3-d9c78480-c43976d0-9497675c_0] Write session got write response: sequence_numbers: 1 offsets: 6 already_written: false write_statistics { persist_duration_ms: 13 queued_in_partition_duration_ms: 774 throttled_on_partition_duration_ms: 774 } 2025-03-28T09:34:49.487321Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|ce177ce3-d9c78480-c43976d0-9497675c_0] Write session: acknowledged message 1 2025-03-28T09:34:49.494112Z :INFO: [] MessageGroupId [1236] SessionId [1236|ce177ce3-d9c78480-c43976d0-9497675c_0] Write session will now close 2025-03-28T09:34:49.494162Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|ce177ce3-d9c78480-c43976d0-9497675c_0] Write session: aborting 2025-03-28T09:34:49.494575Z :INFO: [] MessageGroupId [1236] SessionId [1236|ce177ce3-d9c78480-c43976d0-9497675c_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:34:49.494608Z :DEBUG: [] MessageGroupId [1236] SessionId [1236|ce177ce3-d9c78480-c43976d0-9497675c_0] Write session: destroy DURATION 2.957224s 2025-03-28T09:34:49.495945Z node 1 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 22 sessionId: 1236|ce177ce3-d9c78480-c43976d0-9497675c_0 grpc read done: success: 0 data: 2025-03-28T09:34:49.496002Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|ce177ce3-d9c78480-c43976d0-9497675c_0 grpc read failed 2025-03-28T09:34:49.496042Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|ce177ce3-d9c78480-c43976d0-9497675c_0 grpc closed 2025-03-28T09:34:49.496055Z node 1 :PQ_WRITE_PROXY INFO: session v1 cookie: 22 sessionId: 1236|ce177ce3-d9c78480-c43976d0-9497675c_0 is DEAD 2025-03-28T09:34:49.496581Z node 1 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037899 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:34:49.496738Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899] server disconnected, pipe [1:7486791520201935345:2622] destroyed 2025-03-28T09:34:49.496798Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037899, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-28T09:34:49.768381Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486791524496902675:2632], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:34:49.768563Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmQ5ZWUyOWMtNTBiNmNiMzgtZWUwYjNlMTYtYTliYzJiZmI=, ActorId: [1:7486791524496902673:2631], ActorState: ExecuteState, TraceId: 01jqe1nacq0tat1s5grfbyprmq, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:34:49.768843Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] >> AnalyzeDatashard::AnalyzeOneTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:34:49.646837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:49.647014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:49.647061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:49.647105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:49.647591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:49.647629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:49.647701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:49.647787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2025-03-28T09:34:49.648813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:49.739885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:49.739930Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:49.750952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:49.751145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:49.751262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:49.758976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:49.759155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:49.761406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:49.762358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:49.767373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:49.773119Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:49.773188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:49.773742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:49.773794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:49.773844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:49.773914Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.779456Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:34:49.881907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:49.882112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.882276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:49.882449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:49.882497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.884660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 
72057594046678944 2025-03-28T09:34:49.884770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:49.884904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.884947Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:49.884977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:49.885003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:49.886522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.886571Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:49.886599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:49.887995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.888029Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.888067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:49.888102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.891009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:49.892431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:49.893218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:49.894043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:49.894159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:49.894200Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:49.895284Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:49.895334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:49.895484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:49.895545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:49.897266Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:49.897302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:49.897465Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:49.897527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:49.898519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.898597Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:49.898719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:49.898759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.898801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:49.898833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.898872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:49.898930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.898965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:49.899001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:49.899090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:49.899150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:49.899206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:49.901262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:49.901373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:49.901420Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
hard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:34:51.739579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 102 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:34:51.739603Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-03-28T09:34:51.739626Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 102 2025-03-28T09:34:51.739650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:51.740348Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:34:51.740410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 4 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:34:51.740451Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-03-28T09:34:51.740493Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 4 2025-03-28T09:34:51.740519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-03-28T09:34:51.740572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-03-28T09:34:51.743044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2025-03-28T09:34:51.743121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000076 FAKE_COORDINATOR: advance: minStep5000076 State->FrontStep: 5000075 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000076 2025-03-28T09:34:51.744376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000076, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:51.744505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000076 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:51.744552Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropForceUnsafe TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000076, at schemeshard: 72057594046678944 2025-03-28T09:34:51.744595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 26] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:51.744622Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-03-28T09:34:51.744730Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 128 -> 130 2025-03-28T09:34:51.744904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:51.744969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-03-28T09:34:51.745668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-03-28T09:34:51.745922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 FAKE_COORDINATOR: Erasing txId 175 2025-03-28T09:34:51.747861Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:51.747903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:51.748051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-03-28T09:34:51.748169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:51.748200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-03-28T09:34:51.748240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-03-28T09:34:51.748593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-03-28T09:34:51.748636Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-03-28T09:34:51.748717Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-03-28T09:34:51.748746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-28T09:34:51.748781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-03-28T09:34:51.748805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-28T09:34:51.748835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: false 2025-03-28T09:34:51.748864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-28T09:34:51.748892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-03-28T09:34:51.748916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-03-28T09:34:51.748984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 2025-03-28T09:34:51.749013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 175, publications: 2, subscribers: 0 2025-03-28T09:34:51.749040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 1], 103 
2025-03-28T09:34:51.749065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 26], 18446744073709551615 2025-03-28T09:34:51.749712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:34:51.749793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:34:51.749822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 175 2025-03-28T09:34:51.749853Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-03-28T09:34:51.749881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:51.750851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:34:51.750918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:34:51.750940Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 175 2025-03-28T09:34:51.750957Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-03-28T09:34:51.750975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-03-28T09:34:51.751024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 175, subscribers: 0 2025-03-28T09:34:51.751651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:34:51.751680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-03-28T09:34:51.751740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-03-28T09:34:51.752026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:34:51.752053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-03-28T09:34:51.752092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:51.753993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 
2025-03-28T09:34:51.755645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-03-28T09:34:51.755737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:34:51.755794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-03-28T09:34:51.756730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-03-28T09:34:51.756768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-03-28T09:34:51.757759Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-03-28T09:34:51.757840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-03-28T09:34:51.757861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2474:4465] TestWaitNotification: OK eventTxId 175 >> TPartBtreeIndexIteration::FewNodes_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_History_Slices >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve >> TraverseColumnShard::TraverseServerlessColumnTable >> TKesusTest::TestAcquireTimeout [GOOD] >> TKesusTest::TestAcquireSharedBlocked ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:34:41.548979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:41.549097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:41.549145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:41.549183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:41.550504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:41.550549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-03-28T09:34:41.550646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:41.550773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:41.552336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:41.644120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:41.644185Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:41.660388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:41.660707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:41.660897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:41.669239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:41.669486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:41.672803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.673888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:41.680972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.690477Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:41.690605Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.691342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:41.691419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:41.691483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:41.691581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.698839Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:34:41.837560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:41.837801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.838021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:41.838245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2025-03-28T09:34:41.838317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:41.840826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840897Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:41.840955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:41.840995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:41.842633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.842707Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:41.842750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:41.847389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.847459Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.847505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.847549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.851208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:41.855412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:41.855585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:41.856675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.856814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:41.856865Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.857156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:41.857224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.857406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:41.857502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:41.859474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:41.859523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:41.859684Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.859725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:41.860128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.860192Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:41.860283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:41.860319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.860359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:41.860394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.860455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:41.860497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.860532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:41.860561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:41.860627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:41.860663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:41.860698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:41.862465Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:41.862581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:41.862618Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Publication in-flight, co ... t for Tables, read records: 2, at schemeshard: 72057594046678944 2025-03-28T09:34:52.058197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-28T09:34:52.058234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-03-28T09:34:52.058293Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-28T09:34:52.058457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 4, at schemeshard: 72057594046678944 2025-03-28T09:34:52.058563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:52.058649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 2, at schemeshard: 72057594046678944 2025-03-28T09:34:52.058721Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-03-28T09:34:52.058758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:34:52.058782Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:2, tabletId: 72075186233409547, PathId: [OwnerId: 72057594046678944, LocalPathId: 3], TabletType: DataShard, at schemeshard: 72057594046678944 2025-03-28T09:34:52.058798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:34:52.058876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 2, at schemeshard: 72057594046678944 2025-03-28T09:34:52.059008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:52.059188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 6, at schemeshard: 72057594046678944 2025-03-28T09:34:52.059407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:52.059514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:52.059840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:52.059894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:52.060064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:52.060136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:52.060217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:52.060340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:52.060396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 
2025-03-28T09:34:52.060508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:52.060671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:52.060819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:52.060857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:52.060905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:34:52.061071Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:34:52.065637Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:34:52.065756Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-28T09:34:52.067276Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435082, Sender [1:1138:3070], Recipient [1:1138:3070]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-03-28T09:34:52.067315Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-03-28T09:34:52.068603Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:52.068672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:52.068835Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1138:3070], Recipient [1:1138:3070]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:34:52.068862Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:34:52.068995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:52.069034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:52.069069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:52.069094Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:34:52.069265Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1174:3070], Recipient [1:1138:3070]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-03-28T09:34:52.069298Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-03-28T09:34:52.069329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1138:3070] sender: [1:1192:2058] recipient: [1:15:2062] 2025-03-28T09:34:52.105376Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1191:3112], Recipient [1:1138:3070]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-03-28T09:34:52.105428Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-28T09:34:52.105524Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:34:52.105747Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 220us result status StatusSuccess 2025-03-28T09:34:52.106312Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 1 MinPartitionsCount: 20 MaxPartitionsCount: 
20 } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 15842 Memory: 141368 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/backup_ut/unittest >> BackupRestore::TestAllIndexTypes-EIndexTypeGlobalVectorKmeansTree [GOOD] Test command err: 2025-03-28T09:34:10.810031Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791356219606491:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:10.810160Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpCd1BEG/pdisk_1.dat 2025-03-28T09:34:11.379363Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:11.394297Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:11.394450Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:11.406167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3382, node 1 2025-03-28T09:34:11.642393Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:11.642429Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:11.642443Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:11.642561Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30874 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:12.314332Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:13.816588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791369104509442:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:13.816588Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791369104509453:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:13.816700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:13.821770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:34:13.837789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791369104509456:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:34:13.912248Z node 1 :TX_PROXY ERROR: Actor# [1:7486791369104509534:2685] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Backup "/Root" to "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpoyIbUE/"Create temporary directory "/Root/~backup_20250328T093414" in databaseProcess "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpoyIbUE/view"Backup view "/Root/view" to "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpoyIbUE/view"Write view into "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpoyIbUE/view/create_view.sql"Write ACL into "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpoyIbUE/view/permissions.pb"Remove temporary directory "/Root/~backup_20250328T093414" in database2025-03-28T09:34:14.419900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710662:0, at schemeshard: 72057594046644480 Backup completed successfullyRestore "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpoyIbUE/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpoyIbUE/" to "/Root"Process "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpoyIbUE/view"Restore view "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpoyIbUE/view" to "/Root/view"Read view from "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpoyIbUE/view/create_view.sql"Created "/Root/view"Restore ACL "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpoyIbUE/view" to "/Root/view"Read ACL from "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpoyIbUE/view/permissions.pb"2025-03-28T09:34:14.535472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-03-28T09:34:15.941231Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486791378874671766:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:15.941352Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpX9xPar/pdisk_1.dat 2025-03-28T09:34:16.065993Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:16.089619Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:16.089700Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:16.093180Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 15253, node 4 2025-03-28T09:34:16.166822Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:16.166846Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:16.166853Z node 4 :NET_CLASSIFIER WARN: 
failed to initialize from file: (empty maybe) 2025-03-28T09:34:16.167019Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1854 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:16.432244Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:18.936874Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486791391759574669:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:18.936878Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486791391759574677:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:18.936983Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:18.940068Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:34:18.957494Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486791391759574683:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:34:19.015411Z node 4 :TX_PROXY ERROR: Actor# [4:7486791396054542053:2676] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:34:19.050243Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-28T09:34:19.337881Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1mcja6m0mw41xe0w60vfq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NmY5MzYxZjAtMWY2NjViNWUtY2E4MGVjOGQtNjE4ODgwYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:34:19.587662Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1mcs16990y0rsy1gp2cas, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NmY5MzYxZjAtMWY2NjViNWUtY2E4MGVjOGQtNjE4ODgwYmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Backup "/Root" to "/home/runner/.ya/build/build_root/2909/0017 ... olFetcherActor] ActorId: [16:7486791503800123330:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:44.563340Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Restore "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpOicQqd/" to "/Root"Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpOicQqd/" to "/Root"Process "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpOicQqd/table"Read scheme from "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpOicQqd/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpOicQqd/table" to "/Root/table"2025-03-28T09:34:44.652988Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:34:44.680037Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037888 not found 2025-03-28T09:34:44.680079Z node 16 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 16, TabletId: 72075186224037889 not found Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpOicQqd/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-03-28T09:34:44.763946Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480 2025-03-28T09:34:44.805370Z node 16 :CHANGE_EXCHANGE WARN: [AsyncIndexChangeSenderMain][72075186224037892:1][16:7486791503800123797:2424] Failed entry at 'ResolveTargetTable': entry# { Path: TableId: [72057594046644480:11:0] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-28T09:34:44.819185Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480 Restore ACL "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpOicQqd/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpOicQqd/table/permissions.pb"2025-03-28T09:34:45.096357Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480 Restore completed successfully 2025-03-28T09:34:46.784534Z node 19 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7486791513093849250:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:46.784635Z node 19 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpVVnWR2/pdisk_1.dat 2025-03-28T09:34:46.904143Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:46.941946Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:46.942053Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:46.945446Z node 19 :HIVE WARN: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: 
Connecting -> Connected TServer::EnableGrpc on GrpcPort 13023, node 19 2025-03-28T09:34:47.004729Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:47.004754Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:47.004764Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:47.004939Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4688 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:47.161305Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:34:49.721654Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [19:7486791525978752149:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:49.721755Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:49.749927Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 Backup "/Root" to "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpCWebKn/"Create temporary directory "/Root/~backup_20250328T093449" in databaseProcess "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpCWebKn/table"Copy tables: { src: "/Root/table", dst: "/Root/~backup_20250328T093449/table" }Backup table "/Root/~backup_20250328T093449/table" to "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpCWebKn/table"Describe table "/Root/~backup_20250328T093449/table"Write scheme into "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpCWebKn/table/scheme.pb"Describe table "/Root/table"Write ACL into "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpCWebKn/table/permissions.pb"Read table "/Root/~backup_20250328T093449/table"Write data into "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpCWebKn/table/data_00.csv"Drop table "/Root/~backup_20250328T093449/table"2025-03-28T09:34:50.191409Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037893 not found 2025-03-28T09:34:50.191452Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037891 not found 2025-03-28T09:34:50.193678Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037892 not found Remove temporary directory "/Root/~backup_20250328T093449" in database2025-03-28T09:34:50.206279Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715664:0, at schemeshard: 72057594046644480 Backup completed successfully2025-03-28T09:34:50.224274Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [19:7486791530273720527:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:50.224405Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Restore "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpCWebKn/" to "/Root"2025-03-28T09:34:50.281619Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037888 not found 2025-03-28T09:34:50.281676Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037889 not found 2025-03-28T09:34:50.283018Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037890 not found Resolved db base path: "/Root"Restore folder "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpCWebKn/" to "/Root"Process "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpCWebKn/table"Read scheme from "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpCWebKn/table/scheme.pb"Restore table "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpCWebKn/table" to "/Root/table"2025-03-28T09:34:50.310828Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 Created "/Root/table"Read data from "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpCWebKn/table/data_00.csv"Restore index "byValue" on "/Root/table"2025-03-28T09:34:50.463097Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:34:50.522434Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-28T09:34:50.578286Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710760:0, at schemeshard: 72057594046644480 2025-03-28T09:34:50.732488Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710761:0, at schemeshard: 72057594046644480 2025-03-28T09:34:50.865122Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037897 not found 2025-03-28T09:34:50.865170Z node 19 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 19, TabletId: 72075186224037898 not found Restore ACL "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpCWebKn/table" to "/Root/table"Read ACL from "/home/runner/.ya/build/build_root/2909/001764/r3tmp/tmpCWebKn/table/permissions.pb"2025-03-28T09:34:51.215726Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715669:0, at schemeshard: 72057594046644480 Restore completed successfully >> TKesusTest::TestAcquireSharedBlocked [GOOD] >> TKesusTest::TestAcquireTimeoutAfterReboot |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> BuildStatsHistogram::Ten_Mixed_Log [GOOD] >> BuildStatsHistogram::Ten_Serial_Log >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] >> Cdc::UpdatesLog[YdsRunner] [GOOD] >> Cdc::UpdatesLog[TopicRunner] |94.8%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |94.8%| [TA] $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] {RESULT} $(B)/ydb/services/ydb/backup_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> AnalyzeColumnshard::AnalyzeDeadline >> TPartBtreeIndexIteration::FewNodes_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] Test command err: 2025-03-28T09:34:30.068482Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:30.068616Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:30.108174Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:30.108293Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:30.136106Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:30.559634Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:30.559740Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:30.581145Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:30.581379Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:30.605372Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:30.928448Z node 3 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:30.928526Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:30.939689Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:30.939776Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:30.953461Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:30.953964Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[3:132:2158], cookie=14813036706676284224, session=0, seqNo=0) 2025-03-28T09:34:30.954163Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:34:30.976391Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[3:132:2158], cookie=14813036706676284224, session=1) 2025-03-28T09:34:30.976874Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:141:2165], cookie=18029900255872231825) 2025-03-28T09:34:30.976940Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:141:2165], cookie=18029900255872231825) 2025-03-28T09:34:31.378553Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:31.390849Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:31.739598Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:31.751678Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:32.100410Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:32.112124Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:32.450648Z node 3 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:32.462759Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:32.822596Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:32.834562Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:33.173339Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:33.185302Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:33.513883Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:33.525890Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:33.864393Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:33.876155Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:34.225422Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:34.237479Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:34.640044Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:34.652154Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:35.011417Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:35.023204Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:35.383150Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:35.395099Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:35.754008Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:35.766026Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:36.124323Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:36.140022Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:36.558861Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:36.575385Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:36.928183Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:36.940293Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:37.337324Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:37.353388Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:37.698049Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:37.710358Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:38.066869Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:38.078846Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:38.470972Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:38.488108Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:38.854995Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:38.867219Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:39.220689Z node 3 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:39.233009Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:39.605962Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:39.618068Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:39.973058Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:39.985195Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:40.344631Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:40.356609Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:40.718014Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:40.731380Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:41.084866Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:41.097040Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:41.453263Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:41.467473Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:41.837733Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:41.852763Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:42.257274Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:42.269707Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:42.633888Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:42.646296Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:42.996446Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:43.008319Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:43.348911Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:43.360999Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:43.713663Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:43.725406Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:44.097552Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:44.111530Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:44.480996Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:44.493444Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:44.845384Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:44.857419Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:45.208519Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:45.220720Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:45.572922Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:45.584978Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:45.967459Z node 3 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:45.979467Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:46.330548Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:46.345573Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:46.714322Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:46.726344Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:47.078544Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:47.090860Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:47.444047Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:47.456435Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:47.839356Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:47.851646Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:48.203929Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:48.215886Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:48.578878Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:48.591006Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:48.941837Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:48.954191Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:49.307119Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:49.319049Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:49.691859Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:49.703854Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:50.045739Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:50.057997Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:50.411200Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:50.423574Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:50.766073Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:50.779356Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:51.120885Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:51.133079Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:51.485969Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:51.498231Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:51.840204Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:51.852500Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:52.193640Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:52.205867Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:52.547559Z node 3 
:KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:52.559875Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:52.892399Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:52.904832Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:53.303004Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-03-28T09:34:53.303116Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-03-28T09:34:53.315712Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-03-28T09:34:53.326822Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:473:2480], cookie=3841400232251767799) 2025-03-28T09:34:53.326928Z node 3 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:473:2480], cookie=3841400232251767799) 2025-03-28T09:34:53.703223Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:53.703313Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:53.716226Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:53.716792Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:53.741659Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:53.748914Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:132:2158], cookie=15534432383912998866, path="Root", config={ MaxUnitsPerSecond: 100 }) 2025-03-28T09:34:53.749178Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Created new quoter resource 1 "Root" 2025-03-28T09:34:53.761411Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:132:2158], cookie=15534432383912998866) 2025-03-28T09:34:53.763346Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:141:2165]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-28T09:34:53.763427Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:141:2165], cookie=0) 2025-03-28T09:34:53.763751Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:143:2167]. Cookie: 0. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2025-03-28T09:34:53.763790Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Subscribe on quoter resources (sender=[4:143:2167], cookie=0) 2025-03-28T09:34:53.805834Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:143:2167]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-03-28T09:34:53.805922Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:141:2165]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2025-03-28T09:34:53.806178Z node 4 :KESUS_TABLET TRACE: Got TEvServerDisconnected([4:146:2170]) 2025-03-28T09:34:53.806300Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:143:2167]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconected." } } } } 2025-03-28T09:34:53.848069Z node 4 :KESUS_TABLET TRACE: [72057594037927937] Send TEvResourcesAllocated to [4:141:2165]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } } >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:34:49.646842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:49.647016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:49.647070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:49.647112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:49.647575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2025-03-28T09:34:49.647624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:49.647706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:49.647791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:49.648834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:49.738917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:49.738969Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:49.750976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:49.751174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:49.751280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:49.758743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:34:49.759004Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:49.761428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:49.762345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:49.767529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:49.773079Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:49.773133Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:49.773735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:49.773798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:49.773898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:49.773997Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.779374Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:34:49.878197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:49.879775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.880667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2025-03-28T09:34:49.881621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:49.881678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.884044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:49.884169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:49.884304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.884410Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:49.884437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:49.884460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:49.885892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.885929Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:49.885965Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:49.887252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.887283Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.887314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:49.887345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.898666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:49.900382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:49.900571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:49.901346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:49.901457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:49.901498Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:49.901699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:49.901744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:49.901855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:49.901905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:49.903353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:49.903387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:49.903512Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:49.903557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:49.903820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:49.903882Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:49.903946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:49.903968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.903991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:49.904010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.904033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:49.904062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:49.904093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:49.904114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:49.904157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:49.904180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:49.904225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:49.905583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:49.905654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:49.905679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 233409618 TxId: 175 } 2025-03-28T09:34:54.972780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free owner tablets reply, message: Status: ALREADY Owner: 72075186233409618 TxId: 175 Origin: 72057594037968897, at schemeshard: 72057594046678944 2025-03-28T09:34:54.972895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 175:0, at schemeshard: 72057594046678944, message: Status: ALREADY Owner: 72075186233409618 TxId: 175 Origin: 72057594037968897 2025-03-28T09:34:54.972945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 HandleReply TDeleteExternalShards, Status: ALREADY, from Hive: 72057594037968897, Owner: 72075186233409618, at schemeshard: 72057594046678944 2025-03-28T09:34:54.973067Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 134 -> 135 2025-03-28T09:34:54.973262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:54.973327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 5 2025-03-28T09:34:54.975404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 175:0, at schemeshard: 72057594046678944 2025-03-28T09:34:54.975700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:54.975746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:54.975903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 26] 2025-03-28T09:34:54.976028Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:54.976066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 1 2025-03-28T09:34:54.976107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 175, path id: 26 2025-03-28T09:34:54.976416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-03-28T09:34:54.976466Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2025-03-28T09:34:54.976502Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 175:0 135 -> 240 2025-03-28T09:34:54.977875Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:34:54.977964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 103 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:34:54.977998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-03-28T09:34:54.978034Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 103 2025-03-28T09:34:54.978074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:54.979423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:34:54.979509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 26 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2025-03-28T09:34:54.979541Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2025-03-28T09:34:54.979573Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 26], version: 18446744073709551615 2025-03-28T09:34:54.979604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 6 2025-03-28T09:34:54.979680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2025-03-28T09:34:54.981647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:74 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:34:54.981696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:73 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:34:54.981726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:75 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:34:54.981993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2025-03-28T09:34:54.982049Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 175:0 ProgressState 2025-03-28T09:34:54.982133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-03-28T09:34:54.982171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-28T09:34:54.982206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#175:0 progress is 1/1 2025-03-28T09:34:54.982234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-28T09:34:54.982266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2025-03-28T09:34:54.982306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2025-03-28T09:34:54.982339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 175:0 2025-03-28T09:34:54.982369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 175:0 2025-03-28T09:34:54.982532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 5 2025-03-28T09:34:54.983990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-03-28T09:34:54.984723Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 74 TxId_Deprecated: 74 
TabletID: 72075186233409619 2025-03-28T09:34:54.984906Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 73 TxId_Deprecated: 73 TabletID: 72075186233409618 2025-03-28T09:34:54.985745Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:54.988630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 74 ShardOwnerId: 72057594046678944 ShardLocalIdx: 74, at schemeshard: 72057594046678944 2025-03-28T09:34:54.988894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 4 2025-03-28T09:34:54.989467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 73 ShardOwnerId: 72057594046678944 ShardLocalIdx: 73, at schemeshard: 72057594046678944 2025-03-28T09:34:54.989678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 3 Forgetting tablet 72075186233409619 Forgetting tablet 72075186233409618 2025-03-28T09:34:54.991170Z node 1 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 75 TxId_Deprecated: 75 TabletID: 72075186233409620 Forgetting tablet 72075186233409620 2025-03-28T09:34:54.991611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 75 ShardOwnerId: 72057594046678944 ShardLocalIdx: 75, at schemeshard: 72057594046678944 2025-03-28T09:34:54.991806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 2 2025-03-28T09:34:54.993027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2025-03-28T09:34:54.993532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:34:54.993576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-03-28T09:34:54.993694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 1 2025-03-28T09:34:54.994948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:34:54.994998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 26], at schemeshard: 72057594046678944 2025-03-28T09:34:54.995066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:54.996293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:74 2025-03-28T09:34:54.996345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:74 tabletId 72075186233409619 2025-03-28T09:34:54.999241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:73 2025-03-28T09:34:54.999289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:73 tabletId 72075186233409618 2025-03-28T09:34:54.999407Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: Deleted shardIdx 72057594046678944:75 2025-03-28T09:34:54.999449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:75 tabletId 72075186233409620 2025-03-28T09:34:54.999742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2025-03-28T09:34:54.999847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2025-03-28T09:34:55.001358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2025-03-28T09:34:55.001401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2025-03-28T09:34:55.002969Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2025-03-28T09:34:55.003096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2025-03-28T09:34:55.003134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:6763:7739] TestWaitNotification: OK eventTxId 175 >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution |94.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |94.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKesusTest::TestAcquireLocks [GOOD] >> TKesusTest::TestAcquireRepeat >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] >> TraverseDatashard::TraverseOneTable >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireDowngrade >> AnalyzeColumnshard::AnalyzeRebootColumnShard >> TKesusTest::TestAcquireDowngrade [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout >> TExecutorDb::CoordinatorSimulation [GOOD] >> TExecutorDb::RandomCoordinatorSimulation >> AnalyzeColumnshard::AnalyzeStatus >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> BuildStatsHistogram::Ten_Serial_Log [GOOD] >> BuildStatsHistogram::Ten_Crossed_Log >> Cdc::DocApi[TopicRunner] [GOOD] >> Cdc::HugeKey[PqRunner] >> Cdc::UpdatesLog[TopicRunner] [GOOD] >> Cdc::VirtualTimestamps[PqRunner] >> TVersions::Wreck2 [GOOD] >> TVersions::Wreck2Reverse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] Test command err: 2025-03-28T09:26:38.676797Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:26:38.772153Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:26:38.795130Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:26:38.795469Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:26:38.804556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:26:38.804789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:26:38.805084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:26:38.805233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:26:38.805370Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:26:38.805502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:26:38.805614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:26:38.805720Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 
2025-03-28T09:26:38.805857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:26:38.805972Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.806083Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:38.806215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:38.854945Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:38.855305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:38.855361Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:38.855562Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.855765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:38.855852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:38.855902Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:38.856003Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:38.856065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:38.856108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:38.856144Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:38.856309Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:38.856379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:38.856425Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:38.856465Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:38.856607Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:38.856663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:38.856707Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:38.856735Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:38.856801Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:38.856833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:38.856862Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:38.856912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:38.856950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:38.856983Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:38.857357Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=47; 2025-03-28T09:26:38.857471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=51; 2025-03-28T09:26:38.857564Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=41; 2025-03-28T09:26:38.857665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=51; 2025-03-28T09:26:38.857828Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:38.857892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:38.857941Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:38.858150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:38.858198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.858233Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:38.858395Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:38.858447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:38.858478Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:38.858873Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:38.858932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:38.858969Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:38.859098Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:38.859150Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:38.859195Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15728:17686];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-03-28T09:34:55.465407Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T09:34:55.465526Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=14; 2025-03-28T09:34:55.465988Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=371; 2025-03-28T09:34:55.466039Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=450; 2025-03-28T09:34:55.471791Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T09:34:55.471917Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=15; 2025-03-28T09:34:55.483940Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=11900; 2025-03-28T09:34:55.496738Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11176; 2025-03-28T09:34:55.496884Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=12827; 2025-03-28T09:34:55.497112Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=139; 2025-03-28T09:34:55.497295Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=120; 2025-03-28T09:34:55.497522Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=167; 2025-03-28T09:34:55.497721Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=140; 2025-03-28T09:34:55.498018Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=229; 2025-03-28T09:34:55.498071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=26092; 2025-03-28T09:34:55.506126Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T09:34:55.506242Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=16; 2025-03-28T09:34:55.518402Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=12034; 2025-03-28T09:34:55.563986Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=45438; 2025-03-28T09:34:55.564160Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=49; 2025-03-28T09:34:55.564245Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=33; 2025-03-28T09:34:55.564294Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-03-28T09:34:55.564343Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2025-03-28T09:34:55.564392Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-03-28T09:34:55.564467Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=38; 2025-03-28T09:34:55.564515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-03-28T09:34:55.564628Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=73; 2025-03-28T09:34:55.564681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=10; 2025-03-28T09:34:55.564759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=38; 2025-03-28T09:34:55.564908Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=90; 2025-03-28T09:34:55.565041Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=79; 2025-03-28T09:34:55.565103Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=58788; 2025-03-28T09:34:55.565359Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2425692;raw_bytes=4011492;count=1;records=39328} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=111593408;raw_bytes=187115688;count=44;records=1805672} inactive {blob_bytes=178990352;raw_bytes=300417312;count=90;records=2897034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:34:55.566363Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15728:17686];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:34:55.566463Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15728:17686];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:34:55.566573Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;self_id=[1:15728:17686];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:34:55.566641Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15728:17686];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-03-28T09:34:55.566922Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:34:55.567022Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:34:55.567268Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-03-28T09:34:55.567371Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-28T09:34:55.567438Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:34:55.567501Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:55.567552Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:55.567687Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:34:55.572297Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:34:55.576917Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15728:17686];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:34:55.578744Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15728:17686];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:34:55.578802Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:34:55.578847Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:34:55.578919Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15728:17686];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:34:55.579013Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15728:17686];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:34:55.579295Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15728:17686];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-03-28T09:34:55.579394Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15728:17686];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-28T09:34:55.579462Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15728:17686];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:34:55.579534Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15728:17686];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:55.579591Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15728:17686];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:34:55.579709Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15728:17686];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-03-28T09:34:55.579784Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15728:17686];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] Test command err: 2025-03-28T09:34:35.804948Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791465290053925:2142];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:35.805946Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:35.835352Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791463076403961:2073];send_to=[0:7307199536658146131:7762515]; 
2025-03-28T09:34:35.835396Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:36.022934Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:34:36.022909Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001573/r3tmp/tmpw2TESF/pdisk_1.dat 2025-03-28T09:34:36.267633Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:36.291820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.291940Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.292419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.292464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.300968Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:36.301127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:36.302321Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11932, node 1 2025-03-28T09:34:36.525920Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001573/r3tmp/yandexG4LLta.tmp 2025-03-28T09:34:36.525947Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001573/r3tmp/yandexG4LLta.tmp 2025-03-28T09:34:36.531089Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001573/r3tmp/yandexG4LLta.tmp 2025-03-28T09:34:36.531302Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:36.711115Z INFO: TTestServer started on Port 20193 GrpcPort 11932 TClient is connected to server localhost:20193 PQClient connected to localhost:11932 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:36.997606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:34:37.055974Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2025-03-28T09:34:37.073569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:34:38.922015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791478174956825:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:38.922132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791478174956811:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:38.922271Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:38.924932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:34:38.940549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791478174956848:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:34:39.215859Z node 1 :TX_PROXY ERROR: Actor# [1:7486791482469924234:2769] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:34:39.283248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:34:39.313462Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486791482469924254:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:34:39.315329Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Mjc3NGMxM2YtNjQ5ODA1NzUtNjY0ZjAyMGYtYTZlNTJmOWY=, ActorId: [1:7486791478174956807:2336], ActorState: ExecuteState, TraceId: 01jqe1mzsm1cnfvgefpwhmz0dk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:34:39.317583Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:34:39.385952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:34:39.494102Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:34:39.952402Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqe1n0fb106g8480der3f8b5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2I4NmFkZjAtYjAxMjUwYTEtZmVlNmMxMDMtYzcxZTlhZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486791482469924686:3098] 2025-03-28T09:34:40.804693Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791465290053925:2142];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:40.804931Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:34:40.835347Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486791463076403961:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:40.835403Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-03-28T09:34:45.814201Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791469585021400:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:34:45.814439Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486791469585021400:2129], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-03-28T09:34:45.814545Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486791469585021400:2129], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486791469585021893:2452] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743154477057 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:34:45.814644Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486791469585021400:2129], cacheItem# { Subscriber: { Subscriber: [1:7486791469585021893:2452] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743154477057 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true Show ... 
ctRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:34:58.356677Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791516282707128:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:34:58.356753Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791516282707128:2128], cacheItem# { Subscriber: { Subscriber: [3:7486791529167610158:2908] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743154491064 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:34:58.356867Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791563527351196:4980], recipient# [3:7486791516282707105:2289], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:34:58.356962Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791563527351197:4981], recipient# [3:7486791516282707105:2289], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:34:58.357026Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791516282707128:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:34:58.357117Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791516282707128:2128], cacheItem# { Subscriber: { Subscriber: [3:7486791520577674907:2444] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 29 } Filled: 
1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743154488432 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:34:58.357328Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791563527351198:4982], recipient# [3:7486791516282707105:2289], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:34:58.631878Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvResolveKeySet: self# [3:7486791516282707128:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-28T09:34:58.631972Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [3:7486791516282707128:2128], cacheItem# { Subscriber: { Subscriber: [3:7486791529167610158:2908] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743154491064 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:34:58.632026Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TResolve: self# [3:7486791516282707128:2128], cacheItem# { Subscriber: { Subscriber: [3:7486791529167609940:2754] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743154490861 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: Unknown Kind: KindUnknown PartitionsCount: 0 DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:34:58.632254Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791563527351205:4987], recipient# [3:7486791563527351204:2601], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 12] Access: 1 
SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 },{ TableId: [OwnerId: 72057594046644480, LocalPathId: 10] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Utf8 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2025-03-28T09:34:58.633031Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791516282707128:2128], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:34:58.633134Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791516282707128:2128], cacheItem# { Subscriber: { Subscriber: [3:7486791520577674907:2444] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 29 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743154488432 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:34:58.633255Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791563527351208:4988], recipient# [3:7486791563527351207:2607], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:34:58.714441Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791516282707128:2128], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:34:58.714572Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: 
FillEntry for TNavigate: self# [3:7486791516282707128:2128], cacheItem# { Subscriber: { Subscriber: [3:7486791529167609812:2688] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:34:58.714664Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791563527351228:4996], recipient# [3:7486791563527351227:2612], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable >> Cdc::VirtualTimestamps[PqRunner] [GOOD] >> Cdc::VirtualTimestamps[YdsRunner] >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> CompressExecutor::TestReorderedExecutor >> BuildStatsHistogram::Ten_Crossed_Log [GOOD] >> BuildStatsHistogram::Five_Five_Mixed >> Cdc::HugeKey[PqRunner] [GOOD] >> Cdc::HugeKey[YdsRunner] >> AnalyzeColumnshard::AnalyzeTwoColumnTables >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] Test command err: 2025-03-28T09:34:35.836242Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791465279320274:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:35.839993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:35.927957Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791464714243308:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:35.928054Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:34:36.100066Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:34:36.104345Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00155e/r3tmp/tmpfO6jGm/pdisk_1.dat 2025-03-28T09:34:36.442818Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2025-03-28T09:34:36.442929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.443795Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:36.443866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:36.450153Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:36.450369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:36.451805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:36.483636Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1859, node 1 2025-03-28T09:34:36.583118Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00155e/r3tmp/yandexMwOxxU.tmp 2025-03-28T09:34:36.583143Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00155e/r3tmp/yandexMwOxxU.tmp 2025-03-28T09:34:36.583343Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00155e/r3tmp/yandexMwOxxU.tmp 2025-03-28T09:34:36.583469Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:36.711212Z INFO: TTestServer started on Port 29220 GrpcPort 1859 TClient is connected to server localhost:29220 PQClient connected to localhost:1859 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:34:36.987959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:34:37.074977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:34:39.259007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791482459190406:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:39.259007Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791482459190429:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:39.259094Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:39.262596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:34:39.265599Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791482459190468:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:39.265696Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:39.278377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791482459190434:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:34:39.550852Z node 1 :TX_PROXY ERROR: Actor# [1:7486791482459190508:2762] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:34:39.579026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:34:39.662739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:34:39.668147Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486791482459190540:2351], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:34:39.669288Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWRjYjg2NzktNTA1MzA5ZGEtMjIyYWJjZWEtNTc2MDNlNGM=, ActorId: [1:7486791482459190402:2337], ActorState: ExecuteState, TraceId: 01jqe1n04k37q3aq2qg1ggf5v3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:34:39.671159Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:34:39.757532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:34:40.110192Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqe1n0pzcx1y24gnjg9qfsv7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmNmMzU4ZDAtNmE3ZWM1MzMtMjU1YjQxNC00MjM0NWU2MA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486791486754158256:3093] 2025-03-28T09:34:40.834260Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791465279320274:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:40.834351Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:34:40.915651Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486791464714243308:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:40.915723Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok >>>>> Prepare scheme WaitRootIsUp 'Root'... 
TClient::Ls request: Root 2025-03-28T09:34:46.156811Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7486791469574287685:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:34:46.157000Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [1:7486791469574287685:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 } 2025-03-28T09:34:46.157095Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [1:7486791469574287685:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7486791469574288179:2453] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 14 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743154477064 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2025-03-28T09:34:46.157189Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [1:7486791469574287685:2130], cacheItem# { Subscriber: { Subscriber: [1:7486791469574288179:2453] DomainOwnerId: 72057594 ... false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 23 IsSync: true Partial: 0 } 2025-03-28T09:35:03.488337Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7486791521627349612:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/PQ/Config/V2/Cluster PathId: Partial: 0 } 2025-03-28T09:35:03.488388Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7486791521627349612:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/PQ/Config/V2/Cluster PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7486791534512252420:2747] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743154491918 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2025-03-28T09:35:03.488439Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791521627349612:2128], cacheItem# { Subscriber: { Subscriber: [3:7486791534512252420:2747] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743154491918 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/PQ/Config/V2/Cluster TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 24 IsSync: true Partial: 0 } 2025-03-28T09:35:03.488504Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: HandleNotify: self# [3:7486791521627349612:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/PQ/Config/V2/Versions PathId: Partial: 0 } 
2025-03-28T09:35:03.488557Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: ResolveCacheItem: self# [3:7486791521627349612:2128], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /Root/PQ/Config/V2/Versions PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7486791534512252559:2829] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743154492093 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2025-03-28T09:35:03.488610Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791521627349612:2128], cacheItem# { Subscriber: { Subscriber: [3:7486791534512252559:2829] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743154492093 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: Root/PQ/Config/V2/Versions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 24 IsSync: true Partial: 0 } 2025-03-28T09:35:03.488716Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791586051863975:5503], recipient# [3:7486791586051863973:2875], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:35:03.488776Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791586051863976:5504], recipient# [3:7486791586051863974:2876], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/PQ/Config/V2/Versions 
TableId: [72057594046644480:12:1] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:35:03.489034Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791521627349612:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:35:03.489101Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791521627349612:2128], cacheItem# { Subscriber: { Subscriber: [3:7486791534512252559:2829] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743154492093 PathId: [OwnerId: 72057594046644480, LocalPathId: 12] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:12:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:35:03.489173Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791521627349612:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:10:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:35:03.489250Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791521627349612:2128], cacheItem# { Subscriber: { Subscriber: [3:7486791534512252420:2747] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 24 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1743154491918 PathId: [OwnerId: 72057594046644480, LocalPathId: 10] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:10:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:35:03.489250Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791586051863979:5505], recipient# [3:7486791521627349592:2289], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:12:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } 
ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:35:03.489366Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7486791521627349612:2128], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2025-03-28T09:35:03.489369Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791586051863980:5506], recipient# [3:7486791521627349592:2289], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:10:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2025-03-28T09:35:03.489422Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: FillEntry for TNavigate: self# [3:7486791521627349612:2128], cacheItem# { Subscriber: { Subscriber: [3:7486791525922317393:2442] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 28 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1743154489363 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2025-03-28T09:35:03.489518Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: Send result: self# [3:7486791586051863981:5507], recipient# [3:7486791521627349592:2289], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] >> Cdc::VirtualTimestamps[YdsRunner] [GOOD] >> Cdc::VirtualTimestamps[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] Test command err: ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 755b + FlatIndex{4} Label{3 rev 3, 172b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b {1, aaa} | 1 3 88b {1, b} | 2 6 86b {2, NULL} | 3 9 86b {2, ccx} | 3 11 86b {2, cxz} + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 208b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > {1, b} | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > {2, 
NULL} | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > {2, ccx} | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 346b 12r} data 777b + FlatIndex{4} Label{3 rev 3, 179b} 5 rec | Page Row Bytes (Uint32, String) | 0 0 86b {1, aaa} | 1 3 88b {1, baaaa} | 2 6 86b {2, aaa} | 3 9 86b {2, ccx} | 3 11 86b {2, cxz} + BTreeIndex{PageId: 5 RowCount: 12 DataSize: 346 ErasedRowCount: 0} Label{13 rev 1, 223b} | PageId: 0 RowCount: 3 DataSize: 86 ErasedRowCount: 0 | > {1, baaaa} | PageId: 1 RowCount: 6 DataSize: 174 ErasedRowCount: 0 | > {2, aaa} | PageId: 2 RowCount: 9 DataSize: 260 ErasedRowCount: 0 | > {2, ccx} | PageId: 3 RowCount: 12 DataSize: 346 ErasedRowCount: 0 ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, ab} | 2 2 42b {1, ac} | 3 3 42b {1, b} | 4 4 42b {1, bb} | 5 5 42b {2, NULL} | 6 6 42b {2, ab} | 7 7 42b {2, ac} | 8 8 42b {2, b} | 9 9 42b {2, bb} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, ab} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, ac} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, b} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bb} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, NULL} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, ab} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, ac} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, b} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bb} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= SLICES ======= { [0, 1), [1, 3), [3, 4), [4, 5), [5, 7), [7, 8), [8, 9] } ======= CUT ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1347b + FlatIndex{10} Label{3 rev 3, 362b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, ab} | 2 2 42b {1, ac} | 3 3 42b {1, b} | 4 4 42b {1, bb} | 5 5 42b {2, NULL} | 6 6 42b {2, ab} | 7 7 42b {2, ac} | 8 8 42b {2, b} | 9 9 42b {2, bb} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 536b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, ab} | 
PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, ac} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, b} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bb} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, NULL} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, ab} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, ac} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, b} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bb} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 ======= FULL ======= Part{[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{10} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 11 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccccd} | 1 1 41b {ccccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 83b 2r} data 320b + FlatIndex{2} Label{3 rev 3, 109b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 43b {ccccccd} | 1 1 43b {ccccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 83 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 83 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccccd} | 1 1 40b {cccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 82b 2r} data 316b + FlatIndex{2} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 42b {cccccd} | 1 1 42b {cccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 82 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 82 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 79b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 39b {ccccd} | 1 1 39b {ccccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 79 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 
DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 79 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 312b + FlatIndex{2} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccd} | 1 1 41b {ccccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 78b 2r} data 304b + FlatIndex{2} Label{3 rev 3, 101b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 38b {cccd} | 1 1 38b {cccd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 78 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 78 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{2} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 75b 2r} data 292b + FlatIndex{2} Label{3 rev 3, 95b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 75 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 75 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 77b 2r} data 296b + FlatIndex{2} Label{3 rev 3, 97b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 77 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 77 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{2} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{2} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 3 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 10 ... 
{Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{48} Label{484 rev 1, 138b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{51} Label{514 rev 1, 138b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 4), [6, 8), [8, 12), [14, 16), [16, 18), [20, 28), [32, 34), [34, 38), [38, 39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b {0, 1} | 2 2 66b {0, 4} | 4 4 82b {0, 7} | 8 6 66b {0, 10} | 11 8 66b {1, 3} | 14 10 82b {1, 6} | 20 12 66b {1, 8} | 23 14 66b {2, NULL} | 26 16 82b {2, 4} | 36 18 66b {2, 7} | 39 20 66b {2, 10} | 42 22 82b {3, 3} | 48 24 66b {3, 6} | 53 26 66b {3, 8} | 58 28 82b {4, NULL} | 64 30 66b {4, 4} | 67 32 66b {4, 7} | 70 34 82b {4, 10} | 82 36 66b {5, 3} | 87 38 66b {5, 6} | 87 39 66b {5, 7} + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 42 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 58 RowCount: 30 DataSize: 1070 GroupDataSize: 
6739 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 4), [6, 8), [8, 12), [14, 16), [16, 18), [20, 28), [32, 34), [34, 38), [38, 39] } Part{[1:2:3:0:0:0:0] eph 0, 10774b 40r} data 15576b + FlatIndex{95} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 66b {0, 1} | 2 2 66b {0, 4} | 4 4 82b {0, 7} | 8 6 66b {0, 10} | 11 8 66b {1, 3} | 14 10 82b {1, 6} | 20 12 66b {1, 8} | 23 14 66b {2, NULL} | 26 16 82b {2, 4} | 36 18 66b {2, 7} | 39 20 66b {2, 10} | 42 22 82b {3, 3} | 48 24 66b {3, 6} | 53 26 66b {3, 8} | 58 28 82b {4, NULL} | 64 30 66b {4, 4} | 67 32 66b {4, 7} | 70 34 82b {4, 10} | 82 36 66b {5, 3} | 87 38 66b {5, 6} | 87 39 66b {5, 7} + BTreeIndex{PageId: 98 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 72 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 15 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 66 GroupDataSize: 76 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 2 RowCount: 4 DataSize: 132 GroupDataSize: 526 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 4 RowCount: 6 DataSize: 214 GroupDataSize: 1052 ErasedRowCount: 0 | | > {0, 10} | | + 
BTreeIndex{PageId: 27 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 8 RowCount: 8 DataSize: 280 GroupDataSize: 1488 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 11 RowCount: 10 DataSize: 346 GroupDataSize: 1938 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 14 RowCount: 12 DataSize: 428 GroupDataSize: 2467 ErasedRowCount: 0 | | > {1, 8} | | + BTreeIndex{PageId: 43 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 20 RowCount: 14 DataSize: 494 GroupDataSize: 2906 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 23 RowCount: 16 DataSize: 560 GroupDataSize: 3360 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 26 RowCount: 18 DataSize: 642 GroupDataSize: 3891 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 97 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 59 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 36 RowCount: 20 DataSize: 708 GroupDataSize: 4330 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 39 RowCount: 22 DataSize: 774 GroupDataSize: 4784 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 42 RowCount: 24 DataSize: 856 GroupDataSize: 5315 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 71 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 48 RowCount: 26 DataSize: 922 GroupDataSize: 5754 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 53 RowCount: 28 DataSize: 988 GroupDataSize: 6208 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 58 RowCount: 30 DataSize: 1070 GroupDataSize: 6739 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 96 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 64 RowCount: 32 DataSize: 1136 GroupDataSize: 7178 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 67 RowCount: 34 DataSize: 1202 GroupDataSize: 7632 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 70 RowCount: 36 DataSize: 1284 GroupDataSize: 8163 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 82 RowCount: 38 DataSize: 1350 GroupDataSize: 8602 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 87 RowCount: 40 DataSize: 1416 GroupDataSize: 9358 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 66b}, [0, +2)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} + Rows{2} Label{24 rev 1, 66b}, [2, +2)row | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} + Rows{4} Label{44 rev 1, 82b}, [4, +2)row | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} + Rows{8} Label{84 rev 1, 66b}, [6, +2)row | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} + Rows{11} Label{114 rev 1, 66b}, [8, +2)row | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} + Rows{14} Label{144 rev 1, 82b}, [10, +2)row | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} + Rows{20} Label{204 rev 1, 66b}, [12, +2)row | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} + Rows{23} Label{234 rev 1, 66b}, [14, +2)row | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} + Rows{26} Label{264 rev 1, 82b}, [16, +2)row | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} + Rows{36} Label{364 rev 1, 66b}, [18, +2)row | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} + Rows{39} Label{394 rev 1, 66b}, [20, +2)row | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} + Rows{42} Label{424 rev 1, 82b}, [22, +2)row | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} + Rows{48} Label{484 rev 1, 66b}, [24, +2)row | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} + Rows{53} Label{534 rev 1, 66b}, [26, +2)row | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} + Rows{58} Label{584 rev 1, 82b}, [28, +2)row | ERowOp 1: 
{4, 1} | ERowOp 1: {4, 3} + Rows{64} Label{644 rev 1, 66b}, [30, +2)row | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} + Rows{67} Label{674 rev 1, 66b}, [32, +2)row | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} + Rows{70} Label{704 rev 1, 82b}, [34, +2)row | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} + Rows{82} Label{824 rev 1, 66b}, [36, +2)row | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} + Rows{87} Label{874 rev 1, 66b}, [38, +2)row | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable |94.9%| [TA] $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> BuildStatsHistogram::Five_Five_Mixed [GOOD] >> BuildStatsHistogram::Five_Five_Serial ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:34:41.549060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:34:41.549185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:41.549242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:34:41.549284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:34:41.550533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:34:41.550590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:34:41.550707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:34:41.550829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:34:41.552392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:41.645974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:41.646026Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:41.663496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:41.663773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:34:41.663935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:34:41.669140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2025-03-28T09:34:41.669337Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:34:41.672818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.673912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:41.680985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.690256Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:41.690359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.691313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:34:41.691374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:41.691420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:34:41.691497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.697911Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:34:41.820804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:34:41.821020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.821228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:34:41.821398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:34:41.821454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.823620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.823757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:34:41.823970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.824046Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:34:41.824100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:34:41.824139Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:34:41.825929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.825980Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:34:41.826042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:34:41.827762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.827862Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.827907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.827957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.834032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:34:41.836087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:34:41.836249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:34:41.837195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:34:41.837298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:34:41.837359Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.837660Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:34:41.837724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:34:41.837899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:34:41.837978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:34:41.839803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:34:41.839862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:34:41.840010Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:34:41.840428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:34:41.840481Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:34:41.840566Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:41.840594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.840632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:34:41.840659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.840691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:34:41.840732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:34:41.840763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:34:41.840791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:34:41.840843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:34:41.840875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:34:41.840903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:34:41.842627Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:41.842771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:34:41.842804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ly one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: Simple, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:35:05.078117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.078189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.078578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Tables, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:35:05.078714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-28T09:35:05.078871Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberDataErasureTenantsInRunning# 0 2025-03-28T09:35:05.079047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2025-03-28T09:35:05.079160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.079265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:35:05.079305Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TTxInit for Shards, read: 72057594046678944:1, tabletId: 72075186233409546, PathId: [OwnerId: 72057594046678944, LocalPathId: 2], TabletType: DataShard, at schemeshard: 72057594046678944 2025-03-28T09:35:05.079333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:35:05.079419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2025-03-28T09:35:05.079495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.079633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2025-03-28T09:35:05.079880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.079958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.080226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.080308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.080445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.080510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.080576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.080693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.080761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.080860Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.081015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IndexBuild , records: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.081160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.081237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.081294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: LongLocks: records: 0, at schemeshard: 72057594046678944 2025-03-28T09:35:05.081473Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2025-03-28T09:35:05.086516Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:35:05.086646Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-28T09:35:05.087913Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435082, Sender [1:1751:3674], Recipient [1:1751:3674]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2025-03-28T09:35:05.087962Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2025-03-28T09:35:05.089235Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:35:05.089285Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:35:05.089655Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:1751:3674], Recipient [1:1751:3674]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:35:05.089684Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T09:35:05.090082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:35:05.090140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:35:05.090192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:35:05.090250Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2025-03-28T09:35:05.092688Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 274399233, Sender [1:1789:3674], Recipient [1:1751:3674]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2025-03-28T09:35:05.092736Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2025-03-28T09:35:05.092775Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1751:3674] sender: [1:1809:2058] recipient: [1:15:2062] 2025-03-28T09:35:05.139970Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122945, Sender [1:1808:3720], Recipient [1:1751:3674]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2025-03-28T09:35:05.140032Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2025-03-28T09:35:05.140153Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:35:05.140376Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 238us result status StatusSuccess 2025-03-28T09:35:05.140924Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false 
IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 27456 RowCount: 200 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 14336 Memory: 156728 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 27456 DataSize: 27456 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExecutorDb::RandomCoordinatorSimulation [GOOD] >> TExecutorDb::MultiPage |94.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} >> AnalyzeDatashard::AnalyzeTwoTables |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TExecutorDb::MultiPage [GOOD] >> TExecutorDb::EncodedPage >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] >> Cdc::HugeKey[YdsRunner] [GOOD] >> Cdc::HugeKey[TopicRunner] >> TExecutorDb::EncodedPage [GOOD] >> TFlatCxxDatabaseTest::BasicSchemaTest >> AnalyzeColumnshard::AnalyzeServerless >> TFlatCxxDatabaseTest::BasicSchemaTest [GOOD] >> TFlatCxxDatabaseTest::RenameColumnSchemaTest [GOOD] >> TFlatCxxDatabaseTest::SchemaFillerTest [GOOD] >> TFlatDatabaseDecimal::UpdateRead [GOOD] >> TFlatEraseCacheTest::BasicUsage [GOOD] >> TFlatEraseCacheTest::BasicUsageReverse [GOOD] >> TFlatEraseCacheTest::CacheEviction [GOOD] >> TFlatEraseCacheTest::StressGarbageCollection [GOOD] >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings [GOOD] >> TFlatExecutorLeases::Basics >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> TraverseDatashard::TraverseOneTable [GOOD] >> TraverseDatashard::TraverseTwoTablesServerless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] Test command err: 2025-03-28T09:34:54.805331Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:54.805536Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:54.805570Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000dd2/r3tmp/tmp2wzUp1/pdisk_1.dat 2025-03-28T09:34:55.170254Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25328, node 1 2025-03-28T09:34:55.397412Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:55.397467Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:55.397498Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:55.397940Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:55.400267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:55.490793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:55.490954Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:55.504247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3793 2025-03-28T09:34:56.038072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:58.819124Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:34:58.856925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:58.857046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:58.896705Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:58.898365Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:59.128768Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.129390Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.130008Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.130171Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.130477Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.130578Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.130660Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.130790Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.130885Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.294330Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.294436Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.307630Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:59.455976Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:59.506446Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:34:59.506540Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:34:59.532765Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:34:59.532960Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:34:59.533189Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:34:59.533248Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:34:59.533300Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:34:59.533380Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:34:59.533445Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:34:59.533500Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:34:59.533977Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:34:59.566054Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:34:59.566205Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:34:59.574464Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:34:59.580908Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:34:59.581184Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:34:59.587344Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-28T09:34:59.606389Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:34:59.606454Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:34:59.606524Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-28T09:34:59.621573Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:34:59.632346Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:34:59.632502Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:34:59.811652Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:34:59.960649Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:00.038517Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:00.719511Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:35:01.382333Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:01.545452Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-28T09:35:01.545506Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:35:01.545575Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2594:2948], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:35:01.547347Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2596:2950] 2025-03-28T09:35:01.548240Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2596:2950], schemeshard id = 72075186224037899 2025-03-28T09:35:02.526488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:35:03.048421Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:03.264987Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037905 2025-03-28T09:35:03.265041Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037905 2025-03-28T09:35:03.265119Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:3077:3150], at schemeshard: 72075186224037905, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037905 2025-03-28T09:35:03.266198Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3079:3152] 2025-03-28T09:35:03.266921Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:3079:3152], schemeshard id = 72075186224037905 2025-03-28T09:35:04.533716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3206:3409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:04.533902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:04.549510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037899 2025-03-28T09:35:04.813408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3512:3459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:04.813509Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:04.845720Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3517:3463]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:04.845879Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:04.846039Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-03-28T09:35:04.846116Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3520:3466] 2025-03-28T09:35:04.846169Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3520:3466] 2025-03-28T09:35:04.846730Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3521:3389] 2025-03-28T09:35:04.846917Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3520:3466], server id = [2:3521:3389], tablet id = 72075186224037894, status = OK 2025-03-28T09:35:04.847088Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:3521:3389], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:35:04.847145Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:35:04.847414Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:35:04.847481Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3517:3463], StatRequests.size() = 1 2025-03-28T09:35:04.860917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3525:3470], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:04.861084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:04.861559Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3530:3475], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:04.867015Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480 2025-03-28T09:35:05.098549Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:35:05.098630Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:35:05.198789Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3520:3466], schemeshard count = 1 2025-03-28T09:35:05.557906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3532:3477], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715664 completed, doublechecking } 2025-03-28T09:35:05.733779Z node 1 :TX_PROXY ERROR: Actor# [1:3658:3554] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:35:05.743977Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3681:3570]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:05.744148Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:35:05.744184Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3681:3570], StatRequests.size() = 1 2025-03-28T09:35:05.814154Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqe1ns3c81kqgz8e5retmgka, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjM4ZTJmODgtY2RjMTFjY2MtY2ZiOWEzYmEtZGNmMmViZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:35:05.888852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72075186224037905 2025-03-28T09:35:06.222749Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:4033:3637]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:06.222988Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:35:06.223356Z node 2 :STATISTICS DEBUG: [72075186224037894] EvRequestStats, node id = 1, schemeshard count = 1, urgent = 0 2025-03-28T09:35:06.223401Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:35:06.223613Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:35:06.223671Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:4033:3637], StatRequests.size() = 1 2025-03-28T09:35:06.244069Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:4042:3646]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:06.244191Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-03-28T09:35:06.244241Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:4042:3646], StatRequests.size() = 1 2025-03-28T09:35:06.290026Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqe1ntf38rz3bh4zbxsy7v8s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2RjZTEzMDEtZDVmZWY4YTUtZGUyMjlkMmQtZDk1NWQyZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:35:06.335787Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4082:3650]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:06.337983Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:06.338030Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:35:06.338376Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:06.338417Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-28T09:35:06.338454Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:35:06.362010Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-03-28T09:35:06.366789Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-03-28T09:35:06.367397Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4107:3663]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:06.385375Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:35:06.385448Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:35:06.385986Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:35:06.386032Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-28T09:35:06.386074Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:35:06.388125Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-03-28T09:35:06.388290Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> TVersions::Wreck2Reverse [GOOD] >> TVersions::Wreck1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTable [GOOD] Test command err: 2025-03-28T09:34:59.660070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:59.660335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:59.660387Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b35/r3tmp/tmp2pwEyI/pdisk_1.dat 2025-03-28T09:35:00.008302Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4844, node 1 2025-03-28T09:35:00.225935Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:00.225990Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:00.226021Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:00.226445Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:00.228710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:35:00.310892Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:00.311063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:00.325034Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19838 2025-03-28T09:35:00.856205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:03.631716Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:03.660270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:03.660368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:03.697383Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:03.698751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:03.916143Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:03.916854Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:03.917490Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:03.917686Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:03.918018Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:03.918137Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:03.918236Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:03.918361Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:03.918473Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.078713Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:04.078810Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:04.091364Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:04.205125Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:04.243501Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:04.243576Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:04.267928Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:04.268088Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:04.268247Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:04.268311Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:04.268361Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:04.268406Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:04.268458Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:04.268506Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:04.268859Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:04.296742Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:04.296847Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:04.302879Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:04.307555Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:04.307760Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:04.314300Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:04.332352Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:04.332417Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:04.332488Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:04.346096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:04.353506Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:04.353647Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:04.499107Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:04.663667Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:04.771888Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:05.564671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:05.564813Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:05.578789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:05.989524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2538:3121], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:05.989651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:05.990892Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2543:3125]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:05.991047Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:05.991097Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2545:3127] 2025-03-28T09:35:05.991150Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2545:3127] 2025-03-28T09:35:05.991615Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2546:2990] 2025-03-28T09:35:05.991792Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2545:3127], server id = [2:2546:2990], tablet id = 72075186224037894, status = OK 2025-03-28T09:35:05.991951Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2546:2990], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:35:05.992008Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:35:05.992304Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:35:05.992374Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2543:3125], StatRequests.size() = 1 2025-03-28T09:35:06.004959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2550:3131], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:06.005030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:06.005330Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2555:3136], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:06.009686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-28T09:35:06.135731Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:35:06.135791Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:35:06.243279Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2545:3127], schemeshard count = 1 2025-03-28T09:35:06.594180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2557:3138], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-28T09:35:06.745441Z node 1 :TX_PROXY ERROR: Actor# [1:2679:3212] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:35:06.753293Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2702:3228]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:06.753400Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:35:06.753423Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2702:3228], StatRequests.size() = 1 2025-03-28T09:35:06.807998Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1nt6x68cqnmyayv7zfb2r, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ViNDg2MzQtNGQyM2NiNjUtY2U0OWVhMzQtMzg2MDYzNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:35:06.880284Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:2747:3043]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:06.882170Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:06.882212Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:35:06.882520Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:06.882550Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:35:06.882583Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:35:06.892532Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-03-28T09:35:06.892779Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 >> Cdc::VirtualTimestamps[TopicRunner] [GOOD] >> Cdc::Write[PqRunner] >> TFlatExecutorLeases::Basics [GOOD] >> TFlatExecutorLeases::BasicsLeaseTimeout >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD] >> TKesusTest::TestAcquireSemaphore >> BuildStatsHistogram::Five_Five_Serial [GOOD] >> BuildStatsHistogram::Five_Five_Crossed >> AnalyzeColumnshard::Analyze >> TKesusTest::TestAcquireSemaphore [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] Test command err: 2025-03-28T09:34:30.068840Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:30.068956Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:30.107529Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:30.107827Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:30.136500Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:30.137084Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSessionAttach::Execute (sender=[1:130:2156], cookie=7319109843103119734, session=0, seqNo=0) 2025-03-28T09:34:30.137268Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:34:30.149669Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=7319109843103119734, session=1) 2025-03-28T09:34:30.150040Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=4053524397936920185, session=0, seqNo=0) 2025-03-28T09:34:30.150161Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:34:30.162411Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=4053524397936920185, session=2) 2025-03-28T09:34:30.163244Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-28T09:34:30.163391Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-28T09:34:30.163499Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:34:30.163935Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=2, semaphore="Lock2" count=1) 2025-03-28T09:34:30.164006Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-03-28T09:34:30.164087Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-03-28T09:34:30.164215Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=333, session=1, semaphore="Lock2" count=1) 2025-03-28T09:34:30.164278Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-03-28T09:34:30.176448Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-03-28T09:34:30.176520Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2025-03-28T09:34:30.176550Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=333) 2025-03-28T09:34:30.177241Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:146:2170], cookie=2406243062840574743, name="Lock1") 2025-03-28T09:34:30.178611Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:146:2170], cookie=2406243062840574743) 2025-03-28T09:34:30.179132Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:149:2173], cookie=10001672959388865281, name="Lock2") 2025-03-28T09:34:30.179199Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:149:2173], cookie=10001672959388865281) 2025-03-28T09:34:30.207256Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:30.207368Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:30.208005Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:30.208611Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:30.229082Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 
2025-03-28T09:34:30.229230Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:34:30.229292Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2025-03-28T09:34:30.229331Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2025-03-28T09:34:30.229801Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:189:2203], cookie=13010288008785376048, name="Lock1") 2025-03-28T09:34:30.229886Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:189:2203], cookie=13010288008785376048) 2025-03-28T09:34:30.230386Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:197:2210], cookie=9391570096962790076, name="Lock2") 2025-03-28T09:34:30.230451Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:197:2210], cookie=9391570096962790076) 2025-03-28T09:34:30.682875Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:30.695542Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:31.046249Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:31.058214Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:31.398087Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:31.410108Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:31.751621Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:31.763879Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:32.124898Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:32.137084Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:32.477304Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:32.489511Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:32.819011Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:32.830862Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:33.170629Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:33.182919Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:33.532872Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:33.544929Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:33.927127Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:33.939279Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:34.289100Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:34.300769Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:34.650218Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:34.661904Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:35.011267Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 
2025-03-28T09:34:35.023306Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:35.383997Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:35.396085Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:35.789188Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:35.803315Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:36.169619Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:36.185731Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:36.547302Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:36.563405Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:36.932012Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:36.945065Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:37.301571Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:37.315480Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:37.725547Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:37.739885Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:38.110722Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:38.123166Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:38.482391Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:38.495198Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:38.838964Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:38.856793Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:39.207299Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:39.219361Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:39.624425Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:39.636581Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:39.987426Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:39.999658Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:40.351402Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:40.363815Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:40.706903Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:40.719052Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:41.080255Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:41.093185Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:41.467534Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:363:2365], cookie=17176727243828792546, name="Lock1") 2025-03-28T09:34:41.467694Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:363:2365], 
cookie=17176727243828792546) 2025-03-28T09:34:41.468262Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:366:2368], cookie=5799174456033218000, name="Lock2") 2025-03-28T09:34:41.468362Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:366:2368], cookie=5799174456033218000) 2025-03-28T09:34:41.524468Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:41.537081Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:41.880257Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:41.892502Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:42.249325Z node 1 :KESUS_TABLET DEBUG: [72057594 ... -03-28T09:35:01.859828Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:02.222360Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:02.234470Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:02.586256Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:02.598016Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:02.948646Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:02.960890Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:03.312046Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:03.324290Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:03.707140Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:03.719451Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:04.079555Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:04.091621Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:04.441746Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:04.453860Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:04.803387Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:04.815608Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:05.165722Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:05.177538Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:05.539124Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:05.550711Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:05.911577Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:05.923905Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:06.274214Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:06.286414Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:06.636807Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:06.648764Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:06.998698Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] 
TTxSelfCheck::Execute 2025-03-28T09:35:07.010459Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:07.359187Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:07.370837Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:07.720220Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:07.732381Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:08.093089Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:08.104687Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:08.454459Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:08.466103Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:08.815130Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:08.826938Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:09.166898Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2025-03-28T09:35:09.166980Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 2025-03-28T09:35:09.167020Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2025-03-28T09:35:09.167106Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2025-03-28T09:35:09.167149Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2025-03-28T09:35:09.167170Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-03-28T09:35:09.178834Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2025-03-28T09:35:09.179344Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:331:2344], cookie=10624800996105528194, name="Lock1") 2025-03-28T09:35:09.179441Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:331:2344], cookie=10624800996105528194) 2025-03-28T09:35:09.179867Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:334:2347], cookie=12402005460936327545, name="Lock2") 2025-03-28T09:35:09.179914Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:334:2347], cookie=12402005460936327545) 2025-03-28T09:35:09.180229Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:337:2350], cookie=10419374257544681580) 2025-03-28T09:35:09.180293Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:337:2350], cookie=10419374257544681580) 2025-03-28T09:35:09.191459Z node 4 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:35:09.191532Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:35:09.191970Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:35:09.192421Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:35:09.238430Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:35:09.238559Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 
2025-03-28T09:35:09.238608Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2025-03-28T09:35:09.238944Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:377:2380], cookie=9909122050970749053) 2025-03-28T09:35:09.239024Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:377:2380], cookie=9909122050970749053) 2025-03-28T09:35:09.239548Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:384:2386], cookie=5525171800620776215, name="Lock1") 2025-03-28T09:35:09.239624Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:384:2386], cookie=5525171800620776215) 2025-03-28T09:35:09.240118Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:387:2389], cookie=7366116708464344691, name="Lock2") 2025-03-28T09:35:09.240181Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:387:2389], cookie=7366116708464344691) 2025-03-28T09:35:09.514095Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:35:09.514173Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:35:09.526334Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:35:09.526810Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:35:09.550060Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:35:09.550511Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=7516104562831548256, session=0, seqNo=0) 2025-03-28T09:35:09.550636Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:35:09.562204Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=7516104562831548256, session=1) 2025-03-28T09:35:09.562500Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=17308411330341854424, session=0, seqNo=0) 2025-03-28T09:35:09.562608Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:35:09.574476Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=17308411330341854424, session=2) 2025-03-28T09:35:09.574821Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Sem1" count=1) 2025-03-28T09:35:09.586560Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-03-28T09:35:09.587089Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2169], cookie=10274835367828661899, name="Sem1", limit=1) 2025-03-28T09:35:09.587249Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-03-28T09:35:09.598848Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2169], cookie=10274835367828661899) 2025-03-28T09:35:09.599231Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=1, semaphore="Sem1" count=100500) 2025-03-28T09:35:09.611228Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 
2025-03-28T09:35:09.611483Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=222, session=1, semaphore="Sem1" count=1) 2025-03-28T09:35:09.611603Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-03-28T09:35:09.611735Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=2, semaphore="Sem1" count=1) 2025-03-28T09:35:09.623390Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=222) 2025-03-28T09:35:09.623455Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 2025-03-28T09:35:09.623955Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:155:2179], cookie=13122988970818346604, name="Sem1") 2025-03-28T09:35:09.624034Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:155:2179], cookie=13122988970818346604) 2025-03-28T09:35:09.624382Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:158:2182], cookie=93679625523377288, name="Sem1") 2025-03-28T09:35:09.624436Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:158:2182], cookie=93679625523377288) 2025-03-28T09:35:09.624741Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:161:2185], cookie=14158503250731937671, name="Sem1", force=0) 2025-03-28T09:35:09.636382Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:161:2185], cookie=14158503250731937671) 2025-03-28T09:35:09.636833Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:166:2190], cookie=7798980253712509808, name="Sem1", force=1) 2025-03-28T09:35:09.636901Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting semaphore 1 "Sem1" 2025-03-28T09:35:09.649002Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:166:2190], cookie=7798980253712509808) >> TFlatExecutorLeases::BasicsLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLease >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds >> Cdc::HugeKey[TopicRunner] [GOOD] >> Cdc::HugeKeyDebezium >> TFlatExecutorLeases::BasicsInitialLease [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout >> Cdc::Write[PqRunner] [GOOD] >> Cdc::Write[YdsRunner] >> AnalyzeColumnshard::AnalyzeTable >> BuildStatsHistogram::Five_Five_Crossed [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels >> BuildStatsHistogram::Single_Small_2_Levels [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Single_Small_1_Level [GOOD] >> BuildStatsHistogram::Single_Small_0_Levels >> BuildStatsHistogram::Single_Small_0_Levels [GOOD] >> BuildStatsHistogram::Mixed_Groups_History >> TraverseDatashard::TraverseTwoTables >> BuildStatsHistogram::Mixed_Groups_History [GOOD] >> BuildStatsHistogram::Serial_Groups_History >> 
TFlatExecutorLeases::BasicsInitialLeaseTimeout [GOOD] >> TFlatTableDatetime::TestDate >> TFlatTableDatetime::TestDate [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime >> BuildStatsHistogram::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Benchmark >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default >> BuildStatsHistogram::Benchmark [GOOD] >> BuildStatsHistogram::Many_Mixed >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False >> Cdc::HugeKeyDebezium [GOOD] >> Cdc::Drop[PqRunner] >> Cdc::Write[YdsRunner] [GOOD] >> Cdc::Write[TopicRunner] >> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] Test command err: 2025-03-28T09:34:30.068464Z node 1 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:30.068663Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:30.110787Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:30.110887Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:30.125842Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:30.126406Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[1:130:2156], cookie=4505093691396927097, session=0, seqNo=0) 2025-03-28T09:34:30.126580Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:34:30.149832Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[1:130:2156], cookie=4505093691396927097, session=1) 2025-03-28T09:34:30.152907Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=111, session=1, semaphore="Lock1" count=1) 2025-03-28T09:34:30.154488Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-28T09:34:30.154632Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:34:30.166837Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=111) 2025-03-28T09:34:30.167214Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:130:2156], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-28T09:34:30.181027Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:130:2156], cookie=222) 2025-03-28T09:34:30.181852Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:146:2170], cookie=8901889418864344094, name="Lock1") 
2025-03-28T09:34:30.181958Z node 1 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:146:2170], cookie=8901889418864344094) 2025-03-28T09:34:30.576471Z node 2 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:34:30.576564Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:34:30.594930Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:34:30.595103Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 2025-03-28T09:34:30.618571Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:34:30.619295Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:132:2158], cookie=12907371438442939254, session=0, seqNo=0) 2025-03-28T09:34:30.619441Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:34:30.631334Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:132:2158], cookie=12907371438442939254, session=1) 2025-03-28T09:34:30.631667Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[2:133:2159], cookie=16804422202124139015, session=0, seqNo=0) 2025-03-28T09:34:30.631820Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:34:30.643578Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[2:133:2159], cookie=16804422202124139015, session=2) 2025-03-28T09:34:30.644422Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2025-03-28T09:34:30.644544Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2025-03-28T09:34:30.644626Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2025-03-28T09:34:30.656550Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=111) 2025-03-28T09:34:30.656887Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:132:2158], cookie=112, session=1, semaphore="Lock2" count=1) 2025-03-28T09:34:30.657026Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2025-03-28T09:34:30.657164Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2025-03-28T09:34:30.669360Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:132:2158], cookie=112) 2025-03-28T09:34:30.669847Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=222, session=2, semaphore="Lock1" count=1) 2025-03-28T09:34:30.670064Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2025-03-28T09:34:30.682145Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=222) 2025-03-28T09:34:30.682225Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=223) 2025-03-28T09:34:30.682618Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=333, session=2, semaphore="Lock1" count=1) 2025-03-28T09:34:30.682937Z node 2 :KESUS_TABLET 
DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:133:2159], cookie=334, session=2, semaphore="Lock2" count=18446744073709551615) 2025-03-28T09:34:30.695028Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=333) 2025-03-28T09:34:30.695098Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:133:2159], cookie=334) 2025-03-28T09:34:31.087151Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:31.098954Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:31.437964Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:31.450147Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:31.799600Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:31.811185Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:32.150708Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:32.162922Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:32.512536Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:32.524415Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:32.853225Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:32.865007Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:33.204909Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:33.217035Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:33.556095Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:33.567980Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:33.907684Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:33.920403Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:34.282867Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:34.294586Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:34.655795Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:34.667543Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:35.017465Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:35.029580Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:35.380595Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:35.392997Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:35.746569Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:35.760746Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:36.148535Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:36.175396Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:36.550991Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:36.563430Z node 2 :KESUS_TABLET DEBUG: 
[72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:36.916290Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:36.928597Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:37.309096Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:37.323601Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:37.677295Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:37.690676Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:38.055614Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:38.075460Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:38.450019Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:38.463445Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:38.829078Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:38.841244Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:39.194574Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:39.207129Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:39.570999Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:39.587807Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:39.942276Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:39.954372Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:40.304655Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:40.316929Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:40.680555Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:40.692862Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:41.047009Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:41.059417Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:41.420115Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:34:41.432793Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:34:41.786605Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=2) 2025-03-28T09:34:41.786834Z node 2 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 2 "Lock2" waiter link 2025-03-28T09:34:41.802813Z node 2 :KESUS_TABLET DEBUG: [ ... 
8T09:35:06.638183Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:06.999569Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:07.011197Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:07.360395Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:07.372301Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:07.722544Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:07.734623Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:08.085446Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:08.097746Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:08.449163Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:08.461272Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:08.833904Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:08.845742Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:09.207138Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:09.218978Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:09.569157Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:09.580845Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:09.930437Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:09.942577Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:10.292788Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:10.304573Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:10.685670Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:10.697381Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:11.057642Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:11.069251Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:11.418084Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:11.429806Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:11.780392Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:11.792157Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:12.143724Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:12.155811Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:12.508092Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:12.520148Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:12.871635Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:12.883684Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:13.235031Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 
2025-03-28T09:35:13.247106Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:13.577607Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:13.589517Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:13.929010Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:13.940679Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:14.279948Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:14.291680Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:14.641936Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:14.653709Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:14.993244Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:15.005213Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:15.335320Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:15.347065Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:15.687701Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Execute 2025-03-28T09:35:15.699544Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSelfCheck::Complete 2025-03-28T09:35:16.144124Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=1) 2025-03-28T09:35:16.144212Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2025-03-28T09:35:16.156227Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=1) 2025-03-28T09:35:16.177382Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:537:2533], cookie=3299554314812215719) 2025-03-28T09:35:16.177490Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:537:2533], cookie=3299554314812215719) 2025-03-28T09:35:16.177968Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:540:2536], cookie=17364879994900380553) 2025-03-28T09:35:16.178028Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:540:2536], cookie=17364879994900380553) 2025-03-28T09:35:16.178537Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:543:2539], cookie=11466520204626975750, name="Lock1") 2025-03-28T09:35:16.178607Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:543:2539], cookie=11466520204626975750) 2025-03-28T09:35:16.179099Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:546:2542], cookie=10113449235736894272, name="Lock1") 2025-03-28T09:35:16.179163Z node 4 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:546:2542], cookie=10113449235736894272) 2025-03-28T09:35:16.415921Z node 5 :KESUS_TABLET INFO: OnActivateExecutor: 72057594037927937 2025-03-28T09:35:16.416001Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Execute 2025-03-28T09:35:16.428396Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInitSchema::Complete 2025-03-28T09:35:16.428768Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Execute 
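The TTxSemaphoreTimeout records just above show a waiter that was never granted being unlinked once its timeout fires ("Deleting session 2 / semaphore 1 "Lock1" waiter link"); current owners are not affected. In terms of the hypothetical TSemaphoreModel sketch earlier in this log, that step reduces to erasing the session's entry from the waiter queue (again only an illustration building on that sketch, not YDB code):

#include <algorithm>

// Drop a timed-out waiter without granting it; owners stay untouched.
// Assumes the TSemaphoreModel struct from the earlier sketch is in scope.
void TimeoutWaiter(TSemaphoreModel& sem, uint64_t session) {
    auto& q = sem.Queue;
    q.erase(std::remove_if(q.begin(), q.end(),
                           [session](const TSemaphoreModel::TWaiter& w) {
                               return w.Session == session;
                           }),
            q.end());
}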
2025-03-28T09:35:16.452052Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxInit::Complete 2025-03-28T09:35:16.452464Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=10197039852497278327, session=0, seqNo=0) 2025-03-28T09:35:16.452582Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 1 2025-03-28T09:35:16.464250Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=10197039852497278327, session=1) 2025-03-28T09:35:16.464468Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=14063029601700844194, session=0, seqNo=0) 2025-03-28T09:35:16.464557Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 2 2025-03-28T09:35:16.476217Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=14063029601700844194, session=2) 2025-03-28T09:35:16.476447Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Execute (sender=[5:132:2158], cookie=17976164754740699171, session=0, seqNo=0) 2025-03-28T09:35:16.476539Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new session 3 2025-03-28T09:35:16.488118Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSessionAttach::Complete (sender=[5:132:2158], cookie=17976164754740699171, session=3) 2025-03-28T09:35:16.488554Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:145:2169], cookie=4029875158733688437, name="Sem1", limit=3) 2025-03-28T09:35:16.488670Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Created new semaphore 1 "Sem1" 2025-03-28T09:35:16.500427Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:145:2169], cookie=4029875158733688437) 2025-03-28T09:35:16.500726Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=111, session=1, semaphore="Sem1" count=2) 2025-03-28T09:35:16.500844Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2025-03-28T09:35:16.501001Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=222, session=2, semaphore="Sem1" count=2) 2025-03-28T09:35:16.501116Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:132:2158], cookie=333, session=3, semaphore="Sem1" count=1) 2025-03-28T09:35:16.512751Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=111) 2025-03-28T09:35:16.512818Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=222) 2025-03-28T09:35:16.512852Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:132:2158], cookie=333) 2025-03-28T09:35:16.513359Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:154:2178], cookie=10182301051163544533, name="Sem1") 2025-03-28T09:35:16.513424Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:154:2178], cookie=10182301051163544533) 2025-03-28T09:35:16.513741Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:157:2181], cookie=17419903882246986694, name="Sem1") 2025-03-28T09:35:16.513790Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:157:2181], 
cookie=17419903882246986694) 2025-03-28T09:35:16.513963Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:132:2158], cookie=444, name="Sem1") 2025-03-28T09:35:16.514027Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2025-03-28T09:35:16.514070Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2025-03-28T09:35:16.514110Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2025-03-28T09:35:16.525636Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:132:2158], cookie=444) 2025-03-28T09:35:16.526169Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:162:2186], cookie=1135469108876129094, name="Sem1") 2025-03-28T09:35:16.526248Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:162:2186], cookie=1135469108876129094) 2025-03-28T09:35:16.526569Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:165:2189], cookie=13888526880783874943, name="Sem1") 2025-03-28T09:35:16.526617Z node 5 :KESUS_TABLET DEBUG: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:165:2189], cookie=13888526880783874943) >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False |95.0%| [TA] $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.0%| [TA] {RESULT} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations >> Cdc::Drop[PqRunner] [GOOD] >> Cdc::Drop[YdsRunner] >> Cdc::Write[TopicRunner] [GOOD] >> Cdc::UpdateStream >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations [GOOD] >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] Test command err: 2025-03-28T09:35:10.577070Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:10.577363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:10.577403Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b22/r3tmp/tmp9lYMdh/pdisk_1.dat 2025-03-28T09:35:10.839288Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5854, node 1 2025-03-28T09:35:11.050844Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:11.050884Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:11.050905Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:11.051195Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:11.052763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:35:11.133295Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:11.133410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:11.146545Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9891 2025-03-28T09:35:11.613206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:13.916374Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:13.940107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:13.940193Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:13.977761Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:13.979475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:14.203598Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:14.204353Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:14.205077Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:14.205289Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:14.205672Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:14.205828Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:14.205993Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:14.206161Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:14.206286Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:14.364162Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:14.364297Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:14.376692Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:14.511300Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:14.547920Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:14.548033Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:14.570725Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:14.570869Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:14.571033Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:14.571085Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:14.571123Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:14.571162Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:14.571204Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:14.571244Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:14.571608Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:14.599112Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:14.599214Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:14.605213Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:14.609870Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:14.610078Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:14.615176Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-28T09:35:14.628697Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:14.628753Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:14.628853Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-28T09:35:14.639108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:14.644369Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:14.644473Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:14.808753Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:14.944986Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:15.021335Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:15.691316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:35:16.334589Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:16.514437Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-28T09:35:16.514489Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:35:16.514553Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2597:2951], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:35:16.515414Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2598:2952] 2025-03-28T09:35:16.515511Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2598:2952], schemeshard id = 72075186224037899 2025-03-28T09:35:17.645085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2725:3245], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:17.645200Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:17.657982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-03-28T09:35:17.946421Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3034:3293], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:17.946514Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:17.981929Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3039:3297]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:17.982094Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:17.982204Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-03-28T09:35:17.982260Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3042:3300] 2025-03-28T09:35:17.982313Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3042:3300] 2025-03-28T09:35:17.982785Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3043:3191] 2025-03-28T09:35:17.983000Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3042:3300], server id = [2:3043:3191], tablet id = 72075186224037894, status = OK 2025-03-28T09:35:17.983150Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:3043:3191], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:35:17.983197Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:35:17.983364Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:35:17.983412Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3039:3297], StatRequests.size() = 1 2025-03-28T09:35:17.996069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3047:3304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:17.996169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:17.996429Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3052:3309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:18.001493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-03-28T09:35:18.160199Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:35:18.160265Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:35:18.191947Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3042:3300], schemeshard count = 1 2025-03-28T09:35:18.575296Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3054:3311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-03-28T09:35:18.841618Z node 1 :TX_PROXY ERROR: Actor# [1:3179:3380] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:35:18.850270Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3202:3396]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:18.850500Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:35:18.850544Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3202:3396], StatRequests.size() = 1 2025-03-28T09:35:18.902350Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1p5xsbajsvbwa7fyqhgh5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTE2M2YzZWItNmE4ZmI3M2EtNDhlN2UzOTYtOGJlZGQ2ZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:35:18.972415Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72075186224037899 2025-03-28T09:35:19.317503Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3534:3464]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:19.317718Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:35:19.317753Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:3534:3464], StatRequests.size() = 1 2025-03-28T09:35:19.335286Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3543:3473]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:19.335402Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-03-28T09:35:19.335425Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:3543:3473], StatRequests.size() = 1 2025-03-28T09:35:19.376949Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqe1p78bb7vw8h2h146cr9a6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWIzOTAwMjEtZDE4YmExYTYtZTg1Yzc2MjYtMjNkYWFmZGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:35:19.409745Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3581:3449]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:19.412103Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:19.412197Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:35:19.412868Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:19.412925Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-28T09:35:19.412978Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:35:19.448025Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-03-28T09:35:19.448214Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-03-28T09:35:19.448419Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3606:3462]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:19.450229Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:35:19.450272Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:35:19.450477Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:35:19.450507Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-28T09:35:19.450544Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 3] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:35:19.452093Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-03-28T09:35:19.452287Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] Test command err: Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 3750b 40r} data 2915b + FlatIndex{7} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 3 0 620b {0, 1} | 3 39 620b {5, 7} + BTreeIndex{Empty, PageId: 3 RowCount: 40 DataSize: 620 GroupDataSize: 3130 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{3} Label{34 rev 1, 620b}, [0, +40)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} | 
ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b {0, 1} | 1 39 2466b {5, 7} + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, 
{Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 8474b 40r} data 6832b + FlatIndex{15} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 7 0 1036b {0, 1} | 7 39 1036b {5, 7} + BTreeIndex{Empty, PageId: 7 RowCount: 40 DataSize: 1036 GroupDataSize: 7438 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{7} Label{74 rev 1, 1036b}, [0, +40)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{20} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 6 12 122b {1, 8} | 7 14 122b {2, NULL} | 8 16 122b {2, 4} | 9 18 122b {2, 7} | 10 20 122b {2, 10} | 11 22 122b {3, 3} | 12 24 122b {3, 6} | 13 26 122b {3, 8} | 14 28 122b {4, NULL} | 15 30 122b {4, 4} | 16 32 122b {4, 7} | 17 34 122b {4, 10} | 18 36 122b {5, 3} | 19 38 122b {5, 6} | 19 39 122b {5, 7} + BTreeIndex{PageId: 21 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > {0, 4} | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > {0, 7} | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > {0, 10} | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > {1, 3} | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > {1, 6} | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > {1, 8} | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > {2, NULL} | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > {2, 4} | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > {2, 10} | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > {3, 3} | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > {3, 6} | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > {3, 8} | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > {4, NULL} | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > {4, 4} | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > {4, 7} | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > {4, 10} | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > {5, 3} | PageId: 18 RowCount: 38 DataSize: 2308 
ErasedRowCount: 0 | > {5, 6} | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_2 ... 
6:30:2062]) to queue queue_background_compaction 00000.321 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (insert task gen2-table-101-tablet-1 (56 by [16:30:2062])) 00000.321 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.325 DD| RESOURCE_BROKER: Finish task gen2-table-101-tablet-1 (56 by [16:30:2062]) (release resources {1, 0}) 00000.325 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 600.000000 to 300.000000 (remove task gen2-table-101-tablet-1 (56 by [16:30:2062])) 00000.326 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task bckg-block (987987987988 by [16:8:2055]) from queue queue_background_compaction 00000.326 DD| RESOURCE_BROKER: Assigning in-fly task bckg-block (987987987988 by [16:8:2055]) to queue queue_background_compaction 00000.326 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (insert task bckg-block (987987987988 by [16:8:2055])) 00000.326 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.327 DD| RESOURCE_BROKER: Submitted new background_compaction_gen0 task gen0-table-101-tablet-1 (63 by [16:30:2062]) priority=200 resources={1, 0} 00000.327 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (63 by [16:30:2062]) to queue queue_background_compaction 00000.327 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.329 DD| RESOURCE_BROKER: Update task gen0-table-101-tablet-1 (63 by [16:30:2062]) (priority=5 type=compaction_gen0 resources={1, 0} resubmit=0) 00000.329 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (63 by [16:30:2062]) to queue queue_compaction_gen0 00000.329 DD| RESOURCE_BROKER: Updated real resource usage for queue queue_compaction_gen0 from 0.000000 to 300.000000 00000.329 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (63 by [16:30:2062]) from queue queue_compaction_gen0 00000.329 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (63 by [16:30:2062]) to queue queue_compaction_gen0 00000.329 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.334 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (63 by [16:30:2062]) (release resources {1, 0}) 00000.334 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.345 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.356 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.366 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.377 DD| RESOURCE_BROKER: Update task gen1-table-101-tablet-1 (62 by [16:30:2062]) (priority=166 type=background_compaction_gen1 resources={1, 0} resubmit=0) 00000.377 DD| RESOURCE_BROKER: Assigning waiting task gen1-table-101-tablet-1 (62 by [16:30:2062]) to queue queue_background_compaction 00000.377 DD| RESOURCE_BROKER: Updated real resource usage for queue queue_background_compaction from 300.000000 to 600.000000 (in-fly consumption {1, 0}) 00000.377 DD| RESOURCE_BROKER: Skip queue queue_background_compaction due to exceeded limits 00000.377 II| TABLET_SAUSAGECACHE: Wakeup 1 00000.379 DD| RESOURCE_BROKER: Removing task gen1-table-101-tablet-1 (62 by [16:30:2062]) 00000.379 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.380 NN| TABLET_SAUSAGECACHE: Poison cache serviced 55 reqs hit {55 29100b} miss {0 0b} 00000.380 II| 
FAKE_ENV: Shut order, stopping 4 BS groups 00000.380 II| FAKE_ENV: DS.0 gone, left {9632b, 89}, put {69336b, 689} 00000.380 II| FAKE_ENV: DS.1 gone, left {49690b, 125}, put {120844b, 750} 00000.381 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.381 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.381 II| FAKE_ENV: All BS storage groups are stopped 00000.381 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 2.000m 00000.381 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 659}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:35:13.835466Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.934 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.934 NN| TABLET_SAUSAGECACHE: Poison cache serviced 353 reqs hit {1164 6970614b} miss {0 0b} 00000.935 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.935 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199548b, 2023} 00000.935 II| FAKE_ENV: DS.1 gone, left {2023431b, 4}, put {7269804b, 2026} 00000.935 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.935 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.935 II| FAKE_ENV: All BS storage groups are stopped 00000.935 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.935 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:35:14.779305Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.852 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.852 NN| TABLET_SAUSAGECACHE: Poison cache serviced 353 reqs hit {1164 6970614b} miss {0 0b} 00000.853 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.853 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199551b, 2023} 00000.853 II| FAKE_ENV: DS.1 gone, left {2023431b, 4}, put {7269821b, 2026} 00000.853 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.853 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.853 II| FAKE_ENV: All BS storage groups are stopped 00000.853 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.853 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:35:15.645084Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.832 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.833 NN| TABLET_SAUSAGECACHE: Poison cache serviced 299 reqs hit {1012 6947830b} miss {0 0b} 00000.833 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.833 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199545b, 2023} 00000.833 II| FAKE_ENV: DS.1 gone, left 
{2007010b, 4}, put {7211269b, 2026} 00000.834 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.834 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.834 II| FAKE_ENV: All BS storage groups are stopped 00000.834 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.834 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:35:16.487508Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.801 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.802 NN| TABLET_SAUSAGECACHE: Poison cache serviced 353 reqs hit {1164 6970614b} miss {0 0b} 00000.802 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.802 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199548b, 2023} 00000.802 II| FAKE_ENV: DS.1 gone, left {2013604b, 4}, put {7237892b, 2026} 00000.803 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.803 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.803 II| FAKE_ENV: All BS storage groups are stopped 00000.803 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.803 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:35:17.299356Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.802 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.803 NN| TABLET_SAUSAGECACHE: Poison cache serviced 299 reqs hit {1012 6947830b} miss {0 0b} 00000.803 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.803 II| FAKE_ENV: DS.1 gone, left {2007005b, 4}, put {7211300b, 2026} 00000.803 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.803 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.803 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199551b, 2023} 00000.803 II| FAKE_ENV: All BS storage groups are stopped 00000.803 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.803 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:35:18.111633Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.784 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.785 NN| TABLET_SAUSAGECACHE: Poison cache serviced 309 reqs hit {1118 6955338b} miss {2 9773b} 00000.785 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.785 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {199548b, 2023} 00000.785 II| FAKE_ENV: DS.1 gone, left {2023431b, 4}, put {7269804b, 2026} 00000.786 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.786 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 
0} 00000.786 II| FAKE_ENV: All BS storage groups are stopped 00000.786 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.786 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:35:18.906206Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00001.257 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00001.258 NN| TABLET_SAUSAGECACHE: Poison cache serviced 651 reqs hit {780 4008302b} miss {1002 6916040b} 00001.258 II| FAKE_ENV: Shut order, stopping 4 BS groups 00001.258 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {200301b, 2033} 00001.258 II| FAKE_ENV: DS.1 gone, left {2023563b, 4}, put {9254533b, 2039} 00001.259 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00001.259 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00001.259 II| FAKE_ENV: All BS storage groups are stopped 00001.259 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00001.259 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped >> TraverseDatashard::TraverseOneTableServerless |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> Cdc::Drop[YdsRunner] [GOOD] >> Cdc::Drop[TopicRunner] >> Cdc::UpdateStream [GOOD] >> Cdc::UpdateShardCount >> TraverseDatashard::TraverseTwoTables [GOOD] >> Viewer::JsonStorageListingV2NodeIdFilter [GOOD] >> Viewer::JsonStorageListingV2PDiskIdFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] Test command err: 2025-03-28T09:35:15.098105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
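[Editor's note] Each FAKE_ENV shutdown block above summarizes per-group storage traffic as "DS.<n> gone, left {<bytes>b, <blobs>}, put {<bytes>b, <blobs>}". When comparing runs it can help to pull these out as numbers; below is a small, hypothetical post-processing helper (plain C++ with std::regex), log mining only, not part of YDB.

#include <iostream>
#include <regex>
#include <string>

int main() {
    const std::string log =
        "DS.0 gone, left {42b, 1}, put {199548b, 2023} "
        "DS.1 gone, left {2023431b, 4}, put {7269804b, 2026}";
    const std::regex re(
        R"(DS\.(\d+) gone, left \{(\d+)b, (\d+)\}, put \{(\d+)b, (\d+)\})");
    long long putBytes = 0;
    for (auto it = std::sregex_iterator(log.begin(), log.end(), re);
         it != std::sregex_iterator(); ++it) {
        const std::smatch& m = *it;
        std::cout << "group " << m[1] << ": left " << m[2] << "b in " << m[3]
                  << " blobs, put " << m[4] << "b in " << m[5] << " blobs\n";
        putBytes += std::stoll(m[4]);   // aggregate bytes written across groups
    }
    std::cout << "total bytes written: " << putBytes << "\n";
}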
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:15.098295Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:15.098333Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b12/r3tmp/tmpZNEpkl/pdisk_1.dat 2025-03-28T09:35:15.368383Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9875, node 1 2025-03-28T09:35:15.557680Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:15.557721Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:15.557744Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:15.558058Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:15.559734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:35:15.638723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:15.638849Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:15.651617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23117 2025-03-28T09:35:16.134474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:18.356840Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:18.380409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:18.380488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:18.427052Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:18.428703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:18.635102Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.635553Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.636010Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.636138Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.636318Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.636369Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.636445Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.636499Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.636567Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.782279Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:18.782365Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:18.795273Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:18.906259Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:18.939559Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:18.939633Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:18.960636Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:18.962090Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:18.962245Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:18.962287Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:18.962335Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:18.962380Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:18.962424Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:18.962476Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:18.962874Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:18.987041Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1859:2587] 2025-03-28T09:35:18.987610Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:18.993849Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:18.993893Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:18.993948Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:18.996086Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:18.996168Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1895:2609], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:19.005700Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1935:2627] 2025-03-28T09:35:19.006112Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1935:2627], schemeshard id = 72075186224037897 2025-03-28T09:35:19.032585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:19.037617Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:19.037727Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:19.175001Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:19.395283Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:19.459655Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:20.249203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2234:3070], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:20.249337Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:20.262356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:20.642888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2539:3119], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:20.643020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:20.644039Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2544:3123]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:20.644202Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:20.644261Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2546:3125] 2025-03-28T09:35:20.644305Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2546:3125] 2025-03-28T09:35:20.644748Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2547:2992] 2025-03-28T09:35:20.644936Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2546:3125], server id = [2:2547:2992], tablet id = 72075186224037894, status = OK 2025-03-28T09:35:20.645075Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2547:2992], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:35:20.645144Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:35:20.645362Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:35:20.645426Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2544:3123], StatRequests.size() = 1 2025-03-28T09:35:20.659033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2551:3129], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:20.659132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:20.659375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2556:3134], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:20.664406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-28T09:35:20.816002Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:35:20.816081Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:35:20.890801Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2546:3125], schemeshard count = 1 2025-03-28T09:35:21.234905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2558:3136], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-28T09:35:21.394460Z node 1 :TX_PROXY ERROR: Actor# [1:2684:3209] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:35:21.406000Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2707:3225]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:21.406154Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:35:21.406193Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:2707:3225], StatRequests.size() = 1 2025-03-28T09:35:21.481118Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1p8h20r1yc0txdm3yz7ra, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjFhNDdjMWYtMTE2ZGUxYmYtZTZiNTk0MTktYjgxMDBlNGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:35:21.583729Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037897 2025-03-28T09:35:21.967822Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3054:3292]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:21.968023Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:35:21.968069Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 3, ReplyToActorId = [1:3054:3292], StatRequests.size() = 1 2025-03-28T09:35:21.987823Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3063:3301]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:21.988020Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2025-03-28T09:35:21.988074Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 4, ReplyToActorId = [1:3063:3301], StatRequests.size() = 1 2025-03-28T09:35:22.040374Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1p9v4cwxkhj74tgsjhmgx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjFmNmU0NTktN2NlNTU3NjktODkyMDhhOTctNGZmOGNjZTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:35:22.085288Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3103:3263]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:22.087277Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:22.087320Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:35:22.087515Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:22.087551Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:35:22.087586Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:35:22.098896Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-03-28T09:35:22.099125Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 2025-03-28T09:35:22.099460Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3128:3276]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:22.101453Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:35:22.101498Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:35:22.101842Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:35:22.101876Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:35:22.101912Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:35:22.103638Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-03-28T09:35:22.103898Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTable [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave >> TraverseColumnShard::TraverseColumnTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTable [GOOD] Test command err: 2025-03-28T09:35:14.553521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:14.553700Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:14.553731Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b15/r3tmp/tmpxUKvG7/pdisk_1.dat 2025-03-28T09:35:14.801835Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1419, node 1 2025-03-28T09:35:14.983387Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:14.983425Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:14.983442Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:14.983696Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:14.985028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:35:15.060996Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:15.061093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:15.073990Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16256 2025-03-28T09:35:15.564086Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:17.838932Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:17.863433Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:17.863526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:17.900229Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:17.901619Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:18.116157Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.116735Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.117335Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.117486Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.117744Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.117825Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.117898Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.118004Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.118083Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:18.274002Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:18.274084Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:18.286362Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:18.421889Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:18.469579Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:18.469660Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:18.491256Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:18.491373Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:18.491525Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:18.491593Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:18.491639Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:18.491680Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:18.491722Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:18.491759Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:18.492111Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:18.517979Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:18.518088Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:18.523951Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:18.528703Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:18.528926Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:18.534164Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:18.548307Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:18.548374Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:18.548436Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:18.559299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:18.564867Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:18.564980Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:18.701260Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:18.861203Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:18.938917Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:19.768026Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:19.768125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:19.781625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:19.889689Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:19.889938Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:19.890258Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:19.890442Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:19.890576Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:19.890715Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:19.890848Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:19.890944Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:19.891031Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:19.891116Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:19.891211Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:19.891295Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:19.912624Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:19.912717Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:35:19.912823Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:35:19.912856Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:35:19.913060Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:35:19.913111Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:35:19.913216Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:35:19.913249Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:35:19.913326Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:35:19.913361Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:35:19.913397Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:35:19.913426Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:35:19.914042Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:35:19.914093Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:35:19.914255Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:35:19.914301Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:35:19.914435Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:35:19.914463Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:35:19.914629Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:35:19.914660Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:35:19.914774Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:35:19.914819Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:35:20.027193Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715659; 2025-03-28T09:35:20.976066Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2588:3124], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
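[Editor's note] The TX_COLUMNSHARD startup trace above shows the tablet's normalizer chain: every registered normalizer ("normalizer_register") is later entered ("normalizer_switched") and completed ("normalizer_finished") strictly in order, ending with "normalization_finished". A toy version of that sequencing is sketched below; the names and the no-op repair steps are illustrative only, not the actual column shard code.

#include <functional>
#include <iostream>
#include <string>
#include <vector>

struct TNormalizer {
    std::string ClassName;
    std::function<void()> Run;  // repair step applied to local tablet data
};

int main() {
    std::vector<TNormalizer> chain = {
        {"Granules",      [] { /* restore granule metadata */ }},
        {"Chunks",        [] { /* restore chunk metadata */ }},
        {"TablesCleaner", [] { /* drop leftovers of removed tables */ }},
    };
    for (size_t i = 0; i < chain.size(); ++i) {
        std::cout << "event=normalizer_switched;description=CLASS_NAME="
                  << chain[i].ClassName << ";\n";
        chain[i].Run();
        std::cout << "event=normalizer_finished;description=CLASS_NAME="
                  << chain[i].ClassName << ";id=" << i << ";\n";
    }
    std::cout << "event=normalization_finished;\n";
}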
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:20.976220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:20.978751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715660:0, at schemeshard: 72075186224037897 2025-03-28T09:35:21.083335Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715660;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715660; 2025-03-28T09:35:21.981653Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2684:3169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:21.981836Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:21.986143Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037897 2025-03-28T09:35:22.023442Z node 2 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000015s FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> Cdc::Drop[TopicRunner] [GOOD] >> Cdc::DescribeStream ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2025-03-28T09:33:31.866167Z :WriteRAW INFO: Random seed for debugging is 1743154411866127 2025-03-28T09:33:32.286805Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791194482634477:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:32.291787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d0d/r3tmp/tmph0NPbE/pdisk_1.dat 2025-03-28T09:33:32.649269Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:33:32.636599Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:33:32.763206Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:33.007598Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:33.049436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:33.049730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:33.053403Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:33.053463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:33.056861Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:33.060948Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:33:33.063524Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 30284, node 1 2025-03-28T09:33:33.332500Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001d0d/r3tmp/yandexeQoJ8O.tmp 2025-03-28T09:33:33.332530Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001d0d/r3tmp/yandexeQoJ8O.tmp 2025-03-28T09:33:33.332693Z node 1 :NET_CLASSIFIER WARN: 
successfully initialized from file: /home/runner/.ya/build/build_root/2909/001d0d/r3tmp/yandexeQoJ8O.tmp 2025-03-28T09:33:33.332823Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:33:33.675831Z INFO: TTestServer started on Port 14936 GrpcPort 30284 TClient is connected to server localhost:14936 PQClient connected to localhost:30284 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:34.229035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-28T09:33:36.561829Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486791209970838357:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:36.562149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:36.562456Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486791209970838392:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:36.568739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-28T09:33:36.595965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486791209970838394:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-28T09:33:36.656806Z node 2 :TX_PROXY ERROR: Actor# [2:7486791209970838422:2131] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:37.078530Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486791209970838437:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:33:37.078807Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YmM5YTRlY2YtMzg1NmRjZjYtYzA4MjcyNTAtZmUyMjUzMGU=, ActorId: [2:7486791209970838354:2307], ActorState: ExecuteState, TraceId: 01jqe1k2xb74wpn4hdnwrjf6nk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:33:37.078327Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486791211662504753:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:33:37.087185Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGFjZDI5NTItZGZlODUxNmItMjA2NmU1YzAtZTBhNGI1NWE=, ActorId: [1:7486791211662504720:2337], ActorState: ExecuteState, TraceId: 01jqe1k33ad89hfdave2dqna6k, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:33:37.097263Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:33:37.097398Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:33:37.168970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2025-03-28T09:33:37.286845Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791194482634477:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:37.286937Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:37.431964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:37.604923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:30284", true, true, 1000); 2025-03-28T09:33:38.036239Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976720664. Ctx: { TraceId: 01jqe1k41r26nja7355t50k0jh, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzM5ZTdkZjYtZjBhOGE5OWQtZTY2MDcxMDAtYWUxN2VlMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486791220252439760:2980] === CheckClustersList. 
Ok 2025-03-28T09:33:43.910421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:30284 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-28T09:33:44.155044Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:30284 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976720679 SchemeShardTabletId: 72057594046644480 PathId: 13 } ErrorCode: OK ... Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-03-28T09:35:23.533839Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:35:23.533848Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-28T09:35:23.533862Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7486791670584685334:2512] (SourceId=src, PreferedPartition=(NULL)) StartKqpSession 2025-03-28T09:35:23.536109Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7486791670584685334:2512] (SourceId=src, PreferedPartition=(NULL)) Select from the table 2025-03-28T09:35:23.672944Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7486791670584685334:2512] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2025-03-28T09:35:23.673162Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7486791670584685386:2512] connected; active server actors: 1 2025-03-28T09:35:23.673210Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7486791670584685334:2512] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2025-03-28T09:35:23.673226Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7486791670584685334:2512] (SourceId=src, PreferedPartition=(NULL)) Update the table 2025-03-28T09:35:23.673420Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7486791670584685386:2512] disconnected; active server actors: 1 
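[Editor's note] The PQ_PARTITION_CHOOSER trace above implements source-id stickiness: the chooser hashes the source id, looks the (Hash, Topic, SourceId) key up in /Root/PQ/SourceIdMeta2, asks the read balancer (PQRB) for a partition only on a miss, and upserts the choice back, which is what the DECLARE/UPSERT/UPDATE queries in the log do. The sketch below mimics that flow with an in-memory map standing in for the table; the hash function and the balancer stub are assumptions, not YDB's actual ones.

#include <cstdint>
#include <functional>
#include <iostream>
#include <map>
#include <string>
#include <tuple>

using TKey = std::tuple<uint32_t, std::string, std::string>;  // Hash, Topic, SourceId

uint32_t SourceIdHash(const std::string& sourceId) {
    // stand-in hash; the real chooser uses its own hashing scheme
    return static_cast<uint32_t>(std::hash<std::string>{}(sourceId));
}

uint32_t AskBalancerForPartition() {
    return 0;  // stub for the PQRB round trip ("Received partition 0 from PQRB")
}

int main() {
    std::map<TKey, uint32_t> sourceIdMeta;  // stands in for /Root/PQ/SourceIdMeta2
    const std::string topic = "rt3.dc1--test-topic";
    const std::string sourceId = "src";
    TKey key{SourceIdHash(sourceId), topic, sourceId};

    auto it = sourceIdMeta.find(key);       // "Select from the table"
    uint32_t partition = (it != sourceIdMeta.end()) ? it->second
                                                    : AskBalancerForPartition();
    sourceIdMeta[key] = partition;          // "Update the table" (the UPSERT above)
    std::cout << "SourceId=" << sourceId << " -> partition " << partition << "\n";
}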
2025-03-28T09:35:23.673445Z node 15 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--test-topic] pipe [15:7486791670584685386:2512] disconnected no session 2025-03-28T09:35:23.787054Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7486791670584685334:2512] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-03-28T09:35:23.787098Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7486791670584685334:2512] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2025-03-28T09:35:23.787119Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7486791670584685334:2512] (SourceId=src, PreferedPartition=(NULL)) Start idle 2025-03-28T09:35:23.787153Z node 15 :PQ_WRITE_PROXY DEBUG: ProceedPartition. session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2025-03-28T09:35:23.788020Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server connected, pipe [15:7486791670584685404:2512], now have 1 active actors on pipe 2025-03-28T09:35:23.788082Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 16, Generation: 1 2025-03-28T09:35:23.788252Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:35:23.788295Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:35:23.788405Z node 16 :PERSQUEUE INFO: new Cookie src|afd219c4-d2451146-92ba03c9-e5c26480_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2025-03-28T09:35:23.788532Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-28T09:35:23.788633Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:35:23.789051Z node 16 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2025-03-28T09:35:23.789081Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2025-03-28T09:35:23.789179Z node 16 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:35:23.789402Z node 15 :PQ_WRITE_PROXY INFO: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|afd219c4-d2451146-92ba03c9-e5c26480_0 2025-03-28T09:35:23.790095Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743154523790 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:35:23.790204Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|afd219c4-d2451146-92ba03c9-e5c26480_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-28T09:35:23.790377Z :INFO: [] MessageGroupId [src] SessionId [src|afd219c4-d2451146-92ba03c9-e5c26480_0] Write session: close. 
Timeout = 0 ms 2025-03-28T09:35:23.790429Z :INFO: [] MessageGroupId [src] SessionId [src|afd219c4-d2451146-92ba03c9-e5c26480_0] Write session will now close 2025-03-28T09:35:23.790491Z :DEBUG: [] MessageGroupId [src] SessionId [src|afd219c4-d2451146-92ba03c9-e5c26480_0] Write session: aborting 2025-03-28T09:35:23.790974Z :INFO: [] MessageGroupId [src] SessionId [src|afd219c4-d2451146-92ba03c9-e5c26480_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:35:23.791024Z :DEBUG: [] MessageGroupId [src] SessionId [src|afd219c4-d2451146-92ba03c9-e5c26480_0] Write session: destroy 2025-03-28T09:35:23.791693Z node 15 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 1 sessionId: src|afd219c4-d2451146-92ba03c9-e5c26480_0 grpc read done: success: 0 data: 2025-03-28T09:35:23.791721Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|afd219c4-d2451146-92ba03c9-e5c26480_0 grpc read failed 2025-03-28T09:35:23.791769Z node 15 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 1 sessionId: src|afd219c4-d2451146-92ba03c9-e5c26480_0 2025-03-28T09:35:23.791787Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 1 sessionId: src|afd219c4-d2451146-92ba03c9-e5c26480_0 is DEAD 2025-03-28T09:35:23.792083Z node 15 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:35:23.792447Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [15:7486791670584685404:2512] destroyed 2025-03-28T09:35:23.792496Z node 16 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-28T09:35:23.824165Z :INFO: [/Root] [/Root] [93b6890a-8b92358c-3652a5a5-34ece768] Starting read session 2025-03-28T09:35:23.824216Z :DEBUG: [/Root] [/Root] [93b6890a-8b92358c-3652a5a5-34ece768] Starting cluster discovery 2025-03-28T09:35:23.824466Z :INFO: [/Root] [/Root] [93b6890a-8b92358c-3652a5a5-34ece768] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23734: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23734
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23734. " 2025-03-28T09:35:23.824509Z :DEBUG: [/Root] [/Root] [93b6890a-8b92358c-3652a5a5-34ece768] Restart cluster discovery in 0.005212s 2025-03-28T09:35:23.830808Z :DEBUG: [/Root] [/Root] [93b6890a-8b92358c-3652a5a5-34ece768] Starting cluster discovery 2025-03-28T09:35:23.831089Z :INFO: [/Root] [/Root] [93b6890a-8b92358c-3652a5a5-34ece768] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23734: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23734
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23734. " 2025-03-28T09:35:23.831136Z :DEBUG: [/Root] [/Root] [93b6890a-8b92358c-3652a5a5-34ece768] Restart cluster discovery in 0.016362s 2025-03-28T09:35:23.847857Z :DEBUG: [/Root] [/Root] [93b6890a-8b92358c-3652a5a5-34ece768] Starting cluster discovery 2025-03-28T09:35:23.848085Z :INFO: [/Root] [/Root] [93b6890a-8b92358c-3652a5a5-34ece768] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23734: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23734
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23734. " 2025-03-28T09:35:23.848131Z :DEBUG: [/Root] [/Root] [93b6890a-8b92358c-3652a5a5-34ece768] Restart cluster discovery in 0.023512s 2025-03-28T09:35:23.871877Z :DEBUG: [/Root] [/Root] [93b6890a-8b92358c-3652a5a5-34ece768] Starting cluster discovery 2025-03-28T09:35:23.872115Z :NOTICE: [/Root] [/Root] [93b6890a-8b92358c-3652a5a5-34ece768] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23734: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23734
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23734. " } 2025-03-28T09:35:23.872377Z :NOTICE: [/Root] [/Root] [93b6890a-8b92358c-3652a5a5-34ece768] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:23734: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23734
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:23734. " } 2025-03-28T09:35:23.872553Z :INFO: [/Root] [/Root] [93b6890a-8b92358c-3652a5a5-34ece768] Closing read session. Close timeout: 0.000000s 2025-03-28T09:35:23.872686Z :NOTICE: [/Root] [/Root] [93b6890a-8b92358c-3652a5a5-34ece768] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:35:24.191003Z node 15 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [15:7486791674879652737:2528]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 0 2025-03-28T09:35:24.223929Z node 15 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [15:7486791674879652737:2528]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-28T09:35:24.271985Z node 15 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [15:7486791674879652737:2528]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-28T09:35:24.336960Z node 15 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [15:7486791674879652737:2528]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 2025-03-28T09:35:24.414357Z node 15 :KQP_COMPUTE WARN: TxId: 281474976710687, task: 1, CA Id [15:7486791674879652737:2528]. Got EvDeliveryProblem, TabletId: 72075186224037890, NotDelivered: 1 >> TVersions::Wreck1 [GOOD] >> TVersions::Wreck1Reverse >> Cdc::UpdateShardCount [GOOD] >> Cdc::UpdateRetentionPeriod >> TLocksFatTest::PointSetNotBreak >> TFlatTest::ShardFreezeRejectBadProtobuf >> BuildStatsHistogram::Many_Mixed [GOOD] >> BuildStatsHistogram::Many_Serial >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount >> Cdc::DescribeStream [GOOD] >> Cdc::DecimalKey >> TFlatTest::ShardFreezeRejectBadProtobuf [GOOD] >> TFlatTest::SelectRangeSkipNullKeys >> Cdc::UpdateRetentionPeriod [GOOD] >> Cdc::SupportedTypes >> TColumnShardTestSchema::ExternalTTL_Types [GOOD] >> TFlatTest::SelectRangeSkipNullKeys [GOOD] >> TLocksFatTest::PointSetNotBreak [GOOD] >> TLocksFatTest::PointSetRemove >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeSkipNullKeys [GOOD] Test command err: 2025-03-28T09:35:28.128316Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791693438552463:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:28.128380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f29/r3tmp/tmphXdyMV/pdisk_1.dat 2025-03-28T09:35:28.346459Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:28.349906Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:28.350043Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:28.351853Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9047 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:28.547416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:28.559788Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:28.667843Z node 1 :TX_PROXY ERROR: Actor# [1:7486791693438553149:2361] txid# 281474976710659, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-03-28T09:35:28.669902Z node 1 :TX_PROXY ERROR: Actor# [1:7486791693438553162:2367] txid# 281474976710660, issues: { message: "Unexpected freeze state" severity: 1 } Error 128: Unexpected freeze state 2025-03-28T09:35:28.671802Z node 1 :TX_PROXY ERROR: Actor# [1:7486791693438553168:2372] txid# 281474976710661, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-03-28T09:35:28.673630Z node 1 :TX_PROXY ERROR: Actor# [1:7486791693438553174:2377] txid# 281474976710662, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2025-03-28T09:35:30.173729Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791700152274423:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:30.173783Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f29/r3tmp/tmpXOgQHu/pdisk_1.dat 2025-03-28T09:35:30.246129Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:30.298030Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:30.298120Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:30.299719Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4780 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:30.382533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:30.401891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... >> TraverseDatashard::TraverseOneTableServerless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExternalTTL_Types [GOOD] Test command err: 2025-03-28T09:32:43.219009Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:32:43.298406Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:32:43.302479Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:32:43.302868Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:32:43.324857Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:32:43.325133Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:32:43.332800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:32:43.332999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:32:43.333241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:32:43.333354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:32:43.333452Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:32:43.333559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:32:43.333655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:32:43.333753Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:32:43.333893Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:32:43.334003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:32:43.334090Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:32:43.334211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:32:43.358027Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:32:43.362570Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:32:43.362909Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:32:43.362961Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:32:43.363143Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:32:43.363303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:32:43.363373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:32:43.363424Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:32:43.363535Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:32:43.363599Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:32:43.363640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:32:43.363669Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:32:43.363819Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:32:43.363884Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:32:43.363945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:32:43.363974Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:32:43.364082Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:32:43.364135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:32:43.364187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:32:43.364221Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:32:43.364286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:32:43.364322Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:32:43.364346Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:32:43.364412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:32:43.364460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:32:43.364486Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:32:43.364851Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-03-28T09:32:43.364922Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=25; 2025-03-28T09:32:43.364989Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-03-28T09:32:43.365053Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-03-28T09:32:43.365248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:32:43.365304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:32:43.365336Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:32:43.365506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:32:43.365545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:32:43.365586Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:32:43.365770Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:32:43.365809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:32:43.365837Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:32:43.366019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:32:43.366060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:32:43.366088Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T0 ... 
);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:35:31.649660Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:35:31.649688Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:35:31.649718Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:35:31.649789Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:35:31.649852Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: uint64;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:35:31.649891Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:35:31.649979Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=71; 2025-03-28T09:35:31.650007Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=568;num_rows=71;batch_columns=saved_at; 2025-03-28T09:35:31.650100Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[4:593:2609];bytes=568;rows=71;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2025-03-28T09:35:31.650173Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:35:31.650258Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:35:31.650337Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:35:31.650407Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:35:31.650469Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:35:31.650523Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:35:31.650544Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: Scan [4:600:2616] finished for tablet 9437184 2025-03-28T09:35:31.650897Z node 4 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[4:593:2609];stats={"p":[{"events":["f_bootstrap"],"t":0.203},{"events":["f_ProduceResults"],"t":1.443},{"events":["l_bootstrap"],"t":2.875},{"events":["f_processing","f_task_result"],"t":2.893},{"events":["l_task_result"],"t":19.601},{"events":["f_ack"],"t":19.633},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":24.243}],"full":{"a":1743154507407056,"name":"_full_task","f":1743154507407056,"d_finished":0,"c":0,"l":1743154531650579,"d":24243523},"events":[{"name":"bootstrap","f":1743154507610354,"d_finished":2671704,"c":1,"l":1743154510282058,"d":2671704},{"a":1743154531650396,"name":"ack","f":1743154527040519,"d_finished":4559152,"c":903,"l":1743154531650354,"d":4559335},{"a":1743154531650389,"name":"processing","f":1743154510300767,"d_finished":16803903,"c":4515,"l":1743154531650356,"d":16804093},{"name":"ProduceResults","f":1743154508850379,"d_finished":8327584,"c":5420,"l":1743154531650533,"d":8327584},{"a":1743154531650534,"name":"Finish","f":1743154531650534,"d_finished":0,"c":0,"l":1743154531650579,"d":45},{"name":"task_result","f":1743154510300797,"d_finished":12181331,"c":3612,"l":1743154527008189,"d":12181331}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:35:31.650946Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[4:593:2609];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:35:31.651251Z node 4 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[4:593:2609];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.203},{"events":["f_ProduceResults"],"t":1.443},{"events":["l_bootstrap"],"t":2.875},{"events":["f_processing","f_task_result"],"t":2.893},{"events":["l_task_result"],"t":19.601},{"events":["f_ack"],"t":19.633},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":24.243}],"full":{"a":1743154507407056,"name":"_full_task","f":1743154507407056,"d_finished":0,"c":0,"l":1743154531650968,"d":24243912},"events":[{"name":"bootstrap","f":1743154507610354,"d_finished":2671704,"c":1,"l":1743154510282058,"d":2671704},{"a":1743154531650396,"name":"ack","f":1743154527040519,"d_finished":4559152,"c":903,"l":1743154531650354,"d":4559724},{"a":1743154531650389,"name":"processing","f":1743154510300767,"d_finished":16803903,"c":4515,"l":1743154531650356,"d":16804482},{"name":"ProduceResults","f":1743154508850379,"d_finished":8327584,"c":5420,"l":1743154531650533,"d":8327584},{"a":1743154531650534,"name":"Finish","f":1743154531650534,"d_finished":0,"c":0,"l":1743154531650968,"d":434},{"name":"task_result","f":1743154510300797,"d_finished":12181331,"c":3612,"l":1743154527008189,"d":12181331}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:35:31.651307Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:35:07.360524Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=903;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7037528;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7037528;selected_rows=0; 2025-03-28T09:35:31.651334Z node 4 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:35:31.651479Z node 4 :TX_COLUMNSHARD_SCAN INFO: SelfId=[4:600:2616];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; >> Cdc::DecimalKey [GOOD] >> Cdc::DropColumn ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTableServerless [GOOD] Test command err: 2025-03-28T09:35:24.574655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:24.574854Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:24.574900Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b0f/r3tmp/tmpqiXWob/pdisk_1.dat 2025-03-28T09:35:24.830985Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12740, node 1 2025-03-28T09:35:25.021378Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:25.021425Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:25.021448Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:25.021766Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:25.023481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:35:25.107305Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:25.107418Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:25.120558Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11115 2025-03-28T09:35:25.611005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:27.948218Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:27.982121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:27.982224Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:28.030166Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:28.031714Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:28.245390Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:28.246081Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:28.246651Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:28.246865Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:28.247128Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:28.247211Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:28.247292Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:28.247378Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:28.247512Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:28.404024Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:28.404156Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:28.416907Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:28.536257Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:28.570109Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:28.570187Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:28.590853Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:28.592433Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:28.592591Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:28.592629Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:28.592663Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:28.592701Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:28.592754Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:28.592791Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:28.593178Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:28.616550Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1859:2587] 2025-03-28T09:35:28.617088Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-28T09:35:28.623004Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:28.623044Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:28.623088Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-28T09:35:28.625230Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:28.625323Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1895:2609], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:28.634399Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1935:2627] 2025-03-28T09:35:28.634726Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1935:2627], schemeshard id = 72075186224037897 2025-03-28T09:35:28.660321Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:28.669729Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:28.669852Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:28.832042Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:29.013460Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:29.057313Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:29.675243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:35:30.271393Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:30.418374Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-28T09:35:30.418448Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:35:30.418530Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2593:2946], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:35:30.419730Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2597:2950] 2025-03-28T09:35:30.420192Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2597:2950], schemeshard id = 72075186224037899 2025-03-28T09:35:31.432661Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2728:3247], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:31.432806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:31.446702Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-03-28T09:35:31.729907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3033:3296], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:31.730044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:31.764362Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3038:3300]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:31.764533Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:31.764645Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2025-03-28T09:35:31.764685Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:3041:3303] 2025-03-28T09:35:31.764738Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:3041:3303] 2025-03-28T09:35:31.765151Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:3042:3189] 2025-03-28T09:35:31.765322Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:3041:3303], server id = [2:3042:3189], tablet id = 72075186224037894, status = OK 2025-03-28T09:35:31.765465Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:3042:3189], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:35:31.765519Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:35:31.765684Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:35:31.765729Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:3038:3300], StatRequests.size() = 1 2025-03-28T09:35:31.778171Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3046:3307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:31.778290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:31.778617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3051:3312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:31.783139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-03-28T09:35:31.908797Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:35:31.908854Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:35:31.952555Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:3041:3303], schemeshard count = 1 2025-03-28T09:35:32.221807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3053:3314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-03-28T09:35:32.483171Z node 1 :TX_PROXY ERROR: Actor# [1:3176:3383] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:35:32.492982Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3199:3399]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:32.493222Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:35:32.493259Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [1:3199:3399], StatRequests.size() = 1 2025-03-28T09:35:32.542556Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1pkcg7xgyhavz47vnc4k7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmVmMmU5Ni1hMzA1M2MxNS0yMDQzYjFhYS05NWUzMzEy, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:35:32.590060Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3237:3243]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:32.591981Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:32.592025Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:35:32.592372Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:32.592419Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-28T09:35:32.592456Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:35:32.602806Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-03-28T09:35:32.602985Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 1 >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup >> TFlatTest::SplitInvalidPath >> TLocksTest::GoodSameKeyLock >> TColumnShardTestSchema::EnableColdTiersAfterTtl >> CompressExecutor::TestExecutorMemUsage [GOOD] >> TFlatTest::SplitInvalidPath [GOOD] >> TFlatTest::SplitThenMerge >> TLocksFatTest::PointSetRemove [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> CompressExecutor::TestExecutorMemUsage [GOOD] Test command err: 2025-03-28T09:33:31.858428Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1743154411858396 2025-03-28T09:33:32.334548Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791194016602145:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:32.334608Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:32.392110Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791191847650879:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:32.392228Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:32.553891Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ce3/r3tmp/tmpkpoIgu/pdisk_1.dat 2025-03-28T09:33:32.563517Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:33:33.023985Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:33.048375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:33.048507Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:33.051879Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:33.051959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:33.060497Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:33.061244Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:33:33.067424Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1114, node 1 2025-03-28T09:33:33.351431Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001ce3/r3tmp/yandexNTz2kO.tmp 2025-03-28T09:33:33.351466Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001ce3/r3tmp/yandexNTz2kO.tmp 2025-03-28T09:33:33.351640Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001ce3/r3tmp/yandexNTz2kO.tmp 2025-03-28T09:33:33.351767Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:33:33.671861Z INFO: TTestServer started on Port 9061 GrpcPort 1114 TClient is connected to server localhost:9061 PQClient connected to localhost:1114 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:33:34.154273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:33:34.322214Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:33:36.549832Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486791209027520378:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:36.549910Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486791209027520396:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:36.549982Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:36.595054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-03-28T09:33:36.665278Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486791209027520400:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-03-28T09:33:36.994531Z node 2 :TX_PROXY ERROR: Actor# [2:7486791209027520429:2131] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:37.090915Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486791211196472410:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:33:37.099768Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2YyM2I1OTQtNDE0M2M3OGUtZDQzYTViYTAtZmEyY2MzNmQ=, ActorId: [1:7486791211196472355:2337], ActorState: ExecuteState, TraceId: 01jqe1k306ewd7h0psp2gnrb86, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:33:37.096769Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486791209027520436:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:33:37.106970Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmYwN2M3MmQtMjgwNTMzZDQtNjZhNGRjZjEtNjFjOGY1OGE=, ActorId: [2:7486791209027520361:2308], ActorState: ExecuteState, TraceId: 01jqe1k2wt6gqbj2qkaj02mcwh, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:33:37.114171Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:33:37.114901Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:33:37.169466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:33:37.334626Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791194016602145:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:37.334750Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:37.395587Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486791191847650879:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:37.395663Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:37.421889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:37.605941Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:1114", true, true, 1000); 2025-03-28T09:33:38.036537Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. 
Ctx: { TraceId: 01jqe1k42mdgsw4c50h1gcgpen, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTk5MzA4ODEtNGE1NjJlODEtZDkyYjk3MDAtNGI3ZDVkNzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486791219786407427:2981] === CheckClustersList. Ok 2025-03-28T09:33:44.160471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480 waiting... PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:1114 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-28T09:33:44.273940Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:1114 MetaRequest { CmdCreateTopic { Top ... st { topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" } 2025-03-28T09:35:33.939398Z node 15 :PQ_WRITE_PROXY INFO: session request cookie: 3 topic: "test-topic" message_group_id: "test-message-group-id" preferred_cluster: "dc1" from ipv6:[::1]:52582 2025-03-28T09:35:33.939424Z node 15 :PQ_WRITE_PROXY INFO: write session: cookie=3 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:52582 proto=v1 topic=test-topic durationSec=0 2025-03-28T09:35:33.939437Z node 15 :PQ_WRITE_PROXY INFO: init check schema 2025-03-28T09:35:33.940950Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: describe result for acl check 2025-03-28T09:35:33.941086Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint32; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `/Root/PQ/SourceIdMeta2` WHERE Hash == $Hash AND Topic == $Topic AND SourceId == $SourceId; 2025-03-28T09:35:33.941103Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64;DECLARE $SeqNo AS Uint64; UPSERT INTO `/Root/PQ/SourceIdMeta2` (Hash, Topic, SourceId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:35:33.941113Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint32; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `/Root/PQ/SourceIdMeta2` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND SourceId = $SourceId AND Partition = $Partition; 2025-03-28T09:35:33.941129Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7486791712055699749:2570] (SourceId=test-message-group-id, PreferedPartition=(NULL)) StartKqpSession 2025-03-28T09:35:33.944057Z node 15 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [15:7486791712055699749:2570] (SourceId=test-message-group-id, PreferedPartition=(NULL)) Select from the table 2025-03-28T09:35:34.107271Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715697. Failed to resolve tablet: 72075186224037891 after several retries. 
2025-03-28T09:35:34.107393Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7486791712055699762:2572] TxId: 281474976715697. Ctx: { TraceId: 01jqe1pnhr77vbjwbzmx4rysbz, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=NzZkMzU3NmMtNTE4NmVkMDItZWVkNDc2NGMtM2I4ZGZmMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037891 after several retries. 2025-03-28T09:35:34.107600Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=NzZkMzU3NmMtNTE4NmVkMDItZWVkNDc2NGMtM2I4ZGZmMzg=, ActorId: [15:7486791712055699750:2572], ActorState: ExecuteState, TraceId: 01jqe1pnhr77vbjwbzmx4rysbz, Create QueryResponse for error on request, msg: 2025-03-28T09:35:34.108884Z node 15 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [15:7486791712055699749:2570] (SourceId=test-message-group-id, PreferedPartition=(NULL)) ReplyError: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=NzZkMzU3NmMtNTE4NmVkMDItZWVkNDc2NGMtM2I4ZGZmMzg=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqe1pnhr77vbjwbzmxv931y5" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 2025-03-28T09:35:34.109014Z node 15 :PQ_WRITE_PROXY INFO: session v1 error cookie: 3 reason: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=NzZkMzU3NmMtNTE4NmVkMDItZWVkNDc2NGMtM2I4ZGZmMzg=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqe1pnhr77vbjwbzmxv931y5" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 sessionId: 2025-03-28T09:35:34.109370Z node 15 :PQ_WRITE_PROXY INFO: session v1 cookie: 3 sessionId: is DEAD Test retry state: get retry delay 2025-03-28T09:35:34.109767Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ea471bab-40b4a9e6-3a5e5c1e-d4c262d3_0] Got error. Status: UNAVAILABLE, Description:
: Error: kqp error Marker# PQ50 : Response { SessionId: "ydb://session/3?node_id=15&id=NzZkMzU3NmMtNTE4NmVkMDItZWVkNDc2NGMtM2I4ZGZmMzg=" QueryIssues { message: "Failed to resolve tablet: 72075186224037891 after several retries." severity: 1 } TxMeta { id: "01jqe1pnhr77vbjwbzmxv931y5" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 , code: 500001 2025-03-28T09:35:34.109805Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ea471bab-40b4a9e6-3a5e5c1e-d4c262d3_0] Write session will restart in 2.000000s 2025-03-28T09:35:34.109912Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ea471bab-40b4a9e6-3a5e5c1e-d4c262d3_0] Write session: Do CDS request 2025-03-28T09:35:34.109954Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ea471bab-40b4a9e6-3a5e5c1e-d4c262d3_0] Do schedule cds request after 2000 ms 2025-03-28T09:35:34.350249Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720679. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-28T09:35:34.350375Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7486791716553210171:2472] TxId: 281474976720679. Ctx: { TraceId: 01jqe1pns4d91s7rppy6zfsj19, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=MmNkNzMzN2MtZTNiZGUzYzYtNGI5ZWNjMjAtYjgzM2E5YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-28T09:35:34.350575Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=MmNkNzMzN2MtZTNiZGUzYzYtNGI5ZWNjMjAtYjgzM2E5YTI=, ActorId: [16:7486791716553210168:2472], ActorState: ExecuteState, TraceId: 01jqe1pns4d91s7rppy6zfsj19, Create QueryResponse for error on request, msg: 2025-03-28T09:35:34.351507Z node 16 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqe1pns4d91s7rppyaevchpx" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-03-28T09:35:34.400802Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715699. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-28T09:35:34.400917Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7486791716350667107:2575] TxId: 281474976715699. Ctx: { TraceId: 01jqe1pntt6gkbxfnn0d5c1fs9, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=ZmMzODg3YTItNDBkZWQ3N2MtOTNiMDU5OGMtYzNhN2U4M2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-28T09:35:34.401078Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=ZmMzODg3YTItNDBkZWQ3N2MtOTNiMDU5OGMtYzNhN2U4M2U=, ActorId: [15:7486791716350667104:2575], ActorState: ExecuteState, TraceId: 01jqe1pntt6gkbxfnn0d5c1fs9, Create QueryResponse for error on request, msg: 2025-03-28T09:35:34.401894Z node 15 :PQ_METACACHE ERROR: Got error trying to perform request: { Response { QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqe1pntt6gkbxfnn0ge7ppzz" } } YdbStatus: UNAVAILABLE ConsumedRu: 1 } 2025-03-28T09:35:34.899350Z node 15 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976715701. Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-28T09:35:34.899432Z node 15 :KQP_EXECUTER WARN: ActorId: [15:7486791716350667177:2576] TxId: 281474976715701. Ctx: { TraceId: 01jqe1pnz26vt20gnt7y8dp687, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=15&id=MjZlNWJiMDUtODA0ODU5MTUtNjAxMmMxMzEtZDI5MzhmZWM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037890 after several retries. 2025-03-28T09:35:34.899594Z node 15 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=15&id=MjZlNWJiMDUtODA0ODU5MTUtNjAxMmMxMzEtZDI5MzhmZWM=, ActorId: [15:7486791716350667144:2576], ActorState: ExecuteState, TraceId: 01jqe1pnz26vt20gnt7y8dp687, Create QueryResponse for error on request, msg: 2025-03-28T09:35:34.900396Z node 15 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037890 after several retries." severity: 1 } TxMeta { id: "01jqe1ppaac403fc6f3d0zyqeg" } } YdbStatus: UNAVAILABLE ConsumedRu: 224 } 2025-03-28T09:35:34.933037Z node 16 :KQP_EXECUTER WARN: [ShardsResolver] TxId: 281474976720681. Failed to resolve tablet: 72075186224037888 after several retries. 2025-03-28T09:35:34.933176Z node 16 :KQP_EXECUTER WARN: ActorId: [16:7486791716553210231:2474] TxId: 281474976720681. Ctx: { TraceId: 01jqe1pp041jagvn5sk3de0px8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=16&id=Y2YzZGEwOWQtNzUzMmRlYzMtZTVkZGJkNjItNWEwMzBkNjU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolve failed, status: UNAVAILABLE, issues:
: Error: Failed to resolve tablet: 72075186224037888 after several retries. 2025-03-28T09:35:34.933354Z node 16 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=16&id=Y2YzZGEwOWQtNzUzMmRlYzMtZTVkZGJkNjItNWEwMzBkNjU=, ActorId: [16:7486791716553210216:2474], ActorState: ExecuteState, TraceId: 01jqe1pp041jagvn5sk3de0px8, Create QueryResponse for error on request, msg: 2025-03-28T09:35:34.933838Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ea471bab-40b4a9e6-3a5e5c1e-d4c262d3_0] Write session: close. Timeout = 0 ms 2025-03-28T09:35:34.933889Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ea471bab-40b4a9e6-3a5e5c1e-d4c262d3_0] Write session will now close 2025-03-28T09:35:34.933989Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ea471bab-40b4a9e6-3a5e5c1e-d4c262d3_0] Write session: aborting 2025-03-28T09:35:34.934303Z node 16 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Execution" issue_code: 1060 severity: 2 issues { position { row: 3 column: 120 } message: "Cost Based Optimizer could not be applied to this query: couldn\'t load statistics" end_position { row: 3 column: 120 } issue_code: 8001 severity: 2 } } QueryIssues { message: "Failed to resolve tablet: 72075186224037888 after several retries." severity: 1 } TxMeta { id: "01jqe1ppaq5gd9j0zx3e867761" } } YdbStatus: UNAVAILABLE ConsumedRu: 222 } 2025-03-28T09:35:34.934637Z :WARNING: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ea471bab-40b4a9e6-3a5e5c1e-d4c262d3_0] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2025-03-28T09:35:34.934677Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|ea471bab-40b4a9e6-3a5e5c1e-d4c262d3_0] Write session: destroy >> Cdc::SupportedTypes [GOOD] >> Cdc::SplitTopicPartition_TopicAutoPartitioning >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize [GOOD] >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::PointSetRemove [GOOD] Test command err: 2025-03-28T09:35:27.865331Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791687408391333:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:27.865464Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f2b/r3tmp/tmpRya5Ou/pdisk_1.dat 2025-03-28T09:35:28.090907Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:28.206843Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:28.206962Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:28.208517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19686 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:28.287856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:28.309228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:28.400912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:28.436183Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:32.423031Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791707059629685:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:32.423093Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f2b/r3tmp/tmpoTANuZ/pdisk_1.dat 2025-03-28T09:35:32.490571Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:32.549815Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:32.549884Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:32.551430Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24585 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:32.671371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:32.682502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:32.723352Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:32.761668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:35.090815Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486791722961813279:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:35.090856Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f2b/r3tmp/tmpc13ofx/pdisk_1.dat 2025-03-28T09:35:35.153170Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:35.212367Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:35.212448Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:35.213969Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61167 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:35.312136Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:35.330106Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:35.373395Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:35.411758Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> TObjectStorageListingTest::Listing >> TFlatTest::SplitThenMerge [GOOD] >> Cdc::DropColumn [GOOD] >> Cdc::DropIndex >> BuildStatsHistogram::Many_Serial [GOOD] >> TFlatTest::CopyTableAndCompareColumnsSchema ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitThenMerge [GOOD] Test command err: 2025-03-28T09:35:35.358086Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791720216917403:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:35.358209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f27/r3tmp/tmpB2zmkg/pdisk_1.dat 2025-03-28T09:35:35.602314Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:35.674980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:35.675112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:35.676737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14648 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:35.810795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:35.837491Z node 1 :TX_PROXY ERROR: Actor# [1:7486791720216917993:2297] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Dir1\', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 128: Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) 2025-03-28T09:35:37.423811Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791731210510561:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:37.423874Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001f27/r3tmp/tmpdQOkmo/pdisk_1.dat 2025-03-28T09:35:37.494357Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:37.554945Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:37.555060Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:37.556554Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23334 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:37.660527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:37.677549Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:37.815175Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-28T09:35:37.819683Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-28T09:35:37.832567Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-28T09:35:37.835698Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 TClient::Ls request: /dc-1/Dir/TableOld TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "TableOld" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743154537782 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableOld" Columns { Name: "unused004" Type: "Float" TypeId: 33 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "Key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name... 
(TRUNCATED) 2025-03-28T09:35:37.887498Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:35:37.888910Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:35:37.889059Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:35:37.889900Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:35:37.890012Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:35:37.890431Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-28T09:35:37.891005Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:35:37.891098Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:35:37.891397Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-28T09:35:37.891811Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:35:37.891882Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:35:37.892145Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-28T09:35:37.892537Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:35:37.892604Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:35:37.892873Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-28T09:35:37.893234Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:35:37.893307Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:35:37.893562Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-28T09:35:37.893945Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:35:37.894006Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:35:37.894253Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-28T09:35:37.894650Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:35:37.894724Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:35:37.895019Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-28T09:35:37.895455Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:35:37.895526Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:35:37.895835Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-28T09:35:37.896236Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:35:37.896302Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:35:37.896581Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-28T09:35:37.897013Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:35:37.897088Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:35:37.897430Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-28T09:35:37.897826Z node 2 
:TX_DATASHARD DEBUG: tx 281474976715676 released its data 2025-03-28T09:35:37.897890Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:35:37.898148Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037888 restored its data 2025-03-28T09:35:37.898675Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:35:37.898711Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:35:37.898853Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:35:37.899129Z node 2 :TX_DATASHARD DEBUG: tx 281474976715676 at 72075186224037889 restored its data 2025-03-28T09:35:37.899466Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:35:37.899485Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at table ... at tablet: 72057594046644480 message: Source { RawX1: 7486791731210511188 RawX2: 4503608217307386 } Origin: 72075186224037889 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-03-28T09:35:38.222188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715693:0, shardIdx: 72057594046644480:2, datashard: 72075186224037889, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-03-28T09:35:38.222216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-03-28T09:35:38.222389Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7486791735505479109 RawX2: 4503608217307478 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-03-28T09:35:38.222407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 281474976715693, tablet: 72075186224037894, partId: 0 2025-03-28T09:35:38.222476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7486791735505479109 RawX2: 4503608217307478 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-03-28T09:35:38.222495Z node 2 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 2025-03-28T09:35:38.222537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7486791735505479109 RawX2: 4503608217307478 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2025-03-28T09:35:38.222561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715693:0, shardIdx: 72057594046644480:7, datashard: 72075186224037894, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2025-03-28T09:35:38.222585Z node 2 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-03-28T09:35:38.222609Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, 
operation: 281474976715693:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2025-03-28T09:35:38.222628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037894, at schemeshard: 72057594046644480 2025-03-28T09:35:38.222643Z node 2 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715693:0 129 -> 240 2025-03-28T09:35:38.222849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715693 2025-03-28T09:35:38.222913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-03-28T09:35:38.222968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-03-28T09:35:38.223025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-03-28T09:35:38.223070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-03-28T09:35:38.223127Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037889 state PreOffline 2025-03-28T09:35:38.223167Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-28T09:35:38.223170Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2025-03-28T09:35:38.223197Z node 2 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:35:38.223237Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037894 state PreOffline 2025-03-28T09:35:38.223252Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-03-28T09:35:38.223537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-28T09:35:38.223669Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715693:0 progress is 1/1 2025-03-28T09:35:38.223690Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-03-28T09:35:38.223707Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715693:0 progress is 1/1 2025-03-28T09:35:38.223721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-03-28T09:35:38.223774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715693, ready parts: 1/1, is published: true 2025-03-28T09:35:38.223827Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7486791735505479333:2427] message: TxId: 281474976715693 2025-03-28T09:35:38.223856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2025-03-28T09:35:38.223880Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715693:0 2025-03-28T09:35:38.223897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715693:0 2025-03-28T09:35:38.223981Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target 
path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-28T09:35:38.224323Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:35:38.224370Z node 2 :TX_DATASHARD INFO: 72075186224037889 Initiating switch from PreOffline to Offline state 2025-03-28T09:35:38.225401Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:35:38.225438Z node 2 :TX_DATASHARD INFO: 72075186224037894 Initiating switch from PreOffline to Offline state 2025-03-28T09:35:38.226218Z node 2 :TX_DATASHARD INFO: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:35:38.226290Z node 2 :TX_DATASHARD INFO: 72075186224037894 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:35:38.226451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486791731210511188 RawX2: 4503608217307386 } TabletId: 72075186224037889 State: 4 TClient::Ls request: /dc-1/Dir/TableOld2025-03-28T09:35:38.226509Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:35:38.226663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7486791735505479109 RawX2: 4503608217307478 } TabletId: 72075186224037894 State: 4 2025-03-28T09:35:38.226704Z node 2 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2025-03-28T09:35:38.226818Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2025-03-28T09:35:38.226835Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:35:38.226913Z node 2 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037894 state Offline 2025-03-28T09:35:38.226914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 TClient::Ls response: Status: 128 StatusCode: PATH_NOT_EXIST Issues { message: "Path not exist" issue_code: 200200 severity: 1 } SchemeStatus: 2 ErrorReason: "Path not found" 2025-03-28T09:35:38.228508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-28T09:35:38.228578Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037894 reason = ReasonStop 2025-03-28T09:35:38.228731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-28T09:35:38.228943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 7 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2025-03-28T09:35:38.228966Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037894 
2025-03-28T09:35:38.229015Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037894 2025-03-28T09:35:38.229107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-28T09:35:38.229276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:35:38.229300Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-28T09:35:38.229350Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:35:38.229728Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2025-03-28T09:35:38.229977Z node 2 :TX_DATASHARD INFO: OnTabletStop: 72075186224037889 reason = ReasonStop 2025-03-28T09:35:38.230155Z node 2 :TX_DATASHARD INFO: OnTabletDead: 72075186224037889 2025-03-28T09:35:38.230187Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037889 2025-03-28T09:35:38.230433Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2025-03-28T09:35:38.230524Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-28T09:35:38.230556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-28T09:35:38.230592Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:7 2025-03-28T09:35:38.230618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2025-03-28T09:35:38.230653Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> BuildStatsHistogram::Many_Serial [GOOD] Test command err: Got : 24000 2106439 49449 9 9 Expected: 24000 2106439 49449 9 9 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 49449 9 9 Expected: 12816 1121048 49449 9 9 Got : 24000 3547100 81694 9 9 Expected: 24000 3547100 81694 9 9 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425282 81694 9 9 Expected: 9582 1425282 81694 9 9 Got : 24000 2460139 23760 9 9 Expected: 24000 2460139 23760 9 9 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060767 23760 9 9 Expected: 10440 1060767 23760 9 9 Got : 24000 4054050 46562 9 9 Expected: 24000 4054050 46562 9 9 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2273213 46562 9 9 Expected: 13570 2273213 46562 9 9 Got : 24000 2106459 
49449 9 9 Expected: 24000 2106459 49449 9 9 Got : 24000 2460219 23555 9 9 Expected: 24000 2460219 23555 9 9 Got : 24000 4054270 46543 9 9 Expected: 24000 4054270 46543 9 9 Got : 24000 2106439 25272 38 44 Expected: 24000 2106439 25272 38 44 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 25272 20 23 Expected: 12816 1121048 25272 20 23 Got : 24000 3547100 49916 64 44 Expected: 24000 3547100 49916 64 44 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425198 49916 26 17 Expected: 9582 1425198 49916 26 17 Got : 24000 2460139 13170 42 41 Expected: 24000 2460139 13170 42 41 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 13170 18 18 Expected: 10440 1060798 13170 18 18 Got : 24000 4054050 29361 68 43 Expected: 24000 4054050 29361 68 43 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2277890 29361 38 24 Expected: 13570 2277890 29361 38 24 Got : 24000 2106459 25428 38 44 Expected: 24000 2106459 25428 38 44 Got : 24000 2460219 13482 41 41 Expected: 24000 2460219 13482 41 41 Got : 24000 4054270 29970 67 43 Expected: 24000 4054270 29970 67 43 Got : 24000 2106479 25458 38 44 Expected: 24000 2106479 25458 38 44 Got : 24000 2460259 13528 42 41 Expected: 24000 2460259 13528 42 41 Got : 24000 4054290 30013 67 43 Expected: 24000 4054290 30013 67 43 1 parts: [0:0:1:0:0:0:0] 240000 rows, 10181 pages, 7 levels: (159964, 53329) (319996, 106673) (479902, 159975) (639565, 213196) (799303, 266442) Checking BTree: Touched 0% bytes, 4 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24079 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48136 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72327 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 96428 (actual 96478 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 120604 (actual 120651 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 144727 (actual 144775 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 168893 (actual 168936 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 192974 (actual 193024 - 0% error) 5% (actual 5%) key = (683260, 227761) value = 205073 (actual 205115 - 0% error) 14% (actual 14%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2052707 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4102393 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6149966 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 8194622 (actual 8198636 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 10244365 (actual 10248317 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 12292389 (actual 12296360 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 14344066 (actual 14348128 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 16393002 (actual 16396983 - 
0% error) 5% (actual 5%) key = (683260, 227761) value = 17416844 (actual 17420850 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1 pages RowCountHistogram: 10% (actual 10%) key = (80065, 26696) value = 24008 (actual 24056 - 0% error) 10% (actual 10%) key = (160045, 53356) value = 48012 (actual 48061 - 0% error) 10% (actual 10%) key = (240238, 80087) value = 72016 (actual 72061 - 0% error) 10% (actual 10%) key = (320152, 106725) value = 96035 (actual 96085 - 0% error) 10% (actual 10%) key = (400354, 133459) value = 120047 (actual 120093 - 0% error) 10% (actual 10%) key = (480133, 160052) value = 144053 (actual 144100 - 0% error) 10% (actual 10%) key = (560080, 186701) value = 168060 (actual 168102 - 0% error) 10% (actual 10%) key = (639892, 213305) value = 192073 (actual 192119 - 0% error) 10% (actual 10%) key = (719776, 239933) value = 216090 (actual 216137 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79732, 26585) value = 2038706 (actual 2042645 - 0% error) 10% (actual 10%) key = (159427, 53150) value = 4076220 (actual 4080259 - 0% error) 10% (actual 10%) key = (239872, 79965) value = 6113940 (actual 6117932 - 0% error) 10% (actual 10%) key = (319834, 106619) value = 8152983 (actual 8156951 - 0% error) 10% (actual 10%) key = (400105, 133376) value = 10190566 (actual 10194584 - 0% error) 10% (actual 10%) key = (479833, 159952) value = 12228261 (actual 12232212 - 0% error) 10% (actual 10%) key = (559774, 186599) value = 14265925 (actual 14269984 - 0% error) 10% (actual 10%) key = (639385, 213136) value = 16304923 (actual 16308915 - 0% error) 10% (actual 10%) key = (719437, 239820) value = 18342658 (actual 18346641 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 51 pages RowCountHistogram: 10% (actual 10%) key = (80152, 26725) value = 24033 (actual 24079 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 48088 (actual 48136 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 72280 (actual 72327 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 96428 (actual 96478 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 120604 (actual 120651 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 144727 (actual 144775 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 168893 (actual 168936 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 192974 (actual 193024 - 0% error) 10% (actual 10%) key = (723403, 241142) value = 217180 (actual 217228 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (80152, 26725) value = 2048715 (actual 2052707 - 0% error) 10% (actual 10%) key = (160300, 53441) value = 4098370 (actual 4102393 - 0% error) 10% (actual 10%) key = (241096, 80373) value = 6145924 (actual 6149966 - 0% error) 10% (actual 10%) key = (321454, 107159) value = 8194622 (actual 8198636 - 0% error) 10% (actual 10%) key = (402202, 134075) value = 10244365 (actual 10248317 - 0% error) 10% (actual 10%) key = (482362, 160795) value = 12292389 (actual 12296360 - 0% error) 10% (actual 10%) key = (562825, 187616) value = 14344066 (actual 14348128 - 0% error) 10% (actual 10%) key = (642871, 214298) value = 16393002 (actual 16396983 - 0% error) 10% (actual 10%) key = (723403, 241142) value = 18443184 (actual 18447186 - 0% error) 9% (actual 9%) { [12965, 17271), [20685, 27602), [31405, 43682), [58051, 73731), [81074, 85635), [86559, 89297), [92588, 112654), [134937, 148111), [152568, 158136), [169526, 171272), [181381, 184364), [188301, 199001), [201179, 227534) } 1 parts: 
[0:0:1:0:0:0:0] 240000 rows, 10181 pages, 7 levels: (159964, 53329) (319996, 106673) (479902, 159975) (639565, 213196) (799303, 266442) Checking BTree: Touched 3% bytes, 111 pages RowCountHistogram: 6% (actual 6%) key = (80152, 26725) value = 7654 (actual 7700 - 0% error) 11% (actual 11%) key = (140245, 46756) value = 21908 (actual 21959 - 0% error) 12% (actual 12%) key = (241096, 80373) value = 37729 (actual 37776 - 0% error) 5% (actual 5%) key = (291388, 97137) value = 44561 (actual 44610 - 0% error) 14% (actual 14%) key = (361831, 120618) value = 62406 (actual 62455 - 0% error) 6% (actual 6%) key = (462178, 154067) value = 70269 (actual 70314 - 0% error) 10% (actual 10%) key = (522574, 174199) value = 83950 (actual 83996 - 0% error) 9% (actual 9%) key = (647905, 215976) value = 96207 (actual 96256 - 0% error) 11% (actual 11%) key = (703270, 234431) value = 110645 (actual 110694 - 0% error) 12% (actual 12%) DataSizeHistogram: 6% (actual 6%) key = (80152, 26725) value = 650681 (actual 654673 - 0% error) 11% (actual 11%) key = (140245, 46756) value = 1862907 (actual 1866988 - 0% error) 12% (actual 12%) key = (241096, 80373) value = 3200081 (actual 3204123 - 0% error) 5% (actual 5%) key = (291388, 97137) value = 3780473 (actual 3784554 - 0% error) 14% (actual 14%) key = (361831, 120618) value = 5294670 (actual 5298760 - 0% error) 6% (actual 6%) key = (462178, 154067) value = 5965285 (actual 5969310 - 0% error) 10% (actual 10%) key = (522574, 174199) value = 7125413 (actual 7129406 - 0% error) 9% (actual 9%) key = (647905, 215976) value = 8166922 (actual 8170966 - 0% error) 11% (actual 11%) key = (703270, 234431) value = 9391370 (actual 9395383 - 0% error) 12% (actual 12%) { [12965, 17271), [20685, 27602), [31405, 43682), [58051, 73731), [81074, 85635), [86559, 89297), [92588, 112654), [134937, 148111), [152568, 158136), [169526, 171272), [181381, 184364), [188301, 199001), [201179, 227534) } Checking Flat: Touched 100% bytes, 1 pages RowCountHistogram: 10% (actual 10%) key = (109672, 36565) value = 12716 (actual 12760 - 0% error) 10% (actual 10%) key = (200011, 66678) value = 25439 (actual 25485 - 0% error) 10% (actual 10%) key = (242497, 80840) value = 38151 (actual 38197 - 0% error) 10% (actual 10%) key = (323278, 107767) value = 50861 (actual 50910 - 0% error) 9% (actual 9%) key = (365755, 121926) value = 63568 (actual 63614 - 0% error) 10% (actual 10%) key = (482191, 160738) value = 76283 (actual 76335 - 0% error) 10% (actual 9%) key = (610882, 203635) value = 88992 (actual 89039 - 0% error) 10% (actual 10%) key = (673702, 224575) value = 101722 (actual 101768 - 0% error) 10% (actual 10%) key = (715753, 238592) value = 114435 (actual 114484 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) ... 
140, NULL) (311209, NULL) (311281, NULL) (311344, NULL) (311416, NULL) [0:0:935:0:0:0:0] 100 rows, 100 pages, 4 levels: (311479, NULL) (311542, NULL) (311614, NULL) (311683, NULL) (311755, NULL) [0:0:936:0:0:0:0] 100 rows, 100 pages, 4 levels: (311821, NULL) (311890, NULL) (311956, NULL) (312034, NULL) (312100, NULL) [0:0:937:0:0:0:0] 100 rows, 100 pages, 4 levels: (312172, NULL) (312232, NULL) (312301, NULL) (312370, NULL) (312439, NULL) [0:0:938:0:0:0:0] 100 rows, 100 pages, 4 levels: (312508, NULL) (312571, NULL) (312637, NULL) (312700, NULL) (312760, NULL) [0:0:939:0:0:0:0] 100 rows, 100 pages, 4 levels: (312835, NULL) (312904, NULL) (312970, NULL) (313030, NULL) (313102, NULL) [0:0:940:0:0:0:0] 100 rows, 100 pages, 4 levels: (313174, NULL) (313240, NULL) (313300, NULL) (313366, NULL) (313429, NULL) [0:0:941:0:0:0:0] 100 rows, 100 pages, 4 levels: (313498, NULL) (313573, NULL) (313639, NULL) (313699, NULL) (313768, NULL) [0:0:942:0:0:0:0] 100 rows, 100 pages, 4 levels: (313828, NULL) (313891, NULL) (313957, NULL) (314023, NULL) (314086, NULL) [0:0:943:0:0:0:0] 100 rows, 100 pages, 4 levels: (314149, NULL) (314212, NULL) (314275, NULL) (314338, NULL) (314401, NULL) [0:0:944:0:0:0:0] 100 rows, 100 pages, 4 levels: (314464, NULL) (314530, NULL) (314590, NULL) (314656, NULL) (314719, NULL) [0:0:945:0:0:0:0] 100 rows, 100 pages, 4 levels: (314788, NULL) (314854, NULL) (314920, NULL) (314983, NULL) (315046, NULL) [0:0:946:0:0:0:0] 100 rows, 100 pages, 4 levels: (315109, NULL) (315178, NULL) (315238, NULL) (315304, NULL) (315370, NULL) [0:0:947:0:0:0:0] 100 rows, 100 pages, 4 levels: (315433, NULL) (315496, NULL) (315565, NULL) (315631, NULL) (315697, NULL) [0:0:948:0:0:0:0] 100 rows, 100 pages, 4 levels: (315766, NULL) (315826, NULL) (315889, NULL) (315952, NULL) (316024, NULL) [0:0:949:0:0:0:0] 100 rows, 100 pages, 4 levels: (316087, NULL) (316156, NULL) (316222, NULL) (316288, NULL) (316357, NULL) [0:0:950:0:0:0:0] 100 rows, 100 pages, 4 levels: (316432, NULL) (316498, NULL) (316564, NULL) (316636, NULL) (316705, NULL) [0:0:951:0:0:0:0] 100 rows, 100 pages, 4 levels: (316768, NULL) (316831, NULL) (316891, NULL) (316951, NULL) (317011, NULL) [0:0:952:0:0:0:0] 100 rows, 100 pages, 4 levels: (317080, NULL) (317143, NULL) (317218, NULL) (317287, NULL) (317356, NULL) [0:0:953:0:0:0:0] 100 rows, 100 pages, 4 levels: (317422, NULL) (317497, NULL) (317563, NULL) (317632, NULL) (317701, NULL) [0:0:954:0:0:0:0] 100 rows, 100 pages, 4 levels: (317764, NULL) (317824, NULL) (317887, NULL) (317953, NULL) (318019, NULL) [0:0:955:0:0:0:0] 100 rows, 100 pages, 4 levels: (318088, NULL) (318166, NULL) (318235, NULL) (318304, NULL) (318370, NULL) [0:0:956:0:0:0:0] 100 rows, 100 pages, 4 levels: (318442, NULL) (318511, NULL) (318574, NULL) (318640, NULL) (318703, NULL) [0:0:957:0:0:0:0] 100 rows, 100 pages, 4 levels: (318772, NULL) (318838, NULL) (318898, NULL) (318970, NULL) (319036, NULL) [0:0:958:0:0:0:0] 100 rows, 100 pages, 4 levels: (319099, NULL) (319162, NULL) (319225, NULL) (319294, NULL) (319360, NULL) [0:0:959:0:0:0:0] 100 rows, 100 pages, 4 levels: (319423, NULL) (319492, NULL) (319555, NULL) (319621, NULL) (319687, NULL) [0:0:960:0:0:0:0] 100 rows, 100 pages, 4 levels: (319753, NULL) (319828, NULL) (319900, NULL) (319963, NULL) (320035, NULL) [0:0:961:0:0:0:0] 100 rows, 100 pages, 4 levels: (320104, NULL) (320164, NULL) (320233, NULL) (320299, NULL) (320365, NULL) [0:0:962:0:0:0:0] 100 rows, 100 pages, 4 levels: (320428, NULL) (320500, NULL) (320569, NULL) (320629, NULL) (320698, NULL) 
[0:0:963:0:0:0:0] 100 rows, 100 pages, 4 levels: (320764, NULL) (320833, NULL) (320893, NULL) (320959, NULL) (321019, NULL) [0:0:964:0:0:0:0] 100 rows, 100 pages, 4 levels: (321085, NULL) (321151, NULL) (321214, NULL) (321277, NULL) (321352, NULL) [0:0:965:0:0:0:0] 100 rows, 100 pages, 4 levels: (321421, NULL) (321493, NULL) (321562, NULL) (321631, NULL) (321691, NULL) [0:0:966:0:0:0:0] 100 rows, 100 pages, 4 levels: (321757, NULL) (321823, NULL) (321886, NULL) (321949, NULL) (322009, NULL) [0:0:967:0:0:0:0] 100 rows, 100 pages, 4 levels: (322081, NULL) (322159, NULL) (322225, NULL) (322294, NULL) (322363, NULL) [0:0:968:0:0:0:0] 100 rows, 100 pages, 4 levels: (322429, NULL) (322498, NULL) (322564, NULL) (322642, NULL) (322711, NULL) [0:0:969:0:0:0:0] 100 rows, 100 pages, 4 levels: (322783, NULL) (322846, NULL) (322915, NULL) (322978, NULL) (323041, NULL) [0:0:970:0:0:0:0] 100 rows, 100 pages, 4 levels: (323104, NULL) (323164, NULL) (323230, NULL) (323305, NULL) (323368, NULL) [0:0:971:0:0:0:0] 100 rows, 100 pages, 4 levels: (323434, NULL) (323506, NULL) (323569, NULL) (323632, NULL) (323707, NULL) [0:0:972:0:0:0:0] 100 rows, 100 pages, 4 levels: (323776, NULL) (323851, NULL) (323917, NULL) (323986, NULL) (324052, NULL) [0:0:973:0:0:0:0] 100 rows, 100 pages, 4 levels: (324115, NULL) (324184, NULL) (324256, NULL) (324316, NULL) (324379, NULL) [0:0:974:0:0:0:0] 100 rows, 100 pages, 4 levels: (324442, NULL) (324502, NULL) (324568, NULL) (324631, NULL) (324703, NULL) [0:0:975:0:0:0:0] 100 rows, 100 pages, 4 levels: (324769, NULL) (324838, NULL) (324904, NULL) (324973, NULL) (325033, NULL) [0:0:976:0:0:0:0] 100 rows, 100 pages, 4 levels: (325105, NULL) (325174, NULL) (325234, NULL) (325297, NULL) (325363, NULL) [0:0:977:0:0:0:0] 100 rows, 100 pages, 4 levels: (325438, NULL) (325504, NULL) (325570, NULL) (325630, NULL) (325699, NULL) [0:0:978:0:0:0:0] 100 rows, 100 pages, 4 levels: (325771, NULL) (325834, NULL) (325900, NULL) (325966, NULL) (326032, NULL) [0:0:979:0:0:0:0] 100 rows, 100 pages, 4 levels: (326101, NULL) (326170, NULL) (326233, NULL) (326296, NULL) (326359, NULL) [0:0:980:0:0:0:0] 100 rows, 100 pages, 4 levels: (326434, NULL) (326497, NULL) (326563, NULL) (326632, NULL) (326701, NULL) [0:0:981:0:0:0:0] 100 rows, 100 pages, 4 levels: (326773, NULL) (326836, NULL) (326905, NULL) (326965, NULL) (327025, NULL) [0:0:982:0:0:0:0] 100 rows, 100 pages, 4 levels: (327097, NULL) (327169, NULL) (327232, NULL) (327301, NULL) (327364, NULL) [0:0:983:0:0:0:0] 100 rows, 100 pages, 4 levels: (327430, NULL) (327496, NULL) (327559, NULL) (327622, NULL) (327682, NULL) [0:0:984:0:0:0:0] 100 rows, 100 pages, 4 levels: (327742, NULL) (327811, NULL) (327871, NULL) (327934, NULL) (327997, NULL) [0:0:985:0:0:0:0] 100 rows, 100 pages, 4 levels: (328072, NULL) (328138, NULL) (328222, NULL) (328291, NULL) (328363, NULL) [0:0:986:0:0:0:0] 100 rows, 100 pages, 4 levels: (328432, NULL) (328501, NULL) (328573, NULL) (328648, NULL) (328717, NULL) [0:0:987:0:0:0:0] 100 rows, 100 pages, 4 levels: (328783, NULL) (328849, NULL) (328915, NULL) (328978, NULL) (329044, NULL) [0:0:988:0:0:0:0] 100 rows, 100 pages, 4 levels: (329119, NULL) (329185, NULL) (329248, NULL) (329317, NULL) (329383, NULL) [0:0:989:0:0:0:0] 100 rows, 100 pages, 4 levels: (329455, NULL) (329518, NULL) (329590, NULL) (329662, NULL) (329722, NULL) [0:0:990:0:0:0:0] 100 rows, 100 pages, 4 levels: (329782, NULL) (329854, NULL) (329917, NULL) (329983, NULL) (330049, NULL) [0:0:991:0:0:0:0] 100 rows, 100 pages, 4 levels: (330118, NULL) (330187, NULL) 
(330253, NULL) (330322, NULL) (330382, NULL) [0:0:992:0:0:0:0] 100 rows, 100 pages, 4 levels: (330454, NULL) (330520, NULL) (330595, NULL) (330673, NULL) (330739, NULL) [0:0:993:0:0:0:0] 100 rows, 100 pages, 4 levels: (330808, NULL) (330874, NULL) (330940, NULL) (331003, NULL) (331072, NULL) [0:0:994:0:0:0:0] 100 rows, 100 pages, 4 levels: (331132, NULL) (331204, NULL) (331276, NULL) (331342, NULL) (331405, NULL) [0:0:995:0:0:0:0] 100 rows, 100 pages, 4 levels: (331465, NULL) (331540, NULL) (331615, NULL) (331684, NULL) (331753, NULL) [0:0:996:0:0:0:0] 100 rows, 100 pages, 4 levels: (331816, NULL) (331891, NULL) (331960, NULL) (332026, NULL) (332086, NULL) [0:0:997:0:0:0:0] 100 rows, 100 pages, 4 levels: (332152, NULL) (332215, NULL) (332284, NULL) (332350, NULL) (332419, NULL) [0:0:998:0:0:0:0] 100 rows, 100 pages, 4 levels: (332491, NULL) (332557, NULL) (332623, NULL) (332686, NULL) (332752, NULL) [0:0:999:0:0:0:0] 100 rows, 100 pages, 4 levels: (332818, NULL) (332884, NULL) (332944, NULL) (333013, NULL) (333073, NULL) [0:0:1000:0:0:0:0] 100 rows, 100 pages, 4 levels: (333148, NULL) (333214, NULL) (333274, NULL) (333340, NULL) (333403, NULL) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 5% (actual 6%) key = (16984, 5669) value = 5100 (actual 6998 - -1% error) 10% (actual 9%) key = (50416, 16813) value = 15100 (actual 16798 - -1% error) 10% (actual 9%) key = (83701, 27908) value = 25100 (actual 26598 - -1% error) 10% (actual 9%) key = (116986, 39003) value = 35100 (actual 36398 - -1% error) 10% (actual 9%) key = (150319, 50114) value = 45100 (actual 46198 - -1% error) 10% (actual 9%) key = (183700, 61241) value = 55100 (actual 55998 - 0% error) 10% (actual 9%) key = (217081, 72368) value = 65100 (actual 65798 - 0% error) 10% (actual 9%) key = (250486, 83503) value = 75100 (actual 75598 - 0% error) 10% (actual 9%) key = (283771, 94598) value = 85100 (actual 85398 - 0% error) 14% (actual 14%) DataSizeHistogram: 5% (actual 6%) key = (16648, 5557) value = 524891 (actual 723287 - -1% error) 10% (actual 9%) key = (50086, 16703) value = 1569936 (actual 1747238 - -1% error) 9% (actual 9%) key = (83356, 27793) value = 2610698 (actual 2767306 - -1% error) 10% (actual 9%) key = (116647, 38890) value = 3652143 (actual 3787394 - -1% error) 9% (actual 9%) key = (149656, 49893) value = 4685435 (actual 4800597 - -1% error) 10% (actual 9%) key = (183040, 61021) value = 5728420 (actual 5822785 - 0% error) 10% (actual 9%) key = (216727, 72250) value = 6776444 (actual 6848929 - 0% error) 9% (actual 9%) key = (250144, 83389) value = 7813547 (actual 7865227 - 0% error) 9% (actual 9%) key = (283444, 94489) value = 8853697 (actual 8884838 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1000 pages RowCountHistogram: 10% (actual 11%) key = (33379, 11134) value = 10000 (actual 11800 - -1% error) 10% (actual 9%) key = (66721, 22248) value = 20000 (actual 21600 - -1% error) 10% (actual 9%) key = (100015, 33346) value = 30000 (actual 31400 - -1% error) 10% (actual 9%) key = (133258, 44427) value = 40000 (actual 41200 - -1% error) 10% (actual 9%) key = (166621, 55548) value = 50000 (actual 51000 - -1% error) 10% (actual 9%) key = (200041, 66688) value = 60000 (actual 60800 - 0% error) 10% (actual 9%) key = (233449, 77824) value = 70000 (actual 70600 - 0% error) 10% (actual 9%) key = (266824, 88949) value = 80000 (actual 80400 - 0% error) 10% (actual 9%) key = (300073, 100032) value = 90000 (actual 90200 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 11%) key = (33187, NULL) 
value = 1041247 (actual 1229534 - -1% error) 10% (actual 9%) key = (66517, NULL) value = 2082456 (actual 2249844 - -1% error) 10% (actual 9%) key = (99709, NULL) value = 3123684 (actual 3270138 - -1% error) 10% (actual 9%) key = (132925, NULL) value = 4164886 (actual 4290603 - -1% error) 10% (actual 9%) key = (166246, NULL) value = 5206111 (actual 5311117 - -1% error) 10% (actual 9%) key = (199678, NULL) value = 6247321 (actual 6331068 - 0% error) 10% (actual 9%) key = (233290, NULL) value = 7288529 (actual 7350869 - 0% error) 10% (actual 9%) key = (266701, NULL) value = 8329759 (actual 8371441 - 0% error) 10% (actual 9%) key = (300052, NULL) value = 9371030 (actual 9392083 - 0% error) 9% (actual 9%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl >> TObjectStorageListingTest::Listing [GOOD] >> TObjectStorageListingTest::ManyDeletes >> TFlatTest::Mix_DML_DDL >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace [GOOD] >> Cdc::AreJsonsEqualReturnsTrueOnEqual [GOOD] >> Cdc::AreJsonsEqualReturnsFalseOnDifferent [GOOD] >> Cdc::AreJsonsEqualFailsOnWildcardInArray [GOOD] >> Cdc::AlterViaTopicService >> Cdc::SplitTopicPartition_TopicAutoPartitioning [GOOD] >> Cdc::ShouldDeliverChangesOnSplitMerge >> TFlatTest::Mix_DML_DDL [GOOD] >> TFlatTest::OutOfDiskSpace [GOOD] >> Cdc::DropIndex [GOOD] >> Cdc::DisableStream >> TVersions::Wreck1Reverse [GOOD] >> TVersions::Wreck0 >> Viewer::JsonStorageListingV1NodeIdFilter [GOOD] >> Viewer::JsonStorageListingV1PDiskIdFilter >> TLocksTest::GoodSameKeyLock [GOOD] >> TLocksTest::GoodSameShardLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::OutOfDiskSpace [GOOD] Test command err: 2025-03-28T09:35:43.527700Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791756021222581:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:43.527890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ecd/r3tmp/tmpR5mHTg/pdisk_1.dat 2025-03-28T09:35:43.755955Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:43.852093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:43.852195Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:43.853852Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13741 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:43.928569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:43.938934Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:44.040468Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... proxy error code: Unknown error:
: Error: Resolve failed for table: /dc-1/Table, error: column 'value' not exist, code: 200400 2025-03-28T09:35:44.056790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:44.071199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:44.084875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 waiting... proxy error code: Unknown error:
:5:24: Error: At function: AsList
:5:32: Error: At function: SetResult
:4:27: Error: At function: SelectRow
:4:27: Error: Mismatch of key columns count for table [/dc-1/Table], expected: 2, but got 1., code: 2028 >> TFlatTest::CopyTableAndCompareColumnsSchema [GOOD] >> TFlatTest::CopyTableAndDropCopy >> Cdc::AlterViaTopicService [GOOD] >> Cdc::Alter >> TFlatTest::SelectBigRangePerf >> TFlatTest::CopyTableAndDropCopy [GOOD] >> TFlatTest::SelectBigRangePerf [GOOD] >> TFlatTest::SelectRangeBothLimit >> Cdc::ShouldDeliverChangesOnSplitMerge [GOOD] >> Cdc::ResolvedTimestamps ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropCopy [GOOD] Test command err: 2025-03-28T09:35:40.543660Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791745148496210:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:40.543723Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ed0/r3tmp/tmpBR8UGe/pdisk_1.dat 2025-03-28T09:35:40.769579Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:40.783194Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:40.783266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:40.785203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3302 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:40.978894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:41.007439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
TClient::Ls request: /dc-1/Dir/Table_1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1743154541114 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 ... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_1_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_1_Copy" PathId: 4 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1743154541163 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_1_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot... (TRUNCATED) 2025-03-28T09:35:41.131563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_2 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2" PathId: 5 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715661 CreateStep: 1743154541191 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col_0" ... 
(TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_2_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_2_Copy" PathId: 6 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715662 CreateStep: 1743154541219 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_2_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: ... (TRUNCATED) 2025-03-28T09:35:41.181315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 waiting... waiting... TClient::Ls request: /dc-1/Dir/Table_3 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3" PathId: 7 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715663 CreateStep: 1743154541240 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_3_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_3_Copy" PathId: 8 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715664 CreateStep: 1743154541261 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_3_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { ... (TRUNCATED) 2025-03-28T09:35:41.228221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 waiting... waiting... 
TClient::Ls request: /dc-1/Dir/Table_4 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4" PathId: 9 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715665 CreateStep: 1743154541289 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_4" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col... (TRUNCATED) TClient::Ls request: /dc-1/Dir/Table_4_Copy TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table_4_Copy" PathId: 10 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715666 CreateStep: 1743154541317 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table_4_Copy" Columns { Name: "col_0" Type: "Int32" TypeId: 1 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col_1" Type: "Int32" TypeId: 1 Id: 2 NotNull: false IsBuildInProgress: false } ... r::TEvDataShard::TEvProposeTransactionResult> complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-03-28T09:35:48.470516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-03-28T09:35:48.470579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-03-28T09:35:48.470673Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715686:0, at schemeshard: 72057594046644480 2025-03-28T09:35:48.470680Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715686 datashard 72075186224037895 state Ready 2025-03-28T09:35:48.470715Z node 2 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715686:0 ProgressState 2025-03-28T09:35:48.470731Z node 2 :TX_DATASHARD DEBUG: 72075186224037895 Got TEvSchemaChangedResult from SS at 72075186224037895 2025-03-28T09:35:48.470770Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2025-03-28T09:35:48.470787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-03-28T09:35:48.470800Z node 2 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715686:0 progress is 1/1 2025-03-28T09:35:48.470807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-03-28T09:35:48.470819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715686, ready parts: 1/1, is published: true 2025-03-28T09:35:48.470836Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715686 datashard 72075186224037894 state Ready 2025-03-28T09:35:48.470866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7486791779102602593:2401] message: TxId: 281474976715686 2025-03-28T09:35:48.470867Z node 2 :TX_DATASHARD DEBUG: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2025-03-28T09:35:48.470907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715686 ready parts: 1/1 2025-03-28T09:35:48.470923Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715686:0 2025-03-28T09:35:48.470938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715686:0 2025-03-28T09:35:48.471045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 6] was 4 2025-03-28T09:35:48.473919Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037894, clientId# [2:7486791779102602706:3004], serverId# [2:7486791779102602707:3005], sessionId# [0:0:0] 2025-03-28T09:35:48.473984Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-28T09:35:48.474879Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-28T09:35:48.474913Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-28T09:35:48.476868Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037895, clientId# [2:7486791779102602716:3011], serverId# [2:7486791779102602717:3012], sessionId# [0:0:0] 2025-03-28T09:35:48.476930Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-03-28T09:35:48.477910Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-03-28T09:35:48.477944Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-03-28T09:35:48.479805Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-28T09:35:48.480663Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-28T09:35:48.480702Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-28T09:35:48.482446Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-03-28T09:35:48.484047Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-03-28T09:35:48.484084Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-03-28T09:35:48.485926Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-28T09:35:48.486932Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-28T09:35:48.486976Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-28T09:35:48.487847Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2025-03-28T09:35:48.488109Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:35:48.488147Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-03-28T09:35:48.489540Z node 2 :TX_DATASHARD DEBUG: 
TTxProposeTransactionBase::Execute at 72075186224037895 2025-03-28T09:35:48.490497Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-03-28T09:35:48.490545Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-03-28T09:35:48.491703Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.11, eph 1} end=0, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2025-03-28T09:35:48.491971Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:35:48.492002Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-03-28T09:35:48.492364Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-28T09:35:48.493026Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-28T09:35:48.493059Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-28T09:35:48.494617Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-03-28T09:35:48.495293Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-03-28T09:35:48.495322Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-03-28T09:35:48.496969Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-28T09:35:48.497663Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-28T09:35:48.497694Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-28T09:35:48.499476Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-03-28T09:35:48.500179Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-03-28T09:35:48.500217Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-03-28T09:35:48.502092Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-28T09:35:48.502952Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-28T09:35:48.503003Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-28T09:35:48.504671Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.16, eph 2} end=0, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2025-03-28T09:35:48.505005Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:35:48.505027Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2025-03-28T09:35:48.505257Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-03-28T09:35:48.506085Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-03-28T09:35:48.506126Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-03-28T09:35:48.507380Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.16, eph 2} end=0, 
4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2025-03-28T09:35:48.507741Z node 2 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2025-03-28T09:35:48.507760Z node 2 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2025-03-28T09:35:48.507927Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-28T09:35:48.509624Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-28T09:35:48.509673Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-28T09:35:48.511364Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-03-28T09:35:48.512705Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-03-28T09:35:48.512754Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-03-28T09:35:48.514386Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037894 2025-03-28T09:35:48.515734Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037894 2025-03-28T09:35:48.515782Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2025-03-28T09:35:48.517467Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037895 2025-03-28T09:35:48.518896Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037895 2025-03-28T09:35:48.518961Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 Check that tablet 72075186224037892 was deleted 2025-03-28T09:35:48.519432Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2025-03-28T09:35:48.519661Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2025-03-28T09:35:48.519918Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2025-03-28T09:35:48.520163Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2025-03-28T09:35:48.520385Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2025-03-28T09:35:48.520700Z node 2 :HIVE WARN: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) >> Cdc::Alter [GOOD] >> Cdc::AddColumn >> Cdc::DisableStream [GOOD] >> Cdc::InitialScan >> TColumnShardTestSchema::ForgetAfterFail >> TFlatTest::SelectRangeBothLimit [GOOD] >> TObjectStorageListingTest::ManyDeletes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeBothLimit [GOOD] Test command err: 2025-03-28T09:35:48.620557Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791777589451347:2060];send_to=[0:7307199536658146131:7762515]; 
2025-03-28T09:35:48.620743Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eca/r3tmp/tmpsz1I4k/pdisk_1.dat 2025-03-28T09:35:48.843475Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:48.949876Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:48.949960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:48.951621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18068 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:49.038714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:35:49.077203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... insert finished 6405 usec 6037 usec 6088 usec 6450 usec 6307 usec 6279 usec 13192 usec 6466 usec 8076 usec 7028 usec 2025-03-28T09:35:50.708663Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791787718087204:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:50.708938Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001eca/r3tmp/tmp3m9Fxb/pdisk_1.dat 2025-03-28T09:35:50.764855Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:50.827458Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:50.827603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:50.829100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29596 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:50.923393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:50.942076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::ManyDeletes [GOOD] Test command err: 2025-03-28T09:35:39.173544Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791737194851731:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:39.173760Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ed2/r3tmp/tmpvJ5ib6/pdisk_1.dat 2025-03-28T09:35:39.385609Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22873, node 1 2025-03-28T09:35:39.450266Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:39.450286Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:39.450296Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:39.450416Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:39.506483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:39.506552Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:39.508168Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:25867 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:39.674824Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:39.695719Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:42.948797Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791752561433080:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:42.948934Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ed2/r3tmp/tmp5SC6bo/pdisk_1.dat 2025-03-28T09:35:43.034767Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10906, node 2 2025-03-28T09:35:43.071000Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:43.071019Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:43.071024Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:43.071101Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:43.075971Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:43.076041Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:43.077602Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6147 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:43.231710Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:43.247715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... ..2025-03-28T09:35:47.948819Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486791752561433080:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:47.948893Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:35:48.058071Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:35:48.058184Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037890 2025-03-28T09:35:48.058822Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037889 2025-03-28T09:35:48.058858Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037890 2025-03-28T09:35:48.059029Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037891 2025-03-28T09:35:48.059082Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037892 2025-03-28T09:35:48.059462Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037891 2025-03-28T09:35:48.059662Z node 2 :TX_DATASHARD DEBUG: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037892 2025-03-28T09:35:48.060043Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:35:48.060075Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037890 2025-03-28T09:35:48.060541Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037892 2025-03-28T09:35:48.060546Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037891 2025-03-28T09:35:48.067361Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976711500 at step 1743154548107 at tablet 72075186224037889 { Transactions { TxId: 281474976711500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154548107 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 
2025-03-28T09:35:48.067361Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976711500 at step 1743154548107 at tablet 72075186224037891 { Transactions { TxId: 281474976711500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154548107 MediatorID: 72057594046382081 TabletID: 72075186224037891 } 2025-03-28T09:35:48.067385Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:35:48.067386Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-28T09:35:48.067469Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:35:48.067480Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-03-28T09:35:48.067483Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:35:48.067493Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:35:48.067519Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743154548107:281474976711500] in PlanQueue unit at 72075186224037889 2025-03-28T09:35:48.067524Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743154548107:281474976711500] in PlanQueue unit at 72075186224037891 2025-03-28T09:35:48.067552Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 got data tx from cache 1743154548107:281474976711500 2025-03-28T09:35:48.067552Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037891 got data tx from cache 1743154548107:281474976711500 2025-03-28T09:35:48.068283Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:35:48.068587Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976711500 at step 1743154548107 at tablet 72075186224037890 { Transactions { TxId: 281474976711500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154548107 MediatorID: 72057594046382081 TabletID: 72075186224037890 } 2025-03-28T09:35:48.068606Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:35:48.068627Z node 2 :TX_DATASHARD DEBUG: tx 281474976711500 released its data 2025-03-28T09:35:48.068644Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:35:48.068670Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:35:48.068680Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:35:48.068692Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743154548107:281474976711500] in PlanQueue unit at 72075186224037890 2025-03-28T09:35:48.068713Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037890 got data tx from cache 1743154548107:281474976711500 2025-03-28T09:35:48.068929Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976711500 at step 1743154548107 at tablet 72075186224037892 { Transactions { TxId: 281474976711500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154548107 MediatorID: 72057594046382081 TabletID: 72075186224037892 } 2025-03-28T09:35:48.068944Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-28T09:35:48.068991Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-28T09:35:48.069004Z node 2 
:TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 acti ... TASHARD DEBUG: Planned transaction txId 281474976711911 at step 1743154552615 at tablet 72075186224037892 { Transactions { TxId: 281474976711911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154552615 MediatorID: 72057594046382081 TabletID: 72075186224037892 } 2025-03-28T09:35:52.570988Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:35:52.570990Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-28T09:35:52.570999Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743154552615:281474976711911] in PlanQueue unit at 72075186224037891 2025-03-28T09:35:52.571016Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037891 got data tx from cache 1743154552615:281474976711911 2025-03-28T09:35:52.571064Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-28T09:35:52.571086Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:35:52.571098Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743154552615:281474976711911] in PlanQueue unit at 72075186224037892 2025-03-28T09:35:52.571120Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037892 got data tx from cache 1743154552615:281474976711911 2025-03-28T09:35:52.571550Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:35:52.571784Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1743154552615} 2025-03-28T09:35:52.571829Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:35:52.571951Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 released its data 2025-03-28T09:35:52.571982Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:35:52.572123Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1743154552615} 2025-03-28T09:35:52.572190Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:35:52.572264Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037889 restored its data 2025-03-28T09:35:52.572840Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037890 restored its data 2025-03-28T09:35:52.573221Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 released its data 2025-03-28T09:35:52.573243Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:35:52.573540Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 released its data 2025-03-28T09:35:52.573568Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:35:52.573647Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 1743154552615} 2025-03-28T09:35:52.573685Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-03-28T09:35:52.573726Z node 2 :TX_DATASHARD DEBUG: Complete [1743154552615 : 281474976711911] from 72075186224037891 at tablet 72075186224037891 send result to client [2:7486791795511120399:11510], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:35:52.573754Z node 2 
:TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-28T09:35:52.573938Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037892 step# 1743154552615} 2025-03-28T09:35:52.573977Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-28T09:35:52.574561Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037892 restored its data 2025-03-28T09:35:52.574746Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:35:52.575253Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037890 restored its data 2025-03-28T09:35:52.575325Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 released its data 2025-03-28T09:35:52.575344Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:35:52.575689Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:35:52.576010Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-28T09:35:52.576089Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:35:52.576408Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037892 restored its data 2025-03-28T09:35:52.576654Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037889 restored its data 2025-03-28T09:35:52.576854Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:35:52.577138Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:35:52.577171Z node 2 :TX_DATASHARD DEBUG: Complete [1743154552615 : 281474976711911] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7486791795511120399:11510], exec latency: 5 ms, propose latency: 7 ms 2025-03-28T09:35:52.577189Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:35:52.577859Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-03-28T09:35:52.577881Z node 2 :TX_DATASHARD DEBUG: Complete [1743154552615 : 281474976711911] from 72075186224037892 at tablet 72075186224037892 send result to client [2:7486791795511120399:11510], exec latency: 5 ms, propose latency: 6 ms 2025-03-28T09:35:52.577894Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-28T09:35:52.578606Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 released its data 2025-03-28T09:35:52.578640Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:35:52.585069Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:35:52.585725Z node 2 :TX_DATASHARD DEBUG: tx 281474976711911 at 72075186224037889 restored its data 2025-03-28T09:35:52.589149Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:35:52.591816Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:35:52.591861Z node 2 :TX_DATASHARD DEBUG: Complete [1743154552615 : 281474976711911] from 72075186224037889 at tablet 72075186224037889 send result to client [2:7486791795511120399:11510], exec 
latency: 19 ms, propose latency: 22 ms 2025-03-28T09:35:52.591880Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:35:52.603951Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-03-28T09:35:52.604533Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2025-03-28T09:35:52.604882Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2025-03-28T09:35:52.605232Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "" contents: 0 common prefixes: 0 2025-03-28T09:35:52.605405Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 1 2025-03-28T09:35:52.605618Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-03-28T09:35:52.605699Z node 2 :TX_DATASHARD DEBUG: 72075186224037891 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 0 2025-03-28T09:35:52.605879Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2025-03-28T09:35:52.606194Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2025-03-28T09:35:52.606530Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2025-03-28T09:35:52.606912Z node 2 
:TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Godfather.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "/Videos/Godfather.avi" contents: 2 common prefixes: 0 2025-03-28T09:35:52.607389Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/House of Cards/Season 1/Chapter 1.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 4 last path: "/Videos/House of Cards/Season 1/Chapter 1.avi" contents: 3 common prefixes: 1 2025-03-28T09:35:52.607894Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Terminator 2.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 5 last path: "/Videos/Terminator 2.avi" contents: 4 common prefixes: 1 2025-03-28T09:35:52.608043Z node 2 :TX_DATASHARD DEBUG: 72075186224037892 S3 Listing: finished status: 0 description: "" contents: 4 common prefixes: 1
>> TLocksTest::GoodLock
>> TLocksFatTest::RangeSetRemove
>> Cdc::AddColumn [GOOD]
>> Cdc::AddColumn_TopicAutoPartitioning
>> Cdc::InitialScan [GOOD]
>> Cdc::InitialScanDebezium
>> RetryPolicy::TWriteSession_TestPolicy [GOOD]
>> RetryPolicy::TWriteSession_TestBrokenPolicy
>> Cdc::ResolvedTimestamps [GOOD]
>> Cdc::ResolvedTimestampsMultiplePartitions
>> TLocksTest::GoodSameShardLock [GOOD]
>> TLocksFatTest::RangeSetRemove [GOOD]
>> TLocksFatTest::ShardLocks
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodSameShardLock [GOOD]
Test command err: 2025-03-28T09:35:35.800118Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791723792644301:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:35.800215Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ed4/r3tmp/tmpyefumg/pdisk_1.dat 2025-03-28T09:35:36.018282Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:36.134487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:36.134598Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:36.136375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:65140 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:36.260714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:36.282291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:36.370992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:36.410895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:37.905987Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791729287316415:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:37.906030Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ed4/r3tmp/tmpLHrYLM/pdisk_1.dat 2025-03-28T09:35:37.979667Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:38.032352Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:38.032454Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:38.034139Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61951 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:38.130743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:38.142981Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:38.209302Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:38.241506Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:40.070355Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486791742791383548:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:40.070427Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ed4/r3tmp/tmp7KiZb9/pdisk_1.dat 2025-03-28T09:35:40.144403Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:40.193629Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:40.193699Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:40.195365Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19011 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:40.282508Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:40.299048Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:40.365259Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:40.460515Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:42.313113Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486791751583677980:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:42.313163Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ed4/r3tmp/tmpCw5iqk/pdisk_1.dat 2025-03-28T09:35:42.385914Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:42.436134Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:42.436213Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:42.437671Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11344 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:42.513219Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:42.524212Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiti ... 01ed4/r3tmp/tmpOXjkTE/pdisk_1.dat 2025-03-28T09:35:49.412710Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:49.466890Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:49.466966Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:49.468581Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22131 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:49.635819Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:49.651829Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:35:49.697164Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:49.734093Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:51.989532Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486791792234438236:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:51.989587Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ed4/r3tmp/tmpb1nIUY/pdisk_1.dat 2025-03-28T09:35:52.080219Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:52.124146Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:52.124225Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:52.125758Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:14111 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:52.288134Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:52.305499Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:52.372347Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:52.414894Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:35:54.702238Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486791804348748265:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:54.702295Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ed4/r3tmp/tmpFbc1Jv/pdisk_1.dat 2025-03-28T09:35:54.793364Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:54.830764Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:54.830872Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:54.832482Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18183 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:55.019365Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:55.035158Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:55.106216Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:55.179733Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:35:57.754290Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486791814277520817:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:57.754351Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ed4/r3tmp/tmp37csyw/pdisk_1.dat 2025-03-28T09:35:57.858609Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:57.889171Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:57.889246Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:57.890596Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19274 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:58.054969Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:58.073029Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:58.167566Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:58.212305Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
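Every test block above follows the same bootstrap: start a server, connect, then block in WaitRootIsUp('dc-1'), which the log records as a TClient::Ls request against the root followed by "WaitRootIsUp 'dc-1' success.". The shape of such a readiness poll is roughly the following — a minimal sketch with an injected probe, not the real TClient API; all identifiers here are illustrative:

    #include <chrono>
    #include <functional>
    #include <stdexcept>
    #include <string>
    #include <thread>

    // Sketch of a WaitRootIsUp-style readiness poll: keep asking the scheme
    // to describe the root path until the probe reports success or a deadline
    // passes. In the real test client the probe would be TClient::Ls(root)
    // checked for StatusCode: SUCCESS; here it is injected as a callback.
    void WaitRootIsUp(const std::string& root,
                      const std::function<bool(const std::string&)>& probe,
                      std::chrono::seconds timeout = std::chrono::seconds(30)) {
        const auto deadline = std::chrono::steady_clock::now() + timeout;
        while (!probe(root)) {
            if (std::chrono::steady_clock::now() >= deadline) {
                throw std::runtime_error(root + " did not come up");
            }
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
        }
    }

Under this shape, the Ls request/response pair each dump prints before "success" is simply the probe attempt that finally succeeded.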
>> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad
>> Cdc::AddColumn_TopicAutoPartitioning [GOOD]
>> Cdc::AddIndex
>> Cdc::InitialScanDebezium [GOOD]
>> Cdc::InitialScanRacyCompleteAndRequest
>> Cdc::ResolvedTimestampsMultiplePartitions [GOOD]
>> Cdc::ResolvedTimestampsVolatileOutOfOrder
>> TVersions::Wreck0 [GOOD]
>> TVersions::Wreck0Reverse
>> TLocksFatTest::ShardLocks [GOOD]
>> TLocksTest::GoodLock [GOOD]
>> TLocksTest::GoodNullLock
------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::ShardLocks [GOOD]
Test command err: 2025-03-28T09:35:56.763969Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791812849685150:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:56.764069Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ec3/r3tmp/tmpYC0A3M/pdisk_1.dat 2025-03-28T09:35:56.979722Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:57.097370Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:57.097489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:57.099032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7727 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:57.175147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:57.197931Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:57.290916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:57.324614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:35:59.493677Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791826428841270:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:59.493724Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ec3/r3tmp/tmp87g5Co/pdisk_1.dat 2025-03-28T09:35:59.553574Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:59.615679Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:59.615753Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:59.617317Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16246 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:59.691063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:59.702117Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:59.734268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:59.764999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:36:01.862956Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486791833633442378:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:01.863026Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ec3/r3tmp/tmpW2e8Ms/pdisk_1.dat 2025-03-28T09:36:01.928266Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:01.984793Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:01.984853Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:01.986372Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15232 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:36:02.075323Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:36:02.089555Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:02.154316Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:02.219218Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:36:04.185506Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486791845065546159:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:04.185565Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ec3/r3tmp/tmpI0bnQG/pdisk_1.dat 2025-03-28T09:36:04.251876Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:04.308169Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:04.308245Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:04.309716Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13183 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:36:04.397373Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:36:04.414296Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:04.459845Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:04.512809Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
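The S3 Listing lines in the ManyDeletes dump earlier in this section show delimiter-style listing over a sorted key range: each pass resumes from a "last path", and keys that share a deeper sub-prefix are folded into "common prefixes" rather than returned individually. A rough, self-contained illustration of that pattern — a minimal sketch over a plain sorted map, not YDB's actual datashard code; all identifiers are hypothetical:

    #include <map>
    #include <set>
    #include <string>
    #include <vector>

    // One listing pass: collect keys under `prefix`, folding anything past
    // `delimiter` into a common prefix, resuming strictly after `startAfter`.
    struct ListResult {
        std::vector<std::string> contents;     // full keys under the prefix
        std::set<std::string> commonPrefixes;  // folded "subdirectories"
        std::string lastPath;                  // continuation point
        bool finished = false;
    };

    ListResult ListOnce(const std::map<std::string, int>& objects,
                        const std::string& prefix, char delimiter,
                        const std::string& startAfter, size_t maxKeys) {
        ListResult r;
        auto it = startAfter.empty() ? objects.lower_bound(prefix)
                                     : objects.upper_bound(startAfter);
        for (; it != objects.end(); ++it) {
            const std::string& key = it->first;
            if (key.compare(0, prefix.size(), prefix) != 0) break;  // left range
            size_t d = key.find(delimiter, prefix.size());
            if (d != std::string::npos) {
                r.commonPrefixes.insert(key.substr(0, d + 1));  // fold subtree
            } else {
                r.contents.push_back(key);
            }
            r.lastPath = key;
            if (r.contents.size() + r.commonPrefixes.size() >= maxKeys) {
                return r;  // partial result; caller resumes from lastPath
            }
        }
        r.finished = true;
        return r;
    }

A caller loops, feeding lastPath back as startAfter until finished is set; the "restarted: N last path: ..." entries in the dump trace this kind of resumption inside the shard, whatever the precise restart trigger there is.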
>> Cdc::InitialScanRacyCompleteAndRequest [GOOD]
>> Cdc::InitialScanAndLimits
>> TSubscriberCombinationsTest::MigratedPathRecreation
>> RetryPolicy::RetryWithBatching [GOOD]
>> TSubscriberCombinationsTest::MigratedPathRecreation [GOOD]
>> TSubscriberTest::Boot
>> TNodeBrokerTest::TestRandomActions [GOOD]
>> TSubscriberTest::Boot [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Boot [GOOD]
Test command err: 2025-03-28T09:36:08.867616Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-03-28T09:36:08.867664Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Successful handshake: owner# 800, generation# 1 2025-03-28T09:36:08.867801Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:34:2065] 2025-03-28T09:36:08.867827Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Commit generation: owner# 800, generation# 1 2025-03-28T09:36:08.867874Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:35:2066] 2025-03-28T09:36:08.867899Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Successful handshake: owner# 900, generation# 1 2025-03-28T09:36:08.868079Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:35:2066] 2025-03-28T09:36:08.868102Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Commit generation: owner# 900, generation# 1 2025-03-28T09:36:08.868159Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:36:08.868463Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:41:2068] 2025-03-28T09:36:08.868486Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside 2025-03-28T09:36:08.870084Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Subscribe: subscriber# [1:41:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-28T09:36:08.870210Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:42:2068] 2025-03-28T09:36:08.870226Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside 2025-03-28T09:36:08.870253Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Subscribe: subscriber# [1:42:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-28T09:36:08.870345Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 1 }: sender# [1:43:2068] 2025-03-28T09:36:08.870361Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Upsert description: path# /root/db/dir_inside 2025-03-28T09:36:08.870381Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:9:2056] Subscribe: subscriber# [1:43:2068], path# /root/db/dir_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2025-03-28T09:36:08.870439Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:3:2050] 2025-03-28T09:36:08.870485Z node 1 :SCHEME_BOARD_REPLICA 
DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:41:2068] 2025-03-28T09:36:08.870517Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:6:2053] 2025-03-28T09:36:08.870537Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:42:2068] 2025-03-28T09:36:08.870555Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:43:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:9:2056] 2025-03-28T09:36:08.870573Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2068] 2025-03-28T09:36:08.870625Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:38:2068] 2025-03-28T09:36:08.870677Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:39:2068] 2025-03-28T09:36:08.870720Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Set up state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:36:08.870757Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside Version: 0 }: sender# [1:40:2068] 2025-03-28T09:36:08.870778Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:37:2068][/root/db/dir_inside] Ignore empty state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== !argsLeft.IsDeletion 2025-03-28T09:36:08.870946Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:34:2065], cookie# 0, event size# 118 2025-03-28T09:36:08.870994Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:3:2050] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2025-03-28T09:36:08.875041Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:3:2050] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-03-28T09:36:08.875177Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:3:2050] 2025-03-28T09:36:08.875226Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:41:2068] 2025-03-28T09:36:08.875270Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 800, LocalPathId: 1111] Version: 1 }: sender# [1:38:2068] 2025-03-28T09:36:08.875321Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: 
[main][1:37:2068][/root/db/dir_inside] Update to strong state: owner# [1:36:2067], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() < argsRight.GetSuperId() =========== !argsRight.IsDeletion 2025-03-28T09:36:08.875472Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [1:35:2066], cookie# 0, event size# 117 2025-03-28T09:36:08.875495Z node 1 :SCHEME_BOARD_REPLICA NOTICE: [1:6:2053] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2025-03-28T09:36:08.875531Z node 1 :SCHEME_BOARD_REPLICA INFO: [1:6:2053] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2025-03-28T09:36:08.875606Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:42:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:6:2053] 2025-03-28T09:36:08.875642Z node 1 :SCHEME_BOARD_REPLICA DEBUG: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:42:2068] 2025-03-28T09:36:08.875697Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:37:2068][/root/db/dir_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/db/dir_inside PathId: [OwnerId: 900, LocalPathId: 11] Version: 1 }: sender# [1:39:2068] 2025-03-28T09:36:08.875746Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:37:2068][/root/db/dir_inside] Path was updated to new version: owner# [1:36:2067], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 1111], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 900, LocalPathId: 11], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:36:09.318156Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:36:09.318726Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2025-03-28T09:36:09.318785Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2025-03-28T09:36:09.318823Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][3:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2025-03-28T09:36:09.318887Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:35:2065] 2025-03-28T09:36:09.318924Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:36:2065] 2025-03-28T09:36:09.318950Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][3:34:2065][path] Set up state: owner# [3:33:2064], 
state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:36:09.318984Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][3:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:37:2065] 2025-03-28T09:36:09.319016Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: [main][3:34:2065][path] Ignore empty state: owner# [3:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2025-03-28T09:31:12.347322Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.347364Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.347396Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:31:12.347937Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-28T09:31:12.348008Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.348058Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.349324Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008426s 2025-03-28T09:31:12.349935Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-28T09:31:12.349972Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.349997Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.350033Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006294s 2025-03-28T09:31:12.350447Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-03-28T09:31:12.350484Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.350505Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:31:12.350548Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005593s 2025-03-28T09:31:12.368251Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1743154272368211 2025-03-28T09:31:12.656384Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486790591952447058:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:12.656446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:31:12.689032Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486790592955088150:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:31:12.689179Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:31:12.834874Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:31:12.846723Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001b17/r3tmp/tmpqomuAi/pdisk_1.dat 2025-03-28T09:31:13.029568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:13.029697Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:31:13.034854Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:31:13.036748Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:13.096794Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:13.104316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:31:13.104399Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 27065, node 1 2025-03-28T09:31:13.114216Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:31:13.114584Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:31:13.118132Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:31:13.155217Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001b17/r3tmp/yandexhj1W5D.tmp 2025-03-28T09:31:13.155243Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001b17/r3tmp/yandexhj1W5D.tmp 2025-03-28T09:31:13.155393Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001b17/r3tmp/yandexhj1W5D.tmp 2025-03-28T09:31:13.155487Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:31:13.203713Z INFO: TTestServer started on Port 63991 GrpcPort 27065 TClient is connected to server 
localhost:63991 PQClient connected to localhost:27065 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:31:13.462376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-28T09:31:15.813381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790604837349964:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:15.813470Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790604837349941:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:15.813768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:15.816895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T09:31:15.820279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486790604837349999:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:15.820374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:31:15.832312Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486790604837349970:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T09:31:16.044238Z node 1 :TX_PROXY ERROR: Actor# [1:7486790604837350046:2683] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:31:16.064954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:31:16.072451Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486790605839990376:2314], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:31:16.072694Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yjk1YjlhOTYtMjYzMzBhYzUtNmUzNTkzNTQtMWQ2NGZjNDY=, ActorId: [2:7486790605839990336:2308], ActorState: ExecuteState, TraceId: 01jqe1esj6fwxyke0w6c2cvb6x, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:31:16.072708Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486790609132317364:2348], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:31:16.072884Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODg4NDZjOGQtNjVlZjZmMTUtMjlmMmQ4NS0xNzUyOTAy, ActorId: [1:7486790604837349938:2336], ActorState: ExecuteState, TraceId: 01jqe1esf391reh4zyaypq6pw3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:31:16.074858Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:31:16.075244Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:31:16.172647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at s ... Q: 72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000000_00000_0000000010_00000| 2025-03-28T09:36:07.559073Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:36:07.559096Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:36:07.559112Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:36:07.559167Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:36:07.559276Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 10 size 1208 2025-03-28T09:36:07.562114Z node 17 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 10 size 1208 actorID [17:7486791849157763435:2632] 2025-03-28T09:36:07.562210Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:36:07.562222Z node 17 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 size 1208 2025-03-28T09:36:07.562251Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-28T09:36:07.562295Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-03-28T09:36:07.562319Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:36:07.562342Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-03-28T09:36:07.562359Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:36:07.562378Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-03-28T09:36:07.562391Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:36:07.562408Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-03-28T09:36:07.562423Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:36:07.562444Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2025-03-28T09:36:07.562457Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:36:07.562476Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-03-28T09:36:07.562491Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:36:07.562510Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2025-03-28T09:36:07.562522Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:36:07.562539Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2025-03-28T09:36:07.562555Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-28T09:36:07.562574Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2025-03-28T09:36:07.562589Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:36:07.562609Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2025-03-28T09:36:07.562631Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-03-28T09:36:07.562740Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:36:07.562774Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:36:07.562808Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-28T09:36:07.562903Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2025-03-28T09:36:07.562942Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 10 2025-03-28T09:36:07.563080Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-03-28T09:36:07.563101Z node 17 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:36:07.563149Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1743154567551 queuesize 0 startOffset 0 2025-03-28T09:36:07.563476Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 6 } 2025-03-28T09:36:07.563534Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session: acknoledged message 1 2025-03-28T09:36:07.563579Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session: acknoledged message 2 2025-03-28T09:36:07.563601Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session: acknoledged message 3 2025-03-28T09:36:07.563619Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session: acknoledged message 4 2025-03-28T09:36:07.563641Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session: acknoledged message 5 2025-03-28T09:36:07.563661Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session: acknoledged message 6 2025-03-28T09:36:07.563696Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session: acknoledged message 7 2025-03-28T09:36:07.563716Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session: acknoledged message 8 2025-03-28T09:36:07.563739Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session: acknoledged message 9 2025-03-28T09:36:07.563763Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session: acknoledged message 10 2025-03-28T09:36:07.563982Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session: close. 
Timeout = 0 ms 2025-03-28T09:36:07.564042Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session will now close 2025-03-28T09:36:07.564090Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session: aborting 2025-03-28T09:36:07.564792Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session is aborting and will not restart 2025-03-28T09:36:07.564795Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:36:07.564845Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0] Write session: destroy 2025-03-28T09:36:07.565018Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0 grpc read done: success: 0 data: 2025-03-28T09:36:07.565041Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0 grpc read failed 2025-03-28T09:36:07.565343Z node 17 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 7 sessionId: test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0 2025-03-28T09:36:07.565365Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|37624c10-a60c1903-c24eed71-cad93244_0 is DEAD 2025-03-28T09:36:07.565745Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:36:07.565867Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7486791857747698358:2668] destroyed 2025-03-28T09:36:07.565926Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestRandomActions [GOOD] Test command err: 2025-03-28T09:34:26.769193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:34:26.769262Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:26.871926Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-03-28T09:34:26.887644Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:34:27.603751Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:34:27.636305Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-03-28T09:34:27.661735Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-28T09:34:27.662042Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-28T09:34:27.662354Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-28T09:34:27.680411Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-28T09:34:27.680797Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-28T09:34:27.753471Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-28T09:34:27.806551Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-03-28T09:34:27.890248Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-03-28T09:34:29.126632Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-03-28T09:34:29.140338Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-03-28T09:34:29.140862Z node 1 :NODE_BROKER ERROR: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2025-03-28T09:34:29.141177Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-28T09:34:29.141639Z node 1 :NODE_BROKER ERROR: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2025-03-28T09:34:29.495561Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:34:29.495958Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:34:29.528499Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:34:29.528874Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:34:29.529184Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:34:30.137222Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-03-28T09:34:30.138175Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-03-28T09:34:30.138661Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-03-28T09:34:30.139112Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-03-28T09:34:30.139497Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-03-28T09:34:30.523515Z node 1 
:NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-28T09:34:30.537845Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-03-28T09:34:30.942832Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:34:30.943292Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:34:30.943712Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:34:30.982965Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-03-28T09:34:30.983276Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-03-28T09:34:31.022615Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-28T09:34:31.076052Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-03-28T09:34:31.856503Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-03-28T09:34:32.492297Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:34:32.492691Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:34:32.492949Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:34:32.493193Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:34:32.895788Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-03-28T09:34:32.921344Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-03-28T09:34:33.020952Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-03-28T09:34:33.626186Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-28T09:34:33.639390Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-03-28T09:34:34.435521Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-03-28T09:34:34.829083Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-03-28T09:34:34.856857Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-03-28T09:34:34.898712Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-03-28T09:34:34.899051Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-03-28T09:34:34.980350Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-28T09:34:35.007955Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-28T09:34:35.530027Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-28T09:34:35.543769Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-03-28T09:34:35.569990Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-03-28T09:34:35.572629Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 
2025-03-28T09:34:35.655218Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-03-28T09:34:35.695709Z node 1 :NODE_BROKER ERROR: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2025-03-28T09:34:35.708755Z node 1 :NODE_BROKER ERROR: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2025-03-28T09:34:35.722566Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-28T09:34:35.723085Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-28T09:34:36.737202Z node 1 :NODE_BROKER ERROR: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2025-03-28T09:34:36.738051Z node 1 :NODE_BROKER ERROR: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2025-03-28T09:34:36.738633Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:34:36.739255Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:34:36.740539Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:34:36.741100Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:34:36.741688Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:34:36.742255Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:34:36.742790Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:34:36.743314Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:34:36.760541Z node 1 :NODE_BROKER ERROR: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2025-03-28T09:34:36.761242Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:34:36.761821Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:34:36.788652Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-03-28T09:34:36.789493Z node 1 :NODE_BROKER ERROR: Cannot register node host11:10: ERROR_TEMP: No free node IDs 2025-03-28T09:34:36.790102Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:34:37.259713Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-03-28T09:34:37.260381Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-03-28T09:34:37.320501Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-03-28T09:34:37.824363Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-03-28T09:34:37.855997Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-03-28T09:34:37.907302Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-03-28T09:34:37.923530Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-03-28T09:34:38.443879Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-03-28T09:34:38.461119Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: 
WRONG_REQUEST: Unknown node 2025-03-28T09:34:38.480614Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-03-28T09:34:40.139106Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-28T09:34:40.139579Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-28T09:34:40.140406Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-28T09:34:40.167254Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-03-28T09:34:40.196008Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-03-28T09:34:40.196713Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-03-28T09:34:40.252465Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-03-28T09:34:40.279170Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-28T09:34:40.280116Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-03-28T09:34:40.281117Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-28T09:34:40.766732Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:34:40.892808Z node 1 :NODE ... expired 2025-03-28T09:35:59.975936Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:00.000820Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2025-03-28T09:36:00.003476Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:00.005715Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:00.008074Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:00.009849Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2025-03-28T09:36:00.011959Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:00.045516Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:00.047886Z node 1 :NODE_BROKER ERROR: Cannot register node host11:10: ERROR_TEMP: No free node IDs 2025-03-28T09:36:00.063493Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:00.064784Z node 1 :NODE_BROKER ERROR: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2025-03-28T09:36:00.097221Z node 1 :NODE_BROKER ERROR: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2025-03-28T09:36:00.098618Z node 1 :NODE_BROKER ERROR: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2025-03-28T09:36:00.100080Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:00.101402Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:00.102472Z node 1 :NODE_BROKER ERROR: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2025-03-28T09:36:00.104863Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs 
2025-03-28T09:36:00.106159Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:00.107472Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:00.108557Z node 1 :NODE_BROKER ERROR: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2025-03-28T09:36:00.124593Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:00.144392Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:00.146331Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:01.144418Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:36:01.287628Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-03-28T09:36:01.319008Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-03-28T09:36:01.968184Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-03-28T09:36:02.375648Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-03-28T09:36:02.377583Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-03-28T09:36:02.379476Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-03-28T09:36:02.383209Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-03-28T09:36:02.411028Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-03-28T09:36:02.413279Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2025-03-28T09:36:02.448889Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2025-03-28T09:36:02.919252Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-03-28T09:36:03.019923Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:36:03.037965Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-28T09:36:03.069863Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2025-03-28T09:36:03.134641Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-03-28T09:36:03.193610Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-28T09:36:03.195078Z node 1 :NODE_BROKER ERROR: Cannot register node host11:10: ERROR_TEMP: No free node IDs 2025-03-28T09:36:03.251893Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-03-28T09:36:03.287847Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-28T09:36:03.970993Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-28T09:36:03.987059Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-28T09:36:03.988222Z node 1 :NODE_BROKER ERROR: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2025-03-28T09:36:04.062810Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: 
Unknown node 2025-03-28T09:36:04.078205Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-28T09:36:04.220138Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-28T09:36:04.221939Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-28T09:36:04.225023Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-28T09:36:04.228208Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-28T09:36:04.229786Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-28T09:36:04.903916Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2025-03-28T09:36:04.906399Z node 1 :NODE_BROKER ERROR: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2025-03-28T09:36:05.330289Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:05.332501Z node 1 :NODE_BROKER ERROR: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2025-03-28T09:36:05.333592Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-03-28T09:36:05.768602Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2025-03-28T09:36:05.785198Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2025-03-28T09:36:05.817570Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-28T09:36:05.819012Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2025-03-28T09:36:05.848896Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-03-28T09:36:05.850450Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2025-03-28T09:36:05.895705Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-28T09:36:05.922859Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-28T09:36:05.938299Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-28T09:36:05.939779Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-28T09:36:05.941911Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2025-03-28T09:36:05.993295Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2025-03-28T09:36:06.638143Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2025-03-28T09:36:06.653591Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-03-28T09:36:07.177519Z node 1 :NODE_BROKER ERROR: Cannot register node host11:10: ERROR_TEMP: No free node IDs 2025-03-28T09:36:07.181028Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:07.195967Z node 1 :NODE_BROKER ERROR: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2025-03-28T09:36:07.227841Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2025-03-28T09:36:07.229276Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 
2025-03-28T09:36:07.230492Z node 1 :NODE_BROKER ERROR: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2025-03-28T09:36:07.717870Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-28T09:36:07.763481Z node 1 :NODE_BROKER ERROR: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2025-03-28T09:36:07.764993Z node 1 :NODE_BROKER ERROR: Cannot register node host5:4: ERROR_TEMP: No free node IDs 2025-03-28T09:36:07.768359Z node 1 :NODE_BROKER ERROR: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2025-03-28T09:36:07.769857Z node 1 :NODE_BROKER ERROR: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2025-03-28T09:36:07.771272Z node 1 :NODE_BROKER ERROR: Cannot register node host12:11: ERROR_TEMP: No free node IDs 2025-03-28T09:36:07.773051Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-28T09:36:07.789555Z node 1 :NODE_BROKER ERROR: Cannot register node host8:7: ERROR_TEMP: No free node IDs 2025-03-28T09:36:07.792826Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2025-03-28T09:36:07.794586Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-28T09:36:07.807035Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-28T09:36:07.808314Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-28T09:36:07.810004Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-28T09:36:07.813995Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-28T09:36:07.819133Z node 1 :NODE_BROKER ERROR: Cannot register node host5:4: ERROR_TEMP: No free node IDs 2025-03-28T09:36:07.820470Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-28T09:36:07.863587Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-28T09:36:07.865266Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-28T09:36:07.869042Z node 1 :NODE_BROKER ERROR: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2025-03-28T09:36:07.874868Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-28T09:36:07.962550Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-28T09:36:07.965299Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2025-03-28T09:36:07.966657Z node 1 :NODE_BROKER ERROR: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired >> Cdc::AddIndex [GOOD] >> Cdc::AddStream |95.1%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError >> Cdc::InitialScanAndLimits [GOOD] >> Cdc::InitialScanComplete >> YdbIndexTable::MultiShardTableOneIndex >> DataShardTxOrder::RandomPoints_DelayRS_Reboot >> Cdc::ResolvedTimestampsVolatileOutOfOrder [GOOD] >> Cdc::SequentialSplitMerge >> Cdc::AddStream [GOOD] >> Cdc::AwsRegion >> Secret::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple [GOOD] Test command err: 2025-03-28T09:34:02.445116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-28T09:34:02.445620Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:34:02.445689Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e54/r3tmp/tmpewscsM/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 8692, node 1
TClient is connected to server localhost:14335
2025-03-28T09:34:03.066081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-28T09:34:03.095503Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:34:03.101841Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:34:03.101909Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:34:03.101941Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:34:03.102160Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:34:03.138824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:34:03.138973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:34:03.150463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS:
Initialization finished
REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1
2025-03-28T09:34:15.184726Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:811:2677], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:15.184890Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:15.194124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480
2025-03-28T09:34:15.434991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:928:2755], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:15.435138Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:15.435487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:933:2760], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:15.440531Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480
2025-03-28T09:34:15.573085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:935:2762], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-28T09:34:16.123333Z node 1 :TX_PROXY ERROR: Actor# [1:1032:2830] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:34:16.892860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480
2025-03-28T09:34:17.337108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480
2025-03-28T09:34:18.039994Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480
2025-03-28T09:34:18.792295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480
2025-03-28T09:34:19.253153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480
2025-03-28T09:34:20.644146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480
snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS:
2025-03-28T09:34:21.095429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480
REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1
FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1
REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1
snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS:
REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1
snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS:
2025-03-28T09:34:37.751309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T09:34:37.751381Z node 1 :IMPORT WARN: Table profiles were not loaded
FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1
REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1
REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1
snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS:
FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1
2025-03-28T09:35:00.629830Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715732. Ctx: { TraceId: 01jqe1nmnedp2fqjgc50mxpzvd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGRiZGRlYmQtYTQ2MGRjY2QtYzkxNzQ5MzktNTNmYTMzNWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1
REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1
REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1
REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1
FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1
REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1
2025-03-28T09:35:23.502476Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715757:0, at schemeshard: 72057594046644480
2025-03-28T09:35:24.562789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715766:0, at schemeshard: 72057594046644480
2025-03-28T09:35:26.001039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715779:0, at schemeshard: 72057594046644480
2025-03-28T09:35:26.371142Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715782:0, at schemeshard: 72057594046644480
REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1
snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1;
FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1
2025-03-28T09:35:38.895411Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715797. Ctx: { TraceId: 01jqe1pt5v3gm5mvra73tp7ffx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQwZmJmNDQtYzI2OWRhNi1kMWI0M2RkOC1iYjc0OWVhOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1
REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1
REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1
REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1
snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:
FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1
REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1
snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS:
REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1
FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1
2025-03-28T09:36:15.676076Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715847. Ctx: { TraceId: 01jqe1qy6t4w5ypqyyztvd64jp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzI0YTc0My05MDQ4YmViMi04MDhhOWJkMS00MmEwMTAwNQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1
REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1
>> Secret::SimpleQueryService [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService [GOOD]
Test command err:
2025-03-28T09:34:05.333303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-28T09:34:05.333656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:34:05.333716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e19/r3tmp/tmpJs7Uwr/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 21157, node 1
TClient is connected to server localhost:6834
2025-03-28T09:34:05.851280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-28T09:34:05.884204Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:34:05.887776Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:34:05.887833Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:34:05.887869Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:34:05.888094Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:34:05.923581Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:34:05.923677Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:34:05.934914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS:
Initialization finished
REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1
2025-03-28T09:34:17.705528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:812:2680], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:17.705681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:823:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:17.705775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:34:17.718220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480
2025-03-28T09:34:17.738225Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:827:2689], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking }
2025-03-28T09:34:17.785180Z node 1 :TX_PROXY ERROR: Actor# [1:878:2721] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:34:18.080957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480
2025-03-28T09:34:19.049119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480
2025-03-28T09:34:19.549815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480
2025-03-28T09:34:20.409668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480
2025-03-28T09:34:21.124971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480
2025-03-28T09:34:21.645477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480
2025-03-28T09:34:22.795405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480
snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS:
2025-03-28T09:34:23.239781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480
REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1
FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1
REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1
snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS:
REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1
snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS:
2025-03-28T09:34:39.711303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T09:34:39.711369Z node 1 :IMPORT WARN: Table profiles were not loaded
FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1
REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1
REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1
snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS:
FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1
2025-03-28T09:35:02.420768Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715728. Ctx: { TraceId: 01jqe1npftd9bjb492t5ez9w94, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YmQ5ZWVkOWItMWEyOTk4YmYtYWNlNjQ3NzMtNzJlMTE0YTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1
REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1
REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1
REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1
FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1
REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1
2025-03-28T09:35:25.558858Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715749:0, at schemeshard: 72057594046644480
2025-03-28T09:35:26.491936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715756:0, at schemeshard: 72057594046644480
2025-03-28T09:35:27.739546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715767:0, at schemeshard: 72057594046644480
2025-03-28T09:35:28.099937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715770:0, at schemeshard: 72057594046644480
REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1
snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1;
FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1
2025-03-28T09:35:40.691306Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715785. Ctx: { TraceId: 01jqe1pvsn3ezzy35jdf42z5ga, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2ZkZmVkODMtNTk5MTI0NDUtZTA2MDQ1ZTEtZWMyYTM3ZTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1
REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1
REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1
REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1
snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:
FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1
REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1
REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1
snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS:
FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1
2025-03-28T09:36:16.877612Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715829. Ctx: { TraceId: 01jqe1qzcr7njkv12hn0w3twdj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNjNGVhNS0zNmNiMmIwZS03ODZlNTFjMy1lMWQxOTRiOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1
REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1
>> TColumnShardTestSchema::DropWriteRace
>> DataShardTxOrder::ZigZag
>> Cdc::InitialScanComplete [GOOD]
>> Cdc::InitialScanEnqueuesZeroRecords
>> TLocksTest::GoodNullLock [GOOD]
>> TColumnShardTestSchema::DropWriteRace [GOOD]
>> Cdc::SequentialSplitMerge [GOOD]
>> Cdc::ShouldBreakLocksOnConcurrentSchemeTx
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::DropWriteRace [GOOD]
Test command err:
2025-03-28T09:36:20.202315Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-28T09:36:20.270784Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-28T09:36:20.287114Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-28T09:36:20.287370Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-28T09:36:20.293135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-28T09:36:20.293330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-28T09:36:20.293507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-28T09:36:20.293596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-28T09:36:20.293660Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-28T09:36:20.293719Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-28T09:36:20.293786Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-28T09:36:20.293864Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-28T09:36:20.293960Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-28T09:36:20.294038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-28T09:36:20.294106Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:36:20.294189Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:36:20.313675Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:36:20.313898Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:36:20.313936Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:36:20.314065Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:36:20.314174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:36:20.314234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:36:20.314268Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:36:20.314342Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:36:20.314390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:36:20.314431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:36:20.314460Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:36:20.314579Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:36:20.314635Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:36:20.314663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:36:20.314700Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:36:20.314756Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:36:20.314791Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:36:20.314823Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:36:20.314848Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:36:20.314902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:36:20.314928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:36:20.314946Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:36:20.314985Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:36:20.315016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:36:20.315033Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:36:20.315295Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=28; 2025-03-28T09:36:20.315368Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-03-28T09:36:20.315424Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=25; 2025-03-28T09:36:20.315473Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=22; 2025-03-28T09:36:20.315571Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:36:20.315624Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:36:20.315660Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:36:20.315820Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:36:20.315854Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:36:20.315877Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:36:20.315984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:36:20.316019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:36:20.316040Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:36:20.316152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:36:20.316176Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:36:20.316194Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:36:20.316277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:36:20.316312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:36:20.316348Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
ECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=48; 2025-03-28T09:36:20.575225Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-03-28T09:36:20.575342Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=70; 2025-03-28T09:36:20.575391Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2025-03-28T09:36:20.575438Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=19; 2025-03-28T09:36:20.575481Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=21; 2025-03-28T09:36:20.575519Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=16; 2025-03-28T09:36:20.575556Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=3231; 2025-03-28T09:36:20.575683Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:36:20.575723Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:36:20.575799Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:36:20.576027Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:36:20.576064Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;fline=columnshard_impl.cpp:521;problem=Background activities cannot be started: no index at tablet; 2025-03-28T09:36:20.576257Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:36:20.576365Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:36:20.576430Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:36:20.576451Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:36:20.576471Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:36:20.576513Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:36:20.576547Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:521;problem=Background activities cannot be started: no index at tablet; 2025-03-28T09:36:20.844879Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=101;this=88923004795968;method=TTxController::StartProposeOnExecute;tx_info=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;fline=schema.h:36;event=sync_schema; 2025-03-28T09:36:20.856620Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923004795968;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881142080;fline=columnshard__propose_transaction.cpp:103;event=actual tx operator; 2025-03-28T09:36:20.856696Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923004795968;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881142080;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:99:2134]; 2025-03-28T09:36:20.856731Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;this=88923004795968;op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:0;;int_this=89197881142080;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=101; 2025-03-28T09:36:20.857018Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-03-28T09:36:20.857125Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000001 at tablet 9437184, mediator 0 2025-03-28T09:36:20.857168Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] execute at tablet 9437184 2025-03-28T09:36:20.857403Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 1 ttl settings: { Version: 1 } at tablet 9437184 2025-03-28T09:36:20.861652Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-03-28T09:36:20.861760Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:245;method=RegisterTable;path_id=1; 2025-03-28T09:36:20.861793Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine.h:144;event=RegisterTable;path_id=1; 2025-03-28T09:36:20.866318Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=1; 2025-03-28T09:36:20.866456Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tx_controller.cpp:211;event=finished_tx;tx_id=101; 2025-03-28T09:36:20.889133Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[2] complete at tablet 9437184 2025-03-28T09:36:20.889282Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6120;columns=10; 2025-03-28T09:36:20.903743Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];fline=actor.cpp:22;event=flush_writing;size=6120;count=1; 2025-03-28T09:36:20.905588Z node 1 :TX_COLUMNSHARD DEBUG: Write (record) into pathId 1 writeId 1 at tablet 9437184 2025-03-28T09:36:20.905926Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-03-28T09:36:20.917445Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2025-03-28T09:36:20.917562Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxWrite;tablet_id=9437184;tx_state=complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:36:20.941866Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;this=88923005123232;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_this=89197881219648;fline=columnshard__propose_transaction.cpp:103;event=actual tx operator; 2025-03-28T09:36:20.941940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;this=88923005123232;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_this=89197881219648;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:99:2134]; 2025-03-28T09:36:20.941979Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;this=88923005123232;op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:99:2134];cookie=00:2;;int_this=89197881219648;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=103; 2025-03-28T09:36:20.942202Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-03-28T09:36:20.942279Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000002 at tablet 9437184, mediator 0 2025-03-28T09:36:20.942337Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] execute at tablet 
9437184 2025-03-28T09:36:20.942533Z node 1 :TX_COLUMNSHARD DEBUG: DropTable for pathId: 1 at tablet 9437184 2025-03-28T09:36:20.942614Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=103;fline=tx_controller.cpp:211;event=finished_tx;tx_id=103; 2025-03-28T09:36:20.954227Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[6] complete at tablet 9437184 2025-03-28T09:36:20.954341Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:36:20.954581Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1000000003 at tablet 9437184, mediator 0 2025-03-28T09:36:20.954645Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[8] execute at tablet 9437184 2025-03-28T09:36:20.954853Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=abstract.h:83;progress_tx_id=102;lock_id=1;broken=0; 2025-03-28T09:36:20.954948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;commit_tx_id=102;commit_lock_id=1;fline=insert_table.cpp:50;event=abort_insertion;path_id=1;blob_range={ Blob: DS:0:[9437184:2:1:3:0:7080:0] Offset: 0 Size: 7080 }; 2025-03-28T09:36:20.955051Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=tx_controller.cpp:211;event=finished_tx;tx_id=102; 2025-03-28T09:36:20.966542Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[8] complete at tablet 9437184 2025-03-28T09:36:20.966629Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:93;progress_tx_id=102;lock_id=1;broken=0; 2025-03-28T09:36:20.966729Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodNullLock [GOOD] Test command err: 2025-03-28T09:35:55.835649Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791806877843219:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:55.835719Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ec6/r3tmp/tmpyev8nc/pdisk_1.dat 2025-03-28T09:35:56.074846Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:56.157329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:56.157423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:56.159139Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10736 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:56.272930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:56.301426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:56.408389Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:56.442328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:57.948665Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791817124808155:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:35:57.948709Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ec6/r3tmp/tmpSo7Qer/pdisk_1.dat 2025-03-28T09:35:58.028059Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:58.077045Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:58.077128Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:58.078170Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10160 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:35:58.167677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:35:58.183497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:58.218873Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:35:58.285650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:00.103290Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486791828924078319:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:00.103331Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ec6/r3tmp/tmpBoWsNA/pdisk_1.dat 2025-03-28T09:36:00.154838Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:00.224961Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:00.225024Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:00.226514Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:62312 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:36:00.291513Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:36:00.305282Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:00.340881Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:00.373987Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:02.473137Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486791837706602569:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:02.473217Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ec6/r3tmp/tmpTPBAh4/pdisk_1.dat 2025-03-28T09:36:02.554524Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:02.599610Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:02.599669Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:02.601242Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32403 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:36:02.704099Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:36:02.720535Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiti ... /001ec6/r3tmp/tmpbUSSJr/pdisk_1.dat 2025-03-28T09:36:09.635006Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:09.671246Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:09.671312Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:09.672363Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27959 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:36:09.823072Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:36:09.839054Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:36:09.883230Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:09.947842Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:12.052744Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486791879506595464:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:12.052840Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ec6/r3tmp/tmp1An4wY/pdisk_1.dat 2025-03-28T09:36:12.126256Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:12.181504Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:12.181578Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:12.182742Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16081 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:36:12.294482Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:36:12.309448Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:12.347506Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:12.383154Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:36:14.641574Z node 9 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7486791887851155433:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:14.641621Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ec6/r3tmp/tmpntJ6kM/pdisk_1.dat 2025-03-28T09:36:14.728559Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:14.766797Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:14.766871Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:14.768376Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3354 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:36:14.919195Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:36:14.934916Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:14.976826Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:15.016007Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:36:17.743408Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486791900330042528:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:17.743486Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001ec6/r3tmp/tmpBNaPuH/pdisk_1.dat 2025-03-28T09:36:17.833871Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:17.878296Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:17.878374Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:17.879841Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7618 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:36:18.019387Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:36:18.036794Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:18.103506Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:18.145804Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty >> Cdc::AwsRegion [GOOD] |95.1%| [TA] $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TA] {RESULT} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TVersions::Wreck0Reverse [GOOD] >> TAsyncIndexTests::OnlineBuild ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::AwsRegion [GOOD] Test command err: 2025-03-28T09:34:36.736031Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:36.736480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:36.736533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012ea/r3tmp/tmpwKhIFL/pdisk_1.dat 2025-03-28T09:34:37.108019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.157349Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:37.205359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:37.205514Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:37.216988Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:37.306000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:37.346187Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:34:37.346481Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:37.395637Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:37.395779Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:34:37.397495Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:34:37.397574Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:34:37.397628Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:34:37.397975Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:34:37.398097Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:34:37.398223Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:34:37.408972Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:34:37.431459Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:34:37.431681Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:34:37.431879Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:34:37.431917Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:34:37.431952Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:34:37.431999Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:34:37.432453Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:34:37.432549Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:34:37.432993Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:34:37.433053Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:34:37.433097Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:34:37.433142Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:34:37.433236Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:34:37.433394Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:34:37.433621Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:34:37.433740Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:34:37.435477Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:34:37.447128Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:34:37.447282Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:34:37.611053Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:34:37.624252Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:34:37.624356Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:34:37.625184Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:34:37.625256Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:34:37.625343Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:34:37.625656Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:34:37.625865Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:34:37.626799Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:34:37.626902Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:34:37.629134Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:34:37.629590Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
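[editor's note] The DEBUG lines above trace a datashard scheme transaction through its lifecycle: propose (TTxProposeTransactionBase), plan at a coordinator step ("Planned transaction ... at step 1000"), execute (CREATE TABLE), and complete with a notify back to the schemeshard. As a reading aid, here is a minimal standalone C++ sketch of that lifecycle. It is not YDB code; all type and state names are illustrative assumptions.

    // Minimal sketch (not YDB code): the scheme-tx lifecycle the datashard logs above,
    // reduced to a standalone state machine. Names are illustrative only.
    #include <cstdint>
    #include <iostream>

    enum class TxState { Proposed, Planned, Executed, Completed };

    struct SchemeTx {
        uint64_t txId;
        uint64_t planStep = 0;          // assigned by the coordinator ("Planned ... at step 1000")
        TxState state = TxState::Proposed;
    };

    int main() {
        SchemeTx tx{281474976715657};
        std::cout << "Propose tx " << tx.txId << "\n";   // TTxProposeTransactionBase::Execute
        tx.planStep = 1000;
        tx.state = TxState::Planned;                     // Planned transaction at step 1000
        std::cout << "Plan at step " << tx.planStep << "\n";
        tx.state = TxState::Executed;                    // TTxProgressTransaction::Execute
        std::cout << "Execute: CREATE TABLE\n";
        tx.state = TxState::Completed;                   // TTxProgressTransaction::Complete
        std::cout << "Complete; notify schemeshard\n";   // "Sending notify to schemeshard"
    }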
2025-03-28T09:34:37.631298Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:34:37.631350Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:34:37.632268Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:34:37.632343Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:34:37.633395Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:34:37.633456Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:34:37.633529Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:34:37.633606Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:34:37.633674Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:34:37.633769Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:34:37.639416Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:684:2580][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-03-28T09:34:37.644777Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:34:37.647942Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:34:37.648044Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:34:37.648545Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:34:40.728395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:40.728722Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:40.728776Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012ea/r3tmp/tmpusjxA8/pdisk_1.dat 2025-03-28T09:34:41.038439Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:41.065752Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:41.102870Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:41.102994Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:41.114441Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:41.204959Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:41.237856Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:677:2578] 2025-03-28T09:34:41.238121Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:41.290452Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:41.290623Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:34:41.292332Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:34:41.292411Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:34:41.292474Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:34:41.292840Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:34:41.292992Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-0 ... 
-28T09:36:22.023938Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-28T09:36:22.023964Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2025-03-28T09:36:22.024001Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message topic: Table/Stream2/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 2 partNo : 0 messageNo: 1 size 323 offset: -1 2025-03-28T09:36:22.024128Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Topic 'Table/Stream2/streamImpl' partition 0 part blob processing sourceId '\00072075186224037888' seqNo 2 partNo 0 2025-03-28T09:36:22.024843Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Topic 'Table/Stream2/streamImpl' partition 0 part blob complete sourceId '\00072075186224037888' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 438 count 1 nextOffset 1 batches 1 2025-03-28T09:36:22.025211Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream2/streamImpl' partition 0 compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 d0000000000_00000000000000000000_00000_0000000001_00000| size 426 WTime 2505 2025-03-28T09:36:22.025289Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:36:22.025313Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:36:22.025336Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:36:22.025360Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:36:22.025383Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] m0000000000p72075186224037888 2025-03-28T09:36:22.025404Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] d0000000000_00000000000000000000_00000_0000000001_00000| 2025-03-28T09:36:22.025421Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:36:22.025441Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:36:22.025463Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:36:22.025536Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:36:22.025703Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 1 size 427 2025-03-28T09:36:22.025792Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:36:22.025835Z node 21 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 1 size 426 2025-03-28T09:36:22.027036Z node 21 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 427 actorID [21:796:2660] 2025-03-28T09:36:22.027252Z node 21 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 size 427 2025-03-28T09:36:22.027431Z node 21 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 1 size 426 actorID [21:972:2768] 2025-03-28T09:36:22.027501Z node 21 :PERSQUEUE DEBUG: PQ Cache (L2). 
Adding blob. Tablet '72075186224037891' partition 0 offset 0 partno 0 count 1 parts 0 size 426 >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2025-03-28T09:36:22.028626Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-28T09:36:22.028714Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2025-03-28T09:36:22.029392Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 0 max time lag 0ms effective offset 0 2025-03-28T09:36:22.029464Z node 21 :PERSQUEUE DEBUG: waiting read cookie 0 partition 0 user $without_consumer offset 0 count 10000 size 26214400 timeout 0 2025-03-28T09:36:22.029563Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:36:22.029642Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] waiting read cookie 0 partition 0 read timeout for $without_consumer offset 0 2025-03-28T09:36:22.029735Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:36:22.040437Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 342 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:36:22.040598Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:36:22.040748Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream1/streamImpl', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-03-28T09:36:22.041114Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-03-28T09:36:22.041234Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-03-28T09:36:22.041394Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 341 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:36:22.041446Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-28T09:36:22.041487Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream2/streamImpl', Partition: 0, SeqNo: 2, partNo: 0, Offset: 0 is stored on disk 2025-03-28T09:36:22.041635Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-03-28T09:36:22.041845Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-03-28T09:36:22.042087Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037891][21:1169:2812] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 2 Offset: 0 WriteTimestampMS: 2505 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-03-28T09:36:22.042210Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-03-28T09:36:22.042300Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-03-28T09:36:22.042424Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 2025-03-28T09:36:22.042486Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:36:22.042643Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][21:1168:2714] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 1 Offset: 0 WriteTimestampMS: 2505 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-03-28T09:36:22.042735Z node 21 :PERSQUEUE DEBUG: Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer readTimeStamp done, result 2505 queuesize 0 startOffset 0 2025-03-28T09:36:22.042946Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][21:876:2714] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-03-28T09:36:22.043080Z node 21 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][21:1030:2812] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-03-28T09:36:22.043235Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 2, at tablet# 72075186224037888 2025-03-28T09:36:22.043297Z node 21 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 1, at tablet: 72075186224037888 2025-03-28T09:36:22.043427Z node 21 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 2, at tablet: 72075186224037888 2025-03-28T09:36:22.054305Z node 21 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 2, left# 0, at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2025-03-28T09:36:22.316515Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-28T09:36:22.316584Z node 21 :PERSQUEUE DEBUG: 
[PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2025-03-28T09:36:22.316706Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-03-28T09:36:22.316788Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-03-28T09:36:22.316919Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-03-28T09:36:22.317017Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:36:22.317673Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >>>>> GetRecords path=/Root/Table/Stream2 partitionId=0 2025-03-28T09:36:22.318987Z node 21 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-28T09:36:22.319080Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2025-03-28T09:36:22.319869Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream2/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2025-03-28T09:36:22.319961Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-03-28T09:36:22.320073Z node 21 :PERSQUEUE DEBUG: [PQ: 72075186224037891, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
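[editor's note] The PERSQUEUE "read cookie ... offset 0 count 10000 size 26214400 endOffset 1" lines above describe a partition read bounded by offset, record count, and byte size, served entirely from the uncompacted in-memory head. A minimal sketch of that bounded-read shape follows, assuming a simple in-memory message list; this is not the PERSQUEUE implementation, and all names are illustrative.

    // Minimal sketch (not PERSQUEUE): a bounded partition read, serving from an
    // in-memory head and stopping at the offset, count, or byte-size limit.
    #include <cstddef>
    #include <cstdint>
    #include <string>
    #include <vector>
    #include <iostream>

    struct Message { uint64_t offset; std::string payload; };

    std::vector<Message> Read(const std::vector<Message>& head,
                              uint64_t offset, size_t count, size_t maxBytes) {
        std::vector<Message> out;
        size_t bytes = 0;
        for (const auto& m : head) {
            if (m.offset < offset) continue;                // skip below requested offset
            if (out.size() >= count) break;                 // count limit (e.g. 10000)
            if (bytes + m.payload.size() > maxBytes) break; // size limit (e.g. 26214400)
            bytes += m.payload.size();
            out.push_back(m);
        }
        return out;
    }

    int main() {
        std::vector<Message> head{{0, "change-record"}};    // endOffset 1: one record in the head
        auto res = Read(head, /*offset=*/0, /*count=*/10000, /*maxBytes=*/26214400);
        std::cout << "read " << res.size() << " message(s) from uncompacted head\n";
    }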
2025-03-28T09:36:22.320155Z node 21 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:36:22.320702Z node 21 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> TAsyncIndexTests::OnlineBuild [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TVersions::Wreck0Reverse [GOOD] Test command err: SmallQueue: MainQueue: {11 0f 1b}, {14 1f 1b}, {15 2f 1b}, {18 0f 1b}, {19 0f 1b}, {23 0f 1b}, {27 0f 1b} GhostQueue: 9, 12, 13, 16, 17, 20, 21, 24, 25, 28 0.28973 00000.000 II| FAKE_ENV: Born at 2025-03-28T09:34:14.277790Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.013 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.013 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.016 NN| TABLET_SAUSAGECACHE: Update config MemoryLimit: 8388608 ReplacementPolicy: ThreeLeveledLRU 00000.016 NN| TABLET_SAUSAGECACHE: Switch replacement policy from S3FIFO to ThreeLeveledLRU 00000.016 NN| TABLET_SAUSAGECACHE: Switch replacement policy done from S3FIFO to ThreeLeveledLRU 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{2, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done 
Change{3, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.031 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.031 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.031 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{4, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.033 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{5, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.033 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.033 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.033 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.033 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.034 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{6, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.034 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.034 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.034 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.034 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.034 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{7, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.035 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.035 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.035 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.035 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.036 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{8, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.036 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.036 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.036 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.036 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.037 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{9, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.037 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.037 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.037 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.037 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.037 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{10, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.037 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.038 DD| TABLET_EXECUTOR: Leader{1:2:12} commited cookie 1 for step 11 00000.038 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.038 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.038 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{11, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.038 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.039 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.039 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.039 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.039 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{12, redo 102475b alter 0b 
annex 0, ~{ 101 } -{ }, 0 gb} 00000.039 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:14} commited cookie 1 for step 13 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{13, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.040 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxWriteRow} hope 1 -> done Change{14, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.041 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxW ... CHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 5 ] owner [12:659:2684] 00000.439 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.439 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.439 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.440 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.440 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.440 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [4 4] 00000.440 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.440 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 4 ] 00000.440 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 4 ] 00000.440 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.440 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 
-> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.440 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{96, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.440 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 100 4 ] 00000.440 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 4 ] owner [12:659:2684] 00000.441 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.441 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.441 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.441 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.441 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.441 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [3 4] 00000.441 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.441 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 3 ] 00000.441 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 3 ] 00000.441 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.441 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.441 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{97, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.441 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 100 3 ] 00000.441 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 3 ] owner [12:659:2684] 00000.442 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.442 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.442 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.442 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.442 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, 
Memory{8388608 dyn 0} 00000.442 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [2 4] 00000.442 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.442 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 2 ] 00000.442 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 2 ] 00000.442 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.442 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.442 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{98, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.443 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 100 2 ] 00000.443 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 2 ] owner [12:659:2684] 00000.443 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.443 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.443 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.443 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.443 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.443 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [1 4] 00000.443 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.443 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 1 ] 00000.444 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 1 ] 00000.444 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.444 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.444 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{99, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.444 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 1 100 ] 00000.444 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 1 ] owner [12:659:2684] 00000.444 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, 
NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} queued, type NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow 00000.444 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.444 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 1 -> retry Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.445 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} touch new 0b, 102443b lo load (102443b in total), 0b requested for data (4194304b in total) 00000.445 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.445 D3| TABLET_EXECUTOR: Leader{1:3:2} requests PageCollection [1:2:103:1:12288:2546:0] 102443 bytes, 1 pages: [0 4] 00000.445 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} postponed, 102443b, pages {1 wait, 1 load}, freshly touched 2 pages 00000.445 TT| TABLET_SAUSAGECACHE: Request page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] class Online from cache [ ] already requested [ ] to request [ 0 ] 00000.445 TT| TABLET_SAUSAGECACHE: Loaded page collection [1:2:103:1:12288:2546:0] status OK pages [ 0 ] 00000.445 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:103:1:12288:2546:0] ok OK}, category 1 00000.445 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} hope 2 -> done Change{103, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.445 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{100, NKikimr::NSharedCache::NTestSuiteTSharedPageCache::TTxReadRow} release 8388608b of static, Memory{0 dyn 0} 00000.445 TT| TABLET_SAUSAGECACHE: Touch page collection [1:2:103:1:12288:2546:0] owner [12:659:2684] pages [ 0 100 ] 00000.445 DD| TABLET_SAUSAGECACHE: Dropping page collection [1:2:103:1:12288:2546:0] pages [ 0 ] owner [12:659:2684] Counters: Active:0/0, Passive:2772, MemLimit:-1 00000.445 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.446 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 10249619b +(0, 0b), 1 trc, -48685b acc} 00000.448 DD| TABLET_SAUSAGECACHE: Unregister owner [12:659:2684] 00000.449 NN| TABLET_SAUSAGECACHE: Poison cache serviced 201 reqs hit {0 0b} miss {202 20491164b} 00000.449 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.449 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {10191b, 107} 00000.449 II| FAKE_ENV: DS.1 gone, left {10250914b, 5}, put {10299737b, 107} 00000.454 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.454 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.454 II| FAKE_ENV: All BS storage groups are stopped 00000.454 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.455 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 3357}, stopped ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::OnlineBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 
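[editor's note] The TABLET_SAUSAGECACHE lines above repeat a Touch / Drop pattern under an 8388608-byte MemoryLimit: touching a page collection refreshes it, and pages beyond the budget are dropped. A minimal standalone C++ sketch of that pattern follows, assuming a plain LRU list with a byte budget; the real cache uses configurable replacement policies (the log mentions S3FIFO and ThreeLeveledLRU), so this is only an illustration.

    // Minimal sketch (not TABLET_SAUSAGECACHE): an LRU page cache with a byte budget,
    // echoing the Touch/Drop pattern in the log. Sizes and names are illustrative.
    #include <cstddef>
    #include <list>
    #include <unordered_map>
    #include <iostream>

    class PageCache {
        size_t Limit, Used = 0;
        std::list<std::pair<int, size_t>> Lru;   // front = most recently touched {pageId, bytes}
        std::unordered_map<int, std::list<std::pair<int, size_t>>::iterator> Index;
    public:
        explicit PageCache(size_t limit) : Limit(limit) {}
        void Touch(int page, size_t bytes) {
            if (auto it = Index.find(page); it != Index.end()) {
                Lru.splice(Lru.begin(), Lru, it->second);   // refresh recency
                return;
            }
            Lru.emplace_front(page, bytes);
            Index[page] = Lru.begin();
            Used += bytes;
            while (Used > Limit) {                          // evict ("Dropping page ...")
                auto [victim, sz] = Lru.back();
                std::cout << "drop page " << victim << "\n";
                Used -= sz;
                Index.erase(victim);
                Lru.pop_back();
            }
        }
    };

    int main() {
        PageCache cache(/*limit=*/8388608);                 // MemoryLimit: 8388608 from the log
        cache.Touch(100, 4 << 20);
        cache.Touch(4,   4 << 20);
        cache.Touch(3,   4 << 20);                          // over budget: oldest page dropped
    }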
2025-03-28T09:36:23.799522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:36:23.799610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:36:23.799673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:36:23.799713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:36:23.799758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:36:23.799808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:36:23.799873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:36:23.800000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:36:23.800302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:23.856051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:36:23.856096Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:23.866386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:23.866566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:36:23.866712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:36:23.870876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:36:23.871020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:36:23.871464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:36:23.871654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:36:23.872979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:36:23.874084Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:36:23.874127Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:36:23.874236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:36:23.874266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:36:23.874289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:36:23.874368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-03-28T09:36:23.878857Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:36:23.984620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:36:23.984868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:36:23.985067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:36:23.985279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:36:23.985347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:36:23.987545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:36:23.987675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:36:23.987831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:36:23.987872Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:36:23.987901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:36:23.987927Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:36:23.989483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:36:23.989535Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:36:23.989577Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:36:23.991514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:36:23.991551Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:36:23.991606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:36:23.991661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:36:23.999916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:36:24.001780Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:36:24.001985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:36:24.002969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:36:24.003097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:36:24.003138Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:36:24.003369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:36:24.003416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:36:24.003569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:36:24.003653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:36:24.005309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:36:24.005358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:36:24.005507Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:36:24.005548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:36:24.005883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:36:24.005920Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:36:24.005998Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:36:24.006025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:36:24.006062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:36:24.006087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:36:24.006117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:36:24.006154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:36:24.006192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:36:24.006240Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:36:24.006296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:36:24.006330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:36:24.006355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:36:24.008027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:36:24.008116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:36:24.008145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710760, database: /MyRoot, subject: , status: StatusAccepted, operation: DROP LOCK, path: /MyRoot/Table 2025-03-28T09:36:24.457646Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvModifySchemeTransactionResult: txId# 281474976710760, status# StatusAccepted 2025-03-28T09:36:24.457705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2025-03-28T09:36:24.457812Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, BuildIndexId: 102, cookie: 102, txId: 281474976710760, status: StatusAccepted 2025-03-28T09:36:24.457907Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:382:2354], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }}, record: Status: StatusAccepted TxId: 281474976710760 SchemeshardId: 72057594046678944 PathId: 2 2025-03-28T09:36:24.458148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-28T09:36:24.458193Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 ProgressState 2025-03-28T09:36:24.458249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2025-03-28T09:36:24.458378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { 
AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:36:24.460709Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-28T09:36:24.460778Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:382:2354], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:36:24.461107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-03-28T09:36:24.461227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:36:24.461387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-03-28T09:36:24.461414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-03-28T09:36:24.461461Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-03-28T09:36:24.461703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:36:24.461791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:36:24.461830Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-03-28T09:36:24.461865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-03-28T09:36:24.463285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-28T09:36:24.463326Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 
ProgressState 2025-03-28T09:36:24.463418Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-28T09:36:24.463445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-28T09:36:24.463471Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-28T09:36:24.463507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-28T09:36:24.463536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2025-03-28T09:36:24.463580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:123:2149] message: TxId: 281474976710760 2025-03-28T09:36:24.463611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-28T09:36:24.463654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-03-28T09:36:24.463676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-03-28T09:36:24.463723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-03-28T09:36:24.464963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-03-28T09:36:24.465023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-03-28T09:36:24.465073Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-03-28T09:36:24.465140Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:382:2354], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:36:24.466358Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-28T09:36:24.466477Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:382:2354], 
AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:36:24.466521Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-28T09:36:24.467888Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-28T09:36:24.467966Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndex, IndexColumn: indexed, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:382:2354], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:36:24.467999Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-03-28T09:36:24.468111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:36:24.468155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:473:2434] TestWaitNotification: OK eventTxId 102 >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer >> TColumnShardTestSchema::RebootOneTierExternalTtl |95.2%| [TA] $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TxOrderInternals::OperationOrder [GOOD] |95.2%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> TxOrderInternals::OperationOrder [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink >> DataShardTxOrder::ZigZag [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag [GOOD] Test command err: 2025-03-28T09:36:21.075162Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:21.079928Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:21.080267Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:36:21.080446Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:21.112907Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:21.166276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:36:21.166316Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:21.172934Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:21.173709Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:36:21.174779Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:36:21.174828Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:36:21.174861Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:36:21.175086Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:36:21.175136Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:36:21.175190Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:36:21.226315Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:36:21.252002Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:36:21.252135Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:36:21.252202Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:36:21.252227Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:36:21.252249Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:36:21.252273Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:21.252446Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:21.252497Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:21.252660Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:36:21.252726Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:36:21.252759Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 
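The TX_DATASHARD trace around this point walks a single operation through the datashard's execution-unit pipeline (PlanQueue -> LoadTxDetails -> ... -> CompleteOperation), advancing one unit at a time according to the status each unit returns. The driver loop below is a deliberately simplified model of that pattern; the enum values and unit names are assumptions taken from the log text, not YDB source.

#include <cstdio>
#include <functional>
#include <string>
#include <vector>

// Status values mirroring the "Execution status for ... is Executed /
// DelayComplete" lines in the trace; Restart models "hope N -> retry".
enum class EStatus { Executed, Restart, DelayComplete };

struct TUnit {
    std::string Name;
    std::function<EStatus()> Run;
};

int main() {
    std::vector<TUnit> plan = {
        {"PlanQueue",         [] { return EStatus::Executed; }},
        {"LoadTxDetails",     [] { return EStatus::Executed; }},
        {"ExecuteDataTx",     [] { return EStatus::Executed; }},
        {"CompleteOperation", [] { return EStatus::DelayComplete; }},
    };
    for (size_t i = 0; i < plan.size(); ) {
        EStatus st = plan[i].Run();
        if (st == EStatus::Restart) continue;   // re-run the same unit later
        // Executed advances immediately; DelayComplete advances too, with
        // the unit's Complete() deferred to the enclosing transaction's
        // Complete phase ("Complete execution for ... on unit ...").
        std::printf("advance past %s\n", plan[i].Name.c_str());
        ++i;
    }
}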
2025-03-28T09:36:21.252801Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:21.252824Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:36:21.252847Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:36:21.252868Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:36:21.252902Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:36:21.252932Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:21.253007Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:21.253034Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:21.253078Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:36:21.254671Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:36:21.254734Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:36:21.254791Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:36:21.254897Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:36:21.254928Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:36:21.254957Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:36:21.254988Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:21.255022Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:36:21.255045Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:36:21.255068Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:21.255284Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:36:21.255306Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:36:21.255326Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:36:21.255349Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:21.255376Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:36:21.255391Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:36:21.255409Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:36:21.255446Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit 
WaitForPlan 2025-03-28T09:36:21.255468Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:36:21.266854Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:36:21.266922Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:21.266954Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:21.266993Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:36:21.267054Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:36:21.267625Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:21.267682Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:21.267709Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:36:21.267759Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:36:21.267779Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:36:21.267871Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:21.267911Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:21.267935Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:36:21.267956Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:36:21.270289Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:36:21.270331Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:21.270465Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:21.270487Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:21.270528Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:21.270561Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:36:21.270581Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:21.270620Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:36:21.270640Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:36:21.270666Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:21.270733Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:36:21.270760Z node 
1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:36:21.270783Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:36:21.270897Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:36:21.270931Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:21.270946Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:36:21.270970Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:36:21.270984Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:36:21.271016Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:21.271034Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:36:21.271054Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:21.271076Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:36:21.271140Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:36:21.271181Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:36:21.271215Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:36:21.271260Z node 1 :TX_DATA ... aitInRS 2025-03-28T09:36:26.475830Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:36:26.475848Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-03-28T09:36:26.475866Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-03-28T09:36:26.475884Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-03-28T09:36:26.476237Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-03-28T09:36:26.476312Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:36:26.476364Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:36:26.476394Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-03-28T09:36:26.476416Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-03-28T09:36:26.476439Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-03-28T09:36:26.476594Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-03-28T09:36:26.476622Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-03-28T09:36:26.476653Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 
9437185 to execution unit CompletedOperations 2025-03-28T09:36:26.476681Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-03-28T09:36:26.476710Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:36:26.476729Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-03-28T09:36:26.476759Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-03-28T09:36:26.476799Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:26.476852Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-28T09:36:26.476886Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-03-28T09:36:26.476922Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-03-28T09:36:26.477079Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:233:2226], Recipient [2:233:2226]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:26.477108Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:26.477142Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:26.477184Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:36:26.477206Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:26.477230Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in PlanQueue unit at 9437184 2025-03-28T09:36:26.477254Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2025-03-28T09:36:26.477279Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-28T09:36:26.477309Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2025-03-28T09:36:26.477336Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:36:26.477358Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2025-03-28T09:36:26.477926Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2025-03-28T09:36:26.477963Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-28T09:36:26.477986Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:36:26.478009Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2025-03-28T09:36:26.478030Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2025-03-28T09:36:26.478059Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-28T09:36:26.478080Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2025-03-28T09:36:26.478108Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:26.478136Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:36:26.478171Z node 2 
:TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437184 2025-03-28T09:36:26.478193Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437184 2025-03-28T09:36:26.478226Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437184 2025-03-28T09:36:26.478256Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-28T09:36:26.478275Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-28T09:36:26.478294Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2025-03-28T09:36:26.478314Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2025-03-28T09:36:26.478350Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-28T09:36:26.478371Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2025-03-28T09:36:26.478389Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2025-03-28T09:36:26.478410Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2025-03-28T09:36:26.478431Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-28T09:36:26.478461Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2025-03-28T09:36:26.478503Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2025-03-28T09:36:26.478529Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2025-03-28T09:36:26.478554Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-28T09:36:26.478588Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2025-03-28T09:36:26.478610Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2025-03-28T09:36:26.478632Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2025-03-28T09:36:26.478652Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-28T09:36:26.478672Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2025-03-28T09:36:26.478707Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2025-03-28T09:36:26.478727Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2025-03-28T09:36:26.479018Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2025-03-28T09:36:26.479062Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:36:26.479094Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-28T09:36:26.479117Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 
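The BuildDataTxOutRS / LoadAndWaitInRS units named just above belong to the readset handshake between the two shards in this ZigZag test: a distributed transaction may only execute on a shard once readsets from all participating peers have arrived. A toy model of that gating, entirely illustrative (shard ids are taken from the trace; the data structures are assumptions):

#include <cstdio>
#include <map>
#include <set>

// A shard blocks in LoadAndWaitInRS until it has an incoming readset
// from every expected peer; in the trace above both units pass through
// immediately because this plan step needs no cross-shard data.
struct TShard {
    std::set<int> ExpectedFrom;       // peer shard ids
    std::map<int, int> InRS;          // received readsets
    bool ReadyToExecute() const { return InRS.size() == ExpectedFrom.size(); }
    void ReceiveRS(int from, int value) {
        if (ExpectedFrom.count(from)) InRS[from] = value;
    }
};

int main() {
    TShard s;
    s.ExpectedFrom = {9437185};
    std::printf("ready=%d\n", s.ReadyToExecute()); // 0: LoadAndWaitInRS blocks
    s.ReceiveRS(9437185, 42);                      // peer's out-RS delivered
    std::printf("ready=%d\n", s.ReadyToExecute()); // 1: tx may proceed
}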
2025-03-28T09:36:26.479137Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-03-28T09:36:26.479176Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-03-28T09:36:26.479309Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-03-28T09:36:26.479332Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-03-28T09:36:26.479354Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-03-28T09:36:26.479376Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-03-28T09:36:26.479401Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-28T09:36:26.479421Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-03-28T09:36:26.479441Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-03-28T09:36:26.479478Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:26.479502Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:26.479524Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:36:26.479545Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:36:26.492123Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-03-28T09:36:26.492193Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-03-28T09:36:26.492249Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-28T09:36:26.492294Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-03-28T09:36:26.492364Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:36:26.492416Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-03-28T09:36:26.492681Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-03-28T09:36:26.492749Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-03-28T09:36:26.492783Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:26.492807Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-03-28T09:36:26.492840Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:36:26.492866Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> Cdc::InitialScanEnqueuesZeroRecords [GOOD] >> Cdc::InitialScanRacyProgressAndDrop >> Cdc::ShouldBreakLocksOnConcurrentSchemeTx [GOOD] >> Cdc::ResolvedTimestampsContinueAfterMerge >> 
DataShardOutOfOrder::TestOutOfOrderLockLost >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad >> DataShardOutOfOrder::TestOutOfOrderLockLost [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail >> Cdc::InitialScanRacyProgressAndDrop [GOOD] >> Cdc::EnqueueRequestProcessSend >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2025-03-28T09:36:03.370427Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791841153960734:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:03.370587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001566/r3tmp/tmp6BHDy8/pdisk_1.dat 2025-03-28T09:36:03.593895Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12364, node 1 2025-03-28T09:36:03.640237Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:36:03.640257Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:36:03.640265Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:36:03.640387Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:36:03.704121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:03.704216Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:03.705941Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:36:03.835393Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:36:03.838166Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:36:03.838201Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:36:03.839324Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:24793, port: 24793 2025-03-28T09:36:03.839402Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:36:03.895146Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:36:03.942973Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:36:03.943462Z node 1 :LDAP_AUTH_PROVIDER DEBUG: 
Try to get nested groups - tree traversal 2025-03-28T09:36:03.943500Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:36:03.991030Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:36:04.035037Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:36:04.036285Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****UCjQ (6FC2C03B) () has now valid token of ldapuser@ldap 2025-03-28T09:36:08.370503Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791841153960734:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:08.370623Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:36:09.380094Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****UCjQ (6FC2C03B) 2025-03-28T09:36:09.380286Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:24793, port: 24793 2025-03-28T09:36:09.380381Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:36:09.431071Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:36:09.431534Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:24793 return no entries 2025-03-28T09:36:09.431934Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****UCjQ (6FC2C03B) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldaps://localhost:24793 return no entries)' 2025-03-28T09:36:13.381879Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****UCjQ (6FC2C03B) 2025-03-28T09:36:14.464773Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791888835731063:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:14.464834Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001566/r3tmp/tmpOINxJH/pdisk_1.dat 2025-03-28T09:36:14.540617Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2860, node 2 2025-03-28T09:36:14.585530Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:36:14.585552Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:36:14.585556Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:36:14.585637Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:36:14.594592Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:14.594671Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:14.596234Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:36:14.699450Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:36:14.701793Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:36:14.701820Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:36:14.702359Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:8441, port: 8441 2025-03-28T09:36:14.702407Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:36:14.751104Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:36:14.751517Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldaps://localhost:8441. Server is busy 2025-03-28T09:36:14.751944Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****-R0Q (04A4670E) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:8441. 
Server is busy)' 2025-03-28T09:36:14.752148Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:36:14.752167Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:36:14.752765Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:8441, port: 8441 2025-03-28T09:36:14.752805Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:36:14.807099Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:36:14.807452Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldaps://localhost:8441. Server is busy 2025-03-28T09:36:14.807780Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****-R0Q (04A4670E) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:8441. Server is busy)' 2025-03-28T09:36:16.474773Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****-R0Q (04A4670E) 2025-03-28T09:36:16.474979Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:36:16.475002Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:36:16.475700Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:8441, port: 8441 2025-03-28T09:36:16.475772Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:36:16.527025Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:36:16.527408Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldaps://localhost:8441. Server is busy 2025-03-28T09:36:16.527796Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****-R0Q (04A4670E) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:8441. 
Server is busy)' 2025-03-28T09:36:19.464960Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486791888835731063:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:19.465049Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:36:19.476685Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****-R0Q (04A4670E) 2025-03-28T09:36:19.476887Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:36:19.476908Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:36:19.477477Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:8441, port: 8441 2025-03-28T09:36:19.477547Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:36:19.531084Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:36:19.578949Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:36:19.579367Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:36:19.579402Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:36:19.623050Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:36:19.670970Z node 2 :LDAP_AUTH_PROVIDER D ... 
3-28T09:36:23.619003Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:36:23.663002Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:36:23.663995Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****-R0Q (04A4670E) () has now valid token of ldapuser@ldap 2025-03-28T09:36:25.189970Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486791937799088122:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:25.190037Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001566/r3tmp/tmpH1Y8Ii/pdisk_1.dat 2025-03-28T09:36:25.258938Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3882, node 3 2025-03-28T09:36:25.310086Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:36:25.310106Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:36:25.310111Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:36:25.310192Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:36:25.313994Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:25.314070Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:25.315517Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:36:25.581800Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:36:25.584105Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:36:25.584126Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:36:25.584562Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:4128, port: 4128 2025-03-28T09:36:25.584627Z node 3 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:36:25.594233Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:36:25.639280Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:36:25.687052Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:36:25.731506Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****T2Bw (D3677813) () has now valid token of ldapuser@ldap 2025-03-28T09:36:27.423469Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486791943313850422:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:27.423509Z node 4 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001566/r3tmp/tmpO5hXzF/pdisk_1.dat 2025-03-28T09:36:27.507743Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7239, node 4 2025-03-28T09:36:27.547591Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:27.547657Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:27.548778Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:36:27.555023Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:36:27.555046Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:36:27.555053Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:36:27.555200Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:36:27.667677Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:36:27.669988Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:36:27.670021Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:36:27.670525Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:26431, port: 26431 2025-03-28T09:36:27.670573Z node 4 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:36:27.679167Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:36:27.727125Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:36:27.775413Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****1IfQ (83EFFB19) () has now valid token of ldapuser@ldap 2025-03-28T09:36:29.638462Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486791952722675838:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:29.638565Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001566/r3tmp/tmpzmYRYV/pdisk_1.dat 2025-03-28T09:36:29.709786Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3013, node 5 2025-03-28T09:36:29.753760Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:36:29.753782Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:36:29.753790Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:36:29.753913Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:36:29.769464Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:29.769559Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:29.771031Z node 5 :HIVE WARN: 
HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:36:29.930739Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:36:29.933122Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:36:29.933144Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:36:29.933674Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:11253, port: 11253 2025-03-28T09:36:29.933749Z node 5 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:36:29.941623Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:36:29.983117Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-03-28T09:36:30.026956Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:36:30.027359Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:36:30.027394Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-28T09:36:30.075008Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-28T09:36:30.122981Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-28T09:36:30.123787Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****ztKA (9CE123C0) () has now valid token of ldapuser@ldap 2025-03-28T09:36:32.067488Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486791968001788500:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:32.067539Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001566/r3tmp/tmpEg60JS/pdisk_1.dat 2025-03-28T09:36:32.141154Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8101, node 6 2025-03-28T09:36:32.192317Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:36:32.192335Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:36:32.192340Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:36:32.192448Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:36:32.193079Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:32.193147Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
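
The group-membership lookups traced above follow a two-step pattern: a subtree search for the user entry (attributes memberOf or groupDN), then nested-group expansion, either in one shot via the matching-rule-in-chain filter (member:1.2.840.113556.1.4.1941:=...) or, when the server returns group DNs to chase, the level-by-level "tree traversal" with (|(entryDn=...)...) filters seen earlier. A minimal sketch of the same two searches using the third-party Python ldap3 library follows; the library choice, port, and bind password are assumptions for illustration, not part of this test run:

    # Sketch of the two LDAP searches in the trace above (ldap3 is a
    # third-party library: pip install ldap3). Port and password are
    # placeholders, not values from this run.
    from ldap3 import Server, Connection, SUBTREE

    server = Server("localhost", port=8441, use_ssl=True)  # ldaps://
    conn = Connection(
        server,
        user="cn=robouser,dc=search,dc=yandex,dc=net",
        password="<bind-password>",  # placeholder
        auto_bind=True,
    )

    # Step 1: find the user entry and read its direct groups.
    # (The log prints the filter without surrounding parentheses;
    # a standards-compliant filter needs them.)
    conn.search(
        search_base="dc=search,dc=yandex,dc=net",
        search_filter="(uid=ldapuser)",
        search_scope=SUBTREE,
        attributes=["memberOf"],
    )

    # Step 2: one-shot nested-group expansion via the matching rule in
    # chain (OID 1.2.840.113556.1.4.1941); attribute "1.1" asks the
    # server for entry DNs only, with no attribute values.
    conn.search(
        search_base="dc=search,dc=yandex,dc=net",
        search_filter="(member:1.2.840.113556.1.4.1941:="
                      "uid=ldapuser,dc=search,dc=yandex,dc=net)",
        search_scope=SUBTREE,
        attributes=["1.1"],
    )
    print(conn.entries)
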
2025-03-28T09:36:32.194704Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:36:32.264498Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:36:32.266939Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:36:32.266963Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:36:32.267462Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:27176, port: 27176 2025-03-28T09:36:32.267511Z node 6 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:36:32.275460Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:36:32.319155Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-03-28T09:36:32.319216Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:27176. Bad search filter 2025-03-28T09:36:32.319597Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****9wjQ (C9BC008D) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:27176. Bad search filter)' >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] >> TSubscriberTest::Sync >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] Test command err: 2025-03-28T09:36:28.397838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:28.398121Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:28.398168Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001655/r3tmp/tmplakuBw/pdisk_1.dat 2025-03-28T09:36:28.733799Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:36:28.765211Z node 1 :KQP_RESOURCE_MANAGER INFO: Updated table service config: ComputeActorsCount: 10000 ChannelBufferSize: 8388608 MkqlLightProgramMemoryLimit: 1048576 MkqlHeavyProgramMemoryLimit: 31457280 QueryMemoryLimit: 32212254720 PublishStatisticsIntervalSec: 2 MaxTotalChannelBuffersSize: 2147483648 MinChannelBufferSize: 2048 2025-03-28T09:36:28.765320Z node 1 :KQP_PROXY DEBUG: Updated table service config. 2025-03-28T09:36:28.765372Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 7 2025-03-28T09:36:28.765482Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:28.768144Z node 1 :KQP_RESOURCE_MANAGER DEBUG: Updated table service config. 2025-03-28T09:36:28.805569Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:36:28.807492Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:36:28.807844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:28.808426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:28.820618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:36:28.898878Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:36:28.898930Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:36:28.900379Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:642:2550] 2025-03-28T09:36:28.992240Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value1" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value2" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:36:28.992315Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:36:28.993856Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:36:28.993928Z node 1 :TX_PROXY 
DEBUG: Actor# [1:642:2550] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:36:28.994120Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:36:28.994283Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:36:28.994436Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:36:28.997474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:36:29.003762Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:36:29.005097Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:36:29.005165Z node 1 :TX_PROXY DEBUG: Actor# [1:642:2550] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:36:29.036309Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:667:2571]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:29.037168Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:667:2571]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:29.037603Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:667:2571] 2025-03-28T09:36:29.037788Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:29.069224Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:667:2571]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:29.069835Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:29.069933Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:36:29.071259Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:36:29.071341Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:36:29.071393Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:36:29.071746Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:36:29.071854Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:36:29.071932Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:683:2571] in generation 1 2025-03-28T09:36:29.082535Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:36:29.106417Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:36:29.106584Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:36:29.106721Z node 1 
:TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:685:2581] 2025-03-28T09:36:29.106759Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:36:29.106795Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:36:29.106834Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:36:29.107001Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:667:2571], Recipient [1:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:29.107041Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:29.107256Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:36:29.107322Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:36:29.109065Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:36:29.109108Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:29.109148Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:36:29.109180Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:36:29.109203Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:36:29.109229Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:36:29.109264Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:36:29.109364Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:674:2575], Recipient [1:667:2571]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:29.109407Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:29.109442Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:674:2575], sessionId# [0:0:0] 2025-03-28T09:36:29.109536Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:674:2575] 2025-03-28T09:36:29.109564Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:36:29.109636Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:36:29.109854Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:36:29.109902Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:36:29.109972Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:36:29.110007Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:36:29.110040Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:36:29.110067Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
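
The propose trace above walks a fixed chain of execution units — CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan — where each unit returns a status that either advances the plan (Executed / ExecutedNoMoreRestarts), advances it while deferring part of the work to the Complete phase (DelayComplete / DelayCompleteNoMoreRestarts), or parks the operation until an external event arrives (WaitForPlan is not ready until the plan step comes). A minimal Python sketch of that pipeline pattern, with only the unit and status names taken from the log and everything else invented for illustration:

    # Illustrative model of the execution-unit pipeline in the TRACE lines
    # above; not YDB's C++ implementation.
    from enum import Enum, auto

    class EStatus(Enum):
        EXECUTED = auto()        # done, advance to the next unit
        DELAY_COMPLETE = auto()  # advance now, finish work in Complete phase
        NOT_READY = auto()       # park the operation until an event arrives

    PIPELINE = ["CheckSchemeTx", "StoreSchemeTx", "FinishPropose", "WaitForPlan"]

    def run(op, units):
        delayed = []                           # units to revisit on Complete
        for name in PIPELINE:
            status = units[name](op)
            print(f"[{op}] {name} -> {status.name}")
            if status is EStatus.NOT_READY:
                break                          # e.g. waiting for the plan step
            if status is EStatus.DELAY_COMPLETE:
                delayed.append(name)
        return delayed

    units = {
        "CheckSchemeTx": lambda op: EStatus.EXECUTED,
        "StoreSchemeTx": lambda op: EStatus.DELAY_COMPLETE,
        "FinishPropose": lambda op: EStatus.DELAY_COMPLETE,
        "WaitForPlan":   lambda op: EStatus.NOT_READY,
    }
    run("0:281474976715657", units)            # matches the order in the log

The deferred units then run once the result is persisted, as the later "Complete execution for [0:281474976715657] ... on unit StoreSchemeTx / FinishPropose" lines show.
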
2025-03-28T09:36:29.110092Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:36:29.110376Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:36:29.110408Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:36:29.110434Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:36:29.110469Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:36:29.110512Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:36:29.110534Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:36:29.110555Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:36:29.110579Z node 1 :TX_DATASH ... processing event TEvDataShard::TEvSchemaChangedResult 2025-03-28T09:36:35.561730Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037888 state Ready 2025-03-28T09:36:35.561773Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 --- resending captured proposals --- waiting for result 2025-03-28T09:36:35.562625Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553206, Sender [2:890:2720], Recipient [2:667:2571]: NKikimrTxDataShard.TEvKqpScan TxId: 281474976715662 ScanId: 2 LocalPathId: 2 TablePath: "/Root/table-1" SchemaVersion: 1 ColumnTags: 3 ColumnTypes: 2 Ranges { From: "\001\000\000\000\000\200" To: "" FromInclusive: true ToInclusive: false } Snapshot { Step: 2000 TxId: 281474976715661 } Generation: 1 ItemsLimit: 0 Reverse: false DataFormat: FORMAT_CELLVEC StatsMode: DQ_STATS_MODE_NONE ColumnTypeInfos { } LockNodeId: 0 2025-03-28T09:36:35.562727Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715662. Table '/Root/table-1' schema version changed at 72075186224037888 2025-03-28T09:36:35.562816Z node 2 :KQP_COMPUTE WARN: SelfId: [2:890:2720]. Got EvScanError scan state: , status: ABORTED, reason:
: Error: Table '/Root/table-1' scheme changed., code: 2028 , tablet id: 72075186224037888, actor_id: [2:667:2571] 2025-03-28T09:36:35.562851Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:890:2720]. Enqueue for resolve 72075186224037888 2025-03-28T09:36:35.562889Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:345;event=scanner_finished;tablet_id=72075186224037888;stop_shard=1; 2025-03-28T09:36:35.562927Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:92;event=stop_scanner;actor_id=NO_VALUE_OPTIONAL;message=;final_flag=1; 2025-03-28T09:36:35.562980Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:890:2720]. Sending TEvResolveKeySet update for table '/Root/table-1', range: [(Uint32 : NULL) ; ()), attempt #1 2025-03-28T09:36:35.563133Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:890:2720]. Received TEvResolveKeySetResult update for table '/Root/table-1' 2025-03-28T09:36:35.563158Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:890:2720]. Resolve request failed for table '/Root/table-1', ErrorCount# 1 2025-03-28T09:36:35.563238Z node 2 :KQP_COMPUTE DEBUG: kqp_scan_compute_actor.cpp:167 :TEvTerminateFromFetcher: [2:890:2720]/[2:888:2718] 2025-03-28T09:36:35.563291Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:888:2718], TxId: 281474976715662, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe1rgvg4edyr2a6hne6s993. SessionId : ydb://session/3?node_id=2&id=NzBhNWQ2NDUtOTA0ZTI3NTUtZmRiMjMxYTYtZGNjNTY3ZTQ=. CurrentExecutionId : . DatabaseId : /Root. Database : . PoolId : default. }. InternalError: SCHEME_ERROR KIKIMR_SCHEME_MISMATCH: {
: Error: Table '/Root/table-1' scheme changed., code: 2028 }. 2025-03-28T09:36:35.563400Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 1. pass away 2025-03-28T09:36:35.563461Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-03-28T09:36:35.565013Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-03-28T09:36:35.565216Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_fetcher_actor.cpp:99;event=TEvTerminateFromCompute;sender=[2:888:2718];info={
: Error: COMPUTE_STATE_FAILURE }; 2025-03-28T09:36:35.565387Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_scan_compute_manager.h:281;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2025-03-28T09:36:35.565757Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01jqe1rgvg4edyr2a6hne6s993, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzBhNWQ2NDUtOTA0ZTI3NTUtZmRiMjMxYTYtZGNjNTY3ZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:888:2718], task: 1, state: COMPUTE_STATE_FAILURE, stats: { CpuTimeUs: 148007 Tasks { TaskId: 1 CpuTimeUs: 146520 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 9 BuildCpuTimeUs: 146511 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-mdsjyjltvm" NodeId: 2 CreateTimeMs: 1743154595051 } MaxMemoryUsage: 1048576 } 2025-03-28T09:36:35.565874Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715662. Ctx: { TraceId: 01jqe1rgvg4edyr2a6hne6s993, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzBhNWQ2NDUtOTA0ZTI3NTUtZmRiMjMxYTYtZGNjNTY3ZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:888:2718] 2025-03-28T09:36:35.565945Z node 2 :KQP_EXECUTER INFO: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01jqe1rgvg4edyr2a6hne6s993, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzBhNWQ2NDUtOTA0ZTI3NTUtZmRiMjMxYTYtZGNjNTY3ZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-03-28T09:36:35.565998Z node 2 :KQP_EXECUTER INFO: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01jqe1rgvg4edyr2a6hne6s993, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzBhNWQ2NDUtOTA0ZTI3NTUtZmRiMjMxYTYtZGNjNTY3ZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. aborting compute actor execution, message: {
: Error: Terminate execution }, compute actor: [2:889:2719], task: 2 2025-03-28T09:36:35.566224Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:883:2692] TxId: 281474976715662. Ctx: { TraceId: 01jqe1rgvg4edyr2a6hne6s993, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzBhNWQ2NDUtOTA0ZTI3NTUtZmRiMjMxYTYtZGNjNTY3ZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:36:35.566402Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:889:2719], TxId: 281474976715662, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqe1rgvg4edyr2a6hne6s993. SessionId : ydb://session/3?node_id=2&id=NzBhNWQ2NDUtOTA0ZTI3NTUtZmRiMjMxYTYtZGNjNTY3ZTQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. CA StateFunc 271646735 2025-03-28T09:36:35.566468Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:889:2719], TxId: 281474976715662, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqe1rgvg4edyr2a6hne6s993. SessionId : ydb://session/3?node_id=2&id=NzBhNWQ2NDUtOTA0ZTI3NTUtZmRiMjMxYTYtZGNjNTY3ZTQ=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : . }. Handle abort execution event from: [2:883:2692], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2025-03-28T09:36:35.566622Z node 2 :KQP_COMPUTE DEBUG: TxId: 281474976715662, task: 2. pass away 2025-03-28T09:36:35.566771Z node 2 :KQP_COMPUTE DEBUG: fline=kqp_compute_actor_factory.cpp:66;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2025-03-28T09:36:35.569345Z node 2 :KQP_RESOURCE_MANAGER DEBUG: TxId: 281474976715662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2025-03-28T09:36:35.569495Z node 2 :KQP_RESOURCE_MANAGER DEBUG: Schedule publish at 1970-01-01T00:00:04.000000Z, after 1.550000s 2025-03-28T09:36:35.569616Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NzBhNWQ2NDUtOTA0ZTI3NTUtZmRiMjMxYTYtZGNjNTY3ZTQ=, ActorId: [2:857:2692], ActorState: ExecuteState, TraceId: 01jqe1rgvg4edyr2a6hne6s993, Create QueryResponse for error on request, msg: 2025-03-28T09:36:35.570204Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvExecuteKqpTransaction 2025-03-28T09:36:35.570242Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 281474976715664 ProcessProposeKqpTransaction 2025-03-28T09:36:35.570325Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 2000, txId: 281474976715661] shutting down 2025-03-28T09:36:35.570451Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Handle TEvProposeTransaction 2025-03-28T09:36:35.570494Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] TxId# 0 ProcessProposeTransaction 2025-03-28T09:36:35.570575Z node 2 :TX_PROXY DEBUG: actor# [2:59:2106] Cookie# 0 userReqId# "" txid# 0 reqId# [2:925:2751] SnapshotReq marker# P0 2025-03-28T09:36:35.571010Z node 2 :TX_PROXY DEBUG: Actor# [2:927:2751] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2025-03-28T09:36:35.571141Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Resolved key sets: 0 2025-03-28T09:36:35.571228Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1rgvg4edyr2a6hne6s993, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzBhNWQ2NDUtOTA0ZTI3NTUtZmRiMjMxYTYtZGNjNTY3ZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:35.571267Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715664. Ctx: { TraceId: 01jqe1rgvg4edyr2a6hne6s993, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzBhNWQ2NDUtOTA0ZTI3NTUtZmRiMjMxYTYtZGNjNTY3ZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-28T09:36:35.571303Z node 2 :KQP_EXECUTER INFO: ActorId: [2:924:2692] TxId: 281474976715664. Ctx: { TraceId: 01jqe1rgvg4edyr2a6hne6s993, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzBhNWQ2NDUtOTA0ZTI3NTUtZmRiMjMxYTYtZGNjNTY3ZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-28T09:36:35.571378Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:924:2692] TxId: 281474976715664. Ctx: { TraceId: 01jqe1rgvg4edyr2a6hne6s993, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzBhNWQ2NDUtOTA0ZTI3NTUtZmRiMjMxYTYtZGNjNTY3ZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:36:35.571443Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:924:2692] TxId: 281474976715664. Ctx: { TraceId: 01jqe1rgvg4edyr2a6hne6s993, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzBhNWQ2NDUtOTA0ZTI3NTUtZmRiMjMxYTYtZGNjNTY3ZTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-28T09:36:35.571486Z node 2 :TX_PROXY DEBUG: Actor# [2:927:2751] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2025-03-28T09:36:35.571692Z node 2 :TX_PROXY DEBUG: Actor# [2:925:2751] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2025-03-28T09:36:35.571833Z node 2 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [2:594:2519], selfId: [2:57:2104], source: [2:857:2692] 2025-03-28T09:36:35.572022Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553190, Sender [2:925:2751], Recipient [2:667:2571]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 2 Step: 2000 TxId: 281474976715661 2025-03-28T09:36:35.572652Z node 2 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=2&id=NzBhNWQ2NDUtOTA0ZTI3NTUtZmRiMjMxYTYtZGNjNTY3ZTQ=, workerId: [2:857:2692], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 275 >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] >> TSubscriberTest::Sync [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Sync [GOOD] Test command err: 2025-03-28T09:36:36.389699Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:36:36.391408Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-03-28T09:36:36.391487Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2025-03-28T09:36:36.391528Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2025-03-28T09:36:36.391565Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:36:2066] 2025-03-28T09:36:36.391598Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2025-03-28T09:36:36.391643Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:36:36.391722Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2066] 2025-03-28T09:36:36.391760Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 
elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:36:36.391857Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-03-28T09:36:36.391930Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2066], cookie# 1 2025-03-28T09:36:36.391970Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2025-03-28T09:36:36.392004Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2025-03-28T09:36:36.392043Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:3:2050], cookie# 1 2025-03-28T09:36:36.392074Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-03-28T09:36:36.392089Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-03-28T09:36:36.392118Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:36:2066], cookie# 1 2025-03-28T09:36:36.392143Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:36:36.392164Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:37:2066], cookie# 1 2025-03-28T09:36:36.392202Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:36:36.392234Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:38:2066], cookie# 1 2025-03-28T09:36:36.392247Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Unexpected sync response: sender# [1:38:2066], cookie# 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] Test command err: 2025-03-28T09:36:32.407255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:32.407496Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:32.407641Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:36:32.408109Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:32.408318Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:32.408443Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00164f/r3tmp/tmpEmbMpj/pdisk_1.dat 2025-03-28T09:36:32.665378Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:32.796634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:36:32.888119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:32.888201Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:32.890047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:32.890102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:32.902898Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:36:32.903227Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:36:32.903506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:36:33.173658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:36:33.238363Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:1256:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:33.241560Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:1256:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:33.241882Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1282:2390] 2025-03-28T09:36:33.242076Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:33.274672Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:1256:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:33.278466Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:33.278551Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:36:33.279687Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:36:33.279737Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:36:33.279773Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:36:33.280024Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:36:33.280207Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:36:33.280248Z node 2 :TX_DATASHARD DEBUG: 
DataShard 72075186224037888 persisting started state actor id [2:1306:2390] in generation 1 2025-03-28T09:36:33.282127Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:36:33.295723Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:36:33.295858Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:36:33.295940Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1310:2407] 2025-03-28T09:36:33.295975Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:36:33.295994Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:36:33.296017Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:36:33.296183Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1282:2390], Recipient [2:1282:2390]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:33.296212Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:33.296391Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:36:33.296454Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:36:33.296490Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:36:33.296517Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:33.296543Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:36:33.296575Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:36:33.296597Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:36:33.296615Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:36:33.296642Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:36:33.350023Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1314:2408], Recipient [2:1282:2390]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:33.350067Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:33.350099Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1266:2774], serverId# [2:1314:2408], sessionId# [0:0:0] 2025-03-28T09:36:33.350322Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:839:2466], Recipient [2:1314:2408] 2025-03-28T09:36:33.350352Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:36:33.350463Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:36:33.350638Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:36:33.350680Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:36:33.350766Z node 2 :TX_DATASHARD DEBUG: 
Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:36:33.350796Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:36:33.350819Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:36:33.350841Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:36:33.350865Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:36:33.351049Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:36:33.351070Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:36:33.351090Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:36:33.351114Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:36:33.351151Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:36:33.351167Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:36:33.351189Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:36:33.351207Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:36:33.351234Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:36:33.353853Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:36:33.353896Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:36:33.353917Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:36:33.353959Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:36:33.354002Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:36:33.354358Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [2:1315:2409], Recipient [2:1282:2390]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:36:33.354386Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:36:33.602676Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1348:2418], Recipient [2:1282:2390]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:33.602748Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:33.602780Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1345:2797], serverId# [2:1348:2418], sessionId# [0:0:0] 2025-03-28T09:36:33.603456Z 
node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:1057:2618], Recipient [2:1348:2418] 2025-03-28T09:36:33.603483Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:36:33.603568Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:36:33.603596Z node 2 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] ... :2454], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:36:35.300874Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1710:2454], 0} finished in read 2025-03-28T09:36:35.300911Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-28T09:36:35.300925Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:36:35.300938Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:36:35.300951Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:36:35.300977Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037888 is Executed 2025-03-28T09:36:35.300999Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:36:35.301015Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037888 has finished 2025-03-28T09:36:35.301040Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:36:35.301100Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-28T09:36:35.301927Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1710:2454], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:36:35.301976Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-03-28T09:36:35.396750Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe1rhf921dc3hg3hhw3eq0d, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY1YTMwZTItMjRmYTMzNzctZTA4YTBiYTUtNzZjZGQ2ZmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:36:35.398416Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1728:2455], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-03-28T09:36:35.398754Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:36:35.398812Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-03-28T09:36:35.398893Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-28T09:36:35.398926Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:36:35.398971Z node 2 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:35.399032Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:36:35.399118Z node 2 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-03-28T09:36:35.399208Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-28T09:36:35.399268Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:36:35.399312Z node 2 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:36:35.399359Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:36:35.399504Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-03-28T09:36:35.399713Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715662, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:36:35.399756Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-03-28T09:36:35.399841Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1728:2455], 0} after executionsCount# 1 2025-03-28T09:36:35.399936Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1728:2455], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:36:35.400015Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1728:2455], 0} finished in read 2025-03-28T09:36:35.400070Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-28T09:36:35.400092Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:36:35.400109Z node 2 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 
2025-03-28T09:36:35.400126Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:36:35.400165Z node 2 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-28T09:36:35.400180Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:36:35.400197Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-03-28T09:36:35.400221Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:36:35.400291Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-28T09:36:35.401080Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1728:2455], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:36:35.401144Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } } 2025-03-28T09:36:35.517950Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqe1rhjqfadr4gfmpnba2m6y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjQyZTliMTEtOTI0ZTU1ZTItMWZkYzQxMjktMjZmMWFkZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:35.519664Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1752:2456], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-03-28T09:36:35.519815Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:36:35.519894Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2025-03-28T09:36:35.519955Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-03-28T09:36:35.519980Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:36:35.520006Z node 2 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:35.520027Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:36:35.520056Z node 2 :TX_DATASHARD TRACE: Activated operation [0:8] at 72075186224037888 2025-03-28T09:36:35.520082Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-03-28T09:36:35.520107Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:36:35.520141Z node 2 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:36:35.520162Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:36:35.520417Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } 
LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-03-28T09:36:35.520760Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715666, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:36:35.520853Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-03-28T09:36:35.520931Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1752:2456], 0} after executionsCount# 1 2025-03-28T09:36:35.520971Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1752:2456], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:36:35.521019Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1752:2456], 0} finished in read 2025-03-28T09:36:35.521070Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-03-28T09:36:35.521086Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:36:35.521103Z node 2 :TX_DATASHARD TRACE: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:36:35.521117Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:36:35.521151Z node 2 :TX_DATASHARD TRACE: Execution status for [0:8] at 72075186224037888 is Executed 2025-03-28T09:36:35.521175Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:36:35.521200Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:8] at 72075186224037888 has finished 2025-03-28T09:36:35.521230Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:36:35.521296Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-28T09:36:35.521839Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1752:2456], Recipient [2:1282:2390]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:36:35.521875Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-03-28T09:36:35.522816Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:240:2131], Recipient [2:1282:2390]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 1 Status: STATUS_SUBSCRIBED { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } >> TColumnShardTestSchema::RebootOneTierExternalTtl [GOOD] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] Test command err: 2025-03-28T09:34:34.260222Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791460760619332:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:34.260804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath 
# /home/runner/.ya/build/build_root/2909/00130f/r3tmp/tmpd9z7oz/pdisk_1.dat 2025-03-28T09:34:34.593915Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:34.607823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:34.607931Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:34.619574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29768, node 1 2025-03-28T09:34:34.807365Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:34.807387Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:34.807393Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:34.807530Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:35.050226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:35.068608Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046644480 2025-03-28T09:34:35.087337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:35.132212Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7486791465055587239:2308] 2025-03-28T09:34:35.132519Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:35.144808Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:35.144873Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:34:35.146801Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:34:35.146877Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:34:35.146931Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:34:35.147386Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:34:35.147463Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:34:35.147507Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7486791465055587253:2308] in generation 1 2025-03-28T09:34:35.148543Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:34:35.184445Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:34:35.185734Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:34:35.185794Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7486791465055587257:2309] 2025-03-28T09:34:35.185803Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:34:35.185817Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:34:35.185827Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037888 2025-03-28T09:34:35.186912Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486791465055587234:2299], serverId# [1:7486791465055587256:2310], sessionId# [0:0:0] 2025-03-28T09:34:35.186962Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:34:35.187032Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:34:35.187089Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:34:35.187124Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:34:35.188044Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:34:35.188084Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:34:35.188233Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:34:35.188568Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:34:35.188650Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-03-28T09:34:35.189854Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:34:35.190561Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:34:35.190621Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:34:35.192767Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486791465055587271:2318], serverId# [1:7486791465055587272:2319], sessionId# [0:0:0] 2025-03-28T09:34:35.197806Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743154475237 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154475237 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:34:35.197845Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:34:35.197949Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:34:35.198028Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:34:35.198054Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:34:35.198073Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743154475237:281474976710657] in PlanQueue unit at 72075186224037888 2025-03-28T09:34:35.198325Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743154475237:281474976710657 keys extracted: 0 2025-03-28T09:34:35.198538Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:34:35.198652Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:34:35.198728Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 
2025-03-28T09:34:35.207084Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:34:35.207578Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:34:35.209232Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743154475236 2025-03-28T09:34:35.209253Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:34:35.209285Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743154475244 2025-03-28T09:34:35.209337Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743154475237} 2025-03-28T09:34:35.209381Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:34:35.209419Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:34:35.209446Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:34:35.209471Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:34:35.209537Z node 1 :TX_DATASHARD DEBUG: Complete [1743154475237 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7486791460760619738:2186], exec latency: 8 ms, propose latency: 10 ms 2025-03-28T09:34:35.209577Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-03-28T09:34:35.209653Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:34:35.215278Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7486791465055587257:2309][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-03-28T09:34:35.216381Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-03-28T09:34:35.216431Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:34:35.234818Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:34:35.235763Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:34:35.235895Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:34:35.235922Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-03-28T09:34:35.235933Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-03-28T09:34:35.236985Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:34:35.272324Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:34:35.283867Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2025-03-28T09:34:35.286681Z node 1 :PERSQUEUE DEBUG: [PQ: 
72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:34:35.287083Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2025-03-28T09:34:35.287696Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:34:35.287726Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2025-03-28T09:34:35.287753Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:34:35.287789Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:35.287836Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2025-03-28T09:34:35.290907Z node 1 :PERSQUEUE DEBUG: [PQ: 7207518622403 ... ASHARD INFO: OnTabletDead: 72075186224037892 2025-03-28T09:36:35.479840Z node 24 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 24, TabletId: 72075186224037891 not found 2025-03-28T09:36:35.480286Z node 24 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 24, TabletId: 72075186224037892 not found 2025-03-28T09:36:35.503086Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] server connected, pipe [24:1266:2645], now have 1 active actors on pipe ... release register requests ... wait for merge tx notification 2025-03-28T09:36:35.524396Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-28T09:36:35.524543Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-28T09:36:35.524965Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:36:35.525089Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:36:35.525191Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:36:35.525284Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037893 2025-03-28T09:36:35.525339Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:36:35.525393Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:36:35.525480Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:36:35.525648Z node 24 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:36:35.526873Z node 24 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 ... 
wait for final heartbeat >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-03-28T09:36:35.529521Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-28T09:36:35.529624Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-28T09:36:35.530419Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 2 max time lag 0ms effective offset 0 2025-03-28T09:36:35.530517Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 2 2025-03-28T09:36:35.530635Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 2025-03-28T09:36:35.530735Z node 24 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:36:35.531306Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:36:35.542363Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:36:35.542617Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:36:35.543030Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-28T09:36:35.543152Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-28T09:36:35.543337Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:36:35.543674Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1317:3041] Handle NKikimr::NPQ::TEvPartitionWriter::TEvInitResult { SessionId: TxId: Success { OwnerCookie: 72075186224037893|18adf253-e92243ba-1b031663-e47974eb_0 SourceIdInfo: SourceId: "\00072075186224037893" SeqNo: 0 Offset: 2 WriteTimestampMS: 0 Explicit: true State: STATE_REGISTERED } } 2025-03-28T09:36:35.543841Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037893:1][24:1314:3041] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-03-28T09:36:35.544084Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1317:3041] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:36:35.544394Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-28T09:36:35.544426Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-28T09:36:35.544524Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 1 2025-03-28T09:36:35.544621Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-28T09:36:35.544649Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got 
client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-28T09:36:35.544728Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037893' SeqNo: 1 partNo : 0 messageNo: 1 size 26 offset: -1 2025-03-28T09:36:35.544927Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037893' version v6000/0 2025-03-28T09:36:35.545054Z node 24 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v6000/0 2025-03-28T09:36:35.545262Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-03-28T09:36:35.586963Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 107 count 1 nextOffset 3 batches 1 2025-03-28T09:36:35.588248Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 93 WTime 6505 2025-03-28T09:36:35.588553Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:36:35.588630Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:36:35.588718Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:36:35.588795Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:36:35.588878Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037889 2025-03-28T09:36:35.588933Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-03-28T09:36:35.588957Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:36:35.589006Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:36:35.589095Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:36:35.589248Z node 24 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:36:35.589418Z node 24 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 93 2025-03-28T09:36:35.590630Z node 24 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 93 actorID [24:1286:3021] 2025-03-28T09:36:35.590884Z node 24 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037889' partition 0 offset 2 partno 0 count 1 parts 0 size 93 2025-03-28T09:36:35.601312Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:36:35.601501Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:36:35.601671Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037893', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 1, partNo: 0, Offset: 2 is stored on disk 2025-03-28T09:36:35.602096Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 1 requestId: cookie: 1 2025-03-28T09:36:35.602417Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037893:1][0][72075186224037889][24:1317:3041] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037893" SeqNo: 1 Offset: 2 WriteTimestampMS: 6505 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 1 } } } 2025-03-28T09:36:35.602546Z node 24 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037893:1][24:1314:3041] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-03-28T09:36:35.602756Z node 24 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037893 2025-03-28T09:36:35.602837Z node 24 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 1, at tablet: 72075186224037893 2025-03-28T09:36:35.613956Z node 24 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037893 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-03-28T09:36:36.012299Z node 24 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-28T09:36:36.012359Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-28T09:36:36.012502Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 3 max time lag 0ms effective offset 0 2025-03-28T09:36:36.012543Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 3 2025-03-28T09:36:36.012601Z node 24 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 
2025-03-28T09:36:36.012636Z node 24 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:36:36.012830Z node 24 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneTierExternalTtl [GOOD] Test command err: 2025-03-28T09:36:25.669265Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:36:25.728836Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:25.731958Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:25.732234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:36:25.748317Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:36:25.748563Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:36:25.754076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:36:25.754218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:36:25.754360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:36:25.754424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:36:25.754484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:36:25.754604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:36:25.754724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:36:25.754845Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:36:25.754934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:36:25.754989Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:36:25.755060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:36:25.755122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:36:25.769598Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:25.772849Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:36:25.773059Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:36:25.773099Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:36:25.773222Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:36:25.773338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:36:25.773411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:36:25.773448Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:36:25.773515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:36:25.773555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:36:25.773577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:36:25.773597Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:36:25.773700Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:36:25.773758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:36:25.773793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:36:25.773815Z node 1 
:TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:36:25.773878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:36:25.773912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:36:25.773939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:36:25.773955Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:36:25.774004Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:36:25.774030Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:36:25.774046Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:36:25.774078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:36:25.774100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:36:25.774117Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:36:25.774370Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=26; 2025-03-28T09:36:25.774429Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=28; 2025-03-28T09:36:25.774491Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-03-28T09:36:25.774543Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=21; 2025-03-28T09:36:25.774654Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:36:25.774709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:36:25.774730Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2025-03-28T09:36:25.774850Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:36:25.774876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:36:25.774902Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:36:25.774986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:36:25.775009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:36:25.775038Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:36:25.775185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:36:25.775215Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:36:25.775231Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T0 ... 
data;compute_actor_id=[1:1032:3025];bytes=350080;rows=43760;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-03-28T09:36:37.391373Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:36:37.391484Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:36:37.391511Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:36:37.391549Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:36:37.391683Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:36:37.391793Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:36240;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:36:37.391830Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:36:37.391926Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=36240; 2025-03-28T09:36:37.391971Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=289920;num_rows=36240;batch_columns=timestamp; 2025-03-28T09:36:37.392095Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1032:3025];bytes=289920;rows=36240;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-03-28T09:36:37.392200Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:36:37.392276Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:36:37.392363Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:36:37.392452Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:36:37.392505Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:36:37.392551Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:36:37.392572Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1037:3030] finished for tablet 9437184 2025-03-28T09:36:37.392963Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1032:3025];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.44},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.442}],"full":{"a":1743154596950411,"name":"_full_task","f":1743154596950411,"d_finished":0,"c":0,"l":1743154597392612,"d":442201},"events":[{"name":"bootstrap","f":1743154596950796,"d_finished":2727,"c":1,"l":1743154596953523,"d":2727},{"a":1743154597392439,"name":"ack","f":1743154597390752,"d_finished":1538,"c":2,"l":1743154597392384,"d":1711},{"a":1743154597392430,"name":"processing","f":1743154596955076,"d_finished":117126,"c":16,"l":1743154597392386,"d":117308},{"name":"ProduceResults","f":1743154596952129,"d_finished":3537,"c":20,"l":1743154597392562,"d":3537},{"a":1743154597392564,"name":"Finish","f":1743154597392564,"d_finished":0,"c":0,"l":1743154597392612,"d":48},{"name":"task_result","f":1743154596955093,"d_finished":115282,"c":14,"l":1743154597390582,"d":115282}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:36:37.393022Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1032:3025];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:36:37.393329Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1032:3025];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.004},{"events":["f_ack","l_task_result"],"t":0.44},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.442}],"full":{"a":1743154596950411,"name":"_full_task","f":1743154596950411,"d_finished":0,"c":0,"l":1743154597393057,"d":442646},"events":[{"name":"bootstrap","f":1743154596950796,"d_finished":2727,"c":1,"l":1743154596953523,"d":2727},{"a":1743154597392439,"name":"ack","f":1743154597390752,"d_finished":1538,"c":2,"l":1743154597392384,"d":2156},{"a":1743154597392430,"name":"processing","f":1743154596955076,"d_finished":117126,"c":16,"l":1743154597392386,"d":117753},{"name":"ProduceResults","f":1743154596952129,"d_finished":3537,"c":20,"l":1743154597392562,"d":3537},{"a":1743154597392564,"name":"Finish","f":1743154597392564,"d_finished":0,"c":0,"l":1743154597393057,"d":493},{"name":"task_result","f":1743154596955093,"d_finished":115282,"c":14,"l":1743154597390582,"d":115282}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);
2025-03-28T09:36:37.393383Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:36:36.949456Z;index_granules=0;index_portions=2;index_batches=1721;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=5175704;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5175704;selected_rows=0;
2025-03-28T09:36:37.393418Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor;
2025-03-28T09:36:37.393637Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1037:3030];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;
>> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD]
>> DataShardTxOrder::ZigZag_oo8_dirty
|95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest
>> DataShardTxOrder::ForceOnlineBetweenOnline_oo8
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8
2025-03-28 09:36:37,445 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-28 09:36:37,557 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
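For readers digging into the scan timings above: each `event=scan_finish` / `event=scan_finished` record embeds a `stats={...}` JSON payload whose `events` array carries per-stage counters (`f`/`l` are first/last timestamps and `d` is the total duration, all in microseconds; `c` is the call count). The sketch below is ours, not part of YDB or the test tooling — the helper name and the regex for cutting the payload out of a log line are assumptions based only on the records printed above.

```python
import json
import re

# Assumed shape: "...;stats={...};iterator=..." as in the scan records above.
STATS_RE = re.compile(r"stats=(\{.*?\});iterator=")

def stage_durations(log_line: str) -> dict:
    """Extract the stats={...} JSON from one scan log record and return
    total per-stage durations in milliseconds ('d' is in microseconds)."""
    m = STATS_RE.search(log_line)
    if not m:
        return {}
    stats = json.loads(m.group(1))
    return {e["name"]: e["d"] / 1000.0 for e in stats.get("events", [])}

# Applied to the scan_finished record above, this yields approximately:
# {'bootstrap': 2.7, 'ack': 2.2, 'processing': 117.8,
#  'ProduceResults': 3.5, 'Finish': 0.5, 'task_result': 115.3}
```

Here `processing` and `task_result` dominate the 442 ms total, which is consistent with the scan spending almost all of its time waiting on and handling task results rather than bootstrapping or acking.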
Process tree before termination:
   pid     rss    ref   pdirt
473773   46.6M  46.6M  23.6M  test_tool run_ut @/home/runner/.ya/build/build_root/2909/000b19/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk29/testing_out_stuff/test_tool.args
473962    1.9G   1.8G   1.6G  └─ ydb-core-tx-columnshard-ut_rw --trace-path-append /home/runner/.ya/build/build_root/2909/000b19/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chu
Test command err:
2025-03-28T09:26:38.896714Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-28T09:26:38.993428Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-28T09:26:39.017715Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-28T09:26:39.018035Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-28T09:26:39.026229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-28T09:26:39.026461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-28T09:26:39.026821Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-28T09:26:39.026952Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-28T09:26:39.027054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-28T09:26:39.027188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-28T09:26:39.027300Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-28T09:26:39.027428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-28T09:26:39.027539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-28T09:26:39.027639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-28T09:26:39.027773Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:26:39.027878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:26:39.058492Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:26:39.058840Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:26:39.058894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:26:39.059071Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:39.059237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:26:39.059327Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:26:39.059380Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:26:39.059486Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:26:39.059569Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:26:39.059614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:26:39.059679Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:26:39.059848Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:26:39.059911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:26:39.059948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:26:39.059974Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:26:39.060102Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:26:39.060154Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:26:39.060196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:26:39.060238Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:26:39.060326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:26:39.060367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:26:39.060397Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:26:39.060440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:26:39.060478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:26:39.060508Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:26:39.060903Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-03-28T09:26:39.061008Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=41; 2025-03-28T09:26:39.061113Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=52; 2025-03-28T09:26:39.061216Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=47; 2025-03-28T09:26:39.061393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:26:39.061443Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:26:39.061473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:26:39.061663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:26:39.061707Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:26:39.061734Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:26:39.061881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:26:39.061928Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:26:39.061958Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:26:39.062163Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:26:39.062209Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:26:39.062242Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:26:39.062366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:26:39.062409Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:26:39.062453Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
31.412081Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:384:2768:0]; 2025-03-28T09:36:31.412134Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:385:2848:0]; 2025-03-28T09:36:31.412199Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:386:2768:0]; 2025-03-28T09:36:31.412239Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:387:2784:0]; 2025-03-28T09:36:31.412276Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:388:2816:0]; 2025-03-28T09:36:31.412316Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:389:2768:0]; 2025-03-28T09:36:31.412414Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:390:2768:0]; 2025-03-28T09:36:31.412454Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:391:2768:0]; 2025-03-28T09:36:31.412492Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:392:2840:0]; 2025-03-28T09:36:31.412531Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:393:2768:0]; 2025-03-28T09:36:31.412567Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:394:2768:0]; 2025-03-28T09:36:31.412605Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:395:2768:0]; 2025-03-28T09:36:31.412641Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:396:8560:0]; 2025-03-28T09:36:31.412679Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:397:2784:0]; 2025-03-28T09:36:31.412717Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:398:2816:0]; 2025-03-28T09:36:31.412754Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:399:2776:0]; 2025-03-28T09:36:31.412790Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:400:2776:0]; 2025-03-28T09:36:31.412829Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:401:2864:0]; 2025-03-28T09:36:31.412866Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:402:2776:0]; 2025-03-28T09:36:31.412902Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:403:2776:0]; 2025-03-28T09:36:31.412953Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:404:2784:0]; 2025-03-28T09:36:31.413010Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:405:2808:0]; 2025-03-28T09:36:31.413076Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:406:2784:0]; 2025-03-28T09:36:31.413117Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:407:2792:0]; 2025-03-28T09:36:31.413160Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:408:2840:0]; 2025-03-28T09:36:31.413216Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:409:2784:0]; 2025-03-28T09:36:31.413268Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:410:2784:0]; 
2025-03-28T09:36:31.413310Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:411:2784:0]; 2025-03-28T09:36:31.413348Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:412:2792:0]; 2025-03-28T09:36:31.413388Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:413:2784:0]; 2025-03-28T09:36:31.413426Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:414:2784:0]; 2025-03-28T09:36:31.413465Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:415:2864:0]; 2025-03-28T09:36:31.413504Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:416:2784:0]; 2025-03-28T09:36:31.413554Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:417:2784:0]; 2025-03-28T09:36:31.413598Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:418:2800:0]; 2025-03-28T09:36:31.413635Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:419:2776:0]; 2025-03-28T09:36:31.413669Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:420:2776:0]; 2025-03-28T09:36:31.413709Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:421:2784:0]; 2025-03-28T09:36:31.413762Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:422:2864:0]; 2025-03-28T09:36:31.413820Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:423:2776:0]; 2025-03-28T09:36:31.413858Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:424:2776:0]; 2025-03-28T09:36:31.413911Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:425:2800:0]; 2025-03-28T09:36:31.413948Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:426:2776:0]; 2025-03-28T09:36:31.414002Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:427:2776:0]; 2025-03-28T09:36:31.414041Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:428:2768:0]; 2025-03-28T09:36:31.414092Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:429:2848:0]; 2025-03-28T09:36:31.414134Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:430:2768:0]; 2025-03-28T09:36:31.414187Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:431:2784:0]; 2025-03-28T09:36:31.414263Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:432:2816:0]; 2025-03-28T09:36:31.414318Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:433:2768:0]; 2025-03-28T09:36:31.414377Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:434:2768:0]; 2025-03-28T09:36:31.414419Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:435:2768:0]; 2025-03-28T09:36:31.414475Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:436:2840:0]; 2025-03-28T09:36:31.414516Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:437:2768:0]; 2025-03-28T09:36:31.414554Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:438:2768:0]; 2025-03-28T09:36:31.414608Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:439:2768:0]; 2025-03-28T09:36:31.414646Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:98:255:440:8560:0]; 2025-03-28T09:36:32.454978Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-28T09:36:32.455689Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[284] (CS::INDEXATION) apply at tablet 9437184 2025-03-28T09:36:32.482751Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:118 Blob count: 727 2025-03-28T09:36:32.486663Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3540408;raw_bytes=5350040;count=2;records=50000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=119955652;raw_bytes=178485710;count=63;records=1720000} inactive {blob_bytes=228289052;raw_bytes=334228965;count=117;records=3253860} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:36:33.011779Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-28T09:36:33.012430Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[286] (CS::INDEXATION) apply at tablet 9437184 2025-03-28T09:36:33.042515Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:119 Blob count: 727 2025-03-28T09:36:33.046875Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3540408;raw_bytes=5350040;count=2;records=50000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=119955652;raw_bytes=178485710;count=63;records=1720000} inactive {blob_bytes=228289052;raw_bytes=334228965;count=117;records=3253860} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:36:33.101569Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-28T09:36:33.101795Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[288] (CS::INDEXATION) apply at tablet 9437184 2025-03-28T09:36:33.103683Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:120 Blob count: 24 2025-03-28T09:36:33.103942Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3540408;raw_bytes=5350040;count=2;records=50000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=119955652;raw_bytes=178485710;count=63;records=1720000} inactive {blob_bytes=228289052;raw_bytes=334228965;count=117;records=3253860} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:36:34.451156Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-28T09:36:34.452158Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[290] (CS::GENERAL) apply at tablet 9437184 2025-03-28T09:36:34.551824Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:121 Blob count: 1303 2025-03-28T09:36:34.560112Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3540408;raw_bytes=5350040;count=2;records=50000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=119955652;raw_bytes=178485710;count=63;records=1720000} inactive {blob_bytes=228289052;raw_bytes=334228965;count=117;records=3253860} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 
9437184
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2909/000b19/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk29/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2909/000b19/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/chunk29/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {})
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD]
Test command err:
2025-03-28T09:36:30.408572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:30.408865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:30.408912Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001651/r3tmp/tmpHCXHxy/pdisk_1.dat 2025-03-28T09:36:30.674813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:36:30.699885Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:30.736862Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:30.737039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:30.748186Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:36:30.826114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:36:30.855506Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:30.856238Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:30.856546Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:36:30.856703Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:30.889020Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:30.889503Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:30.889591Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:36:30.890698Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:36:30.890757Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:36:30.890792Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:36:30.891023Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:36:30.891113Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:36:30.891170Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:36:30.901844Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:36:30.931603Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:36:30.931789Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:36:30.931894Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:36:30.931923Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:36:30.931976Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:36:30.932010Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:36:30.932207Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:30.932252Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:30.932530Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:36:30.932624Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:36:30.933030Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:36:30.933067Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:30.933097Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:36:30.933128Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:36:30.933166Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:36:30.933214Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:36:30.933253Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:36:30.933351Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:30.933396Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:30.933440Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:36:30.933499Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:36:30.933531Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:36:30.933620Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:36:30.933820Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:36:30.933868Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:36:30.933953Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:36:30.933999Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:36:30.934046Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:36:30.934080Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-28T09:36:30.934108Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:36:30.934375Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:36:30.934418Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:36:30.934450Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:36:30.934476Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:36:30.934521Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:36:30.934559Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:36:30.934610Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:36:30.934642Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:36:30.934667Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:36:30.935894Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:36:30.935938Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:36:30.946501Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:36:30.946557Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:36:30.946600Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:36:30.946628Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:36:30.946677Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:36:31.093529Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:31.093576Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:31.093602Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:36:31.093857Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:36:31.093879Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:36:31.093949Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:36:31.093984Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:36:31.094023Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:36:31.094047Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:36:31.096752Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:36:31.096825Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:36:31.097303Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:31.097328Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:31.097357Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:36:3 ... 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2025-03-28T09:36:38.232448Z node 2 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-03-28T09:36:38.232568Z node 2 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976715671. Shard resolve complete, resolved shards: 1 2025-03-28T09:36:38.232611Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1244:2998] TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-03-28T09:36:38.232674Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1244:2998] TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 2: [72075186224037888] 2025-03-28T09:36:38.232716Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:38.232749Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-28T09:36:38.232930Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Collect channels updates for task: 1 at actor [2:1248:2998] 2025-03-28T09:36:38.232974Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [2:1248:2998], channels: 1 2025-03-28T09:36:38.233016Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1244:2998] TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-28T09:36:38.233053Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1244:2998] TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1248:2998], 2025-03-28T09:36:38.233105Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1244:2998] TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1248:2998], 2025-03-28T09:36:38.233138Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1244:2998] TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-03-28T09:36:38.233576Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1244:2998] TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1248:2998], task: 1, state: COMPUTE_STATE_EXECUTING, stats: { } 2025-03-28T09:36:38.233619Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1244:2998] TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Waiting for: CA [2:1248:2998], 2025-03-28T09:36:38.233660Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1244:2998] TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1248:2998], 2025-03-28T09:36:38.233824Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1250:2998], Recipient [2:1169:2949]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2025-03-28T09:36:38.233917Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:36:38.233960Z node 2 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v4010/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2000/18446744073709551615 ImmediateWriteEdgeReplied# v2000/18446744073709551615 2025-03-28T09:36:38.233995Z node 2 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v5000/18446744073709551615 2025-03-28T09:36:38.234032Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-03-28T09:36:38.234085Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-28T09:36:38.234110Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:36:38.234135Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:38.234172Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:36:38.234200Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-03-28T09:36:38.234227Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-28T09:36:38.234242Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:36:38.234255Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:36:38.234267Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:36:38.234326Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2025-03-28T09:36:38.234470Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[2:1250:2998], 0} after executionsCount# 1 2025-03-28T09:36:38.234515Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1250:2998], 0} sends rowCount# 1, bytes# 32, quota rows left# 32766, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:36:38.234577Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1250:2998], 0} finished in read 2025-03-28T09:36:38.234622Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-28T09:36:38.234646Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:36:38.234662Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:36:38.234700Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit 
CompletedOperations 2025-03-28T09:36:38.234736Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-28T09:36:38.234750Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:36:38.234768Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-03-28T09:36:38.234791Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:36:38.235235Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1250:2998], Recipient [2:1169:2949]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:36:38.235267Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-03-28T09:36:38.235761Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1244:2998] TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: ExecuteState, got execution state from compute actor: [2:1248:2998], task: 1, state: COMPUTE_STATE_FINISHED, stats: { CpuTimeUs: 575 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 124 FinishTimeMs: 1743154598235 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 56 BuildCpuTimeUs: 68 HostName: "ghrun-mdsjyjltvm" NodeId: 2 StartTimeMs: 1743154598234 CreateTimeMs: 1743154598233 } MaxMemoryUsage: 1048576 } 2025-03-28T09:36:38.235836Z node 2 :KQP_EXECUTER INFO: TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Compute actor has finished execution: [2:1248:2998] 2025-03-28T09:36:38.235949Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1244:2998] TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:36:38.236005Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1244:2998] TxId: 281474976715671. Ctx: { TraceId: 01jqe1rm908z9kcrjm0ctat4nf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjdiYzIzYi1lNDk5YzdhYS02YjYwNjM1Ny1mZDBiOGIzOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Resource usage for last stat interval: ComputeTime: 0.000575s ReadRows: 1 ReadBytes: 8 ru: 1 rate limiter was not found force flag: 1
{ items { uint32_value: 7 } items { uint32_value: 4 } }
>> TColumnShardTestSchema::RebootHotTiersWithStat
>> DataShardTxOrder::RandomPoints_ReproducerDelayData1
>> DataShardTxOrder::ImmediateBetweenOnline_Init
>> TAsyncIndexTests::SplitBothWithReboots[PipeResets]
>> Cdc::EnqueueRequestProcessSend [GOOD]
>> Cdc::InitialScanAndResolvedTimestamps
>> TColumnShardTestSchema::ExportWithLostAnswer
>> YdbIndexTable::MultiShardTableOneIndex [GOOD]
>> YdbIndexTable::MultiShardTableOneIndexDataColumn
>> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD]
>> RetryPolicy::TWriteSession_SwitchBackToLocalCluster
>> DataShardTxOrder::ZigZag_oo8_dirty [GOOD]
>> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD]
>> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD]
Test command err:
2025-03-28T09:36:38.809262Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot
2025-03-28T09:36:38.815907Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored
2025-03-28T09:36:38.816372Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148]
2025-03-28T09:36:38.816539Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:36:38.863336Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete
2025-03-28T09:36:38.920916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs
2025-03-28T09:36:38.920961Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:36:38.927802Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete
2025-03-28T09:36:38.928826Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute
2025-03-28T09:36:38.930088Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184
2025-03-28T09:36:38.930142Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184
2025-03-28T09:36:38.930178Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184
2025-03-28T09:36:38.930488Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete
2025-03-28T09:36:38.930541Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute
2025-03-28T09:36:38.930601Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2
2025-03-28T09:36:38.987745Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete
2025-03-28T09:36:39.012366Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184
2025-03-28T09:36:39.012519Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params
2025-03-28T09:36:39.012593Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213]
2025-03-28T09:36:39.012621Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184
2025-03-28T09:36:39.012645Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme
2025-03-28T09:36:39.012669Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet#
9437184 2025-03-28T09:36:39.012879Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:39.012952Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:39.013138Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:36:39.013201Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:36:39.013233Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:39.013281Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:39.013310Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:36:39.013338Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:36:39.013366Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:36:39.013399Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:36:39.013432Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:39.013511Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:39.013537Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:39.013571Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:36:39.015251Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:36:39.015290Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:36:39.015346Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:36:39.015453Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:36:39.015483Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:36:39.015516Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:36:39.015551Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:39.015592Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:36:39.015630Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:36:39.015654Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:39.015856Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:36:39.015885Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 
2025-03-28T09:36:39.015908Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:36:39.015929Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:39.015964Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:36:39.015983Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:36:39.016011Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:36:39.016049Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:39.016079Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:36:39.028114Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:36:39.028181Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:39.028211Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:39.028242Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:36:39.028295Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:36:39.028807Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:39.028855Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:39.028890Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:36:39.028962Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:36:39.028991Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:36:39.029105Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:39.029158Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:39.029189Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:36:39.029214Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:36:39.035619Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:36:39.035685Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:39.035879Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:39.035914Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:39.035956Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:39.035985Z node 1 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:36:39.036013Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:39.036043Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:36:39.036071Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:36:39.036103Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:39.036146Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:36:39.036178Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:36:39.036208Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:36:39.036352Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:36:39.036410Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:39.036434Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:36:39.036457Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:36:39.036495Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:36:39.036561Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:39.036588Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:36:39.036619Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:39.036652Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:36:39.036713Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:36:39.036753Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:36:39.036794Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:36:39.036840Z node 1 :TX_DATA ... 
CE: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-03-28T09:36:43.616194Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-03-28T09:36:43.616210Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-03-28T09:36:43.616325Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-03-28T09:36:43.616345Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-03-28T09:36:43.616371Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-03-28T09:36:43.616392Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-03-28T09:36:43.616417Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-28T09:36:43.616442Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-03-28T09:36:43.616464Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-03-28T09:36:43.616489Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:43.616511Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:43.616533Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:36:43.616555Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:36:43.616687Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [2:99:2134], Recipient [2:345:2312]: {TEvPlanStep step# 1000016 MediatorId# 0 TabletID 9437185} 2025-03-28T09:36:43.616710Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:36:43.616765Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit WaitForPlan 2025-03-28T09:36:43.616785Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:36:43.616803Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit WaitForPlan 2025-03-28T09:36:43.616821Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PlanQueue 2025-03-28T09:36:43.616903Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 45 at step 1000016 at tablet 9437185 { Transactions { TxId: 45 AckTo { RawX1: 99 RawX2: 8589936726 } } Step: 1000016 MediatorID: 0 TabletID: 9437185 } 2025-03-28T09:36:43.616927Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-03-28T09:36:43.617026Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:345:2312], Recipient [2:345:2312]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:43.617043Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:43.617066Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-03-28T09:36:43.617085Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:36:43.617100Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-28T09:36:43.617120Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in 
PlanQueue unit at 9437185 2025-03-28T09:36:43.617135Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2025-03-28T09:36:43.617154Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:36:43.617180Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2025-03-28T09:36:43.617199Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2025-03-28T09:36:43.617214Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2025-03-28T09:36:43.617575Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2025-03-28T09:36:43.617597Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:36:43.617613Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2025-03-28T09:36:43.617628Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2025-03-28T09:36:43.617642Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2025-03-28T09:36:43.617669Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:36:43.617698Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2025-03-28T09:36:43.617721Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:43.617737Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2025-03-28T09:36:43.617765Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437185 2025-03-28T09:36:43.617781Z node 2 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437185 2025-03-28T09:36:43.617797Z node 2 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437185 2025-03-28T09:36:43.617817Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:36:43.617831Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2025-03-28T09:36:43.617844Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2025-03-28T09:36:43.617856Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2025-03-28T09:36:43.617880Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:36:43.617897Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2025-03-28T09:36:43.617911Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2025-03-28T09:36:43.617937Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2025-03-28T09:36:43.617954Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:36:43.617967Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2025-03-28T09:36:43.617980Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2025-03-28T09:36:43.617994Z node 2 :TX_DATASHARD TRACE: 
Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2025-03-28T09:36:43.618009Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:36:43.618022Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2025-03-28T09:36:43.618034Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2025-03-28T09:36:43.618050Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2025-03-28T09:36:43.618063Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:36:43.618077Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-03-28T09:36:43.618091Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-03-28T09:36:43.618103Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-03-28T09:36:43.618323Z node 2 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-03-28T09:36:43.618366Z node 2 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:36:43.618391Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:36:43.618406Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-03-28T09:36:43.618422Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-03-28T09:36:43.618439Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-03-28T09:36:43.618537Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-03-28T09:36:43.618553Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-03-28T09:36:43.618568Z node 2 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-03-28T09:36:43.618583Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-03-28T09:36:43.618599Z node 2 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:36:43.618612Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-03-28T09:36:43.618626Z node 2 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-03-28T09:36:43.618655Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:43.618670Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-28T09:36:43.618701Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-03-28T09:36:43.618719Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-03-28T09:36:43.630807Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-03-28T09:36:43.630856Z node 2 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-03-28T09:36:43.630896Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:43.630929Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-03-28T09:36:43.630986Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:36:43.631021Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:43.631217Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-03-28T09:36:43.631244Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-03-28T09:36:43.631268Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-28T09:36:43.631292Z node 2 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-03-28T09:36:43.631332Z node 2 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:36:43.631370Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD]
Test command err:
2025-03-28T09:36:40.071693Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:40.076471Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:40.076822Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:36:40.076987Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:40.108007Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:40.161984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:36:40.162021Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:40.167809Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:40.168732Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:36:40.169746Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:36:40.169790Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:36:40.169836Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:36:40.170094Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:36:40.170139Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:36:40.170181Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:36:40.221847Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:36:40.240453Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:36:40.240582Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing 
params 2025-03-28T09:36:40.240642Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:36:40.240663Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:36:40.240694Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:36:40.240755Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:40.240908Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:40.240957Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:40.241108Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:36:40.241178Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:36:40.241231Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:40.241271Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:40.241295Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:36:40.241313Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:36:40.241336Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:36:40.241360Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:36:40.241390Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:40.241460Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:40.241483Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:40.241513Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:36:40.243103Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:36:40.243138Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:36:40.243191Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:36:40.243283Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:36:40.243315Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:36:40.243347Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:36:40.243378Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:40.243414Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:36:40.243441Z 
node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:36:40.243460Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:40.243671Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:36:40.243694Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:36:40.243718Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:36:40.243738Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:40.243772Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:36:40.243800Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:36:40.243823Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:36:40.243850Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:40.243872Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:36:40.255404Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:36:40.255452Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:40.255477Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:40.255509Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:36:40.255619Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:36:40.256007Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:40.256039Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:40.256066Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:36:40.256127Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:36:40.256157Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:36:40.256233Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:40.256271Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:40.256298Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:36:40.256319Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:36:40.258717Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:36:40.258771Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:40.258921Z node 1 
:TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:40.258955Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:40.258996Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:40.259022Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:36:40.259044Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:40.259067Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:36:40.259094Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:36:40.259124Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:40.259188Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:36:40.259226Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:36:40.259249Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:36:40.259364Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:36:40.259385Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:40.259398Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:36:40.259409Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:36:40.259421Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:36:40.259456Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:40.259469Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:36:40.259489Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:40.259509Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:36:40.259558Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:36:40.259579Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:36:40.259612Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:36:40.259640Z node 1 :TX_DATA ... 
lt to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:36:43.872947Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:36:43.872999Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:36:43.873010Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-03-28T09:36:43.873038Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:36:43.873061Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:36:43.873136Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:36:43.873156Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-03-28T09:36:43.873181Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:36:43.873217Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:36:43.873294Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:36:43.873332Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-03-28T09:36:43.873359Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:36:43.873378Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:36:43.873452Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:36:43.873473Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-03-28T09:36:43.873499Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:36:43.873519Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:36:43.873591Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:36:43.873612Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-28T09:36:43.873758Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 104 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 34} 2025-03-28T09:36:43.873791Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.873852Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 104 2025-03-28T09:36:43.873981Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 110 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 36} 2025-03-28T09:36:43.874010Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.874032Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 110 2025-03-28T09:36:43.874108Z 
node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 107 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 35} 2025-03-28T09:36:43.874132Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.874160Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 107 2025-03-28T09:36:43.874230Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 113 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 37} 2025-03-28T09:36:43.874265Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.874290Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 113 2025-03-28T09:36:43.874374Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-03-28T09:36:43.874398Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.874436Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-03-28T09:36:43.874509Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-03-28T09:36:43.874531Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.874551Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-03-28T09:36:43.874625Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-03-28T09:36:43.874652Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.874677Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-03-28T09:36:43.874782Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-03-28T09:36:43.874814Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.874836Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-03-28T09:36:43.874918Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-03-28T09:36:43.874941Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.874997Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 
consumer 9437186 txId 128 2025-03-28T09:36:43.875073Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-03-28T09:36:43.875099Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.875119Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-03-28T09:36:43.875217Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-03-28T09:36:43.875241Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.875261Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-03-28T09:36:43.875338Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-28T09:36:43.875360Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.875380Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-03-28T09:36:43.875424Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-03-28T09:36:43.875456Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.875493Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-03-28T09:36:43.875565Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-03-28T09:36:43.875607Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.875628Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-03-28T09:36:43.875707Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-28T09:36:43.875744Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.875767Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-03-28T09:36:43.875824Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-03-28T09:36:43.875850Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.875873Z node 1 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-03-28T09:36:43.889361Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:36:43.889423Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-03-28T09:36:43.889471Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-28T09:36:43.889523Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-28T09:36:43.889555Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:36:43.889759Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-28T09:36:43.889800Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.889824Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD]
Test command err:
2025-03-28T09:36:39.319394Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:39.324105Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:39.324438Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:36:39.324608Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:39.357526Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:39.413166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:36:39.413205Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:39.420198Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:39.421284Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:36:39.422307Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:36:39.422351Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:36:39.422393Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:36:39.422640Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:36:39.422700Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:36:39.422745Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:36:39.473331Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:36:39.504657Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:36:39.504843Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 
2025-03-28T09:36:39.504938Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:36:39.504965Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:36:39.504987Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:36:39.505027Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:39.505200Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:39.505234Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:39.505387Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:36:39.505465Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:36:39.505503Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:39.505545Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:39.505569Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:36:39.505590Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:36:39.505613Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:36:39.505638Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:36:39.505667Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:39.505735Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:39.505755Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:39.505791Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:36:39.507448Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:36:39.507483Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:36:39.507534Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:36:39.507640Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:36:39.507668Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:36:39.507701Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:36:39.507733Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:39.507771Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:36:39.507793Z node 1 
:TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:36:39.507814Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:39.507996Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:36:39.508017Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:36:39.508041Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:36:39.508060Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:39.508097Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:36:39.508126Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:36:39.508149Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:36:39.508189Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:39.508213Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:36:39.519933Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:36:39.519983Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:39.520007Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:39.520043Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:36:39.520099Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:36:39.520492Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:39.520530Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:39.520558Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:36:39.520632Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:36:39.520651Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:36:39.520739Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:39.520788Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:39.520815Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:36:39.520834Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:36:39.523293Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:36:39.523344Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:39.523522Z node 1 :TX_DATASHARD 
TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:39.523555Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:39.523614Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:39.523650Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:36:39.523677Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:39.523705Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:36:39.523742Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:36:39.523779Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:39.523839Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:36:39.523865Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:36:39.523885Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:36:39.524016Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:36:39.524043Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:39.524057Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:36:39.524070Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:36:39.524083Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:36:39.524119Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:39.524142Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:36:39.524165Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:39.524187Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:36:39.524237Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:36:39.524261Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:36:39.524282Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:36:39.524307Z node 1 :TX_DATA ... 
[1000005:154] at 9437184 on unit CompleteOperation 2025-03-28T09:36:43.857839Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-03-28T09:36:43.857876Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-03-28T09:36:43.857891Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:43.857996Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-03-28T09:36:43.858020Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.858043Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-03-28T09:36:43.858110Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:36:43.858126Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:134] at 9437186 on unit CompleteOperation 2025-03-28T09:36:43.858148Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 134] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-03-28T09:36:43.858175Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-03-28T09:36:43.858189Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:36:43.858259Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-28T09:36:43.858277Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-28T09:36:43.858289Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:36:43.858303Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2025-03-28T09:36:43.858340Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-03-28T09:36:43.858363Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-28T09:36:43.858376Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:36:43.858426Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-28T09:36:43.858437Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-28T09:36:43.858456Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:36:43.858469Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2025-03-28T09:36:43.858485Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-03-28T09:36:43.858504Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 
0 Seqno# 46} 2025-03-28T09:36:43.858532Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:36:43.858587Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-28T09:36:43.858598Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-28T09:36:43.858610Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:36:43.858621Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2025-03-28T09:36:43.858638Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-03-28T09:36:43.858679Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-03-28T09:36:43.858712Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:36:43.858764Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:36:43.858776Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2025-03-28T09:36:43.858794Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-03-28T09:36:43.858815Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-28T09:36:43.858833Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:36:43.858894Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:36:43.858907Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2025-03-28T09:36:43.858927Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-03-28T09:36:43.858950Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-03-28T09:36:43.858963Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:36:43.859027Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:36:43.859041Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-03-28T09:36:43.859069Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-03-28T09:36:43.859095Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-28T09:36:43.859124Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:36:43.859283Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 
98} 2025-03-28T09:36:43.859307Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.859333Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-03-28T09:36:43.859393Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-03-28T09:36:43.859406Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.859419Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-03-28T09:36:43.859459Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-28T09:36:43.859479Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.859493Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-03-28T09:36:43.859523Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-03-28T09:36:43.859535Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.859547Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-03-28T09:36:43.859609Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-03-28T09:36:43.859623Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.859635Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-03-28T09:36:43.859667Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-03-28T09:36:43.859702Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.859724Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-03-28T09:36:43.859788Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-03-28T09:36:43.859805Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.859818Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-03-28T09:36:43.859867Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 
9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-28T09:36:43.859905Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.859930Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-03-28T09:36:43.859991Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-03-28T09:36:43.860007Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.860023Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-03-28T09:36:43.860065Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-28T09:36:43.860083Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:43.860101Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152
|95.2%| [TA] $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log}
>> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty
>> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply
>> TColumnShardTestSchema::RebootHotTiersTtl
>> Cdc::InitialScanAndResolvedTimestamps [GOOD]
>> TGroupMapperTest::MonteCarlo [GOOD]
|95.2%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log}
>> Secret::Validation [GOOD]
|95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut/unittest >> TGroupMapperTest::MonteCarlo [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::InitialScanAndResolvedTimestamps [GOOD]
Test command err: 2025-03-28T09:34:34.260358Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791458976633535:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:34:34.261233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012fc/r3tmp/tmpIBJktq/pdisk_1.dat 2025-03-28T09:34:34.593354Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:34.617210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:34.617318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:34.620672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2818, node 1 2025-03-28T09:34:34.807391Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:34.807420Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:34.807426Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:34.807544Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:35.050346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:35.068459Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046644480 2025-03-28T09:34:35.087599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:35.114368Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7486791463271601444:2308] 2025-03-28T09:34:35.114840Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:34:35.129863Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:34:35.129950Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:34:35.132834Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:34:35.132906Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:34:35.132960Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:34:35.134033Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:34:35.134119Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:34:35.134165Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7486791463271601458:2308] in generation 1 2025-03-28T09:34:35.135513Z node 1 :TX_DATASHARD DEBUG: 
TDataShard::TTxInitRestored::Complete 2025-03-28T09:34:35.190257Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:34:35.190407Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:34:35.190452Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7486791463271601462:2309] 2025-03-28T09:34:35.190467Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:34:35.190476Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:34:35.190491Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:34:35.190654Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:34:35.190757Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:34:35.190810Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486791463271601441:2301], serverId# [1:7486791463271601461:2310], sessionId# [0:0:0] 2025-03-28T09:34:35.190932Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:34:35.190950Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:34:35.190972Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:34:35.190998Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:34:35.191028Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:34:35.191222Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:34:35.191294Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-03-28T09:34:35.192447Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:34:35.192769Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:34:35.192827Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:34:35.194759Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486791463271601476:2318], serverId# [1:7486791463271601477:2319], sessionId# [0:0:0] 2025-03-28T09:34:35.199818Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743154475244 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154475244 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:34:35.199867Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:34:35.200006Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:34:35.200080Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:34:35.200104Z node 1 :TX_DATASHARD DEBUG: 
GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:34:35.200121Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743154475244:281474976710657] in PlanQueue unit at 72075186224037888 2025-03-28T09:34:35.200429Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743154475244:281474976710657 keys extracted: 0 2025-03-28T09:34:35.200565Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:34:35.200687Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:34:35.200743Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:34:35.203801Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:34:35.206073Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:34:35.207757Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743154475243 2025-03-28T09:34:35.207836Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:34:35.207884Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743154475251 2025-03-28T09:34:35.207932Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743154475244} 2025-03-28T09:34:35.207993Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:34:35.208045Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:34:35.208069Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:34:35.208164Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:34:35.208214Z node 1 :TX_DATASHARD DEBUG: Complete [1743154475244 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7486791458976633941:2184], exec latency: 5 ms, propose latency: 7 ms 2025-03-28T09:34:35.208259Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-03-28T09:34:35.208316Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:34:35.217706Z node 1 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][1:7486791463271601462:2309][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2025-03-28T09:34:35.219172Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-03-28T09:34:35.219241Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:34:35.234478Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:34:35.234638Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710658 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:34:35.234670Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed 
scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976710658 2025-03-28T09:34:35.234734Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710658 at tablet 72075186224037888 2025-03-28T09:34:35.235032Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:34:35.236269Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:34:35.272331Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:34:35.283573Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Registered with mediator time cast 2025-03-28T09:34:35.286700Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:34:35.287022Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] doesn't have tx info 2025-03-28T09:34:35.287665Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:34:35.287698Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] no config, start with empty partitions and default config 2025-03-28T09:34:35.287714Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037889] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:34:35.287824Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037889] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:34:35.287904Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037889] doesn't have tx writes info 2025-03-28T09:34:35.288732Z node 1 :PERSQUEUE DEBUG: [PQ: 720751862240378 ... G: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:36:47.053008Z node 27 :TX_DATASHARD DEBUG: Add schema snapshot: pathId# [OwnerId: 72057594046644480, LocalPathId: 2], version# 3, step# 7500, txId# 281474976715662, at tablet# 72075186224037888 2025-03-28T09:36:47.053376Z node 27 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:47.068986Z node 27 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 7500} 2025-03-28T09:36:47.069127Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:36:47.069190Z node 27 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:36:47.069251Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:36:47.069405Z node 27 :TX_DATASHARD DEBUG: Complete [7500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [27:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:36:47.069500Z node 27 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2025-03-28T09:36:47.069659Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:36:47.069929Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v7500/18446744073709551615, at tablet# 72075186224037888 2025-03-28T09:36:47.070124Z node 27 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] 
SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-28T09:36:47.070712Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Emit change records: edge# v7500/18446744073709551615, at tablet# 72075186224037888 2025-03-28T09:36:47.072178Z node 27 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037888 state Ready 2025-03-28T09:36:47.072283Z node 27 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:36:47.083864Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2025-03-28T09:36:47.084016Z node 27 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 } 2025-03-28T09:36:47.084135Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:36:47.084239Z node 27 :TX_DATASHARD DEBUG: Waiting for PlanStep# 9000 from mediator time cast 2025-03-28T09:36:47.084320Z node 27 :TX_DATASHARD INFO: [CdcStreamHeartbeat] Enqueue 0 change record(s): at tablet# 72075186224037888 2025-03-28T09:36:47.084393Z node 27 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:36:47.084553Z node 27 :CHANGE_EXCHANGE DEBUG: [ChangeSender][72075186224037888:1][27:684:2580] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 }] } 2025-03-28T09:36:47.084625Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:978:2780] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] BodySize: 0 }] } 2025-03-28T09:36:47.084985Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 2025-03-28T09:36:47.085128Z node 27 :TX_DATASHARD DEBUG: Send 1 change records: to# [27:978:2780], at tablet# 72075186224037888 2025-03-28T09:36:47.085167Z node 27 :TX_DATASHARD INFO: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2025-03-28T09:36:47.085259Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:978:2780] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:36:47.085444Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:1059:2780] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 4 Group: 0 Step: 6000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 7] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:36:47.085868Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-28T09:36:47.085906Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-28T09:36:47.085976Z node 27 :PERSQUEUE DEBUG: Answer ok 
topic: 'streamImpl' partition: 0 messageNo: 2 requestId: cookie: 2 2025-03-28T09:36:47.086073Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-28T09:36:47.086096Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-28T09:36:47.086130Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 4 partNo : 0 messageNo: 3 size 26 offset: -1 2025-03-28T09:36:47.086279Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v6000/0 2025-03-28T09:36:47.086415Z node 27 :PERSQUEUE INFO: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v6000/0 2025-03-28T09:36:47.086609Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2025-03-28T09:36:47.087284Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 3 PartNo 0 PackedSize 107 count 1 nextOffset 4 batches 1 2025-03-28T09:36:47.087693Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 3,1 HeadOffset 0 endOffset 3 curOffset 4 d0000000000_00000000000000000003_00000_0000000001_00000| size 93 WTime 7451 2025-03-28T09:36:47.087790Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:36:47.087817Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:36:47.087841Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:36:47.087864Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:36:47.087888Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] m0000000000p72075186224037889 2025-03-28T09:36:47.087910Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] d0000000000_00000000000000000003_00000_0000000001_00000| 2025-03-28T09:36:47.087931Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:36:47.087952Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:36:47.087976Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:36:47.088035Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:36:47.088094Z node 27 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 3 partNo 0 count 1 size 93 2025-03-28T09:36:47.088713Z node 27 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 3 count 1 size 93 actorID [27:913:2731] 2025-03-28T09:36:47.088801Z node 27 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. 
Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 size 93 2025-03-28T09:36:47.099102Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:36:47.099213Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:36:47.099267Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-03-28T09:36:47.099449Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 3 requestId: cookie: 2 2025-03-28T09:36:47.099635Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][27:1059:2780] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 4 Offset: 3 WriteTimestampMS: 7451 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 2 } } } 2025-03-28T09:36:47.099699Z node 27 :CHANGE_EXCHANGE DEBUG: [CdcChangeSenderMain][72075186224037888:1][27:978:2780] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2025-03-28T09:36:47.099823Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2025-03-28T09:36:47.099849Z node 27 :TX_DATASHARD DEBUG: RemoveChangeRecord: order: 4, at tablet: 72075186224037888 2025-03-28T09:36:47.110600Z node 27 :TX_DATASHARD INFO: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2025-03-28T09:36:47.246250Z node 27 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'streamImpl' requestId: 2025-03-28T09:36:47.246306Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2025-03-28T09:36:47.246431Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 count 10000 size 26214400 endOffset 4 max time lag 0ms effective offset 0 2025-03-28T09:36:47.246513Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 4 2025-03-28T09:36:47.246903Z node 27 :PERSQUEUE DEBUG: [PQ: 72075186224037889, Partition: 0, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-03-28T09:36:47.246984Z node 27 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:36:47.247790Z node 27 :PERSQUEUE DEBUG: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0
|95.3%| [TA] $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|95.3%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|95.3%| [TA] $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Validation [GOOD]
Test command err: 2025-03-28T09:34:04.164148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:04.164638Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:04.164695Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e3d/r3tmp/tmpCJrVrO/pdisk_1.dat TServer::EnableGrpc on GrpcPort 23662, node 1 TClient is connected to server localhost:16975 2025-03-28T09:34:04.749478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:04.780202Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:04.782852Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:04.782903Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:04.782932Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:04.783109Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:04.818742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:04.818877Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:04.830311Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-03-28T09:34:16.595342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:751:2629], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:16.595552Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2025-03-28T09:34:26.926480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2648], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:26.926593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:26.931606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480 2025-03-28T09:34:27.089727Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:891:2725], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:27.089855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:27.090236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:896:2730], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:27.093523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480 2025-03-28T09:34:27.221168Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:898:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:34:27.445211Z node 1 :TX_PROXY ERROR: Actor# [1:993:2798] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:34:27.900137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:34:28.275251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480 2025-03-28T09:34:28.901018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480 2025-03-28T09:34:29.539481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:34:29.929306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-28T09:34:30.997625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:34:31.272864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: Execution, code: 1060
:1:48: Error: Executing ALTER OBJECT SECRET
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 2025-03-28T09:34:33.261675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:34:33.261727Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-03-28T09:35:08.464627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480 2025-03-28T09:35:09.243877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715723:0, at schemeshard: 72057594046644480 2025-03-28T09:35:10.788902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715732:0, at schemeshard: 72057594046644480 2025-03-28T09:35:11.159643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715735:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: Execution, code: 1060
:1:42: Error: Executing CREATE OBJECT SECRET_ACCESS
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-03-28T09:35:35.689213Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3723:4875], TxId: 281474976715766, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=YzljYjA3ZTItM2UwOTdiY2MtYWVmMTY2OTgtNDY0ODAzYzg=. CustomerSuppliedId : . TraceId : 01jqe1pq21ayzxn2qcxk9za21j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:35:35.690081Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3724:4876], TxId: 281474976715766, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=YzljYjA3ZTItM2UwOTdiY2MtYWVmMTY2OTgtNDY0ODAzYzg=. TraceId : 01jqe1pq21ayzxn2qcxk9za21j. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:3720:4803], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:35:35.690577Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YzljYjA3ZTItM2UwOTdiY2MtYWVmMTY2OTgtNDY0ODAzYzg=, ActorId: [1:3623:4803], ActorState: ExecuteState, TraceId: 01jqe1pq21ayzxn2qcxk9za21j, Create QueryResponse for error on request, msg: 2025-03-28T09:35:35.698732Z node 1 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jqe1ppw6fa1ed8n9d8pwa7wx" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=YzljYjA3ZTItM2UwOTdiY2MtYWVmMTY2OTgtNDY0ODAzYzg=" tx_control { tx_id: "01jqe1ppw6fa1ed8n9d8pwa7wx" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: Execution, code: 1060
:1:29: Error: Executing DROP OBJECT SECRET
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-03-28T09:35:58.387475Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4261:5278], for# root@builtin, access# DescribeSchema 2025-03-28T09:35:58.387555Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4261:5278], for# root@builtin, access# DescribeSchema 2025-03-28T09:35:58.389231Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:4255:5272], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:35:58.390509Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGIwNmE4YmYtYjk3NTFlODItNGFkNTY0NTEtZWU5ZjVlYmU=, ActorId: [1:4242:5260], ActorState: ExecuteState, TraceId: 01jqe1qdd60m0fptnfzb7w6md9, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-03-28T09:36:09.605374Z node 1 :TICKET_PARSER ERROR: Ticket **** (51449FAE): Could not find correct token validator REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing UPSERT OBJECT SECRET
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-03-28T09:36:46.582176Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715848. Ctx: { TraceId: 01jqe1rw8gabg66nnb5qnwe3k1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjBhMGNmZmQtODRkOGU0M2MtYTVkMWUzMzMtNDk4MmMzNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1
|95.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log}
>> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD]
>> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD]
>> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite
>> DataShardTxOrder::DelayData
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD]
Test command err: 2025-03-28T09:36:39.888398Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:39.893503Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:39.893822Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:36:39.894008Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:39.925872Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:39.979109Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:36:39.979169Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:39.985649Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:39.986739Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:36:39.987829Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:36:39.987876Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:36:39.987923Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:36:39.988166Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:36:39.988217Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:36:39.988261Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:36:40.041452Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:36:40.066195Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:36:40.066325Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:36:40.066400Z 
node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:36:40.066425Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:36:40.066445Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:36:40.066475Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:40.066637Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:40.066670Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:40.066833Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:36:40.066890Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:36:40.066920Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:40.066958Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:40.067005Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:36:40.067027Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:36:40.067052Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:36:40.067080Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:36:40.067112Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:40.067182Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:40.067202Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:40.067235Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:36:40.068765Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:36:40.068797Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:36:40.068842Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:36:40.068928Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:36:40.068954Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:36:40.068982Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:36:40.069013Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:40.069049Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:36:40.069070Z node 1 :TX_DATASHARD TRACE: Add 
[0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:36:40.069091Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:40.069267Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:36:40.069289Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:36:40.069308Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:36:40.069330Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:40.069360Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:36:40.069382Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:36:40.069409Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:36:40.069434Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:40.069457Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:36:40.080917Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:36:40.080965Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:40.080989Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:40.081013Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:36:40.081075Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:36:40.081423Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:40.081460Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:40.081492Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:36:40.081534Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:36:40.081551Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:36:40.081640Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:40.081678Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:40.081703Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:36:40.081726Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:36:40.084112Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:36:40.084158Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:40.084297Z node 1 :TX_DATASHARD TRACE: StateWork, received 
event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:40.084321Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:40.084352Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:40.084379Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:36:40.084403Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:40.084428Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:36:40.084451Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:36:40.084477Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:40.084515Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:36:40.084542Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:36:40.084564Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:36:40.084684Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:36:40.084717Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:40.084735Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:36:40.084747Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:36:40.084759Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:36:40.084797Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:40.084814Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:36:40.084834Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:40.084865Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:36:40.084909Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:36:40.084932Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:36:40.084951Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:36:40.084976Z node 1 :TX_D ... 
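
The TRACE records above spell out the datashard execution-unit pipeline: an operation is tried on a unit, reports a status (Executed, DelayComplete, ExecutedNoMoreRestarts, and so on), and is advanced to the next unit, here CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, then PlanQueue, LoadTxDetails, ProtectSchemeEchoes, BuildAndWaitDependencies once the plan step arrives. For working through dumps like this, a minimal Python sketch that reconstructs the per-operation traversal order; the helper name and regexes are ours and assume exactly the phrasing shown in this trace, so treat it as an illustration rather than part of the test harness:

import re
from collections import defaultdict

# "Trying to execute [0:1] at 9437184 on unit CheckSchemeTx"
TRYING = re.compile(r"Trying to execute \[(\d+:\d+)\] at (\d+) on unit (\w+)")
# "Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts"
STATUS = re.compile(r"Execution status for \[(\d+:\d+)\] at (\d+) is (\w+)")

def unit_pipeline(log_text):
    """Return {(op, tablet): [(unit, status), ...]} in stream order."""
    events = []
    for m in TRYING.finditer(log_text):
        events.append((m.start(), "try", m.groups()))
    for m in STATUS.finditer(log_text):
        events.append((m.start(), "status", m.groups()))
    # Records are interleaved in one stream, so order them by byte offset.
    events.sort(key=lambda e: e[0])

    pipeline = defaultdict(list)
    current = {}  # (op, tablet) -> unit currently being tried
    for _, kind, (op, tablet, value) in events:
        if kind == "try":
            current[(op, tablet)] = value          # value is the unit name
        else:                                      # value is the status
            unit = current.get((op, tablet))
            if unit is not None:
                pipeline[(op, tablet)].append((unit, value))
    return dict(pipeline)

# Against the excerpt above, ("0:1", "9437184") comes out as
# [("CheckSchemeTx", "ExecutedNoMoreRestarts"),
#  ("StoreSchemeTx", "DelayCompleteNoMoreRestarts"),
#  ("FinishPropose", "DelayComplete")]; WaitForPlan reports no status
# because the operation parks there until the plan step arrives.
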
eady operations at 9437184 2025-03-28T09:36:49.220460Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:49.220502Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2025-03-28T09:36:49.220554Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-28T09:36:49.220623Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-03-28T09:36:49.220669Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:49.220781Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-28T09:36:49.220811Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:49.220825Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2025-03-28T09:36:49.220849Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:36:49.220884Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-03-28T09:36:49.220901Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:49.220961Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-28T09:36:49.220973Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-28T09:36:49.220984Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:49.220999Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:508] at 9437184 on unit CompleteOperation 2025-03-28T09:36:49.221017Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 508] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:36:49.221054Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-03-28T09:36:49.221078Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:49.221157Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-28T09:36:49.221171Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-28T09:36:49.221193Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-28T09:36:49.221205Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:49.221218Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:509] at 9437184 on unit CompleteOperation 2025-03-28T09:36:49.221237Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 509] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:36:49.221259Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-03-28T09:36:49.221271Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
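
Each CompleteOperation record above also reports timing, in the form "exec latency: N ms, propose latency: M ms". A short sketch for pulling those figures out of a raw dump and summarizing them; the function is hypothetical and only assumes the exact wording printed by these DEBUG records:

import re
from statistics import mean

LAT = re.compile(r"exec latency: (\d+) ms, propose latency: (\d+) ms")

def latency_summary(log_text):
    """Aggregate the per-operation latencies printed in the DEBUG records."""
    pairs = [(int(e), int(p)) for e, p in LAT.findall(log_text)]
    if not pairs:
        return None
    execs, props = zip(*pairs)
    return {
        "count": len(pairs),
        "exec_ms_max": max(execs),
        "exec_ms_mean": round(mean(execs), 2),
        "propose_ms_max": max(props),
        "propose_ms_mean": round(mean(props), 2),
    }

# In the run above most operations report 0/1 ms, with the batched
# transactions (506, 514) at 2 ms exec / 4 ms propose.
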
2025-03-28T09:36:49.221331Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-28T09:36:49.221381Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:49.221400Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:510] at 9437184 on unit CompleteOperation 2025-03-28T09:36:49.221418Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 510] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:36:49.221442Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-03-28T09:36:49.221458Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:49.221519Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:49.221531Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:511] at 9437184 on unit CompleteOperation 2025-03-28T09:36:49.221560Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 511] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:36:49.221606Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-28T09:36:49.221623Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:49.221693Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:49.221707Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:512] at 9437184 on unit CompleteOperation 2025-03-28T09:36:49.221725Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 512] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:36:49.221745Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-28T09:36:49.221757Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:49.221822Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:49.221852Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:516] at 9437184 on unit FinishPropose 2025-03-28T09:36:49.221888Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 516 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-03-28T09:36:49.221991Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:49.222092Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:49.222108Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:514] at 9437184 on unit CompleteOperation 2025-03-28T09:36:49.222129Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 514] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 4 ms 2025-03-28T09:36:49.222163Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-28T09:36:49.222187Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:49.222247Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:49.222261Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:515] at 9437184 on unit CompleteOperation 2025-03-28T09:36:49.222292Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 515] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:36:49.222311Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:49.222438Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-03-28T09:36:49.222466Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.222505Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 506 2025-03-28T09:36:49.222728Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-03-28T09:36:49.222750Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.222767Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 507 2025-03-28T09:36:49.222852Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-03-28T09:36:49.222875Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.222899Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 508 2025-03-28T09:36:49.222994Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-03-28T09:36:49.223055Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.223072Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 509 2025-03-28T09:36:49.223172Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-03-28T09:36:49.223195Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.223231Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 510 2025-03-28T09:36:49.223317Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-28T09:36:49.223334Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
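
The Send/Receive pairs above ("Send delayed Ack RS Ack ... Seqno# N" followed later by "Receive RS Ack ... txId N") are the read-set acknowledgement handshake between the source and destination tablets (9437185 and 9437184 here). A sketch that cross-checks the two sides of the handshake; the regexes again mirror this trace's wording and the helper is illustrative only. Any (source, dest, txId) triple left over was sent but never acknowledged back:

import re

SENT = re.compile(
    r"Send delayed Ack RS Ack at \d+ \{TEvReadSet step# (\d+) txid# (\d+) "
    r"TabletSource# (\d+) TabletDest# (\d+)"
)
RECV = re.compile(
    r"Receive RS Ack at \d+ source (\d+) dest (\d+) consumer \d+ txId (\d+)"
)

def unacked_readsets(log_text):
    """Read sets whose delayed ack never shows up back at the source."""
    sent = {(src, dst, txid) for _step, txid, src, dst in SENT.findall(log_text)}
    received = set(RECV.findall(log_text))  # (src, dst, txid) triples
    return sent - received

# For the excerpt above the result is empty: every Seqno 1..8 ack sent by
# 9437184 is matched by a "Receive RS Ack" record for txIds 506..512 and 514.
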
2025-03-28T09:36:49.223346Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2025-03-28T09:36:49.223388Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-28T09:36:49.223406Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.223418Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2025-03-28T09:36:49.223526Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-28T09:36:49.223542Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.223557Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 expect 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - interm 5 6 - 6 6 - - - - - - - - - - - - - - - - - - - - - - - - - - - >> Secret::ValidationQueryService [GOOD] >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] Test command err: 2025-03-28T09:36:45.957726Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:45.962452Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:45.962777Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:36:45.962954Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:45.994602Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:46.046579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:36:46.046621Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:46.052971Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:46.053850Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:36:46.054923Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:36:46.054990Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:36:46.055024Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:36:46.055296Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:36:46.055345Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:36:46.055386Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:36:46.106618Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:36:46.136315Z node 1 :TX_DATASHARD INFO: 
Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:36:46.136489Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:36:46.136586Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:36:46.136616Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:36:46.136648Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:36:46.136694Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:46.136913Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:46.136961Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:46.137194Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:36:46.137268Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:36:46.137316Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:46.137375Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:46.137407Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:36:46.137451Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:36:46.137481Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:36:46.137515Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:36:46.137560Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:46.137670Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:46.137710Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:46.137761Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:36:46.139559Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:36:46.139612Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:36:46.139662Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:36:46.139759Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:36:46.139788Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:36:46.139817Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:36:46.139848Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] 
at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:46.139886Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:36:46.139913Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:36:46.139934Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:46.140121Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:36:46.140142Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:36:46.140162Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:36:46.140183Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:46.140216Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:36:46.140240Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:36:46.140265Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:36:46.140291Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:46.140314Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:36:46.152064Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:36:46.152123Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:46.152149Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:46.152172Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:36:46.152240Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:36:46.152602Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:46.152653Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:46.152680Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:36:46.152730Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:36:46.152748Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:36:46.152850Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:46.152888Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:46.152912Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:36:46.152933Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:36:46.155257Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 
1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:36:46.155311Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:46.155477Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:46.155501Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:46.155533Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:46.155560Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:36:46.155617Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:46.155649Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:36:46.155673Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:36:46.155701Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:46.155737Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:36:46.155765Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:36:46.155784Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:36:46.155900Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:36:46.155924Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:46.155954Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:36:46.155967Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:36:46.155979Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:36:46.156028Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:46.156045Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:36:46.156063Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:46.156084Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:36:46.156129Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:36:46.156155Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:36:46.156177Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:36:46.156202Z node 1 :TX_DATA ... 
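
Plan steps batch transactions: each "Planned transaction txId N at step S at tablet T" record marks a transaction the coordinator assigned to mediator step S (step 1000001 carries the single scheme txId 1 above, while the completion records earlier show step 1000005 carrying whole batches, 506 through 515 on one shard and 149 through 154 on another). A sketch that recovers this grouping from a dump, with the same caveat that the pattern mirrors this trace's wording:

import re
from collections import defaultdict

PLANNED = re.compile(r"Planned transaction txId (\d+) at step (\d+) at tablet (\d+)")

def plan_steps(log_text):
    """Map (tablet, step) -> sorted txIds planned in that step."""
    steps = defaultdict(list)
    for txid, step, tablet in PLANNED.findall(log_text):
        steps[(int(tablet), int(step))].append(int(txid))
    return {key: sorted(txs) for key, txs in steps.items()}
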
BUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:36:49.729721Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:36:49.729910Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-03-28T09:36:49.729935Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.729955Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-03-28T09:36:49.730009Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-03-28T09:36:49.730027Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.730056Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-03-28T09:36:49.730127Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-03-28T09:36:49.730143Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.730167Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-03-28T09:36:49.730222Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-03-28T09:36:49.730242Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.730257Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-03-28T09:36:49.730294Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-28T09:36:49.730315Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.730335Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-03-28T09:36:49.730397Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-03-28T09:36:49.730413Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.730426Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-03-28T09:36:49.730463Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 
9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-03-28T09:36:49.730486Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.730508Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-03-28T09:36:49.730558Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-28T09:36:49.730570Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.730582Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-03-28T09:36:49.730626Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-03-28T09:36:49.730648Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.730662Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-03-28T09:36:49.730731Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-03-28T09:36:49.730755Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.730774Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-03-28T09:36:49.730861Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-03-28T09:36:49.730930Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.730960Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-03-28T09:36:49.731015Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-28T09:36:49.731033Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.731046Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-03-28T09:36:49.731097Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-03-28T09:36:49.731135Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.731150Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-03-28T09:36:49.731201Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:49.731219Z node 1 :TX_DATASHARD TRACE: 
Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2025-03-28T09:36:49.731243Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-03-28T09:36:49.731281Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-03-28T09:36:49.731303Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:49.731381Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:49.731402Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2025-03-28T09:36:49.731426Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-03-28T09:36:49.731451Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-03-28T09:36:49.731481Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:49.731545Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:49.731558Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2025-03-28T09:36:49.731588Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-03-28T09:36:49.731610Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-03-28T09:36:49.731623Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:49.731679Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:49.731702Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2025-03-28T09:36:49.731725Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-03-28T09:36:49.731746Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-03-28T09:36:49.731758Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:49.731847Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-03-28T09:36:49.731863Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.731878Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-03-28T09:36:49.731934Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 
TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-03-28T09:36:49.731947Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.731959Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-03-28T09:36:49.732025Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-03-28T09:36:49.732047Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.732083Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-03-28T09:36:49.732144Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-03-28T09:36:49.732160Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:49.732175Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 >> KqpLimits::QueryReplySize ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService [GOOD] Test command err: 2025-03-28T09:34:05.195923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:05.196281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:05.196323Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001e1f/r3tmp/tmpnqCZSf/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10298, node 1 TClient is connected to server localhost:26231 2025-03-28T09:34:05.670993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:05.698769Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:05.704955Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:05.705017Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:05.705044Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:05.705258Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:05.740187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:05.740296Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:05.751781Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-03-28T09:34:17.391558Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:748:2628], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:17.391685Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:758:2633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:17.391783Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:34:17.397738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-28T09:34:17.421427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:762:2636], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-28T09:34:17.479450Z node 1 :TX_PROXY ERROR: Actor# [1:813:2668] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:34:17.529266Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:823:2677], status: GENERIC_ERROR, issues:
:1:20: Error: mismatched input '-' expecting '(' 2025-03-28T09:34:17.531106Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDgxOGMzY2MtYjAxYmY0NzgtMzc0OTQwMWItMTM0ZWEwZWE=, ActorId: [1:746:2626], ActorState: ExecuteState, TraceId: 01jqe1mas7fvtqqa0g1pj2pf7r, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2025-03-28T09:34:27.939488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480 2025-03-28T09:34:28.661865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:34:29.087753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480 2025-03-28T09:34:29.810793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480 2025-03-28T09:34:30.684285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:34:31.136915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-28T09:34:31.620512Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480 2025-03-28T09:34:32.467487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480 2025-03-28T09:34:34.094088Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTBjNDRmYzEtZTUxMDU3N2QtNGFhNjM5OTMtY2I4MWMxZDk=, ActorId: [1:842:2688], ActorState: ExecuteState, TraceId: 01jqe1mmvz9927hpevdtesxgy5, Create QueryResponse for error on request, msg: 2025-03-28T09:34:34.096524Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715689. Ctx: { TraceId: 01jqe1mmvz9927hpevdtesxgy5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTBjNDRmYzEtZTUxMDU3N2QtNGFhNjM5OTMtY2I4MWMxZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 2025-03-28T09:34:35.148769Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:34:35.148831Z node 1 :IMPORT WARN: Table profiles were not loaded FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-03-28T09:35:09.654553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715719:0, at schemeshard: 72057594046644480 2025-03-28T09:35:10.554081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715726:0, at schemeshard: 72057594046644480 2025-03-28T09:35:12.053498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715733:0, at schemeshard: 72057594046644480 2025-03-28T09:35:12.648261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715738:0, at schemeshard: 72057594046644480 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 2025-03-28T09:35:24.989894Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2JjM2U1NTItNDk5OWRlZjItMjkwNThkN2YtNjE0YzE4OTE=, ActorId: [1:3298:4569], ActorState: ExecuteState, TraceId: 01jqe1pcbg9de3014308w756xp, Create QueryResponse for error on request, msg: 2025-03-28T09:35:24.990847Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715754. Ctx: { TraceId: 01jqe1pcbg9de3014308w756xp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Y2JjM2U1NTItNDk5OWRlZjItMjkwNThkN2YtNjE0YzE4OTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2025-03-28T09:35:37.174617Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3705:4870], TxId: 281474976715768, task: 1. Ctx: { TraceId : 01jqe1prh5dm015bsgzp0pkjjz. SessionId : ydb://session/3?node_id=1&id=MjE3ZWQyM2EtMTlmZjE5OWUtMzUyYWQxMTctYmZlYjI1MzE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:35:37.175319Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:3706:4871], TxId: 281474976715768, task: 2. Ctx: { SessionId : ydb://session/3?node_id=1&id=MjE3ZWQyM2EtMTlmZjE5OWUtMzUyYWQxMTctYmZlYjI1MzE=. TraceId : 01jqe1prh5dm015bsgzp0pkjjz. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:3702:4798], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:35:37.175672Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjE3ZWQyM2EtMTlmZjE5OWUtMzUyYWQxMTctYmZlYjI1MzE=, ActorId: [1:3605:4798], ActorState: ExecuteState, TraceId: 01jqe1prh5dm015bsgzp0pkjjz, Create QueryResponse for error on request, msg: 2025-03-28T09:35:37.180644Z node 1 :METADATA_PROVIDER ERROR: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01jqe1prat38gze9drtqx0dwcc" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=MjE3ZWQyM2EtMTlmZjE5OWUtMzUyYWQxMTctYmZlYjI1MzE=" tx_control { tx_id: "01jqe1prat38gze9drtqx0dwcc" } query { yql_text: "DECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-03-28T09:35:48.684291Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDE5MGNkYWYtODQwMjExMzYtMWNjNmNjMjEtZTA5ZGMzMg==, ActorId: [1:3917:5026], ActorState: ExecuteState, TraceId: 01jqe1q3fp1hxn6wjehy0xcges, Create QueryResponse for error on request, msg: 2025-03-28T09:35:48.685213Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715781. Ctx: { TraceId: 01jqe1q3fp1hxn6wjehy0xcges, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDE5MGNkYWYtODQwMjExMzYtMWNjNmNjMjEtZTA5ZGMzMg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2025-03-28T09:35:59.755915Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4241:5272], for# root@builtin, access# DescribeSchema 2025-03-28T09:35:59.756024Z node 1 :TX_PROXY_SCHEME_CACHE WARN: Access denied: self# [1:4241:5272], for# root@builtin, access# DescribeSchema 2025-03-28T09:35:59.757948Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:4238:5269], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:35:59.759684Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTZmODZiM2UtNmNhZTFjM2MtZTZjN2FkZjAtOWIyOTg1OA==, ActorId: [1:4234:5266], ActorState: ExecuteState, TraceId: 01jqe1qeqz81rgb3yfagj350ky, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-03-28T09:36:11.058602Z node 1 :TICKET_PARSER ERROR: Ticket **** (51449FAE): Could not find correct token validator 2025-03-28T09:36:11.613418Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODIwMDdiMmYtZDg3NzBiMzctNTI4NDk0Y2ItMzA5NTA4ZTk=, ActorId: [1:4499:5465], ActorState: ExecuteState, TraceId: 01jqe1qssj0tyjzs5hp5j3nvaa, Create QueryResponse for error on request, msg: 2025-03-28T09:36:11.614267Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715805. Ctx: { TraceId: 01jqe1qssj0tyjzs5hp5j3nvaa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODIwMDdiMmYtZDg3NzBiMzctNTI4NDk0Y2ItMzA5NTA4ZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2025-03-28T09:36:23.502234Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTBlNTRjMDAtNDUxOGIxMWUtNDc5NjYyYmMtMmY3N2FiMjE=, ActorId: [1:4895:5762], ActorState: ExecuteState, TraceId: 01jqe1r5ba3ca2r8s1cdvc8rek, Create QueryResponse for error on request, msg: 2025-03-28T09:36:23.503098Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715822. Ctx: { TraceId: 01jqe1r5ba3ca2r8s1cdvc8rek, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTBlNTRjMDAtNDUxOGIxMWUtNDc5NjYyYmMtMmY3N2FiMjE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2025-03-28T09:36:48.390289Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715853. Ctx: { TraceId: 01jqe1rxzt76nbr8b2nz1tvfv9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2EyYmZhNmMtZjVkMDYzM2EtNDc0NzIwOTQtNzVlMzllNWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 >> KqpStats::RequestUnitForBadRequestExecute |95.3%| [TA] $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TA] {RESULT} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSubscriberTest::InvalidNotification >> TSubscriberTest::InvalidNotification [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::InvalidNotification [GOOD] Test command err: 2025-03-28T09:36:52.427930Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:36:52.429657Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-03-28T09:36:52.429739Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-03-28T09:36:52.429778Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-03-28T09:36:52.429818Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-03-28T09:36:52.429874Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-03-28T09:36:52.429906Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:36:52.429942Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-03-28T09:36:52.429964Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:36:52.430072Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:33:2064] 2025-03-28T09:36:52.430106Z node 1 :SCHEME_BOARD_SUBSCRIBER ERROR: [main][1:34:2065][path] Suspicious 
NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [1:33:2064] >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] >> AnalyzeDatashard::DropTableNavigateError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=143155144.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155144.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155144.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143155144.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123155144.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155144.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143155144.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153944.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123155144.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123155144.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153944.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123153944.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123153944.000000s;Name=;Codec=}; 2025-03-28T09:35:44.283018Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:35:44.344991Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:35:44.358589Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:35:44.358813Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:35:44.364546Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:44.364706Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:44.364857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:44.364930Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:44.364989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:44.365054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:44.365115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:44.365197Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:44.365267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:44.365341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:44.365413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:44.365473Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:44.383971Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:35:44.384202Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:35:44.384246Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:35:44.384356Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:35:44.384479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:44.384537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:35:44.384564Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:35:44.384617Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 
chunks found; 2025-03-28T09:35:44.384653Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:35:44.384678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:35:44.384725Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:35:44.384836Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:35:44.384878Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:35:44.384906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:35:44.384923Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:35:44.384988Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:35:44.385023Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:35:44.385048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:35:44.385065Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:35:44.385113Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:35:44.385135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:35:44.385155Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:35:44.385183Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:35:44.385204Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:35:44.385227Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:35:44.385480Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=28; 2025-03-28T09:35:44.385530Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=22; 2025-03-28T09:35:44.385594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2025-03-28T09:35:44.385656Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=26; 2025-03-28T09:35:44.385771Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:35:44.385824Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:35:44.385850Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:35:44.385981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:35:44.386010Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:35:44.386029Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;pr ... n_data.cpp:29;EXECUTE:finishLoadingTime=345; 2025-03-28T09:36:52.991076Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=36957; 2025-03-28T09:36:52.998265Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=7114; 2025-03-28T09:36:53.005548Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=6336; 2025-03-28T09:36:53.005648Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7292; 2025-03-28T09:36:53.005791Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=81; 2025-03-28T09:36:53.005885Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=54; 2025-03-28T09:36:53.005999Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=75; 2025-03-28T09:36:53.006090Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=56; 
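The WaitEmptyAfter=...;Tiers=...;TTL=... strings at the head of this dump are the test's serialized eviction settings: evict rows by column timestamp once the EvictAfter deadline passes, moving them through tiers tier0 and tier1. The unit test drives the column shard engine directly, so no YQL is executed here; purely as a hedged sketch, the user-facing equivalent of the plain-TTL part would look roughly like the statements below (the table path and interval are hypothetical, and the tiered TO EXTERNAL DATA SOURCE form is an assumption about current syntax, not something this test runs):

    -- Hedged sketch; `/Root/olapTable` and the interval are hypothetical.
    ALTER TABLE `/Root/olapTable` SET (TTL = Interval("PT1H") ON timestamp);
    -- Assumed tiered variant (unverified against this YDB revision):
    ALTER TABLE `/Root/olapTable` SET (
        TTL = Interval("PT1H") TO EXTERNAL DATA SOURCE `tier0` ON timestamp
    );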
2025-03-28T09:36:53.013283Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=7128; 2025-03-28T09:36:53.023193Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9801; 2025-03-28T09:36:53.023329Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=45; 2025-03-28T09:36:53.023401Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=26; 2025-03-28T09:36:53.023447Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-03-28T09:36:53.023491Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-03-28T09:36:53.023547Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-03-28T09:36:53.023633Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=35; 2025-03-28T09:36:53.023676Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-03-28T09:36:53.023757Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=47; 2025-03-28T09:36:53.023798Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-03-28T09:36:53.023854Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=23; 2025-03-28T09:36:53.023933Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=44; 2025-03-28T09:36:53.024278Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=308; 2025-03-28T09:36:53.024319Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=76015; 2025-03-28T09:36:53.024457Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:36:53.024554Z node 1 
:TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:36:53.024599Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:36:53.024653Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:36:53.039353Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-28T09:36:53.039492Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:36:53.039541Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:36:53.039613Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-28T09:36:53.039667Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:36:53.039705Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:36:53.039745Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:36:53.039775Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:36:53.039851Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:36:53.040363Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:36:53.040768Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2625:4499];tablet_id=9437184;parent=[1:2585:4466];fline=manager.cpp:82;event=ask_data;request=request_id=151;1={portions_count=29};; 2025-03-28T09:36:53.041248Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:36:53.041327Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:36:53.041349Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:36:53.041369Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:36:53.041403Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:36:53.041447Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:36:53.041488Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-28T09:36:53.041535Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:36:53.041571Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:36:53.041611Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:36:53.041644Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:36:53.041706Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:36:53.042601Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-03-28T09:36:53.043722Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> 
DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError [GOOD] Test command err: 2025-03-28T09:34:54.132339Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:54.132660Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:54.132712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e22/r3tmp/tmp1AJraZ/pdisk_1.dat 2025-03-28T09:34:54.600289Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14529, node 1 2025-03-28T09:34:55.103573Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:55.103631Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:55.103663Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:55.104116Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:55.111713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:55.200491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:55.200731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:55.215462Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22171 2025-03-28T09:34:55.718779Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:58.522539Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:34:58.554744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:58.554858Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:58.594229Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:58.596177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:58.842941Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.843569Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.844180Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.844324Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.844566Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.844686Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.844767Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.844845Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.844943Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.015345Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.015454Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.028431Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:59.170510Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:59.216205Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:34:59.216286Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:34:59.244887Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:34:59.245017Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:34:59.245220Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:34:59.245274Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:34:59.245324Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:34:59.245376Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:34:59.245427Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:34:59.245469Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:34:59.245827Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:34:59.274850Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:34:59.274944Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:34:59.281426Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:34:59.286860Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:34:59.287113Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:34:59.292426Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:34:59.308232Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:34:59.308280Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:34:59.308346Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:34:59.320889Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:34:59.327877Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:34:59.328002Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:34:59.517392Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:34:59.678397Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:34:59.756383Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:00.733629Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:00.733742Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:00.827366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:01.298638Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2543:3123], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.298784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.300030Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2547:3127]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:01.300191Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:01.300263Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2549:3129] 2025-03-28T09:35:01.301579Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2549:3129] 2025-03-28T09:35:01.302122Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2550:2992] 2025-03-28T09:35:01.302395Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2549:3129], server id = [2:2550:2992], tablet id = 72075186224037894, status = OK 2025-03-28T09:35:01.302617Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2550:2992], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:35:01.302709Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:35:01.302904Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:35:01.302981Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2547:3127], StatRequests.size() = 1 2025-03-28T09:35:01.347054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2554:3133], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.347127Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.347419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2559:3138], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.352047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-28T09:35:01.503815Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:35:01.503900Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:35:01.589067Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2549:3129], schemeshard count = 1 2025-03-28T09:35:01.966211Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... 03-28T09:36:11.886300Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:36:13.484757Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:36:13.485089Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:15.510969Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:36:17.150571Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:36:17.150916Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:19.229978Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:36:20.698458Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:36:20.698857Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:22.906464Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:36:24.462408Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:36:24.462711Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:26.828973Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:36:28.654731Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:36:28.654988Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:31.010576Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:36:32.671391Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:36:32.671766Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:34.969277Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:36:36.704557Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:36:36.704925Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:38.791287Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:36:40.401507Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:36:40.401845Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:42.515424Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:36:43.977030Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 
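The statistics aggregator's traversal step shown in this dump issues a parameterized cleanup query; it appears inline in the RunDataQuery records below and is reproduced here verbatim, only reformatted onto separate lines for readability:

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DELETE FROM `.metadata/_statistics`
    WHERE owner_id = $owner_id AND local_path_id = $local_path_id;

The TEvDataQueryResult SUCCESS records that follow each RunDataQuery confirm the delete committed before TTxFinishTraversal::Complete is logged.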
2025-03-28T09:36:43.977333Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:46.136205Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:36:47.508168Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:36:47.508596Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:49.636435Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:36:50.454547Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-28T09:36:50.454614Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:36:50.454650Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:36:50.454679Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-28T09:36:51.446841Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:36:51.447179Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:51.479081Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037897 2025-03-28T09:36:51.479143Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 197.000000s, at schemeshard: 72075186224037897 2025-03-28T09:36:51.479412Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-03-28T09:36:51.492092Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:36:52.166069Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:36:52.166139Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:36:52.166176Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:36:52.166213Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-28T09:36:52.166244Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:36:52.166526Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:36:52.168776Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:36:52.171086Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6756:4732], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:52.171155Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6767:4737], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:52.171253Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:52.179157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-28T09:36:52.216130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6770:4740], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-28T09:36:52.330269Z node 2 :TX_PROXY ERROR: Actor# [2:6863:4786] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:36:52.360223Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6892:4801]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:36:52.360371Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:36:52.360431Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:6894:4803] 2025-03-28T09:36:52.360484Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:6894:4803] 2025-03-28T09:36:52.360806Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:6895:4804] 2025-03-28T09:36:52.360911Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:6894:4803], server id = [2:6895:4804], tablet id = 72075186224037894, status = OK 2025-03-28T09:36:52.361024Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:6895:4804], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:36:52.361076Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-28T09:36:52.361185Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:36:52.361250Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:6892:4801], StatRequests.size() = 1 2025-03-28T09:36:52.453722Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTRhMTc0OTAtNTYwNjNmZjgtNzg1MDk3ZGEtYjI5ZTE1MGY=, TxId: 2025-03-28T09:36:52.453790Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTRhMTc0OTAtNTYwNjNmZjgtNzg1MDk3ZGEtYjI5ZTE1MGY=, TxId: 2025-03-28T09:36:52.454125Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:36:52.466739Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:36:52.466790Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:36:52.498838Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:36:52.498917Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:36:52.551102Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:6894:4803], schemeshard count = 1 2025-03-28T09:36:53.159679Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:36:53.159749Z node 2 :STATISTICS ERROR: [72075186224037894] IsColumnTable. traversal path [OwnerId: 72075186224037897, LocalPathId: 4] is not known to schemeshard 2025-03-28T09:36:53.159961Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:36:53.161510Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:36:53.169092Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGJlNTFmMjMtMTk5NTYwZDEtNjE3MTEwM2YtYjJlZDMyN2U=, TxId: 2025-03-28T09:36:53.169127Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGJlNTFmMjMtMTk5NTYwZDEtNjE3MTEwM2YtYjJlZDMyN2U=, TxId: 2025-03-28T09:36:53.169395Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:36:53.181839Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:36:53.181885Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2859:3276] 2025-03-28T09:36:53.182319Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6978:4859]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:36:53.184338Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:36:53.184375Z node 2 :STATISTICS ERROR: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] Navigate failed 2025-03-28T09:36:53.184402Z node 2 :STATISTICS DEBUG: ReplyFailed(), request id = 2 >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] >> KqpStats::RequestUnitForBadRequestExecute [GOOD] >> KqpStats::RequestUnitForBadRequestExplicitPrepare >> DataShardTxOrder::ZigZag_oo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] Test command err: 2025-03-28T09:36:47.999779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:48.000049Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:48.000089Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00162d/r3tmp/tmpYVwHaA/pdisk_1.dat 2025-03-28T09:36:48.267847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:36:48.302247Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:48.338576Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:48.338659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:48.349710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:36:48.427881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:36:48.695167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:36:48.939765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:48.939852Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:839:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:48.939896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:48.943439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:36:49.092506Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:36:49.158530Z node 1 :TX_PROXY ERROR: Actor# [1:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:36:49.399963Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1rysacvp5gd16fxqx5ckk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjQxYzdkODAtM2VhMWY5YzEtNTQ4YzQ4YTEtNDBkYmQ3YTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:49.470205Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1rz8kamkjtpe2rknkr3wc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTU4NzFhODMtZjQ5YzYzNi0zZWQ3MGUyNy1jOTYwNDI4OA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 2025-03-28T09:36:50.113908Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe1rzvv6q3w1b9rvh7h6x04, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Mjg4ZmFhNTQtYWUyMTgyYzUtNjhjNTI2MzItMTZlYTdmMWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:50.172966Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqe1rzy8df3px6eqn2tzwff0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmI0NjMzYTUtMWU1NWYzYTAtNjA0YjA3OGUtODA5YjUxMTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... shards are ready for read-only immediate transactions ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 2025-03-28T09:36:52.535277Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:52.535456Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:52.535491Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00162d/r3tmp/tmpMJ5aKp/pdisk_1.dat 2025-03-28T09:36:52.733398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:36:52.753035Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:52.787184Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:52.787274Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:52.798247Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:36:52.875843Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:36:53.103988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:36:53.356291Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:53.356394Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:839:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:53.356471Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:53.361216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:36:53.512207Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:36:53.546473Z node 2 :TX_PROXY ERROR: Actor# [2:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:36:53.579824Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1s33a8s1yxjnb1vhnbafd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZmJkNjhmZjUtNzZhNjg3OGEtOTcyNWJkNDMtYTNiNTliYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:53.626895Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1s3av7pe5hjw3wywav20w, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YjdmMTQxYjMtYWYyZjFmNGItMTFiYjljNjktNjc4MDRjNzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:54.063496Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1s3hx4s5wh6mvt1m5w3vf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2Y1YjY0MmQtZTNmNTE1NjUtYzI5MmExYjQtMmQ1NDg2NmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-03-28T09:36:54.372839Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1s422bfsdxbsqf92afjma, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZGFjY2JhMjItM2VkYzI1ZmItMjhhYWJkZTAtNDg0OGVjNjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:54.446317Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe1s43m4d7hb6rve4cyk9rj, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2Y1YjY0MmQtZTNmNTE1NjUtYzI5MmExYjQtMmQ1NDg2NmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:54.505763Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqe1s45hd4grktsfn9y2rkwm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2Y1YjY0MmQtZTNmNTE1NjUtYzI5MmExYjQtMmQ1NDg2NmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:36:54.545198Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2Y1YjY0MmQtZTNmNTE1NjUtYzI5MmExYjQtMmQ1NDg2NmM=, ActorId: [2:975:2779], ActorState: ExecuteState, TraceId: 01jqe1s47ce66trwrtamx4s7x7, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken
>> TColumnShardTestSchema::Drop
>> DataShardTxOrder::RandomPoints_ReproducerDelayRS1
>> TColumnShardTestSchema::RebootInternalTTL
>> KqpLimits::QueryReplySize [GOOD]
>> KqpLimits::ReadsetCountLimit
>> DataShardOutOfOrder::TestSnapshotReadPriority
>> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD]
Test command err:
2025-03-28T09:36:56.290115Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:56.294989Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:56.295334Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:36:56.295533Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:56.327046Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:56.382502Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:36:56.382542Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:56.388722Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:56.389689Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:36:56.391254Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:36:56.391356Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:36:56.391408Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:36:56.391741Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:36:56.391795Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:36:56.391849Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:36:56.442796Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:36:56.466456Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:36:56.466601Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:36:56.466668Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:36:56.466705Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:36:56.466731Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:36:56.466761Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:56.466934Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction
2025-03-28T09:36:56.466973Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:56.467152Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:36:56.467221Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:36:56.467255Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:56.467298Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:56.467331Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:36:56.467357Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:36:56.467378Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:36:56.467426Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:36:56.467457Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:56.467532Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:56.467570Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:56.467607Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:36:56.469250Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:36:56.469292Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:36:56.469350Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:36:56.469441Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:36:56.469469Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:36:56.469504Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:36:56.469536Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:56.469573Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:36:56.469599Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:36:56.469624Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:56.469810Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:36:56.469831Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:36:56.469850Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:36:56.469869Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 
2025-03-28T09:36:56.469901Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:36:56.469926Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:36:56.469954Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:36:56.469980Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:56.470000Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:36:56.481612Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:36:56.481665Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:56.481691Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:56.481720Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:36:56.481786Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:36:56.482179Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:56.482214Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:56.482250Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:36:56.482309Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:36:56.482330Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:36:56.482426Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:56.482467Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:56.482496Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:36:56.482517Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:36:56.484921Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:36:56.484970Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:56.485126Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:56.485152Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:56.485193Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:56.485219Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:36:56.485241Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:56.485268Z node 1 :TX_DATASHARD DEBUG: Found 
ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:36:56.485291Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:36:56.485315Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:56.485352Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:36:56.485378Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:36:56.485399Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:36:56.485551Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:36:56.485579Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:56.485601Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:36:56.485617Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:36:56.485630Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:36:56.485664Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:56.485678Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:36:56.485702Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:56.485724Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:36:56.485768Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:36:56.485819Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:36:56.485842Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:36:56.485867Z node 1 :TX_DATA ... 
2025-03-28T09:36:57.350147Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:57.350193Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:4] at 9437184 on unit CompleteOperation 2025-03-28T09:36:57.350245Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 4] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-03-28T09:36:57.350306Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-03-28T09:36:57.350385Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:57.350504Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-28T09:36:57.350535Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:57.350569Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-28T09:36:57.350584Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-28T09:36:57.350597Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:57.350613Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:6] at 9437184 on unit CompleteOperation 2025-03-28T09:36:57.350636Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 6] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-03-28T09:36:57.350674Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-03-28T09:36:57.350707Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:57.350781Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-28T09:36:57.350796Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-28T09:36:57.350809Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-28T09:36:57.350820Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:57.350833Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:7] at 9437184 on unit CompleteOperation 2025-03-28T09:36:57.350861Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 7] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-03-28T09:36:57.350892Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-03-28T09:36:57.350905Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:57.350975Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:57.350989Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:8] at 9437184 on unit CompleteOperation 2025-03-28T09:36:57.351018Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 8] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-03-28T09:36:57.351068Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-03-28T09:36:57.351095Z node 1 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:57.351169Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:57.351184Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:9] at 9437184 on unit CompleteOperation 2025-03-28T09:36:57.351218Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 9] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-03-28T09:36:57.351242Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-28T09:36:57.351255Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:57.351319Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:57.351339Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:14] at 9437184 on unit FinishPropose 2025-03-28T09:36:57.351376Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 14 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2025-03-28T09:36:57.351429Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:57.351534Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:57.351566Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:10] at 9437184 on unit CompleteOperation 2025-03-28T09:36:57.351593Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 10] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-03-28T09:36:57.351638Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-28T09:36:57.351659Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:57.351757Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:57.351783Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:12] at 9437184 on unit CompleteOperation 2025-03-28T09:36:57.351806Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 12] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-03-28T09:36:57.351832Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-28T09:36:57.351847Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:57.351910Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:57.351922Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:13] at 9437184 on unit CompleteOperation 2025-03-28T09:36:57.351939Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 13] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:36:57.351955Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:57.352007Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437184 2025-03-28T09:36:57.352021Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:57.352034Z 
node 1 :TX_DATASHARD TRACE: Complete execution for [1000004:5] at 9437184 on unit CompleteOperation 2025-03-28T09:36:57.352057Z node 1 :TX_DATASHARD DEBUG: Complete [1000004 : 5] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-03-28T09:36:57.352100Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-03-28T09:36:57.352134Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:57.352284Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2025-03-28T09:36:57.352310Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:57.352347Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 4 2025-03-28T09:36:57.352397Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-03-28T09:36:57.352413Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:57.352432Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-03-28T09:36:57.352481Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-03-28T09:36:57.352497Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:57.352509Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-03-28T09:36:57.352555Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-03-28T09:36:57.352571Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:57.352590Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-03-28T09:36:57.352656Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-28T09:36:57.352688Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:57.352704Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-03-28T09:36:57.352739Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-28T09:36:57.352752Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 
2025-03-28T09:36:57.352764Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-03-28T09:36:57.352805Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-28T09:36:57.352823Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:57.352836Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-03-28T09:36:57.352874Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-03-28T09:36:57.352891Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:36:57.352915Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5
expect 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - -
actual 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - -
interm - 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - -
>> KqpStats::RequestUnitForBadRequestExplicitPrepare [GOOD]
>> KqpStats::RequestUnitForExecute
>> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite [GOOD]
>> DataShardTxOrder::ReadWriteReorder
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite [GOOD]
Test command err:
2025-03-28T09:36:51.996546Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:51.996859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:51.996895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001629/r3tmp/tmpNsgdkl/pdisk_1.dat 2025-03-28T09:36:52.285344Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:36:52.318805Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:52.356025Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:52.356141Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:52.367293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:36:52.446483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:36:52.744127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2025-03-28T09:36:52.991261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:52.991343Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:839:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:52.991405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:52.995047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:36:53.143799Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:36:53.210420Z node 1 :TX_PROXY ERROR: Actor# [1:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:36:53.423943Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1s2qx5cn9mbq6sm9benev, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTUyM2QzNGItNTJlYWM3NzAtZGI2Y2QxZWYtY2I0N2ZkOWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:53.478950Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1s364bd5j9900qqxgxt92, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2FjNmY2ZGMtZmVkZjc4MTctMWVkNWFkNmYtMWVmZTRmN2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2025-03-28T09:36:53.762003Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1s389dn8v5390a6jqafnr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDMwYmQwYjktZjQyZDE4OGYtZGVlMGYwNGItMTJjMTFjZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2025-03-28T09:36:53.831996Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1s3gj998wxpy81ve1cc6y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDMwYmQwYjktZjQyZDE4OGYtZGVlMGYwNGItMTJjMTFjZTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet 2025-03-28T09:36:54.001214Z node 1 :KQP_COMPUTE WARN: SelfId: [1:1034:2772], Table: `/Root/table-1` ([72057594046644480:2:1]), SessionActorId: [1:968:2772]TEvDeliveryProblem was received from tablet: 72075186224037888 ===== Waiting for commit response ===== Last SELECT 2025-03-28T09:36:54.231522Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe1s3v9f8anfgs2ry77z9r0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTJhMWEwMzEtMzM0YzU1NjYtYzMxNGMzMGEtZDk5Zjk4M2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } } 2025-03-28T09:36:56.658437Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:56.658701Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:56.658744Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001629/r3tmp/tmpSQPYFd/pdisk_1.dat 2025-03-28T09:36:56.901488Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:36:56.921894Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:56.956568Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:56.956667Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:56.967701Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:36:57.045716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:36:57.283176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 ===== UPSERT initial rows 2025-03-28T09:36:57.527134Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:57.527206Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:839:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:57.527255Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:57.530897Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:36:57.680306Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:36:57.714784Z node 2 :TX_PROXY ERROR: Actor# [2:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:36:57.774207Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1s75nahmm6397b9hsdbq9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWQ5MTJjNTctMTE1ZDJiYTEtMTc3OWIyZDEtYzM1MDI4NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:57.856080Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1s7edae0a4azsjphdnvnq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTk0YWMyZjktNGMzN2IyZWItNTU5ZWMyNWItYjVlY2YyYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===== Begin SELECT 2025-03-28T09:36:58.115913Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1s7gh4z70xpgaz105wtfd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWM5NTIzOC1hMzZkZGM3YS02MDdmMTRhNy01ZTM5NzA1Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets 2025-03-28T09:36:58.194811Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1s7reb8r530e8msdr4yxx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=OWM5NTIzOC1hMzZkZGM3YS02MDdmMTRhNy01ZTM5NzA1Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT 2025-03-28T09:36:58.564131Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe1s839fbwdph09fn4xkgp5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=M2VlM2MzMjAtODU1ZDA3MTItMjA2ZmUwZjctNWMyNWU5NA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { uint32_value: 3 } items { uint32_value: 2 } }
>> Viewer::JsonStorageListingV2PDiskIdFilter [GOOD]
>> DataShardTxOrder::DelayData [GOOD]
>> AnalyzeDatashard::AnalyzeTwoTables [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::DelayData [GOOD]
Test command err:
2025-03-28T09:36:50.486027Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:50.491190Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:50.491608Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:36:50.491791Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:50.525425Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:50.582987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:36:50.583032Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:50.590558Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:50.591767Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:36:50.593302Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:36:50.593368Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:36:50.593428Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:36:50.593821Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:36:50.593920Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:36:50.593989Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:36:50.651435Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:36:50.679282Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:36:50.679430Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:36:50.679504Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:36:50.679527Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:36:50.679561Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:36:50.679599Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:50.679778Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:50.679822Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:50.680025Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:36:50.680102Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:36:50.680140Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:50.680187Z node 1
:TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:50.680225Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:36:50.680257Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:36:50.680284Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:36:50.680316Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:36:50.680351Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:50.680429Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:50.680463Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:50.680515Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:36:50.682236Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\002\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:36:50.682273Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:36:50.682336Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:36:50.682449Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:36:50.682480Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:36:50.682512Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:36:50.682550Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:50.682588Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:36:50.682613Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:36:50.682634Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:50.682901Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:36:50.682927Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:36:50.682950Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:36:50.682970Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:50.683007Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:36:50.683023Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:36:50.683051Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:36:50.683083Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 
2025-03-28T09:36:50.683110Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:36:50.694889Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:36:50.694943Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:50.694970Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:50.694996Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:36:50.695053Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:36:50.695555Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:50.695616Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:50.695667Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:36:50.695737Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:36:50.695766Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:36:50.695872Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:50.695930Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:50.695963Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:36:50.695995Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:36:50.699262Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:36:50.699326Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:50.699508Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:50.699549Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:50.699620Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:50.699647Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:36:50.699670Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:50.699699Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:36:50.699726Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:36:50.699754Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:50.699793Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:36:50.699822Z node 1 
:TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:36:50.699844Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:36:50.699981Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:36:50.700008Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:50.700021Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:36:50.700034Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:36:50.700049Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:36:50.700105Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:50.700125Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:36:50.700153Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:50.700177Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:36:50.700249Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:36:50.700288Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:36:50.700320Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:36:50.700374Z node 1 :TX_D ... e 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit PrepareDataTxInRS 2025-03-28T09:36:59.860992Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit LoadAndWaitInRS 2025-03-28T09:36:59.861007Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit LoadAndWaitInRS 2025-03-28T09:36:59.861025Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-03-28T09:36:59.861047Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit LoadAndWaitInRS 2025-03-28T09:36:59.861072Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit ExecuteDataTx 2025-03-28T09:36:59.861117Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit ExecuteDataTx 2025-03-28T09:36:59.861515Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:506] at tablet 9437184 with status COMPLETE 2025-03-28T09:36:59.861602Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:506] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 81, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:36:59.861653Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-03-28T09:36:59.861678Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit ExecuteDataTx 2025-03-28T09:36:59.861701Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompleteOperation 2025-03-28T09:36:59.861715Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit 
CompleteOperation 2025-03-28T09:36:59.861829Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is DelayComplete 2025-03-28T09:36:59.861848Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompleteOperation 2025-03-28T09:36:59.861868Z node 1 :TX_DATASHARD TRACE: Add [1000005:506] at 9437184 to execution unit CompletedOperations 2025-03-28T09:36:59.861888Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:506] at 9437184 on unit CompletedOperations 2025-03-28T09:36:59.861957Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:506] at 9437184 is Executed 2025-03-28T09:36:59.861980Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:506] at 9437184 executing on unit CompletedOperations 2025-03-28T09:36:59.862015Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:506] at 9437184 has finished 2025-03-28T09:36:59.862042Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:36:59.862062Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:59.862085Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000005:507] in PlanQueue unit at 9437184 2025-03-28T09:36:59.862294Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:235:2228], Recipient [1:235:2228]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:59.862330Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:59.862357Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:59.862378Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:36:59.862396Z node 1 :TX_DATASHARD DEBUG: Return cached ready operation [1000005:507] at 9437184 2025-03-28T09:36:59.862427Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PlanQueue 2025-03-28T09:36:59.862445Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-28T09:36:59.862462Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PlanQueue 2025-03-28T09:36:59.862474Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:36:59.862487Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadTxDetails 2025-03-28T09:36:59.862848Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000005:507 keys extracted: 1 2025-03-28T09:36:59.862872Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-28T09:36:59.862886Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:36:59.862900Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit FinalizeDataTxPlan 2025-03-28T09:36:59.862916Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit FinalizeDataTxPlan 2025-03-28T09:36:59.862945Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-28T09:36:59.862960Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit FinalizeDataTxPlan 2025-03-28T09:36:59.862971Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildAndWaitDependencies 
2025-03-28T09:36:59.862994Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:36:59.863032Z node 1 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically complete end at 9437184 2025-03-28T09:36:59.863058Z node 1 :TX_DATASHARD TRACE: Operation [1000005:507] is the new logically incomplete end at 9437184 2025-03-28T09:36:59.863087Z node 1 :TX_DATASHARD TRACE: Activated operation [1000005:507] at 9437184 2025-03-28T09:36:59.863107Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-28T09:36:59.863124Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-28T09:36:59.863136Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit BuildDataTxOutRS 2025-03-28T09:36:59.863147Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit BuildDataTxOutRS 2025-03-28T09:36:59.863170Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-28T09:36:59.863182Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildDataTxOutRS 2025-03-28T09:36:59.863198Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit StoreAndSendOutRS 2025-03-28T09:36:59.863215Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit StoreAndSendOutRS 2025-03-28T09:36:59.863231Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-28T09:36:59.863253Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit StoreAndSendOutRS 2025-03-28T09:36:59.863266Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit PrepareDataTxInRS 2025-03-28T09:36:59.863277Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit PrepareDataTxInRS 2025-03-28T09:36:59.863290Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-28T09:36:59.863312Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit PrepareDataTxInRS 2025-03-28T09:36:59.863325Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit LoadAndWaitInRS 2025-03-28T09:36:59.863336Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit LoadAndWaitInRS 2025-03-28T09:36:59.863359Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-28T09:36:59.863372Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadAndWaitInRS 2025-03-28T09:36:59.863384Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit ExecuteDataTx 2025-03-28T09:36:59.863404Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit ExecuteDataTx 2025-03-28T09:36:59.863593Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:507] at tablet 9437184 with status COMPLETE 2025-03-28T09:36:59.863621Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:507] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 11, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:36:59.863648Z node 1 :TX_DATASHARD TRACE: Execution status for 
[1000005:507] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:59.863666Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit ExecuteDataTx 2025-03-28T09:36:59.863700Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompleteOperation 2025-03-28T09:36:59.863735Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompleteOperation 2025-03-28T09:36:59.863840Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is DelayComplete 2025-03-28T09:36:59.863857Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompleteOperation 2025-03-28T09:36:59.863872Z node 1 :TX_DATASHARD TRACE: Add [1000005:507] at 9437184 to execution unit CompletedOperations 2025-03-28T09:36:59.863886Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:507] at 9437184 on unit CompletedOperations 2025-03-28T09:36:59.863901Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:507] at 9437184 is Executed 2025-03-28T09:36:59.863914Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:507] at 9437184 executing on unit CompletedOperations 2025-03-28T09:36:59.863926Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:507] at 9437184 has finished 2025-03-28T09:36:59.863940Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:59.863953Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:59.863972Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:36:59.863995Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:36:59.877365Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 506 txid# 507} 2025-03-28T09:36:59.877426Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2025-03-28T09:36:59.877468Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:59.877495Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2025-03-28T09:36:59.877545Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:36:59.877586Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:59.877697Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:59.877719Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2025-03-28T09:36:59.877807Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:36:59.877827Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
>> DataShardTxOrder::ReadWriteReorder [GOOD]
>> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots]
>> KqpLimits::ReadsetCountLimit [GOOD]
>> KqpLimits::QueryExecTimeoutCancel
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ReadWriteReorder [GOOD]
Test command err:
2025-03-28T09:36:59.530178Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138],
Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:59.535817Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:59.536149Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:36:59.536312Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:59.568144Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:59.618804Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:36:59.618852Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:59.624811Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:59.625806Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:36:59.626914Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:36:59.626961Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:36:59.626989Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:36:59.627282Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:36:59.627330Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:36:59.627383Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:36:59.685193Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:36:59.707029Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:36:59.707158Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:36:59.707219Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:36:59.707242Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:36:59.707262Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:36:59.707284Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:59.707453Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:59.707492Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:59.707682Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:36:59.707738Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:36:59.707772Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:59.707813Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:59.707835Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:36:59.707854Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:36:59.707874Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:36:59.707902Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 
TxInFly 0 2025-03-28T09:36:59.707931Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:59.708000Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:59.708026Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:59.708058Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:36:59.709688Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\n\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:36:59.709725Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:36:59.709774Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:36:59.709870Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:36:59.709897Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:36:59.709926Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:36:59.709958Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:59.709995Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:36:59.710016Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:36:59.710037Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:59.710234Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:36:59.710257Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:36:59.710293Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:36:59.710313Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:59.710344Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:36:59.710363Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:36:59.710384Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:36:59.710422Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:59.710444Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:36:59.721854Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:36:59.721920Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:59.721944Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:59.721972Z node 1 
:TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:36:59.722018Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:36:59.722418Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:59.722453Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:59.722481Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:36:59.722544Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:36:59.722565Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:36:59.722645Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:59.722700Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:59.722734Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:36:59.722755Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:36:59.725043Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:36:59.725088Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:59.725226Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:59.725259Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:59.725299Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:59.725325Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:36:59.725346Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:59.725370Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:36:59.725396Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:36:59.725437Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:59.725475Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:36:59.725502Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:36:59.725520Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:36:59.725648Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:36:59.725680Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:59.725695Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:36:59.725708Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:36:59.725721Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:36:59.725759Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:59.725772Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:36:59.725795Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:59.725816Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:36:59.725869Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:36:59.725895Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:36:59.725914Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:36:59.725942Z node 1 :TX_DATASH ... WaitInRS 2025-03-28T09:37:00.460224Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-03-28T09:37:00.460235Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit LoadAndWaitInRS 2025-03-28T09:37:00.460245Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit ExecuteDataTx 2025-03-28T09:37:00.460257Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit ExecuteDataTx 2025-03-28T09:37:00.460518Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:12] at tablet 9437185 with status COMPLETE 2025-03-28T09:37:00.460562Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:12] at 9437185: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 8, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:37:00.460591Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-03-28T09:37:00.460617Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit ExecuteDataTx 2025-03-28T09:37:00.460644Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit CompleteOperation 2025-03-28T09:37:00.460659Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit CompleteOperation 2025-03-28T09:37:00.460777Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is DelayComplete 2025-03-28T09:37:00.460797Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit CompleteOperation 2025-03-28T09:37:00.460818Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437185 to execution unit CompletedOperations 2025-03-28T09:37:00.460837Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437185 on unit CompletedOperations 2025-03-28T09:37:00.460855Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437185 is Executed 2025-03-28T09:37:00.460866Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437185 executing on unit CompletedOperations 2025-03-28T09:37:00.460880Z node 1 :TX_DATASHARD TRACE: Execution plan for 
[1000005:12] at 9437185 has finished 2025-03-28T09:37:00.460905Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:00.460923Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-28T09:37:00.460946Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-03-28T09:37:00.460965Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-03-28T09:37:00.461108Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:235:2228], Recipient [1:235:2228]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:00.461137Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:00.461163Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:37:00.461180Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:37:00.461206Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:37:00.461224Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000005:12] in PlanQueue unit at 9437184 2025-03-28T09:37:00.461238Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit PlanQueue 2025-03-28T09:37:00.461253Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-28T09:37:00.461266Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit PlanQueue 2025-03-28T09:37:00.461278Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:37:00.461293Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit LoadTxDetails 2025-03-28T09:37:00.461763Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000005:12 keys extracted: 3 2025-03-28T09:37:00.461791Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-28T09:37:00.461816Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:37:00.461832Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit FinalizeDataTxPlan 2025-03-28T09:37:00.461857Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit FinalizeDataTxPlan 2025-03-28T09:37:00.461877Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-28T09:37:00.461889Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit FinalizeDataTxPlan 2025-03-28T09:37:00.461900Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:37:00.461911Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:37:00.461937Z node 1 :TX_DATASHARD TRACE: Operation [1000005:12] is the new logically complete end at 9437184 2025-03-28T09:37:00.461952Z node 1 :TX_DATASHARD TRACE: Operation [1000005:12] is the new logically incomplete end at 9437184 2025-03-28T09:37:00.461968Z node 1 :TX_DATASHARD TRACE: Activated operation [1000005:12] at 9437184 2025-03-28T09:37:00.461986Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-28T09:37:00.461997Z node 1 :TX_DATASHARD TRACE: Advance execution plan for 
[1000005:12] at 9437184 executing on unit BuildAndWaitDependencies 2025-03-28T09:37:00.462009Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit BuildDataTxOutRS 2025-03-28T09:37:00.462034Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit BuildDataTxOutRS 2025-03-28T09:37:00.462061Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-28T09:37:00.462073Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildDataTxOutRS 2025-03-28T09:37:00.462083Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit StoreAndSendOutRS 2025-03-28T09:37:00.462094Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit StoreAndSendOutRS 2025-03-28T09:37:00.462106Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-28T09:37:00.462142Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit StoreAndSendOutRS 2025-03-28T09:37:00.462159Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit PrepareDataTxInRS 2025-03-28T09:37:00.462181Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit PrepareDataTxInRS 2025-03-28T09:37:00.462199Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-28T09:37:00.462213Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit PrepareDataTxInRS 2025-03-28T09:37:00.462230Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit LoadAndWaitInRS 2025-03-28T09:37:00.462248Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit LoadAndWaitInRS 2025-03-28T09:37:00.462271Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-28T09:37:00.462284Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadAndWaitInRS 2025-03-28T09:37:00.462295Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit ExecuteDataTx 2025-03-28T09:37:00.462320Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit ExecuteDataTx 2025-03-28T09:37:00.462567Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:12] at tablet 9437184 with status COMPLETE 2025-03-28T09:37:00.462595Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:12] at 9437184: {NSelectRow: 3, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 3, SelectRowBytes: 24, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:37:00.462627Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-28T09:37:00.462648Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit ExecuteDataTx 2025-03-28T09:37:00.462680Z node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit CompleteOperation 2025-03-28T09:37:00.462719Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit CompleteOperation 2025-03-28T09:37:00.462835Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is DelayComplete 2025-03-28T09:37:00.462854Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit CompleteOperation 2025-03-28T09:37:00.462879Z 
node 1 :TX_DATASHARD TRACE: Add [1000005:12] at 9437184 to execution unit CompletedOperations 2025-03-28T09:37:00.462898Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:12] at 9437184 on unit CompletedOperations 2025-03-28T09:37:00.462915Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:12] at 9437184 is Executed 2025-03-28T09:37:00.462926Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:12] at 9437184 executing on unit CompletedOperations 2025-03-28T09:37:00.462938Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:12] at 9437184 has finished 2025-03-28T09:37:00.462953Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:00.462966Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:37:00.462979Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:37:00.462993Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:37:00.474910Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000005 txid# 12} 2025-03-28T09:37:00.474979Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000005} 2025-03-28T09:37:00.475045Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-28T09:37:00.475076Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:12] at 9437185 on unit CompleteOperation 2025-03-28T09:37:00.475117Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 12] from 9437185 at tablet 9437185 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:00.475153Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-03-28T09:37:00.475324Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 12} 2025-03-28T09:37:00.475351Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2025-03-28T09:37:00.475377Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:00.475394Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:12] at 9437184 on unit CompleteOperation 2025-03-28T09:37:00.475416Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 12] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:00.475448Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeTwoTables [GOOD]
Test command err:
2025-03-28T09:35:08.914005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:08.914193Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:08.914228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b26/r3tmp/tmpggPsln/pdisk_1.dat 2025-03-28T09:35:09.181584Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28579, node 1 2025-03-28T09:35:09.362172Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:09.362211Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:09.362230Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:09.362524Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:09.366497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:35:09.444451Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:09.444565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:09.457590Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18660 2025-03-28T09:35:09.926158Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:12.147411Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:12.172722Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:12.172809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:12.219455Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:12.220887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:12.436587Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:12.436995Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:12.437373Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:12.437484Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:12.437610Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:12.437654Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:12.437704Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:12.437748Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:12.437814Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:12.582502Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:12.582621Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:12.595771Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:12.706630Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:12.748969Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:12.749047Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:12.770871Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:12.772394Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:12.772626Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:12.772685Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:12.772735Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:12.772783Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:12.772854Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:12.772913Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:12.773349Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:12.797690Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1859:2587] 2025-03-28T09:35:12.798322Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:12.803990Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:12.804032Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:12.804092Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:12.806729Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:12.806823Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1895:2609], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:12.816541Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1935:2627] 2025-03-28T09:35:12.816896Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1935:2627], schemeshard id = 72075186224037897 2025-03-28T09:35:12.843553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:12.850519Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:12.850667Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:12.993131Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:13.228605Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:13.273777Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:14.052316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2238:3072], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:14.052433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:14.066053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:14.442902Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2542:3122], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:14.443027Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:14.444035Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2547:3126]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:14.444183Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:14.444244Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2549:3128] 2025-03-28T09:35:14.444294Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2549:3128] 2025-03-28T09:35:14.444748Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2550:2993] 2025-03-28T09:35:14.444991Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2549:3128], server id = [2:2550:2993], tablet id = 72075186224037894, status = OK 2025-03-28T09:35:14.445099Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2550:2993], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:35:14.445144Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:35:14.445319Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:35:14.445366Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2547:3126], StatRequests.size() = 1 2025-03-28T09:35:14.457952Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2554:3132], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:14.458045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:14.458286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2559:3137], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:14.462855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-28T09:35:14.622908Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:35:14.622982Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:35:14.697332Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2549:3128], schemeshard count = 1 2025-03-28T09:35:15.019410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... ry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-28T09:36:52.041395Z node 2 :TX_PROXY ERROR: Actor# [2:7197:5026] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:36:52.091654Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7226:5041]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:36:52.091841Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:36:52.091910Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7228:5043] 2025-03-28T09:36:52.091974Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7228:5043] 2025-03-28T09:36:52.092306Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7228:5043], server id = [2:7229:5044], tablet id = 72075186224037894, status = OK 2025-03-28T09:36:52.092359Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7229:5044] 2025-03-28T09:36:52.092406Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7229:5044], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:36:52.092442Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-28T09:36:52.092505Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:36:52.092561Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7226:5041], StatRequests.size() = 1 2025-03-28T09:36:52.239757Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTM3ZGNjNjgtMWM5MDAzYS1mZDFjYzE3ZC1iYzAxMzlmYQ==, TxId: 2025-03-28T09:36:52.239821Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTM3ZGNjNjgtMWM5MDAzYS1mZDFjYzE3ZC1iYzAxMzlmYQ==, TxId: 2025-03-28T09:36:52.240956Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:36:52.253896Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-28T09:36:52.253950Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:36:52.296671Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:36:52.296762Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:36:52.382645Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7228:5043], schemeshard count = 1 2025-03-28T09:36:53.492984Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:36:53.493071Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-28T09:36:53.493114Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. 
Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:36:54.734222Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:36:54.755592Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:36:54.755718Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-28T09:36:54.755759Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:36:54.756051Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:36:54.758158Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:36:54.767217Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzQzYWI0NTItMWRiZjRjNmEtMjEwMWNhNjUtNWFmNWViNDA=, TxId: 2025-03-28T09:36:54.767264Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzQzYWI0NTItMWRiZjRjNmEtMjEwMWNhNjUtNWFmNWViNDA=, TxId: 2025-03-28T09:36:54.767535Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:36:54.780719Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:36:54.780772Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3107:3317] 2025-03-28T09:36:56.081624Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:36:56.081688Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table. 2025-03-28T09:36:56.081716Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-28T09:36:57.279209Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2025-03-28T09:36:57.280607Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:36:57.280987Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:57.302890Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:36:57.302973Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-28T09:36:57.303009Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:36:57.303342Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:36:57.306031Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:36:57.316609Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZGEzNTc3YmItYzU3MjBjMGUtODJmZTRkMTYtNjhmNjcwNzk=, TxId: 2025-03-28T09:36:57.316659Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZGEzNTc3YmItYzU3MjBjMGUtODJmZTRkMTYtNjhmNjcwNzk=, TxId: 2025-03-28T09:36:57.316936Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:36:57.330428Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:36:57.330486Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:36:58.593450Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:36:58.593516Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:36:58.593556Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:36:59.813480Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:36:59.813561Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is data table. 2025-03-28T09:36:59.813584Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-28T09:36:59.813823Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:36:59.815478Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:36:59.844246Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTVkMzBjYjMtOTJkODJmODItYjA3YWM1YjctMTRjODRhMTE=, TxId: 2025-03-28T09:36:59.844290Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTVkMzBjYjMtOTJkODJmODItYjA3YWM1YjctMTRjODRhMTE=, TxId: 2025-03-28T09:36:59.844527Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:36:59.857068Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-28T09:36:59.857127Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3107:3317] 2025-03-28T09:36:59.857486Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7548:5229]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:36:59.859496Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:36:59.859540Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:36:59.862417Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:36:59.862465Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:36:59.862499Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:36:59.864161Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-03-28T09:36:59.864360Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 2025-03-28T09:36:59.864598Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:7578:5241]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:36:59.866329Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:36:59.866368Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:36:59.866705Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:36:59.866747Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:36:59.866779Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 5] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:36:59.868161Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2025-03-28T09:36:59.868348Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3
------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV2PDiskIdFilter [GOOD]
Test command err:
2025-03-28T09:32:05.116752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:3161:2436], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:05.118434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:05.118800Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:05.120447Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:3119:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:05.121133Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:1278:2181], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:05.121381Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:3116:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:05.121896Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:3157:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:05.122035Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:3164:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:05.122189Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:3110:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:05.122292Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:3113:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:05.122393Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:05.122511Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:1311:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:05.123281Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:05.123362Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:05.123390Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:05.123607Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:05.123675Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:05.123715Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:05.123771Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:05.123805Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:05.123842Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:05.124083Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:05.124124Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:05.124218Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:05.124437Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:05.124513Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:05.124578Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:32:05.530374Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:05.745640Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-28T09:32:05.762060Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-28T09:32:06.315445Z node 1 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 8177, node 1 TClient is connected to server localhost:19084 2025-03-28T09:32:06.561862Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:06.561919Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:06.561958Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:06.562801Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:33:27.161340Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:3130:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:27.224305Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:27.225493Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:33:27.226010Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:1958:2373], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:27.228281Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:33:27.229519Z node 13 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [13:3136:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:27.229648Z node 15 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:27.229811Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:1967:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:27.231242Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:3126:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:27.231434Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:3133:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:27.231601Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:27.231730Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:27.232593Z node 13 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:33:27.232721Z node 18 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:33:27.233056Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:27.233138Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:27.233887Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:33:27.233942Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:33:27.236525Z node 14 :KQP_W ... rror=incorrect path status: LookupError; 2025-03-28T09:35:05.307655Z node 27 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [27:2709:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:05.308044Z node 21 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:35:05.308087Z node 22 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:35:05.308128Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:05.308157Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:35:05.309084Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:05.309788Z node 20 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [20:3158:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:05.310051Z node 27 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:35:05.310898Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:05.311424Z node 24 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [24:2700:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:05.311765Z node 20 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:35:05.312316Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:05.312526Z node 24 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:35:05.700127Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:05.970370Z node 19 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-28T09:35:06.000123Z node 19 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-28T09:35:06.649438Z node 19 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 26976, node 19 TClient is connected to server localhost:26831 2025-03-28T09:35:07.076683Z node 19 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:07.076759Z node 19 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:07.076813Z node 19 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:07.077601Z node 19 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:36:39.095837Z node 28 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [28:3135:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:39.096773Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:39.097462Z node 28 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:36:39.100399Z node 33 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [33:3151:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:39.100583Z node 34 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [34:3154:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:39.101669Z node 33 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:39.101729Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:39.102452Z node 35 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [35:1440:2309], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:39.102527Z node 33 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:36:39.102567Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:36:39.103454Z node 30 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [30:3138:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:39.104097Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:36:39.104504Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:39.104955Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:39.105794Z node 30 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:36:39.105916Z node 31 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [31:3141:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:39.106047Z node 32 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [32:3144:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:39.107307Z node 29 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [29:3131:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:39.107389Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:39.107453Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:39.107782Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:36:39.107821Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:36:39.107956Z node 36 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [36:1443:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:39.108502Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:39.109109Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:39.109456Z node 29 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:36:39.109587Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:36:39.486239Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:39.743992Z node 28 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-28T09:36:39.766722Z node 28 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-28T09:36:40.483718Z node 28 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 23747, node 28 TClient is connected to server localhost:19543 2025-03-28T09:36:40.942285Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:36:40.942362Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:36:40.942413Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:36:40.942782Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TColumnShardTestSchema::RebootHotTiersAfterTtl >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite >> KqpStats::RequestUnitForExecute [GOOD] >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate >> TColumnShardTestSchema::Drop [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155171.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143155171.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123155171.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123155171.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153971.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123153971.000000s;Name=;Codec=}; 2025-03-28T09:36:12.516479Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:36:12.586771Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:36:12.600912Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:36:12.601142Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:36:12.608250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:36:12.608484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:36:12.608772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:36:12.608900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:36:12.609011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:36:12.609121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:36:12.609229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:36:12.609367Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:36:12.609491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:36:12.609606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:36:12.609724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:36:12.609848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:36:12.639803Z node 1 :TX_COLUMNSHARD DEBUG: 
TxInitSchema.Complete at tablet 9437184 2025-03-28T09:36:12.640148Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:36:12.640216Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:36:12.640391Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:36:12.640537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:36:12.640617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:36:12.640660Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:36:12.640748Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:36:12.640807Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:36:12.640851Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:36:12.640883Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:36:12.641034Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:36:12.641097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:36:12.641144Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:36:12.641178Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:36:12.641266Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:36:12.641324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:36:12.641366Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:36:12.641396Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:36:12.641467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:36:12.641508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:36:12.641538Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:36:12.641591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:36:12.641630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:36:12.641667Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:36:12.642070Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=48; 2025-03-28T09:36:12.642155Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-03-28T09:36:12.642232Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-03-28T09:36:12.642313Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=38; 2025-03-28T09:36:12.642479Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:36:12.642557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:36:12.642595Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:36:12.642837Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:36:12.642891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:36:12.642929Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:36:12.643094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 
2025-03-28T09:36:12.643143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:36:12.643176Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:36:12.643373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... ta.cpp:29;PRECHARGE:finishLoadingTime=8; 2025-03-28T09:37:01.921908Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=160; 2025-03-28T09:37:01.921936Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=17026; 2025-03-28T09:37:01.925601Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=3610; 2025-03-28T09:37:01.929281Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=2970; 2025-03-28T09:37:01.929357Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=3692; 2025-03-28T09:37:01.929466Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=64; 2025-03-28T09:37:01.929543Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=45; 2025-03-28T09:37:01.929635Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=59; 2025-03-28T09:37:01.929735Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=67; 2025-03-28T09:37:01.933891Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4101; 2025-03-28T09:37:01.940176Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=6193; 2025-03-28T09:37:01.940282Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=30; 2025-03-28T09:37:01.940339Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=21; 2025-03-28T09:37:01.940374Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-03-28T09:37:01.940406Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=5; 2025-03-28T09:37:01.940448Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2025-03-28T09:37:01.940513Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=33; 2025-03-28T09:37:01.940548Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-03-28T09:37:01.940617Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=40; 2025-03-28T09:37:01.940651Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-03-28T09:37:01.940696Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=20; 2025-03-28T09:37:01.940759Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=37; 2025-03-28T09:37:01.940930Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=146; 2025-03-28T09:37:01.940958Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=40826; 2025-03-28T09:37:01.941071Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:37:01.941156Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:37:01.941199Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:37:01.941247Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:37:01.947880Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-28T09:37:01.948009Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:37:01.948057Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:37:01.948116Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-03-28T09:37:01.948166Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:37:01.948198Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:37:01.948237Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:37:01.948263Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:37:01.948330Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:37:01.948819Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:37:01.948876Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1991:3891];tablet_id=9437184;parent=[1:1953:3860];fline=manager.cpp:82;event=ask_data;request=request_id=95;1={portions_count=11};; 2025-03-28T09:37:01.949268Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:37:01.949362Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:37:01.949383Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:37:01.949400Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:37:01.949431Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:37:01.949471Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:37:01.949507Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-03-28T09:37:01.949546Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:37:01.949578Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:37:01.949609Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:37:01.949637Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:37:01.949692Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:37:01.950166Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=11;path_id=1; 2025-03-28T09:37:01.951380Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1953:3860];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 >> TColumnShardTestSchema::OneTierExternalTtl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForExecute [GOOD] Test command err: Trying to start YDB, gRPC: 5943, MsgBus: 22694 2025-03-28T09:36:51.618283Z node 1 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792048941423503:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:51.618369Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014a0/r3tmp/tmpSPbK6n/pdisk_1.dat 2025-03-28T09:36:51.867960Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5943, node 1 2025-03-28T09:36:51.902099Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:36:51.902122Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:36:51.902128Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:36:51.902264Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:36:51.939484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:51.939635Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:51.941660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22694 TClient is connected to server localhost:22694 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:36:52.269905Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:52.291336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:52.393950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:52.497906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:36:52.542022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:53.629553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792057531359874:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:53.629658Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:53.815110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:36:53.877539Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:36:53.898610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:36:53.920059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:36:53.941023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:36:53.979069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:36:54.048625Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792061826327685:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:54.048677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:54.048767Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792061826327690:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:54.051229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:36:54.057272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792061826327692:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:36:54.118799Z node 1 :TX_PROXY ERROR: Actor# [1:7486792061826327746:3440] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:36:54.758123Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486792061826328000:2492], status: GENERIC_ERROR, issues:
:2:12: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, UPDATE, UPSERT, USE, VALUES} 2025-03-28T09:36:54.758347Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWM1YjRjMC0zYjU4NDlhZi1jMmQ0ZWJmMi01YmJlOTg3Nw==, ActorId: [1:7486792061826327992:2487], ActorState: ExecuteState, TraceId: 01jqe1s4et9gqhz6wdbyyfr8ck, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:12: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, UPDATE, UPSERT, USE, VALUES} Trying to start YDB, gRPC: 14386, MsgBus: 12235 2025-03-28T09:36:55.241873Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792064540162922:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:55.241966Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014a0/r3tmp/tmp1TEluP/pdisk_1.dat 2025-03-28T09:36:55.316860Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14386, node 2 2025-03-28T09:36:55.362963Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:36:55.362988Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:36:55.362993Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:36:55.363092Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:36:55.377587Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:55.377689Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:55.379080Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12235 TClient is connected to server localhost:12235 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:36:55.632449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:55.640999Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:55.728806Z n ... _FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:57.256852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:36:57.279183Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:36:57.300720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:36:57.363277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:36:57.386296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:36:57.412458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:36:57.442387Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792073130099801:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:57.442448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:57.442456Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792073130099806:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:57.444867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:36:57.451248Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792073130099808:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:36:57.529252Z node 2 :TX_PROXY ERROR: Actor# [2:7486792073130099862:3435] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:36:58.144004Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486792077425067411:2490], status: GENERIC_ERROR, issues:
:2:8: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, UPDATE, UPSERT, USE, VALUES} 2025-03-28T09:36:58.144176Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MTEzYWVkYy02NmNmOGY1Ny1hOGFhZjNiZi0yNWUwZWU5NQ==, ActorId: [2:7486792077425067403:2485], ActorState: ExecuteState, TraceId: 01jqe1s7rv3fvwh0e4cf2ebyj9, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
:2:8: Error: mismatched input 'INCORRECT_STMT' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, UPDATE, UPSERT, USE, VALUES} Trying to start YDB, gRPC: 61673, MsgBus: 7182 2025-03-28T09:36:58.724183Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486792077902505276:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:58.724265Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014a0/r3tmp/tmp1BrtuO/pdisk_1.dat 2025-03-28T09:36:58.794335Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 61673, node 3 2025-03-28T09:36:58.839844Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:36:58.839873Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:36:58.839881Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:36:58.840007Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:36:58.847634Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:58.847708Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:58.848858Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7182 TClient is connected to server localhost:7182 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:36:59.173837Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:59.180950Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
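The two GENERIC_ERROR compile failures above are the test's negative path: a statement beginning with a token outside YQL's statement-start set (the list printed after "expecting") is rejected at parse time, the session replies ReplyQueryCompileError, and the pending transaction is dropped. Below is a minimal repro sketch against the YDB C++ SDK; the endpoint, database, and query text are assumptions for illustration (they are not taken from the test source, and the reported error position will differ with the exact query text).

    // Hypothetical repro sketch (assumed in-tree YDB C++ SDK usage; not the unittest's own harness).
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <cassert>

    int main() {
        auto config = NYdb::TDriverConfig()
            .SetEndpoint("localhost:14386")   // assumed; reuses the gRPC port from the log above
            .SetDatabase("/Root");
        NYdb::TDriver driver(config);
        NYdb::NTable::TTableClient client(driver);
        auto session = client.CreateSession().GetValueSync().GetSession();

        // The second statement starts with a token that is not in the parser's
        // statement-start set, so the query fails at compilation, before execution.
        auto result = session.ExecuteDataQuery(
            "SELECT 1;\nINCORRECT_STMT;",
            NYdb::NTable::TTxControl::BeginTx().CommitTx()).GetValueSync();
        assert(result.GetStatus() == NYdb::EStatus::GENERIC_ERROR);

        driver.Stop(true);
        return 0;
    }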
2025-03-28T09:36:59.216701Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:59.353809Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:59.421664Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:00.709791Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486792086492441628:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:00.709893Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:00.725934Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:00.749901Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:00.772595Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:00.794144Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:00.833400Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:00.856811Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:00.887509Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486792086492442136:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:00.887607Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:00.887651Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486792086492442141:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:00.891011Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:00.899302Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486792086492442143:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:00.982909Z node 3 :TX_PROXY ERROR: Actor# [3:7486792086492442197:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Consumed units: 245 Consumed units: 6 >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop [GOOD] Test command err: 2025-03-28T09:36:55.816222Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:36:55.879020Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:36:55.902760Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:36:55.903022Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:36:55.908926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:36:55.909078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:36:55.909234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:36:55.909323Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:36:55.909393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:36:55.909455Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:36:55.909508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:36:55.909584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:36:55.909679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:36:55.909752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 
2025-03-28T09:36:55.909808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:36:55.909866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:36:55.928381Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:36:55.928674Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:36:55.928721Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:36:55.928838Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:36:55.928951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:36:55.929006Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:36:55.929040Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:36:55.929115Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:36:55.929179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:36:55.929206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:36:55.929223Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:36:55.929332Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:36:55.929374Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:36:55.929399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:36:55.929418Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:36:55.929473Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:36:55.929518Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:36:55.929558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:36:55.929584Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:36:55.929639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:36:55.929665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:36:55.929684Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:36:55.929711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:36:55.929758Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:36:55.929779Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:36:55.930064Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=38; 2025-03-28T09:36:55.930123Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=23; 2025-03-28T09:36:55.930188Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-03-28T09:36:55.930272Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-03-28T09:36:55.930388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:36:55.930466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:36:55.930490Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:36:55.930665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:36:55.930718Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:36:55.930745Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:36:55.930896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:36:55.930959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:36:55.930995Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:36:55.931211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:36:55.931265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:36:55.931304Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:36:55.931461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:36:55.931518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:36:55.931579Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
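The WARN/NOTICE/INFO burst above is the columnshard's startup normalization pipeline: an ordered list of one-shot migrations (Granules, Chunks, TablesCleaner, CleanGranuleId, CleanInsertionDedup, GCCountersNormalizer, RestorePortionFromChunks, SyncPortionFromChunks, SyncMinSnapshotFromChunks, RestoreV1Chunks_V2, RestoreV2Chunks), each logging normalizer_init, an "N chunks found" result, normalizer_finished, and a switch to the next entry. The sketch below distills only that control flow; it is an illustration of the pattern visible in the log, not YDB's actual TTxUpdateSchema code.

    // Distilled sketch of the normalizer chain seen in the log (illustration only).
    #include <cstdio>
    #include <functional>
    #include <string>
    #include <vector>

    struct TNormalizer {
        std::string Name;                 // e.g. "Granules", "Chunks", ...
        int SeqId;                        // corresponds to the id=N values in the log
        std::function<size_t()> Run;      // returns the number of chunks found to repair
    };

    void RunNormalizationChain(const std::vector<TNormalizer>& chain) {
        for (size_t i = 0; i < chain.size(); ++i) {
            const TNormalizer& n = chain[i];
            std::printf("event=normalizer_init;seq_id=%d;type=%s\n", n.SeqId, n.Name.c_str());
            std::printf("normalizer=%s;message=%zu chunks found\n", n.Name.c_str(), n.Run());
            std::printf("event=normalizer_finished;CLASS_NAME=%s;id=%d\n", n.Name.c_str(), n.SeqId);
            if (i + 1 < chain.size()) {
                std::printf("event=normalizer_switched;CLASS_NAME=%s\n", chain[i + 1].Name.c_str());
            }
        }
        std::printf("event=normalization_finished\n");
    }

    int main() {
        RunNormalizationChain({
            {"Granules", 1, [] { return size_t(0); }},
            {"Chunks",   2, [] { return size_t(0); }},
            // ... remaining normalizers, in the order shown in the log above
        });
        return 0;
    }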
0];range=bytes=0-7855;object_exists=1; 2025-03-28T09:37:02.764246Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=3449cfd4-bb811f0-80df321c-c1e9c23a;fline=actor.cpp:48;task=agents_waiting=2;additional_info=();; 2025-03-28T09:37:02.765639Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=3449cfd4-bb811f0-80df321c-c1e9c23a; 2025-03-28T09:37:02.778819Z node 1 :TX_COLUMNSHARD DEBUG: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:139:2171];fline=general_compaction.cpp:133;event=blobs_created_diff;appended=0;;column_id:4;chunk_idx:0;blob_range:[NO_BLOB:0:3928];;column_id:6;chunk_idx:0;blob_range:[NO_BLOB:3928:3928];;column_id:3;chunk_idx:0;blob_range:[NO_BLOB:0:6208];;column_id:2;chunk_idx:0;blob_range:[NO_BLOB:6208:3928];;column_id:10;chunk_idx:0;blob_range:[NO_BLOB:0:3928];;column_id:7;chunk_idx:0;blob_range:[NO_BLOB:3928:2456];;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:6384:2400];;column_id:8;chunk_idx:0;blob_range:[NO_BLOB:0:2400];;column_id:9;chunk_idx:0;blob_range:[NO_BLOB:2400:2400];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:4800:2392];;column_id:4294967040;chunk_idx:0;blob_range:[NO_BLOB:7192:232];;column_id:4294967041;chunk_idx:0;blob_range:[NO_BLOB:7424:224];;;;switched=(portion_id:7;path_id:1;records_count:100;min_schema_snapshot:(plan_step=1000000001;tx_id=101;);schema_version:1;level:0;column_size:10280;index_size:28;meta:((produced=INSERTED;)););(portion_id:3;path_id:1;records_count:552;min_schema_snapshot:(plan_step=1000000001;tx_id=101;);schema_version:1;level:0;column_size:34408;index_size:28;meta:((produced=SPLIT_COMPACTED;)););; 2025-03-28T09:37:02.778898Z node 1 :TX_COLUMNSHARD INFO: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;tablet_id=9437184;parent_id=[1:139:2171];fline=general_compaction.cpp:135;event=blobs_created;appended=1;switched=2; 2025-03-28T09:37:02.779137Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=1; 2025-03-28T09:37:02.779785Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000003:max} readable: {1000000004:max} at tablet 9437184 2025-03-28T09:37:02.791978Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:37:02.793832Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 9 } } } ; 2025-03-28T09:37:02.793917Z node 1 :TX_COLUMNSHARD DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 9 } } } ; 2025-03-28T09:37:02.794440Z node 1 :TX_COLUMNSHARD DEBUG: 
tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"saved_at","id":9}]},"o":"9","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"9","p":{"address":{"name":"saved_at","id":9}},"o":"9","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"9","t":"Projection"},"w":7,"id":0}}}; 2025-03-28T09:37:02.794548Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:37:02.795609Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000003:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:657:2674];trace_detailed=; 2025-03-28T09:37:02.796061Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:5:255:1:7856:0]; 2025-03-28T09:37:02.797208Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:84;ff_first=(column_ids=9;column_names=saved_at;);; 2025-03-28T09:37:02.797463Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; 2025-03-28T09:37:02.824540Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-28T09:37:02.824780Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[22] (CS::GENERAL) apply at tablet 9437184 2025-03-28T09:37:02.826269Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=3449cfd4-bb811f0-80df321c-c1e9c23a;fline=with_appended.cpp:24;event=skip_inserted_data;reason=table_removed;path_id=1; 2025-03-28T09:37:02.826437Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:7 Blob count: 3 2025-03-28T09:37:02.826559Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=10308;raw_bytes=8378;count=1;records=100} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=5597040;raw_bytes=7864562;count=4;records=80000} inactive {blob_bytes=5605344;raw_bytes=7864506;count=2;records=80000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:37:02.827000Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:657:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:37:02.827147Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:657:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:02.827252Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:657:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:02.827287Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:657:2674] finished for tablet 9437184 2025-03-28T09:37:02.827671Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:657:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:647:2664];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.031}],"full":{"a":1743154622795517,"name":"_full_task","f":1743154622795517,"d_finished":0,"c":0,"l":1743154622827331,"d":31814},"events":[{"name":"bootstrap","f":1743154622796260,"d_finished":1342,"c":1,"l":1743154622797602,"d":1342},{"a":1743154622826973,"name":"ack","f":1743154622826973,"d_finished":0,"c":0,"l":1743154622827331,"d":358},{"a":1743154622826951,"name":"processing","f":1743154622826951,"d_finished":0,"c":0,"l":1743154622827331,"d":380},{"name":"ProduceResults","f":1743154622797580,"d_finished":260,"c":2,"l":1743154622827273,"d":260},{"a":1743154622827277,"name":"Finish","f":1743154622827277,"d_finished":0,"c":0,"l":1743154622827331,"d":54}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:02.827751Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:657:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:647:2664];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:37:02.828138Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:657:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:647:2664];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.031},{"events":["l_ack","l_processing","l_Finish"],"t":0.032}],"full":{"a":1743154622795517,"name":"_full_task","f":1743154622795517,"d_finished":0,"c":0,"l":1743154622827802,"d":32285},"events":[{"name":"bootstrap","f":1743154622796260,"d_finished":1342,"c":1,"l":1743154622797602,"d":1342},{"a":1743154622826973,"name":"ack","f":1743154622826973,"d_finished":0,"c":0,"l":1743154622827802,"d":829},{"a":1743154622826951,"name":"processing","f":1743154622826951,"d_finished":0,"c":0,"l":1743154622827802,"d":851},{"name":"ProduceResults","f":1743154622797580,"d_finished":260,"c":2,"l":1743154622827273,"d":260},{"a":1743154622827277,"name":"Finish","f":1743154622827277,"d_finished":0,"c":0,"l":1743154622827802,"d":525}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:02.828225Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:657:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:37:02.794516Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:37:02.828269Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:657:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:37:02.828379Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:657:2674];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] Test command err: 2025-03-28T09:36:58.995774Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:36:58.996158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:36:58.996216Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001612/r3tmp/tmp0Xwa6A/pdisk_1.dat 2025-03-28T09:36:59.289110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:36:59.297138Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2025-03-28T09:36:59.297183Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 step# 500 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-03-28T09:36:59.323886Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:59.360028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:59.360113Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:59.371187Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:36:59.443668Z node 1 :TX_COORDINATOR DEBUG: Transaction 1 has been planned 2025-03-28T09:36:59.443745Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2025-03-28T09:36:59.443894Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 1000 in 0.500000s at 0.950000s 2025-03-28T09:36:59.444168Z node 1 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 500, txid# 1 marker# C2 2025-03-28T09:36:59.444215Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 1 stepId# 500 Status# 17 SEND EvProposeTransactionStatus to# [1:409:2404] Proxy 2025-03-28T09:36:59.445572Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2025-03-28T09:36:59.445635Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2025-03-28T09:36:59.445674Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 for mediator 72057594046382081 acknowledged 2025-03-28T09:36:59.445705Z node 1 :TX_COORDINATOR DEBUG: at tablet# 72057594046316545 [2:8] persistent tx 1 acknowledged 2025-03-28T09:36:59.449192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:36:59.481565Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:656:2564], Recipient [1:665:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:59.482326Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:656:2564], Recipient [1:665:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:59.482637Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: 
tablet 72075186224037888 actor [1:665:2570] 2025-03-28T09:36:59.482820Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:59.512236Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:656:2564], Recipient [1:665:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:59.512793Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:59.512912Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:36:59.514074Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:36:59.514129Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:36:59.514172Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:36:59.514438Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:36:59.514530Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:36:59.514582Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:681:2570] in generation 1 2025-03-28T09:36:59.514830Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:36:59.536863Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:36:59.536991Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:36:59.537067Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:683:2580] 2025-03-28T09:36:59.537092Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:36:59.537115Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:36:59.537142Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:36:59.537312Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:665:2570], Recipient [1:665:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:59.537362Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:59.537558Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:36:59.537623Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:36:59.537890Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:36:59.537923Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:59.537956Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:36:59.537984Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:36:59.538004Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:36:59.538024Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:36:59.538052Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:36:59.538124Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:672:2574], Recipient [1:665:2570]: 
NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:59.538145Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:59.538175Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:662:2568], serverId# [1:672:2574], sessionId# [0:0:0] 2025-03-28T09:36:59.538241Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:672:2574] 2025-03-28T09:36:59.538278Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:36:59.538341Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:36:59.538517Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:36:59.538556Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:36:59.538610Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:36:59.538645Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:36:59.538669Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:36:59.538708Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:36:59.538743Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:36:59.538927Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:36:59.538950Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:36:59.538971Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:36:59.538991Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:36:59.539055Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:36:59.539114Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:36:59.539140Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:36:59.539171Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:36:59.539187Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:36:59.539775Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:36:59.539807Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:36:59.539828Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:36:59.539850Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec 
latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:36:59.539891Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:36:59.541151Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715657 HANDLE EvProposeTransaction marker# C0 2025-03-28T09:36:59.541200Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 281474976715657 step# 1000 Status# 16 SEND to# [1:409:2404] Proxy marker# C1 2025-03-28T09:36:59.541407Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:684:2581], Recipient [1:665:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:36:59.541447Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:36:59.686039Z node 1 :TX_COORDINATOR DEBUG: Transaction 281474976715657 has been planned 2025-03-28T09:36:59.686113Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 281474976715657 for mediator 72057594046382081 tablet 72057594046644480 2025-03-28T09:36:59.686150Z node 1 :TX_COORDINATOR DEBUG: Planned transaction 281474976715657 for ... 000 2025-03-28T09:37:03.047000Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 270270977, Sender [1:24:2071], Recipient [1:1356:3059]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 4000} 2025-03-28T09:37:03.047035Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2025-03-28T09:37:03.047065Z node 1 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 4000 at tablet 72075186224037888 2025-03-28T09:37:03.047103Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:37:03.047649Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4000} 2025-03-28T09:37:03.047949Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 1 SubscriptionId: 2 LatestStep: 4000 2025-03-28T09:37:03.048024Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 1 TimeBarrier# 4000} 2025-03-28T09:37:03.124607Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715685. Ctx: { TraceId: 01jqe1scj8apmwq000k1r6x8s2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjJiNTU3NC1iZTJkODNmZS0yNjcxYjRkYS03NWU4ODVlNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:37:03.125780Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1415:3099], Recipient [1:1356:3059]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976715683 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-03-28T09:37:03.125887Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:37:03.125936Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2025-03-28T09:37:03.125996Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-28T09:37:03.126023Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:37:03.126048Z node 1 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:37:03.126070Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:37:03.126102Z node 1 :TX_DATASHARD TRACE: Activated operation [0:2] at 72075186224037888 2025-03-28T09:37:03.126126Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-28T09:37:03.126142Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:37:03.126161Z node 1 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:37:03.126181Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:37:03.126276Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976715683 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-03-28T09:37:03.126438Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715683, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:37:03.126474Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/18446744073709551615 2025-03-28T09:37:03.126502Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:1415:3099], 0} after executionsCount# 1 2025-03-28T09:37:03.126534Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1415:3099], 0} sends rowCount# 5, bytes# 160, quota rows left# 996, quota bytes left# 5242720, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:37:03.126582Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1415:3099], 0} finished in read 2025-03-28T09:37:03.126618Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-28T09:37:03.126632Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:37:03.126645Z node 1 :TX_DATASHARD TRACE: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 
2025-03-28T09:37:03.126661Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:37:03.126710Z node 1 :TX_DATASHARD TRACE: Execution status for [0:2] at 72075186224037888 is Executed 2025-03-28T09:37:03.126731Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:37:03.126750Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:2] at 72075186224037888 has finished 2025-03-28T09:37:03.126785Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:37:03.126842Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-28T09:37:03.127495Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1415:3099], Recipient [1:1356:3059]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:37:03.127561Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } } 2025-03-28T09:37:03.237027Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep 2025-03-28T09:37:03.237142Z node 1 :TX_COORDINATOR TRACE: Coordinator# 72057594046316545 scheduling step 4500 in 0.499900s at 4.450000s 2025-03-28T09:37:03.238180Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715686. Ctx: { TraceId: 01jqe1scmvdvybp8syeq57025z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjcyM2YyNTgtNGRmNWRkZjktOGI5NWY5MWItYmFmMzk4ZWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:37:03.239478Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:1439:3116], Recipient [1:1356:3059]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715686 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2025-03-28T09:37:03.239655Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:37:03.239740Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-03-28T09:37:03.239856Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-28T09:37:03.239893Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:37:03.239923Z node 1 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:37:03.239950Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:37:03.239979Z node 1 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-03-28T09:37:03.240006Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-28T09:37:03.240020Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:37:03.240035Z node 1 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:37:03.240047Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:37:03.240123Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4000 TxId: 18446744073709551615 } LockTxId: 281474976715686 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2025-03-28T09:37:03.240312Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715686, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:37:03.240376Z node 1 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4000/18446744073709551615 2025-03-28T09:37:03.240404Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:1439:3116], 0} after executionsCount# 1 2025-03-28T09:37:03.240437Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1439:3116], 0} sends rowCount# 6, bytes# 192, quota rows left# 995, quota bytes left# 5242688, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:37:03.240502Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:1439:3116], 0} finished in read 2025-03-28T09:37:03.240556Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-28T09:37:03.240571Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:37:03.240587Z node 1 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:37:03.240600Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:37:03.240626Z node 1 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-28T09:37:03.240639Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:37:03.240653Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-03-28T09:37:03.240678Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:37:03.240747Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-28T09:37:03.240905Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [1:61:2108], Recipient [1:1356:3059]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715686 LockNode: 1 Status: STATUS_SUBSCRIBED 2025-03-28T09:37:03.241430Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:1439:3116], Recipient [1:1356:3059]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:37:03.241468Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } }, { items { uint32_value: 11 } items { uint32_value: 11 } } >> DataShardOutOfOrder::UncommittedReads >> DataShardTxOrder::RandomPoints_DelayData >> TSubscriberTest::SyncWithOutdatedReplica >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] Test command err: 2025-03-28T09:37:05.000381Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:37:05.002129Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:3:2050] 2025-03-28T09:37:05.002213Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:6:2053] 2025-03-28T09:37:05.002253Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:9:2056] 2025-03-28T09:37:05.002292Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [1:36:2066] 2025-03-28T09:37:05.002335Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:37:2066] 2025-03-28T09:37:05.002366Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:35:2066][path] Set up state: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 
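The read trace above shows the iterator's quota accounting: the single batch of 6 rows / 192 bytes is subtracted from the request's MaxRows: 1001 / MaxBytes: 5242880, leaving 995 rows and 5242688 bytes of quota. A minimal C++ sketch of that bookkeeping (illustrative names, not the actual datashard types):

#include <cstdint>
#include <iostream>

// Sketch of the quota bookkeeping implied by the read-iterator trace
// (rowCount# 6, bytes# 192 against MaxRows: 1001, MaxBytes: 5242880).
struct TReadQuota {
    uint64_t RowsLeft;
    uint64_t BytesLeft;

    // Account one result batch; returns false once either quota is
    // exhausted, at which point the shard would pause and wait for an ack.
    bool Consume(uint64_t rows, uint64_t bytes) {
        RowsLeft  = rows  >= RowsLeft  ? 0 : RowsLeft  - rows;
        BytesLeft = bytes >= BytesLeft ? 0 : BytesLeft - bytes;
        return RowsLeft > 0 && BytesLeft > 0;
    }
};

int main() {
    TReadQuota quota{1001, 5242880};
    quota.Consume(6, 192); // the batch sent in the trace: 6 rows, 192 bytes
    // Prints "995 5242688", matching "quota rows left# 995,
    // quota bytes left# 5242688" in the log above.
    std::cout << quota.RowsLeft << ' ' << quota.BytesLeft << '\n';
}

With both quotas still positive and hasUnreadQueries# 0, the iterator finishes in a single pass, exactly as the trace reports ("finished in read").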
2025-03-28T09:37:05.002452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [1:38:2066] 2025-03-28T09:37:05.002492Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:35:2066][path] Path was already updated: owner# [1:33:2064], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:37:05.002572Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-03-28T09:37:05.002665Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2066], cookie# 1 2025-03-28T09:37:05.002724Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2025-03-28T09:37:05.002760Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2025-03-28T09:37:05.002792Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:3:2050], cookie# 1 2025-03-28T09:37:05.002821Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-03-28T09:37:05.002836Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-03-28T09:37:05.002866Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 }: sender# [1:36:2066], cookie# 1 2025-03-28T09:37:05.002893Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, faulires# 0 2025-03-28T09:37:05.002913Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:37:2066], cookie# 1 2025-03-28T09:37:05.002935Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, faulires# 0, partial# 0 2025-03-28T09:37:05.002988Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 }: sender# [1:38:2066], cookie# 1 2025-03-28T09:37:05.003016Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:35:2066][path] Unexpected sync response: sender# [1:38:2066], cookie# 1 >> TColumnShardTestSchema::ColdCompactionSmoke >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_oo8 >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate [GOOD] >> 
DataShardOutOfOrder::TestShardRestartDuringWaitingRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=143155137.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155137.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155137.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143155137.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123155137.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155137.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143155137.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153937.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123155137.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123155137.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153937.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123153937.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123153937.000000s;Name=;Codec=}; 2025-03-28T09:35:38.026745Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:35:38.090824Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:35:38.104938Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:35:38.105138Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:35:38.110510Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:38.110649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:38.110829Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:38.110900Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:38.110962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:38.111026Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:38.111085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:38.111165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:38.111238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:38.111309Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:38.111382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:38.111440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:38.129190Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:35:38.129425Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:35:38.129470Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:35:38.129579Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:35:38.129691Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:38.129767Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:35:38.129816Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:35:38.129882Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 
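The TTxUpdateSchema records above trace a strictly sequential normalizer chain: each of the 11 registered normalizers (Granules, Chunks, TablesCleaner, ...) is initialized, reports how many chunks it repaired ("0 chunks found" on a clean tablet), and the executor switches to the next one. A hedged sketch of that control flow, with hypothetical types standing in for the real columnshard code:

#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Illustrative model of the normalizer chain run by TTxUpdateSchema:
// init -> run (count chunks) -> finished -> switch to next.
struct TNormalizer {
    std::string Name;
    std::function<size_t()> Run; // returns number of chunks repaired
};

int main() {
    std::vector<TNormalizer> chain = {
        {"Granules",      [] { return size_t(0); }},
        {"Chunks",        [] { return size_t(0); }},
        {"TablesCleaner", [] { return size_t(0); }},
        // ... 11 normalizers total in the trace above
    };
    for (size_t i = 0; i < chain.size(); ++i) {
        std::cout << "normalizer_init: " << chain[i].Name << '\n';
        std::cout << chain[i].Run() << " chunks found\n";
        std::cout << "normalizer_finished: " << chain[i].Name << '\n';
        if (i + 1 < chain.size())
            std::cout << "normalizer_switched to " << chain[i + 1].Name << '\n';
    }
}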
2025-03-28T09:35:38.129921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:35:38.129948Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:35:38.129965Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:35:38.130061Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:35:38.130100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:35:38.130122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:35:38.130138Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:35:38.130189Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:35:38.130219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:35:38.130249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:35:38.130265Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:35:38.130306Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:35:38.130326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:35:38.130348Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:35:38.130375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:35:38.130396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:35:38.130415Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:35:38.150197Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=41; 2025-03-28T09:35:38.150302Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=36; 2025-03-28T09:35:38.150368Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=27; 2025-03-28T09:35:38.150459Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=44; 2025-03-28T09:35:38.150664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:35:38.150737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:35:38.150775Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:35:38.150945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:35:38.150986Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:35:38.151010Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;pr ... ikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-28T09:37:06.643342Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:37:06.643408Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:37:06.643478Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:37:06.643553Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:37:06.643677Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:37:06.644017Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-03-28T09:37:06.644166Z 
node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:37:06.644345Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:37:06.644419Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:37:06.644946Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-28T09:37:06.645066Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:37:06.645604Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1970:3975];trace_detailed=; 2025-03-28T09:37:06.646056Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-28T09:37:06.646320Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-28T09:37:06.646519Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:06.646667Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:188;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:06.647124Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:37:06.647253Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:06.647390Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:06.740126Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1970:3975] finished for tablet 9437184 2025-03-28T09:37:06.740887Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1969:3974];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.094}],"full":{"a":1743154626645527,"name":"_full_task","f":1743154626645527,"d_finished":0,"c":0,"l":1743154626740288,"d":94761},"events":[{"name":"bootstrap","f":1743154626645744,"d_finished":978,"c":1,"l":1743154626646722,"d":978},{"a":1743154626647096,"name":"ack","f":1743154626647096,"d_finished":0,"c":0,"l":1743154626740288,"d":93192},{"a":1743154626647071,"name":"processing","f":1743154626647071,"d_finished":0,"c":0,"l":1743154626740288,"d":93217},{"name":"ProduceResults","f":1743154626646430,"d_finished":93188,"c":2,"l":1743154626740060,"d":93188},{"a":1743154626740080,"name":"Finish","f":1743154626740080,"d_finished":0,"c":0,"l":1743154626740288,"d":208}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:06.741011Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1969:3974];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 
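The scan_finish statistics above encode each lifecycle stage as (f)irst/(l)ast microsecond timestamps with duration d = l - f; for the "_full_task" entry, 1743154626740288 - 1743154626645527 = 94761 us (~95 ms), almost all of it spent in ProduceResults. A quick check of that arithmetic:

#include <cstdint>
#include <iostream>

// Verifies the d = l - f relation in the scan stats JSON above.
int main() {
    const uint64_t f = 1743154626645527ULL; // first event, microseconds
    const uint64_t l = 1743154626740288ULL; // last event, microseconds
    std::cout << "d = " << (l - f) << " us\n"; // prints "d = 94761 us"
}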
2025-03-28T09:37:06.741609Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1969:3974];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.001},{"events":["l_ProduceResults","f_Finish"],"t":0.094},{"events":["l_ack","l_processing","l_Finish"],"t":0.095}],"full":{"a":1743154626645527,"name":"_full_task","f":1743154626645527,"d_finished":0,"c":0,"l":1743154626741077,"d":95550},"events":[{"name":"bootstrap","f":1743154626645744,"d_finished":978,"c":1,"l":1743154626646722,"d":978},{"a":1743154626647096,"name":"ack","f":1743154626647096,"d_finished":0,"c":0,"l":1743154626741077,"d":93981},{"a":1743154626647071,"name":"processing","f":1743154626647071,"d_finished":0,"c":0,"l":1743154626741077,"d":94006},{"name":"ProduceResults","f":1743154626646430,"d_finished":93188,"c":2,"l":1743154626740060,"d":93188},{"a":1743154626740080,"name":"Finish","f":1743154626740080,"d_finished":0,"c":0,"l":1743154626741077,"d":997}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1970:3975]->[1:1969:3974] 2025-03-28T09:37:06.741739Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:37:06.645031Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:37:06.741798Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:37:06.741940Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1970:3975];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 >> DataShardOutOfOrder::UncommittedReads [GOOD] >> TColumnShardTestSchema::RebootInternalTTL [GOOD] >> DataShardScan::ScanFollowedByUpdate ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReads [GOOD] Test command err: 2025-03-28T09:37:06.417741Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:06.418035Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:06.418080Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015f0/r3tmp/tmpxl0cJ4/pdisk_1.dat 2025-03-28T09:37:06.688669Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:37:06.724509Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:06.761361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:06.761445Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:06.772600Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:06.851118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:06.879371Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:37:06.880379Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:37:06.880792Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:37:06.881083Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:06.912143Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:37:06.912650Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:06.912726Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:37:06.913859Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:37:06.913909Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:37:06.913956Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:37:06.914203Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:37:06.914283Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:37:06.914332Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:37:06.914565Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:37:06.929802Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:37:06.929932Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:37:06.930020Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:37:06.930048Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:37:06.930087Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:37:06.930128Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:37:06.930302Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:06.930334Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:06.930526Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:37:06.930589Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:37:06.930897Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:37:06.930925Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:06.930951Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:37:06.930975Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:37:06.930996Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:37:06.931017Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:37:06.931047Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:37:06.931123Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:06.931164Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:06.931209Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:37:06.931270Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:37:06.931308Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:37:06.931378Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:37:06.931517Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:37:06.931565Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:37:06.931622Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:37:06.931650Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:37:06.931675Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:37:06.931712Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
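The propose trace in this block walks the transaction through a pipeline of execution units (CheckSchemeTx, StoreSchemeTx, FinishPropose, and, just below, WaitForPlan): an "Executed" status advances the operation to the next unit, "DelayComplete" defers work to the transaction's completion phase, and a unit that is not ready parks the operation, here until a plan step arrives. A minimal sketch of that state machine (statuses mirror the log; the structure itself is illustrative, not the actual pipeline code):

#include <iostream>
#include <string>
#include <vector>

enum class EStatus { Executed, DelayComplete, NotReady };

struct TUnit {
    std::string Name;
    EStatus (*Try)(); // captureless lambdas convert to this pointer type
};

int main() {
    std::vector<TUnit> plan = {
        {"CheckSchemeTx", [] { return EStatus::Executed; }},
        {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
        {"FinishPropose", [] { return EStatus::DelayComplete; }},
        {"WaitForPlan",   [] { return EStatus::NotReady; }},
    };
    for (const auto& unit : plan) {
        EStatus st = unit.Try();
        if (st == EStatus::NotReady) {
            std::cout << unit.Name << ": operation parked until plan step\n";
            break; // resumes when a TEvPlanStep delivers the step
        }
        // Both Executed and DelayComplete advance the execution plan;
        // DelayComplete additionally queues work for the Complete() phase.
        std::cout << unit.Name << ": advance to next unit\n";
    }
}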
2025-03-28T09:37:06.931743Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:37:06.931937Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:37:06.931963Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:37:06.931984Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:37:06.932005Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:37:06.932054Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:37:06.932077Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:37:06.932144Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:37:06.932171Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:37:06.932193Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:37:06.932810Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:37:06.932843Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:37:06.932865Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:37:06.932889Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:37:06.932932Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:37:06.934412Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:37:06.934452Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:37:07.080218Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:07.080262Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:07.080287Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:37:07.080552Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:37:07.080577Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:37:07.080658Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:37:07.080698Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:37:07.080738Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:37:07.080765Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:37:07.083573Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:37:07.083620Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:37:07.084258Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:07.084287Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:07.084320Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:37:0 ... \022\024\n\022\t\257\003\000\000\000\000\000\000\021\300\n\000\000\001\000\000\000\032\256\002\010\240\215\006\022\207\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_0\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? \004\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\360?i\000\000\000\000\000\000\360?q\000\000\000\0 2025-03-28T09:37:08.064672Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:37:08.064771Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-03-28T09:37:08.064792Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-03-28T09:37:08.064845Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:37:08.065142Z node 1 :TX_DATASHARD TRACE: TxId: 281474976715664, shard 72075186224037888, task: 1, meta: Table { TableId { OwnerId: 72057594046644480 TableId: 2 } TablePath: "/Root/table-1" SchemaVersion: 1 SysViewInfo: "" TableKind: 1 } Writes { Range { Ranges { KeyPoints: "\001\000\004\000\000\000\004\000\000\000" } } Columns { Column { Id: 1 Name: "key" Type: 2 } MaxValueSizeBytes: 4 } Columns { Column { Id: 2 Name: "value" Type: 2 } MaxValueSizeBytes: 4 } } 2025-03-28T09:37:08.065196Z node 1 :TX_DATASHARD TRACE: Table /Root/table-1, shard: 72075186224037888, task: 1, write point (Uint32 : 4) 2025-03-28T09:37:08.065239Z node 1 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:2:1] 2025-03-28T09:37:08.065427Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CheckDataTx 2025-03-28T09:37:08.065465Z node 1 :TX_DATASHARD TRACE: Execution status for 
[0:281474976715664] at 72075186224037888 is Executed 2025-03-28T09:37:08.065487Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2025-03-28T09:37:08.065509Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:37:08.065542Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:37:08.065574Z node 1 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-03-28T09:37:08.065615Z node 1 :TX_DATASHARD TRACE: Activated operation [0:281474976715664] at 72075186224037888 2025-03-28T09:37:08.065638Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-03-28T09:37:08.065652Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:37:08.065666Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-03-28T09:37:08.065684Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2025-03-28T09:37:08.065719Z node 1 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-03-28T09:37:08.065760Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715664] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191926 2025-03-28T09:37:08.065917Z node 1 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-28T09:37:08.065974Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:37:08.066002Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-03-28T09:37:08.066029Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:37:08.066048Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2025-03-28T09:37:08.066067Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2025-03-28T09:37:08.066084Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:37:08.066118Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:37:08.066135Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:37:08.066162Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2025-03-28T09:37:08.066185Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:37:08.066201Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:281474976715664] at 72075186224037888 
has finished ... blocked commit for tablet 72075186224037888 2025-03-28T09:37:08.138405Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe1shf23ydcnp1pwfdxgzcv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MjVkNjg3NDYtM2IwOTM2MDMtMzYxMmMyZjUtYmViZWUyZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:08.139933Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [1:963:2777], Recipient [1:666:2570]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-03-28T09:37:08.140097Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:37:08.140136Z node 1 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2025-03-28T09:37:08.140163Z node 1 :TX_DATASHARD TRACE: 72075186224037888 changed HEAD read to non-repeatable v2500/18446744073709551615 2025-03-28T09:37:08.140200Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2025-03-28T09:37:08.140256Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-28T09:37:08.140278Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:37:08.140331Z node 1 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:37:08.140359Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:37:08.140393Z node 1 :TX_DATASHARD TRACE: Activated operation [0:7] at 72075186224037888 2025-03-28T09:37:08.140417Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-28T09:37:08.140432Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:37:08.140444Z node 1 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:37:08.140455Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:37:08.140518Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-03-28T09:37:08.140680Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is DelayComplete 2025-03-28T09:37:08.140710Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:37:08.140744Z node 1 :TX_DATASHARD TRACE: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:37:08.140774Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:37:08.140815Z node 1 :TX_DATASHARD TRACE: Execution status for [0:7] at 72075186224037888 is Executed 2025-03-28T09:37:08.140832Z node 1 :TX_DATASHARD 
TRACE: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:37:08.140850Z node 1 :TX_DATASHARD TRACE: Execution plan for [0:7] at 72075186224037888 has finished 2025-03-28T09:37:08.140874Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:37:08.214272Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 3000 2025-03-28T09:37:08.214366Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} 2025-03-28T09:37:08.350800Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:37:08.350847Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2025-03-28T09:37:08.350882Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1000 ms, status: COMPLETE 2025-03-28T09:37:08.350947Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:37:08.351152Z node 1 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-28T09:37:08.351202Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:7] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:37:08.351258Z node 1 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[1:963:2777], 0} after executionsCount# 1 2025-03-28T09:37:08.351293Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:963:2777], 0} sends rowCount# 4, bytes# 128, quota rows left# 997, quota bytes left# 5242752, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:37:08.351397Z node 1 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[1:963:2777], 0} finished in read 2025-03-28T09:37:08.353042Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [1:963:2777], Recipient [1:666:2570]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:37:08.353093Z node 1 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 4 } items { uint32_value: 4 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootInternalTTL [GOOD] Test command err: 2025-03-28T09:36:56.313583Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:36:56.377518Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:56.380580Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:56.380839Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:36:56.396815Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:36:56.397054Z node 1 
:TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:36:56.403317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:36:56.403474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:36:56.403628Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:36:56.403698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:36:56.403763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:36:56.403866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:36:56.403958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:36:56.404032Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:36:56.404139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:36:56.404211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:36:56.404286Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:36:56.404354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:36:56.426256Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:56.430409Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:36:56.430750Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:36:56.430803Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:36:56.430982Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:36:56.431164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:36:56.431244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:36:56.431300Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:36:56.431396Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:36:56.431461Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:36:56.431502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:36:56.431540Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:36:56.431724Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:36:56.431803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:36:56.431862Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:36:56.431893Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:36:56.431996Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:36:56.432053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:36:56.432110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:36:56.432151Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:36:56.432230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:36:56.432268Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:36:56.432302Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:36:56.432362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:36:56.432407Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:36:56.432441Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:36:56.432822Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=43; 2025-03-28T09:36:56.432901Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-03-28T09:36:56.432981Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=37; 2025-03-28T09:36:56.433087Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=62; 2025-03-28T09:36:56.433261Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:36:56.433343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:36:56.433381Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:36:56.433570Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:36:56.433616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:36:56.433656Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:36:56.433841Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:36:56.433899Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:36:56.433935Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:36:56.434128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:36:56.434174Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:36:56.434210Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T0 ... ne=actor.cpp:361;event=send_data;compute_actor_id=[1:1036:3029];bytes=362872;rows=45359;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-03-28T09:37:08.380307Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:08.380400Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:08.380428Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:37:08.380469Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:37:08.380577Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:37:08.380701Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:34641;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:08.380733Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:37:08.380853Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=34641; 2025-03-28T09:37:08.380899Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=277128;num_rows=34641;batch_columns=saved_at; 2025-03-28T09:37:08.381020Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1036:3029];bytes=277128;rows=34641;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-03-28T09:37:08.381126Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:08.381209Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:08.381295Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:08.381382Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:37:08.381443Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce 
result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:08.381493Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:08.381521Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1041:3034] finished for tablet 9437184 2025-03-28T09:37:08.381901Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1036:3029];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.474},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.476}],"full":{"a":1743154627905049,"name":"_full_task","f":1743154627905049,"d_finished":0,"c":0,"l":1743154628381561,"d":476512},"events":[{"name":"bootstrap","f":1743154627905490,"d_finished":3003,"c":1,"l":1743154627908493,"d":3003},{"a":1743154628381369,"name":"ack","f":1743154628379695,"d_finished":1548,"c":2,"l":1743154628381315,"d":1740},{"a":1743154628381361,"name":"processing","f":1743154627910445,"d_finished":102614,"c":18,"l":1743154628381318,"d":102814},{"name":"ProduceResults","f":1743154627906941,"d_finished":4134,"c":22,"l":1743154628381507,"d":4134},{"a":1743154628381508,"name":"Finish","f":1743154628381508,"d_finished":0,"c":0,"l":1743154628381561,"d":53},{"name":"task_result","f":1743154627910469,"d_finished":100684,"c":16,"l":1743154628379469,"d":100684}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:08.381978Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1036:3029];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:37:08.382307Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1036:3029];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.003},{"events":["f_processing","f_task_result"],"t":0.005},{"events":["f_ack","l_task_result"],"t":0.474},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.476}],"full":{"a":1743154627905049,"name":"_full_task","f":1743154627905049,"d_finished":0,"c":0,"l":1743154628382015,"d":476966},"events":[{"name":"bootstrap","f":1743154627905490,"d_finished":3003,"c":1,"l":1743154627908493,"d":3003},{"a":1743154628381369,"name":"ack","f":1743154628379695,"d_finished":1548,"c":2,"l":1743154628381315,"d":2194},{"a":1743154628381361,"name":"processing","f":1743154627910445,"d_finished":102614,"c":18,"l":1743154628381318,"d":103268},{"name":"ProduceResults","f":1743154627906941,"d_finished":4134,"c":22,"l":1743154628381507,"d":4134},{"a":1743154628381508,"name":"Finish","f":1743154628381508,"d_finished":0,"c":0,"l":1743154628382015,"d":507},{"name":"task_result","f":1743154627910469,"d_finished":100684,"c":16,"l":1743154628379469,"d":100684}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:08.382378Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:37:07.904159Z;index_granules=0;index_portions=2;index_batches=1720;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=5265968;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5265968;selected_rows=0; 2025-03-28T09:37:08.382414Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:37:08.382673Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:1041:3034];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> AnalyzeDatashard::AnalyzeOneTable [GOOD] >> TColumnShardTestSchema::OneColdTier >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable [GOOD] Test command err: 2025-03-28T09:34:55.537777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:55.538114Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:55.538175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000d77/r3tmp/tmp3RyCfI/pdisk_1.dat 2025-03-28T09:34:55.867575Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18655, node 1 2025-03-28T09:34:56.067798Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:56.067856Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:56.067879Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:56.068194Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:56.070047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:56.153515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:56.153672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:56.167162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12578 2025-03-28T09:34:56.660499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:59.523967Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:34:59.556701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.556810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.594253Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:59.595742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:59.842903Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.843485Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.844020Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.844161Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.844359Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.844438Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.844520Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.844638Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.844713Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.004353Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:00.004468Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:00.017435Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:00.160044Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:00.208523Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:00.208615Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:00.239244Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:00.239393Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:00.239607Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:00.239663Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:00.239714Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:00.239776Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:00.239848Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:00.239901Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:00.240376Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:00.270962Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:00.271085Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1869:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:00.279008Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2608] 2025-03-28T09:35:00.284800Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1917:2623] 2025-03-28T09:35:00.285069Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1917:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:00.291078Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:00.306843Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:00.306897Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:00.306951Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:00.319110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:00.325156Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:00.325281Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:00.473659Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:00.667902Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:00.788135Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:01.622110Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2238:3072], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.622230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.640169Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:02.040333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2544:3124], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:02.040440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:02.041361Z node 1 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2548:3128]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:35:02.041503Z node 1 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:35:02.041581Z node 1 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [1:2550:3130] 2025-03-28T09:35:02.041653Z node 1 :STATISTICS DEBUG: SyncNode(), pipe client id = [1:2550:3130] 2025-03-28T09:35:02.042197Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2551:2992] 2025-03-28T09:35:02.042496Z node 1 :STATISTICS DEBUG: EvClientConnected, node id = 1, client id = [1:2550:3130], server id = [2:2551:2992], tablet id = 72075186224037894, status = OK 2025-03-28T09:35:02.042675Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:2551:2992], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:35:02.042759Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2025-03-28T09:35:02.042951Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:35:02.043007Z node 1 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [1:2548:3128], StatRequests.size() = 1 2025-03-28T09:35:02.058133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2555:3134], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:02.058243Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:02.058619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2560:3139], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:02.063490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-28T09:35:02.193732Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:35:02.193821Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:35:02.302199Z node 1 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [1:2550:3130], schemeshard count = 1 2025-03-28T09:35:02.687786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreator ... 24037894] EvPropagateTimeout 2025-03-28T09:36:37.210899Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:36:37.211191Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:39.985559Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:36:41.877672Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:36:41.877948Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:44.478274Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:36:46.564763Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:36:46.565235Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:49.310381Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:36:51.337070Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:36:51.337459Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:54.129805Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:36:56.050314Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:36:56.050789Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:36:58.893991Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:37:00.686265Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:37:00.686564Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:37:03.452273Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:37:04.506109Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-28T09:37:04.506195Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:37:04.506234Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:37:04.506266Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-28T09:37:05.679377Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:37:05.679695Z node 1 :STATISTICS DEBUG: EvPropagateStatistics, node id = 1 2025-03-28T09:37:05.721852Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path 
count: 2, at schemeshard: 72075186224037897 2025-03-28T09:37:05.721932Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 210.000000s, at schemeshard: 72075186224037897 2025-03-28T09:37:05.722166Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 49 2025-03-28T09:37:05.735161Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:37:06.731658Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:06.731742Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:37:06.731789Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:37:06.731858Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-28T09:37:06.731924Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:37:06.732362Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:06.746370Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:37:06.749618Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6660:4667], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:06.749723Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6670:4672], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:06.749880Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:06.760676Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-28T09:37:06.808911Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:6674:4675], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-28T09:37:06.971653Z node 2 :TX_PROXY ERROR: Actor# [2:6772:4723] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:07.013395Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6801:4738]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:07.013625Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:37:07.013715Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:6803:4740] 2025-03-28T09:37:07.013778Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:6803:4740] 2025-03-28T09:37:07.014076Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:6804:4741] 2025-03-28T09:37:07.014206Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:6804:4741], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:37:07.014253Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-28T09:37:07.014404Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:6803:4740], server id = [2:6804:4741], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:07.014460Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:37:07.014532Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:6801:4738], StatRequests.size() = 1 2025-03-28T09:37:07.104418Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODkxNDQ2OTUtYjllMDBhOTctYTJlNWUxMDYtOGU2Mzk0MDA=, TxId: 2025-03-28T09:37:07.104481Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODkxNDQ2OTUtYjllMDBhOTctYTJlNWUxMDYtOGU2Mzk0MDA=, TxId: 2025-03-28T09:37:07.105035Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:07.117537Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:37:07.117595Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:07.159889Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:37:07.159984Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:37:07.213063Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:6803:4740], schemeshard count = 1 2025-03-28T09:37:07.957704Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:07.957778Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-28T09:37:07.957816Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. 
Skip analyze for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:09.009974Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:37:09.020664Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:09.020788Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is data table. 2025-03-28T09:37:09.020814Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:09.021085Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:09.022806Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:37:09.031295Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2UzNTYwYzUtZmZiMWRmMDUtNTJiNjc3NjctODI0ZmIzNDQ=, TxId: 2025-03-28T09:37:09.031340Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2UzNTYwYzUtZmZiMWRmMDUtNTJiNjc3NjctODI0ZmIzNDQ=, TxId: 2025-03-28T09:37:09.031686Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:09.044300Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:09.044347Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2754:3245] 2025-03-28T09:37:09.044869Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6923:4814]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:09.048232Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:09.048288Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:37:09.052682Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:09.052734Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:37:09.052788Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:37:09.055096Z node 2 :STATISTICS ERROR: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2025-03-28T09:37:09.055349Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 2 >> DataShardScan::ScanFollowedByUpdate [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] >> DataShardTxOrder::ZigZag_oo [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardScan::ScanFollowedByUpdate [GOOD] Test command err: 2025-03-28T09:37:09.417987Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 
2025-03-28T09:37:09.423081Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:37:09.423466Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:37:09.423663Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:09.456166Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:37:09.510623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:09.510672Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:09.517482Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:09.518463Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:37:09.519595Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:37:09.519664Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:37:09.519694Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:37:09.519962Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:37:09.520011Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:37:09.520081Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:37:09.572798Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:37:09.596479Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:37:09.596620Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:37:09.596684Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:37:09.596711Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:37:09.596732Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:37:09.596756Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:09.596928Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:09.596968Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:09.597117Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:37:09.597166Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:37:09.597195Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:37:09.597245Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:09.597271Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:37:09.597293Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:37:09.597318Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:37:09.597343Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:37:09.597372Z node 1 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:09.597443Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:09.597464Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:09.597498Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:37:09.599110Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:37:09.599149Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:37:09.599201Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:37:09.599312Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:37:09.599341Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:37:09.599372Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:37:09.599421Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:37:09.599458Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:37:09.599485Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:37:09.599506Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:37:09.599734Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:37:09.599778Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:37:09.599814Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:37:09.599847Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:37:09.599899Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:37:09.599927Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:37:09.599962Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:37:09.600063Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:37:09.600102Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:37:09.611989Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:37:09.612052Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:37:09.612084Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:37:09.612117Z node 1 :TX_DATASHARD TRACE: Propose transaction 
complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:37:09.612184Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:37:09.612584Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:09.612623Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:09.612649Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:37:09.612697Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:37:09.612715Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:37:09.612790Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:37:09.612832Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:37:09.612862Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:37:09.612883Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:37:09.615475Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:37:09.615540Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:09.615682Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:09.615716Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:09.615769Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:37:09.615797Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:37:09.615818Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:37:09.615845Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:37:09.615866Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:37:09.615893Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:37:09.615945Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:37:09.615972Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:37:09.616003Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:37:09.616120Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:37:09.616153Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:37:09.616168Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 
executing on unit LoadTxDetails 2025-03-28T09:37:09.616181Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:37:09.616194Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:37:09.616231Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:37:09.616245Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:37:09.616266Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:37:09.616286Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:37:09.616344Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:37:09.616373Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:37:09.616399Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:37:09.616424Z node 1 :TX_D ... essageQuota: 9 2025-03-28T09:37:11.187629Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 9437186, TxId: 36, MessageQuota: 10 2025-03-28T09:37:11.187743Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 9437186, TxId: 36, Size: 22, Rows: 0, PendingAcks: 1, MessageQuota: 9 2025-03-28T09:37:11.187820Z node 1 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 9437184, TxId: 36, MessageQuota: 10 2025-03-28T09:37:11.187920Z node 1 :TX_DATASHARD DEBUG: Send response data ShardId: 9437184, TxId: 36, Size: 22, Rows: 0, PendingAcks: 1, MessageQuota: 9 2025-03-28T09:37:11.187971Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 9437185, TxId: 36, PendingAcks: 0 2025-03-28T09:37:11.188001Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 9437185, TxId: 36, MessageQuota: 9 2025-03-28T09:37:11.188214Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 9437186, TxId: 36, PendingAcks: 0 2025-03-28T09:37:11.188264Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 9437186, TxId: 36, MessageQuota: 9 2025-03-28T09:37:11.188408Z node 1 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 9437184, TxId: 36, PendingAcks: 0 2025-03-28T09:37:11.188441Z node 1 :TX_DATASHARD DEBUG: Finish scan ShardId: 9437184, TxId: 36, MessageQuota: 9 2025-03-28T09:37:11.188640Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 9437185 2025-03-28T09:37:11.188663Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 36, at: 9437185 2025-03-28T09:37:11.188718Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 9437186 2025-03-28T09:37:11.188731Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 36, at: 9437186 2025-03-28T09:37:11.188753Z node 1 :TX_DATASHARD DEBUG: FullScan complete at 9437184 2025-03-28T09:37:11.188770Z node 1 :TX_DATASHARD DEBUG: Found op: cookie: 36, at: 9437184 2025-03-28T09:37:11.188856Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:455:2397], Recipient [1:455:2397]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:11.188882Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:11.188910Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2025-03-28T09:37:11.188929Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 
1 2025-03-28T09:37:11.188947Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000006:36] at 9437186 for ReadTableScan 2025-03-28T09:37:11.188962Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437186 on unit ReadTableScan 2025-03-28T09:37:11.188983Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [1000006:36] at 9437186 error: , IsFatalError: 0 2025-03-28T09:37:11.189013Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437186 is Executed 2025-03-28T09:37:11.189030Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437186 executing on unit ReadTableScan 2025-03-28T09:37:11.189047Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437186 to execution unit CompleteOperation 2025-03-28T09:37:11.189091Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437186 on unit CompleteOperation 2025-03-28T09:37:11.189218Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437186 is DelayComplete 2025-03-28T09:37:11.189237Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437186 executing on unit CompleteOperation 2025-03-28T09:37:11.189259Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437186 to execution unit CompletedOperations 2025-03-28T09:37:11.189280Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437186 on unit CompletedOperations 2025-03-28T09:37:11.189298Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437186 is Executed 2025-03-28T09:37:11.189309Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437186 executing on unit CompletedOperations 2025-03-28T09:37:11.189324Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000006:36] at 9437186 has finished 2025-03-28T09:37:11.189352Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:11.189373Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-03-28T09:37:11.189404Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-03-28T09:37:11.189429Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-03-28T09:37:11.189523Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:235:2228], Recipient [1:235:2228]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:11.189553Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:11.189591Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:37:11.189622Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:37:11.189640Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000006:36] at 9437184 for ReadTableScan 2025-03-28T09:37:11.189654Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437184 on unit ReadTableScan 2025-03-28T09:37:11.189677Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [1000006:36] at 9437184 error: , IsFatalError: 0 2025-03-28T09:37:11.189699Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437184 is Executed 2025-03-28T09:37:11.189714Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437184 executing on unit ReadTableScan 2025-03-28T09:37:11.189727Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437184 to execution unit CompleteOperation 2025-03-28T09:37:11.189742Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437184 on unit CompleteOperation 2025-03-28T09:37:11.189842Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437184 is DelayComplete 2025-03-28T09:37:11.189857Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437184 executing on unit CompleteOperation 2025-03-28T09:37:11.189877Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437184 to execution unit CompletedOperations 2025-03-28T09:37:11.189906Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437184 on unit CompletedOperations 2025-03-28T09:37:11.189926Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437184 is Executed 2025-03-28T09:37:11.189939Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437184 executing on unit CompletedOperations 2025-03-28T09:37:11.189952Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000006:36] at 9437184 has finished 2025-03-28T09:37:11.189966Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:11.189979Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:37:11.189992Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:37:11.190006Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:37:11.190095Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:347:2314], Recipient [1:347:2314]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:11.190115Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:11.190151Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-03-28T09:37:11.190168Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:37:11.190192Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000006:36] at 9437185 for ReadTableScan 2025-03-28T09:37:11.190213Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437185 on unit ReadTableScan 2025-03-28T09:37:11.190240Z node 1 :TX_DATASHARD TRACE: ReadTable scan complete for [1000006:36] at 9437185 error: , IsFatalError: 0 2025-03-28T09:37:11.190258Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437185 is Executed 2025-03-28T09:37:11.190274Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437185 executing on unit ReadTableScan 2025-03-28T09:37:11.190287Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437185 to execution unit CompleteOperation 2025-03-28T09:37:11.190302Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437185 on unit CompleteOperation 2025-03-28T09:37:11.190392Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437185 is DelayComplete 2025-03-28T09:37:11.190408Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000006:36] at 9437185 executing on unit CompleteOperation 2025-03-28T09:37:11.190425Z node 1 :TX_DATASHARD TRACE: Add [1000006:36] at 9437185 to execution unit CompletedOperations 2025-03-28T09:37:11.190446Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000006:36] at 9437185 on unit CompletedOperations 2025-03-28T09:37:11.190482Z node 1 :TX_DATASHARD TRACE: Execution status for [1000006:36] at 9437185 is Executed 2025-03-28T09:37:11.190498Z node 1 :TX_DATASHARD TRACE: Advance 
execution plan for [1000006:36] at 9437185 executing on unit CompletedOperations 2025-03-28T09:37:11.190512Z node 1 :TX_DATASHARD TRACE: Execution plan for [1000006:36] at 9437185 has finished 2025-03-28T09:37:11.190537Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:11.190562Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-28T09:37:11.190579Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-03-28T09:37:11.190593Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-03-28T09:37:11.202626Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:37:11.202698Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:37:11.202720Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000006:36] at 9437186 on unit CompleteOperation 2025-03-28T09:37:11.202782Z node 1 :TX_DATASHARD DEBUG: Complete [1000006 : 36] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 3 ms, propose latency: 4 ms 2025-03-28T09:37:11.202816Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:37:11.202920Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:11.202937Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:11.202951Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000006:36] at 9437184 on unit CompleteOperation 2025-03-28T09:37:11.202971Z node 1 :TX_DATASHARD DEBUG: Complete [1000006 : 36] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 3 ms, propose latency: 4 ms 2025-03-28T09:37:11.202986Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:11.203063Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-28T09:37:11.203078Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-28T09:37:11.203093Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000006:36] at 9437185 on unit CompleteOperation 2025-03-28T09:37:11.203130Z node 1 :TX_DATASHARD DEBUG: Complete [1000006 : 36] from 9437185 at tablet 9437185 send result to client [1:99:2134], exec latency: 3 ms, propose latency: 4 ms 2025-03-28T09:37:11.203149Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 >> DataShardTxOrder::ForceOnlineBetweenOnline ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] Test command err: 2025-03-28T09:37:07.514839Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:37:07.519936Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:37:07.520293Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:37:07.520468Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:07.552878Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:37:07.607000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to 
console configs 2025-03-28T09:37:07.607053Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:07.613835Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:07.614879Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:37:07.616127Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:37:07.616177Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:37:07.616233Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:37:07.616507Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:37:07.616561Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:37:07.616606Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:37:07.669208Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:37:07.690078Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:37:07.690235Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:37:07.690315Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:37:07.690339Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:37:07.690367Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:37:07.690402Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:07.690579Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:07.690618Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:07.690789Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:37:07.690866Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:37:07.690904Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:37:07.690948Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:07.690975Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:37:07.690999Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:37:07.691022Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:37:07.691051Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:37:07.691082Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:07.691152Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:07.691179Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:07.691212Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:37:07.692836Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], 
Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:37:07.692874Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:37:07.692930Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:37:07.693018Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:37:07.693049Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:37:07.693079Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:37:07.693114Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:37:07.693168Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:37:07.693196Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:37:07.693219Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:37:07.693433Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:37:07.693466Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:37:07.693496Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:37:07.693518Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:37:07.693555Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:37:07.693582Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:37:07.693607Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:37:07.693635Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:37:07.693656Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:37:07.705132Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:37:07.705190Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:37:07.705225Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:37:07.705257Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:37:07.705319Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:37:07.705714Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:07.705751Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:07.705781Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# 
[1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:37:07.705832Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:37:07.705853Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:37:07.705929Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:37:07.705968Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:37:07.706003Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:37:07.706031Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:37:07.708599Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:37:07.708650Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:07.708856Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:07.708891Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:07.708929Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:37:07.708959Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:37:07.708983Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:37:07.709013Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:37:07.709041Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:37:07.709070Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:37:07.709119Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:37:07.709161Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:37:07.709187Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:37:07.709305Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:37:07.709331Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:37:07.709346Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:37:07.709360Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:37:07.709373Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:37:07.709408Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:37:07.709425Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:37:07.709453Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit 
BuildAndWaitDependencies 2025-03-28T09:37:07.709484Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:37:07.709543Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:37:07.709570Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:37:07.709596Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:37:07.709627Z node 1 :TX_DATA ... BUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:11.457030Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:37:11.457233Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 116 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 38} 2025-03-28T09:37:11.457272Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.457299Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 116 2025-03-28T09:37:11.457363Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 119 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 39} 2025-03-28T09:37:11.457378Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.457405Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 119 2025-03-28T09:37:11.457455Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 122 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 40} 2025-03-28T09:37:11.457469Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.457484Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 122 2025-03-28T09:37:11.457532Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-03-28T09:37:11.457549Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.457562Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-03-28T09:37:11.457594Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-28T09:37:11.457610Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.457623Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-03-28T09:37:11.457673Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 149 
TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-03-28T09:37:11.457689Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.457716Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-03-28T09:37:11.457755Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 125 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 41} 2025-03-28T09:37:11.457770Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.457789Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 125 2025-03-28T09:37:11.457844Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-28T09:37:11.457858Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.457870Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2025-03-28T09:37:11.457922Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-03-28T09:37:11.457938Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.457951Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-03-28T09:37:11.457999Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-03-28T09:37:11.458022Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.458051Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-03-28T09:37:11.458102Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-03-28T09:37:11.458117Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.458128Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-03-28T09:37:11.458171Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-28T09:37:11.458204Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.458225Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-03-28T09:37:11.458326Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient 
[1:235:2228]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-03-28T09:37:11.458349Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.458370Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-03-28T09:37:11.458417Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:11.458452Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2025-03-28T09:37:11.458512Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-03-28T09:37:11.458563Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-03-28T09:37:11.458605Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:11.458703Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:11.458720Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2025-03-28T09:37:11.458741Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-03-28T09:37:11.458766Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-03-28T09:37:11.458781Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:11.458840Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:11.458853Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2025-03-28T09:37:11.458891Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-03-28T09:37:11.458918Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-03-28T09:37:11.458934Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:11.459002Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:11.459024Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2025-03-28T09:37:11.459051Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:99:2134], exec latency: 2 ms, propose latency: 3 ms 2025-03-28T09:37:11.459084Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-03-28T09:37:11.459104Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:11.459249Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 
149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2025-03-28T09:37:11.459280Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.459300Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2025-03-28T09:37:11.459365Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-03-28T09:37:11.459379Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.459403Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-03-28T09:37:11.459458Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-03-28T09:37:11.459471Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.459483Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-03-28T09:37:11.459560Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-03-28T09:37:11.459586Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:11.459604Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] Test command err: 2025-03-28T09:37:04.888842Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:04.889124Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:04.889158Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015fa/r3tmp/tmpx3WVMU/pdisk_1.dat 2025-03-28T09:37:05.155229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:37:05.178702Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:05.214718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:05.214809Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:05.225832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:05.305889Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:05.591901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:37:05.834738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:925:2736], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:05.834838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:934:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:05.834892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:05.838283Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:37:05.985830Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:939:2744], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:37:06.049455Z node 1 :TX_PROXY ERROR: Actor# [1:1000:2786] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:06.271737Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1sf99dv3xt70hn152b8ea, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzJmZTlmOWEtZDZjNGExNGItZDMzZmRjZjctZTFkOWIwMzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:06.336398Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1sfqna29vqfr34fzx6kb5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njg3ZDM3MWMtM2RkMjAyZjItNWFjOTg0NGMtY2UxOWVlNGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:06.593374Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1sfsr2eswvt06dcs4868v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWRiNDAxMzgtOGZiNzNlZTEtN2E1MzY2N2QtYzYxNDNlMTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets 2025-03-28T09:37:06.673579Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1sg1e3s3k6cr8g7gfmc3q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWRiNDAxMzgtOGZiNzNlZTEtN2E1MzY2N2QtYzYxNDNlMTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... sending immediate upsert ... waiting for immediate propose 2025-03-28T09:37:06.739151Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe1sg40akmf1kf0qg9m3nz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmZmYzIwNjgtZjQwNDc3NzUtOTcwMzY5ZS1jZGQzN2YwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... immediate upsert is blocked 2025-03-28T09:37:06.741094Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:1175:2842] TxId: 281474976715665. Ctx: { TraceId: 01jqe1sg40akmf1kf0qg9m3nz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmZmYzIwNjgtZjQwNDc3NzUtOTcwMzY5ZS1jZGQzN2YwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. OVERLOADED: [WRONG_SHARD_STATE] Rejecting immediate tx 281474976715665 because datashard 72075186224037889 is restarting; 2025-03-28T09:37:06.749053Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NmZmYzIwNjgtZjQwNDc3NzUtOTcwMzY5ZS1jZGQzN2YwYQ==, ActorId: [1:1070:2842], ActorState: ExecuteState, TraceId: 01jqe1sg40akmf1kf0qg9m3nz2, Create QueryResponse for error on request, msg: 2025-03-28T09:37:06.750486Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWRiNDAxMzgtOGZiNzNlZTEtN2E1MzY2N2QtYzYxNDNlMTk=, ActorId: [1:1072:2844], ActorState: ExecuteState, TraceId: 01jqe1sg1e3s3k6cr8g7gfmc3q, Create QueryResponse for error on request, msg: 2025-03-28T09:37:06.751090Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. 
Ctx: { TraceId: 01jqe1sg40akmf1kf0qg9m3nz2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmZmYzIwNjgtZjQwNDc3NzUtOTcwMzY5ZS1jZGQzN2YwYQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:06.762827Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqe1sg1e3s3k6cr8g7gfmc3q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWRiNDAxMzgtOGZiNzNlZTEtN2E1MzY2N2QtYzYxNDNlMTk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:07.055014Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqe1sgb88ghb1rqfkxyvasfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmM3ZjJkMjItYzA2YzM1NTItZWM2MTZlNTAtNjc3OTNlYWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } } 2025-03-28T09:37:09.467420Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:09.467642Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:09.467683Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015fa/r3tmp/tmpiRWIoJ/pdisk_1.dat 2025-03-28T09:37:09.682891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:37:09.706985Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:09.742158Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:09.742315Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:09.753473Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:09.832128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:10.068371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:37:10.322725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:832:2684], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:10.322821Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:842:2689], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:10.322881Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:10.328350Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:37:10.480926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:846:2692], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:37:10.515892Z node 2 :TX_PROXY ERROR: Actor# [2:906:2733] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:10.570564Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1skng0jv99atdes3vheeg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZjMwZDc2YWItM2FhMTkyZGYtYzM0MmEyNjktNGJiZTBkZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:10.638991Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1skxtc7zaqbfrf9046px5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDk3ZGMwMWYtNDhkN2JhMTQtYWJlMjExMzAtYWYzYjU4Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... waiting for readsets 2025-03-28T09:37:11.154389Z node 2 :KQP_COMPUTE WARN: TxId: 281474976715664, task: 1, CA Id [2:1004:2813]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2025-03-28T09:37:11.154624Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2ZkMThjOC1iNDFjM2Q1NC02NzYzZGRkLTUwYTEyOGU1, ActorId: [2:960:2774], ActorState: ExecuteState, TraceId: 01jqe1skzz1gj254jjpmh0we46, Create QueryResponse for error on request, msg: { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo [GOOD] Test command err: 2025-03-28T09:36:55.930470Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:55.935637Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:55.936026Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:36:55.936219Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:55.968837Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:56.020495Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:36:56.020534Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:56.028171Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:56.029401Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:36:56.031048Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:36:56.031135Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:36:56.031205Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:36:56.031596Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:36:56.031696Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:36:56.031775Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:36:56.082106Z node 1 :TX_DATASHARD 
DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:36:56.106469Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:36:56.106626Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:36:56.106734Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:36:56.106766Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:36:56.106792Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:36:56.106816Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:56.106999Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:56.107046Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:56.107210Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:36:56.107264Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:36:56.107305Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:56.107353Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:56.107378Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:36:56.107399Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:36:56.107421Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:36:56.107453Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:36:56.107484Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:56.107543Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:56.107578Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:56.107622Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:36:56.112751Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\004\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:36:56.112797Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:36:56.112875Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:36:56.112968Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:36:56.112996Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:36:56.113034Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at 
tablet 9437184 2025-03-28T09:36:56.113079Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:56.113125Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:36:56.113151Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:36:56.113174Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:56.113376Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:36:56.113402Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:36:56.113426Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:36:56.113447Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:56.113481Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:36:56.113497Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:36:56.113525Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:36:56.113557Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:56.113610Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:36:56.125207Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:36:56.125264Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:56.125290Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:56.125316Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:36:56.125365Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:36:56.125759Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:56.125798Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:56.125826Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:36:56.125873Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:36:56.125890Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:36:56.125977Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:56.126016Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:56.126042Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:36:56.126092Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:36:56.128526Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 
1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:36:56.128573Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:56.128736Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:56.128761Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:56.128809Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:56.128842Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:36:56.128864Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:56.128890Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:36:56.128917Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:36:56.128944Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:56.128984Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:36:56.129012Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:36:56.129034Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:36:56.129168Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:36:56.129193Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:56.129206Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:36:56.129231Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:36:56.129245Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:36:56.129285Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:56.129300Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:36:56.129320Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:56.129341Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:36:56.129390Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:36:56.129414Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:36:56.129436Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:36:56.129460Z node 1 :TX_DATA ... 
E: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2025-03-28T09:37:11.640186Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2025-03-28T09:37:11.640205Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2025-03-28T09:37:11.640355Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is DelayComplete 2025-03-28T09:37:11.640380Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2025-03-28T09:37:11.640415Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2025-03-28T09:37:11.640448Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2025-03-28T09:37:11.640474Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437184 is Executed 2025-03-28T09:37:11.640491Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2025-03-28T09:37:11.640512Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437184 has finished 2025-03-28T09:37:11.640546Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:11.640576Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:37:11.640614Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:37:11.640646Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:37:11.640813Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [6:99:2134], Recipient [6:347:2314]: {TEvPlanStep step# 1000016 MediatorId# 0 TabletID 9437185} 2025-03-28T09:37:11.640860Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:37:11.640966Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit WaitForPlan 2025-03-28T09:37:11.641005Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:37:11.641036Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit WaitForPlan 2025-03-28T09:37:11.641070Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PlanQueue 2025-03-28T09:37:11.641213Z node 6 :TX_DATASHARD DEBUG: Planned transaction txId 45 at step 1000016 at tablet 9437185 { Transactions { TxId: 45 AckTo { RawX1: 99 RawX2: 25769805910 } } Step: 1000016 MediatorID: 0 TabletID: 9437185 } 2025-03-28T09:37:11.641250Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2025-03-28T09:37:11.641402Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [6:347:2314], Recipient [6:347:2314]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:11.641426Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:11.641456Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437185 2025-03-28T09:37:11.641479Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:37:11.641499Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-28T09:37:11.641521Z node 6 :TX_DATASHARD DEBUG: Found ready operation [1000016:45] in 
PlanQueue unit at 9437185 2025-03-28T09:37:11.641542Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2025-03-28T09:37:11.641563Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:37:11.641582Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2025-03-28T09:37:11.641604Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2025-03-28T09:37:11.641622Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2025-03-28T09:37:11.642098Z node 6 :TX_DATASHARD DEBUG: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2025-03-28T09:37:11.642127Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:37:11.642147Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2025-03-28T09:37:11.642164Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2025-03-28T09:37:11.642184Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2025-03-28T09:37:11.642210Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:37:11.642226Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2025-03-28T09:37:11.642243Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2025-03-28T09:37:11.642260Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2025-03-28T09:37:11.642290Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically complete end at 9437185 2025-03-28T09:37:11.642312Z node 6 :TX_DATASHARD TRACE: Operation [1000016:45] is the new logically incomplete end at 9437185 2025-03-28T09:37:11.642335Z node 6 :TX_DATASHARD TRACE: Activated operation [1000016:45] at 9437185 2025-03-28T09:37:11.642361Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:37:11.642378Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2025-03-28T09:37:11.642395Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2025-03-28T09:37:11.642411Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2025-03-28T09:37:11.642437Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:37:11.642455Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2025-03-28T09:37:11.642473Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2025-03-28T09:37:11.642490Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2025-03-28T09:37:11.642507Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:37:11.642523Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2025-03-28T09:37:11.642538Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2025-03-28T09:37:11.642554Z node 6 :TX_DATASHARD TRACE: 
Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2025-03-28T09:37:11.642573Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:37:11.642587Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2025-03-28T09:37:11.642602Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2025-03-28T09:37:11.642618Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2025-03-28T09:37:11.642634Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:37:11.642648Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2025-03-28T09:37:11.642665Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2025-03-28T09:37:11.642705Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2025-03-28T09:37:11.642958Z node 6 :TX_DATASHARD TRACE: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2025-03-28T09:37:11.643015Z node 6 :TX_DATASHARD TRACE: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:37:11.643061Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:37:11.643089Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2025-03-28T09:37:11.643117Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2025-03-28T09:37:11.643147Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2025-03-28T09:37:11.643332Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is DelayComplete 2025-03-28T09:37:11.643370Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2025-03-28T09:37:11.643400Z node 6 :TX_DATASHARD TRACE: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2025-03-28T09:37:11.643429Z node 6 :TX_DATASHARD TRACE: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2025-03-28T09:37:11.643466Z node 6 :TX_DATASHARD TRACE: Execution status for [1000016:45] at 9437185 is Executed 2025-03-28T09:37:11.643496Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2025-03-28T09:37:11.643546Z node 6 :TX_DATASHARD TRACE: Execution plan for [1000016:45] at 9437185 has finished 2025-03-28T09:37:11.643577Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:11.643605Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437185 2025-03-28T09:37:11.643633Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437185 has no attached operations 2025-03-28T09:37:11.643661Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437185 2025-03-28T09:37:11.656635Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2025-03-28T09:37:11.656702Z node 6 :TX_DATASHARD DEBUG: Sending 
'{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2025-03-28T09:37:11.656767Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:11.656809Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2025-03-28T09:37:11.656875Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [6:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:11.656920Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:11.657172Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2025-03-28T09:37:11.657203Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2025-03-28T09:37:11.657242Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437185 2025-03-28T09:37:11.657266Z node 6 :TX_DATASHARD TRACE: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2025-03-28T09:37:11.657299Z node 6 :TX_DATASHARD DEBUG: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [6:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:11.657327Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] Test command err: 2025-03-28T09:37:04.543889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:04.544188Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:04.544234Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001606/r3tmp/tmpANBWdZ/pdisk_1.dat 2025-03-28T09:37:04.835612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:37:04.866615Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:04.903112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:04.903230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:04.914350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:04.993136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:05.264768Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:37:05.507518Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:05.507603Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:839:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:05.507655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:05.510973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:37:05.658052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:37:05.715832Z node 1 :TX_PROXY ERROR: Actor# [1:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:05.910964Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1sez266yx278pr8wb68dz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzlmNjcwODMtY2M0OWJiZTAtY2ZlYmY3MDctNDE0MDZkOGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:05.971347Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1sfca3q6w7ycvh4wtfr8z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTFlMDViMzktZGYxNTNiMjUtZmZlNzA0Y2MtNDRiY2I0OTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:06.452832Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1sfkvdc49w9p76gemy4gq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQyYmI1ZTQtODBjMTYyNDUtNzdmODJjOGMtYmI5NDk0NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-03-28T09:37:06.784335Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1sg5ccc19jw47zdvd04cc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ODUzYThmZS05NTU0ZWQ5Ny00Yjc3NWI1ZS03OTdkYzBlNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:06.859061Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe1sg7d4mm4pfs0p4kpm9d9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQyYmI1ZTQtODBjMTYyNDUtNzdmODJjOGMtYmI5NDk0NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:06.929381Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqe1sg9mfy0bf7xnhmy329ms, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQyYmI1ZTQtODBjMTYyNDUtNzdmODJjOGMtYmI5NDk0NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:06.985650Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDQyYmI1ZTQtODBjMTYyNDUtNzdmODJjOGMtYmI5NDk0NzE=, ActorId: [1:967:2781], ActorState: ExecuteState, TraceId: 01jqe1sgbmay0rvqm085838dcp, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-03-28T09:37:06.997114Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqe1sgbmay0rvqm085838dcp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDQyYmI1ZTQtODBjMTYyNDUtNzdmODJjOGMtYmI5NDk0NzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:09.221443Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:09.221688Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:09.221725Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001606/r3tmp/tmp6VQEpw/pdisk_1.dat 2025-03-28T09:37:09.444348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:37:09.464512Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:09.499420Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:09.499505Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:09.510494Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:09.588772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:09.824393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:37:10.071475Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2682], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:10.071581Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:839:2687], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:10.071645Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:10.076253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:37:10.226244Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:844:2690], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:37:10.261344Z node 2 :TX_PROXY ERROR: Actor# [2:904:2731] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:10.308940Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1skdn72c26efg0n7m8z90, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=N2Q3ZTI0M2MtY2FjNjg1NWQtNjczYjU2ZjgtMjdiNzVmMmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:10.370676Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1sknk9gt1g0zqfems62sf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWUzYzBkYWYtODYzMmYxMGUtN2I5NjdlMS00NTVmYzAzZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the first select 2025-03-28T09:37:10.804752Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1skx64jp68g1qzg4n11yf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVmMThkZi1kOTAzZDBjNS1hMjM2NDU5ZS1iMGY2ZWY3, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2025-03-28T09:37:11.022279Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1sm52dz44kcbb50y3m1ww, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTg1YjBiOWMtZjBiYTliMTktNjViZjRhZTItODk2OTc1NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } ... waiting for commit read sets 2025-03-28T09:37:11.095679Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe1smbq6nwx9x8jahydxz4e, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MTg1YjBiOWMtZjBiYTliMTktNjViZjRhZTItODk2OTc1NjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... captured readset ... captured readset ... performing an upsert 2025-03-28T09:37:11.467481Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqe1smq6853ycj6nvts2frvr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NWJjNmY0MmUtMTM0MjhlYWUtNWU2NGY5Mi1iY2Y0N2YyOA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the second select 2025-03-28T09:37:11.541607Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqe1smss6vqpa9340p9z7mgv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVmMThkZi1kOTAzZDBjNS1hMjM2NDU5ZS1iMGY2ZWY3, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... performing the third select 2025-03-28T09:37:11.597090Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqe1smvrb113xff7g1bs5k30, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVmMThkZi1kOTAzZDBjNS1hMjM2NDU5ZS1iMGY2ZWY3, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ... 
performing the last upsert and commit 2025-03-28T09:37:11.653875Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NmVmMThkZi1kOTAzZDBjNS1hMjM2NDU5ZS1iMGY2ZWY3, ActorId: [2:967:2781], ActorState: ExecuteState, TraceId: 01jqe1smxge53wcv03252dxzcc, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-03-28T09:37:11.665228Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715669. Ctx: { TraceId: 01jqe1smxge53wcv03252dxzcc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NmVmMThkZi1kOTAzZDBjNS1hMjM2NDU5ZS1iMGY2ZWY3, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> YdbIndexTable::MultiShardTableOneIndexDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> DataShardOutOfOrder::TestPlannedTimeoutSplit >> DataShardOutOfOrder::UncommittedReadSetAck >> TColumnShardTestSchema::ColdTiersWithStat >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] >> KqpEffects::InsertAbort_Params_Conflict+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] Test command err: 2025-03-28T09:37:12.466841Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:37:12.472960Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:37:12.473341Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:37:12.473509Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:12.506151Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:37:12.560382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:12.560425Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:12.567357Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:12.568309Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:37:12.569415Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:37:12.569460Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:37:12.569507Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:37:12.569761Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:37:12.569811Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:37:12.569884Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:37:12.617826Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:37:12.639908Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:37:12.640054Z node 1 :TX_DATASHARD DEBUG: 9437184 not 
sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:37:12.640119Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:37:12.640141Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:37:12.640163Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:37:12.640194Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:12.640376Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:12.640409Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:12.640565Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:37:12.640633Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:37:12.640673Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:37:12.640738Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:12.640764Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:37:12.640789Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:37:12.640815Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:37:12.640854Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:37:12.640906Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:12.641038Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:12.641072Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:12.641127Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:37:12.642915Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:37:12.642953Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:37:12.643007Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:37:12.643102Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:37:12.643129Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:37:12.643162Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:37:12.643200Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:37:12.643236Z node 1 :TX_DATASHARD TRACE: Advance execution plan for 
[0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:37:12.643260Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:37:12.643281Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:37:12.643490Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:37:12.643510Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:37:12.643563Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:37:12.643584Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:37:12.643622Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:37:12.643648Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:37:12.643673Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:37:12.643701Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:37:12.643724Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:37:12.655572Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:37:12.655636Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:37:12.655673Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:37:12.655713Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:37:12.655802Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:37:12.656328Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:12.656390Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:12.656428Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:37:12.656504Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:37:12.656531Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:37:12.656645Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:37:12.656701Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:37:12.656741Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:37:12.656772Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:37:12.660649Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:37:12.660708Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 9437184 2025-03-28T09:37:12.660924Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:12.660970Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:12.661022Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:37:12.661062Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:37:12.661093Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:37:12.661128Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:37:12.661163Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:37:12.661205Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:37:12.661265Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:37:12.661305Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:37:12.661347Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:37:12.661524Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:37:12.661561Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:37:12.661586Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:37:12.661609Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:37:12.661632Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:37:12.661693Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:37:12.661717Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:37:12.661745Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:37:12.661777Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:37:12.661853Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:37:12.661889Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:37:12.661918Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:37:12.661960Z node 1 :TX_DATA ... 
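[Editor's note] In the trace, TEvPlanStep turns a prepared transaction into a planned operation keyed as [step:txId], which waits in PlanQueue until TTxProgressTransaction picks it up ("Found ready operation [1000001:1] in PlanQueue unit"). The sketch below is a hedged approximation of that ordering, assuming a simple queue keyed by (step, txId); TPlanQueue and its methods are hypothetical names, not the YDB API.

    // Illustrative sketch only (not YDB source): approximates the PlanQueue
    // behaviour seen in the trace -- planned operations are ordered by
    // (step, txId) and consumed by the progress transaction in that order.
    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <optional>
    #include <utility>

    using TOpId = std::pair<uint64_t, uint64_t>;  // (step, txId), e.g. [1000001:1]

    class TPlanQueue {
        std::map<TOpId, bool> Ops;  // ordered map => earliest planned op first
    public:
        void Plan(uint64_t step, uint64_t txId) { Ops[{step, txId}] = true; }
        // "Found ready operation ... in PlanQueue" / "has no ready operations"
        std::optional<TOpId> Next() {
            if (Ops.empty())
                return std::nullopt;
            auto it = Ops.begin();
            TOpId id = it->first;
            Ops.erase(it);
            return id;
        }
    };

    int main() {
        TPlanQueue q;
        q.Plan(1000001, 1);  // mirrors "Planned transaction txId 1 at step 1000001"
        while (auto op = q.Next())
            std::cout << "execute [" << op->first << ":" << op->second << "]\n";
    }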
1000005:149] at 9437186 on unit CompleteOperation 2025-03-28T09:37:17.179511Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:17.179536Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:37:17.179639Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:37:17.179895Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 128 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 42} 2025-03-28T09:37:17.179924Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:17.179949Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 128 2025-03-28T09:37:17.180028Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2025-03-28T09:37:17.180056Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:17.180076Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2025-03-28T09:37:17.180136Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 131 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 43} 2025-03-28T09:37:17.180161Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:17.180196Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 131 2025-03-28T09:37:17.180244Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2025-03-28T09:37:17.180264Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:17.180283Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2025-03-28T09:37:17.180330Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:235:2228], Recipient [1:456:2398]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-03-28T09:37:17.180353Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:37:17.180381Z node 1 :TX_DATASHARD DEBUG: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2025-03-28T09:37:17.180448Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2025-03-28T09:37:17.180494Z node 1 :TX_DATASHARD TRACE: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2025-03-28T09:37:17.180555Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 9437186 2025-03-28T09:37:17.180670Z node 1 :TX_DATASHARD TRACE: 
StateWork, received event# 269287938, Sender [1:235:2228], Recipient [1:347:2314]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2025-03-28T09:37:17.180697Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:17.180717Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2025-03-28T09:37:17.180791Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:456:2398], Recipient [1:456:2398]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:17.180821Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:17.180864Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437186 2025-03-28T09:37:17.180907Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:37:17.180936Z node 1 :TX_DATASHARD DEBUG: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2025-03-28T09:37:17.180958Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2025-03-28T09:37:17.180985Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-03-28T09:37:17.181032Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2025-03-28T09:37:17.181058Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2025-03-28T09:37:17.181080Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2025-03-28T09:37:17.181574Z node 1 :TX_DATASHARD TRACE: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2025-03-28T09:37:17.181622Z node 1 :TX_DATASHARD TRACE: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2025-03-28T09:37:17.181679Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2025-03-28T09:37:17.181705Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2025-03-28T09:37:17.181729Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2025-03-28T09:37:17.181750Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2025-03-28T09:37:17.181916Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is DelayComplete 2025-03-28T09:37:17.181943Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2025-03-28T09:37:17.181974Z node 1 :TX_DATASHARD TRACE: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2025-03-28T09:37:17.182014Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2025-03-28T09:37:17.182044Z node 1 :TX_DATASHARD TRACE: Execution status for [1000005:152] at 9437186 is Executed 2025-03-28T09:37:17.182063Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2025-03-28T09:37:17.182084Z 
node 1 :TX_DATASHARD TRACE: Execution plan for [1000005:152] at 9437186 has finished 2025-03-28T09:37:17.182112Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:17.182131Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437186 2025-03-28T09:37:17.182153Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437186 has no attached operations 2025-03-28T09:37:17.182176Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437186 2025-03-28T09:37:17.182365Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2025-03-28T09:37:17.182402Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:17.182432Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2025-03-28T09:37:17.182507Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2025-03-28T09:37:17.182529Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:17.182548Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2025-03-28T09:37:17.182639Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2025-03-28T09:37:17.182662Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:17.182702Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2025-03-28T09:37:17.182770Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2025-03-28T09:37:17.182798Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:17.182818Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2025-03-28T09:37:17.182894Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2025-03-28T09:37:17.182935Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:17.182972Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2025-03-28T09:37:17.183044Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2025-03-28T09:37:17.183066Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:17.183084Z node 1 :TX_DATASHARD DEBUG: 
Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2025-03-28T09:37:17.197189Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437186 2025-03-28T09:37:17.197247Z node 1 :TX_DATASHARD TRACE: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2025-03-28T09:37:17.197300Z node 1 :TX_DATASHARD DEBUG: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:99:2134], exec latency: 1 ms, propose latency: 3 ms 2025-03-28T09:37:17.197382Z node 1 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-28T09:37:17.197438Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2025-03-28T09:37:17.197700Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [1:456:2398], Recipient [1:235:2228]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2025-03-28T09:37:17.197735Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:17.197765Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution [GOOD] >> DataShardOutOfOrder::TestPlannedTimeoutSplit [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeReqDistribution [GOOD] Test command err: 2025-03-28T09:34:58.681426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:58.681613Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:58.681646Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b3a/r3tmp/tmpzrZDPx/pdisk_1.dat 2025-03-28T09:34:59.029830Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12466, node 1 2025-03-28T09:34:59.269065Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:59.269117Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:59.269145Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:59.269550Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:59.271751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:59.356010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.356140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.369190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22450 2025-03-28T09:34:59.895122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:02.465543Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:02.491661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:02.491744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:02.541273Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:02.543344Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:02.787965Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.788659Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.789247Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.789453Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.789708Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.789799Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.789889Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.789980Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.790116Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.948193Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:02.948304Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:02.961883Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:03.080605Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:03.114016Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:03.114081Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:03.137648Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:03.139194Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:03.139372Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:03.139428Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:03.139479Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:03.139521Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:03.139564Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:03.139606Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:03.140514Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:03.171415Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:03.171590Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1870:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:03.177139Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2606] 2025-03-28T09:35:03.184211Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1924:2623] 2025-03-28T09:35:03.184446Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1924:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:03.188189Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:03.203253Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:03.203307Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:03.203377Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:03.218221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:03.225800Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:03.225949Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:03.377967Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:03.592343Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:03.690164Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:04.568886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3072], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:04.568993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:04.584600Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:04.668762Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2322:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:04.668964Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2322:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:04.669210Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2322:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:04.669334Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2322:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:04.669431Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2322:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:04.669534Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2322:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:04.669622Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2322:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:04.669738Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2322:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:04.669871Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2322:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:04.669949Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2322:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:04.670040Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2322:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:04.670121Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2322:2847];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:04.690182Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:04.690264Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:37:14.681923Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7124:5235], server id = [2:7125:5236], tablet id = 72075186224037894 2025-03-28T09:37:14.681980Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7250:5312] 2025-03-28T09:37:14.682015Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7250:5312] 2025-03-28T09:37:14.713026Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:37:14.713111Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:37:14.713603Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:37:14.714185Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:37:14.714428Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-03-28T09:37:14.714470Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-03-28T09:37:14.714503Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-03-28T09:37:14.714551Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-03-28T09:37:14.714580Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1743154634665812 2025-03-28T09:37:14.714608Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-03-28T09:37:14.714674Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-03-28T09:37:14.714738Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:37:14.714820Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-03-28T09:37:14.714891Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-03-28T09:37:14.714947Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-03-28T09:37:14.714991Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:37:14.715097Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:14.715674Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:37:14.716207Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:14.716260Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:14.716363Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:37:14.717262Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:14.717313Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:14.718534Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:14.780146Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:14.780365Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:37:14.780822Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7297:5343], server id = [2:7298:5344], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:14.780920Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7297:5343], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:14.784004Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:14.784086Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:14.784216Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:14.784348Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:14.784567Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:14.786729Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7297:5343], server id = [2:7298:5344], tablet id = 72075186224037899 2025-03-28T09:37:14.786772Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:14.787171Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:14.815701Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7318:5363]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:14.815925Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:14.815962Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7318:5363], StatRequests.size() = 1 2025-03-28T09:37:14.920735Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjY4NTY0OTUtMzFhYjBhMjgtM2UyODIyMWUtYjkwZTVkODQ=, TxId: 2025-03-28T09:37:14.920789Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjY4NTY0OTUtMzFhYjBhMjgtM2UyODIyMWUtYjkwZTVkODQ=, TxId: 2025-03-28T09:37:14.921202Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:14.933037Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7328:5369] 2025-03-28T09:37:14.933190Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7328:5369], schemeshard id = 72075186224037897 2025-03-28T09:37:14.933251Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7250:5312], server id = [2:7329:5370], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:14.933294Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7329:5370] 2025-03-28T09:37:14.933368Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7329:5370], node id = 2, have schemeshards count = 1, need schemeshards count = 0 
2025-03-28T09:37:14.945969Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:14.946028Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:15.031238Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7333:5373] 2025-03-28T09:37:15.031847Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2801:3222] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-03-28T09:37:15.031891Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2801:3222] 2025-03-28T09:37:15.031944Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-03-28T09:37:15.541716Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-28T09:37:15.541841Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:37:16.241342Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:16.241428Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:37:16.241476Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:37:17.469840Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:17.469971Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:17.470015Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:17.470544Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:17.483141Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:17.483466Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:17.483568Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:17.484244Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:17.496672Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:17.496830Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-28T09:37:17.497252Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7412:5416], server id = [2:7413:5417], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:17.497345Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7412:5416], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:17.498406Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:17.498482Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:17.498631Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:17.498786Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:17.498992Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:17.501165Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7412:5416], server id = [2:7413:5417], tablet id = 72075186224037899 2025-03-28T09:37:17.501203Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:17.501569Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:17.519453Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzZjOTViZjMtN2RhZTFlYmQtZmEyYmZmNWUtYjQ4NjU1ZTc=, TxId: 2025-03-28T09:37:17.519535Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzZjOTViZjMtN2RhZTFlYmQtZmEyYmZmNWUtYjQ4NjU1ZTc=, TxId: 2025-03-28T09:37:17.519950Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:17.543197Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:17.543258Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2801:3222] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink [GOOD] >> TSubscriberTest::NotifyUpdate >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate [GOOD] >> TColumnShardTestSchema::OneTierExternalTtl [GOOD] >> TSubscriberTest::NotifyUpdate [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink [GOOD] Test command err: 2025-03-28T09:37:12.706154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:12.706476Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:12.706532Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015c3/r3tmp/tmpjSsyfO/pdisk_1.dat 2025-03-28T09:37:12.971853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:37:12.996402Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:13.033359Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:13.033475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:13.044569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:13.123219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:13.149665Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:37:13.150511Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:37:13.150922Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:37:13.151116Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:13.181685Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:37:13.182239Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:13.182319Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:37:13.183638Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:37:13.183703Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:37:13.183746Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:37:13.184049Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:37:13.184150Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:37:13.184223Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:37:13.194850Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:37:13.217975Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:37:13.218128Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:37:13.218254Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:37:13.218284Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:37:13.218322Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:37:13.218349Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:37:13.218515Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:13.218546Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:13.218781Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:37:13.218847Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:37:13.219172Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:37:13.219204Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:13.219248Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:37:13.219293Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:37:13.219329Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:37:13.219357Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:37:13.219388Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:37:13.219470Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:13.219508Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:13.219556Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:37:13.219605Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:37:13.219637Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:37:13.219702Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:37:13.219879Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:37:13.219930Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:37:13.219991Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:37:13.220026Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:37:13.220064Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:37:13.220091Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-28T09:37:13.220115Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:37:13.220342Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:37:13.220370Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:37:13.220395Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:37:13.220417Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:37:13.220454Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:37:13.220485Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:37:13.220512Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:37:13.220535Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:37:13.220554Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:37:13.221563Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:37:13.221598Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:37:13.232164Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:37:13.232213Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:37:13.232252Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:37:13.232283Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:37:13.232350Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:37:13.379324Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:13.379378Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:13.379411Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:37:13.379807Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:37:13.379845Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:37:13.379942Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:37:13.379985Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:37:13.380038Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:37:13.380074Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:37:13.384466Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:37:13.384583Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:37:13.385323Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:13.385360Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:13.385410Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:37:1 ... 86224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:37:19.593447Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037892 2025-03-28T09:37:19.593499Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037892, actorId: [2:1198:2936] 2025-03-28T09:37:19.593526Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2025-03-28T09:37:19.593563Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2025-03-28T09:37:19.593591Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-28T09:37:19.593726Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1044:2823], Recipient [2:756:2634]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037892 OperationCookie: 281474976715665 2025-03-28T09:37:19.593768Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715665 2025-03-28T09:37:19.593956Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1044:2823], Recipient [2:1044:2823]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:19.593978Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:19.594176Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1193:2931], Recipient [2:756:2634]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037892 ClientId: [2:1193:2931] ServerId: [2:1195:2933] } 2025-03-28T09:37:19.594206Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-28T09:37:19.594445Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1044:2823]: {TEvRegisterTabletResult TabletId# 72075186224037892 Entry# 2000} 2025-03-28T09:37:19.594483Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-03-28T09:37:19.594521Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2025-03-28T09:37:19.594553Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: 
at tablet# 72075186224037892 2025-03-28T09:37:19.594601Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-28T09:37:19.594630Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:19.594660Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037892 2025-03-28T09:37:19.594706Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-03-28T09:37:19.594734Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2025-03-28T09:37:19.594759Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2025-03-28T09:37:19.594793Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-03-28T09:37:19.594919Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1195:2933], Recipient [2:1044:2823]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:37:19.594945Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:37:19.594977Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:1193:2931], serverId# [2:1195:2933], sessionId# [0:0:0] 2025-03-28T09:37:19.595150Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1044:2823]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-03-28T09:37:19.595191Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-03-28T09:37:19.595232Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 0 next step 2000 2025-03-28T09:37:19.595270Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-03-28T09:37:19.595314Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037892 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-03-28T09:37:19.606009Z node 2 :TX_DATASHARD DEBUG: 72075186224037893 ack snapshot OpId 281474976715665 2025-03-28T09:37:19.606108Z node 2 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037893 2025-03-28T09:37:19.606212Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:37:19.606290Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2025-03-28T09:37:19.606353Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037893, actorId: [2:1202:2940] 2025-03-28T09:37:19.606385Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037893 2025-03-28T09:37:19.606450Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037893 2025-03-28T09:37:19.606480Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-03-28T09:37:19.606673Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1048:2825], Recipient [2:756:2634]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 2025-03-28T09:37:19.606757Z node 2 :TX_DATASHARD DEBUG: 
72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2025-03-28T09:37:19.607062Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1048:2825], Recipient [2:1048:2825]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:19.607107Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:19.607298Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1194:2932], Recipient [2:756:2634]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [2:1194:2932] ServerId: [2:1196:2934] } 2025-03-28T09:37:19.607334Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-28T09:37:19.607661Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1048:2825]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 2000} 2025-03-28T09:37:19.607701Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-03-28T09:37:19.607728Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 2000 2025-03-28T09:37:19.607758Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-03-28T09:37:19.607990Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-03-28T09:37:19.608031Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:19.608057Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037893 2025-03-28T09:37:19.608085Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037893 has no attached operations 2025-03-28T09:37:19.608109Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037893 2025-03-28T09:37:19.608136Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037893 TxInFly 0 2025-03-28T09:37:19.608171Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-03-28T09:37:19.608322Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1196:2934], Recipient [2:1048:2825]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:37:19.608354Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:37:19.608387Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1194:2932], serverId# [2:1196:2934], sessionId# [0:0:0] 2025-03-28T09:37:19.608567Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1048:2825]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-03-28T09:37:19.608591Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-03-28T09:37:19.608612Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 2000 2025-03-28T09:37:19.608640Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037893: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-03-28T09:37:19.608673Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037893 promoting 
UnprotectedReadEdge to v2000/18446744073709551615 2025-03-28T09:37:19.619441Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715665 2025-03-28T09:37:19.622282Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender [2:409:2404], Recipient [2:761:2636] 2025-03-28T09:37:19.622368Z node 2 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-03-28T09:37:19.624769Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2025-03-28T09:37:19.624850Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:37:19.625518Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [2:748:2629], Recipient [2:756:2634]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-03-28T09:37:20.150851Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 278003712, Sender [2:995:2681], Recipient [2:756:2634]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 3 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC } TxId: 281474976715663 TxMode: MODE_VOLATILE_PREPARE Locks { SendingShards: 72075186224037888 SendingShards: 72075186224037889 ReceivingShards: 72075186224037888 ReceivingShards: 72075186224037889 Op: Commit } 2025-03-28T09:37:20.150926Z node 2 :TX_DATASHARD TRACE: Handle TTxWrite: at tablet# 72075186224037889 2025-03-28T09:37:20.151045Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 281474976715663 because datashard 72075186224037889: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=281474976715663; 2025-03-28T09:37:20.151112Z node 2 :TX_DATASHARD NOTICE: Rejecting data TxId 281474976715663 because datashard 72075186224037889: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-03-28T09:37:20.151590Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2025-03-28T09:37:20.152091Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::NotifyUpdate [GOOD] Test command err: 2025-03-28T09:37:20.595151Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:37:20.597118Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-03-28T09:37:20.597201Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-03-28T09:37:20.597242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-03-28T09:37:20.597286Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# 
[1:35:2065] 2025-03-28T09:37:20.597346Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-03-28T09:37:20.597378Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:37:20.597414Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-03-28T09:37:20.597435Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:37:20.597777Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2025-03-28T09:37:20.597841Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:35:2065] 2025-03-28T09:37:20.597916Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Update to strong state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] Test command err: 2025-03-28T09:34:55.948369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:55.948600Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:55.948637Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ca4/r3tmp/tmpShOgzX/pdisk_1.dat 2025-03-28T09:34:56.241339Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25575, node 1 2025-03-28T09:34:56.457114Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:56.457170Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:56.457201Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:56.457613Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:56.459774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:56.544241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:56.544388Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:56.557724Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18512 2025-03-28T09:34:57.046543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:59.822383Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:34:59.846640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.846732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.884974Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:59.886793Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:00.105062Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.105624Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.105998Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.106089Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.106327Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.106410Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.106493Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.106547Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.106592Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.268221Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:00.268318Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:00.281290Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:00.406338Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:00.449891Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:00.449971Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:00.474854Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:00.474986Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:00.475172Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:00.475226Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:00.475271Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:00.475308Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:00.475348Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:00.475392Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:00.475758Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:00.504598Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:00.504683Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:00.511241Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:00.516863Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:00.517064Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:00.522438Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-28T09:35:00.539913Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:00.539978Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:00.540052Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-28T09:35:00.554509Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:00.565048Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:00.565190Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:00.727982Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:00.909130Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:00.987640Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:01.664966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:35:02.311819Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:02.460894Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-28T09:35:02.460971Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:35:02.461068Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2594:2948], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:35:02.462943Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2596:2950] 2025-03-28T09:35:02.463742Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2596:2950], schemeshard id = 72075186224037899 2025-03-28T09:35:03.621426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2723:3244], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:03.621570Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:03.638898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-03-28T09:35:03.884569Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2877:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:03.884814Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2877:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:03.885133Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2877:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:03.885321Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2877:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:03.885470Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2877:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:03.885605Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2877:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:03.885739Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2877:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:03.885882Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2877:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:03.886054Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2877:3088];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09 ... 025-03-28T09:37:17.223544Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:9516:7177], schemeshard count = 1 2025-03-28T09:37:18.593047Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-03-28T09:37:18.593117Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 230.000000s, at schemeshard: 72075186224037899 2025-03-28T09:37:18.593428Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-03-28T09:37:18.606802Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:37:19.405723Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:19.405785Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:37:19.405825Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. 
Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-03-28T09:37:19.405862Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-28T09:37:19.417386Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:19.433833Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:19.434501Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:19.434576Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:19.435469Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:37:19.448781Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:19.448998Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:37:19.449761Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9647:7260], server id = [2:9652:7265], tablet id = 72075186224037905, status = OK 2025-03-28T09:37:19.450129Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9647:7260], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:37:19.451251Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9648:7261], server id = [2:9653:7266], tablet id = 72075186224037906, status = OK 2025-03-28T09:37:19.451318Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9648:7261], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:37:19.452135Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9649:7262], server id = [2:9654:7267], tablet id = 72075186224037907, status = OK 2025-03-28T09:37:19.452193Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9649:7262], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:37:19.453070Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9650:7263], server id = [2:9655:7268], tablet id = 72075186224037908, status = OK 2025-03-28T09:37:19.453117Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9650:7263], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:37:19.453223Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9651:7264], server id = [2:9657:7270], tablet id = 72075186224037909, status = OK 2025-03-28T09:37:19.453266Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9651:7264], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:37:19.457208Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-28T09:37:19.457745Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9647:7260], server id = [2:9652:7265], tablet id = 72075186224037905 2025-03-28T09:37:19.457785Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.458849Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9670:7280], server id = [2:9671:7281], tablet id = 72075186224037910, status = OK 2025-03-28T09:37:19.458906Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9670:7280], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:37:19.459910Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-28T09:37:19.460499Z node 2 :STATISTICS DEBUG: Received 
TEvStatisticsResponse TabletId: 72075186224037907 2025-03-28T09:37:19.461219Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9648:7261], server id = [2:9653:7266], tablet id = 72075186224037906 2025-03-28T09:37:19.461254Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.461423Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9649:7262], server id = [2:9654:7267], tablet id = 72075186224037907 2025-03-28T09:37:19.461441Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.462606Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9675:7283], server id = [2:9678:7286], tablet id = 72075186224037911, status = OK 2025-03-28T09:37:19.462660Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9675:7283], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:37:19.463222Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-28T09:37:19.463438Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9677:7285], server id = [2:9679:7287], tablet id = 72075186224037912, status = OK 2025-03-28T09:37:19.463480Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9677:7285], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:37:19.464297Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9650:7263], server id = [2:9655:7268], tablet id = 72075186224037908 2025-03-28T09:37:19.464323Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.464710Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037909 2025-03-28T09:37:19.465499Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9651:7264], server id = [2:9657:7270], tablet id = 72075186224037909 2025-03-28T09:37:19.465521Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.465648Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037910 2025-03-28T09:37:19.466150Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9670:7280], server id = [2:9671:7281], tablet id = 72075186224037910 2025-03-28T09:37:19.466172Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.466278Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9682:7289], server id = [2:9686:7293], tablet id = 72075186224037913, status = OK 2025-03-28T09:37:19.466330Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9682:7289], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:37:19.467046Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9684:7291], server id = [2:9688:7295], tablet id = 72075186224037914, status = OK 2025-03-28T09:37:19.467107Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9684:7291], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:37:19.469181Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037911 2025-03-28T09:37:19.469351Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9675:7283], server id = [2:9678:7286], tablet id = 72075186224037911 2025-03-28T09:37:19.469366Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.470554Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037912 2025-03-28T09:37:19.470938Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = 
[2:9677:7285], server id = [2:9679:7287], tablet id = 72075186224037912 2025-03-28T09:37:19.470962Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.471336Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037913 2025-03-28T09:37:19.471714Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9682:7289], server id = [2:9686:7293], tablet id = 72075186224037913 2025-03-28T09:37:19.471735Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.472101Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037914 2025-03-28T09:37:19.472138Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:19.472280Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:19.472433Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:19.472642Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-28T09:37:19.474291Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9684:7291], server id = [2:9688:7295], tablet id = 72075186224037914 2025-03-28T09:37:19.474312Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.474752Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:19.505153Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:9713:7316]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:19.505312Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:19.505343Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:9713:7316], StatRequests.size() = 1 2025-03-28T09:37:19.645875Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MThiMDIxOGMtYWRiOTljMDQtYzIwNWE3Y2YtODA5NWQ5Y2I=, TxId: 2025-03-28T09:37:19.645944Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MThiMDIxOGMtYWRiOTljMDQtYzIwNWE3Y2YtODA5NWQ5Y2I=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-03-28T09:37:19.646594Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:9721:7322]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:19.647097Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:19.647517Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:19.647575Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:37:19.651293Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:19.651353Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2025-03-28T09:37:19.651419Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 2] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:37:19.659268Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneTierExternalTtl [GOOD] Test command err: 2025-03-28T09:37:03.690958Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:37:03.754135Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:37:03.757598Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:37:03.757903Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:37:03.773781Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:37:03.774095Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:37:03.780362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:37:03.780513Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:37:03.780674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:37:03.780780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:37:03.780881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:37:03.781008Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:37:03.781089Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:37:03.781180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:37:03.781267Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:37:03.781339Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:37:03.781408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:37:03.781478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:37:03.797828Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:37:03.802006Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:37:03.802333Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:37:03.802377Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:37:03.802512Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:37:03.802663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:37:03.802755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:37:03.802801Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:37:03.802882Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:37:03.802931Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:37:03.802964Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:37:03.802994Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:37:03.803133Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:37:03.803184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:37:03.803229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:37:03.803251Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:37:03.803351Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:37:03.803411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:37:03.803448Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:37:03.803469Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:37:03.803514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:37:03.803553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:37:03.803581Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:37:03.803627Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:37:03.803686Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:37:03.803707Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:37:03.804012Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=35; 2025-03-28T09:37:03.804075Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 
2025-03-28T09:37:03.804153Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-03-28T09:37:03.804227Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-03-28T09:37:03.804364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:37:03.804411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:37:03.804435Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:37:03.804639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:37:03.804683Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:37:03.804716Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:37:03.804840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:37:03.804872Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:37:03.804894Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:37:03.805029Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:37:03.805060Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:37:03.805085Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T0 ... 
;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:20.335371Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:20.335393Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:37:20.335414Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:37:20.335472Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:37:20.335556Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:73;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:20.335582Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:37:20.335639Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=73; 2025-03-28T09:37:20.335668Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=584;num_rows=73;batch_columns=timestamp; 2025-03-28T09:37:20.335744Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:673:2689];bytes=584;rows=73;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2025-03-28T09:37:20.335797Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:20.335851Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:20.335951Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:20.336024Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:37:20.336074Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:20.336130Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:20.336154Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:680:2696] finished for tablet 9437184 2025-03-28T09:37:20.336542Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:673:2689];stats={"p":[{"events":["f_bootstrap"],"t":0.034},{"events":["f_ProduceResults"],"t":0.253},{"events":["l_bootstrap"],"t":0.389},{"events":["f_processing","f_task_result"],"t":0.396},{"events":["l_task_result"],"t":4.486},{"events":["f_ack"],"t":4.512},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":5.014}],"full":{"a":1743154635321661,"name":"_full_task","f":1743154635321661,"d_finished":0,"c":0,"l":1743154640336213,"d":5014552},"events":[{"name":"bootstrap","f":1743154635356198,"d_finished":354942,"c":1,"l":1743154635711140,"d":354942},{"a":1743154640336016,"name":"ack","f":1743154639834523,"d_finished":464741,"c":904,"l":1743154640335973,"d":464938},{"a":1743154640336009,"name":"processing","f":1743154635717728,"d_finished":2280841,"c":4520,"l":1743154640335975,"d":2281045},{"name":"ProduceResults","f":1743154635574724,"d_finished":929696,"c":5426,"l":1743154640336143,"d":929696},{"a":1743154640336144,"name":"Finish","f":1743154640336144,"d_finished":0,"c":0,"l":1743154640336213,"d":69},{"name":"task_result","f":1743154635717749,"d_finished":1755141,"c":3616,"l":1743154639808415,"d":1755141}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:20.336601Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:673:2689];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:37:20.337024Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:673:2689];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.034},{"events":["f_ProduceResults"],"t":0.253},{"events":["l_bootstrap"],"t":0.389},{"events":["f_processing","f_task_result"],"t":0.396},{"events":["l_task_result"],"t":4.486},{"events":["f_ack"],"t":4.512},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":5.014}],"full":{"a":1743154635321661,"name":"_full_task","f":1743154635321661,"d_finished":0,"c":0,"l":1743154640336634,"d":5014973},"events":[{"name":"bootstrap","f":1743154635356198,"d_finished":354942,"c":1,"l":1743154635711140,"d":354942},{"a":1743154640336016,"name":"ack","f":1743154639834523,"d_finished":464741,"c":904,"l":1743154640335973,"d":465359},{"a":1743154640336009,"name":"processing","f":1743154635717728,"d_finished":2280841,"c":4520,"l":1743154640335975,"d":2281466},{"name":"ProduceResults","f":1743154635574724,"d_finished":929696,"c":5426,"l":1743154640336143,"d":929696},{"a":1743154640336144,"name":"Finish","f":1743154640336144,"d_finished":0,"c":0,"l":1743154640336634,"d":490},{"name":"task_result","f":1743154635717749,"d_finished":1755141,"c":3616,"l":1743154639808415,"d":1755141}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:20.337113Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:37:15.287244Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=904;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7049848;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7049848;selected_rows=0; 2025-03-28T09:37:20.337156Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:37:20.337333Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:680:2696];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaInAggregate [GOOD] Test command err: 2025-03-28T09:34:54.174568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:54.174836Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:54.174892Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e54/r3tmp/tmpyXe8qC/pdisk_1.dat 2025-03-28T09:34:54.587638Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16193, node 1 2025-03-28T09:34:55.102943Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:55.102988Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:55.103013Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:55.103337Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:55.111185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:55.205093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:55.205203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:55.218653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:29314 2025-03-28T09:34:55.721928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:58.576616Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:34:58.611039Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:58.611151Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:58.652175Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:58.654282Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:58.899654Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.900216Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.900717Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.900859Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.901123Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.901206Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.901277Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.901355Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.901431Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.070028Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.070156Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.083521Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:59.234201Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:59.284069Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:34:59.284150Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:34:59.315153Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:34:59.315309Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:34:59.315512Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:34:59.315587Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:34:59.315646Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:34:59.315699Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:34:59.315751Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:34:59.315797Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:34:59.316255Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:34:59.347033Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:34:59.347158Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:34:59.354725Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:34:59.361032Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:34:59.361249Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:34:59.367689Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:34:59.386581Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:34:59.386638Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:34:59.386774Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:34:59.400781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:34:59.408095Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:34:59.408245Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:34:59.617338Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:34:59.776794Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:34:59.897396Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:00.741372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:00.741531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:00.835742Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:01.210165Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:01.210461Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:01.210794Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:01.210939Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:01.211066Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:01.211198Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:01.211327Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:01.211489Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:01.211652Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:01.211772Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:01.211911Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:01.212042Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:01.272216Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:01.272307Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process= ... 037894] EvServerConnected, pipe server id = [2:8841:6654] 2025-03-28T09:37:17.382160Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8841:6654], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-03-28T09:37:17.394746Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:17.394826Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:17.482059Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8845:6657] 2025-03-28T09:37:17.482665Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4095:3315] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-03-28T09:37:17.482738Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:4095:3315] 2025-03-28T09:37:17.482786Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-03-28T09:37:18.085831Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-28T09:37:18.085934Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:37:18.096743Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-03-28T09:37:18.096797Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:37:18.607908Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:18.607970Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:37:18.608007Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:37:19.761718Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:19.761864Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:19.761926Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:19.762657Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:19.776317Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:19.776767Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:19.776854Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:19.777354Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:19.790835Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:19.790988Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-03-28T09:37:19.791899Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8924:6698], server id = [2:8929:6703], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:19.792021Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8924:6698], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.792366Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8925:6699], server id = [2:8930:6704], tablet id = 72075186224037900, status = OK 2025-03-28T09:37:19.792439Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8925:6699], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.793706Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8926:6700], server id = [2:8931:6705], tablet id = 72075186224037901, status = OK 2025-03-28T09:37:19.793765Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8926:6700], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.794738Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8927:6701], server id = [2:8932:6706], tablet id = 72075186224037902, status = OK 2025-03-28T09:37:19.794796Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8927:6701], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.795238Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:19.796384Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8928:6702], server id = [2:8934:6708], tablet id = 72075186224037903, status = OK 2025-03-28T09:37:19.796442Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8928:6702], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.796829Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8924:6698], server id = [2:8929:6703], tablet id = 72075186224037899 2025-03-28T09:37:19.796869Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.797668Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:37:19.798171Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-28T09:37:19.798455Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8925:6699], server id = [2:8930:6704], tablet id = 72075186224037900 2025-03-28T09:37:19.798500Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.798945Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8936:6710], server id = [2:8939:6713], tablet id = 72075186224037904, status = OK 2025-03-28T09:37:19.799016Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8936:6710], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.799304Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-28T09:37:19.799532Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8927:6701], server id = [2:8932:6706], tablet id = 72075186224037902 2025-03-28T09:37:19.799559Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.800215Z node 2 :STATISTICS DEBUG: Received 
TEvStatisticsResponse TabletId: 72075186224037903 2025-03-28T09:37:19.800568Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8926:6700], server id = [2:8931:6705], tablet id = 72075186224037901 2025-03-28T09:37:19.800596Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.800785Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8941:6715], server id = [2:8943:6717], tablet id = 72075186224037905, status = OK 2025-03-28T09:37:19.800840Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8941:6715], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.801289Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8928:6702], server id = [2:8934:6708], tablet id = 72075186224037903 2025-03-28T09:37:19.801318Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.802071Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8942:6716], server id = [2:8945:6719], tablet id = 72075186224037906, status = OK 2025-03-28T09:37:19.802139Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8942:6716], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.802412Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-28T09:37:19.803044Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8944:6718], server id = [2:8948:6722], tablet id = 72075186224037907, status = OK 2025-03-28T09:37:19.803105Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8944:6718], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.803359Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8947:6721], server id = [2:8949:6723], tablet id = 72075186224037908, status = OK 2025-03-28T09:37:19.803405Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8947:6721], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.804209Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-28T09:37:19.804906Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8936:6710], server id = [2:8939:6713], tablet id = 72075186224037904 2025-03-28T09:37:19.804935Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.805255Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-28T09:37:19.805503Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8941:6715], server id = [2:8943:6717], tablet id = 72075186224037905 2025-03-28T09:37:19.805528Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.805764Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-28T09:37:19.805955Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8942:6716], server id = [2:8945:6719], tablet id = 72075186224037906 2025-03-28T09:37:19.805979Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.806097Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-28T09:37:19.806137Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:19.806401Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:19.806642Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 
2025-03-28T09:37:19.806925Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:19.809514Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8944:6718], server id = [2:8948:6722], tablet id = 72075186224037907 2025-03-28T09:37:19.809547Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.809966Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8947:6721], server id = [2:8949:6723], tablet id = 72075186224037908 2025-03-28T09:37:19.809994Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.810307Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:19.862561Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGQ5NGYyZWYtMTgzMzlmNTAtNzkzZmI1NC00ZmE2NTQ3Yg==, TxId: 2025-03-28T09:37:19.862637Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGQ5NGYyZWYtMTgzMzlmNTAtNzkzZmI1NC00ZmE2NTQ3Yg==, TxId: 2025-03-28T09:37:19.863260Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:19.877044Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:19.877127Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:4095:3315] >> AnalyzeColumnshard::AnalyzeStatus [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] Test command err: 2025-03-28T09:34:54.175081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:54.175392Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:54.175443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e7a/r3tmp/tmpGXSBJD/pdisk_1.dat 2025-03-28T09:34:54.587440Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17559, node 1 2025-03-28T09:34:55.103605Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:55.103653Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:55.103683Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:55.104124Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:55.111372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:55.200339Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:55.200467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:55.215440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22893 2025-03-28T09:34:55.719783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:58.608318Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:34:58.641414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:58.641530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:58.682733Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:58.684919Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:58.924341Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.924866Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.925393Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.925526Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.925774Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.925858Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.925930Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.926005Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.926077Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.091783Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.091893Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.108485Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:59.249551Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:59.298454Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:34:59.298552Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:34:59.329988Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:34:59.330140Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:34:59.330338Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:34:59.330390Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:34:59.330455Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:34:59.330518Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:34:59.330563Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:34:59.330611Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:34:59.331060Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:34:59.362045Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:34:59.362141Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:34:59.368162Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:34:59.373226Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:34:59.373463Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:34:59.378617Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:34:59.392393Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:34:59.392453Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:34:59.392528Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:34:59.403009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:34:59.412949Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:34:59.413088Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:34:59.587960Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:34:59.751838Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:34:59.829807Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:00.733618Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:00.733734Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:00.835723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:01.200581Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:01.200857Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:01.201171Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:01.201316Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:01.201468Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:01.201609Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:01.201747Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:01.201889Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:01.202030Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:01.202160Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:01.202304Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:01.202441Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:01.260803Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:01.260906Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process= ... [72075186224037894] Subscribed for config changes 2025-03-28T09:37:19.634355Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:19.634403Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:19.634493Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:37:19.635535Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:19.635588Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:19.636091Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:37:19.674845Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:19.675031Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:37:19.675983Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8719:6543], server id = [2:8724:6548], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:19.676319Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8719:6543], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.676659Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8720:6544], server id = [2:8725:6549], tablet id = 72075186224037900, status = OK 2025-03-28T09:37:19.676713Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8720:6544], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.678627Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8721:6545], server id = [2:8727:6551], tablet id = 72075186224037901, status = OK 2025-03-28T09:37:19.678702Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8721:6545], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.679011Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8722:6546], server id = [2:8726:6550], tablet id = 72075186224037902, status = OK 2025-03-28T09:37:19.679065Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8722:6546], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.679305Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8723:6547], server id = [2:8730:6554], tablet id = 72075186224037903, status = OK 2025-03-28T09:37:19.679345Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8723:6547], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.682862Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:19.683172Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8719:6543], server id = [2:8724:6548], tablet id = 72075186224037899 2025-03-28T09:37:19.683204Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.684544Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:37:19.684952Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8742:6563], server id = [2:8744:6564], tablet 
id = 72075186224037904, status = OK 2025-03-28T09:37:19.685005Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8742:6563], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.685436Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8720:6544], server id = [2:8725:6549], tablet id = 72075186224037900 2025-03-28T09:37:19.685460Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.685708Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-28T09:37:19.686297Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8722:6546], server id = [2:8726:6550], tablet id = 72075186224037902 2025-03-28T09:37:19.686322Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.686803Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-28T09:37:19.687280Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8745:6565], server id = [2:8747:6566], tablet id = 72075186224037905, status = OK 2025-03-28T09:37:19.687325Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8745:6565], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.687531Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8721:6545], server id = [2:8727:6551], tablet id = 72075186224037901 2025-03-28T09:37:19.687557Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.688132Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-28T09:37:19.688485Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8748:6567], server id = [2:8752:6571], tablet id = 72075186224037906, status = OK 2025-03-28T09:37:19.688525Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8748:6567], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.688774Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8723:6547], server id = [2:8730:6554], tablet id = 72075186224037903 2025-03-28T09:37:19.688793Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.688851Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8750:6569], server id = [2:8756:6575], tablet id = 72075186224037907, status = OK 2025-03-28T09:37:19.688882Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8750:6569], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.690131Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8754:6573], server id = [2:8758:6576], tablet id = 72075186224037908, status = OK 2025-03-28T09:37:19.690171Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8754:6573], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.692335Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-28T09:37:19.692549Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8742:6563], server id = [2:8744:6564], tablet id = 72075186224037904 2025-03-28T09:37:19.692568Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.692826Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-28T09:37:19.693353Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8745:6565], server id = [2:8747:6566], tablet id = 72075186224037905 2025-03-28T09:37:19.693374Z 
node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.693890Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-28T09:37:19.694348Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8748:6567], server id = [2:8752:6571], tablet id = 72075186224037906 2025-03-28T09:37:19.694368Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.694635Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-28T09:37:19.694916Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8750:6569], server id = [2:8756:6575], tablet id = 72075186224037907 2025-03-28T09:37:19.694932Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.695242Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-28T09:37:19.695286Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:19.695491Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:19.695651Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:19.695868Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:19.698044Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8754:6573], server id = [2:8758:6576], tablet id = 72075186224037908 2025-03-28T09:37:19.698078Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.698714Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:19.726146Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8785:6599]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:19.726348Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:19.726384Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8785:6599], StatRequests.size() = 1 2025-03-28T09:37:19.837703Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTJlMmM5NmMtNzIzZmMxYzktNmFkOWQ0OGMtODhmMTQ0Zjc=, TxId: 2025-03-28T09:37:19.837763Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTJlMmM5NmMtNzIzZmMxYzktNmFkOWQ0OGMtODhmMTQ0Zjc=, TxId: 2025-03-28T09:37:19.838385Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:19.862439Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8795:6605] 2025-03-28T09:37:19.862588Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8671:6511], server id = [2:8795:6605], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:19.862823Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8796:6606] 2025-03-28T09:37:19.862966Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8795:6605], node id = 2, have schemeshards count = 1, need schemeshards count = 0 
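The RunDataQuery statement logged above, reformatted for readability. This is a sketch of the logged YQL, not confirmed output: the captured log dropped the angle-bracketed element types of the two list parameters, so Uint32 column tags and String-serialized statistics payloads are an assumption inferred from how the values are used in the UPSERT.

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;  -- element type assumed
    DECLARE $data AS List<String>;         -- element type assumed

    UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES
        ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
        ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);

Each VALUES row persists one column's aggregated statistic under the same (owner_id, local_path_id, stat_type) key, which is why the parameters arrive as parallel lists indexed per column.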
2025-03-28T09:37:19.863106Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8796:6606], schemeshard id = 72075186224037897 2025-03-28T09:37:19.876612Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:19.876679Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:19.962360Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8799:6609]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:19.962647Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:19.962702Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:37:19.964953Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:19.964996Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:37:19.965046Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:37:19.969488Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] Test command err: 2025-03-28T09:34:57.170996Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:57.171214Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:57.171263Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b59/r3tmp/tmpnJpZOj/pdisk_1.dat 2025-03-28T09:34:57.463516Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22709, node 1 2025-03-28T09:34:57.691564Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:57.691619Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:57.691647Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:57.692074Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:57.694053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:57.781281Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:57.781390Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:57.794609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5590 2025-03-28T09:34:58.329376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:00.932048Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:00.955749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:00.955899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:00.993232Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:00.994529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:01.210394Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:01.210946Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:01.211433Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:01.211568Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:01.211839Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:01.211926Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:01.211995Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:01.212069Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:01.212136Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:01.372937Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:01.373007Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:01.385976Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:01.495418Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:01.527671Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:01.527751Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:01.547428Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:01.547565Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:01.547698Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:01.547745Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:01.547778Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:01.547846Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:01.547882Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:01.547919Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:01.548189Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:01.574133Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:01.574247Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:01.580292Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:01.585072Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:01.585267Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:01.590553Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:01.605681Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:01.605733Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:01.605805Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:01.619417Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:01.631205Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:01.631356Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:01.817504Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:01.964438Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:02.042899Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:02.892306Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:02.892419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:02.913687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:03.204335Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:03.204563Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:03.204815Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:03.204942Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:03.205059Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:03.205181Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:03.205302Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:03.205435Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:03.205567Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:03.205693Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:03.205811Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:03.205923Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:03.259034Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:03.259123Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=T ... [72075186224037894] Subscribed for config changes 2025-03-28T09:37:20.214523Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:20.214590Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:20.214746Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:37:20.216153Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:20.216228Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:20.216941Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:37:20.247781Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:20.247929Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-28T09:37:20.248970Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8706:6535], server id = [2:8711:6540], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:20.249330Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8706:6535], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:20.249930Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8708:6537], server id = [2:8713:6542], tablet id = 72075186224037901, status = OK 2025-03-28T09:37:20.250015Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8708:6537], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:20.251347Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8707:6536], server id = [2:8712:6541], tablet id = 72075186224037900, status = OK 2025-03-28T09:37:20.251408Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8707:6536], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:20.252491Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8709:6538], server id = [2:8714:6543], tablet id = 72075186224037902, status = OK 2025-03-28T09:37:20.252546Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8709:6538], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:20.253339Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8710:6539], server id = [2:8715:6544], tablet id = 72075186224037903, status = OK 2025-03-28T09:37:20.253419Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8710:6539], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:20.258546Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:20.259324Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8706:6535], server id = [2:8711:6540], tablet id = 72075186224037899 2025-03-28T09:37:20.259381Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:20.260856Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-28T09:37:20.261615Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8708:6537], server id = [2:8713:6542], 
tablet id = 72075186224037901 2025-03-28T09:37:20.261651Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:20.262121Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:37:20.262725Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8729:6555], server id = [2:8732:6556], tablet id = 72075186224037904, status = OK 2025-03-28T09:37:20.262807Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8729:6555], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:20.263366Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8707:6536], server id = [2:8712:6541], tablet id = 72075186224037900 2025-03-28T09:37:20.263400Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:20.263752Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-28T09:37:20.264672Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-28T09:37:20.265144Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8733:6557], server id = [2:8735:6559], tablet id = 72075186224037905, status = OK 2025-03-28T09:37:20.265215Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8733:6557], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:20.265681Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8709:6538], server id = [2:8714:6543], tablet id = 72075186224037902 2025-03-28T09:37:20.265715Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:20.265801Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8710:6539], server id = [2:8715:6544], tablet id = 72075186224037903 2025-03-28T09:37:20.265827Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:20.266447Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8734:6558], server id = [2:8737:6561], tablet id = 72075186224037906, status = OK 2025-03-28T09:37:20.266511Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8734:6558], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:20.267356Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8736:6560], server id = [2:8741:6565], tablet id = 72075186224037907, status = OK 2025-03-28T09:37:20.267414Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8736:6560], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:20.268035Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8739:6563], server id = [2:8742:6566], tablet id = 72075186224037908, status = OK 2025-03-28T09:37:20.268090Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8739:6563], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:20.272794Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-28T09:37:20.273212Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8729:6555], server id = [2:8732:6556], tablet id = 72075186224037904 2025-03-28T09:37:20.273245Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:20.274002Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-28T09:37:20.274544Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8733:6557], server id = [2:8735:6559], tablet id = 72075186224037905 
2025-03-28T09:37:20.274576Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:20.274989Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-28T09:37:20.275427Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8734:6558], server id = [2:8737:6561], tablet id = 72075186224037906 2025-03-28T09:37:20.275469Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:20.276927Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-28T09:37:20.277321Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8736:6560], server id = [2:8741:6565], tablet id = 72075186224037907 2025-03-28T09:37:20.277352Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:20.277494Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-28T09:37:20.277543Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:20.277885Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:20.278091Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:20.278377Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:20.280896Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8739:6563], server id = [2:8742:6566], tablet id = 72075186224037908 2025-03-28T09:37:20.280930Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:20.281715Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:20.318809Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8772:6591]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:20.319103Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:20.319172Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8772:6591], StatRequests.size() = 1 2025-03-28T09:37:20.456594Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjZmZTA4YTctYzgzZjdlNjEtOWExZGEzYmEtZjYzZTQwMzU=, TxId: 2025-03-28T09:37:20.456651Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjZmZTA4YTctYzgzZjdlNjEtOWExZGEzYmEtZjYzZTQwMzU=, TxId: 2025-03-28T09:37:20.457079Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:20.469877Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8782:6597] 2025-03-28T09:37:20.470002Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8658:6503], server id = [2:8782:6597], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:20.470174Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8782:6597], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-03-28T09:37:20.470417Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, 
pipe server id = [2:8783:6598] 2025-03-28T09:37:20.470528Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8783:6598], schemeshard id = 72075186224037897 2025-03-28T09:37:20.504266Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:20.504340Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:20.579514Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8786:6601]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:20.579806Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:20.579847Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:37:20.582668Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:20.582737Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:37:20.582779Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:37:20.587028Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpInplaceUpdate::SingleRowArithm+UseSink >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] >> TColumnShardTestSchema::InternalTTL >> KqpEffects::InsertAbort_Params_Conflict+UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Conflict-UseSink >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus [GOOD] Test command err: 2025-03-28T09:35:00.928832Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:00.929068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:00.929106Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b31/r3tmp/tmpmkZZDR/pdisk_1.dat 2025-03-28T09:35:01.223568Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11243, node 1 2025-03-28T09:35:01.427406Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:01.427441Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:01.427461Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:01.427793Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:01.429364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:35:01.511210Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:01.511332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:01.524731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:3270 2025-03-28T09:35:02.040320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:04.550021Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:04.579749Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:04.579838Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:04.617138Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:04.619134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:04.835704Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.836152Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.836512Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.836615Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.836765Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.836842Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.836907Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.836967Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.837023Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.994661Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:04.994767Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:05.007144Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:05.111862Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:05.163621Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:05.163729Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:05.184816Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:05.185815Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:05.185985Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:05.186029Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:05.186068Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:05.186118Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:05.186169Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:05.186207Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:05.186581Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:05.222583Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:05.222710Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1870:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:05.230571Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1884:2609] 2025-03-28T09:35:05.234142Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2619] 2025-03-28T09:35:05.234626Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1904:2619], schemeshard id = 72075186224037897 2025-03-28T09:35:05.242991Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:05.260912Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:05.260979Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:05.261054Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:05.274743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:05.280145Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:05.280244Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:05.456998Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:05.608835Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:05.675088Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:06.507093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3073], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:06.507202Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:06.520140Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:06.594342Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:06.594520Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:06.594760Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:06.594839Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:06.594910Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:06.595011Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:06.595099Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:06.595172Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:06.595255Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:06.595323Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:06.595390Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:06.595496Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2326:2850];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:06.616050Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:06.616176Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... ase 2025-03-28T09:37:17.981008Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:37:17.983808Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:6992:5170], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:17.983890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7003:5175], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:17.983967Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:17.993539Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-28T09:37:18.035199Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7006:5178], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-28T09:37:18.260276Z node 2 :TX_PROXY ERROR: Actor# [2:7102:5224] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:18.309448Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7131:5239]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:18.309606Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:37:18.309676Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7133:5241] 2025-03-28T09:37:18.309730Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7133:5241] 2025-03-28T09:37:18.309939Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7134:5242] 2025-03-28T09:37:18.310005Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7134:5242], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:37:18.310066Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-28T09:37:18.310182Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7133:5241], server id = [2:7134:5242], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:18.310267Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:37:18.310323Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7131:5239], StatRequests.size() = 1 2025-03-28T09:37:18.407277Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MTlkMTg3NzgtYjY4NWU0ZTgtNWI3MTc2MWQtYjEzOTNlNWU=, TxId: 2025-03-28T09:37:18.407333Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTlkMTg3NzgtYjY4NWU0ZTgtNWI3MTc2MWQtYjEzOTNlNWU=, TxId: 2025-03-28T09:37:18.407649Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:18.420610Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:37:18.420660Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:18.463327Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:37:18.463399Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:37:18.548367Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7133:5241], schemeshard count = 1 2025-03-28T09:37:19.541456Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:19.541540Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:19.544030Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:19.559195Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:19.559648Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:19.559703Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-03-28T09:37:19.572271Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:19.583095Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2025-03-28T09:37:19.585124Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7212:5291] 2025-03-28T09:37:19.585547Z node 2 :STATISTICS DEBUG: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_ENQUEUED 2025-03-28T09:37:19.586422Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7214:5292]
---- StatisticsAggregator ----
Database: /Root/Database
BaseStatistics: 1
SchemeShards: 1
    72075186224037897
Nodes: 1
    2
RequestedSchemeShards: 1
    72075186224037897
FastCounter: 3
FastCheckInFlight: 0
FastSchemeShards: 0
FastNodes: 0
PropagationInFlight: 0
PropagationSchemeShards: 0
PropagationNodes: 0
LastSSIndex: 0
PendingRequests: 0
ProcessUrgentInFlight: 0
Columns: 2
DatashardRanges: 0
CountMinSketches: 0
ScheduleTraversalsByTime: 2
  oldest table: [OwnerId: 72075186224037897, LocalPathId: 4], update time: 1970-01-01T00:00:00Z
ScheduleTraversalsBySchemeShard: 1
    72075186224037897
    [OwnerId: 72075186224037897, LocalPathId: 4], [OwnerId: 72075186224037897, LocalPathId: 3]
ForceTraversals: 1
    1970-01-01T00:00:05Z
NavigateType: Analyze
NavigateAnalyzeOperationId: 
NavigatePathId: 
ForceTraversalOperationId: 
TraversalStartTime: 1970-01-01T00:00:00Z
TraversalPathId: 
TraversalIsColumnTable: 0
TraversalStartKey: 
GlobalTraversalRound: 1
TraversalRound: 0
HiveRequestRound: 0
... unblocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR 2025-03-28T09:37:19.587305Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-03-28T09:37:19.587382Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-03-28T09:37:19.600936Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-28T09:37:20.830417Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:20.830551Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:20.830611Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:20.831313Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:20.844584Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:20.845010Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:20.845078Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:20.845928Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:37:20.859151Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:20.859338Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:37:20.859903Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7264:5321], server id = [2:7265:5322], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:20.860023Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7264:5321], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:20.863681Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:20.863814Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:20.864117Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:20.864297Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:20.864641Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:37:20.867144Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7264:5321], server id = [2:7265:5322], tablet id = 72075186224037899 2025-03-28T09:37:20.867191Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:20.867764Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:20.902801Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7285:5341]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:20.903070Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:20.903123Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7285:5341], StatRequests.size() = 1 2025-03-28T09:37:21.027153Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YWRkMmE5ZjktOThiZjNlNjYtNzVjNmNhYTAtYWY2NjRkMw==, TxId: 2025-03-28T09:37:21.027244Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YWRkMmE5ZjktOThiZjNlNjYtNzVjNmNhYTAtYWY2NjRkMw==, TxId: 2025-03-28T09:37:21.027784Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:21.041435Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:21.041511Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:948:2752] 2025-03-28T09:37:21.042680Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7297:5349] 2025-03-28T09:37:21.043079Z node 2 :STATISTICS DEBUG: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_NO_OPERATION >> KqpImmediateEffects::UpdateAfterInsert >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave [GOOD] >> KqpImmediateEffects::MultiShardUpsertAfterRead >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] >> KqpImmediateEffects::WriteThenReadWithCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] Test command err: 2025-03-28T09:37:17.609382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:17.609594Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:17.609693Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:37:17.610100Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:17.610309Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:17.610437Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015ab/r3tmp/tmpM7Kssd/pdisk_1.dat 2025-03-28T09:37:17.878034Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:18.017646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:37:18.119157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:18.119246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:18.121427Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:18.121530Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:18.135170Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:37:18.135624Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:18.136038Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:18.407185Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:18.472389Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [2:1256:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:37:18.476202Z node 2 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [2:1256:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:37:18.476590Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1282:2390] 2025-03-28T09:37:18.476780Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:18.512798Z node 2 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [2:1256:2378], Recipient [2:1282:2390]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:37:18.517080Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:18.517215Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:37:18.518456Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:37:18.518518Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:37:18.518557Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:37:18.518826Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:37:18.519088Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:37:18.519150Z node 2 :TX_DATASHARD DEBUG: 
DataShard 72075186224037888 persisting started state actor id [2:1306:2390] in generation 1 2025-03-28T09:37:18.521588Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:37:18.545257Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:37:18.545398Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:37:18.545479Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:1310:2407] 2025-03-28T09:37:18.545502Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:37:18.545526Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:37:18.545549Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:37:18.545728Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1282:2390], Recipient [2:1282:2390]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:18.545761Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:18.545953Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:37:18.546028Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:37:18.546069Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:37:18.546099Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:18.546127Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:37:18.546159Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:37:18.546185Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:37:18.546209Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:37:18.546235Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:37:18.600086Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1314:2408], Recipient [2:1282:2390]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:18.600133Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:18.600172Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1266:2774], serverId# [2:1314:2408], sessionId# [0:0:0] 2025-03-28T09:37:18.600393Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:839:2466], Recipient [2:1314:2408] 2025-03-28T09:37:18.600430Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:37:18.600523Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:37:18.600708Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:37:18.600745Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:37:18.600818Z node 2 :TX_DATASHARD DEBUG: 
Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:37:18.600858Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:37:18.600886Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:37:18.600913Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:37:18.600938Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:37:18.601189Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:37:18.601237Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:37:18.601264Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:37:18.601285Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:37:18.601332Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:37:18.601359Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:37:18.601392Z node 2 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:37:18.601425Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:37:18.601456Z node 2 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:37:18.604421Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:37:18.604468Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:37:18.604492Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:37:18.604516Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:37:18.604571Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:37:18.604880Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [2:1315:2409], Recipient [2:1282:2390]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:37:18.604942Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:37:18.857181Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:1348:2418], Recipient [2:1282:2390]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:18.857224Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:18.857248Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:1345:2797], serverId# [2:1348:2418], sessionId# [0:0:0] 2025-03-28T09:37:18.857969Z 
node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:1057:2618], Recipient [2:1348:2418] 2025-03-28T09:37:18.858001Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:37:18.858097Z node 2 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:37:18.858125Z node 2 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] ... ing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:21.715425Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037889 consumer 72075186224037889 txId 281474976715669 2025-03-28T09:37:21.748861Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:2306:2586], Recipient [2:2187:2544]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:21.748907Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:21.748938Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [1:2181:3335], serverId# [2:2306:2586], sessionId# [0:0:0] 2025-03-28T09:37:21.749482Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [2:2307:2587], Recipient [2:2187:2544]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:21.749508Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:21.749528Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037890, clientId# [2:2179:2542], serverId# [2:2307:2587], sessionId# [0:0:0] 2025-03-28T09:37:21.749612Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2062:2514], Recipient [2:2187:2544]: {TEvReadSet step# 2517 txid# 281474976715667 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-28T09:37:21.749632Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:37:21.749655Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715667 2025-03-28T09:37:21.749706Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2517 txid# 281474976715667 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-28T09:37:21.750024Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:37:21.750180Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2062:2514], Recipient [2:2187:2544]: {TEvReadSet step# 2667 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-28T09:37:21.750201Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:37:21.750221Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715669 2025-03-28T09:37:21.750264Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2667 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 
72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-28T09:37:21.750561Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2062:2514], Recipient [2:2187:2544]: {TEvReadSet step# 2667 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-03-28T09:37:21.750594Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:37:21.750620Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715669 2025-03-28T09:37:21.750654Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2667 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-03-28T09:37:21.750800Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:2120:3289], Recipient [2:2306:2586] 2025-03-28T09:37:21.750825Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:37:21.750847Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715668 2025-03-28T09:37:21.750876Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2518 txid# 281474976715668 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2025-03-28T09:37:21.751127Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:37:21.751251Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:2120:3289], Recipient [2:2306:2586] 2025-03-28T09:37:21.751269Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:37:21.751290Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715669 2025-03-28T09:37:21.751356Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2667 txid# 281474976715669 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-28T09:37:21.751462Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [1:2120:3289], Recipient [2:2306:2586] 2025-03-28T09:37:21.751482Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:37:21.751522Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037890 source 72075186224037889 dest 72075186224037890 producer 72075186224037889 txId 281474976715669 2025-03-28T09:37:21.751545Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2667 txid# 281474976715669 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2025-03-28T09:37:21.751609Z node 2 :TX_DATASHARD DEBUG: Complete [2667 : 281474976715669] from 72075186224037890 at tablet 72075186224037890 send result to client [1:2258:3353], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:37:21.751911Z node 2 :TX_DATASHARD DEBUG: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:37:21.755592Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-28T09:37:21.755792Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2187:2544], Recipient [2:2062:2514]: {TEvReadSet step# 2517 txid# 281474976715667 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 1} 2025-03-28T09:37:21.755817Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:21.755841Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715667 2025-03-28T09:37:21.755906Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-28T09:37:21.755945Z node 2 :TX_DATASHARD DEBUG: Send RS Reply at 72075186224037890 {TEvReadSet step# 2667 txid# 281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-28T09:37:21.755978Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-28T09:37:21.756760Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2187:2544], Recipient [2:2062:2514]: {TEvReadSet step# 2667 txid# 281474976715669 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-03-28T09:37:21.756795Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:37:21.756817Z node 2 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037890 dest 72075186224037888 producer 72075186224037890 txId 281474976715669 2025-03-28T09:37:21.756858Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 2667 txid# 281474976715669 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-03-28T09:37:21.756893Z node 2 :TX_DATASHARD NOTICE: Outdated readset for 2667:281474976715669 at 72075186224037888 2025-03-28T09:37:21.756945Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-03-28T09:37:21.757964Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-28T09:37:21.758029Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-28T09:37:21.758055Z node 2 :TX_DATASHARD DEBUG: Send RS Reply at 72075186224037890 {TEvReadSet step# 2667 txid# 281474976715669 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-28T09:37:21.758186Z node 2 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037890 2025-03-28T09:37:21.758283Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2187:2544], Recipient [1:2120:3289] 2025-03-28T09:37:21.758309Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:21.758337Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715668 2025-03-28T09:37:21.758391Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2187:2544], Recipient [2:2062:2514]: {TEvReadSet step# 2667 txid# 
281474976715669 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 3} 2025-03-28T09:37:21.758415Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:21.758436Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715669 2025-03-28T09:37:21.758662Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [2:2187:2544], Recipient [1:2120:3289] 2025-03-28T09:37:21.758708Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:37:21.758742Z node 1 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037890 dest 72075186224037889 producer 72075186224037890 txId 281474976715669 2025-03-28T09:37:21.758784Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2667 txid# 281474976715669 TabletSource# 72075186224037890 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037890 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-03-28T09:37:21.758808Z node 1 :TX_DATASHARD NOTICE: Outdated readset for 2667:281474976715669 at 72075186224037889 2025-03-28T09:37:21.758839Z node 1 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-03-28T09:37:21.758887Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:2187:2544], Recipient [1:2120:3289] 2025-03-28T09:37:21.758903Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:21.758922Z node 1 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715669 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] Test command err: 2025-03-28T09:37:16.893401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:16.893649Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:16.893681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015a4/r3tmp/tmpsHm5IN/pdisk_1.dat 2025-03-28T09:37:17.144950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:37:17.177596Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:17.217217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:17.217329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:17.228511Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:17.307600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:17.339279Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:37:17.340066Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:37:17.340365Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:37:17.340518Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:17.369832Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:37:17.370314Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:17.370418Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:37:17.371620Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:37:17.371673Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:37:17.371711Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:37:17.371933Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:37:17.372018Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:37:17.372067Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:37:17.382619Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:37:17.401874Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:37:17.402010Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:37:17.402088Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:37:17.402111Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:37:17.402148Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:37:17.402175Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:37:17.402326Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:17.402363Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:17.402576Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:37:17.402631Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:37:17.402957Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:37:17.402998Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:17.403031Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:37:17.403056Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:37:17.403076Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:37:17.403098Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:37:17.403124Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:37:17.403210Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:17.403246Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:17.403279Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:37:17.403327Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:37:17.403352Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:37:17.403413Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:37:17.403584Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:37:17.403625Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:37:17.403681Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:37:17.403712Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:37:17.403745Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:37:17.403770Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2025-03-28T09:37:17.403791Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:37:17.404005Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:37:17.404044Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:37:17.404077Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:37:17.404098Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:37:17.404135Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:37:17.404165Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:37:17.404191Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:37:17.404211Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:37:17.404227Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:37:17.405310Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:37:17.405345Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:37:17.415810Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:37:17.415851Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:37:17.415901Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:37:17.415939Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:37:17.415990Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:37:17.563248Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:17.563284Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:17.563306Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:37:17.563585Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:37:17.563619Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:37:17.563711Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:37:17.563739Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:37:17.563776Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:37:17.563801Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:37:17.567093Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:37:17.567148Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:37:17.567735Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:17.567773Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:17.567827Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:37:1 ... because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state); 2025-03-28T09:37:22.276907Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1031:2734] TxId: 281474976715671. Ctx: { TraceId: 01jqe1syqv18v89q882g1d15p3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWRmZThjY2ItODlhMGVlZGUtMzdmYjNjODMtMzk5ZjliNzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. task: 1, does not have the CA id yet or is already complete 2025-03-28T09:37:22.276926Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1031:2734] TxId: 281474976715671. Ctx: { TraceId: 01jqe1syqv18v89q882g1d15p3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWRmZThjY2ItODlhMGVlZGUtMzdmYjNjODMtMzk5ZjliNzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shutdown immediately - nothing to wait 2025-03-28T09:37:22.276958Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1031:2734] TxId: 281474976715671. Ctx: { TraceId: 01jqe1syqv18v89q882g1d15p3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWRmZThjY2ItODlhMGVlZGUtMzdmYjNjODMtMzk5ZjliNzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:37:22.277380Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqe1syqp8p130yrkaz31t2q6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzM5NWU0Yi1hZjJlNmU0My00YWMzNjFhOS0yMjUyYWZkNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:22.277420Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715677. Ctx: { TraceId: 01jqe1syqp8p130yrkaz31t2q6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzM5NWU0Yi1hZjJlNmU0My00YWMzNjFhOS0yMjUyYWZkNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-28T09:37:22.277442Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1148:2718] TxId: 281474976715677. Ctx: { TraceId: 01jqe1syqp8p130yrkaz31t2q6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzM5NWU0Yi1hZjJlNmU0My00YWMzNjFhOS0yMjUyYWZkNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-28T09:37:22.277477Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1148:2718] TxId: 281474976715677. Ctx: { TraceId: 01jqe1syqp8p130yrkaz31t2q6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzM5NWU0Yi1hZjJlNmU0My00YWMzNjFhOS0yMjUyYWZkNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:37:22.277499Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1148:2718] TxId: 281474976715677. Ctx: { TraceId: 01jqe1syqp8p130yrkaz31t2q6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzM5NWU0Yi1hZjJlNmU0My00YWMzNjFhOS0yMjUyYWZkNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-28T09:37:22.277592Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWRmZThjY2ItODlhMGVlZGUtMzdmYjNjODMtMzk5ZjliNzI=, ActorId: [2:899:2734], ActorState: ExecuteState, TraceId: 01jqe1syqv18v89q882g1d15p3, Create QueryResponse for error on request, msg: 2025-03-28T09:37:22.277984Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715678. Resolved key sets: 0 2025-03-28T09:37:22.278406Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqe1syqtc7w6rm8b4mc1bj17, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTE3NzdkNTgtZDM5ZTdlZmUtYjdlMzA4ZWYtZTg4ZDMyZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:22.278426Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715678. Ctx: { TraceId: 01jqe1syqtc7w6rm8b4mc1bj17, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTE3NzdkNTgtZDM5ZTdlZmUtYjdlMzA4ZWYtZTg4ZDMyZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-28T09:37:22.278446Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1155:2726] TxId: 281474976715678. Ctx: { TraceId: 01jqe1syqtc7w6rm8b4mc1bj17, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTE3NzdkNTgtZDM5ZTdlZmUtYjdlMzA4ZWYtZTg4ZDMyZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-28T09:37:22.278475Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1155:2726] TxId: 281474976715678. Ctx: { TraceId: 01jqe1syqtc7w6rm8b4mc1bj17, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTE3NzdkNTgtZDM5ZTdlZmUtYjdlMzA4ZWYtZTg4ZDMyZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:37:22.278495Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1155:2726] TxId: 281474976715678. Ctx: { TraceId: 01jqe1syqtc7w6rm8b4mc1bj17, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZTE3NzdkNTgtZDM5ZTdlZmUtYjdlMzA4ZWYtZTg4ZDMyZDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-28T09:37:22.278513Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715679. Resolved key sets: 0 2025-03-28T09:37:22.278856Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715679. 
Ctx: { TraceId: 01jqe1syqt4qp099rb7dx51c5q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDEzMzc5ZTctOTA4ZDEzMzctYzBmMmQ3NzctZWQxNWU3YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:22.278889Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715679. Ctx: { TraceId: 01jqe1syqt4qp099rb7dx51c5q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDEzMzc5ZTctOTA4ZDEzMzctYzBmMmQ3NzctZWQxNWU3YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-28T09:37:22.278917Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1157:2728] TxId: 281474976715679. Ctx: { TraceId: 01jqe1syqt4qp099rb7dx51c5q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDEzMzc5ZTctOTA4ZDEzMzctYzBmMmQ3NzctZWQxNWU3YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-28T09:37:22.278952Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1157:2728] TxId: 281474976715679. Ctx: { TraceId: 01jqe1syqt4qp099rb7dx51c5q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDEzMzc5ZTctOTA4ZDEzMzctYzBmMmQ3NzctZWQxNWU3YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:37:22.278974Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1157:2728] TxId: 281474976715679. Ctx: { TraceId: 01jqe1syqt4qp099rb7dx51c5q, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NDEzMzc5ZTctOTA4ZDEzMzctYzBmMmQ3NzctZWQxNWU3YzU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-28T09:37:22.279158Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715680. Resolved key sets: 0 2025-03-28T09:37:22.279182Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715681. Resolved key sets: 0 2025-03-28T09:37:22.279404Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715680. Ctx: { TraceId: 01jqe1syqv528s9yh3r5enmbe5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjI4ZTM4NDctYTQ0MTgzMGQtYzYzYWRlZTAtYTE1MDQ0OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:22.279427Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715680. Ctx: { TraceId: 01jqe1syqv528s9yh3r5enmbe5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjI4ZTM4NDctYTQ0MTgzMGQtYzYzYWRlZTAtYTE1MDQ0OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-28T09:37:22.279448Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1161:2731] TxId: 281474976715680. Ctx: { TraceId: 01jqe1syqv528s9yh3r5enmbe5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjI4ZTM4NDctYTQ0MTgzMGQtYzYzYWRlZTAtYTE1MDQ0OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-28T09:37:22.279483Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1161:2731] TxId: 281474976715680. 
Ctx: { TraceId: 01jqe1syqv528s9yh3r5enmbe5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjI4ZTM4NDctYTQ0MTgzMGQtYzYzYWRlZTAtYTE1MDQ0OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:37:22.279518Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1161:2731] TxId: 281474976715680. Ctx: { TraceId: 01jqe1syqv528s9yh3r5enmbe5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MjI4ZTM4NDctYTQ0MTgzMGQtYzYzYWRlZTAtYTE1MDQ0OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 2025-03-28T09:37:22.279542Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqe1syqv18v89q882g1d15p3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWRmZThjY2ItODlhMGVlZGUtMzdmYjNjODMtMzk5ZjliNzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:22.279561Z node 2 :KQP_EXECUTER DEBUG: TxId: 281474976715681. Ctx: { TraceId: 01jqe1syqv18v89q882g1d15p3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWRmZThjY2ItODlhMGVlZGUtMzdmYjNjODMtMzk5ZjliNzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, snapshot: {0, 0} 2025-03-28T09:37:22.279580Z node 2 :KQP_EXECUTER INFO: ActorId: [2:1164:2734] TxId: 281474976715681. Ctx: { TraceId: 01jqe1syqv18v89q882g1d15p3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWRmZThjY2ItODlhMGVlZGUtMzdmYjNjODMtMzk5ZjliNzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 1, datashardTxs: 0, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-28T09:37:22.279604Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1164:2734] TxId: 281474976715681. Ctx: { TraceId: 01jqe1syqv18v89q882g1d15p3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWRmZThjY2ItODlhMGVlZGUtMzdmYjNjODMtMzk5ZjliNzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:37:22.279624Z node 2 :KQP_EXECUTER DEBUG: ActorId: [2:1164:2734] TxId: 281474976715681. Ctx: { TraceId: 01jqe1syqv18v89q882g1d15p3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MWRmZThjY2ItODlhMGVlZGUtMzdmYjNjODMtMzk5ZjliNzI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] Test command err: 2025-03-28T09:34:54.949192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:54.949470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:54.949524Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e5d/r3tmp/tmpkvW5MB/pdisk_1.dat 2025-03-28T09:34:55.318025Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62470, node 1 2025-03-28T09:34:55.545505Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:55.545561Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:55.545590Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:55.546006Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:55.548320Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:55.630896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:55.631029Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:55.644894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16817 2025-03-28T09:34:56.153985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:59.175357Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:34:59.207335Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.207440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.245178Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:59.246959Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:59.482540Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.483038Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.483540Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.483658Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.483903Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.483978Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.484040Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.484106Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.484170Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.644540Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.644663Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.657539Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:59.772627Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:59.814535Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:34:59.814636Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:34:59.843635Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:34:59.843794Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:34:59.844009Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:34:59.844077Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:34:59.844136Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:34:59.844198Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:34:59.844265Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:34:59.844316Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:34:59.844800Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:34:59.876656Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:34:59.876820Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:34:59.884458Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:34:59.890722Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:34:59.891032Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:34:59.897453Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:34:59.915684Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:34:59.915735Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:34:59.915860Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:34:59.930785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:34:59.937914Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:34:59.938060Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:00.113203Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:00.248716Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:00.325985Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:01.159973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.160074Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.173080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:01.292154Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:01.292349Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:01.292551Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:01.292628Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:01.292740Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:01.292861Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:01.292989Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:01.293175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:01.293328Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:01.293457Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:01.293581Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:01.293683Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:01.318403Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:01.318496Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... p traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:37:13.447733Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:13.450355Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:37:13.453223Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7022:5165], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:13.453290Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7032:5170], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:13.453353Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:13.462526Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-28T09:37:13.501960Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7036:5173], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-28T09:37:13.671846Z node 2 :TX_PROXY ERROR: Actor# [2:7131:5220] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:13.720541Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7160:5235]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:13.720834Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:37:13.720930Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7162:5237] 2025-03-28T09:37:13.721004Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7162:5237] 2025-03-28T09:37:13.721350Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7163:5238] 2025-03-28T09:37:13.721530Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7162:5237], server id = [2:7163:5238], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:13.721591Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7163:5238], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:37:13.721648Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-28T09:37:13.721810Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:37:13.721891Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7160:5235], StatRequests.size() = 1 2025-03-28T09:37:13.829581Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2Q2ZWQ3ZjMtOTg0NTExNDMtYWQwNmNlZWMtOGRkNDc3ZTc=, TxId: 2025-03-28T09:37:13.829646Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2Q2ZWQ3ZjMtOTg0NTExNDMtYWQwNmNlZWMtOGRkNDc3ZTc=, TxId: 2025-03-28T09:37:13.829994Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:13.843211Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:37:13.843269Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:13.886948Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:37:13.887035Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:37:13.962539Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7162:5237], schemeshard count = 1 2025-03-28T09:37:14.917566Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:14.917661Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:14.920705Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:14.936510Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:14.937005Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:14.937067Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-03-28T09:37:14.950326Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:14.961221Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) ... unblocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR 2025-03-28T09:37:14.962558Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-03-28T09:37:14.962658Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-03-28T09:37:14.963267Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2800:3221] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-03-28T09:37:14.963336Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2800:3221] 2025-03-28T09:37:14.976282Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-28T09:37:14.976355Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-03-28T09:37:16.121233Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:16.121376Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:16.121424Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:16.122011Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:16.134738Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:16.135045Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:16.135102Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:16.135776Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:16.148155Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:16.148321Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:37:16.148722Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7292:5315], server id = [2:7293:5316], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:16.148814Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7292:5315], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:16.151799Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:16.151888Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:16.152102Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7292:5315], server id = [2:7293:5316], tablet id = 72075186224037899 2025-03-28T09:37:16.152135Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:16.152194Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:16.152349Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:16.152613Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:16.155525Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:16.182639Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7313:5335]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:16.182855Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:16.182894Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7313:5335], StatRequests.size() = 1 2025-03-28T09:37:16.280321Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGE2NmY3MjktNzVmMzc1NDAtMWNkMTUyYi1hYzFlYzc4ZQ==, TxId: 2025-03-28T09:37:16.280396Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGE2NmY3MjktNzVmMzc1NDAtMWNkMTUyYi1hYzFlYzc4ZQ==, TxId: 2025-03-28T09:37:16.280932Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:16.294905Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:16.294980Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2800:3221] 2025-03-28T09:37:16.804692Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-28T09:37:16.804760Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:37:18.579445Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:37:18.579642Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:37:18.590425Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:20.755938Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:20.756016Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:37:21.833127Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:37:21.854637Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-28T09:37:21.854730Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] Test command err: 2025-03-28T09:37:15.674636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:15.674923Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:15.674959Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0015ae/r3tmp/tmpjBFx1O/pdisk_1.dat 2025-03-28T09:37:15.934936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:37:15.961157Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:15.997251Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:15.997356Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:16.008302Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:16.086330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:16.114913Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:37:16.115710Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:37:16.116002Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:37:16.116149Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:16.146263Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:37:16.146770Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:16.146846Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:37:16.148010Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:37:16.148066Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:37:16.148119Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:37:16.148360Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:37:16.148439Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:37:16.148501Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:37:16.159079Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:37:16.176476Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:37:16.176614Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:37:16.176707Z node 1 :TX_DATASHARD DEBUG: 
Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:37:16.176733Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:37:16.176774Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:37:16.176801Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:37:16.176963Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:16.177002Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:16.177196Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:37:16.177283Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:37:16.177715Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:37:16.177771Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:16.177809Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:37:16.177844Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:37:16.177877Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:37:16.177909Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:37:16.177947Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:37:16.178031Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:16.178076Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:16.178113Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:37:16.178165Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:37:16.178191Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:37:16.178261Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:37:16.178408Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:37:16.178445Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:37:16.178500Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:37:16.178556Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:37:16.178607Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:37:16.178631Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
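
An aside on the trace pattern above: the repeated "Trying to execute ... on unit X" / "Execution status ... is Y" / "Add ... to execution unit Z" lines suggest a pipeline of execution units that a proposed transaction is pushed through, with a per-unit status deciding whether it advances immediately, defers its completion, or parks until an external event. Below is a minimal, self-contained C++ sketch of that pattern, assuming nothing beyond what the trace shows: the unit names and status words are copied from the log, but the types and control flow are invented for illustration and are not YDB source.

    // pipeline_sketch.cpp -- hypothetical illustration of the execution-unit
    // pattern visible in the TX_DATASHARD trace; not YDB source.
    #include <cstdio>
    #include <functional>
    #include <string>
    #include <vector>

    // Per-unit outcomes, mirroring "is Executed / DelayComplete / not ready".
    enum class EStatus { Executed, DelayComplete, NotReady };

    struct TUnit {
        std::string Name;
        std::function<EStatus()> Run;
    };

    int main() {
        // The unit sequence named in the trace for a scheme transaction.
        std::vector<TUnit> plan = {
            {"CheckSchemeTx", [] { return EStatus::Executed; }},
            {"StoreSchemeTx", [] { return EStatus::DelayComplete; }},
            {"FinishPropose", [] { return EStatus::DelayComplete; }},
            {"WaitForPlan",   [] { return EStatus::NotReady; }}, // waits for TEvPlanStep
        };
        for (const auto& unit : plan) {
            EStatus st = unit.Run();
            std::printf("unit %s -> %s\n", unit.Name.c_str(),
                        st == EStatus::Executed      ? "Executed" :
                        st == EStatus::DelayComplete ? "DelayComplete" : "NotReady");
            if (st == EStatus::NotReady) {
                // The operation parks here until an external event re-activates
                // it, mirroring "is not ready to execute on unit WaitForPlan".
                break;
            }
        }
        return 0;
    }
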
2025-03-28T09:37:16.178653Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:37:16.178874Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:37:16.178895Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:37:16.178915Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:37:16.178935Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:37:16.179007Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:37:16.179042Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:37:16.179064Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:37:16.179083Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:37:16.179101Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:37:16.180105Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:37:16.180134Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:37:16.190741Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:37:16.190793Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:37:16.190829Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:37:16.190858Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:37:16.190920Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:37:16.338477Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:16.338528Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:16.338563Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:37:16.339019Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:37:16.339059Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:37:16.339166Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:37:16.339227Z node 1 :TX_DATASHARD TRACE: Execution status 
for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:37:16.339283Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:37:16.339318Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:37:16.347808Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:37:16.347877Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:37:16.348687Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:16.348729Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:16.348778Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:37:1 ... actorId: [2:1186:2938] 2025-03-28T09:37:22.012805Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037892 2025-03-28T09:37:22.012834Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037892 2025-03-28T09:37:22.012868Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-28T09:37:22.013004Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1027:2823], Recipient [2:1027:2823]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:22.013032Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:22.013137Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1027:2823], Recipient [2:756:2634]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037892 OperationCookie: 281474976715665 2025-03-28T09:37:22.013170Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715665 2025-03-28T09:37:22.013390Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1181:2933], Recipient [2:756:2634]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037892 ClientId: [2:1181:2933] ServerId: [2:1183:2935] } 2025-03-28T09:37:22.013410Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-28T09:37:22.013630Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1027:2823]: {TEvRegisterTabletResult TabletId# 72075186224037892 Entry# 2000} 2025-03-28T09:37:22.013651Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-03-28T09:37:22.013673Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 2000 2025-03-28T09:37:22.013699Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-28T09:37:22.013767Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-28T09:37:22.013796Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 
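
The "Planned transaction txId 281474976715657 at step 1000" record above, together with the PlanQueue unit, implies that planned transactions are executed in (step, txId) order. A tiny sketch of that ordering follows; the container choice and the TStepTxId name are assumptions made for illustration, not YDB's actual data structure.

    // plan_queue_sketch.cpp -- hypothetical sketch of (step, txId) ordering
    // implied by the PlanQueue trace lines; not YDB source.
    #include <cstdint>
    #include <cstdio>
    #include <set>
    #include <utility>

    using TStepTxId = std::pair<uint64_t, uint64_t>; // (plan step, txId)

    int main() {
        std::set<TStepTxId> planQueue; // ordered lexicographically by (step, txId)
        planQueue.insert({1000, 281474976715657ULL});
        planQueue.insert({2000, 281474976715665ULL});

        while (!planQueue.empty()) {
            auto [step, txId] = *planQueue.begin(); // lowest (step, txId) first
            planQueue.erase(planQueue.begin());
            std::printf("execute tx %llu at step %llu\n",
                        (unsigned long long)txId, (unsigned long long)step);
        }
        return 0;
    }
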
2025-03-28T09:37:22.013823Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037892 2025-03-28T09:37:22.013849Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-03-28T09:37:22.013871Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2025-03-28T09:37:22.013893Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037892 TxInFly 0 2025-03-28T09:37:22.013934Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-03-28T09:37:22.014099Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1183:2935], Recipient [2:1027:2823]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:37:22.014123Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:37:22.014155Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037892, clientId# [2:1181:2933], serverId# [2:1183:2935], sessionId# [0:0:0] 2025-03-28T09:37:22.014326Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1027:2823]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-03-28T09:37:22.014348Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-03-28T09:37:22.014373Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 0 next step 2000 2025-03-28T09:37:22.014408Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037892: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-03-28T09:37:22.014457Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037892 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-03-28T09:37:22.025129Z node 2 :TX_DATASHARD DEBUG: 72075186224037893 ack snapshot OpId 281474976715665 2025-03-28T09:37:22.025220Z node 2 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037893 2025-03-28T09:37:22.025314Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037893 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:37:22.025381Z node 2 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037893 2025-03-28T09:37:22.025434Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037893, actorId: [2:1190:2942] 2025-03-28T09:37:22.025458Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037893 2025-03-28T09:37:22.025492Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037893 2025-03-28T09:37:22.025518Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-03-28T09:37:22.025664Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:1029:2825], Recipient [2:1029:2825]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:22.025725Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:22.025960Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553157, Sender [2:1029:2825], Recipient [2:756:2634]: NKikimrTxDataShard.TEvSplitTransferSnapshotAck TabletId: 72075186224037893 OperationCookie: 281474976715665 
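
One more reading of the trace above: after a split, each destination shard registers with the mediator timecast, subscribes to a read step, and only then logs "CheckMediatorStateRestored ... waitStep# 2000 readStep# 2000 observedStep# 2000" followed by promoting its unprotected read edge. The exact predicate is not shown in the log; the sketch below guesses a plausible condition (all three steps caught up) purely for illustration, with an invented helper name.

    // mediator_state_sketch.cpp -- hypothetical reading of the
    // "CheckMediatorStateRestored" lines; the condition is an assumption,
    // not YDB source.
    #include <cstdint>
    #include <cstdio>

    bool MediatorStateRestored(uint64_t waitStep, uint64_t readStep,
                               uint64_t observedStep) {
        // Assumed: reads at the new step are safe only once both the
        // subscribed read step and the observed step reach the wait step.
        return readStep >= waitStep && observedStep >= waitStep;
    }

    int main() {
        // Values taken from the trace: waitStep# 2000 readStep# 2000 observedStep# 2000
        if (MediatorStateRestored(2000, 2000, 2000)) {
            std::printf("promote UnprotectedReadEdge to v2000/max\n");
        }
        return 0;
    }
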
2025-03-28T09:37:22.026005Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 Received snapshot Ack from dst 72075186224037893 for split OpId 281474976715665 2025-03-28T09:37:22.026457Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [2:1182:2934], Recipient [2:756:2634]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [2:1182:2934] ServerId: [2:1184:2936] } 2025-03-28T09:37:22.026484Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-28T09:37:22.026525Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037893 2025-03-28T09:37:22.026547Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:22.026568Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037893 2025-03-28T09:37:22.026587Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037893 has no attached operations 2025-03-28T09:37:22.026604Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037893 2025-03-28T09:37:22.026623Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037893 TxInFly 0 2025-03-28T09:37:22.026644Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037893 2025-03-28T09:37:22.026828Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [2:24:2071], Recipient [2:1029:2825]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 2000} 2025-03-28T09:37:22.026848Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-03-28T09:37:22.026867Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 2000 2025-03-28T09:37:22.026888Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2025-03-28T09:37:22.026968Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269877764, Sender [2:1184:2936], Recipient [2:1029:2825]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:37:22.026985Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2025-03-28T09:37:22.027007Z node 2 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1182:2934], serverId# [2:1184:2936], sessionId# [0:0:0] 2025-03-28T09:37:22.027186Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [2:24:2071], Recipient [2:1029:2825]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 2000 ReadStep# 2000 } 2025-03-28T09:37:22.027214Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-03-28T09:37:22.027242Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 2000 2025-03-28T09:37:22.027266Z node 2 :TX_DATASHARD DEBUG: CheckMediatorStateRestored at 72075186224037893: waitStep# 2000 readStep# 2000 observedStep# 2000 2025-03-28T09:37:22.027295Z node 2 :TX_DATASHARD TRACE: CheckMediatorStateRestored at 72075186224037893 promoting UnprotectedReadEdge to v2000/18446744073709551615 2025-03-28T09:37:22.037863Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split to schemeshard 281474976715665 2025-03-28T09:37:22.039934Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553158, Sender 
[2:409:2404], Recipient [2:761:2636] 2025-03-28T09:37:22.039993Z node 2 :TX_DATASHARD DEBUG: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2025-03-28T09:37:22.041504Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2025-03-28T09:37:22.041557Z node 2 :TX_DATASHARD DEBUG: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:37:22.041937Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [2:748:2629], Recipient [2:756:2634]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-03-28T09:37:22.574806Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [2:977:2681], Recipient [2:666:2570]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 977 RawX2: 8589937273 } TxBody: " \0008\000`\200\200\200\005j\324\006\010\001\022\225\006\010\001\022\024\n\022\t\321\003\000\000\000\000\000\000\021y\n\000\000\002\000\000\000\032\256\002\010\240\215\006\022\207\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? \004\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\ 2025-03-28T09:37:22.574870Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:37:22.574950Z node 2 :TX_DATASHARD NOTICE: Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2025-03-28T09:37:22.575292Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2025-03-28T09:37:22.575636Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 >> AnalyzeColumnshard::Analyze [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] Test command err: 2025-03-28T09:34:55.674568Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:55.674893Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:55.674967Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000c90/r3tmp/tmpYjER8O/pdisk_1.dat 2025-03-28T09:34:56.030559Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13841, node 1 2025-03-28T09:34:56.238613Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:56.238672Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:56.238714Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:56.239158Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:56.241262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:56.323733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:56.323870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:56.337236Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12267 2025-03-28T09:34:56.864676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:59.683301Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:34:59.717840Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.717947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.756292Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:59.758022Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:59.973442Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.973826Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.974188Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.974271Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.974415Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.974478Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.974544Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.974593Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.974638Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.139130Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:00.139246Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:00.152329Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:00.301094Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:00.341117Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:00.341203Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:00.366412Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:00.366584Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:00.366854Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:00.366970Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:00.367055Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:00.367121Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:00.367181Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:00.367235Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:00.367759Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:00.400644Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:00.400764Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:00.408523Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:00.415096Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:00.415344Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:00.422218Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:00.442060Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:00.442130Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:00.442195Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:00.457491Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:00.463501Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:00.463626Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:00.603246Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:00.741039Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:00.818937Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:01.739499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.739632Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.758963Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:01.872479Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:01.872763Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:01.873097Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:01.873258Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:01.873406Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:01.873544Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:01.873652Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:01.873783Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:01.873940Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:01.874119Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:01.874268Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:01.874394Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:01.902902Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:01.902975Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 03-28T09:35:15.606106Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:16.005350Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-28T09:37:16.005428Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:37:16.005468Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:37:16.005505Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-28T09:37:17.583975Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-28T09:37:17.584041Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 221.000000s, at schemeshard: 72075186224037897 2025-03-28T09:37:17.584236Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-03-28T09:37:17.596776Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:37:18.807233Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:18.807325Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:37:18.807374Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:37:18.807427Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-28T09:37:18.807476Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:37:18.807894Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:18.811578Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:37:18.815358Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7024:5166], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:18.815459Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7034:5171], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:18.815563Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:18.829016Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-28T09:37:18.882754Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7038:5174], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-28T09:37:19.109056Z node 2 :TX_PROXY ERROR: Actor# [2:7134:5220] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:19.171992Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7163:5235]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:19.172143Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:37:19.172207Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7165:5237] 2025-03-28T09:37:19.172259Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7165:5237] 2025-03-28T09:37:19.172470Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7166:5238] 2025-03-28T09:37:19.172555Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7166:5238], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:37:19.172599Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-28T09:37:19.172690Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7165:5237], server id = [2:7166:5238], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:19.172746Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:37:19.172799Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7163:5235], StatRequests.size() = 1 2025-03-28T09:37:19.269877Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmU2YzQ4OGYtZTFmZDRlMS05YzY3MGQ1Ni0xZDQzYWYxNA==, TxId: 2025-03-28T09:37:19.269952Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmU2YzQ4OGYtZTFmZDRlMS05YzY3MGQ1Ni0xZDQzYWYxNA==, TxId: 2025-03-28T09:37:19.270357Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:19.283105Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:37:19.283158Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:19.346377Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:37:19.346458Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:37:19.420859Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7165:5237], schemeshard count = 1 2025-03-28T09:37:20.429918Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:20.430012Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:20.433473Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:20.449610Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:20.450171Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:20.450259Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-03-28T09:37:20.463789Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:20.474833Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-03-28T09:37:20.475859Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-03-28T09:37:20.475975Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-03-28T09:37:20.489215Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-28T09:37:21.746833Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:21.746935Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:21.746988Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:21.747427Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:21.760149Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:21.760475Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:21.760545Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:21.761165Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:37:21.773821Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:21.773995Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:37:21.774411Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7292:5315], server id = [2:7293:5316], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:21.774492Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7292:5315], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:21.777510Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:21.777606Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:21.777805Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:21.777947Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:21.778118Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7292:5315], server id = [2:7293:5316], tablet id = 72075186224037899 2025-03-28T09:37:21.778146Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:21.778297Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:37:21.780179Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:21.807779Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7313:5335]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:21.807948Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:21.807979Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7313:5335], StatRequests.size() = 1 2025-03-28T09:37:21.923754Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzI5ZjYzNTQtYjQ5MzE4MS0xOWVkMDI0NS02NTMzMjNmOQ==, TxId: 2025-03-28T09:37:21.923806Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzI5ZjYzNTQtYjQ5MzE4MS0xOWVkMDI0NS02NTMzMjNmOQ==, TxId: 2025-03-28T09:37:21.924205Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:21.937787Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:21.937852Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2802:3221] >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeSave [GOOD] Test command err: 2025-03-28T09:34:55.262126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:55.262446Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:55.262505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000de9/r3tmp/tmpyGGFeF/pdisk_1.dat 2025-03-28T09:34:55.676288Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4618, node 1 2025-03-28T09:34:55.894751Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:55.894807Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:55.894838Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:55.895274Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:55.901013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:55.985244Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:55.985375Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:55.999192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30758 2025-03-28T09:34:56.517497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:59.363220Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:34:59.391153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.391247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.428791Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:59.430366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:59.659313Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.659953Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.660538Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.660707Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.661007Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.661133Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.661225Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.661309Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.661407Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.826347Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.826460Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.839426Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:59.957507Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:59.998895Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:34:59.999001Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:00.028704Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:00.028871Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:00.029082Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:00.029138Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:00.029186Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:00.029236Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:00.029296Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:00.029352Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:00.029776Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:00.061035Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:00.061148Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:00.069595Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:00.076808Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:00.077118Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:00.082928Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:00.097314Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:00.097382Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:00.097437Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:00.108863Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:00.119944Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:00.120106Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:00.265040Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:00.434319Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:00.513311Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:01.346390Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.346501Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.362020Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:01.494713Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:01.494988Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:01.495286Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:01.495431Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:01.495552Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:01.495730Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:01.495951Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:01.496103Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:01.496287Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:01.496415Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:01.496545Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:01.496692Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:01.528540Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:01.528653Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7292:5316] 2025-03-28T09:37:19.370792Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7292:5316] 2025-03-28T09:37:19.409703Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:37:19.409776Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:37:19.410385Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:37:19.411275Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:37:19.411646Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-03-28T09:37:19.411710Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-03-28T09:37:19.411752Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-03-28T09:37:19.411800Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-03-28T09:37:19.411844Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1743154639337459 2025-03-28T09:37:19.411883Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-03-28T09:37:19.411923Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded global traversal round: 2 2025-03-28T09:37:19.412024Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-03-28T09:37:19.412113Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:37:19.412219Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-03-28T09:37:19.412309Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-03-28T09:37:19.412406Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-03-28T09:37:19.412477Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:37:19.412661Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:19.413372Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:37:19.414023Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:19.414106Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:19.414232Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:37:19.415830Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:19.415896Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:19.416568Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:19.452104Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:19.452294Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-28T09:37:19.452913Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7339:5347], server id = [2:7340:5348], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:19.452996Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7339:5347], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.453861Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:19.453948Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:19.454076Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:19.454218Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:19.454431Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:19.456435Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7339:5347], server id = [2:7340:5348], tablet id = 72075186224037899 2025-03-28T09:37:19.456471Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.456872Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:19.481562Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7358:5366]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:19.481722Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:19.481754Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7358:5366], StatRequests.size() = 1 2025-03-28T09:37:19.592687Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTNlODQ0MTEtZWI5Njc2NWUtYzIzZDI2ZWQtZjEzMGVmMmU=, TxId: 2025-03-28T09:37:19.592749Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTNlODQ0MTEtZWI5Njc2NWUtYzIzZDI2ZWQtZjEzMGVmMmU=, TxId: 2025-03-28T09:37:19.593088Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:19.605219Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7368:5372] 2025-03-28T09:37:19.605428Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7368:5372], schemeshard id = 72075186224037897 2025-03-28T09:37:19.605505Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7292:5316], server id = [2:7369:5373], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:19.605533Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7369:5373] 2025-03-28T09:37:19.605579Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7369:5373], node id = 2, have schemeshards count = 1, need schemeshards count = 0 
2025-03-28T09:37:19.618013Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:19.618090Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:19.703774Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7373:5376] 2025-03-28T09:37:19.704323Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2802:3221] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-03-28T09:37:19.704369Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2802:3221] 2025-03-28T09:37:19.704421Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-03-28T09:37:20.183977Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-28T09:37:20.184046Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:37:20.194613Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-03-28T09:37:20.194664Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:37:20.941059Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:20.941132Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:37:20.941168Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:37:22.231383Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:22.231522Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:22.231572Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:22.232145Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:22.244795Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:22.245123Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:22.245179Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:22.245496Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:22.258434Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:22.258618Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-03-28T09:37:22.259126Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7454:5419], server id = [2:7455:5420], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:22.259244Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7454:5419], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:22.260774Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:22.260888Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:22.261079Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:22.261272Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:22.261551Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:22.264222Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7454:5419], server id = [2:7455:5420], tablet id = 72075186224037899 2025-03-28T09:37:22.264270Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:22.264797Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:22.285854Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzYzZmEzYTEtZTVmZmE5NmItYmQyNDZjZTQtNzM5ZTRlMTc=, TxId: 2025-03-28T09:37:22.285930Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzYzZmEzYTEtZTVmZmE5NmItYmQyNDZjZTQtNzM5ZTRlMTc=, TxId: 2025-03-28T09:37:22.286494Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:22.300618Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:22.300708Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2802:3221] >> KqpImmediateEffects::ConflictingKeyRW1WR2 >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] Test command err: 2025-03-28T09:34:54.525227Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:54.525505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:54.525557Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e23/r3tmp/tmprEjcAs/pdisk_1.dat 2025-03-28T09:34:54.899435Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29206, node 1 2025-03-28T09:34:55.121136Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:55.121198Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:55.121232Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:55.121696Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:55.124175Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:55.214423Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:55.214570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:55.228661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6558 2025-03-28T09:34:55.749864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:58.442365Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:34:58.475481Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:58.475599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:58.514256Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:58.516292Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:58.754326Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.755019Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.755770Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.755940Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.756253Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.756393Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.756506Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.756595Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.756685Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.923366Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:58.923486Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:58.936423Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:59.064476Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:59.117545Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:34:59.117639Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:34:59.151932Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:34:59.152105Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:34:59.152311Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:34:59.152381Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:34:59.152443Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:34:59.152494Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:34:59.152560Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:34:59.152615Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:34:59.153036Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:34:59.182561Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:34:59.182640Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:34:59.189311Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:34:59.196979Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:34:59.197218Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:34:59.202994Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:34:59.221472Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:34:59.221522Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:34:59.221623Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:34:59.237441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:34:59.243864Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:34:59.244000Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:34:59.420819Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:34:59.590741Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:34:59.669275Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:00.733709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:00.733843Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:00.835776Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:01.133566Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:01.133934Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:01.134211Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:01.134349Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:01.134466Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:01.134601Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:01.134754Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:01.134923Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:01.135068Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:01.135201Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:01.135321Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:01.135473Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:01.189014Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:01.189090Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=T ... Execute 2025-03-28T09:37:22.012947Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:22.013051Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing ... blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR 2025-03-28T09:37:22.077010Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8706:6529] 2025-03-28T09:37:22.077143Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8659:6499], server id = [2:8706:6529], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:22.077285Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8706:6529], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-03-28T09:37:22.077455Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8707:6530] 2025-03-28T09:37:22.077523Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8707:6530], schemeshard id = 72075186224037897 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse 2025-03-28T09:37:22.143578Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:22.143706Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:22.144947Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:22.158761Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:22.158905Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:37:22.159723Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8712:6535], server id = [2:8717:6540], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:22.160076Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8712:6535], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:22.160454Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8713:6536], server id = [2:8718:6541], tablet id = 72075186224037900, status = OK 2025-03-28T09:37:22.160508Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8713:6536], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:22.162437Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8714:6537], server id = [2:8719:6542], tablet id = 72075186224037901, status = OK 2025-03-28T09:37:22.162505Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8714:6537], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:22.162719Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8715:6538], server id = [2:8720:6543], tablet id = 72075186224037902, status = OK 2025-03-28T09:37:22.162780Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8715:6538], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:22.163712Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8716:6539], server id = [2:8722:6545], tablet id = 72075186224037903, status = OK 2025-03-28T09:37:22.163763Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8716:6539], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:22.168135Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:22.168724Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8712:6535], server id = [2:8717:6540], tablet id = 72075186224037899 2025-03-28T09:37:22.168769Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:22.170267Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:37:22.170863Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8713:6536], server id = [2:8718:6541], tablet id = 72075186224037900 2025-03-28T09:37:22.170894Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:22.171451Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8735:6555], server id = [2:8738:6556], tablet id = 72075186224037904, status = OK 2025-03-28T09:37:22.171542Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8735:6555], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:22.171671Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-28T09:37:22.172866Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8714:6537], server id = [2:8719:6542], tablet id = 72075186224037901 2025-03-28T09:37:22.172893Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:22.173424Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-28T09:37:22.173914Z node 2 :STATISTICS DEBUG: 
EvClientConnected, node id = 2, client id = [2:8739:6557], server id = [2:8741:6559], tablet id = 72075186224037905, status = OK 2025-03-28T09:37:22.173983Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8739:6557], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:22.174380Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8715:6538], server id = [2:8720:6543], tablet id = 72075186224037902 2025-03-28T09:37:22.174405Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:22.175053Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-28T09:37:22.175869Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8740:6558], server id = [2:8745:6563], tablet id = 72075186224037906, status = OK 2025-03-28T09:37:22.175930Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8740:6558], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:22.176276Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8716:6539], server id = [2:8722:6545], tablet id = 72075186224037903 2025-03-28T09:37:22.176301Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:22.176426Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8744:6562], server id = [2:8746:6564], tablet id = 72075186224037907, status = OK 2025-03-28T09:37:22.176472Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8744:6562], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:22.177386Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8748:6566], server id = [2:8751:6568], tablet id = 72075186224037908, status = OK 2025-03-28T09:37:22.177436Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8748:6566], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:22.178896Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-28T09:37:22.179601Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8735:6555], server id = [2:8738:6556], tablet id = 72075186224037904 2025-03-28T09:37:22.179630Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:22.181341Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-28T09:37:22.181881Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8739:6557], server id = [2:8741:6559], tablet id = 72075186224037905 2025-03-28T09:37:22.181910Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:22.182480Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-28T09:37:22.182963Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8740:6558], server id = [2:8745:6563], tablet id = 72075186224037906 2025-03-28T09:37:22.182989Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:22.184153Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-28T09:37:22.184310Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-28T09:37:22.184350Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:22.184625Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:22.184835Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 
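The [TQueryBase] RunDataQuery entry that follows persists the aggregated column statistics with a single parameterized YQL upsert into the `.metadata/_statistics` table. The statement is reproduced below exactly as logged, reformatted for readability only; note that the element types of the two List parameters are truncated to a bare "List" in the captured log line, so they are left as-is rather than guessed:

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List;  -- element type truncated in the log output
    DECLARE $data AS List;         -- element type truncated in the log output
    UPSERT INTO `.metadata/_statistics`
        (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES
        ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
        ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);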
2025-03-28T09:37:22.185084Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:22.187809Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8744:6562], server id = [2:8746:6564], tablet id = 72075186224037907 2025-03-28T09:37:22.187840Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:22.188165Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8748:6566], server id = [2:8751:6568], tablet id = 72075186224037908 2025-03-28T09:37:22.188190Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:22.188522Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:22.225121Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8778:6591]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:22.225312Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:22.225350Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8778:6591], StatRequests.size() = 1 2025-03-28T09:37:22.356033Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2U2OWFlZWEtZGYzYmMzOTQtZDI5ODUwODAtMTdjYzU3YzI=, TxId: 2025-03-28T09:37:22.356122Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2U2OWFlZWEtZGYzYmMzOTQtZDI5ODUwODAtMTdjYzU3YzI=, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-03-28T09:37:22.356851Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8786:6597]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:22.357123Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:22.357844Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:22.357896Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:37:22.361000Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:22.361077Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:37:22.361153Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:37:22.367431Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] >> KqpImmediateEffects::Delete ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::Analyze [GOOD] Test command err: 2025-03-28T09:35:11.645808Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:11.645998Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:11.646032Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b1f/r3tmp/tmptPBrwM/pdisk_1.dat 2025-03-28T09:35:11.937501Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3726, node 1 2025-03-28T09:35:12.148991Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:12.149063Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:12.149091Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:12.149498Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:12.151520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:35:12.232863Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:12.232992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:12.246486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11389 2025-03-28T09:35:12.750724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:14.994734Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:15.018364Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:15.018456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:15.055099Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:15.056548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:15.281149Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:15.281609Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:15.282000Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:15.282115Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:15.282322Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:15.282381Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:15.282430Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:15.282480Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:15.282532Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:15.438132Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:15.438222Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:15.450592Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:15.554940Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:15.598420Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:15.598527Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:15.627380Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:15.627690Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:15.627931Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:15.628011Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:15.628067Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:15.628123Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:15.628194Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:15.628251Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:15.628859Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:15.658552Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:15.658642Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:15.665191Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:15.671180Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:15.671389Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:15.676906Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:15.691342Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:15.691392Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:15.691448Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:15.702316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:15.708063Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:15.708179Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:15.852430Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:16.024798Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:16.091430Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:16.930651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:16.930831Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:16.945483Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:17.053759Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:17.053931Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:17.054188Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:17.054290Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:17.054387Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:17.054561Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:17.054669Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:17.054859Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:17.055012Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:17.055175Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:17.055308Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:17.055412Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:17.077073Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:17.077186Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... 03-28T09:35:30.714322Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:17.019292Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-28T09:37:17.019379Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:37:17.019415Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:37:17.019449Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-28T09:37:18.591229Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-28T09:37:18.591319Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 213.000000s, at schemeshard: 72075186224037897 2025-03-28T09:37:18.591638Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-03-28T09:37:18.604341Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:37:19.824865Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:19.824938Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:37:19.825013Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:37:19.825072Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-28T09:37:19.825108Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:37:19.825514Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:19.828417Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:37:19.831984Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7021:5164], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:19.832056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7031:5169], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:19.832118Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:19.841278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-28T09:37:19.884106Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7035:5172], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-28T09:37:20.125028Z node 2 :TX_PROXY ERROR: Actor# [2:7133:5219] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:20.194748Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7162:5234]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:20.195015Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:37:20.195108Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7164:5236] 2025-03-28T09:37:20.195181Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7164:5236] 2025-03-28T09:37:20.195530Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7165:5237] 2025-03-28T09:37:20.195656Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7165:5237], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:37:20.195742Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-28T09:37:20.195906Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7164:5236], server id = [2:7165:5237], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:20.195983Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:37:20.196064Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7162:5234], StatRequests.size() = 1 2025-03-28T09:37:20.342380Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Mjg3NTZkMGYtYmJlNjRjMWUtNGY1YWYxMTEtYWYyZGQyNzk=, TxId: 2025-03-28T09:37:20.342462Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Mjg3NTZkMGYtYmJlNjRjMWUtNGY1YWYxMTEtYWYyZGQyNzk=, TxId: 2025-03-28T09:37:20.342987Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:20.357298Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:37:20.357386Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:20.411199Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:37:20.411278Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:37:20.497185Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7164:5236], schemeshard count = 1 2025-03-28T09:37:21.414068Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:21.414162Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:21.416627Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:21.431848Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:21.432280Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:21.432321Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-03-28T09:37:21.444888Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:21.455652Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-03-28T09:37:21.456396Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-03-28T09:37:21.456478Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-03-28T09:37:21.469518Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-28T09:37:22.627037Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:22.627181Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:22.627238Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:22.627976Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:22.641365Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:22.641773Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:22.641824Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:22.642673Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:37:22.655819Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:22.655987Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:37:22.656375Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7291:5314], server id = [2:7292:5315], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:22.656456Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7291:5314], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:22.659393Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:22.659504Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:22.659681Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:22.659855Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:22.660030Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7291:5314], server id = [2:7292:5315], tablet id = 72075186224037899 2025-03-28T09:37:22.660060Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:22.660202Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:37:22.662042Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:22.686837Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7312:5334]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:22.687022Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:22.687054Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7312:5334], StatRequests.size() = 1 2025-03-28T09:37:22.784020Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjE4Njc4ZDgtYWM3NjcwNGUtMmNhNjg0YWYtMTlhNTU3NmE=, TxId: 2025-03-28T09:37:22.784082Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjE4Njc4ZDgtYWM3NjcwNGUtMmNhNjg0YWYtMTlhNTU3NmE=, TxId: 2025-03-28T09:37:22.784459Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:22.797623Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:22.797680Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2800:3220] >> KqpImmediateEffects::UpdateOn >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WR2 >> KqpEffects::InsertAbort_Literal_Conflict+UseSink >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] Test command err: 2025-03-28T09:36:22.923178Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:22.928219Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:22.928589Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:36:22.928779Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:22.962835Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:23.020093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:36:23.020144Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:23.026739Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:23.027767Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:36:23.029590Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:36:23.029672Z node 1 :TX_DATASHARD DEBUG: 
LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:36:23.029729Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:36:23.030070Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:36:23.030138Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:36:23.030195Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:36:23.087186Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:36:23.110638Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:36:23.110916Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:36:23.111005Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:36:23.111033Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:36:23.111058Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:36:23.111094Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:23.111310Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:23.111349Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:23.111534Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:36:23.111598Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:36:23.111669Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:23.111720Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:23.111750Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:36:23.111776Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:36:23.111816Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:36:23.111859Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:36:23.111892Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:23.111958Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:23.111980Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:23.112017Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:36:23.117173Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:36:23.117225Z node 1 :TX_DATASHARD 
TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:36:23.117287Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:36:23.117411Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:36:23.117447Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:36:23.117485Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:36:23.117539Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:23.117576Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:36:23.117602Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:36:23.117626Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:23.117844Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:36:23.117871Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:36:23.117895Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:36:23.117919Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:23.117953Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:36:23.117979Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:36:23.118006Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:36:23.118035Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:23.118057Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:36:23.129904Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:36:23.129970Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:23.129999Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:23.130031Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:36:23.130101Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:36:23.130546Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:23.130588Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:23.130626Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:36:23.130678Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:36:23.130717Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:36:23.130833Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:23.130886Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:23.130921Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:36:23.130945Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:36:23.133535Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:36:23.133587Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:23.133762Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:23.133796Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:23.133841Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:23.133875Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:36:23.133901Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:23.133931Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:36:23.133957Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:36:23.133990Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:23.134048Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:36:23.134090Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:36:23.134115Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:36:23.134242Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:36:23.134269Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:23.134283Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:36:23.134297Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:36:23.134314Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:36:23.134357Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:23.134374Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:36:23.134396Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:23.134420Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:36:23.134475Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:36:23.134503Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 
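The TX_DATASHARD traces around this point walk a single scheme transaction through the datashard's execution-unit pipeline (CheckSchemeTx, StoreSchemeTx, FinishPropose, WaitForPlan, then PlanQueue and LoadTxDetails once the plan step arrives). Each unit returns a status — Executed, ExecutedNoMoreRestarts, DelayComplete/DelayCompleteNoMoreRestarts, or "not ready to execute" — and the operation either advances to the next unit or is parked until an external event such as TEvPlanStep. A minimal C++ sketch of that dispatch loop, using only unit and status names visible in the log; the types and functions here (IUnit, AlwaysExecuted, Progress) are invented for illustration and are not YDB's actual interfaces:

    #include <iostream>
    #include <memory>
    #include <string>
    #include <vector>

    // Subset of the statuses that appear verbatim in the traces above.
    enum class EStatus { Executed, NotReady };

    // Hypothetical execution-unit interface (illustrative only).
    struct IUnit {
        std::string Name;
        explicit IUnit(std::string n) : Name(std::move(n)) {}
        virtual ~IUnit() = default;
        virtual EStatus Execute() = 0;  // one attempt to run the operation here
    };

    struct AlwaysExecuted : IUnit {
        using IUnit::IUnit;
        EStatus Execute() override { return EStatus::Executed; }
    };

    // WaitForPlan reports "not ready" until a plan step arrives, as in the log.
    struct WaitForPlan : IUnit {
        bool Planned = false;
        WaitForPlan() : IUnit("WaitForPlan") {}
        EStatus Execute() override {
            return Planned ? EStatus::Executed : EStatus::NotReady;
        }
    };

    // Advance the operation through its plan until a unit is not ready.
    void Progress(std::vector<std::unique_ptr<IUnit>>& plan, size_t& pos) {
        while (pos < plan.size()) {
            if (plan[pos]->Execute() == EStatus::NotReady) {
                std::cout << "Operation is not ready to execute on unit "
                          << plan[pos]->Name << "\n";
                return;  // parked; resumed later by an external event
            }
            std::cout << "Advance execution plan: executed on unit "
                      << plan[pos]->Name << "\n";
            ++pos;  // units that logged DelayComplete* run side effects
                    // later, in the tablet transaction's Complete phase
        }
    }

    int main() {
        std::vector<std::unique_ptr<IUnit>> plan;
        plan.push_back(std::make_unique<AlwaysExecuted>("CheckSchemeTx"));
        plan.push_back(std::make_unique<AlwaysExecuted>("StoreSchemeTx"));
        plan.push_back(std::make_unique<AlwaysExecuted>("FinishPropose"));
        auto wait = std::make_unique<WaitForPlan>();
        WaitForPlan* waitPtr = wait.get();
        plan.push_back(std::move(wait));
        plan.push_back(std::make_unique<AlwaysExecuted>("PlanQueue"));

        size_t pos = 0;
        Progress(plan, pos);      // stops at WaitForPlan, as in the trace
        waitPtr->Planned = true;  // TEvPlanStep delivered
        Progress(plan, pos);      // resumes and runs to completion
    }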
2025-03-28T09:36:23.134527Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:36:23.134555Z node 1 :TX_DATA ... -03-28T09:37:24.425665Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:24.425879Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:24.425913Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2025-03-28T09:37:24.425955Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:24.425987Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:24.426150Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:24.426180Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-03-28T09:37:24.426220Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:24.426252Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:24.426372Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:24.426403Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-03-28T09:37:24.426441Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:24.426471Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:24.426649Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:24.426707Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-03-28T09:37:24.426748Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:24.426780Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:24.426906Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:24.426938Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-03-28T09:37:24.426976Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:24.427007Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:24.427262Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:24.427299Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-03-28T09:37:24.427341Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:24.427376Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:24.427549Z node 32 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:24.427582Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 2025-03-28T09:37:24.427626Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:24.427660Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:24.427809Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:24.427839Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-03-28T09:37:24.427877Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:24.427907Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:24.428084Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:24.428115Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-03-28T09:37:24.428158Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:24.428191Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:24.428448Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:24.428485Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-03-28T09:37:24.428525Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:24.428558Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:24.428720Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:24.428751Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-03-28T09:37:24.428793Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:24.428826Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:24.428964Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:24.428994Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-03-28T09:37:24.429032Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:24.429062Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:24.429242Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:24.429276Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-03-28T09:37:24.429315Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, 
propose latency: 1 ms 2025-03-28T09:37:24.429346Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:24.429526Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:24.429558Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-03-28T09:37:24.429596Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:24.429630Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:24.429884Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-03-28T09:37:24.429937Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:24.429977Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-03-28T09:37:24.430110Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-03-28T09:37:24.430144Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:24.430176Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-03-28T09:37:24.430274Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2025-03-28T09:37:24.430307Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:24.430336Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2025-03-28T09:37:24.430423Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-03-28T09:37:24.430477Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:24.430508Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-03-28T09:37:24.430604Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-28T09:37:24.430637Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:24.430665Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-03-28T09:37:24.430808Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-28T09:37:24.430842Z node 32 
:TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:24.430872Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-03-28T09:37:24.431009Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-28T09:37:24.431043Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:24.431072Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2025-03-28T09:37:24.431134Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-03-28T09:37:24.431164Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:24.431190Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 expect 27 23 31 31 29 26 27 18 14 30 27 19 26 31 26 31 27 23 27 31 23 12 30 27 23 30 30 0 23 - 23 - actual 27 23 31 31 29 26 27 18 14 30 27 19 26 31 26 31 27 23 27 31 23 12 30 27 23 30 30 0 23 - 23 - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] Test command err: 2025-03-28T09:34:54.260215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:54.260505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:54.260564Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e3d/r3tmp/tmpzvETJ3/pdisk_1.dat 2025-03-28T09:34:54.618619Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19263, node 1 2025-03-28T09:34:55.103557Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:55.103616Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:55.103648Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:55.104135Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:55.111111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:55.200326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:55.200489Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:55.215438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17757 2025-03-28T09:34:55.731140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:58.416553Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:34:58.454169Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:58.454287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:58.494542Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:58.496886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:58.732127Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.735363Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.736070Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.736234Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.736660Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.736780Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.736892Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.736974Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.737050Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:58.930083Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:58.930207Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:58.943599Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:59.091744Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:59.137927Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:34:59.138033Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:34:59.169379Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:34:59.169527Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:34:59.169716Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:34:59.169782Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:34:59.169848Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:34:59.169902Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:34:59.169960Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:34:59.170014Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:34:59.170444Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:34:59.199922Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:34:59.200007Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:34:59.205621Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:34:59.210595Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:34:59.210804Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:34:59.216216Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:34:59.232591Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:34:59.232649Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:34:59.232723Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:34:59.246361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:34:59.253711Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:34:59.253848Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:34:59.468838Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:34:59.625802Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:34:59.707218Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:00.733615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:00.733733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:00.835614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:01.126456Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:01.126781Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:01.127117Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:01.127293Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:01.127470Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:01.127628Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:01.127778Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:01.127962Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:01.128160Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:01.128312Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:01.128450Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:01.128588Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:01.190325Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:01.190428Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process= ... 290639Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:23.290743Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:23.291503Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:37:23.304132Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:23.304292Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:37:23.304805Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8666:6508], server id = [2:8671:6513], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:23.305191Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8666:6508], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.306567Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8667:6509], server id = [2:8672:6514], tablet id = 72075186224037900, status = OK 2025-03-28T09:37:23.306624Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8667:6509], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.307125Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8668:6510], server id = [2:8673:6515], tablet id = 72075186224037901, status = OK 2025-03-28T09:37:23.307167Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8668:6510], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.307842Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8669:6511], server id = [2:8674:6516], tablet id = 72075186224037902, status = OK 2025-03-28T09:37:23.307881Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8669:6511], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.308481Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8670:6512], server id = [2:8677:6519], tablet id = 72075186224037903, status = OK 2025-03-28T09:37:23.308535Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8670:6512], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.312024Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:23.312642Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8666:6508], server id = [2:8671:6513], tablet id = 72075186224037899 2025-03-28T09:37:23.312676Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.313471Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:37:23.313824Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8667:6509], server id = [2:8672:6514], tablet id = 72075186224037900 2025-03-28T09:37:23.313863Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.314458Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8689:6528], server id = [2:8691:6529], tablet id = 72075186224037904, status = OK 2025-03-28T09:37:23.314540Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id 
= [2:8689:6528], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.315112Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-28T09:37:23.315810Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8668:6510], server id = [2:8673:6515], tablet id = 72075186224037901 2025-03-28T09:37:23.315833Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.316426Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8692:6530], server id = [2:8694:6531], tablet id = 72075186224037905, status = OK 2025-03-28T09:37:23.316477Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8692:6530], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.316651Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-28T09:37:23.317540Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8669:6511], server id = [2:8674:6516], tablet id = 72075186224037902 2025-03-28T09:37:23.317562Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.317869Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-28T09:37:23.318238Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8695:6532], server id = [2:8699:6536], tablet id = 72075186224037906, status = OK 2025-03-28T09:37:23.318290Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8695:6532], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.318620Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8670:6512], server id = [2:8677:6519], tablet id = 72075186224037903 2025-03-28T09:37:23.318640Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.319367Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8698:6535], server id = [2:8702:6539], tablet id = 72075186224037907, status = OK 2025-03-28T09:37:23.319432Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8698:6535], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.320151Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8703:6540], server id = [2:8705:6541], tablet id = 72075186224037908, status = OK 2025-03-28T09:37:23.320210Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8703:6540], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.321520Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-28T09:37:23.321815Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8689:6528], server id = [2:8691:6529], tablet id = 72075186224037904 2025-03-28T09:37:23.321835Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.323073Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-28T09:37:23.323295Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8692:6530], server id = [2:8694:6531], tablet id = 72075186224037905 2025-03-28T09:37:23.323314Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.324728Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-28T09:37:23.325185Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8695:6532], server id = [2:8699:6536], tablet id = 72075186224037906 2025-03-28T09:37:23.325207Z node 2 
:STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.325394Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-28T09:37:23.325613Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8698:6535], server id = [2:8702:6539], tablet id = 72075186224037907 2025-03-28T09:37:23.325630Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.325725Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-28T09:37:23.325757Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:23.325902Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:23.326007Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:23.326185Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8703:6540], server id = [2:8705:6541], tablet id = 72075186224037908 2025-03-28T09:37:23.326203Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.326480Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:37:23.349976Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:23.350118Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-28T09:37:23.350524Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8718:6550], server id = [2:8719:6551], tablet id = 72075186224037900, status = OK 2025-03-28T09:37:23.350593Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8718:6550], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.351562Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:37:23.351624Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:23.351733Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:23.351857Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:23.352110Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:37:23.353589Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8718:6550], server id = [2:8719:6551], tablet id = 72075186224037900 2025-03-28T09:37:23.353615Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.354070Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:23.379971Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8737:6569]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:23.380132Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:23.380161Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8737:6569], StatRequests.size() = 1 2025-03-28T09:37:23.492219Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTU1OGU3My02MjJkMWI0NS1iZjY1M2M5ZC05OTRiMDQ3Yw==, TxId: 2025-03-28T09:37:23.492269Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTU1OGU3My02MjJkMWI0NS1iZjY1M2M5ZC05OTRiMDQ3Yw==, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-03-28T09:37:23.492711Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8746:6575]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:23.492890Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:23.493266Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:23.493308Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:37:23.495720Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:23.495765Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:37:23.495803Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:37:23.501217Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 probe = 3 >> KqpImmediateEffects::Upsert >> KqpImmediateEffects::DeleteAfterUpsert >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] >> KqpLimits::QueryExecTimeoutCancel [GOOD] >> KqpLimits::ReplySizeExceeded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeResolve [GOOD] Test command err: 2025-03-28T09:34:55.371593Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:55.371843Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:55.371890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000d0a/r3tmp/tmpdE0zs1/pdisk_1.dat 2025-03-28T09:34:55.698996Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10141, node 1 2025-03-28T09:34:55.906371Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:55.906417Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:55.906440Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:55.906791Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:55.912359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:55.992275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:55.992437Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:56.006468Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:19943 2025-03-28T09:34:56.472591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:59.608266Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:34:59.634176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.634307Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.672589Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:59.674538Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:59.914050Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.914477Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.914896Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.914999Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.915207Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.915267Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.915315Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.915375Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.915430Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.075032Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:00.075120Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:00.088502Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:00.236272Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:00.275271Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:00.275386Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:00.307309Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:00.307446Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:00.307627Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:00.307687Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:00.307728Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:00.307797Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:00.307876Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:00.307929Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:00.308285Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:00.337223Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:00.337324Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:00.343523Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:00.349265Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:00.349490Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:00.355192Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:00.372391Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:00.372449Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:00.372508Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:00.385422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:00.392020Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:00.392161Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:00.576535Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:00.737273Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:00.817019Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:01.669187Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.669287Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.684909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:01.797759Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:01.797958Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:01.798222Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:01.798360Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:01.798445Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:01.798536Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:01.798641Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:01.798766Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:01.798876Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:01.798975Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:01.799067Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:01.799162Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:01.821433Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:01.821521Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... ConnectSchemeShard, pipe server id = [2:7278:5308], schemeshard id = 72075186224037897 2025-03-28T09:37:17.726922Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7234:5281], server id = [2:7279:5309], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:17.726953Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7279:5309] 2025-03-28T09:37:17.727004Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7279:5309], node id = 2, have schemeshards count = 1, need schemeshards count = 0 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to 2025-03-28T09:37:17.812403Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7281:5310] 2025-03-28T09:37:17.812920Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2802:3221] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-03-28T09:37:17.812961Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2802:3221] 2025-03-28T09:37:17.813010Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-03-28T09:37:19.058393Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:19.058461Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:37:19.058490Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:37:19.058539Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:19.058582Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:19.059127Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:19.071656Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:19.071985Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:19.072056Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:19.072679Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:19.085106Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:19.085252Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:37:19.085690Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7329:5337], server id = [2:7330:5338], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:19.085768Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7329:5337], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:19.088495Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:19.088567Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:19.088697Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:19.088838Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:19.089055Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:19.091084Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7329:5337], server id = [2:7330:5338], tablet id = 72075186224037899 2025-03-28T09:37:19.091115Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:19.091477Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:19.115866Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7350:5357]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:19.116031Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:19.116064Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7350:5357], StatRequests.size() = 1 2025-03-28T09:37:19.206186Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjQ1MDg0MzktMWUzZjFmMTUtNDRjZjU5Zi03YmViNDJlNA==, TxId: 2025-03-28T09:37:19.206242Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjQ1MDg0MzktMWUzZjFmMTUtNDRjZjU5Zi03YmViNDJlNA==, TxId: 2025-03-28T09:37:19.206647Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:19.219479Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:19.219571Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:19.765311Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-28T09:37:19.765366Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:37:20.484137Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:20.484202Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
2025-03-28T09:37:20.484616Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:20.497104Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:20.497361Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:20.497405Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-03-28T09:37:20.520892Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:21.760500Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:21.760557Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:37:21.760579Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:37:21.760737Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-03-28T09:37:21.761350Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-03-28T09:37:21.761432Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-03-28T09:37:21.773904Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-28T09:37:22.959318Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:22.959387Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:37:22.959439Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:37:24.167611Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:37:24.167801Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:37:24.178619Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:24.178752Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:24.178789Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:24.179414Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:24.192537Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:24.192878Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:24.192936Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:24.193273Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:24.217110Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:24.217274Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-28T09:37:24.217744Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7517:5449], server id = [2:7518:5450], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:24.217832Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7517:5449], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.218994Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:24.219087Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:24.219215Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:24.219369Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:24.219618Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:24.221615Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7517:5449], server id = [2:7518:5450], tablet id = 72075186224037899 2025-03-28T09:37:24.221647Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.222233Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:24.273174Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDM3YTFlMjUtYjYzNzBmYzAtYzJjODdjYTQtMzA1NTMxMmI=, TxId: 2025-03-28T09:37:24.273230Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDM3YTFlMjUtYjYzNzBmYzAtYzJjODdjYTQtMzA1NTMxMmI=, TxId: 2025-03-28T09:37:24.273692Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:24.287227Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:24.287286Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2802:3221] >> KqpImmediateEffects::ConflictingKeyW1RR2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] Test command err: 2025-03-28T09:34:55.602663Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:55.602956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:55.603004Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000d1b/r3tmp/tmpNoKDys/pdisk_1.dat 2025-03-28T09:34:55.972796Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24090, node 1 2025-03-28T09:34:56.181501Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:56.181557Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:56.181589Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:56.182031Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:56.188933Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:56.277363Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:56.277527Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:56.291006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11674 2025-03-28T09:34:56.765690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:00.089023Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:00.124208Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:00.124328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:00.162438Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:00.163984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:00.379399Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.379998Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.380442Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.380556Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.380773Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.380831Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.380877Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.380930Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.380975Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.540920Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:00.541013Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:00.553892Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:00.690113Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:00.723340Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:00.723407Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:00.744494Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:00.744619Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:00.744769Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:00.744821Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:00.744860Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:00.744892Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:00.744931Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:00.744964Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:00.745281Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:00.773426Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:00.773530Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:00.780566Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:00.785562Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:00.785875Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:00.791502Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:00.810834Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:00.810894Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:00.810976Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:00.824624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:00.832393Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:00.832568Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:01.005039Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:01.154576Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:01.242191Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:02.104368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:02.104466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:02.123931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:02.501325Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:02.501586Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:02.501861Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:02.501996Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:02.502115Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:02.502236Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:02.502366Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:02.502510Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:02.502653Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:02.502802Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:02.502913Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:02.503033Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:02.562733Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:02.562824Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process= ... ode 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:37:21.072248Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8552:6443], schemeshard count = 1 2025-03-28T09:37:23.182229Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:23.182301Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:37:23.182339Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:23.182389Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:23.186100Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:23.202712Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:23.203379Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:23.203504Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:23.204650Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 2 2025-03-28T09:37:23.204726Z node 2 :STATISTICS WARN: [72075186224037894] TTxResponseTabletDistribution::Execute. Some tablets are probably in Hive boot queue 2025-03-28T09:37:23.204795Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:24.330461Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:24.343551Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:24.343720Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:37:24.344227Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8714:6529], server id = [2:8719:6534], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:24.344517Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8714:6529], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.345829Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8715:6530], server id = [2:8720:6535], tablet id = 72075186224037900, status = OK 2025-03-28T09:37:24.345885Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8715:6530], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.346365Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8716:6531], server id = [2:8721:6536], tablet id = 72075186224037901, status = OK 2025-03-28T09:37:24.346410Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8716:6531], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.346761Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8717:6532], server id = [2:8722:6537], tablet id = 72075186224037902, status = OK 2025-03-28T09:37:24.346824Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8717:6532], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.347764Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8718:6533], server id = [2:8725:6540], tablet id = 72075186224037903, status = OK 2025-03-28T09:37:24.347810Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8718:6533], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.351098Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:24.351573Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8714:6529], server id = [2:8719:6534], tablet id = 72075186224037899 2025-03-28T09:37:24.351609Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.352613Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8736:6549], server id = [2:8738:6550], tablet id = 72075186224037904, status = OK 2025-03-28T09:37:24.352677Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8736:6549], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.353887Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:37:24.354168Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8715:6530], server id = [2:8720:6535], tablet id = 72075186224037900 2025-03-28T09:37:24.354199Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.355701Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-28T09:37:24.356242Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8716:6531], server id = [2:8721:6536], tablet id = 72075186224037901 2025-03-28T09:37:24.356275Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.356699Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8742:6552], server id = [2:8744:6554], tablet id = 72075186224037905, status 
= OK 2025-03-28T09:37:24.356776Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8742:6552], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.356960Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-28T09:37:24.358002Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8745:6555], server id = [2:8746:6556], tablet id = 72075186224037906, status = OK 2025-03-28T09:37:24.358058Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8745:6555], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.358400Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8717:6532], server id = [2:8722:6537], tablet id = 72075186224037902 2025-03-28T09:37:24.358424Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.359134Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-28T09:37:24.359706Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8718:6533], server id = [2:8725:6540], tablet id = 72075186224037903 2025-03-28T09:37:24.359739Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.359920Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8747:6557], server id = [2:8751:6560], tablet id = 72075186224037907, status = OK 2025-03-28T09:37:24.359985Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8747:6557], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.361138Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-28T09:37:24.361491Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8753:6562], server id = [2:8756:6564], tablet id = 72075186224037908, status = OK 2025-03-28T09:37:24.361556Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8753:6562], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.362527Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8736:6549], server id = [2:8738:6550], tablet id = 72075186224037904 2025-03-28T09:37:24.362552Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.363625Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-28T09:37:24.363917Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8742:6552], server id = [2:8744:6554], tablet id = 72075186224037905 2025-03-28T09:37:24.363937Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.365050Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-28T09:37:24.365350Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8745:6555], server id = [2:8746:6556], tablet id = 72075186224037906 2025-03-28T09:37:24.365369Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.365653Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-28T09:37:24.366021Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8747:6557], server id = [2:8751:6560], tablet id = 72075186224037907 2025-03-28T09:37:24.366041Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.366460Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-28T09:37:24.366496Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 
2025-03-28T09:37:24.366731Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:24.366913Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:24.367191Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:24.368832Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8753:6562], server id = [2:8756:6564], tablet id = 72075186224037908 2025-03-28T09:37:24.368868Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.369472Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:24.399016Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8780:6585]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:24.399224Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:24.399260Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8780:6585], StatRequests.size() = 1 2025-03-28T09:37:24.536931Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2IzMGNmMzItZWZmZDQ3NWEtYWNkYWJiNzQtYzc0OTNiNWI=, TxId: 2025-03-28T09:37:24.536995Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2IzMGNmMzItZWZmZDQ3NWEtYWNkYWJiNzQtYzc0OTNiNWI=, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-03-28T09:37:24.537650Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8788:6591]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:24.537879Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:24.538341Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:24.538396Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:37:24.541466Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:24.541535Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:37:24.541596Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:37:24.547308Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] Test command err: 2025-03-28T09:34:58.594584Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:58.594863Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:58.594917Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b3c/r3tmp/tmpiJjKdU/pdisk_1.dat 2025-03-28T09:34:58.946283Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13688, node 1 2025-03-28T09:34:59.166532Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:59.166576Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:59.166604Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:59.167003Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:59.168760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:59.250024Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.250158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.263847Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9673 2025-03-28T09:34:59.771039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:02.405851Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:02.434317Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:02.434416Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:02.471183Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:02.472855Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:02.697560Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.697954Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.698446Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.698579Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.698866Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.698932Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.698977Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.699025Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.699067Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.854174Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:02.854273Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:02.866832Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:03.003458Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:03.052229Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:03.052307Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:03.082974Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:03.083171Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:03.083379Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:03.083443Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:03.083495Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:03.083541Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:03.083595Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:03.083655Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:03.084144Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:03.113345Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:03.113459Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:03.119980Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:03.124786Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:03.125020Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:03.129918Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:03.143639Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:03.143687Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:03.143736Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:03.154643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:03.159650Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:03.159752Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:03.295660Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:03.461705Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:03.539968Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:04.337490Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:04.337636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:04.356808Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:04.606581Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:04.606829Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:04.607040Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:04.607141Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:04.607259Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:04.607361Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:04.607445Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:04.607530Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:04.607607Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:04.607698Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:04.607835Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:04.607913Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:04.649035Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:04.649095Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=T ... 3-28T09:37:24.344273Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:37:24.345815Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:24.345898Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:24.347899Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:37:24.424676Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:24.424910Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 2 2025-03-28T09:37:24.425406Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8670:6512], server id = [2:8677:6519], tablet id = 72075186224037903 2025-03-28T09:37:24.425457Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.426090Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8761:6578], server id = [2:8766:6583], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:24.426194Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8761:6578], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.426351Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8762:6579], server id = [2:8767:6584], tablet id = 72075186224037900, status = OK 2025-03-28T09:37:24.426381Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8762:6579], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.427437Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8763:6580], server id = [2:8769:6586], tablet id = 72075186224037901, status = OK 2025-03-28T09:37:24.427505Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8763:6580], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.434921Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8764:6581], server id = [2:8768:6585], tablet id = 72075186224037902, status = OK 2025-03-28T09:37:24.435007Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8764:6581], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.435601Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8765:6582], server id = [2:8771:6588], tablet id = 72075186224037903, status = OK 2025-03-28T09:37:24.435646Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8765:6582], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.436253Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:24.437058Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:37:24.437680Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8761:6578], server id = [2:8766:6583], tablet id = 72075186224037899 2025-03-28T09:37:24.437712Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.437827Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-28T09:37:24.438267Z node 
2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8762:6579], server id = [2:8767:6584], tablet id = 72075186224037900 2025-03-28T09:37:24.438289Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.438397Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-28T09:37:24.438712Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8763:6580], server id = [2:8769:6586], tablet id = 72075186224037901 2025-03-28T09:37:24.438738Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.439138Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8775:6592], server id = [2:8780:6597], tablet id = 72075186224037904, status = OK 2025-03-28T09:37:24.439189Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8775:6592], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.439617Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8777:6594], server id = [2:8781:6598], tablet id = 72075186224037905, status = OK 2025-03-28T09:37:24.439658Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8777:6594], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.439818Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8764:6581], server id = [2:8768:6585], tablet id = 72075186224037902 2025-03-28T09:37:24.439837Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.440880Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8779:6596], server id = [2:8783:6600], tablet id = 72075186224037906, status = OK 2025-03-28T09:37:24.440924Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8779:6596], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.441149Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8782:6599], server id = [2:8785:6601], tablet id = 72075186224037907, status = OK 2025-03-28T09:37:24.441184Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8782:6599], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.442347Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-28T09:37:24.442728Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-28T09:37:24.443031Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8765:6582], server id = [2:8771:6588], tablet id = 72075186224037903 2025-03-28T09:37:24.443051Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.443370Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-28T09:37:24.443565Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8775:6592], server id = [2:8780:6597], tablet id = 72075186224037904 2025-03-28T09:37:24.443588Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.443692Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-28T09:37:24.443780Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-28T09:37:24.443895Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8777:6594], server id = [2:8781:6598], tablet id = 72075186224037905 2025-03-28T09:37:24.443910Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.443938Z node 2 :STATISTICS DEBUG: 
EvClientConnected, node id = 2, client id = [2:8790:6606], server id = [2:8791:6607], tablet id = 72075186224037908, status = OK 2025-03-28T09:37:24.443974Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8790:6606], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:24.444093Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8779:6596], server id = [2:8783:6600], tablet id = 72075186224037906 2025-03-28T09:37:24.444120Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.444180Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8782:6599], server id = [2:8785:6601], tablet id = 72075186224037907 2025-03-28T09:37:24.444194Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.444885Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-28T09:37:24.444920Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:24.445110Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:24.445287Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:24.445518Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:24.447765Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8790:6606], server id = [2:8791:6607], tablet id = 72075186224037908 2025-03-28T09:37:24.447792Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:24.448417Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:24.482292Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8809:6625]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:24.482520Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:24.482560Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8809:6625], StatRequests.size() = 1 2025-03-28T09:37:24.619241Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODljN2RkYWYtMjUzMDYyZWItMWQxNjVhYmItMmY0NzgyM2Q=, TxId: 2025-03-28T09:37:24.619318Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODljN2RkYWYtMjUzMDYyZWItMWQxNjVhYmItMmY0NzgyM2Q=, TxId: 2025-03-28T09:37:24.619989Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:24.643865Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8819:6631] 2025-03-28T09:37:24.644044Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8686:6526], server id = [2:8819:6631], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:24.644218Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8819:6631], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-03-28T09:37:24.644329Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = 
[2:8820:6632] 2025-03-28T09:37:24.644462Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8820:6632], schemeshard id = 72075186224037897 2025-03-28T09:37:24.658163Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:24.658235Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:24.724499Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8823:6635]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:24.724767Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:24.724826Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:37:24.727372Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:24.727438Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:37:24.727510Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:37:24.733071Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] Test command err: 2025-03-28T09:34:56.576984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:56.577260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:56.577317Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b84/r3tmp/tmpX6NNin/pdisk_1.dat 2025-03-28T09:34:56.929066Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24629, node 1 2025-03-28T09:34:57.151775Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:57.151845Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:57.151876Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:57.152336Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:57.154755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:57.242556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:57.242725Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:57.256773Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23978 2025-03-28T09:34:57.785301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:00.544332Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:00.569453Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:00.569557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:00.606791Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:00.608429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:00.841432Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.841845Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.842265Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.842363Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.842565Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.842627Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.842679Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.842766Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.842843Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:01.006542Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:01.006632Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:01.019503Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:01.139767Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:01.176964Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:01.177054Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:01.201477Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:01.201614Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:01.201813Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:01.201873Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:01.201935Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:01.201989Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:01.202040Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:01.202086Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:01.202480Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:01.232683Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:01.232793Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:01.238809Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:01.244087Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:01.244387Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:01.250051Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:01.265744Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:01.265803Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:01.265887Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:01.279582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:01.286792Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:01.286952Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:01.449892Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:01.658850Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:01.746885Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:02.605789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:02.605892Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:02.619706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:02.745202Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:02.745443Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:02.745748Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:02.745889Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:02.746023Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:02.746285Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:02.746459Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:02.746599Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:02.746764Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:02.746903Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:02.747030Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:02.747168Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:02.778280Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:02.778381Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 1474976715661;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715661; waiting actualization: 0/0.000016s ... waiting for TEvAnalyzeTableResponse 2025-03-28T09:35:06.902993Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2804:3166] 2025-03-28T09:35:06.905061Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2802:3221] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-03-28T09:35:06.905114Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId=operationId 2025-03-28T09:35:06.905144Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId=operationId , PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:35:06.930914Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-03-28T09:35:15.516748Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:35:15.516813Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:16.641001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:35:16.641056Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:04.953480Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvSendTopPartitions, time mismath: , partition interval end# 1970-01-01T00:01:01.000000Z, event time# 1970-01-01T00:01:01.079673Z 2025-03-28T09:37:16.019257Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-28T09:37:16.019319Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:37:16.019381Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:37:16.019430Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-28T09:37:17.514156Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-28T09:37:17.514240Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 237.000000s, at schemeshard: 72075186224037897 2025-03-28T09:37:17.514523Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-03-28T09:37:17.527853Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:37:18.665279Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:18.665366Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:37:18.665404Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:37:18.665475Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 
2025-03-28T09:37:18.665537Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:37:18.665956Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:18.669476Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:37:18.673344Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7021:5165], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:18.673445Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7031:5170], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:18.673529Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:18.686510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-28T09:37:18.735735Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7035:5173], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-28T09:37:18.959619Z node 2 :TX_PROXY ERROR: Actor# [2:7131:5219] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:19.016291Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7160:5234]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:19.016442Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:37:19.016519Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7162:5236] 2025-03-28T09:37:19.016568Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7162:5236] 2025-03-28T09:37:19.016764Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7163:5237] 2025-03-28T09:37:19.016847Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7163:5237], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:37:19.016886Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-28T09:37:19.016975Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7162:5236], server id = [2:7163:5237], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:19.017026Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:37:19.017071Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:7160:5234], StatRequests.size() = 1 2025-03-28T09:37:19.110828Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzExNjI3NTktY2M2Mjk1MjktNDk4ZWRiNmYtMzdmM2FiZWI=, TxId: 2025-03-28T09:37:19.110884Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzExNjI3NTktY2M2Mjk1MjktNDk4ZWRiNmYtMzdmM2FiZWI=, TxId: 2025-03-28T09:37:19.111242Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:19.123994Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:37:19.124054Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:19.166723Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:37:19.166819Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:37:19.241896Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:7162:5236], schemeshard count = 1 2025-03-28T09:37:20.174968Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:20.175053Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:20.178156Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:20.193589Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:20.193910Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:20.193962Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. 
Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-03-28T09:37:20.206500Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:20.217212Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. ... blocking NKikimr::NStat::TEvStatistics::TEvAnalyzeTableResponse from TX_COLUMNSHARD_ACTOR to STATISTICS_AGGREGATOR ... waiting for TEvAnalyzeTableResponse (done) 2025-03-28T09:37:20.641594Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:20.641649Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:37:20.641681Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:37:20.641747Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:20.641802Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:20.644348Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:20.657004Z node 2 :STATISTICS ERROR: [72075186224037894] Delete long analyze operation, OperationId=operationId 2025-03-28T09:37:20.956262Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:37:20.956435Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:37:22.831992Z node 1 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2025-03-28T09:37:22.832078Z node 1 :STATISTICS DEBUG: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:37:22.832119Z node 1 :STATISTICS DEBUG: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2025-03-28T09:37:22.832163Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-28T09:37:25.072473Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 2, at schemeshard: 72075186224037897 2025-03-28T09:37:25.072565Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 199.000000s, at schemeshard: 72075186224037897 2025-03-28T09:37:25.072734Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 51 2025-03-28T09:37:25.191580Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:25.191828Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeDeadline::Complete. Send TEvAnalyzeResponse for deleted operation, OperationId=operationId, ActorId=[1:2802:3221] 2025-03-28T09:37:25.191892Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:37:25.192298Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:25.192353Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:25.192970Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 >> KqpEffects::UpdateOn_Params >> KqpInplaceUpdate::SingleRowStr+UseSink >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowArithm+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowArithm-UseSink >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAnalyzeTableResponse [GOOD] Test command err: 2025-03-28T09:34:58.586745Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:58.587053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:58.587131Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b55/r3tmp/tmpPvt8YE/pdisk_1.dat 2025-03-28T09:34:59.000962Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8750, node 1 2025-03-28T09:34:59.233884Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:59.233940Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:59.233971Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:59.234413Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:59.236898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:59.320044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.320180Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.333891Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26848 2025-03-28T09:34:59.824051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:02.452023Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:02.481263Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:02.481357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:02.519089Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:02.521032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:02.742139Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.742648Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.743141Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.743255Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.743459Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.743516Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.743574Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.743622Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.743670Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:02.904288Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:02.904387Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:02.917531Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:03.059664Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:03.106536Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:03.106649Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:03.140870Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:03.141127Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:03.141394Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:03.141448Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:03.141507Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:03.141555Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:03.141623Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:03.141672Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:03.142775Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:03.167281Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:03.167412Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2596], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:03.173981Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2608] 2025-03-28T09:35:03.179627Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1913:2617] 2025-03-28T09:35:03.180089Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1913:2617], schemeshard id = 72075186224037897 2025-03-28T09:35:03.188358Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:03.202738Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:03.202780Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:03.202842Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:03.213053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:03.218064Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:03.218166Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:03.370611Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:03.565869Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:03.654959Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:04.496985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2241:3073], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:04.497091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:04.510615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:04.602161Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:04.602331Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:04.602580Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:04.602676Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:04.602789Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:04.602872Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:04.602944Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:04.603021Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:04.603109Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:04.603197Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:04.603266Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:04.603353Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2851];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:04.622567Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:04.622644Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... UG: EvClientConnected, node id = 2, client id = [2:7209:5289], server id = [2:7254:5317], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:19.212264Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7254:5317] 2025-03-28T09:37:19.212321Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7254:5317], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-03-28T09:37:19.288265Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7263:5318] 2025-03-28T09:37:19.288823Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2802:3222] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-03-28T09:37:19.288873Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2802:3222] 2025-03-28T09:37:19.288923Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-03-28T09:37:20.541406Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:20.541488Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:37:20.541537Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:37:20.541584Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:20.541633Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:20.542450Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:20.555314Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:20.555680Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:20.555755Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:20.556441Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:20.569983Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:20.570200Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:37:20.570877Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7304:5345], server id = [2:7305:5346], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:20.570998Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7304:5345], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:20.574883Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:20.574990Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:20.575130Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:20.575290Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:20.575488Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:20.577464Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7304:5345], server id = [2:7305:5346], tablet id = 72075186224037899 2025-03-28T09:37:20.577501Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:20.577860Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:20.605533Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7325:5365]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:20.605774Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:20.605822Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7325:5365], StatRequests.size() = 1 2025-03-28T09:37:20.704111Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:04.000000Z, event interval end# 2025-03-28T09:37:18.000000Z 2025-03-28T09:37:20.704338Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Mjc2OTM4NzItMzVhZGMyYWEtMTA4NTRkZS03M2VkM2FhZA==, TxId: 2025-03-28T09:37:20.704391Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Mjc2OTM4NzItMzVhZGMyYWEtMTA4NTRkZS03M2VkM2FhZA==, TxId: 2025-03-28T09:37:20.704991Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:20.718713Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:20.718783Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
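For readability, the two RunDataQuery statements that appear in this section (the DELETE in an earlier record, the UPSERT just above), re-extracted as standalone YQL. In the captured log the type parameters of the List declarations are missing, most likely stripped as angle-bracketed markup during capture, so the Uint32/String element types below are assumptions inferred from the target columns:

    -- Cleanup before a traversal rewrites a path's statistics (verbatim from the log).
    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DELETE FROM `.metadata/_statistics`
    WHERE owner_id = $owner_id AND local_path_id = $local_path_id;

    -- Save: one row per analyzed column; element types are assumed, the rest is verbatim.
    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;  -- assumed element type
    DECLARE $data AS List<String>;         -- assumed element type
    UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
           ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);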
2025-03-28T09:37:21.246227Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-28T09:37:21.246312Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:37:21.991182Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:21.991276Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:21.991901Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:22.005272Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:22.005633Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:22.005688Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 4], AnalyzedShards 1 2025-03-28T09:37:22.030318Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:23.259457Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:23.259525Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:37:23.259573Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:37:23.259735Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-03-28T09:37:23.260114Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-03-28T09:37:23.260216Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-03-28T09:37:23.272845Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-28T09:37:24.483630Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:24.483709Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:37:24.483758Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:37:25.715381Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:37:25.715599Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:37:25.726515Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:25.726640Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:25.726700Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:25.727386Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:25.741149Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:25.741480Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:25.741535Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:25.741905Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:25.768107Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:25.768283Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-28T09:37:25.768852Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7488:5455], server id = [2:7489:5456], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:25.768946Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7488:5455], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:25.770247Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:25.770343Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:25.770486Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:25.770641Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:25.770998Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:25.773154Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7488:5455], server id = [2:7489:5456], tablet id = 72075186224037899 2025-03-28T09:37:25.773188Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:25.773855Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:25.794044Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2ZkZDUxZmUtNjFmYTYzOWItZWFkOTVjYzktYTE2Y2MxZjQ=, TxId: 2025-03-28T09:37:25.794106Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2ZkZDUxZmUtNjFmYTYzOWItZWFkOTVjYzktYTE2Y2MxZjQ=, TxId: 2025-03-28T09:37:25.794630Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:25.809640Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:25.809745Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2802:3222] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] Test command err: 2025-03-28T09:34:55.633355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:55.633619Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:55.633667Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000d5e/r3tmp/tmpoTIzCA/pdisk_1.dat 2025-03-28T09:34:55.980257Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3195, node 1 2025-03-28T09:34:56.190729Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:56.190790Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:56.190822Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:56.191234Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:56.193563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:56.282922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:56.283082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:56.296692Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22193 2025-03-28T09:34:56.776885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:59.718265Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:34:59.755045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.755177Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.794342Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:59.796256Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:00.035039Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.035636Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.036162Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.036301Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.036588Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.036670Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.036740Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.036826Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.036893Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.202234Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:00.202349Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:00.215741Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:00.332680Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:00.373854Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:00.373972Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:00.408278Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:00.408446Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:00.408670Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:00.408728Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:00.408787Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:00.408853Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:00.408910Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:00.408966Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:00.409421Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:00.443727Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:00.443878Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:00.452108Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:00.459042Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:00.459338Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:00.466309Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:00.486774Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:00.486843Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:00.486920Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:00.502734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:00.517016Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:00.517194Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:00.686841Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:00.881458Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:00.971503Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:01.843255Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.843413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:01.863145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:02.176096Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:02.176390Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:02.176747Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:02.176934Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:02.177061Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:02.177194Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:02.177340Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:02.177515Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:02.177737Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:02.177917Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:02.178064Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:02.178203Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:02.221032Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:02.221114Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=T ... extTraversal 2025-03-28T09:37:25.448284Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:37:25.448332Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:25.448377Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:25.452187Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:25.469113Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:25.469647Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:25.469749Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:25.471181Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:37:25.485171Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:25.485405Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:37:25.486071Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8672:6512], server id = [2:8677:6517], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:25.486418Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8672:6512], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:25.488185Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8673:6513], server id = [2:8678:6518], tablet id = 72075186224037900, status = OK 2025-03-28T09:37:25.488281Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8673:6513], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:25.489053Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8674:6514], server id = [2:8679:6519], tablet id = 72075186224037901, status = OK 2025-03-28T09:37:25.489123Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8674:6514], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:25.490084Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8675:6515], server id = [2:8680:6520], tablet id = 72075186224037902, status = OK 2025-03-28T09:37:25.490141Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8675:6515], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:25.491041Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8676:6516], server id = [2:8683:6523], tablet id = 72075186224037903, status = OK 2025-03-28T09:37:25.491095Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8676:6516], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:25.496220Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:25.497080Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8672:6512], server id = [2:8677:6517], tablet id = 72075186224037899 2025-03-28T09:37:25.497115Z node 2 :STATISTICS 
DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:25.497356Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:37:25.497889Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8673:6513], server id = [2:8678:6518], tablet id = 72075186224037900 2025-03-28T09:37:25.497912Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:25.498444Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8696:6532], server id = [2:8699:6534], tablet id = 72075186224037904, status = OK 2025-03-28T09:37:25.498513Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8696:6532], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:25.499022Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-28T09:37:25.499725Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8674:6514], server id = [2:8679:6519], tablet id = 72075186224037901 2025-03-28T09:37:25.499746Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:25.500502Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-28T09:37:25.500953Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8697:6533], server id = [2:8700:6535], tablet id = 72075186224037905, status = OK 2025-03-28T09:37:25.501007Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8697:6533], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:25.501367Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8675:6515], server id = [2:8680:6520], tablet id = 72075186224037902 2025-03-28T09:37:25.501387Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:25.501490Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-28T09:37:25.502142Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8701:6536], server id = [2:8705:6540], tablet id = 72075186224037906, status = OK 2025-03-28T09:37:25.502194Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8701:6536], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:25.502780Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8676:6516], server id = [2:8683:6523], tablet id = 72075186224037903 2025-03-28T09:37:25.502801Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:25.503568Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8703:6538], server id = [2:8706:6541], tablet id = 72075186224037907, status = OK 2025-03-28T09:37:25.503631Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8703:6538], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:25.504116Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8707:6542], server id = [2:8711:6545], tablet id = 72075186224037908, status = OK 2025-03-28T09:37:25.504157Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8707:6542], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:25.506287Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-28T09:37:25.506759Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8696:6532], server id = [2:8699:6534], tablet id = 72075186224037904 2025-03-28T09:37:25.506783Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 
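The UPSERT repeated in the following records writes one row per analyzed column into `.metadata/_statistics`. The column names follow from the query itself; a plausible shape for that table, with assumed types and primary key, is:

    -- Sketch of the statistics store; column names come from the UPSERT,
    -- types and the primary key are assumptions.
    CREATE TABLE `.metadata/_statistics` (
        owner_id Uint64,
        local_path_id Uint64,
        stat_type Uint32,
        column_tag Uint32,
        data String,  -- serialized count-min sketch
        PRIMARY KEY (owner_id, local_path_id, stat_type, column_tag)
    );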
2025-03-28T09:37:25.508850Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-28T09:37:25.509339Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8697:6533], server id = [2:8700:6535], tablet id = 72075186224037905 2025-03-28T09:37:25.509372Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:25.510017Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-28T09:37:25.510550Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8701:6536], server id = [2:8705:6540], tablet id = 72075186224037906 2025-03-28T09:37:25.510580Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:25.511414Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-28T09:37:25.511826Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8703:6538], server id = [2:8706:6541], tablet id = 72075186224037907 2025-03-28T09:37:25.511869Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:25.512152Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-28T09:37:25.512196Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:25.512377Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:25.512572Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:25.513028Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:25.514563Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8707:6542], server id = [2:8711:6545], tablet id = 72075186224037908 2025-03-28T09:37:25.514585Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:25.515208Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:25.543524Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8738:6568]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:25.543683Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:25.543719Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8738:6568], StatRequests.size() = 1 2025-03-28T09:37:25.658263Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTU4Zjg2ODctMWI4OTg5MzUtMWE2Yjg5NzAtZWMwMmFiOGM=, TxId: 2025-03-28T09:37:25.658330Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTU4Zjg2ODctMWI4OTg5MzUtMWE2Yjg5NzAtZWMwMmFiOGM=, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-03-28T09:37:25.659612Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:25.661060Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:1153;event=tablet_die; 2025-03-28T09:37:25.694400Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:25.694465Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:25.768069Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:8754:6579];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=16; 2025-03-28T09:37:26.010629Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8864:6674]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:26.010954Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:26.011177Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:37:26.014510Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:26.014574Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:37:26.014620Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:37:26.024769Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpImmediateEffects::ReplaceDuplicates >> KqpImmediateEffects::UpdateAfterInsert [GOOD] >> KqpImmediateEffects::UnobservedUncommittedChangeConflict >> KqpEffects::InsertAbort_Select_Duplicates+UseSink >> KqpImmediateEffects::MultiShardUpsertAfterRead [GOOD] >> KqpImmediateEffects::ManyFlushes >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] >> KqpInplaceUpdate::BigRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 14004, MsgBus: 19418 2025-03-28T09:37:17.931417Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792158975513375:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:17.931480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ba/r3tmp/tmpiKJRtH/pdisk_1.dat 2025-03-28T09:37:18.229363Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:18.286031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:18.286612Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 14004, node 1 2025-03-28T09:37:18.288016Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:18.416694Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:18.416713Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:18.416718Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:18.416826Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19418 TClient is connected to server localhost:19418 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:18.996079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:19.038529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:19.159612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:19.269864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:19.319131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:19.975353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792167565449715:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:19.975524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:20.493607Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:20.517708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:20.541055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:20.565507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:20.590830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:20.656691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:20.740868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792171860417567:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:20.740940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:20.740962Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792171860417572:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:20.746113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:20.753275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792171860417574:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:20.852001Z node 1 :TX_PROXY ERROR: Actor# [1:7486792171860417628:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:21.934426Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-03-28T09:37:21.942569Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:37:21.942798Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:37:21.943007Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792176155385257:2495], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7486792176155385218:2495]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7486792176155385257:2495].{
<main>: Error: Duplicate keys have been found., code: 2012 } 2025-03-28T09:37:21.943563Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792176155385250:2495], SessionActorId: [1:7486792176155385218:2495], statusCode=PRECONDITION_FAILED. Issue=
<main>: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
<main>: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7486792176155385218:2495]. isRollback=0 2025-03-28T09:37:21.943790Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWI4MzY2ZjgtNjRhODEwN2QtYTk1MWZjNWYtN2QwYjQxNjg=, ActorId: [1:7486792176155385218:2495], ActorState: ExecuteState, TraceId: 01jqe1sytxc69ze8dfaex2s2ka, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7486792176155385251:2495] from: [1:7486792176155385250:2495] 2025-03-28T09:37:21.943876Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486792176155385251:2495] TxId: 281474976715671. Ctx: { TraceId: 01jqe1sytxc69ze8dfaex2s2ka, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWI4MzY2ZjgtNjRhODEwN2QtYTk1MWZjNWYtN2QwYjQxNjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
<main>: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
<main>: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:37:21.944819Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWI4MzY2ZjgtNjRhODEwN2QtYTk1MWZjNWYtN2QwYjQxNjg=, ActorId: [1:7486792176155385218:2495], ActorState: ExecuteState, TraceId: 01jqe1sytxc69ze8dfaex2s2ka, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29367, MsgBus: 23069 2025-03-28T09:37:22.995191Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792179279735589:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:22.995236Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ba/r3tmp/tmpSoAXYr/pdisk_1.dat 2025-03-28T09:37:23.069181Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29367, node 2 2025-03-28T09:37:23.119218Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:23.119299Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:23.121067Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:23.125192Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:23.125219Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:23.125227Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:23.125358Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23069 TClient is connected to server localhost:23069 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:23.477957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:23.495685Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:37:23.566724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:23.704995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:23.762102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:25.307011Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792192164639242:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:25.307097Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:25.341820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.368733Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.394507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.425888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.453769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.488218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.528313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792192164639750:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:25.528396Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:25.528636Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792192164639755:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:25.531989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:25.540954Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792192164639757:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:25.635861Z node 2 :TX_PROXY ERROR: Actor# [2:7486792192164639812:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:26.813622Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792196459607429:2499], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jqe1t3e38bpcmt1yab99hna0. SessionId : ydb://session/3?node_id=2&id=ODU1NTM5ZDItMzBlNGY2N2ItZjcyZjMzNjEtNjE1ZTEyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
<main>: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:37:26.814060Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792196459607431:2500], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01jqe1t3e38bpcmt1yab99hna0. SessionId : ydb://session/3?node_id=2&id=ODU1NTM5ZDItMzBlNGY2N2ItZjcyZjMzNjEtNjE1ZTEyMGU=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7486792196459607426:2487], status: PRECONDITION_FAILED, reason: {
<main>: Error: Terminate execution } 2025-03-28T09:37:26.814500Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ODU1NTM5ZDItMzBlNGY2N2ItZjcyZjMzNjEtNjE1ZTEyMGU=, ActorId: [2:7486792196459607362:2487], ActorState: ExecuteState, TraceId: 01jqe1t3e38bpcmt1yab99hna0, Create QueryResponse for error on request, msg: >> KqpImmediateEffects::InsertDuplicates+UseSink >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] Test command err: 2025-03-28T09:34:55.781610Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:55.781895Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:55.781949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000cf7/r3tmp/tmpuEedWQ/pdisk_1.dat 2025-03-28T09:34:56.184433Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31677, node 1 2025-03-28T09:34:56.396749Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:56.396791Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:56.396812Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:56.397121Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:56.398843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:56.476334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:56.476460Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:56.489936Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15669 2025-03-28T09:34:57.007918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:00.210960Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:00.234660Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:00.234768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:00.271669Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:00.273599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:00.507070Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.507611Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.508125Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.508259Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.508547Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.508628Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.508696Z node 2 :HIVE WARN: HIVE#72075186224037888
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.508765Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.508849Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:00.670676Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:00.670795Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:00.683774Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:00.801478Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:00.839304Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:00.839381Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:00.866362Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:00.866531Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:00.866754Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:00.866816Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:00.866883Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:00.866941Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:00.867003Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:00.867049Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:00.867449Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:00.897608Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:00.897685Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:00.903919Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:00.908833Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:00.909052Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:00.913908Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:00.954421Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:00.954466Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:00.954518Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:00.964955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:00.970045Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:00.970161Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:01.107452Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:01.256079Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:01.343722Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:02.156252Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:02.156364Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:02.174625Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:02.510160Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:02.510427Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:02.510703Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:02.510845Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:02.510971Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:02.511095Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:02.511217Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:02.511369Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:02.511525Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:02.511650Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:02.511770Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:02.511914Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:02.568399Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:02.568485Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process= ... TEvStatisticsRequest send, client id = [2:8666:6508], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.401265Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8667:6509], server id = [2:8672:6514], tablet id = 72075186224037901, status = OK 2025-03-28T09:37:23.401318Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8667:6509], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.402540Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8668:6510], server id = [2:8673:6515], tablet id = 72075186224037902, status = OK 2025-03-28T09:37:23.402604Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8668:6510], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.403818Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8669:6511], server id = [2:8676:6518], tablet id = 72075186224037903, status = OK 2025-03-28T09:37:23.403880Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8669:6511], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.408571Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:23.409341Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8665:6507], server id = [2:8670:6512], tablet id = 72075186224037899 2025-03-28T09:37:23.409390Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.410101Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:37:23.410818Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8666:6508], server id = [2:8671:6513], tablet id = 72075186224037900 2025-03-28T09:37:23.410851Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.411370Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8688:6527], server id = [2:8691:6529], tablet id = 72075186224037904, status = OK 2025-03-28T09:37:23.411457Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8688:6527], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.412155Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-28T09:37:23.413174Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8667:6509], server id = [2:8672:6514], tablet id = 72075186224037901 2025-03-28T09:37:23.413207Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.413977Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8690:6528], server id = [2:8693:6530], tablet id = 72075186224037905, status = OK 2025-03-28T09:37:23.414058Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8690:6528], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.414466Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-28T09:37:23.414863Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-28T09:37:23.415858Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8694:6531], server id 
= [2:8697:6534], tablet id = 72075186224037906, status = OK 2025-03-28T09:37:23.415935Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8694:6531], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.416127Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8668:6510], server id = [2:8673:6515], tablet id = 72075186224037902 2025-03-28T09:37:23.416154Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.417282Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8669:6511], server id = [2:8676:6518], tablet id = 72075186224037903 2025-03-28T09:37:23.417313Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.417903Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8698:6535], server id = [2:8703:6539], tablet id = 72075186224037907, status = OK 2025-03-28T09:37:23.417971Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8698:6535], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.418234Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8699:6536], server id = [2:8704:6540], tablet id = 72075186224037908, status = OK 2025-03-28T09:37:23.418283Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8699:6536], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:23.421021Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-28T09:37:23.421445Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8688:6527], server id = [2:8691:6529], tablet id = 72075186224037904 2025-03-28T09:37:23.421477Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.423146Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-28T09:37:23.423454Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8690:6528], server id = [2:8693:6530], tablet id = 72075186224037905 2025-03-28T09:37:23.423477Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.423700Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-28T09:37:23.424284Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8694:6531], server id = [2:8697:6534], tablet id = 72075186224037906 2025-03-28T09:37:23.424306Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.424680Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-28T09:37:23.424852Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8698:6535], server id = [2:8703:6539], tablet id = 72075186224037907 2025-03-28T09:37:23.424870Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.425096Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-28T09:37:23.425133Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:23.425306Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:23.425629Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8699:6536], server id = [2:8704:6540], tablet id = 72075186224037908 2025-03-28T09:37:23.425653Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:23.449964Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:23.450260Z node 2 
:STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-28T09:37:24.139613Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 3 2025-03-28T09:37:24.139687Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:37:26.352815Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:37:26.352996Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:37:26.925778Z node 2 :STATISTICS INFO: Node 3 is unavailable 2025-03-28T09:37:26.925853Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:26.925981Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-03-28T09:37:26.926010Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:37:26.926083Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:26.926141Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:26.926618Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:37:26.940440Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:26.940615Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-03-28T09:37:26.941114Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8831:6604], server id = [2:8832:6605], tablet id = 72075186224037900, status = OK 2025-03-28T09:37:26.941207Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8831:6604], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:26.942384Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:37:26.942448Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:26.942571Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:26.942730Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:26.943062Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:37:26.944744Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8831:6604], server id = [2:8832:6605], tablet id = 72075186224037900 2025-03-28T09:37:26.944779Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:26.945493Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:26.976142Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8850:6623]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:26.976326Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:26.976361Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8850:6623], StatRequests.size() = 1 2025-03-28T09:37:27.098577Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2VmMzIwMWItMTRjMDBjMDctMzI2NGEzZTgtZmUwZTY1MTg=, TxId: 2025-03-28T09:37:27.098652Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2VmMzIwMWItMTRjMDBjMDctMzI2NGEzZTgtZmUwZTY1MTg=, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-03-28T09:37:27.099293Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8859:6629]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:27.099701Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:27.099782Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:37:27.100212Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:27.103461Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:27.103553Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:37:27.103616Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:37:27.108323Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 probe = 3 >> KqpImmediateEffects::ConflictingKeyRW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 >> KqpWrite::CastValues >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink >> KqpEffects::InsertAbort_Literal_Conflict+UseSink [GOOD] >> KqpEffects::DeletePkPrefixWithIndex >> KqpImmediateEffects::AlreadyBrokenImmediateEffects >> KqpImmediateEffects::ConflictingKeyW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WRR2 >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] >> KqpImmediateEffects::Delete [GOOD] >> KqpImmediateEffects::DeleteAfterInsert >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup [GOOD] >> TConsoleConfigTests::TestAddConfigItem >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink 
>> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink >> KqpImmediateEffects::UpdateOn [GOOD] >> KqpImmediateEffects::UpdateAfterUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155200.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143155200.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123155200.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123155200.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154000.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123154000.000000s;Name=;Codec=}; 2025-03-28T09:36:41.769063Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:36:41.824178Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:36:41.838078Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:36:41.838266Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:36:41.843906Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:36:41.844043Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:36:41.844229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:36:41.844302Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:36:41.844371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:36:41.844432Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:36:41.844495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:36:41.844578Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:36:41.844664Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:36:41.844727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:36:41.844798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:36:41.844874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:36:41.862221Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:36:41.862480Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:36:41.862522Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:36:41.862629Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:36:41.862798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:36:41.862877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:36:41.862921Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:36:41.863011Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:36:41.863053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:36:41.863080Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:36:41.863116Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:36:41.863217Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:36:41.863256Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:36:41.863281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:36:41.863299Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:36:41.863354Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:36:41.863383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:36:41.863405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:36:41.863421Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:36:41.863462Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:36:41.863483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:36:41.863502Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:36:41.863528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:36:41.863552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:36:41.863572Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:36:41.863871Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=31; 2025-03-28T09:36:41.863924Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=21; 2025-03-28T09:36:41.863969Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=20; 2025-03-28T09:36:41.864044Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-03-28T09:36:41.864171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:36:41.864204Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:36:41.864225Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:36:41.864350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:36:41.864378Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:36:41.864396Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:36:41.864495Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:36:41.864518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:36:41.864537Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:36:41.864655Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
580406,"d_finished":0,"c":0,"l":1743154650581093,"d":687},{"name":"task_result","f":1743154650024946,"d_finished":211892,"c":28,"l":1743154650576837,"d":211892}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1284:3291]->[1:1283:3290] 2025-03-28T09:37:30.581612Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:37:30.015396Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203504;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203504;selected_rows=0; 2025-03-28T09:37:30.581654Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:37:30.581935Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-28T09:37:30.583909Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-03-28T09:37:30.584219Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000006:max} readable: {1000000006:max} at tablet 9437184 2025-03-28T09:37:30.584358Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:37:30.584532Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:37:30.584602Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:37:30.585137Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-28T09:37:30.585237Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:37:30.585777Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1300:3307];trace_detailed=; 2025-03-28T09:37:30.586265Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-28T09:37:30.586508Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-28T09:37:30.586720Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:30.586868Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:30.587185Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:37:30.587306Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
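For readability, the program_parsed payload from the TTxScan record above, pretty-printed; the content is verbatim from the log line, with whitespace added only. It shows the three-node scan program produced for Command { Projection { Columns { Id: 1 } } }: FetchOriginalData (node 1) feeds AssembleOriginalData (node 2) for the timestamp column, and the final Projection (node 0) consumes node 2's output:

  {
    "edges": [
      { "owner_id": 0, "inputs": [ { "from": 2 } ] },
      { "owner_id": 1, "inputs": [] },
      { "owner_id": 2, "inputs": [ { "from": 1 } ] }
    ],
    "nodes": {
      "1": { "p": { "p": { "data": [ { "name": "timestamp", "id": 1 } ] },
                    "o": "1", "t": "FetchOriginalData" }, "w": 2, "id": 1 },
      "2": { "p": { "i": "1", "p": { "address": { "name": "timestamp", "id": 1 } },
                    "o": "1", "t": "AssembleOriginalData" }, "w": 7, "id": 2 },
      "0": { "p": { "i": "1", "t": "Projection" }, "w": 7, "id": 0 }
    }
  }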
2025-03-28T09:37:30.587463Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:30.587530Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1300:3307] finished for tablet 9437184 2025-03-28T09:37:30.588007Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1299:3306];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743154650585705,"name":"_full_task","f":1743154650585705,"d_finished":0,"c":0,"l":1743154650587599,"d":1894},"events":[{"name":"bootstrap","f":1743154650585961,"d_finished":946,"c":1,"l":1743154650586907,"d":946},{"a":1743154650587159,"name":"ack","f":1743154650587159,"d_finished":0,"c":0,"l":1743154650587599,"d":440},{"a":1743154650587138,"name":"processing","f":1743154650587138,"d_finished":0,"c":0,"l":1743154650587599,"d":461},{"name":"ProduceResults","f":1743154650586606,"d_finished":596,"c":2,"l":1743154650587513,"d":596},{"a":1743154650587516,"name":"Finish","f":1743154650587516,"d_finished":0,"c":0,"l":1743154650587599,"d":83}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:37:30.588088Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1299:3306];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:37:30.588539Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1299:3306];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743154650585705,"name":"_full_task","f":1743154650585705,"d_finished":0,"c":0,"l":1743154650588141,"d":2436},"events":[{"name":"bootstrap","f":1743154650585961,"d_finished":946,"c":1,"l":1743154650586907,"d":946},{"a":1743154650587159,"name":"ack","f":1743154650587159,"d_finished":0,"c":0,"l":1743154650588141,"d":982},{"a":1743154650587138,"name":"processing","f":1743154650587138,"d_finished":0,"c":0,"l":1743154650588141,"d":1003},{"name":"ProduceResults","f":1743154650586606,"d_finished":596,"c":2,"l":1743154650587513,"d":596},{"a":1743154650587516,"name":"Finish","f":1743154650587516,"d_finished":0,"c":0,"l":1743154650588141,"d":625}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1300:3307]->[1:1299:3306] 2025-03-28T09:37:30.588646Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:37:30.585204Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:37:30.588726Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:37:30.588856Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 >> KqpImmediateEffects::Upsert [GOOD] >> KqpImmediateEffects::UpsertAfterInsert >> KqpImmediateEffects::DeleteAfterUpsert [GOOD] >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex >> KqpImmediateEffects::ConflictingKeyW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RWR2 >> TConsoleConfigTests::TestAddConfigItem [GOOD] >> TConsoleConfigTests::TestAutoKind ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSaBeforeAggregate [GOOD] Test command 
err: 2025-03-28T09:35:04.704165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:04.704360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:04.704395Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b2d/r3tmp/tmpJ1xjq0/pdisk_1.dat 2025-03-28T09:35:05.007939Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18293, node 1 2025-03-28T09:35:05.212218Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:05.212262Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:05.212283Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:05.212589Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:05.214247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:35:05.294763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:05.294871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:05.308564Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17715 2025-03-28T09:35:05.793119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:08.137351Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:08.160162Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:08.160246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:08.197182Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:08.198737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:08.419030Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:08.419526Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:08.419929Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:08.420042Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:08.420233Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:08.420290Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:08.420338Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:08.420388Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:08.420433Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:08.578544Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:08.578644Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:08.591226Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:08.719821Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:08.761541Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:08.761619Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:08.785082Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:08.785193Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:08.785352Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:08.785400Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:08.785444Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:08.785482Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:08.785520Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:08.785552Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:08.785831Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:08.812218Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:08.812313Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:08.818362Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:08.822766Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:08.822953Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:08.828028Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:08.841695Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:08.841746Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:08.841805Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:08.852275Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:08.857340Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:08.857439Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:09.017893Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:09.176896Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:09.285455Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:10.072957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:10.073053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:10.086790Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:10.206960Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:10.207124Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:10.207342Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:10.207469Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:10.207556Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:10.207643Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:10.207736Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:10.207842Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:10.207915Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:10.208018Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:10.208099Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:10.208167Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:10.227841Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:10.227977Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... 4986Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7158:5235], server id = [2:7159:5236], tablet id = 72075186224037894 2025-03-28T09:37:27.145063Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:7289:5314] 2025-03-28T09:37:27.145112Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:7289:5314] 2025-03-28T09:37:27.184604Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:37:27.184737Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:37:27.185424Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:37:27.186006Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:37:27.186337Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded database: /Root/Database 2025-03-28T09:37:27.186397Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start key 2025-03-28T09:37:27.186441Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2025-03-28T09:37:27.186482Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal table local path id: 4 2025-03-28T09:37:27.186521Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal start time: 1743154647113155 2025-03-28T09:37:27.186560Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded traversal IsColumnTable: 1 2025-03-28T09:37:27.186595Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded global traversal round: 2 2025-03-28T09:37:27.186721Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2025-03-28T09:37:27.186807Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:37:27.186919Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2025-03-28T09:37:27.187001Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2025-03-28T09:37:27.187091Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2025-03-28T09:37:27.187159Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:37:27.187305Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:27.188495Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:37:27.189349Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:27.189446Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:27.189577Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:37:27.191194Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:27.191263Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:27.193028Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:27.257330Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:27.257539Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-28T09:37:27.258170Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7336:5345], server id = [2:7337:5346], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:27.258275Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7336:5345], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:27.262073Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:27.262192Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:27.262450Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:27.262629Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:27.262916Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:27.266005Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7336:5345], server id = [2:7337:5346], tablet id = 72075186224037899 2025-03-28T09:37:27.266052Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:27.266596Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:27.297095Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7357:5365]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:27.297315Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:27.297360Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7357:5365], StatRequests.size() = 1 2025-03-28T09:37:27.416765Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODc1NmI3MWEtNjAxMDA0MDEtMzczOTIyMTUtOGMzNjliOTE=, TxId: 2025-03-28T09:37:27.416883Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODc1NmI3MWEtNjAxMDA0MDEtMzczOTIyMTUtOGMzNjliOTE=, TxId: 2025-03-28T09:37:27.417539Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:27.430545Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7367:5371] 2025-03-28T09:37:27.430828Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7367:5371], schemeshard id = 72075186224037897 2025-03-28T09:37:27.430972Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7289:5314], server id = [2:7368:5372], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:27.431013Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7368:5372] 2025-03-28T09:37:27.431117Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:7368:5372], node id = 2, have schemeshards count = 1, need schemeshards count = 0 
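The RunDataQuery statement that [TQueryBase] logs above (the same statement recurs in the later traversal rounds), reformatted with line breaks only; the text is otherwise verbatim from the log. The bare List in the two DECLAREs is left as captured: the element types appear truncated by the log pipeline, so they are not guessed here.

  DECLARE $owner_id AS Uint64;
  DECLARE $local_path_id AS Uint64;
  DECLARE $stat_type AS Uint32;
  DECLARE $column_tags AS List;  -- element type truncated in the captured log
  DECLARE $data AS List;         -- element type truncated in the captured log
  UPSERT INTO `.metadata/_statistics`
      (owner_id, local_path_id, stat_type, column_tag, data)
  VALUES
      ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
      ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);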
2025-03-28T09:37:27.444463Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:27.444541Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:27.532029Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:7372:5375] 2025-03-28T09:37:27.532866Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:2800:3221] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 4 } } Types: TYPE_COUNT_MIN_SKETCH } 2025-03-28T09:37:27.532924Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. OperationId operationId , ReplyToActorId [1:2800:3221] 2025-03-28T09:37:27.532994Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyze::Complete 2025-03-28T09:37:28.034250Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-03-28T09:37:28.034339Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:37:28.767431Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:28.767528Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:37:28.767571Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:37:30.092563Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:30.092713Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:30.092766Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:30.093455Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:30.108220Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:30.108696Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:30.108793Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:30.109312Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:30.124003Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:30.124255Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-03-28T09:37:30.124892Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7453:5418], server id = [2:7454:5419], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:30.125010Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7453:5418], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:30.126407Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:30.126509Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:30.126742Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:30.126991Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:30.127314Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:30.130049Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7453:5418], server id = [2:7454:5419], tablet id = 72075186224037899 2025-03-28T09:37:30.130101Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:30.130616Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:30.152273Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDcxZDVkYzEtMzY3ZDBkMTUtOGU1MTNiY2ItMzI0Mjk2ZDU=, TxId: 2025-03-28T09:37:30.152370Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDcxZDVkYzEtMzY3ZDBkMTUtOGU1MTNiY2ItMzI0Mjk2ZDU=, TxId: 2025-03-28T09:37:30.152945Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:30.177274Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:30.177339Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:2800:3221] >> KqpInplaceUpdate::SingleRowStr+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowStr-UseSink >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] >> KqpEffects::UpdateOn_Params [GOOD] >> KqpEffects::UpdateOn_Select >> KqpEffects::InsertAbort_Params_Success >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] >> TConsoleConfigTests::TestAutoKind [GOOD] >> TConsoleConfigTests::TestAllowedScopes >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates+UseSink >> KqpEffects::InsertAbort_Select_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Conflict-UseSink >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] >> KqpImmediateEffects::ReplaceDuplicates [GOOD] >> KqpImmediateEffects::ReplaceExistingKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] Test command err: 2025-03-28T09:35:05.785777Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:05.786058Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:05.786116Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b29/r3tmp/tmpSNWzaC/pdisk_1.dat 2025-03-28T09:35:06.072279Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6718, node 1 2025-03-28T09:35:06.270660Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:06.270728Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:06.270749Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:06.271099Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:06.272605Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:35:06.349938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:06.350073Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:06.363563Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31525 2025-03-28T09:35:06.839480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:09.067973Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:09.089269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:09.089347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:09.124974Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:09.126213Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:09.332112Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:09.332519Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:09.332874Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:09.332978Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:09.333148Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:09.333199Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:09.333270Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:09.333337Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:09.333396Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:09.489487Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:09.489607Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:09.502507Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:09.608582Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:09.640035Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:09.640096Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:09.660689Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:09.660795Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:09.660923Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:09.660970Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:09.661015Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:09.661048Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:09.661081Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:09.661112Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:09.661366Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:09.687304Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:09.687378Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:09.693464Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:09.697952Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:09.698140Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:09.703909Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:09.717576Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:09.717630Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:09.717724Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:09.728436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:09.733231Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:09.733320Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:09.897788Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:10.032507Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:10.119014Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:10.918660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:10.918803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:10.935358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:11.047503Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:11.047678Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:11.047894Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:11.047992Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:11.048063Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:11.048134Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:11.048238Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:11.048386Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:11.048474Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:11.048556Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:11.048635Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:11.048698Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:11.068191Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:11.068268Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... 26.738551Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:26.738916Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:26.741404Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:26.778009Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7612:5553]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:26.778258Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:26.778320Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:7612:5553], StatRequests.size() = 1 2025-03-28T09:37:26.891596Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTg3MWUzY2EtZWQyMjEwZDMtNzJkNmQ2MjMtYjk5ZmU1ZmQ=, TxId: 2025-03-28T09:37:26.891663Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTg3MWUzY2EtZWQyMjEwZDMtNzJkNmQ2MjMtYjk5ZmU1ZmQ=, TxId: 2025-03-28T09:37:26.892068Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:26.905898Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:26.905963Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3261:3366] 2025-03-28T09:37:27.405598Z node 2 :STATISTICS DEBUG: Event round 2 is different from the current 0 2025-03-28T09:37:27.405685Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:37:28.182839Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:28.182916Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-03-28T09:37:28.186056Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:28.202495Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:28.203037Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:28.203085Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037897, LocalPathId: 5], AnalyzedShards 1 2025-03-28T09:37:28.228337Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:28.239500Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 
2025-03-28T09:37:28.240819Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-03-28T09:37:28.240922Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-03-28T09:37:28.255666Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-28T09:37:29.553866Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:29.553972Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-03-28T09:37:29.554012Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-28T09:37:29.554619Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:29.579774Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:29.580165Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:29.580235Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:29.580612Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:37:29.606594Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:29.606823Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-28T09:37:29.607467Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7727:5613], server id = [2:7728:5614], tablet id = 72075186224037900, status = OK 2025-03-28T09:37:29.607596Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7727:5613], path = { OwnerId: 72075186224037897 LocalId: 5 } 2025-03-28T09:37:29.610567Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:37:29.610665Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:29.610976Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:29.611151Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:29.611395Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. 
Database: /Root/Database 2025-03-28T09:37:29.613694Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7727:5613], server id = [2:7728:5614], tablet id = 72075186224037900 2025-03-28T09:37:29.613737Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:29.614197Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:29.638657Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZjNiZTIyZDYtMjkyNDg1MzEtNDNlM2E1ODUtZjJlZjRhNmI=, TxId: 2025-03-28T09:37:29.638766Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjNiZTIyZDYtMjkyNDg1MzEtNDNlM2E1ODUtZjJlZjRhNmI=, TxId: 2025-03-28T09:37:29.639325Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:29.668170Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-28T09:37:29.668233Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:30.173015Z node 2 :STATISTICS DEBUG: Event round 3 is different from the current 0 2025-03-28T09:37:30.173103Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:37:30.879267Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:37:30.879456Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:37:30.890350Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:30.890413Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:37:30.890452Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:37:32.065697Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:32.065802Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 5] is column table. 2025-03-28T09:37:32.065834Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-28T09:37:32.066217Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:32.080578Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:32.081124Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:32.081196Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:32.081612Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:32.097165Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:32.097327Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2025-03-28T09:37:32.097915Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:7825:5671], server id = [2:7826:5672], tablet id = 72075186224037900, status = OK 2025-03-28T09:37:32.098003Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:7825:5671], path = { OwnerId: 72075186224037897 LocalId: 5 } 2025-03-28T09:37:32.099203Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:37:32.099291Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:32.099484Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:32.099636Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:32.099844Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:32.101770Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:7825:5671], server id = [2:7826:5672], tablet id = 72075186224037900 2025-03-28T09:37:32.101813Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:32.102395Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:32.126404Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:07.000000Z, event interval end# 2025-03-28T09:37:30.000000Z 2025-03-28T09:37:32.126508Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OWRkZTI1MGItOWMyMGUzNzMtNTAxZmQ2MmMtNTFlN2IxNjk=, TxId: 2025-03-28T09:37:32.126536Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWRkZTI1MGItOWMyMGUzNzMtNTAxZmQ2MmMtNTFlN2IxNjk=, TxId: 2025-03-28T09:37:32.127077Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:32.141098Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 5] 2025-03-28T09:37:32.141263Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3261:3366] >> KqpImmediateEffects::InsertDuplicates+UseSink [GOOD] >> KqpImmediateEffects::InsertDuplicates-UseSink >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD] >> KqpInplaceUpdate::BigRow [GOOD] >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] >> KqpImmediateEffects::ManyFlushes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25237, MsgBus: 11206 2025-03-28T09:37:22.871177Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792183437230185:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:22.871303Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013b4/r3tmp/tmpa78EFT/pdisk_1.dat 2025-03-28T09:37:23.137713Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25237, node 1 2025-03-28T09:37:23.216558Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:23.216591Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:23.216600Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:23.216755Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:37:23.252439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:23.252573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:23.254446Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11206 TClient is connected to server localhost:11206 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:23.628516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
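The RunDataQuery blocks above show the statistics aggregator persisting per-column statistics with a single parameterized UPSERT into `.metadata/_statistics`. A minimal sketch of issuing the same query through the ydb Python SDK follows. The List element types (Uint32 and String) are an inference rather than a quote from this log, since the angle-bracketed type parameters appear to have been stripped during capture; the endpoint, database path, and parameter values are likewise placeholders for illustration, not values from this run.

    import ydb

    # The statistics UPSERT as logged above; List element types are inferred.
    QUERY = """
    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_type AS Uint32;
    DECLARE $column_tags AS List<Uint32>;
    DECLARE $data AS List<String>;
    UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
    VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
           ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);
    """

    def save_stats(session):
        # Preparing the query resolves the DECLAREd parameter types, so plain
        # Python ints, lists, and bytes can be passed for the $ parameters.
        prepared = session.prepare(QUERY)
        session.transaction(ydb.SerializableReadWrite()).execute(
            prepared,
            {
                "$owner_id": 72075186224037897,  # placeholder values
                "$local_path_id": 5,
                "$stat_type": 1,
                "$column_tags": [1, 2],
                "$data": [b"stat-blob-0", b"stat-blob-1"],
            },
            commit_tx=True,
        )

    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root/Database")
    driver.wait(timeout=5)
    pool = ydb.SessionPool(driver)
    try:
        pool.retry_operation_sync(save_stats)  # retried on transient session errors
    finally:
        pool.stop()
        driver.stop()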
2025-03-28T09:37:23.654713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:23.786778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:23.939896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:24.006753Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:25.398539Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792196322133847:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:25.398679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:25.697196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.765499Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.794248Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.858639Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.892797Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.922440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.957911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792196322134366:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:25.957988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:25.958076Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792196322134371:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:25.961399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:25.970085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792196322134373:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:26.027012Z node 1 :TX_PROXY ERROR: Actor# [1:7486792200617101721:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:26.802250Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4923, MsgBus: 14004 2025-03-28T09:37:28.021374Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792208415728993:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:28.021482Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013b4/r3tmp/tmpCFISue/pdisk_1.dat 2025-03-28T09:37:28.128110Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4923, node 2 2025-03-28T09:37:28.151193Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:28.151257Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:28.152718Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:28.170346Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:28.170372Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:28.170380Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:28.170512Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14004 TClient is connected to server localhost:14004 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:28.565883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
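The TPoolCreatorActor lines above ("Scheduled retry for error: ... completed, doublechecking", followed by TX_PROXY reporting "path exist, request accepts it") illustrate an idempotent create: when a retry finds the object already in place, that counts as success rather than failure. A generic sketch of the pattern, with hypothetical names; this is not YDB's actual implementation:

    import time

    def create_with_doublecheck(create, exists, attempts=5, base_delay=0.1):
        """Run create(); on failure, double-check whether it actually took effect."""
        for attempt in range(attempts):
            try:
                create()
                return True
            except Exception:
                # The transaction may have completed despite the error
                # ("completed, doublechecking"), so verify before retrying.
                if exists():
                    return True
                time.sleep(base_delay * 2 ** attempt)  # back off between retries
        return exists()

The double-check is what keeps concurrent creators from treating "path exist" as a hard error, which is exactly how the log resolves here.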
2025-03-28T09:37:28.571966Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:37:28.578902Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:28.686828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:28.826432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:28.890574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:30.943782Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792217005665357:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:30.943863Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:30.993068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.028775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.058414Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.092789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.123072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.159995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.202063Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792221300633167:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.202128Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.202329Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792221300633172:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.205579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:31.216716Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792221300633174:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:31.298460Z node 2 :TX_PROXY ERROR: Actor# [2:7486792221300633227:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:32.216564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.021824Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792208415728993:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:33.021910Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TConsoleConfigTests::TestAllowedScopes [GOOD] >> TConsoleConfigTests::TestAffectedConfigs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1410, MsgBus: 9789 2025-03-28T09:37:23.483917Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792187595194231:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:23.484020Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013b1/r3tmp/tmpWeUDYI/pdisk_1.dat 2025-03-28T09:37:23.736973Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1410, node 1 2025-03-28T09:37:23.797690Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:23.797720Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:23.797728Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:23.797870Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:37:23.815528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:23.815641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:23.817400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9789 TClient is connected to server localhost:9789 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:24.203731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:24.226181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:24.385509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:24.533780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:24.585985Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:25.818372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792196185130605:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:25.818440Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.052587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.079999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.109979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.139347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.189157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.256473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.336282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792200480098422:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.336357Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.336585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792200480098427:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.340265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:26.348935Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792200480098429:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:26.444845Z node 1 :TX_PROXY ERROR: Actor# [1:7486792200480098485:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:27.352335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8255, MsgBus: 23353 2025-03-28T09:37:28.477674Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792206381276784:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:28.477736Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013b1/r3tmp/tmp3CncB4/pdisk_1.dat 2025-03-28T09:37:28.594914Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8255, node 2 2025-03-28T09:37:28.618206Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:28.618307Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:28.619602Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:28.651220Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:28.651246Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:28.651254Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:28.651363Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23353 TClient is connected to server localhost:23353 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:29.078000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
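A side note for reading these logs: the session ids take the form ydb://session/3?node_id=N&id=<base64>, and the id component decodes to a dashed hex string. A small sketch, using a session id that appears earlier in this output; this is an observation about the log format, not a documented contract:

    import base64
    from urllib.parse import parse_qs, urlparse

    uri = "ydb://session/3?node_id=2&id=ZjNiZTIyZDYtMjkyNDg1MzEtNDNlM2E1ODUtZjJlZjRhNmI="
    params = parse_qs(urlparse(uri).query)
    print(params["node_id"][0])                        # 2
    print(base64.b64decode(params["id"][0]).decode())  # f3be22d6-29248531-43e3a585-f2ef4a6b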
2025-03-28T09:37:29.087398Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:37:29.102555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.183840Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.341124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.403604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.504608Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792219266180428:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.504698Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.556437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.586674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.623829Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.659767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.687725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.755416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.826537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792219266180945:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.826598Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.826787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792219266180950:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.829664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:31.840042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792219266180952:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:31.933362Z node 2 :TX_PROXY ERROR: Actor# [2:7486792219266181007:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:32.926523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.481198Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792206381276784:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:33.481255Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpWrite::CastValues [GOOD] >> KqpWrite::CastValuesOptional ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] Test command err: Trying to start YDB, gRPC: 7782, MsgBus: 61733 2025-03-28T09:37:23.151969Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792186469720233:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:23.152043Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013b7/r3tmp/tmpRUL0H2/pdisk_1.dat 2025-03-28T09:37:23.407621Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7782, node 1 2025-03-28T09:37:23.482539Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:23.482580Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:23.482586Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:23.482727Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:37:23.499357Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:23.499464Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:23.501179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61733 TClient is connected to server localhost:61733 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:23.910665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:23.936962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:24.049093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:24.196841Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:24.272257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:25.519698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792195059656613:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:25.519810Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:25.770892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.798106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.823650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.851845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.879238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:25.968888Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.005872Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792199354624422:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.005943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.005947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792199354624427:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.009081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:26.017707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792199354624429:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:26.087434Z node 1 :TX_PROXY ERROR: Actor# [1:7486792199354624482:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:27.028977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.151919Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792186469720233:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:28.151994Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10494, MsgBus: 18673 2025-03-28T09:37:28.679983Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792205270381512:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:28.680035Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013b7/r3tmp/tmpnkG0h6/pdisk_1.dat 2025-03-28T09:37:28.765078Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10494, node 2 2025-03-28T09:37:28.818815Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:28.818899Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:28.822677Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:28.839190Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:28.839215Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:28.839229Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:28.839338Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18673 TClient is connected to server localhost:18673 WaitRootIsUp 'Root'... 
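Much of this output is WARN/ERROR noise from expected startup races (NET_CLASSIFIER, HIVE, KQP_WORKLOAD_SERVICE, FLAT_TX_SCHEMESHARD). A throwaway sketch for getting a per-component severity tally out of a log like this one, assuming the "timestamp node N :COMPONENT LEVEL:" layout seen above:

    import re
    from collections import Counter

    # Matches e.g. "2025-03-28T09:37:23.151969Z node 1 :METADATA_PROVIDER WARN:"
    MSG = re.compile(
        r"\d{4}-\d{2}-\d{2}T[\d:.]+Z node \d+ :(?P<component>[A-Z_0-9]+) (?P<level>WARN|ERROR):"
    )

    def tally(lines):
        counts = Counter()
        for line in lines:
            # Wrapped lines can carry several messages, so scan each line fully.
            for m in MSG.finditer(line):
                counts[m.group("component"), m.group("level")] += 1
        return counts

    # Usage: print(tally(open("ya_log.txt")).most_common(10))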
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:29.288652Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:29.299048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.374041Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.519034Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.584744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.495882Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792218155285152:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.495960Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.542778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.573187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.616609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.650993Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.680420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.752899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.833668Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792218155285671:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.833732Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.833840Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792218155285676:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.837429Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:31.848011Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792218155285678:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:31.940173Z node 2 :TX_PROXY ERROR: Actor# [2:7486792218155285733:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:32.858239Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.592862Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Yjk3MjMwYmItZjFiZDliNTQtYzM0MzE0ZjctMTk5YjM4NWE=, ActorId: [2:7486792226745220864:2524], ActorState: ExecuteState, TraceId: 01jqe1tacb929k6mg44e6k7208, Create QueryResponse for error on request, msg: 2025-03-28T09:37:33.680514Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792205270381512:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:33.680592Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpEffects::DeletePkPrefixWithIndex [GOOD] >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155208.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155208.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143155208.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155208.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155208.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123155208.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=143155208.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155208.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123154008.000000s;Name=;Codec=}; 2025-03-28T09:36:48.229540Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:36:48.286207Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:36:48.300562Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:36:48.300785Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:36:48.306418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:36:48.306567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:36:48.306796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:36:48.306875Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:36:48.306937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:36:48.307001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:36:48.307061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:36:48.307137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:36:48.307220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:36:48.307287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:36:48.307355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:36:48.307429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:36:48.325273Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:36:48.325494Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:36:48.325543Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:36:48.325666Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:36:48.325775Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:36:48.325840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:36:48.325871Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:36:48.325926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:36:48.325962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:36:48.325989Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:36:48.326022Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:36:48.326122Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:36:48.326185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:36:48.326219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:36:48.326238Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:36:48.326305Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:36:48.326337Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:36:48.326361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:36:48.326381Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:36:48.326424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:36:48.326447Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:36:48.326464Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:36:48.326493Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:36:48.326518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:36:48.326538Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:36:48.326828Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=36; 2025-03-28T09:36:48.326883Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=21; 2025-03-28T09:36:48.326935Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=25; 2025-03-28T09:36:48.326990Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=26; 2025-03-28T09:36:48.327088Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:36:48.327121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:36:48.327142Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:36:48.327282Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:36:48.327315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:36:48.327334Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:36:48.327430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchem ... 
line=common_data.cpp:29;EXECUTE:finishLoadingTime=576; 2025-03-28T09:37:34.308666Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=49238; 2025-03-28T09:37:34.320172Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=11426; 2025-03-28T09:37:34.332432Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=11251; 2025-03-28T09:37:34.332544Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=12274; 2025-03-28T09:37:34.332704Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=91; 2025-03-28T09:37:34.332850Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=92; 2025-03-28T09:37:34.333006Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=104; 2025-03-28T09:37:34.333122Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=70; 2025-03-28T09:37:34.347396Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=14201; 2025-03-28T09:37:34.365534Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=18001; 2025-03-28T09:37:34.365679Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=40; 2025-03-28T09:37:34.365780Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=44; 2025-03-28T09:37:34.365838Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2025-03-28T09:37:34.365888Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-03-28T09:37:34.365937Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-03-28T09:37:34.366018Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=40; 2025-03-28T09:37:34.366069Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-03-28T09:37:34.366178Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=65; 2025-03-28T09:37:34.366232Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-03-28T09:37:34.366318Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33; 2025-03-28T09:37:34.366417Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=52; 2025-03-28T09:37:34.366774Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=312; 2025-03-28T09:37:34.366819Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=114496; 2025-03-28T09:37:34.366987Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=36397736;raw_bytes=56295575;count=22;records=560000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:37:34.367100Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2550:4459];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:37:34.367172Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2550:4459];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:37:34.367242Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2550:4459];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:37:34.387114Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2550:4459];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-28T09:37:34.387275Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:37:34.387337Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:37:34.387410Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-28T09:37:34.387491Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-03-28T09:37:34.387534Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:37:34.387596Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:37:34.387642Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:37:34.387738Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:37:34.388490Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:37:34.388581Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:2600:4502];tablet_id=9437184;parent=[1:2550:4459];fline=manager.cpp:82;event=ask_data;request=request_id=120;1={portions_count=22};; 2025-03-28T09:37:34.389545Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2550:4459];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:37:34.390033Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2550:4459];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:37:34.390071Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-03-28T09:37:34.390096Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:37:34.390138Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2550:4459];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:37:34.390198Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2550:4459];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:37:34.390257Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2550:4459];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-28T09:37:34.390317Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2550:4459];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700008;tx_id=18446744073709551615;;current_snapshot_ts=1000000002; 2025-03-28T09:37:34.390361Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2550:4459];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:37:34.390412Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2550:4459];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:37:34.390450Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:2550:4459];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-03-28T09:37:34.390537Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:2550:4459];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
2025-03-28T09:37:34.392108Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2550:4459];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=22;path_id=1;
2025-03-28T09:37:34.393126Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:2550:4459];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished;
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184
240000/15598728 160000/10402096 80000/5203352 0/0
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD]
Test command err:
Trying to start YDB, gRPC: 19108, MsgBus: 10491
2025-03-28T09:37:24.366454Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792191132555477:2066];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:37:24.366618Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ab/r3tmp/tmpVwVfgd/pdisk_1.dat
2025-03-28T09:37:24.681793Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 19108, node 1
2025-03-28T09:37:24.746613Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:37:24.746641Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:37:24.746652Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:37:24.746814Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:37:24.778414Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:37:24.778557Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:37:24.780810Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:10491
TClient is connected to server localhost:10491
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:25.159976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:25.177978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:25.296940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:25.451450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:25.510120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.997486Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792199722491833:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.997619Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:27.251575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:27.278461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:27.305333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:27.332544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:27.395850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:27.421428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:27.460230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792204017459642:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:27.460297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:27.460472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792204017459647:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:27.464346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:27.473722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792204017459649:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:27.536393Z node 1 :TX_PROXY ERROR: Actor# [1:7486792204017459702:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:28.368330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 28757, MsgBus: 10451 2025-03-28T09:37:29.583660Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792211944184223:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:29.586271Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ab/r3tmp/tmpyLm7PL/pdisk_1.dat 2025-03-28T09:37:29.704105Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:29.717915Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:29.717991Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:29.719406Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28757, node 2 2025-03-28T09:37:29.762246Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:29.762269Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:29.762276Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:29.762382Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10451 TClient is connected to server localhost:10451 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:30.130559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:37:30.147407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:30.199384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:30.350786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:30.431110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:32.327656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792224829087858:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.327729Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.368602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.400883Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.432828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.462153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.497672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.532335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.586724Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792224829088368:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.586806Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.586933Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792224829088373:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.591551Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:32.601041Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792224829088375:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-28T09:37:32.702766Z node 2 :TX_PROXY ERROR: Actor# [2:7486792224829088433:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:37:33.660502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
>> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD]
>> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ManyFlushes [GOOD]
Test command err:
Trying to start YDB, gRPC: 29993, MsgBus: 18157
2025-03-28T09:37:23.462809Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792185117315388:2061];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:37:23.462926Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ad/r3tmp/tmpsdllSz/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 29993, node 1
2025-03-28T09:37:23.764090Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:37:23.766884Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-28T09:37:23.768169Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-28T09:37:23.805363Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:37:23.805419Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:37:23.805428Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:37:23.805582Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:37:23.829291Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:37:23.829407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:37:23.831120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:18157
TClient is connected to server localhost:18157
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:24.257154Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:24.279541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:24.399820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:24.532932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:24.592255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.114735Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792198002219059:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.114844Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.393466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.422080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.449431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.479586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.508619Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.536130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.613012Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792198002219578:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.613107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.613137Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792198002219583:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.616245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:26.624992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792198002219585:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:26.719278Z node 1 :TX_PROXY ERROR: Actor# [1:7486792198002219640:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:27.615278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 4051, MsgBus: 29584 2025-03-28T09:37:28.876786Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792208879288821:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:28.876836Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ad/r3tmp/tmpH7dRLL/pdisk_1.dat 2025-03-28T09:37:29.006755Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4051, node 2 2025-03-28T09:37:29.049978Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:29.050077Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:29.053349Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:29.092139Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:29.092166Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:29.092174Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:29.092289Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29584 TClient is connected to server localhost:29584 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:29.512071Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:37:29.526942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.579627Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.733249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.833391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.819785Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792221764192487:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.819897Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.864598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.897691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.924832Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.952268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.023229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.095021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.143478Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792226059160302:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.143614Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.143754Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792226059160307:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.148077Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:32.160862Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792226059160309:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-28T09:37:32.254253Z node 2 :TX_PROXY ERROR: Actor# [2:7486792226059160362:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:37:33.144905Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-03-28T09:37:33.877847Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792208879288821:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:37:33.877909Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::BigRow [GOOD]
Test command err:
Trying to start YDB, gRPC: 18168, MsgBus: 28964
2025-03-28T09:37:23.536188Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792187268956015:2061];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:37:23.536276Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ae/r3tmp/tmpPfFQp6/pdisk_1.dat
2025-03-28T09:37:23.857112Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 18168, node 1
2025-03-28T09:37:23.899784Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:37:23.899831Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:37:23.899839Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:37:23.899967Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:37:23.901811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:37:23.901973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:37:23.903593Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:28964
TClient is connected to server localhost:28964
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:24.319350Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:24.338640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:24.483890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:24.635386Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:24.690377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.078366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792200153859681:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.078497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.393368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.423221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.450229Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.479734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.512354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.578562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:26.613914Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792200153860195:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.613985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.613991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792200153860200:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:26.617439Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:26.626262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792200153860202:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:26.729996Z node 1 :TX_PROXY ERROR: Actor# [1:7486792200153860255:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:27.592635Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.014211Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486792208743795371:2487] TxId: 281474976710675. Ctx: { TraceId: 01jqe1t4ty0dzhcgr9dpr45by8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxZDk2ZjAtOWMyMjExMzEtYjYxODhkMWUtYjhiMDAyOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Executing physical tx, type: 2, stages: 1 2025-03-28T09:37:28.014340Z node 1 :KQP_EXECUTER DEBUG: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2025-03-28T09:37:28.014470Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710675. Resolved key sets: 1 2025-03-28T09:37:28.014624Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710675. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 16] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2025-03-28T09:37:28.014704Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486792208743795371:2487] TxId: 281474976710675. Ctx: { TraceId: 01jqe1t4ty0dzhcgr9dpr45by8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxZDk2ZjAtOWMyMjExMzEtYjYxODhkMWUtYjhiMDAyOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Stage [0,0] AST: ( (return (lambda '($1) $1)) ) 2025-03-28T09:37:28.014879Z node 1 :KQP_EXECUTER DEBUG: Create result channelId: 1 from task: 1 with index: 0 2025-03-28T09:37:28.015020Z node 1 :KQP_EXECUTER DEBUG: [ShardsResolver] TxId: 281474976710675. Shard resolve complete, resolved shards: 1 2025-03-28T09:37:28.015068Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486792208743795371:2487] TxId: 281474976710675. Ctx: { TraceId: 01jqe1t4ty0dzhcgr9dpr45by8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxZDk2ZjAtOWMyMjExMzEtYjYxODhkMWUtYjhiMDAyOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards nodes resolved, success: 1, failed: 0 2025-03-28T09:37:28.015107Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486792208743795371:2487] TxId: 281474976710675. Ctx: { TraceId: 01jqe1t4ty0dzhcgr9dpr45by8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxZDk2ZjAtOWMyMjExMzEtYjYxODhkMWUtYjhiMDAyOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Shards on nodes: node 1: [72075186224037919] 2025-03-28T09:37:28.015181Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710675. 
Ctx: { TraceId: 01jqe1t4ty0dzhcgr9dpr45by8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxZDk2ZjAtOWMyMjExMzEtYjYxODhkMWUtYjhiMDAyOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, snapshot: {18446744073709551615, 1743154648000} 2025-03-28T09:37:28.015419Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710675. Ctx: { TraceId: 01jqe1t4ty0dzhcgr9dpr45by8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxZDk2ZjAtOWMyMjExMzEtYjYxODhkMWUtYjhiMDAyOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Collect channels updates for task: 1 at actor [1:7486792208743795376:2487] 2025-03-28T09:37:28.015471Z node 1 :KQP_EXECUTER DEBUG: TxId: 281474976710675. Ctx: { TraceId: 01jqe1t4ty0dzhcgr9dpr45by8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxZDk2ZjAtOWMyMjExMzEtYjYxODhkMWUtYjhiMDAyOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending channels info to compute actor: [1:7486792208743795376:2487], channels: 1 2025-03-28T09:37:28.015525Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7486792208743795371:2487] TxId: 281474976710675. Ctx: { TraceId: 01jqe1t4ty0dzhcgr9dpr45by8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxZDk2ZjAtOWMyMjExMzEtYjYxODhkMWUtYjhiMDAyOTM=, CurrentExec ... rd: 72075186224037919, tasks: [], lockTxId: (empty maybe), locks: Locks { LockId: 281474976710675 DataShard: 72075186224037919 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 16 HasWrites: true } SendingShards: 72075186224037919 ReceivingShards: 72075186224037919 Op: Commit, immediate: 1 2025-03-28T09:37:28.191614Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486792208743795432:2487] TxId: 281474976710678. Ctx: { TraceId: 01jqe1t51a0j30tcs1fr9n8h20, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxZDk2ZjAtOWMyMjExMzEtYjYxODhkMWUtYjhiMDAyOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ExecuteDatashardTransaction traceId.verbosity: 0 2025-03-28T09:37:28.191698Z node 1 :KQP_EXECUTER INFO: ActorId: [1:7486792208743795432:2487] TxId: 281474976710678. Ctx: { TraceId: 01jqe1t51a0j30tcs1fr9n8h20, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxZDk2ZjAtOWMyMjExMzEtYjYxODhkMWUtYjhiMDAyOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Total tasks: 0, readonly: 0, datashardTxs: 1, evWriteTxs: 0, topicTxs: 0, volatile: 0, immediate: 1, pending compute tasks0, useFollowers: 0 2025-03-28T09:37:28.191726Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486792208743795432:2487] TxId: 281474976710678. Ctx: { TraceId: 01jqe1t51a0j30tcs1fr9n8h20, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxZDk2ZjAtOWMyMjExMzEtYjYxODhkMWUtYjhiMDAyOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, datashard 72075186224037919 not finished yet: Executing 2025-03-28T09:37:28.191752Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486792208743795432:2487] TxId: 281474976710678. Ctx: { TraceId: 01jqe1t51a0j30tcs1fr9n8h20, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxZDk2ZjAtOWMyMjExMzEtYjYxODhkMWUtYjhiMDAyOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037919 (Executing), 2025-03-28T09:37:28.191769Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486792208743795432:2487] TxId: 281474976710678. Ctx: { TraceId: 01jqe1t51a0j30tcs1fr9n8h20, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxZDk2ZjAtOWMyMjExMzEtYjYxODhkMWUtYjhiMDAyOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ActorState: WaitResolveState, immediate tx, become ExecuteState 2025-03-28T09:37:28.193683Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486792208743795432:2487] TxId: 281474976710678. Ctx: { TraceId: 01jqe1t51a0j30tcs1fr9n8h20, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxZDk2ZjAtOWMyMjExMzEtYjYxODhkMWUtYjhiMDAyOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Got propose result, shard: 72075186224037919, status: COMPLETE, error: 2025-03-28T09:37:28.193771Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486792208743795432:2487] TxId: 281474976710678. Ctx: { TraceId: 01jqe1t51a0j30tcs1fr9n8h20, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxZDk2ZjAtOWMyMjExMzEtYjYxODhkMWUtYjhiMDAyOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. terminate execution. 2025-03-28T09:37:28.193798Z node 1 :KQP_EXECUTER DEBUG: ActorId: [1:7486792208743795432:2487] TxId: 281474976710678. Ctx: { TraceId: 01jqe1t51a0j30tcs1fr9n8h20, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UxZDk2ZjAtOWMyMjExMzEtYjYxODhkMWUtYjhiMDAyOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Resource usage for last stat interval: ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 ru: 1 rate limiter was not found force flag: 1 Trying to start YDB, gRPC: 13525, MsgBus: 16005 2025-03-28T09:37:29.001988Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792206001508629:2211];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013ae/r3tmp/tmpl4nNR1/pdisk_1.dat 2025-03-28T09:37:29.023098Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:37:29.095489Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13525, node 2 2025-03-28T09:37:29.120821Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:29.120930Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:29.123035Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:29.178271Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:29.178298Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:29.178306Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:29.178427Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16005 TClient is connected to server localhost:16005 WaitRootIsUp 'Root'... 
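Note: the KQP_EXECUTER trace above is the full path of one immediate data transaction: resolve key sets, resolve shards to nodes, send channel info to the compute actor, wait in WaitResolveState, propose to the datashard, and receive COMPLETE. A minimal Python SDK sketch of the kind of client call that produces such a trace follows; the port (13525) is taken from the log above, while the endpoint, table name, and columns are assumptions.

```python
import ydb

# Sketch only: endpoint and table are hypothetical; 13525 is a gRPC port
# reported in the log above. A single-statement read-write transaction like
# this can be executed as an "immediate" tx: one propose to the owning
# datashard, answered with COMPLETE, as in the trace above.
driver = ydb.Driver(endpoint="grpc://localhost:13525", database="/Root")
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def upsert_row(session):
    return session.transaction(ydb.SerializableReadWrite()).execute(
        'UPSERT INTO `/Root/TestImmediateEffects` (Key, Value) VALUES (1u, "one");',
        commit_tx=True,
    )

pool.retry_operation_sync(upsert_row)
pool.stop()
driver.stop()
```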
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:29.571670Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.588719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.675408Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.842428Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.916231Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:32.182823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792223181379416:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.182907Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.220594Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.250923Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.284040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.312955Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.344785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.415496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.460328Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792223181379928:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.460402Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.460725Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792223181379933:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.463744Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:32.473318Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792223181379935:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:32.535942Z node 2 :TX_PROXY ERROR: Actor# [2:7486792223181379988:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:33.450609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.967678Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792206001508629:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:33.967745Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpInplaceUpdate::SingleRowIf+UseSink >> TConsoleConfigTests::TestAffectedConfigs [GOOD] >> KqpLimits::ReplySizeExceeded [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict-UseSink >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink >> KqpImmediateEffects::AlreadyBrokenImmediateEffects [GOOD] >> KqpImmediateEffects::ConflictingKeyR1RWR2 >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD] >> KqpEffects::InsertRevert_Literal_Duplicates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeletePkPrefixWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 22688, MsgBus: 21989 2025-03-28T09:37:25.381027Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792195128230120:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:25.381531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00139f/r3tmp/tmpPOnGu7/pdisk_1.dat 2025-03-28T09:37:25.687919Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22688, node 1 2025-03-28T09:37:25.760831Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:25.760855Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:25.760861Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:25.760979Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:37:25.763063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:25.763161Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:25.764971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21989 TClient is connected to server localhost:21989 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:26.222024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.240265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.364108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.504613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.565066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:28.163992Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792208013133799:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.164084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.416972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.442703Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.469695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.497875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.522504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.553599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.631327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792208013134311:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.631418Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.631700Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792208013134316:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.635927Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:28.662656Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792208013134318:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:28.748428Z node 1 :TX_PROXY ERROR: Actor# [1:7486792208013134374:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:29.553518Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-03-28T09:37:29.564352Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:37:29.564533Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:37:29.564744Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792212308101984:2496], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7486792212308101960:2496]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7486792212308101984:2496].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-28T09:37:29.565298Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792212308101977:2496], SessionActorId: [1:7486792212308101960:2496], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7486792212308101960:2496]. isRollback=0 2025-03-28T09:37:29.565502Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2Y3NTA4MzQtMzI5YmEzMjItZWU2MTZkY2MtYTU0MWIxNTA=, ActorId: [1:7486792212308101960:2496], ActorState: ExecuteState, TraceId: 01jqe1t6d058xy37d2k12h91bd, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7486792212308101978:2496] from: [1:7486792212308101977:2496] 2025-03-28T09:37:29.565585Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486792212308101978:2496] TxId: 281474976710671. Ctx: { TraceId: 01jqe1t6d058xy37d2k12h91bd, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2Y3NTA4MzQtMzI5YmEzMjItZWU2MTZkY2MtYTU0MWIxNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:37:29.566404Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2Y3NTA4MzQtMzI5YmEzMjItZWU2MTZkY2MtYTU0MWIxNTA=, ActorId: [1:7486792212308101960:2496], ActorState: ExecuteState, TraceId: 01jqe1t6d058xy37d2k12h91bd, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 4693, MsgBus: 63832 2025-03-28T09:37:30.621841Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792214005566547:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:30.621908Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00139f/r3tmp/tmpoiWzw1/pdisk_1.dat 2025-03-28T09:37:30.719969Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4693, node 2 2025-03-28T09:37:30.760756Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:30.760863Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:30.774260Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:30.811106Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:30.811122Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:30.811127Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:30.811204Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63832 TClient is connected to server localhost:63832 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:31.232515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.248657Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
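Note: the PRECONDITION_FAILED sequence above is the expected propagation chain for a duplicate-key INSERT: the datashard rejects the write with STATUS_CONSTRAINT_VIOLATION, the KQP sink reports CONSTRAINT VIOLATION for `/Root/TwoShard`, and the session wraps it as "Constraint violated" with subissue "Duplicate keys have been found" (code 2012). A hedged repro sketch in Python follows; the TwoShard schema (Key Uint32, Value1 String, Value2 Int32), the endpoint, and the exception class name are assumptions.

```python
import ydb

# Assumed schema for /Root/TwoShard: Key Uint32, Value1 String, Value2 Int32.
Q = 'INSERT INTO `/Root/TwoShard` (Key, Value1, Value2) VALUES (1u, "One", -1);'

def insert_same_key_twice(session):
    session.transaction(ydb.SerializableReadWrite()).execute(Q, commit_tx=True)
    # The second INSERT of the same primary key is what trips code 2012:
    # INSERT, unlike UPSERT/REPLACE, must reject an existing key.
    session.transaction(ydb.SerializableReadWrite()).execute(Q, commit_tx=True)

driver = ydb.Driver(endpoint="grpc://localhost:22688", database="/Root")  # port from the log
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)
try:
    pool.retry_operation_sync(insert_same_key_twice)
except ydb.PreconditionFailed as err:  # assumed class name; non-retryable, so it propagates
    print("expected:", err)
pool.stop()
driver.stop()
```

Swapping INSERT for UPSERT would make the second write an overwrite instead of an error.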
2025-03-28T09:37:31.327884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.479995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.572291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:33.580989Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792226890470214:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.581052Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.622836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.653650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.722399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.751223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.779448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.811767Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.856398Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792226890470726:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.856471Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.856537Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792226890470731:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.859961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:33.868139Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792226890470733:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:33.947501Z node 2 :TX_PROXY ERROR: Actor# [2:7486792226890470787:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:34.866106Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 16336, MsgBus: 27920 2025-03-28T09:37:24.650574Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792190364200599:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:24.650651Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013a8/r3tmp/tmpVifOR3/pdisk_1.dat 2025-03-28T09:37:24.897523Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16336, node 1 2025-03-28T09:37:24.954254Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:24.954304Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:24.954312Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:24.954446Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:37:25.013680Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:25.013914Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:25.016070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27920 TClient is connected to server localhost:27920 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
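Note: the TClient::Ls request/response pairs above are the test client's describe-path handshake; the PathDescription block is the schemeshard's answer for `/Root`. A sketch of the equivalent call through the public Python SDK follows, assuming a local endpoint on the gRPC port this test reports (16336).

```python
import ydb

# Sketch of the SDK-level analogue of the TClient::Ls handshake above:
# describe_path returns the scheme entry whose fields the log prints as
# PathDescription (name, owner, path type). Endpoint is assumed.
driver = ydb.Driver(endpoint="grpc://localhost:16336", database="/Root")
driver.wait(timeout=5)
entry = driver.scheme_client.describe_path("/Root")
print(entry.name, entry.owner, entry.is_directory())
driver.stop()
```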
2025-03-28T09:37:25.357414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:25.378743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:25.511077Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:25.673528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:25.746917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:27.245504Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792203249104274:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:27.245626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:27.534926Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:27.560068Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:27.585805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:27.613067Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:27.637564Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:27.685398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:27.724259Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792203249104785:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:27.724309Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:27.724438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792203249104790:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:27.728303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:27.736335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792203249104792:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:27.823445Z node 1 :TX_PROXY ERROR: Actor# [1:7486792203249104846:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:28.635449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 62841, MsgBus: 18638 2025-03-28T09:37:30.074944Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792214691793167:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:30.075030Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013a8/r3tmp/tmpTnRp77/pdisk_1.dat 2025-03-28T09:37:30.173992Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:30.205499Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:30.205591Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 62841, node 2 2025-03-28T09:37:30.206867Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:30.240875Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:30.240903Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:30.240912Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:30.241055Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18638 TClient is connected to server localhost:18638 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:30.631371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
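Note: the repeated "Resource pool default not found" warnings above come from the workload service probing the `default` pool before it has been bootstrapped; TPoolCreatorActor then creates `/Root/.metadata/workload_manager/pools/default`, retries once on "Transaction ... completed, doublechecking", and the TX_PROXY "path exist, request accepts it" error is the benign race between concurrent creators. Pools can also be created explicitly; a sketch follows, where the DDL property names and the use of the query service are assumptions based on the documented CREATE RESOURCE POOL syntax.

```python
import ydb

# Hypothetical explicit pool creation; the tests above exercise the implicit
# bootstrap of "default" instead. Property names are assumptions.
DDL = """
CREATE RESOURCE POOL test_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10,
    QUEUE_SIZE = 100
);
"""

driver = ydb.Driver(endpoint="grpc://localhost:62841", database="/Root")  # port from the log
driver.wait(timeout=5)
pool = ydb.QuerySessionPool(driver)  # assumption: this DDL goes through the query service
pool.execute_with_retries(DDL)
pool.stop()
driver.stop()
```

The [GOOD] verdicts above suggest these warnings are expected bootstrap noise rather than failures.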
2025-03-28T09:37:30.637556Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:37:30.642076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:30.698852Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:30.857680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:30.938066Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:33.107403Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792227576696836:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.107487Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.146679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.173691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.242930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.270281Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.295613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.323472Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.361669Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792227576697348:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.361746Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.361794Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792227576697353:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.364780Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:33.373045Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792227576697355:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:33.474206Z node 2 :TX_PROXY ERROR: Actor# [2:7486792227576697409:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:34.429262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.075440Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792214691793167:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:35.075515Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:37:35.258285Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NDIyMWRjYTUtNDJjNmY4ODctZGYyMGQyNmEtZGY0NzcwNjE=, ActorId: [2:7486792231871664965:2490], ActorState: ExecuteState, TraceId: 01jqe1tbxv8fxeqa8caym98c1b, Create QueryResponse for error on request, msg: Error while locks merge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD] Test command err: 2025-03-28T09:30:56.848542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:30:56.848638Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:56.916345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:30:58.105651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:30:58.105715Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:58.150087Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:30:59.218479Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:30:59.218547Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:30:59.270767Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:00.286628Z node 4 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:00.286719Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:00.326281Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:01.363543Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:01.363620Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:01.413017Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:02.456996Z node 6 :FLAT_TX_SCHEMESHARD WARN: Cannot 
subscribe to console configs 2025-03-28T09:31:02.457060Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:02.507555Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:03.508726Z node 7 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:03.508789Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:03.552192Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:04.614095Z node 8 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:04.614171Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:04.662063Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:06.309068Z node 9 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:06.309144Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:06.352822Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:08.004466Z node 10 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:08.004550Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:08.079141Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:09.635469Z node 11 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:09.635536Z node 11 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:09.675369Z node 11 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:11.353974Z node 12 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:11.354042Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:11.401144Z node 12 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:13.132536Z node 13 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:13.132638Z node 13 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:13.188296Z node 13 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:15.051596Z node 14 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:15.051677Z node 14 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:15.096257Z node 14 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:16.734852Z node 15 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe 
to console configs 2025-03-28T09:31:16.734912Z node 15 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:16.779876Z node 15 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:18.485922Z node 16 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:18.486025Z node 16 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:18.532583Z node 16 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:20.623496Z node 17 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:20.623590Z node 17 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:20.678869Z node 17 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:21.202144Z node 17 :CMS_CONFIGS ERROR: Unexpected config sender died for subscription id=1 2025-03-28T09:31:21.819910Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:21.819997Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:21.883581Z node 18 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:22.701198Z node 18 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:31:22.701275Z node 18 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:22.776668Z node 18 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=8651011 serviceid=[0:0:0] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2025-03-28T09:31:23.401983Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:23.402071Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:23.447228Z node 19 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:24.333286Z node 19 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:31:24.333395Z node 19 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:24.437067Z node 19 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[19:8246204620103118691:7960687] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2025-03-28T09:31:25.166346Z node 20 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:25.166444Z node 20 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:25.210704Z node 20 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:28.722965Z node 21 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:28.723055Z node 21 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:28.767330Z node 21 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:31.998651Z node 22 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:31.998745Z node 22 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:32.051499Z node 22 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:33.121066Z node 23 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:33.121135Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:33.163626Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:34.343403Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:31:34.343493Z node 24 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:31:34.417647Z node 24 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:31:40.942221Z node 24 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:31:40.942310Z node 24 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:34.564407Z node 24 :CMS_CONFIGS ERROR: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[100:28538277257700723:0] nodeid=100 host=host100 tenant=tenant-100 nodetype=type100 kinds=2 lastprovidedconfig= 2025-03-28T09:35:35.186852Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:35:35.186936Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:35.225065Z node 25 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:35:41.270326Z node 25 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:35:41.270423Z node 25 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:31.488853Z node 26 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:31.488950Z node 26 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:31.528881Z node 26 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:37:32.899876Z node 27 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:32.899982Z node 27 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:32.951496Z node 27 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:37:34.100421Z node 28 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:34.100515Z node 28 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:34.151000Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:37:35.529405Z node 29 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:35.529505Z 
node 29 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:35.591256Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] Test command err: 2025-03-28T09:36:15.512208Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:36:15.528237Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:36:15.530299Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:36:15.531164Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:15.570679Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:36:15.629153Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:36:15.629202Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:15.637132Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:15.638070Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:36:15.641549Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:36:15.641615Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:36:15.641652Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:36:15.643275Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:36:15.643353Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:36:15.643403Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:36:15.726303Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:36:15.747534Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:36:15.748649Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:36:15.748790Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:36:15.748819Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:36:15.748857Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:36:15.748893Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:15.749083Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:15.749727Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:15.750779Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:36:15.750866Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:36:15.750943Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:15.751015Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:36:15.751052Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:36:15.751094Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:36:15.751133Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:36:15.751162Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:36:15.751200Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:36:15.751285Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:15.751315Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:15.751361Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:36:15.759343Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:36:15.759404Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:36:15.759477Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:36:15.759691Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:36:15.759744Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:36:15.760220Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:36:15.760265Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:15.760307Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:36:15.760338Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:36:15.760367Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:15.760657Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:36:15.760685Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:36:15.760712Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:36:15.760736Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:15.760794Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:36:15.760817Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:36:15.760842Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:36:15.760871Z node 1 :TX_DATASHARD TRACE: 
Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:15.760896Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:36:15.773069Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:36:15.773143Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:36:15.773185Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:36:15.773235Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:36:15.773314Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:36:15.775620Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:15.775703Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:36:15.775754Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:36:15.775852Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:36:15.775886Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:36:15.776010Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:36:15.776071Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:15.776132Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:36:15.776179Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:36:15.780003Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:36:15.780065Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:36:15.780295Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:15.780329Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:36:15.780375Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:36:15.780405Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:36:15.780434Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:36:15.780476Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:36:15.780505Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:36:15.780539Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:15.780590Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit 
PlanQueue 2025-03-28T09:36:15.780623Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:36:15.780647Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:36:15.780769Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:36:15.780791Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:36:15.780806Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:36:15.780820Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:36:15.780835Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:36:15.780883Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:36:15.780898Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:36:15.780921Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:36:15.780947Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:36:15.780999Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:36:15.781026Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:36:15.781060Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:36:15.781091Z node 1 :TX_DATA ... 
-28T09:37:35.617712Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:35.617945Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:35.617973Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:24] at 9437184 on unit CompleteOperation 2025-03-28T09:37:35.618011Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 24] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:35.618040Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:35.618177Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:35.618203Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:25] at 9437184 on unit CompleteOperation 2025-03-28T09:37:35.618239Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 25] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:35.618267Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:35.618461Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:35.618491Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:26] at 9437184 on unit CompleteOperation 2025-03-28T09:37:35.618527Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 26] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:35.618555Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:35.618723Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:35.618759Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:27] at 9437184 on unit CompleteOperation 2025-03-28T09:37:35.618799Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 27] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:35.618830Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:35.619014Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:35.619044Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:28] at 9437184 on unit CompleteOperation 2025-03-28T09:37:35.619083Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 28] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:35.619110Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:35.619257Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:35.619284Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:29] at 9437184 on unit CompleteOperation 2025-03-28T09:37:35.619321Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 29] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:35.619350Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:35.619576Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:35.619606Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:30] at 9437184 on unit CompleteOperation 
2025-03-28T09:37:35.619644Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 30] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:35.619673Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:35.619792Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:35.619816Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:31] at 9437184 on unit CompleteOperation 2025-03-28T09:37:35.619852Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 31] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:35.619881Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:35.620008Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:35.620036Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:32] at 9437184 on unit CompleteOperation 2025-03-28T09:37:35.620074Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 32] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:35.620106Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:35.620228Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:35.620258Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:33] at 9437184 on unit CompleteOperation 2025-03-28T09:37:35.620295Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 33] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:35.620324Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:35.620501Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:35.620536Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:34] at 9437184 on unit CompleteOperation 2025-03-28T09:37:35.620579Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 34] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:35.620608Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:35.620770Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:35.620800Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:35] at 9437184 on unit CompleteOperation 2025-03-28T09:37:35.620836Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 35] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:35.620866Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:35.621020Z node 32 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:35.621051Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:36] at 9437184 on unit CompleteOperation 2025-03-28T09:37:35.621089Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 36] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:35.621116Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:35.621288Z node 32 
:TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:35.621312Z node 32 :TX_DATASHARD TRACE: Complete execution for [1000004:37] at 9437184 on unit CompleteOperation 2025-03-28T09:37:35.621347Z node 32 :TX_DATASHARD DEBUG: Complete [1000004 : 37] from 9437184 at tablet 9437184 send result to client [32:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:37:35.621375Z node 32 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:35.621656Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2025-03-28T09:37:35.621705Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:35.621744Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 2025-03-28T09:37:35.621862Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2025-03-28T09:37:35.621897Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:35.621921Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2025-03-28T09:37:35.621996Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2025-03-28T09:37:35.622024Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:35.622052Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2025-03-28T09:37:35.622131Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-28T09:37:35.622164Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:35.622192Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2025-03-28T09:37:35.622271Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-28T09:37:35.622303Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:35.622330Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2025-03-28T09:37:35.622404Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-28T09:37:35.622434Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:35.622462Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 
dest 9437184 consumer 9437184 txId 12 2025-03-28T09:37:35.622553Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 13 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 9} 2025-03-28T09:37:35.622585Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:35.622610Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 13 2025-03-28T09:37:35.622664Z node 32 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [32:803:2729], Recipient [32:345:2312]: {TEvReadSet step# 1000004 txid# 14 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 10} 2025-03-28T09:37:35.622712Z node 32 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:37:35.622740Z node 32 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 14 expect 30 28 28 26 19 22 31 29 23 31 23 31 31 23 20 30 30 24 31 - 31 18 28 20 20 18 5 20 16 16 20 - actual 30 28 28 26 19 22 31 29 23 31 23 31 31 23 20 30 30 24 31 - 31 18 28 20 20 18 5 20 16 16 20 - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] >> KqpInplaceUpdate::SingleRowSimple+UseSink >> KqpEffects::InsertRevert_Literal_Success >> KqpImmediateEffects::DeleteAfterInsert [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink >> KqpImmediateEffects::UpsertAfterInsert [GOOD] >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink >> KqpImmediateEffects::UpsertAfterInsertWithIndex |95.7%| [TA] $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 3824, MsgBus: 27101 2025-03-28T09:37:25.398427Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792196072791230:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:25.398518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013a2/r3tmp/tmp3fFd43/pdisk_1.dat 2025-03-28T09:37:25.694836Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3824, node 1 2025-03-28T09:37:25.757654Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:25.757733Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:25.757741Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:25.757748Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:25.757767Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:25.757841Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:37:25.759439Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:27101 TClient is connected to server localhost:27101 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:26.216645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.234327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.365831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:37:26.540920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.606265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:27.980942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792204662727597:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:27.981092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.285141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.312205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.338363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.365221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.394614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.428745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.506941Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792208957695409:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.507055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.507257Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792208957695414:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.510672Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:28.519105Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792208957695416:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:28.584588Z node 1 :TX_PROXY ERROR: Actor# [1:7486792208957695469:3451] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:29.591699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5293, MsgBus: 64151 2025-03-28T09:37:30.964929Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792216075700376:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:30.965009Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013a2/r3tmp/tmpsF4SnK/pdisk_1.dat 2025-03-28T09:37:31.076522Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5293, node 2 2025-03-28T09:37:31.098465Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:31.098555Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:31.100136Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:31.137672Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:31.137693Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:31.137700Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:31.137810Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64151 TClient is connected to server localhost:64151 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:31.549684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:37:31.555498Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:37:31.565336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.637371Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.802317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.876025Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.041851Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792233255571316:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.041906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.087292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.155093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.187223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.214914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.243137Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.271812Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.348901Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792233255571833:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.348972Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792233255571839:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.348980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.351926Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:34.361939Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792233255571841:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:34.436234Z node 2 :TX_PROXY ERROR: Actor# [2:7486792233255571896:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:35.448136Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.965479Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792216075700376:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:35.965558Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:37:36.116731Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZWFiZDMzMS0yYTQ5ZjZkNS0xMjVlNzUxZi0zYzkyY2Q1YQ==, ActorId: [2:7486792237550539452:2490], ActorState: ExecuteState, TraceId: 01jqe1tcrx31bzjbb26t8mn870, Create QueryResponse for error on request, msg: Error while locks merge |95.7%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 18998, MsgBus: 18465 2025-03-28T09:36:50.766796Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792042283492879:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:50.766860Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001481/r3tmp/tmpBTvOLU/pdisk_1.dat 2025-03-28T09:36:50.969022Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18998, node 1 2025-03-28T09:36:51.024907Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:36:51.024924Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:36:51.024928Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:36:51.025049Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:36:51.095965Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:51.096127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:51.097733Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:18465 TClient is connected to server localhost:18465 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:36:51.339404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:51.351006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:51.471292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:51.570517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:51.614847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:52.706758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792050873429243:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:52.706858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:52.887830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:36:52.907133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:36:52.925742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:36:52.944442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:36:52.965267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:36:53.028884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:36:53.096333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792055168397056:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:53.096376Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:53.096381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792055168397061:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:53.098490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:36:53.104001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792055168397063:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:36:53.187397Z node 1 :TX_PROXY ERROR: Actor# [1:7486792055168397116:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:36:53.789954Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:55.767434Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792042283492879:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:55.767679Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:36:56.060771Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTQ2ZWIyZjktZGQ5ODVlNWMtNDlkNzE5NmQtMjY4YjBiM2I=, ActorId: [1:7486792063758332875:2594], ActorState: ExecuteState, TraceId: 01jqe1s57ef203357qsw53f00k, Create QueryResponse for error on request, msg:
: Error: Query result size limit exceeded. (80001697 > 50331648), code: 2013 Trying to start YDB, gRPC: 13353, MsgBus: 1277 2025-03-28T09:36:56.608791Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792070515118192:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:56.608874Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001481/r3tmp/tmpLa1Q4X/pdisk_1.dat 2025-03-28T09:36:56.710406Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:56.739765Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:56.739878Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 13353, node 2 2025-03-28T09:36:56.741478Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:36:56.773410Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:36:56.773436Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:36:56.773443Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:36:56.773592Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1277 TClient is connected to server localhost:1277 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:36:57.120334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:57.134811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:57.182360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474 ... WY4ODYxYi1hYmE2YTY0MC1lN2Q3ZjcwNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Abort execution: TIMEOUT, [ {
: Error: Request timeout 100ms exceeded } {
: Error: Cancelling after 100ms during execution } ] 2025-03-28T09:37:25.508991Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7486792194504516280:2710] TxId: 281474976715672. Ctx: { TraceId: 01jqe1t2cyevvjpvvkew07c03b, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2RiMjY1YTMtNWY4ODYxYi1hYmE2YTY0MC1lN2Q3ZjcwNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Sending timeout response to: [3:7486792190209548958:2710] 2025-03-28T09:37:25.509340Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Y2RiMjY1YTMtNWY4ODYxYi1hYmE2YTY0MC1lN2Q3ZjcwNA==, ActorId: [3:7486792190209548958:2710], ActorState: ExecuteState, TraceId: 01jqe1t2cyevvjpvvkew07c03b, Create QueryResponse for error on request, msg:
: Error: Request timeout 100ms exceeded
: Error: Cancelling after 100ms during execution 2025-03-28T09:37:25.511244Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486792194504516309:2729], TxId: 281474976715672, task: 7. Ctx: { TraceId : 01jqe1t2cyevvjpvvkew07c03b. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=Y2RiMjY1YTMtNWY4ODYxYi1hYmE2YTY0MC1lN2Q3ZjcwNA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7486792194504516280:2710], status: TIMEOUT, reason: [ {
: Error: Request timeout 100ms exceeded } {
: Error: Cancelling after 100ms during execution } ] 2025-03-28T09:37:25.632980Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7486792194504516356:2710] TxId: 281474976715674. Ctx: { TraceId: 01jqe1t2gv6ray0ecwavwcqmrh, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Y2RiMjY1YTMtNWY4ODYxYi1hYmE2YTY0MC1lN2Q3ZjcwNA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 100ms } {
: Error: Cancelling after 100ms during execution } ] 2025-03-28T09:37:25.633298Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486792194504516371:2743], TxId: 281474976715674, task: 9. Ctx: { CustomerSuppliedId : . TraceId : 01jqe1t2gv6ray0ecwavwcqmrh. SessionId : ydb://session/3?node_id=3&id=Y2RiMjY1YTMtNWY4ODYxYi1hYmE2YTY0MC1lN2Q3ZjcwNA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [3:7486792194504516356:2710], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:37:25.702409Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486792194504516369:2741], TxId: 281474976715674, task: 7. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=3&id=Y2RiMjY1YTMtNWY4ODYxYi1hYmE2YTY0MC1lN2Q3ZjcwNA==. TraceId : 01jqe1t2gv6ray0ecwavwcqmrh. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [3:7486792194504516356:2710], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:37:25.724692Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=Y2RiMjY1YTMtNWY4ODYxYi1hYmE2YTY0MC1lN2Q3ZjcwNA==, ActorId: [3:7486792190209548958:2710], ActorState: ExecuteState, TraceId: 01jqe1t2gv6ray0ecwavwcqmrh, Create QueryResponse for error on request, msg:
: Error: Request canceled after 100ms
: Error: Cancelling after 100ms during execution Trying to start YDB, gRPC: 23484, MsgBus: 6747 2025-03-28T09:37:26.642050Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486792199893529317:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:26.642116Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001481/r3tmp/tmpbPWvVo/pdisk_1.dat 2025-03-28T09:37:26.750531Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:26.783260Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:26.783358Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:26.785531Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 23484, node 4 2025-03-28T09:37:26.822298Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:26.822349Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:26.822360Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:26.822525Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6747 TClient is connected to server localhost:6747 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:27.274855Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:27.286714Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:27.375694Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:27.572379Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:37:27.653481Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:30.490781Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792217073400276:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:30.490895Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:30.546645Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:30.585596Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:30.627887Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:30.669285Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:30.711123Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:30.748356Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:30.794954Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792217073400791:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:30.795034Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:30.795218Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792217073400796:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:30.798806Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:30.809584Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486792217073400798:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:30.878440Z node 4 :TX_PROXY ERROR: Actor# [4:7486792217073400851:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:31.642387Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486792199893529317:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:31.642447Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:37:32.169645Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.654857Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=MWM5NDUxYTQtYzkzMWI0MDItZDA2NGYzZjMtNGU3YzgxMWM=, ActorId: [4:7486792225663335707:2489], ActorState: ExecuteState, TraceId: 01jqe1tc0f0jw7arn212k90twg, Create QueryResponse for error on request, msg: >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10983, MsgBus: 22297 2025-03-28T09:37:25.870915Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792192972354545:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:25.871126Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00139c/r3tmp/tmpmthaxJ/pdisk_1.dat 2025-03-28T09:37:26.178361Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10983, node 1 2025-03-28T09:37:26.227780Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:26.227886Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:26.229525Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:26.238108Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:26.238130Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:26.238141Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:26.238278Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22297 TClient is connected to server localhost:22297 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:26.711082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.728946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.856258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.995608Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:27.057442Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:28.483387Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792205857258217:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.483510Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.753707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.788992Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.820752Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.855783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.894409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.937240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.996011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792205857258729:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.996121Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.996401Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792205857258734:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:29.000371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:29.010334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792205857258736:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:29.076779Z node 1 :TX_PROXY ERROR: Actor# [1:7486792210152226085:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:29.974813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 5596, MsgBus: 20240 2025-03-28T09:37:31.261044Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792221840909010:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:31.261130Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00139c/r3tmp/tmprwEOKO/pdisk_1.dat 2025-03-28T09:37:31.354701Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5596, node 2 2025-03-28T09:37:31.396138Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:31.396257Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:31.397959Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:31.407080Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:31.407098Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:31.407103Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:31.407182Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20240 TClient is connected to server localhost:20240 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:31.829043Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:37:31.851928Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.929387Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:32.075967Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:32.151771Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.279653Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792234725812664:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.279745Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.309947Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.339679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.373921Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.402930Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.429844Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.460092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.539486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792234725813179:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.539572Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792234725813184:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.539599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.542719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:34.551031Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792234725813186:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:34.637207Z node 2 :TX_PROXY ERROR: Actor# [2:7486792234725813242:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:35.710639Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.261400Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792221840909010:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:36.261464Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 3220, MsgBus: 9636 2025-03-28T09:37:25.337436Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792194030542507:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:25.337533Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013a5/r3tmp/tmpjnPB38/pdisk_1.dat 2025-03-28T09:37:25.641010Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3220, node 1 2025-03-28T09:37:25.722165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:25.722316Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:25.724006Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:25.728058Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:25.728100Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:25.728116Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:25.728254Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9636 TClient is connected to server localhost:9636 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:26.167724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.189405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.330516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.488013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.555376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:28.257437Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792206915446164:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.257550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.518225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.545282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.574080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.602636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.631687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.663685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.710838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792206915446679:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.710930Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.711276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792206915446684:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.714900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:28.724713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792206915446686:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:28.788576Z node 1 :TX_PROXY ERROR: Actor# [1:7486792206915446739:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:29.702600Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:30.337743Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792194030542507:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:30.337804Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17874, MsgBus: 17913 2025-03-28T09:37:31.592598Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792219901362326:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:31.592673Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013a5/r3tmp/tmpTavkBU/pdisk_1.dat 2025-03-28T09:37:31.682645Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 17874, node 2 2025-03-28T09:37:31.722945Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:31.723023Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:31.724499Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:31.771189Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:31.771213Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:31.771223Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:31.771349Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17913 TClient is connected to server localhost:17913 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:37:32.192774Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.198785Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:37:32.204615Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:32.280529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:32.425019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.509038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.466995Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792232786265985:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.467119Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.491479Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.558187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.582716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.616441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.648261Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.680245Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.721177Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792232786266498:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.721233Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.721270Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792232786266503:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.724477Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:34.733221Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792232786266505:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:34.809618Z node 2 :TX_PROXY ERROR: Actor# [2:7486792232786266558:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:35.861628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.594862Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792219901362326:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:36.603153Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpImmediateEffects::Insert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 3243, MsgBus: 12052 2025-03-28T09:37:25.209412Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792193328403444:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:25.209502Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013a7/r3tmp/tmpXZWqbk/pdisk_1.dat 2025-03-28T09:37:25.522344Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3243, node 1 2025-03-28T09:37:25.598973Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:25.599153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:25.606787Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:25.631313Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:25.631343Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:25.631351Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:25.631527Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12052 TClient is connected to server localhost:12052 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:26.043195Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.068097Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.206898Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.355645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.428550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:27.807909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792201918339820:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:27.808016Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.047764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.074159Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.103763Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.131238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.179228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.205695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:28.281762Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792206213307634:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.281821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.281866Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792206213307639:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.285526Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:28.294738Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792206213307641:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:28.388943Z node 1 :TX_PROXY ERROR: Actor# [1:7486792206213307696:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:29.326895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:30.209384Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792193328403444:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:30.209461Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6173, MsgBus: 4713 2025-03-28T09:37:31.001340Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792222142983791:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:31.001450Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0013a7/r3tmp/tmpZqU07m/pdisk_1.dat 2025-03-28T09:37:31.138377Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:31.147098Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:31.147173Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:31.148307Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6173, node 2 2025-03-28T09:37:31.190679Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:31.190720Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:31.190728Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:31.190859Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4713 TClient is connected to server localhost:4713 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:31.548032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.554045Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:37:31.565855Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.639135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.803931Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.885363Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.092740Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792235027887452:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.092899Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.134078Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.170223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.200259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.267502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.298426Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.330009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.370339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792235027887962:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.370416Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.370471Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792235027887967:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.373616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:34.381857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792235027887969:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:34.457239Z node 2 :TX_PROXY ERROR: Actor# [2:7486792235027888021:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:35.312611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.001632Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792222142983791:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:36.001704Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 24023, MsgBus: 2499 2025-03-28T09:37:26.098764Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792199645432957:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:26.098865Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00139b/r3tmp/tmp5PChTa/pdisk_1.dat 2025-03-28T09:37:26.441301Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24023, node 1 2025-03-28T09:37:26.485435Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:26.485556Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:26.487444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:26.517929Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:26.517962Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:26.517976Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:26.518107Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2499 TClient is connected to server localhost:2499 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:26.951773Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:26.976876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:27.090749Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:27.230590Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:27.302307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:28.940752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792208235369306:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:28.940850Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:29.243981Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.312835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.343295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.368540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.396952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.464075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.515800Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792212530337122:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:29.515886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:29.515971Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792212530337127:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:29.519602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:29.530222Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792212530337129:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:29.609293Z node 1 :TX_PROXY ERROR: Actor# [1:7486792212530337182:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:30.570063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.098906Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792199645432957:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:31.098960Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24357, MsgBus: 16065 2025-03-28T09:37:32.087414Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792223972301061:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:32.087482Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00139b/r3tmp/tmpcBmtHk/pdisk_1.dat 2025-03-28T09:37:32.207986Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24357, node 2 2025-03-28T09:37:32.251381Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:32.251483Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:32.254476Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:32.282678Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:32.282719Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:32.282725Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:32.282827Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16065 TClient is connected to server localhost:16065 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:32.687237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:32.692811Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:37:32.699784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:32.773379Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:32.913338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:32.983191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.054387Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792236857204728:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:35.054479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:35.103337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.137053Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.166150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.198026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.227264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.260691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.305964Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792236857205239:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:35.306030Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:35.306089Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792236857205244:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:35.309420Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:35.321554Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792236857205246:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:35.392544Z node 2 :TX_PROXY ERROR: Actor# [2:7486792236857205299:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:36.339411Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.089238Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792223972301061:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:37.089322Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpInplaceUpdate::SingleRowStr-UseSink [GOOD] >> KqpImmediateEffects::Replace >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink >> KqpEffects::UpdateOn_Select [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Success [GOOD] >> KqpEffects::InsertAbort_Select_Conflict+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 26613, MsgBus: 24994 2025-03-28T09:37:26.978265Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792198674646881:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:26.978414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001395/r3tmp/tmpCiARIR/pdisk_1.dat 2025-03-28T09:37:27.273083Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26613, node 1 2025-03-28T09:37:27.337680Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:27.337704Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:27.337714Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:27.337867Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:37:27.339298Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:27.339443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:27.340979Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24994 TClient is connected to server localhost:24994 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:27.785918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:27.815658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:27.940405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:28.083328Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:28.149946Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.585474Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792211559550556:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:29.585563Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:29.893106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.963764Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.993104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:30.026298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:30.056909Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:30.086821Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:30.132882Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792215854518367:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:30.132950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:30.133215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792215854518372:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:30.136682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:30.162341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792215854518374:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:30.248176Z node 1 :TX_PROXY ERROR: Actor# [1:7486792215854518428:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:31.211125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 10868, MsgBus: 12412 2025-03-28T09:37:32.578150Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792222414580876:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:32.578210Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001395/r3tmp/tmpNKnmlt/pdisk_1.dat 2025-03-28T09:37:32.684302Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:32.698177Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:32.698259Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:32.699810Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10868, node 2 2025-03-28T09:37:32.744816Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:32.744838Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:32.744845Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:32.744948Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12412 TClient is connected to server localhost:12412 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:33.136872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
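Every test block in this excerpt logs the same startup sequence: KQP_WORKLOAD_SERVICE reports NOT_FOUND for the default resource pool, TPoolCreatorActor schedules a retry ("doublechecking"), and TX_PROXY then reports "path exist, request accepts it" once /Root/.metadata/workload_manager/pools/default has been created — so these warnings read as a benign first-query bootstrap race rather than a failure. Below is a minimal, purely illustrative sketch of that retry shape; the names (EStatus, RetryUntilReady) are invented for the example and do not come from the YDB sources, which this log does not show.

// Hypothetical sketch, not YDB code: retry-with-backoff mirroring the
// "Scheduled retry for error" / "path exist, request accepts it" pattern above.
#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

enum class EStatus { Success, NotFound, AlreadyExists };

// Retries `attempt` while it returns NotFound, doubling the delay each time.
// AlreadyExists counts as success, matching the idempotent TX_PROXY outcome.
EStatus RetryUntilReady(const std::function<EStatus()>& attempt,
                        int maxRetries = 5,
                        std::chrono::milliseconds delay = std::chrono::milliseconds(50)) {
    for (int i = 0; i < maxRetries; ++i) {
        EStatus st = attempt();
        if (st != EStatus::NotFound) {
            return st;
        }
        std::cerr << "scheduled retry " << i + 1 << " in " << delay.count() << "ms\n";
        std::this_thread::sleep_for(delay);
        delay *= 2; // exponential backoff
    }
    return EStatus::NotFound;
}

int main() {
    int calls = 0;
    // Simulated pool lookup: NOT_FOUND twice, then the pool "appears".
    EStatus st = RetryUntilReady([&] {
        return ++calls < 3 ? EStatus::NotFound : EStatus::AlreadyExists;
    });
    std::cout << "final status: " << (st == EStatus::NotFound ? "NOT_FOUND" : "OK") << "\n";
}

The main() here simulates a lookup that fails twice before the pool appears; the real actors wait on scheme events rather than sleeping, so the backoff loop only approximates the scheduling visible in the timestamps above.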
2025-03-28T09:37:33.154221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:33.228062Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:33.359914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:33.418258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:35.582865Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792235299484516:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:35.583062Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:35.626757Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.657108Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.686560Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.715868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.741220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.768919Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.815790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792235299485027:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:35.815872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:35.815913Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792235299485032:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:35.819944Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:35.828068Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792235299485034:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:35.885790Z node 2 :TX_PROXY ERROR: Actor# [2:7486792235299485088:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:36.800985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.497193Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjAzYjE0ZDEtZmFlYjRmMzEtMTgzM2VjMzMtZWFlYjAxZQ==, ActorId: [2:7486792239594452643:2490], ActorState: ExecuteState, TraceId: 01jqe1te3n0ra5fp031j8gzfct, Create QueryResponse for error on request, msg: >> KqpEffects::InsertAbort_Params_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates-UseSink >> KqpImmediateEffects::UpsertDuplicates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowStr-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10083, MsgBus: 7022 2025-03-28T09:37:27.847387Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792202655638699:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:27.847443Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001390/r3tmp/tmpvxwLV3/pdisk_1.dat 2025-03-28T09:37:28.172547Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10083, node 1 2025-03-28T09:37:28.234739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:28.234866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:28.236411Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:28.241116Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:28.241147Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:28.241155Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:28.241305Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7022 TClient is connected to server localhost:7022 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:28.669766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:28.684281Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:37:28.697089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:28.834906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:28.989650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:29.063969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:37:30.852815Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792215540542344:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:30.852911Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.173434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.199893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.223263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.248976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.275813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.310622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.395984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792219835510157:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.396061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.396079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792219835510162:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.399428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:31.410059Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792219835510164:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:31.481994Z node 1 :TX_PROXY ERROR: Actor# [1:7486792219835510219:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:32.273422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.847350Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792202655638699:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:32.847430Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28318, MsgBus: 17440 2025-03-28T09:37:33.430594Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792230196478859:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:33.430670Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001390/r3tmp/tmpEIskSt/pdisk_1.dat 2025-03-28T09:37:33.521043Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28318, node 2 2025-03-28T09:37:33.558470Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:33.558567Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:33.560201Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:33.591076Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:33.591094Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:33.591098Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:33.591181Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17440 TClient is connected to server localhost:17440 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:33.992818Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.004086Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.078725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.236679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.298317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:36.333897Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792243081382508:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:36.333988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:36.384416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.412642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.480008Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.507236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.537786Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.610467Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.655862Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792243081383024:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:36.655957Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:36.656003Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792243081383029:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:36.660246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:36.669043Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792243081383031:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:36.761520Z node 2 :TX_PROXY ERROR: Actor# [2:7486792243081383085:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:37.657607Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:38.430734Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792230196478859:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:38.430811Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Select [GOOD] Test command err: Trying to start YDB, gRPC: 4682, MsgBus: 4392 2025-03-28T09:37:27.741502Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792203774920566:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:27.741602Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001393/r3tmp/tmpY9hR69/pdisk_1.dat 2025-03-28T09:37:28.022671Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4682, node 1 2025-03-28T09:37:28.098037Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:28.098064Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:28.098071Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:28.098199Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:37:28.113674Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:28.113814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:28.115699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4392 TClient is connected to server localhost:4392 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:28.562334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:28.588712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:28.708559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:28.877385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:28.957695Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:30.744796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792216659824237:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:30.744879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.060296Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.094026Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.124553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.148895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.217130Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.249878Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.297751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792220954792050:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.297828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.298454Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792220954792055:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.301831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:31.312784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792220954792057:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:31.412188Z node 1 :TX_PROXY ERROR: Actor# [1:7486792220954792110:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:32.741533Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792203774920566:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:32.741601Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8778, MsgBus: 29730 2025-03-28T09:37:33.572273Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792226723037534:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:33.572356Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001393/r3tmp/tmpaoIEql/pdisk_1.dat 2025-03-28T09:37:33.679246Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8778, node 2 2025-03-28T09:37:33.720119Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:33.720206Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:33.721517Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:33.745349Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:33.745369Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:33.745376Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:33.745485Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29730 TClient is connected to server localhost:29730 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
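Note: the NOT_FOUND warnings above are the expected first-query race, not a test failure. [WorkloadService] looks up the default resource pool before [TPoolCreatorActor] has finished creating /Root/.metadata/workload_manager/pools/default, and the later TX_PROXY "path exist, request accepts it" error only records that the retried creation lost the race to a concurrent one. On YDB versions that expose resource-pool DDL, a pool can also be declared up front; a minimal YQL sketch, with a hypothetical pool name and illustrative limits (neither taken from this log):

    CREATE RESOURCE POOL demo_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- hypothetical limit, for illustration only
        QUEUE_SIZE = 100              -- hypothetical queue depth
    );

The built-in pool exercised here is named "default" and is created automatically on first use, which is exactly the doublechecking/retry sequence visible in the log.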
2025-03-28T09:37:34.144251Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.172752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.222364Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.362637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.441550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:36.581130Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792239607941198:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:36.581225Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:36.632338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.669804Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.704402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.735681Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.766722Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.800655Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.838187Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792239607941710:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:36.838246Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:36.838292Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792239607941715:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:36.841181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:36.848263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792239607941717:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:36.901628Z node 2 :TX_PROXY ERROR: Actor# [2:7486792239607941770:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:38.572192Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792226723037534:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:38.572249Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] >> KqpImmediateEffects::ReplaceExistingKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 32380, MsgBus: 28880 2025-03-28T09:37:28.461360Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792208172674572:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:28.461858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00138b/r3tmp/tmpFxealr/pdisk_1.dat 2025-03-28T09:37:28.742139Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32380, node 1 2025-03-28T09:37:28.825030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:28.825175Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:28.834262Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:28.855214Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:28.855240Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:28.855248Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:28.855389Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28880 TClient is connected to server localhost:28880 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:29.351634Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.372285Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.515875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.667173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.743609Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.382919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792221057578253:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.383032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.659085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.729949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.762494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.791358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.860860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.899367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.983797Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792221057578778:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.983879Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.983944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792221057578783:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.987144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:31.995953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792221057578785:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:32.073946Z node 1 :TX_PROXY ERROR: Actor# [1:7486792225352546133:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:32.916583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.461668Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792208172674572:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:33.461737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7653, MsgBus: 5510 2025-03-28T09:37:34.010601Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792234447942479:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:34.010665Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00138b/r3tmp/tmpg2GHNW/pdisk_1.dat 2025-03-28T09:37:34.091379Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7653, node 2 2025-03-28T09:37:34.138343Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:34.138367Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:34.138374Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:34.138482Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:37:34.145715Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:34.145764Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:34.147194Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5510 TClient is connected to server localhost:5510 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:34.536007Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.554056Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.634061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.771800Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.840653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:36.838078Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792243037878827:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:36.838162Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:36.870814Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.901769Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.966434Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.996842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.031292Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.064311Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.118974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792247332846639:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.119076Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.119216Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792247332846644:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.122343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:37.134248Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792247332846646:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:37.216053Z node 2 :TX_PROXY ERROR: Actor# [2:7486792247332846700:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:38.073933Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::InsertDuplicates-UseSink [GOOD] >> KqpWrite::CastValuesOptional [GOOD] >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD] >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ReplaceExistingKey [GOOD] Test command err: Trying to start YDB, gRPC: 3236, MsgBus: 17067 2025-03-28T09:37:28.660458Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792205962385529:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:28.661579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00138a/r3tmp/tmpKtfPC7/pdisk_1.dat 2025-03-28T09:37:28.985283Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3236, node 1 2025-03-28T09:37:29.038643Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:29.038839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:29.049378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:29.091594Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:29.091613Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:29.091619Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:29.091723Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17067 TClient is connected to server localhost:17067 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:29.589577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.615965Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.630240Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.764868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:29.913668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.991742Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.623043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792218847289184:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.623174Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.919761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.953869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.983012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.009706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.039645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.111840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.192247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792223142257002:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.192334Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.192377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792223142257007:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.195896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:32.208581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792223142257009:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:32.291483Z node 1 :TX_PROXY ERROR: Actor# [1:7486792223142257064:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:33.294014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.658915Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792205962385529:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:33.658971Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10444, MsgBus: 24836 2025-03-28T09:37:34.679585Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792231602702373:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:34.679672Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00138a/r3tmp/tmpr8OwlM/pdisk_1.dat 2025-03-28T09:37:34.798666Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:34.810395Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:34.810461Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:34.812656Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10444, node 2 2025-03-28T09:37:34.866180Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:34.866201Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:34.866209Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:34.866335Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24836 TClient is connected to server localhost:24836 WaitRootIsUp 'Root'... 
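Note: KqpImmediateEffects::ReplaceExistingKey exercises YQL REPLACE semantics. Unlike INSERT, REPLACE raises no conflict when the primary key already exists: it overwrites the whole row, and columns omitted from the statement are reset to NULL rather than preserved (UPSERT is the variant that preserves them). A minimal sketch against a hypothetical two-column table:

    -- succeeds whether or not Key = 1u already exists
    REPLACE INTO SampleTable (Key, Value) VALUES (1u, "updated");
    -- the same key via INSERT would fail with PRECONDITION_FAILED once the row exists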
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:35.301566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:35.314514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:35.369449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:35.565965Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:35.638807Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:37.704129Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792244487606020:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.704230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.752423Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.790448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.826396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.863701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.891115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.969349Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:38.032533Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792248782573833:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:38.032609Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:38.032814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792248782573838:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:38.035684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:38.046865Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792248782573840:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:38.145473Z node 2 :TX_PROXY ERROR: Actor# [2:7486792248782573895:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:39.079413Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.717712Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792231602702373:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:39.717992Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 63260, MsgBus: 25593 2025-03-28T09:37:28.762756Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792207693282603:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:28.762829Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001389/r3tmp/tmp6JB6xa/pdisk_1.dat 2025-03-28T09:37:29.140462Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63260, node 1 2025-03-28T09:37:29.157152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:29.159112Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:29.166508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:29.201144Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:29.201168Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:29.201189Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:29.201311Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25593 TClient is connected to server localhost:25593 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:29.731023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.759341Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.901072Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:30.051111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:30.133630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.853170Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792220578186287:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:31.853276Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.125867Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.162113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.229772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.261911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.294624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.338777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.379146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792224873154094:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.379234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.379311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792224873154099:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.382966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:32.391933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792224873154101:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:32.483153Z node 1 :TX_PROXY ERROR: Actor# [1:7486792224873154155:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:33.262452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.549328Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976710673; 2025-03-28T09:37:33.557711Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792229168121901:2496], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7486792229168121742:2496]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7486792229168121901:2496].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-28T09:37:33.558275Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792229168121890:2496], SessionActorId: [1:7486792229168121742:2496], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7486792229168121742:2496]. isRollback=0 2025-03-28T09:37:33.558507Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmRhOTQzMTAtYmM3MTMyM2EtZjQ0MTdkODEtNDg1YTUzZjc=, ActorId: [1:7486792229168121742:2496], ActorState: ExecuteState, TraceId: 01jqe1ta6e7mt1ahs0wc8dqje8, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7486792229168121891:2496] from: [1:7486792229168121890:2496] 2025-03-28T09:37:33.558610Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486792229168121891:2496] TxId: 281474976710673. Ctx: { TraceId: 01jqe1ta6e7mt1ahs0wc8dqje8, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmRhOTQzMTAtYmM3MTMyM2EtZjQ0MTdkODEtNDg1YTUzZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:37:33.558897Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmRhOTQzMTAtYmM3MTMyM2EtZjQ0MTdkODEtNDg1YTUzZjc=, ActorId: [1:7486792229168121742:2496], ActorState: ExecuteState, TraceId: 01jqe1ta6e7mt1ahs0wc8dqje8, Create QueryResponse for error on request, msg: 2025-03-28T09:37:33.764254Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792207693282603:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:33.764309Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12372, MsgBus: 7222 2025-03-28T09:37:34.588758Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792231378465384:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:34.588801Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001389/r3tmp/tmpjUezw1/pdisk_1.dat 2025-03-28T09:37:34.710570Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12372, node 2 2025-03-28T09:37:34.746650Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:34.746736Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:34.751984Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:34.790300Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:34.790329Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:34.790337Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:34.790453Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7222 TClient is connected to server localhost:7222 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:35.159410Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
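Note: the PRECONDITION_FAILED / KIKIMR_CONSTRAINT_VIOLATION errors in this test ("Duplicate keys have been found", "Conflict with existing key", code 2012) are INSERT's intended strict behavior: an INSERT aborts the transaction if any written key already exists. TwoShard is the table named in the log; its column list below is assumed for illustration:

    -- fails with code 2012 if Key = 1u is already present
    INSERT INTO TwoShard (Key, Value1) VALUES (1u, "One");
    -- blind write that never conflicts on an existing key
    UPSERT INTO TwoShard (Key, Value1) VALUES (1u, "One");

KqpEffects::InsertAbort_Select_Conflict drives the same path through INSERT ... SELECT, which matches the KQP_COMPUTE task errors in this section: the abort surfaces from a compute actor mid-execution rather than from the initial request.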
2025-03-28T09:37:35.173171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:35.254004Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:35.434171Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:35.496198Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:37.554836Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792244263369036:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.554895Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.591473Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.661860Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.696440Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.730602Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.768563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.812563Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.860407Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792244263369550:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.860504Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.861539Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792244263369555:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.865406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:37.877232Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792244263369557:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:37.932005Z node 2 :TX_PROXY ERROR: Actor# [2:7486792244263369610:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:38.898219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.301857Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792252853304624:2515], TxId: 281474976715675, task: 1. Ctx: { TraceId : 01jqe1tfpa18zee72qfrmm14kj. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZmEwZmFjNWUtNzQ0NWIzMDctZGY4YzhjZTItMWRhYTE2ODU=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:37:39.302834Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792252853304625:2516], TxId: 281474976715675, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZmEwZmFjNWUtNzQ0NWIzMDctZGY4YzhjZTItMWRhYTE2ODU=. TraceId : 01jqe1tfpa18zee72qfrmm14kj. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7486792252853304621:2488], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:37:39.303158Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmEwZmFjNWUtNzQ0NWIzMDctZGY4YzhjZTItMWRhYTE2ODU=, ActorId: [2:7486792248558337162:2488], ActorState: ExecuteState, TraceId: 01jqe1tfpa18zee72qfrmm14kj, Create QueryResponse for error on request, msg: 2025-03-28T09:37:39.588456Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792231378465384:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:39.588506Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValuesOptional [GOOD] Test command err: Trying to start YDB, gRPC: 9029, MsgBus: 2943 2025-03-28T09:37:30.266118Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792214159197943:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:30.266231Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001383/r3tmp/tmpeERL1j/pdisk_1.dat 2025-03-28T09:37:30.591094Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9029, node 1 2025-03-28T09:37:30.668874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:30.668992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:30.670396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:30.676568Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:30.676588Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:30.676594Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:30.676693Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2943 TClient is connected to server localhost:2943 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
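[annotation] The PRECONDITION_FAILED / KIKIMR_CONSTRAINT_VIOLATION pair above is how KQP surfaces an INSERT that targets an already-existing primary key. A minimal YQL sketch of the pattern this test family exercises — the table name, column types, and values here are illustrative assumptions, not statements recovered from this log:

  CREATE TABLE `/Root/TestTable` (
      Key Uint64,
      Value String,
      PRIMARY KEY (Key)
  );
  INSERT INTO `/Root/TestTable` (Key, Value) VALUES (1ul, "a");  -- ok
  INSERT INTO `/Root/TestTable` (Key, Value) VALUES (1ul, "b");  -- fails: Conflict with existing key., code: 2012

INSERT requires the key to be absent; an UPSERT or REPLACE of the same row would overwrite instead of failing.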
2025-03-28T09:37:31.148565Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.174999Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.300707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.469774Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.534407Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:33.041707Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792227044101535:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.041816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.431554Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.457729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.484063Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.512840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.538432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.571473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.612308Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792227044102044:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.612375Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.612581Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792227044102049:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.616351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:33.627689Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792227044102051:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:33.726113Z node 1 :TX_PROXY ERROR: Actor# [1:7486792227044102106:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 20427, MsgBus: 12730 2025-03-28T09:37:35.669126Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792236766668285:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:35.669200Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001383/r3tmp/tmp2vtHHf/pdisk_1.dat 2025-03-28T09:37:35.762878Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20427, node 2 2025-03-28T09:37:35.804408Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:35.804517Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:35.810705Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:35.827876Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:35.827896Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:35.827903Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:35.828015Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12730 TClient is connected to server localhost:12730 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:36.169468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:36.185651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
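[annotation] The recurring cycle above — TPoolFetcherActor NOT_FOUND warnings, an ESchemeOpCreateResourcePool suboperation, a "doublechecking" retry, then the TX_PROXY "path exist, request accepts it" issue — is the workload manager lazily bootstrapping /Root/.metadata/workload_manager/pools/default: after the create transaction completes, the creator re-checks and treats the already-existing path as success, so the ERROR line is benign. A hedged YQL sketch of an explicit pool definition; the WITH parameters are assumed illustrative settings, not values read from this run:

  CREATE RESOURCE POOL default WITH (
      CONCURRENT_QUERY_LIMIT = 10,  -- assumed illustrative value
      QUEUE_SIZE = 100              -- assumed illustrative value
  );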
2025-03-28T09:37:36.247098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:36.367435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.447221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:38.743684Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792249651571932:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:38.743771Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:38.780678Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:38.809686Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:38.839751Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:38.867893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:38.897533Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:38.931463Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:38.970672Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792249651572438:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:38.970748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:38.970822Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792249651572443:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:38.973809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:38.982380Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792249651572445:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:39.043395Z node 2 :TX_PROXY ERROR: Actor# [2:7486792253946539795:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertDuplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26140, MsgBus: 24821 2025-03-28T09:37:29.274721Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792213162380208:2168];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:29.274828Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001384/r3tmp/tmpjIAETN/pdisk_1.dat 2025-03-28T09:37:29.615259Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26140, node 1 2025-03-28T09:37:29.660280Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:29.660641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:29.662820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:29.731246Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:29.731264Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:29.731273Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:29.731391Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24821 TClient is connected to server localhost:24821 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:30.226131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:37:30.251498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:30.385910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:30.558040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:30.646274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:32.181960Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792226047283764:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.182044Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.495667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.527401Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.551628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.582945Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.611050Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.684371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:32.733639Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792226047284280:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.733723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792226047284285:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.733721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:32.736575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:32.744989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792226047284287:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:32.839554Z node 1 :TX_PROXY ERROR: Actor# [1:7486792226047284340:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:33.547196Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.873440Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=4; 2025-03-28T09:37:33.881802Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 4 at tablet 72075186224037919 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:37:33.881997Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 4 at tablet 72075186224037919 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:37:33.882331Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792230342252091:2496], Table: `/Root/TestImmediateEffects` ([72057594046644480:16:1]), SessionActorId: [1:7486792230342251925:2496]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037919, Sink=[1:7486792230342252091:2496].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-28T09:37:33.882996Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792230342252075:2496], SessionActorId: [1:7486792230342251925:2496], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7486792230342251925:2496]. isRollback=0 2025-03-28T09:37:33.883272Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTc1NzQ5M2EtZThhNWEzYTYtOWFjZGEzY2UtYTkyNGJkMTg=, ActorId: [1:7486792230342251925:2496], ActorState: ExecuteState, TraceId: 01jqe1taesfjged9r2eg9nta4b, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7486792230342252085:2496] from: [1:7486792230342252075:2496] 2025-03-28T09:37:33.883774Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486792230342252085:2496] TxId: 281474976710674. Ctx: { TraceId: 01jqe1taesfjged9r2eg9nta4b, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTc1NzQ5M2EtZThhNWEzYTYtOWFjZGEzY2UtYTkyNGJkMTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:37:33.884751Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTc1NzQ5M2EtZThhNWEzYTYtOWFjZGEzY2UtYTkyNGJkMTg=, ActorId: [1:7486792230342251925:2496], ActorState: ExecuteState, TraceId: 01jqe1taesfjged9r2eg9nta4b, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 27992, MsgBus: 20566 2025-03-28T09:37:34.732480Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792234579099607:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:34.747962Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001384/r3tmp/tmppQunLu/pdisk_1.dat 2025-03-28T09:37:34.821748Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27992, node 2 2025-03-28T09:37:34.856551Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:34.856648Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:34.859658Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:34.877126Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:34.877143Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:34.877149Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:34.877229Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20566 TClient is connected to server localhost:20566 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:35.308821Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:35.323914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
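[annotation] Unlike the "Conflict with existing key" case, "Duplicate keys have been found." is raised when the INSERT's own write set violates key uniqueness — e.g. the same primary key appearing twice in one statement, as the InsertDuplicates test name suggests — so the datashard rejects the whole write (STATUS_CONSTRAINT_VIOLATION, issue_code 2012). A minimal YQL sketch under the same illustrative table assumed above:

  INSERT INTO `/Root/TestTable` (Key, Value)
  VALUES (1ul, "a"),
         (1ul, "b");  -- fails: Duplicate keys have been found., code: 2012

  UPSERT INTO `/Root/TestTable` (Key, Value)
  VALUES (1ul, "a");  -- blind write path; does not perform the uniqueness check

Note the UseSink variant of the same test logs the shorter message "Duplicated keys found." (see the node 2 run below) — the two wordings come from the sink and datashard paths of the same constraint check.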
2025-03-28T09:37:35.399476Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.538402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:35.633191Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:37.828622Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792247464003110:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.828707Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.894586Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.926442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.997337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:38.032122Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:38.060787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:38.102748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:38.196974Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792251758970924:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:38.197057Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:38.197454Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792251758970929:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:38.200723Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:38.213296Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792251758970931:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:38.292335Z node 2 :TX_PROXY ERROR: Actor# [2:7486792251758970986:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:39.218254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.711380Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792234579099607:2211];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:39.711462Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:37:39.896267Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792256053938762:2520], TxId: 281474976715676, task: 1. Ctx: { TraceId : 01jqe1tg5z03g4442ka82jerry. SessionId : ydb://session/3?node_id=2&id=MmMyODg1N2UtZjRkZGMxMzAtZmRkOTI4N2YtMzg2Y2FkNTA=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-03-28T09:37:39.896688Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792256053938764:2521], TxId: 281474976715676, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MmMyODg1N2UtZjRkZGMxMzAtZmRkOTI4N2YtMzg2Y2FkNTA=. TraceId : 01jqe1tg5z03g4442ka82jerry. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7486792256053938759:2488], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:37:39.897076Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MmMyODg1N2UtZjRkZGMxMzAtZmRkOTI4N2YtMzg2Y2FkNTA=, ActorId: [2:7486792256053938539:2488], ActorState: ExecuteState, TraceId: 01jqe1tg5z03g4442ka82jerry, Create QueryResponse for error on request, msg: >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowIf+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowIf-UseSink >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::Interactive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9760, MsgBus: 13134 2025-03-28T09:37:30.312927Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792217777905206:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:30.313210Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001379/r3tmp/tmp3GaE6g/pdisk_1.dat 2025-03-28T09:37:30.616132Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9760, node 1 2025-03-28T09:37:30.701913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:30.702049Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:30.703815Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:30.716840Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:30.716863Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:30.716869Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:30.716978Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13134 TClient is connected to server localhost:13134 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:31.201198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:37:31.216981Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:37:31.224816Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.352601Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.497343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.573392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:33.193260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792230662808861:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.193393Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.525110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.552147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.579775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.605911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.636886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.675300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.721102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792230662809373:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.721192Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.721268Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792230662809378:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.725303Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:33.736781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792230662809380:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:33.832971Z node 1 :TX_PROXY ERROR: Actor# [1:7486792230662809434:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:34.687066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.313003Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792217777905206:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:35.313070Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 22656, MsgBus: 32113 2025-03-28T09:37:35.868881Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792238066551315:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:35.868939Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001379/r3tmp/tmpUlpvnI/pdisk_1.dat 2025-03-28T09:37:35.960197Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 22656, node 2 2025-03-28T09:37:36.001647Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:36.001737Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:36.004668Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:36.027146Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:36.027186Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:36.027193Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:36.027289Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32113 TClient is connected to server localhost:32113 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:37:36.420259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.452846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:36.503545Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:36.650229Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:36.724664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:38.807957Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792250951454981:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:38.808051Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:38.837801Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:38.866391Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:38.900561Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:38.969691Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.032857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.104385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.150633Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792255246422797:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.150736Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.150914Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792255246422802:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.154126Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:39.165360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792255246422804:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:39.263515Z node 2 :TX_PROXY ERROR: Actor# [2:7486792255246422858:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:40.217499Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpEffects::InsertRevert_Literal_Duplicates [GOOD] >> KqpEffects::InsertRevert_Literal_Conflict >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 23967, MsgBus: 28034 2025-03-28T09:37:26.286244Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792197997603833:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:26.286396Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001398/r3tmp/tmp52A37Q/pdisk_1.dat 2025-03-28T09:37:26.630264Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23967, node 1 2025-03-28T09:37:26.636455Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:37:26.636759Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:37:26.672148Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:26.672174Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:26.672183Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:26.672335Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:37:26.693084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:26.693179Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:26.694801Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:28034 TClient is connected to server localhost:28034 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:27.119765Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:27.143537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:27.280321Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:27.443232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:27.497149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:29.106228Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792210882507489:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:29.106374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:29.379316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.406958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.431124Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.456572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.494614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.528052Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:29.572293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792210882508002:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:29.572414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:29.572494Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792210882508007:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:29.576711Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:29.588363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792210882508009:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:29.701630Z node 1 :TX_PROXY ERROR: Actor# [1:7486792210882508063:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:30.654293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:31.286901Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792197997603833:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:31.286957Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1868, MsgBus: 9703 2025-03-28T09:37:32.302605Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792222676616179:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:32.304329Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001398/r3tmp/tmpwHINZw/pdisk_1.dat 2025-03-28T09:37:32.395817Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:32.447865Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:32.447946Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:32.449306Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1868, node 2 2025-03-28T09:37:32.505037Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:32.505061Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:32.505071Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:32.505266Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9703 TClient is connected to server localhost:9703 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:32.924187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:32.941629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:32.991653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:33.131153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:37:33.190538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:35.202825Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792235561519705:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:35.202920Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:35.240089Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.304779Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.376088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.412286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.447759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.515030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.600169Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792235561520233:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:35.600250Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:35.600448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792235561520238:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:35.603871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:35.613108Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792235561520240:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:35.673181Z node 2 :TX_PROXY ERROR: Actor# [2:7486792235561520293:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:36.561046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.638553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.685837Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.302997Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792222676616179:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:37.303130Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> UpsertLoad::ShouldWriteKqpUpsert >> UpsertLoad::ShouldWriteDataBulkUpsert >> UpsertLoad::ShouldCreateTable >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink >> KqpInplaceUpdate::SingleRowSimple+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowSimple-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 5449, MsgBus: 11724 2025-03-28T09:37:31.429485Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792219761160516:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:31.429605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001375/r3tmp/tmphia5W5/pdisk_1.dat 2025-03-28T09:37:31.758922Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5449, node 1 2025-03-28T09:37:31.818726Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:31.819512Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:31.825570Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:31.857380Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:31.857433Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:31.857448Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:31.857543Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:11724 TClient is connected to server localhost:11724 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:32.337211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:32.350764Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:37:32.365853Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:32.497164Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:32.656045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:32.725815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.349649Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792232646064178:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.349755Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.685113Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.714423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.745904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.778457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.809294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.857120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.903785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792232646064689:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.903868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.903897Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792232646064694:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.906677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:34.915061Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792232646064696:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:34.980222Z node 1 :TX_PROXY ERROR: Actor# [1:7486792232646064750:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:35.908122Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-03-28T09:37:35.914745Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:37:35.914932Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:37:35.915116Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792236941032362:2496], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7486792236941032336:2496]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7486792236941032362:2496].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-28T09:37:35.915696Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792236941032355:2496], SessionActorId: [1:7486792236941032336:2496], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7486792236941032336:2496]. isRollback=0 2025-03-28T09:37:35.915893Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGY0OTZlM2QtODczMmZlMTAtYTg3MDg5NTUtMWExYjI0Zjg=, ActorId: [1:7486792236941032336:2496], ActorState: ExecuteState, TraceId: 01jqe1tckb3emea9z476sfympp, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7486792236941032356:2496] from: [1:7486792236941032355:2496] 2025-03-28T09:37:35.915986Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486792236941032356:2496] TxId: 281474976710671. Ctx: { TraceId: 01jqe1tckb3emea9z476sfympp, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NGY0OTZlM2QtODczMmZlMTAtYTg3MDg5NTUtMWExYjI0Zjg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:37:35.916852Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NGY0OTZlM2QtODczMmZlMTAtYTg3MDg5NTUtMWExYjI0Zjg=, ActorId: [1:7486792236941032336:2496], ActorState: ExecuteState, TraceId: 01jqe1tckb3emea9z476sfympp, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 15342, MsgBus: 6972 2025-03-28T09:37:36.859846Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792241189182562:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:36.859960Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001375/r3tmp/tmpcpZoxQ/pdisk_1.dat 2025-03-28T09:37:36.973593Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15342, node 2 2025-03-28T09:37:37.014967Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:37.015056Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:37.017364Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:37.056304Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:37.056328Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:37.056336Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:37.056470Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6972 TClient is connected to server localhost:6972 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:37.496283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:37.512961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
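The CONSTRAINT VIOLATION trail above (STATUS_CONSTRAINT_VIOLATION from tablet 72075186224037888, surfaced to the session as PRECONDITION_FAILED with issue code 2012) is the expected failure path for the duplicate-key effects cases: a single INSERT carries the same primary key twice, the datashard rejects the prepare, and the KQP session turns that into a query error. A minimal YQL sketch of the triggering statement, assuming the usual TwoShard fixture schema (only the table path /Root/TwoShard and the issue code appear in the log; the column name Value1 is illustrative):

    INSERT INTO `/Root/TwoShard` (Key, Value1) VALUES
        (1u, "first"),
        (1u, "second");  -- repeated Key -> "Duplicate keys have been found.", code: 2012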
2025-03-28T09:37:37.591728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:37.733564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:37.814442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:39.744777Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792254074086225:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.744876Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.794595Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.822514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.850427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.875851Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.905651Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.942970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.980018Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792254074086735:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.980093Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.980112Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792254074086740:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.982777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:39.990628Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792254074086742:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:40.087397Z node 2 :TX_PROXY ERROR: Actor# [2:7486792258369054093:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:41.083887Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792262664021699:2501], TxId: 281474976715672, task: 1. Ctx: { CustomerSuppliedId : . TraceId : 01jqe1thfh6wcn4ka18at6j3f5. SessionId : ydb://session/3?node_id=2&id=OGI1YWY4Zi0xMmY0ZWNjZC1iZDhiNTY0Ny05MjhhMmUwMw==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:37:41.084168Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792262664021701:2502], TxId: 281474976715672, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=OGI1YWY4Zi0xMmY0ZWNjZC1iZDhiNTY0Ny05MjhhMmUwMw==. CustomerSuppliedId : . TraceId : 01jqe1thfh6wcn4ka18at6j3f5. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7486792262664021696:2487], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:37:41.084480Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGI1YWY4Zi0xMmY0ZWNjZC1iZDhiNTY0Ny05MjhhMmUwMw==, ActorId: [2:7486792258369054345:2487], ActorState: ExecuteState, TraceId: 01jqe1thfh6wcn4ka18at6j3f5, Create QueryResponse for error on request, msg: >> KqpEffects::InsertRevert_Literal_Success [GOOD] >> KqpEffects::UpdateOn_Literal >> TFetchRequestTests::HappyWay >> TPQTabletTests::DropTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 7114, MsgBus: 32464 2025-03-28T09:37:30.770563Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792217150067834:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:30.770652Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001377/r3tmp/tmp2xuUc3/pdisk_1.dat 2025-03-28T09:37:31.110853Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7114, node 1 2025-03-28T09:37:31.174398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:31.174517Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:31.176575Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:31.187012Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:31.187040Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:31.187045Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:31.187164Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32464 TClient is connected to server localhost:32464 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:31.634513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
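The KIKIMR_CONSTRAINT_VIOLATION a few entries above ("Conflict with existing key.", code 2012) pins down the other half of the abort semantics these effects tests check: INSERT must fail when the target key already exists, while the blind-write verbs overwrite it. A hedged YQL sketch of the distinction, again with an illustrative column name:

    INSERT INTO `/Root/TwoShard` (Key, Value1) VALUES (1u, "again");
    -- -> PRECONDITION_FAILED: Conflict with existing key., code: 2012 (key 1u already present)

    UPSERT INTO `/Root/TwoShard` (Key, Value1) VALUES (1u, "again");
    -- -> succeeds: UPSERT replaces the existing row instead of aborting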
2025-03-28T09:37:31.658542Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.793353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.934591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:31.997361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:33.692260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792230034971490:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.692368Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:33.991655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.016090Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.043188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.070356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.102483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.136644Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:34.188472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792234329939299:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.188562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.188615Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792234329939304:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:34.191390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:34.199781Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792234329939306:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:34.295156Z node 1 :TX_PROXY ERROR: Actor# [1:7486792234329939360:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:35.318661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:35.770679Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792217150067834:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:35.770746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:37:36.124964Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODY5YzI3NDktMTY0MmQ3NjktNDc3ZWJkYS1iYWI5MTE5MQ==, ActorId: [1:7486792238624907215:2526], ActorState: ExecuteState, TraceId: 01jqe1tcsvf0a5pwhwht82w1hs, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken 2025-03-28T09:37:36.131965Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODY5YzI3NDktMTY0MmQ3NjktNDc3ZWJkYS1iYWI5MTE5MQ==, ActorId: [1:7486792238624907215:2526], ActorState: ReadyState, TraceId: 01jqe1tcw39s3sbbday2dhcjqb, Create QueryResponse for error on request, msg: Trying to start YDB, gRPC: 29569, MsgBus: 7204 2025-03-28T09:37:36.996374Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792240671982398:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:36.996444Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001377/r3tmp/tmpseYgdz/pdisk_1.dat 2025-03-28T09:37:37.097724Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:37.125873Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:37.125950Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29569, node 2 2025-03-28T09:37:37.128575Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:37.168063Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:37.168091Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:37.168099Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:37.168224Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7204 TClient is connected to server localhost:7204 WaitRootIsUp 'Root'... 
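The KQP_SESSION warning above ending in "tx has deferred effects, but locks are broken" is the optimistic-lock abort that KqpImmediateEffects::ConflictingKeyR1RWR2 exercises: one transaction reads a key, a second transaction commits a write to that key first, and the reader's own commit is rejected because its read lock was invalidated. A sketch of the interleaving in YQL terms (session labels, table path, and key are illustrative, not taken from the log):

    -- session A: SELECT Value1 FROM `/Root/TestImmediateEffects` WHERE Key = 1u;  -- acquires a read lock on Key = 1u
    -- session B: UPSERT INTO `/Root/TestImmediateEffects` (Key, Value1) VALUES (1u, "b");  -- commits, breaking A's lock
    -- session A: UPSERT INTO `/Root/TestImmediateEffects` (Key, Value1) VALUES (1u, "a");
    -- session A: COMMIT;  -- -> ABORTED: tx has deferred effects, but locks are broken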
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:37.554241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:37.562156Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:37:37.576273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:37.655024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:37.828937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:37.906042Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:39.877743Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792253556886044:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.877795Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.923115Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.949725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.974116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.999006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.027950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.059129Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.132757Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792257851853857:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.132833Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.133032Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792257851853862:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.136719Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:40.146332Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792257851853864:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:40.236988Z node 2 :TX_PROXY ERROR: Actor# [2:7486792257851853920:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:41.053181Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig >> TPQTabletTests::Multiple_PQTablets_1 >> TPQTabletTests::ProposeTx_Missing_Operations >> TListAllTopicsTests::PlainList >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test >> TPQTest::TestPQPartialRead >> TMeteringSink::FlushThroughputV1 [GOOD] >> TMeteringSink::UsedStorageV1 [GOOD] >> TMicrosecondsSlidingWindow::Basic [GOOD] >> TMultiBucketCounter::InsertAndUpdate [GOOD] >> TMultiBucketCounter::ManyCounters >> TPQTest::TestPartitionTotalQuota >> TMultiBucketCounter::ManyCounters [GOOD] >> TPQRBDescribes::PartitionLocations >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink [GOOD] >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd >> KqpStreamLookup::ReadTableWithIndexDuringSplit >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions >> TPQTabletTests::DropTablet [GOOD] >> TPQTabletTests::Multiple_PQTablets_1 [GOOD] >> TPQTabletTests::ProposeTx_Missing_Operations [GOOD] >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction >> TPQTabletTests::Multiple_PQTablets_2 >> TPQTabletTests::ProposeTx_Unknown_Partition_1 >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig [GOOD] >> TPartitionTests::CorrectRange_Multiple_Transactions >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients >> TPartitionTests::ShadowPartitionCountersFirstClass >> TPQTabletTests::ProposeTx_Unknown_Partition_1 [GOOD] >> TPQTabletTests::Multiple_PQTablets_2 [GOOD] >> AnalyzeColumnshard::AnalyzeServerless [GOOD] >> TPQTestInternal::TestPartitionedBlobSimpleTest [GOOD] >> TPQTestInternal::TestPartitionedBigTest >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction [GOOD] >> TPQTabletTests::Cancel_Tx >> KqpEffects::InsertAbort_Select_Conflict+UseSink [GOOD] >> TPQTabletTests::ProposeTx_Unknown_WriteId >> TPQTabletTests::DropTablet_And_Tx >> KqpImmediateEffects::Replace [GOOD] >> KqpImmediateEffects::MultipleEffectsWithIndex >> TPQTabletTests::Cancel_Tx [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients [GOOD] >> TPQTabletTests::DropTablet_And_Tx [GOOD] >> TPQTabletTests::ProposeTx_Unknown_WriteId [GOOD] >> TPQTabletTests::Config_TEvTxCommit_After_Restart >> TPQTabletTests::DropTablet_Before_Write >> TPQTabletTests::ProposeTx_Unknown_Partition_2 >> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD] >> TPartitionTests::UserActCount >> TPartitionTests::CorrectRange_Multiple_Transactions [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients >> TPQTabletTests::DropTablet_Before_Write [GOOD] >> 
TPartitionTests::CorrectRange_Multiple_Consumers >> TPQTabletTests::ProposeTx_Unknown_Partition_2 [GOOD] >> KqpImmediateEffects::Insert [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted >> TPQTabletTests::Config_TEvTxCommit_After_Restart [GOOD] >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction >> TPQTabletTests::ProposeTx_Command_After_Propose >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients [GOOD] >> KqpImmediateEffects::UpsertDuplicates [GOOD] >> KqpImmediateEffects::UpsertExistingKey >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match >> TPQTestInternal::TestPartitionedBigTest [GOOD] >> TPQTestInternal::TestBatchPacking [GOOD] >> TPQTestInternal::TestKeyRange [GOOD] >> TPQTestInternal::TestToHex [GOOD] >> TPQTabletTests::All_New_Partitions_In_Another_Tablet >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction [GOOD] >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 61988, MsgBus: 12832 2025-03-28T09:37:33.658367Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792227442828887:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:33.658420Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00136d/r3tmp/tmp1pvEC4/pdisk_1.dat 2025-03-28T09:37:33.965879Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:33.969320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:33.969429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:33.973743Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 61988, node 1 2025-03-28T09:37:34.060998Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:34.061037Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:34.061045Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:34.061243Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12832 TClient is connected to server localhost:12832 WaitRootIsUp 'Root'... 
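The report opening above covers the Select variant of the same abort family: the conflicting rows come from a SELECT source rather than from literals or parameters, but the outcome is the same code 2012 abort. A hedged YQL sketch (a self-select guarantees every produced key already exists; the column name is illustrative):

    INSERT INTO `/Root/TwoShard` (Key, Value1)
    SELECT Key, Value1 FROM `/Root/TwoShard`;
    -- -> every selected Key is already present -> PRECONDITION_FAILED, code: 2012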
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:34.534214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.563579Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.723194Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.891780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:34.957588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:36.610395Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792240327732534:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:36.610495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:36.929171Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.966483Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:36.990577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.015678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.043712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.074280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.121064Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792244622700342:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.121136Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.121336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792244622700347:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.124691Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:37.134888Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792244622700349:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:37.231325Z node 1 :TX_PROXY ERROR: Actor# [1:7486792244622700404:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:38.658423Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792227442828887:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:38.658494Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 25638, MsgBus: 31944 2025-03-28T09:37:39.467197Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792252359694795:2070];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:39.467275Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00136d/r3tmp/tmpeNM5tr/pdisk_1.dat 2025-03-28T09:37:39.544089Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:39.567439Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:39.567509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:39.568713Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25638, node 2 2025-03-28T09:37:39.611073Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:39.611099Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:39.611108Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:39.611217Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31944 TClient is connected to server localhost:31944 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 
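
The NOT_FOUND warnings above are the expected first-query bootstrap of the `default` resource pool: TPoolFetcherActor looks up `.metadata/workload_manager/pools/default` and misses, TPoolCreatorActor creates the pool and double-checks, and the later TX_PROXY "path exist, request accepts it" message just means a concurrent creator won the race. A minimal sketch of that retry shape, assuming hypothetical helpers (FetchDefaultPool/CreateDefaultPool are illustrative, not YDB's actual internals):

    #include <atomic>
    #include <cstdio>

    enum class EStatus { Success, NotFound, AlreadyExists };

    static std::atomic<bool> PoolExists{false};

    EStatus FetchDefaultPool() {
        return PoolExists.load() ? EStatus::Success : EStatus::NotFound;
    }

    EStatus CreateDefaultPool() {
        // exchange() models the race: only the first caller "creates" the pool,
        // every later caller observes it already in place.
        return PoolExists.exchange(true) ? EStatus::AlreadyExists : EStatus::Success;
    }

    // NOT_FOUND (pool not created yet) and the "path exist, request accepts it"
    // outcome (lost the creation race) are both benign here; the fetch is simply
    // repeated, which is the "doublechecking" step the log mentions.
    bool EnsureDefaultPool(int maxAttempts = 5) {
        for (int i = 0; i < maxAttempts; ++i) {
            if (FetchDefaultPool() == EStatus::Success) {
                return true;
            }
            CreateDefaultPool();  // Success or AlreadyExists: re-fetch on next pass
        }
        return false;
    }

    int main() {
        std::printf("default pool ready: %s\n", EnsureDefaultPool() ? "yes" : "no");
    }
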
2025-03-28T09:37:39.952778Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.968626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.041105Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.182252Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.265917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:42.301711Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792265244598434:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.301803Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.342323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.364158Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.390173Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.416847Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.439619Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.505669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.579731Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792265244598954:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.579800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.579854Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792265244598959:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.582553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:42.590342Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792265244598962:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:42.671254Z node 2 :TX_PROXY ERROR: Actor# [2:7486792265244599016:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:43.424354Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:43.637597Z node 2 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=281474976715673; 2025-03-28T09:37:43.649062Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792269539566744:2495], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [2:7486792269539566598:2495]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[2:7486792269539566744:2495].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-28T09:37:43.649567Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792269539566733:2495], SessionActorId: [2:7486792269539566598:2495], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[2:7486792269539566598:2495]. isRollback=0 2025-03-28T09:37:43.649792Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2QyYTc2YTYtMzAyYjg5ZWUtYjIzMWQxY2QtODY0NjJmZjc=, ActorId: [2:7486792269539566598:2495], ActorState: ExecuteState, TraceId: 01jqe1tm3jf7pnph1se615q184, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7486792269539566734:2495] from: [2:7486792269539566733:2495] 2025-03-28T09:37:43.649874Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7486792269539566734:2495] TxId: 281474976715673. Ctx: { TraceId: 01jqe1tm3jf7pnph1se615q184, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=Y2QyYTc2YTYtMzAyYjg5ZWUtYjIzMWQxY2QtODY0NjJmZjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:37:43.650089Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=Y2QyYTc2YTYtMzAyYjg5ZWUtYjIzMWQxY2QtODY0NjJmZjc=, ActorId: [2:7486792269539566598:2495], ActorState: ExecuteState, TraceId: 01jqe1tm3jf7pnph1se615q184, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeServerless [GOOD] Test command err: 2025-03-28T09:35:09.970433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:09.970654Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:09.970725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b24/r3tmp/tmp4C98lt/pdisk_1.dat 2025-03-28T09:35:10.237057Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30065, node 1 2025-03-28T09:35:10.458595Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:10.458651Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:10.458679Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:10.459156Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:10.461559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:35:10.549487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:10.549644Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:10.562629Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17616 2025-03-28T09:35:11.033678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:13.475626Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:13.500702Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:13.500832Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:13.537564Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:13.539116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:13.749094Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:13.749480Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:13.749912Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:13.750010Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:13.750185Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:13.750242Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:13.750291Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:13.750336Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:13.750379Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:13.909341Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:13.909463Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:13.922302Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:14.027235Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:14.064814Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:14.064885Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:14.088251Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:14.088366Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:14.088518Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:14.088562Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:14.088598Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:14.088633Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:14.088669Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:14.088701Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:14.089031Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:14.115327Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:14.115407Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:14.120911Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:14.125817Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:14.125995Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:14.130908Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2025-03-28T09:35:14.144194Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:14.144241Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:14.144289Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Shared/.metadata/_statistics 2025-03-28T09:35:14.156679Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:14.162778Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:14.162915Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:14.299299Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:14.490968Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:14.610774Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:15.238313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:35:15.885435Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:16.025941Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2025-03-28T09:35:16.026007Z node 2 :STATISTICS DEBUG: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:35:16.026090Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:2594:2948], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2025-03-28T09:35:16.028109Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:2596:2950] 2025-03-28T09:35:16.029022Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2596:2950], schemeshard id = 72075186224037899 2025-03-28T09:35:17.133279Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2723:3244], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:17.133462Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:17.152304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715661:0, at schemeshard: 72075186224037899 2025-03-28T09:35:17.242097Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2811:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:17.242256Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2811:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:17.242452Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2811:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:17.242535Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2811:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:17.242614Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2811:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:17.242754Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2811:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:17.242893Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2811:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:17.242976Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2811:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:17.243052Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037905;self_id=[2:2811:3051];tablet_id=72075186224037905;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09 ... 36.573021Z node 1 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2025-03-28T09:37:38.468273Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037897 2025-03-28T09:37:38.468360Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 201.000000s, at schemeshard: 72075186224037897 2025-03-28T09:37:38.468685Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037897, stats size# 25 2025-03-28T09:37:38.488126Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:37:39.887434Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:39.887541Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. 
OperationId=operationId 2025-03-28T09:37:39.887580Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:37:39.887659Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 3] is data table. 2025-03-28T09:37:39.887704Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. Skip traversal for datashard table [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:37:39.888107Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-28T09:37:39.892561Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2025-03-28T09:37:39.896542Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8041:5942], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.896643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:8052:5947], DatabaseId: /Root/Shared, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.896763Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Shared, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.910006Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897 2025-03-28T09:37:39.974990Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:8055:5950], DatabaseId: /Root/Shared, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2025-03-28T09:37:40.191910Z node 2 :TX_PROXY ERROR: Actor# [2:8153:5998] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Shared/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:40.263899Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:8182:6013]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:40.264128Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2025-03-28T09:37:40.264225Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id = [2:8184:6015] 2025-03-28T09:37:40.264296Z node 2 :STATISTICS DEBUG: SyncNode(), pipe client id = [2:8184:6015] 2025-03-28T09:37:40.264698Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8185:6016] 2025-03-28T09:37:40.264856Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8184:6015], server id = [2:8185:6016], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:40.264928Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8185:6016], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2025-03-28T09:37:40.264995Z node 2 :STATISTICS DEBUG: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2025-03-28T09:37:40.265145Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:37:40.265223Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 1, ReplyToActorId = [2:8182:6013], StatRequests.size() = 1 2025-03-28T09:37:40.395534Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjFkMzhjOTAtNThmZjBiODYtNGNjMzcwYjQtNTYzYzNiZTQ=, TxId: 2025-03-28T09:37:40.395622Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjFkMzhjOTAtNThmZjBiODYtNGNjMzcwYjQtNTYzYzNiZTQ=, TxId: 2025-03-28T09:37:40.396555Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:40.410510Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:37:40.410582Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
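
The statistics aggregator maintains `.metadata/_statistics` with ordinary parameterized YQL, visible verbatim in the RunDataQuery lines above. A sketch of issuing the same DELETE through the YDB C++ SDK; the query text is taken from the log, while the surrounding SDK plumbing is a generic illustration, not the aggregator's internal TQueryBase code:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    // Remove persisted statistics for one table, identified the same way the
    // aggregator does: by schemeshard owner id and local path id.
    TStatus DeleteTableStats(TTableClient& client, ui64 ownerId, ui64 localPathId) {
        static const char* Query = R"(
            DECLARE $owner_id AS Uint64;
            DECLARE $local_path_id AS Uint64;
            DELETE FROM `.metadata/_statistics`
            WHERE owner_id = $owner_id AND local_path_id = $local_path_id;
        )";
        auto session = client.CreateSession().GetValueSync().GetSession();
        auto params = TParamsBuilder()
            .AddParam("$owner_id").Uint64(ownerId).Build()
            .AddParam("$local_path_id").Uint64(localPathId).Build()
            .Build();
        return session.ExecuteDataQuery(
            Query,
            TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx(),
            params).GetValueSync();
    }
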
2025-03-28T09:37:40.464288Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:37:40.464365Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:37:40.539605Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8184:6015], schemeshard count = 1 2025-03-28T09:37:40.870952Z node 2 :STATISTICS DEBUG: SendBaseStatsToSA(), path count: 1, at schemeshard: 72075186224037899 2025-03-28T09:37:40.871022Z node 2 :STATISTICS DEBUG: Schedule next SendBaseStatsToSA in 206.000000s, at schemeshard: 72075186224037899 2025-03-28T09:37:40.871523Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id# 72075186224037899, stats size# 28 2025-03-28T09:37:40.884967Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxSchemeShardStats::Complete 2025-03-28T09:37:41.778553Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:37:41.778659Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-03-28T09:37:41.782119Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:41.799315Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:41.799900Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:41.799960Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::ExecuteAnalyze. Table OperationId operationId, PathId [OwnerId: 72075186224037899, LocalPathId: 2], AnalyzedShards 1 2025-03-28T09:37:41.813920Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:41.825128Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableRequest::Complete. Send 1 events. 2025-03-28T09:37:41.826271Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute 2025-03-28T09:37:41.826403Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Execute. All shards are analyzed 2025-03-28T09:37:41.840359Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAnalyzeTableResponse::Complete. 2025-03-28T09:37:43.207028Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:43.207152Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 2] is column table. 2025-03-28T09:37:43.207196Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-28T09:37:43.207952Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:43.222490Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:43.222966Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:43.223041Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:43.223880Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:43.238328Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:43.238655Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:37:43.239455Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8319:6105], server id = [2:8320:6106], tablet id = 72075186224037905, status = OK 2025-03-28T09:37:43.239579Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8319:6105], path = { OwnerId: 72075186224037899 LocalId: 2 } 2025-03-28T09:37:43.243841Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-28T09:37:43.243937Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:43.244095Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:43.244290Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:43.244681Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Shared 2025-03-28T09:37:43.246711Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8319:6105], server id = [2:8320:6106], tablet id = 72075186224037905 2025-03-28T09:37:43.246758Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:43.247408Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:43.277095Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8340:6125]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:43.277310Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:43.277353Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8340:6125], StatRequests.size() = 1 2025-03-28T09:37:43.396886Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDZjM2ZkNzQtYTE3ZWRhNzAtZjVkZDgwZjAtYzMxOTQ1Mjc=, TxId: 2025-03-28T09:37:43.396957Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDZjM2ZkNzQtYTE3ZWRhNzAtZjVkZDgwZjAtYzMxOTQ1Mjc=, TxId: 2025-03-28T09:37:43.397933Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:43.416346Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 2] 2025-03-28T09:37:43.416423Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3317:3398] >> TPQTabletTests::Huge_ProposeTransacton >> TPQTabletTests::Read_TEvTxCommit_After_Restart >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match [GOOD] >> TPartitionTests::CorrectRange_Multiple_Consumers [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] >> TPartitionTests::CorrectRange_Rollback ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 65335, MsgBus: 22953 2025-03-28T09:37:34.490515Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792231437257492:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:34.490633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001368/r3tmp/tmpKPTZ9Y/pdisk_1.dat 2025-03-28T09:37:34.851091Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65335, node 1 2025-03-28T09:37:34.894543Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:34.894916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:34.901729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:34.951168Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:34.951189Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:34.951197Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:34.951294Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22953 TClient is connected to server localhost:22953 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:35.484965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
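
The recurring "Operation part proposed ok, but propose itself is undo unsafe" WARN lines appear to mean that each suboperation proposed successfully on its own, but the multi-part scheme transaction has no rollback path once such a part is proposed; for the CreateTable/AlterSubDomain setup in these tests it is routine noise rather than a failure. A rough toy model of that check (the per-type SupportsUndo flag is an assumption for illustration, not schemeshard's actual data model):

    #include <string>
    #include <vector>

    struct TSubOperation {
        std::string Type;   // e.g. "ESchemeOpCreateTable"
        bool SupportsUndo;  // create-style ops are assumed not undoable once proposed
    };

    // A multi-part operation is safe to undo only if every part supports rollback.
    bool ProposeIsUndoSafe(const std::vector<TSubOperation>& parts) {
        for (const auto& part : parts) {
            if (!part.SupportsUndo) {
                return false;  // would surface as the "undo unsafe" WARN for this part
            }
        }
        return true;
    }
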
2025-03-28T09:37:35.510298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:35.646256Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:35.804881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:35.882670Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:37.326528Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792244322161147:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.326768Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.664388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.696561Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.732718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.759318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.791965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.837298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:37.885034Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792244322161659:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.885142Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.885438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792244322161664:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:37.889403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:37.897940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792244322161666:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:37.957946Z node 1 :TX_PROXY ERROR: Actor# [1:7486792244322161720:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:38.967793Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-03-28T09:37:38.977161Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:37:38.977324Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:37:38.977569Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792248617129333:2496], Table: `/Root/TwoShard` ([72057594046644480:2:1]), SessionActorId: [1:7486792248617129308:2496]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[1:7486792248617129333:2496].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-28T09:37:38.993010Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792248617129326:2496], SessionActorId: [1:7486792248617129308:2496], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7486792248617129308:2496]. isRollback=0 2025-03-28T09:37:38.993248Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTQ1MTk2NWItYmZmYmQwOTctZWRhZjI0NjMtNTNkNzJmM2M=, ActorId: [1:7486792248617129308:2496], ActorState: ExecuteState, TraceId: 01jqe1tfhe1cgg130hkxc44fbe, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7486792248617129327:2496] from: [1:7486792248617129326:2496] 2025-03-28T09:37:38.993317Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486792248617129327:2496] TxId: 281474976710671. Ctx: { TraceId: 01jqe1tfhe1cgg130hkxc44fbe, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTQ1MTk2NWItYmZmYmQwOTctZWRhZjI0NjMtNTNkNzJmM2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:37:38.994121Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTQ1MTk2NWItYmZmYmQwOTctZWRhZjI0NjMtNTNkNzJmM2M=, ActorId: [1:7486792248617129308:2496], ActorState: ExecuteState, TraceId: 01jqe1tfhe1cgg130hkxc44fbe, Create QueryResponse for error on request, msg: 2025-03-28T09:37:39.490398Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792231437257492:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:39.490466Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1094, MsgBus: 1151 2025-03-28T09:37:40.111129Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792258182158512:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:40.111240Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001368/r3tmp/tmpxxYcDg/pdisk_1.dat 2025-03-28T09:37:40.209691Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1094, node 2 2025-03-28T09:37:40.246312Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:40.246416Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:40.247911Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:40.277570Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:40.277595Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:40.277605Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:40.277716Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1151 TClient is connected to server localhost:1151 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:40.668095Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
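
The PRECONDITION_FAILED block above is the behavior this test asserts: the write set for `/Root/TwoShard` contains two rows with the same primary key, the KQP sink reports CONSTRAINT VIOLATION with issue code 2012, and the session converts it into a PRECONDITION_FAILED query response. A hedged SDK-level reproduction; the table name and columns follow the log, but the exact statement and parameters the test uses may differ:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    // Two rows with the same primary key in one INSERT: the transaction must
    // abort with PRECONDITION_FAILED carrying issue code 2012, as in the log.
    bool InsertDuplicatesAborts(TTableClient& client) {
        auto session = client.CreateSession().GetValueSync().GetSession();
        auto result = session.ExecuteDataQuery(R"(
            INSERT INTO `/Root/TwoShard` (Key, Value1, Value2) VALUES
                (100u, "dup", 1),
                (100u, "dup", 2);
        )", TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx())
            .GetValueSync();
        return result.GetStatus() == EStatus::PRECONDITION_FAILED;
    }
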
2025-03-28T09:37:40.686035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.756396Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.889867Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.961630Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:42.723486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792266772094861:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.723572Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.747372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.770273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.792093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.813605Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.837803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.867758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.909555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792266772095372:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.909656Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.909721Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792266772095377:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.913367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:42.922732Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792266772095379:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:43.005339Z node 2 :TX_PROXY ERROR: Actor# [2:7486792271067062730:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:44.108119Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792275362030339:2499], TxId: 281474976715672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=YjhlNjg5YzktZjI0OTc4Yi0zMTI5MDhjYy00YjAxZTgzNA==. CustomerSuppliedId : . TraceId : 01jqe1tmcs90f1nc6x17h03jjm. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-03-28T09:37:44.108595Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792275362030340:2500], TxId: 281474976715672, task: 2. Ctx: { CustomerSuppliedId : . TraceId : 01jqe1tmcs90f1nc6x17h03jjm. SessionId : ydb://session/3?node_id=2&id=YjhlNjg5YzktZjI0OTc4Yi0zMTI5MDhjYy00YjAxZTgzNA==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7486792275362030336:2487], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:37:44.108977Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YjhlNjg5YzktZjI0OTc4Yi0zMTI5MDhjYy00YjAxZTgzNA==, ActorId: [2:7486792271067062985:2487], ActorState: ExecuteState, TraceId: 01jqe1tmcs90f1nc6x17h03jjm, Create QueryResponse for error on request, msg: >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] >> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::TestToHex [GOOD] >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep >> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State [GOOD] >> TPartitionTests::CorrectRange_Rollback [GOOD] >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] Test command err: 2025-03-28T09:37:43.959522Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:37:43.970067Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:37:43.970404Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-28T09:37:43.970453Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:37:43.970490Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-28T09:37:43.970533Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:37:43.970580Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:43.970636Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:43.991242Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:176:2191], now have 1 active actors on pipe 2025-03-28T09:37:43.991333Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:37:44.014078Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:37:44.017047Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" 
TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:37:44.017191Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:44.018198Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:37:44.019480Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:44.019969Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:44.020384Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-03-28T09:37:44.021393Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-28T09:37:44.021464Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:184:2197] 2025-03-28T09:37:44.021522Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:37:44.022029Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-03-28T09:37:44.022193Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-03-28T09:37:44.022237Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-03-28T09:37:44.022479Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:44.022523Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:44.022564Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:44.022602Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:44.022640Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-28T09:37:44.022668Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-28T09:37:44.022717Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:44.022758Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:37:44.022847Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:44.023014Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:37:44.027533Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:44.027923Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:191:2202], now have 1 active actors on pipe 2025-03-28T09:37:44.029736Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:194:2204], now have 1 active actors on pipe 2025-03-28T09:37:44.030626Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 2 Data { Immediate: false } 2025-03-28T09:37:44.030715Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 2 empty list of operations 2025-03-28T09:37:44.030880Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 2 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2025-03-28T09:37:44.502264Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:37:44.505788Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:37:44.506040Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-28T09:37:44.506100Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:37:44.506135Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-28T09:37:44.506168Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:37:44.506209Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:44.506256Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:44.522628Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [2:178:2193], now have 1 active actors on pipe 2025-03-28T09:37:44.522770Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:37:44.523027Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 2(current 0) received from actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-28T09:37:44.525226Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-28T09:37:44.525330Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:44.526073Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-28T09:37:44.526210Z 
node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:44.526577Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:44.526875Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:186:2199] 2025-03-28T09:37:44.527843Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-28T09:37:44.527904Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [2:186:2199] 2025-03-28T09:37:44.527961Z node 2 :PERSQUEUE DEBUG: [PQ: 7205759403792793 ... DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:46.281493Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [6:304:2290] 2025-03-28T09:37:46.282141Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDiskStatusStep 2025-03-28T09:37:46.283200Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitMetaStep 2025-03-28T09:37:46.283465Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-03-28T09:37:46.284143Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-03-28T09:37:46.284416Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataStep 2025-03-28T09:37:46.284463Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:37:46.284503Z node 6 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:37:46.284537Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-28T09:37:46.284579Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [6:304:2290] 2025-03-28T09:37:46.284629Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:37:46.284676Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-03-28T09:37:46.284740Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 6 2025-03-28T09:37:46.284871Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PLANNED 2025-03-28T09:37:46.284913Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNED 2025-03-28T09:37:46.284942Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State PLANNED FrontTxId 67890 2025-03-28T09:37:46.284967Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-03-28T09:37:46.284995Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-03-28T09:37:46.285040Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2025-03-28T09:37:46.285074Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from PLANNED to CALCULATING 2025-03-28T09:37:46.285325Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2025-03-28T09:37:46.285569Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2025-03-28T09:37:46.285605Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-03-28T09:37:46.285638Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2025-03-28T09:37:46.285666Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-03-28T09:37:46.285695Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-03-28T09:37:46.285731Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2025-03-28T09:37:46.285768Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-03-28T09:37:46.285825Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2025-03-28T09:37:46.285883Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2025-03-28T09:37:46.286076Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-03-28T09:37:46.286172Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:37:46.289793Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:37:46.289848Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-03-28T09:37:46.289892Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-03-28T09:37:46.289931Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 67890 2025-03-28T09:37:46.289968Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-03-28T09:37:46.290013Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-03-28T09:37:46.290055Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. 
Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-03-28T09:37:46.290096Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-03-28T09:37:46.290212Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 2025-03-28T09:37:46.291778Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-03-28T09:37:46.291833Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 2025-03-28T09:37:46.293766Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [6:329:2308], now have 1 active actors on pipe 2025-03-28T09:37:46.293921Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-03-28T09:37:46.293966Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2025-03-28T09:37:46.294003Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2025-03-28T09:37:46.294058Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2025-03-28T09:37:46.294097Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2025-03-28T09:37:46.294136Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2025-03-28T09:37:46.294181Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-03-28T09:37:46.294258Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-03-28T09:37:46.294304Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-03-28T09:37:46.294335Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 1 2025-03-28T09:37:46.294408Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-03-28T09:37:46.294455Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 67890 2025-03-28T09:37:46.294610Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:46.294654Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:46.294713Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:46.294749Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:46.294779Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:37:46.294802Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-28T09:37:46.294827Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-28T09:37:46.294859Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:46.294912Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:37:46.294995Z node 6 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 2025-03-28T09:37:46.295047Z node 6 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-28T09:37:46.297213Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:46.297343Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-03-28T09:37:46.297389Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-03-28T09:37:46.297422Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-03-28T09:37:46.297459Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-03-28T09:37:46.297502Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-03-28T09:37:46.297547Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-03-28T09:37:46.297600Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] complete TxId 67890 2025-03-28T09:37:46.297637Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-03-28T09:37:46.297671Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-03-28T09:37:46.297712Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-03-28T09:37:46.297893Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 100 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-03-28T09:37:46.297974Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:37:46.301744Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:37:46.301808Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-03-28T09:37:46.301849Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-03-28T09:37:46.301899Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-03-28T09:37:46.301946Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-03-28T09:37:46.302011Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-03-28T09:37:46.302053Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-03-28T09:37:46.302090Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-03-28T09:37:46.302131Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-03-28T09:37:46.302159Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-28T09:37:46.302190Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 >> TPartitionTests::DataTxCalcPredicateOk >> UpsertLoad::ShouldWriteDataBulkUpsert [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted >> 
KqpInplaceUpdate::SingleRowIf-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD] Test command err: 2025-03-28T09:37:44.227284Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:37:44.231588Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:37:44.231908Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-28T09:37:44.231946Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:37:44.231979Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-28T09:37:44.232024Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:37:44.232071Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:44.232118Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:44.248216Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:176:2191], now have 1 active actors on pipe 2025-03-28T09:37:44.248312Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:37:44.263275Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:37:44.266712Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:37:44.266876Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:44.268579Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 
LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:37:44.268702Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:44.268788Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:44.269241Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:44.269619Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-03-28T09:37:44.270524Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-28T09:37:44.270587Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:184:2197] 2025-03-28T09:37:44.270640Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:37:44.271200Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:44.271305Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-03-28T09:37:44.271349Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-03-28T09:37:44.271520Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:44.271567Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:44.271603Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:44.271656Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:44.271690Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-28T09:37:44.271715Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-28T09:37:44.271740Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:44.271795Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:37:44.271874Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:44.272204Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 
2025-03-28T09:37:44.272422Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-28T09:37:44.273103Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 2025-03-28T09:37:44.273141Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:186:2199] 2025-03-28T09:37:44.273191Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:37:44.273528Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:44.273583Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-03-28T09:37:44.273613Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-03-28T09:37:44.273709Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:44.273744Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:44.273770Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-28T09:37:44.273797Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-28T09:37:44.273821Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-03-28T09:37:44.273843Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-03-28T09:37:44.273879Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:44.273923Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-28T09:37:44.273973Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:44.274062Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:37:44.274265Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-28T09:37:44.282659Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:44.282881Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:44.283275Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:197:2206], now have 1 active actors on pipe 2025-03-28T09:37:44.284037Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:200:2208], now have 1 active actors on pipe 2025-03-28T09:37:44.284853Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Immediate: false } 2025-03-28T09:37:44.284919Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-03-28T09:37:44.285030Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-03-28T09:37:44.285071Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-03-28T09:37:44.285105Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-03-28T09:37:44.285146Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-03-28T09:37:44.285206Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-03-28T09:37:44.285373Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 229 MaxStep: 30229 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Operations { PartitionId: 1 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2025-03-28T09:37:44.285452Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:37:44.288922Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:37:44.288982Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING ... 
itions# 0 2025-03-28T09:37:46.749422Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [6:221:2223], now have 1 active actors on pipe 2025-03-28T09:37:46.749980Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [6:224:2225], now have 1 active actors on pipe 2025-03-28T09:37:46.750172Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 177 RawX2: 25769805968 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 1 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-03-28T09:37:46.750214Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-03-28T09:37:46.750285Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-03-28T09:37:46.750323Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-03-28T09:37:46.750360Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-03-28T09:37:46.750404Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-03-28T09:37:46.750443Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-03-28T09:37:46.750747Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 1 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-03-28T09:37:46.750834Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:37:46.754282Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:37:46.754343Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-03-28T09:37:46.754383Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-03-28T09:37:46.754420Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-03-28T09:37:46.754674Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-03-28T09:37:46.754740Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSet 2025-03-28T09:37:46.754778Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Predicates 1/1 2025-03-28T09:37:46.754891Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 177 RawX2: 25769805968 } } Step: 100 2025-03-28T09:37:46.754944Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARED 2025-03-28T09:37:46.754978Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARED 2025-03-28T09:37:46.755018Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNING 2025-03-28T09:37:46.755058Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2025-03-28T09:37:46.755204Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PLANNED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 1 Consumer: "user" Path: "/topic" } Step: 100 
Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-03-28T09:37:46.755285Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:37:46.758191Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:37:46.758255Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PLANNING 2025-03-28T09:37:46.758289Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PLANNING 2025-03-28T09:37:46.758328Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PLANNED 2025-03-28T09:37:46.758365Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from PLANNING to PLANNED 2025-03-28T09:37:46.758400Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-03-28T09:37:46.758434Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2025-03-28T09:37:46.758494Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATING 2025-03-28T09:37:46.758539Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from PLANNED to CALCULATING 2025-03-28T09:37:46.758617Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2025-03-28T09:37:46.758718Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Partition 0 Consumer 'user' Bad request (behind the last offset) EndOffset 0 End 1 2025-03-28T09:37:46.758881Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 0 2025-03-28T09:37:46.758916Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvTxCalcPredicateResult 2025-03-28T09:37:46.758955Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Partition responses 1/1 2025-03-28T09:37:46.758986Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-03-28T09:37:46.759017Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2025-03-28T09:37:46.759054Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2025-03-28T09:37:46.759090Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-03-28T09:37:46.759133Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState CALCULATED 2025-03-28T09:37:46.759168Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2025-03-28T09:37:46.759319Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: CALCULATED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 1 Consumer: "user" Path: "/topic" } Step: 100 Predicate: false Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-03-28T09:37:46.759401Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:37:46.765338Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:37:46.765399Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-03-28T09:37:46.765429Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-03-28T09:37:46.765462Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State 
CALCULATED FrontTxId 67890 2025-03-28T09:37:46.765510Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-03-28T09:37:46.765550Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-03-28T09:37:46.765595Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-03-28T09:37:46.765656Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-03-28T09:37:46.765778Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 1 2025-03-28T09:37:46.765830Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTING 2025-03-28T09:37:46.765864Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2025-03-28T09:37:46.765897Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 0, Expected 0 2025-03-28T09:37:46.765939Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2025-03-28T09:37:46.765976Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] complete TxId 67890 2025-03-28T09:37:46.766014Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-03-28T09:37:46.766056Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-03-28T09:37:46.766092Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-03-28T09:37:46.766287Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 30134 PredicatesReceived { TabletId: 22222 Predicate: true } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 1 Consumer: "user" Path: "/topic" } Step: 100 Predicate: false Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-03-28T09:37:46.766379Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:37:46.766846Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:46.766899Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:46.766939Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:46.766975Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:46.767008Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:37:46.767035Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:46.767074Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:37:46.767220Z node 6 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-28T09:37:46.769635Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-03-28T09:37:46.769685Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 2025-03-28T09:37:46.771848Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:46.772400Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:37:46.772436Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-03-28T09:37:46.772463Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-03-28T09:37:46.772490Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-03-28T09:37:46.772526Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-03-28T09:37:46.772569Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2025-03-28T09:37:46.772601Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-03-28T09:37:46.772627Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-03-28T09:37:46.772658Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-03-28T09:37:46.772692Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-28T09:37:46.772721Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] >> TSourceIdTests::SourceIdStorageAdd [GOOD] >> TSourceIdTests::ProtoSourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageComplexDelete [GOOD] >> TSourceIdTests::HeartbeatEmitter [GOOD] >> TSourceIdTests::SourceIdMinSeqNo [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::After_Restarting_The_Tablet_Sends_A_TEvReadSet_For_Transactions_In_The_EXECUTED_State [GOOD] Test command err: 2025-03-28T09:37:43.959518Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:37:43.970036Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:37:43.970313Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-28T09:37:43.970352Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:37:43.970380Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-28T09:37:43.970410Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:37:43.970441Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:43.970482Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:43.997658Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:176:2191], now have 1 active actors on pipe 2025-03-28T09:37:43.997753Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:37:44.019120Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:37:44.021827Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:37:44.021932Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:44.022742Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:37:44.022876Z 
node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:44.023323Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:44.023745Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-03-28T09:37:44.024648Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-28T09:37:44.024714Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:184:2197] 2025-03-28T09:37:44.024763Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:37:44.025271Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:44.025418Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-03-28T09:37:44.025466Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-03-28T09:37:44.025609Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:44.025649Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:44.025688Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:44.025723Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:44.025759Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-28T09:37:44.025782Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-28T09:37:44.025812Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:44.025914Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:37:44.026022Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:44.026161Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:37:44.031815Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:44.032205Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:191:2202], now have 1 active actors on pipe 2025-03-28T09:37:44.033727Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:194:2204], now have 1 active actors on pipe 2025-03-28T09:37:44.033826Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvDropTablet 2025-03-28T09:37:44.496885Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:37:44.504362Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:37:44.504667Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-28T09:37:44.504714Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:37:44.504750Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-28T09:37:44.504786Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:37:44.504827Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:44.504879Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:44.522768Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [2:178:2193], now have 1 active actors on pipe 2025-03-28T09:37:44.522888Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:37:44.523173Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 2(current 0) received from actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-28T09:37:44.525137Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-28T09:37:44.525243Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:44.526349Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-28T09:37:44.526476Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:44.526545Z node 2 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:44.526941Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:44.527251Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:186:2199] 2025-03-28T09:37:44.528182Z node 2 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-28T09:37:44.528244Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [2:186:2199] 2025-03-28T09:37:44.528303Z node 2 :PERSQUEUE DEBUG ... LATING 2025-03-28T09:37:46.641425Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from PLANNED to CALCULATING 2025-03-28T09:37:46.641513Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 110, TxId 67891 2025-03-28T09:37:46.641812Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 110, TxId 67891, Partition 0, Predicate 1 2025-03-28T09:37:46.641853Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Handle TEvTxCalcPredicateResult 2025-03-28T09:37:46.641887Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Partition responses 1/1 2025-03-28T09:37:46.641925Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2025-03-28T09:37:46.641962Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATING 2025-03-28T09:37:46.642000Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATING FrontTxId 67891 2025-03-28T09:37:46.642036Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-03-28T09:37:46.642081Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState CALCULATED 2025-03-28T09:37:46.642120Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATING to CALCULATED 2025-03-28T09:37:46.642294Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: CALCULATED MinStep: 152 MaxStep: 30152 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 110 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-03-28T09:37:46.642428Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:37:46.649664Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:37:46.649739Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-03-28T09:37:46.649778Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATED 2025-03-28T09:37:46.649820Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATED FrontTxId 67891 2025-03-28T09:37:46.649860Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState WAIT_RS 2025-03-28T09:37:46.649896Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATED to 
WAIT_RS 2025-03-28T09:37:46.649938Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-03-28T09:37:46.649981Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-03-28T09:37:46.650061Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 2025-03-28T09:37:46.654669Z node 6 :PERSQUEUE DEBUG: Client pipe to tablet 72057594037927937 from 22222 is reset 2025-03-28T09:37:46.677952Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:37:46.680561Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:37:46.681459Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] has a tx info 2025-03-28T09:37:46.681501Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 110, PlanTxId 67891, ExecStep 110, ExecTxId 67891 2025-03-28T09:37:46.681607Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] ReadRange pair. Key tx_00000000000000067890, Status 0 2025-03-28T09:37:46.681679Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Restore Tx. TxId: 67890, Step: 100, State: EXECUTED, WriteId: 2025-03-28T09:37:46.681727Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] ReadRange pair. Key tx_00000000000000067891, Status 0 2025-03-28T09:37:46.681755Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Restore Tx. TxId: 67891, Step: 110, State: CALCULATED, WriteId: 2025-03-28T09:37:46.681775Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Fix tx state 2025-03-28T09:37:46.681809Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=2, PlannedTxs.size=2 2025-03-28T09:37:46.681842Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] top tx queue (100, 67890) 2025-03-28T09:37:46.681876Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxsOrder: 67890 EXECUTED 0 2025-03-28T09:37:46.681906Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxsOrder: 67891 PLANNED 0 2025-03-28T09:37:46.682330Z node 6 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:46.682362Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937] has a tx writes info 2025-03-28T09:37:46.682434Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:46.682668Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:46.682881Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [6:360:2338] 2025-03-28T09:37:46.683487Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDiskStatusStep 2025-03-28T09:37:46.684238Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitMetaStep 2025-03-28T09:37:46.684405Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-03-28T09:37:46.684830Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-03-28T09:37:46.685030Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataStep 2025-03-28T09:37:46.685072Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:37:46.685107Z node 6 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-28T09:37:46.685141Z node 6 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-28T09:37:46.685182Z node 6 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [6:360:2338] 2025-03-28T09:37:46.685237Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:37:46.685288Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:46.685367Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 6 2025-03-28T09:37:46.685594Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-03-28T09:37:46.685633Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-03-28T09:37:46.685747Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-03-28T09:37:46.685774Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-03-28T09:37:46.685801Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-03-28T09:37:46.685828Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-03-28T09:37:46.685857Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 2025-03-28T09:37:46.685882Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-03-28T09:37:46.685910Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-03-28T09:37:46.685937Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-28T09:37:46.685962Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-03-28T09:37:46.685987Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PLANNED 2025-03-28T09:37:46.686004Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PLANNED 2025-03-28T09:37:46.686019Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State PLANNED FrontTxId 67891 2025-03-28T09:37:46.686035Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxQueue.size 1 2025-03-28T09:37:46.686060Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] New ExecStep 110, ExecTxId 67891 2025-03-28T09:37:46.686098Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState CALCULATING 2025-03-28T09:37:46.686142Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from PLANNED to CALCULATING 2025-03-28T09:37:46.686241Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 110, TxId 67891 2025-03-28T09:37:46.686546Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 110, TxId 67891, Partition 0, Predicate 1 2025-03-28T09:37:46.686569Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Handle TEvTxCalcPredicateResult 2025-03-28T09:37:46.686598Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] Partition responses 1/1 2025-03-28T09:37:46.686631Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state 
CALCULATING 2025-03-28T09:37:46.686655Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATING 2025-03-28T09:37:46.686704Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATING FrontTxId 67891 2025-03-28T09:37:46.686734Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 1 2025-03-28T09:37:46.686763Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState CALCULATED 2025-03-28T09:37:46.686791Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATING to CALCULATED 2025-03-28T09:37:46.686907Z node 6 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: CALCULATED MinStep: 152 MaxStep: 30152 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "user" Path: "/topic" } Step: 110 Predicate: true Kind: KIND_DATA SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { } 2025-03-28T09:37:46.686973Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:37:46.687200Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-03-28T09:37:46.687229Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 2025-03-28T09:37:46.689855Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:37:46.689893Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state CALCULATED 2025-03-28T09:37:46.689933Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State CALCULATED 2025-03-28T09:37:46.689966Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 State CALCULATED FrontTxId 67891 2025-03-28T09:37:46.689998Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState WAIT_RS 2025-03-28T09:37:46.690035Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891 moved from CALCULATED to WAIT_RS 2025-03-28T09:37:46.690064Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 
2025-03-28T09:37:46.690093Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 2025-03-28T09:37:46.690148Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 >> TPartitionTests::Batching >> UpsertLoad::ShouldCreateTable [GOOD] >> UpsertLoad::ShouldDropCreateTable >> TPQTabletTests::UpdateConfig_1 >> TPartitionTests::ShadowPartitionCountersFirstClass [GOOD] >> TPQTest::TestReadRuleVersions |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> TPartitionTests::Batching [GOOD] >> TPartitionTests::ShadowPartitionCountersRestore >> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD] >> KqpImmediateEffects::Interactive [GOOD] >> TPQTabletTests::UpdateConfig_1 [GOOD] >> TColumnShardTestSchema::InternalTTL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowIf-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 63678, MsgBus: 23774 2025-03-28T09:37:36.400221Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792241852419278:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:36.400347Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001366/r3tmp/tmpw9Plfb/pdisk_1.dat 2025-03-28T09:37:36.725423Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63678, node 1 2025-03-28T09:37:36.772786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:36.772889Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:36.774736Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:36.822409Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:36.822466Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:36.822480Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:36.822587Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23774 TClient is connected to server localhost:23774 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:37:37.332346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:37.355910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:37.494809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:37.655840Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:37.730419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:39.383499Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792254737322925:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.383606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.743974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.769292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.794487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.820745Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.851458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.885156Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:39.927069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792254737323438:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.927146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.927386Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792254737323443:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.931028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:39.940456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792254737323445:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:40.026447Z node 1 :TX_PROXY ERROR: Actor# [1:7486792259032290796:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:40.855150Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.400321Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792241852419278:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:41.400380Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3222, MsgBus: 23736 2025-03-28T09:37:42.198096Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792268182046618:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:42.198153Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001366/r3tmp/tmpsqxJRY/pdisk_1.dat 2025-03-28T09:37:42.281189Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3222, node 2 2025-03-28T09:37:42.326435Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:42.326524Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:42.328507Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:42.344337Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:42.344356Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:42.344364Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:42.344470Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23736 TClient is connected to server localhost:23736 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:42.695782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:42.713620Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:42.758538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:42.918726Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:42.982517Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:44.941917Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792276771982981:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:44.941984Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:44.985569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.014399Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.047392Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.078228Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.109216Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.141571Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.187661Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792281066950786:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:45.187739Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:45.187834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792281066950791:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:45.191315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:45.200070Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792281066950793:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:45.265660Z node 2 :TX_PROXY ERROR: Actor# [2:7486792281066950848:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:46.184028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> TPartitionTests::CommitOffsetRanges >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] >> TPQTabletTests::UpdateConfig_2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 24583, MsgBus: 3675 2025-03-28T09:37:37.138052Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792246903274171:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:37.138101Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00135c/r3tmp/tmp7vUT3n/pdisk_1.dat 2025-03-28T09:37:37.467980Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:37.508737Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:37.508856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:37.510856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24583, node 1 2025-03-28T09:37:37.555774Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:37.555792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:37.555801Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:37.555900Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3675 TClient is connected to server localhost:3675 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:37:38.108675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:38.140766Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:38.316042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:38.475140Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T09:37:38.539721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.186126Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792259788177847:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.186238Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.481343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.512692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.533199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.556059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.580345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.609625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.648463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792259788178354:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.648549Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792259788178359:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.648562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.651786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:40.662197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792259788178361:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:40.725950Z node 1 :TX_PROXY ERROR: Actor# [1:7486792259788178415:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 31927, MsgBus: 17505 2025-03-28T09:37:42.413735Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792265261357670:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:42.413809Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00135c/r3tmp/tmpSQvO73/pdisk_1.dat 2025-03-28T09:37:42.493852Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31927, node 2 2025-03-28T09:37:42.541486Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:42.541556Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:42.542835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:42.553322Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:42.553342Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:42.553347Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:42.553416Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17505 TClient is connected to server localhost:17505 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:42.878286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:42.896494Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:37:42.949677Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:43.086468Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:43.153647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:45.214144Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792278146261331:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:45.214229Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:45.254030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.291035Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.323241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.348361Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.399471Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.437690Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.485073Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792278146261841:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:45.485168Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:45.485474Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792278146261846:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:45.489012Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:45.498791Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792278146261849:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:45.567420Z node 2 :TX_PROXY ERROR: Actor# [2:7486792278146261902:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpEffects::UpdateOn_Literal [GOOD] >> TSourceIdTests::SourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageMinDS [GOOD] >> TSourceIdTests::SourceIdStorageTestClean >> TPartitionTests::ShadowPartitionCountersRestore [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink [GOOD] >> TSourceIdTests::SourceIdStorageTestClean [GOOD] >> TSourceIdTests::SourceIdStorageDeleteByMaxCount [GOOD] >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] >> TPartitionTests::TestBatchingWithChangeConfig >> TPQTest::TestAccountReadQuota >> TPQTabletTests::UpdateConfig_2 [GOOD] >> TPartitionTests::CommitOffsetRanges [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders >> TPartitionTests::AfterRestart_1 >> TPQTest::TestUserInfoCompatibility >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 14074, MsgBus: 18897 2025-03-28T09:37:36.907024Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792241955875399:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:36.907135Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001361/r3tmp/tmprwdGSm/pdisk_1.dat 2025-03-28T09:37:37.286426Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14074, node 1 2025-03-28T09:37:37.296961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:37.297044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:37.304383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:37.351755Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:37.351776Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:37.351783Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:37.351900Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18897 TClient is connected to server localhost:18897 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:37.935343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:37.975884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:38.124746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:38.301474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:38.369806Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:39.868710Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792254840779057:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:39.868805Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.171169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.196263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.223825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.272200Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.302653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.333206Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.381342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792259135746865:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.381464Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.382881Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792259135746870:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.386497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:40.415262Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792259135746872:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:40.502034Z node 1 :TX_PROXY ERROR: Actor# [1:7486792259135746928:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:41.140642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 1375, MsgBus: 13975 2025-03-28T09:37:42.179007Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792267895636444:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:42.179085Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001361/r3tmp/tmpuAk1UB/pdisk_1.dat 2025-03-28T09:37:42.267914Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1375, node 2 2025-03-28T09:37:42.311956Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:42.312037Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:42.313511Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:42.323561Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:42.323576Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:42.323582Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:42.323663Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13975 TClient is connected to server localhost:13975 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:42.688050Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:37:42.706524Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:42.775835Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:42.904253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:42.969061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:45.120180Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792280780540097:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:45.120285Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:45.151194Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.185031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.221668Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.290103Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.321782Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.350562Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.431621Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792280780540614:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:45.431699Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:45.431868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792280780540619:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:45.435236Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:45.445020Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792280780540621:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:45.503739Z node 2 :TX_PROXY ERROR: Actor# [2:7486792280780540674:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:46.446966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.179450Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792267895636444:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:47.179518Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] Test command err: 2025-03-28T09:37:45.950075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:45.950545Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:45.950607Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001d46/r3tmp/tmprfLoHZ/pdisk_1.dat 2025-03-28T09:37:46.323910Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.368194Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:46.408546Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:46.408686Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:46.420109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:46.502107Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.799673Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2025-03-28T09:37:46.799780Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2025-03-28T09:37:46.808291Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} started# 5 actors each with inflight# 4 2025-03-28T09:37:46.808388Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-28T09:37:46.808454Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-28T09:37:46.808489Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-28T09:37:46.808513Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-28T09:37:46.808567Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2025-03-28T09:37:46.811654Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} session: ydb://session/3?node_id=1&id=ZTc3ZmVmMjctM2JiODBiMGItOTA4NjNjODEtYmYyYjc0ZA== 2025-03-28T09:37:46.813388Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} session: ydb://session/3?node_id=1&id=ZGRiYzUzLTNhZGUxMmY1LTgzNDRmYTE2LTg1ZGQxYjBm 2025-03-28T09:37:46.816172Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} session: 
ydb://session/3?node_id=1&id=Yzk3MGNjYTktNTZjNGY3Zi05OGNmN2NmYy1kMTFhZWM2Yw== 2025-03-28T09:37:46.816223Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} session: ydb://session/3?node_id=1&id=N2I2NDE0NDQtMTgyNjgxZmMtMjFkODMxYWQtZjU4MDlhMWM= 2025-03-28T09:37:46.817667Z node 1 :DS_LOAD_TEST DEBUG: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} session: ydb://session/3?node_id=1&id=ODQzNzBmMmItMTA5ZDdkNjgtOWFlOGVkNTYtNWJjNDdkNWE= 2025-03-28T09:37:46.822434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:752:2634], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.822565Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.822636Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.822743Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:782:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.822803Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:783:2659], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.823237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:781:2657], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.823693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.835271Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:37:46.884289Z node 1 :TX_PROXY ERROR: Actor# [1:799:2675] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:37:46.885266Z node 1 :TX_PROXY ERROR: Actor# [1:800:2676] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:37:46.885946Z node 1 :TX_PROXY ERROR: Actor# [1:801:2677] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:37:46.886350Z node 1 :TX_PROXY ERROR: Actor# [1:811:2678] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:37:47.038134Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:795:2671], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:37:47.038314Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:790:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:37:47.038402Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:791:2667], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:37:47.038456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:792:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:37:47.038503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:793:2669], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:37:47.073137Z node 1 :TX_PROXY ERROR: Actor# [1:900:2741] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:47.747408Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 2} finished in 1743154667.747353s, errors=0 2025-03-28T09:37:47.747718Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1743154667747 OperationsOK: 4 OperationsError: 0 } 2025-03-28T09:37:47.761139Z node 1 :TX_PROXY ERROR: Actor# [1:953:2779] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:47.822489Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 5} finished in 1743154667.822450s, errors=0 2025-03-28T09:37:47.822671Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1743154667822 OperationsOK: 4 OperationsError: 0 } 2025-03-28T09:37:47.836426Z node 1 :TX_PROXY ERROR: Actor# [1:984:2801] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:47.897788Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 3} finished in 1743154667.897747s, errors=0 2025-03-28T09:37:47.898052Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1743154667897 OperationsOK: 4 OperationsError: 0 } 2025-03-28T09:37:47.911658Z node 1 :TX_PROXY ERROR: Actor# [1:1015:2823] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:47.975389Z node 1 :TX_PROXY ERROR: Actor# [1:1045:2844] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:47.991714Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 4} finished in 1743154667.991659s, errors=0 2025-03-28T09:37:47.992058Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1743154667991 OperationsOK: 4 OperationsError: 0 } 2025-03-28T09:37:48.036731Z node 1 :DS_LOAD_TEST NOTICE: TKqpUpsertActor# {Tag: 0, parent: [1:739:2621], subTag: 1} finished in 1743154668.036698s, errors=0 2025-03-28T09:37:48.036948Z node 1 :DS_LOAD_TEST INFO: kqp# {Tag: 0, parent: [1:738:2620], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1743154668036 OperationsOK: 4 OperationsError: 0 } 2025-03-28T09:37:48.037039Z node 
1 :DS_LOAD_TEST NOTICE: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:738:2620], subTag: 2} finished in 1.229005s, oks# 20, errors# 0 2025-03-28T09:37:48.037164Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 >> TPQTest::DirectReadBadSessionOrPipe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2405, MsgBus: 25636 2025-03-28T09:37:37.702205Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792245521800454:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:37.702253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001355/r3tmp/tmp9SV6tc/pdisk_1.dat 2025-03-28T09:37:38.079327Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2405, node 1 2025-03-28T09:37:38.122498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:38.122616Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:38.124275Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:38.181843Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:38.181866Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:38.181872Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:38.182026Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25636 TClient is connected to server localhost:25636 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:38.696083Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:38.718494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:37:38.829125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:38.970897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:39.026781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.570484Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792258406704111:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.570613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.901827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.928186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.953480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.982849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.009157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.038032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.075128Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792262701671919:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.075188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.075289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792262701671924:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.078375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:41.087069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792262701671926:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:41.201328Z node 1 :TX_PROXY ERROR: Actor# [1:7486792262701671981:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:41.863000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 63635, MsgBus: 5793 2025-03-28T09:37:43.024700Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792273255108121:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:43.024910Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001355/r3tmp/tmpJXbQHG/pdisk_1.dat 2025-03-28T09:37:43.131051Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63635, node 2 2025-03-28T09:37:43.160326Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:43.160428Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:43.162151Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:43.187556Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:43.187576Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:43.187583Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:43.187709Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5793 TClient is connected to server localhost:5793 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:43.548568Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:37:43.555551Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:37:43.564393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:43.608956Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:43.773958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:43.834230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:45.805344Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792281845044477:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:45.805423Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:45.844856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.873210Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.899606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.955530Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.023577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.092178Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.131904Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792286140012290:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.131973Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792286140012295:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.131984Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.135200Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:46.143872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792286140012297:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:46.239906Z node 2 :TX_PROXY ERROR: Actor# [2:7486792286140012351:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:47.244063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> TPQTest::TestWritePQCompact >> TPQTest::TestReadRuleVersions [GOOD] >> TPQTest::TestReserveBytes >> TPartitionTests::AfterRestart_1 [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::UpdateOn_Literal [GOOD] Test command err: Trying to start YDB, gRPC: 22597, MsgBus: 10984 2025-03-28T09:37:37.674136Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792247292118966:2241];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:37.674186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00135b/r3tmp/tmpUhmDRv/pdisk_1.dat 2025-03-28T09:37:38.006063Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:38.025032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:38.025157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:38.026949Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22597, node 1 2025-03-28T09:37:38.125396Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:38.125417Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:38.125420Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:38.125498Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10984 TClient is connected to server localhost:10984 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:38.634583Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:38.656885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:38.800880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:38.946855Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:38.999996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.584256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792260177022447:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.584369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.896293Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.921625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.943066Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.965968Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.990417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.017312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.052320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792264471990252:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.052407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.052472Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792264471990257:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.055412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:41.063989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792264471990259:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:41.136197Z node 1 :TX_PROXY ERROR: Actor# [1:7486792264471990313:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 24186, MsgBus: 23969 2025-03-28T09:37:43.260818Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792273214034456:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:43.260898Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00135b/r3tmp/tmpWn1tNx/pdisk_1.dat 2025-03-28T09:37:43.357641Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24186, node 2 2025-03-28T09:37:43.387205Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:43.387333Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:43.389134Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:43.407307Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:43.407331Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:43.407338Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:43.407491Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23969 TClient is connected to server localhost:23969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:43.781051Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:37:43.787168Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:37:43.797465Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:43.875323Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:44.031909Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:44.097227Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:46.382708Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792286098938102:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.382790Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.426575Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.454485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.480295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.504865Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.529003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.557672Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.645156Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792286098938615:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.645243Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.645413Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792286098938620:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.648293Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:46.655883Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792286098938622:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:46.746042Z node 2 :TX_PROXY ERROR: Actor# [2:7486792286098938677:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::InternalTTL [GOOD] Test command err: 2025-03-28T09:37:22.999879Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:37:23.067294Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:37:23.071150Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:37:23.071472Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:37:23.089210Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:37:23.089511Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:37:23.096351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:37:23.096508Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:37:23.096682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:37:23.096754Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:37:23.096813Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:37:23.096888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:37:23.096968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:37:23.097045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:37:23.097153Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:37:23.097239Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:37:23.097312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:37:23.097382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:37:23.115091Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:102:2136], Recipient [1:139:2171]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:37:23.118735Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:37:23.118988Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:37:23.119026Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:37:23.119178Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:37:23.119297Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:37:23.119354Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:37:23.119392Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:37:23.119452Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:37:23.119506Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:37:23.119548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:37:23.119574Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:37:23.119704Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:37:23.119762Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 
2025-03-28T09:37:23.119794Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:37:23.119817Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:37:23.119908Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:37:23.119979Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:37:23.120035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:37:23.120073Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:37:23.120154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:37:23.120191Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:37:23.120227Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:37:23.120292Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:37:23.120331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:37:23.120359Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:37:23.120735Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-03-28T09:37:23.120840Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-03-28T09:37:23.120907Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=28; 2025-03-28T09:37:23.120984Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-03-28T09:37:23.121180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:37:23.121248Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:37:23.121282Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:37:23.121487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:37:23.121534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:37:23.121577Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:37:23.121772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:37:23.121816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:37:23.121845Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:37:23.122005Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:37:23.122036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:37:23.122055Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T0 ... 
mp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:47.635054Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:47.635075Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:37:47.635098Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:37:47.635158Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:37:47.635214Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:71;schema=saved_at: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:47.635235Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:37:47.635280Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;);columns=1;rows=71; 2025-03-28T09:37:47.635305Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=568;num_rows=71;batch_columns=saved_at; 2025-03-28T09:37:47.635455Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:684:2700];bytes=568;rows=71;faults=0;finished=0;fault=0;schema=saved_at: timestamp[us]; 2025-03-28T09:37:47.635527Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:47.635591Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:47.635683Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:47.635758Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:37:47.635807Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:47.635850Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:47.635878Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:691:2707] finished for tablet 9437184 2025-03-28T09:37:47.636267Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:684:2700];stats={"p":[{"events":["f_bootstrap"],"t":0.052},{"events":["f_ProduceResults"],"t":0.315},{"events":["l_bootstrap"],"t":0.511},{"events":["f_processing","f_task_result"],"t":0.53},{"events":["l_task_result"],"t":6.318},{"events":["f_ack"],"t":6.353},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":7.047}],"full":{"a":1743154660588546,"name":"_full_task","f":1743154660588546,"d_finished":0,"c":0,"l":1743154667635932,"d":7047386},"events":[{"name":"bootstrap","f":1743154660640585,"d_finished":459401,"c":1,"l":1743154661099986,"d":459401},{"a":1743154667635749,"name":"ack","f":1743154666941962,"d_finished":642752,"c":903,"l":1743154667635708,"d":642935},{"a":1743154667635741,"name":"processing","f":1743154661119291,"d_finished":3105876,"c":4515,"l":1743154667635710,"d":3106067},{"name":"ProduceResults","f":1743154660904004,"d_finished":1251726,"c":5420,"l":1743154667635862,"d":1251726},{"a":1743154667635864,"name":"Finish","f":1743154667635864,"d_finished":0,"c":0,"l":1743154667635932,"d":68},{"name":"task_result","f":1743154661119333,"d_finished":2386228,"c":3612,"l":1743154666906799,"d":2386228}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:47.636330Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:684:2700];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:37:47.636657Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:684:2700];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0.052},{"events":["f_ProduceResults"],"t":0.315},{"events":["l_bootstrap"],"t":0.511},{"events":["f_processing","f_task_result"],"t":0.53},{"events":["l_task_result"],"t":6.318},{"events":["f_ack"],"t":6.353},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":7.047}],"full":{"a":1743154660588546,"name":"_full_task","f":1743154660588546,"d_finished":0,"c":0,"l":1743154667636367,"d":7047821},"events":[{"name":"bootstrap","f":1743154660640585,"d_finished":459401,"c":1,"l":1743154661099986,"d":459401},{"a":1743154667635749,"name":"ack","f":1743154666941962,"d_finished":642752,"c":903,"l":1743154667635708,"d":643370},{"a":1743154667635741,"name":"processing","f":1743154661119291,"d_finished":3105876,"c":4515,"l":1743154667635710,"d":3106502},{"name":"ProduceResults","f":1743154660904004,"d_finished":1251726,"c":5420,"l":1743154667635862,"d":1251726},{"a":1743154667635864,"name":"Finish","f":1743154667635864,"d_finished":0,"c":0,"l":1743154667636367,"d":503},{"name":"task_result","f":1743154661119333,"d_finished":2386228,"c":3612,"l":1743154666906799,"d":2386228}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;;); 2025-03-28T09:37:47.636711Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:37:40.535657Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=903;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=7037528;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7037528;selected_rows=0; 2025-03-28T09:37:47.636748Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:37:47.636907Z node 1 :TX_COLUMNSHARD_SCAN INFO: SelfId=[1:691:2707];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29635, MsgBus: 8762 2025-03-28T09:37:37.777913Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792244426912956:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:37.777958Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001352/r3tmp/tmpcm9OZM/pdisk_1.dat 2025-03-28T09:37:38.185131Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:38.192217Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:38.192334Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:38.199536Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29635, node 1 2025-03-28T09:37:38.263735Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:38.263766Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:38.263779Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:38.263866Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8762 TClient is connected to server localhost:8762 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:38.801217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:38.833129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:38.948649Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:37:39.090771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:39.153259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.616717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792257311816628:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.616838Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.868085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.891848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.915217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.939166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.964810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.994257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.031948Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792261606784434:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.032072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792261606784439:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.032090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.035574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:41.046821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792261606784441:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:41.126584Z node 1 :TX_PROXY ERROR: Actor# [1:7486792261606784495:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:41.942131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2979, MsgBus: 61054 2025-03-28T09:37:42.969982Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792266492269702:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:42.970038Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001352/r3tmp/tmp88qrxM/pdisk_1.dat 2025-03-28T09:37:43.068240Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2979, node 2 2025-03-28T09:37:43.113972Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:43.114078Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:43.115706Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:43.134803Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:43.134833Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:43.134842Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:43.134966Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61054 TClient is connected to server localhost:61054 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:43.534298Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:37:43.547407Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:43.596528Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:43.728887Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:43.804291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:46.142064Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792283672140661:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.142210Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.184900Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.214028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.240153Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.267405Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.292536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.323464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.401197Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792283672141172:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.401315Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.401381Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792283672141177:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.404341Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:46.411614Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792283672141179:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:46.499561Z node 2 :TX_PROXY ERROR: Actor# [2:7486792283672141235:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:47.335395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.970254Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792266492269702:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:47.970325Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TPartitionTests::AfterRestart_2 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] >> CacheEviction::DeleteKeys [GOOD] >> PQCountersLabeled::Partition >> TPartitionTests::AfterRestart_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] Test command err: Trying to start YDB, gRPC: 11395, MsgBus: 30156 2025-03-28T09:37:37.963758Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792246544340740:2184];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:37.964206Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00134a/r3tmp/tmpd4uqQJ/pdisk_1.dat 2025-03-28T09:37:38.389403Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:38.397354Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:38.397484Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:38.402054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 11395, node 1 2025-03-28T09:37:38.476138Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:38.476161Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:38.476171Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:38.476306Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30156 TClient is connected to server localhost:30156 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:38.973330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:38.996759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:39.110028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:39.251660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:39.328414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.978651Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792259429244292:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.978778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.256169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.282307Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.345312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.366614Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.393772Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.422294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.455397Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792263724212098:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.455480Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.455573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792263724212103:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.458987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:41.468288Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792263724212105:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:41.533259Z node 1 :TX_PROXY ERROR: Actor# [1:7486792263724212157:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:42.337368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.955262Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792246544340740:2184];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:42.988030Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21507, MsgBus: 24173 2025-03-28T09:37:43.944601Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792270868478306:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:43.944640Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00134a/r3tmp/tmp888B3C/pdisk_1.dat 2025-03-28T09:37:44.065702Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21507, node 2 2025-03-28T09:37:44.103393Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:44.103513Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:44.104716Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:44.116568Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:44.116588Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:44.116595Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:44.116734Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24173 TClient is connected to server localhost:24173 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:44.545092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:44.557445Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:44.608616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:44.740215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:44.814317Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:46.767460Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792283753381961:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.767523Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.814496Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.841085Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.866223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.893985Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.924258Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.960360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.000705Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792283753382473:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.000772Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.000872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792288048349774:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.004335Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:47.013935Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792288048349776:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:47.112167Z node 2 :TX_PROXY ERROR: Actor# [2:7486792288048349831:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:47.916282Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> TPartitionTests::ChangeConfig >> TPQTest::DirectReadBadSessionOrPipe [GOOD] >> TPQTest::DirectReadOldPipe >> TSourceIdTests::SourceIdWriterAddMessage [GOOD] >> TSourceIdTests::SourceIdWriterClean [GOOD] >> TSourceIdTests::SourceIdWriterFormCommand [GOOD] >> TTypeCodecsTest::TestBoolCodec [GOOD] >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD] >> TTypeCodecsTest::TestFixedLenCodec [GOOD] >> TTypeCodecsTest::TestVarLenCodec [GOOD] >> TTypeCodecsTest::TestVarIntCodec [GOOD] >> TTypeCodecsTest::TestZigZagCodec [GOOD] >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28138, MsgBus: 1284 2025-03-28T09:37:39.225730Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792256488369329:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:39.225785Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00133f/r3tmp/tmpB7K5tq/pdisk_1.dat 2025-03-28T09:37:39.556567Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 28138, node 1 2025-03-28T09:37:39.615675Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:39.615791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:39.617739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:39.643275Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:39.643344Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:39.643357Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:39.643517Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1284 TClient is connected to server localhost:1284 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:40.108287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.134680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:40.268552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.412675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.489025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:41.797047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792265078305691:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.797175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.041304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.065455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.087467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.112080Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.136188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.178220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.216751Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792269373273495:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.216828Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.217054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792269373273500:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.220382Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:42.228919Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792269373273502:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:42.283989Z node 1 :TX_PROXY ERROR: Actor# [1:7486792269373273558:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:42.964218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 8361, MsgBus: 29674 2025-03-28T09:37:44.459314Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792276241663880:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:44.460919Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00133f/r3tmp/tmpzU81KC/pdisk_1.dat 2025-03-28T09:37:44.567231Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:44.587509Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:44.587583Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:44.589735Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8361, node 2 2025-03-28T09:37:44.638914Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:44.638949Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:44.638957Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:44.639064Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29674 TClient is connected to server localhost:29674 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. waiting... 2025-03-28T09:37:45.036868Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:37:45.060402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.118534Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:45.288694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:45.355334Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:47.569313Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792289126567535:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.569395Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.611961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.637220Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.663374Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.690254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.718850Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.747521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.781142Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792289126568042:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.781216Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.781225Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792289126568047:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.783624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:47.791506Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792289126568049:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:47.858376Z node 2 :TX_PROXY ERROR: Actor# [2:7486792289126568103:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:48.589513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] >> TPartitionTests::SetOffset >> KqpImmediateEffects::UpsertExistingKey [GOOD] >> TPartitionTests::ChangeConfig [GOOD] >> TPQTest::TestUserInfoCompatibility [GOOD] >> TPQTest::TestWaitInOwners >> TPQTabletTests::Parallel_Transactions_1 >> TPartitionChooserSuite::THashChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestDeltaZigZagCodec [GOOD] Test command err: Size: 8002 Create chunk: 0.000237s Read by index: 0.000012s Iterate: 0.000149s Size: 8256 Create chunk: 0.000120s Read by index: 0.000019s Iterate: 0.000090s Size: 8532 Create chunk: 0.000105s Read by index: 0.000025s Iterate: 0.000023s Size: 7769 Create chunk: 0.000084s Read by index: 0.000028s Iterate: 0.000025s Size: 2853 Create chunk: 0.000059s Read by index: 0.000048s Iterate: 0.000022s Size: 2419 Create chunk: 0.000056s Read by index: 0.000046s Iterate: 0.000025s Size: 2929 Create chunk: 0.000093s Read by index: 0.000080s Iterate: 0.000025s Size: 2472 Create chunk: 0.000075s Read by index: 0.000047s Iterate: 0.000026s Size: 2407 Create chunk: 0.000068s Read by index: 0.000056s Iterate: 0.000026s Size: 2061 Create chunk: 0.000066s Read by index: 0.000058s Iterate: 0.000041s >> TPQTest::TestSeveralOwners >> TPQTest::TestMessageNo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] Test command err: 2025-03-28T09:35:20.468756Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:20.468982Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:20.469019Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b11/r3tmp/tmpubp61L/pdisk_1.dat 2025-03-28T09:35:20.745882Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13487, node 1 2025-03-28T09:35:20.933836Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:20.933881Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:20.933901Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:20.934178Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:20.938509Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:35:21.015493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:21.015658Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:21.028757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22602 2025-03-28T09:35:21.517470Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:23.858803Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:23.881968Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:23.882053Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:23.918970Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:23.920828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:24.136716Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:24.137447Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:24.138090Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:24.138250Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:24.138730Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:24.139037Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:24.139143Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:24.139243Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:24.139314Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:24.305379Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:24.305547Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:24.318791Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:24.474720Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:24.517863Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:24.518061Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:24.544712Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:24.544827Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:24.544974Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:24.545014Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:24.545051Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:24.545102Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:24.545146Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:24.545184Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:24.545503Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:24.571558Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:24.571646Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:24.577443Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:24.582409Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:24.582623Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:24.587331Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:24.600500Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:24.600548Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:24.600635Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:24.610882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:24.616090Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:24.616202Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:24.780208Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:24.909102Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:24.985646Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:25.803717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:25.803820Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:25.819000Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:26.113144Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:26.113400Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:26.113656Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:26.113775Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:26.113894Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:26.114033Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:26.114189Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:26.114298Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:26.114379Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:26.114454Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:26.114526Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:26.114597Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:26.165536Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:26.165641Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process= ... 49:6444], schemeshard count = 1 2025-03-28T09:37:47.892531Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:47.892585Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:37:47.892634Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:47.892666Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:47.895971Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:47.910756Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:47.911473Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:47.911547Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:47.912295Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:37:47.912342Z node 2 :STATISTICS WARN: [72075186224037894] TTxResponseTabletDistribution::Execute. Some tablets do not exist in Hive anymore; tablet count = 3 2025-03-28T09:37:47.912384Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:48.991483Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:48.991572Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:48.992021Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:49.004945Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:49.005157Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:37:49.005877Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8712:6531], server id = [2:8717:6536], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:49.006281Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8712:6531], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:49.007270Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8713:6532], server id = [2:8718:6537], tablet id = 72075186224037900, status = OK 2025-03-28T09:37:49.007323Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8713:6532], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:49.008035Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8714:6533], server id = [2:8719:6538], tablet id = 72075186224037901, status = OK 2025-03-28T09:37:49.008109Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8714:6533], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:49.009024Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8715:6534], server id = [2:8720:6539], tablet id = 72075186224037902, status = OK 2025-03-28T09:37:49.009077Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8715:6534], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:49.009580Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8716:6535], server id = [2:8721:6540], tablet id = 72075186224037903, status = OK 2025-03-28T09:37:49.009632Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8716:6535], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:49.014192Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:49.014875Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8712:6531], server id = [2:8717:6536], tablet id = 72075186224037899 2025-03-28T09:37:49.014935Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:49.016452Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:37:49.016822Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8734:6551], server id = [2:8738:6552], tablet id = 72075186224037904, status = OK 2025-03-28T09:37:49.016888Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8734:6551], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:49.017189Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8713:6532], server id = [2:8718:6537], tablet id = 72075186224037900 2025-03-28T09:37:49.017210Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:49.017988Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-28T09:37:49.018415Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8714:6533], server id = [2:8719:6538], tablet id = 72075186224037901 2025-03-28T09:37:49.018454Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:49.018757Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8739:6553], server id = [2:8740:6554], tablet id = 72075186224037905, status 
= OK 2025-03-28T09:37:49.018815Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8739:6553], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:49.019193Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-28T09:37:49.019929Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8715:6534], server id = [2:8720:6539], tablet id = 72075186224037902 2025-03-28T09:37:49.019954Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:49.020181Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-28T09:37:49.020477Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8742:6556], server id = [2:8744:6558], tablet id = 72075186224037906, status = OK 2025-03-28T09:37:49.020523Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8742:6556], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:49.020817Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8716:6535], server id = [2:8721:6540], tablet id = 72075186224037903 2025-03-28T09:37:49.020836Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:49.021270Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8745:6559], server id = [2:8749:6563], tablet id = 72075186224037907, status = OK 2025-03-28T09:37:49.021311Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8745:6559], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:49.021691Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8748:6562], server id = [2:8751:6564], tablet id = 72075186224037908, status = OK 2025-03-28T09:37:49.021748Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8748:6562], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:49.024136Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-28T09:37:49.024971Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8734:6551], server id = [2:8738:6552], tablet id = 72075186224037904 2025-03-28T09:37:49.025002Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:49.025887Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-28T09:37:49.026163Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8739:6553], server id = [2:8740:6554], tablet id = 72075186224037905 2025-03-28T09:37:49.026192Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:49.026887Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-28T09:37:49.027399Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8742:6556], server id = [2:8744:6558], tablet id = 72075186224037906 2025-03-28T09:37:49.027467Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:49.028075Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-28T09:37:49.028278Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8748:6562], server id = [2:8751:6564], tablet id = 72075186224037908 2025-03-28T09:37:49.028304Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:49.028623Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-28T09:37:49.028676Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 
2025-03-28T09:37:49.028932Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:49.029124Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:49.029363Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:49.030679Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8745:6559], server id = [2:8749:6563], tablet id = 72075186224037907 2025-03-28T09:37:49.030728Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:49.031491Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:49.056464Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8778:6587]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:49.056665Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:49.056704Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8778:6587], StatRequests.size() = 1 2025-03-28T09:37:49.173425Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODg2YjE5MDMtMTUxOWY3M2QtZDdiZTM2NWEtM2MxM2Y5MA==, TxId: 2025-03-28T09:37:49.173478Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODg2YjE5MDMtMTUxOWY3M2QtZDdiZTM2NWEtM2MxM2Y5MA==, TxId: ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-03-28T09:37:49.173955Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8786:6593]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:49.174129Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:49.174470Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:49.174504Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:37:49.177441Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:49.177496Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:37:49.177533Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:37:49.182338Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TTypeCodecsTest::TestDeltaVarIntCodecAndRev [GOOD] Test command err: Size: 128 Create chunk: 0.000029s Read by index: 0.000014s Iterate: 0.000012s Size: 252 Create chunk: 0.000037s Read by index: 0.000015s Iterate: 0.000016s Size: 1887 Create chunk: 0.000071s Read by index: 0.000121s Iterate: 0.000087s Size: 1658 Create chunk: 0.000080s Read by index: 0.000224s Iterate: 0.000075s Size: 1889 Create chunk: 0.000068s Read by index: 0.000081s Iterate: 0.000035s Size: 1660 Create chunk: 0.000089s Read by index: 0.000098s Iterate: 0.000038s >> TPQTabletTests::Parallel_Transactions_1 [GOOD] >> TPartitionTests::SetOffset [GOOD] >> PQCountersLabeled::Partition [GOOD] >> PQCountersLabeled::PartitionFirstClass >> TPQTabletTests::Parallel_Transactions_2 >> TPartitionChooserSuite::TBoundaryChooserTest [GOOD] >> TPartitionChooserSuite::TBoundaryChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::THashChooserTest [GOOD] >> TPQUserInfoTest::UserDataDeprecatedSerializaion [GOOD] >> TPQUtilsTest::TLastCounter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ChangeConfig [GOOD] Test command err: 2025-03-28T09:37:47.936016Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:47.936099Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:47.950646Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:177:2192] 2025-03-28T09:37:47.951677Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:177:2192] Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\002\030\003\"\014session-id-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: 
"\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-2" Value: "\010\000\020\004\030\005\"\014session-id-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-2" Value: "\000\000\000\000\000\000\000\000\004\000\000\000\005\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-3" Value: "\010\000\020\006\030\007\"\014session-id-3(\0000\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-3" Value: "\000\000\000\000\000\000\000\000\006\000\000\000\007\000\000\000session-id-3" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\010\030\t\"\014session-id-2(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\010\000\000\000\t\000\000\000session-id-2" StorageChannel: INLINE } 2025-03-28T09:37:48.517067Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:48.517139Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:48.533118Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:179:2194] 2025-03-28T09:37:48.534776Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:37:48.000000Z 2025-03-28T09:37:48.534841Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:179:2194] Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\340\303\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\340\303\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\340\303\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\004\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\004\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-03-28T09:37:49.305785Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:49.305840Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:49.315957Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [3:179:2194] 2025-03-28T09:37:49.317234Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-03-28T09:37:49.000000Z 2025-03-28T09:37:49.317275Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [3:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\310\313\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\316\255\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\004\020\000\030\000\"\007session(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session" StorageChannel: INLINE } 2025-03-28T09:37:49.925576Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:49.925651Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:49.940777Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [4:179:2194] 2025-03-28T09:37:49.942934Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:37:49.000000Z 2025-03-28T09:37:49.942997Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [4:179:2194] 2025-03-28T09:37:50.708749Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:50.708819Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:50.723808Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [5:179:2194] 2025-03-28T09:37:50.726128Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-03-28T09:37:50.000000Z 2025-03-28T09:37:50.726186Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [5:179:2194] Send change config Wait cmd write (initial) Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\260\323\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } Wait commit 1 done Wait cmd write (change config) Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-2" IncludeFrom: true To: "m0000000003cclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-2" IncludeFrom: true To: "m0000000003uclient-2" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\260\323\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-3" Value: "\010\000\020\000\030\000\"\000(\0000\007" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-3" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-1@\000H\000\252\002\016\n\010client-3@\007H\000" StorageChannel: INLINE } Wait config changed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 7993, MsgBus: 61580 2025-03-28T09:37:38.896268Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792250980804494:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:38.896328Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001341/r3tmp/tmpqWbtWS/pdisk_1.dat 2025-03-28T09:37:39.199287Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7993, node 1 2025-03-28T09:37:39.279387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:39.279534Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:39.281259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:39.304088Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:39.304120Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 
2025-03-28T09:37:39.304128Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:39.304267Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61580 TClient is connected to server localhost:61580 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:39.822233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:39.852269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:39.990443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.122851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.199192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:41.824621Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792263865708164:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.824713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.126948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.150247Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.172923Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.196901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.224718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.252278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.290567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792268160675972:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.290657Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.290712Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792268160675977:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.293965Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:42.303054Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792268160675979:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:42.368949Z node 1 :TX_PROXY ERROR: Actor# [1:7486792268160676033:3447] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:43.342776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:43.896479Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792250980804494:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:43.896538Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 32752, MsgBus: 14881 2025-03-28T09:37:45.514106Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792281204315741:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:45.514179Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001341/r3tmp/tmpqnsiUF/pdisk_1.dat 2025-03-28T09:37:45.611242Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32752, node 2 2025-03-28T09:37:45.647496Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:45.647604Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:45.648931Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:45.686896Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:45.686921Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:45.686929Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:45.687029Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14881 TClient is connected to server localhost:14881 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:46.089054Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:46.102119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:46.174150Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:46.324784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:46.400030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:48.222988Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792294089219386:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:48.223080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:48.280486Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:48.308820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:48.335448Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:48.362564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:48.389205Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:48.418083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:48.453393Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792294089219896:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:48.453464Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:48.453497Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792294089219901:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:48.456432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:48.464048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792294089219903:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:48.530748Z node 2 :TX_PROXY ERROR: Actor# [2:7486792294089219957:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:49.415031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:50.052995Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792302679155129:2525], TxId: 281474976715677, task: 1. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZjEyOTNhYzUtM2MxOGE4MTQtYTQ4NWRiNDMtYWU1MjkwY2I=. TraceId : 01jqe1tt5remfwzv1vkpxfy747. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:37:50.053388Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792302679155131:2526], TxId: 281474976715677, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZjEyOTNhYzUtM2MxOGE4MTQtYTQ4NWRiNDMtYWU1MjkwY2I=. TraceId : 01jqe1tt5remfwzv1vkpxfy747. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7486792302679155126:2487], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:37:50.053721Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZjEyOTNhYzUtM2MxOGE4MTQtYTQ4NWRiNDMtYWU1MjkwY2I=, ActorId: [2:7486792298384187511:2487], ActorState: ExecuteState, TraceId: 01jqe1tt5remfwzv1vkpxfy747, Create QueryResponse for error on request, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] Test command err: 2025-03-28T09:37:45.696159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:45.696693Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:45.696782Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001db7/r3tmp/tmpCiuVlK/pdisk_1.dat 2025-03-28T09:37:46.180887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.234946Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:46.281074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:46.281628Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:46.294420Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:46.388030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.701167Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-03-28T09:37:46.701325Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "usertable" 2025-03-28T09:37:46.771153Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:738:2620], subTag: 2} TUpsertActor finished in 0.069403s, errors=0 2025-03-28T09:37:46.771256Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:739:2621] with tag# 2 2025-03-28T09:37:49.658114Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:49.658402Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:49.658480Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001db7/r3tmp/tmpuWGzAL/pdisk_1.dat 2025-03-28T09:37:49.880848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:37:49.906314Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:49.942234Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:49.942328Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:49.953613Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:50.033994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:50.279713Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-03-28T09:37:50.279803Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 TableName: "JustTable" 2025-03-28T09:37:50.346542Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.066395s, errors=0 2025-03-28T09:37:50.346617Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2 >> TPartitionTests::OldPlanStep >> TPQTest::DirectReadOldPipe [GOOD] >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete >> TPQTabletTests::Parallel_Transactions_2 [GOOD] >> TPQTabletTests::PQTablet_Send_RS_With_Abort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertExistingKey [GOOD] Test command err: Trying to start YDB, gRPC: 4187, MsgBus: 5335 2025-03-28T09:37:40.200301Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792257242499280:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:40.200918Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001339/r3tmp/tmpekPVKK/pdisk_1.dat 2025-03-28T09:37:40.540505Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4187, node 1 2025-03-28T09:37:40.584555Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2025-03-28T09:37:40.584636Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:40.586474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:40.610627Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:40.610668Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:40.610702Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:40.610851Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5335 TClient is connected to server localhost:5335 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:41.041754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:41.058718Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:41.168972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:41.321263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:41.386440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:42.746251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792265832435647:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.746439Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:43.074712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:43.108404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:43.139197Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:43.167791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:43.198480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:43.243136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:43.324678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792270127403460:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:43.324757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:43.324907Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792270127403465:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:43.327894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:43.341233Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792270127403467:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:43.396984Z node 1 :TX_PROXY ERROR: Actor# [1:7486792270127403520:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:44.414432Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 20764, MsgBus: 17443 2025-03-28T09:37:45.735642Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792280994544337:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:45.735750Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001339/r3tmp/tmpxobVgg/pdisk_1.dat 2025-03-28T09:37:45.830878Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20764, node 2 2025-03-28T09:37:45.860901Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:45.860999Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:45.862548Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:45.893167Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:45.893189Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:45.893197Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:45.893322Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:17443 TClient is connected to server localhost:17443 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:46.279853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
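The repeated "Resource pool default not found or you don't have access permissions" warnings above, followed by "Check failed: path: '/Root/.metadata/workload_manager/pools/default' ... path exist, request accepts it", show the workload service bootstrapping its default resource pool concurrently with the first queries: the fetch reports NOT_FOUND, the creator actor schedules a retry, and the already-committed create is then accepted as idempotent. From a client's point of view the whole sequence is invisible as long as the first query is retried. A minimal sketch of that pattern, assuming the public YDB Python SDK and placeholder endpoint/database values (none of this code is taken from the test itself):

```python
# Sketch only: ride out the default-resource-pool bootstrap that produces
# the NOT_FOUND warnings above. Endpoint, database and the query text are
# placeholders, not values from this log.
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)  # wait for discovery before issuing queries

def first_query(session):
    # While /Root/.metadata/workload_manager/pools/default is still being
    # created, the server retries internally ("Scheduled retry for error"
    # in the log); the client just runs an ordinary retried transaction.
    return session.transaction().execute("SELECT 1;", commit_tx=True)

with ydb.SessionPool(driver) as pool:
    pool.retry_operation_sync(first_query)
```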
2025-03-28T09:37:46.299119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:46.377765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:46.503074Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:46.577223Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:48.531145Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792293879447976:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:48.531236Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:48.574846Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:48.598362Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:48.623629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:48.650950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:48.679098Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:48.709330Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:48.750417Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792293879448487:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:48.750510Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:48.750642Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792293879448492:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:48.754028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:48.762330Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792293879448494:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:48.826825Z node 2 :TX_PROXY ERROR: Actor# [2:7486792293879448548:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:49.610667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> TPartitionTests::DataTxCalcPredicateOk [GOOD] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQUtilsTest::TLastCounter [GOOD] >> TPartitionTests::OldPlanStep [GOOD] >> TPartitionTests::ReserveSubDomainOutOfSpace >> TPQTabletTests::PQTablet_Send_RS_With_Abort [GOOD] >> TPQTest::TestWriteTimeStampEstimate >> TPartitionTests::DataTxCalcPredicateError >> TPQTest::TestMessageNo [GOOD] >> TPQTest::TestOwnership >> TPQTabletTests::Partition_Send_Predicate_With_False >> TPQTest::TestSeveralOwners [GOOD] >> TPQTest::TestSourceIdDropByUserWrites >> TPartitionTests::IncorrectRange >> TPartitionTests::TestBatchingWithChangeConfig [GOOD] >> TPartitionTests::TestBatchingWithProposeConfig >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] >> TPQTabletTests::Partition_Send_Predicate_With_False [GOOD] >> TPQTest::TestAccountReadQuota [GOOD] >> TPQTest::TestAlreadyWritten >> TPQTabletTests::One_Tablet_For_All_Partitions >> UpsertLoad::ShouldDropCreateTable [GOOD] >> TPartitionTests::ReserveSubDomainOutOfSpace [GOOD] >> TPQTest::TestWriteTimeStampEstimate [GOOD] >> TPQTestInternal::TestAsInt [GOOD] >> TPQTestInternal::TestAsIntWide [GOOD] >> TPQTestInternal::StoreKeys [GOOD] >> TPQTestInternal::RestoreKeys [GOOD] >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] >> TPQTabletTests::One_Tablet_For_All_Partitions [GOOD] >> TPartitionTests::ShadowPartitionCounters >> TPartitionTests::IncorrectRange [GOOD] >> TPQTabletTests::One_New_Partition_In_Another_Tablet >> TPartitionTests::GetPartitionWriteInfoSuccess >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::RestoreKeys [GOOD] Test command err: 2025-03-28T09:37:52.708328Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.708407Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:52.721810Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:176:2191], now have 1 active actors on pipe 2025-03-28T09:37:52.721896Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:37:52.739532Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions 
{ PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "aaa" Generation: 1 Important: true } 2025-03-28T09:37:52.742234Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "aaa" Generation: 1 Important: true } 2025-03-28T09:37:52.742370Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.744043Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "aaa" Generation: 1 Important: true } 2025-03-28T09:37:52.744162Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:52.744228Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:52.744636Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:52.745048Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-03-28T09:37:52.745971Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 
2025-03-28T09:37:52.746039Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:184:2197] 2025-03-28T09:37:52.746089Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:37:52.746653Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:52.746763Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-03-28T09:37:52.746793Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-03-28T09:37:52.746836Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user aaa reinit request with generation 1 2025-03-28T09:37:52.746853Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user aaa reinit with generation 1 done 2025-03-28T09:37:52.746998Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:52.747030Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:52.747071Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:52.747109Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:52.747133Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-28T09:37:52.747148Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-28T09:37:52.747161Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000caaa 2025-03-28T09:37:52.747172Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uaaa 2025-03-28T09:37:52.747191Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:52.747217Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:37:52.747282Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:52.747311Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user aaa readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:52.747541Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:52.747715Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-28T09:37:52.748159Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 
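For reference, the config proto applied above (PartitionIds: 0 and 1; Consumers "user" and "aaa", the latter with Important: true) is the tablet-side materialization of an ordinary topic-creation call. A minimal sketch of a call that would produce a similar shape, assuming the topic API of the YDB Python SDK v3; the method and field names here are assumptions based on the public SDK, not taken from this test:

```python
# Sketch only: a two-partition topic with two consumers, roughly matching
# the PartitionIds / Consumers fields of the config proto above. The API
# names are assumptions based on the public Python SDK, not on this test.
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)

driver.topic_client.create_topic(
    "/Root/topic",
    min_active_partitions=2,  # PartitionIds: 0, 1
    consumers=[
        ydb.TopicConsumer(name="user"),
        ydb.TopicConsumer(name="aaa", important=True),  # Important: true
    ],
)
```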
2025-03-28T09:37:52.748184Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:186:2199] 2025-03-28T09:37:52.748208Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:37:52.748645Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:52.748707Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-03-28T09:37:52.748732Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-03-28T09:37:52.748751Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user aaa reinit request with generation 1 2025-03-28T09:37:52.748765Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user aaa reinit with generation 1 done 2025-03-28T09:37:52.748825Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:52.748846Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:52.748873Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-28T09:37:52.748891Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-28T09:37:52.748905Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-03-28T09:37:52.748921Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-03-28T09:37:52.748932Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001caaa 2025-03-28T09:37:52.748943Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uaaa 2025-03-28T09:37:52.748968Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:52.748993Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-28T09:37:52.749029Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:52.749045Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user aaa readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:52.749121Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:37:52.749259Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-28T09:37:52.752502Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:52.752852Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:52.753096Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:199:2208], now have 1 active actors on pipe 2025-03-28T09:37:52.753589Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:202:2210], now have 1 active actors on pipe 2025-03-28T09:37:52.753632Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:37:52.753668Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:37:52.753737Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid0' SeqNo: 1 partNo : 0 messageNo: 0 size 7 offset: -1 2025-03-28T09:37:52.753800Z node 1 :PERSQUEUE DEBUG: tablet ... opic:0:Initializer] Initializing completed. 2025-03-28T09:37:52.799900Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [1:290:2281] 2025-03-28T09:37:52.799959Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 1 Head Offset 0 PartNo 0 PackedSize 65 count 1 nextOffset 1 batches 1 SYNC INIT sourceId sourceid0 seqNo 1 offset 0 SYNC INIT HEAD KEY: d0000000000_00000000000000000000_00000_0000000001_00000| size 65 2025-03-28T09:37:52.800010Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:52.800092Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Init complete for topic 'topic' Partition: 0 SourceId: sourceid0 SeqNo: 1 offset: 0 MaxOffset: 1 2025-03-28T09:37:52.800146Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-03-28T09:37:52.800195Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-03-28T09:37:52.800255Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user aaa readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-03-28T09:37:52.800664Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-03-28T09:37:52.800712Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-03-28T09:37:52.800759Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-03-28T09:37:52.800796Z node 1 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:37:52.800902Z node 1 :PERSQUEUE DEBUG: Topic 'topic' partition 0 user user readTimeStamp done, result 1000000 queuesize 1 startOffset 0 2025-03-28T09:37:52.800944Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user aaa readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-03-28T09:37:52.800985Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user aaa send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-03-28T09:37:52.801208Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 1 Topic 'topic' partition 0 user aaa offset 0 count 1 size 1024000 endOffset 1 max time lag 0ms effective offset 0 2025-03-28T09:37:52.801241Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 1 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 1 2025-03-28T09:37:52.801272Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 1. All data is from uncompacted head. 2025-03-28T09:37:52.801309Z node 1 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:37:52.801379Z node 1 :PERSQUEUE DEBUG: Topic 'topic' partition 0 user aaa readTimeStamp done, result 1000000 queuesize 0 startOffset 0 2025-03-28T09:37:52.801866Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:314:2294], now have 1 active actors on pipe 2025-03-28T09:37:52.801953Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:37:52.801990Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:37:52.802436Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:37:52.802797Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:317:2296], now have 1 active actors on pipe 2025-03-28T09:37:52.802870Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:37:52.802905Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:37:52.802964Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid0' SeqNo: 2 partNo : 0 messageNo: 1 size 7 offset: -1 2025-03-28T09:37:52.803034Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'topic' partition 0 error: new GetOwnership request needed for owner 2025-03-28T09:37:52.803109Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 2, Error new GetOwnership request needed for owner 2025-03-28T09:37:52.803141Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-03-28T09:37:52.803431Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:319:2298], now have 1 active actors on pipe 2025-03-28T09:37:52.803537Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:37:52.803574Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:37:52.803656Z node 1 :PERSQUEUE INFO: new Cookie default|554f8b45-cba4dcd8-9e2e4294-71dd487d_0 
generated for partition 0 topic 'topic' owner default 2025-03-28T09:37:52.803773Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-28T09:37:52.803844Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:37:52.804140Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:321:2300], now have 1 active actors on pipe 2025-03-28T09:37:52.804212Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:37:52.804237Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:37:52.804323Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid0' SeqNo: 2 partNo : 0 messageNo: 0 size 7 offset: -1 2025-03-28T09:37:52.804404Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Send write quota request. Topic: "topic". Partition: 0. Amount: 16. Cookie: 1 2025-03-28T09:37:52.804500Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Got quota. Topic: "topic". Partition: 0: Cookie: 1 2025-03-28T09:37:52.804645Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob processing sourceId 'sourceid0' seqNo 2 partNo 0 2025-03-28T09:37:52.805592Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob complete sourceId 'sourceid0' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 79 count 1 nextOffset 2 batches 1 2025-03-28T09:37:52.806130Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'topic' partition 0 compactOffset 1,1 HeadOffset 0 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000| size 65 WTime 2000000 2025-03-28T09:37:52.806276Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:52.806317Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:52.806353Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:37:52.806405Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:52.806457Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psourceid0 2025-03-28T09:37:52.806496Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000001_00000_0000000001_00000| 2025-03-28T09:37:52.806518Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:52.806546Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:52.806580Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:37:52.806648Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:37:52.806757Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 1 partNo 0 count 1 size 65 2025-03-28T09:37:52.809042Z node 1 :PERSQUEUE DEBUG: Caching head blob in L1. 
Partition 0 offset 1 count 1 size 65 actorID [1:281:2274] 2025-03-28T09:37:52.809152Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 16 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:52.809204Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:37:52.809256Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid0', Topic: 'topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-03-28T09:37:52.809452Z node 1 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 1 partno 0 count 1 parts 0 size 65 2025-03-28T09:37:52.809529Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] topic 'topicCounters. CacheSize 65 CachedBlobs 1 2025-03-28T09:37:52.809590Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:37:52.809963Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:327:2305], now have 1 active actors on pipe 2025-03-28T09:37:52.810098Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:37:52.810146Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:37:52.810250Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:37:52.810529Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:329:2307], now have 1 active actors on pipe 2025-03-28T09:37:52.810642Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:37:52.810679Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:37:52.810780Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:37:52.811032Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:331:2309], now have 1 active actors on pipe 2025-03-28T09:37:52.811109Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:37:52.811154Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:37:52.811257Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 >> TPartitionTests::DataTxCalcPredicateOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 17976, MsgBus: 28071 2025-03-28T09:37:38.055280Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792250445603044:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:38.055338Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00134b/r3tmp/tmpLHb2Pu/pdisk_1.dat 2025-03-28T09:37:38.438828Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:38.471899Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
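The KqpImmediateEffects tests in this block exercise exactly the failure reported earlier by KQP_COMPUTE: InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION with "Conflict with existing key., code: 2012", i.e. a YQL INSERT hitting a primary key that already exists, whereas UPSERT overwrites it. A minimal sketch of both outcomes, assuming the public YDB Python SDK and a hypothetical table (nothing below is taken from the test code):

```python
# Sketch only: reproduces the "Conflict with existing key" (code 2012)
# class of error seen earlier in this log. Table and values are
# hypothetical; assumes TestTable(Key Uint64 PRIMARY KEY, Value Utf8).
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
driver.wait(timeout=5)
session = driver.table_client.session().create()

session.transaction().execute(
    'UPSERT INTO TestTable (Key, Value) VALUES (1u, "first");',
    commit_tx=True,
)

try:
    # INSERT requires the key to be absent, so this fails with
    # PRECONDITION_FAILED, the same status reported by KQP_COMPUTE above.
    session.transaction().execute(
        'INSERT INTO TestTable (Key, Value) VALUES (1u, "again");',
        commit_tx=True,
    )
except ydb.issues.PreconditionFailed as err:
    print("constraint violation, as in the log:", err)

# UPSERT, by contrast, overwrites the existing row and succeeds.
session.transaction().execute(
    'UPSERT INTO TestTable (Key, Value) VALUES (1u, "second");',
    commit_tx=True,
)
```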
2025-03-28T09:37:38.472037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 17976, node 1 2025-03-28T09:37:38.474002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:38.529324Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:38.529360Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:38.529370Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:38.529566Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28071 TClient is connected to server localhost:28071 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:39.001274Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:39.025803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:39.153405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:39.307751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:39.375234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.987580Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792259035539385:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:40.987677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.292022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.313334Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.333693Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.354599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.376918Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.405298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:41.440196Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792263330507191:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.440266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.440273Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792263330507196:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.443094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:41.451327Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792263330507198:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:41.506713Z node 1 :TX_PROXY ERROR: Actor# [1:7486792263330507251:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:42.364919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.393828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.465047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480 2025-03-28T09:37:43.056866Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792250445603044:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:43.056997Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10422, MsgBus: 23913 2025-03-28T09:37:47.377253Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792289859598545:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:47.377347Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00134b/r3tmp/tmpEoka4K/pdisk_1.dat 2025-03-28T09:37:47.480693Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10422, node 2 2025-03-28T09:37:47.517117Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:47.517219Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:47.519505Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:47.547141Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:47.547162Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:47.547167Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:47.547254Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23913 TClient is connected to server localhost:23913 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:47.953139Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:47.963858Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:48.020279Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:48.162187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:48.226296Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:50.155667Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792302744502205:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:50.155750Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:50.187192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:50.209254Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:50.232024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:50.255572Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:50.280244Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:50.310871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:50.342783Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792302744502712:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:50.342845Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792302744502717:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:50.342889Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:50.345624Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:50.353827Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792302744502719:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:50.446335Z node 2 :TX_PROXY ERROR: Actor# [2:7486792302744502776:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:51.317299Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 >> TraverseColumnShard::TraverseColumnTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] Test command err: 2025-03-28T09:37:45.901855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:45.902316Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:45.902373Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dbe/r3tmp/tmpVIulzH/pdisk_1.dat 2025-03-28T09:37:46.298882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:37:46.337811Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:46.380099Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:46.380255Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:46.391839Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:46.490566Z node 1 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# BrandNewTable in dir# /Root 2025-03-28T09:37:46.884292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:645:2552], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.884544Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.983297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.321811Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2025-03-28T09:37:47.323379Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-03-28T09:37:47.345894Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 1} TUpsertActor finished in 0.022114s, errors=0 2025-03-28T09:37:47.346228Z node 1 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-03-28T09:37:47.346371Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 WorkingDir: "/Root" TableName: "BrandNewTable" 2025-03-28T09:37:47.406540Z node 1 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [1:641:2549], subTag: 3} TUpsertActor finished in 0.059766s, errors=0 2025-03-28T09:37:47.406652Z node 1 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [1:754:2630] with tag# 3 2025-03-28T09:37:50.912958Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:50.913364Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:50.913425Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001dbe/r3tmp/tmpQMM2ip/pdisk_1.dat 2025-03-28T09:37:51.190356Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:37:51.220121Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:51.258135Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:51.258265Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:51.269893Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:51.351848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:51.598548Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 2 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2025-03-28T09:37:51.598703Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 2 2025-03-28T09:37:52.017291Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:738:2620], subTag: 2} TUpsertActor finished in 0.418217s, errors=0 2025-03-28T09:37:52.017368Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:739:2621] with tag# 2 2025-03-28T09:37:52.022187Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 drops table# table in dir# /Root 2025-03-28T09:37:52.034828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:781:2662], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:52.034975Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:52.263501Z node 2 :DS_LOAD_TEST NOTICE: TLoad# 0 creates table# table in dir# /Root 2025-03-28T09:37:52.281681Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:847:2708], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:52.281872Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:52.294497Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:37:52.352321Z node 2 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2025-03-28T09:37:52.535724Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2025-03-28T09:37:52.536027Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-03-28T09:37:52.548047Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 1} TUpsertActor finished in 0.011747s, errors=0 2025-03-28T09:37:52.548348Z node 2 :DS_LOAD_TEST DEBUG: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2025-03-28T09:37:52.548481Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 3 WorkingDir: "/Root" TableName: "table" 2025-03-28T09:37:52.605298Z node 2 :DS_LOAD_TEST NOTICE: Id# {Tag: 0, parent: [2:777:2659], subTag: 3} TUpsertActor finished in 0.056534s, errors=0 2025-03-28T09:37:52.605408Z node 2 :DS_LOAD_TEST INFO: TLoad# 0 received finished from actor# [2:940:2782] with tag# 3 >> TListAllTopicsTests::PlainList [GOOD] >> TListAllTopicsTests::RecursiveList ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] Test command err: 2025-03-28T09:37:51.366630Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:37:51.370187Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:37:51.370480Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-28T09:37:51.370531Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:37:51.370561Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-28T09:37:51.370601Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:37:51.370656Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:51.370737Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:51.397489Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:204:2210], now have 1 active actors on pipe 2025-03-28T09:37:51.397625Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:37:51.414194Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-03-28T09:37:51.416609Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-03-28T09:37:51.416739Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:51.417631Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { 
PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-03-28T09:37:51.417755Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:51.418120Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:51.418480Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:212:2216] 2025-03-28T09:37:51.419325Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-28T09:37:51.419395Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:212:2216] 2025-03-28T09:37:51.419458Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:37:51.420312Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:51.420427Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-03-28T09:37:51.420474Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-03-28T09:37:51.420515Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2025-03-28T09:37:51.420537Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2025-03-28T09:37:51.420677Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:51.420711Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:51.420744Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:51.420781Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:51.420825Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-28T09:37:51.420847Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-28T09:37:51.420876Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cconsumer 2025-03-28T09:37:51.420900Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uconsumer 2025-03-28T09:37:51.420924Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:51.420962Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:37:51.421042Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 
initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:51.421075Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:51.421226Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:37:51.423822Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:51.424110Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:219:2221], now have 1 active actors on pipe 2025-03-28T09:37:51.424557Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:222:2223], now have 1 active actors on pipe 2025-03-28T09:37:51.425091Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-03-28T09:37:51.425173Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-03-28T09:37:51.425240Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-03-28T09:37:51.425277Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-03-28T09:37:51.425305Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-03-28T09:37:51.425332Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-03-28T09:37:51.425358Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-03-28T09:37:51.425449Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 133 MaxStep: 30133 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2025-03-28T09:37:51.425503Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:37:51.428234Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:37:51.428276Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-03-28T09:37:51.428302Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-03-28T09:37:51.428337Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-03-28T09:37:51.428590Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67891 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-03-28T09:37:51.428624Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-03-28T09:37:51.428669Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67891, WriteId (empty maybe) 2025-03-28T09:37:51.428701Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-03-28T09:37:51.428742Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] 
TxId 67891, State UNKNOWN 2025-03-28T09:37:51.428779Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-03-28T09:37:51.428807Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARING 2025-03-28T09:37:51.428929Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PREPARED MinStep: 135 MaxStep: 30135 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2025-03-28T09:37:51.429001Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:37:51.432189Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:37:51.432232Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-03-28T09:37:51.432259Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PREP ... t with generation 2 done 2025-03-28T09:37:53.561190Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user client-3 reinit with generation 2 done 2025-03-28T09:37:53.561213Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user drop done 2025-03-28T09:37:53.561385Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:53.561415Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:53.561444Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000cuser, m0000000000cuser] 2025-03-28T09:37:53.561468Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000uuser, m0000000000uuser] 2025-03-28T09:37:53.561490Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:53.561520Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:53.561542Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:37:53.561558Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-03-28T09:37:53.561574Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-03-28T09:37:53.561591Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-3 2025-03-28T09:37:53.561607Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-3 2025-03-28T09:37:53.561623Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-03-28T09:37:53.561644Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:53.561671Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:37:53.561722Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2025-03-28T09:37:53.561849Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: 
StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:53.561869Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:53.561887Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-28T09:37:53.561904Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-28T09:37:53.561920Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] I0000000001 2025-03-28T09:37:53.561936Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cclient-1 2025-03-28T09:37:53.561952Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uclient-1 2025-03-28T09:37:53.561968Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cclient-3 2025-03-28T09:37:53.561984Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uclient-3 2025-03-28T09:37:53.561999Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] _config_1 2025-03-28T09:37:53.562014Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:53.562028Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-28T09:37:53.562059Z node 6 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:37:53.562247Z node 6 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:37:53.564075Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2025-03-28T09:37:53.564119Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Connected to tablet 22222 2025-03-28T09:37:53.565612Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:53.565823Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 0 2025-03-28T09:37:53.565856Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-03-28T09:37:53.565883Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-03-28T09:37:53.565909Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-03-28T09:37:53.565933Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 1, Expected 2 2025-03-28T09:37:53.565959Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 status has not changed 2025-03-28T09:37:53.567174Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:53.567235Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCommitDone Step 100, TxId 67890, Partition 1 2025-03-28T09:37:53.567255Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2025-03-28T09:37:53.567274Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2025-03-28T09:37:53.567293Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2025-03-28T09:37:53.567312Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Received 2, Expected 2 
2025-03-28T09:37:53.567341Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-03-28T09:37:53.567368Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] complete TxId 67890 2025-03-28T09:37:53.567546Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ChildPartitionIds: 1 ChildPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 0 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ParentPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } 2025-03-28T09:37:53.567588Z node 6 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:53.567649Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete partitions for TxId 67890 2025-03-28T09:37:53.567681Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState EXECUTED 2025-03-28T09:37:53.567709Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2025-03-28T09:37:53.567897Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: EXECUTED MinStep: 134 MaxStep: 18446744073709551615 PredicateRecipients: 22222 Step: 100 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ChildPartitionIds: 1 ChildPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 0 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ParentPartitionIds: 0 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 Important: false } Consumers { Name: "client-3" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2025-03-28T09:37:53.568064Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:37:53.569989Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:37:53.570018Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state EXECUTED 2025-03-28T09:37:53.570037Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State EXECUTED 2025-03-28T09:37:53.570057Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State EXECUTED FrontTxId 67890 2025-03-28T09:37:53.570083Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2025-03-28T09:37:53.570111Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS_ACKS 
2025-03-28T09:37:53.570139Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2025-03-28T09:37:53.570166Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-03-28T09:37:53.570186Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-28T09:37:53.570212Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 0/1 2025-03-28T09:37:53.573098Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [6:365:2337], now have 1 active actors on pipe 2025-03-28T09:37:53.573215Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\001" Seqno: 0 2025-03-28T09:37:53.573249Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] send TEvReadSetAck to 22222 2025-03-28T09:37:53.573283Z node 6 :PERSQUEUE DEBUG: Connected to tablet 72057594037927937 from tablet 22222 2025-03-28T09:37:53.573343Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSetAck Step: 100 TxId: 67890 TabletSource: 72057594037927937 TabletDest: 22222 TabletConsumer: 22222 Flags: 0 Seqno: 0 2025-03-28T09:37:53.573367Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Handle TEvReadSetAck 2025-03-28T09:37:53.573392Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] Predicate acks 1/1 2025-03-28T09:37:53.573420Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state WAIT_RS_ACKS 2025-03-28T09:37:53.573447Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State WAIT_RS_ACKS 2025-03-28T09:37:53.573477Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 1/1 2025-03-28T09:37:53.573497Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-28T09:37:53.573521Z node 6 :PERSQUEUE DEBUG: [TxId: 67890] PredicateAcks: 1/1 2025-03-28T09:37:53.573542Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2025-03-28T09:37:53.573567Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState DELETING 2025-03-28T09:37:53.573597Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] delete key for TxId 67890 2025-03-28T09:37:53.573640Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 18023, MsgBus: 31058 2025-03-28T09:37:39.120092Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792254803567582:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:39.120233Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00133b/r3tmp/tmpgxh39a/pdisk_1.dat 2025-03-28T09:37:39.417426Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18023, node 1 2025-03-28T09:37:39.464574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:39.464803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-03-28T09:37:39.466929Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:39.483208Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:39.483229Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:39.483236Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:39.483353Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:31058 TClient is connected to server localhost:31058 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:39.944490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:39.970860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.108099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:37:40.253422Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:40.315137Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:41.980487Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792263393503942:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:41.980575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.270911Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.297685Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.322793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.344126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.366550Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.393658Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:42.433975Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792267688471748:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.434053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.434085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792267688471753:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:42.437617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:42.446906Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792267688471755:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:37:42.513697Z node 1 :TX_PROXY ERROR: Actor# [1:7486792267688471809:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:43.319580Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:44.121765Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792254803567582:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:44.121826Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 12489, MsgBus: 11268 2025-03-28T09:37:44.953664Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792277624938993:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:44.953725Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00133b/r3tmp/tmpJYxAmZ/pdisk_1.dat 2025-03-28T09:37:45.057313Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12489, node 2 2025-03-28T09:37:45.087980Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:45.088093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:45.090508Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:45.110725Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:45.110751Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:45.110758Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:45.110864Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11268 TClient is connected to server localhost:11268 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:45.498304Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:45.513540Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:45.563459Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 2025-03-28T09:37:45.717949Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:45.791207Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:37:47.594479Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792290509842647:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.594552Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.612598Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.640199Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.666569Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.695684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.721791Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.750663Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.788820Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792290509843153:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.788890Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.788908Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792290509843158:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.792026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:37:47.800355Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792290509843160:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:37:47.854390Z node 2 :TX_PROXY ERROR: Actor# [2:7486792290509843213:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:48.741381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:37:48.772781Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:37:48.801305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:37:49.953767Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792277624938993:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:49.953888Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TColumnShardTestSchema::RebootHotTiersWithStat [GOOD] >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete [GOOD] |95.9%| [TA] $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TA] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete [GOOD] Test command err: 2025-03-28T09:37:48.167202Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:37:48.170460Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:37:48.170715Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-28T09:37:48.170762Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:37:48.170798Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-28T09:37:48.170854Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:37:48.170895Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:48.170956Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:48.185833Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:176:2191], now have 1 active actors on pipe 2025-03-28T09:37:48.185897Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:37:48.199982Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:37:48.202346Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:37:48.202471Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:48.203746Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:37:48.203843Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:48.203914Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:48.204274Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:48.204579Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-03-28T09:37:48.205285Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-28T09:37:48.205344Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:184:2197] 2025-03-28T09:37:48.205398Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:37:48.205983Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:48.206119Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-03-28T09:37:48.206214Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-03-28T09:37:48.206383Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:48.206424Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:48.206459Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:48.206495Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:48.206524Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-28T09:37:48.206545Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-28T09:37:48.206572Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:48.206612Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:37:48.206723Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:48.207048Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:48.207284Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-28T09:37:48.208064Z node 1 :PERSQUEUE DEBUG: [topic:1:Initializer] Initializing completed. 
2025-03-28T09:37:48.208107Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [1:186:2199] 2025-03-28T09:37:48.208147Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:37:48.208537Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:48.208599Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2025-03-28T09:37:48.208631Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2025-03-28T09:37:48.208737Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:48.208771Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:48.208819Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-28T09:37:48.208859Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-28T09:37:48.208885Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-03-28T09:37:48.208907Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-03-28T09:37:48.208932Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:48.208962Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-28T09:37:48.209014Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:48.209106Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:37:48.209309Z node 1 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-28T09:37:48.212981Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:48.213112Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:48.213516Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:197:2206], now have 1 active actors on pipe 2025-03-28T09:37:48.214271Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:200:2208], now have 1 active actors on pipe 2025-03-28T09:37:48.215289Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67890 Config { TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "client-1" Generation: 0 Important: false } Consumers { Name: "client-3" Generation: 7 Important: false } } BootstrapConfig { } } 2025-03-28T09:37:48.215440Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-03-28T09:37:48.215500Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-03-28T09:37:48.215545Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-03-28T09:37:48.215588Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-03-28T09:37:48.215630Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-03-28T09:37:48.215892Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 229 MaxStep: 18446744073709551615 Kind: KIND_CONFIG TabletConfig { PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" Ydb ... 
rmAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.581045Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.581891Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.582772Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.583650Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.584919Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.585844Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.586725Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.587584Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.587937Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 2 totakecount 1 count 1 size 172682 from pos 0 cbcount 1 2025-03-28T09:37:53.589401Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.590647Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.591492Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.592326Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.593155Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.594130Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.595008Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.595915Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.596920Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.597745Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.598710Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.599568Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.600441Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.601442Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2025-03-28T09:37:53.601826Z node 10 :PERSQUEUE DEBUG: FormAnswer processing batch offset 3 totakecount 1 
count 1 size 172682 from pos 0 cbcount 1 2025-03-28T09:37:53.603937Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 123 2025-03-28T09:37:53.632159Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:444:2420], now have 1 active actors on pipe 2025-03-28T09:37:53.632276Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:37:53.632326Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:37:53.632390Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 15 partNo : 0 messageNo: 1 size 102400 offset: 14 2025-03-28T09:37:53.632498Z node 10 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'topic' partition 0 error: new GetOwnership request needed for owner 2025-03-28T09:37:53.632638Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:53.632679Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:53.632729Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000002_00000_0000000001_00014, d0000000000_00000000000000000002_00000_0000000001_00014] 2025-03-28T09:37:53.632777Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000003_00000_0000000001_00014, d0000000000_00000000000000000003_00000_0000000001_00014] 2025-03-28T09:37:53.632811Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:53.632858Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:53.632904Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:53.632946Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:37:53.633019Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 45, Error new GetOwnership request needed for owner 2025-03-28T09:37:53.633050Z node 10 :PERSQUEUE DEBUG: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-03-28T09:37:53.633099Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:37:53.633148Z node 10 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000002_00000_0000000001_00014(+) to d0000000000_00000000000000000002_00000_0000000001_00014(+) 2025-03-28T09:37:53.633179Z node 10 :PERSQUEUE DEBUG: CacheProxy. 
Delete blobs from d0000000000_00000000000000000003_00000_0000000001_00014(+) to d0000000000_00000000000000000003_00000_0000000001_00014(+) 2025-03-28T09:37:53.635488Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:53.638556Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:456:2431], now have 1 active actors on pipe 2025-03-28T09:37:53.638654Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:37:53.638720Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:37:53.638807Z node 10 :PERSQUEUE INFO: new Cookie default|77d27ed2-1da76f3f-67e8ca91-6ebf9a8_14 generated for partition 0 topic 'topic' owner default 2025-03-28T09:37:53.638897Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-28T09:37:53.638956Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:37:53.639214Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:458:2433], now have 1 active actors on pipe 2025-03-28T09:37:53.639270Z node 10 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:37:53.639295Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:37:53.639327Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 15 partNo : 0 messageNo: 0 size 102400 offset: 14 2025-03-28T09:37:53.639393Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Send write quota request. Topic: "topic". Partition: 0. Amount: 102409. Cookie: 15 2025-03-28T09:37:53.897416Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Got quota. Topic: "topic". 
Partition: 0: Cookie: 15 2025-03-28T09:37:53.897582Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob processing sourceId 'sourceid1' seqNo 15 partNo 0 2025-03-28T09:37:53.898293Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob complete sourceId 'sourceid1' seqNo 15 partNo 0 FormedBlobsCount 0 NewHead: Offset 14 PartNo 0 PackedSize 102472 count 1 nextOffset 15 batches 1 2025-03-28T09:37:53.899050Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'topic' partition 0 compactOffset 14,1 HeadOffset 14 endOffset 14 curOffset 15 d0000000000_00000000000000000014_00000_0000000001_00000| size 102462 WTime 2103 2025-03-28T09:37:53.899255Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:53.899293Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:53.899328Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:37:53.899368Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:53.899420Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psourceid1 2025-03-28T09:37:53.899464Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000014_00000_0000000001_00000| 2025-03-28T09:37:53.899483Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:53.899532Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:53.899566Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:37:53.899699Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:37:53.899778Z node 10 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 14 partNo 0 count 1 size 102462 2025-03-28T09:37:53.903271Z node 10 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 14 count 1 size 102462 actorID [10:134:2160] 2025-03-28T09:37:53.903375Z node 10 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 14 partno 0 count 1 parts 0 size 102462 2025-03-28T09:37:53.903460Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 102409 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:53.903508Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:37:53.903561Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid1', Topic: 'topic', Partition: 0, SeqNo: 15, partNo: 0, Offset: 14 is stored on disk 2025-03-28T09:37:53.903818Z node 10 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:37:53.904240Z node 10 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [10:469:2442], now have 1 active actors on pipe |95.9%| [TA] $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTable [GOOD] Test command err: 2025-03-28T09:35:27.930132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:27.930344Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:27.930379Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b04/r3tmp/tmpiU6wzM/pdisk_1.dat 2025-03-28T09:35:28.199207Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26955, node 1 2025-03-28T09:35:28.400997Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:28.401043Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:28.401065Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:28.401353Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:28.402917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:35:28.478947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:28.479057Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:28.492659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12266 2025-03-28T09:35:29.011621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:31.443553Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:31.470386Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:31.470528Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:31.507772Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:31.509271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:31.726931Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.727328Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.727686Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.727789Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.727975Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.728035Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.728116Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.728191Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.728239Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.887054Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:31.887147Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:31.900113Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:32.043083Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:32.082203Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:32.082289Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:32.104489Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:32.104601Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:32.104780Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:32.104830Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:32.104887Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:32.104935Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:32.104995Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:32.105039Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:32.105396Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:32.131626Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:32.131763Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:32.137479Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:32.142889Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:32.143096Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:32.148290Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:32.163377Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:32.163431Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:32.163489Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:32.175508Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:32.181603Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:32.181709Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:32.320988Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:32.495151Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:32.583125Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:33.377910Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:33.378032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:33.396425Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:33.690556Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:33.690909Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:33.691223Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:33.691373Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:33.691512Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:33.691656Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:33.691806Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:33.691937Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:33.692077Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:33.692213Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:33.692350Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:33.692484Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:33.743910Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:33.743988Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process= ... ode 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 3] 2025-03-28T09:37:50.568195Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:50.622258Z node 2 :STATISTICS DEBUG: [72075186224037894] EvFastPropagateCheck 2025-03-28T09:37:50.622361Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2025-03-28T09:37:50.666559Z node 2 :STATISTICS DEBUG: EvRequestTimeout, pipe client id = [2:8543:6442], schemeshard count = 1 2025-03-28T09:37:52.803086Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:37:52.803146Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. No force traversals. 2025-03-28T09:37:52.803182Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:37:52.803225Z node 2 :STATISTICS DEBUG: [72075186224037894] Start schedule traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:52.806888Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:52.823535Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:52.824106Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:52.824191Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:52.825223Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:37:52.838941Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:52.839093Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2025-03-28T09:37:52.839782Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8668:6510], server id = [2:8673:6515], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:52.840059Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8668:6510], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:52.840238Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8669:6511], server id = [2:8674:6516], tablet id = 72075186224037900, status = OK 2025-03-28T09:37:52.840273Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8669:6511], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:52.841363Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8670:6512], server id = [2:8675:6517], tablet id = 72075186224037901, status = OK 2025-03-28T09:37:52.841409Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8670:6512], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:52.841872Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8671:6513], server id = [2:8676:6518], tablet id = 72075186224037902, status = OK 2025-03-28T09:37:52.841910Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8671:6513], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:52.842830Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8672:6514], server id = [2:8677:6519], tablet id = 72075186224037903, status = OK 2025-03-28T09:37:52.842878Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8672:6514], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:52.846959Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:52.847484Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8668:6510], server id = [2:8673:6515], tablet id = 72075186224037899 2025-03-28T09:37:52.847523Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:52.848222Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:37:52.848735Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8669:6511], server id = [2:8674:6516], tablet id = 72075186224037900 2025-03-28T09:37:52.848760Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:52.849190Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8692:6530], server id = [2:8693:6531], tablet id = 72075186224037904, status = OK 2025-03-28T09:37:52.849241Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8692:6530], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:52.849549Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-28T09:37:52.850293Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-28T09:37:52.850569Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8670:6512], server id = [2:8675:6517], tablet id = 72075186224037901 2025-03-28T09:37:52.850597Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:52.851067Z node 2 :STATISTICS DEBUG: 
EvClientConnected, node id = 2, client id = [2:8694:6532], server id = [2:8696:6533], tablet id = 72075186224037905, status = OK 2025-03-28T09:37:52.851113Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8694:6532], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:52.851168Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8671:6513], server id = [2:8676:6518], tablet id = 72075186224037902 2025-03-28T09:37:52.851195Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:52.851516Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-28T09:37:52.852046Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8697:6534], server id = [2:8701:6538], tablet id = 72075186224037906, status = OK 2025-03-28T09:37:52.852096Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8697:6534], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:52.852641Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8699:6536], server id = [2:8702:6539], tablet id = 72075186224037907, status = OK 2025-03-28T09:37:52.852684Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8699:6536], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:52.852840Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8672:6514], server id = [2:8677:6519], tablet id = 72075186224037903 2025-03-28T09:37:52.852857Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:52.854140Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8703:6540], server id = [2:8707:6543], tablet id = 72075186224037908, status = OK 2025-03-28T09:37:52.854182Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8703:6540], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:52.854580Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-28T09:37:52.855357Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8692:6530], server id = [2:8693:6531], tablet id = 72075186224037904 2025-03-28T09:37:52.855379Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:52.856640Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-28T09:37:52.856939Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8694:6532], server id = [2:8696:6533], tablet id = 72075186224037905 2025-03-28T09:37:52.856960Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:52.857677Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-28T09:37:52.858084Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8697:6534], server id = [2:8701:6538], tablet id = 72075186224037906 2025-03-28T09:37:52.858106Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:52.858625Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-28T09:37:52.858833Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8699:6536], server id = [2:8702:6539], tablet id = 72075186224037907 2025-03-28T09:37:52.858853Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:52.859024Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-28T09:37:52.859066Z node 2 :STATISTICS DEBUG: Send aggregate statistics response 
to node: 2 2025-03-28T09:37:52.859212Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:52.868199Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:52.868871Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:52.870399Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8703:6540], server id = [2:8707:6543], tablet id = 72075186224037908 2025-03-28T09:37:52.870433Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:52.871018Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List<Uint32>; DECLARE $data AS List<String>; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:52.897018Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8734:6566]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:52.897197Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:52.897235Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8734:6566], StatRequests.size() = 1 2025-03-28T09:37:53.007830Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzNhZWVkODEtNmRmMmUxM2QtNzdiOTZiZWMtNTkyMDI5Yg==, TxId: 2025-03-28T09:37:53.007889Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzNhZWVkODEtNmRmMmUxM2QtNzdiOTZiZWMtNTkyMDI5Yg==, TxId: ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2025-03-28T09:37:53.008456Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8742:6572]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:53.008673Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:53.009140Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:53.009197Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:37:53.011514Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:53.011571Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:37:53.011610Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:37:53.016441Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TPartitionTests::GetPartitionWriteInfoSuccess [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] >> TPartitionTests::GetPartitionWriteInfoError |95.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ...
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155201.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155201.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143155201.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123155201.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155201.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143155201.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154001.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123155201.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123155201.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154001.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123154001.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123154001.000000s;Name=;Codec=}; 2025-03-28T09:36:41.598458Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:36:41.658594Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:36:41.671883Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:36:41.672110Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:36:41.677649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:36:41.677787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:36:41.677942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:36:41.678007Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 
2025-03-28T09:36:41.678070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:36:41.678128Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:36:41.678186Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:36:41.678264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:36:41.678330Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:36:41.678403Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:36:41.678471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:36:41.678527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:36:41.695969Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:36:41.696176Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:36:41.696215Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:36:41.696327Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:36:41.696429Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:36:41.696480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:36:41.696508Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:36:41.696565Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:36:41.696609Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:36:41.696634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:36:41.696654Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:36:41.696759Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:36:41.696799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:36:41.696822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:36:41.696840Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:36:41.696888Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:36:41.696917Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:36:41.696950Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:36:41.696970Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:36:41.697013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:36:41.697034Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:36:41.697050Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:36:41.697076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:36:41.697105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:36:41.697137Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:36:41.697443Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-03-28T09:36:41.697501Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=27; 2025-03-28T09:36:41.697551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=21; 2025-03-28T09:36:41.697606Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=26; 2025-03-28T09:36:41.697722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:36:41.697788Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:36:41.697816Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:36:41.697947Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:36:41.697973Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:36:41.697997Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=94 ... a.cpp:29;EXECUTE:finishLoadingTime=335; 2025-03-28T09:37:53.559116Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=44611; 2025-03-28T09:37:53.566401Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=7188; 2025-03-28T09:37:53.578222Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=10521; 2025-03-28T09:37:53.578377Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=11858; 2025-03-28T09:37:53.578608Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=142; 2025-03-28T09:37:53.578766Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=93; 2025-03-28T09:37:53.578947Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=122; 2025-03-28T09:37:53.579114Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=99; 
2025-03-28T09:37:53.587549Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8351; 2025-03-28T09:37:53.597437Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9764; 2025-03-28T09:37:53.597577Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=47; 2025-03-28T09:37:53.597675Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=38; 2025-03-28T09:37:53.597737Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2025-03-28T09:37:53.597791Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2025-03-28T09:37:53.597836Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2025-03-28T09:37:53.597922Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=47; 2025-03-28T09:37:53.597972Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-03-28T09:37:53.598055Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=50; 2025-03-28T09:37:53.598098Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2025-03-28T09:37:53.598163Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=32; 2025-03-28T09:37:53.598248Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=44; 2025-03-28T09:37:53.598598Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=304; 2025-03-28T09:37:53.598635Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=90775; 2025-03-28T09:37:53.598802Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:37:53.598901Z node 
1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:37:53.598949Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:37:53.599005Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:37:53.615843Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-28T09:37:53.616003Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:37:53.616054Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:37:53.616174Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-28T09:37:53.616241Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:37:53.616284Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:37:53.616332Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:37:53.616370Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:37:53.616459Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:37:53.617007Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:37:53.617104Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2654:4528];tablet_id=9437184;parent=[1:2612:4493];fline=manager.cpp:82;event=ask_data;request=request_id=155;1={portions_count=29};; 2025-03-28T09:37:53.617934Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:37:53.618057Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:37:53.618083Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:37:53.618102Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:37:53.618142Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:37:53.618198Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:37:53.618248Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-28T09:37:53.618302Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:37:53.618343Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:37:53.618382Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:37:53.618415Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:37:53.618490Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:37:53.619640Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-03-28T09:37:53.620908Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> TPartitionTests::DataTxCalcPredicateError [GOOD] >> TPartitionTests::GetPartitionWriteInfoError [GOOD] >> TPQRBDescribes::PartitionLocations 
[GOOD] |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] Test command err: 2025-03-28T09:35:27.302545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:27.302756Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:27.302792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b08/r3tmp/tmpiabyYG/pdisk_1.dat 2025-03-28T09:35:27.559740Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19635, node 1 2025-03-28T09:35:27.747958Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:27.747998Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:27.748020Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:27.748355Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:27.750131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:35:27.829651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:27.829825Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:27.844063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22426 2025-03-28T09:35:28.345009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:30.834635Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:30.860109Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:30.860264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:30.897746Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:30.899611Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:31.113147Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.113579Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.113942Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.114033Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.114205Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.114262Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.114317Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.114365Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.114410Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:31.274305Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:31.274421Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:31.287771Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:31.406107Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:31.445185Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:31.445264Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:31.471762Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:31.471919Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:31.472110Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:31.472169Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:31.472217Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:31.472272Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:31.472317Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:31.472356Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:31.472723Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:31.502734Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:31.502874Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:31.510575Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:31.517099Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:31.517412Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:31.522524Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:31.535637Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:31.535684Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:31.535763Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:31.546585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:31.552306Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:31.552418Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:31.693477Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:31.877510Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:31.986266Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:32.739705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:32.739880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:32.757937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:33.044927Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:33.045131Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:33.045374Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:33.045473Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:33.045549Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:33.045643Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:33.045720Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:33.045808Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:33.045892Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:33.045986Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:33.046078Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:33.046171Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2394:2890];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:33.087328Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:33.087404Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037900;self_id=[2:2402:2894];tablet_id=72075186224037900;process= ... [72075186224037894] Subscribed for config changes 2025-03-28T09:37:53.322056Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:37:53.322107Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:37:53.322209Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:37:53.323705Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:37:53.323786Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:37:53.324696Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:37:53.396946Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:37:53.397224Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2025-03-28T09:37:53.398358Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8766:6582], server id = [2:8771:6587], tablet id = 72075186224037899, status = OK 2025-03-28T09:37:53.398471Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8766:6582], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:53.398865Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8767:6583], server id = [2:8772:6588], tablet id = 72075186224037900, status = OK 2025-03-28T09:37:53.398930Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8767:6583], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:53.399108Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8768:6584], server id = [2:8773:6589], tablet id = 72075186224037901, status = OK 2025-03-28T09:37:53.399155Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8768:6584], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:53.401319Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8769:6585], server id = [2:8774:6590], tablet id = 72075186224037902, status = OK 2025-03-28T09:37:53.401369Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8769:6585], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:53.401744Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8770:6586], server id = [2:8775:6591], tablet id = 72075186224037903, status = OK 2025-03-28T09:37:53.401800Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8770:6586], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:53.402487Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:37:53.403047Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037900 2025-03-28T09:37:53.403700Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8766:6582], server id = [2:8771:6587], tablet id = 72075186224037899 2025-03-28T09:37:53.403735Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:53.403872Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037901 2025-03-28T09:37:53.404295Z node 
2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8767:6583], server id = [2:8772:6588], tablet id = 72075186224037900 2025-03-28T09:37:53.404316Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:53.404583Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037902 2025-03-28T09:37:53.404842Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8768:6584], server id = [2:8773:6589], tablet id = 72075186224037901 2025-03-28T09:37:53.404861Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:53.404986Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037903 2025-03-28T09:37:53.405157Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8780:6596], server id = [2:8784:6600], tablet id = 72075186224037904, status = OK 2025-03-28T09:37:53.405199Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8780:6596], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:53.405361Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8781:6597], server id = [2:8785:6601], tablet id = 72075186224037905, status = OK 2025-03-28T09:37:53.405389Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8781:6597], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:53.405442Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8769:6585], server id = [2:8774:6590], tablet id = 72075186224037902 2025-03-28T09:37:53.405462Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:53.406127Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8783:6599], server id = [2:8787:6603], tablet id = 72075186224037906, status = OK 2025-03-28T09:37:53.406170Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8783:6599], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:53.406615Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8770:6586], server id = [2:8775:6591], tablet id = 72075186224037903 2025-03-28T09:37:53.406640Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:53.407244Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8786:6602], server id = [2:8789:6605], tablet id = 72075186224037907, status = OK 2025-03-28T09:37:53.407285Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8786:6602], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:53.407356Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8788:6604], server id = [2:8790:6606], tablet id = 72075186224037908, status = OK 2025-03-28T09:37:53.407389Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:8788:6604], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:37:53.408469Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037904 2025-03-28T09:37:53.408671Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037905 2025-03-28T09:37:53.408973Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037906 2025-03-28T09:37:53.409239Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8780:6596], server id = [2:8784:6600], tablet id = 72075186224037904 2025-03-28T09:37:53.409256Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:53.409307Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, 
client id = [2:8781:6597], server id = [2:8785:6601], tablet id = 72075186224037905 2025-03-28T09:37:53.409322Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:53.409421Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037908 2025-03-28T09:37:53.409514Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8783:6599], server id = [2:8787:6603], tablet id = 72075186224037906 2025-03-28T09:37:53.409542Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:53.409622Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037907 2025-03-28T09:37:53.409665Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:37:53.409932Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:37:53.410133Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:37:53.410320Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:37:53.412525Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8788:6604], server id = [2:8790:6606], tablet id = 72075186224037908 2025-03-28T09:37:53.412548Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:53.413007Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:8786:6602], server id = [2:8789:6605], tablet id = 72075186224037907 2025-03-28T09:37:53.413036Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:37:53.413257Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:37:53.442713Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:8812:6628]], StatType[ 0 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:53.442925Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2025-03-28T09:37:53.442958Z node 2 :STATISTICS DEBUG: ReplySuccess(), request id = 2, ReplyToActorId = [2:8812:6628], StatRequests.size() = 1 2025-03-28T09:37:53.559379Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTA0YmNlZWEtYWE4NjI4N2MtZWZjNjZlMzAtMzA3MzUzNzk=, TxId: 2025-03-28T09:37:53.559488Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTA0YmNlZWEtYWE4NjI4N2MtZWZjNjZlMzAtMzA3MzUzNzk=, TxId: 2025-03-28T09:37:53.560076Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:37:53.572621Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8822:6634] 2025-03-28T09:37:53.572803Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:8822:6634], schemeshard id = 72075186224037897 2025-03-28T09:37:53.572865Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:8719:6551], server id = [2:8823:6635], tablet id = 72075186224037894, status = OK 2025-03-28T09:37:53.572953Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:8823:6635] 2025-03-28T09:37:53.572986Z 
node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectNode, pipe server id = [2:8823:6635], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2025-03-28T09:37:53.586308Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete schedule traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:37:53.586370Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2025-03-28T09:37:53.640768Z node 2 :STATISTICS DEBUG: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:8826:6638]], StatType[ 2 ], StatRequestsCount[ 1 ] 2025-03-28T09:37:53.641028Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:53.641088Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 2] ] 2025-03-28T09:37:53.643683Z node 2 :STATISTICS DEBUG: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2025-03-28T09:37:53.643752Z node 2 :STATISTICS DEBUG: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2025-03-28T09:37:53.643804Z node 2 :STATISTICS DEBUG: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 4] ], StatType[ 2 ], ColumnTag[ 1 ] 2025-03-28T09:37:53.648149Z node 2 :STATISTICS DEBUG: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> TPartitionTests::FailedTxsDontBlock >> TSchemeShardTTLTests::ShouldCheckQuotas >> TSchemeShardTTLTests::ShouldSkipDroppedColumn >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true >> TPQTabletTests::Huge_ProposeTransacton [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::DataTxCalcPredicateError [GOOD] Test command err: 2025-03-28T09:37:44.781150Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:44.781247Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:44.797650Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:177:2192] 2025-03-28T09:37:44.799399Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-03-28T09:37:44.000000Z 2025-03-28T09:37:44.799471Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:177:2192] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\300\244\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\300\244\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\264\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\001\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-03-28T09:37:45.607772Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:45.607846Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:45.624835Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:179:2194] 2025-03-28T09:37:45.626604Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:37:45.000000Z 2025-03-28T09:37:45.626673Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\250\254\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\250\254\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\001\030\001\"\tsession-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\250\254\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\003\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\003\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\250\254\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\001\020\001\030\001\"\tsession-2(\0000\003" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE 
} CmdWrite { Key: "m0000000003cclient-1" Value: "\010\006\020\001\030\001\"\tsession-1(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\006\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } 2025-03-28T09:37:46.396413Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:46.396484Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:46.412071Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [3:179:2194] 2025-03-28T09:37:46.413311Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:37:46.000000Z 2025-03-28T09:37:46.413357Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [3:179:2194] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\220\264\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-03-28T09:37:47.218542Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:47.218629Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:47.238131Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-28T09:37:47.238391Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:47.238734Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:178:2193] 2025-03-28T09:37:47.239857Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-28T09:37:47.240089Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request Got KV request 2025-03-28T09:37:47.240304Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-28T09:37:47.240545Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-28T09:37:47.241063Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-03-28T09:37:47.241174Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-28T09:37:47.241220Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:37:47.241271Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-03-28T09:37:47.000000Z 2025-03-28T09:37:47.241314Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-28T09:37:47.241377Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:178:2193] 2025-03-28T09:37:47.241450Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-03-28T09:37:47.241506Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:48.507217Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client session is set to 0 (startOffset 0) session session 2025-03-28T09:37:48.507403Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:48.507457Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:48.507493Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:48.507530Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:48.507560Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient 2025-03-28T09:37:48.507578Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient 2025-03-28T09:37:48.507601Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:48.507636Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got KV request Got batch complete: 1 Got KV request Got KV request Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000(\370\273\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-03-28T09:37:48.529050Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Create distr tx with id = 0 and act no: 1 2025-03-28T09:37:48.529391Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 Wait first predicate result 2025-03-28T09:37:49.749494Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Create distr tx with id = 2 and act no: 3 2025-03-28T09:37:49.749864Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 Wait second predicate result 2025-03-28T09:37:50.966677Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle 
TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:37:50.966795Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-03-28T09:37:50.966857Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-03-28T09:37:50.966925Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-28T09:37:50.966980Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 ... ] Handle TEvPQ::TEvTxCommit Step 1, TxId 2 2025-03-28T09:37:50.967278Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2 2025-03-28T09:37:50.967329Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-28T09:37:50.967387Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-03-28T09:37:50.967614Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:50.967664Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:50.967729Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:50.967771Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-03-28T09:37:50.967808Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:50.967833Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:50.967855Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:37:50.967895Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:50.967966Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 2025-03-28T09:37:50.968216Z node 4 :PERSQUEUE INFO: new Cookie owner1|18501a14-8e9d1088-9d362e88-28300657_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2025-03-28T09:37:50.979025Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 2025-03-28T09:37:50.979167Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 Got batch complete: 1 2025-03-28T09:37:50.980015Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'SourceId' seqNo 5 partNo 0 2025-03-28T09:37:50.980692Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'SourceId' seqNo 5 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 118 count 1 nextOffset 52 batches 1 2025-03-28T09:37:50.981224Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 51,1 HeadOffset 50 endOffset 50 curOffset 52 d0000000000_00000000000000000051_00000_0000000001_00000| size 104 WTime 15244 2025-03-28T09:37:50.981338Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:50.981368Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:50.981399Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:37:50.981437Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:50.981468Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000pSourceId 2025-03-28T09:37:50.981503Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000051_00000_0000000001_00000| 2025-03-28T09:37:50.981523Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:50.981549Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:50.981577Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request Send disk status response with cookie: 0 2025-03-28T09:37:51.013156Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:51.013262Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-28T09:37:51.013348Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 51 is stored on disk Wait third predicate result Create distr tx with id = 4 and act no: 5 2025-03-28T09:37:51.013802Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-03-28T09:37:52.204167Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 2025-03-28T09:37:52.645479Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.645559Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:52.660796Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-28T09:37:52.661042Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:52.661320Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:178:2193] 2025-03-28T09:37:52.662333Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-28T09:37:52.662513Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-28T09:37:52.662675Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-28T09:37:52.662907Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-28T09:37:52.663336Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-03-28T09:37:52.663446Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-28T09:37:52.663498Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:37:52.663550Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:37:52.000000Z 2025-03-28T09:37:52.663590Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-28T09:37:52.663645Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:178:2193] 2025-03-28T09:37:52.663710Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-03-28T09:37:52.663759Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-03-28T09:37:53.935258Z node 5 :PERSQUEUE INFO: new Cookie SourceId|e087cae0-8c253378-4d3b6311-f5c1db5a_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner SourceId 2025-03-28T09:37:53.935381Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Wait write response Wait kv request 2025-03-28T09:37:53.935635Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'SourceId' seqNo 4 partNo 0 2025-03-28T09:37:53.936406Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'SourceId' seqNo 4 partNo 0 FormedBlobsCount 0 NewHead: Offset 11 PartNo 0 PackedSize 118 count 1 nextOffset 12 batches 1 2025-03-28T09:37:53.936977Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 11,1 HeadOffset 1 endOffset 1 curOffset 12 d0000000000_00000000000000000011_00000_0000000001_00000| size 104 WTime 5132 2025-03-28T09:37:53.937137Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:53.937174Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:53.937210Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:37:53.937243Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:53.937275Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000pSourceId 2025-03-28T09:37:53.937318Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000011_00000_0000000001_00000| 2025-03-28T09:37:53.937341Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:53.937369Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:53.937403Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got KV request Got batch complete: 1 Got KV request Got KV request 2025-03-28T09:37:53.947824Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:53.947900Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-28T09:37:53.947958Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 11 is stored on disk Wait second predicate result Create distr tx with id = 0 and act no: 1 2025-03-28T09:37:53.948246Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-03-28T09:37:55.197492Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTable >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 25428, MsgBus: 8007 2025-03-28T09:36:14.960181Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791890183062788:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:36:14.960290Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001899/r3tmp/tmptO1Ual/pdisk_1.dat 2025-03-28T09:36:15.282552Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:36:15.313950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:36:15.314504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:36:15.316290Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25428, node 1 2025-03-28T09:36:15.450789Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:36:15.450809Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:36:15.450819Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:36:15.450930Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8007 TClient is connected to server localhost:8007 WaitRootIsUp 'Root'... 
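The === DumpKeyValueRequest === blocks in the partition write trace above show that each batch is committed as a single KV transaction: a delete of the [x0000000000, x0000000001) range, writes of the source-id record (m0000000000pSourceId), the data blob (d0000000000_00000000000000000051_00000_0000000001_00000|), and the partition-info key (i0000000000), and an empty rename section. A minimal C++ sketch of the data-blob key layout, inferred purely from the key strings printed in this log — the field widths and meanings are assumptions, not taken from YDB headers:

#include <cstdint>
#include <cstdio>
#include <string>

// Hypothetical decomposition of the data-blob key printed above:
//   d0000000000_00000000000000000051_00000_0000000001_00000|
// Field widths are inferred from the log examples, not from YDB sources.
struct TBlobKey {
    uint32_t Partition;      // "d" + 10-digit partition id
    uint64_t Offset;         // 20-digit offset of the first message in the blob
    uint32_t PartNo;         // 5-digit part number of a multi-part message
    uint32_t Count;          // 10-digit number of messages in the blob
    uint32_t InternalParts;  // 5-digit internal-parts counter
    bool     Head;           // trailing '|' marks the current head blob

    std::string ToString() const {
        char buf[64];
        std::snprintf(buf, sizeof(buf), "d%010u_%020llu_%05u_%010u_%05u%s",
                      Partition, static_cast<unsigned long long>(Offset),
                      PartNo, Count, InternalParts, Head ? "|" : "");
        return buf;
    }
};

int main() {
    // Reconstructs the key written for offset 51 in the trace above.
    TBlobKey key{0, 51, 0, 1, 0, true};
    std::printf("%s\n", key.ToString().c_str());
}

The accompanying "m" and "i" keys appear to carry per-source-id state and partition metadata respectively; that, too, is an inference from this trace rather than a documented contract.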
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:36:15.936147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:15.967443Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:16.096543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:16.214866Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:16.283201Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:36:16.959208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791898772999127:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:16.959280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:17.360316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:36:17.380623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:36:17.401032Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:36:17.422574Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:36:17.443872Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:36:17.481403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:36:17.562218Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791903067966969:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:17.562286Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:17.562336Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486791903067966974:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:36:17.566198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:36:17.572609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486791903067966976:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:36:17.645875Z node 1 :TX_PROXY ERROR: Actor# [1:7486791903067967030:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:36:18.437184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:36:19.062836Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715676. Ctx: { TraceId: 01jqe1r1k57cw1dwb05eak00wc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmVkYjU3YTItMTVkMDk1ZTAtZTQwOWRjM2MtYjdhNDQyNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:19.064632Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715679. Ctx: { TraceId: 01jqe1r1k7b1abp5ey60jmkwk1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmEwYTA2MDItZDQ4NjMyY2QtM2Y4NjA3ODktYWI0MDJjNWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:19.067046Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715672. Ctx: { TraceId: 01jqe1r1k4b9ds8sqbam76mmtk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzhjYWM1ZTMtZWM5YzdmNjItMTYxMDdlODEtMTQwN2QwNWU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:19.067908Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715673. Ctx: { TraceId: 01jqe1r1k420qqd263jc0w4jfm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NzMwNWZiZDctMTYwZjgyNDgtZjY3NzU2OTgtNGE1NzFkOTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:19.069074Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715674. Ctx: { TraceId: 01jqe1r1k519g7q4jdmt8j800b, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YTAyMzE0MTQtMjFjODU0OTktZDg2Y2ZkNGItNDk2OWY2Yjc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:19.075178Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715675. Ctx: { TraceId: 01jqe1r1k403ad1pk00xd7xqbn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTIzNTFhZDMtM2ExMGQ5MWUtZmVmOTQxNzUtNzA3NmQ5Nw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:19.075734Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715677. Ctx: { TraceId: 01jqe1r1k6cntpsgy0h43bd5vs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWI0ZDIwYjktYWJiZTFiN2UtZDcwYmFlOTktMzA2MDQ3Yzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:19.076354Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715678. Ctx: { TraceId: 01jqe1r1k705myz02qncc40jwb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UwYmUzOTItN2Q5YzgwODEtYTlhZTM4NDgtODk5MThmNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:19.092276Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715680. 
Ctx: { TraceId: 01jqe1r1k82782wvpstfa8v06n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YThjMjliYWUtYzRiODk1ZjktMzhhODc4MTktOTMzMjZhYTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:19.093251Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715681. Ctx: { TraceId: 01jqe1r1k704ffb3hss7g68x9h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDZkNGZhZTctODk2ZjQ5YWMtMTEzZGI5ZjktNzRjNzVkY2U=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:19.107467Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715682. Ctx: { TraceId: 01jqe1r1k57cw1dwb05eak00wc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MmVkYjU3YTItMTVkMDk1ZTAtZTQwOWRjM2MtYjdhNDQyNzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:19.112546Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715684. Ctx: { TraceId: 01jqe1r1k705myz02qncc40jwb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2UwYmUzOTItN2Q5YzgwODEtYTlhZTM4NDgtODk5MThmNTA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:36:19.113189Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715683. Ctx: { TraceId: 01jqe1r1k403ad1pk00xd7xqbn, Database: , DatabaseId: /Root, SessionId: ydb://se ... sion/3?node_id=3&id=NzUxNzlhMGQtODRmYTY5Y2EtMjY1MzVmMTMtNDhkMmFiMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.067051Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721510. Ctx: { TraceId: 01jqe1twdj0vrc71v6ptttg4kt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDk0NTQzMDAtNThmYThjYTItMzVhYjJhNzYtZWM0NjhjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.068218Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721511. Ctx: { TraceId: 01jqe1twdj3r4fttn0k6d10e1y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTZiMzI2MzctMTQ5Y2YyZmEtYTFlZDlhYWUtOTg3MmZjOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.069652Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721512. Ctx: { TraceId: 01jqe1twdkfgvyaem2c21y56we, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTkwMzNjODktMTY1MDgyOC02ZmFlN2JlZC1iNjFhMTUxOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.072458Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721513. Ctx: { TraceId: 01jqe1twdj948f7668erb1af3h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzUxNzlhMGQtODRmYTY5Y2EtMjY1MzVmMTMtNDhkMmFiMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.073757Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721514. Ctx: { TraceId: 01jqe1twdj0vrc71v6ptttg4kt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDk0NTQzMDAtNThmYThjYTItMzVhYjJhNzYtZWM0NjhjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.074502Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721515. 
Ctx: { TraceId: 01jqe1twdj3r4fttn0k6d10e1y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTZiMzI2MzctMTQ5Y2YyZmEtYTFlZDlhYWUtOTg3MmZjOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.075953Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721516. Ctx: { TraceId: 01jqe1twdq5ntdcfftr3bnk10f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWMxYjZjZmItOTE5YjZiNjAtOWJkZjExMzEtYTViMTQyNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.081971Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721517. Ctx: { TraceId: 01jqe1twdkfgvyaem2c21y56we, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTkwMzNjODktMTY1MDgyOC02ZmFlN2JlZC1iNjFhMTUxOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.082063Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721518. Ctx: { TraceId: 01jqe1twdj948f7668erb1af3h, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzUxNzlhMGQtODRmYTY5Y2EtMjY1MzVmMTMtNDhkMmFiMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.088421Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721519. Ctx: { TraceId: 01jqe1twdj0vrc71v6ptttg4kt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDk0NTQzMDAtNThmYThjYTItMzVhYjJhNzYtZWM0NjhjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.090199Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721520. Ctx: { TraceId: 01jqe1twdq5ntdcfftr3bnk10f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWMxYjZjZmItOTE5YjZiNjAtOWJkZjExMzEtYTViMTQyNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.093094Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721521. Ctx: { TraceId: 01jqe1twdj3r4fttn0k6d10e1y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTZiMzI2MzctMTQ5Y2YyZmEtYTFlZDlhYWUtOTg3MmZjOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.101239Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721522. Ctx: { TraceId: 01jqe1twdq5ntdcfftr3bnk10f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWMxYjZjZmItOTE5YjZiNjAtOWJkZjExMzEtYTViMTQyNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.109603Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721524. Ctx: { TraceId: 01jqe1twf97c3vr828mnfeypkt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGVmNWZkNGMtMzkwZTdiNjMtNDllNTA1MzgtNThkMzQ0ZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.109798Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721523. Ctx: { TraceId: 01jqe1twfb67zc0xhvekhpskk7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTkwMzNjODktMTY1MDgyOC02ZmFlN2JlZC1iNjFhMTUxOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.115466Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721525. 
Ctx: { TraceId: 01jqe1twfd8a6jbx7bc94n34kd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDk0NTQzMDAtNThmYThjYTItMzVhYjJhNzYtZWM0NjhjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.117724Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721526. Ctx: { TraceId: 01jqe1twfb67zc0xhvekhpskk7, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTkwMzNjODktMTY1MDgyOC02ZmFlN2JlZC1iNjFhMTUxOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.117750Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721527. Ctx: { TraceId: 01jqe1twf97c3vr828mnfeypkt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGVmNWZkNGMtMzkwZTdiNjMtNDllNTA1MzgtNThkMzQ0ZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.120204Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721528. Ctx: { TraceId: 01jqe1twfdavwwys001ne3wxdc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzUxNzlhMGQtODRmYTY5Y2EtMjY1MzVmMTMtNDhkMmFiMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.124378Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721530. Ctx: { TraceId: 01jqe1twfp08w1wap5mgq4j086, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTZiMzI2MzctMTQ5Y2YyZmEtYTFlZDlhYWUtOTg3MmZjOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.126450Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721529. Ctx: { TraceId: 01jqe1twf97c3vr828mnfeypkt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGVmNWZkNGMtMzkwZTdiNjMtNDllNTA1MzgtNThkMzQ0ZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.129861Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721531. Ctx: { TraceId: 01jqe1twfd8a6jbx7bc94n34kd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDk0NTQzMDAtNThmYThjYTItMzVhYjJhNzYtZWM0NjhjMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.131932Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721532. Ctx: { TraceId: 01jqe1twfp08w1wap5mgq4j086, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTZiMzI2MzctMTQ5Y2YyZmEtYTFlZDlhYWUtOTg3MmZjOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.136295Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721534. Ctx: { TraceId: 01jqe1twfdavwwys001ne3wxdc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzUxNzlhMGQtODRmYTY5Y2EtMjY1MzVmMTMtNDhkMmFiMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.136479Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721533. Ctx: { TraceId: 01jqe1twf97c3vr828mnfeypkt, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZGVmNWZkNGMtMzkwZTdiNjMtNDllNTA1MzgtNThkMzQ0ZDg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.141151Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721535. 
Ctx: { TraceId: 01jqe1twfdavwwys001ne3wxdc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NzUxNzlhMGQtODRmYTY5Y2EtMjY1MzVmMTMtNDhkMmFiMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS 2025-03-28T09:37:52.146861Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721536. Ctx: { TraceId: 01jqe1twga5nxkxwy3cmqp7czn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWMxYjZjZmItOTE5YjZiNjAtOWJkZjExMzEtYTViMTQyNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.151594Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721537. Ctx: { TraceId: 01jqe1twgj7qnnmssrwsj3m4ya, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTkwMzNjODktMTY1MDgyOC02ZmFlN2JlZC1iNjFhMTUxOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.155252Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721538. Ctx: { TraceId: 01jqe1twgm51bk454735wvwceb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTZiMzI2MzctMTQ5Y2YyZmEtYTFlZDlhYWUtOTg3MmZjOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.172555Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721539. Ctx: { TraceId: 01jqe1twgj7qnnmssrwsj3m4ya, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTkwMzNjODktMTY1MDgyOC02ZmFlN2JlZC1iNjFhMTUxOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.179530Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721540. Ctx: { TraceId: 01jqe1twgm51bk454735wvwceb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTZiMzI2MzctMTQ5Y2YyZmEtYTFlZDlhYWUtOTg3MmZjOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.190337Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721541. Ctx: { TraceId: 01jqe1twgj7qnnmssrwsj3m4ya, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTkwMzNjODktMTY1MDgyOC02ZmFlN2JlZC1iNjFhMTUxOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.191522Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721542. Ctx: { TraceId: 01jqe1twgm51bk454735wvwceb, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MTZiMzI2MzctMTQ5Y2YyZmEtYTFlZDlhYWUtOTg3MmZjOGU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.192769Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721543. Ctx: { TraceId: 01jqe1twga5nxkxwy3cmqp7czn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWMxYjZjZmItOTE5YjZiNjAtOWJkZjExMzEtYTViMTQyNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:52.197215Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976721544. Ctx: { TraceId: 01jqe1twga5nxkxwy3cmqp7czn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWMxYjZjZmItOTE5YjZiNjAtOWJkZjExMzEtYTViMTQyNTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings >> TPartitionTests::ShadowPartitionCounters [GOOD] >> TPartitionTests::NonConflictingCommitsBatch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQRBDescribes::PartitionLocations [GOOD] Test command err: Bucket: 100 elems count: 97 Bucket: 200 elems count: 104 Bucket: 500 elems count: 288 Bucket: 1000 elems count: 528 Bucket: 2000 elems count: 1008 Bucket: 5000 elems count: 2976 2025-03-28T09:37:43.884155Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792273327953342:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:43.884232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:37:43.950398Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792272639565925:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:43.950752Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:37:44.170232Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0019a0/r3tmp/tmpnp05AV/pdisk_1.dat 2025-03-28T09:37:44.182387Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:37:44.391621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:44.391768Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:44.392909Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:44.393000Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:44.397601Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:37:44.397784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:44.398510Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:44.435992Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18155, node 1 2025-03-28T09:37:44.519072Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:37:44.519110Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:37:44.544640Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/0019a0/r3tmp/yandexz65krf.tmp 2025-03-28T09:37:44.544671Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/0019a0/r3tmp/yandexz65krf.tmp 2025-03-28T09:37:44.544922Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/0019a0/r3tmp/yandexz65krf.tmp 2025-03-28T09:37:44.545047Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 
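The recurring "Resource pool default not found or you don't have access permissions" warnings above, followed by "Check failed: path ... error: path exist, request accepts it" and "Transaction ... completed, doublechecking", trace the usual first-use race: several sessions notice the default workload-manager pool is missing and try to create it concurrently, and the losers treat the already-existing path as success before re-fetching. A self-contained sketch of that idiom — all names here are illustrative stand-ins, not YDB APIs:

#include <cstdio>

// Lazy creation with a tolerated "already exists" race, as suggested by the
// NOT_FOUND -> create -> "path exist, request accepts it" -> doublecheck
// sequence in the log above.
enum class EStatus { Ok, NotFound, AlreadyExists };

static bool poolExists = false;  // stands in for schemeshard state

static EStatus FetchPool() {
    return poolExists ? EStatus::Ok : EStatus::NotFound;
}

static EStatus CreatePool() {
    if (poolExists)
        return EStatus::AlreadyExists;  // another session won the race
    poolExists = true;
    return EStatus::Ok;
}

static EStatus EnsureDefaultPool() {
    if (FetchPool() == EStatus::Ok)
        return EStatus::Ok;
    switch (CreatePool()) {
        case EStatus::Ok:
        case EStatus::AlreadyExists:    // "path exist, request accepts it"
            return FetchPool();         // the "doublechecking" retry in the log
        default:
            return EStatus::NotFound;
    }
}

int main() {
    std::printf("%s\n", EnsureDefaultPool() == EStatus::Ok ? "pool ready" : "failed");
}

On this reading the TX_PROXY ERROR lines are benign noise from the losing racers, which is consistent with every surrounding query finishing with status SUCCESS.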
2025-03-28T09:37:44.622165Z INFO: TTestServer started on Port 20545 GrpcPort 18155 TClient is connected to server localhost:20545 PQClient connected to localhost:18155 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:44.957084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:44.997949Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:37:47.197232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792290507823633:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.197325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792290507823624:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.197512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.200803Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:37:47.230659Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-03-28T09:37:47.231014Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792290507823638:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:37:47.485419Z node 1 :TX_PROXY ERROR: Actor# [1:7486792290507823726:2766] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:47.515272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.530536Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486792290507823736:2349], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:37:47.532408Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjkxOGQ4LTI5YWM4MGY2LWIzOTgyNmY5LWJhNzVjMzM3, ActorId: [1:7486792290507823621:2337], ActorState: ExecuteState, TraceId: 01jqe1tqnvdgzr2mh5hfacy0xx, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:37:47.534407Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:37:47.536622Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486792289819435512:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:37:47.536907Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZTdiZTg0NmYtNDkwODI1OGItZTcwM2Q3NGYtNWQ3YWIwNGI=, ActorId: [2:7486792289819435473:2312], ActorState: ExecuteState, TraceId: 01jqe1tqvx6e2x40p7mdg3n9y3, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:37:47.537359Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:37:47.594416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.722868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:37:47.963224Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqe1tr8fd2q9tb7m2bctyj3z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmJhMTEyYTItNDliMmMwNy00NDI1MDlkYy0zNGM0MzIwYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486792290507824177:3102] 2025-03-28T09:37:48.884290Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792273327953342:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:48.884359Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:37:48.949831Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792272639565925:2081];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:48.949983Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic with 5 partitions CallPersQueueGRPC request to localhost:18155 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2025-03-28T09:37:53.843293Z node 1 :PER ... 
:37:53.991543Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710679, State EXECUTED 2025-03-28T09:37:53.991556Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710679 State EXECUTED FrontTxId 281474976710679 2025-03-28T09:37:53.991577Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TPersQueue::SendEvReadSetAckToSenders 2025-03-28T09:37:53.991601Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710679, NewState WAIT_RS_ACKS 2025-03-28T09:37:53.991621Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710679 moved from EXECUTED to WAIT_RS_ACKS 2025-03-28T09:37:53.991646Z node 2 :PERSQUEUE DEBUG: [TxId: 281474976710679] PredicateAcks: 0/0 2025-03-28T09:37:53.991654Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-28T09:37:53.991682Z node 2 :PERSQUEUE DEBUG: [TxId: 281474976710679] PredicateAcks: 0/0 2025-03-28T09:37:53.991698Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] add an TxId 281474976710679 to the list for deletion 2025-03-28T09:37:53.991723Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710679, NewState DELETING 2025-03-28T09:37:53.991751Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete key for TxId 281474976710679 2025-03-28T09:37:53.991799Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:37:53.993142Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:37:53.993161Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Try execute txs with state DELETING 2025-03-28T09:37:53.993171Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] TxId 281474976710679, State DELETING 2025-03-28T09:37:53.993185Z node 2 :PERSQUEUE DEBUG: [PQ: 72075186224037892] delete TxId 281474976710679 2025-03-28T09:37:53.996405Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710680. Ctx: { TraceId: 01jqe1ty7m2dn2dm6t26m366ve, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTJjOWNjZWEtYTc5M2U0OWEtNDEyNjY3YWItN2NhNTMyMjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===Query complete TClient::Ls request: /Root/PQ/rt3.dc1--topic TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710679 CreateStep: 1743154674002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic" PathId: 13 TotalGroupCount: 5 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 2000... 
(TRUNCATED) GetTopicVersionFromPath: record Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710679 CreateStep: 1743154674002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic" PathId: 13 TotalGroupCount: 5 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 3 TabletId: 72075186224037892 Status: Active } Partitions { PartitionId: 4 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 5 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--topic" name rt3.dc1--topic version1 CallPersQueueGRPC request to localhost:18155 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2025-03-28T09:37:54.012572Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:18155 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic" } } 2025-03-28T09:37:54.516046Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--topic" NumPartitions: 5 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--topic" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 TClient::Ls request: /Root/PQ/rt3.dc1--topic TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic" 
PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710679 CreateStep: 1743154674002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic" PathId: 13 TotalGroupCount: 5 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 2000... (TRUNCATED) 2025-03-28T09:37:54.519594Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7486792320572596046:3583] connected; active server actors: 1 2025-03-28T09:37:54.519651Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 1 2025-03-28T09:37:54.519670Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 1, NodeId 2, Generation 1 2025-03-28T09:37:54.519682Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 2, NodeId 2, Generation 1 2025-03-28T09:37:54.519696Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 2025-03-28T09:37:54.519709Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 4, NodeId 2, Generation 1 2025-03-28T09:37:54.519961Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7486792320572596048:3585] connected; active server actors: 1 2025-03-28T09:37:54.519992Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 0, NodeId 2, Generation 1 2025-03-28T09:37:54.520004Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 1, NodeId 2, Generation 1 2025-03-28T09:37:54.520014Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 2, NodeId 2, Generation 1 2025-03-28T09:37:54.520027Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 response: 2025-03-28T09:37:54.520037Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 4, NodeId 2, Generation 1 Status: true Locations { PartitionId: 0 NodeId: 2 Generation: 1 } Locations { PartitionId: 1 NodeId: 2 Generation: 1 } Locations { PartitionId: 2 NodeId: 2 Generation: 1 } Locations { PartitionId: 3 NodeId: 2 Generation: 1 } Locations { PartitionId: 4 NodeId: 2 Generation: 1 } 2025-03-28T09:37:54.520347Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7486792320572596050:3587] connected; active server actors: 1 2025-03-28T09:37:54.520394Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: 
[72075186224037893][rt3.dc1--topic] addPartitionToResponse tabletId 72075186224037892, partitionId 3, NodeId 2, Generation 1 response: Status: true Locations { PartitionId: 3 NodeId: 2 Generation: 1 } 2025-03-28T09:37:54.520615Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic] pipe [1:7486792320572596052:3589] connected; active server actors: 1 response: Status: false >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.0%| [TA] $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] >> TFetchRequestTests::HappyWay [GOOD] >> TFetchRequestTests::BadTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:56.249977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:56.250054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.250083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:56.250110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:56.251049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:56.251082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:56.251163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.251221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:56.252355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:56.315415Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:56.315455Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:56.326322Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:56.326532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:56.326656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:56.333449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:56.333676Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:56.335756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.336509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:56.341084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.345216Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.345288Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.345837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:56.345890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.345934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:56.346027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.351812Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:56.449097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:56.449290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.449463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:56.449640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:56.449686Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.451707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.451819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:56.451960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-03-28T09:37:56.452012Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:56.452056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:56.452082Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:56.453762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.453808Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:56.453838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:56.455274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.455309Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.455336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.455389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.458117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:56.459667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:56.459795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:56.460609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.460726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:56.460784Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.461027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:56.461072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.461191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:56.461245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2025-03-28T09:37:56.462569Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.462602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.462762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.462791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:56.463104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.463194Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:56.463262Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:56.463286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.463314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:56.463335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.463375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:56.463416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.463439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:37:56.463461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:56.463532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:56.463572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:37:56.463597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:56.464851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:56.464937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:56.464963Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:37:56.464996Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:37:56.465034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:56.465113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:37:56.467348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 1 2025-03-28T09:37:56.467730Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:37:56.473862Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-28T09:37:56.488816Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-28T09:37:56.490730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:56.491023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.493797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Incorrect ttl column - not found in scheme, at schemeshard: 72057594046678944 2025-03-28T09:37:56.496307Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:37:56.498773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Incorrect ttl column - not found in scheme" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:56.498931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Incorrect ttl column - not found in scheme, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2025-03-28T09:37:56.499242Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardTTLTests::BuildIndexShouldSucceed |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] Test command err: 2025-03-28T09:37:43.980082Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:43.980154Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:44.011141Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:177:2192] 2025-03-28T09:37:44.013599Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
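The StatusSchemeError above comes from the TtlSettings block naming a column ("created_at") that is absent from the declared Schema. A request that passes this check has to point TTL at a declared column; a minimal sketch in the same scheme-transaction text format, assuming the intent was to expire rows by the declared "modified_at" Timestamp column:

Transaction {
  WorkingDir: "/MyRoot"
  OperationType: ESchemeOpCreateColumnTable
  CreateColumnTable {
    Name: "TTLEnabledTable"
    Schema {
      Columns { Name: "key" Type: "Uint64" NotNull: true }
      Columns { Name: "modified_at" Type: "Timestamp" }
      KeyColumnNames: "key"
    }
    # TTL must reference a column declared in Schema above
    TtlSettings { Enabled { ColumnName: "modified_at" } }
  }
}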
Value 2025-03-28T09:37:44.000000Z 2025-03-28T09:37:44.013659Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:177:2192] Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-1" IncludeFrom: true To: "m0000000003cclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-1" IncludeFrom: true To: "m0000000003uclient-1" IncludeTo: true } } CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\300\244\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\000\030\000\"\000(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\016\n\010client-2@\000H\000" StorageChannel: INLINE } 2025-03-28T09:37:44.855497Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:44.855548Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:44.870356Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [2:179:2194] 2025-03-28T09:37:44.871875Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:37:44.871945Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [2:179:2194] 2025-03-28T09:37:45.175008Z node 2 :PERSQUEUE INFO: new Cookie owner1|3dd91f25-11c19523-87c77889-1b0e71c0_0 generated for partition {0, {0, 1111}, 123} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send write: 0 Send write: 1 Send write: 2 Send write: 3 Send write: 4 Send write: 5 Send write: 6 Send write: 7 Send write: 8 Send write: 9 Got write info response. Body keys: 0, head: 10, src id info: 1 2025-03-28T09:37:48.248756Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:48.248832Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:48.264348Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [3:179:2194] 2025-03-28T09:37:48.267457Z node 3 :PERSQUEUE INFO: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2025-03-28T09:37:48.267525Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [3:179:2194] 2025-03-28T09:37:48.880931Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:48.880993Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:48.895876Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-28T09:37:48.896195Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:48.896477Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:178:2193] 2025-03-28T09:37:48.897521Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-28T09:37:48.897770Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request Got KV request 2025-03-28T09:37:48.897957Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-28T09:37:48.898802Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-28T09:37:48.899318Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-03-28T09:37:48.899436Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-28T09:37:48.899481Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:37:48.899683Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:37:48.000000Z 2025-03-28T09:37:48.899743Z node 4 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-28T09:37:48.899804Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [4:178:2193] 2025-03-28T09:37:48.899862Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-03-28T09:37:48.899915Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-03-28T09:37:48.900009Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:48.900075Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-28T09:37:48.900119Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-28T09:37:48.900476Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-03-28T09:37:48.900725Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-03-28T09:37:48.900780Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request. Create distr tx with id = 0 and act no: 1 2025-03-28T09:37:50.162235Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 Wait batch completion 2025-03-28T09:37:51.385517Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:37:51.385719Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-28T09:37:51.399010Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-28T09:37:51.399246Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:51.399292Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:51.399329Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:51.399362Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:51.399388Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:37:51.399404Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-03-28T09:37:51.399436Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-03-28T09:37:51.399462Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:51.399494Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 2 Got KV request Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 2 2025-03-28T09:37:52.579057Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: 
StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:52.579152Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 reinit with generation 7 done 2025-03-28T09:37:52.579346Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:52.579419Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:52.579457Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:52.579503Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:52.579532Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-03-28T09:37:52.579566Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-03-28T09:37:52.579590Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-03-28T09:37:52.579618Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-03-28T09:37:52.579634Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-03-28T09:37:52.579658Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:52.579697Z node 4 :PERSQUEUE DEBUG: [PQ: 7205 ... 27937] doesn't have tx writes info 2025-03-28T09:37:52.887089Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-28T09:37:52.887329Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:52.887549Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:178:2193] 2025-03-28T09:37:52.888268Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-28T09:37:52.888411Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-28T09:37:52.888531Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-28T09:37:52.889115Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-28T09:37:52.889484Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-03-28T09:37:52.889574Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-28T09:37:52.889604Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:37:52.889644Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-03-28T09:37:52.000000Z 2025-03-28T09:37:52.889672Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-28T09:37:52.889711Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:178:2193] 2025-03-28T09:37:52.889774Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-03-28T09:37:52.889817Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:52.889876Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:52.889914Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-28T09:37:52.889961Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-28T09:37:52.890197Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-03-28T09:37:52.890344Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-03-28T09:37:52.890380Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request. 
Create distr tx with id = 0 and act no: 1 2025-03-28T09:37:54.171784Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-03-28T09:37:54.171909Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 1, TxId 3 Wait batch completion 2025-03-28T09:37:55.410216Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:37:55.410386Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-28T09:37:55.410449Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-28T09:37:55.410664Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:55.410720Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:55.410763Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:55.410805Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:55.410840Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:37:55.410863Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-03-28T09:37:55.410886Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-03-28T09:37:55.410930Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:55.410987Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 2 Got KV request Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 2 2025-03-28T09:37:56.582359Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Got batch complete: 1 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 2 Wait batch completion 2025-03-28T09:37:56.582622Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2025-03-28T09:37:56.582759Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 drop done 2025-03-28T09:37:56.583033Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:56.583094Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:56.583132Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000cclient-1, m0000000000cclient-1] 2025-03-28T09:37:56.583159Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [m0000000000uclient-1, m0000000000uclient-1] 2025-03-28T09:37:56.583188Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write 
----------------- 2025-03-28T09:37:56.583223Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:56.583248Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:37:56.583270Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-03-28T09:37:56.583291Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-03-28T09:37:56.583311Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-03-28T09:37:56.583340Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:56.583378Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 2025-03-28T09:37:56.604918Z node 5 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'Root/PQ/rt3.dc1--account--topic' partition 0 error: cannot finish read request. Consumer client-1 is gone from partition 2025-03-28T09:37:56.605073Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:56.605165Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-28T09:37:56.605210Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-28T09:37:56.605347Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:56.605392Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:56.605427Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:56.605463Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:56.605490Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-03-28T09:37:56.605510Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-03-28T09:37:56.605537Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:56.605572Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request 2025-03-28T09:37:56.605988Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:56.606035Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 send read request for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 Got KV request Got batch complete: 1 Got KV request Got KV request Got KV request 2025-03-28T09:37:56.606410Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 1 Topic 
'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset 5 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 5 2025-03-28T09:37:56.606574Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 1 added 1 blobs, size 0 count 45 last offset 6, current partition end offset: 50 2025-03-28T09:37:56.606615Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 1. Send blob request. Got KV request Got KV request Wait batch completion Send disk status response with cookie: 0 Wait immediate tx complete 4 2025-03-28T09:37:56.637648Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 4 >> TPQTest::TestReserveBytes [GOOD] >> TPQTest::TestSetClientOffset ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:56.702979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:56.703042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.703068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:56.703093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:56.703120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:56.703140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:56.703192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.703253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:56.703536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:56.761971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:56.762010Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:56.771533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:56.771733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:56.771870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:56.775727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 
2025-03-28T09:37:56.775897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:56.776301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.776456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:56.777742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.778625Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.778668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.778810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:56.778841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.778878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:56.778938Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.783220Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:56.877792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:56.877987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.878187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:56.878398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:56.878457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.880336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.880454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:56.880587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.880647Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:56.880706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:56.880744Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:56.882346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.882392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:56.882423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:56.883769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.883805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.883843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.883895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.887118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:56.888681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:56.888854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:56.889825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.889932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:56.889994Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.890248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:56.890298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.890450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:56.890516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:56.892053Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.892098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.892243Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.892276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:56.892651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.892697Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:56.892776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:56.892804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.892836Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:56.892865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.892893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:56.892928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.892959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:37:56.892984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:56.893042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:56.893092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:37:56.893120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:56.894802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:56.894888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:56.894916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:37:56.894947Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:37:56.894989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:56.895071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:37:56.897508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:37:56.897970Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:37:56.898981Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-28T09:37:56.915636Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork 
(SchemeCache [1:272:2263]) 2025-03-28T09:37:56.918097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:56.918410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.918499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } }, at schemeshard: 72057594046678944 2025-03-28T09:37:56.918892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2025-03-28T09:37:56.919667Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:37:56.922485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:56.922619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-03-28T09:37:56.922960Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:56.249988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:56.250081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.250118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:56.250158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:56.251093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:56.251142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
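In the CreateTableShouldFailOnWrongColumnType run above, the request is structurally valid but declares the TTL column "modified_at" as String, which TCreateTable Propose rejects with "Unsupported column type": TTL needs a date/time-typed column. A sketch of a variant that should pass this validation, reusing the Timestamp type that the neighbouring TTL tests in this suite declare for "modified_at" (same transaction text format; that this variant succeeds is an assumption, not shown in this log):

Transaction {
  WorkingDir: "/MyRoot"
  OperationType: ESchemeOpCreateTable
  CreateTable {
    Name: "TTLEnabledTable"
    Columns { Name: "key" Type: "Uint64" }
    Columns { Name: "modified_at" Type: "Timestamp" }  # String is rejected for TTL
    KeyColumnNames: "key"
    TTLSettings { Enabled { ColumnName: "modified_at" } }
  }
}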
2025-03-28T09:37:56.251230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.251322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:56.252382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:56.319527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:56.319565Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:56.329010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:56.329217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:56.329315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:56.333541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:56.333686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:56.335771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.336482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:56.340904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.345113Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.345225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.345816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:56.345855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.345907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:56.346007Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.350892Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:56.447563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:56.447729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.447884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:56.448056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 
72057594046678944 2025-03-28T09:37:56.448103Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.449641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.449746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:56.449869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.449918Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:56.449963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:56.449986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:56.451443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.451495Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:56.451531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:56.453161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.453207Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.453244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.453284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.460097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:56.461621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:56.461741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:56.462370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.462462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:56.462509Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.462815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:56.462876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.463052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:56.463135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:56.464436Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.464474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.464577Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.464603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:56.464896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.464933Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:56.464991Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:56.465012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.465035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:56.465053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.465079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:56.465105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.465127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:37:56.465145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:56.465180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:56.465205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:37:56.465226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:56.466364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:56.466430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:56.466458Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
Publication in-flight, co ... 3:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 200 2025-03-28T09:37:56.874329Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 128 -> 129 2025-03-28T09:37:56.874452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:37:56.874531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:37:56.880241Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.880303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:37:56.880494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:37:56.880707Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.880743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 2025-03-28T09:37:56.880785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 4 2025-03-28T09:37:56.882084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.882140Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:37:56.882766Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:37:56.882856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:37:56.882889Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:37:56.882929Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2025-03-28T09:37:56.882969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2025-03-28T09:37:56.883812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:37:56.883887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:37:56.883912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:37:56.883938Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, 
txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 3 2025-03-28T09:37:56.883965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:37:56.884022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-28T09:37:56.884570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1068 } } 2025-03-28T09:37:56.884607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2025-03-28T09:37:56.884743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1068 } } 2025-03-28T09:37:56.884847Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 200 OrderId: 103 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1068 } } 2025-03-28T09:37:56.885322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 540 RawX2: 4294969780 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-28T09:37:56.885360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2025-03-28T09:37:56.885465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 540 RawX2: 4294969780 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-28T09:37:56.885506Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:37:56.885593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 540 RawX2: 4294969780 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2025-03-28T09:37:56.885667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:4, datashard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.885702Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.885749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 
2025-03-28T09:37:56.885790Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 129 -> 240 2025-03-28T09:37:56.888225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:37:56.889557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:37:56.889666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.889779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.889887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.889929Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-28T09:37:56.890023Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:37:56.890054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:37:56.890089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:37:56.890119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:37:56.890182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-28T09:37:56.890244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:403:2369] message: TxId: 103 2025-03-28T09:37:56.890286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:37:56.890320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:37:56.890350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:37:56.890445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:37:56.892108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:37:56.892149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:568:2504] TestWaitNotification: OK eventTxId 103 W0000 00:00:1743154676.892691 639543 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 104 2025-03-28T09:37:56.895447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/SubDomain" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:56.895816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.895937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable 
Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, schema: Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } }, at schemeshard: 72057594046678944 2025-03-28T09:37:56.896298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, at schemeshard: 72057594046678944 2025-03-28T09:37:56.898113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "TTL run interval cannot be less than limit: 1800" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:56.898314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot/SubDomain, subject: , status: StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, operation: CREATE TABLE, path: /MyRoot/SubDomain/Table4 TestModificationResult got TxId: 104, wait until txId: 104 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] Test command err: 2025-03-28T09:37:47.084507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:47.085043Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:47.085118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00174a/r3tmp/tmp8IL3p1/pdisk_1.dat 2025-03-28T09:37:47.538221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.590200Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:47.634021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:47.634934Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:47.647960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:47.746710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:37:48.117124Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:789:2660], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:48.117354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:799:2665], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:48.117456Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:48.127123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:37:48.287148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:803:2668], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:37:48.371281Z node 1 :TX_PROXY ERROR: Actor# [1:881:2715] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:55.840355Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1trjh61nqpmgpsmqv8ah9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGMwOTMwYmMtZDYzMzRlMzUtNGM0M2Y0OTMtM2I4ZDhiOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:55.915247Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1trjh61nqpmgpsmqv8ah9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGMwOTMwYmMtZDYzMzRlMzUtNGM0M2Y0OTMtM2I4ZDhiOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:55.957303Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1trjh61nqpmgpsmqv8ah9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZGMwOTMwYmMtZDYzMzRlMzUtNGM0M2Y0OTMtM2I4ZDhiOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:37:56.268974Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1v0aj8ejxkga7v7ppqa7c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjFmY2VhMjgtMjA3YmE5NDUtYzY0NWUzMDAtNmY0NWM3Mjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] >> TPartitionTests::DataTxCalcPredicateOrder [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Huge_ProposeTransacton [GOOD] Test command err: 2025-03-28T09:37:43.959530Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:37:43.970023Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:37:43.970405Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] doesn't have tx info 2025-03-28T09:37:43.970465Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:37:43.970523Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] no config, start with empty partitions and default config 2025-03-28T09:37:43.970586Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:37:43.970637Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:43.970718Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:44.005599Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:204:2210], now have 1 active actors on pipe 2025-03-28T09:37:44.005745Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:37:44.023354Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-03-28T09:37:44.029373Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-03-28T09:37:44.029507Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:44.030548Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "consumer" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { 
PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "consumer" Generation: 1 Important: true } 2025-03-28T09:37:44.030679Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:44.031092Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:44.031494Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:212:2216] 2025-03-28T09:37:44.032475Z node 1 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-28T09:37:44.032564Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:212:2216] 2025-03-28T09:37:44.032624Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:37:44.033502Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:44.033621Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2025-03-28T09:37:44.033679Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2025-03-28T09:37:44.033737Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2025-03-28T09:37:44.033772Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2025-03-28T09:37:44.033923Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:44.033968Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:44.034025Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:44.034066Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:44.034114Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-28T09:37:44.034141Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-28T09:37:44.034164Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cconsumer 2025-03-28T09:37:44.034188Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uconsumer 2025-03-28T09:37:44.034219Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:44.034262Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:37:44.034359Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 
initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:44.034428Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:44.034586Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:37:44.040358Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:44.040756Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:219:2221], now have 1 active actors on pipe 2025-03-28T09:37:44.041443Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:222:2223], now have 1 active actors on pipe 2025-03-28T09:37:44.042316Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67890 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-03-28T09:37:44.042420Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-03-28T09:37:44.042617Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2025-03-28T09:37:44.042664Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-03-28T09:37:44.042832Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2025-03-28T09:37:44.042886Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-03-28T09:37:44.042928Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARING 2025-03-28T09:37:44.043102Z node 1 :PERSQUEUE DEBUG: [TxId: 67890] save tx TxId: 67890 State: PREPARED MinStep: 133 MaxStep: 30133 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2025-03-28T09:37:44.043218Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:37:44.051645Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:37:44.051713Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-03-28T09:37:44.051754Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State PREPARING 2025-03-28T09:37:44.051793Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState PREPARED 2025-03-28T09:37:44.052157Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 175 RawX2: 4294969486 } TxId: 67891 Data { Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2025-03-28T09:37:44.052209Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] distributed transaction 2025-03-28T09:37:44.052295Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Propose TxId 67891, WriteId (empty maybe) 2025-03-28T09:37:44.052334Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2025-03-28T09:37:44.052370Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] 
TxId 67891, State UNKNOWN 2025-03-28T09:37:44.052410Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2025-03-28T09:37:44.052453Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, NewState PREPARING 2025-03-28T09:37:44.052626Z node 1 :PERSQUEUE DEBUG: [TxId: 67891] save tx TxId: 67891 State: PREPARED MinStep: 135 MaxStep: 30135 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 Begin: 0 End: 0 Consumer: "consumer" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 175 RawX2: 4294969486 } Partitions { } 2025-03-28T09:37:44.052731Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:37:44.059853Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:37:44.059923Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state PREPARING 2025-03-28T09:37:44.059965Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67891, State PREP ... aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2496" Generation: 2 Important: false } Consumers { Name: 
"fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2497" Generation: 2 Important: false } Consumers { Name: 
"fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2498" Generation: 2 Important: false } Consumers { Name: 
"fake-consumer-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-2499" Generation: 2 Important: false } } BootstrapConfig { } SourceActor { RawX1: 177 RawX2: 25769805968 } Partitions { Partition { PartitionId: 0 } Partition { PartitionId: 1 } } 2025-03-28T09:37:55.818164Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:37:55.832938Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:37:55.832990Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Try execute txs with state 
CALCULATED 2025-03-28T09:37:55.833019Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, State CALCULATED 2025-03-28T09:37:55.833055Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 State CALCULATED FrontTxId 67890 2025-03-28T09:37:55.833085Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890, NewState WAIT_RS 2025-03-28T09:37:55.833118Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2025-03-28T09:37:55.833187Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2025-03-28T09:37:55.833228Z node 6 :PERSQUEUE DEBUG: [PQ: 72057594037927937] HaveParticipantsDecision 0 >> TPartitionTests::DifferentWriteTxBatchingOptions >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test |96.1%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed >> PQCountersLabeled::PartitionFirstClass [GOOD] >> PQCountersLabeled::ImportantFlagSwitching ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:56.819345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:56.819447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.819485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:56.819524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:56.819585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:56.819614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:56.819685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.819761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:56.820096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:56.893054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:56.893122Z node 1 
:IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:56.907703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:56.908019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:56.908198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:56.913896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:56.914118Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:56.914704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.914946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:56.916829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.918201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.918280Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.918449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:56.918487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.918534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:56.918617Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.925373Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:57.045141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:57.045400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.045644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:57.045864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:57.045967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.048609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:57.048727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:57.048874Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.048926Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:57.049002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:57.049045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:57.051306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.051368Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:57.051422Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:57.053050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.053102Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.053148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:57.053227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:57.062911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:57.065162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:57.065400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:57.066255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:57.066371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:57.066420Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:57.066752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:57.066799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:57.066956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:57.067014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:57.068984Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:57.069054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:57.069224Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:57.069256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:57.069590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.069633Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:57.069728Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:57.069754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:57.069784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:57.069812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:57.069839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:57.069875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:57.069902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:37:57.069925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:57.069978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:57.070014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:37:57.070040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:57.071533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:57.071626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:57.071657Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ublishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:37:57.381248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-28T09:37:57.388545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:37:57.388645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:37:57.390812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1471 } } 2025-03-28T09:37:57.390862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-28T09:37:57.390996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1471 } } 2025-03-28T09:37:57.391092Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1471 } } 2025-03-28T09:37:57.392057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 408 RawX2: 4294969673 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:37:57.392105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-28T09:37:57.392222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 408 RawX2: 4294969673 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:37:57.392265Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:37:57.392354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 408 RawX2: 4294969673 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:37:57.392424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:57.392477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply 
TEvSchemaChanged CollectSchemaChanged: false 2025-03-28T09:37:57.394294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.394823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.407462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:37:57.407523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:37:57.407646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:37:57.407688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:37:57.407754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:37:57.407803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:57.407848Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.407907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:37:57.407946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:37:57.407972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:37:57.409739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.410175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.410250Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2025-03-28T09:37:57.410313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-03-28T09:37:57.410350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2025-03-28T09:37:57.410442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-03-28T09:37:57.410490Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 240 -> 240 2025-03-28T09:37:57.412214Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.412276Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:37:57.412386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:37:57.412424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:37:57.412462Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:37:57.412492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:37:57.412529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:37:57.412620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-03-28T09:37:57.412698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:37:57.412743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:37:57.412775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:37:57.412914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:37:57.412950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:37:57.414514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:37:57.414563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:435:2396] TestWaitNotification: OK eventTxId 102 2025-03-28T09:37:57.415079Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:37:57.415332Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 264us result status StatusSuccess 2025-03-28T09:37:57.415796Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: true IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD]
|96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest
>> TSchemeShardColumnTableTTL::AlterColumnTable
>> TPQTest::TestPQPartialRead [GOOD]
>> TPQTest::TestPQRead
>> TSchemeShardTTLTestsWithReboots::CopyTable
>> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers
>> TPQTest::TestWaitInOwners [GOOD]
>> TPQTest::TestWriteOffsetWithBigMessage
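The describe result above records the table shape these ut_ttl cases operate on: a row table with a `Uint64` key and a `Timestamp` column (`key`/`ts` on `TTLEnabledTableCopy`, with `IsBackup: true` marking it as the output of a copy-table case), and TTL metadata of the form `ExpireAfterSeconds: 3600` with a delete tier in the later describe outputs. As a hedged sketch only — the suite drives schemeshard through test actors, so this exact DDL is an assumption rather than what the harness executes — the equivalent user-facing YQL would be:

```sql
-- Illustrative YQL matching the logged schema; table and column
-- names are taken from the describe output above, not test sources.
CREATE TABLE `/MyRoot/TTLEnabledTable` (
    key Uint64,
    ts Timestamp,
    PRIMARY KEY (key)
);

-- Row TTL corresponding to ExpireAfterSeconds: 3600 in the log:
-- rows become eligible for deletion one hour after the `ts` value.
ALTER TABLE `/MyRoot/TTLEnabledTable`
    SET (TTL = Interval("PT1H") ON ts);
```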
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD]
Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:57.396714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:57.396781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:57.396812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:57.396867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:57.396901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:57.396923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:57.396976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:57.397034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:57.397301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:57.459262Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:57.459310Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:57.469419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:57.469638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:57.469780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:57.474258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:57.474450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:57.474947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:57.475110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:57.476646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:57.477648Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:57.477694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:57.477830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:57.477867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:57.477896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:57.477975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.482772Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:57.613039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:57.613292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.613517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:57.613734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:57.613831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.616270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:57.616403Z node 1 :FLAT_TX_SCHEMESHARD
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:57.616591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.616656Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:57.616725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:57.616762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:57.618870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.618922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:57.618959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:57.620630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.620673Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.620714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:57.620761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:57.624522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:57.626339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:57.626524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:57.627528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:57.627657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:57.627714Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:57.628040Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:57.628097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:57.628246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-03-28T09:37:57.628313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:57.630450Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:57.630507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:57.630709Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:57.630766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:57.631192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.631238Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:57.631332Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:57.631365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:57.631428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:57.631465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:57.631507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:57.631554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:57.631589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:37:57.631621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:57.631687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:57.631740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:37:57.631775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:57.633641Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:57.633744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:57.633781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ate->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 102 at step: 5000003 2025-03-28T09:37:57.938186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:57.938269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:57.938316Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:37:57.938560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-28T09:37:57.938678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-28T09:37:57.941167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:57.941208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:37:57.941459Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:57.941497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:37:57.941830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.941882Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:37:57.942235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:37:57.942322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:37:57.942351Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:37:57.942379Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-28T09:37:57.942427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:37:57.942488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-28T09:37:57.944513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:37:57.967434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: 
TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 933 } } 2025-03-28T09:37:57.967511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-28T09:37:57.967661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 933 } } 2025-03-28T09:37:57.967779Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 933 } } FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:37:57.968717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:37:57.968767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2025-03-28T09:37:57.968898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:37:57.968948Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:37:57.969051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:37:57.969119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:57.969159Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.969210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:37:57.969255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:37:57.971675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.972093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.972366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 
72057594046678944 2025-03-28T09:37:57.972404Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:37:57.972497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:37:57.972547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:37:57.972589Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:37:57.972621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:37:57.972663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:37:57.972726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:375:2343] message: TxId: 102 2025-03-28T09:37:57.972769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:37:57.972803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:37:57.972827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:37:57.972938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:37:57.974402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:37:57.974442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:458:2419] TestWaitNotification: OK eventTxId 102 2025-03-28T09:37:57.974875Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:37:57.975105Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 233us result status StatusSuccess 2025-03-28T09:37:57.975476Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 
IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] >> TSchemeShardTTLUtility::ValidateTiers [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::ValidateTiers [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSuccess >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] >> TPartitionTests::NonConflictingCommitsBatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:57.755919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:57.756012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:57.756050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:57.756085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:57.756142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:57.756166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:57.756242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:57.756322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:57.756661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:57.844830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:57.844879Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:57.859871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:57.860152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:57.860314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:57.866022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:57.866240Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:57.866851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:57.867044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:57.868773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:57.869995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:57.870054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:57.870210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:57.870254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:57.870334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:57.870415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.876558Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:57.981744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:57.981905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.982078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:57.982222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:57.982289Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.984006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:57.984115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:57.984240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.984282Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:57.984347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:57.984368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:57.986031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.986093Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:57.986125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:57.987842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.987884Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.987924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:57.987971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:57.997000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:57.999040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:57.999235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:58.000226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:58.000355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:58.000414Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, 
operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:58.000694Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:58.000747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:58.000949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:58.001040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:58.003009Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:58.003057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:58.003219Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:58.003257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:58.003702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:58.003751Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:58.003837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:58.003871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:58.003904Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:58.003930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:58.003963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:58.004012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:58.004043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:37:58.004070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:58.004125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:58.004163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:37:58.004193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:58.006029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:58.006130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:58.006166Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
:37:58.505330Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-28T09:37:58.505386Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-03-28T09:37:58.505573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-03-28T09:37:58.505623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-03-28T09:37:58.505656Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 2025-03-28T09:37:58.505744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:58.505825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:58.505882Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-03-28T09:37:58.505918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-03-28T09:37:58.507436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-28T09:37:58.507487Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-03-28T09:37:58.507552Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-28T09:37:58.507574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-28T09:37:58.507601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-28T09:37:58.507626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-28T09:37:58.507656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 
281474976710760, ready parts: 1/1, is published: true 2025-03-28T09:37:58.507701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:123:2149] message: TxId: 281474976710760 2025-03-28T09:37:58.507745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-28T09:37:58.507773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-03-28T09:37:58.507795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-03-28T09:37:58.507851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-03-28T09:37:58.509155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-03-28T09:37:58.509198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-03-28T09:37:58.509251Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-03-28T09:37:58.509335Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:37:58.510621Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-28T09:37:58.510703Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 
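The `TBuildInfo` records above trace an online secondary-index build through its lock, initiate, apply, and unlock transactions; the records that follow mark the transition from `Unlocking` to `Done`. For orientation only, a user-facing YQL statement that triggers this kind of build is sketched below; the index and column names come from the log, but the test exercises the index builder directly, so treat the statement as an illustrative assumption rather than the suite's actual input.

```sql
-- Illustrative DDL for the build traced above: a global secondary
-- index named UserDefinedIndexByValue over column `value`
-- (IndexType: EIndexTypeGlobal, IndexColumn: value in TBuildInfo).
ALTER TABLE `/MyRoot/TTLEnabledTable`
    ADD INDEX UserDefinedIndexByValue GLOBAL ON (value);
```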
2025-03-28T09:37:58.510740Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-28T09:37:58.512034Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-28T09:37:58.512122Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:37:58.512148Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-03-28T09:37:58.512219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:37:58.512254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:474:2435] TestWaitNotification: OK eventTxId 102 2025-03-28T09:37:58.512651Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:37:58.512868Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 216us result status StatusSuccess 2025-03-28T09:37:58.513268Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } 
TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] >> TPartitionTests::FailedTxsDontBlock [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:58.337806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:58.337871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:58.337896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:58.337919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:58.337948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:58.337966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:58.338011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:58.338064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:58.338315Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TxInitSchema.Execute 2025-03-28T09:37:58.396343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:58.396380Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:58.406431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:58.406638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:58.406817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:58.410758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:58.410933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:58.411329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:58.411480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:58.412876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:58.413759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:58.413797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:58.413914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:58.413956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:58.413991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:58.414088Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:58.418937Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:58.497583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:58.497744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:58.497890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:58.498070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:58.498108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:58.499659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:58.499769Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:58.499905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:58.499949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:58.499996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:58.500021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:58.501435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:58.501469Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:58.501491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:58.502536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:58.502563Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:58.502589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:58.502626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:58.504976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:58.506188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:58.506300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:58.506947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:58.507028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:58.507066Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:58.507260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:58.507319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:58.507502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:58.507637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:58.508857Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:58.508887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:58.508989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:58.509012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:58.509286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:58.509313Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:58.509371Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:58.509402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:58.509429Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:58.509448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:58.509472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:58.509517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:58.509538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:37:58.509556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:58.509589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:58.509613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:37:58.509635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:58.510805Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:58.510873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:58.510895Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
nc, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:37:59.002152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2025-03-28T09:37:59.002219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:37:59.002328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2025-03-28T09:37:59.002351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2025-03-28T09:37:59.002375Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000006 2025-03-28T09:37:59.002558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000006, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.002659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000006 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:59.002738Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000006 2025-03-28T09:37:59.002771Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710760:0 128 -> 240 2025-03-28T09:37:59.004014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.004043Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2025-03-28T09:37:59.004109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-28T09:37:59.004136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-28T09:37:59.004156Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976710760:0 progress is 1/1 2025-03-28T09:37:59.004171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-28T09:37:59.004202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710760, ready 
parts: 1/1, is published: true 2025-03-28T09:37:59.004263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:123:2149] message: TxId: 281474976710760 2025-03-28T09:37:59.004293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2025-03-28T09:37:59.004321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976710760:0 2025-03-28T09:37:59.004338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976710760:0 2025-03-28T09:37:59.004375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2025-03-28T09:37:59.005453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2025-03-28T09:37:59.005488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Message: TxId: 281474976710760 2025-03-28T09:37:59.005544Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfoId: 102 2025-03-28T09:37:59.005605Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, txId# 281474976710760, buildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:37:59.006916Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-28T09:37:59.006969Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 
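Each TBuildInfo dump carries two identical-shaped counters, Processed and Billed, over the quadruple { upload rows, upload bytes, read rows, read bytes }. A natural reading is that billing is driven by the difference between the two; the helper below is a hypothetical sketch of that bookkeeping under that assumption, not YDB code (in this run both counters are zero throughout):

```cpp
// Hypothetical helper, assuming billing consumes the Processed-minus-Billed
// delta of the counters printed in the TBuildInfo dumps above.
#include <cstdint>
#include <iostream>

struct TCounters {                        // mirrors "{ upload rows: ..., ... }"
    uint64_t UploadRows = 0, UploadBytes = 0, ReadRows = 0, ReadBytes = 0;
};

// Amount processed but not yet billed.
TCounters Unbilled(const TCounters& processed, const TCounters& billed) {
    return { processed.UploadRows  - billed.UploadRows,
             processed.UploadBytes - billed.UploadBytes,
             processed.ReadRows    - billed.ReadRows,
             processed.ReadBytes   - billed.ReadBytes };
}

int main() {
    TCounters processed{}, billed{};      // all zeros, as in this async-index run
    TCounters delta = Unbilled(processed, billed);
    std::cout << "unbilled upload rows: " << delta.UploadRows << "\n";
}
```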
2025-03-28T09:37:59.007006Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2025-03-28T09:37:59.007998Z node 1 :BUILD_INDEX INFO: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: id# 102 2025-03-28T09:37:59.008069Z node 1 :BUILD_INDEX DEBUG: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Resume: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 2], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:383:2355], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000004, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }, Billed: { upload rows: 0, upload bytes: 0, read rows: 0, read bytes: 0 }} 2025-03-28T09:37:59.008095Z node 1 :BUILD_INDEX TRACE: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2025-03-28T09:37:59.008166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:37:59.008191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:474:2435] TestWaitNotification: OK eventTxId 102 2025-03-28T09:37:59.008529Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:37:59.008739Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 217us result status StatusSuccess 2025-03-28T09:37:59.009107Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } 
TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPartitionTests::GetUsedStorage >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::NonConflictingCommitsBatch [GOOD] Test command err: 2025-03-28T09:37:51.249478Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:51.249555Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:51.265448Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:177:2192] 2025-03-28T09:37:51.267143Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-03-28T09:37:51.000000Z 2025-03-28T09:37:51.267200Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:177:2192] Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\230\333\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\230\333\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\230\333\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-03-28T09:37:51.653567Z node 1 :PERSQUEUE WARN: [PQ: 72057594037927937, Partition: 0, State: StateIdle] commit to future - topic Root/PQ/rt3.dc1--account--topic partition 0 client client EndOffset 10 offset 13 Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\020\n\030\000(\230\333\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient" Value: "\010\n\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\n\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } 2025-03-28T09:37:51.990865Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:51.990925Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:52.004463Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [2:179:2194] 2025-03-28T09:37:52.005885Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:37:52.000000Z 2025-03-28T09:37:52.005931Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:179:2194] 2025-03-28T09:37:52.635317Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.635397Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:52.651518Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:179:2194] 2025-03-28T09:37:52.652499Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:179:2194] 2025-03-28T09:37:52.653211Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. 
Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2025-03-28T09:37:52.653301Z node 3 :PERSQUEUE INFO: new Cookie owner1|4a047a6-24a6dc2c-4deb9a25-213e586b_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send disk status response with cookie: 0 2025-03-28T09:37:52.963859Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2025-03-28T09:37:53.395379Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:53.395461Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:53.411973Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] bootstrapping {0, {0, 1111}, 123} [4:179:2194] 2025-03-28T09:37:53.416244Z node 4 :PERSQUEUE INFO: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:37:53.416324Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {0, {0, 1111}, 123}, State: StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [4:179:2194] 2025-03-28T09:37:53.726462Z node 4 :PERSQUEUE INFO: new Cookie owner1|e3b981b9-de41952a-601c7a0e-77b0b621_0 generated for partition {0, {0, 1111}, 123} topic 'rt3.dc1--account--topic' owner owner1 Send write: 0 Send write: 1 Send write: 2 Send write: 3 Send write: 4 Send write: 5 Send write: 6 Send write: 7 Send write: 8 Send write: 9 Got write info response. Body keys: 0, head: 10, src id info: 1 2025-03-28T09:37:56.784883Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:56.784932Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:56.796358Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-28T09:37:56.796581Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:56.796791Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:178:2193] 2025-03-28T09:37:56.797401Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-28T09:37:56.797508Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-28T09:37:56.797611Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-28T09:37:56.798443Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-28T09:37:56.798832Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-03-28T09:37:56.798898Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-28T09:37:56.798925Z node 5 :PERSQUEUE DEBUG: 
[Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:37:56.798958Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:37:56.000000Z 2025-03-28T09:37:56.798984Z node 5 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-28T09:37:56.799019Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [5:178:2193] 2025-03-28T09:37:56.799070Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-03-28T09:37:56.799111Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:56.799161Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-2 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:56.799193Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-2 send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-28T09:37:56.799224Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-28T09:37:56.799246Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 0 initiated queuesize 1 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-28T09:37:56.799484Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-2 offset 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 0 2025-03-28T09:37:56.799605Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 1 blobs, size 684 count 50 last offset 1, current partition end offset: 50 2025-03-28T09:37:56.799636Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. Send blob request. 
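The DEBUG lines above walk a partition through a fixed initializer pipeline before it reaches StateIdle. The step names below are taken verbatim from the log, in the order observed for partition 0; the sequential driver loop itself is an assumption for readability, not the actual TInitializer code in ydb/core/persqueue:

```cpp
// Sketch of the initializer pipeline traced in the DEBUG lines above.
// Step names are verbatim from the log; the loop is editorial.
#include <iostream>
#include <string>
#include <vector>

int main() {
    // Order exactly as observed for partition 0 of Root/PQ/rt3.dc1--account--topic.
    const std::vector<std::string> steps = {
        "TInitConfigStep",        "TInitInternalFieldsStep",
        "TInitDiskStatusStep",    "TInitMetaStep",
        "TInitInfoRangeStep",     "TInitDataRangeStep",
        "TInitDataStep",          "TInitEndWriteTimestampStep",
    };
    for (const auto& step : steps)
        std::cout << "Start initializing step " << step << "\n";
    std::cout << "Initializing completed.\n";  // then StateInit -> StateIdle
}
```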
2025-03-28T09:37:57.109425Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 session is set to 0 (startOffset 0) session session-client-0 2025-03-28T09:37:57.109597Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:57.109645Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:57.109706Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:57.109751Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:57.109782Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-03-28T09:37:57.109808Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-03-28T09:37:57.109843Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:57.109882Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request Got cmd write: CmdWrite { Key: "i0000000000" Value: "\010\000\0202\030\000(\240\202\366\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000000cclient-0" Value: "\010\000\020\001\030\001\"\020session-client-0(\0000\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient-0" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-client-0" StorageChannel: INLINE } 2025-03-28T09:37:57.140812Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Create distr tx with id = 0 and act no: 1 Created Tx with id 3 as act# 3 Created Tx with id 4 as act# 4 2025-03-28T09:37:58.076498Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-03-28T09:37:58.076603Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 3 2025-03-28T09:37:58.076627Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 4 2025-03-28T09:37:59.354966Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 6 Wait batch completion Wait kv request 2025-03-28T09:37:59.355331Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 5 (startOffset 0) session session-client-0 2025-03-28T09:37:59.355386Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2025-03-28T09:37:59.355444Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 3 2025-03-28T09:37:59.355496Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 4 2025-03-28T09:37:59.355525Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] 
TPartition::CommitWriteOperations TxId: 4 2025-03-28T09:37:59.355573Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 10 (startOffset 0) session session-client-0 2025-03-28T09:37:59.369749Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=incorrect offset range (gap) 2025-03-28T09:37:59.370069Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:59.370111Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:59.370150Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:59.370198Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:59.370224Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:37:59.370241Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-2 2025-03-28T09:37:59.370257Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-2 2025-03-28T09:37:59.370272Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-1 2025-03-28T09:37:59.370296Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-1 2025-03-28T09:37:59.370312Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cclient-0 2025-03-28T09:37:59.370325Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uclient-0 2025-03-28T09:37:59.370350Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:59.370380Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Wait tx committed for tx 3 2025-03-28T09:37:59.391138Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait tx committed for tx 4 Wait immediate tx complete 6 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 6 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:59.071221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:59.071296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:59.071321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:59.071345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:59.071374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:59.071410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:59.071463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:59.071527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:59.071813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:59.130833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:59.130868Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:59.140975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:59.141236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:59.141416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:59.145590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:59.145779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:59.146263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.146416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:59.147827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:59.148737Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:59.148775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:59.148894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:59.148926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:59.148964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:59.149047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.154288Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:59.240382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: 
"MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:59.240557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.240721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:59.240909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:59.240948Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.242533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.242641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:59.242783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.242832Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:59.242879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:59.242912Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:59.244242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.244277Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:59.244301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:59.245491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.245519Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.245544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:59.245577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.247952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:59.249173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:59.249316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 
FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:59.249994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.250075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:59.250118Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:59.250320Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:59.250354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:59.250466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:59.250517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:59.251758Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:59.251793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:59.251915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:59.251941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:59.252259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.252294Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:59.252389Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:59.252412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.252435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:59.252455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.252478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:59.252509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.252532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:37:59.252550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:59.252589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:59.252614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 
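The "Change state for txid 1:0" lines above trace a sub-operation through the numeric states 2 -> 3 -> 128 -> 240: parts created, parts configured, proposed to the coordinator, done. Only the numeric values come from the log; the enum labels in this sketch are editorial guesses matched to the adjacent messages, not the schemeshard source:

```cpp
// Minimal sketch (assumed semantics) of the state progression logged above.
#include <cstddef>
#include <iostream>
#include <iterator>

enum ETxState : int {
    CreateParts    = 2,    // "TCreateParts ... no shards to create, do next state"
    ConfigureParts = 3,    // "NSubDomainState::TConfigureParts ... ProgressState"
    Propose        = 128,  // "NSubDomainState::TPropose ... HandleReply TEvOperationPlan"
    Done           = 240,  // "TDone opId# 1:0 ProgressState"
};

int main() {
    const ETxState path[] = { CreateParts, ConfigureParts, Propose, Done };
    for (std::size_t i = 0; i + 1 < std::size(path); ++i)
        std::cout << "Change state for txid 1:0 " << path[i]
                  << " -> " << path[i + 1] << "\n";
}
```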
2025-03-28T09:37:59.252634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:59.253935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:59.254020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:59.254053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... T09:37:59.406207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:59.406241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:59.406370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:37:59.406504Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:59.406548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:37:59.406585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:37:59.406876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.406913Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:37:59.407924Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:37:59.408007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:37:59.408031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:37:59.408067Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:37:59.408105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:59.408550Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:37:59.408605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:37:59.408625Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at 
schemeshard: 72057594046678944, txId: 101 2025-03-28T09:37:59.408642Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:37:59.408657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:37:59.408705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:37:59.409703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1024 } } 2025-03-28T09:37:59.409740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-28T09:37:59.409826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1024 } } 2025-03-28T09:37:59.409898Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1024 } } 2025-03-28T09:37:59.410797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:37:59.410831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-28T09:37:59.410924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:37:59.410954Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:37:59.411078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:37:59.411136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.411161Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 
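The entries above trace the ProposedWaitParts phase of txId 101: the schemeshard collects a TEvSchemaChanged ack from every affected datashard, and only when the last ack arrives ("left await: 0") does the operation state advance 129 -> 240. A minimal self-contained sketch of that collect-then-advance pattern follows; all names are invented for illustration and this is not the schemeshard's real class.

#include <cstdint>
#include <iostream>
#include <unordered_set>

// Illustrative only: models the "collect TEvSchemaChanged from all shards,
// then flip the tx state 129 -> 240" pattern visible in the log above.
struct ProposedWaitParts {
    std::unordered_set<uint64_t> pendingShards;  // datashards yet to ack
    int state = 129;                             // ProposedWaitParts

    // Record one shard's schema-changed ack; true once all acks are in.
    bool OnSchemaChanged(uint64_t shardId) {
        pendingShards.erase(shardId);
        if (pendingShards.empty()) {
            state = 240;  // Done
            return true;
        }
        return false;
    }
};

int main() {
    ProposedWaitParts op{{72075186233409546ULL}};  // single datashard, as in this test
    if (op.OnSchemaChanged(72075186233409546ULL))
        std::cout << "all shard acks received, state=" << op.state << "\n";
    return 0;
}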
2025-03-28T09:37:59.411188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:37:59.411211Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-28T09:37:59.411947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:37:59.413172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:37:59.413236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.413295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.413674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.413703Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:37:59.413779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:37:59.413805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:37:59.413829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:37:59.413849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:37:59.413872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-28T09:37:59.413911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 101 2025-03-28T09:37:59.413943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:37:59.413970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:37:59.414002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:37:59.414089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:37:59.415229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:37:59.415258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:336:2315] TestWaitNotification: OK eventTxId 101 2025-03-28T09:37:59.415589Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:37:59.415812Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 206us result status StatusSuccess 2025-03-28T09:37:59.416097Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase >> TPartitionTests::GetUsedStorage [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:59.460770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:59.460838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:59.460865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:59.460891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:59.460924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
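The block that follows replays TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL, whose alter (txId 102, further below) is rejected with StatusInvalidParameter: "Cannot enable TTL on dropped column: 'modified_at'". A rough model of the validation that test exercises, assuming a simplified request shape — the types here are invented, and the real check lives in the schemeshard's alter-table proposal code:

#include <iostream>
#include <optional>
#include <set>
#include <string>

// Simplified stand-in for ESchemeOpAlterTable with DropColumns + TTLSettings.
struct AlterRequest {
    std::set<std::string> dropColumns;       // AlterTable.DropColumns
    std::optional<std::string> ttlColumn;    // TTLSettings.Enabled.ColumnName
};

// Returns the rejection reason, or nullopt if the alter would be accepted.
std::optional<std::string> Validate(const AlterRequest& req) {
    if (req.ttlColumn && req.dropColumns.count(*req.ttlColumn))
        return "Cannot enable TTL on dropped column: '" + *req.ttlColumn + "'";
    return std::nullopt;  // would proceed as StatusAccepted
}

int main() {
    AlterRequest req{{"modified_at"}, "modified_at"};
    if (auto err = Validate(req))
        std::cout << *err << "\n";  // mirrors the StatusInvalidParameter reason
}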
2025-03-28T09:37:59.460945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:59.460994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:59.461056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:59.461302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:59.521239Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:59.521301Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:59.532029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:59.532249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:59.532395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:59.536917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:59.537107Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:59.537640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.537815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:59.539316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:59.540275Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:59.540321Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:59.540458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:59.540496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:59.540526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:59.540602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.545749Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:59.649825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:59.650055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.650293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 
2025-03-28T09:37:59.650561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:59.650616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.652904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.653038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:59.653207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.653284Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:59.653354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:59.653393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:59.655269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.655319Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:59.655355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:59.656932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.656981Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.657021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:59.657067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.660809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:59.662575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:59.662769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:59.663777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.663897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 
MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:59.663959Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:59.664260Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:59.664320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:59.664468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:59.664586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:59.666331Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:59.666371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:59.666492Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:59.666521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:59.666851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.666887Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:59.666979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:59.667006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.667039Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:59.667072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.667101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:59.667133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.667160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:37:59.667182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:59.667243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:59.667275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:37:59.667305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:59.668571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:59.668647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:59.668675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 78944 for txId: 101 at step: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000002 2025-03-28T09:37:59.846672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.846770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:59.846810Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateTable TPropose operationId# 101:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000002 2025-03-28T09:37:59.846913Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 129 2025-03-28T09:37:59.847012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:59.847066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 2025-03-28T09:37:59.851523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:59.851568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:59.851702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:37:59.851840Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:59.851885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:37:59.851936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:37:59.852257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.852307Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:37:59.853445Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:37:59.853532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:37:59.853564Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:37:59.853597Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], 
version: 5 2025-03-28T09:37:59.853661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:59.854142Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:37:59.854190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:37:59.854217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:37:59.854237Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:37:59.854257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:37:59.854309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:37:59.855447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1254 } } 2025-03-28T09:37:59.855500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-28T09:37:59.855643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1254 } } 2025-03-28T09:37:59.855750Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1254 } } 2025-03-28T09:37:59.856958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:37:59.857011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-28T09:37:59.857117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:37:59.857156Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 
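The table these tests create carries TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } (see the describe output earlier), and TSchemeShardTTLTests::ConditionalErase is queued in the same run. The erase condition reduces to a time comparison; a self-contained sketch, assuming the straightforward now >= modified_at + ttl semantics:

#include <chrono>
#include <iostream>

using Clock = std::chrono::system_clock;

// A row is eligible for conditional erase once its TTL column value plus the
// configured ExpireAfterSeconds lies in the past. Illustrative only.
bool IsExpired(Clock::time_point modifiedAt, std::chrono::seconds ttl,
               Clock::time_point now = Clock::now()) {
    return now >= modifiedAt + ttl;
}

int main() {
    auto wrote = Clock::now() - std::chrono::hours(2);
    std::cout << std::boolalpha
              << IsExpired(wrote, std::chrono::seconds(3600)) << "\n";  // true
}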
2025-03-28T09:37:59.857219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:37:59.857293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.857319Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.857346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:37:59.857379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-28T09:37:59.857991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:37:59.859275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:37:59.859365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.859451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.859691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.859722Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:37:59.859801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:37:59.859838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:37:59.859878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:37:59.859903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:37:59.859930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-28T09:37:59.859987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 101 2025-03-28T09:37:59.860038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:37:59.860075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:37:59.860111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:37:59.860224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:37:59.861505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:37:59.861555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:336:2315] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-28T09:37:59.864045Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" DropColumns { Name: "modified_at" } TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:59.864282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.864645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', at schemeshard: 72057594046678944 2025-03-28T09:37:59.866447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Cannot enable TTL on dropped column: \'modified_at\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:59.866580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 >> TColumnShardTestSchema::OneColdTier [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] >> TPQTest::TestWritePQCompact [GOOD] >> TPQTest::TestWritePQBigMessage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:59.525173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:59.525255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:59.525289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:59.525322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:59.525360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:59.525387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:59.525456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:59.525529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:59.525842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-03-28T09:37:59.605482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:59.605519Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:59.615183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:59.615382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:59.615516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:59.619359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:59.619537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:59.619973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.620132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:59.621641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:59.622874Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:59.622929Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:59.623092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:59.623138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:59.623175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:59.623280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.629409Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:59.730719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:59.730941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.731143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:59.731355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:59.731421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.733766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.733890Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:59.734057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.734120Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:59.734205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:59.734241Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:59.736225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.736274Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:59.736305Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:59.737885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.737922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.737961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:59.738014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.741529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:59.743191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:59.743346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:59.744218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.744328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:59.744381Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:59.744662Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:59.744706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:59.744843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-03-28T09:37:59.744930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:59.746567Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:59.746609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:59.746775Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:59.746810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:59.747208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.747249Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:59.747344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:59.747379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.747428Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:59.747457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.747490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:59.747533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.747564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:37:59.747590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:59.747645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:59.747684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:37:59.747715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:59.749505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:59.749598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:59.749632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2025-03-28T09:38:00.054739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 104 at step: 5000004 2025-03-28T09:38:00.055645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.055749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:00.055803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 104:0 HandleReply TEvOperationPlan, operationId: 104:0, stepId: 5000004, at schemeshard: 72057594046678944 2025-03-28T09:38:00.056011Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 129 2025-03-28T09:38:00.056127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:38:00.058515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:00.058563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:38:00.058779Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:00.058822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 104, path id: 2 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2025-03-28T09:38:00.059233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.059287Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:38:00.059741Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:38:00.059846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:38:00.059887Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:38:00.059928Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-28T09:38:00.059971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:38:00.060055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready 
parts: 0/1, is published: true 2025-03-28T09:38:00.062864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2025-03-28T09:38:00.075121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1023 } } 2025-03-28T09:38:00.075198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-03-28T09:38:00.075345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1023 } } 2025-03-28T09:38:00.075474Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000004 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1023 } } FAKE_COORDINATOR: Erasing txId 104 2025-03-28T09:38:00.076384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-03-28T09:38:00.076438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2025-03-28T09:38:00.076566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-03-28T09:38:00.076615Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:38:00.076697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2025-03-28T09:38:00.076766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.076807Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.076851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:38:00.076917Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 129 -> 240 2025-03-28T09:38:00.079407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply 
complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.079719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.079983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.080026Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-28T09:38:00.080128Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:38:00.080185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:38:00.080229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:38:00.080264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:38:00.080302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2025-03-28T09:38:00.080363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 104 2025-03-28T09:38:00.080413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:38:00.080452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:38:00.080483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:38:00.080604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:38:00.082134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:38:00.082181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:440:2412] TestWaitNotification: OK eventTxId 104 2025-03-28T09:38:00.082724Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:38:00.082943Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 234us result status StatusSuccess 2025-03-28T09:38:00.083364Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 TTLSettings { Disabled { } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 
RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetUsedStorage [GOOD] Test command err: 2025-03-28T09:37:52.881563Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.881659Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:52.894872Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [1:177:2192] 2025-03-28T09:37:52.896738Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2025-03-28T09:37:52.000000Z 2025-03-28T09:37:52.896801Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:177:2192] Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\200\343\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\200\343\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } Got cmd write: CmdWrite { Key: "i0000000003" Value: "\010\000\020\n\030\000(\200\343\365\340\3352" StorageChannel: INLINE } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\263\222\004" StorageChannel: INLINE } 2025-03-28T09:37:53.669027Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:53.669103Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:53.685108Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:53.685335Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:53.685571Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [2:178:2193] 2025-03-28T09:37:53.686399Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Initializing completed. 2025-03-28T09:37:53.686451Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [2:178:2193] 2025-03-28T09:37:53.686509Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition {2, {0, 10}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:37:53.686559Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:53.686729Z node 2 :PERSQUEUE INFO: new Cookie owner1|1b999d31-7510a87d-7d3e962e-b784a82b_0 generated for partition {2, {0, 10}, 100001} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2025-03-28T09:37:53.686839Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: {2, {0, 10}, 100001} 2025-03-28T09:37:53.687167Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 2 partNo 0 2025-03-28T09:37:53.688071Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2025-03-28T09:37:53.688688Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 D0000100001_00000000000000000100_00000_0000000001_00000| size 104 WTime 128 2025-03-28T09:37:53.688849Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:53.688894Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:53.688938Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] [X0000100001, X0000100002) 2025-03-28T09:37:53.688977Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- write ----------------- 2025-03-28T09:37:53.689013Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] M0000100001pSourceId 2025-03-28T09:37:53.689053Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] D0000100001_00000000000000000100_00000_0000000001_00000| 2025-03-28T09:37:53.689095Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] J0000100001 2025-03-28T09:37:53.689130Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:53.689170Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] =========================== 2025-03-28T09:37:53.730489Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:53.730621Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2025-03-28T09:37:53.730730Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 2, partNo: 0, Offset: 100 is stored on disk 2025-03-28T09:37:53.989820Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 4 partNo 0 2025-03-28T09:37:53.990533Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 4 partNo 0 FormedBlobsCount 0 NewHead: Offset 101 PartNo 0 PackedSize 118 count 1 nextOffset 102 batches 1 2025-03-28T09:37:53.990971Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 101,1 HeadOffset 100 endOffset 101 curOffset 102 D0000100001_00000000000000000101_00000_0000000001_00000| size 104 WTime 1129 2025-03-28T09:37:53.991075Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:53.991103Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:53.991126Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] [X0000100001, X0000100002) 2025-03-28T09:37:53.991153Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- write ----------------- 2025-03-28T09:37:53.991186Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] M0000100001pSourceId 2025-03-28T09:37:53.991208Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] D0000100001_00000000000000000101_00000_0000000001_00000| 2025-03-28T09:37:53.991226Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] J0000100001 2025-03-28T09:37:53.991257Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:53.991295Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] =========================== 2025-03-28T09:37:54.022108Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:54.022215Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] TPartition::ReplyWrite. 
Partition: {2, {0, 10}, 100001} 2025-03-28T09:37:54.022273Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 4, partNo: 0, Offset: 101 is stored on disk 2025-03-28T09:37:54.228763Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 6 partNo 0 2025-03-28T09:37:54.231361Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 6 partNo 0 FormedBlobsCount 0 NewHead: Offset 102 PartNo 0 PackedSize 118 count 1 nextOffset 103 batches 1 2025-03-28T09:37:54.231758Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 102,1 HeadOffset 100 endOffset 102 curOffset 103 D0000100001_00000000000000000102_00000_0000000001_00000| size 104 WTime 2130 2025-03-28T09:37:54.231851Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:54.231877Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:54.231900Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] [X0000100001, X0000100002) 2025-03-28T09:37:54.231924Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- write ----------------- 2025-03-28T09:37:54.231948Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] M0000100001pSourceId 2025-03-28T09:37:54.231975Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] D0000100001_00000000000000000102_00000_0000000001_00000| 2025-03-28T09:37:54.231996Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] J0000100001 2025-03-28T09:37:54.232016Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateIdle] --- rename ----- ... 
rc2' seqNo 10 partNo 0 2025-03-28T09:37:58.588233Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src2' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 70 PartNo 0 PackedSize 552 count 10 nextOffset 80 batches 1 2025-03-28T09:37:58.588880Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 70,10 HeadOffset 20 endOffset 25 curOffset 80 d0000000000_00000000000000000070_00000_0000000010_00000| size 299 WTime 11240 2025-03-28T09:37:58.589049Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:58.589094Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:58.589141Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:37:58.589199Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:58.589239Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000020_00000_0000000005_00000 2025-03-28T09:37:58.589276Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc2 2025-03-28T09:37:58.589303Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000070_00000_0000000010_00000| 2025-03-28T09:37:58.589327Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:58.589362Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:58.589404Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 10 Got KV request Got KV request Send disk status response with cookie: 0 2025-03-28T09:37:58.610345Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 170 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:58.610457Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:37:58.610552Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 70 is stored on disk 2025-03-28T09:37:58.610610Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:37:58.610656Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 71 is stored on disk 2025-03-28T09:37:58.610712Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-28T09:37:58.610752Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 72 is stored on disk 2025-03-28T09:37:58.610785Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:37:58.610844Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 73 is stored on disk 2025-03-28T09:37:58.610880Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:37:58.610919Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 74 is stored on disk 2025-03-28T09:37:58.610951Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:37:58.610992Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 75 is stored on disk 2025-03-28T09:37:58.611030Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:37:58.611068Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 76 is stored on disk 2025-03-28T09:37:58.611103Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:37:58.611147Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 77 is stored on disk 2025-03-28T09:37:58.611179Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:37:58.611219Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 78 is stored on disk 2025-03-28T09:37:58.611249Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-28T09:37:58.611290Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src2', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 79 is stored on disk 2025-03-28T09:37:58.611606Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:58.611662Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:58.611717Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000020_00000_0000000005_00000|, d0000000000_00000000000000000020_00000_0000000005_00000|] 2025-03-28T09:37:58.611766Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:58.611833Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:58.611884Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:58.611931Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Create distr tx with id = 8 and act no: 9 Create immediate tx with id = 10 and act no: 11 Create distr tx with id = 12 and act no: 13 2025-03-28T09:37:58.612272Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 8 2025-03-28T09:37:58.612422Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 12 2025-03-28T09:37:59.490737Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:37:59.490861Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait batch completion 2025-03-28T09:37:59.491048Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:37:59.491108Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:37:59.491173Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=MinSeqNo violation failure on src2 Got batch complete: 3 2025-03-28T09:37:59.698333Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 12 2025-03-28T09:37:59.698410Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 12 2025-03-28T09:37:59.698475Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-28T09:37:59.698523Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 70 PartNo 0 PackedSize 299 count 10 nextOffset 80 batches 1, NewHead=Offset 80 PartNo 0 PackedSize 0 count 0 nextOffset 80 batches 0 2025-03-28T09:37:59.698778Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:59.698827Z 
node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:59.698871Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:59.698930Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrcId2 2025-03-28T09:37:59.698983Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:59.699011Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:59.699028Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:37:59.699053Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:59.699086Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 10 2025-03-28T09:37:59.730481Z node 4 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 10 Errors { Kind: BAD_REQUEST Reason: "MinSeqNo violation failure on src2" } Wait tx committed for tx 12 2025-03-28T09:38:00.023023Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:00.023097Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:38:00.036415Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] bootstrapping {2, {0, 10}, 100001} [5:179:2194] 2025-03-28T09:38:00.038138Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
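The === DumpKeyValueRequest === sections in the partition traces above write blob keys such as d0000000000_00000000000000000070_00000_0000000010_00000| next to lines like "compactOffset 70,10". Below is a minimal C++ sketch of that key layout, assuming a field order inferred from this log alone (type letter, zero-padded partition id, start offset, part number, message count, internal part count); it is not the actual YDB key schema, and the struct/field names are illustrative.

```cpp
#include <cinttypes>
#include <cstdio>
#include <string>

// Hypothetical layout inferred ONLY from this trace:
//   <type letter><partition:10>_<offset:20>_<partNo:5>_<count:10>_<internalParts:5>[|]
// e.g. d0000000000_00000000000000000070_00000_0000000010_00000|
// matches the nearby "compactOffset 70,10" (offset 70, count 10).
struct BlobKey {
    char type;              // 'd'/'D' data blob, 'm'/'i'/'I'/'J'/'X' are other key kinds
    uint32_t partition;
    uint64_t offset;
    uint32_t partNo;
    uint32_t count;
    uint32_t internalParts;
};

static bool ParseBlobKey(const std::string& key, BlobKey* out) {
    // 1 + 10 + 1 + 20 + 1 + 5 + 1 + 10 + 1 + 5 = 55 chars before the optional '|'
    if (key.size() < 55) return false;
    out->type = key[0];
    return std::sscanf(key.c_str() + 1,
                       "%10" SCNu32 "_%20" SCNu64 "_%5" SCNu32 "_%10" SCNu32 "_%5" SCNu32,
                       &out->partition, &out->offset, &out->partNo,
                       &out->count, &out->internalParts) == 5;
}

int main() {
    BlobKey k{};
    if (ParseBlobKey("d0000000000_00000000000000000070_00000_0000000010_00000|", &k)) {
        std::printf("partition=%" PRIu32 " offset=%" PRIu64 " partNo=%" PRIu32
                    " count=%" PRIu32 " internalParts=%" PRIu32 "\n",
                    k.partition, k.offset, k.partNo, k.count, k.internalParts);
    }
    return 0;
}
```

Checking the parsed offset/count pair against the compactOffset values printed beside each "Add new write blob" line is a quick consistency check when reading such traces.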
2025-03-28T09:38:00.038194Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: {2, {0, 10}, 100001}, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [5:179:2194] >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:38:00.275702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:00.275772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:00.275798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:00.275823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:00.275856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:00.275874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:00.275929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:00.276000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:00.276256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:00.334423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:00.334465Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:00.344926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:00.345159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:00.345310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:00.349760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:00.349955Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:00.350419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.350583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:00.352063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
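The TTL tests in this section create tables whose describe results carry TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } }, or TTLSettings { Disabled { } } once TTL is switched off. A short sketch of the expiration predicate those settings imply follows; it is purely illustrative (the real erasure runs server-side in background processing, not in client code like this).

```cpp
#include <chrono>
#include <cstdio>

// Sketch of the predicate implied by ExpireAfterSeconds: 3600 with a Delete
// tier at ApplyAfterSeconds: 3600, as seen in the describe outputs above.
using Clock = std::chrono::system_clock;

static bool IsExpired(Clock::time_point modifiedAt,
                      std::chrono::seconds ttl,
                      Clock::time_point now) {
    // The Delete tier applies once the row's age reaches the TTL.
    return now - modifiedAt >= ttl;
}

int main() {
    const auto now = Clock::now();
    const std::chrono::seconds ttl{3600};  // ExpireAfterSeconds from the trace
    std::printf("fresh row expired: %d\n", IsExpired(now, ttl, now));
    std::printf("2h-old row expired: %d\n",
                IsExpired(now - std::chrono::hours(2), ttl, now));
    return 0;
}
```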
2025-03-28T09:38:00.353124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:00.353177Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:00.353337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:00.353375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:00.353406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:00.353482Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.359119Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:00.456100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:00.456310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.456488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:00.456689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:00.456750Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.458710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.458820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:00.458960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.459006Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:00.459061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:00.459091Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:00.460703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.460753Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:00.460782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:00.462101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-03-28T09:38:00.462141Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.462172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:00.462211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:00.465011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:00.466607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:00.466831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:00.467805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.468105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:00.468166Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:00.468437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:00.468492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:00.468642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:00.468730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:00.470589Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:00.470639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:00.470855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:00.470903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:00.471348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.471418Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 
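The "Change state for txid" lines in this trace step each sub-operation through numeric states whose handlers are named right beside them: 2 -> 3 after TCreateParts, 3 -> 128 after TConfigureParts, then 128 -> 240 at plan time, or 128 -> 129 -> 240 when datashards must ack TEvSchemaChanged (as in the AlterTable trace later in this section). The sketch below replays that progression using a mapping inferred only from these log lines; it is an assumption, not YDB's actual tx-state enum.

```cpp
#include <cstdint>
#include <cstdio>

// Assumed state values, read off the "Change state for txid" lines above.
enum class ETxState : uint8_t {
    CreateParts       = 2,    // TCreateParts ProgressState
    ConfigureParts    = 3,    // TConfigureParts / NSubDomainState::TConfigureParts
    Propose           = 128,  // TPropose, waits for TEvOperationPlan
    ProposedWaitParts = 129,  // waits for TEvSchemaChanged from datashards
    Done              = 240,  // TDone ProgressState
};

// Advance one step the way the trace does; waitForShards selects the
// 128 -> 129 branch seen in the AlterTable test.
static ETxState Next(ETxState s, bool waitForShards) {
    switch (s) {
        case ETxState::CreateParts:       return ETxState::ConfigureParts;
        case ETxState::ConfigureParts:    return ETxState::Propose;
        case ETxState::Propose:           return waitForShards ? ETxState::ProposedWaitParts
                                                               : ETxState::Done;
        case ETxState::ProposedWaitParts: return ETxState::Done;
        case ETxState::Done:              return ETxState::Done;
    }
    return s;
}

int main() {
    for (ETxState s = ETxState::CreateParts; s != ETxState::Done;
         s = Next(s, /*waitForShards=*/true)) {
        std::printf("state %u\n", static_cast<unsigned>(s));  // prints 2, 3, 128, 129
    }
    return 0;
}
```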
2025-03-28T09:38:00.471517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:00.471559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:00.471599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:00.471630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:00.471665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:00.471722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:00.471772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:38:00.471803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:00.471866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:00.471906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:00.471940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:00.473865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:00.473988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:00.474027Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
09:38:00.740730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:38:00.740776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/3, is published: true 2025-03-28T09:38:00.741127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:38:00.741159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 2 2025-03-28T09:38:00.741260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:2, at schemeshard: 72057594046678944, message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:38:00.741313Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:38:00.741372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:2 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 319 RawX2: 4294969599 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:38:00.741421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:2, shardIdx: 72057594046678944:2, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.741466Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:38:00.741520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:38:00.741553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-03-28T09:38:00.741876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:38:00.741900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-03-28T09:38:00.741964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:38:00.741988Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:38:00.742036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:38:00.742064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, 
txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.742082Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.742099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:38:00.742115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-28T09:38:00.744382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:38:00.744440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:38:00.746478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:38:00.746546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:38:00.746618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.746646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:38:00.746718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:38:00.746808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.747036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:38:00.747071Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-03-28T09:38:00.747150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-03-28T09:38:00.747176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-28T09:38:00.747213Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-03-28T09:38:00.747237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-28T09:38:00.747267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-03-28T09:38:00.747497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.747532Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:38:00.747583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-03-28T09:38:00.747603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-28T09:38:00.747640Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-03-28T09:38:00.747658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-28T09:38:00.747675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-03-28T09:38:00.747724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:375:2343] 
message: TxId: 101 2025-03-28T09:38:00.747754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-28T09:38:00.747802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:38:00.747830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:38:00.747938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:38:00.747968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-03-28T09:38:00.747982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-03-28T09:38:00.748002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:38:00.748014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-03-28T09:38:00.748037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-03-28T09:38:00.748085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:38:00.749614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:38:00.749663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:376:2344] TestWaitNotification: OK eventTxId 101 2025-03-28T09:38:00.750028Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:38:00.750265Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 178us result status StatusSuccess 2025-03-28T09:38:00.750608Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 
PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:38:00.148201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:00.148285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:00.148321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:00.148356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:00.148396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:00.148423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:00.148490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:00.148566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:00.148873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:00.235571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:00.235627Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:00.250468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:00.250797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:00.250966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:00.256277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:00.256479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:00.257078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.257279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:00.258983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:00.260248Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:00.260323Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:00.260478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:00.260523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:00.260555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:00.260632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.266724Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:00.387688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:00.387851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.388030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:00.388238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:00.388284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.390078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.390187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:00.390317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.390374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:00.390429Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:00.390453Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:00.392091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.392133Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:00.392158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:00.393501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.393543Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.393580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:00.393617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:00.396361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:00.397985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:00.398125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:00.398854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.398963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:00.399010Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:00.399226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:00.399280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:00.399464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:00.399544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:00.401145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:00.401185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:00.401310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:00.401339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:00.401676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.401712Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:00.401779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:00.401803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:00.401832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:00.401856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:00.401898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:00.401926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:00.401949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:38:00.401969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:00.402024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:00.402053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:00.402077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:00.403379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:00.403486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:00.403515Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-28T09:38:00.614084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.614158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:00.614189Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:38:00.614358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-28T09:38:00.614475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:38:00.618455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:00.618505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:38:00.618713Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:00.618757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-28T09:38:00.619093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.619139Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:38:00.619601Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:38:00.619686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:38:00.619710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:38:00.619736Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-28T09:38:00.619763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:38:00.619815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:38:00.620195Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 841 } } 2025-03-28T09:38:00.620231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:38:00.620304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 841 } } 2025-03-28T09:38:00.620372Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 841 } } 2025-03-28T09:38:00.620745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:38:00.620776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:38:00.620877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:38:00.620907Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:38:00.620969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:38:00.621023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.621046Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.621069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:38:00.621102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:38:00.623650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:38:00.623732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.623924Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.623993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.624041Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:38:00.624123Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:38:00.624155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:38:00.624183Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:38:00.624206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:38:00.624230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:38:00.624287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-03-28T09:38:00.624324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:38:00.624355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:38:00.624375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:38:00.624462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:38:00.625773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:38:00.625806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:392:2364] TestWaitNotification: OK eventTxId 102 2025-03-28T09:38:00.626193Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:38:00.626370Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 175us result status StatusSuccess 2025-03-28T09:38:00.626772Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 
0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155230.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143155230.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123155230.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123155230.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154030.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123154030.000000s;Name=;Codec=}; 2025-03-28T09:37:11.821974Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:37:11.886885Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:37:11.901545Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:37:11.901788Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:37:11.908231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:37:11.908394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:37:11.908609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:37:11.908692Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:37:11.908784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:37:11.908855Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:37:11.908922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:37:11.909003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:37:11.909084Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:37:11.909172Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:37:11.909250Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:37:11.909331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:37:11.927753Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:37:11.928036Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:37:11.928084Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:37:11.928236Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:37:11.928392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:37:11.928460Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:37:11.928498Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:37:11.928565Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:37:11.928611Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:37:11.928639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:37:11.928664Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:37:11.928778Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:37:11.928822Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:37:11.928848Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:37:11.928867Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:37:11.928928Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:37:11.928962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:37:11.928990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:37:11.929020Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:37:11.929066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:37:11.929093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:37:11.929117Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:37:11.929152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:37:11.929178Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:37:11.929199Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:37:11.929519Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=40; 2025-03-28T09:37:11.929585Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=30; 2025-03-28T09:37:11.929641Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=25; 2025-03-28T09:37:11.929706Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=30; 2025-03-28T09:37:11.929866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:37:11.929911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:37:11.929939Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:37:11.930068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:37:11.930094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:37:11.930113Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:37:11.930231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:37:11.930262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:37:11.930283Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:37:11.930418Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
:"Finish","f":1743154680520968,"d_finished":0,"c":0,"l":1743154680521401,"d":433},{"name":"task_result","f":1743154680048587,"d_finished":146863,"c":28,"l":1743154680518373,"d":146863}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1284:3291]->[1:1283:3290] 2025-03-28T09:38:00.521775Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:38:00.040126Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203504;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203504;selected_rows=0; 2025-03-28T09:38:00.521805Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:38:00.522004Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-28T09:38:00.523476Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-03-28T09:38:00.523753Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000006:max} readable: {1000000006:max} at tablet 9437184 2025-03-28T09:38:00.523890Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:38:00.524047Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:38:00.524096Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:38:00.524532Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-28T09:38:00.524618Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:38:00.525019Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1300:3307];trace_detailed=; 2025-03-28T09:38:00.525332Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-28T09:38:00.525523Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-28T09:38:00.525656Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:38:00.525768Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:38:00.525988Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:38:00.526077Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-03-28T09:38:00.526218Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:38:00.526268Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1300:3307] finished for tablet 9437184 2025-03-28T09:38:00.526630Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1299:3306];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743154680524967,"name":"_full_task","f":1743154680524967,"d_finished":0,"c":0,"l":1743154680526320,"d":1353},"events":[{"name":"bootstrap","f":1743154680525101,"d_finished":702,"c":1,"l":1743154680525803,"d":702},{"a":1743154680525967,"name":"ack","f":1743154680525967,"d_finished":0,"c":0,"l":1743154680526320,"d":353},{"a":1743154680525953,"name":"processing","f":1743154680525953,"d_finished":0,"c":0,"l":1743154680526320,"d":367},{"name":"ProduceResults","f":1743154680525594,"d_finished":445,"c":2,"l":1743154680526247,"d":445},{"a":1743154680526252,"name":"Finish","f":1743154680526252,"d_finished":0,"c":0,"l":1743154680526320,"d":68}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:38:00.526713Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1299:3306];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:38:00.527143Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1299:3306];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_processing","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743154680524967,"name":"_full_task","f":1743154680524967,"d_finished":0,"c":0,"l":1743154680526761,"d":1794},"events":[{"name":"bootstrap","f":1743154680525101,"d_finished":702,"c":1,"l":1743154680525803,"d":702},{"a":1743154680525967,"name":"ack","f":1743154680525967,"d_finished":0,"c":0,"l":1743154680526761,"d":794},{"a":1743154680525953,"name":"processing","f":1743154680525953,"d_finished":0,"c":0,"l":1743154680526761,"d":808},{"name":"ProduceResults","f":1743154680525594,"d_finished":445,"c":2,"l":1743154680526247,"d":445},{"a":1743154680526252,"name":"Finish","f":1743154680526252,"d_finished":0,"c":0,"l":1743154680526761,"d":509}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1300:3307]->[1:1299:3306] 2025-03-28T09:38:00.527224Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:38:00.524594Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:38:00.527270Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:38:00.527374Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 
2025-03-28T09:38:00.790877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:00.790935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:00.790960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:00.790983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:00.791016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:00.791034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:00.791084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:00.791133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:00.791369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:00.859335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:00.859373Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:00.868896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:00.869100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:00.869225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:00.873440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:00.873597Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:00.874069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.874210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:00.875619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:00.876544Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:00.876594Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:00.876725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:00.876760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:00.876786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:00.876850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2025-03-28T09:38:00.881575Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:00.995502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:00.995667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.995824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:00.996005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:00.996055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.997633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.997739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:00.997857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.997904Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:00.997953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:00.997976Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:00.999241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.999277Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:00.999299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:01.000447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:01.000477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:01.000505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:01.000534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:01.002862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:01.004053Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:01.004210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:01.004966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:01.005051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:01.005097Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:01.005281Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:01.005311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:01.005407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:01.005452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:01.006651Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:01.006708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:01.006803Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:01.006827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:01.007105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:01.007141Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:01.007201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:01.007222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:01.007244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:01.007266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:01.007290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:01.007316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:01.007346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:38:01.007373Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:01.007424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:01.007452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:01.007475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:01.013038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:01.013119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:01.013143Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:38:01.013167Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:38:01.013194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:01.013275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:38:01.015420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:38:01.015805Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:38:01.016753Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-28T09:38:01.029118Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-28T09:38:01.030898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:01.031142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:01.031207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { }, at schemeshard: 72057594046678944 2025-03-28T09:38:01.031518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL status must be specified, at schemeshard: 72057594046678944 2025-03-28T09:38:01.032110Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:38:01.034534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: 
StatusSchemeError Reason: "TTL status must be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:01.034637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL status must be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-03-28T09:38:01.034965Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TPQTest::TestOwnership [GOOD] >> TPQTest::TestOffsetEstimation [GOOD] >> TPQTest::TestMaxTimeLagRewind >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:38:01.771564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:01.771647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:01.771674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:01.771698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:01.771730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:01.771751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:01.771797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:01.771849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:01.772092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:01.837739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:01.837783Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:01.851650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:01.851959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:01.852131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:01.857883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:01.858125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:01.858752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:01.858963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:01.860823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:01.862106Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:01.862173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:01.862363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:01.862408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:01.862446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:01.862542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:01.868705Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:01.993418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:01.993666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:01.993890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:01.994103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:01.994213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:01.996588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:01.996720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:01.996894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:01.996956Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:01.997033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:01.997071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:01.998801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-03-28T09:38:01.998840Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:01.998865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:02.000316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:02.000355Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:02.000391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:02.000438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:02.004172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:02.006105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:02.006309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:02.007379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:02.007536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:02.007591Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:02.007882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:02.007932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:02.008084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:02.008152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:02.010134Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:02.010177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:02.010336Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:02.010375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, 
to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:02.010825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:02.010885Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:02.010975Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:02.011004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:02.011038Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:02.011066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:02.011100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:02.011139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:02.011202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:38:02.011234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:02.011292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:02.011351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:02.011395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:02.012751Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:02.012822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:02.012845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:38:02.012869Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:38:02.012899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:02.013020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:38:02.015920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:38:02.016484Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1743154682.017596 643202 text_format.cc:398] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2025-03-28T09:38:02.018052Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 
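
The schemeshard trace above walks a single suboperation through its whole lifecycle: propose (StatusAccepted), TCreateParts, TConfigureParts, TPropose, coordinator planning, publication to the scheme board, and finally TDone, with the internal state codes printed as "2 -> 3", "3 -> 128" and "128 -> 240". Below is a minimal C++ sketch of that progression; the enum and function are illustrative stand-ins that only mirror the numbers printed in this log, not the real types from ydb/core/tx/schemeshard.

    #include <cstdint>
    #include <stdexcept>

    // Illustrative stand-ins for the states named in the trace above; the
    // numeric values are exactly what the log prints in "Change state for
    // txid 1:0 2 -> 3", "3 -> 128" and "128 -> 240". The real definitions
    // live in ydb/core/tx/schemeshard and are not reproduced here.
    enum class EOpState : uint32_t {
        CreateParts    = 2,   // TCreateParts: create shards (none needed here)
        ConfigureParts = 3,   // TConfigureParts: configure the affected shards
        Propose        = 128, // TPropose: wait for a coordinator plan step
        Done           = 240, // TDone: publish results and notify subscribers
    };

    // Advances a suboperation along the transitions observed in the log.
    inline EOpState Next(EOpState s) {
        switch (s) {
            case EOpState::CreateParts:    return EOpState::ConfigureParts;
            case EOpState::ConfigureParts: return EOpState::Propose;
            case EOpState::Propose:        return EOpState::Done; // on TEvOperationPlan
            case EOpState::Done:           throw std::logic_error("operation already done");
        }
        throw std::logic_error("unknown state");
    }
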
2025-03-28T09:38:02.035196Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-28T09:38:02.037850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:02.038107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:02.038205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } }, at schemeshard: 72057594046678944 2025-03-28T09:38:02.038554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL should be less than 1743154682 seconds (20175 days, 55 years). The ttl behaviour is undefined before 1970., at schemeshard: 72057594046678944 2025-03-28T09:38:02.039133Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:38:02.041846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL should be less than 1743154682 seconds (20175 days, 55 years). The ttl behaviour is undefined before 1970." TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:02.041992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL should be less than 1743154682 seconds (20175 days, 55 years). 
The ttl behaviour is undefined before 1970., operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-03-28T09:38:02.042418Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101
>> TKeyValueCollectorTest::TestKeyValueCollectorEmpty
>> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk
>> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi
>> KeyValueReadStorage::ReadRangeOk1Key [GOOD]
>> KeyValueReadStorage::ReadRangeOk [GOOD]
>> KeyValueReadStorage::ReadRangeNoData [GOOD]
>> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi
>> TKeyValueTest::TestCleanUpDataOnEmptyTablet
>> TKeyValueTest::TestWriteReadPatchRead
>> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents
>> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi
>> TListAllTopicsTests::RecursiveList [GOOD]
>> TListAllTopicsTests::ListLimitAndPaging
>> TKeyValueTest::TestCopyRangeWorks
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test [GOOD]
>> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test
>> TKeyValueTest::TestObtainLockNewApi
>> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk
>> TKeyValueCollectorTest::TestKeyValueCollectorEmpty [GOOD]
>> TKeyValueCollectorTest::TestKeyValueCollectorMany
>> TKeyValueTest::TestWriteReadPatchRead [GOOD]
>> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi
>> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents [GOOD]
>> TKeyValueTest::TestWriteLongKey
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadRangeNoData [GOOD]
Test command err: 2025-03-28T09:38:03.401972Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-03-28T09:38:03.403883Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-03-28T09:38:03.411514Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2025-03-28T09:38:03.411574Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-03-28T09:38:03.416425Z 1 00h00m00.000000s :KEYVALUE INFO: {KV320@keyvalue_storage_read_request.cpp:122} Inline read request KeyValue# 1 Status# OK 2025-03-28T09:38:03.416463Z 1 00h00m00.000000s :KEYVALUE DEBUG: {KV322@keyvalue_storage_read_request.cpp:134} Expected OK or UNKNOWN and given OK readCount# 0 2025-03-28T09:38:03.416499Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0
>> TKeyValueTest::TestInlineCopyRangeWorks
>> TKeyValueCollectorTest::TestKeyValueCollectorMany [GOOD]
>> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD]
>> TDataShardLocksTest::Points_OneTx
>> TDataShardLocksTest::MvccTestWriteBreaksLocks [GOOD]
>> TDataShardLocksTest::Points_ManyTx
>> TDataShardLocksTest::Points_OneTx [GOOD]
>> TDataShardLocksTest::Points_ManyTx_RemoveAll
------- [TM] {asan,
default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] Test command err: 2025-03-28T09:38:03.968116Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2025-03-28T09:38:03.969969Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test >> TSchemeShardTTLTests::ConditionalErase [GOOD] >> TDataShardLocksTest::Points_ManyTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll >> TDataShardLocksTest::Points_ManyTx_RemoveAll [GOOD] >> TDataShardLocksTest::UseLocksCache >> TDataShardLocksTest::Points_ManyTx_BreakAll [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:38:00.657177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:00.657260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:00.657297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:00.657345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:00.657384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:00.657412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:00.657479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:00.657550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:00.657865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:00.731712Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:00.731752Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:00.741702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:00.741927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:00.742053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:00.745594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:00.745769Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:00.746169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.746301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:00.747874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:00.749143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:00.749201Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:00.749375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:00.749422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:00.749463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:00.749544Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.755578Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:00.868031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:00.868249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.868437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:00.868666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:00.868715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.870603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.870738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:00.870887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.870943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:00.871006Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:00.871037Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:00.872725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.872770Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:00.872807Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:00.874201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.874245Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.874278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:00.874318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:00.877632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:00.879185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:00.879334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:00.880192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:00.880294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:00.880345Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:00.880612Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:00.880662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:00.880824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:00.880887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:00.882395Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:00.882435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, 
at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:00.882568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:00.882602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:00.882987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:00.883032Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:00.883109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:00.883154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:00.883188Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:00.883214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:00.883244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:00.883278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:00.883307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:38:00.883332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:00.883379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:00.883433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:00.883460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:00.885102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:00.885204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:00.885234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
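
The FAKE_COORDINATOR lines in the trace above show the planning handshake: the schemeshard proposes the transaction with MinStep 0 and MaxStep 18446744073709551615, the coordinator answers with a concrete plan step (5000001 here), and TTxOperationPlanStep then executes at that step. A hedged sketch of step assignment follows; PlanStep is a hypothetical helper, the alignment rule is an assumption for illustration, and the real logic lives in ydb/core/tx/coordinator.

    #include <algorithm>
    #include <cstdint>

    // Hypothetical helper, not the real coordinator code: choose the next
    // plan step for a proposed transaction. Here the step is aligned to a
    // plan resolution (PlanResolution: 50 appears in the domain description
    // later in this log) and clamped into [MinStep, MaxStep]; the concrete
    // step values in the trace come from the test's fake coordinator.
    inline uint64_t PlanStep(uint64_t lastStep, uint64_t minStep, uint64_t maxStep,
                             uint64_t planResolution) {
        uint64_t step = ((lastStep / planResolution) + 1) * planResolution + 1;
        step = std::max(step, minStep);
        return step <= maxStep ? step : 0; // 0: cannot be planned, tx times out
    }
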
Size 627 rowCount 2 cpuUsage 0 2025-03-28T09:38:04.132379Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-03-28T09:38:04.132454Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-28T09:38:04.132571Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-28T09:38:04.132694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409546, request: TableId: 2 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640244000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-28T09:38:04.132775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409550, request: TableId: 6 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640244000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-28T09:38:04.132815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409549, request: TableId: 5 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640244000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-28T09:38:04.132850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409548, request: TableId: 4 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640244000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-28T09:38:04.132885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 3 Expiration { ColumnId: 2 WallClockTimestamp: 1600463040244000 ColumnUnit: UNIT_AUTO } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-28T09:38:04.132919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409551, request: TableId: 7 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640244000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-28T09:38:04.133288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:38:04.133600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2025-03-28T09:38:04.133897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-03-28T09:38:04.134376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-03-28T09:38:04.134576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2025-03-28T09:38:04.134807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:38:04.135882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 
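
Two TTL rules are visible in this output. At DDL time, the earlier CREATE TABLE with ExpireAfterSeconds: 3153600000 was rejected with StatusSchemeError because the value is not smaller than the current Unix time (1743154682, the 2025-03-28 test run), so the erase threshold would land before 1970. At run time, each conditional-erase request above carries Expiration { ColumnId, WallClockTimestamp in microseconds, ColumnUnit } plus batch limits, and the shard erases rows whose column value is older than the threshold. A small sketch of the threshold arithmetic, with assumed names:

    #include <cstdint>
    #include <optional>

    // Sketch of the arithmetic implied by the trace, with assumed helper
    // names. A row expires when its column value is older than
    // now - ExpireAfterSeconds; schemeshard therefore rejects
    // ExpireAfterSeconds >= now (seconds since epoch), which is exactly the
    // "TTL should be less than 1743154682 seconds" StatusSchemeError above.
    inline std::optional<uint64_t> EraseThresholdMicros(uint64_t nowSeconds,
                                                        uint64_t expireAfterSeconds) {
        if (expireAfterSeconds >= nowSeconds) {
            return std::nullopt; // threshold before 1970 -> StatusSchemeError
        }
        // WallClockTimestamp in the erase requests above is in microseconds
        // (1600466640244000 corresponds to the 2020-09-18T23:04 run times).
        return (nowSeconds - expireAfterSeconds) * 1000000ull;
    }
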
2025-03-28T09:38:04.135922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:38:04.136590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-28T09:38:04.136628Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2025-03-28T09:38:04.137525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-28T09:38:04.137551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2025-03-28T09:38:04.141234Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-28T09:38:04.141273Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2025-03-28T09:38:04.141742Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-28T09:38:04.141768Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:38:04.142281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-28T09:38:04.142384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-28T09:38:04.142416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 2020-09-18T23:04:00.245000Z, at schemeshard: 72057594046678944 2025-03-28T09:38:04.142457Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-28T09:38:04.142516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-28T09:38:04.142543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2020-09-18T23:04:00.245000Z, at schemeshard: 72057594046678944 2025-03-28T09:38:04.142590Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-28T09:38:04.142638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-28T09:38:04.142673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2020-09-18T23:04:00.245000Z, at schemeshard: 72057594046678944 2025-03-28T09:38:04.144048Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-28T09:38:04.144105Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-28T09:38:04.144145Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-28T09:38:04.144175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-28T09:38:04.144188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-28T09:38:04.144222Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-28T09:38:04.144257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 
72057594046678944:5, run at: 2020-09-18T23:04:00.247000Z, at schemeshard: 72057594046678944 2025-03-28T09:38:04.144286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-28T09:38:04.144322Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-28T09:38:04.144341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:2, run at: 2020-09-18T23:04:00.247000Z, at schemeshard: 72057594046678944 2025-03-28T09:38:04.144357Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-28T09:38:04.209624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 5 2025-03-28T09:38:04.209756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 3 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-28T09:38:04.209807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], pathId map=TTLEnabledTable2, is column=0, is olap=0 2025-03-28T09:38:04.209849Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 3: RowCount 0, DataSize 0 2025-03-28T09:38:04.209931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 5 shard idx 72057594046678944:4 data size 43 row count 1 2025-03-28T09:38:04.209970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], pathId map=TTLEnabledTable4, is column=0, is olap=0 2025-03-28T09:38:04.209989Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409549 followerId=0, pathId 5: RowCount 1, DataSize 43 2025-03-28T09:38:04.210037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-03-28T09:38:04.210059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable1, is column=0, is olap=0 2025-03-28T09:38:04.210076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-28T09:38:04.210100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:3 data size 603 row count 2 2025-03-28T09:38:04.210137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=TTLEnabledTable3, is column=0, is olap=0 2025-03-28T09:38:04.210162Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409548 followerId=0, pathId 4: RowCount 2, DataSize 603 2025-03-28T09:38:04.210196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 6 shard idx 72057594046678944:5 data size 627 row count 2 2025-03-28T09:38:04.210216Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72057594046678944:5 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], pathId map=TTLEnabledTable5, is column=0, is olap=0 2025-03-28T09:38:04.210232Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409550 followerId=0, pathId 6: RowCount 2, DataSize 627, with borrowed parts 2025-03-28T09:38:04.222169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-28T09:38:04.222217Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2025-03-28T09:38:04.223759Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-28T09:38:04.223849Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-28T09:38:04.223880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:6, run at: 2020-09-18T23:04:00.249000Z, at schemeshard: 72057594046678944 2025-03-28T09:38:04.223913Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 >> TKeyValueTest::TestIncorrectRequestThenResponseError >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] >> TKeyValueTest::TestIncorrectRequestThenResponseError [GOOD] >> TKeyValueTest::TestIncrementalKeySet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:58.922299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:58.922382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:58.922416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:58.922450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:58.922505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:58.922530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:58.922599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:58.922675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:58.922997Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:58.987473Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:58.987509Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:58.997820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:58.998033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:58.998159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:59.003227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:59.003470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:59.003932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.004084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:59.005396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:59.006324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:59.006364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:59.006483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:59.006513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:59.006551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:59.006605Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.011212Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:59.109698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:59.109863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.110012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:59.110180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:59.110265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.111871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.111991Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:59.112096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.112151Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:59.112201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:59.112224Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:59.113638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.113688Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:59.113713Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:59.115030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.115066Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.115093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:59.115126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.117438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:59.119089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:59.119297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:59.120074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.120155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:59.120198Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:59.120386Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:59.120419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:59.120534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:59.120584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:59.121820Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:59.121858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:59.121959Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:59.121983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:59.122270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.122303Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:59.122366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:59.122390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.122421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:59.122445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.122469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:59.122497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.122521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:37:59.122540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:59.122577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:59.122611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:37:59.122631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:59.123854Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:59.123920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:59.123944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
d::TEvNotifyTxCompletionResult> complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:04.323420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:04.323546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:04.323601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:04.323640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:04.323700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:04.323751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:04.323825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:04.323886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:04.325857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:04.325938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:04.325991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:04.326028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:04.326087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:04.326154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:04.326181Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-28T09:38:04.326259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:38:04.326285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:38:04.326313Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:38:04.326335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:38:04.326376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-28T09:38:04.326433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:2816:4081] message: TxId: 103 2025-03-28T09:38:04.326464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:38:04.326500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:38:04.326522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:38:04.327298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-03-28T09:38:04.329950Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:38:04.329986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:3998:5203] TestWaitNotification: OK eventTxId 103 2025-03-28T09:38:04.330451Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:38:04.330634Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 215us result status StatusSuccess 2025-03-28T09:38:04.331026Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "saved_at" Type: "Datetime" TypeId: 49 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 4 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 5 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Disabled { } Version: 3 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 
ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2025-03-28T09:38:04.333179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterSchema { AlterColumns { Name: "data" DefaultValue: "10" } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:04.333303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:38:04.337267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: schema update error: sparsed columns are disabled, at schemeshard: 72057594046678944 2025-03-28T09:38:04.339103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "schema update error: sparsed columns are disabled" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:04.339206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusSchemeError, reason: schema update error: sparsed columns are disabled, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-28T09:38:04.339451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for 
txId 104: send EvNotifyTxCompletion 2025-03-28T09:38:04.339486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-28T09:38:04.339811Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-28T09:38:04.339905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:38:04.339942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:4334:5539] TestWaitNotification: OK eventTxId 104 |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk >> KeyValueReadStorage::ReadOk [GOOD] >> KeyValueReadStorage::ReadNotWholeBlobOk [GOOD] >> KeyValueReadStorage::ReadOneItemError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadOneItemError [GOOD] Test command err: 2025-03-28T09:38:06.124726Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-03-28T09:38:06.127044Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send response KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-03-28T09:38:06.132183Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-03-28T09:38:06.132233Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:492} Send response KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2025-03-28T09:38:06.137174Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-03-28T09:38:06.137270Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV317@keyvalue_storage_read_request.cpp:310} Unexpected EvGetResult.
KeyValue# 1 Status# OK Id# [1:2:3:2:0:1:0] ResponseStatus# ERROR Deadline# 586524-01-19T08:01:49.551615Z Now# 1970-01-01T00:00:00.000000Z SentAt# 1970-01-01T00:00:00.000000Z GotAt# 2025-03-28T09:38:06.137041Z ErrorReason# >> TKeyValueTest::TestWrite200KDeleteThenResponseError >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestIncrementalKeySet [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi >> TKeyValueTest::TestBasicWriteRead >> TPQTest::TestPartitionTotalQuota [GOOD] >> TPQTest::TestPartitionPerConsumerQuota >> TPQTest::TestSetClientOffset [GOOD] >> TPQTest::TestReadSessions >> PQCountersLabeled::ImportantFlagSwitching [GOOD] >> PQCountersLabeled::NewConsumersCountersAppear >> TPQTest::TestWritePQBigMessage [GOOD] >> TPQTest::TestWritePQ >> TFetchRequestTests::BadTopicName [GOOD] >> TFetchRequestTests::CheckAccess >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:57.323211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:57.323274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:57.323300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:57.323325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:57.323354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:57.323373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:57.323437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:57.323489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:57.323769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:57.380345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:57.380397Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:57.390308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:57.390515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:57.390660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:57.394931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Complete 2025-03-28T09:37:57.395097Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:57.395553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:57.395709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:57.397034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:57.397928Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:57.397967Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:57.398098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:57.398127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:57.398152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:57.398227Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.402963Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:57.492779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:57.492946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.493098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:57.493247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:57.493313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.495184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:57.495317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:57.495465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.495518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:57.495567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:57.495590Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:57.496965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.497002Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:57.497026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:57.498045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.498073Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.498105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:57.498142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:57.500590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:57.501860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:57.501999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:57.502671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:57.502778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:57.502820Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:57.503015Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:57.503046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:57.503158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:57.503210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:57.504414Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:57.504450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:57.504574Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:57.504605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:57.504906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:57.504941Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:57.505003Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:57.505035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:57.505071Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:57.505092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:57.505115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:57.505140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:57.505165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts are done, operation id: 1:0 2025-03-28T09:37:57.505184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:57.505221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:57.505255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:37:57.505290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:57.506439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:57.506507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:57.506532Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
th id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:09.333461Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:38:09.333653Z node 18 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:09.333693Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:38:09.333736Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [18:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:38:09.333931Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:09.333966Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:38:09.335374Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:38:09.335449Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:38:09.335474Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:38:09.335516Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:38:09.335545Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:09.336313Z node 18 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:38:09.336373Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:38:09.336396Z node 18 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:38:09.336420Z node 18 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:38:09.336445Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:38:09.336496Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:38:09.338019Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 
942 } } 2025-03-28T09:38:09.338068Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-28T09:38:09.338212Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 942 } } 2025-03-28T09:38:09.338321Z node 18 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 942 } } 2025-03-28T09:38:09.339173Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 77309413622 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:38:09.339222Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-28T09:38:09.339342Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 77309413622 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:38:09.339401Z node 18 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:38:09.339489Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 77309413622 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:38:09.339550Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:09.339595Z node 18 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:09.339639Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:38:09.339686Z node 18 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-28T09:38:09.341747Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:38:09.341862Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:38:09.343425Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:09.343558Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:09.343790Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:09.343837Z node 18 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:38:09.343938Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:38:09.343978Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:38:09.344022Z node 18 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:38:09.344057Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:38:09.344103Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-28T09:38:09.344172Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:333:2312] message: TxId: 101 2025-03-28T09:38:09.344223Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:38:09.344268Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts are done, operation id: 101:0 2025-03-28T09:38:09.344305Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:38:09.344429Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:38:09.346056Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:38:09.346101Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [18:334:2313] TestWaitNotification: OK eventTxId 101 2025-03-28T09:38:09.346561Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:38:09.346780Z node 18 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" took 238us result status StatusSuccess 2025-03-28T09:38:09.347308Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "DyNumber" TypeId: 4866 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 
IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPartitionTests::DifferentWriteTxBatchingOptions [GOOD] >> TPartitionTests::EndWriteTimestamp_DataKeysBody >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test >> TKeyValueTest::TestConcatWorks >> TPartitionTests::EndWriteTimestamp_DataKeysBody [GOOD] >> TPartitionTests::EndWriteTimestamp_FromMeta >> TPartitionTests::EndWriteTimestamp_FromMeta [GOOD] >> TPartitionTests::UserActCount [GOOD] >> TPartitionTests::EndWriteTimestamp_HeadKeys >> TPartitionTests::TooManyImmediateTxs >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi [GOOD] >> TKeyValueTest::TestLargeWriteAndDelete >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] >> TListAllTopicsTests::ListLimitAndPaging [GOOD] >> TMeteringSink::FlushPutEventsV1 [GOOD] >> TMeteringSink::FlushResourcesReservedV1 [GOOD] >> TMeteringSink::FlushStorageV1 [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] Test command err: 2025-03-28T09:37:53.959321Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:53.959437Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:53.973119Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-28T09:37:53.973320Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:53.973710Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:176:2191] 2025-03-28T09:37:53.974545Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-28T09:37:53.974733Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-28T09:37:53.974875Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step 
TInitInfoRangeStep Got KV request 2025-03-28T09:37:53.975034Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-28T09:37:53.975387Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2025-03-28T09:37:53.975478Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-28T09:37:53.975507Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:37:53.975548Z node 1 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:37:53.000000Z 2025-03-28T09:37:53.975581Z node 1 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-28T09:37:53.975634Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:176:2191] 2025-03-28T09:37:53.975680Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2025-03-28T09:37:53.975740Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 Create distr tx with id = 0 and act no: 1 2025-03-28T09:37:55.243452Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-03-28T09:37:56.519294Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 Create distr tx with id = 2 and act no: 3 2025-03-28T09:37:56.519638Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 2 2025-03-28T09:37:56.519756Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-03-28T09:37:56.519805Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-03-28T09:37:56.519855Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-28T09:37:56.519926Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-03-28T09:37:57.690840Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 1 2025-03-28T09:37:57.691053Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 2 2025-03-28T09:37:57.691111Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 2 2025-03-28T09:37:57.691157Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, 
Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-28T09:37:57.691206Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-03-28T09:37:57.691450Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:57.691493Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:57.691531Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:57.691566Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-03-28T09:37:57.691599Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:57.691619Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:57.691639Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:37:57.691668Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:57.691735Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Send disk status response with cookie: 0 Wait tx committed for tx 0 2025-03-28T09:37:57.713616Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 Wait tx committed for tx 2 2025-03-28T09:37:58.181884Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:58.181964Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:58.196984Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2025-03-28T09:37:58.197189Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:58.197437Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:178:2193] 2025-03-28T09:37:58.198401Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request 2025-03-28T09:37:58.198569Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2025-03-28T09:37:58.198750Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep Got KV request 2025-03-28T09:37:58.198944Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep Got KV request 2025-03-28T09:37:58.199356Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2025-03-28T09:37:58.199469Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2025-03-28T09:37:58.199530Z node 2 :PERSQUEUE 
DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:37:58.199586Z node 2 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:37:58.000000Z 2025-03-28T09:37:58.199625Z node 2 :PERSQUEUE DEBUG: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2025-03-28T09:37:58.199692Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:178:2193] 2025-03-28T09:37:58.199751Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2025-03-28T09:37:58.199800Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:58.499822Z node 2 :PERSQUEUE INFO: new Cookie src1|523ee6f4-81795513-9461a1ea-be2af0a6_0 generated for partition 0 topic 'Root/PQ/rt3.dc1--account--topic' owner src1 2025-03-28T09:37:58.500009Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 Got batch complete: 1 Create distr tx with id = 0 and act no: 1 Create immediate tx with id = 3 and act no: 4 Create immediate tx with id = 6 and act no: 7 2025-03-28T09:37:59.459248Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 0 2025-03-28T09:38:00.697460Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Wait batch completion 2025-03-28T09:38:00.697668Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:38:00.697725Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:38:00.697862Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 1 partNo 0 2025-03-28T09:38:00.698597Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 20 PartNo 0 PackedSize 84 count 1 nextOffset 21 batches 1 2025-03-28T09:38:00.698705Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-28T09:38:00.698772Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-28T09:38:00.698840Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 20 PartNo 0 PackedSize 84 count 1 nextOffset 21 batches 1 2025-03-28T09:38:00.712429Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-28T09:38:00.712556Z node 2 
:PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 3 partNo 0 2025-03-28T09:38:00.716961Z node 2 :PE ... ' partition 0 part blob processing sourceId 'src1' seqNo 10 partNo 0 2025-03-28T09:38:08.379846Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob sourceId 'src1' seqNo 10 partNo 0 result is x0000000000_00000000000000000160_00000_0000000001_00000 size 71 2025-03-28T09:38:08.379912Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] writing blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 old key x0000000000_00000000000000000160_00000_0000000001_00000 new key d0000000000_00000000000000000160_00000_0000000001_00000 size 71 WTime 41173 2025-03-28T09:38:08.381595Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 10 partNo 0 FormedBlobsCount 1 NewHead: Offset 230 PartNo 0 PackedSize 84 count 1 nextOffset 231 batches 1 2025-03-28T09:38:08.382260Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 230,1 HeadOffset 160 endOffset 161 curOffset 231 d0000000000_00000000000000000230_00000_0000000001_00000| size 71 WTime 41173 2025-03-28T09:38:08.382500Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:38:08.382552Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:38:08.382602Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:38:08.382653Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:38:08.382723Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000160_00000_0000000001_00000 2025-03-28T09:38:08.382774Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-03-28T09:38:08.382802Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000230_00000_0000000001_00000| 2025-03-28T09:38:08.382827Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:38:08.382844Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:38:08.382859Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:38:08.382883Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:38:08.382915Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got KV request Got batch complete: 1 Got KV request Got KV request Send disk status response with cookie: 0 Wait tx committed for tx 21 2025-03-28T09:38:08.414205Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 17 WriteNewSizeFromSupportivePartitions# 1 2025-03-28T09:38:08.414277Z node 2 
:PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:38:08.414343Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 230 is stored on disk 2025-03-28T09:38:08.414739Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:38:08.414803Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:38:08.414859Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000160_00000_0000000001_00000|, d0000000000_00000000000000000160_00000_0000000001_00000|] 2025-03-28T09:38:08.414909Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:38:08.414955Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:38:08.414999Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:38:08.415049Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Create distr tx with id = 24 and act no: 25 Create distr tx with id = 26 and act no: 27 Create immediate tx with id = 28 and act no: 29 2025-03-28T09:38:09.400881Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 24 2025-03-28T09:38:09.400947Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 26 2025-03-28T09:38:10.646225Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:38:10.646305Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 Wait batch completion 2025-03-28T09:38:10.646418Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:38:10.646464Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 2 Wait batch completion 2025-03-28T09:38:10.646570Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 26 2025-03-28T09:38:10.646606Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 26 2025-03-28T09:38:10.646648Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-28T09:38:10.646703Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 230 PartNo 0 PackedSize 71 count 1 nextOffset 231 batches 1, NewHead=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0 2025-03-28T09:38:10.646761Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-28T09:38:10.646792Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 
0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-28T09:38:10.646822Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 230 PartNo 0 PackedSize 71 count 1 nextOffset 231 batches 1, NewHead=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0 2025-03-28T09:38:10.646858Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-28T09:38:10.646987Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:38:10.647013Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:38:10.647041Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:38:10.647066Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-03-28T09:38:10.647103Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:38:10.647118Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:38:10.647132Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:38:10.647152Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:38:10.647177Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got batch complete: 1 Got KV request Got KV request Wait for no tx committed Send disk status response with cookie: 0 Wait tx committed for tx 26 2025-03-28T09:38:10.895074Z node 2 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 2 Wait immediate tx complete 28 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 28 2025-03-28T09:38:11.180472Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:11.180528Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:38:11.191247Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [3:179:2194] 2025-03-28T09:38:11.192511Z node 3 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:38:11.000000Z 2025-03-28T09:38:11.192555Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [3:179:2194] 2025-03-28T09:38:11.731939Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:11.732003Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:38:11.745265Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [4:179:2194] 2025-03-28T09:38:11.746967Z node 4 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
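Note on the two TInitEndWriteTimestampStep outcomes seen above ("Initializing EndWriteTimestamp from keys completed" on nodes 2 and 3 versus "skipped because already initialized" on node 4): the log messages imply a simple fallback order during partition init. Below is a minimal C++ sketch of that decision, not the actual YDB implementation; the type and field names (TDataKey, WriteTimestampMs) are hypothetical and introduced only for illustration.

#include <algorithm>
#include <optional>
#include <vector>

struct TDataKey {
    long long WriteTimestampMs = 0;  // hypothetical: write time recorded with each data key
};

// Returns the partition's EndWriteTimestamp after the init step, under the
// assumed order: keep a value restored earlier (e.g. from meta), otherwise
// derive it from the newest data key found by TInitDataRangeStep.
std::optional<long long> InitEndWriteTimestamp(
    std::optional<long long> alreadyRestored,
    const std::vector<TDataKey>& dataKeys)
{
    if (alreadyRestored) {
        // Matches: "Initializing EndWriteTimestamp skipped because already initialized."
        return alreadyRestored;
    }
    if (dataKeys.empty()) {
        // No data keys to derive a timestamp from; leave it unset.
        return std::nullopt;
    }
    // Matches: "Initializing EndWriteTimestamp from keys completed."
    const auto newest = std::max_element(
        dataKeys.begin(), dataKeys.end(),
        [](const TDataKey& a, const TDataKey& b) {
            return a.WriteTimestampMs < b.WriteTimestampMs;
        });
    return newest->WriteTimestampMs;
}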
2025-03-28T09:38:11.747026Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [4:179:2194] 2025-03-28T09:38:12.306326Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:12.306374Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:38:12.318304Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [5:179:2194] >>>> ADD BLOB 0 writeTimestamp=2025-03-28T09:38:12.310165Z >>>> ADD BLOB 1 writeTimestamp=2025-03-28T09:38:12.310190Z >>>> ADD BLOB 2 writeTimestamp=2025-03-28T09:38:12.310207Z >>>> ADD BLOB 3 writeTimestamp=2025-03-28T09:38:12.310222Z >>>> ADD BLOB 4 writeTimestamp=2025-03-28T09:38:12.310234Z >>>> ADD BLOB 5 writeTimestamp=2025-03-28T09:38:12.310249Z >>>> ADD BLOB 6 writeTimestamp=2025-03-28T09:38:12.310262Z >>>> ADD BLOB 7 writeTimestamp=2025-03-28T09:38:12.310274Z >>>> ADD BLOB 8 writeTimestamp=2025-03-28T09:38:12.310287Z >>>> ADD BLOB 9 writeTimestamp=2025-03-28T09:38:12.310303Z 2025-03-28T09:38:12.321292Z node 5 :PERSQUEUE INFO: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2025-03-28T09:38:12.000000Z 2025-03-28T09:38:12.321357Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [5:179:2194] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:76:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:79:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:78:2110] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:82:2057] recipient: [5:78:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:81:2111] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:135:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:77:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:79:2110] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:81:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:83:2057] recipient: [6:79:2110] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:82:2111] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:136:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:36:40.840287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:36:40.840388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:36:40.840438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:36:40.840469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:36:40.840506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:36:40.840529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:36:40.840572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:36:40.840634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:36:40.840874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:36:40.895182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" 
AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:36:40.895235Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:36:40.900395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:36:40.900482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:36:40.900580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:36:40.905181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:36:40.905314Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:36:40.905799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:36:40.905989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:36:40.907479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:36:40.908498Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:36:40.908543Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:36:40.908636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:36:40.908674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:36:40.908715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:36:40.908845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:36:40.914467Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:36:41.004290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:36:41.004485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:36:41.004654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:36:41.004833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:36:41.004887Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:36:41.007060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:36:41.007157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:36:41.007346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:36:41.007412Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:36:41.007455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:36:41.007497Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:36:41.009245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:36:41.009286Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:36:41.009338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:36:41.010518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:36:41.010549Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:36:41.010592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:36:41.010648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:36:41.012980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:36:41.014056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:36:41.014183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:36:41.014903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:36:41.014992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:36:41.015030Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:36:41.015214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:36:41.015252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:36:41.015389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:36:41.015446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:36:41.016729Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:36:41.016769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:36:41.016899Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:36:41.016927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:36:41.017107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:36:41.017137Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:36:41.017209Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:36:41.017230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:36:41.017259Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:36:41.017282Z no ... 
nId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:12.580298Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:38:12.580476Z node 46 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 206us result status StatusSuccess 2025-03-28T09:38:12.581069Z node 46 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 
MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:12.592077Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1034:2808] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-28T09:38:12.592150Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1035:2808] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-28T09:38:12.592208Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:962:2808] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-03-28T09:38:12.592260Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:962:2808] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-03-28T09:38:12.592348Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1034:2808] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743154692569241 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743154692569241 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:38:12.592466Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1035:2808] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ 
Order: 3 Group: 1743154692569241 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:38:12.594822Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][46:1034:2808] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2025-03-28T09:38:12.594892Z node 46 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][46:1035:2808] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-03-28T09:38:12.594942Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:962:2808] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-03-28T09:38:12.594999Z node 46 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][46:962:2808] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TMeteringSink::FlushStorageV1 [GOOD] Test command err: 2025-03-28T09:37:43.717739Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792272871168296:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:43.717938Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:37:43.870090Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00197c/r3tmp/tmpeAY0NA/pdisk_1.dat 2025-03-28T09:37:44.057585Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:44.057661Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:44.075924Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:44.110187Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25470, node 1 2025-03-28T09:37:44.140894Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:37:44.140931Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:37:44.360008Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00197c/r3tmp/yandex8CIIam.tmp 2025-03-28T09:37:44.360036Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00197c/r3tmp/yandex8CIIam.tmp 2025-03-28T09:37:44.362007Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00197c/r3tmp/yandex8CIIam.tmp 2025-03-28T09:37:44.362219Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:37:44.619191Z INFO: TTestServer started on Port 25083 GrpcPort 25470 TClient is connected to server localhost:25083 PQClient connected to 
localhost:25470 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:44.913660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:44.956827Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:37:46.590307Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792285756070927:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.591001Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.591323Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792285756070940:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.597746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:37:46.607081Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792285756070942:2341], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:37:46.693042Z node 1 :TX_PROXY ERROR: Actor# [1:7486792285756071006:2451] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:47.035956Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486792285756071014:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:37:47.041251Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjE0OWJkZjQtNzMzZGE0N2MtNTAyM2JkMWEtMjJhZGJhZWE=, ActorId: [1:7486792285756070925:2335], ActorState: ExecuteState, TraceId: 01jqe1tq2w1b9hwzabd2n7xfep, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:37:47.044953Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:37:47.086126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.110738Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.181952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:37:47.605079Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqe1tqq301p1ayp9e4vxqhac, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NmYyZGUyOWUtMTFhN2JkMmUtZmY0Nzg2YS1jYTUzZmM3Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subscribe to ClusterTracker from [1:7486792290051038599:2637] 2025-03-28T09:37:48.717634Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792272871168296:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:48.717753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList.
Ok 2025-03-28T09:37:53.412453Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-03-28T09:37:53.414273Z node 1 :PQ_READ_PROXY DEBUG: new Create topic request 2025-03-28T09:37:53.439107Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:37:53.439224Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvInterconnect::TEvNodeInfo 2025-03-28T09:37:53.439389Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Registered with mediator time cast 2025-03-28T09:37:53.439392Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Registered with mediator time cast 2025-03-28T09:37:53.439858Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:37:53.439859Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037895][] pipe [1:7486792315820842730:2842] connected; active server actors: 1 2025-03-28T09:37:53.439879Z node 1 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][] pipe [1:7486792315820842710:2835] connected; active server actors: 1 2025-03-28T09:37:53.440023Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] doesn't have tx info 2025-03-28T09:37:53.440063Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:37:53.440087Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] no config, start with empty partitions and default config 2025-03-28T09:37:53.440107Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:37:53.440126Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:53.440163Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] updating configuration. Deleted partitions []. Added partitions [0] 2025-03-28T09:37:53.440170Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037892] doesn't have tx writes info 2025-03-28T09:37:53.440286Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037895][topic1] updating configuration. Deleted partitions []. Added partitions [0] 2025-03-28T09:37:53.440524Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037895][topic1] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037895 2025-03-28T09:37:53.440585Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2025-03-28T09:37:53.440605Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: [72075186224037893][topic2] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2025-03-28T09:37:53.440774Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] doesn't have tx info 2025-03-28T09:37:53.440800Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2025-03-28T09:37:53.440811Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] no config, start with empty partitions and default config 2025-03-28T09:37:53.440823Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Txs.size=0, PlannedTxs.size=0 2025-03-28T09:37:53.440840Z node 1 :PERSQUEUE NOTICE: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:53.440856Z node 1 :PERSQUEUE INFO: [PQ: 72075186224037894] doesn't hav ... 
281474976715677, Partition 0 2025-03-28T09:38:12.281365Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] Handle TEvProposePartitionConfigResult 2025-03-28T09:38:12.281390Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] Partition responses 1/1 2025-03-28T09:38:12.281404Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Try execute txs with state CALCULATING 2025-03-28T09:38:12.281418Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, State CALCULATING 2025-03-28T09:38:12.281435Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 State CALCULATING FrontTxId 281474976715677 2025-03-28T09:38:12.281455Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Received 1, Expected 1 2025-03-28T09:38:12.281488Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState CALCULATED 2025-03-28T09:38:12.281508Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 moved from CALCULATING to CALCULATED 2025-03-28T09:38:12.281783Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] save tx TxId: 281474976715677 State: CALCULATED MinStep: 1743154692300 MaxStep: 18446744073709551615 Step: 1743154692321 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic3" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/dir2/topic3" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7486792359066525710 RawX2: 12884904041 } Partitions { Partition { PartitionId: 0 } } 2025-03-28T09:38:12.281874Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:38:12.282613Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:38:12.282638Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Try execute txs with state CALCULATED 2025-03-28T09:38:12.282656Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, State CALCULATED 2025-03-28T09:38:12.282672Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 State CALCULATED FrontTxId 281474976715677 2025-03-28T09:38:12.282705Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState WAIT_RS 2025-03-28T09:38:12.282719Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 moved from CALCULATED to WAIT_RS 2025-03-28T09:38:12.282752Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Send TEvTxProcessing::TEvReadSet to 0 receivers. 
Wait TEvTxProcessing::TEvReadSet from 0 senders. 2025-03-28T09:38:12.282776Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] HaveParticipantsDecision 1 2025-03-28T09:38:12.282824Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState EXECUTING 2025-03-28T09:38:12.282844Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 moved from WAIT_RS to EXECUTING 2025-03-28T09:38:12.282847Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1743154692321, TxId 281474976715677 2025-03-28T09:38:12.282855Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Received 0, Expected 1 2025-03-28T09:38:12.283051Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:38:12.283075Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:38:12.283086Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:38:12.283099Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:38:12.283104Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:38:12.283110Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] _config_0 2025-03-28T09:38:12.283120Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:38:12.283131Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:38:12.283152Z node 3 :PERSQUEUE DEBUG: CacheProxy. 
Passthrough write request to KV 2025-03-28T09:38:12.283756Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvPQ::TEvTxCommitDone Step 1743154692321, TxId 281474976715677, Partition 0 2025-03-28T09:38:12.283779Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Try execute txs with state EXECUTING 2025-03-28T09:38:12.283801Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:38:12.283808Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, State EXECUTING 2025-03-28T09:38:12.283822Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 State EXECUTING FrontTxId 281474976715677 2025-03-28T09:38:12.283833Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Received 1, Expected 1 2025-03-28T09:38:12.283852Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId: 281474976715677 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2025-03-28T09:38:12.283875Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] complete TxId 281474976715677 2025-03-28T09:38:12.284074Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic3" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/dir2/topic3" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } 2025-03-28T09:38:12.284159Z node 3 :PERSQUEUE NOTICE: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:12.284215Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] delete partitions for TxId 281474976715677 2025-03-28T09:38:12.284234Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState EXECUTED 2025-03-28T09:38:12.284251Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 moved from EXECUTING to EXECUTED 2025-03-28T09:38:12.284490Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] save tx TxId: 281474976715677 State: EXECUTED MinStep: 1743154692300 MaxStep: 18446744073709551615 Step: 1743154692321 Predicate: true Kind: KIND_CONFIG TabletConfig { PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } 
ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic3" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/dir2/topic3" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } } BootstrapConfig { } SourceActor { RawX1: 7486792359066525710 RawX2: 12884904041 } Partitions { Partition { PartitionId: 0 } } 2025-03-28T09:38:12.284612Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:38:12.285660Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:38:12.285683Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Try execute txs with state EXECUTED 2025-03-28T09:38:12.285695Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, State EXECUTED 2025-03-28T09:38:12.285709Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 State EXECUTED FrontTxId 281474976715677 2025-03-28T09:38:12.285722Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TPersQueue::SendEvReadSetAckToSenders 2025-03-28T09:38:12.285735Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState WAIT_RS_ACKS 2025-03-28T09:38:12.285755Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677 moved from EXECUTED to WAIT_RS_ACKS 2025-03-28T09:38:12.285776Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] PredicateAcks: 0/0 2025-03-28T09:38:12.285783Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2025-03-28T09:38:12.285792Z node 3 :PERSQUEUE DEBUG: [TxId: 281474976715677] PredicateAcks: 0/0 2025-03-28T09:38:12.285812Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] add an TxId 281474976715677 to the list for deletion 2025-03-28T09:38:12.285827Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, NewState DELETING 2025-03-28T09:38:12.285843Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] delete key for TxId 281474976715677 2025-03-28T09:38:12.285883Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2025-03-28T09:38:12.286393Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2025-03-28T09:38:12.286413Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] Try execute txs with state DELETING 2025-03-28T09:38:12.286424Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] TxId 281474976715677, State DELETING 2025-03-28T09:38:12.286464Z node 3 :PERSQUEUE DEBUG: [PQ: 72075186224037896] delete TxId 281474976715677 >> TColumnShardTestSchema::ColdTiersWithStat [GOOD] >> TDataShardLocksTest::UseLocksCache [GOOD] >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi >> TPartitionTests::TooManyImmediateTxs [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace >> TKeyValueTest::TestWriteLongKey [GOOD] >> 
TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiersWithStat [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155235.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155235.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143155235.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123155235.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155235.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143155235.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154035.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123155235.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123155235.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154035.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123154035.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123154035.000000s;Name=;Codec=}; 2025-03-28T09:37:15.350727Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:37:15.420030Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:37:15.435607Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:37:15.435834Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:37:15.441658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:37:15.441830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:37:15.441981Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:37:15.442055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:37:15.442126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:37:15.442212Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:37:15.442275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:37:15.442359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:37:15.442434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:37:15.442525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:37:15.442597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:37:15.442665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:37:15.461422Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:37:15.461639Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:37:15.461681Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:37:15.461810Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:37:15.461925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:37:15.461987Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:37:15.462019Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:37:15.462091Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:37:15.462132Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:37:15.462158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:37:15.462178Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:37:15.462276Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:37:15.462314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:37:15.462341Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:37:15.462357Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:37:15.462408Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:37:15.462441Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:37:15.462466Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:37:15.462483Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:37:15.462525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:37:15.462547Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:37:15.462563Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:37:15.462591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:37:15.462614Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:37:15.462638Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:37:15.462948Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=36; 2025-03-28T09:37:15.463000Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=22; 2025-03-28T09:37:15.463050Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=22; 2025-03-28T09:37:15.463112Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=28; 2025-03-28T09:37:15.463217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:37:15.463257Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:37:15.463277Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:37:15.463422Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:37:15.463453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:37:15.463473Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:37:15.463601Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:37:15.463629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... D DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-03-28T09:38:13.732091Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:38:13.732148Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:38:13.732211Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:38:13.732260Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:38:13.732370Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:38:13.732580Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000007:max} readable: 
{1000000007:max} at tablet 9437184 2025-03-28T09:38:13.732702Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:38:13.732861Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:38:13.732923Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:38:13.733376Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-28T09:38:13.733463Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:38:13.733946Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1392:3397];trace_detailed=; 2025-03-28T09:38:13.734355Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-28T09:38:13.734585Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-28T09:38:13.734783Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:38:13.734918Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:188;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:38:13.735250Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:38:13.735377Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:38:13.735513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:38:13.735557Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1392:3397] finished for tablet 9437184 2025-03-28T09:38:13.735980Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1391:3396];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743154693733881,"name":"_full_task","f":1743154693733881,"d_finished":0,"c":0,"l":1743154693735623,"d":1742},"events":[{"name":"bootstrap","f":1743154693734069,"d_finished":884,"c":1,"l":1743154693734953,"d":884},{"a":1743154693735226,"name":"ack","f":1743154693735226,"d_finished":0,"c":0,"l":1743154693735623,"d":397},{"a":1743154693735207,"name":"processing","f":1743154693735207,"d_finished":0,"c":0,"l":1743154693735623,"d":416},{"name":"ProduceResults","f":1743154693734677,"d_finished":535,"c":2,"l":1743154693735541,"d":535},{"a":1743154693735544,"name":"Finish","f":1743154693735544,"d_finished":0,"c":0,"l":1743154693735623,"d":79}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:38:13.736058Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1391:3396];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:38:13.736452Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1391:3396];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743154693733881,"name":"_full_task","f":1743154693733881,"d_finished":0,"c":0,"l":1743154693736104,"d":2223},"events":[{"name":"bootstrap","f":1743154693734069,"d_finished":884,"c":1,"l":1743154693734953,"d":884},{"a":1743154693735226,"name":"ack","f":1743154693735226,"d_finished":0,"c":0,"l":1743154693736104,"d":878},{"a":1743154693735207,"name":"processing","f":1743154693735207,"d_finished":0,"c":0,"l":1743154693736104,"d":897},{"name":"ProduceResults","f":1743154693734677,"d_finished":535,"c":2,"l":1743154693735541,"d":535},{"a":1743154693735544,"name":"Finish","f":1743154693735544,"d_finished":0,"c":0,"l":1743154693736104,"d":560}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1392:3397]->[1:1391:3396] 2025-03-28T09:38:13.736546Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:38:13.733432Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:38:13.736590Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:38:13.736706Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0 >> TKeyValueTest::TestRewriteThenLastValue >> TPartitionTests::WriteSubDomainOutOfSpace [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteLongKey [GOOD] Test command err: Leader for TabletID 
72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:77:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:81:2057] recipient: [5:79:2110] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:83:2057] recipient: [5:79:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! 
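
The reboot-driven KeyValue tests in this trace follow a fixed shape: a "!Reboot <tablet> (actor [...]) on event <EventType> !" marker, the "rebooted!" / "tablet resolver refreshed!" confirmations, and finally "new actor is[<id>]" for the restarted tablet actor. Below is a minimal offline sketch that tallies which event types triggered a reboot, assuming one log record per line; the regexes and helper are hypothetical log-analysis code, not part of ya or YDB tooling.

    import re
    from collections import Counter

    # Hypothetical offline helper (not a ya/YDB utility): pair each
    # "!Reboot <tablet> (actor [...]) on event <Event> !" marker with the
    # later "new actor is[<id>]" line to tally reboot coverage per event.
    REBOOT_RE = re.compile(r"!Reboot \d+ \(actor \[[^\]]+\]\) on event (\S+) !")
    NEW_ACTOR_RE = re.compile(r"new actor is\[[^\]]+\]")

    def reboot_coverage(log_text):
        events = Counter()
        pending = None  # event name awaiting its "new actor" confirmation
        for line in log_text.splitlines():
            m = REBOOT_RE.search(line)
            if m:
                pending = m.group(1)
            elif pending and NEW_ACTOR_RE.search(line):
                events[pending] += 1
                pending = None
        return events

    if __name__ == "__main__":
        sample = ("!Reboot 72057594037927937 (actor [4:56:2097]) on event "
                  "NKikimr::TEvKeyValue::TEvAcquireLock !\n"
                  "!Reboot 72057594037927937 (actor [4:56:2097]) rebooted!\n"
                  "new actor is[4:81:2111]\n")
        print(reboot_coverage(sample))
        # Counter({'NKikimr::TEvKeyValue::TEvAcquireLock': 1})
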
!Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:82:2111] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:136:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:82:2113] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:86:2057] recipient: [7:82:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:85:2114] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:81:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:83:2113] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:87:2057] recipient: [8:83:2113] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:86:2114] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:140:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] >> TPartitionTests::TestNonConflictingActsBatchOk >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::UseLocksCache [GOOD] Test command err: 2025-03-28T09:38:06.783301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:38:06.783718Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:38:06.783757Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ade/r3tmp/tmpAvdH1U/pdisk_1.dat 2025-03-28T09:38:07.217305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:38:07.285136Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:07.330204Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:07.330833Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:07.344157Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:07.434055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:38:07.485704Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:671:2573]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:38:07.487209Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:671:2573]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:38:07.488601Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:671:2573] 2025-03-28T09:38:07.488832Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:07.501167Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:671:2573]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:38:07.526116Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:658:2565], Recipient [1:675:2575]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:38:07.527162Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:07.527336Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:38:07.528752Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:38:07.528815Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:38:07.528874Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:38:07.529750Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:38:07.529828Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:658:2565], Recipient [1:675:2575]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:38:07.530154Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:675:2575] 2025-03-28T09:38:07.530301Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:07.537148Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:38:07.537213Z node 1 
:TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:701:2573] in generation 1 2025-03-28T09:38:07.537343Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:658:2565], Recipient [1:675:2575]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:38:07.537787Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:07.537858Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:38:07.538882Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:38:07.538968Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-28T09:38:07.539003Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:38:07.539258Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:38:07.539390Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:38:07.539439Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:706:2575] in generation 1 2025-03-28T09:38:07.550280Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:38:07.573641Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:38:07.579549Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:38:07.579766Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:709:2594] 2025-03-28T09:38:07.579800Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:07.579855Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:38:07.579898Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:07.580147Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:671:2573], Recipient [1:671:2573]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:38:07.580196Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:38:07.580947Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:38:07.580981Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-28T09:38:07.581055Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:38:07.581118Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:710:2595] 2025-03-28T09:38:07.581139Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:38:07.581166Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-28T09:38:07.581186Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:07.581411Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:675:2575], Recipient [1:675:2575]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:38:07.581444Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 
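
In the datashard trace above, every operation is stepped through named execution units, each attempt logged as "Trying to execute [<op>] at <tablet> on unit <Unit>" followed by an "Execution status ... is Executed" line. The sketch below, assuming exactly that line shape, reconstructs the per-operation unit pipeline (e.g. CheckSchemeTx -> StoreSchemeTx -> FinishPropose); it is illustrative parsing code, not a YDB utility.

    import re
    from collections import defaultdict

    # Illustrative trace reducer (assumed line shape, not a YDB tool):
    # rebuild the execution-unit pipeline each operation walks through.
    TRY_RE = re.compile(r"Trying to execute \[([^\]]+)\] at (\d+) on unit (\S+)")

    def unit_pipelines(log_text):
        pipelines = defaultdict(list)  # (op_id, tablet) -> ordered unit names
        for op_id, tablet, unit in TRY_RE.findall(log_text):
            pipelines[(op_id, tablet)].append(unit)
        return pipelines

    if __name__ == "__main__":
        sample = ("TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead\n"
                  "TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead\n")
        for key, units in unit_pipelines(sample).items():
            print(key, " -> ".join(units))
        # ('0:3', '72075186224037888') CheckRead -> ExecuteRead
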
2025-03-28T09:38:07.581596Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:38:07.581691Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:38:07.581778Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:07.581859Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:07.581904Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:38:07.581946Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:38:07.581977Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:38:07.582016Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:38:07.582049Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:07.582085Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-28T09:38:07.582142Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-28T09:38:07.582239Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:690:2584], Recipient [1:671:2573]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:38:07.582265Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:38:07.582300Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:665:2569], serverId# [1:690:2584], sessionId# [0:0:0] 2025-03-28T09:38:07.582333Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:07.582350Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:07.582368Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037889 2025-03-28T09:38:07.582386Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-03-28T09:38:07.582413Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-03-28T09:38:07.582440Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-28T09:38:07.582467Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:38:07.582522Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:690:2584] 2025-03-28T09:38:07.582565Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:38:07.582666Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:07.582905Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:38:07.582958Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:38:07.583045Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:38:07.583093Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:38:07.583143Z node 1 :TX_DATASHARD TRACE: 
Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:38:07.583175Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:38:07.583202Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:38:07.583435Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:38:07.583459Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:38:07.583485Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:38:07.583530Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:38:07.583569Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:38:07.583597Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 720751 ... 976715663] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:38:13.542589Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:281474976715663] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:38:13.542632Z node 2 :TX_DATASHARD TRACE: Execution status for [0:281474976715663] at 72075186224037888 is Executed 2025-03-28T09:38:13.542666Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715663] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:38:13.542735Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:281474976715663] at 72075186224037888 has finished 2025-03-28T09:38:13.553681Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:13.553759Z node 2 :TX_DATASHARD TRACE: Complete execution for [2500:281474976715662] at 72075186224037888 on unit CompleteOperation 2025-03-28T09:38:13.553841Z node 2 :TX_DATASHARD DEBUG: Complete [2500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [2:937:2727], exec latency: 8 ms, propose latency: 9 ms 2025-03-28T09:38:13.553927Z node 2 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 72075186224037888 {TEvReadSet step# 2500 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2025-03-28T09:38:13.553982Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:13.554225Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:13.554263Z node 2 :TX_DATASHARD TRACE: Complete execution for [0:281474976715663] at 72075186224037888 on unit FinishPropose 2025-03-28T09:38:13.554302Z node 2 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715663 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-28T09:38:13.554374Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:13.554546Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [2:977:2784], Recipient [2:674:2575]: {TEvReadSet step# 2500 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 
72075186224037888 Flags# 0 Seqno# 1} 2025-03-28T09:38:13.554588Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:38:13.554653Z node 2 :TX_DATASHARD DEBUG: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2025-03-28T09:38:13.556206Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:61:2108], Recipient [2:674:2575]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 2 Status: STATUS_NOT_FOUND 2025-03-28T09:38:13.556393Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [2:61:2108], Recipient [2:977:2784]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 2 Status: STATUS_NOT_FOUND 2025-03-28T09:38:13.653264Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1vhdq5d186b7apq84pcm4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=NzcyNDI2NWItYWUzYzNmMDktYWNmMTQzNjItMjE3NWQ0NGE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:13.655406Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1020:2810], Recipient [2:977:2784]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-03-28T09:38:13.655523Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:38:13.655588Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2025-03-28T09:38:13.655686Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-28T09:38:13.655731Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:38:13.655761Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:38:13.655787Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:38:13.655825Z node 2 :TX_DATASHARD TRACE: Activated operation [0:3] at 72075186224037888 2025-03-28T09:38:13.655866Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-28T09:38:13.655889Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:38:13.655910Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:38:13.655924Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2025-03-28T09:38:13.656016Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2025-03-28T09:38:13.656197Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-03-28T09:38:13.656250Z node 2 :TX_DATASHARD TRACE: 72075186224037888 Complete read# 
{[2:1020:2810], 0} after executionsCount# 1 2025-03-28T09:38:13.656293Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1020:2810], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:38:13.656358Z node 2 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[2:1020:2810], 0} finished in read 2025-03-28T09:38:13.656408Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-28T09:38:13.656425Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2025-03-28T09:38:13.656440Z node 2 :TX_DATASHARD TRACE: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:38:13.656455Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:38:13.656480Z node 2 :TX_DATASHARD TRACE: Execution status for [0:3] at 72075186224037888 is Executed 2025-03-28T09:38:13.656495Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:38:13.656512Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:3] at 72075186224037888 has finished 2025-03-28T09:38:13.656537Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2025-03-28T09:38:13.656611Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2025-03-28T09:38:13.657355Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [2:1020:2810], Recipient [2:674:2575]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2025-03-28T09:38:13.657439Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1020:2810], Recipient [2:977:2784]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:38:13.657473Z node 2 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 } 2025-03-28T09:38:13.657563Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2025-03-28T09:38:13.657597Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CheckRead 2025-03-28T09:38:13.657633Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-03-28T09:38:13.657649Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CheckRead 2025-03-28T09:38:13.657667Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit BuildAndWaitDependencies 2025-03-28T09:38:13.657684Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit BuildAndWaitDependencies 2025-03-28T09:38:13.657709Z node 2 :TX_DATASHARD TRACE: Activated operation [0:6] at 72075186224037889 2025-03-28T09:38:13.657729Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-03-28T09:38:13.657744Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit BuildAndWaitDependencies 2025-03-28T09:38:13.657775Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit ExecuteRead 2025-03-28T09:38:13.657791Z node 
2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit ExecuteRead 2025-03-28T09:38:13.657855Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2025-03-28T09:38:13.657973Z node 2 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2500/18446744073709551615 2025-03-28T09:38:13.657999Z node 2 :TX_DATASHARD TRACE: 72075186224037889 Complete read# {[2:1020:2810], 1} after executionsCount# 1 2025-03-28T09:38:13.658023Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1020:2810], 1} sends rowCount# 2, bytes# 64, quota rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:38:13.658059Z node 2 :TX_DATASHARD TRACE: 72075186224037889 read iterator# {[2:1020:2810], 1} finished in read 2025-03-28T09:38:13.658085Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-03-28T09:38:13.658101Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2025-03-28T09:38:13.658115Z node 2 :TX_DATASHARD TRACE: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2025-03-28T09:38:13.658130Z node 2 :TX_DATASHARD TRACE: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2025-03-28T09:38:13.658151Z node 2 :TX_DATASHARD TRACE: Execution status for [0:6] at 72075186224037889 is Executed 2025-03-28T09:38:13.658175Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2025-03-28T09:38:13.658207Z node 2 :TX_DATASHARD TRACE: Execution plan for [0:6] at 72075186224037889 has finished 2025-03-28T09:38:13.658228Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2025-03-28T09:38:13.658291Z node 2 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2025-03-28T09:38:13.658714Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [2:1020:2810], Recipient [2:674:2575]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2025-03-28T09:38:13.658758Z node 2 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 1 } |96.2%| [TA] $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:56.249978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:56.250069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.250106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:56.250145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:56.251057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:56.251104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:56.251197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.251268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:56.252363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:56.336339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:56.336385Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:56.350534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:56.350820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:56.350981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:56.356227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:56.356454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:56.357018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.357207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:56.358889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.360091Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.360154Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.360314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-03-28T09:37:56.360358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.360403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:56.360499Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.366396Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:56.474470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:56.474632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.474821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:56.475049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:56.475095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.477149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.477278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:56.477458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.477524Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:56.477589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:56.477624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:56.479682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.479739Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:56.479774Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:56.481520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.481564Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.481603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-03-28T09:37:56.481659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.485491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:56.487627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:56.487798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:56.488760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.488878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:56.488935Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.489210Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:56.489268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.489415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:56.489486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:56.491443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.491496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.491650Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.491688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:56.492098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.492145Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:56.492235Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:56.492268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.492301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
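
The schemeshard trace records its suboperation state machine as numeric transitions, e.g. "Change state for txid 1:0 2 -> 3", then "3 -> 128", then "128 -> 240". A small sketch follows, assuming that exact message shape, that chains these transitions per txid; the numeric codes are internal TTxState values and are treated as opaque strings here, and consecutive transitions are assumed to arrive in order.

    import re
    from collections import defaultdict

    # Illustrative parser (assumed message shape, not an official tool):
    # chain schemeshard suboperation state transitions per txid. Assumes
    # transitions appear in log order; does not verify src == previous dst.
    STATE_RE = re.compile(r"Change state for txid (\S+) (\d+) -> (\d+)")

    def state_chains(log_text):
        chains = defaultdict(list)
        for txid, src, dst in STATE_RE.findall(log_text):
            if not chains[txid]:
                chains[txid].append(src)
            chains[txid].append(dst)
        return dict(chains)

    if __name__ == "__main__":
        sample = ("INFO: Change state for txid 1:0 2 -> 3\n"
                  "INFO: Change state for txid 1:0 3 -> 128\n"
                  "INFO: Change state for txid 1:0 128 -> 240\n")
        print(state_chains(sample))  # {'1:0': ['2', '3', '128', '240']}
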
2025-03-28T09:37:56.492328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.492360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:56.492405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.492437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:37:56.492465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:56.492521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:56.492578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:37:56.492616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:56.494446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:56.494547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:56.494580Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 2057594046678944, LocalPathId: 1] 2025-03-28T09:38:14.652237Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:38:14.652351Z node 28 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:14.652374Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:38:14.652401Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [28:205:2207], at schemeshard: 72057594046678944, txId: 101, path id: 2 2025-03-28T09:38:14.652706Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:14.652736Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:38:14.653798Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:38:14.653859Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:38:14.653880Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:38:14.653903Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:38:14.653926Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId 
[OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:14.654611Z node 28 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:38:14.654662Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:38:14.654701Z node 28 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:38:14.654729Z node 28 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:38:14.654751Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:38:14.654798Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2025-03-28T09:38:14.655392Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 798 } } 2025-03-28T09:38:14.655420Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-28T09:38:14.655498Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 798 } } 2025-03-28T09:38:14.655567Z node 28 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000002 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 798 } } FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:38:14.656512Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 120259086581 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:38:14.656539Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2025-03-28T09:38:14.656607Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 306 RawX2: 120259086581 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:38:14.656643Z node 28 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:38:14.656699Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply 
TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 306 RawX2: 120259086581 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:38:14.656733Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:14.656758Z node 28 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:14.656782Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:38:14.656808Z node 28 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-28T09:38:14.658191Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:38:14.658383Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:38:14.659252Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:14.659364Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:14.659462Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:14.659496Z node 28 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:38:14.659568Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:38:14.659588Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:38:14.659613Z node 28 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:38:14.659633Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:38:14.659656Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-28T09:38:14.659692Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [28:334:2313] message: TxId: 101 2025-03-28T09:38:14.659721Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:38:14.659750Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:38:14.659773Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:38:14.659852Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:38:14.661050Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:38:14.661080Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [28:335:2314] TestWaitNotification: OK eventTxId 101 2025-03-28T09:38:14.661345Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:38:14.661468Z node 28 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" took 138us result status StatusSuccess 2025-03-28T09:38:14.661778Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "pgint8" TypeId: 12288 Id: 2 NotNull: false TypeInfo { PgTypeId: 20 } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi [GOOD] >> TKeyValueTest::TestWriteDeleteThenReadRemaining >> KeyValueReadStorage::ReadError [GOOD] >> KeyValueReadStorage::ReadErrorWithWrongGroupId [GOOD] >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] Test command err: 2025-03-28T09:38:15.846113Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# ERROR ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-03-28T09:38:15.846166Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV316@keyvalue_storage_read_request.cpp:270} Unexpected 
EvGetResult. KeyValue# 1 Status# ERROR Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1743154695845 ErrorReason# 2025-03-28T09:38:15.852660Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 2 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-03-28T09:38:15.852713Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV318@keyvalue_storage_read_request.cpp:240} Received EvGetResult from an unexpected storage group. KeyValue# 1 GroupId# 2 ExpecetedGroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1743154695852 ErrorReason# 2025-03-28T09:38:15.857178Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:209} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2025-03-28T09:38:15.857224Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV319@keyvalue_storage_read_request.cpp:222} Received EvGetResult with an unexpected cookie. KeyValue# 1 Cookie# 1000 SentGets# 1 GroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 GotAt# 1743154695857 ErrorReason# >> TPQTest::TestPartitionPerConsumerQuota [GOOD] >> TPQTest::TestPartitionWriteQuota >> TPQTest::TestAlreadyWritten [GOOD] >> TPQTest::TestAlreadyWrittenWithoutDeduplication >> TKeyValueTest::TestBasicWriteRead [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun >> TKeyValueCollectorTest::TestKeyValueCollectorSingle >> TKeyValueCollectorTest::TestKeyValueCollectorSingle [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] >> TKeyValueTest::TestWrite200KDeleteThenResponseError [GOOD] >> TKeyValueTest::TestSetExecutorFastLogPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:56.249979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:56.250085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.250120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:56.250153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 
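The three KeyValueReadStorage failures above (KV316, KV318, KV319) correspond to three validation checks applied to an incoming EvGetResult: a non-OK status, a reply from a different storage group than the request was sent to, and a cookie that was never issued for this read. A minimal sketch of that validation, with hypothetical types standing in for the real event structures:

```cpp
#include <cstdint>
#include <optional>
#include <string>

// Hypothetical response shape; field names mirror the log output
// (Status, GroupId, Cookie), not the real TEvGetResult definition.
struct TGetResult {
    std::string Status;   // "OK" or "ERROR"
    uint32_t    GroupId;
    uint64_t    Cookie;
};

struct TReadRequestState {
    uint32_t ExpectedGroupId;
    uint64_t SentGets;     // cookies 0..SentGets-1 are outstanding
};

// Re-creates the three checks the unit tests exercise: a non-OK status
// (KV316), a reply from the wrong storage group (KV318), and a cookie
// that was never issued (KV319).
std::optional<std::string> Validate(const TReadRequestState& st, const TGetResult& r) {
    if (r.Status != "OK")
        return "Unexpected EvGetResult status: " + r.Status;    // KV316
    if (r.GroupId != st.ExpectedGroupId)
        return "EvGetResult from an unexpected storage group";  // KV318
    if (r.Cookie >= st.SentGets)
        return "EvGetResult with an unexpected cookie";         // KV319
    return std::nullopt;  // response accepted
}

int main() {
    TReadRequestState st{/*ExpectedGroupId=*/3, /*SentGets=*/1};
    // Mirrors ReadErrorWithUncorrectCookie: OK reply, right group, cookie 1000.
    auto err = Validate(st, TGetResult{"OK", 3, 1000});
    return err ? 1 : 0;
}
```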
2025-03-28T09:37:56.251097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:56.251154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:56.251249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.251345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:56.252369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:56.315902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:56.315946Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:56.326482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:56.326699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:56.326803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:56.332616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:56.332764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:56.335757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.336496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:56.341108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.345358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.345439Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.345860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:56.345916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.345973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:56.346108Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.352844Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:56.489093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:56.489347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.489578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:56.489807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:56.489928Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.492275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.492417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:56.492608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.492675Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:56.492752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:56.492795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:56.494805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.494869Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:56.494910Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:56.496758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.496811Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.496874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.496937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.501046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:56.503017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:56.503255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:56.504303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.504435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, 
message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:56.504499Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.504816Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:56.504878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.505048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:56.505190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:56.507146Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.507197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.507359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.507420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:56.507824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.507880Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:56.507985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:56.508021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.508085Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:56.508119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.508156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:56.508202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.508239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:37:56.508272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:56.508335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:56.508377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:37:56.508414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:56.510466Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:56.510575Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:56.510614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 137 } } 2025-03-28T09:38:17.175285Z node 1 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-03-28T09:38:17.175376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2025-03-28T09:38:17.175402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-03-28T09:38:17.176912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:38:17.177054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:38:17.177096Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:17.177152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-28T09:38:17.177268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:17.178621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-28T09:38:17.178757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-28T09:38:17.179898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:17.180027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:17.180090Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:38:17.180416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-28T09:38:17.180543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-28T09:38:17.186107Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2025-03-28T09:38:17.186145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:38:17.186409Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:17.186472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:38:17.187041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:38:17.187099Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:38:17.187985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:38:17.188085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:38:17.188121Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:38:17.188167Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-28T09:38:17.188209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:38:17.188312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:38:17.189453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1231 } } 2025-03-28T09:38:17.189506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:38:17.189647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1231 } } 2025-03-28T09:38:17.189754Z node 1 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1231 } } 2025-03-28T09:38:17.191161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 
72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:38:17.191207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:38:17.191360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:38:17.191406Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:38:17.191485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 307 RawX2: 4294969590 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:38:17.191558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:17.191603Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:38:17.191639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:38:17.191674Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:38:17.194244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:38:17.194834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:38:17.195331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:38:17.195625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:38:17.195662Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:38:17.195761Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:38:17.195795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:38:17.195855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:38:17.195887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:38:17.195940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:38:17.196006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:335:2314] message: TxId: 102 2025-03-28T09:38:17.196058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:38:17.196099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:38:17.196150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:38:17.196284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:38:17.198031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:38:17.198080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:613:2568] TestWaitNotification: OK eventTxId 102 2025-03-28T09:38:17.198462Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-28T09:38:17.198519Z node 1 :FLAT_TX_SCHEMESHARD ERROR: Unsuccessful conditional erase: tabletId: 72075186233409546, status: SCHEME_ERROR, error: Schema version mismatch: got 1, expected 2, retry after: 300.000000s, at schemeshard: 72057594046678944 2025-03-28T09:38:17.200230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-28T09:38:17.200362Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-28T09:38:17.200409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T00:06:00.038500Z, at schemeshard: 72057594046678944 2025-03-28T09:38:17.200453Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:56.249981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:56.250075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.250114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:56.250150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:56.251082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:56.251127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:56.251208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.251299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-03-28T09:37:56.252377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:56.315387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:56.315446Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:56.326321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:56.326520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:56.326659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:56.332508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:56.332744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:56.335779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.336501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:56.340993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.345190Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.345254Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.345858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:56.345912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.345977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:56.346063Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.351962Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:56.436758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:56.438856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.440743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:56.441771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:56.441833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.444464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-03-28T09:37:56.444566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:56.444696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.444738Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:56.444788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:56.444814Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:56.446503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.446548Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:56.446573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:56.447912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.447942Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.447967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.448017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.451479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:56.452926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:56.453134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:56.453995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.454086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:56.454129Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.455330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:56.455373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.456203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:56.456286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:56.457850Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.457885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.458017Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.458055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:56.459018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.459145Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:56.459271Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:56.459303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.459342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:56.459370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.459415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:56.459457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.459493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:37:56.459520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:56.459577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:56.459618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:37:56.459647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:56.461503Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:56.461601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:56.461631Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
6678944 2025-03-28T09:37:56.864281Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 103:0 ProgressState 2025-03-28T09:37:56.864344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:37:56.864369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:37:56.864404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:37:56.864430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:37:56.864452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2025-03-28T09:37:56.864502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:375:2343] message: TxId: 103 2025-03-28T09:37:56.864539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:37:56.864565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:37:56.864590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:37:56.864683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:37:56.866083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:37:56.866141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:501:2462] TestWaitNotification: OK eventTxId 103 2025-03-28T09:38:02.149169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:38:02.149257Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:03.917130Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0321 2025-03-28T09:38:03.927749Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0589 2025-03-28T09:38:03.968750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-03-28T09:38:03.968906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-28T09:38:03.969020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2025-03-28T09:38:03.969089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-03-28T09:38:03.969212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-03-28T09:38:03.969263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2025-03-28T09:38:03.969296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-28T09:38:03.979697Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:38:07.356317Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0088 2025-03-28T09:38:07.366831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0154 2025-03-28T09:38:07.407687Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-03-28T09:38:07.407802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-28T09:38:07.407845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2025-03-28T09:38:07.407869Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-03-28T09:38:07.407942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-03-28T09:38:07.407984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2025-03-28T09:38:07.408006Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-28T09:38:07.418367Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:38:10.750656Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0088 2025-03-28T09:38:10.761228Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0154 2025-03-28T09:38:10.802155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-03-28T09:38:10.802277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-28T09:38:10.802341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 
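The conditional-erase requests in these TTL tests carry an Expiration spec (column id, wall-clock cutoff, column unit) plus Limits — e.g. Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } in the request printed a few entries below — so expired rows are deleted in bounded batches rather than in one pass, and a failed attempt (such as the schema-version mismatch earlier in this log) is simply rescheduled. A rough sketch of such a batching loop, under the assumption that BatchMinKeys guarantees forward progress even when a single row exceeds the byte budget:

```cpp
#include <cstddef>
#include <cstdint>
#include <vector>

// Hypothetical row and limit types; the field names follow the request
// printed in the log (BatchMaxBytes/BatchMinKeys/BatchMaxKeys), not the
// real datashard structures.
struct TRow { uint64_t Key; uint64_t Ts; size_t Bytes; };
struct TLimits { size_t BatchMaxBytes; size_t BatchMinKeys; size_t BatchMaxKeys; };

// Collect one batch of expired keys the way the request's limits suggest:
// stop once the batch is full by either byte or key budget, but keep
// taking rows until at least BatchMinKeys expired keys are collected.
std::vector<uint64_t> NextBatch(const std::vector<TRow>& rows,
                                uint64_t expireBefore, TLimits lim) {
    std::vector<uint64_t> batch;
    size_t bytes = 0;
    for (const TRow& r : rows) {
        if (r.Ts >= expireBefore)
            continue;  // not expired yet
        if (batch.size() >= lim.BatchMaxKeys)
            break;
        if (bytes + r.Bytes > lim.BatchMaxBytes && batch.size() >= lim.BatchMinKeys)
            break;
        batch.push_back(r.Key);
        bytes += r.Bytes;
    }
    return batch;
}

int main() {
    TLimits lim{512000, 1, 256};  // values from the logged request
    std::vector<TRow> rows{{1, 10, 100}, {2, 999, 100}};
    auto batch = NextBatch(rows, /*expireBefore=*/60024000, lim);
    return batch.empty() ? 1 : 0;
}
```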
2025-03-28T09:38:10.802376Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-03-28T09:38:10.802458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-03-28T09:38:10.802488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2025-03-28T09:38:10.802510Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-28T09:38:10.812806Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:38:14.094812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 4] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0031 2025-03-28T09:38:14.105402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 2] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0048 2025-03-28T09:38:14.146583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2025-03-28T09:38:14.146769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 4 shard idx 72057594046678944:2 data size 0 row count 0 2025-03-28T09:38:14.146850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], pathId map=indexImplTable, is column=0, is olap=0 2025-03-28T09:38:14.146892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409546 followerId=0, pathId 4: RowCount 0, DataSize 0 2025-03-28T09:38:14.146993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 2 shard idx 72057594046678944:1 data size 0 row count 0 2025-03-28T09:38:14.147033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], pathId map=TTLEnabledTable, is column=0, is olap=0 2025-03-28T09:38:14.147060Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186233409547 followerId=0, pathId 2: RowCount 0, DataSize 0 2025-03-28T09:38:14.157481Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2025-03-28T09:38:17.484364Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2025-03-28T09:38:17.484505Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-28T09:38:17.484725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2025-03-28T09:38:17.484922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Run conditional erase, tabletId: 72075186233409547, request: TableId: 2 Expiration { ColumnId: 2 
WallClockTimestamp: 60024000 ColumnUnit: UNIT_AUTO } SchemaVersion: 3 Indexes { OwnerId: 72057594046678944 PathId: 4 SchemaVersion: 1 KeyMap { IndexColumnId: 1 MainColumnId: 3 } KeyMap { IndexColumnId: 2 MainColumnId: 1 } } Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 }, at schemeshard: 72057594046678944 2025-03-28T09:38:17.485401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:38:17.486259Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2025-03-28T09:38:17.486312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:38:17.489769Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2025-03-28T09:38:17.489942Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2025-03-28T09:38:17.489998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T01:01:00.024000Z, at schemeshard: 72057594046678944 2025-03-28T09:38:17.490056Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:56.433821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:56.433915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.433948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:56.433979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:56.434013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:56.434030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:56.434087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.434174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:56.434404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:56.489975Z node 1 
:FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:56.490010Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:56.499880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:56.500099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:56.500225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:56.503995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:56.504173Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:56.504580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.504738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:56.506019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.506944Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.506997Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.507125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:56.507157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.507189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:56.507285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.512003Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:56.589423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:56.589660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.589854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:56.590044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:56.590084Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.591921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.592010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:56.592131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.592169Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:56.592214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:56.592244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:56.593892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.593930Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:56.593951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:56.595103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.595144Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.595176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.595229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.602183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:56.603813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:56.603942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:56.604649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.604757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:56.604805Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.605035Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:56.605073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.605180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:56.605232Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:56.607086Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.607132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.607285Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.607323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:56.607694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.607726Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:56.607794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:56.607814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.607901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:56.607919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.607943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:56.607978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.608002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:37:56.608020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:56.608061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:56.608088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:37:56.608112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:56.609460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:56.609535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:56.609555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
44 2025-03-28T09:38:16.728883Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.733062Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.733282Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.733362Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.733705Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.733983Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.734463Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.734529Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.734610Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.735683Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.735769Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.735826Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.735882Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.735940Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.736002Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.736080Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.736128Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.736179Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.736231Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.736280Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.736327Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.738196Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.738276Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.738349Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.738399Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.738474Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:16.738511Z node 4 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:38:16.738627Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:38:16.738663Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:38:16.738729Z node 4 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:38:16.738763Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:38:16.738801Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2025-03-28T09:38:16.738870Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:2779:4044] message: TxId: 101 2025-03-28T09:38:16.738916Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:38:16.739006Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:38:16.739047Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:38:16.740029Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 66 2025-03-28T09:38:16.742240Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:38:16.742292Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [4:2780:4045] TestWaitNotification: OK eventTxId 101 2025-03-28T09:38:16.742749Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:38:16.742990Z node 4 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 288us result status StatusSuccess 2025-03-28T09:38:16.743563Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_SECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } Version: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> 
TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TPartitionTests::TestNonConflictingActsBatchOk [GOOD] >> TPartitionTests::TestTxBatchInFederation |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] >> PQCountersLabeled::NewConsumersCountersAppear [GOOD] >> PQCountersSimple::Partition |96.3%| [TA] $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.3%| [TA] {RESULT} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> PQCountersSimple::Partition [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] >> TSyncBrokerTests::ShouldEnqueue >> TQuorumTrackerTests::Erasure4Plus2BlockNotIncludingMyFailDomain_8_2 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] >> TSyncBrokerTests::ShouldReturnToken >> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 >> TEvLocalSyncDataTests::SqueezeBlocks3 [GOOD] >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=143155224.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155224.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155224.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143155224.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123155224.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155224.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=143155224.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154024.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123155224.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123155224.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154024.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123154024.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=123154024.000000s;Name=;Codec=}; 2025-03-28T09:37:04.558031Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:37:04.634559Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:37:04.650152Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:37:04.650440Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:37:04.657147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:37:04.657317Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:37:04.657518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:37:04.657592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:37:04.657659Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:37:04.657736Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:37:04.657811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:37:04.657913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:37:04.657999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:37:04.658078Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:37:04.658156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:37:04.658221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:37:04.678660Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:37:04.679083Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:37:04.679173Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:37:04.679395Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:37:04.679592Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:37:04.679665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:37:04.679703Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:37:04.679773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:37:04.679816Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:37:04.679843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:37:04.679862Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:37:04.679974Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:37:04.680018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:37:04.680045Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:37:04.680062Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:37:04.680122Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:37:04.680156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:37:04.680184Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:37:04.680202Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:37:04.680251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:37:04.680276Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:37:04.680294Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:37:04.680326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:37:04.680351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:37:04.680372Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:37:04.680668Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=32; 2025-03-28T09:37:04.680729Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=26; 2025-03-28T09:37:04.680786Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=23; 2025-03-28T09:37:04.680845Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=26; 2025-03-28T09:37:04.680958Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:37:04.680994Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:37:04.681017Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:37:04.681162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-03-28T09:37:04.681192Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:37:04.681213Z node 1 :TX_COLUMNSHARD NOTICE: tabl ... on_data.cpp:29;EXECUTE:finishLoadingTime=311; 2025-03-28T09:38:20.518098Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=33604; 2025-03-28T09:38:20.524458Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=6301; 2025-03-28T09:38:20.531148Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5950; 2025-03-28T09:38:20.531236Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=6705; 2025-03-28T09:38:20.531364Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=80; 2025-03-28T09:38:20.531446Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=48; 2025-03-28T09:38:20.531548Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=68; 2025-03-28T09:38:20.531646Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=65; 2025-03-28T09:38:20.538966Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=7257; 2025-03-28T09:38:20.548637Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9576; 2025-03-28T09:38:20.548743Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=28; 2025-03-28T09:38:20.548813Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=34; 2025-03-28T09:38:20.548852Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=5; 2025-03-28T09:38:20.548889Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-03-28T09:38:20.548925Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2025-03-28T09:38:20.549021Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=54; 2025-03-28T09:38:20.549078Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-03-28T09:38:20.549158Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=46; 2025-03-28T09:38:20.549192Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-03-28T09:38:20.549251Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2025-03-28T09:38:20.549334Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=48; 2025-03-28T09:38:20.549604Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=237; 2025-03-28T09:38:20.549632Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=70306; 2025-03-28T09:38:20.549751Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:38:20.549830Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:38:20.549869Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:38:20.549915Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:38:20.563246Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-28T09:38:20.563389Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:38:20.563433Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:38:20.563487Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-28T09:38:20.563534Z 
node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:38:20.563566Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:38:20.563602Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:38:20.563629Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:38:20.563695Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:38:20.564073Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:38:20.564135Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2625:4499];tablet_id=9437184;parent=[1:2585:4466];fline=manager.cpp:82;event=ask_data;request=request_id=151;1={portions_count=29};; 2025-03-28T09:38:20.564592Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:38:20.564816Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:38:20.564839Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:38:20.564857Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:38:20.564888Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:38:20.564931Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:38:20.564970Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=6; 2025-03-28T09:38:20.565011Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:38:20.565044Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=6;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:38:20.565080Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:38:20.565109Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:38:20.565171Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:38:20.566254Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-03-28T09:38:20.567375Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2585:4466];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402096 160000/10402096 160000/10402096 80000/5203544 0/0 >> TSyncBrokerTests::ShouldProcessAfterRelease >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId >> TSyncBrokerTests::ShouldReturnToken [GOOD] 
>> TSyncBrokerTests::ShouldReleaseToken >> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] >> TSyncBrokerTests::ShouldProcessAfterRelease [GOOD] >> TSyncBrokerTests::ShouldReleaseInQueue >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId [GOOD] >> TSyncNeighborsTests::SerDes1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::Partition [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:126:2057] recipient: [1:124:2158] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:126:2057] recipient: [1:124:2158] Leader for TabletID 72057594037927937 is [1:130:2162] sender: [1:131:2057] recipient: [1:124:2158] 2025-03-28T09:37:50.540145Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:50.540246Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:172:2057] recipient: [1:170:2193] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:172:2057] recipient: [1:170:2193] Leader for TabletID 72057594037927938 is [1:176:2197] sender: [1:177:2057] recipient: [1:170:2193] Leader for TabletID 72057594037927937 is [1:130:2162] sender: [1:202:2057] recipient: [1:14:2061] 2025-03-28T09:37:50.559791Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:50.576572Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:200:2215] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:37:50.577382Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:208:2221] 2025-03-28T09:37:50.579362Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:208:2221] 2025-03-28T09:37:50.581010Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:209:2222] 2025-03-28T09:37:50.582462Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:209:2222] 2025-03-28T09:37:50.589808Z node 1 :PERSQUEUE INFO: new Cookie default|6cf458d7-b36bbcab-31bdbbf-ef94dd2a_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:37:50.596794Z node 1 :PERSQUEUE INFO: new Cookie default|608c1c69-57467c31-8808555f-c052e4f7_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:37:50.603312Z node 1 :PERSQUEUE INFO: new Cookie default|2154524c-78d7787c-cd145557-418226de_2 generated for partition 0 topic 
'rt3.dc1--asdfgs--topic' owner default Expected: { "sensors": [ { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesQuota" }, "value": 1000000000 }, { "kind": "RATE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadOffsetRewindSum" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadTimeLagMs" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByCommitted" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByLastRead" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": 
"PQ/TimeSinceLastReadMs" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TotalMessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TotalSizeLagByLastRead" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TotalTimeLagMsByLastRead" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/UserPartitionsAnswered" }, "value": 2 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/WriteTimeLagMsByLastRead" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/WriteTimeLagMsByLastReadOld" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "total", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind": ... 
Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user/total/total ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic 2025-03-28T09:38:00.248159Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:00.252238Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 5 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 5 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/0/total ANS GROUP user/total/total ANS GROUP user/0/rt3.dc1--asdfgs--topic ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total/0/rt3.dc1--asdfgs--topic ANS GROUP total CHECKING GROUP user/0/rt3.dc1--asdfgs--topic 2025-03-28T09:38:02.170928Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:02.178185Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 6 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 6 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 ReadRuleGenerations: 6 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } Consumers { Name: "user2" Generation: 6 Important: true } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user2/1/total ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user2/total/total ANS GROUP user/total/total ANS GROUP user2/1/rt3.dc1--asdfgs--topic ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic CHECKING GROUP user2/1/rt3.dc1--asdfgs--topic 2025-03-28T09:38:04.288894Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:04.292814Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 7 actor 
[3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 7 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 ReadRuleGenerations: 6 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } Consumers { Name: "user2" Generation: 6 Important: false } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user2/0/total ANS GROUP user/1/total ANS GROUP user2/total/total ANS GROUP user/total/total ANS GROUP user2/0/rt3.dc1--asdfgs--topic ANS GROUP total/total/total ANS GROUP total/0/rt3.dc1--asdfgs--topic ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic CHECKING GROUP user2/0/rt3.dc1--asdfgs--topic 2025-03-28T09:38:06.380362Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:06.385334Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 8 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 8 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: true } NEW ANS: ANS GROUP total/total/rt3.dc1--asdfgs--topic ANS GROUP user/1/rt3.dc1--asdfgs--topic ANS GROUP user/1/total ANS GROUP user/total/total ANS GROUP total/total/total ANS GROUP rt3.dc1--asdfgs--topic ANS GROUP total ANS GROUP total/1/rt3.dc1--asdfgs--topic CHECKING GROUP user/1/rt3.dc1--asdfgs--topic 2025-03-28T09:38:08.822339Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:08.822406Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:38:08.835536Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:08.836257Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 9 actor [4:200:2215] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 3600 ImportantClientId: "client" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 9 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" 
Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 9 ReadRuleGenerations: 9 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 9 Important: false } Consumers { Name: "client" Generation: 9 Important: true } 2025-03-28T09:38:08.836767Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:208:2221] 2025-03-28T09:38:08.837617Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [4:208:2221] 2025-03-28T09:38:08.838427Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [4:209:2222] 2025-03-28T09:38:08.838892Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [4:209:2222] 2025-03-28T09:38:08.852276Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][] pipe [4:253:2252] connected; active server actors: 1 2025-03-28T09:38:08.855739Z node 4 :PERSQUEUE INFO: new Cookie default|45e277bd-24c74a1-842a980e-e410cf31_0 generated for partition 0 topic 'topic' owner default 2025-03-28T09:38:08.860186Z node 4 :PERSQUEUE INFO: new Cookie default|9c354deb-775552dc-479bc468-ef8595d_1 generated for partition 0 topic 'topic' owner default 2025-03-28T09:38:08.865593Z node 4 :PERSQUEUE INFO: new Cookie default|3dd7c52c-914b1e7a-fa046879-d5ecdf2_2 generated for partition 0 topic 'topic' owner default 2025-03-28T09:38:08.869583Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][topic] pipe [4:300:2293] connected; active server actors: 1 2025-03-28T09:38:14.590541Z node 4 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][topic] pipe [4:399:2379] connected; active server actors: 1 2025-03-28T09:38:21.272594Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:21.272657Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:38:21.286088Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:21.286763Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 10 actor [5:198:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 10 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 10 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 10 Important: false } 2025-03-28T09:38:21.287269Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [5:206:2219] 2025-03-28T09:38:21.289057Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [5:206:2219] 2025-03-28T09:38:21.290299Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: 
StateInit] bootstrapping 1 [5:207:2220] 2025-03-28T09:38:21.291630Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [5:207:2220] 2025-03-28T09:38:21.296389Z node 5 :PERSQUEUE INFO: new Cookie default|ec1c4016-eaba62ec-bfe4d44e-60a912cd_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:21.300806Z node 5 :PERSQUEUE INFO: new Cookie default|8dae8cc3-5e1310d7-9740ad5-660390af_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:21.306214Z node 5 :PERSQUEUE INFO: new Cookie default|350a754f-caa63c5-37ec3363-a7b5d4f4_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:21.310320Z node 5 :PERSQUEUE INFO: new Cookie default|f06ce70d-2857d4f9-ff30c318-ecdba939_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:21.311479Z node 5 :PERSQUEUE INFO: new Cookie default|816ee980-ec53e8d8-5a7807a0-3c8a8f61_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default >> TSyncBrokerTests::ShouldEnqueue [GOOD] >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] >> TSyncBrokerTests::ShouldReleaseToken [GOOD] >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] Test command err: 2025-03-28T09:38:21.975366Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-03-28T09:38:21.975503Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [1:6:2053], enqueued, active: 1, waiting: 1 2025-03-28T09:38:21.975564Z node 1 :BS_SYNCER DEBUG: TEvReleaseSyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token released, active: 1, waiting: 1 2025-03-28T09:38:21.975609Z node 1 :BS_SYNCER DEBUG: ProcessQueue(), VDisk actor id: [0:1:2], actor id: [1:6:2053], token sent, active: 0, waiting: 1 2025-03-28T09:38:22.049779Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-03-28T09:38:22.049887Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], enqueued, active: 1, waiting: 1 2025-03-28T09:38:22.049924Z node 2 :BS_SYNCER DEBUG: TEvReleaseSyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], removed from queue, active: 1, waiting: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes1 [GOOD] Test command err: 2025-03-28T09:38:21.952938Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-03-28T09:38:21.953024Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: 
[1:6:2053], token sent, active: 1, waiting: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:79:2057] recipient: [3:78:2110] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:78:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
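The BS_SYNCER lines in the test output above trace a token broker with capacity one: the first requester gets the token ("token sent"), later requesters are queued ("enqueued"), a release hands the token to the queue head ("ProcessQueue ... token sent"), and releasing an owner that is still waiting simply drops it ("removed from queue"). A self-contained sketch of that protocol; TSyncBroker here is invented for illustration, the real actor lives in ydb/core/blobstorage/vdisk/syncer:

    #include <cstddef>
    #include <deque>
    #include <iostream>
    #include <set>
    #include <string>

    class TSyncBroker {
    public:
        explicit TSyncBroker(size_t limit) : Limit(limit) {}

        void Query(const std::string& actor) {
            if (Active.size() < Limit) {
                Active.insert(actor);
                std::cout << actor << ": token sent, active=" << Active.size()
                          << ", waiting=" << Queue.size() << "\n";
            } else {
                Queue.push_back(actor);
                std::cout << actor << ": enqueued, active=" << Active.size()
                          << ", waiting=" << Queue.size() << "\n";
            }
        }

        void Release(const std::string& actor) {
            if (Active.erase(actor)) {
                std::cout << actor << ": token released\n";
                ProcessQueue();
            } else {
                // Owner gave up while still waiting: drop it from the queue.
                for (auto it = Queue.begin(); it != Queue.end(); ++it) {
                    if (*it == actor) {
                        Queue.erase(it);
                        std::cout << actor << ": removed from queue\n";
                        break;
                    }
                }
            }
        }

    private:
        void ProcessQueue() {
            while (!Queue.empty() && Active.size() < Limit) {
                std::string next = Queue.front();
                Queue.pop_front();
                Active.insert(next);
                std::cout << "ProcessQueue: token sent to " << next << "\n";
            }
        }

        size_t Limit;
        std::set<std::string> Active;
        std::deque<std::string> Queue;
    };

    int main() {
        TSyncBroker broker(/*limit=*/1);
        broker.Query("[1:5:2052]");   // token sent
        broker.Query("[1:6:2053]");   // enqueued
        broker.Release("[1:5:2052]"); // released; queue head gets the token
        broker.Release("[1:6:2053]"); // released with empty queue
    }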
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:77:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:81:2057] recipient: [5:79:2110] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:83:2057] recipient: [5:79:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:82:2111] Leader for TabletID 72057594037927937 is [5:82:2111] sender: [5:136:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:82:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:86:2057] recipient: [7:82:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:85:2114] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:81:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:83:2113] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:87:2057] recipient: [8:83:2113] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:86:2114] Leader for TabletID 72057594037927937 is [8:86:2114] sender: [8:104:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:85:2115] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:89:2057] recipient: [10:85:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:88:2116] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:84:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:86:2115] Leader for TabletID 72057594037927937 is [11:89:2116] sender: [11:90:2057] recipient: [11:86:2115] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:89:2116] Leader for TabletID 72057594037927937 is [11:89:2116] sender: [11:143:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:85:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:88:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:89:2057] recipient: [12:87:2116] Leader for TabletID 72057594037927937 is [12:90:2117] sender: [12:91:2057] recipient: [12:87:2116] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:90:2117] Leader for TabletID 72057594037927937 is [12:90:2117] sender: [12:110:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:86:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:89:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:90:2057] recipient: [13:88:2117] Leader for TabletID 72057594037927937 is [13:91:2118] sender: [13:92:2057] recipient: [13:88:2117] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:91:2118] Leader for TabletID 72057594037927937 is [13:91:2118] sender: [13:111:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
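The repeated "!Reboot ... on event ... !" blocks come from a harness that reruns one scenario many times, restarting the tablet the first time each event type is observed and then checking that the scenario still completes under the new tablet generation ("new actor is[...]"). A toy version of that loop; TFakeTablet and RunScenarioWithRebootOn are illustrative stand-ins, not the actual test framework:

    #include <iostream>
    #include <string>
    #include <vector>

    struct TFakeTablet {
        int Generation = 1;
        bool Process(const std::string& event) { (void)event; return true; }
        void Reboot() { ++Generation; } // stands in for "new actor is [...]"
    };

    // Rerun the scenario, rebooting the tablet the first time rebootEvent is seen.
    void RunScenarioWithRebootOn(const std::string& rebootEvent,
                                 const std::vector<std::string>& script) {
        TFakeTablet tablet;
        bool rebooted = false;
        for (const auto& ev : script) {
            if (!rebooted && ev == rebootEvent) {
                std::cout << "!Reboot on event " << ev << "\n";
                tablet.Reboot();
                rebooted = true;
            }
            tablet.Process(ev); // after the reboot the request is replayed
        }
        std::cout << "scenario done, tablet generation=" << tablet.Generation << "\n";
    }

    int main() {
        std::vector<std::string> script = {
            "TEvServerConnected", "TEvExecuteTransaction", "TEvIntermediate",
            "TEvRead", "TEvNotify"};
        for (const auto& ev : script) { // one reboot point per event type
            RunScenarioWithRebootOn(ev, script);
        }
    }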
Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:89:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:92:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:93:2057] recipient: [14:91:2120] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:95:2057] recipient: [14:91:2120] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:94:2121] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:148:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:89:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:92:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:93:2057] recipient: [15:91:2120] Leader for TabletID 72057594037927937 is [15:94:2121] sender: [15:95:2057] recipient: [15:91:2120] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:94:2121] Leader for TabletID 72057594037927937 is [15:94:2121] sender: [15:148:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseToken [GOOD] Test command err: 2025-03-28T09:38:21.952946Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-03-28T09:38:22.031225Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-03-28T09:38:22.031343Z node 2 :BS_SYNCER DEBUG: TEvReleaseSyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token released, active: 1, waiting: 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] Test command err: 2025-03-28T09:38:21.952967Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [1:5:2052], token sent, active: 1, waiting: 0 2025-03-28T09:38:21.953073Z node 1 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [1:6:2053], enqueued, active: 1, waiting: 1 2025-03-28T09:38:22.028589Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:1], actor id: [2:5:2052], token sent, active: 1, waiting: 0 2025-03-28T09:38:22.028713Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:6:2053], 
enqueued, active: 1, waiting: 1 2025-03-28T09:38:22.028780Z node 2 :BS_SYNCER DEBUG: TEvQuerySyncToken, VDisk actor id: [0:1:2], actor id: [2:7:2054], enqueued, active: 1, waiting: 1 >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] 2025-03-28T09:38:08.626146Z node 1 :KEYVALUE ERROR: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return false, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] 2025-03-28T09:38:14.886749Z node 2 :KEYVALUE ERROR: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return false, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:449:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:452:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:453:2057] recipient: [4:451:2377] Leader for TabletID 72057594037927937 is [4:454:2378] sender: [4:455:2057] recipient: [4:451:2377] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! 
new actor is[4:454:2378] Leader for TabletID 72057594037927937 is [4:454:2378] sender: [4:508:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:449:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:452:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:453:2057] recipient: [5:451:2377] Leader for TabletID 72057594037927937 is [5:454:2378] sender: [5:455:2057] recipient: [5:451:2377] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:454:2378] Leader for TabletID 72057594037927937 is [5:454:2378] sender: [5:508:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:450:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:453:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:454:2057] recipient: [6:452:2377] Leader for TabletID 72057594037927937 is [6:455:2378] sender: [6:456:2057] recipient: [6:452:2377] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:455:2378] Leader for TabletID 72057594037927937 is [6:455:2378] sender: [6:509:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:37:56.249957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:56.250132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.250172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:56.250222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:56.251094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:56.251131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:56.251201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:56.251277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:56.252359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:56.334633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:56.334676Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:56.348417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:56.348670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:56.348833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:56.353672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:56.353861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:56.354394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.354573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:37:56.356173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.357309Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.357368Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.357530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:56.357566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.357606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:56.357701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.363292Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:37:56.478069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:56.478270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.478463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:56.478721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:56.478778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.480717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.480824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:56.480969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.481022Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:56.481080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:56.481111Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:56.482673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.482738Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:56.482770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state 
for txid 1:0 3 -> 128 2025-03-28T09:37:56.484362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.484398Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:56.484434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.484484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.488194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:56.489836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:56.489986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:56.490888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:56.490994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:56.491044Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.491341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:56.491390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:56.491540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:56.491598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:56.493162Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:56.493209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:56.493337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:56.493372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:56.493786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 
72057594046678944 2025-03-28T09:37:56.493824Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:56.493918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:56.493946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.493977Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:56.494006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.494034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:37:56.494122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:56.494152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:37:56.494182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:37:56.494244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:37:56.494283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:37:56.494315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:37:56.496113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:56.496200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:37:56.496234Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
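The FLAT_TX_SCHEMESHARD trace above walks one operation through its sub-operation states: 2 -> 3 (create parts), 3 -> 128 (configure parts), 128 -> 240 (propose planned by the coordinator), after which TDone completes the operation and publishes the path. A compact sketch of that progression; the numeric codes are copied from the log lines, while the enum names are descriptive guesses rather than the real schemeshard definitions:

    #include <iostream>
    #include <map>

    enum class EState : int {
        CreateParts = 2,     // "TCreateParts ... ProgressState"
        ConfigureParts = 3,  // "TConfigureParts ... ProgressState"
        Propose = 128,       // "TPropose ProgressState" / wait for coordinator plan
        Done = 240,          // "TDone opId# 1:0 ProgressState"
    };

    int main() {
        // Transition table matching the "Change state for txid" lines.
        const std::map<EState, EState> next = {
            {EState::CreateParts, EState::ConfigureParts},
            {EState::ConfigureParts, EState::Propose},
            {EState::Propose, EState::Done},
        };
        EState s = EState::CreateParts;
        while (true) {
            auto it = next.find(s);
            if (it == next.end()) break; // Done has no successor
            std::cout << "Change state for txid 1:0 " << static_cast<int>(s)
                      << " -> " << static_cast<int>(it->second) << "\n";
            s = it->second;
        }
        std::cout << "Operation and all the parts is done\n";
    }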
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:22.322897Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:22.323075Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:22.331628Z node 37 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [37:125:2151] sender: [37:240:2058] recipient: [37:15:2062] 2025-03-28T09:38:22.341342Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:22.341634Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:22.341870Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:22.342155Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:22.342240Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:22.344680Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:22.344812Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:22.345048Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:22.345131Z node 37 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:22.345201Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:22.345266Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:22.346905Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:22.346976Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:22.347042Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:22.348251Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:22.348296Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:22.348371Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:22.348455Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 
ready parts: 1/1 2025-03-28T09:38:22.348657Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:22.349771Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:22.350046Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:22.350915Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:22.351088Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 133 RawX2: 158913792108 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:22.351168Z node 37 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:22.351508Z node 37 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:22.351592Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:22.351898Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:22.352011Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:22.353513Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:22.353583Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:22.353814Z node 37 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:22.353885Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [37:207:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:38:22.354283Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:22.354351Z node 37 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:22.354557Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:22.354608Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:22.354679Z node 37 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:22.354764Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 
1/1 2025-03-28T09:38:22.354832Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:22.354893Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:22.354953Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts are done, operation id: 1:0 2025-03-28T09:38:22.355005Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:22.355091Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:22.355154Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:22.355210Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:22.355785Z node 37 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:22.355928Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:22.355991Z node 37 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:38:22.356057Z node 37 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:38:22.356141Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:22.356272Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:38:22.358249Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:38:22.358890Z node 37 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:38:22.362400Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:22.362958Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:22.363132Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2025-03-28T09:38:22.363814Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To 
enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2025-03-28T09:38:22.364234Z node 37 :TX_PROXY DEBUG: actor# [37:270:2261] Bootstrap 2025-03-28T09:38:22.397136Z node 37 :TX_PROXY DEBUG: actor# [37:270:2261] Become StateWork (SchemeCache [37:275:2266]) 2025-03-28T09:38:22.399146Z node 37 :TX_PROXY DEBUG: actor# [37:270:2261] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:38:22.401408Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral PG type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:22.401661Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2025-03-28T09:38:22.402300Z node 37 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TSyncNeighborsTests::SerDes2 [GOOD] >> TKeyValueTest::TestCopyRangeWorks [GOOD] >> TKeyValueTest::TestCopyRangeWorksNewApi |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes2 [GOOD] >> TColumnShardTestSchema::ForgetWithLostAnswer |96.4%| [TA] $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] >> TPQTest::TestTimeRetention >> TPQTest::TestMaxTimeLagRewind [GOOD] >> TPQTest::TestManyConsumers >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TPQTest::TestReadSessions [GOOD] >> TPQTest::TestReadSubscription >> TPQTest::TestTimeRetention [GOOD] >> TPQTest::TestTabletRestoreEventsOrder >> TKeyValueTest::TestBasicWriteReadOverrun [GOOD] >> TKeyValueTest::TestBlockedEvGetRequest >> RetryPolicy::RetryWithBatching [GOOD] >> TPartitionTests::TestTxBatchInFederation [GOOD] >> CdcStreamChangeCollector::InsertSingleRow >> CdcStreamChangeCollector::UpsertToSameKey >> AsyncIndexChangeCollector::UpsertSingleRow >> AsyncIndexChangeCollector::DeleteNothing >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn >> CdcStreamChangeCollector::UpsertManyRows >> AsyncIndexChangeCollector::UpsertToSameKey >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] >> AsyncIndexChangeCollector::InsertSingleRow >> TPQTest::TestTabletRestoreEventsOrder [GOOD] >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestTxBatchInFederation [GOOD] Test command err: 2025-03-28T09:37:45.517978Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:45.518077Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:45.537338Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:177:2192] 2025-03-28T09:37:45.538328Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:177:2192] Got cmd write: CmdWrite { Key: "i0000000001" Value: "\010\000\020\000\030\000(\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } ... 
.772399Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 1, TxId 10 2025-03-28T09:38:23.772438Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:38:23.772471Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:38:24.500830Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:38:25.737176Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2025-03-28T09:38:25.737347Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse Got batch complete: 17 Wait batch completion 2025-03-28T09:38:25.737504Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 10 Wait kv request 2025-03-28T09:38:25.983683Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2025-03-28T09:38:25.983727Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 0 2025-03-28T09:38:25.983767Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-28T09:38:25.983799Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-03-28T09:38:25.983849Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-28T09:38:25.983871Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::DropOwner. 2025-03-28T09:38:25.983953Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-28T09:38:25.983990Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-28T09:38:25.984033Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2025-03-28T09:38:25.984073Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-28T09:38:25.984133Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Already written message. Topic: 'rt3.dc1--account--topic' Partition: 0 SourceId: 'src4'. Message seqNo: 7. Committed seqNo: (NULL). Writing seqNo: 7. EndOffset: 50. CurOffset: 50. 
Offset: 50 2025-03-28T09:38:25.984201Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 8 partNo 0 2025-03-28T09:38:25.984819Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 8 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 84 count 1 nextOffset 52 batches 1 2025-03-28T09:38:25.984868Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 9 partNo 0 2025-03-28T09:38:25.984899Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 9 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 136 count 2 nextOffset 53 batches 1 2025-03-28T09:38:25.984924Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 10 partNo 0 2025-03-28T09:38:25.984946Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 10 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 188 count 3 nextOffset 54 batches 1 2025-03-28T09:38:25.984970Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 11 partNo 0 2025-03-28T09:38:25.984997Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 11 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 240 count 4 nextOffset 55 batches 1 2025-03-28T09:38:25.985032Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src4' seqNo 12 partNo 0 2025-03-28T09:38:25.985055Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src4' seqNo 12 partNo 0 FormedBlobsCount 0 NewHead: Offset 51 PartNo 0 PackedSize 292 count 5 nextOffset 56 batches 1 2025-03-28T09:38:25.985078Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2025-03-28T09:38:25.985102Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-28T09:38:25.985142Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 51 PartNo 0 PackedSize 292 count 5 nextOffset 56 batches 1 2025-03-28T09:38:25.985168Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2025-03-28T09:38:25.985229Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::CommitWriteOperations TxId: 10 2025-03-28T09:38:25.985262Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 
0, State: StateIdle] t.WriteInfo->BodyKeys.size=0, t.WriteInfo->BlobsFromHead.size=0 2025-03-28T09:38:25.985296Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 51 PartNo 0 PackedSize 292 count 5 nextOffset 56 batches 1 2025-03-28T09:38:25.985630Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--account--topic' partition 0 compactOffset 51,5 HeadOffset 50 endOffset 50 curOffset 56 d0000000000_00000000000000000051_00000_0000000005_00000| size 189 WTime 21151 2025-03-28T09:38:25.985743Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:38:25.985780Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:38:25.985819Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:38:25.985843Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:38:25.985874Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc2 2025-03-28T09:38:25.985894Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc4 2025-03-28T09:38:25.985910Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psrc1 2025-03-28T09:38:25.985940Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000051_00000_0000000005_00000| 2025-03-28T09:38:25.985954Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:38:25.985968Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:38:25.985983Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] I0000000000 2025-03-28T09:38:25.986002Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:38:25.986023Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== Got KV request Got KV request Got KV request Got KV request Got KV request Got KV request Got KV request Got KV request Wait tx committed for tx 0 2025-03-28T09:38:26.016891Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 85 WriteNewSizeFromSupportivePartitions# 4 2025-03-28T09:38:26.016950Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:38:26.016995Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 50 is already written 2025-03-28T09:38:26.017021Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-28T09:38:26.017047Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 50 is already written 2025-03-28T09:38:26.017073Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:38:26.017117Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 50 is already written 2025-03-28T09:38:26.017140Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:38:26.017163Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 50 is already written 2025-03-28T09:38:26.017182Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:38:26.017204Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 11, partNo: 0, Offset: 50 is already written 2025-03-28T09:38:26.017221Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:38:26.017255Z node 5 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'src4', Topic: 'rt3.dc1--account--topic', Partition: 0, SeqNo: 12, partNo: 0, Offset: 50 is already written Wait immediate tx complete 3 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 3 Wait immediate tx complete 6 Got propose result: Origin: 72057594037927937 Status: COMPLETE TxId: 6 Wait tx committed for tx 10 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! 
!Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:76:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:79:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:80:2057] recipient: [10:78:2110] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:82:2057] recipient: [10:78:2110] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:81:2111] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:135:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:76:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:78:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:80:2057] recipient: [11:79:2110] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:82:2057] recipient: [11:79:2110] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:81:2111] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:135:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:77:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:80:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:81:2057] recipient: [12:79:2110] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:83:2057] recipient: [12:79:2110] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:82:2111] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:136:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:83:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:84:2057] recipient: [13:82:2113] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:86:2057] recipient: [13:82:2113] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:85:2114] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:139:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:83:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:84:2057] recipient: [14:82:2113] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:86:2057] recipient: [14:82:2113] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:85:2114] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:84:2057] recipient: [15:83:2113] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:85:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:87:2057] recipient: [15:83:2113] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:86:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] 2025-03-28T09:38:26.023180Z node 17 :KEYVALUE ERROR: {KV323@keyvalue_storage_read_request.cpp:254} Received BLOCKED EvGetResult. 
KeyValue# 72057594037927937 Status# BLOCKED Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 0 ErrorReason# block race detected 2025-03-28T09:38:26.026496Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 HandleBlockBlobStorageResult, msg->Status: ALREADY, not discovered Marker# TSYS21 2025-03-28T09:38:26.026556Z node 17 :TABLET_MAIN ERROR: Tablet: 72057594037927937 Type: KeyValue, EReason: ReasonBootBSError, SuggestedGeneration: 0, KnownGeneration: 3 Marker# TSYS31 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestTabletRestoreEventsOrder [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-28T09:37:49.500698Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:49.500796Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-28T09:37:49.514440Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:178:2193], now have 1 active actors on pipe 2025-03-28T09:37:49.514538Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:37:49.527492Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-03-28T09:37:49.530323Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: 
METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-03-28T09:37:49.530482Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:49.532909Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } 2025-03-28T09:37:49.533019Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:49.533098Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:49.533153Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:2:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:49.533186Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:3:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:37:49.533774Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:49.534197Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-28T09:37:49.535950Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2025-03-28T09:37:49.536013Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-28T09:37:49.536059Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:37:49.537555Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-03-28T09:37:49.537667Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2025-03-28T09:37:49.537704Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2025-03-28T09:37:49.537737Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit request with generation 1 2025-03-28T09:37:49.537763Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit with generation 1 done 2025-03-28T09:37:49.537876Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:49.537909Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:49.537964Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:49.537994Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:49.538018Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cuser 2025-03-28T09:37:49.538033Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-28T09:37:49.538062Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000ctest 2025-03-28T09:37:49.538092Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000utest 2025-03-28T09:37:49.538120Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:49.538162Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:37:49.538246Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:49.538287Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:49.538407Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:37:49.538665Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:37:49.538897Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-28T09:37:49.540087Z node 1 :PERSQUEUE DEBUG: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 
2025-03-28T09:37:49.540122Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-03-28T09:37:49.540166Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2025-03-28T09:37:49.541402Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Process pending events. Count 0 2025-03-28T09:37:49.541469Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2025-03-28T09:37:49.541493Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2025-03-28T09:37:49.541516Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit request with generation 1 2025-03-28T09:37:49.541537Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user test reinit with generation 1 done 2025-03-28T09:37:49.541632Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:49.541660Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:49.541682Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-28T09:37:49.541702Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-28T09:37:49.541731Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-03-28T09:37:49.541758Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-03-28T09:37:49.541779Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001ctest 2025-03-28T09:37:49.541794Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001utest 2025-03-28T09:37:49.541812Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:49.541849Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-28T09:37:49.541904Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:37:49.541929Z node 1 :PERSQUEUE DEBUG: [P ... 
TabletID 72057594037927938 is [33:153:2174] sender: [33:154:2057] recipient: [33:147:2170] Leader for TabletID 72057594037927937 is [33:107:2139] sender: [33:179:2057] recipient: [33:14:2061] 2025-03-28T09:38:25.315435Z node 33 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:25.316149Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 32 actor [33:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 1000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 32 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 32 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 32 Important: false } 2025-03-28T09:38:25.316646Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [33:185:2198] 2025-03-28T09:38:25.318677Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [33:185:2198] 2025-03-28T09:38:25.320142Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [33:186:2199] 2025-03-28T09:38:25.321542Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [33:186:2199] 2025-03-28T09:38:25.326218Z node 33 :PERSQUEUE INFO: new Cookie default|7058d8-48e338a5-da20404d-8494152b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:25.332317Z node 33 :PERSQUEUE INFO: new Cookie default|91ce9080-288791a8-fb1058fe-d8ac7f01_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:25.337671Z node 33 :PERSQUEUE INFO: new Cookie default|3a8a09cb-22c30e9a-3034f935-4e02f56e_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:25.346047Z node 33 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:25.348757Z node 33 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 33 actor [33:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 33 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 32 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 32 Important: false } 2025-03-28T09:38:25.354280Z node 33 :PERSQUEUE INFO: new Cookie default|3d9a79cd-4f345e48-fd9eb147-f368f8b9_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:25.363946Z node 33 :PERSQUEUE INFO: new Cookie default|7ca70e91-e1cf472d-c9e5ad2-85901190_4 generated 
for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:25.375766Z node 33 :PERSQUEUE INFO: new Cookie default|2f5657bc-afba876d-dece5223-bf0de45b_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:103:2057] recipient: [34:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:103:2057] recipient: [34:101:2135] Leader for TabletID 72057594037927937 is [34:107:2139] sender: [34:108:2057] recipient: [34:101:2135] 2025-03-28T09:38:25.684702Z node 34 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:25.684757Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [34:149:2057] recipient: [34:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [34:149:2057] recipient: [34:147:2170] Leader for TabletID 72057594037927938 is [34:153:2174] sender: [34:154:2057] recipient: [34:147:2170] Leader for TabletID 72057594037927937 is [34:107:2139] sender: [34:177:2057] recipient: [34:14:2061] 2025-03-28T09:38:25.697684Z node 34 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:25.698305Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 34 actor [34:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 1000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 34 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 34 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 34 Important: false } 2025-03-28T09:38:25.698828Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [34:183:2196] 2025-03-28T09:38:25.700636Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [34:183:2196] 2025-03-28T09:38:25.701714Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [34:184:2197] 2025-03-28T09:38:25.703049Z node 34 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [34:184:2197] 2025-03-28T09:38:25.707894Z node 34 :PERSQUEUE INFO: new Cookie default|4fe9b0f-1e3d0ae0-8269d983-20bd4d59_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:25.712766Z node 34 :PERSQUEUE INFO: new Cookie default|eebe744a-9cca2f9d-b236c284-97a98e64_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:25.717153Z node 34 :PERSQUEUE INFO: new Cookie default|6bcb741b-727bfba2-f988be66-f6a058f6_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:25.724117Z node 34 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:25.727599Z node 34 :PERSQUEUE INFO: [PQ: 
72057594037927937] Config applied version 35 actor [34:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 35 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 34 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 34 Important: false } 2025-03-28T09:38:25.732526Z node 34 :PERSQUEUE INFO: new Cookie default|549045-cd1da5a1-bb4e5906-b324d0e6_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:25.741288Z node 34 :PERSQUEUE INFO: new Cookie default|88a8da26-7b30827c-74747f60-d049ff12_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:25.752606Z node 34 :PERSQUEUE INFO: new Cookie default|27ad34f3-9bf12637-169d9f6a-6ece1b9_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:26.096688Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:26.096766Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:38:26.125966Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:26.126041Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:38:26.128161Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:26.129083Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 36 actor [35:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 36 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 36 ReadRuleGenerations: 36 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 36 Important: false } Consumers { Name: "aaa" Generation: 36 Important: true } 2025-03-28T09:38:26.129666Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [35:245:2245] 2025-03-28T09:38:26.130470Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [35:245:2245] 2025-03-28T09:38:26.131413Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [35:247:2247] 2025-03-28T09:38:26.131954Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 3 [35:247:2247] 
2025-03-28T09:38:26.157452Z node 35 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:26.157528Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:38:26.158350Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [35:324:2307] 2025-03-28T09:38:26.159625Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [35:326:2309] 2025-03-28T09:38:26.162703Z node 35 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:38:26.162765Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 4 [35:324:2307] 2025-03-28T09:38:26.163021Z node 35 :PERSQUEUE INFO: [topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:38:26.163049Z node 35 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 4 [35:326:2309] >> TColumnShardTestSchema::ColdTiers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:87:2116] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:91:2057] recipient: [11:87:2116] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:90:2117] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:110:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! 
Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:86:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:89:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:90:2057] recipient: [12:88:2117] Leader for TabletID 72057594037927937 is [12:91:2118] sender: [12:92:2057] recipient: [12:88:2117] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:91:2118] Leader for TabletID 72057594037927937 is [12:91:2118] sender: [12:111:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:89:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:92:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:93:2057] recipient: [13:91:2120] Leader for TabletID 72057594037927937 is [13:94:2121] sender: [13:95:2057] recipient: [13:91:2120] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:94:2121] Leader for TabletID 72057594037927937 is [13:94:2121] sender: [13:148:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:89:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:92:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:93:2057] recipient: [14:91:2120] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:95:2057] recipient: [14:91:2120] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:94:2121] Leader for TabletID 72057594037927937 is [14:94:2121] sender: [14:148:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:90:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:93:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:94:2057] recipient: [15:92:2120] Leader for TabletID 72057594037927937 is [15:95:2121] sender: [15:96:2057] recipient: [15:92:2120] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:95:2121] Leader for TabletID 72057594037927937 is [15:95:2121] sender: [15:149:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2025-03-28T09:33:31.926632Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.926705Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.926728Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2025-03-28T09:33:31.927199Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-28T09:33:31.927245Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.927273Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.928514Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007786s 2025-03-28T09:33:31.928948Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2025-03-28T09:33:31.928970Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.928988Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.929032Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006603s 2025-03-28T09:33:31.929458Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2025-03-28T09:33:31.929485Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.929504Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:33:31.929549Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.008478s 2025-03-28T09:33:31.943092Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1743154411943062 2025-03-28T09:33:32.330279Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486791192442202822:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:32.336485Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:33:32.475630Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486791195437522715:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:32.475674Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001cdb/r3tmp/tmpLlRGjX/pdisk_1.dat 2025-03-28T09:33:32.692633Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:33:32.723856Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:33:33.230294Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:33:33.309738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:33.309856Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:33.320361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:33:33.320448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:33:33.337140Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:33:33.337341Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:33.342142Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:33:33.342949Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; TServer::EnableGrpc on GrpcPort 8068, node 1 2025-03-28T09:33:33.535415Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001cdb/r3tmp/yandexg8HaCI.tmp 2025-03-28T09:33:33.535444Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001cdb/r3tmp/yandexg8HaCI.tmp 2025-03-28T09:33:33.535634Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001cdb/r3tmp/yandexg8HaCI.tmp 2025-03-28T09:33:33.535753Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:33:33.675522Z INFO: TTestServer started on Port 29132 GrpcPort 
8068 TClient is connected to server localhost:29132 PQClient connected to localhost:8068 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:33:34.203449Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-28T09:33:36.715767Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486791212617392197:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:36.715916Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:36.726868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486791212617392231:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:33:36.768364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-28T09:33:36.830892Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486791212617392234:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-28T09:33:37.082857Z node 2 :TX_PROXY ERROR: Actor# [2:7486791212617392262:2131] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:33:37.119236Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486791209622073102:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:33:37.119233Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486791216912359565:2318], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:33:37.120933Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDM1ZjNlNDMtZjI0MzAxMDctMzdhYjE2YTItNjlkZWQzOGM=, ActorId: [1:7486791209622072997:2337], ActorState: ExecuteState, TraceId: 01jqe1k35m2fj21f7vy77xpzcs, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:33:37.120534Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YTUwOGQ2NjktZjhiYWFhZDUtY2UzNTQzMzgtMmRhMWUxZDg=, ActorId: [2:7486791212617392191:2306], ActorState: ExecuteState, TraceId: 01jqe1k31ze056375cds3n82gd, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:33:37.122954Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:33:37.124124Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:33:37.169028Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720661:0, at schemeshard: 72057594046644480 2025-03-28T09:33:37.311508Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486791192442202822:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:33:37.311882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:33:37.466074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720662:0, at schemeshard: 72057594046644480 2025-03-28T09:33:37.475849Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:5 ... 
Q: 72075186224037892, Partition: 0, State: StateIdle] d0000000000_00000000000000000000_00000_0000000010_00000| 2025-03-28T09:38:24.271009Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:38:24.271032Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:38:24.271055Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:38:24.271095Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:38:24.271182Z node 17 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 10 size 1208 2025-03-28T09:38:24.273650Z node 17 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 10 size 1208 actorID [17:7486792437027408952:2603] 2025-03-28T09:38:24.273747Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1230 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:38:24.273795Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:38:24.273793Z node 17 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 size 1208 2025-03-28T09:38:24.273851Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-03-28T09:38:24.273880Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:38:24.273915Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-03-28T09:38:24.273929Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:38:24.273952Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-03-28T09:38:24.273968Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:38:24.273990Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 4, partNo: 0, Offset: 3 is stored on disk 2025-03-28T09:38:24.274004Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:38:24.274025Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 5, partNo: 0, Offset: 4 is stored on disk 2025-03-28T09:38:24.274040Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-28T09:38:24.274065Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2025-03-28T09:38:24.274080Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:38:24.274102Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 7, partNo: 0, Offset: 6 is stored on disk 2025-03-28T09:38:24.274117Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:38:24.274142Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 7 is stored on disk 2025-03-28T09:38:24.274155Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:38:24.274175Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 9, partNo: 0, Offset: 8 is stored on disk 2025-03-28T09:38:24.274191Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:38:24.274215Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Answering for message sourceid: '\0test-message-group-id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 9 is stored on disk 2025-03-28T09:38:24.274239Z node 17 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2025-03-28T09:38:24.274325Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2025-03-28T09:38:24.274379Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2025-03-28T09:38:24.274411Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Topic 'rt3.dc1--test-topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 2025-03-28T09:38:24.274515Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--test-topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 10 max time lag 0ms effective offset 0 2025-03-28T09:38:24.274551Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 10 2025-03-28T09:38:24.274714Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-03-28T09:38:24.274734Z node 17 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:38:24.274786Z node 17 :PERSQUEUE DEBUG: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1743154704268 queuesize 0 startOffset 0 2025-03-28T09:38:24.275162Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 1 } 2025-03-28T09:38:24.275220Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session: acknoledged message 1 2025-03-28T09:38:24.275268Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session: acknoledged message 2 2025-03-28T09:38:24.275301Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session: acknoledged message 3 2025-03-28T09:38:24.275338Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session: acknoledged message 4 2025-03-28T09:38:24.275363Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session: acknoledged message 5 2025-03-28T09:38:24.275393Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session: acknoledged message 6 2025-03-28T09:38:24.275415Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session: acknoledged message 7 2025-03-28T09:38:24.275438Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session: acknoledged message 8 2025-03-28T09:38:24.275478Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session: acknoledged message 9 2025-03-28T09:38:24.275502Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session: acknoledged message 10 2025-03-28T09:38:24.275792Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session: close. 
Timeout = 0 ms 2025-03-28T09:38:24.275857Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session will now close 2025-03-28T09:38:24.275906Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session: aborting 2025-03-28T09:38:24.276770Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:38:24.276775Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session is aborting and will not restart 2025-03-28T09:38:24.276843Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0] Write session: destroy 2025-03-28T09:38:24.277045Z node 17 :PQ_WRITE_PROXY DEBUG: session v1 cookie: 7 sessionId: test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0 grpc read done: success: 0 data: 2025-03-28T09:38:24.277077Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0 grpc read failed 2025-03-28T09:38:24.277494Z node 17 :PQ_WRITE_PROXY INFO: session v1 closed cookie: 7 sessionId: test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0 2025-03-28T09:38:24.277530Z node 17 :PQ_WRITE_PROXY INFO: session v1 cookie: 7 sessionId: test-message-group-id|d94cef0c-ab61ecfd-4d104224-22c5a4c2_0 is DEAD 2025-03-28T09:38:24.278014Z node 17 :PQ_WRITE_PROXY DEBUG: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2025-03-28T09:38:24.278211Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [17:7486792445617343850:2636] destroyed 2025-03-28T09:38:24.278223Z node 17 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 0, State: StateIdle] TPartition::DropOwner. >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi |96.5%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TSubscriberTest::SyncPartial |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TSubscriberTest::SyncPartial [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncPartial [GOOD] Test command err: 2025-03-28T09:38:28.522159Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::TEvStateStorage::TEvResolveReplicasList 2025-03-28T09:38:28.523894Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2025-03-28T09:38:28.524007Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2025-03-28T09:38:28.524073Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2025-03-28T09:38:28.524134Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-03-28T09:38:28.524216Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-03-28T09:38:28.524256Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: [main][1:34:2065][path] Set up state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:38:28.524311Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-03-28T09:38:28.524347Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:38:28.524546Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 1 2025-03-28T09:38:28.524647Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:38:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:35:2065], cookie# 1 2025-03-28T09:38:28.524696Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 1 2025-03-28T09:38:28.524738Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 1 2025-03-28T09:38:28.524771Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:6:2053], cookie# 1 2025-03-28T09:38:28.524803Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 1 2025-03-28T09:38:28.524846Z 
node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 1 2025-03-28T09:38:28.524873Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 0, failures# 1 2025-03-28T09:38:28.524900Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:35:2065] 2025-03-28T09:38:28.524934Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2025-03-28T09:38:28.524966Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:36:2065], cookie# 1 2025-03-28T09:38:28.524982Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 1, size# 3, half# 1, successes# 1, failures# 1 2025-03-28T09:38:28.524997Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:37:2065], cookie# 1 2025-03-28T09:38:28.525019Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is done: cookie# 1, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2025-03-28T09:38:28.525080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 2 2025-03-28T09:38:28.525130Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 2 2025-03-28T09:38:28.525144Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 2, size# 3, half# 1, successes# 0, failures# 1 2025-03-28T09:38:28.525170Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:39:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:36:2065], cookie# 2 2025-03-28T09:38:28.525206Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 2 2025-03-28T09:38:28.525242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:9:2056], cookie# 2 2025-03-28T09:38:28.525287Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:36:2065], cookie# 2 2025-03-28T09:38:28.525326Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:34:2065][path] Sync is done: cookie# 2, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2025-03-28T09:38:28.525371Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:36:2065] 2025-03-28T09:38:28.525394Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there
are 0 elements } 2025-03-28T09:38:28.525413Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 }: sender# [1:37:2065], cookie# 2 2025-03-28T09:38:28.525440Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Unexpected sync response: sender# [1:37:2065], cookie# 2 2025-03-28T09:38:28.525502Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:33:2064], cookie# 3 2025-03-28T09:38:28.525567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:35:2065], cookie# 3 2025-03-28T09:38:28.525582Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Sync is in progress: cookie# 3, size# 3, half# 1, successes# 0, failures# 1 2025-03-28T09:38:28.525598Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:36:2065], cookie# 3 2025-03-28T09:38:28.525612Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: [main][1:34:2065][path] Sync is done: cookie# 3, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2025-03-28T09:38:28.525630Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [replica][1:40:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2065], cookie# 3 2025-03-28T09:38:28.525689Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 }: sender# [1:37:2065], cookie# 3 2025-03-28T09:38:28.525702Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Unexpected sync response: sender# [1:37:2065], cookie# 3 2025-03-28T09:38:28.525726Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: [main][1:34:2065][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2065] 2025-03-28T09:38:28.525751Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: [main][1:34:2065][path] Ignore empty state: owner# [1:33:2064], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } >> TColumnShardTestSchema::CreateTable >> TColumnShardTestSchema::RebootForgetAfterFail |96.5%| [TA] $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ...
results_accumulator.log} >> KqpExplain::PrecomputeRange >> TPQTest::TestReadSubscription [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> CdcStreamChangeCollector::InsertSingleRow [GOOD] >> CdcStreamChangeCollector::UpsertToSameKey [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow >> CdcStreamChangeCollector::UpsertToSameKeyWithImages >> AsyncIndexChangeCollector::UpsertSingleRow [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows >> AsyncIndexChangeCollector::DeleteNothing [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow >> CdcStreamChangeCollector::UpsertManyRows [GOOD] >> CdcStreamChangeCollector::UpsertIntoTwoStreams >> AsyncIndexChangeCollector::UpsertToSameKey [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadSubscription [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-28T09:37:48.277698Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:48.277793Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-28T09:37:48.295214Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:48.311327Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "test" Generation: 1 Important: false } Consumers { Name: "another-user" Generation: 1 Important: false } 2025-03-28T09:37:48.312326Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-28T09:37:48.313871Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-28T09:37:48.318005Z 
node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-28T09:37:48.319085Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-03-28T09:37:48.321167Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [1:187:2200] 2025-03-28T09:37:48.322206Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [1:187:2200] 2025-03-28T09:37:48.330246Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:207:2213], now have 1 active actors on pipe 2025-03-28T09:37:48.330340Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-03-28T09:37:48.330380Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-03-28T09:37:48.330563Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2025-03-28T09:37:48.330595Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2025-03-28T09:37:48.330656Z node 1 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2025-03-28T09:37:48.330748Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-03-28T09:37:48.330778Z node 1 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2025-03-28T09:37:48.331005Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:209:2215], now have 1 active actors on pipe 2025-03-28T09:37:48.331065Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-03-28T09:37:48.331092Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-03-28T09:37:48.331147Z node 1 :PERSQUEUE INFO: new Cookie default|b2d17452-2358f019-3315b937-94f53e0f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:37:48.331233Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2025-03-28T09:37:48.331308Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:37:48.331531Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [1:211:2217], now have 1 active actors on pipe 2025-03-28T09:37:48.331591Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2025-03-28T09:37:48.331618Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2025-03-28T09:37:48.331648Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2025-03-28T09:37:48.331682Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2025-03-28T09:37:48.331799Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 1 partNo 0 2025-03-28T09:37:48.332487Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 72 count 1 nextOffset 1 batches 1 2025-03-28T09:37:48.332555Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 2 partNo 0 2025-03-28T09:37:48.332595Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 112 count 2 nextOffset 2 batches 1 2025-03-28T09:37:48.333059Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'rt3.dc1--asdfgs--topic' partition 0 compactOffset 0,2 HeadOffset 0 endOffset 0 curOffset 2 d0000000000_00000000000000000000_00000_0000000002_00000| size 94 WTime 331 2025-03-28T09:37:48.333172Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:37:48.333202Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:37:48.333226Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:37:48.333252Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:37:48.333275Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psourceid 2025-03-28T09:37:48.333298Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000000_00000_0000000002_00000| 2025-03-28T09:37:48.333329Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:37:48.333360Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:37:48.333383Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 
2025-03-28T09:37:48.333452Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:37:48.333539Z node 1 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 0 partNo 0 count 2 size 94 2025-03-28T09:37:48.335513Z node 1 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 0 count 2 size 94 actorID [1:134:2160] 2025-03-28T09:37:48.335613Z node 1 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 2 parts 0 size 94 2025-03-28T09:37:48.335683Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 18 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:37:48.335727Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:37:48.335783Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid', Topic: 'rt3.dc1--asdfgs--topic', Partition: 0, SeqNo: 1, partNo: 0, Offset: 0 is stored on disk 2025-03-28T09:37:48.335812Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. Partition: 0 2025-03-28T09:37:48.335832Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid', Topic: 'rt3.dc1--asdfgs--topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2025-03-28T09:37:48.335930Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2025-03-28T09:37:48.335966Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-03-28T09:37:48.335993Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user another-user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 1 2025-03-28T09:37:48.336014Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 1 startOffset 0 ReadingTimestamp 1 rrg 1 2025-03-28T09:37:48.336123Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:37:48.336192Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'rt3.dc1--asdfgs--topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 2 max time lag 0ms effective offset 0 2025-03-28T09:37:48.336237Z node 1 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 2 2025-03-28T09:37:48.336463Z node 1 :PERSQUEUE DEBUG: [PQ: 72 ... 
ds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 55 ReadRuleGenerations: 55 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 Important: false } Consumers { Name: "user1" Generation: 55 Important: true } 2025-03-28T09:38:25.857035Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [52:185:2198] 2025-03-28T09:38:25.859019Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [52:185:2198] 2025-03-28T09:38:25.861019Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [52:186:2199] 2025-03-28T09:38:25.862316Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [52:186:2199] 2025-03-28T09:38:25.863625Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [52:187:2200] 2025-03-28T09:38:25.864777Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [52:187:2200] 2025-03-28T09:38:25.866208Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [52:188:2201] 2025-03-28T09:38:25.867376Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [52:188:2201] 2025-03-28T09:38:25.868895Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [52:189:2202] 2025-03-28T09:38:25.869975Z node 52 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [52:189:2202] 2025-03-28T09:38:25.877392Z node 52 :PERSQUEUE INFO: new Cookie default|105627ec-99ca9c61-da72eab9-e49b4d43_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:26.832105Z node 52 :PERSQUEUE INFO: new Cookie default|c8964687-52f1ea12-e66817bf-455281ff_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:26.842278Z node 52 :PERSQUEUE INFO: new Cookie default|48370b1b-83d10bca-8b83c7ef-6a7fd524_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:103:2057] recipient: [53:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:103:2057] recipient: [53:101:2135] Leader for TabletID 72057594037927937 is [53:107:2139] sender: [53:108:2057] recipient: [53:101:2135] 2025-03-28T09:38:27.269046Z node 53 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:27.269117Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is 
[0:0:0] sender: [53:149:2057] recipient: [53:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [53:149:2057] recipient: [53:147:2170] Leader for TabletID 72057594037927938 is [53:153:2174] sender: [53:154:2057] recipient: [53:147:2170] Leader for TabletID 72057594037927937 is [53:107:2139] sender: [53:177:2057] recipient: [53:14:2061] 2025-03-28T09:38:27.284004Z node 53 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:27.285384Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 56 actor [53:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 56 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 56 ReadRuleGenerations: 56 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 56 Important: false } Consumers { Name: "user1" Generation: 56 Important: true } 2025-03-28T09:38:27.286051Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [53:183:2196] 2025-03-28T09:38:27.287995Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [53:183:2196] 2025-03-28T09:38:27.289997Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [53:184:2197] 2025-03-28T09:38:27.291270Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [53:184:2197] 2025-03-28T09:38:27.292878Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [53:185:2198] 2025-03-28T09:38:27.294044Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [53:185:2198] 2025-03-28T09:38:27.295611Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [53:186:2199] 2025-03-28T09:38:27.296746Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [53:186:2199] 2025-03-28T09:38:27.298317Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [53:187:2200] 2025-03-28T09:38:27.299441Z node 53 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [53:187:2200] 2025-03-28T09:38:27.306994Z node 53 :PERSQUEUE INFO: new Cookie default|f0a72411-c534b4f4-56516467-5363f3a8_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:28.263793Z node 53 :PERSQUEUE INFO: 
new Cookie default|bd4064bd-5e225ee0-188eadc6-a8e13069_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:28.271675Z node 53 :PERSQUEUE INFO: new Cookie default|6ab93975-dc424848-948f2851-73a260a9_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:103:2057] recipient: [54:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:103:2057] recipient: [54:101:2135] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:108:2057] recipient: [54:101:2135] 2025-03-28T09:38:28.695151Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:28.695225Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:149:2057] recipient: [54:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [54:149:2057] recipient: [54:147:2170] Leader for TabletID 72057594037927938 is [54:153:2174] sender: [54:154:2057] recipient: [54:147:2170] Leader for TabletID 72057594037927937 is [54:107:2139] sender: [54:179:2057] recipient: [54:14:2061] 2025-03-28T09:38:28.714401Z node 54 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:28.715776Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 57 actor [54:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 57 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } ReadRuleGenerations: 57 ReadRuleGenerations: 57 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 57 Important: false } Consumers { Name: "user1" Generation: 57 Important: true } 2025-03-28T09:38:28.716519Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [54:185:2198] 2025-03-28T09:38:28.718294Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [54:185:2198] 2025-03-28T09:38:28.720164Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [54:186:2199] 2025-03-28T09:38:28.721462Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [54:186:2199] 2025-03-28T09:38:28.723026Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] bootstrapping 2 [54:187:2200] 2025-03-28T09:38:28.724273Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 2, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [54:187:2200] 
2025-03-28T09:38:28.725745Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] bootstrapping 3 [54:188:2201] 2025-03-28T09:38:28.727039Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 3, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [54:188:2201] 2025-03-28T09:38:28.728527Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] bootstrapping 4 [54:189:2202] 2025-03-28T09:38:28.729714Z node 54 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 4, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [54:189:2202] 2025-03-28T09:38:28.737718Z node 54 :PERSQUEUE INFO: new Cookie default|12b4401-e6023d65-557adc38-f4c427cc_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:29.715888Z node 54 :PERSQUEUE INFO: new Cookie default|66696ef0-d4fcbfbf-b85a0112-7cb2302b_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:38:29.724305Z node 54 :PERSQUEUE INFO: new Cookie default|61f63e56-17471d3c-e209145-a3fe2516_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default >> AsyncIndexChangeCollector::InsertSingleRow [GOOD] >> AsyncIndexChangeCollector::InsertManyRows >> TColumnShardTestSchema::CreateTable [GOOD] >> TFetchRequestTests::CheckAccess [GOOD] >> PQCountersSimple::PartitionWriteQuota >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi >> TVectorIndexTests::CreateTable >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable [GOOD] Test command err: 2025-03-28T09:38:29.565679Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:38:29.636118Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:38:29.659665Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:138:2170];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:38:29.659970Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:38:29.667554Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:38:29.667789Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:38:29.667998Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:38:29.668139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:38:29.668240Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:38:29.668336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:38:29.668449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:38:29.668553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:38:29.668669Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:38:29.668796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:38:29.668902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:38:29.669012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:138:2170];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:38:29.696794Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:38:29.697002Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:38:29.697044Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:38:29.697195Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:38:29.697311Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:38:29.697365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:38:29.697400Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:38:29.697471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:38:29.697527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:38:29.697567Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:38:29.697587Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:38:29.697706Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:38:29.697752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:38:29.697791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:38:29.697812Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:38:29.697869Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:38:29.697902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:38:29.697925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:38:29.697942Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:38:29.698000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:38:29.698025Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:38:29.698040Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:38:29.698072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:38:29.698100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:38:29.698131Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:38:29.698426Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=35; 2025-03-28T09:38:29.698481Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=21; 2025-03-28T09:38:29.698533Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=26; 2025-03-28T09:38:29.698600Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=33; 2025-03-28T09:38:29.698744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:38:29.698784Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:38:29.698806Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:38:29.698984Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:38:29.699011Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:38:29.699030Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:38:29.699125Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:38:29.699165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:38:29.699184Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:38:29.699344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:38:29.699373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:38:29.699400Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:38:29.699476Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:38:29.699503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:38:29.699533Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
ame: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-03-28T09:38:30.715348Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=118;this=88923004873920;method=TTxController::StartProposeOnExecute;tx_info=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;fline=schema.h:36;event=sync_schema; 2025-03-28T09:38:30.727422Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;this=88923004873920;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_this=89197881219264;fline=columnshard__propose_transaction.cpp:103;event=actual tx operator; 2025-03-28T09:38:30.727511Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;this=88923004873920;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_this=89197881219264;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:168:2193]; 2025-03-28T09:38:30.727554Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;request_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;this=88923004873920;op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_op_tx=118:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=019:0;;int_this=89197881219264;method=TTxController::FinishProposeOnComplete;tx_id=118;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=118; 2025-03-28T09:38:30.727864Z node 1 :TX_COLUMNSHARD DEBUG: TTxNotifyTxCompletion.Execute at tablet 9437184 2025-03-28T09:38:30.728000Z node 1 :TX_COLUMNSHARD DEBUG: PlanStep 1018 at tablet 9437184, mediator 0 2025-03-28T09:38:30.728042Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] execute at tablet 9437184 2025-03-28T09:38:30.728307Z node 1 :TX_COLUMNSHARD DEBUG: EnsureTable for pathId: 19 ttl settings: { Version: 1 } at tablet 9437184 2025-03-28T09:38:30.728365Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=tables_manager.cpp:245;method=RegisterTable;path_id=19; 
2025-03-28T09:38:30.728408Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=column_engine.h:144;event=RegisterTable;path_id=19; 2025-03-28T09:38:30.728794Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=column_engine_logs.cpp:488;event=OnTieringModified;path_id=19; 2025-03-28T09:38:30.728915Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=118;fline=tx_controller.cpp:211;event=finished_tx;tx_id=118; 2025-03-28T09:38:30.740643Z node 1 :TX_COLUMNSHARD DEBUG: TxPlanStep[34] complete at tablet 9437184 2025-03-28T09:38:30.740750Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 20 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-03-28T09:38:30.741739Z node 1 :TX_COLUMNSHARD_TX ERROR: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=119;this=88923004877056;method=TTxController::StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;fline=tx_controller.cpp:345;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-03-28T09:38:30.753717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;this=88923004877056;op_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:168:2193]; 2025-03-28T09:38:30.753792Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;this=88923004877056;op_tx=119:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=119; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" 
TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2025-03-28T09:38:30.755156Z node 1 :TX_COLUMNSHARD_TX ERROR: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=88923004878624;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;fline=tx_controller.cpp:345;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-03-28T09:38:30.767160Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;this=88923004878624;op_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:168:2193]; 2025-03-28T09:38:30.767220Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;this=88923004878624;op_tx=120:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } 
TtlSettings { Version: 1 } } } } 2025-03-28T09:38:30.769017Z node 1 :TX_COLUMNSHARD_TX ERROR: tablet_id=9437184;self_id=[1:138:2170];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=88923004880192;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;fline=tx_controller.cpp:345;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2025-03-28T09:38:30.781016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;this=88923004880192;op_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:168:2193]; 2025-03-28T09:38:30.781079Z node 1 :TX_COLUMNSHARD ERROR: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;this=88923004880192;op_tx=121:TX_KIND_SCHEMA;min=0;max=18446744073709551615;plan=0;src=[1:168:2193];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; >> TPQTest::TestManyConsumers [GOOD] >> AnalyzeColumnshard::AnalyzeMultiOperationId [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> TKeyValueTest::TestObtainLockNewApi [GOOD] >> TKeyValueTest::TestRenameToLongKey >> TVectorIndexTests::CreateTable [GOOD] >> TKeyValueTest::TestCleanUpDataOnEmptyTablet [GOOD] >> TKeyValueTest::TestCleanUpDataOnEmptyTabletResetGeneration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:38:31.687277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:31.687404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:31.687454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:31.687487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:31.687525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:31.687552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:31.687634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:31.687727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 
0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:31.688024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:31.757060Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:31.757097Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:31.767091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:31.767357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:31.767517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:31.772475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:31.772675Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:31.773280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:31.773483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:31.775168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:31.776390Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:31.776442Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:31.776592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:31.776638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:31.776673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:31.776770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:31.782580Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:31.921793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:31.922012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:31.922211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:31.922447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:31.922503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:31.924805Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:31.924957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:31.925131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:31.925182Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:31.925214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:31.925242Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:31.927056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:31.927108Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:31.927153Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:31.928655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:31.928693Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:31.928739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:31.928780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:31.932293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:31.933826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:31.933977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:31.935027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:31.935144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:31.935203Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:31.935456Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:31.935501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:31.935644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:31.935735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:31.937471Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:31.937532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:31.937696Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:31.937737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:31.938130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:31.938178Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:31.938264Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:31.938292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:31.938327Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:31.938356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:31.938387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:31.938433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:31.938462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:38:31.938524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:31.938581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:31.938614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:31.938643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:31.940144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:31.940221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:31.940246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
9:38:32.283692Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 18446744073709551615 2025-03-28T09:38:32.283718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:38:32.284400Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:38:32.284501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:38:32.284520Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:38:32.284712Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:38:32.284763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:38:32.284778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:38:32.284794Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-28T09:38:32.284847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:32.285062Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:38:32.285107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:38:32.285141Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:38:32.285244Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:38:32.285290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:38:32.285307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:38:32.286160Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:38:32.286252Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:38:32.286276Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:38:32.286300Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-28T09:38:32.286323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:38:32.287474Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:38:32.287525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:38:32.287542Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:38:32.287558Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-03-28T09:38:32.287574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-03-28T09:38:32.287629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/4, is published: true 2025-03-28T09:38:32.289355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-03-28T09:38:32.289403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:32.289699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-28T09:38:32.289818Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/4 2025-03-28T09:38:32.289857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/4 2025-03-28T09:38:32.289908Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/4 2025-03-28T09:38:32.289946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/4 2025-03-28T09:38:32.289980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/4, is published: true 2025-03-28T09:38:32.290586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:38:32.290849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:38:32.290882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:38:32.290966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 
72057594046678944 2025-03-28T09:38:32.290987Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:32.291111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:38:32.291180Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-03-28T09:38:32.291224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-03-28T09:38:32.291247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 3/4 2025-03-28T09:38:32.291270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2025-03-28T09:38:32.291288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2025-03-28T09:38:32.291441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:38:32.291466Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:32.291575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:38:32.291620Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-03-28T09:38:32.291634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-03-28T09:38:32.291651Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 4/4 2025-03-28T09:38:32.291676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-03-28T09:38:32.291702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2025-03-28T09:38:32.291743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:415:2372] message: TxId: 102 2025-03-28T09:38:32.291783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2025-03-28T09:38:32.291826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:38:32.291849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:38:32.291912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:38:32.291943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-03-28T09:38:32.291955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-03-28T09:38:32.291980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:38:32.291992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-03-28T09:38:32.292001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-03-28T09:38:32.292034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:38:32.292057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, 
operation id: 102:3 2025-03-28T09:38:32.292072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-03-28T09:38:32.292097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-03-28T09:38:32.292347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:38:32.292377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:38:32.292407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:38:32.292496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:38:32.293416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:38:32.295025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:38:32.295060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:527:2477] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestManyConsumers [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-28T09:37:51.492972Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:51.493056Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-28T09:37:51.506946Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:51.521079Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:37:51.521834Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-28T09:37:51.523528Z node 1 :PERSQUEUE INFO: [PQ: 
72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-28T09:37:51.525001Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-28T09:37:51.526084Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-03-28T09:37:51.530863Z node 1 :PERSQUEUE INFO: new Cookie default|81605e87-24052ae4-fc04810c-9aeb454c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-03-28T09:37:51.916980Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:51.917055Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] 2025-03-28T09:37:51.933298Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:51.933989Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-28T09:37:51.934469Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:183:2196] 2025-03-28T09:37:51.936291Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:183:2196] 2025-03-28T09:37:51.937696Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:184:2197] 2025-03-28T09:37:51.939068Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:184:2197] 2025-03-28T09:37:51.943576Z node 2 :PERSQUEUE INFO: new Cookie default|3e06fef4-8f320408-ea849c79-c09f5b67_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] 
Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-03-28T09:37:52.318062Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.318133Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-03-28T09:37:52.332765Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.333512Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-03-28T09:37:52.334067Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-03-28T09:37:52.335807Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-03-28T09:37:52.337098Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:186:2199] 2025-03-28T09:37:52.338474Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:186:2199] 2025-03-28T09:37:52.343118Z node 3 :PERSQUEUE INFO: new Cookie default|c9525f86-653d9f53-8dc15f20-2ae2b7b1_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-03-28T09:37:52.722121Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.722183Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:179:2057] recipient: [4:14:2061] 2025-03-28T09:37:52.735577Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: 
reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.736240Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 10 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 4 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 Important: false } 2025-03-28T09:37:52.736708Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:185:2198] 2025-03-28T09:37:52.738432Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [4:185:2198] 2025-03-28T09:37:52.739703Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [4:186:2199] 2025-03-28T09:37:52.741133Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [4:186:2199] 2025-03-28T09:37:52.744688Z node 4 :PERSQUEUE INFO: new Cookie default|556527b6-42eb8747-b632ae38-1b04b946_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:37:52.745057Z node 4 :PERSQUEUE INFO: new Cookie default|d7a8844a-f3598d12-e4f84ffb-959d199b_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:103:2057] recipient: [5:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:103:2057] recipient: [5:101:2135] Leader for TabletID 72057594037927937 is [5:107:2139] sender: [5:108:2057] recipient: [5:101:2135] 2025-03-28T09:37:53.152248Z node 5 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:53.152322Z node 5 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [5:149:2057] recipient: [5:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [5:149:2057] recipient: [5:147:2170] Leader for TabletID 72057594037927938 is [5:153:2174] sender: [5:154:2057] recipient: [5:147:2170] Leader for TabletID 72057594037927 ... 
6 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:30.704062Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:30.725189Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:999:2995], now have 1 active actors on pipe 2025-03-28T09:38:30.726873Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:30.737768Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:30.757600Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1002:2998], now have 1 active actors on pipe 2025-03-28T09:38:30.759331Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:30.770096Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:30.807176Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1005:3001], now have 1 active actors on pipe 2025-03-28T09:38:30.808874Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:30.820572Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:30.841937Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1008:3004], now have 1 active actors on pipe 2025-03-28T09:38:30.843661Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:30.854807Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:30.876680Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1011:3007], now have 1 active actors on pipe 2025-03-28T09:38:30.878300Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:30.888847Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:30.909441Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1014:3010], now have 1 active actors on pipe 2025-03-28T09:38:30.911112Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:30.921532Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:30.957014Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1017:3013], now have 1 active actors on pipe 2025-03-28T09:38:30.958674Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:30.971758Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:30.995147Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1020:3016], now have 1 active actors on pipe 2025-03-28T09:38:30.996827Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 
2025-03-28T09:38:31.007118Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:31.027926Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1023:3019], now have 1 active actors on pipe 2025-03-28T09:38:31.029785Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:31.042741Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:31.063670Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1026:3022], now have 1 active actors on pipe 2025-03-28T09:38:31.065554Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:31.078750Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:31.100935Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1029:3025], now have 1 active actors on pipe 2025-03-28T09:38:31.102565Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:31.115406Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:31.152639Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1032:3028], now have 1 active actors on pipe 2025-03-28T09:38:31.154304Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:31.164793Z node 40 
:PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:31.185934Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [40:1035:3031], now have 1 active actors on pipe 2025-03-28T09:38:31.187638Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:31.197211Z node 40 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2025-03-28T09:38:31.216240Z node 40 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [40:1038:3034] connected; active server actors: 1 >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test >> PQCountersSimple::PartitionWriteQuota [GOOD] >> PQCountersSimple::PartitionFirstClass >> TKeyValueTest::TestCleanUpDataOnEmptyTabletResetGeneration [GOOD] >> TKeyValueTest::TestCleanUpDataWithMockDisk >> TColumnShardTestSchema::RebootExportAfterFail >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId [GOOD] Test command err: 2025-03-28T09:34:54.225358Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:34:54.225556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:34:54.225605Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000e74/r3tmp/tmp7LFsKv/pdisk_1.dat 2025-03-28T09:34:54.602777Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9717, node 1 2025-03-28T09:34:55.103689Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:34:55.103737Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:34:55.103786Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:34:55.104272Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:34:55.111541Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:34:55.200486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:55.200761Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:55.215487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:23489 2025-03-28T09:34:55.758345Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:34:58.768270Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:34:58.803782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:58.803908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:58.848959Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:34:58.851320Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:59.100279Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.100962Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.101572Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.101744Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.102042Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.102137Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.102224Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.102316Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.102408Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:34:59.268952Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:34:59.269066Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:34:59.282308Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:34:59.425395Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:34:59.469570Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:34:59.469632Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:34:59.499874Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:34:59.500056Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:34:59.500273Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:34:59.500336Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:34:59.500408Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:34:59.500478Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:34:59.500540Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:34:59.500602Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:34:59.501030Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:34:59.531903Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:34:59.532020Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:34:59.539375Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:34:59.545798Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:34:59.546104Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:34:59.552711Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:34:59.571452Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:34:59.571516Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:34:59.571596Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:34:59.586395Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:34:59.594183Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:34:59.594325Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:34:59.763438Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:34:59.914027Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:34:59.992533Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:00.886837Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:00.886985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:00.904345Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:01.015249Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:01.015506Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:01.015800Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:01.015918Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:01.016021Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:01.016112Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:01.016234Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:01.016341Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:01.016442Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:01.016543Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:01.016625Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:01.016718Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:01.040445Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:01.040529Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... MtNThiMjMzMzQtZGM0MTFmNGY=, TxId: 2025-03-28T09:38:20.854713Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTYyYjM0NTctNTQ4M2MxYjMtNThiMjMzMzQtZGM0MTFmNGY=, TxId: 2025-03-28T09:38:20.854937Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:50.000000Z, event interval end# 2025-03-28T09:38:18.000000Z 2025-03-28T09:38:20.855392Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:38:20.879207Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:38:20.879258Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId7, ActorId=[1:2802:3221] 2025-03-28T09:38:21.454981Z node 2 :STATISTICS DEBUG: Event round 10 is different from the current 0 2025-03-28T09:38:21.455040Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:38:22.123835Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:38:22.124000Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:38:22.145299Z node 2 :STATISTICS DEBUG: Event round 9 is different from the current 0 2025-03-28T09:38:22.145356Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-03-28T09:38:22.145426Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:38:22.145453Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId8 2025-03-28T09:38:22.145476Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-03-28T09:38:22.145491Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:38:23.496817Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:38:24.765851Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:38:24.765912Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId8 2025-03-28T09:38:24.765939Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-03-28T09:38:24.765956Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:38:25.988960Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:38:26.010345Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:38:26.010448Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 
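
Each force-traversal round recorded above follows the same sequence: TTxNavigate → TTxResolve → TTxResponseTabletDistribution, a TEvStatisticsRequest/TEvStatisticsResponse exchange with the column shard, TTxAggregateStatisticsResponse, and finally a TQueryBase actor that persists the aggregated column statistics before TTxFinishTraversal emits TEvAnalyzeResponse. The statement TQueryBase runs is captured inline in the RunDataQuery entries of this block (one appears just below); it is reproduced here reflowed for readability. Note that the element types of the two List parameters are truncated in the captured log, so they are left exactly as logged:

DECLARE $owner_id AS Uint64;
DECLARE $local_path_id AS Uint64;
DECLARE $stat_type AS Uint32;
DECLARE $column_tags AS List;
DECLARE $data AS List;
UPSERT INTO `.metadata/_statistics`
    (owner_id, local_path_id, stat_type, column_tag, data)
VALUES
    ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]),
    ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]);

One row is upserted per column tag for path [OwnerId: 72075186224037897, LocalPathId: 4] on every round.
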
2025-03-28T09:38:26.010487Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:38:26.010943Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:38:26.023621Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:38:26.023894Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:38:26.023936Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:38:26.024182Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2025-03-28T09:38:26.036724Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:38:26.036858Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 11, current Round: 0 2025-03-28T09:38:26.037246Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9370:6415], server id = [2:9371:6416], tablet id = 72075186224037899, status = OK 2025-03-28T09:38:26.037315Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9370:6415], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:38:26.038151Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:38:26.038245Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:38:26.038404Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:38:26.038516Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:38:26.038808Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:38:26.040293Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9370:6415], server id = [2:9371:6416], tablet id = 72075186224037899 2025-03-28T09:38:26.040318Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:38:26.040852Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:38:26.057974Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTY3ZTkxMDktOWViOTI5ZTQtMzRiNzFjNC1lYzYxZGQ=, TxId: 2025-03-28T09:38:26.058024Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTY3ZTkxMDktOWViOTI5ZTQtMzRiNzFjNC1lYzYxZGQ=, TxId: 2025-03-28T09:38:26.058401Z node 2 :SYSTEM_VIEWS WARN: [72075186224037891] TEvIntervalQuerySummary, time mismath: node id# 2, interval end# 1970-01-01T00:02:54.000000Z, event interval end# 2025-03-28T09:38:24.000000Z 2025-03-28T09:38:26.058462Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:38:26.093057Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:38:26.093113Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId8, ActorId=[1:2802:3221] 2025-03-28T09:38:26.630065Z node 2 :STATISTICS DEBUG: Event round 11 is different from the current 0 2025-03-28T09:38:26.630121Z node 2 :STATISTICS DEBUG: Skip TEvDispatchKeepAlive 2025-03-28T09:38:27.297167Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:38:27.297234Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-03-28T09:38:27.297262Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:38:27.297380Z node 2 :STATISTICS DEBUG: Event round 10 is different from the current 0 2025-03-28T09:38:27.297430Z node 2 :STATISTICS DEBUG: Skip TEvStatisticsRequestTimeout 2025-03-28T09:38:28.566744Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:38:28.566911Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:38:28.588335Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:38:29.793446Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:38:29.793519Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId9 2025-03-28T09:38:29.793571Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:38:31.091616Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:38:31.091749Z node 2 :STATISTICS DEBUG: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 4] is column table. 2025-03-28T09:38:31.091784Z node 2 :STATISTICS DEBUG: [72075186224037894] Start force traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:38:31.092212Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Execute 2025-03-28T09:38:31.105098Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxNavigate::Complete 2025-03-28T09:38:31.105378Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Execute 2025-03-28T09:38:31.105424Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResolve::Complete 2025-03-28T09:38:31.105693Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2025-03-28T09:38:31.118522Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxResponseTabletDistribution::Complete 2025-03-28T09:38:31.118672Z node 2 :STATISTICS DEBUG: Received TEvAggregateStatistics from node: 2, Round: 12, current Round: 0 2025-03-28T09:38:31.119108Z node 2 :STATISTICS DEBUG: EvClientConnected, node id = 2, client id = [2:9534:6504], server id = [2:9535:6505], tablet id = 72075186224037899, status = OK 2025-03-28T09:38:31.119179Z node 2 :STATISTICS DEBUG: TEvStatisticsRequest send, client id = [2:9534:6504], path = { OwnerId: 72075186224037897 LocalId: 4 } 2025-03-28T09:38:31.120143Z node 2 :STATISTICS DEBUG: Received TEvStatisticsResponse TabletId: 72075186224037899 2025-03-28T09:38:31.120206Z node 2 :STATISTICS DEBUG: Send aggregate statistics response to node: 2 2025-03-28T09:38:31.120329Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2025-03-28T09:38:31.120462Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2025-03-28T09:38:31.120718Z node 2 :STATISTICS DEBUG: [TQueryBase] Bootstrap. Database: /Root/Database 2025-03-28T09:38:31.122287Z node 2 :STATISTICS DEBUG: EvClientDestroyed, node id = 2, client id = [2:9534:6504], server id = [2:9535:6505], tablet id = 72075186224037899 2025-03-28T09:38:31.122313Z node 2 :STATISTICS DEBUG: Skip EvClientDestroyed 2025-03-28T09:38:31.122911Z node 2 :STATISTICS DEBUG: [TQueryBase] RunDataQuery: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tags AS List; DECLARE $data AS List; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) VALUES ($owner_id, $local_path_id, $stat_type, $column_tags[0], $data[0]), ($owner_id, $local_path_id, $stat_type, $column_tags[1], $data[1]); 2025-03-28T09:38:31.151046Z node 2 :STATISTICS DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzhkNzE4Y2QtMmI0ZDlmNTQtZDJkNjJlZWYtZDNjNGEzZGQ=, TxId: 2025-03-28T09:38:31.151097Z node 2 :STATISTICS DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzhkNzE4Y2QtMmI0ZDlmNTQtZDJkNjJlZWYtZDNjNGEzZGQ=, TxId: 2025-03-28T09:38:31.151477Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Execute 2025-03-28T09:38:31.175882Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 4] 2025-03-28T09:38:31.175947Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId9, ActorId=[1:2802:3221] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] >> PQCountersSimple::PartitionFirstClass [GOOD] >> PQCountersSimple::SupportivePartitionCountersPersist >> AsyncIndexChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [GOOD] >> CdcStreamChangeCollector::DeleteNothing >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn >> KqpExplain::PrecomputeRange [GOOD] >> KqpExplain::MultiUsedStage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:76:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:79:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:80:2057] recipient: [10:78:2110] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:82:2057] recipient: [10:78:2110] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:81:2111] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:135:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:76:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:78:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:80:2057] recipient: [11:79:2110] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:82:2057] recipient: [11:79:2110] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:81:2111] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:135:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:77:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:79:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:81:2057] recipient: [12:80:2110] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:83:2057] recipient: [12:80:2110] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:82:2111] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:136:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:83:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:84:2057] recipient: [13:82:2113] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:86:2057] recipient: [13:82:2113] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:85:2114] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:139:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:83:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:84:2057] recipient: [14:82:2113] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:86:2057] recipient: [14:82:2113] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! new actor is[14:85:2114] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:84:2057] recipient: [15:83:2113] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:85:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:87:2057] recipient: [15:83:2113] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:86:2114] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:140:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:87:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:88:2057] recipient: [16:86:2116] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:90:2057] recipient: [16:86:2116] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:89:2117] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:143:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
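
The keyvalue unittest output in this block (and in the blocks that follow) is produced by a reboot-on-event harness: for each intercepted event type the test kills the tablet's leader actor mid-operation, waits for the tablet resolver to pick up the new leader (the "tablet resolver refreshed!" / "new actor is[...]" lines), and then continues the scenario against the new generation. Below is a minimal, self-contained sketch of that loop; TTestHarness, RebootTablet, and WaitTabletResolverRefresh are illustrative stand-ins, not YDB's actual test API:

// Sketch of the reboot-on-event pattern seen in this log.
// All names here are illustrative stand-ins, not YDB's real test framework.
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

struct TTestHarness {
    uint64_t TabletId;
    int Generation = 0;

    void RebootTablet() {
        // The real tests kill the leader actor; here we just bump the
        // generation, mirroring the "new actor is[N:...]" log lines.
        ++Generation;
        std::cout << "!Reboot " << TabletId << " rebooted! generation="
                  << Generation << "\n";
    }

    void WaitTabletResolverRefresh() {
        std::cout << "tablet resolver refreshed!\n";
    }
};

int main() {
    // Event types the harness intercepts, as seen in this output.
    const std::vector<std::string> events = {
        "TEvServerConnected", "TEvExecuteTransaction", "TEvIntermediate",
        "TEvAcquireLock", "TEvReadRange", "TEvNotify",
    };
    TTestHarness harness{/*TabletId=*/72057594037927937ull};
    for (const auto& ev : events) {
        std::cout << "!Reboot on event " << ev << " !\n";
        harness.RebootTablet();              // kill the leader mid-operation
        harness.WaitTabletResolverRefresh(); // wait for the new leader
        // ... re-run the read/write scenario against the new generation ...
    }
    return 0;
}

In the real output each iteration shows up as a "!Reboot ... on event ... !" line followed by the "Leader for TabletID" handshake lines and the "rebooted!"/"refreshed!" pair.
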
Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:87:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:88:2057] recipient: [17:86:2116] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:90:2057] recipient: [17:86:2116] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:89:2117] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:143:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:88:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:89:2057] recipient: [18:87:2116] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:91:2057] recipient: [18:87:2116] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:90:2117] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:144:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] >> AsyncIndexChangeCollector::CoveredIndexUpsert [GOOD] >> AsyncIndexChangeCollector::AllColumnsInPk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:76:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:79:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:80:2057] recipient: [10:78:2110] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:82:2057] recipient: [10:78:2110] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:81:2111] Leader for TabletID 72057594037927937 is [10:81:2111] sender: [10:135:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:76:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:78:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:80:2057] recipient: [11:79:2110] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:82:2057] recipient: [11:79:2110] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:81:2111] Leader for TabletID 72057594037927937 is [11:81:2111] sender: [11:135:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:77:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:79:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:81:2057] recipient: [12:80:2110] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:83:2057] recipient: [12:80:2110] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! new actor is[12:82:2111] Leader for TabletID 72057594037927937 is [12:82:2111] sender: [12:136:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:80:2057] recipient: [13:36:2083] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:83:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:84:2057] recipient: [13:82:2113] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:86:2057] recipient: [13:82:2113] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! new actor is[13:85:2114] Leader for TabletID 72057594037927937 is [13:85:2114] sender: [13:139:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:80:2057] recipient: [14:36:2083] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:83:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:84:2057] recipient: [14:82:2113] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:86:2057] recipient: [14:82:2113] !Reboot 72057594037927937 (actor [14:56:2097]) rebooted! !Reboot 72057594037927937 (actor [14:56:2097]) tablet resolver refreshed! 
new actor is[14:85:2114] Leader for TabletID 72057594037927937 is [14:85:2114] sender: [14:139:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:81:2057] recipient: [15:36:2083] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:84:2057] recipient: [15:83:2113] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:85:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:87:2057] recipient: [15:83:2113] !Reboot 72057594037927937 (actor [15:56:2097]) rebooted! !Reboot 72057594037927937 (actor [15:56:2097]) tablet resolver refreshed! new actor is[15:86:2114] Leader for TabletID 72057594037927937 is [15:86:2114] sender: [15:140:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:84:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:87:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:88:2057] recipient: [16:86:2116] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:90:2057] recipient: [16:86:2116] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:89:2117] Leader for TabletID 72057594037927937 is [16:89:2117] sender: [16:143:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:84:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:87:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:88:2057] recipient: [17:86:2116] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:90:2057] recipient: [17:86:2116] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! 
new actor is[17:89:2117] Leader for TabletID 72057594037927937 is [17:89:2117] sender: [17:143:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:85:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:88:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:89:2057] recipient: [18:87:2116] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:91:2057] recipient: [18:87:2116] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:90:2117] Leader for TabletID 72057594037927937 is [18:90:2117] sender: [18:144:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:88:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:91:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:92:2057] recipient: [19:90:2119] Leader for TabletID 72057594037927937 is [19:93:2120] sender: [19:94:2057] recipient: [19:90:2119] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:93:2120] Leader for TabletID 72057594037927937 is [19:93:2120] sender: [19:147:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:88:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:91:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:92:2057] recipient: [20:90:2119] Leader for TabletID 72057594037927937 is [20:93:2120] sender: [20:94:2057] recipient: [20:90:2119] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! 
new actor is[20:93:2120] Leader for TabletID 72057594037927937 is [20:93:2120] sender: [20:147:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:89:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:92:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:93:2057] recipient: [21:91:2119] Leader for TabletID 72057594037927937 is [21:94:2120] sender: [21:95:2057] recipient: [21:91:2119] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:94:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] >> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD] >> CdcStreamChangeCollector::PageFaults >> AsyncIndexChangeCollector::InsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow >> CdcStreamChangeCollector::UpsertToSameKeyWithImages [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete >> CdcStreamChangeCollector::InsertSingleUuidRow [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] Leader for TabletID 
72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:76:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:78:2110] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:82:2057] recipient: [4:78:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:81:2111] Leader for TabletID 72057594037927937 is [4:81:2111] sender: [4:135:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:76:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:79:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:78:2110] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:82:2057] recipient: [5:78:2110] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:81:2111] Leader for TabletID 72057594037927937 is [5:81:2111] sender: [5:135:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:77:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:79:2110] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:81:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:83:2057] recipient: [6:79:2110] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:82:2111] Leader for TabletID 72057594037927937 is [6:82:2111] sender: [6:136:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:79:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:82:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:81:2112] Leader for TabletID 72057594037927937 is [7:84:2113] sender: [7:85:2057] recipient: [7:81:2112] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:84:2113] Leader for TabletID 72057594037927937 is [7:84:2113] sender: [7:138:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:79:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:82:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:81:2112] Leader for TabletID 72057594037927937 is [8:84:2113] sender: [8:85:2057] recipient: [8:81:2112] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:84:2113] Leader for TabletID 72057594037927937 is [8:84:2113] sender: [8:138:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:80:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:82:2112] Leader for TabletID 72057594037927937 is [9:85:2113] sender: [9:86:2057] recipient: [9:82:2112] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:85:2113] Leader for TabletID 72057594037927937 is [9:85:2113] sender: [9:139:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:83:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:86:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:85:2115] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:89:2057] recipient: [10:85:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:88:2116] Leader for TabletID 72057594037927937 is [10:88:2116] sender: [10:142:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:83:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:85:2115] Leader for TabletID 72057594037927937 is [11:88:2116] sender: [11:89:2057] recipient: [11:85:2115] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:88:2116] Leader for TabletID 72057594037927937 is [11:88:2116] sender: [11:142:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:84:2057] recipient: [12:36:2083] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:87:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:88:2057] recipient: [12:86:2115] Leader for TabletID 72057594037927937 is [12:89:2116] sender: [12:90:2057] recipient: [12:86:2115] !Reboot 72057594037927937 (actor [12:56:2097]) rebooted! !Reboot 72057594037927937 (actor [12:56:2097]) tablet resolver refreshed! 
new actor is[12:89:2116] Leader for TabletID 72057594037927937 is [12:89:2116] sender: [12:143:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:54:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:57:2057] recipient: [13:51:2095] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:74:2057] recipient: [13:14:2061] >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadWhileWriteWorks >> TSchemeShardTest::InitRootAgain >> TSchemeShardTest::Boot >> TSchemeShardTest::CacheEffectiveACL [GOOD] >> TSchemeShardTest::ConsistentCopyTable >> TSchemeShardTest::RmDirTwice >> TSchemeShardCheckProposeSize::CopyTable >> TSchemeShardTest::MkRmDir >> TPQTest::TestAlreadyWrittenWithoutDeduplication [GOOD] >> TPQTest::TestChangeConfig >> TSchemeShardTest::CreateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] Test command err: 2025-03-28T09:37:43.490897Z :HappyWay INFO: Random seed for debugging is 1743154663490869 2025-03-28T09:37:43.860284Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792273635259805:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:43.863050Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:37:43.913085Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792271471823533:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:43.913268Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00198a/r3tmp/tmpLH6zYt/pdisk_1.dat 2025-03-28T09:37:44.120830Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:37:44.123073Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:37:44.324477Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:44.328613Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:44.328731Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:44.329960Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:44.330033Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:44.364869Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:44.372045Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:37:44.375723Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 
12107, node 1 2025-03-28T09:37:44.462649Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00198a/r3tmp/yandexWSPWcm.tmp 2025-03-28T09:37:44.462676Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00198a/r3tmp/yandexWSPWcm.tmp 2025-03-28T09:37:44.462905Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00198a/r3tmp/yandexWSPWcm.tmp 2025-03-28T09:37:44.463054Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:37:44.627240Z INFO: TTestServer started on Port 63784 GrpcPort 12107 TClient is connected to server localhost:63784 PQClient connected to localhost:12107 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:44.921895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... waiting... 2025-03-28T09:37:46.894528Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792284356725743:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.894629Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792284356725734:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.894770Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:46.900119Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-28T09:37:46.915834Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792284356725748:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-28T09:37:46.987789Z node 2 :TX_PROXY ERROR: Actor# [2:7486792284356725778:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:47.261633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.265276Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486792286520162768:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:37:47.265606Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTE4NzE3MWMtOGY4NDQ4NWEtZDA0ZmUwNS05ZDE0MTY1Ng==, ActorId: [1:7486792286520162732:2334], ActorState: ExecuteState, TraceId: 01jqe1tqecaft2m88mxfv3ky5n, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:37:47.268207Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:37:47.267190Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486792284356725793:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:37:47.267416Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MzllMzdiMmYtOTNlNWU4YWEtNjEwNWVkYjctODg3NTMwODQ=, ActorId: [2:7486792284356725732:2307], ActorState: ExecuteState, TraceId: 01jqe1tqcc0kpabq3cjm4djv2x, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:37:47.268199Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:37:47.369041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.495643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost:12107", true, true, 1000); 2025-03-28T09:37:47.697910Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710664. Ctx: { TraceId: 01jqe1tr183h28206znjqp0zkf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzYwZmUxYmUtNmM2NTM3MWQtODEyZjBhYjMtNzVhNWQ5MDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486792290815130516:3010] 2025-03-28T09:37:48.860483Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792273635259805:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:48.860562Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:37:48.913220Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792271471823533:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:48.913327Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-28T09:37:53.594047Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480 waiting... 
PQ Client: create topic: rt3.dc1--test-topic with 1 partitions CallPersQueueGRPC request to localhost:12107 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--test-topic" } } 2025-03-28T09:37:53.648537Z node 1 :PERSQUEUE INFO: proxy answer CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--test-topic, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:12107 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--test-topic" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 ... NumPartitions: 5 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--topic1" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-03-28T09:38:19.803905Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-03-28T09:38:19.807192Z node 5 :PERSQUEUE DEBUG: sending HasDataInfoResponse Partition: 1 Offset: 1 Deadline: 1743154709806 2025-03-28T09:38:19.807579Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server connected, pipe [5:7486792426922802706:3693], now have 1 active actors on pipe 2025-03-28T09:38:24.428573Z node 5 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:38:24.428607Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:29.806589Z node 5 :PERSQUEUE DEBUG: got HasDatainfoResponse 2025-03-28T09:38:29.806979Z node 6 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: request-id-0-1 2025-03-28T09:38:29.807021Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037894] got client message batch for topic 'rt3.dc1--topic1' partition 1 2025-03-28T09:38:29.807102Z node 6 :PERSQUEUE ERROR: [PQ: 72075186224037894, Partition: 1, State: StateIdle] reading from too big offset - topic rt3.dc1--topic1 partition 1 client $without_consumer EndOffset 0 offset 1 2025-03-28T09:38:29.807124Z node 6 :PERSQUEUE DEBUG: tablet 72075186224037894 topic 'rt3.dc1--topic1' partition 1 error: trying to read from future. ReadOffset 1, 0 EndOffset 0 2025-03-28T09:38:29.807160Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037894] Handle TEvPQ::TEvError Cookie 1, Error trying to read from future. ReadOffset 1, 0 EndOffset 0 2025-03-28T09:38:29.807171Z node 6 :PERSQUEUE DEBUG: Answer error topic: 'rt3.dc1--topic1' partition: 1 messageNo: 0 requestId: request-id-0-1 error: trying to read from future. 
ReadOffset 1, 0 EndOffset 0 2025-03-28T09:38:29.807521Z node 6 :PERSQUEUE DEBUG: [PQ: 72075186224037894] server disconnected, pipe [5:7486792426922802706:3693] destroyed 2025-03-28T09:38:31.296750Z node 7 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:31.296833Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:38:31.312088Z node 7 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:31.312922Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [7:198:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:38:31.313426Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [7:207:2220] 2025-03-28T09:38:31.315891Z node 7 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [7:207:2220] 2025-03-28T09:38:31.320319Z node 7 :PERSQUEUE INFO: new Cookie default|4ad4e181-42d45b06-41eb3810-5c10eff2_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-03-28T09:38:31.326478Z node 7 :PERSQUEUE INFO: new Cookie default|7bc84788-ee5b70cc-dcaf19af-f9adca49_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-03-28T09:38:31.633511Z node 7 :PERSQUEUE INFO: new Cookie default|b513fd8a-db8f723f-a4ee5c3b-507b06b3_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-03-28T09:38:31.898593Z node 7 :PERSQUEUE INFO: new Cookie default|e3481c33-3c587240-fd0d6a23-f178655e_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-03-28T09:38:32.148434Z node 7 :PERSQUEUE INFO: new Cookie default|e575ee34-9abc917a-e14ab0a6-145ae349_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] 2025-03-28T09:38:32.394251Z node 7 :PERSQUEUE INFO: new Cookie default|dc7dcc19-6b67b36e-72cfa7a5-576961b6_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured kesus quota request event from [7:223:2233] **** Total histogram: ****
Interval=0ms: 1
Interval=10000ms: 0
Interval=1000ms: 3
Interval=100ms: 0
Interval=10ms: 0
Interval=1ms: 0
Interval=20ms: 0
Interval=2500ms: 2
Interval=5000ms: 0
Interval=500ms: 0
Interval=50ms: 0
Interval=5ms: 0
Interval=999999ms: 0
**** **** **** **** 2025-03-28T09:38:33.233924Z node 8 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:33.234015Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:38:33.252025Z node 8 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:33.252990Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [8:198:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-28T09:38:33.253694Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [8:206:2219] 2025-03-28T09:38:33.254663Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [8:206:2219] 2025-03-28T09:38:33.255737Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [8:207:2220] 2025-03-28T09:38:33.256378Z node 8 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'topic' partition 1 generation 2 [8:207:2220] 2025-03-28T09:38:33.261584Z node 8 :PERSQUEUE INFO: new Cookie default|bf0bc43-78000553-eb171a78-ec5319fe_0 generated for partition 0 topic 'topic' owner default 2025-03-28T09:38:33.267724Z node 8 :PERSQUEUE INFO: new Cookie default|cc1f1bd-291e1433-5dba01b4-b7dc0138_1 generated for partition 0 topic 'topic' owner default 2025-03-28T09:38:33.275486Z node 8 :PERSQUEUE INFO: new Cookie default|848a7302-38041d6d-6a94d376-4531b8e1_2 generated for partition 0 topic 'topic' owner default 2025-03-28T09:38:33.281428Z node 8 :PERSQUEUE INFO: new Cookie default|15223340-7c5886a-3da32606-6dbeb9b6_3 generated for partition 0 topic 'topic' owner default 2025-03-28T09:38:33.756961Z node 9 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:33.757070Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:38:33.779096Z node 9 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:33.780062Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [9:198:2213] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } 
ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-03-28T09:38:33.780820Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [9:207:2220] 2025-03-28T09:38:33.785128Z node 9 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [9:207:2220] 2025-03-28T09:38:33.791802Z node 9 :PERSQUEUE INFO: new Cookie default|b2353851-c7389896-18353015-948a13a0_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] 2025-03-28T09:38:33.799610Z node 9 :PERSQUEUE INFO: new Cookie default|1c9c892-c60474de-aa97d8e5-1433ce9_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] Captured TEvRequest, cmd write size: 3 2025-03-28T09:38:34.118677Z node 9 :PERSQUEUE INFO: new Cookie default|f7a21c27-812baf79-6aa8b98-b8cfde3d_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] 2025-03-28T09:38:34.385264Z node 9 :PERSQUEUE INFO: new Cookie default|8960f49d-9148530-49c195ed-74acfd8_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] Captured TEvRequest, cmd write size: 1 Captured TEvRequest, cmd write size: 1 2025-03-28T09:38:34.634519Z node 9 :PERSQUEUE INFO: new Cookie default|a6eddade-d08c079c-d6eb9721-94a3bfec_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] 2025-03-28T09:38:34.879129Z node 9 :PERSQUEUE INFO: new Cookie default|780e0fe-401db12d-717dfd59-f28e34b3_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 3 Captured TEvRequest, cmd write size: 3 Captured kesus quota request event from [9:223:2233] >> TSchemeShardTest::RmDirTwice [GOOD] >> TSchemeShardTest::TopicMeteringMode >> TSchemeShardTest::InitRootAgain [GOOD] >> TSchemeShardTest::InitRootWithOwner >> TSchemeShardTest::Boot [GOOD] >> TSchemeShardTest::AlterTableKeyColumns |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TSchemeShardTest::MkRmDir [GOOD] >> TSchemeShardTest::PathName >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn >> TKeyValueTest::TestConcatWorks [GOOD] >> TKeyValueTest::TestConcatWorksNewApi >> CdcStreamChangeCollector::DeleteNothing [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow >> TSchemeShardCheckProposeSize::CopyTable [GOOD] >> TSchemeShardCheckProposeSize::CopyTables >> TSchemeShardTest::PathName [GOOD] >> TSchemeShardTest::PathName_SetLocale >> TSchemeShardTest::InitRootWithOwner [GOOD] >> TSchemeShardTest::DropTableTwice >> TSchemeShardTest::CreateTable [GOOD] >> TSchemeShardTest::CreateTableWithDate >> 
AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow >> AsyncIndexChangeCollector::AllColumnsInPk [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn >> TSchemeShardTest::TopicMeteringMode [GOOD] >> TSchemeShardTest::Restart >> TSchemeShardTest::AlterTableKeyColumns [GOOD] >> TSchemeShardTest::AlterTableSizeToSplit |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> TSchemeShardTest::PathName_SetLocale [GOOD] >> TSchemeShardTest::ModifyACL >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow >> TSchemeShardTest::DropTableTwice [GOOD] >> TSchemeShardTest::IgnoreUserColumnIds >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] >> TPQTest::TestChangeConfig [GOOD] >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TSchemeShardTest::AlterTableSizeToSplit [GOOD] >> TSchemeShardTest::AlterTableSplitSchema >> TSchemeShardTest::ConsistentCopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTableAwait >> TSchemeShardTest::Restart [GOOD] >> TSchemeShardTest::SchemeErrors >> TSchemeShardTest::ModifyACL [GOOD] >> TSchemeShardTest::NameFormat >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously [GOOD] >> KqpExplain::MultiUsedStage [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert [GOOD] >> CdcStreamChangeCollector::NewImage >> KqpExplain::Predicates >> TSchemeShardTest::IgnoreUserColumnIds [GOOD] >> TSchemeShardTest::DropTableAndConcurrentSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously [GOOD] Test command err: 2025-03-28T09:37:49.221656Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:49.221755Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:49.240145Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:49.258097Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "important_user" Generation: 1 Important: true } 2025-03-28T09:37:49.259087Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:184:2197] 2025-03-28T09:37:49.260095Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 2 [1:184:2197] Run 1 CmdWrite 2025-03-28T09:37:49.266008Z 
node 1 :PERSQUEUE INFO: new Cookie default|f72b9a42-4ebb802a-df6247b-cc50f092_0 generated for partition 0 topic 'topic' owner default Captured kesus quota request event from [1:205:2214] Captured kesus quota request event from [1:206:2215] CmdRead Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:175:2190] Captured kesus quota request event from [1:205:2214] Currently have 3 quoter requests Run 2 CmdWrite 2025-03-28T09:37:50.288217Z node 1 :PERSQUEUE INFO: new Cookie default|6fcfac6d-ca4b7db7-1650746e-f90b3995_1 generated for partition 0 topic 'topic' owner default CmdRead Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:175:2190] Captured kesus quota request event from [1:205:2214] Currently have 4 quoter requests Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-03-28T09:37:53.009080Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:53.009160Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] 2025-03-28T09:37:53.026507Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:53.027574Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-28T09:37:53.028215Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:183:2196] 2025-03-28T09:37:53.030834Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:183:2196] 2025-03-28T09:37:53.032504Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:184:2197] 2025-03-28T09:37:53.034403Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:184:2197] 2025-03-28T09:37:53.040840Z node 2 :PERSQUEUE INFO: new Cookie 
default|2ca9bd82-c7e75f1a-abda7e96-a85e4e3f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:37:53.045945Z node 2 :PERSQUEUE INFO: new Cookie default|ae6db688-403e443a-3242ce84-2524259d_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:37:53.050989Z node 2 :PERSQUEUE INFO: new Cookie default|ef18454b-1e53af72-57a56b9c-c4479705_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-03-28T09:37:53.375483Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:53.375552Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-03-28T09:37:53.389612Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:53.390329Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-03-28T09:37:53.390831Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-03-28T09:37:53.392729Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-03-28T09:37:53.394113Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:186:2199] 2025-03-28T09:37:53.395354Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:186:2199] !Reboot 72057594037927937 (actor [3:107:2139]) on event NKikimr::TEvPersQueue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:202:2057] recipient: [3:99:2134] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:204:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:206:2057] recipient: [3:205:2210] Leader for TabletID 72057594037927937 is [3:207:2211] sender: [3:208:2057] recipient: [3:205:2210] 2025-03-28T09:37:53.425774Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:53.425829Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:53.426489Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:260:2256] 2025-03-28T09:37:53.428529Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:261:2257] 2025-03-28T09:37:53.434553Z node 3 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:37:53.434619Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [3:260:2256] 2025-03-28T09:37:53.434945Z node 3 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:37:53.434976Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [3:261:2257] !Reboot 72057594037927937 (actor [3:107:2139]) rebooted! !Reboot 72057594037927937 (actor [3:107:2139]) tablet resolver refreshed! new actor is[3:207:2211] Leader for TabletID 72057594037927937 is [3:207:2211] sender: [3:305:2057] recipient: [3:14:2061] 2025-03-28T09:37:54.660944Z node 3 :PERSQUEUE INFO: new Cookie default|f0746c32-b21bf80-729c0bc5-5216cf11_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:37:54.667045Z node 3 :PERSQUEUE INFO: new Cookie default|e752307e-3c19a498-a4c56dc6-ba3dd545_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:37:54.678811Z node 3 :PERSQUEUE INFO: new Cookie default|8fc77719-aba1953d-1947ba0-e5eb1dd8_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-03-28T09:37:54.984606Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:54.984657Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:179:2057] recipient: [4:14:2061] 2025-03-28T09:37:54.997173Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927 ... 
.size=0 2025-03-28T09:38:38.344538Z node 36 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:38.344578Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:38:38.344651Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitConfigStep 2025-03-28T09:38:38.344950Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2025-03-28T09:38:38.345181Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [36:278:2272] 2025-03-28T09:38:38.346006Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDiskStatusStep 2025-03-28T09:38:38.346899Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitMetaStep 2025-03-28T09:38:38.347110Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitInfoRangeStep 2025-03-28T09:38:38.347718Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataRangeStep 2025-03-28T09:38:38.348296Z node 36 :PERSQUEUE DEBUG: [topic:0:TInitDataRangeStep] Got data offset 0 count 2 size 1049870 so 0 eo 2 d0000000000_00000000000000000000_00000_0000000002_00002| 2025-03-28T09:38:38.348420Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitDataStep 2025-03-28T09:38:38.353768Z node 36 :PERSQUEUE DEBUG: [topic:0:TInitDataStep] read res partition offset 0 endOffset 2 key 0,2 valuesize 1049870 expected 1049870 2025-03-28T09:38:38.355580Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2025-03-28T09:38:38.355637Z node 36 :PERSQUEUE INFO: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:38:38.355679Z node 36 :PERSQUEUE DEBUG: [topic:0:Initializer] Initializing completed. 2025-03-28T09:38:38.355730Z node 36 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'topic' partition 0 generation 3 [36:278:2272] 2025-03-28T09:38:38.355802Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 2 Head Offset 0 PartNo 0 PackedSize 1049870 count 2 nextOffset 2 batches 4 SYNC INIT sourceId sourceid1 seqNo 2 offset 1 SYNC INIT HEAD KEY: d0000000000_00000000000000000000_00000_0000000002_00002| size 1049870 2025-03-28T09:38:38.355872Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Process pending events. 
Count 0 2025-03-28T09:38:38.356006Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:38:38.356051Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:38:38.356097Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [d0000000000_00000000000000000000_00000_0000000001_00000|, d0000000000_00000000000000000000_00000_0000000001_00000|] 2025-03-28T09:38:38.356144Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:38:38.356186Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:38:38.356230Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:38:38.356278Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:38:38.356352Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Init complete for topic 'topic' Partition: 0 SourceId: sourceid1 SeqNo: 2 offset: 1 MaxOffset: 2 2025-03-28T09:38:38.356414Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 39 2025-03-28T09:38:38.356474Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 39 2025-03-28T09:38:38.356746Z node 36 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:38:38.356791Z node 36 :PERSQUEUE DEBUG: CacheProxy. Delete blobs from d0000000000_00000000000000000000_00000_0000000001_00000|(+) to d0000000000_00000000000000000000_00000_0000000001_00000|(+) 2025-03-28T09:38:38.357123Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 Topic 'topic' partition 0 user user offset 0 count 1 size 1024000 endOffset 2 max time lag 0ms effective offset 0 2025-03-28T09:38:38.357174Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] read cookie 0 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 2 2025-03-28T09:38:38.358028Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Reading cookie 0. All data is from uncompacted head. 
2025-03-28T09:38:38.358078Z node 36 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T09:38:38.358280Z node 36 :PERSQUEUE DEBUG: Topic 'topic' partition 0 user user readTimeStamp done, result 130 queuesize 0 startOffset 0 2025-03-28T09:38:38.360465Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 >>> write #3 2025-03-28T09:38:38.363543Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [36:303:2290], now have 1 active actors on pipe 2025-03-28T09:38:38.363614Z node 36 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:38:38.363656Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:38:38.363714Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 3 partNo : 0 messageNo: 1 size 1024 offset: 2 2025-03-28T09:38:38.363791Z node 36 :PERSQUEUE DEBUG: tablet 72057594037927937 topic 'topic' partition 0 error: new GetOwnership request needed for owner 2025-03-28T09:38:38.363854Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2025-03-28T09:38:38.363890Z node 36 :PERSQUEUE DEBUG: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2025-03-28T09:38:38.364150Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [36:306:2292], now have 1 active actors on pipe 2025-03-28T09:38:38.364229Z node 36 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:38:38.364275Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:38:38.364368Z node 36 :PERSQUEUE INFO: new Cookie default|53e32335-9a3b9c08-e1b3bc8d-23bb5228_0 generated for partition 0 topic 'topic' owner default 2025-03-28T09:38:38.364469Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2025-03-28T09:38:38.364537Z node 36 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:38:38.364874Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [36:308:2294], now have 1 active actors on pipe 2025-03-28T09:38:38.364910Z node 36 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:38:38.364935Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:38:38.364966Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message topic: topic partition: 0 SourceId: 'sourceid1' SeqNo: 3 partNo : 0 messageNo: 0 size 1024 offset: 2 2025-03-28T09:38:38.365029Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Send write quota request. Topic: "topic". Partition: 0. Amount: 1033. Cookie: 1 2025-03-28T09:38:38.365116Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Got quota. Topic: "topic". 
Partition: 0: Cookie: 1 2025-03-28T09:38:38.365246Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob processing sourceId 'sourceid1' seqNo 3 partNo 0 2025-03-28T09:38:38.366721Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 part blob complete sourceId 'sourceid1' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 1096 count 1 nextOffset 3 batches 1 2025-03-28T09:38:38.367337Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Add new write blob: topic 'topic' partition 0 compactOffset 2,1 HeadOffset 0 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000| size 1084 WTime 253 2025-03-28T09:38:38.367510Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:38:38.367574Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:38:38.367625Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] [x0000000000, x0000000001) 2025-03-28T09:38:38.367676Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:38:38.367733Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000psourceid1 2025-03-28T09:38:38.367771Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] d0000000000_00000000000000000002_00000_0000000001_00000| 2025-03-28T09:38:38.367795Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:38:38.367830Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:38:38.367882Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:38:38.367959Z node 36 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:38:38.368048Z node 36 :PERSQUEUE DEBUG: CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 1084 2025-03-28T09:38:38.371102Z node 36 :PERSQUEUE DEBUG: Caching head blob in L1. Partition 0 offset 2 count 1 size 1084 actorID [36:269:2265] 2025-03-28T09:38:38.371229Z node 36 :PERSQUEUE DEBUG: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 2 partno 0 count 1 parts 0 size 1084 2025-03-28T09:38:38.371331Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 1033 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:38:38.371394Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::ReplyWrite. 
Partition: 0 2025-03-28T09:38:38.371471Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Answering for message sourceid: 'sourceid1', Topic: 'topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2025-03-28T09:38:38.371710Z node 36 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 >>> write #1 2025-03-28T09:38:38.372194Z node 36 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [36:315:2300], now have 1 active actors on pipe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true Test command err: 2025-03-28T09:38:28.454572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:38:28.454945Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:38:28.454993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0008e4/r3tmp/tmpoCZ4DP/pdisk_1.dat 2025-03-28T09:38:28.752698Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:38:28.791168Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:28.794101Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-03-28T09:38:28.828559Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:28.828683Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:28.839744Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:28.918815Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:38:28.952116Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:38:28.952332Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:28.984666Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:28.984818Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:38:28.986656Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:38:28.986754Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:38:28.986810Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:38:28.987200Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:38:28.987327Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:38:28.987411Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:38:28.998202Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:38:29.025396Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:38:29.025564Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:38:29.025653Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:38:29.025680Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:29.025707Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:38:29.025744Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-03-28T09:38:29.026062Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:38:29.026126Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:38:29.026404Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.026430Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.026468Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:38:29.026498Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.026551Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:38:29.026665Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:29.026878Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:38:29.026950Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:38:29.028180Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:29.038747Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:29.038831Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:38:29.185891Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:38:29.193536Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:38:29.193598Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.194138Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.194187Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:29.194235Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:29.194411Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:29.194526Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:29.195119Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.195185Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:38:29.196529Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:38:29.196810Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.197745Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:38:29.197785Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.198246Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:38:29.198291Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.198970Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.199037Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:29.199075Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:38:29.199127Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:29.199161Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:38:29.199217Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.201679Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:29.203369Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:38:29.203424Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:38:29.203596Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:29.211799Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:29.211898Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:38:29.211938Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-03-28T09:38:29.211968Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-03-28T09:38:29.212622Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:29.236842Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:29.455512Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:38:29.455602Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.456391Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.456441Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 1 2025-03-28T09:38:29.456486Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:29.456666Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-03-28T09:38:29.456807Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:29.457120Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.458609Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.518335Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-03-28T09:38:29.518405Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.518434Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.518464Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tab ... :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:38:36.994185Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:36.994771Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:38:36.994832Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:36.995645Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:36.995682Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:36.995723Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:38:36.995782Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:36.995824Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:38:36.995904Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:36.996917Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:36.998512Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:38:36.998569Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:38:36.998737Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:37.008104Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:37.008257Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:38:37.008329Z node 3 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 
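
Note on the planning pattern above: both scheme transactions follow the same lifecycle. The shard reports them Prepared, the coordinator assigns a plan step (1000 for 281474976715657, 1500 for 281474976715658), and once the mediator time reaches that step the shard pops them from the PlanQueue in (step, txId) order, which is what "Found ready operation [1000:281474976715657] in PlanQueue" records. A small illustrative model of that queue:

    import heapq

    plan_queue = []
    heapq.heappush(plan_queue, (1000, 281474976715657))
    heapq.heappush(plan_queue, (1500, 281474976715658))

    def run_ready(mediator_step: int) -> None:
        # Pops every planned tx whose step the mediator has reached.
        while plan_queue and plan_queue[0][0] <= mediator_step:
            step, tx_id = heapq.heappop(plan_queue)
            print(f"Found ready operation [{step}:{tx_id}] in PlanQueue")

    run_ready(1000)   # executes 1000:281474976715657
    run_ready(1500)   # executes 1500:281474976715658
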
72075186224037888 txId# 281474976715658 2025-03-28T09:38:37.008369Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-03-28T09:38:37.009292Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:37.033764Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:37.239702Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:38:37.239771Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:37.240628Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:37.240690Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:37.240743Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:37.240945Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-03-28T09:38:37.241080Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:37.241429Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:37.242219Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:37.277288Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-03-28T09:38:37.277395Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:37.277435Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:37.277487Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:37.277567Z node 3 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:37.277647Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-03-28T09:38:37.277750Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:37.279688Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-03-28T09:38:37.279765Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:38:37.285184Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:887:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:37.285251Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:897:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:37.285311Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:37.288774Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:38:37.292966Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:37.448115Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:37.451208Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:901:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:38:37.487179Z node 3 :TX_PROXY ERROR: Actor# [3:957:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:37.549352Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1w8k3d0mafbtjrgdf2ps3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=MWJkYmVhYmYtN2ExNjU3YTYtYTJiYWUxZTEtNWUyOTI2ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:37.550010Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:984:2787], serverId# [3:985:2788], sessionId# [0:0:0] 2025-03-28T09:38:37.550188Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:37.551568Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743154717551471 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-28T09:38:37.562675Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:37.562888Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-28T09:38:37.562951Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:37.618876Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1w8vzavcf5fyh6vb2tjkq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NjBkMzFjNDAtNzA1ZmM1Y2EtNDM0ZTA3YTktZmYyZWUzMA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:37.619223Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:37.619796Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743154717619711 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 50b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-28T09:38:37.630754Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:37.630886Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 50 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-28T09:38:37.630925Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:37.703201Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. 
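
Note on the change-collector entries above and just below: each upsert produces one CDC record. PersistChangeRecord writes it inside the data transaction (Order 1 and 2 here with bodies of 34 and 50 bytes, Order 3 just below), and EnqueueChangeRecords hands it to the change sender only after the transaction completes, preserving Order. A toy model of that two-phase handoff (hypothetical structure, not the real one):

    from dataclasses import dataclass

    @dataclass
    class ChangeRecord:
        order: int
        body_size: int
        kind: str = "CdcDataChange"

    persisted, enqueued = [], []

    def persist(rec: ChangeRecord) -> None:
        # Runs inside the datashard transaction ("PersistChangeRecord").
        persisted.append(rec)

    def on_tx_complete() -> None:
        # Runs after commit ("EnqueueChangeRecords"); keeps Order monotonic.
        while persisted:
            rec = persisted.pop(0)
            assert not enqueued or rec.order > enqueued[-1].order
            enqueued.append(rec)

    for order, size in [(1, 34), (2, 50), (3, 34)]:
        persist(ChangeRecord(order, size))
        on_tx_complete()
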
Ctx: { TraceId: 01jqe1w8y1fft45xpm85rv57bk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWQ5OGZhNjItMmIzNGJkMTYtNmQ5ZTZlZmQtZmI1YzJmMTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:37.703541Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:37.704377Z node 3 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1743154717704291 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-28T09:38:37.715186Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:37.715325Z node 3 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-28T09:38:37.715359Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:37.716752Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:1023:2819], serverId# [3:1024:2820], sessionId# [0:0:0] 2025-03-28T09:38:37.721091Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:1025:2821], serverId# [3:1026:2822], sessionId# [0:0:0] >> TSchemeShardTest::CreateTableWithDate [GOOD] >> TSchemeShardTest::CreateIndexedTableRejects >> TSchemeShardTest::SchemeErrors [GOOD] >> TSchemeShardTest::SerializedCellVec [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate >> TSchemeShardTest::ConsistentCopyTableAwait [GOOD] >> TSchemeShardTest::ConsistentCopyTableRejects >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously >> TSchemeShardTest::NameFormat [GOOD] >> TSchemeShardTest::ParallelCreateTable >> TSchemeShardTest::AlterTableSplitSchema [GOOD] >> TSchemeShardTest::AlterTableSettings >> TSchemeShardTest::DropTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::DropTable >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit >> TVectorIndexTests::CreateTablePrefixInvalidKeyType >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TSchemeShardTest::ParallelCreateTable [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi >> TSchemeShardTest::ParallelCreateSameTable >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test [GOOD] >> TPartitionGraphTest::BuildGraph [GOOD] >> TSchemeShardTest::AlterTableSettings [GOOD] >> TSchemeShardTest::AssignBlockStoreVolume >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::DependentOps >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] >> TSchemeShardTest::ConsistentCopyTableRejects [GOOD] >> TSchemeShardTest::ConsistentCopyTableToDeletedPath >> 
AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::AlterTable >> TVectorIndexTests::CreateTablePrefixInvalidKeyType [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig [GOOD] >> TSchemeShardTest::RejectAlterSolomon >> TSchemeShardTest::DropTable [GOOD] >> TSchemeShardTest::DropTableById >> TSchemeShardTest::DependentOps [GOOD] >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName >> TSchemeShardTest::AssignBlockStoreVolume [GOOD] >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefixInvalidKeyType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:38:40.603029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:40.603107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:40.603143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:40.603166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:40.603194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:40.603212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:40.603295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:40.603364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:40.603570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:40.664756Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:40.664803Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:40.679906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:40.680145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:40.680321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:40.686111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:40.686286Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:40.686893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:40.687085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: 
MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:40.688843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:40.690076Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:40.690135Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:40.690300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:40.690347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:40.690387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:40.690496Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:40.696740Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:40.780003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:40.780205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:40.780350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:40.780538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:40.780581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:40.782306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:40.782398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:40.782520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:40.782551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:40.782573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:40.782594Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:40.783940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:40.783978Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 
72057594046678944 2025-03-28T09:38:40.784012Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:40.785105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:40.785134Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:40.785170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:40.785198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:40.787584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:40.788763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:40.788885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:40.789553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:40.789635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:40.789667Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:40.789837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:40.789869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:40.789970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:40.790022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:40.791355Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:40.791402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:40.791522Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:40.791550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:40.791823Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:40.791855Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:40.791947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:40.791972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:40.791995Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:40.792016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:40.792039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:40.792078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:40.792107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:38:40.792148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:40.792203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:40.792225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:40.792247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:40.793518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:40.793584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:40.793609Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:38:40.793657Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:38:40.793698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:40.793781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:38:40.795955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:38:40.796331Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:38:40.797864Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-28T09:38:40.810460Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-28T09:38:40.812274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "embedding" Type: "String" } 
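
Note on the AlterSubDomain operation above: it walks the schemeshard suboperation state machine. TCreateParts moves 2 -> 3 (no shards to create), TConfigureParts moves 3 -> 128, TPropose waits for the coordinator's plan step and moves 128 -> 240, and TDone publishes the path and notifies waiters. A sketch with the numeric states as logged and stage names taken from the surrounding ProgressState entries:

    TRANSITIONS = {
        2:   (3,    "TCreateParts: no shards to create, do next state"),
        3:   (128,  "TConfigureParts"),
        128: (240,  "TPropose: HandleReply TEvOperationPlan"),
        240: (None, "TDone: publish to scheme board, notify waiters"),
    }

    state = 2
    while TRANSITIONS[state][0] is not None:
        nxt, stage = TRANSITIONS[state]
        print(f"Change state for txid 1:0 {state} -> {nxt}  ({stage})")
        state = nxt
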
Columns { Name: "covered" Type: "String" } Columns { Name: "prefix" Type: "Float" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "prefix" KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "covered" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } clusters: 4 levels: 5 } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:40.812679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-03-28T09:38:40.812822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: Column 'prefix' has wrong key type Float for being key, at schemeshard: 72057594046678944 2025-03-28T09:38:40.812859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Column 'prefix' has wrong key type Float for being key, at schemeshard: 72057594046678944 2025-03-28T09:38:40.815817Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:38:40.818137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Column \'prefix\' has wrong key type Float for being key" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:40.818282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'prefix' has wrong key type Float for being key, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2025-03-28T09:38:40.818596Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:38:40.818797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:38:40.818838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:38:40.819257Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:38:40.819337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:38:40.819361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:282:2273] TestWaitNotification: OK eventTxId 101 2025-03-28T09:38:40.819744Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/vectors/idx_vector" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:38:40.819882Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/vectors/idx_vector" took 154us result status StatusPathDoesNotExist 2025-03-28T09:38:40.820013Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: 
\'/MyRoot/vectors/idx_vector\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/vectors/idx_vector" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardTest::CreateIndexedTableRejects [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDrop >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionGraphTest::BuildGraph [GOOD] Test command err: 2025-03-28T09:37:43.858622Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792272692335748:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:43.861055Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:37:43.918143Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792273462601008:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:43.918283Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:37:44.124663Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:37:44.125404Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001996/r3tmp/tmpE8DC2H/pdisk_1.dat 2025-03-28T09:37:44.369650Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:44.369734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:44.370240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:44.370285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:44.375255Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:37:44.375516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:44.376682Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31892, node 1 2025-03-28T09:37:44.411094Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:44.413838Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:37:44.418215Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:37:44.500036Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001996/r3tmp/yandexv4kIpi.tmp 2025-03-28T09:37:44.500056Z node 1 :NET_CLASSIFIER WARN: 
will try to initialize from file: /home/runner/.ya/build/build_root/2909/001996/r3tmp/yandexv4kIpi.tmp 2025-03-28T09:37:44.500539Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001996/r3tmp/yandexv4kIpi.tmp 2025-03-28T09:37:44.500657Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:37:44.619133Z INFO: TTestServer started on Port 9633 GrpcPort 31892 TClient is connected to server localhost:9633 PQClient connected to localhost:31892 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:44.913834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:44.973642Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:37:47.281719Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792290642470478:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.281918Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.282320Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792290642470505:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:47.287464Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-28T09:37:47.301487Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792290642470507:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-28T09:37:47.366891Z node 2 :TX_PROXY ERROR: Actor# [2:7486792290642470535:2130] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:47.694837Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486792290642470550:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:37:47.694933Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486792289872206179:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:37:47.695139Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWY5Yzk3YTYtNzUyMTEwMDctYzg4OTk4M2YtNzRiOTk1ZDM=, ActorId: [2:7486792290642470466:2306], ActorState: ExecuteState, TraceId: 01jqe1tqre6cr1qhvndhg19ybd, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:37:47.695139Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWUzZGYxMjQtOTQ2M2IwYTgtYTUyNjRlYmEtYzI0MDY1MDc=, ActorId: [1:7486792289872206139:2341], ActorState: ExecuteState, TraceId: 01jqe1tqwfd0nz7axyvd8sdpra, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:37:47.696972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.697567Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:37:47.697574Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:37:47.777323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:47.859371Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:37:48.096599Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqe1trcpf99cpf9bmzhh9wfx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWEwZDliNTItZDc1ZmQ1OWYtM2ZjZDQ4NmYtM2ViZmUzMDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
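
Note on the cluster-tracker errors above: they are an expected bootstrap race. Both nodes query `/Root/PQ/Config/V2/Cluster` before the test has created it, get SCHEME_ERROR ("Cannot find table ..."), and keep retrying; once the table exists and the two dc rows are upserted, the CheckClustersList poll succeeds. A hedged sketch of that poll, with hypothetical helper names:

    import time

    def check_clusters_list(list_clusters, attempts=30, delay=1.0):
        # Retries until the config table exists and returns rows, mirroring
        # "failed to list clusters" -> "=== CheckClustersList. Ok".
        for _ in range(attempts):
            try:
                clusters = list_clusters()
                if clusters:
                    return clusters
            except RuntimeError:       # e.g. SCHEME_ERROR: table not found
                pass
            time.sleep(delay)
        raise TimeoutError("clusters list never became available")

    # Self-contained usage with a stub that fails twice, then succeeds:
    calls = {"n": 0}
    def fake_list():
        calls["n"] += 1
        if calls["n"] < 3:
            raise RuntimeError("SCHEME_ERROR: Cannot find table")
        return ["dc1", "dc2"]

    print(check_clusters_list(fake_list, attempts=5, delay=0.0))
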
Subcribe to ClusterTracker from [1:7486792294167173831:3077] 2025-03-28T09:37:48.857356Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792272692335748:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:48.857415Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:37:48.915717Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792273462601008:2080];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:48.915788Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-28T09:37:53.986211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:1, at schemeshard: 72057594046644480 2025-03-28T09:37:54.493014Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:37:54.855365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propos ... NTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:38:28.808846Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe1w04k0q7mjsyvss14r2yv, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NmFiNTE1N2UtZjY3NmQyN2UtOTM4NDViZDctZWZmMDgwOTQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [7:7486792464878833621:3074] 2025-03-28T09:38:30.177285Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486792451993930136:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:30.177362Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:38:30.184018Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7486792450307193956:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:30.184120Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2025-03-28T09:38:34.967619Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:1, at schemeshard: 72057594046644480 2025-03-28T09:38:35.637938Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715684:0, at schemeshard: 72057594046644480 2025-03-28T09:38:36.203778Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-03-28T09:38:36.743715Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-03-28T09:38:37.317558Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715698:0, at schemeshard: 72057594046644480 2025-03-28T09:38:37.925821Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715704:0, at schemeshard: 72057594046644480 Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (11131928866524144434, "Root", "00415F536F757263655F35", 1743154718471, 1743154718471, 0, 13); 2025-03-28T09:38:38.679028Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715707. Ctx: { TraceId: 01jqe1w9rv4zf5nsmvaa17q6ya, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Mjc0YmI1NWYtZTRkMTk3ZGEtMTM0MDgxYS00MjFjMWI1OQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:38:38.691769Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-28T09:38:38.691797Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:38:38.691809Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-28T09:38:38.691834Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7486792507828508175:4026] (SourceId=A_Source_5, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2025-03-28T09:38:38.691984Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [7:7486792507828508176:4026], Recipient [7:7486792490648637974:3396]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [7:7486792507828508175:4026] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-03-28T09:38:38.692078Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [7:7486792507828508175:4026], Recipient [7:7486792490648637974:3396]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_5" 2025-03-28T09:38:38.692164Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [7:7486792490648637974:3396], Recipient [7:7486792507828508175:4026]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-03-28T09:38:38.692199Z node 7 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [7:7486792507828508175:4026] (SourceId=A_Source_5, PreferedPartition=(NULL)) InitTable: SourceId=A_Source_5 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-03-28T09:38:38.692267Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [7:7486792507828508175:4026], Recipient [7:7486792490648637974:3396]: NActors::TEvents::TEvPoison 2025-03-28T09:38:38.692325Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [7:7486792451993930132:2070], Recipient [7:7486792507828508175:4026]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-03-28T09:38:38.692354Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7486792507828508175:4026] (SourceId=A_Source_5, PreferedPartition=(NULL)) StartKqpSession 2025-03-28T09:38:38.695126Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [7:7486792451993930357:2280], Recipient [7:7486792507828508175:4026]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: 
"ydb://session/3?node_id=7&id=ODI4M2U4NTEtODMyMmE0YTgtZTE4N2U3ZTUtNTVjMTZjZmY=" NodeId: 7 } YdbStatus: SUCCESS ResourceExhausted: false 2025-03-28T09:38:38.695190Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7486792507828508175:4026] (SourceId=A_Source_5, PreferedPartition=(NULL)) Select from the table 2025-03-28T09:38:38.863156Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [7:7486792451993930357:2280], Recipient [7:7486792507828508175:4026]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=7&id=ODI4M2U4NTEtODMyMmE0YTgtZTE4N2U3ZTUtNTVjMTZjZmY=" PreparedQuery: "da212e9f-2af05c1e-bee8fed8-1ddd2fc6" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jqe1wa488pbyavpez989t4ky" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1743154718471 } items { uint64_value: 1743154718471 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 103 2025-03-28T09:38:38.863328Z node 7 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [7:7486792507828508175:4026] (SourceId=A_Source_5, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2025-03-28T09:38:38.863355Z node 7 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [7:7486792507828508175:4026] (SourceId=A_Source_5, PreferedPartition=(NULL)) OnPartitionChosen 2025-03-28T09:38:38.863472Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [7:7486792507828508219:4026], Recipient [7:7486792490648637974:3396]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [7:7486792507828508175:4026] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-03-28T09:38:38.863534Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [7:7486792507828508175:4026], Recipient [7:7486792490648637974:3396]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 2025-03-28T09:38:38.863607Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateCheckPartition, received event# 271188558, Sender [7:7486792490648637974:3396], Recipient [7:7486792507828508175:4026]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-03-28T09:38:38.863633Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7486792507828508175:4026] (SourceId=A_Source_5, PreferedPartition=(NULL)) Update the table 2025-03-28T09:38:38.863798Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [7:7486792507828508175:4026], Recipient [7:7486792490648637974:3396]: NActors::TEvents::TEvPoison Received TEvChooseResult: 1 2025-03-28T09:38:39.122445Z node 7 :PQ_PARTITION_CHOOSER TRACE: StateUpdate, received event# 271646721, Sender [7:7486792451993930357:2280], Recipient [7:7486792507828508175:4026]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=7&id=ODI4M2U4NTEtODMyMmE0YTgtZTE4N2U3ZTUtNTVjMTZjZmY=" PreparedQuery: "a0ab477d-6fd1828a-84c7cd4c-bb924d50" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } 
QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 82 2025-03-28T09:38:39.122480Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7486792507828508175:4026] (SourceId=A_Source_5, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2025-03-28T09:38:39.122499Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7486792507828508175:4026] (SourceId=A_Source_5, PreferedPartition=(NULL)) ReplyResult: Partition=1, SeqNo=13 2025-03-28T09:38:39.122536Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7486792507828508175:4026] (SourceId=A_Source_5, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 11131928866524144434 AND Topic = "Root" AND ProducerId = "00415F536F757263655F35" 2025-03-28T09:38:39.293330Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715713. Ctx: { TraceId: 01jqe1wad75neh5q7akn6r7spn, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=MzQyZjQxYzYtMWZiYWFjODktMTQ0MGUzOGItODA3NmNmZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root >> TSchemeShardTest::AlterTable [GOOD] >> TSchemeShardTest::AlterTableDropColumnReCreateSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] Test command err: 2025-03-28T09:38:28.302267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:38:28.302803Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:38:28.302862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a41/r3tmp/tmpZ0zHUR/pdisk_1.dat 2025-03-28T09:38:28.704339Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:38:28.752218Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:28.800093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:28.800264Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:28.811916Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:28.891620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:38:28.928607Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-03-28T09:38:28.928838Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:28.964388Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:28.964512Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:38:28.965867Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:38:28.965935Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:38:28.965974Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:38:28.966251Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:38:28.966483Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:38:28.966535Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:703:2578] in generation 1 2025-03-28T09:38:28.967565Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:681:2580] 2025-03-28T09:38:28.967716Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:28.974679Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:28.974779Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:38:28.975786Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:38:28.975842Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-28T09:38:28.975883Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:38:28.976089Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:38:28.976167Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:38:28.976213Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:711:2580] in generation 1 2025-03-28T09:38:28.986899Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:38:29.014993Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:38:29.015192Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:38:29.015360Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:714:2599] 2025-03-28T09:38:29.015402Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:29.015439Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:38:29.015473Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.015714Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:38:29.015756Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-28T09:38:29.015794Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:38:29.015851Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:715:2600] 2025-03-28T09:38:29.015866Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:38:29.015880Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-28T09:38:29.015893Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:29.016185Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:38:29.016251Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:38:29.016309Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.016335Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.016386Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:38:29.016456Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.016515Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-28T09:38:29.016579Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-28T09:38:29.016913Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:686:2582], sessionId# [0:0:0] 2025-03-28T09:38:29.016948Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:29.016967Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.016984Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-28T09:38:29.017004Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:38:29.017084Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
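For reference, the TTableHelper SelectQuery that the PQ partition chooser printed earlier in this run corresponds to the following runnable YQL; the table path, declarations, and predicates are taken verbatim from the log and only reflowed here as a reading aid, not as the test's own code:

    --!syntax_v1
    DECLARE $Hash AS Uint64;
    DECLARE $Topic AS Utf8;
    DECLARE $SourceId AS Utf8;

    SELECT Partition, CreateTime, AccessTime, SeqNo
    FROM `//Root/.metadata/TopicPartitionsMapping`
    WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId;

The companion UpdateQuery upserts (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) back into the same table, and UpdateAccessTimeQuery refreshes AccessTime for an existing row; together they are why the verification query at the end of that test (SELECT Partition, SeqNo ... WHERE Hash = 11131928866524144434) can read back the mapping the chooser persisted.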
2025-03-28T09:38:29.017239Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:38:29.017325Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:38:29.017704Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:696:2590], sessionId# [0:0:0] 2025-03-28T09:38:29.017845Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:38:29.017956Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:38:29.017997Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-28T09:38:29.019266Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:29.019344Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:29.030005Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:29.030089Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:38:29.030219Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:38:29.030248Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-28T09:38:29.178270Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-03-28T09:38:29.178382Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:740:2619], serverId# [1:743:2622], sessionId# [0:0:0] 2025-03-28T09:38:29.181664Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-28T09:38:29.181723Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:29.182020Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:38:29.182047Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.182116Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:29.182147Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:29.182208Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-03-28T09:38:29.182442Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:29.182557Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-03-28T09:38:29.182777Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.182815Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:29.182841Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:29.182964Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:29.183035Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:29.183100Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:29.183152Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:38:29.184788Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T ... :38:39.922897Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:39.922941Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:39.922984Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:39.923269Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:39.923446Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:39.924064Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:39.924145Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:38:39.924508Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:38:39.924899Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:39.926270Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:38:39.926311Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:39.926923Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:38:39.927010Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:39.927726Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:39.927762Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:39.927797Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:38:39.927859Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose 
latency: 0 ms 2025-03-28T09:38:39.927903Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:38:39.927971Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:39.928774Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:39.930679Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:38:39.930756Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:38:39.930857Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:39.940003Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:39.940159Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:38:39.940208Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-03-28T09:38:39.940241Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-03-28T09:38:39.941065Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:39.964735Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:40.144784Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:38:40.144853Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:40.145120Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:40.145186Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:40.145223Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:40.145364Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-03-28T09:38:40.145469Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:40.145658Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:40.146201Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:40.178798Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-03-28T09:38:40.178895Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:40.178939Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:40.178981Z node 4 
:TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:40.179034Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:40.179084Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-03-28T09:38:40.179157Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:40.181597Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-03-28T09:38:40.181684Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:38:40.186753Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:886:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:40.186837Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:896:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:40.187106Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:40.190344Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:38:40.195204Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:40.349266Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:40.352339Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:900:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:38:40.377377Z node 4 :TX_PROXY ERROR: Actor# [4:956:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:40.431844Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1wbds14j2h940dekykcfx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YTM2Mjc4MDAtYzY0MDY3MDQtN2RkYWZmMDktZjM0OTVkNjA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:40.432360Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:983:2787], serverId# [4:984:2788], sessionId# [0:0:0] 2025-03-28T09:38:40.432545Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:40.433645Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743154720433567 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-28T09:38:40.444540Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:40.444690Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-28T09:38:40.444746Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:40.499410Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1wbnz1sfj705rhbjngbkc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZjljNDkwMTMtYjc5Yjc3ODktZjk0NzE4NzMtM2Q5NmUxMzg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:38:40.499732Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:40.500484Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743154720500405 Step: 2000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-28T09:38:40.511249Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:40.511374Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-28T09:38:40.511404Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:40.512692Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1007:2807], serverId# [4:1008:2808], sessionId# [0:0:0] 2025-03-28T09:38:40.517295Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1009:2809], serverId# [4:1010:2810], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] Test command err: 2025-03-28T09:38:28.302208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:38:28.302617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:38:28.302656Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00089f/r3tmp/tmp5Jv3Wj/pdisk_1.dat 2025-03-28T09:38:28.700096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:38:28.743466Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:28.781870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:28.782010Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:28.794218Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:28.882624Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:38:28.930983Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-03-28T09:38:28.931229Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:28.978172Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:28.978311Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:38:28.979966Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:38:28.980046Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:38:28.980105Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:38:28.980437Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:38:28.980713Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:38:28.980792Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:703:2578] in generation 1 2025-03-28T09:38:28.981948Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:681:2580] 2025-03-28T09:38:28.982145Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:28.990897Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:28.990990Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:38:28.992183Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:38:28.992300Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-28T09:38:28.992365Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:38:28.992621Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:38:28.992716Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:38:28.992763Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:711:2580] in generation 1 2025-03-28T09:38:29.003561Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:38:29.027923Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:38:29.028058Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:38:29.028126Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:714:2599] 2025-03-28T09:38:29.028165Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:29.028194Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:38:29.028219Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.028428Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:38:29.028472Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-28T09:38:29.028527Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:38:29.028609Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:715:2600] 2025-03-28T09:38:29.028631Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:38:29.028652Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-28T09:38:29.028671Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:29.028973Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:38:29.029039Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:38:29.029093Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.029119Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.029154Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:38:29.029217Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.029264Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-28T09:38:29.029315Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-28T09:38:29.029597Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:686:2582], sessionId# [0:0:0] 2025-03-28T09:38:29.029656Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:29.029672Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.029687Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-28T09:38:29.029705Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:38:29.029770Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
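The CdcStreamChangeCollector::DeleteSingleRow test reported above exercises change collection for a CDC stream when a single row is deleted; the PersistChangeRecord ... Kind: CdcDataChange entries in its log are the records being collected. A minimal YQL sketch of that scenario, assuming a hypothetical table and changefeed name (the test drives the datashard programmatically rather than running this DDL, and the mode/format here are arbitrary choices):

    --!syntax_v1
    CREATE TABLE `/Root/Table` (
        Key Uint32,
        Value Utf8,
        PRIMARY KEY (Key)
    );

    ALTER TABLE `/Root/Table` ADD CHANGEFEED `Stream` WITH (
        FORMAT = 'JSON',
        MODE = 'NEW_AND_OLD_IMAGES'
    );

    -- Deleting a row now makes the shard persist a CdcDataChange record
    -- alongside the data change, as in the PersistChangeRecord entries above:
    DELETE FROM `/Root/Table` WHERE Key = 1;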
2025-03-28T09:38:29.029899Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:38:29.029958Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:38:29.030303Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:696:2590], sessionId# [0:0:0] 2025-03-28T09:38:29.030427Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:38:29.030524Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:38:29.030561Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-28T09:38:29.031722Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:29.031774Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:29.042379Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:29.042473Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:38:29.042567Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:38:29.042590Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-28T09:38:29.189760Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-03-28T09:38:29.189856Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:740:2619], serverId# [1:743:2622], sessionId# [0:0:0] 2025-03-28T09:38:29.192840Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-28T09:38:29.192886Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:29.193148Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:38:29.193167Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.193226Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:29.193253Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:29.193300Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-03-28T09:38:29.193462Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:29.193554Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-03-28T09:38:29.193680Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.193699Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:29.193717Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:29.193812Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:29.193876Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:29.193926Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:29.193960Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:38:29.195374Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T ... 2025-03-28T09:38:40.097226Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:40.097287Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:38:40.097727Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:38:40.098115Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:40.099544Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:40.099582Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:40.099614Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:40.099786Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:40.099885Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:40.099971Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-03-28T09:38:40.100014Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:40.101023Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:40.101079Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-03-28T09:38:40.101378Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:38:40.101654Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:40.103089Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 
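The surrounding AsyncIndexChangeCollector tests (ImplicitlyUpdateCoveredColumn above, CoverIndexedColumn below) check that writes to a table with an asynchronous covered index enqueue the expected Kind: AsyncIndex change records, visible further down as PersistChangeRecord/EnqueueChangeRecords entries. A sketch of such a schema in YQL, with illustrative names (the tests construct their tables programmatically):

    --!syntax_v1
    CREATE TABLE `/Root/IndexedTable` (
        Key Uint32,
        Indexed Uint32,
        Covered Utf8,
        PRIMARY KEY (Key),
        INDEX ByIndexed GLOBAL ASYNC ON (Indexed) COVER (Covered)
    );

    -- Each write produces async-index change records on the main table's shard,
    -- which the change sender seen in these logs later ships to the index shard:
    UPSERT INTO `/Root/IndexedTable` (Key, Indexed, Covered)
    VALUES (1, 10, "foo");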
2025-03-28T09:38:40.103162Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:38:40.104890Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:38:40.104958Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:38:40.105006Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-03-28T09:38:40.105068Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:40.105120Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:38:40.105200Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:40.105933Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:38:40.105970Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:40.106201Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:38:40.106258Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:40.107494Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:40.107534Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:40.107566Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:38:40.107608Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:40.107645Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:38:40.107717Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:40.108167Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:40.108237Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:40.111480Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:40.111687Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:40.112063Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-03-28T09:38:40.112123Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-28T09:38:40.112595Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:38:40.112636Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 
72075186224037888 2025-03-28T09:38:40.121078Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:788:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:40.121173Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:798:2663], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:40.121237Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:40.126047Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:38:40.131431Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:40.131538Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:40.279590Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:40.279691Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:40.282654Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:802:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:38:40.317172Z node 4 :TX_PROXY ERROR: Actor# [4:880:2713] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:40.437874Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1wbbq65sq94rke9vndjrs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OWNiNzZjZi1mNjdmN2ZjOS03MzM1MDM4Mi1hYjJiNWRmNg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:40.438394Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:945:2745], serverId# [4:946:2746], sessionId# [0:0:0] 2025-03-28T09:38:40.438582Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:38:40.439786Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743154720439708 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-28T09:38:40.450781Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:38:40.450935Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-03-28T09:38:40.450997Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:40.503063Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1wbp6fxz7k3cp9e7vhsdq, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MmZjMzZiNzEtMjQyMjE3MTktYWUzZTliYzMtMzVjZmQyM2M=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:38:40.503388Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:38:40.504199Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743154720504125 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-28T09:38:40.504346Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1743154720504125 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-28T09:38:40.515182Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:38:40.515337Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-03-28T09:38:40.515378Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:40.519073Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:982:2778], serverId# [4:983:2779], sessionId# [0:0:0] 2025-03-28T09:38:40.524457Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:984:2780], serverId# [4:985:2781], sessionId# [0:0:0] >> TSchemeShardTest::RejectAlterSolomon [GOOD] >> TSchemeShardTest::SimultaneousDropForceDrop >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] >> TSchemeShardTest::ParallelCreateSameTable [GOOD] >> TSchemeShardTest::DropTableById [GOOD] >> TSchemeShardTest::MultipleColumnFamilies >> TSchemeShardTest::DropPQ >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDrop [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously >> TSchemeShardTest::CreateIndexedTable >> TSchemeShardTest::ConsistentCopyTableToDeletedPath [GOOD] >> TSchemeShardTest::CopyIndexedTable >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName [GOOD] >> TSchemeShardTest::DropBlockStoreVolume ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] Test command err: 2025-03-28T09:38:28.386335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:38:28.386655Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:38:28.386711Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007d3/r3tmp/tmpkCuIqr/pdisk_1.dat 2025-03-28T09:38:28.702284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:38:28.741090Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:28.781836Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:28.781948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:28.794267Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:28.882701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:38:28.930463Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-03-28T09:38:28.930658Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:28.963918Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:28.964029Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:38:28.965346Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:38:28.965411Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:38:28.965460Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:38:28.966322Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:38:28.966543Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:38:28.966588Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:703:2578] in generation 1 2025-03-28T09:38:28.967566Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:681:2580] 2025-03-28T09:38:28.967712Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:28.974598Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:28.974669Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:38:28.975667Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:38:28.975710Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-28T09:38:28.975776Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:38:28.975965Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:38:28.976033Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:38:28.976072Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:711:2580] in generation 1 2025-03-28T09:38:28.986800Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:38:29.011049Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:38:29.011272Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:38:29.011403Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:714:2599] 2025-03-28T09:38:29.011433Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:29.011465Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:38:29.011495Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.011722Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:38:29.011747Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-28T09:38:29.011780Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:38:29.011871Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:715:2600] 2025-03-28T09:38:29.011899Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:38:29.011916Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-28T09:38:29.011940Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:29.012301Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:38:29.012369Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:38:29.012433Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.012469Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.012511Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:38:29.012541Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.012568Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-28T09:38:29.012602Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-28T09:38:29.012955Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:686:2582], sessionId# [0:0:0] 2025-03-28T09:38:29.012998Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:29.013018Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.013034Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-28T09:38:29.013055Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:38:29.013154Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-03-28T09:38:29.013404Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:38:29.013494Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:38:29.013931Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:696:2590], sessionId# [0:0:0] 2025-03-28T09:38:29.014073Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:38:29.014247Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:38:29.014306Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-28T09:38:29.015637Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:29.015708Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:29.026385Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:29.026476Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:38:29.026566Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:38:29.026589Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-28T09:38:29.175860Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-03-28T09:38:29.176012Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:740:2619], serverId# [1:743:2622], sessionId# [0:0:0] 2025-03-28T09:38:29.186840Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-28T09:38:29.186933Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:29.187466Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:38:29.187513Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.187649Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:29.187693Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:29.187779Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-03-28T09:38:29.188019Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:29.188179Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-03-28T09:38:29.188377Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.188407Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:29.188437Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:29.188617Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:29.188730Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:29.188823Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:29.188889Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:38:29.191219Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T ... 25-03-28T09:38:40.751173Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:40.751200Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037890 2025-03-28T09:38:40.751379Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037890 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:40.751472Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:40.751767Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-03-28T09:38:40.751841Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:38:40.751894Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:38:40.751933Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037890 tableId# [OwnerId: 72057594046644480, LocalPathId: 6] schema version# 1 2025-03-28T09:38:40.752175Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:38:40.752420Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:40.755472Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:38:40.755530Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:40.756456Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:38:40.756491Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:40.756988Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:38:40.757044Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:38:40.757090Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-03-28T09:38:40.757151Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 
281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:40.757198Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:38:40.757286Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:40.758035Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:40.758070Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:40.758099Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:38:40.758134Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:40.758165Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:38:40.758218Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:40.758875Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2025-03-28T09:38:40.758908Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:38:40.759362Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1000} 2025-03-28T09:38:40.759407Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:38:40.760080Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:40.760155Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:40.760198Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-28T09:38:40.760694Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:38:40.760727Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-28T09:38:40.760756Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-03-28T09:38:40.760795Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:40.760827Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:38:40.760879Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:38:40.765207Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:40.765712Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:40.765764Z node 4 
:TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-03-28T09:38:40.765828Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-28T09:38:40.766522Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:40.766678Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:38:40.766738Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:38:40.767316Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-03-28T09:38:40.767356Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-03-28T09:38:40.776705Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:839:2697], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:40.776820Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:850:2702], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:40.776906Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:40.782036Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:38:40.788161Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:40.788278Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:40.788329Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-28T09:38:40.937296Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:40.937402Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:40.937452Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-28T09:38:40.940364Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:853:2705], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:38:40.975227Z node 4 :TX_PROXY ERROR: Actor# [4:935:2756] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:41.053431Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1wc075560kz94qysr1ev2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YWIzNjJiNDUtOWJiODliZDMtZDliNmJhYjctODhkYWIxMDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:41.053860Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1036:2800], serverId# [4:1037:2801], sessionId# [0:0:0] 2025-03-28T09:38:41.054031Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:38:41.055065Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743154721054995 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 38b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-28T09:38:41.055188Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743154721054995 Step: 1500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-28T09:38:41.066034Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:38:41.066199Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 38 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-03-28T09:38:41.066248Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:41.069682Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1043:2806], serverId# [4:1044:2807], sessionId# [0:0:0] 2025-03-28T09:38:41.108476Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1045:2808], serverId# [4:1046:2809], sessionId# [0:0:0] >> TSchemeShardTest::SimultaneousDropForceDrop [GOOD] >> TSchemeShardTest::RejectSystemViewPath >> TSchemeShardTest::AlterTableDropColumnReCreateSplit [GOOD] >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate >> CdcStreamChangeCollector::PageFaults [GOOD] >> CdcStreamChangeCollector::OldImage >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] Test command err: 2025-03-28T09:38:28.772760Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:38:28.773134Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:38:28.773175Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000814/r3tmp/tmpLK7qBe/pdisk_1.dat 2025-03-28T09:38:29.045362Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:38:29.073502Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:29.110438Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:29.110544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:29.121739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:29.200522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:38:29.239851Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-03-28T09:38:29.240057Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:29.271876Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:29.271992Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:38:29.273250Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:38:29.273307Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:38:29.273344Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:38:29.273613Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:38:29.273822Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:38:29.273880Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:703:2578] in generation 1 2025-03-28T09:38:29.274773Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:681:2580] 2025-03-28T09:38:29.274947Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:29.281322Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:29.281397Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:38:29.282325Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:38:29.282370Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-28T09:38:29.282424Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:38:29.282636Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:38:29.282719Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:38:29.282754Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:711:2580] in generation 1 2025-03-28T09:38:29.293340Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:38:29.318733Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:38:29.318905Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:38:29.318994Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:714:2599] 2025-03-28T09:38:29.319019Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:29.319046Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:38:29.319077Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.319296Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:38:29.319349Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-28T09:38:29.319394Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:38:29.319448Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:715:2600] 2025-03-28T09:38:29.319464Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:38:29.319479Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-28T09:38:29.319493Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:29.319784Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:38:29.319849Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:38:29.319912Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.319938Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.319982Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:38:29.320011Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.320039Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-28T09:38:29.320125Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-28T09:38:29.320463Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:686:2582], sessionId# [0:0:0] 2025-03-28T09:38:29.320506Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:29.320525Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.320554Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-28T09:38:29.320578Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:38:29.320678Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-03-28T09:38:29.320869Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:38:29.320947Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:38:29.321286Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:696:2590], sessionId# [0:0:0] 2025-03-28T09:38:29.321401Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:38:29.321503Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:38:29.321547Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-28T09:38:29.322846Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:29.322907Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:29.333562Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:29.333656Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:38:29.333739Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:38:29.333762Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-28T09:38:29.482433Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-03-28T09:38:29.482589Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:740:2619], serverId# [1:743:2622], sessionId# [0:0:0] 2025-03-28T09:38:29.487222Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-28T09:38:29.487303Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:29.487746Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:38:29.487788Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.487893Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:29.487931Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:29.488004Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-03-28T09:38:29.488280Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:29.488439Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-03-28T09:38:29.488635Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.488667Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:29.488695Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:29.488847Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:29.488955Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:29.489039Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:29.489104Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:38:29.496779Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T ... T09:38:41.106625Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:41.106794Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:41.107094Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:38:41.107127Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:41.107217Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:41.107302Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:38:41.107783Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:38:41.108231Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:41.109755Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:41.109798Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:41.109833Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:41.110038Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:41.110153Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:41.110272Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2025-03-28T09:38:41.110316Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:41.111487Z node 4 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:41.111555Z node 4 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 4] schema version# 1 2025-03-28T09:38:41.111899Z node 4 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:38:41.112209Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:41.113749Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1000} 2025-03-28T09:38:41.113832Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:38:41.115822Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:38:41.115890Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:38:41.115933Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037889 2025-03-28T09:38:41.116003Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:41.116057Z node 4 :TX_DATASHARD INFO: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:38:41.116157Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:41.116996Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:38:41.117034Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:41.117302Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:38:41.117353Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:41.118772Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:41.118814Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:41.118858Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:38:41.118914Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:41.118958Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:38:41.119024Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:41.119572Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:41.119657Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:41.123238Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:41.123516Z node 
4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:41.123901Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-03-28T09:38:41.123979Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-28T09:38:41.124517Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:38:41.124565Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:38:41.134346Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:788:2658], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:41.134447Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:798:2663], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:41.134529Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:41.139817Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:38:41.145863Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:41.145971Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:41.293915Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:41.293993Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:41.296351Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:802:2666], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:38:41.329666Z node 4 :TX_PROXY ERROR: Actor# [4:880:2713] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:41.489044Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1wcbcf0wjfgrkv7ga7tqk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NjMwYzBhZjMtZTVkODk1MmQtYWM0NTExZDEtZjk3MjhiMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:41.493056Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:959:2754], serverId# [4:960:2755], sessionId# [0:0:0] 2025-03-28T09:38:41.493503Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715660, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:38:41.497251Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1wcbcf0wjfgrkv7ga7tqk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NjMwYzBhZjMtZTVkODk1MmQtYWM0NTExZDEtZjk3MjhiMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:41.501192Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1wcbcf0wjfgrkv7ga7tqk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NjMwYzBhZjMtZTVkODk1MmQtYWM0NTExZDEtZjk3MjhiMWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:41.501710Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:38:41.503141Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743154721503039 Step: 1501 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-28T09:38:41.514299Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:38:41.514423Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-03-28T09:38:41.514524Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-03-28T09:38:41.514611Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:41.515580Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037889 2025-03-28T09:38:41.515651Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:41.520105Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:976:2764], serverId# [4:977:2765], sessionId# [0:0:0] 2025-03-28T09:38:41.525757Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:978:2766], serverId# [4:979:2767], sessionId# [0:0:0] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> 
TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::CreateTableWithUniformPartitioning >> CdcStreamChangeCollector::NewImage [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] Test command err: 2025-03-28T09:38:28.435660Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:38:28.436174Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:38:28.436232Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0009f7/r3tmp/tmp78Vmmi/pdisk_1.dat 2025-03-28T09:38:28.712791Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:38:28.750956Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:28.789980Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:28.790140Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:28.801467Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:28.882762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:38:28.931764Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:677:2578] 2025-03-28T09:38:28.932025Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:28.970564Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:28.970664Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:38:28.971902Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:38:28.971961Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:38:28.972020Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:38:28.972262Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:38:28.972510Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:38:28.972559Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:703:2578] in generation 1 2025-03-28T09:38:28.973415Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:681:2580] 2025-03-28T09:38:28.973584Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:28.979757Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:28.979833Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:38:28.980693Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:38:28.980733Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-28T09:38:28.980772Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:38:28.980943Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:38:28.981008Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:38:28.981040Z node 1 :TX_DATASHARD DEBUG: DataShard 
72075186224037889 persisting started state actor id [1:711:2580] in generation 1 2025-03-28T09:38:28.991579Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:38:29.012498Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:38:29.012654Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:38:29.012726Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:714:2599] 2025-03-28T09:38:29.012750Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:29.012775Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:38:29.012799Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.013018Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:38:29.013057Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-28T09:38:29.013090Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:38:29.013138Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:715:2600] 2025-03-28T09:38:29.013152Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:38:29.013165Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-28T09:38:29.013177Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:29.013484Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:38:29.013603Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:38:29.013668Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.013695Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.013730Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:38:29.013755Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.013782Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-28T09:38:29.013826Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-28T09:38:29.014201Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:670:2574], serverId# [1:686:2582], sessionId# [0:0:0] 2025-03-28T09:38:29.014250Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:29.014268Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.014284Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-28T09:38:29.014303Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:38:29.014385Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 
2025-03-28T09:38:29.014558Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:38:29.014628Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:38:29.015080Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:671:2575], serverId# [1:696:2590], sessionId# [0:0:0] 2025-03-28T09:38:29.015206Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:38:29.015321Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:38:29.015358Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2025-03-28T09:38:29.016498Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:29.016557Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:29.027205Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:29.027286Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:38:29.027381Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:38:29.027403Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme 2025-03-28T09:38:29.175794Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:739:2618], serverId# [1:742:2621], sessionId# [0:0:0] 2025-03-28T09:38:29.175940Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:740:2619], serverId# [1:743:2622], sessionId# [0:0:0] 2025-03-28T09:38:29.179976Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037889 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2025-03-28T09:38:29.180054Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:29.180465Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:38:29.180495Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.180592Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:29.180625Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:29.180690Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037889 2025-03-28T09:38:29.180929Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037889 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:29.181061Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 1 active 
planned 1 immediate 0 planned 1 2025-03-28T09:38:29.181236Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.181264Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:29.181292Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:29.181425Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:29.181507Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:29.181583Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:38:29.181633Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:38:29.183615Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T ... 24037890 2025-03-28T09:38:41.149136Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:41.149215Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:41.149261Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-28T09:38:41.149734Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:38:41.149767Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-28T09:38:41.149796Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037890 2025-03-28T09:38:41.149835Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:41.149870Z node 4 :TX_DATASHARD INFO: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:38:41.149926Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:38:41.154128Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:41.154701Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:41.154765Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2025-03-28T09:38:41.154822Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2025-03-28T09:38:41.155533Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:41.155694Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 
state Ready 2025-03-28T09:38:41.155740Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:38:41.156302Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2025-03-28T09:38:41.156348Z node 4 :TX_DATASHARD DEBUG: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2025-03-28T09:38:41.165451Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:839:2697], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:41.165550Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:850:2702], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:41.165621Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:41.170674Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:38:41.176621Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:41.176732Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:41.176807Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-28T09:38:41.324546Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:41.324622Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2025-03-28T09:38:41.324657Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2025-03-28T09:38:41.326823Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:853:2705], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:38:41.361797Z node 4 :TX_PROXY ERROR: Actor# [4:935:2756] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 10], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:41.537822Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1wccc912jzb4580kypr54, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YzE5ZmM5ZjItNjU5NmQ2MTItM2U2MmQzYmQtZTdhMGQ4OTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:41.540919Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1050:2809], serverId# [4:1051:2810], sessionId# [0:0:0] 2025-03-28T09:38:41.541237Z node 4 :TX_DATASHARD DEBUG: 72075186224037889 Acquired lock# 281474976715660, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:38:41.543862Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1wccc912jzb4580kypr54, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YzE5ZmM5ZjItNjU5NmQ2MTItM2U2MmQzYmQtZTdhMGQ4OTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:41.546657Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1wccc912jzb4580kypr54, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YzE5ZmM5ZjItNjU5NmQ2MTItM2U2MmQzYmQtZTdhMGQ4OTY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:41.547130Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:38:41.548178Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743154721548087 Step: 1501 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-28T09:38:41.548306Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743154721548087 Step: 1501 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-28T09:38:41.559150Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:38:41.559250Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-03-28T09:38:41.559363Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-03-28T09:38:41.559424Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:41.560142Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037889 2025-03-28T09:38:41.560191Z node 4 :TX_DATASHARD 
DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:41.639721Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1wcrv2jvck5hm1j0nwcpy, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YjhlZGIwMzctYWYyNTkxNGEtNzgwNjQ5ODctZDJlNDY2NmQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:41.640120Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:38:41.641222Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 3 Group: 1743154721641125 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-28T09:38:41.641380Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 4 Group: 1743154721641125 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-28T09:38:41.641479Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 5 Group: 1743154721641125 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-28T09:38:41.641561Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 6 Group: 1743154721641125 Step: 1501 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2025-03-28T09:38:41.652431Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037889 2025-03-28T09:38:41.652570Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 5] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 1 } 2025-03-28T09:38:41.652616Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:38:41.655665Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1105:2854], serverId# [4:1106:2855], sessionId# [0:0:0] 2025-03-28T09:38:41.660884Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [4:1107:2856], serverId# [4:1108:2857], sessionId# [0:0:0] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TSchemeShardTest::DropBlockStoreVolume [GOOD] >> 
TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions >> TSchemeShardTest::RejectSystemViewPath [GOOD] >> TSchemeShardTest::SplitKey [GOOD] >> TSchemeShardTest::SplitAlterCopy >> TKeyValueTest::TestCopyRangeWorksNewApi [GOOD] >> TKeyValueTest::TestCopyRangeToLongKey >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter [GOOD] >> TSchemeShardTest::BlockStoreVolumeLimits >> KqpExplain::Predicates [GOOD] >> KqpExplain::MultiJoinCteLinks >> TSchemeShardTest::CreateIndexedTable [GOOD] >> TSchemeShardTest::CreateAlterTableWithCodec >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate [GOOD] >> TSchemeShardTest::AlterTableById >> TSchemeShardTest::CopyIndexedTable [GOOD] >> TSchemeShardTest::CopyTable >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::DropBlockStoreVolume2 |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] Test command err: 2025-03-28T09:38:28.396586Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:38:28.397073Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:38:28.397126Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00088b/r3tmp/tmplHyyKx/pdisk_1.dat 2025-03-28T09:38:28.719798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:38:28.752520Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:28.759698Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-03-28T09:38:28.794626Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:28.794784Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:28.806114Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:28.886177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:38:28.928573Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:38:28.928824Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:28.962207Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:28.962292Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:38:28.964602Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:38:28.964668Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:38:28.964715Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:38:28.966185Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:38:28.966293Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:38:28.966345Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:38:28.976894Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:38:28.991782Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:38:28.993160Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:38:28.993254Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:38:28.993290Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:28.993314Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:38:28.993336Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-03-28T09:38:28.994617Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:38:28.994710Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:38:28.995996Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:28.996031Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:28.996097Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:38:28.996128Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:28.996215Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:38:28.996316Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:28.996576Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:38:28.996653Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:38:28.998019Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:29.008482Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:29.008565Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:38:29.155616Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:38:29.158484Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:38:29.158555Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.159179Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.159215Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:29.159264Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:29.159465Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:29.159589Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:29.160134Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.160203Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:38:29.166148Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:38:29.167688Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.169278Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:38:29.169340Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.170165Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:38:29.170233Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.171220Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.171249Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:29.171284Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:38:29.171368Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:29.171407Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:38:29.171465Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.173926Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:29.175577Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:38:29.175631Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:38:29.175841Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:29.191292Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:29.191408Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:38:29.191440Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-03-28T09:38:29.191468Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-03-28T09:38:29.192164Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:29.215454Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:29.465062Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:38:29.465129Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.465963Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.466016Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 1 2025-03-28T09:38:29.466059Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:29.466242Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-03-28T09:38:29.466382Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:29.466715Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.467413Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.523062Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-03-28T09:38:29.523129Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.523173Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.523219Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tab ... ode 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:41.836038Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:38:41.836082Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:41.836658Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:38:41.836732Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:41.837623Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:41.837659Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:41.837710Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:38:41.837776Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:41.837820Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:38:41.837896Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:41.838772Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:41.840629Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:38:41.840688Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:38:41.840836Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:41.849887Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:41.850031Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 
2025-03-28T09:38:41.850080Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-03-28T09:38:41.850119Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-03-28T09:38:41.850906Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:41.875081Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:42.060069Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:38:42.060152Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:42.060474Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:42.060535Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:42.060588Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:42.060828Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-03-28T09:38:42.060974Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:42.061228Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:42.062004Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:42.099889Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-03-28T09:38:42.100003Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:42.100049Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:42.100106Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:42.100191Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:42.100261Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-03-28T09:38:42.100394Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:42.103454Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-03-28T09:38:42.103560Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:38:42.110934Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:886:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:42.111040Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:896:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:42.111432Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:42.116653Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:38:42.123108Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:42.281187Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:42.284989Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:900:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:38:42.311898Z node 4 :TX_PROXY ERROR: Actor# [4:956:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:42.532966Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1wd9xa1r6q79wrpek9y90, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZWZkYTBmNTEtYzNmYmQxMzQtNjk4MzhlZWUtYzZjMTVjZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:42.536105Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:997:2796], serverId# [4:998:2797], sessionId# [0:0:0] 2025-03-28T09:38:42.536493Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:38:42.539782Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1wd9xa1r6q79wrpek9y90, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZWZkYTBmNTEtYzNmYmQxMzQtNjk4MzhlZWUtYzZjMTVjZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:42.542804Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1wd9xa1r6q79wrpek9y90, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=ZWZkYTBmNTEtYzNmYmQxMzQtNjk4MzhlZWUtYzZjMTVjZWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:42.543253Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:42.544432Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743154722544344 Step: 2001 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-28T09:38:42.555514Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:42.555645Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 2001 from mediator time cast 2025-03-28T09:38:42.555747Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-28T09:38:42.555814Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:42.556592Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 2001 at tablet 72075186224037888 2025-03-28T09:38:42.556656Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:42.629136Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1wdqz8v153n6bfpx50jsm, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MmMwYWQ4Mi1mNTE1NjQ0Yy1mZGM1MmVlNC1kOWU0NGM2Mg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:38:42.629497Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:42.630363Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743154722630273 Step: 2001 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-28T09:38:42.641216Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:42.641339Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-28T09:38:42.641382Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:42.642894Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1032:2821], serverId# [4:1033:2822], sessionId# [0:0:0] 2025-03-28T09:38:42.647699Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1034:2823], serverId# [4:1035:2824], sessionId# [0:0:0] >> TSchemeShardTest::AlterTableById [GOOD] >> TSchemeShardTest::AlterTableConfig >> TSchemeShardTest::CreateTableWithUniformPartitioning [GOOD] >> TSchemeShardTest::CreateTableWithSplitBoundaries >> TSchemeShardTest::SplitAlterCopy [GOOD] >> TSchemeShardTest::TopicReserveSize >> TSchemeShardTest::DropBlockStoreVolume2 [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] >> KqpTypes::QuerySpecialTypes >> TKeyValueTest::TestInlineCopyRangeWorks [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi >> TSchemeShardTest::MultipleColumnFamilies [GOOD] >> TSchemeShardTest::MultipleColumnFamiliesWithStorage >> TSchemeShardTest::CreateAlterTableWithCodec [GOOD] >> TSchemeShardTest::CopyTableTwiceSimultaneously >> TSchemeShardTest::BlockStoreVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreNonreplVolumeLimits >> TSchemeShardTest::CopyTable [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentChanges >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> KqpExplain::SortStage >> TSchemeShardTest::CreateTableWithSplitBoundaries [GOOD] >> TSchemeShardTest::CreateTableWithConfig >> TSchemeShardTest::DropPQ [GOOD] >> TSchemeShardTest::DropPQFail >> KqpLimits::TooBigQuery+useSink >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration [GOOD] >> TSchemeShardTest::CreateWithIntermediateDirs >> TSchemeShardTest::CopyTableTwiceSimultaneously [GOOD] >> TSchemeShardTest::CopyTableWithAlterConfig >> TSchemeShardTest::AlterTableConfig [GOOD] >> TSchemeShardTest::AlterTableCompactionPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 
72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! 
new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! 
new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:93:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:92:2119] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:146:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:76:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:79:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:78:2110] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:82:2057] recipient: [17:78:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! new actor is[17:81:2111] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:135:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:77:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:79:2110] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:83:2057] recipient: [18:79:2110] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:82:2111] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:136:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:82:2113] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:82:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:80:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:82:2113] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:86:2057] recipient: [20:82:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! new actor is[20:85:2114] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:81:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:83:2113] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:87:2057] recipient: [21:83:2113] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:86:2114] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:104:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:83:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:86:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:85:2115] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:89:2057] recipient: [22:85:2115] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:88:2116] Leader for TabletID 72057594037927937 is [22:88:2116] sender: [22:142:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:83:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:86:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:87:2057] recipient: [23:85:2115] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:89:2057] recipient: [23:85:2115] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:88:2116] Leader for TabletID 72057594037927937 is [23:88:2116] sender: [23:142:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:86:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:88:2057] recipient: [24:87:2115] Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:90:2057] recipient: [24:87:2115] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:89:2116] Leader for TabletID 72057594037927937 is [24:89:2116] sender: [24:143:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:90:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:91:2057] recipient: [25:89:2118] Leader for TabletID 72057594037927937 is [25:92:2119] sender: [25:93:2057] recipient: [25:89:2118] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! new actor is[25:92:2119] Leader for TabletID 72057594037927937 is [25:92:2119] sender: [25:146:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:87:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:90:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:91:2057] recipient: [26:89:2118] Leader for TabletID 72057594037927937 is [26:92:2119] sender: [26:93:2057] recipient: [26:89:2118] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! 
new actor is[26:92:2119] Leader for TabletID 72057594037927937 is [26:92:2119] sender: [26:146:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] >> TSchemeShardTest::BlockStoreNonreplVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] Test command err: 2025-03-28T09:37:51.411581Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792304981629018:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:51.411732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:37:51.430970Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792305236376474:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:51.431072Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:37:51.564102Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:37:51.564431Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001903/r3tmp/tmpQeoP6x/pdisk_1.dat 2025-03-28T09:37:51.730504Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:51.730568Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:51.735826Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:37:51.736544Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:51.737353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13721, node 1 2025-03-28T09:37:51.770230Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:51.770293Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:51.773181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:51.796914Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001903/r3tmp/yandexvTGdjJ.tmp 2025-03-28T09:37:51.796949Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001903/r3tmp/yandexvTGdjJ.tmp 2025-03-28T09:37:51.797108Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001903/r3tmp/yandexvTGdjJ.tmp
2025-03-28T09:37:51.797309Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:37:51.845195Z INFO: TTestServer started on Port 3558 GrpcPort 13721 TClient is connected to server localhost:3558 PQClient connected to localhost:13721 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:52.080117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:52.114903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:37:54.088356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792317866532068:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:54.088423Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792317866532094:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:54.088461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:54.091737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480 2025-03-28T09:37:54.092144Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792318121278685:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:54.092204Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792318121278676:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:54.092344Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:54.094945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792317866532129:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:54.095030Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:54.101098Z node 2 :TX_PROXY ERROR: Actor# [2:7486792318121278692:2123] txid# 281474976710657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T09:37:54.109077Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792317866532097:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-03-28T09:37:54.109131Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792318121278691:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2025-03-28T09:37:54.190144Z node 2 :TX_PROXY ERROR: Actor# [2:7486792318121278719:2129] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:54.197897Z node 1 :TX_PROXY ERROR: Actor# [1:7486792317866532184:2832] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:54.323864Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486792317866532202:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:37:54.324069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:37:54.324098Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ODg4MDVlYTAtNmQ4YTA5OWUtMmIyMDZiMjctMmM5ZDg2MTY=, ActorId: [1:7486792317866532065:2340], ActorState: ExecuteState, TraceId: 01jqe1tyd089927jwv3tjkbc6a, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:37:54.326227Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486792318121278734:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:37:54.326400Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OTQ4ZWM4ZjgtMzRlOTI3MjQtMzc2ZTkyZGMtNTJmOGJmODY=, ActorId: [2:7486792318121278660:2307], ActorState: ExecuteState, TraceId: 01jqe1tydaczwq6nnn60hykn9f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:37:54.326435Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:37:54.326805Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:37:54.405932Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: ... ://session/3?node_id=5&id=Mjc4ODUzZmMtODA2MjM1YzAtOTFjNmMzMWQtMWYzYzdiNGM=. TraceId : 01jqe1w2sr7s18jykp3s1zhwqz. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [5:7486792476746488670:2670], status: UNAVAILABLE, reason: {
: Error: Terminate execution } 2025-03-28T09:38:33.160877Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486792486086683842:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:33.160970Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:38:33.168013Z node 8 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7486792485609743155:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:33.168080Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:38:33.185761Z node 7 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001903/r3tmp/tmpv5I6YY/pdisk_1.dat 2025-03-28T09:38:33.198189Z node 8 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:38:33.280122Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:33.313595Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:33.313675Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:33.313801Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:33.313839Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:33.315659Z node 7 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2025-03-28T09:38:33.315897Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:33.316181Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4651, node 7 2025-03-28T09:38:33.353881Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/001903/r3tmp/yandexraT7S1.tmp 2025-03-28T09:38:33.353906Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/001903/r3tmp/yandexraT7S1.tmp 2025-03-28T09:38:33.354002Z node 7 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/001903/r3tmp/yandexraT7S1.tmp 2025-03-28T09:38:33.354148Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:38:33.421551Z INFO: TTestServer started on Port 7437 GrpcPort 4651 TClient is connected to server localhost:7437 PQClient connected to localhost:4651 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:33.618209Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:38:33.647476Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:38:36.316143Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7486792498494645387:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:36.316254Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7486792498494645365:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:36.316589Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:36.322458Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480 2025-03-28T09:38:36.338813Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7486792498494645394:2313], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2025-03-28T09:38:36.415589Z node 8 :TX_PROXY ERROR: Actor# [8:7486792498494645422:2130] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:36.446549Z node 8 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [8:7486792498494645437:2317], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:38:36.446819Z node 8 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=8&id=YjAwYTQ3MGMtZmY3MGZhODgtNGM0ZDMxNzAtNTUzZTM4NTE=, ActorId: [8:7486792498494645363:2308], ActorState: ExecuteState, TraceId: 01jqe1w7mtdexgjg9nrg725b8w, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:38:36.447350Z node 8 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:38:36.622630Z node 7 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [7:7486792498971586955:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:38:36.624522Z node 7 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=7&id=MzZiY2NhMzUtMTJhM2FlM2MtYzI1NDFjYTgtYTkxMzc3NDg=, ActorId: [7:7486792498971586915:2341], ActorState: ExecuteState, TraceId: 01jqe1w7xp5zzy1cw399bfrjev, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:38:36.624955Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:38:36.636635Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:36.719456Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:36.807554Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:38:37.116103Z node 7 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe1w86k1n637qhpfzeg0v4s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=NDk2MDlhMTAtMzJiNDlmNjMtZjJhMWY4NDgtNWVkN2I0YjQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [7:7486792503266554612:3067] 2025-03-28T09:38:38.161356Z node 7 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7486792486086683842:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:38.161428Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:38:38.167968Z node 8 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7486792485609743155:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:38.168034Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok
Received TEvChooseError: Bad SourceId 2025-03-28T09:38:43.323670Z node 7 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [7:7486792529036358857:3315] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2025-03-28T09:38:43.323703Z node 7 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [7:7486792529036358857:3315] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId >> TSchemeShardTest::MultipleColumnFamiliesWithStorage [GOOD] >> TSchemeShardTest::ParallelModifying >> TSchemeShardTest::CreateTableWithConfig [GOOD] >> TSchemeShardTest::CreateTableWithNamedConfig >> TSchemeShardTest::TopicReserveSize [GOOD] >> TSchemeShardTest::TopicWithAutopartitioningReserveSize >> TPQTest::TestPQRead [GOOD] >> TPQTest::TestPQSmallRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:37:01.202449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:01.202535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:01.202576Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:01.202606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:01.202638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:01.202657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:01.202720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:01.202777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:01.203017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute
2025-03-28T09:37:01.274174Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:37:01.274236Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:37:01.280901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:01.281028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:01.281137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:01.285936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:01.286102Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:01.286739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:01.286959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:01.288728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:01.289868Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:01.289915Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:01.290028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:01.290065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:01.290098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:01.290237Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:37:01.295796Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:37:01.366147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:01.366313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:37:01.366463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:01.366618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:01.366653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:01.368603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:01.368699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:01.368821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:01.368863Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:01.368898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:01.368918Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:01.370340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:01.370392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:01.370442Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:01.371684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:01.371714Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:01.371750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:01.371785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:01.374083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:01.375301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:01.375441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:01.376269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944
2025-03-28T09:37:01.376368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:01.376405Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:01.376599Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:01.376630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:01.376759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:01.376817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:01.378192Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:01.378232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:01.378358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:01.378401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:37:01.378581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:01.378612Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:01.378710Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:01.378740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:01.378770Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1
2025-03-28T09:37:01.378789Z no ... 412738Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:45.413040Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-03-28T09:38:45.413160Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-03-28T09:38:45.413189Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-03-28T09:38:45.413222Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-03-28T09:38:45.413251Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-03-28T09:38:45.413282Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2025-03-28T09:38:45.417640Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-28T09:38:45.417763Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-28T09:38:45.417795Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-03-28T09:38:45.418006Z node 114 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-28T09:38:45.418062Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-28T09:38:45.418081Z node 114 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-03-28T09:38:45.418108Z node 114 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-03-28T09:38:45.418150Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-28T09:38:45.418244Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2025-03-28T09:38:45.420171Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:38:45.420328Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:38:45.420387Z node 114 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:45.420615Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:38:45.420722Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3
2025-03-28T09:38:45.420753Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-03-28T09:38:45.420788Z node 114 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-03-28T09:38:45.420816Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-03-28T09:38:45.420847Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2025-03-28T09:38:45.420877Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-03-28T09:38:45.420916Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-03-28T09:38:45.420946Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-03-28T09:38:45.421031Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:38:45.421065Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2025-03-28T09:38:45.421086Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2025-03-28T09:38:45.421113Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:38:45.421135Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2025-03-28T09:38:45.421154Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2025-03-28T09:38:45.421189Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-03-28T09:38:45.422819Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:38:45.422903Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:38:45.422948Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:38:45.427689Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:38:45.427932Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:38:45.430842Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 341 RawX2: 489626274069 } TabletId: 72075186233409546 State: 4 2025-03-28T09:38:45.430918Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-03-28T09:38:45.432544Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:38:45.432996Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 2025-03-28T09:38:45.433174Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944
2025-03-28T09:38:45.433428Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409546 2025-03-28T09:38:45.435990Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:38:45.436037Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-03-28T09:38:45.436099Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-28T09:38:45.436136Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-28T09:38:45.436167Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:38:45.438979Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:38:45.439058Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-03-28T09:38:45.439580Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-03-28T09:38:45.439784Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-03-28T09:38:45.439821Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-03-28T09:38:45.440749Z node 114 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-03-28T09:38:45.440983Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-03-28T09:38:45.441020Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [114:628:2554] 2025-03-28T09:38:45.445424Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 349 RawX2: 489626274075 } TabletId: 72075186233409547 State: 4 2025-03-28T09:38:45.445513Z node 114 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-03-28T09:38:45.447178Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:38:45.447617Z node 114 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 2025-03-28T09:38:45.447791Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:45.448010Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409547
2025-03-28T09:38:45.450615Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:38:45.450661Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:38:45.450750Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:45.453946Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:38:45.454024Z node 114 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2025-03-28T09:38:45.454257Z node 114 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-03-28T09:38:45.454540Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-03-28T09:38:45.454630Z node 114 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 >> TSchemeShardTest::AlterTableCompactionPolicy [GOOD] >> TSchemeShardTest::AlterTableFollowers >> TSchemeShardTest::CopyTableAndConcurrentChanges [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplit >> KqpScripting::StreamExecuteYqlScriptScan >> TSchemeShardTest::BlockStoreSystemVolumeLimits [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies >> KqpPragma::Auth >> KqpYql::UuidPrimaryKey >> KqpYql::InsertCV+useSink >> KqpYql::NonStrictDml >> TSchemeShardTest::CreateWithIntermediateDirs [GOOD] >> TSchemeShardTest::DocumentApiVersion >> StreamCreator::WithResolvedTimestamps >> TSchemeShardTest::CreateTableWithNamedConfig [GOOD] >> TSchemeShardTest::CreateTableWithUnknownNamedConfig |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] >> TSchemeShardTest::CopyTableWithAlterConfig [GOOD] >> TSchemeShardTest::CopyTableOmitFollowers >> TSchemeShardTest::AlterTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] >> TSchemeShardTest::CreateTableWithUnknownNamedConfig [GOOD] >> TSchemeShardTest::CreateIndexedTableAfterBackup >> CdcStreamChangeCollector::OldImage [GOOD] >> TSchemeShardTest::DocumentApiVersion [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Dir >> TKeyValueTest::TestRenameToLongKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140]
2025-03-28T09:38:36.219110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:36.219234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:36.219281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:36.219330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:36.220255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:36.220297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:36.220365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:36.220452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:36.222190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:36.316287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:36.316344Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:36.330365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:36.330614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:36.330813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:36.336091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:36.336322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:36.336945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.337575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:36.342002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.351379Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:36.351455Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.351646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:36.351697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:36.351749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:36.351826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.358286Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:36.489326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction
{ WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:36.489481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.489633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:36.489822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:36.489868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.491563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.491674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:36.491799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.491837Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:36.491864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:36.491894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:36.493274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.493321Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:36.493349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:36.494479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.494509Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.494540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.494594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.497167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:36.498493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:36.498634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:36.499424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.499519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:36.499552Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.499782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:36.499820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.499952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:36.500018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:36.501443Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:36.501479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:36.501608Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.501639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:36.501888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.501918Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:36.502014Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:36.502043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.502073Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:36.502107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.502137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:36.502164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.502201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:38:36.502223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:36.502270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:36.502296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:36.502320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:36.503623Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:36.503712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:36.503740Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... oseTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 2, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-28T09:38:47.158167Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-03-28T09:38:47.158195Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2025-03-28T09:38:47.158446Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2025-03-28T09:38:47.158526Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 5000005 2025-03-28T09:38:47.158571Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 5000005 2025-03-28T09:38:47.158606Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2025-03-28T09:38:47.158632Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2025-03-28T09:38:47.158660Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2025-03-28T09:38:47.158903Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409549, partId: 0 2025-03-28T09:38:47.158984Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 Status: COMPLETE TxId: 104 Step: 5000005 2025-03-28T09:38:47.159033Z node 13 :FLAT_TX_SCHEMESHARD INFO: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409549 Status: COMPLETE TxId: 104 Step: 5000005 2025-03-28T09:38:47.159067Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at 
schemeshard: 72057594046678944 2025-03-28T09:38:47.159098Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2025-03-28T09:38:47.159295Z node 13 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 104:0 128 -> 240 2025-03-28T09:38:47.159566Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-03-28T09:38:47.169063Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:38:47.169241Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:38:47.169339Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:38:47.169418Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:38:47.169512Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:38:47.169582Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:38:47.169753Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:47.169787Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:38:47.170058Z node 13 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:47.170096Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [13:205:2207], at schemeshard: 72057594046678944, txId: 104, path id: 2 2025-03-28T09:38:47.170361Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2025-03-28T09:38:47.170409Z node 13 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 104:0 ProgressState 2025-03-28T09:38:47.170601Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:38:47.170658Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:38:47.170740Z node 13 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#104:0 progress is 1/1 2025-03-28T09:38:47.170793Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:38:47.170848Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2025-03-28T09:38:47.170923Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2025-03-28T09:38:47.170993Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 104:0 2025-03-28T09:38:47.171048Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 104:0 2025-03-28T09:38:47.171314Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 10 2025-03-28T09:38:47.171376Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2025-03-28T09:38:47.171428Z node 13 
:FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-28T09:38:47.172077Z node 13 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:38:47.172157Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2025-03-28T09:38:47.172201Z node 13 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2025-03-28T09:38:47.172269Z node 13 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-28T09:38:47.172330Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-03-28T09:38:47.172439Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2025-03-28T09:38:47.180767Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2025-03-28T09:38:47.208949Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2025-03-28T09:38:47.209021Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2025-03-28T09:38:47.209609Z node 13 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2025-03-28T09:38:47.209733Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2025-03-28T09:38:47.209795Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [13:1468:3271] TestWaitNotification: OK eventTxId 104 2025-03-28T09:38:47.210497Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:38:47.210730Z node 13 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 254us result status StatusSuccess 2025-03-28T09:38:47.211422Z node 13 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 4 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 2 TotalGroupCount: 6 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: 
METERING_MODE_RESERVED_CAPACITY PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 7 PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409549 KeyRange { FromBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 5 } Partitions { PartitionId: 3 TabletId: 72075186233409550 KeyRange { ToBound: "0" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409551 KeyRange { FromBound: "0" ToBound: "A" } Status: Inactive ParentPartitionIds: 1 ChildPartitionIds: 5 } Partitions { PartitionId: 5 TabletId: 72075186233409552 KeyRange { FromBound: "0" } Status: Active ParentPartitionIds: 2 ParentPartitionIds: 4 } AlterVersion: 4 BalancerTabletID: 72075186233409547 NextPartitionId: 6 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 494 AccountSize: 494 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::CopyTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentMerge >> TSchemeShardTest::AlterTableFollowers [GOOD] >> TSchemeShardTest::AlterPersQueueGroup >> TSchemeShardTest::DropPQFail [GOOD] >> TSchemeShardTest::DropPQAbort >> KqpYql::UpdatePk >> TSchemeShardTest::CopyTableOmitFollowers [GOOD] >> TSchemeShardTest::CopyTableForBackup >> KqpExplain::MultiJoinCteLinks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:82:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:86:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:141:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:82:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:84:2115] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:88:2057] recipient: [8:84:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:87:2116] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:141:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:86:2117] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2117] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:90:2057] recipient: [10:86:2117] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2118] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2119] Leader for TabletID 72057594037927937 is [11:91:2120] sender: [11:92:2057] recipient: [11:88:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2120] Leader for TabletID 72057594037927937 is [11:91:2120] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Re ... is [18:56:2097] sender: [18:95:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:96:2057] recipient: [18:94:2123] Leader for TabletID 72057594037927937 is [18:97:2124] sender: [18:98:2057] recipient: [18:94:2123] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:97:2124] Leader for TabletID 72057594037927937 is [18:97:2124] sender: [18:151:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:76:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:78:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:80:2057] recipient: [21:79:2110] Leader for TabletID 72057594037927937 is [21:81:2111] sender: [21:82:2057] recipient: [21:79:2110] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! 
new actor is[21:81:2111] Leader for TabletID 72057594037927937 is [21:81:2111] sender: [21:135:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:76:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:79:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:80:2057] recipient: [22:78:2110] Leader for TabletID 72057594037927937 is [22:81:2111] sender: [22:82:2057] recipient: [22:78:2110] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:81:2111] Leader for TabletID 72057594037927937 is [22:81:2111] sender: [22:135:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:77:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:80:2057] recipient: [23:79:2110] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:81:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:82:2111] sender: [23:83:2057] recipient: [23:79:2110] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:82:2111] Leader for TabletID 72057594037927937 is [23:82:2111] sender: [23:136:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:80:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:83:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:82:2113] Leader for TabletID 72057594037927937 is [24:85:2114] sender: [24:86:2057] recipient: [24:82:2113] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! 
new actor is[24:85:2114] Leader for TabletID 72057594037927937 is [24:85:2114] sender: [24:139:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:80:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:83:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:84:2057] recipient: [25:82:2113] Leader for TabletID 72057594037927937 is [25:85:2114] sender: [25:86:2057] recipient: [25:82:2113] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! new actor is[25:85:2114] Leader for TabletID 72057594037927937 is [25:85:2114] sender: [25:139:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:81:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:84:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:85:2057] recipient: [26:83:2113] Leader for TabletID 72057594037927937 is [26:86:2114] sender: [26:87:2057] recipient: [26:83:2113] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! new actor is[26:86:2114] Leader for TabletID 72057594037927937 is [26:86:2114] sender: [26:140:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:84:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:87:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:88:2057] recipient: [27:86:2116] Leader for TabletID 72057594037927937 is [27:89:2117] sender: [27:90:2057] recipient: [27:86:2116] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! 
new actor is[27:89:2117] Leader for TabletID 72057594037927937 is [27:89:2117] sender: [27:143:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:84:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:86:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:88:2057] recipient: [28:87:2116] Leader for TabletID 72057594037927937 is [28:89:2117] sender: [28:90:2057] recipient: [28:87:2116] !Reboot 72057594037927937 (actor [28:56:2097]) rebooted! !Reboot 72057594037927937 (actor [28:56:2097]) tablet resolver refreshed! new actor is[28:89:2117] Leader for TabletID 72057594037927937 is [28:89:2117] sender: [28:143:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:85:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:88:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:89:2057] recipient: [29:87:2116] Leader for TabletID 72057594037927937 is [29:90:2117] sender: [29:91:2057] recipient: [29:87:2116] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:90:2117] Leader for TabletID 72057594037927937 is [29:90:2117] sender: [29:144:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] >> KqpTypes::QuerySpecialTypes [GOOD] >> KqpTypes::SelectNull >> TSchemeShardTest::DisablePublicationsOfDropping_Dir [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Table >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::OldImage [GOOD] Test command err: 2025-03-28T09:38:28.303691Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:38:28.303988Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:38:28.304022Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000989/r3tmp/tmppYKRft/pdisk_1.dat 2025-03-28T09:38:28.699715Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:38:28.739802Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:28.744937Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-03-28T09:38:28.781830Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:28.781950Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:28.794018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:28.882627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:38:28.929086Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:38:28.929309Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:28.962207Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:28.962294Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:38:28.964629Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:38:28.964707Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:38:28.964750Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:38:28.966142Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:38:28.966243Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:38:28.966303Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:38:28.976747Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:38:28.994125Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:38:28.994263Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:38:28.994341Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:38:28.994365Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:28.994390Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:38:28.994425Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037888 2025-03-28T09:38:28.994744Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:38:28.994802Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:38:28.996027Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:28.996063Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:28.996105Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:38:28.996141Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:28.996217Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:38:28.996314Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:28.996523Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:38:28.996638Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:38:28.997982Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:29.008532Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:29.008618Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:38:29.154938Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:38:29.164547Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:38:29.164618Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.165243Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.165285Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:29.165335Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:29.165526Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:38:29.165644Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:29.166291Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.166353Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:38:29.167980Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:38:29.168293Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp 
at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.169480Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:38:29.169539Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.170217Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:38:29.170267Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.171010Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.171047Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:29.171098Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:38:29.171167Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:29.171204Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:38:29.171303Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.173743Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:29.175684Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:38:29.175744Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:38:29.175919Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:29.191363Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:29.191453Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2025-03-28T09:38:29.191503Z node 1 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-03-28T09:38:29.191540Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-03-28T09:38:29.192221Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:29.215897Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:29.460720Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:38:29.460764Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:29.461284Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.461316Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active 
planned 0 immediate 0 planned 1 2025-03-28T09:38:29.461346Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:29.461480Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-03-28T09:38:29.461572Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:29.461764Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:29.462192Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:29.511966Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-03-28T09:38:29.512051Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.512084Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:29.512136Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tab ... ode 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:46.583421Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:38:46.583469Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:46.584085Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:38:46.584162Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:46.585157Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:46.585203Z node 4 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:38:46.585264Z node 4 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:38:46.585329Z node 4 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:46.585376Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:38:46.585462Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:46.586548Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:46.588635Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:38:46.588691Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:38:46.588812Z node 4 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:38:46.597868Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:46.598025Z node 4 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 
2025-03-28T09:38:46.598070Z node 4 :TX_DATASHARD INFO: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2025-03-28T09:38:46.598126Z node 4 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2025-03-28T09:38:46.598939Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:46.622920Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:46.808584Z node 4 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:38:46.808685Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:46.809069Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:46.809140Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:38:46.809204Z node 4 :TX_DATASHARD DEBUG: Found ready operation [1500:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-28T09:38:46.809439Z node 4 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1500:281474976715658 keys extracted: 0 2025-03-28T09:38:46.809598Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:38:46.809931Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:38:46.810840Z node 4 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:38:46.847152Z node 4 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1500} 2025-03-28T09:38:46.847290Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:46.847343Z node 4 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:38:46.847423Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:46.847514Z node 4 :TX_DATASHARD DEBUG: Complete [1500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:38:46.847594Z node 4 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2025-03-28T09:38:46.847721Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:46.850809Z node 4 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2025-03-28T09:38:46.850903Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:38:46.858365Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:886:2724], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:46.858475Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:896:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:46.858937Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:46.864815Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:38:46.871905Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:47.028454Z node 4 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:38:47.031065Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:900:2732], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:38:47.057254Z node 4 :TX_PROXY ERROR: Actor# [4:956:2769] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 8], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:47.328157Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1why81k1awe2s6yq9n791, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MzI4ZDE5MzQtMWYwYzJlYmItNDM2ZWQwYWUtYWNiOTdmYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:47.332531Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:997:2796], serverId# [4:998:2797], sessionId# [0:0:0] 2025-03-28T09:38:47.333065Z node 4 :TX_DATASHARD DEBUG: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:38:47.337786Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1why81k1awe2s6yq9n791, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MzI4ZDE5MzQtMWYwYzJlYmItNDM2ZWQwYWUtYWNiOTdmYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:47.342439Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1why81k1awe2s6yq9n791, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=MzI4ZDE5MzQtMWYwYzJlYmItNDM2ZWQwYWUtYWNiOTdmYTU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:38:47.343110Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:47.344624Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 1 Group: 1743154727344510 Step: 2001 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-28T09:38:47.355964Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:47.356103Z node 4 :TX_DATASHARD DEBUG: Waiting for PlanStep# 2001 from mediator time cast 2025-03-28T09:38:47.356217Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-28T09:38:47.356291Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:47.357321Z node 4 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 2001 at tablet 72075186224037888 2025-03-28T09:38:47.357393Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:47.441859Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1wje1f78hcgyg8sxh7h5s, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=Yjk0Y2MzNzgtMTg4ODNhMTEtZWFmZjgyMWUtYzU4NDE3MzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:38:47.442243Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:38:47.443186Z node 4 :TX_DATASHARD DEBUG: PersistChangeRecord: record: { Order: 2 Group: 1743154727443073 Step: 2001 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2025-03-28T09:38:47.454434Z node 4 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:38:47.454595Z node 4 :TX_DATASHARD DEBUG: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 2] SchemaVersion: 2 } 2025-03-28T09:38:47.454653Z node 4 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:38:47.456799Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1032:2821], serverId# [4:1033:2822], sessionId# [0:0:0] 2025-03-28T09:38:47.463473Z node 4 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [4:1034:2823], serverId# [4:1035:2824], sessionId# [0:0:0] >> TSchemeShardTest::CreateIndexedTableAfterBackup [GOOD] >> TSchemeShardTest::CreatePersQueueGroup >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true |96.7%| [TA] $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScripting::StreamExecuteYqlScriptScanCancelation >> KqpExplain::SortStage [GOOD] >> KqpExplain::SqlIn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:104:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:107:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (a ... 594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:85:2114] Leader for TabletID 72057594037927937 is [23:85:2114] sender: [23:139:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:81:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:83:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:85:2057] recipient: [24:84:2113] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:87:2057] recipient: [24:84:2113] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:86:2114] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:140:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:84:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:86:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:88:2057] recipient: [25:87:2116] Leader for TabletID 72057594037927937 is [25:89:2117] sender: [25:90:2057] recipient: [25:87:2116] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! new actor is[25:89:2117] Leader for TabletID 72057594037927937 is [25:89:2117] sender: [25:143:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:84:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:87:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:88:2057] recipient: [26:86:2116] Leader for TabletID 72057594037927937 is [26:89:2117] sender: [26:90:2057] recipient: [26:86:2116] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! new actor is[26:89:2117] Leader for TabletID 72057594037927937 is [26:89:2117] sender: [26:143:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:85:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:88:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:89:2057] recipient: [27:87:2116] Leader for TabletID 72057594037927937 is [27:90:2117] sender: [27:91:2057] recipient: [27:87:2116] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! new actor is[27:90:2117] Leader for TabletID 72057594037927937 is [27:90:2117] sender: [27:144:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:88:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:91:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:92:2057] recipient: [28:90:2119] Leader for TabletID 72057594037927937 is [28:93:2120] sender: [28:94:2057] recipient: [28:90:2119] !Reboot 72057594037927937 (actor [28:56:2097]) rebooted! !Reboot 72057594037927937 (actor [28:56:2097]) tablet resolver refreshed! new actor is[28:93:2120] Leader for TabletID 72057594037927937 is [28:93:2120] sender: [28:147:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:88:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:91:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:92:2057] recipient: [29:90:2119] Leader for TabletID 72057594037927937 is [29:93:2120] sender: [29:94:2057] recipient: [29:90:2119] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:93:2120] Leader for TabletID 72057594037927937 is [29:93:2120] sender: [29:147:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:89:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:91:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:93:2057] recipient: [30:92:2119] Leader for TabletID 72057594037927937 is [30:94:2120] sender: [30:95:2057] recipient: [30:92:2119] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:94:2120] Leader for TabletID 72057594037927937 is [30:94:2120] sender: [30:148:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:91:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:94:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:95:2057] recipient: [31:93:2121] Leader for TabletID 72057594037927937 is [31:96:2122] sender: [31:97:2057] recipient: [31:93:2121] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! new actor is[31:96:2122] Leader for TabletID 72057594037927937 is [31:96:2122] sender: [31:150:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:91:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:94:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:95:2057] recipient: [32:93:2121] Leader for TabletID 72057594037927937 is [32:96:2122] sender: [32:97:2057] recipient: [32:93:2121] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! new actor is[32:96:2122] Leader for TabletID 72057594037927937 is [32:96:2122] sender: [32:150:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:92:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:95:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:96:2057] recipient: [33:94:2121] Leader for TabletID 72057594037927937 is [33:97:2122] sender: [33:98:2057] recipient: [33:94:2121] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! new actor is[33:97:2122] Leader for TabletID 72057594037927937 is [33:97:2122] sender: [33:151:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] >> TSchemeShardTest::CopyTableAndConcurrentMerge [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::MultiJoinCteLinks [GOOD] Test command err: Trying to start YDB, gRPC: 24658, MsgBus: 9919 2025-03-28T09:38:29.834962Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792469812573994:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:29.836394Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00149d/r3tmp/tmp6syeuS/pdisk_1.dat 2025-03-28T09:38:30.053028Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24658, node 1 2025-03-28T09:38:30.111429Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:30.111473Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:30.111481Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:30.111677Z node 1 :NET_CLASSIFIER ERROR: got bad 
distributable configuration 2025-03-28T09:38:30.170789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:30.170907Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:30.172656Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9919 TClient is connected to server localhost:9919 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:30.490292Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:30.512399Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:30.631212Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:30.762917Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:30.840246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:31.947097Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792478402510351:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:31.947249Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:32.141337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:32.168388Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:32.233128Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:32.256640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:32.281203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:32.311699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:32.385468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792482697478167:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:32.385534Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:32.385571Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792482697478172:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:32.389002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:32.397773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792482697478174:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:32.491268Z node 1 :TX_PROXY ERROR: Actor# [1:7486792482697478230:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TopSort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Data (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key [150, 266]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"4"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 15762, MsgBus: 22969 2025-03-28T09:38:33.971771Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792486098092485:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:33.971831Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00149d/r3tmp/tmpqW3sbH/pdisk_1.dat 2025-03-28T09:38:34.044661Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15762, node 2 2025-03-28T09:38:34.095831Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:34.095859Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:34.095866Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:34.095973Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:38:34.100346Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:34.100427Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2025-03-28T09:38:34.102036Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22969 TClient is connected to server localhost:22969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl ... ublechecking } 2025-03-28T09:38:41.561506Z node 3 :TX_PROXY ERROR: Actor# [3:7486792521973065575:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:42.400274Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 21730, MsgBus: 24538 2025-03-28T09:38:43.696091Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486792531195729767:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:43.696203Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00149d/r3tmp/tmpsB3KPK/pdisk_1.dat 2025-03-28T09:38:43.791261Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21730, node 4 2025-03-28T09:38:43.824623Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:43.824708Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:43.826249Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:43.844775Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:43.844801Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:43.844809Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:43.844928Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24538 TClient is connected to server localhost:24538 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:44.234496Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:44.251481Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:44.320372Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:44.432159Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:44.483852Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:46.400964Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792544080633450:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:46.401029Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:46.463450Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:46.490829Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:46.517063Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:46.543661Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:46.569917Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:46.599814Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:46.637294Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792544080633956:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:46.637361Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792544080633961:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:46.637376Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:46.640841Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:46.650295Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486792544080633963:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:38:46.739950Z node 4 :TX_PROXY ERROR: Actor# [4:7486792544080634020:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
{"Plan":{"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"E-Size":"No estimate","PlanNodeId":8,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/EightShard","Columns":["Data","Key","Text"],"E-Rows":"No estimate","Table":"EightShard","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":3},{"InternalOperatorId":2}],"E-Rows":"No estimate","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"},{"Inputs":[{"ExternalPlanNodeId":8}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Limit-InnerJoin (MapJoin)-ConstantExpr-Filter","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"lookup_by":["Key"],"columns":["Data","Key","Text"],"type":"Lookup"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"E-Rows":"No estimate","Columns":["Data","Key","Text"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"EightShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":13,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}}
>> TSchemeShardTest::DisablePublicationsOfDropping_Table [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TPQTest::TestPartitionWriteQuota [GOOD] >> TPQTest::TestPartitionedBlobFails >> TSchemeShardTest::ParallelModifying [GOOD] >> TSchemeShardTest::PQGroupExplicitChannels >> StreamCreator::WithResolvedTimestamps [GOOD] >> KqpScripting::SelectNullType >> KqpScripting::StreamExecuteYqlScriptData >> TSchemeShardTest::CreatePersQueueGroup [GOOD] >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema >> KqpPragma::Auth [GOOD] >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::WithResolvedTimestamps [GOOD] Test command err: 2025-03-28T09:38:47.461868Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792547152241094:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:47.461940Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012f2/r3tmp/tmprSFx9b/pdisk_1.dat 2025-03-28T09:38:47.858267Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:47.887075Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:47.887165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:47.890093Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1817 TServer::EnableGrpc on GrpcPort 29662, node 1 2025-03-28T09:38:48.246864Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:48.246896Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:48.246909Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:48.247073Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1817 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:48.787325Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:48.820478Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743154728938 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743154728854 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743154728938 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... (TRUNCATED)
2025-03-28T09:38:48.950306Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:38:48.950569Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:38:48.950595Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:38:48.951173Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:38:49.853190Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743154728938, tx_id: 281474976710658 } } } 2025-03-28T09:38:49.854489Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:38:49.856452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.857258Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-28T09:38:49.857287Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-03-28T09:38:49.884584Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-03-28T09:38:49.884615Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] 2025-03-28T09:38:49.885217Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-03-28T09:38:49.995460Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7486792555742176596:2345] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:5:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-28T09:38:50.043374Z node 1 :REPLICATION_CONTROLLER 
TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-03-28T09:38:50.043393Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# 2025-03-28T09:38:50.069440Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:50.083512Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-03-28T09:38:50.083546Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743154728938 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyC... (TRUNCATED) >> KqpYql::InsertCV+useSink [GOOD] >> KqpYql::InsertCV-useSink >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Pq >> TAsyncIndexTests::Decimal >> KqpYql::NonStrictDml [GOOD] >> KqpYql::JsonNumberPrecision >> TSchemeShardTest::PQGroupExplicitChannels [GOOD] >> TSchemeShardTest::ReadOnlyMode >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] >> KqpScripting::StreamExecuteYqlScriptScan [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TAsyncIndexTests::Decimal [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> KqpYql::UuidPrimaryKey [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Pq [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon >> KqpYql::UpdatePk [GOOD] >> KqpYql::EvaluateExprPgNull |96.7%| [TA] $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:38:36.219120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:36.219237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:36.219279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:36.219327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:36.220268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:36.220311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:36.220387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:36.220498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:36.222166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:36.305986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:36.306052Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:36.322190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:36.322468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:36.322718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:36.330904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:36.332002Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:36.335597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.337536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:36.341991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.351403Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:36.351480Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.351662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Execute 2025-03-28T09:38:36.351714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:36.351766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:36.351852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.358378Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:36.485639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:36.485867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.486092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:36.486350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:36.486409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.488852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.488986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:36.489148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.489200Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:36.489237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:36.489280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:36.491228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.491329Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:36.491369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:36.493081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.493132Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.493176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2025-03-28T09:38:36.493229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.496988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:36.498985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:36.499186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:36.500224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.500369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:36.500416Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.500716Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:36.500776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.500941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:36.501039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:36.503035Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:36.503085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:36.503257Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.503320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:36.503670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.503716Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:36.503813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:36.503853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.503891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 
2025-03-28T09:38:36.503949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.503992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:36.504055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.504122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:38:36.504158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:36.504222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:36.504263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:36.504297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:36.506274Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:36.506391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:36.506427Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 31 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:51.768366Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:38:51.768590Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" took 245us result status StatusSuccess 2025-03-28T09:38:51.769035Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA" PathDescription { Self { Name: "DirA" PathId: 
29 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 28 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 } ChildrenExist: true } Children { Name: "DirB" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table2" PathId: 32 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 29 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:51.770101Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:38:51.770407Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" took 320us result status StatusSuccess 2025-03-28T09:38:51.770956Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/Table2" PathDescription { Self { Name: "Table2" PathId: 32 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 
ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 32 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:51.772195Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:38:51.772430Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" took 246us result status StatusSuccess 2025-03-28T09:38:51.772791Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB" PathDescription { Self { Name: "DirB" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 29 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "Table3" PathId: 33 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 30 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:51.774004Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:38:51.774328Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" took 347us result status StatusSuccess 2025-03-28T09:38:51.774797Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000000Z_incremental/DirA/DirB/Table3" PathDescription { Self { Name: "Table3" PathId: 33 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000010 ParentPathId: 30 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_deleted" Type: "Bool" TypeId: 6 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 32 PathsLimit: 10000 ShardsInside: 18 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 } UserAttributes { Key: "__incremental_backup" Value: "{}" } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 33 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::CreateTableWithCompactionStrategies >> TSchemeShardTest::ReadOnlyMode [GOOD] >> TSchemeShardTest::PathErrors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is 
[0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! 
new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! 
new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! 
new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... boot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:85:2114] Leader for TabletID 72057594037927937 is [22:85:2114] sender: [22:139:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:80:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:83:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:84:2057] recipient: [23:82:2113] Leader for TabletID 72057594037927937 is [23:85:2114] sender: [23:86:2057] recipient: [23:82:2113] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! new actor is[23:85:2114] Leader for TabletID 72057594037927937 is [23:85:2114] sender: [23:139:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:81:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:84:2057] recipient: [24:83:2113] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:85:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:87:2057] recipient: [24:83:2113] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:86:2114] Leader for TabletID 72057594037927937 is [24:86:2114] sender: [24:104:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:83:2057] recipient: [25:36:2083] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:86:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:87:2057] recipient: [25:85:2115] Leader for TabletID 72057594037927937 is [25:88:2116] sender: [25:89:2057] recipient: [25:85:2115] !Reboot 72057594037927937 (actor [25:56:2097]) rebooted! !Reboot 72057594037927937 (actor [25:56:2097]) tablet resolver refreshed! new actor is[25:88:2116] Leader for TabletID 72057594037927937 is [25:88:2116] sender: [25:142:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:54:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:57:2057] recipient: [26:51:2095] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:74:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:83:2057] recipient: [26:36:2083] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:86:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [26:56:2097] sender: [26:87:2057] recipient: [26:85:2115] Leader for TabletID 72057594037927937 is [26:88:2116] sender: [26:89:2057] recipient: [26:85:2115] !Reboot 72057594037927937 (actor [26:56:2097]) rebooted! !Reboot 72057594037927937 (actor [26:56:2097]) tablet resolver refreshed! new actor is[26:88:2116] Leader for TabletID 72057594037927937 is [26:88:2116] sender: [26:142:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:54:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:57:2057] recipient: [27:51:2095] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:74:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:84:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:87:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:88:2057] recipient: [27:86:2115] Leader for TabletID 72057594037927937 is [27:89:2116] sender: [27:90:2057] recipient: [27:86:2115] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! new actor is[27:89:2116] Leader for TabletID 72057594037927937 is [27:89:2116] sender: [27:107:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:86:2057] recipient: [28:36:2083] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:89:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:90:2057] recipient: [28:88:2117] Leader for TabletID 72057594037927937 is [28:91:2118] sender: [28:92:2057] recipient: [28:88:2117] !Reboot 72057594037927937 (actor [28:56:2097]) rebooted! !Reboot 72057594037927937 (actor [28:56:2097]) tablet resolver refreshed! new actor is[28:91:2118] Leader for TabletID 72057594037927937 is [28:91:2118] sender: [28:145:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:86:2057] recipient: [29:36:2083] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:89:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:90:2057] recipient: [29:88:2117] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:92:2057] recipient: [29:88:2117] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! new actor is[29:91:2118] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:145:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:87:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:90:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:91:2057] recipient: [30:89:2117] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:93:2057] recipient: [30:89:2117] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:92:2118] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:146:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:90:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:93:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:94:2057] recipient: [31:92:2120] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:96:2057] recipient: [31:92:2120] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! new actor is[31:95:2121] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:149:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:90:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:93:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:94:2057] recipient: [32:92:2120] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:96:2057] recipient: [32:92:2120] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! 
new actor is[32:95:2121] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:149:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:38:51.803016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:51.803131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:51.803185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:51.803224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:51.803292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:51.803325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:51.803384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:51.803470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:51.803778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:51.891433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:51.891490Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:51.906659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:51.906936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:51.907089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:51.912546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:51.912762Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:51.913378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:51.913598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 
2025-03-28T09:38:51.915480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:51.916735Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:51.916793Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:51.916958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:51.917002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:51.917040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:51.917129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:51.923846Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:52.055229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:52.055467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:52.055663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:52.055899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:52.055959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:52.058188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:52.058302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:52.058481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:52.058533Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:52.058570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:52.058602Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:52.060337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:52.060390Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:52.060437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 
128 2025-03-28T09:38:52.061884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:52.061928Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:52.061980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:52.062024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:52.065896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:52.067518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:52.067689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:52.068657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:52.068794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:52.068854Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:52.069113Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:52.069168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:52.069340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:52.069412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:52.071310Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:52.071369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:52.071535Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:52.071576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:52.071963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2025-03-28T09:38:52.072006Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:52.072100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:52.072133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:52.072170Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:52.072201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:52.072243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:52.072281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:52.072316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:38:52.072373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:52.072442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:52.072479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:52.072510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:52.074519Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:52.074628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:52.074664Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
e.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:52.365288Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:38:52.365323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:38:52.365358Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-03-28T09:38:52.366000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:38:52.366042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-03-28T09:38:52.366131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:38:52.366161Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:38:52.366215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:38:52.366257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:52.366288Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:52.366322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:38:52.366355Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-28T09:38:52.372982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:38:52.373432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:38:52.378570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:38:52.378830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:38:52.379102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:52.379275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:38:52.379379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:38:52.379834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:38:52.379883Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-03-28T09:38:52.379982Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-03-28T09:38:52.380014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-28T09:38:52.380048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-03-28T09:38:52.380076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-28T09:38:52.380106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-03-28T09:38:52.380595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:52.381015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:52.381051Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:38:52.381110Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-03-28T09:38:52.381132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-28T09:38:52.381158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-03-28T09:38:52.381189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-28T09:38:52.381221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-03-28T09:38:52.381290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:375:2343] message: TxId: 101 2025-03-28T09:38:52.381331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-28T09:38:52.381376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:38:52.381410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:38:52.381543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:38:52.381605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-03-28T09:38:52.381633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-03-28T09:38:52.381665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:38:52.381683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-03-28T09:38:52.381700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-03-28T09:38:52.381751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:38:52.385022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:38:52.385195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:376:2344] TestWaitNotification: OK eventTxId 101 2025-03-28T09:38:52.385714Z node 
1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:38:52.385974Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 274us result status StatusSuccess 2025-03-28T09:38:52.386906Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 
UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardTest::ConsistentCopyTablesForBackup [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 26476, MsgBus: 8736 2025-03-28T09:38:47.010664Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792547442568504:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:47.010801Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ae1/r3tmp/tmp4xar3v/pdisk_1.dat 2025-03-28T09:38:47.333035Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26476, node 1 2025-03-28T09:38:47.408896Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:47.409020Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:47.410790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:47.571813Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:47.571843Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:47.571874Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:47.571977Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8736 TClient is connected to server localhost:8736 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:48.194064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:49.312943Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792556032503751:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.313079Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.741563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.878904Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792556032503853:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.878969Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792556032503858:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.878984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.886534Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:38:49.895606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792556032503860:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:38:49.987183Z node 1 :TX_PROXY ERROR: Actor# [1:7486792556032503912:2396] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:51.230334Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486792564622438694:2397], status: GENERIC_ERROR, issues:
:3:25: Error: Invalid value "invalid-uuid" for type Uuid 2025-03-28T09:38:51.230580Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWVjOWRiNWQtMTY1NzZhN2EtYzQ4NzQ5ODAtMWU2ZDAwM2Y=, ActorId: [1:7486792556032503733:2328], ActorState: ExecuteState, TraceId: 01jqe1wp6p4gyps0qpsy2sw5hs, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: 2025-03-28T09:38:52.010676Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792547442568504:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:52.010752Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpTypes::SelectNull [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64 >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdatePk [GOOD] Test command err: Trying to start YDB, gRPC: 4042, MsgBus: 21474 2025-03-28T09:38:48.693426Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792550133236825:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:48.693531Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ac4/r3tmp/tmpu1tZxs/pdisk_1.dat 2025-03-28T09:38:48.963940Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4042, node 1 2025-03-28T09:38:49.025005Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:49.025020Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:49.025030Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:49.025147Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:38:49.054535Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:49.054653Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:49.056191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21474 TClient is connected to server localhost:21474 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:49.457848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:49.476085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:49.580410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:49.713346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:49.772701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:51.105282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792563018140481:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:51.105400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:51.309492Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:51.335361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:51.358739Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:51.380971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:51.428805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:51.491942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:51.523299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792563018140995:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:51.523366Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:51.523465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792563018141000:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:51.526225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:51.533779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792563018141002:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:51.599289Z node 1 :TX_PROXY ERROR: Actor# [1:7486792563018141055:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:3:20: Warning: At function: AsStruct
:4:31: Warning: At function: +
:4:31: Warning: Integral type implicit bitcast: Optional and Int32, code: 1107
:5:27: Error: At function: KiUpdateTable!
:5:27: Error: Cannot update primary key column: Group >> TSchemeShardTest::AlterPersQueueGroup [GOOD] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema >> TSchemeShardTest::CreateTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::CreateSystemColumn >> TSchemeShardTest::PathErrors [GOOD] >> TSchemeShardTest::ManyDirs >> KqpPragma::ResetPerQuery |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpExplain::SqlIn [GOOD] >> KqpExplain::SsaProgramInJsonPlan >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueries >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::AlterBlockStoreVolume |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TSchemeShardTest::CopyLockedTableForBackup [GOOD] >> TSchemeShardTest::ConfigColumnFamily >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce >> TSchemeShardTest::CreateSystemColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:38:39.846247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:39.846324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:39.846354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:39.846377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:39.846410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:39.846436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:39.846480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:39.846546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:39.846900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:39.914781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:39.914829Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:39.924908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:39.925079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:39.925221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2025-03-28T09:38:39.929588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:39.929781Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:39.930235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:39.930387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:39.931712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:39.932689Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:39.932731Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:39.932863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:39.932898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:39.932939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:39.932994Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:39.937751Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:40.052979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:40.053115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:40.053258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:40.053412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:40.053446Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:40.055101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:40.055199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:40.055313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:40.055345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:40.055367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 
1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:40.055388Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:40.056624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:40.056662Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:40.056686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:40.057672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:40.057698Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:40.057722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:40.057760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:40.060031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:40.061193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:40.061317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:40.061981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:40.062071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:40.062110Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:40.062303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:40.062336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:40.062441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:40.062501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:40.063725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:40.063757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path 
id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:40.063882Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:40.063908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:40.064141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:40.064167Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:40.064229Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:40.064252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:40.064285Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:40.064324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:40.064354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:40.064381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:40.064403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:38:40.064432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:40.064477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:40.064501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:40.064522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:40.065779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:40.065866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:40.065891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
8T09:38:54.219520Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:54.220490Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:54.220586Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:54.220618Z node 15 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:54.220694Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2025-03-28T09:38:54.220748Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2025-03-28T09:38:54.220914Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:54.222585Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2025-03-28T09:38:54.222678Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2025-03-28T09:38:54.222951Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:54.223075Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 64424511595 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:54.223129Z node 15 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-28T09:38:54.223462Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 128 -> 240 2025-03-28T09:38:54.223528Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2025-03-28T09:38:54.223757Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:54.223821Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:38:54.223873Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:38:54.225388Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:54.225419Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2025-03-28T09:38:54.225532Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:38:54.225624Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:54.225647Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 1 2025-03-28T09:38:54.225673Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:206:2208], at schemeshard: 72057594046678944, txId: 101, path id: 2 FAKE_COORDINATOR: Erasing txId 101 2025-03-28T09:38:54.225892Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:38:54.225932Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:38:54.226083Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:38:54.226125Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:38:54.226175Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 1/1 2025-03-28T09:38:54.226220Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:38:54.226269Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2025-03-28T09:38:54.226310Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2025-03-28T09:38:54.226354Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:38:54.226392Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:38:54.226562Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:38:54.226615Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2025-03-28T09:38:54.226659Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-28T09:38:54.226710Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-28T09:38:54.227510Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:38:54.227594Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:38:54.227630Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:38:54.227692Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:38:54.227756Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:54.228360Z node 15 
:FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:38:54.228437Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2025-03-28T09:38:54.228466Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2025-03-28T09:38:54.228494Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:38:54.228523Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:38:54.228596Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2025-03-28T09:38:54.232266Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:38:54.232522Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2025-03-28T09:38:54.232847Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2025-03-28T09:38:54.232936Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2025-03-28T09:38:54.233492Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:38:54.233622Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:38:54.233684Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [15:407:2374] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2025-03-28T09:38:54.237367Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpCreatePersQueueGroup CreatePersQueueGroup { Name: "Topic1" TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 121 } MeteringMode: METERING_MODE_RESERVED_CAPACITY } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:54.237794Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TCreatePQ Propose, path: /MyRoot/USER_1/Topic1, opId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:38:54.238088Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_1/Topic1', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes, at schemeshard: 72057594046678944 2025-03-28T09:38:54.240699Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusResourceExhausted Reason: "Check failed: path: \'/MyRoot/USER_1/Topic1\', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes" TxId: 102 
SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:54.240952Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot/USER_1, subject: , status: StatusResourceExhausted, reason: Check failed: path: '/MyRoot/USER_1/Topic1', error: database size limit exceeded, limit: 1 bytes, available: 1 bytes, delta: 363 bytes, operation: CREATE PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:38:54.241302Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:38:54.241364Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:38:54.241870Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:38:54.241988Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:38:54.242039Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:414:2381] TestWaitNotification: OK eventTxId 102 >> KqpScripting::SelectNullType [GOOD] >> KqpScripting::StreamDdlAndDml >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateSystemColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:38:36.447204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:36.447302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:36.447330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:36.447352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:36.447386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:36.447402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:36.447437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:36.447498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:36.447802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:36.512466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:36.512517Z node 1 :IMPORT WARN: 
Table profiles were not loaded 2025-03-28T09:38:36.526461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:36.526701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:36.526882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:36.532153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:36.532368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:36.532953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.533136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:36.534746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.536069Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:36.536124Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.536289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:36.536331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:36.536382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:36.536450Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.542379Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:36.631701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:36.631892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.632101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:36.632366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:36.632417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.634391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.634515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:36.634705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.634756Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:36.634793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:36.634833Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:36.636711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.636768Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:36.636802Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:36.638340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.638381Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.638419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.638472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.642004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:36.643724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:36.643905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:36.644841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.644961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:36.645002Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.645299Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:36.645350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.645510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:36.645591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:36.647324Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:36.647369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:36.647523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.647566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:36.647887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.647926Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:36.648019Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:36.648054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.648086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:36.648128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.648161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:36.648194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.648233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:38:36.648275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:36.648345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:36.648379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:36.648409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:36.650144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:36.650246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:36.650277Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
eadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:38:54.555798Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:38:54.555840Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:38:54.555895Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2025-03-28T09:38:54.555969Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [15:341:2320] message: TxId: 102 2025-03-28T09:38:54.556025Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:38:54.556079Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:38:54.556116Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:38:54.556235Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:38:54.557717Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:38:54.557774Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:342:2321] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2025-03-28T09:38:54.560465Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "SystemColumnInCopyAllowed" CopyFromTable: "/MyRoot/SystemColumnAllowed" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:54.560714Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TCopyTable Propose, path: /MyRoot/SystemColumnInCopyAllowed, opId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:54.561119Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: SystemColumnInCopyAllowed, child id: [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:38:54.561176Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 0 2025-03-28T09:38:54.561218Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:38:54.561266Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:38:54.561345Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:38:54.561490Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:54.561946Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:54.562000Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:38:54.563844Z node 15 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 3, at schemeshard: 72057594046678944 2025-03-28T09:38:54.564010Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/SystemColumnInCopyAllowed 2025-03-28T09:38:54.564226Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:54.564286Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:54.564475Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:38:54.564549Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:54.564595Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 1 2025-03-28T09:38:54.564641Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:206:2208], at schemeshard: 72057594046678944, txId: 103, path id: 3 2025-03-28T09:38:54.564698Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:54.564759Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046678944 2025-03-28T09:38:54.564972Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-03-28T09:38:54.565943Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:38:54.566036Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:38:54.566075Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:38:54.566122Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-03-28T09:38:54.566164Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:38:54.567559Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:38:54.567635Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 3 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:38:54.567664Z node 15 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:38:54.567690Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 1 2025-03-28T09:38:54.567720Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:38:54.567789Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2025-03-28T09:38:54.568804Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2025-03-28T09:38:54.568953Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72057594037968897 2025-03-28T09:38:54.569009Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2025-03-28T09:38:54.569318Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 3 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2025-03-28T09:38:54.569536Z node 15 :HIVE INFO: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2025-03-28T09:38:54.570043Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-03-28T09:38:54.570091Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2025-03-28T09:38:54.570224Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-03-28T09:38:54.570275Z node 15 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2025-03-28T09:38:54.570351Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2025-03-28T09:38:54.570458Z node 15 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 103:0 2 -> 3 2025-03-28T09:38:54.571704Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:38:54.572805Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:38:54.574355Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:38:54.574634Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 
2025-03-28T09:38:54.574715Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 ProgressState at tablet# 72057594046678944 2025-03-28T09:38:54.574772Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TCopyTable TConfigureParts operationId# 103:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 103:0 seqNo# 2:2 at tablet# 72057594046678944 2025-03-28T09:38:54.578053Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2025-03-28T09:38:54.578196Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2025-03-28T09:38:54.578268Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409547 2025-03-28T09:38:54.578293Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409546 TestModificationResult got TxId: 103, wait until txId: 103 >> TSchemeShardTest::AlterBlockStoreVolume [GOOD] >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions >> KqpYql::InsertCV-useSink [GOOD] >> KqpYql::JsonNumberPrecision [GOOD] >> TSchemeShardTest::ConfigColumnFamily [GOOD] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes >> KqpScripting::StreamExecuteYqlScriptScanCancelation [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce >> KqpScripting::StreamExecuteYqlScriptData [GOOD] >> KqpScripting::StreamExecuteYqlScriptEmptyResults >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi >> KqpYql::FlexibleTypes >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::AdoptDropSolomon >> KqpYql::InsertIgnore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonNumberPrecision [GOOD] Test command err: Trying to start YDB, gRPC: 5170, MsgBus: 25924 2025-03-28T09:38:47.010718Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792544659927710:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:47.010762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000acd/r3tmp/tmp41mvHf/pdisk_1.dat 2025-03-28T09:38:47.366480Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:47.404078Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:47.404190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:47.405707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5170, node 1 2025-03-28T09:38:47.571819Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:47.571853Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty 
maybe) 2025-03-28T09:38:47.571861Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:47.571989Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25924 TClient is connected to server localhost:25924 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:48.194637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:48.229640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:48.375233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:48.521228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:48.593218Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:49.372548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792553249864052:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.372650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.740848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.765986Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.792162Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.816411Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.841630Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.895822Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.930600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792553249864591:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.930669Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.930722Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792553249864596:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.934038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:49.942553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792553249864598:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:49.997559Z node 1 :TX_PROXY ERROR: Actor# [1:7486792553249864651:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 32298, MsgBus: 32731 2025-03-28T09:38:51.711698Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792565420412811:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:51.711812Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000acd/r3tmp/tmpySjEOE/pdisk_1.dat 2025-03-28T09:38:51.816111Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32298, node 2 2025-03-28T09:38:51.856246Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:51.856328Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:51.858041Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:51.866677Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:51.866716Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:51.866724Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:51.866808Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32731 TClient is connected to server localhost:32731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:52.228724Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:52.244935Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:38:52.318773Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:52.451277Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:52.525368Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:54.124530Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792578305316473:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.124602Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.151432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.175914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.200381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.227856Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.254359Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.283579Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.321643Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792578305316983:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.321716Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.321718Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792578305316988:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.324521Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:54.332515Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792578305316990:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:38:54.423688Z node 2 :TX_PROXY ERROR: Actor# [2:7486792578305317045:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCV-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 22838, MsgBus: 4078 2025-03-28T09:38:47.010521Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792547533508259:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:47.010587Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ad8/r3tmp/tmps1YLWx/pdisk_1.dat 2025-03-28T09:38:47.334952Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:47.381518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:47.382223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:47.383822Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22838, node 1 2025-03-28T09:38:47.571820Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:47.571845Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:47.571855Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:47.571958Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4078 TClient is connected to server localhost:4078 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:48.228260Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:38:48.251430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:48.369042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:48.478593Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:48.552709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:49.344045Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792556123444592:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.344256Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.740770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.767104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.794122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.821660Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.848552Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.894744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.930950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792556123445132:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.931002Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792556123445137:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.931017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.934305Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:49.943260Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792556123445139:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:50.021653Z node 1 :TX_PROXY ERROR: Actor# [1:7486792560418412489:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:50.773133Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-03-28T09:38:50.783226Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037914 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:38:50.783436Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037914 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:38:50.783620Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792560418412806:2500], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7486792560418412786:2500]Got CONSTRAINT VIOLATION for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7486792560418412806:2500].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-28T09:38:50.784049Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792560418412799:2500], SessionActorId: [1:7486792560418412786:2500], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7486792560418412786:2500]. isRollback=0 2025-03-28T09:38:50.784245Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjI4MjBjMzYtZjllY2ZjMWEtMzE4ODQxNWMtM2NjYjliOGY=, ActorId: [1:7486792560418412786:2500], ActorState: ExecuteState, TraceId: 01jqe1wnp65nt9jy2k625v2075, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7486792560418412800:2500] from: [1:7486792560418412799:2500] 2025-03-28T09:38:50.784318Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486792560418412800:2500] TxId: 281474976710671. Ctx: { TraceId: 01jqe1wnp65nt9jy2k625v2075, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZjI4MjBjMzYtZjllY2ZjMWEtMzE4ODQxNWMtM2NjYjliOGY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/Test`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:38:50.785028Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjI4MjBjMzYtZjllY2ZjMWEtMzE4ODQxNWMtM2NjYjliOGY=, ActorId: [1:7486792560418412786:2500], ActorState: ExecuteState, TraceId: 01jqe1wnp65nt9jy2k625v2075, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 Trying to start YDB, gRPC: 16159, MsgBus: 29216 2025-03-28T09:38:51.502325Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792563558209160:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:51.502389Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ad8/r3tmp/tmpTAvBSj/pdisk_1.dat 2025-03-28T09:38:51.640280Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16159, node 2 2025-03-28T09:38:51.673171Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:51.673238Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:51.674653Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:51.708064Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:51.708087Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:51.708093Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:51.708204Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29216 TClient is connected to server localhost:29216 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:52.046308Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:52.058738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:52.129333Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:38:52.258348Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:52.328664Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:53.986375Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792572148145511:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:53.986444Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.004367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.027853Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.050960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.073489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.097489Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.124673Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.158966Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792576443113314:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.159066Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.159149Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792576443113319:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.161943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:54.171166Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792576443113321:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:38:54.249627Z node 2 :TX_PROXY ERROR: Actor# [2:7486792576443113375:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:55.217765Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792580738080986:2501], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jqe1wswj0sr4sq3x4ge5gqg5. SessionId : ydb://session/3?node_id=2&id=OGQxMWUyMWEtOTQ0YTAzZjAtZTA0Njk1MjEtYmQ2OGMxNTc=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2025-03-28T09:38:55.218043Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792580738080988:2502], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01jqe1wswj0sr4sq3x4ge5gqg5. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=OGQxMWUyMWEtOTQ0YTAzZjAtZTA0Njk1MjEtYmQ2OGMxNTc=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7486792580738080983:2492], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:38:55.218370Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OGQxMWUyMWEtOTQ0YTAzZjAtZTA0Njk1MjEtYmQ2OGMxNTc=, ActorId: [2:7486792580738080936:2492], ActorState: ExecuteState, TraceId: 01jqe1wswj0sr4sq3x4ge5gqg5, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Conflict with existing key., code: 2012 >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64 [GOOD] >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] >> KqpYql::EvaluateExprPgNull [GOOD] >> KqpYql::EvaluateExprYsonAndType >> TSchemeShardTest::AdoptDropSolomon [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... TabletID 72057594037927937 is [13:56:2097] sender: [13:90:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [13:56:2097] sender: [13:91:2057] recipient: [13:89:2117] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:93:2057] recipient: [13:89:2117] !Reboot 72057594037927937 (actor [13:56:2097]) rebooted! !Reboot 72057594037927937 (actor [13:56:2097]) tablet resolver refreshed! 
new actor is[13:92:2118] Leader for TabletID 72057594037927937 is [13:92:2118] sender: [13:146:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:54:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:57:2057] recipient: [14:51:2095] Leader for TabletID 72057594037927937 is [14:56:2097] sender: [14:74:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:54:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:57:2057] recipient: [15:51:2095] Leader for TabletID 72057594037927937 is [15:56:2097] sender: [15:74:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:54:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:57:2057] recipient: [16:51:2095] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:74:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:76:2057] recipient: [16:36:2083] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:79:2057] recipient: [16:78:2110] Leader for TabletID 72057594037927937 is [16:56:2097] sender: [16:80:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:82:2057] recipient: [16:78:2110] !Reboot 72057594037927937 (actor [16:56:2097]) rebooted! !Reboot 72057594037927937 (actor [16:56:2097]) tablet resolver refreshed! new actor is[16:81:2111] Leader for TabletID 72057594037927937 is [16:81:2111] sender: [16:135:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:54:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:57:2057] recipient: [17:51:2095] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:74:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:76:2057] recipient: [17:36:2083] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:79:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [17:56:2097] sender: [17:80:2057] recipient: [17:78:2110] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:82:2057] recipient: [17:78:2110] !Reboot 72057594037927937 (actor [17:56:2097]) rebooted! !Reboot 72057594037927937 (actor [17:56:2097]) tablet resolver refreshed! 
new actor is[17:81:2111] Leader for TabletID 72057594037927937 is [17:81:2111] sender: [17:135:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:54:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:57:2057] recipient: [18:51:2095] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:74:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:77:2057] recipient: [18:36:2083] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:80:2057] recipient: [18:79:2110] Leader for TabletID 72057594037927937 is [18:56:2097] sender: [18:81:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:83:2057] recipient: [18:79:2110] !Reboot 72057594037927937 (actor [18:56:2097]) rebooted! !Reboot 72057594037927937 (actor [18:56:2097]) tablet resolver refreshed! new actor is[18:82:2111] Leader for TabletID 72057594037927937 is [18:82:2111] sender: [18:136:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:54:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:57:2057] recipient: [19:51:2095] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:74:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:80:2057] recipient: [19:36:2083] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:83:2057] recipient: [19:82:2113] Leader for TabletID 72057594037927937 is [19:56:2097] sender: [19:84:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:86:2057] recipient: [19:82:2113] !Reboot 72057594037927937 (actor [19:56:2097]) rebooted! !Reboot 72057594037927937 (actor [19:56:2097]) tablet resolver refreshed! new actor is[19:85:2114] Leader for TabletID 72057594037927937 is [19:85:2114] sender: [19:139:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:54:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:57:2057] recipient: [20:50:2095] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:74:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:80:2057] recipient: [20:36:2083] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:83:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:56:2097] sender: [20:84:2057] recipient: [20:82:2113] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:86:2057] recipient: [20:82:2113] !Reboot 72057594037927937 (actor [20:56:2097]) rebooted! !Reboot 72057594037927937 (actor [20:56:2097]) tablet resolver refreshed! 
new actor is[20:85:2114] Leader for TabletID 72057594037927937 is [20:85:2114] sender: [20:139:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:54:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:57:2057] recipient: [21:52:2095] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:74:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:81:2057] recipient: [21:36:2083] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:84:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:56:2097] sender: [21:85:2057] recipient: [21:83:2113] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:87:2057] recipient: [21:83:2113] !Reboot 72057594037927937 (actor [21:56:2097]) rebooted! !Reboot 72057594037927937 (actor [21:56:2097]) tablet resolver refreshed! new actor is[21:86:2114] Leader for TabletID 72057594037927937 is [21:86:2114] sender: [21:140:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:54:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:57:2057] recipient: [22:51:2095] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:74:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:84:2057] recipient: [22:36:2083] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:87:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [22:56:2097] sender: [22:88:2057] recipient: [22:86:2116] Leader for TabletID 72057594037927937 is [22:89:2117] sender: [22:90:2057] recipient: [22:86:2116] !Reboot 72057594037927937 (actor [22:56:2097]) rebooted! !Reboot 72057594037927937 (actor [22:56:2097]) tablet resolver refreshed! new actor is[22:89:2117] Leader for TabletID 72057594037927937 is [22:89:2117] sender: [22:143:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:54:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:57:2057] recipient: [23:52:2095] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:74:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:84:2057] recipient: [23:36:2083] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:87:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [23:56:2097] sender: [23:88:2057] recipient: [23:86:2116] Leader for TabletID 72057594037927937 is [23:89:2117] sender: [23:90:2057] recipient: [23:86:2116] !Reboot 72057594037927937 (actor [23:56:2097]) rebooted! !Reboot 72057594037927937 (actor [23:56:2097]) tablet resolver refreshed! 
new actor is[23:89:2117] Leader for TabletID 72057594037927937 is [23:89:2117] sender: [23:143:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:54:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:57:2057] recipient: [24:51:2095] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:74:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:85:2057] recipient: [24:36:2083] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:87:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [24:56:2097] sender: [24:89:2057] recipient: [24:88:2116] Leader for TabletID 72057594037927937 is [24:90:2117] sender: [24:91:2057] recipient: [24:88:2116] !Reboot 72057594037927937 (actor [24:56:2097]) rebooted! !Reboot 72057594037927937 (actor [24:56:2097]) tablet resolver refreshed! new actor is[24:90:2117] Leader for TabletID 72057594037927937 is [24:90:2117] sender: [24:144:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:54:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:57:2057] recipient: [25:51:2095] Leader for TabletID 72057594037927937 is [25:56:2097] sender: [25:74:2057] recipient: [25:14:2061] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 23818, MsgBus: 21371 2025-03-28T09:38:47.010714Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792545798953845:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:47.010779Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ad5/r3tmp/tmp4syanu/pdisk_1.dat 2025-03-28T09:38:47.339727Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23818, node 1 2025-03-28T09:38:47.422008Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:47.422135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:47.423640Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:47.571783Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:47.571827Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:47.571836Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:47.571947Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21371 TClient is connected to server localhost:21371 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:48.204883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:48.229616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:48.348572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:48.497076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:48.559353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:49.183278Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792554388890155:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.183438Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.740938Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.769746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.794245Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.816214Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.843587Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.875369Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:49.975404Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792554388890719:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.975492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.975562Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792554388890724:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.978466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:49.985705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792554388890726:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:50.081045Z node 1 :TX_PROXY ERROR: Actor# [1:7486792558683858079:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:50.943647Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486792558683858339:2492], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:2:34: Error: Pragma auth not supported inside Kikimr query., code: 2016 2025-03-28T09:38:50.943892Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWY5NTZkNDUtYjllZTEyMWYtZThmMDE5NjMtMjViY2Y5MzQ=, ActorId: [1:7486792558683858330:2487], ActorState: ExecuteState, TraceId: 01jqe1wnv2d5dgxwqh3wzfdf0d, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 6537, MsgBus: 29106 2025-03-28T09:38:51.451895Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792564499378852:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:51.451995Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ad5/r3tmp/tmpNB0GUM/pdisk_1.dat 2025-03-28T09:38:51.563608Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6537, node 2 2025-03-28T09:38:51.607301Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:51.607393Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:51.609138Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:51.617894Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:51.617917Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:51.617922Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:51.618028Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29106 TClient is connected to server localhost:29106 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:51.950022Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:51.966291Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:38:52.033422Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:52.156501Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:52.226204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:54.039488Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792577384282525:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.039602Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.075166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.100849Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.124704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.151093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.177436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.203009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.237464Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792577384283032:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.237527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792577384283037:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.237532Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.240438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:54.247340Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792577384283039:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:38:54.311433Z node 2 :TX_PROXY ERROR: Actor# [2:7486792577384283092:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:55.301262Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:38:56.232873Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154736253, txId: 281474976715675] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns+EnableTableDatetime64 [GOOD] Test command err: Trying to start YDB, gRPC: 30614, MsgBus: 15473 2025-03-28T09:38:44.668511Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792534549973728:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:44.668638Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00146f/r3tmp/tmpehViw3/pdisk_1.dat 2025-03-28T09:38:44.933205Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30614, node 1 2025-03-28T09:38:44.989536Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:44.989556Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:44.989563Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:44.989661Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:38:45.034253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:45.034383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:45.036047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15473 TClient is connected to server localhost:15473 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:38:45.420823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:45.445620Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:45.561789Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:45.723790Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:45.785896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:46.843049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792543139910110:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:46.843175Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:47.089467Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:47.115252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:47.141585Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:47.164074Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:47.188059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:47.218002Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:47.253316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792547434877917:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:47.253384Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:47.253427Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792547434877922:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:47.256213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:47.263964Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792547434877924:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:38:47.342818Z node 1 :TX_PROXY ERROR: Actor# [1:7486792547434877978:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 3472, MsgBus: 64403 2025-03-28T09:38:49.085862Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792553509655764:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:49.085916Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00146f/r3tmp/tmp90ONHW/pdisk_1.dat 2025-03-28T09:38:49.168110Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3472, node 2 2025-03-28T09:38:49.208090Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:49.208171Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:49.209614Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:49.223098Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:49.223122Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:49.223132Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:49.223239Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64403 TClient is connected to server localhost:64403 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:49.617803Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:49.633130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
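The recurring triple above — pool fetcher NOT_FOUND, TPoolCreatorActor "Scheduled retry for error: ... completed, doublechecking", then the TX_PROXY "path exist, request accepts it" line — is the default resource pool being created lazily and concurrently: every session that misses /Root/.metadata/workload_manager/pools/default schedules a creator, all but one lose the race, and the ERROR-severity line is the expected terminal state rather than a failure. A sketch of that create-if-missing convergence loop, with FetchPool/CreatePool and the status names as illustrative stand-ins, not the actual actor interface:

    // Models the retry logic visible in the log, not the real TPoolCreatorActor.
    enum class EStatus { Ok, NotFound, AlreadyExists, Retryable };

    static bool g_poolExists = false;               // simulated scheme state
    static EStatus FetchPool() { return g_poolExists ? EStatus::Ok : EStatus::NotFound; }
    static EStatus CreatePool() {
        if (g_poolExists) return EStatus::AlreadyExists;  // "path exist, request accepts it"
        g_poolExists = true;
        return EStatus::Retryable;                  // "completed, doublechecking"
    }

    bool EnsureDefaultPool(int maxRetries = 10) {
        for (int i = 0; i < maxRetries; ++i) {
            if (FetchPool() == EStatus::Ok)
                return true;                        // someone else already converged
            switch (CreatePool()) {
                case EStatus::Ok:
                case EStatus::AlreadyExists:
                    return true;                    // losing the race is success
                case EStatus::Retryable:
                    continue;                       // re-fetch on the next pass
                default:
                    return false;
            }
        }
        return false;
    }

Every node in this log walks the same loop, which is why the identical four-record sequence repeats once per freshly started database.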
2025-03-28T09:38:49.677674Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:49.818455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:49.910451Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:51.797430Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792562099592116:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:51.797545Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:51.839946Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:51.866831Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:51.890442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:51.916759Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:51.946188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:52.013828Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:52.053658Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792566394559925:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:52.053734Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:52.053788Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792566394559931:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:52.057149Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:52.065911Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792566394559933:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:38:52.130792Z node 2 :TX_PROXY ERROR: Actor# [2:7486792566394559988:3439] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 29454, MsgBus: 30761 2025-03-28T09:38:53.674726Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486792574306467437:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:53.674786Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00146f/r3tmp/tmp7Hequn/pdisk_1.dat 2025-03-28T09:38:53.775184Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29454, node 3 2025-03-28T09:38:53.813845Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:53.813908Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:53.815027Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:53.823103Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:53.823121Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:53.823128Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:53.823216Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30761 TClient is connected to server localhost:30761 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:54.207642Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:56.237910Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486792587191369984:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:56.237998Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:56.256643Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:38:56.296356Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486792587191370084:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:56.296448Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:56.296492Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486792587191370089:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:56.300584Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:38:56.312947Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486792587191370091:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:38:56.367989Z node 3 :TX_PROXY ERROR: Actor# [3:7486792587191370142:2392] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSchemeShardCheckProposeSize::CopyTables [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 16820, MsgBus: 28382 2025-03-28T09:38:47.010536Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792547798547297:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:47.010579Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ae2/r3tmp/tmpakp4bO/pdisk_1.dat 2025-03-28T09:38:47.342571Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:47.383419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:47.383547Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:47.384947Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16820, node 1 2025-03-28T09:38:47.571771Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:47.571791Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:47.571797Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:47.571900Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28382 TClient is connected to server localhost:28382 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:48.201881Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:38:48.229547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:48.383577Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:48.515707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:48.588375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:49.784195Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792556388483666:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:49.784340Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:50.031335Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:50.055049Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:50.080110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:50.105961Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:50.148182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:50.174743Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:50.245780Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792560683451480:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:50.245873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:50.245938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792560683451485:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:50.249238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:50.259107Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792560683451487:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:50.352976Z node 1 :TX_PROXY ERROR: Actor# [1:7486792560683451542:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:51.676067Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154731682, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 6923, MsgBus: 25549 2025-03-28T09:38:52.561429Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792568600347306:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:52.561490Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ae2/r3tmp/tmph79rqj/pdisk_1.dat 2025-03-28T09:38:52.661537Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6923, node 2 2025-03-28T09:38:52.689093Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:52.689186Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:52.690843Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:52.705582Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:52.705602Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:52.705614Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:52.705744Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:25549 TClient is connected to server localhost:25549 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:53.027754Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
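The KqpSnapshotManager records earlier in this line ("discarding snapshot; our snapshot: [step: ..., txId: ...] shutting down") identify an MVCC read version by a (step, txId) pair that is released when the stream query shuts down. A plausible reading of that key, sketched below; the field names mirror the log, while the lexicographic ordering is an assumption:

    #include <cstdint>
    #include <tuple>

    // Version key as printed: [step: 1743154736253, txId: 281474976715675].
    struct TSnapshotVersion {
        uint64_t Step = 0;   // coordinator plan step
        uint64_t TxId = 0;   // transaction id within that step

        bool operator<(const TSnapshotVersion& rhs) const {   // assumed ordering
            return std::tie(Step, TxId) < std::tie(rhs.Step, rhs.TxId);
        }
        bool operator==(const TSnapshotVersion& rhs) const {
            return Step == rhs.Step && TxId == rhs.TxId;
        }
    };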
2025-03-28T09:38:53.034788Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:53.083958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:53.207469Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:53.280682Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:54.969523Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792577190283676:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.969603Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:55.024437Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:55.051613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:55.077195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:55.101942Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:55.128998Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:55.158848Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:55.192743Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792581485251479:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:55.192823Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:55.192828Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792581485251484:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:55.195192Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:55.201938Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792581485251486:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:38:55.272870Z node 2 :TX_PROXY ERROR: Actor# [2:7486792581485251540:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:56.295903Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154736330, txId: 281474976715671] shutting down 2025-03-28T09:38:56.524133Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154736519, txId: 281474976715673] shutting down 2025-03-28T09:38:56.717335Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154736750, txId: 281474976715675] shutting down >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced >> KqpScripting::ScriptExplainCreatedTable |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] Test command err: canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "4@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "1@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 7 } result: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "4@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "1@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 7 } canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "4@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "1@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 7 } result: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "4@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: 
"0@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "1@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 7 } canonic: ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } result: ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } result: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "44@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "11@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 7 } result: ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "44@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "11@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 
SID: "33@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 7 } canonic: ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 3 Inherited: true } result: ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "3@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "6@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 3 Inherited: true } canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 3 Inherited: true } result: ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 3 Inherited: true } canonic: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "44@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "11@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" 
InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 7 } result: ACE { AccessType: 0 AccessRight: 59391 SID: "2@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 521 SID: "5@staff" InheritanceType: 1 Inherited: true } ACE { AccessType: 0 AccessRight: 59391 SID: "22@staff" InheritanceType: 1 } ACE { AccessType: 0 AccessRight: 521 SID: "44@staff" InheritanceType: 4 } ACE { AccessType: 0 AccessRight: 521 SID: "55@staff" InheritanceType: 5 } ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 } ACE { AccessType: 1 AccessRight: 32768 SID: "11@staff" InheritanceType: 0 } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 6 } ACE { AccessType: 1 AccessRight: 521 SID: "77@staff" InheritanceType: 7 } canonic: ACE { AccessType: 1 AccessRight: 59391 SID: "0@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "7@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "00@staff" InheritanceType: 3 Inherited: true } ACE { AccessType: 1 AccessRight: 59391 SID: "33@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "66@staff" InheritanceType: 2 Inherited: true } ACE { AccessType: 1 AccessRight: 521 SID: "77@sta ... athId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCopying Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:57.813959Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:38:57.814213Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 252us result status StatusSuccess 2025-03-28T09:38:57.814540Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table1" PathDescription { Self { Name: 
"Table1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:57.815298Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:38:57.815541Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy1" took 275us result status StatusSuccess 2025-03-28T09:38:57.815873Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy1" PathDescription { Self { Name: "Copy1" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000007 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Copy1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 6 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } 
TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:57.816647Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:38:57.816923Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy2" took 288us result status StatusSuccess 2025-03-28T09:38:57.817340Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy2" PathDescription { Self { Name: "Copy2" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Copy2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 
IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:57.818280Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Copy3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:38:57.818581Z node 14 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Copy3" took 300us result status StatusSuccess 2025-03-28T09:38:57.819054Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Copy3" PathDescription { Self { Name: "Copy3" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000009 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Copy3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 8 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 9 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpPragma::ResetPerQuery [GOOD] >> KqpPragma::Warning >> TSchemeShardDecimalTypesInTables::Parameterless [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false >> KqpScripting::StreamExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex >> TSchemeShardTest::AlterTableAndAfterSplit [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] >> TVectorIndexTests::CreateTablePrefix >> KqpYql::ColumnNameConflict >> 
TAsyncIndexTests::SplitMainWithReboots[PipeResets] >> KqpYql::RefSelect >> KqpScripting::StreamDdlAndDml [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:87:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:87:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:86:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:86:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:87:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:87:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:92:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:94:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:93:2120] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:147:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7 is [27:56:2097] sender: [27:89:2057] recipient: [27:36:2083] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:92:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [27:56:2097] sender: [27:93:2057] recipient: [27:91:2119] Leader for TabletID 72057594037927937 is [27:94:2120] sender: [27:95:2057] recipient: [27:91:2119] !Reboot 72057594037927937 (actor [27:56:2097]) rebooted! !Reboot 72057594037927937 (actor [27:56:2097]) tablet resolver refreshed! 
new actor is[27:94:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:54:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:57:2057] recipient: [28:51:2095] Leader for TabletID 72057594037927937 is [28:56:2097] sender: [28:74:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:54:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:57:2057] recipient: [29:51:2095] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:74:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:76:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:78:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:80:2057] recipient: [30:79:2110] Leader for TabletID 72057594037927937 is [30:81:2111] sender: [30:82:2057] recipient: [30:79:2110] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:81:2111] Leader for TabletID 72057594037927937 is [30:81:2111] sender: [30:135:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:76:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:79:2057] recipient: [31:78:2110] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:80:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:81:2111] sender: [31:82:2057] recipient: [31:78:2110] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! 
new actor is[31:81:2111] Leader for TabletID 72057594037927937 is [31:81:2111] sender: [31:135:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:77:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:80:2057] recipient: [32:79:2110] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:81:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:82:2111] sender: [32:83:2057] recipient: [32:79:2110] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! new actor is[32:82:2111] Leader for TabletID 72057594037927937 is [32:82:2111] sender: [32:136:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:80:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:83:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:84:2057] recipient: [33:82:2113] Leader for TabletID 72057594037927937 is [33:85:2114] sender: [33:86:2057] recipient: [33:82:2113] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! new actor is[33:85:2114] Leader for TabletID 72057594037927937 is [33:85:2114] sender: [33:139:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:80:2057] recipient: [34:36:2083] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:83:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:84:2057] recipient: [34:82:2113] Leader for TabletID 72057594037927937 is [34:85:2114] sender: [34:86:2057] recipient: [34:82:2113] !Reboot 72057594037927937 (actor [34:56:2097]) rebooted! !Reboot 72057594037927937 (actor [34:56:2097]) tablet resolver refreshed! 
new actor is[34:85:2114] Leader for TabletID 72057594037927937 is [34:85:2114] sender: [34:139:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:81:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:84:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:85:2057] recipient: [35:83:2113] Leader for TabletID 72057594037927937 is [35:86:2114] sender: [35:87:2057] recipient: [35:83:2113] !Reboot 72057594037927937 (actor [35:56:2097]) rebooted! !Reboot 72057594037927937 (actor [35:56:2097]) tablet resolver refreshed! new actor is[35:86:2114] Leader for TabletID 72057594037927937 is [35:86:2114] sender: [35:140:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:84:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:86:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:88:2057] recipient: [36:87:2116] Leader for TabletID 72057594037927937 is [36:89:2117] sender: [36:90:2057] recipient: [36:87:2116] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! new actor is[36:89:2117] Leader for TabletID 72057594037927937 is [36:89:2117] sender: [36:143:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:52:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:52:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:84:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:87:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:88:2057] recipient: [37:86:2116] Leader for TabletID 72057594037927937 is [37:89:2117] sender: [37:90:2057] recipient: [37:86:2116] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! 
new actor is[37:89:2117] Leader for TabletID 72057594037927937 is [37:89:2117] sender: [37:143:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:51:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:51:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:85:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:88:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:89:2057] recipient: [38:87:2116] Leader for TabletID 72057594037927937 is [38:90:2117] sender: [38:91:2057] recipient: [38:87:2116] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! new actor is[38:90:2117] Leader for TabletID 72057594037927937 is [38:90:2117] sender: [38:144:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] >> TKeyValueTest::TestConcatWorksNewApi [GOOD] >> TKeyValueTest::TestConcatToLongKey >> KqpYql::InsertCVList+useSink >> KqpYql::EvaluateIf >> KqpQuery::RowsLimit >> TVectorIndexTests::CreateTablePrefix [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] >> KqpYql::FlexibleTypes [GOOD] >> KqpYql::FromBytes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamDdlAndDml [GOOD] Test command err: Trying to start YDB, gRPC: 32361, MsgBus: 15472 2025-03-28T09:38:50.711593Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792559448781619:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:50.711714Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ab3/r3tmp/tmpQ8rF67/pdisk_1.dat 2025-03-28T09:38:50.988555Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32361, node 1 2025-03-28T09:38:51.055556Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:51.055575Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:51.055580Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:51.055670Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:38:51.079602Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2025-03-28T09:38:51.079730Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:51.081532Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15472 TClient is connected to server localhost:15472 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:51.426762Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:51.454793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:51.558110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:51.681136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:51.757775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:53.158116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792572333685285:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:53.158247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:53.398395Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:53.424243Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:53.451042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:53.474913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:53.500508Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:53.531553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:53.566005Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792572333685792:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:53.566091Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792572333685797:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:53.566099Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:53.569075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:53.576461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792572333685799:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:53.678857Z node 1 :TX_PROXY ERROR: Actor# [1:7486792572333685854:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:54.357547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.632366Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154734671, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 18237, MsgBus: 7426 2025-03-28T09:38:55.296032Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792578955154094:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:55.296130Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ab3/r3tmp/tmpoTbkzB/pdisk_1.dat 2025-03-28T09:38:55.410159Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18237, node 2 2025-03-28T09:38:55.450068Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:55.450133Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:55.451739Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:55.473796Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:55.473825Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:55.473839Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:55.473969Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7426 TClient is connected to server localhost:7426 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:38:55.854319Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:55.870815Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:55.956128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:56.078157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:56.152968Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:57.841056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792587545090455:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:57.841135Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:57.884388Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.910283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.934752Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.959552Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.984617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:58.012643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:58.046450Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792591840058259:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:58.046527Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792591840058264:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:58.046526Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:58.048960Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:58.056989Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792591840058266:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:38:58.130222Z node 2 :TX_PROXY ERROR: Actor# [2:7486792591840058320:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:58.901076Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:38:59.282598Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154739319, txId: 281474976715673] shutting down >> KqpYql::InsertIgnore [GOOD] >> KqpYql::JsonCast ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTablePrefix [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:38:59.793097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:59.793192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:59.793230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:59.793254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:59.793285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:59.793307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:59.793351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:59.793405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:59.793670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:59.868145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:59.868210Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:59.884105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:59.884384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:59.884551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:59.890269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:59.890485Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState 
with owners number: 0 2025-03-28T09:38:59.891172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:59.891432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:59.893283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:59.894562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:59.894620Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:59.894804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:59.894849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:59.894895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:59.895020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:59.902678Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:39:00.006479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:00.006716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:00.006923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:00.007166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:00.007234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:00.009422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:00.009544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:00.009721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:00.009767Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:00.009801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:00.009831Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:00.011864Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:00.011922Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:00.011972Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:00.013543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:00.013580Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:00.013637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:00.013682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:00.022505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:00.024740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:00.024911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:00.025957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:00.026093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:00.026150Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:00.026433Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:00.026480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:00.026642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:00.026746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:00.029003Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:00.029060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:00.029237Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-03-28T09:39:00.029272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:00.029659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:00.029699Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:00.029791Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:00.029821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:00.029857Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:00.029885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:00.029918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:39:00.029957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:00.029992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:39:00.030041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:39:00.030135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:00.030175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:39:00.030204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:39:00.037421Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:00.037558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:00.037598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ration: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:00.492794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:00.492811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:39:00.492873Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:00.492907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:00.492919Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:39:00.492935Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-28T09:39:00.492953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2025-03-28T09:39:00.493133Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:00.493176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:00.493189Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:39:00.493213Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-03-28T09:39:00.493246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-03-28T09:39:00.493437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:00.493485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 6 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:00.493500Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:39:00.493515Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 6], version: 18446744073709551615 2025-03-28T09:39:00.493530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 6] was 4 2025-03-28T09:39:00.493582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/5, is published: true 2025-03-28T09:39:00.496908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:3, at schemeshard: 72057594046678944 2025-03-28T09:39:00.496945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:3 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:00.497149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-28T09:39:00.497250Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/5 2025-03-28T09:39:00.497275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-03-28T09:39:00.497301Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:3 progress is 2/5 2025-03-28T09:39:00.497356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/5 2025-03-28T09:39:00.497394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/5, is published: true 2025-03-28T09:39:00.498350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:4, at schemeshard: 72057594046678944 2025-03-28T09:39:00.498384Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:4 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:00.498558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2025-03-28T09:39:00.498616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:4 progress is 3/5 2025-03-28T09:39:00.498631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-03-28T09:39:00.498648Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:4 progress is 3/5 2025-03-28T09:39:00.498677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 3/5 2025-03-28T09:39:00.498722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/5, is published: true 2025-03-28T09:39:00.499509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2025-03-28T09:39:00.499551Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:00.499738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:39:00.499820Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 4/5 2025-03-28T09:39:00.499848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-03-28T09:39:00.499876Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:2 progress is 4/5 2025-03-28T09:39:00.499897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 4/5 2025-03-28T09:39:00.499921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/5, is published: true 2025-03-28T09:39:00.500175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:39:00.500382Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:39:00.500458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:39:00.500514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:39:00.500884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:39:00.500922Z node 1 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:00.501077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:39:00.501145Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 5/5 2025-03-28T09:39:00.501170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-03-28T09:39:00.501196Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 5/5 2025-03-28T09:39:00.501217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-03-28T09:39:00.501242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 5/5, is published: true 2025-03-28T09:39:00.501295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:450:2396] message: TxId: 102 2025-03-28T09:39:00.501333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 5/5 2025-03-28T09:39:00.501384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:39:00.501419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:39:00.501503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:39:00.501590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-03-28T09:39:00.501612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-03-28T09:39:00.501640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:39:00.501661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:2 2025-03-28T09:39:00.501678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:2 2025-03-28T09:39:00.501731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:39:00.501761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:3 2025-03-28T09:39:00.501783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:3 2025-03-28T09:39:00.501817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-03-28T09:39:00.501840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:4 2025-03-28T09:39:00.501858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:4 2025-03-28T09:39:00.501893Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 2 2025-03-28T09:39:00.502256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:39:00.502295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:39:00.502335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:39:00.502455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:39:00.502527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:39:00.502571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:39:00.504925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:39:00.504984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:597:2536] TestWaitNotification: OK eventTxId 102 >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true >> StreamCreator::Basic >> KqpExplain::SsaProgramInJsonPlan [GOOD] >> KqpLimits::AffectedShardsLimit >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] >> KqpYql::EvaluateExprYsonAndType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] Test command err: Trying to start YDB, gRPC: 9587, MsgBus: 31499 2025-03-28T09:38:51.300409Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792564971108014:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:51.300474Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000aae/r3tmp/tmpAZM5ie/pdisk_1.dat 2025-03-28T09:38:51.552155Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 9587, node 1 2025-03-28T09:38:51.591592Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:51.591617Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:51.591634Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:51.591763Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:38:51.627584Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:51.627707Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:51.629452Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:31499 TClient is connected to server 
localhost:31499 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:51.997038Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:52.016276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:52.140424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:52.287394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:52.360813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:53.765679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792573561044384:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:53.765779Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.011673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.037793Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.060902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.102444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.125118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.151433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:54.186400Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792577856012189:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.186455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.186496Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792577856012194:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:54.189490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:54.197784Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792577856012196:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:54.254699Z node 1 :TX_PROXY ERROR: Actor# [1:7486792577856012249:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:55.519329Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154735546, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 7410, MsgBus: 8722 2025-03-28T09:38:56.262442Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792587065615247:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:56.262581Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000aae/r3tmp/tmpeD6ACp/pdisk_1.dat 2025-03-28T09:38:56.370803Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7410, node 2 2025-03-28T09:38:56.403261Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:56.403400Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:56.405087Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:56.430667Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:56.430706Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:56.430714Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:56.430844Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8722 TClient is connected to server localhost:8722 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:56.745728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:38:56.752542Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:56.821221Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:56.997750Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:57.063241Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:58.790207Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792595655551614:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:58.790299Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:58.827555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:58.852028Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:58.893898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:58.920725Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:58.948589Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:58.975160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:59.008894Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792599950519417:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:59.008978Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792599950519422:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:59.008986Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:59.012215Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:59.021248Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792599950519424:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:38:59.121130Z node 2 :TX_PROXY ERROR: Actor# [2:7486792599950519479:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:00.206111Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154740236, txId: 281474976715671] shutting down 2025-03-28T09:39:00.299592Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154740334, txId: 281474976715673] shutting down >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestGetStatusWorks >> TPQTest::TestSourceIdDropByUserWrites [GOOD] >> TPQTest::TestSourceIdDropBySourceIdCount >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters >> TargetDiscoverer::InvalidCredentials >> TPQTest::TestPQSmallRead [GOOD] >> TPQTest::TestPQReadAhead >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptPg >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] Test command err: 2025-03-28T09:37:50.262658Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792301473938570:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:50.262784Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:37:50.288712Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792302817614711:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:50.288777Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00192e/r3tmp/tmpfN7cSE/pdisk_1.dat 2025-03-28T09:37:50.417562Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:37:50.422022Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T09:37:50.559368Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:50.566920Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:50.567082Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:37:50.576339Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:37:50.578890Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:50.611491Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:37:50.611568Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 17315, node 1 2025-03-28T09:37:50.614729Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:37:50.651522Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00192e/r3tmp/yandextFxAWE.tmp 2025-03-28T09:37:50.651546Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00192e/r3tmp/yandextFxAWE.tmp 2025-03-28T09:37:50.651669Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00192e/r3tmp/yandextFxAWE.tmp 2025-03-28T09:37:50.651770Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:37:50.694083Z INFO: TTestServer started on Port 25089 GrpcPort 17315 TClient is connected to server localhost:25089 PQClient connected to localhost:17315 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:37:50.925387Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:37:50.969034Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:37:53.012247Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792315702516909:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:53.012303Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792315702516917:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:53.012348Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:37:53.018363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480 2025-03-28T09:37:53.032920Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792315702516924:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2025-03-28T09:37:53.094297Z node 2 :TX_PROXY ERROR: Actor# [2:7486792315702516952:2129] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:37:53.314799Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486792315702516966:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:37:53.314802Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486792314358841681:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:37:53.315055Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MThiYjExMjYtZjMwM2M2YzAtZjhhMGJhZmEtZGJlZjg1ZDY=, ActorId: [1:7486792314358841656:2340], ActorState: ExecuteState, TraceId: 01jqe1txdc6gefr48w11zg7ghs, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:37:53.315068Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjU0ZTllYjEtNjQwZjA4MmMtN2FjMTE3YjAtZmU5ZWRhMzA=, ActorId: [2:7486792315702516893:2307], ActorState: ExecuteState, TraceId: 01jqe1txbjbazd3e5c1skst9fk, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:37:53.315901Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:37:53.317449Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:37:53.317458Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:37:53.393982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:37:53.463734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:37:53.698642Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710665. Ctx: { TraceId: 01jqe1txvs07rxcwqy1a0b12qr, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDA0ZjQ4YmQtYjdmYzRhMTUtZjcxYjdjOWUtMjY2OTc4Mw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [1:7486792314358842053:3076] 2025-03-28T09:37:55.261438Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792301473938570:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:55.261500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:37:55.288569Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792302817614711:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:37:55.288634Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-28T09:37:59.490180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:1, at schemeshard: 72057594046644480 2025-03-28T09:37:59.952950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480 2025-03-28T09:38:00.320740Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480 2025-03-28T09:38:00.758794Z node 1 :FLAT_TX_SC ... ydb://session/3?node_id=10&id=YTJiNzBjN2UtYWU1NDdjNTAtNDkxMjQ2Zi04ODgyNmQ0MQ==, ActorId: [10:7486792553624073204:2310], ActorState: ExecuteState, TraceId: 01jqe1wmraergt4s714z242rbf, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:38:49.787891Z node 10 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:38:49.861391Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:38:50.155977Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqe1wmyx9cxhw0hgcmkzdszc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ODFiZjdkYmQtY2RjNWIyMWQtMjMzZDA1MmUtYmJkMzUxNmE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. 
Subcribe to ClusterTracker from [9:7486792560004376608:3089] 2025-03-28T09:38:50.905132Z node 9 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7486792538529538487:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:50.905209Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:38:50.913302Z node 10 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7486792536444203625:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:50.913365Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2025-03-28T09:38:55.294197Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:1, at schemeshard: 72057594046644480 2025-03-28T09:38:56.023087Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-03-28T09:38:56.636827Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.287697Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715692:0, at schemeshard: 72057594046644480 2025-03-28T09:38:58.034764Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715695:0, at schemeshard: 72057594046644480 2025-03-28T09:38:58.683663Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715701:0, at schemeshard: 72057594046644480 Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (16261273835729377752, "Root", "00415F536F757263655F3130", 1743154739541, 1743154739541, 0, 13); 2025-03-28T09:38:59.693449Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715707. Ctx: { TraceId: 01jqe1wybf841e4ncbf4wctn2f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=YWU4Njc5MzAtYThmYjUwMzItZTM0NTJjY2MtZjdhNWIzMDM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:38:59.706598Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2025-03-28T09:38:59.706631Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2025-03-28T09:38:59.706646Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2025-03-28T09:38:59.706670Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7486792598659083808:4016] (SourceId=A_Source_10, PreferedPartition=1) GetOwnershipFast Partition=1 TabletId=1001 2025-03-28T09:38:59.706849Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 269877760, Sender [9:7486792598659083809:4016], Recipient [9:7486792581479213535:3352]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [9:7486792598659083808:4016] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2025-03-28T09:38:59.706949Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 271188557, Sender [9:7486792598659083808:4016], Recipient [9:7486792581479213535:3352]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_10" 2025-03-28T09:38:59.707056Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateOwnershipFast, received event# 271188558, Sender [9:7486792581479213535:3352], Recipient [9:7486792598659083808:4016]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2025-03-28T09:38:59.707098Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7486792598659083808:4016] (SourceId=A_Source_10, PreferedPartition=1) InitTable: SourceId=A_Source_10 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2025-03-28T09:38:59.707172Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateMockWork, received event# 65543, Sender [9:7486792598659083808:4016], Recipient [9:7486792581479213535:3352]: NActors::TEvents::TEvPoison 2025-03-28T09:38:59.707271Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateInitTable, received event# 277020685, Sender [9:7486792538529538483:2070], Recipient [9:7486792598659083808:4016]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2025-03-28T09:38:59.707307Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7486792598659083808:4016] (SourceId=A_Source_10, PreferedPartition=1) StartKqpSession 2025-03-28T09:38:59.710565Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateCreateKqpSession, received event# 271646728, Sender [9:7486792538529538707:2280], Recipient [9:7486792598659083808:4016]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: 
"ydb://session/3?node_id=9&id=ZmY0ODYxMzYtNjMyNzE4ZTUtOTM3MzRmYjItYTkwYTAxM2E=" NodeId: 9 } YdbStatus: SUCCESS ResourceExhausted: false 2025-03-28T09:38:59.710599Z node 9 :PQ_PARTITION_CHOOSER DEBUG: TPartitionChooser [9:7486792598659083808:4016] (SourceId=A_Source_10, PreferedPartition=1) Select from the table 2025-03-28T09:38:59.903989Z node 9 :PQ_PARTITION_CHOOSER TRACE: StateSelect, received event# 271646721, Sender [9:7486792538529538707:2280], Recipient [9:7486792598659083808:4016]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=9&id=ZmY0ODYxMzYtNjMyNzE4ZTUtOTM3MzRmYjItYTkwYTAxM2E=" PreparedQuery: "49287e20-e5e92266-648ab461-ae52fe9c" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01jqe1wynjctjzmbs36yst4dw5" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1743154739541 } items { uint64_value: 1743154739541 } items { uint64_value: 13 } } } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 118 2025-03-28T09:38:59.904188Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7486792598659083808:4016] (SourceId=A_Source_10, PreferedPartition=1) Selected from table PartitionId=0 SeqNo=13 2025-03-28T09:38:59.904231Z node 9 :PQ_PARTITION_CHOOSER TRACE: TPartitionChooser [9:7486792598659083808:4016] (SourceId=A_Source_10, PreferedPartition=1) OnPartitionChosen Received TEvChooseError: MessageGroupId A_Source_10 is already bound to PartitionGroupId 1, but client provided 2. MessageGroupId->PartitionGroupId binding cannot be changed, either use another MessageGroupId, specify PartitionGroupId 1, or do not specify PartitionGroupId at all. Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 16261273835729377752 AND Topic = "Root" AND ProducerId = "00415F536F757263655F3130" 2025-03-28T09:38:59.904299Z node 9 :PQ_PARTITION_CHOOSER INFO: TPartitionChooser [9:7486792598659083808:4016] (SourceId=A_Source_10, PreferedPartition=1) ReplyError: MessageGroupId A_Source_10 is already bound to PartitionGroupId 1, but client provided 2. MessageGroupId->PartitionGroupId binding cannot be changed, either use another MessageGroupId, specify PartitionGroupId 1, or do not specify PartitionGroupId at all. 2025-03-28T09:39:00.090382Z node 9 :KQP_EXECUTER ERROR: TxId: 281474976715710. Ctx: { TraceId: 01jqe1wypr28cdznm0dkhfgdx6, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ZGEwYWVkMTYtOGY0MzVjMmEtN2Y3ODE3OGYtN2E4OWZkZA==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:39:00.757433Z node 9 :KQP_EXECUTER ERROR: ActorId: [9:7486792602954051250:2710] TxId: 281474976715711. Ctx: { TraceId: 01jqe1wzbrbs0ee4kaffzb1e2j, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=9&id=ZjZiNGRkYzUtMzBjYWUxMWYtODBhZmMzYjYtODRkN2MxNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 10 2025-03-28T09:39:00.757608Z node 9 :KQP_COMPUTE ERROR: SelfId: [9:7486792602954051257:2710], TxId: 281474976715711, task: 2. Ctx: { TraceId : 01jqe1wzbrbs0ee4kaffzb1e2j. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=9&id=ZjZiNGRkYzUtMzBjYWUxMWYtODBhZmMzYjYtODRkN2MxNjM=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [9:7486792602954051250:2710], status: UNAVAILABLE, reason: {
: Error: Terminate execution } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExprYsonAndType [GOOD] Test command err: Trying to start YDB, gRPC: 64879, MsgBus: 63640 2025-03-28T09:38:53.010383Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792571632480299:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:53.010452Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a9e/r3tmp/tmpFzO7g0/pdisk_1.dat 2025-03-28T09:38:53.309839Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 64879, node 1 2025-03-28T09:38:53.315487Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:38:53.315687Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:38:53.341678Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:53.341698Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:53.341705Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:53.341813Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:38:53.376698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:53.376805Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:53.378490Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63640 TClient is connected to server localhost:63640 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:53.717169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:53.733410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:38:53.840029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:53.983462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:54.057982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:55.380554Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792580222416673:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:55.380690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:55.607507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:55.632315Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:55.661209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:55.687953Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:55.713857Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:55.747650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:55.782272Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792580222417184:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:55.782333Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792580222417189:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:55.782380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:55.786487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:55.797009Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792580222417191:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:55.883404Z node 1 :TX_PROXY ERROR: Actor# [1:7486792580222417246:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 12273, MsgBus: 19984 2025-03-28T09:38:57.585599Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792590598627539:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:57.585688Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a9e/r3tmp/tmpISBppB/pdisk_1.dat 2025-03-28T09:38:57.652138Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12273, node 2 2025-03-28T09:38:57.711669Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:57.711746Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:57.713056Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:57.723438Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:57.723457Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:57.723463Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:57.723545Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19984 TClient is connected to server localhost:19984 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:58.093370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:58.100249Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:38:58.146966Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:58.282971Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:58.346048Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:00.160814Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792603483531218:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:00.160891Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:00.194638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:00.221961Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:00.253699Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:00.282436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:00.311416Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:00.379175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:00.432800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792603483531733:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:00.432866Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792603483531738:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:00.432870Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:00.436444Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:00.443652Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792603483531740:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:00.539743Z node 2 :TX_PROXY ERROR: Actor# [2:7486792603483531793:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters >> KqpPragma::Warning [GOOD] >> KqpScripting::ScanQueryInvalid >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] >> KqpYql::ColumnNameConflict [GOOD] >> KqpYql::ColumnTypeMismatch >> TSchemeShardTest::CopyTableForBackup [GOOD] >> TSchemeShardTest::CreateFinishedInDescription >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters [GOOD] >> TSchemeShardInfoTypesTest::EmptyFamilies [GOOD] >> TSchemeShardInfoTypesTest::LostId [GOOD] >> TSchemeShardInfoTypesTest::DeduplicationOrder [GOOD] >> TSchemeShardInfoTypesTest::MultipleDeduplications [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false >> StreamCreator::Basic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::Warning [GOOD] Test command err: Trying to start YDB, gRPC: 4697, MsgBus: 8055 2025-03-28T09:38:54.287116Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792574969750096:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:54.287281Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a90/r3tmp/tmpyNxVYv/pdisk_1.dat 2025-03-28T09:38:54.534590Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4697, node 1 2025-03-28T09:38:54.577141Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:54.577169Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:54.577176Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:54.577307Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:38:54.627171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:54.627299Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:54.628925Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8055 TClient is connected to server localhost:8055 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:54.992987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:55.006404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:55.123828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:55.239562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:55.309217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:56.686208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792583559686467:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:56.686289Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:56.946121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:56.971976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:56.997770Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.024828Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.047891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.114048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.148821Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792587854654276:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:57.148883Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:57.148956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792587854654281:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:57.152056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:57.161247Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792587854654283:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:57.264737Z node 1 :TX_PROXY ERROR: Actor# [1:7486792587854654337:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:58.360214Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486792592149621953:2499], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:2:50: Error: At function: AssumeColumnOrderPartial
:2:20: Error: At function: Aggregate /lib/yql/aggregate.yqls:648:18: Error: At function: AggregationTraits /lib/yql/aggregate.yqls:60:31: Error: At function: AggrCountInit
:2:20: Error: At function: PersistableRepr
:2:26: Error: At function: Member
:2:26: Error: Member not found: _yql_partition_id 2025-03-28T09:38:58.360405Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjNhMDg4YjQtMTk5MmU1NDctZmU5M2YwZGEtZTUzZDVjYzI=, ActorId: [1:7486792587854654586:2487], ActorState: ExecuteState, TraceId: 01jqe1wx4m79qbavbjyrzmxt84, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id: Trying to start YDB, gRPC: 28032, MsgBus: 26930 2025-03-28T09:38:59.053355Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792599669961937:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:59.053584Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a90/r3tmp/tmpbxHsok/pdisk_1.dat 2025-03-28T09:38:59.170407Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:59.186471Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:59.186569Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 28032, node 2 2025-03-28T09:38:59.188089Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:59.219337Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:59.219361Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:59.219369Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:59.219512Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26930 TClient is connected to server localhost:26930 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:59.604427Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:59.622038Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:38:59.692286Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:59.858132Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:59.931527Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:01.612156Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792608259898295:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:01.612257Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:01.648442Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.672898Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.703246Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.729208Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.757142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.788983Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.835737Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792608259898805:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:01.835868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:01.835934Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792608259898811:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:01.839222Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:01.848321Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792608259898813:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:01.911874Z node 2 :TX_PROXY ERROR: Actor# [2:7486792608259898866:3439] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpYql::BinaryJsonOffsetNormal >> KqpYql::BinaryJsonOffsetBound >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] >> TSchemeShardTest::CreateFinishedInDescription [GOOD] >> TSchemeShardTest::CreateBlockStoreVolume >> KqpYql::RefSelect [GOOD] >> KqpYql::PgIntPrimaryKey >> KqpScripting::ScriptExplainCreatedTable [GOOD] >> KqpScripting::ScriptExplain >> KqpYql::InsertCVList+useSink [GOOD] >> KqpYql::InsertCVList-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] Test command err: Trying to start YDB, gRPC: 27912, MsgBus: 26829 2025-03-28T09:38:54.633946Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792575272355141:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:54.634053Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a8b/r3tmp/tmp31TzDR/pdisk_1.dat 2025-03-28T09:38:54.937511Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27912, node 1 2025-03-28T09:38:55.009609Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:55.009704Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:55.010302Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:55.010321Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:55.010331Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:55.010463Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:38:55.011444Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26829 TClient is connected to server localhost:26829 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:55.370869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:55.393094Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:55.502800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:55.636962Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:55.685930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:57.081655Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792588157258819:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:57.081771Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:57.339834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.368448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.393189Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.439252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.465723Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.492919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.528873Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792588157259328:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:57.528938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:57.528958Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792588157259333:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:57.531729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:57.537900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792588157259335:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:57.606628Z node 1 :TX_PROXY ERROR: Actor# [1:7486792588157259389:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 1424, MsgBus: 14246 2025-03-28T09:38:59.178220Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792598148607875:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:59.178288Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a8b/r3tmp/tmpT70WIZ/pdisk_1.dat 2025-03-28T09:38:59.308268Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:59.311337Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:59.311411Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:59.313316Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1424, node 2 2025-03-28T09:38:59.349357Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:59.349390Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:59.349399Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:59.349513Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14246 TClient is connected to server localhost:14246 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:59.711758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:59.729372Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:38:59.770798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:59.921217Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:59.994717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:01.921966Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792606738544241:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:01.922050Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:01.961187Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.988432Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:02.014430Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:02.063305Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:02.090631Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:02.119614Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:02.159566Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792611033512045:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:02.159642Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:02.159820Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792611033512050:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:02.163135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:02.171942Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792611033512052:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:02.235039Z node 2 :TX_PROXY ERROR: Actor# [2:7486792611033512107:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Result: [[[[101u]]];[[[102u]]];[[[103u]]];[[[104u]]];[[[105u]]]] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false >> KqpYql::EvaluateIf [GOOD] >> KqpYql::EvaluateFor >> KqpQuery::RowsLimit [GOOD] >> KqpQuery::RowsLimitServiceOverride ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:87:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:87:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:87:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:87:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:87:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:87:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:92:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:94:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:93:2120] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:147:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 29:78:2110] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! 
new actor is[29:81:2111] Leader for TabletID 72057594037927937 is [29:81:2111] sender: [29:135:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:77:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:80:2057] recipient: [30:79:2110] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:81:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:82:2111] sender: [30:83:2057] recipient: [30:79:2110] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:82:2111] Leader for TabletID 72057594037927937 is [30:82:2111] sender: [30:136:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:80:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:83:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:84:2057] recipient: [31:82:2113] Leader for TabletID 72057594037927937 is [31:85:2114] sender: [31:86:2057] recipient: [31:82:2113] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! new actor is[31:85:2114] Leader for TabletID 72057594037927937 is [31:85:2114] sender: [31:139:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:80:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:83:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:84:2057] recipient: [32:82:2113] Leader for TabletID 72057594037927937 is [32:85:2114] sender: [32:86:2057] recipient: [32:82:2113] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! 
new actor is[32:85:2114] Leader for TabletID 72057594037927937 is [32:85:2114] sender: [32:139:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:81:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:84:2057] recipient: [33:83:2113] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:85:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:86:2114] sender: [33:87:2057] recipient: [33:83:2113] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! new actor is[33:86:2114] Leader for TabletID 72057594037927937 is [33:86:2114] sender: [33:140:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:84:2057] recipient: [34:36:2083] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:87:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:88:2057] recipient: [34:86:2116] Leader for TabletID 72057594037927937 is [34:89:2117] sender: [34:90:2057] recipient: [34:86:2116] !Reboot 72057594037927937 (actor [34:56:2097]) rebooted! !Reboot 72057594037927937 (actor [34:56:2097]) tablet resolver refreshed! new actor is[34:89:2117] Leader for TabletID 72057594037927937 is [34:89:2117] sender: [34:143:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:84:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:87:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:88:2057] recipient: [35:86:2116] Leader for TabletID 72057594037927937 is [35:89:2117] sender: [35:90:2057] recipient: [35:86:2116] !Reboot 72057594037927937 (actor [35:56:2097]) rebooted! !Reboot 72057594037927937 (actor [35:56:2097]) tablet resolver refreshed! 
new actor is[35:89:2117] Leader for TabletID 72057594037927937 is [35:89:2117] sender: [35:143:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:85:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:88:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:89:2057] recipient: [36:87:2116] Leader for TabletID 72057594037927937 is [36:90:2117] sender: [36:91:2057] recipient: [36:87:2116] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! new actor is[36:90:2117] Leader for TabletID 72057594037927937 is [36:90:2117] sender: [36:144:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:52:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:52:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:88:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:91:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:92:2057] recipient: [37:90:2119] Leader for TabletID 72057594037927937 is [37:93:2120] sender: [37:94:2057] recipient: [37:90:2119] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! new actor is[37:93:2120] Leader for TabletID 72057594037927937 is [37:93:2120] sender: [37:147:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:51:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:51:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:88:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:91:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:92:2057] recipient: [38:90:2119] Leader for TabletID 72057594037927937 is [38:93:2120] sender: [38:94:2057] recipient: [38:90:2119] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! 
new actor is[38:93:2120] Leader for TabletID 72057594037927937 is [38:93:2120] sender: [38:147:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:89:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:91:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:93:2057] recipient: [39:92:2119] Leader for TabletID 72057594037927937 is [39:94:2120] sender: [39:95:2057] recipient: [39:92:2119] !Reboot 72057594037927937 (actor [39:56:2097]) rebooted! !Reboot 72057594037927937 (actor [39:56:2097]) tablet resolver refreshed! new actor is[39:94:2120] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:57:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:74:2057] recipient: [40:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::Basic [GOOD] Test command err: 2025-03-28T09:39:01.808319Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792607198916282:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:01.808429Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0012e2/r3tmp/tmpNiysbK/pdisk_1.dat 2025-03-28T09:39:02.096965Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:02.110739Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:02.110804Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:02.113908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:63924 TServer::EnableGrpc on GrpcPort 26887, node 1 2025-03-28T09:39:02.299746Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:02.299767Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:02.299776Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:02.299894Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63924 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:02.587361Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:02.603220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743154742707 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" Key... (TRUNCATED) TClient::Ls request: /Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743154742644 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743154742707 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: ".sys" PathId: 1844... 
(TRUNCATED) 2025-03-28T09:39:02.701572Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:39:02.701789Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2025-03-28T09:39:02.701813Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Get table profiles 2025-03-28T09:39:02.702210Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2025-03-28T09:39:04.069092Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743154742707, tx_id: 281474976710658 } } } 2025-03-28T09:39:04.069495Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2025-03-28T09:39:04.071317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T09:39:04.072023Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2025-03-28T09:39:04.072044Z node 1 :REPLICATION_CONTROLLER DEBUG: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2025-03-28T09:39:04.093915Z node 1 :REPLICATION_CONTROLLER TRACE: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2025-03-28T09:39:04.093955Z node 1 :REPLICATION_CONTROLLER INFO: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 3] 2025-03-28T09:39:04.094614Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2025-03-28T09:39:04.168298Z node 1 :CHANGE_EXCHANGE WARN: [CdcChangeSenderMain][72075186224037888:1][1:7486792620083819093:2347] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:5:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2025-03-28T09:39:04.187900Z node 1 :REPLICATION_CONTROLLER 
TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2025-03-28T09:39:04.187923Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# 2025-03-28T09:39:04.195223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:04.211588Z node 1 :REPLICATION_CONTROLLER TRACE: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2025-03-28T09:39:04.211617Z node 1 :REPLICATION_CONTROLLER INFO: [StreamCreator][rid 1][tid 1] Success: issues# TClient::Ls request: /Root/Table TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Table" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743154742707 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyC... (TRUNCATED) >> TSchemeShardTest::CreateBlockStoreVolume [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles >> KqpYql::FromBytes [GOOD] >> TargetDiscoverer::InvalidCredentials [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true >> KqpYql::JsonCast [GOOD] >> KqpYql::TableConcat >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::InvalidCredentials [GOOD] Test command err: 2025-03-28T09:39:02.971863Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792609643768369:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:02.971927Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018b6/r3tmp/tmpJh6Jd4/pdisk_1.dat 2025-03-28T09:39:03.309735Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:03.337994Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:03.338482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:03.363092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2122 TServer::EnableGrpc on 
GrpcPort 1192, node 1 2025-03-28T09:39:03.624650Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:03.624675Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:03.624684Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:03.624832Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2122 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:04.109018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:04.139263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:04.336308Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: CLIENT_UNAUTHENTICATED, issues: {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:217: Cannot find user: user } } } 2025-03-28T09:39:04.336878Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# CLIENT_UNAUTHENTICATED, issues# {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:217: Cannot find user: user } >> KqpScripting::ScanQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::FromBytes [GOOD] Test command err: Trying to start YDB, gRPC: 10799, MsgBus: 7995 2025-03-28T09:38:56.368400Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792583427211033:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:56.368455Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a7d/r3tmp/tmpgDCEtk/pdisk_1.dat 2025-03-28T09:38:56.646394Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10799, node 1 2025-03-28T09:38:56.723861Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:56.723893Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:56.723904Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:56.724030Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:38:56.725381Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:56.725529Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:56.727247Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7995 TClient is connected to server localhost:7995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:57.181148Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:57.212000Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:38:57.335907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:57.457576Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:57.531023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:58.894022Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792592017147411:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:58.894159Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:59.179883Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:59.207312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:59.234666Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:59.265282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:59.293265Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:59.319588Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:59.395681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792596312115222:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:59.395748Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:59.395835Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792596312115227:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:59.399450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:59.408871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792596312115229:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:59.511338Z node 1 :TX_PROXY ERROR: Actor# [1:7486792596312115285:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 27319, MsgBus: 7167 2025-03-28T09:39:01.097770Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792607782741480:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:01.097844Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a7d/r3tmp/tmpWqrJ6A/pdisk_1.dat 2025-03-28T09:39:01.220707Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:01.249677Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected TServer::EnableGrpc on GrpcPort 27319, node 2 2025-03-28T09:39:01.249792Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:01.251643Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:01.283716Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:01.283746Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:01.283755Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:01.283894Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7167 TClient is connected to server localhost:7167 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:01.655643Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:01.671101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:01.723559Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:01.849260Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:01.909160Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:04.003665Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792616372677832:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:04.003744Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:04.021532Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:04.049167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:04.074796Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:04.105577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:04.133669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:04.201687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:04.249372Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792620667645644:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:04.249423Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:04.249462Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792620667645649:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:04.252565Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:04.261901Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792620667645651:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:04.335313Z node 2 :TX_PROXY ERROR: Actor# [2:7486792620667645704:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 2418, MsgBus: 1460 2025-03-28T09:38:49.803436Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792554823176235:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:49.803505Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000ac3/r3tmp/tmpgWVoTz/pdisk_1.dat 2025-03-28T09:38:50.096712Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2418, node 1 2025-03-28T09:38:50.146193Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:50.146219Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:50.146228Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:50.146371Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:38:50.177396Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:50.177519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:50.179347Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1460 TClient is connected to server localhost:1460 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:50.536138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:38:50.556782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:50.647862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:50.775106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:50.846557Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:52.146845Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792567708079908:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:52.146973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:52.429820Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:52.455771Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:52.478112Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:52.501544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:52.525297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:52.565611Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:52.600950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792567708080416:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:52.601062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:52.601093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792567708080421:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:52.603877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:52.611606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792567708080423:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:52.686653Z node 1 :TX_PROXY ERROR: Actor# [1:7486792567708080477:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:54.543970Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792572003048028:2486] 2025-03-28T09:38:54.544709Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486792572003048094:2492] TxId: 281474976710672. Ctx: { TraceId: 01jqe1wrf78s0pty001fs5k948, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTlhZWVmOTYtNDhlYzFlYTgtZjI3YTM1MDktYjVkMWQ0MDY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-28T09:38:54.544946Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTlhZWVmOTYtNDhlYzFlYTgtZjI3YTM1MDktYjVkMWQ0MDY=, ActorId: [1:7486792572003048045:2492], ActorState: ExecuteState, TraceId: 01jqe1wrf78s0pty001fs5k948, Create QueryResponse for error on request, msg: 2025-03-28T09:38:54.551952Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154733775, txId: 281474976710671] shutting down 2025-03-28T09:38:54.551974Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792572003048099:2496], TxId: 281474976710672, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=OTlhZWVmOTYtNDhlYzFlYTgtZjI3YTM1MDktYjVkMWQ0MDY=. TraceId : 01jqe1wrf78s0pty001fs5k948. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486792572003048094:2492], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:38:54.552199Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792572003048100:2497], TxId: 281474976710672, task: 2. Ctx: { TraceId : 01jqe1wrf78s0pty001fs5k948. SessionId : ydb://session/3?node_id=1&id=OTlhZWVmOTYtNDhlYzFlYTgtZjI3YTM1MDktYjVkMWQ0MDY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486792572003048094:2492], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:38:54.552494Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792572003048102:2498], TxId: 281474976710672, task: 3. Ctx: { TraceId : 01jqe1wrf78s0pty001fs5k948. SessionId : ydb://session/3?node_id=1&id=OTlhZWVmOTYtNDhlYzFlYTgtZjI3YTM1MDktYjVkMWQ0MDY=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486792572003048094:2492], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:38:54.552632Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792572003048103:2499], TxId: 281474976710672, task: 4. Ctx: { TraceId : 01jqe1wrf78s0pty001fs5k948. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTlhZWVmOTYtNDhlYzFlYTgtZjI3YTM1MDktYjVkMWQ0MDY=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486792572003048094:2492], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:38:54.552733Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792572003048104:2500], TxId: 281474976710672, task: 5. Ctx: { TraceId : 01jqe1wrf78s0pty001fs5k948. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTlhZWVmOTYtNDhlYzFlYTgtZjI3YTM1MDktYjVkMWQ0MDY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486792572003048094:2492], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:38:54.552816Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792572003048105:2501], TxId: 281474976710672, task: 6. Ctx: { SessionId : ydb://session/3?node_id=1&id=OTlhZWVmOTYtNDhlYzFlYTgtZjI3YTM1MDktYjVkMWQ0MDY=. TraceId : 01jqe1wrf78s0pty001fs5k948. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486792572003048094:2492], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:38:54.552865Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792572003048106:2502], TxId: 281474976710672, task: 7. Ctx: { TraceId : 01jqe1wrf78s0pty001fs5k948. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=OTlhZWVmOTYtNDhlYzFlYTgtZjI3YTM1MDktYjVkMWQ0MDY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486792572003048094:2492], ... ERROR: ActorId: [2:7486792604355084863:2647] TxId: 281474976715673. Ctx: { TraceId: 01jqe1wzh03qp09j00001tt6ty, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YzgyOTg2OC1iOGNhNGE4My1kZmY3OTBiYy1lZmYyNzU5Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-28T09:39:00.846501Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YzgyOTg2OC1iOGNhNGE4My1kZmY3OTBiYy1lZmYyNzU5Zg==, ActorId: [2:7486792604355084837:2647], ActorState: ExecuteState, TraceId: 01jqe1wzh03qp09j00001tt6ty, Create QueryResponse for error on request, msg: 2025-03-28T09:39:00.846605Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792604355084873:2652], TxId: 281474976715673, task: 5. Ctx: { TraceId : 01jqe1wzh03qp09j00001tt6ty. SessionId : ydb://session/3?node_id=2&id=YzgyOTg2OC1iOGNhNGE4My1kZmY3OTBiYy1lZmYyNzU5Zg==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7486792604355084863:2647], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:39:00.846917Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792604355084827:2642] 2025-03-28T09:39:00.847035Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154740887, txId: 281474976715672] shutting down 2025-03-28T09:39:00.847268Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792604355084872:2651], TxId: 281474976715673, task: 4. Ctx: { SessionId : ydb://session/3?node_id=2&id=YzgyOTg2OC1iOGNhNGE4My1kZmY3OTBiYy1lZmYyNzU5Zg==. TraceId : 01jqe1wzh03qp09j00001tt6ty. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7486792604355084863:2647], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:39:00.849002Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792604355084868:2648], TxId: 281474976715673, task: 1. Ctx: { TraceId : 01jqe1wzh03qp09j00001tt6ty. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzgyOTg2OC1iOGNhNGE4My1kZmY3OTBiYy1lZmYyNzU5Zg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7486792604355084863:2647], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:39:00.849684Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792604355084870:2649], TxId: 281474976715673, task: 2. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzgyOTg2OC1iOGNhNGE4My1kZmY3OTBiYy1lZmYyNzU5Zg==. TraceId : 01jqe1wzh03qp09j00001tt6ty. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7486792604355084863:2647], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:39:00.849902Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792604355084871:2650], TxId: 281474976715673, task: 3. Ctx: { TraceId : 01jqe1wzh03qp09j00001tt6ty. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=YzgyOTg2OC1iOGNhNGE4My1kZmY3OTBiYy1lZmYyNzU5Zg==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7486792604355084863:2647], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:39:00.871264Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715673. Snapshot is not valid, tabletId: 72075186224037896, step: 1743154740887 2025-03-28T09:39:00.872730Z node 2 :TX_DATASHARD ERROR: Undelivered event: 65542, at: [2:7486792604355084905:2053], tablet: [2:7486792587175213300:2322], scanId: 1, table: /Root/EightShard 2025-03-28T09:39:00.872763Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715673. Snapshot is not valid, tabletId: 72075186224037895, step: 1743154740887 2025-03-28T09:39:00.876721Z node 2 :TX_DATASHARD ERROR: TxId: 281474976715673. Snapshot is not valid, tabletId: 72075186224037897, step: 1743154740887 2025-03-28T09:39:00.929854Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792604355084909:2657] 2025-03-28T09:39:01.017702Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792604355084937:2666] 2025-03-28T09:39:01.114452Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792608650052247:2672] 2025-03-28T09:39:01.156239Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792587175212508:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:01.156528Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:01.197054Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154741230, txId: 281474976715675] shutting down 2025-03-28T09:39:01.202208Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792608650052266:2681] 2025-03-28T09:39:01.298599Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792608650052372:2697] 2025-03-28T09:39:01.397957Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792608650052395:2706] 2025-03-28T09:39:01.487618Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154741524, txId: 281474976715677] shutting down 2025-03-28T09:39:01.501226Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792608650052416:2715] 2025-03-28T09:39:01.609612Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792608650052518:2730] 2025-03-28T09:39:01.714866Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792608650052545:2743] 2025-03-28T09:39:01.799374Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154741832, txId: 281474976715679] shutting down 2025-03-28T09:39:01.931401Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792608650052678:2767] 2025-03-28T09:39:02.049206Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792608650052703:2776] 2025-03-28T09:39:02.169775Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792612945020031:2785] 2025-03-28T09:39:02.292685Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792612945020056:2796] 2025-03-28T09:39:02.418909Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792612945020077:2805] 2025-03-28T09:39:02.548633Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792612945020136:2814] 2025-03-28T09:39:02.681825Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792612945020157:2823] 2025-03-28T09:39:02.816805Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792612945020180:2833] 2025-03-28T09:39:02.954357Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792612945020201:2842] 2025-03-28T09:39:03.095562Z node 2 :RPC_REQUEST WARN: Client 
lost, ActorId: [2:7486792612945020223:2851] 2025-03-28T09:39:03.240506Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792617239987540:2860] 2025-03-28T09:39:03.387910Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792617239987603:2871] 2025-03-28T09:39:03.539237Z node 2 :KQP_EXECUTER ERROR: ActorId: [2:7486792617239987667:2885] TxId: 281474976715684. Ctx: { TraceId: 01jqe1x232bzgjjhxxsv54tc8s, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZThkMTMyY2ItMTI3NTY5NDMtMzQzNjdkNWQtY2RmMWQ1NTM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-28T09:39:03.539555Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792617239987676:2892], TxId: 281474976715684, task: 4. Ctx: { TraceId : 01jqe1x232bzgjjhxxsv54tc8s. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZThkMTMyY2ItMTI3NTY5NDMtMzQzNjdkNWQtY2RmMWQ1NTM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7486792617239987667:2885], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:39:03.539917Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792617239987624:2880] 2025-03-28T09:39:03.540909Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZThkMTMyY2ItMTI3NTY5NDMtMzQzNjdkNWQtY2RmMWQ1NTM=, ActorId: [2:7486792617239987634:2885], ActorState: ExecuteState, TraceId: 01jqe1x232bzgjjhxxsv54tc8s, Create QueryResponse for error on request, msg: 2025-03-28T09:39:03.541520Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154743575, txId: 281474976715683] shutting down 2025-03-28T09:39:03.541827Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792617239987677:2893], TxId: 281474976715684, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=ZThkMTMyY2ItMTI3NTY5NDMtMzQzNjdkNWQtY2RmMWQ1NTM=. TraceId : 01jqe1x232bzgjjhxxsv54tc8s. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7486792617239987667:2885], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:39:03.542186Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792617239987674:2890], TxId: 281474976715684, task: 2. Ctx: { SessionId : ydb://session/3?node_id=2&id=ZThkMTMyY2ItMTI3NTY5NDMtMzQzNjdkNWQtY2RmMWQ1NTM=. TraceId : 01jqe1x232bzgjjhxxsv54tc8s. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7486792617239987667:2885], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:39:03.542749Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792617239987672:2889], TxId: 281474976715684, task: 1. Ctx: { TraceId : 01jqe1x232bzgjjhxxsv54tc8s. SessionId : ydb://session/3?node_id=2&id=ZThkMTMyY2ItMTI3NTY5NDMtMzQzNjdkNWQtY2RmMWQ1NTM=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [2:7486792617239987667:2885], status: ABORTED, reason: {
: Error: Terminate execution } 2025-03-28T09:39:03.691977Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792617239987728:2898] 2025-03-28T09:39:03.835729Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154743869, txId: 281474976715686] shutting down 2025-03-28T09:39:04.010010Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792617239987876:2926] 2025-03-28T09:39:04.163886Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154744198, txId: 281474976715688] shutting down 2025-03-28T09:39:04.169917Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792621534955194:2935] 2025-03-28T09:39:04.335912Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792621534955306:2955] 2025-03-28T09:39:04.504826Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792621534955328:2964] 2025-03-28T09:39:04.675528Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792621534955373:2973] 2025-03-28T09:39:04.854547Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154744884, txId: 281474976715691] shutting down 2025-03-28T09:39:04.891665Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792621534955410:2983] 2025-03-28T09:39:05.028535Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792621534955511:3001] 2025-03-28T09:39:05.191751Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154745227, txId: 281474976715694] shutting down 2025-03-28T09:39:05.209324Z node 2 :RPC_REQUEST WARN: Client lost, ActorId: [2:7486792625829922874:3010] 2025-03-28T09:39:05.368450Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154745402, txId: 281474976715696] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonCast [GOOD] Test command err: Trying to start YDB, gRPC: 2313, MsgBus: 14340 2025-03-28T09:38:56.850938Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792585281865491:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:56.851064Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a74/r3tmp/tmpgDAwzi/pdisk_1.dat 2025-03-28T09:38:57.113058Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2313, node 1 2025-03-28T09:38:57.173986Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:57.174009Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:57.174017Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:57.174141Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:38:57.207963Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:57.208072Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:57.210182Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server 
localhost:14340 TClient is connected to server localhost:14340 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:57.622009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:57.638454Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:57.756538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:57.866929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:57.939416Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:59.287355Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792598166769165:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:59.287478Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:59.548232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:59.574581Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:59.601191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:59.627073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:59.674228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:59.738708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:59.773182Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792598166769679:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:59.773234Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:59.773248Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792598166769684:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:59.775849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:59.783447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792598166769686:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:59.868290Z node 1 :TX_PROXY ERROR: Actor# [1:7486792598166769741:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Table intent determination, code: 1040
:3:35: Error: INSERT OR IGNORE is not yet supported for Kikimr. Trying to start YDB, gRPC: 2616, MsgBus: 8708 2025-03-28T09:39:01.485803Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792607249765847:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:01.485880Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a74/r3tmp/tmp4GbyL3/pdisk_1.dat 2025-03-28T09:39:01.563565Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2616, node 2 2025-03-28T09:39:01.614590Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:01.619998Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:01.623352Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:01.645664Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:01.645692Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:01.645703Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:01.645814Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8708 TClient is connected to server localhost:8708 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:02.010383Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:02.023436Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:02.095384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
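The ": Error: Table intent determination, code: 1040" / ":3:35: Error: INSERT OR IGNORE is not yet supported for Kikimr." issues above are raised during query compilation, before anything reaches the datashards. A minimal sketch of the kind of statement that triggers this rejection, together with the UPSERT form YDB does accept (the table and column names here are illustrative assumptions, not taken from the test):

    -- Rejected at compile time with "INSERT OR IGNORE is not yet supported for Kikimr":
    INSERT OR IGNORE INTO KeyValue (Key, Value) VALUES (1u, "one");

    -- Accepted alternative; note the semantics differ: UPSERT overwrites an
    -- existing row with the same primary key instead of silently skipping it.
    UPSERT INTO KeyValue (Key, Value) VALUES (1u, "one");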
2025-03-28T09:39:02.217893Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:02.290582Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:04.361406Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792620134669517:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:04.361469Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:04.399957Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:04.426316Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:04.453604Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:04.479775Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:04.509758Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:04.539230Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:04.578370Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792620134670030:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:04.578445Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:04.578512Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792620134670035:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:04.581266Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:04.589231Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792620134670037:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:04.678703Z node 2 :TX_PROXY ERROR: Actor# [2:7486792620134670092:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } [[#]] >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced >> KqpYql::ScriptUdf >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] >> KqpYql::ColumnTypeMismatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:38:36.219102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:36.219214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:36.219259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:36.219305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:36.220224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:36.220259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:36.220316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:36.220412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:36.222137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:36.316798Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:36.316852Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:36.330924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:36.331094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:36.331255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:36.336882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:36.337104Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear 
TempDirsState with owners number: 0 2025-03-28T09:38:36.337728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.337905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:36.341899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.351354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:36.351429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.351610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:36.351650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:36.351735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:36.351815Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.357437Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:36.453955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:36.455360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.457390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:36.458920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:36.458987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.461703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.462355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:36.462512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.462553Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:36.462581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:36.462614Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:36.464661Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.464719Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:36.464748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:36.466291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.466328Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.466360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.466402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.477477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:36.479644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:36.479864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:36.482271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.482399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:36.482448Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.483737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:36.483822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.484056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:36.484213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:36.487066Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:36.487143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:36.487312Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2025-03-28T09:38:36.487357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:36.487715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.487784Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:36.487880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:36.487936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.488052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:36.488103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.488152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:36.488194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.488228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:38:36.488264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:36.488339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:36.488392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:36.488436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:36.490341Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:36.490488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:36.490529Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
09:39:06.870095Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 59500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 202 } } 2025-03-28T09:39:06.870214Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TConfigureParts operationId# 102:0 HandleReply TEvProposeTransactionResult, at schemeshard: 72057594046678944 message# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: PREPARED TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 PrepareArriveTime: 59500 ExecLatency: 0 ProposeLatency: 1 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 202 } } 2025-03-28T09:39:06.870258Z node 12 :FLAT_TX_SCHEMESHARD INFO: TEvProposeTransactionResult at tablet: 72057594046678944 2025-03-28T09:39:06.870379Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectProposeTransactionResults accept TEvProposeTransactionResult, shard: 72075186233409546, shardIdx: 72057594046678944:1, operationId: 102:0, left await: 0, at schemeshard: 72057594046678944 2025-03-28T09:39:06.870433Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 3 -> 128 2025-03-28T09:39:06.872681Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:39:06.872857Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:39:06.872912Z node 12 :FLAT_TX_SCHEMESHARD INFO: TAlterTable TPropose operationId# 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:06.873033Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-28T09:39:06.873196Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 5000003 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:06.875242Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-28T09:39:06.875449Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000003 2025-03-28T09:39:06.876469Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:06.876666Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 131 RawX2: 51539609707 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:06.876773Z node 12 :FLAT_TX_SCHEMESHARD 
INFO: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000003, at schemeshard: 72057594046678944 2025-03-28T09:39:06.877162Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 129 2025-03-28T09:39:06.877383Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2025-03-28T09:39:06.885406Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:06.885500Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:39:06.885843Z node 12 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:06.885912Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [12:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:39:06.886413Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:39:06.886485Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2025-03-28T09:39:06.887359Z node 12 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:06.887500Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:06.887552Z node 12 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:39:06.887620Z node 12 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2025-03-28T09:39:06.887682Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:39:06.887789Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-28T09:39:06.888573Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1799 } } 2025-03-28T09:39:06.888606Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:39:06.888753Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1799 } } 2025-03-28T09:39:06.888865Z node 12 :FLAT_TX_SCHEMESHARD 
INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000003 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1799 } } FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:39:06.889512Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 51539609844 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:39:06.889559Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2025-03-28T09:39:06.889706Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 305 RawX2: 51539609844 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:39:06.889764Z node 12 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:39:06.889884Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 305 RawX2: 51539609844 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2025-03-28T09:39:06.889959Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:06.890005Z node 12 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:39:06.890045Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:39:06.890097Z node 12 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 129 -> 240 2025-03-28T09:39:06.893093Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:39:06.893996Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:39:06.894128Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:39:06.894403Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:39:06.894452Z node 12 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:39:06.894588Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:39:06.894634Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:39:06.894674Z node 12 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:39:06.894743Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:39:06.894801Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 
102, ready parts: 1/1, is published: true 2025-03-28T09:39:06.894879Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [12:331:2310] message: TxId: 102 2025-03-28T09:39:06.894971Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:39:06.895029Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:39:06.895081Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:39:06.895232Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:39:06.896902Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:39:06.896967Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [12:389:2361] TestWaitNotification: OK eventTxId 102 >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds [GOOD] >> TSchemeShardTest::CreateDropKesus >> KqpScripting::UnsafeTimestampCast >> KqpYql::TestUuidDefaultColumn >> KqpScripting::ScanQueryInvalid [GOOD] >> KqpScripting::ScanQueryTruncate >> KqpLimits::AffectedShardsLimit [GOOD] >> KqpYql::EvaluateExpr2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 62384, MsgBus: 8596 2025-03-28T09:38:54.770766Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792576197060114:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:54.770884Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a7f/r3tmp/tmp6nw6lB/pdisk_1.dat 2025-03-28T09:38:55.023984Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:55.024115Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:55.026277Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:55.027045Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62384, node 1 2025-03-28T09:38:55.076640Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:55.076695Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:55.076708Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:55.076837Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8596 TClient is connected to server localhost:8596 WaitRootIsUp 'Root'... 
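The recurring "Resource pool default not found or you don't have access permissions" warnings throughout these logs are first-query startup noise from the workload manager rather than test failures: TPoolFetcherActor finds no default pool in a freshly created database, an ESchemeOpCreateResourcePool operation creates .metadata/workload_manager/pools/default, TPoolCreatorActor retries once after "Transaction ... completed, doublechecking", and the follow-up TX_PROXY "Check failed: path ... path exist, request accepts it" error is tolerated because a concurrent creation already succeeded. Pools can also be declared explicitly; a sketch assuming the resource-pool DDL of recent YDB releases, with an illustrative pool name and limits:

    CREATE RESOURCE POOL test_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,  -- max queries running at once (illustrative)
        QUEUE_SIZE = 100              -- max queries waiting in the queue (illustrative)
    );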
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:55.499055Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:55.516190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:55.634246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:55.748221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:55.822645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:57.027984Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792589081963782:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:57.028102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:57.309301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.335847Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.362540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.389461Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.416225Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.456989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:57.490092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792589081964295:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:57.490153Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792589081964300:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:57.490169Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:57.493367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:57.502572Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792589081964302:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:57.592373Z node 1 :TX_PROXY ERROR: Actor# [1:7486792589081964357:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:58.549801Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDBjNTQ3YzMtYjA5NDRlMjMtM2JhMjAxNzMtMmM1Nzk0ZTk=, ActorId: [1:7486792593376931911:2487], ActorState: ExecuteState, TraceId: 01jqe1wxaj2eygm205zrvbrw1a, Create QueryResponse for error on request, msg: 2025-03-28T09:38:58.567401Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDAwMGYzY2EtMzJkZDY1Ni00NWNiYTNkMC1iZWExMWE=, ActorId: [1:7486792593376931922:2492], ActorState: ExecuteState, TraceId: 01jqe1wxbw0btehjyvnaty1b3c, Create QueryResponse for error on request, msg: 2025-03-28T09:38:58.584803Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTcxZTJiMmUtNTg5OTdhYTQtOWRlNmIwMDItMTY2NmZjZDg=, ActorId: [1:7486792593376931931:2496], ActorState: ExecuteState, TraceId: 01jqe1wxcd3hc6st3ydt7dcpkp, Create QueryResponse for error on request, msg: 2025-03-28T09:38:58.628324Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzhmMjUxZGQtMzlhNTA0MjYtYzBhMTc5NzYtMzBkNDI3MGE=, ActorId: [1:7486792593376931940:2500], ActorState: ExecuteState, TraceId: 01jqe1wxcz84q9bs2hkayknd3d, Create QueryResponse for error on request, msg: 2025-03-28T09:38:58.670482Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZjI3MzY5NjgtYWNjMTFjOTktZGRmNzRiODgtNTdiNzhmMjQ=, ActorId: [1:7486792593376931972:2509], ActorState: ExecuteState, TraceId: 01jqe1wxea7bh8697tx51v5dmc, Create QueryResponse for error on request, msg: 2025-03-28T09:38:58.724243Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjQxYWM1M2QtMWFjZDkwMWQtNWEzYTgzMzktM2Q4MDdmMg==, ActorId: [1:7486792593376931986:2515], ActorState: ExecuteState, TraceId: 01jqe1wxfn3gar55423j3n70sa, Create QueryResponse for error on request, msg: 2025-03-28T09:38:58.793415Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154738773, txId: 281474976710671] shutting down 2025-03-28T09:38:58.795271Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154738773, txId: 281474976710672] shutting down 2025-03-28T09:38:58.798062Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YWY2MjE0YjAtZDk1NjU5ZjItNjgzYTNlZDctODhkZGY2NzU=, ActorId: [1:7486792593376932033:2522], ActorState: ExecuteState, TraceId: 01jqe1wxhb57j0kemxzsmgbptm, Create QueryResponse for error on request, msg: 2025-03-28T09:38:58.842489Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NTgxZTZkYzMtZTRiOTU3Y2YtODZkNmRlMDgtMTI5MmEwMWQ=, ActorId: [1:7486792593376932179:2549], ActorState: ExecuteState, TraceId: 01jqe1wxkm5w3vjyxzqdawajkt, Create QueryResponse for error on request, msg: 2025-03-28T09:38:58.862418Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154738878, txId: 281474976710675] shutting down 2025-03-28T09:38:58.866398Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154738878, txId: 281474976710676] shutting down 2025-03-28T09:38:58.902452Z node 1 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=1&id=NDMyMjZiYTQtYjQ5YTZmYTctN2YyMTM3NTYtMWJiNTYwNGI=, ActorId: [1:7486792593376932327:2574], ActorState: ExecuteState, TraceId: 01jqe1wxn2820ry7g56v4de7ra, Create QueryResponse for error on request, msg: 2025-03-28T09:38:58.944128Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjVmYjUyODUtZmJmYWU1ZTAtNmY5ODZhYWUtMjZhMGM3N2Q=, ActorId: [1:7486792593376932372:2582], ActorState: ExecuteState, TraceId: 01jqe1wxpx0bd42s4pjzt56vt3, Create QueryResponse for error on request, msg: 2025-03-28T09:38:58.984488Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTRmYjRiNDItNjg3ZGJkYjEtNTBlMzA1ZTgtYzA0MzUzNDk=, ActorId: [1:7486792593376932384:2588], ActorState: ExecuteState, TraceId: 01jqe1wxr6f16mdxqa2egfw9ww, Create QueryResponse for error on request, msg: 2025-03-28T09:38:59.059607Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NzY1Y2YzNDItODJkYTk1MzUtOWM5ZGNjZjItMzRhMTRhYmI=, ActorId: [1:7486792593376932443:2597], ActorState: ExecuteState, TraceId: 01jqe1wxsf299s6091y9dzkjnp, Create QueryResponse for error on request, msg: 2025-03-28T09:38:59.086140Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154739039, txId: 281474976710681] ... Id: ydb://session/3?node_id=1&id=MzUyNTUxMTktYjJmMjU4YmEtMWQwOTA1YjUtYjMyNmM4MDk=, ActorId: [1:7486792601966869342:2993], ActorState: ExecuteState, TraceId: 01jqe1wzgd3qvacc8t2a1he379, Create QueryResponse for error on request, msg: 2025-03-28T09:39:00.925258Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154740957, txId: 281474976710731] shutting down 2025-03-28T09:39:00.925923Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154740957, txId: 281474976710732] shutting down 2025-03-28T09:39:01.072716Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjEwOWM5MTUtYjRkMDAzY2EtYzRmNTQyZDMtM2NmMmJiZTE=, ActorId: [1:7486792601966869631:3037], ActorState: ExecuteState, TraceId: 01jqe1wzq4410xwak14bmxra9z, Create QueryResponse for error on request, msg: 2025-03-28T09:39:01.131472Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154741153, txId: 281474976710735] shutting down 2025-03-28T09:39:01.191458Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWU3Yjc4ZGYtYzhlYTQyOWYtNWQxNDY3YTUtMTM4N2FhZTI=, ActorId: [1:7486792606261836946:3046], ActorState: ExecuteState, TraceId: 01jqe1wztq34ehv4a6rgy9sqjm, Create QueryResponse for error on request, msg: 2025-03-28T09:39:01.226414Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154741258, txId: 281474976710737] shutting down 2025-03-28T09:39:01.311263Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OWI2YWJiNTEtNDNkNmI5MWMtZmE0MjFiODItOTMzYzQ2ZWU=, ActorId: [1:7486792606261837063:3065], ActorState: ExecuteState, TraceId: 01jqe1wzydc2fy8d9791cc42sb, Create QueryResponse for error on request, msg: 2025-03-28T09:39:01.380975Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154741412, txId: 281474976710740] shutting down 2025-03-28T09:39:01.381305Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154741412, txId: 281474976710739] shutting down 2025-03-28T09:39:01.522222Z node 1 :KQP_SESSION WARN: SessionId: 
ydb://session/3?node_id=1&id=NzhkOWEzNDctNmQ1NDFhNzMtZjA1ODhlN2QtMjEzZDU4YTM=, ActorId: [1:7486792606261837335:3107], ActorState: ExecuteState, TraceId: 01jqe1x04tcqtnanyysv3d5baq, Create QueryResponse for error on request, msg: 2025-03-28T09:39:01.565181Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154741594, txId: 281474976710743] shutting down 2025-03-28T09:39:01.652253Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDU5NzdlM2MtZGRiOWNlMWMtYzA4YjAwNjMtMTUxY2Y0YTM=, ActorId: [1:7486792606261837354:3116], ActorState: ExecuteState, TraceId: 01jqe1x08s0whb1wygfkqtx8q1, Create QueryResponse for error on request, msg: 2025-03-28T09:39:01.702550Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154741727, txId: 281474976710745] shutting down 2025-03-28T09:39:01.785918Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzZlNWE0MTgtYzA0NDA4YWYtZjMxNzBjZWUtNzI0ZDNlNg==, ActorId: [1:7486792606261837469:3134], ActorState: ExecuteState, TraceId: 01jqe1x0cvafpnapjamd4wvyak, Create QueryResponse for error on request, msg: 2025-03-28T09:39:01.806403Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154741839, txId: 281474976710747] shutting down 2025-03-28T09:39:01.915845Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154741951, txId: 281474976710749] shutting down 2025-03-28T09:39:01.929960Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YTcyOTUzYzgtZjBjZGM3NWQtODgxMzcxOTEtMjllOGI2YQ==, ActorId: [1:7486792606261837609:3153], ActorState: ExecuteState, TraceId: 01jqe1x0h127dzt52h5m000kpc, Create QueryResponse for error on request, msg: 2025-03-28T09:39:02.054200Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154742091, txId: 281474976710751] shutting down Trying to start YDB, gRPC: 4391, MsgBus: 16832 2025-03-28T09:39:02.848568Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792608831598235:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:02.848640Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a7f/r3tmp/tmp8Ku4Bj/pdisk_1.dat 2025-03-28T09:39:02.965845Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:02.977915Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:02.977994Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:02.979317Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4391, node 2 2025-03-28T09:39:03.010804Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:03.010835Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:03.010845Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:03.010987Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server 
localhost:16832 TClient is connected to server localhost:16832 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:03.405654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:03.414956Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:39:03.428891Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:03.477461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:03.644943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:03.705370Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:05.685985Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792621716501891:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:05.686126Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:05.726358Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:05.756162Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:05.785204Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:05.812019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:05.842872Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:05.876970Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:05.955868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792621716502407:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:05.956023Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:05.956117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792621716502412:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:05.959047Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:05.966857Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792621716502414:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:06.045669Z node 2 :TX_PROXY ERROR: Actor# [2:7486792626011469765:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpScripting::EndOfQueryCommit >> TSchemeShardTest::CreateDropKesus [GOOD] >> TSchemeShardTest::CreateAlterKesus ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::ColumnTypeMismatch [GOOD] Test command err: Trying to start YDB, gRPC: 13480, MsgBus: 1824 2025-03-28T09:38:59.836516Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792596281901731:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:59.836658Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a4c/r3tmp/tmpRlARdz/pdisk_1.dat 2025-03-28T09:39:00.088270Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13480, node 1 2025-03-28T09:39:00.157105Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:00.157130Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:00.157136Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:00.157290Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:00.167627Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:00.167782Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:00.169518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:1824 TClient is connected to server localhost:1824 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:00.568377Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:00.594951Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:00.720258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:00.847023Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:00.907653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:02.345956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792609166805383:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:02.346197Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:02.553220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:02.574825Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:02.600199Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:02.625783Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:02.671039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:02.696337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:02.731046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792609166805889:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:02.731113Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:02.731167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792609166805894:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:02.734441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:02.741932Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792609166805896:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:02.796176Z node 1 :TX_PROXY ERROR: Actor# [1:7486792609166805950:3440] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:7:30: Error: At function: KiCreateTable!
:7:30: Error: Duplicate column: Value. Trying to start YDB, gRPC: 19016, MsgBus: 28731 2025-03-28T09:39:04.079846Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792617746637463:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:04.079955Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a4c/r3tmp/tmp7AWhWB/pdisk_1.dat 2025-03-28T09:39:04.166779Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19016, node 2 2025-03-28T09:39:04.215862Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:04.215953Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:04.217698Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:04.224193Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:04.224210Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:04.224215Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:04.224300Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28731 TClient is connected to server localhost:28731 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:04.564259Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:04.591667Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:04.629373Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:04.761653Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:04.817675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:06.479132Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792626336573827:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:06.479212Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:06.517529Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:06.543793Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:06.606884Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:06.632366Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:06.660401Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:06.691737Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:06.729673Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792626336574336:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:06.729733Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792626336574341:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:06.729738Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:06.732564Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:06.740275Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792626336574343:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:06.826237Z node 2 :TX_PROXY ERROR: Actor# [2:7486792626336574397:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:07.543236Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486792630631541954:2492], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 2025-03-28T09:39:07.543472Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=OWE1OWUwMmYtMzhiYWYzYTQtMmQ4OTFjZDYtYjdjNWRmYjA=, ActorId: [2:7486792630631541944:2487], ActorState: ExecuteState, TraceId: 01jqe1x646b9zct49mwf66gq1d, ReplyQueryCompileError, status GENERIC_ERROR remove tx with tx_id:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 >> KqpYql::BinaryJsonOffsetBound [GOOD] >> KqpYql::AnsiIn >> KqpYql::BinaryJsonOffsetNormal [GOOD] >> KqpYql::Closure >> KqpYql::TestUuidPrimaryKeyPrefixSearch >> KqpYql::PgIntPrimaryKey [GOOD] >> KqpScripting::ScriptExplain [GOOD] >> KqpYql::EvaluateFor [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::AffectedShardsLimit [GOOD] Test command err: Trying to start YDB, gRPC: 23971, MsgBus: 12453 2025-03-28T09:38:45.463666Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792539829708786:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:45.463882Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00147b/r3tmp/tmp1Oc4DC/pdisk_1.dat 2025-03-28T09:38:45.705083Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23971, node 1 2025-03-28T09:38:45.761394Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:45.761414Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:45.761424Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:45.761546Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:38:45.799250Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:45.799353Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:45.801215Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12453 TClient is connected to server localhost:12453 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:46.171423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:46.191297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:38:46.306873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:46.442844Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:46.504950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:47.664754Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792548419645156:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:47.664858Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:47.889546Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:47.915147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:47.942744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:47.969661Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:47.996182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:48.025860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:48.089507Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792552714612964:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:48.089574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:48.089635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792552714612969:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:48.093088Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:48.102236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792552714612971:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:48.160691Z node 1 :TX_PROXY ERROR: Actor# [1:7486792552714613024:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.Text","Name":"Sort"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"Sort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key [150, 266]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"row.Text","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 1812, MsgBus: 32458 2025-03-28T09:38:49.760311Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792556704711945:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:49.760511Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00147b/r3tmp/tmpenDhEk/pdisk_1.dat 2025-03-28T09:38:49.853468Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1812, node 2 2025-03-28T09:38:49.888017Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:49.888106Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:49.890453Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:38:49.913650Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:49.913673Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:49.913677Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:49.913749Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32458 TClient is connected to server localhost:32458 
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:50.249013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDoma ... 2075186224037979;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976715671; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["OlapTable"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"Value \u003E 0","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/OlapTable","E-Rows":"No estimate","Table":"OlapTable","ReadColumns":["Key","Value"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/OlapTable","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/OlapTable","E-Rows":"No estimate","Table":"OlapTable","ReadColumns":["Key","Value"],"SsaProgram":{"Version":5,"Command":[{"Assign":{"Constant":{"Int32":0},"Column":{"Id":3}}},{"Assign":{"Function":{"YqlOperationId":15,"KernelIdx":0,"FunctionType":2,"Arguments":[{"Id":2},{"Id":3}]},"Column":{"Id":4}}},{"Assign":{"Constant":{"Uint8":0},"Column":{"Id":5}}},{"Assign":{"Function":{"YqlOperationId":17,"KernelIdx":1,"FunctionType":2,"Arguments":[{"Id":4},{"Id":5}]},"Column":{"Id":6}}},{"Filter":{"Predicate":{"Id":6}}},{"Projection":{"Columns":[{"Id":1},{"Id":2}]}}]},"E-Cost":"No estimate"}],"Node 
Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Value \u003E 0","Pushdown":"True","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 1643, MsgBus: 18346 2025-03-28T09:39:01.888608Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486792605739340003:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:01.888683Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00147b/r3tmp/tmpCjXJa7/pdisk_1.dat 2025-03-28T09:39:01.978905Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 1643, node 4 2025-03-28T09:39:02.022664Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:02.022787Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:02.024104Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:02.030581Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:02.030614Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:02.030622Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:02.030814Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18346 TClient is connected to server localhost:18346 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:02.552232Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:02.568957Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:02.619437Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:02.763737Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:02.835022Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:05.331292Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792622919210960:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:05.331378Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:05.403373Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:05.434333Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:05.463795Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:05.489895Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:05.515349Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:05.543455Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:05.583431Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792622919211466:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:05.583508Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792622919211471:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:05.583518Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:05.587025Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:05.596872Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486792622919211473:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:05.680164Z node 4 :TX_PROXY ERROR: Actor# [4:7486792622919211527:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:06.698754Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:06.894074Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486792605739340003:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:06.894761Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:06.926871Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:07.806103Z node 4 :KQP_EXECUTER WARN: ActorId: [4:7486792631509148508:2614] TxId: 281474976715674. Ctx: { TraceId: 01jqe1x6341p0811q6fwpm9aax, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NTFiOGFlZmUtZWZmZjYwOTYtMmI1NzM2NWMtOGE5YjI4MTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Too many affected shards: datashardTasks=21, limit: 20 2025-03-28T09:39:07.806357Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NTFiOGFlZmUtZWZmZjYwOTYtMmI1NzM2NWMtOGE5YjI4MTE=, ActorId: [4:7486792631509148106:2614], ActorState: ExecuteState, TraceId: 01jqe1x6341p0811q6fwpm9aax, Create QueryResponse for error on request, msg:
: Error: Affected too many shards: 0, code: 2029 >> KqpYql::UuidPrimaryKeyDisabled >> TSchemeShardTest::CreateAlterKesus [GOOD] >> TSchemeShardTest::CreateDropSolomon >> KqpYql::InsertCVList-useSink [GOOD] >> KqpQuery::RowsLimitServiceOverride [GOOD] >> KqpQuery::SelectCountAsteriskFromVar >> TPQTest::TestWritePQ [GOOD] >> TPQTest::TestWriteSplit >> KqpScripting::ScriptingCreateAndAlterTableTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::PgIntPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 3385, MsgBus: 22279 2025-03-28T09:39:00.072377Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792604440549976:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:00.072480Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a45/r3tmp/tmpIw0sYU/pdisk_1.dat 2025-03-28T09:39:00.326984Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3385, node 1 2025-03-28T09:39:00.398076Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:00.398108Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:00.398119Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:00.398255Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:00.425756Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:00.425922Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:00.427943Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:22279 TClient is connected to server localhost:22279 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:00.837850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:00.851850Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:00.961035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:01.080155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:01.151141Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:02.848172Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792613030486343:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:02.848294Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.155638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.182045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.207713Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.231370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.255059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.284622Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.321325Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792617325454149:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.321425Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.321575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792617325454154:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.325451Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:03.333987Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792617325454156:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:03.431797Z node 1 :TX_PROXY ERROR: Actor# [1:7486792617325454210:3443] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Optimization, code: 1070
:4:20: Error: RefSelect mode isn't supported by provider: kikimr Trying to start YDB, gRPC: 2555, MsgBus: 23934 2025-03-28T09:39:05.052351Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792625123015765:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:05.052454Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a45/r3tmp/tmpEhceg0/pdisk_1.dat 2025-03-28T09:39:05.153958Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2555, node 2 2025-03-28T09:39:05.192788Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:05.192865Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:05.194426Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:05.219234Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:05.219267Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:05.219275Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:05.219392Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23934 TClient is connected to server localhost:23934 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:05.559566Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:07.516963Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792633712951015:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.517044Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.525687Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.562774Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792633712951115:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.562860Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.562889Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792633712951120:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.567092Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:39:07.580343Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792633712951122:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:39:07.638573Z node 2 :TX_PROXY ERROR: Actor# [2:7486792633712951173:2391] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateFor [GOOD] Test command err: Trying to start YDB, gRPC: 3859, MsgBus: 20504 2025-03-28T09:39:00.581104Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792604456478606:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:00.581171Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0009ff/r3tmp/tmp2MpfK7/pdisk_1.dat 2025-03-28T09:39:00.906892Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3859, node 1 2025-03-28T09:39:00.954346Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:00.954367Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:00.954376Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:00.954573Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:00.965345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:00.965440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:00.967144Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:20504 TClient is connected to server localhost:20504 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:01.428714Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:01.444873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:01.545865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:01.684528Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:01.759364Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:03.408617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792617341382269:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.408713Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.727353Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.753734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.781122Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.808087Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.835653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.865877Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.905217Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792617341382784:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.905263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.905316Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792617341382789:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.908880Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:03.917379Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792617341382791:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:03.984709Z node 1 :TX_PROXY ERROR: Actor# [1:7486792617341382845:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 4706, MsgBus: 2969 2025-03-28T09:39:05.568974Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792625564987944:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:05.569075Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0009ff/r3tmp/tmpXoxBha/pdisk_1.dat 2025-03-28T09:39:05.675674Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4706, node 2 2025-03-28T09:39:05.711518Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:05.711599Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:05.713362Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:05.739525Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:05.739559Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:05.739568Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:05.739667Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2969 TClient is connected to server localhost:2969 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:06.087609Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:06.104743Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
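The warning/retry sequence that recurs throughout these runs — "Resource pool default not found or you don't have access permissions" (NOT_FOUND), the TPoolCreatorActor's "Transaction ... completed, doublechecking" retry, then the TX_PROXY "path exist, request accepts it" issue — is one benign bootstrap race: the workload service lazily creates /Root/.metadata/workload_manager/pools/default on first use, and a concurrent creation attempt then finds the path already present, which the request tolerates. A pool of the same kind can also be declared explicitly; a sketch under the assumption that workload-manager DDL is enabled, with arbitrary example limits (none of these values come from this run):

    -- Sketch only: the pool name and both limit values are illustrative.
    CREATE RESOURCE POOL sample_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );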
2025-03-28T09:39:06.161485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:06.289124Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:06.350394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:08.403524Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792638449891597:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.403611Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.433984Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.461937Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.489031Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.514772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.541133Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.606447Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.648258Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792638449892110:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.648339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792638449892115:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.648339Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.650662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:08.658438Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792638449892117:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:08.726286Z node 2 :TX_PROXY ERROR: Actor# [2:7486792638449892171:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptExplain [GOOD] Test command err: Trying to start YDB, gRPC: 29002, MsgBus: 7077 2025-03-28T09:38:58.573824Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792595775684143:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:58.573929Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a5e/r3tmp/tmpy6xraS/pdisk_1.dat 2025-03-28T09:38:58.864220Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29002, node 1 2025-03-28T09:38:58.902795Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:58.902815Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:58.902821Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:58.902949Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:38:58.932045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:58.932198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:58.933727Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7077 TClient is connected to server localhost:7077 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:59.295544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:38:59.327062Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:59.453781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:59.583301Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:59.659098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:01.278806Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792608660587802:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:01.278917Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:01.494263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.525930Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.554259Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.586048Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.616191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.683035Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.760046Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792608660588321:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:01.760141Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:01.760299Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792608660588326:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:01.763466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:01.771728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792608660588328:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:01.866017Z node 1 :TX_PROXY ERROR: Actor# [1:7486792608660588383:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:02.693267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.574221Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792595775684143:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:03.574332Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6172, MsgBus: 9583 2025-03-28T09:39:05.065415Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792624132496279:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:05.065519Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a5e/r3tmp/tmp4oMOai/pdisk_1.dat 2025-03-28T09:39:05.160065Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 6172, node 2 2025-03-28T09:39:05.200755Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:05.200858Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:05.202603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:05.222718Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:05.222739Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:05.222747Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:05.222874Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:9583 TClient is connected to server localhost:9583 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:05.594101Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:05.601761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:05.644119Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:05.785449Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:05.865094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:07.594873Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792632722432635:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.594970Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.621728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.647617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.670072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.695553Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.724701Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.792772Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.829739Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792632722433149:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.829804Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.829816Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792632722433154:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.832661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:07.840455Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792632722433156:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:07.902458Z node 2 :TX_PROXY ERROR: Actor# [2:7486792632722433210:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:09.284023Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486792641312368124:2493], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:168: Error: At function: DataQueryBlocks
:1:185: Error: At function: TKiDataQueryBlock
:1:208: Error: At function: KiEffects
:1:219: Error: At function: KiWriteTable!
:1:219: Error: Cannot find table 'db.[/Root/ScriptingTest]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:39:09.284234Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjAzMjExYzYtNjIyNjFjNS00Y2FlODkyMC04ZmRhZGU2YQ==, ActorId: [2:7486792641312368122:2492], ActorState: ExecuteState, TraceId: 01jqe1x7tq81cpxmtt6mbphjr7, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: >> KqpYql::TableConcat [GOOD] >> KqpYql::TableNameConflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCVList-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 16763, MsgBus: 21581 2025-03-28T09:39:00.609350Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792602875024091:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:00.609434Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a2d/r3tmp/tmpYHezBg/pdisk_1.dat 2025-03-28T09:39:00.873132Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16763, node 1 2025-03-28T09:39:00.948500Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:00.948523Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:00.948536Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:00.948647Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:00.951192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:00.951310Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:00.953028Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21581 TClient is connected to server localhost:21581 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:01.336780Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
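The SCHEME_ERROR above (KqpScripting::ScriptExplain, code 2003) is the expected failure when a script writes to a table that does not exist: type annotation descends DataQueryBlocks → TKiDataQueryBlock → KiEffects → KiWriteTable! and stops at the unresolved path /Root/ScriptingTest. A sketch of a script that avoids it by creating the table first, with an illustrative schema and an explicit COMMIT separating the scheme operation from the write:

    CREATE TABLE `/Root/ScriptingTest` (
        Key Uint64,
        Value String,
        PRIMARY KEY (Key)
    );
    COMMIT;  -- let the scheme change complete before the write below
    UPSERT INTO `/Root/ScriptingTest` (Key, Value) VALUES (1, "one");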
2025-03-28T09:39:01.367396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:01.495709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:01.631167Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:01.710191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:03.115864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792615759927756:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.116017Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.362875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.390636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.415242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.440836Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.467676Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.503929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.578679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792615759928268:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.578766Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.579062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792615759928273:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.582375Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:03.592547Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792615759928275:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:03.656907Z node 1 :TX_PROXY ERROR: Actor# [1:7486792615759928330:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:04.336557Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Duplicate keys have been found.;tx_id=3; 2025-03-28T09:39:04.343332Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 3 at tablet 72075186224037914 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:39:04.343468Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 3 at tablet 72075186224037914 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Duplicate keys have been found." issue_code: 2012 severity: 1 } 2025-03-28T09:39:04.343650Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792620054895939:2500], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [1:7486792620054895919:2500]Got CONSTRAINT VIOLATION for table `/Root/Test`. ShardID=72075186224037914, Sink=[1:7486792620054895939:2500].{
: Error: Duplicate keys have been found., code: 2012 } 2025-03-28T09:39:04.344023Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792620054895930:2500], SessionActorId: [1:7486792620054895919:2500], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 . sessionActorId=[1:7486792620054895919:2500]. isRollback=0 2025-03-28T09:39:04.344208Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTIzYTBjZjktMzE1ZTNkMmEtMjU5NGJlY2QtYjU3YTM4Yzk=, ActorId: [1:7486792620054895919:2500], ActorState: ExecuteState, TraceId: 01jqe1x2yk31pzx3hj9597kmef, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7486792620054895931:2500] from: [1:7486792620054895930:2500] 2025-03-28T09:39:04.344281Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486792620054895931:2500] TxId: 281474976710671. Ctx: { TraceId: 01jqe1x2yk31pzx3hj9597kmef, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTIzYTBjZjktMzE1ZTNkMmEtMjU5NGJlY2QtYjU3YTM4Yzk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Constraint violated. Table: `/Root/Test`., code: 2012 subissue: {
: Error: Duplicate keys have been found., code: 2012 } } 2025-03-28T09:39:04.344953Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MTIzYTBjZjktMzE1ZTNkMmEtMjU5NGJlY2QtYjU3YTM4Yzk=, ActorId: [1:7486792620054895919:2500], ActorState: ExecuteState, TraceId: 01jqe1x2yk31pzx3hj9597kmef, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Duplicate keys have been found., code: 2012 Trying to start YDB, gRPC: 8964, MsgBus: 8735 2025-03-28T09:39:05.085070Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792622711532178:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:05.085146Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a2d/r3tmp/tmpeLwKXh/pdisk_1.dat 2025-03-28T09:39:05.199735Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:05.211795Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:05.211882Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:05.213513Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8964, node 2 2025-03-28T09:39:05.256868Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:05.256891Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:05.256899Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:05.257011Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8735 TClient is connected to server localhost:8735 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:05.601273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:05.616608Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:05.665130Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
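The PRECONDITION_FAILED chain above — the datashard's STATUS_CONSTRAINT_VIOLATION, KQP_COMPUTE's "Got CONSTRAINT VIOLATION for table `/Root/Test`", and the final "Execution, code: 1060 / Constraint violated / Duplicate keys have been found., code: 2012" response — is the behavior KqpYql::InsertCVList provokes: an INSERT whose VALUES list repeats a primary key. A minimal reproduction sketch; the column names are illustrative, not necessarily the test's schema:

    -- Both rows carry the same primary key, so the statement fails with
    -- 'Duplicate keys have been found., code: 2012'.
    INSERT INTO `/Root/Test` (Group, Name, Amount)
    VALUES (1u, "Anna", 3500ul),
           (1u, "Anna", 300ul);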
2025-03-28T09:39:05.799384Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:05.866147Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:08.196411Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792635596435844:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.196532Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.228841Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.251661Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.315404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.339896Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.370917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.397438Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.471694Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792635596436360:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.471781Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.471800Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792635596436365:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.475102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:08.484048Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792635596436367:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:08.547582Z node 2 :TX_PROXY ERROR: Actor# [2:7486792635596436421:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:09.523639Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792639891404033:2502], TxId: 281474976715672, task: 1. Ctx: { TraceId : 01jqe1x7tr9ptdb9ssakj8fmhf. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=2&id=MWI3Mjc0NWQtNDBjMzAxNGEtOGY0ZTg3ODMtNGNmZWFhOTg=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2025-03-28T09:39:09.523900Z node 2 :KQP_COMPUTE ERROR: SelfId: [2:7486792639891404035:2503], TxId: 281474976715672, task: 2. Ctx: { TraceId : 01jqe1x7tr9ptdb9ssakj8fmhf. SessionId : ydb://session/3?node_id=2&id=MWI3Mjc0NWQtNDBjMzAxNGEtOGY0ZTg3ODMtNGNmZWFhOTg=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [2:7486792639891404030:2493], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T09:39:09.524198Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=MWI3Mjc0NWQtNDBjMzAxNGEtOGY0ZTg3ODMtNGNmZWFhOTg=, ActorId: [2:7486792639891403982:2493], ActorState: ExecuteState, TraceId: 01jqe1x7tr9ptdb9ssakj8fmhf, Create QueryResponse for error on request, msg:
: Error: Execution, code: 1060
: Error: Duplicated keys found., code: 2012 >> KqpYql::ScriptUdf [GOOD] >> KqpYql::SelectNoAsciiValue >> KqpScripting::ScanQuery [GOOD] >> KqpScripting::ScanQueryDisable >> KqpYql::UuidPrimaryKeyBulkUpsert >> TSchemeShardTest::CreateDropSolomon [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon >> KqpYql::TestUuidDefaultColumn [GOOD] >> TPQTest::TestWriteSplit [GOOD] >> TPQTest::TestWriteTimeLag >> KqpScripting::StreamExecuteYqlScriptMixed >> KqpYql::UpdateBadType >> KqpPragma::OrderedColumns >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce >> TKeyValueTest::TestGetStatusWorks [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] >> KqpYql::EvaluateExpr2 [GOOD] >> KqpYql::EvaluateExpr3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidDefaultColumn [GOOD] Test command err: Trying to start YDB, gRPC: 13567, MsgBus: 13242 2025-03-28T09:39:08.580780Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792636836980950:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:08.580871Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000832/r3tmp/tmph67cBg/pdisk_1.dat 2025-03-28T09:39:08.903119Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13567, node 1 2025-03-28T09:39:08.965916Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:08.965941Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:08.965947Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:08.966081Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:08.975345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:08.975495Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:08.977119Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13242 TClient is connected to server localhost:13242 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
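The -useSink variant finishing just above surfaces the same violation through the write sink instead of the datashard: KQP_COMPUTE reports KIKIMR_CONSTRAINT_VIOLATION "Duplicated keys found." and aborts the sibling task with "Terminate execution", but the user-visible result is the same code 2012. When overwriting is intended, UPSERT avoids the error because it is last-write-wins on the primary key; a sketch using the same illustrative columns as above:

    UPSERT INTO `/Root/Test` (Group, Name, Amount) VALUES (1u, "Anna", 3500ul);
    -- A second upsert with the same key overwrites the row instead of failing:
    UPSERT INTO `/Root/Test` (Group, Name, Amount) VALUES (1u, "Anna", 300ul);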
2025-03-28T09:39:09.396096Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:11.025901Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792649721883500:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.026020Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.323781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.442747Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792649721883602:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.442851Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.442880Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792649721883607:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.446242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:39:11.454525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792649721883609:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:39:11.529109Z node 1 :TX_PROXY ERROR: Actor# [1:7486792649721883660:2393] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TKeyValueTest::TestCleanUpDataWithMockDisk [GOOD] >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] >> KqpYql::UuidPrimaryKeyDisabled [GOOD] >> KqpScripting::ScanQueryTruncate [GOOD] >> KqpScripting::UnsafeTimestampCast [GOOD] >> KqpScripting::SystemTables ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:86:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:92:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:91:2118] Leader for TabletID 72057594037927937 is [11:91:2118] sender: [11:145:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... or TabletID 72057594037927937 is [29:56:2097] sender: [29:89:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [29:56:2097] sender: [29:90:2057] recipient: [29:88:2117] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:92:2057] recipient: [29:88:2117] !Reboot 72057594037927937 (actor [29:56:2097]) rebooted! !Reboot 72057594037927937 (actor [29:56:2097]) tablet resolver refreshed! 
new actor is[29:91:2118] Leader for TabletID 72057594037927937 is [29:91:2118] sender: [29:145:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:54:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:57:2057] recipient: [30:51:2095] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:74:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:87:2057] recipient: [30:36:2083] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:90:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [30:56:2097] sender: [30:91:2057] recipient: [30:89:2117] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:93:2057] recipient: [30:89:2117] !Reboot 72057594037927937 (actor [30:56:2097]) rebooted! !Reboot 72057594037927937 (actor [30:56:2097]) tablet resolver refreshed! new actor is[30:92:2118] Leader for TabletID 72057594037927937 is [30:92:2118] sender: [30:146:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:54:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:57:2057] recipient: [31:51:2095] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:74:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:90:2057] recipient: [31:36:2083] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:93:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [31:56:2097] sender: [31:94:2057] recipient: [31:92:2120] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:96:2057] recipient: [31:92:2120] !Reboot 72057594037927937 (actor [31:56:2097]) rebooted! !Reboot 72057594037927937 (actor [31:56:2097]) tablet resolver refreshed! new actor is[31:95:2121] Leader for TabletID 72057594037927937 is [31:95:2121] sender: [31:149:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:54:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:57:2057] recipient: [32:51:2095] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:74:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:90:2057] recipient: [32:36:2083] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:93:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [32:56:2097] sender: [32:94:2057] recipient: [32:92:2120] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:96:2057] recipient: [32:92:2120] !Reboot 72057594037927937 (actor [32:56:2097]) rebooted! !Reboot 72057594037927937 (actor [32:56:2097]) tablet resolver refreshed! 
new actor is[32:95:2121] Leader for TabletID 72057594037927937 is [32:95:2121] sender: [32:149:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:54:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:57:2057] recipient: [33:51:2095] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:74:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:76:2057] recipient: [35:36:2083] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:78:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:80:2057] recipient: [35:79:2110] Leader for TabletID 72057594037927937 is [35:81:2111] sender: [35:82:2057] recipient: [35:79:2110] !Reboot 72057594037927937 (actor [35:56:2097]) rebooted! !Reboot 72057594037927937 (actor [35:56:2097]) tablet resolver refreshed! new actor is[35:81:2111] Leader for TabletID 72057594037927937 is [35:81:2111] sender: [35:135:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:76:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:79:2057] recipient: [36:78:2110] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:80:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:82:2057] recipient: [36:78:2110] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! 
new actor is[36:81:2111] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:135:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:52:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:52:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:77:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:80:2057] recipient: [37:79:2110] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:81:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:82:2111] sender: [37:83:2057] recipient: [37:79:2110] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! new actor is[37:82:2111] Leader for TabletID 72057594037927937 is [37:82:2111] sender: [37:136:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:51:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:51:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:79:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:82:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:83:2057] recipient: [38:81:2112] Leader for TabletID 72057594037927937 is [38:84:2113] sender: [38:85:2057] recipient: [38:81:2112] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! new actor is[38:84:2113] Leader for TabletID 72057594037927937 is [38:84:2113] sender: [38:138:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:79:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:82:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:83:2057] recipient: [39:81:2112] Leader for TabletID 72057594037927937 is [39:84:2113] sender: [39:85:2057] recipient: [39:81:2112] !Reboot 72057594037927937 (actor [39:56:2097]) rebooted! !Reboot 72057594037927937 (actor [39:56:2097]) tablet resolver refreshed! 
new actor is[39:84:2113] Leader for TabletID 72057594037927937 is [39:84:2113] sender: [39:138:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:57:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:74:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:80:2057] recipient: [40:36:2083] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:83:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:84:2057] recipient: [40:82:2112] Leader for TabletID 72057594037927937 is [40:85:2113] sender: [40:86:2057] recipient: [40:82:2112] !Reboot 72057594037927937 (actor [40:56:2097]) rebooted! !Reboot 72057594037927937 (actor [40:56:2097]) tablet resolver refreshed! new actor is[40:85:2113] Leader for TabletID 72057594037927937 is [40:85:2113] sender: [40:139:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:57:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:74:2057] recipient: [41:14:2061] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCleanUpDataWithMockDisk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! 
new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvForceTabletDataCleanup ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:80:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:82:2113] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:86:2057] recipient: [7:82:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:85:2114] Leader for TabletID 72057594037927937 is [7:85:2114] sender: [7:139:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:85:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:89:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:90:2118] sender: [8:91:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:90:2118] Leader for TabletID 72057594037927937 is [8:90:2118] sender: [8:144:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:89:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:92:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:93:2057] recipient: [9:91:2121] Leader for TabletID 72057594037927937 is [9:94:2122] sender: [9:95:2057] recipient: [9:91:2121] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:94:2122] Leader for TabletID 72057594037927937 is [9:94:2122] sender: [9:148:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:92:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:93:2057] recipient: [10:91:2121] Leader for TabletID 72057594037927937 is [10:94:2122] sender: [10:95:2057] recipient: [10:91:2121] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:94:2122] Leader for TabletID 72057594037927937 is [10:94:2122] sender: [10:148:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:94:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:95:2057] recipient: [11:93:2123] Leader for TabletID 72057594037927937 is [11:96:2124] sender: [11:97:2057] recipient: [11:93:2123] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:96:2124] Leader for TabletID 72057594037927937 is [11:96:2124] sender: [11:150:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:1 ... 57594037927937 is [35:99:2125] sender: [35:153:2057] recipient: [35:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:17:2064] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:95:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:98:2057] recipient: [36:17:2064] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:99:2057] recipient: [36:97:2124] Leader for TabletID 72057594037927937 is [36:100:2125] sender: [36:101:2057] recipient: [36:97:2124] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:100:2125] Leader for TabletID 72057594037927937 is [36:100:2125] sender: [36:154:2057] recipient: [36:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:52:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:52:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:17:2064] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:98:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:101:2057] recipient: [37:17:2064] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:102:2057] recipient: [37:100:2127] Leader for TabletID 72057594037927937 is [37:103:2128] sender: [37:104:2057] recipient: [37:100:2127] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:103:2128] Leader for TabletID 72057594037927937 is [37:103:2128] sender: [37:157:2057] recipient: [37:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:17:2064] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:98:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:101:2057] recipient: [38:17:2064] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:102:2057] recipient: [38:100:2127] Leader for TabletID 72057594037927937 is [38:103:2128] sender: [38:104:2057] recipient: [38:100:2127] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! new actor is[38:103:2128] Leader for TabletID 72057594037927937 is [38:103:2128] sender: [38:157:2057] recipient: [38:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:53:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:17:2064] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:99:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:102:2057] recipient: [39:17:2064] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:103:2057] recipient: [39:101:2127] Leader for TabletID 72057594037927937 is [39:104:2128] sender: [39:105:2057] recipient: [39:101:2127] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:104:2128] Leader for TabletID 72057594037927937 is [39:104:2128] sender: [39:158:2057] recipient: [39:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:17:2064] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:100:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:102:2057] recipient: [40:17:2064] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:104:2057] recipient: [40:103:2128] Leader for TabletID 72057594037927937 is [40:105:2129] sender: [40:106:2057] recipient: [40:103:2128] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:105:2129] Leader for TabletID 72057594037927937 is [40:105:2129] sender: [40:125:2057] recipient: [40:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:17:2064] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! 
Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:101:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:104:2057] recipient: [41:17:2064] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:105:2057] recipient: [41:103:2129] Leader for TabletID 72057594037927937 is [41:106:2130] sender: [41:107:2057] recipient: [41:103:2129] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:106:2130] Leader for TabletID 72057594037927937 is [41:106:2130] sender: [41:126:2057] recipient: [41:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:53:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:17:2064] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:104:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:107:2057] recipient: [42:17:2064] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:108:2057] recipient: [42:106:2132] Leader for TabletID 72057594037927937 is [42:109:2133] sender: [42:110:2057] recipient: [42:106:2132] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:109:2133] Leader for TabletID 72057594037927937 is [42:109:2133] sender: [42:163:2057] recipient: [42:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:17:2064] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvCleanUpDataRequest ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:104:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:107:2057] recipient: [43:17:2064] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:108:2057] recipient: [43:106:2132] Leader for TabletID 72057594037927937 is [43:109:2133] sender: [43:110:2057] recipient: [43:106:2132] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! new actor is[43:109:2133] Leader for TabletID 72057594037927937 is [43:109:2133] sender: [43:163:2057] recipient: [43:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:17:2064] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvForceTabletDataCleanup ! 
Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:104:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:107:2057] recipient: [44:17:2064] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:108:2057] recipient: [44:106:2132] Leader for TabletID 72057594037927937 is [44:109:2133] sender: [44:110:2057] recipient: [44:106:2132] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! new actor is[44:109:2133] Leader for TabletID 72057594037927937 is [44:109:2133] sender: [44:163:2057] recipient: [44:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:17:2064] !Reboot 72057594037927937 (actor [45:58:2099]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:109:2057] recipient: [45:39:2086] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:111:2057] recipient: [45:17:2064] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:113:2057] recipient: [45:112:2136] Leader for TabletID 72057594037927937 is [45:114:2137] sender: [45:115:2057] recipient: [45:112:2136] !Reboot 72057594037927937 (actor [45:58:2099]) rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! new actor is[45:114:2137] Leader for TabletID 72057594037927937 is [45:114:2137] sender: [45:168:2057] recipient: [45:17:2064] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:54:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:54:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:17:2064] >> TPQTest::TestWriteTimeLag [GOOD] >> KqpYql::AnsiIn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:38:42.728068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:42.728145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:42.728173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:42.728196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:42.728227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:42.728246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:42.728296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:42.728383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:42.728696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:42.784365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:42.784401Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:42.794652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:42.794865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:42.794995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:42.800464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:42.800727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:42.801478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:42.801737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:42.803807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:42.804903Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:42.804949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:42.805113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:42.805151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:42.805189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:42.805248Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:42.810578Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:42.913522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:42.913695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:42.913883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:42.914102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:42.914160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:42.916172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:42.916275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:42.916399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:42.916456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:42.916480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:42.916505Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:42.917853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:42.917894Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:42.917922Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:42.919024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:42.919055Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:42.919089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:42.919134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:42.921467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:42.922744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:42.922877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:42.923613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:42.923726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:42.923764Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:42.923981Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:42.924020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:42.924154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:42.924230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:42.925576Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:42.925641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:42.925772Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:42.925804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:42.926095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:42.926130Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:42.926199Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:42.926226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:42.926255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:42.926290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:42.926319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:42.926349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:42.926377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:38:42.926401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:42.926448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:42.926484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:42.926514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:42.927813Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:42.927909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, 
at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:42.927937Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... ], 18446744073709551615 2025-03-28T09:39:12.814048Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:39:12.814139Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:39:12.814180Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:39:12.814248Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2025-03-28T09:39:12.814310Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2025-03-28T09:39:12.814724Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:39:12.814801Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:39:12.814831Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:39:12.814863Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2025-03-28T09:39:12.814898Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:12.814976Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-03-28T09:39:12.817893Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:39:12.817964Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:39:12.817998Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:39:12.818029Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:39:12.818188Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:39:12.818762Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2025-03-28T09:39:12.819050Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 3 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 
2025-03-28T09:39:12.820290Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2025-03-28T09:39:12.820675Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:39:12.821345Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409546 2025-03-28T09:39:12.821568Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:12.821825Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:39:12.822620Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 4 TabletID: 72075186233409549 2025-03-28T09:39:12.822869Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 4 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2025-03-28T09:39:12.823054Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409546 2025-03-28T09:39:12.824570Z node 15 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409547 2025-03-28T09:39:12.824735Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2025-03-28T09:39:12.824920Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:39:12.825195Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:39:12.825275Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:39:12.825382Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 Forgetting tablet 72075186233409549 Forgetting tablet 72075186233409547 2025-03-28T09:39:12.827703Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:3 2025-03-28T09:39:12.827786Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2025-03-28T09:39:12.830282Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:39:12.830327Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2025-03-28T09:39:12.830798Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:4 2025-03-28T09:39:12.830838Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 
2025-03-28T09:39:12.830975Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2
2025-03-28T09:39:12.831030Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547
2025-03-28T09:39:12.831581Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944
TestModificationResult got TxId: 103, wait until txId: 103
TestWaitNotification wait txId: 103
2025-03-28T09:39:12.831923Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion
2025-03-28T09:39:12.831991Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103
2025-03-28T09:39:12.832571Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944
2025-03-28T09:39:12.832702Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult
2025-03-28T09:39:12.832767Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [15:539:2493]
TestWaitNotification: OK eventTxId 103
2025-03-28T09:39:12.833482Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944
2025-03-28T09:39:12.833748Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Solomon" took 306us result status StatusPathDoesNotExist
2025-03-28T09:39:12.833958Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944
wait until 72075186233409546 is deleted
wait until 72075186233409547 is deleted
wait until 72075186233409548 is deleted
wait until 72075186233409549 is deleted
2025-03-28T09:39:12.834517Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546
2025-03-28T09:39:12.834611Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547
2025-03-28T09:39:12.834664Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548
2025-03-28T09:39:12.834755Z node 15 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549
Deleted tabletId 72075186233409546
Deleted tabletId 72075186233409547
Deleted tabletId 72075186233409548
Deleted tabletId 72075186233409549
2025-03-28T09:39:12.835333Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:39:12.835555Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 255us result status StatusSuccess 2025-03-28T09:39:12.836040Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155304.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=143155304.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123155304.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123155304.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154104.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=123154104.000000s;Name=;Codec=}; 2025-03-28T09:38:25.488390Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:38:25.558398Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:38:25.581735Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:38:25.581968Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:38:25.588158Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:38:25.588307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:38:25.588535Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:38:25.588616Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:38:25.588705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:38:25.588766Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:38:25.588831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:38:25.588908Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:38:25.588982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:38:25.589050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:38:25.589130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:38:25.589230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:38:25.607134Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:38:25.607391Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:38:25.607435Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:38:25.607578Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:38:25.607711Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:38:25.607778Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:38:25.607812Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:38:25.607876Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:38:25.607918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:38:25.607944Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:38:25.607962Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:38:25.608077Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:38:25.608121Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:38:25.608147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:38:25.608169Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:38:25.608230Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:38:25.608268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:38:25.608294Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:38:25.608315Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:38:25.608358Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:38:25.608383Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:38:25.608403Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 
2025-03-28T09:38:25.608434Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:38:25.608458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:38:25.608480Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:38:25.608844Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=62; 2025-03-28T09:38:25.608926Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=37; 2025-03-28T09:38:25.608992Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=29; 2025-03-28T09:38:25.609068Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-03-28T09:38:25.609237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:38:25.609296Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:38:25.609334Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:38:25.609528Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:38:25.609567Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:38:25.609594Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:38:25.609688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:38:25.609712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:38:25.609734Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:38:25.609910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normaliz ... 
:"Finish","f":1743154753093273,"d_finished":0,"c":0,"l":1743154753093722,"d":449},{"name":"task_result","f":1743154752623822,"d_finished":196400,"c":28,"l":1743154753090298,"d":196400}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1284:3291]->[1:1283:3290] 2025-03-28T09:39:13.094078Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:39:12.616533Z;index_granules=0;index_portions=4;index_batches=1731;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=5203504;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=5203504;selected_rows=0; 2025-03-28T09:39:13.094107Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:39:13.094297Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1284:3291];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-28T09:39:13.095734Z node 1 :TX_COLUMNSHARD DEBUG: Finished read cookie: 7 at tablet 9437184 2025-03-28T09:39:13.095952Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000006:max} readable: {1000000006:max} at tablet 9437184 2025-03-28T09:39:13.096064Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:39:13.096199Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:39:13.096245Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:39:13.096598Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-28T09:39:13.096663Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:39:13.097057Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000006:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1300:3307];trace_detailed=; 2025-03-28T09:39:13.097401Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-28T09:39:13.097580Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-28T09:39:13.097711Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:13.097811Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:13.098012Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:39:13.098109Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 
2025-03-28T09:39:13.098218Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:13.098257Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1300:3307] finished for tablet 9437184 2025-03-28T09:39:13.098620Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1299:3306];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743154753096998,"name":"_full_task","f":1743154753096998,"d_finished":0,"c":0,"l":1743154753098306,"d":1308},"events":[{"name":"bootstrap","f":1743154753097179,"d_finished":654,"c":1,"l":1743154753097833,"d":654},{"a":1743154753097994,"name":"ack","f":1743154753097994,"d_finished":0,"c":0,"l":1743154753098306,"d":312},{"a":1743154753097980,"name":"processing","f":1743154753097980,"d_finished":0,"c":0,"l":1743154753098306,"d":326},{"name":"ProduceResults","f":1743154753097650,"d_finished":392,"c":2,"l":1743154753098245,"d":392},{"a":1743154753098248,"name":"Finish","f":1743154753098248,"d_finished":0,"c":0,"l":1743154753098306,"d":58}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:13.098705Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1299:3306];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:39:13.099038Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1299:3306];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743154753096998,"name":"_full_task","f":1743154753096998,"d_finished":0,"c":0,"l":1743154753098751,"d":1753},"events":[{"name":"bootstrap","f":1743154753097179,"d_finished":654,"c":1,"l":1743154753097833,"d":654},{"a":1743154753097994,"name":"ack","f":1743154753097994,"d_finished":0,"c":0,"l":1743154753098751,"d":757},{"a":1743154753097980,"name":"processing","f":1743154753097980,"d_finished":0,"c":0,"l":1743154753098751,"d":771},{"name":"ProduceResults","f":1743154753097650,"d_finished":392,"c":2,"l":1743154753098245,"d":392},{"a":1743154753098248,"name":"Finish","f":1743154753098248,"d_finished":0,"c":0,"l":1743154753098751,"d":503}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1300:3307]->[1:1299:3306] 2025-03-28T09:39:13.099111Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:39:13.096642Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:39:13.099149Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:39:13.099245Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1300:3307];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 80000/5203504 0/0 >> KqpYql::Closure [GOOD] >> KqpScripting::EndOfQueryCommit [GOOD] >> KqpScripting::ExecuteYqlScriptPg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyDisabled [GOOD] Test command err: Trying to start YDB, gRPC: 26156, MsgBus: 27306 2025-03-28T09:39:10.326050Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792644845986554:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:10.326136Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007b8/r3tmp/tmpPkGHc1/pdisk_1.dat 2025-03-28T09:39:10.644683Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26156, node 1 2025-03-28T09:39:10.702374Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:10.702485Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:10.704544Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:10.726897Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:10.726923Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:10.726932Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:10.727017Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27306 TClient is connected to server localhost:27306 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:11.125474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:12.614125Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792653435921807:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:39:12.614241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:39:12.838422Z node 1 :TX_PROXY ERROR: Actor# [1:7486792653435921828:2305] txid# 281474976715658, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 }
2025-03-28T09:39:12.855798Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792653435921836:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:39:12.855896Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:39:12.869741Z node 1 :TX_PROXY ERROR: Actor# [1:7486792653435921843:2313] txid# 281474976715659, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 }
2025-03-28T09:39:12.877687Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792653435921851:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:39:12.877763Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:39:12.897494Z node 1 :TX_PROXY ERROR: Actor# [1:7486792653435921858:2321] txid# 281474976715660, issues: { message: "Uuid as primary key is forbiden by configuration: val" severity: 1 }
2025-03-28T09:39:12.905765Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792653435921866:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:39:12.905839Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:39:12.920710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
2025-03-28T09:39:13.024377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792657730889251:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:39:13.024465Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestWriteTimeLag [GOOD]
Test command err:
Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135]
Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135]
2025-03-28T09:37:49.915472Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-03-28T09:37:49.915574Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info
Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170]
IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170]
Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170]
Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061]
2025-03-28T09:37:49.932114Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig
2025-03-28T09:37:49.946359Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false }
2025-03-28T09:37:49.947242Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198]
2025-03-28T09:37:49.949055Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198]
2025-03-28T09:37:49.951079Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199]
2025-03-28T09:37:49.952691Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199]
2025-03-28T09:37:49.971232Z node 1 :PERSQUEUE INFO: new Cookie default|5f735121-a079cf13-162a75c4-b0bd9102_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-03-28T09:37:50.077791Z node 1 :PERSQUEUE INFO: new Cookie default|e6d7d41b-b9627731-8a9ac3b6-566771a2_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-03-28T09:37:50.149903Z node 1 :PERSQUEUE INFO: new Cookie default|a8b56e22-3dd611f6-c4d487c9-90c8ec9c_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-03-28T09:37:50.185270Z node 1 :PERSQUEUE INFO: new Cookie default|d159c66d-4a9e5334-cb181e9e-75c13711_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default
2025-03-28T09:37:50.193152Z node 1 :PERSQUEUE INFO: new Cookie
default|b59c8cbb-b198038b-a162889a-1a1950d2_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:37:50.198468Z node 1 :PERSQUEUE INFO: new Cookie default|d82ca711-adee3f70-c971dde0-cdba6d2d_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-03-28T09:37:50.702646Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:50.702743Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] 2025-03-28T09:37:50.715318Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:50.716062Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-28T09:37:50.716497Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:183:2196] 2025-03-28T09:37:50.718079Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:183:2196] 2025-03-28T09:37:50.719156Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:184:2197] 2025-03-28T09:37:50.720599Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:184:2197] 2025-03-28T09:37:50.738041Z node 2 :PERSQUEUE INFO: new Cookie default|6f63ad5a-bceb1448-1b7189ec-d72c987c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:37:50.831243Z node 2 :PERSQUEUE INFO: new Cookie default|9cf1c7b0-ce8fb7ad-819409a9-44247fcf_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:37:50.900651Z node 2 :PERSQUEUE INFO: new Cookie default|23aec608-701b0ed7-43bc4d23-812e5192_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:37:50.935600Z node 2 :PERSQUEUE INFO: new Cookie default|c9f1064a-28d76f1b-55c06d79-5c92437c_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 
2025-03-28T09:37:50.942473Z node 2 :PERSQUEUE INFO: new Cookie default|ddb655ec-88589482-86bc5c51-588a46bc_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvRequest ! Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:293:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:295:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:297:2057] recipient: [2:296:2293] Leader for TabletID 72057594037927937 is [2:298:2294] sender: [2:299:2057] recipient: [2:296:2293] 2025-03-28T09:37:50.984829Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:50.984894Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:37:50.985796Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:347:2335] 2025-03-28T09:37:50.988220Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:348:2336] 2025-03-28T09:37:50.998064Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:37:50.998141Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:348:2336] 2025-03-28T09:37:50.999148Z node 2 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:37:50.999208Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:347:2335] !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! !Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! 
new actor is[2:298:2294] Leader for TabletID 72057594037927937 is [2:298:2294] sender: [2:391:2057] recipient: [2:14:2061] 2025-03-28T09:37:52.173577Z node 2 :PERSQUEUE INFO: new Cookie default|ffe155cd-713747af-304530b5-6d00274f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-03-28T09:37:52.515200Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.515266Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-03-28T09:37:52.531401Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.532090Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 7864320 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-03-28T09:37:52.532594Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-03-28T09:37:52.534974Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-03-28T09:37:52.536559Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:186:2199] 2025-03-28T09:37:52.538403Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:186:2199] 2025-03-28T09:37:52.559052Z node 3 :PERSQUEUE INFO: new Cookie default|4f41773-cf1beb9c-82b785f3-b3dfa343_0 generated for partition 0 topic 'rt3.dc1--asdfgs--to ... 
Q: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2025-03-28T09:39:13.461120Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Config update version 63(current 62) received from actor [60:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 63 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 60 ReadRuleGenerations: 60 ReadRuleGenerations: 62 ReadRuleGenerations: 61 ReadRuleGenerations: 63 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 60 Important: false } Consumers { Name: "aaa" Generation: 60 Important: false } Consumers { Name: "another1" Generation: 62 Important: true } Consumers { Name: "important" Generation: 61 Important: true } Consumers { Name: "another" Generation: 63 Important: false } 2025-03-28T09:39:13.464488Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 63 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 60 ReadRuleGenerations: 60 ReadRuleGenerations: 62 ReadRuleGenerations: 61 ReadRuleGenerations: 63 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 60 Important: false } Consumers { Name: "aaa" Generation: 60 Important: false } Consumers { Name: "another1" Generation: 62 Important: true } Consumers { Name: "important" Generation: 61 Important: true } Consumers { Name: "another" Generation: 63 Important: false } 2025-03-28T09:39:13.464591Z node 60 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:39:13.464880Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] Topic 'topic' partition 0 user another reinit with generation 63 done 2025-03-28T09:39:13.465219Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:39:13.465290Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- delete ---------------- 2025-03-28T09:39:13.465348Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- write ----------------- 2025-03-28T09:39:13.465407Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] i0000000000 2025-03-28T09:39:13.465454Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, 
State: StateIdle] m0000000000cuser 2025-03-28T09:39:13.465484Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uuser 2025-03-28T09:39:13.465518Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000caaa 2025-03-28T09:39:13.465546Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uaaa 2025-03-28T09:39:13.465577Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000canother1 2025-03-28T09:39:13.465608Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uanother1 2025-03-28T09:39:13.465639Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000canother 2025-03-28T09:39:13.465669Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uanother 2025-03-28T09:39:13.465698Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000cimportant 2025-03-28T09:39:13.465726Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] m0000000000uimportant 2025-03-28T09:39:13.465756Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] _config_0 2025-03-28T09:39:13.465812Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] --- rename ---------------- 2025-03-28T09:39:13.465869Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] =========================== 2025-03-28T09:39:13.465998Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] Topic 'topic' partition 1 user another reinit with generation 63 done 2025-03-28T09:39:13.466228Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] === DumpKeyValueRequest === 2025-03-28T09:39:13.466261Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- delete ---------------- 2025-03-28T09:39:13.466296Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- write ----------------- 2025-03-28T09:39:13.466329Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] i0000000001 2025-03-28T09:39:13.466363Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cuser 2025-03-28T09:39:13.466395Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uuser 2025-03-28T09:39:13.466424Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001caaa 2025-03-28T09:39:13.466454Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uaaa 2025-03-28T09:39:13.466483Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001canother1 2025-03-28T09:39:13.466516Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uanother1 2025-03-28T09:39:13.466547Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001canother 2025-03-28T09:39:13.466579Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001uanother 2025-03-28T09:39:13.466609Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] m0000000001cimportant 2025-03-28T09:39:13.466642Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, 
Partition: 1, State: StateIdle] m0000000001uimportant 2025-03-28T09:39:13.466673Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] _config_1 2025-03-28T09:39:13.466729Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] --- rename ---------------- 2025-03-28T09:39:13.466763Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] =========================== 2025-03-28T09:39:13.466844Z node 60 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:39:13.467065Z node 60 :PERSQUEUE DEBUG: CacheProxy. Passthrough write request to KV 2025-03-28T09:39:13.472349Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 0, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:39:13.472682Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2025-03-28T09:39:13.473930Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937, Partition: 1, State: StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2025-03-28T09:39:13.474161Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] Handle TEvPQ::TEvPartitionConfigChanged 2025-03-28T09:39:13.474479Z node 60 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 63 actor [60:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 1099511627776 LifetimeSeconds: 0 ImportantClientId: "another1" ImportantClientId: "important" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 63 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 60 ReadRuleGenerations: 60 ReadRuleGenerations: 62 ReadRuleGenerations: 61 ReadRuleGenerations: 63 FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 60 Important: false } Consumers { Name: "aaa" Generation: 60 Important: false } Consumers { Name: "another1" Generation: 62 Important: true } Consumers { Name: "important" Generation: 61 Important: true } Consumers { Name: "another" Generation: 63 Important: false } 2025-03-28T09:39:13.475308Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [60:617:2564], now have 1 active actors on pipe 2025-03-28T09:39:13.476239Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [60:620:2566], now have 1 active actors on pipe 2025-03-28T09:39:13.476436Z node 60 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:39:13.476510Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:39:13.476637Z node 60 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:39:13.477313Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [60:622:2568], now have 1 active actors on pipe 2025-03-28T09:39:13.477394Z node 60 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:39:13.477448Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message 
batch for topic 'topic' partition 0 2025-03-28T09:39:13.477556Z node 60 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:39:13.478098Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [60:624:2570], now have 1 active actors on pipe 2025-03-28T09:39:13.478260Z node 60 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:39:13.478322Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:39:13.478441Z node 60 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 2025-03-28T09:39:13.478985Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [60:626:2572], now have 1 active actors on pipe 2025-03-28T09:39:13.479198Z node 60 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'topic' requestId: 2025-03-28T09:39:13.479252Z node 60 :PERSQUEUE DEBUG: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2025-03-28T09:39:13.479365Z node 60 :PERSQUEUE DEBUG: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 0 >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryTruncate [GOOD] Test command err: Trying to start YDB, gRPC: 7512, MsgBus: 22557 2025-03-28T09:39:03.875594Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792615159556749:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:03.875681Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000999/r3tmp/tmpgtXqpp/pdisk_1.dat 2025-03-28T09:39:04.129798Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:04.134373Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:04.134498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:04.137158Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 7512, node 1 2025-03-28T09:39:04.195147Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:04.195183Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:04.195187Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:04.195336Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:22557 TClient is connected to server localhost:22557 WaitRootIsUp 'Root'... 
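Side note on the PERSQUEUE output above: the tablet accepts the TEvUpdateConfig only because version 63 is newer than the current 62, and the DumpKeyValueRequest blocks show the flat keys each partition writes to persist the new config. Below is a minimal C++ sketch of that key naming, inferred purely from the dump itself; the meaning of the "c"/"u" records is an assumption, not taken from the YDB sources.

```cpp
#include <cstdio>
#include <string>

// Key shapes visible in the DumpKeyValueRequest dump (inferred, hypothetical):
//   i<partition>             - partition info record
//   m<partition>c<consumer>  - per-consumer "c" metadata record
//   m<partition>u<consumer>  - per-consumer "u" metadata record
//   _config_<partition>      - serialized partition config
// Partition ids are zero-padded to 10 digits in the dump.
static std::string PaddedPartition(unsigned partition) {
    char buf[16];
    std::snprintf(buf, sizeof(buf), "%010u", partition);
    return buf;
}

int main() {
    const unsigned partition = 0;
    const std::string consumer = "user";
    std::printf("i%s\n", PaddedPartition(partition).c_str());                      // i0000000000
    std::printf("m%sc%s\n", PaddedPartition(partition).c_str(), consumer.c_str()); // m0000000000cuser
    std::printf("m%su%s\n", PaddedPartition(partition).c_str(), consumer.c_str()); // m0000000000uuser
    std::printf("_config_%u\n", partition);                                        // _config_0
    return 0;
}
```

Running it reproduces the key names listed for partition 0 above; partition 1 yields the m0000000001... variants.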
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:04.596521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:04.620176Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:04.723520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:04.857316Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:04.911706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:06.336548Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792628044460424:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:06.336690Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:06.661505Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:06.687725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:06.710662Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:06.732270Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:06.768322Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:06.791163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:06.823542Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792628044460931:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:06.823612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:06.823671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792628044460936:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:06.826242Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:06.833641Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792628044460938:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:06.932721Z node 1 :TX_PROXY ERROR: Actor# [1:7486792628044460994:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:07.781103Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486792632339428563:2495], status: PRECONDITION_FAILED, issues:
<main>: Error: Default error
<main>:1:746: Error: Scan query should have a single result set., code: 2029
<main>: Error: Default error
<main>:1:746: Error: Scan query should have a single result set., code: 2029 2025-03-28T09:39:07.781298Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YmM2MzQ2ZWQtYjAwYjI2MDUtNGU0ZjVkYTAtMzczYWY1Yjc=, ActorId: [1:7486792632339428561:2494], ActorState: ExecuteState, TraceId: 01jqe1x6br0g91bwnbes7ey926, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
<main>: Error: Execution, code: 1060
<main>: Error: Default error
<main>:1:746: Error: Scan query should have a single result set., code: 2029 2025-03-28T09:39:07.876169Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486792632339428608:2508], status: PRECONDITION_FAILED, issues:
<main>: Error: Default error
<main>:1:375: Error: Scan query cannot have data modifications., code: 2029
<main>: Error: Default error
<main>:1:375: Error: Scan query cannot have data modifications., code: 2029 2025-03-28T09:39:07.876360Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=M2RkZDIzZDItNzVlYmFlYmYtOGMyYTIyYzQtZjMxZjBmZjc=, ActorId: [1:7486792632339428606:2507], ActorState: ExecuteState, TraceId: 01jqe1x6eyezjy925yb362r5vx, ReplyQueryCompileError, status PRECONDITION_FAILED remove tx with tx_id:
<main>: Error: Execution, code: 1060
<main>: Error: Default error
<main>:1:375: Error: Scan query cannot have data modifications., code: 2029 Trying to start YDB, gRPC: 25721, MsgBus: 62441 2025-03-28T09:39:08.681540Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792635672030166:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:08.681647Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000999/r3tmp/tmp1d2IP4/pdisk_1.dat 2025-03-28T09:39:08.778178Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25721, node 2 2025-03-28T09:39:08.819091Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:08.819216Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:08.820536Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:08.838921Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:08.838946Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:08.838955Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:08.839083Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62441 TClient is connected to server localhost:62441 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:09.181555Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:09.196209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:09.266989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting...
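The two PRECONDITION_FAILED compile errors above (code 2029) illustrate the constraints KQP enforces on scan queries: a scan query must produce exactly one result set and may not modify data. Below is a sketch of YQL scripts of the kind that trigger or avoid these errors, embedded as C++ string literals the way the unit tests do; the table name is illustrative, not taken from the test.

```cpp
#include <string>

// Rejected: two SELECTs yield two result sets, but a scan query must
// return exactly one ("Scan query should have a single result set").
const std::string kTwoResultSets = R"(
    SELECT Key FROM `/Root/KeyValue`;
    SELECT Value FROM `/Root/KeyValue`;
)";

// Rejected: scan queries are read-only
// ("Scan query cannot have data modifications").
const std::string kWriteInScanQuery = R"(
    UPSERT INTO `/Root/KeyValue` (Key, Value) VALUES (1u, "one");
)";

// Accepted: a single read-only SELECT.
const std::string kSingleSelect = R"(
    SELECT Key, Value FROM `/Root/KeyValue`;
)";

int main() { return 0; }
```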
2025-03-28T09:39:09.417412Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:09.485871Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:11.746144Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792648556933839:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.746213Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.797570Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.822166Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.844720Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.868264Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.890738Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.919393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.954486Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792648556934345:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.954555Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792648556934350:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.954554Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.957515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:11.965376Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792648556934352:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:12.030187Z node 2 :TX_PROXY ERROR: Actor# [2:7486792652851901701:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:12.867529Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154752899, txId: 281474976715671] shutting down >> KqpYql::TableRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::AnsiIn [GOOD] Test command err: Trying to start YDB, gRPC: 23285, MsgBus: 26571 2025-03-28T09:39:04.864884Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792618223485285:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:04.864966Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0009ba/r3tmp/tmp85HWz5/pdisk_1.dat 2025-03-28T09:39:05.110149Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23285, node 1 2025-03-28T09:39:05.189854Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:05.189896Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:05.189904Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:05.190056Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:05.196084Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:05.196181Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:05.198004Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:26571 TClient is connected to server localhost:26571 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:05.639680Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:05.669759Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:05.812628Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:05.955095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:06.012234Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:07.557972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792631108388953:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.558102Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.862913Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.893327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.921045Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.947040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.970890Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.037373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.090864Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792635403356763:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.090949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.090961Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792635403356768:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.094419Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:08.104966Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792635403356770:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:08.193496Z node 1 :TX_PROXY ERROR: Actor# [1:7486792635403356823:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:08.941125Z node 1 :KQP_SESSION ERROR: SessionId: ydb://session/3?node_id=1&id=ZWE2MWY0YTgtMmVjYTVlMmEtNTBhMWZlZTUtYTUxYjBhNDc=, ActorId: [1:7486792635403357070:2487], ActorState: ExecuteState, TraceId: 01jqe1x7dx2yw1dvfd3mh37wv7, Internal error, message: yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer 2025-03-28T09:39:08.941162Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWE2MWY0YTgtMmVjYTVlMmEtNTBhMWZlZTUtYTUxYjBhNDc=, ActorId: [1:7486792635403357070:2487], ActorState: ExecuteState, TraceId: 01jqe1x7dx2yw1dvfd3mh37wv7, Create QueryResponse for error on request, msg: yql/essentials/types/binary_json/read.cpp:161: StringOffset must be inside buffer Trying to start YDB, gRPC: 15816, MsgBus: 16671 2025-03-28T09:39:09.622724Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792641022452198:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:09.622815Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0009ba/r3tmp/tmpMU8EpF/pdisk_1.dat 2025-03-28T09:39:09.733024Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:09.763930Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:09.764013Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 15816, node 2 2025-03-28T09:39:09.765446Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:09.798726Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:09.798756Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:09.798765Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:09.798905Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16671 TClient is connected to server localhost:16671 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:10.185142Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:10.201441Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:10.287869Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:10.429688Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:10.501989Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:12.169550Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792653907355878:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.169642Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.212899Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.240916Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.272024Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.299232Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.322068Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.351404Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.389485Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792653907356385:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.389575Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.389722Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792653907356390:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.392613Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:12.400074Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792653907356392:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:12.460218Z node 2 :TX_PROXY ERROR: Actor# [2:7486792653907356445:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpScripting::QueryStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Closure [GOOD] Test command err: Trying to start YDB, gRPC: 24857, MsgBus: 4149 2025-03-28T09:39:04.847956Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792620789951448:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:04.848059Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000982/r3tmp/tmpRZRGKq/pdisk_1.dat 2025-03-28T09:39:05.125806Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24857, node 1 2025-03-28T09:39:05.174734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:05.174887Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:05.176475Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:05.182606Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:05.182630Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:05.182637Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:05.182808Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4149 TClient is connected to server localhost:4149 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:05.602202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:05.622551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:05.729846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:05.878126Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:05.947716Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:07.324857Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792633674855106:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.324988Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.567845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.596882Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.624173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.656860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.684884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.729813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:07.808426Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792633674855621:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.808521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.808753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792633674855626:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:07.812591Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:07.821714Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792633674855628:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:07.922671Z node 1 :TX_PROXY ERROR: Actor# [1:7486792633674855685:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 24714, MsgBus: 63209 2025-03-28T09:39:09.713210Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792641258751487:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:09.713307Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000982/r3tmp/tmpcrWkD7/pdisk_1.dat 2025-03-28T09:39:09.828178Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24714, node 2 2025-03-28T09:39:09.863702Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:09.863809Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:09.865167Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:09.897196Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:09.897229Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:09.897237Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:09.897380Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63209 TClient is connected to server localhost:63209 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:10.266748Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:10.283013Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:10.335789Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:10.487914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:10.550478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:12.299090Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792654143655144:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.299246Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.320911Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.347083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.372030Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.397606Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.426876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.459040Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.493733Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792654143655651:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.493819Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.493870Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792654143655656:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.496706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:12.504978Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792654143655658:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:12.570045Z node 2 :TX_PROXY ERROR: Actor# [2:7486792654143655712:3437] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpScripting::ExecuteYqlScriptScanScalar >> KqpYql::TableNameConflict [GOOD] >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] Test command err: Trying to start YDB, gRPC: 20876, MsgBus: 2451 2025-03-28T09:39:09.957114Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792642182266200:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:09.957209Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007e8/r3tmp/tmpqra4jV/pdisk_1.dat 2025-03-28T09:39:10.238597Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20876, node 1 2025-03-28T09:39:10.299905Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:10.299936Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:10.299947Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:10.300088Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:10.305246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:10.305400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:10.307046Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2451 TClient is connected to server localhost:2451 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:10.695731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:12.415545Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792655067168752:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.415679Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.661155Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.760239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792655067168854:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.760303Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.760457Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792655067168859:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.763722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:39:12.772759Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792655067168861:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking }
2025-03-28T09:39:12.829075Z node 1 :TX_PROXY ERROR: Actor# [1:7486792655067168912:2393] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> KqpQuery::SelectCountAsteriskFromVar [GOOD]
>> KqpYql::DdlDmlMix
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD]
Test command err:
Trying to start YDB, gRPC: 7669, MsgBus: 5830
2025-03-28T09:39:11.815981Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792649358039747:2061];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:11.816052Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00071f/r3tmp/tmpFZHH1Z/pdisk_1.dat
2025-03-28T09:39:12.156168Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 7669, node 1
2025-03-28T09:39:12.211315Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:12.211492Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:39:12.213254Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:39:12.233009Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:12.233058Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:12.233069Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:12.233197Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:5830
TClient is connected to server localhost:5830
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:39:12.665413Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
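The warn/retry/error triple that repeats throughout this log — TPoolFetcherActor reporting NOT_FOUND, TPoolCreatorActor scheduling a retry with "Transaction ... completed, doublechecking", then TX_PROXY reporting "path exist, request accepts it" — is the workload service lazily bootstrapping the default resource pool at /Root/.metadata/workload_manager/pools/default; the creator that loses the race treats the already-existing path as success. A hedged YQL sketch of the same create-if-absent idea (the pool setting and its value are assumptions for illustration):

    -- Hypothetical: declaring the default resource pool explicitly
    CREATE RESOURCE POOL default WITH (
        CONCURRENT_QUERY_LIMIT = 10
    );
    -- A concurrent creator that loses the race gets
    -- "error: path exist, request accepts it" and proceeds as if created.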
2025-03-28T09:39:14.441934Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792662242942295:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:14.442018Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:14.732617Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T09:39:14.855165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792662242942406:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:14.855232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:14.855319Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792662242942411:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:14.858535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:39:14.867082Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792662242942413:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking }
2025-03-28T09:39:14.946706Z node 1 :TX_PROXY ERROR: Actor# [1:7486792662242942464:2400] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
>> TKeyValueTest::TestConcatToLongKey [GOOD]
>> KqpScripting::ScriptingCreateAndAlterTableTest [GOOD]
>> KqpScripting::SecondaryIndexes
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableNameConflict [GOOD]
Test command err:
Trying to start YDB, gRPC: 7325, MsgBus: 22455
2025-03-28T09:39:06.537962Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792627509879964:2063];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:06.538024Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000919/r3tmp/tmp8PsZsb/pdisk_1.dat
2025-03-28T09:39:06.805713Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 7325, node 1
2025-03-28T09:39:06.887181Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:06.887233Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:06.887241Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:06.887391Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:39:06.892770Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:06.892873Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:39:06.894655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:22455
TClient is connected to server localhost:22455
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:39:07.344964Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:07.377578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:07.469702Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:07.618544Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:07.677351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:08.991090Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792636099816341:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.991251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:09.262525Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:09.288573Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:09.314863Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:09.343166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:09.371228Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:09.437498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:09.519874Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792640394784156:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:09.519938Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:09.519985Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792640394784161:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:09.522647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:09.530431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792640394784163:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:39:09.630714Z node 1 :TX_PROXY ERROR: Actor# [1:7486792640394784220:3453] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Table intent determination, code: 1040
:3:27: Error: CONCAT is not supported on Kikimr clusters.
Trying to start YDB, gRPC: 28803, MsgBus: 16720
2025-03-28T09:39:11.217393Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792651525694627:2060];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:11.217498Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000919/r3tmp/tmpVACu7X/pdisk_1.dat
2025-03-28T09:39:11.336457Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 28803, node 2
2025-03-28T09:39:11.352980Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:11.353044Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:39:11.354100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:39:11.386988Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:11.387030Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:11.387038Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:11.387172Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:16720
TClient is connected to server localhost:16720
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:39:11.735616Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:11.752515Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:11.822732Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
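The ":3:27: Error: CONCAT is not supported on Kikimr clusters" issue a few records above is raised during table intent determination: CONCAT in a FROM clause is a YT-side table-range construct, and YQL rejects it when the target is a Kikimr (YDB) cluster. A hedged sketch of the query shape that triggers it; the table paths are illustrative assumptions:

    -- Hypothetical: CONCAT over tables, rejected with code 1040 on YDB
    SELECT * FROM CONCAT(`/Root/Left`, `/Root/Right`);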
2025-03-28T09:39:11.962190Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:12.020019Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:13.799775Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792660115631000:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:13.799885Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:13.826226Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.851646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.881021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.910514Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.938787Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.972011Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:14.013360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792664410598802:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:14.013458Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:14.013509Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792664410598808:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:14.016995Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:14.024019Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792664410598810:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-28T09:39:14.091231Z node 2 :TX_PROXY ERROR: Actor# [2:7486792664410598864:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiCreateTable!
:12:30: Error: Table name conflict: db.[/Root/Test] is used to reference multiple tables.
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::SelectCountAsteriskFromVar [GOOD]
Test command err:
Trying to start YDB, gRPC: 30582, MsgBus: 15945
2025-03-28T09:39:00.669664Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792602167941906:2063];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:00.669781Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001493/r3tmp/tmpg5MmqM/pdisk_1.dat
2025-03-28T09:39:00.939958Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:00.940032Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:39:00.941621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:39:00.949075Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 30582, node 1
2025-03-28T09:39:01.006277Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:01.006308Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:01.006316Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:01.006447Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:15945
TClient is connected to server localhost:15945
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:39:01.437118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:01.460729Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:01.575974Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
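The ":12:30: Error: Table name conflict: db.[/Root/Test] is used to reference multiple tables" issue above is a type-annotation failure at KiCreateTable: within one program, the same path ends up bound to more than one table object. One plausible reproducer, offered only as an assumption about what the test script does:

    -- Hypothetical: the same path declared twice in one script,
    -- so db.[/Root/Test] refers to two distinct table objects
    CREATE TABLE `/Root/Test` (Key Uint64, Value String, PRIMARY KEY (Key));
    CREATE TABLE `/Root/Test` (Id Uint64, PRIMARY KEY (Id));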
2025-03-28T09:39:01.715089Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:01.776372Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:03.396807Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792615052845576:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.396940Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.658849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.687572Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.715976Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.743637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.767497Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.796919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:03.834705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792615052846085:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.834785Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.834900Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792615052846090:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:03.838730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:03.847635Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792615052846092:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T09:39:03.931551Z node 1 :TX_PROXY ERROR: Actor# [1:7486792615052846146:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
Trying to start YDB, gRPC: 25614, MsgBus: 11800
2025-03-28T09:39:05.661600Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792623651146932:2065];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:05.661688Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001493/r3tmp/tmpIhBqAx/pdisk_1.dat
2025-03-28T09:39:05.778394Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:39:05.807096Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:05.807182Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:39:05.808805Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 25614, node 2
2025-03-28T09:39:05.853450Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:05.853485Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:05.853494Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:05.853633Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:11800
TClient is connected to server localhost:11800
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:39:06.215629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:06.229857Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:06.301836Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:06.416121Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:06.497635Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:08.294710Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792636536050597:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.294812Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.339188Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.366783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.396235Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.420912Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.446218Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.514094Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:08.590961Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792636536051113:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.591032Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792636536051118:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.591042Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:08.593914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:08.601717Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792636536051120:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-28T09:39:08.704443Z node 2 :TX_PROXY ERROR: Actor# [2:7486792636536051177:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
Trying to start YDB, gRPC: 11534, MsgBus: 6839
2025-03-28T09:39:10.435342Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486792646149115991:2066];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:10.435410Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001493/r3tmp/tmpnws8k0/pdisk_1.dat
2025-03-28T09:39:10.537867Z node 3 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 11534, node 3
2025-03-28T09:39:10.560546Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:10.560605Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:39:10.562011Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:39:10.592848Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:10.592875Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:10.592885Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:10.593022Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:6839
TClient is connected to server localhost:6839
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:39:11.041813Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:11.056168Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:11.128978Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:11.289109Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:11.359254Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:13.589691Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486792659034019640:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:13.589775Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:13.639674Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.668896Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.695087Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.726014Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.755644Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.790969Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.841516Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486792659034020155:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:13.841629Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:13.841932Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486792659034020160:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:13.845150Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:13.853637Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486792659034020162:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:13.942025Z node 3 :TX_PROXY ERROR: Actor# [3:7486792659034020216:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpYql::SelectNoAsciiValue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestConcatToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! 
new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! 
new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:87:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:87:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! 
new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:87:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:87:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:87:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:90:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:93:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:92:2119] Leader for TabletID 72057594037927937 is [11:92:2119] sender: [11:146:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 7 is [33:56:2097] sender: [33:92:2057] recipient: [33:36:2083] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:95:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [33:56:2097] sender: [33:96:2057] recipient: [33:94:2121] Leader for TabletID 72057594037927937 is [33:97:2122] sender: [33:98:2057] recipient: [33:94:2121] !Reboot 72057594037927937 (actor [33:56:2097]) rebooted! !Reboot 72057594037927937 (actor [33:56:2097]) tablet resolver refreshed! 
new actor is[33:97:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:54:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:57:2057] recipient: [34:51:2095] Leader for TabletID 72057594037927937 is [34:56:2097] sender: [34:74:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:54:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:57:2057] recipient: [35:52:2095] Leader for TabletID 72057594037927937 is [35:56:2097] sender: [35:74:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:54:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:57:2057] recipient: [36:51:2095] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:74:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:76:2057] recipient: [36:36:2083] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:79:2057] recipient: [36:78:2110] Leader for TabletID 72057594037927937 is [36:56:2097] sender: [36:80:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:82:2057] recipient: [36:78:2110] !Reboot 72057594037927937 (actor [36:56:2097]) rebooted! !Reboot 72057594037927937 (actor [36:56:2097]) tablet resolver refreshed! new actor is[36:81:2111] Leader for TabletID 72057594037927937 is [36:81:2111] sender: [36:135:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:54:2057] recipient: [37:52:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:57:2057] recipient: [37:52:2095] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:74:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:56:2097]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:76:2057] recipient: [37:36:2083] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:78:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [37:56:2097] sender: [37:80:2057] recipient: [37:79:2110] Leader for TabletID 72057594037927937 is [37:81:2111] sender: [37:82:2057] recipient: [37:79:2110] !Reboot 72057594037927937 (actor [37:56:2097]) rebooted! !Reboot 72057594037927937 (actor [37:56:2097]) tablet resolver refreshed! 
new actor is[37:81:2111] Leader for TabletID 72057594037927937 is [37:81:2111] sender: [37:135:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:54:2057] recipient: [38:51:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:57:2057] recipient: [38:51:2095] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:74:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:77:2057] recipient: [38:36:2083] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:80:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [38:56:2097] sender: [38:81:2057] recipient: [38:79:2110] Leader for TabletID 72057594037927937 is [38:82:2111] sender: [38:83:2057] recipient: [38:79:2110] !Reboot 72057594037927937 (actor [38:56:2097]) rebooted! !Reboot 72057594037927937 (actor [38:56:2097]) tablet resolver refreshed! new actor is[38:82:2111] Leader for TabletID 72057594037927937 is [38:82:2111] sender: [38:136:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:54:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:57:2057] recipient: [39:50:2095] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:74:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:80:2057] recipient: [39:36:2083] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:83:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [39:56:2097] sender: [39:84:2057] recipient: [39:82:2113] Leader for TabletID 72057594037927937 is [39:85:2114] sender: [39:86:2057] recipient: [39:82:2113] !Reboot 72057594037927937 (actor [39:56:2097]) rebooted! !Reboot 72057594037927937 (actor [39:56:2097]) tablet resolver refreshed! new actor is[39:85:2114] Leader for TabletID 72057594037927937 is [39:85:2114] sender: [39:139:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:54:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:57:2057] recipient: [40:51:2095] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:74:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:80:2057] recipient: [40:36:2083] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:83:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [40:56:2097] sender: [40:84:2057] recipient: [40:82:2113] Leader for TabletID 72057594037927937 is [40:85:2114] sender: [40:86:2057] recipient: [40:82:2113] !Reboot 72057594037927937 (actor [40:56:2097]) rebooted! !Reboot 72057594037927937 (actor [40:56:2097]) tablet resolver refreshed! 
new actor is[40:85:2114] Leader for TabletID 72057594037927937 is [40:85:2114] sender: [40:139:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:54:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:57:2057] recipient: [41:51:2095] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:74:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:81:2057] recipient: [41:36:2083] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:84:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [41:56:2097] sender: [41:85:2057] recipient: [41:83:2113] Leader for TabletID 72057594037927937 is [41:86:2114] sender: [41:87:2057] recipient: [41:83:2113] !Reboot 72057594037927937 (actor [41:56:2097]) rebooted! !Reboot 72057594037927937 (actor [41:56:2097]) tablet resolver refreshed! new actor is[41:86:2114] Leader for TabletID 72057594037927937 is [41:86:2114] sender: [41:140:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:54:2057] recipient: [42:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:54:2057] recipient: [42:51:2095] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:57:2057] recipient: [42:51:2095] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:74:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:84:2057] recipient: [42:36:2083] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:86:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [42:56:2097] sender: [42:88:2057] recipient: [42:87:2116] Leader for TabletID 72057594037927937 is [42:89:2117] sender: [42:90:2057] recipient: [42:87:2116] !Reboot 72057594037927937 (actor [42:56:2097]) rebooted! !Reboot 72057594037927937 (actor [42:56:2097]) tablet resolver refreshed! new actor is[42:89:2117] Leader for TabletID 72057594037927937 is [42:89:2117] sender: [42:143:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:54:2057] recipient: [43:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:54:2057] recipient: [43:51:2095] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:57:2057] recipient: [43:51:2095] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:74:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:84:2057] recipient: [43:36:2083] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:87:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [43:56:2097] sender: [43:88:2057] recipient: [43:86:2116] Leader for TabletID 72057594037927937 is [43:89:2117] sender: [43:90:2057] recipient: [43:86:2116] !Reboot 72057594037927937 (actor [43:56:2097]) rebooted! !Reboot 72057594037927937 (actor [43:56:2097]) tablet resolver refreshed! 
new actor is[43:89:2117]
Leader for TabletID 72057594037927937 is [43:89:2117] sender: [43:143:2057] recipient: [43:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:54:2057] recipient: [44:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:54:2057] recipient: [44:51:2095]
Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:57:2057] recipient: [44:51:2095]
Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:74:2057] recipient: [44:14:2061]
!Reboot 72057594037927937 (actor [44:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate !
Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:85:2057] recipient: [44:36:2083]
Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:88:2057] recipient: [44:14:2061]
Leader for TabletID 72057594037927937 is [44:56:2097] sender: [44:89:2057] recipient: [44:87:2116]
Leader for TabletID 72057594037927937 is [44:90:2117] sender: [44:91:2057] recipient: [44:87:2116]
!Reboot 72057594037927937 (actor [44:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [44:56:2097]) tablet resolver refreshed!
new actor is[44:90:2117]
Leader for TabletID 72057594037927937 is [44:90:2117] sender: [44:144:2057] recipient: [44:14:2061]
Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095]
IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095]
Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:57:2057] recipient: [45:51:2095]
Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:74:2057] recipient: [45:14:2061]
>> KqpYql::UpdateBadType [GOOD]
>> KqpScripting::ScanQueryDisable [GOOD]
>> KqpYql::EvaluateExpr3 [GOOD]
|97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest
|97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest
|97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::SelectNoAsciiValue [GOOD]
Test command err:
Trying to start YDB, gRPC: 20400, MsgBus: 12315
2025-03-28T09:39:07.319530Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792630362600605:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:07.319604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0008ae/r3tmp/tmp2l1TO5/pdisk_1.dat
2025-03-28T09:39:07.574707Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 20400, node 1
2025-03-28T09:39:07.637736Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:07.637753Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:07.637759Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:07.637901Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:39:07.665430Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:07.665509Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1,
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:07.666868Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12315 TClient is connected to server localhost:12315 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:08.052177Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:08.074884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:08.190971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:08.338237Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:08.405885Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:09.753148Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792638952536981:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:09.753280Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:09.977900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:10.002205Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:10.027385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:10.052188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:10.078571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:10.132005Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:10.167372Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792643247504790:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:10.167458Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:10.167498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792643247504795:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:10.170618Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:10.179886Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792643247504797:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:10.238222Z node 1 :TX_PROXY ERROR: Actor# [1:7486792643247504850:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
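The repeated "Resource pool default not found or you don't have access permissions" warnings above are part of normal bootstrap: the workload service polls for the default pool while its auto-creation under /Root/.metadata/workload_manager/pools/default is still in flight, which is why the later TX_PROXY record reports "path exist, request accepts it". A minimal YQL sketch of declaring a pool explicitly; the pool name and both limits are illustrative assumptions, not taken from this run:

```yql
-- Hypothetical sketch (not from this log): creating a resource pool up front
-- so queries need not wait for the auto-created "default" pool.
CREATE RESOURCE POOL test_pool WITH (
    CONCURRENT_QUERY_LIMIT = 10, -- assumed cap on queries running at once
    QUEUE_SIZE = 100             -- assumed cap on queries queued beyond that
);
```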
<main>: Error: Type annotation, code: 1030
<main>:10:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
<main>:10:20: Error: At function: Apply
<main>:8:28: Error: At function: ScriptUdf
<main>:8:28: Error: Module not loaded for script type: Python3
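The error chain above comes from KqpYql::SelectNoAsciiValue, which deliberately submits a query whose ScriptUdf call cannot be typed because this server build carries no Python3 script-UDF module. A sketch of the query shape that produces this diagnostic, assuming standard YQL script-UDF syntax; the script body, function name, and signature are illustrative:

```yql
-- Hypothetical repro sketch: declaring a Python3 script UDF. On a server
-- built without the Python3 UDF module, type annotation stops at the
-- ScriptUdf call with "Module not loaded for script type: Python3".
$script = @@
def multiply(a, b):
    return a * b
@@;
$udf = Python3::multiply(Callable<(Int64, Int64) -> Int64>, $script);
SELECT $udf(2, 3);
```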
Trying to start YDB, gRPC: 6472, MsgBus: 18128
2025-03-28T09:39:11.634460Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792650042051712:2060];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:11.634555Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0008ae/r3tmp/tmpxVUhwQ/pdisk_1.dat
2025-03-28T09:39:11.712785Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 6472, node 2
2025-03-28T09:39:11.765394Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:11.765478Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:39:11.766603Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:39:11.769211Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:11.769228Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:11.769233Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:11.769327Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:18128
TClient is connected to server localhost:18128
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:39:12.181195Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:12.197182Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:12.241943Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:12.385347Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:12.471455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:14.812620Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792662926955377:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:14.812704Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:14.860461Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:14.888785Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:14.912760Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:14.939550Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:14.967505Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.034548Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.109378Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792667221923192:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:15.109448Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:15.109461Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792667221923197:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:15.112523Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:15.120440Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792667221923199:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-28T09:39:15.214905Z node 2 :TX_PROXY ERROR: Actor# [2:7486792667221923253:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:39:15.926903Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480
2025-03-28T09:39:16.286824Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154756329, txId: 281474976715675] shutting down
>> TSchemeShardUserAttrsTest::SpecialAttributes
>> TSchemeShardUserAttrsTest::MkDir
>> TSchemeShardUserAttrsTest::VariousUse
>> TSchemeShardUserAttrsTest::SetAttrs
>> TSchemeShardTest::DropPQAbort [GOOD]
>> TSchemeShardTest::ListNotCreatedDirCase
>> TSchemeShardUserAttrsTest::UserConditionsAtAlter
>> KqpPragma::OrderedColumns [GOOD]
>> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer
>> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdateBadType [GOOD]
Test command err:
Trying to start YDB, gRPC: 24881, MsgBus: 28511
2025-03-28T09:39:12.738476Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792654758617314:2061];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:12.738547Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00069e/r3tmp/tmpVAcZ6h/pdisk_1.dat
2025-03-28T09:39:13.020314Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 24881, node 1
2025-03-28T09:39:13.027001Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-28T09:39:13.027126Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-28T09:39:13.059493Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:13.059522Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:13.059537Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:13.059651Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:39:13.074599Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:13.074779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:39:13.076967Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:28511
TClient is connected to server localhost:28511
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:13.480972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:13.491360Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:39:13.495782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:13.613435Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:13.737758Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:13.810629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:15.647596Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792667643520990:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:15.647711Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:15.934258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.955744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.977003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.000181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.022085Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.046452Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.078950Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792671938488795:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.079013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.079062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792671938488800:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.081903Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:16.088855Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792671938488802:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:16.170309Z node 1 :TX_PROXY ERROR: Actor# [1:7486792671938488856:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
<main>: Error: Type annotation, code: 1030
<main>:4:26: Error: At function: KiUpdateTable!
<main>:3:20: Error: Failed to convert type: Struct<'Amount':String?> to Struct<'Amount':Uint64?>
<main>:3:20: Error: Failed to convert 'Amount': Optional<String> to Optional<Uint64>
<main>:3:20: Error: Row type mismatch for table: db.[/Root/Test]
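The error chain above is the negative case KqpYql::UpdateBadType checks: KiUpdateTable rejects an UPDATE whose SET clause yields Struct<'Amount':String?> where the row type of /Root/Test requires Struct<'Amount':Uint64?>. A hedged sketch of such a statement; the WHERE column and the rest of the table schema are assumptions for illustration:

```yql
-- Hypothetical sketch of the rejected statement: Amount is Uint64? in
-- /Root/Test, so a string value cannot be converted and the query fails
-- with "Row type mismatch for table: db.[/Root/Test]".
UPDATE `/Root/Test`
SET Amount = "one hundred" -- String?, not Uint64? -> type annotation error
WHERE Name = "Anna";       -- assumed column, for illustration only
```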
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryDisable [GOOD]
Test command err:
Trying to start YDB, gRPC: 23537, MsgBus: 18208
2025-03-28T09:39:06.861942Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792628901972499:2061];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:06.862040Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00090e/r3tmp/tmpFFaSrl/pdisk_1.dat
2025-03-28T09:39:07.165529Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 23537, node 1
2025-03-28T09:39:07.217831Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:07.217868Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:07.217878Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:07.218016Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:39:07.242308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:07.242443Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:39:07.243989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TClient is connected to server localhost:18208
TClient is connected to server localhost:18208
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:39:07.712707Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:07.731895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:07.846120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:07.985118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:08.044329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:09.416508Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792641786876166:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:09.416617Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:09.716182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:09.743567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:09.768967Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:09.796626Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:09.821380Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:09.851263Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:09.891377Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792641786876672:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:09.891477Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:09.891693Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792641786876677:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:09.894653Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:09.902072Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792641786876679:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:09.971542Z node 1 :TX_PROXY ERROR: Actor# [1:7486792641786876735:3445] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:11.166304Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154751191, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 62213, MsgBus: 10786 2025-03-28T09:39:11.751588Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792651054982003:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:11.751731Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00090e/r3tmp/tmpoUkg7l/pdisk_1.dat 2025-03-28T09:39:11.855532Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62213, node 2 2025-03-28T09:39:11.892895Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:11.892979Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:11.894249Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:11.906903Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:11.906926Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:11.906935Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:11.907037Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10786 TClient is connected to server localhost:10786 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:12.240270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:12.256490Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:12.297314Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:12.445706Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:12.517164Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:14.913360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792663939885657:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:14.913463Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:14.942460Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:14.970385Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:14.998485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.026934Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.053977Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.120381Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.192977Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792668234853472:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:15.193027Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:15.193096Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792668234853477:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:15.196116Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:15.204791Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792668234853479:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking }
2025-03-28T09:39:15.284002Z node 2 :TX_PROXY ERROR: Actor# [2:7486792668234853532:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:39:16.516844Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154756553, txId: 281474976715671] shutting down
2025-03-28T09:39:16.752020Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792651054982003:2060];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:16.752076Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
|97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExpr3 [GOOD]
Test command err:
Trying to start YDB, gRPC: 17977, MsgBus: 31677
2025-03-28T09:39:08.787545Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792638801109751:2063];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:08.787604Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007f3/r3tmp/tmpiwM0Wx/pdisk_1.dat
2025-03-28T09:39:09.091667Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:39:09.096027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:09.096134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:39:09.100699Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 17977, node 1
2025-03-28T09:39:09.160387Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:09.160406Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:09.160411Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:09.160560Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:31677
TClient is connected to server localhost:31677
WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:09.615277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:09.638490Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:09.750016Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:09.870348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:09.943540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:11.280180Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792651686013430:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.280344Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.511139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.535392Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.560219Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.584569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.647006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.676638Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.713360Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792651686013937:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.713424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.713463Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792651686013942:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.716665Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:11.724728Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792651686013944:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:11.828357Z node 1 :TX_PROXY ERROR: Actor# [1:7486792651686013998:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13954, MsgBus: 24428 2025-03-28T09:39:13.289446Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792658600643556:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:13.289534Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007f3/r3tmp/tmpG3HgwS/pdisk_1.dat 2025-03-28T09:39:13.423561Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13954, node 2 2025-03-28T09:39:13.467995Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:13.468112Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:13.469860Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:13.485047Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:13.485068Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:13.485075Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:13.485201Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24428 TClient is connected to server localhost:24428 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:13.846939Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:13.854287Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:39:13.860093Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:13.940331Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:14.095063Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:14.163628Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:16.082366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792671485547230:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.082449Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.121110Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.145859Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.170064Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.193585Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.218914Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.248693Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.286084Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792671485547738:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.286166Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.286263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792671485547743:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.289455Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:16.303888Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792671485547745:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:16.378037Z node 2 :TX_PROXY ERROR: Actor# [2:7486792671485547801:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSchemeShardTest::ListNotCreatedDirCase [GOOD] >> TSchemeShardTest::ListNotCreatedIndexCase >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] >> KqpScripting::StreamExecuteYqlScriptMixed [GOOD] >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan >> KqpScripting::ExecuteYqlScriptPg [GOOD] >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] >> TSchemeShardUserAttrsTest::MkDir [GOOD] >> KqpYql::TableRange [GOOD] >> TSchemeShardUserAttrsTest::Boot >> KqpScripting::SystemTables [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:39:18.559932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:18.560054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:18.560094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:18.560134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:18.560822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:18.560867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:18.560933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:18.561001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:18.562032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:18.643172Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 
2025-03-28T09:39:18.643223Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:18.656901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:18.657141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:18.657314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:18.664298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:18.664459Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:18.667466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.667731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:18.672003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.677762Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.677827Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.678363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:18.678436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:18.678491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:18.678571Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.684636Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:39:18.814645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:18.814881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.815122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:18.815355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:18.815404Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.817210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.817358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 
2025-03-28T09:39:18.817501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.817557Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:18.817588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:18.817618Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:18.819205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.819249Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:18.819280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:18.820677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.820717Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.820767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.820811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.824380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:18.825843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:18.826019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:18.826902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.827015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.827063Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.827304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:18.827354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.827543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:18.827622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no 
IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:18.829186Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.829229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:18.829359Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.829409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:18.829734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.829783Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:18.829882Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:18.829914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.829951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:18.829980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.830011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:39:18.830045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.830075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:39:18.830105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:39:18.830153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:18.830196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:39:18.830240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:39:18.831979Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:18.832072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:18.832103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
ationSubscriber for txId 102: satisfy waiter [1:318:2309] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2025-03-28T09:39:18.886921Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:39:18.887056Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 151us result status StatusSuccess 2025-03-28T09:39:18.887390Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-03-28T09:39:18.889800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "MyRoot" UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:18.889921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.889983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:18.890071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:18.890104Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.891705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.891812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: MyRoot 2025-03-28T09:39:18.891961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.891991Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.892025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-03-28T09:39:18.892110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:18.893453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-03-28T09:39:18.893544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-03-28T09:39:18.893735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.893823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.893862Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2025-03-28T09:39:18.893993Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:39:18.894018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:39:18.894042Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:39:18.894063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:39:18.894094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:18.894129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-03-28T09:39:18.894165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:18.894216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:39:18.894243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:39:18.894264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:39:18.894300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:39:18.894324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-03-28T09:39:18.894355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2025-03-28T09:39:18.895701Z node 1 
:FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.895741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:18.895876Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.895909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 1 FAKE_COORDINATOR: Erasing txId 103 2025-03-28T09:39:18.896246Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:39:18.896323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:39:18.896356Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:39:18.896381Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2025-03-28T09:39:18.896404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:18.896473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-03-28T09:39:18.897675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-28T09:39:18.897900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:39:18.897948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:39:18.898253Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:39:18.898323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:39:18.898347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:337:2328] TestWaitNotification: OK eventTxId 103 2025-03-28T09:39:18.898729Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:39:18.898859Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 143us result status StatusSuccess 2025-03-28T09:39:18.899129Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:39:18.559922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:18.560076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:18.560122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:18.560157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:18.560844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:18.560884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:18.560937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:18.561001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:18.561950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:18.641081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:39:18.641140Z node 1 :IMPORT WARN: Table profiles 
were not loaded 2025-03-28T09:39:18.656906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:18.657146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:18.657316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:18.664094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:18.664349Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:18.667494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.667681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:18.672071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.677644Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.677704Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.678286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:18.678346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:18.678426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:18.678509Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.684573Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:39:18.803406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:18.803611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.803822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:18.804075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:18.804127Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.805967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.806115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:18.806277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress 
Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.806330Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:18.806363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:18.806390Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:18.807996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.808046Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:18.808076Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:18.809484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.809521Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.809555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.809597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.817651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:18.819209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:18.819383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:18.820290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.820415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.820458Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.820692Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:18.820741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.820900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:18.820961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2025-03-28T09:39:18.822421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.822462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:18.822607Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.822657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:18.823050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.823086Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:18.823233Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:18.823270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.823303Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:18.823344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.823375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:39:18.823413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.823443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:39:18.823472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:39:18.823521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:18.823568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:39:18.823611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:39:18.825238Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:18.825335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:18.825368Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
3-28T09:39:18.863449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-28T09:39:18.863493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:39:18.863745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.863783Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 102:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:18.863818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2025-03-28T09:39:18.863907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:18.864439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:18.864540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:18.864576Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:39:18.864621Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2025-03-28T09:39:18.864652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:18.865200Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:18.865272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:18.865296Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:39:18.865329Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 2 2025-03-28T09:39:18.865355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:39:18.865404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2025-03-28T09:39:18.866966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-28T09:39:18.867066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: 
Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 2025-03-28T09:39:18.867834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.867922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.867970Z node 1 :FLAT_TX_SCHEMESHARD INFO: MkDir::TPropose operationId# 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000002, at schemeshard: 72057594046678944 2025-03-28T09:39:18.868066Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:0 128 -> 240 2025-03-28T09:39:18.868213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:18.868270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:39:18.869593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:39:18.869734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:39:18.870225Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.870257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:18.870354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:39:18.870421Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.870460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 1 2025-03-28T09:39:18.870490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:39:18.870744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.870777Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:39:18.870852Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:39:18.870894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:39:18.870930Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:39:18.870956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:39:18.870987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-28T09:39:18.871019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:39:18.871048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:39:18.871079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:39:18.871127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:39:18.871215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-28T09:39:18.871245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2025-03-28T09:39:18.871273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2025-03-28T09:39:18.871707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:18.871769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:18.871812Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:39:18.871857Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2025-03-28T09:39:18.871901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:18.872935Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:18.873000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:18.873029Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:39:18.873068Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2025-03-28T09:39:18.873104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:39:18.873167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-28T09:39:18.875049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:39:18.876367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2025-03-28T09:39:18.878910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { 
Name: "DirD" } AlterUserAttributes { UserAttributes { Key: "__extra_path_symbols_allowed" Value: "./_" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:18.879106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/DirD, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.879242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, at schemeshard: 72057594046678944 2025-03-28T09:39:18.881048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "UserAttributes: attribute \'__extra_path_symbols_allowed\' has invalid value \'./_\', forbidden symbols are found" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.881205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, operation: CREATE DIRECTORY, path: /MyRoot/DirD TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:39:18.559925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:18.560075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:18.560115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:18.560144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:18.560824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:18.560863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:18.560927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:18.561003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:18.562011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:18.646560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:39:18.646608Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-03-28T09:39:18.659612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:18.659842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:18.660000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:18.664603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:18.664750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:18.667490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.667689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:18.671915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.677739Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.677801Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.678332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:18.678396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:18.678447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:18.678527Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.684532Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:39:18.785787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:18.785965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.786140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:18.786308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:18.786346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.788004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.788106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:18.788219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.788263Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:18.788287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:18.788307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:18.789590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.789623Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:18.789645Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:18.790806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.790838Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.790862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.790901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.797157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:18.798343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:18.798516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:18.799277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.799372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.799407Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.799591Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:18.799633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.799754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:18.799817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:39:18.801132Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.801166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:18.801281Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.801321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:18.801585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.801615Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:18.801699Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:18.801724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.801750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:18.801768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.801790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:39:18.801814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.801848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:39:18.801872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:39:18.801916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:18.801941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:39:18.801972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:39:18.803269Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:18.803345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:18.803370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
5 msg type: 269090816 2025-03-28T09:39:18.917126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000005 2025-03-28T09:39:18.917456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000005, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.917525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000005 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.917561Z node 1 :FLAT_TX_SCHEMESHARD INFO: TRmDir HandleReply TEvOperationPlan, opId: 105:0, step: 5000005, at schemeshard: 72057594046678944 2025-03-28T09:39:18.917632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RmDir is done, opId: 105:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.917671Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-28T09:39:18.917692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:39:18.917727Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#105:0 progress is 1/1 2025-03-28T09:39:18.917754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:39:18.917783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:39:18.917822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-28T09:39:18.917850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2025-03-28T09:39:18.917884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2025-03-28T09:39:18.917906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 105:0 2025-03-28T09:39:18.917926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 105:0 2025-03-28T09:39:18.917980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:39:18.918029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2025-03-28T09:39:18.918068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2025-03-28T09:39:18.918090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-03-28T09:39:18.919274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:39:18.919335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:39:18.920122Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.920150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at 
schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:18.920247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:39:18.920337Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.920358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 1 2025-03-28T09:39:18.920381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 105, path id: 4 FAKE_COORDINATOR: Erasing txId 105 2025-03-28T09:39:18.920737Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:39:18.920801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:39:18.920822Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:39:18.920846Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2025-03-28T09:39:18.920870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2025-03-28T09:39:18.921109Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:39:18.921152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2025-03-28T09:39:18.921168Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2025-03-28T09:39:18.921184Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-28T09:39:18.921210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-28T09:39:18.921275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2025-03-28T09:39:18.921526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:39:18.921554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-28T09:39:18.921593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:39:18.923008Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:39:18.923750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2025-03-28T09:39:18.923841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2025-03-28T09:39:18.924057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2025-03-28T09:39:18.924083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2025-03-28T09:39:18.924432Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2025-03-28T09:39:18.924505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2025-03-28T09:39:18.924529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:395:2386] TestWaitNotification: OK eventTxId 105 2025-03-28T09:39:18.924976Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirC" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:39:18.925100Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirC" took 123us result status StatusPathDoesNotExist 2025-03-28T09:39:18.925226Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirC\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/DirC" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2025-03-28T09:39:18.925723Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:39:18.925908Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 146us result status StatusSuccess 2025-03-28T09:39:18.926293Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 
1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::MkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:39:18.559919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:18.560035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:18.560073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:18.560107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:18.560846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:18.560890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:18.560953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:18.561022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:18.561993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:18.647616Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:39:18.647660Z node 1 :IMPORT WARN: Table profiles were not loaded 
2025-03-28T09:39:18.661461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:18.661681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:18.661819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:18.666792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:18.666951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:18.667505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.667680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:18.671922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.677694Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.677754Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.678322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:18.678388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:18.678436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:18.678517Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.684694Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:39:18.811364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:18.811595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.811837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:18.812032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:18.812078Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.813987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.814129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:18.814275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.814326Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:18.814355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:18.814382Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:18.815978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.816020Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:18.816047Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:18.817346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.817397Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.817444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.817492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.820805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:18.822255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:18.822439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:18.823352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.823468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.823524Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.823743Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:18.823791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.823938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:18.824030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:39:18.825587Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.825630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:18.825791Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.825845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:18.826188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.826239Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:18.826344Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:18.826379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.826417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:18.826446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.826476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:39:18.826509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.826541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:39:18.826571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:39:18.826624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:18.826657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:39:18.826723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:39:18.828454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:18.828550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:18.828581Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
8.929751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:39:18.930210Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2025-03-28T09:39:18.930283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:39:18.930311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:378:2369] 2025-03-28T09:39:18.930477Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2025-03-28T09:39:18.930555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2025-03-28T09:39:18.930577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:378:2369] 2025-03-28T09:39:18.930650Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:39:18.930678Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:39:18.930725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:39:18.930752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:378:2369] 2025-03-28T09:39:18.930813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:39:18.930828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:378:2369] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2025-03-28T09:39:18.931292Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:39:18.931454Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 164us result status StatusSuccess 2025-03-28T09:39:18.931787Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.932173Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:39:18.932286Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 134us result status StatusSuccess 2025-03-28T09:39:18.932498Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 } ChildrenExist: true } Children { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.932879Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:39:18.932983Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 110us result status StatusSuccess 2025-03-28T09:39:18.933213Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 
5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } UserAttributes { Key: "AttrB2" Value: "ValB2" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.933657Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:39:18.933789Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA" took 98us result status StatusSuccess 2025-03-28T09:39:18.934095Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA" PathDescription { Self { Name: "SubDirA" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAA1" Value: "ValAA1" } UserAttributes { Key: "AttrAA2" Value: "ValAA2" } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.934545Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:39:18.934651Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA/DirB" took 119us result status StatusSuccess 
2025-03-28T09:39:18.934888Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA/DirB" PathDescription { Self { Name: "DirB" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000005 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrAB1" Value: "ValAB1" } UserAttributes { Key: "AttrAB2" Value: "ValAB2" } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:39:18.559926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:18.560040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:18.560085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:18.560122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:18.560847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:18.560892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:18.560955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:18.561038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:18.562019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:18.651181Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:39:18.651236Z node 1 :IMPORT WARN: Table profiles were not 
loaded 2025-03-28T09:39:18.665686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:18.665912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:18.666066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:18.671590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:18.671782Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:18.672414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.672605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:18.674277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.677788Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.677855Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.678362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:18.678436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:18.678494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:18.678585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.684863Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:39:18.815180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:18.815381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.815581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:18.815792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:18.815843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.817840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.817968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:18.818116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, 
operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.818171Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:18.818210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:18.818243Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:18.819930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.819984Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:18.820021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:18.821558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.821603Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.821641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.821692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.831486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:18.833312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:18.833536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:18.834490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.834617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.834669Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.834953Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:18.835007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.835210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:18.835282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2025-03-28T09:39:18.837040Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.837090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:18.837230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.837285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:18.837654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.837701Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:18.837800Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:18.837850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.837892Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:18.837922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.837962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:39:18.837998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.838031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:39:18.838077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:39:18.838147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:18.838184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:39:18.838230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:39:18.840226Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:18.840333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:18.840369Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
G: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:39:18.906299Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:39:18.906372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:39:18.906411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:328:2319] TestWaitNotification: OK eventTxId 102 2025-03-28T09:39:18.906923Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:39:18.907066Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 157us result status StatusSuccess 2025-03-28T09:39:18.907355Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2025-03-28T09:39:18.910091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterUserAttributes AlterUserAttributes { PathName: "DirA" UserAttributes { Key: "AttrA2" Value: "ValA2" } } ApplyIf { PathId: 2 PathVersion: 4 } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:18.910235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterUserAttrs Propose, path: /MyRoot/DirA, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.910323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2025-03-28T09:39:18.910439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:18.910488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.912347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, 
at schemeshard: 72057594046678944 2025-03-28T09:39:18.912497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirA 2025-03-28T09:39:18.912657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.912692Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.912737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2025-03-28T09:39:18.912838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:18.914395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2025-03-28T09:39:18.914525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000004 2025-03-28T09:39:18.914822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.914913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.914962Z node 1 :FLAT_TX_SCHEMESHARD INFO: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000004, at schemeshard: 72057594046678944 2025-03-28T09:39:18.915112Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:39:18.915153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:39:18.915204Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#103:0 progress is 1/1 2025-03-28T09:39:18.915250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:39:18.915313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:39:18.915369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2025-03-28T09:39:18.915432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2025-03-28T09:39:18.915468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2025-03-28T09:39:18.915501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 103:0 2025-03-28T09:39:18.915533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 103:0 2025-03-28T09:39:18.915590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason 
remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2025-03-28T09:39:18.915627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2025-03-28T09:39:18.915670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-28T09:39:18.917463Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.917520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:39:18.917668Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.917709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 103, path id: 2 FAKE_COORDINATOR: Erasing txId 103 2025-03-28T09:39:18.918122Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:39:18.918289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2025-03-28T09:39:18.918326Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2025-03-28T09:39:18.918361Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-28T09:39:18.918407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2025-03-28T09:39:18.918498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2025-03-28T09:39:18.919982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2025-03-28T09:39:18.920257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2025-03-28T09:39:18.920295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2025-03-28T09:39:18.920697Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2025-03-28T09:39:18.920776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2025-03-28T09:39:18.920809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:345:2336] TestWaitNotification: OK eventTxId 103 2025-03-28T09:39:18.921320Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:39:18.921512Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe 
path "/MyRoot/DirA" took 181us result status StatusSuccess 2025-03-28T09:39:18.921801Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:39:18.559911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:18.560044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:18.560083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:18.560121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:18.560831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:18.560877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:18.560938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:18.561009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:18.561989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:18.648421Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:39:18.648467Z node 1 
:IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:18.662251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:18.662474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:18.662608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:18.667732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:18.667888Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:18.668464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.668619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:18.671984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.677714Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.677778Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.678350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:18.678419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:18.678473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:18.678551Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.684360Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:39:18.766748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:18.767767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.769393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:18.770472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:18.770540Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.773348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.773476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:18.773640Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.773700Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:18.773733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:18.773764Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:18.775692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.775747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:18.775779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:18.777380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.777429Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.777539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.777583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.782204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:18.784191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:18.785134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:18.786203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:18.786345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.786392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.787804Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:18.787869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:18.788728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:18.788823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:18.790510Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.790554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:18.790736Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.790786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:18.791669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.791793Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:18.791880Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:18.791909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.791944Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:18.791970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.792003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:39:18.792057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:18.792091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:39:18.792121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:39:18.792171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:18.792207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:39:18.792249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:39:18.794030Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:18.794125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:18.794158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
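
The same test also walks the sub-operation state machine: "Change state for txid 1:0 2 -> 3", "3 -> 128", "128 -> 240". Reading the adjacent ProgressState messages, 2/3/128/240 line up with TCreateParts, TConfigureParts, TPropose and TDone; that mapping is inferred from this trace only, and the sketch below simply replays it:

    #include <iostream>
    #include <stdexcept>

    // Sub-operation progression as visible in the trace. The state names
    // are inferred from the surrounding ProgressState log lines and are
    // illustrative, not YDB's definitions.
    enum class EOpState { CreateParts = 2, ConfigureParts = 3, Propose = 128, Done = 240 };

    EOpState Next(EOpState s) {
        switch (s) {
            case EOpState::CreateParts:    return EOpState::ConfigureParts;
            case EOpState::ConfigureParts: return EOpState::Propose;
            case EOpState::Propose:        return EOpState::Done;
            case EOpState::Done:           throw std::logic_error("already done");
        }
        throw std::logic_error("unknown state");
    }

    int main() {
        // Prints the exact transitions seen in the log: 2 -> 3, 3 -> 128, 128 -> 240.
        for (EOpState s = EOpState::CreateParts; s != EOpState::Done; s = Next(s))
            std::cout << static_cast<int>(s) << " -> " << static_cast<int>(Next(s)) << '\n';
    }
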
1/1 2025-03-28T09:39:18.978340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-03-28T09:39:18.978400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:18.978452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:39:18.978481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-28T09:39:18.978501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2025-03-28T09:39:18.978566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2025-03-28T09:39:18.978601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 112:0 2025-03-28T09:39:18.978627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 112:0 2025-03-28T09:39:18.978670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:39:18.978738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 112, publications: 3, subscribers: 0 2025-03-28T09:39:18.978770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2025-03-28T09:39:18.978800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 3], 7 2025-03-28T09:39:18.978820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 4], 18446744073709551615 2025-03-28T09:39:18.979404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-28T09:39:18.980709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-28T09:39:18.981733Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:18.981778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:18.981895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:39:18.981946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 4] 2025-03-28T09:39:18.982039Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:18.982066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 1 2025-03-28T09:39:18.982119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 3 2025-03-28T09:39:18.982151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 112, path id: 4 
FAKE_COORDINATOR: Erasing txId 112 2025-03-28T09:39:18.982776Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:39:18.982863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:39:18.982897Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 112 2025-03-28T09:39:18.982945Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2025-03-28T09:39:18.982971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2025-03-28T09:39:18.983391Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:39:18.983432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:39:18.983454Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2025-03-28T09:39:18.983485Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 7 2025-03-28T09:39:18.983504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:39:18.984414Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:39:18.984483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2025-03-28T09:39:18.984518Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2025-03-28T09:39:18.984535Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2025-03-28T09:39:18.984555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-28T09:39:18.984615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2025-03-28T09:39:18.984815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:39:18.984856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 
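
Note that the dropped path in tx 112 is published with Version: 18446744073709551615, i.e. the maximum uint64. Reading that value as a tombstone that supersedes every real version is an assumption, though one consistent with the TTxCleanDroppedPaths pass that follows; a sketch under that assumption:

    #include <cstdint>
    #include <iostream>
    #include <limits>

    // The trace publishes the dropped path with Version: 18446744073709551615,
    // which is std::numeric_limits<uint64_t>::max(). Treating it as a tombstone
    // works because it compares greater than any version a live path can have.
    constexpr uint64_t TombstoneVersion = std::numeric_limits<uint64_t>::max();

    bool SupersededBy(uint64_t current, uint64_t incoming) {
        return incoming > current;   // monotonic versions: larger wins
    }

    int main() {
        uint64_t cached = 7;                       // last version a subscriber saw
        if (SupersededBy(cached, TombstoneVersion))
            std::cout << "path dropped; evict cached entry" << '\n';
        std::cout << TombstoneVersion << '\n';     // prints 18446744073709551615
    }
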
2025-03-28T09:39:18.984908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2025-03-28T09:39:18.986121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-28T09:39:18.986407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-28T09:39:18.987714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2025-03-28T09:39:18.987829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2025-03-28T09:39:18.988132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2025-03-28T09:39:18.988168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2025-03-28T09:39:18.988723Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2025-03-28T09:39:18.988784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2025-03-28T09:39:18.988807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:495:2486] TestWaitNotification: OK eventTxId 112 2025-03-28T09:39:18.989505Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:39:18.989648Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 154us result status StatusSuccess 2025-03-28T09:39:18.989864Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000008 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } UserAttributes { Key: "AttrB1" Value: "ValB1" } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 113 2025-03-28T09:39:18.992947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpRmDir Drop { Name: "DirB" } ApplyIf { PathId: 2 PathVersion: 8 } ApplyIf { PathId: 3 PathVersion: 7 } ApplyIf { PathId: 4 PathVersion: 3 } } TxId: 113 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:18.993061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TRmDir Propose, path: /MyRoot/DirB, pathId: 0, opId: 113:0, at schemeshard: 72057594046678944 2025-03-28T09:39:18.993181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 113:1, propose status:StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-28T09:39:18.994920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 113, response: Status: StatusPreconditionFailed Reason: "fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4]" TxId: 113 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:18.995022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 113, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 4], operation: DROP DIRECTORY, path: /MyRoot/DirB TestModificationResult got TxId: 113, wait until txId: 113 >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] >> TSchemeShardTest::FindSubDomainPathId |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableRange [GOOD] Test command err: Trying to start YDB, gRPC: 6131, MsgBus: 5271 2025-03-28T09:39:14.983191Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792662456167554:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:14.983305Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000607/r3tmp/tmphDd1TZ/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6131, node 1 2025-03-28T09:39:15.263286Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:15.264247Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:39:15.265679Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:39:15.291697Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:15.291728Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:15.291756Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:15.291963Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:15.347553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:15.347659Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:15.349242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
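
The StatusPreconditionFailed above shows ApplyIf acting as a compare-and-set guard: txId 113 pins paths 2, 3 and 4 to expected PathVersions, and the whole proposal is rejected because path 4 no longer exists. A minimal model of that check (illustrative names, not YDB code):

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <optional>
    #include <string>
    #include <vector>

    // Every (pathId, version) guard must match the live catalog, otherwise
    // the proposal fails atomically with a precondition error.
    struct TApplyIf { uint64_t PathId; uint64_t PathVersion; };

    std::optional<std::string> CheckApplyIf(
            const std::map<uint64_t, uint64_t>& catalog,   // pathId -> current version
            const std::vector<TApplyIf>& guards) {
        for (const auto& g : guards) {
            auto it = catalog.find(g.PathId);
            if (it == catalog.end())
                return "no path with id " + std::to_string(g.PathId);
            if (it->second != g.PathVersion)
                return "path version mismatch for id " + std::to_string(g.PathId);
        }
        return std::nullopt;   // all guards hold, the operation may proceed
    }

    int main() {
        std::map<uint64_t, uint64_t> catalog{{2, 8}, {3, 7}};   // path 4 already dropped
        auto err = CheckApplyIf(catalog, {{2, 8}, {3, 7}, {4, 3}});
        if (err) std::cout << "StatusPreconditionFailed: " << *err << '\n';
    }
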
TClient is connected to server localhost:5271 TClient is connected to server localhost:5271 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:15.722166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:15.745629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:15.852359Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:15.992069Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:16.063819Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:17.395047Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792675341071223:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:17.395188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:17.611904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:17.634919Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:17.656778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:17.679988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:17.726894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:17.752098Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:17.784209Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792675341071731:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:17.784275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:17.784320Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792675341071736:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:17.787241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:17.795282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792675341071738:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:17.861977Z node 1 :TX_PROXY ERROR: Actor# [1:7486792675341071792:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
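
The "Scheduled retry ... doublechecking" warning followed by the TX_PROXY "path exist, request accepts it" error is a benign race: several sessions create the default resource pool concurrently, and the losers treat already-exists as success. A hedged sketch of that idempotent create-if-absent pattern (names are illustrative):

    #include <iostream>
    #include <mutex>
    #include <set>
    #include <string>

    // Many sessions race to create ".metadata/workload_manager/pools/default";
    // exactly one create wins and the rest accept "path exist" as success.
    class TCatalog {
        std::mutex Mutex;
        std::set<std::string> Paths;
    public:
        // Returns true if created, false if it already existed; both are OK.
        bool CreateIfAbsent(const std::string& path) {
            std::lock_guard<std::mutex> lock(Mutex);
            return Paths.insert(path).second;
        }
    };

    int main() {
        TCatalog catalog;
        const std::string pool = "/Root/.metadata/workload_manager/pools/default";
        for (int session = 0; session < 3; ++session) {
            bool created = catalog.CreateIfAbsent(pool);
            std::cout << "session " << session << ": "
                      << (created ? "created" : "path exist, request accepts it") << '\n';
        }
    }
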
: Error: Table intent determination, code: 1040
:3:27: Error: RANGE is not supported on Kikimr clusters. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 26485, MsgBus: 12949 2025-03-28T09:39:08.958171Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792636879928348:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:08.958320Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000819/r3tmp/tmpuqeS59/pdisk_1.dat 2025-03-28T09:39:09.218650Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26485, node 1 2025-03-28T09:39:09.300009Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:09.300167Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:09.300375Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:09.300395Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:09.300403Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:09.300546Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:09.301844Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12949 TClient is connected to server localhost:12949 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:09.719333Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:09.735675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:09.857258Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
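
The failing KqpYql::TableRange query reports its position as ":3:27:", i.e. line 3, column 27 of the script. Assuming the "<line>:<column>:" prefix format seen here (read off this trace, not a documented contract), a small parser for such positions looks like:

    #include <iostream>
    #include <sstream>
    #include <string>

    // Parses the "<line>:<column>:" prefix of a YQL issue string, e.g.
    // ":3:27: Error: RANGE is not supported on Kikimr clusters."
    bool ParsePos(const std::string& s, int& line, int& col) {
        char colon1 = 0, colon2 = 0;
        std::istringstream in(s);
        return static_cast<bool>(in >> colon1 >> line >> colon2 >> col)
               && colon1 == ':' && colon2 == ':';
    }

    int main() {
        int line = 0, col = 0;
        if (ParsePos(":3:27: Error: RANGE is not supported on Kikimr clusters.", line, col))
            std::cout << "issue at line " << line << ", column " << col << '\n';
    }
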
2025-03-28T09:39:09.977532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:10.032147Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:11.847620Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792649764832018:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.847709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.112760Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.135537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.158722Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.184480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.219166Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.248730Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:12.285165Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792654059799824:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.285239Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.285263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792654059799829:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:12.288246Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:12.295573Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792654059799831:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:12.376843Z node 1 :TX_PROXY ERROR: Actor# [1:7486792654059799885:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:13.382025Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.811048Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154753844, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 3379, MsgBus: 27917 2025-03-28T09:39:14.460729Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792660513067054:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:14.460821Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000819/r3tmp/tmpXnTotm/pdisk_1.dat 2025-03-28T09:39:14.548541Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3379, node 2 2025-03-28T09:39:14.591967Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:14.592051Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:14.593556Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:14.606157Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:14.606184Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:14.606192Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:14.606307Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27917 TClient is connected to server localhost:27917 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
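
KqpSnapshotManager identifies the snapshot it discards at shutdown as [step: 1743154753844, txId: 281474976710673]. Comparing such (step, txId) pairs lexicographically is the usual convention for MVCC versions; the sketch below only illustrates that comparison and is not YDB's implementation:

    #include <cstdint>
    #include <iostream>
    #include <tuple>

    // A snapshot identified by a (step, txId) pair, ordered lexicographically:
    // the coordinator step dominates, txId breaks ties within a step.
    struct TSnapshot {
        uint64_t Step;
        uint64_t TxId;
        bool operator<(const TSnapshot& o) const {
            return std::tie(Step, TxId) < std::tie(o.Step, o.TxId);
        }
    };

    int main() {
        TSnapshot ours{1743154753844ull, 281474976710673ull};   // from the trace
        TSnapshot later{1743154753845ull, 281474976710001ull};
        std::cout << (ours < later ? "ours is older; safe to discard on shutdown"
                                   : "ours is newer") << '\n';
    }
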
2025-03-28T09:39:14.986214Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:15.003809Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:15.073083Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:15.188718Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:15.243338Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:17.080420Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792673397970728:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:17.080518Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:17.099342Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:17.126174Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:17.152270Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:17.175295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:17.201099Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:17.228197Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:17.265772Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792673397971235:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:17.265871Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:17.265895Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792673397971240:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:17.268238Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:17.275749Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792673397971242:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:17.374488Z node 2 :TX_PROXY ERROR: Actor# [2:7486792673397971297:3444] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TSchemeShardUserAttrsTest::Boot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SystemTables [GOOD] Test command err: Trying to start YDB, gRPC: 23116, MsgBus: 12734 2025-03-28T09:39:08.401460Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792635822843193:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:08.401581Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000849/r3tmp/tmpgzGVSS/pdisk_1.dat 2025-03-28T09:39:08.679782Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23116, node 1 2025-03-28T09:39:08.746941Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:08.746964Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:08.746971Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:08.747090Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:08.776457Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:08.776618Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:08.778329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:12734 TClient is connected to server localhost:12734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:09.176286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
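
The repeated "WaitRootIsUp 'Root'... / WaitRootIsUp 'Root' success." lines are the test client polling Ls until the root path answers. A generic poll-until-ready helper in that spirit (purely illustrative; the real TClient is YDB-internal):

    #include <chrono>
    #include <functional>
    #include <iostream>
    #include <thread>

    // Issue a probe, retry on failure, give up at the deadline.
    bool WaitUntil(const std::function<bool()>& probe,
                   std::chrono::milliseconds timeout,
                   std::chrono::milliseconds interval) {
        auto deadline = std::chrono::steady_clock::now() + timeout;
        while (std::chrono::steady_clock::now() < deadline) {
            if (probe()) return true;
            std::this_thread::sleep_for(interval);
        }
        return false;
    }

    int main() {
        int attempts = 0;
        // Fake probe that "comes up" on the third attempt.
        bool up = WaitUntil([&] { return ++attempts >= 3; },
                            std::chrono::seconds(5), std::chrono::milliseconds(10));
        std::cout << (up ? "WaitRootIsUp 'Root' success." : "timeout") << '\n';
    }
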
2025-03-28T09:39:09.203276Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:09.333450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:09.467220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:09.540337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:11.208524Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792648707746850:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.208631Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.431503Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.454636Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.476555Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.500488Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.526746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.591405Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:11.626008Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792648707747365:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.626069Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.626118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792648707747370:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:11.629358Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:11.637752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792648707747372:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:11.722501Z node 1 :TX_PROXY ERROR: Actor# [1:7486792648707747426:3448] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:12.861481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 2695, MsgBus: 28783 2025-03-28T09:39:13.772329Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792657824604521:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:13.772414Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000849/r3tmp/tmpJ9NqQ6/pdisk_1.dat 2025-03-28T09:39:13.871736Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:13.901429Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:13.901493Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:13.902557Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 2695, node 2 2025-03-28T09:39:13.935496Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:13.935524Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:13.935531Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:13.935643Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:28783 TClient is connected to server localhost:28783 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:14.322715Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:14.335825Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:14.381032Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:14.520328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:14.611406Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:16.437037Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792670709508181:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.437117Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.482538Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.506783Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.531295Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.555512Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.579611Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.606610Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.640428Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792670709508689:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.640509Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.640531Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792670709508694:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.643485Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:16.661421Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792670709508696:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:16.746545Z node 2 :TX_PROXY ERROR: Actor# [2:7486792670709508751:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:17.705279Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154757674, txId: 281474976715671] shutting down 2025-03-28T09:39:17.793410Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154757785, txId: 281474976715673] shutting down 2025-03-28T09:39:18.533570Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154758555, txId: 281474976715675] shutting down 2025-03-28T09:39:18.772817Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792657824604521:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:18.772878Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:39:19.617478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:19.617594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:19.617631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:19.617665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:19.617717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:19.617749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:19.617800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:19.617869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:19.618174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TxInitSchema.Execute 2025-03-28T09:39:19.708553Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:39:19.708622Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:19.725037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:19.725748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:19.725924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:19.732145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:19.732334Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:19.732926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:19.733134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:19.735109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:19.736419Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:19.736474Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:19.736636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:19.736689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:19.736747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:19.736832Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:39:19.743551Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:39:19.877575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:19.877831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:19.878075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:19.878335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:19.878385Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:19.880818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:19.880974Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:19.881153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:19.881214Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:19.881249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:19.881280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:19.883331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:19.883383Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:19.883416Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:19.885049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:19.885093Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:19.885140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:19.885189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:19.893748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:19.895840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:19.896041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:19.896990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:19.897129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:19.897171Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:19.897573Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:19.897630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:19.897785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 1] was 1 2025-03-28T09:39:19.897847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:19.899719Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:19.899763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:19.899935Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:19.899987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:19.900343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:19.900391Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:19.900492Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:19.900535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:19.900574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:19.900605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:19.900635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:39:19.900672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:19.900704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:39:19.900733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:39:19.900792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:19.900822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:39:19.900866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:39:19.902765Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:19.902864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:19.902901Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:39:19.902936Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:39:19.902977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:19.903086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication 
complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:39:19.906373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:39:19.906893Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpScripting::ExecuteYqlScriptScanScalar [GOOD] >> KqpScripting::JoinIndexLookup >> KqpYql::DdlDmlMix [GOOD] >> KqpYql::CreateUseTable >> KqpScripting::QueryStats [GOOD] >> KqpScripting::Pure >> TSchemeShardTest::FindSubDomainPathId [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActor |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] >> TVectorIndexTests::CreateTableWithError |97.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TargetDiscoverer::IndexedTable |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TargetDiscoverer::Negative >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] >> TVectorIndexTests::CreateTableWithError [GOOD] >> TargetDiscoverer::SystemObjects |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TSchemeShardTest::FindSubDomainPathIdActor [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActorAsync >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TVectorIndexTests::CreateTableWithError [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:39:21.407815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:21.407939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:21.407991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:21.408029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: using default configuration 2025-03-28T09:39:21.408085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:21.408120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:21.408192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:21.408283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:21.408586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:21.497493Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:39:21.497540Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:21.511806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:21.512051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:21.512223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:21.518142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:21.518374Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:21.519011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:21.519261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:21.521095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:21.522340Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:21.522404Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:21.522576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:21.522628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:21.522674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:21.522810Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.529050Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:39:21.622353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:21.622535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 
2025-03-28T09:39:21.622714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:21.622901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:21.622947Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.624981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:21.625075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:21.625193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.625228Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:21.625265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:21.625295Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:21.626822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.626899Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:21.626946Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:21.628245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.628285Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.628336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:21.628368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:21.631100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:21.632508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:21.632668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:21.633531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:21.633629Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:21.633670Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:21.633861Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:21.633895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:21.634010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:21.634063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:21.635554Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:21.635601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:21.635715Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:21.635750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:21.636038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.636072Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:21.636144Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:21.636170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:21.636194Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:21.636217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:21.636241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:39:21.636275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:21.636307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:39:21.636336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:39:21.636382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:21.636407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:39:21.636426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:39:21.637754Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, 
cookie: 1 2025-03-28T09:39:21.637824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:21.637848Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2025-03-28T09:39:21.637879Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2025-03-28T09:39:21.637916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:21.638010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2025-03-28T09:39:21.640280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2025-03-28T09:39:21.640755Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2025-03-28T09:39:21.642263Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Bootstrap 2025-03-28T09:39:21.656533Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] Become StateWork (SchemeCache [1:272:2263]) 2025-03-28T09:39:21.659110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "__ydb_parent" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "__ydb_parent" Type: EIndexTypeGlobalVectorKmeansTree VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:21.659499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-03-28T09:39:21.659638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 101:0, explain: index key column shouldn't have a reserved name, at schemeshard: 72057594046678944 2025-03-28T09:39:21.659675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: index key column shouldn't have a reserved name, at schemeshard: 72057594046678944 2025-03-28T09:39:21.660460Z node 1 :TX_PROXY DEBUG: actor# [1:267:2258] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:39:21.663181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "index key column shouldn\'t have a reserved name" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:21.663295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: index key column shouldn't have a reserved name, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors 2025-03-28T09:39:21.663654Z node 1 :TX_PROXY DEBUG: Handle 
TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2025-03-28T09:39:21.666344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "vectors" Columns { Name: "id" Type: "Uint64" } Columns { Name: "embedding" Type: "String" } KeyColumnNames: "id" } IndexDescription { Name: "idx_vector" KeyColumnNames: "embedding" Type: EIndexTypeGlobalVectorKmeansTree DataColumnNames: "id" VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 1024 } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:21.666666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateTableIndex construct operation table path: /MyRoot/vectors domain path id: [OwnerId: 72057594046678944, LocalPathId: 1] domain path: /MyRoot shardsToCreate: 2 GetShardsInside: 0 MaxShards: 200000 2025-03-28T09:39:21.666825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TReject Propose, opId: 102:0, explain: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2025-03-28T09:39:21.666869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, at schemeshard: 72057594046678944 2025-03-28T09:39:21.668531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "the same column can\'t be used as key and data column for one index, for example id" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:21.668690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: the same column can't be used as key and data column for one index, for example id, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/vectors TestModificationResult got TxId: 102, wait until txId: 102 |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> KqpExplain::LimitOffset >> KqpExplain::SelfJoin3xSameLabels >> TargetDiscoverer::Transfer >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpScripting::SecondaryIndexes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: 
[1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:37:58.864319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:58.864383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:58.864408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:58.864462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:58.864502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:58.864523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:37:58.864581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:58.864649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:58.864864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:58.922154Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:37:58.922204Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:37:58.930550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:58.930627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:58.930751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:58.935716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:58.935853Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:58.936247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:58.936381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:58.937634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at 
schemeshard: 72057594046678944 2025-03-28T09:37:58.938515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:58.938559Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:58.938638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:58.938679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:58.938737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:58.938839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:37:58.943493Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:37:59.050566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:59.050860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.051128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:59.051364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:59.051448Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.053477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.053577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:59.053733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.053784Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:59.053824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:59.053845Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:59.055406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.055444Z node 1 
:FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:59.055470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:59.056740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.056772Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.056804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:59.056858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.059695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:59.061029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:59.061163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:59.061886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:59.061986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:59.062043Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:59.062292Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:59.062355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:59.062514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:59.062596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:59.064546Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:59.064591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:59.064747Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-03-28T09:37:59.064789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:37:59.065015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:59.065056Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:59.065158Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:59.065194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:59.065230Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025 ... 7723Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2025-03-28T09:39:21.847831Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 329 RawX2: 416611830028 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-03-28T09:39:21.847873Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:39:21.847945Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 329 RawX2: 416611830028 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-03-28T09:39:21.847997Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, datashard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:21.848035Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged CollectSchemaChanged: false 2025-03-28T09:39:21.851667Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:39:21.851800Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:39:21.854286Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.854764Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1146 } } 2025-03-28T09:39:21.854815Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2025-03-28T09:39:21.854955Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1146 } } 2025-03-28T09:39:21.855063Z 
node 97 :FLAT_TX_SCHEMESHARD INFO: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000004 OrderId: 1003 ExecLatency: 3 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1146 } } 2025-03-28T09:39:21.856161Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 416611830117 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-03-28T09:39:21.856211Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2025-03-28T09:39:21.856334Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 435 RawX2: 416611830117 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-03-28T09:39:21.856378Z node 97 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:39:21.856465Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 435 RawX2: 416611830117 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2025-03-28T09:39:21.856528Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:2, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:21.856570Z node 97 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.856606Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:39:21.856649Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:39:21.856679Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 129 -> 240 2025-03-28T09:39:21.859370Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.859953Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.860542Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.860599Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0ProgressState, operation type TxCopyTable 2025-03-28T09:39:21.860654Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: Set barrier, OperationId: 1003:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2025-03-28T09:39:21.860695Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2025-03-28T09:39:21.860765Z node 97 :FLAT_TX_SCHEMESHARD INFO: TCopyTable TCopyTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: 
NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: CopyTableBarrier }, at tablet# 72057594046678944 2025-03-28T09:39:21.860806Z node 97 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1003:0 240 -> 240 2025-03-28T09:39:21.863806Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.863865Z node 97 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1003:0 ProgressState 2025-03-28T09:39:21.863967Z node 97 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-03-28T09:39:21.864010Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-28T09:39:21.864049Z node 97 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 1/1 2025-03-28T09:39:21.864085Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-28T09:39:21.864124Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2025-03-28T09:39:21.864168Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2025-03-28T09:39:21.864213Z node 97 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-03-28T09:39:21.864247Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-03-28T09:39:21.864380Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:39:21.864426Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-03-28T09:39:21.866736Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-03-28T09:39:21.866787Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-03-28T09:39:21.867177Z node 97 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-03-28T09:39:21.867274Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-03-28T09:39:21.867313Z node 97 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [97:529:2490] TestWaitNotification: OK eventTxId 1003 2025-03-28T09:39:21.867791Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:39:21.867994Z node 97 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 240us result status StatusSuccess 2025-03-28T09:39:21.868464Z node 97 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155313.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123155313.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154113.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-03-28T09:38:34.330841Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:38:34.427314Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:38:34.453303Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:38:34.453622Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:38:34.462331Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:38:34.462572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:38:34.462926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:38:34.463064Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:38:34.463188Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:38:34.463318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:38:34.463426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:38:34.463578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:38:34.463715Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:38:34.463833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:38:34.463951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:38:34.464085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:38:34.495755Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:38:34.496138Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:38:34.496202Z node 
1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:38:34.496383Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:38:34.496543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:38:34.496638Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:38:34.496687Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:38:34.496786Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:38:34.496853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:38:34.496900Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:38:34.496934Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:38:34.497097Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:38:34.497171Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:38:34.497218Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:38:34.497249Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:38:34.497340Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:38:34.497396Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:38:34.497440Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:38:34.497471Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:38:34.497543Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:38:34.497583Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:38:34.497614Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:38:34.497667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:38:34.497711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:38:34.497756Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:38:34.498167Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-03-28T09:38:34.498250Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=38; 2025-03-28T09:38:34.498325Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=33; 2025-03-28T09:38:34.498440Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=59; 2025-03-28T09:38:34.498641Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:38:34.498742Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:38:34.498791Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:38:34.499008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:38:34.499055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:38:34.499092Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:38:34.499234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:38:34.499273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:38:34.499314Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:38:34.499515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... e=14; 2025-03-28T09:39:22.587209Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=380; 2025-03-28T09:39:22.587261Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=29129; 2025-03-28T09:39:22.593329Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=5981; 2025-03-28T09:39:22.599738Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5303; 2025-03-28T09:39:22.599846Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=6409; 2025-03-28T09:39:22.599979Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=75; 2025-03-28T09:39:22.600062Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=44; 2025-03-28T09:39:22.600154Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=62; 2025-03-28T09:39:22.600233Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=44; 2025-03-28T09:39:22.604662Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4373; 2025-03-28T09:39:22.613948Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9184; 2025-03-28T09:39:22.614087Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=44; 2025-03-28T09:39:22.614169Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=30; 2025-03-28T09:39:22.614223Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-03-28T09:39:22.614278Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-03-28T09:39:22.614326Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-03-28T09:39:22.614408Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=41; 2025-03-28T09:39:22.614460Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-03-28T09:39:22.614566Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=61; 2025-03-28T09:39:22.614620Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2025-03-28T09:39:22.614716Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=54; 2025-03-28T09:39:22.614819Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=57; 2025-03-28T09:39:22.615066Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=201; 2025-03-28T09:39:22.615110Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=63170; 2025-03-28T09:39:22.615295Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=20801572;raw_bytes=32169208;count=11;records=320000} evicted {blob_bytes=10402524;raw_bytes=16084646;count=7;records=160000} at tablet 9437184 2025-03-28T09:39:22.615410Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:39:22.615469Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:39:22.615536Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:39:22.625250Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-28T09:39:22.625528Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:39:22.625596Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:39:22.625686Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-03-28T09:39:22.625761Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:39:22.625811Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:39:22.625864Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:39:22.625905Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:39:22.626009Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:39:22.626607Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:39:22.626721Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;self_id=[1:1972:3873];tablet_id=9437184;parent=[1:1934:3842];fline=manager.cpp:82;event=ask_data;request=request_id=108;1={portions_count=18};; 2025-03-28T09:39:22.627825Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:39:22.627962Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:39:22.627995Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T09:39:22.628021Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:39:22.628066Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:39:22.628128Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:39:22.628183Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=3; 2025-03-28T09:39:22.628247Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700006;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:39:22.628294Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:39:22.628344Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:39:22.628384Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:39:22.628473Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:39:22.629814Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=18;path_id=1; 2025-03-28T09:39:22.631522Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=3;tablet_id=9437184;self_id=[1:1934:3842];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 160000/10402332 160000/10402524 >> TargetDiscoverer::Basic >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SecondaryIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 13942, MsgBus: 11602 2025-03-28T09:39:10.946307Z node 1 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792645123713940:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:10.946404Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00074b/r3tmp/tmpUdw5Xk/pdisk_1.dat 2025-03-28T09:39:11.182323Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13942, node 1 2025-03-28T09:39:11.234200Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:11.234228Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:11.234239Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:11.234381Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:11.287894Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:11.288045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:11.290116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11602 TClient is connected to server localhost:11602 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:11.664597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:11.692462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:11.839343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:11.998915Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:12.059125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:13.399424Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792658008617610:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:13.399551Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:13.683746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.711273Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.736221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.806310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.850746Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.915940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:13.963889Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792658008618129:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:13.963974Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:13.964422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792658008618134:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:13.968202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:13.977213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792658008618136:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:14.060214Z node 1 :TX_PROXY ERROR: Actor# [1:7486792662303585488:3450] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:14.978056Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.453070Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154755489, txId: 281474976715673] shutting down 2025-03-28T09:39:15.502569Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715676:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.549235Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715677:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.559513Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715678:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.570410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715679:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.626054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715680:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.634998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715681:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.894082Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154755937, txId: 281474976715682] shutting down 2025-03-28T09:39:15.932731Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715685:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.946406Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792645123713940:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:15.946486Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:15.965916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715686:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.222217Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154756259, txId: 281474976715687] shutting down Trying to start YDB, gRPC: 32169, MsgBus: 24923 2025-03-28T09:39:16.805337Z node 2 :METADATA_PROVIDER WARN: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792671708084911:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:16.805396Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00074b/r3tmp/tmpSvTopC/pdisk_1.dat 2025-03-28T09:39:16.890638Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32169, node 2 2025-03-28T09:39:16.930933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:16.931037Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:16.932636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:16.949913Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:16.949945Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:16.949959Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:16.950080Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24923 TClient is connected to server localhost:24923 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:17.286574Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:17.294185Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:17.361913Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:17.476535Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:17.545021Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:19.298056Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792684592988577:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:19.298137Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:19.329263Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:19.352507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:19.374799Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:19.397456Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:19.425175Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:19.454127Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:19.499284Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792684592989084:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:19.499366Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:19.499367Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792684592989089:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:19.502219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:19.510230Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792684592989091:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:19.589611Z node 2 :TX_PROXY ERROR: Actor# [2:7486792684592989145:3440] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:20.307503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:39:20.375088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480 2025-03-28T09:39:20.443842Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:39:21.805751Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792671708084911:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:21.805820Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] >> TargetDiscoverer::Negative [GOOD] |97.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} >> TAsyncIndexTests::CreateTable >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithoutLoginPlaceholders [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnames |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnames [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV4List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV6List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesLdapsScheme [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 10997, MsgBus: 9907 2025-03-28T09:39:12.930066Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792655393474952:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:12.930132Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006ab/r3tmp/tmpkEXPLT/pdisk_1.dat TServer::EnableGrpc on GrpcPort 10997, node 1 2025-03-28T09:39:13.235158Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:39:13.236709Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:39:13.238574Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:13.269504Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2025-03-28T09:39:13.269531Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:13.269541Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:13.269673Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:13.302964Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:13.303094Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:13.307054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9907 TClient is connected to server localhost:9907 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:13.746396Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:13.772807Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:13.883157Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:14.031706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:14.105655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:15.727275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792668278378601:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:15.727354Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.042990Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.069846Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.094071Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.118308Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.142410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.210081Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.263356Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792672573346414:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.263447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.263481Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792672573346419:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.266082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:16.273433Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792672573346421:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:16.364855Z node 1 :TX_PROXY ERROR: Actor# [1:7486792672573346474:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:17.282970Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T09:39:17.539980Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154757582, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 31116, MsgBus: 1823 2025-03-28T09:39:18.185022Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792681332986537:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:18.185180Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006ab/r3tmp/tmpEFotck/pdisk_1.dat 2025-03-28T09:39:18.282026Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 31116, node 2 2025-03-28T09:39:18.312688Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:18.312770Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:18.314401Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:18.326653Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:18.326676Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:18.326697Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:18.326791Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1823 TClient is connected to server localhost:1823 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:39:18.681820Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:18.699675Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:18.768749Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:18.893734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:18.968176Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:20.812227Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792689922922898:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:20.812296Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:20.879734Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:20.904761Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:20.931096Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:20.962626Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:20.991394Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:21.019339Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:21.058626Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792694217890700:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:21.058713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:21.058782Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792694217890705:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:21.061642Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:21.070205Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792694217890707:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:21.158092Z node 2 :TX_PROXY ERROR: Actor# [2:7486792694217890762:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:22.143462Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:39:22.925768Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154762958, txId: 281474976715675] shutting down |97.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} >> TargetDiscoverer::SystemObjects [GOOD] >> TargetDiscoverer::IndexedTable [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:38:36.219093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:36.219206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:36.219239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:36.219266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:36.220225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:36.220252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:36.220298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:36.220375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:36.222130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:36.313267Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:36.313331Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:36.325250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:36.325417Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:36.325542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:36.330625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:36.331959Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:36.335591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.337526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:36.341799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.351420Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:36.351497Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.351706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:36.351763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:36.351816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:36.351900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.358581Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:36.470104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:36.470330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.470527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:36.470760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:36.470805Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.472849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.472978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:36.473196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.473239Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, 
operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:36.473266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:36.473304Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:36.474939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.474983Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:36.475013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:36.476328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.476366Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.476405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.476448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.479000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:36.480589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:36.480718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:36.482105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.482205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:36.482241Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.483747Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:36.483817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.484021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:36.484128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:36.486985Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2025-03-28T09:38:36.487025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:36.487175Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.487208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:36.487593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.487639Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:36.487779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:36.487809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.487839Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:36.487874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.487904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:36.487937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.487963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:38:36.487985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:36.488035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:36.488062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:36.488086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:36.489437Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:36.489527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:36.489560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2025-03-28T09:39:22.818523Z node 15 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:22.818588Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:39:22.818637Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [15:206:2208], at schemeshard: 72057594046678944, txId: 102, path id: 3 2025-03-28T09:39:22.818888Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2025-03-28T09:39:22.818944Z node 15 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:0 ProgressState 2025-03-28T09:39:22.819160Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:39:22.819231Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:39:22.819290Z node 15 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:0 progress is 1/1 2025-03-28T09:39:22.819336Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:39:22.819412Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2025-03-28T09:39:22.819497Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2025-03-28T09:39:22.819565Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:39:22.819627Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:39:22.819849Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2025-03-28T09:39:22.819935Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2025-03-28T09:39:22.820008Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 5 2025-03-28T09:39:22.820054Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2025-03-28T09:39:22.822267Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:22.822362Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:22.822403Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:39:22.822488Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-28T09:39:22.822578Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2025-03-28T09:39:22.824296Z node 15 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 
72057594046678944, cookie: 102 2025-03-28T09:39:22.824398Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:39:22.824435Z node 15 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:39:22.824467Z node 15 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2025-03-28T09:39:22.824501Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:39:22.824588Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2025-03-28T09:39:22.831366Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2025-03-28T09:39:22.831750Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:39:22.837334Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:39:22.837417Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:39:22.838090Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:39:22.838245Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:39:22.838309Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [15:512:2465] TestWaitNotification: OK eventTxId 102 2025-03-28T09:39:22.839136Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:39:22.839519Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA" took 446us result status StatusSuccess 2025-03-28T09:39:22.840080Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA" PathDescription { Self { Name: "SubDomenA" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ChildrenExist: false BalancerTabletID: 72075186233409547 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { 
Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:22.840846Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:39:22.841099Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA/Topic1" took 274us result status StatusSuccess 2025-03-28T09:39:22.841625Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:23.375621Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: PathId: 3 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:23.375934Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe pathId 3 took 338us result status StatusSuccess 2025-03-28T09:39:23.376478Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186233409547 } PersQueueGroup { Name: "Topic1" PathId: 3 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:23.491802Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: FindTabletSubDomainPathId for tablet 72075186233409546 |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest |97.3%| [TA] $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative [GOOD] Test command err: 2025-03-28T09:39:21.661590Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792690862584108:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:21.661667Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00189f/r3tmp/tmp49mNA6/pdisk_1.dat 2025-03-28T09:39:21.952129Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:22.003147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:22.003253Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:22.004792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:10509 TServer::EnableGrpc on GrpcPort 12517, node 1 2025-03-28T09:39:22.162204Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:22.162250Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:22.162268Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:22.162384Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10509 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:22.407845Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:22.447367Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2025-03-28T09:39:22.447427Z node 1 :REPLICATION_CONTROLLER ERROR: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/Table, status# SCHEME_ERROR, issues# {
: Error: Path not found } |97.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] Test command err: Trying to start YDB, gRPC: 14877, MsgBus: 2941 2025-03-28T09:39:12.649996Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792652101678832:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:12.650065Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006d9/r3tmp/tmptmckDv/pdisk_1.dat 2025-03-28T09:39:12.947922Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14877, node 1 2025-03-28T09:39:13.027258Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:13.027278Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:13.027284Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:13.027400Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:13.039808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:13.039897Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:13.041732Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:2941 TClient is connected to server localhost:2941 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:13.509434Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:13.530329Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:13.657255Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:13.785615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:13.872474Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:15.713575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792664986582485:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:15.713681Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:15.974952Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.001022Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.023775Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.048705Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.072516Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.102299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.136036Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792669281550289:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.136114Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792669281550294:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.136119Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.139368Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:16.146600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792669281550296:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:16.260784Z node 1 :TX_PROXY ERROR: Actor# [1:7486792669281550351:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:17.623743Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154757638, txId: 281474976710671] shutting down 2025-03-28T09:39:17.649964Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792652101678832:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:17.650045Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:17.884347Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154757918, txId: 281474976710673] shutting down 2025-03-28T09:39:18.424829Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154758457, txId: 281474976710677] shutting down Trying to start YDB, gRPC: 11215, MsgBus: 30313 2025-03-28T09:39:19.091755Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792682519053826:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:19.091832Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006d9/r3tmp/tmpNBSR6S/pdisk_1.dat 2025-03-28T09:39:19.177020Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11215, node 2 2025-03-28T09:39:19.219389Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:19.219470Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:19.220835Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:19.233500Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:19.233524Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:19.233533Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:19.233661Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30313 TClient is connected to server localhost:30313 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:19.556982Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:19.564755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:19.634343Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:19.781577Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:19.842988Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:21.775191Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792691108990186:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:21.775272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:21.821458Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:21.847986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:21.874464Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:21.900583Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:21.926080Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:21.955948Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:21.987271Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792691108990694:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:21.987359Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792691108990699:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:21.987360Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:21.989684Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:21.996784Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792691108990701:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:22.070521Z node 2 :TX_PROXY ERROR: Actor# [2:7486792695403958051:3441] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:23.156511Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154763189, txId: 281474976715671] shutting down 2025-03-28T09:39:23.440310Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154763476, txId: 281474976715673] shutting down >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink >> TargetDiscoverer::Transfer [GOOD] >> KqpScripting::Pure [GOOD] >> KqpYql::CreateUseTable [GOOD] >> KqpStats::MultiTxStatsFullYql >> TAsyncIndexTests::CreateTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects [GOOD] Test command err: 2025-03-28T09:39:21.995396Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792694487923155:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:21.995490Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00188f/r3tmp/tmpQendT0/pdisk_1.dat 2025-03-28T09:39:22.280262Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:22.340170Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:22.340329Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:22.342044Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6043 TServer::EnableGrpc on GrpcPort 19902, node 1 2025-03-28T09:39:22.503292Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:22.503316Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:22.503322Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:22.503451Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6043 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:22.807699Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:22.835599Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:39:22.945114Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:23.005574Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743154762874, tx_id: 1 } } } 2025-03-28T09:39:23.005606Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-03-28T09:39:23.012413Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743154762944, tx_id: 281474976710658 } }, { name: export-100500, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743154762986, tx_id: 281474976710659 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-03-28T09:39:23.012465Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-03-28T09:39:24.463502Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743154762944, tx_id: 281474976710658 } } } 2025-03-28T09:39:24.463537Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-03-28T09:39:24.463575Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::IndexedTable [GOOD] Test command err:
2025-03-28T09:39:21.356517Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792694016981823:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:21.356629Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0018a7/r3tmp/tmpqqXWgP/pdisk_1.dat 2025-03-28T09:39:21.684109Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:21.732134Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:21.732243Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:21.733792Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21811 TServer::EnableGrpc on GrpcPort 2944, node 1 2025-03-28T09:39:21.898807Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:21.898838Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:21.898845Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:21.898971Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21811 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:22.176567Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:22.197471Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting...
2025-03-28T09:39:22.510592Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743154762237, tx_id: 1 } } } 2025-03-28T09:39:22.510623Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-03-28T09:39:22.518259Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743154762475, tx_id: 281474976710658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-03-28T09:39:22.518314Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-03-28T09:39:24.444862Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743154762475, tx_id: 281474976710658 } } } 2025-03-28T09:39:24.444887Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-03-28T09:39:24.444912Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table 2025-03-28T09:39:24.444976Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table/Index, dstPath# /Root/Replicated/Table/Index/indexImplTable, kind# IndexTable >> KqpScripting::JoinIndexLookup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:39:24.731758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:24.731867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:24.731918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:24.731956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:24.731998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:24.732039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:24.732099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2025-03-28T09:39:24.732214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:24.732530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:24.820979Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:39:24.821037Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:24.834848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:24.835082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:24.835249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:24.840375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:24.840560Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:24.841198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:24.841388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:24.843083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:24.844232Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:24.844290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:24.844430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:24.844473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:24.844511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:24.844583Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:39:24.850615Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:39:24.968772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:24.968986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:24.969164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:24.969443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:24.969522Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944
2025-03-28T09:39:24.971577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:24.971724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:24.971884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:24.971933Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:24.971967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:24.972000Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:24.973693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:24.973748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:24.973811Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:24.975388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:24.975442Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:24.975490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:24.975537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:24.984661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:24.986458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:24.986629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:24.987712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:24.987848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:24.987906Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:24.988155Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240
2025-03-28T09:39:24.988206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:24.988357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:24.988435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:24.990424Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:24.990491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:24.990672Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:24.990740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:24.991153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:24.991214Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:24.991316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:24.991355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:24.991393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:24.991438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:24.991474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:39:24.991519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:24.991570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:39:24.991618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:39:24.991688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:24.991727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:39:24.991759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:39:24.993718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:24.993831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:39:24.993870Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ...
e.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:25.257343Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:39:25.257392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2025-03-28T09:39:25.257432Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:2 129 -> 240 2025-03-28T09:39:25.258129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:39:25.258166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2025-03-28T09:39:25.258262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:39:25.258322Z node 1 :FLAT_TX_SCHEMESHARD INFO: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 2025-03-28T09:39:25.258391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 322 RawX2: 4294969601 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2025-03-28T09:39:25.258433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, datashard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:25.258460Z node 1 :FLAT_TX_SCHEMESHARD INFO: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:39:25.258488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2025-03-28T09:39:25.258531Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 101:0 129 -> 240 2025-03-28T09:39:25.265095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:39:25.265508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:39:25.271381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:39:25.271726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:39:25.272051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:39:25.272289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2025-03-28T09:39:25.272450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944
2025-03-28T09:39:25.272848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2025-03-28T09:39:25.272901Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:2 ProgressState 2025-03-28T09:39:25.273021Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-03-28T09:39:25.273058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-28T09:39:25.273092Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:2 progress is 2/3 2025-03-28T09:39:25.273128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2025-03-28T09:39:25.273168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2025-03-28T09:39:25.273576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:39:25.273984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2025-03-28T09:39:25.274025Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 101:0 ProgressState 2025-03-28T09:39:25.274084Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-03-28T09:39:25.274108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-28T09:39:25.274152Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#101:0 progress is 3/3 2025-03-28T09:39:25.274180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-28T09:39:25.274221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2025-03-28T09:39:25.274303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:375:2343] message: TxId: 101 2025-03-28T09:39:25.274374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2025-03-28T09:39:25.274418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:0 2025-03-28T09:39:25.274451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:0 2025-03-28T09:39:25.274584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2025-03-28T09:39:25.274637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:1 2025-03-28T09:39:25.274667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:1 2025-03-28T09:39:25.274720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:39:25.274760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 101:2 2025-03-28T09:39:25.274781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 101:2 2025-03-28T09:39:25.274829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2025-03-28T09:39:25.277677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2025-03-28T09:39:25.277731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:376:2344] TestWaitNotification: OK eventTxId 101
2025-03-28T09:39:25.278294Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:39:25.278543Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex" took 277us result status StatusSuccess 2025-03-28T09:39:25.279396Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex" PathDescription { Self { Name: "UserDefinedIndex" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "UserDefinedIndex" LocalPathId: 3 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
|97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Transfer [GOOD] Test command err: 2025-03-28T09:39:22.682794Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792698629721727:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:22.682887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00188a/r3tmp/tmpXV5D8c/pdisk_1.dat 2025-03-28T09:39:22.983702Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:23.047021Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:23.047172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:23.048771Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:7276 TServer::EnableGrpc on GrpcPort 18568, node 1 2025-03-28T09:39:23.191378Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:23.191414Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:23.191426Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:23.191631Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7276 WaitRootIsUp 'Root'...
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:23.470254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:39:23.627658Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Topic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1743154763602, tx_id: 281474976710658 } } } 2025-03-28T09:39:23.627693Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root/Topic 2025-03-28T09:39:23.659755Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTopicResponse { Result: { status: SUCCESS, issues: } } 2025-03-28T09:39:23.659806Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe topic succeeded: path# /Root/Topic 2025-03-28T09:39:23.659836Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Topic, dstPath# /Root/Replicated/Table, kind# Transfer |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> LdapAuthProviderTest::LdapServerIsUnavailable >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] >> TColumnShardTestSchema::ColdTiers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 29781, MsgBus: 11509 2025-03-28T09:39:15.432955Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792666149130124:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:15.433068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000604/r3tmp/tmpRWeqV3/pdisk_1.dat 2025-03-28T09:39:15.695696Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29781, node 1 2025-03-28T09:39:15.765250Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:15.765282Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:15.765319Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 
2025-03-28T09:39:15.765501Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:15.786377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:15.786472Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:15.788440Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11509 TClient is connected to server localhost:11509 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:16.173536Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:16.198427Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:16.321597Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:16.483192Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:16.556383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:17.768116Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792674739066509:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:17.768215Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:18.006983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:18.032808Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:18.057347Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:18.081075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:18.103575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:18.168351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:18.242587Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792679034034323:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:18.242646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:18.242666Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792679034034329:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:18.246221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:18.254793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792679034034331:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:18.353449Z node 1 :TX_PROXY ERROR: Actor# [1:7486792679034034386:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:20.206257Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154760242, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 7224, MsgBus: 26410 2025-03-28T09:39:20.909491Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792686502173168:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:20.909565Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000604/r3tmp/tmpw0cf2m/pdisk_1.dat 2025-03-28T09:39:21.007957Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7224, node 2 2025-03-28T09:39:21.040432Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:21.040514Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:21.042185Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:21.073064Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:21.073096Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:21.073104Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:21.073236Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26410 TClient is connected to server localhost:26410 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:21.464141Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:21.473917Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:21.549478Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:21.667632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:21.741157Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:23.628586Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792699387076820:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:23.628670Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:23.668360Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.695958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.723958Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.751402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.775206Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.802638Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.838862Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792699387077331:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:23.838905Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792699387077336:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:23.838965Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:23.842382Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:23.855675Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792699387077338:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:23.914051Z node 2 :TX_PROXY ERROR: Actor# [2:7486792699387077391:3438] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::CreateUseTable [GOOD] Test command err: Trying to start YDB, gRPC: 14405, MsgBus: 17337 2025-03-28T09:39:16.415118Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792671121641310:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:16.415211Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005bb/r3tmp/tmp8vivYD/pdisk_1.dat 2025-03-28T09:39:16.673495Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14405, node 1 2025-03-28T09:39:16.747207Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:16.747229Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:16.747243Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:16.747355Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:16.757092Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:16.757191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:16.758932Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17337 TClient is connected to server localhost:17337 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:17.192777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:17.206679Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:17.287640Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:17.390794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:17.461517Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:18.832814Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792679711577687:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:18.832909Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:19.065906Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:19.092384Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:19.120367Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:19.147210Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:19.190426Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:19.216487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:19.253261Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792684006545493:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:19.253329Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:19.253408Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792684006545498:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:19.256252Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:19.265585Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792684006545500:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:19.357330Z node 1 :TX_PROXY ERROR: Actor# [1:7486792684006545555:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Optimization, code: 1070
:4:24: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 Trying to start YDB, gRPC: 15098, MsgBus: 32177 2025-03-28T09:39:20.897082Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792688483476146:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:20.897171Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005bb/r3tmp/tmpdqdTjk/pdisk_1.dat 2025-03-28T09:39:20.976369Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 15098, node 2 2025-03-28T09:39:21.022548Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:21.022651Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:21.043933Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:21.070635Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:21.070659Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:21.070666Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:21.070790Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:32177 TClient is connected to server localhost:32177 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:21.444211Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:21.456001Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:21.527543Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:21.663647Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:21.729669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:23.677118Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792701368379813:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:23.677195Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:23.724224Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.749336Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.773009Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.799650Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.825367Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.859402Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.934849Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792701368380326:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:23.934926Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:23.934928Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792701368380331:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:23.937646Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:23.944925Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792701368380333:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:24.037725Z node 2 :TX_PROXY ERROR: Actor# [2:7486792705663347685:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:24.815558Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.021562Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154765058, txId: 281474976715673] shutting down >> TargetDiscoverer::Basic [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::JoinIndexLookup [GOOD] Test command err: Trying to start YDB, gRPC: 32090, MsgBus: 1285 2025-03-28T09:39:15.548601Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792667045024192:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:15.548718Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005f1/r3tmp/tmpIBal0N/pdisk_1.dat 2025-03-28T09:39:15.823567Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 32090, node 1 2025-03-28T09:39:15.883981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:15.884206Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:15.885647Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:15.908915Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:15.908943Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:15.908952Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:15.909103Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1285 TClient is connected to server localhost:1285 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:16.366278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:16.383054Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:16.496351Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:16.653498Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:16.716450Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:18.037065Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792679929927801:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:18.037184Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:18.367813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:18.391875Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:18.419523Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:18.446767Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:18.474734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:18.513977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:18.562251Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792679929928314:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:18.562318Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:18.562489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792679929928319:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:18.566268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:18.575118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792679929928321:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:18.668063Z node 1 :TX_PROXY ERROR: Actor# [1:7486792679929928377:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:20.130707Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154760109, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 12380, MsgBus: 16964 2025-03-28T09:39:20.908865Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792690255355669:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:20.908954Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005f1/r3tmp/tmpCxgvkd/pdisk_1.dat 2025-03-28T09:39:21.011929Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12380, node 2 2025-03-28T09:39:21.032672Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:21.032776Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:21.034465Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:21.061715Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:21.061739Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:21.061748Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:21.061853Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16964 TClient is connected to server localhost:16964 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:21.473102Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:21.490495Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:21.537284Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:21.706393Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:21.767511Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:23.500165Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792703140259310:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:23.500273Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:23.546209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.572128Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.596283Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.621654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.651209Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.681518Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:23.754862Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792703140259825:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:23.754921Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:23.755080Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792703140259830:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:23.758601Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:23.768161Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792703140259832:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:23.832292Z node 2 :TX_PROXY ERROR: Actor# [2:7486792703140259886:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155307.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155307.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143155307.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123155307.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155307.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143155307.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154107.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123155307.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123155307.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154107.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123154107.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123154107.000000s;Name=;Codec=}; 2025-03-28T09:38:28.047210Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:38:28.120321Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:38:28.134675Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:38:28.134939Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:38:28.141365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:38:28.141552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:38:28.141777Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:38:28.141865Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:38:28.141938Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:38:28.142003Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:38:28.142070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:38:28.142156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:38:28.142242Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:38:28.142320Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:38:28.142397Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:38:28.142467Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:38:28.169522Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:38:28.169831Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:38:28.169913Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:38:28.170104Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:38:28.170237Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:38:28.170303Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:38:28.170337Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:38:28.170404Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:38:28.170447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:38:28.170477Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:38:28.170496Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:38:28.170608Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:38:28.170650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:38:28.170679Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:38:28.170715Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:38:28.170791Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:38:28.170831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:38:28.170860Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:38:28.170885Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:38:28.171000Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:38:28.171054Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:38:28.171087Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:38:28.171139Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:38:28.171180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:38:28.171214Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:38:28.171651Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-03-28T09:38:28.171737Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-03-28T09:38:28.171824Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=39; 2025-03-28T09:38:28.171910Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-03-28T09:38:28.172115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:38:28.172199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:38:28.172241Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:38:28.172444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:38:28.172494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:38:28.172533Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:38:28.172685Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:38:28.172733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:4 ... 
EBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=4; 2025-03-28T09:39:26.159008Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700007;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:39:26.159067Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:39:26.159129Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:39:26.159221Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:39:26.159337Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:39:26.159570Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000007:max} readable: {1000000007:max} at tablet 9437184 2025-03-28T09:39:26.159706Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:39:26.159878Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:39:26.159947Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:39:26.160433Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-28T09:39:26.160525Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:39:26.161035Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000007:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1392:3397];trace_detailed=; 2025-03-28T09:39:26.161468Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-28T09:39:26.161718Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-28T09:39:26.161911Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:26.162055Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:26.162377Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:39:26.162498Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:26.162634Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:26.162704Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1392:3397] finished for tablet 9437184 2025-03-28T09:39:26.163220Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1391:3396];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743154766160964,"name":"_full_task","f":1743154766160964,"d_finished":0,"c":0,"l":1743154766162793,"d":1829},"events":[{"name":"bootstrap","f":1743154766161155,"d_finished":933,"c":1,"l":1743154766162088,"d":933},{"a":1743154766162352,"name":"ack","f":1743154766162352,"d_finished":0,"c":0,"l":1743154766162793,"d":441},{"a":1743154766162329,"name":"processing","f":1743154766162329,"d_finished":0,"c":0,"l":1743154766162793,"d":464},{"name":"ProduceResults","f":1743154766161821,"d_finished":521,"c":2,"l":1743154766162666,"d":521},{"a":1743154766162669,"name":"Finish","f":1743154766162669,"d_finished":0,"c":0,"l":1743154766162793,"d":124}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:26.163303Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1391:3396];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:39:26.163724Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1391:3396];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":1743154766160964,"name":"_full_task","f":1743154766160964,"d_finished":0,"c":0,"l":1743154766163352,"d":2388},"events":[{"name":"bootstrap","f":1743154766161155,"d_finished":933,"c":1,"l":1743154766162088,"d":933},{"a":1743154766162352,"name":"ack","f":1743154766162352,"d_finished":0,"c":0,"l":1743154766163352,"d":1000},{"a":1743154766162329,"name":"processing","f":1743154766162329,"d_finished":0,"c":0,"l":1743154766163352,"d":1023},{"name":"ProduceResults","f":1743154766161821,"d_finished":521,"c":2,"l":1743154766162666,"d":521},{"a":1743154766162669,"name":"Finish","f":1743154766162669,"d_finished":0,"c":0,"l":1743154766163352,"d":683}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1392:3397]->[1:1391:3396] 2025-03-28T09:39:26.163825Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:39:26.160494Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:39:26.163873Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:39:26.163997Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1392:3397];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 160000/10402136 160000/10402136 160000/10402136 80000/5203584 0/0 >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> KqpExplain::LimitOffset [GOOD] >> KqpExplain::FullOuterJoin >> TPQTest::TestPQReadAhead [GOOD] >> TPQTest::TestPQCacheSizeManagement >> KqpStats::RequestUnitForSuccessExplicitPrepare ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Basic [GOOD] Test command err: 2025-03-28T09:39:23.993651Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792702962509246:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:23.993775Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00187c/r3tmp/tmpOIbjQv/pdisk_1.dat 2025-03-28T09:39:24.234973Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:24.343203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:24.343326Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:24.345429Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8530 TServer::EnableGrpc on GrpcPort 3523, node 1 2025-03-28T09:39:24.424061Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:24.424091Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:24.424101Z node 1 :NET_CLASSIFIER 
WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:24.424228Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8530 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:24.691613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:24.705548Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:24.843325Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1743154764743, tx_id: 1 } } } 2025-03-28T09:39:24.843351Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2025-03-28T09:39:24.850352Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743154764813, tx_id: 281474976715658 } }, { name: .sys, owner: , type: Directory, size_bytes: 0, created_at: { plan_step: 0, tx_id: 0 } }] } } 2025-03-28T09:39:24.850395Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2025-03-28T09:39:26.305675Z node 1 :REPLICATION_CONTROLLER TRACE: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1743154764813, tx_id: 281474976715658 } } } 2025-03-28T09:39:26.305710Z node 1 :REPLICATION_CONTROLLER DEBUG: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2025-03-28T09:39:26.305741Z node 1 :REPLICATION_CONTROLLER INFO: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] >> KqpExplain::SelfJoin3xSameLabels [GOOD] >> KqpExplain::PureExpr >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] |97.3%| [TA] $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... 
results_accumulator.log} |97.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155308.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155308.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143155308.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123155308.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155308.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143155308.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154108.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123155308.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123155308.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154108.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123154108.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123154108.000000s;Name=;Codec=}; 2025-03-28T09:38:28.604270Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:38:28.669581Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:38:28.693450Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:38:28.693741Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:38:28.701471Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:38:28.701682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:38:28.701903Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:38:28.702016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:38:28.702130Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:38:28.702240Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:38:28.702346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:38:28.702475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:38:28.702594Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:38:28.702744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:38:28.702876Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:38:28.702980Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:38:28.731210Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:38:28.731550Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:38:28.731608Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:38:28.731774Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:38:28.731912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:38:28.731983Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:38:28.732022Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:38:28.732127Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:38:28.732185Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:38:28.732226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:38:28.732255Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:38:28.732404Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:38:28.732469Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:38:28.732509Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:38:28.732537Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:38:28.732618Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:38:28.732665Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:38:28.732704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:38:28.732736Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:38:28.732806Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:38:28.732843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:38:28.732871Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:38:28.732914Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:38:28.732950Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:38:28.732985Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:38:28.733354Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=45; 2025-03-28T09:38:28.733430Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-03-28T09:38:28.733506Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=32; 2025-03-28T09:38:28.733583Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2025-03-28T09:38:28.733728Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:38:28.733782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:38:28.733813Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:38:28.734012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:38:28.734061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:38:28.734094Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TT ... 
Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-28T09:39:27.750392Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:39:27.750431Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:39:27.750474Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:39:27.750504Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:39:27.750594Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:39:27.750802Z node 1 :TX_COLUMNSHARD DEBUG: EvScan txId: 18446744073709551615 scanId: 0 version: {1000000009:max} readable: {1000000009:max} at tablet 9437184 2025-03-28T09:39:27.750897Z node 1 :TX_COLUMNSHARD DEBUG: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2025-03-28T09:39:27.751012Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:33;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:39:27.751054Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:102;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2025-03-28T09:39:27.751406Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=program.cpp:51;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1}]}],"nodes":{"1":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2025-03-28T09:39:27.751467Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=read_metadata.h:136;filter_limit_not_detected= range{ from {+Inf} to {-Inf}}; 2025-03-28T09:39:27.751807Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1000000009:max};tablet=9437184;timeout=0.000000s;fline=tx_scan.cpp:162;event=TTxScan started;actor_id=[1:1984:3989];trace_detailed=; 2025-03-28T09:39:27.752092Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:84;ff_first=(column_ids=1;column_names=timestamp;);; 2025-03-28T09:39:27.752250Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;fline=context.cpp:99;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2025-03-28T09:39:27.752364Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:27.752461Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:27.752749Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:39:27.752836Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:27.752924Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:27.752955Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1984:3989] finished for tablet 9437184 2025-03-28T09:39:27.753263Z node 1 
:TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1983:3988];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743154767751762,"name":"_full_task","f":1743154767751762,"d_finished":0,"c":0,"l":1743154767752999,"d":1237},"events":[{"name":"bootstrap","f":1743154767751901,"d_finished":582,"c":1,"l":1743154767752483,"d":582},{"a":1743154767752723,"name":"ack","f":1743154767752723,"d_finished":0,"c":0,"l":1743154767752999,"d":276},{"a":1743154767752704,"name":"processing","f":1743154767752704,"d_finished":0,"c":0,"l":1743154767752999,"d":295},{"name":"ProduceResults","f":1743154767752311,"d_finished":335,"c":2,"l":1743154767752943,"d":335},{"a":1743154767752945,"name":"Finish","f":1743154767752945,"d_finished":0,"c":0,"l":1743154767752999,"d":54}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:27.753317Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1983:3988];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:39:27.753594Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1983:3988];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":1743154767751762,"name":"_full_task","f":1743154767751762,"d_finished":0,"c":0,"l":1743154767753349,"d":1587},"events":[{"name":"bootstrap","f":1743154767751901,"d_finished":582,"c":1,"l":1743154767752483,"d":582},{"a":1743154767752723,"name":"ack","f":1743154767752723,"d_finished":0,"c":0,"l":1743154767753349,"d":626},{"a":1743154767752704,"name":"processing","f":1743154767752704,"d_finished":0,"c":0,"l":1743154767753349,"d":645},{"name":"ProduceResults","f":1743154767752311,"d_finished":335,"c":2,"l":1743154767752943,"d":335},{"a":1743154767752945,"name":"Finish","f":1743154767752945,"d_finished":0,"c":0,"l":1743154767753349,"d":404}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1984:3989]->[1:1983:3988] 2025-03-28T09:39:27.753655Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:39:27.751446Z;index_granules=0;index_portions=0;index_batches=0;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2025-03-28T09:39:27.753690Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:39:27.753777Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=4;SelfId=[1:1984:3989];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> LdapAuthProviderTest::LdapServerIsUnavailable [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyHost >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 11633, MsgBus: 61452 2025-03-28T09:39:13.026128Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792656630705091:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:13.026250Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000658/r3tmp/tmpPzhBxf/pdisk_1.dat 2025-03-28T09:39:13.278811Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11633, node 1 2025-03-28T09:39:13.357528Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:13.357544Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:13.357549Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:13.357675Z node 1 :NET_CLASSIFIER ERROR: 
got bad distributable configuration 2025-03-28T09:39:13.373794Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:13.373908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:13.375757Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61452 TClient is connected to server localhost:61452 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:13.804563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:13.839040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:13.981971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:14.110029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:14.182203Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:15.663927Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792665220641470:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:15.664042Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:15.972326Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:15.999894Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.030138Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.056125Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.080795Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.107289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:16.139600Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792669515609275:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.139677Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.139786Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792669515609280:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:16.142902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:16.152606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792669515609282:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:16.218292Z node 1 :TX_PROXY ERROR: Actor# [1:7486792669515609335:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:16.923622Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792669515609583:2486] 2025-03-28T09:39:16.973851Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792669515609590:2489] 2025-03-28T09:39:16.976355Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792669515609598:2493] 2025-03-28T09:39:16.976462Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792669515609600:2494] 2025-03-28T09:39:16.992632Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792669515609601:2495] 2025-03-28T09:39:17.000740Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792669515609627:2503] 2025-03-28T09:39:17.025555Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810576948:2511] 2025-03-28T09:39:17.052984Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810576961:2515] 2025-03-28T09:39:17.083157Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810576982:2521] 2025-03-28T09:39:17.115488Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810577006:2528] 2025-03-28T09:39:17.151623Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810577021:2534] 2025-03-28T09:39:17.194586Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810577040:2540] 2025-03-28T09:39:17.233628Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810577057:2546] 2025-03-28T09:39:17.278792Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810577084:2552] 2025-03-28T09:39:17.327506Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810577104:2560] 2025-03-28T09:39:17.378822Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810577124:2566] 2025-03-28T09:39:17.432609Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810577148:2575] 2025-03-28T09:39:17.489800Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810577176:2581] 2025-03-28T09:39:17.572516Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810577202:2590] 2025-03-28T09:39:17.614818Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810577222:2594] 2025-03-28T09:39:17.678604Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810577254:2605] 2025-03-28T09:39:17.757316Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810577275:2612] 2025-03-28T09:39:17.819542Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810577305:2621] 2025-03-28T09:39:17.895073Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810577338:2630] 2025-03-28T09:39:17.974085Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792673810577355:2636] 2025-03-28T09:39:18.026296Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792656630705091:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:18.026370Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:18.054397Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: 
[1:7486792673810577389:2645] 2025-03-28T09:39:18.138968Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792678105544701:2651] 2025-03-28T09:39:18.225674Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154758247, txId: 281474976710671] shutting down 2025-03-28T09:39:18.226981Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792678105544725:2661] 2025-03-28T09:39:18.317775Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792678105544830:2676] 2025-03-28T09:39:18.412673Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486792678105544891:2690] TxId: 281474976710674. Ctx: { TraceId: 01jqe1xgnh3aggav38td9vrvw1, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWE4MDYyNGItNzkwZTk0ODUtYmUxZDQ2NzEtMWM4ZDIyZjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-28T09:39:18.412879Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWE4MDYyNGItNzkwZTk0ODUtYmUxZDQ2NzEtMWM4ZDIyZjY=, ActorId: [1:7486792678105544864:2690], ActorState: ExecuteState, TraceId: 01jqe1xgnh3aggav38td9vrvw1, Create QueryResponse for error on request, msg: 2025-03-28T09:39:18.413220Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snaps ... node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792682400512704:2830] 2025-03-28T09:39:20.163873Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792686695480025:2840] 2025-03-28T09:39:20.304613Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792686695480049:2850] 2025-03-28T09:39:20.448949Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792686695480071:2859] 2025-03-28T09:39:20.596761Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792686695480115:2868] 2025-03-28T09:39:20.746847Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792686695480153:2877] 2025-03-28T09:39:20.900784Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792686695480215:2887] 2025-03-28T09:39:20.900938Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486792686695480259:2892] TxId: 281474976710681. Ctx: { TraceId: 01jqe1xk1ee7ypmpwv43tyyecc, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmU0NzlkOTktM2M4YWUyNmYtZDMzNDdkMjktNmMwOTAyOQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. ABORTED: {
: Error: Client lost } 2025-03-28T09:39:20.901104Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmU0NzlkOTktM2M4YWUyNmYtZDMzNDdkMjktNmMwOTAyOQ==, ActorId: [1:7486792686695480226:2892], ActorState: ExecuteState, TraceId: 01jqe1xk1ee7ypmpwv43tyyecc, Create QueryResponse for error on request, msg: 2025-03-28T09:39:20.901391Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154760942, txId: 281474976710680] shutting down 2025-03-28T09:39:21.058481Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792686695480282:2896] 2025-03-28T09:39:21.200245Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154761236, txId: 281474976710683] shutting down 2025-03-28T09:39:21.218012Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792690990447605:2906] 2025-03-28T09:39:21.379922Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154761418, txId: 281474976710685] shutting down 2025-03-28T09:39:21.380923Z node 1 :RPC_REQUEST WARN: Client lost, ActorId: [1:7486792690990447721:2925] 2025-03-28T09:39:21.511562Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154761544, txId: 281474976710687] shutting down 2025-03-28T09:39:21.679197Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154761712, txId: 281474976710689] shutting down Trying to start YDB, gRPC: 6607, MsgBus: 26602 2025-03-28T09:39:22.355105Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792694743494881:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:22.355170Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000658/r3tmp/tmpsth1L0/pdisk_1.dat 2025-03-28T09:39:22.455808Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:22.490978Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:22.491063Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 6607, node 2 2025-03-28T09:39:22.492520Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:22.545325Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:22.545358Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:22.545367Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:22.545560Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:26602 TClient is connected to server localhost:26602 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:22.901704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:22.922827Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:22.974049Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:23.129480Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:23.195177Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:24.958858Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792703333431249:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:24.958936Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:24.988073Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.011026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.037061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.060637Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.087310Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.153315Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.235263Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792707628399063:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:25.235319Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:25.235370Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792707628399068:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:25.238072Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:25.245415Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792707628399070:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:25.309770Z node 2 :TX_PROXY ERROR: Actor# [2:7486792707628399122:3446] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:26.151563Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWQ0N2IxOTMtZTg1YmYxMi1jY2UzYWE5OC04ZmY5NzBkNg==, ActorId: [2:7486792711923366716:2504], ActorState: ExecuteState, TraceId: 01jqe1xr903yqege0hnh9n8m1b, Create QueryResponse for error on request, msg: 2025-03-28T09:39:26.261036Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZmNmN2NhNDItYTdmYWIwNTItNTY5ZmYwNDktNjVmYjEzMjY=, ActorId: [2:7486792711923366760:2522], ActorState: ExecuteState, TraceId: 01jqe1xrbs6ffvttx7ssjp0x37, Create QueryResponse for error on request, msg: 2025-03-28T09:39:26.316724Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=ZGZhNTQxOTItM2M0ZjJlNGYtMjRlOGRmZmUtYmRhMjQyNDg=, ActorId: [2:7486792711923366803:2534], ActorState: ExecuteState, TraceId: 01jqe1xrdz7nrbm8sk8bbmjhtt, Create QueryResponse for error on request, msg: 2025-03-28T09:39:27.228417Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154767263, txId: 281474976715671] shutting down 2025-03-28T09:39:27.375232Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792694743494881:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:27.375524Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:27.495697Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154767529, txId: 281474976715673] shutting down 2025-03-28T09:39:27.780106Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154767816, txId: 281474976715675] shutting down >> Splitter::Simple >> KqpStats::MultiTxStatsFullYql [GOOD] >> KqpStats::MultiTxStatsFullScan >> Splitter::Simple [GOOD] >> Splitter::Small [GOOD] >> Splitter::Minimal [GOOD] >> Splitter::Trivial [GOOD] >> Splitter::BigAndSmall >> Splitter::BigAndSmall [GOOD] >> Splitter::CritSmallPortions >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest::LdapRequestWithEmptyHost [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink [GOOD] >> KqpLimits::CancelAfterRwTx+useSink >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> KqpExplain::PureExpr [GOOD] >> KqpExplain::ReadTableRangesFullScan >> KqpExplain::FullOuterJoin [GOOD] >> KqpExplain::MergeConnection >> KqpStats::RequestUnitForSuccessExplicitPrepare [GOOD] >> 
KqpStats::StatsProfile >> TSentinelUnstableTests::BSControllerCantChangeStatus >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries >> MetadataConversion::MakeAuthTest [GOOD] >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery >> Splitter::CritSmallPortions [GOOD] >> Splitter::Crit |97.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/gateway/ut/gtest >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] |97.4%| [TS] {RESULT} ydb/core/kqp/gateway/ut/gtest >> TPQTest::TestPQCacheSizeManagement [GOOD] >> TxKeys::ComparePointKeys >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery >> TMemoryController::Counters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestPQCacheSizeManagement [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-28T09:37:43.975664Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:43.975773Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-28T09:37:44.001284Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:44.030240Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "aaa" Generation: 1 Important: true } 2025-03-28T09:37:44.031249Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-28T09:37:44.033815Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete 
for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-28T09:37:44.036919Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-28T09:37:44.039085Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-03-28T09:37:44.050130Z node 1 :PERSQUEUE INFO: new Cookie default|4598df30-f5743f3f-1ee74f8f-353c992a_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [1:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-03-28T09:37:44.513906Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:44.513994Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:107:2139]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:179:2057] recipient: [2:99:2134] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:182:2057] recipient: [2:181:2193] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:183:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:185:2057] recipient: [2:181:2193] 2025-03-28T09:37:44.561691Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:44.561768Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:107:2139]) rebooted! !Reboot 72057594037927937 (actor [2:107:2139]) tablet resolver refreshed! 
new actor is[2:184:2194] Leader for TabletID 72057594037927937 is [2:184:2194] sender: [2:258:2057] recipient: [2:14:2061] 2025-03-28T09:37:46.176584Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:46.177558Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Consumers { Name: "aaa" Generation: 2 Important: true } 2025-03-28T09:37:46.178571Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:264:2256] 2025-03-28T09:37:46.181139Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:264:2256] 2025-03-28T09:37:46.184304Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:265:2257] 2025-03-28T09:37:46.186208Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:265:2257] 2025-03-28T09:37:46.199856Z node 2 :PERSQUEUE INFO: new Cookie default|c8d71581-6d835a95-525ee71f-7e9bd528_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [2:175:2190] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-03-28T09:37:46.543217Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:46.543290Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:107:2139]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:181:2057] recipient: [3:99:2134] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:184:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:185:2057] recipient: [3:183:2195] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:187:2057] recipient: [3:183:2195] 2025-03-28T09:37:46.588441Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:46.588511Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [3:107:2139]) rebooted! !Reboot 72057594037927937 (actor [3:107:2139]) tablet resolver refreshed! new actor is[3:186:2196] Leader for TabletID 72057594037927937 is [3:186:2196] sender: [3:260:2057] recipient: [3:14:2061] 2025-03-28T09:37:48.163375Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:48.163989Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "aaa" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } Consumers { Name: "aaa" Generation: 3 Important: true } 2025-03-28T09:37:48.164636Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:266:2258] 2025-03-28T09:37:48.166332Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [3:266:2258] 2025-03-28T09:37:48.168806Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:267:2259] 2025-03-28T09:37:48.170129Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [3:267:2259] 2025-03-28T09:37:48.180740Z node 3 :PERSQUEUE INFO: new Cookie default|2f4f6963-292f625c-6bd8c34b-eac58833_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [3:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-03-28T09:37:48.541567Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:48.541635Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] 
recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is ... node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:658:2551] 2025-03-28T09:39:32.362899Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:659:2552] 2025-03-28T09:39:32.373408Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:39:32.373486Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 7 [79:659:2552] 2025-03-28T09:39:32.391362Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:39:32.391471Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 7 [79:658:2551] 2025-03-28T09:39:32.425848Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:175:2190] Leader for TabletID 72057594037927937 is [79:601:2502] sender: [79:689:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:601:2502] sender: [79:692:2057] recipient: [79:99:2134] Leader for TabletID 72057594037927937 is [79:601:2502] sender: [79:694:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:601:2502] sender: [79:696:2057] recipient: [79:695:2572] Leader for TabletID 72057594037927937 is [79:697:2573] sender: [79:698:2057] recipient: [79:695:2572] 2025-03-28T09:39:32.481488Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:39:32.481551Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:39:32.482120Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:756:2624] 2025-03-28T09:39:32.484162Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:757:2625] 2025-03-28T09:39:32.492122Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:39:32.492188Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 8 [79:757:2625] 2025-03-28T09:39:32.509261Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:39:32.509372Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 8 [79:756:2624] 2025-03-28T09:39:32.544118Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:175:2190] Leader for TabletID 72057594037927937 is [79:697:2573] sender: [79:787:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:697:2573] sender: [79:790:2057] recipient: [79:99:2134] Leader for TabletID 72057594037927937 is [79:697:2573] sender: [79:793:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:697:2573] sender: [79:794:2057] recipient: [79:792:2645] Leader for TabletID 72057594037927937 is [79:795:2646] sender: [79:796:2057] recipient: [79:792:2645] 2025-03-28T09:39:32.602346Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:39:32.602414Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:39:32.603088Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:856:2699] 2025-03-28T09:39:32.605994Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:857:2700] 2025-03-28T09:39:32.617514Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:39:32.617589Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 9 [79:857:2700] 2025-03-28T09:39:32.641326Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:39:32.641432Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 9 [79:856:2699] 2025-03-28T09:39:32.671012Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:175:2190] Leader for TabletID 72057594037927937 is [79:795:2646] sender: [79:889:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:795:2646] sender: [79:892:2057] recipient: [79:99:2134] Leader for TabletID 72057594037927937 is [79:795:2646] sender: [79:895:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:795:2646] sender: [79:896:2057] recipient: [79:894:2722] Leader for TabletID 72057594037927937 is [79:897:2723] sender: [79:898:2057] recipient: [79:894:2722] 2025-03-28T09:39:32.728800Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:39:32.728869Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:39:32.729516Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:960:2778] 2025-03-28T09:39:32.732327Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:961:2779] 2025-03-28T09:39:32.743435Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:39:32.743513Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 10 [79:961:2779] 2025-03-28T09:39:32.759580Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:39:32.759683Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 10 [79:960:2778] 2025-03-28T09:39:32.794749Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:175:2190] Leader for TabletID 72057594037927937 is [79:897:2723] sender: [79:993:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:897:2723] sender: [79:996:2057] recipient: [79:99:2134] Leader for TabletID 72057594037927937 is [79:897:2723] sender: [79:999:2057] recipient: [79:998:2801] Leader for TabletID 72057594037927937 is [79:897:2723] sender: [79:1000:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:1001:2802] sender: [79:1002:2057] recipient: [79:998:2801] 2025-03-28T09:39:32.851357Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:39:32.851418Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:39:32.852022Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:1066:2859] 2025-03-28T09:39:32.854659Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:1067:2860] 2025-03-28T09:39:32.864336Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:39:32.864408Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 11 [79:1067:2860] 2025-03-28T09:39:32.881840Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:39:32.881943Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 11 [79:1066:2859] 2025-03-28T09:39:32.904363Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [79:175:2190] Leader for TabletID 72057594037927937 is [79:1001:2802] sender: [79:1097:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:1001:2802] sender: [79:1100:2057] recipient: [79:99:2134] Leader for TabletID 72057594037927937 is [79:1001:2802] sender: [79:1103:2057] recipient: [79:14:2061] Leader for TabletID 72057594037927937 is [79:1001:2802] sender: [79:1104:2057] recipient: [79:1102:2880] Leader for TabletID 72057594037927937 is [79:1105:2881] sender: [79:1106:2057] recipient: [79:1102:2880] 2025-03-28T09:39:32.957059Z node 79 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:39:32.957123Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:39:32.957798Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [79:1172:2940] 2025-03-28T09:39:32.960362Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [79:1173:2941] 2025-03-28T09:39:32.970352Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:39:32.970430Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 12 [79:1173:2941] 2025-03-28T09:39:32.994317Z node 79 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:39:32.994417Z node 79 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 12 [79:1172:2940] 2025-03-28T09:39:33.022023Z node 79 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 size 8364507
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD]
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad
>> TColumnShardTestSchema::ForgetAfterFail [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery
>> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD]
>> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad
>> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD]
>> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts
>> KqpStats::MultiTxStatsFullScan [GOOD]
>> KqpStats::OneShardLocalExec+UseSink
>> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn [GOOD]
>> LdapAuthProviderTest::LdapRequestWithEmptyBindDn
>> TxKeys::ComparePointKeys [GOOD]
>> TxKeys::ComparePointKeysWithNull
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetAfterFail [GOOD]
Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155152.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123155152.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123153952.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-03-28T09:35:53.642517Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:35:53.701028Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:35:53.713998Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:35:53.714191Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:35:53.719584Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:53.719731Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:53.719915Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:53.719988Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:53.720053Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:53.720115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:53.720173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:53.720251Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:53.720319Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:53.720381Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:53.720453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:53.720520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:53.737190Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:35:53.737410Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:35:53.737448Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:35:53.737559Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:35:53.737663Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:53.737718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:35:53.737747Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:35:53.737799Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:35:53.737833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:35:53.737858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:35:53.737890Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:35:53.737994Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:35:53.738050Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:35:53.738079Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:35:53.738098Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:35:53.738167Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:35:53.738196Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:35:53.738219Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:35:53.738237Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:35:53.738279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:35:53.738301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:35:53.738319Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:35:53.738347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:35:53.738371Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:35:53.738389Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:35:53.738635Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=27; 2025-03-28T09:35:53.738699Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=21; 2025-03-28T09:35:53.738747Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=21; 2025-03-28T09:35:53.738817Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=39; 2025-03-28T09:35:53.738927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:35:53.738957Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:35:53.738979Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:35:53.739115Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:35:53.739143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:35:53.739161Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:35:53.739246Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:35:53.739268Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:35:53.739299Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:35:53.739420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... 
=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:33.690608Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:33.690637Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:39:33.690668Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:194;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2025-03-28T09:39:33.690792Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:39:33.690879Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:1;records_count:14867;schema=timestamp: timestamp[us];);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:33.690910Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=plain_read_data.cpp:72;event=DoExtractReadyResults;result=0;count=0;finished=1; 2025-03-28T09:39:33.690986Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:225;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;);columns=1;rows=14867; 2025-03-28T09:39:33.691030Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:245;stage=data_format;batch_size=118936;num_rows=14867;batch_columns=timestamp; 2025-03-28T09:39:33.691168Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1882:3857];bytes=118936;rows=14867;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData 
[1:1883:3858]->[1:1882:3857] 2025-03-28T09:39:33.691263Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:263;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:33.691352Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:33.691439Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:33.691558Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:103;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2025-03-28T09:39:33.691634Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:183;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:33.691704Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;method=produce result;fline=actor.cpp:188;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:33.691736Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: Scan [1:1883:3858] finished for tablet 9437184 2025-03-28T09:39:33.692182Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:411;event=scan_finish;compute_actor_id=[1:1882:3857];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.007},{"events":["f_processing","f_task_result"],"t":0.009},{"events":["f_ack","l_task_result"],"t":0.8},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.806}],"full":{"a":1743154772885568,"name":"_full_task","f":1743154772885568,"d_finished":0,"c":0,"l":1743154773691785,"d":806217},"events":[{"name":"bootstrap","f":1743154772885705,"d_finished":7755,"c":1,"l":1743154772893460,"d":7755},{"a":1743154773691538,"name":"ack","f":1743154773686025,"d_finished":4968,"c":7,"l":1743154773691462,"d":5215},{"a":1743154773691526,"name":"processing","f":1743154772894575,"d_finished":399776,"c":56,"l":1743154773691464,"d":400035},{"name":"ProduceResults","f":1743154772888495,"d_finished":11846,"c":65,"l":1743154773691723,"d":11846},{"a":1743154773691725,"name":"Finish","f":1743154773691725,"d_finished":0,"c":0,"l":1743154773691785,"d":60},{"name":"task_result","f":1743154772894589,"d_finished":393990,"c":49,"l":1743154773685839,"d":393990}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); 2025-03-28T09:39:33.692248Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:361;event=send_data;compute_actor_id=[1:1882:3857];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2025-03-28T09:39:33.692629Z node 1 :TX_COLUMNSHARD_SCAN INFO: 
TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:366;event=scan_finished;compute_actor_id=[1:1882:3857];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap"],"t":0.007},{"events":["f_processing","f_task_result"],"t":0.009},{"events":["f_ack","l_task_result"],"t":0.8},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.806}],"full":{"a":1743154772885568,"name":"_full_task","f":1743154772885568,"d_finished":0,"c":0,"l":1743154773692283,"d":806715},"events":[{"name":"bootstrap","f":1743154772885705,"d_finished":7755,"c":1,"l":1743154772893460,"d":7755},{"a":1743154773691538,"name":"ack","f":1743154773686025,"d_finished":4968,"c":7,"l":1743154773691462,"d":5713},{"a":1743154773691526,"name":"processing","f":1743154772894575,"d_finished":399776,"c":56,"l":1743154773691464,"d":400533},{"name":"ProduceResults","f":1743154772888495,"d_finished":11846,"c":65,"l":1743154773691723,"d":11846},{"a":1743154773691725,"name":"Finish","f":1743154773691725,"d_finished":0,"c":0,"l":1743154773692283,"d":558},{"name":"task_result","f":1743154772894589,"d_finished":393990,"c":49,"l":1743154773685839,"d":393990}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;;); Got TEvKqpCompute::TEvScanData [1:1883:3858]->[1:1882:3857] 2025-03-28T09:39:33.692706Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2025-03-28T09:39:32.885220Z;index_granules=0;index_portions=7;index_batches=1260;committed_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=10402524;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10402524;selected_rows=0; 2025-03-28T09:39:33.692743Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:188;event=scan_aborted;reason=unexpected on destructor; 2025-03-28T09:39:33.692978Z node 1 :TX_COLUMNSHARD_SCAN INFO: TEST_STEP=3;SelfId=[1:1883:3858];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:81;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 160000/10402332 160000/10402332 0/0 160000/10402524 >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad [GOOD] >> 
LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs
>> QueryActorTest::SimpleQuery
>> TxKeys::ComparePointKeysWithNull [GOOD]
>> TxKeys::ComparePointAndRange
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection
>> TxKeys::ComparePointAndRange [GOOD]
>> TxKeys::ComparePointAndRangeWithNull
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD]
>> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections
>> Splitter::Crit [GOOD]
>> Splitter::CritSimple
>> TxKeys::ComparePointAndRangeWithNull [GOOD]
>> TxKeys::ComparePointAndRangeWithInf
>> KqpExplain::MergeConnection [GOOD]
>> KqpExplain::IdxFullscan
>> KqpStats::StatsProfile [GOOD]
>> KqpStats::SelfJoin
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection
>> KqpExplain::ReadTableRangesFullScan [GOOD]
>> KqpExplain::ReadTableRanges
>> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD]
>> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD]
>> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood
>> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD]
>> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection
>> LdapAuthProviderTest::LdapRequestWithEmptyBindDn [GOOD]
>> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword
>> TxKeys::ComparePointAndRangeWithInf [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD]
Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected !
Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted! !Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:79:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:80:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:80:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:84:2057] recipient: [7:83:2113] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:83:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:84:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:88:2057] recipient: [8:87:2116] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:90:2057] recipient: [8:87:2116] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! new actor is[8:89:2117] Leader for TabletID 72057594037927937 is [8:89:2117] sender: [8:143:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:84:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:88:2057] recipient: [9:87:2116] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:90:2057] recipient: [9:87:2116] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:89:2117] Leader for TabletID 72057594037927937 is [9:89:2117] sender: [9:143:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:85:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:89:2057] recipient: [10:87:2116] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:91:2057] recipient: [10:87:2116] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:90:2117] Leader for TabletID 72057594037927937 is [10:90:2117] sender: [10:144:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:91:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:92:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:94:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! new actor is[11:93:2120] Leader for TabletID 72057594037927937 is [11:93:2120] sender: [11:147:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (acto ... 6:2097]) rebooted! !Reboot 72057594037927937 (actor [44:56:2097]) tablet resolver refreshed! new actor is[44:101:2126] Leader for TabletID 72057594037927937 is [44:101:2126] sender: [44:155:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:54:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:57:2057] recipient: [45:51:2095] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:74:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:97:2057] recipient: [45:36:2083] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:100:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [45:56:2097] sender: [45:101:2057] recipient: [45:99:2125] Leader for TabletID 72057594037927937 is [45:102:2126] sender: [45:103:2057] recipient: [45:99:2125] !Reboot 72057594037927937 (actor [45:56:2097]) rebooted! !Reboot 72057594037927937 (actor [45:56:2097]) tablet resolver refreshed! new actor is[45:102:2126] Leader for TabletID 72057594037927937 is [45:102:2126] sender: [45:120:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:54:2057] recipient: [46:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:54:2057] recipient: [46:51:2095] Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:57:2057] recipient: [46:51:2095] Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:74:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:99:2057] recipient: [46:36:2083] Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:102:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [46:56:2097] sender: [46:103:2057] recipient: [46:101:2127] Leader for TabletID 72057594037927937 is [46:104:2128] sender: [46:105:2057] recipient: [46:101:2127] !Reboot 72057594037927937 (actor [46:56:2097]) rebooted! !Reboot 72057594037927937 (actor [46:56:2097]) tablet resolver refreshed! 
new actor is[46:104:2128] Leader for TabletID 72057594037927937 is [46:104:2128] sender: [46:158:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:54:2057] recipient: [47:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:54:2057] recipient: [47:51:2095] Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:57:2057] recipient: [47:51:2095] Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:74:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:99:2057] recipient: [47:36:2083] Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:102:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [47:56:2097] sender: [47:103:2057] recipient: [47:101:2127] Leader for TabletID 72057594037927937 is [47:104:2128] sender: [47:105:2057] recipient: [47:101:2127] !Reboot 72057594037927937 (actor [47:56:2097]) rebooted! !Reboot 72057594037927937 (actor [47:56:2097]) tablet resolver refreshed! new actor is[47:104:2128] Leader for TabletID 72057594037927937 is [47:104:2128] sender: [47:158:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:54:2057] recipient: [48:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:54:2057] recipient: [48:51:2095] Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:57:2057] recipient: [48:51:2095] Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:74:2057] recipient: [48:14:2061] !Reboot 72057594037927937 (actor [48:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:100:2057] recipient: [48:36:2083] Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:102:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [48:56:2097] sender: [48:104:2057] recipient: [48:103:2127] Leader for TabletID 72057594037927937 is [48:105:2128] sender: [48:106:2057] recipient: [48:103:2127] !Reboot 72057594037927937 (actor [48:56:2097]) rebooted! !Reboot 72057594037927937 (actor [48:56:2097]) tablet resolver refreshed! new actor is[48:105:2128] Leader for TabletID 72057594037927937 is [48:105:2128] sender: [48:123:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:54:2057] recipient: [49:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:54:2057] recipient: [49:50:2095] Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:57:2057] recipient: [49:50:2095] Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:74:2057] recipient: [49:14:2061] !Reboot 72057594037927937 (actor [49:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:102:2057] recipient: [49:36:2083] Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:105:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [49:56:2097] sender: [49:106:2057] recipient: [49:104:2129] Leader for TabletID 72057594037927937 is [49:107:2130] sender: [49:108:2057] recipient: [49:104:2129] !Reboot 72057594037927937 (actor [49:56:2097]) rebooted! !Reboot 72057594037927937 (actor [49:56:2097]) tablet resolver refreshed! 
new actor is[49:107:2130] Leader for TabletID 72057594037927937 is [49:107:2130] sender: [49:161:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:54:2057] recipient: [50:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:54:2057] recipient: [50:52:2095] Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:57:2057] recipient: [50:52:2095] Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:74:2057] recipient: [50:14:2061] !Reboot 72057594037927937 (actor [50:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:102:2057] recipient: [50:36:2083] Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:105:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [50:56:2097] sender: [50:106:2057] recipient: [50:104:2129] Leader for TabletID 72057594037927937 is [50:107:2130] sender: [50:108:2057] recipient: [50:104:2129] !Reboot 72057594037927937 (actor [50:56:2097]) rebooted! !Reboot 72057594037927937 (actor [50:56:2097]) tablet resolver refreshed! new actor is[50:107:2130] Leader for TabletID 72057594037927937 is [50:107:2130] sender: [50:161:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:54:2057] recipient: [51:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:54:2057] recipient: [51:50:2095] Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:57:2057] recipient: [51:50:2095] Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:74:2057] recipient: [51:14:2061] !Reboot 72057594037927937 (actor [51:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:103:2057] recipient: [51:36:2083] Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:106:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [51:56:2097] sender: [51:107:2057] recipient: [51:105:2129] Leader for TabletID 72057594037927937 is [51:108:2130] sender: [51:109:2057] recipient: [51:105:2129] !Reboot 72057594037927937 (actor [51:56:2097]) rebooted! !Reboot 72057594037927937 (actor [51:56:2097]) tablet resolver refreshed! new actor is[51:108:2130] Leader for TabletID 72057594037927937 is [51:108:2130] sender: [51:126:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:54:2057] recipient: [52:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:54:2057] recipient: [52:52:2095] Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:57:2057] recipient: [52:52:2095] Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:74:2057] recipient: [52:14:2061] !Reboot 72057594037927937 (actor [52:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:105:2057] recipient: [52:36:2083] Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:108:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [52:56:2097] sender: [52:109:2057] recipient: [52:107:2131] Leader for TabletID 72057594037927937 is [52:110:2132] sender: [52:111:2057] recipient: [52:107:2131] !Reboot 72057594037927937 (actor [52:56:2097]) rebooted! !Reboot 72057594037927937 (actor [52:56:2097]) tablet resolver refreshed! 
new actor is[52:110:2132] Leader for TabletID 72057594037927937 is [52:110:2132] sender: [52:164:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:54:2057] recipient: [53:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:54:2057] recipient: [53:51:2095] Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:57:2057] recipient: [53:51:2095] Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:74:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:56:2097]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:105:2057] recipient: [53:36:2083] Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:108:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [53:56:2097] sender: [53:109:2057] recipient: [53:107:2131] Leader for TabletID 72057594037927937 is [53:110:2132] sender: [53:111:2057] recipient: [53:107:2131] !Reboot 72057594037927937 (actor [53:56:2097]) rebooted! !Reboot 72057594037927937 (actor [53:56:2097]) tablet resolver refreshed! new actor is[53:110:2132] Leader for TabletID 72057594037927937 is [53:110:2132] sender: [53:164:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:54:2057] recipient: [54:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:54:2057] recipient: [54:51:2095] Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:57:2057] recipient: [54:51:2095] Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:74:2057] recipient: [54:14:2061] !Reboot 72057594037927937 (actor [54:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:106:2057] recipient: [54:36:2083] Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:109:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [54:56:2097] sender: [54:110:2057] recipient: [54:108:2131] Leader for TabletID 72057594037927937 is [54:111:2132] sender: [54:112:2057] recipient: [54:108:2131] !Reboot 72057594037927937 (actor [54:56:2097]) rebooted! !Reboot 72057594037927937 (actor [54:56:2097]) tablet resolver refreshed! 
new actor is[54:111:2132] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:54:2057] recipient: [55:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:54:2057] recipient: [55:51:2095] Leader for TabletID 72057594037927937 is [55:56:2097] sender: [55:57:2057] recipient: [55:51:2095] Leader for TabletID 72057594037927937 is [55:56:2097] sender: [55:74:2057] recipient: [55:14:2061] >> ReadUpdateWrite::Load >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_keys/unittest >> TxKeys::ComparePointAndRangeWithInf [GOOD] Test command err: 2025-03-28T09:39:34.013502Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:39:34.036072Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:39:34.039748Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 2025-03-28T09:39:34.043515Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:34.121293Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:39:34.192621Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:39:34.192684Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:34.204373Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:34.205733Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:39:34.209770Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:39:34.209862Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:39:34.209911Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:39:34.211525Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:39:34.211654Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:39:34.211754Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:39:34.295747Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:39:34.321429Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:39:34.323034Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:39:34.323176Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:39:34.323210Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:39:34.323241Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:39:34.323283Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2025-03-28T09:39:34.323545Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:39:34.323589Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:39:34.324522Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:39:34.324651Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:39:34.324735Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:39:34.324796Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:39:34.324905Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:39:34.324944Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:39:34.325006Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:39:34.325049Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:39:34.325098Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:39:34.325215Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:39:34.325265Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:39:34.325326Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:39:34.328976Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:39:34.329049Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:39:34.329141Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:39:34.329407Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:39:34.329465Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:39:34.329537Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:39:34.329652Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:39:34.329708Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:39:34.329745Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:39:34.329785Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:39:34.330129Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:39:34.330170Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 
executing on unit StoreSchemeTx 2025-03-28T09:39:34.330207Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:39:34.330240Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:39:34.330295Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:39:34.330329Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:39:34.330368Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:39:34.330409Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:39:34.330440Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:39:34.342972Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:39:34.343047Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:39:34.343084Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:39:34.343153Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:39:34.345128Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:39:34.348046Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:39:34.348105Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:39:34.348150Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:39:34.348232Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:39:34.348260Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:39:34.348417Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:39:34.348481Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:39:34.348531Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:39:34.348565Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:39:34.352446Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:39:34.352523Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:39:34.352775Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:39:34.352826Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:39:34.352897Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:39:34.352937Z node 1 
:TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:39:34.352973Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:39:34.353020Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:39:34.353057Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:39:34.353104Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:39:34.353155Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:39:34.353187Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:39:34.353214Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:39:34.353371Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:39:34.353396Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:39:34.353412Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:39:34.353426Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:39:34.353450Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 2025-03-28T09:39:34.353506Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:39:34.353523Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:39:34.353548Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:39:34.353577Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:39:34.353639Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:39:34.353667Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:39:34.353694Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at ... 
25-03-28T09:39:37.921892Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CreateIncrementalRestoreSrc 2025-03-28T09:39:37.921920Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:39:37.921946Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CreateIncrementalRestoreSrc 2025-03-28T09:39:37.921970Z node 5 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit CompleteOperation 2025-03-28T09:39:37.921995Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CompleteOperation 2025-03-28T09:39:37.922229Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is DelayComplete 2025-03-28T09:39:37.922272Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CompleteOperation 2025-03-28T09:39:37.922311Z node 5 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit CompletedOperations 2025-03-28T09:39:37.922347Z node 5 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit CompletedOperations 2025-03-28T09:39:37.922382Z node 5 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:39:37.922406Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit CompletedOperations 2025-03-28T09:39:37.922437Z node 5 :TX_DATASHARD TRACE: Execution plan for [1000001:1] at 9437184 has finished 2025-03-28T09:39:37.922485Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:39:37.922521Z node 5 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:39:37.922566Z node 5 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:39:37.922611Z node 5 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:39:37.924317Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [5:24:2071], Recipient [5:132:2155]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-03-28T09:39:37.924375Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-03-28T09:39:37.924414Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-03-28T09:39:37.924460Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:39:37.926552Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000001 txid# 1} 2025-03-28T09:39:37.926606Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000001} 2025-03-28T09:39:37.926657Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:39:37.927634Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:39:37.927670Z node 5 :TX_DATASHARD TRACE: Complete execution for [1000001:1] at 9437184 on unit CreateTable 2025-03-28T09:39:37.927708Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:39:37.927746Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2025-03-28T09:39:37.927775Z node 5 :TX_DATASHARD TRACE: Complete execution for [1000001:1] at 9437184 on unit CompleteOperation 2025-03-28T09:39:37.927827Z node 5 :TX_DATASHARD DEBUG: Complete [1000001 : 1] from 9437184 at tablet 9437184 send result to client 
[5:99:2134], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:39:37.927869Z node 5 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-03-28T09:39:37.927942Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:39:37.928450Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [5:226:2224], Recipient [5:132:2155]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:228:2225] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:39:37.928483Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T09:39:37.928606Z node 5 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 132 RawX2: 21474838635 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 2 2025-03-28T09:39:37.928676Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [5:122:2148], Recipient [5:132:2155]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-03-28T09:39:37.928701Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-03-28T09:39:37.928733Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-03-28T09:39:37.928787Z node 5 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-03-28T09:39:37.929092Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 65543, Sender [5:99:2134], Recipient [5:132:2155]: NActors::TEvents::TEvPoison 2025-03-28T09:39:37.929376Z node 5 :TX_DATASHARD INFO: OnDetach: 9437184 2025-03-28T09:39:37.929495Z node 5 :TX_DATASHARD INFO: Change sender killed: at tablet: 9437184 2025-03-28T09:39:37.938889Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [5:231:2226], Recipient [5:234:2227]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:39:37.942140Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [5:231:2226], Recipient [5:234:2227]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:39:37.942335Z node 5 :TX_DATASHARD TRACE: StateInit, received event# 268828684, Sender [5:231:2226], Recipient [5:234:2227]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:39:37.947798Z node 5 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [5:234:2227] 2025-03-28T09:39:37.947993Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:37.952415Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute Persist Sys_SubDomainInfo 2025-03-28T09:39:37.982469Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:37.982629Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:39:37.984971Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:39:37.985076Z node 5 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:39:37.985144Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:39:37.985635Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:39:37.985856Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:39:37.985929Z node 5 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [5:277:2227] in generation 3 2025-03-28T09:39:38.032209Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:39:38.032356Z node 5 :TX_DATASHARD INFO: Switched to work 
state Ready tabletId 9437184 2025-03-28T09:39:38.032450Z node 5 :TX_DATASHARD INFO: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2025-03-28T09:39:38.032578Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2025-03-28T09:39:38.032813Z node 5 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [5:282:2266] 2025-03-28T09:39:38.032855Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:39:38.032905Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 9437184 2025-03-28T09:39:38.032942Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:39:38.033190Z node 5 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-03-28T09:39:38.033308Z node 5 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-03-28T09:39:38.033493Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [5:234:2227], Recipient [5:234:2227]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:39:38.033550Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:39:38.033788Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:39:38.033876Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:39:38.034002Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270976, Sender [5:24:2071], Recipient [5:234:2227]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2025-03-28T09:39:38.034044Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2025-03-28T09:39:38.034083Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2025-03-28T09:39:38.034124Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:39:38.034258Z node 5 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 234 RawX2: 21474838707 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2025-03-28T09:39:38.034362Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 270270978, Sender [5:24:2071], Recipient [5:234:2227]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2025-03-28T09:39:38.034399Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2025-03-28T09:39:38.034443Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2025-03-28T09:39:38.034513Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:39:38.034552Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:39:38.034592Z node 5 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:39:38.034632Z node 5 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:39:38.034670Z node 5 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:39:38.034732Z node 5 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:39:38.034778Z node 5 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Complete at 9437184 2025-03-28T09:39:38.034883Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [5:280:2264], Recipient [5:234:2227]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:284:2268] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:39:38.034921Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T09:39:38.035000Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [5:122:2148], Recipient [5:234:2227]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2025-03-28T09:39:38.035033Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-03-28T09:39:38.035070Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2025-03-28T09:39:38.035146Z node 5 :TX_DATASHARD DEBUG: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2025-03-28T09:39:38.047337Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [5:280:2264], Recipient [5:234:2227]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [5:280:2264] ServerId: [5:284:2268] } 2025-03-28T09:39:38.047402Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed |97.4%| [TM] {RESULT} ydb/core/tx/datashard/ut_keys/unittest >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:39:00.241610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:00.241715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:00.241763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:00.241801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:00.241848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:00.241882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:00.241962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:00.242039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:00.242341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:00.324402Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:39:00.324458Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:39:00.331018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:00.331114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:00.331230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:00.336353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:00.336528Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:00.337109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:00.337307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:00.339154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:00.340401Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:00.340458Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:00.340585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:00.340627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:00.340666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:00.340801Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 
2025-03-28T09:39:00.347887Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:39:00.480157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:00.480391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:00.480597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:00.480823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:00.480876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:00.483285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:00.483404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:00.483586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:00.483638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:00.483676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:00.483707Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:00.485621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:00.485672Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:00.485718Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:00.487433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:00.487477Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:00.487533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:00.487582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:00.491300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:00.493060Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:00.493229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:00.494243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:00.494390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:00.494436Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:00.494680Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:00.494754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:00.494920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:00.494991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:00.496995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:00.497048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:00.497229Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:00.497270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:39:00.497492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:00.497533Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:00.497622Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:00.497652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:00.497690Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:00.497717Z no ... 
100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: 
false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:38.077977Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:39:38.078157Z node 24 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 201us result status StatusSuccess 2025-03-28T09:39:38.078673Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 
5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:38.090430Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:803:2618] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-28T09:39:38.090528Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:723:2618] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-03-28T09:39:38.090672Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:803:2618] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743154778061674 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743154778061674 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743154778061674 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:39:38.092920Z node 24 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][24:803:2618] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-03-28T09:39:38.093013Z node 24 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][24:723:2618] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> TMemoryController::Counters [GOOD] >> TMemoryController::Counters_HardLimit >> QueryActorTest::SimpleQuery [GOOD] >> QueryActorTest::Rollback >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood >> KqpStats::OneShardLocalExec+UseSink [GOOD] >> TPQTest::TestPartitionedBlobFails [GOOD] >> TPQTest::TestReadAndDeleteConsumer >> TGRpcRateLimiterTest::CreateResource >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:38:39.268879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:39.268951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:39.268978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:39.268999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:39.269033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:39.269062Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:39.269100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:39.269186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:39.269451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:39.334291Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:39.334331Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:39.343784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:39.343932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:39.344062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:39.349242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:39.349464Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:39.350055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:39.350255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:39.352088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:39.353438Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:39.353494Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:39.353677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:39.353722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:39.353764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:39.353850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:38:39.360162Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:39.482569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:39.482795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:39.483007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:39.483235Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:39.483323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:39.485339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:39.485466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:39.485652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:39.485706Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:39.485753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:39.485784Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:39.487493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:39.487540Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:39.487621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:39.489189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:39.489229Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:39.489271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:39.489323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:39.493200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:39.494893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:39.495062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:39.496011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:39.496137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 
72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:39.496187Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:39.496452Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:39.496502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:39.496647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:39.496727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:39.498318Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:39.498360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:39.498469Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:39.498499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:39.498774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:39.498808Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:39.498890Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:39.498916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:39.498939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:39.498975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:39.499009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:39.499034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:39.499062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:38:39.499079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:39.499161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:39.499188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:39.499213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:39.500458Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:39.500544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 
Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:39.500568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... itioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409552 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:38.982654Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:39:38.983141Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue" took 492us result status StatusSuccess 2025-03-28T09:39:38.984142Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue" PathDescription { Self { Name: "indexByValue" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 3 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } 
PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } TableIndex { Name: "indexByValue" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 3 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { SizeThreshold: 100500 RowCountThreshold: 100500 } } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:38.985506Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/table/indexByValue/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false 
ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:39:38.985825Z node 16 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/table/indexByValue/indexImplTable" took 346us result status StatusSuccess 2025-03-28T09:39:38.986585Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/table/indexByValue/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710758 CreateStep: 5000004 ParentPathId: 3 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 4 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 100500 MinPartitionsCount: 1 FastSplitSettings { 
SizeThreshold: 100500 RowCountThreshold: 100500 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409552 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1592 DataSize: 1592 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings >> Splitter::CritSimple [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardLocalExec+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26951, MsgBus: 1062 2025-03-28T09:39:25.627539Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792710707175892:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:25.627631Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014a9/r3tmp/tmpkve5dW/pdisk_1.dat 2025-03-28T09:39:25.903487Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:25.903605Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:25.905498Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26951, node 1 2025-03-28T09:39:25.933907Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:39:25.936536Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:39:25.943261Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:25.966530Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty 
maybe) 2025-03-28T09:39:25.966567Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:25.966579Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:25.966743Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1062 TClient is connected to server localhost:1062 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:26.393886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:26.412306Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:26.551297Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:26.699785Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:26.771958Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:28.021793Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792723592079573:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:28.021863Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:28.235314Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:28.257075Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:28.278625Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:28.301835Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:28.322823Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:28.348397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:28.380104Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792723592080079:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:28.380183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:28.380267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792723592080084:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:28.383995Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:28.393923Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792723592080086:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:28.478198Z node 1 :TX_PROXY ERROR: Actor# [1:7486792723592080140:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:29.401398Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154769398, txId: 281474976710671] shutting down Trying to start YDB, gRPC: 30204, MsgBus: 23057 2025-03-28T09:39:30.124042Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792731869345254:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:30.124101Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014a9/r3tmp/tmp89mpaA/pdisk_1.dat 2025-03-28T09:39:30.225029Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30204, node 2 2025-03-28T09:39:30.246651Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:30.246767Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:30.248534Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:30.268568Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:30.268593Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:30.268601Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:30.268711Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:23057 TClient is connected to server localhost:23057 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:30.646704Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
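The WARN / "Scheduled retry ... doublechecking" / TX_PROXY "path exist, request accepts it" sequence earlier in this test is the normal first-use bootstrap of the default workload-manager resource pool, not a failure: several actors race to create /Root/.metadata/workload_manager/pools/default, the losers see the existing path and accept it, and the scheduled retry covers the window before the creation transaction becomes visible. A minimal Python sketch of that create-or-accept pattern, with create_pool, PathExistsError, and TransientError as hypothetical stand-ins rather than YDB APIs:

    import time

    class PathExistsError(Exception):
        """Stand-in for TX_PROXY's "path exist, request accepts it" outcome."""

    class TransientError(Exception):
        """Stand-in for the "Scheduled retry for error ... doublechecking" case."""

    def ensure_default_pool(create_pool, retries=3, backoff=0.1):
        """Create the pool once, tolerating a concurrent creator winning the race."""
        for attempt in range(1, retries + 1):
            try:
                create_pool()                  # several nodes may attempt this at once
                return "created"
            except PathExistsError:
                return "already-present"       # the loser accepts the existing path
            except TransientError:
                time.sleep(backoff * attempt)  # mirrors the scheduled retry in the log
        raise RuntimeError("pool bootstrap did not converge")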
2025-03-28T09:39:30.663811Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:30.735268Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:30.886888Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:30.959629Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:32.580777Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792740459281630:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:32.580853Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:32.626536Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:32.650131Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:32.677046Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:32.703876Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:32.730885Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:32.797273Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:32.834748Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792740459282143:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:32.834837Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:32.834850Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792740459282148:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:32.838152Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:32.847298Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792740459282150:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:32.908326Z node 2 :TX_PROXY ERROR: Actor# [2:7486792740459282204:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:34.064356Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154774081, txId: 281474976715671] shutting down Trying to start YDB, gRPC: 62442, MsgBus: 62734 2025-03-28T09:39:34.979262Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486792750254753109:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:34.979330Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014a9/r3tmp/tmp8UvHfa/pdisk_1.dat 2025-03-28T09:39:35.064774Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 62442, node 3 2025-03-28T09:39:35.111863Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:35.111942Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:35.113599Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:35.127249Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:35.127280Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:35.127287Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:35.127409Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:62734 TClient is connected to server localhost:62734 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:35.598763Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
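The snapshot record above ("our snapshot: [step: 1743154774081, txId: 281474976715671]") reads naturally if the plan step is taken as milliseconds since the Unix epoch: it decodes to the record's own wall-clock time. That is an observation about the values in this log, not a documented contract, but it makes the steps easy to cross-check:

    from datetime import datetime, timezone

    step = 1743154774081  # from "our snapshot: [step: 1743154774081, ...]"
    print(datetime.fromtimestamp(step / 1000, tz=timezone.utc).isoformat())
    # -> 2025-03-28T09:39:34.081000+00:00, matching the record's 09:39:34.064356Z clock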
2025-03-28T09:39:35.613394Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:35.692803Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:35.913574Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:35.991136Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:37.937503Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486792763139656765:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:37.937590Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:37.977632Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:38.009848Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:38.039823Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:38.068577Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:38.101001Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:38.135406Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:38.213408Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486792767434624581:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:38.213489Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486792767434624586:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:38.213506Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:38.216655Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:38.226737Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7486792767434624588:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:38.305595Z node 3 :TX_PROXY ERROR: Actor# [3:7486792767434624643:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/splitter/ut/unittest >> Splitter::CritSimple [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; 
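Each FALLBACK_ACTOR_LOGGING entry in the Splitter output is a semicolon-separated key=value record, and the serialize;size=N entries are echoed by matching parsing;size=N entries where the excerpt shows both sides of the round trip, i.e. the test reads back the portions it serializes. A small reader's helper for tallying these records (a log-reading aid, not project tooling):

    def parse_record(line: str) -> dict:
        """Split one FALLBACK_ACTOR_LOGGING entry into its key=value fields."""
        fields = {}
        for part in line.split(";"):
            if "=" in part:
                key, _, value = part.partition("=")
                fields[key.strip()] = value.strip()
        return fields

    rec = parse_record(
        "FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;"
        "fline=native.cpp:110;event=serialize;size=5163264;columns=1;"
    )
    print(rec["event"], int(rec["size"]))  # -> serialize 5163264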
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=seria ... 
82944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=71282912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964800;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; 
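These FALLBACK_ACTOR_LOGGING records are flat `key=value;` pairs, so a run like the one collapsed above is easiest to read after aggregation rather than entry by entry. A minimal sketch (Python, written for this note — not part of the test suite) that tallies serialize/parsing events by payload size:

```python
import re
from collections import Counter

# Matches one FALLBACK_ACTOR_LOGGING record; the body is a run of
# ';'-separated key=value pairs with no embedded spaces.
REC = re.compile(r"FALLBACK_ACTOR_LOGGING;([^ ]+)")

def parse_record(body: str) -> dict:
    """Turn 'priority=TRACE;fline=native.cpp:71;event=parsing;size=280384;columns=1;' into a dict."""
    return dict(p.split("=", 1) for p in body.strip(";").split(";") if "=" in p)

def summarize(log_text: str) -> Counter:
    """Count records grouped by (event, size) so repeated entries collapse."""
    counts = Counter()
    for match in REC.finditer(log_text):
        rec = parse_record(match.group(1))
        counts[(rec.get("event"), rec.get("size"))] += 1
    return counts

if __name__ == "__main__":
    sample = ("FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;"
              "fline=native.cpp:71;event=parsing;size=280384;columns=1; ") * 3
    for (event, size), n in summarize(sample).items():
        print(f"{event} size={size}: {n}x")   # -> parsing size=280384: 3x
```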
|97.4%| [TS] {RESULT} ydb/core/tx/columnshard/splitter/ut/unittest
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding
>> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword [GOOD]
>> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery
>> KqpStats::SelfJoin [GOOD]
>> BasicExample::BasicExample
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries
>> TKeyValueTest::TestRewriteThenLastValue [GOOD]
>> TKeyValueTest::TestRewriteThenLastValueNewApi
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery
>> TMemoryController::Counters_HardLimit [GOOD]
>> TMemoryController::Counters_NoHardLimit
>> QueryActorTest::Rollback [GOOD]
>> QueryActorTest::Commit
>> NodeWardenDsProxyConfigRetrieval::Disconnect
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus
>> TGRpcRateLimiterTest::CreateResource [GOOD]
>> TGRpcRateLimiterTest::UpdateResource
>> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD]
>> KqpExplain::IdxFullscan [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SelfJoin [GOOD]
Test command err:
Trying to start YDB, gRPC: 25770, MsgBus: 6049
2025-03-28T09:39:27.632670Z node 1 :METADATA_PROVIDER WARN:
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792720336144473:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:27.632762Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014a5/r3tmp/tmpuilP4X/pdisk_1.dat 2025-03-28T09:39:27.886881Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 25770, node 1 2025-03-28T09:39:27.936900Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:27.936925Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:27.936933Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:27.937023Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:27.970803Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:27.970921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:27.972754Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6049 TClient is connected to server localhost:6049 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:28.354058Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:28.400076Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:28.491226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:28.604217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 
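The KQP_WORKLOAD_SERVICE warnings that follow show the test's first queries racing with creation of the default resource pool: the fetch fails with NOT_FOUND, a TPoolCreatorActor schedules a retry on "Transaction ... completed, doublechecking", and a later create attempt is told the path already exists and accepted as-is. A generic create-then-verify loop with backoff sketches that pattern (illustrative Python; every name here is hypothetical, this is not YDB's implementation):

```python
import time

def ensure_default_pool(pool_exists, create_pool, attempts=5, backoff_s=0.1):
    """Idempotent ensure-created loop: verify, create if missing, re-verify."""
    for attempt in range(attempts):
        if pool_exists():            # NOT_FOUND warnings stop once this passes
            return True
        try:
            create_pool()            # may race with a concurrent creator
        except RuntimeError:         # e.g. "path exist, request accepts it"
            pass                     # someone else created it; verify again
        time.sleep(backoff_s * (2 ** attempt))  # scheduled retry with backoff
    return pool_exists()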
2025-03-28T09:39:28.655037Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:30.066043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792733221048144:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:30.066173Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:30.269754Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:30.298869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:30.326425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:30.352238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:30.380830Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:30.437455Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:30.471092Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792733221048653:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:30.471188Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:30.471263Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792733221048658:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:30.474299Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:30.481957Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792733221048660:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:30.583785Z node 1 :TX_PROXY ERROR: Actor# [1:7486792733221048714:3446] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 10746, MsgBus: 63911 2025-03-28T09:39:32.452157Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792737640040850:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:32.452232Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0014a5/r3tmp/tmpPPArA5/pdisk_1.dat 2025-03-28T09:39:32.563740Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 10746, node 2 2025-03-28T09:39:32.595623Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:32.595716Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:32.598567Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:32.619268Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:32.619285Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:32.619290Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:32.619373Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63911 TClient is connected to server localhost:63911 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:32.974435Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:32.984716Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
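Further below, the SelfJoin test prints its full query statistics as one single-line JSON plan. Assuming only the keys visible in that output ("Plan"/"Plans", "Node Type", "Stats", "PhysicalStageId", "CpuTimeUs"), a small walker can reduce it to a per-stage CPU summary (illustrative Python, not part of the test harness):

```python
import json

def walk_stages(node, out):
    """Recursively visit plan nodes and collect (node type, stage id, CPU sum)."""
    stats = node.get("Stats", {})
    cpu = stats.get("CpuTimeUs", {}).get("Sum")
    if cpu is not None:
        out.append((node.get("Node Type"), stats.get("PhysicalStageId"), cpu))
    for child in node.get("Plans", []):
        walk_stages(child, out)
    return out

def report(plan_json: str):
    plan = json.loads(plan_json)
    for node_type, stage, cpu in walk_stages(plan["Plan"], []):
        print(f"stage={stage} type={node_type} CpuTimeUs={cpu}")
```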
2025-03-28T09:39:33.053237Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:33.203700Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:33.304617Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:35.521436Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792750524944509:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:35.521541Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: { : Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:41.148394Z node 3 :TX_PROXY ERROR: Actor# [3:7486792777769771538:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"E-Size":"No estimate","PlanNodeId":3,"LookupKeyColumns":["Key"],"Node Type":"TableLookupJoin","Path":"\/Root\/TwoShard","Columns":["Key"],"E-Rows":"No estimate","Table":"TwoShard","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/TwoShard","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"TwoShard","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12,"History":[5,48]}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":6,"Max":3,"Min":3},"ResumeMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"PauseMessageMs":{"Count":2,"Sum":1,"Max":1,"Min":0},"WaitTimeUs":{"Count":2,"Sum":2319,"Max":1169,"Min":1150,"History":[5,2319]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}],"DurationUs":{"Count":2,"Sum":2000,"Max":1000,"Min":1000},"MaxMemoryUsage":{"Count":2,"Sum":2097152,"Max":1048576,"Min":1048576,"History":[5,2097152]},"ResultRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Tasks":2,"ResultBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"OutputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"FinishedTasks":2,"IngressRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"PhysicalStageId":0,"StageDurationUs":1000,"Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"ReadBytes":{"Count":2,"Sum":48,"Max":24,"Min":24}}],"BaseTimeMs":1743154782365,"OutputBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"CpuTimeUs":{"Count":2,"Sum":673,"Max":547,"Min":126,"History":[4,126,5,673]},"Ingress":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Bytes":{"Count":2,"Sum":96,"Max":48,"Min":48,"History":[4,48,5,96]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"FirstMessageMs":{"Count":2,"Sum":4,"Max":2,"Min":2},"Bytes":{"Count":2,"Sum":96,"Max":48,"Min":48,"History":[4,48,5,96]
},"PauseMessageMs":{"Count":2,"Sum":1,"Max":1,"Min":0},"WaitTimeUs":{"Count":2,"Sum":2429,"Max":1220,"Min":1209,"History":[4,1209,5,2429]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1}}}]}}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Node Type":"Collect","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":8,"Max":4,"Min":4},"FirstMessageMs":{"Count":2,"Sum":8,"Max":4,"Min":4},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12,"History":[5,36,6,48]}},"Name":"6","Push":{"LastMessageMs":{"Count":2,"Sum":8,"Max":4,"Min":4},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"Chunks":{"Count":2,"Sum":6,"Max":3,"Min":3},"ResumeMessageMs":{"Count":2,"Sum":8,"Max":4,"Min":4},"FirstMessageMs":{"Count":2,"Sum":8,"Max":4,"Min":4},"PauseMessageMs":{"Count":2,"Sum":6,"Max":3,"Min":3},"WaitTimeUs":{"Count":2,"Sum":2967,"Max":1646,"Min":1321,"History":[5,1646,6,2967]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1},"WaitMessageMs":{"Count":2,"Max":4,"Min":3}}}],"MaxMemoryUsage":{"Count":2,"Sum":2097152,"Max":1048576,"Min":1048576,"History":[5,1048576,6,2097152]},"DurationUs":{"Count":2,"Sum":5000,"Max":3000,"Min":2000},"InputBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"Tasks":2,"OutputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"FinishedTasks":2,"InputRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"PhysicalStageId":1,"StageDurationUs":3000,"Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":2,"Sum":6,"Max":3,"Min":3},"ReadBytes":{"Count":2,"Sum":24,"Max":12,"Min":12}}],"BaseTimeMs":1743154782365,"WaitInputTimeUs":{"Count":2,"Sum":2884,"Max":1599,"Min":1285,"History":[5,1599,6,2884]},"OutputBytes":{"Count":2,"Sum":48,"Max":36,"Min":12},"CpuTimeUs":{"Count":2,"Sum":552,"Max":370,"Min":182,"History":[5,182,6,552]},"Input":[{"Pop":{"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":5,"Max":3,"Min":2},"ActiveMessageMs":{"Count":2,"Max":3,"Min":2},"FirstMessageMs":{"Count":2,"Sum":5,"Max":3,"Min":2},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12,"History":[5,36,6,48]}},"Name":"2","Push":{"Rows":{"Count":2,"Sum":6,"Max":3,"Min":3},"LastMessageMs":{"Count":2,"Sum":5,"Max":3,"Min":2},"Chunks":{"Count":2,"Sum":2,"Max":1,"Min":1},"ResumeMessageMs":{"Count":2,"Sum":5,"Max":3,"Min":2},"FirstMessageMs":{"Count":2,"Sum":5,"Max":3,"Min":2},"ActiveMessageMs":{"Count":2,"Max":3,"Min":2},"Bytes":{"Count":2,"Sum":48,"Max":36,"Min":12,"History":[5,36,6,48]},"PauseMessageMs":{"Count":2,"Sum":5,"Max":3,"Min":2},"WaitTimeUs":{"Count":2,"Sum":225,"Max":119,"Min":106,"History":[5,106,6,225]},"WaitPeriods":{"Count":2,"Sum":2,"Max":1,"Min":1},"WaitMessageMs":{"Count":2,"Max":3,"Min":2}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":5}],"Name":"Limit","Limit":"1001"}],"Node 
Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":24,"Max":24,"Min":24,"History":[6,24]}},"Name":"8","Push":{"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"PauseMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"WaitTimeUs":{"Count":1,"Sum":164,"Max":164,"Min":164,"History":[6,164]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[6,1048576]},"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"InputBytes":{"Count":1,"Sum":48,"Max":48,"Min":48},"Tasks":1,"OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"FinishedTasks":1,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":2,"StageDurationUs":1000,"BaseTimeMs":1743154782365,"OutputBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"CpuTimeUs":{"Count":1,"Sum":1401,"Max":1401,"Min":1401,"History":[6,1401]},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":48,"Max":48,"Min":48,"History":[6,48]}},"Name":"4","Push":{"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":48,"Max":48,"Min":48,"History":[6,48]},"PauseMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"WaitTimeUs":{"Count":1,"Sum":58,"Max":58,"Min":58,"History":[6,58]},"WaitPeriods":{"Count":1,"Sum":2,"Max":2,"Min":2}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":7}],"Name":"Limit","Limit":"1001"}],"Node 
Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"FirstMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"Bytes":{"Count":1,"Sum":24,"Max":24,"Min":24,"History":[6,24]}},"Name":"RESULT","Push":{"WaitTimeUs":{"Count":1,"Sum":5245,"Max":5245,"Min":5245,"History":[6,5245]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6},"FirstMessageMs":{"Count":1,"Sum":6,"Max":6,"Min":6}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[6,1048576]},"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"InputBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"ResultRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Tasks":1,"ResultBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"FinishedTasks":1,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":3,"StageDurationUs":1000,"BaseTimeMs":1743154782365,"OutputBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"CpuTimeUs":{"Count":1,"Sum":573,"Max":573,"Min":573,"History":[6,573]},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":24,"Max":24,"Min":24,"History":[6,24]}},"Name":"6","Push":{"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":24,"Max":24,"Min":24,"History":[6,24]},"WaitTimeUs":{"Count":1,"Sum":5219,"Max":5219,"Min":5219,"History":[6,5219]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}]}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":249786,"CpuTimeUs":246559},"ProcessCpuTimeUs":321,"TotalDurationUs":270741,"ResourcePoolId":"default","QueuedTimeUs":485},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/TwoShard","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"TwoShard","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"},{"Operators":[{"E-Rows":"No estimate","Columns":["Key"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"TableLookup"}],"Operators":[{"Name":"LookupJoin","LookupKeyColumns":["Key"]}],"Node Type":"LookupJoin","PlanNodeType":"Connection"}],"Operators":[{"A-Rows":6,"A-SelfCpu":1.401,"A-Cpu":1.401,"A-Size":24,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":6,"A-SelfCpu":0.573,"A-Cpu":1.974,"A-Size":24,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} >> 
StatisticsScan::RunScanOnShard
>> KqpExplain::ReadTableRanges [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery
>> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD]
>> DataShardReassign::AutoReassignOnYellowFlag
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery
------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD]
Test command err:
2025-03-28T09:39:26.328630Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792714414952897:2060];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:26.328751Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001589/r3tmp/tmpsgpr44/pdisk_1.dat
2025-03-28T09:39:26.655738Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 12454, node 1
2025-03-28T09:39:26.695278Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:26.695302Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:26.695313Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:26.695425Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:39:26.710074Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:26.710188Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:39:26.711698Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:39:26.860536Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1
2025-03-28T09:39:26.863171Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1
2025-03-28T09:39:26.863219Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root
2025-03-28T09:39:26.864393Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:11274, port: 11274
2025-03-28T09:39:26.864478Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net
2025-03-28T09:39:26.919183Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf
2025-03-28T09:39:26.963060Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1
2025-03-28T09:39:27.012238Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****ciWA (568F6995) () has now valid token of ldapuser@ldap
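The node 1 trace above is the happy path: bind as the service account, look up the user's direct groups via memberOf, then resolve the whole nested chain in one query with the LDAP_MATCHING_RULE_IN_CHAIN OID 1.2.840.113556.1.4.1941 ("1.1" requests no attributes). A rough equivalent using the Python ldap3 package (an assumption for illustration — the test itself drives YDB's C++ LDAP provider; the password literal is invented):

```python
from ldap3 import Server, Connection, SUBTREE

BASE = "dc=search,dc=yandex,dc=net"  # base DN from the test fixture above

def fetch_groups(host, port, user="ldapuser",
                 bind_dn="cn=robouser," + BASE,
                 bind_pw="robouserPassword"):  # hypothetical password
    conn = Connection(Server(host, port=port, use_ssl=True),
                      user=bind_dn, password=bind_pw, auto_bind=True)
    # 1) direct groups via the memberOf attribute
    conn.search(BASE, f"(uid={user})", search_scope=SUBTREE,
                attributes=["memberOf"])
    user_dn = conn.entries[0].entry_dn
    # 2) all nested groups in one round trip via the matching-rule-in-chain
    #    OID seen in the trace; "1.1" means return no attributes, DNs only
    conn.search(BASE, f"(member:1.2.840.113556.1.4.1941:={user_dn})",
                search_scope=SUBTREE, attributes=["1.1"])
    return [e.entry_dn for e in conn.entries]
```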
2025-03-28T09:39:28.994299Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792722221648356:2062];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:28.994385Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001589/r3tmp/tmpUc6jUp/pdisk_1.dat
2025-03-28T09:39:29.089258Z node 2 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 21290, node 2
2025-03-28T09:39:29.125763Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:29.125911Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:39:29.127358Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:39:29.148206Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:29.148240Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:29.148247Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:29.148356Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:39:29.312125Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1
2025-03-28T09:39:29.316065Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1
2025-03-28T09:39:29.316094Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root
2025-03-28T09:39:29.316839Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:10978, port: 10978
2025-03-28T09:39:29.316912Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net
2025-03-28T09:39:29.367155Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf
2025-03-28T09:39:29.414982Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1
2025-03-28T09:39:29.415467Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal
2025-03-28T09:39:29.415513Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf
2025-03-28T09:39:29.463067Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf
2025-03-28T09:39:29.507038Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf
2025-03-28T09:39:29.507985Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****CUGA (002F0D5B) () has now valid token of ldapuser@ldap
2025-03-28T09:39:31.704066Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486792736570305922:2060];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:31.704128Z node 3 :METADATA_PROVIDER ERROR:
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001589/r3tmp/tmpk9wgY6/pdisk_1.dat 2025-03-28T09:39:31.802351Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20107, node 3 2025-03-28T09:39:31.841695Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:31.841781Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:31.843393Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:31.845846Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:31.845873Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:31.845907Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:31.846022Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:31.988993Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:31.991492Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:31.991514Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:31.991971Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:61149, port: 61149 2025-03-28T09:39:31.992024Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:32.047294Z node 3 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:32.095582Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****Xw8g (BA7CD192) () has now valid token of ldapuser@ldap 2025-03-28T09:39:34.809907Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486792749671848428:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:34.809956Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001589/r3tmp/tmp8WG7PV/pdisk_1.dat 2025-03-28T09:39:34.902820Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14102, node 4 2025-03-28T09:39:34.941562Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:34.941647Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:34.943233Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:34.961001Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:34.961033Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:34.961045Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:34.961166Z node 4 :NET_CLASSIFIER ERROR: got bad distributable 
configuration 2025-03-28T09:39:35.051674Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:35.054721Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:35.054752Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:35.055512Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://qqq:62145 ldaps://localhost:62145 ldaps://localhost:11111, port: 62145 2025-03-28T09:39:35.055606Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:35.111136Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:35.155021Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:39:35.155493Z node 4 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:39:35.155535Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:35.203020Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:35.247067Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:35.248094Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****_KiA (ECBB8335) () has now valid token of ldapuser@ldap 2025-03-28T09:39:37.880127Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486792762036656070:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:37.880204Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001589/r3tmp/tmpumX7da/pdisk_1.dat 2025-03-28T09:39:37.991596Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63641, node 5 2025-03-28T09:39:38.028653Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:38.028739Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:38.030265Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:38.072493Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:38.072524Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:38.072534Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:38.072693Z node 5 :NET_CLASSIFIER ERROR: got bad distributable 
configuration 2025-03-28T09:39:38.184282Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:38.187378Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:38.187416Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:38.188193Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:61135, port: 61135 2025-03-28T09:39:38.188298Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:38.239294Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-03-28T09:39:38.282978Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:39:38.283554Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:39:38.283592Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-28T09:39:38.331041Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-28T09:39:38.379027Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2025-03-28T09:39:38.380001Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****g5pg (95911DEC) () has now valid token of ldapuser@ldap 2025-03-28T09:39:41.013280Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486792780382471453:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:41.013347Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001589/r3tmp/tmpuydKKp/pdisk_1.dat 2025-03-28T09:39:41.110537Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23468, node 6 2025-03-28T09:39:41.143113Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:41.143205Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:41.147565Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:41.182305Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:41.182336Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:41.182345Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:41.182486Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:41.278060Z 
node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:41.281435Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:41.281476Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:41.282214Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:10064, port: 10064 2025-03-28T09:39:41.282314Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:41.339246Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2025-03-28T09:39:41.339333Z node 6 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:10064. Bad search filter 2025-03-28T09:39:41.339889Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****eIcQ (9BD37C66) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:10064. Bad search filter)' ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::IdxFullscan [GOOD] Test command err: Trying to start YDB, gRPC: 29138, MsgBus: 16105 2025-03-28T09:39:22.468044Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792695721469793:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:22.468285Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001478/r3tmp/tmp5ddMuH/pdisk_1.dat 2025-03-28T09:39:22.775208Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29138, node 1 2025-03-28T09:39:22.813434Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:22.813474Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:22.813486Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:22.813662Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:22.828606Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:22.828728Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:22.830045Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:16105 TClient is connected to server localhost:16105 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:23.259668Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:23.292833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:23.421376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:23.567710Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:23.623942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:25.227086Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792708606373458:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:25.227220Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:25.486778Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.517365Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.538929Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.560529Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.585942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.609376Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.681959Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792708606373971:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:25.682011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:25.682095Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792708606373976:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:25.685233Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:25.694282Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792708606373978:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:25.750990Z node 1 :TX_PROXY ERROR: Actor# [1:7486792708606374031:3448] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TopSort-TableFullScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"Min(If,SUM(10,15))"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Inputs":[{"ExternalPlanNodeId":4}],"Offset":"15","Name":"Offset"}],"Node Type":"Limit-Offset"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key","Text"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":8,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/EightShard","E-Rows":"No estimate","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"Name":"TopSort","Limit":"SUM(10,15)","TopSortBy":"row.Text"}],"Node Type":"TopSort"}],"Operators":[{"Name":"Limit","Limit":"Min(If,SUM(10,15))"}],"Node Type":"Limit"}],"Operators":[{"Offset":"15","Name":"Offset"}],"Node Type":"Offset"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 20922, MsgBus: 16133 2025-03-28T09:39:27.386035Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792717665167258:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:27.386079Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001478/r3tmp/tmpLz1Zk9/pdisk_1.dat 2025-03-28T09:39:27.480477Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 20922, node 2 2025-03-28T09:39:27.518327Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:27.518417Z node 2 :HIVE WARN: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:27.519667Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:27.526354Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:27.526381Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:27.526389Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:27.526516Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16133 TClient is connected to server localhost:16133 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 ... WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7228, node 4 2025-03-28T09:39:37.754397Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:37.754519Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:37.762464Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:37.792133Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:37.792167Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:37.792176Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:37.792315Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4133 TClient is connected to server localhost:4133 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:38.284119Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:38.300636Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:38.373648Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:38.528859Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:38.591357Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:40.964377Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792775273749719:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:40.964474Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:41.015601Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:41.085679Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:41.118712Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:41.150711Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:41.182138Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:41.251516Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:41.295628Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792779568717530:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:41.295687Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:41.295834Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792779568717535:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:41.299628Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:41.309840Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486792779568717537:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:41.381153Z node 4 :TX_PROXY ERROR: Actor# [4:7486792779568717590:3449] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:42.320977Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:42.594300Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:39:42.609483Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486792762388846054:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:42.609539Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:42.636822Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"E-Size":"No estimate","PlanNodeId":8,"LookupKeyColumns":["id"],"Node Type":"TableLookup","Path":"\/Root\/test_table_idx","Columns":["Value","complex_field","id","str_field"],"E-Rows":"No estimate","Table":"test_table_idx","Plans":[{"PlanNodeId":7,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"No estimate"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":3},{"InternalOperatorId":2}],"E-Rows":"No estimate","Condition":"t.id = idx.id","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"},{"Inputs":[{"ExternalPlanNodeId":8}],"E-Rows":"No estimate","Predicate":"Exist(item.id)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Limit-InnerJoin (MapJoin)-ConstantExpr-Filter","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":10}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":5,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["test_table_idx_idx"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/test_table_idx_idx","E-Rows":"No estimate","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Node Type":"Collect"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node 
Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/test_table_idx","reads":[{"lookup_by":["id"],"columns":["Value","complex_field","id","str_field"],"type":"Lookup"}]},{"name":"\/Root\/test_table_idx_idx","reads":[{"lookup_by":["str_field (null)"],"columns":["id"],"scan_by":["complex_field (-∞, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"E-Rows":"No estimate","Columns":["Value","complex_field","id","str_field"],"E-Size":"No estimate","E-Cost":"No estimate","Name":"TableLookup","Table":"test_table_idx","LookupKeyColumns":["id"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.id)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":13,"Operators":[{"Scan":"Parallel","ReadRange":["str_field (null)","complex_field (-∞, +∞)"],"E-Size":"No estimate","Name":"TableRangeScan","Path":"\/Root\/test_table_idx_idx","E-Rows":"No estimate","Table":"test_table_idx_idx","ReadColumns":["id"],"E-Cost":"No estimate"}],"Node Type":"TableRangeScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"t.id = idx.id","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":2},"PlanNodeType":"Query"}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut_sequence/unittest >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] Test command err: Caught NodeWarden registration actorId# [1:11:2058] 2025-03-28T09:39:44.035670Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:319} Bootstrap 2025-03-28T09:39:44.066479Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/2909/000686/r3tmp/tmprp9OdG/static.dat" PDiskGuid: 11556887776951382532 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 11556887776951382532 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 11556887776951382532 } } } } AvailabilityDomains: 0 } 2025-03-28T09:39:44.068940Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:196} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:/home/runner/.ya/build/build_root/2909/000686/r3tmp/tmprp9OdG/static.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2025-03-28T09:39:44.073326Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-28T09:39:44.077160Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# 
[0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 11556887776951382532 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-28T09:39:44.078511Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 11556887776951382532 2025-03-28T09:39:44.078575Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 0 2025-03-28T09:39:44.080138Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:29:2075] ControllerId# 72057594037932033 2025-03-28T09:39:44.080191Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:39:44.080293Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:294} StartInvalidGroupProxy GroupId# 4294967295 2025-03-28T09:39:44.080800Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:306} StartRequestReportingThrottler 2025-03-28T09:39:44.097910Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:22} Bootstrap 2025-03-28T09:39:44.126743Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:39:44.126817Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-28T09:39:44.126925Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2025-03-28T09:39:44.127794Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:39:44.127828Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:221} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2025-03-28T09:39:44.133185Z node 1 :BS_NODE DEBUG: {NWDC35@distconf_persistent_storage.cpp:184} PersistConfig Record# {} Drives# [] 2025-03-28T09:39:44.140144Z node 1 :BS_NODE DEBUG: {NWDC51@distconf_persistent_storage.cpp:103} TWriterActor bootstrap Drives# [] Record# {} 2025-03-28T09:39:44.140365Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435075 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2025-03-28T09:39:44.153638Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/2909/000686/r3tmp/tmprp9OdG/static.dat" PDiskGuid: 11556887776951382532 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 11556887776951382532 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 11556887776951382532 } } } } AvailabilityDomains: 0 } 2025-03-28T09:39:44.155876Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 268639248 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2025-03-28T09:39:44.200817Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 2 2025-03-28T09:39:44.200878Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:6} TEvNodesInfo 2025-03-28T09:39:44.201616Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:322} UpdateBound 
RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\320\251q1^\2157\240\257\252\376\276\225\026\212\037T\354C\016" } 2025-03-28T09:39:44.201835Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 2 2025-03-28T09:39:44.201881Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:317} StateFunc Type# 2146435075 Sender# [1:46:2089] SessionId# [0:0:0] Cookie# 0 2025-03-28T09:39:44.201930Z node 1 :BS_NODE DEBUG: {NWDC36@distconf_persistent_storage.cpp:205} TEvStorageConfigStored NumOk# 0 NumError# 0 Passed# 0.073986s 2025-03-28T09:39:44.202220Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:255} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2025-03-28T09:39:44.202263Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:317} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2025-03-28T09:39:44.213455Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-03-28T09:39:44.221887Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-03-28T09:39:44.251324Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2025-03-28T09:39:44.270258Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2025-03-28T09:39:44.273418Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2025-03-28T09:39:44.275635Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:39:44.277102Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2025-03-28T09:39:44.279199Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2036} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2025-03-28T09:39:44.279757Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:497} Handle TEvInterconnect::TEvNodesInfo 2025-03-28T09:39:44.280115Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2025-03-28T09:39:44.296873Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2025-03-28T09:39:44.297131Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2025-03-28T09:39:44.297709Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2025-03-28T09:39:44.297876Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:39:44.297986Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2025-03-28T09:39:44.298209Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-28T09:39:44.325225Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2025-03-28T09:39:44.325377Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:39:44.337980Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2025-03-28T09:39:44.338091Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:39:44.338145Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2025-03-28T09:39:44.338202Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:39:44.338325Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2025-03-28T09:39:44.338368Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:39:44.338395Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2025-03-28T09:39:44.338421Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:39:44.350257Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2025-03-28T09:39:44.350374Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:39:44.362215Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2025-03-28T09:39:44.362367Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:19} TTxLoadEverything Execute 2025-03-28T09:39:44.365554Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:546} TTxLoadEverything Complete 2025-03-28T09:39:44.365634Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2167} LoadFinished 2025-03-28T09:39:44.380469Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2025-03-28T09:39:44.380554Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:551} TTxLoadEverything InitQueue processed Pipe connected clientId# [1:29:2075] 2025-03-28T09:39:44.381069Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:52} TEvTabletPipe::TEvClientConnected OK ClientId# [1:29:2075] ServerId# [1:124:2146] TabletId# 72057594037932033 PipeClientId# [1:29:2075] 2025-03-28T09:39:44.381877Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 11556887776951382532 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-03-28T09:39:44.383423Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:398} Execute 
TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:/home/runner/.ya/build/build_root/2909/000686/r3tmp/tmprp9OdG/static.dat" PDiskConfig { ExpectedSlotCount: 2 } } } } Command { DefineBox { BoxId: 1 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 ErasureSpecies: "none" VDiskKind: "Default" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2025-03-28T09:39:44.385244Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:339} Create new pdisk PDiskId# 1:1 Path# SectorMap:/home/runner/.ya/build/build_root/2909/000686/r3tmp/tmprp9OdG/static.dat 2025-03-28T09:39:44.394323Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } State: Initial Replicated: false DiskSpace: Green } } 2025-03-28T09:39:44.394517Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-03-28T09:39:44.394624Z node 1 :BS_CONTROLLER DEBUG: {BSC13@scrub.cpp:597} sending TEvControllerScrubStartQuantum Msg# NKikimrBlobStorage.TEvControllerScrubStartQuantum VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } 2025-03-28T09:39:44.394910Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 11556887776951382532 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-03-28T09:39:44.395065Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 11556887776951382532 Status: READY OnlyPhantomsRemain: false } } 2025-03-28T09:39:44.396203Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-03-28T09:39:44.397909Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } Success: true } 2025-03-28T09:39:44.398146Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-03-28T09:39:44.398314Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-03-28T09:39:44.398377Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-03-28T09:39:44.412643Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] === Waiting for pipe to establish === === Breaking pipe === === Sending put === Pipe disconnected clientId# [1:29:2075] 2025-03-28T09:39:44.413391Z node 1 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [1:29:2075] ServerId# [1:124:2146] TabletId# 72057594037932033 PipeClientId# [1:29:2075] 2025-03-28T09:39:44.413500Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:139:2159] ControllerId# 72057594037932033 2025-03-28T09:39:44.413539Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:72} SendRegisterNode 2025-03-28T09:39:44.414019Z node 1 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:127} 
HandleForwarded GroupId# 2147483648 EnableProxyMock# false NoGroup# false 2025-03-28T09:39:44.414068Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:22} StartLocalProxy GroupId# 2147483648 2025-03-28T09:39:44.414103Z node 1 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:265} RequestGroupConfig GroupId# 2147483648 2025-03-28T09:39:44.414391Z node 1 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2147483648 Pipe connected clientId# [1:139:2159] 2025-03-28T09:39:44.414708Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:52} TEvTabletPipe::TEvClientConnected OK ClientId# [1:139:2159] ServerId# [1:142:2161] TabletId# 72057594037932033 PipeClientId# [1:139:2159] 2025-03-28T09:39:44.414911Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 11556887776951382532 Status: READY OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2025-03-28T09:39:44.415226Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } Success: true } 2025-03-28T09:39:44.415444Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2025-03-28T09:39:44.415549Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [1:11:2058] Cookie# 0 Recipient# [1:142:2161] RecipientRewrite# [1:90:2122] Request# {NodeID: 1 GroupIDs: 2147483648 } StopGivingGroups# false 2025-03-28T09:39:44.415639Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 1 GroupIDs: 2147483648 } 2025-03-28T09:39:44.415821Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 0 } } 2025-03-28T09:39:44.429273Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:792} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/2909/000686/r3tmp/tmprp9OdG/static.dat" PDiskGuid: 11556887776951382532 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 11556887776951382532 } VDiskKind: Default StoragePoolName: "" } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 11556887776951382532 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "f5b4ee01-43d40639-a68ee22a-ed2df7a0" Comprehensive: true AvailDomain: 0 } 2025-03-28T09:39:44.429538Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# true Origin# controller ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: 
"SectorMap:/home/runner/.ya/build/build_root/2909/000686/r3tmp/tmprp9OdG/static.dat" PDiskGuid: 11556887776951382532 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 11556887776951382532 } VDiskKind: Default StoragePoolName: "" } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 11556887776951382532 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2025-03-28T09:39:44.429757Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:67} StartLocalVDiskActor SlayInFlight# false VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 11556887776951382532 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2025-03-28T09:39:44.430760Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:265} StartLocalVDiskActor done VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 11556887776951382532 2025-03-28T09:39:44.431044Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2025-03-28T09:39:44.436701Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:792} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 11556887776951382532 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } } 2025-03-28T09:39:44.436836Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 11556887776951382532 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2025-03-28T09:39:44.437040Z node 1 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2147483648 2025-03-28T09:39:44.437934Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 11556887776951382532 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2025-03-28T09:39:44.440962Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2025-03-28T09:39:44.446003Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } } 2025-03-28T09:39:44.450750Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 
2025-03-28T09:39:44.451339Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 11556887776951382532 Status: REPLICATING OnlyPhantomsRemain: false } } 2025-03-28T09:39:44.452477Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1133} Handle(TEvStatusUpdate) 2025-03-28T09:39:44.453348Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 11556887776951382532 Status: READY OnlyPhantomsRemain: false } } |97.4%| [TM] {RESULT} ydb/core/blobstorage/nodewarden/ut_sequence/unittest >> alter_compression.py::TestAlterCompression::test_all_supported_compression >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::ReadTableRanges [GOOD] Test command err: Trying to start YDB, gRPC: 14985, MsgBus: 17674 2025-03-28T09:39:22.560785Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792697318831942:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:22.560872Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00148d/r3tmp/tmplOds5j/pdisk_1.dat 2025-03-28T09:39:22.848551Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14985, node 1 2025-03-28T09:39:22.939258Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:22.939285Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:22.939292Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:22.939428Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:22.948147Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:22.948260Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:22.950270Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17674 TClient is connected to server localhost:17674 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:23.407886Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:23.432412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:23.530485Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:23.649480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:23.707551Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:24.986053Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792705908768328:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:24.986162Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:25.256996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.282794Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.305039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.327893Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.349173Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.412412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:25.443075Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792710203736136:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:25.443130Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:25.443183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792710203736141:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:25.446733Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:25.456716Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792710203736143:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:25.512744Z node 1 :TX_PROXY ERROR: Actor# [1:7486792710203736197:3443] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":17,"Plans":[{"PlanNodeId":16,"Plans":[{"PlanNodeId":15,"Plans":[{"PlanNodeId":14,"Plans":[{"PlanNodeId":13,"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Subplan Name":"CTE Stage_11","Plans":[{"Tables":["KeyValue"],"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Parent Relationship":"InitPlan"}],"Node Type":"Map","PlanNodeType":"Connection"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Node Type":"UnionAll","PlanNodeType":"Connection","CTE Name":"Stage_11"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"GroupBy":"item.t1.Key","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":6}],"E-Rows":"No estimate","Condition":"t1.Key = t2.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"ExternalPlanNodeId":9}],"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Aggregate-InnerJoin (MapJoin)-Filter"}],"Node Type":"HashShuffle","KeyColumns":["t1.Key"],"PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":11}],"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Node Type":"Map","PlanNodeType":"Connection"},{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Node Type":"UnionAll","PlanNodeType":"Connection","CTE Name":"Stage_11"}],"Node Type":"Stage"}],"Node Type":"Broadcast","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.Key","Name":"Sort"},{"Inputs":[{"InternalOperatorId":2},{"ExternalPlanNodeId":3}],"E-Rows":"No estimate","Condition":"Foo.t1.Key = t1.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"},{"Inputs":[{"ExternalPlanNodeId":13}],"E-Rows":"No estimate","Predicate":"Exist(item.t1.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Sort-InnerJoin (MapJoin)-Filter"}],"Node Type":"Merge","SortColumns":["Key (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key"],"scan_by":["Key (-∞, 
+∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":11,"Plans":[{"PlanNodeId":12,"Plans":[{"PlanNodeId":15,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"},{"PlanNodeId":19,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"No estimate","Table":"KeyValue","ReadColumns":["Key"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Condition":"t1.Key = t2.Key","Name":"InnerJoin (MapJoin)","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"GroupBy":"item.t1.Key","Aggregation":"state","Name":"Aggregate","Phase":"Intermediate"}],"Node Type":"Aggregate"}],"Node Type":"HashShuffle (KeyColumns: [\"t1.Key\"])","PlanNodeType":"Connection"}],"Operators":[{"Name":"Aggregate","Phase":"Final"}],"Node Type":"Aggregate"}],"Operators":[{"E-Rows":"No estimate","Predicate":"Exist(item.t1.Key)","Name":"Filt ... n"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoKeys"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"E-Rows":"No estimate","Predicate":"item.Key2 \u003E 101","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"},{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/TwoKeys","E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"Filter-TableFullScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/TwoKeys","reads":[{"columns":["Key1","Key2","Value"],"scan_by":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key1 (-∞, +∞)","Key2 (-∞, +∞)"],"Name":"TableFullScan","Path":"\/Root\/TwoKeys","E-Rows":"No estimate","Table":"TwoKeys","ReadColumns":["Key1","Key2","Value"],"E-Cost":"No estimate"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"No estimate","Predicate":"item.Key2 \u003E 101","Name":"Filter","E-Size":"No estimate","E-Cost":"No estimate"}],"Node Type":"Filter"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 28371, MsgBus: 16903 2025-03-28T09:39:37.743245Z node 4 
:METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486792762824459463:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:37.743318Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00148d/r3tmp/tmpx1lXa6/pdisk_1.dat 2025-03-28T09:39:37.872977Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:37.903791Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:37.903896Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:37.906047Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 28371, node 4 2025-03-28T09:39:37.945420Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:37.945447Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:37.945462Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:37.945623Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16903 TClient is connected to server localhost:16903 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:38.406375Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:38.421644Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:38.477166Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:38.652512Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T09:39:38.715950Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:41.458374Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792780004330429:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:41.458465Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:41.505366Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:41.538347Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:41.569598Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:41.601422Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:41.632647Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:41.701889Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:41.780501Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792780004330946:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:41.780588Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:41.780593Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792780004330951:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:41.784386Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:41.794065Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486792780004330953:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:39:41.856129Z node 4 :TX_PROXY ERROR: Actor# [4:7486792780004331005:3445] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:42.743529Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486792762824459463:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:42.743615Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:42.852690Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:43.143242Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480 2025-03-28T09:39:43.206069Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480 {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 100)","Key [2000, +∞)"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, 100)","Key [2000, +∞)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"Scan":"Parallel","E-Size":"No estimate","ReadRanges":["Key (-∞, 100)","Key [2000, +∞)"],"Name":"TableRangeScan","Path":"\/Root\/KeyValue","E-Rows":"No estimate","Table":"KeyValue","ReadRangesKeys":["Key"],"ReadColumns":["Key","Value"],"E-Cost":"No estimate","ReadRangesExpectedSize":2}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} >> TPQTest::TestReadAndDeleteConsumer [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] >> Viewer::JsonStorageListingV1PDiskIdFilter [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetDefaultFilter [GOOD] >> 
TLdapUtilsSearchFilterCreatorTest::GetFilterWithOneLoginPlaceholder [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithSearchAttribute [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadAndDeleteConsumer [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-28T09:37:43.982503Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:43.982602Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-28T09:37:44.004583Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:44.021613Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1 ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } Consumers { Name: "important_user" Generation: 1 Important: true } 2025-03-28T09:37:44.022762Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-28T09:37:44.025253Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-28T09:37:44.039894Z node 1 :PERSQUEUE INFO: new Cookie default|93b600b8-6ac95d9b-39f08e9d-f81d92e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: 
[2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-03-28T09:37:52.292231Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.292311Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] 2025-03-28T09:37:52.309871Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.310474Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 2 ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } Consumers { Name: "important_user" Generation: 2 Important: true } 2025-03-28T09:37:52.311008Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:183:2196] 2025-03-28T09:37:52.313530Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:183:2196] 2025-03-28T09:37:52.323328Z node 2 :PERSQUEUE INFO: new Cookie default|56e261b3-2e557576-dd7e609d-e524aa7f_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [2:175:2190] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [2:175:2190] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-03-28T09:38:00.112235Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:00.112290Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] 
sender: [3:179:2057] recipient: [3:14:2061] 2025-03-28T09:38:00.127373Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:00.127830Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 3 ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } Consumers { Name: "important_user" Generation: 3 Important: true } 2025-03-28T09:38:00.128301Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-03-28T09:38:00.130728Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-03-28T09:38:00.140835Z node 3 :PERSQUEUE INFO: new Cookie default|dac82dac-d23f4412-8092e840-ff48bfe_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [3:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [3:177:2192] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-03-28T09:38:07.918413Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:07.918476Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:179:2057] recipient: [4:14:2061] 2025-03-28T09:38:07.931634Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:38:07.932057Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 ImportantClientId: "important_user" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" 
TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 4 ReadRuleGenerations: 4 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 Important: false } Consumers { Name: "important_user" Generation: 4 Important: true } 2025-03-28T09:38:07.932444Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:185:2198] 2025-03-28T09:38:07.933996Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [4:185:2198] 2025-03-28T09:38:07.941108Z node 4 :PERSQUEUE INFO: new Cookie default|35b8d64c-ed7e48e1-a2e8ff25-7c60bb1a_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [4:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [4:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [4:177:2192] Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user3" ... onfig 2025-03-28T09:39:41.538267Z node 19 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:39:41.538958Z node 19 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [19:294:2285] 2025-03-28T09:39:41.564634Z node 19 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:39:41.564750Z node 19 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [19:294:2285] 2025-03-28T09:39:41.590935Z node 19 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Leader for TabletID 72057594037927937 is [19:245:2244] sender: [19:317:2057] recipient: [19:14:2061] 2025-03-28T09:39:41.599809Z node 19 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:39:41.603632Z node 19 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1001 actor [19:314:2298] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1001 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1000 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1000 Important: true } Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:103:2057] recipient: [20:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:103:2057] recipient: [20:101:2135] Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:108:2057] recipient: [20:101:2135] 2025-03-28T09:39:42.226006Z node 20 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:39:42.226114Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [20:149:2057] recipient: [20:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [20:149:2057] recipient: [20:147:2170] Leader for TabletID 72057594037927938 is [20:153:2174] sender: [20:154:2057] recipient: [20:147:2170] Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:179:2057] recipient: [20:14:2061] 2025-03-28T09:39:42.251305Z node 20 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:39:42.252195Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1002 actor [20:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1002 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1002 ReadRuleGenerations: 1002 ReadRuleGenerations: 1002 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1002 Important: false } Consumers { Name: "user1" Generation: 1002 Important: true } Consumers { Name: "user2" Generation: 1002 Important: true } 2025-03-28T09:39:42.253044Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [20:185:2198] 2025-03-28T09:39:42.256809Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [20:185:2198] 2025-03-28T09:39:42.272792Z node 20 :PERSQUEUE INFO: new Cookie default|9470e8d1-1cd83c8c-bb7885fd-403de73e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:39:43.285856Z node 20 
:PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-03-28T09:39:43.339822Z node 20 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:240:2057] recipient: [20:99:2134] Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:243:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [20:107:2139] sender: [20:244:2057] recipient: [20:242:2243] Leader for TabletID 72057594037927937 is [20:245:2244] sender: [20:246:2057] recipient: [20:242:2243] 2025-03-28T09:39:43.381402Z node 20 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:39:43.381483Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:39:43.382113Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [20:294:2285] 2025-03-28T09:39:43.409231Z node 20 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:39:43.409355Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [20:294:2285] 2025-03-28T09:39:43.436447Z node 20 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Leader for TabletID 72057594037927937 is [20:245:2244] sender: [20:317:2057] recipient: [20:14:2061] 2025-03-28T09:39:43.445018Z node 20 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:39:43.449377Z node 20 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1003 actor [20:314:2298] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1003 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1002 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1002 Important: true } Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:103:2057] recipient: [21:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:103:2057] recipient: [21:101:2135] Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:108:2057] recipient: [21:101:2135] 2025-03-28T09:39:43.967440Z node 21 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:39:43.967518Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [21:149:2057] recipient: [21:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [21:149:2057] recipient: [21:147:2170] Leader for TabletID 72057594037927938 is [21:153:2174] sender: [21:154:2057] recipient: [21:147:2170] Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:179:2057] recipient: [21:14:2061] 2025-03-28T09:39:43.988930Z node 21 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: 
reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:39:43.989591Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1004 actor [21:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1004 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1004 ReadRuleGenerations: 1004 ReadRuleGenerations: 1004 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1004 Important: false } Consumers { Name: "user1" Generation: 1004 Important: true } Consumers { Name: "user2" Generation: 1004 Important: true } 2025-03-28T09:39:43.990236Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [21:185:2198] 2025-03-28T09:39:43.993289Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [21:185:2198] 2025-03-28T09:39:44.007291Z node 21 :PERSQUEUE INFO: new Cookie default|3d531e71-caa3ea5c-e6b59381-a97722d7_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2025-03-28T09:39:44.995050Z node 21 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 2025-03-28T09:39:45.045634Z node 21 :PERSQUEUE NOTICE: Have to remove new data from cache. Topic rt3.dc1--asdfgs--topic, tablet id72057594037927937, cookie 0 Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:240:2057] recipient: [21:99:2134] Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:243:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [21:107:2139] sender: [21:244:2057] recipient: [21:242:2243] Leader for TabletID 72057594037927937 is [21:245:2244] sender: [21:246:2057] recipient: [21:242:2243] 2025-03-28T09:39:45.081694Z node 21 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:39:45.081763Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info 2025-03-28T09:39:45.082326Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [21:294:2285] 2025-03-28T09:39:45.109618Z node 21 :PERSQUEUE INFO: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2025-03-28T09:39:45.109719Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [21:294:2285] 2025-03-28T09:39:45.137374Z node 21 :PERSQUEUE WARN: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 size 8296398 Leader for TabletID 72057594037927937 is [21:245:2244] sender: [21:317:2057] recipient: [21:14:2061] 2025-03-28T09:39:45.144158Z node 21 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:39:45.147664Z node 21 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1005 actor [21:314:2298] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 ImportantClientId: "user1" ImportantClientId: "user2" LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1005 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } ReadRuleGenerations: 1004 AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1004 Important: true } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155306.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155306.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143155306.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123155306.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=143155306.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=143155306.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154106.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123155306.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123155306.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154106.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=123154106.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=123154106.000000s;Name=;Codec=}; 2025-03-28T09:38:26.904359Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:38:26.966159Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:38:26.979420Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:38:26.979619Z node 1 :TX_COLUMNSHARD DEBUG: 
TxInitSchema.Execute at tablet 9437184 2025-03-28T09:38:26.985287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:38:26.985430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:38:26.985578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:38:26.985662Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:38:26.985737Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:38:26.985799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:38:26.985859Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:38:26.985939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:38:26.986013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:38:26.986086Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:38:26.986156Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:38:26.986217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:38:27.002638Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:38:27.002878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:38:27.002922Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:38:27.003037Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 
2025-03-28T09:38:27.003164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:38:27.003222Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:38:27.003251Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:38:27.003309Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:38:27.003362Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:38:27.003386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:38:27.003407Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:38:27.003503Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:38:27.003539Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:38:27.003562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:38:27.003580Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:38:27.003631Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:38:27.003661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:38:27.003684Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:38:27.003701Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:38:27.003743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:38:27.003765Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:38:27.003782Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:38:27.003808Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:38:27.003830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:38:27.003852Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:38:27.004120Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=28; 2025-03-28T09:38:27.004174Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=22; 2025-03-28T09:38:27.004221Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=21; 2025-03-28T09:38:27.004273Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=25; 2025-03-28T09:38:27.004365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:38:27.004398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:38:27.004416Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:38:27.004526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:38:27.004548Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:38:27.004565Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TT ... 
n_data.cpp:29;EXECUTE:finishLoadingTime=376; 2025-03-28T09:39:44.626890Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=40095; 2025-03-28T09:39:44.634671Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=7696; 2025-03-28T09:39:44.643684Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=6949; 2025-03-28T09:39:44.643775Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=8995; 2025-03-28T09:39:44.643902Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=76; 2025-03-28T09:39:44.644005Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=65; 2025-03-28T09:39:44.644115Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=73; 2025-03-28T09:39:44.644247Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=92; 2025-03-28T09:39:44.652432Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8114; 2025-03-28T09:39:44.663328Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=10781; 2025-03-28T09:39:44.663450Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=37; 2025-03-28T09:39:44.663510Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=24; 2025-03-28T09:39:44.663546Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=6; 2025-03-28T09:39:44.663578Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2025-03-28T09:39:44.663621Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-03-28T09:39:44.663703Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=45; 2025-03-28T09:39:44.663744Z node 1 :TX_COLUMNSHARD INFO: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2025-03-28T09:39:44.663816Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=45; 2025-03-28T09:39:44.663848Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2025-03-28T09:39:44.663898Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=24; 2025-03-28T09:39:44.663977Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=43; 2025-03-28T09:39:44.664277Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=267; 2025-03-28T09:39:44.664306Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=83108; 2025-03-28T09:39:44.664420Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=46800024;raw_bytes=72380025;count=29;records=720000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T09:39:44.664517Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T09:39:44.664560Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T09:39:44.664614Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T09:39:44.678770Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=1; 2025-03-28T09:39:44.678898Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:39:44.678943Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:39:44.679001Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-28T09:39:44.679046Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:39:44.679093Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:39:44.679130Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:39:44.679175Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:39:44.679266Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:39:44.679699Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:39:44.679772Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;self_id=[1:2654:4528];tablet_id=9437184;parent=[1:2612:4493];fline=manager.cpp:82;event=ask_data;request=request_id=155;1={portions_count=29};; 2025-03-28T09:39:44.680620Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T09:39:44.680740Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T09:39:44.680771Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 2025-03-28T09:39:44.680796Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T09:39:44.680830Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T09:39:44.680878Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T09:39:44.680932Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=7; 2025-03-28T09:39:44.680995Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=999700009;tx_id=18446744073709551615;;current_snapshot_ts=1000000003; 2025-03-28T09:39:44.681040Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=7;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T09:39:44.681088Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T09:39:44.681130Z node 1 :TX_COLUMNSHARD DEBUG: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T09:39:44.681212Z node 1 :TX_COLUMNSHARD DEBUG: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T09:39:44.682383Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=29;path_id=1; 2025-03-28T09:39:44.683949Z node 1 :TX_COLUMNSHARD INFO: TEST_STEP=4;tablet_id=9437184;self_id=[1:2612:4493];ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/tier1' stopped at tablet 9437184 240000/15598728 160000/10402096 160000/10402096 80000/5203544 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:38:56.046606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:56.046723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:56.046770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:56.046805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:56.046857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:56.046882Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:56.046928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:56.047007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:56.047306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:56.123446Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:38:56.123514Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:38:56.130626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:56.130772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:56.130896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:56.137329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:56.137534Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:56.138247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:56.138496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:56.140735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:56.142143Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:56.142208Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:56.142363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:56.142411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:56.142453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:56.142632Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:38:56.150743Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 
72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:38:56.269312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:56.269566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:56.269794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:56.270046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:56.270105Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:56.275706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:56.275823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:56.275987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:56.276033Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:56.276071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:56.276099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:56.277959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:56.278011Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:56.278052Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:56.279573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:56.279609Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:56.279668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:56.279722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:56.282933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:56.284807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 
72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:56.284960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:56.285741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:56.285870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:56.285916Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:56.286129Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:56.286170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:56.286307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:56.286372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:56.288681Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:56.288742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:56.288949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:56.288994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:38:56.289249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:56.289297Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:56.289404Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:56.289447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:56.289486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:56.289519Z no ... 
tusSuccess 2025-03-28T09:39:45.102197Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 
67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:45.107689Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { 
ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:39:45.107950Z node 26 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 317us result status StatusSuccess 2025-03-28T09:39:45.108828Z node 26 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 
1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] Test command err: 2025-03-28T09:39:26.449682Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792713844355928:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:26.449777Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001586/r3tmp/tmpSMoVDb/pdisk_1.dat 2025-03-28T09:39:26.729353Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4477, node 1 2025-03-28T09:39:26.785893Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:26.785913Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:26.785919Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:26.786021Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:26.824366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:26.824477Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:26.826622Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:26.894253Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:26.898185Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:26.898234Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:26.899753Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://unavailablehost:9479, port: 9479 2025-03-28T09:39:26.899857Z node 1 
:LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:39:26.903911Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not start TLS. Can't contact LDAP server 2025-03-28T09:39:26.904501Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****YIRg (7449335B) () has now retryable error message 'Could not login via LDAP (Could not start TLS. Can't contact LDAP server)' 2025-03-28T09:39:26.904818Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:26.904875Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:26.905735Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://unavailablehost:9479, port: 9479 2025-03-28T09:39:26.905794Z node 1 :LDAP_AUTH_PROVIDER DEBUG: start TLS 2025-03-28T09:39:26.908894Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Could not start TLS. Can't contact LDAP server 2025-03-28T09:39:26.909014Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****YIRg (7449335B) () has now retryable error message 'Could not login via LDAP (Could not start TLS. Can't contact LDAP server)' 2025-03-28T09:39:29.237013Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792727892253551:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:29.237110Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001586/r3tmp/tmpIrriyc/pdisk_1.dat 2025-03-28T09:39:29.316770Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12349, node 2 2025-03-28T09:39:29.372996Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:29.373100Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:29.375143Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:29.394556Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:29.394578Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:29.394586Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:29.394708Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:29.476912Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:29.479680Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:29.479705Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:29.480291Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****PVdA (5A6A8E3E) () has now permanent error message 'Could not login via LDAP (List of ldap server hosts is empty)' 2025-03-28T09:39:31.771648Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486792733578233890:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:31.771698Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
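The TICKET_PARSER entries above follow a fixed outcome vocabulary: a ticket either "has now valid token", a "retryable error message", or a "permanent error message". A minimal triage sketch in Python, keyed on exactly those three phrases; the helper name and regex are illustrative, not part of YDB's tooling:

import re
from collections import Counter

# Matches the three outcome phrases that appear verbatim in the log above.
OUTCOME_RE = re.compile(
    r"Ticket \S+ \([0-9A-F]+\) \(\) has now "
    r"(valid token|retryable error|permanent error)"
)

def classify_ticket_events(log_text: str) -> Counter:
    """Count how many tickets resolved, failed transiently, or failed for good."""
    return Counter(m.group(1) for m in OUTCOME_RE.finditer(log_text))

Run over this section, it separates LDAP probes that succeeded from those that failed transiently (server unreachable, busy) versus permanently (empty BaseDn, BindDn, BindPassword).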
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001586/r3tmp/tmpp8rSyK/pdisk_1.dat 2025-03-28T09:39:31.864269Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 27776, node 3 2025-03-28T09:39:31.900995Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:31.901130Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:31.902561Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:31.919227Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:31.919257Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:31.919265Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:31.919412Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:31.990985Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:31.994716Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:31.994750Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:31.995571Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****_Adw (249C26B1) () has now permanent error message 'Could not login via LDAP (Parameter BaseDn is empty)' 2025-03-28T09:39:34.947501Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486792746975618637:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:34.947587Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001586/r3tmp/tmpPeEPZB/pdisk_1.dat 2025-03-28T09:39:35.047182Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18953, node 4 2025-03-28T09:39:35.078108Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:35.078190Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:35.079712Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:35.092954Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:35.092981Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:35.092989Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:35.093135Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:35.215442Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:35.218942Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:35.218987Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:35.219792Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****UnZA (5B85FB04) () has now permanent error message 'Could not login via LDAP (Parameter 
BindDn is empty)' 2025-03-28T09:39:38.243838Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486792766825813195:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:38.243927Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001586/r3tmp/tmpv01je3/pdisk_1.dat 2025-03-28T09:39:38.340660Z node 5 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21840, node 5 2025-03-28T09:39:38.384593Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:38.384669Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:38.385293Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:38.385318Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:38.385328Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:38.385494Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:38.386175Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:38.525342Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:38.529084Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:38.529123Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:38.529777Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****B0_g (0EC1A7E9) () has now permanent error message 'Could not login via LDAP (Parameter BindPassword is empty)' 2025-03-28T09:39:41.941729Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486792776474981500:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:41.941816Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001586/r3tmp/tmpfREHwz/pdisk_1.dat 2025-03-28T09:39:42.051027Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:42.077098Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:42.077192Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:42.078807Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 3349, node 6 2025-03-28T09:39:42.117878Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:42.117904Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:42.117912Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:42.118072Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:42.290933Z node 6 :TICKET_PARSER 
DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:42.293930Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:42.293966Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:42.294758Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:8685, port: 8685 2025-03-28T09:39:42.294875Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:42.355240Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:42.399741Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****Luhw (FB2DE1DB) () has now valid token of ldapuser@ldap >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs >> BasicExample::BasicExample [GOOD] >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] Test command err: 2025-03-28T09:39:24.048669Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792707105458571:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:24.048787Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001599/r3tmp/tmphq1mYr/pdisk_1.dat 2025-03-28T09:39:24.351745Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:24.351814Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:24.353642Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 4964, node 1 2025-03-28T09:39:24.375187Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:39:24.375217Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:39:24.385694Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:24.402484Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:24.402507Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:24.402518Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:24.402637Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:24.560245Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:24.563986Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:24.564045Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:24.564848Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:13465, port: 13465 
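The TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders case reported [GOOD] above checks that a login is substituted at every placeholder position of a search-filter template. A minimal sketch of that idea; the "$username" token and the RFC 4515 escaping below are assumptions for illustration, not a verbatim copy of the YDB helper:

def make_search_filter(template: str, login: str) -> str:
    # Escape LDAP filter metacharacters so the login cannot change the
    # structure of the resulting filter (backslash must be escaped first).
    escaped = (login.replace("\\", r"\5c")
                    .replace("*", r"\2a")
                    .replace("(", r"\28")
                    .replace(")", r"\29")
                    .replace("\x00", r"\00"))
    return template.replace("$username", escaped)

# Several placeholders, all substituted:
assert (make_search_filter("(&(uid=$username)(cn=$username))", "ldapuser")
        == "(&(uid=ldapuser)(cn=ldapuser))")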
2025-03-28T09:39:24.565610Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:24.570884Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:24.619027Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:39:24.619462Z node 1 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:39:24.619542Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:24.667027Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:24.714970Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:24.716153Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****iGSQ (339B51B3) () has now valid token of ldapuser@ldap 2025-03-28T09:39:29.048573Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792707105458571:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:29.048647Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:29.058891Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****iGSQ (339B51B3) 2025-03-28T09:39:29.059142Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:13465, port: 13465 2025-03-28T09:39:29.059306Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:29.062603Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:29.063067Z node 1 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:13465 return no entries 2025-03-28T09:39:29.063410Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****iGSQ (339B51B3) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:13465 return no entries)' 2025-03-28T09:39:32.060093Z node 1 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****iGSQ (339B51B3) 2025-03-28T09:39:35.239272Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792754048789411:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:35.239329Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001599/r3tmp/tmpAwCKgJ/pdisk_1.dat 2025-03-28T09:39:35.329514Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 65332, node 2 2025-03-28T09:39:35.380122Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:35.380238Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:35.382092Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:35.382103Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:35.382117Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:35.382124Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:35.382249Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:35.544205Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:35.547646Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:35.547678Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:35.548403Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5862, port: 5862 2025-03-28T09:39:35.548502Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:35.551978Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:35.552291Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:5862. Server is busy 2025-03-28T09:39:35.552566Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****8_7w (6DB6C5C8) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:5862. 
Server is busy)' 2025-03-28T09:39:35.552835Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:35.552858Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:35.553730Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5862, port: 5862 2025-03-28T09:39:35.553822Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:35.568701Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:35.569081Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:5862. Server is busy 2025-03-28T09:39:35.569329Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****8_7w (6DB6C5C8) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:5862. Server is busy)' 2025-03-28T09:39:38.247946Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****8_7w (6DB6C5C8) 2025-03-28T09:39:38.248259Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:38.248291Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:38.249021Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5862, port: 5862 2025-03-28T09:39:38.249181Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:38.253789Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:38.254237Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform search for filter uid=ldapuser on server ldap://localhost:5862. Server is busy 2025-03-28T09:39:38.254462Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****8_7w (6DB6C5C8) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:5862. 
Server is busy)' 2025-03-28T09:39:40.242934Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792754048789411:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:40.242995Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:41.251556Z node 2 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****8_7w (6DB6C5C8) 2025-03-28T09:39:41.251878Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:41.251910Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:41.252602Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5862, port: 5862 2025-03-28T09:39:41.252689Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:41.255793Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:41.302999Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:39:41.303460Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:39:41.303513Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:41.351010Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:41.395025Z node 2 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:41.395790Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****8_7w (6DB6C5C8) () has now valid token of ldapuser@ldap >> QueryActorTest::Commit [GOOD] >> QueryActorTest::StreamQuery >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection >> TGRpcRateLimiterTest::UpdateResource [GOOD] >> TGRpcRateLimiterTest::DropResource >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] >> TMemoryController::Counters_NoHardLimit [GOOD] >> TMemoryController::Config_ConsumerLimits >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; 
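The FALLBACK_ACTOR_LOGGING and TX_COLUMNSHARD entries in this test are semicolon-delimited key=value records, which makes them easy to post-process. A minimal parsing sketch; parse_fields is a hypothetical helper, not YDB code:

def parse_fields(entry: str) -> dict:
    fields = {}
    for part in entry.split(";"):
        key, sep, value = part.partition("=")
        if sep:  # keep only key=value parts; bare tokens are skipped
            fields[key.strip()] = value
    return fields

line = ("FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;"
        "fline=native.cpp:110;event=serialize;size=9934960;columns=10;")
assert parse_fields(line)["size"] == "9934960"
assert parse_fields(line)["fline"] == "native.cpp:110"

The same splitter applies to the normalizer entries below (event=normalizer_register, description=CLASS_NAME=...), whose values are themselves key=value pairs one level down.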
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=saved_at;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=143155309.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123155309.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=123154109.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2025-03-28T09:38:30.745565Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:38:30.821968Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:38:30.848226Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:38:30.848543Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:38:30.857345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:38:30.857583Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:38:30.857897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:38:30.858027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:38:30.858143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:38:30.858265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:38:30.858375Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:38:30.858515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:38:30.858644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:38:30.858791Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:38:30.858918Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:38:30.859055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:38:30.888470Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:38:30.888819Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:38:30.888881Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:38:30.889054Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:38:30.889234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:38:30.889321Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:38:30.889371Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:38:30.889470Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:38:30.889536Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:38:30.889581Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:38:30.889616Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:38:30.889772Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:38:30.889840Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:38:30.889886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:38:30.889915Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:38:30.890002Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:38:30.890058Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:38:30.890097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:38:30.890157Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:38:30.890244Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:38:30.890285Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:38:30.890313Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:38:30.890364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:38:30.890405Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:38:30.890441Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:38:30.890874Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=51; 2025-03-28T09:38:30.890957Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=34; 2025-03-28T09:38:30.891072Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=55; 2025-03-28T09:38:30.891163Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=42; 2025-03-28T09:38:30.891351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:38:30.891410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:38:30.891446Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:38:30.891656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 
2025-03-28T09:38:30.891704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:38:30.891735Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:38:30.891886Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:38:30.891927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:38:30.891957Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:38:30.892153Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description= ... ;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:24:8680:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:1:8688:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:14:8656:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:10:8680:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:19:8576:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:81:8336:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:29:8704:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:8:8672:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:89:8552:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:88:8560:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:28:8712:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:41:2848:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:80:8368:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:59:8552:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:63:8568:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:58:8568:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:60:8560:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:97:2768:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:4:8680:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:69:2768:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:33:8680:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:44:8560:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:30:8704:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:20:2840:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:87:8552:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:13:8656:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:56:9608:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:93:8368:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:64:8544:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:74:8544:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:72:8560:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:32:8704:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:47:8544:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:79:8408:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:94:8360:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:18:8592:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:27:8680:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:50:8536:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:96:8328:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:49:8544:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:6:8680:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:84:9608:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:61:8552:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:77:8544:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:66:8360:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:11:8672:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:45:8560:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:98:9384:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:3:8680:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:36:8624:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;fline=adapter.cpp:21;event=s3_request_failed;request_type=get_object;exception=;message=curlCode: 6, Couldn't resolve host name;storage_id=/cold;blob=[9437184:7:1:255:86:8568:0];
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184
160000/10402332 160000/10402332 0/0 160000/10402524
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections
------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::JsonStorageListingV1PDiskIdFilter [GOOD]
Test command err: 2025-03-28T09:31:22.571973Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:336:2378], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:31:22.572421Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:31:22.572539Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] TServer::EnableGrpc on GrpcPort 28626, node 1 TClient is connected to server localhost:28787 2025-03-28T09:32:16.569604Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:3128:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:16.570893Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:16.571872Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:16.575198Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:3124:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:16.576237Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:16.576855Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:16.579679Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:3131:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:16.580460Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:1278:2180], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:16.580744Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:3137:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:16.581937Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:16.582549Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:16.582759Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:3134:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:16.582854Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:16.583081Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:16.583850Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:16.584582Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:16.584712Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:16.584859Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:1543:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:16.585570Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:16.585762Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:3140:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:16.585953Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:1540:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:32:16.586745Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:16.587384Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:16.587474Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:32:16.587537Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:16.588212Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:32:16.588844Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:32:16.992525Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:32:17.196995Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-28T09:32:17.215858Z node 2 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-28T09:32:17.755129Z node 2 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 15441, node 2 TClient is connected to server localhost:10940 2025-03-28T09:32:18.140648Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:32:18.140725Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:32:18.140778Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:32:18.141848Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:33:47.379270Z node 11 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [11:3106:2433], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:47.381306Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:47.382338Z node 11 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:33:47.386533Z node 12 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [12:3089:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:47.386856Z node 14 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [14:3112:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:47.388945Z node 17 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [17:3121:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:47.389682Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:47.389821Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:47.390916Z node 15 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [15:3115:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:47.391113Z node 16 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [16:3118:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:47.391215Z node 17 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:33:47.391410Z node 18 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [18:3124:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:47.391597Z node 19 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [19:3127:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:33:47.392257Z node 12 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:33:47.392489Z node 14 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;p ... error=incorrect path status: LookupError; 2025-03-28T09:37:00.864576Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:37:00.864609Z node 32 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:37:00.864789Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:00.864953Z node 36 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [36:2213:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:00.865555Z node 35 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:37:00.866087Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:00.866552Z node 34 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [34:2207:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:00.866626Z node 36 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:37:00.867654Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:00.867831Z node 37 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [37:2216:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:37:00.868197Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:37:00.868514Z node 37 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:37:00.868952Z node 37 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:37:01.278021Z node 29 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:01.493134Z node 29 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-28T09:37:01.522449Z node 29 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-28T09:37:02.432604Z node 29 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 28519, node 29 TClient is connected to server localhost:16969 2025-03-28T09:37:03.002377Z node 29 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:37:03.002460Z node 29 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:37:03.002533Z node 29 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:37:03.003266Z node 29 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:11.266464Z node 38 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [38:3151:2436], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:11.268454Z node 38 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:11.269283Z node 38 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:39:11.274711Z node 45 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [45:1141:2177], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:11.275691Z node 41 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [41:3157:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:11.276036Z node 46 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [46:1303:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:11.277252Z node 45 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:39:11.277319Z node 45 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:11.277847Z node 41 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:11.278038Z node 46 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:11.278609Z node 41 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:39:11.279002Z node 46 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:39:11.279805Z node 39 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [39:3147:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:11.280226Z node 44 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [44:3112:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:11.281649Z node 39 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:11.281973Z node 43 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [43:3109:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:11.282107Z node 44 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:11.282865Z node 39 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:39:11.283143Z node 44 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:39:11.284263Z node 43 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:11.285160Z node 42 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [42:3106:2376], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:11.285319Z node 43 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:39:11.287236Z node 42 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:11.287586Z node 42 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:39:11.288576Z node 40 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [40:3154:2379], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:11.289874Z node 40 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:11.290705Z node 40 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T09:39:11.872560Z node 38 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:12.091026Z node 38 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:106} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2025-03-28T09:39:12.121524Z node 38 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:422} Magic sector is present on disk, now going to format device PDiskId# 1000 2025-03-28T09:39:13.240132Z node 38 :BS_PDISK WARN: {BSP01@blobstorage_pdisk_actor.cpp:362} Device formatting done PDiskId# 1000 TServer::EnableGrpc on GrpcPort 6640, node 38 TClient is connected to server localhost:20010 2025-03-28T09:39:14.031217Z node 38 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:14.031346Z node 38 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:14.031445Z node 38 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:14.032439Z node 38 :NET_CLASSIFIER ERROR: got bad distributable configuration >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection |97.5%| [TA] $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.5%| [TA] {RESULT} $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood >> TDqPqRdReadActorTests::TestReadFromTopic2 [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration [GOOD] >> TDqPqRdReadActorTests::SessionError [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] >> TabletService_ChangeSchema::Basics |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> BasicExample::BasicExample [GOOD] |97.5%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> TTestYqlToMiniKQLCompile::CheckResolve >> TTestYqlToMiniKQLCompile::CheckResolve [GOOD] >> TTestYqlToMiniKQLCompile::OnlyResult >> TDqPqRdReadActorTests::ReadWithFreeSpace [GOOD] >> StatisticsScan::RunScanOnShard [GOOD] >> TTestYqlToMiniKQLCompile::OnlyResult [GOOD] >> TTestYqlToMiniKQLCompile::EraseRow [GOOD] >> TTestYqlToMiniKQLCompile::UpdateRow >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection >> TTestYqlToMiniKQLCompile::UpdateRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRow >> TTestYqlToMiniKQLCompile::SelectRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRange [GOOD] >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::DropTableWithInflightChanges[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:39:26.568822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:26.568906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:26.568943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, 
StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:26.568976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:26.569008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:26.569026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:26.569075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:26.569132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:26.569352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:26.631148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:39:26.631192Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:39:26.636252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:26.636336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:26.636410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:26.641911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:26.642100Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:26.642547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:26.642718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:26.644016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:26.645159Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:26.645202Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:26.645291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:26.645320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:26.645345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:26.645439Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: 
[1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:39:26.650977Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:39:26.758241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:26.758447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:26.758624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:26.758966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:26.759019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:26.761192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:26.761306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:26.761459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:26.761515Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:26.761586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:26.761616Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:26.763182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:26.763223Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:26.763257Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:26.764391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:26.764423Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:26.764458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:26.764497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:26.772659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet 
{ TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:26.774526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:26.774677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:26.775685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:26.775815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:26.775868Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:26.776106Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:26.776164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:26.776342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:26.776432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:26.778207Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:26.778260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:26.778407Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:26.778444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:39:26.778679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:26.778740Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:26.778825Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:26.778852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:26.778900Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:26.778933Z no ... 
perationId: 1003:2, at schemeshard: 72057594046678944 2025-03-28T09:39:49.462761Z node 26 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:2 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:49.462991Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2025-03-28T09:39:49.463118Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-03-28T09:39:49.463150Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-03-28T09:39:49.467219Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:2 progress is 2/3 2025-03-28T09:39:49.467272Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 2/3 2025-03-28T09:39:49.467306Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: false 2025-03-28T09:39:49.468526Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-28T09:39:49.468622Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-28T09:39:49.468655Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-03-28T09:39:49.468884Z node 26 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-28T09:39:49.468951Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 5 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2025-03-28T09:39:49.468975Z node 26 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2025-03-28T09:39:49.469006Z node 26 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 18446744073709551615 2025-03-28T09:39:49.469041Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2025-03-28T09:39:49.469113Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 2/3, is published: true 2025-03-28T09:39:49.470597Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2025-03-28T09:39:49.470650Z node 26 :FLAT_TX_SCHEMESHARD INFO: TDropTable TProposedDeletePart operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:49.470879Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2025-03-28T09:39:49.470992Z node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-03-28T09:39:49.471023Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-03-28T09:39:49.471059Z 
node 26 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1003:0 progress is 3/3 2025-03-28T09:39:49.471105Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-03-28T09:39:49.471136Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1003, ready parts: 3/3, is published: true 2025-03-28T09:39:49.471168Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1003 ready parts: 3/3 2025-03-28T09:39:49.471204Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:0 2025-03-28T09:39:49.471233Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:0 2025-03-28T09:39:49.471320Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2025-03-28T09:39:49.471355Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:1 2025-03-28T09:39:49.471379Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:1 2025-03-28T09:39:49.471410Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2025-03-28T09:39:49.471435Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1003:2 2025-03-28T09:39:49.471458Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1003:2 2025-03-28T09:39:49.471502Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 2 2025-03-28T09:39:49.472111Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:39:49.473295Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:39:49.473397Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:39:49.473432Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:39:49.477860Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:39:49.478142Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2025-03-28T09:39:49.481153Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 345 RawX2: 111669152025 } TabletId: 72075186233409546 State: 4 2025-03-28T09:39:49.481228Z node 26 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409546, state: Offline, at schemeshard: 72057594046678944 2025-03-28T09:39:49.482839Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:39:49.483243Z node 26 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 2 TabletID: 72075186233409546 2025-03-28T09:39:49.483440Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 
2025-03-28T09:39:49.483664Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 1 Forgetting tablet 72075186233409546 2025-03-28T09:39:49.486184Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:39:49.486226Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 5], at schemeshard: 72057594046678944 2025-03-28T09:39:49.486280Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 2025-03-28T09:39:49.486318Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2025-03-28T09:39:49.486351Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2025-03-28T09:39:49.488619Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:2 2025-03-28T09:39:49.488676Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409546 2025-03-28T09:39:49.489647Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2025-03-28T09:39:49.489871Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2025-03-28T09:39:49.489911Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2025-03-28T09:39:49.490616Z node 26 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2025-03-28T09:39:49.490723Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2025-03-28T09:39:49.490758Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [26:628:2554] 2025-03-28T09:39:49.495768Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 349 RawX2: 111669152028 } TabletId: 72075186233409547 State: 4 2025-03-28T09:39:49.495846Z node 26 :FLAT_TX_SCHEMESHARD INFO: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2025-03-28T09:39:49.497346Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2025-03-28T09:39:49.497707Z node 26 :HIVE INFO: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 1 TabletID: 72075186233409547 Forgetting tablet 72075186233409547 2025-03-28T09:39:49.497841Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:49.498002Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 
2025-03-28T09:39:49.498252Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2025-03-28T09:39:49.498288Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2025-03-28T09:39:49.498344Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:39:49.502051Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046678944:1 2025-03-28T09:39:49.502117Z node 26 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409547 2025-03-28T09:39:49.502669Z node 26 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 1003 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2025-03-28T09:39:49.503033Z node 26 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2025-03-28T09:39:49.503120Z node 26 :HIVE INFO: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 >> test_tpch_import.py::TestS3TpchImport::test_import_and_export >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx [GOOD] >> TTestYqlToMiniKQLCompile::AcquireLocks >> TTestYqlToMiniKQLCompile::AcquireLocks [GOOD] >> TTestYqlToMiniKQLCompile::StaticMapTypeOf [GOOD] >> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange >> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange [GOOD] >> TTestYqlToMiniKQLCompile::Extract >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection >> TTestYqlToMiniKQLCompile::Extract [GOOD] >> ReadUpdateWrite::Load [GOOD] >> TGRpcRateLimiterTest::DropResource [GOOD] >> TGRpcRateLimiterTest::DescribeResource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_column_stats/unittest >> StatisticsScan::RunScanOnShard [GOOD] Test command err: 2025-03-28T09:39:47.327775Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:47.328280Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:47.328348Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0009f0/r3tmp/tmp64D1Id/pdisk_1.dat 2025-03-28T09:39:47.846860Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:39:47.907722Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:47.961701Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:47.966153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:47.979738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:48.075043Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:39:48.433203Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:48.433335Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:48.433431Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:48.442336Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:39:48.598359Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:39:48.659544Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:49.646106Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1ye2c77h2437444vsf07c, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=Njc0ZGJkMGQtNDgzMzc0ZDEtM2Y3YTQzNjQtMzc2NDZmNjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |97.5%| [TM] {RESULT} ydb/core/tx/datashard/ut_column_stats/unittest >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource [GOOD] >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel [GOOD] >> TCheckpointCoordinatorTests::ShouldAllSnapshots >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead [GOOD] |97.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/client/minikql_compile/ut/unittest >> TTestYqlToMiniKQLCompile::Extract [GOOD] |97.5%| [TS] {RESULT} ydb/core/client/minikql_compile/ut/unittest >> TCheckpointCoordinatorTests::ShouldAllSnapshots [GOOD] >> TCheckpointCoordinatorTests::Should2Increments1Snapshot >> TCheckpointCoordinatorTests::Should2Increments1Snapshot [GOOD] >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved >> TDqPqRdReadActorTests::CoordinatorChanged >> TSentinelUnstableTests::BSControllerCantChangeStatus [GOOD] >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding >> DiscoveryIsNotBroken::NoKafkaEndpointInDiscovery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpointing/ut/unittest >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD] Test command err: 2025-03-28T09:39:51.134450Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-03-28T09:39:51.134619Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-03-28T09:39:51.134827Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-03-28T09:39:51.134857Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-03-28T09:39:51.134884Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-03-28T09:39:51.136929Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state 
load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-03-28T09:39:51.145550Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-03-28T09:39:51.145610Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-03-28T09:39:51.145641Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-03-28T09:39:51.151398Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-03-28T09:39:51.151484Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-03-28T09:39:51.151538Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-03-28T09:39:51.151659Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-28T09:39:51.151696Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-03-28T09:39:51.151735Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-28T09:39:51.151764Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-03-28T09:39:51.151802Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-28T09:39:51.151832Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-03-28T09:39:51.151866Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-03-28T09:39:51.151960Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-03-28T09:39:51.151991Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-03-28T09:39:51.152227Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2025-03-28T09:39:51.152286Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [1:6:2053], need 1 more acks 2025-03-28T09:39:51.152328Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2025-03-28T09:39:51.152370Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [1:8:2055], need 0 more acks 2025-03-28T09:39:51.152399Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-03-28T09:39:51.152457Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2025-03-28T09:39:51.152491Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint completed 2025-03-28T09:39:51.239990Z node 2 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-03-28T09:39:51.240127Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-03-28T09:39:51.240219Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-03-28T09:39:51.240255Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-03-28T09:39:51.240282Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-03-28T09:39:51.240373Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-03-28T09:39:51.240568Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-03-28T09:39:51.240602Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-03-28T09:39:51.240630Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-03-28T09:39:51.240755Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-03-28T09:39:51.240790Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-03-28T09:39:51.240828Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-03-28T09:39:51.240911Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-28T09:39:51.240941Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-03-28T09:39:51.240982Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-28T09:39:51.241013Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-03-28T09:39:51.241082Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-28T09:39:51.241116Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-03-28T09:39:51.241144Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-03-28T09:39:51.241209Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-03-28T09:39:51.241235Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-03-28T09:39:51.241386Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2025-03-28T09:39:51.241424Z node 2 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [2:6:2053], need 1 more acks 2025-03-28T09:39:51.241460Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2025-03-28T09:39:51.241498Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] State committed [2:8:2055], need 0 more acks 2025-03-28T09:39:51.241521Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-03-28T09:39:51.241604Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2025-03-28T09:39:51.241631Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint completed 2025-03-28T09:39:51.333158Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-03-28T09:39:51.333298Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-03-28T09:39:51.333394Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-03-28T09:39:51.333420Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-03-28T09:39:51.333447Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-03-28T09:39:51.333504Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-03-28T09:39:51.333654Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-03-28T09:39:51.333698Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-03-28T09:39:51.333729Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-03-28T09:39:51.333865Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-03-28T09:39:51.333913Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-03-28T09:39:51.333956Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-03-28T09:39:51.334046Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-28T09:39:51.334088Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-03-28T09:39:51.334141Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-28T09:39:51.334171Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2025-03-28T09:39:51.334207Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-28T09:39:51.334240Z node 3 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2025-03-28T09:39:51.334267Z node 3 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-03-28T09:39:51.334328Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2025-03-28T09:39:51.334357Z node 3 :STREAMS_C ... :2] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-28T09:39:51.425122Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Task state saved, need 0 more acks 2025-03-28T09:39:51.425150Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-03-28T09:39:51.425200Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvSetCheckpointPendingCommitStatusResponse 2025-03-28T09:39:51.425259Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-03-28T09:39:51.425421Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvStateCommitted; task: 1 2025-03-28T09:39:51.425460Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] State committed [4:6:2053], need 1 more acks 2025-03-28T09:39:51.425492Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvStateCommitted; task: 3 2025-03-28T09:39:51.425522Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] State committed [4:8:2055], need 0 more acks 2025-03-28T09:39:51.425549Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-03-28T09:39:51.425600Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvCompleteCheckpointResponse 2025-03-28T09:39:51.425629Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Checkpoint completed 2025-03-28T09:39:51.425663Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-03-28T09:39:51.425696Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-03-28T09:39:51.425791Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvCreateCheckpointResponse 2025-03-28T09:39:51.425824Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2025-03-28T09:39:51.425909Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-28T09:39:51.425945Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Task state saved, need 2 more acks 2025-03-28T09:39:51.425982Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-28T09:39:51.426013Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Task state saved, need 1 more acks 
2025-03-28T09:39:51.426053Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-28T09:39:51.426085Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Task state saved, need 0 more acks 2025-03-28T09:39:51.426109Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-03-28T09:39:51.426221Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvSetCheckpointPendingCommitStatusResponse 2025-03-28T09:39:51.426255Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-03-28T09:39:51.426390Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 1 2025-03-28T09:39:51.426434Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] State committed [4:6:2053], need 1 more acks 2025-03-28T09:39:51.426473Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 3 2025-03-28T09:39:51.426503Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] State committed [4:8:2055], need 0 more acks 2025-03-28T09:39:51.426526Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-03-28T09:39:51.426578Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:3] Got TEvCompleteCheckpointResponse 2025-03-28T09:39:51.426615Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:3] Checkpoint completed 2025-03-28T09:39:51.426652Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-03-28T09:39:51.426722Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-03-28T09:39:51.426799Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvCreateCheckpointResponse 2025-03-28T09:39:51.426846Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2025-03-28T09:39:51.426922Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-28T09:39:51.426950Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Task state saved, need 2 more acks 2025-03-28T09:39:51.426985Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-28T09:39:51.427030Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Task state saved, need 1 more acks 2025-03-28T09:39:51.427089Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2025-03-28T09:39:51.427120Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Task state saved, need 0 more acks 2025-03-28T09:39:51.427153Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 
'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2025-03-28T09:39:51.427215Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvSetCheckpointPendingCommitStatusResponse 2025-03-28T09:39:51.427242Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Checkpoint status changed to 'PendingCommit', committing states Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2025-03-28T09:39:51.427334Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 1 2025-03-28T09:39:51.427368Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] State committed [4:6:2053], need 1 more acks 2025-03-28T09:39:51.427425Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 3 2025-03-28T09:39:51.427457Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] State committed [4:8:2055], need 0 more acks 2025-03-28T09:39:51.427482Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2025-03-28T09:39:51.427527Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:4] Got TEvCompleteCheckpointResponse 2025-03-28T09:39:51.427560Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:4] Checkpoint completed 2025-03-28T09:39:51.514933Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT 2025-03-28T09:39:51.515096Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2025-03-28T09:39:51.515195Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2025-03-28T09:39:51.515222Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Successfully registered in storage 2025-03-28T09:39:51.515266Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2025-03-28T09:39:51.515327Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2025-03-28T09:39:51.515485Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2025-03-28T09:39:51.515543Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2025-03-28T09:39:51.515575Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-03-28T09:39:51.515701Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2025-03-28T09:39:51.515734Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2025-03-28T09:39:51.515766Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:1] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2025-03-28T09:39:51.515855Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 
2025-03-28T09:39:51.515886Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2025-03-28T09:39:51.515926Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2025-03-28T09:39:51.515958Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2025-03-28T09:39:51.515992Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2025-03-28T09:39:51.516020Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2025-03-28T09:39:51.516051Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: [my-graph-id.42] [42:1] Got all acks for aborted checkpoint, aborting in storage Waiting for TEvAbortCheckpointRequest (storage) 2025-03-28T09:39:51.516144Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:1] Got TEvAbortCheckpointResponse 2025-03-28T09:39:51.516171Z node 5 :STREAMS_CHECKPOINT_COORDINATOR WARN: [my-graph-id.42] [42:1] Checkpoint aborted 2025-03-28T09:39:51.516207Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] Got TEvScheduleCheckpointing 2025-03-28T09:39:51.516245Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2025-03-28T09:39:51.516321Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: [my-graph-id.42] [42:2] Got TEvCreateCheckpointResponse 2025-03-28T09:39:51.516352Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: [my-graph-id.42] [42:2] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) |97.5%| [TM] {RESULT} ydb/core/fq/libs/checkpointing/ut/unittest >> MediatorTimeCast::ReadStepSubscribe >> TMemoryController::Config_ConsumerLimits [GOOD] >> TMemoryController::SharedCache >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel_unstable/unittest >> TSentinelUnstableTests::BSControllerCantChangeStatus [GOOD] Test command err: 2025-03-28T09:39:33.276002Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-03-28T09:39:33.276085Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-03-28T09:39:33.276161Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-03-28T09:39:33.276195Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-03-28T09:39:33.276260Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-03-28T09:39:33.276369Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-03-28T09:39:33.278756Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } 
Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-03-28T09:39:33.295417Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: 
"/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { 
VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot 
{ VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDom ... PDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37820026 2025-03-28T09:39:49.961009Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37820026 2025-03-28T09:39:49.961081Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37820026 2025-03-28T09:39:49.961133Z node 1 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-03-28T09:39:49.971724Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-03-28T09:39:49.971795Z node 1 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-03-28T09:39:49.971927Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2025-03-28T09:39:49.971995Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2025-03-28T09:39:49.972024Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2025-03-28T09:39:49.972052Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2025-03-28T09:39:49.972094Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2025-03-28T09:39:49.972137Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request 
pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2025-03-28T09:39:49.972161Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2025-03-28T09:39:49.972185Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2025-03-28T09:39:49.972639Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860026 2025-03-28T09:39:49.973125Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860026 2025-03-28T09:39:49.973429Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860026 2025-03-28T09:39:49.973595Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860026 
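The StateUpdater traffic above and below forms one polling round: the CMS Sentinel asks every node's whiteboard for its PDisk states, and each reply carries one PDiskStateInfo entry per disk. When the round completes, the Sentinel compares, per disk, how many consecutive rounds the state has been unchanged (StateCounter) against a threshold (StateLimit, 60 in this run); only then does it ask the BlobStorage Controller to change the disk's status, which produces the "PDisk status changed" NOTICE lines further down and, when the BSC reply is unsuccessful, the "Retrying: attempt# 1" that follows. A minimal sketch of that debouncing logic, with hypothetical names (an illustrative model, not YDB's actual Sentinel code):

    // Illustrative model only; the real logic lives in YDB's CMS Sentinel and
    // these names are hypothetical. The threshold matches the
    // "StateCounter# 60 StateLimit# 60" values in the surrounding log.
    #include <cstdint>
    #include <iostream>
    #include <optional>

    enum class EPDiskState { Normal, Faulty };
    enum class EPDiskStatus { ACTIVE, INACTIVE };

    struct TPDiskTracker {
        EPDiskState PrevState = EPDiskState::Normal;
        uint32_t StateCounter = 0;                 // consecutive polls in the same state
        static constexpr uint32_t StateLimit = 60; // polls required before acting

        // Called once per StateUpdater round with the freshly polled whiteboard
        // state; returns the status to request from the BSC once the state has
        // been stable long enough. (The real Sentinel also tracks the disk's
        // current status and retries failed BSC requests; omitted here.)
        std::optional<EPDiskStatus> OnPoll(EPDiskState state) {
            StateCounter = (state == PrevState) ? StateCounter + 1 : 1;
            PrevState = state;
            if (StateCounter < StateLimit)
                return std::nullopt;               // not enough evidence yet
            return state == EPDiskState::Normal ? EPDiskStatus::ACTIVE
                                                : EPDiskStatus::INACTIVE;
        }
    };

    int main() {
        TPDiskTracker disk;
        for (int poll = 1; poll <= 60; ++poll)
            if (auto status = disk.OnPoll(EPDiskState::Normal))
                std::cout << "poll " << poll << ": request status# "
                          << (*status == EPDiskStatus::ACTIVE ? "ACTIVE" : "INACTIVE")
                          << '\n';
    }

The counter is a debounce: a disk must report the same state for a full StateLimit worth of rounds before the Sentinel acts on it, which is why the status change appears only once per disk in this log.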
2025-03-28T09:39:49.973749Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860026 2025-03-28T09:39:49.973885Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860026 2025-03-28T09:39:49.974008Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860026 2025-03-28T09:39:49.974136Z node 1 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860026 2025-03-28T09:39:49.974210Z node 1 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-03-28T09:39:49.974570Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 5:20, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-03-28T09:39:49.974618Z node 1 :CMS NOTICE: 
[Sentinel] [Main] PDisk status changed: pdiskId# 8:32, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-03-28T09:39:49.974657Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 4:18, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-03-28T09:39:49.974715Z node 1 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-03-28T09:39:49.974918Z node 1 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { }, cookie# 140 2025-03-28T09:39:49.974943Z node 1 :CMS ERROR: [Sentinel] [Main] Unsuccesful response from BSC: error# 2025-03-28T09:39:49.985288Z node 1 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 1 2025-03-28T09:39:49.985352Z node 1 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2025-03-28T09:39:49.985598Z node 1 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 141 2025-03-28T09:39:49.985664Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 4:18 2025-03-28T09:39:49.985716Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 5:20 2025-03-28T09:39:49.985751Z node 1 :CMS NOTICE: [Sentinel] [Main] PDisk status has been changed: pdiskId# 8:32 |97.5%| [TM] {RESULT} ydb/core/cms/ut_sentinel_unstable/unittest >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding >> TCreateAndDropViewTest::CheckCreatedView >> TTxDataShardLocalKMeansScan::BadRequest >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding >> TDqPqRdReadActorTests::CoordinatorChanged [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_reassign/unittest >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] Test command err: 2025-03-28T09:39:47.569733Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:47.570242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:47.570324Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000794/r3tmp/tmpBNpNxT/pdisk_1.dat 2025-03-28T09:39:48.035534Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose 2025-03-28T09:39:48.035638Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:48.044251Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:39:48.044959Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} hope 1 -> done Change{4, redo 987b alter 0b annex 0, ~{ 1, 33, 35, 42, 4 } -{ }, 0 gb} 2025-03-28T09:39:48.045023Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:5} Tx{8, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:48.045851Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:5:1:24576:515:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:48.045944Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:48.046032Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} commited cookie 1 for step 5 2025-03-28T09:39:48.057744Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-03-28T09:39:48.057804Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:48.059399Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{5, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-03-28T09:39:48.059496Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:6} Tx{9, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:48.059819Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:6:1:24576:129:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:48.059863Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:48.059928Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} commited 
cookie 1 for step 6 2025-03-28T09:39:48.060042Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-03-28T09:39:48.060066Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:48.060180Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{6, redo 174b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2025-03-28T09:39:48.060286Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:7} Tx{10, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:48.060453Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:7:1:24576:130:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:48.060500Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:48.060565Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} commited cookie 1 for step 7 2025-03-28T09:39:48.060639Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2025-03-28T09:39:48.060658Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:48.062296Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{7, redo 120b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-03-28T09:39:48.062361Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:8} Tx{11, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:48.062533Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:8:1:24576:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:48.062580Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046644480:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:48.062623Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} commited cookie 1 for step 8 2025-03-28T09:39:48.066306Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion 2025-03-28T09:39:48.066382Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:48.066448Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:39:48.066483Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:9} Tx{12, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} release 4194304b of static, Memory{0 
dyn 0} 2025-03-28T09:39:48.077211Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} queued, type NKikimr::NBsController::TBlobStorageController::TTxRegisterNode 2025-03-28T09:39:48.077285Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:48.077468Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} hope 1 -> done Change{7, redo 79b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-28T09:39:48.077527Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:8} Tx{14, NKikimr::NBsController::TBlobStorageController::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:48.098856Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:48.104007Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} queued, type NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig 2025-03-28T09:39:48.104086Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:48.110786Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:39:48.110902Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936130:2:3} Tx{3, NKikimr::NTenantSlotBroker::TTenantSlotBroker::TTxUpdateConfig} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:48.111062Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} queued, type NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig 2025-03-28T09:39:48.111125Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:48.115665Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} hope 1 -> done Change{3, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:39:48.115762Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037936129:2:4} Tx{3, NKikimr::NNodeBroker::TNodeBroker::TTxUpdateConfig} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:48.128140Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037932033:2:8:0:0:87:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:48.128252Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} commited cookie 1 for step 8 2025-03-28T09:39:48.131429Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} queued, type NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives 2025-03-28T09:39:48.131550Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:48.131944Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, 
NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:39:48.132007Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{15, NKikimr::NBsController::TBlobStorageController::TTxUpdateNodeDrives} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:48.154747Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:39:48.154901Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} queued, type NKikimr::NTxAllocator::TTxAllocator::TTxReserve 2025-03-28T09:39:48.154975Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:48.155171Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTxAllocator::TTxAllocator::TTxReserve} hope 1 -> done Change{3, redo 76b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-03-28T09:39:48.155219Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046447617:2:4} Tx{3, NKikimr::NTx ... er{72057594046316545:2:26} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{20, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-28T09:39:51.374223Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:51.374586Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:26:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:51.374673Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:51.374816Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} commited cookie 1 for step 26 2025-03-28T09:39:51.397309Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 2025-03-28T09:39:51.397357Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:51.397504Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{12, redo 82b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2025-03-28T09:39:51.397560Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:10} Tx{17, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:51.408094Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594037968897:2:10:0:0:94:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:51.408256Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} commited cookie 1 for step 10 2025-03-28T09:39:51.531810Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-03-28T09:39:51.531907Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, 
Memory{4194304 dyn 0} 2025-03-28T09:39:51.532114Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{21, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-28T09:39:51.532176Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{25, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:51.532559Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:27:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:51.532620Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:51.532692Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} commited cookie 1 for step 27 2025-03-28T09:39:51.668612Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-03-28T09:39:51.668688Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:51.668838Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{22, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-28T09:39:51.668890Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{26, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:51.669252Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:28:1:24576:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:51.669329Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:51.669398Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} commited cookie 1 for step 28 2025-03-28T09:39:51.804811Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-03-28T09:39:51.804908Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:51.805061Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{23, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-28T09:39:51.805108Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{27, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:51.805451Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:29:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:51.805515Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:51.805594Z node 1 
:TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} commited cookie 1 for step 29 2025-03-28T09:39:51.940788Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-03-28T09:39:51.940866Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:51.940981Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{24, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-28T09:39:51.941015Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{28, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:51.941269Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:30:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:51.941339Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:51.941427Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} commited cookie 1 for step 30 2025-03-28T09:39:51.954904Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:39:51.954948Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:51.977390Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} queued, type NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics 2025-03-28T09:39:51.977493Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:51.977577Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} hope 1 -> done Change{9, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:39:51.977657Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:52.070948Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-03-28T09:39:52.071022Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-03-28T09:39:52.071158Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxCleanupTransaction 2025-03-28T09:39:52.071232Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:52.071316Z node 1 :TX_DATASHARD TRACE: No cleanup at 72075186224037888 outdated step 15000 last cleanup 0 2025-03-28T09:39:52.071390Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
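The GetNextActiveOp trace above and the PlanQueue candidate checks that follow show the DataShard's idle probe: on a periodic wakeup it inspects the PlanQueue execution unit in dry-run mode, finds no attached operations, and reports that the unit has no ready work. A toy model of that probe, with hypothetical names (the actual unit lives in NKikimr::NDataShard and is not reproduced here):

    // Toy model of the PlanQueue probe traced in this log; hypothetical names.
    #include <cstdio>
    #include <deque>
    #include <optional>

    struct TOperation { unsigned long long TxId; };

    struct TPlanQueue {
        std::deque<TOperation> Attached;  // planned operations waiting to run

        // Mirrors "GetNextActiveOp ... (dry run)": report whether an operation
        // is ready, without dequeuing it in dry-run mode.
        std::optional<TOperation> GetNextActiveOp(bool dryRun) {
            if (Attached.empty())
                return std::nullopt;  // "TPlanQueueUnit ... has no attached operations"
            TOperation op = Attached.front();
            if (!dryRun)
                Attached.pop_front();
            return op;
        }
    };

    int main() {
        TPlanQueue queue;
        if (!queue.GetNextActiveOp(/*dryRun=*/true))
            std::printf("Unit PlanQueue has no ready operations\n");  // as in the trace
        queue.Attached.push_back({281474976710657ULL});
        if (auto op = queue.GetNextActiveOp(/*dryRun=*/false))
            std::printf("ready operation txId# %llu\n", op->TxId);
    }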
2025-03-28T09:39:52.071426Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037888 2025-03-28T09:39:52.071480Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:39:52.071516Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:39:52.071586Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:39:52.071643Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:52.071788Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:39:52.133646Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2025-03-28T09:39:52.133733Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:39:52.133903Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{25, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2025-03-28T09:39:52.133951Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:39:52.134299Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:31:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:52.134363Z node 1 :TABLET_MAIN DEBUG: Put Result: TEvPutResult {Id# [72057594046316545:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2025-03-28T09:39:52.134481Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:32} commited cookie 1 for step 31 --- Captured TEvCheckBlobstorageStatusResult event --- Waiting for TEvReassignTablet event... 2025-03-28T09:39:52.260258Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:15} CheckYellow current light yellow move channels: 0 1 2 --- Captured TEvReassignTablet event |97.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_reassign/unittest >> SdkCredProvider::PingFromProviderSyncDiscovery >> TDqPqRdReadActorTests::Backpressure >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding >> DataShardBackgroundCompaction::ShouldCompact >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery >> SequenceProxy::Basics ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/high_load/unittest >> ReadUpdateWrite::Load [GOOD] Test command err: Step 1. 
only write
Was written: 0 MiB, Speed: 0 MiB/s
Write: 10% 0.152790s 30% 0.152790s 50% 0.152790s 90% 0.152790s 99% 0.152790s
Write: 10% 0.210660s 30% 0.210660s 50% 0.210660s 90% 0.210660s 99% 0.210660s
Write: 10% 0.228858s 30% 0.228858s 50% 0.228858s 90% 0.228858s 99% 0.228858s
Write: 10% 0.207027s 30% 0.207027s 50% 0.207027s 90% 0.207027s 99% 0.207027s
Write: 10% 0.215844s 30% 0.215844s 50% 0.215844s 90% 0.215844s 99% 0.215844s
Write: 10% 0.241911s 30% 0.241911s 50% 0.241911s 90% 0.241911s 99% 0.241911s
Write: 10% 0.242892s 30% 0.242892s 50% 0.242892s 90% 0.242892s 99% 0.242892s
Write: 10% 0.245166s 30% 0.245166s 50% 0.245166s 90% 0.245166s 99% 0.245166s
Write: 10% 0.189615s 30% 0.189615s 50% 0.189615s 90% 0.189615s 99% 0.189615s
Write: 10% 0.217387s 30% 0.217387s 50% 0.217387s 90% 0.217387s 99% 0.217387s
Write: 10% 0.244338s 30% 0.244338s 50% 0.244338s 90% 0.244338s 99% 0.244338s
Write: 10% 0.191441s 30% 0.191441s 50% 0.191441s 90% 0.191441s 99% 0.191441s
Write: 10% 0.189924s 30% 0.189924s 50% 0.189924s 90% 0.189924s 99% 0.189924s
Write: 10% 0.181409s 30% 0.181409s 50% 0.181409s 90% 0.181409s 99% 0.181409s
Write: 10% 0.216587s 30% 0.216587s 50% 0.216587s 90% 0.216587s 99% 0.216587s
Write: 10% 0.202738s 30% 0.202738s 50% 0.202738s 90% 0.202738s 99% 0.202738s
Write: 10% 0.246331s 30% 0.246331s 50% 0.246331s 90% 0.246331s 99% 0.246331s
Write: 10% 0.294019s 30% 0.294019s 50% 0.294019s 90% 0.294019s 99% 0.294019s
Write: 10% 0.288409s 30% 0.288409s 50% 0.288409s 90% 0.288409s 99% 0.288409s
Write: 10% 0.320873s 30% 0.320873s 50% 0.320873s 90% 0.320873s 99% 0.320873s
Write: 10% 0.300209s 30% 0.300209s 50% 0.300209s 90% 0.300209s 99% 0.300209s
Write: 10% 0.328547s 30% 0.328547s 50% 0.328547s 90% 0.328547s 99% 0.328547s
Write: 10% 0.336840s 30% 0.336840s 50% 0.336840s 90% 0.336840s 99% 0.336840s
Write: 10% 0.328379s 30% 0.328379s 50% 0.328379s 90% 0.328379s 99% 0.328379s
Write: 10% 0.365791s 30% 0.365791s 50% 0.365791s 90% 0.365791s 99% 0.365791s
Write: 10% 0.250718s 30% 0.250718s 50% 0.250718s 90% 0.250718s 99% 0.250718s
Write: 10% 0.332635s 30% 0.332635s 50% 0.332635s 90% 0.332635s 99% 0.332635s
Write: 10% 0.406089s 30% 0.406089s 50% 0.406089s 90% 0.406089s 99% 0.406089s
Write: 10% 0.413928s 30% 0.413928s 50% 0.413928s 90% 0.413928s 99% 0.413928s
Write: 10% 0.356872s 30% 0.356872s 50% 0.356872s 90% 0.356872s 99% 0.356872s
Write: 10% 0.353784s 30% 0.353784s 50% 0.353784s 90% 0.353784s 99% 0.353784s
Write: 10% 0.406353s 30% 0.406353s 50% 0.406353s 90% 0.406353s 99% 0.406353s
Write: 10% 0.275060s 30% 0.275060s 50% 0.275060s 90% 0.275060s 99% 0.275060s
Write: 10% 0.254676s 30% 0.254676s 50% 0.254676s 90% 0.254676s 99% 0.254676s
Write: 10% 0.386612s 30% 0.386612s 50% 0.386612s 90% 0.386612s 99% 0.386612s
Write: 10% 0.362305s 30% 0.362305s 50% 0.362305s 90% 0.362305s 99% 0.362305s
Write: 10% 0.300831s 30% 0.300831s 50% 0.300831s 90% 0.300831s 99% 0.300831s
Write: 10% 0.433430s 30% 0.433430s 50% 0.433430s 90% 0.433430s 99% 0.433430s
Write: 10% 0.371804s 30% 0.371804s 50% 0.371804s 90% 0.371804s 99% 0.371804s
Write: 10% 0.365750s 30% 0.365750s 50% 0.365750s 90% 0.365750s 99% 0.365750s
Write: 10% 0.395391s 30% 0.395391s 50% 0.395391s 90% 0.395391s 99% 0.395391s
Write: 10% 0.387445s 30% 0.387445s 50% 0.387445s 90% 0.387445s 99% 0.387445s
Write: 10% 0.437130s 30% 0.437130s 50% 0.437130s 90% 0.437130s 99% 0.437130s
Write: 10% 0.382617s 30% 0.382617s 50% 0.382617s 90% 0.382617s 99% 0.382617s
Write: 10% 0.418501s 30% 0.418501s 50% 0.418501s 90% 0.418501s 99% 0.418501s
Write: 10% 0.390309s 30% 0.390309s 50% 0.390309s 90% 0.390309s 99% 0.390309s
Write: 10% 0.263728s 30% 0.263728s 50% 0.263728s 90% 0.263728s 99% 0.263728s
Write: 10% 0.267518s 30% 0.267518s 50% 0.267518s 90% 0.267518s 99% 0.267518s
Write: 10% 0.421494s 30% 0.421494s 50% 0.421494s 90% 0.421494s 99% 0.421494s
Write: 10% 0.284448s 30% 0.284448s 50% 0.284448s 90% 0.284448s 99% 0.284448s
Write: 10% 0.439691s 30% 0.439691s 50% 0.439691s 90% 0.439691s 99% 0.439691s
Write: 10% 0.378469s 30% 0.378469s 50% 0.378469s 90% 0.378469s 99% 0.378469s
Write: 10% 0.394592s 30% 0.394592s 50% 0.394592s 90% 0.394592s 99% 0.394592s
Write: 10% 0.335317s 30% 0.335317s 50% 0.335317s 90% 0.335317s 99% 0.335317s
Write: 10% 0.275789s 30% 0.275789s 50% 0.275789s 90% 0.275789s 99% 0.275789s
Write: 10% 0.371922s 30% 0.371922s 50% 0.371922s 90% 0.371922s 99% 0.371922s
Write: 10% 0.377995s 30% 0.377995s 50% 0.377995s 90% 0.377995s 99% 0.377995s
Write: 10% 0.443861s 30% 0.443861s 50% 0.443861s 90% 0.443861s 99% 0.443861s
Write: 10% 0.245603s 30% 0.245603s 50% 0.245603s 90% 0.245603s 99% 0.245603s
Write: 10% 0.429540s 30% 0.429540s 50% 0.429540s 90% 0.429540s 99% 0.429540s
Write: 10% 0.376121s 30% 0.376121s 50% 0.376121s 90% 0.376121s 99% 0.376121s
Write: 10% 0.442403s 30% 0.442403s 50% 0.442403s 90% 0.442403s 99% 0.442403s
Write: 10% 0.334518s 30% 0.334518s 50% 0.334518s 90% 0.334518s 99% 0.334518s
Write: 10% 0.415970s 30% 0.415970s 50% 0.415970s 90% 0.415970s 99% 0.415970s
Step 2. read write
Write: 10% 0.173468s 30% 0.173468s 50% 0.173468s 90% 0.173468s 99% 0.173468s
Write: 10% 0.177755s 30% 0.177755s 50% 0.177755s 90% 0.177755s 99% 0.177755s
Write: 10% 0.177541s 30% 0.177541s 50% 0.177541s 90% 0.177541s 99% 0.177541s
Write: 10% 0.152175s 30% 0.152175s 50% 0.152175s 90% 0.152175s 99% 0.152175s
Write: 10% 0.164610s 30% 0.164610s 50% 0.164610s 90% 0.164610s 99% 0.164610s
Write: 10% 0.190207s 30% 0.190207s 50% 0.190207s 90% 0.190207s 99% 0.190207s
Write: 10% 0.182434s 30% 0.182434s 50% 0.182434s 90% 0.182434s 99% 0.182434s
Write: 10% 0.182560s 30% 0.182560s 50% 0.182560s 90% 0.182560s 99% 0.182560s
Write: 10% 0.228506s 30% 0.228506s 50% 0.228506s 90% 0.228506s 99% 0.228506s
Write: 10% 0.220506s 30% 0.220506s 50% 0.220506s 90% 0.220506s 99% 0.220506s
Write: 10% 0.245076s 30% 0.245076s 50% 0.245076s 90% 0.245076s 99% 0.245076s
Write: 10% 0.227565s 30% 0.227565s 50% 0.227565s 90% 0.227565s 99% 0.227565s
Write: 10% 0.288195s 30% 0.288195s 50% 0.288195s 90% 0.288195s 99% 0.288195s
Write: 10% 0.284181s 30% 0.284181s 50% 0.284181s 90% 0.284181s 99% 0.284181s
Write: 10% 0.290342s 30% 0.290342s 50% 0.290342s 90% 0.290342s 99% 0.290342s
Write: 10% 0.285162s 30% 0.285162s 50% 0.285162s 90% 0.285162s 99% 0.285162s
Write: 10% 0.276507s 30% 0.276507s 50% 0.276507s 90% 0.276507s 99% 0.276507s
Write: 10% 0.293165s 30% 0.293165s 50% 0.293165s 90% 0.293165s 99% 0.293165s
Write: 10% 0.272489s 30% 0.272489s 50% 0.272489s 90% 0.272489s 99% 0.272489s
Write: 10% 0.278876s 30% 0.278876s 50% 0.278876s 90% 0.278876s 99% 0.278876s
Write: 10% 0.268730s 30% 0.268730s 50% 0.268730s 90% 0.268730s 99% 0.268730s
Write: 10% 0.294992s 30% 0.294992s 50% 0.294992s 90% 0.294992s 99% 0.294992s
Write: 10% 0.220298s 30% 0.220298s 50% 0.220298s 90% 0.220298s 99% 0.220298s
Write: 10% 0.275566s 30% 0.275566s 50% 0.275566s 90% 0.275566s 99% 0.275566s
Write: 10% 0.302541s 30% 0.302541s 50% 0.302541s 90% 0.302541s 99% 0.302541s
Write: 10% 0.197824s 30% 0.197824s 50% 0.197824s 90% 0.197824s 99% 0.197824s
Write: 10% 0.291286s 30% 0.291286s 50% 0.291286s 90% 0.291286s 99% 0.291286s
Write: 10% 0.294734s 30% 0.294734s 50% 0.294734s 90% 0.294734s 99% 0.294734s
Write: 10% 0.199535s 30% 0.199535s 50% 0.199535s 90% 0.199535s 99% 0.199535s
Write: 10% 0.282682s 30% 0.282682s 50% 0.282682s 90% 0.282682s 99% 0.282682s
Write: 10% 0.301362s 30% 0.301362s 50% 0.301362s 90% 0.301362s 99% 0.301362s
Write: 10% 0.271198s 30% 0.271198s 50% 0.271198s 90% 0.271198s 99% 0.271198s
Write: 10% 0.264556s 30% 0.264556s 50% 0.264556s 90% 0.264556s 99% 0.264556s
Write: 10% 0.282384s 30% 0.282384s 50% 0.282384s 90% 0.282384s 99% 0.282384s
Write: 10% 0.280126s 30% 0.280126s 50% 0.280126s 90% 0.280126s 99% 0.280126s
Write: 10% 0.299211s 30% 0.299211s 50% 0.299211s 90% 0.299211s 99% 0.299211s
Write: 10% 0.176172s 30% 0.176172s 50% 0.176172s 90% 0.176172s 99% 0.176172s
Write: 10% 0.295572s 30% 0.295572s 50% 0.295572s 90% 0.295572s 99% 0.295572s
Write: 10% 0.292134s 30% 0.292134s 50% 0.292134s 90% 0.292134s 99% 0.292134s
Write: 10% 0.268500s 30% 0.268500s 50% 0.268500s 90% 0.268500s 99% 0.268500s
Write: 10% 0.268644s 30% 0.268644s 50% 0.268644s 90% 0.268644s 99% 0.268644s
Write: 10% 0.155601s 30% 0.155601s 50% 0.155601s 90% 0.155601s 99% 0.155601s
Write: 10% 0.281591s 30% 0.281591s 50% 0.281591s 90% 0.281591s 99% 0.281591s
Write: 10% 0.167289s 30% 0.167289s 50% 0.167289s 90% 0.167289s 99% 0.167289s
Write: 10% 0.200349s 30% 0.200349s 50% 0.200349s 90% 0.200349s 99% 0.200349s
Write: 10% 0.202311s 30% 0.202311s 50% 0.202311s 90% 0.202311s 99% 0.202311s
Write: 10% 0.280456s 30% 0.280456s 50% 0.280456s 90% 0.280456s 99% 0.280456s
Write: 10% 0.175879s 30% 0.175879s 50% 0.175879s 90% 0.175879s 99% 0.175879s
Write: 10% 0.161778s 30% 0.161778s 50% 0.161778s 90% 0.161778s 99% 0.161778s
Write: 10% 0.185550s 30% 0.185550s 50% 0.185550s 90% 0.185550s 99% 0.185550s
Write: 10% 0.305388s 30% 0.305388s 50% 0.305388s 90% 0.305388s 99% 0.305388s
Write: 10% 0.157726s 30% 0.157726s 50% 0.157726s 90% 0.157726s 99% 0.157726s
Write: 10% 0.158978s 30% 0.158978s 50% 0.158978s 90% 0.158978s 99% 0.158978s
Write: 10% 0.173569s 30% 0.173569s 50% 0.173569s 90% 0.173569s 99% 0.173569s
Write: 10% 0.159432s 30% 0.159432s 50% 0.159432s 90% 0.159432s 99% 0.159432s
Write: 10% 0.274660s 30% 0.274660s 50% 0.274660s 90% 0.274660s 99% 0.274660s
Write: 10% 0.281140s 30% 0.281140s 50% 0.281140s 90% 0.281140s 99% 0.281140s
Write: 10% 0.161598s 30% 0.161598s 50% 0.161598s 90% 0.161598s 99% 0.161598s
Write: 10% 0.162210s 30% 0.162210s 50% 0.162210s 90% 0.162210s 99% 0.162210s
Write: 10% 0.290093s 30% 0.290093s 50% 0.290093s 90% 0.290093s 99% 0.290093s
Write: 10% 0.197265s 30% 0.197265s 50% 0.197265s 90% 0.197265s 99% 0.197265s
Write: 10% 0.301266s 30% 0.301266s 50% 0.301266s 90% 0.301266s 99% 0.301266s
Write: 10% 0.272515s 30% 0.272515s 50% 0.272515s 90% 0.272515s 99% 0.272515s
Write: 10% 0.262879s 30% 0.262879s 50% 0.262879s 90% 0.262879s 99% 0.262879s
Read: 10% 1.137544s 30% 1.137544s 50% 1.137544s 90% 1.137544s 99% 1.137544s
Step 3. write modify
Write: 10% 0.142659s 30% 0.142659s 50% 0.142659s 90% 0.142659s 99% 0.142659s
Write: 10% 0.131497s 30% 0.131497s 50% 0.131497s 90% 0.131497s 99% 0.131497s
Write: 10% 0.146029s 30% 0.146029s 50% 0.146029s 90% 0.146029s 99% 0.146029s
Write: 10% 0.150911s 30% 0.150911s 50% 0.150911s 90% 0.150911s 99% 0.150911s
Write: 10% 0.151163s 30% 0.151163s 50% 0.151163s 90% 0.151163s 99% 0.151163s
Write: 10% 0.156373s 30% 0.156373s 50% 0.156373s 90% 0.156373s 99% 0.156373s
Write: 10% 0.160705s 30% 0.160705s 50% 0.160705s 90% 0.160705s 99% 0.160705s
Write: 10% 0.134607s 30% 0.134607s 50% 0.134607s 90% 0.134607s 99% 0.134607s
Write: 10% 0.137541s 30% 0.137541s 50% 0.137541s 90% 0.137541s 99% 0.137541s
Write: 10% 0.155035s 30% 0.155035s 50% 0.155035s 90% 0.155035s 99% 0.155035s
Write: 10% 0.210554s 30% 0.210554s 50% 0.210554s 90% 0.210554s 99% 0.210554s
Write: 10% 0.204866s 30% 0.204866s 50% 0.204866s 90% 0.204866s 99% 0.204866s
Write: 10% 0.218390s 30% 0.218390s 50% 0.218390s 90% 0.218390s 99% 0.218390s
Write: 10% 0.202509s 30% 0.202509s 50% 0.202509s 90% 0.202509s 99% 0.202509s
Write: 10% 0.199388s 30% 0.199388s 50% 0.199388s 90% 0.199388s 99% 0.199388s
Write: 10% 0.221129s 30% 0.221129s 50% 0.221129s 90% 0.221129s 99% 0.221129s
Write: 10% 0.223753s 30% 0.223753s 50% 0.223753s 90% 0.223753s 99% 0.223753s
Write: 10% 0.227960s 30% 0.227960s 50% 0.227960s 90% 0.227960s 99% 0.227960s
Write: 10% 0.232833s 30% 0.232833s 50% 0.232833s 90% 0.232833s 99% 0.232833s
Write: 10% 0.245700s 30% 0.245700s 50% 0.245700s 90% 0.245700s 99% 0.245700s
Write: 10% 0.261275s 30% 0.261275s 50% 0.261275s 90% 0.261275s 99% 0.261275s
Write: 10% 0.233933s 30% 0.233933s 50% 0.233933s 90% 0.233933s 99% 0.233933s
Write: 10% 0.297780s 30% 0.297780s 50% 0.297780s 90% 0.297780s 99% 0.297780s
Write: 10% 0.276692s 30% 0.276692s 50% 0.276692s 90% 0.276692s 99% 0.276692s
Write: 10% 0.304839s 30% 0.304839s 50% 0.304839s 90% 0.304839s 99% 0.304839s
Write: 10% 0.273598s 30% 0.273598s 50% 0.273598s 90% 0.273598s 99% 0.273598s
Write: 10% 0.273120s 30% 0.273120s 50% 0.273120s 90% 0.273120s 99% 0.273120s
Write: 10% 0.296064s 30% 0.296064s 50% 0.296064s 90% 0.296064s 99% 0.296064s
Write: 10% 0.281772s 30% 0.281772s 50% 0.281772s 90% 0.281772s 99% 0.281772s
Write: 10% 0.298859s 30% 0.298859s 50% 0.298859s 90% 0.298859s 99% 0.298859s
Write: 10% 0.295443s 30% 0.295443s 50% 0.295443s 90% 0.295443s 99% 0.295443s
Write: 10% 0.310098s 30% 0.310098s 50% 0.310098s 90% 0.310098s 99% 0.310098s
Write: 10% 0.261762s 30% 0.261762s 50% 0.261762s 90% 0.261762s 99% 0.261762s
Write: 10% 0.307773s 30% 0.307773s 50% 0.307773s 90% 0.307773s 99% 0.307773s
Write: 10% 0.289317s 30% 0.289317s 50% 0.289317s 90% 0.289317s 99% 0.289317s
Write: 10% 0.319908s 30% 0.319908s 50% 0.319908s 90% 0.319908s 99% 0.319908s
Write: 10% 0.331741s 30% 0.331741s 50% 0.331741s 90% 0.331741s 99% 0.331741s
Write: 10% 0.317225s 30% 0.317225s 50% 0.317225s 90% 0.317225s 99% 0.317225s
Write: 10% 0.195713s 30% 0.195713s 50% 0.195713s 90% 0.195713s 99% 0.195713s
Write: 10% 0.187820s 30% 0.187820s 50% 0.187820s 90% 0.187820s 99% 0.187820s
Write: 10% 0.340048s 30% 0.340048s 50% 0.340048s 90% 0.340048s 99% 0.340048s
Write: 10% 0.246473s 30% 0.246473s 50% 0.246473s 90% 0.246473s 99% 0.246473s
Write: 10% 0.201509s 30% 0.201509s 50% 0.201509s 90% 0.201509s 99% 0.201509s
Write: 10% 0.257590s 30% 0.257590s 50% 0.257590s 90% 0.257590s 99% 0.257590s
Write: 10% 0.225443s 30% 0.225443s 50% 0.225443s 90% 0.225443s 99% 0.225443s
Write: 10% 0.220690s 30% 0.220690s 50% 0.220690s 90% 0.220690s 99% 0.220690s
Write: 10% 0.215276s 30% 0.215276s 50% 0.215276s 90% 0.215276s 99% 0.215276s
Write: 10% 0.242690s 30% 0.242690s 50% 0.242690s 90% 0.242690s 99% 0.242690s
Write: 10% 0.217136s 30% 0.217136s 50% 0.217136s 90% 0.217136s 99% 0.217136s
Write: 10% 0.268664s 30% 0.268664s 50% 0.268664s 90% 0.268664s 99% 0.268664s
Write: 10% 0.224142s 30% 0.224142s 50% 0.224142s 90% 0.224142s 99% 0.224142s
Write: 10% 0.257560s 30% 0.257560s 50% 0.257560s 90% 0.257560s 99% 0.257560s
Write: 10% 0.214987s 30% 0.214987s 50% 0.214987s 90% 0.214987s 99% 0.214987s
Write: 10% 0.255989s 30% 0.255989s 50% 0.255989s 90% 0.255989s 99% 0.255989s
Write: 10% 0.244797s 30% 0.244797s 50% 0.244797s 90% 0.244797s 99% 0.244797s
Write: 10% 0.233947s 30% 0.233947s 50% 0.233947s 90% 0.233947s 99% 0.233947s
Write: 10% 0.289400s 30% 0.289400s 50% 0.289400s 90% 0.289400s 99% 0.289400s
Write: 10% 0.242603s 30% 0.242603s 50% 0.242603s 90% 0.242603s 99% 0.242603s
Write: 10% 0.323199s 30% 0.323199s 50% 0.323199s 90% 0.323199s 99% 0.323199s
Write: 10% 0.240878s 30% 0.240878s 50% 0.240878s 90% 0.240878s 99% 0.240878s
Write: 10% 0.233880s 30% 0.233880s 50% 0.233880s 90% 0.233880s 99% 0.233880s
Write: 10% 0.268285s 30% 0.268285s 50% 0.268285s 90% 0.268285s 99% 0.268285s
Write: 10% 0.218707s 30% 0.218707s 50% 0.218707s 90% 0.218707s 99% 0.218707s
Write: 10% 0.233082s 30% 0.233082s 50% 0.233082s 90% 0.233082s 99% 0.233082s
Update: 10% 0.226708s 30% 0.226708s 50% 0.226708s 90% 0.226708s 99% 0.226708s
Step 4. read modify write
Write: 10% 0.540698s 30% 0.540698s 50% 0.540698s 90% 0.540698s 99% 0.540698s
Write: 10% 0.554135s 30% 0.554135s 50% 0.554135s 90% 0.554135s 99% 0.554135s
Write: 10% 0.439090s 30% 0.439090s 50% 0.439090s 90% 0.439090s 99% 0.439090s
Write: 10% 0.610628s 30% 0.610628s 50% 0.610628s 90% 0.610628s 99% 0.610628s
Write: 10% 0.584024s 30% 0.584024s 50% 0.584024s 90% 0.584024s 99% 0.584024s
Write: 10% 0.523866s 30% 0.523866s 50% 0.523866s 90% 0.523866s 99% 0.523866s
Write: 10% 0.633958s 30% 0.633958s 50% 0.633958s 90% 0.633958s 99% 0.633958s
Write: 10% 0.550171s 30% 0.550171s 50% 0.550171s 90% 0.550171s 99% 0.550171s
Write: 10% 0.661754s 30% 0.661754s 50% 0.661754s 90% 0.661754s 99% 0.661754s
Write: 10% 0.610487s 30% 0.610487s 50% 0.610487s 90% 0.610487s 99% 0.610487s
Write: 10% 0.661491s 30% 0.661491s 50% 0.661491s 90% 0.661491s 99% 0.661491s
Write: 10% 0.602161s 30% 0.602161s 50% 0.602161s 90% 0.602161s 99% 0.602161s
Write: 10% 0.725395s 30% 0.725395s 50% 0.725395s 90% 0.725395s 99% 0.725395s
Write: 10% 0.707127s 30% 0.707127s 50% 0.707127s 90% 0.707127s 99% 0.707127s
Write: 10% 0.696706s 30% 0.696706s 50% 0.696706s 90% 0.696706s 99% 0.696706s
Write: 10% 0.712352s 30% 0.712352s 50% 0.712352s 90% 0.712352s 99% 0.712352s
Write: 10% 0.668538s 30% 0.668538s 50% 0.668538s 90% 0.668538s 99% 0.668538s
Write: 10% 0.739019s 30% 0.739019s 50% 0.739019s 90% 0.739019s 99% 0.739019s
Write: 10% 0.674865s 30% 0.674865s 50% 0.674865s 90% 0.674865s 99% 0.674865s
Write: 10% 0.599951s 30% 0.599951s 50% 0.599951s 90% 0.599951s 99% 0.599951s
Write: 10% 0.658894s 30% 0.658894s 50% 0.658894s 90% 0.658894s 99% 0.658894s
Write: 10% 0.591112s 30% 0.591112s 50% 0.591112s 90% 0.591112s 99% 0.591112s
Write: 10% 0.595264s 30% 0.595264s 50% 0.595264s 90% 0.595264s 99% 0.595264s
Write: 10% 0.746834s 30% 0.746834s 50% 0.746834s 90% 0.746834s 99% 0.746834s
Write: 10% 0.661627s 30% 0.661627s 50% 0.661627s 90% 0.661627s 99% 0.661627s
Write: 10% 0.705846s 30% 0.705846s 50% 0.705846s 90% 0.705846s 99% 0.705846s
Write: 10% 0.619128s 30% 0.619128s 50% 0.619128s 90% 0.619128s
99% 0.619128s Write: 10% 0.624352s 30% 0.624352s 50% 0.624352s 90% 0.624352s 99% 0.624352s Write: 10% 0.794437s 30% 0.794437s 50% 0.794437s 90% 0.794437s 99% 0.794437s Write: 10% 0.600289s 30% 0.600289s 50% 0.600289s 90% 0.600289s 99% 0.600289s Write: 10% 0.627433s 30% 0.627433s 50% 0.627433s 90% 0.627433s 99% 0.627433s Write: 10% 0.624549s 30% 0.624549s 50% 0.624549s 90% 0.624549s 99% 0.624549s Write: 10% 0.642839s 30% 0.642839s 50% 0.642839s 90% 0.642839s 99% 0.642839s Write: 10% 0.602631s 30% 0.602631s 50% 0.602631s 90% 0.602631s 99% 0.602631s Write: 10% 0.613625s 30% 0.613625s 50% 0.613625s 90% 0.613625s 99% 0.613625s Write: 10% 0.614082s 30% 0.614082s 50% 0.614082s 90% 0.614082s 99% 0.614082s Write: 10% 0.624469s 30% 0.624469s 50% 0.624469s 90% 0.624469s 99% 0.624469s Write: 10% 0.627062s 30% 0.627062s 50% 0.627062s 90% 0.627062s 99% 0.627062s Write: 10% 0.630985s 30% 0.630985s 50% 0.630985s 90% 0.630985s 99% 0.630985s Write: 10% 0.604482s 30% 0.604482s 50% 0.604482s 90% 0.604482s 99% 0.604482s Write: 10% 0.604526s 30% 0.604526s 50% 0.604526s 90% 0.604526s 99% 0.604526s Write: 10% 0.605997s 30% 0.605997s 50% 0.605997s 90% 0.605997s 99% 0.605997s Write: 10% 0.609183s 30% 0.609183s 50% 0.609183s 90% 0.609183s 99% 0.609183s Write: 10% 0.611628s 30% 0.611628s 50% 0.611628s 90% 0.611628s 99% 0.611628s Write: 10% 0.622545s 30% 0.622545s 50% 0.622545s 90% 0.622545s 99% 0.622545s Write: 10% 0.621931s 30% 0.621931s 50% 0.621931s 90% 0.621931s 99% 0.621931s Write: 10% 0.620608s 30% 0.620608s 50% 0.620608s 90% 0.620608s 99% 0.620608s Write: 10% 0.627528s 30% 0.627528s 50% 0.627528s 90% 0.627528s 99% 0.627528s Write: 10% 0.631892s 30% 0.631892s 50% 0.631892s 90% 0.631892s 99% 0.631892s Write: 10% 0.606958s 30% 0.606958s 50% 0.606958s 90% 0.606958s 99% 0.606958s Write: 10% 0.617206s 30% 0.617206s 50% 0.617206s 90% 0.617206s 99% 0.617206s Write: 10% 0.623797s 30% 0.623797s 50% 0.623797s 90% 0.623797s 99% 0.623797s Write: 10% 0.613237s 30% 0.613237s 50% 0.613237s 90% 0.613237s 99% 0.613237s Write: 10% 0.630755s 30% 0.630755s 50% 0.630755s 90% 0.630755s 99% 0.630755s Write: 10% 0.617447s 30% 0.617447s 50% 0.617447s 90% 0.617447s 99% 0.617447s Write: 10% 0.614003s 30% 0.614003s 50% 0.614003s 90% 0.614003s 99% 0.614003s Write: 10% 0.623094s 30% 0.623094s 50% 0.623094s 90% 0.623094s 99% 0.623094s Write: 10% 0.634855s 30% 0.634855s 50% 0.634855s 90% 0.634855s 99% 0.634855s Write: 10% 0.626182s 30% 0.626182s 50% 0.626182s 90% 0.626182s 99% 0.626182s Write: 10% 0.626052s 30% 0.626052s 50% 0.626052s 90% 0.626052s 99% 0.626052s Write: 10% 0.639291s 30% 0.639291s 50% 0.639291s 90% 0.639291s 99% 0.639291s Write: 10% 0.634556s 30% 0.634556s 50% 0.634556s 90% 0.634556s 99% 0.634556s Write: 10% 0.639867s 30% 0.639867s 50% 0.639867s 90% 0.639867s 99% 0.639867s Write: 10% 0.637494s 30% 0.637494s 50% 0.637494s 90% 0.637494s 99% 0.637494s Update: 10% 0.095527s 30% 0.095527s 50% 0.610235s 90% 0.610235s 99% 0.610235s Read: 10% 1.240880s 30% 1.240880s 50% 1.240880s 90% 1.240880s 99% 1.240880s |97.6%| [TM] {RESULT} ydb/tests/olap/high_load/unittest >> TabletService_ChangeSchema::Basics [GOOD] >> TabletService_ChangeSchema::OnlyAdminsAllowed >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries >> DataShardCompaction::CompactBorrowed >> TGRpcRateLimiterTest::DescribeResource [GOOD] >> TGRpcRateLimiterTest::ListResources >> 
TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery >> SdkCredProvider::PingFromProviderSyncDiscovery [GOOD] >> SdkCredProvider::PingFromProviderAsyncDiscovery >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery >> QueryActorTest::StreamQuery [GOOD] >> CoordinatorTests::Route >> CoordinatorTests::Route [GOOD] >> CoordinatorTests::RouteTwoTopicWichSameName >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows >> CoordinatorTests::RouteTwoTopicWichSameName [GOOD] >> LeaderElectionTests::Test1 >> TTxDataShardLocalKMeansScan::BadRequest [GOOD] >> TTxDataShardLocalKMeansScan::MainToPosting >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery >> LeaderElectionTests::Test1 [GOOD] >> LeaderElectionTests::TestLocalMode >> LeaderElectionTests::TestLocalMode [GOOD] >> TopicSessionTests::TwoSessionsWithoutOffsets >> SequenceProxy::Basics [GOOD] >> SequenceProxy::DropRecreate >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery >> TCreateAndDropViewTest::CheckCreatedView [GOOD] >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag >> test.py::test_wait_for_cluster_ready [GOOD] >> test.py::test_counter >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData >> MediatorTimeCast::ReadStepSubscribe [GOOD] >> MediatorTimeCast::GranularTimecast ------- [TS] {asan, default-linux-x86_64, release} ydb/library/query_actor/ut/unittest >> QueryActorTest::StreamQuery [GOOD] Test command err: 2025-03-28T09:39:36.029373Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792755725523542:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:36.029442Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00082b/r3tmp/tmp3e4lZD/pdisk_1.dat 2025-03-28T09:39:36.373252Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:36.417284Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:36.417407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:36.420456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:15245 WaitRootIsUp 'dc-1'... 
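Note on the high-load output above: each Write:/Update:/Read: line reports the 10th/30th/50th/90th/99th latency percentiles for one batch of operations, which is why a batch containing a single request repeats the same duration five times. Below is a minimal stand-alone C++ sketch of that kind of reporting, assuming nearest-rank percentiles; the function names are hypothetical illustrations, not taken from the YDB sources.

#include <algorithm>
#include <cstdio>
#include <vector>

// Nearest-rank percentile over a set of latency samples (seconds).
double Percentile(std::vector<double>& samples, double p) {
    std::sort(samples.begin(), samples.end());
    size_t idx = static_cast<size_t>(p / 100.0 * (samples.size() - 1));
    return samples[idx];
}

// Prints one line in the same shape as the log: "Write: 10% ...s 30% ...s ...".
void ReportLatencies(const char* op, std::vector<double> samples) {
    std::printf("%s:", op);
    for (double p : {10.0, 30.0, 50.0, 90.0, 99.0}) {
        std::printf(" %.0f%% %.6fs", p, Percentile(samples, p));
    }
    std::printf("\n");
}

int main() {
    // With a single sample every percentile collapses to the same value,
    // matching the repeated durations in the Write lines above.
    ReportLatencies("Write", {0.215276});
    ReportLatencies("Update", {0.095527, 0.610235});
    return 0;
}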
TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:39:36.667423Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:36.692854Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:36.840318Z node 1 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-03-28T09:39:38.510823Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:39:38.513052Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:39:38.515372Z node 1 :KQP_PROXY DEBUG: Request has 18445000918931.036269s seconds to be completed 2025-03-28T09:39:38.527604Z node 1 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=1&id=ZGEwMmJjZjctNzJmNjI2NmItZDVhZDZiZmItYzNmOWE0OTc=, workerId: [1:7486792764315458843:2314], database: /dc-1, longSession: 1, local sessions count: 1 2025-03-28T09:39:38.527652Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:39:38.527851Z node 1 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:39:38.527926Z node 1 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-28T09:39:38.527971Z node 1 :KQP_PROXY DEBUG: Updated table service config. 
2025-03-28T09:39:38.528008Z node 1 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:39:38.528053Z node 1 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:39:38.528179Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:39:38.528216Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:39:38.528250Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:39:38.528266Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:39:38.528295Z node 1 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:39:38.532782Z node 1 :KQP_PROXY DEBUG: [TQueryBase] RunDataQuery: SELECT 42 2025-03-28T09:39:38.537205Z node 1 :KQP_PROXY DEBUG: Ctx: { TraceId: , Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=1&id=ZGEwMmJjZjctNzJmNjI2NmItZDVhZDZiZmItYzNmOWE0OTc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7486792764315458843:2314] 2025-03-28T09:39:38.537263Z node 1 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7486792764315458845:2357] 2025-03-28T09:39:38.540236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792764315458854:2319], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:38.540244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792764315458846:2316], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:38.540352Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:38.547586Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:39:38.556664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792764315458860:2320], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:39:38.616148Z node 1 :TX_PROXY ERROR: Actor# [1:7486792764315458911:2392] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:39.262082Z node 1 :KQP_PROXY DEBUG: Forwarded response to sender actor, requestId: 3, sender: [1:7486792764315458844:2315], selfId: [1:7486792755725523789:2278], source: [1:7486792764315458843:2314] 2025-03-28T09:39:39.262244Z node 1 :KQP_PROXY DEBUG: [TQueryBase] TEvDataQueryResult SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZGEwMmJjZjctNzJmNjI2NmItZDVhZDZiZmItYzNmOWE0OTc=, TxId: 2025-03-28T09:39:39.262926Z node 1 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZGEwMmJjZjctNzJmNjI2NmItZDVhZDZiZmItYzNmOWE0OTc=, TxId: 2025-03-28T09:39:39.263309Z node 1 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=1&id=ZGEwMmJjZjctNzJmNjI2NmItZDVhZDZiZmItYzNmOWE0OTc=, workerId: [1:7486792764315458843:2314], local sessions count: 0 2025-03-28T09:39:39.824239Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792771341079197:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:39.824321Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00082b/r3tmp/tmpbeeGJ2/pdisk_1.dat 2025-03-28T09:39:39.919440Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:39.960636Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:39.960718Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:39.962001Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:32210 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T09:39:40.130717Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 
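The TPoolCreatorActor lines above show a create-or-accept-existing pattern for the default resource pool: a "Transaction ... completed, doublechecking" error is retried, and "path exist, request accepts it" is treated as success. A minimal sketch of that control flow follows, with hypothetical stand-in types; this is not the actual actor code.

#include <cstdio>
#include <string>

enum class EStatus { Success, AlreadyExists, Retryable, Error };

// Hypothetical stand-in for the scheme operation. Simulates a race: the
// first attempt must be doublechecked, the second finds the pool created.
EStatus CreateResourcePool(const std::string& /*path*/) {
    static int calls = 0;
    return ++calls == 1 ? EStatus::Retryable : EStatus::AlreadyExists;
}

EStatus EnsureDefaultPool(const std::string& path, int maxRetries = 3) {
    for (int attempt = 0; attempt < maxRetries; ++attempt) {
        switch (CreateResourcePool(path)) {
            case EStatus::Success:
            case EStatus::AlreadyExists:
                // As in the log: "path exist, request accepts it".
                return EStatus::Success;
            case EStatus::Retryable:
                // As in the log: "Scheduled retry for error: Transaction
                // ... completed, doublechecking".
                continue;
            case EStatus::Error:
                return EStatus::Error;
        }
    }
    return EStatus::Error;
}

int main() {
    bool ok = EnsureDefaultPool("/dc-1/.metadata/workload_manager/pools/default")
              == EStatus::Success;
    std::printf("default pool ready: %s\n", ok ? "yes" : "no");
    return 0;
}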
2025-03-28T09:39:40.138295Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:39:40.145285Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:40.212541Z node 2 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-03-28T09:39:42.709449Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:39:42.710674Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:39:42.711160Z node 2 :KQP_PROXY DEBUG: Request has 18445000918926.840476s seconds to be completed 2025-03-28T09:39:42.713130Z node 2 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=2&id=NmU0Y2FjMjEtYzZlNjFkZjUtNzVjMTA2OC01Nzk1ZWIx, workerId: [2:7486792784225981775:2314], database: /dc-1, longSession: 1, local sessions count: 1 2025-03-28T09:39:42.713178Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:39:42.713288Z node 2 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:39:42.713359Z node 2 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-28T09:39:42.713388Z node 2 :KQP_PROXY DEBUG: Updated table service config. 2025-03-28T09:39:42.713404Z node 2 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:39:42.713442Z node 2 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:39:42.713493Z node 2 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than u ... metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00082b/r3tmp/tmpxUJ6CB/pdisk_1.dat 2025-03-28T09:39:47.575590Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:47.606285Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:47.606376Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:47.607802Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:4239 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
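The RunStreamQuery trace below follows a simple pull protocol: run the query, then repeatedly "Start read next stream part" and consume each TEvStreamQueryResultPart until the last part arrives or the reader cancels. A minimal sketch of such a part-reading loop, using a hypothetical reader interface rather than the actual TQueryBase API:

#include <cstdio>
#include <optional>
#include <string>
#include <vector>

struct TStreamPart {
    std::vector<std::string> Rows;
    bool Last = false;
};

// Hypothetical stream source yielding two parts and then end-of-stream.
class TStreamReader {
    int PartsLeft_ = 2;
public:
    std::optional<TStreamPart> ReadNextPart() {
        if (PartsLeft_ == 0) return std::nullopt;
        --PartsLeft_;
        return TStreamPart{{"row"}, PartsLeft_ == 0};
    }
};

int main() {
    TStreamReader reader;
    size_t total = 0;
    // Mirrors the log: keep requesting parts until the last one arrives.
    while (auto part = reader.ReadNextPart()) {
        total += part->Rows.size();
        std::printf("got part with %zu rows%s\n", part->Rows.size(),
                    part->Last ? " (last)" : "");
        if (part->Last) break;
    }
    std::printf("total rows: %zu\n", total);
    return 0;
}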
2025-03-28T09:39:47.775636Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:47.782676Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:47.811515Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-03-28T09:39:50.484307Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:39:50.485278Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:39:50.485830Z node 4 :KQP_PROXY DEBUG: Request has 18445000918919.065803s seconds to be completed 2025-03-28T09:39:50.488125Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=ZjBjODYzYzgtZWM1YTA1MzgtNDVlMGM5M2ItNGI2NDA3YzI=, workerId: [4:7486792817763828307:2314], database: /dc-1, longSession: 1, local sessions count: 1 2025-03-28T09:39:50.488174Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:39:50.488320Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:39:50.488389Z node 4 :KQP_PROXY DEBUG: Subscribed for config changes. 2025-03-28T09:39:50.488433Z node 4 :KQP_PROXY DEBUG: Updated table service config. 2025-03-28T09:39:50.488458Z node 4 :KQP_PROXY DEBUG: Updated YQL logs priority to current level: 4 2025-03-28T09:39:50.488503Z node 4 :KQP_PROXY INFO: Cannot start publishing usage, tenants: /dc-1, empty 2025-03-28T09:39:50.488591Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:39:50.488642Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:39:50.488725Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:39:50.488757Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:39:50.488784Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:39:50.489306Z node 4 :KQP_PROXY DEBUG: [TQueryBase] RunStreamQuery: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2025-03-28T09:39:50.490674Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-03-28T09:39:50.493803Z node 4 :KQP_PROXY DEBUG: TraceId: "01jqe1yg2tfe117cnq75qxnxj4", Created new session, sessionId: ydb://session/3?node_id=4&id=NzBhNDczNTktYTUyMTBlN2QtOTAxNmM0NDYtNTljYmFiY2Y=, workerId: [4:7486792817763828310:2315], database: /dc-1, longSession: 0, local sessions count: 2 2025-03-28T09:39:50.494044Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jqe1yg2tfe117cnq75qxnxj4, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=NzBhNDczNTktYTUyMTBlN2QtOTAxNmM0NDYtNTljYmFiY2Y=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [4:7486792817763828310:2315] 2025-03-28T09:39:50.494078Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [4:7486792817763828312:2358] 2025-03-28T09:39:50.494981Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792817763828311:2316], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:50.494999Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486792817763828320:2319], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:50.495040Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:50.497700Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:39:50.504413Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486792817763828326:2320], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T09:39:50.563465Z node 4 :TX_PROXY ERROR: Actor# [4:7486792817763828379:2394] txid# 281474976715660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:51.494560Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:39:52.457015Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486792804878925716:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:52.457152Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:54.447192Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-03-28T09:39:54.451173Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Cancel stream request 2025-03-28T09:39:54.451253Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=ZjBjODYzYzgtZWM1YTA1MzgtNDVlMGM5M2ItNGI2NDA3YzI=, TxId: 2025-03-28T09:39:54.452527Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Bootstrap. Database: dc-1 2025-03-28T09:39:54.615977Z node 4 :RPC_REQUEST WARN: Client lost 2025-03-28T09:39:54.676622Z node 4 :KQP_PROXY DEBUG: Request has 18445000918914.875024s seconds to be completed 2025-03-28T09:39:54.678889Z node 4 :KQP_PROXY DEBUG: Created new session, sessionId: ydb://session/3?node_id=4&id=MTBhM2VlM2ItZDRmZTc1ZDEtYzljODY5ZmQtN2VkMmY4ZTE=, workerId: [4:7486792834943697618:2337], database: /dc-1, longSession: 1, local sessions count: 3 2025-03-28T09:39:54.679073Z node 4 :KQP_PROXY DEBUG: Received create session request, trace_id: 2025-03-28T09:39:54.848648Z node 4 :KQP_PROXY DEBUG: Unexpected whiteboard info: pool size is smaller than user pool id, pool size: 0, user pool id: 1 2025-03-28T09:39:54.849124Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=ZjBjODYzYzgtZWM1YTA1MzgtNDVlMGM5M2ItNGI2NDA3YzI=, workerId: [4:7486792817763828307:2314], local sessions count: 2 2025-03-28T09:39:54.849563Z node 4 :KQP_PROXY DEBUG: [TQueryBase] RunStreamQuery: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2025-03-28T09:39:54.849660Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-03-28T09:39:54.851426Z node 4 :KQP_PROXY DEBUG: TraceId: "01jqe1ymb1a06s3h1zwjvznvdz", Created new session, sessionId: ydb://session/3?node_id=4&id=YjllZGNhYTgtNGNjMzg2ZTAtNDRhZjA2NzMtZDZiNDBkNjM=, workerId: [4:7486792834943697625:2339], database: /dc-1, longSession: 0, local sessions count: 3 2025-03-28T09:39:54.851621Z node 4 :KQP_PROXY DEBUG: Ctx: { TraceId: 01jqe1ymb1a06s3h1zwjvznvdz, Database: /dc-1, DatabaseId: , SessionId: ydb://session/3?node_id=4&id=YjllZGNhYTgtNGNjMzg2ZTAtNDRhZjA2NzMtZDZiNDBkNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: }. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 5, targetId: [4:7486792834943697625:2339] 2025-03-28T09:39:54.851646Z node 4 :KQP_PROXY DEBUG: Scheduled timeout timer for requestId: 5 timeout: 600.000000s actor id: [4:7486792834943697626:2425] 2025-03-28T09:39:54.935690Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-03-28T09:39:54.936879Z node 4 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154794926, txId: 281474976715663] shutting down 2025-03-28T09:39:54.938428Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Start read next stream part 2025-03-28T09:39:54.940116Z node 4 :KQP_PROXY DEBUG: TraceId: "01jqe1ymb1a06s3h1zwjvznvdz", Forwarded response to sender actor, requestId: 5, sender: [4:7486792834943697623:2421], selfId: [4:7486792804878925931:2267], source: [4:7486792834943697625:2339] 2025-03-28T09:39:54.940397Z node 4 :KQP_PROXY DEBUG: [TQueryBase] TEvStreamQueryResultPart SUCCESS, Issues: 2025-03-28T09:39:54.940432Z node 4 :KQP_PROXY DEBUG: [TQueryBase] Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=MTBhM2VlM2ItZDRmZTc1ZDEtYzljODY5ZmQtN2VkMmY4ZTE=, TxId: 2025-03-28T09:39:54.940846Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=YjllZGNhYTgtNGNjMzg2ZTAtNDRhZjA2NzMtZDZiNDBkNjM=, workerId: [4:7486792834943697625:2339], local sessions count: 2 2025-03-28T09:39:54.941183Z node 4 :KQP_PROXY DEBUG: Session closed, sessionId: ydb://session/3?node_id=4&id=MTBhM2VlM2ItZDRmZTc1ZDEtYzljODY5ZmQtN2VkMmY4ZTE=, workerId: [4:7486792834943697618:2337], local sessions count: 1 |97.6%| [TS] {RESULT} ydb/library/query_actor/ut/unittest >> ServerRestartTest::RestartOnGetSession >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs >> RangeOps::Intersection [GOOD] >> TabletService_ChangeSchema::OnlyAdminsAllowed [GOOD] >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead >> test.py::test_counter [GOOD] >> test.py::test_viewer_nodes >> test.py::test_viewer_nodes [GOOD] >> test.py::test_storage_groups [GOOD] >> test.py::test_viewer_sysinfo >> test.py::test_viewer_sysinfo [GOOD] >> test.py::test_viewer_vdiskinfo [GOOD] >> test.py::test_viewer_pdiskinfo [GOOD] >> test.py::test_viewer_bsgroupinfo [GOOD] >> test.py::test_viewer_tabletinfo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 
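RangeOps::Intersection, whose detailed trace appears further below, intersects key ranges whose endpoints may be inclusive or exclusive: the tighter lower bound and the tighter upper bound win. A stand-alone sketch of that bound arithmetic, assuming the real test's typed tuple cells are reduced to plain integers:

#include <cstdio>

struct TBound {
    int Key;
    bool Inclusive;
};

struct TRange {
    TBound From, To;
};

// For equal keys, the exclusive bound is the tighter one on either side.
TBound MaxFrom(TBound a, TBound b) {
    if (a.Key != b.Key) return a.Key > b.Key ? a : b;
    return a.Inclusive ? b : a;
}

TBound MinTo(TBound a, TBound b) {
    if (a.Key != b.Key) return a.Key < b.Key ? a : b;
    return a.Inclusive ? b : a;
}

TRange Intersect(const TRange& x, const TRange& y) {
    return {MaxFrom(x.From, y.From), MinTo(x.To, y.To)};
}

int main() {
    // One case from the trace below: [10 ; 20] vs [15 ; 30] -> [15 ; 20].
    TRange r = Intersect({{10, true}, {20, true}}, {{15, true}, {30, true}});
    std::printf("%c%d ; %d%c\n",
                r.From.Inclusive ? '[' : '(', r.From.Key,
                r.To.Key, r.To.Inclusive ? ']' : ')');
    return 0;
}

Like the trace, this sketch can produce inverted results such as [25 ; 20] for non-overlapping inputs; deciding emptiness is left to the caller, which matches the cases printed without a "correct" line.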
2025-03-28T09:39:21.343956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:21.344051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:21.344090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:21.344132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:21.344169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:21.344197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:21.344245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:21.344304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:21.344573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:21.423461Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:39:21.423523Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:39:21.430361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:21.430478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:21.430610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:21.436410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:21.436574Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:21.437220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:21.437419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:21.439371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:21.440629Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:21.440690Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:21.440826Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:21.440877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:21.440933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:21.441099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:39:21.448349Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:39:21.571376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:21.571571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.571736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:21.571928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:21.571973Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.574002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:21.574104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:21.574239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.574286Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:21.574327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:21.574352Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:21.576115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.576161Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:21.576201Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:21.577689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2025-03-28T09:39:21.577731Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.577780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:21.577831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:21.580612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:21.582097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:21.582261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:21.583130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:21.583265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:21.583323Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:21.583533Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:21.583575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:21.583724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:21.583794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:21.586260Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:21.586322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:21.586456Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:21.586488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:39:21.586715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:21.586762Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:21.586837Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:21.586864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:21.586894Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:21.586920Z no ... CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { 
InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 
72057594046678944 2025-03-28T09:39:57.674245Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:39:57.674428Z node 22 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 203us result status StatusSuccess 2025-03-28T09:39:57.675089Z node 22 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" 
LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_range_ops/unittest >> RangeOps::Intersection [GOOD] Test command err: first [(Uint64 : NULL, Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 5)] result [(Uint64 : 10) ; (Uint64 : 5)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 15)] 
result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 17)] result [(Uint64 : 15) ; (Uint64 : 17)] correct [(Uint64 : 15) ; (Uint64 : 17)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 20)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 30)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 20)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 30)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 25) ; (Uint64 : 30)] result [(Uint64 : 25) ; (Uint64 : 20)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result ((Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result ((Uint64 : 10) ; (Uint64 : 15)] correct ((Uint64 : 10) ; (Uint64 : 15)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)) result ((Uint64 : 10) ; (Uint64 : 15)) correct ((Uint64 : 10) ; (Uint64 : 15)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; (Uint64 : 20)) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 20)) correct [(Uint64 : 1) ; (Uint64 : 20)) first [(Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 10) ; (Uint64 : 20)) correct [(Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; (Uint64 : 10)] second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 10)] correct [(Uint64 : 1) ; (Uint64 : 10)] first [(Uint64 : NULL) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 1) ; (Uint64 : 10)) correct [(Uint64 : 1) ; (Uint64 : 10)) |97.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_range_ops/unittest >> test.py::test_viewer_tabletinfo [GOOD] >> test.py::test_viewer_describe >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] >> test.py::test_viewer_describe [GOOD] >> test.py::test_viewer_cluster [GOOD] >> test.py::test_viewer_tenantinfo [GOOD] >> test.py::test_viewer_tenantinfo_db >> SequenceProxy::DropRecreate [GOOD] >> AttributesMD5Test::AmazonSampleWithString [GOOD] >> 
AttributesMD5Test::AmazonSampleWithBinary [GOOD] >> InflyTest::AddMessage [GOOD] >> InflyTest::DeleteMessage [GOOD] >> InflyTest::ChangeMesageVisibility [GOOD] >> InflyTest::ReceiveMessages [GOOD] >> InflyTest::DeleteReceivedMessage [GOOD] >> MessageDelayStatsTest::All [GOOD] >> MessageDelayStatsTest::BigTimeDiff [GOOD] >> MessageDelayStatsTest::MaxMessageDelay [GOOD] >> Metering::BillingRecords >> test.py::test_viewer_tenantinfo_db [GOOD] >> test.py::test_viewer_healthcheck >> test.py::test_viewer_healthcheck [GOOD] >> test.py::test_viewer_acl >> test.py::test_viewer_acl [GOOD] >> test.py::test_viewer_autocomplete >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection >> test.py::test_viewer_autocomplete [GOOD] >> test.py::test_viewer_check_access [GOOD] >> test.py::test_viewer_query >> DataShardBackgroundCompaction::ShouldCompact [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceproxy/ut/unittest >> SequenceProxy::DropRecreate [GOOD] Test command err: 2025-03-28T09:39:55.070627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:39:55.070710Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:55.161286Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:39:55.912832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944 2025-03-28T09:39:56.166856Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:39:56.167424Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/000668/r3tmp/tmpFmLGLj/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:39:56.168175Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/000668/r3tmp/tmpFmLGLj/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/000668/r3tmp/tmpFmLGLj/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4497379652738272928 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:39:57.051086Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:39:57.051154Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:57.100317Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:39:57.605359Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944 2025-03-28T09:39:57.843161Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:39:57.843691Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/000668/r3tmp/tmpg2viUI/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:39:57.843928Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/000668/r3tmp/tmpg2viUI/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/000668/r3tmp/tmpg2viUI/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15194144402964693750 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:39:57.992624Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715658:0, at schemeshard: 72057594046578944 2025-03-28T09:39:58.316147Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715659:0, at schemeshard: 72057594046578944 |97.6%| [TS] {RESULT} ydb/core/tx/sequenceproxy/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> test.py::test_viewer_query [GOOD] >> test.py::test_viewer_query_issue_13757 >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount >> TGRpcRateLimiterTest::ListResources [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi >> test.py::test_viewer_query_issue_13757 [GOOD] >> test.py::test_viewer_query_issue_13945 >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::InvalidQuery >> test.py::test_viewer_query_issue_13945 [GOOD] >> test.py::test_pqrb_tablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2025-03-28T09:39:26.975509Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792714971329555:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:26.975708Z node 1 :METADATA_PROVIDER ERROR: 
fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001580/r3tmp/tmpPYvXuA/pdisk_1.dat 2025-03-28T09:39:27.224681Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30648, node 1 2025-03-28T09:39:27.278794Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:27.278815Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:27.278825Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:27.278983Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:27.326553Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:27.326673Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:27.328345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:27.419545Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:27.423309Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:27.423358Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:27.424945Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:63541, port: 63541 2025-03-28T09:39:27.425046Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:27.479252Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-03-28T09:39:27.523984Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****P4Kg (F60242AA) () has now valid token of ldapuser@ldap 2025-03-28T09:39:29.347831Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792728846087034:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:29.347916Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001580/r3tmp/tmpkzXzoP/pdisk_1.dat 2025-03-28T09:39:29.460043Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7950, node 2 2025-03-28T09:39:29.486886Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:29.486974Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:29.489453Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:29.515705Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:29.515735Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:29.515742Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:29.515839Z node 2 :NET_CLASSIFIER ERROR: got bad distributable 
configuration 2025-03-28T09:39:29.603576Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:29.605898Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:29.605925Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:29.606590Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:16037, port: 16037 2025-03-28T09:39:29.606707Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:29.659260Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:16037. Invalid credentials 2025-03-28T09:39:29.659856Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****ZGIw (0DC8F41A) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:16037. Invalid credentials)' 2025-03-28T09:39:32.002704Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486792740905924713:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:32.002765Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001580/r3tmp/tmpq9rNsG/pdisk_1.dat 2025-03-28T09:39:32.103808Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 29962, node 3 2025-03-28T09:39:32.148456Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:32.148537Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:32.150125Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:32.154969Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:32.154994Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:32.155001Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:32.155171Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:32.301163Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:32.304799Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:32.304831Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:32.305606Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:13353, port: 13353 2025-03-28T09:39:32.305705Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:32.355227Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:13353. 
Invalid credentials 2025-03-28T09:39:32.355794Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****x7Fw (B932C54D) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:13353. Invalid credentials)' 2025-03-28T09:39:34.783674Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486792747479381323:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:34.783722Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001580/r3tmp/tmpDWVKD3/pdisk_1.dat 2025-03-28T09:39:34.865988Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13973, node 4 2025-03-28T09:39:34.913535Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:34.913591Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:34.914823Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:34.925946Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:34.925966Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:34.925971Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:34.926071Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:35.054804Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:35.055747Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:35.055776Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:35.056302Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:12394, port: 12394 2025-03-28T09:39:35.056391Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:35.107325Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:35.107818Z node 4 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:12394 return no entries 2025-03-28T09:39:35.108347Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****blhQ (0C3C94C9) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldaps://localhost:12394 return no entries)' 2025-03-28T09:39:38.089515Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486792766870298812:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:38.089601Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001580/r3tmp/tmpMxDSG4/pdisk_1.dat 2025-03-28T09:39:38.211565Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:38.240563Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:38.240639Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:38.245048Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21142, node 5 2025-03-28T09:39:38.276234Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:38.276259Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:38.276267Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:38.276421Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:38.417823Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:38.421540Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:38.421569Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:38.422320Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:10100, port: 10100 2025-03-28T09:39:38.422398Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:38.479340Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:38.523028Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:39:38.523516Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:39:38.523559Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:38.571048Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:38.619048Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:38.620285Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****QtHQ 
(167DD5BE) () has now valid token of ldapuser@ldap 2025-03-28T09:39:43.089947Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486792766870298812:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:43.090085Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:43.119124Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****QtHQ (167DD5BE) 2025-03-28T09:39:43.119217Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:10100, port: 10100 2025-03-28T09:39:43.119270Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:43.171190Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:43.215401Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:39:43.215984Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:39:43.216029Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:43.259047Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:43.303051Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:43.304350Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****QtHQ (167DD5BE) () has now valid token of ldapuser@ldap 2025-03-28T09:39:46.120445Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****QtHQ (167DD5BE) 2025-03-28T09:39:46.120557Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:10100, port: 10100 2025-03-28T09:39:46.120629Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:46.171168Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:46.218999Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:39:46.219537Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:39:46.219581Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:46.263052Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:46.311016Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:46.312213Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****QtHQ (167DD5BE) () has now valid token of ldapuser@ldap 2025-03-28T09:39:49.106962Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486792813520689243:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:49.107049Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001580/r3tmp/tmp8VXyhq/pdisk_1.dat 2025-03-28T09:39:49.199724Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:49.241460Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:49.241552Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 1325, node 6 2025-03-28T09:39:49.243211Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:49.271550Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:49.271574Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:49.271587Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:49.271724Z node 6 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:49.397735Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:49.401516Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:49.401565Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:49.402323Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:25612, port: 25612 2025-03-28T09:39:49.402397Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:49.455277Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:49.499603Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****LRsg (EBF5C844) () has now valid token of ldapuser@ldap 2025-03-28T09:39:54.107103Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486792813520689243:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:54.107182Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:55.118924Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****LRsg (EBF5C844) 2025-03-28T09:39:55.119769Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:25612, port: 25612 2025-03-28T09:39:55.119936Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:55.183251Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:55.227662Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****LRsg 
(EBF5C844) () has now valid token of ldapuser@ldap
2025-03-28T09:39:59.120861Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****LRsg (EBF5C844)
2025-03-28T09:39:59.121276Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldaps, uris: ldaps://localhost:25612, port: 25612
2025-03-28T09:39:59.121348Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net
2025-03-28T09:39:59.183136Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf
2025-03-28T09:39:59.227674Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****LRsg (EBF5C844) () has now valid token of ldapuser@ldap
>> test.py::test_pqrb_tablet [GOOD]
>> test.py::test_viewer_nodes_issue_14992
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections [GOOD]
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection
>> test.py::test_viewer_nodes_issue_14992 [GOOD]
>> test.py::test_operations_list [GOOD]
>> test.py::test_operations_list_page [GOOD]
>> test.py::test_operations_list_page_bad
>> test.py::test_operations_list_page_bad [GOOD]
>> test.py::test_topic_data
>> TTxDataShardLocalKMeansScan::MainToPosting [GOOD]
>> TTxDataShardLocalKMeansScan::MainToBuild
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection [GOOD]
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection
>> TIndexProcesorTests::TestCreateIndexProcessor
>> MediatorTimeCast::GranularTimecast [GOOD]
>> DiscoveryIsNotBroken::NoKafkaEndpointInDiscovery [GOOD]
>> DiscoveryIsNotBroken::NoKafkaSslEndpointInDiscovery
>> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD]
>> TabletService_ExecuteMiniKQL::BasicMiniKQLRead [GOOD]
>> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection [GOOD]
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount
>> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD]
Test command err: 2 2
|97.6%| [TM] {RESULT} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD]
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/time_cast/ut/unittest >> MediatorTimeCast::GranularTimecast [GOOD]
Test command err: 2025-03-28T09:39:54.841575Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:54.842125Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:54.842177Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000846/r3tmp/tmp9GyCEc/pdisk_1.dat 2025-03-28T09:39:55.347291Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvSubscribeReadStep{ CoordinatorId# 72057594046316545 } 2025-03-28T09:39:55.348058Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientConnected 2025-03-28T09:39:55.352191Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 1 LastAcquireStep: 0 NextAcquireStep: 0 2025-03-28T09:39:55.391464Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:55.446191Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:55.446351Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:55.458974Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:55.543385Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 500 2025-03-28T09:39:55.648354Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 1000 2025-03-28T09:39:55.829111Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 2000 2025-03-28T09:39:55.956182Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 3000 2025-03-28T09:39:56.082225Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 4000 2025-03-28T09:39:56.219280Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 5000 2025-03-28T09:39:56.264181Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 7000 } 2025-03-28T09:39:56.405972Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 6000 2025-03-28T09:39:56.535634Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 7000 2025-03-28T09:39:56.538273Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientDestroyed 2025-03-28T09:39:56.559104Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE EvClientConnected 2025-03-28T09:39:56.559926Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE 
TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 2 LastAcquireStep: 0 NextAcquireStep: 7000 2025-03-28T09:39:56.572267Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 12000 } 2025-03-28T09:39:56.677419Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 7500 2025-03-28T09:39:56.771927Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 8000 2025-03-28T09:39:56.920165Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 9000 2025-03-28T09:39:57.046895Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 10000 2025-03-28T09:39:57.195839Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 11000 2025-03-28T09:39:57.344643Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [1:24:2071] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 12000 2025-03-28T09:40:00.327163Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:00.327680Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:00.327758Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000846/r3tmp/tmpyuTQmx/pdisk_1.dat 2025-03-28T09:40:00.620275Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365120 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-03-28T09:40:00.621074Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 1 Tablets: 72057594047365120 MinStep: 0 2025-03-28T09:40:00.621135Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-03-28T09:40:00.621188Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:650:2548] {TEvRegisterTabletResult TabletId# 72057594047365120 Entry# 0} 2025-03-28T09:40:00.621543Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-03-28T09:40:00.621722Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 0 2025-03-28T09:40:00.621904Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 0} 2025-03-28T09:40:00.622151Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365121 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-03-28T09:40:00.622246Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 2 AddTablets: 72057594047365121 2025-03-28T09:40:00.622301Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:653:2550] {TEvRegisterTabletResult TabletId# 72057594047365121 Entry# 0} 2025-03-28T09:40:00.622502Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 2 LatestStep: 0 2025-03-28T09:40:00.622704Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvRegisterTablet TabletId# 72057594047365123 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2025-03-28T09:40:00.622785Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 3 AddTablets: 72057594047365123 2025-03-28T09:40:00.622819Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Sender# [2:654:2551] {TEvRegisterTabletResult TabletId# 72057594047365123 Entry# 0} 2025-03-28T09:40:00.622952Z node 2 
:TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 0 2025-03-28T09:40:00.645056Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:00.681254Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:00.681372Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:00.692919Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:00.767450Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 500 2025-03-28T09:40:00.767539Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 500} 2025-03-28T09:40:00.881169Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 1000 2025-03-28T09:40:00.881262Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 1000} 2025-03-28T09:40:01.075843Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 2000 2025-03-28T09:40:01.075917Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2000} ... have step 0 and 2000 after sleep 2025-03-28T09:40:01.165608Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... tx1 planned at step 2500 ... tablet1 at 2499 ... tablet2 at 2499 ... tablet3 at 2500 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet 2025-03-28T09:40:01.261399Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 3 LatestStep: 3000 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... tx2 planned at step 3000 ... tablet1 at 2499 ... tablet2 at 2499 ... tablet3 at 3000 ... unblocking tx1 at tablet2 ... unblocking NKikimr::TEvTxProcessing: ... R_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 0 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-28T09:40:01.445554Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-28T09:40:01.456286Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 2500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 2499 FrozenSteps: 2499 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3000 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-28T09:40:01.467046Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3000 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-28T09:40:01.477669Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 2999 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-28T09:40:01.499080Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 2999 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... 
blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 3499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-28T09:40:01.520314Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 FrozenTablets: 72057594047365121 FrozenSteps: 3499 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 UnfrozenTablets: 72057594047365121 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-28T09:40:01.541748Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 7 LatestStep: 3500 UnfrozenTablets: 72057594047365121 ... restarting mediator 2025-03-28T09:40:01.553440Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientDestroyed 2025-03-28T09:40:01.553742Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 8 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-03-28T09:40:01.553792Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-03-28T09:40:01.554356Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-03-28T09:40:01.554500Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 9 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-03-28T09:40:01.554544Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-03-28T09:40:01.555217Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-03-28T09:40:01.555326Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 10 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-03-28T09:40:01.555356Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-03-28T09:40:01.556108Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected 2025-03-28T09:40:01.556197Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to 
Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 11 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 3500 2025-03-28T09:40:01.556225Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2025-03-28T09:40:01.560419Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE EvClientConnected ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2025-03-28T09:40:01.560804Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 0} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... fully unblocking tx1 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR 2025-03-28T09:40:01.583765Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 2500} ... tablet1 at 2500 ... tablet2 at 3500 ... tablet3 at 3500 ... fully unblocking tx2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... 
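The TEvGranularUpdate records in this trace encode a simple invariant: the bucket's LatestStep keeps advancing, while any tablet listed in FrozenTablets stays pinned at its paired FrozenSteps value until a later update lists it in UnfrozenTablets. A tablet's observed time barrier is therefore its frozen step while frozen and the bucket's latest step otherwise, which is exactly what the "tablet1 at 2499 ... tablet3 at 2500" checks assert. A toy model of that bookkeeping (deliberately simplified, not YDB's actual timecast classes):

    #include <cstdint>
    #include <unordered_map>

    class TGranularBucket {
    public:
        // LatestStep only moves forward, e.g. 2500 -> 3000 -> 3500 above.
        void AdvanceLatestStep(uint64_t step) {
            if (step > LatestStep)
                LatestStep = step;
        }

        // "FrozenTablets: 72057594047365120 ... FrozenSteps: 2499"
        void Freeze(uint64_t tabletId, uint64_t frozenStep) {
            FrozenSteps[tabletId] = frozenStep;
        }

        // "UnfrozenTablets: 72057594047365121"
        void Unfreeze(uint64_t tabletId) {
            FrozenSteps.erase(tabletId);
        }

        // The step a subscriber may consider delivered for this tablet.
        uint64_t GetTimeBarrier(uint64_t tabletId) const {
            auto it = FrozenSteps.find(tabletId);
            return it != FrozenSteps.end() ? it->second : LatestStep;
        }

    private:
        uint64_t LatestStep = 0;
        std::unordered_map<uint64_t, uint64_t> FrozenSteps;
    };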
blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR 2025-03-28T09:40:01.594883Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3000} ... tablet1 at 3000 ... tablet2 at 3500 ... tablet3 at 3500 ... fully unblocking tx3 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR 2025-03-28T09:40:01.605953Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: Actor# [2:24:2071] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3500} ... tablet1 at 3500 ... tablet2 at 3500 ... tablet3 at 3500 |97.6%| [TM] {RESULT} ydb/core/tx/time_cast/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2025-03-28T09:39:29.047557Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792725343886654:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:29.047617Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00157f/r3tmp/tmpFxCjOI/pdisk_1.dat 2025-03-28T09:39:29.353980Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:29.374872Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:29.374971Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:29.376589Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29233, node 1 2025-03-28T09:39:29.428826Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:29.428860Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:29.428877Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:29.429054Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:29.573460Z node 1 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:29.577443Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:29.577470Z node 1 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:29.578116Z node 1 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:5364, port: 5364 2025-03-28T09:39:29.578668Z node 1 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: 
cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:29.583659Z node 1 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2025-03-28T09:39:29.627477Z node 1 :TICKET_PARSER DEBUG: Ticket eyJh****E_8A (35EA5166) () has now valid token of ldapuser@ldap 2025-03-28T09:39:31.395307Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792737205771122:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:31.395384Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00157f/r3tmp/tmppZmR5A/pdisk_1.dat 2025-03-28T09:39:31.489033Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13984, node 2 2025-03-28T09:39:31.527861Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:31.527961Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:31.529439Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:31.536543Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:31.536562Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:31.536569Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:31.536665Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:31.640993Z node 2 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:31.644897Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:31.644931Z node 2 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:31.645683Z node 2 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:62382, port: 62382 2025-03-28T09:39:31.645755Z node 2 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:31.648838Z node 2 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:62382. Invalid credentials 2025-03-28T09:39:31.649100Z node 2 :TICKET_PARSER DEBUG: Ticket eyJh****5uMA (E86BB5FA) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:62382. 
Invalid credentials)' 2025-03-28T09:39:33.996759Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486792743957566958:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:33.996843Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00157f/r3tmp/tmpK9Jxnx/pdisk_1.dat 2025-03-28T09:39:34.092238Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7908, node 3 2025-03-28T09:39:34.136288Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:34.136387Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:34.137935Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:34.147787Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:34.147810Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:34.147816Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:34.147959Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:34.247328Z node 3 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:34.250921Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:34.250940Z node 3 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:34.251620Z node 3 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:31930, port: 31930 2025-03-28T09:39:34.251685Z node 3 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:34.262296Z node 3 :LDAP_AUTH_PROVIDER DEBUG: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:31930. Invalid credentials 2025-03-28T09:39:34.262545Z node 3 :TICKET_PARSER DEBUG: Ticket eyJh****9WXw (39857B50) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:31930. 
Invalid credentials)' 2025-03-28T09:39:36.769045Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486792757134048963:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:36.769172Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00157f/r3tmp/tmpDnDxuu/pdisk_1.dat 2025-03-28T09:39:36.834357Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12019, node 4 2025-03-28T09:39:36.882487Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:36.882519Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:36.882529Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:36.882677Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:36.892468Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:36.892566Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:36.894115Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:37.054558Z node 4 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:37.057500Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:37.057541Z node 4 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:37.058164Z node 4 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:10396, port: 10396 2025-03-28T09:39:37.058245Z node 4 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:37.061688Z node 4 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:37.062026Z node 4 :LDAP_AUTH_PROVIDER DEBUG: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:10396 return no entries 2025-03-28T09:39:37.062297Z node 4 :TICKET_PARSER DEBUG: Ticket eyJh****FB3Q (FA1B0F7F) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:10396 return no entries)' 2025-03-28T09:39:40.065936Z node 5 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7486792776003541142:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:40.065986Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00157f/r3tmp/tmpYxrHTG/pdisk_1.dat 2025-03-28T09:39:40.157898Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:40.174161Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:40.174247Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:40.175671Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6089, node 5 2025-03-28T09:39:40.243333Z node 5 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:40.243360Z node 5 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:40.243373Z node 5 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:40.243513Z node 5 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:40.398871Z node 5 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:40.399247Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:40.399272Z node 5 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:40.400147Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:30106, port: 30106 2025-03-28T09:39:40.400227Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:40.403644Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:40.451022Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:39:40.451560Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:39:40.451654Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:40.495004Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:40.539771Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:40.540639Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****kepw 
(0ADD62F6) () has now valid token of ldapuser@ldap 2025-03-28T09:39:45.066091Z node 5 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7486792776003541142:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:45.066198Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:46.074731Z node 5 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****kepw (0ADD62F6) 2025-03-28T09:39:46.074876Z node 5 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:30106, port: 30106 2025-03-28T09:39:46.074985Z node 5 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:46.077560Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:46.118947Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2025-03-28T09:39:46.119408Z node 5 :LDAP_AUTH_PROVIDER DEBUG: Try to get nested groups - tree traversal 2025-03-28T09:39:46.119451Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:46.166990Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:46.214970Z node 5 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2025-03-28T09:39:46.215566Z node 5 :TICKET_PARSER DEBUG: Ticket eyJh****kepw (0ADD62F6) () has now valid token of ldapuser@ldap 2025-03-28T09:39:51.046057Z node 6 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7486792823082736197:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:51.046113Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00157f/r3tmp/tmp6K5wUS/pdisk_1.dat 2025-03-28T09:39:51.161422Z node 6 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26540, node 6 2025-03-28T09:39:51.181005Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:51.181095Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:51.182590Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:51.200879Z node 6 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:51.200901Z node 6 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:51.200908Z node 6 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:51.201019Z node 6 :NET_CLASSIFIER ERROR: got bad distributable 
configuration 2025-03-28T09:39:51.402189Z node 6 :TICKET_PARSER DEBUG: Updated state for /Root keys 1 2025-03-28T09:39:51.405123Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2025-03-28T09:39:51.405162Z node 6 :TICKET_PARSER TRACE: CanInitLoginToken, target database candidates(1): /Root 2025-03-28T09:39:51.406018Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:16517, port: 16517 2025-03-28T09:39:51.406127Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:51.409370Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:51.451392Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****S-ZQ (3F06FD14) () has now valid token of ldapuser@ldap 2025-03-28T09:39:56.046449Z node 6 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7486792823082736197:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:56.046554Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:56.058244Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****S-ZQ (3F06FD14) 2025-03-28T09:39:56.058393Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:16517, port: 16517 2025-03-28T09:39:56.058519Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:39:56.062157Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:39:56.111295Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****S-ZQ (3F06FD14) () has now valid token of ldapuser@ldap 2025-03-28T09:40:00.060460Z node 6 :TICKET_PARSER DEBUG: Refreshing ticket eyJh****S-ZQ (3F06FD14) 2025-03-28T09:40:00.060531Z node 6 :LDAP_AUTH_PROVIDER DEBUG: init: scheme: ldap, uris: ldap://localhost:16517, port: 16517 2025-03-28T09:40:00.060607Z node 6 :LDAP_AUTH_PROVIDER DEBUG: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2025-03-28T09:40:00.067394Z node 6 :LDAP_AUTH_PROVIDER DEBUG: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2025-03-28T09:40:00.111408Z node 6 :TICKET_PARSER DEBUG: Ticket eyJh****S-ZQ (3F06FD14) () has now valid token of ldapuser@ldap >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection |97.6%| [TA] $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:38:48.194773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:48.194867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:48.194909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:48.194940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:48.194977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:48.194998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:48.195038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:48.195094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:48.195349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:48.252585Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:38:48.252641Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:38:48.258226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:48.258335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxUpgradeSchema.Execute 2025-03-28T09:38:48.258433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:48.262901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:48.263048Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:48.263564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:48.263739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:48.265183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:48.266147Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:48.266188Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:48.266289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:48.266325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:48.266354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:48.266470Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:38:48.271982Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:38:48.382234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:48.382493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:48.382718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:48.382951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:48.382998Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:48.385647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:48.385782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:48.385986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:48.386047Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:48.386090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:48.386125Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:48.388363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:48.388418Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:48.388468Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:48.390025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:48.390064Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:48.390104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:48.390140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:48.393496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:48.395358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:48.395537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:48.396590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:48.396733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:48.396790Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:48.397045Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:48.397097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:48.397260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:48.397398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:48.399353Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:48.399407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:48.399593Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:48.399633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:38:48.399872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:48.399916Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:48.400026Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:48.400056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:48.400095Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:48.400131Z no ... BrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 
8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:40:02.078776Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:40:02.079068Z node 34 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 338us result status StatusSuccess 2025-03-28T09:40:02.079905Z node 34 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 
72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 
LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:40:02.091086Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1099:2886] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-28T09:40:02.091200Z node 34 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][34:1057:2886] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-03-28T09:40:02.091358Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1099:2886] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743154802046503 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743154802046503 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1743154802046503 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:40:02.095401Z node 34 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][34:1099:2886] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-03-28T09:40:02.095494Z node 34 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][34:1057:2886] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } |97.6%| [TA] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD] >> TCreateAndDropViewTest::InvalidQuery [GOOD] >> TCreateAndDropViewTest::ParsingSecurityInvoker >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding >> DataShardStats::OneChannelStatsCorrect >> TIndexProcesorTests::TestCreateIndexProcessor [GOOD] >> TIndexProcesorTests::TestSingleCreateQueueEvent >> test.py::test_topic_data [GOOD] >> test.py::test_transfer_describe >> test.py::test_order_conflict [GOOD] >> test.py::test_missing_value [GOOD] >> test.py::test_unexpected_value [GOOD] >> test.py::test_local >> Metering::BillingRecords [GOOD] >> Metering::MockedNetClassifierOnly >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings >> KqpTpch::Query01 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitIndexWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:37:21.010411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:37:21.010493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:21.010531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:37:21.010573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:37:21.010621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:37:21.010641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2025-03-28T09:37:21.010678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:37:21.010756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:37:21.010985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:21.067971Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:37:21.068016Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:37:21.073435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:21.073515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:37:21.073607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:37:21.077430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:37:21.077545Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:37:21.077998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:21.078146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:37:21.079523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:21.080596Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:21.080638Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:21.080731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:37:21.080762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:21.080786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:37:21.080891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:37:21.087698Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 
2025-03-28T09:37:21.169498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:37:21.169682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:21.169833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:37:21.170033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:37:21.170073Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:21.171981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:21.172068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:37:21.172192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:21.172233Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:37:21.172262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:37:21.172283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:37:21.173804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:21.173844Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:37:21.173878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:37:21.175034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:21.175067Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:21.175100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:21.175132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:37:21.177485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:37:21.178592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:37:21.178745Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:37:21.179466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:37:21.179563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:37:21.179593Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:21.179752Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:37:21.179786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:37:21.179897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:37:21.180009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:37:21.181346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:37:21.181393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:37:21.181520Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:37:21.181549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:37:21.181731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:37:21.181758Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:37:21.181826Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:21.181848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:37:21.181872Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:37:21.181891Z no ... 
CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 
101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409547 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:40:03.883453Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:40:03.883685Z node 93 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" took 261us result status StatusSuccess 2025-03-28T09:40:03.884521Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" 
IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944
>> test.py::test_transfer_describe [GOOD]
>> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced [GOOD]
>> KqpScripting::StreamOperationTimeout
>> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi [GOOD]
>> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings [GOOD]
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding
>> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead [GOOD]
>> TabletService_ExecuteMiniKQL::MalformedParams
>> TTxDataShardLocalKMeansScan::MainToBuild [GOOD]
>> TTxDataShardLocalKMeansScan::BuildToPosting
>> GraphShard::NormalizeAndDownsample1 [GOOD]
>> GraphShard::NormalizeAndDownsample2 [GOOD]
>> GraphShard::NormalizeAndDownsample3 [GOOD]
>> GraphShard::NormalizeAndDownsample4 [GOOD]
>> GraphShard::NormalizeAndDownsample5 [GOOD]
>> GraphShard::NormalizeAndDownsample6 [GOOD]
>> GraphShard::CheckHistogramToPercentileConversions [GOOD]
>> GraphShard::CreateGraphShard
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding [GOOD]
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding
|97.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/py3test >> test.py::test_transfer_describe [GOOD]
|97.7%| [TM] {RESULT} ydb/core/viewer/tests/py3test
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding [GOOD]
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding
>> test.py::test[solomon-BadDownsamplingAggregation-]
>> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding [GOOD]
>> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery
>> GraphShard::CreateGraphShard [GOOD]
>> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery [GOOD]
>> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries
>> TIndexProcesorTests::TestSingleCreateQueueEvent [GOOD]
>> TIndexProcesorTests::TestReindexSingleQueue
>> DataCleanup::ForceDataCleanup
>> TopicSessionTests::TwoSessionsWithoutOffsets [GOOD] >>
TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery ------- [TS] {asan, default-linux-x86_64, release} ydb/core/graph/shard/ut/unittest >> GraphShard::CreateGraphShard [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:40:07.773488Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:40:07.773680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:40:07.773743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:40:07.773785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:40:07.775813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:40:07.775883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:40:07.776002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:40:07.776102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:40:07.777457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:07.883864Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:40:07.883930Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:07.902454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:07.902771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:40:07.902979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:40:07.910339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:40:07.910557Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:40:07.913940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:40:07.915636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:40:07.920681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:40:07.929264Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:40:07.929364Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2025-03-28T09:40:07.930872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:40:07.930964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:40:07.931077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:40:07.931208Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:40:07.948743Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:40:08.078762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:40:08.081235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:40:08.082329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:40:08.083629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:40:08.083706Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:40:08.086973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:40:08.087142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:40:08.087372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:40:08.087521Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:40:08.087565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:40:08.087598Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:40:08.089805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:40:08.089876Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:40:08.089925Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:40:08.091868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:40:08.091911Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:40:08.091955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:40:08.092047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:40:08.101619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:40:08.103596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:40:08.104477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:40:08.105766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:40:08.105924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:40:08.105993Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:40:08.107342Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:40:08.107417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:40:08.107659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:40:08.107752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:40:08.110230Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:40:08.110325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:40:08.110523Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:40:08.110561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:40:08.116065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:40:08.116143Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:40:08.116283Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:40:08.116320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:40:08.116369Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:40:08.116399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:40:08.116436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:40:08.116477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:40:08.116511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:40:08.116559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:40:08.116645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:40:08.116683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:40:08.116717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:40:08.129086Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:40:08.129246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:40:08.129294Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 78944 2025-03-28T09:40:08.512752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:1 Got OK TEvInitTenantSchemeShardResult from schemeshard tablet: 72075186234409546 shardIdx: 72057594046678944:2 at schemeshard: 72057594046678944 2025-03-28T09:40:08.516285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-28T09:40:08.516589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186234409548, partId: 1 2025-03-28T09:40:08.516737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply execute, operationId: 102:1, at schemeshard: 72057594046678944, message: Status: SUCCESS OnTabletId: 72075186234409548 2025-03-28T09:40:08.516780Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 102:1 HandleReply TEvConfigureStatus operationId:102:1 at schemeshard:72057594046678944 2025-03-28T09:40:08.516817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TConfigureParts operationId# 102:1 Got OK TEvConfigureStatus from tablet# 72075186234409548 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2025-03-28T09:40:08.516851Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 3 -> 128 2025-03-28T09:40:08.517502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-28T09:40:08.523942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationReply complete, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-28T09:40:08.524122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-28T09:40:08.524187Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 102:1, at schemeshard: 72057594046678944 
2025-03-28T09:40:08.524268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 102:1, at tablet# 72057594046678944 2025-03-28T09:40:08.524314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 102 ready parts: 2/2 2025-03-28T09:40:08.524461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:40:08.535669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2025-03-28T09:40:08.535842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000003 2025-03-28T09:40:08.536212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000003, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:40:08.536335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000003 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:40:08.536376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute operation part is already done, operationId: 102:0 2025-03-28T09:40:08.536431Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2025-03-28T09:40:08.536719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 128 -> 240 2025-03-28T09:40:08.536800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2025-03-28T09:40:08.536922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-03-28T09:40:08.537025Z node 1 :FLAT_TX_SCHEMESHARD INFO: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 2, ActorId:[1:398:2366], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 72075186234409549, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2025-03-28T09:40:08.540290Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:40:08.540336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2025-03-28T09:40:08.540505Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:40:08.540549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 102, path id: 2 2025-03-28T09:40:08.540825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-28T09:40:08.540882Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TSyncHive, operationId 102:1, ProgressState, NeedSyncHive: 0 2025-03-28T09:40:08.540916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 102:1 240 -> 240 2025-03-28T09:40:08.541553Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:40:08.541667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2025-03-28T09:40:08.541696Z node 1 :FLAT_TX_SCHEMESHARD INFO: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2025-03-28T09:40:08.541725Z node 1 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2025-03-28T09:40:08.541763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2025-03-28T09:40:08.541847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/2, is published: true 2025-03-28T09:40:08.552228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2025-03-28T09:40:08.552292Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 102:1 ProgressState 2025-03-28T09:40:08.552370Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-03-28T09:40:08.552398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-03-28T09:40:08.552435Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#102:1 progress is 2/2 2025-03-28T09:40:08.552459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-03-28T09:40:08.552487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2025-03-28T09:40:08.552521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2025-03-28T09:40:08.552594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:0 2025-03-28T09:40:08.552636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:0 2025-03-28T09:40:08.552783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2025-03-28T09:40:08.552818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 102:1 2025-03-28T09:40:08.552836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 102:1 2025-03-28T09:40:08.552922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2025-03-28T09:40:08.555049Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2025-03-28T09:40:08.556508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2025-03-28T09:40:08.556569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2025-03-28T09:40:08.556973Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2025-03-28T09:40:08.557091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2025-03-28T09:40:08.557129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:562:2491] TestWaitNotification: OK eventTxId 102 2025-03-28T09:40:08.558478Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/db1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:40:08.558677Z node 1 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/db1" took 222us result status StatusSuccess 2025-03-28T09:40:08.560606Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/db1" PathDescription { Self { Name: "db1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 GraphShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.7%| [TS] {RESULT} ydb/core/graph/shard/ut/unittest >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable >> TopicSessionTests::TwoSessionWithoutPredicate >> TCreateAndDropViewTest::ParsingSecurityInvoker [GOOD] >> TCreateAndDropViewTest::ListCreatedView >> ServerRestartTest::RestartOnGetSession [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] >> TMemoryController::SharedCache [GOOD] >> 
TMemoryController::SharedCache_ConfigLimit
>> TabletService_ExecuteMiniKQL::MalformedParams [GOOD]
>> TabletService_ExecuteMiniKQL::MalformedProgram
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] [GOOD]
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4]
>> TIndexProcesorTests::TestReindexSingleQueue [GOOD]
>> TIndexProcesorTests::TestDeletedQueueNotReindexed
>> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus [GOOD]
>> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery
>> DiscoveryIsNotBroken::NoKafkaSslEndpointInDiscovery [GOOD]
>> DiscoveryIsNotBroken::HaveKafkaEndpointInDiscovery
>> TTxDataShardLocalKMeansScan::BuildToPosting [GOOD]
>> TTxDataShardLocalKMeansScan::BuildToBuild
>> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery [GOOD]
>> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery
>> KeyValueGRPCService::SimpleAcquireLock
>> Graph::CreateGraphShard
>> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi [GOOD]
>> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter
>> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery [GOOD]
>> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] [GOOD]
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable]
>> TTxDataShardPrefixKMeansScan::BadRequest
>> KqpScripting::StreamOperationTimeout [GOOD]
>> DataShardCompaction::CompactBorrowed [GOOD]
>> DataShardCompaction::CompactBorrowedTxStatus
>> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery [GOOD]
>> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData
>> test_timeout.py::TestTimeout::test_timeout
>> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData [GOOD]
>> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs
>> Coordinator::ReadStepSubscribe
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamOperationTimeout [GOOD]
Test command err:
Trying to start YDB, gRPC: 22579, MsgBus: 24639
2025-03-28T09:38:58.547044Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792592959135731:2061];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:38:58.547119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a69/r3tmp/tmpmEGeZT/pdisk_1.dat
2025-03-28T09:38:58.805661Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 22579, node 1
2025-03-28T09:38:58.871679Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:38:58.871713Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:38:58.871721Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:38:58.871878Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T09:38:58.900488Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:38:58.900569Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:38:58.902301Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:24639 TClient is connected to server localhost:24639 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:59.351268Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:59.383652Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:59.499563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:59.633876Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:59.691174Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:00.990789Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792601549072110:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:00.990905Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:01.238977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.266313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.292257Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.318484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.371996Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.397527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:01.469351Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792605844039919:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:01.469455Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:01.469466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792605844039924:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:01.472417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:01.480622Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792605844039926:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:01.553638Z node 1 :TX_PROXY ERROR: Actor# [1:7486792605844039979:3444] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:02.628910Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154742644, txId: 281474976710672] shutting down 2025-03-28T09:39:02.843354Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154742882, txId: 281474976710675] shutting down 2025-03-28T09:39:03.074099Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154743099, txId: 281474976710678] shutting down 2025-03-28T09:39:03.319267Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154743358, txId: 281474976710681] shutting down 2025-03-28T09:39:03.547374Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792592959135731:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:03.547435Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:03.597182Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154743631, txId: 281474976710684] shutting down 2025-03-28T09:39:03.840338Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154743876, txId: 281474976710687] shutting down 2025-03-28T09:39:04.099466Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154744128, txId: 281474976710690] shutting down 2025-03-28T09:39:04.359942Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154744394, txId: 281474976710693] shutting down 2025-03-28T09:39:04.578240Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154744611, txId: 281474976710696] shutting down 2025-03-28T09:39:04.789947Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154744828, txId: 281474976710699] shutting down 2025-03-28T09:39:05.082365Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154745115, txId: 281474976710702] shutting down 2025-03-28T09:39:05.368256Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154745402, txId: 281474976710705] shutting down 2025-03-28T09:39:05.657057Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154745689, txId: 281474976710708] shutting down 2025-03-28T09:39:05.937696Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154745976, txId: 281474976710711] shutting down 2025-03-28T09:39:06.200266Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154746235, txId: 281474976710714] shutting down 2025-03-28T09:39:06.411578Z node 1 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154746445, txId: 281474976710717] shutting down 2025-03-28T09:39:06.708991Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154746746, txId: 281474976710720] shutting down 2025-03-28T09:39:06.965496Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154747005, txId: 281474976710723] shutting down 2025-03-28T09:39:07.225103Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154747257, txId: 281474976710726] shutting down 2025-03-28T09:39:07.506650Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154747544, txId: 281474976710729] shutting down 2025-03-28T09:39:07.775529Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154747803, txId: 281474976710732] shutting down 2025-03-28T09:39:08.014089Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154748048, txId: 281474976710735] shutting down 2025-03-28T09:39:08.280157Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our sna ... ting down 2025-03-28T09:39:56.894754Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154796922, txId: 281474976711212] shutting down 2025-03-28T09:39:57.238239Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154797265, txId: 281474976711215] shutting down 2025-03-28T09:39:57.613159Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154797643, txId: 281474976711218] shutting down 2025-03-28T09:39:57.932542Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154797958, txId: 281474976711221] shutting down 2025-03-28T09:39:58.276932Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154798301, txId: 281474976711224] shutting down 2025-03-28T09:39:58.662126Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154798686, txId: 281474976711227] shutting down 2025-03-28T09:39:59.107311Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154799134, txId: 281474976711230] shutting down 2025-03-28T09:39:59.479662Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154799512, txId: 281474976711233] shutting down 2025-03-28T09:39:59.862034Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154799890, txId: 281474976711236] shutting down 2025-03-28T09:40:00.256462Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154800282, txId: 281474976711239] shutting down 2025-03-28T09:40:00.760952Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154800786, txId: 281474976711242] shutting down 2025-03-28T09:40:01.195709Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154801178, txId: 281474976711245] shutting down 2025-03-28T09:40:01.582907Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 
1743154801612, txId: 281474976711248] shutting down 2025-03-28T09:40:02.045541Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154802060, txId: 281474976711251] shutting down 2025-03-28T09:40:02.433941Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154802459, txId: 281474976711254] shutting down 2025-03-28T09:40:02.865200Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154802893, txId: 281474976711257] shutting down 2025-03-28T09:40:03.231146Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154803257, txId: 281474976711260] shutting down 2025-03-28T09:40:03.605537Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154803628, txId: 281474976711263] shutting down 2025-03-28T09:40:04.024004Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154804048, txId: 281474976711266] shutting down 2025-03-28T09:40:04.513837Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154804496, txId: 281474976711269] shutting down Trying to start YDB, gRPC: 26918, MsgBus: 64891 2025-03-28T09:40:05.735577Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792881048673405:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:05.735838Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a69/r3tmp/tmpNXMlLg/pdisk_1.dat 2025-03-28T09:40:05.877106Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:05.911208Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:05.911282Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:05.912501Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26918, node 2 2025-03-28T09:40:05.959587Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:40:05.959608Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:40:05.959617Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:40:05.959786Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:64891 TClient is connected to server localhost:64891 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:40:06.521189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:40:06.529572Z node 2 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:40:06.556507Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 2025-03-28T09:40:06.645882Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:40:06.895755Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:40:07.007784Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:40:09.923490Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792898228544349:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:09.923599Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:09.971765Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:40:10.025978Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:40:10.078950Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:40:10.120026Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:40:10.165994Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:40:10.220351Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:40:10.280906Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792902523512158:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:10.281083Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:10.281409Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792902523512164:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:10.286502Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:40:10.296105Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792902523512166:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:40:10.367589Z node 2 :TX_PROXY ERROR: Actor# [2:7486792902523512219:3442] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:40:10.733061Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792881048673405:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:10.733163Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:40:11.738880Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=NjdkOWE0NDAtYTNlN2VkYzktYWFjOTI3YjYtOTJmNzVkYTE=, ActorId: [2:7486792906818479780:2489], ActorState: ExecuteState, TraceId: 01jqe1z4sn7xbawymja4h3ttq6, Create QueryResponse for error on request, msg: |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> ServerRestartTest::RestartOnGetSession [GOOD] |97.7%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> Graph::CreateGraphShard [GOOD] >> Graph::UseGraphShard >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob >> KqpTpch::Query01 [GOOD] >> KqpTpch::Query02 >> TCreateAndDropViewTest::ListCreatedView [GOOD] >> TCreateAndDropViewTest::CreateSameViewTwice >> TIndexProcesorTests::TestDeletedQueueNotReindexed [GOOD] >> TIndexProcesorTests::TestManyMessages >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanScalar >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection >> TabletService_ExecuteMiniKQL::MalformedProgram [GOOD] >> TabletService_ExecuteMiniKQL::DryRunEraseRow >> TTxDataShardSampleKScan::RunScan >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount >> Metering::MockedNetClassifierOnly [GOOD] >> Metering::MockedNetClassifierLabelTransformation >> TMemoryController::SharedCache_ConfigLimit [GOOD] >> TMemoryController::MemTable >> TTxDataShardPrefixKMeansScan::BadRequest [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToPosting >> KeyValueGRPCService::SimpleAcquireLock [GOOD] >> KeyValueGRPCService::SimpleExecuteTransaction >> Graph::UseGraphShard [GOOD] >> Graph::MemoryBackendFullCycle >> TopicSessionTests::TwoSessionWithoutPredicate [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections >> TTxDataShardLocalKMeansScan::BuildToBuild [GOOD] >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate >> DataCleanup::ForceDataCleanup [GOOD] >> DataCleanup::ForceDataCleanupWithoutCompaction >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection >> 
test_timeout.py::TestTimeout::test_timeout [GOOD] >> test_commit.py::TestCommit::test_commit >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_local_kmeans/unittest >> TTxDataShardLocalKMeansScan::BuildToBuild [GOOD] Test command err: 2025-03-28T09:39:53.314670Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792830517171478:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:53.314737Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005df/r3tmp/tmphxhuS1/pdisk_1.dat 2025-03-28T09:39:53.721692Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:53.737129Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:53.737240Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:53.740152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:53.786902Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:39:53.826887Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:39:53.864235Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7486792830517172055:2295] 2025-03-28T09:39:53.864575Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:53.880680Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:53.880762Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:39:53.883902Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:39:53.883963Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:39:53.883996Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:39:53.886102Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:39:53.886189Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:39:53.886221Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7486792830517172069:2295] in generation 1 2025-03-28T09:39:53.887529Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:39:53.928583Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:39:53.929765Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state 
WaitScheme: missing processing params 2025-03-28T09:39:53.929854Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7486792830517172073:2296] 2025-03-28T09:39:53.929869Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:39:53.929878Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:39:53.929894Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:39:53.931015Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792830517172052:2296], serverId# [1:7486792830517172072:2305], sessionId# [0:0:0] 2025-03-28T09:39:53.931145Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:39:53.931294Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:39:53.931348Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:39:53.931362Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:39:53.931437Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:39:53.931463Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:39:53.931482Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:39:53.931799Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:39:53.932055Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-03-28T09:39:53.933845Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:39:53.935184Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:39:53.951728Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:39:53.954080Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792830517172088:2314], serverId# [1:7486792830517172089:2315], sessionId# [0:0:0] 2025-03-28T09:39:53.958538Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743154794003 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154794003 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:39:53.958588Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:39:53.958719Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:39:53.958773Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:39:53.958787Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:39:53.958802Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743154794003:281474976710657] in PlanQueue unit at 72075186224037888 2025-03-28T09:39:53.959063Z node 1 
:TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743154794003:281474976710657 keys extracted: 0 2025-03-28T09:39:53.959179Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:39:53.959300Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:39:53.959358Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:39:53.962653Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:39:53.966202Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:39:53.967527Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743154794002 2025-03-28T09:39:53.967552Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:39:53.967580Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743154794010 2025-03-28T09:39:53.967642Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743154794003} 2025-03-28T09:39:53.967683Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:39:53.967720Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:39:53.967742Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:39:53.967786Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:39:53.967840Z node 1 :TX_DATASHARD DEBUG: Complete [1743154794003 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7486792830517171873:2179], exec latency: 6 ms, propose latency: 8 ms 2025-03-28T09:39:53.967929Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-03-28T09:39:53.968047Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:39:53.972234Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-03-28T09:39:53.972306Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:39:53.984139Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792830517172127:2343], serverId# [1:7486792830517172128:2344], sessionId# [0:0:0] 2025-03-28T09:39:53.986426Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:39:53.986587Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710658 at tablet 72075186224037888 2025-03-28T09:39:53.987902Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:39:53.989252Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710658 at step 1743154794038 at tablet 72075186224037888 { Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154794038 
MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:39:53.989276Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:39:53.989365Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:39:53.989385Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:39:53.989407Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743154794038:281474976710658] in PlanQueue unit at 72075186224037888 2025-03-28T09:39:53.989580Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743154794038:281474976710658 keys extracted: 0 2025-03-28T09:39:53.989972Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:39:53.990379Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743154794038} 2025-03-28T09:39:53.990425Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:39:53.990457Z node 1 :TX_DATASHARD DEBUG: Complete [1743154794038 : 281474976710658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7486792830517172122:2339], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:39:53.990490Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:39:54.001585Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792830517172138:2354], serverId# [1:7486792834812139435:2355], sessionId# [0:0:0] 2025-03-28T09:39:54.003106Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:39:54.003221Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710659 at tablet 72075186224037888 2025-03-28T09:39:54.004692Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037 ... 
ARD DEBUG: GetNextActiveOp at 72075186224037903 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:40:16.321959Z node 5 :TX_DATASHARD DEBUG: Found ready operation [1743154816368:281474976715710] in PlanQueue unit at 72075186224037903 2025-03-28T09:40:16.322212Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037903 loaded tx from db 1743154816368:281474976715710 keys extracted: 0 2025-03-28T09:40:16.322324Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037903 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:40:16.322427Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037903 2025-03-28T09:40:16.322464Z node 5 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037903 tableId# [OwnerId: 72057594046644480, LocalPathId: 21] schema version# 1 2025-03-28T09:40:16.322893Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037903 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:40:16.323271Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037903 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:16.324032Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:16.324636Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037903 time 1743154816298 2025-03-28T09:40:16.324652Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037903 2025-03-28T09:40:16.324672Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037903 2025-03-28T09:40:16.325737Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037903 step# 1743154816368} 2025-03-28T09:40:16.325786Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037903 2025-03-28T09:40:16.325847Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037903 2025-03-28T09:40:16.325872Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037903 2025-03-28T09:40:16.325898Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037903 2025-03-28T09:40:16.325948Z node 5 :TX_DATASHARD DEBUG: Complete [1743154816368 : 281474976715710] from 72075186224037903 at tablet 72075186224037903 send result to client [5:7486792905373608670:2140], exec latency: 0 ms, propose latency: 3 ms 2025-03-28T09:40:16.325978Z node 5 :TX_DATASHARD INFO: 72075186224037903 Sending notify to schemeshard 72057594046644480 txId 281474976715710 state Ready TxInFly 0 2025-03-28T09:40:16.326021Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037903 2025-03-28T09:40:16.326592Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037903 coordinator 72057594046316545 last step 0 next step 1743154816375 2025-03-28T09:40:16.328149Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715710 datashard 72075186224037903 state Ready 2025-03-28T09:40:16.328189Z node 5 :TX_DATASHARD DEBUG: 72075186224037903 Got TEvSchemaChangedResult from SS at 72075186224037903 2025-03-28T09:40:16.333619Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715711:0, at schemeshard: 72057594046644480 2025-03-28T09:40:16.335514Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:16.335645Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037903 2025-03-28T09:40:16.340421Z node 5 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037904 actor [5:7486792926848447657:2400] 2025-03-28T09:40:16.340661Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:16.351948Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:16.352017Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:40:16.353477Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037904 2025-03-28T09:40:16.353534Z node 5 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037904 2025-03-28T09:40:16.353573Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037904 2025-03-28T09:40:16.353893Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:40:16.353953Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:40:16.353983Z node 5 :TX_DATASHARD DEBUG: DataShard 72075186224037904 persisting started state actor id [5:7486792926848447674:2400] in generation 1 2025-03-28T09:40:16.355560Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:40:16.355598Z node 5 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037904 2025-03-28T09:40:16.355683Z node 5 :TX_DATASHARD DEBUG: 72075186224037904 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:40:16.355728Z node 5 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037904, actorId: [5:7486792926848447676:2401] 2025-03-28T09:40:16.355746Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037904 2025-03-28T09:40:16.355758Z node 5 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037904, state: WaitScheme 2025-03-28T09:40:16.355771Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037904 2025-03-28T09:40:16.355888Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037904 2025-03-28T09:40:16.355950Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037904 2025-03-28T09:40:16.356001Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037904, clientId# [5:7486792926848447659:4007], serverId# [5:7486792926848447665:4011], sessionId# [0:0:0] 2025-03-28T09:40:16.356030Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037904 2025-03-28T09:40:16.356049Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037904 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:16.356066Z node 5 :TX_DATASHARD INFO: No tx to execute at 72075186224037904 TxInFly 0 2025-03-28T09:40:16.356083Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037904 2025-03-28T09:40:16.356302Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037904 2025-03-28T09:40:16.356517Z node 5 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037904 txId 281474976715711 ssId 72057594046644480 seqNo 2:31 
2025-03-28T09:40:16.356575Z node 5 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715711 at tablet 72075186224037904 2025-03-28T09:40:16.357685Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037904 2025-03-28T09:40:16.357766Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037904 2025-03-28T09:40:16.357820Z node 5 :TX_DATASHARD DEBUG: 72075186224037904 not sending time cast registration request in state WaitScheme 2025-03-28T09:40:16.360417Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037904, clientId# [5:7486792926848447682:4022], serverId# [5:7486792926848447684:4024], sessionId# [0:0:0] 2025-03-28T09:40:16.360690Z node 5 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715711 at step 1743154816403 at tablet 72075186224037904 { Transactions { TxId: 281474976715711 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154816403 MediatorID: 72057594046382081 TabletID: 72075186224037904 } 2025-03-28T09:40:16.360711Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037904 2025-03-28T09:40:16.360802Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037904 2025-03-28T09:40:16.360825Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037904 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:40:16.360848Z node 5 :TX_DATASHARD DEBUG: Found ready operation [1743154816403:281474976715711] in PlanQueue unit at 72075186224037904 2025-03-28T09:40:16.361078Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037904 loaded tx from db 1743154816403:281474976715711 keys extracted: 0 2025-03-28T09:40:16.361196Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037904 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:40:16.361294Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037904 2025-03-28T09:40:16.361331Z node 5 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037904 tableId# [OwnerId: 72057594046644480, LocalPathId: 22] schema version# 1 2025-03-28T09:40:16.361753Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037904 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:40:16.361872Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:16.361991Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037903 2025-03-28T09:40:16.362118Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037904 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:16.363476Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037904 2025-03-28T09:40:16.363522Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037904 time 1743154816402 2025-03-28T09:40:16.363536Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037904 2025-03-28T09:40:16.363558Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037904 coordinator 72057594046316545 last step 0 next step 1743154816410 2025-03-28T09:40:16.363608Z node 5 
:TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037904 step# 1743154816403} 2025-03-28T09:40:16.363639Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037904 2025-03-28T09:40:16.364069Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037904 2025-03-28T09:40:16.364095Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037904 2025-03-28T09:40:16.364121Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037904 2025-03-28T09:40:16.364177Z node 5 :TX_DATASHARD DEBUG: Complete [1743154816403 : 281474976715711] from 72075186224037904 at tablet 72075186224037904 send result to client [5:7486792905373608670:2140], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:40:16.364210Z node 5 :TX_DATASHARD INFO: 72075186224037904 Sending notify to schemeshard 72057594046644480 txId 281474976715711 state Ready TxInFly 0 2025-03-28T09:40:16.364253Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037904 2025-03-28T09:40:16.364954Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715711 datashard 72075186224037904 state Ready 2025-03-28T09:40:16.364994Z node 5 :TX_DATASHARD DEBUG: 72075186224037904 Got TEvSchemaChangedResult from SS at 72075186224037904 |97.7%| [TM] {RESULT} ydb/core/tx/datashard/ut_local_kmeans/unittest >> TIndexProcesorTests::TestManyMessages [GOOD] >> TIndexProcesorTests::TestOver1000Queues >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount >> test_commit.py::TestCommit::test_commit [GOOD] >> DataShardStats::OneChannelStatsCorrect [GOOD] >> DataShardStats::MultipleChannelsStatsCorrect >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection >> DataShardTxOrder::RandomPoints_DelayData [GOOD] >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD] >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD] >> TCreateAndDropViewTest::CreateSameViewTwice [GOOD] >> TCreateAndDropViewTest::CreateViewOccupiedName >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection >> LongTxService::BasicTransactions >> TabletService_ExecuteMiniKQL::DryRunEraseRow [GOOD] >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is 
[0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:39:27.968461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:27.968585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:27.968632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:27.968680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:27.968717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:27.968740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:27.968783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:27.968862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:27.969156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:28.024679Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:39:28.024722Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:39:28.029762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:28.029843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:28.029969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:28.034182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:28.034338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:28.034943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2025-03-28T09:39:28.035192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:28.036807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:28.037934Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:28.037982Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:28.038084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:28.038132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:28.038171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:28.038316Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:39:28.043792Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:39:28.146662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:28.146873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:28.147063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:28.147299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:28.147348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:28.149359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:28.149466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:28.149636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:28.149678Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:28.149712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to 
create, do next state 2025-03-28T09:39:28.149738Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:28.151355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:28.151394Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:28.151438Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:28.152812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:28.152846Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:28.152888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:28.152924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:28.155468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:28.156945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:28.157102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:28.157849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:28.157952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:28.157992Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:28.158179Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:28.158214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:28.158352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:28.158424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:28.160098Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:28.160154Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:28.160345Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:28.160377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:39:28.160591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:28.160637Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:28.160714Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:28.160738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:28.160778Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:28.160810Z no ... 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 
SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:40:18.949769Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:40:18.950051Z node 30 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 314us result status StatusSuccess 2025-03-28T09:40:18.952249Z node 30 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: 
"indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 
200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:40:18.963503Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:853:2684] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-28T09:40:18.963626Z node 30 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][30:792:2684] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-03-28T09:40:18.963805Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:853:2684] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743154818936256 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743154818936256 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743154818936256 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:40:18.966481Z node 30 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][30:853:2684] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-03-28T09:40:18.966591Z node 30 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][30:792:2684] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] [GOOD] >> KeyValueGRPCService::SimpleExecuteTransaction [GOOD] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGeneration >> DiscoveryIsNotBroken::HaveKafkaEndpointInDiscovery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount >> DiscoveryIsNotBroken::HaveKafkaSslEndpointInDiscovery >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] >> TTxDataShardSampleKScan::RunScan [GOOD] >> TTxDataShardSampleKScan::ScanBadParameters ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_background_compaction/unittest >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD] Test command err: 2025-03-28T09:39:56.853827Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:56.854335Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:56.854407Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00074d/r3tmp/tmpOK0ZYk/pdisk_1.dat 2025-03-28T09:39:57.320494Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:39:57.373581Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:57.416629Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:39:57.418539Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:39:57.418882Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:57.419470Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:57.431823Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:57.515951Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:39:57.516040Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:39:57.517236Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:39:57.610509Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:39:57.610606Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:39:57.611196Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:39:57.611278Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:39:57.611607Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:39:57.611771Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:39:57.611958Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:39:57.612274Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:39:57.615278Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:39:57.616678Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:39:57.616752Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:39:57.670150Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:39:57.671271Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:39:57.671690Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:39:57.671904Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:57.715889Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:39:57.716585Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:57.716670Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:39:57.718366Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:39:57.718437Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:39:57.718540Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:39:57.720332Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:39:57.720458Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:39:57.720525Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:39:57.731129Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:39:57.753586Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:39:57.755020Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:39:57.755136Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:39:57.755161Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:39:57.755187Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:39:57.755210Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:39:57.755396Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-28T09:39:57.755443Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:39:57.756377Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:39:57.756480Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:39:57.757811Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:39:57.757850Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:39:57.757932Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:39:57.757962Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:39:57.757993Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:39:57.758035Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:39:57.758119Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:39:57.758237Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:39:57.758263Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:39:57.758293Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:39:57.758344Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:39:57.758377Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:39:57.758466Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:39:57.758803Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:39:57.758862Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:39:57.759101Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:39:57.759148Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:39:57.759185Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:39:57.759216Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:39:57.759250Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:39:57.759505Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:39:57.759533Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:39:57.759564Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:39:57.759599Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-28T09:39:57.759632Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:39:57.759656Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:39:57.759679Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:39:57.759710Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:39:57.759725Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:39:57.760929Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:39:57.760988Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:39:57.771687Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 74976715660] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:40:19.404629Z node 5 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:40:19.404710Z node 5 :TX_DATASHARD TRACE: Activated operation [0:281474976715660] at 72075186224037888 2025-03-28T09:40:19.404756Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is Executed 2025-03-28T09:40:19.404781Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:40:19.404807Z node 5 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-03-28T09:40:19.404833Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit ExecuteKqpDataTx 2025-03-28T09:40:19.404877Z node 5 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2025-03-28T09:40:19.404940Z node 5 :TX_DATASHARD TRACE: Operation [0:281474976715660] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4191860 2025-03-28T09:40:19.405227Z node 5 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-28T09:40:19.405307Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:40:19.405342Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-03-28T09:40:19.405387Z node 5 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:40:19.405429Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit FinishPropose 2025-03-28T09:40:19.405503Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:40:19.405538Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit 
FinishPropose 2025-03-28T09:40:19.405578Z node 5 :TX_DATASHARD TRACE: Add [0:281474976715660] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:40:19.405610Z node 5 :TX_DATASHARD TRACE: Trying to execute [0:281474976715660] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:40:19.405659Z node 5 :TX_DATASHARD TRACE: Execution status for [0:281474976715660] at 72075186224037888 is Executed 2025-03-28T09:40:19.405683Z node 5 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715660] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:40:19.405711Z node 5 :TX_DATASHARD TRACE: Execution plan for [0:281474976715660] at 72075186224037888 has finished 2025-03-28T09:40:19.416654Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:40:19.416762Z node 5 :TX_DATASHARD TRACE: Complete execution for [0:281474976715660] at 72075186224037888 on unit FinishPropose 2025-03-28T09:40:19.416826Z node 5 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715660 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-28T09:40:19.416940Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:19.419057Z node 5 :TX_PROXY DEBUG: actor# [5:59:2106] Handle TEvNavigate describe path /Root/table-1 2025-03-28T09:40:19.419214Z node 5 :TX_PROXY DEBUG: Actor# [5:856:2691] HANDLE EvNavigateScheme /Root/table-1 2025-03-28T09:40:19.419741Z node 5 :TX_PROXY DEBUG: Actor# [5:856:2691] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T09:40:19.419881Z node 5 :TX_PROXY DEBUG: Actor# [5:856:2691] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table-1" Options { ShowPrivateTable: true } 2025-03-28T09:40:19.421053Z node 5 :TX_PROXY DEBUG: Actor# [5:856:2691] Handle TEvDescribeSchemeResult Forward to# [5:594:2519] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-03-28T09:40:19.422113Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:860:2695], Recipient [5:667:2571]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:40:19.422180Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:40:19.422238Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:859:2694], serverId# [5:860:2695], sessionId# [0:0:0] 2025-03-28T09:40:19.422377Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553169, Sender [5:858:2693], Recipient [5:667:2571]: NKikimrTxDataShard.TEvGetInfoRequest 2025-03-28T09:40:19.423720Z node 5 
:TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:863:2698], Recipient [5:667:2571]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:40:19.423797Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:40:19.423862Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:862:2697], serverId# [5:863:2698], sessionId# [0:0:0] 2025-03-28T09:40:19.424058Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [5:861:2696], Recipient [5:667:2571]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-03-28T09:40:19.424229Z node 5 :TX_DATASHARD INFO: Started background compaction# 1 of 72075186224037888 tableId# 2 localTid# 1001, requested from [5:861:2696], partsCount# 0, memtableSize# 728, memtableWaste# 3880, memtableRows# 3 2025-03-28T09:40:19.430909Z node 5 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:01.543654Z 2025-03-28T09:40:19.431025Z node 5 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2025-03-28T09:40:19.431099Z node 5 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [5:861:2696]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:40:19.431921Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [5:658:2565], Recipient [5:667:2571]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-03-28T09:40:19.432527Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [5:870:2704], Recipient [5:667:2571]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:40:19.432586Z node 5 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:40:19.432635Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [5:869:2703], serverId# [5:870:2704], sessionId# [0:0:0] 2025-03-28T09:40:19.432779Z node 5 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [5:868:2702], Recipient [5:667:2571]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-03-28T09:40:19.432886Z node 5 :TX_DATASHARD DEBUG: Background compaction of tablet# 72075186224037888 of path# [OwnerId: 72057594046644480, LocalPathId: 2], requested from# [5:868:2702] is not needed |97.7%| [TM] {RESULT} ydb/core/tx/datashard/ut_background_compaction/unittest >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding >> LongTxService::BasicTransactions [GOOD] >> LongTxService::AcquireSnapshot >> TTxDataShardReshuffleKMeansScan::BadRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayData [GOOD] Test command err: 2025-03-28T09:37:05.061246Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:37:05.066382Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:37:05.066744Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:122:2148] 
2025-03-28T09:37:05.066937Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:37:05.098216Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:106:2138], Recipient [1:122:2148]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:37:05.150479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:37:05.150521Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:37:05.156831Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:37:05.157865Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:37:05.158948Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:37:05.159005Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:37:05.159066Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:37:05.159391Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:37:05.159452Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:37:05.159527Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:190:2148] in generation 2 2025-03-28T09:37:05.210383Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:37:05.234362Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:37:05.234496Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:37:05.234559Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:215:2213] 2025-03-28T09:37:05.234583Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:37:05.234607Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:37:05.234637Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:05.234811Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:05.234846Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:05.235011Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:37:05.235065Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:37:05.235115Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:37:05.235157Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:37:05.235181Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:37:05.235204Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:37:05.235224Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:37:05.235248Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:37:05.235294Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:37:05.235354Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:211:2210], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:05.235376Z node 1 :TX_DATASHARD 
TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:05.235409Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:209:2209], serverId# [1:211:2210], sessionId# [0:0:0] 2025-03-28T09:37:05.237114Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:99:2134], Recipient [1:122:2148]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 99 RawX2: 4294969430 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2025-03-28T09:37:05.237153Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:37:05.237209Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 9437184 2025-03-28T09:37:05.237293Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2025-03-28T09:37:05.237324Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2025-03-28T09:37:05.237352Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 1 at tablet 9437184 2025-03-28T09:37:05.237386Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:37:05.237421Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2025-03-28T09:37:05.237448Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2025-03-28T09:37:05.237470Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:37:05.237678Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2025-03-28T09:37:05.237703Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2025-03-28T09:37:05.237739Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit FinishPropose 2025-03-28T09:37:05.237762Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:37:05.237793Z node 1 :TX_DATASHARD TRACE: Execution status for [0:1] at 9437184 is DelayComplete 2025-03-28T09:37:05.237819Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2025-03-28T09:37:05.237842Z node 1 :TX_DATASHARD TRACE: Add [0:1] at 9437184 to execution unit WaitForPlan 2025-03-28T09:37:05.237868Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2025-03-28T09:37:05.237887Z node 1 :TX_DATASHARD TRACE: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2025-03-28T09:37:05.249531Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 9437184 2025-03-28T09:37:05.249582Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2025-03-28T09:37:05.249607Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:1] at 9437184 on unit FinishPropose 2025-03-28T09:37:05.249639Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2025-03-28T09:37:05.249713Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme 2025-03-28T09:37:05.250101Z node 1 :TX_DATASHARD TRACE: 
StateWork, received event# 269877761, Sender [1:221:2219], Recipient [1:122:2148]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:05.250136Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:37:05.250170Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 9437184, clientId# [1:220:2218], serverId# [1:221:2219], sessionId# [0:0:0] 2025-03-28T09:37:05.250215Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:99:2134], Recipient [1:122:2148]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2025-03-28T09:37:05.250234Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:37:05.250325Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2025-03-28T09:37:05.250374Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:37:05.250408Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2025-03-28T09:37:05.250430Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit PlanQueue 2025-03-28T09:37:05.253113Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 99 RawX2: 4294969430 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2025-03-28T09:37:05.253165Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:37:05.253349Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:122:2148], Recipient [1:122:2148]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:05.253376Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:37:05.253415Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:37:05.253440Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:37:05.253470Z node 1 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 9437184 2025-03-28T09:37:05.253502Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2025-03-28T09:37:05.253529Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit PlanQueue 2025-03-28T09:37:05.253559Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:37:05.253598Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit PlanQueue 2025-03-28T09:37:05.253628Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit LoadTxDetails 2025-03-28T09:37:05.253651Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit LoadTxDetails 2025-03-28T09:37:05.253770Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 9437184 loaded tx from db 1000001:1 keys extracted: 0 2025-03-28T09:37:05.253794Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is Executed 2025-03-28T09:37:05.253808Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit LoadTxDetails 2025-03-28T09:37:05.253821Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit ProtectSchemeEchoes 2025-03-28T09:37:05.253836Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit ProtectSchemeEchoes 
2025-03-28T09:37:05.253878Z node 1 :TX_DATASHARD TRACE: Execution status for [1000001:1] at 9437184 is ExecutedNoMoreRestarts 2025-03-28T09:37:05.253892Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000001:1] at 9437184 executing on unit ProtectSchemeEchoes 2025-03-28T09:37:05.253912Z node 1 :TX_DATASHARD TRACE: Add [1000001:1] at 9437184 to execution unit BuildAndWaitDependencies 2025-03-28T09:37:05.253933Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000001:1] at 9437184 on unit BuildAndWaitDependencies 2025-03-28T09:37:05.253992Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically complete end at 9437184 2025-03-28T09:37:05.254033Z node 1 :TX_DATASHARD TRACE: Operation [1000001:1] is the new logically incomplete end at 9437184 2025-03-28T09:37:05.254057Z node 1 :TX_DATASHARD TRACE: Activated operation [1000001:1] at 9437184 2025-03-28T09:37:05.254098Z node 1 :TX_D ... 4 consumer 9437184 txId 521 2025-03-28T09:40:18.768781Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 522 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2025-03-28T09:40:18.768812Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.768838Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 522 2025-03-28T09:40:18.768954Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 523 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2025-03-28T09:40:18.768984Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.769009Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 523 2025-03-28T09:40:18.769164Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 524 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2025-03-28T09:40:18.769197Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.769222Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 524 2025-03-28T09:40:18.769354Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 525 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2025-03-28T09:40:18.769390Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.769416Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 525 2025-03-28T09:40:18.769543Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2025-03-28T09:40:18.769574Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.769599Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 
2025-03-28T09:40:18.769772Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 526 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2025-03-28T09:40:18.769805Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.769831Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 526 2025-03-28T09:40:18.769963Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 527 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2025-03-28T09:40:18.769997Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.770022Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 527 2025-03-28T09:40:18.770135Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 528 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2025-03-28T09:40:18.770166Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.770191Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 528 2025-03-28T09:40:18.770377Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 529 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2025-03-28T09:40:18.770409Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.770435Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 529 2025-03-28T09:40:18.770584Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 530 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2025-03-28T09:40:18.770617Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.770642Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 530 2025-03-28T09:40:18.770849Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 531 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2025-03-28T09:40:18.770884Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.770911Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 531 2025-03-28T09:40:18.771039Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2025-03-28T09:40:18.771078Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.771112Z node 16 :TX_DATASHARD 
DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2025-03-28T09:40:18.771313Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 532 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2025-03-28T09:40:18.771344Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.771372Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 532 2025-03-28T09:40:18.771511Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 533 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2025-03-28T09:40:18.771544Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.771569Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 533 2025-03-28T09:40:18.771668Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 534 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2025-03-28T09:40:18.771697Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.771724Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 534 2025-03-28T09:40:18.771829Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 535 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2025-03-28T09:40:18.771860Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.771887Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 535 2025-03-28T09:40:18.772048Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 536 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2025-03-28T09:40:18.772080Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.772106Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 536 2025-03-28T09:40:18.772302Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 537 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2025-03-28T09:40:18.772334Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.772360Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 537 2025-03-28T09:40:18.772435Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2025-03-28T09:40:18.772464Z node 16 :TX_DATASHARD TRACE: StateWork, processing 
event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.772488Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 2025-03-28T09:40:18.799684Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:40:18.799771Z node 16 :TX_DATASHARD TRACE: Complete execution for [1000005:538] at 9437184 on unit CompleteOperation 2025-03-28T09:40:18.799848Z node 16 :TX_DATASHARD DEBUG: Complete [1000005 : 538] from 9437184 at tablet 9437184 send result to client [16:99:2134], exec latency: 1 ms, propose latency: 2 ms 2025-03-28T09:40:18.799945Z node 16 :TX_DATASHARD DEBUG: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-03-28T09:40:18.800005Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:40:18.800265Z node 16 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:40:18.800297Z node 16 :TX_DATASHARD TRACE: Complete execution for [1000005:539] at 9437184 on unit CompleteOperation 2025-03-28T09:40:18.800347Z node 16 :TX_DATASHARD DEBUG: Complete [1000005 : 539] from 9437184 at tablet 9437184 send result to client [16:99:2134], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:40:18.800379Z node 16 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:40:18.800585Z node 16 :TX_DATASHARD TRACE: StateWork, received event# 269287938, Sender [16:237:2230], Recipient [16:348:2315]: {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2025-03-28T09:40:18.800632Z node 16 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2025-03-28T09:40:18.800666Z node 16 :TX_DATASHARD DEBUG: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 538 expect 31 31 30 30 30 31 25 31 27 30 31 31 22 31 23 30 6 9 31 31 25 22 22 - 31 - 20 - - 8 - - actual 31 31 30 30 30 31 25 31 27 30 31 31 22 31 23 30 6 9 31 31 25 22 22 - 31 - 20 - - 8 - - interm 30 30 30 30 30 27 25 27 27 30 27 22 22 20 23 30 6 9 16 20 25 22 22 - 0 - 20 - - 8 - - >> DataCleanup::ForceDataCleanupWithoutCompaction [GOOD] >> DataCleanup::MultipleDataCleanups >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings >> Metering::MockedNetClassifierLabelTransformation [GOOD] >> SHA256Test::SHA256Test [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToPosting [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToBuild >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate [GOOD] >> test_ctas.py::TestYtCtas::test_simple_ctast >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected ------- [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/ut/unittest >> SHA256Test::SHA256Test [GOOD] Test command err: 2025-03-28T09:39:59.332470Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792856665218250:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:59.332556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect 
path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006cf/r3tmp/tmpuIdRei/pdisk_1.dat 2025-03-28T09:39:59.653983Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:59.682651Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:59.683349Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:59.686456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:59.721857Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:59.721875Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:59.721881Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:59.721994Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:40:04.334816Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792856665218250:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:04.334909Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:40:05.354161Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792881597628217:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:05.354208Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006cf/r3tmp/tmpzITVYX/pdisk_1.dat 2025-03-28T09:40:05.457661Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:05.487996Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:05.488069Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:05.500369Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:40:05.500389Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:40:05.500397Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:40:05.500503Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:40:05.501147Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:10.354018Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792881597628217:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:10.354093Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:40:16.139058Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486792930098097623:2064];send_to=[0:7307199536658146131:7762515]; 
2025-03-28T09:40:16.139116Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006cf/r3tmp/tmpWLck3H/pdisk_1.dat 2025-03-28T09:40:16.224817Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:16.248623Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:16.248707Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:16.254073Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:16.261578Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:40:16.261610Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:40:16.261617Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:40:16.261736Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:40:21.139789Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486792930098097623:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:21.139877Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |97.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/tools/pq_read/test/py3test >> test_commit.py::TestCommit::test_commit [GOOD] |97.7%| [TS] {RESULT} ydb/tests/tools/pq_read/test/py3test >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding >> LongTxService::AcquireSnapshot [GOOD] >> LongTxService::LockSubscribe >> KqpTpch::Query02 [GOOD] >> KqpTpch::Query03 |97.7%| [TS] {RESULT} ydb/core/ymq/actor/ut/unittest >> Graph::MemoryBackendFullCycle [GOOD] >> Graph::LocalBackendFullCycle >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi >> TTxDataShardSampleKScan::ScanBadParameters [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGeneration [GOOD] >> KeyValueGRPCService::SimpleRenameUnexistedKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] Test command err: Trying to start YDB, gRPC: 9725, MsgBus: 8200 2025-03-28T09:39:07.312765Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792633726245405:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:07.312879Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000897/r3tmp/tmpCxjrsX/pdisk_1.dat TServer::EnableGrpc on GrpcPort 9725, node 1 2025-03-28T09:39:07.636128Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 
2025-03-28T09:39:07.638399Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T09:39:07.639112Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:07.668943Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:07.668965Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:07.668974Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:07.669116Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:07.693709Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:07.693837Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:07.695343Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:8200 TClient is connected to server localhost:8200 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:08.122213Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:08.158993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:08.282051Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:08.414129Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:08.462272Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:09.894208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792642316181776:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:09.894290Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:10.181851Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:39:10.206755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:39:10.233313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:10.260093Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:10.287429Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:39:10.333907Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:39:10.369157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792646611149581:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:10.369232Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:10.369388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792646611149586:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:10.373327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:39:10.384752Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792646611149588:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:39:10.444109Z node 1 :TX_PROXY ERROR: Actor# [1:7486792646611149644:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:11.620647Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154751653, txId: 281474976710672] shutting down 2025-03-28T09:39:11.824116Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154751863, txId: 281474976710675] shutting down 2025-03-28T09:39:12.100754Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154752136, txId: 281474976710678] shutting down 2025-03-28T09:39:12.312816Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792633726245405:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:12.312916Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:12.337075Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154752374, txId: 281474976710681] shutting down 2025-03-28T09:39:12.578935Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154752591, txId: 281474976710684] shutting down 2025-03-28T09:39:12.803856Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154752836, txId: 281474976710687] shutting down 2025-03-28T09:39:13.079141Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154753109, txId: 281474976710690] shutting down 2025-03-28T09:39:13.354343Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154753389, txId: 281474976710693] shutting down 2025-03-28T09:39:13.607881Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154753641, txId: 281474976710696] shutting down 2025-03-28T09:39:13.858403Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154753893, txId: 281474976710699] shutting down 2025-03-28T09:39:14.099730Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154754138, txId: 281474976710702] shutting down 2025-03-28T09:39:14.373326Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154754411, txId: 281474976710705] shutting down 2025-03-28T09:39:14.654711Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154754691, txId: 281474976710708] shutting down 2025-03-28T09:39:14.890355Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154754929, txId: 281474976710711] shutting down 2025-03-28T09:39:15.162402Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154755202, txId: 281474976710714] shutting down 2025-03-28T09:39:15.396731Z node 1 :KQP_RESOURCE_MANAGER WARN: 
KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154755433, txId: 281474976710717] shutting down 2025-03-28T09:39:15.616499Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154755650, txId: 281474976710720] shutting down 2025-03-28T09:39:15.849007Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154755888, txId: 281474976710723] shutting down 2025-03-28T09:39:16.109991Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154756147, txId: 281474976710726] shutting down 2025-03-28T09:39:16.359763Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154756371, txId: 281474976710729] shutting down 2025-03-28T09:39:16.584646Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154756623, txId: 281474976710732] shutting down 2025-03-28T09:39:16.802092Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [ ... SnapshotManager: discarding snapshot; our snapshot: [step: 1743154804888, txId: 281474976711206] shutting down 2025-03-28T09:40:05.270542Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154805294, txId: 281474976711209] shutting down 2025-03-28T09:40:05.790178Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154805812, txId: 281474976711212] shutting down 2025-03-28T09:40:06.259978Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154806274, txId: 281474976711215] shutting down 2025-03-28T09:40:06.672218Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154806694, txId: 281474976711218] shutting down 2025-03-28T09:40:07.239327Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154807240, txId: 281474976711221] shutting down 2025-03-28T09:40:07.633305Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154807646, txId: 281474976711224] shutting down 2025-03-28T09:40:08.148929Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154808150, txId: 281474976711227] shutting down 2025-03-28T09:40:08.621675Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154808633, txId: 281474976711230] shutting down 2025-03-28T09:40:09.051349Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154809067, txId: 281474976711233] shutting down 2025-03-28T09:40:09.476118Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154809494, txId: 281474976711236] shutting down 2025-03-28T09:40:09.904309Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154809928, txId: 281474976711239] shutting down 2025-03-28T09:40:10.349293Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154810327, txId: 281474976711242] shutting down 2025-03-28T09:40:10.794732Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154810817, txId: 281474976711245] shutting down 
2025-03-28T09:40:11.263352Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154811279, txId: 281474976711248] shutting down 2025-03-28T09:40:11.696307Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154811713, txId: 281474976711251] shutting down 2025-03-28T09:40:12.116063Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154812140, txId: 281474976711254] shutting down 2025-03-28T09:40:12.537090Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154812560, txId: 281474976711257] shutting down 2025-03-28T09:40:12.959378Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154812973, txId: 281474976711260] shutting down 2025-03-28T09:40:13.354874Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154813379, txId: 281474976711263] shutting down 2025-03-28T09:40:13.786278Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154813806, txId: 281474976711266] shutting down 2025-03-28T09:40:14.197023Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154814219, txId: 281474976711269] shutting down Trying to start YDB, gRPC: 18944, MsgBus: 12101 2025-03-28T09:40:15.332263Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792925165459897:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:15.332301Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000897/r3tmp/tmpjJNkQv/pdisk_1.dat 2025-03-28T09:40:15.459493Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:15.488946Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:15.489059Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:15.490768Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 18944, node 2 2025-03-28T09:40:15.547423Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:40:15.547455Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:40:15.547465Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:40:15.547627Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12101 TClient is connected to server localhost:12101 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:40:16.090728Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:40:16.103513Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:40:16.188839Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:40:16.401881Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:40:16.484632Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:40:19.207272Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792942345330862:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:19.207384Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:19.244255Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:40:19.316662Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:40:19.363519Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:40:19.444015Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:40:19.489090Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:40:19.536777Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:40:19.640322Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792942345331386:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:19.640413Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:19.640696Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792942345331391:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:19.645798Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:40:19.696787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792942345331393:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:40:19.783828Z node 2 :TX_PROXY ERROR: Actor# [2:7486792942345331452:3460] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:40:20.332564Z node 2 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7486792925165459897:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:20.332654Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:40:22.235877Z node 2 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743154822241, txId: 281474976715671] shutting down >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed [GOOD] >> TabletService_Restart::Basics >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> TTxDataShardReshuffleKMeansScan::BadRequest [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPosting >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery >> TCreateAndDropViewTest::CreateViewOccupiedName [GOOD] >> TCreateAndDropViewTest::CreateViewIfNotExists >> LongTxService::LockSubscribe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_sample_k/unittest >> TTxDataShardSampleKScan::ScanBadParameters [GOOD] Test command err: 2025-03-28T09:40:16.122625Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792929087053527:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:16.122672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a28/r3tmp/tmpMgNDZ5/pdisk_1.dat 2025-03-28T09:40:16.554202Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:16.554788Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:16.557360Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:16.600682Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:16.635865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:16.676018Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:16.722183Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7486792929087054099:2295] 2025-03-28T09:40:16.722491Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:16.747186Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:16.747262Z node 1 :TX_DATASHARD DEBUG: 
TDataShard::TTxInit::Execute 2025-03-28T09:40:16.756658Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:40:16.756714Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:40:16.756750Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:40:16.767086Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:40:16.767208Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:40:16.767254Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7486792929087054115:2295] in generation 1 2025-03-28T09:40:16.774779Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:40:16.844997Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:40:16.850786Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:40:16.850887Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7486792929087054119:2296] 2025-03-28T09:40:16.850897Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:40:16.850906Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:40:16.850916Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:16.855066Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792929087054096:2295], serverId# [1:7486792929087054114:2303], sessionId# [0:0:0] 2025-03-28T09:40:16.855203Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:40:16.855359Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:40:16.855403Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:16.855417Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:16.855512Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:40:16.855532Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:16.855561Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:40:16.855929Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:40:16.856362Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-03-28T09:40:16.857995Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:16.858487Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:40:16.858574Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:40:16.860796Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792929087054132:2313], serverId# [1:7486792929087054133:2314], sessionId# 
[0:0:0] 2025-03-28T09:40:16.865886Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743154816907 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154816907 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:40:16.865935Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:16.866102Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:16.866165Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:16.866192Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:40:16.866239Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743154816907:281474976710657] in PlanQueue unit at 72075186224037888 2025-03-28T09:40:16.866478Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743154816907:281474976710657 keys extracted: 0 2025-03-28T09:40:16.866621Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:40:16.866758Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:16.866844Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:40:16.869806Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:40:16.871307Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:16.872600Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743154816906 2025-03-28T09:40:16.872621Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:16.872658Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743154816907} 2025-03-28T09:40:16.872693Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:16.872722Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:16.872746Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:40:16.872775Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:40:16.872816Z node 1 :TX_DATASHARD DEBUG: Complete [1743154816907 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7486792929087053930:2186], exec latency: 4 ms, propose latency: 6 ms 2025-03-28T09:40:16.872838Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-03-28T09:40:16.872949Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:16.873028Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743154816914 2025-03-28T09:40:16.878120Z node 1 
:TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-03-28T09:40:16.878192Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:40:19.174244Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792941971956116:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:19.174245Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792941971956127:2316], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:19.174341Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:19.189232Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T09:40:19.193495Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:19.200269Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:19.201052Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792941971956130:2317], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T09:40:19.361524Z node 1 :TX_PROXY ERROR: Actor# [1:7486792941971956181:2393] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:40:20.481482Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710660. Ctx: { TraceId: 01jqe1zc312agzm8fhtxqx0fb3, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2ExYjgyY2EtZjQ4ODM5MzUtNjVhZjdlZmYtYmEyY2ZkZjk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:20.495875Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792946266923509:2408], serverId# [1:7486792946266923510:2409], sessionId# [0:0:0] 2025-03-28T09:40:20.497247Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:40:20.513209Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-0 ... osed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:21.350735Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:21.350814Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:21.353833Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:21.356771Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:21.366114Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:7486792951845269127:2295] 2025-03-28T09:40:21.366336Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:21.377528Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:21.377579Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:40:21.378994Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:40:21.379037Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:40:21.379073Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:40:21.379390Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:40:21.379813Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:40:21.379846Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:7486792951845269141:2295] in generation 1 2025-03-28T09:40:21.380866Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:40:21.380899Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:40:21.380969Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:40:21.380995Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:7486792951845269143:2296] 
2025-03-28T09:40:21.381003Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:40:21.381014Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:40:21.381023Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:21.381102Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:40:21.381155Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:40:21.381171Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:21.381185Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:21.381200Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:40:21.381211Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:21.415534Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7486792951845269119:2293], serverId# [2:7486792951845269146:2306], sessionId# [0:0:0] 2025-03-28T09:40:21.415666Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:40:21.415904Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:40:21.415981Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:40:21.418199Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:21.422770Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:40:21.422831Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:40:21.426781Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7486792951845269159:2313], serverId# [2:7486792951845269161:2315], sessionId# [0:0:0] 2025-03-28T09:40:21.427054Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1743154821471 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154821471 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:40:21.427068Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:21.427188Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:21.427208Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:40:21.427228Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743154821471:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:40:21.427481Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743154821471:281474976715657 keys extracted: 0 2025-03-28T09:40:21.427592Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:40:21.427679Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:21.427710Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:21.427740Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:40:21.428126Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:40:21.428484Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:21.430575Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743154821470 2025-03-28T09:40:21.430600Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:21.430636Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743154821471} 2025-03-28T09:40:21.430665Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:21.430711Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:21.430734Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:40:21.430748Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:40:21.430783Z node 2 :TX_DATASHARD DEBUG: Complete [1743154821471 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:7486792951845268883:2141], exec latency: 0 ms, propose latency: 3 ms 2025-03-28T09:40:21.430805Z node 2 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:40:21.430859Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:21.430928Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743154821478 2025-03-28T09:40:21.433414Z node 2 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:40:21.433442Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:40:21.440081Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7486792951845269198:2342], serverId# [2:7486792951845269199:2343], sessionId# [0:0:0] 2025-03-28T09:40:21.440158Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:40:21.440220Z node 2 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976715658 at tablet 72075186224037888 2025-03-28T09:40:21.440877Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:40:21.441942Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715658 at step 1743154821485 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154821485 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:40:21.441958Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 
2025-03-28T09:40:21.442039Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:21.442061Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:40:21.442078Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1743154821485:281474976715658] in PlanQueue unit at 72075186224037888 2025-03-28T09:40:21.442186Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743154821485:281474976715658 keys extracted: 0 2025-03-28T09:40:21.442407Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:21.444510Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743154821485} 2025-03-28T09:40:21.444535Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:21.444564Z node 2 :TX_DATASHARD DEBUG: Complete [1743154821485 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [2:7486792951845269193:2338], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:40:21.444579Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:21.446650Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7486792951845269209:2353], serverId# [2:7486792951845269210:2354], sessionId# [0:0:0] 2025-03-28T09:40:21.447933Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7486792951845269214:2358], serverId# [2:7486792951845269215:2359], sessionId# [0:0:0] 2025-03-28T09:40:21.448962Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7486792951845269219:2363], serverId# [2:7486792951845269220:2364], sessionId# [0:0:0] 2025-03-28T09:40:21.450070Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7486792951845269224:2368], serverId# [2:7486792951845269225:2369], sessionId# [0:0:0] 2025-03-28T09:40:21.451381Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7486792951845269229:2373], serverId# [2:7486792951845269230:2374], sessionId# [0:0:0] 2025-03-28T09:40:21.452576Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:7486792951845269234:2378], serverId# [2:7486792951845269235:2379], sessionId# [0:0:0] |97.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_sample_k/unittest >> TSequence::CreateTableWithDefaultFromSequence >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries >> test_http_api.py::TestHttpApi::test_simple_analytics_query ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/long_tx_service/ut/unittest >> LongTxService::LockSubscribe [GOOD] Test command err: 2025-03-28T09:40:21.295150Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:40:21.295679Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/000a2a/r3tmp/tmpaRFL11/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2025-03-28T09:40:21.296387Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/000a2a/r3tmp/tmpaRFL11/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/000a2a/r3tmp/tmpaRFL11/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10560073104304133519 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:40:21.349931Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvBeginTx from [1:433:2321] 2025-03-28T09:40:21.350774Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Created new LongTxId# ydb://long-tx/000000001h54am0e49ehaf7qm3?node_id=1 2025-03-28T09:40:21.361928Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvAttachColumnShardWrites from [2:434:2099] LongTxId# ydb://long-tx/000000001h54am0e49ehaf7qm3?node_id=1 2025-03-28T09:40:21.362057Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeConnected for NodeId# 1 from session [2:97:2048] 2025-03-28T09:40:21.362205Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvAttachColumnShardWrites from [2:147:2088] LongTxId# ydb://long-tx/000000001h54am0e49ehaf7qm3?node_id=1 2025-03-28T09:40:21.362397Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvCommitTx from [2:434:2099] LongTxId# ydb://long-tx/000000001h54am0e49ehaf7qm3?node_id=1 2025-03-28T09:40:21.362584Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvCommitTx from [2:147:2088] LongTxId# ydb://long-tx/000000001h54am0e49ehaf7qm3?node_id=1 2025-03-28T09:40:21.363480Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Committed LongTxId# ydb://long-tx/000000001h54am0e49ehaf7qm3?node_id=1 without side-effects 2025-03-28T09:40:21.363739Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvRollbackTx from [2:434:2099] LongTxId# ydb://long-tx/000000001h54am0e49ehaf7qm3?node_id=1 2025-03-28T09:40:21.363937Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvRollbackTx from [2:147:2088] LongTxId# 
ydb://long-tx/000000001h54am0e49ehaf7qm3?node_id=1 2025-03-28T09:40:21.364979Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvRollbackTx from [2:434:2099] LongTxId# ydb://long-tx/000000001h54am0e49ehaf7qm3?node_id=1 2025-03-28T09:40:21.365088Z node 1 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 1] Received TEvRollbackTx from [2:147:2088] LongTxId# ydb://long-tx/000000001h54am0e49ehaf7qm3?node_id=1 2025-03-28T09:40:21.365847Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-03-28T09:40:21.366162Z node 1 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 2 2025-03-28T09:40:21.366435Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 1 from session [2:97:2048] 2025-03-28T09:40:21.366732Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:83:2074] ServerId# [1:353:2271] TabletId# 72057594037932033 PipeClientId# [2:83:2074] 2025-03-28T09:40:21.369915Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvCommitTx from [2:434:2099] LongTxId# ydb://long-tx/000000001h54am0e49ehaf7qm3?node_id=3 2025-03-28T09:40:21.370158Z node 2 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 3 from session [2:472:2101] 2025-03-28T09:40:22.108007Z node 3 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:40:22.108097Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:22.200911Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944 2025-03-28T09:40:23.076157Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:40:23.076692Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/000a2a/r3tmp/tmptILaut/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:40:23.076947Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/000a2a/r3tmp/tmptILaut/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/000a2a/r3tmp/tmptILaut/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8927684225962707585 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:40:23.518156Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireReadSnapshot from [3:509:2382] for database /dc-1 2025-03-28T09:40:23.518259Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-03-28T09:40:23.528607Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-03-28T09:40:23.528888Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:561:2421] Sending navigate request for /dc-1 2025-03-28T09:40:23.533226Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:561:2421] Received navigate response status Ok 2025-03-28T09:40:23.533301Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:561:2421] Sending acquire step to coordinator 72057594046316545 2025-03-28T09:40:23.538907Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:561:2421] Received read step 1000 2025-03-28T09:40:23.539168Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 1 2025-03-28T09:40:23.539812Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvBeginTx from [3:509:2382] 2025-03-28T09:40:23.539860Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-03-28T09:40:23.550254Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-03-28T09:40:23.550484Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:579:2433] Sending navigate request for /dc-1 2025-03-28T09:40:23.550743Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:579:2433] Received navigate response status Ok 2025-03-28T09:40:23.550806Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:579:2433] Sending acquire step to coordinator 72057594046316545 2025-03-28T09:40:23.550986Z node 3 :LONG_TX_SERVICE DEBUG: 
LongTxService.AcquireSnapshot [3:579:2433] Received read step 1500 2025-03-28T09:40:23.551073Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 2 2025-03-28T09:40:23.551138Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Created new read-only LongTxId# ydb://long-tx/read-only?snapshot=1500%3Amax 2025-03-28T09:40:23.551402Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvBeginTx from [3:509:2382] 2025-03-28T09:40:23.551467Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2025-03-28T09:40:23.561829Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2025-03-28T09:40:23.562065Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:581:2435] Sending navigate request for /dc-1 2025-03-28T09:40:23.562293Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:581:2435] Received navigate response status Ok 2025-03-28T09:40:23.562350Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:581:2435] Sending acquire step to coordinator 72057594046316545 2025-03-28T09:40:23.562529Z node 3 :LONG_TX_SERVICE DEBUG: LongTxService.AcquireSnapshot [3:581:2435] Received read step 1500 2025-03-28T09:40:23.562609Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 3 2025-03-28T09:40:23.562708Z node 3 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 3] Created new read-write LongTxId# ydb://long-tx/00000001ea7gvd9s5v8ys872aa?node_id=3&snapshot=1500%3Amax 2025-03-28T09:40:24.398732Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:920} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2025-03-28T09:40:24.399105Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2744} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/2909/000a2a/r3tmp/tmpmgMKWD/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2025-03-28T09:40:24.399302Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:290} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/2909/000a2a/r3tmp/tmpmgMKWD/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfg Path# "/home/runner/.ya/build/build_root/2909/000a2a/r3tmp/tmpmgMKWD/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14679426453257239336 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false} PDiskId# 1000 2025-03-28T09:40:24.440539Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvRegisterLock for LockId# 123 2025-03-28T09:40:24.440666Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [5:428:2316] for LockId# 987 LockNode# 5 2025-03-28T09:40:24.449941Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from [6:429:2099] for LockId# 987 LockNode# 5 2025-03-28T09:40:24.451639Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:96:2048] 2025-03-28T09:40:24.463052Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [6:147:2088] for LockId# 987 LockNode# 5 2025-03-28T09:40:24.464619Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:146:2135] for LockId# 987 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2025-03-28T09:40:24.464863Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [5:428:2316] for LockId# 123 LockNode# 5 2025-03-28T09:40:24.465018Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from [6:429:2099] for LockId# 123 LockNode# 5 2025-03-28T09:40:24.467524Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvSubscribeLock from [6:147:2088] for LockId# 123 LockNode# 5 2025-03-28T09:40:24.467767Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:146:2135] for LockId# 123 LockNode# 5 LockStatus# STATUS_SUBSCRIBED 2025-03-28T09:40:24.467949Z node 5 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 5] Received TEvUnregisterLock for LockId# 123 2025-03-28T09:40:24.468093Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvLockStatus from [5:146:2135] for LockId# 123 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2025-03-28T09:40:24.468455Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvSubscribeLock from 
[6:429:2099] for LockId# 234 LockNode# 5 2025-03-28T09:40:24.468669Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-03-28T09:40:24.469068Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-03-28T09:40:24.469309Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:96:2048] 2025-03-28T09:40:24.469473Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:82:2074] ServerId# [5:350:2268] TabletId# 72057594037932033 PipeClientId# [6:82:2074] 2025-03-28T09:40:24.676727Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:458:2048] 2025-03-28T09:40:24.676982Z node 5 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 6 2025-03-28T09:40:24.677258Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:458:2048] 2025-03-28T09:40:24.677354Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-03-28T09:40:24.677388Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-03-28T09:40:24.677495Z node 6 :TX_PROXY WARN: actor# [6:145:2087] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-03-28T09:40:24.677872Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:459:2100] ServerId# [5:464:2335] TabletId# 72057594037932033 PipeClientId# [6:459:2100] 2025-03-28T09:40:24.897193Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:488:2048] 2025-03-28T09:40:24.897459Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-03-28T09:40:24.897528Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-03-28T09:40:24.898076Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:488:2048] 2025-03-28T09:40:24.898431Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:491:2101] ServerId# [5:495:2357] TabletId# 72057594037932033 PipeClientId# [6:491:2101] 2025-03-28T09:40:25.147841Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:509:2048] 2025-03-28T09:40:25.148036Z node 5 :PIPE_SERVER ERROR: [72057594046578946] NodeDisconnected NodeId# 6 2025-03-28T09:40:25.148129Z node 5 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 6 2025-03-28T09:40:25.148166Z node 6 :LONG_TX_SERVICE DEBUG: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:509:2048] 2025-03-28T09:40:25.148435Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:512:2103] ServerId# [5:516:2371] TabletId# 72057594037932033 PipeClientId# [6:512:2103] |97.8%| [TS] {RESULT} ydb/core/tx/long_tx_service/ut/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery >> KqpTpch::Query03 [GOOD] >> KqpTpch::Query04 >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update |97.8%| [TA] $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus |97.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} >> DataCleanup::MultipleDataCleanups [GOOD] >> DataCleanup::MultipleDataCleanupsWithOldGenerations >> TTxDataShardPrefixKMeansScan::BuildToBuild [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery >> KqpTpch::Query04 [GOOD] >> KqpTpch::Query05 >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery >> KeyValueGRPCService::SimpleRenameUnexistedKey [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKey >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_prefix_kmeans/unittest >> TTxDataShardPrefixKMeansScan::BuildToBuild [GOOD] Test command err: 2025-03-28T09:40:13.124317Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792917457415057:2200];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:13.125082Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00090a/r3tmp/tmpDDZgQF/pdisk_1.dat 2025-03-28T09:40:13.645461Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:13.657189Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:13.657779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:13.660783Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:13.712647Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:13.753136Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:13.797564Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7486792917457415494:2295] 2025-03-28T09:40:13.797856Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:13.812788Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:13.812860Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:40:13.815834Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:40:13.815897Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:40:13.815933Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:40:13.817676Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:40:13.817749Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:40:13.817775Z 
node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7486792917457415508:2295] in generation 1 2025-03-28T09:40:13.822770Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:40:13.862349Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:40:13.863403Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:40:13.863463Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7486792917457415512:2296] 2025-03-28T09:40:13.863557Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:40:13.863571Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:40:13.863594Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:13.864474Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792917457415489:2294], serverId# [1:7486792917457415511:2305], sessionId# [0:0:0] 2025-03-28T09:40:13.864802Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:40:13.864917Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:40:13.864944Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:13.864959Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:13.865028Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:40:13.865058Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:13.865086Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:40:13.865410Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:40:13.865817Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-03-28T09:40:13.867326Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:13.867506Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:40:13.868354Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:40:13.870581Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792917457415526:2313], serverId# [1:7486792917457415527:2314], sessionId# [0:0:0] 2025-03-28T09:40:13.876013Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743154813918 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154813918 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:40:13.876045Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:13.876191Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at 
datashard 72075186224037888 2025-03-28T09:40:13.876284Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:13.876323Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:40:13.876384Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743154813918:281474976710657] in PlanQueue unit at 72075186224037888 2025-03-28T09:40:13.876627Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743154813918:281474976710657 keys extracted: 0 2025-03-28T09:40:13.876752Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:40:13.876950Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:13.876979Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:40:13.886448Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:40:13.887873Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:13.889094Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743154813918} 2025-03-28T09:40:13.889131Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:13.889164Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743154813917 2025-03-28T09:40:13.889176Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:13.889227Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743154813925 2025-03-28T09:40:13.889295Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:13.889311Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:40:13.889334Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:40:13.889371Z node 1 :TX_DATASHARD DEBUG: Complete [1743154813918 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7486792917457415311:2179], exec latency: 10 ms, propose latency: 12 ms 2025-03-28T09:40:13.889400Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-03-28T09:40:13.889480Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:13.893730Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-03-28T09:40:13.893783Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:40:13.903003Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792917457415562:2340], serverId# [1:7486792917457415563:2341], sessionId# [0:0:0] 2025-03-28T09:40:13.923730Z node 1 :BUILD_INDEX NOTICE: Starting TPrefixKMeansScan Id: 1 TabletId: 72075186224037888 PathId { 
OwnerId: 72057594046644480 LocalId: 2 } SeqNoGeneration: 1 SeqNoRound: 1 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 2 } Seed: 1337 Upload: UPLOAD_BUILD_TO_POSTING K: 0 NeedsRounds: 3 Child: 1 LevelName: "/Root/table-level" PostingName: "/Root/table-posting" EmbeddingColumn: "embedding" row version v1743154813925/18446744073709551615 2025-03-28T09:40:13.925747Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792917457415567:2345], serverId# [1:7486792917457415568:2346], sessionId# [0:0:0] 2025-03-28T09:40:13.925930Z node 1 :BUILD_INDEX NOTICE: Starting TPrefixKMeansScan Id: 1 TabletId: 72075186224037888 PathId { OwnerId: 72057594046644480 LocalId: 2 } SeqNoGeneration: 2 SeqNoRound: 1 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 2 } Seed: 1337 Upload: UPLOAD_BUILD_TO_POSTING K: 1 NeedsRounds: 3 Child: 1 LevelName: "/Root/table-level" PostingName: "/Root/table-posting" EmbeddingColumn: "embedding" row version v1743154813925/18446744073709551615 2025-03-28T09:40:13.927995Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792917457415572:2350], serverId# [1:7486792917457415573:2351], sessionId# [0:0:0] 2025-03-28T09:40:13.928180Z node 1 :BUILD_INDEX NOTICE: Starting TPrefixKMeansScan Id: 1 TabletId: 72075186224037888 PathId { OwnerId: 72057594046644480 LocalId: 2 } SeqNoGeneration: 3 SeqNoRound: 1 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 2 } Seed: 1337 Upload: UPLOAD_BUILD_TO_POSTING K: 2 NeedsRounds: 3 Child: 1 LevelName: "/Root/table-level" PostingName: "/Root/table-posting" row version v1743154813925/18446744073709551615 2025-03-28T09:40:13.929671Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792917457415577:2355], serverId# [1:7486792917457415578:2356], sessionId# [0:0:0] 2025-03-28T09:40:13.929807Z node 1 :BUILD_INDEX NOTICE: Starting TPrefixKMeansScan Id: 1 TabletId: 0 PathId { OwnerId: 72057594046644480 LocalId: 2 } SeqNoGeneration: 4 SeqNoRound: 1 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 2 } Seed: 1337 Upload: UPLOAD_BUILD_TO_POSTING K: 2 NeedsRounds: 3 Child: 1 LevelName: "/Root/table-level" PostingName: "/Root/table-posting" EmbeddingColumn: "embedding" row version v1743154813925/18446744073709551615 2025-03-28T09:40:13.931386Z node 1 :TX_DATASHARD DEBUG: Server connecte ... 
ast Ready tabletId 72075186224037911 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:40:27.658454Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037911 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:27.658528Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:27.658653Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037910 2025-03-28T09:40:27.659856Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037911 time 1743154827700 2025-03-28T09:40:27.659879Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037911 2025-03-28T09:40:27.659898Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037911 2025-03-28T09:40:27.659939Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037911 coordinator 72057594046316545 last step 0 next step 1743154827708 2025-03-28T09:40:27.659985Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037911 step# 1743154827701} 2025-03-28T09:40:27.660011Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037911 2025-03-28T09:40:27.660040Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037911 2025-03-28T09:40:27.660056Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037911 2025-03-28T09:40:27.660079Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037911 2025-03-28T09:40:27.660117Z node 3 :TX_DATASHARD DEBUG: Complete [1743154827701 : 281474976715725] from 72075186224037911 at tablet 72075186224037911 send result to client [3:7486792955399671326:2159], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:40:27.660146Z node 3 :TX_DATASHARD INFO: 72075186224037911 Sending notify to schemeshard 72057594046644480 txId 281474976715725 state Ready TxInFly 0 2025-03-28T09:40:27.660187Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037911 2025-03-28T09:40:27.661094Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715725 datashard 72075186224037911 state Ready 2025-03-28T09:40:27.661146Z node 3 :TX_DATASHARD DEBUG: 72075186224037911 Got TEvSchemaChangedResult from SS at 72075186224037911 2025-03-28T09:40:27.665539Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715726:0, at schemeshard: 72057594046644480 2025-03-28T09:40:27.667797Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:27.667805Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037910 2025-03-28T09:40:27.667888Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037911 2025-03-28T09:40:27.672228Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037912 actor [3:7486792976874511179:2438] 2025-03-28T09:40:27.672462Z 
node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:27.683151Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:27.683216Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:40:27.684538Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037912 2025-03-28T09:40:27.684591Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037912 2025-03-28T09:40:27.684625Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037912 2025-03-28T09:40:27.684926Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:40:27.684975Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:40:27.685000Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037912 persisting started state actor id [3:7486792976874511197:2438] in generation 1 2025-03-28T09:40:27.686315Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:40:27.686348Z node 3 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037912 2025-03-28T09:40:27.686415Z node 3 :TX_DATASHARD DEBUG: 72075186224037912 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:40:27.686448Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037912, actorId: [3:7486792976874511199:2439] 2025-03-28T09:40:27.686462Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037912 2025-03-28T09:40:27.686475Z node 3 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037912, state: WaitScheme 2025-03-28T09:40:27.686488Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037912 2025-03-28T09:40:27.686585Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037912 2025-03-28T09:40:27.686641Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037912 2025-03-28T09:40:27.686667Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037912, clientId# [3:7486792976874511184:4704], serverId# [3:7486792976874511189:4708], sessionId# [0:0:0] 2025-03-28T09:40:27.686779Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037912 2025-03-28T09:40:27.686793Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037912 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:27.686810Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037912 TxInFly 0 2025-03-28T09:40:27.686830Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037912 2025-03-28T09:40:27.686854Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037912 2025-03-28T09:40:27.687056Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037912 txId 281474976715726 ssId 72057594046644480 seqNo 2:46 2025-03-28T09:40:27.687108Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715726 at tablet 72075186224037912 2025-03-28T09:40:27.687482Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037912 2025-03-28T09:40:27.688236Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037912 2025-03-28T09:40:27.688289Z node 3 :TX_DATASHARD DEBUG: 72075186224037912 not sending time cast registration request in state WaitScheme 
2025-03-28T09:40:27.693794Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037911 2025-03-28T09:40:27.693896Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037912, clientId# [3:7486792976874511205:4718], serverId# [3:7486792976874511206:4719], sessionId# [0:0:0] 2025-03-28T09:40:27.694125Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715726 at step 1743154827736 at tablet 72075186224037912 { Transactions { TxId: 281474976715726 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154827736 MediatorID: 72057594046382081 TabletID: 72075186224037912 } 2025-03-28T09:40:27.694141Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037912 2025-03-28T09:40:27.694214Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037912 2025-03-28T09:40:27.694254Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037912 2025-03-28T09:40:27.694271Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037912 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:40:27.694292Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1743154827736:281474976715726] in PlanQueue unit at 72075186224037912 2025-03-28T09:40:27.694513Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037912 loaded tx from db 1743154827736:281474976715726 keys extracted: 0 2025-03-28T09:40:27.694624Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037912 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:40:27.694717Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037912 2025-03-28T09:40:27.694754Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037912 tableId# [OwnerId: 72057594046644480, LocalPathId: 30] schema version# 1 2025-03-28T09:40:27.695156Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037912 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:40:27.695493Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037912 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:27.696944Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037912 time 1743154827735 2025-03-28T09:40:27.696962Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037912 2025-03-28T09:40:27.696981Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037912 coordinator 72057594046316545 last step 0 next step 1743154827743 2025-03-28T09:40:27.697021Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037912 step# 1743154827736} 2025-03-28T09:40:27.697055Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037912 2025-03-28T09:40:27.697085Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037912 2025-03-28T09:40:27.697103Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037912 2025-03-28T09:40:27.697120Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037912 2025-03-28T09:40:27.697157Z node 3 :TX_DATASHARD DEBUG: Complete [1743154827736 : 281474976715726] from 72075186224037912 at tablet 72075186224037912 send result to client 
[3:7486792955399671326:2159], exec latency: 0 ms, propose latency: 2 ms 2025-03-28T09:40:27.697186Z node 3 :TX_DATASHARD INFO: 72075186224037912 Sending notify to schemeshard 72057594046644480 txId 281474976715726 state Ready TxInFly 0 2025-03-28T09:40:27.697217Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037912 2025-03-28T09:40:27.697277Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:27.697341Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037910 2025-03-28T09:40:27.698222Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715726 datashard 72075186224037912 state Ready 2025-03-28T09:40:27.698268Z node 3 :TX_DATASHARD DEBUG: 72075186224037912 Got TEvSchemaChangedResult from SS at 72075186224037912 2025-03-28T09:40:27.726037Z node 3 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7486792955399670959:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:27.726163Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |97.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_prefix_kmeans/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] >> DataShardStats::MultipleChannelsStatsCorrect [GOOD] >> DataShardStats::HistogramStatsCorrect >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] >> TabletService_Restart::Basics [GOOD] >> TabletService_Restart::OnlyAdminsAllowed >> TTxDataShardReshuffleKMeansScan::MainToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToBuild >> Coordinator::ReadStepSubscribe [GOOD] >> Coordinator::LastStepSubscribe >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] [GOOD] >> TCreateAndDropViewTest::CreateViewIfNotExists [GOOD] >> TCreateAndDropViewTest::DropView >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_compaction/unittest >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] Test command err: 2025-03-28T09:39:58.181281Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:58.181833Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:58.181887Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0008c3/r3tmp/tmppv6ERs/pdisk_1.dat 2025-03-28T09:39:58.687391Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:39:58.738908Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:58.782632Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:39:58.784552Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:39:58.784829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:58.785379Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:58.801070Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:39:58.888355Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:39:58.888439Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:39:58.889334Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:39:59.010383Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:39:59.010513Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:39:59.011120Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:39:59.011210Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:39:59.011552Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:39:59.011737Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T09:39:59.011943Z node 1 :TX_PROXY DEBUG: Actor# 
[1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:39:59.012261Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:39:59.015121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:39:59.016927Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:39:59.017000Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:39:59.061112Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:39:59.062377Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:39:59.062876Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:39:59.063164Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:59.116924Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:39:59.117752Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:59.117883Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:39:59.120104Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:39:59.120185Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:39:59.120297Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:39:59.121960Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:39:59.122116Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:39:59.122206Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:39:59.133065Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:39:59.168066Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:39:59.169443Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:39:59.169633Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:39:59.169671Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:39:59.169707Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:39:59.169742Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:39:59.169999Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 
2025-03-28T09:39:59.170061Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:39:59.171392Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:39:59.171571Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:39:59.173200Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:39:59.173255Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:39:59.173424Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:39:59.173493Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:39:59.173531Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:39:59.173567Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:39:59.173624Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:39:59.173766Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:39:59.173819Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:39:59.173859Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:39:59.173928Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:39:59.173955Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:39:59.174061Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:39:59.174340Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:39:59.174404Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:39:59.174517Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:39:59.174567Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:39:59.174624Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:39:59.174664Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:39:59.174730Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:39:59.175021Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:39:59.175084Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:39:59.175118Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:39:59.175172Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 
72075186224037888 on unit FinishPropose 2025-03-28T09:39:59.175226Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:39:59.175256Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:39:59.175293Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:39:59.175333Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:39:59.175363Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:39:59.177000Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:39:59.177067Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:39:59.187885Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Comple ... 3-28T09:40:29.401950Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037892 2025-03-28T09:40:29.402019Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037892 2025-03-28T09:40:29.402484Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:931:2762], Recipient [2:931:2762]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:40:29.402528Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:40:29.402596Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037892 2025-03-28T09:40:29.402632Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:40:29.402668Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037892 for ReadTableScan 2025-03-28T09:40:29.402795Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037892 on unit ReadTableScan 2025-03-28T09:40:29.402831Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [71500:281474976715661] at 72075186224037892 error: , IsFatalError: 0 2025-03-28T09:40:29.402869Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is Executed 2025-03-28T09:40:29.402900Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit ReadTableScan 2025-03-28T09:40:29.402928Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037892 to execution unit CompleteOperation 2025-03-28T09:40:29.402972Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037892 on unit CompleteOperation 2025-03-28T09:40:29.403179Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is DelayComplete 2025-03-28T09:40:29.403213Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit CompleteOperation 2025-03-28T09:40:29.403241Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037892 to execution unit CompletedOperations 2025-03-28T09:40:29.403270Z node 2 :TX_DATASHARD TRACE: Trying to execute 
[71500:281474976715661] at 72075186224037892 on unit CompletedOperations 2025-03-28T09:40:29.403300Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037892 is Executed 2025-03-28T09:40:29.403323Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037892 executing on unit CompletedOperations 2025-03-28T09:40:29.403349Z node 2 :TX_DATASHARD TRACE: Execution plan for [71500:281474976715661] at 72075186224037892 has finished 2025-03-28T09:40:29.403377Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:29.403406Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037892 2025-03-28T09:40:29.403436Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037892 has no attached operations 2025-03-28T09:40:29.403467Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037892 2025-03-28T09:40:29.415925Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-03-28T09:40:29.416000Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037892 2025-03-28T09:40:29.416037Z node 2 :TX_DATASHARD TRACE: Complete execution for [71500:281474976715661] at 72075186224037892 on unit CompleteOperation 2025-03-28T09:40:29.416094Z node 2 :TX_DATASHARD DEBUG: Complete [71500 : 281474976715661] from 72075186224037892 at tablet 72075186224037892 send result to client [2:1476:3277], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:40:29.416145Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2025-03-28T09:40:29.416321Z node 2 :TX_PROXY DEBUG: Actor# [2:1476:3277] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 marker# P12 2025-03-28T09:40:29.416392Z node 2 :TX_PROXY DEBUG: Send stream clearance, shard: 72075186224037890, txid: 281474976715661, cleared: 1 2025-03-28T09:40:29.416537Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 269287940, Sender [2:1476:3277], Recipient [2:769:2643]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2025-03-28T09:40:29.416578Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2025-03-28T09:40:29.416669Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:769:2643], Recipient [2:769:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:40:29.416700Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:40:29.416766Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:40:29.416800Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:40:29.416837Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037890 for WaitForStreamClearance 2025-03-28T09:40:29.416870Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit WaitForStreamClearance 2025-03-28T09:40:29.416901Z node 2 :TX_DATASHARD TRACE: Got stream clearance for [71500:281474976715661] at 72075186224037890 2025-03-28T09:40:29.416941Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 
2025-03-28T09:40:29.416976Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit WaitForStreamClearance 2025-03-28T09:40:29.417004Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit ReadTableScan 2025-03-28T09:40:29.417035Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit ReadTableScan 2025-03-28T09:40:29.417267Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Continue 2025-03-28T09:40:29.417297Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:40:29.417326Z node 2 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037890 2025-03-28T09:40:29.417356Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-03-28T09:40:29.417392Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-28T09:40:29.417857Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435082, Sender [2:1508:3306], Recipient [2:769:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2025-03-28T09:40:29.417898Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2025-03-28T09:40:29.418181Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2025-03-28T09:40:29.418278Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2025-03-28T09:40:29.420096Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-03-28T09:40:29.420140Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715661, at: 72075186224037890 2025-03-28T09:40:29.420360Z node 2 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [2:769:2643], Recipient [2:769:2643]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:40:29.420406Z node 2 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:40:29.420471Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:40:29.420505Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:40:29.420545Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [71500:281474976715661] at 72075186224037890 for ReadTableScan 2025-03-28T09:40:29.420577Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit ReadTableScan 2025-03-28T09:40:29.420629Z node 2 :TX_DATASHARD TRACE: ReadTable scan complete for [71500:281474976715661] at 72075186224037890 error: , IsFatalError: 0 2025-03-28T09:40:29.420671Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 2025-03-28T09:40:29.420705Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit ReadTableScan 2025-03-28T09:40:29.420733Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit CompleteOperation 2025-03-28T09:40:29.420764Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit CompleteOperation 2025-03-28T09:40:29.420994Z node 2 :TX_DATASHARD TRACE: Execution 
status for [71500:281474976715661] at 72075186224037890 is DelayComplete 2025-03-28T09:40:29.421028Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit CompleteOperation 2025-03-28T09:40:29.421057Z node 2 :TX_DATASHARD TRACE: Add [71500:281474976715661] at 72075186224037890 to execution unit CompletedOperations 2025-03-28T09:40:29.421085Z node 2 :TX_DATASHARD TRACE: Trying to execute [71500:281474976715661] at 72075186224037890 on unit CompletedOperations 2025-03-28T09:40:29.421119Z node 2 :TX_DATASHARD TRACE: Execution status for [71500:281474976715661] at 72075186224037890 is Executed 2025-03-28T09:40:29.421140Z node 2 :TX_DATASHARD TRACE: Advance execution plan for [71500:281474976715661] at 72075186224037890 executing on unit CompletedOperations 2025-03-28T09:40:29.421170Z node 2 :TX_DATASHARD TRACE: Execution plan for [71500:281474976715661] at 72075186224037890 has finished 2025-03-28T09:40:29.421198Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:29.421225Z node 2 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037890 2025-03-28T09:40:29.421254Z node 2 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037890 has no attached operations 2025-03-28T09:40:29.421281Z node 2 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037890 2025-03-28T09:40:29.432061Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:40:29.432144Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:40:29.432183Z node 2 :TX_DATASHARD TRACE: Complete execution for [71500:281474976715661] at 72075186224037890 on unit CompleteOperation 2025-03-28T09:40:29.432243Z node 2 :TX_DATASHARD DEBUG: Complete [71500 : 281474976715661] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1476:3277], exec latency: 1 ms, propose latency: 1 ms 2025-03-28T09:40:29.432307Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:40:29.432478Z node 2 :TX_PROXY DEBUG: Actor# [2:1476:3277] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037890 marker# P12 2025-03-28T09:40:29.432564Z node 2 :TX_PROXY INFO: Actor# [2:1476:3277] txid# 281474976715661 RESPONSE Status# ExecComplete prepare time: 0.000500s execute time: 0.001500s total time: 0.002000s marker# P13 |97.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_compaction/unittest >> SequenceShardTests::Basics >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs >> TopicSessionTests::TwoSessionsWithOffsets >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert >> test_example.py::TestExample::test_example >> DiscoveryIsNotBroken::HaveKafkaSslEndpointInDiscovery [GOOD] >> Functions::CreateRequest [GOOD] >> Functions::CreateResponse [GOOD] >> KafkaProtocol::ProduceScenario >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob >> Graph::LocalBackendFullCycle [GOOD] >> Graph::MemoryBordersOnGet >> SequenceShardTests::Basics [GOOD] >> SequenceShardTests::MarkedPipeRetries >> test.py::test_local [GOOD] >> 
TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection >> SequenceShardTests::MarkedPipeRetries [GOOD] >> SequenceShardTests::FreezeRestoreRedirect >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::SequencesIndex >> Graph::MemoryBordersOnGet [GOOD] >> Graph::LocalBordersOnGet >> test_ctas.py::TestYtCtas::test_simple_ctast [GOOD] >> DataCleanup::MultipleDataCleanupsWithOldGenerations [GOOD] >> DataCleanup::ForceDataCleanupWithRestart >> SequenceShardTests::FreezeRestoreRedirect [GOOD] >> SequenceShardTests::NegativeIncrement >> test_yt_reading.py::TestYtReading::test_partitioned_reading >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections >> KeyValueGRPCService::SimpleConcatUnexistedKey [GOOD] >> KeyValueGRPCService::SimpleCopyUnexistedKey >> SequenceShardTests::NegativeIncrement [GOOD] >> KqpTpch::Query05 [GOOD] >> KqpTpch::Query06 >> TTxDataShardReshuffleKMeansScan::MainToBuild [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToPosting >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceshard/ut/unittest >> SequenceShardTests::NegativeIncrement [GOOD] Test command err: 2025-03-28T09:40:32.070460Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-03-28T09:40:32.070720Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-03-28T09:40:32.091643Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-03-28T09:40:32.096217Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-03-28T09:40:32.096284Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-03-28T09:40:32.104570Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-03-28T09:40:32.105655Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 Cycle# false State# Active 2025-03-28T09:40:32.131258Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-03-28T09:40:32.131741Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-03-28T09:40:32.131795Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SEQUENCE_ALREADY_EXISTS PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-28T09:40:32.131885Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-03-28T09:40:32.132178Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } StartValue: 100001 Cache: 10 
2025-03-28T09:40:32.132389Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 100001 Cache# 10 Increment# 1 Cycle# false State# Active 2025-03-28T09:40:32.146033Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-03-28T09:40:32.146411Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-03-28T09:40:32.146600Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 1 AllocationCount# 1 AllocationIncrement# 1 2025-03-28T09:40:32.163442Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:32.163834Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2025-03-28T09:40:32.163930Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 2 AllocationCount# 10 AllocationIncrement# 1 2025-03-28T09:40:32.176173Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:32.176543Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-03-28T09:40:32.176650Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100001 AllocationCount# 10 AllocationIncrement# 1 2025-03-28T09:40:32.192975Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:32.193521Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 50 2025-03-28T09:40:32.193631Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100011 AllocationCount# 50 AllocationIncrement# 1 2025-03-28T09:40:32.208799Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:32.209193Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 99] Cache# 0 2025-03-28T09:40:32.209247Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 99] 2025-03-28T09:40:32.209304Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:32.209566Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2025-03-28T09:40:32.209661Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 12 AllocationCount# 9223372036854775796 AllocationIncrement# 1 2025-03-28T09:40:32.224548Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:32.224922Z node 1 :SEQUENCESHARD TRACE: 
[sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2025-03-28T09:40:32.224975Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-28T09:40:32.225030Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:32.225358Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-28T09:40:32.225442Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxDropSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-28T09:40:32.237602Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2025-03-28T09:40:32.237928Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-28T09:40:32.237976Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-28T09:40:32.238039Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2025-03-28T09:40:32.289719Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-03-28T09:40:32.289831Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-03-28T09:40:32.290410Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-03-28T09:40:32.294758Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-03-28T09:40:32.296208Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-03-28T09:40:32.301041Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-03-28T09:40:32.301132Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-28T09:40:32.301202Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:32.301471Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-03-28T09:40:32.301563Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100061 AllocationCount# 10 AllocationIncrement# 1 2025-03-28T09:40:32.317463Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:32.318061Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } NextValue: 200000 NextUsed: true 2025-03-28T09:40:32.318161Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-03-28T09:40:32.339395Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-03-28T09:40:32.339867Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-03-28T09:40:32.339967Z node 1 
:SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200001 AllocationCount# 10 AllocationIncrement# 1 2025-03-28T09:40:32.357523Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:32.357991Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } Cache: 5 2025-03-28T09:40:32.358087Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-03-28T09:40:32.374958Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-03-28T09:40:32.375424Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2025-03-28T09:40:32.375517Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200011 AllocationCount# 5 AllocationIncrement# 1 2025-03-28T09:40:32.402801Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:32.403355Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxGetSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] 2025-03-28T09:40:32.403414Z node 1 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxGetSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2025-03-28T09:40:32.403473Z node 1 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxGetSequence.Complete 2025-03-28T09:40:32.900868Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-03-28T09:40:32.900970Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-03-28T09:40:32.913544Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-03-28T09:40:32.917638Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-03-28T09:40:32.917701Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-03-28T09:40:32.919170Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxMarkSchemeShardPipe.Execute SchemeShardId# 123 Generation# 1 Round# 1 2025-03-28T09:40:32.919481Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2025-03-28T09:40:32.919608Z node 2 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 Cycle# false State# Active 2025-03-28T09:40:32.944712Z node 2 :SEQUENCESHARD TRACE: [sequenceshard 7205 ... 
ENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-03-28T09:40:33.584744Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-28T09:40:33.584815Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-28T09:40:33.599515Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-03-28T09:40:33.599891Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2025-03-28T09:40:33.599943Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_FROZEN PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-28T09:40:33.600002Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:33.600344Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2025-03-28T09:40:33.600446Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2025-03-28T09:40:33.612547Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-03-28T09:40:33.612941Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Cache# 0 2025-03-28T09:40:33.613041Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] AllocationStart# 11 AllocationCount# 100 AllocationIncrement# 1 2025-03-28T09:40:33.630097Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:33.630670Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2025-03-28T09:40:33.630747Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SEQUENCE_ALREADY_ACTIVE PathId# [OwnerId: 123, LocalPathId: 43] 2025-03-28T09:40:33.630820Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-03-28T09:40:33.631130Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-03-28T09:40:33.631219Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-03-28T09:40:33.649534Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-03-28T09:40:33.649884Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-03-28T09:40:33.649986Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-03-28T09:40:33.663818Z 
node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-03-28T09:40:33.664184Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-03-28T09:40:33.664270Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2025-03-28T09:40:33.676505Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-03-28T09:40:33.676854Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-03-28T09:40:33.676910Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 42] MovedTo# 12345 2025-03-28T09:40:33.676972Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:33.677264Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2025-03-28T09:40:33.677347Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2025-03-28T09:40:33.692693Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-03-28T09:40:33.693168Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2025-03-28T09:40:33.693348Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2025-03-28T09:40:33.709127Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2025-03-28T09:40:33.709522Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2025-03-28T09:40:33.709609Z node 3 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2025-03-28T09:40:33.724586Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2025-03-28T09:40:33.724957Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2025-03-28T09:40:33.725007Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 43] MovedTo# 54321 2025-03-28T09:40:33.725067Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2025-03-28T09:40:33.725378Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-03-28T09:40:33.725480Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 111 AllocationCount# 100 
AllocationIncrement# 1 2025-03-28T09:40:33.740691Z node 3 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:34.217241Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] OnActivateExecutor 2025-03-28T09:40:34.217325Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2025-03-28T09:40:34.230017Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Execute 2025-03-28T09:40:34.234550Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2025-03-28T09:40:34.234621Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxInit.Complete 2025-03-28T09:40:34.236658Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cache: 10 Increment: -1 2025-03-28T09:40:34.236794Z node 4 :SEQUENCESHARD NOTICE: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# -9223372036854775808 MaxValue# -1 StartValue# -1 Cache# 10 Increment# -1 Cycle# false State# Active 2025-03-28T09:40:34.259919Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2025-03-28T09:40:34.260328Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-03-28T09:40:34.260445Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2025-03-28T09:40:34.275927Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:34.276392Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-03-28T09:40:34.276510Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2025-03-28T09:40:34.290143Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:34.290551Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2025-03-28T09:40:34.290650Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -21 AllocationCount# 9223372036854775788 AllocationIncrement# -1 2025-03-28T09:40:34.303050Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:34.303485Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2025-03-28T09:40:34.303562Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-28T09:40:34.303635Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:34.303987Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { 
OwnerId: 123 LocalId: 42 } Cycle: true 2025-03-28T09:40:34.304081Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2025-03-28T09:40:34.320119Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2025-03-28T09:40:34.325151Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-03-28T09:40:34.325313Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2025-03-28T09:40:34.340187Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2025-03-28T09:40:34.340640Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2025-03-28T09:40:34.340749Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2025-03-28T09:40:34.368413Z node 4 :SEQUENCESHARD TRACE: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete
>> Graph::LocalBordersOnGet [GOOD]
|97.8%| [TS] {RESULT} ydb/core/tx/sequenceshard/ut/unittest
>> TabletService_Restart::OnlyAdminsAllowed [GOOD]
>> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert [GOOD]
>> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD]
>> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values
>> test_alter_tiering.py::TestAlterTiering::test[many_tables]
>> test.py::test[solomon-BadDownsamplingInterval-] [GOOD]
>> KqpTpch::Query06 [GOOD]
>> KqpTpch::Query07
>> test.py::test[solomon-Basic-default.txt]
------- [TS] {asan, default-linux-x86_64, release} ydb/core/graph/ut/unittest >> Graph::LocalBordersOnGet [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T09:40:13.330875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:40:13.331101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:40:13.331145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:40:13.331190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:40:13.332391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:40:13.332454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:40:13.332578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0,
WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:40:13.332679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:40:13.334047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:13.435547Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:40:13.435636Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:13.463306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:13.463672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:40:13.463853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:40:13.476696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:40:13.476933Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:40:13.480876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:40:13.482087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:40:13.487957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:40:13.500167Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:40:13.500269Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:40:13.501246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:40:13.501323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:40:13.501422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:40:13.501539Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T09:40:13.510262Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:40:13.640018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:40:13.641883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:40:13.642897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:40:13.644135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:40:13.644208Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:40:13.653107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:40:13.653240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:40:13.653426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:40:13.653525Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:40:13.653556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:40:13.653584Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:40:13.655793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:40:13.655918Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:40:13.655961Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:40:13.658329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:40:13.658392Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:40:13.658453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:40:13.658515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:40:13.663799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:40:13.666517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:40:13.667860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:40:13.669204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:40:13.669391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:40:13.669474Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:40:13.671014Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:40:13.671094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:40:13.671343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:40:13.671452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:40:13.674265Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:40:13.674322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:40:13.674585Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:40:13.674949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:40:13.675532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:40:13.675587Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:40:13.675701Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:40:13.675742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:40:13.675785Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:40:13.675817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:40:13.675856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:40:13.675906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:40:13.675971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T09:40:13.676015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:40:13.676091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:40:13.676135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:40:13.676209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:40:13.678255Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:40:13.678377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:40:13.678423Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
cs 2025-03-28T09:40:35.198986Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 101 } Time: 101 2025-03-28T09:40:35.199015Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.199034Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.199057Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.199113Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 102 } Time: 102 2025-03-28T09:40:35.199129Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.199147Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.199168Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.199217Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 103 } Time: 103 2025-03-28T09:40:35.199235Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.199252Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.199275Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.199320Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 104 } Time: 104 2025-03-28T09:40:35.199337Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.199353Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.199375Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.199419Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 105 } Time: 105 2025-03-28T09:40:35.199435Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.199453Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.199472Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.199518Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 106 } Time: 106 2025-03-28T09:40:35.199535Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.199553Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.199575Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.199628Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 107 } Time: 107 2025-03-28T09:40:35.199645Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.199663Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.199682Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.199725Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 108 } Time: 108 2025-03-28T09:40:35.199742Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.199759Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.199780Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.199839Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 109 } Time: 109 2025-03-28T09:40:35.199855Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.199873Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.199892Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.199938Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 110 } Time: 110 2025-03-28T09:40:35.199952Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.199969Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.199988Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.200032Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 111 } Time: 111 
2025-03-28T09:40:35.200047Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.200064Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.200082Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.200135Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 112 } Time: 112 2025-03-28T09:40:35.200151Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.200169Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.200190Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.200235Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 113 } Time: 113 2025-03-28T09:40:35.200251Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.200267Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.200285Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.200324Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 114 } Time: 114 2025-03-28T09:40:35.200339Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.200357Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.200379Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.200422Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 115 } Time: 115 2025-03-28T09:40:35.200439Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.200456Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.200475Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.200519Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 116 } Time: 116 2025-03-28T09:40:35.200534Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.200552Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.200569Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.200612Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 117 } Time: 117 2025-03-28T09:40:35.200628Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.200645Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.200664Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.200707Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 118 } Time: 118 2025-03-28T09:40:35.200722Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.200737Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.200757Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.200802Z node 6 :GRAPH TRACE: SHARD Metrics { Name: "test.metric0" Value: 119 } Time: 119 2025-03-28T09:40:35.200818Z node 6 :GRAPH TRACE: SHARD Executing direct TxStoreMetrics 2025-03-28T09:40:35.200834Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Execute 2025-03-28T09:40:35.200853Z node 6 :GRAPH TRACE: DB Stored metrics 2025-03-28T09:40:35.200920Z node 6 :GRAPH TRACE: SHARD Handle TEvGraph::TEvGetMetrics from [6:572:2501] 2025-03-28T09:40:35.200974Z node 6 :GRAPH DEBUG: SHARD TTxGetMetrics::Execute 2025-03-28T09:40:35.201014Z node 6 :GRAPH DEBUG: DB Querying from 0 to 119 2025-03-28T09:40:35.218505Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.218587Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.218612Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.218637Z node 6 :GRAPH DEBUG: SHARD 
TTxStoreMetrics::Complete 2025-03-28T09:40:35.218659Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.218680Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.218722Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.218748Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.218773Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.218799Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.218825Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.218851Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.218875Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.218897Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.218941Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.218966Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.218993Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219016Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219036Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219060Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219082Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219105Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219126Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219147Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219170Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219193Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219215Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219237Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219258Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219279Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219299Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219318Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219340Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219363Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219384Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219406Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219427Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219450Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219471Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219493Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219514Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219534Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219558Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219581Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219602Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219622Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219646Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219669Z node 6 :GRAPH 
DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219691Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219716Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219737Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219758Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219778Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219799Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219820Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219841Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219863Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219884Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219903Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219923Z node 6 :GRAPH DEBUG: SHARD TTxStoreMetrics::Complete 2025-03-28T09:40:35.219958Z node 6 :GRAPH DEBUG: SHARD TTxGetMetric::Complete 2025-03-28T09:40:35.220014Z node 6 :GRAPH TRACE: SHARD TxGetMetrics returned 60 points for request 3 2025-03-28T09:40:35.220186Z node 6 :GRAPH TRACE: SVC TEvMetricsResult 3 2025-03-28T09:40:35.220244Z node 6 :GRAPH TRACE: SVC TEvMetricsResult found request 3 resending to [6:573:2502]
>> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection [GOOD]
>> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection
|97.8%| [TS] {RESULT} ydb/core/graph/ut/unittest
>> KafkaProtocol::ProduceScenario [GOOD]
>> KafkaProtocol::FetchScenario
>> DataShardReplication::SimpleApplyChanges
>> TMemoryController::MemTable [GOOD]
>> TMemoryController::ResourceBroker
>> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi [GOOD]
>> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter
>> TCreateAndDropViewTest::DropView [GOOD]
>> TCreateAndDropViewTest::DropViewDisabledFeatureFlag
>> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection [GOOD]
>> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount
------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/tablet/ut/unittest >> TabletService_Restart::OnlyAdminsAllowed [GOOD]
Test command err:
2025-03-28T09:39:52.630826Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:52.631211Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:52.631258Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005c8/r3tmp/tmp5fYPcN/pdisk_1.dat 2025-03-28T09:39:53.148701Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:39:53.212163Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:53.258412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:53.259164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:53.272080Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected ... reading schema ... changing schema (dry run) ... reading schema ... changing schema ... reading schema 2025-03-28T09:39:57.444453Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:57.444695Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:57.444733Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005c8/r3tmp/tmpGxHtSs/pdisk_1.dat 2025-03-28T09:39:57.697145Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:39:57.719509Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:57.755467Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:57.755580Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:57.767055Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... reading schema (without token) ... reading schema (non-admin token) ... reading schema (admin token) 2025-03-28T09:40:01.175585Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:01.175758Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:01.175843Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005c8/r3tmp/tmprNnj5T/pdisk_1.dat 2025-03-28T09:40:01.464943Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:01.499553Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:01.536573Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:01.536682Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:01.548399Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:04.950146Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:04.950395Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:04.950504Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005c8/r3tmp/tmppu1pSn/pdisk_1.dat 2025-03-28T09:40:05.284786Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:05.313249Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:05.353777Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:05.353892Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:05.365713Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:09.439662Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:297:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:09.439929Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:09.440108Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005c8/r3tmp/tmpWUPPbK/pdisk_1.dat 2025-03-28T09:40:09.763083Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:09.799912Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:09.840729Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:09.840898Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:09.852698Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:14.357772Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:14.357978Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:14.358075Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005c8/r3tmp/tmpatW0KM/pdisk_1.dat 2025-03-28T09:40:14.657265Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:14.690257Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:14.728367Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:14.728524Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:14.740199Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:19.063461Z node 7 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [7:323:2365], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:19.063793Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:19.063865Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005c8/r3tmp/tmpnOUGlY/pdisk_1.dat 2025-03-28T09:40:19.401427Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:19.436283Z node 7 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:19.476174Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:19.476348Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:19.487952Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:23.840107Z node 8 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [8:297:2343], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:23.840593Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:40:23.840657Z node 8 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005c8/r3tmp/tmpNPR6vX/pdisk_1.dat 2025-03-28T09:40:24.222299Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:24.256112Z node 8 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:24.294741Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:24.294914Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:24.306550Z node 8 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:28.746220Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [9:323:2366], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:28.746519Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:28.746659Z node 9 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005c8/r3tmp/tmpzHGuxi/pdisk_1.dat 2025-03-28T09:40:29.084008Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:29.122108Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:29.161956Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:29.162147Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:29.174045Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected ... restarting tablet 72057594046644480 2025-03-28T09:40:29.316125Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:34.577418Z node 10 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [10:323:2365], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:34.577579Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:34.577651Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0005c8/r3tmp/tmpmDbhKP/pdisk_1.dat 2025-03-28T09:40:34.924462Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:34.978089Z node 10 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:35.023157Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:35.023338Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:35.035672Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected ... restarting tablet 72057594046644480 (without token) ... restarting tablet 72057594046644480 (non-admin token) ... restarting tablet 72057594046644480 (admin token) 2025-03-28T09:40:35.411061Z node 10 :IMPORT WARN: Table profiles were not loaded |97.8%| [TM] {RESULT} ydb/core/grpc_services/tablet/ut/unittest >> BulkUpsert::BulkUpsert >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/py3test >> test.py::test_local [GOOD] |97.8%| [TM] {RESULT} ydb/tests/functional/serializable/py3test >> TopicSessionTests::TwoSessionsWithOffsets [GOOD] >> TRangeTreap::Simple [GOOD] >> TRangeTreap::Sequential >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection >> test_example.py::TestExample::test_example [GOOD] >> test_example.py::TestExample::test_example2 [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values [GOOD] >> TopicSessionTests::BadDataSessionError >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] >> KeyValueGRPCService::SimpleCopyUnexistedKey [GOOD] >> KeyValueGRPCService::SimpleWriteRead >> TTxDataShardReshuffleKMeansScan::BuildToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToBuild >> test_yt_reading.py::TestYtReading::test_partitioned_reading [GOOD] >> test_yt_reading.py::TestYtReading::test_block_reading >> TSequence::SequencesIndex [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceFromSelect >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding >> 
DataCleanup::ForceDataCleanupWithRestart [GOOD] >> DataCleanup::OutReadSetsCleanedAfterCopyTable >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] >> HttpRouter::Basic [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings |97.8%| [TA] $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} |97.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/public_http/ut/unittest >> HttpRouter::Basic [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data |97.8%| [TS] {RESULT} ydb/core/public_http/ut/unittest >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution >> DataShardReplication::SimpleApplyChanges [GOOD] >> test_http_api.py::TestHttpApi::test_simple_analytics_query [GOOD] >> test_http_api.py::TestHttpApi::test_empty_query [GOOD] >> KafkaProtocol::FetchScenario [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data [GOOD] >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> DataShardReplication::SplitMergeChanges >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] >> test_http_api.py::TestHttpApi::test_warning >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel >> TCreateAndDropViewTest::DropNonexistingView >> TestFilterSet::FilterGroup >> Coordinator::LastStepSubscribe [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding >> KafkaProtocol::BalanceScenario >> test_query_cache.py::TestQueryCache::test >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding >> Coordinator::RestoreDomainConfiguration >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter >> TMemoryController::ResourceBroker [GOOD] >> TMemoryController::ResourceBroker_ConfigLimit >> TTxDataShardBuildIndexScan::RunScan >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] >> KqpTpch::Query07 [GOOD] >> KqpTpch::Query08 >> TTxDataShardReshuffleKMeansScan::BuildToBuild [GOOD] >> test_http_api.py::TestHttpApi::test_warning [GOOD] >> TIndexProcesorTests::TestOver1000Queues [GOOD] >> test_http_api.py::TestHttpApi::test_get_unknown_query [GOOD] >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] >> KeyValueGRPCService::SimpleWriteRead [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] [GOOD] >> test_http_api.py::TestHttpApi::test_unauthenticated [GOOD] >> test_http_api.py::TestHttpApi::test_create_idempotency >> 
KeyValueGRPCService::SimpleWriteReadWithIncorreectPath >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] |97.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} >> TopicSessionTests::BadDataSessionError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_reshuffle_kmeans/unittest >> TTxDataShardReshuffleKMeansScan::BuildToBuild [GOOD] Test command err: 2025-03-28T09:40:22.079983Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792956464828464:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:22.080089Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a32/r3tmp/tmph0PfBB/pdisk_1.dat 2025-03-28T09:40:22.532554Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:22.547992Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:22.548672Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:22.570083Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:22.637972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:22.676675Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:22.714810Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7486792956464829036:2295] 2025-03-28T09:40:22.715148Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:22.743240Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:22.743305Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:40:22.746465Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:40:22.746552Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:40:22.746592Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:40:22.754978Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:40:22.755086Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:40:22.755125Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:7486792956464829052:2295] in generation 1 2025-03-28T09:40:22.767431Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:40:22.809015Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:40:22.810380Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:40:22.810447Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:7486792956464829054:2296] 2025-03-28T09:40:22.810482Z node 1 :TX_DATASHARD DEBUG: Trying to 
activate change sender: at tablet: 72075186224037888 2025-03-28T09:40:22.810494Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:40:22.810502Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:22.811591Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792956464829033:2296], serverId# [1:7486792956464829051:2304], sessionId# [0:0:0] 2025-03-28T09:40:22.811730Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:40:22.811791Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:40:22.811809Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:22.811882Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:22.811899Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:40:22.811914Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:22.811956Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:40:22.812313Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:40:22.813165Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2025-03-28T09:40:22.815017Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:22.815338Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:40:22.816252Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:40:22.818701Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792956464829068:2313], serverId# [1:7486792956464829069:2314], sessionId# [0:0:0] 2025-03-28T09:40:22.825827Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710657 at step 1743154822864 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154822864 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:40:22.825861Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:22.825997Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:22.826081Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:22.826108Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:40:22.826155Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743154822864:281474976710657] in PlanQueue unit at 72075186224037888 2025-03-28T09:40:22.826399Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743154822864:281474976710657 keys extracted: 0 2025-03-28T09:40:22.826538Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 
0 planned 1 2025-03-28T09:40:22.826614Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:22.826671Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:40:22.829464Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:40:22.831236Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:22.832474Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1743154822863 2025-03-28T09:40:22.832488Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:22.832519Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1743154822871 2025-03-28T09:40:22.832588Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743154822864} 2025-03-28T09:40:22.832621Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:22.832661Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:22.832697Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:40:22.832728Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:40:22.832787Z node 1 :TX_DATASHARD DEBUG: Complete [1743154822864 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7486792956464828851:2180], exec latency: 4 ms, propose latency: 6 ms 2025-03-28T09:40:22.832852Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2025-03-28T09:40:22.832943Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:22.836760Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2025-03-28T09:40:22.836810Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:40:22.847623Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792956464829107:2342], serverId# [1:7486792956464829108:2343], sessionId# [0:0:0] 2025-03-28T09:40:22.848792Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:40:22.848934Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710658 at tablet 72075186224037888 2025-03-28T09:40:22.852295Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:40:22.853532Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976710658 at step 1743154822899 at tablet 72075186224037888 { Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154822899 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:40:22.853554Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:22.853701Z node 1 :TX_DATASHARD DEBUG: 
TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:22.853717Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:40:22.853732Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1743154822899:281474976710658] in PlanQueue unit at 72075186224037888 2025-03-28T09:40:22.853887Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1743154822899:281474976710658 keys extracted: 0 2025-03-28T09:40:22.854191Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:22.854789Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1743154822899} 2025-03-28T09:40:22.854826Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:22.854867Z node 1 :TX_DATASHARD DEBUG: Complete [1743154822899 : 281474976710658] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7486792956464829102:2338], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:40:22.854900Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:22.866120Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:7486792956464829118:2353], serverId# [1:7486792956464829119:2354], sessionId# [0:0:0] 2025-03-28T09:40:22.867489Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:40:22.867584Z node 1 :TX_DATASHARD DEBUG: Prepared Snapshot transaction txId 281474976710659 at tablet 72075186224037888 2025-03-28T09:40:22.869059Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037 ... 
[1743154845187:281474976715688] in PlanQueue unit at 72075186224037895 2025-03-28T09:40:45.141227Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037895 loaded tx from db 1743154845187:281474976715688 keys extracted: 0 2025-03-28T09:40:45.141335Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:40:45.141434Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037895 2025-03-28T09:40:45.141487Z node 5 :TX_DATASHARD INFO: Trying to DROP TABLE at 72075186224037895 2025-03-28T09:40:45.141881Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:45.144860Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:45.144870Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037895 2025-03-28T09:40:45.145669Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037895 step# 1743154845187} 2025-03-28T09:40:45.145709Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037895 2025-03-28T09:40:45.145747Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037895 2025-03-28T09:40:45.145798Z node 5 :TX_DATASHARD DEBUG: Complete [1743154845187 : 281474976715688] from 72075186224037895 at tablet 72075186224037895 send result to client [5:7486793031974926029:2144], exec latency: 0 ms, propose latency: 4 ms 2025-03-28T09:40:45.145844Z node 5 :TX_DATASHARD INFO: 72075186224037895 Sending notify to schemeshard 72057594046644480 txId 281474976715688 state PreOffline TxInFly 0 2025-03-28T09:40:45.145886Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2025-03-28T09:40:45.147083Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715688 datashard 72075186224037895 state PreOffline 2025-03-28T09:40:45.147136Z node 5 :TX_DATASHARD DEBUG: 72075186224037895 Got TEvSchemaChangedResult from SS at 72075186224037895 2025-03-28T09:40:45.148207Z node 5 :TX_DATASHARD DEBUG: 72075186224037895 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2025-03-28T09:40:45.148275Z node 5 :TX_DATASHARD INFO: 72075186224037895 Initiating switch from PreOffline to Offline state 2025-03-28T09:40:45.150116Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480 2025-03-28T09:40:45.150507Z node 5 :TX_DATASHARD INFO: 72075186224037895 Reporting state Offline to schemeshard 72057594046644480 2025-03-28T09:40:45.152042Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:45.152117Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037895 2025-03-28T09:40:45.153779Z node 5 :TX_DATASHARD DEBUG: Handle TEvStateChangedResult datashard 72075186224037895 state Offline 2025-03-28T09:40:45.157043Z node 5 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 
72075186224037896 actor [5:7486793053449764040:2362] 2025-03-28T09:40:45.157283Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:45.158311Z node 5 :TX_DATASHARD INFO: OnTabletStop: 72075186224037895 reason = ReasonStop 2025-03-28T09:40:45.158359Z node 5 :TX_DATASHARD DEBUG: Server disconnected at leader tablet# 72075186224037895, clientId# [5:7486793053449763967:3189], serverId# [5:7486793053449763968:3190], sessionId# [0:0:0] 2025-03-28T09:40:45.158518Z node 5 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 5, TabletId: 72075186224037895 not found 2025-03-28T09:40:45.158821Z node 5 :TX_DATASHARD INFO: OnTabletDead: 72075186224037895 2025-03-28T09:40:45.158903Z node 5 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037895 2025-03-28T09:40:45.167891Z node 5 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:45.167985Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:40:45.169846Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037896 2025-03-28T09:40:45.169893Z node 5 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037896 2025-03-28T09:40:45.169924Z node 5 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037896 2025-03-28T09:40:45.170289Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:40:45.170335Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:40:45.170360Z node 5 :TX_DATASHARD DEBUG: DataShard 72075186224037896 persisting started state actor id [5:7486793053449764066:2362] in generation 1 2025-03-28T09:40:45.171588Z node 5 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:40:45.171620Z node 5 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037896 2025-03-28T09:40:45.171683Z node 5 :TX_DATASHARD DEBUG: 72075186224037896 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:40:45.171719Z node 5 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037896, actorId: [5:7486793053449764068:2363] 2025-03-28T09:40:45.171731Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037896 2025-03-28T09:40:45.171746Z node 5 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037896, state: WaitScheme 2025-03-28T09:40:45.171758Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-03-28T09:40:45.171846Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037896 2025-03-28T09:40:45.171938Z node 5 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037896 2025-03-28T09:40:45.171968Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-03-28T09:40:45.171984Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:45.171999Z node 5 :TX_DATASHARD INFO: No tx to execute at 72075186224037896 TxInFly 0 2025-03-28T09:40:45.172015Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-03-28T09:40:45.172215Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037896, clientId# [5:7486793053449764045:3249], serverId# [5:7486793053449764047:3250], sessionId# [0:0:0] 2025-03-28T09:40:45.172309Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037896 
2025-03-28T09:40:45.172494Z node 5 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037896 txId 281474976715689 ssId 72057594046644480 seqNo 2:16 2025-03-28T09:40:45.172550Z node 5 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715689 at tablet 72075186224037896 2025-03-28T09:40:45.172953Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037896 2025-03-28T09:40:45.175521Z node 5 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037896 2025-03-28T09:40:45.175607Z node 5 :TX_DATASHARD DEBUG: 72075186224037896 not sending time cast registration request in state WaitScheme 2025-03-28T09:40:45.180758Z node 5 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037896, clientId# [5:7486793053449764074:3270], serverId# [5:7486793053449764076:3272], sessionId# [0:0:0] 2025-03-28T09:40:45.182046Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:45.182062Z node 5 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715689 at step 1743154845222 at tablet 72075186224037896 { Transactions { TxId: 281474976715689 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154845222 MediatorID: 72057594046382081 TabletID: 72075186224037896 } 2025-03-28T09:40:45.182091Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-03-28T09:40:45.182225Z node 5 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037896 2025-03-28T09:40:45.182339Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-03-28T09:40:45.182365Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:40:45.182387Z node 5 :TX_DATASHARD DEBUG: Found ready operation [1743154845222:281474976715689] in PlanQueue unit at 72075186224037896 2025-03-28T09:40:45.184315Z node 5 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037896 loaded tx from db 1743154845222:281474976715689 keys extracted: 0 2025-03-28T09:40:45.184492Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:40:45.184726Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037896 2025-03-28T09:40:45.184788Z node 5 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037896 tableId# [OwnerId: 72057594046644480, LocalPathId: 14] schema version# 1 2025-03-28T09:40:45.185262Z node 5 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037896 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:40:45.185648Z node 5 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:45.188203Z node 5 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037896 step# 1743154845222} 2025-03-28T09:40:45.188265Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-03-28T09:40:45.188301Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037896 time 1743154845221 2025-03-28T09:40:45.188316Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 
2025-03-28T09:40:45.188338Z node 5 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037896 coordinator 72057594046316545 last step 0 next step 1743154845229 2025-03-28T09:40:45.188527Z node 5 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037896 2025-03-28T09:40:45.188554Z node 5 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037896 2025-03-28T09:40:45.188579Z node 5 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037896 2025-03-28T09:40:45.188628Z node 5 :TX_DATASHARD DEBUG: Complete [1743154845222 : 281474976715689] from 72075186224037896 at tablet 72075186224037896 send result to client [5:7486793031974926029:2144], exec latency: 0 ms, propose latency: 4 ms 2025-03-28T09:40:45.188662Z node 5 :TX_DATASHARD INFO: 72075186224037896 Sending notify to schemeshard 72057594046644480 txId 281474976715689 state Ready TxInFly 0 2025-03-28T09:40:45.188700Z node 5 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2025-03-28T09:40:45.189908Z node 5 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715689 datashard 72075186224037896 state Ready 2025-03-28T09:40:45.189955Z node 5 :TX_DATASHARD DEBUG: 72075186224037896 Got TEvSchemaChangedResult from SS at 72075186224037896 |97.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_reshuffle_kmeans/unittest >> TopicSessionTests::WrongFieldType >> TSequence::CreateTableWithDefaultFromSequenceFromSelect [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceBadRequest |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_import/py3test >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] |97.9%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_import/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/example/py3test >> test_example.py::TestExample::test_example2 [GOOD] |97.9%| [TM] {RESULT} ydb/tests/example/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/py3test >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
|97.9%| [TM] {RESULT} ydb/tests/tools/kqprun/tests/py3test >> TestFilterSet::FilterGroup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/yc_search_ut/unittest >> TIndexProcesorTests::TestOver1000Queues [GOOD] Test command err: 2025-03-28T09:40:01.415422Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792866124116974:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:01.415518Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0009f3/r3tmp/tmpAnqT7T/pdisk_1.dat 2025-03-28T09:40:01.731951Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16679, node 1 2025-03-28T09:40:01.796422Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:40:01.796472Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:40:01.796481Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:40:01.796654Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:40:01.818304Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:01.818436Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:01.820060Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:9720 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:40:02.105725Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:40:02.155420Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 TClient is connected to server localhost:9720 waiting... 
2025-03-28T09:40:04.686865Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:1, at schemeshard: 72057594046644480 2025-03-28T09:40:04.688782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 TClient is connected to server localhost:9720 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743154802186 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SQS" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1743154802200 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: ".sys" PathId: 184467... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:40:05.162300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:40:05.173913Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2025-03-28T09:40:05.179105Z node 1 :TX_PROXY ERROR: Actor# [1:7486792883303987060:2471] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) TClient is connected to server localhost:9720 waiting... 2025-03-28T09:40:05.400381Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710664, at schemeshard: 72057594046644480 2025-03-28T09:40:05.447363Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:1, at schemeshard: 72057594046644480 2025-03-28T09:40:05.448963Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 ===Execute query: UPSERT INTO `/Root/SQS/SingleCreateQueueEvent/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "queue1", 1, "myQueueCustomName", 1743154805231, "myFolder", "{\"k1\": \"v1\"}"); 2025-03-28T09:40:05.646933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792883303987248:2366], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:05.646933Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792883303987236:2363], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:05.647084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:05.662900Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710667:3, at schemeshard: 72057594046644480 2025-03-28T09:40:05.677825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792883303987250:2367], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710667 completed, doublechecking } 2025-03-28T09:40:05.777188Z node 1 :TX_PROXY ERROR: Actor# [1:7486792883303987304:2632] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 16], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:40:06.418835Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792866124116974:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:06.471141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:40:06.942936Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710669. Ctx: { TraceId: 01jqe1yyvscht6v9r04dewwavg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQyZWZjNjctMzFkZWEzNWMtNjcxNGVjYy1kYjU2YTgxMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:07.612427Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710670. Ctx: { TraceId: 01jqe1z07n3vpknasejz2awr2f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=N2RlN2IzZDItZDMzZWRlYWUtMjZlMzE3NzQtM2RhYjQ1MjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:07.826419Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710671. Ctx: { TraceId: 01jqe1z0tx2az5dkmt0xhmfhwf, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTA1ODFkMWUtZWVjYjY2MDQtMjE5YWQ3ODQtNjQxNTJjZWY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:08.022211Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710672. Ctx: { TraceId: 01jqe1z10y6q52wcwcmg9bb549, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZmNiMGJmZWMtMzMwMTA4ZjUtNmUxMzJmZjMtMzFlMmM2MjY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-28T09:40:05.231000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-28T09:40:05.231000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-03-28T09:40:08.026399Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2025-03-28T09:40:08.130667Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710673. Ctx: { TraceId: 01jqe1z1a183rsfgbtwg3h74ck, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZDcwZDkzZjUtMzU2YTU0NWItYjdiYzY1MTItNWFmZmQ4Y2E=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:08.151683Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710674. 
Ctx: { TraceId: 01jqe1z1a931gsp0vq3k55v5ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTYyNjAwOTgtY2NjNTM1NTgtNzJmYTI1ODgtYTBlYzhkZDE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:08.262317Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710675. Ctx: { TraceId: 01jqe1z1e296767cc1c0v3h01m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2M0Zjc3MTAtYWEwNjQ5NTYtNDZkZjJhOTktMTdlMDVmODM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:08.280648Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710676. Ctx: { TraceId: 01jqe1z1ecch3k6b4zmtm65wys, Database: , DatabaseId: /Root, SessionId: ydb://session ... : default}. Database not set, use /Root 2025-03-28T09:40:17.853061Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710748. Ctx: { TraceId: 01jqe1zan85b3ffasst02b1a0p, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YjRiM2YwOTctMjZhNjU5ZGYtZWI3MDgwOGYtYzgzZTJmODY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-28T09:40:15.318000Z","resource_id":"deleting2","name":"myQueueCustomName","service":"message-queue","deleted":"2025-03-28T09:40:15.318000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-28T09:40:15.318000Z","resource_id":"deleting1","name":"myQueueCustomName","service":"message-queue","deleted":"2025-03-28T09:40:15.318000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-28T09:40:15.318000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-28T09:40:15.318000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-28T09:40:14.000000Z","resource_id":"existing1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-03-28T09:40:17.859750Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: 
{"resource_type":"message-queue","timestamp":"2025-03-28T09:40:14.000000Z","resource_id":"existing2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-03-28T09:40:17.859822Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-28T09:40:15.318000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-03-28T09:40:17.859853Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-28T09:40:14.000000Z","resource_id":"existing3","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-03-28T09:40:17.859884Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2025-03-28T09:40:15.318000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2025-03-28T09:40:17.859938Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2025-03-28T09:40:17.968170Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710749. Ctx: { TraceId: 01jqe1zaxbe1th8w1jf2xmspvs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWUzZjViNzQtOTgxYjliNTQtMTk2M2QzNmQtZDQ1ZjU5ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:17.976152Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710750. Ctx: { TraceId: 01jqe1zaxndkssx0expq6et1e1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWUzZjViNzQtOTgxYjliNTQtMTk2M2QzNmQtZDQ1ZjU5ZmY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:17.986033Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710751. Ctx: { TraceId: 01jqe1zaxy6dmk41bwpc7txen4, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTFlNTBkOWEtYzUxMGIyODMtOTExMjcxMmItOTNhNDA1NDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:18.112160Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710752. Ctx: { TraceId: 01jqe1zb1sbhcfwjv9mavgc03n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNlOWMyMDAtYzhlMDg0ODQtN2Q5Y2E0MjUtNzg1Mzk0NDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:18.119655Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710753. Ctx: { TraceId: 01jqe1zb25ba7dsbw7y63w4zxe, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWNlOWMyMDAtYzhlMDg0ODQtN2Q5Y2E0MjUtNzg1Mzk0NDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:18.132531Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710754. 
Ctx: { TraceId: 01jqe1zb2hfwfjcq6xj3bwwt84, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTc1ZjIzNGYtMTFlYWE2YzYtYzg2NGE1YjQtYTBmOTFjZg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:18.244732Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710755. Ctx: { TraceId: 01jqe1zb605xegenbt71e055se, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2NkOGI3NmMtMjEyODAxZTctNGIwMzc5MmMtN2EzMzVmMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:18.245125Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710756. Ctx: { TraceId: 01jqe1zatk8n7c084c1p41qvxg, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQyZWZjNjctMzFkZWEzNWMtNjcxNGVjYy1kYjU2YTgxMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:18.251771Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710757. Ctx: { TraceId: 01jqe1zb6a32kc9evdmc8pgtcz, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=M2NkOGI3NmMtMjEyODAxZTctNGIwMzc5MmMtN2EzMzVmMzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root TClient is connected to server localhost:9720 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1743154802186 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 5 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".metadata" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710667 CreateStep: 1743154805714 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "SQS" Pat... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:40:18.482726Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710758:0, at schemeshard: 72057594046644480 waiting... Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges) 2025-03-28T09:40:18.501732Z node 1 :TX_PROXY ERROR: Actor# [1:7486792939138564015:3613] txid# 281474976710759, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 2], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } TClient is connected to server localhost:9720 waiting... 
2025-03-28T09:40:18.771121Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710760, at schemeshard: 72057594046644480 2025-03-28T09:40:18.816627Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710761:1, at schemeshard: 72057594046644480 2025-03-28T09:40:18.822207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710762:0, at schemeshard: 72057594046644480 ===Started add queue batch 2025-03-28T09:40:43.284031Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710763. Ctx: { TraceId: 01jqe202xf1cwkfcmbabm98hgw, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQyZWZjNjctMzFkZWEzNWMtNjcxNGVjYy1kYjU2YTgxMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:44.495839Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710764. Ctx: { TraceId: 01jqe204caaa5r9yn1prx1m59v, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YzQyZWZjNjctMzFkZWEzNWMtNjcxNGVjYy1kYjU2YTgxMw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:45.783130Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710765. Ctx: { TraceId: 01jqe205ta03zr744bk6j1mtvx, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVhMzljZjUtNDVlNzc4NDItMjY0NWNlNzktZWU5YWQwYjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:45.856778Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710766. Ctx: { TraceId: 01jqe2064y2vc88jbk9gka1kt2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVhMzljZjUtNDVlNzc4NDItMjY0NWNlNzktZWU5YWQwYjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:45.926514Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710767. Ctx: { TraceId: 01jqe20674b391thmrw47rgpyd, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVhMzljZjUtNDVlNzc4NDItMjY0NWNlNzktZWU5YWQwYjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:45.936041Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710768. Ctx: { TraceId: 01jqe2067denhd641tq6yvhtfc, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjVhMzljZjUtNDVlNzc4NDItMjY0NWNlNzktZWU5YWQwYjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:46.109428Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710769. Ctx: { TraceId: 01jqe2067q2vjrxeyy8bb43cb2, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZWFiOTdiNjUtNWVkOGZkZGQtMTU0ZWYzMTItYmViYmRkMmI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root |97.9%| [TM] {RESULT} ydb/core/ymq/actor/yc_search_ut/unittest >> TestFilterSet::DuplicationValidation >> DataCleanup::OutReadSetsCleanedAfterCopyTable [GOOD] >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] >> Coordinator::RestoreDomainConfiguration [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false >> DataShardReplication::SplitMergeChanges [GOOD] >> DataShardReplication::SplitMergeChangesReboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_data_cleanup/unittest >> DataCleanup::OutReadSetsCleanedAfterCopyTable [GOOD] Test command err: 2025-03-28T09:40:12.988956Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:326:2368], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-28T09:40:12.989556Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:40:12.989627Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007ea/r3tmp/tmpJrEF4D/pdisk_1.dat
2025-03-28T09:40:13.637761Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-28T09:40:13.691268Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:40:13.745493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:40:13.745655Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:40:13.758361Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:40:13.857287Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-28T09:40:14.383606Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:769:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:14.389383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:779:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:14.389503Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:14.399549Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-28T09:40:14.560491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:783:2658], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-28T09:40:14.621898Z node 1 :TX_PROXY ERROR: Actor# [1:857:2701] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:40:15.892608Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1z7dcd8n8cb404z54y0ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzEzZTI5ZjMtOTFjMjQ5OWYtNjgzYjYzNjktNDNkYTE0ZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:40:15.943370Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1z7dcd8n8cb404z54y0ae, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MzEzZTI5ZjMtOTFjMjQ5OWYtNjgzYjYzNjktNDNkYTE0ZDU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:40:20.048984Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:304:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-28T09:40:20.049193Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:40:20.049231Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007ea/r3tmp/tmpDZXa9n/pdisk_1.dat
2025-03-28T09:40:20.336708Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-28T09:40:20.366062Z node 2 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:40:20.403032Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:40:20.403152Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:40:20.414591Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:40:20.501328Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-28T09:40:20.841616Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:763:2645], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:20.841713Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:774:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:20.841787Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:20.847088Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-28T09:40:20.983782Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:777:2653], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-28T09:40:21.020505Z node 2 :TX_PROXY ERROR: Actor# [2:850:2695] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:40:21.328245Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1zdq70ttjwm81h3q5n3x9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWYxMTg4MTctNTE2Nzg0LTE3OWQxMjhiLTk4NGU1Yjlk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:40:21.334452Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1zdq70ttjwm81h3q5n3x9, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=YWYxMTg4MTctNTE2Nzg0LTE3OWQxMjhiLTk4NGU1Yjlk, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:40:25.644907Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-28T09:40:25.645094Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:40:25.645186Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007ea/r3tmp/tmpH5oYRJ/pdisk_1.dat
2025-03-28T09:40:25.920929Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-28T09:40:25.947544Z node 3 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:40:25.985920Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:40:25.986074Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:40:25.997625Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:40:26.082718Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-28T09:40:26.466503Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:770:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:26.466626Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:26.466735Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:26.472297Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-28T09:40:26.630781Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:784:2659], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-28T09:40:26.665371Z node 3 :TX_PROXY ERROR: Actor# [3:858:2702] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:40:26.943077Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1zk703nffyj92ychs9khk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=YWNiMmI0MDUtNDY0YjNlZDYtZjZhZGZmMGYtZTlkZjc1ZDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:40:26.948642Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1z ... , but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-28T09:40:31.453654Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:40:31.490817Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:40:31.490967Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:40:31.502393Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:40:31.590978Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-28T09:40:31.991881Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:769:2650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:31.992046Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:779:2655], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:31.992500Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:31.998183Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-28T09:40:32.181826Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:783:2658], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-28T09:40:32.219559Z node 4 :TX_PROXY ERROR: Actor# [4:857:2701] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:40:32.570195Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1zrkn2c1tmp52t7qadrxk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OWViMjRmN2QtNmE3NzNmNDUtMzNiY2FmM2MtODk4Nzg2Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:40:32.576449Z node 4 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1zrkn2c1tmp52t7qadrxk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=OWViMjRmN2QtNmE3NzNmNDUtMzNiY2FmM2MtODk4Nzg2Zg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:40:37.841311Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:304:2348], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-28T09:40:37.841570Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:40:37.841614Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007ea/r3tmp/tmpPQG7dO/pdisk_1.dat
2025-03-28T09:40:38.177116Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-28T09:40:38.209193Z node 5 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:40:38.249907Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:40:38.250080Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:40:38.261722Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:40:38.350131Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-28T09:40:38.706123Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:770:2651], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:38.706205Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [5:780:2656], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:38.706259Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:38.711425Z node 5 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480
2025-03-28T09:40:38.878559Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [5:784:2659], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking }
2025-03-28T09:40:38.915133Z node 5 :TX_PROXY ERROR: Actor# [5:858:2702] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:40:39.329372Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1zz5gbwn9rsfbdfb71p16, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NDhhZDUwYjItODk5MjE4MWQtOWMwN2U4N2MtOTJkYTA2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:40:39.336031Z node 5 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1zz5gbwn9rsfbdfb71p16, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=5&id=NDhhZDUwYjItODk5MjE4MWQtOWMwN2U4N2MtOTJkYTA2ZTE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:40:44.982140Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:324:2367], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-28T09:40:44.982409Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T09:40:44.982517Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007ea/r3tmp/tmpBzZQnw/pdisk_1.dat
2025-03-28T09:40:45.410908Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-28T09:40:45.451648Z node 6 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:40:45.495816Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:40:45.495985Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:40:45.507723Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T09:40:45.599467Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480
2025-03-28T09:40:46.423953Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:878:2729], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:46.424085Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [6:887:2734], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:46.424182Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T09:40:46.436233Z node 6 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480
2025-03-28T09:40:46.629788Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [6:892:2737], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking }
2025-03-28T09:40:46.670308Z node 6 :TX_PROXY ERROR: Actor# [6:953:2779] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T09:40:46.985720Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe206pn6ybhgbz5kkc8bj11, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=OTY5NDJiNTktZTQ0MTMzNDMtNzMzYmQxNjItZTdmMmViYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:40:46.992571Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe206pn6ybhgbz5kkc8bj11, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=OTY5NDJiNTktZTQ0MTMzNDMtNzMzYmQxNjItZTdmMmViYWI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:40:47.340871Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe20793d2pwxz5d7cacejf0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=ZGQ3N2QwMmUtMWUyYWUzMGEtZjBlMmYxY2QtODM0Yzc1Mzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
2025-03-28T09:40:47.348300Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe20793d2pwxz5d7cacejf0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=ZGQ3N2QwMmUtMWUyYWUzMGEtZjBlMmYxY2QtODM0Yzc1Mzc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root
|97.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_data_cleanup/unittest
>> TCreateAndDropViewTest::DropNonexistingView [GOOD]
>> TCreateAndDropViewTest::CallDropViewOnTable
>> TMemoryController::ResourceBroker_ConfigLimit [GOOD]
>> TMemTableMemoryConsumersCollection::Empty [GOOD]
>> TMemTableMemoryConsumersCollection::Destruction [GOOD]
>> TMemTableMemoryConsumersCollection::Register [GOOD]
>> TMemTableMemoryConsumersCollection::Unregister [GOOD]
>> TMemTableMemoryConsumersCollection::SetConsumption [GOOD]
>> TMemTableMemoryConsumersCollection::CompactionComplete [GOOD]
>> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD]
>> test_http_api.py::TestHttpApi::test_create_idempotency [GOOD]
>> test_http_api.py::TestHttpApi::test_stop_idempotency
>> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD]
>> TRangeTreap::Sequential [GOOD]
>> TRangeTreap::Random
>> TRUCalculatorTests::TestReadTable [GOOD]
>> TRUCalculatorTests::TestBulkUpsert [GOOD]
>> TestFilterSet::DuplicationValidation [GOOD]
>> TRangeTreap::Random [GOOD]
>> TestFilterSet::CompilationValidation
|97.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> TRUCalculatorTests::TestBulkUpsert [GOOD]
|97.9%| [TS] {RESULT} ydb/core/tx/schemeshard/ut_ru_calculator/unittest
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD]
>> TTxDataShardBuildIndexScan::RunScan [GOOD]
>> TTxDataShardBuildIndexScan::ShadowBorrowCompaction
------- [TM] {asan, default-linux-x86_64, release} ydb/services/rate_limiter/ut/unittest >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD]
Test command err:
2025-03-28T09:39:40.627100Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792775592696946:2078];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:40.628825Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006e4/r3tmp/tmpAupH3c/pdisk_1.dat
2025-03-28T09:39:41.019626Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:39:41.059779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:41.060742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:39:41.076328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 20128, node 1
2025-03-28T09:39:41.219411Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:41.219444Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:41.219468Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:41.219580Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:27324
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:39:41.666337Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:41.750198Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-03-28T09:39:44.271750Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486792790641482405:2073];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:44.271791Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006e4/r3tmp/tmp2mD7Eo/pdisk_1.dat
2025-03-28T09:39:44.372878Z node 4 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:39:44.402310Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:44.402394Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:39:44.404862Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 10910, node 4
2025-03-28T09:39:44.445571Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:44.445598Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:44.445605Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:44.445729Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:7162
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:39:44.648903Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:44.692730Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-03-28T09:39:47.782591Z node 7 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7486792802438330383:2073];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:47.782667Z node 7 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006e4/r3tmp/tmpnmBqOe/pdisk_1.dat
2025-03-28T09:39:47.877412Z node 7 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:39:47.902773Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:47.902858Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:39:47.905571Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 3224, node 7
2025-03-28T09:39:47.946548Z node 7 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:47.946570Z node 7 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:47.946577Z node 7 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:47.946730Z node 7 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:26234
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:39:48.136802Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:39:48.181564Z node 7 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-03-28T09:39:51.140938Z node 10 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7486792822301644431:2073];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:39:51.141023Z node 10 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006e4/r3tmp/tmp6JuGcd/pdisk_1.dat
2025-03-28T09:39:51.227659Z node 10 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:39:51.255013Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:39:51.255109Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:39:51.257640Z node 10 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 23252, node 10
2025-03-28T09:39:51.292952Z node 10 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:39:51.292973Z node 10 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:39:51.292981Z node 10 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:39:51.293145Z node 10 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:21436
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:39:
#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:40:24.936511Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:40:24.940815Z node 28 :HIVE WARN: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 6584, node 28
2025-03-28T09:40:25.024649Z node 28 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:40:25.024688Z node 28 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:40:25.024698Z node 28 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:40:25.024887Z node 28 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:62873
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:40:25.381085Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:40:25.465432Z node 28 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-03-28T09:40:31.062916Z node 31 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[31:7486792994833922431:2075];send_to=[0:7307199536658146131:7762515];
2025-03-28T09:40:31.063017Z node 31 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006e4/r3tmp/tmp7xolBa/pdisk_1.dat
2025-03-28T09:40:31.220688Z node 31 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:40:31.257682Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:40:31.257796Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:40:31.263149Z node 31 :HIVE WARN: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 22362, node 31
2025-03-28T09:40:31.387554Z node 31 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:40:31.387579Z node 31 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:40:31.387588Z node 31 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:40:31.387765Z node 31 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:24721
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:40:31.792323Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:40:31.811349Z node 31 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480
2025-03-28T09:40:31.896699Z node 31 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480
2025-03-28T09:40:37.679761Z node 34 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[34:7486793017504837626:2211];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006e4/r3tmp/tmpquTnpa/pdisk_1.dat
2025-03-28T09:40:37.700790Z node 34 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-28T09:40:37.847595Z node 34 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:40:37.894175Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:40:37.894300Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:40:37.900662Z node 34 :HIVE WARN: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 15621, node 34
2025-03-28T09:40:37.963607Z node 34 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:40:37.963635Z node 34 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:40:37.963643Z node 34 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:40:37.963807Z node 34 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:30149
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:40:38.225108Z node 34 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:40:38.328788Z node 34 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-03-28T09:40:44.861878Z node 37 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[37:7486793049539557952:2155];send_to=[0:7307199536658146131:7762515];
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006e4/r3tmp/tmpk2JlB2/pdisk_1.dat
2025-03-28T09:40:44.976193Z node 37 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
2025-03-28T09:40:45.110336Z node 37 :IMPORT WARN: Table profiles were not loaded
2025-03-28T09:40:45.190345Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T09:40:45.190602Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T09:40:45.196182Z node 37 :HIVE WARN: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected
TServer::EnableGrpc on GrpcPort 26372, node 37
2025-03-28T09:40:45.366556Z node 37 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T09:40:45.366587Z node 37 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T09:40:45.366599Z node 37 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T09:40:45.366804Z node 37 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:26165
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T09:40:45.576576Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T09:40:45.671683Z node 37 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480
|97.9%| [TM] {RESULT} ydb/services/rate_limiter/ut/unittest
>> KeyValueGRPCService::SimpleWriteReadWithIncorreectPath [GOOD]
>> KeyValueGRPCService::SimpleWriteReadWithoutToken
------- [TM] {asan, default-linux-x86_64, release} ydb/core/memory_controller/ut/unittest >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD]
Test command err:
ResourceBrokerSelfConfig: LimitBytes: 0 QueryExecutionLimitBytes: 0
2025-03-28T09:39:34.444462Z node 1 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: none MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 60MiB ConsumersConsumption: 0B OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 6MiB Coefficient: 0.9999990463
2025-03-28T09:39:34.447450Z node 1 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 0B Limit: 6MiB Min: 2MiB Max: 6MiB
2025-03-28T09:39:34.448090Z node 1 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 0B Limit: 40MiB
2025-03-28T09:39:34.450047Z node 1 :MEMORY_CONTROLLER INFO: Bootstrapped with config HardLimitBytes: 209715200
2025-03-28T09:39:34.468603Z node 1 :RESOURCE_BROKER DEBUG: TResourceBrokerActor bootstrap
2025-03-28T09:39:34.471294Z node 1 :TABLET_SAUSAGECACHE NOTICE: Bootstrap with config MemoryLimit: 33554432
2025-03-28T09:39:36.858072Z node 1 :MEMORY_CONTROLLER INFO: Consumer SharedCache [1:20:2067] registered
2025-03-28T09:39:36.859275Z node 1 :RESOURCE_BROKER INFO: New config diff: Queues { Name: "queue_kqp_resource_manager" Limit { Memory: 41943040 } } ResourceLimit { Memory: 62914560 }
2025-03-28T09:39:36.859789Z node 1 :RESOURCE_BROKER INFO: New config: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 41943040 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: "queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } ResourceLimit { Cpu: 20 Memory: 62914560 }
2025-03-28T09:39:36.860720Z node 1 :RESOURCE_BROKER INFO: Configure result: Success: true
2025-03-28T09:39:36.861035Z node 1 :TABLET_SAUSAGECACHE NOTICE: Register memory consumer
2025-03-28T09:39:36.865681Z node 1 :MEMORY_CONTROLLER INFO: ResourceBroker configure result Success: true
2025-03-28T09:39:36.915511Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:39:36.915633Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:39:36.915671Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:39:37.022575Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:438:2399] 1 registered 2025-03-28T09:39:37.035477Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:451:2401] 0 registered 2025-03-28T09:39:37.039344Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:451:2401] 2 registered 2025-03-28T09:39:37.039509Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:451:2401] 4 registered 2025-03-28T09:39:37.039608Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:451:2401] 5 registered 2025-03-28T09:39:37.041370Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:459:2404] 1 registered 2025-03-28T09:39:37.043923Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:459:2404] 2 registered 2025-03-28T09:39:37.089607Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 1 registered 2025-03-28T09:39:37.133765Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 2 registered 2025-03-28T09:39:37.134295Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 3 registered 2025-03-28T09:39:37.135367Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 4 registered 2025-03-28T09:39:37.135664Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 5 registered 2025-03-28T09:39:37.135859Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 6 registered 2025-03-28T09:39:37.136127Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 7 registered 2025-03-28T09:39:37.136446Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 8 registered 2025-03-28T09:39:37.136725Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 9 registered 2025-03-28T09:39:37.137665Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 10 registered 2025-03-28T09:39:37.145101Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 11 registered 2025-03-28T09:39:37.145319Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 12 registered 2025-03-28T09:39:37.170931Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 13 registered 2025-03-28T09:39:37.181691Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 14 registered 2025-03-28T09:39:37.182997Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 15 registered 2025-03-28T09:39:37.183366Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 16 registered 2025-03-28T09:39:37.183480Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 17 registered 2025-03-28T09:39:37.183596Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 18 registered 2025-03-28T09:39:37.184218Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 19 registered 2025-03-28T09:39:37.187282Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 20 registered 2025-03-28T09:39:37.198356Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 22 registered 2025-03-28T09:39:37.199603Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 23 registered 2025-03-28T09:39:37.200052Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 24 registered 2025-03-28T09:39:37.203154Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 25 registered 
2025-03-28T09:39:37.206238Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 26 registered 2025-03-28T09:39:37.206394Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 27 registered 2025-03-28T09:39:37.206611Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 28 registered 2025-03-28T09:39:37.206826Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 29 registered 2025-03-28T09:39:37.207123Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 30 registered 2025-03-28T09:39:37.207237Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 31 registered 2025-03-28T09:39:37.207294Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 32 registered 2025-03-28T09:39:37.212434Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 33 registered 2025-03-28T09:39:37.215307Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 34 registered 2025-03-28T09:39:37.215683Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 35 registered 2025-03-28T09:39:37.215957Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 36 registered 2025-03-28T09:39:37.217135Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 37 registered 2025-03-28T09:39:37.218110Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 38 registered 2025-03-28T09:39:37.218349Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 39 registered 2025-03-28T09:39:37.220715Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 40 registered 2025-03-28T09:39:37.225206Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 41 registered 2025-03-28T09:39:37.225255Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 42 registered 2025-03-28T09:39:37.225511Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 43 registered 2025-03-28T09:39:37.225762Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 44 registered 2025-03-28T09:39:37.225954Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 45 registered 2025-03-28T09:39:37.226164Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 46 registered 2025-03-28T09:39:37.226329Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 47 registered 2025-03-28T09:39:37.226751Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 48 registered 2025-03-28T09:39:37.226962Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 49 registered 2025-03-28T09:39:37.227023Z node 1 :MEMORY_CONTROLLER TRACE: MemTable [1:469:2405] 50 registered 2025-03-28T09:39:37.227069Z node 1 :MEMORY_CONTROLLER TRACE: ... 
973281Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:473:2405] 105 registered 2025-03-28T09:40:48.973321Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:473:2405] 106 registered 2025-03-28T09:40:48.973373Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:473:2405] 107 registered 2025-03-28T09:40:48.973412Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:473:2405] 108 registered 2025-03-28T09:40:48.973450Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:473:2405] 109 registered 2025-03-28T09:40:48.973487Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:473:2405] 110 registered 2025-03-28T09:40:48.973525Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:473:2405] 111 registered 2025-03-28T09:40:48.973603Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:473:2405] 113 registered 2025-03-28T09:40:48.974031Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:473:2405] 114 registered 2025-03-28T09:40:48.974108Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:473:2405] 115 registered 2025-03-28T09:40:48.974257Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:473:2405] 116 registered 2025-03-28T09:40:48.975023Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:473:2405] 117 registered 2025-03-28T09:40:48.975199Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:473:2405] 118 registered 2025-03-28T09:40:48.975345Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2407] 17 registered 2025-03-28T09:40:48.975624Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2407] 0 registered 2025-03-28T09:40:48.975750Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2407] 18 registered 2025-03-28T09:40:48.975913Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2407] 1 registered 2025-03-28T09:40:48.976069Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2407] 19 registered 2025-03-28T09:40:48.976300Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2407] 2 registered 2025-03-28T09:40:48.976540Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2407] 20 registered 2025-03-28T09:40:48.976604Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2407] 3 registered 2025-03-28T09:40:48.976637Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2407] 21 registered 2025-03-28T09:40:48.976719Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2407] 4 registered 2025-03-28T09:40:48.976783Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2407] 6 registered 2025-03-28T09:40:48.976834Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2407] 9 registered 2025-03-28T09:40:48.976919Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2407] 10 registered 2025-03-28T09:40:48.976987Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2407] 13 registered 2025-03-28T09:40:48.977024Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2407] 14 registered 2025-03-28T09:40:48.977135Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:489:2407] 16 registered 2025-03-28T09:40:48.977211Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:513:2411] 7 registered 2025-03-28T09:40:48.977256Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:513:2411] 1 registered 2025-03-28T09:40:48.977301Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:513:2411] 2 registered 2025-03-28T09:40:48.977335Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:513:2411] 3 registered 2025-03-28T09:40:48.977385Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:513:2411] 4 registered 2025-03-28T09:40:48.977416Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:513:2411] 5 registered 2025-03-28T09:40:48.977455Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:513:2411] 6 registered 2025-03-28T09:40:48.977498Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:534:2413] 
102 registered 2025-03-28T09:40:48.977552Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:534:2413] 103 registered 2025-03-28T09:40:48.977593Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:534:2413] 1 registered 2025-03-28T09:40:48.977630Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:534:2413] 104 registered 2025-03-28T09:40:48.977708Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:534:2413] 2 registered 2025-03-28T09:40:48.977748Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:534:2413] 3 registered 2025-03-28T09:40:48.977793Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:534:2413] 4 registered 2025-03-28T09:40:48.977832Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:534:2413] 5 registered 2025-03-28T09:40:48.977864Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:534:2413] 6 registered 2025-03-28T09:40:48.977907Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:534:2413] 7 registered 2025-03-28T09:40:48.977942Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:534:2413] 100 registered 2025-03-28T09:40:48.977975Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:534:2413] 101 registered 2025-03-28T09:40:48.978017Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:541:2415] 1 registered 2025-03-28T09:40:48.978063Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:541:2415] 2 registered 2025-03-28T09:40:48.978097Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:541:2415] 3 registered test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a44/r3tmp/tmpr04D8l/pdisk_1.dat 2025-03-28T09:40:49.007552Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 1 registered 2025-03-28T09:40:49.007660Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 2 registered 2025-03-28T09:40:49.007850Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 3 registered 2025-03-28T09:40:49.007954Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 4 registered 2025-03-28T09:40:49.008099Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 120 registered 2025-03-28T09:40:49.008174Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 5 registered 2025-03-28T09:40:49.008367Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 121 registered 2025-03-28T09:40:49.008412Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 6 registered 2025-03-28T09:40:49.008473Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 122 registered 2025-03-28T09:40:49.008508Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 7 registered 2025-03-28T09:40:49.008641Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 123 registered 2025-03-28T09:40:49.008679Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 8 registered 2025-03-28T09:40:49.008711Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 125 registered 2025-03-28T09:40:49.008744Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 126 registered 2025-03-28T09:40:49.008780Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 127 registered 2025-03-28T09:40:49.009001Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 128 registered 2025-03-28T09:40:49.009045Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 129 registered 2025-03-28T09:40:49.009078Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 100 registered 2025-03-28T09:40:49.009110Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 101 registered 2025-03-28T09:40:49.009145Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 131 registered 2025-03-28T09:40:49.009176Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 102 registered 2025-03-28T09:40:49.009207Z node 9 
:MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 103 registered 2025-03-28T09:40:49.009239Z node 9 :MEMORY_CONTROLLER TRACE: MemTable [9:493:2409] 105 registered 2025-03-28T09:40:49.075239Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:49.114315Z node 9 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:49.124240Z node 9 :MEMORY_CONTROLLER INFO: Config updated QueryExecutionLimitPercent: 15 2025-03-28T09:40:49.124885Z node 9 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-03-28T09:40:49.165875Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:49.166068Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:49.177697Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:49.408825Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 500MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 500MiB SoftLimit: 375MiB TargetUtilization: 250MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 32.5KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 250MiB ResultingConsumersConsumption: 250MiB Coefficient: 0.90625 2025-03-28T09:40:49.409577Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 236MiB Min: 100MiB Max: 250MiB 2025-03-28T09:40:49.409752Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 32.5KiB Limit: 14.1MiB Min: 5MiB Max: 15MiB 2025-03-28T09:40:49.409798Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-03-28T09:40:49.409939Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 236MiB 2025-03-28T09:40:49.598823Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 500MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 500MiB SoftLimit: 375MiB TargetUtilization: 250MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 32.6KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 250MiB ResultingConsumersConsumption: 250MiB Coefficient: 0.90625 2025-03-28T09:40:49.599447Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 236MiB Min: 100MiB Max: 250MiB 2025-03-28T09:40:49.599501Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 32.6KiB Limit: 14.1MiB Min: 5MiB Max: 15MiB 2025-03-28T09:40:49.599537Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-03-28T09:40:49.599634Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 236MiB 2025-03-28T09:40:49.765666Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 200MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 33KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 100MiB Coefficient: 0.90625 2025-03-28T09:40:49.766456Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 94.4MiB 
Min: 40MiB Max: 100MiB 2025-03-28T09:40:49.766541Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 33KiB Limit: 5.63MiB Min: 2MiB Max: 6MiB 2025-03-28T09:40:49.766593Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-03-28T09:40:49.766763Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 94.4MiB 2025-03-28T09:40:49.917586Z node 9 :MEMORY_CONTROLLER INFO: Periodic memory stats: AnonRss: none CGroupLimit: 200MiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 1000MiB ConsumersConsumption: 33.3KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 100MiB Coefficient: 0.90625 2025-03-28T09:40:49.918209Z node 9 :MEMORY_CONTROLLER INFO: Consumer SharedCache state: Consumption: 0B Limit: 94.4MiB Min: 40MiB Max: 100MiB 2025-03-28T09:40:49.918258Z node 9 :MEMORY_CONTROLLER INFO: Consumer MemTable state: Consumption: 33.3KiB Limit: 5.63MiB Min: 2MiB Max: 6MiB 2025-03-28T09:40:49.918296Z node 9 :MEMORY_CONTROLLER INFO: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 999MiB 2025-03-28T09:40:49.918387Z node 9 :TABLET_SAUSAGECACHE INFO: Limit memory consumer with 94.4MiB
|97.9%| [TM] {RESULT} ydb/core/memory_controller/ut/unittest
>> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD]
>> test_query_cache.py::TestQueryCache::test [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/locks/ut_range_treap/unittest >> TRangeTreap::Random [GOOD]
Test command err:
NOTE: building treap of size 1000000 got height 49 and needed 1000000 ops (1000000 inserts 0 updates 0 deletes) and 30570446 comparisons (30.570446 per op)
NOTE: building treap of size 8733 got height 34 and needed 11033 ops (9766 inserts 234 updates 1033 deletes) and 228433 comparisons (20.7045228 per op)
Checking point 6753 ... found 2275 ranges, needed 8187 comparisons (3.598681319 per range)
Checking point 4854 ... found 2234 ranges, needed 8245 comparisons (3.690689346 per range)
Checking point 7707 ... found 2243 ranges, needed 8282 comparisons (3.692376282 per range)
Checking point 2905 ... found 1835 ranges, needed 5542 comparisons (3.020163488 per range)
Checking point 7047 ... found 2276 ranges, needed 8307 comparisons (3.649824253 per range)
Checking point 1848 ... found 1390 ranges, needed 3741 comparisons (2.691366906 per range)
Checking point 8064 ... found 2210 ranges, needed 8050 comparisons (3.642533937 per range)
Checking point 959 ... found 791 ranges, needed 1966 comparisons (2.485461441 per range)
Checking point 5077 ... found 2208 ranges, needed 8255 comparisons (3.738677536 per range)
Checking point 4139 ... found 2149 ranges, needed 7454 comparisons (3.468590042 per range)
|97.9%| [TM] {RESULT} ydb/core/tx/locks/ut_range_treap/unittest
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD]
Test command err:
2025-03-28T09:40:29.429925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:29.430405Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:29.430476Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0008d4/r3tmp/tmp0Bk0sq/pdisk_1.dat 2025-03-28T09:40:30.013942Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:30.075967Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:30.128811Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:30.128951Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:30.148328Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:30.263655Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:30.779342Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:770:2642], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:30.779982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:780:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:30.780154Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:30.787955Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:40:30.968776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:784:2650], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:40:31.054286Z node 1 :TX_PROXY ERROR: Actor# [1:858:2693] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:40:32.131026Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1zqdqapz5fbvedj2xzqd0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTE5NjlmZDAtYTY4MjcxNjUtMmQwZmNkNzMtYmFiODU3MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:32.207543Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1zqdqapz5fbvedj2xzqd0, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTE5NjlmZDAtYTY4MjcxNjUtMmQwZmNkNzMtYmFiODU3MzE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:32.642117Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1zrvnaq3htfgkexej3nft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWU3MjZiMzUtNmVhYjY1M2UtNzczMTg2MDctN2M0YWY1ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:32.706671Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1zrvnaq3htfgkexej3nft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWU3MjZiMzUtNmVhYjY1M2UtNzczMTg2MDctN2M0YWY1ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:32.724560Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1zrvnaq3htfgkexej3nft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWU3MjZiMzUtNmVhYjY1M2UtNzczMTg2MDctN2M0YWY1ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:32.731587Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe1zrvnaq3htfgkexej3nft, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWU3MjZiMzUtNmVhYjY1M2UtNzczMTg2MDctN2M0YWY1ODk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:32.862027Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqe1zsbddq80waeqtat6g93n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQ1MTJjYzEtYmVkNDE1MzQtODk1M2JlMGEtMjY1ODgzZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:32.893297Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqe1zsbddq80waeqtat6g93n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDQ1MTJjYzEtYmVkNDE1MzQtODk1M2JlMGEtMjY1ODgzZDA=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:33.118992Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715668. Ctx: { TraceId: 01jqe1zsgg14nqqa2axbdgetv5, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MTA4YzM2ZTItMjFhMTFiNjctZGU0NGEyMDgtODcxZGZmYTI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } 2025-03-28T09:40:37.106462Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:37.106814Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:37.106968Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0008d4/r3tmp/tmpEvDPtz/pdisk_1.dat 2025-03-28T09:40:37.405250Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:37.437182Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:37.477501Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:37.477640Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:37.491844Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:37.587669Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:37.976767Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:820:2681], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:37.976865Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:830:2686], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:37.976945Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:37.982111Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:40:38.166202Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:834:2689], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:40:38.204614Z node 2 :TX_PROXY ERROR: Actor# [2:912:2736] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:40:38.768309Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe1zyeqcn39wwry5zb0dh65, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDc5ZDJjNjAtYjYxYjBhZjEtODRhMjYzYTAtZTJkZDVjMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:38.791362Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe1zyeqcn39wwry5zb0dh65, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDc5ZDJjNjAtYjYxYjBhZjEtODRhMjYzYTAtZTJkZDVjMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:38.808390Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe1zyeqcn39wwry5zb0dh65, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDc5ZDJjNjAtYjYxYjBhZjEtODRhMjYzYTAtZTJkZDVjMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:38.819216Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe1zyeqcn39wwry5zb0dh65, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=ZDc5ZDJjNjAtYjYxYjBhZjEtODRhMjYzYTAtZTJkZDVjMGQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:39.222709Z node 2 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe1zz9z9bak4965ymhbe880, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=2&id=MzIzZTMwYTMtOWRhNzk3ZjEtZDI5NzhiYzUtYTc3YTM4ZmU=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Ro ... Id: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } 2025-03-28T09:40:44.495462Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:323:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:44.495702Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:44.495832Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0008d4/r3tmp/tmp24m9zp/pdisk_1.dat 2025-03-28T09:40:44.839790Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:44.873731Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:44.911647Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:44.911809Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:44.923904Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:45.008189Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:45.353578Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:771:2642], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:45.353705Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:781:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:45.353791Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:45.359920Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:40:45.525718Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [3:785:2650], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:40:45.560772Z node 3 :TX_PROXY ERROR: Actor# [3:859:2693] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:40:45.688524Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe205n76d74jep8s4gyed0t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTQ3MDYyNWItMTAyOTc0NDQtNGQ2NTQ4NTQtYmI3MmZkMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:45.712910Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe205n76d74jep8s4gyed0t, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZTQ3MDYyNWItMTAyOTc0NDQtNGQ2NTQ4NTQtYmI3MmZkMDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:45.880039Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715662. Ctx: { TraceId: 01jqe20613674d1pn76503dewa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=OGY2ZjA1MC1jZmU4MTY3YS1iOWQ1OTY4Yy1iN2Y1NjNiMQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } } 2025-03-28T09:40:46.031793Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715663. Ctx: { TraceId: 01jqe20661bxsc60frh2fhbbqp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NjgxZTIzZTYtNzU2Mzg3ZDUtZjgyOWQ0NjktYmY4OTg5NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:46.050220Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715664. Ctx: { TraceId: 01jqe20661bxsc60frh2fhbbqp, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NjgxZTIzZTYtNzU2Mzg3ZDUtZjgyOWQ0NjktYmY4OTg5NWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:46.202269Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe206bn0yqxqck18w14rk4y, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=Yjk5OTRkZDMtZDk2ZTE5NWEtODgxZmFhYWQtM2E2Nzg1OGI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } } 2025-03-28T09:40:46.420573Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715666. Ctx: { TraceId: 01jqe206g4073pbr9cmfpz164z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDZhYzNmZWItNzMyY2Y0YjMtYWYxZTNiYTEtYmY2ODA3YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:46.445188Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715667. Ctx: { TraceId: 01jqe206g4073pbr9cmfpz164z, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZDZhYzNmZWItNzMyY2Y0YjMtYWYxZTNiYTEtYmY2ODA3YzQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:46.593172Z node 3 :KQP_EXECUTER ERROR: TxId: 281474976715668. 
Ctx: { TraceId: 01jqe206r4d0fy1asxddazq8fa, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=ZWI3ZjUzNTgtZmVhMTg4YmMtMWJhMTAwNzUtYWM3YjE5MmM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } }, { items { int64_value: 3 } items { uint32_value: 303 } } 2025-03-28T09:40:50.831564Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:323:2365], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:50.831839Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:50.831955Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0008d4/r3tmp/tmph7N4hP/pdisk_1.dat 2025-03-28T09:40:51.156561Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:51.188242Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:51.229351Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:51.229493Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:51.241257Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:51.329868Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:51.672865Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:771:2642], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:51.673011Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:782:2647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:51.673097Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:51.678906Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:40:51.855816Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:785:2650], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:40:51.891926Z node 4 :TX_PROXY ERROR: Actor# [4:859:2693] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 7], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:40:52.045769Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:869:2702], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017 2025-03-28T09:40:52.048244Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NjgxNTliNzYtZDdiNTM1NGYtMzIzY2UyYjEtNjNmN2U1MTI=, ActorId: [4:768:2639], ActorState: ExecuteState, TraceId: 01jqe20btpcyv9jrbrh8en12mn, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id: 2025-03-28T09:40:52.095250Z node 4 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [4:891:2718], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017 2025-03-28T09:40:52.097571Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=ZjRjNGE4MWYtM2JhOTQyMTMtNjQ3NDdlOTMtYzU5OWUzZDM=, ActorId: [4:883:2710], ActorState: ExecuteState, TraceId: 01jqe20c6m1510g68mckfxfe2g, ReplyQueryCompileError, status BAD_REQUEST remove tx with tx_id:
|98.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_sequence/unittest
>> TSentinelBaseTests::PDiskInitialStatus [GOOD]
>> TSentinelBaseTests::PDiskErrorState [GOOD]
>> TSentinelBaseTests::PDiskInactiveAfterStateChange [GOOD]
>> TSentinelBaseTests::PDiskFaultyState [GOOD]
>> TSentinelBaseTests::PDiskStateChangeNormalFlow [GOOD]
>> TSentinelBaseTests::PDiskStateChangeNodePermanentlyBad [GOOD]
>> TSentinelBaseTests::PDiskStateChangeNodeNotExpectedRestart [GOOD]
>> TSentinelBaseTests::PDiskStateChangeNodeExpectedRestart [GOOD]
>> TSentinelBaseTests::GuardianDataCenterRatio [GOOD]
>> TSentinelBaseTests::GuardianRackRatio
|98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/py3test >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD]
|98.0%| [TM] {RESULT} ydb/tests/functional/wardens/py3test
>> TSentinelBaseTests::GuardianRackRatio [GOOD]
>> TSentinelTests::Smoke
>> DataShardStats::HistogramStatsCorrect [GOOD]
>> DataShardStats::BlobsStatsCorrect
>> TopicSessionTests::WrongFieldType [GOOD]
>> TestFilterSet::CompilationValidation [GOOD]
>> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD]
>> test.py::test[solomon-HistResponse-default.txt]
>> ConfigGRPCService::ReplaceConfig
>> TDataShardRSTest::TestCleanupInRS+UseSink
>> TestFormatHandler::ManyJsonClients
>> TTxDataShardTestInit::TestGetShardStateAfterInitialization
>> DataShardReplication::SplitMergeChangesReboots [GOOD]
>> DataShardReplication::ReplicatedTable+UseSink
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard]
>> TTxDataShardTestInit::TestGetShardStateAfterInitialization [GOOD]
>> TTxDataShardTestInit::TestTableHasPath
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_tx_coordinator] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_allocator]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_allocator] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat]
>> TSentinelTests::Smoke [GOOD]
>> TSentinelTests::PDiskUnknownState
>> TCreateAndDropViewTest::CallDropViewOnTable [GOOD]
>> TCreateAndDropViewTest::DropSameViewTwice
>> MediatorTest::BasicTimecastUpdates
>> KqpTpch::Query08 [GOOD]
>> KqpTpch::Query09
>> KeyValueGRPCService::SimpleWriteReadWithoutToken [GOOD]
>> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_mediator] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue]
>> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok
>> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes
>> TestFormatHandler::ManyJsonClients [GOOD]
>> TSentinelTests::PDiskUnknownState [GOOD]
>> TSentinelTests::PDiskErrorState
>> TestFormatHandler::ManyRawClients
>> TDqSolomonWriteActorTest::TestWriteFormat
>> ConfigGRPCService::ReplaceConfig [GOOD]
>> ConfigGRPCService::FetchConfig
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] [GOOD]
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus]
|98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/py3test >> test_query_cache.py::TestQueryCache::test [GOOD]
|98.0%| [TM] {RESULT} ydb/tests/functional/query_cache/py3test
>> test.py::test[solomon-HistResponse-default.txt] [GOOD]
>> test.py::test[solomon-InvalidProject-]
|98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> TTxDataShardTestInit::TestTableHasPath [GOOD]
>> TTxDataShardTestInit::TestResolvePathAfterRestart
>> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable
>> test_http_api.py::TestHttpApi::test_stop_idempotency [GOOD]
>> test_http_api.py::TestHttpApi::test_restart_idempotency
>> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD]
>> DataShardReplication::ReplicatedTable+UseSink [GOOD]
>> DataShardReplication::ReplicatedTable-UseSink
>> KesusProxyTest::ReconnectsWithKesusWhenNotConnected
>> KesusProxyTest::ReconnectsWithKesusWhenNotConnected [GOOD]
>> KesusProxyTest::ReconnectsWithKesusWhenPipeDestroyed [GOOD]
>> KesusProxyTest::RejectsNotCanonizedResourceName [GOOD]
>> KesusProxyTest::SubscribesOnResource
>> MediatorTest::BasicTimecastUpdates [GOOD]
>> KesusProxyTest::SubscribesOnResource [GOOD]
>> KesusProxyTest::SubscribesOnResourcesWhenReconnected [GOOD]
>> KesusProxyTest::ProxyRequestDuringDisconnection
>> KesusProxyTest::ProxyRequestDuringDisconnection [GOOD]
>> KesusProxyTest::DeactivateSessionWhenResourceClosed [GOOD]
>> KesusProxyTest::SendsProxySessionOnceOnSuccess [GOOD]
>> KesusProxyTest::SendsProxySessionOnceOnFailure [GOOD]
>> KesusProxyTest::AnswersWithSessionWhenResourceIsAlreadyKnown
>> KesusProxyTest::AnswersWithSessionWhenResourceIsAlreadyKnown [GOOD]
>> KesusProxyTest::SendsBrokenUpdateWhenKesusPassesError [GOOD]
>> KesusProxyTest::AllocatesResourceWithKesus
>> KesusProxyTest::AllocatesResourceWithKesus [GOOD]
>> KesusProxyTest::DisconnectsDuringActiveSession [GOOD]
>> KesusProxyTest::AllocatesResourceOffline [GOOD]
>> KesusProxyTest::ConnectsDuringOfflineAllocation [GOOD]
>> KesusResourceAllocationStatisticsTest::ReturnsDefaultValues [GOOD]
>> KesusResourceAllocationStatisticsTest::CalculatesAverage [GOOD]
>> KesusResourceAllocationStatisticsTest::TakesBestStat [GOOD]
>> TQuoterServiceTest::StaticRateLimiter
>> MediatorTest::MultipleTablets
>> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] [GOOD]
>> TestFormatHandler::ManyRawClients [GOOD]
>> TestFormatHandler::ClientValidation
>> ConfigGRPCService::FetchConfig [GOOD]
>> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0]
>> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1 [GOOD]
>> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2
>> TopicSessionTests::RestartSessionIfNewClientWithOffset [GOOD]
>> TCreateAndDropViewTest::DropSameViewTwice [GOOD]
>> TCreateAndDropViewTest::DropViewIfExists
>> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok
------- [TM] {asan, default-linux-x86_64, release} ydb/services/config/ut/unittest >> ConfigGRPCService::FetchConfig [GOOD]
Test command err:
2025-03-28T09:40:55.716146Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486793094861834351:2205];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:55.716862Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0007d7/r3tmp/tmp8G5L9Y/pdisk_1.dat 2025-03-28T09:40:56.500910Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:56.515747Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:56.515870Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:56.533102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21590, node 1 2025-03-28T09:40:56.655445Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-03-28T09:40:56.655928Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-03-28T09:40:56.656033Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-03-28T09:40:56.656152Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-03-28T09:40:56.657292Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-03-28T09:40:56.657318Z node 1 :GRPC_SERVER INFO: Updated app config 2025-03-28T09:40:56.657343Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-03-28T09:40:56.657350Z node 1 :GRPC_SERVER INFO: Updated app config 2025-03-28T09:40:56.662776Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-03-28T09:40:56.662803Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-03-28T09:40:56.679140Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:40:56.679190Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:40:56.679203Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:40:56.679214Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:40:56.854289Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ac80] created request Name# BlobStorageConfig 2025-03-28T09:40:56.861191Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b280] created request Name# HiveCreateTablet 2025-03-28T09:40:56.883299Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b880] created request Name# TabletStateRequest 2025-03-28T09:40:56.883684Z node 1 :GRPC_SERVER DEBUG: [0x51a00002be80] created request Name# SchemeOperationStatus 2025-03-28T09:40:56.886817Z node 1 :GRPC_SERVER DEBUG: [0x51a00002c480] created request Name# ChooseProxy 2025-03-28T09:40:56.887218Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ca80] created request Name# ResolveNode 2025-03-28T09:40:56.889599Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d080] created request Name# FillNode 2025-03-28T09:40:56.889931Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d680] created request Name# DrainNode 2025-03-28T09:40:56.891582Z node 1 :GRPC_SERVER DEBUG: [0x51a00002dc80] created request Name# InterconnectDebug 2025-03-28T09:40:56.891951Z
node 1 :GRPC_SERVER DEBUG: [0x51a00002e280] created request Name# TestShardControl 2025-03-28T09:40:56.893343Z node 1 :GRPC_SERVER DEBUG: [0x51a00002e880] created request Name# RegisterNode 2025-03-28T09:40:56.893753Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ee80] created request Name# CmsRequest 2025-03-28T09:40:56.894064Z node 1 :GRPC_SERVER DEBUG: [0x51a0000df880] created request Name# ConsoleRequest 2025-03-28T09:40:56.894660Z node 1 :GRPC_SERVER DEBUG: [0x51a0000dfe80] created request Name# SchemeInitRoot 2025-03-28T09:40:56.894998Z node 1 :GRPC_SERVER DEBUG: [0x51a0000e0480] created request Name# PersQueueRequest 2025-03-28T09:40:56.895335Z node 1 :GRPC_SERVER DEBUG: [0x51a0000e0a80] created request Name# SchemeOperation 2025-03-28T09:40:56.895621Z node 1 :GRPC_SERVER DEBUG: [0x51a0000e1080] created request Name# SchemeDescribe 2025-03-28T09:40:57.035365Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:40:57.035386Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:40:57.035394Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:40:57.035507Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24898 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:40:57.902049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "hdd2" Kind: "hdd2" } StoragePools { Name: "hdd" Kind: "hdd" } StoragePools { Name: "hdd1" Kind: "hdd1" } StoragePools { Name: "ssd" Kind: "ssd" } StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:40:57.908715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:57.910437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:40:57.913297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:40:57.913390Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:57.919892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:40:57.922898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:40:57.923162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:57.923240Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:40:57.923350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-28T09:40:57.923375Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-28T09:40:57.931763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:40:57.931796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-28T09:40:57.931814Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:40:57.936441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:57.936505Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:40:57.936537Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-28T09:40:57.942147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:57.942187Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:57.942231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:40:57.942258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:40:57.951492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:40:57.955491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-28T09:40:57.957595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:40:57.960164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743154858004, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:40:57.960331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154858004 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:40:57.960387Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:40:57.961829Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-28T09:40:57.961906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:40:57.962156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:40:57.962222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:40:57.971943Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:40:57.971981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:40:57.972199Z n ... ionPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:41:01.480925Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:41:01.481056Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:41:01.481093Z node 3 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:41:01.481108Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2025-03-28T09:41:01.481123Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 2 -> 3 waiting... 2025-03-28T09:41:01.484328Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:41:01.484417Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:41:01.484459Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 3 -> 128 2025-03-28T09:41:01.486681Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:41:01.486723Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2025-03-28T09:41:01.486740Z node 3 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T09:41:01.490283Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:41:01.490310Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:41:01.490325Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2025-03-28T09:41:01.490347Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2025-03-28T09:41:01.490467Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:41:01.493363Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2025-03-28T09:41:01.493493Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:41:01.496245Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743154861539, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:41:01.496317Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154861539 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:41:01.496335Z node 3 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-03-28T09:41:01.496570Z node 3 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976715657:0 128 -> 240 2025-03-28T09:41:01.496606Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2025-03-28T09:41:01.496684Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:41:01.496731Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:41:01.499243Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:41:01.499280Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T09:41:01.499428Z node 3 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T09:41:01.499445Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [3:7486793119471081922:2362], at schemeshard: 72057594046644480, txId: 281474976715657, path id: 1 2025-03-28T09:41:01.499523Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:41:01.499543Z node 3 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDone opId# 281474976715657:0 ProgressState 2025-03-28T09:41:01.499612Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-03-28T09:41:01.499622Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-03-28T09:41:01.499638Z node 3 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715657:0 progress is 1/1 2025-03-28T09:41:01.499647Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-03-28T09:41:01.499659Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 1/1, is published: false 2025-03-28T09:41:01.499675Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 1/1 2025-03-28T09:41:01.499687Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715657:0 2025-03-28T09:41:01.499695Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715657:0 2025-03-28T09:41:01.499729Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 
72057594046644480, LocalPathId: 1] was 2 2025-03-28T09:41:01.499741Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2025-03-28T09:41:01.499750Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715657, [OwnerId: 72057594046644480, LocalPathId: 1], 3 2025-03-28T09:41:01.500778Z node 3 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-03-28T09:41:01.500850Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715657 2025-03-28T09:41:01.500862Z node 3 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715657 2025-03-28T09:41:01.500874Z node 3 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 3 2025-03-28T09:41:01.500899Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:41:01.500974Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2025-03-28T09:41:01.500986Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7486793123766049528:2316] 2025-03-28T09:41:01.502334Z node 3 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:41:01.502384Z node 3 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:41:01.502392Z node 3 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:41:01.502406Z node 3 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:41:01.508143Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715657 2025-03-28T09:41:01.595077Z node 3 :GRPC_SERVER DEBUG: Got grpc request# FetchConfigRequest, traceId# 01jqe20ngt04acg4dhyt98ehwv, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:38062, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:41:01.608170Z node 3 :GRPC_SERVER DEBUG: [0x51a00000de80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-28T09:41:01.608183Z node 3 :GRPC_SERVER DEBUG: [0x51a000100280] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-28T09:41:01.608439Z node 3 :GRPC_SERVER DEBUG: [0x51a0000a4a80] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-28T09:41:01.608449Z node 3 :GRPC_SERVER DEBUG: [0x51a000100880] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-28T09:41:01.608598Z node 3 :GRPC_SERVER DEBUG: [0x51a00000e480] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-28T09:41:01.608642Z node 3 :GRPC_SERVER DEBUG: [0x51a00000cc80] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-28T09:41:01.608744Z node 3 :GRPC_SERVER DEBUG: 
[0x51a0000ae680] received request Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-28T09:41:01.608780Z node 3 :GRPC_SERVER DEBUG: [0x51a0000ada80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-28T09:41:01.608872Z node 3 :GRPC_SERVER DEBUG: [0x51a0000ace80] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-28T09:41:01.608928Z node 3 :GRPC_SERVER DEBUG: [0x51a0000abc80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0 2025-03-28T09:41:01.609006Z node 3 :GRPC_SERVER DEBUG: [0x51a0000df280] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-28T09:41:01.609070Z node 3 :GRPC_SERVER DEBUG: [0x51a0000ab680] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-28T09:41:01.609124Z node 3 :GRPC_SERVER DEBUG: [0x51a0000b1080] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-28T09:41:01.609217Z node 3 :GRPC_SERVER DEBUG: [0x51a00000ea80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-28T09:41:01.609245Z node 3 :GRPC_SERVER DEBUG: [0x51a00000d880] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-28T09:41:01.609351Z node 3 :GRPC_SERVER DEBUG: [0x51a0000ac280] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-28T09:41:01.609381Z node 3 :GRPC_SERVER DEBUG: [0x51a0000ffc80] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0
|98.0%| [TM] {RESULT} ydb/services/config/ut/unittest
>> TopicSessionTests::ReadNonExistentTopic
>> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD]
>> DataShardStats::BlobsStatsCorrect [GOOD]
>> DataShardStats::SharedCacheGarbage
>> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD]
|98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test
>> MediatorTest::MultipleTablets [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD]
Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:54:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:57:2057] recipient: [1:51:2095] Leader for TabletID 72057594037927937 is [1:56:2097] sender: [1:74:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:54:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:57:2057] recipient: [2:51:2095] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:74:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:76:2057] recipient: [2:36:2083] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:79:2057] recipient: [2:78:2110] Leader for TabletID 72057594037927937 is [2:56:2097] sender: [2:80:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:82:2057] recipient: [2:78:2110] !Reboot 72057594037927937 (actor [2:56:2097]) rebooted!
!Reboot 72057594037927937 (actor [2:56:2097]) tablet resolver refreshed! new actor is[2:81:2111] Leader for TabletID 72057594037927937 is [2:81:2111] sender: [2:135:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:54:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:57:2057] recipient: [3:51:2095] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:74:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:76:2057] recipient: [3:36:2083] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:78:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [3:56:2097] sender: [3:80:2057] recipient: [3:79:2110] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:82:2057] recipient: [3:79:2110] !Reboot 72057594037927937 (actor [3:56:2097]) rebooted! !Reboot 72057594037927937 (actor [3:56:2097]) tablet resolver refreshed! new actor is[3:81:2111] Leader for TabletID 72057594037927937 is [3:81:2111] sender: [3:135:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:54:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:57:2057] recipient: [4:51:2095] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:74:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:77:2057] recipient: [4:36:2083] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:80:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [4:56:2097] sender: [4:81:2057] recipient: [4:79:2110] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:83:2057] recipient: [4:79:2110] !Reboot 72057594037927937 (actor [4:56:2097]) rebooted! !Reboot 72057594037927937 (actor [4:56:2097]) tablet resolver refreshed! new actor is[4:82:2111] Leader for TabletID 72057594037927937 is [4:82:2111] sender: [4:136:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:54:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:57:2057] recipient: [5:51:2095] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:74:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:80:2057] recipient: [5:36:2083] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:83:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [5:56:2097] sender: [5:84:2057] recipient: [5:82:2113] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:86:2057] recipient: [5:82:2113] !Reboot 72057594037927937 (actor [5:56:2097]) rebooted! !Reboot 72057594037927937 (actor [5:56:2097]) tablet resolver refreshed! 
new actor is[5:85:2114] Leader for TabletID 72057594037927937 is [5:85:2114] sender: [5:139:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:54:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:57:2057] recipient: [6:52:2095] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:74:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:80:2057] recipient: [6:36:2083] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:83:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [6:56:2097] sender: [6:84:2057] recipient: [6:82:2113] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:86:2057] recipient: [6:82:2113] !Reboot 72057594037927937 (actor [6:56:2097]) rebooted! !Reboot 72057594037927937 (actor [6:56:2097]) tablet resolver refreshed! new actor is[6:85:2114] Leader for TabletID 72057594037927937 is [6:85:2114] sender: [6:139:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:54:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:57:2057] recipient: [7:51:2095] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:74:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:81:2057] recipient: [7:36:2083] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:83:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [7:56:2097] sender: [7:85:2057] recipient: [7:84:2113] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:87:2057] recipient: [7:84:2113] !Reboot 72057594037927937 (actor [7:56:2097]) rebooted! !Reboot 72057594037927937 (actor [7:56:2097]) tablet resolver refreshed! new actor is[7:86:2114] Leader for TabletID 72057594037927937 is [7:86:2114] sender: [7:140:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:54:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:57:2057] recipient: [8:50:2095] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:74:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:83:2057] recipient: [8:36:2083] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:86:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [8:56:2097] sender: [8:87:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:89:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:56:2097]) rebooted! !Reboot 72057594037927937 (actor [8:56:2097]) tablet resolver refreshed! 
new actor is[8:88:2116] Leader for TabletID 72057594037927937 is [8:88:2116] sender: [8:142:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:54:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:57:2057] recipient: [9:52:2095] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:74:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:56:2097]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:83:2057] recipient: [9:36:2083] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:86:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [9:56:2097] sender: [9:87:2057] recipient: [9:85:2115] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:89:2057] recipient: [9:85:2115] !Reboot 72057594037927937 (actor [9:56:2097]) rebooted! !Reboot 72057594037927937 (actor [9:56:2097]) tablet resolver refreshed! new actor is[9:88:2116] Leader for TabletID 72057594037927937 is [9:88:2116] sender: [9:142:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:54:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:57:2057] recipient: [10:51:2095] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:74:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:84:2057] recipient: [10:36:2083] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:87:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [10:56:2097] sender: [10:88:2057] recipient: [10:86:2115] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:90:2057] recipient: [10:86:2115] !Reboot 72057594037927937 (actor [10:56:2097]) rebooted! !Reboot 72057594037927937 (actor [10:56:2097]) tablet resolver refreshed! new actor is[10:89:2116] Leader for TabletID 72057594037927937 is [10:89:2116] sender: [10:143:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:54:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:57:2057] recipient: [11:51:2095] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:74:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:56:2097]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:85:2057] recipient: [11:36:2083] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:88:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [11:56:2097] sender: [11:89:2057] recipient: [11:87:2116] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:91:2057] recipient: [11:87:2116] !Reboot 72057594037927937 (actor [11:56:2097]) rebooted! !Reboot 72057594037927937 (actor [11:56:2097]) tablet resolver refreshed! 
new actor is[11:90:2117] Leader for TabletID 72057594037927937 is [11:90:2117] sender: [11:110:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:54:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:57:2057] recipient: [12:51:2095] Leader for TabletID 72057594037927937 is [12:56:2097] sender: [12:74:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:56:2 ... 2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [106:54:2057] recipient: [106:51:2095] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:57:2057] recipient: [106:51:2095] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:74:2057] recipient: [106:14:2061] !Reboot 72057594037927937 (actor [106:56:2097]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:127:2057] recipient: [106:36:2083] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:129:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [106:56:2097] sender: [106:131:2057] recipient: [106:130:2148] Leader for TabletID 72057594037927937 is [106:132:2149] sender: [106:133:2057] recipient: [106:130:2148] !Reboot 72057594037927937 (actor [106:56:2097]) rebooted! !Reboot 72057594037927937 (actor [106:56:2097]) tablet resolver refreshed! new actor is[106:132:2149] Leader for TabletID 72057594037927937 is [106:132:2149] sender: [106:152:2057] recipient: [106:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:54:2057] recipient: [107:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [107:54:2057] recipient: [107:50:2095] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:57:2057] recipient: [107:50:2095] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:74:2057] recipient: [107:14:2061] !Reboot 72057594037927937 (actor [107:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:130:2057] recipient: [107:36:2083] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:133:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [107:56:2097] sender: [107:134:2057] recipient: [107:132:2151] Leader for TabletID 72057594037927937 is [107:135:2152] sender: [107:136:2057] recipient: [107:132:2151] !Reboot 72057594037927937 (actor [107:56:2097]) rebooted! !Reboot 72057594037927937 (actor [107:56:2097]) tablet resolver refreshed! new actor is[107:135:2152] Leader for TabletID 72057594037927937 is [107:135:2152] sender: [107:189:2057] recipient: [107:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:54:2057] recipient: [108:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [108:54:2057] recipient: [108:51:2095] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:57:2057] recipient: [108:51:2095] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:74:2057] recipient: [108:14:2061] !Reboot 72057594037927937 (actor [108:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:130:2057] recipient: [108:36:2083] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:133:2057] recipient: [108:132:2151] Leader for TabletID 72057594037927937 is [108:56:2097] sender: [108:134:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [108:135:2152] sender: [108:136:2057] recipient: [108:132:2151] !Reboot 72057594037927937 (actor [108:56:2097]) rebooted! !Reboot 72057594037927937 (actor [108:56:2097]) tablet resolver refreshed! new actor is[108:135:2152] Leader for TabletID 72057594037927937 is [108:135:2152] sender: [108:189:2057] recipient: [108:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:54:2057] recipient: [109:50:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [109:54:2057] recipient: [109:50:2095] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:57:2057] recipient: [109:50:2095] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:74:2057] recipient: [109:14:2061] !Reboot 72057594037927937 (actor [109:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:131:2057] recipient: [109:36:2083] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:134:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [109:56:2097] sender: [109:135:2057] recipient: [109:133:2151] Leader for TabletID 72057594037927937 is [109:136:2152] sender: [109:137:2057] recipient: [109:133:2151] !Reboot 72057594037927937 (actor [109:56:2097]) rebooted! !Reboot 72057594037927937 (actor [109:56:2097]) tablet resolver refreshed! new actor is[109:136:2152] Leader for TabletID 72057594037927937 is [109:136:2152] sender: [109:154:2057] recipient: [109:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:54:2057] recipient: [110:52:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [110:54:2057] recipient: [110:52:2095] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:57:2057] recipient: [110:52:2095] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:74:2057] recipient: [110:14:2061] !Reboot 72057594037927937 (actor [110:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:133:2057] recipient: [110:36:2083] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:136:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [110:56:2097] sender: [110:137:2057] recipient: [110:135:2153] Leader for TabletID 72057594037927937 is [110:138:2154] sender: [110:139:2057] recipient: [110:135:2153] !Reboot 72057594037927937 (actor [110:56:2097]) rebooted! !Reboot 72057594037927937 (actor [110:56:2097]) tablet resolver refreshed! new actor is[110:138:2154] Leader for TabletID 72057594037927937 is [110:138:2154] sender: [110:192:2057] recipient: [110:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:54:2057] recipient: [111:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [111:54:2057] recipient: [111:51:2095] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:57:2057] recipient: [111:51:2095] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:74:2057] recipient: [111:14:2061] !Reboot 72057594037927937 (actor [111:56:2097]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:133:2057] recipient: [111:36:2083] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:136:2057] recipient: [111:135:2153] Leader for TabletID 72057594037927937 is [111:56:2097] sender: [111:137:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [111:138:2154] sender: [111:139:2057] recipient: [111:135:2153] !Reboot 72057594037927937 (actor [111:56:2097]) rebooted! !Reboot 72057594037927937 (actor [111:56:2097]) tablet resolver refreshed! new actor is[111:138:2154] Leader for TabletID 72057594037927937 is [111:138:2154] sender: [111:192:2057] recipient: [111:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:54:2057] recipient: [112:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [112:54:2057] recipient: [112:51:2095] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:57:2057] recipient: [112:51:2095] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:74:2057] recipient: [112:14:2061] !Reboot 72057594037927937 (actor [112:56:2097]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:134:2057] recipient: [112:36:2083] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:137:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [112:56:2097] sender: [112:138:2057] recipient: [112:136:2153] Leader for TabletID 72057594037927937 is [112:139:2154] sender: [112:140:2057] recipient: [112:136:2153] !Reboot 72057594037927937 (actor [112:56:2097]) rebooted! !Reboot 72057594037927937 (actor [112:56:2097]) tablet resolver refreshed! new actor is[112:139:2154] Leader for TabletID 72057594037927937 is [112:139:2154] sender: [112:193:2057] recipient: [112:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:54:2057] recipient: [113:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [113:54:2057] recipient: [113:51:2095] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:57:2057] recipient: [113:51:2095] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:74:2057] recipient: [113:14:2061] !Reboot 72057594037927937 (actor [113:56:2097]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:137:2057] recipient: [113:36:2083] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:140:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [113:56:2097] sender: [113:141:2057] recipient: [113:139:2156] Leader for TabletID 72057594037927937 is [113:142:2157] sender: [113:143:2057] recipient: [113:139:2156] !Reboot 72057594037927937 (actor [113:56:2097]) rebooted! !Reboot 72057594037927937 (actor [113:56:2097]) tablet resolver refreshed! new actor is[113:142:2157] Leader for TabletID 72057594037927937 is [113:142:2157] sender: [113:196:2057] recipient: [113:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:54:2057] recipient: [114:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [114:54:2057] recipient: [114:51:2095] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:57:2057] recipient: [114:51:2095] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:74:2057] recipient: [114:14:2061] !Reboot 72057594037927937 (actor [114:56:2097]) on event NKikimr::TEvKeyValue::TEvRead ! 
Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:137:2057] recipient: [114:36:2083] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:140:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [114:56:2097] sender: [114:141:2057] recipient: [114:139:2156] Leader for TabletID 72057594037927937 is [114:142:2157] sender: [114:143:2057] recipient: [114:139:2156] !Reboot 72057594037927937 (actor [114:56:2097]) rebooted! !Reboot 72057594037927937 (actor [114:56:2097]) tablet resolver refreshed! new actor is[114:142:2157] Leader for TabletID 72057594037927937 is [114:142:2157] sender: [114:196:2057] recipient: [114:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:54:2057] recipient: [115:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [115:54:2057] recipient: [115:51:2095] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:57:2057] recipient: [115:51:2095] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:74:2057] recipient: [115:14:2061] !Reboot 72057594037927937 (actor [115:56:2097]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:138:2057] recipient: [115:36:2083] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:141:2057] recipient: [115:14:2061] Leader for TabletID 72057594037927937 is [115:56:2097] sender: [115:142:2057] recipient: [115:140:2156] Leader for TabletID 72057594037927937 is [115:143:2157] sender: [115:144:2057] recipient: [115:140:2156] !Reboot 72057594037927937 (actor [115:56:2097]) rebooted! !Reboot 72057594037927937 (actor [115:56:2097]) tablet resolver refreshed! new actor is[115:143:2157] Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:54:2057] recipient: [116:51:2095] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [116:54:2057] recipient: [116:51:2095] Leader for TabletID 72057594037927937 is [116:56:2097] sender: [116:57:2057] recipient: [116:51:2095] Leader for TabletID 72057594037927937 is [116:56:2097] sender: [116:74:2057] recipient: [116:14:2061]
>> TQuoterServiceTest::StaticRateLimiter [GOOD]
>> TQuoterServiceTest::StaticMultipleAndResources
>> test.py::test[solomon-InvalidProject-] [GOOD]
>> MediatorTest::TabletAckBeforePlanComplete
>> test.py::test[solomon-LabelColumns-default.txt]
>> TTxDataShardBuildIndexScan::ShadowBorrowCompaction [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_init/unittest >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD]
Test command err: 2025-03-28T09:40:56.092101Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:102:2136], Recipient [1:108:2140]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:40:56.113668Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:102:2136], Recipient [1:108:2140]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:40:56.117051Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:108:2140] 2025-03-28T09:40:56.118219Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:56.178967Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:102:2136], Recipient [1:108:2140]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:40:56.188276Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:56.188606Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:40:56.191276Z node 1
:TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2025-03-28T09:40:56.191349Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 9437184 2025-03-28T09:40:56.191395Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 9437184 2025-03-28T09:40:56.192639Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:40:56.193166Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:40:56.193258Z node 1 :TX_DATASHARD DEBUG: DataShard 9437184 persisting started state actor id [1:131:2140] in generation 2 2025-03-28T09:40:56.217841Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:40:56.245261Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 9437184 2025-03-28T09:40:56.246581Z node 1 :TX_DATASHARD DEBUG: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:40:56.246747Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 9437184, actorId: [1:136:2160] 2025-03-28T09:40:56.246783Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 9437184 2025-03-28T09:40:56.246821Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2025-03-28T09:40:56.246866Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2025-03-28T09:40:56.247097Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:108:2140], Recipient [1:108:2140]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:40:56.247946Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:40:56.250640Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 9437184 2025-03-28T09:40:56.250774Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2025-03-28T09:40:56.250822Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 9437184 2025-03-28T09:40:56.250902Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:56.250947Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 9437184 2025-03-28T09:40:56.250981Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 9437184 has no attached operations 2025-03-28T09:40:56.251018Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 9437184 2025-03-28T09:40:56.251048Z node 1 :TX_DATASHARD INFO: No tx to execute at 9437184 TxInFly 0 2025-03-28T09:40:56.251085Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 9437184 2025-03-28T09:40:56.253183Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269551617, Sender [1:99:2134], Recipient [1:108:2140]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 99 RawX2: 4294969430 } 2025-03-28T09:40:56.253258Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvGetShardState 2025-03-28T09:40:59.846161Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:59.846583Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:59.846642Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000912/r3tmp/tmp2HOAQX/pdisk_1.dat 2025-03-28T09:41:00.440986Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:41:00.507071Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:41:00.559154Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:41:00.559283Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:41:00.573135Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:41:00.708503Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:41:00.771631Z node 2 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:666:2570] 2025-03-28T09:41:00.772084Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:41:00.816100Z node 2 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:41:00.816214Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:41:00.817764Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:41:00.817856Z node 2 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:41:00.817910Z node 2 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:41:00.818232Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:41:00.818357Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:41:00.818432Z node 2 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [2:682:2570] in generation 1 2025-03-28T09:41:00.831257Z node 2 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:41:00.831337Z node 2 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:41:00.831434Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:41:00.831542Z node 2 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [2:684:2580] 2025-03-28T09:41:00.831579Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:41:00.831615Z node 2 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:41:00.831654Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:41:00.831992Z node 2 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:41:00.832093Z node 2 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:41:00.832207Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:41:00.832245Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:41:00.832279Z node 2 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:41:00.832375Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:41:00.832456Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:662:2567], serverId# [2:673:2574], sessionId# [0:0:0] 2025-03-28T09:41:00.832827Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:41:00.833890Z node 2 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:41:00.833988Z node 2 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:41:00.840302Z node 2 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:41:00.851465Z node 2 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:41:00.851601Z node 2 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:41:01.028575Z node 2 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [2:704:2594], serverId# [2:705:2595], sessionId# [0:0:0] 2025-03-28T09:41:01.039781Z node 2 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:41:01.039891Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:41:01.040823Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:41:01.040883Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:41:01.040942Z node 2 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:41:01.041179Z node 2 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:41:01.041374Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:41:01.041680Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:41:01.041777Z node 2 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:41:01.056035Z node 2 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:41:01.063442Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-28T09:41:01.065648Z node 2 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:41:01.065733Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:41:01.068912Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:41:01.069036Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:41:01.070530Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:41:01.070643Z node 2 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:41:01.070745Z node 2 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037 ... :06.933314Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:41:06.933348Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:41:06.933697Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:662:2567], serverId# [3:673:2574], sessionId# [0:0:0] 2025-03-28T09:41:06.933964Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:41:06.934111Z node 3 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:41:06.934194Z node 3 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:41:06.939791Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:41:06.952905Z node 3 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:41:06.953026Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:41:07.157610Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:703:2593], serverId# [3:705:2595], sessionId# [0:0:0] 2025-03-28T09:41:07.158052Z node 3 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:41:07.158106Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:41:07.159790Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:41:07.159844Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:41:07.159895Z node 3 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:41:07.160159Z node 3 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:41:07.160292Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:41:07.160942Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:41:07.161016Z node 3 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, 
LocalPathId: 2] schema version# 1 2025-03-28T09:41:07.161438Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:41:07.161794Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:41:07.167586Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:41:07.167654Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:41:07.168516Z node 3 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:41:07.168585Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:41:07.169540Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:41:07.169588Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:41:07.169629Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:41:07.169691Z node 3 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:41:07.169746Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:41:07.169831Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:41:07.171423Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:41:07.173807Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:41:07.173867Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:41:07.174073Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:41:07.181445Z node 3 :TX_DATASHARD INFO: OnTabletDead: 72075186224037888 2025-03-28T09:41:07.183097Z node 3 :TX_DATASHARD INFO: Change sender killed: at tablet: 72075186224037888 2025-03-28T09:41:07.238478Z node 3 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:742:2622] 2025-03-28T09:41:07.238734Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:41:07.244032Z node 3 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:41:07.244526Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:41:07.246670Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:41:07.246975Z node 3 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:41:07.247044Z node 3 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:41:07.247503Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:41:07.248485Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:41:07.248566Z node 3 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id 
[3:757:2622] in generation 2 2025-03-28T09:41:07.275455Z node 3 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:41:07.275585Z node 3 :TX_DATASHARD INFO: Switched to work state Ready tabletId 72075186224037888 2025-03-28T09:41:07.275691Z node 3 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:41:07.275824Z node 3 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:41:07.275949Z node 3 :TX_DATASHARD DEBUG: Resolve path at 72075186224037888: reason# empty path 2025-03-28T09:41:07.276099Z node 3 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [3:761:2632] 2025-03-28T09:41:07.276150Z node 3 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:41:07.276208Z node 3 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:41:07.276247Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:41:07.276593Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Execute 2025-03-28T09:41:07.276822Z node 3 :TX_DATASHARD DEBUG: TxInitSchemaDefaults.Complete 2025-03-28T09:41:07.277656Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:41:07.277776Z node 3 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:41:07.278259Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1000 2025-03-28T09:41:07.278317Z node 3 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:41:07.278516Z node 3 :FLAT_TX_SCHEMESHARD WARN: Got TEvDataShard::TEvSchemaChanged for unknown txId 281474976715657 message# Source { RawX1: 742 RawX2: 12884904510 } Origin: 72075186224037888 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2025-03-28T09:41:07.278610Z node 3 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:41:07.279411Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:41:07.279466Z node 3 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:41:07.279512Z node 3 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:41:07.279586Z node 3 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:41:07.279844Z node 3 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:41:07.323546Z node 3 :TX_DATASHARD DEBUG: Got scheme resolve result at 72075186224037888: Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 1000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: 
"table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 2 PathOwnerId: 72057594046644480 2025-03-28T09:41:07.323853Z node 3 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:41:07.324135Z node 3 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:41:07.324277Z node 3 :TX_DATASHARD DEBUG: TTxStoreTablePath::Execute at 72075186224037888 2025-03-28T09:41:07.326484Z node 3 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [3:765:2636], serverId# [3:767:2637], sessionId# [0:0:0] 2025-03-28T09:41:07.341917Z node 3 :TX_DATASHARD DEBUG: TTxStoreTablePath::Complete at 72075186224037888 |98.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_init/unittest >> KqpTpch::Query09 [GOOD] >> KqpTpch::Query10 |98.0%| [TA] $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.0%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TDqSolomonWriteActorTest::TestWriteFormat [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring >> TestFormatHandler::ClientValidation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_build_index/unittest >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction [GOOD] Test command err: 2025-03-28T09:40:48.218605Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:48.219465Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:48.219546Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000710/r3tmp/tmpBmoM2c/pdisk_1.dat 2025-03-28T09:40:48.845402Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:48.918998Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:48.972998Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:48.973638Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:48.989482Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:49.135664Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:49.235249Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:40:49.235540Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:49.290499Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:49.290644Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:40:49.293789Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:40:49.293910Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:40:49.293981Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:40:49.295136Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:40:49.295313Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:40:49.295420Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:40:49.306240Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:40:49.338629Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:40:49.340162Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:40:49.340339Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:40:49.340387Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:40:49.340445Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:40:49.340488Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:49.341890Z node 1 :TX_DATASHARD DEBUG: 
TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:40:49.342017Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:40:49.345053Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:49.345125Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:49.345237Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:40:49.345293Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:49.345449Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:40:49.345660Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:40:49.345930Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:40:49.346036Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:40:49.348206Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:49.359107Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:40:49.359232Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:40:49.521972Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:40:49.527174Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:40:49.527272Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:49.528159Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:49.528224Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2025-03-28T09:40:49.528327Z node 1 :TX_DATASHARD DEBUG: Found ready operation [1000:281474976715657] in PlanQueue unit at 72075186224037888 2025-03-28T09:40:49.528649Z node 1 :TX_DATASHARD DEBUG: LoadTxDetails at 72075186224037888 loaded tx from db 1000:281474976715657 keys extracted: 0 2025-03-28T09:40:49.528862Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:40:49.529844Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:49.529945Z node 1 :TX_DATASHARD INFO: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 2] schema version# 1 2025-03-28T09:40:49.535996Z node 1 :TX_DATASHARD INFO: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2025-03-28T09:40:49.537998Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 
2025-03-28T09:40:49.539800Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2025-03-28T09:40:49.539859Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:49.540719Z node 1 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1000} 2025-03-28T09:40:49.540794Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:49.541858Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:49.541904Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:40:49.541983Z node 1 :TX_DATASHARD INFO: Change sender activated: at tablet: 72075186224037888 2025-03-28T09:40:49.542052Z node 1 :TX_DATASHARD DEBUG: Complete [1000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:40:49.542111Z node 1 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2025-03-28T09:40:49.542287Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:49.546330Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:49.549344Z node 1 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2025-03-28T09:40:49.549415Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2025-03-28T09:40:49.549732Z node 1 :TX_DATASHARD DEBUG: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1000 2025-03-28T09:40:49.579796Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:737:2619], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:49.579944Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:747:2624], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:49.580029Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:49.591006Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T09:40:49.598563Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:49.757265Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:49.761093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:751:2627], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T09:40:49.836678Z node 1 :TX_PROXY ERROR: Actor# [1:825:2670] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:40:50.925946Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976715660. Ctx: { TraceId: 01jqe209s627qhhva3gess3a0n, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTM5NzJmMTgtODU5YTE0YWEtZDhlMDQ3ZDgtNmRlZmFkYzY=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:40:50.947446Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:852:2688], serverId# [1:853:2689], sessionId# [0:0:0] 2025-03-28T09:40:50.949266Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:40:50.967807Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09 ... 523605Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-03-28T09:41:08.523632Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:41:08.523664Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037891 for WaitForStreamClearance 2025-03-28T09:41:08.523712Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:41:08.523768Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:41:08.523814Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:41:08.523927Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:25} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-03-28T09:41:08.523963Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:25} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:41:08.523991Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:41:08.524018Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:41:08.524057Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037890 for WaitForStreamClearance 2025-03-28T09:41:08.525535Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:41:08.525644Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:26} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{15, redo 134b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-03-28T09:41:08.525692Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:26} Tx{38, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 
2025-03-28T09:41:08.526115Z node 2 :TABLET_EXECUTOR INFO: Leader{72075186224037890:1:27} starting Scan{8 on 1001, TReadTableScan} 2025-03-28T09:41:08.527090Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 1 2025-03-28T09:41:08.527683Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} commited cookie 1 for step 24 2025-03-28T09:41:08.527745Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 63000} 2025-03-28T09:41:08.527818Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:41:08.528053Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} commited cookie 8 for step 25 2025-03-28T09:41:08.528519Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037890, TxId: 281474976715666, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:41:08.528696Z node 2 :TX_DATASHARD DEBUG: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715666, PendingAcks: 0 2025-03-28T09:41:08.528753Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 0 2025-03-28T09:41:08.592772Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037890 2025-03-28T09:41:08.592835Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037890 2025-03-28T09:41:08.593071Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-03-28T09:41:08.593118Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:41:08.593157Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037890 2025-03-28T09:41:08.593189Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:41:08.593223Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037890 for ReadTableScan 2025-03-28T09:41:08.593463Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:41:08.593562Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{16, redo 336b alter 0b annex 0, ~{ 1, 3, 4, 12, 7, 8 } -{ }, 0 gb} 2025-03-28T09:41:08.593617Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:41:08.610737Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:1:27} commited cookie 1 for step 26 2025-03-28T09:41:08.610891Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:41:08.610956Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037890 2025-03-28T09:41:08.611018Z node 2 :TX_DATASHARD DEBUG: Complete [63000 : 281474976715666] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1532:3310], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:41:08.611092Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:41:08.611431Z 
node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} commited cookie 1 for step 24 2025-03-28T09:41:08.611472Z node 2 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 63000} 2025-03-28T09:41:08.611518Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-03-28T09:41:08.611548Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-03-28T09:41:08.611762Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:87} Tx{87, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations 2025-03-28T09:41:08.611827Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:87} Tx{87, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:41:08.612033Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:87} Tx{87, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} hope 1 -> done Change{81, redo 184b alter 0b annex 0, ~{ 4, 0 } -{ }, 0 gb} 2025-03-28T09:41:08.612113Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:87} Tx{87, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxMediatorConfirmations} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:41:08.612336Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-03-28T09:41:08.612382Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:25} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:41:08.612422Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-03-28T09:41:08.612458Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:41:08.612496Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037891 for WaitForStreamClearance 2025-03-28T09:41:08.612720Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:41:08.612786Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:26} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{15, redo 134b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2025-03-28T09:41:08.612830Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:26} Tx{39, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:41:08.613109Z node 2 :TABLET_EXECUTOR INFO: Leader{72075186224037891:1:27} starting Scan{8 on 1001, TReadTableScan} 2025-03-28T09:41:08.613483Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} commited cookie 8 for step 25 2025-03-28T09:41:08.613657Z node 2 :TX_DATASHARD DEBUG: Got quota for read table scan ShardId: 72075186224037891, TxId: 281474976715666, MessageQuota: 1 2025-03-28T09:41:08.614097Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:88} commited cookie 1 for step 87 2025-03-28T09:41:08.614412Z node 2 :TX_DATASHARD DEBUG: Send response data ShardId: 72075186224037891, TxId: 281474976715666, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2025-03-28T09:41:08.616225Z node 2 :TX_DATASHARD DEBUG: Got stream data ack 
ShardId: 72075186224037891, TxId: 281474976715666, PendingAcks: 0 2025-03-28T09:41:08.616276Z node 2 :TX_DATASHARD DEBUG: Finish scan ShardId: 72075186224037891, TxId: 281474976715666, MessageQuota: 0 2025-03-28T09:41:08.673206Z node 2 :TX_DATASHARD DEBUG: FullScan complete at 72075186224037891 2025-03-28T09:41:08.673266Z node 2 :TX_DATASHARD DEBUG: Found op: cookie: 281474976715666, at: 72075186224037891 2025-03-28T09:41:08.673478Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxProgressTransaction 2025-03-28T09:41:08.673527Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:41:08.673568Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037891 2025-03-28T09:41:08.673603Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 active 1 active planned 1 immediate 0 planned 1 2025-03-28T09:41:08.673637Z node 2 :TX_DATASHARD DEBUG: Found ready candidate operation [63000:281474976715666] at 72075186224037891 for ReadTableScan 2025-03-28T09:41:08.673863Z node 2 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037891 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:41:08.673954Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 1 -> done Change{16, redo 336b alter 0b annex 0, ~{ 1, 3, 4, 12, 7, 8 } -{ }, 0 gb} 2025-03-28T09:41:08.674003Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} Tx{40, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:41:08.684758Z node 2 :TABLET_EXECUTOR DEBUG: Leader{72075186224037891:1:27} commited cookie 1 for step 26 2025-03-28T09:41:08.684834Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-03-28T09:41:08.684873Z node 2 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037891 2025-03-28T09:41:08.684928Z node 2 :TX_DATASHARD DEBUG: Complete [63000 : 281474976715666] from 72075186224037891 at tablet 72075186224037891 send result to client [2:1532:3310], exec latency: 0 ms, propose latency: 1 ms 2025-03-28T09:41:08.684986Z node 2 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 |98.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_build_index/unittest >> DataShardReplication::ReplicatedTable-UseSink [GOOD] >> DataShardReplication::ApplyChangesToReplicatedTable >> TestFormatHandler::ClientError |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/py3test >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] |98.0%| [TM] {RESULT} ydb/tests/functional/scheme_tests/py3test >> TPQTest::TestSourceIdDropBySourceIdCount [GOOD] >> TPQTest::TestStorageRetention >> TQuoterServiceTest::StaticMultipleAndResources [GOOD] >> TQuoterServiceTest::StaticDeadlines >> KafkaProtocol::BalanceScenario [GOOD] >> KafkaProtocol::OffsetCommitAndFetchScenario >> MediatorTest::TabletAckBeforePlanComplete [GOOD] >> MediatorTest::TabletAckWhenDead >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatus >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable [GOOD] >> 
DataShardFollowers::FollowerStaleRo >> TPQTest::TestStorageRetention [GOOD] >> TPQTest::TestStatusWithMultipleConsumers >> TopicSessionTests::ReadNonExistentTopic [GOOD] >> TCreateAndDropViewTest::DropViewIfExists [FAIL] >> TCreateAndDropViewTest::DropViewInFolder >> TopicSessionTests::SlowSession >> TPQTest::TestStatusWithMultipleConsumers [GOOD] >> TestFormatHandler::ClientError [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] [SKIPPED] >> test.py::test[solomon-UnknownSetting-] >> TestFormatHandler::ClientErrorWithEmptyFilter |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestStatusWithMultipleConsumers [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:103:2057] recipient: [1:101:2135] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:108:2057] recipient: [1:101:2135] 2025-03-28T09:37:51.494127Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:51.494236Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:149:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927938 is [1:153:2174] sender: [1:154:2057] recipient: [1:147:2170] Leader for TabletID 72057594037927937 is [1:107:2139] sender: [1:179:2057] recipient: [1:14:2061] 2025-03-28T09:37:51.510341Z node 1 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:51.525460Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 1 actor [1:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 1 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 Important: false } 2025-03-28T09:37:51.526332Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [1:185:2198] 2025-03-28T09:37:51.528054Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:185:2198] 2025-03-28T09:37:51.529679Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [1:186:2199] 2025-03-28T09:37:51.530959Z node 1 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:186:2199] 2025-03-28T09:37:51.536234Z node 1 :PERSQUEUE INFO: new Cookie owner1|6a8c39e7-7da3a946-df7305dc-775d3766_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 
2025-03-28T09:37:51.536675Z node 1 :PERSQUEUE INFO: new Cookie owner2|788f5eb-aee90deb-5aebd6be-712b3ff2_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 2025-03-28T09:37:51.548883Z node 1 :PERSQUEUE INFO: new Cookie owner1|c79853ab-711b101b-f3f71178-9f59000_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:103:2057] recipient: [2:101:2135] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:108:2057] recipient: [2:101:2135] 2025-03-28T09:37:51.903041Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:51.903125Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:149:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927938 is [2:153:2174] sender: [2:154:2057] recipient: [2:147:2170] Leader for TabletID 72057594037927937 is [2:107:2139] sender: [2:177:2057] recipient: [2:14:2061] 2025-03-28T09:37:51.921901Z node 2 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:51.923002Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 2 actor [2:175:2190] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 2 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 Important: false } 2025-03-28T09:37:51.923761Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [2:183:2196] 2025-03-28T09:37:51.926368Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:183:2196] 2025-03-28T09:37:51.928061Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [2:184:2197] 2025-03-28T09:37:51.929924Z node 2 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:184:2197] 2025-03-28T09:37:51.936016Z node 2 :PERSQUEUE INFO: new Cookie owner1|9fe8aa25-29dfc188-457a1ee6-1fd78409_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-03-28T09:37:51.936514Z node 2 :PERSQUEUE INFO: new Cookie owner2|1555f835-ad7a5f49-8256a08c-28ce6d77_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 2025-03-28T09:37:51.951948Z node 2 :PERSQUEUE INFO: new Cookie owner1|3331d327-655c54f2-bff3c53-8cea2c10_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] IGNORE Leader for TabletID 72057594037927937 
is [0:0:0] sender: [3:103:2057] recipient: [3:101:2135] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:108:2057] recipient: [3:101:2135] 2025-03-28T09:37:52.292600Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.292661Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [3:149:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927938 is [3:153:2174] sender: [3:154:2057] recipient: [3:147:2170] Leader for TabletID 72057594037927937 is [3:107:2139] sender: [3:179:2057] recipient: [3:14:2061] 2025-03-28T09:37:52.306888Z node 3 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.307576Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 3 actor [3:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 3 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 3 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 3 Important: false } 2025-03-28T09:37:52.307953Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [3:185:2198] 2025-03-28T09:37:52.309318Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [3:185:2198] 2025-03-28T09:37:52.310350Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [3:186:2199] 2025-03-28T09:37:52.311504Z node 3 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [3:186:2199] 2025-03-28T09:37:52.314792Z node 3 :PERSQUEUE INFO: new Cookie owner1|20282b0a-8bda954a-fb2dc2a7-632f52f6_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 2025-03-28T09:37:52.315058Z node 3 :PERSQUEUE INFO: new Cookie owner2|54dd2b84-4cc704ef-1cb6ef9a-48a76ca7_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner2 2025-03-28T09:37:52.326443Z node 3 :PERSQUEUE INFO: new Cookie owner1|24200e15-128c67ed-9c62c51a-5f4a1308_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner owner1 Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:103:2057] recipient: [4:101:2135] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:108:2057] recipient: [4:101:2135] 2025-03-28T09:37:52.721500Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.721570Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: 
[4:147:2170] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [4:149:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927938 is [4:153:2174] sender: [4:154:2057] recipient: [4:147:2170] Leader for TabletID 72057594037927937 is [4:107:2139] sender: [4:179:2057] recipient: [4:14:2061] 2025-03-28T09:37:52.735313Z node 4 :PERSQUEUE NOTICE: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2025-03-28T09:37:52.735951Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937] Config applied version 4 actor [4:177:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 4 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } ReadRuleGenerations: 4 MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 4 Important: false } 2025-03-28T09:37:52.736380Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] bootstrapping 0 [4:185:2198] 2025-03-28T09:37:52.738050Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 0, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [4:185:2198] 2025-03-28T09:37:52.739299Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] bootstrapping 1 [4:186:2199] 2025-03-28T09:37:52.740526Z node 4 :PERSQUEUE INFO: [PQ: 72057594037927937, Partition: 1, State: StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [4:186:2199] 2025-03-28T09:37:52.744657Z node 4 :PERSQUEUE INFO: new Cookie default|24a3547e-9b4dd9c1-92c88473-c3b60730_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner d ... 
R INFO: [72057594037927938][rt3.dc1--topic] pipe [134:394:2388] connected; active server actors: 1 2025-03-28T09:41:15.952554Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:399:2393] connected; active server actors: 1 2025-03-28T09:41:15.955588Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:404:2398] connected; active server actors: 1 2025-03-28T09:41:15.958381Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:409:2403] connected; active server actors: 1 2025-03-28T09:41:15.961627Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:414:2408] connected; active server actors: 1 2025-03-28T09:41:15.965318Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:419:2413] connected; active server actors: 1 2025-03-28T09:41:15.969942Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:424:2418] connected; active server actors: 1 2025-03-28T09:41:15.973455Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:429:2423] connected; active server actors: 1 2025-03-28T09:41:15.978501Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:434:2428] connected; active server actors: 1 2025-03-28T09:41:15.981663Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:439:2433] connected; active server actors: 1 2025-03-28T09:41:15.984476Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:444:2438] connected; active server actors: 1 2025-03-28T09:41:15.995667Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:449:2443] connected; active server actors: 1 2025-03-28T09:41:15.998971Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:454:2448] connected; active server actors: 1 2025-03-28T09:41:16.001780Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:459:2453] connected; active server actors: 1 2025-03-28T09:41:16.004273Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:464:2458] connected; active server actors: 1 2025-03-28T09:41:16.007275Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:469:2463] connected; active server actors: 1 2025-03-28T09:41:16.009596Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:474:2468] connected; active server actors: 1 2025-03-28T09:41:16.011673Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:479:2473] connected; active server actors: 1 2025-03-28T09:41:16.013620Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:484:2478] connected; active server actors: 1 2025-03-28T09:41:16.015541Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:489:2483] connected; active server actors: 1 2025-03-28T09:41:16.017937Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:494:2488] connected; active server actors: 1 2025-03-28T09:41:16.020298Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:499:2493] connected; active server actors: 1 2025-03-28T09:41:16.022886Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe 
[134:504:2498] connected; active server actors: 1 2025-03-28T09:41:16.028181Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:509:2503] connected; active server actors: 1 2025-03-28T09:41:16.030988Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:514:2508] connected; active server actors: 1 2025-03-28T09:41:16.033673Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:519:2513] connected; active server actors: 1 2025-03-28T09:41:16.036115Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:524:2518] connected; active server actors: 1 2025-03-28T09:41:16.038767Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:529:2523] connected; active server actors: 1 2025-03-28T09:41:16.041496Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:534:2528] connected; active server actors: 1 2025-03-28T09:41:16.044186Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:539:2533] connected; active server actors: 1 2025-03-28T09:41:16.046532Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:544:2538] connected; active server actors: 1 2025-03-28T09:41:16.056816Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:549:2543] connected; active server actors: 1 2025-03-28T09:41:16.060064Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:554:2548] connected; active server actors: 1 2025-03-28T09:41:16.066365Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:559:2553] connected; active server actors: 1 2025-03-28T09:41:16.072553Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:564:2558] connected; active server actors: 1 2025-03-28T09:41:16.076048Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:569:2563] connected; active server actors: 1 2025-03-28T09:41:16.079293Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:574:2568] connected; active server actors: 1 2025-03-28T09:41:16.082501Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:579:2573] connected; active server actors: 1 2025-03-28T09:41:16.085247Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:584:2578] connected; active server actors: 1 2025-03-28T09:41:16.088250Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:589:2583] connected; active server actors: 1 2025-03-28T09:41:16.091662Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:594:2588] connected; active server actors: 1 2025-03-28T09:41:16.094523Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:599:2593] connected; active server actors: 1 2025-03-28T09:41:16.097209Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:604:2598] connected; active server actors: 1 2025-03-28T09:41:16.100383Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:609:2603] connected; active server actors: 1 2025-03-28T09:41:16.103806Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:614:2608] connected; active server actors: 1 
2025-03-28T09:41:16.106741Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:619:2613] connected; active server actors: 1 2025-03-28T09:41:16.109772Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:624:2618] connected; active server actors: 1 2025-03-28T09:41:16.113369Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:629:2623] connected; active server actors: 1 2025-03-28T09:41:16.116449Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:634:2628] connected; active server actors: 1 2025-03-28T09:41:16.120603Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:639:2633] connected; active server actors: 1 2025-03-28T09:41:16.123575Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:644:2638] connected; active server actors: 1 2025-03-28T09:41:16.126821Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:649:2643] connected; active server actors: 1 2025-03-28T09:41:16.129569Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:654:2648] connected; active server actors: 1 2025-03-28T09:41:16.132593Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:659:2653] connected; active server actors: 1 2025-03-28T09:41:16.135320Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:664:2658] connected; active server actors: 1 2025-03-28T09:41:16.137572Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:669:2663] connected; active server actors: 1 2025-03-28T09:41:16.140125Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:674:2668] connected; active server actors: 1 2025-03-28T09:41:16.142174Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:679:2673] connected; active server actors: 1 2025-03-28T09:41:16.144138Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:684:2678] connected; active server actors: 1 2025-03-28T09:41:16.146050Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:689:2683] connected; active server actors: 1 2025-03-28T09:41:16.147942Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:694:2688] connected; active server actors: 1 2025-03-28T09:41:16.149882Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:699:2693] connected; active server actors: 1 2025-03-28T09:41:16.153785Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:704:2698] connected; active server actors: 1 2025-03-28T09:41:16.157170Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:709:2703] connected; active server actors: 1 2025-03-28T09:41:16.159203Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:714:2708] connected; active server actors: 1 2025-03-28T09:41:16.161361Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:719:2713] connected; active server actors: 1 2025-03-28T09:41:16.163560Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:724:2718] connected; active server actors: 1 2025-03-28T09:41:16.165865Z node 134 
:PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:729:2723] connected; active server actors: 1 2025-03-28T09:41:16.168178Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:734:2728] connected; active server actors: 1 2025-03-28T09:41:16.170611Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:739:2733] connected; active server actors: 1 2025-03-28T09:41:16.173177Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:744:2738] connected; active server actors: 1 2025-03-28T09:41:16.175540Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:749:2743] connected; active server actors: 1 2025-03-28T09:41:16.178410Z node 134 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [134:754:2748], now have 1 active actors on pipe 2025-03-28T09:41:16.179772Z node 134 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [134:757:2751], now have 1 active actors on pipe 2025-03-28T09:41:16.180964Z node 134 :PERSQUEUE DEBUG: [PQ: 72057594037927937] server connected, pipe [134:760:2754], now have 1 active actors on pipe 2025-03-28T09:41:16.182277Z node 134 :PERSQUEUE_READ_BALANCER INFO: [72057594037927938][rt3.dc1--topic] pipe [134:763:2757] connected; active server actors: 1 |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true >> KqpTpch::Query10 [GOOD] >> KqpTpch::Query11 >> DataShardReplication::ApplyChangesToReplicatedTable [GOOD] >> DataShardReplication::ApplyChangesToCommonTable >> TQuoterServiceTest::StaticDeadlines [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] >> test_http_api.py::TestHttpApi::test_restart_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_simple_streaming_query >> test_http_api.py::TestHttpApi::test_simple_streaming_query [GOOD] >> test_http_api.py::TestHttpApi::test_integral_results >> TSentinelTests::PDiskErrorState [GOOD] >> TSentinelTests::PDiskFaultyState >> DataShardFollowers::FollowerStaleRo [GOOD] >> DataShardFollowers::FollowerRebootAfterSysCompaction >> TestFormatHandler::ClientErrorWithEmptyFilter [GOOD] >> test_http_api.py::TestHttpApi::test_integral_results [GOOD] >> test_http_api.py::TestHttpApi::test_optional_results >> KafkaProtocol::OffsetCommitAndFetchScenario [GOOD] >> KafkaProtocol::CreateTopicsScenarioWithKafkaAuth >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchSolomon [GOOD] >> TDqSolomonWriteActorTest::TestWriteWithTimeseries >> TestJsonParser::Simple1 >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows >> TestJsonParser::Simple1 [GOOD] >> TestJsonParser::Simple2 >> MediatorTest::TabletAckWhenDead [GOOD] |98.1%| [TA] $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_http_api.py::TestHttpApi::test_optional_results [GOOD] >> MediatorTest::PlanStepAckToReconnectedMediator >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatus [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false] >> TestJsonParser::Simple2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadOverrun >> TestJsonParser::Simple3 >> TestJsonParser::Simple3 [GOOD] >> test_http_api.py::TestHttpApi::test_pg_results >> DataShardReplication::ApplyChangesToCommonTable [GOOD] >> DataShardReplication::ApplyChangesWithConcurrentTx |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> TestJsonParser::Simple4 >> test.py::test[solomon-UnknownSetting-] [GOOD] >> TestJsonParser::Simple4 [GOOD] >> TestJsonParser::LargeStrings >> TestJsonParser::LargeStrings [GOOD] >> TestJsonParser::ManyValues >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath [GOOD] >> TestJsonParser::ManyValues [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD] >> TestJsonParser::MissingFields >> TCreateAndDropViewTest::DropViewInFolder [GOOD] >> TestJsonParser::MissingFields [GOOD] >> TCreateAndDropViewTest::ContextPollution |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.1%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TestJsonParser::NestedTypes >> test_http_api.py::TestHttpApi::test_pg_results [GOOD] >> test_http_api.py::TestHttpApi::test_set_result >> TestJsonParser::NestedTypes [GOOD] >> TestJsonParser::SimpleBooleans |98.1%| [TM] {asan, default-linux-x86_64, pic, release} ydb/tests/fq/solomon/py3test >> test.py::test[solomon-UnknownSetting-] [GOOD] |98.1%| [TM] {RESULT} ydb/tests/fq/solomon/py3test >> TestJsonParser::SimpleBooleans [GOOD] >> KqpTpch::Query11 [GOOD] >> KqpTpch::Query12 >> TSentinelTests::PDiskFaultyState [GOOD] >> TSentinelTests::PDiskRackGuardHalfRack >> TestJsonParser::ManyBatches ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:39:03.462489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:03.462590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:03.462630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:03.462667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:03.462734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:03.462759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:03.462813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:03.462870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:03.463176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:03.524138Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:39:03.524190Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:39:03.530405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:03.530534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:03.530704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:03.536440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:03.536582Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:03.537113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:03.537326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:03.538966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:03.539946Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:03.539989Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:03.540077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:03.540107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:03.540160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxServerlessStorageBilling.Complete 2025-03-28T09:39:03.540280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:39:03.546047Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:39:03.639126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:03.639332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:03.639498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:03.639726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:03.639786Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:03.641554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:03.641647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:03.641786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:03.641825Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:03.641853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:03.641878Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:03.643471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:03.643517Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:03.643555Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:03.644715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:03.644747Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:03.644789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:03.644836Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:03.647293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:03.648484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:03.648596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:03.649301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:03.649436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:03.649509Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:03.649748Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:03.649791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:03.649914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:03.649990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:03.651901Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:03.651959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:03.652141Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:03.652183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:39:03.652431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:03.652477Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:03.652568Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:03.652606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:03.652643Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part 
operation is done id#1:0 progress is 1/1 2025-03-28T09:39:03.652675Z no ... Size: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: 
false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:41:25.046167Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:41:25.046483Z node 54 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 316us result status StatusSuccess 2025-03-28T09:41:25.047661Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:41:25.063566Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1114:2878] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-28T09:41:25.063681Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1060:2878] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-03-28T09:41:25.063898Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1114:2878] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 
1743154885031857 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743154885031857 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743154885031857 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] }
2025-03-28T09:41:25.068802Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][54:1114:2878] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3
2025-03-28T09:41:25.068941Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][54:1060:2878] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 }
>> TestJsonParser::ManyBatches [GOOD]
>> TestJsonParser::LittleBatches
>> test_http_api.py::TestHttpApi::test_set_result [GOOD]
>> MediatorTest::PlanStepAckToReconnectedMediator [GOOD]
>> TopicSessionTests::SlowSession [GOOD]
>> test_http_api.py::TestHttpApi::test_complex_results
>> TestJsonParser::LittleBatches [GOOD]
>> MediatorTest::WatcherReconnect
>> TestJsonParser::MissingFieldsValidation
>> TopicSessionTests::TwoSessionsWithDifferentSchemes
>> KafkaProtocol::CreateTopicsScenarioWithKafkaAuth [GOOD]
>> KafkaProtocol::CreateTopicsScenarioWithoutKafkaAuth
|98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> TestJsonParser::MissingFieldsValidation [GOOD]
|98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> DataShardFollowers::FollowerRebootAfterSysCompaction [GOOD]
>> DataShardFollowers::FollowerAfterSysCompaction
>> TestJsonParser::TypeKindsValidation
>> TestJsonParser::TypeKindsValidation [GOOD]
>> TestJsonParser::NumbersValidation
>> TestJsonParser::NumbersValidation [GOOD]
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [FAIL]
>> test_http_api.py::TestHttpApi::test_complex_results [GOOD]
>> test_http_api.py::TestHttpApi::test_result_offset_limit
>> TestJsonParser::StringsValidation
>> TestJsonParser::StringsValidation [GOOD]
>> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD]
>> TestJsonParser::NestedJsonValidation
>> KqpTpch::Query12 [GOOD]
>> KqpTpch::Query13
|98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [FAIL]
>> TestJsonParser::NestedJsonValidation [GOOD]
|98.1%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test
>> TestJsonParser::BoolsValidation
>> KqpLimits::CancelAfterRwTx+useSink [GOOD]
>> KqpLimits::CancelAfterRwTx-useSink
>> test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] [GOOD]
>> TestJsonParser::BoolsValidation [GOOD]
>> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD]
>> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath [GOOD]
>> QuoterWithKesusTest::HandlesNonExistentResource
>> test_http_api.py::TestHttpApi::test_result_offset_limit [GOOD]
>> test_http_api.py::TestHttpApi::test_openapi_spec
>> KeyValueGRPCService::SimpleWriteReadOverrun [GOOD]
>> KeyValueGRPCService::SimpleWriteReadRange
>> TestJsonParser::BoolsValidation [GOOD]
>> TDqSolomonWriteActorTest::TestWriteWithTimeseries [GOOD]
>> TDqSolomonWriteActorTest::TestCheckpoints
>> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD]
>> MediatorTest::WatcherReconnect [GOOD]
>> TestJsonParser::JsonStructureValidation
>> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_replication/unittest >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD]
Test command err:
2025-03-28T09:40:40.287717Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:40.288195Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:40.288260Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000673/r3tmp/tmpJ8zZjq/pdisk_1.dat 2025-03-28T09:40:40.822425Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:40.882623Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:40.937724Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:40:40.939994Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:40:40.940333Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:40.943118Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:40.956266Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:41.056850Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:40:41.056957Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:40:41.058202Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:40:41.201825Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:40:41.201961Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:40:41.202838Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:40:41.202979Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:40:41.203370Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:40:41.203608Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 RedirectRequired# true 2025-03-28T09:40:41.203815Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:40:41.204172Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:40:41.212682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:41.214667Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:40:41.215021Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:40:41.275628Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:40:41.276853Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:40:41.278309Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:40:41.278614Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:41.332767Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:40:41.333527Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:41.333650Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:40:41.335561Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:40:41.335638Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:40:41.335694Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:40:41.337625Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:40:41.337809Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:40:41.337934Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:40:41.348695Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:40:41.378109Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:40:41.380032Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:40:41.380632Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:40:41.380702Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:40:41.380743Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:40:41.380785Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:41.381097Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient 
[1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:40:41.381162Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:40:41.382375Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:40:41.382503Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:40:41.383943Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:41.383999Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:41.384159Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:40:41.384238Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:40:41.384283Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:40:41.384325Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:40:41.384376Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:41.384513Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:40:41.384555Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:40:41.384602Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:40:41.384669Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:40:41.384710Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:40:41.384978Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:40:41.385383Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:40:41.385473Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:40:41.385716Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:40:41.385791Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:40:41.385834Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:40:41.385873Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:40:41.385911Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:40:41.386295Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:40:41.386344Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:40:41.386395Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:40:41.386433Z 
node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:40:41.386484Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:40:41.386515Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:40:41.386562Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:40:41.386602Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:40:41.386632Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:40:41.388634Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:40:41.388712Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 720751862 ... 2025-03-28T09:41:30.069910Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:41:30.069972Z node 8 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [8:874:2705], serverId# [8:875:2706], sessionId# [0:0:0] 2025-03-28T09:41:30.070265Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269549570, Sender [8:873:2704], Recipient [8:667:2571]: NKikimrTxDataShard.TEvApplyReplicationChanges TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Source: "my-source" Changes { SourceOffset: 1 WriteTxId: 0 Key: "\001\000\004\000\000\000\001\000\000\000" Upsert { Tags: 2 Data: "\001\000\004\000\000\000\025\000\000\000" } } 2025-03-28T09:41:30.070469Z node 8 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v1000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v1500/18446744073709551615 ImmediateWriteEdge# v1000/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2025-03-28T09:41:30.070617Z node 8 :TX_DATASHARD TRACE: Lock 281474976715660 marked broken at v{min} 2025-03-28T09:41:30.083736Z node 8 :TX_DATASHARD DEBUG: Waiting for PlanStep# 1501 from mediator time cast 2025-03-28T09:41:30.084630Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 270270977, Sender [8:24:2071], Recipient [8:667:2571]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 1501} 2025-03-28T09:41:30.084693Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvMediatorTimecast::TEvNotifyPlanStep 2025-03-28T09:41:30.084755Z node 8 :TX_DATASHARD DEBUG: Notified by mediator time cast with PlanStep# 1501 at tablet 72075186224037888 2025-03-28T09:41:30.084826Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:41:30.312377Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715661. Ctx: { TraceId: 01jqe21hb59vcyzwgf90why89f, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=MTAzOWUyMGItNmM3MGJhYWUtMmExZTM2ODYtMmU2NTViZDI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:41:30.314113Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [8:891:2625], Recipient [8:667:2571]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 891 RawX2: 34359740993 } TxBody: " \0018\001j7\010\001\032\'\n#\t\214\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\n\010\340\247\022\020\0020\000@\n\220\001\000" TxId: 281474976715661 ExecLevel: 0 Flags: 8 MvccSnapshot { Step: 1500 TxId: 18446744073709551615 } 2025-03-28T09:41:30.314202Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:41:30.314435Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [8:667:2571], Recipient [8:667:2571]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-03-28T09:41:30.314470Z node 8 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-03-28T09:41:30.314564Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:41:30.323268Z node 8 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715660, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-03-28T09:41:30.323533Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2025-03-28T09:41:30.323632Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-03-28T09:41:30.323702Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2025-03-28T09:41:30.323765Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:41:30.323821Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:41:30.323896Z node 8 :TX_DATASHARD TRACE: Activated operation [0:281474976715661] at 72075186224037888 2025-03-28T09:41:30.323959Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-03-28T09:41:30.323993Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:41:30.324022Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-03-28T09:41:30.324057Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit ExecuteKqpDataTx 2025-03-28T09:41:30.324132Z node 8 :TX_DATASHARD TRACE: Operation [0:281474976715661] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193440 2025-03-28T09:41:30.324272Z node 8 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715660 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2025-03-28T09:41:30.324390Z node 8 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-28T09:41:30.324459Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-03-28T09:41:30.324490Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-03-28T09:41:30.324515Z node 8 :TX_DATASHARD TRACE: Add 
[0:281474976715661] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:41:30.324542Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit FinishPropose 2025-03-28T09:41:30.324588Z node 8 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715661 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-28T09:41:30.324688Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is DelayComplete 2025-03-28T09:41:30.324736Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:41:30.324793Z node 8 :TX_DATASHARD TRACE: Add [0:281474976715661] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:41:30.324842Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:281474976715661] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:41:30.324897Z node 8 :TX_DATASHARD TRACE: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2025-03-28T09:41:30.324922Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:41:30.324957Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:281474976715661] at 72075186224037888 has finished 2025-03-28T09:41:30.325047Z node 8 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:41:30.325105Z node 8 :TX_DATASHARD TRACE: Complete execution for [0:281474976715661] at 72075186224037888 on unit FinishPropose 2025-03-28T09:41:30.325162Z node 8 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:41:30.326476Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553215, Sender [8:899:2625], Recipient [8:667:2571]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-03-28T09:41:30.326648Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2025-03-28T09:41:30.327717Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2025-03-28T09:41:30.327865Z node 8 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-28T09:41:30.327923Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2025-03-28T09:41:30.327981Z node 8 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:41:30.328032Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:41:30.328089Z node 8 :TX_DATASHARD TRACE: Activated operation [0:4] at 72075186224037888 2025-03-28T09:41:30.328141Z node 8 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed 2025-03-28T09:41:30.328170Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:41:30.328197Z node 8 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2025-03-28T09:41:30.328220Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:4] at 
72075186224037888 on unit ExecuteRead
2025-03-28T09:41:30.328374Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 1500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 }
2025-03-28T09:41:30.328653Z node 8 :TX_DATASHARD TRACE: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v1500/18446744073709551615
2025-03-28T09:41:30.328732Z node 8 :TX_DATASHARD TRACE: 72075186224037888 Complete read# {[8:899:2625], 0} after executionsCount# 1
2025-03-28T09:41:30.328805Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:899:2625], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0
2025-03-28T09:41:30.328909Z node 8 :TX_DATASHARD TRACE: 72075186224037888 read iterator# {[8:899:2625], 0} finished in read
2025-03-28T09:41:30.328985Z node 8 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed
2025-03-28T09:41:30.329014Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead
2025-03-28T09:41:30.329042Z node 8 :TX_DATASHARD TRACE: Add [0:4] at 72075186224037888 to execution unit CompletedOperations
2025-03-28T09:41:30.329072Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations
2025-03-28T09:41:30.329122Z node 8 :TX_DATASHARD TRACE: Execution status for [0:4] at 72075186224037888 is Executed
2025-03-28T09:41:30.329146Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations
2025-03-28T09:41:30.329176Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:4] at 72075186224037888 has finished
2025-03-28T09:41:30.329231Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888
2025-03-28T09:41:30.329362Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888
2025-03-28T09:41:30.330272Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 269553219, Sender [8:899:2625], Recipient [8:667:2571]: NKikimrTxDataShard.TEvReadCancel ReadId: 0
2025-03-28T09:41:30.330346Z node 8 :TX_DATASHARD TRACE: 72075186224037888 ReadCancel: { ReadId: 0 }
2025-03-28T09:41:30.333644Z node 8 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [8:61:2108], Recipient [8:667:2571]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715660 LockNode: 8 Status: STATUS_NOT_FOUND
{ items { uint32_value: 1 } items { uint32_value: 11 } }
|98.1%| [TM] {RESULT} ydb/core/tx/datashard/ut_replication/unittest
>> TestJsonParser::JsonStructureValidation [GOOD]
>> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False]
>> MediatorTest::MultipleSteps
>> TestPurecalcFilter::Simple1
>> TDqSolomonWriteActorTest::TestCheckpoints [GOOD]
>> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint
>> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False]
>> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True]
>> TCreateAndDropViewTest::ContextPollution [GOOD]
>> TEvaluateExprInViewTest::EvaluateExpr
>> KqpTpch::Query13 [GOOD]
>> KqpTpch::Query14
>> DataShardFollowers::FollowerAfterSysCompaction [GOOD]
>> DataShardFollowers::FollowerAfterDataCompaction
>> KafkaProtocol::CreateTopicsScenarioWithoutKafkaAuth [GOOD]
>> KafkaProtocol::CreatePartitionsScenario
>> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD]
>> TopicSessionTests::TwoSessionsWithDifferentSchemes [GOOD]
>> MediatorTest::MultipleSteps [GOOD]
>> KqpTpch::Query14 [GOOD]
>> KqpTpch::Query15
>> MediatorTest::WatchesBeforeFirstStep
>> TopicSessionTests::TwoSessionsWithDifferentColumnTypes
>> QuoterWithKesusTest::HandlesNonExistentResource [GOOD]
>> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource
|98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test
>> TestPurecalcFilter::Simple1 [GOOD]
>> TestPurecalcFilter::Simple2
>> KeyValueGRPCService::SimpleWriteReadRange [GOOD]
>> KeyValueGRPCService::SimpleWriteListRange
>> MediatorTest::WatchesBeforeFirstStep [GOOD]
>> test_alloc_default.py::TestAlloc::test_default_delta[kikimr0]
>> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD]
>> KafkaProtocol::CreatePartitionsScenario [GOOD]
>> KafkaProtocol::AlterConfigsScenario
>> MediatorTest::RebootTargetTablets
>> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true [GOOD]
>> Coordinator::LastEmptyStepResent
>> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource [GOOD]
>> QuoterWithKesusTest::GetsQuota
>> KqpTpch::Query15 [GOOD]
>> KqpTpch::Query16
>> DataShardFollowers::FollowerAfterDataCompaction [GOOD]
>> DataShardFollowers::FollowerDuringSysPartSwitch
------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/solomon/actors/ut/unittest >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD]
Test command err:
2025-03-28T09:41:00.438861Z node 1 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init
2025-03-28T09:41:00.443430Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0
2025-03-28T09:41:00.445574Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer
2025-03-28T09:41:00.448049Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon
2025-03-28T09:41:00.448077Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer
2025-03-28T09:41:00.455096Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Fri, 28 Mar 2025 09:41:00 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1}
2025-03-28T09:41:00.462973Z node 1 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer
2025-03-28T09:41:10.934988Z node 2 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init
2025-03-28T09:41:10.976132Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 7500 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0
2025-03-28T09:41:11.001139Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer
2025-03-28T09:41:11.017350Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer
2025-03-28T09:41:11.032530Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink.
Push 109013 bytes of data to buffer 2025-03-28T09:41:11.047681Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-03-28T09:41:11.063247Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-03-28T09:41:11.080886Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-03-28T09:41:11.097458Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-03-28T09:41:11.105136Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 54513 bytes of data to buffer 2025-03-28T09:41:11.105467Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2025-03-28T09:41:11.105698Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-03-28T09:41:11.105923Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-03-28T09:41:11.105957Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-03-28T09:41:11.281275Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 28 Mar 2025 09:41:11 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-28T09:41:11.281757Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-03-28T09:41:11.281781Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-03-28T09:41:11.340873Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 28 Mar 2025 09:41:11 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-28T09:41:11.341250Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-03-28T09:41:11.341269Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-03-28T09:41:11.407322Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 28 Mar 2025 09:41:11 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-28T09:41:11.407773Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-03-28T09:41:11.407811Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-03-28T09:41:11.540107Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[4]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 28 Mar 2025 09:41:11 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-28T09:41:11.540517Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-03-28T09:41:11.540545Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-03-28T09:41:11.623201Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Solomon response[3]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 28 Mar 2025 09:41:11 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-28T09:41:11.623461Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 500 metrics with size of 54513 bytes to solomon 2025-03-28T09:41:11.623480Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer MaxRequestsInflight 2025-03-28T09:41:11.707232Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[5]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 28 Mar 2025 09:41:11 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-28T09:41:11.707340Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-28T09:41:11.756315Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[7]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Fri, 28 Mar 2025 09:41:11 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 500} 2025-03-28T09:41:11.756460Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-28T09:41:11.818253Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[6]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 28 Mar 2025 09:41:11 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-28T09:41:11.818467Z node 2 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-28T09:41:22.303696Z node 3 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-03-28T09:41:22.304265Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 10 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-03-28T09:41:22.304649Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 579 bytes of data to buffer 2025-03-28T09:41:22.304763Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 10 metrics with size of 579 bytes to solomon 2025-03-28T09:41:22.304784Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-28T09:41:22.331535Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 27 Date: Fri, 28 Mar 2025 09:41:22 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 10} 2025-03-28T09:41:22.331694Z node 3 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-28T09:41:32.690616Z node 4 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-03-28T09:41:32.696583Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 2400 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-03-28T09:41:32.724552Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2025-03-28T09:41:32.736862Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2025-03-28T09:41:32.744179Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 43613 bytes of data to buffer 2025-03-28T09:41:32.744526Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Sent 1000 metrics with size of 107903 bytes to solomon 2025-03-28T09:41:32.744829Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2025-03-28T09:41:32.744931Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 400 metrics with size of 43613 bytes to solomon 2025-03-28T09:41:32.744949Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2025-03-28T09:41:32.774570Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Fri, 28 Mar 2025 09:41:32 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 400} 2025-03-28T09:41:32.774710Z node 4 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 2 2025-03-28T09:41:32.879007Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 28 Mar 2025 09:41:32 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-28T09:41:32.879151Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: CheckpointInProgress Empty buffer 2025-03-28T09:41:33.015696Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Fri, 28 Mar 2025 09:41:33 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1000} 2025-03-28T09:41:33.015815Z node 4 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-28T09:41:33.936842Z node 5 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Init 2025-03-28T09:41:33.937291Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-03-28T09:41:33.937449Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-03-28T09:41:33.937625Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-03-28T09:41:33.937650Z node 5 :KQP_COMPUTE DEBUG: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 1 2025-03-28T09:41:33.942000Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Fri, 28 Mar 2025 09:41:33 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-03-28T09:41:33.942144Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-28T09:41:33.942368Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2025-03-28T09:41:33.942500Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2025-03-28T09:41:33.942632Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2025-03-28T09:41:33.942657Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2025-03-28T09:41:33.945776Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. 
Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Fri, 28 Mar 2025 09:41:33 GMT Server: Python/3.12 aiohttp/3.9.5 {"writtenMetricsCount": 1} 2025-03-28T09:41:33.945888Z node 5 :KQP_COMPUTE TRACE: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer |98.1%| [TS] {RESULT} ydb/library/yql/providers/solomon/actors/ut/unittest >> TestPurecalcFilter::Simple2 [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows [GOOD] >> TestPurecalcFilter::ManyValues >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes [GOOD] >> TSentinelTests::PDiskRackGuardHalfRack [GOOD] >> TSentinelTests::PDiskRackGuardFullRack >> TopicSessionTests::RestartSessionIfQueryStopped |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/py3test >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] |98.2%| [TM] {RESULT} ydb/tests/fq/http_api/py3test >> MediatorTest::RebootTargetTablets [GOOD] >> MediatorTest::ResendSubset >> KeyValueGRPCService::SimpleWriteListRange [GOOD] >> KeyValueGRPCService::SimpleGetStorageChannelStatus >> TEvaluateExprInViewTest::EvaluateExpr [GOOD] >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction >> KqpTpch::Query16 [GOOD] >> KqpTpch::Query17 >> KafkaProtocol::AlterConfigsScenario [GOOD] >> KafkaProtocol::LoginWithApiKey >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] >> Coordinator::LastEmptyStepResent [GOOD] >> CoordinatorVolatile::PlanResentOnReboots >> QuoterWithKesusTest::GetsQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuota >> TestPurecalcFilter::ManyValues [GOOD] >> TestPurecalcFilter::NullValues >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] >> DataShardStats::SharedCacheGarbage [GOOD] >> DataShardStats::CollectStatsForSeveralParts >> MediatorTest::ResendSubset [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/common/py3test >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] |98.2%| [TM] {RESULT} ydb/tests/fq/common/py3test >> MediatorTest::ResendNotSubset ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[PipeResets] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:39:21.974843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, 
RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:21.974944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:21.974995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:21.975038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:21.975084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:21.975113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:21.975182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:21.975260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:21.975571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:22.031359Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:39:22.031411Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:39:22.036191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:22.036275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:22.036375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:22.040949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:22.041079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:22.041503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:22.041641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:22.043025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:22.043908Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:22.043945Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:22.044027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:22.044053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a 
serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:22.044079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:22.044169Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:39:22.049048Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:39:22.149492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:22.149662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:22.149813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:22.149993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:22.150030Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:22.151957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:22.152087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:22.152267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:22.152346Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:22.152386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:22.152415Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:22.153822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:22.153858Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:22.153906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:22.155145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:22.155192Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 
2025-03-28T09:39:22.155243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:22.155285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:22.157807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:22.159094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:22.159223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:22.160009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:22.160110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:22.160143Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:22.160330Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:22.160370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:22.160497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:22.160568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:22.162283Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:22.162346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:22.162516Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:22.162561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:39:22.162794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:22.162826Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:22.162891Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 
1/1 2025-03-28T09:39:22.162917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:22.162951Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:22.162978Z no ... nd_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 
72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:41:53.030177Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:41:53.030423Z node 54 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 324us result status StatusSuccess 2025-03-28T09:41:53.031282Z node 54 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:41:53.042477Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1176:2959] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-28T09:41:53.042593Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1145:2959] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-03-28T09:41:53.042782Z node 54 :CHANGE_EXCHANGE DEBUG: 
[TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1176:2959] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743154912987500 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743154912987500 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1743154912987500 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:41:53.049903Z node 54 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][54:1176:2959] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-03-28T09:41:53.050038Z node 54 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][54:1145:2959] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> DataShardFollowers::FollowerDuringSysPartSwitch [GOOD] >> DataShardFollowers::FollowerDuringDataPartSwitch |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] |98.2%| [TM] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> KqpTpch::Query17 [GOOD] >> KqpTpch::Query18 >> CoordinatorVolatile::PlanResentOnReboots [GOOD] >> CoordinatorVolatile::MediatorReconnectPlanRace >> KeyValueGRPCService::SimpleGetStorageChannelStatus [GOOD] >> KeyValueGRPCService::SimpleCreateAlterDropVolume >> TestPurecalcFilter::NullValues [GOOD] >> QuoterWithKesusTest::GetsBigQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuotaWithDeadline >> MediatorTest::ResendNotSubset [GOOD] >> TestPurecalcFilter::PartialPush >> KafkaProtocol::LoginWithApiKey [GOOD] >> KafkaProtocol::LoginWithApiKeyWithoutAt >> MediatorTest::OneCoordinatorResendTxNotLost >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> KqpTpch::Query18 [GOOD] >> KqpTpch::Query19 >> TopicSessionTests::RestartSessionIfQueryStopped [GOOD] >> RowDispatcherTests::OneClientOneSession >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction [GOOD] >> TSelectFromViewTest::OneTable >> RowDispatcherTests::OneClientOneSession [GOOD] >> RowDispatcherTests::TwoClientOneSession >> RowDispatcherTests::TwoClientOneSession [GOOD] >> RowDispatcherTests::SessionError >> RowDispatcherTests::SessionError [GOOD] >> RowDispatcherTests::CoordinatorSubscribe >> RowDispatcherTests::CoordinatorSubscribe [GOOD] >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged >> 
RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged [GOOD] >> DataShardStats::CollectStatsForSeveralParts [GOOD] >> DataShardStats::NoData >> RowDispatcherTests::TwoClients4Sessions >> RowDispatcherTests::TwoClients4Sessions [GOOD] >> RowDispatcherTests::ReinitConsumerIfNewGeneration >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] [GOOD] >> RowDispatcherTests::ReinitConsumerIfNewGeneration [GOOD] >> DataShardFollowers::FollowerDuringDataPartSwitch [GOOD] >> DataShardFollowers::FollowerReadDuringSplit >> RowDispatcherTests::HandleTEvUndelivered >> RowDispatcherTests::HandleTEvUndelivered [GOOD] >> RowDispatcherTests::TwoClientTwoConnection >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] >> RowDispatcherTests::TwoClientTwoConnection [GOOD] >> RowDispatcherTests::ProcessNoSession >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] >> RowDispatcherTests::ProcessNoSession [GOOD] >> RowDispatcherTests::IgnoreWrongPartitionId >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] >> CoordinatorVolatile::MediatorReconnectPlanRace [GOOD] >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TestPurecalcFilter::PartialPush [GOOD] >> TDqPqRdReadActorTests::Backpressure [GOOD] >> TestPurecalcFilter::CompilationValidation >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/mediator/ut/unittest >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] Test command err: 2025-03-28T09:41:02.271409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:41:02.272048Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:41:02.272107Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00091f/r3tmp/tmpTPKfro/pdisk_1.dat 2025-03-28T09:41:03.173280Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 TTxSchema Complete 2025-03-28T09:41:03.173922Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit wait TEvMediatorConfiguration for switching to StateWork from external 2025-03-28T09:41:03.174542Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:617:2532] connected 2025-03-28T09:41:03.181474Z node 1 :TX_MEDIATOR NOTICE: tablet# 72057594047365120 actor# [1:600:2522] HANDLE TEvMediatorConfiguration Version# 1 2025-03-28T09:41:03.182321Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 version# 1 TTxConfigure Complete 2025-03-28T09:41:03.182536Z node 1 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit Complete ... waiting for watcher to connect 2025-03-28T09:41:03.183291Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:623:2537] connected 2025-03-28T09:41:03.183399Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:621:2536] to# [1:619:2534] ExecQueue 2025-03-28T09:41:03.183454Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [1:621:2536] bucket# 0 ... 
waiting for watcher to connect (done) 2025-03-28T09:41:03.193561Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:621:2536] to# [1:619:2534] ExecQueue 2025-03-28T09:41:03.193648Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvWatch 2025-03-28T09:41:03.193707Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [1:620:2535] bucket.ActiveActor 2025-03-28T09:41:03.193795Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [1:621:2536]} 2025-03-28T09:41:03.194039Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# [1:621:2536] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} 2025-03-28T09:41:03.208937Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [1:627:2541] connected 2025-03-28T09:41:03.209055Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-03-28T09:41:03.209098Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [1:625:2539] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 4 Coordinator# 72057594046316545 2025-03-28T09:41:03.209510Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1000Steps: {{TCoordinatorStep step# 1000 PrevStep# 0}}} marker# M1 2025-03-28T09:41:03.209572Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:620:2535] bucket.ActiveActor step# 1000 2025-03-28T09:41:03.209638Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1000} 2025-03-28T09:41:03.209850Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# [1:621:2536] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1000} ... waiting for blocked plan step 2025-03-28T09:41:03.238785Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2025-03-28T09:41:03.238888Z node 1 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2025-03-28T09:41:03.239006Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCommitStep to# [1:619:2534] ExecQueue {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:625:2539]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M0 2025-03-28T09:41:03.239124Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:625:2539]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M1 2025-03-28T09:41:03.239395Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. 
TxIds: txid# 1 marker# M2 2025-03-28T09:41:03.239437Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 SEND Ev to# [1:620:2535] step# 1010 forTablet# 72057594047365121 txid# 1 marker# M3 2025-03-28T09:41:03.239504Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:620:2535] bucket.ActiveActor step# 1010 2025-03-28T09:41:03.239622Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [1:625:2539]}}} marker# M4 2025-03-28T09:41:03.239935Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2025-03-28T09:41:03.241145Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [1:649:2553] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:41:03.241285Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-03-28T09:41:03.241350Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... waiting for blocked plan step (done) ... waiting for no pending commands 2025-03-28T09:41:03.241795Z node 1 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [1:621:2536] to# [1:619:2534] ExecQueue 2025-03-28T09:41:03.241859Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [1:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [1:621:2536] bucket# 0 ... waiting for no pending commands (done) ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... waiting for watch updates 2025-03-28T09:41:03.242196Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 HANDLE {TEvPlanStepAccepted TabletId# 72057594047365121 step# 1010} 2025-03-28T09:41:03.242252Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# [1:625:2539] {TEvPlanStepAck TabletId# 72057594047365121 step# 1010 txid# 1} 2025-03-28T09:41:03.242355Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [1:620:2535] Mediator# 72057594047365120 SEND to# [1:621:2536] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1010} ... waiting for watch updates (done) 2025-03-28T09:41:07.674593Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:302:2346], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:41:07.675016Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:41:07.675073Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00091f/r3tmp/tmp28i6xI/pdisk_1.dat 2025-03-28T09:41:08.379586Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 TTxSchema Complete 2025-03-28T09:41:08.380172Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit wait TEvMediatorConfiguration for switching to StateWork from external 2025-03-28T09:41:08.380675Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [2:617:2532] connected 2025-03-28T09:41:08.380771Z node 2 :TX_MEDIATOR NOTICE: tablet# 72057594047365120 actor# [2:600:2522] HANDLE TEvMediatorConfiguration Version# 1 2025-03-28T09:41:08.381188Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 version# 1 TTxConfigure Complete 2025-03-28T09:41:08.381322Z node 2 :TX_MEDIATOR INFO: tablet# 72057594047365120 CreateTxInit Complete ... waiting for watcher to connect 2025-03-28T09:41:08.381780Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [2:623:2537] connected 2025-03-28T09:41:08.381854Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:621:2536] to# [2:619:2534] ExecQueue 2025-03-28T09:41:08.381893Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [2:621:2536] bucket# 0 ... waiting for watcher to connect (done) 2025-03-28T09:41:08.382140Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:621:2536] to# [2:619:2534] ExecQueue 2025-03-28T09:41:08.382184Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 HANDLE TEvWatch 2025-03-28T09:41:08.382222Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [2:620:2535] bucket.ActiveActor 2025-03-28T09:41:08.382284Z node 2 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [2:620:2535] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [2:621:2536]} 2025-03-28T09:41:08.382371Z node 2 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [2:620:2535] Mediator# 72057594047365120 SEND to# [2:621:2536] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} ... 
waiting for no pending commands 2025-03-28T09:41:08.429406Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:621:2536] to# [2:619:2534] ExecQueue 2025-03-28T09:41:08.429486Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:621:2536] bucket# 0 2025-03-28T09:41:08.429593Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:621:2536] to# [2:619:2534] ExecQueue 2025-03-28T09:41:08.429626Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:621:2536] bucket# 0 2025-03-28T09:41:08.429682Z node 2 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 FORWARD Watch from# [2:621:2536] to# [2:619:2534] ExecQueue 2025-03-28T09:41:08.429714Z node 2 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [2:619:2534] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [2:621:2536] bucket# 0 ... waiti ... ket# 0 ... waiting for no pending commands (done) 2025-03-28T09:42:04.532454Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [12:664:2560] connected 2025-03-28T09:42:04.532541Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-03-28T09:42:04.532593Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:662:2558] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 3 Coordinator# 72057594046316545 2025-03-28T09:42:04.533005Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [12:667:2563] connected 2025-03-28T09:42:04.533112Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-03-28T09:42:04.533156Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:665:2561] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 3 Coordinator# 72057594046316546 2025-03-28T09:42:04.533489Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2025-03-28T09:42:04.533555Z node 12 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2025-03-28T09:42:04.533642Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2025-03-28T09:42:04.533667Z node 12 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2025-03-28T09:42:04.533784Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCommitStep to# [12:619:2534] ExecQueue {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:662:2558]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:665:2561]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M0 2025-03-28T09:42:04.533910Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:662:2558]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 
0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:665:2561]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M1 2025-03-28T09:42:04.533961Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 1 txid# 2 marker# M2 2025-03-28T09:42:04.534011Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND Ev to# [12:620:2535] step# 1010 forTablet# 72057594047365121 txid# 1 txid# 2 marker# M3 2025-03-28T09:42:04.534071Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. TxIds: txid# 1 txid# 2 marker# M2 2025-03-28T09:42:04.534100Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND Ev to# [12:620:2535] step# 1010 forTablet# 72057594047365122 txid# 1 txid# 2 marker# M3 2025-03-28T09:42:04.534142Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [12:620:2535] bucket.ActiveActor step# 1010 2025-03-28T09:42:04.534239Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:662:2558]}{TTx Moderator# 0 txid# 2 AckTo# [12:665:2561]}}} marker# M4 2025-03-28T09:42:04.534394Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:662:2558]}{TTx Moderator# 0 txid# 2 AckTo# [12:665:2561]}}} marker# M4 2025-03-28T09:42:04.534639Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2025-03-28T09:42:04.536778Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:673:2567] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:42:04.536850Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-03-28T09:42:04.536890Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2025-03-28T09:42:04.536929Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-03-28T09:42:04.537288Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:674:2568] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:42:04.537324Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2025-03-28T09:42:04.537351Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2025-03-28T09:42:04.537381Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-03-28T09:42:04.548428Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 server# [12:677:2571] connected 2025-03-28T09:42:04.548563Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2025-03-28T09:42:04.548622Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:675:2569] Cookie# 2 CompleteStep# 1010 LatestKnownStep# 1010 SubjectiveTime# 4 Coordinator# 72057594046316546 2025-03-28T09:42:04.548890Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2025-03-28T09:42:04.548959Z node 12 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2025-03-28T09:42:04.549049Z node 12 :TX_MEDIATOR DEBUG: tablet# 72057594047365120 SEND EvRequestLostAcks to# [12:619:2534] ExecQueue step {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} 2025-03-28T09:42:04.549160Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 HANDLE TEvRequestLostAcks {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} AckTo# [12:675:2569] 2025-03-28T09:42:04.549208Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 2 marker# M2 2025-03-28T09:42:04.549283Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND Ev to# [12:620:2535] step# 1010 forTablet# 72057594047365121 txid# 2 marker# M3 2025-03-28T09:42:04.549353Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. 
TxIds: txid# 2 marker# M2 2025-03-28T09:42:04.549402Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [12:619:2534] MediatorId# 72057594047365120 SEND Ev to# [12:620:2535] step# 1010 forTablet# 72057594047365122 txid# 2 marker# M3 2025-03-28T09:42:04.549495Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:675:2569]}}} 2025-03-28T09:42:04.549574Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:675:2569]}}} 2025-03-28T09:42:04.561252Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365121 ClientId: [12:669:2565] ServerId: [12:673:2567] } 2025-03-28T09:42:04.642043Z node 12 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:42:04.673401Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:702:2584] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-03-28T09:42:04.673494Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2025-03-28T09:42:04.673536Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2025-03-28T09:42:04.673580Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-03-28T09:42:04.698330Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365122 ClientId: [12:670:2566] ServerId: [12:674:2568] } 2025-03-28T09:42:04.716505Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:738:2597] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2025-03-28T09:42:04.716625Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2025-03-28T09:42:04.716670Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2025-03-28T09:42:04.716722Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [12:620:2535] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet 2025-03-28T09:42:04.732523Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:42:04.732693Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:42:04.744750Z node 12 :HIVE WARN: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected
|98.2%| [TM] {RESULT} ydb/core/tx/mediator/ut/unittest
>> KqpTpch::Query19 [GOOD]
>> KqpTpch::Query20
>> KafkaProtocol::LoginWithApiKeyWithoutAt [GOOD]
>> KafkaProtocol::MetadataScenario
>> TSentinelTests::PDiskRackGuardFullRack [GOOD]
>> TSentinelTests::BSControllerUnresponsive
>> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 [GOOD]
>> KeyValueGRPCService::SimpleCreateAlterDropVolume [GOOD]
>> KeyValueGRPCService::SimpleListPartitions [GOOD]
>> QuoterWithKesusTest::GetsBigQuotaWithDeadline [GOOD]
>> QuoterWithKesusTest::FailsToGetBigQuota
>> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted
>> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/services/keyvalue/ut/unittest >> KeyValueGRPCService::SimpleListPartitions [GOOD] Test command err: 2025-03-28T09:40:12.471803Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792909725064281:2204];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:12.476331Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000698/r3tmp/tmpg9wEQP/pdisk_1.dat 2025-03-28T09:40:13.099970Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:13.109483Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:13.110789Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:13.119463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 65052, node 1 2025-03-28T09:40:13.162255Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-03-28T09:40:13.162776Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-03-28T09:40:13.162856Z node 1 :GRPC_SERVER NOTICE: Grpc request proxy started, nodeid# 1, serve as static node 2025-03-28T09:40:13.162984Z node 1 :GRPC_SERVER DEBUG: Subscribe to /Root 2025-03-28T09:40:13.164714Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-03-28T09:40:13.164740Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-03-28T09:40:13.164756Z node 1 :GRPC_SERVER INFO: Updated app config 2025-03-28T09:40:13.164785Z node 1 :GRPC_SERVER DEBUG: Got proxy service configuration 2025-03-28T09:40:13.164804Z node 1 :GRPC_SERVER INFO: Subscribed for config changes 2025-03-28T09:40:13.164817Z node 1 :GRPC_SERVER INFO: Updated app config 2025-03-28T09:40:13.176934Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root 2025-03-28T09:40:13.176981Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:40:13.177000Z node 1 :GRPC_SERVER DEBUG: SchemeBoardUpdate /Root
2025-03-28T09:40:13.177007Z node 1 :GRPC_SERVER DEBUG: Can't update SecurityState for /Root - no PublicKeys 2025-03-28T09:40:13.302739Z node 1 :GRPC_SERVER DEBUG: [0x51a000029a80] created request Name# BlobStorageConfig 2025-03-28T09:40:13.303768Z node 1 :GRPC_SERVER DEBUG: [0x51a00002a080] created request Name# HiveCreateTablet 2025-03-28T09:40:13.304192Z node 1 :GRPC_SERVER DEBUG: [0x51a00002a680] created request Name# TabletStateRequest 2025-03-28T09:40:13.304497Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ac80] created request Name# SchemeOperationStatus 2025-03-28T09:40:13.305529Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b280] created request Name# ChooseProxy 2025-03-28T09:40:13.306000Z node 1 :GRPC_SERVER DEBUG: [0x51a00002b880] created request Name# ResolveNode 2025-03-28T09:40:13.306866Z node 1 :GRPC_SERVER DEBUG: [0x51a00002be80] created request Name# FillNode 2025-03-28T09:40:13.307193Z node 1 :GRPC_SERVER DEBUG: [0x51a00002c480] created request Name# DrainNode 2025-03-28T09:40:13.307544Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ca80] created request Name# InterconnectDebug 2025-03-28T09:40:13.307891Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d080] created request Name# TestShardControl 2025-03-28T09:40:13.308837Z node 1 :GRPC_SERVER DEBUG: [0x51a00002d680] created request Name# RegisterNode 2025-03-28T09:40:13.309179Z node 1 :GRPC_SERVER DEBUG: [0x51a00002dc80] created request Name# CmsRequest 2025-03-28T09:40:13.310386Z node 1 :GRPC_SERVER DEBUG: [0x51a00002e280] created request Name# ConsoleRequest 2025-03-28T09:40:13.310725Z node 1 :GRPC_SERVER DEBUG: [0x51a00002e880] created request Name# SchemeInitRoot 2025-03-28T09:40:13.311761Z node 1 :GRPC_SERVER DEBUG: [0x51a00002ee80] created request Name# PersQueueRequest 2025-03-28T09:40:13.312125Z node 1 :GRPC_SERVER DEBUG: [0x51a0000cfc80] created request Name# SchemeOperation 2025-03-28T09:40:13.312783Z node 1 :GRPC_SERVER DEBUG: [0x51a0000d0280] created request Name# SchemeDescribe 2025-03-28T09:40:13.413676Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:40:13.413701Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:40:13.413708Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:40:13.413835Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21666 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 
2025-03-28T09:40:14.048998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "hdd2-pool" Kind: "hdd2" } StoragePools { Name: "hdd-pool" Kind: "hdd" } StoragePools { Name: "hdd1-pool" Kind: "hdd1" } StoragePools { Name: "ssd-pool" Kind: "ssd" } StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T09:40:14.049977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:14.051737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T09:40:14.053805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T09:40:14.053869Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:14.056639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T09:40:14.057537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T09:40:14.057696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:14.057734Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T09:40:14.057790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2025-03-28T09:40:14.057799Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 2 -> 3 waiting... 
2025-03-28T09:40:14.060017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:40:14.060034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2025-03-28T09:40:14.060055Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T09:40:14.060721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:14.060748Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T09:40:14.060759Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 3 -> 128 2025-03-28T09:40:14.062046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:14.062065Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:14.062081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:40:14.062104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2025-03-28T09:40:14.066390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:40:14.068278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2025-03-28T09:40:14.068612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2025-03-28T09:40:14.070747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 1743154814114, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T09:40:14.070850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1743154814114 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T09:40:14.070890Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:40:14.072479Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 281474976710657:0 128 -> 240 2025-03-28T09:40:14.072612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2025-03-28T09:40:14.072821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2025-03-28T09:40:14.072866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, 
LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T09:40:14.075140Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T09:40:14.075188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-2 ... 8T09:42:00.742854Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [33:7486793370298200103:2384], at schemeshard: 72057594046644480, txId: 281474976715662, path id: 3 2025-03-28T09:42:00.742914Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:42:00.742970Z node 33 :FLAT_TX_SCHEMESHARD INFO: [72057594046644480] TDeleteParts opId# 281474976715662:0 ProgressState 2025-03-28T09:42:00.743045Z node 33 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-03-28T09:42:00.743067Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-03-28T09:42:00.743100Z node 33 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715662:0 progress is 1/1 2025-03-28T09:42:00.743118Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-03-28T09:42:00.743148Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715662, ready parts: 1/1, is published: false 2025-03-28T09:42:00.743188Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715662 ready parts: 1/1 2025-03-28T09:42:00.743239Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715662:0 2025-03-28T09:42:00.743261Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 281474976715662:0 2025-03-28T09:42:00.743460Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 5 2025-03-28T09:42:00.743494Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715662, publications: 2, subscribers: 1 2025-03-28T09:42:00.743520Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715662, [OwnerId: 72057594046644480, LocalPathId: 2], 7 2025-03-28T09:42:00.743537Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715662, [OwnerId: 72057594046644480, LocalPathId: 3], 18446744073709551615 2025-03-28T09:42:00.744303Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-03-28T09:42:00.744410Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 3 Version: 18446744073709551615 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-03-28T09:42:00.744436Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976715662 2025-03-28T09:42:00.744467Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715662, pathId: [OwnerId: 72057594046644480, LocalPathId: 3], version: 18446744073709551615 2025-03-28T09:42:00.744500Z node 33 :FLAT_TX_SCHEMESHARD 
DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 4 2025-03-28T09:42:00.748079Z node 33 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-03-28T09:42:00.748216Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976715662 2025-03-28T09:42:00.748242Z node 33 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715662 2025-03-28T09:42:00.748273Z node 33 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715662, pathId: [OwnerId: 72057594046644480, LocalPathId: 2], version: 7 2025-03-28T09:42:00.748303Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2025-03-28T09:42:00.748411Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715662, subscribers: 1 2025-03-28T09:42:00.748447Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [33:7486793374593168079:2345] 2025-03-28T09:42:00.752318Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:42:00.752349Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:42:00.752362Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2025-03-28T09:42:00.752503Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 2025-03-28T09:42:00.752542Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715662 2025-03-28T09:42:00.755441Z node 33 :GRPC_SERVER DEBUG: Got grpc request# ListDirectoryRequest, traceId# 01jqe22f9kekm623p49j4gwxsz, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:44172, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2025-03-28T09:42:00.755768Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 2 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2025-03-28T09:42:00.756223Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 3 2025-03-28T09:42:00.756512Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 3 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2025-03-28T09:42:00.756718Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 2 2025-03-28T09:42:00.756823Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 1 ShardOwnerId: 
72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2025-03-28T09:42:00.756947Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 3] was 1 2025-03-28T09:42:00.757146Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2025-03-28T09:42:00.757189Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 3], at schemeshard: 72057594046644480 2025-03-28T09:42:00.757269Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2025-03-28T09:42:00.758334Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037889 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-03-28T09:42:00.759335Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037890 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-03-28T09:42:00.759743Z node 33 :KEYVALUE DEBUG: KeyValue# 72075186224037888 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2025-03-28T09:42:00.760725Z node 33 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 33, TabletId: 72075186224037889 not found 2025-03-28T09:42:00.760773Z node 33 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 33, TabletId: 72075186224037890 not found 2025-03-28T09:42:00.760803Z node 33 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 33, TabletId: 72075186224037888 not found 2025-03-28T09:42:00.764538Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:2 2025-03-28T09:42:00.764581Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2025-03-28T09:42:00.764675Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:3 2025-03-28T09:42:00.764689Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2025-03-28T09:42:00.764715Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Deleted shardIdx 72057594046644480:1 2025-03-28T09:42:00.764742Z node 33 :FLAT_TX_SCHEMESHARD DEBUG: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2025-03-28T09:42:00.764802Z node 33 :FLAT_TX_SCHEMESHARD NOTICE: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2025-03-28T09:42:00.770943Z node 33 :GRPC_SERVER DEBUG: [0x51a0000d5c80] received request Name# SchemeOperation ok# false data# peer# current inflight# 0 2025-03-28T09:42:00.771462Z node 33 :GRPC_SERVER DEBUG: [0x51a0000a5680] received request Name# SchemeOperationStatus ok# false data# peer# current inflight# 0 2025-03-28T09:42:00.771853Z node 33 :GRPC_SERVER DEBUG: [0x51a0000de680] received request Name# SchemeDescribe ok# false data# peer# current inflight# 0 2025-03-28T09:42:00.772262Z node 33 :GRPC_SERVER DEBUG: [0x51a00003cc80] received request Name# ChooseProxy ok# false data# peer# current inflight# 0 2025-03-28T09:42:00.772655Z node 33 :GRPC_SERVER DEBUG: [0x51a0000bd680] received request Name# PersQueueRequest ok# false data# peer# current inflight# 0 2025-03-28T09:42:00.773034Z node 33 :GRPC_SERVER DEBUG: [0x51a00013e680] received request Name# SchemeInitRoot ok# false data# peer# current inflight# 0 2025-03-28T09:42:00.773478Z node 33 :GRPC_SERVER DEBUG: [0x51a000073e80] received request 
Name# ResolveNode ok# false data# peer# current inflight# 0 2025-03-28T09:42:00.773870Z node 33 :GRPC_SERVER DEBUG: [0x51a00001fe80] received request Name# FillNode ok# false data# peer# current inflight# 0 2025-03-28T09:42:00.774290Z node 33 :GRPC_SERVER DEBUG: [0x51a0000aa480] received request Name# DrainNode ok# false data# peer# current inflight# 0 2025-03-28T09:42:00.774805Z node 33 :GRPC_SERVER DEBUG: [0x51a000041a80] received request Name# HiveCreateTablet ok# false data# peer# current inflight# 0 2025-03-28T09:42:00.775245Z node 33 :GRPC_SERVER DEBUG: [0x51a000133280] received request Name# TestShardControl ok# false data# peer# current inflight# 0 2025-03-28T09:42:00.775636Z node 33 :GRPC_SERVER DEBUG: [0x51a0000b5880] received request Name# RegisterNode ok# false data# peer# current inflight# 0 2025-03-28T09:42:00.776057Z node 33 :GRPC_SERVER DEBUG: [0x51a00013ce80] received request Name# CmsRequest ok# false data# peer# current inflight# 0 2025-03-28T09:42:00.776429Z node 33 :GRPC_SERVER DEBUG: [0x51a000136e80] received request Name# ConsoleRequest ok# false data# peer# current inflight# 0 2025-03-28T09:42:00.776830Z node 33 :GRPC_SERVER DEBUG: [0x51a00001b680] received request Name# InterconnectDebug ok# false data# peer# current inflight# 0 2025-03-28T09:42:00.777179Z node 33 :GRPC_SERVER DEBUG: [0x51a0000d6880] received request Name# TabletStateRequest ok# false data# peer# current inflight# 0 2025-03-28T09:42:00.782817Z node 33 :GRPC_SERVER DEBUG: [0x51a0000dfe80] received request Name# BlobStorageConfig ok# false data# peer# current inflight# 0
|98.2%| [TM] {RESULT} ydb/services/keyvalue/ut/unittest
>> TDqPqRdReadActorTests::IgnoreMessageIfNoSessions [GOOD]
>> TDqPqRdReadActorTests::MetadataFields [GOOD]
>> TSchemeShardTest::ManyDirs [GOOD]
>> TSchemeShardTest::NestedDirs
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/ut/unittest >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] Test command err: 2025-03-28T09:39:56.256550Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [1:30:2057] 2025-03-28T09:39:56.256757Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [1:25:2054] 2025-03-28T09:39:56.256899Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [1:25:2054] 2025-03-28T09:39:56.256950Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:26:2054] 2025-03-28T09:39:56.256973Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:26:2054] 2025-03-28T09:39:56.257002Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [3:27:2054] 2025-03-28T09:39:56.257036Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [3:27:2054] 2025-03-28T09:39:56.257145Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2025-03-28T09:39:56.263206Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2025-03-28T09:39:56.263424Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 0 2025-03-28T09:39:56.263480Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-03-28T09:39:56.275987Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2025-03-28T09:39:56.276096Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-03-28T09:39:56.276214Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing
received, [2:31:2055] 2025-03-28T09:39:56.276248Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Move all Locations from old actor [2:26:2054] to new [2:31:2055] 2025-03-28T09:39:56.276284Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:31:2055] 2025-03-28T09:39:56.276325Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [2:32:2056] 2025-03-28T09:39:56.276348Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Move all Locations from old actor [2:31:2055] to new [2:32:2056] 2025-03-28T09:39:56.276368Z node 1 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [2:32:2056] 2025-03-28T09:39:56.276417Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2025-03-28T09:39:56.276463Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2025-03-28T09:39:56.276545Z node 1 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2025-03-28T09:39:56.276580Z node 1 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2025-03-28T09:39:56.374595Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [5:30:2057] 2025-03-28T09:39:56.375588Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [5:25:2054] 2025-03-28T09:39:56.375692Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [5:25:2054] 2025-03-28T09:39:56.375735Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [6:26:2054] 2025-03-28T09:39:56.375770Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [6:26:2054] 2025-03-28T09:39:56.375819Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: TEvPing received, [7:27:2054] 2025-03-28T09:39:56.375842Z node 5 :FQ_ROW_DISPATCHER TRACE: Coordinator: Send TEvPong to [7:27:2054] 2025-03-28T09:39:56.375924Z node 5 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [5:28:2055], topic1, partIds: 0, 1, 2 2025-03-28T09:39:56.376057Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [5:28:2055] 2025-03-28T09:39:56.376195Z node 5 :FQ_ROW_DISPATCHER INFO: Coordinator: TEvCoordinatorRequest from [5:29:2056], topic1, partIds: 3 2025-03-28T09:39:56.376265Z node 5 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Send TEvCoordinatorResult to [5:29:2056] 2025-03-28T09:39:56.518885Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Successfully bootstrapped, local coordinator id [9:5:2052] 2025-03-28T09:39:56.519035Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:9:2056] Successfully bootstrapped, local coordinator id [9:6:2053] 2025-03-28T09:39:56.519133Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:10:2057] Successfully bootstrapped, local coordinator id [9:7:2054] 2025-03-28T09:39:56.519229Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-03-28T09:39:56.519263Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.519319Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.533370Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-03-28T09:39:56.533433Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.533978Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 
2025-03-28T09:39:56.534036Z node 9 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "local/row_dispatcher//tenant" actor 2025-03-28T09:39:56.534857Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.534898Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.567432Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-28T09:39:56.567563Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-28T09:39:56.568348Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.568381Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.576323Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-28T09:39:56.576488Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.576527Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.584478Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-28T09:39:56.584598Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.584629Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.591936Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-28T09:39:56.592107Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.592138Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.599128Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-28T09:39:56.599320Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.599343Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.605906Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-28T09:39:56.606058Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.606109Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.612869Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-28T09:39:56.613015Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.613073Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.619991Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-28T09:39:56.620146Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.620173Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.626909Z node 9 :FQ_ROW_DISPATCHER ERROR: Create coordination node "local/row_dispatcher//tenant" error: OVERLOADED {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exists but creating right now (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-28T09:39:56.627074Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.627104Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.634182Z node 9 :FQ_ROW_DISPATCHER DEBUG: Successfully created coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.634329Z node 9 :FQ_ROW_DISPATCHER DEBUG: Reply for create coordination node "local/row_dispatcher//tenant": {
: Error: Check failed: path: '/local/row_dispatcher/tenant', error: path exist, request accepts it (id: [OwnerId: 72075186232723360, LocalPathId: 3], type: EPathTypeKesus, state: EPathStateCreate) } 2025-03-28T09:39:56.634404Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Coordination node successfully created 2025-03-28T09:39:56.634435Z node 9 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [9:8:2055] Start session 2025-03-28T09:39:56.635498Z node 9 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "local/row_dispatcher//tenant" 2025-03-28T09:39:56.635540Z node 9 :FQ_ROW_DISPATCHER D ... ation node "YDB_DATABASE/RowDispatcher/Tenant" 2025-03-28T09:42:04.424382Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [38:18:2059] 2025-03-28T09:42:04.424797Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [38:14:2056], read group connection_id1, topicPath topic part id 0,1 query id QueryId cookie 1 2025-03-28T09:42:04.425362Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-03-28T09:42:04.425619Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 1 2025-03-28T09:42:04.426057Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [38:15:2057], read group connection_id1, topicPath topic part id 0,1 query id QueryId cookie 1 2025-03-28T09:42:04.426475Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:22:2063] to [38:14:2056] query id QueryId 2025-03-28T09:42:04.426575Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:14:2056] part id 0 query id QueryId 2025-03-28T09:42:04.426678Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:22:2063] to [38:14:2056] query id QueryId 2025-03-28T09:42:04.426786Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:23:2064] to [38:14:2056] query id QueryId 2025-03-28T09:42:04.426879Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:14:2056] part id 1 query id QueryId 2025-03-28T09:42:04.427010Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:23:2064] to [38:14:2056] query id QueryId 2025-03-28T09:42:04.427136Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:22:2063] to [38:15:2057] query id QueryId 2025-03-28T09:42:04.427220Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:15:2057] part id 0 query id QueryId 2025-03-28T09:42:04.427317Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:22:2063] to [38:15:2057] query id QueryId 2025-03-28T09:42:04.427413Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [38:23:2064] to [38:15:2057] query id QueryId 2025-03-28T09:42:04.427503Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [38:15:2057] part id 1 query id QueryId 2025-03-28T09:42:04.427574Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [38:23:2064] to [38:15:2057] query id QueryId 2025-03-28T09:42:04.427670Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvUndelivered, from [38:14:2056], reason ActorUnknown 2025-03-28T09:42:04.427734Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [38:14:2056] query id 
QueryId 2025-03-28T09:42:04.427977Z node 38 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvUndelivered, from [38:15:2057], reason ActorUnknown 2025-03-28T09:42:04.428051Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [38:15:2057] query id QueryId 2025-03-28T09:42:04.428127Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [38:22:2063] 2025-03-28T09:42:04.428203Z node 38 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [38:23:2064] 2025-03-28T09:42:04.579142Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [40:17:2058], tenant Tenant 2025-03-28T09:42:04.635627Z node 40 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [40:18:2059] 2025-03-28T09:42:04.635715Z node 40 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [40:19:2060] Successfully bootstrapped, local coordinator id [40:18:2059] 2025-03-28T09:42:04.635788Z node 40 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-03-28T09:42:04.639369Z node 40 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-03-28T09:42:04.639453Z node 40 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-03-28T09:42:04.645260Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [40:18:2059] 2025-03-28T09:42:04.645491Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [40:14:2056], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 1 2025-03-28T09:42:04.645882Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-03-28T09:42:04.646336Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [40:15:2057], read group connection_id2, topicPath topic part id 0 query id QueryId cookie 1 2025-03-28T09:42:04.646555Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id2 topic topic part id 0 2025-03-28T09:42:04.646993Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [40:22:2063] to [40:14:2056] query id QueryId 2025-03-28T09:42:04.647122Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [40:14:2056] part id 0 query id QueryId 2025-03-28T09:42:04.647258Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [40:22:2063] to [40:14:2056] query id QueryId 2025-03-28T09:42:04.647356Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [40:23:2064] to [40:15:2057] query id QueryId 2025-03-28T09:42:04.647434Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [40:15:2057] part id 0 query id QueryId 2025-03-28T09:42:04.647603Z node 40 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [40:23:2064] to [40:15:2057] query id QueryId 2025-03-28T09:42:04.647733Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [40:14:2056] topic topic query id QueryId 2025-03-28T09:42:04.647804Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [40:14:2056] query id QueryId 2025-03-28T09:42:04.647904Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [40:22:2063] 2025-03-28T09:42:04.648090Z node 40 :FQ_ROW_DISPATCHER 
DEBUG: RowDispatcher: Received TEvStopSession from [40:15:2057] topic topic query id QueryId 2025-03-28T09:42:04.648144Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [40:15:2057] query id QueryId 2025-03-28T09:42:04.648217Z node 40 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [40:23:2064] 2025-03-28T09:42:04.821397Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [42:17:2058], tenant Tenant 2025-03-28T09:42:04.854930Z node 42 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [42:18:2059] 2025-03-28T09:42:04.855047Z node 42 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [42:19:2060] Successfully bootstrapped, local coordinator id [42:18:2059] 2025-03-28T09:42:04.855167Z node 42 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-03-28T09:42:04.855212Z node 42 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-03-28T09:42:04.855243Z node 42 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-03-28T09:42:04.855628Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [42:18:2059] 2025-03-28T09:42:04.855813Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [43:16:2053], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 42 2025-03-28T09:42:04.856009Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-03-28T09:42:04.864269Z node 42 :FQ_ROW_DISPATCHER ERROR: Create coordination node "YDB_DATABASE/RowDispatcher/Tenant" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): DNS resolution failed for YDB_ENDPOINT: UNKNOWN: Temporary failure in name resolution } {
: Error: Grpc error response on endpoint YDB_ENDPOINT } ] 2025-03-28T09:42:04.864851Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: TEvTryConnect to node id 43 2025-03-28T09:42:04.871558Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: EvNodeConnected, node id 43 2025-03-28T09:42:04.883348Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [42:22:2063] to [43:16:2053] query id QueryId 2025-03-28T09:42:04.883682Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [43:16:2053] part id 0 query id QueryId 2025-03-28T09:42:04.883771Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [42:22:2063] to [43:16:2053] query id QueryId 2025-03-28T09:42:04.883995Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvNoSession from [43:16:2053], generation 41 2025-03-28T09:42:04.884052Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [42:22:2063] to [43:16:2053] query id QueryId 2025-03-28T09:42:04.884261Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Received TEvGetNextBatch from [43:16:2053] part id 0 query id QueryId 2025-03-28T09:42:04.884355Z node 42 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvMessageBatch from [42:22:2063] to [43:16:2053] query id QueryId 2025-03-28T09:42:04.884555Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvNoSession from [43:16:2053], generation 42 2025-03-28T09:42:04.884597Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [43:16:2053] query id QueryId 2025-03-28T09:42:04.884707Z node 42 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to [42:22:2063] 2025-03-28T09:42:05.024711Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Successfully bootstrapped row dispatcher, id [44:17:2058], tenant Tenant 2025-03-28T09:42:05.034307Z node 44 :FQ_ROW_DISPATCHER DEBUG: Coordinator: Successfully bootstrapped coordinator, id [44:18:2059] 2025-03-28T09:42:05.034397Z node 44 :FQ_ROW_DISPATCHER DEBUG: TLeaderElection [44:19:2060] Successfully bootstrapped, local coordinator id [44:18:2059] 2025-03-28T09:42:05.034473Z node 44 :FQ_ROW_DISPATCHER DEBUG: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2025-03-28T09:42:05.034502Z node 44 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-03-28T09:42:05.034530Z node 44 :FQ_ROW_DISPATCHER DEBUG: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2025-03-28T09:42:05.034953Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: TEvCoordinatorChangesSubscribe from [44:18:2059] 2025-03-28T09:42:05.035136Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStartSession from [44:14:2056], read group connection_id1, topicPath topic part id 0 query id QueryId cookie 1 2025-03-28T09:42:05.035321Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Create new session: read group connection_id1 topic topic part id 0 2025-03-28T09:42:05.035674Z node 44 :FQ_ROW_DISPATCHER TRACE: RowDispatcher: Forward TEvNewDataArrived from [44:22:2063] to [44:14:2056] query id QueryId 2025-03-28T09:42:05.035787Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Received TEvStopSession from [44:14:2056] topic topic query id QueryId 2025-03-28T09:42:05.035843Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: DeleteConsumer, readActorId [44:14:2056] query id QueryId 2025-03-28T09:42:05.035940Z node 44 :FQ_ROW_DISPATCHER DEBUG: RowDispatcher: Session is not used, sent TEvPoisonPill to 
[44:22:2063]
|98.2%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/ut/unittest
>> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState
>> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0]
|98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx [GOOD]
>> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep
>> TestPurecalcFilter::CompilationValidation [GOOD]
>> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState [GOOD]
>> TSchemeShardTest::NestedDirs [GOOD]
>> TestRawParser::Simple
>> test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] [GOOD]
>> DataShardFollowers::FollowerReadDuringSplit [GOOD]
>> KqpTpch::Query20 [GOOD]
>> KqpTpch::Query21
>> TDqPqReadActorTest::TestReadFromTopic
>> TestRawParser::Simple [GOOD]
|98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> TestRawParser::ManyValues
>> TestRawParser::ManyValues [GOOD]
>> TestRawParser::TypeKindsValidation
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_followers/unittest >> DataShardFollowers::FollowerReadDuringSplit [GOOD] Test command err: 2025-03-28T09:41:07.424953Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:41:07.426192Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:41:07.426253Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000854/r3tmp/tmpVCktNi/pdisk_1.dat 2025-03-28T09:41:08.637444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:41:08.715554Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:41:08.769664Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2025-03-28T09:41:08.771894Z node 1 :TX_PROXY DEBUG: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2025-03-28T09:41:08.772283Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:41:08.772456Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:41:08.791165Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:41:08.890145Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Handle TEvProposeTransaction 2025-03-28T09:41:08.890242Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] TxId# 281474976715657 ProcessProposeTransaction 2025-03-28T09:41:08.891446Z node 1 :TX_PROXY DEBUG: actor# [1:59:2106] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:641:2549] 2025-03-28T09:41:09.023314Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 PartitionConfig { FollowerGroups { FollowerCount: 1 AllowLeaderPromotion: false } } } } } ExecTimeoutPeriod: 18446744073709551615 2025-03-28T09:41:09.023426Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T09:41:09.024138Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T09:41:09.024248Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T09:41:09.024591Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T09:41:09.024869Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 
RedirectRequired# true 2025-03-28T09:41:09.025105Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2025-03-28T09:41:09.025771Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 HANDLE EvClientConnected 2025-03-28T09:41:09.028504Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:41:09.030744Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2025-03-28T09:41:09.030859Z node 1 :TX_PROXY DEBUG: Actor# [1:641:2549] txid# 281474976715657 SEND to# [1:593:2518] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2025-03-28T09:41:09.082678Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:41:09.083919Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:41:09.084477Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:41:09.084892Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:41:09.156034Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:41:09.156941Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:41:09.157085Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:41:09.159169Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:41:09.159251Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:41:09.159311Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:41:09.160519Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:41:09.160682Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:41:09.160816Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:41:09.175376Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:41:09.241680Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:41:09.247111Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:41:09.247372Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:41:09.247423Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:41:09.247461Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:41:09.247503Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:41:09.247806Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:41:09.248615Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:41:09.251523Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:41:09.251647Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:41:09.256466Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:41:09.256536Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:41:09.256613Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:41:09.256659Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:41:09.256805Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:41:09.256843Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:41:09.256905Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:41:09.257075Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:41:09.257117Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:41:09.257164Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:41:09.257238Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:41:09.257279Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:41:09.257441Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:41:09.257846Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:41:09.257933Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:41:09.258034Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:41:09.258089Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:41:09.258146Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:41:09.258189Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:41:09.258224Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:41:09.258566Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:41:09.258612Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:41:09.258666Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:41:09.258731Z node 1 
:TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:41:09.258799Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:41:09.258847Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:41:09.258888Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:41:09.258924Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:41:09.258951Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:41:09.260854Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:41:09.260932Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025- ... ssing event TEvDataShard::TEvReadAck 2025-03-28T09:42:12.095335Z node 8 :TX_DATASHARD DEBUG: 72075186224037889 ReadAck from [8:1099:2873] on missing iterator: { ReadId: 0 SeqNo: 2 MaxRows: 999 MaxBytes: 5242880 } 2025-03-28T09:42:12.095681Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553219, Sender [8:1099:2873], Recipient [8:1011:2802]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:42:12.095737Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2025-03-28T09:42:12.095839Z node 8 :TX_DATASHARD TRACE: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } ... reading from the right follower 2025-03-28T09:42:12.351550Z node 8 :TX_PROXY DEBUG: actor# [8:59:2106] Handle TEvExecuteKqpTransaction 2025-03-28T09:42:12.351648Z node 8 :TX_PROXY DEBUG: actor# [8:59:2106] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-03-28T09:42:12.352990Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269877761, Sender [8:1117:2888], Recipient [8:1013:2803]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:42:12.353073Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:42:12.353147Z node 8 :TX_DATASHARD DEBUG: Server connected at follower 1 tablet# 72075186224037890, clientId# [8:1116:2887], serverId# [8:1117:2888], sessionId# [0:0:0] 2025-03-28T09:42:12.353382Z node 8 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe22tc868vvc8kt73vxddba, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=8&id=NzYwYTc3MzQtOTI3Zjc0MzAtN2VlODc0NzMtZWUwM2I2ZjI=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
Database not set, use /Root 2025-03-28T09:42:12.355557Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553215, Sender [8:1121:2889], Recipient [8:1013:2803]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2025-03-28T09:42:12.355618Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvRead 2025-03-28T09:42:12.355770Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:8} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2025-03-28T09:42:12.355857Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:8} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:42:12.355995Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2025-03-28T09:42:12.356101Z node 8 :TX_DATASHARD DEBUG: Updating sys metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=7, epoch=1} 2025-03-28T09:42:12.356874Z node 8 :TX_DATASHARD DEBUG: Updating tables metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=5, epoch=1} 2025-03-28T09:42:12.357277Z node 8 :TX_DATASHARD DEBUG: Updating snapshots metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=0, epoch=1} 2025-03-28T09:42:12.357385Z node 8 :TX_DATASHARD TRACE: 72075186224037890 changed HEAD read to repeatable v1500/18446744073709551615 2025-03-28T09:42:12.357468Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2025-03-28T09:42:12.357585Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-03-28T09:42:12.357649Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2025-03-28T09:42:12.357711Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2025-03-28T09:42:12.357771Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2025-03-28T09:42:12.357818Z node 8 :TX_DATASHARD TRACE: Activated operation [0:1] at 72075186224037890 2025-03-28T09:42:12.357871Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-03-28T09:42:12.357901Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2025-03-28T09:42:12.357926Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2025-03-28T09:42:12.357954Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-03-28T09:42:12.358125Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2025-03-28T09:42:12.358398Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Restart 2025-03-28T09:42:12.358437Z node 8 
:TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Restart at tablet# 72075186224037890 2025-03-28T09:42:12.358540Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:8} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> retry Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:42:12.358663Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:8} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} touch new 0b, 65b lo load (65b in total), 0b requested for data (4194304b in total) 2025-03-28T09:42:12.358780Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:8} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 8388608b of static mem, Memory{8388608 dyn 0} 2025-03-28T09:42:12.358867Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:8} requests PageCollection [72075186224037888:1:28:1:12288:190:0] 65 bytes, 1 pages: [0 4] 2025-03-28T09:42:12.358969Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:8} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} postponed, 65b, pages {1 wait, 1 load}, freshly touched 1 pages 2025-03-28T09:42:12.359153Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:8} got result TEvResult{1 pages [72075186224037888:1:28:1:12288:190:0] ok OK}, category 1 2025-03-28T09:42:12.359343Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2025-03-28T09:42:12.359390Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2025-03-28T09:42:12.359501Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Execute read# 2, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 2 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2025-03-28T09:42:12.359727Z node 8 :TX_DATASHARD TRACE: 72075186224037890 Complete read# {[8:1121:2889], 0} after executionsCount# 2 2025-03-28T09:42:12.359808Z node 8 :TX_DATASHARD TRACE: 72075186224037890 read iterator# {[8:1121:2889], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2025-03-28T09:42:12.359924Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-03-28T09:42:12.359956Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 2025-03-28T09:42:12.359988Z node 8 :TX_DATASHARD TRACE: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2025-03-28T09:42:12.360019Z node 8 :TX_DATASHARD TRACE: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2025-03-28T09:42:12.360066Z node 8 :TX_DATASHARD TRACE: Execution status for [0:1] at 72075186224037890 is Executed 2025-03-28T09:42:12.360090Z node 8 :TX_DATASHARD TRACE: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2025-03-28T09:42:12.360120Z node 8 :TX_DATASHARD TRACE: Execution plan for [0:1] at 72075186224037890 has finished 2025-03-28T09:42:12.360173Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2025-03-28T09:42:12.360275Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:8} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 2 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:42:12.360380Z node 8 
:TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:8} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 8388608b of static, Memory{0 dyn 0} 2025-03-28T09:42:12.360441Z node 8 :TX_DATASHARD TRACE: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2025-03-28T09:42:12.360680Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553217, Sender [8:1013:2803], Recipient [8:1013:2803]: NKikimr::TEvDataShard::TEvReadContinue 2025-03-28T09:42:12.360727Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadContinue 2025-03-28T09:42:12.360844Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:8} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} queued, type NKikimr::NDataShard::TDataShard::TTxReadContinue 2025-03-28T09:42:12.360923Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:8} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} took 4194304b of static mem, Memory{4194304 dyn 0} 2025-03-28T09:42:12.361011Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue for iterator# {[8:1121:2889], 0}, firstUnprocessedQuery# 0 2025-03-28T09:42:12.361094Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadContinue: iterator# {[8:1121:2889], 0}, FirstUnprocessedQuery# 0 2025-03-28T09:42:12.361213Z node 8 :TX_DATASHARD TRACE: 72075186224037890 readContinue iterator# {[8:1121:2889], 0} sends rowCount# 0, bytes# 0, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2025-03-28T09:42:12.361287Z node 8 :TX_DATASHARD DEBUG: 72075186224037890 read iterator# {[8:1121:2889], 0} finished in ReadContinue 2025-03-28T09:42:12.361419Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:8} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2025-03-28T09:42:12.361524Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:8} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} release 4194304b of static, Memory{0 dyn 0} 2025-03-28T09:42:12.362218Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553218, Sender [8:1121:2889], Recipient [8:1013:2803]: NKikimrTxDataShard.TEvReadAck ReadId: 0 SeqNo: 1 MaxRows: 1000 MaxBytes: 5242880 2025-03-28T09:42:12.362263Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadAck 2025-03-28T09:42:12.362369Z node 8 :TX_DATASHARD DEBUG: 72075186224037890 ReadAck from [8:1121:2889] on missing iterator: { ReadId: 0 SeqNo: 1 MaxRows: 1000 MaxBytes: 5242880 } 2025-03-28T09:42:12.363073Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, received event# 269553219, Sender [8:1121:2889], Recipient [8:1013:2803]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2025-03-28T09:42:12.363119Z node 8 :TX_DATASHARD TRACE: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2025-03-28T09:42:12.363193Z node 8 :TX_DATASHARD TRACE: 72075186224037890 ReadCancel: { ReadId: 0 } { items { uint32_value: 3 } items { uint32_value: 33 } } |98.2%| [TM] {RESULT} ydb/core/tx/datashard/ut_followers/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::NestedDirs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: 
[1:126:2058] recipient: [1:108:2140] 2025-03-28T09:38:36.364859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:38:36.364942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:36.364979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:38:36.365002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:38:36.365062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:38:36.365090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:38:36.365150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:38:36.365214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:38:36.365520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:38:36.424988Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T09:38:36.425034Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:36.435085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:38:36.435244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:38:36.435398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:38:36.440057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:38:36.440280Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:38:36.440752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.440903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:36.442331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.443468Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:36.443518Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.443697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:38:36.443743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:36.443794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:38:36.443865Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2025-03-28T09:38:36.448375Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T09:38:36.536604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:38:36.536780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.536999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:38:36.537264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:38:36.537327Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.539052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.539164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:38:36.539312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.539374Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:38:36.539407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:38:36.539441Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:38:36.541023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.541078Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:38:36.541103Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:38:36.542272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.542315Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.542341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.542377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.545180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:38:36.546587Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:38:36.546778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:38:36.547641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:38:36.547746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:38:36.547782Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.548053Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:38:36.548096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:38:36.548239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:38:36.548319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:38:36.549949Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:38:36.549999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:38:36.550157Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:38:36.550192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:38:36.550510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:38:36.550545Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:38:36.550621Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:36.550653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.550704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:38:36.550756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.550790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T09:38:36.550828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:38:36.550857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 
2025-03-28T09:38:36.550881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T09:38:36.550936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T09:38:36.550968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T09:38:36.551002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T09:38:36.552587Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:36.552679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T09:38:36.552708Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.279684Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.279712Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.279831Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 139, at schemeshard: 72057594046678944 2025-03-28T09:42:12.279891Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 132: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.279933Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 132: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.280089Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 140, at schemeshard: 72057594046678944 2025-03-28T09:42:12.280217Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 133: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.280245Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 133: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.280445Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 141, at schemeshard: 72057594046678944 2025-03-28T09:42:12.280565Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 142, at schemeshard: 72057594046678944 2025-03-28T09:42:12.280653Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 143, at schemeshard: 72057594046678944 2025-03-28T09:42:12.280706Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 134: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.280735Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 134: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.280860Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 135: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.280889Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 135: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.280989Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 144, at schemeshard: 72057594046678944 
2025-03-28T09:42:12.281038Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 136: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.281063Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 136: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.281251Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 145, at schemeshard: 72057594046678944 2025-03-28T09:42:12.281326Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 137: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.281358Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 137: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.281490Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 138: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.281530Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 138: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.281663Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 146, at schemeshard: 72057594046678944 2025-03-28T09:42:12.281756Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.281798Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 139: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.281918Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 147, at schemeshard: 72057594046678944 2025-03-28T09:42:12.281996Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 140: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.282028Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 140: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.282142Z node 15 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 148, at schemeshard: 72057594046678944 2025-03-28T09:42:12.282210Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 141: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.282241Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 141: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.282380Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 142: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.282415Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 142: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.282528Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 143: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.282565Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 143: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.282764Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 144: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.282794Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 144: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.282925Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 145: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.282961Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 145: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.283097Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- 
TTxNotificationSubscriber for txId 146: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.283122Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 146: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.283230Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 147: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.283253Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 147: satisfy waiter [15:1075:3066] 2025-03-28T09:42:12.283416Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 148: got EvNotifyTxCompletionResult 2025-03-28T09:42:12.283447Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: tests -- TTxNotificationSubscriber for txId 148: satisfy waiter [15:1075:3066] TestWaitNotification: OK eventTxId 149 TestWaitNotification: OK eventTxId 150 TestWaitNotification: OK eventTxId 151 TestWaitNotification: OK eventTxId 152 TestWaitNotification: OK eventTxId 153 TestWaitNotification: OK eventTxId 154 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 132 TestWaitNotification: OK eventTxId 133 TestWaitNotification: OK eventTxId 134 TestWaitNotification: OK eventTxId 135 TestWaitNotification: OK eventTxId 136 TestWaitNotification: OK eventTxId 137 TestWaitNotification: OK eventTxId 138 TestWaitNotification: OK eventTxId 139 TestWaitNotification: OK eventTxId 140 TestWaitNotification: OK eventTxId 141 TestWaitNotification: OK eventTxId 142 TestWaitNotification: OK eventTxId 143 TestWaitNotification: OK eventTxId 144 TestWaitNotification: OK eventTxId 145 TestWaitNotification: OK eventTxId 146 TestWaitNotification: OK eventTxId 147 TestWaitNotification: OK eventTxId 148 TestModificationResults wait txId: 155 2025-03-28T09:42:12.290883Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/31" OperationType: ESchemeOpMkDir MkDir { Name: "fail" } } TxId: 155 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:42:12.291584Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: TMkDir Propose, path: /MyRoot/1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/31/fail, operationId: 155:0, at schemeshard: 72057594046678944 2025-03-28T09:42:12.291917Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 155:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/31/fail', error: paths depth limit exceeded, limit: 32, depth: 33, delta: 0, at schemeshard: 72057594046678944 2025-03-28T09:42:12.295475Z node 15 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 155, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/31/fail\', error: paths depth limit exceeded, limit: 32, depth: 33, delta: 0" TxId: 155 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:42:12.295951Z node 15 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 155, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check 
failed: path: '/MyRoot/1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/31/fail', error: paths depth limit exceeded, limit: 32, depth: 33, delta: 0, operation: CREATE DIRECTORY, path: /MyRoot/1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/31/fail TestModificationResult got TxId: 155, wait until txId: 155 2025-03-28T09:42:12.297284Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2025-03-28T09:42:12.297576Z node 15 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot" took 343us result status StatusSuccess 2025-03-28T09:42:12.298193Z node 15 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "1" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 31 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TestRawParser::TypeKindsValidation [GOOD] >> KafkaProtocol::MetadataScenario [GOOD] >> KafkaProtocol::MetadataInServerlessScenario >> TSelectFromViewTest::OneTable [GOOD] >> TSelectFromViewTest::OneTableUsingRelativeName >> QuoterWithKesusTest::FailsToGetBigQuota [GOOD] >> QuoterWithKesusTest::PrefetchCoefficient |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest >> TestRawParser::TypeKindsValidation [GOOD] Test command err: 2025-03-28T09:40:44.189652Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-03-28T09:40:44.189681Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where col_0 == "str1"' (filter id: [0:0:0]) 2025-03-28T09:40:44.189703Z node 1 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where col_0 == "str1"; 2025-03-28T09:40:44.189723Z node 1 :FQ_ROW_DISPATCHER TRACE: 
TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-03-28T09:40:44.195286Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [1:7486793047614397478:2051] 2025-03-28T09:40:47.370244Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7486793047614397478:2051] [id 1]: Started compile request 2025-03-28T09:40:48.265984Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7486793047614397478:2051] [id 1]: Compilation completed for request 2025-03-28T09:40:48.266124Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [1:7486793047614397478:2051] 2025-03-28T09:40:48.266384Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-03-28T09:40:48.266420Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-03-28T09:40:48.266495Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [1:0:0] 2025-03-28T09:40:48.266505Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where col_1 == "str2"' (filter id: [1:0:0]) 2025-03-28T09:40:48.266523Z node 1 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where col_1 == "str2"; 2025-03-28T09:40:48.266544Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [1:0:0] : Send compile request with id 2 2025-03-28T09:40:48.266649Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 2 from [1:7486793047614397478:2051] 2025-03-28T09:40:48.266727Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7486793047614397478:2051] [id 2]: Started compile request 2025-03-28T09:40:48.295250Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [1:7486793047614397478:2051] [id 2]: Compilation completed for request 2025-03-28T09:40:48.295372Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 2 from [1:7486793047614397478:2051] 2025-03-28T09:40:48.295490Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 2 2025-03-28T09:40:48.295512Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [1:0:0] : Filter compilation finished 2025-03-28T09:40:48.295546Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [2:0:0] 2025-03-28T09:40:48.295618Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: FilterData for 3 clients, number rows: 3 2025-03-28T09:40:48.295638Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 3 rows to purecalc filter (filter id: [1:0:0]) 2025-03-28T09:40:48.295643Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 3 rows 2025-03-28T09:40:48.300300Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Add 3 rows to client [2:0:0] without filtering 2025-03-28T09:40:48.300344Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 3 rows to purecalc filter (filter id: [0:0:0]) 2025-03-28T09:40:48.300351Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 3 rows 2025-03-28T09:40:48.300425Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Remove filter with id [2:0:0] 2025-03-28T09:40:48.300485Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: FilterData for 2 clients, number rows: 1 2025-03-28T09:40:48.300497Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 1 rows to purecalc filter (filter id: [1:0:0]) 
2025-03-28T09:40:48.300503Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-03-28T09:40:48.300526Z node 1 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Pass 1 rows to purecalc filter (filter id: [0:0:0]) 2025-03-28T09:40:48.300530Z node 1 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-03-28T09:40:48.624402Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-03-28T09:40:48.624434Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where a1 = "str1"' (filter id: [0:0:0]) 2025-03-28T09:40:48.624473Z node 2 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a1 = "str1"; 2025-03-28T09:40:48.624509Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-03-28T09:40:48.627935Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [2:7486793066795853633:2051] 2025-03-28T09:40:51.478656Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [2:7486793066795853633:2051] [id 1]: Started compile request 2025-03-28T09:40:51.514773Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [2:7486793066795853633:2051] [id 1]: Compilation completed for request 2025-03-28T09:40:51.514902Z node 2 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [2:7486793066795853633:2051] 2025-03-28T09:40:51.515102Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-03-28T09:40:51.515120Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-03-28T09:40:51.515140Z node 2 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-03-28T09:40:51.935732Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-03-28T09:40:51.935778Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'where a2 ... 50' (filter id: [0:0:0]) 2025-03-28T09:40:51.935819Z node 3 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 50; 2025-03-28T09:40:51.935857Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-03-28T09:40:51.935989Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 1 from [3:7486793080390077664:2051] 2025-03-28T09:40:54.661179Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [3:7486793080390077664:2051] [id 1]: Started compile request 2025-03-28T09:40:54.724079Z node 3 :FQ_ROW_DISPATCHER ERROR: TPurecalcCompileActor [3:7486793080390077664:2051] [id 1]: Compilation failed for request 2025-03-28T09:40:54.724221Z node 3 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 1 from [3:7486793080390077664:2051] 2025-03-28T09:40:54.728323Z node 3 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-03-28T09:40:54.728524Z node 3 :FQ_ROW_DISPATCHER ERROR: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation error: {
: Error: Failed to compile purecalc program subissue: {
: Error: Compile issues: generated.sql:2:36: Error: mismatched input '.' expecting {'$', ABORT, ACTION, ADD, AFTER, ALL, ALTER, ANALYZE, AND, ANSI, ANY, ARRAY, AS, ASC, ASSUME, ASYMMETRIC, ASYNC, AT, ATTACH, ATTRIBUTES, AUTOINCREMENT, BACKUP, BATCH, COLLECTION, BEFORE, BEGIN, BERNOULLI, BETWEEN, BITCAST, BY, CALLABLE, CASCADE, CASE, CAST, CHANGEFEED, CHECK, CLASSIFIER, COLLATE, COLUMN, COLUMNS, COMMIT, COMPACT, CONDITIONAL, CONFLICT, CONNECT, CONSTRAINT, CONSUMER, COVER, CREATE, CROSS, CUBE, CURRENT, CURRENT_DATE, CURRENT_TIME, CURRENT_TIMESTAMP, DATA, DATABASE, DECIMAL, DECLARE, DEFAULT, DEFERRABLE, DEFERRED, DEFINE, DELETE, DESC, DESCRIBE, DETACH, DICT, DIRECTORY, DISABLE, DISCARD, DISTINCT, DO, DROP, EACH, ELSE, EMPTY, EMPTY_ACTION, ENCRYPTED, END, ENUM, ERASE, ERROR, ESCAPE, EVALUATE, EXCEPT, EXCLUDE, EXCLUSION, EXCLUSIVE, EXISTS, EXPLAIN, EXPORT, EXTERNAL, FAIL, FAMILY, FILTER, FIRST, FLATTEN, FLOW, FOLLOWING, FOR, FOREIGN, FROM, FULL, FUNCTION, GLOB, GLOBAL, GRANT, GROUP, GROUPING, GROUPS, HASH, HAVING, HOP, IF, IGNORE, ILIKE, IMMEDIATE, IMPORT, IN, INCREMENT, INCREMENTAL, INDEX, INDEXED, INHERITS, INITIAL, INITIALLY, INNER, INSERT, INSTEAD, INTERSECT, INTO, IS, ISNULL, JOIN, JSON_EXISTS, JSON_QUERY, JSON_VALUE, KEY, LAST, LEFT, LEGACY, LIKE, LIMIT, LIST, LOCAL, LOGIN, MANAGE, MATCH, MATCHES, MATCH_RECOGNIZE, MEASURES, MICROSECONDS, MILLISECONDS, MODIFY, NANOSECONDS, NATURAL, NEXT, NO, NOLOGIN, NOT, NOTNULL, NULL, NULLS, OBJECT, OF, OFFSET, OMIT, ON, ONE, ONLY, OPTION, OPTIONAL, OR, ORDER, OTHERS, OUTER, OVER, OWNER, PARALLEL, PARTITION, PASSING, PASSWORD, PAST, PATTERN, PER, PERMUTE, PLAN, POOL, PRAGMA, PRECEDING, PRESORT, PRIMARY, PRIVILEGES, PROCESS, QUERY, QUEUE, RAISE, RANGE, REDUCE, REFERENCES, REGEXP, REINDEX, RELEASE, REMOVE, RENAME, REPLACE, REPLICATION, RESET, RESOURCE, RESPECT, RESTART, RESTORE, RESTRICT, RESULT, RETURN, RETURNING, REVERT, REVOKE, RIGHT, RLIKE, ROLLBACK, ROLLUP, ROW, ROWS, SAMPLE, SAVEPOINT, SCHEMA, SECONDS, SEEK, SELECT, SEMI, SET, SETS, SHOW, TSKIP, SEQUENCE, SOURCE, START, STREAM, STRUCT, SUBQUERY, SUBSET, SYMBOLS, SYMMETRIC, SYNC, SYSTEM, TABLE, TABLES, TABLESAMPLE, TABLESTORE, TAGGED, TEMP, TEMPORARY, THEN, TIES, TO, TOPIC, TRANSACTION, TRANSFER, TRIGGER, TUPLE, TYPE, UNBOUNDED, UNCONDITIONAL, UNION, UNIQUE, UNKNOWN, UNMATCHED, UPDATE, UPSERT, USE, USER, USING, VACUUM, VALUES, VARIANT, VIEW, VIRTUAL, WHEN, WHERE, WINDOW, WITH, WITHOUT, WRAPPER, XOR, STRING_VALUE, ID_PLAIN, ID_QUOTED, DIGITS} } subissue: {
: Error: Final yql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 50; } } 2025-03-28T09:40:58.569876Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Add client with id [0:0:0] 2025-03-28T09:40:58.571827Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: UpdateParser to new schema with size 2 2025-03-28T09:40:58.663384Z node 4 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-28T09:40:58.663611Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Parser was updated on new schema with 2 columns 2025-03-28T09:40:58.664607Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [0:0:0] 2025-03-28T09:40:58.664641Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create purecalc filter for predicate 'WHERE col_first = "str_first__large__"' (filter id: [0:0:0]) 2025-03-28T09:40:58.664673Z node 4 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input WHERE col_first = "str_first__large__"; 2025-03-28T09:40:58.664708Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Send compile request with id 1 2025-03-28T09:40:58.697972Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Got compile response for request with id 1 2025-03-28T09:40:58.698005Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: TFilterHandler [0:0:0] : Filter compilation finished 2025-03-28T09:40:58.698899Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Add client with id [1:0:0] 2025-03-28T09:40:58.765590Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: UpdateParser to new schema with size 3 2025-03-28T09:40:58.862148Z node 4 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-28T09:40:58.862393Z node 4 :FQ_ROW_DISPATCHER DEBUG: TTopicFormatHandler [json_each_row]: Parser was updated on new schema with 3 columns 2025-03-28T09:40:58.862415Z node 4 :FQ_ROW_DISPATCHER TRACE: TTopicFilters: Create filter with id [1:0:0] 2025-03-2 ... 
36Z node 20 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "hello1", "a2": null, "event": "event1"} 2025-03-28T09:41:29.383960Z node 21 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-28T09:41:29.703730Z node 21 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-28T09:41:30.274615Z node 22 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-28T09:41:30.275160Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-28T09:41:30.275201Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": 456, "a2": 42} 2025-03-28T09:41:30.275661Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-28T09:41:30.275696Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "456", "a2": -42} 2025-03-28T09:41:30.275982Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-28T09:41:30.276011Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": "str", "a2": 99999} 2025-03-28T09:41:30.276308Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-28T09:41:30.276339Z node 22 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 45, values: {"a1": "456", "a2": 42, "a3": 1.11.1} 2025-03-28T09:41:30.970163Z node 23 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-28T09:41:30.970466Z node 23 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-28T09:41:30.970518Z node 23 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": "-456"} 2025-03-28T09:41:31.722326Z node 24 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-28T09:41:31.723410Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-28T09:41:31.723474Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": {"key": "value"}, "a2": {"key2": "value2"}} 2025-03-28T09:41:31.724066Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-28T09:41:31.724110Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": {"key": "value", "nested": {"a": "b", "c":}}, "a2": "str"} 2025-03-28T09:41:31.724485Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-28T09:41:31.724525Z node 24 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": {"key" "value"}, "a2": "str"} 2025-03-28T09:41:32.414101Z node 25 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-28T09:41:32.414429Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-28T09:41:32.414476Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 42, values: {"a1": true, "a2": false} 2025-03-28T09:41:32.415004Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-28T09:41:32.415062Z node 25 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "true", "a2": falce} 2025-03-28T09:41:33.056156Z node 26 :FQ_ROW_DISPATCHER INFO: TJsonParser: Simdjson active implementation icelake 2025-03-28T09:41:33.056444Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-28T09:41:33.056486Z node 26 :FQ_ROW_DISPATCHER TRACE: 
TJsonParser: Do parsing, first offset: 42, values: {"a1": Yelse} 2025-03-28T09:41:33.057046Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-28T09:41:33.057082Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 43, values: {"a1": "st""r"} 2025-03-28T09:41:33.057272Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-28T09:41:33.057305Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 44, values: {"a1": "x"} {"a1": "y"} 2025-03-28T09:41:33.057493Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Add 1 messages to parse 2025-03-28T09:41:33.057532Z node 26 :FQ_ROW_DISPATCHER TRACE: TJsonParser: Do parsing, first offset: 45, values: { 2025-03-28T09:41:33.558601Z node 27 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-03-28T09:41:33.563155Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [27:7486793257913983636:2051] 2025-03-28T09:41:39.600621Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [27:7486793257913983636:2051] [id 0]: Started compile request 2025-03-28T09:41:39.693600Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [27:7486793257913983636:2051] [id 0]: Compilation completed for request 2025-03-28T09:41:39.693738Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [27:7486793257913983636:2051] 2025-03-28T09:41:39.697595Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-03-28T09:41:39.698965Z node 27 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-03-28T09:41:40.608105Z node 28 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-03-28T09:41:40.613823Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [28:7486793290732515965:2051] 2025-03-28T09:41:46.862827Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [28:7486793290732515965:2051] [id 0]: Started compile request 2025-03-28T09:41:46.923676Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [28:7486793290732515965:2051] [id 0]: Compilation completed for request 2025-03-28T09:41:46.923820Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [28:7486793290732515965:2051] 2025-03-28T09:41:46.924206Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-03-28T09:41:46.924301Z node 28 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-03-28T09:41:47.556015Z node 29 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 100; 2025-03-28T09:41:47.556195Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [29:7486793318220021837:2051] 2025-03-28T09:41:52.566785Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [29:7486793318220021837:2051] [id 0]: Started compile request 2025-03-28T09:41:52.621568Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [29:7486793318220021837:2051] [id 0]: Compilation completed for request 2025-03-28T09:41:52.621731Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [29:7486793318220021837:2051] 
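The TJsonParser traces above exercise error paths by feeding intentionally malformed documents — {"a1": {"key" "value"}, ...}, {"a2": falce}, {"a1": Yelse}, two concatenated objects, an unterminated '{' — while the INFO records confirm simdjson selected its icelake kernel for parsing. A minimal sketch of rejecting such input with simdjson's DOM API (illustrative only; this is not the actual TJsonParser implementation):

    #include <simdjson.h>
    #include <cstdio>

    int main() {
        // One of the intentionally broken payloads from the trace above:
        // a missing ':' between "key" and "value".
        const char kBroken[] = R"({"a1": {"key" "value"}, "a2": "str"})";

        simdjson::dom::parser parser;
        simdjson::dom::element doc;
        auto error = parser.parse(kBroken, sizeof(kBroken) - 1).get(doc);
        if (error) {
            // Prints a human-readable diagnostic for the malformed input.
            std::printf("parse error: %s\n", simdjson::error_message(error));
        }
        return 0;
    }
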
2025-03-28T09:41:52.621919Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-03-28T09:41:52.622054Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-03-28T09:41:52.622094Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-03-28T09:41:52.622128Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-03-28T09:41:52.622163Z node 29 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 2 rows 2025-03-28T09:41:53.286242Z node 30 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a1 is null; 2025-03-28T09:41:53.286716Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [30:7486793343334280178:2051] 2025-03-28T09:41:58.854793Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [30:7486793343334280178:2051] [id 0]: Started compile request 2025-03-28T09:41:58.915517Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [30:7486793343334280178:2051] [id 0]: Compilation completed for request 2025-03-28T09:41:58.915652Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [30:7486793343334280178:2051] 2025-03-28T09:41:58.919069Z node 30 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-03-28T09:41:59.698168Z node 31 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 > 50; 2025-03-28T09:41:59.703276Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [31:7486793369385327690:2051] 2025-03-28T09:42:05.775978Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [31:7486793369385327690:2051] [id 0]: Started compile request 2025-03-28T09:42:05.844903Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [31:7486793369385327690:2051] [id 0]: Compilation completed for request 2025-03-28T09:42:05.845068Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [31:7486793369385327690:2051] 2025-03-28T09:42:05.846948Z node 31 :FQ_ROW_DISPATCHER TRACE: TPurecalcFilter: Do filtering for 1 rows 2025-03-28T09:42:06.688614Z node 32 :FQ_ROW_DISPATCHER DEBUG: TPurecalcFilter: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT _offset FROM Input where a2 ... 
50; 2025-03-28T09:42:06.688968Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Add to compile queue request with id 0 from [32:7486793400730498866:2051] 2025-03-28T09:42:12.123512Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileActor [32:7486793400730498866:2051] [id 0]: Started compile request 2025-03-28T09:42:12.135806Z node 32 :FQ_ROW_DISPATCHER ERROR: TPurecalcCompileActor [32:7486793400730498866:2051] [id 0]: Compilation failed for request 2025-03-28T09:42:12.135968Z node 32 :FQ_ROW_DISPATCHER TRACE: TPurecalcCompileService: Compile finished for request with id 0 from [32:7486793400730498866:2051] 2025-03-28T09:42:13.442506Z node 33 :FQ_ROW_DISPATCHER TRACE: TRawParser: Add 1 messages to parse 2025-03-28T09:42:13.442573Z node 33 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello1__large_str", "a2": 101, "event": "event1"} 2025-03-28T09:42:14.146263Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Add 3 messages to parse 2025-03-28T09:42:14.146314Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello1", "a2": "101", "event": "event1"} 2025-03-28T09:42:14.146388Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 43, value: {"a1": "hello1", "a2": "101", "event": "event2"} 2025-03-28T09:42:14.146406Z node 34 :FQ_ROW_DISPATCHER TRACE: TRawParser: Do parsing, first offset: 44, value: {"a2": "101", "a1": "hello1", "event": "event3"} |98.3%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest >> test_drain.py::TestHive::test_drain_tablets >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] |98.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] |98.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation >> test_alloc_default.py::TestAlloc::test_node_limit[kikimr0] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/coordinator/ut/unittest >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] Test command err: 2025-03-28T09:40:20.605824Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:698:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:20.606228Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:20.606441Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:40:20.607268Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [2:695:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:20.607854Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:20.608082Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a1b/r3tmp/tmpi0w5cd/pdisk_1.dat 2025-03-28T09:40:21.186947Z node 1 :IMPORT WARN: Table profiles were not loaded ... waiting for the first mediator step 2025-03-28T09:40:21.483742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:21.484368Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:21.487383Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:21.487508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:21.503678Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:40:21.504501Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:21.505242Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... found first step to be 500 2025-03-28T09:40:21.777274Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 500 ... waiting for the next mediator step ... found second step to be 1000 ... read step subscribe result: [500, 1000] ... read step subscribe update: 2000 2025-03-28T09:40:22.969943Z node 1 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 2000 ... read step subscribe result: [2000, 2000] ... read step subscribe update: 2500 ... read step subscribe update: 2500 ... read step subscribe update: 3000 ... read step subscribe update: 4000 ... read step subscribe update: 5000 ... read step subscribe update: 6000 ... 
read step subscribe result: [2000, 6000] 2025-03-28T09:40:25.720991Z node 2 :TX_PROXY WARN: actor# [2:238:2129] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2025-03-28T09:40:25.721374Z node 1 :PIPE_SERVER ERROR: [72057594037932033] NodeDisconnected NodeId# 2 2025-03-28T09:40:25.721437Z node 1 :PIPE_SERVER ERROR: [72057594037936131] NodeDisconnected NodeId# 2 2025-03-28T09:40:25.721477Z node 1 :PIPE_SERVER ERROR: [72057594046447617] NodeDisconnected NodeId# 2 2025-03-28T09:40:25.721511Z node 1 :PIPE_SERVER ERROR: [72057594037968897] NodeDisconnected NodeId# 2 2025-03-28T09:40:25.721593Z node 1 :PIPE_SERVER ERROR: [72057594037936129] NodeDisconnected NodeId# 2 2025-03-28T09:40:25.721766Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:59} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:105:2087] ServerId# [1:1071:2628] TabletId# 72057594037932033 PipeClientId# [2:105:2087] 2025-03-28T09:40:25.722279Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeDisconnected, NodeId 2 2025-03-28T09:40:25.722378Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnecting 2025-03-28T09:40:25.723261Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnecting -> Disconnected 2025-03-28T09:40:25.729735Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:25.764277Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:40:25.765419Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... read step subscribe update: 7000 ... read step subscribe update: 8000 ... read step subscribe update: 9000 ... read step subscribe update: 10000 ... read step subscribe update: 11000 2025-03-28T09:40:37.080289Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [3:700:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:37.080787Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:37.080945Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2025-03-28T09:40:37.083570Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [4:697:2355], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:37.084172Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:37.084282Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a1b/r3tmp/tmpHw1DQb/pdisk_1.dat 2025-03-28T09:40:37.486414Z node 3 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:37.705355Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1141:2375] at coordinator 72057594046316545 with seqNo 123 and cookie 234 2025-03-28T09:40:37.796714Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:37.796865Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:37.819823Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:37.819946Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:37.836463Z node 3 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2025-03-28T09:40:37.837049Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:37.837490Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:38.499258Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1142:2376] at coordinator 72057594046316545 with seqNo 234 and cookie 345 2025-03-28T09:40:39.268690Z node 3 :TX_COORDINATOR DEBUG: Processing TEvSubscribeLastStep from [4:1141:2375] at coordinator 72057594046316545 with seqNo 124 and cookie 245 2025-03-28T09:40:39.280434Z node 3 :TX_COORDINATOR DEBUG: Ignored TEvSubscribeLastStep from [4:1141:2375] at coordinator 72057594046316545 with seqNo 123 existing seqNo 124 2025-03-28T09:40:39.940806Z node 3 :TX_COORDINATOR DEBUG: Processing TEvUnsubscribeLastStep from [4:1141:2375] at coordinator 72057594046316545 with seqNo 124 2025-03-28T09:40:47.191572Z node 5 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [5:297:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:47.191790Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:47.191947Z node 5 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a1b/r3tmp/tmpPaeANa/pdisk_1.dat 2025-03-28T09:40:47.523910Z node 5 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:47.572345Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:47.572931Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:47.584664Z node 5 :HIVE WARN: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected Rebooting coordinator to restore config 2025-03-28T09:40:47.816759Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete 2025-03-28T09:40:47.818154Z node 5 :TX_COORDINATOR INFO: Coordinator# 72057594046316545 restoring static processing params 2025-03-28T09:40:47.818591Z node 5 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-03-28T09:40:47.819008Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 version# 0 TTxConfigure Complete Rebooting coordinator a second time 2025-03-28T09:40:48.129418Z node 5 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete 2025-03-28T09:40:48.131611Z node 5 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-03-28T09:40:52.964929Z node 6 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [6:688:2416], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:52.965295Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:52.965495Z node 6 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000a1b/r3tmp/tmp2JIfK4/pdisk_1.dat 2025-03-28T09:40:53.352114Z node 6 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:53.591587Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:53.591728Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:53.614739Z node 6 :HIVE WARN: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected Sending CreateDatabase request 2025-03-28T09:40:54.467772Z node 6 :CMS_TENANT ... d ... coordinator 72057594046316545 gen 2 is planning step 1000 2025-03-28T09:42:17.694497Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:562:2491] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 50 To# 1000Steps: {{TCoordinatorStep step# 1000 PrevStep# 50}}} marker# M1 2025-03-28T09:42:17.694625Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:562:2491] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:563:2492] bucket.ActiveActor step# 1000 2025-03-28T09:42:17.694723Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:562:2491] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:564:2493] bucket.ActiveActor step# 1000 2025-03-28T09:42:17.694808Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:563:2492] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1000} 2025-03-28T09:42:17.694957Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:564:2493] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1000} ... waiting for blocked put responses 2025-03-28T09:42:17.708701Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 HANDLE EvProposeTransaction marker# C0 2025-03-28T09:42:17.708793Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 step# 1050 Status# 16 SEND to# [20:595:2519] Proxy marker# C1 ... waiting for blocked put responses ... coordinator 72057594046316545 gen 2 is planning step 1050 2025-03-28T09:42:17.722944Z node 20 :TX_COORDINATOR DEBUG: Transaction 10000000 has been planned 2025-03-28T09:42:17.723084Z node 20 :TX_COORDINATOR DEBUG: Planned transaction 10000000 for mediator 72057594046382081 tablet 72057594047365120 ... blocking put [72057594046316545:2:12:1:24576:168:0] response ... waiting for planning for the required step ... waiting for planning for the required step ... coordinator 72057594046316545 gen 2 is planning step 1100 ... starting a new coordinator instance ... waiting for migrated state 2025-03-28T09:42:17.786015Z node 20 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-03-28T09:42:17.786251Z node 20 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-03-28T09:42:17.806530Z node 20 :TX_COORDINATOR INFO: tablet# 72057594046316545 CreateTxInit Complete ... 
blocking state response from [20:559:2400] to [20:690:2563] LastSentStep: 1000 LastAcquiredStep: 0 LastConfirmedStep: 0 ... unblocking put responses and requests 2025-03-28T09:42:17.824752Z node 20 :TX_COORDINATOR INFO: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2025-03-28T09:42:17.824908Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000000 stepId# 1050 Status# 17 SEND EvProposeTransactionStatus to# [20:595:2519] Proxy 2025-03-28T09:42:17.826453Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 server# [20:566:2495] disconnnected 2025-03-28T09:42:17.826600Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:562:2491] MediatorId# 72057594046382081 HANDLE TEvServerDisconnected server# [20:566:2495] ... trying to plan tx 10000011 ... waiting for planned another persistent tx 2025-03-28T09:42:17.857168Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 server# [20:697:2573] connected 2025-03-28T09:42:17.857391Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorSync 2025-03-28T09:42:17.857467Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCoordinatorSyncResult to# [20:693:2571] Cookie# 1 CompleteStep# 1000 LatestKnownStep# 1000 SubjectiveTime# 952 Coordinator# 72057594046316545 2025-03-28T09:42:17.857667Z node 20 :TX_COORDINATOR NOTICE: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2025-03-28T09:42:17.857733Z node 20 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1050, txid# 10000000 marker# C2 ... observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000000 } Step: 1050 PrevStep: 0 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-03-28T09:42:17.862397Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1050 2025-03-28T09:42:17.862499Z node 20 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [1050] transactions [1] 2025-03-28T09:42:17.862704Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCommitStep to# [20:562:2491] ExecQueue {TMediateStep From 1000 To# 1050Steps: {{TCoordinatorStep step# 1050 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:693:2571]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M0 2025-03-28T09:42:17.862947Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:562:2491] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1000 To# 1050Steps: {{TCoordinatorStep step# 1050 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:693:2571]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M1 2025-03-28T09:42:17.863053Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:562:2491] MediatorId# 72057594046382081 SEND Ev to# [20:563:2492] step# 1050 forTablet# 72057594047365120 txid# 10000000 marker# M3 2025-03-28T09:42:17.863149Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:562:2491] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:563:2492] bucket.ActiveActor step# 1050 2025-03-28T09:42:17.863225Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:562:2491] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:564:2493] bucket.ActiveActor step# 1050 2025-03-28T09:42:17.863355Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:563:2492] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep 
step# 1050 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000000 AckTo# [20:693:2571]}}} marker# M4 2025-03-28T09:42:17.863530Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:564:2493] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1050} 2025-03-28T09:42:17.863674Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 HANDLE EvProposeTransaction marker# C0 2025-03-28T09:42:17.863735Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 step# 1100 Status# 16 SEND to# [20:595:2519] Proxy marker# C1 2025-03-28T09:42:17.863892Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:563:2492] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1050} 2025-03-28T09:42:17.864420Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:563:2492] Mediator# 72057594046382081 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365120 Status: OK ServerId: [20:701:2576] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:42:17.864543Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:563:2492] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 1050 MediatorId# 72057594046382081 TabletID 72057594047365120} ... observed tablet step: Transactions { TxId: 10000000 AckTo { RawX1: 0 RawX2: 0 } } Step: 1050 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... waiting for planned another persistent tx ... coordinator 72057594046316545 gen 3 is planning step 1100 2025-03-28T09:42:17.876581Z node 20 :TX_COORDINATOR DEBUG: Transaction 10000011 has been planned 2025-03-28T09:42:17.876715Z node 20 :TX_COORDINATOR DEBUG: Planned transaction 10000011 for mediator 72057594046382081 tablet 72057594047365120 2025-03-28T09:42:17.877670Z node 20 :TX_COORDINATOR DEBUG: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1100, txid# 10000011 marker# C2 2025-03-28T09:42:17.877772Z node 20 :TX_COORDINATOR DEBUG: tablet# 72057594046316545 txid# 10000011 stepId# 1100 Status# 17 SEND EvProposeTransactionStatus to# [20:595:2519] Proxy ... 
observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000011 } Step: 1100 PrevStep: 1050 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-03-28T09:42:17.878142Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1100 2025-03-28T09:42:17.878210Z node 20 :TX_MEDIATOR INFO: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [1100] transactions [1] 2025-03-28T09:42:17.878370Z node 20 :TX_MEDIATOR DEBUG: tablet# 72057594046382081 SEND EvCommitStep to# [20:562:2491] ExecQueue {TMediateStep From 1050 To# 1100Steps: {{TCoordinatorStep step# 1100 PrevStep# 1050Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:693:2571]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M0 2025-03-28T09:42:17.878532Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:562:2491] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1050 To# 1100Steps: {{TCoordinatorStep step# 1100 PrevStep# 1050Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:693:2571]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M1 2025-03-28T09:42:17.878636Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:562:2491] MediatorId# 72057594046382081 SEND Ev to# [20:563:2492] step# 1100 forTablet# 72057594047365120 txid# 10000011 marker# M3 2025-03-28T09:42:17.878754Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:562:2491] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:563:2492] bucket.ActiveActor step# 1100 2025-03-28T09:42:17.878814Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:562:2491] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:564:2493] bucket.ActiveActor step# 1100 2025-03-28T09:42:17.878970Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:563:2492] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 1100 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000011 AckTo# [20:693:2571]}}} marker# M4 2025-03-28T09:42:17.879075Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:563:2492] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 1100 MediatorId# 72057594046382081 TabletID 72057594047365120} 2025-03-28T09:42:17.879165Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:564:2493] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1100} 2025-03-28T09:42:17.879330Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:563:2492] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1100} ... observed tablet step: Transactions { TxId: 10000011 AckTo { RawX1: 0 RawX2: 0 } } Step: 1100 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... coordinator 72057594046316545 gen 3 is planning step 1150 ... 
observed step: Step: 1150 PrevStep: 1100 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2025-03-28T09:42:17.890673Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:562:2491] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 1100 To# 1150Steps: {{TCoordinatorStep step# 1150 PrevStep# 1100}}} marker# M1 2025-03-28T09:42:17.890753Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:562:2491] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:563:2492] bucket.ActiveActor step# 1150 2025-03-28T09:42:17.890795Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: Actor# [20:562:2491] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:564:2493] bucket.ActiveActor step# 1150 2025-03-28T09:42:17.890844Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:563:2492] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1150} 2025-03-28T09:42:17.890888Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: Actor# [20:564:2493] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 1150} |98.3%| [TM] {RESULT} ydb/core/tx/coordinator/ut/unittest >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDataShardRSTest::TestCleanupInRS+UseSink [GOOD] >> TDataShardRSTest::TestCleanupInRS-UseSink >> TDqPqReadActorTest::TestReadFromTopic [GOOD] >> TDqPqReadActorTest::TestReadFromTopicFromNow |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> KafkaProtocol::MetadataInServerlessScenario [GOOD] >> KafkaProtocol::NativeKafkaBalanceScenario >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] >> QuoterWithKesusTest::PrefetchCoefficient [GOOD] >> QuoterWithKesusTest::GetsQuotaAfterPause >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] >> TDqPqReadActorTest::TestReadFromTopicFromNow [GOOD] >> TDqPqReadActorTest::ReadWithFreeSpace |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardStats::NoData [GOOD] >> DataShardStats::Follower >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateQuery >> TSentinelTests::BSControllerUnresponsive [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] >> TSelectFromViewTest::OneTableUsingRelativeName [GOOD] >> TSelectFromViewTest::DisabledFeatureFlag >> TDqPqReadActorTest::ReadWithFreeSpace [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::ReadNonExistentTopic >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] >> 
TControlPlaneProxyTest::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated >> TDqPqReadActorTest::ReadNonExistentTopic [GOOD] >> TDqPqReadActorTest::TestSaveLoadPqRead >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] >> KqpTpch::Query21 [GOOD] >> KqpTpch::Query22 >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated [GOOD] >> TControlPlaneProxyTest::ShouldSendListQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::BSControllerUnresponsive [GOOD] Test command err: 2025-03-28T09:40:55.518763Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateConfig 2025-03-28T09:40:55.518880Z node 1 :CMS DEBUG: [Sentinel] [Main] Start ConfigUpdater 2025-03-28T09:40:55.519000Z node 1 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-03-28T09:40:55.519031Z node 1 :CMS INFO: [Sentinel] [Main] StateUpdater was delayed 2025-03-28T09:40:55.519094Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2025-03-28T09:40:55.519193Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2025-03-28T09:40:55.520948Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" 
State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2025-03-28T09:40:55.526670Z node 1 :CMS DEBUG: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 
DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 
GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDom ... 
kId: 286 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-286.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 287 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-287.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860026 2025-03-28T09:42:28.270471Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 72, response# PDiskStateInfo { PDiskId: 288 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-288.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 289 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-289.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 290 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-290.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 291 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-291.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860026 2025-03-28T09:42:28.270617Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 66, response# PDiskStateInfo { PDiskId: 264 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-264.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 265 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-265.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 266 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-266.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 267 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-267.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860026 2025-03-28T09:42:28.270776Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 67, response# PDiskStateInfo { PDiskId: 268 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-268.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 269 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-269.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 270 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-270.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 271 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-271.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860026 2025-03-28T09:42:28.270841Z node 65 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s 2025-03-28T09:42:28.271260Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 72:290, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-03-28T09:42:28.271314Z node 65 :CMS NOTICE: [Sentinel] [Main] PDisk status changed: pdiskId# 70:282, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2025-03-28T09:42:28.271358Z node 65 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2025-03-28T09:42:28.283096Z node 65 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 1 2025-03-28T09:42:28.283167Z node 65 :CMS DEBUG: 
[Sentinel] [Main] Change pdisk status: requestsSize# 2 2025-03-28T09:42:28.293672Z node 65 :CMS DEBUG: [Sentinel] [Main] UpdateState 2025-03-28T09:42:28.293743Z node 65 :CMS DEBUG: [Sentinel] [Main] Start StateUpdater 2025-03-28T09:42:28.293851Z node 65 :CMS DEBUG: [Sentinel] [Main] Retrying: attempt# 2 2025-03-28T09:42:28.293879Z node 65 :CMS DEBUG: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2025-03-28T09:42:28.294040Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 65, wbId# [65:8388350642965737326:1634689637] 2025-03-28T09:42:28.294084Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 66, wbId# [66:8388350642965737326:1634689637] 2025-03-28T09:42:28.294121Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 67, wbId# [67:8388350642965737326:1634689637] 2025-03-28T09:42:28.294153Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 68, wbId# [68:8388350642965737326:1634689637] 2025-03-28T09:42:28.294185Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 69, wbId# [69:8388350642965737326:1634689637] 2025-03-28T09:42:28.294213Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 70, wbId# [70:8388350642965737326:1634689637] 2025-03-28T09:42:28.294241Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 71, wbId# [71:8388350642965737326:1634689637] 2025-03-28T09:42:28.294282Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 72, wbId# [72:8388350642965737326:1634689637] 2025-03-28T09:42:28.294496Z node 65 :CMS DEBUG: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { }, cookie# 123 2025-03-28T09:42:28.294548Z node 65 :CMS ERROR: [Sentinel] [Main] Unsuccesful response from BSC: error# 2025-03-28T09:42:28.295045Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 65, response# PDiskStateInfo { PDiskId: 260 CreateTime: 0 ChangeTime: 0 Path: "/65/pdisk-260.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 261 CreateTime: 0 ChangeTime: 0 Path: "/65/pdisk-261.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 262 CreateTime: 0 ChangeTime: 0 Path: "/65/pdisk-262.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 263 CreateTime: 0 ChangeTime: 0 Path: "/65/pdisk-263.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880026 2025-03-28T09:42:28.295509Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 69, response# PDiskStateInfo { PDiskId: 276 CreateTime: 0 ChangeTime: 0 Path: "/69/pdisk-276.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 277 CreateTime: 0 ChangeTime: 0 Path: "/69/pdisk-277.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 278 CreateTime: 0 ChangeTime: 0 Path: "/69/pdisk-278.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 279 CreateTime: 0 ChangeTime: 0 Path: "/69/pdisk-279.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880026 2025-03-28T09:42:28.295724Z node 65 :CMS DEBUG: 
[Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 70, response# PDiskStateInfo { PDiskId: 280 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-280.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 281 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-281.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 282 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-282.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 283 CreateTime: 0 ChangeTime: 0 Path: "/70/pdisk-283.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880026 2025-03-28T09:42:28.295844Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 71, response# PDiskStateInfo { PDiskId: 284 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-284.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 285 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-285.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 286 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-286.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 287 CreateTime: 0 ChangeTime: 0 Path: "/71/pdisk-287.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880026 2025-03-28T09:42:28.295983Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 72, response# PDiskStateInfo { PDiskId: 288 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-288.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 289 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-289.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 290 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-290.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 291 CreateTime: 0 ChangeTime: 0 Path: "/72/pdisk-291.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880026 2025-03-28T09:42:28.296105Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 66, response# PDiskStateInfo { PDiskId: 264 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-264.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 265 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-265.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 266 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-266.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 267 CreateTime: 0 ChangeTime: 0 Path: "/66/pdisk-267.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880026 2025-03-28T09:42:28.296239Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 67, response# PDiskStateInfo { PDiskId: 268 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-268.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 269 CreateTime: 0 ChangeTime: 0 Path: 
"/67/pdisk-269.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 270 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-270.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 271 CreateTime: 0 ChangeTime: 0 Path: "/67/pdisk-271.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880026 2025-03-28T09:42:28.296374Z node 65 :CMS DEBUG: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 68, response# PDiskStateInfo { PDiskId: 272 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-272.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 273 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-273.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 274 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-274.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 275 CreateTime: 0 ChangeTime: 0 Path: "/68/pdisk-275.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880026 2025-03-28T09:42:28.296450Z node 65 :CMS DEBUG: [Sentinel] [Main] State was updated in 0.000000s |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyTest::ShouldSendListQueries [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeQuery |98.3%| [TM] {RESULT} ydb/core/cms/ut_sentinel/unittest >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] >> TControlPlaneProxyTest::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetQueryStatus >> QuoterWithKesusTest::GetsQuotaAfterPause [GOOD] >> QuoterWithKesusTest::GetsSeveralQuotas >> KqpLimits::TooBigQuery+useSink [GOOD] >> KqpLimits::TooBigQuery-useSink >> TControlPlaneProxyTest::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyQuery |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyTest::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteQuery >> DataShardStats::Follower [GOOD] >> DataShardStats::Tli >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] >> TControlPlaneProxyTest::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendControlQuery >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> KqpTpch::Query22 [GOOD] >> TControlPlaneProxyTest::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetResultData >> TSelectFromViewTest::DisabledFeatureFlag [GOOD] >> 
TSelectFromViewTest::ReadTestCasesFromFiles |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] >> TControlPlaneProxyTest::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyTest::ShouldSendListJobs |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] >> TControlPlaneProxyTest::ShouldSendListJobs [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeJob >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] >> QuoterWithKesusTest::GetsSeveralQuotas [GOOD] >> QuoterWithKesusTest::KesusRecreation ------- [TM] {asan, default-linux-x86_64, pic, release} ydb/core/kqp/tests/kikimr_tpch/unittest >> KqpTpch::Query22 [GOOD] Test command err: -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 5 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 20 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 28 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 37 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 5 |98.4%| [TM] {RESULT} ydb/core/kqp/tests/kikimr_tpch/unittest >> TControlPlaneProxyTest::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateConnection |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyTest::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendListConnections >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] >> TControlPlaneProxyTest::ShouldSendListConnections [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeConnection >> TDqPqReadActorTest::TestSaveLoadPqRead [GOOD] >> TDqPqReadActorTest::LoadCorruptedState |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardStats::Tli [GOOD] >> DataShardStats::HasSchemaChanges_BTreeIndex >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] |98.4%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeConnection [GOOD] >> TDqPqReadActorTest::LoadCorruptedState [GOOD] >> TDqPqReadActorTest::TestLoadFromSeveralStates >> TControlPlaneProxyTest::ShouldSendModifyConnection |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyTest::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteConnection |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] >> TControlPlaneProxyTest::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendTestConnection >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> QuoterWithKesusTest::KesusRecreation [GOOD] >> QuoterWithKesusTest::AllocationStatistics >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them >> test_kv.py::TestYdbKvWorkload::test_dynumber [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyTest::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateBinding |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyTest::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendListBindings >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] >> TDqPqReadActorTest::TestLoadFromSeveralStates [GOOD] >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark >> TControlPlaneProxyTest::ShouldSendListBindings [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeBinding |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: 
[1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:39:27.565249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:27.565347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:27.565398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:27.565437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:27.565478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:27.565503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:27.565564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:27.565638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:27.565891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:27.618558Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:39:27.618596Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:39:27.624896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:27.624997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:27.625086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:27.629370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:27.629511Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:27.630055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:27.630247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:27.631756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:27.633008Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:27.633070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:27.633208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:27.633254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:27.633289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:27.633425Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:39:27.640555Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:39:27.736696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:27.736874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:27.737025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:27.737209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:27.737261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:27.739028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:27.739119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:27.739265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:27.739319Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 
ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:27.739357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:27.739385Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:27.740886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:27.740934Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:27.740973Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:27.742263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:27.742297Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:27.742337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:27.742372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:27.745070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:27.746371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:27.746518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:27.747389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:27.747492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:27.747537Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:27.747744Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:27.747781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:27.747905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:27.747981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: 
Erasing txId 1 2025-03-28T09:39:27.749562Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:27.749610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:27.749773Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:27.749807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:39:27.749983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:27.750016Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:27.750089Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:27.750117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:27.750148Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:27.750170Z no ... 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } 
Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:42:57.471456Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:42:57.471729Z node 93 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 315us result status StatusSuccess 2025-03-28T09:42:57.472540Z node 93 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 
3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:42:57.484431Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:802:2618] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-28T09:42:57.484552Z node 93 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][93:723:2618] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-03-28T09:42:57.484722Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:802:2618] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743154977431785 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743154977431785 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743154977431785 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:42:57.487662Z node 93 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409546][93:802:2618] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-03-28T09:42:57.487768Z node 93 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][93:723:2618] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> TControlPlaneProxyTest::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyBinding |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyTest::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteBinding >> DataShardStats::HasSchemaChanges_BTreeIndex [GOOD] >> DataShardStats::HasSchemaChanges_ByKeyFilter |98.5%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] >> QuoterWithKesusTest::AllocationStatistics [GOOD] >> QuoterWithKesusTest::UpdatesCountersForParentResources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/control_plane_proxy/ut/unittest >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] Test command err: 2025-03-28T09:39:32.376793Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-03-28T09:39:32.835245Z node 2 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:33.218867Z node 3 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:33.618902Z node 4 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:33.994532Z node 5 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:34.397003Z node 6 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:34.772995Z node 7 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:35.202214Z node 8 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:35.648803Z node 9 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-03-28T09:39:36.065902Z node 10 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:36.479905Z node 11 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:36.915657Z node 12 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:37.350890Z node 13 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:37.791352Z node 14 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:38.230360Z node 15 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:38.696585Z node 16 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:39.142640Z node 17 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:39.577110Z node 18 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:40.048733Z node 19 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:40.530197Z node 20 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:41.008126Z node 21 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:41.474094Z node 22 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:41.927373Z node 23 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:39:42.383607Z node 24 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:08.785206Z node 72 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-03-28T09:40:09.487897Z node 73 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:10.209458Z node 74 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:10.892274Z node 75 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:11.596027Z node 76 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:12.288238Z node 77 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:12.997622Z node 78 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:13.644938Z node 79 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:14.355111Z node 80 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2025-03-28T09:40:15.027751Z node 81 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:15.699741Z node 82 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:16.365131Z node 83 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:17.100186Z node 84 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:17.764070Z node 85 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:18.449468Z node 86 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:19.151455Z node 87 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:19.837618Z node 88 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:20.526898Z node 89 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:21.265722Z node 90 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:21.996698Z node 91 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:22.782340Z node 92 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:23.577166Z node 93 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:24.320842Z node 94 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:40:25.043864Z node 95 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2025-03-28T09:41:33.000517Z node 163 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateQueryRequest, validation failed: test_user_3@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2025-03-28T09:41:37.005332Z node 166 :YQ_CONTROL_PLANE_STORAGE ERROR: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:41:38.486547Z node 167 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:41:39.906468Z node 168 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:41:42.146355Z node 169 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:41:44.012622Z node 170 :YQ_CONTROL_PLANE_STORAGE ERROR: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:41:47.082998Z node 173 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:41:50.474339Z node 176 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:41:51.643348Z node 177 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:41:52.672962Z node 178 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:41:53.698412Z node 179 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:41:57.451180Z node 182 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:41:58.546930Z node 183 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:01.098989Z node 185 :YQ_CONTROL_PLANE_STORAGE ERROR: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:02.590735Z node 186 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:05.168021Z node 188 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:06.502889Z node 189 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:07.884324Z node 190 :YQ_CONTROL_PLANE_STORAGE ERROR: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:10.561545Z node 192 :YQ_CONTROL_PLANE_STORAGE ERROR: ListJobsRequest, validation failed: test_user_4@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000
2025-03-28T09:42:11.632007Z node 193 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:13.096502Z node 194 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:14.641883Z node 195 :YQ_CONTROL_PLANE_STORAGE ERROR: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:16.421221Z node 196 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:17.777329Z node 197 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:19.386035Z node 198 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:20.917365Z node 199 :YQ_CONTROL_PLANE_STORAGE ERROR: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:22.473620Z node 200 :YQ_CONTROL_PLANE_STORAGE ERROR: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:24.113420Z node 201 :YQ_CONTROL_PLANE_STORAGE ERROR: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:25.930785Z node 202 :YQ_CONTROL_PLANE_STORAGE ERROR: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:27.379999Z node 203 :YQ_CONTROL_PLANE_STORAGE ERROR: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000
2025-03-28T09:42:28.831662Z node 204 :YQ_CONTROL_PLANE_STORAGE ERROR: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 |98.5%| [TM] {RESULT} ydb/core/fq/libs/control_plane_proxy/ut/unittest >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start >> test_crud.py::TestYdbCrudOperations::test_crud_operations |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 [GOOD] >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> BulkUpsert::BulkUpsert [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer [GOOD] >> QuoterWithKesusTest::UpdatesCountersForParentResources [GOOD] >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt >> DataShardStats::HasSchemaChanges_ByKeyFilter [GOOD] >> DataShardStats::HasSchemaChanges_Columns >> TPqWriterTest::TestWriteToTopic |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TPqWriterTest::TestWriteToTopic [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> BulkUpsert::BulkUpsert [GOOD] |98.6%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] >> test_drain.py::TestHive::test_drain_on_stop >> TPqWriterTest::TestWriteToTopicMultiBatch >> TPqWriterTest::TestWriteToTopicMultiBatch [GOOD] >> TPqWriterTest::TestDeferredWriteToTopic |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TPqWriterTest::TestDeferredWriteToTopic [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TPqWriterTest::WriteNonExistentTopic >> TPqWriterTest::WriteNonExistentTopic [GOOD] >> TPqWriterTest::TestCheckpoints |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> 
test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TPqWriterTest::TestCheckpoints [GOOD] >> TPqWriterTest::TestCheckpointWithEmptyBatch |98.6%| [TA] $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TA] {RESULT} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] >> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD] >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt [GOOD] >> QuoterWithKesusTest::CanKillKesusWhenUsingIt |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeMainWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:39:19.839766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:19.839857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:19.839910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:19.839952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:19.839989Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:19.840008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:19.840049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:19.840105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:19.840334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:19.906469Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:39:19.906513Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:39:19.912019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:19.912128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:19.912254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:19.916715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:19.916840Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:19.917267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:19.917428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:19.918728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:19.919680Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:19.919725Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:19.919827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:19.919857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:19.919883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:19.919985Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: 
[1:219:2058] recipient: [1:212:2213] 2025-03-28T09:39:19.925343Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:39:20.004248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:20.004434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:20.004600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:20.004818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:20.004856Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:20.006599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:20.006715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:20.006841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:20.006879Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:20.006905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:20.006928Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:20.008308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:20.008346Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:20.008380Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:20.009793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:20.009838Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:20.009895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:20.009941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:20.012535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 
2025-03-28T09:39:20.013614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:20.013742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:20.014464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:20.014548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:20.014581Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:20.014795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:20.014832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:20.014947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:20.015005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:20.016358Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:20.016396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:20.016511Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:20.016541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:39:20.016686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:20.016714Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:20.016795Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:20.016816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:20.016838Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:20.016859Z no ... 
ct: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false 
DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:43:26.854634Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:43:26.854966Z node 107 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 364us result status StatusSuccess 2025-03-28T09:43:26.855957Z node 107 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 
ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:43:26.867296Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:843:2674] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-28T09:43:26.867440Z node 107 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][107:782:2674] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-03-28T09:43:26.867629Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:843:2674] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743155006817141 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743155006817141 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743155006817141 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:43:26.870741Z node 107 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409549:2][72075186233409546][107:843:2674] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-03-28T09:43:26.870859Z node 107 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409549:2][107:782:2674] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> DataShardStats::HasSchemaChanges_Columns [GOOD] >> DataShardStats::HasSchemaChanges_Families >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] [FAIL] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/pq_async_io/ut/unittest >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] Test command err: 2025-03-28T09:39:48.983833Z node 1 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [1:7486792809695323672:2053], metadatafields: , partitions: 666 2025-03-28T09:39:48.984138Z node 1 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-03-28T09:39:48.984220Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([1:7486792809695323672:2053]) 2025-03-28T09:39:48.984400Z node 1 :KQP_COMPUTE DEBUG: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. TEvCoordinatorChanged, new coordinator [1:7486792809695323673:2054] 2025-03-28T09:39:48.984458Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorRequest to coordinator [1:7486792809695323673:2054], partIds: 666 cookie 1 2025-03-28T09:39:48.984775Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. Received TEvCoordinatorResult from [1:7486792809695323673:2054], cookie 1 2025-03-28T09:39:48.984798Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. 
UpdateSessions, Sessions size 0 2025-03-28T09:39:48.984805Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. Distribution is changed, remove sessions 2025-03-28T09:39:48.984846Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. Create session to [1:7486792809695323675:2056], generation 1 2025-03-28T09:39:48.984911Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. Send TEvStartSession to [1:7486792809695323675:2056], connection id 1 partitions offsets (666 / ), 2025-03-28T09:39:48.985310Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. Received TEvStartSessionAck from [1:7486792809695323675:2056], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-03-28T09:39:48.985937Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. Received TEvNewDataArrived from [1:7486792809695323675:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-03-28T09:39:48.991389Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. Received TEvMessageBatch from [1:7486792809695323675:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-03-28T09:39:48.991434Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 1 2025-03-28T09:39:48.991443Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 2 2025-03-28T09:39:48.991467Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 1000 2025-03-28T09:39:48.991568Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. NextOffset 2 2025-03-28T09:39:48.991583Z node 1 :KQP_COMPUTE TRACE: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. Return 2 rows, buffer size 0, free space 948, result size 52 2025-03-28T09:39:48.991996Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. PassAway 2025-03-28T09:39:48.992107Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. State: used buffer size 0 ready buffer event size 0 state 3 InFlyAsyncInputData 0 Counters: GetAsyncInputData 1 CoordinatorChanged 1 CoordinatorResult 0 MessageBatch 1 StartSessionAck 1 NewDataArrived 1 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 0 Retry 0 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 NotifyCA 1 [1:7486792809695323675:2056] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partId 666 next offset 2 is waiting batch 0 has pending data 0 2025-03-28T09:39:48.992131Z node 1 :KQP_COMPUTE INFO: SelfId: [1:7486792809695323678:2048], TxId: query_1, task: 0. PQ source. Send StopSession to [1:7486792809695323675:2056] generation 1 2025-03-28T09:39:49.302238Z node 2 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [2:7486792814219324471:2053], metadatafields: , partitions: 666 2025-03-28T09:39:49.302520Z node 2 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. 
GetAsyncInputData freeSpace = 12345 2025-03-28T09:39:49.302546Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([2:7486792814219324471:2053]) 2025-03-28T09:39:49.302799Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. TEvCoordinatorChanged, new coordinator [2:7486792814219324472:2054] 2025-03-28T09:39:49.302831Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorRequest to coordinator [2:7486792814219324472:2054], partIds: 666 cookie 1 2025-03-28T09:39:49.303022Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. Received TEvCoordinatorResult from [2:7486792814219324472:2054], cookie 1 2025-03-28T09:39:49.303049Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. UpdateSessions, Sessions size 0 2025-03-28T09:39:49.303057Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. Distribution is changed, remove sessions 2025-03-28T09:39:49.303086Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. Create session to [2:7486792814219324474:2056], generation 1 2025-03-28T09:39:49.303115Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. Send TEvStartSession to [2:7486792814219324474:2056], connection id 1 partitions offsets (666 / ), 2025-03-28T09:39:49.303278Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. Received TEvStartSessionAck from [2:7486792814219324474:2056], seqNo 0, ConfirmedSeqNo 0, generation 1 2025-03-28T09:39:49.303337Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. Received TEvNewDataArrived from [2:7486792814219324474:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-03-28T09:39:49.304146Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. Received TEvMessageBatch from [2:7486792814219324474:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-03-28T09:39:49.304229Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 1 2025-03-28T09:39:49.304239Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 2 2025-03-28T09:39:49.304266Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 1000 2025-03-28T09:39:49.304345Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. NextOffset 2 2025-03-28T09:39:49.304357Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. Return 2 rows, buffer size 0, free space 948, result size 52 2025-03-28T09:39:49.304518Z node 2 :KQP_COMPUTE DEBUG: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. Received TEvUndelivered, TSystem::Undelivered from [2:7486792814219324474:2056], reason Disconnected, cookie 999 2025-03-28T09:39:49.304565Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. 
Received TEvNewDataArrived from [2:7486792814219324474:2056], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2025-03-28T09:39:49.305051Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. Received TEvRetry, EventQueueId 1 2025-03-28T09:39:49.305085Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. Received TEvMessageBatch from [2:7486792814219324474:2056], seqNo 0, ConfirmedSeqNo 0 generation 1 2025-03-28T09:39:49.305094Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. TEvMessageBatch NextOffset 3 2025-03-28T09:39:49.305111Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 1000 2025-03-28T09:39:49.305147Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. NextOffset 3 2025-03-28T09:39:49.305153Z node 2 :KQP_COMPUTE TRACE: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. Return 1 rows, buffer size 0, free space 974, result size 26 2025-03-28T09:39:49.305601Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. PassAway 2025-03-28T09:39:49.305707Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. State: used buffer size 0 ready buffer event size 0 state 3 InFlyAsyncInputData 0 Counters: GetAsyncInputData 1 CoordinatorChanged 1 CoordinatorResult 0 MessageBatch 2 StartSessionAck 1 NewDataArrived 2 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 1 Retry 1 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 NotifyCA 2 [2:7486792814219324474:2056] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partId 666 next offset 3 is waiting batch 0 has pending data 0 2025-03-28T09:39:49.305728Z node 2 :KQP_COMPUTE INFO: SelfId: [2:7486792814219324477:2048], TxId: query_1, task: 0. PQ source. Send StopSession to [2:7486792814219324474:2056] generation 1 2025-03-28T09:39:49.686268Z node 3 :KQP_COMPUTE INFO: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [3:7486792812109342904:2053], metadatafields: , partitions: 666 2025-03-28T09:39:49.686490Z node 3 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2025-03-28T09:39:49.686527Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7486792812109342910:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([3:7486792812109342904:2053]) 2025-03-28T09:39:49.686772Z node 3 :KQP_COMPUTE DEBUG: SelfId: [3:7486792812109342910:2048], TxId: query_1, task: 0. PQ source. TEvCoordinatorChanged, new coordinator [3:7486792812109342905:2054] 2025-03-28T09:39:49.686863Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7486792812109342910:2048], TxId: query_1, task: 0. PQ source. Send TEvCoordinatorRequest to coordinator [3:7486792812109342905:2054], partIds: 666 cookie 1 2025-03-28T09:39:49.687197Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7486792812109342910:2048], TxId: query_1, task: 0. PQ source. Received TEvCoordinatorResult from [3:7486792812109342905:2054], cookie 1 2025-03-28T09:39:49.687223Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7486792812109342910:2048], TxId: query_1, task: 0. PQ source. 
UpdateSessions, Sessions size 0 2025-03-28T09:39:49.687237Z node 3 :KQP_COMPUTE INFO: SelfId: [3:7486792812109342910:2048], TxId: query_1, task: 0. PQ source. Distribution is chan ... e: 0 2025-03-28T09:43:24.807154Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [98e81fa2-c6c45595-dc848392-2ab9c429] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743155004807 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:43:24.807299Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [98e81fa2-c6c45595-dc848392-2ab9c429] Write session established. Init response: last_seq_no: 5 session_id: "98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0" 2025-03-28T09:43:24.807361Z :TRACE: [local] TRACE_EVENT InitResponse partition_id=0 session_id=98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0 2025-03-28T09:43:24.807424Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0] MessageGroupId [98e81fa2-c6c45595-dc848392-2ab9c429] Write session: set DirectWriteToPartitionId 0 2025-03-28T09:43:24.807545Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0] PartitionId [0] Generation [0] Get partition location async, partition 0, delay 0.000000s 2025-03-28T09:43:24.807606Z :TRACE: [local] TRACE_EVENT DescribePartitionRequest path=local/Checkpoints partition_id=0 2025-03-28T09:43:24.807761Z :DEBUG: [local] [local] [eb424861-641bd751-826a865a-127922d0] [] Successfully connected. Initializing session 2025-03-28T09:43:24.810465Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0] PartitionId [0] Generation [0] Getting partition location, partition 0 2025-03-28T09:43:24.813042Z :INFO: [local] [local] [eb424861-641bd751-826a865a-127922d0] [] Server session id: test_client_1_22_1464072323402762909_v1 2025-03-28T09:43:24.813115Z :DEBUG: [local] [local] [eb424861-641bd751-826a865a-127922d0] [] In ContinueReadingDataImpl, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2025-03-28T09:43:24.813440Z :DEBUG: [local] [local] [eb424861-641bd751-826a865a-127922d0] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-03-28T09:43:24.818712Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0] PartitionId [0] Generation [0] Got PartitionLocation response. Status SUCCESS, proto: partition { active: true partition_location { node_id: 1 generation: 1 } } 2025-03-28T09:43:24.818783Z :TRACE: [local] TRACE_EVENT DescribePartitionResponse partition_id=0 active=1 pl_node_id=1 pl_generation=1 2025-03-28T09:43:24.818848Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0] PartitionId [0] Generation [0] GetPreferredEndpoint: partitionId 0, partitionNodeId 1 exists in the endpoint pool. 
2025-03-28T09:43:24.818896Z :TRACE: [local] TRACE_EVENT PreferredPartitionLocation Endpoint= NodeId=1 Generation=1 2025-03-28T09:43:24.818950Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0] PartitionId [0] Generation [1] Start write session. Will connect to nodeId: 1 2025-03-28T09:43:24.822213Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2025-03-28T09:43:24.825466Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0] PartitionId [0] Generation [1] Write session: direct write to partition: 0, generation 1 2025-03-28T09:43:24.825646Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0] PartitionId [0] Generation [1] Write session: send init request: init_request { path: "Checkpoints" producer_id: "98e81fa2-c6c45595-dc848392-2ab9c429" partition_with_generation { generation: 1 } } 2025-03-28T09:43:24.825717Z :TRACE: [local] TRACE_EVENT InitRequest pwg_partition_id=0 pwg_generation=1 2025-03-28T09:43:24.826135Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0] PartitionId [0] Generation [1] Write session: OnWriteDone gRpcStatusCode: 0 2025-03-28T09:43:24.826662Z :INFO: [local] [local] [eb424861-641bd751-826a865a-127922d0] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "Checkpoints". Partition: 0. Read offset: (NULL) 2025-03-28T09:43:24.832781Z :DEBUG: [local] [local] [eb424861-641bd751-826a865a-127922d0] [] Got ReadResponse, serverBytesSize = 940, now ReadSizeBudget = 0, ReadSizeServerDelta = 52427860 2025-03-28T09:43:24.832991Z :DEBUG: [local] [local] [eb424861-641bd751-826a865a-127922d0] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52427860 2025-03-28T09:43:24.833367Z :DEBUG: [local] Decompression task done. Partition/PartitionSessionId: 1 (0-4) 2025-03-28T09:43:24.833445Z :DEBUG: [local] [local] [eb424861-641bd751-826a865a-127922d0] [] Returning serverBytesSize = 940 to budget 2025-03-28T09:43:24.833512Z :DEBUG: [local] [local] [eb424861-641bd751-826a865a-127922d0] [] In ContinueReadingDataImpl, ReadSizeBudget = 940, ReadSizeServerDelta = 52427860 2025-03-28T09:43:24.833773Z :DEBUG: [local] [local] [eb424861-641bd751-826a865a-127922d0] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2025-03-28T09:43:24.833864Z :DEBUG: [local] Take Data. Partition 0. Read: {0, 0} (0-0) 2025-03-28T09:43:24.833910Z :DEBUG: [local] Take Data. Partition 0. Read: {1, 0} (1-1) 2025-03-28T09:43:24.833958Z :DEBUG: [local] Take Data. Partition 0. Read: {1, 1} (2-2) 2025-03-28T09:43:24.833985Z :DEBUG: [local] Take Data. Partition 0. Read: {2, 0} (3-3) 2025-03-28T09:43:24.834040Z :DEBUG: [local] Take Data. Partition 0. Read: {3, 0} (4-4) 2025-03-28T09:43:24.834374Z :DEBUG: [local] [local] [eb424861-641bd751-826a865a-127922d0] [] The application data is transferred to the client. 
Number of messages 5, size 5 bytes 2025-03-28T09:43:24.834431Z :DEBUG: [local] [local] [eb424861-641bd751-826a865a-127922d0] [] Returning serverBytesSize = 0 to budget 2025-03-28T09:43:24.835351Z :INFO: [local] [local] [eb424861-641bd751-826a865a-127922d0] Closing read session. Close timeout: 0.000000s 2025-03-28T09:43:24.835438Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-03-28T09:43:24.835511Z :INFO: [local] [local] [eb424861-641bd751-826a865a-127922d0] Counters: { Errors: 0 CurrentSessionLifetimeMs: 35 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:43:24.835704Z :NOTICE: [local] [local] [eb424861-641bd751-826a865a-127922d0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2025-03-28T09:43:24.835778Z :DEBUG: [local] [local] [eb424861-641bd751-826a865a-127922d0] [] Abort session to cluster 2025-03-28T09:43:24.837314Z :INFO: [local] [local] [eb424861-641bd751-826a865a-127922d0] Closing read session. Close timeout: 0.000000s 2025-03-28T09:43:24.837426Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2025-03-28T09:43:24.837494Z :INFO: [local] [local] [eb424861-641bd751-826a865a-127922d0] Counters: { Errors: 0 CurrentSessionLifetimeMs: 37 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:43:24.837658Z :NOTICE: [local] [local] [eb424861-641bd751-826a865a-127922d0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2025-03-28T09:43:24.840982Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2025-03-28T09:43:24.841027Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0] PartitionId [0] Generation [1] Write session will now close 2025-03-28T09:43:24.841075Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0] PartitionId [0] Generation [1] Write session: aborting 2025-03-28T09:43:24.841618Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2025-03-28T09:43:24.841693Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [98e81fa2-c6c45595-dc848392-2ab9c429|f0dab3d8-27a70655-30ded86-1b17f947_0] PartitionId [0] Generation [1] Write session: destroy 2025-03-28T09:43:26.071175Z node 40 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 0. Checkpoint: 1. Finished: 0 2025-03-28T09:43:26.194603Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [79536acf-fd7e11b5-dd3b9c3a-47e2b251] Write session: try to update token 2025-03-28T09:43:26.195406Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [79536acf-fd7e11b5-dd3b9c3a-47e2b251] Start write session. Will connect to nodeId: 0 2025-03-28T09:43:26.319548Z node 40 :KQP_COMPUTE DEBUG: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. [Checkpoint 0.0] Send checkpoint state immediately 2025-03-28T09:43:26.323293Z node 40 :KQP_COMPUTE TRACE: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Save checkpoint { Id: 0 Generation: 0 } state: { SourceId: "79536acf-fd7e11b5-dd3b9c3a-47e2b251" } 2025-03-28T09:43:26.339028Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [79536acf-fd7e11b5-dd3b9c3a-47e2b251] Write session: close. Timeout 0.000000s 2025-03-28T09:43:26.339090Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [79536acf-fd7e11b5-dd3b9c3a-47e2b251] Write session will now close 2025-03-28T09:43:26.339151Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [79536acf-fd7e11b5-dd3b9c3a-47e2b251] Write session: aborting 2025-03-28T09:43:26.339412Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [79536acf-fd7e11b5-dd3b9c3a-47e2b251] Write session: gracefully shut down, all writes complete 2025-03-28T09:43:26.339531Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. 
] SessionId [] MessageGroupId [79536acf-fd7e11b5-dd3b9c3a-47e2b251] Write session: destroy
|98.7%| [TM] {RESULT} ydb/tests/fq/pq_async_io/ut/unittest
>> TSelectFromViewTest::ReadTestCasesFromFiles [GOOD]
>> TSelectFromViewTest::QueryCacheIsUpdated
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large
>> test_alloc_default.py::TestAlloc::test_node_limit[kikimr0] [GOOD]
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test
>> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test
>> TColumnShardTestSchema::ColdCompactionSmoke [GOOD]
>> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD]
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/sql/py3test
>> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD]
|98.7%| [TM] {RESULT} ydb/tests/sql/py3test
|98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test
>> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD]
>> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session
|98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD]
Test command err: 2025-03-28T09:37:07.067409Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T09:37:07.134669Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T09:37:07.149308Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T09:37:07.149583Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T09:37:07.155871Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:37:07.156022Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:37:07.156180Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:37:07.156265Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:37:07.156332Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:37:07.156395Z node 1
:TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:37:07.156457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:37:07.156557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:37:07.156656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:37:07.156734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:37:07.156800Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:37:07.156858Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:37:07.174998Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T09:37:07.175293Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T09:37:07.175343Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T09:37:07.175486Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:37:07.175647Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:37:07.175713Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T09:37:07.175745Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T09:37:07.175821Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T09:37:07.175870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T09:37:07.175921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T09:37:07.175954Z node 1 
:TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T09:37:07.176074Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T09:37:07.176120Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T09:37:07.176148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T09:37:07.176172Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T09:37:07.176231Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T09:37:07.176266Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T09:37:07.176313Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T09:37:07.176340Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T09:37:07.176410Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T09:37:07.176454Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T09:37:07.176470Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T09:37:07.176501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T09:37:07.176537Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T09:37:07.176561Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T09:37:07.176883Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=31; 2025-03-28T09:37:07.176963Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=33; 2025-03-28T09:37:07.177022Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=26; 2025-03-28T09:37:07.177079Z node 1 
:TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=24; 2025-03-28T09:37:07.177198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T09:37:07.177236Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T09:37:07.177257Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T09:37:07.177390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T09:37:07.177419Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T09:37:07.177446Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T09:37:07.177559Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T09:37:07.177588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T09:37:07.177610Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T09:37:07.177785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T09:37:07.177835Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T09:37:07.177859Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T09:37:07.177939Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T09:37:07.177974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T09:37:07.178013Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
Object;id=[9437184:2:99:255:108:2776:0]; 2025-03-28T09:43:30.441665Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:109:2792:0]; 2025-03-28T09:43:30.441747Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:110:2776:0]; 2025-03-28T09:43:30.441815Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:111:2784:0]; 2025-03-28T09:43:30.441880Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:112:2760:0]; 2025-03-28T09:43:30.441942Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:113:2776:0]; 2025-03-28T09:43:30.442003Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:114:9568:0]; 2025-03-28T09:43:30.442060Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:115:2848:0]; 2025-03-28T09:43:30.442125Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:116:2840:0]; 2025-03-28T09:43:30.442193Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:117:2856:0]; 2025-03-28T09:43:30.442260Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:118:2856:0]; 2025-03-28T09:43:30.442344Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:119:2848:0]; 2025-03-28T09:43:30.442413Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:120:2840:0]; 2025-03-28T09:43:30.442480Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:121:2856:0]; 2025-03-28T09:43:30.442595Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:122:2848:0]; 2025-03-28T09:43:30.442666Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:123:2848:0]; 2025-03-28T09:43:30.442761Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:124:2856:0]; 2025-03-28T09:43:30.442832Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:125:2856:0]; 2025-03-28T09:43:30.442902Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:126:2848:0]; 2025-03-28T09:43:30.443003Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:127:2856:0]; 2025-03-28T09:43:30.443076Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:128:2832:0]; 2025-03-28T09:43:30.443143Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:129:2840:0]; 2025-03-28T09:43:30.443206Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:130:2848:0]; 2025-03-28T09:43:30.443272Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:131:2840:0]; 2025-03-28T09:43:30.443358Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:132:2840:0]; 2025-03-28T09:43:30.443426Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:133:2848:0]; 2025-03-28T09:43:30.443488Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:134:2848:0]; 2025-03-28T09:43:30.443553Z node 1 :S3_WRAPPER DEBUG: 
fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:135:2832:0]; 2025-03-28T09:43:30.443640Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:136:2848:0]; 2025-03-28T09:43:30.443708Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:137:2848:0]; 2025-03-28T09:43:30.443776Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:138:2840:0]; 2025-03-28T09:43:30.443839Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:139:2832:0]; 2025-03-28T09:43:30.443905Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:140:2840:0]; 2025-03-28T09:43:30.443996Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:141:2848:0]; 2025-03-28T09:43:30.444086Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:142:2848:0]; 2025-03-28T09:43:30.444153Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:143:2776:0]; 2025-03-28T09:43:30.444231Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:144:2792:0]; 2025-03-28T09:43:30.444303Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:145:2784:0]; 2025-03-28T09:43:30.444366Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:146:2784:0]; 2025-03-28T09:43:30.444429Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:147:2776:0]; 2025-03-28T09:43:30.444493Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:148:2792:0]; 2025-03-28T09:43:30.444553Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:149:2776:0]; 2025-03-28T09:43:30.444612Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:150:2768:0]; 2025-03-28T09:43:30.444702Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:151:2776:0]; 2025-03-28T09:43:30.444769Z node 1 :S3_WRAPPER DEBUG: fline=fake_storage.cpp:106;method=PutObject;id=[9437184:2:99:255:152:9576:0]; 2025-03-28T09:43:30.451097Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NOlap::NResourceBroker::NSubscribe::TEvStartTask;fline=actor.cpp:38;event=ask_resources;task=cpu=0;mem=2799958;external_task_id=1b5b6856-bb911f0-b5a3e6a0-f7a89464;type=CS::INDEXATION;priority=0;; 2025-03-28T09:43:30.452936Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=100;task=cpu=0;mem=3571882;external_task_id=1b5b1df6-bb911f0-8a8b64de-4b312fd3;type=CS::INDEXATION;priority=0;; 2025-03-28T09:43:30.453000Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=1b5b1df6-bb911f0-8a8b64de-4b312fd3;mem=3571882;cpu=0; 2025-03-28T09:43:30.453051Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=1b5b1df6-bb911f0-8a8b64de-4b312fd3;task_id=100;mem=3571882;cpu=0; 
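The ask_resources / result_resources / allocate_resources records just above trace a plain reserve-then-grant memory protocol: an indexation task asks the broker for a budget (TEvStartTask), the broker answers with a grant (TEvResourceAllocated), and only then is the memory booked against the task. The following is a minimal illustrative sketch of that accounting under assumed names and a FIFO policy; it is not YDB's actual NResourceBroker implementation.

```cpp
#include <cstdint>
#include <deque>
#include <functional>
#include <iostream>
#include <string>
#include <utility>

// Toy resource broker: grants memory to queued tasks in FIFO order,
// mirroring the ask_resources -> result_resources -> allocate_resources
// sequence in the log above. Purely illustrative.
class TToyResourceBroker {
public:
    explicit TToyResourceBroker(uint64_t limit) : Limit(limit) {}

    // "ask_resources": enqueue a request; grant immediately if budget allows.
    void Ask(const std::string& taskId, uint64_t mem,
             std::function<void()> onGranted) {
        Queue.push_back({taskId, mem, std::move(onGranted)});
        TryGrant();
    }

    // A finished task returns its budget, letting queued tasks proceed.
    void Release(uint64_t mem) {
        Used -= mem;
        TryGrant();
    }

private:
    struct TRequest {
        std::string TaskId;
        uint64_t Mem;
        std::function<void()> OnGranted;
    };

    void TryGrant() {
        while (!Queue.empty() && Used + Queue.front().Mem <= Limit) {
            TRequest req = std::move(Queue.front());
            Queue.pop_front();
            Used += req.Mem;  // "result_resources": the grant is recorded...
            std::cout << "granted task=" << req.TaskId
                      << " mem=" << req.Mem << "\n";
            req.OnGranted();  // ...and only then may the task allocate.
        }
    }

    uint64_t Limit;
    uint64_t Used = 0;
    std::deque<TRequest> Queue;
};

int main() {
    // Budgets echo the mem= values traced for tasks 100 and 101 above;
    // the 4 MB limit is an assumption chosen so the second ask must wait.
    TToyResourceBroker broker(4000000);
    broker.Ask("100", 3571882, [] { std::cout << "task 100 runs\n"; });
    broker.Ask("101", 2799958, [] { std::cout << "task 101 runs\n"; });
    broker.Release(3571882);  // task 100 done; task 101 is granted now
}
```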
2025-03-28T09:43:30.454348Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=actor.cpp:29;event=result_resources;task_id=101;task=cpu=0;mem=2799958;external_task_id=1b5b6856-bb911f0-b5a3e6a0-f7a89464;type=CS::INDEXATION;priority=0;; 2025-03-28T09:43:30.454401Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:9;event=resource_allocated;external_task_id=1b5b6856-bb911f0-b5a3e6a0-f7a89464;mem=2799958;cpu=0; 2025-03-28T09:43:30.454444Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;parent=[1:139:2171];ev_type=NKikimr::NResourceBroker::TEvResourceBroker::TEvResourceAllocated;fline=task.cpp:40;event=allocate_resources;external_task_id=1b5b6856-bb911f0-b5a3e6a0-f7a89464;task_id=101;mem=2799958;cpu=0; 2025-03-28T09:43:30.456388Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=1b5b1df6-bb911f0-8a8b64de-4b312fd3;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=1b5b1df6-bb911f0-8a8b64de-4b312fd3; 2025-03-28T09:43:32.539834Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=1b5b1df6-bb911f0-8a8b64de-4b312fd3;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-03-28T09:43:32.541315Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=1b5b6856-bb911f0-b5a3e6a0-f7a89464;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=1b5b6856-bb911f0-b5a3e6a0-f7a89464; 2025-03-28T09:43:34.308363Z node 1 :TX_COLUMNSHARD DEBUG: external_task_id=1b5b6856-bb911f0-b5a3e6a0-f7a89464;fline=actor.cpp:48;task=agents_waiting=0;additional_info=();; 2025-03-28T09:43:34.311443Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; 2025-03-28T09:43:34.326369Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:139:2171];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:50;event=TEvWriteIndex;count=2; 2025-03-28T09:43:34.994010Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 9437184 2025-03-28T09:43:34.994919Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[306] (CS::GENERAL) apply at tablet 9437184 2025-03-28T09:43:35.077515Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:125 Blob count: 422 2025-03-28T09:43:35.082327Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3445356;raw_bytes=5239242;count=3;records=53332} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=5183316;raw_bytes=7864534;count=3;records=80000} inactive {blob_bytes=215061504;raw_bytes=326598142;count=144;records=3322060} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=write_controller.h:65;event=IWriteController aborted;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=compacted_blob_constructor.cpp:47;event=TCompactedWriteController::DoAbort;reason=TTxWriteDraft aborted before complete; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TCompactedWriteController destructed with WriteIndexEv and WriteIndexEv->IndexChanges;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:105;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:150 :Tier '/cold' stopped at tablet 9437184
>> KqpLimits::CancelAfterRwTx-useSink [GOOD]
>> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can
------- [TM] {asan, default-linux-x86_64, release} ydb/core/quoter/ut/unittest >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD]
Test command err: 2025-03-28T09:41:03.148624Z node 1 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-03-28T09:41:03.148750Z node 1 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-03-28T09:41:03.154584Z node 1 :QUOTER_PROXY WARN: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2025-03-28T09:41:03.154653Z node 1 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-03-28T09:41:03.193748Z node 2 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-03-28T09:41:03.193865Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-03-28T09:41:03.194150Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-28T09:41:03.194226Z node 2 :QUOTER_PROXY WARN: [/Path/KesusName]: Disconnected from tablet 2025-03-28T09:41:03.194253Z node 2 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-03-28T09:41:03.194524Z node 2 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-28T09:41:03.213819Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-03-28T09:41:03.213937Z node 3 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-03-28T09:41:03.214217Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "/resource" 2025-03-28T09:41:03.214269Z node 3 :QUOTER_PROXY WARN: [/Path/KesusName]: Resource "/resource" has incorrect name. Maybe this was some error on client side.
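The QUOTER_SERVICE Feed/Charge records further down in this output (see the 09:43:30 entries below) walk through plain token-bucket arithmetic: the bucket refills by one unit per 0.1 s tick, a Charge of 5 drives Available to about -4, and DurationToFullfillInUs of roughly 500051 is just amount divided by the per-tick refill, times the tick length. Here is a hedged sketch of that arithmetic; it illustrates the accounting visible in the log and is not the actual Kesus HierarchicalDRR implementation.

```cpp
#include <cstdio>

// Toy token bucket mirroring the QUOTER_SERVICE accounting below:
// MaxUnitsPerSecond = 10 with a 100 ms tick gives one unit per tick.
struct TToyTokenBucket {
    double Available = 0.0;        // may go negative after a charge
    double UnitsPerSecond = 10.0;
    double TickSeconds = 0.1;

    // "Feed resource": one refill per tick.
    void Feed() { Available += UnitsPerSecond * TickSeconds; }

    // "Charge ... for 5": consume immediately, then report how long the
    // charged amount takes to refill, like the TicksToFullfill and
    // DurationToFullfillInUs fields in the log.
    double ChargeAndEstimateUs(double amount) {
        Available -= amount;
        double perTick = UnitsPerSecond * TickSeconds;  // 1 unit per tick
        double ticks = amount / perTick;                // ~5 ticks
        return ticks * TickSeconds * 1e6;               // ~500000 us
    }
};

int main() {
    TToyTokenBucket bucket;
    bucket.Feed();                                // Balance ~= 1
    double us = bucket.ChargeAndEstimateUs(5.0);  // Available -> -4
    std::printf("Available=%.3f RefillUs=%.0f\n", bucket.Available, us);
    // The log's 500050.9856 us differs slightly only because its per-tick
    // balance was 0.9998980392 rather than exactly 1.
}
```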
2025-03-28T09:41:03.214305Z node 3 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("/resource", Error: GenericError) 2025-03-28T09:41:03.214413Z node 3 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-28T09:41:03.214507Z node 3 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "resource//resource" 2025-03-28T09:41:03.214543Z node 3 :QUOTER_PROXY WARN: [/Path/KesusName]: Resource "resource//resource" has incorrect name. Maybe this was some error on client side. 2025-03-28T09:41:03.214592Z node 3 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("resource//resource", Error: GenericError) 2025-03-28T09:41:03.237209Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-03-28T09:41:03.237323Z node 4 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-03-28T09:41:03.237438Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-03-28T09:41:03.238782Z node 4 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-28T09:41:03.298854Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-03-28T09:41:03.298944Z node 4 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-03-28T09:41:03.298993Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-03-28T09:41:03.299089Z node 4 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-03-28T09:41:03.323441Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-03-28T09:41:03.327546Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-03-28T09:41:03.327810Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res0" 2025-03-28T09:41:03.328178Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-28T09:41:03.328529Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-03-28T09:41:03.328594Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res0" 2025-03-28T09:41:03.328664Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res0", 42) 2025-03-28T09:41:03.328740Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res0", Normal, {0: Front(1, 2)} }]) 2025-03-28T09:41:03.328878Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res1" 2025-03-28T09:41:03.328976Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Subscribe on resource "res1" 2025-03-28T09:41:03.329225Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-03-28T09:41:03.329267Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res1" 2025-03-28T09:41:03.329320Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res1", 43) 2025-03-28T09:41:03.329372Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2025-03-28T09:41:03.329577Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: 
ProxyRequest "res2" 2025-03-28T09:41:03.329677Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Subscribe on resource "res2" 2025-03-28T09:41:03.329901Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-03-28T09:41:03.329948Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res2" 2025-03-28T09:41:03.329998Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res2", 44) 2025-03-28T09:41:03.330063Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res2", Normal, {0: Front(1, 2)} }]) 2025-03-28T09:41:03.330294Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyStats([{"res1", Consumed: 0, Queue: 5}]) 2025-03-28T09:41:03.330338Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Set info for resource "res1": { Available: 1, QueueWeight: 5 } 2025-03-28T09:41:03.330381Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Activate session to "res1". Connected: 1 2025-03-28T09:41:03.339449Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 43 ConsumeResource: true Amount: inf } ActorID { RawX1: 5 RawX2: 21474838532 } }) 2025-03-28T09:41:03.339809Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2025-03-28T09:41:03.340238Z node 5 :QUOTER_PROXY WARN: [/Path/KesusName]: Disconnected from tablet 2025-03-28T09:41:03.340273Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Reconnecting to kesus 2025-03-28T09:41:03.340409Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Mark "res1" for offline allocation. Connected: 0, SessionIsActive: 1, AverageDuration: 0.100000s, AverageAmount: 0.5 2025-03-28T09:41:03.340459Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: Schedule offline allocation in 0.000000s: [{ "res1", 0.5 }] 2025-03-28T09:41:03.343222Z node 5 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-28T09:41:03.345428Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2025-03-28T09:41:03.345492Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res0" 2025-03-28T09:41:03.345563Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res1" 2025-03-28T09:41:03.345599Z node 5 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res2" 2025-03-28T09:41:03.345705Z node 5 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res0", Normal, {0: Front(1, 2)} }, { "res1", Normal, {0: Front(1, 2)} }, { "res2", Normal, {0: Front(1, 2)} }]) 2025-03-28T09:41:03.361388Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2025-03-28T09:41:03.361506Z node 6 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-03-28T09:41:03.361818Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-03-28T09:41:03.362016Z node 6 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-28T09:41:03.362311Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-03-28T09:41:03.362355Z node 6 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-03-28T09:41:03.362404Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-03-28T09:41:03.362495Z node 6 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-03-28T09:41:03.384045Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2025-03-28T09:41:03.384189Z node 7 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-03-28T09:41:03.384331Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-03-28T09:41:03.384648Z node 7 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-28T09:41:03.384986Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2025-03-28T09:41:03.385033Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Initialized new session with resource "res" 2025-03-28T09:41:03.385079Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxySession("res", 42) 2025-03-28T09:41:03.385149Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-03-28T09:41:03.385487Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyStats([{"res", Consumed: 0, Queue: 25}]) 2025-03-28T09:41:03.385533Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: Set info for resource "res": { Available: 20, QueueWeight: 25 } 2025-03-28T09:41:03.385600Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Activate session to "res". Connected: 1 2025-03-28T09:41:03.385709Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 42 ConsumeResource: true Amount: inf } ActorID { RawX1: 5 RawX2: 30064773124 } }) 2025-03-28T09:41:03.385776Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2025-03-28T09:41:03.385956Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: ProxyCloseSession("res", 42) 2025-03-28T09:41:03.386006Z node 7 :QUOTER_PROXY INFO: [/Path/KesusName]: Deactivate session to "res". Connected: 1 2025-03-28T09:41:03.386099Z node 7 :QUOTER_PROXY TRACE: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 42 } ActorID { RawX1: 5 RawX2: 30064773124 } }) 2025-03-28T09:41:03.409196Z node 8 :QUOTER_PROXY INFO: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2025-03-28T09:41:03.409331Z node 8 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Connecting to kesus 2025-03-28T09:41:03.409453Z node 8 :QUOTER_PROXY INFO: [/Path/KesusName]: ProxyRequest "res" 2025-03-28T09:41:03.409607Z node 8 :QUOTER_PROXY DEBUG: [/Path/KesusName]: Successfully connected to tablet 2025-03-28T09:41:03.409925Z node 8 :QUOTER_PROXY TRACE: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 H ... { Available: -4.000101961, QueueWeight: 5 } 2025-03-28T09:43:30.167824Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-28T09:43:30.167958Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-28T09:43:30.203049Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7486793758006222338:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-03-28T09:43:30.203567Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-03-28T09:43:30.203627Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-03-28T09:43:30.203679Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-28T09:43:30.204092Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-28T09:43:30.267797Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-03-28T09:43:30.267853Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-03-28T09:43:30.367000Z 2025-03-28T09:43:30.267877Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-03-28T09:43:30.268086Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-03-28T09:43:30.268140Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -3.000101961, QueueWeight: 5 } 2025-03-28T09:43:30.268200Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-28T09:43:30.268300Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-28T09:43:30.303034Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7486793758006222338:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-03-28T09:43:30.303961Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-03-28T09:43:30.304033Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-03-28T09:43:30.304103Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-28T09:43:30.304152Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-28T09:43:30.367343Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-03-28T09:43:30.367413Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. 
Time: 2025-03-28T09:43:30.467000Z 2025-03-28T09:43:30.367442Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-03-28T09:43:30.371020Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-03-28T09:43:30.371078Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -2.000101961, QueueWeight: 5 } 2025-03-28T09:43:30.371156Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-28T09:43:30.374977Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-28T09:43:30.400665Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7486793758006222338:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-03-28T09:43:30.403209Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-03-28T09:43:30.403270Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-03-28T09:43:30.403330Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-28T09:43:30.403607Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-28T09:43:30.467571Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-03-28T09:43:30.467629Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-03-28T09:43:30.567000Z 2025-03-28T09:43:30.467655Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-03-28T09:43:30.467853Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-03-28T09:43:30.467890Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -1.000101961, QueueWeight: 5 } 2025-03-28T09:43:30.467944Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-28T09:43:30.468063Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-28T09:43:30.501679Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7486793758006222338:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-03-28T09:43:30.502249Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-03-28T09:43:30.502319Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-03-28T09:43:30.502373Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-28T09:43:30.502440Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-28T09:43:30.568359Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-03-28T09:43:30.568411Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. 
Time: 2025-03-28T09:43:30.667000Z 2025-03-28T09:43:30.568437Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-03-28T09:43:30.568782Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2025-03-28T09:43:30.568818Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -0.0001019607931, QueueWeight: 5 } 2025-03-28T09:43:30.568863Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-28T09:43:30.569035Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-28T09:43:30.601230Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7486793758006222338:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-03-28T09:43:30.601952Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-03-28T09:43:30.602005Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-03-28T09:43:30.602091Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(0.9998980392, 2)} }]) 2025-03-28T09:43:30.602364Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-28T09:43:30.667369Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0.9998980392. FreeBalance: 0.9998980392 2025-03-28T09:43:30.667435Z node 49 :QUOTER_SERVICE TRACE: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2025-03-28T09:43:30.767000Z 2025-03-28T09:43:30.667462Z node 49 :QUOTER_SERVICE TRACE: Allocate resource "Resource" 2025-03-28T09:43:30.667530Z node 49 :QUOTER_SERVICE TRACE: Charge "Resource" for 5. Balance: 0.9998980392. FreeBalance: 0.9998980392. TicksToFullfill: 5.000509856. DurationToFullfillInUs: 500050.9856. TimeToFullfill: 2025-03-28T09:43:30.164695Z. Now: 2025-03-28T09:43:30.667022Z. LastAllocated: 2025-03-28T09:43:29.664644Z 2025-03-28T09:43:30.667779Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 5, Queue: 0}]) 2025-03-28T09:43:30.667814Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -4.000101961, QueueWeight: 0 } 2025-03-28T09:43:30.667865Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-28T09:43:30.668161Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-28T09:43:30.700940Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7486793758006222338:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-03-28T09:43:30.701780Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-03-28T09:43:30.701848Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-03-28T09:43:30.701911Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-28T09:43:30.701964Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-28T09:43:30.770805Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. 
FreeBalance: 0 2025-03-28T09:43:30.805492Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7486793758006222338:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-03-28T09:43:30.807528Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-03-28T09:43:30.807584Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-03-28T09:43:30.807632Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-28T09:43:30.807702Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-28T09:43:30.807736Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-03-28T09:43:30.903004Z node 50 :KESUS_TABLET TRACE: [72075186224037888] Send TEvResourcesAllocated to [49:7486793758006222338:2303]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2025-03-28T09:43:30.903704Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2025-03-28T09:43:30.903773Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2025-03-28T09:43:30.903827Z node 49 :QUOTER_PROXY TRACE: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2025-03-28T09:43:30.903940Z node 49 :QUOTER_SERVICE DEBUG: ProxyUpdate for quoter /dc-1/KesusQuoter 2025-03-28T09:43:30.903964Z node 49 :QUOTER_SERVICE TRACE: Feed resource "Resource". Balance: 0. FreeBalance: 0 2025-03-28T09:43:33.300130Z node 49 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[49:7486793753711254190:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:43:33.300239Z node 49 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout;
|98.8%| [TM] {RESULT} ydb/core/quoter/ut/unittest
>> test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRwTx-useSink [GOOD]
Test command err: Trying to start YDB, gRPC: 22508, MsgBus: 12518 2025-03-28T09:39:25.420466Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792707968849041:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:25.420858Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00148a/r3tmp/tmpEv2pyI/pdisk_1.dat 2025-03-28T09:39:25.677738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:25.677829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:25.678545Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:25.680454Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22508, node 1
2025-03-28T09:39:25.734171Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:25.734202Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:25.734218Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:25.734391Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12518 TClient is connected to server localhost:12518 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:26.161735Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:26.189514Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:27.860264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792716558784687:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:27.860275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792716558784696:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:27.860385Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:27.864057Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:39:27.873208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792716558784701:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:39:27.953713Z node 1 :TX_PROXY ERROR: Actor# [1:7486792716558784752:2603] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:28.273831Z node 1 :KQP_COMPUTE WARN: fline=kqp_compute_actor_factory.cpp:40;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=2;memory=1048576; 2025-03-28T09:39:28.273874Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710661, task: 2. [Mem] memory 1048576 NOT granted 2025-03-28T09:39:28.286010Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792720853752092:2368], TxId: 281474976710661, task: 2. Ctx: { TraceId : 01jqe1xszha8aesndre3amsqr5. SessionId : ydb://session/3?node_id=1&id=MjgwNTNjN2ItMjU4MzkzM2YtYmY2NDBlZjItMjE3MWE1MGE=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-mdsjyjltvm, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-03-28T09:39:28.271616Z }, code: 2029 }. 2025-03-28T09:39:28.286537Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486792720853752090:2367], TxId: 281474976710661, task: 1. Ctx: { TraceId : 01jqe1xszha8aesndre3amsqr5. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MjgwNTNjN2ItMjU4MzkzM2YtYmY2NDBlZjItMjE3MWE1MGE=. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486792720853752078:2353], status: OVERLOADED, reason: {
: Error: Terminate execution } 2025-03-28T09:39:28.290487Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjgwNTNjN2ItMjU4MzkzM2YtYmY2NDBlZjItMjE3MWE1MGE=, ActorId: [1:7486792716558784669:2353], ActorState: ExecuteState, TraceId: 01jqe1xszha8aesndre3amsqr5, Create QueryResponse for error on request, msg:
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-mdsjyjltvm, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2025-03-28T09:39:28.271616Z } , code: 2029 Trying to start YDB, gRPC: 7229, MsgBus: 17566 2025-03-28T09:39:28.815479Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792721145267398:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:28.815569Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00148a/r3tmp/tmpOgkpOd/pdisk_1.dat 2025-03-28T09:39:28.889084Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 7229, node 2 2025-03-28T09:39:28.938766Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:28.938786Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:28.938793Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:28.938897Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:28.944894Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:28.944958Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:28.946449Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:17566 TClient is connected to server localhost:17566 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:29.327061Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:29.342253Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
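Two client-visible failure patterns dominate this run: OVERLOADED / KIKIMR_PRECONDITION_FAILED (code 2029) when a compute task cannot be granted MKQL memory, and CANCELLED pairs ("Request canceled after N ms" / "Cancelling after N ms", visible further down) produced by client-side deadlines. OVERLOADED is a retryable status, so a session-pool retry loop normally absorbs it. A minimal sketch of the knobs involved, using the YDB Python SDK — the endpoint, query, and timeout values are illustrative, not taken from the test:

```python
# Minimal sketch, not the test's own code: retrying OVERLOADED and setting the
# client-side cancellation deadline that produces CANCELLED entries like the
# ones in this log. Endpoint, query and durations are illustrative.
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:7229", database="/Root")  # placeholder
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def tx_body(session):
    # cancel_after ~0.48s mirrors the "Cancelling after 480ms" pattern seen
    # later in this log; the exact value here is illustrative.
    settings = ydb.BaseRequestSettings().with_cancel_after(0.48).with_timeout(0.5)
    return session.transaction(ydb.SerializableReadWrite()).execute(
        "SELECT 1 AS ok;", commit_tx=True, settings=settings
    )

# retry_operation_sync re-runs the callable with backoff on retryable statuses
# such as OVERLOADED instead of surfacing the first failure.
result_sets = pool.retry_operation_sync(
    tx_body, retry_settings=ydb.RetrySettings(max_retries=5)
)
```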
2025-03-28T09:39:31.282013Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792734030170348:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:31.282013Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792734030170339:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:31.282073Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:31.284694Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T09:39:31.291989Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486792734030170353:2359], DatabaseId: /Root, PoolId: default, Scheduled retry ... sponse for error on request, msg: 2025-03-28T09:43:22.420593Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7486793729675252301:2497] TxId: 281474976711059. Ctx: { TraceId: 01jqe24yjjage1xj1jmf07qm9v, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 471ms } {
: Error: Cancelling after 480ms during execution } ] 2025-03-28T09:43:22.420885Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486793729675252309:5777], TxId: 281474976711059, task: 2. Ctx: { SessionId : ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=. TraceId : 01jqe24yjjage1xj1jmf07qm9v. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7486793729675252301:2497], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:43:22.421214Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486793729675252310:5778], TxId: 281474976711059, task: 3. Ctx: { SessionId : ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=. CustomerSuppliedId : . TraceId : 01jqe24yjjage1xj1jmf07qm9v. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7486793729675252301:2497], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:43:22.421486Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486793729675252311:5779], TxId: 281474976711059, task: 4. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=. TraceId : 01jqe24yjjage1xj1jmf07qm9v. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7486793729675252301:2497], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:43:22.421817Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486793729675252313:5781], TxId: 281474976711059, task: 6. Ctx: { TraceId : 01jqe24yjjage1xj1jmf07qm9v. SessionId : ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7486793729675252301:2497], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:43:22.422019Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486793729675252316:5784], TxId: 281474976711059, task: 9. Ctx: { CustomerSuppliedId : . TraceId : 01jqe24yjjage1xj1jmf07qm9v. SessionId : ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7486793729675252301:2497], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:43:22.422633Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=, ActorId: [4:7486793291588571728:2497], ActorState: ExecuteState, TraceId: 01jqe24yjjage1xj1jmf07qm9v, Create QueryResponse for error on request, msg: 2025-03-28T09:43:23.409978Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7486793733970219756:2497] TxId: 281474976711063. Ctx: { TraceId: 01jqe24zhqcvtt3ms45kg9z34q, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 473ms } {
: Error: Cancelling after 473ms during execution } ] 2025-03-28T09:43:23.413211Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486793733970219771:5813], TxId: 281474976711063, task: 8. Ctx: { SessionId : ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=. CustomerSuppliedId : . TraceId : 01jqe24zhqcvtt3ms45kg9z34q. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7486793733970219756:2497], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:43:23.415626Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=, ActorId: [4:7486793291588571728:2497], ActorState: ExecuteState, TraceId: 01jqe24zhqcvtt3ms45kg9z34q, Create QueryResponse for error on request, msg: 2025-03-28T09:43:24.394924Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=, ActorId: [4:7486793291588571728:2497], ActorState: ExecuteState, TraceId: 01jqe250gddaddpecx61jkv8xh, Create QueryResponse for error on request, msg: 2025-03-28T09:43:25.977459Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7486793742560154746:2497] TxId: 281474976711072. Ctx: { TraceId: 01jqe252072yyft7qsm9hq8z85, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 478ms } {
: Error: Cancelling after 529ms during execution } ] 2025-03-28T09:43:25.977683Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=, ActorId: [4:7486793291588571728:2497], ActorState: ExecuteState, TraceId: 01jqe252072yyft7qsm9hq8z85, Create QueryResponse for error on request, msg: 2025-03-28T09:43:27.926105Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7486793751150089612:2497] TxId: 281474976711080. Ctx: { TraceId: 01jqe253yjd67q6s237zbdy7kn, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 482ms } {
: Error: Cancelling after 482ms during execution } ] 2025-03-28T09:43:27.926405Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=, ActorId: [4:7486793291588571728:2497], ActorState: ExecuteState, TraceId: 01jqe253yjd67q6s237zbdy7kn, Create QueryResponse for error on request, msg: 2025-03-28T09:43:33.159703Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=, ActorId: [4:7486793291588571728:2497], ActorState: ExecuteState, TraceId: 01jqe2591c61492d7j8tjbqb12, Create QueryResponse for error on request, msg: 2025-03-28T09:43:34.140503Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7486793781214861660:2497] TxId: 281474976711104. Ctx: { TraceId: 01jqe259zd50ambj4gc17wwb4d, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. CANCELLED: [ {
: Error: Request canceled after 495ms } {
: Error: Cancelling after 526ms during execution } ] 2025-03-28T09:43:34.140704Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486793781214861670:6112], TxId: 281474976711104, task: 3. Ctx: { TraceId : 01jqe259zd50ambj4gc17wwb4d. SessionId : ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7486793781214861660:2497], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:43:34.140718Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486793781214861684:6118], TxId: 281474976711104, task: 9. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=. TraceId : 01jqe259zd50ambj4gc17wwb4d. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7486793781214861660:2497], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:43:34.141018Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486793781214861671:6113], TxId: 281474976711104, task: 4. Ctx: { TraceId : 01jqe259zd50ambj4gc17wwb4d. SessionId : ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7486793781214861660:2497], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:43:34.141221Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486793781214861672:6114], TxId: 281474976711104, task: 5. Ctx: { TraceId : 01jqe259zd50ambj4gc17wwb4d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7486793781214861660:2497], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:43:34.141226Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486793781214861667:6110], TxId: 281474976711104, task: 1. Ctx: { TraceId : 01jqe259zd50ambj4gc17wwb4d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7486793781214861660:2497], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:43:34.141423Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486793781214861674:6115], TxId: 281474976711104, task: 6. Ctx: { SessionId : ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=. CustomerSuppliedId : . TraceId : 01jqe259zd50ambj4gc17wwb4d. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7486793781214861660:2497], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:43:34.141470Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486793781214861669:6111], TxId: 281474976711104, task: 2. Ctx: { TraceId : 01jqe259zd50ambj4gc17wwb4d. CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7486793781214861660:2497], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:43:34.141605Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486793781214861675:6116], TxId: 281474976711104, task: 7. Ctx: { CustomerSuppliedId : . TraceId : 01jqe259zd50ambj4gc17wwb4d. SessionId : ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [4:7486793781214861660:2497], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:43:34.141657Z node 4 :KQP_COMPUTE ERROR: SelfId: [4:7486793781214861677:6117], TxId: 281474976711104, task: 8. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=. TraceId : 01jqe259zd50ambj4gc17wwb4d. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [4:7486793781214861660:2497], status: CANCELLED, reason: {
: Error: Terminate execution } 2025-03-28T09:43:34.142128Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=YjNhNGM0MjMtZWJlZDkzNGItNGVmNTczMTktNjVkMGFmZDk=, ActorId: [4:7486793291588571728:2497], ActorState: ExecuteState, TraceId: 01jqe259zd50ambj4gc17wwb4d, Create QueryResponse for error on request, msg: |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] >> DataShardStats::HasSchemaChanges_Families [GOOD] >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_stats/unittest >> DataShardStats::HasSchemaChanges_Families [GOOD] Test command err: 2025-03-28T09:40:08.090664Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:08.091360Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:08.091454Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/0006d5/r3tmp/tmprFlckn/pdisk_1.dat 2025-03-28T09:40:08.687484Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:08.763067Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:08.788813Z node 1 :TABLET_SAUSAGECACHE NOTICE: Update config MemoryLimit: 33554432 2025-03-28T09:40:08.835153Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:08.836565Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:08.851548Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:08.996082Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:09.079613Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828672, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvBoot 2025-03-28T09:40:09.080782Z node 1 :TX_DATASHARD TRACE: StateInit, received event# 268828673, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvRestored 2025-03-28T09:40:09.082504Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:666:2570] 2025-03-28T09:40:09.082856Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:09.140176Z node 1 :TX_DATASHARD TRACE: StateInactive, received event# 268828684, Sender [1:657:2564], Recipient [1:666:2570]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T09:40:09.140952Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:09.141170Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:40:09.148551Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:40:09.148766Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:40:09.148860Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:40:09.150195Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:40:09.150399Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:40:09.150492Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:682:2570] in generation 1 2025-03-28T09:40:09.161291Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:40:09.193411Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:40:09.194781Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in 
state WaitScheme: missing processing params 2025-03-28T09:40:09.194954Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:684:2580] 2025-03-28T09:40:09.194991Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:40:09.195066Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:40:09.195112Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:09.195362Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:40:09.195420Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:40:09.197211Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:40:09.197322Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:40:09.198927Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:09.199031Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:09.199082Z node 1 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:40:09.199117Z node 1 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:40:09.199167Z node 1 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:40:09.199199Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:40:09.199263Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:09.199407Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:673:2574], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:40:09.199456Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:40:09.199504Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:663:2568], serverId# [1:673:2574], sessionId# [0:0:0] 2025-03-28T09:40:09.199582Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [1:409:2404], Recipient [1:673:2574] 2025-03-28T09:40:09.199618Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:40:09.199812Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:40:09.200142Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2025-03-28T09:40:09.200209Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:40:09.201014Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:40:09.201175Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2025-03-28T09:40:09.201236Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2025-03-28T09:40:09.201330Z node 1 :TX_DATASHARD TRACE: Add 
[0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2025-03-28T09:40:09.201365Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:40:09.201733Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2025-03-28T09:40:09.201769Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2025-03-28T09:40:09.201807Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:40:09.201841Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:40:09.201899Z node 1 :TX_DATASHARD TRACE: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2025-03-28T09:40:09.201944Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:40:09.201987Z node 1 :TX_DATASHARD TRACE: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2025-03-28T09:40:09.202017Z node 1 :TX_DATASHARD TRACE: Trying to execute [0:281474976715657] at 72075186224037888 on unit WaitForPlan 2025-03-28T09:40:09.202043Z node 1 :TX_DATASHARD TRACE: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2025-03-28T09:40:09.203828Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269746185, Sender [1:685:2581], Recipient [1:666:2570]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2025-03-28T09:40:09.203885Z node 1 :TX_DATASHARD DEBUG: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2025-03-28T09:40:09.215322Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:40:09.215403Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2025-03-28T09:40:09.215453Z node 1 :TX_DATASHARD TRACE: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2025-03-28T09:40:09.215503Z node 1 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: PREPARED 2025-03-28T09:40:09.215575Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme 2025-03-28T09:40:09.387218Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [1:705:2595], Recipient [1:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:40:09.387277Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:40:09.387313Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:703:2593], serverId# [1:705:2595], sessionId# [0:0:0] 2025-03-28T09:40:09.387733Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:666:2570]: {TEvPlanStep step# 1000 MediatorId# 72057594046382081 TabletID 72075186224037888} 2025-03-28T09:40:09.387776Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T09:40:09.387881Z node 1 :TX_DATASHARD TRACE: Trying to execute [1000:281474976715657] at 72075186224037888 on unit WaitForPlan 
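For orientation: the trace above shows a datashard serving a scheme transaction (here, table creation during test setup) — the proposal passes through the execution units CheckSchemeTx, StoreSchemeTx and FinishPropose, then parks in WaitForPlan until the coordinator's plan step arrives and moves it into PlanQueue. A hedged client-side sketch of the call that drives this path; the path and columns are hypothetical, not from the test:

```python
# Hypothetical table: CreateTable is what ultimately becomes the scheme tx
# that a datashard proposes, stores and plans as in the trace above.
import ydb

driver = ydb.Driver(endpoint="grpc://localhost:2135", database="/Root")  # placeholder
driver.wait(timeout=5)
pool = ydb.SessionPool(driver)

def create_table(session):
    session.create_table(
        "/Root/sample_table",  # illustrative path
        ydb.TableDescription()
        .with_column(ydb.Column("id", ydb.OptionalType(ydb.PrimitiveType.Uint64)))
        .with_column(ydb.Column("value", ydb.OptionalType(ydb.PrimitiveType.Utf8)))
        .with_primary_keys("id"),
    )

pool.retry_operation_sync(create_table)
```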
2025-03-28T09:40:09.387920Z node 1 :TX_DATASHARD TRACE: Execution status for [1000:281474976715657] at 72075186224037888 is Executed 2025-03-28T09:40:09.387979Z node 1 :TX_DATASHARD TRACE: Advance execution plan for [1000:281474976715657] at 72075186224037888 executing on unit WaitForPlan 2025-03-28T09:40:09.388018Z node 1 :TX_DATASHARD TRACE: Add [1000:281474976715657] at 72075186224037888 to execution unit PlanQueue 2025-03-28T09:40:09.392336Z node 1 :TX_DATASHARD DEBUG: Planned transaction txId 281474976715657 at step 1000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2025-03-28T09:40:09.392413Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:09.394315Z node 1 :TX_DATASHARD TRACE: StateWork, received event# 2146435072, Sender [1:666:2570], Recipient [1:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2025-03-28T09:40:09.394363Z node 1 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressTransaction 2025-03-28T09:40:09.394423Z ... lanQueue at 72075186224037888 2025-03-28T09:43:41.829124Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:43:41.829190Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:43:41.831856Z node 13 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 25500} 2025-03-28T09:43:41.831991Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:43:41.836328Z node 13 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:43:41.836408Z node 13 :TX_DATASHARD TRACE: Complete execution for [25500:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-03-28T09:43:41.836528Z node 13 :TX_DATASHARD DEBUG: Complete [25500 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [13:409:2404], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:43:41.836623Z node 13 :TX_DATASHARD INFO: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715664 state Ready TxInFly 0 2025-03-28T09:43:41.836814Z node 13 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:43:41.840689Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877760, Sender [13:1160:2974], Recipient [13:931:2762]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046644480 Status: OK ServerId: [13:1165:2979] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T09:43:41.840756Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T09:43:41.842134Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269552132, Sender [13:409:2404], Recipient [13:931:2762]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715664 2025-03-28T09:43:41.842195Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2025-03-28T09:43:41.842277Z node 13 :TX_DATASHARD DEBUG: Handle TEvSchemaChangedResult 281474976715664 datashard 72075186224037888 state Ready 2025-03-28T09:43:41.842387Z node 13 :TX_DATASHARD DEBUG: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 waiting for schema changes 2025-03-28T09:43:41.855403Z node 13 
:TX_DATASHARD TRACE: StateWork, received event# 269877763, Sender [13:1160:2974], Recipient [13:931:2762]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046644480 ClientId: [13:1160:2974] ServerId: [13:1165:2979] } 2025-03-28T09:43:41.855507Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2025-03-28T09:43:42.768713Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:931:2762]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:43:42.768825Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-03-28T09:43:42.769138Z node 13 :TABLET_STATS_BUILDER INFO: Skipped at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount 1, with schema changes 2025-03-28T09:43:42.769351Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 2 Round: 9 TableStats { DataSize: 130 RowCount: 3 IndexSize: 82 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 20 HasLoanedParts: false Channels { Channel: 1 DataSize: 65 IndexSize: 82 } Channels { Channel: 2 DataSize: 65 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: true LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 2364 Memory: 124352 Storage: 254 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 13 StartTime: 5451 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-28T09:43:42.770809Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [13:1198:3012], Recipient [13:931:2762]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:43:42.770924Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:43:42.771019Z node 13 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [13:1197:3011], serverId# [13:1198:3012], sessionId# [0:0:0] 2025-03-28T09:43:42.771330Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 269553210, Sender [13:1196:3010], Recipient [13:931:2762]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 2 } CompactBorrowed: false 2025-03-28T09:43:42.771567Z node 13 :TX_DATASHARD INFO: Started background compaction# 3 of 72075186224037888 tableId# 2 localTid# 1001, requested from [13:1196:3010], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2025-03-28T09:43:42.773338Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 2, ts 1970-01-01T00:00:20.452024Z 2025-03-28T09:43:42.773415Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-03-28T09:43:42.773534Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 2, front# 3 2025-03-28T09:43:42.779601Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: starting for mixed index 2025-03-28T09:43:42.788168Z node 13 :TX_DATASHARD TRACE: StateWork, 
received event# 268828683, Sender [13:929:2761], Recipient [13:931:2762]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-03-28T09:43:42.788892Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: finished for mixed index ready: 1 stats: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-03-28T09:43:42.789020Z node 13 :TABLET_STATS_BUILDER INFO: Stats at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount: 1, with schema changes, LoadedSize 82, Spent{time=0.000s,wait=0.000s,interrupts=2} 2025-03-28T09:43:42.789583Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [13:1203:3016], Recipient [13:931:2762]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-03-28T09:43:42.789677Z node 13 :TABLET_STATS_BUILDER INFO: Result received at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-03-28T09:43:42.789817Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 2025-03-28T09:43:42.808969Z node 13 :TX_DATASHARD DEBUG: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 3, ts 1970-01-01T00:00:30.452024Z 2025-03-28T09:43:42.809088Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-03-28T09:43:42.809143Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 3, front# 3 2025-03-28T09:43:42.809220Z node 13 :TX_DATASHARD DEBUG: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [13:1196:3010]pathId# [OwnerId: 72057594046644480, LocalPathId: 2] 2025-03-28T09:43:42.811645Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: starting for mixed index 2025-03-28T09:43:42.812038Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 268828683, Sender [13:929:2761], Recipient [13:931:2762]: NKikimr::TEvTablet::TEvFollowerGcApplied 2025-03-28T09:43:42.813870Z node 13 :TABLET_STATS_BUILDER TRACE: Building stats at datashard 72075186224037888, for tableId 2: finished for mixed index ready: 1 stats: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-03-28T09:43:42.814018Z node 13 :TABLET_STATS_BUILDER INFO: Stats at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount: 1, LoadedSize 82, Spent{time=0.000s,wait=0.000s,interrupts=2} 2025-03-28T09:43:42.814331Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435080, Sender [13:1210:3022], Recipient [13:931:2762]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvAsyncTableStats 2025-03-28T09:43:42.814398Z node 13 :TABLET_STATS_BUILDER INFO: Result received at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 2025-03-28T09:43:42.814486Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 waiting for no schema changes 2025-03-28T09:43:42.827056Z node 13 :TX_DATASHARD DEBUG: Updated last full compaction of tablet# 72075186224037888, tableId# 2, last 
full compaction# 1970-01-01T00:00:30.452024Z 2025-03-28T09:43:43.719126Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [13:931:2762]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2025-03-28T09:43:43.719261Z node 13 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2025-03-28T09:43:43.719416Z node 13 :TX_DATASHARD TRACE: No cleanup at 72075186224037888 outdated step 35000 last cleanup 0 2025-03-28T09:43:43.719541Z node 13 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:43:43.719652Z node 13 :TX_DATASHARD TRACE: Check unit PlanQueue at 72075186224037888 2025-03-28T09:43:43.719728Z node 13 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037888 has no attached operations 2025-03-28T09:43:43.719824Z node 13 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037888 2025-03-28T09:43:43.720032Z node 13 :TX_DATASHARD TRACE: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:931:2762]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2025-03-28T09:43:43.720090Z node 13 :TABLET_STATS_BUILDER INFO: UpdateTableStats at datashard 72075186224037888 2025-03-28T09:43:43.720324Z node 13 :TABLET_STATS_BUILDER INFO: Skipped at datashard 72075186224037888, for tableId 2: RowCount: 3 DataSize: 130 IndexSize: 82 ByKeyFilterSize: 0 RowCountHistogram: 0 DataSizeHistogram: 0 PartCount 1 2025-03-28T09:43:43.720524Z node 13 :TX_DATASHARD TRACE: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 2 Captured TEvDataShard::TEvPeriodicTableStats DatashardId: 72075186224037888 TableLocalId: 2 Generation: 2 Round: 12 TableStats { DataSize: 130 RowCount: 3 IndexSize: 82 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 30 HasLoanedParts: false Channels { Channel: 1 DataSize: 80 IndexSize: 82 } Channels { Channel: 2 DataSize: 50 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1863 Memory: 124352 Storage: 254 } ShardState: 2 UserTablePartOwners: 72075186224037888 NodeId: 13 StartTime: 5451 TableOwnerId: 72057594046644480 FollowerId: 0 >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session [GOOD] |98.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/view/unittest >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] Test command err: Trying to start YDB, gRPC: 17068, MsgBus: 1104 2025-03-28T09:39:53.147014Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792831890173233:2066];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:53.147084Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath 
# /home/runner/.ya/build/build_root/2909/000964/r3tmp/tmpHgmusD/pdisk_1.dat 2025-03-28T09:39:53.518999Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:53.570503Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:53.570621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:53.574030Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 17068, node 1 2025-03-28T09:39:53.744147Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:53.744182Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:53.744193Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:53.744346Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1104 TClient is connected to server localhost:1104 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:54.453982Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:39:55.902055Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792840480108483:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:55.902183Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 8977, MsgBus: 30673 2025-03-28T09:39:57.220075Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792845635765766:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:57.220154Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000964/r3tmp/tmpWh2H6u/pdisk_1.dat 2025-03-28T09:39:57.288004Z node 2 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8977, node 2 2025-03-28T09:39:57.335832Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:39:57.335848Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:39:57.335853Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:39:57.335930Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:57.349019Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:57.349117Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:57.350562Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30673 TClient is connected to server localhost:30673 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:57.665167Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:40:00.124868Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792858520668312:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:00.124963Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 19072, MsgBus: 14995 2025-03-28T09:40:00.720213Z node 3 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7486792859544932955:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:00.720321Z node 3 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000964/r3tmp/tmpkvyLMd/pdisk_1.dat 2025-03-28T09:40:00.802164Z node 3 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19072, node 3 2025-03-28T09:40:00.839113Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:00.839229Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:00.841279Z node 3 :HIVE WARN: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:00.858744Z node 3 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:40:00.858770Z node 3 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:40:00.858776Z node 3 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:40:00.858910Z node 3 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14995 TClient is connected to server localhost:14995 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:40:01.315010Z node 3 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:40:04.047265Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7486792876724802779:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:40:04.047384Z node 3 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 26603, MsgBus: 26475 2025-03-28T09:40:04.717331Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486792876970265427:2224];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000964/r3tmp/tmpqiE8Xe/pdisk_1.dat 2025-03-28T09:40:04.733272Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:40:04.787747Z node 4 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26603, node 4 2025-03-28T09:40:04.821148Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:04.821253Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:04.823112Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:04.867419Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:40:04.867447Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:40:04.867453Z node 4 :NET_CLASS ... "Select")) '())) $7 (Void))) (let $9 (DataSink 'result)) (let $10 (ResPull! (Left! $8) $9 (Key) (Nth (Right! $8) '0) '('('type) '('autoref)) '"kikimr")) (return (Commit! (Commit! $10 $9) $1 $7)) ) 2025-03-28 09:43:29.392 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] yql_optimize.cpp:135: KqpLogical-ApplyExtractMembersToReadTableRanges 2025-03-28 09:43:29.399 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] yql_optimize.cpp:135: KqpLogical-RewriteAggregate 2025-03-28 09:43:29.410 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildReadTableRangesStage 2025-03-28 09:43:29.421 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] yql_optimize.cpp:135: KqpPhysical-PushAggregateCombineToStage 2025-03-28 09:43:29.433 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-03-28 09:43:29.449 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-03-28 09:43:29.465 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-03-28 09:43:29.487 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] yql_optimize.cpp:135: KqpPhysical-ExpandAggregatePhase 2025-03-28 09:43:29.519 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildShuffleStage 2025-03-28 09:43:29.538 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildSortStage 2025-03-28 09:43:29.570 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] yql_optimize.cpp:135: KqpPhysical-RewriteKqpReadTable 2025-03-28 09:43:29.853 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] yql_optimize.cpp:135: KqpPeepholeFinal-SetCombinerMemoryLimit 2025-03-28 09:43:30.030 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] kqp_host.cpp:1382: 
Compiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/count_episodes")) '('typeId (String '"VIEW"))) (Void) '('('mode 'dropObject)))) ) 2025-03-28 09:43:30.053 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropObject! world $1 '"/Root/count_episodes" '"VIEW" '() '0)) (return (Commit! $2 $1 '('('"mode" '"flush")))) ) 2025-03-28 09:43:30.141 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Right! (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/view_series"))) (Void) '()))) (let $2 '('('"query_ast" (RemoveSystemMembers (PersistableRepr (SqlProject $1 '((SqlProjectStarItem (TypeOf $1) '"" (lambda '($3) $3) '())))))) '('"query_text" '"SELECT * FROM `/Root/view_series`") '('"security_invoker" (Bool '"true")))) (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/read_from_one_view")) '('typeId (String '"VIEW"))) (Void) '('('mode 'createObject) '('features $2)))) ) 2025-03-28 09:43:30.205 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 '('('"query_ast" (Right! (KiReadTable! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/series"))) (Void) '()))) '('"query_text" '"SELECT * FROM `/Root/view_series`") '('"security_invoker" (Bool '"true")))) (let $3 (KiCreateObject! world $1 '"/Root/read_from_one_view" '"VIEW" $2 '0 '0)) (return (Commit! $3 $1 '('('"mode" '"flush")))) ) 2025-03-28 09:43:30.297 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] kqp_host.cpp:1382: Compiled query: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/read_from_one_view"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Right! $1)) (let $4 (Write! (Left! $1) $2 (Key) (RemoveSystemMembers (Sort (PersistableRepr (SqlProject $3 '((SqlProjectStarItem (TypeOf $3) '"" (lambda '($5) $5) '())))) (Bool 'true) (lambda '($6) (PersistableRepr (Member $6 '"series_id"))))) '('('type) '('autoref)))) (return (Commit! $4 $2)) ) 2025-03-28 09:43:30.479 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EDAF0640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (Sort (Right! (KiReadTable! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/series"))) (Void) '())) (Bool 'true) (lambda '($7) (Member $7 '"series_id")))) (let $3 '('('"mode" '"flush"))) (let $4 (KiExecDataQuery! world $1 (DataQueryBlocks (TKiDataQueryBlock '('($2 '() '0)) (KiEffects) '('('"db" '"/Root/series" '"Select")) '())) $3 (Void))) (let $5 (DataSink 'result)) (let $6 (ResPull! (Left! $4) $5 (Key) (Nth (Right! $4) '0) '('('type) '('autoref)) '"kikimr")) (return (Commit! (Commit! 
$6 $5) $1 $3)) ) 2025-03-28 09:43:30.490 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EDAF0640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildReadTableRangesStage 2025-03-28 09:43:30.496 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EDAF0640) [KQP] yql_optimize.cpp:135: KqpPhysical-BuildSortStage 2025-03-28 09:43:30.503 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EDAF0640) [KQP] yql_optimize.cpp:135: KqpPhysical-RemoveRedundantSortByPk 2025-03-28 09:43:30.510 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EDAF0640) [KQP] yql_optimize.cpp:135: KqpPhysical-RewriteKqpReadTable 2025-03-28 09:43:30.701 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] kqp_host.cpp:1382: Compiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/read_from_one_view")) '('typeId (String '"VIEW"))) (Void) '('('mode 'dropObject)))) ) 2025-03-28 09:43:30.709 INFO ydb-core-kqp-ut-view(pid=699510, tid=0x00007F59EE43E640) [KQP] kqp_transform.cpp:33: Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropObject! world $1 '"/Root/read_from_one_view" '"VIEW" '() '0)) (return (Commit! $2 $1 '('('"mode" '"flush")))) ) Trying to start YDB, gRPC: 19661, MsgBus: 24728 2025-03-28T09:43:32.527469Z node 23 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7486793769239759415:2059];send_to=[0:7307199536658146131:7762515]; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000964/r3tmp/tmpvTD5oH/pdisk_1.dat 2025-03-28T09:43:32.794921Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T09:43:33.036491Z node 23 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:43:33.058274Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:43:33.058480Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:43:33.061142Z node 23 :HIVE WARN: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19661, node 23 2025-03-28T09:43:33.215741Z node 23 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:43:33.215775Z node 23 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:43:33.215796Z node 23 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:43:33.216029Z node 23 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:24728 TClient is connected to server localhost:24728 WaitRootIsUp 'Root'... 
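The compiled-query dumps above show view DDL passing through KQP: KiCreateObject!/KiDropObject! carrying a stored query_text and a security_invoker flag. A minimal YQL sketch that would compile to expressions of this shape (the view names, the stored query text, and the security_invoker flag are taken from the log; the exact statements the test issues are an assumption):

    CREATE VIEW `/Root/read_from_one_view` WITH (security_invoker = TRUE) AS
        SELECT * FROM `/Root/view_series`;
    -- matches the Sort by series_id in the optimized plan above:
    SELECT * FROM `/Root/read_from_one_view` ORDER BY series_id;
    DROP VIEW `/Root/read_from_one_view`;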
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:43:34.682093Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:43:37.504643Z node 23 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[23:7486793769239759415:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:43:37.504793Z node 23 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:43:40.817719Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7486793803599498466:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:43:40.818662Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:43:40.883579Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7486793803599498496:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:43:40.883771Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:43:40.884518Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7486793803599498501:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:43:40.891449Z node 23 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480 2025-03-28T09:43:40.918071Z node 23 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [23:7486793803599498503:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T09:43:40.975656Z node 23 :TX_PROXY ERROR: Actor# [23:7486793803599498554:2368] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |98.8%| [TM] {RESULT} ydb/core/kqp/ut/view/unittest >> TDataShardRSTest::TestCleanupInRS-UseSink [GOOD] >> TDataShardRSTest::TestDelayedRSAckForUnknownTx >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] >> TDataShardRSTest::TestDelayedRSAckForUnknownTx [GOOD] >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] [GOOD] >> test_drain.py::TestHive::test_drain_tablets [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] [GOOD] >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown |98.8%| [TA] $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionCommit |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] |98.9%| [TA] $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges >> test_drain.py::TestHive::test_drain_on_stop [FAIL] >> test_alloc_default.py::TestAlloc::test_up_down[kikimr0] >> TDataShardRSTest::TestGenericReadSetDecisionCommit [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionAbort |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |98.9%| [TA] {RESULT} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |98.9%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_tablets [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_rs/unittest >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] Test command err: 2025-03-28T09:40:58.783080Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:40:58.783685Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:40:58.783755Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00065e/r3tmp/tmpj6M0Tx/pdisk_1.dat 2025-03-28T09:40:59.389629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:40:59.454622Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:59.509205Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:59.509742Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:59.522549Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:40:59.658354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:40:59.725216Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:682:2579] 2025-03-28T09:40:59.725583Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:59.782826Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:59.783087Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:40:59.791777Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2025-03-28T09:40:59.791898Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037888 2025-03-28T09:40:59.791962Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037888 2025-03-28T09:40:59.805206Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:40:59.805598Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:40:59.805678Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037888 persisting started state actor id [1:716:2579] in generation 1 2025-03-28T09:40:59.807552Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:686:2581] 2025-03-28T09:40:59.807752Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:59.820971Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:59.821378Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:689:2583] 2025-03-28T09:40:59.821635Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:59.835896Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:40:59.837462Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2025-03-28T09:40:59.837539Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037889 2025-03-28T09:40:59.837589Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037889 2025-03-28T09:40:59.837933Z node 1 
:TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:40:59.839480Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:40:59.839548Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037889 persisting started state actor id [1:741:2581] in generation 1 2025-03-28T09:40:59.840301Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:59.840428Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:40:59.841873Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2025-03-28T09:40:59.841934Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037891 2025-03-28T09:40:59.841985Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037891 2025-03-28T09:40:59.842291Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:40:59.842554Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:40:59.842604Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037891 persisting started state actor id [1:745:2583] in generation 1 2025-03-28T09:40:59.846310Z node 1 :TX_DATASHARD INFO: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:695:2585] 2025-03-28T09:40:59.846527Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:40:59.857266Z node 1 :TX_DATASHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:40:59.857374Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Execute 2025-03-28T09:40:59.859159Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2025-03-28T09:40:59.859238Z node 1 :TX_DATASHARD DEBUG: LoadLockChangeRecords at tablet: 72075186224037890 2025-03-28T09:40:59.859296Z node 1 :TX_DATASHARD DEBUG: LoadChangeRecordCommits at tablet: 72075186224037890 2025-03-28T09:40:59.859621Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInit::Complete 2025-03-28T09:40:59.859765Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Execute 2025-03-28T09:40:59.859823Z node 1 :TX_DATASHARD DEBUG: DataShard 72075186224037890 persisting started state actor id [1:753:2585] in generation 1 2025-03-28T09:40:59.871005Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:40:59.902629Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037888 2025-03-28T09:40:59.904499Z node 1 :TX_DATASHARD DEBUG: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:40:59.904705Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037888, actorId: [1:758:2621] 2025-03-28T09:40:59.904760Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037888 2025-03-28T09:40:59.904799Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2025-03-28T09:40:59.904853Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:40:59.906040Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:40:59.906118Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037889 2025-03-28T09:40:59.906206Z node 1 :TX_DATASHARD DEBUG: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:40:59.906278Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037889, actorId: [1:759:2622] 
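The datashard records around here trace a distributed transaction between tablets 72075186224037888 and 72075186224037889: each shard prepares its part, the shards exchange TEvReadSet messages to agree on the outcome, and in the abort scenario the transaction completes with EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure), as logged further below. A hedged YQL sketch of the kind of query that exercises this path; the table name, keys, and values are assumptions, the point is a single transaction that reads a row owned by one shard and writes a row owned by another:

    -- executed as one interactive transaction:
    SELECT value FROM `/Root/table-1` WHERE key = 1;          -- row served by one shard
    UPSERT INTO `/Root/table-1` (key, value) VALUES (11, 2);  -- row served by the other shard
    COMMIT;  -- the commit decision requires the TEvReadSet exchange seen below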
2025-03-28T09:40:59.906306Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037889 2025-03-28T09:40:59.906333Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2025-03-28T09:40:59.906394Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:40:59.906989Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037888 2025-03-28T09:40:59.907108Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2025-03-28T09:40:59.907209Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:40:59.907243Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037891 2025-03-28T09:40:59.907311Z node 1 :TX_DATASHARD DEBUG: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:40:59.907389Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037891, actorId: [1:760:2623] 2025-03-28T09:40:59.907421Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037891 2025-03-28T09:40:59.907463Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2025-03-28T09:40:59.907502Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2025-03-28T09:40:59.907775Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037888 2025-03-28T09:40:59.907821Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:59.908500Z node 1 :TX_DATASHARD INFO: No tx to execute at 72075186224037888 TxInFly 0 2025-03-28T09:40:59.908599Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:40:59.908679Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037889 2025-03-28T09:40:59.908746Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2025-03-28T09:40:59.908827Z node 1 :TX_DATASHARD DEBUG: TDataShard::TTxInitRestored::Complete 2025-03-28T09:40:59.908891Z node 1 :TX_DATASHARD INFO: Switched to work state WaitScheme tabletId 72075186224037890 2025-03-28T09:40:59.909000Z node 1 :TX_DATASHARD DEBUG: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2025-03-28T09:40:59.909059Z node 1 :TX_DATASHARD DEBUG: Change sender created: at tablet: 72075186224037890, actorId: [1:761:2624] 2025-03-28T09:40:59.909083Z node 1 :TX_DATASHARD DEBUG: Trying to activate change sender: at tablet: 72075186224037890 2025-03-28T09:40:59.909106Z node 1 :TX_DATASHARD INFO: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2025-03-28T09:40:59.909128Z node 1 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2025-03-28T09:40:59.909842Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [1:671:2573], serverId# [1:712:2596], sessionId# [0:0:0] 2025-03-28T09:40:59.909907Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Execute at 72075186224037889 2025-03-28T09:40:59.909939Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:40:59.909990Z node 1 :TX_DATASHARD 
INFO: No tx to execute at 72075186224037889 TxInFly 0 2025-03-28T09:40:59.910035Z node 1 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:40:59.910346Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:40:59.911984Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2025-03-28T09:40:59.912108Z node 1 :TX_DATASHARD DEBUG: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2025-03-28T09:40:59.912658Z node 1 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037889, clientId# [1:672:2574], serverId# [1:718:2599], sessionId# [0:0:0] 2025-03-28T09:40:59.912715Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Execute at 72075186224037891 2025-03-28T09:40:59.912789Z node 1 :TX_DATASHARD DEBUG: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037891 2025-03-28T09:40:59.913051Z node 1 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037889 2025-03-28T09:40:59.913208Z node 1 :TX_DATASHARD DEBUG: Propose scheme transaction at tablet 720751862240 ... 6 :TX_DATASHARD TRACE: Execution status for [2043:281474976715664] at 72075186224037889 is Executed 2025-03-28T09:44:22.654054Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [2043:281474976715664] at 72075186224037889 executing on unit CompletedOperations 2025-03-28T09:44:22.654078Z node 6 :TX_DATASHARD TRACE: Execution plan for [2043:281474976715664] at 72075186224037889 has finished 2025-03-28T09:44:22.654122Z node 6 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T09:44:22.654149Z node 6 :TX_DATASHARD TRACE: Check candidate unit PlanQueue at 72075186224037889 2025-03-28T09:44:22.654175Z node 6 :TX_DATASHARD TRACE: TPlanQueueUnit at 72075186224037889 has no attached operations 2025-03-28T09:44:22.654202Z node 6 :TX_DATASHARD TRACE: Unit PlanQueue has no ready operations at 72075186224037889 2025-03-28T09:44:22.655065Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2043} 2025-03-28T09:44:22.655563Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269877761, Sender [6:1003:2808], Recipient [6:666:2570]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T09:44:22.655604Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T09:44:22.655646Z node 6 :TX_DATASHARD DEBUG: Server connected at leader tablet# 72075186224037888, clientId# [6:1000:2805], serverId# [6:1003:2808], sessionId# [0:0:0] 2025-03-28T09:44:22.655720Z node 6 :TX_DATASHARD DEBUG: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 2043} 2025-03-28T09:44:22.655942Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [6:757:2635], Recipient [6:666:2570]: {TEvReadSet step# 2043 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-28T09:44:22.655981Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:44:22.656037Z node 6 :TX_DATASHARD DEBUG: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715664 2025-03-28T09:44:22.656128Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037888 got 
read set: {TEvReadSet step# 2043 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2025-03-28T09:44:22.656330Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037888 2025-03-28T09:44:22.656383Z node 6 :TX_DATASHARD TRACE: Complete execution for [2043:281474976715664] at 72075186224037888 on unit CompleteOperation 2025-03-28T09:44:22.656464Z node 6 :TX_DATASHARD DEBUG: Complete [2043 : 281474976715664] from 72075186224037888 at tablet 72075186224037888 send result to client [6:993:2755], exec latency: 0 ms, propose latency: 0 ms 2025-03-28T09:44:22.656533Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:44:22.656690Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037888 2025-03-28T09:44:22.656759Z node 6 :TX_DATASHARD DEBUG: Send RS Reply at 72075186224037888 {TEvReadSet step# 2043 txid# 281474976715664 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 0 Seqno# 0 Flags# 7} ... nodata readset 2025-03-28T09:44:22.656881Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269287425, Sender [6:666:2570], Recipient [6:757:2635]: {TEvReadSet step# 2043 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-03-28T09:44:22.656912Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvTxProcessing::TEvReadSet 2025-03-28T09:44:22.656943Z node 6 :TX_DATASHARD DEBUG: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715664 2025-03-28T09:44:22.656995Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2043 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 3} 2025-03-28T09:44:22.657043Z node 6 :TX_DATASHARD TRACE: Processed readset without data from 72075186224037888 to 72075186224037889 at tablet 72075186224037889 2025-03-28T09:44:22.657847Z node 6 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=6&id=NDJjMjk3MDItZmYzMDI0Ny1lYjkyOWU3YS1jZjc4MjcxYw==, ActorId: [6:938:2755], ActorState: ExecuteState, TraceId: 01jqe26srke8fsndpp6n4mktq1, Create QueryResponse for error on request, msg: 2025-03-28T09:44:22.658375Z node 6 :TX_PROXY DEBUG: actor# [6:59:2106] Handle TEvExecuteKqpTransaction 2025-03-28T09:44:22.658416Z node 6 :TX_PROXY DEBUG: actor# [6:59:2106] TxId# 281474976715665 ProcessProposeKqpTransaction 2025-03-28T09:44:22.658787Z node 6 :TX_DATASHARD DEBUG: TTxProgressTransaction::Complete at 72075186224037889 2025-03-28T09:44:22.658821Z node 6 :TX_DATASHARD TRACE: Complete execution for [2043:281474976715664] at 72075186224037889 on unit CompleteOperation 2025-03-28T09:44:22.658877Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2025-03-28T09:44:22.659146Z node 6 :TX_DATASHARD DEBUG: TTxReadSet::Complete at 72075186224037889 2025-03-28T09:44:22.659473Z node 6 :TX_DATASHARD ERROR: Complete [2043 : 281474976715664] from 72075186224037889 at tablet 72075186224037889, error: EXECUTION_CANCELLED (Distributed transaction aborted due to commit failure) | 2025-03-28T09:44:22.659563Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at 
tablet# 72075186224037889 2025-03-28T09:44:22.659848Z node 6 :KQP_EXECUTER ERROR: TxId: 281474976715665. Ctx: { TraceId: 01jqe26srke8fsndpp6n4mktq1, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=6&id=NDJjMjk3MDItZmYzMDI0Ny1lYjkyOWU3YS1jZjc4MjcxYw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T09:44:22.660207Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 269549568, Sender [6:1004:2755], Recipient [6:666:2570]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1004 RawX2: 25769806531 } TxBody: " \0018\001j5\010\001\032\'\n#\t\216\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010\0028\000 \003\"\006\020\0020\000@\n8\001\220\001\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2025-03-28T09:44:22.660243Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvDataShard::TEvProposeTransaction 2025-03-28T09:44:22.660360Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 2146435074, Sender [6:666:2570], Recipient [6:666:2570]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2025-03-28T09:44:22.660397Z node 6 :TX_DATASHARD TRACE: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2025-03-28T09:44:22.660483Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Execute at 72075186224037888 2025-03-28T09:44:22.660708Z node 6 :TX_DATASHARD TRACE: -- AddWriteRange: (Uint64 : 281474976715662, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 2) table: [1:997:0] 2025-03-28T09:44:22.660827Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2025-03-28T09:44:22.660891Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-28T09:44:22.660949Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2025-03-28T09:44:22.660994Z node 6 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2025-03-28T09:44:22.661038Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2025-03-28T09:44:22.661084Z node 6 :TX_DATASHARD TRACE: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2043/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v2000/18446744073709551615 ImmediateWriteEdge# v2001/0 ImmediateWriteEdgeReplied# v2001/0 2025-03-28T09:44:22.661149Z node 6 :TX_DATASHARD TRACE: Activated operation [0:281474976715665] at 72075186224037888 2025-03-28T09:44:22.661202Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-28T09:44:22.661228Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2025-03-28T09:44:22.661250Z node 6 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2025-03-28T09:44:22.661275Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2025-03-28T09:44:22.661335Z node 6 :TX_DATASHARD TRACE: Operation [0:281474976715665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193444 2025-03-28T09:44:22.661435Z node 6 :TX_DATASHARD TRACE: KqpEraseLock LockId: 281474976715662 DataShard: 
72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 2 HasWrites: false 2025-03-28T09:44:22.661508Z node 6 :TX_DATASHARD TRACE: add locks to result: 0 2025-03-28T09:44:22.661559Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-28T09:44:22.661585Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2025-03-28T09:44:22.661609Z node 6 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2025-03-28T09:44:22.661635Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-03-28T09:44:22.661683Z node 6 :TX_DATASHARD TRACE: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2025-03-28T09:44:22.661765Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2025-03-28T09:44:22.661801Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2025-03-28T09:44:22.661848Z node 6 :TX_DATASHARD TRACE: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2025-03-28T09:44:22.661890Z node 6 :TX_DATASHARD TRACE: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2025-03-28T09:44:22.661935Z node 6 :TX_DATASHARD TRACE: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2025-03-28T09:44:22.661960Z node 6 :TX_DATASHARD TRACE: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2025-03-28T09:44:22.661992Z node 6 :TX_DATASHARD TRACE: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2025-03-28T09:44:22.662058Z node 6 :TX_DATASHARD DEBUG: TTxProposeTransactionBase::Complete at 72075186224037888 2025-03-28T09:44:22.662125Z node 6 :TX_DATASHARD TRACE: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2025-03-28T09:44:22.662183Z node 6 :TX_DATASHARD DEBUG: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2025-03-28T09:44:22.663403Z node 6 :TX_DATASHARD TRACE: StateWork, received event# 275709965, Sender [6:61:2108], Recipient [6:666:2570]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715662 LockNode: 6 Status: STATUS_NOT_FOUND |98.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_rs/unittest >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown >> KafkaProtocol::NativeKafkaBalanceScenario [GOOD] >> KafkaProtocol::InitProducerId_withoutTransactionalIdShouldReturnRandomInt |98.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/cms/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [FAIL] |98.9%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |98.9%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed1 [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 [GOOD] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> KafkaProtocol::InitProducerId_withoutTransactionalIdShouldReturnRandomInt [GOOD] >> KafkaProtocol::InitProducerId_forNewTransactionalIdShouldReturnRandomInt >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD] >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a >> test_alloc_default.py::TestAlloc::test_up_down[kikimr0] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c [GOOD] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 
[GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps >> test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate [GOOD] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item >> KafkaProtocol::InitProducerId_forNewTransactionalIdShouldReturnRandomInt [GOOD] >> KafkaProtocol::InitProducerId_forSqlInjectionShouldReturnWithoutDropingDatabase >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(a, b) from t1-Aggregation function Min requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] [GOOD] >> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario [GOOD] >> KafkaProtocol::InitProducerId_forSqlInjectionShouldReturnWithoutDropingDatabase [GOOD] >> KafkaProtocol::InitProducerId_forPreviouslySeenTransactionalIdShouldReturnSameProducerIdAndIncrementEpoch |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/py3test >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] |99.0%| [TM] {RESULT} ydb/tests/fq/restarts/py3test >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] [GOOD] >> test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> 
AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL] 2025-03-28 09:44:56,457 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-28 09:44:56,741 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 609574 46.0M 44.7M 23.0M test_tool run_ut @/home/runner/.ya/build/build_root/2909/000b33/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.ar 609808 1.2G 1.2G 798M └─ ydb-core-statistics-aggregator-ut --trace-path-append /home/runner/.ya/build/build_root/2909/000b33/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_s 834775 1.2G 1.2G 798M └─ ydb-core-statistics-aggregator-ut --trace-path-append /home/runner/.ya/build/build_root/2909/000b33/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_ou Test command err: 2025-03-28T09:35:00.174757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T09:35:00.174956Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T09:35:00.174995Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/000b33/r3tmp/tmpm77Rq0/pdisk_1.dat 2025-03-28T09:35:00.489040Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 26717, node 1 2025-03-28T09:35:00.699536Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:35:00.699577Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:35:00.699598Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:35:00.699895Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:35:00.704099Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T09:35:00.781031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:00.781171Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:00.794989Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:21512 2025-03-28T09:35:01.302163Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T09:35:03.873413Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T09:35:03.894101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:03.894184Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:03.930347Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T09:35:03.931865Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:04.141573Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.141974Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.142356Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.142447Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.142629Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.142703Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.142767Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.142855Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.142930Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T09:35:04.297581Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:35:04.297668Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:35:04.310551Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T09:35:04.443927Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:35:04.487153Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T09:35:04.487217Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T09:35:04.509080Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T09:35:04.509189Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T09:35:04.509322Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T09:35:04.509375Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T09:35:04.509412Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T09:35:04.509457Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T09:35:04.509496Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T09:35:04.509527Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T09:35:04.509840Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T09:35:04.535624Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:04.535708Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T09:35:04.541197Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T09:35:04.545781Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T09:35:04.546000Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T09:35:04.550838Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T09:35:04.564459Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T09:35:04.564513Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T09:35:04.564611Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T09:35:04.575398Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T09:35:04.584658Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T09:35:04.584797Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T09:35:04.746114Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T09:35:04.872875Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T09:35:04.959468Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T09:35:05.789495Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:05.789579Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:35:05.802219Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T09:35:05.899565Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T09:35:05.899741Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T09:35:05.899964Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T09:35:05.900044Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T09:35:05.900133Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T09:35:05.900206Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T09:35:05.900308Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T09:35:05.900411Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T09:35:05.900513Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T09:35:05.900604Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T09:35:05.900686Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T09:35:05.900755Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T09:35:05.921111Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T09:35:05.921187Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;desc ... heduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:30.687748Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:44:32.004388Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:44:32.015183Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:44:32.015249Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:32.015279Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:44:33.326242Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:44:33.326322Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:33.326375Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:44:34.636694Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:44:34.636941Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:44:34.648049Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:44:34.648128Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:34.648185Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:44:35.971562Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:44:35.971636Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:35.971672Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:44:37.257124Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:44:37.257210Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:37.257249Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:44:38.478338Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:44:38.489336Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:44:38.489419Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:38.489455Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 
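The "Resource pool default not found" warnings a few records above are the workload-manager bootstrap sequence, the same one visible earlier in the kqp view test: the first query finds no default pool, schemeshard runs ESchemeOpCreateResourcePool, and concurrent creators race, with the loser logging the benign "path exist, request accepts it" error. A pool can also be created explicitly; a sketch assuming current YQL resource-pool DDL, with illustrative limits:

    CREATE RESOURCE POOL my_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,
        QUEUE_SIZE = 100
    );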
2025-03-28T09:44:39.895201Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:44:39.895263Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:39.895294Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:44:41.200350Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:44:41.200592Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:44:41.211597Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:44:41.211696Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:41.211734Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:44:42.463851Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:44:42.463949Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:42.463998Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:44:43.699958Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:44:43.700053Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:43.700091Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:44:44.991706Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:44:45.002591Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:44:45.002673Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:45.002755Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:44:46.402802Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:44:46.402882Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:46.402919Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:44:47.718715Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:44:47.718945Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:44:47.729926Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:44:47.730028Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:47.730066Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 
2025-03-28T09:44:49.018642Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:44:49.018740Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:49.018776Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:44:50.282883Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:44:50.282950Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:50.282988Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:44:51.630877Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T09:44:51.641881Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:44:51.641962Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:51.642013Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T09:44:53.172437Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T09:44:53.172514Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:53.172551Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T09:44:54.510609Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T09:44:54.510880Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T09:44:54.522621Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T09:44:54.522718Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T09:44:54.522753Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 
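The wait here never completes: the statistics aggregator keeps cycling ScheduleNextTraversal/ScheduleNextAnalyze until the test runtime's event budget runs out, which surfaces just below as a TSchedulingLimitReachedException after 100000 processed events. A minimal sketch of such an event-budget guard, with class and method names that are assumptions for illustration only, not the actual NActors test-runtime code:

// Illustrative sketch only; names are assumed, this is not the NActors code.
#include <cstdint>
#include <stdexcept>
#include <string>

struct TSchedulingLimitReached : std::runtime_error {
    using std::runtime_error::runtime_error;
};

class TEventBudget {
    uint64_t Processed = 0;
    const uint64_t Limit;
public:
    explicit TEventBudget(uint64_t limit = 100000) : Limit(limit) {}

    // Called once per dispatched event while a test waits for an edge event;
    // throwing here turns a livelocked actor loop into a bounded test failure
    // instead of an open-ended hang (the harness then reports the timeout).
    void Account() {
        if (++Processed > Limit) {
            throw TSchedulingLimitReached(
                "Processed over " + std::to_string(Limit) + " events");
        }
    }
};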
(TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.ydb/library/actors/testlib/test_runtime.cpp:716: TBackTrace::Capture()+28 (0x18BB873C) TWithBackTrace::TWithBackTrace<>()+80 (0x187E0630) NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+485 (0x187B4CB5) NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TTestCaseAnalyzeRebootColumnShard::Execute_(NUnitTest::TTestContext&)+4263 (0x187D1857) std::__y1::__function::__func, void ()>::operator()()+280 (0x187DC6E8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x190A3FA6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1907CBB9) NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TCurrentTest::Execute()+1204 (0x187DB8B4) NUnitTest::TTestFactory::Execute()+2438 (0x1907E486) NUnitTest::RunMain(int, char**)+5213 (0x1909E51D) ??+0 (0x7F1396022D90) __libc_start_main+128 (0x7F1396022E40) _start+41 (0x16144029) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2909/000b33/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/8330113388/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/2909/000b33/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |99.0%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.0%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndSplitWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:39:20.145573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:20.145696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:20.145751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:20.145797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:20.145840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:20.145868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:20.145929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:20.146008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:20.146324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:20.213501Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 
2025-03-28T09:39:20.213555Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:39:20.219811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:20.219927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:20.220049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:20.225075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:20.225206Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:20.225674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:20.225817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:20.227546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:20.228635Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:20.228676Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:20.228859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:20.228893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:20.228920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:20.229020Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:39:20.234231Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:39:20.345446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:20.345664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:20.345839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:20.346060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:20.346111Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 
2025-03-28T09:39:20.348273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:20.348386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:20.348668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:20.348728Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:20.348765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:20.348794Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:20.350881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:20.350939Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:20.350986Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:20.352811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:20.352855Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:20.352912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:20.352953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:20.362229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:20.364023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:20.364166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:20.364881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:20.364980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:20.365018Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:20.365197Z node 1 
:FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:20.365232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:20.365354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:20.365419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:20.366995Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:20.367039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:20.367203Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:20.367233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:39:20.367459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:20.367498Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:20.367636Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:20.367659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:20.367686Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:20.367706Z no ... 
1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 
72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:45:14.228428Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:45:14.228771Z node 118 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 355us result status StatusSuccess 2025-03-28T09:45:14.229772Z node 118 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 
ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:45:14.241164Z node 118 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1086:2874] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-28T09:45:14.241295Z node 118 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][118:1045:2874] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-03-28T09:45:14.241483Z node 118 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1086:2874] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743155114195317 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743155114195317 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1743155114195317 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:45:14.244448Z node 118 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409546][118:1086:2874] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-03-28T09:45:14.244555Z node 118 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][118:1045:2874] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> KafkaProtocol::InitProducerId_forPreviouslySeenTransactionalIdShouldReturnSameProducerIdAndIncrementEpoch [GOOD] >> KafkaProtocol::InitProducerId_forPreviouslySeenTransactionalIdShouldReturnNewProducerIdIfEpochOverflown >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> KqpLimits::TooBigQuery-useSink [GOOD] >> KqpLimits::TooBigKey+useSink |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes >> KqpLimits::TooBigKey+useSink [GOOD] >> KqpLimits::TooBigKey-useSink >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] >> KafkaProtocol::InitProducerId_forPreviouslySeenTransactionalIdShouldReturnNewProducerIdIfEpochOverflown [GOOD] >> TMetadataActorTests::TopicMetadataGoodAndBad >> KqpLimits::TooBigKey-useSink [GOOD] >> test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigKey-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 63971, MsgBus: 14738 2025-03-28T09:38:45.516971Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792536658745002:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:45.517088Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # 
/home/runner/.ya/build/build_root/2909/001496/r3tmp/tmpNheNMX/pdisk_1.dat 2025-03-28T09:38:45.771754Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:38:45.779808Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:38:45.779908Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:38:45.782107Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 63971, node 1 2025-03-28T09:38:45.834724Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:38:45.834746Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:38:45.834754Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:38:45.834838Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14738 TClient is connected to server localhost:14738 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:38:46.291046Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:46.308983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:46.437709Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:46.558133Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:46.632397Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:47.766721Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792545248681381:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:47.766860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:47.967394Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T09:38:47.990510Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T09:38:48.016744Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:38:48.042184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:38:48.067724Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T09:38:48.097831Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T09:38:48.162067Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792549543649188:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:48.162135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792549543649193:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:48.162135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:38:48.164833Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T09:38:48.172267Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792549543649195:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T09:38:48.267043Z node 1 :TX_PROXY ERROR: Actor# [1:7486792549543649251:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:38:49.113121Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:38:50.517157Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792536658745002:2064];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:38:50.517245Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:39:00.770983Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:39:00.771019Z node 1 :IMPORT WARN: Table profiles were not loaded Trying to start YDB, gRPC: 21823, MsgBus: 27756 2025-03-28T09:42:36.905187Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486793528918433915:2127];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:42:36.905835Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001496/r3tmp/tmptfPLxL/pdisk_1.dat 2025-03-28T09:42:37.201027Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:42:37.262535Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:42:37.266750Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:42:37.272320Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21823, node 2 2025-03-28T09:42:37.463489Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:42:37.463523Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:42:37.463542Z node 2 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:42:37.463739Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:27756 TClient is connected to server localhost:27756 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:42:38.479908Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:42:38.504189Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:42:38.639680Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:42:38.955003Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESc ... 998Z node 3 :TX_DATASHARD ERROR: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 2025-03-28T09:45:33.991318Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486794291105381950:2502], Table: `/Root/Test` ([72057594046644480:9:1]), SessionActorId: [3:7486794291105381908:2502]Got BAD REQUEST for table `/Root/Test`. ShardID=72075186224037914, Sink=[3:7486794291105381950:2502].{
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 } 2025-03-28T09:45:33.992326Z node 3 :KQP_COMPUTE ERROR: SelfId: [3:7486794291105381942:2502], SessionActorId: [3:7486794291105381908:2502], statusCode=BAD_REQUEST. Issue=
: Error: Bad request. Table: `/Root/Test`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 . sessionActorId=[3:7486794291105381908:2502]. isRollback=0 2025-03-28T09:45:34.084307Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDY1ODUyNWEtOWVhM2UwYzctZWUzMzYwMWQtODhlYjAxZWQ=, ActorId: [3:7486794291105381908:2502], ActorState: ExecuteState, TraceId: 01jqe28zcfasb5yspaxq9f6r0k, got TEvKqpBuffer::TEvError in ExecuteState, status: BAD_REQUEST send to: [3:7486794291105381943:2502] from: [3:7486794291105381942:2502] 2025-03-28T09:45:34.084469Z node 3 :KQP_EXECUTER ERROR: ActorId: [3:7486794291105381943:2502] TxId: 281474976710671. Ctx: { TraceId: 01jqe28zcfasb5yspaxq9f6r0k, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=3&id=NDY1ODUyNWEtOWVhM2UwYzctZWUzMzYwMWQtODhlYjAxZWQ=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: {
: Error: Bad request. Table: `/Root/Test`., code: 2017 subissue: {
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 } } 2025-03-28T09:45:34.085740Z node 3 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=3&id=NDY1ODUyNWEtOWVhM2UwYzctZWUzMzYwMWQtODhlYjAxZWQ=, ActorId: [3:7486794291105381908:2502], ActorState: ExecuteState, TraceId: 01jqe28zcfasb5yspaxq9f6r0k, Create QueryResponse for error on request, msg:
: Error: Bad request. Table: `/Root/Test`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row key size of 2097156 bytes is larger than the allowed threshold 1049600 at tablet# 72075186224037914, code: 2017 Trying to start YDB, gRPC: 24110, MsgBus: 13009 2025-03-28T09:45:35.295944Z node 4 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7486794300579296311:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:45:35.296043Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/001496/r3tmp/tmpbS63kw/pdisk_1.dat 2025-03-28T09:45:35.426972Z node 4 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:45:35.448674Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:45:35.448783Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:45:35.450534Z node 4 :HIVE WARN: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24110, node 4 2025-03-28T09:45:35.502249Z node 4 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T09:45:35.502272Z node 4 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T09:45:35.502283Z node 4 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T09:45:35.502473Z node 4 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13009 TClient is connected to server localhost:13009 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:45:36.217158Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:45:36.245636Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:45:36.385688Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480 waiting... waiting... 
2025-03-28T09:45:36.595334Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T09:45:36.750737Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T09:45:40.295836Z node 4 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7486794300579296311:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:45:40.295954Z node 4 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T09:45:40.644321Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486794322054134582:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:45:40.644438Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:45:40.715399Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T09:45:40.754872Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T09:45:40.791155Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T09:45:40.827063Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480 2025-03-28T09:45:40.871333Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480 2025-03-28T09:45:40.914100Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480 2025-03-28T09:45:40.966152Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486794322054135092:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:45:40.966283Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:45:40.966358Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7486794322054135097:2461], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:45:40.970729Z node 4 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480 2025-03-28T09:45:40.986153Z node 4 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7486794322054135099:2462], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2025-03-28T09:45:41.081340Z node 4 :TX_PROXY ERROR: Actor# [4:7486794326349102450:3451] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:45:42.773448Z node 4 :TX_DATASHARD ERROR: Operation [0:281474976715671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914 2025-03-28T09:45:42.773651Z node 4 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715671 at tablet 72075186224037914 status: BAD_REQUEST errors: BAD_ARGUMENT (Operation [0:281474976715671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914) | 2025-03-28T09:45:42.773934Z node 4 :KQP_EXECUTER ERROR: ActorId: [4:7486794330644070073:2493] TxId: 281474976715671. Ctx: { TraceId: 01jqe297yq2vp9nbacg6a9x8k5, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=4&id=NDQ1ZGFjNS1jM2RjYTg0MS03NTJkMWM5ZC1hYWViYzI2YQ==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: [BAD_ARGUMENT] Operation [0:281474976715671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914; 2025-03-28T09:45:42.774495Z node 4 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=4&id=NDQ1ZGFjNS1jM2RjYTg0MS03NTJkMWM5ZC1hYWViYzI2YQ==, ActorId: [4:7486794330644070037:2493], ActorState: ExecuteState, TraceId: 01jqe297yq2vp9nbacg6a9x8k5, Create QueryResponse for error on request, msg:
: Error: Bad request., code: 2017
: Error: [BAD_ARGUMENT] Operation [0:281474976715671] writes key of 2097156 bytes which exceeds limit 1049600 bytes at 72075186224037914 |99.0%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |99.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} >> test_result_limits.py::TestResultLimits::test_many_rows >> test_public_api.py::TestExplain::test_explain_data_query >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] >> test_public_api.py::TestExplain::test_explain_data_query [GOOD] >> TMetadataActorTests::TopicMetadataGoodAndBad [GOOD] >> PublishKafkaEndpoints::HaveEndpointInLookup |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_revert_basis >> PublishKafkaEndpoints::HaveEndpointInLookup [GOOD] >> PublishKafkaEndpoints::MetadataActorGetsEndpoint |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] >> test_insert.py::TestInsertOperations::test_insert_revert_basis [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown 
[GOOD] >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle [GOOD] >> test_public_api.py::TestCRUDOperations::test_none_values >> test_public_api.py::TestCRUDOperations::test_none_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_list_type >> test_public_api.py::TestCRUDOperations::test_parse_list_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_tuple >> test_public_api.py::TestCRUDOperations::test_parse_tuple [GOOD] >> test_public_api.py::TestCRUDOperations::test_dict_type >> test_public_api.py::TestCRUDOperations::test_dict_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_data_types |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] >> test_public_api.py::TestCRUDOperations::test_data_types [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_upsert >> test_public_api.py::TestCRUDOperations::test_bulk_upsert [GOOD] >> test_public_api.py::TestCRUDOperations::test_all_enums_are_presented_as_exceptions [GOOD] >> test_public_api.py::TestCRUDOperations::test_type_builders_str_methods [GOOD] >> test_public_api.py::TestCRUDOperations::test_create_and_delete_session_then_use_it_again [GOOD] >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_3 >> test_public_api.py::TestCRUDOperations::test_tcl_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak >> PublishKafkaEndpoints::MetadataActorGetsEndpoint [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithNoNode |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] >> test_result_limits.py::TestResultLimits::test_many_rows [GOOD] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts >> 
test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts [GOOD] >> test_public_api.py::TestCRUDOperations::test_presented_in_cache [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values >> test_public_api.py::TestCRUDOperations::test_decimal_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children [GOOD] >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modifications_1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modification_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success [GOOD] >> test_public_api.py::TestCRUDOperations::test_modify_permissions_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_directory_that_doesnt_exists [GOOD] >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions [GOOD] >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_queries_set2 >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] >> test_public_api.py::TestCRUDOperations::test_queries_set2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithNoNode [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithError >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false] 
[GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::SplitBothWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:39:23.226284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:23.226362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:23.226404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:23.226434Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:23.226463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:23.226483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:23.226521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:23.226573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:23.226879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:23.284236Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:39:23.284304Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:39:23.290594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:23.290706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:23.290815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:23.295968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:23.296115Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:23.296734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:23.296912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:23.298533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:23.299666Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:23.299716Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:23.299836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:23.299877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:23.299918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:23.300055Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] 
sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:39:23.306157Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:39:23.429019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:23.429177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:23.429316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:23.429485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:23.429533Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:23.431443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:23.431539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:23.431675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:23.431731Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:23.431789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:23.431817Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:23.433241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:23.433280Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:23.433312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:23.435283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:23.435350Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:23.435432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:23.435489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:23.439784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 
MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:23.442042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:23.442240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:23.443537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:23.443699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:23.443757Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:23.444177Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:23.444373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:23.444564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:23.444672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:23.447089Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:23.447185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:23.447385Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:23.447434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:39:23.447700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:23.447751Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:23.447855Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:23.447899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:23.447939Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:23.447974Z no ... 
CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\001\000\004\000\000\0002\000\000\000" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409549 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:47:12.438161Z node 174 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:47:12.438470Z node 174 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 354us result status StatusSuccess 2025-03-28T09:47:12.439432Z node 174 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 
MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 50 } } Tuple { } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "\002\000\004\000\000\0002\000\000\000\000\000\000\200" IsPoint: false IsInclusive: false DatashardId: 72075186233409550 } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:47:12.450936Z node 174 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][174:1042:2807] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-28T09:47:12.451041Z node 174 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][174:1043:2807] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-28T09:47:12.451114Z node 174 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][174:962:2807] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-03-28T09:47:12.451192Z node 174 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][174:962:2807] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-03-28T09:47:12.451315Z node 174 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][174:1042:2807] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743155232418857 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743155232418857 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:47:12.451485Z node 174 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][174:1043:2807] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords 
{ Records [{ Order: 3 Group: 1743155232418857 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:47:12.461539Z node 174 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409550][174:1042:2807] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 2 2025-03-28T09:47:12.461693Z node 174 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][174:962:2807] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409550 } 2025-03-28T09:47:12.462764Z node 174 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409548:2][72075186233409551][174:1043:2807] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-03-28T09:47:12.462876Z node 174 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409548:2][174:962:2807] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } >> PublishKafkaEndpoints::DiscoveryResponsesWithError [GOOD] >> PublishKafkaEndpoints::DiscoveryResponsesWithOtherPort >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] >> test_public_api.py::TestSessionNotFound::test_session_not_found |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> PublishKafkaEndpoints::DiscoveryResponsesWithOtherPort [GOOD] >> PublishKafkaEndpoints::MetadataActorDoubleTopic >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1 [GOOD] >> 
test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive >> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::CdcAndMergeWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:39:26.817794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:26.817885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:26.817923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:26.817954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:26.817989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:26.818008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:26.818047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:26.818100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:26.818352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:26.889307Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { 
AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:39:26.889380Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:39:26.895602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:26.895701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:26.895805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:26.900803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:26.900962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:26.901614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:26.901824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:26.903650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:26.904911Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:26.904964Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:26.905087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:26.905127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:26.905165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:26.905322Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:39:26.912077Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:39:27.002582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:27.002791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:27.002939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for 
pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:27.003109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:27.003160Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:27.005100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:27.005195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:27.005316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:27.005354Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:27.005385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:27.005409Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:27.006831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:27.006869Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:27.006906Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:27.008129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:27.008162Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:27.008200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:27.008230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:27.010652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:27.011833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:27.011947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:27.012689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:27.012781Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:27.012823Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:27.012999Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:27.013036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:27.013151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:27.013244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:27.014606Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:27.014651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:27.014797Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:27.014828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:39:27.014996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:27.015024Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:27.015099Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:27.015120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:27.015165Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:27.015183Z no ... 
" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "Stream" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 6 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 
72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:47:38.241044Z node 163 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:47:38.241229Z node 163 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 201us result status StatusSuccess 2025-03-28T09:47:38.241812Z node 163 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 
ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 6 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:47:38.252823Z node 163 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][163:1182:2965] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-28T09:47:38.252933Z node 163 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][163:1151:2965] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } 2025-03-28T09:47:38.253087Z node 163 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][163:1182:2965] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743155258215723 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743155258215723 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 },{ Order: 5 Group: 1743155258215723 Step: 5000004 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 2 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:47:38.255691Z node 163 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409551:2][72075186233409546][163:1182:2965] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 5 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 5 2025-03-28T09:47:38.255795Z node 163 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409551:2][163:1151:2965] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409546 } |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tpch.py::TestTpchS1::test_tpch[1] >> test_clickbench.py::TestClickbench::test_clickbench[0] >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] >> PublishKafkaEndpoints::MetadataActorDoubleTopic [GOOD] >> Serialization::RequestHeader [GOOD] >> Serialization::ResponseHeader [GOOD] >> Serialization::ApiVersionsRequest [GOOD] >> Serialization::ApiVersionsResponse [GOOD] >> Serialization::ApiVersion_WithoutSupportedFeatures [GOOD] >> Serialization::ProduceRequest [GOOD] >> Serialization::UnsignedVarint32 [GOOD] >> Serialization::UnsignedVarint64 [GOOD] >> Serialization::Varint32 [GOOD] >> Serialization::Varint64 [GOOD] >> Serialization::UnsignedVarint32_Wrong [GOOD] >> Serialization::UnsignedVarint64_Wrong [GOOD] >> Serialization::UnsignedVarint32_Deserialize [GOOD] >> Serialization::TKafkaInt8_NotPresentVersion [GOOD] >> Serialization::TKafkaInt8_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaInt8_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaInt8_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::Struct_IsDefault [GOOD] >> Serialization::TKafkaString_IsDefault [GOOD] >> 
Serialization::TKafkaString_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaString_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaString_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::TKafkaArray_IsDefault [GOOD] >> Serialization::TKafkaArray_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaArray_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaArray_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::TKafkaBytes_IsDefault [GOOD] >> Serialization::TKafkaBytes_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::TKafkaBytes_PresentVersion_TaggedVersion [GOOD] >> Serialization::TKafkaBytes_PresentVersion_TaggedVersion_Default [GOOD] >> Serialization::TRequestHeaderData_reference [GOOD] >> Serialization::TKafkaFloat64_PresentVersion_NotTaggedVersion [GOOD] >> Serialization::RequestHeader_reference [GOOD] >> Serialization::ProduceRequestData [GOOD] >> Serialization::ProduceRequestData_Record_v0 [GOOD] >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kafka_proxy/ut/unittest >> Serialization::ProduceRequestData_Record_v0 [GOOD] Test command err: 2025-03-28T09:39:52.115554Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486792827223442085:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:52.115589Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00064c/r3tmp/tmpXHFTtx/pdisk_1.dat 2025-03-28T09:39:52.485535Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:39:52.538197Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:39:52.538285Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:39:52.540851Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 5099, node 1 2025-03-28T09:39:52.767455Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00064c/r3tmp/yandexc6tXLA.tmp 2025-03-28T09:39:52.767490Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00064c/r3tmp/yandexc6tXLA.tmp 2025-03-28T09:39:52.769171Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00064c/r3tmp/yandexc6tXLA.tmp 2025-03-28T09:39:52.769329Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:39:53.020291Z INFO: TTestServer started on Port 9026 GrpcPort 5099 TClient is connected to server localhost:9026 PQClient connected to localhost:5099 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:39:53.320834Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:39:53.361295Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:39:54.629297Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792835813377493:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:54.629520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:54.630032Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486792835813377501:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T09:39:54.637223Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T09:39:54.647230Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486792835813377508:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T09:39:54.754254Z node 1 :TX_PROXY ERROR: Actor# [1:7486792835813377572:2451] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T09:39:55.139419Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486792835813377580:2352], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
<main>:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:39:55.146026Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWRmNGQwMGItMWFkNTM2YTYtNDBjODBlN2EtNWIzMDRjYQ==, ActorId: [1:7486792835813377491:2340], ActorState: ExecuteState, TraceId: 01jqe1ym4159z3wxxe6zq7esm0, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:39:55.149782Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:39:55.191613Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T09:39:55.221527Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T09:39:55.298737Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:39:55.715237Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqe1ymtt4ah02zgp830b0r2m, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=OTUzNzc2NGYtYzdiZGUzMS01NDBhOTQ1YS1mMWI1MDlkYg==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [1:7486792840108345165:2637] 2025-03-28T09:39:57.117128Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486792827223442085:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:39:57.117258Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok Run with port = 5099, kafka port = 24423 2025-03-28T09:40:02.168962Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486792869499514402:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T09:40:02.169036Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/2909/00064c/r3tmp/tmpEAJAIL/pdisk_1.dat 2025-03-28T09:40:02.295123Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:40:02.323385Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T09:40:02.323472Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T09:40:02.325035Z node 2 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 20082, node 2 2025-03-28T09:40:02.363464Z node 2 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/2909/00064c/r3tmp/yandexEzZeOL.tmp 2025-03-28T09:40:02.363498Z node 2 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/2909/00064c/r3tmp/yandexEzZeOL.tmp 2025-03-28T09:40:02.363645Z node 2 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/2909/00064c/r3tmp/yandexEzZeOL.tmp 2025-03-28T09:40:02.363804Z node 2 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T09:40:02.409053Z INFO: TTestServer started on Port 18954 GrpcPort 20082 TClient is connected to server localhost:18954 PQClient connected to localhost:20082 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T09:40:02.555278Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T09:40:02.580510Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T09:40:05.107240Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486792882384417062:2341], DatabaseId: /Root, PoolId: default, Failed t ... 
led, self: [29:7486794843407874109:2368], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
<main>:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T09:47:42.528190Z node 29 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=29&id=NWE1ZDUwMjgtZmEyNzQzODgtNjcxODE5NjktNDQxNDhhMTI=, ActorId: [29:7486794843407873928:2350], ActorState: ExecuteState, TraceId: 01jqe2cwwyaet3gfhqdmgtvern, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T09:47:42.528863Z node 29 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T09:47:42.669498Z node 29 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2025-03-28T09:47:43.199738Z node 29 :KQP_EXECUTER ERROR: TxId: 281474976710667. Ctx: { TraceId: 01jqe2cxbeetdw8ds0mg4qtc65, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=29&id=YzEzZDU5OTItNWM4OTI1NjEtM2U2ZTY5NS1lMDZhMWM4Yw==, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root === CheckClustersList. Subcribe to ClusterTracker from [29:7486794847702841638:2654] === CheckClustersList. Ok PQ Client: create topic: rt3.dc1--topic1 with 1 partitions CallPersQueueGRPC request to localhost:18001 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:18001 MetaRequest { CmdCreateTopic { Topic: "rt3.dc1--topic1" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 } } } CallPersQueueGRPC response: Status: 129 ProxyErrorCode: 53 SchemeStatus: 1 FlatTxId { TxId: 281474976710678 SchemeShardTabletId: 72057594046644480 PathId: 13 } ErrorCode: OK AddTopic: rt3.dc1--topic1 ===Run query:``DECLARE $version as Int64; DECLARE $path AS Utf8; DECLARE $cluster as Utf8; UPSERT INTO `/Root/PQ/Config/V2/Topics` (path, dc) VALUES ($path, $cluster); UPSERT INTO `/Root/PQ/Config/V2/Versions` (name, version) VALUES ("Topics", $version);`` with topic = topic1, dc = dc1 2025-03-28T09:47:49.642302Z node 29 :KQP_EXECUTER ERROR: TxId: 281474976710679. 
Ctx: { TraceId: 01jqe2d3r2a7s8wwxgwf00z2qs, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=29&id=MjZhY2ZhOGEtNThkMGMyMTctNDlkNTFjZjItZmI4MmYyNDc=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root ===Query complete TClient::Ls request: /Root/PQ/rt3.dc1--topic1 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710678 CreateStep: 1743155269450 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic1" PathId: 13 TotalGroupCount: 1 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20... (TRUNCATED) GetTopicVersionFromPath: record Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "rt3.dc1--topic1" PathId: 13 SchemeshardId: 72057594046644480 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 281474976710678 CreateStep: 1743155269450 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false BalancerTabletID: 72075186224037893 } PersQueueGroup { Name: "rt3.dc1--topic1" PathId: 13 TotalGroupCount: 1 PartitionPerTablet: 5 PQTabletConfig { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } LocalDC: true ReadRules: "user" ReadFromTimestampsMs: 0 ConsumerFormatVersions: 0 ConsumerCodecs { } Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } ReadRuleVersions: 0 YdbDatabasePath: "/Root" } Partitions { PartitionId: 0 TabletId: 72075186224037892 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186224037893 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 12 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 } } Path: "/Root/PQ/rt3.dc1--topic1" name rt3.dc1--topic1 version1 CallPersQueueGRPC request to localhost:18001 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC CallPersQueueGRPC request to localhost:18001 MetaRequest { CmdGetTopicMetadata { Topic: "rt3.dc1--topic1" } } CallPersQueueGRPC response: Status: 1 ErrorCode: OK MetaResponse { 
CmdGetTopicMetadataResult { TopicInfo { Topic: "rt3.dc1--topic1" NumPartitions: 1 Config { PartitionConfig { LifetimeSeconds: 86400 LowWatermark: 8388608 SourceIdLifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 20000000 BurstSize: 20000000 SourceIdMaxCounts: 6000000 } Version: 1 LocalDC: true Codecs { Ids: 0 Ids: 1 Ids: 2 Codecs: "raw" Codecs: "gzip" Codecs: "lzop" } TopicPath: "/Root/PQ/rt3.dc1--topic1" YdbDatabasePath: "/Root" Consumers { Name: "user" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } Version: 0 Important: false } } ErrorCode: OK } } } === Topic created, have version: 1 2025-03-28T09:47:50.188545Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: try to update token 2025-03-28T09:47:50.189282Z :INFO: [] MessageGroupId [src] SessionId [] Write session: Do CDS request 2025-03-28T09:47:50.189374Z :INFO: [] MessageGroupId [src] SessionId [] Start write session. Will connect to endpoint: localhost:18001 2025-03-28T09:47:50.200097Z :DEBUG: [] MessageGroupId [src] SessionId [] Write session: send init request: init_request { topic: "topic1" message_group_id: "src" } 2025-03-28T09:47:50.508637Z node 29 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:47:50.508672Z node 29 :IMPORT WARN: Table profiles were not loaded 2025-03-28T09:47:50.763731Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1743155270763 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2025-03-28T09:47:50.763881Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|ee224d99-469f7acd-f8a21149-8925774a_0" topic: "topic1" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2025-03-28T09:47:50.764109Z :INFO: [] MessageGroupId [src] SessionId [src|ee224d99-469f7acd-f8a21149-8925774a_0] Write session: close. 
Timeout = 0 ms 2025-03-28T09:47:50.764216Z :INFO: [] MessageGroupId [src] SessionId [src|ee224d99-469f7acd-f8a21149-8925774a_0] Write session will now close 2025-03-28T09:47:50.764338Z :DEBUG: [] MessageGroupId [src] SessionId [src|ee224d99-469f7acd-f8a21149-8925774a_0] Write session: aborting 2025-03-28T09:47:50.765173Z :INFO: [] MessageGroupId [src] SessionId [src|ee224d99-469f7acd-f8a21149-8925774a_0] Write session: gracefully shut down, all writes complete 2025-03-28T09:47:50.765290Z :DEBUG: [] MessageGroupId [src] SessionId [src|ee224d99-469f7acd-f8a21149-8925774a_0] Write session: destroy Broker 29 - [::]:23530 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=4294967295 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=2147483648 >>>>> Check value=9223372036854775807 >>>>> Check value=18446744073709551615 >>>>> Check value=-2147483648 >>>>> Check value=-167966 >>>>> Check value=-1 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483647 >>>>> Check value=-9223372036854775808 >>>>> Check value=-2147483648 >>>>> Check value=-167966 >>>>> Check value=-1 >>>>> Check value=0 >>>>> Check value=1 >>>>> Check value=127 >>>>> Check value=128 >>>>> Check value=32191 >>>>> Check value=2147483648 >>>>> Check value=9223372036854775807 >>>>> Buffer size: 251 >>>>> Buffer size: 104 |99.2%| [TM] {RESULT} ydb/core/kafka_proxy/ut/unittest |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestSessionNotFound::test_session_not_found [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/config/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_invalid_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_simple_table_profile_settings [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_clickbench.py::TestClickbench::test_clickbench[0] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[1] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_clickbench.py::TestClickbench::test_clickbench[1] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[2] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] >> test_clickbench.py::TestClickbench::test_clickbench[2] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[3] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true] [GOOD] |99.3%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_clickbench.py::TestClickbench::test_clickbench[3] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[4] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] >> test_clickbench.py::TestClickbench::test_clickbench[4] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[5] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestBadSession::test_simple >> test_clickbench.py::TestClickbench::test_clickbench[5] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[6] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_clickbench.py::TestClickbench::test_clickbench[6] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[7] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_clickbench.py::TestClickbench::test_clickbench[7] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[8] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata >> test_clickbench.py::TestClickbench::test_clickbench[8] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[9] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_clickbench.py::TestClickbench::test_clickbench[9] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[10] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true] [GOOD] >> 
test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.4%| [TA] $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} |99.4%| [TA] {RESULT} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} >> test_clickbench.py::TestClickbench::test_clickbench[10] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[11] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata >> test_public_api.py::TestBadSession::test_simple [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[11] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[12] >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index/unittest >> TAsyncIndexTests::MergeBothWithReboots[TabletReboots] [GOOD] Test command err: =========== RUN: Trace =========== Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:115:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:116:2058] recipient: [1:111:2142] Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:124:2058] recipient: [1:108:2140] Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:131:2058] recipient: [1:109:2141] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:111:2142] 2025-03-28T09:39:23.370319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T09:39:23.370407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:23.370448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2025-03-28T09:39:23.370476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T09:39:23.370508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T09:39:23.370526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T09:39:23.370561Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T09:39:23.370617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T09:39:23.370855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T09:39:23.427592Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" AvailableExternalDataSources: "ClickHouse" AvailableExternalDataSources: "PostgreSQL" AvailableExternalDataSources: "MySQL" AvailableExternalDataSources: "Ydb" AvailableExternalDataSources: "YT" AvailableExternalDataSources: "Greenplum" AvailableExternalDataSources: "MsSQLServer" AvailableExternalDataSources: "Oracle" AvailableExternalDataSources: "Logging" AvailableExternalDataSources: "Solomon" } 2025-03-28T09:39:23.427641Z node 1 :IMPORT WARN: Table profiles were not loaded Leader for TabletID 72057594046447617 is [1:123:2149] sender: [1:188:2058] recipient: [1:15:2062] 2025-03-28T09:39:23.433283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T09:39:23.433381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T09:39:23.433492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T09:39:23.438800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T09:39:23.438947Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T09:39:23.439547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:23.439727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T09:39:23.441395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:23.442555Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:23.442613Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:23.442761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T09:39:23.442804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:23.442883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T09:39:23.443028Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] IGNORE Leader for TabletID 72057594037968897 is [0:0:0] sender: [1:214:2058] recipient: [1:212:2213] Leader for TabletID 72057594037968897 is [1:218:2217] sender: [1:219:2058] recipient: [1:212:2213] 2025-03-28T09:39:23.449813Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:239:2058] recipient: [1:15:2062] 2025-03-28T09:39:23.549061Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T09:39:23.549269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:23.549459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T09:39:23.549678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T09:39:23.549736Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:23.552457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:23.552567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T09:39:23.552727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:23.552785Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T09:39:23.552821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T09:39:23.552850Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T09:39:23.554652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:23.554723Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T09:39:23.554779Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T09:39:23.556325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:23.556389Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:23.556434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:23.556478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T09:39:23.559902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T09:39:23.561537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T09:39:23.561701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation 
RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 Leader for TabletID 72057594046316545 is [1:129:2154] sender: [1:254:2058] recipient: [1:15:2062] FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T09:39:23.562665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T09:39:23.562795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 129 RawX2: 4294969450 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:39:23.562843Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:23.563079Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T09:39:23.563129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T09:39:23.563290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T09:39:23.563371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T09:39:23.565173Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T09:39:23.565225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T09:39:23.565380Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T09:39:23.565419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:206:2208], at schemeshard: 72057594046678944, txId: 1, path id: 1 2025-03-28T09:39:23.565641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T09:39:23.565678Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T09:39:23.565756Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:23.565792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T09:39:23.565828Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T09:39:23.565855Z no ... 
} Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableIndexes { Name: "UserDefinedIndex" LocalPathId: 4 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "indexed" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false 
IsInclusive: false DatashardId: 72075186233409550 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:48:53.335699Z node 202 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2025-03-28T09:48:53.335960Z node 202 :SCHEMESHARD_DESCRIBE INFO: Tablet 72057594046678944 describe path "/MyRoot/Table/UserDefinedIndex/indexImplTable" took 302us result status StatusSuccess 2025-03-28T09:48:53.336755Z node 202 :SCHEMESHARD_DESCRIBE DEBUG: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/UserDefinedIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000003 ParentPathId: 4 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "indexed" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "indexed" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 
CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409551 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 4 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T09:48:53.350397Z node 202 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1107:2875] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2025-03-28T09:48:53.350516Z node 202 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][202:1054:2875] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } 2025-03-28T09:48:53.350673Z node 202 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1107:2875] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 1743155333306190 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, 
LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 2 Group: 1743155333306190 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 },{ Order: 3 Group: 1743155333306190 Step: 5000003 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046678944, LocalPathId: 4] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046678944, LocalPathId: 3] SchemaVersion: 1 LockId: 0 LockOffset: 0 }] } 2025-03-28T09:48:53.354063Z node 202 :CHANGE_EXCHANGE DEBUG: [TableChangeSenderShard][72075186233409550:2][72075186233409551][202:1107:2875] Handle NKikimrChangeExchange.TEvStatus Status: STATUS_OK RecordStatuses { Order: 1 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 2 Status: STATUS_OK Reason: REASON_NONE } RecordStatuses { Order: 3 Status: STATUS_OK Reason: REASON_NONE } LastRecordOrder: 3 2025-03-28T09:48:53.354172Z node 202 :CHANGE_EXCHANGE DEBUG: [AsyncIndexChangeSenderMain][72075186233409550:2][202:1054:2875] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186233409551 } >> test_clickbench.py::TestClickbench::test_clickbench[12] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[13] >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key >> test_public_api.py::TestDriverCanRecover::test_driver_recovery >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_cluster_works_with_auto_conf_dir [GOOD] >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[13] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[14] |99.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[14] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[15] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/test-results/unittest/{meta.json ... results_accumulator.log} >> test_clickbench.py::TestClickbench::test_clickbench[15] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[16] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithMetadata::test_cluster_is_operational_with_metadata [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[16] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[17] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRStoreConfigDir::test_config_stored_in_config_store [GOOD] >> test_public_api.py::TestDriverCanRecover::test_driver_recovery [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[17] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[18] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata >> test_clickbench.py::TestClickbench::test_clickbench[18] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[19] >> test_self_heal.py::TestEnableSelfHeal::test_replication >> test_clickbench.py::TestClickbench::test_clickbench[19] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[20] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataBlock::test_cluster_is_operational_without_metadata [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[20] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[21] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> 
test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] >> test_clickbench.py::TestClickbench::test_clickbench[21] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[22] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] >> test_clickbench.py::TestClickbench::test_clickbench[22] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[23] >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithoutMetadataMirror::test_cluster_is_operational_without_metadata [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[23] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[24] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] >> test_clickbench.py::TestClickbench::test_clickbench[24] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[25] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] [GOOD] >> test_result_limits.py::TestResultLimits::test_large_row >> test_clickbench.py::TestClickbench::test_clickbench[25] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[26] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataMirrorMax::test_cluster_is_operational_with_metadata [GOOD] >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[26] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[27] >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes |99.5%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_clickbench.py::TestClickbench::test_clickbench[27] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[28] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[1] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[2] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfigFromConfigDir::test_generate_dynamic_config_from_config_store [GOOD] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[28] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[29] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestKiKiMRWithoutMetadata::test_cluster_is_operational_without_metadata [GOOD] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_generate_dynamic_config.py::TestGenerateDynamicConfig::test_generate_dynamic_config [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_all_supported_compression 2025-03-28 09:49:40,767 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-28 09:49:41,332 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 692060 853M 832M 782M ydb-tests-olap-column_family-compression --basetemp /home/runner/.ya/build/build_root/2909/000a08/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor 695417 5.6G 5.6G 5.1G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2909/000a08/ydb/tests/olap/column_family/compression/test-results/py3test/testing_ou Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File 
"contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/column_family/compression/alter_compression.py", line 102, in test_all_supported_compression tasks.start_and_wait_all() File "ydb/tests/olap/common/thread_helper.py", line 49, in start_and_wait_all self.join_all() File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all thread.join(timeout=timeout) File "ydb/tests/olap/common/thread_helper.py", line 16, in join super().join(timeout) File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock() File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout): File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...olumn_family-compression', '--basetemp', '/home/runner/.ya/build/build_root/2909/000a08/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2909/000a08/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2909/000a08', '--source-root', '/home/runner/.ya/build/build_root/2909/000a08/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2909/000a08/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, 
on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...olumn_family-compression', '--basetemp', '/home/runner/.ya/build/build_root/2909/000a08/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2909/000a08/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2909/000a08', '--source-root', '/home/runner/.ya/build/build_root/2909/000a08/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2909/000a08/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {RESULT} ydb/tests/olap/column_family/compression/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/s3_import/py3test >> test_tpch_import.py::TestS3TpchImport::test_import_and_export 2025-03-28 09:49:45,716 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-28 09:49:46,459 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 695331 596M 564M 515M ydb-tests-olap-s3_import --basetemp /home/runner/.ya/build/build_root/2909/000bfe/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module 698517 7.6G 7.4G 7.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2909/000bfe/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpc 700932 642M 686M 660M └─ moto_server s3 --port 4766 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in 
_multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/s3_import/test_tpch_import.py", line 99, in test_import_and_export self.validate_table("s3_table") File "ydb/tests/olap/s3_import/test_tpch_import.py", line 12, in validate_table result_sets = self.ydb_client.query(f""" File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 200, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...unner/.ya/build/build_root/2909/000bfe/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import', '--basetemp', '/home/runner/.ya/build/build_root/2909/000bfe/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2909/000bfe/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2909/000bfe', '--source-root', '/home/runner/.ya/build/build_root/2909/000bfe/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2909/000bfe/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 
'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...unner/.ya/build/build_root/2909/000bfe/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import', '--basetemp', '/home/runner/.ya/build/build_root/2909/000bfe/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2909/000bfe/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2909/000bfe', '--source-root', '/home/runner/.ya/build/build_root/2909/000bfe/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2909/000bfe/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) |99.6%| [TM] {RESULT} ydb/tests/olap/s3_import/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tpch.py::TestTpchS1::test_tpch[2] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[3] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> 
test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[29] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[30] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[30] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[31] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] >> test_clickbench.py::TestClickbench::test_clickbench[31] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[32] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestJsonExample::test_json_unexpected_failure |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[3] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[4] >> test_clickbench.py::TestClickbench::test_clickbench[32] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[33] >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok >> test_public_api.py::TestJsonExample::test_json_unexpected_failure [GOOD] >> test_public_api.py::TestJsonExample::test_json_success >> test_public_api.py::TestJsonExample::test_json_success [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[33] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[34] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_clickbench.py::TestClickbench::test_clickbench[34] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[35] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/config/py3test >> test_config_with_metadata.py::TestConfigWithMetadataBlock::test_cluster_is_operational_with_metadata [GOOD] |99.7%| [TA] $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TA] {RESULT} $(B)/ydb/tests/functional/config/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[35] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[36] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_clickbench.py::TestClickbench::test_clickbench[36] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[37] >> test_tpch.py::TestTpchS1::test_tpch[4] [GOOD] >> test_tpch.py::TestTpchS1::test_tpch[5] >> test_result_limits.py::TestResultLimits::test_large_row [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_clickbench.py::TestClickbench::test_clickbench[37] [GOOD] >> test_clickbench.py::TestClickbench::test_clickbench[38] |99.8%| [TA] $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} >> test_workload.py::TestYdbWorkload::test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_alter_compression.py::TestAlterCompression::test[alter_compression] 2025-03-28 09:50:27,812 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-28 09:50:28,097 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 717472 1.5G 1.5G 1.4G ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/2909/000a40/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules 834516 2.3G 2.2G 1.7G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2909/000a40/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alte Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File 
"contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "/home/runner/.ya/build/build_root/2909/000a40/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 88, in test ctx.executable(self, ctx) File "ydb/tests/olap/scenario/test_alter_compression.py", line 321, in scenario_alter_compression self._scenario( File "ydb/tests/olap/scenario/test_alter_compression.py", line 227, in _scenario assert self._read_data(ctx=ctx, tables=tables, column_names=column_names) File "ydb/tests/olap/scenario/test_alter_compression.py", line 197, in _read_data scan_result = sth.execute_scan_query( File "contrib/python/allure-python-commons/allure_commons/_allure.py", line 192, in impl return func(*a, **kw) File "ydb/tests/olap/scenario/helpers/scenario_tests_helper.py", line 461, in execute_scan_query for result_set in it: File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/ydb/py3/ydb/table.py", line 1181, in lambda resp: _wrap_scan_query_response(resp, self._table_client_settings), File "contrib/python/ydb/py3/ydb/table.py", line 953, in _wrap_scan_query_response return ScanQueryResult(response.result, table_client_settings) File "contrib/python/ydb/py3/ydb/table.py", line 925, in __init__ self.result_set = convert.ResultSet.from_message(self._result.result_set, table_client_settings) File "contrib/python/ydb/py3/ydb/convert.py", line 375, in from_message v_type = value.WhichOneof("value") File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) contrib/python/pytest/py3/_pytest/main.py:306: PluggyTeardownRaisedWarning: A plugin raised an exception during an old-style hookwrapper teardown. Plugin: terminalreporter, Hook: pytest_sessionfinish SeveralDaemonErrors: Daemon failed with message: Bad exit_code.. Process exit_code = -6. 
Stdout file name: /home/runner/.ya/build/build_root/2909/000a40/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_compression.py.TestAlterCompression.test.alter_compression/cluster/node_1/stdout Stderr file name: /home/runner/.ya/build/build_root/2909/000a40/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_compression.py.TestAlterCompression.test.alter_compression/cluster/node_1/stderr Stderr content: GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option VERIFY failed (2025-03-28T09:50:32.760258Z): event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;verification=status.GetStatus() != NKikimrProto::EReplyStatus::NODATA;fline=columnshard_impl.cpp:605;blob_id={ Blob: DS:2181038081:[72075186224037924:1:2:4:0:1552:0] Offset: 0 Size: 1552 };status=NODATA;error=cannot get blob: ;type=CS::GENERAL;task_id=16ba0e6e-bba11f0-b4e946e5-d1d82b1e;debug=type=CS::GENERAL;details=(remove=3;append=0;move=0original_granule=9;switch 3 portions:((portion_id:1;path_id:9;records_count:18;min_schema_snapshot:(plan_step=1743155110720;tx_id=281474976715682;);schema_version:4;level:0;column_size:1552;index_size:0;meta:((produced=INSERTED;));)(portion_id:2;path_id:9;records_count:25;min_schema_snapshot:(plan_step=1743155121260;tx_id=281474976715724;);schema_version:11;level:0;column_size:1896;index_size:0;meta:((produced=INSERTED;));)(portion_id:3;path_id:9;records_count:20;min_schema_snapshot:(plan_step=1743155133190;tx_id=281474976715757;);schema_version:13;level:0;column_size:1664;index_size:0;meta:((produced=INSERTED;));)); );; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed Current KQP shutdown state: spent 0 seconds, not started yet Current KQP shutdown state: spent 3.002065 seconds, 195 sessions to shutdown 0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1D9827B8 1. /-S/util/system/yassert.cpp:55: Panic @ 0x1D970A4A 2. /tmp//-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x1FFB21F5 3. /tmp//-S/ydb/core/tx/columnshard/columnshard_impl.cpp:605: DoOnError @ 0x3AF56DBA 4. /tmp//-S/ydb/core/tx/columnshard/blobs_reader/task.cpp:118: OnError @ 0x38CC5B00 5. /tmp//-S/ydb/core/tx/columnshard/blobs_reader/task.cpp:26: AddError @ 0x38CC4B75 6. /tmp//-S/ydb/core/tx/columnshard/blobs_reader/actor.cpp:18: Handle @ 0x38CD2ECD 7. /-S/ydb/core/tx/columnshard/blobs_reader/actor.h:27: StateWait @ 0x38CD4DEA 8. /tmp//-S/ydb/library/actors/core/actor.cpp:280: Receive @ 0x1FEE43AC 9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:269: Execute @ 0x1FF8D0A4 10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:460: operator() @ 0x1FF95DCE 11. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:512: ProcessExecutorPool @ 0x1FF95329 12. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:538: ThreadProc @ 0x1FF972BE 13. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1D986C04 14. /tmp//-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239: asan_thread_start @ 0x1D630288 15. ??:0: ?? @ 0x7F44F76FFAC2 16. ??:0: ?? 
@ 0x7F44F779184F
For more information see https://pluggy.readthedocs.io/en/stable/api_reference.html#pluggy.PluggyTeardownRaisedWarning
  config.hook.pytest_sessionfinish(
Traceback (most recent call last):
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
         ^^^^^^^^^^^^
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
                                ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 139, in _multicall
    raise exception.with_traceback(exception.__traceback__)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
          ^^^^^^^^^^^^^^^^^^^^^^^^^
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "contrib/python/pytest/py3/_pytest/main.py", line 306, in wrap_session
    config.hook.pytest_sessionfinish(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 156, in _multicall
    teardown[0].send(outcome)
  File "contrib/python/pytest/py3/_pytest/terminal.py", line 857, in pytest_sessionfinish
    outcome.get_result()
  File "contrib/python/pluggy/py3/pluggy/_result.py", line 100, in get_result
    raise exc.with_traceback(exc.__traceback__)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
          ^^^^^^^^^^^^^^^^^^^^^^^^^
  File "contrib/python/pytest/py3/_pytest/runner.py", line 108, in pytest_sessionfinish
    session._setupstate.teardown_exact(None)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 537, in teardown_exact
    raise exceptions[0]
  File "contrib/python/pytest/py3/_pytest/runner.py", line 526, in teardown_exact
    fin()
  File "contrib/python/pytest/py3/_pytest/fixtures.py", line 701, in <lambda>
    subrequest.node.addfinalizer(lambda: fixturedef.finish(request=subrequest))
                                         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1031, in finish
    raise exc
  File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1024, in finish
    func()
  File "contrib/python/pytest/py3/_pytest/fixtures.py", line 911, in _teardown_yield_fixture
    next(it)
  File "contrib/python/pytest/py3/_pytest/python.py", line 860, in xunit_setup_class_fixture
    _call_with_optional_argument(func, self.obj)
  File "contrib/python/pytest/py3/_pytest/python.py", line 777, in _call_with_optional_argument
    func(arg)
  File "/home/runner/.ya/build/build_root/2909/000a40/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 81, in teardown_class
    cls._ydb_instance.stop()
  File "/home/runner/.ya/build/build_root/2909/000a40/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 59, in stop
    self._temp_ydb_cluster.stop()
  File "ydb/tests/library/harness/kikimr_runner.py", line 494, in stop
    raise daemon.SeveralDaemonErrors(saved_exceptions)
ydb.tests.library.harness.daemon.SeveralDaemonErrors: Daemon failed with message: Bad exit_code..
Process exit_code = -6.
Stdout file name: /home/runner/.ya/build/build_root/2909/000a40/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_compression.py.TestAlterCompression.test.alter_compression/cluster/node_1/stdout
Stderr file name: /home/runner/.ya/build/build_root/2909/000a40/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_compression.py.TestAlterCompression.test.alter_compression/cluster/node_1/stderr
Stderr content:
GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option
VERIFY failed (2025-03-28T09:50:32.760258Z): event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;verification=status.GetStatus() != NKikimrProto::EReplyStatus::NODATA;fline=columnshard_impl.cpp:605;blob_id={ Blob: DS:2181038081:[72075186224037924:1:2:4:0:1552:0] Offset: 0 Size: 1552 };status=NODATA;error=cannot get blob: ;type=CS::GENERAL;task_id=16ba0e6e-bba11f0-b4e946e5-d1d82b1e;debug=type=CS::GENERAL;details=(remove=3;append=0;move=0original_granule=9;switch 3 portions:((portion_id:1;path_id:9;records_count:18;min_schema_snapshot:(plan_step=1743155110720;tx_id=281474976715682;);schema_version:4;level:0;column_size:1552;index_size:0;meta:((produced=INSERTED;));)(portion_id:2;path_id:9;records_count:25;min_schema_snapshot:(plan_step=1743155121260;tx_id=281474976715724;);schema_version:11;level:0;column_size:1896;index_size:0;meta:((produced=INSERTED;));)(portion_id:3;path_id:9;records_count:20;min_schema_snapshot:(plan_step=1743155133190;tx_id=281474976715757;);schema_version:13;level:0;column_size:1664;index_size:0;meta:((produced=INSERTED;));)); );;
ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed
Current KQP shutdown state: spent 0 seconds, not started yet
Current KQP shutdown state: spent 3.002065 seconds, 195 sessions to shutdown
0. /-S/util/system/yassert.cpp:83: InternalPanicImpl @ 0x1D9827B8
1. /-S/util/system/yassert.cpp:55: Panic @ 0x1D970A4A
2. /tmp//-S/ydb/library/actors/core/log.cpp:754: ~TVerifyFormattedRecordWriter @ 0x1FFB21F5
3. /tmp//-S/ydb/core/tx/columnshard/columnshard_impl.cpp:605: DoOnError @ 0x3AF56DBA
4. /tmp//-S/ydb/core/tx/columnshard/blobs_reader/task.cpp:118: OnError @ 0x38CC5B00
5. /tmp//-S/ydb/core/tx/columnshard/blobs_reader/task.cpp:26: AddError @ 0x38CC4B75
6. /tmp//-S/ydb/core/tx/columnshard/blobs_reader/actor.cpp:18: Handle @ 0x38CD2ECD
7. /-S/ydb/core/tx/columnshard/blobs_reader/actor.h:27: StateWait @ 0x38CD4DEA
8. /tmp//-S/ydb/library/actors/core/actor.cpp:280: Receive @ 0x1FEE43AC
9. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:269: Execute @ 0x1FF8D0A4
10. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:460: operator() @ 0x1FF95DCE
11. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:512: ProcessExecutorPool @ 0x1FF95329
12. /tmp//-S/ydb/library/actors/core/executor_thread.cpp:538: ThreadProc @ 0x1FF972BE
13. /-S/util/system/thread.cpp:244: ThreadProxy @ 0x1D986C04
14. /tmp//-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239: asan_thread_start @ 0x1D630288
15. ??:0: ?? @ 0x7F44F76FFAC2
16. ??:0: ?? @ 0x7F44F779184F
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...home/runner/.ya/build/build_root/2909/000a40/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/2909/000a40/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2909/000a40/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2909/000a40', '--source-root', '/home/runner/.ya/build/build_root/2909/000a40/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2909/000a40/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...home/runner/.ya/build/build_root/2909/000a40/ydb/tests/olap/scenario/ydb-tests-olap-scenario', '--basetemp', '/home/runner/.ya/build/build_root/2909/000a40/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2909/000a40/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2909/000a40', '--source-root', '/home/runner/.ya/build/build_root/2909/000a40/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2909/000a40/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {})
>> test_clickbench.py::TestClickbench::test_clickbench[38] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[39]
>> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup
|99.8%| [TM] {RESULT} ydb/tests/olap/scenario/py3test
>> test_result_limits.py::TestResultLimits::test_quotas[kikimr0]
>> ConsistentIndexRead::InteractiveTx
>> test_clickbench.py::TestClickbench::test_clickbench[39] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[40]
>> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[40] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[41]
>> KqpQuerySession::NoLocalAttach
>> test_public_api.py::TestRecursiveCreation::test_mkdir
>> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[41] [GOOD]
>> test_clickbench.py::TestClickbench::test_clickbench[42]
>> test_public_api.py::TestRecursiveCreation::test_mkdir [GOOD]
>> test_public_api.py::TestRecursiveCreation::test_create_table
>> KqpQueryService::ReplyPartLimitProxyNode
>> test_public_api.py::TestRecursiveCreation::test_create_table [GOOD]
>> S3PathStyleBackup::DisableVirtualAddressing
>> KqpQueryService::ReplyPartLimitProxyNode [GOOD]
>> NodeIdDescribe::HasDistribution
>> test_clickbench.py::TestClickbench::test_clickbench[42] [GOOD]
>> S3PathStyleBackup::DisableVirtualAddressing [GOOD]
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_clickbench.py::TestClickbench::test_clickbench[42] [GOOD]
>> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [GOOD]
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/s3_path_style/unittest >> S3PathStyleBackup::DisableVirtualAddressing [GOOD]
|99.8%| [TM] {RESULT} ydb/tests/functional/backup/s3_path_style/unittest
>> test_workload.py::TestYdbWorkload::test[row]
>> test_public_api.py::TestAttributes::test_create_table
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/py3test >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0]
2025-03-28 09:50:58,516 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-28 09:50:58,826 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
pid rss ref pdirt
736062 789M 771M 57.3M ydb-tests-fq-mem_alloc --basetemp /home/runner/.ya/build/build_root/2909/00104f/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules
918454 719M 15.3M 52.8M ├─ ydb-tests-fq-mem_alloc --basetemp /home/runner/.ya/build/build_root/2909/00104f/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-mo
918465 1.4G 1.4G 1.0G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2909/00104f/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/test_resul
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in <lambda>
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/fq/mem_alloc/test_result_limits.py", line 105, in test_quotas
    huge_client.wait_query_status(huge_query_id, fq.QueryMeta.COMPLETED)
  File "ydb/tests/tools/fq_runner/fq_client.py", line 307, in wait_query_status
    return self.wait_query(query_id, timeout, statuses=statuses).query.meta.status
  File "ydb/tests/tools/fq_runner/fq_client.py", line 302, in wait_query
    time.sleep(plain_or_under_sanitizer(0.5, 2))
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...d '['/home/runner/.ya/build/build_root/2909/00104f/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc', '--basetemp', '/home/runner/.ya/build/build_root/2909/00104f/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2909/00104f/ydb/tests/fq/mem_alloc/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2909/00104f', '--source-root', '/home/runner/.ya/build/build_root/2909/00104f/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2909/00104f/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/fq/mem_alloc', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/fq/mem_alloc', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...d '['/home/runner/.ya/build/build_root/2909/00104f/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc', '--basetemp', '/home/runner/.ya/build/build_root/2909/00104f/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2909/00104f/ydb/tests/fq/mem_alloc/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2909/00104f', '--source-root', '/home/runner/.ya/build/build_root/2909/00104f/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2909/00104f/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/fq/mem_alloc', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/fq/mem_alloc', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {})
|99.8%| [TM] {RESULT} ydb/tests/fq/mem_alloc/py3test
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true] [GOOD]
>> KqpQuerySession::NoLocalAttach [GOOD]
>> test_workload.py::TestYdbLogWorkload::test[row]
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_session/unittest >> KqpQuerySession::NoLocalAttach [GOOD]
|99.8%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_session/unittest
>> test_public_api.py::TestAttributes::test_create_table [GOOD]
>> test_public_api.py::TestAttributes::test_copy_table
>> test_public_api.py::TestAttributes::test_copy_table [GOOD]
>> test_public_api.py::TestAttributes::test_create_indexed_table
>> test_public_api.py::TestAttributes::test_create_indexed_table [GOOD]
>> test_public_api.py::TestAttributes::test_alter_table
>> test_public_api.py::TestAttributes::test_alter_table [GOOD]
>> test_public_api.py::TestAttributes::test_limits[attributes0] [GOOD]
>> test_public_api.py::TestAttributes::test_limits[attributes1] [GOOD]
>> test_public_api.py::TestAttributes::test_limits[attributes2] [GOOD]
>> test_public_api.py::TestAttributes::test_limits[attributes3] [GOOD]
>> test_public_api.py::TestAttributes::test_limits[attributes4]
>> Replication::Types
>> test_public_api.py::TestAttributes::test_limits[attributes4] [GOOD]
>> NodeIdDescribe::HasDistribution [GOOD]
>> Replication::Types [GOOD]
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_svc/unittest >> NodeIdDescribe::HasDistribution [GOOD]
|99.8%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_svc/unittest
>> test_public_api.py::TestDocApiTables::test_create_table
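The teardown chain in the ydb/tests/olap/scenario failure above (pytest xunit class fixture, then conftest.py teardown_class, then kikimr_runner.py stop(), then SeveralDaemonErrors) is a standard pytest pattern: finalizer exceptions are saved and re-raised during pytest_sessionfinish. A minimal runnable sketch of that shape, using hypothetical FakeCluster and TestWithCluster stand-ins rather than the real harness classes:

    # Sketch only: hypothetical stand-ins for the ydb.tests.library.harness classes.
    class SeveralDaemonErrors(Exception):
        """One exception aggregating every daemon that failed to stop cleanly."""
        def __init__(self, exceptions):
            super().__init__("\n".join(str(e) for e in exceptions))
            self.exceptions = exceptions

    class FakeCluster:
        def __init__(self, daemons):
            self._daemons = daemons

        def stop(self):
            saved_exceptions = []
            for daemon in self._daemons:
                try:
                    daemon.stop()  # raises if the process died abnormally, e.g. exit_code = -6 above
                except Exception as e:
                    saved_exceptions.append(e)
            if saved_exceptions:
                # Mirrors the kikimr_runner.py frame in the trace: raise once, carrying all failures.
                raise SeveralDaemonErrors(saved_exceptions)

    class TestWithCluster:
        @classmethod
        def setup_class(cls):
            cls._cluster = FakeCluster(daemons=[])

        @classmethod
        def teardown_class(cls):
            # pytest registers this through an xunit fixture finalizer; anything raised
            # here surfaces during pytest_sessionfinish, as the traceback above shows.
            cls._cluster.stop()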
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/replication/unittest >> Replication::Types [GOOD]
|99.8%| [TM] {RESULT} ydb/tests/functional/replication/unittest
>> Backup::UuidValue
>> test_encryption.py::TestEncryption::test_simple_encryption
>> Backup::UuidValue [GOOD]
>> test_workload.py::TestYdbKvWorkload::test[row]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/unittest >> Backup::UuidValue [GOOD]
Test command err:
Found S3 object: "ProducerUuidValueBackup/data_00.csv"
Found S3 object: "ProducerUuidValueBackup/metadata.json"
Found S3 object: "ProducerUuidValueBackup/scheme.pb"
|99.8%| [TM] {RESULT} ydb/tests/functional/backup/unittest
>> test_public_api.py::TestDocApiTables::test_create_table [GOOD]
>> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest]
>> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] [GOOD]
>> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None]
>> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] [GOOD]
>> test_public_api.py::TestDocApiTables::test_drop_table[None-None]
>> test_public_api.py::TestDocApiTables::test_drop_table[None-None] [GOOD]
>> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None]
>> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD]
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true] [GOOD]
>> test_workload.py::TestYdbWorkload::test
|99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD]
|99.9%| [TA] $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD]
|99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log}
>> Transfer::Main_ColumnTable_KeyColumnFirst
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test
|99.9%| [TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test
>> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD]
>> Transfer::Main_ColumnTable_KeyColumnFirst [GOOD]
>> Transfer::Main_ColumnTable_KeyColumnLast
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD]
>> Transfer::Main_ColumnTable_KeyColumnLast [GOOD]
>> Transfer::Main_ColumnTable_ComplexKey
>> Transfer::Main_ColumnTable_ComplexKey [GOOD]
>> Transfer::Main_ColumnTable_JsonMessage
>> test_workload.py::TestYdbWorkload::test [GOOD]
>> Transfer::Main_ColumnTable_JsonMessage [GOOD]
>> Transfer::Main_ColumnTable_NullableColumn
>> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD]
>> Transfer::Main_ColumnTable_NullableColumn [GOOD]
>> Transfer::Main_ColumnTable_Date
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/oltp_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/stress/oltp_workload/tests/py3test
>> Transfer::Main_ColumnTable_Date [GOOD]
>> Transfer::Main_ColumnTable_Double
>> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD]
>> Transfer::Main_ColumnTable_Double [GOOD]
>> Transfer::Main_ColumnTable_Utf8_Long
>> Transfer::Main_ColumnTable_Utf8_Long [GOOD]
>> Transfer::Main_MessageField_Partition
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD]
>> Transfer::Main_MessageField_Partition [GOOD]
>> Transfer::Main_MessageField_SeqNo
>> Transfer::Main_MessageField_SeqNo [GOOD]
>> Transfer::Main_MessageField_ProducerId
>> Transfer::Main_MessageField_ProducerId [GOOD]
>> Transfer::Main_MessageField_MessageGroupId
>> Transfer::Main_MessageField_MessageGroupId [GOOD]
>> Transfer::AlterLambda
>> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD]
>> Transfer::AlterLambda [GOOD]
>> Transfer::DropTransfer
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD]
|99.9%| [TA] $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log}
|99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log}
>> Transfer::DropTransfer [GOOD]
>> Transfer::CreateAndDropConsumer
>> test_workload.py::TestYdbKvWorkload::test[row] [GOOD]
>> test_workload.py::TestYdbKvWorkload::test[column]
>> test_encryption.py::TestEncryption::test_simple_encryption
>> Transfer::CreateAndDropConsumer [GOOD]
>> Transfer::DescribeError_OnLambdaCompilation
>> Transfer::DescribeError_OnLambdaCompilation [GOOD]
>> Transfer::CustomConsumer
>> test_workload.py::TestYdbLogWorkload::test[row] [GOOD]
>> test_workload.py::TestYdbLogWorkload::test[column]
>> test_workload.py::TestYdbWorkload::test[row] [GOOD]
>> test_workload.py::TestYdbWorkload::test[column]
>> Transfer::CustomConsumer [GOOD]
>> Transfer::CustomFlushInterval
>> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/py3test >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/functional/encryption/py3test
>> Transfer::CustomFlushInterval [GOOD]
>> Transfer::AlterFlushInterval
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true] [GOOD]
>> Transfer::AlterFlushInterval [GOOD]
>> Transfer::AlterBatchSize
|99.9%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log}
|99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log}
>> Transfer::AlterBatchSize [FAIL]
>> Transfer::CreateTransferSourceNotExists
>> Transfer::CreateTransferSourceNotExists [GOOD]
>> Transfer::CreateTransferSourceIsNotTopic
>> Transfer::CreateTransferSourceIsNotTopic [GOOD]
>> Transfer::CreateTransferRowTable
>> Transfer::CreateTransferRowTable [GOOD]
>> Transfer::CreateTransferTargetIsNotTable
>> Transfer::CreateTransferTargetIsNotTable [GOOD]
>> Transfer::CreateTransferTargetNotExists
>> Transfer::CreateTransferTargetNotExists [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/transfer/unittest >> Transfer::CreateTransferTargetNotExists [GOOD]
Test command err:
Attempt=19 count=0
Attempt=18 count=0
Attempt=17 count=0
Attempt=16 count=0
Attempt=15 count=1
Attempt=19 count=0
Attempt=18 count=0
Attempt=17 count=1
Attempt=19 count=0
Attempt=18 count=1
Attempt=19 count=0
Attempt=18 count=0
Attempt=17 count=1
Attempt=19 count=0
Attempt=18 count=0
Attempt=17 count=1
Attempt=19 count=0
Attempt=18 count=0
Attempt=17 count=1
Attempt=19 count=0
Attempt=18 count=0
Attempt=17 count=1
Attempt=19 count=0
Attempt=18 count=0
Attempt=17 count=1
Attempt=19 count=0
Attempt=18 count=0
Attempt=17 count=1
Attempt=19 count=0
Attempt=18 count=0
Attempt=17 count=1
Attempt=19 count=0
Attempt=18 count=0
Attempt=17 count=1
Attempt=19 count=0
Attempt=18 count=1
Attempt=19 count=0
Attempt=18 count=1
Attempt=19 count=0
Attempt=18 count=0
Attempt=17 count=1
Consumer name is '3b5c8633-86ef3184-6817381f-81524c29'
>>>>> ACTUAL: {
: Error: Error in target #1: Compilation failed: {
: Error: Failed to compile purecalc program subissue: {
: Error: Compile issues:
: Error: Type annotation, code: 1030
generated.sql:5:1: Error: At function: PersistableRepr, At function: SqlProject, At function: FlatMap
generated.sql:6:3: Error: At function: PersistableRepr, At function: SqlProject, At function: SqlProjectItem
generated.sql:2:28: Error: At function: Member
generated.sql:2:28: Error: Member not found: _unknown_field_for_lambda_compilation_error } subissue: {
: Error: Final yql: } } }
>>>>> EXPECTED: _unknown_field_for_lambda_compilation_error
Attempt=19 count=0
Attempt=18 count=0
Attempt=17 count=0
Attempt=16 count=0
Attempt=15 count=0
Attempt=14 count=1
Attempt=4 count=0
Attempt=3 count=0
Attempt=2 count=0
Attempt=1 count=0
Attempt=0 count=0
Attempt=19 count=0
Attempt=18 count=1
Attempt=19 count=0
Attempt=18 count=0
Attempt=17 count=0
Attempt=16 count=0
Attempt=15 count=0
Attempt=14 count=0
Attempt=13 count=0
Attempt=12 count=0
Attempt=11 count=0
Attempt=10 count=0
Attempt=9 count=0
Attempt=8 count=0
Attempt=7 count=0
Attempt=6 count=0
Attempt=5 count=0
Attempt=4 count=0
Attempt=3 count=0
Attempt=2 count=0
Attempt=1 count=0
Attempt=0 count=0
assertion failed at ydb/tests/functional/transfer/main.cpp:355, void (anonymous namespace)::MainTestCase::CheckResult(const TExpectations &): (attempt) Unable to wait transfer result
TBackTrace::Capture()+28 (0x1BDB3BC)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1CE1060)
??+0 (0x189A4B1)
NTestSuiteTransfer::TTestCaseAlterBatchSize::Execute_(NUnitTest::TTestContext&)+2297 (0x189B409)
std::__y1::__function::__func, void ()>::operator()()+280 (0x18A1F28)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1D0F566)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1CE7BD9)
NTestSuiteTransfer::TCurrentTest::Execute()+1244 (0x18A0C1C)
NUnitTest::TTestFactory::Execute()+2438 (0x1CE94A6)
NUnitTest::RunMain(int, char**)+5213 (0x1D09A4D)
??+0 (0x7FCDC8E2FD90)
__libc_start_main+128 (0x7FCDC8E2FE40)
_start+41 (0x183C029)
>>>>> ACTUAL: {
: Error: Discovery error: local/Topic_8213434698723844229: SCHEME_ERROR ({
: Error: Path not found }) }
>>>>> EXPECTED: Discovery error: local/Topic_
>>>>> ACTUAL: {
: Error: Discovery error: local/Topic_8213434698723844230: UNSUPPORTED ({
: Error: Unsupported entry type: Table }) }
>>>>> EXPECTED: Discovery error: local/Topic_
>>>>> ACTUAL: {
: Error: Error in target #1: Only column tables are supported as transfer targets }
>>>>> EXPECTED: Only column tables are supported as transfer targets
>>>>> ACTUAL: {
: Error: Error in target #1: Only column tables are supported as transfer targets }
>>>>> EXPECTED: Only column tables are supported as transfer targets
>>>>> ACTUAL: {
: Error: Error in target #1: Create dst error: StatusPathDoesNotExist, The target table `/local/Table_8213434698723844233` does not exist }
>>>>> EXPECTED: The target table `/local/Table_8213434698723844233` does not exist
|99.9%| [TM] {RESULT} ydb/tests/functional/transfer/unittest
>> ConsistentIndexRead::InteractiveTx [GOOD]
>> KqpExtTest::SecondaryIndexSelectUsingScripting
>> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD]
>> test_workload.py::TestYdbKvWorkload::test[column] [GOOD]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_indexes/unittest >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_indexes/unittest
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/kv/tests/py3test >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/stress/kv/tests/py3test
>> test_tpch.py::TestTpchS1::test_tpch[5] [FAIL]
>> test_tpch.py::TestTpchS1::test_tpch[6]
>> test_tpch.py::TestTpchS1::test_tpch[6] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[7]
>> test_workload.py::TestYdbLogWorkload::test[column] [GOOD]
>> test_workload.py::TestYdbWorkload::test[column] [GOOD]
>> test_workload.py::TestYdbLogWorkload::test[column] [FAIL]
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/log/tests/py3test >> test_workload.py::TestYdbLogWorkload::test[column] [FAIL]
|99.9%| [TM] {RESULT} ydb/tests/stress/log/tests/py3test
|99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/simple_queue/tests/py3test >> test_workload.py::TestYdbWorkload::test[column] [GOOD]
|99.9%| [TM] {RESULT} ydb/tests/stress/simple_queue/tests/py3test
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_tpch.py::TestTpchS1::test_tpch[7]
2025-03-28 09:57:43,889 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-28 09:57:44,355 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
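The Attempt=N count=M countdown in the transfer test output above is a poll-until-visible loop: the test re-reads the target table until the expected rows appear, and Transfer::AlterBatchSize apparently failed because it exhausted its attempts at Attempt=0 with count=0. A minimal sketch of that pattern, with a hypothetical count_rows callable (the real test lives in ydb/tests/functional/transfer/main.cpp, in C++):

    import time

    def check_result(count_rows, expected_count, attempts=20, delay_seconds=1.0):
        # Counts down like the log above: Attempt=19, 18, ..., 0.
        for attempt in range(attempts - 1, -1, -1):
            count = count_rows()
            print(f"Attempt={attempt} count={count}")
            if count >= expected_count:
                return
            time.sleep(delay_seconds)
        # Attempts exhausted: the same failure mode as the assertion above.
        raise AssertionError("Unable to wait transfer result")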
Process tree before termination: pid rss ref pdirt 864545 625M 603M 544M ydb-tests-functional-tpc-medium --basetemp /home/runner/.ya/build/build_root/2909/001344/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest 864642 5.8G 5.6G 5.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2909/001344/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/t 865365 5.4G 5.1G 4.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/2909/001344/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/t 952147 269M 267M 206M └─ ydb -e grpc://localhost:4516 -d /local/test_db workload tpch --path olap_yatests/tpch/s1 run --json /home/runner/.ya/build/build_root/2909/001344/ydb/tests/functional/tp Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File 
"contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/load/lib/tpch.py", line 48, in test_tpch self.run_workload_test(self._get_path(), query_num) File "ydb/tests/olap/load/lib/conftest.py", line 286, in run_workload_test result = YdbCliHelper.workload_run( File "ydb/tests/olap/lib/ydb_cli.py", line 310, in workload_run ).process() File "ydb/tests/olap/lib/ydb_cli.py", line 283, in process process = yatest.common.process.execute(self._get_cmd(), check_exit_code=False) File "library/python/testing/yatest_common/yatest/common/process.py", line 656, in execute res.wait(check_exit_code, timeout, on_timeout) File "library/python/testing/yatest_common/yatest/common/process.py", line 400, in wait _wait() File "library/python/testing/yatest_common/yatest/common/process.py", line 335, in _wait pid, sts, rusage = os.wait4(self._process.pid, 0) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Thread 0x00007f08a0054640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/ydb/py3/ydb/pool.py", line 274 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007f08d137d940 (most recent call first): File "contrib/tools/python3/Lib/subprocess.py", line 2019 in _try_wait File "contrib/tools/python3/Lib/subprocess.py", line 2061 in _wait File "contrib/tools/python3/Lib/subprocess.py", line 1266 in wait File "library/python/testing/yatest_common/yatest/common/process.py", line 370 in _wait File "library/python/testing/yatest_common/yatest/common/process.py", line 400 in wait File "library/python/testing/yatest_common/yatest/common/process.py", line 656 in execute File "ydb/tests/olap/lib/ydb_cli.py", line 283 in process File "ydb/tests/olap/lib/ydb_cli.py", line 310 in workload_run File "ydb/tests/olap/load/lib/conftest.py", line 286 in run_workload_test File "ydb/tests/olap/load/lib/tpch.py", line 48 in test_tpch File "library/python/pytest/plugins/ya.py", line 563 in pytest_pyfunc_call File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/python.py", line 1805 in runtest 
File "contrib/python/pytest/py3/_pytest/runner.py", line 169 in pytest_runtest_call File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/runner.py", line 262 in File "contrib/python/pytest/py3/_pytest/runner.py", line 341 in from_call File "contrib/python/pytest/py3/_pytest/runner.py", line 261 in call_runtest_hook File "contrib/python/pytest/py3/_pytest/runner.py", line 222 in call_and_report File "contrib/python/pytest/py3/_pytest/runner.py", line 133 in runtestprotocol File "contrib/python/pytest/py3/_pytest/runner.py", line 114 in pytest_runtest_protocol File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 350 in pytest_runtestloop File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 325 in _main File "contrib/python/pytest/py3/_pytest/main.py", line 271 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 318 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169 in main File "library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: .../build/build_root/2909/001344/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2909/001344/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2909/001344', '--source-root', '/home/runner/.ya/build/build_root/2909/001344/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2909/001344/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/tpc/medium', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/functional/tpc/medium', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 
'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_tpch.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: ((".../build/build_root/2909/001344/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/2909/001344/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/2909/001344', '--source-root', '/home/runner/.ya/build/build_root/2909/001344/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/2909/001344/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/tpc/medium', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--dep-root', 'ydb/tests/functional/tpc/medium', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_tpch.py']' stopped by 600 seconds timeout",), {}) 2025-03-28 09:58:14,887 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2025-03-28 09:58:14,887 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores |99.9%| [TA] $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... 
results_accumulator.log}
|99.9%| CLEANING BUILD ROOT
Number of suites skipped by size: 86
ydb/core/viewer/tests [size:medium]
------ sole chunk ran 27 tests (total:38.95s - test:38.54s canon:0.07s)
[fail] test.py::test_viewer_nodes [default-linux-x86_64-release-asan] (0.12s)
Test results differ from canonical:
test_result['/Root']['Nodes'][0]['Peers'][0]['ScopeId']['X2']:
- 6
+ 2
test_result['/Root/dedicated_db']['Nodes'][0]['ReversePeers'][0]['ScopeId']['X2']:
- 6
+ 2
test_result['/Root/dedicated_db']['Nodes'][0]['ReversePeers'][1]['ScopeId']['X2']:
- 6
+ 2
test_result['/Root/serverless_db']['Nodes'][0]['Peers'][1]['ScopeId']['X2']:
- 6
+ 2
test_result['/Root/shared_db']['Nodes'][0]['Peers'][1]['ScopeId']['X2']:
- 6
+ 2
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.test_viewer_nodes.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff
[fail] test.py::test_viewer_describe [default-linux-x86_64-release-asan] (0.07s)
Test results differ from canonical:
test_result['/Root/dedicated_db']['PathDescription']['DomainDescription']['PathId_Depricated']:
- 6
+ 2
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.test_viewer_describe.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff
[fail] test.py::test_viewer_tenantinfo [default-linux-x86_64-release-asan] (0.03s)
Test results differ from canonical:
test_result['TenantInfo'][1]['Id']:
- 72075186232723360-6
?                   ^
+ 72075186232723360-2
?                   ^
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.test_viewer_tenantinfo.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff
[fail] test.py::test_viewer_tenantinfo_db [default-linux-x86_64-release-asan] (0.09s)
Test results differ from canonical:
test_result['/Root/dedicated_db']['TenantInfo'][0]['Id']:
- 72075186232723360-6
?                   ^
+ 72075186232723360-2
?                   ^
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.test_viewer_tenantinfo_db.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff
------ FAIL: 23 - GOOD, 4 - FAIL ydb/core/viewer/tests
ydb/tests/fq/mem_alloc [size:medium]
------ sole chunk ran 12 tests (total:631.57s - recipes:10.05s test:600.11s recipes:4.78s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_result_limits.py::TestResultLimits::test_many_rows (good) duration: 233.10s
test_alloc_default.py::TestAlloc::test_node_limit[kikimr0] (good) duration: 80.27s
test_result_limits.py::TestResultLimits::test_large_row (good) duration: 63.34s
test_alloc_default.py::TestAlloc::test_default_limits[kikimr0] (good) duration: 38.00s
test_alloc_default.py::TestAlloc::test_hard_limit[kikimr0] (good) duration: 37.14s
test_alloc_default.py::TestAlloc::test_alloc_and_free[kikimr0] (good) duration: 36.15s
test_alloc_default.py::TestAlloc::test_default_delta[kikimr0] (good) duration: 35.68s
test_alloc_default.py::TestAlloc::test_up_down[kikimr0] (good) duration: 32.39s
test_result_limits.py::TestResultLimits::test_quotas[kikimr0] (timeout) duration: 27.40s
test_alloc_default.py::TestAlloc::test_mkql_not_increased[kikimr0] (good) duration: 25.26s
2 tests were not launched inside chunk.
Info: Test run has exceeded 8.0G (8388608K) memory limit with 9.0G (9432172K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
728971 44.8M 43.6M 6.4M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
729200 35.9M 23.9M 11.6M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
736062 791M 772M 455M │ └─ ydb-tests-fq-mem_alloc --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest
897628 710M 15.3M 438M │ ├─ ydb-tests-fq-mem_alloc --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct
897780 5.5G 5.5G 5.1G │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/test
731542 2.3G 2.3G 1.8G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/ydb_data_l
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/stderr
[timeout] test_result_limits.py::TestResultLimits::test_quotas[kikimr0] [default-linux-x86_64-release-asan] (27.40s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff/test_result_limits.py.TestResultLimits.test_quotas.kikimr0.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/mem_alloc/test-results/py3test/testing_out_stuff
------ TIMEOUT: 9 - GOOD, 2 - NOT_LAUNCHED, 1 - TIMEOUT
ydb/tests/fq/mem_alloc ydb/tests/fq/multi_plane [size:medium] nchunks:4 ------ [test_retry.py] chunk ran 2 tests (total:648.55s - recipes:13.84s test:600.04s recipes:3.68s) Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal List of the tests involved in the launch: test_retry.py::TestRetry::test_low_rate[kikimr0] (timeout) duration: 398.20s test_retry.py::TestRetry::test_fail_first[kikimr0] (good) duration: 227.64s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/multi_plane/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/stderr [timeout] test_retry.py::TestRetry::test_low_rate[kikimr0] [default-linux-x86_64-release-asan] (398.20s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff/test_retry.py.TestRetry.test_low_rate.kikimr0.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/multi_plane/test-results/py3test/testing_out_stuff ------ TIMEOUT: 5 - GOOD, 1 - TIMEOUT ydb/tests/fq/multi_plane ------ [test_discovery.py] chunk ran 3 tests (total:164.99s - test:164.93s) Info: Test run has exceeded 10.0G (10485760K) memory limit with 13.8G (14503800K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 831759 44.8M 44.4M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 831791 33.6M 21.7M 9.1M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 831793 834M 837M 759M └─ ydb-tests-functional-api --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte 849294 1.4G 1.4G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 849295 1.4G 1.4G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 849328 1.4G 1.4G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 849330 1.4G 1.4G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 849332 1.4G 1.4G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 849333 1.4G 1.4G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 849334 1.4G 1.4G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 849336 1.4G 1.4G 1.0G ├─ ydbd server --suppress-version-check 
--yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 849348 1.4G 1.4G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/te 853524 128M 0b 0b └─ ydbd --server=grpc://localhost:14928 admin blobstorage config invoke --proto=Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/home/runn Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/stderr ydb/tests/functional/hive [size:medium] nchunks:80 ------ [test_drain.py 0/20] chunk ran 1 test (total:73.65s - setup:0.03s test:73.40s) [fail] test_drain.py::TestHive::test_drain_on_stop [default-linux-x86_64-release-asan] (67.16s) ydb/tests/functional/hive/test_drain.py:93: in test_drain_on_stop wait_tablets_are_active( ydb/tests/library/common/delayed.py:151: in wait_tablets_are_active predicate(raise_error=True) ydb/tests/library/common/delayed.py:141: in predicate raise AssertionError( E AssertionError: E ############################## E 0 seconds passed, 66 tablet(s) are not active. Inactive tablets are (first 10 entries): (72075186224037903: None) (72075186224038616: 6) (72075186224038617: None) (72075186224038624: 4) (72075186224038633: None) (72075186224038648: 5) (72075186224038661: None) (72075186224038688: 6) (72075186224038696: 4) (72075186224038705: None). Additional info is empty E ############################## Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff/test_drain.py.TestHive.test_drain_on_stop.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/hive/test-results/py3test/testing_out_stuff ------ FAIL: 6 - GOOD, 1 - FAIL ydb/tests/functional/hive ydb/tests/functional/postgresql [size:medium] ------ sole chunk ran 14 tests (total:98.85s - test:98.64s) [fail] test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [default-linux-x86_64-release-asan] (42.89s) teardown failed: ydb/tests/functional/postgresql/test_postgres.py:77: in teardown_class cls.cluster.stop() ydb/tests/library/harness/kikimr_runner.py:494: in stop raise daemon.SeveralDaemonErrors(saved_exceptions) E ydb.tests.library.harness.daemon.SeveralDaemonErrors: Daemon failed with message: Bad exit_code.. E Process exit_code = 100. 
E Stdout file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.horology/cluster/node_1/stdout E Stderr file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.horology/cluster/node_1/stderr E Stderr content: E E GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option E Current KQP shutdown state: spent 0 seconds, not started yet E warning: address range table at offset 0x10c0 has a premature terminator entry at offset 0x10d0 E E ================================================================= E ==702295==ERROR: LeakSanitizer: detected memory leaks E E Indirect leak of 26544 byte(s) in 7 object(s) allocated from: E #0 0x1d6662cd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 E #1 0x468183df in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:12 E #2 0x468183df in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #3 0x4680a8a9 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #4 0x4680a8a9 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> const&, TBasicString> const&, NYql::TYtGatewayConfig const*, TIntrusivePtr>, std::__y1::shared_ptr const&, std::__y1::shared_ptr> const, TIntrusivePtr>>, TBasicString>, THash>>, TSelect1st, TEqualTo>>, std::__y1::allocator>>>::reserve(unsigned long) /-S/util/generic/hash_table.h:1330:13 E #7 0x465e3f6b in insert_unique >, TIntrusivePtr > > > /-S/util/generic/hash_table.h:679:9 E #8 0x465e3f6b in insert /-S/util/generic/hash.h:153:20 E #9 0x465e3f6b in NYql::NCommon::TSettingDispatcher::TSettingHandlerImpl& NYql::NCommon::TSettingDispatcher::AddSetting(TBasicString> const&, NYql::NCommon::TConfSetting&) /-S/yql/essentials/providers/common/config/yql_dispatch.h:344:23 E #10 0x465c7619 in NYql::TYtConfiguration::TYtConfiguration(NYql::TTypeAnnotationContext&) /-S/yt/yql/providers/yt/common/yql_yt_settings.cpp:466:5 E #11 0x468183f0 in TYtVersionedConfiguration /-S/yt/yql/providers/yt/common/yql_yt_settings.h:372:11 E #12 0x468183f0 in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:16 E #13 0x468183f0 in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #14 0x4680a8a9 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #15 0x4680a8a9 in NYql::CreateYtNativeState(TIntrusivePtr>, TBas... 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff/test_postgres.py.TestPostgresSuite.test_postgres_suite.float8.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/postgresql/test-results/py3test/testing_out_stuff ------ FAIL: 13 - GOOD, 1 - FAIL ydb/tests/functional/postgresql ydb/tests/functional/sqs/cloud [size:medium] nchunks:40 ------ [34/40] chunk ran 2 tests (total:115.44s - test:115.31s) [fail] test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [default-linux-x86_64-release-asan] (78.62s) ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:829: in test_yc_events_processor assert len(lines) >= 2, "Got only %s event lines after all attempts" % len(lines) E AssertionError: Got only 0 event lines after all attempts E assert 0 >= 2 E + where 0 = len([]) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_yc_events_processor.tables_format_v0.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff ------ FAIL: 75 - GOOD, 1 - FAIL ydb/tests/functional/sqs/cloud ------ sole chunk ran 1 test (total:116.15s - test:115.99s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 14.1G (14763580K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 703334 44.8M 44.4M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 703494 34.1M 22.2M 9.7M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 703496 1.0G 1.0G 978M └─ functional-sqs-merge_split_common_table-std --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini - 705768 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 705779 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 705791 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 705792 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 705794 1.5G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 705795 1.5G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 705796 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results 705797 1.6G 1.6G 1.1G └─ ydbd server --suppress-version-check 
--yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff/stderr ydb/tests/functional/tpc/medium [size:medium] nchunks:2 ------ [test_tpch.py] chunk ran 22 tests (total:631.15s - test:600.07s) Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal List of the tests involved in the launch: test_tpch.py::TestTpchS1::test_tpch[5] (fail) duration: 313.34s test_tpch.py::TestTpchS1::test_tpch[7] (timeout) duration: 137.30s test_tpch.py::TestTpchS1::test_tpch[1] (good) duration: 124.61s test_tpch.py::TestTpchS1::test_tpch[2] (good) duration: 18.34s test_tpch.py::TestTpchS1::test_tpch[3] (good) duration: 15.48s test_tpch.py::TestTpchS1::test_tpch[4] (good) duration: 13.72s test_tpch.py::TestTpchS1::test_tpch[6] (good) duration: 3.69s 15 tests were not launched inside chunk. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/stderr [fail] test_tpch.py::TestTpchS1::test_tpch[5] [default-linux-x86_64-release-asan] (313.34s) ydb/tests/olap/load/lib/tpch.py:48: in test_tpch self.run_workload_test(self._get_path(), query_num) ydb/tests/olap/load/lib/conftest.py:298: in run_workload_test self.process_query_result(result, query_num, qparams.iterations, True) ydb/tests/olap/load/lib/conftest.py:245: in process_query_result raise exc E Failed: Iteration 0:
: Error: Table /local/test_db/olap_yatests/tpch/s1/supplier (shard 72075186224038250) scan failed, reason: cannot build metadata/Snapshot too old: {1743155448000:max}. CS min read snapshot: {1743155449000:max}. now: 2025-03-28T09:55:49.255347Z, code: 2017
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch.py.TestTpchS1.test_tpch.5.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff
[timeout] test_tpch.py::TestTpchS1::test_tpch[7] [default-linux-x86_64-release-asan] (137.30s) Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch.py.TestTpchS1.test_tpch.7.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff
------ TIMEOUT: 48 - GOOD, 1 - FAIL, 15 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/functional/tpc/medium
ydb/tests/olap/column_family/compression [size:medium]
------ sole chunk ran 2 tests (total:626.68s - test:600.03s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
alter_compression.py::TestAlterCompression::test_all_supported_compression (timeout) duration: 621.66s
alter_compression.py::TestAlterCompression::test_availability_data test was not launched inside chunk.
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/stderr
[timeout] alter_compression.py::TestAlterCompression::test_all_supported_compression [default-linux-x86_64-release-asan] (621.66s) Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAlterCompression.test_all_supported_compression.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/column_family/compression
ydb/tests/olap/s3_import [size:medium]
------ sole chunk ran 1 test (total:622.41s - test:600.04s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_tpch_import.py::TestS3TpchImport::test_import_and_export (timeout) duration: 617.59s
Info: Test run has exceeded 8.0G (8388608K) memory limit with 8.8G (9217076K) used.
This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
695162 44.8M 43.0M 6.5M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
695322 38.7M 26.4M 14.5M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
695331 713M 697M 635M └─ ydb-tests-olap-s3_import --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte
698517 7.7G 7.5G 7.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/te
700932 642M 585M 559M └─ moto_server s3 --port 4766
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/stderr
[timeout] test_tpch_import.py::TestS3TpchImport::test_import_and_export [default-linux-x86_64-release-asan] (617.59s) Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - TIMEOUT ydb/tests/olap/s3_import
ydb/tests/olap/scenario [size:medium]
------ sole chunk ran 9 tests (total:616.80s - setup:0.02s test:600.03s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_alter_compression.py::TestAlterCompression::test[alter_compression] (timeout) duration: 348.68s
test_alter_tiering.py::TestAlterTiering::test[many_tables] (fail) duration: 201.62s
test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] (good) duration: 52.63s
6 tests were not launched inside chunk.
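The REQUIREMENTS(ram:X) hint above refers to a macro in the test module's ya.make. A minimal sketch of the relevant fragment for a py3test suite follows; the listed source file and the 16 GB figure are illustrative assumptions, not values taken from this run:

    PY3TEST()

    TEST_SRCS(
        test_tpch_import.py  # hypothetical; list the suite's real sources here
    )

    SIZE(MEDIUM)

    # Assumed bump: request a 16 GB RAM budget instead of the default 8 GB
    # that this run exceeded (8.0G limit vs 8.8G used).
    REQUIREMENTS(ram:16)

    END()

Raising the budget only accommodates the footprint rather than reducing it; the process tree above shows most of the memory sitting in a single ydbd server process, which is usually the thing to investigate first.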
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr [fail] test_alter_tiering.py::TestAlterTiering::test[many_tables] [default-linux-x86_64-release-asan] (201.62s) ydb/tests/olap/scenario/conftest.py:88: in test ctx.executable(self, ctx) ydb/tests/olap/scenario/test_alter_tiering.py:353: in scenario_many_tables threads.start_and_wait_all() ydb/tests/olap/common/thread_helper.py:49: in start_and_wait_all self.join_all() ydb/tests/olap/common/thread_helper.py:45: in join_all thread.join(timeout=timeout) ydb/tests/olap/common/thread_helper.py:18: in join raise self.exc ydb/tests/olap/common/thread_helper.py:11: in run self.ret = self._target(*self._args, **self._kwargs) ydb/tests/olap/scenario/test_alter_tiering.py:242: in _loop_scan sth.execute_scan_query( ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:461: in execute_scan_query for result_set in it: contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__ return self._next() contrib/python/ydb/py3/ydb/_utilities.py:164: in _next res = self.wrapper(next(self.it)) contrib/python/ydb/py3/ydb/table.py:1181: in lambda resp: _wrap_scan_query_response(resp, self._table_client_settings), contrib/python/ydb/py3/ydb/table.py:952: in _wrap_scan_query_response issues._process_response(response) contrib/python/ydb/py3/ydb/issues.py:225: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.GenericError: message: "Scan failed at tablet 72075186224037896, reason: task_error:Error reading blob range for data: { Blob: DS:4294967295:[72075186224037896:1:1:255:1:6736:0] Offset: 6064 Size: 240 }, error: cannot get blob: curlCode: 28, Timeout was reached, status: ERROR" issue_code: 2013 severity: 1 (server_code: 400080) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_tiering.py.TestAlterTiering.test.many_tables.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff [timeout] test_alter_compression.py::TestAlterCompression::test[alter_compression] [default-linux-x86_64-release-asan] (348.68s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_compression.py.TestAlterCompression.test.alter_compression.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - GOOD, 1 - FAIL, 6 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/scenario ydb/tests/olap/ttl_tiering [size:medium] nchunks:6 ------ [data_migration_when_alter_ttl.py] chunk ran 1 test (total:622.77s - test:600.03s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 618.16s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-release-asan] (618.16s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ [ttl_delete_s3.py] chunk ran 3 tests (total:617.41s - test:600.06s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change (timeout) duration: 612.97s 2 tests were not launched inside chunk. Info: Test run has exceeded 8.0G (8388608K) memory limit with 9.4G (9862008K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 354891 44.9M 44.7M 6.5M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 355094 36.6M 25.1M 12.5M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 355104 799M 795M 716M └─ ydb-tests-olap-ttl_tiering --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc 356763 4.3G 4.3G 3.7G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ 502431 4.3G 4.3G 3.7G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stu Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [timeout] ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [default-linux-x86_64-release-asan] (612.97s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_data_unchanged_after_ttl_change.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ TIMEOUT: 3 - GOOD, 2 - NOT_LAUNCHED, 2 - TIMEOUT ydb/tests/olap/ttl_tiering ydb/tests/stress/log/tests [size:medium] ------ sole chunk ran 2 tests (total:321.75s - setup:0.02s test:321.66s) [fail] test_workload.py::TestYdbLogWorkload::test[column] [default-linux-x86_64-release-asan] (155.59s) teardown failed: ydb/tests/stress/log/tests/test_workload.py:41: in teardown_class cls.cluster.stop() ydb/tests/library/harness/kikimr_runner.py:494: in stop raise daemon.SeveralDaemonErrors(saved_exceptions) E ydb.tests.library.harness.daemon.SeveralDaemonErrors: Daemon failed with message: Bad exit_code.. E Process exit_code = 100. 
E Stdout file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.row/cluster/node_1/stdout E Stderr file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.row/cluster/node_1/stderr E Stderr content: E E GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option E Current KQP shutdown state: spent 0 seconds, not started yet E warning: address range table at offset 0x10c0 has a premature terminator entry at offset 0x10d0 E E ================================================================= E ==926724==ERROR: LeakSanitizer: detected memory leaks E E Indirect leak of 53088 byte(s) in 14 object(s) allocated from: E #0 0x1d6662cd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 E #1 0x468183df in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:12 E #2 0x468183df in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #3 0x4680a8a9 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #4 0x4680a8a9 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> const&, TBasicString> const&, NYql::TYtGatewayConfig const*, TIntrusivePtr>, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/yt/yql/providers/yt/provider ..[snippet truncated].. llocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 E #3 0x465fb076 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 E #4 0x465fb076 in initialize_dynamic /-S/util/generic/hash_table.h:239:35 E #5 0x465fb076 in initialize_buckets_dynamic /-S/util/generic/hash_table.h:912:17 E #6 0x465fb076 in THashTable> const, TIntrusivePtr>>, TBasicString>, THash>>, TSelect1st, TEqualTo>>, std::__y1::allocator>>>::reserve(unsigned long) /-S/util/generic/hash_table.h:1330:13 E #7 0x465e3f6b in insert_unique >, TIntrusivePtr > > > /-S/util/generic/hash_table.h:679:9 E #8 0x465e3f6b in insert /-S/util/generic/hash.h:153:20 E #9 0x465e3f6b in NYql::NCommon::TSettingDispatcher::TSettingHandlerImpl& NYql::NCommon::TSettingDispatcher::AddSetting(TBasicString> const&, NYql::NCommon::TConfSetting&) /-S/yql/essentials/providers/common/config/yql_dispatch.h:344:23 E #10 0x465c7619 in NYql::TYtConfiguration::TYtConfiguration(NYql::TTypeAnnotationContext&) /-S/yt/yql/providers/yt/common/yql_yt_settings.cpp:466:5 E #11 0x468183f0 in TYtVersionedConfiguration /-S/yt/yql/providers/yt/common/yql_yt_settings.h:372:11 E #12 0x468183f0 in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:16 E #13 0x468183f0 in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #14 0x4680a8a9 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #15 0x4680a8a9 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> const&... 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.column.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff ------ FAIL: 1 - GOOD, 1 - FAIL ydb/tests/stress/log/tests ydb/tests/stress/olap_workload/tests [size:medium] ------ sole chunk ran 1 test (total:26.93s - test:26.88s) Test failed with 1 exit code. See logs for more info Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/stderr [crashed] test_workload.py::TestYdbWorkload::test [default-linux-x86_64-release-asan] (0.00s) Test crashed Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbWorkload.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff ------ FAIL: 1 - CRASHED ydb/tests/stress/olap_workload/tests ------ [test_disk.py 0/10] chunk ran 1 test (total:44.88s - test:44.82s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 13.3G (13919864K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 293984 44.8M 44.8M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 294084 33.2M 21.4M 8.8M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 294090 758M 761M 679M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc 295998 1.4G 1.4G 991M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 296001 1.4G 1.4G 984M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 296002 1.4G 1.4G 986M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 296006 1.4G 1.4G 992M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 296009 1.4G 1.4G 982M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 296012 1.4G 1.4G 984M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 296015 1.4G 1.4G 1005M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 296017 1.4G 1.4G 1007M ├─ ydbd server --suppress-version-check 
--yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 296022 1.4G 1.4G 991M └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr ------ [test_tablet.py 0/10] chunk ran 1 test (total:92.51s - test:92.36s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 13.8G (14473976K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 293970 44.8M 44.8M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 294074 33.9M 22.0M 9.4M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 294087 840M 844M 761M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc 296005 1.7G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 296011 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 296016 1.6G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 296020 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 296026 1.7G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 296029 1.6G 1.4G 1000M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 296034 1.6G 1.4G 967M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 296037 1.6G 1.4G 1002M └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr ydb/core/health_check/ut [size:medium] nchunks:10 ------ [3/10] chunk ran 5 tests (total:127.84s - setup:0.02s test:127.59s) [fail] THealthCheckTest::LayoutIncorrect 
[default-linux-x86_64-release-asan] (9.79s) assertion failed at ydb/core/health_check/health_check_ut.cpp:2275, virtual void NKikimr::NTestSuiteTHealthCheckTest::TTestCaseLayoutIncorrect::Execute_(NUnitTest::TTestContext &): (issue_log.message() == "Database has storage issues") failed: ("Database has multiple issues" != Database has storage issues) , with diff: ("|)Database has (mul|s)t(ipl|orag)e issues("|) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 NKikimr::NTestSuiteTHealthCheckTest::TTestCaseLayoutIncorrect::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/health_check/health_check_ut.cpp:0:17 operator() at /-S/ydb/core/health_check/health_check_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/health_check/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/health_check/ut/test-results/unittest/testing_out_stuff/THealthCheckTest.LayoutIncorrect.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/health_check/ut/test-results/unittest/testing_out_stuff/THealthCheckTest.LayoutIncorrect.out ------ FAIL: 47 - GOOD, 1 - FAIL ydb/core/health_check/ut ydb/core/keyvalue/ut_trace [size:medium] nchunks:5 ------ [0/5] chunk ran 1 test (total:6.34s - test:6.30s) [fail] TKeyValueTracingTest::ReadHuge [default-linux-x86_64-release-asan] (2.30s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.out ------ [1/5] chunk ran 1 test (total:6.89s - test:6.82s) [fail] TKeyValueTracingTest::ReadSmall [default-linux-x86_64-release-asan] (2.17s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.out ------ [2/5] chunk ran 1 test (total:7.09s - setup:0.02s test:7.04s) [fail] TKeyValueTracingTest::WriteHuge [default-linux-x86_64-release-asan] (1.95s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.out ------ [3/5] chunk ran 1 test (total:6.75s - test:6.70s) [fail] TKeyValueTracingTest::WriteSmall [default-linux-x86_64-release-asan] (1.97s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.out ------ FAIL: 4 - FAIL ydb/core/keyvalue/ut_trace ydb/core/kqp/ut/cost [size:medium] nchunks:50 ------ [12/50] chunk ran 1 test (total:19.85s - test:19.82s) [crashed] KqpCost::OlapWriteRow [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: 100) ==543407==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018b1ca4d bp 0x7ffd8304ae80 sp 0x7ffd8304ace0 T0) ==543407==The signal is caused by a READ memory access. ==543407==Hint: address points to the zero page. 
2025-03-28T09:31:13.251615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T09:31:13.251646Z node 1 :IMPORT WARN: Table profiles were not loaded #0 0x18b1ca4d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 #1 0x18b1ca4d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32 #2 0x18b1ca4d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31 #3 0x18b1ca4d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10 #4 0x18b1ca4d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13 #5 0x18b415d7 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #6 0x18b415d7 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25 #7 0x18b415d7 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5 #8 0x18b415d7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12 #9 0x18b415d7 in std::__y1::__function::__func< ..[snippet truncated].. 0x194936d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12 #11 0x194936d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10 #12 0x194936d5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20 #13 0x19463228 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18 #14 0x18b40483 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1 #15 0x19464af5 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19 #16 0x1948dc4c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44 #17 0x7f1f6e011d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #18 0x7f1f6e011e3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) #19 0x162b8028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x162b8028) (BuildId: 42cc14b09b8e1be640cc9d032c9c2bf606ecb0c4) SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler> ==543407==ABORTING Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.out ------ FAIL: 21 - GOOD, 1 - CRASHED ydb/core/kqp/ut/cost ydb/core/kqp/ut/indexes [size:medium] nchunks:50 ------ [33/50] chunk ran 2 tests (total:117.52s - test:117.42s) [fail] KqpMultishardIndex::DataColumnWrite-UseSink [default-linux-x86_64-release-asan] (79.21s) assertion failed at ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:2037, virtual void NKikimr::NKqp::NTestSuiteKqpMultishardIndex::TTestCaseDataColumnWrite::Execute_(NUnitTest::TTestContext &) 
[UseSink = false]: (result.IsSuccess())
: Error: Kikimr cluster or one of its subsystems is overloaded., code: 2006
: Error: [WRONG_SHARD_STATE] Rejecting data TxId 281474976715738 because datashard 72075186224037919: is in process of split opId 281474976710661 state SplitSrcWaitForPartitioningChanged (wrong shard state)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x196C311B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x19B885CF
2. /tmp//-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:2037: Execute_ @ 0x193073C2
3. /tmp//-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354: operator() @ 0x192C1507
4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354:1) &> @ 0x192C1507
5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354:1) &> @ 0x192C1507
6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x192C1507
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x192C1507
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19BBF5F5
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19BBF5F5
10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19BBF5F5
11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19B8F148
12. /tmp//-S/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp:1354: Execute @ 0x192C06D3
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19B90A15
14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x19BB9B6C
15. ??:0: ?? @ 0x7F94C1166D8F
16. ??:0: ?? @ 0x7F94C1166E3F
17. ??:0: ?? @ 0x1640E028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/indexes/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/indexes/test-results/unittest/testing_out_stuff/KqpMultishardIndex.DataColumnWrite-UseSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/indexes/test-results/unittest/testing_out_stuff/KqpMultishardIndex.DataColumnWrite-UseSink.out
------ FAIL: 119 - GOOD, 1 - FAIL ydb/core/kqp/ut/indexes
ydb/core/kqp/ut/query [size:medium] nchunks:50
------ [12/50] chunk ran 4 tests (total:606.83s - setup:0.01s test:600.11s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
KqpLimits::OutOfSpaceBulkUpsertFail (good) duration: 317.38s
KqpLimits::OutOfSpaceYQLUpsertFail+useSink (good) duration: 179.28s
KqpLimits::OutOfSpaceYQLUpsertFail-useSink (timeout) duration: 108.17s
KqpLimits::QSReplySize+useSink test was not launched inside chunk.
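The 600 s budget this chunk exceeded is the default for [size:medium] test modules, and both the timeout and the chunk splitting shown in the suite header are controlled from the suite's ya.make. A minimal sketch of the macros involved, with illustrative values that are assumptions rather than this suite's actual settings:

    # Inside the test module's ya.make (a C++ unittest suite in this case):
    SIZE(LARGE)        # larger size class; medium suites run under a 600 s budget
    TIMEOUT(1800)      # explicit per-chunk timeout, illustrative value
    FORK_SUBTESTS()    # split tests across parallel chunks...
    SPLIT_FACTOR(50)   # ...presumably where a header like "nchunks:50" comes from

Suites that merely need more wall-clock time are often better served by a higher SPLIT_FACTOR than by a longer timeout, since a longer budget also delays detection of genuinely hung tests.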
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/stderr [timeout] KqpLimits::OutOfSpaceYQLUpsertFail-useSink [default-linux-x86_64-release-asan] (108.17s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpLimits.OutOfSpaceYQLUpsertFail-useSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpLimits.OutOfSpaceYQLUpsertFail-useSink.out ------ [15/50] chunk ran 4 tests (total:116.68s - setup:0.15s test:116.10s) [crashed] KqpLimits::TooBigColumn+useSink [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: -6) See logs for more info Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpLimits.TooBigColumn.useSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpLimits.TooBigColumn.useSink.out ------ [47/50] chunk ran 3 tests (total:207.92s - setup:0.07s test:207.75s) [fail] KqpStats::SysViewClientLost [default-linux-x86_64-release-asan] (153.40s) assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1949767B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1995C4FF 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1903CBE8 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x1904FBF7 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1904FBF7 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1904FBF7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x1904FBF7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x1904FBF7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19993525 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19993525 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19993525 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19963078 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x1904ED7B 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19964945 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1998DA9C 15. ??:0: ?? @ 0x7F631041ED8F 16. ??:0: ?? @ 0x7F631041EE3F 17. ??:0: ?? 
@ 0x1640D028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.out ------ [9/50] chunk ran 4 tests (total:217.83s - test:217.65s) [fail] KqpLimits::ComputeNodeMemoryLimit [default-linux-x86_64-release-asan] (138.54s) assertion failed at ydb/core/kqp/ut/query/kqp_limits_ut.cpp:787, virtual void NKikimr::NKqp::NTestSuiteKqpLimits::TTestCaseComputeNodeMemoryLimit::Execute_(NUnitTest::TTestContext &): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (TIMEOUT != PRECONDITION_FAILED) , with diff: (TIM|PR)E(|C)O(U|NDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1949767B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1995C4FF 2. /tmp//-S/ydb/core/kqp/ut/query/kqp_limits_ut.cpp:787: Execute_ @ 0x18D19A8E 3. /tmp//-S/ydb/core/kqp/ut/query/kqp_limits_ut.cpp:56: operator() @ 0x18D71E37 4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_limits_ut.cpp:56:1) &> @ 0x18D71E37 5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_limits_ut.cpp:56:1) &> @ 0x18D71E37 6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D71E37 7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D71E37 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19993525 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19993525 10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19993525 11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19963078 12. /tmp//-S/ydb/core/kqp/ut/query/kqp_limits_ut.cpp:56: Execute @ 0x18D71003 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19964945 14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1998DA9C 15. ??:0: ?? @ 0x7FDFF4104D8F 16. ??:0: ?? @ 0x7FDFF4104E3F 17. ??:0: ?? @ 0x1640D028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpLimits.ComputeNodeMemoryLimit.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpLimits.ComputeNodeMemoryLimit.out ------ TIMEOUT: 167 - GOOD, 2 - FAIL, 1 - NOT_LAUNCHED, 1 - TIMEOUT, 1 - CRASHED ydb/core/kqp/ut/query ydb/core/kqp/ut/tx [size:medium] nchunks:50 ------ [21/50] chunk ran 2 tests (total:78.60s - setup:0.01s test:78.21s) [fail] KqpSinkTx::OlapInvalidateOnError [default-linux-x86_64-release-asan] (20.53s) assertion failed at ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x18D46A4E 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x18D2569A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x18D2CB17 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D2CB17 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D2CB17 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D2CB17 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D2CB17 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x18D2BCE3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F9EB5486D8F 18. ??:0: ?? @ 0x7F9EB5486E3F 19. ??:0: ?? @ 0x16366028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.out ------ [25/50] chunk ran 2 tests (total:39.31s - test:39.24s) [fail] KqpSnapshotIsolation::TConflictReadWriteOlap [default-linux-x86_64-release-asan] (17.99s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18D7C798 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18D63E6A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F489EA31D8F 18. ??:0: ?? @ 0x7F489EA31E3F 19. ??:0: ?? @ 0x16366028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.out ------ [26/50] chunk ran 2 tests (total:33.99s - test:33.96s) [fail] KqpSnapshotIsolation::TConflictReadWriteOltp [default-linux-x86_64-release-asan] (18.58s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18D79E87 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18D63A12 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7EFDBB8D1D8F 18. ??:0: ?? @ 0x7EFDBB8D1E3F 19. ??:0: ?? @ 0x16366028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.out [fail] KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [default-linux-x86_64-release-asan] (7.05s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18D79E87 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18D63C3A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7EFDBB8D1D8F 18. ??:0: ?? @ 0x7EFDBB8D1E3F 19. ??:0: ?? @ 0x16366028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.out ------ [27/50] chunk ran 2 tests (total:42.74s - test:42.66s) [fail] KqpSnapshotIsolation::TConflictWriteOltp [default-linux-x86_64-release-asan] (18.69s) assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
------ [27/50] chunk ran 2 tests (total:42.74s - test:42.66s)
[fail] KqpSnapshotIsolation::TConflictWriteOltp [default-linux-x86_64-release-asan] (18.69s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18D724E7
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18D63392
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7FFAE5DA0D8F
18. ??:0: ?? @ 0x7FFAE5DA0E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.out
[fail] KqpSnapshotIsolation::TConflictWriteOlap [default-linux-x86_64-release-asan] (16.31s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18D74DF8
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18D637EA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7FFAE5DA0D8F
18. ??:0: ?? @ 0x7FFAE5DA0E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.out
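[note] TConflictWriteOlap above fails in the opposite direction: on a column (OLAP) table the SnapshotRW transaction is accepted, but the second of two conflicting writers committed with SUCCESS where the test expects ABORTED, i.e. the write-write conflict went undetected. A schematic of the expected interleaving follows; BeginSnapshotRwTx, Upsert and Commit are hypothetical helpers standing in for the fixture plumbing, not names from the test.

    // Sketch only; helper names are invented stand-ins for the fixture code.
    auto tx1 = BeginSnapshotRwTx(session1);
    auto tx2 = BeginSnapshotRwTx(session2);
    Upsert(session1, tx1, /*key=*/1u, /*value=*/10); // writer 1
    Upsert(session2, tx2, /*key=*/1u, /*value=*/20); // writer 2, same key
    UNIT_ASSERT_VALUES_EQUAL(Commit(tx1).GetStatus(), NYdb::EStatus::SUCCESS);
    // Snapshot isolation must reject the lost update; the run above got
    // SUCCESS here instead of the expected ABORTED:
    UNIT_ASSERT_VALUES_EQUAL(Commit(tx2).GetStatus(), NYdb::EStatus::ABORTED);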
------ [28/50] chunk ran 2 tests (total:45.54s - test:45.46s)
[fail] KqpSnapshotIsolation::TConflictWriteOltpNoSink [default-linux-x86_64-release-asan] (18.65s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18D724E7
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18D635BA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F19F8E21D8F
18. ??:0: ?? @ 0x7F19F8E21E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.out
------ [29/50] chunk ran 2 tests (total:32.22s - test:32.18s)
[fail] KqpSnapshotIsolation::TReadOnlyOltp [default-linux-x86_64-release-asan] (18.90s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18D81843
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18D64092
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7FC58236FD8F
18. ??:0: ?? @ 0x7FC58236FE3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.out
[fail] KqpSnapshotIsolation::TReadOnlyOltpNoSink [default-linux-x86_64-release-asan] (7.78s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18D81843
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18D642BA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7FC58236FD8F
18. ??:0: ?? @ 0x7FC58236FE3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.out
------ [30/50] chunk ran 2 tests (total:40.34s - test:40.25s)
[fail] KqpSnapshotIsolation::TSimpleOltp [default-linux-x86_64-release-asan] (16.17s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18D6BF57
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18D62D12
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7EFE1A320D8F
18. ??:0: ?? @ 0x7EFE1A320E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.out
------ [31/50] chunk ran 2 tests (total:31.08s - test:31.04s)
[fail] KqpSnapshotIsolation::TSimpleOltpNoSink [default-linux-x86_64-release-asan] (18.88s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18D6BF57
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18D62F3A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F94FE303D8F
18. ??:0: ?? @ 0x7F94FE303E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.out
------ FAIL: 93 - GOOD, 11 - FAIL ydb/core/kqp/ut/tx
ydb/core/kqp/ut/view [size:medium]
------ sole chunk ran 23 tests (total:243.83s - test:243.33s)
[fail] TCreateAndDropViewTest::DropViewIfExists [default-linux-x86_64-release-asan] (9.50s)
(NYdb::Dev::TContractViolation) Attempt to use result with not successfull status.
TCreateSessionResult::GetSession
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.DropViewIfExists.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.DropViewIfExists.out
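[note] The TContractViolation above is raised by the SDK on the client side, not by the server: TCreateSessionResult::GetSession() may only be called once the result has been checked for success. A minimal sketch of the required order of operations (driver and client construction omitted):

    // Check the status first; calling GetSession() on an unsuccessful result
    // throws NYdb::TContractViolation, exactly as logged above.
    auto sessionResult = client.CreateSession().GetValueSync();
    if (!sessionResult.IsSuccess()) {
        Cerr << "CreateSession failed: " << sessionResult.GetIssues().ToString() << Endl;
        return;
    }
    auto session = sessionResult.GetSession(); // safe only after IsSuccess()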
------ FAIL: 22 - GOOD, 1 - FAIL ydb/core/kqp/ut/view
ydb/core/statistics/aggregator/ut [size:medium] nchunks:60
------ [4/60] chunk ran 1 test (total:614.48s - test:600.11s)
Test was terminated by signal 3
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
AnalyzeColumnshard::AnalyzeRebootColumnShard (fail) duration: 598.73s
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/stderr
[fail] AnalyzeColumnshard::AnalyzeRebootColumnShard [default-linux-x86_64-release-asan] (598.73s)
(TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.
ydb/library/actors/testlib/test_runtime.cpp:716:
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
TWithBackTrace::TWithBackTrace<>() at /-S/util/generic/bt_exception.h:16:5
NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) at /-S/ydb/library/actors/testlib/test_runtime.h:0:24
DoDestroy at /-S/util/generic/ptr.h:237:13
operator() at /-S/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.out
------ TIMEOUT: 35 - GOOD, 1 - FAIL ydb/core/statistics/aggregator/ut
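[note] TSchedulingLimitReachedException means the test actor runtime dispatched its whole event budget (100000 events) before the awaited TEvAnalyzeResponse arrived, which usually points at an event storm or livelock around the reboot scenario rather than a merely slow test. A sketch of the waiting pattern that failed follows; SetDispatchedEventsLimit is assumed to be the public setter for that budget.

    // Assumption: SetDispatchedEventsLimit() controls the 100000-event budget
    // named in the exception text; raising it only helps if real progress is
    // being made underneath.
    runtime.SetDispatchedEventsLimit(1'000'000);
    // GrabEdgeEventRethrow rethrows the scheduling-limit exception (wrapped in
    // TWithBackTrace, as seen above) if the budget runs out while waiting:
    auto ev = runtime.GrabEdgeEventRethrow<NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse>(
        sender, TDuration::Seconds(30));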
ydb/core/tx/columnshard/ut_rw [size:medium] nchunks:60
------ [28/60] chunk ran 1 test (total:602.96s - test:600.05s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString (timeout) duration: 601.00s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr
[timeout] TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [default-linux-x86_64-release-asan] (601.00s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKString.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKString.out
------ [29/60] chunk ran 1 test (total:601.84s - test:600.06s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 (timeout) duration: 600.44s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr
[timeout] TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [default-linux-x86_64-release-asan] (600.44s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKUtf8.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranuleStrKey_PKUtf8.out
------ [32/60] chunk ran 1 test (total:603.27s - setup:0.01s test:600.10s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 (timeout) duration: 600.40s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr
[timeout] TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [default-linux-x86_64-release-asan] (600.40s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranule_PKInt64.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranule_PKInt64.out
------ [35/60] chunk ran 1 test (total:604.35s - test:600.11s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 (timeout) duration: 601.74s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/stderr
[timeout] TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [default-linux-x86_64-release-asan] (601.74s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranule_PKUInt64.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/columnshard/ut_rw/test-results/unittest/testing_out_stuff/TColumnShardTestReadWrite.CompactionSplitGranule_PKUInt64.out
------ TIMEOUT: 55 - GOOD, 4 - TIMEOUT ydb/core/tx/columnshard/ut_rw
ydb/core/tx/datashard/ut_incremental_backup [size:medium] nchunks:4
------ [0/4] chunk ran 3 tests (total:255.06s - test:254.48s)
[fail] IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [default-linux-x86_64-release-asan] (154.57s)
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS) Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode) at /-S/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:0:5
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.out
------ FAIL: 10 - GOOD, 1 - FAIL ydb/core/tx/datashard/ut_incremental_backup
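[note] The IncrementalBackup failure above is a status assertion, not a data mismatch: the ExecSQL helper from datashard_ut_common compares the query's Ydb status against an expected code, and here the statement hit the server-side 600000ms request timeout. A sketch of the call shape, with an illustrative statement rather than the test's actual SQL:

    // ExecSQL(server, sender, query, dml, expectedStatus) asserts
    // response.GetYdbStatus() == expectedStatus; this run expected SUCCESS
    // and got TIMEOUT, producing the (TIM|SUCC)E(OUT|SS) diff above.
    ExecSQL(server, sender,
            R"(UPSERT INTO `/Root/Table` (key, value) VALUES (1, "v");)",
            /*dml=*/true, Ydb::StatusIds::SUCCESS);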
ydb/core/tx/schemeshard/ut_data_erasure [size:medium] nchunks:10
------ [1/10] chunk ran 1 test (total:39.55s - setup:0.02s test:20.20s)
Test was terminated by signal 3
Chunk exceeded 20s timeout and was killed
List of the tests involved in the launch:
TestDataErasure::DataErasureManualLaunch3Cycles (good) duration: 14.12s
Killed by timeout (20 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/testing_out_stuff/stderr
------ [2/10] chunk ran 1 test (total:38.51s - setup:0.07s test:20.12s)
Chunk exceeded 20s timeout and was killed
List of the tests involved in the launch:
TestDataErasure::DataErasureRun3Cycles (timeout) duration: 33.10s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/testing_out_stuff/stderr
[timeout] TestDataErasure::DataErasureRun3Cycles [default-linux-x86_64-release-asan] (33.10s)
Killed by timeout (20 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/testing_out_stuff/TestDataErasure.DataErasureRun3Cycles.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/testing_out_stuff/TestDataErasure.DataErasureRun3Cycles.out
------ TIMEOUT: 3 - GOOD, 1 - TIMEOUT ydb/core/tx/schemeshard/ut_data_erasure
ydb/core/tx/tiering/ut [size:medium] nchunks:60
------ [3/60] chunk ran 1 test (total:37.41s - test:37.37s)
[crashed] ColumnShardTiers::TTLUsage [default-linux-x86_64-release-asan] (0.00s)
Test crashed (return code: -6)
See logs for more info
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.out
------ FAIL: 9 - GOOD, 1 - CRASHED ydb/core/tx/tiering/ut
ydb/core/tx/tx_proxy/ut_schemereq [size:medium] nchunks:10
------ [0/10] chunk ran 30 tests (total:252.09s - test:251.76s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [default-linux-x86_64-release-asan] (9.07s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:29067
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51.out
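[note] All SchemeReqAccess failures in this suite share one shape: the test cannot even create a session because the underlying gRPC call hits its deadline (code 4, DEADLINE_EXCEEDED), which points at environment slowness under ASan rather than per-test logic errors. One way to make the deadline explicit on the client side is sketched below; TCreateSessionSettings with a fluent ClientTimeout setter follows the C++ SDK conventions but should be treated as an assumption here.

    // Bound the call explicitly instead of relying on the default deadline,
    // and surface the issues ("GRpc error: (4): Deadline Exceeded") on failure.
    auto sessionResult = client.CreateSession(
        NYdb::NTable::TCreateSessionSettings().ClientTimeout(TDuration::Seconds(5))
    ).GetValueSync();
    UNIT_ASSERT_C(sessionResult.IsSuccess(), sessionResult.GetIssues().ToString());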
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 [default-linux-x86_64-release-asan] (9.39s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:22364
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18.out
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [default-linux-x86_64-release-asan] (8.76s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:2030
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52.out
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [default-linux-x86_64-release-asan] (10.76s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:16349
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54.out
------ [1/10] chunk ran 30 tests (total:257.31s - test:256.96s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [default-linux-x86_64-release-asan] (8.44s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:29207
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70.out
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 [default-linux-x86_64-release-asan] (8.41s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:32486
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68.out
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 [default-linux-x86_64-release-asan] (9.25s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:16143
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69.out
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 [default-linux-x86_64-release-asan] (10.36s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:62503
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60.out
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 [default-linux-x86_64-release-asan] (9.52s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:5834
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61.out
------ [2/10] chunk ran 30 tests (total:254.89s - test:254.58s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [default-linux-x86_64-release-asan] (10.39s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:12123
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22.out
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [default-linux-x86_64-release-asan] (9.25s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:29201
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24.out
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [default-linux-x86_64-release-asan] (9.68s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:9435
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23.out
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 [default-linux-x86_64-release-asan] (9.21s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:2119
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12.out
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [default-linux-x86_64-release-asan] (9.01s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:13649
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21.out
------ [3/10] chunk ran 30 tests (total:237.51s - test:237.33s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [default-linux-x86_64-release-asan] (8.32s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:65232
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69.out
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [default-linux-x86_64-release-asan] (8.36s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:24206
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68.out
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [default-linux-x86_64-release-asan] (9.22s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:18208
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58.out
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 [default-linux-x86_64-release-asan] (8.36s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:8663 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [default-linux-x86_64-release-asan] (8.89s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:13725 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [default-linux-x86_64-release-asan] (8.59s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:15611 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66.out ------ [4/10] chunk ran 30 tests (total:254.72s - test:254.48s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 [default-linux-x86_64-release-asan] (8.86s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
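For context on the shared signature: the repeated assertion fires inside a test helper that opens a session and asserts success before creating a user. A minimal sketch, assuming a simplified stand-in result type (the real CreateLocalUser in schemereq_ut.cpp takes a TTestEnv plus three TStrings and uses the YDB SDK; only the IsSuccess() check and the unittest assertion behavior are taken from the log):

// Hypothetical simplification of the pattern behind
// "assertion failed at schemereq_ut.cpp:256 ... (sessionResult.IsSuccess())".
#include <library/cpp/testing/unittest/registar.h>
#include <util/generic/string.h>

struct TSessionResult {                       // stand-in for the SDK result type
    bool Success = false;
    TString Issues = "GRpc error: (4): Deadline Exceeded";
    bool IsSuccess() const { return Success; }
};

// Hypothetical session acquisition that times out, as in the runs above.
TSessionResult AcquireSession() { return {}; }

void CreateLocalUser(/* const TTestEnv&, user, password, ... */) {
    TSessionResult sessionResult = AcquireSession();
    // Once the gRPC deadline expires, IsSuccess() is false, UNIT_ASSERT_C
    // aborts the test, and the unittest runner prints the assertion text
    // and backtrace seen in each failure record here.
    UNIT_ASSERT_C(sessionResult.IsSuccess(), sessionResult.Issues);
}

One plausible reading of these runs is that the asan build is slow enough for session setup to miss the client-side deadline, which would explain why every failure shows the same timeout rather than a functional error.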
------ [4/10] chunk ran 30 tests (total:254.72s - test:254.48s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 [default-linux-x86_64-release-asan] (8.86s, endpoint localhost:32215)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [default-linux-x86_64-release-asan] (11.49s, endpoint localhost:9293)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 [default-linux-x86_64-release-asan] (9.49s, endpoint localhost:4332)
------ [5/10] chunk ran 30 tests (total:254.14s - test:253.82s)
Backtraces in this chunk and the next differ from the one above in a single frame: one more UnRef at /-S/util/generic/ptr.h:624:13 in place of the AlterLoginProtect_RootDB frame.
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 [default-linux-x86_64-release-asan] (9.16s, endpoint localhost:20187)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 [default-linux-x86_64-release-asan] (9.98s, endpoint localhost:14871)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [default-linux-x86_64-release-asan] (8.37s, endpoint localhost:1267)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 [default-linux-x86_64-release-asan] (8.93s, endpoint localhost:28922)
------ [6/10] chunk ran 30 tests (total:254.85s - test:254.49s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [default-linux-x86_64-release-asan] (9.09s, endpoint localhost:19207)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [default-linux-x86_64-release-asan] (9.39s, endpoint localhost:18726)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 [default-linux-x86_64-release-asan] (8.28s, endpoint localhost:62448)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [default-linux-x86_64-release-asan] (9.61s, endpoint localhost:27709)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 [default-linux-x86_64-release-asan] (8.52s, endpoint localhost:29984)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [default-linux-x86_64-release-asan] (8.89s, endpoint localhost:25918)
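The status number in "GRpc error: (4): Deadline Exceeded" is gRPC's canonical DEADLINE_EXCEEDED code. A standalone illustration in plain gRPC C++ (not YDB code; the message string is chosen to mirror the log):

// Demonstrates that 4 is grpc::StatusCode::DEADLINE_EXCEEDED, the "(4)"
// printed in every failure above.
#include <grpcpp/support/status.h>
#include <cassert>

int main() {
    // grpc::StatusCode follows the canonical gRPC code numbering.
    static_assert(grpc::StatusCode::DEADLINE_EXCEEDED == 4, "canonical gRPC code");
    grpc::Status timedOut(grpc::StatusCode::DEADLINE_EXCEEDED, "Deadline Exceeded");
    assert(!timedOut.ok());  // any non-OK status fails ok()
    assert(timedOut.error_code() == grpc::StatusCode::DEADLINE_EXCEEDED);
    return 0;
}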
------ [7/10] chunk ran 30 tests (total:252.03s - test:251.87s)
Backtraces from here on again include the AlterLoginProtect_RootDB frame.
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [default-linux-x86_64-release-asan] (9.13s, endpoint localhost:25069)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [default-linux-x86_64-release-asan] (10.41s, endpoint localhost:15295)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 [default-linux-x86_64-release-asan] (8.77s, endpoint localhost:5495)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [default-linux-x86_64-release-asan] (8.41s, endpoint localhost:65142)
------ [8/10] chunk ran 30 tests (total:255.86s - setup:0.01s test:255.57s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 [default-linux-x86_64-release-asan] (8.67s, endpoint localhost:5582)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 [default-linux-x86_64-release-asan] (8.13s, endpoint localhost:20508)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [default-linux-x86_64-release-asan] (9.48s, endpoint localhost:17978)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 [default-linux-x86_64-release-asan] (9.79s, endpoint localhost:4395)
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:29843 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [default-linux-x86_64-release-asan] (13.75s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:26834 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [default-linux-x86_64-release-asan] (15.47s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:20084 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [default-linux-x86_64-release-asan] (11.55s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:1199
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant.out
[fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [default-linux-x86_64-release-asan] (10.45s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4688
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck.out
[fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [default-linux-x86_64-release-asan] (11.17s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:5947
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly.out
[fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [default-linux-x86_64-release-asan] (10.56s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:16483
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck.out
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [default-linux-x86_64-release-asan] (8.26s)
assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:11787
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48.out
------ FAIL: 251 - GOOD, 49 - FAIL ydb/core/tx/tx_proxy/ut_schemereq
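Note: all 49 failures in this suite share one shape: CreateLocalUser cannot obtain a session because the underlying gRPC call finishes with status (4), which is grpc::StatusCode::DEADLINE_EXCEEDED. A plausible cause under this address-sanitized build is simply that the server does not answer before the client deadline elapses, since ASan slows execution severalfold. A minimal sketch of where such a status comes from on the client side, using the generic gRPC C++ API (the stub and DoCall method are hypothetical, not YDB code):

    #include <chrono>
    #include <grpcpp/grpcpp.h>

    // Any unary call made through a grpc::ClientContext whose deadline elapses
    // before the server replies completes with DEADLINE_EXCEEDED (numeric
    // code 4) -- the "GRpc error: (4): Deadline Exceeded" seen above.
    template <typename Stub, typename Request, typename Response>
    grpc::Status CallWithDeadline(Stub& stub, const Request& req, Response* resp) {
        grpc::ClientContext ctx;
        ctx.set_deadline(std::chrono::system_clock::now() + std::chrono::seconds(5));
        return stub.DoCall(&ctx, req, resp);  // hypothetical unary method
    }

When the server is healthy but slow, raising the deadline or retrying on code 4 is the usual client-side mitigation.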
ydb/core/viewer/ut [size:medium] nchunks:10
------ [6/10] chunk ran 5 tests (total:76.18s - test:76.13s)
[fail] Viewer::QueryExecuteScript [default-linux-x86_64-release-asan] (14.56s)
assertion failed at ydb/core/viewer/viewer_ut.cpp:1948, virtual void NTestSuiteViewer::TTestCaseQueryExecuteScript::Execute_(NUnitTest::TTestContext &): (json.GetMap().contains("metadata")) {}
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteViewer::TTestCaseQueryExecuteScript::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/viewer/viewer_ut.cpp:0:9
operator() at /-S/ydb/core/viewer/viewer_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.QueryExecuteScript.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/ut/test-results/unittest/testing_out_stuff/Viewer.QueryExecuteScript.out
------ FAIL: 50 - GOOD, 1 - FAIL ydb/core/viewer/ut
ydb/services/persqueue_v1/ut [size:medium] nchunks:10
------ [3/10] chunk ran 13 tests (total:295.01s - test:294.85s)
[crashed] TPersQueueTest::DisableDeduplication [default-linux-x86_64-release-asan] (17.09s)
Test crashed (return code: 100)
==447643==ERROR: LeakSanitizer: detected memory leaks
Indirect leak of 4787048 byte(s) in 73 object(s) allocated from:
  #0 0x18f93cdf in malloc /-S/contrib/libs/clang18-rt/lib/asan/asan_malloc_linux.cpp:68:3
  #1 0x1a24abd3 in grpc_event_engine::experimental::MemoryAllocator::MakeSlice(grpc_event_engine::experimental::MemoryRequest) /-S/contrib/libs/grpc/src/core/lib/event_engine/memory_allocator.cc:63:13
  #2 0x1a2254fd in maybe_make_read_slices /-S/contrib/libs/grpc/src/core/lib/iomgr/tcp_posix.cc:1070:57
  #3 0x1a2254fd in tcp_handle_read(void*, y_absl::lts_y_20240722::Status) /-S/contrib/libs/grpc/src/core/lib/iomgr/tcp_posix.cc:1094:5
  #4 0x1a229957 in Run /-S/contrib/libs/grpc/src/core/lib/iomgr/closure.h:303:5
  #5 0x1a229957 in tcp_read(grpc_endpoint*, grpc_slice_buffer*, grpc_closure*, bool, int) /-S/contrib/libs/grpc/src/core/lib/iomgr/tcp_posix.cc:1156:5
  #6 0x1a5870c8 in continue_read_action_locked /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/transport/chttp2_transport.cc:2594:3
  #7 0x1a5870c8 in read_action_locked(void*, y_absl::lts_y_20240722::Status) /-S/contrib/libs/grpc/src/core/ext/transport/chttp2/transport/chttp2_transport.cc:2583:7
  #8 0x1a114576 in grpc_combiner_continue_exec_ctx() /-S/contrib/libs/grpc/src/core/lib/iomgr/combiner.cc:231:5
  #9 0x1a0ecb34 in grpc_core::ExecCtx::Flush() /-S/contrib/libs/grpc/src/core/lib/iomgr/exec_ctx.cc:75:17
  #10 0x1a238d84 in end_worker /-S/contrib/l ..[snippet truncated].. llset.cc:48:10
  #14 0x1a213417 in cq_next(grpc_completion_queue*, gpr_timespec, void*) /-S/contrib/libs/grpc/src/core/lib/surface/completion_queue.cc:1036:29
  #15 0x1adce7be in grpc::CompletionQueue::AsyncNextInternal(void**, bool*, gpr_timespec) /-S/contrib/libs/grpc/src/cpp/common/completion_queue_cc.cc:166:15
  #16 0x1ff4e82f in Next /-S/contrib/libs/grpc/include/grpcpp/completion_queue.h:182:13
  #17 0x1ff4e82f in NYdbGrpc::Dev::PullEvents(grpc::CompletionQueue*) /-S/ydb/public/sdk/cpp/src/library/grpc/client/grpc_client_low.cpp:190:18
  #18 0x1ade3c9e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
  #19 0x1ade3c9e in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
  #20 0x1ade3c9e in (anonymous namespace)::TThreadFactoryFuncObj::DoExecute() /-S/util/thread/factory.cpp:61:13
  #21 0x1ade41ec in Execute /-S/util/thread/factory.h:15:13
  #22 0x1ade41ec in (anonymous namespace)::TSystemThreadFactory::TPoolThread::ThreadProc(void*) /-S/util/thread/factory.cpp:36:41
  #23 0x192e2bd4 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:244:20
  #24 0x18f91828 in asan_thread_start(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239:28
SUMMARY: AddressSanitizer: 5611308 byte(s) leaked in 1394 allocation(s).
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/TPersQueueTest.DisableDeduplication.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/persqueue_v1/ut/test-results/unittest/testing_out_stuff/TPersQueueTest.DisableDeduplication.out
------ FAIL: 132 - GOOD, 1 - CRASHED ydb/services/persqueue_v1/ut
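Note: unlike the [fail] entries above, this test did not fail an assertion; the process exited with code 100 because LeakSanitizer found unfreed gRPC event-engine read buffers at shutdown. If the leak is a known issue in the bundled gRPC rather than in the test itself, LeakSanitizer supports suppression files; a sketch (the file name is hypothetical, the pattern is taken from frame #1 of the report above):

    # lsan.supp -- suppress the allocation site reported at frame #1
    leak:grpc_event_engine::experimental::MemoryAllocator::MakeSlice

    # run the test binary with the suppression applied:
    #   LSAN_OPTIONS=suppressions=lsan.supp ./ydb-services-persqueue_v1-ut

This hides the report without fixing anything; root-causing the missing completion-queue shutdown path is the durable fix.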
ydb/services/ydb/sdk_sessions_ut [size:medium] nchunks:10
------ [8/10] chunk ran 1 test (total:70.21s - test:70.12s)
[fail] YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [default-linux-x86_64-release-asan] (10.30s)
assertion failed at ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:253, virtual void NTestSuiteYdbSdkSessions::TTestCaseTestSdkFreeSessionAfterBadSessionQueryServiceStreamCall::Execute_(NUnitTest::TTestContext &): (session.GetId() == sessionId) failed: ("ydb://session/3?node_id=1&id=ZmUxODRkZGQtZmIwMmY2NjYtOTE0OGJmNDUtYTNkNjRhMzc=" != "ydb://session/3?node_id=1&id=Yzk5YzFjYTYtZTlmYmU3ZmItOWNhMWI5ZGEtNmU1NDU5OTM=")
, with diff: "ydb://session/3?node_id=1&id=(|Yzk5YzFjYTYt)Z(|TlmY)mU(xODRk|3)Z(GQtZ|)mI(wMmY2NjY|)tO(T|WNhMWI5ZG)E(0OGJ|tN)m(|U1)NDU(tY|5O)T(NkNjRh|)M(zc|)="
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteYdbSdkSessions::TTestCaseTestSdkFreeSessionAfterBadSessionQueryServiceStreamCall::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/sdk_sessions_ut/sdk_sessions_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff/YdbSdkSessions.TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/testing_out_stuff/YdbSdkSessions.TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall.out
------ FAIL: 15 - GOOD, 1 - FAIL ydb/services/ydb/sdk_sessions_ut
ydb/services/ydb/ut [size:medium] nchunks:60
------ [29/60] chunk ran 5 tests (total:47.62s - test:47.53s)
[fail] YdbLogStore::AlterLogTable [default-linux-x86_64-release-asan] (5.44s)
assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out
------ FAIL: 286 - GOOD, 1 - FAIL ydb/services/ydb/ut
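Note: the "with diff:" lines in the two failures above use the unittest framework's inline diff notation: characters common to both values are printed bare, and each divergent segment appears as (left|right), where left belongs to the first operand and right to the second. Decoding the status diff:

    (PRE|SUC) C (ONDITION_FAIL|) E (D|SS)
    left  = PRE + C + ONDITION_FAIL + E + D   ->  PRECONDITION_FAILED
    right = SUC + C +                 E + SS  ->  SUCCESS

The same reading applied to the session-id diff interleaves the two base64 ids around their shared characters, which is why it looks scrambled at first glance.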
------ sole chunk ran 2 tests (total:300.73s - recipes:10.29s test:287.73s recipes:2.63s)
Info: Test run has exceeded 16.0G (16777216K) memory limit with 16.6G (17422316K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
913019 44.8M 44.8M 6.4M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
913039 34.3M 22.5M 10.0M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
918548 46.0M 46.0M 23.0M │  └─ test_tool run_ut @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff/test_tool.args
918745 2.5G 2.5G 2.5G    │     └─ ydb-tests-functional-kqp-kqp_indexes --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/y
913589 1.7G 1.7G 1.2G    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
913591 1.8G 1.8G 1.3G    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
913593 1.7G 1.7G 1.2G    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
913594 1.7G 1.7G 1.2G    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
913595 1.8G 1.8G 1.3G    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
913597 1.7G 1.7G 1.2G    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
913598 1.8G 1.8G 1.3G    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
913600 1.8G 1.8G 1.3G    └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff/stderr
ydb/tests/functional/transfer [size:medium]
------ sole chunk ran 25 tests (total:176.11s - setup:0.01s recipes:17.16s test:155.32s recipes:3.51s)
Info: Test run has exceeded 16.0G (16777216K) memory limit with 17.3G (18177644K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
935389 44.8M 44.6M 6.5M  test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
935895 34.2M 21.4M 8.7M  ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
949932 336M 418M 349M    │  └─ ydb_recipe --build-root /home/runner/actions_runner/_work/ydb/ydb/tmp/out --source-root /home/runner/actions_runner/_work/ydb/ydb --gdb-path /home/runner/.ya/tools/v4/668
937629 2.2G 2.1G 1.6G    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/yd
937637 2.1G 2.1G 1.6G    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/yd
937639 2.2G 2.2G 1.7G    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/yd
937651 2.1G 2.1G 1.6G    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/yd
937652 2.2G 2.1G 1.6G    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/yd
937654 2.1G 2.1G 1.6G    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/yd
937661 2.1G 2.0G 1.6G    ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/yd
937680 2.2G 2.1G 1.7G    └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/yd
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/stderr
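Note: both memory-limit warnings point at the remedy the runner itself names: declaring a larger RAM requirement for the test in its ya.make. A minimal sketch, assuming the value is given in gigabytes (the 32 here is illustrative, not taken from the log; UNITTEST/END stand for whatever module macro the affected ya.make already uses):

    UNITTEST()
    ...
    REQUIREMENTS(ram:32)
    END()

The eight ~2G ydbd server processes in each tree account for most of the ~17G observed, so the declared requirement has to cover the whole recipe-spawned cluster, not just the test binary itself.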
[fail] Transfer::AlterBatchSize [default-linux-x86_64-release-asan] (23.47s)
assertion failed at ydb/tests/functional/transfer/main.cpp:355, void (anonymous namespace)::MainTestCase::CheckResult(const TExpectations &): (attempt) Unable to wait transfer result
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
(anonymous namespace)::MainTestCase::CheckResult(TExpectations const&) at /-S/ydb/tests/functional/transfer/main.cpp:0:13
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/vector:527:18
operator() at /-S/ydb/tests/functional/transfer/main.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/Transfer.AlterBatchSize.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/transfer/test-results/unittest/testing_out_stuff/Transfer.AlterBatchSize.out
------ FAIL: 24 - GOOD, 1 - FAIL ydb/tests/functional/transfer
Total 395 suites: 364 - GOOD 20 - FAIL 11 - TIMEOUT
Total 9205 tests: 9073 - GOOD 85 - FAIL 27 - NOT_LAUNCHED 14 - TIMEOUT 1 - SKIPPED 5 - CRASHED
Cache efficiency ratio is 89.27% (40125 of 44948). Local: 0 (0.00%), dist: 4808 (10.70%), by dynamic uids: 0 (0.00%), avoided: 35317 (78.57%)
Dist cache download: count=4446, size=13.75 GiB, speed=159.09 MiB/s
Disk usage for tools/sdk at least 266.23 MiB
Additional disk space consumed for build cache 889.16 GiB
Critical path:
[ 57553 ms] [CC] [PpBfr5gGp3tLHMpmAt9KCw default-linux-x86_64 release asan]: $(SOURCE_ROOT)/ydb/core/kqp/opt/kqp_query_plan.cpp [started: 0 (1743152247014), finished: 57553 (1743152304567)]
[    89 ms] [AR] [mJOl33En5NpsWoazsiVo-Q default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/core/kqp/opt/libcore-kqp-opt.a [started: 57568 (1743152304582), finished: 57657 (1743152304671)]
[ 77283 ms] [LD] [_zPkU4XC8Feyvv2ngiWh8A default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/apps/ydbd/ydbd [started: 632160 (1743152879174), finished: 709443 (1743152956457)]
[623390 ms] [TM] [rnd-17938248478919393034 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1290370 (1743153537384), finished: 1913760 (1743154160774)]
[ 26291 ms] [TA] [rnd-d0ys715ts2zm4hz3]: $(BUILD_ROOT)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} [started: 1913782 (1743154160796), finished: 1940073 (1743154187087)]
Time from start: 3745060.5080566406 ms, time elapsed by graph 784606 ms, time diff 2960454.5080566406 ms.
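Note: the summary figures are internally consistent, which is a quick sanity check worth doing when reading these reports:

    35317 (avoided) + 4808 (dist) + 0 (local) = 40125 cache hits
    40125 / 44948 = 0.8927...                 -> the reported 89.27%
    3745060.5080566406 - 784606 = 2960454.5080566406 ms  -> the reported time diff

The roughly 49 minutes of "time diff" is wall-clock time not attributed to graph execution (configuration, cache traffic, and waiting), and the test totals likewise add up: 9073 + 85 + 27 + 14 + 1 + 5 = 9205.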
The longest 10 tasks:
[649097 ms] [TM] [rnd-17033782026988510087 asan default-linux-x86_64 release]: ydb/tests/fq/multi_plane/py3test [started: 1743153515271, finished: 1743154164368]
[632088 ms] [TM] [rnd-uui7qm4qsoxrvjas asan default-linux-x86_64 release]: ydb/tests/fq/mem_alloc/py3test [started: 1743154847961, finished: 1743155480049]
[631466 ms] [TM] [rnd-8455028271315632183 asan default-linux-x86_64 release]: ydb/tests/functional/tpc/medium/py3test [started: 1743155263538, finished: 1743155895004]
[627099 ms] [TM] [rnd-cuokgb5wxphxyvmq asan default-linux-x86_64 release]: ydb/tests/olap/column_family/compression/py3test [started: 1743154780371, finished: 1743155407470]
[623390 ms] [TM] [rnd-17938248478919393034 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1743153537384, finished: 1743154160774]
[622807 ms] [TM] [rnd-x2mi8r5enmzxhvw8 asan default-linux-x86_64 release]: ydb/tests/olap/s3_import/py3test [started: 1743154785341, finished: 1743155408148]
[617960 ms] [TM] [rnd-5871959780963312698 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1743153537195, finished: 1743154155155]
[617293 ms] [TM] [rnd-5gld01m8v2ydrfpf asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 1743154827326, finished: 1743155444619]
[614900 ms] [TM] [rnd-6463605605054116012 asan default-linux-x86_64 release]: ydb/core/statistics/aggregator/ut/unittest [started: 1743154496023, finished: 1743155110923]
[608931 ms] [TM] [rnd-11154230096973208755 asan default-linux-x86_64 release]: ydb/core/kqp/ut/query/unittest [started: 1743152338651, finished: 1743152947582]
Total time by type:
[149457948 ms] [TM] [count: 4017, ave time 37206.36 msec]
[ 11272471 ms] [LD] [count: 353, ave time 31933.35 msec]
[  9833181 ms] [prepare:get from dist cache] [count: 4808, ave time 2045.17 msec]
[  1645896 ms] [TS] [count: 224, ave time 7347.75 msec]
[   372359 ms] [prepare:tools] [count: 19, ave time 19597.84 msec]
[   346365 ms] [TA] [count: 205, ave time 1689.59 msec]
[   186884 ms] [prepare:bazel-store] [count: 3, ave time 62294.67 msec]
[   124148 ms] [prepare:put into local cache, clean build dir] [count: 4821, ave time 25.75 msec]
[    91435 ms] [prepare:AC] [count: 4, ave time 22858.75 msec]
[    60241 ms] [CC] [count: 6, ave time 10040.17 msec]
[     6775 ms] [AR] [count: 10, ave time 677.50 msec]
[     3641 ms] [prepare:put to dist cache] [count: 268, ave time 13.59 msec]
[     1521 ms] [prepare:resources] [count: 1, ave time 1521.00 msec]
[      809 ms] [ld] [count: 2, ave time 404.50 msec]
[      566 ms] [CF] [count: 2, ave time 283.00 msec]
[      513 ms] [UN] [count: 1, ave time 513.00 msec]
[      455 ms] [BI] [count: 1, ave time 455.00 msec]
[      277 ms] [SB] [count: 1, ave time 277.00 msec]
[      175 ms] [CP] [count: 1, ave time 175.00 msec]
[       10 ms] [prepare:clean] [count: 3, ave time 3.33 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 151450209 ms (93.03%)
Total run tasks time - 162792491 ms
Configure time - 31.3 s
Statistics overhead 1894 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json
Ok
+ echo 0
+ ./ya make .
-T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.zjCvduAmFp --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -X --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out Output root is subdirectory of Arcadia root, this may cause non-idempotent build Configuring dependencies for platform default-linux-x86_64-release-asan Configuring dependencies for platform tools [2 ymakes processing] [7920/7920 modules configured] [3920/4374 modules rendered] [2 ymakes processing] [7920/7920 modules configured] [4306/4374 modules rendered] [2 ymakes processing] [7920/7920 modules configured] [4374/4374 modules rendered] Configuring dependencies for platform test_tool_tc1-global [0 ymakes processing] [7926/7926 modules configured] [4374/4374 modules rendered] Configuring tests execution Configuring local and dist store caches Configuration done. Preparing for execution | 1.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut | 0.9%| CLEANING SYMRES | 1.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw | 1.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes | 2.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator | 2.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge | 2.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain | 3.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source | 3.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut | 4.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large | 4.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut | 4.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator | 4.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut | 4.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut | 5.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export | 5.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a | 5.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut | 5.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut | 6.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq | 7.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std | 7.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema | 7.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer | 7.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut | 8.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests | 8.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots | 8.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup | 8.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction | 8.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign | 8.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k | 9.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive | 9.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut | 9.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut | 9.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut | 9.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut | 9.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow | 9.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut | 9.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots | 9.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |10.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |10.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |10.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |10.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |10.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |10.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |10.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |10.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |11.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |11.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |11.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |11.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |11.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |11.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |11.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |11.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |11.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |11.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |12.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |12.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |12.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |12.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |12.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |12.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |12.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |12.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |12.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |13.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |13.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |13.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |13.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |13.8%| PREPARE $(CLANG-2518231432) |13.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |14.0%| PREPARE $(CLANG-1922233694) |14.1%| PREPARE $(WITH_JDK17-sbr:7832760150) |14.1%| PREPARE $(TEST_TOOL_HOST-sbr:8330113388) |14.3%| PREPARE $(YMAKE_PYTHON3-4256832079) |14.4%| PREPARE $(FLAKE8_PY3-715603131) |14.5%| PREPARE $(WITH_JDK-sbr:7832760150) |14.6%| PREPARE $(CLANG16-1380963495) |14.7%| PREPARE $(CLANG_FORMAT-2212207123) |14.8%| PREPARE $(PYTHON) |14.9%| PREPARE $(GDB) |15.0%| PREPARE $(CLANG18-3363451693) |15.1%| PREPARE $(JDK17-472926544) |15.2%| PREPARE $(LLD_ROOT-3808007503) |15.3%| PREPARE $(JDK_DEFAULT-472926544) |15.4%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |15.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |15.8%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |16.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |16.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |16.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |17.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |17.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |17.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |18.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |18.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |18.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |19.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |19.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |20.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |20.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |20.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |20.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |20.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |20.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |20.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |20.8%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |20.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |21.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |21.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |21.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |21.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |21.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut |21.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |21.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |21.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |21.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |22.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |22.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |22.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |22.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |22.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |22.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |22.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |22.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |22.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |23.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |23.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |23.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |23.4%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |23.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |23.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |23.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |23.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |23.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |23.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |24.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |24.1%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |24.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |23.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |23.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |23.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |23.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |24.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |24.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |24.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |24.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |24.8%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |24.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |25.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap |25.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |25.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |25.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |26.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |26.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |26.2%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |26.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |26.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |26.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |26.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |26.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |27.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut |27.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |28.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |28.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |29.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |29.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |29.3%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |29.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |29.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |29.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |29.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |29.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |29.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |30.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |30.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |30.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |30.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |30.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |30.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |30.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |30.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |30.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |30.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |30.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |31.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |31.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |31.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |31.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |31.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |31.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |30.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |30.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |31.0%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |31.1%| 
[AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |31.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |33.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |33.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql |33.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |33.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |33.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |33.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |33.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |33.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |33.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |33.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |33.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |33.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |33.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |33.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |34.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |34.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |34.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |34.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |34.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |34.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |34.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |34.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |34.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |34.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |34.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |35.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |35.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/docs/generator/generator |36.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |36.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |36.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |36.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |36.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |37.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |37.1%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |37.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |37.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |37.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |37.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |37.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |37.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |37.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |37.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |38.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |38.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |38.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |38.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |38.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |38.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |38.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |38.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |38.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |38.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |38.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |39.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |39.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |38.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |39.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |40.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |40.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |40.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |40.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |40.7%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |40.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |40.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |41.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut |41.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |41.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |41.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |41.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |41.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |41.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |41.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |41.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |41.8%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |41.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |42.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |42.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |42.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |42.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |42.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |42.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |41.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |41.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |41.8%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |41.9%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |42.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |42.2%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |42.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |42.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |42.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |43.7%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |43.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |43.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |44.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |44.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |44.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |44.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |44.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |44.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |44.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |44.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |45.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |45.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |45.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |45.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |45.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |45.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |45.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |45.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |44.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |44.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |45.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |45.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |45.1%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |45.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |45.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |45.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |45.5%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |45.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |46.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |46.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |47.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |47.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |47.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |47.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |47.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |47.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |47.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |47.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |48.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |48.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |48.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |48.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |47.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |47.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut |47.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example |47.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |47.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |47.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |48.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |48.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |48.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |48.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |48.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |48.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |48.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |48.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |49.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |49.2%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |49.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |49.9%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |50.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |50.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |50.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |50.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |50.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |50.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |50.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |50.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |50.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |50.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |50.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |50.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |50.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |50.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |50.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |50.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |51.0%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |51.1%| [LD] {BAZEL_UPLOAD, 
SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |51.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |51.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |51.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |51.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |51.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |51.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |51.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |51.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |52.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |52.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |52.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |53.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |53.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |53.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |53.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |53.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |53.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |53.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |44.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |44.2%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |44.3%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |44.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |44.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |44.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |44.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |44.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |44.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |44.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |45.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |45.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |45.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |45.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |45.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |45.1%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |45.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |45.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |45.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |45.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |45.6%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |45.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |45.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |45.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |45.8%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |45.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |45.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |44.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |44.7%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |44.8%| COMPACTING CACHE 889.2GiB |44.9%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |44.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |45.0%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |45.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |45.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |45.1%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |45.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |45.3%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |45.3%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |45.4%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |45.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |45.5%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |45.5%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |45.6%| [LD] {RESULT} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |45.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |45.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |45.8%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |45.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |45.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |45.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |46.0%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |46.1%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring >> 
TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 |46.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |46.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |46.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |46.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |46.3%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |46.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |46.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |46.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |46.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |46.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |46.7%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |46.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |46.8%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |46.9%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |46.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |47.0%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |47.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |47.1%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |47.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |47.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |47.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |47.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |47.4%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |47.4%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |47.5%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |47.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |47.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |47.7%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |47.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |47.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |47.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |47.9%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay |48.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |48.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |48.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |48.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |48.2%| [LD] {RESULT} 
$(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |48.2%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |48.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |48.4%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |48.4%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |48.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |48.5%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |48.6%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql |48.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |48.7%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |48.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |48.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |48.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |48.9%| [LD] {RESULT} $(B)/ydb/tests/olap/docs/generator/generator |49.0%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |49.1%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |49.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |49.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |49.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |49.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |49.3%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |49.4%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |49.5%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |49.5%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |49.6%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |49.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |49.7%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |49.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |49.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |49.9%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |49.9%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |50.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |50.0%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |50.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |50.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |50.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |50.3%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |50.3%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |50.4%| [LD] {RESULT} 
$(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |50.4%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |50.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |50.5%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |50.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |50.7%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |50.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |50.8%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |50.8%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |50.9%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |50.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |51.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |51.1%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |51.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |51.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |51.2%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |51.3%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |51.4%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |51.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |51.5%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |51.5%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |51.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |51.6%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |51.7%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |51.8%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |51.8%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |51.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |51.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |52.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |52.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |52.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |52.2%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |52.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |52.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |52.3%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |52.4%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |52.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |52.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |52.6%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |52.6%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |52.7%| [LD] {RESULT} 
$(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |52.7%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |52.8%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |52.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |52.9%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |53.0%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |53.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |53.1%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |53.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |53.2%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |53.2%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |53.3%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |53.4%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |53.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |53.5%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |53.5%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |53.6%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |53.7%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |53.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |53.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |53.8%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |53.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |53.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |54.0%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |54.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |54.1%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |54.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |54.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |54.3%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |54.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |54.4%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |54.5%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |54.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |54.6%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |54.6%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |54.7%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |54.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |54.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |54.9%| [LD] 
{RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |54.9%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |55.0%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |55.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |55.1%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |55.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |55.2%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |55.3%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |55.3%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |55.4%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |55.4%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |55.5%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |55.5%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |55.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |55.7%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |55.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |55.8%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |55.8%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |55.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |56.0%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |56.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |56.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |56.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |56.2%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |56.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |56.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |56.4%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |56.4%| [LD] {RESULT} $(B)/ydb/tests/olap/ydb-tests-olap |56.5%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |56.5%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |56.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |56.6%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |56.7%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |56.8%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |56.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |56.9%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |56.9%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |57.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |57.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |57.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |57.2%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |57.2%| 
[LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |57.3%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |57.3%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |57.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |57.4%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |57.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |57.6%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |57.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |57.7%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |57.7%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |57.8%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |57.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |57.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |58.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |58.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |58.1%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |58.1%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |58.2%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |58.3%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |58.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |58.4%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |58.4%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |58.5%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |58.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |58.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |58.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |58.7%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |58.8%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |58.8%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |58.9%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |58.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |59.0%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |59.1%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |59.1%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |59.2%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |59.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |59.3%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |59.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |59.4%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |59.5%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |59.5%| [LD] 
{RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |59.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |59.6%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |59.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |59.7%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |59.8%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |59.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |59.9%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |60.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |60.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |60.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |60.1%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |60.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |60.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |60.3%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |60.4%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |60.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |60.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |60.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |60.6%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |60.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |60.7%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |60.8%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |60.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |60.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |61.0%| [LD] {RESULT} $(B)/ydb/tests/functional/config/ydb-tests-functional-config |61.0%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |61.1%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |61.1%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |61.2%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |61.2%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |61.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |61.4%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |61.4%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |61.5%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |61.5%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |61.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |61.6%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |61.7%| [LD] {RESULT} 
$(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |61.8%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |61.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |61.9%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |61.9%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |62.0%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |62.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |62.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |62.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |62.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |62.3%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |62.3%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |62.4%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |62.4%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |62.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |62.6%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut |62.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |62.7%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |62.7%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |62.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |62.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |62.9%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |63.0%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |63.0%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |63.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |63.1%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |63.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |63.3%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |63.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |63.4%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |63.4%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |63.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |63.5%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |63.6%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |63.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |63.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |63.8%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |63.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |63.9%| [LD] {RESULT} 
$(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |63.9%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |64.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |64.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |64.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |64.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |64.2%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |64.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |64.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |64.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |64.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |64.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |64.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |64.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |64.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |64.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |64.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |64.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |65.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpLimits::ComputeNodeMemoryLimit |65.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |65.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |65.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |65.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |65.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |65.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |65.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage |65.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |65.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest |66.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> TKeyValueTracingTest::WriteHuge >> TKeyValueTracingTest::ReadHuge |66.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |66.1%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> TKeyValueTracingTest::ReadSmall >> TKeyValueTracingTest::WriteSmall |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |66.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |66.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |66.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |66.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |66.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |66.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |66.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |66.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |66.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |66.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |66.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |66.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |67.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |67.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |67.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |67.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |67.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |67.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |67.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |67.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |67.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |67.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |67.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |67.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |67.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall |68.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.3%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tiering/ut/unittest |68.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |68.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |69.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest |69.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |70.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |70.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TKeyValueTracingTest::ReadHuge [FAIL] >> TKeyValueTracingTest::WriteHuge [FAIL] >> TKeyValueTracingTest::ReadSmall [FAIL] >> TKeyValueTracingTest::WriteSmall [FAIL] |70.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |70.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |70.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |70.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |70.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |70.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |70.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |70.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |70.7%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/kqp/ut/cost/unittest |70.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |70.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |70.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |70.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |71.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |72.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |72.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |72.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |72.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |72.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |72.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |72.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |72.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [GOOD] |72.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |72.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |72.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |72.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |72.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |72.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |73.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |73.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/persqueue_v1/ut/unittest |73.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |73.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest |73.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DisableDeduplication |73.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/sdk_sessions_ut/unittest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [GOOD]
Test command err:
2025-03-28T10:09:55.772472Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800569999813152:2076];send_to=[0:7307199536658146131:7762515];
2025-03-28T10:09:55.773236Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/000344/r3tmp/tmp0dHudZ/pdisk_1.dat
2025-03-28T10:09:56.173968Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 20708, node 1
2025-03-28T10:09:56.190560Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-28T10:09:56.190627Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0
2025-03-28T10:09:56.198152Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T10:09:56.198238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T10:09:56.209062Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T10:09:56.284637Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T10:09:56.284666Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T10:09:56.284675Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T10:09:56.284790Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
TClient is connected to server localhost:13787
WaitRootIsUp 'Root'...
TClient::Ls request: Root
TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED)
WaitRootIsUp 'Root' success.
2025-03-28T10:09:56.669646Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T10:09:57.795986Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800578589748762:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:09:57.796062Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:09:58.206518Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480
2025-03-28T10:09:58.384353Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800582884716253:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:09:58.384409Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:09:58.384489Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800582884716258:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:09:58.387802Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480
2025-03-28T10:09:58.399812Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800582884716260:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2025-03-28T10:09:58.458615Z node 1 :TX_PROXY ERROR: Actor# [1:7486800582884716333:2799] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:09:58.673708Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710661. Ctx: { TraceId: 01jqe3nnkf744dde5ky3x2c107, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=YWQxNWIwZjEtY2QzOTMyMDQtOTc0MzAxMDMtYWVhNWEwNWE=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root 2025-03-28T10:09:58.774023Z node 1 :KQP_EXECUTER ERROR: TxId: 281474976710662. Ctx: { TraceId: 01jqe3nnzk2r507mxy3b0fpjrk, Database: , DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NTRmNTA4ZjgtMWIxNzcwMS01YjEwNDI3LTM1OGFhOWI0, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. Database not set, use /Root |73.5%| [TA] $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... results_accumulator.log} |73.5%| [TA] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture()+28 (0x1011113C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105CCFD0) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFD53C1D) NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+216 (0xFD5F748) std::__y1::__function::__func, void ()>::operator()()+280 (0xFD73998) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x105FAF36) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105D3B49) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD72844) NUnitTest::TTestFactory::Execute()+2438 (0x105D5416) NUnitTest::RunMain(int, char**)+5213 (0x105F54AD) ??+0 (0x7F034EA80D90) __libc_start_main+128 (0x7F034EA80E40) _start+41 (0xD6F6029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture()+28 (0x1011113C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105CCFD0) TestOneRead(TBasicString>, TBasicString>)+4828 (0xFD595EC) NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+318 (0xFD601BE) std::__y1::__function::__func, void ()>::operator()()+280 (0xFD73998) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x105FAF36) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105D3B49) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD72844) NUnitTest::TTestFactory::Execute()+2438 (0x105D5416) NUnitTest::RunMain(int, char**)+5213 (0x105F54AD) ??+0 (0x7FF838F0FD90) 
__libc_start_main+128 (0x7FF838F0FE40) _start+41 (0xD6F6029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL] Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture()+28 (0x1011113C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105CCFD0) TestOneRead(TBasicString>, TBasicString>)+4828 (0xFD595EC) NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+318 (0xFD5FDCE) std::__y1::__function::__func, void ()>::operator()()+280 (0xFD73998) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x105FAF36) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105D3B49) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD72844) NUnitTest::TTestFactory::Execute()+2438 (0x105D5416) NUnitTest::RunMain(int, char**)+5213 (0x105F54AD) ??+0 (0x7F1FAA8FED90) __libc_start_main+128 (0x7F1FAA8FEE40) _start+41 (0xD6F6029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL] Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture()+28 (0x1011113C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105CCFD0) TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFD53C1D) NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+216 (0xFD5FA58) std::__y1::__function::__func, void ()>::operator()()+280 (0xFD73998) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x105FAF36) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105D3B49) NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD72844) NUnitTest::TTestFactory::Execute()+2438 (0x105D5416) NUnitTest::RunMain(int, char**)+5213 (0x105F54AD) ??+0 (0x7F177003DD90) __libc_start_main+128 (0x7F177003DE40) _start+41 (0xD6F6029) |73.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest |73.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest |74.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureRun3Cycles |74.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest |74.1%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest |74.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest |74.3%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest |74.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest |74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |74.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |74.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |74.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |74.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |74.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |75.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |75.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |75.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |75.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |75.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |75.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest |75.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::LayoutIncorrect |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |75.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |75.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |75.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInvalidateOnError >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink |76.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.5%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |76.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |77.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOlap >> KqpSnapshotIsolation::TConflictReadWriteOlap |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |77.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |77.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |77.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |77.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltp |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltpNoSink >> KqpSnapshotIsolation::TConflictWriteOltp |77.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |77.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |77.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |77.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |77.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpSnapshotIsolation::TReadOnlyOltp |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltp |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpSnapshotIsolation::TConflictWriteOltpNoSink |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn+useSink |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest 
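The four TKeyValueTracingTest failures above (WriteSmall, WriteHuge, ReadSmall, ReadHuge) all fail the same check: the test expects exactly one trace to land in the mock Wilson uploader and finds two, hence "(2 != 1)". A minimal sketch of that assertion pattern, assuming the library/cpp/testing/unittest macros used throughout the tree; the vector contents are hypothetical stand-ins for env.WilsonUploader->Traces, not data from the run:

    #include <library/cpp/testing/unittest/registar.h>
    #include <util/generic/string.h>
    #include <util/generic/vector.h>

    Y_UNIT_TEST_SUITE(TTraceCountSketch) {
        Y_UNIT_TEST(ExactlyOneTraceExpected) {
            // Stand-in for the traces collected after a single KV write.
            TVector<TString> traces = {"kv-write", "unexpected-duplicate"};
            // Raises "equal assertion failed ... (2 != 1)", the same shape
            // as the failures in the logs above.
            UNIT_ASSERT_VALUES_EQUAL(traces.size(), 1);
        }
    }

The backtraces all pass through NUnitTest::NPrivate::RaiseError, which is what these macros invoke on a mismatch.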
|78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> Viewer::QueryExecuteScript |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink >> KqpLimits::QSReplySize+useSink |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> THealthCheckTest::LayoutIncorrect [GOOD] |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.0%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::LayoutIncorrect [GOOD] Test command err: 2025-03-28T10:10:08.215237Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T10:10:08.215628Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T10:10:08.215682Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/000262/r3tmp/tmp3xAvli/pdisk_1.dat 2025-03-28T10:10:08.742958Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24421, node 1 TClient is connected to server localhost:11779 2025-03-28T10:10:09.486324Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:09.486387Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:09.486422Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:09.486996Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.7%| [TA] $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |81.7%| [TA] {RESULT} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestDataErasure::DataErasureRun3Cycles [GOOD] |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TPersQueueTest::DisableDeduplication [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_data_erasure/unittest >> TestDataErasure::DataErasureRun3Cycles [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:114:2058] recipient: [1:108:2140] Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:126:2058] recipient: [1:108:2140] 2025-03-28T10:10:04.095797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T10:10:04.095994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T10:10:04.096040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T10:10:04.096085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T10:10:04.096979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T10:10:04.097027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T10:10:04.097107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T10:10:04.097188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2025-03-28T10:10:04.098328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T10:10:04.191966Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot subscribe to console configs 2025-03-28T10:10:04.192027Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:04.208145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T10:10:04.208386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T10:10:04.208535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2025-03-28T10:10:04.216597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T10:10:04.216916Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T10:10:04.220248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2025-03-28T10:10:04.220557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T10:10:04.226038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2025-03-28T10:10:04.233427Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T10:10:04.233495Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T10:10:04.234203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T10:10:04.234261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T10:10:04.234344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T10:10:04.234431Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2025-03-28T10:10:04.241189Z node 1 :HIVE INFO: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:123:2149] sender: [1:237:2058] recipient: [1:15:2062] 2025-03-28T10:10:04.343658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2025-03-28T10:10:04.344894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T10:10:04.346751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2025-03-28T10:10:04.347728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2025-03-28T10:10:04.347809Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944 2025-03-28T10:10:04.350583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at 
schemeshard: 72057594046678944 2025-03-28T10:10:04.350769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2025-03-28T10:10:04.350949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T10:10:04.351070Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2025-03-28T10:10:04.351104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T10:10:04.351150Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T10:10:04.352929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T10:10:04.352982Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2025-03-28T10:10:04.353013Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T10:10:04.354591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T10:10:04.354637Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T10:10:04.354700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2025-03-28T10:10:04.354758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T10:10:04.359314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T10:10:04.361252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2025-03-28T10:10:04.364956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2025-03-28T10:10:04.366113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2025-03-28T10:10:04.366249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 132 RawX2: 4294969451 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2025-03-28T10:10:04.366335Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T10:10:04.367417Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 128 -> 240 2025-03-28T10:10:04.367474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2025-03-28T10:10:04.367706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2025-03-28T10:10:04.367779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2025-03-28T10:10:04.369860Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2025-03-28T10:10:04.369904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2025-03-28T10:10:04.370054Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2025-03-28T10:10:04.370082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [1:204:2206], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2025-03-28T10:10:04.370848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2025-03-28T10:10:04.370892Z node 1 :FLAT_TX_SCHEMESHARD INFO: [72057594046678944] TDone opId# 1:0 ProgressState 2025-03-28T10:10:04.370962Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T10:10:04.370996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T10:10:04.371031Z node 1 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#1:0 progress is 1/1 2025-03-28T10:10:04.371060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T10:10:04.371086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2025-03-28T10:10:04.371113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2025-03-28T10:10:04.371142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 1:0 2025-03-28T10:10:04.371162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: RemoveTx for txid 1:0 2025-03-28T10:10:04.371215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2025-03-28T10:10:04.371263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2025-03-28T10:10:04.371296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 3 2025-03-28T10:10:04.373207Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T10:10:04.373301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 3 PathOwnerId: 72057594046678944, cookie: 1 2025-03-28T10:10:04.373338Z node 1 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, co ... 
nt# 269553241, Sender [1:1007:2854], Recipient [1:832:2715]: NKikimrTxDataShard.TEvForceDataCleanupResult DataCleanupGeneration: 3 TabletId: 72075186233409555 Status: OK 2025-03-28T10:10:10.741150Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvForceDataCleanupResult 2025-03-28T10:10:10.741183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Execute at schemestard: 72075186233409551 2025-03-28T10:10:10.741223Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] [Finished] Data erasure is completed for pathId# [OwnerId: 72075186233409551, LocalPathId: 2], datashard# 72075186233409555, shardIdx# 72075186233409551:5 in# 65 ms, next wakeup in# 8.935000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409551 2025-03-28T10:10:10.741263Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Data erasure in shards is completed. Send response to root schemeshard 2025-03-28T10:10:10.741288Z node 1 :FLAT_TX_SCHEMESHARD INFO: [TenantDataErasureManager] Complete: Generation# 3 2025-03-28T10:10:10.743107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemestard: 72075186233409551, NeedResponseComplete# false 2025-03-28T10:10:10.743209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureShard Complete at schemestard: 72075186233409551, NeedResponseComplete# true 2025-03-28T10:10:10.743375Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125514, Sender [1:832:2715], Recipient [1:290:2274]: NKikimrScheme.TEvTenantDataErasureResponse PathId { OwnerId: 72057594046678944 LocalId: 3 } Generation: 3 Status: COMPLETED 2025-03-28T10:10:10.743412Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvTenantDataErasureResponse 2025-03-28T10:10:10.743463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Execute at schemeshard: 72057594046678944 2025-03-28T10:10:10.743512Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] [Finished] Data erasure completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 3] in# 66 ms, next wakeup# 593.934000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2025-03-28T10:10:10.743587Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Data erasure in tenants is completed. 
Send request to BS controller 2025-03-28T10:10:10.745033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2025-03-28T10:10:10.745076Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-03-28T10:10:10.745260Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2025-03-28T10:10:10.745294Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-28T10:10:10.745324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-28T10:10:10.745366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-28T10:10:10.745392Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 0 2025-03-28T10:10:10.745434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-28T10:10:10.745475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-03-28T10:10:11.311021Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:10:11.311107Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:10:11.311179Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:455:2408]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:10:11.311202Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:10:11.311248Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:832:2715]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:10:11.311272Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:10:11.311330Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:10:11.311358Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:10:11.311425Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:455:2408], Recipient [1:455:2408]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:10:11.311448Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:10:11.311500Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:832:2715], Recipient [1:832:2715]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:10:11.311523Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:10:11.346402Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, 
received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-28T10:10:11.346481Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-28T10:10:11.346516Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-03-28T10:10:11.346766Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2025-03-28T10:10:11.346796Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-28T10:10:11.346823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-28T10:10:11.346889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-28T10:10:11.346920Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Progress data shred in BSC 5000 2025-03-28T10:10:11.346979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2025-03-28T10:10:11.347018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleRequestToBSC: Interval# 1.000000s 2025-03-28T10:10:11.771555Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:10:11.771645Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:10:11.771720Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:455:2408]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:10:11.771745Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:10:11.771797Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:832:2715]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:10:11.771819Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:10:11.771888Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:832:2715], Recipient [1:832:2715]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:10:11.771915Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:10:11.771985Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:290:2274], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:10:11.772014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:10:11.772063Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:455:2408], Recipient [1:455:2408]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:10:11.772089Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event 
TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:10:11.802881Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:290:2274]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-28T10:10:11.802957Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToRunDataErasureBSC 2025-03-28T10:10:11.802991Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] SendRequestToBSC: Generation# 3 2025-03-28T10:10:11.803193Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 268637738, Sender [1:295:2277], Recipient [1:290:2274]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2025-03-28T10:10:11.803216Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2025-03-28T10:10:11.803237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2025-03-28T10:10:11.803286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Execute at schemeshard: 72057594046678944 2025-03-28T10:10:11.803307Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxCompleteDataErasureBSC: Data shred in BSC is completed 2025-03-28T10:10:11.803337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: [RootDataErasureManager] ScheduleDataErasureWakeup: Interval# 0.933000s 2025-03-28T10:10:11.803360Z node 1 :FLAT_TX_SCHEMESHARD INFO: [RootDataErasureManager] Complete: Generation# 3, duration# 2 s 2025-03-28T10:10:11.807516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxCompleteDataErasureBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2025-03-28T10:10:11.808035Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:3597:4956], Recipient [1:290:2274]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T10:10:11.808082Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T10:10:11.808122Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046678944 2025-03-28T10:10:11.808246Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125519, Sender [1:272:2263], Recipient [1:290:2274]: NKikimrScheme.TEvDataErasureInfoRequest 2025-03-28T10:10:11.808281Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvDataErasureInfoRequest 2025-03-28T10:10:11.808312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Handle TEvDataErasureInfoRequest, at schemeshard: 72057594046678944 |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/{meta.json ... results_accumulator.log} |83.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> KqpLimits::TooBigColumn+useSink [GOOD] |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/unittest >> TPersQueueTest::DisableDeduplication [GOOD] Test command err: 2025-03-28T10:10:01.010499Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800593163637478:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:01.010590Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T10:10:01.031878Z node 2 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7486800596819319771:2072];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:01.032317Z node 2 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2025-03-28T10:10:01.139164Z node 2 :PQ_READ_PROXY DEBUG: Direct read cache: : Created test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/00027d/r3tmp/tmp5yXZLb/pdisk_1.dat 2025-03-28T10:10:01.157612Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: : Created 2025-03-28T10:10:01.306519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:01.306617Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:01.308932Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T10:10:01.309519Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 27589, node 1 2025-03-28T10:10:01.335841Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:01.339160Z node 1 :GRPC_SERVER WARN: 
SchemeBoardDelete /Root Strong=0 2025-03-28T10:10:01.339190Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T10:10:01.361928Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/km4y/00027d/r3tmp/yandexH1ruSK.tmp 2025-03-28T10:10:01.361958Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: /home/runner/.ya/build/build_root/km4y/00027d/r3tmp/yandexH1ruSK.tmp 2025-03-28T10:10:01.362079Z node 1 :NET_CLASSIFIER WARN: successfully initialized from file: /home/runner/.ya/build/build_root/km4y/00027d/r3tmp/yandexH1ruSK.tmp 2025-03-28T10:10:01.362191Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T10:10:01.368841Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:01.368944Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:01.371632Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:01.406844Z INFO: TTestServer started on Port 6998 GrpcPort 27589 TClient is connected to server localhost:6998 PQClient connected to localhost:27589 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:01.623430Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... waiting... 2025-03-28T10:10:01.658602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... waiting... waiting... 2025-03-28T10:10:03.142980Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486800605409254676:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:03.143013Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800601753573214:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:03.142990Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7486800605409254685:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:03.143077Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:03.143118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800601753573240:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:03.143140Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:03.147001Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480 2025-03-28T10:10:03.149594Z node 2 :TX_PROXY ERROR: Actor# [2:7486800605409254692:2123] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2025-03-28T10:10:03.149753Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800601753573281:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:03.149811Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:03.161149Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800601753573243:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T10:10:03.161226Z node 2 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7486800605409254691:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2025-03-28T10:10:03.247376Z node 1 :TX_PROXY ERROR: Actor# [1:7486800601753573328:2817] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:03.254850Z node 2 :TX_PROXY ERROR: Actor# [2:7486800605409254719:2129] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 9], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:03.383280Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T10:10:03.387280Z node 1 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [1:7486800601753573346:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T10:10:03.387479Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDNlMDU2ZGEtNGQyYjU0YjQtNTgzY2EwYzYtYTViMmZjNmM=, ActorId: [1:7486800601753573211:2340], ActorState: ExecuteState, TraceId: 01jqe3nt7xbes8s031tw383h9d, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T10:10:03.387919Z node 2 :KQP_COMPILE_ACTOR ERROR: Compilation failed, self: [2:7486800605409254734:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2025-03-28T10:10:03.388207Z node 2 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=2&id=YWExM2NjZWItYTljM2FlNjMtNGI3YzE0ZWEtZDllMmYxNTg=, ActorId: [2:7486800605409254660:2307], ActorState: ExecuteState, TraceId: 01jqe3nt7w6tr028t1ah94f22f, ReplyQueryCompileError, status SCHEME_ERROR remove tx with tx_id: 2025-03-28T10:10:03.389226Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2025-03-28T10:10:03.389235Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 ... ion 2 2025-03-28T10:10:11.606503Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2025-03-28T10:10:11.606522Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) ready for read with readOffset 0 endOffset 3 2025-03-28T10:10:11.606586Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2), readOffset# 0, endOffset# 3, WTime# 1743156611486, sizeLag# 409 2025-03-28T10:10:11.606597Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1TEvPartitionReady. 
Aval parts: 1 2025-03-28T10:10:11.606624Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 performing read request: guid# 7194df79-b7b3cc32-7482f8b4-9866fc46, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2), count# 3, size# 490, partitionsAsked# 1, maxTimeLag# 0ms 2025-03-28T10:10:11.606727Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2)maxCount 3 maxSize 490 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid 7194df79-b7b3cc32-7482f8b4-9866fc46 2025-03-28T10:10:11.606775Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 2, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 2 user debug offset 0 count 3 size 490 endOffset 3 max time lag 0ms effective offset 0 2025-03-28T10:10:11.606795Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 2, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 3 2025-03-28T10:10:11.606889Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 2, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-03-28T10:10:11.606903Z node 1 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T10:10:11.606979Z node 1 :PERSQUEUE DEBUG: Handle TEvRequest topic: 'rt3.dc1--topic1' requestId: 2025-03-28T10:10:11.606992Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--topic1' partition 1 2025-03-28T10:10:11.607033Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 2 messageNo: 0 requestId: cookie: 0 2025-03-28T10:10:11.607093Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] read cookie 2 Topic 'rt3.dc1--topic1' partition 1 user debug offset 0 count 3 size 490 endOffset 3 max time lag 0ms effective offset 0 2025-03-28T10:10:11.607115Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] read cookie 2 added 0 blobs, size 0 count 0 last offset 0, current partition end offset: 3 2025-03-28T10:10:11.607163Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892, Partition: 1, State: StateIdle] Reading cookie 2. All data is from uncompacted head. 2025-03-28T10:10:11.607173Z node 1 :PERSQUEUE DEBUG: FormAnswer for 0 blobs 2025-03-28T10:10:11.607217Z node 1 :PERSQUEUE DEBUG: Answer ok topic: 'rt3.dc1--topic1' partition: 1 messageNo: 0 requestId: cookie: 0 2025-03-28T10:10:11.607484Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 99 bytes ..." SourceId: "" SeqNo: 1 WriteTimestampMS: 1743156611400 CreateTimestampMS: 1743156611396 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 99 bytes ..." SourceId: "" SeqNo: 2 WriteTimestampMS: 1743156611515 CreateTimestampMS: 1743156611396 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 99 bytes ..." 
SourceId: "" SeqNo: 3 WriteTimestampMS: 1743156611516 CreateTimestampMS: 1743156611397 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 7 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2025-03-28T10:10:11.607588Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1) wait data in partition inited, cookie 1 from offset3 2025-03-28T10:10:11.607614Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 129287f3-ba3292cc-b3ea6396-d6682441 has messages 1 2025-03-28T10:10:11.607783Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 99 bytes ..." SourceId: "" SeqNo: 1 WriteTimestampMS: 1743156611486 CreateTimestampMS: 1743156611462 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 99 bytes ..." SourceId: "" SeqNo: 2 WriteTimestampMS: 1743156611515 CreateTimestampMS: 1743156611462 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 99 bytes ..." SourceId: "" SeqNo: 3 WriteTimestampMS: 1743156611515 CreateTimestampMS: 1743156611462 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 0 SizeLag: 7 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2025-03-28T10:10:11.607861Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) wait data in partition inited, cookie 1 from offset3 2025-03-28T10:10:11.607879Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2) EndOffset 3 ReadOffset 3 ReadGuid 7194df79-b7b3cc32-7482f8b4-9866fc46 has messages 1 2025-03-28T10:10:11.607951Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 read done: guid# 129287f3-ba3292cc-b3ea6396-d6682441, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 2(assignId:1), size# 496 2025-03-28T10:10:11.607971Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 response to read: guid# 129287f3-ba3292cc-b3ea6396-d6682441 2025-03-28T10:10:11.608133Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 Process answer. 
Aval parts: 0 2025-03-28T10:10:11.608196Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 read done: guid# 7194df79-b7b3cc32-7482f8b4-9866fc46, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 1(assignId:2), size# 386 2025-03-28T10:10:11.608209Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 response to read: guid# 7194df79-b7b3cc32-7482f8b4-9866fc46 2025-03-28T10:10:11.608224Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 Process answer. Aval parts: 0 Got data event with total 3 messages, current total messages: 3 2025-03-28T10:10:11.611016Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 grpc read done: success# 1, data# { read_request { bytes_size: 496 } } 2025-03-28T10:10:11.611105Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 got read request: guid# 806ee75-aa497a1d-187d5daf-ff9db0de Got data event with total 3 messages, current total messages: 6 Got data event with total 3 messages, current total messages: 9 2025-03-28T10:10:11.622816Z node 1 :PQ_READ_PROXY DEBUG: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 grpc read done: success# 1, data# { read_request { bytes_size: 496 } } 2025-03-28T10:10:11.622864Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 grpc closed 2025-03-28T10:10:11.622904Z node 1 :PQ_READ_PROXY INFO: session cookie 1 consumer shared/debug session shared/debug_1_1_35664697066638836_v1 is DEAD 2025-03-28T10:10:11.624091Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/debug_1_1_35664697066638836_v1 2025-03-28T10:10:11.624144Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7486800636113313050:2543] destroyed 2025-03-28T10:10:11.624166Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/debug_1_1_35664697066638836_v1 2025-03-28T10:10:11.624178Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7486800636113313049:2542] destroyed 2025-03-28T10:10:11.624187Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] Destroy direct read session shared/debug_1_1_35664697066638836_v1 2025-03-28T10:10:11.624198Z node 1 :PERSQUEUE DEBUG: [PQ: 72075186224037892] server disconnected, pipe [1:7486800636113313051:2544] destroyed 2025-03-28T10:10:11.624236Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/debug_1_1_35664697066638836_v1 2025-03-28T10:10:11.624248Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/debug_1_1_35664697066638836_v1 2025-03-28T10:10:11.624258Z node 1 :PQ_READ_PROXY DEBUG: Direct read cache: server session deregistered: shared/debug_1_1_35664697066638836_v1 2025-03-28T10:10:11.626819Z node 2 :PERSQUEUE_READ_BALANCER INFO: [72075186224037893][rt3.dc1--topic1] pipe [1:7486800636113313045:2539] disconnected; active server actors: 1 2025-03-28T10:10:11.626888Z node 2 :PERSQUEUE_READ_BALANCER NOTICE: [72075186224037893][rt3.dc1--topic1] pipe [1:7486800636113313045:2539] client debug disconnected session shared/debug_1_1_35664697066638836_v1 2025-03-28T10:10:11.947933Z node 1 :KQP_COMPUTE WARN: TxId: 
281474976710688, task: 1, CA Id [1:7486800636113313089:2549]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 0 2025-03-28T10:10:11.980596Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710688, task: 1, CA Id [1:7486800636113313089:2549]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-03-28T10:10:12.041012Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710688, task: 1, CA Id [1:7486800636113313089:2549]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-03-28T10:10:12.107191Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710688, task: 1, CA Id [1:7486800636113313089:2549]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 2025-03-28T10:10:12.192220Z node 1 :KQP_COMPUTE WARN: TxId: 281474976710688, task: 1, CA Id [1:7486800636113313089:2549]. Got EvDeliveryProblem, TabletId: 72075186224037891, NotDelivered: 1 |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |84.1%| [TA] $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.2%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 29833, MsgBus: 12185 2025-03-28T10:10:07.608512Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800622143589736:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:07.608562Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/0003bf/r3tmp/tmpfaEBDj/pdisk_1.dat 2025-03-28T10:10:08.029314Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:08.065953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:08.066054Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 29833, node 1 2025-03-28T10:10:08.068014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:08.110010Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:08.110038Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:08.110048Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:08.110195Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:12185 TClient is connected to server localhost:12185 WaitRootIsUp 'Root'... 
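The KQP_COMPUTE warnings earlier in this section show one compute actor (CA Id [1:7486800636113313089:2549]) receiving EvDeliveryProblem for tablet 72075186224037891 five times, with the gap between attempts growing from about 33 ms (11.947933 -> 11.980596) to about 85 ms (12.107191 -> 12.192220) — the shape of a capped-backoff retry schedule. A minimal sketch of such a schedule follows; the growth factor and cap are illustrative assumptions, not values taken from YDB source:

    #include <algorithm>
    #include <chrono>

    // Capped multiplicative backoff: each failed delivery waits ~50% longer,
    // up to a ceiling. Factor and cap are assumptions for illustration only,
    // not YDB's actual retry constants.
    std::chrono::milliseconds NextRetryDelay(std::chrono::milliseconds prev) {
        constexpr std::chrono::milliseconds kCap{1000};
        return std::min(prev + prev / 2, kCap);
    }

Starting from ~33 ms this yields roughly 50, 75, 113 ms — the same order of growth as the intervals logged above.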
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:08.623019Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:08.641444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:08.762168Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:08.904428Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:08.982606Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:10.820414Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800635028493348:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:10.820512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:11.217079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:11.299221Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T10:10:11.341216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T10:10:11.373721Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T10:10:11.411612Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T10:10:11.464417Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T10:10:11.522374Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800639323461157:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:11.522476Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:11.522550Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800639323461162:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:11.526673Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T10:10:11.540208Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800639323461164:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T10:10:11.632704Z node 1 :TX_PROXY ERROR: Actor# [1:7486800639323461220:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:12.609683Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800622143589736:2128];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:12.609792Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T10:10:13.160809Z node 1 :GLOBAL WARN: fline=events.h:103;event=ev_write_error;status=STATUS_BAD_REQUEST;details=Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911;tx_id=3; 2025-03-28T10:10:13.169036Z node 1 :TX_DATASHARD ERROR: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 2025-03-28T10:10:13.169371Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486800647913396181:2496], Table: `/Root/KeyValue` ([72057594046644480:6:1]), SessionActorId: [1:7486800643618428809:2496]Got BAD REQUEST for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[1:7486800647913396181:2496].{
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 } 2025-03-28T10:10:13.170357Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486800643618428857:2496], SessionActorId: [1:7486800643618428809:2496], statusCode=BAD_REQUEST. Issue=
: Error: Bad request. Table: `/Root/KeyValue`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 . sessionActorId=[1:7486800643618428809:2496]. isRollback=0 2025-03-28T10:10:13.221318Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjNmOWQ0NTUtZWU3MDZjNTctOTI2ODNmYzEtYTI5OTMwZjM=, ActorId: [1:7486800643618428809:2496], ActorState: ExecuteState, TraceId: 01jqe3p3ngbdkp61f2vmzt4h4c, got TEvKqpBuffer::TEvError in ExecuteState, status: BAD_REQUEST send to: [1:7486800643618428858:2496] from: [1:7486800643618428857:2496] 2025-03-28T10:10:13.221441Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486800643618428858:2496] TxId: 281474976710671. Ctx: { TraceId: 01jqe3p3ngbdkp61f2vmzt4h4c, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NjNmOWQ0NTUtZWU3MDZjNTctOTI2ODNmYzEtYTI5OTMwZjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. BAD_REQUEST: {
: Error: Bad request. Table: `/Root/KeyValue`., code: 2017 subissue: {
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 } } 2025-03-28T10:10:13.222389Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjNmOWQ0NTUtZWU3MDZjNTctOTI2ODNmYzEtYTI5OTMwZjM=, ActorId: [1:7486800643618428809:2496], ActorState: ExecuteState, TraceId: 01jqe3p3ngbdkp61f2vmzt4h4c, Create QueryResponse for error on request, msg:
: Error: Bad request. Table: `/Root/KeyValue`., code: 2017
: Error: Cannot parse tx 3. BAD_ARGUMENT: Row cell size of 20971520 bytes is larger than the allowed threshold 16777216 at tablet# 72075186224037911, code: 2017 |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_retry.py::TestRetry::test_low_rate[kikimr0] |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/columnshard/ut_rw/unittest |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |86.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow Test command err: Trying to start YDB, gRPC: 18084, MsgBus: 18826 2025-03-28T10:09:59.537218Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800587489266756:2131];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:09:59.537578Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/0002e6/r3tmp/tmpXXA5GH/pdisk_1.dat 2025-03-28T10:09:59.925717Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:09:59.948641Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:09:59.949164Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting TServer::EnableGrpc on GrpcPort 18084, node 1 2025-03-28T10:09:59.957666Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:09:59.958426Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T10:09:59.958472Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T10:10:00.093953Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:00.093981Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:00.094006Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:00.094105Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18826 TClient is connected to server localhost:18826 WaitRootIsUp 'Root'... 
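The KqpLimits::TooBigColumn report earlier in this section ends with the datashard rejecting a write: a row cell of 20971520 bytes (20 * 1024 * 1024, i.e. 20 MiB) exceeds the 16777216-byte (16 MiB) threshold, so the request fails with BAD_ARGUMENT before any data is applied. A client-side pre-check along these lines avoids the round trip; this is a sketch, and kMaxCellBytes mirrors the logged threshold rather than any constant exported by YDB:

    #include <cstddef>
    #include <stdexcept>
    #include <string>

    // Threshold observed in the log ("allowed threshold 16777216"); treated
    // here as an assumption, not a public YDB API constant.
    constexpr std::size_t kMaxCellBytes = 16777216;  // 16 MiB

    // Reject an oversized cell locally instead of waiting for the
    // datashard's "Cannot parse tx ... BAD_ARGUMENT" error.
    void EnsureCellFits(const std::string& value) {
        if (value.size() > kMaxCellBytes) {
            throw std::runtime_error("cell of " + std::to_string(value.size()) +
                                     " bytes exceeds the 16 MiB per-cell limit");
        }
    }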
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:00.767180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:00.806065Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:00.943603Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:01.088406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:01.136812Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:01.761093Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800596079203017:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:01.762709Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:02.315180Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:02.340801Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T10:10:02.404319Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T10:10:02.429146Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T10:10:02.457269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T10:10:02.488870Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T10:10:02.580413Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800600374170867:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:02.580492Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:02.580574Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800600374170872:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:02.585266Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T10:10:02.593213Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800600374170874:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T10:10:02.690031Z node 1 :TX_PROXY ERROR: Actor# [1:7486800600374170930:3452] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:03.700800Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T10:10:03.813793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7486800604669138668:2508];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:10:03.814033Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7486800604669138668:2508];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:10:03.814287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7486800604669138668:2508];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:10:03.814368Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7486800604669138668:2508];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:10:03.814449Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7486800604669138668:2508];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:10:03.814545Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7486800604669138668:2508];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:10:03.814619Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7486800604669138668:2508];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:10:03.814705Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7486800604669138668:2508];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:10:03.814798Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7486800604669138668:2508];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:10:03.814902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7486800604669138668:2508];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:10:03.815003Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037927;self_id=[1:7486800604669138668:2508];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:10:03.815082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037927;self_id=[1:7486800604669138668:2508];tablet_id=72075186224037927;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:10:03.823949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486800604669138670:2509];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:10:03.824018Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486800604669138670:2509];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:10:03.824226Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486800604669138670:2509];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03 ... dateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T10:10:03.961012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T10:10:03.961020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T10:10:03.961059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T10:10:03.961078Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T10:10:03.961109Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T10:10:03.961146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T10:10:03.961146Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T10:10:03.961168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T10:10:03.961221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T10:10:03.961244Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037928;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T10:10:03.961430Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T10:10:03.961507Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T10:10:03.961629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T10:10:03.961650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T10:10:03.961718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T10:10:03.961735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T10:10:03.961849Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T10:10:03.961881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T10:10:03.961951Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T10:10:03.961966Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037924;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T10:10:04.003570Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T10:10:04.003951Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T10:10:04.008811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T10:10:04.008811Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T10:10:04.014126Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T10:10:04.014125Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-28T10:10:04.019474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-28T10:10:04.019474Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-28T10:10:04.024713Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-28T10:10:04.024716Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671;
2025-03-28T10:10:04.191828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673;
2025-03-28T10:10:04.191828Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673;
2025-03-28T10:10:04.192201Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673;
2025-03-28T10:10:04.244590Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710675;tx_id=281474976710675;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710675;
query_phases { duration_us: 5282 cpu_time_us: 1739 affected_shards: 1 }
query_phases { duration_us: 5407 cpu_time_us: 168 affected_shards: 1 }
compilation { duration_us: 36077 cpu_time_us: 34209 }
process_cpu_time_us: 510 total_duration_us: 48539 total_cpu_time_us: 36626
AddressSanitizer:DEADLYSIGNAL
=================================================================
==955427==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018b1ca4d bp 0x7ffdff787a40 sp 0x7ffdff7878a0 T0)
==955427==The signal is caused by a READ memory access.
==955427==Hint: address points to the zero page.
2025-03-28T10:10:04.537636Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800587489266756:2131];send_to=[0:7307199536658146131:7762515];
2025-03-28T10:10:04.537746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T10:10:14.920457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T10:10:14.920502Z node 1 :IMPORT WARN: Table profiles were not loaded
    #0 0x18b1ca4d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31
    #1 0x18b1ca4d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32
    #2 0x18b1ca4d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31
    #3 0x18b1ca4d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10
    #4 0x18b1ca4d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13
    #5 0x18b415d7 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
    #6 0x18b415d7 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
    #7 0x18b415d7 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
    #8 0x18b415d7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
    #9 0x18b415d7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
    #10 0x194936d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
    #11 0x194936d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
    #12 0x194936d5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
    #13 0x19463228 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
    #14 0x18b40483 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
    #15 0x19464af5 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19
    #16 0x1948dc4c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
    #17 0x7f5c242bdd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
    #18 0x7f5c242bde3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
    #19 0x162b8028 in _start (/home/runner/.ya/build/build_root/km4y/0002e6/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x162b8028) (BuildId: 42cc14b09b8e1be640cc9d032c9c2bf606ecb0c4)
AddressSanitizer can not provide additional info.
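Frames #0-#4 above place the SEGV inside google::protobuf's RepeatedPtrField Get path, reached through the generated table_access() accessor in ydb_query_stats.pb.h from the test body at kqp_cost_ut.cpp:636. A READ at address 0x000000000008 is the classic signature of dereferencing an element slot that does not exist, i.e. indexing a repeated field past its size. A bounds-checked accessor sketch; the Ydb::TableStats message and field names are inferred from the trace and should be treated as assumptions:

    #include <ydb/public/api/protos/ydb_query_stats.pb.h>  // path taken from frame #2

    // Generated repeated-field accessors do not bounds-check in release
    // builds: msg.table_access(i) with i >= msg.table_access_size() is
    // undefined behavior and can fault exactly as in the report above.
    // Message/field names inferred from the stack trace, not verified
    // against the .proto.
    const Ydb::TableStats::TableAccessStats* TableAccessOrNull(
            const Ydb::TableStats::QueryPhaseStats& phase, int i) {
        if (i < 0 || i >= phase.table_access_size()) {
            return nullptr;  // let the caller fail the test with a message
        }
        return &phase.table_access(i);
    }

In a unit test, asserting table_access_size() > 0 before the first access would turn this crash into a readable test failure.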
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler> ==955427==ABORTING |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Viewer::QueryExecuteScript [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString |87.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.5%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 |87.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/ut/unittest >> Viewer::QueryExecuteScript [GOOD] Test command err: 2025-03-28T10:10:08.558191Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800627377184652:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:08.558352Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # SectorMap:test-client[:2000] 2025-03-28T10:10:08.891744Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 14927, node 1 2025-03-28T10:10:08.946366Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:08.946473Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:08.948874Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:08.957187Z node 1 
:NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:08.957220Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:08.957229Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:08.957388Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30934 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:09.207042Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:09.223455Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T10:10:09.239118Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480 2025-03-28T10:10:09.241892Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:11.655910Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:11.655952Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:11.895778Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800640262087237:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:11.895860Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800640262087248:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:11.895931Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:11.899538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480 2025-03-28T10:10:11.911011Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800640262087258:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2025-03-28T10:10:11.978073Z node 1 :TX_PROXY ERROR: Actor# [1:7486800640262087309:2357] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:12.232916Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T10:10:12.388527Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:12.388571Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:13.100400Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:13.100447Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:13.343957Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:13.344005Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:13.558306Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800627377184652:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:13.558381Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T10:10:13.564644Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:13.564700Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:13.770363Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:13.770406Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:14.015826Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:14.015864Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:14.226510Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:14.226550Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:14.453796Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:14.453836Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:14.700331Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:14.700366Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:14.897542Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:14.897577Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:15.135893Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:15.135924Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:15.319658Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: 
test_ydb_token /Root 1 2025-03-28T10:10:15.319695Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:15.539134Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:15.539175Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:15.750397Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:15.750432Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:15.976109Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:15.976152Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:16.223785Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:16.223825Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:16.232312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710709:0, at schemeshard: 72057594046644480 2025-03-28T10:10:16.233848Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710710:0, at schemeshard: 72057594046644480 2025-03-28T10:10:16.236216Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710711:0, at schemeshard: 72057594046644480 2025-03-28T10:10:17.666390Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:17.666442Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:18.151822Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:18.151863Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:18.458064Z node 1 :TICKET_PARSER INFO: Ticket parser: got TEvAuthorizeTicket event: test_ydb_token /Root 1 2025-03-28T10:10:18.458107Z node 1 :TICKET_PARSER INFO: Send TEvAuthorizeTicketResult success 2025-03-28T10:10:18.997643Z node 1 :RPC_REQUEST WARN: Client lost 2025-03-28T10:10:18.999720Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743156619022, txId: 281474976710723] shutting down |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.3%| [TA] $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.4%| [TA] {RESULT} $(B)/ydb/core/viewer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> KqpLimits::QSReplySize+useSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 11253, MsgBus: 15836 2025-03-28T10:10:09.168598Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800631083810652:2104];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:09.169371Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/000385/r3tmp/tmpMFLv36/pdisk_1.dat 2025-03-28T10:10:09.526817Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 11253, node 1 2025-03-28T10:10:09.561461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:09.561595Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:09.565248Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:09.636793Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:09.636817Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:09.636827Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:09.636964Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15836 TClient is connected to server localhost:15836 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:10.188300Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:10.213549Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T10:10:10.224862Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:10.371456Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:10.564123Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:10.646959Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:12.600925Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800643968714281:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:12.602739Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:12.902310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:12.936317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T10:10:12.964079Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T10:10:12.994751Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T10:10:13.029008Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T10:10:13.069217Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T10:10:13.127039Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800648263682085:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:13.127118Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:13.127291Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800648263682090:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:13.131602Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T10:10:13.141033Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800648263682092:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T10:10:13.224922Z node 1 :TX_PROXY ERROR: Actor# [1:7486800648263682148:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:14.066202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:14.176581Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800631083810652:2104];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:14.177584Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T10:10:22.255746Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486800682623421856:2642], SessionActorId: [1:7486800678328454541:2642], statusCode=PRECONDITION_FAILED. Issue=
: Error: Stream write queries aren't allowed., code: 2029 . sessionActorId=[1:7486800678328454541:2642]. isRollback=0 2025-03-28T10:10:22.342191Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWY5ZjM1ZTUtNzc1ZjQwMWUtYzBlODljMjMtYjgyOTBlM2Q=, ActorId: [1:7486800678328454541:2642], ActorState: ExecuteState, TraceId: 01jqe3pbjc18ers1kzdw5vbz3c, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7486800682623421857:2642] from: [1:7486800682623421856:2642] 2025-03-28T10:10:22.342332Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486800682623421857:2642] TxId: 281474976710672. Ctx: { TraceId: 01jqe3pbjc18ers1kzdw5vbz3c, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=MWY5ZjM1ZTUtNzc1ZjQwMWUtYzBlODljMjMtYjgyOTBlM2Q=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. PRECONDITION_FAILED: {
: Error: Stream write queries aren't allowed., code: 2029 } 2025-03-28T10:10:22.342490Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486800682623421864:2653], TxId: 281474976710672, task: 3. Ctx: { TraceId : 01jqe3pbjc18ers1kzdw5vbz3c. SessionId : ydb://session/3?node_id=1&id=MWY5ZjM1ZTUtNzc1ZjQwMWUtYzBlODljMjMtYjgyOTBlM2Q=. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486800682623421857:2642], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T10:10:22.342813Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486800682623421866:2655], TxId: 281474976710672, task: 5. Ctx: { CustomerSuppliedId : . SessionId : ydb://session/3?node_id=1&id=MWY5ZjM1ZTUtNzc1ZjQwMWUtYzBlODljMjMtYjgyOTBlM2Q=. TraceId : 01jqe3pbjc18ers1kzdw5vbz3c. CurrentExecutionId : . DatabaseId : /Root. PoolId : default. Database : /Root. }. Handle abort execution event from: [1:7486800682623421857:2642], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2025-03-28T10:10:22.347524Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWY5ZjM1ZTUtNzc1ZjQwMWUtYzBlODljMjMtYjgyOTBlM2Q=, ActorId: [1:7486800678328454541:2642], ActorState: ExecuteState, TraceId: 01jqe3pbjc18ers1kzdw5vbz3c, Create QueryResponse for error on request, msg:
: Error: Stream write queries aren't allowed., code: 2029 >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] >> KqpSnapshotIsolation::TSimpleOltp [FAIL] >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> KqpSinkTx::OlapInvalidateOnError [FAIL] |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> 
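PRECONDITION_FAILED with issue code 2029 is the terminal status the session actor returns to the client here, so a test that deliberately provokes this path should assert on that status rather than SUCCESS. A sketch under the NYdb::NTable session API these suites already use (the query text is a placeholder and session creation is elided):

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <library/cpp/testing/unittest/registar.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    // Sketch: expect the PRECONDITION_FAILED surfaced above instead of SUCCESS.
    void AssertStreamWriteRejected(TSession session) {
        auto result = session.ExecuteDataQuery(
            "UPSERT INTO big_table SELECT * FROM staging;",  // hypothetical query
            TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx())
            .GetValueSync();
        UNIT_ASSERT_VALUES_EQUAL(result.GetStatus(), EStatus::PRECONDITION_FAILED);
        UNIT_ASSERT_STRING_CONTAINS(result.GetIssues().ToString(),
                                    "Stream write queries aren't allowed");
    }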
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage Test command err: 2025-03-28T10:09:56.238407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T10:09:56.238856Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T10:09:56.238943Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/0002e1/r3tmp/tmp41UhiH/pdisk_1.dat TServer::EnableGrpc on GrpcPort 6065, node 1 TClient is connected to server localhost:13697 2025-03-28T10:09:56.788149Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T10:09:56.820864Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:09:56.824186Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:09:56.824245Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:09:56.824270Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:09:56.824467Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T10:09:56.860318Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:09:56.860486Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:09:56.872332Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:09:57.000191Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480 Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2 2025-03-28T10:09:57.107666Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvBoot 2025-03-28T10:09:57.108842Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvRestored 2025-03-28T10:09:57.109114Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:746:2626];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T10:09:57.125083Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:746:2626];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T10:09:57.125378Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888 2025-03-28T10:09:57.132044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:09:57.132229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:09:57.132436Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:09:57.132525Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:09:57.132591Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:09:57.132699Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:09:57.132791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:09:57.132874Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:09:57.132977Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:09:57.133072Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:09:57.133148Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:09:57.133229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:09:57.147768Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T10:09:57.149139Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-03-28T10:09:57.149335Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T10:09:57.149419Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T10:09:57.149569Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T10:09:57.149675Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T10:09:57.149730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T10:09:57.149763Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T10:09:57.149845Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T10:09:57.149897Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T10:09:57.149927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T10:09:57.149950Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T10:09:57.150081Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T10:09:57.150126Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T10:09:57.150170Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T10:09:57.150204Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T10:09:57.150277Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T10:09:57.150314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T10:09:57.150352Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T10:09:57.150376Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T10:09:57.150444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T10:09:57.150474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T10:09:57.150506Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T10:09:57.150552Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T10:09:57.150577Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T10:09:57.150598Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T10:09:57.151045Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=68; 2025-03-28T10:09:57.151128Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=28; 2025-03-28T10:09:57.151208Z node 1 :TX_COLUMNSHARD INFO: tablet_id= ... LUMNSHARD TRACE: StateWork, received event# 2146435073, Sender [1:1315:3116], Recipient [1:746:2626]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2025-03-28T10:10:23.467813Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888 2025-03-28T10:10:23.468148Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[31] (CS::GENERAL) apply at tablet 72075186224037888 2025-03-28T10:10:23.473404Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:21 Blob count: 2 2025-03-28T10:10:23.473617Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3720408;raw_bytes=123937532;count=3;records=105525} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=17463040;raw_bytes=589051848;count=6;records=494475} inactive {blob_bytes=23493144;raw_bytes=787383134;count=15;records=667575} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 TEvBlobStorage::TEvPut 
tId=72075186224037888;c=1;:73/0:size=4045;count=18;;1:size=53108;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445360;count=1;;8:size=1445448;count=1;;9:size=1445400;count=1;;10:size=1445920;count=1;;11:size=4137072;count=5;;12:size=1445744;count=1;;13:size=1445928;count=1;;14:size=1445608;count=1;;15:size=1445528;count=1;;16:size=1445360;count=1;;17:size=1168928;count=1;;18:size=1402768;count=1;;19:size=1403096;count=1;;20:size=1402840;count=1;;21:size=1445376;count=1;;22:size=808584;count=1;;23:size=1496088;count=2;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut tId=72075186224037888;c=0;:73/0:size=4114;count=19;;1:size=53108;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445360;count=1;;8:size=1445448;count=1;;9:size=1445400;count=1;;10:size=1445920;count=1;;11:size=4137072;count=5;;12:size=1445744;count=1;;13:size=1445928;count=1;;14:size=1445608;count=1;;15:size=1445528;count=1;;16:size=1445360;count=1;;17:size=1168928;count=1;;18:size=1402768;count=1;;19:size=1403096;count=1;;20:size=1402840;count=1;;21:size=1445376;count=1;;22:size=808584;count=1;;23:size=1496088;count=2;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-03-28T10:10:23.486388Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-03-28T10:10:23.486476Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;fline=with_appended.cpp:65;portions=25,26,;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1; 2025-03-28T10:10:23.486851Z node 1 :TX_COLUMNSHARD TRACE: 
tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:25;path_id:3;records_count:52716;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:1857384;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-03-28T10:10:23.487092Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/547285.000000s;; 2025-03-28T10:10:23.487221Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:26;path_id:3;records_count:52714;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:1857112;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-03-28T10:10:23.487331Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/599999.000000s;; 2025-03-28T10:10:23.487403Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::dbf26814-bbc11f0-b2e4f41c-8f68bac1; 2025-03-28T10:10:23.487497Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:3;path_id:3;size:21186720;portions_count:26;); 2025-03-28T10:10:23.487567Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;tablet_id=72075186224037888;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T10:10:23.487651Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T10:10:23.487762Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;tablet_id=72075186224037888;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-03-28T10:10:23.487850Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;tablet_id=72075186224037888;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000; 2025-03-28T10:10:23.487912Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;tablet_id=72075186224037888;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T10:10:23.487979Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T10:10:23.488027Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T10:10:23.488122Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;tablet_id=72075186224037888;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.600000s; 2025-03-28T10:10:23.488190Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T10:10:23.488432Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:21 Blob count: 2 VERIFY failed (2025-03-28T10:10:23.488644Z): tablet_id=72075186224037888;task_id=dbf26814-bbc11f0-b2e4f41c-8f68bac1;verification=CompactionsLimit.Dec() >= 0;fline=ro_controller.cpp:39; ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+873 (0x18B7DF99) NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x18B6C22B) NActors::TVerifyFormattedRecordWriter::~TVerifyFormattedRecordWriter()+326 (0x19E83CD6) NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+4577 (0x48517211) NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+4797 (0x30555E9D) NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+899 (0x1E8660F3) NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+3856 (0x1E749AD0) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+3444 (0x1E58FEC4) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+2821 (0x1E52CC45) NActors::IActor::Receive(TAutoPtr&)+237 (0x19DB54AD) NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+3557 (0x3585FE75) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+12602 (0x358586EA) NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration)+1076 (0x35862A64) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration)+292 (0x35A2FBE4) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration)+419 (0x35A2ED03) NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+307 (0x35A26F63) NActors::TTestActorRuntime::SimulateSleep(TDuration)+1115 (0x35A26B3B) NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+4702 (0x1875C99E) std::__y1::__function::__func, void ()>::operator()()+280 (0x1876E558) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1902ACB6) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x18FFA7E9) NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+1204 (0x1876D504) NUnitTest::TTestFactory::Execute()+2438 (0x18FFC0B6) NUnitTest::RunMain(int, char**)+5213 (0x1902522D) ??+0 (0x7F8949B6ED90) __libc_start_main+128 (0x7F8949B6EE40) _start+41 (0x160E8029) |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |91.3%| 
[TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 1299, MsgBus: 63117 2025-03-28T10:10:05.770056Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800612115506044:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:05.770141Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/0003b8/r3tmp/tmp9WvbBi/pdisk_1.dat 2025-03-28T10:10:06.201648Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:06.231652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:06.231763Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:06.233888Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 1299, node 1 2025-03-28T10:10:06.414518Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:06.414568Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:06.414578Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:06.414709Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:63117 TClient is connected to server localhost:63117 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:07.198373Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:07.215460Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T10:10:08.821512Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800625000408459:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:08.821594Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800625000408445:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:08.821945Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:08.826095Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:10:08.836363Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800625000408469:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:10:08.923134Z node 1 :TX_PROXY ERROR: Actor# [1:7486800625000408520:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:09.310977Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:10:09.434507Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T10:10:10.350972Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:11.179231Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800612115506044:2198];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:11.236822Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T10:10:12.313123Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OTZiNzE2ZDktN2JlNzdmMDYtNDBkYjczM2QtODc5MDc2M2E=, ActorId: [1:7486800642180286334:2968], ActorState: ExecuteState, TraceId: 01jqe3p3260jbeftgfsackbxgm, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T10:10:21.198116Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T10:10:21.198165Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18D79E87 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18D63C3A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F4F8E6BED8F 18. ??:0: ?? @ 0x7F4F8E6BEE3F 19. ??:0: ?? @ 0x16366028 |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 8918, MsgBus: 11050 2025-03-28T10:10:05.959592Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800613816549054:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:05.959690Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/00038a/r3tmp/tmpNFdRSV/pdisk_1.dat 2025-03-28T10:10:06.398127Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:06.402104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:06.402203Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:06.405508Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 8918, node 1 2025-03-28T10:10:06.471243Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:06.471269Z node 1 :NET_CLASSIFIER WARN: will try to initialize from 
file: (empty maybe) 2025-03-28T10:10:06.471274Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:06.471400Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:11050 TClient is connected to server localhost:11050 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:07.213843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:07.231852Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T10:10:09.031380Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800630996418845:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:09.031381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800630996418834:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:09.031469Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:09.036324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:10:09.046460Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800630996418848:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:10:09.135840Z node 1 :TX_PROXY ERROR: Actor# [1:7486800630996418899:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:09.426277Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:10:09.569610Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T10:10:10.540059Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:11.372884Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800613816549054:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:11.422712Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T10:10:12.457782Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZThmYWZkOWUtODc5Y2ExN2YtYzJhOTI5NDctZmEzZjNiZWE=, ActorId: [1:7486800643881329425:2968], ActorState: ExecuteState, TraceId: 01jqe3p32a3kf46tnsfdv8b11k, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T10:10:21.397462Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T10:10:21.397493Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18D81843 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18D642BA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F08EB0AED8F 18. ??:0: ?? @ 0x7F08EB0AEE3F 19. ??:0: ?? @ 0x16366028 |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 10033, MsgBus: 28660 2025-03-28T10:10:05.770705Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800614365217802:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:05.770767Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/0003c6/r3tmp/tmpzG1kyW/pdisk_1.dat 2025-03-28T10:10:06.195240Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:06.230898Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:06.230991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:06.235520Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 10033, node 1 2025-03-28T10:10:06.415966Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:06.415987Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:06.416015Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:06.416110Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is 
connected to server localhost:28660 TClient is connected to server localhost:28660 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:07.189253Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:08.810419Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800627250120343:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:08.810525Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800627250120355:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:08.810601Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:08.815346Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:10:08.825887Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800627250120358:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:10:08.905888Z node 1 :TX_PROXY ERROR: Actor# [1:7486800627250120409:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:09.314020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:10:09.490853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486800631545087887:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:10:09.490853Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631545087897:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:10:09.491068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631545087897:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:10:09.491338Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631545087897:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:10:09.491451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631545087897:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:10:09.491578Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631545087897:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:10:09.491698Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631545087897:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:10:09.491793Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631545087897:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:10:09.491925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631545087897:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:10:09.492027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631545087897:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:10:09.492135Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7486800631545087897:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:10:09.492241Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486800631545087887:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:10:09.492277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631545087897:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:10:09.492408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631545087897:2348];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:10:09.492480Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486800631545087887:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:10:09.492599Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486800631545087887:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:10:09.492717Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486800631545087887:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:10:09.492852Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486800631545087887:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:10:09.492955Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486800631545087887:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:10:09.493095Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486800631545087887:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:10:09.493233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486800631545087887:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:10:09.493379Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486800631545087887:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:10:09.493487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486800631545087887:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:10:09.493594Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037897;self_id=[1:7486800631545087887:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:10:09.524310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486800631545087895:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:10:09.524355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486800631545087895:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:10:09.524518Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;sel ... 6224038010;self_id=[1:7486800648724962132:3186];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.843328Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[1:7486800648724963227:3287];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.843634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[1:7486800648724963445:3352];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038062;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.843779Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[1:7486800648724962132:3186];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.843891Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[1:7486800648724963227:3287];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.844052Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[1:7486800648724963419:3341];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.844213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[1:7486800648724963180:3277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038095;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.844444Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038062;self_id=[1:7486800648724963445:3352];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038062;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.844604Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[1:7486800648724963419:3341];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.844609Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[1:7486800648724963180:3277];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038095;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.845231Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038082;self_id=[1:7486800648724963188:3279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038082;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.845433Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038082;self_id=[1:7486800648724963188:3279];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038082;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.845700Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[1:7486800648724963341:3309];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038065;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.845724Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[1:7486800648724963390:3336];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.845940Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038065;self_id=[1:7486800648724963341:3309];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038065;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.846035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[1:7486800648724963390:3336];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.846221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[1:7486800648724963413:3339];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.846390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[1:7486800648724963413:3339];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.848830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[1:7486800648724963438:3351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038072;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.849260Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038072;self_id=[1:7486800648724963438:3351];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038072;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.854932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[1:7486800648724963410:3338];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.855198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[1:7486800648724963348:3312];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.855376Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[1:7486800648724963164:3268];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038077;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.855548Z node 1 
:TX_COLUMNSHARD WARN: tablet_id=72075186224038089;self_id=[1:7486800648724963264:3296];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038089;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.855712Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[1:7486800648724963373:3326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038063;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.856217Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[1:7486800648724963410:3338];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.856402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[1:7486800648724963348:3312];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.856556Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038077;self_id=[1:7486800648724963164:3268];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038077;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.856668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038089;self_id=[1:7486800648724963264:3296];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038089;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.856791Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[1:7486800648724963373:3326];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038063;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.856949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;self_id=[1:7486800648724963166:3269];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038096;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.857097Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[1:7486800648724963142:3262];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038091;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.857458Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038096;self_id=[1:7486800648724963166:3269];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038096;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.857579Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038091;self_id=[1:7486800648724963142:3262];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038091;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.138264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;self_id=[1:7486800635840057043:2623];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037952;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.138629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037952;self_id=[1:7486800635840057043:2623];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037952;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at 
ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18D7C798 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18D63E6A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F6D2C91ED8F 18. ??:0: ?? @ 0x7F6D2C91EE3F 19. ??:0: ?? @ 0x16366028 |91.7%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] Test command err: Trying to start YDB, gRPC: 19805, MsgBus: 15891 2025-03-28T10:10:07.364857Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800620062425483:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:07.365976Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/000357/r3tmp/tmp8jynT7/pdisk_1.dat 2025-03-28T10:10:07.765326Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:07.803850Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:07.803969Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:07.805526Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19805, node 1 2025-03-28T10:10:07.877649Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:07.877682Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:07.877694Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:07.877836Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:15891 TClient is connected to server localhost:15891 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:08.375254Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:10.427361Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800632947327991:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:10.427705Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:10.428057Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800632947328018:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:10.432578Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:10:10.443993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800632947328020:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:10:10.508633Z node 1 :TX_PROXY ERROR: Actor# [1:7486800632947328071:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:10.824131Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:10:10.952535Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T10:10:11.977671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:12.646415Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800620062425483:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:12.686789Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T10:10:13.267880Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MjUwZmIxMjgtZTRhYzZmOWUtM2RjZTZiYmMtMTQ3MzUzNjU=, ActorId: [1:7486800645832238460:2969], ActorState: ExecuteState, TraceId: 01jqe3p41n9sxk9zwmnjg8r6zg, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T10:10:22.762631Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T10:10:22.762660Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18D79E87 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18D63A12 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F135999ED8F 18. ??:0: ?? @ 0x7F135999EE3F 19. ??:0: ?? @ 0x16366028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltp [FAIL] Test command err: Trying to start YDB, gRPC: 18173, MsgBus: 14533 2025-03-28T10:10:06.621621Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800616509512636:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:06.621725Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/000373/r3tmp/tmpPXR7Dw/pdisk_1.dat 2025-03-28T10:10:07.004858Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 18173, node 1 2025-03-28T10:10:07.056790Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:07.057135Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:07.066104Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:07.078774Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:07.078792Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:07.078801Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:07.078887Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:14533 TClient is connected to server localhost:14533 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:07.598169Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:07.629077Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T10:10:09.624006Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800629394415169:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:09.624135Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:09.624422Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800629394415196:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:09.634348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:10:09.654350Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800629394415198:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:10:09.742852Z node 1 :TX_PROXY ERROR: Actor# [1:7486800629394415252:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:10.086734Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:10:10.196993Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T10:10:11.219117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:11.936837Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800616509512636:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:11.982526Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T10:10:12.631132Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTBiNWI3NDMtMmZhMzVhZjAtNGVhYzg5MzQtNTg2OTQ0NDQ=, ActorId: [1:7486800642279325483:2969], ActorState: ExecuteState, TraceId: 01jqe3p3af9qw9b3vphae54cz9, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T10:10:21.994537Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T10:10:21.994574Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18D6BF57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18D62D12 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F89D364CD8F 18. ??:0: ?? @ 0x7F89D364CE3F 19. ??:0: ?? @ 0x16366028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] Test command err: Trying to start YDB, gRPC: 31294, MsgBus: 7663 2025-03-28T10:10:07.224402Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800622759139918:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:07.224449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/00036d/r3tmp/tmpWMmubp/pdisk_1.dat 2025-03-28T10:10:07.601544Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:07.621076Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:07.621185Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:07.624459Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 31294, node 1 2025-03-28T10:10:07.775877Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:07.775900Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:07.775932Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:07.776077Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7663 TClient is connected to server localhost:7663 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:08.298064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:08.312931Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T10:10:10.232410Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800635644042468:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:10.232502Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800635644042479:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:10.232787Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:10.239925Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:10:10.250671Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800635644042482:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:10:10.339179Z node 1 :TX_PROXY ERROR: Actor# [1:7486800635644042535:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:10.724645Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:10:10.839294Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T10:10:11.885020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:12.526093Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800622759139918:2058];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:12.556759Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T10:10:13.216481Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=YjA0NDcwZWMtNDZjOTBlMTgtMTRkN2JiOGUtZWVhZThmYjc=, ActorId: [1:7486800644233985629:2969], ActorState: ExecuteState, TraceId: 01jqe3p3x26pjzvkxgbzqsc0z9, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T10:10:22.600304Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T10:10:22.600334Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18D81843 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18D64092 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F069910DD8F 18. ??:0: ?? @ 0x7F069910DE3F 19. ??:0: ?? @ 0x16366028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 29195, MsgBus: 21057 2025-03-28T10:10:06.682014Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800616274489643:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:06.682068Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/000380/r3tmp/tmp8dF56m/pdisk_1.dat 2025-03-28T10:10:07.067618Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:07.073088Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:07.073190Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:07.076031Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29195, node 1 2025-03-28T10:10:07.286767Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:07.286800Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:07.286820Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:07.286988Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21057 TClient is connected to server localhost:21057 
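The assertion block above is the failure signature that repeats for every OLTP variant in this excerpt: the test asserts the query status is SUCCESS, the server returns PRECONDITION_FAILED, and the unittest framework prints a character-level diff of the two enum names, which is what "(PRE|SUC)C(ONDITION_FAIL|)E(D|SS)" encodes (take the first alternative in each group for PRECONDITION_FAILED, the second for SUCCESS). A minimal sketch of the failing check, assuming the UNIT_ASSERT_VALUES_EQUAL_C macro from library/cpp/testing/unittest; the actual body of TReadOnly::DoExecute() is not shown in this log and may differ:

    // Sketch only, using the standard library/cpp/testing/unittest macro;
    // this is not the verbatim body of TReadOnly::DoExecute().
    auto result = RunQuery(session);  // RunQuery is a hypothetical helper
    UNIT_ASSERT_VALUES_EQUAL_C(result.GetStatus(), EStatus::SUCCESS,
                               result.GetIssues().ToString());
    // On mismatch the macro reports "(expr == expr) failed: (got != expected)",
    // appends the _C comment (the ": Error: ..." line above), and prints the
    // character diff between the two printed values.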
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:07.855895Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:07.876540Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T10:10:09.786776Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800629159392192:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:09.786920Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800629159392200:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:09.786989Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:09.791458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:10:09.805468Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800629159392206:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:10:09.880372Z node 1 :TX_PROXY ERROR: Actor# [1:7486800629159392257:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:10.167040Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:10:10.301284Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T10:10:11.391623Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:12.130403Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800616274489643:2062];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:12.171242Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T10:10:12.847172Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MzY4MDJjMTMtOGFhMTZjNGItNWI5OGI5ZWMtNDVmNWQzNWI=, ActorId: [1:7486800642044302575:2968], ActorState: ExecuteState, TraceId: 01jqe3p3hs2yhyan48dxa5c323, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T10:10:22.063978Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T10:10:22.064023Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18D6BF57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18D62F3A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F78999F4D8F 18. ??:0: ?? @ 0x7F78999F4E3F 19. ??:0: ?? @ 0x16366028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] Test command err: Trying to start YDB, gRPC: 29540, MsgBus: 61086 2025-03-28T10:10:06.719932Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800615817012875:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:06.720291Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/000382/r3tmp/tmpcjIEsb/pdisk_1.dat 2025-03-28T10:10:07.099443Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:07.158410Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:07.158518Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:07.159913Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 29540, node 1 2025-03-28T10:10:07.223472Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:07.223494Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:07.223510Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:07.223631Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:61086 TClient is connected to server localhost:61086 
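Every OLTP-flavored FAIL here traces back to the same server response: SnapshotRW isolation is accepted only for column-store (OLAP) tables, and these suites open it against row-store tables. A hedged sketch of the call shape being rejected, assuming the C++ query SDK exposes a SnapshotRW transaction setting the way these tests exercise it; the table name and query are illustrative, not the suite's code:

    // Illustrative only; assumes NYdb::NQuery::TTxSettings::SnapshotRW()
    // exists in the SDK build under test.
    auto tx = NYdb::NQuery::TTxControl::BeginTx(
        NYdb::NQuery::TTxSettings::SnapshotRW()).CommitTx();
    auto result = session.ExecuteQuery(
        "UPSERT INTO `/Root/Test` (Key, Value) VALUES (1u, \"one\");",
        tx).ExtractValueSync();
    // Against a row-store table the server answers PRECONDITION_FAILED
    // ("SnapshotRW can only be used with olap tables"); tables created
    // WITH (STORE = COLUMN) accept the mode.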
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:07.836410Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:09.740403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800628701915420:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:09.740567Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800628701915412:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:09.741250Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:09.746868Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:10:09.758275Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800628701915443:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:10:09.815676Z node 1 :TX_PROXY ERROR: Actor# [1:7486800628701915494:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:10.137039Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:10:10.248318Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T10:10:11.333521Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:12.094440Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800615817012875:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:12.159899Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T10:10:12.720876Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NjVmZWNiNzMtYTQwNzIzOTAtM2VkZmZmMjMtNmFjODZmYzE=, ActorId: [1:7486800641586825711:2969], ActorState: ExecuteState, TraceId: 01jqe3p3g6eyj60r6gq2tdjk61, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T10:10:22.098957Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T10:10:22.098995Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18D724E7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18D63392 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F2D8DEC3D8F 18. ??:0: ?? @ 0x7F2D8DEC3E3F 19. ??:0: ?? @ 0x16366028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInvalidateOnError [FAIL] Test command err: Trying to start YDB, gRPC: 24144, MsgBus: 16647 2025-03-28T10:10:05.769330Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800611251516794:2069];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:05.769987Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/0003a5/r3tmp/tmpBgBuwF/pdisk_1.dat 2025-03-28T10:10:06.219701Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:06.227120Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:06.227238Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:06.233116Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 24144, node 1 2025-03-28T10:10:06.414512Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:06.414550Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:06.414574Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:06.414751Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16647 TClient is connected to server localhost:16647 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:07.210650Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:07.227496Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2025-03-28T10:10:08.711955Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800624136419332:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:08.711967Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800624136419344:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:08.712109Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:08.717781Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:10:08.727702Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800624136419346:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:10:08.793266Z node 1 :TX_PROXY ERROR: Actor# [1:7486800624136419399:2338] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:09.319971Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:10:09.492694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486800628431386920:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:10:09.492702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486800628431386902:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:10:09.492892Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486800628431386902:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:10:09.493177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486800628431386902:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:10:09.493340Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486800628431386902:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:10:09.493500Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486800628431386902:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:10:09.493501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486800628431386920:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:10:09.493746Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486800628431386902:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:10:09.493780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486800628431386920:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:10:09.493921Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486800628431386920:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:10:09.493932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486800628431386902:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
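The TX_COLUMNSHARD WARN spam around this point is routine column-shard startup, not part of the failure: each tablet registers its normalization passes (Granules, Chunks, TablesCleaner, CleanGranuleId, and so on) when it boots. The actual KqpSinkTx::OlapInvalidateOnError failure appears further down as a status mismatch on a duplicate-key write. A sketch of that write, with schema and transaction settings as assumptions rather than the test's code:

    // Illustrative duplicate-key write of the kind TInvalidateOnError drives;
    // table schema and default transaction settings are assumptions.
    auto result = session.ExecuteQuery(R"(
        INSERT INTO `/Root/KV` (Key, Value) VALUES (1u, "again");
    )", NYdb::NQuery::TTxControl::BeginTx().CommitTx()).ExtractValueSync();
    // The conflict surfaces as BAD_REQUEST (issue code 2017, "Conflict with
    // existing key"), while the assertion still expects PRECONDITION_FAILED,
    // hence the (BAD_REQUEST != PRECONDITION_FAILED) diff below.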
2025-03-28T10:10:09.494091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486800628431386902:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:10:09.494108Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486800628431386920:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:10:09.494230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486800628431386920:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:10:09.494245Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486800628431386902:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:10:09.494351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486800628431386920:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:10:09.494373Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486800628431386902:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:10:09.494512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486800628431386920:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:10:09.494538Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486800628431386902:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:10:09.494650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486800628431386920:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:10:09.494667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486800628431386902:2350];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:10:09.494799Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486800628431386920:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:10:09.494927Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037893;self_id=[1:7486800628431386920:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:10:09.495030Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037893;self_id=[1:7486800628431386920:2351];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:10:09.534411Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486800628431386900:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:10:09.534498Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486800628431386900:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abs ... t_id=72075186224038087;self_id=[1:7486800645611262407:3383];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038087;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.985596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[1:7486800645611262419:3388];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038095;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.985702Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038087;self_id=[1:7486800645611262407:3383];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038087;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.985866Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038095;self_id=[1:7486800645611262419:3388];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038095;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.989085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[1:7486800645611262082:3366];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.989701Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[1:7486800645611262082:3366];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.992968Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[1:7486800645611261557:3300];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.993420Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[1:7486800645611261557:3300];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.997035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[1:7486800645611261727:3327];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.997474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[1:7486800645611261727:3327];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:22.999801Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038094;self_id=[1:7486800645611262414:3386];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038094;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.000540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[1:7486800645611262414:3386];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038094;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.002838Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[1:7486800645611261606:3317];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.003055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[1:7486800645611261606:3317];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.007803Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[1:7486800645611261595:3313];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038035;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.008392Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[1:7486800645611261595:3313];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038035;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.008424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[1:7486800632726356286:2621];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037969;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.008727Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037969;self_id=[1:7486800632726356286:2621];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037969;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.014934Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[1:7486800645611261957:3354];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038029;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.015344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038029;self_id=[1:7486800645611261957:3354];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038029;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.018932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;self_id=[1:7486800645611262417:3387];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038083;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.019168Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038083;self_id=[1:7486800645611262417:3387];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038083;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.022890Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038026;self_id=[1:7486800645611262066:3363];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.023374Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038026;self_id=[1:7486800645611262066:3363];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038026;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.030722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[1:7486800645611262441:3391];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038093;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.031066Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038093;self_id=[1:7486800645611262441:3391];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038093;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.031503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[1:7486800645611262409:3384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.031708Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;self_id=[1:7486800632726354926:2483];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037918;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.032208Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038088;self_id=[1:7486800645611262409:3384];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038088;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.032365Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037918;self_id=[1:7486800632726354926:2483];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037918;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.039804Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;self_id=[1:7486800632726356413:2656];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037947;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.040389Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037947;self_id=[1:7486800632726356413:2656];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037947;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.041152Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[1:7486800645611262451:3394];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038085;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.041364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[1:7486800645611262451:3394];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038085;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.371390Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037954;self_id=[1:7486800632726356274:2619];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037954;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:23.374959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037954;self_id=[1:7486800632726356274:2619];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037954;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at 
ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x18D46A4E 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x18D2569A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x18D2CB17 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D2CB17 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D2CB17 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D2CB17 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D2CB17 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x18D2BCE3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F6D33309D8F 18. ??:0: ?? @ 0x7F6D33309E3F 19. ??:0: ?? @ 0x16366028 |92.2%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 4063, MsgBus: 29258 2025-03-28T10:10:07.416861Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800619666301440:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:07.416993Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/000364/r3tmp/tmpUnHZyL/pdisk_1.dat 2025-03-28T10:10:07.780335Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 4063, node 1 2025-03-28T10:10:07.815400Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:07.815506Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:07.817533Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:07.877693Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:07.877721Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:07.877729Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:07.877826Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:29258 TClient is connected to server localhost:29258 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:08.368843Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:10.453613Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800632551203758:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:10.453612Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800632551203740:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:10.453744Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:10.458117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T10:10:10.473241Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800632551203778:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T10:10:10.541761Z node 1 :TX_PROXY ERROR: Actor# [1:7486800632551203829:2337] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:10.841009Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T10:10:11.005937Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T10:10:11.995832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:12.712138Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800619666301440:2273];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:12.761672Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T10:10:13.329867Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2M4ODZlNGItNjRlMzhiNmItNzNmZGZhYWItYWNmMDk3Nw==, ActorId: [1:7486800645436114092:2968], ActorState: ExecuteState, TraceId: 01jqe3p43w85jqtgx21vbe0j6r, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T10:10:22.769629Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T10:10:22.769651Z node 1 :IMPORT WARN: Table profiles were not loaded assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18D724E7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18D635BA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F0617BA9D8F 18. ??:0: ?? @ 0x7F0617BA9E3F 19. ??:0: ?? @ 0x16366028 |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 3933, MsgBus: 8337 2025-03-28T10:10:05.790910Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800614269342424:2133];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:05.791034Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/0003bd/r3tmp/tmpHUOW8C/pdisk_1.dat 2025-03-28T10:10:06.231404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:06.231522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:06.233515Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:06.259300Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3933, node 1 2025-03-28T10:10:06.415270Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 
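The final report, KqpSnapshotIsolation::TConflictWriteOlap, targets actual column tables, so the SnapshotRW rejection seen above does not apply; the excerpt is truncated before its assertion, and its tail is dominated by the same startup normalizer WARNs plus periodic skip_indexation notices (each shard's background-indexation wakeup reporting reason=disabled). For orientation only, a rough sketch of the write-write conflict the TConflictWrite variants probe; every helper here is hypothetical, not taken from the suite:

    // Hypothetical helpers (BeginSnapshotTx, Upsert, Commit); this shows the
    // scenario being tested, not the suite's implementation.
    auto tx1 = BeginSnapshotTx(session1);
    auto tx2 = BeginSnapshotTx(session2);
    Upsert(session1, tx1, /*Key=*/1u, "from tx1");  // both transactions touch Key=1
    Upsert(session2, tx2, /*Key=*/1u, "from tx2");
    UNIT_ASSERT_VALUES_EQUAL(Commit(tx1).GetStatus(), EStatus::SUCCESS);
    UNIT_ASSERT_VALUES_EQUAL(Commit(tx2).GetStatus(), EStatus::ABORTED);  // loser aborts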
2025-03-28T10:10:06.415295Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:06.415304Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:06.415431Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:8337 TClient is connected to server localhost:8337 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:07.184643Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:08.840678Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800627154244889:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:08.840801Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800627154244911:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:08.840942Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:08.845477Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:10:08.856015Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800627154244918:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:10:08.958344Z node 1 :TX_PROXY ERROR: Actor# [1:7486800627154244970:2339] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:09.319897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:10:09.514721Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486800631449212442:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:10:09.514722Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631449212444:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:10:09.514953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631449212444:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:10:09.515234Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631449212444:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:10:09.515382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631449212444:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:10:09.515388Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486800631449212442:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:10:09.515533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631449212444:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:10:09.515574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486800631449212442:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:10:09.515644Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631449212444:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:10:09.515678Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486800631449212442:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:10:09.515782Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631449212444:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 
2025-03-28T10:10:09.515785Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486800631449212442:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:10:09.515889Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486800631449212442:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:10:09.515902Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631449212444:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:10:09.516035Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486800631449212442:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:10:09.516151Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486800631449212442:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:10:09.516277Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486800631449212442:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:10:09.516398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486800631449212442:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:10:09.516526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486800631449212442:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:10:09.516704Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486800631449212442:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:10:09.518976Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631449212444:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:10:09.519198Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631449212444:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:10:09.519336Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486800631449212444:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:10:09.519448Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037894;self_id=[1:7486800631449212444:2350];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:10:09.557360Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486800631449212428:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:10:09.557431Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486800631449212428:2345];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:10:09.557674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id= ... EvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038079;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.374942Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[1:7486800652924055045:3378];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.375608Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038064;self_id=[1:7486800652924054884:3308];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038064;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.375780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[1:7486800648629087502:3288];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038079;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.375843Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038008;self_id=[1:7486800652924055045:3378];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.377346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[1:7486800652924054931:3325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.377529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[1:7486800648629087188:3233];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038090;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.380019Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[1:7486800652924054963:3338];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038051;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.380847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[1:7486800652924054931:3325];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.381013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038090;self_id=[1:7486800648629087188:3233];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038090;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.381242Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038051;self_id=[1:7486800652924054963:3338];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038051;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.388408Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[1:7486800652924054835:3298];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.388729Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038053;self_id=[1:7486800652924054835:3298];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038053;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.388780Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[1:7486800648629087464:3277];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.389145Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[1:7486800648629087464:3277];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.389748Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[1:7486800648629086932:3169];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038094;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.389751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7486800652924054968:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.389943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038094;self_id=[1:7486800648629086932:3169];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038094;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.390009Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7486800652924054968:3340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.390157Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[1:7486800652924054893:3314];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038061;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.390324Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038061;self_id=[1:7486800652924054893:3314];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038061;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.391287Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[1:7486800652924054961:3337];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.391605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[1:7486800652924054961:3337];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.396031Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038057;self_id=[1:7486800652924054900:3317];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.396041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[1:7486800652924054947:3331];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.396501Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[1:7486800652924054900:3317];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.396718Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038043;self_id=[1:7486800652924054947:3331];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038043;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.403355Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[1:7486800648629086905:3153];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038097;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.403667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[1:7486800648629087155:3215];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038085;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.404094Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038097;self_id=[1:7486800648629086905:3153];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038097;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.404270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038085;self_id=[1:7486800648629087155:3215];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038085;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.404640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[1:7486800652924055000:3360];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.404658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[1:7486800652924054929:3324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.405059Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038019;self_id=[1:7486800652924055000:3360];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038019;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.405243Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[1:7486800652924054929:3324];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.405596Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038063;self_id=[1:7486800652924054829:3296];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038063;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:10:24.405773Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038063;self_id=[1:7486800652924054829:3296];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038063;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18D74DF8
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18D637EA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F7C6EB24D8F
18. ??:0: ?? @ 0x7F7C6EB24E3F
19. ??:0: ?? @ 0x16366028
>> YdbLogStore::AlterLogTable [FAIL]
|92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|92.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|92.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|92.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|92.9%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log}
|93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|93.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ...
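The assertion above is the real failure in this stretch of the log: TConflictWrite expected the conflicting commit to return ABORTED under snapshot isolation, but it came back SUCCESS. Frames 1, 13 and 15 point into library/cpp/testing/unittest, so the check at kqp_snapshot_isolation_ut.cpp:92 is one of that framework's equality macros. A minimal sketch of the pattern that produces this kind of "(SUCCESS != ABORTED), with diff" message; the AssertAborted helper and the include paths are illustrative assumptions, not the actual test body:

#include <library/cpp/testing/unittest/registar.h>
#include <ydb/public/sdk/cpp/client/ydb_types/status/status.h> // assumed SDK layout

// Hypothetical helper showing the macro shape; on mismatch the framework
// raises "assertion failed at ...: (a == b) failed: (x != y), with diff: ..."
// through RaiseError in registar.cpp (frame 1 of the trace above).
void AssertAborted(const NYdb::TStatus& result) {
    UNIT_ASSERT_VALUES_EQUAL_C(result.GetStatus(), NYdb::EStatus::ABORTED,
                               result.GetIssues().ToString());
}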
results_accumulator.log}
|93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|93.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|93.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
>> KqpMultishardIndex::DataColumnWrite-UseSink
|93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest
|94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52
|94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72
|94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
|94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70
>> TCreateAndDropViewTest::DropViewIfExists
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68
>>
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58
>> test_workload.py::TestYdbLogWorkload::test[column]
>> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8]
>> test_workload.py::TestYdbWorkload::test
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [GOOD]
>> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly
>> test_tpch.py::TestTpchS1::test_tpch[5]
>> alter_compression.py::TestAlterCompression::test_all_supported_compression
>> test_tpch_import.py::TestS3TpchImport::test_import_and_export
|94.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54
|94.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|95.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12
>> test_alter_tiering.py::TestAlterTiering::test[many_tables]
|95.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|95.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
|95.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70
>> TCreateAndDropViewTest::DropViewIfExists [GOOD]
|95.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69
|95.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test
>> KqpLimits::ComputeNodeMemoryLimit [GOOD]
>> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/view/unittest >> TCreateAndDropViewTest::DropViewIfExists [GOOD]
Test command err: Trying to start YDB, gRPC: 3874, MsgBus: 6748
2025-03-28T10:10:32.067716Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800726829908618:2203];send_to=[0:7307199536658146131:7762515];
2025-03-28T10:10:32.069921Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/000158/r3tmp/tmpOOu6nc/pdisk_1.dat
2025-03-28T10:10:32.406435Z node 1 :IMPORT WARN: Table profiles were not loaded
TServer::EnableGrpc on GrpcPort 3874, node 1
2025-03-28T10:10:32.452753Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0))
VolatileState: Unknown -> Disconnected 2025-03-28T10:10:32.452884Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:32.454516Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:32.510169Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:32.510194Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:32.510201Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:32.510335Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:6748 TClient is connected to server localhost:6748 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:33.039939Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:35.136791Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800739714811018:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:35.136929Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:35.137311Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800739714811030:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:35.148687Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:10:35.165466Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800739714811032:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:10:35.273793Z node 1 :TX_PROXY ERROR: Actor# [1:7486800739714811083:2337] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:35.679444Z node 1 :TX_PROXY ERROR: Actor# [1:7486800739714811159:2385] txid# 281474976710662, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } |95.4%| [TM] {RESULT} ydb/core/kqp/ut/view/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [GOOD] Test command err: Starting YDB, grpc: 13919, msgbus: 3548 2025-03-28T10:10:14.694751Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800652691994168:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:14.694815Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/00027f/r3tmp/tmp0ev5Xu/pdisk_1.dat 2025-03-28T10:10:15.165395Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:15.165521Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:15.171027Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:15.203284Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13919, node 1 2025-03-28T10:10:15.238049Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-28T10:10:15.238082Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-28T10:10:15.368562Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:15.368590Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:15.368602Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:15.368802Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3548 WaitRootIsUp 'dc-1'... 
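A note on the passing TCreateAndDropViewTest::DropViewIfExists output above: the TX_PROXY error "Path does not exist" (issue_code 200200) is consistent with the test deliberately dropping a view that was never created, a condition the IF EXISTS form is meant to swallow. A sketch of that distinction, assuming the NYdb::NQuery C++ SDK client; the include paths, helper name, and test_view are illustrative assumptions, not the test source:

#include <util/generic/yexception.h>
#include <ydb/public/sdk/cpp/client/ydb_query/client.h> // assumed SDK layout

// Illustrative helper: DROP VIEW IF EXISTS should succeed even when the path
// is absent, while the plain DROP VIEW form surfaces "Path does not exist".
void DropViewIfExists(NYdb::NQuery::TQueryClient& client) {
    auto result = client.ExecuteQuery(
        "DROP VIEW IF EXISTS test_view;",
        NYdb::NQuery::TTxControl::NoTx()).GetValueSync();
    Y_ENSURE(result.IsSuccess(), result.GetIssues().ToString());
}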
TClient::Ls request: dc-1 2025-03-28T10:10:15.745334Z node 1 :TX_PROXY DEBUG: actor# [1:7486800652691994414:2123] Handle TEvNavigate describe path dc-1 2025-03-28T10:10:15.745389Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962218:2448] HANDLE EvNavigateScheme dc-1 2025-03-28T10:10:15.745684Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962218:2448] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:15.786794Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962218:2448] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T10:10:15.798997Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962218:2448] Handle TEvDescribeSchemeResult Forward to# [1:7486800656986962217:2447] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T10:10:15.826595Z node 1 :TX_PROXY DEBUG: actor# [1:7486800652691994414:2123] Handle TEvProposeTransaction 2025-03-28T10:10:15.826644Z node 1 :TX_PROXY DEBUG: actor# [1:7486800652691994414:2123] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T10:10:15.830827Z node 1 :TX_PROXY DEBUG: actor# [1:7486800652691994414:2123] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486800656986962224:2453] 2025-03-28T10:10:15.906863Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962224:2453] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:15.908357Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962224:2453] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-28T10:10:15.908403Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962224:2453] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:15.908489Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962224:2453] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:15.908789Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962224:2453] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:15.909037Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962224:2453] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T10:10:15.909089Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962224:2453] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T10:10:15.909223Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962224:2453] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T10:10:15.915398Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T10:10:15.919566Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962224:2453] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T10:10:15.919672Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962224:2453] txid# 281474976710657 SEND to# [1:7486800656986962223:2452] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-28T10:10:15.947698Z node 1 :TX_PROXY DEBUG: actor# [1:7486800652691994414:2123] Handle TEvProposeTransaction 2025-03-28T10:10:15.947736Z node 1 :TX_PROXY DEBUG: actor# [1:7486800652691994414:2123] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T10:10:15.947790Z node 1 :TX_PROXY DEBUG: actor# [1:7486800652691994414:2123] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486800656986962272:2494] 2025-03-28T10:10:15.950570Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962272:2494] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:15.950637Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962272:2494] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-28T10:10:15.950676Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962272:2494] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:15.950748Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962272:2494] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:15.951270Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962272:2494] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:15.951393Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962272:2494] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:15.951436Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962272:2494] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T10:10:15.951611Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962272:2494] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T10:10:15.952110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T10:10:15.956027Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962272:2494] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T10:10:15.956093Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800656986962272:2494] txid# 281474976710658 SEND to# [1:7486800656986962271:2493] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T10:10:16.011146Z node 1 :TX_PROXY DEBUG: actor# [1:7486800652691994414:2123] Handle TEvProposeTransaction 2025-03-28T10:10:16.011188Z node 1 :TX_PROXY DEBUG: actor# [1:7486800652691994414:2123] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T10:10:16.011255Z node 1 :TX_PROXY DEBUG: actor# [1:7486800652691994414:2123] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7486800661281929586:2504] 2025-03-28T10:10:16.013830Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800661281929586:2504] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\026\010\001\022\022\032\020db_admin@builtin\n\036\010\000\022\032\010\001\020\200\200\002\032\020db_admin@builtin \000\n\035\010\000\022\031\010\001\020\200\010\032\020db_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root ... gateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T10:10:34.873527Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800737527466902:2530] txid# 281474976715660 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:34.873734Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800737527466902:2530] txid# 281474976715660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:34.873894Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800737527466902:2530] HANDLE EvNavigateKeySetResult, txid# 281474976715660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:34.873945Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800737527466902:2530] txid# 281474976715660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715660 TabletId# 72057594046644480} 2025-03-28T10:10:34.874102Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800737527466902:2530] txid# 281474976715660 HANDLE EvClientConnected 2025-03-28T10:10:34.875493Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480 2025-03-28T10:10:34.884359Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800737527466902:2530] txid# 281474976715660 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715660} 2025-03-28T10:10:34.884420Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800737527466902:2530] txid# 281474976715660 SEND to# [9:7486800737527466901:2340] Source {TEvProposeTransactionStatus txid# 281474976715660 Status# 53} 2025-03-28T10:10:34.917022Z node 9 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7486800737527466901:2340], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2025-03-28T10:10:35.011139Z node 9 :TX_PROXY DEBUG: actor# [9:7486800728937531619:2113] Handle TEvProposeTransaction 2025-03-28T10:10:35.011179Z node 9 :TX_PROXY DEBUG: actor# [9:7486800728937531619:2113] TxId# 281474976715661 ProcessProposeTransaction 2025-03-28T10:10:35.011221Z node 9 :TX_PROXY DEBUG: actor# [9:7486800728937531619:2113] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [9:7486800741822434273:2585] 2025-03-28T10:10:35.014171Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434273:2585] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2025-03-28T10:10:35.014227Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434273:2585] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:35.014246Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434273:2585] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2025-03-28T10:10:35.014937Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434273:2585] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T10:10:35.015019Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434273:2585] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:35.015257Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434273:2585] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:35.015404Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434273:2585] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:35.015457Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434273:2585] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-03-28T10:10:35.015611Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434273:2585] txid# 281474976715661 HANDLE EvClientConnected 2025-03-28T10:10:35.020222Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434273:2585] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: 
'/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-03-28T10:10:35.020364Z node 9 :TX_PROXY ERROR: Actor# [9:7486800741822434273:2585] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:35.020394Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434273:2585] txid# 281474976715661 SEND to# [9:7486800737527466901:2340] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-03-28T10:10:35.039342Z node 9 :TX_PROXY DEBUG: actor# [9:7486800728937531619:2113] Handle TEvProposeTransaction 2025-03-28T10:10:35.039382Z node 9 :TX_PROXY DEBUG: actor# [9:7486800728937531619:2113] TxId# 281474976715662 ProcessProposeTransaction 2025-03-28T10:10:35.039434Z node 9 :TX_PROXY DEBUG: actor# [9:7486800728937531619:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [9:7486800741822434296:2596] 2025-03-28T10:10:35.042122Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434296:2596] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:42114" 2025-03-28T10:10:35.042198Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434296:2596] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:35.042218Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434296:2596] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:35.042266Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434296:2596] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:35.042592Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434296:2596] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:35.042850Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434296:2596] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:35.042911Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434296:2596] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-03-28T10:10:35.043065Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434296:2596] txid# 281474976715662 HANDLE EvClientConnected 2025-03-28T10:10:35.050850Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434296:2596] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-03-28T10:10:35.050908Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434296:2596] txid# 281474976715662 SEND to# 
[9:7486800741822434295:2333] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-03-28T10:10:35.143263Z node 9 :TX_PROXY DEBUG: actor# [9:7486800728937531619:2113] Handle TEvProposeTransaction 2025-03-28T10:10:35.143295Z node 9 :TX_PROXY DEBUG: actor# [9:7486800728937531619:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-03-28T10:10:35.143336Z node 9 :TX_PROXY DEBUG: actor# [9:7486800728937531619:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [9:7486800741822434331:2611] 2025-03-28T10:10:35.145644Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434331:2611] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:42150" 2025-03-28T10:10:35.145712Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434331:2611] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:35.145731Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434331:2611] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2025-03-28T10:10:35.145774Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434331:2611] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:35.146039Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434331:2611] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:35.146084Z node 9 :TX_PROXY ERROR: Actor# [9:7486800741822434331:2611] txid# 281474976715663, Access denied for ordinaryuser@builtin on path /dc-1, with access AlterSchema 2025-03-28T10:10:35.146185Z node 9 :TX_PROXY ERROR: Actor# [9:7486800741822434331:2611] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser@builtin on path /dc-1" issue_code: 200000 severity: 1 } 2025-03-28T10:10:35.146211Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800741822434331:2611] txid# 281474976715663 SEND to# [9:7486800741822434330:2351] Source {TEvProposeTransactionStatus Status# 5} 2025-03-28T10:10:35.146465Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=YTY0ZDhkMWEtMzE0YmE3YzMtNzcyNzVjYzAtNzViNTcw, ActorId: [9:7486800741822434316:2351], ActorState: ExecuteState, TraceId: 01jqe3psfk1avp2eryvxjd7qnt, Create QueryResponse for error on request, msg: 2025-03-28T10:10:35.146786Z node 9 :TX_PROXY DEBUG: actor# [9:7486800728937531619:2113] Handle TEvExecuteKqpTransaction 2025-03-28T10:10:35.146810Z node 9 :TX_PROXY DEBUG: actor# [9:7486800728937531619:2113] TxId# 281474976715664 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [GOOD] Test command err: Starting YDB, grpc: 8859, msgbus: 13600 2025-03-28T10:10:14.669268Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800651367759538:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:14.669310Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/00027c/r3tmp/tmpYVskYM/pdisk_1.dat 2025-03-28T10:10:15.162231Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:15.164412Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:15.171059Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:15.205417Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8859, node 1 2025-03-28T10:10:15.246789Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-28T10:10:15.246874Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-28T10:10:15.368532Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:15.368568Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:15.368577Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:15.368736Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13600 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T10:10:15.731694Z node 1 :TX_PROXY DEBUG: actor# [1:7486800651367759774:2116] Handle TEvNavigate describe path dc-1 2025-03-28T10:10:15.731758Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727589:2444] HANDLE EvNavigateScheme dc-1 2025-03-28T10:10:15.732833Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727589:2444] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:15.791895Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727589:2444] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T10:10:15.804794Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727589:2444] Handle TEvDescribeSchemeResult Forward to# [1:7486800655662727588:2443] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 
StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T10:10:15.827013Z node 1 :TX_PROXY DEBUG: actor# [1:7486800651367759774:2116] Handle TEvProposeTransaction 2025-03-28T10:10:15.827052Z node 1 :TX_PROXY DEBUG: actor# [1:7486800651367759774:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T10:10:15.830827Z node 1 :TX_PROXY DEBUG: actor# [1:7486800651367759774:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486800655662727597:2451] 2025-03-28T10:10:15.927485Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727597:2451] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:15.927568Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727597:2451] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:15.927603Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727597:2451] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:15.927660Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727597:2451] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:15.928014Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727597:2451] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:15.928219Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727597:2451] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T10:10:15.928304Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727597:2451] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T10:10:15.928475Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727597:2451] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T10:10:15.929403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T10:10:15.932417Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727597:2451] 
txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T10:10:15.932855Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727597:2451] txid# 281474976710657 SEND to# [1:7486800655662727595:2449] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 2025-03-28T10:10:15.954819Z node 1 :TX_PROXY DEBUG: actor# [1:7486800651367759774:2116] Handle TEvProposeTransaction 2025-03-28T10:10:15.954853Z node 1 :TX_PROXY DEBUG: actor# [1:7486800651367759774:2116] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T10:10:15.954902Z node 1 :TX_PROXY DEBUG: actor# [1:7486800651367759774:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486800655662727643:2490] 2025-03-28T10:10:15.957530Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727643:2490] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:15.957593Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727643:2490] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:15.957610Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727643:2490] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:15.957662Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727643:2490] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:15.957947Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727643:2490] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:15.958033Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727643:2490] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:15.958089Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727643:2490] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T10:10:15.958232Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727643:2490] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T10:10:15.958891Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T10:10:15.964441Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727643:2490] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T10:10:15.964511Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800655662727643:2490] txid# 281474976710658 SEND to# [1:7486800655662727642:2489] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T10:10:17.603659Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800664252662319:2338], DatabaseId: /dc-1, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:17.603662Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800664252662311:2335], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:17.603773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:17.603998Z node 1 :TX_PROXY DEBUG: actor# [1:7486800651367759774:2116] Han ... 59275554:2596] 2025-03-28T10:10:35.651404Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275554:2596] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\r\010\001\022\t\032\007dbadmin\n\025\010\000\022\021\010\001\020\200\200\002\032\007dbadmin \000\n\024\010\000\022\020\010\001\020\200\010\032\007dbadmin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56110" 2025-03-28T10:10:35.651484Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275554:2596] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:35.651507Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275554:2596] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:35.651562Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275554:2596] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:35.651872Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275554:2596] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:35.651990Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275554:2596] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:35.652051Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275554:2596] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-03-28T10:10:35.652188Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275554:2596] txid# 281474976715662 HANDLE EvClientConnected 2025-03-28T10:10:35.652809Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:35.656791Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275554:2596] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-03-28T10:10:35.656867Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275554:2596] txid# 281474976715662 SEND to# [9:7486800740559275553:2346] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-03-28T10:10:35.728013Z node 9 :TX_PROXY DEBUG: actor# [9:7486800727674372810:2113] Handle TEvProposeTransaction 2025-03-28T10:10:35.728041Z node 9 :TX_PROXY DEBUG: actor# [9:7486800727674372810:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-03-28T10:10:35.728084Z node 9 :TX_PROXY DEBUG: actor# [9:7486800727674372810:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [9:7486800740559275591:2619] 2025-03-28T10:10:35.730559Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275591:2619] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" 
OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56124" 2025-03-28T10:10:35.730606Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275591:2619] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:35.730626Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275591:2619] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:35.730672Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275591:2619] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:35.730964Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275591:2619] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:35.731054Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275591:2619] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:35.731098Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275591:2619] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-03-28T10:10:35.731247Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275591:2619] txid# 281474976715663 HANDLE EvClientConnected 2025-03-28T10:10:35.738634Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275591:2619] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-03-28T10:10:35.738678Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275591:2619] txid# 281474976715663 SEND to# [9:7486800740559275590:2348] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-03-28T10:10:35.748858Z node 9 :TX_PROXY DEBUG: actor# [9:7486800727674372810:2113] Handle TEvProposeTransaction 2025-03-28T10:10:35.748879Z node 9 :TX_PROXY DEBUG: actor# [9:7486800727674372810:2113] TxId# 281474976715664 ProcessProposeTransaction 2025-03-28T10:10:35.748904Z node 9 :TX_PROXY DEBUG: actor# [9:7486800727674372810:2113] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [9:7486800740559275604:2628] 2025-03-28T10:10:35.750526Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275604:2628] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "dbadmin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56132" 2025-03-28T10:10:35.750564Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275604:2628] txid# 281474976715664 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:35.750576Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275604:2628] txid# 281474976715664 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:35.750609Z node 9 :TX_PROXY DEBUG: 
Actor# [9:7486800740559275604:2628] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:35.750871Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275604:2628] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:35.750968Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275604:2628] HANDLE EvNavigateKeySetResult, txid# 281474976715664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:35.751012Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275604:2628] txid# 281474976715664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715664 TabletId# 72057594046644480} 2025-03-28T10:10:35.751276Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275604:2628] txid# 281474976715664 HANDLE EvClientConnected 2025-03-28T10:10:35.751693Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480 2025-03-28T10:10:35.753814Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275604:2628] txid# 281474976715664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715664} 2025-03-28T10:10:35.753863Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275604:2628] txid# 281474976715664 SEND to# [9:7486800740559275603:2355] Source {TEvProposeTransactionStatus txid# 281474976715664 Status# 48} 2025-03-28T10:10:35.824648Z node 9 :TX_PROXY DEBUG: actor# [9:7486800727674372810:2113] Handle TEvProposeTransaction 2025-03-28T10:10:35.824685Z node 9 :TX_PROXY DEBUG: actor# [9:7486800727674372810:2113] TxId# 281474976715665 ProcessProposeTransaction 2025-03-28T10:10:35.824736Z node 9 :TX_PROXY DEBUG: actor# [9:7486800727674372810:2113] Cookie# 0 userReqId# "" txid# 281474976715665 SEND to# [9:7486800740559275634:2640] 2025-03-28T10:10:35.827227Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275634:2640] txid# 281474976715665 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\007dbadmin\022\030\022\026\n\024all-users@well-known\032\325\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5OTgzNSwiaWF0IjoxNzQzMTU2NjM1LCJzdWIiOiJkYmFkbWluIn0.fzffQGqxzkNX1oTbQecwgiYRQMvrVHbxS3Rx4f2-EGcLmSvC4T7MbUxDhaWxS-Am6jo41M2pjmnkfJsGsQzjhp8sNAMGKFh5-awh6Z2Q3uQ_8V2_cn-a48xFoHEkYczuBOMkQxuFDsfVzgkP8SbDecPbyVwJm9jvhUecZvz395X3jBM2rpVthpm4rj-3ion53eYjNYl6aQcj1V88v3R2lRMWA5Y60XHQ3F8qYauS8hWNePYoD0nAwV7jK_oxnScEphqndGUz9k9T53ADZiJd_py6Qwiue3erjUy3ZiEOuuG4AQ4ZPj065nBsT6S2_XBCvV9Jgb_eO-cSDuAD_JQpRw\"\005Login*\201\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5OTgzNSwiaWF0IjoxNzQzMTU2NjM1LCJzdWIiOiJkYmFkbWluIn0.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:56146" 2025-03-28T10:10:35.827294Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275634:2640] txid# 281474976715665 Bootstrap, UserSID: dbadmin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:35.827336Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275634:2640] txid# 281474976715665 Bootstrap, UserSID: dbadmin IsClusterAdministrator: 0 
2025-03-28T10:10:35.827384Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275634:2640] txid# 281474976715665 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:35.827632Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275634:2640] txid# 281474976715665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:35.828388Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275634:2640] HANDLE EvNavigateKeySetResult, txid# 281474976715665 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:35.828449Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275634:2640] txid# 281474976715665 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715665 TabletId# 72057594046644480} 2025-03-28T10:10:35.828623Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275634:2640] txid# 281474976715665 HANDLE EvClientConnected 2025-03-28T10:10:35.834987Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275634:2640] txid# 281474976715665 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715665} 2025-03-28T10:10:35.835051Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800740559275634:2640] txid# 281474976715665 SEND to# [9:7486800740559275633:2361] Source {TEvProposeTransactionStatus txid# 281474976715665 Status# 48} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ComputeNodeMemoryLimit [GOOD] Test command err: Trying to start YDB, gRPC: 19880, MsgBus: 61027 2025-03-28T10:09:52.769031Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800556825747238:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:09:52.769118Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/000369/r3tmp/tmpJKr2GU/pdisk_1.dat 2025-03-28T10:09:53.112809Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 19880, node 1 2025-03-28T10:09:53.124535Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T10:09:53.152130Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:09:53.152167Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:09:53.152176Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:09:53.152329Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T10:09:53.162221Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:09:53.162387Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:09:53.164455Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61027 TClient is connected to server localhost:61027 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:09:53.604682Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:09:53.619956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:09:53.721562Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:09:53.882433Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:09:53.947694Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T10:09:55.258236Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800569710650894:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:09:55.258382Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:09:55.568003Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:09:55.596633Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T10:09:55.625424Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T10:09:55.653310Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T10:09:55.704884Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T10:09:55.736207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T10:09:55.772266Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800569710651403:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:09:55.772383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:09:55.772736Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800569710651408:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:09:55.777207Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T10:09:55.787403Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800569710651410:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T10:09:55.851444Z node 1 :TX_PROXY ERROR: Actor# [1:7486800569710651463:3442] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:09:57.768997Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800556825747238:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:09:57.769081Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T10:10:08.108343Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T10:10:08.108410Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:36.165006Z node 1 :KQP_EXECUTER WARN: ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01jqe3nky665nw73xkngmwcbhr, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=ZTI1NWYzODEtN2FlZDg1YzQtMjYzY2FkZGYtMTcxZDgyNjM=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. TKqpLiteralExecuter, memory limit exceeded. 2025-03-28T10:10:36.180313Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZTI1NWYzODEtN2FlZDg1YzQtMjYzY2FkZGYtMTcxZDgyNjM=, ActorId: [1:7486800574005619014:2487], ActorState: ExecuteState, TraceId: 01jqe3nky665nw73xkngmwcbhr, Create QueryResponse for error on request, msg: 2025-03-28T10:10:36.180489Z node 1 :KQP_SLOW_LOG WARN: TraceId: "01jqe3nky665nw73xkngmwcbhr", SessionId: ydb://session/3?node_id=1&id=ZTI1NWYzODEtN2FlZDg1YzQtMjYzY2FkZGYtMTcxZDgyNjM=, Slow query, duration: 39.501337s, status: PRECONDITION_FAILED, user: UNAUTHENTICATED, results: 0b, text: "\n SELECT ToDict(\n ListMap(\n ListFromRange(0ul, 5000000ul),\n ($x) -> { RETURN AsTuple($x, $x + 1); }\n )\n );\n ", parameters: 0b
: Warning: Type annotation, code: 1030
:2:13: Warning: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At function: SqlProjectItem
:2:20: Warning: At function: ToDict
:5:38: Warning: At function: OrderedMap
:5:53: Warning: At function: +
:5:53: Warning: Integral type implicit bitcast: Uint64 and Int32, code: 1107
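For readability, the statement from the slow-query entry above is reproduced here as formatted YQL. It is copied verbatim from the log (the `text:` field of the KQP_SLOW_LOG record), not a new test case: it materializes a 5,000,000-entry dict inside the literal executer, which is what triggers the memory-limit error reported on the next line (code 2029) and the bitcast warning at 5:53 (`$x + 1` mixes Uint64 and Int32).

    SELECT ToDict(
        ListMap(
            ListFromRange(0ul, 5000000ul),
            ($x) -> { RETURN AsTuple($x, $x + 1); }
        )
    );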
: Error: Memory limit exceeded, code: 2029 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 >> test_drain.py::TestHive::test_drain_on_stop >> Transfer::AlterBatchSize >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [GOOD] >> test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 [GOOD] |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2025-03-28T10:10:25.185782Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800699800606690:2075];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:25.185859Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/000232/r3tmp/tmps54s6E/pdisk_1.dat 2025-03-28T10:10:25.570271Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:25.602246Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:25.602350Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:25.605146Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25782, node 1 2025-03-28T10:10:25.687278Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:25.687303Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:25.687317Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:25.687457Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:1676 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:25.966984Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:26.167072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:51170" , at schemeshard: 72057594046644480 2025-03-28T10:10:26.167570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T10:10:26.167611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480 2025-03-28T10:10:26.170038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976710658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T10:10:26.170214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-03-28T10:10:26.172541Z node 1 :TX_PROXY ERROR: Actor# [1:7486800704095574891:2591] txid# 281474976710658, issues: { message: "Column stores are not supported" severity: 1 } assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: 
(PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) TBackTrace::Capture()+28 (0x1C67422C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1CB312E0) NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+8721 (0x1C1A6F71) std::__y1::__function::__func, void ()>::operator()()+280 (0x1C1CFE08) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1CB68306) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1CB37E59) NTestSuiteYdbLogStore::TCurrentTest::Execute()+1204 (0x1C1CEFD4) NUnitTest::TTestFactory::Execute()+2438 (0x1CB39726) NUnitTest::RunMain(int, char**)+5213 (0x1CB6287D) ??+0 (0x7F74760ABD90) __libc_start_main+128 (0x7F74760ABE40) _start+41 (0x1902A029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 [GOOD] Test command err: Starting YDB, grpc: 25131, msgbus: 20509 2025-03-28T10:10:22.549770Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800687006887387:2209];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:22.563385Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/00026f/r3tmp/tmpAVfJJn/pdisk_1.dat 2025-03-28T10:10:22.992621Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:22.992777Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:22.995150Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:22.999786Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 25131, node 1 2025-03-28T10:10:23.139227Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:23.139254Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:23.139260Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:23.139392Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:20509 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T10:10:23.389988Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687006887483:2116] Handle TEvNavigate describe path dc-1 2025-03-28T10:10:23.390057Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855285:2444] HANDLE EvNavigateScheme dc-1 2025-03-28T10:10:23.391069Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855285:2444] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.424053Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855285:2444] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T10:10:23.435555Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855285:2444] Handle TEvDescribeSchemeResult Forward to# [1:7486800691301855284:2443] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T10:10:23.456198Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687006887483:2116] Handle TEvProposeTransaction 2025-03-28T10:10:23.456242Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687006887483:2116] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T10:10:23.456344Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687006887483:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486800691301855298:2450] 2025-03-28T10:10:23.549619Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855298:2450] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:23.549707Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855298:2450] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:23.549743Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855298:2450] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:23.549835Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855298:2450] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:23.550056Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855298:2450] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.550184Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855298:2450] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T10:10:23.550234Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855298:2450] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T10:10:23.550378Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855298:2450] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T10:10:23.551024Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T10:10:23.555589Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855298:2450] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T10:10:23.555683Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855298:2450] txid# 281474976710657 SEND to# [1:7486800691301855297:2449] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-28T10:10:23.572121Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687006887483:2116] Handle TEvProposeTransaction 2025-03-28T10:10:23.572157Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687006887483:2116] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T10:10:23.572205Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687006887483:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486800691301855350:2491] 2025-03-28T10:10:23.574805Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855350:2491] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:23.574868Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855350:2491] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:23.574883Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855350:2491] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:23.574931Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855350:2491] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:23.575205Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855350:2491] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.575327Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855350:2491] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:23.575398Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855350:2491] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T10:10:23.575569Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855350:2491] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T10:10:23.576041Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T10:10:23.579915Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855350:2491] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T10:10:23.579978Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855350:2491] txid# 281474976710658 SEND to# [1:7486800691301855349:2490] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T10:10:23.631171Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687006887483:2116] Handle TEvProposeTransaction 2025-03-28T10:10:23.631208Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687006887483:2116] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T10:10:23.631251Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687006887483:2116] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7486800691301855368:2501] 2025-03-28T10:10:23.633710Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691301855368:2501] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\001\022\026\032\024ordinaryuser@builtin\n\"\010\000\022\036\010\001\020\200\200\002\032\024ordinaryuser@builtin \000\n!\010\000\022\035\010\001\020\200\010\032\024ordinaryuser@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:38966" 2025-03-28T10:10:23.633796Z node 1 :TX_PROXY DEBUG: Ac ... rSID: metadata@system IsClusterAdministrator: 0 2025-03-28T10:10:44.843734Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380082:2572] txid# 281474976710661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-28T10:10:44.843772Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380082:2572] txid# 281474976710661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-28T10:10:44.844486Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380082:2572] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2025-03-28T10:10:44.844566Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380082:2572] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:44.844814Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380082:2572] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:44.844968Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380082:2572] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:44.845022Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380082:2572] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-03-28T10:10:44.845148Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380082:2572] txid# 281474976710661 HANDLE EvClientConnected 2025-03-28T10:10:44.851096Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380082:2572] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-03-28T10:10:44.851236Z node 9 :TX_PROXY ERROR: Actor# [9:7486800782072380082:2572] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:44.851267Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380082:2572] txid# 281474976710661 SEND to# [9:7486800782072380012:2341] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-03-28T10:10:44.869217Z node 9 :TX_PROXY DEBUG: actor# [9:7486800769187477243:2113] Handle TEvProposeTransaction 
2025-03-28T10:10:44.869254Z node 9 :TX_PROXY DEBUG: actor# [9:7486800769187477243:2113] TxId# 281474976710662 ProcessProposeTransaction 2025-03-28T10:10:44.869296Z node 9 :TX_PROXY DEBUG: actor# [9:7486800769187477243:2113] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [9:7486800782072380106:2584] 2025-03-28T10:10:44.871849Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380106:2584] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:40084" 2025-03-28T10:10:44.871901Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380106:2584] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T10:10:44.871920Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380106:2584] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:44.871966Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380106:2584] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:44.872258Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380106:2584] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:44.872344Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380106:2584] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:44.872392Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380106:2584] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-03-28T10:10:44.872526Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380106:2584] txid# 281474976710662 HANDLE EvClientConnected 2025-03-28T10:10:44.880122Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380106:2584] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-03-28T10:10:44.880178Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380106:2584] txid# 281474976710662 SEND to# [9:7486800782072380105:2334] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-03-28T10:10:44.888458Z node 9 :TX_PROXY DEBUG: actor# [9:7486800769187477243:2113] Handle TEvProposeTransaction 2025-03-28T10:10:44.888499Z node 9 :TX_PROXY DEBUG: actor# [9:7486800769187477243:2113] TxId# 281474976710663 ProcessProposeTransaction 2025-03-28T10:10:44.888543Z node 9 :TX_PROXY DEBUG: actor# [9:7486800769187477243:2113] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [9:7486800782072380119:2593] 2025-03-28T10:10:44.891144Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380119:2593] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "db_admin@builtin" } } } UserToken: 
"\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:40100" 2025-03-28T10:10:44.891203Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380119:2593] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T10:10:44.891223Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380119:2593] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:44.891273Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380119:2593] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:44.891617Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380119:2593] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:44.891740Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380119:2593] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:44.891799Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380119:2593] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-03-28T10:10:44.891996Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380119:2593] txid# 281474976710663 HANDLE EvClientConnected 2025-03-28T10:10:44.892377Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T10:10:44.896169Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380119:2593] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-03-28T10:10:44.896219Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380119:2593] txid# 281474976710663 SEND to# [9:7486800782072380118:2346] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-03-28T10:10:44.952912Z node 9 :TX_PROXY DEBUG: actor# [9:7486800769187477243:2113] Handle TEvProposeTransaction 2025-03-28T10:10:44.952944Z node 9 :TX_PROXY DEBUG: actor# [9:7486800769187477243:2113] TxId# 281474976710664 ProcessProposeTransaction 2025-03-28T10:10:44.952972Z node 9 :TX_PROXY DEBUG: actor# [9:7486800769187477243:2113] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [9:7486800782072380154:2610] 2025-03-28T10:10:44.955395Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380154:2610] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\020db_admin@builtin\022\030\022\026\n\024all-users@well-known\032\020db_admin@builtin\"\007Builtin*\027db_a****ltin (DEFA2CD5)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:40128" 2025-03-28T10:10:44.955454Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380154:2610] txid# 281474976710664 Bootstrap, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T10:10:44.955473Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380154:2610] txid# 281474976710664 Bootstrap, 
UserSID: db_admin@builtin IsClusterAdministrator: 0 2025-03-28T10:10:44.955633Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380154:2610] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-28T10:10:44.955653Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380154:2610] txid# 281474976710664 HandleResolveDatabase, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 1 DatabaseOwner: db_admin@builtin 2025-03-28T10:10:44.955754Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380154:2610] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:44.956099Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380154:2610] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:44.956199Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380154:2610] HANDLE EvNavigateKeySetResult, txid# 281474976710664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:44.956247Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380154:2610] txid# 281474976710664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710664 TabletId# 72057594046644480} 2025-03-28T10:10:44.956611Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380154:2610] txid# 281474976710664 HANDLE EvClientConnected 2025-03-28T10:10:44.958681Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380154:2610] txid# 281474976710664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710664} 2025-03-28T10:10:44.958752Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800782072380154:2610] txid# 281474976710664 SEND to# [9:7486800782072380153:2352] Source {TEvProposeTransactionStatus txid# 281474976710664 Status# 48} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [GOOD] Test command err: Starting YDB, grpc: 26687, msgbus: 30266 2025-03-28T10:10:21.935659Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800683221283103:2212];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:21.935889Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/00027a/r3tmp/tmpupWcMo/pdisk_1.dat 2025-03-28T10:10:22.324371Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:22.332369Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:22.332493Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:22.341522Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 26687, node 1 2025-03-28T10:10:22.453084Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:22.453107Z node 1 :NET_CLASSIFIER WARN: will try to initialize from 
file: (empty maybe) 2025-03-28T10:10:22.453115Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:22.453247Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:30266 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T10:10:22.727456Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683221283191:2115] Handle TEvNavigate describe path dc-1 2025-03-28T10:10:22.727509Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516250992:2443] HANDLE EvNavigateScheme dc-1 2025-03-28T10:10:22.728685Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516250992:2443] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:22.777022Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516250992:2443] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T10:10:22.794909Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516250992:2443] Handle TEvDescribeSchemeResult Forward to# [1:7486800687516250991:2442] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
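For reference, the readiness probe the harness runs above (TClient::Ls on dc-1 until WaitRootIsUp succeeds) can be sketched with the YDB Python SDK. The endpoint and database below are assumptions read off the "Starting YDB, grpc: 26687" line, and scheme_client.describe_path is the SDK call assumed to back the same describe-path request:

    import ydb

    # Assumed connection details, taken from the log lines above.
    driver = ydb.Driver(endpoint="grpc://localhost:26687", database="/dc-1")
    driver.wait(timeout=5)  # rough SDK analogue of WaitRootIsUp 'dc-1'
    entry = driver.scheme_client.describe_path("/dc-1")
    print(entry.name, entry.owner)  # the describe result above reports Owner: "root@builtin"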
2025-03-28T10:10:22.813267Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683221283191:2115] Handle TEvProposeTransaction 2025-03-28T10:10:22.813297Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683221283191:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T10:10:22.813416Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683221283191:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486800687516251005:2449] 2025-03-28T10:10:22.884947Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251005:2449] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:22.885036Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251005:2449] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:22.885064Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251005:2449] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:22.885127Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251005:2449] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:22.885425Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251005:2449] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:22.885579Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251005:2449] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T10:10:22.885624Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251005:2449] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T10:10:22.885968Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251005:2449] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T10:10:22.886940Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T10:10:22.892675Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251005:2449] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T10:10:22.892759Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251005:2449] txid# 281474976710657 SEND to# [1:7486800687516251004:2448] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-28T10:10:22.913467Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683221283191:2115] Handle TEvProposeTransaction 2025-03-28T10:10:22.913493Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683221283191:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T10:10:22.913531Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683221283191:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486800687516251047:2487] 2025-03-28T10:10:22.916154Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251047:2487] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:22.916202Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251047:2487] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:22.916218Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251047:2487] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:22.916268Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251047:2487] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:22.916663Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251047:2487] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:22.916766Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251047:2487] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:22.916842Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251047:2487] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T10:10:22.916994Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251047:2487] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T10:10:22.917458Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T10:10:22.920033Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251047:2487] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T10:10:22.920101Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251047:2487] txid# 281474976710658 SEND to# [1:7486800687516251046:2486] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T10:10:22.971498Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683221283191:2115] Handle TEvProposeTransaction 2025-03-28T10:10:22.971525Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683221283191:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T10:10:22.971570Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683221283191:2115] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7486800687516251075:2500] 2025-03-28T10:10:22.974141Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687516251075:2500] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000\n\"\010\000\022\036\010\001\020\200\010\032\025cluster_admin@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:50090" 2025-03-28T10:10:22.974200Z node 1 :TX_PROXY DEBUG: ... G: actor# [9:7486800766846292036:2113] TxId# 281474976715661 ProcessProposeTransaction 2025-03-28T10:10:44.955953Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766846292036:2113] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [9:7486800779731194896:2577] 2025-03-28T10:10:44.958479Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800779731194896:2577] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:49392" 2025-03-28T10:10:44.958532Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800779731194896:2577] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-28T10:10:44.958551Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800779731194896:2577] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:44.958596Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800779731194896:2577] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:44.958892Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800779731194896:2577] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:44.958979Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800779731194896:2577] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:44.959024Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800779731194896:2577] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-03-28T10:10:44.959168Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800779731194896:2577] txid# 281474976715661 HANDLE EvClientConnected 2025-03-28T10:10:44.967113Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800779731194896:2577] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-03-28T10:10:44.967166Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800779731194896:2577] txid# 281474976715661 SEND to# [9:7486800779731194895:2333] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-03-28T10:10:45.201190Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766846292036:2113] Handle TEvProposeTransaction 2025-03-28T10:10:45.201229Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766846292036:2113] TxId# 281474976715662 ProcessProposeTransaction 
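The ESchemeOpAlterLogin { CreateUser { User: "ordinaryuser" ... } } transaction above (txid# 281474976715661) is the scheme-level form of a plain YQL statement; a minimal sketch via the Python SDK, reusing the driver from the earlier sketch:

    pool = ydb.SessionPool(driver)
    # YQL counterpart of the ESchemeOpAlterLogin/CreateUser request logged above.
    pool.retry_operation_sync(
        lambda session: session.execute_scheme(
            "CREATE USER ordinaryuser PASSWORD 'passwd';"
        )
    )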
2025-03-28T10:10:45.201286Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766846292036:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [9:7486800784026162214:2592] 2025-03-28T10:10:45.203262Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162214:2592] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:49396" 2025-03-28T10:10:45.203308Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162214:2592] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-28T10:10:45.203323Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162214:2592] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:45.203359Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162214:2592] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:45.203664Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162214:2592] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:45.203758Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162214:2592] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:45.203810Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162214:2592] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-03-28T10:10:45.203981Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162214:2592] txid# 281474976715662 HANDLE EvClientConnected 2025-03-28T10:10:45.204457Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:45.207857Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162214:2592] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-03-28T10:10:45.207912Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162214:2592] txid# 281474976715662 SEND to# [9:7486800784026162213:2347] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-03-28T10:10:45.286446Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766846292036:2113] Handle TEvProposeTransaction 2025-03-28T10:10:45.286480Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766846292036:2113] TxId# 281474976715663 ProcessProposeTransaction 2025-03-28T10:10:45.286524Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766846292036:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [9:7486800784026162253:2617] 2025-03-28T10:10:45.288518Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162253:2617] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: 
"targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:49408" 2025-03-28T10:10:45.288566Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162253:2617] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-28T10:10:45.288580Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162253:2617] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:45.288613Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162253:2617] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:45.288840Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162253:2617] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:45.288905Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162253:2617] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:45.288938Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162253:2617] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-03-28T10:10:45.289061Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162253:2617] txid# 281474976715663 HANDLE EvClientConnected 2025-03-28T10:10:45.305281Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162253:2617] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-03-28T10:10:45.305337Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162253:2617] txid# 281474976715663 SEND to# [9:7486800784026162252:2349] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-03-28T10:10:45.389102Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766846292036:2113] Handle TEvProposeTransaction 2025-03-28T10:10:45.389163Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766846292036:2113] TxId# 281474976715664 ProcessProposeTransaction 2025-03-28T10:10:45.389221Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766846292036:2113] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [9:7486800784026162281:2629] 2025-03-28T10:10:45.392203Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162281:2629] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "targetuser" Password: "passwd" IsHashedPassword: false } } } } UserToken: 
"\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5OTg0NSwiaWF0IjoxNzQzMTU2NjQ1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.ZlzxV3Bq5Nhpq-DDlM91ybXG-BX4f1SYfqiv6Ffw8fq58Ris90x2jMAiqAW62T5DBY1DdFkINz-bw_ZGHeBbpUWv7kLOrL6oApLqRxzQTCkydQ4nqN4l1zZrt9q-tGIzTlnPeSCrDt9ggNnqs6F2DXymDspHRKJ5XcxbNaDEeYdFJFaDEc2t_ljTeLZATtEhuaDthyDuFfjYeTrsPQdvAEFzM5aZ1Ivg45bvF8-f7ynKABuOc6tNQ46sZswsGYbSDQd4dJkW67cSQSNhIt0Shi72gw2minUU9St0W0not8KiqBLIR8GfiwmBREm9531bCMJWE8vZz8hzM_c5UVtM9Q\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5OTg0NSwiaWF0IjoxNzQzMTU2NjQ1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:49440" 2025-03-28T10:10:45.392285Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162281:2629] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-28T10:10:45.392306Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162281:2629] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-03-28T10:10:45.392366Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162281:2629] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:45.392690Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162281:2629] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:45.392713Z node 9 :TX_PROXY ERROR: Actor# [9:7486800784026162281:2629] txid# 281474976715664, Access denied for ordinaryuser, attempt to manage user 2025-03-28T10:10:45.392820Z node 9 :TX_PROXY ERROR: Actor# [9:7486800784026162281:2629] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-03-28T10:10:45.392845Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784026162281:2629] txid# 281474976715664 SEND to# [9:7486800784026162280:2361] Source {TEvProposeTransactionStatus Status# 5} 2025-03-28T10:10:45.393563Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=MjMyNDAyNmQtMTgxMzMyMjItOGJiZDk3N2UtMTQxYmU0YmU=, ActorId: [9:7486800784026162271:2361], ActorState: ExecuteState, TraceId: 01jqe3q3f90e0w2b8srefw4hsx, Create QueryResponse for error on request, msg: 2025-03-28T10:10:45.393875Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766846292036:2113] Handle TEvExecuteKqpTransaction 2025-03-28T10:10:45.393900Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766846292036:2113] TxId# 281474976715665 ProcessProposeKqpTransaction >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [GOOD] Test command err: Starting YDB, grpc: 23803, msgbus: 7514 2025-03-28T10:10:22.340321Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800685156959543:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:22.340500Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/000273/r3tmp/tmptWl7wk/pdisk_1.dat 2025-03-28T10:10:22.822499Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:22.822601Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:22.825798Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:22.858904Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 23803, node 1 2025-03-28T10:10:22.937627Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-28T10:10:22.938004Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-28T10:10:22.985636Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:22.985659Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:22.985665Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:22.985781Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7514 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T10:10:23.229556Z node 1 :TX_PROXY DEBUG: actor# [1:7486800685156959773:2115] Handle TEvNavigate describe path dc-1 2025-03-28T10:10:23.229604Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927577:2445] HANDLE EvNavigateScheme dc-1 2025-03-28T10:10:23.230712Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927577:2445] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.270581Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927577:2445] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T10:10:23.312028Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927577:2445] Handle TEvDescribeSchemeResult Forward to# [1:7486800689451927576:2444] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: 
"root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 2025-03-28T10:10:23.351145Z node 1 :TX_PROXY DEBUG: actor# [1:7486800685156959773:2115] Handle TEvProposeTransaction 2025-03-28T10:10:23.351177Z node 1 :TX_PROXY DEBUG: actor# [1:7486800685156959773:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T10:10:23.351270Z node 1 :TX_PROXY DEBUG: actor# [1:7486800685156959773:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486800689451927601:2455] 2025-03-28T10:10:23.437554Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927601:2455] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:23.437634Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927601:2455] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:23.437659Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927601:2455] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:23.437697Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927601:2455] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:23.438045Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927601:2455] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.438162Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927601:2455] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T10:10:23.438217Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927601:2455] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T10:10:23.438358Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927601:2455] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T10:10:23.438873Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T10:10:23.448826Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927601:2455] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T10:10:23.448907Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927601:2455] txid# 281474976710657 SEND to# [1:7486800689451927600:2454] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2025-03-28T10:10:23.470482Z node 1 :TX_PROXY DEBUG: actor# [1:7486800685156959773:2115] Handle TEvProposeTransaction 2025-03-28T10:10:23.470511Z node 1 :TX_PROXY DEBUG: actor# [1:7486800685156959773:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T10:10:23.470572Z node 1 :TX_PROXY DEBUG: actor# [1:7486800685156959773:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486800689451927639:2489] 2025-03-28T10:10:23.472932Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927639:2489] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:23.472985Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927639:2489] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:23.473000Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927639:2489] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:23.473038Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927639:2489] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:23.473534Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927639:2489] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.473601Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927639:2489] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:23.473665Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927639:2489] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T10:10:23.473780Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927639:2489] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T10:10:23.474215Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T10:10:23.482851Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927639:2489] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T10:10:23.482911Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927639:2489] txid# 281474976710658 SEND to# [1:7486800689451927638:2488] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T10:10:23.525865Z node 1 :TX_PROXY DEBUG: actor# [1:7486800685156959773:2115] Handle 
TEvProposeTransaction 2025-03-28T10:10:23.525902Z node 1 :TX_PROXY DEBUG: actor# [1:7486800685156959773:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T10:10:23.525946Z node 1 :TX_PROXY DEBUG: actor# [1:7486800685156959773:2115] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7486800689451927658:2500] 2025-03-28T10:10:23.528402Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800689451927658:2500] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\001\022\026\032\024ordinaryuser@builtin\n\"\010\000\022\036\010\001\020\200\200\002\032\024ordinaryuser@builtin \000\n!\010\000\022\035\010\001\020\200\010\032\024ordinaryuser@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\ ... marker# P6 2025-03-28T10:10:45.275797Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885297:2572] txid# 281474976710660 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:45.276000Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885297:2572] txid# 281474976710660 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:45.276146Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885297:2572] HANDLE EvNavigateKeySetResult, txid# 281474976710660 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:45.276198Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885297:2572] txid# 281474976710660 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710660 TabletId# 72057594046644480} 2025-03-28T10:10:45.276344Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885297:2572] txid# 281474976710660 HANDLE EvClientConnected 2025-03-28T10:10:45.279984Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885297:2572] txid# 281474976710660 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2025-03-28T10:10:45.280125Z node 9 :TX_PROXY ERROR: Actor# [9:7486800784342885297:2572] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:45.280160Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885297:2572] txid# 281474976710660 SEND to# [9:7486800784342885219:2340] Source {TEvProposeTransactionStatus txid# 281474976710660 Status# 48} 2025-03-28T10:10:45.301961Z node 9 :TX_PROXY DEBUG: actor# [9:7486800767163015162:2112] Handle TEvProposeTransaction 2025-03-28T10:10:45.301997Z node 9 :TX_PROXY DEBUG: actor# [9:7486800767163015162:2112] TxId# 281474976710661 ProcessProposeTransaction 2025-03-28T10:10:45.302043Z node 9 :TX_PROXY DEBUG: actor# [9:7486800767163015162:2112] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [9:7486800784342885321:2584] 2025-03-28T10:10:45.304677Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885321:2584] txid# 281474976710661 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:42506" 2025-03-28T10:10:45.304746Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885321:2584] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T10:10:45.304766Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885321:2584] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:45.304830Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885321:2584] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:45.305148Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885321:2584] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:45.305239Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885321:2584] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:45.305288Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885321:2584] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-03-28T10:10:45.305421Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885321:2584] txid# 281474976710661 HANDLE EvClientConnected 2025-03-28T10:10:45.315087Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885321:2584] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-03-28T10:10:45.315151Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885321:2584] txid# 281474976710661 SEND to# [9:7486800784342885320:2331] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-03-28T10:10:45.432451Z node 9 :TX_PROXY DEBUG: actor# [9:7486800767163015162:2112] Handle TEvProposeTransaction 2025-03-28T10:10:45.432492Z node 9 :TX_PROXY DEBUG: actor# [9:7486800767163015162:2112] TxId# 281474976710662 ProcessProposeTransaction 2025-03-28T10:10:45.432554Z node 9 :TX_PROXY DEBUG: actor# [9:7486800767163015162:2112] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [9:7486800784342885344:2598] 2025-03-28T10:10:45.435438Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885344:2598] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000\n\031\010\000\022\025\010\001\020\200\010\032\014ordinaryuser \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:42516" 2025-03-28T10:10:45.435658Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885344:2598] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 
CheckDatabaseAdministrator: 1 2025-03-28T10:10:45.435686Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885344:2598] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:45.435744Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885344:2598] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:45.436069Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885344:2598] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:45.436179Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885344:2598] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:45.436233Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885344:2598] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-03-28T10:10:45.436416Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885344:2598] txid# 281474976710662 HANDLE EvClientConnected 2025-03-28T10:10:45.437016Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:45.440073Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885344:2598] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-03-28T10:10:45.440141Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885344:2598] txid# 281474976710662 SEND to# [9:7486800784342885343:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-03-28T10:10:45.498724Z node 9 :TX_PROXY DEBUG: actor# [9:7486800767163015162:2112] Handle TEvProposeTransaction 2025-03-28T10:10:45.498766Z node 9 :TX_PROXY DEBUG: actor# [9:7486800767163015162:2112] TxId# 281474976710663 ProcessProposeTransaction 2025-03-28T10:10:45.498834Z node 9 :TX_PROXY DEBUG: actor# [9:7486800767163015162:2112] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [9:7486800784342885386:2622] 2025-03-28T10:10:45.501788Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885386:2622] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5OTg0NSwiaWF0IjoxNzQzMTU2NjQ1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.d892MtyR9e71SVPKgHvYry8i7Utn2c7vP2WdhpgYeC0GDKVsTJON56TRHCrMqfW-5_UYpz9KVZMv2s-d0OQ6cmMAweuAE6NkquX2DVnUX0kQ3r1SdlPkrctW_RgiPzBwSu7OthDG-JgSuvuypQHxtxxon9RO2KhpLf1w8eEKC3XZX2vfYcYo9JH-KtcrBjOffDpAL_wucfR4agiW5SWRTXW4-aUiolcvbYnPFBverNB6UFtrZ-H0XRkTjIKitSSIqmNCFk0HBIdt8utNnVUm8Us0fpGvH6dk9Q_3WwkQ1I-1-Jnka3d5ofg_mw-XrXxGWVC3xPYPiq3xtlp40G0WWA\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5OTg0NSwiaWF0IjoxNzQzMTU2NjQ1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:42542" 2025-03-28T10:10:45.501861Z node 9 :TX_PROXY DEBUG: 
Actor# [9:7486800784342885386:2622] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T10:10:45.501884Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885386:2622] txid# 281474976710663 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2025-03-28T10:10:45.502049Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885386:2622] txid# 281474976710663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-28T10:10:45.502076Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885386:2622] txid# 281474976710663 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2025-03-28T10:10:45.502146Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885386:2622] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:45.502412Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885386:2622] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:45.502439Z node 9 :TX_PROXY ERROR: Actor# [9:7486800784342885386:2622] txid# 281474976710663, Access denied for ordinaryuser, attempt to manage user 2025-03-28T10:10:45.502541Z node 9 :TX_PROXY ERROR: Actor# [9:7486800784342885386:2622] txid# 281474976710663, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2025-03-28T10:10:45.502571Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800784342885386:2622] txid# 281474976710663 SEND to# [9:7486800784342885385:2352] Source {TEvProposeTransactionStatus Status# 5} 2025-03-28T10:10:45.502956Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=YTkwMDgwYy02YjZmY2JkNS1iYWQzOGU4OC0yZGI3YmJiYQ==, ActorId: [9:7486800784342885371:2352], ActorState: ExecuteState, TraceId: 01jqe3q3k82gmjp3cct6mvr066, Create QueryResponse for error on request, msg: 2025-03-28T10:10:45.503465Z node 9 :TX_PROXY DEBUG: actor# [9:7486800767163015162:2112] Handle TEvExecuteKqpTransaction 2025-03-28T10:10:45.503498Z node 9 :TX_PROXY DEBUG: actor# [9:7486800767163015162:2112] TxId# 281474976710664 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 [GOOD] Test command err: Starting YDB, grpc: 5562, msgbus: 4394 2025-03-28T10:10:22.792627Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800687439789583:2073];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:22.792732Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/00024c/r3tmp/tmpkhD88Q/pdisk_1.dat 2025-03-28T10:10:23.273127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:23.273241Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:23.278828Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:23.313227Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 5562, node 1 2025-03-28T10:10:23.443998Z node 1 :NET_CLASSIFIER WARN: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:23.444025Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:23.444031Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:23.444162Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:4394 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T10:10:23.707218Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687439789813:2115] Handle TEvNavigate describe path dc-1 2025-03-28T10:10:23.707315Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757615:2442] HANDLE EvNavigateScheme dc-1 2025-03-28T10:10:23.708668Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757615:2442] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.757757Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757615:2442] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T10:10:23.777130Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757615:2442] Handle TEvDescribeSchemeResult Forward to# [1:7486800691734757614:2441] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T10:10:23.801350Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687439789813:2115] Handle TEvProposeTransaction 2025-03-28T10:10:23.801388Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687439789813:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T10:10:23.801533Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687439789813:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486800691734757639:2452] 2025-03-28T10:10:23.891062Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757639:2452] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:23.891145Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757639:2452] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:23.891173Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757639:2452] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:23.891219Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757639:2452] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:23.891512Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757639:2452] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.891687Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757639:2452] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T10:10:23.891767Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757639:2452] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T10:10:23.891938Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757639:2452] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T10:10:23.892690Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T10:10:23.903867Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757639:2452] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T10:10:23.903925Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757639:2452] txid# 281474976710657 SEND to# [1:7486800691734757638:2451] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
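The Login-suffixed UserToken values earlier in this output carry a standard JWT (the header segment eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ decodes to {"alg":"PS256","kid":"1"}); the claims can be read without verifying the signature:

    import base64, json

    # Payload segment of the ordinaryuser token quoted in the log above.
    payload = "eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5OTg0NSwiaWF0IjoxNzQzMTU2NjQ1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ"
    claims = json.loads(base64.urlsafe_b64decode(payload + "=" * (-len(payload) % 4)))
    print(claims)  # {'aud': ['/dc-1'], 'exp': 1743199845, 'iat': 1743156645, 'sub': 'ordinaryuser'}

Note that exp - iat is 43200 seconds, i.e. a 12-hour login-token lifetime.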
2025-03-28T10:10:23.924186Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687439789813:2115] Handle TEvProposeTransaction 2025-03-28T10:10:23.924234Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687439789813:2115] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T10:10:23.924309Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687439789813:2115] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486800691734757679:2488] 2025-03-28T10:10:23.926299Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757679:2488] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:23.926351Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757679:2488] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:23.926364Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757679:2488] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:23.926413Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757679:2488] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:23.926659Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757679:2488] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.926777Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757679:2488] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:23.926818Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757679:2488] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T10:10:23.926932Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757679:2488] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T10:10:23.927312Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T10:10:23.929436Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757679:2488] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T10:10:23.929500Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757679:2488] txid# 281474976710658 SEND to# [1:7486800691734757678:2487] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T10:10:23.968740Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687439789813:2115] Handle TEvProposeTransaction 2025-03-28T10:10:23.968778Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687439789813:2115] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28T10:10:23.968864Z node 1 :TX_PROXY DEBUG: actor# [1:7486800687439789813:2115] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [1:7486800691734757697:2498] 2025-03-28T10:10:23.971275Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757697:2498] txid# 281474976710659 
Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\026\010\001\022\022\032\020db_admin@builtin\n\036\010\000\022\032\010\001\020\200\200\002\032\020db_admin@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54968" 2025-03-28T10:10:23.971335Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800691734757697:2498] txid# 281474976710659 Bootstrap, UserSID: root@b ... "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:38302" 2025-03-28T10:10:45.663930Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157606:2591] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T10:10:45.663949Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157606:2591] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:45.664013Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157606:2591] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:45.664275Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157606:2591] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:45.664370Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157606:2591] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:45.664425Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157606:2591] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-03-28T10:10:45.664578Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157606:2591] txid# 281474976715661 HANDLE EvClientConnected 2025-03-28T10:10:45.672631Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157606:2591] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-03-28T10:10:45.672696Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157606:2591] txid# 281474976715661 SEND to# [9:7486800783779157605:2332] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-03-28T10:10:45.776148Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766599287594:2112] Handle TEvProposeTransaction 2025-03-28T10:10:45.776196Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766599287594:2112] TxId# 281474976715662 ProcessProposeTransaction 2025-03-28T10:10:45.776255Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766599287594:2112] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [9:7486800783779157626:2605] 2025-03-28T10:10:45.778781Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157626:2605] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\r\010\001\022\t\032\007dbadmin\n\025\010\000\022\021\010\001\020\200\200\002\032\007dbadmin \000\n\024\010\000\022\020\010\001\020\200\010\032\007dbadmin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: 
"/dc-1" RequestType: "" PeerName: "ipv6:[::1]:38316" 2025-03-28T10:10:45.778837Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157626:2605] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T10:10:45.778859Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157626:2605] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:45.778925Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157626:2605] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:45.779273Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157626:2605] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:45.779381Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157626:2605] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:45.779442Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157626:2605] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-03-28T10:10:45.779601Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157626:2605] txid# 281474976715662 HANDLE EvClientConnected 2025-03-28T10:10:45.780232Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:45.783242Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157626:2605] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-03-28T10:10:45.783297Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157626:2605] txid# 281474976715662 SEND to# [9:7486800783779157625:2346] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-03-28T10:10:45.793984Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766599287594:2112] Handle TEvProposeTransaction 2025-03-28T10:10:45.794024Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766599287594:2112] TxId# 281474976715663 ProcessProposeTransaction 2025-03-28T10:10:45.794066Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766599287594:2112] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [9:7486800783779157642:2618] 2025-03-28T10:10:45.796647Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157642:2618] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "dbadmin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:38332" 2025-03-28T10:10:45.796726Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157642:2618] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T10:10:45.796749Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157642:2618] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:45.796817Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157642:2618] txid# 281474976715663 
TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:45.797125Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157642:2618] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:45.797215Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157642:2618] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:45.797288Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157642:2618] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-03-28T10:10:45.797448Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157642:2618] txid# 281474976715663 HANDLE EvClientConnected 2025-03-28T10:10:45.797858Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715663:0, at schemeshard: 72057594046644480 2025-03-28T10:10:45.804221Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157642:2618] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-03-28T10:10:45.804288Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157642:2618] txid# 281474976715663 SEND to# [9:7486800783779157641:2347] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-03-28T10:10:45.862466Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766599287594:2112] Handle TEvProposeTransaction 2025-03-28T10:10:45.862499Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766599287594:2112] TxId# 281474976715664 ProcessProposeTransaction 2025-03-28T10:10:45.862574Z node 9 :TX_PROXY DEBUG: actor# [9:7486800766599287594:2112] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [9:7486800783779157674:2632] 2025-03-28T10:10:45.865150Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157674:2632] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\007dbadmin\022\030\022\026\n\024all-users@well-known\032\325\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5OTg0NSwiaWF0IjoxNzQzMTU2NjQ1LCJzdWIiOiJkYmFkbWluIn0.OFufnaDoCj_BLmCqiB5QGv549mJMLr0bnXcxElpV_TEz7t2ukx5EV8ZOADSdoowXH53R9fRVk8OfJGiQDsUciybtyQZPMaKDcFvRlmA8fSeVAixhNBXiEHpXYpYLQvpBZ-1Je_Q8dGBn-H2rZ3PhYpZuWCPtC6LjeeVkOJjUHbM8i_-geQujsXWGsXc-W0S3_qpaLV0qs4NA4P3rfr5O2rW_MyK3fbvKmJ1YcLaWF4hVbjfMcrtCrWXg-ZoBR6OoDeCHltXwBZVd3coYrSw5ywSDBjM4jf1aez9PYizSC4Ft1mAP5y7RL62bTUb5OHuxGGueb6IdFlX5xO4jjtJrTg\"\005Login*\201\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5OTg0NSwiaWF0IjoxNzQzMTU2NjQ1LCJzdWIiOiJkYmFkbWluIn0.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:38362" 2025-03-28T10:10:45.865236Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157674:2632] txid# 281474976715664 Bootstrap, UserSID: dbadmin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T10:10:45.865256Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157674:2632] txid# 281474976715664 Bootstrap, UserSID: dbadmin IsClusterAdministrator: 0 2025-03-28T10:10:45.865405Z 
node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157674:2632] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2025-03-28T10:10:45.865428Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157674:2632] txid# 281474976715664 HandleResolveDatabase, UserSID: dbadmin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 1 DatabaseOwner: dbadmin 2025-03-28T10:10:45.865469Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157674:2632] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:45.865681Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157674:2632] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:45.865775Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157674:2632] HANDLE EvNavigateKeySetResult, txid# 281474976715664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:45.865823Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157674:2632] txid# 281474976715664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715664 TabletId# 72057594046644480} 2025-03-28T10:10:45.868003Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157674:2632] txid# 281474976715664 HANDLE EvClientConnected 2025-03-28T10:10:45.875895Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157674:2632] txid# 281474976715664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715664} 2025-03-28T10:10:45.875953Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800783779157674:2632] txid# 281474976715664 SEND to# [9:7486800783779157673:2353] Source {TEvProposeTransactionStatus txid# 281474976715664 Status# 48} |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |96.5%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [GOOD] Test command err: Starting YDB, grpc: 3159, msgbus: 3977 2025-03-28T10:10:22.417712Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800684495964264:2077];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:22.417774Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/00026d/r3tmp/tmpnEeqer/pdisk_1.dat 2025-03-28T10:10:22.889308Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:22.889448Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:22.899287Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:22.945966Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3159, node 1 2025-03-28T10:10:22.969191Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-28T10:10:22.969211Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-28T10:10:23.066087Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:23.066111Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:23.066139Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:23.066301Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:3977 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T10:10:23.304920Z node 1 :TX_PROXY DEBUG: actor# [1:7486800684495964510:2131] Handle TEvNavigate describe path dc-1 2025-03-28T10:10:23.305005Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932287:2443] HANDLE EvNavigateScheme dc-1 2025-03-28T10:10:23.306030Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932287:2443] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.347753Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932287:2443] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T10:10:23.364330Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932287:2443] Handle TEvDescribeSchemeResult Forward to# [1:7486800688790932286:2442] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T10:10:23.384606Z node 1 :TX_PROXY DEBUG: actor# [1:7486800684495964510:2131] Handle TEvProposeTransaction 2025-03-28T10:10:23.384631Z node 1 :TX_PROXY DEBUG: actor# [1:7486800684495964510:2131] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T10:10:23.384753Z node 1 :TX_PROXY DEBUG: actor# [1:7486800684495964510:2131] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486800688790932302:2451] 2025-03-28T10:10:23.483695Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932302:2451] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:23.483777Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932302:2451] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:23.483800Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932302:2451] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:23.483874Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932302:2451] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:23.484358Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932302:2451] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.484542Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932302:2451] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T10:10:23.484606Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932302:2451] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T10:10:23.484809Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932302:2451] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T10:10:23.485596Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T10:10:23.488243Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932302:2451] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T10:10:23.488301Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932302:2451] txid# 281474976710657 SEND to# [1:7486800688790932301:2450] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-28T10:10:23.502047Z node 1 :TX_PROXY DEBUG: actor# [1:7486800684495964510:2131] Handle TEvProposeTransaction 2025-03-28T10:10:23.502076Z node 1 :TX_PROXY DEBUG: actor# [1:7486800684495964510:2131] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T10:10:23.502112Z node 1 :TX_PROXY DEBUG: actor# [1:7486800684495964510:2131] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486800688790932355:2493] 2025-03-28T10:10:23.505080Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932355:2493] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:23.505137Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932355:2493] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:23.505154Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932355:2493] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:23.505205Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932355:2493] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:23.505510Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932355:2493] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.505615Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932355:2493] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:23.505676Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932355:2493] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T10:10:23.505837Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932355:2493] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T10:10:23.506317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T10:10:23.508335Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932355:2493] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T10:10:23.508383Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688790932355:2493] txid# 281474976710658 SEND to# [1:7486800688790932354:2492] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T10:10:25.523624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800697380867034:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:25.523626Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800697380867042:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:25.523723Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:25.524053Z node 1 :TX_PROXY DEBUG: actor# [1:7486800684495964510:2131] Handl ... actor# [9:7486800774585461434:2103] TxId# 281474976715661 ProcessProposeTransaction 2025-03-28T10:10:46.491483Z node 9 :TX_PROXY DEBUG: actor# [9:7486800774585461434:2103] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [9:7486800787470364126:2584] 2025-03-28T10:10:46.494177Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364126:2584] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54690" 2025-03-28T10:10:46.494252Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364126:2584] txid# 281474976715661 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:46.494271Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364126:2584] txid# 281474976715661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:46.494319Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364126:2584] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:46.494662Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364126:2584] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:46.494799Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364126:2584] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:46.494888Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364126:2584] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2025-03-28T10:10:46.495086Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364126:2584] txid# 281474976715661 HANDLE EvClientConnected 2025-03-28T10:10:46.503317Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364126:2584] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2025-03-28T10:10:46.503387Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364126:2584] txid# 281474976715661 SEND to# [9:7486800787470364125:2333] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2025-03-28T10:10:46.597774Z node 9 :TX_PROXY DEBUG: actor# [9:7486800774585461434:2103] Handle TEvProposeTransaction 2025-03-28T10:10:46.597809Z node 9 :TX_PROXY DEBUG: actor# [9:7486800774585461434:2103] TxId# 281474976715662 ProcessProposeTransaction 2025-03-28T10:10:46.597867Z node 9 :TX_PROXY DEBUG: actor# [9:7486800774585461434:2103] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [9:7486800787470364149:2598] 2025-03-28T10:10:46.600572Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364149:2598] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: 
"\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54698" 2025-03-28T10:10:46.600641Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364149:2598] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:46.600664Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364149:2598] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:46.600720Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364149:2598] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:46.601052Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364149:2598] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:46.601145Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364149:2598] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:46.601215Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364149:2598] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2025-03-28T10:10:46.601362Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364149:2598] txid# 281474976715662 HANDLE EvClientConnected 2025-03-28T10:10:46.601864Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:46.605284Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364149:2598] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2025-03-28T10:10:46.605343Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364149:2598] txid# 281474976715662 SEND to# [9:7486800787470364148:2346] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2025-03-28T10:10:46.647332Z node 9 :TX_PROXY DEBUG: actor# [9:7486800774585461434:2103] Handle TEvProposeTransaction 2025-03-28T10:10:46.647363Z node 9 :TX_PROXY DEBUG: actor# [9:7486800774585461434:2103] TxId# 281474976715663 ProcessProposeTransaction 2025-03-28T10:10:46.647409Z node 9 :TX_PROXY DEBUG: actor# [9:7486800774585461434:2103] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [9:7486800787470364182:2617] 2025-03-28T10:10:46.650167Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364182:2617] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54726" 2025-03-28T10:10:46.650253Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364182:2617] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 
CheckDatabaseAdministrator: 0 2025-03-28T10:10:46.650276Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364182:2617] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:46.650324Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364182:2617] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:46.650608Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364182:2617] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:46.651130Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364182:2617] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:46.651193Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364182:2617] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2025-03-28T10:10:46.651341Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364182:2617] txid# 281474976715663 HANDLE EvClientConnected 2025-03-28T10:10:46.659813Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364182:2617] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2025-03-28T10:10:46.659871Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364182:2617] txid# 281474976715663 SEND to# [9:7486800787470364181:2348] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2025-03-28T10:10:46.714487Z node 9 :TX_PROXY DEBUG: actor# [9:7486800774585461434:2103] Handle TEvProposeTransaction 2025-03-28T10:10:46.714516Z node 9 :TX_PROXY DEBUG: actor# [9:7486800774585461434:2103] TxId# 281474976715664 ProcessProposeTransaction 2025-03-28T10:10:46.714558Z node 9 :TX_PROXY DEBUG: actor# [9:7486800774585461434:2103] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [9:7486800787470364210:2629] 2025-03-28T10:10:46.717295Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364210:2629] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5OTg0NiwiaWF0IjoxNzQzMTU2NjQ2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.U22TqlY3VTwAhAGECU9y441SnTGn50w_XOejDQgdPQk7FKLA7EiTjHNVbGJ9NdMINO_3cHGd_4DT0H-aM1391ddSQ0CvK7lCPhFU6is6OqRp2OXVh-E2QPHnG9pHx2O24xpFjqqZ8pFGwp7oCZCwKt4uxh_8Fz99H5rNe9ace5xW5wTTIeJ3w3p8maZB9ZMWpUgmfkg18iANdJHfFJe2CSHUs-DLdbPUzRen0YRh5MwQhhrZ2SiOhDBvVjlFUyEEmYNqch8DaKSeF9d6PP9PoHhsYKeUuj1idHe-g4ppor6IfEXOwtPLvwsRVPGhiSJWMihq0vPha5sERu76E7-aUg\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5OTg0NiwiaWF0IjoxNzQzMTU2NjQ2LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:54754" 2025-03-28T10:10:46.717354Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364210:2629] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:46.717376Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364210:2629] txid# 281474976715664 Bootstrap, 
UserSID: ordinaryuser IsClusterAdministrator: 0 2025-03-28T10:10:46.717422Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364210:2629] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:46.717723Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364210:2629] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:46.717782Z node 9 :TX_PROXY ERROR: Actor# [9:7486800787470364210:2629] txid# 281474976715664, Access denied for ordinaryuser on path /dc-1, with access AlterSchema 2025-03-28T10:10:46.717901Z node 9 :TX_PROXY ERROR: Actor# [9:7486800787470364210:2629] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser on path /dc-1" issue_code: 200000 severity: 1 } 2025-03-28T10:10:46.717940Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800787470364210:2629] txid# 281474976715664 SEND to# [9:7486800787470364209:2360] Source {TEvProposeTransactionStatus Status# 5} 2025-03-28T10:10:46.723127Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=OGRjYzMyYWYtYzkwMGUyNTYtZDZhODA5ZTMtZWIzYWQxNTI=, ActorId: [9:7486800787470364200:2360], ActorState: ExecuteState, TraceId: 01jqe3q4sdcjt4pkrbpp9bx8hv, Create QueryResponse for error on request, msg: 2025-03-28T10:10:46.723671Z node 9 :TX_PROXY DEBUG: actor# [9:7486800774585461434:2103] Handle TEvExecuteKqpTransaction 2025-03-28T10:10:46.723695Z node 9 :TX_PROXY DEBUG: actor# [9:7486800774585461434:2103] TxId# 281474976715665 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [GOOD] Test command err: Starting YDB, grpc: 3237, msgbus: 19968 2025-03-28T10:10:22.795168Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800686161654896:2168];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:22.796673Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/00024e/r3tmp/tmpN2zUpc/pdisk_1.dat 2025-03-28T10:10:23.284223Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:23.284342Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:23.288494Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:23.325034Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 3237, node 1 2025-03-28T10:10:23.374904Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-28T10:10:23.374954Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /dc-1 Strong=0 2025-03-28T10:10:23.449761Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:23.449795Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:23.449810Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:23.449955Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19968 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T10:10:23.660223Z node 1 :TX_PROXY DEBUG: actor# [1:7486800686161655025:2114] Handle TEvNavigate describe path dc-1 2025-03-28T10:10:23.660283Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622832:2444] HANDLE EvNavigateScheme dc-1 2025-03-28T10:10:23.661643Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622832:2444] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.705490Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622832:2444] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T10:10:23.721770Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622832:2444] Handle TEvDescribeSchemeResult Forward to# [1:7486800690456622831:2443] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T10:10:23.751324Z node 1 :TX_PROXY DEBUG: actor# [1:7486800686161655025:2114] Handle TEvProposeTransaction 2025-03-28T10:10:23.751353Z node 1 :TX_PROXY DEBUG: actor# [1:7486800686161655025:2114] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T10:10:23.751520Z node 1 :TX_PROXY DEBUG: actor# [1:7486800686161655025:2114] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486800690456622845:2450] 2025-03-28T10:10:23.849108Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622845:2450] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:23.849191Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622845:2450] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:23.849219Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622845:2450] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:23.849266Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622845:2450] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:23.849559Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622845:2450] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.849697Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622845:2450] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T10:10:23.849743Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622845:2450] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T10:10:23.849887Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622845:2450] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T10:10:23.850560Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T10:10:23.853202Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622845:2450] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T10:10:23.853257Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622845:2450] txid# 281474976710657 SEND to# [1:7486800690456622844:2449] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-28T10:10:23.876467Z node 1 :TX_PROXY DEBUG: actor# [1:7486800686161655025:2114] Handle TEvProposeTransaction 2025-03-28T10:10:23.876500Z node 1 :TX_PROXY DEBUG: actor# [1:7486800686161655025:2114] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T10:10:23.876529Z node 1 :TX_PROXY DEBUG: actor# [1:7486800686161655025:2114] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486800690456622896:2490] 2025-03-28T10:10:23.879050Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622896:2490] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:23.879097Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622896:2490] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:23.879113Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622896:2490] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:23.879183Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622896:2490] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:23.879473Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622896:2490] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.879559Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622896:2490] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:23.879628Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622896:2490] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T10:10:23.879760Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622896:2490] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T10:10:23.880238Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T10:10:23.882904Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622896:2490] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T10:10:23.882953Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800690456622896:2490] txid# 281474976710658 SEND to# [1:7486800690456622895:2489] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T10:10:25.890982Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800699046557581:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:25.890991Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800699046557572:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:25.891071Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:25.891651Z node 1 :TX_PROXY DEBUG: actor# [1:7486800686161655025:2114] Han ... actor# [9:7486800777460039746:2113] TxId# 281474976710661 ProcessProposeTransaction 2025-03-28T10:10:47.500287Z node 9 :TX_PROXY DEBUG: actor# [9:7486800777460039746:2113] Cookie# 0 userReqId# "" txid# 281474976710661 SEND to# [9:7486800794639909765:2585] 2025-03-28T10:10:47.502946Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909765:2585] txid# 281474976710661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "ordinaryuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:51920" 2025-03-28T10:10:47.503018Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909765:2585] txid# 281474976710661 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:47.503037Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909765:2585] txid# 281474976710661 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:47.503080Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909765:2585] txid# 281474976710661 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:47.503410Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909765:2585] txid# 281474976710661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:47.503517Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909765:2585] HANDLE EvNavigateKeySetResult, txid# 281474976710661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:47.503564Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909765:2585] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2025-03-28T10:10:47.503703Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909765:2585] txid# 281474976710661 HANDLE EvClientConnected 2025-03-28T10:10:47.515116Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909765:2585] txid# 281474976710661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710661} 2025-03-28T10:10:47.515207Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909765:2585] txid# 281474976710661 SEND to# [9:7486800794639909764:2331] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2025-03-28T10:10:47.645586Z node 9 :TX_PROXY DEBUG: actor# [9:7486800777460039746:2113] Handle TEvProposeTransaction 2025-03-28T10:10:47.645608Z node 9 :TX_PROXY DEBUG: actor# [9:7486800777460039746:2113] TxId# 281474976710662 ProcessProposeTransaction 2025-03-28T10:10:47.645642Z node 9 :TX_PROXY DEBUG: actor# [9:7486800777460039746:2113] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [9:7486800794639909785:2599] 2025-03-28T10:10:47.647727Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909785:2599] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: 
"\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44066" 2025-03-28T10:10:47.647782Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909785:2599] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:47.647801Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909785:2599] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:47.647852Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909785:2599] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:47.648171Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909785:2599] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:47.648256Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909785:2599] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:47.648307Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909785:2599] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2025-03-28T10:10:47.648437Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909785:2599] txid# 281474976710662 HANDLE EvClientConnected 2025-03-28T10:10:47.648950Z node 9 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:47.652309Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909785:2599] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2025-03-28T10:10:47.652356Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909785:2599] txid# 281474976710662 SEND to# [9:7486800794639909784:2346] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2025-03-28T10:10:47.705998Z node 9 :TX_PROXY DEBUG: actor# [9:7486800777460039746:2113] Handle TEvProposeTransaction 2025-03-28T10:10:47.706030Z node 9 :TX_PROXY DEBUG: actor# [9:7486800777460039746:2113] TxId# 281474976710663 ProcessProposeTransaction 2025-03-28T10:10:47.706075Z node 9 :TX_PROXY DEBUG: actor# [9:7486800777460039746:2113] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [9:7486800794639909818:2618] 2025-03-28T10:10:47.708392Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909818:2618] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true IsHashedPassword: false } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44090" 2025-03-28T10:10:47.708457Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909818:2618] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 
CheckDatabaseAdministrator: 0 2025-03-28T10:10:47.708473Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909818:2618] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:47.708518Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909818:2618] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:47.708797Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909818:2618] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:47.708888Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909818:2618] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:47.708938Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909818:2618] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2025-03-28T10:10:47.709090Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909818:2618] txid# 281474976710663 HANDLE EvClientConnected 2025-03-28T10:10:47.723998Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909818:2618] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2025-03-28T10:10:47.724064Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909818:2618] txid# 281474976710663 SEND to# [9:7486800794639909817:2348] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2025-03-28T10:10:47.783038Z node 9 :TX_PROXY DEBUG: actor# [9:7486800777460039746:2113] Handle TEvProposeTransaction 2025-03-28T10:10:47.783072Z node 9 :TX_PROXY DEBUG: actor# [9:7486800777460039746:2113] TxId# 281474976710664 ProcessProposeTransaction 2025-03-28T10:10:47.783134Z node 9 :TX_PROXY DEBUG: actor# [9:7486800777460039746:2113] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [9:7486800794639909846:2630] 2025-03-28T10:10:47.785679Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909846:2630] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5OTg0NywiaWF0IjoxNzQzMTU2NjQ3LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.RSNxhEqfDJUWqIQUS-fcHDfJ9Yi3oyIfAg8tysRW9R43slWK0l2-GyPUYgdnSDSStTOtJWRJEZAQ3Chm_X4g-Ifab1qEwl-ESBhFmOWn-kl-aZr-MbzCtOrtGhA79dzLN6E3-5vKEHxJvOMgsTh6d5oCSUzSIslcrdCsloLT7WP8dXjhAXrcVwqIqUEhvAr5VmWl07chAzHdmOVEN06vFzz6eJfDJWLgA7QVC3x7uBil0l7FZXOorZllzJymqzVsNtwB5Usw1f1gfYrP3aUOAh8laW1HCH796Ett8-wskpP9AoKniVQtfYmBSB9lj65SH0W1SKiW1k25gRBFBwrhYA\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc0MzE5OTg0NywiaWF0IjoxNzQzMTU2NjQ3LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:44122" 2025-03-28T10:10:47.785753Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909846:2630] txid# 281474976710664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2025-03-28T10:10:47.785791Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909846:2630] txid# 281474976710664 Bootstrap, 
UserSID: ordinaryuser IsClusterAdministrator: 0 2025-03-28T10:10:47.785837Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909846:2630] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:47.786150Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909846:2630] txid# 281474976710664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:47.786201Z node 9 :TX_PROXY ERROR: Actor# [9:7486800794639909846:2630] txid# 281474976710664, Access denied for ordinaryuser on path /dc-1, with access AlterSchema 2025-03-28T10:10:47.786302Z node 9 :TX_PROXY ERROR: Actor# [9:7486800794639909846:2630] txid# 281474976710664, issues: { message: "Access denied for ordinaryuser on path /dc-1" issue_code: 200000 severity: 1 } 2025-03-28T10:10:47.786331Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800794639909846:2630] txid# 281474976710664 SEND to# [9:7486800794639909845:2360] Source {TEvProposeTransactionStatus Status# 5} 2025-03-28T10:10:47.786643Z node 9 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=9&id=N2FmMzI5MWEtMWZkODk3NzAtZTBkNDZlZmUtYTgxZmJjY2Q=, ActorId: [9:7486800794639909836:2360], ActorState: ExecuteState, TraceId: 01jqe3q5tr2gr4n3rahwa58kat, Create QueryResponse for error on request, msg: 2025-03-28T10:10:47.786954Z node 9 :TX_PROXY DEBUG: actor# [9:7486800777460039746:2113] Handle TEvExecuteKqpTransaction 2025-03-28T10:10:47.786975Z node 9 :TX_PROXY DEBUG: actor# [9:7486800777460039746:2113] TxId# 281474976710665 ProcessProposeKqpTransaction >> KqpMultishardIndex::DataColumnWrite-UseSink [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/indexes/unittest >> KqpMultishardIndex::DataColumnWrite-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22485, MsgBus: 5108 2025-03-28T10:10:30.452063Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800717843322967:2068];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:30.452135Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/0001ba/r3tmp/tmptFOLiE/pdisk_1.dat 2025-03-28T10:10:30.909687Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:30.930209Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:30.930948Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:30.946426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 22485, node 1 2025-03-28T10:10:31.131148Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:31.131216Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:31.131230Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:31.131399Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5108 TClient is connected to server localhost:5108 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:31.791209Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:31.827987Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:31.995406Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:32.154956Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:32.227182Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:33.551972Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800730728226623:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:10:33.552150Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:10:34.160678Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T10:10:34.190414Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T10:10:34.222193Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T10:10:34.290708Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T10:10:34.327671Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T10:10:34.398188Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T10:10:34.487521Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800735023194439:2457], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:10:34.487608Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:10:34.488133Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800735023194444:2460], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:10:34.493924Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T10:10:34.514670Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800735023194447:2461], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T10:10:34.574740Z node 1 :TX_PROXY ERROR: Actor# [1:7486800735023194504:3456] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T10:10:35.454847Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800717843322967:2068];send_to=[0:7307199536658146131:7762515];
2025-03-28T10:10:35.454933Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T10:10:36.254437Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T10:10:45.889438Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T10:10:45.889486Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T10:10:46.154335Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486800786562806367:2855], TxId: 281474976710731, task: 1. Ctx: { SessionId : ydb://session/3?node_id=1&id=N2RkZmI1MC01YWI0OTgwMC1mMGFhNDdkZi03NTUyOGI2OA==. CustomerSuppliedId : . TraceId : 01jqe3q3sq7y99bxts8b29m13f. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }.
2025-03-28T10:10:46.157476Z node 1 :KQP_COMPUTE ERROR: SelfId: [1:7486800786562806368:2856], TxId: 281474976710731, task: 2. Ctx: { TraceId : 01jqe3q3sq7y99bxts8b29m13f. SessionId : ydb://session/3?node_id=1&id=N2RkZmI1MC01YWI0OTgwMC1mMGFhNDdkZi03NTUyOGI2OA==. CustomerSuppliedId : . CurrentExecutionId : . DatabaseId : /Root. Database : /Root. PoolId : default. }. Handle abort execution event from: [1:7486800786562806364:2490], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution }
2025-03-28T10:10:46.158071Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=N2RkZmI1MC01YWI0OTgwMC1mMGFhNDdkZi03NTUyOGI2OA==, ActorId: [1:7486800743613129356:2490], ActorState: ExecuteState, TraceId: 01jqe3q3sq7y99bxts8b29m13f, Create QueryResponse for error on request, msg:
2025-03-28T10:10:46.814212Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found
2025-03-28T10:10:46.814254Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found
2025-03-28T10:10:46.814267Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found
2025-03-28T10:10:46.814280Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found
2025-03-28T10:10:46.814295Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037929 not found
2025-03-28T10:10:46.827856Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found
2025-03-28T10:10:46.841956Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found
2025-03-28T10:10:46.844742Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found
2025-03-28T10:10:46.853645Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found
|96.8%| [TA] $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... results_accumulator.log}
|96.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/indexes/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] Test command err: Starting YDB, grpc: 6976, msgbus: 16728 2025-03-28T10:10:22.126136Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800683309575596:2071];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:22.126272Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/000275/r3tmp/tmp1Vo47R/pdisk_1.dat 2025-03-28T10:10:22.536303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2025-03-28T10:10:22.536344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T10:10:22.536362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2025-03-28T10:10:22.536377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: using default configuration 2025-03-28T10:10:22.536402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2025-03-28T10:10:22.536407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2025-03-28T10:10:22.536425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2025-03-28T10:10:22.536459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: [RootDataErasureManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, DataErasureInterval# 604800.000000s, DataErasureBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2025-03-28T10:10:22.536736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Execute 2025-03-28T10:10:22.585322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: Subscription to Console has been set up, schemeshardId: 72057594046644480 2025-03-28T10:10:22.585383Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: 2025-03-28T10:10:22.585393Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:22.609708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TxInitSchema.Complete 2025-03-28T10:10:22.609779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Execute 2025-03-28T10:10:22.609864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: UpgradeInitState as Uninitialized, schemeshardId: 72057594046644480 2025-03-28T10:10:22.615081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxUpgradeSchema.Complete 2025-03-28T10:10:22.615247Z node 1 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T10:10:22.616025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046644480 2025-03-28T10:10:22.616160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoExecute, path: dc-1, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2025-03-28T10:10:22.617822Z node 1 :HIVE WARN: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:22.617953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:22.620767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxInitRoot DoComplete, at schemeshard: 72057594046644480 2025-03-28T10:10:22.621542Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2025-03-28T10:10:22.621568Z node 1 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2025-03-28T10:10:22.621616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Execute 2025-03-28T10:10:22.621639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046644480, domainId: [OwnerId: 72057594046644480, LocalPathId: 1] 2025-03-28T10:10:22.621663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxServerlessStorageBilling.Complete 2025-03-28T10:10:22.621719Z node 1 :FLAT_TX_SCHEMESHARD INFO: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046644480 2025-03-28T10:10:22.625829Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 6976, node 1 2025-03-28T10:10:22.731193Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:22.731212Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:22.731222Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:22.731329Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:16728 WaitRootIsUp 'dc-1'... 
TClient::Ls request: dc-1 2025-03-28T10:10:23.012238Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683309575835:2115] Handle TEvNavigate describe path dc-1 2025-03-28T10:10:23.012298Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687604543643:2450] HANDLE EvNavigateScheme dc-1 2025-03-28T10:10:23.013528Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687604543643:2450] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.064238Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687604543643:2450] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T10:10:23.075918Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687604543643:2450] Handle TEvDescribeSchemeResult Forward to# [1:7486800687604543642:2449] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T10:10:23.099868Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683309575835:2115] Handle TEvProposeTransaction 2025-03-28T10:10:23.099912Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683309575835:2115] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T10:10:23.100051Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683309575835:2115] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486800687604543657:2457] 2025-03-28T10:10:23.201838Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687604543657:2457] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:23.201906Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687604543657:2457] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-28T10:10:23.201942Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687604543657:2457] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:23.201991Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687604543657:2457] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:23.202398Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687604543657:2457] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.202514Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687604543657:2457] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T10:10:23.202573Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687604543657:2457] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T10:10:23.202791Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800687604543657:2457] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T10:10:23.205510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 Owner: "root@builtin" UserToken: "***" PeerName: "" , at schemeshard: 72057594046644480 2025-03-28T10:10:23.205782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //dc-1, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T10:10:23.206107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T10:10:23.206377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T10:10:23.206412Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480 2025-03-28T10:10:23.208455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Comple ... OXY DEBUG: Actor# [7:7486800799532465195:2857] txid# 281474976715665 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-28T10:10:49.857626Z node 7 :TX_PROXY DEBUG: Actor# [7:7486800799532465195:2857] txid# 281474976715665 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:49.857670Z node 7 :TX_PROXY DEBUG: Actor# [7:7486800799532465195:2857] txid# 281474976715665 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:49.857998Z node 7 :TX_PROXY DEBUG: Actor# [7:7486800799532465195:2857] txid# 281474976715665 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:49.858116Z node 7 :TX_PROXY DEBUG: Actor# [7:7486800799532465195:2857] HANDLE EvNavigateKeySetResult, txid# 281474976715665 shardToRequest# 72075186224037891 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 2] DomainInfo.Params# Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 RedirectRequired# true 2025-03-28T10:10:49.858179Z node 7 :TX_PROXY DEBUG: Actor# [7:7486800799532465195:2857] txid# 281474976715665 SEND to# 72075186224037891 shardToRequest {TEvModifySchemeTransaction txid# 281474976715665 TabletId# 72075186224037891} 2025-03-28T10:10:49.858730Z node 7 :TX_PROXY DEBUG: Actor# [7:7486800799532465195:2857] txid# 281474976715665 HANDLE EvClientConnected 2025-03-28T10:10:49.860521Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpModifyACL ModifyACL { Name: "tenant-db" DiffACL: "\n\022\010\001\022\016\032\014clusteradmin\n\031\010\000\022\025\010\001\020\200\004\032\014clusteradmin \003" } } TxId: 281474976715665 TabletId: 72075186224037891 Owner: "root@builtin" UserToken: "***" PeerName: "ipv6:[::1]:46232" , at schemeshard: 72075186224037891 2025-03-28T10:10:49.860725Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /dc-1/tenant-db, operationId: 281474976715665:0, at schemeshard: 72075186224037891 2025-03-28T10:10:49.860885Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72075186224037891, LocalPathId: 1] name: dc-1/tenant-db type: EPathTypeSubDomain state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-03-28T10:10:49.860898Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-03-28T10:10:49.861084Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72075186224037891 2025-03-28T10:10:49.861113Z node 8 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72075186224037891 2025-03-28T10:10:49.861194Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-03-28T10:10:49.861208Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-03-28T10:10:49.861231Z node 8 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715665:0 progress is 1/1 2025-03-28T10:10:49.861244Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready 
parts: 1/1 2025-03-28T10:10:49.861283Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 4 2025-03-28T10:10:49.861336Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2025-03-28T10:10:49.861362Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72075186224037891, LocalPathId: 1], at schemeshard: 72075186224037891 2025-03-28T10:10:49.861375Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2025-03-28T10:10:49.861388Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715665:0 2025-03-28T10:10:49.861402Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2025-03-28T10:10:49.861413Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715665, [OwnerId: 72075186224037891, LocalPathId: 1], 9 2025-03-28T10:10:49.864107Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72075186224037891, at schemeshard: 72075186224037891 2025-03-28T10:10:49.864337Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715665, subject: root@builtin, status: StatusSuccess, operation: MODIFY ACL, path: /dc-1/tenant-db, add access: +(DS):clusteradmin, remove access: -():clusteradmin:- 2025-03-28T10:10:49.864566Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186224037891 2025-03-28T10:10:49.864589Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976715665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-03-28T10:10:49.864844Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976715665, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-03-28T10:10:49.864949Z node 8 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186224037891 2025-03-28T10:10:49.864969Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [8:7486800777797146169:2306], at schemeshard: 72075186224037891, txId: 281474976715665, path id: 1 2025-03-28T10:10:49.864991Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [8:7486800777797146169:2306], at schemeshard: 72075186224037891, txId: 281474976715665, path id: 1 2025-03-28T10:10:49.865776Z node 7 :TX_PROXY DEBUG: Actor# [7:7486800799532465195:2857] txid# 281474976715665 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715665} 2025-03-28T10:10:49.865839Z node 7 :TX_PROXY DEBUG: Actor# [7:7486800799532465195:2857] txid# 281474976715665 SEND to# [7:7486800799532465194:2357] Source {TEvProposeTransactionStatus txid# 281474976715665 Status# 48} TEST clusteradmin triggers auth on tenant 2025-03-28T10:10:49.867884Z node 8 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976715665 2025-03-28T10:10:49.867997Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186224037891, msg: Owner: 
72075186224037891 Generation: 1 LocalPathId: 1 Version: 9 PathOwnerId: 72075186224037891, cookie: 281474976715665 2025-03-28T10:10:49.868015Z node 8 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72075186224037891, txId: 281474976715665 2025-03-28T10:10:49.868049Z node 8 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186224037891, txId: 281474976715665, pathId: [OwnerId: 72075186224037891, LocalPathId: 1], version: 9 2025-03-28T10:10:49.868071Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 5 2025-03-28T10:10:49.868160Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72075186224037891, txId: 281474976715665, subscribers: 0 2025-03-28T10:10:49.871003Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 TClient is connected to server localhost:25646 TClient::Ls request: /dc-1/tenant-db 2025-03-28T10:10:50.124060Z node 7 :TX_PROXY DEBUG: actor# [7:7486800773762660252:2113] Handle TEvNavigate describe path /dc-1/tenant-db 2025-03-28T10:10:50.124114Z node 7 :TX_PROXY DEBUG: Actor# [7:7486800803827432501:2866] HANDLE EvNavigateScheme /dc-1/tenant-db 2025-03-28T10:10:50.124478Z node 7 :TX_PROXY DEBUG: Actor# [7:7486800803827432501:2866] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:50.124610Z node 7 :TX_PROXY DEBUG: Actor# [7:7486800803827432501:2866] SEND to# 72075186224037891 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/tenant-db" Options { ReturnBoundaries: false ShowPrivateTable: true ReturnRangeKey: false } 2025-03-28T10:10:50.127610Z node 7 :TX_PROXY DEBUG: Actor# [7:7486800803827432501:2866] Handle TEvDescribeSchemeResult Forward to# [7:7486800803827432500:2865] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 0 Record# Status: StatusSuccess Path: "/dc-1/tenant-db" PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "name_tenant-db_kind_tenant-db" Kind: "tenant-db" } StoragePools { Name: "name_tenant-db_kind_test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Sids { Name: "tenantuser" Type: USER } Audience: 
"/dc-1/tenant-db" } } } PathId: 1 PathOwnerId: 72075186224037891 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037891 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 184467440737095... (TRUNCATED) 2025-03-28T10:10:50.171125Z node 8 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 2025-03-28T10:10:50.176300Z node 7 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 8 2025-03-28T10:10:50.176772Z node 7 :HIVE WARN: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connected -> Disconnected >> Transfer::AlterBatchSize [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] |97.1%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] Test command err: Starting YDB, grpc: 19612, msgbus: 25151 2025-03-28T10:10:22.136364Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800683973163497:2132];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:22.136414Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/000278/r3tmp/tmpDcObHw/pdisk_1.dat 2025-03-28T10:10:22.535484Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:22.563121Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:22.563220Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:22.568938Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 19612, node 1 2025-03-28T10:10:22.667638Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:22.667657Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:22.667661Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:22.667741Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to 
server localhost:25151 WaitRootIsUp 'dc-1'... TClient::Ls request: dc-1 2025-03-28T10:10:22.938542Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683973163688:2133] Handle TEvNavigate describe path dc-1 2025-03-28T10:10:22.938636Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800683973164164:2440] HANDLE EvNavigateScheme dc-1 2025-03-28T10:10:22.939618Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800683973164164:2440] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:22.983091Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800683973164164:2440] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2025-03-28T10:10:22.998261Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800683973164164:2440] Handle TEvDescribeSchemeResult Forward to# [1:7486800683973164163:2439] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } } } PathId: 1 PathOwnerId: 72057594046644480 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'dc-1' success. 
2025-03-28T10:10:23.015084Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683973163688:2133] Handle TEvProposeTransaction 2025-03-28T10:10:23.015111Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683973163688:2133] TxId# 281474976710657 ProcessProposeTransaction 2025-03-28T10:10:23.015215Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683973163688:2133] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7486800688268131476:2449] 2025-03-28T10:10:23.140176Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131476:2449] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:23.140264Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131476:2449] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T10:10:23.140297Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131476:2449] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:23.140344Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131476:2449] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:23.141179Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131476:2449] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.141296Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131476:2449] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2025-03-28T10:10:23.141363Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131476:2449] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2025-03-28T10:10:23.142523Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131476:2449] txid# 281474976710657 HANDLE EvClientConnected 2025-03-28T10:10:23.144696Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 2025-03-28T10:10:23.158091Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131476:2449] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2025-03-28T10:10:23.158169Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131476:2449] txid# 281474976710657 SEND to# [1:7486800688268131475:2448] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} waiting... 
2025-03-28T10:10:23.180805Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683973163688:2133] Handle TEvProposeTransaction 2025-03-28T10:10:23.180833Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683973163688:2133] TxId# 281474976710658 ProcessProposeTransaction 2025-03-28T10:10:23.180868Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683973163688:2133] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7486800688268131531:2490] 2025-03-28T10:10:23.183269Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131531:2490] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)" PeerName: "" 2025-03-28T10:10:23.183341Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131531:2490] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2025-03-28T10:10:23.183358Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131531:2490] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:23.183409Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131531:2490] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:23.183711Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131531:2490] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:23.183829Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131531:2490] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2025-03-28T10:10:23.183871Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131531:2490] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2025-03-28T10:10:23.184034Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131531:2490] txid# 281474976710658 HANDLE EvClientConnected 2025-03-28T10:10:23.184460Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T10:10:23.186898Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131531:2490] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2025-03-28T10:10:23.186959Z node 1 :TX_PROXY DEBUG: Actor# [1:7486800688268131531:2490] txid# 281474976710658 SEND to# [1:7486800688268131530:2489] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2025-03-28T10:10:25.226058Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800696858066209:2336], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:25.226157Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:25.226292Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800696858066217:2339], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:25.226653Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683973163688:2133] Handle TEvProposeTransaction 2025-03-28T10:10:25.226675Z node 1 :TX_PROXY DEBUG: actor# [1:7486800683973163688:2133] TxId# 281474976710659 ProcessProposeTransaction 2025-03-28 ... _PROXY DEBUG: Actor# [9:7486800818810663379:2846] txid# 281474976715664 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2025-03-28T10:10:53.501166Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800818810663379:2846] txid# 281474976715664 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2025-03-28T10:10:53.501236Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800818810663379:2846] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2025-03-28T10:10:53.501607Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800818810663379:2846] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:53.501746Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800818810663379:2846] HANDLE EvNavigateKeySetResult, txid# 281474976715664 shardToRequest# 72075186224037891 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 2] DomainInfo.Params# Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 RedirectRequired# true 2025-03-28T10:10:53.501816Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800818810663379:2846] txid# 281474976715664 SEND to# 72075186224037891 shardToRequest {TEvModifySchemeTransaction txid# 281474976715664 TabletId# 72075186224037891} 2025-03-28T10:10:53.502613Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800818810663379:2846] txid# 281474976715664 HANDLE EvClientConnected 2025-03-28T10:10:53.505457Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpModifyACL ModifyACL { Name: "tenant-db" DiffACL: "\n\022\010\001\022\016\032\014clusteradmin\n\031\010\000\022\025\010\001\020\200\004\032\014clusteradmin \003" } } TxId: 281474976715664 TabletId: 72075186224037891 Owner: "root@builtin" UserToken: "***" PeerName: "ipv6:[::1]:37596" , at schemeshard: 72075186224037891 2025-03-28T10:10:53.505726Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: TModifyACL Propose, path: /dc-1/tenant-db, operationId: 281474976715664:0, at schemeshard: 72075186224037891 2025-03-28T10:10:53.505894Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS visit path id [OwnerId: 72075186224037891, LocalPathId: 1] name: dc-1/tenant-db type: EPathTypeSubDomain state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-03-28T10:10:53.505913Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: ExamineTreeVFS run path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-03-28T10:10:53.506101Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976715664:1, propose status:StatusSuccess, reason: , at schemeshard: 72075186224037891 2025-03-28T10:10:53.506137Z node 10 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72075186224037891 2025-03-28T10:10:53.506220Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715664:0 progress is 1/1 2025-03-28T10:10:53.506241Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715664 ready 
parts: 1/1 2025-03-28T10:10:53.506269Z node 10 :FLAT_TX_SCHEMESHARD INFO: Part operation is done id#281474976715664:0 progress is 1/1 2025-03-28T10:10:53.506282Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715664 ready parts: 1/1 2025-03-28T10:10:53.506326Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 4 2025-03-28T10:10:53.506391Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 281474976715664, ready parts: 1/1, is published: false 2025-03-28T10:10:53.506415Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72075186224037891, LocalPathId: 1], at schemeshard: 72075186224037891 2025-03-28T10:10:53.506429Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToDone TxId: 281474976715664 ready parts: 1/1 2025-03-28T10:10:53.506441Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Operation and all the parts is done, operation id: 281474976715664:0 2025-03-28T10:10:53.506454Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication still in progress, tx: 281474976715664, publications: 1, subscribers: 0 2025-03-28T10:10:53.506468Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: Publication details: tx: 281474976715664, [OwnerId: 72075186224037891, LocalPathId: 1], 8 2025-03-28T10:10:53.510640Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800818810663379:2846] txid# 281474976715664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715664} 2025-03-28T10:10:53.510758Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800818810663379:2846] txid# 281474976715664 SEND to# [9:7486800818810663378:2357] Source {TEvProposeTransactionStatus txid# 281474976715664 Status# 48} 2025-03-28T10:10:53.510139Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976715664, response: Status: StatusSuccess TxId: 281474976715664 SchemeshardId: 72075186224037891, at schemeshard: 72075186224037891 2025-03-28T10:10:53.510376Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976715664, subject: root@builtin, status: StatusSuccess, operation: MODIFY ACL, path: /dc-1/tenant-db, add access: +(DS):clusteradmin, remove access: -():clusteradmin:- 2025-03-28T10:10:53.510616Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186224037891 2025-03-28T10:10:53.510640Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976715664, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-03-28T10:10:53.510944Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186224037891, txId: 281474976715664, path id: [OwnerId: 72075186224037891, LocalPathId: 1] 2025-03-28T10:10:53.511049Z node 10 :FLAT_TX_SCHEMESHARD INFO: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186224037891 2025-03-28T10:10:53.511071Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [10:7486800797356595076:2308], at schemeshard: 72075186224037891, txId: 281474976715664, path id: 1 2025-03-28T10:10:53.511099Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: TTxPublishToSchemeBoard Send, to populator: [10:7486800797356595076:2308], at schemeshard: 72075186224037891, txId: 281474976715664, path id: 1 2025-03-28T10:10:53.512050Z node 10 :FLAT_TX_SCHEMESHARD INFO: Handle TEvUpdateAck, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 
LocalPathId: 1 Version: 8 PathOwnerId: 72075186224037891, cookie: 281474976715664 2025-03-28T10:10:53.512142Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 1 Version: 8 PathOwnerId: 72075186224037891, cookie: 281474976715664 2025-03-28T10:10:53.512154Z node 10 :FLAT_TX_SCHEMESHARD INFO: Publication in-flight, count: 1, at schemeshard: 72075186224037891, txId: 281474976715664 2025-03-28T10:10:53.512165Z node 10 :FLAT_TX_SCHEMESHARD INFO: AckPublish, at schemeshard: 72075186224037891, txId: 281474976715664, pathId: [OwnerId: 72075186224037891, LocalPathId: 1], version: 8 2025-03-28T10:10:53.512180Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 1] was 5 2025-03-28T10:10:53.512263Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: Publication complete, notify & remove, at schemeshard: 72075186224037891, txId: 281474976715664, subscribers: 0 TEST clusteradmin triggers auth on tenant 2025-03-28T10:10:53.514800Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715664 TClient is connected to server localhost:1249 TClient::Ls request: /dc-1/tenant-db 2025-03-28T10:10:53.770233Z node 9 :TX_PROXY DEBUG: actor# [9:7486800793040858250:2113] Handle TEvNavigate describe path /dc-1/tenant-db 2025-03-28T10:10:53.770313Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800818810663390:2853] HANDLE EvNavigateScheme /dc-1/tenant-db 2025-03-28T10:10:53.770728Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800818810663390:2853] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2025-03-28T10:10:53.770867Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800818810663390:2853] SEND to# 72075186224037891 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/tenant-db" Options { ReturnBoundaries: false ShowPrivateTable: true ReturnRangeKey: false } 2025-03-28T10:10:53.772625Z node 9 :TX_PROXY DEBUG: Actor# [9:7486800818810663390:2853] Handle TEvDescribeSchemeResult Forward to# [9:7486800818810663389:2852] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 0 Record# Status: StatusSuccess Path: "/dc-1/tenant-db" PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "name_tenant-db_kind_tenant-db" Kind: "tenant-db" } StoragePools { Name: "name_tenant-db_kind_test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 
2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1/tenant-db" } } } PathId: 1 PathOwnerId: 72075186224037891 TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 SubDomainVersion: 3 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72075186224037891 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 184467440737095... (TRUNCATED) 2025-03-28T10:10:53.810431Z node 9 :HIVE WARN: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2025-03-28T10:10:53.810883Z node 9 :HIVE WARN: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2025-03-28T10:10:53.810903Z node 10 :FLAT_TX_SCHEMESHARD INFO: Clear TempDirsState with owners number: 0 |97.2%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/transfer/unittest >> Transfer::AlterBatchSize [GOOD] Test command err: Attempt=19 count=0 Attempt=18 count=0 Attempt=17 count=0 Attempt=16 count=1 |97.4%| [TM] {RESULT} ydb/tests/functional/transfer/unittest >> KqpStats::SysViewClientLost [FAIL] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [GOOD] |97.5%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} |97.5%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost [FAIL] Test command err: Trying to start YDB, gRPC: 13169, MsgBus: 2801 2025-03-28T10:10:09.015472Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800628786635543:2144];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:09.016288Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/00038d/r3tmp/tmpX5u7GK/pdisk_1.dat 2025-03-28T10:10:09.355472Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:09.360689Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:09.360791Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:09.363014Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 13169, node 1 2025-03-28T10:10:09.463994Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:09.464013Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:09.464017Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:09.464097Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2801 TClient is connected to server localhost:2801 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:10.038053Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:10.061261Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:10.210186Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 
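The "WaitRootIsUp 'Root'..." / "WaitRootIsUp 'Root' success." pair earlier in this block is a readiness poll: the test client re-issues Ls against the root path until the scheme answers with a valid PathDescription, and only then proceeds to the schemeshard operations logged above. A minimal Python sketch of that loop, assuming a hypothetical ls(path) helper that returns the parsed Ls response as a dict (the real TClient is C++ and its API may differ):

    import time

    def wait_root_is_up(ls, path="Root", timeout_s=30.0, poll_s=0.5):
        # Poll the scheme via Ls until the root path is reported, the way
        # the test client loops before printing "WaitRootIsUp ... success."
        deadline = time.monotonic() + timeout_s
        while time.monotonic() < deadline:
            response = ls(path)  # hypothetical helper: parsed Ls output
            if response.get("Status") == 1 and "PathDescription" in response:
                return response
            time.sleep(poll_s)
        raise TimeoutError("WaitRootIsUp %r did not succeed in %ss" % (path, timeout_s))

Any callable returning a dict shaped like the Ls responses in this log can be passed in as ls.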
2025-03-28T10:10:10.362269Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:10.434879Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:12.228459Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800641671539127:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:12.228624Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:12.561559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:10:12.594920Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T10:10:12.629108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T10:10:12.705989Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T10:10:12.740692Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T10:10:12.778472Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T10:10:12.865084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800641671539644:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:12.865163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:12.865436Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800641671539649:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:12.869444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T10:10:12.882083Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800641671539651:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T10:10:12.976219Z node 1 :TX_PROXY ERROR: Actor# [1:7486800641671539708:3449] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:13.998481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:14.015685Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800628786635543:2144];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:14.015848Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T10:10:24.341776Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T10:10:24.341824Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:45.047600Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743156645009, txId: 281474976710672] shutting down 2025-03-28T10:10:45.108192Z node 1 :RPC_REQUEST WARN: Client lost 2025-03-28T10:10:46.385488Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743156646367, txId: 281474976710674] shutting down 2025-03-28T10:10:47.695726Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743156647653, txId: 281474976710676] shutting down 2025-03-28T10:10:48.969362Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743156648956, txId: 281474976710678] shutting down 2025-03-28T10:10:50.249089Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743156650231, txId: 281474976710680] shutting down 2025-03-28T10:10:51.479508Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743156651472, txId: 281474976710682] shutting down 2025-03-28T10:10:52.675316Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743156652663, txId: 281474976710684] shutting down 2025-03-28T10:10:53.886176Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743156653872, txId: 281474976710686] shutting down 2025-03-28T10:10:55.151900Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743156655079, txId: 281474976710688] shutting down 2025-03-28T10:10:56.359112Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743156656345, txId: 281474976710690] shutting down 2025-03-28T10:10:57.553623Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743156657546, txId: 281474976710692] shutting down assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1949767B 1. 
/tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1995C4FF
2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1903CBE8
3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x1904FBF7
4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1904FBF7
5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1904FBF7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x1904FBF7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x1904FBF7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19993525
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19993525
10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19993525
11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19963078
12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x1904ED7B
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19964945
14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1998DA9C
15. ??:0: ?? @ 0x7FEB904EAD8F
16. ??:0: ?? @ 0x7FEB904EAE3F
17. ??:0: ?? @ 0x1640D028
>> test.py::test_viewer_nodes
>> test.py::test_viewer_nodes [GOOD]
>> test.py::test_viewer_describe [GOOD]
>> test.py::test_viewer_tenantinfo [GOOD]
>> test.py::test_viewer_tenantinfo_db
>> test.py::test_viewer_tenantinfo_db [GOOD]
>> test_dc_local.py::TestAlloc::test_dc_locality[kikimr0] [GOOD]
|97.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/py3test >> test.py::test_viewer_tenantinfo_db [GOOD]
|97.7%| [TM] {RESULT} ydb/core/viewer/tests/py3test
>> test_scheduling.py::TestSchedule::test_skip_busy[kikimr0] [SKIPPED]
>> test_result_limits.py::TestResultLimits::test_quotas[kikimr0]
>> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] [GOOD]
|97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/mem_alloc/py3test >> test_result_limits.py::TestResultLimits::test_quotas[kikimr0] [GOOD]
|97.8%| [TM] {RESULT} ydb/tests/fq/mem_alloc/py3test
>> test_drain.py::TestHive::test_drain_on_stop [GOOD]
|97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [GOOD]
|97.9%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log}
|98.0%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... 
results_accumulator.log} >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::OutOfSpaceYQLUpsertFail-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 2043, MsgBus: 19129 2025-03-28T10:10:09.208499Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486800631112544613:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:09.209386Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/00038f/r3tmp/tmpVOfLTj/pdisk_1.dat 2025-03-28T10:10:09.774738Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:09.774845Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:09.780718Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:09.812918Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2043, node 1 2025-03-28T10:10:09.896400Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:09.896424Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:09.896433Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:09.896575Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19129 TClient is connected to server localhost:19129 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:10:10.525120Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:10:10.554924Z node 1 :FLAT_TX_SCHEMESHARD WARN: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2025-03-28T10:10:10.571324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480 waiting... 
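This KqpLimits::OutOfSpaceYQLUpsertFail run drives writes until the datashard rejects them; further down this block the shard reports "Cannot perform transaction: out of disk space" and the test prints "Got out of space. Successfully inserted 30 x 0 lines". A rough Python sketch of that drive-to-failure shape, where upsert_batch is a hypothetical stand-in for the YQL upsert the C++ test actually issues:

    def fill_until_out_of_space(upsert_batch, max_batches=10000):
        # Write fixed-size batches until the shard rejects the transaction;
        # the expected terminal status is OUT_OF_SPACE, matching the
        # "out of disk space" errors later in this block.
        inserted = 0
        for _ in range(max_batches):
            try:
                upsert_batch()  # hypothetical: one batch of ~1 MiB rows
            except RuntimeError as err:  # stand-in for the SDK status check
                if "OUT_OF_SPACE" in str(err):
                    return inserted  # expected terminal condition
                raise
            inserted += 1
        raise AssertionError("storage never reported OUT_OF_SPACE")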
2025-03-28T10:10:12.729491Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800643997447578:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:12.729609Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:12.729979Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486800643997447590:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:12.733348Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480 2025-03-28T10:10:12.758434Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486800643997447592:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2025-03-28T10:10:12.836378Z node 1 :TX_PROXY ERROR: Actor# [1:7486800643997447670:2703] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 6], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:10:14.208595Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800631112544613:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:10:14.212730Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2025-03-28T10:10:24.788482Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs 2025-03-28T10:10:24.788529Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:11:11.190727Z node 1 :TX_DATASHARD ERROR: CPU usage 65.8386 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/LargeTable] 2025-03-28T10:12:02.368278Z node 1 :OPS_COMPACT ERROR: Compact{72075186224037888.1.300, eph 74} end=2, 9 blobs 0r (max 600), put Spent{time=1.088s,wait=0.043s,interrupts=3} 2025-03-28T10:12:02.368403Z node 1 :TABLET_EXECUTOR ERROR: Leader{72075186224037888:1:317} Compact 189 on TGenCompactionParams{1001: gen 2 epoch 0, 4 parts} step 300, product {0 parts epoch 0} thrown 2025-03-28T10:12:02.381999Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486801116443858219:2338] TxId: 281474976715736. Ctx: { TraceId: 01jqe3se2k4m0c38jtfrev575x, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDA5NTc1NWEtNWYxMWRkMzQtYTg3NTVkYTAtM2M3OTgxYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ERROR: [WRONG_SHARD_STATE] Rejecting data TxId 281474976715736 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state); 2025-03-28T10:12:02.405425Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDA5NTc1NWEtNWYxMWRkMzQtYTg3NTVkYTAtM2M3OTgxYTg=, ActorId: [1:7486800643997447571:2338], ActorState: ExecuteState, TraceId: 01jqe3se2k4m0c38jtfrev575x, Create QueryResponse for error on request, msg: 2025-03-28T10:12:17.426065Z node 1 :TX_DATASHARD ERROR: CPU usage 62.3037 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037890 table: [/Root/LargeTable] 2025-03-28T10:12:31.515032Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923003912288}: tablet 72075186224037890 could not find a group for channel 0 pool /Root:test 2025-03-28T10:12:31.515111Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923003912288}: tablet 72075186224037890 could not find a group for channel 1 pool /Root:test 2025-03-28T10:12:31.515137Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923003912288}: tablet 72075186224037890 wasn't changed 2025-03-28T10:12:31.515163Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923003912288}: tablet 72075186224037890 skipped channel 0 2025-03-28T10:12:31.515228Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923003912288}: tablet 72075186224037890 skipped channel 1 2025-03-28T10:12:33.450054Z node 1 :TX_DATASHARD ERROR: Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 281474976715752 2025-03-28T10:12:33.450141Z node 1 :TX_DATASHARD ERROR: Prepare transaction failed. txid 281474976715752 at tablet 72075186224037890 errors: OUT_OF_SPACE (Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 281474976715752) | 2025-03-28T10:12:33.450237Z node 1 :TX_DATASHARD ERROR: Errors while proposing transaction txid 281474976715752 at tablet 72075186224037890 status: ERROR errors: OUT_OF_SPACE (Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 281474976715752) | 2025-03-28T10:12:33.450365Z node 1 :KQP_EXECUTER ERROR: ActorId: [1:7486801245292880304:2338] TxId: 281474976715752. Ctx: { TraceId: 01jqe3tbvj1cg2pc4an371jzck, Database: /Root, DatabaseId: /Root, SessionId: ydb://session/3?node_id=1&id=NDA5NTc1NWEtNWYxMWRkMzQtYTg3NTVkYTAtM2M3OTgxYTg=, CurrentExecutionId: , CustomerSuppliedId: , PoolId: default}. 
ERROR: [OUT_OF_SPACE] Cannot perform transaction: out of disk space at tablet 72075186224037890 txId 281474976715752; 2025-03-28T10:12:33.450657Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NDA5NTc1NWEtNWYxMWRkMzQtYTg3NTVkYTAtM2M3OTgxYTg=, ActorId: [1:7486800643997447571:2338], ActorState: ExecuteState, TraceId: 01jqe3tbvj1cg2pc4an371jzck, Create QueryResponse for error on request, msg: 2025-03-28T10:12:33.549485Z node 1 :HIVE ERROR: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923016175616}: tablet 72075186224037889 could not find a group for channel 1 pool /Root:test 2025-03-28T10:12:33.549542Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923016175616}: tablet 72075186224037889 wasn't changed 2025-03-28T10:12:33.549558Z node 1 :HIVE WARN: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{88923016175616}: tablet 72075186224037889 skipped channel 1 Got out of space. Successfully inserted 30 x 0 lines, each of size 1048576bytes |98.1%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |98.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL] Test command err: 2025-03-28T10:10:07.505144Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T10:10:07.505624Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T10:10:07.505716Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/00018c/r3tmp/tmpAwpKAC/pdisk_1.dat 2025-03-28T10:10:08.045892Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T10:10:08.045993Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T10:10:08.046054Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T10:10:08.046203Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-03-28T10:10:08.046242Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T10:10:08.213498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-03-28T10:10:08.216809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T10:10:08.218047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T10:10:08.221887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T10:10:08.222044Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T10:10:08.222255Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T10:10:08.223658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T10:10:08.226834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T10:10:08.226919Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T10:10:08.226961Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T10:10:08.227219Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T10:10:08.227265Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T10:10:08.227363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T10:10:08.227429Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T10:10:08.227544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T10:10:08.227579Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T10:10:08.228831Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T10:10:08.229485Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T10:10:08.229540Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T10:10:08.229683Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T10:10:08.229729Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T10:10:08.229797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T10:10:08.229844Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T10:10:08.229885Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T10:10:08.229994Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T10:10:08.230388Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T10:10:08.230417Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T10:10:08.230509Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T10:10:08.230537Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T10:10:08.230572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T10:10:08.230602Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T10:10:08.230659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-03-28T10:10:08.230713Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T10:10:08.230768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T10:10:08.235653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T10:10:08.236225Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T10:10:08.236292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
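The trace above walks a single scheme transaction through its whole pipeline: TCreateParts has no shards to create and moves txid 1:0 from state 2 to 3, NSubDomainState::TConfigureParts moves it from 3 to 128, and TPropose then hands the plan step to coordinator 72057594046316545. A compact sketch of that progression; the state codes and coordinator id are taken from the log, everything else is illustrative:

    # State codes observed in the trace; names follow the log's own phases.
    CREATE_PARTS, CONFIGURE_PARTS, PROPOSE = 2, 3, 128

    def progress_operation(op):
        # Mirrors the TTxOperationProgress loop: each call either advances
        # the state or, once ready to propose, records the coordinator send.
        if op["state"] == CREATE_PARTS:
            op["state"] = CONFIGURE_PARTS      # "no shards to create": 2 -> 3
        elif op["state"] == CONFIGURE_PARTS:
            op["state"] = PROPOSE              # TConfigureParts: 3 -> 128
        elif op["state"] == PROPOSE:
            op["sent_to_coordinator"] = 72057594046316545
        return op

    op = {"txid": "1:0", "state": CREATE_PARTS}
    while op["state"] != PROPOSE:
        progress_operation(op)
    progress_operation(op)  # final call sends the propose to the coordinator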
2025-03-28T10:10:08.237388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-03-28T10:10:08.238867Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T10:10:08.238920Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T10:10:08.238975Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-03-28T10:10:08.239124Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-03-28T10:10:08.239533Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T10:10:08.239582Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T10:10:08.239615Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T10:10:08.239731Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-03-28T10:10:08.239760Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-28T10:10:08.239828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-03-28T10:10:08.239864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-03-28T10:10:08.239912Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-03-28T10:10:08.280498Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-03-28T10:10:08.280624Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-03-28T10:10:08.280683Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:08.282374Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-28T10:10:08.282470Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-03-28T10:10:08.329124Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:08.330176Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:08.347245Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:08.423142Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-03-28T10:10:08.423927Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T10:10:08.423970Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T10:10:08.424017Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T10:10:08.424220Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-03-28T10:10:08.424270Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T10:10:08.424411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T10:10:08.424547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... pient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T10:12:24.842037Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T10:12:24.842058Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T10:12:24.842116Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-28T10:12:24.842142Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-28T10:12:24.842224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 22 shard idx 72057594046644480:7 data size 0 row count 0 2025-03-28T10:12:24.842272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037894 maps to shardIdx: 72057594046644480:7 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 22], pathId map=TableA, is column=0, is olap=0 2025-03-28T10:12:24.842321Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037894 followerId=0, pathId 22: RowCount 0, DataSize 0 2025-03-28T10:12:24.842343Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037894, followerId 0 2025-03-28T10:12:24.842393Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:7 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-03-28T10:12:24.842474Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T10:12:24.852961Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T10:12:24.853056Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T10:12:24.853090Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-28T10:12:24.927730Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-03-28T10:12:24.928341Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, received event# 269553162, Sender [1:1558:3199], Recipient [1:409:2404]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037895 TableLocalId: 24 Generation: 1 Round: 58 TableStats { DataSize: 54 RowCount: 2 IndexSize: 0 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 1 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 23 Memory: 119576 Storage: 142 } ShardState: 2 UserTablePartOwners: 72075186224037895 NodeId: 1 StartTime: 4950 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-28T10:12:24.928408Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-28T10:12:24.928460Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] state 'Ready' dataSize 54 rowCount 2 cpuUsage 0.0023 2025-03-28T10:12:24.928579Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] raw table stats: DataSize: 54 RowCount: 2 IndexSize: 0 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 1 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-28T10:12:24.928625Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-28T10:12:24.984023Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T10:12:24.984097Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T10:12:24.984124Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T10:12:24.984200Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-28T10:12:24.984233Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-28T10:12:24.984311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 24 shard idx 72057594046644480:8 data size 54 row count 2 2025-03-28T10:12:24.984359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037895 maps to shardIdx: 72057594046644480:8 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 24], pathId map=TableA, is column=0, is olap=0 2025-03-28T10:12:24.984393Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037895 followerId=0, pathId 24: RowCount 2, DataSize 54 2025-03-28T10:12:24.984415Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: 
datashardId 72075186224037895, followerId 0 2025-03-28T10:12:24.984468Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:8 with partCount# 1, rowCount# 2, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-03-28T10:12:24.984560Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T10:12:24.995675Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T10:12:24.995752Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T10:12:24.995788Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-28T10:12:25.067040Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:12:25.067123Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:12:25.067206Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:12:25.067241Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:12:25.091088Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T10:12:25.186131Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-03-28T10:12:25.265224Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:12:25.265314Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:12:25.265396Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:12:25.265426Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:12:25.288076Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037898 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T10:12:25.383509Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037899 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T10:12:25.477425Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:12:25.477518Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:12:25.477797Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDZkYWEyOGMtOTFmNDlhYTEtZTJjNzk1YTEtYTQ2MDcxZTA=, ActorId: [1:2001:3528], ActorState: ExecuteState, TraceId: 01jqe3p5417fymhv0n863ahtky, Create QueryResponse for error on request, msg: 2025-03-28T10:12:25.478039Z node 
1 :KQP_SLOW_LOG WARN: SessionId: ydb://session/3?node_id=1&id=ZDZkYWEyOGMtOTFmNDlhYTEtZTJjNzk1YTEtYTQ2MDcxZTA=, Slow query, duration: 600.000000s, status: GENERIC_ERROR, user: UNAUTHENTICATED, results: 0b, text: "RESTORE `MyCollection`;", parameters: 0b
2025-03-28T10:12:25.479698Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-03-28T10:12:25.479758Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS)
Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS)
TBackTrace::Capture()+28 (0x18F7423C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x19431380)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+3639 (0x48B40E97)
NKikimr::NTestSuiteIncrementalBackup::TTestCaseComplexRestoreBackupCollection::Execute_(NUnitTest::TTestContext&)+26163 (0x18BC4943)
std::__y1::__function::__func, void ()>::operator()()+280 (0x18B80448)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x194683A6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19437EF9)
NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()+1204 (0x18B7F2F4)
NUnitTest::TTestFactory::Execute()+2438 (0x194397C6)
NUnitTest::RunMain(int, char**)+5213 (0x1946291D)
??+0 (0x7F3448145D90)
__libc_start_main+128 (0x7F3448145E40)
_start+41 (0x1630C029)
|98.3%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log}
|98.3%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log}
>> test_workload.py::TestYdbLogWorkload::test[column] [GOOD]
>> test_alter_tiering.py::TestAlterTiering::test[many_tables] [FAIL]
>> test_workload.py::TestYdbLogWorkload::test[column] [FAIL]
|98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/log/tests/py3test >> test_workload.py::TestYdbLogWorkload::test[column] [FAIL]
|98.4%| [TM] {RESULT} ydb/tests/stress/log/tests/py3test
>> test_simple.py::TestSimple::test[alter_table]
>> test_simple.py::TestSimple::test[alter_table] [GOOD]
>> test_simple.py::TestSimple::test[alter_tablestore]
>> test_simple.py::TestSimple::test[alter_tablestore] [GOOD]
>> test_simple.py::TestSimple::test[table]
>> test_simple.py::TestSimple::test[table] [GOOD]
>> test_simple.py::TestSimple::test[tablestores]
>> test_simple.py::TestSimple::test[tablestores] [GOOD]
>> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load]
>> test_workload.py::TestYdbWorkload::test [GOOD]
>> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD]
|98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/multi_plane/py3test >> test_retry.py::TestRetry::test_low_rate[kikimr0] [GOOD]
|98.6%| [TA] $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... 
results_accumulator.log} |98.6%| [TA] {RESULT} $(B)/ydb/tests/fq/multi_plane/test-results/py3test/{meta.json ... results_accumulator.log} |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |98.7%| [TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] Test command err: 2025-03-28T10:09:51.154136Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T10:09:51.325164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T10:09:51.351205Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T10:09:51.351520Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T10:09:51.359904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:09:51.360122Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:09:51.360361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:09:51.360491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:09:51.360631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:09:51.360796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:09:51.360913Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:09:51.361036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:09:51.361164Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:09:51.361270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 
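The WARN/NOTICE pairs above are the columnshard registering its startup normalizer chain at TTxInitSchema time (Granules, Chunks, TablesCleaner, and so on; the registration continues just below with RestoreV1Chunks_V2 and RestoreV2Chunks), and TTxUpdateSchema then executes the chain one stage at a time, each reporting how many chunks it found before the executor switches to the next. A small Python sketch of that register-then-run-in-order pattern; the stage names are from the log, the runner itself is illustrative:

    NORMALIZERS = [  # registration order as logged at TTxInitSchema time
        "Granules", "Chunks", "TablesCleaner", "CleanGranuleId",
        "CleanInsertionDedup", "GCCountersNormalizer",
        "RestorePortionFromChunks", "SyncPortionFromChunks",
        "SyncMinSnapshotFromChunks", "RestoreV1Chunks_V2", "RestoreV2Chunks",
    ]

    def run_normalizers(find_chunks):
        # Execute every normalizer once, in registration order, the way
        # TTxUpdateSchema walks the chain; find_chunks is a hypothetical
        # callable returning the list of work items a stage discovered.
        for name in NORMALIZERS:
            chunks = find_chunks(name)  # "0 chunks found" on a fresh tablet
            print("normalizer=%s;chunks_found=%d" % (name, len(chunks)))

    run_normalizers(lambda name: [])  # prints chunks_found=0 for all 11 stages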
2025-03-28T10:09:51.361394Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:09:51.361514Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:09:51.392588Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T10:09:51.392959Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T10:09:51.393010Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T10:09:51.393184Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T10:09:51.393325Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T10:09:51.393406Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T10:09:51.393454Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T10:09:51.393571Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T10:09:51.393640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T10:09:51.393694Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T10:09:51.393730Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T10:09:51.393894Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T10:09:51.393965Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T10:09:51.394008Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T10:09:51.394037Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T10:09:51.394173Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T10:09:51.394227Z 
node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T10:09:51.394272Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T10:09:51.394301Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T10:09:51.394386Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T10:09:51.394428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T10:09:51.394459Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T10:09:51.394503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T10:09:51.394540Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T10:09:51.394570Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T10:09:51.394960Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=54; 2025-03-28T10:09:51.395059Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2025-03-28T10:09:51.395164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=48; 2025-03-28T10:09:51.395267Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=50; 2025-03-28T10:09:51.395437Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T10:09:51.395492Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T10:09:51.395523Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T10:09:51.395709Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T10:09:51.395755Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T10:09:51.395784Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T10:09:51.395936Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T10:09:51.395982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T10:09:51.396013Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T10:09:51.396213Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T10:09:51.396264Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T10:09:51.396296Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T10:09:51.396428Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T10:09:51.396472Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T10:09:51.396528Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... 
.843013Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15719:17677];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-03-28T10:16:53.327781Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T10:16:53.327878Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=14; 2025-03-28T10:16:53.328214Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=270; 2025-03-28T10:16:53.328253Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=330; 2025-03-28T10:16:53.332680Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T10:16:53.332762Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=11; 2025-03-28T10:16:53.339933Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=7086; 2025-03-28T10:16:53.347577Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=6395; 2025-03-28T10:16:53.347681Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7660; 2025-03-28T10:16:53.347832Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=93; 2025-03-28T10:16:53.347935Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=63; 2025-03-28T10:16:53.348116Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=143; 2025-03-28T10:16:53.348263Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=93; 2025-03-28T10:16:53.348461Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=161; 2025-03-28T10:16:53.348497Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=15691; 2025-03-28T10:16:53.352801Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T10:16:53.352872Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=11; 2025-03-28T10:16:53.359409Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=6461; 2025-03-28T10:16:53.385002Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=25500; 2025-03-28T10:16:53.385129Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=37; 2025-03-28T10:16:53.385199Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=27; 2025-03-28T10:16:53.385266Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2025-03-28T10:16:53.385312Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2025-03-28T10:16:53.385373Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=10; 2025-03-28T10:16:53.385475Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=51; 2025-03-28T10:16:53.385539Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2025-03-28T10:16:53.385631Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=58; 2025-03-28T10:16:53.385678Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-03-28T10:16:53.385741Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2025-03-28T10:16:53.385835Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=53; 2025-03-28T10:16:53.385925Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=55; 2025-03-28T10:16:53.385962Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=33042; 2025-03-28T10:16:53.386165Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2425692;raw_bytes=4011492;count=1;records=39328} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=111593408;raw_bytes=187115688;count=44;records=1805672} inactive {blob_bytes=178990352;raw_bytes=300417312;count=90;records=2897034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T10:16:53.386866Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15719:17677];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T10:16:53.386922Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15719:17677];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T10:16:53.387017Z node 1 :TX_COLUMNSHARD 
DEBUG: tablet_id=9437184;self_id=[1:15719:17677];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T10:16:53.387071Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-03-28T10:16:53.387266Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T10:16:53.387326Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T10:16:53.387509Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-03-28T10:16:53.387581Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-28T10:16:53.387626Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T10:16:53.387670Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T10:16:53.387712Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T10:16:53.387803Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T10:16:53.392248Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T10:16:53.394024Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T10:16:53.396035Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T10:16:53.396079Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T10:16:53.396105Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T10:16:53.396168Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T10:16:53.396234Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T10:16:53.396293Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-03-28T10:16:53.396371Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-28T10:16:53.396435Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T10:16:53.396491Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T10:16:53.396544Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T10:16:53.396624Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=1.000000s; 2025-03-28T10:16:53.396696Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15719:17677];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD]
Test command err:
2025-03-28T10:09:51.154116Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0 2025-03-28T10:09:51.325136Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor; 2025-03-28T10:09:51.342668Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished; 2025-03-28T10:09:51.343789Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184 2025-03-28T10:09:51.356487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:09:51.356773Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:09:51.357048Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:09:51.357210Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:09:51.357346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:09:51.357494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:09:51.357615Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:09:51.357774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:09:51.357898Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:09:51.358017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:09:51.358137Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:09:51.358279Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:09:51.384943Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T10:09:51.385298Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T10:09:51.385342Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T10:09:51.385479Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T10:09:51.387675Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T10:09:51.387795Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T10:09:51.387835Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T10:09:51.387915Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T10:09:51.387982Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T10:09:51.388012Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T10:09:51.388045Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T10:09:51.388163Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T10:09:51.388203Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T10:09:51.388232Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T10:09:51.388272Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T10:09:51.388402Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T10:09:51.388438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T10:09:51.388502Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T10:09:51.388531Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T10:09:51.388617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T10:09:51.388642Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T10:09:51.388660Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T10:09:51.388688Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-03-28T10:09:51.388711Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T10:09:51.388734Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T10:09:51.389049Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2025-03-28T10:09:51.389149Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=40; 2025-03-28T10:09:51.389208Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=30; 2025-03-28T10:09:51.389287Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=32; 2025-03-28T10:09:51.389438Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T10:09:51.389487Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T10:09:51.389528Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T10:09:51.389687Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T10:09:51.389716Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T10:09:51.389743Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T10:09:51.389925Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T10:09:51.389974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T10:09:51.389994Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T10:09:51.390639Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T10:09:51.390710Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T10:09:51.390747Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T10:09:51.390887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T10:09:51.390926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T10:09:51.390960Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... :54.584245Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15723:17681];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-03-28T10:16:55.098811Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T10:16:55.098905Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=10; 2025-03-28T10:16:55.099212Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=241; 2025-03-28T10:16:55.099249Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=299; 2025-03-28T10:16:55.105364Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T10:16:55.105471Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=15; 2025-03-28T10:16:55.114393Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=8810; 2025-03-28T10:16:55.123682Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=7686; 2025-03-28T10:16:55.123783Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=9287; 2025-03-28T10:16:55.123937Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=94; 2025-03-28T10:16:55.124048Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=72; 2025-03-28T10:16:55.124221Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=133; 2025-03-28T10:16:55.124342Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=85; 2025-03-28T10:16:55.124536Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=159; 2025-03-28T10:16:55.124571Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=19045; 2025-03-28T10:16:55.128171Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T10:16:55.128241Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=10; 2025-03-28T10:16:55.134567Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=6253; 2025-03-28T10:16:55.161213Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=26555; 2025-03-28T10:16:55.161350Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=37; 2025-03-28T10:16:55.161420Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=26; 2025-03-28T10:16:55.161463Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2025-03-28T10:16:55.161505Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2025-03-28T10:16:55.161551Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=9; 2025-03-28T10:16:55.161626Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=41; 2025-03-28T10:16:55.161671Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=9; 2025-03-28T10:16:55.161773Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=63; 2025-03-28T10:16:55.161818Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-03-28T10:16:55.161882Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2025-03-28T10:16:55.162008Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=82; 2025-03-28T10:16:55.162097Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=50; 2025-03-28T10:16:55.162136Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=33847; 2025-03-28T10:16:55.162318Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=2425692;raw_bytes=4011492;count=1;records=39328} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=111593408;raw_bytes=187115688;count=44;records=1805672} inactive {blob_bytes=178990352;raw_bytes=300417312;count=90;records=2897034} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T10:16:55.162971Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15723:17681];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T10:16:55.163033Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:15723:17681];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T10:16:55.163122Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15723:17681];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T10:16:55.163177Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15723:17681];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-03-28T10:16:55.163345Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T10:16:55.163409Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T10:16:55.163620Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-03-28T10:16:55.163696Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-28T10:16:55.163744Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T10:16:55.163789Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T10:16:55.163834Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T10:16:55.163958Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T10:16:55.168554Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T10:16:55.172120Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15723:17681];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T10:16:55.174170Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15723:17681];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T10:16:55.174247Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
2025-03-28T10:16:55.174276Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T10:16:55.174324Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15723:17681];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T10:16:55.174393Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15723:17681];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T10:16:55.174622Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15723:17681];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=22; 2025-03-28T10:16:55.174720Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15723:17681];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-28T10:16:55.174773Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15723:17681];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=22;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T10:16:55.174838Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15723:17681];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T10:16:55.174897Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15723:17681];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T10:16:55.175000Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15723:17681];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-03-28T10:16:55.175072Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:15723:17681];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
>> test_tpch.py::TestTpchS1::test_tpch[5] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[7]
>> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [FAIL]
>> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete
>> test_tpch.py::TestTpchS1::test_tpch[7] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[8]
>> test_tpch.py::TestTpchS1::test_tpch[8] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[9]
>> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[9] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[10]
>> test_insert.py::TestInsert::test[read_data_during_bulk_upsert]
>> test_tpch.py::TestTpchS1::test_tpch[10] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[11]
>> test_tpch.py::TestTpchS1::test_tpch[11] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[12]
>> test_tpch.py::TestTpchS1::test_tpch[12] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[13]
>> test_tpch.py::TestTpchS1::test_tpch[13] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[14]
>> test_tpch.py::TestTpchS1::test_tpch[14] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[15]
>> test_tpch.py::TestTpchS1::test_tpch[15] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[16]
>> test_tpch.py::TestTpchS1::test_tpch[16] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[17]
>> test_tpch.py::TestTpchS1::test_tpch[17] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[18]
>> test_tpch.py::TestTpchS1::test_tpch[18] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[19]
>> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL]
>> test_tpch.py::TestTpchS1::test_tpch[19] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[20]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL]
Test command err:
2025-03-28T10:10:14.114146Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T10:10:14.114470Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T10:10:14.114540Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/km4y/0001ea/r3tmp/tmpTZyxkL/pdisk_1.dat 2025-03-28T10:10:14.494132Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2640, node 1 2025-03-28T10:10:14.774873Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:10:14.774940Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:10:14.774971Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:10:14.775469Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T10:10:14.777975Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T10:10:14.873271Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:14.873463Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:14.888409Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:61988 2025-03-28T10:10:15.481908Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T10:10:18.202373Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T10:10:18.226172Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:18.226259Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:18.264427Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T10:10:18.266474Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:18.493053Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:10:18.493687Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:10:18.494320Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:10:18.494492Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:10:18.494805Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:10:18.494899Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:10:18.494975Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:10:18.495055Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:10:18.495158Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:10:18.664814Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:10:18.664954Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:10:18.678860Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:10:18.832378Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:10:18.885044Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T10:10:18.885159Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T10:10:18.920932Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T10:10:18.921077Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T10:10:18.921266Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T10:10:18.921320Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T10:10:18.921362Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T10:10:18.921401Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T10:10:18.921438Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T10:10:18.921475Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T10:10:18.921840Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T10:10:18.952336Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T10:10:18.952421Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1868:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T10:10:18.959778Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1882:2608] 2025-03-28T10:10:18.966198Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1916:2623] 2025-03-28T10:10:18.966427Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1916:2623], schemeshard id = 72075186224037897 2025-03-28T10:10:18.972976Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T10:10:18.989636Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T10:10:18.989712Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T10:10:18.989822Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T10:10:19.004135Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T10:10:19.010246Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T10:10:19.010381Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T10:10:19.183808Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T10:10:19.359737Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T10:10:19.440366Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T10:10:20.307871Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2237:3071], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:10:20.307994Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:10:20.324337Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897
2025-03-28T10:10:20.459903Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-28T10:10:20.460139Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-28T10:10:20.460441Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-28T10:10:20.460563Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-28T10:10:20.460718Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-28T10:10:20.460843Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-28T10:10:20.460968Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-28T10:10:20.461101Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-28T10:10:20.461215Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-28T10:10:20.461324Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-28T10:10:20.461519Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-28T10:10:20.461693Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2328:2853];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-28T10:10:20.492338Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-03-28T10:10:20.492457Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... requests. OperationId=operationId
2025-03-28T10:19:28.202204Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-28T10:19:29.451956Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1
2025-03-28T10:19:29.452102Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2
2025-03-28T10:19:29.472825Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-28T10:19:29.472875Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:29.472900Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-28T10:19:30.723184Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-28T10:19:30.723256Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:30.723283Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-28T10:19:31.903726Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-28T10:19:31.903792Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:31.903819Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-28T10:19:33.125254Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout
2025-03-28T10:19:33.146196Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-28T10:19:33.146261Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:33.146288Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-28T10:19:34.390589Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-28T10:19:34.390709Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:34.390784Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-28T10:19:35.560129Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1
2025-03-28T10:19:35.560312Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2
2025-03-28T10:19:35.581559Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-28T10:19:35.581640Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:35.581667Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-28T10:19:36.784874Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-28T10:19:36.784939Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:36.784966Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-28T10:19:37.978822Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-28T10:19:37.978894Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:37.978921Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-28T10:19:39.132348Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout
2025-03-28T10:19:39.153512Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-28T10:19:39.153579Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:39.153604Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-28T10:19:40.497694Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-28T10:19:40.497760Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:40.497788Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-28T10:19:41.729730Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1
2025-03-28T10:19:41.729892Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2
2025-03-28T10:19:41.750747Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-28T10:19:41.750802Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:41.750830Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-28T10:19:42.931116Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-28T10:19:42.931184Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:42.931211Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-28T10:19:44.091830Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-28T10:19:44.091894Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:44.091920Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-28T10:19:45.299521Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout
2025-03-28T10:19:45.320523Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-28T10:19:45.320587Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:45.320623Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-28T10:19:46.648324Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-28T10:19:46.648388Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:46.648415Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-28T10:19:47.909740Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1
2025-03-28T10:19:47.909897Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2
2025-03-28T10:19:47.931165Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-28T10:19:47.931237Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:47.931265Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-28T10:19:49.180089Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-28T10:19:49.180153Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:49.180180Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-28T10:19:50.378403Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-28T10:19:50.378492Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:50.378519Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-28T10:19:51.558203Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout
2025-03-28T10:19:51.579519Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-28T10:19:51.579584Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:51.579611Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
2025-03-28T10:19:52.927829Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal
2025-03-28T10:19:52.927898Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:52.927924Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
2025-03-28T10:19:54.113034Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1
2025-03-28T10:19:54.113194Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2
2025-03-28T10:19:54.134233Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze
2025-03-28T10:19:54.134297Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId
2025-03-28T10:19:54.134324Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests.
(TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.
ydb/library/actors/testlib/test_runtime.cpp:716:
TBackTrace::Capture()+28 (0x18BB873C)
TWithBackTrace::TWithBackTrace<>()+80 (0x187E0630)
NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+485 (0x187B4CB5)
NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TTestCaseAnalyzeRebootColumnShard::Execute_(NUnitTest::TTestContext&)+4263 (0x187D1857)
std::__y1::__function::__func, void ()>::operator()()+280 (0x187DC6E8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x190A3FA6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1907CBB9)
NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TCurrentTest::Execute()+1204 (0x187DB8B4)
NUnitTest::TTestFactory::Execute()+2438 (0x1907E486)
NUnitTest::RunMain(int, char**)+5213 (0x1909E51D)
??+0 (0x7FA87CE33D90)
__libc_start_main+128 (0x7FA87CE33E40)
_start+41 (0x16144029)
|99.0%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|99.0%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test
2025-03-28 10:19:59,720 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-28 10:19:59,903 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
pid rss ref pdirt
955698 723M 728M 644M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/km4y/000332/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu
956298 2.7G 2.7G 2.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/km4y/000332/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_m
958345 450M 450M 416M └─ moto_server s3 --port 18933
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test
    if not self.wait_for(
  File "ydb/tests/olap/ttl_tiering/base.py", line 70, in wait_for
    time.sleep(1)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ..._root/km4y/000332', '--source-root', '/home/runner/.ya/build/build_root/km4y/000332/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/km4y/000332/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("..._root/km4y/000332', '--source-root', '/home/runner/.ya/build/build_root/km4y/000332/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/km4y/000332/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout",), {})
>> test_tpch.py::TestTpchS1::test_tpch[20] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[21]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD]
Test command err:
2025-03-28T10:10:21.177355Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-28T10:10:21.287890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-28T10:10:21.313162Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-28T10:10:21.313709Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-28T10:10:21.322248Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-28T10:10:21.322494Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-28T10:10:21.322763Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-28T10:10:21.322887Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-28T10:10:21.322991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-28T10:10:21.323143Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-28T10:10:21.323274Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-28T10:10:21.323402Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks;
2025-03-28T10:10:21.323532Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks;
2025-03-28T10:10:21.323636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks;
2025-03-28T10:10:21.323764Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2;
2025-03-28T10:10:21.323881Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks;
2025-03-28T10:10:21.354857Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184
2025-03-28T10:10:21.355217Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules;
2025-03-28T10:10:21.355287Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules;
2025-03-28T10:10:21.355537Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-28T10:10:21.355733Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1;
2025-03-28T10:10:21.355839Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks;
2025-03-28T10:10:21.355896Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks;
2025-03-28T10:10:21.356026Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found;
2025-03-28T10:10:21.356110Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2;
2025-03-28T10:10:21.356165Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner;
2025-03-28T10:10:21.356205Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner;
2025-03-28T10:10:21.356385Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found;
2025-03-28T10:10:21.356477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4;
2025-03-28T10:10:21.356520Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId;
2025-03-28T10:10:21.356557Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId;
2025-03-28T10:10:21.356740Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found;
2025-03-28T10:10:21.356809Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6;
2025-03-28T10:10:21.356857Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup;
2025-03-28T10:10:21.356895Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup;
2025-03-28T10:10:21.356978Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8;
2025-03-28T10:10:21.357017Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer;
2025-03-28T10:10:21.357051Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer;
2025-03-28T10:10:21.357107Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9;
2025-03-28T10:10:21.357149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks;
2025-03-28T10:10:21.357205Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks;
2025-03-28T10:10:21.357673Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=55;
2025-03-28T10:10:21.357791Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=53;
2025-03-28T10:10:21.357903Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=59;
2025-03-28T10:10:21.358030Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=50;
2025-03-28T10:10:21.358225Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10;
2025-03-28T10:10:21.358298Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks;
2025-03-28T10:10:21.358339Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks;
2025-03-28T10:10:21.358555Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11;
2025-03-28T10:10:21.358606Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks;
2025-03-28T10:10:21.358639Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks;
2025-03-28T10:10:21.358831Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13;
2025-03-28T10:10:21.358895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2;
2025-03-28T10:10:21.358929Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2;
2025-03-28T10:10:21.359154Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15;
2025-03-28T10:10:21.359207Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks;
2025-03-28T10:10:21.359249Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks;
2025-03-28T10:10:21.359393Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16;
2025-03-28T10:10:21.359447Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished;
2025-03-28T10:10:21.359497Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ...
OLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=300278;
2025-03-28T10:20:08.151983Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:13998:15941];tablet_id=9437184;parent=[1:13959:15909];fline=manager.cpp:82;event=ask_data;request=request_id=399;1={portions_count=193};;
2025-03-28T10:20:08.153725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13959:15909];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=193;path_id=1;
2025-03-28T10:20:08.163881Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13959:15909];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished;
2025-03-28T10:20:08.707271Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184
2025-03-28T10:20:08.707355Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=10;
2025-03-28T10:20:08.710326Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=2901;
2025-03-28T10:20:08.717745Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=6397;
2025-03-28T10:20:08.717834Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7445;
2025-03-28T10:20:08.717955Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=78;
2025-03-28T10:20:08.718039Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=53;
2025-03-28T10:20:08.718164Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=95;
2025-03-28T10:20:08.718266Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=73;
2025-03-28T10:20:08.718417Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=122;
2025-03-28T10:20:08.718448Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=11054;
2025-03-28T10:20:08.724360Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184
2025-03-28T10:20:08.724426Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=10;
2025-03-28T10:20:08.752899Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=28397;
2025-03-28T10:20:08.800725Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=47726;
2025-03-28T10:20:08.800836Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=33;
2025-03-28T10:20:08.800890Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=19;
2025-03-28T10:20:08.800928Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7;
2025-03-28T10:20:08.800969Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8;
2025-03-28T10:20:08.801010Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7;
2025-03-28T10:20:08.801078Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=37;
2025-03-28T10:20:08.801116Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7;
2025-03-28T10:20:08.801195Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=47;
2025-03-28T10:20:08.801234Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6;
2025-03-28T10:20:08.801307Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=40;
2025-03-28T10:20:08.801422Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=64;
2025-03-28T10:20:08.801499Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=44;
2025-03-28T10:20:08.801531Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=77062;
2025-03-28T10:20:08.801691Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=128676200;raw_bytes=191860770;count=66;records=1845000} inactive {blob_bytes=245561044;raw_bytes=360315949;count=127;records=3499542} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184
2025-03-28T10:20:08.802172Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13959:15909];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork;
2025-03-28T10:20:08.802226Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13959:15909];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive;
2025-03-28T10:20:08.802296Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13959:15909];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL;
2025-03-28T10:20:08.802342Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13959:15909];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0;
2025-03-28T10:20:08.802572Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-03-28T10:20:08.802631Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-03-28T10:20:08.802850Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=21;
2025-03-28T10:20:08.802910Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101;
2025-03-28T10:20:08.802949Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-03-28T10:20:08.802991Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes;
2025-03-28T10:20:08.803023Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-03-28T10:20:08.803105Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
2025-03-28T10:20:08.807286Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-03-28T10:20:08.810242Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13959:15909];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184;
2025-03-28T10:20:08.812364Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13959:15909];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;
2025-03-28T10:20:08.812405Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats.
2025-03-28T10:20:08.812431Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184
2025-03-28T10:20:08.812474Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13959:15909];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0;
2025-03-28T10:20:08.812530Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13959:15909];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0;
2025-03-28T10:20:08.812762Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13959:15909];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=21;
2025-03-28T10:20:08.812826Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13959:15909];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101;
2025-03-28T10:20:08.812867Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13959:15909];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000;
2025-03-28T10:20:08.812909Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13959:15909];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes;
2025-03-28T10:20:08.812950Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13959:15909];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes;
2025-03-28T10:20:08.813035Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13959:15909];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.998000s;
2025-03-28T10:20:08.813083Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13959:15909];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
>> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD]
Test command err:
2025-03-28T10:10:21.106240Z node 1 :BLOB_CACHE NOTICE: MaxCacheDataSize: 20971520 InFlightDataSize: 0
2025-03-28T10:10:21.206240Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-28T10:10:21.230340Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:139:2171];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-28T10:10:21.231137Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 9437184
2025-03-28T10:10:21.238484Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-28T10:10:21.238670Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:10:21.238877Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:10:21.238956Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:10:21.239042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:10:21.239135Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:10:21.239206Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:10:21.239315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:10:21.239391Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:10:21.239459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:10:21.239562Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:10:21.239674Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;self_id=[1:139:2171];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:10:21.266500Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 9437184 2025-03-28T10:10:21.266857Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T10:10:21.266913Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T10:10:21.267110Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T10:10:21.267283Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T10:10:21.267374Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T10:10:21.267426Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T10:10:21.267525Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T10:10:21.267587Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T10:10:21.267630Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T10:10:21.267659Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T10:10:21.267820Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T10:10:21.267896Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T10:10:21.267937Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T10:10:21.267967Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T10:10:21.268123Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T10:10:21.268177Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T10:10:21.268221Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T10:10:21.268253Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T10:10:21.268318Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T10:10:21.268350Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T10:10:21.268379Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T10:10:21.268423Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2025-03-28T10:10:21.268457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T10:10:21.268486Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T10:10:21.268908Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=42; 2025-03-28T10:10:21.269007Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=46; 2025-03-28T10:10:21.269091Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=44; 2025-03-28T10:10:21.269178Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=31; 2025-03-28T10:10:21.269342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T10:10:21.269399Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T10:10:21.269435Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2025-03-28T10:10:21.269622Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T10:10:21.269666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T10:10:21.269695Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=13;type=SyncMinSnapshotFromChunks; 2025-03-28T10:10:21.269847Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T10:10:21.269904Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T10:10:21.269933Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2025-03-28T10:10:21.270092Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T10:10:21.270149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T10:10:21.270177Z node 1 :TX_COLUMNSHARD NOTICE: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2025-03-28T10:10:21.270314Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T10:10:21.270361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T10:10:21.270402Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:157;step=TTxUpdateSchema.Execute_Finish; ... LUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=314970; 2025-03-28T10:20:10.317378Z node 1 :TX_COLUMNSHARD INFO: self_id=[1:13990:15933];tablet_id=9437184;parent=[1:13951:15901];fline=manager.cpp:82;event=ask_data;request=request_id=399;1={portions_count=193};; 2025-03-28T10:20:10.320053Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13951:15901];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1452;size=193;path_id=1; 2025-03-28T10:20:10.331178Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13951:15901];process=Enqueue;ev=NKikimr::NOlap::NDataAccessorControl::TEvAskTabletDataAccessors;consumer=ANALYZE;event=TTxAskPortionChunks::Execute;fline=columnshard_impl.cpp:1503;stage=finished; 2025-03-28T10:20:10.894449Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T10:20:10.894533Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=12; 2025-03-28T10:20:10.897552Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:insert_tableLoadingTime=2951; 2025-03-28T10:20:10.904572Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/insert_table;fline=common_data.cpp:29;InsertTableLoadingTime=5982; 2025-03-28T10:20:10.904650Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:insert_tableLoadingTime=7039; 2025-03-28T10:20:10.904766Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=71; 2025-03-28T10:20:10.904853Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=55; 2025-03-28T10:20:10.904986Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=101; 2025-03-28T10:20:10.905091Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=75; 2025-03-28T10:20:10.905246Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=121; 2025-03-28T10:20:10.905277Z node 1 :TX_COLUMNSHARD INFO: 
tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=10705; 2025-03-28T10:20:10.908951Z node 1 :TX_COLUMNSHARD DEBUG: TTxInit.Execute at tablet 9437184 2025-03-28T10:20:10.909011Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;PRECHARGE:composite_initLoadingTime=10; 2025-03-28T10:20:10.936515Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=27427; 2025-03-28T10:20:10.982768Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=46143; 2025-03-28T10:20:10.982895Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:long_txLoadingTime=38; 2025-03-28T10:20:10.982966Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:long_txLoadingTime=27; 2025-03-28T10:20:10.983007Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2025-03-28T10:20:10.983049Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2025-03-28T10:20:10.983093Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2025-03-28T10:20:10.983162Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=36; 2025-03-28T10:20:10.983207Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2025-03-28T10:20:10.983292Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=52; 2025-03-28T10:20:10.983334Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2025-03-28T10:20:10.983392Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=25; 2025-03-28T10:20:10.983480Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=51; 2025-03-28T10:20:10.983557Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=45; 2025-03-28T10:20:10.983597Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=74539; 2025-03-28T10:20:10.983772Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=128676200;raw_bytes=191860770;count=66;records=1845000} inactive {blob_bytes=245561044;raw_bytes=360315949;count=127;records=3499542} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2025-03-28T10:20:10.984310Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13951:15901];process=SwitchToWork;fline=columnshard.cpp:77;event=initialize_shard;step=SwitchToWork; 2025-03-28T10:20:10.984369Z node 1 :TX_COLUMNSHARD INFO: tablet_id=9437184;self_id=[1:13951:15901];process=SwitchToWork;fline=columnshard.cpp:80;event=initialize_shard;step=SignalTabletActive; 2025-03-28T10:20:10.984440Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];process=SwitchToWork;fline=columnshard_impl.cpp:1616;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2025-03-28T10:20:10.984486Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];process=SwitchToWork;fline=column_engine_logs.cpp:496;event=OnTieringModified;new_count_tierings=0; 2025-03-28T10:20:10.984745Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T10:20:10.984809Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T10:20:10.985024Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=21; 2025-03-28T10:20:10.985094Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-28T10:20:10.985158Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T10:20:10.985214Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T10:20:10.985253Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T10:20:10.985358Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; 2025-03-28T10:20:10.990438Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T10:20:10.994391Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:253;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2025-03-28T10:20:10.996548Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:242;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2025-03-28T10:20:10.996587Z node 1 :TX_COLUMNSHARD DEBUG: Send periodic stats. 
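The TX_COLUMNSHARD records above are flat runs of semicolon-delimited key=value pairs (tablet_id=...;process=...;fline=...;event=...), with per-stage timings reported as <stage>LoadingTime=<value>. A minimal sketch, assuming only the layout visible in this excerpt, of pulling those counters out of a captured log; the helper name and the regular expression are ours, not part of ydbd or ya:

    import re

    # Stage timings appear as e.g. "EXECUTE:composite_initLoadingTime=74539;" inside
    # semicolon-delimited key=value records. The pattern below is inferred from the
    # excerpt above only; this is a hypothetical helper, not part of ydbd or ya.
    LOADING_TIME = re.compile(r"([A-Za-z_:]+LoadingTime)=(\d+)")

    def extract_loading_times(log_text: str) -> dict[str, list[int]]:
        """Collect every <stage>LoadingTime counter found in the log text."""
        times: dict[str, list[int]] = {}
        for stage, value in LOADING_TIME.findall(log_text):
            times.setdefault(stage, []).append(int(value))
        return times

    # Example against a record taken verbatim from the log above:
    record = ("tablet_id=9437184;event=initialize_shard;"
              "fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=74539;")
    assert extract_loading_times(record) == {"EXECUTE:composite_initLoadingTime": [74539]}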
2025-03-28T10:20:10.996608Z node 1 :TX_COLUMNSHARD DEBUG: Disabled periodic stats at tablet 9437184 2025-03-28T10:20:10.996657Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T10:20:10.996717Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T10:20:10.996955Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=21; 2025-03-28T10:20:10.997023Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=101; 2025-03-28T10:20:10.997070Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=21;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T10:20:10.997118Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T10:20:10.997159Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T10:20:10.997257Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.999000s; 2025-03-28T10:20:10.997308Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=9437184;self_id=[1:13951:15901];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes; ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete 2025-03-28 10:19:59,783 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-28 10:20:00,110 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 955697 686M 690M 605M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/km4y/000331/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu 956299 6.7G 6.7G 6.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/km4y/000331/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_de 958363 393M 393M 359M └─ moto_server s3 --port 17851 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in 
_multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 269, in test_ttl_delete self.ydb_client.query(""" File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 200, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...', '/home/runner/.ya/build/build_root/km4y/000331', '--source-root', '/home/runner/.ya/build/build_root/km4y/000331/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/km4y/000331/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 
'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...', '/home/runner/.ya/build/build_root/km4y/000331', '--source-root', '/home/runner/.ya/build/build_root/km4y/000331/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/km4y/000331/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout",), {}) |99.3%| [TA] $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} |99.4%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} |99.4%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} |99.5%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/s3_import/py3test >> test_tpch_import.py::TestS3TpchImport::test_import_and_export 2025-03-28 10:20:30,307 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-28 10:20:30,734 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
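The ttl_tiering timeout above is raised while the test sits inside the YDB Python SDK's retrying query call (ydb/tests/olap/common/ydb_client.py, reaching contrib/python/ydb/py3/ydb/query/pool.py:202 in the traceback), and the s3_import failure below blocks at the same frame. A minimal sketch of that call pattern using the public ydb-python-sdk API; the endpoint, database, and statement are placeholders rather than values from this run, and a reachable server is assumed:

    import ydb  # public ydb-python-sdk, vendored as contrib/python/ydb in the tracebacks

    # Placeholder connection parameters only; not taken from the log.
    driver = ydb.Driver(endpoint="grpc://localhost:2136", database="/Root")
    driver.wait(timeout=5)  # fail fast if the server is unreachable

    pool = ydb.QuerySessionPool(driver)
    # execute_with_retries() is the frame visible at ydb/query/pool.py:202 in both
    # tracebacks: it retries the statement and materializes every result set, so a
    # server that stops answering leaves the caller blocked in grpc waits -- which
    # is where the 600-second wrapper timeout caught these tests.
    result_sets = pool.execute_with_retries("SELECT 1 AS one;")
    print(result_sets[0].rows[0].one)

    pool.stop()
    driver.stop()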
Process tree before termination: pid rss ref pdirt 964219 623M 626M 540M ydb-tests-olap-s3_import --basetemp /home/runner/.ya/build/build_root/km4y/00029b/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module 967765 9.8G 9.8G 9.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/km4y/00029b/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpc 973236 478M 478M 443M └─ moto_server s3 --port 10138 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in 
_multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/s3_import/test_tpch_import.py", line 109, in test_import_and_export self.validate_table("from_s3") File "ydb/tests/olap/s3_import/test_tpch_import.py", line 12, in validate_table result_sets = self.ydb_client.query(f""" File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 200, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) contrib/python/pytest/py3/_pytest/main.py:306: PluggyTeardownRaisedWarning: A plugin raised an exception during an old-style hookwrapper teardown. Plugin: terminalreporter, Hook: pytest_sessionfinish SeveralDaemonErrors: Daemon failed with message: Bad exit_code.. Process exit_code = 100. 
Stdout file name: /home/runner/.ya/build/build_root/km4y/00029b/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export/cluster/node_1/stdout Stderr file name: /home/runner/.ya/build/build_root/km4y/00029b/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export/cluster/node_1/stderr Stderr content: GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option Current KQP shutdown state: spent 0 seconds, not started yet warning: address range table at offset 0x10c0 has a premature terminator entry at offset 0x10d0 ================================================================= ==967765==ERROR: LeakSanitizer: detected memory leaks Indirect leak of 3792 byte(s) in 1 object(s) allocated from: #0 0x1d6662cd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x468183df in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:12 #2 0x468183df in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 #3 0x4680a8a9 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 #4 0x4680a8a9 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> const&, TBasicString> const&, NYql::TYtGatewayConfig const*, TIntrusivePtr>, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/yt/yql/providers/yt/provider/yql_yt_provider.cpp:345:20 #5 0x4b71d66f in InitYtProvider /-S/ydb/core/kqp/host/kqp_host.cpp:1864:38 #6 0x4b71d66f in NKikimr::NKqp::(anonymous namespace)::TKqpHost::Init(NYql::EKikimrQueryType) /-S/ydb/core/kqp/host/kqp_host.cpp:1939:17 #7 0x4b71a130 in NKikimr::NKqp::(anonymous namespace)::TKqpHost::SetupSession(NYql::EKikimrQueryType) /-S/ydb/core/kqp/host/kqp_host.cpp:2020:9 #8 0x4b716bc8 in NKikimr::NKqp::(anonymous namespace)::TKqpHost::SetupYqlTransformer(NYql::EKikimrQueryType) /-S/ydb/core/kqp/host/kqp_host.cpp:2035:9 #9 0x4b77e454 in NKikimr::NKqp::(anonymous namespace)::TKqpHost::PrepareQueryInternal(NKikimr::NKqp::TKqpQueryRef const&, TIntrusivePtr>, NYql::EKikimrQueryType, NKikimr::NKqp::IKqpHost::TPrepareSettings const&, NYql::TExprContext&) /-S/ydb/core/kqp/host/kqp_host.cpp:1595:9 #10 0x4b705fef in operator() /-S/ydb/core/kqp/host/kqp_host.cpp:1238:24 #11 0x4b705fef in CheckedProcess /-S/ydb/core/kqp/host/kqp_host.cpp:830:28 #12 0x4b705fef in CheckedProcessQuery<(lambda at /-S/ydb/core/kqp/host/kqp_host.cpp:1237:13)> /-S/ydb/core/kqp/host/kqp_host.cpp:853:12 #13 0x4b705fef in NKikimr::NKqp::(anonymous namespace)::TKqpHost::PrepareGenericQuery(NKikimr::NKqp::TKqpQueryRef const&, NKikimr::NKqp::IKqpHost::TPrepareSettings const&, TIntrusivePtr>) /-S/ydb/core/kqp/host/kqp_host.cpp:1236:16 #14 0x4b3393b5 in NKikimr::NKqp::TKqpCompileActor::StartCompilation(NActors::TActorContext const&) /-S/ydb/core/kqp/compile_service/kqp_compile_actor.cpp:273:47 #15 0x4b337067 in Bootstrap /-S/ydb/core/kqp/compile_service/kqp_compile_actor.cpp:110:17 #16 0x4b337067 in NActors::TActorBootstrapped::StateBootstrap(TAutoPtr&) /-S/ydb/library/actors/core/actor_bootstrapped.h:22:22 #17 0x1fee43ac in NActors::IActor::Receive(TAutoPtr&) /-S/ydb/library/actors/core/actor.cpp:280:13 #18 0x1ff8d0a4 in NActors::TExecutorThread::Execute(NActors::TMailbox*, bool) /-S/ydb/library/actors/core/executor_thread.cpp:269:28 #19 0x1ff95dce in 
NActors::TExecutorThread::ProcessExecutorPool()::$_0::operator()(NActors::TMailbox*, bool) const /-S/ydb/library/actors/core/executor_thread.cpp:460:39 #20 0x1ff95329 in NActors::TExecutorThread::ProcessExecutorPool() /-S/ydb/library/actors/core/executor_thread.cpp:512:13 #21 0x1ff972be in NActors::TExecutorThread::ThreadProc() /-S/ydb/library/actors/core/executor_thread.cpp:538:9 #22 0x1d986c04 in (anonymous namespace)::TPosixThread::ThreadProxy(void*) /-S/util/system/thread.cpp:244:20 #23 0x1d630288 in asan_thread_start(void*) /-S/contrib/libs/clang18-rt/lib/asan/asan_interceptors.cpp:239:28 Indirect leak of 3128 byte(s) in 1 object(s) allocated from: #0 0x1d6662cd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x465fb076 in __libcpp_operator_new /-S/contrib/libs/cxxsupp/libcxx/include/new:265:10 #2 0x465fb076 in __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 #3 0x465fb076 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 #4 0x465fb076 in initialize_dynamic /-S/util/generic/hash_table.h:239:35 #5 0x465fb076 in initialize_buckets_dynamic /-S/util/generic/hash_table.h:912:17 #6 0x465fb076 in THashTable> const, TIntrusivePtr>>, TBasicString>, THash>>, TSelect1st, TEqualTo>>, std::__y1::allocator< ... g> const&) /-S/yql/essentials/core/file_storage/storage.cpp:462:16 #18 0x45c0270c in NYql::TFileStorageImpl::TFileStorageImpl(NYql::TFileStorageConfig const&, std::__y1::vector>, std::__y1::allocator>>> const&) /-S/yql/essentials/core/file_storage/file_storage.cpp:47:11 #19 0x45c01bc0 in NYql::CreateFileStorage(NYql::TFileStorageConfig const&, std::__y1::vector>, std::__y1::allocator>>> const&) /-S/yql/essentials/core/file_storage/file_storage.cpp:342:16 #20 0x456521e2 in NKikimr::NKqp::MakeYtGateway(NKikimr::NMiniKQL::IFunctionRegistry const*, NKikimrConfig::TQueryServiceConfig const&) /-S/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp:29:44 #21 0x45654cd0 in NKikimr::NKqp::TKqpFederatedQuerySetupFactoryDefault::TKqpFederatedQuerySetupFactoryDefault(NActors::TActorSystemSetup*, NKikimr::TAppData const*, NKikimrConfig::TAppConfig const&) /-S/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp:88:21 #22 0x45657aed in construct /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:165:24 #23 0x45657aed in construct /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:320:9 #24 0x45657aed in __shared_ptr_emplace, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:296:5 #25 0x45657aed in allocate_shared, NActors::TActorSystemSetup *&, const NKikimr::TAppData *&, const NKikimrConfig::TAppConfig &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:875:51 #26 0x45657aed in make_shared /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:883:10 #27 0x45657aed in NKikimr::NKqp::MakeKqpFederatedQuerySetupFactory(NActors::TActorSystemSetup*, NKikimr::TAppData const*, NKikimrConfig::TAppConfig const&) /-S/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp:177:16 #28 0x42b8d69c in NKikimr::NKikimrServicesInitializers::TKqpServiceInitializer::InitializeServices(NActors::TActorSystemSetup*, NKikimr::TAppData const*) /-S/ydb/core/driver_lib/run/kikimr_services_initializers.cpp:2135:43 #29 0x4faeb322 in NKikimr::TServiceInitializersList::InitializeServices(NActors::TActorSystemSetup*, NKikimr::TAppData const*) /-S/ydb/core/driver_lib/run/service_initializer.cpp:13:29 #30 0x3de3c833 in 
NKikimr::TKikimrRunner::InitializeActorSystem(NKikimr::TKikimrRunConfig const&, TIntrusivePtr>, NKikimr::TBasicKikimrServicesMask const&) /-S/ydb/core/driver_lib/run/run.cpp:1349:26 #31 0x3de53bc5 in NKikimr::TKikimrRunner::CreateKikimrRunner(NKikimr::TKikimrRunConfig const&, std::__y1::shared_ptr) /-S/ydb/core/driver_lib/run/run.cpp:2003:13 #32 0x3b88fbe6 in NKikimr::MainRun(NKikimr::TKikimrRunConfig const&, std::__y1::shared_ptr) /-S/ydb/core/driver_lib/run/main.cpp:44:43 #33 0x3dd3b2d4 in NKikimr::NDriverClient::TClientCommandServer::Run(NYdb::NConsoleClient::TClientCommand::TConfig&) /-S/ydb/core/driver_lib/cli_utils/cli_cmds_server.cpp:51:12 #34 0x3b90a2b5 in NYdb::NConsoleClient::TClientCommandTree::Run(NYdb::NConsoleClient::TClientCommand::TConfig&) /-S/ydb/public/lib/ydb_cli/common/command.cpp:465:33 #35 0x3b8f6f57 in NKikimr::NDriverClient::NewClient(int, char**, std::__y1::shared_ptr) /-S/ydb/core/driver_lib/cli_utils/cli_cmds_root.cpp:77:26 #36 0x3b895124 in NKikimr::Main(int, char**, std::__y1::shared_ptr) /-S/ydb/core/driver_lib/run/main.cpp:151:20 #37 0x3b8988bd in ParameterizedMain(int, char**, std::__y1::shared_ptr) /-S/ydb/core/driver_lib/run/main.cpp:201:16 #38 0x1d599033 in main /-S/ydb/apps/ydbd/main.cpp:27:12 #39 0x7f1f91de6d8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e) Indirect leak of 232 byte(s) in 1 object(s) allocated from: #0 0x1d6662cd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 #1 0x45c01baf in NYql::CreateFileStorage(NYql::TFileStorageConfig const&, std::__y1::vector>, std::__y1::allocator>>> const&) /-S/yql/essentials/core/file_storage/file_storage.cpp:342:12 #2 0x456521e2 in NKikimr::NKqp::MakeYtGateway(NKikimr::NMiniKQL::IFunctionRegistry const*, NKikimrConfig::TQueryServiceConfig const&) /-S/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp:29:44 #3 0x45654cd0 in NKikimr::NKqp::TKqpFederatedQuerySetupFactoryDefault::TKqpFederatedQuerySetupFactoryDefault(NActors::TActorSystemSetup*, NKikimr::TAppData const*, NKikimrConfig::TAppConfig const&) /-S/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp:88:21 #4 0x45657aed in construct /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:165:24 #5 0x45657aed in construct /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator_traits.h:320:9 #6 0x45657aed in __shared_ptr_emplace, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:296:5 #7 0x45657aed in allocate_shared, NActors::TActorSystemSetup *&, const NKikimr::TAppData *&, const NKikimrConfig::TAppConfig &, 0> /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:875:51 #8 0x45657aed in make_shared /-S/contrib/libs/cxxsupp/libcxx/include/__memory/shared_ptr.h:883:10 #9 0x45657aed in NKikimr::NKqp::MakeKqpFederatedQuerySetupFactory(NActors::TActorSystemSetup*, NKikimr::TAppData const*, NKikimrConfig::TAppConfig const&) /-S/ydb/core/kqp/federated_query/kqp_federated_query_helpers.cpp:177:16 #10 0x42b8d69c in NKikimr::NKikimrServicesInitializers::TKqpServiceInitializer::InitializeServices(NActors::TActorSystemSetup*, NKikimr::TAppData const*) /-S/ydb/core/driver_lib/run/kikimr_services_initializers.cpp:2135:43 #11 0x4faeb322 in NKikimr::TServiceInitializersList::InitializeServices(NActors::TActorSystemSetup*, NKikimr::TAppData const*) /-S/ydb/core/driver_lib/run/service_initializer.cpp:13:29 #12 0x3de3c833 in NKikimr::TKikimrRunner::InitializeActorSystem(NKikimr::TKikimrRunConfig const&, TIntrusivePtr>, 
NKikimr::TBasicKikimrServicesMask const&) /-S/ydb/core/driver_lib/run/run.cpp:1349:26 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...import', '--basetemp', '/home/runner/.ya/build/build_root/km4y/00029b/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/km4y/00029b/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/km4y/00029b', '--source-root', '/home/runner/.ya/build/build_root/km4y/00029b/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/km4y/00029b/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_tpch_import.py::TestS3TpchImport::test_import_and_export', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...import', '--basetemp', '/home/runner/.ya/build/build_root/km4y/00029b/tmp', '--capture', 'no', '-c', 'pkg:library.python.pytest:pytest.yatest.ini', '-p', 'no:factor', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/km4y/00029b/ydb/tests/olap/s3_import/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/km4y/00029b', '--source-root', '/home/runner/.ya/build/build_root/km4y/00029b/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/km4y/00029b/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/s3_import', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_tpch_import.py::TestS3TpchImport::test_import_and_export', '--tb', 'short', '--dep-root', 'ydb/tests/olap/s3_import', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 
'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) |99.6%| [TM] {RESULT} ydb/tests/olap/s3_import/py3test >> test_tpch.py::TestTpchS1::test_tpch[21] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] 2025-03-28 10:20:30,370 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-28 10:20:30,529 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 964230 1.7G 1.7G 1.6G ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/km4y/000182/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modules 990085 1.9G 1.9G 1.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/km4y/000182/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_inse Test command err: Was written: 0.0 MiB, Speed: 0.0 MiB/s Step 1. only write Write: 10% 4236 30% 4236 50% 4236 90% 4236 99% 4236 ms Write: 10% 5971 30% 5971 50% 5971 90% 5971 99% 5971 ms Write: 10% 13124 30% 13124 50% 13124 90% 13124 99% 13124 ms Write: 10% 14274 30% 14274 50% 14274 90% 14274 99% 14274 ms Write: 10% 16438 30% 16438 50% 16438 90% 16438 99% 16438 ms Write: 10% 15640 30% 15640 50% 15640 90% 15640 99% 15640 ms Write: 10% 15772 30% 15772 50% 15772 90% 15772 99% 15772 ms Write: 10% 14944 30% 14944 50% 14944 90% 14944 99% 14944 ms Write: 10% 15614 30% 15614 50% 15614 90% 15614 99% 15614 ms Write: 10% 15997 30% 15997 50% 15997 90% 15997 99% 15997 ms Write: 10% 15434 30% 15434 50% 15434 90% 15434 99% 15434 ms Write: 10% 15327 30% 15327 50% 15327 90% 15327 99% 15327 ms Write: 10% 15177 30% 15177 50% 15177 90% 15177 99% 15177 ms Write: 10% 14853 30% 14853 50% 14853 90% 14853 99% 14853 ms Write: 10% 15400 30% 15400 50% 15400 90% 15400 99% 15400 ms Write: 10% 13602 30% 13602 50% 13602 90% 13602 99% 13602 ms Write: 10% 13080 30% 13080 50% 13080 90% 13080 99% 13080 ms Write: 10% 12833 30% 12833 50% 12833 90% 12833 99% 12833 ms Write: 10% 11911 30% 11911 50% 11911 90% 11911 99% 11911 ms Write: 10% 11345 30% 11345 50% 11345 90% 11345 99% 11345 ms Write: 10% 12083 30% 12083 50% 12083 90% 12083 99% 12083 ms Write: 10% 10970 30% 10970 50% 10970 90% 10970 99% 10970 ms Write: 10% 9890 30% 9890 50% 9890 90% 9890 99% 9890 ms Write: 10% 9618 30% 9618 50% 9618 90% 9618 99% 9618 ms Write: 10% 9188 30% 9188 50% 9188 90% 9188 99% 9188 ms Write: 10% 9302 30% 9302 50% 9302 90% 9302 99% 9302 ms Write: 10% 8832 30% 8832 50% 8832 90% 8832 99% 8832 ms Write: 10% 8803 30% 8803 50% 8803 90% 8803 99% 8803 ms Write: 10% 9272 30% 9272 50% 9272 90% 9272 99% 9272 ms Write: 10% 8852 30% 8852 50% 8852 90% 8852 99% 8852 ms Write: 10% 9318 30% 9318 50% 9318 90% 9318 99% 9318 ms Write: 10% 8624 30% 8624 50% 8624 90% 8624 99% 8624 ms Write: 10% 8485 30% 8485 50% 8485 90% 8485 99% 8485 ms Write: 10% 8092 30% 8092 50% 8092 90% 8092 99% 8092 ms Write: 10% 7016 30% 7016 50% 7016 90% 7016 99% 7016 ms Write: 10% 7196 30% 7196 50% 7196 90% 7196 99% 7196 ms Write: 10% 8187 30% 8187 50% 8187 90% 8187 99% 8187 ms Write: 10% 7933 30% 7933 50% 7933 90% 7933 99% 7933 ms Write: 10% 6543 30% 6543 50% 6543 
90% 6543 99% 6543 ms Write: 10% 5417 30% 5417 50% 5417 90% 5417 99% 5417 ms Write: 10% 6521 30% 6521 50% 6521 90% 6521 99% 6521 ms Write: 10% 5784 30% 5784 50% 5784 90% 5784 99% 5784 ms Write: 10% 5696 30% 5696 50% 5696 90% 5696 99% 5696 ms Write: 10% 6223 30% 6223 50% 6223 90% 6223 99% 6223 ms Write: 10% 5409 30% 5409 50% 5409 90% 5409 99% 5409 ms Write: 10% 4104 30% 4104 50% 4104 90% 4104 99% 4104 ms Write: 10% 4075 30% 4075 50% 4075 90% 4075 99% 4075 ms Write: 10% 3835 30% 3835 50% 3835 90% 3835 99% 3835 ms Write: 10% 4261 30% 4261 50% 4261 90% 4261 99% 4261 ms Write: 10% 3792 30% 3792 50% 3792 90% 3792 99% 3792 ms Write: 10% 3816 30% 3816 50% 3816 90% 3816 99% 3816 ms Write: 10% 3275 30% 3275 50% 3275 90% 3275 99% 3275 ms Write: 10% 3110 30% 3110 50% 3110 90% 3110 99% 3110 ms Write: 10% 3831 30% 3831 50% 3831 90% 3831 99% 3831 ms Write: 10% 3908 30% 3908 50% 3908 90% 3908 99% 3908 ms Write: 10% 3201 30% 3201 50% 3201 90% 3201 99% 3201 ms Write: 10% 3676 30% 3676 50% 3676 90% 3676 99% 3676 ms Write: 10% 2870 30% 2870 50% 2870 90% 2870 99% 2870 ms Write: 10% 2832 30% 2832 50% 2832 90% 2832 99% 2832 ms Write: 10% 2834 30% 2834 50% 2834 90% 2834 99% 2834 ms Write: 10% 2759 30% 2759 50% 2759 90% 2759 99% 2759 ms Write: 10% 2578 30% 2578 50% 2578 90% 2578 99% 2578 ms Write: 10% 3255 30% 3255 50% 3255 90% 3255 99% 3255 ms Write: 10% 2686 30% 2686 50% 2686 90% 2686 99% 2686 ms Step 2. read write Write: 10% 5139 30% 5139 50% 5139 90% 5139 99% 5139 ms Write: 10% 6646 30% 6646 50% 6646 90% 6646 99% 6646 ms Write: 10% 16183 30% 16183 50% 16183 90% 16183 99% 16183 ms Write: 10% 14161 30% 14161 50% 14161 90% 14161 99% 14161 ms Write: 10% 13157 30% 13157 50% 13157 90% 13157 99% 13157 ms Write: 10% 13067 30% 13067 50% 13067 90% 13067 99% 13067 ms Write: 10% 13896 30% 13896 50% 13896 90% 13896 99% 13896 ms Write: 10% 13157 30% 13157 50% 13157 90% 13157 99% 13157 ms Write: 10% 14488 30% 14488 50% 14488 90% 14488 99% 14488 ms Write: 10% 12921 30% 12921 50% 12921 90% 12921 99% 12921 ms Write: 10% 12427 30% 12427 50% 12427 90% 12427 99% 12427 ms Write: 10% 13224 30% 13224 50% 13224 90% 13224 99% 13224 ms Write: 10% 12078 30% 12078 50% 12078 90% 12078 99% 12078 ms Write: 10% 12188 30% 12188 50% 12188 90% 12188 99% 12188 ms Write: 10% 11743 30% 11743 50% 11743 90% 11743 99% 11743 ms Write: 10% 10626 30% 10626 50% 10626 90% 10626 99% 10626 ms Write: 10% 11049 30% 11049 50% 11049 90% 11049 99% 11049 ms Write: 10% 10581 30% 10581 50% 10581 90% 10581 99% 10581 ms Write: 10% 11457 30% 11457 50% 11457 90% 11457 99% 11457 ms Write: 10% 9475 30% 9475 50% 9475 90% 9475 99% 9475 ms Write: 10% 9445 30% 9445 50% 9445 90% 9445 99% 9445 ms Write: 10% 9918 30% 9918 50% 9918 90% 9918 99% 9918 ms Write: 10% 9445 30% 9445 50% 9445 90% 9445 99% 9445 ms Write: 10% 9188 30% 9188 50% 9188 90% 9188 99% 9188 ms Write: 10% 9182 30% 9182 50% 9182 90% 9182 99% 9182 ms Write: 10% 8576 30% 8576 50% 8576 90% 8576 99% 8576 ms Write: 10% 9764 30% 9764 50% 9764 90% 9764 99% 9764 ms Write: 10% 9189 30% 9189 50% 9189 90% 9189 99% 9189 ms Write: 10% 8319 30% 8319 50% 8319 90% 8319 99% 8319 ms Write: 10% 7596 30% 7596 50% 7596 90% 7596 99% 7596 ms Write: 10% 7909 30% 7909 50% 7909 90% 7909 99% 7909 ms Write: 10% 7923 30% 7923 50% 7923 90% 7923 99% 7923 ms Write: 10% 5698 30% 5698 50% 5698 90% 5698 99% 5698 ms Write: 10% 5528 30% 5528 50% 5528 90% 5528 99% 5528 ms Write: 10% 5645 30% 5645 50% 5645 90% 5645 99% 5645 ms Write: 10% 6733 30% 6733 50% 6733 90% 6733 99% 6733 ms Write: 10% 5145 30% 5145 50% 5145 90% 5145 99% 5145 ms Write: 10% 5264 30% 
5264 50% 5264 90% 5264 99% 5264 ms Write: 10% 5442 30% 5442 50% 5442 90% 5442 99% 5442 ms Write: 10% 4284 30% 4284 50% 4284 90% 4284 99% 4284 ms Write: 10% 4510 30% 4510 50% 4510 90% 4510 99% 4510 ms Write: 10% 4337 30% 4337 50% 4337 90% 4337 99% 4337 ms Write: 10% 3067 30% 3067 50% 3067 90% 3067 99% 3067 ms Write: 10% 4688 30% 4688 50% 4688 90% 4688 99% 4688 ms Write: 10% 3151 30% 3151 50% 3151 90% 3151 99% 3151 ms Write: 10% 4069 30% 4069 50% 4069 90% 4069 99% 4069 ms Write: 10% 3649 30% 3649 50% 3649 90% 3649 99% 3649 ms Write: 10% 3800 30% 3800 50% 3800 90% 3800 99% 3800 ms Write: 10% 2955 30% 2955 50% 2955 90% 2955 99% 2955 ms Write: 10% 3521 30% 3521 50% 3521 90% 3521 99% 3521 ms Write: 10% 3024 30% 3024 50% 3024 90% 3024 99% 3024 ms Write: 10% 2518 30% 2518 50% 2518 90% 2518 99% 2518 ms Write: 10% 3136 30% 3136 50% 3136 90% 3136 99% 3136 ms Write: 10% 2505 30% 2505 50% 2505 90% 2505 99% 2505 ms Write: 10% 2859 30% 2859 50% 2859 90% 2859 99% 2859 ms Write: 10% 2932 30% 2932 50% 2932 90% 2932 99% 2932 ms Write: 10% 2985 30% 2985 50% 2985 90% 2985 99% 2985 ms Write: 10% 2776 30% 2776 50% 2776 90% 2776 99% 2776 ms Write: 10% 2438 30% 2438 50% 2438 90% 2438 99% 2438 ms Write: 10% 2599 30% 2599 50% 2599 90% 2599 99% 2599 ms Write: 10% 3909 30% 3909 50% 3909 90% 3909 99% 3909 ms Write: 10% 3062 30% 3062 50% 3062 90% 3062 99% 3062 ms Write: 10% 2587 30% 2587 50% 2587 90% 2587 99% 2587 ms Write: 10% 2518 30% 2518 50% 2518 90% 2518 99% 2518 ms Read: 10% 13176 30% 13900 50% 14624 90% 16072 99% 16398 ms Step 3. write modify Was written: 35.7421875 MiB, Speed: 0.595703125 MiB/s Write: 10% 5485 30% 5485 50% 5485 90% 5485 99% 5485 ms Write: 10% 10301 30% 10301 50% 10301 90% 10301 99% 10301 ms Write: 10% 13253 30% 13253 50% 13253 90% 13253 99% 13253 ms Write: 10% 14639 30% 14639 50% 14639 90% 14639 99% 14639 ms Write: 10% 15916 30% 15916 50% 15916 90% 15916 99% 15916 ms Write: 10% 15234 30% 15234 50% 15234 90% 15234 99% 15234 ms Write: 10% 16163 30% 16163 50% 16163 90% 16163 99% 16163 ms Write: 10% 15193 30% 15193 50% 15193 90% 15193 99% 15193 ms Write: 10% 15493 30% 15493 50% 15493 90% 15493 99% 15493 ms Write: 10% 13338 30% 13338 50% 13338 90% 13338 99% 13338 ms Write: 10% 13371 30% 13371 50% 13371 90% 13371 99% 13371 ms Write: 10% 13326 30% 13326 50% 13326 90% 13326 99% 13326 ms Write: 10% 13519 30% 13519 50% 13519 90% 13519 99% 13519 ms Write: 10% 12012 30% 12012 50% 12012 90% 12012 99% 12012 ms Write: 10% 12270 30% 12270 50% 12270 90% 12270 99% 12270 ms Write: 10% 11740 30% 11740 50% 11740 90% 11740 99% 11740 ms Write: 10% 12448 30% 12448 50% 12448 90% 12448 99% 12448 ms Write: 10% 11745 30% 11745 50% 11745 90% 11745 99% 11745 ms Write: 10% 11768 30% 11768 50% 11768 90% 11768 99% 11768 ms Write: 10% 11661 30% 11661 50% 11661 90% 11661 99% 11661 ms Write: 10% 11252 30% 11252 50% 11252 90% 11252 99% 11252 ms Write: 10% 10469 30% 10469 50% 10469 90% 10469 99% 10469 ms Write: 10% 11346 30% 11346 50% 11346 90% 11346 99% 11346 ms Write: 10% 10121 30% 10121 50% 10121 90% 10121 99% 10121 ms Write: 10% 10240 30% 10240 50% 10240 90% 10240 99% 10240 ms Write: 10% 10100 30% 10100 50% 10100 90% 10100 99% 10100 ms Write: 10% 10152 30% 10152 50% 10152 90% 10152 99% 10152 ms Write: 10% 10063 30% 10063 50% 10063 90% 10063 99% 10063 ms Write: 10% 9358 30% 9358 50% 9358 90% 9358 99% 9358 ms Write: 10% 9704 30% 9704 50% 9704 90% 9704 99% 9704 ms Write: 10% 8686 30% 8686 50% 8686 90% 8686 99% 8686 ms Write: 10% 7739 30% 7739 50% 7739 90% 7739 99% 7739 ms Write: 10% 8427 30% 8427 50% 8427 90% 8427 99% 8427 ms 
Write: 10% 8697 30% 8697 50% 8697 90% 8697 99% 8697 ms Write: 10% 7778 30% 7778 50% 7778 90% 7778 99% 7778 ms Write: 10% 9872 30% 9872 50% 9872 90% 9872 99% 9872 ms Write: 10% 9354 30% 9354 50% 9354 90% 9354 99% 9354 ms Write: 10% 6495 30% 6495 50% 6495 90% 6495 99% 6495 ms Write: 10% 6800 30% 6800 50% 6800 90% 6800 99% 6800 ms Write: 10% 6413 30% 6413 50% 6413 90% 6413 99% 6413 ms Write: 10% 7367 30% 7367 50% 7367 90% 7367 99% 7367 ms Write: 10% 6001 30% 6001 50% 6001 90% 6001 99% 6001 ms Write: 10% 6829 30% 6829 50% 6829 90% 6829 99% 6829 ms Write: 10% 8381 30% 8381 50% 8381 90% 8381 99% 8381 ms Write: 10% 5477 30% 5477 50% 5477 90% 5477 99% 5477 ms Write: 10% 6156 30% 6156 50% 6156 90% 6156 99% 6156 ms Write: 10% 6048 30% 6048 50% 6048 90% 6048 99% 6048 ms Write: 10% 5263 30% 5263 50% 5263 90% 5263 99% 5263 ms Write: 10% 6234 30% 6234 50% 6234 90% 6234 99% 6234 ms Write: 10% 6014 30% 6014 50% 6014 90% 6014 99% 6014 ms Write: 10% 5857 30% 5857 50% 5857 90% 5857 99% 5857 ms Write: 10% 5940 30% 5940 50% 5940 90% 5940 99% 5940 ms Write: 10% 5173 30% 5173 50% 5173 90% 5173 99% 5173 ms Write: 10% 4889 30% 4889 50% 4889 90% 4889 99% 4889 ms Write: 10% 5715 30% 5715 50% 5715 90% 5715 99% 5715 ms Write: 10% 5881 30% 5881 50% 5881 90% 5881 99% 5881 ms Write: 10% 5467 30% 5467 50% 5467 90% 5467 99% 5467 ms Write: 10% 6693 30% 6693 50% 6693 90% 6693 99% 6693 ms Write: 10% 4717 30% 4717 50% 4717 90% 4717 99% 4717 ms Write: 10% 5177 30% 5177 50% 5177 90% 5177 99% 5177 ms Write: 10% 5114 30% 5114 50% 5114 90% 5114 99% 5114 ms Write: 10% 4840 30% 4840 50% 4840 90% 4840 99% 4840 ms Write: 10% 5084 30% 5084 50% 5084 90% 5084 99% 5084 ms Write: 10% 5154 30% 5154 50% 5154 90% 5154 99% 5154 ms Update: 10% 912 30% 912 50% 912 90% 912 99% 912 ms Step 4. read modify write Was written: 87.5 MiB, Speed: 0.8626302083333334 MiB/s Write: 10% 23233 30% 23233 50% 23233 90% 23233 99% 23233 ms Write: 10% 21927 30% 21927 50% 21927 90% 21927 99% 21927 ms Write: 10% 33418 30% 33418 50% 33418 90% 33418 99% 33418 ms Write: 10% 23073 30% 23073 50% 23073 90% 23073 99% 23073 ms Write: 10% 25264 30% 25264 50% 25264 90% 25264 99% 25264 ms Write: 10% 24107 30% 24107 50% 24107 90% 24107 99% 24107 ms Write: 10% 26293 30% 26293 50% 26293 90% 26293 99% 26293 ms Write: 10% 24820 30% 24820 50% 24820 90% 24820 99% 24820 ms Write: 10% 24993 30% 24993 50% 24993 90% 24993 99% 24993 ms Write: 10% 24828 30% 24828 50% 24828 90% 24828 99% 24828 ms Write: 10% 24050 30% 24050 50% 24050 90% 24050 99% 24050 ms Write: 10% 21687 30% 21687 50% 21687 90% 21687 99% 21687 ms Write: 10% 23973 30% 23973 50% 23973 90% 23973 99% 23973 ms Write: 10% 21450 30% 21450 50% 21450 90% 21450 99% 21450 ms Write: 10% 23159 30% 23159 50% 23159 90% 23159 99% 23159 ms Write: 10% 21862 30% 21862 50% 21862 90% 21862 99% 21862 ms Write: 10% 21752 30% 21752 50% 21752 90% 21752 99% 21752 ms Write: 10% 22181 30% 22181 50% 22181 90% 22181 99% 22181 ms Write: 10% 20279 30% 20279 50% 20279 90% 20279 99% 20279 ms Write: 10% 21846 30% 21846 50% 21846 90% 21846 99% 21846 ms Write: 10% 20487 30% 20487 50% 20487 90% 20487 99% 20487 ms Write: 10% 19974 30% 19974 50% 19974 90% 19974 99% 19974 ms Write: 10% 21535 30% 21535 50% 21535 90% 21535 99% 21535 ms Write: 10% 21208 30% 21208 50% 21208 90% 21208 99% 21208 ms Write: 10% 19915 30% 19915 50% 19915 90% 19915 99% 19915 ms Write: 10% 19599 30% 19599 50% 19599 90% 19599 99% 19599 ms Write: 10% 19145 30% 19145 50% 19145 90% 19145 99% 19145 ms Write: 10% 19174 30% 19174 50% 19174 90% 19174 99% 19174 ms Write: 10% 19192 30% 19192 50% 19192 
Write latency percentiles, ms (p10/p30/p50/p90/p99 are identical within each stream; the first entry continues a stream whose p90/p99 were 19192 ms):
16658, 16631, 17407, 15724, 16325, 16458, 17166, 15117, 17127, 15439, 14928, 13244, 13327, 16723, 19358, 14277, 15825, 11782, 9903, 14428, 15610, 12872, 10622, 10074, 12594, 12706, 14235, 13327, 11392, 12318, 10105, 16013, 14447, 9307, 10362
Update: 10% 833 30% 833 50% 833 90% 833 99% 833 ms
Read: 10% 5886 30% 12134 50% 21906 90% 41705 99% 48041 ms
File "library/python/pytest/main.py", line 101, in main rc = pytest.main(
File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main)
File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0
File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem)
File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log))
File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds)
File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call(
File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func()
File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in <lambda> lambda: ihook(item=item, **kwds), when=when, reraise=reraise
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest()
File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs)
File "/home/runner/.ya/build/build_root/km4y/000182/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 88, in test ctx.executable(self, ctx)
File "ydb/tests/olap/scenario/test_insert.py", line 86, in scenario_read_data_during_bulk_upsert thread2.join_all()
File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all thread.join(timeout=timeout)
File "ydb/tests/olap/common/thread_helper.py", line 16, in join super().join(timeout)
File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock()
File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout):
File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for(
File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: .../olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_alter_tiering.py::TestAlterTiering::test[many_tables]', '--test-filter', 'test_alter_compression.py::TestAlterCompression::test[alter_compression]', '--test-filter', 'test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load]', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--test-filter', 'test_simple.py::TestSimple::test[alter_table]', '--test-filter', 'test_simple.py::TestSimple::test[alter_tablestore]', '--test-filter', 'test_simple.py::TestSimple::test[table]', '--test-filter', 'test_simple.py::TestSimple::test[tablestores]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: ((".../olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_alter_tiering.py::TestAlterTiering::test[many_tables]', '--test-filter', 'test_alter_compression.py::TestAlterCompression::test[alter_compression]', '--test-filter', 'test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load]', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--test-filter', 'test_simple.py::TestSimple::test[alter_table]', '--test-filter', 'test_simple.py::TestSimple::test[alter_tablestore]', '--test-filter', 'test_simple.py::TestSimple::test[table]', '--test-filter', 'test_simple.py::TestSimple::test[tablestores]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {})
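The stack above ends in ydb/tests/olap/common/thread_helper.py, which joins worker threads and re-raises anything they threw. A minimal sketch of that pattern, reconstructed only from the frames in this log (names and line layout are assumed, not copied from the real file):

    import threading

    class TestThread(threading.Thread):
        # Stores the target's return value or exception; join() re-raises the
        # exception in the joining thread, matching "raise self.exc" seen in
        # the many_tables trace further down.
        def __init__(self, target, args=()):
            super().__init__(target=target, args=args)
            self.ret = None
            self.exc = None

        def run(self):
            try:
                self.ret = self._target(*self._args, **self._kwargs)
            except BaseException as e:
                self.exc = e

        def join(self, timeout=None):
            super().join(timeout)  # blocks in _wait_for_tstate_lock, as above
            if self.exc is not None:
                raise self.exc

With timeout=None the join in join_all() has no overall deadline, so a stuck bulk-upsert worker keeps the whole chunk alive until the external 600-second wrapper kills it.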
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_all_supported_compression
2025-03-28 10:20:30,374 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-28 10:20:30,633 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination:
pid rss ref pdirt
964234 777M 783M 699M ydb-tests-olap-column_family-compression --basetemp /home/runner/.ya/build/build_root/km4y/00015a/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor
967406 6.0G 6.0G 5.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/km4y/00015a/ydb/tests/olap/column_family/compression/test-results/py3test/testing_ou
Test command err:
File "library/python/pytest/main.py", line 101, in main rc = pytest.main(
File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main)
File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0
File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem)
File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log))
File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds)
File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call(
File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func()
File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in <lambda> lambda: ihook(item=item, **kwds), when=when, reraise=reraise
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest()
File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs)
File "ydb/tests/olap/column_family/compression/alter_compression.py", line 110, in test_all_supported_compression volumes: tuple[int, int] = tables[i].get_volumes_column("value")
File "ydb/tests/olap/common/column_table_helper.py", line 73, in get_volumes_column time.sleep(10)
File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for(
File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/km4y/00015a/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/km4y/00015a', '--source-root', '/home/runner/.ya/build/build_root/km4y/00015a/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/km4y/00015a/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'alter_compression.py::TestAlterCompression::test_all_supported_compression', '--test-filter', 'alter_compression.py::TestAlterCompression::test_availability_data', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/km4y/00015a/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/km4y/00015a', '--source-root', '/home/runner/.ya/build/build_root/km4y/00015a/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/km4y/00015a/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'alter_compression.py::TestAlterCompression::test_all_supported_compression', '--test-filter', 'alter_compression.py::TestAlterCompression::test_availability_data', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {})
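Both scenario and compression timeouts above land while the test is parked in a bare time.sleep(10) inside get_volumes_column. A deadline-bounded poll would fail inside the test, with a meaningful stack, before the 600-second wrapper kills the whole chunk. A sketch under assumed names (wait_until and poll_fn are illustrative, not from the repository):

    import time

    def wait_until(poll_fn, deadline_s, interval_s=10.0):
        # Poll until poll_fn() returns a truthy value or the deadline passes.
        deadline = time.monotonic() + deadline_s
        while time.monotonic() < deadline:
            result = poll_fn()
            if result:
                return result
            time.sleep(min(interval_s, max(0.0, deadline - time.monotonic())))
        raise TimeoutError(f"condition not met within {deadline_s}s")

For example, wait_until(lambda: table.get_volumes_column("value"), deadline_s=120) raises a test-level TimeoutError instead of leaving the chunk to be killed from outside.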
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_tpch.py::TestTpchS1::test_tpch[21] [FAIL]
2025-03-28 10:20:30,473 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-28 10:20:30,993 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination:
pid rss ref pdirt
964242 659M 662M 577M ydb-tests-functional-tpc-medium --basetemp /home/runner/.ya/build/build_root/km4y/000349/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest
966437 6.5G 6.5G 6.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/km4y/000349/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/t
972012 5.4G 5.4G 4.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/km4y/000349/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/t
992810 153M 156M 90.0M └─ ydb -e grpc://localhost:26874 -d /local/test_db workload tpch --path olap_yatests/tpch/s1 run --json /home/runner/.ya/build/build_root/km4y/000349/ydb/tests/functional/t
Test command err:
File "library/python/pytest/main.py", line 101, in main rc = pytest.main(
File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main)
File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0
File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem)
File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log))
File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds)
File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call(
File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func()
File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in <lambda> lambda: ihook(item=item, **kwds), when=when, reraise=reraise
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest()
File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self)
File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args)
File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs)
File "ydb/tests/olap/load/lib/tpch.py", line 48, in test_tpch self.run_workload_test(self._get_path(), query_num)
File "ydb/tests/olap/load/lib/conftest.py", line 286, in run_workload_test result = YdbCliHelper.workload_run(
File "ydb/tests/olap/lib/ydb_cli.py", line 310, in workload_run ).process()
File "ydb/tests/olap/lib/ydb_cli.py", line 283, in process process = yatest.common.process.execute(self._get_cmd(), check_exit_code=False)
File "library/python/testing/yatest_common/yatest/common/process.py", line 656, in execute res.wait(check_exit_code, timeout, on_timeout)
File "library/python/testing/yatest_common/yatest/common/process.py", line 400, in wait _wait()
File "library/python/testing/yatest_common/yatest/common/process.py", line 335, in _wait pid, sts, rusage = os.wait4(self._process.pid, 0)
File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for(
File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...tTpchS1::test_tpch[8]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[9]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[10]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[11]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[12]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[13]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[14]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[15]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[16]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[17]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[18]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[19]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[20]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[21]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[22]', '--tb', 'short', '--dep-root', 'ydb/tests/functional/tpc/medium', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_tpch.py']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...tTpchS1::test_tpch[8]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[9]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[10]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[11]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[12]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[13]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[14]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[15]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[16]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[17]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[18]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[19]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[20]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[21]', '--test-filter', 'test_tpch.py::TestTpchS1::test_tpch[22]', '--tb', 'short', '--dep-root', 'ydb/tests/functional/tpc/medium', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_tpch.py']' stopped by 600 seconds timeout",), {})
|99.8%| [TA] $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log}
|99.9%| [TM] {RESULT} ydb/tests/olap/scenario/py3test
|99.9%| [TM] {RESULT} ydb/tests/olap/column_family/compression/py3test
|99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log}
|99.9%| CLEANING BUILD ROOT
ydb/tests/functional/tpc/medium [size:medium]
------ [test_tpch.py] chunk ran 17 tests (total:624.29s - test:600.10s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_tpch.py::TestTpchS1::test_tpch[5] (good) duration: 398.32s
test_tpch.py::TestTpchS1::test_tpch[21] (fail) duration: 42.12s
test_tpch.py::TestTpchS1::test_tpch[17] (good) duration: 27.51s
test_tpch.py::TestTpchS1::test_tpch[9] (good) duration: 18.43s
test_tpch.py::TestTpchS1::test_tpch[18] (good) duration: 18.03s
test_tpch.py::TestTpchS1::test_tpch[7] (good) duration: 17.86s
test_tpch.py::TestTpchS1::test_tpch[8] (good) duration: 16.86s
test_tpch.py::TestTpchS1::test_tpch[20] (good) duration: 14.38s
test_tpch.py::TestTpchS1::test_tpch[10] (good) duration: 11.70s
test_tpch.py::TestTpchS1::test_tpch[15] (good) duration: 10.82s
6 more tests with 41.97s total duration are not listed.
test_tpch.py::TestTpchS1::test_tpch[22] test was not launched inside chunk.
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/stderr
[fail] test_tpch.py::TestTpchS1::test_tpch[21] [default-linux-x86_64-release-asan] (42.12s)
ydb/tests/olap/load/lib/tpch.py:48: in test_tpch self.run_workload_test(self._get_path(), query_num)
ydb/tests/olap/load/lib/conftest.py:298: in run_workload_test self.process_query_result(result, query_num, qparams.iterations, True)
ydb/tests/olap/load/lib/conftest.py:245: in process_query_result raise exc
ydb/tests/olap/lib/ydb_cli.py:283: in process process = yatest.common.process.execute(self._get_cmd(), check_exit_code=False)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:400: in wait _wait()
library/python/testing/yatest_common/yatest/common/process.py:335: in _wait pid, sts, rusage = os.wait4(self._process.pid, 0)
library/python/pytest/plugins/ya.py:347: in _graceful_shutdown _graceful_shutdown_on_log(not capman.is_globally_capturing())
library/python/pytest/plugins/ya.py:321: in _graceful_shutdown_on_log pytest.exit("Graceful shutdown requested")
E Failed: Graceful shutdown requested
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff/test_tpch.py.TestTpchS1.test_tpch.21.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/tpc/medium/test-results/py3test/testing_out_stuff
------ TIMEOUT: 15 - GOOD, 1 - FAIL, 1 - NOT_LAUNCHED ydb/tests/functional/tpc/medium
ydb/tests/olap/column_family/compression [size:medium]
------ sole chunk ran 2 tests (total:624.63s - test:600.05s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
alter_compression.py::TestAlterCompression::test_all_supported_compression (timeout) duration: 619.91s
alter_compression.py::TestAlterCompression::test_availability_data test was not launched inside chunk.
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/stderr
[timeout] alter_compression.py::TestAlterCompression::test_all_supported_compression [default-linux-x86_64-release-asan] (619.91s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAlterCompression.test_all_supported_compression.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/column_family/compression
ydb/tests/olap/s3_import [size:medium]
------ sole chunk ran 1 test (total:621.08s - test:600.07s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_tpch_import.py::TestS3TpchImport::test_import_and_export (timeout) duration: 616.41s
Info: Test run has exceeded 8.0G (8388608K) memory limit with 11.0G (11561756K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
964157 44.8M 44.8M 6.5M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
964213 38.3M 26.7M 14.1M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
964219 793M 796M 710M └─ ydb-tests-olap-s3_import --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --docte
967765 9.8G 9.8G 9.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/te
973236 478M 0b 0b └─ moto_server s3 --port 10138
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/stderr
[timeout] test_tpch_import.py::TestS3TpchImport::test_import_and_export [default-linux-x86_64-release-asan] (616.41s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - TIMEOUT ydb/tests/olap/s3_import
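The Info: block above states the remedy for the memory overrun directly: raise the test's RAM requirement in its ya.make. A hypothetical fragment for ydb/tests/olap/s3_import (the REQUIREMENTS macro comes verbatim from the log hint; the surrounding module body is assumed):

    PY3TEST()
        # ... existing TEST_SRCS / DEPENDS / SIZE(...) of the suite ...
        REQUIREMENTS(ram:16)  # log shows 11.0G used against the default 8.0G limit
    END()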
ydb/tests/olap/scenario [size:medium]
------ sole chunk ran 8 tests (total:624.23s - test:600.08s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] (good) duration: 230.88s
test_alter_tiering.py::TestAlterTiering::test[many_tables] (fail) duration: 181.06s
test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (timeout) duration: 159.89s
test_simple.py::TestSimple::test[tablestores] (good) duration: 25.59s
test_simple.py::TestSimple::test[alter_table] (good) duration: 8.46s
test_simple.py::TestSimple::test[alter_tablestore] (good) duration: 4.01s
test_simple.py::TestSimple::test[table] (good) duration: 3.36s
test_alter_compression.py::TestAlterCompression::test[alter_compression] test was not launched inside chunk.
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr
[fail] test_alter_tiering.py::TestAlterTiering::test[many_tables] [default-linux-x86_64-release-asan] (181.06s)
ydb/tests/olap/scenario/conftest.py:88: in test ctx.executable(self, ctx)
ydb/tests/olap/scenario/test_alter_tiering.py:353: in scenario_many_tables threads.start_and_wait_all()
ydb/tests/olap/common/thread_helper.py:49: in start_and_wait_all self.join_all()
ydb/tests/olap/common/thread_helper.py:45: in join_all thread.join(timeout=timeout)
ydb/tests/olap/common/thread_helper.py:18: in join raise self.exc
ydb/tests/olap/common/thread_helper.py:11: in run self.ret = self._target(*self._args, **self._kwargs)
ydb/tests/olap/scenario/test_alter_tiering.py:242: in _loop_scan sth.execute_scan_query(
ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:461: in execute_scan_query for result_set in it:
contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__ return self._next()
contrib/python/ydb/py3/ydb/_utilities.py:164: in _next res = self.wrapper(next(self.it))
contrib/python/ydb/py3/ydb/table.py:1181: in <lambda> lambda resp: _wrap_scan_query_response(resp, self._table_client_settings),
contrib/python/ydb/py3/ydb/table.py:952: in _wrap_scan_query_response issues._process_response(response)
contrib/python/ydb/py3/ydb/issues.py:225: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues)
E ydb.issues.GenericError: message: "Scan failed at tablet 72075186224037946, reason: task_error:Error reading blob range for data: { Blob: DS:4294967295:[72075186224037946:1:1:255:1:8056:0] Offset: 6032 Size: 2024 }, error: cannot get blob: curlCode: 28, Timeout was reached, status: ERROR" issue_code: 2013 severity: 1 (server_code: 400080)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_tiering.py.TestAlterTiering.test.many_tables.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
[timeout] test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [default-linux-x86_64-release-asan] (159.89s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_insert.py.TestInsert.test.read_data_during_bulk_upsert.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff
------ TIMEOUT: 5 - GOOD, 1 - FAIL, 1 - NOT_LAUNCHED, 1 - TIMEOUT ydb/tests/olap/scenario
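The many_tables failure surfaces through execute_scan_query as ydb.issues.GenericError: the tablet-side scan hit an S3 read timeout (curlCode: 28) on a tiered blob. For reference, a scan-query consumer in the shape the traceback implies, assuming the v3 Python SDK surface visible in the frames (ScanQuery, table_client.scan_query, streamed parts carrying result_set):

    import ydb

    def run_scan_query(driver: ydb.Driver, yql: str):
        # Iterate the streamed result parts; server-side failures are raised
        # as ydb.issues.* exceptions from the iterator, as in the log above.
        rows = []
        it = driver.table_client.scan_query(ydb.ScanQuery(yql, parameters_types={}))
        for part in it:
            rows.extend(part.result_set.rows)
        return rows

This is a sketch of the call shape only; the real helper in scenario_tests_helper.py adds its own result handling.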
ydb/tests/olap/ttl_tiering [size:medium] nchunks:2
------ [data_migration_when_alter_ttl.py] chunk ran 1 test (total:610.31s - test:600.04s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 606.24s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr
[timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-release-asan] (606.24s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
------ [ttl_delete_s3.py] chunk ran 3 tests (total:615.52s - test:600.11s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch:
ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change (fail) duration: 442.46s
ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete (timeout) duration: 168.34s
ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering test was not launched inside chunk.
Info: Test run has exceeded 8.0G (8388608K) memory limit with 8.1G (8454656K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
955576 44.8M 44.8M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
955680 31.6M 19.8M 7.2M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
955697 685M 688M 604M └─ ydb-tests-olap-ttl_tiering --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc
956299 6.9G 6.9G 6.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/
958363 393M 393M 359M └─ moto_server s3 --port 17851
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr
[fail] ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [default-linux-x86_64-release-asan] (442.46s)
ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:141: in test_data_unchanged_after_ttl_change data = self.get_aggregated(table_path)
ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:27: in get_aggregated answer = self.ydb_client.query(f"SELECT count(*), sum(val), sum(Digest::Fnv32(s)) from `{table_path}`")
ydb/tests/olap/common/ydb_client.py:24: in query return self.session_pool.execute_with_retries(statement)
contrib/python/ydb/py3/ydb/query/pool.py:202: in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings)
contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator:
contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
contrib/python/ydb/py3/ydb/query/pool.py:200: in wrapped_callee return [result_set for result_set in it]
contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__ return self._next()
contrib/python/ydb/py3/ydb/_utilities.py:164: in _next res = self.wrapper(next(self.it))
contrib/python/ydb/py3/ydb/query/session.py:350: in <lambda> lambda resp: base.wrap_execute_query_response(
contrib/python/ydb/py3/ydb/query/base.py:172: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs)
contrib/python/ydb/py3/ydb/query/base.py:189: in wrap_execute_query_response issues._process_response(response_pb)
contrib/python/ydb/py3/ydb/issues.py:225: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues)
E ydb.issues.BadRequest: message: "Table /Root/test_data_unchanged_after_ttl_change/table (shard 72075186224037938) scan failed, reason: cannot build metadata/Snapshot too old: {1743157040410:max}. CS min read snapshot: {1743157041000:max}. now: 2025-03-28T10:17:25.968168Z" issue_code: 2017 severity: 1 (server_code: 400010)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_data_unchanged_after_ttl_change.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
[timeout] ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete [default-linux-x86_64-release-asan] (168.34s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_ttl_delete.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - FAIL, 1 - NOT_LAUNCHED, 2 - TIMEOUT ydb/tests/olap/ttl_tiering
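The "Snapshot too old" failure is raised straight through execute_with_retries: the SDK retry loop retries only errors it classifies as retryable, and ydb.issues.BadRequest is terminal. The call shape used by the test's ydb_client wrapper, per the traceback (pool construction here is assumed, not from the file):

    import ydb

    def query(driver: ydb.Driver, statement: str):
        # Matches ydb/tests/olap/common/ydb_client.py:24 in the trace; returns
        # a list of result sets. A BadRequest such as "Snapshot too old"
        # propagates to the test unchanged rather than being retried.
        pool = ydb.QuerySessionPool(driver)
        return pool.execute_with_retries(statement)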
ydb/tests/stress/log/tests [size:medium]
------ sole chunk ran 1 test (total:173.27s - test:173.22s)
[fail] test_workload.py::TestYdbLogWorkload::test[column] [default-linux-x86_64-release-asan] (168.41s)
teardown failed:
ydb/tests/stress/log/tests/test_workload.py:41: in teardown_class cls.cluster.stop()
ydb/tests/library/harness/kikimr_runner.py:494: in stop raise daemon.SeveralDaemonErrors(saved_exceptions)
E ydb.tests.library.harness.daemon.SeveralDaemonErrors: Daemon failed with message: Bad exit_code..
E Process exit_code = 100.
E Stdout file name:
E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.column/cluster/node_1/stdout
E Stderr file name:
E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.column/cluster/node_1/stderr
E Stderr content:
E
E GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option
E Current KQP shutdown state: spent 0 seconds, not started yet
E warning: address range table at offset 0x10c0 has a premature terminator entry at offset 0x10d0
E
E =================================================================
E ==965555==ERROR: LeakSanitizer: detected memory leaks
E
E Indirect leak of 15168 byte(s) in 4 object(s) allocated from:
E #0 0x1d6662cd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3
E #1 0x468183df in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:12
E #2 0x468183df in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25
E #3 0x4680a8a9 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16
E #4 0x4680a8a9 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> const&, TBasicString> const&, NYql::TYtGatewayConfig const*, TIntrusivePtr>, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/yt/yql/providers/yt/pro
..[snippet truncated]..
E pp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10
E #3 0x465fb076 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32
E #4 0x465fb076 in initialize_dynamic /-S/util/generic/hash_table.h:239:35
E #5 0x465fb076 in initialize_buckets_dynamic /-S/util/generic/hash_table.h:912:17
E #6 0x465fb076 in THashTable> const, TIntrusivePtr>>, TBasicString>, THash>>, TSelect1st, TEqualTo>>, std::__y1::allocator>>>::reserve(unsigned long) /-S/util/generic/hash_table.h:1330:13
E #7 0x465e3f6b in insert_unique >, TIntrusivePtr > > > /-S/util/generic/hash_table.h:679:9
E #8 0x465e3f6b in insert /-S/util/generic/hash.h:153:20
E #9 0x465e3f6b in NYql::NCommon::TSettingDispatcher::TSettingHandlerImpl& NYql::NCommon::TSettingDispatcher::AddSetting(TBasicString> const&, NYql::NCommon::TConfSetting&) /-S/yql/essentials/providers/common/config/yql_dispatch.h:344:23
E #10 0x465c7619 in NYql::TYtConfiguration::TYtConfiguration(NYql::TTypeAnnotationContext&) /-S/yt/yql/providers/yt/common/yql_yt_settings.cpp:466:5
E #11 0x468183f0 in TYtVersionedConfiguration /-S/yt/yql/providers/yt/common/yql_yt_settings.h:372:11
E #12 0x468183f0 in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:16
E #13 0x468183f0 in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25
E #14 0x4680a8a9 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16
E #15 0x4680a8a9 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> co...
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.column.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff
------ FAIL: 1 - FAIL ydb/tests/stress/log/tests
------ sole chunk ran 1 test (total:268.40s - test:268.24s)
Info: Test run has exceeded 32.0G (33554432K) memory limit with 43.5G (45565848K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
964184 44.8M 44.8M 6.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
964260 34.8M 22.9M 10.3M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
964266 763M 768M 687M └─ ydb-tests-stress-olap_workload-tests --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:fa
966387 4.9G 4.9G 4.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_
966401 4.6G 4.7G 4.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_
966410 4.8G 4.8G 4.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_
966424 4.9G 4.9G 4.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_
966434 4.6G 4.6G 4.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_
966455 4.6G 4.6G 4.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_
966458 5.0G 5.0G 4.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_
966465 4.6G 4.7G 4.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_
966487 4.6G 4.5G 4.1G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/stderr
ydb/core/keyvalue/ut_trace [size:medium] nchunks:5
------ [0/5] chunk ran 1 test (total:11.87s - test:11.84s)
[fail] TKeyValueTracingTest::ReadHuge [default-linux-x86_64-release-asan] (2.27s)
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.out
------ [1/5] chunk ran 1 test (total:11.87s - test:11.84s)
[fail] TKeyValueTracingTest::ReadSmall [default-linux-x86_64-release-asan] (2.31s)
equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.out
------ [2/5] chunk ran 1 test (total:11.87s - test:11.84s)
[fail] TKeyValueTracingTest::WriteHuge [default-linux-x86_64-release-asan] (2.35s)
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.out
------ [3/5] chunk ran 1 test (total:11.77s - test:11.74s)
[fail] TKeyValueTracingTest::WriteSmall [default-linux-x86_64-release-asan] (2.30s)
assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.out
------ FAIL: 4 - FAIL ydb/core/keyvalue/ut_trace
ydb/core/kqp/ut/cost [size:medium] nchunks:50
------ [0/50] chunk ran 1 test (total:25.02s - test:24.99s)
[crashed] KqpCost::OlapWriteRow [default-linux-x86_64-release-asan] (0.00s)
Test crashed (return code: 100)
==955427==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018b1ca4d bp 0x7ffdff787a40 sp 0x7ffdff7878a0 T0)
==955427==The signal is caused by a READ memory access.
==955427==Hint: address points to the zero page.
2025-03-28T10:10:04.537636Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486800587489266756:2131];send_to=[0:7307199536658146131:7762515];
2025-03-28T10:10:04.537746Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T10:10:14.920457Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T10:10:14.920502Z node 1 :IMPORT WARN: Table profiles were not loaded
#0 0x18b1ca4d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31
#1 0x18b1ca4d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32
#2 0x18b1ca4d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31
#3 0x18b1ca4d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10
#4 0x18b1ca4d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13
#5 0x18b415d7 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
#6 0x18b415d7 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#7 0x18b415d7 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &>
..[snippet truncated]..
#10 0x194936d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#11 0x194936d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#12 0x194936d5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#13 0x19463228 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
#14 0x18b40483 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
#15 0x19464af5 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19
#16 0x1948dc4c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#17 0x7f5c242bdd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
#18 0x7f5c242bde3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
#19 0x162b8028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x162b8028) (BuildId: 42cc14b09b8e1be640cc9d032c9c2bf606ecb0c4)
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler>
==955427==ABORTING
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.out
------ FAIL: 1 - CRASHED ydb/core/kqp/ut/cost
ydb/core/kqp/ut/query [size:medium] nchunks:50
------ [4/50] chunk ran 1 test (total:67.73s - test:67.67s)
[fail] KqpStats::SysViewClientLost [default-linux-x86_64-release-asan] (62.77s)
ydb/core/kqp/ut/query [size:medium] nchunks:50
------ [4/50] chunk ran 1 test (total:67.73s - test:67.67s)
[fail] KqpStats::SysViewClientLost [default-linux-x86_64-release-asan] (62.77s)
assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1949767B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1995C4FF
2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1903CBE8
3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x1904FBF7
4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1904FBF7
5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1904FBF7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x1904FBF7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x1904FBF7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19993525
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19993525
10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19993525
11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19963078
12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x1904ED7B
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19964945
14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1998DA9C
15. ??:0: ?? @ 0x7FEB904EAD8F
16. ??:0: ?? @ 0x7FEB904EAE3F
17. ??:0: ?? @ 0x1640D028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.out
------ FAIL: 4 - GOOD, 1 - FAIL ydb/core/kqp/ut/query
ydb/core/kqp/ut/tx [size:medium] nchunks:50
------ [0/50] chunk ran 1 test (total:24.46s - test:24.37s)
[fail] KqpSinkTx::OlapInvalidateOnError [default-linux-x86_64-release-asan] (19.49s)
assertion failed at ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017
, with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x18D46A4E
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x18D2569A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x18D2CB17
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D2CB17
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D2CB17
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D2CB17
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D2CB17
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x18D2BCE3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F6D33309D8F
18. ??:0: ?? @ 0x7F6D33309E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.out
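Editor's note: the "with diff:" strings in these reports are the unittest library's compact rendering of the two compared values: each parenthesised group holds left|right alternatives, shared text sits outside the groups, and in these messages the first alternative spells the actual value and the second the expected one, so (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) encodes BAD_REQUEST vs PRECONDITION_FAILED. A small decoder for reading them; it assumes a well-formed pattern whose literals contain no parentheses or bars, which holds for plain status names:

#include <cstdio>
#include <string>
#include <utility>

// Expands a unittest-style diff such as "(BAD_|P)RE(QUES|CONDI)T(|ION_FAILED)"
// into the two underlying strings (first = left alternatives, second = right).
static std::pair<std::string, std::string> Expand(const std::string& diff) {
    std::string a, b;
    for (std::size_t i = 0; i < diff.size();) {
        if (diff[i] == '(') {
            const std::size_t bar = diff.find('|', i);
            const std::size_t close = diff.find(')', bar);
            a += diff.substr(i + 1, bar - i - 1);       // left alternative  -> actual
            b += diff.substr(bar + 1, close - bar - 1); // right alternative -> expected
            i = close + 1;
        } else {
            a += diff[i];
            b += diff[i];
            ++i;
        }
    }
    return {a, b};
}

int main() {
    const auto [actual, expected] = Expand("(BAD_|P)RE(QUES|CONDI)T(|ION_FAILED)");
    std::printf("%s vs %s\n", actual.c_str(), expected.c_str()); // BAD_REQUEST vs PRECONDITION_FAILED
    return 0;
}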
------ [1/50] chunk ran 1 test (total:27.13s - test:27.07s)
[fail] KqpSnapshotIsolation::TConflictReadWriteOlap [default-linux-x86_64-release-asan] (18.91s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001
, with diff: (ABORT|SUCC)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18D7C798
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18D63E6A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F6D2C91ED8F
18. ??:0: ?? @ 0x7F6D2C91EE3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.out
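Editor's note: the generation counter printed above, 18446744073709551615, is exactly 2^64 - 1, i.e. a ui64 holding -1, which reads like an unset/sentinel lock generation rather than a real counter. A quick check of that identity:

#include <cstdint>
#include <iostream>
#include <limits>

int main() {
    // The generation counter printed in the error above:
    const std::uint64_t counter = 18446744073709551615ULL;
    std::cout << std::boolalpha
              << (counter == std::numeric_limits<std::uint64_t>::max()) << '\n' // true
              << (counter == static_cast<std::uint64_t>(-1)) << '\n';           // true
    return 0;
}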
------ [10/50] chunk ran 1 test (total:23.07s - test:23.03s)
[fail] KqpSnapshotIsolation::TSimpleOltpNoSink [default-linux-x86_64-release-asan] (17.75s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18D6BF57
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18D62F3A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F78999F4D8F
18. ??:0: ?? @ 0x7F78999F4E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.out
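Editor's note: nine of the eleven failures in ydb/core/kqp/ut/tx collapse to one shape: the test runs a SnapshotRW transaction against a row-oriented (OLTP) table, the server answers PRECONDITION_FAILED with the issue "SnapshotRW can only be used with olap tables.", and the test's (result.GetStatus() == EStatus::SUCCESS) check produces the (PRECONDITION_FAILED != SUCCESS) diff seen here. A stub of that assert-with-issues idiom; the EStatus values and issue text come from the log, while TResult, GetIssues and CheckStatus are illustrative stand-ins for the SDK and unittest machinery:

#include <iostream>
#include <string>

enum class EStatus { SUCCESS, ABORTED, PRECONDITION_FAILED, BAD_REQUEST };

// Stand-in for the SDK result object the tests inspect (GetStatus/GetIssues).
struct TResult {
    EStatus Status;
    std::string Issues;
    EStatus GetStatus() const { return Status; }
    const std::string& GetIssues() const { return Issues; }
};

static const char* ToString(EStatus s) {
    switch (s) {
        case EStatus::SUCCESS: return "SUCCESS";
        case EStatus::ABORTED: return "ABORTED";
        case EStatus::PRECONDITION_FAILED: return "PRECONDITION_FAILED";
        case EStatus::BAD_REQUEST: return "BAD_REQUEST";
    }
    return "?";
}

// Models the assert-with-context pattern: on mismatch, print both statuses
// plus the server issues, which is what the log's failure blocks show.
static bool CheckStatus(const TResult& result, EStatus expected) {
    if (result.GetStatus() != expected) {
        std::cerr << "(" << ToString(result.GetStatus()) << " != " << ToString(expected)
                  << ")\n: Error: " << result.GetIssues() << "\n";
        return false;
    }
    return true;
}

int main() {
    TResult fromRowTable{EStatus::PRECONDITION_FAILED,
                         "SnapshotRW can only be used with olap tables."};
    return CheckStatus(fromRowTable, EStatus::SUCCESS) ? 0 : 1;
}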
------ [2/50] chunk ran 1 test (total:22.35s - setup:0.01s test:22.31s)
[fail] KqpSnapshotIsolation::TConflictReadWriteOltp [default-linux-x86_64-release-asan] (17.12s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18D79E87
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18D63A12
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F135999ED8F
18. ??:0: ?? @ 0x7F135999EE3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.out
------ [3/50] chunk ran 1 test (total:24.59s - test:24.55s)
[fail] KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [default-linux-x86_64-release-asan] (18.67s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18D79E87
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18D63C3A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F4F8E6BED8F
18. ??:0: ?? @ 0x7F4F8E6BEE3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.out
------ [4/50] chunk ran 1 test (total:27.60s - test:27.55s)
[fail] KqpSnapshotIsolation::TConflictWriteOlap [default-linux-x86_64-release-asan] (19.97s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED)
, with diff: (SUCC|ABORT)E(SS|D)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18D74DF8
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18D637EA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F7C6EB24D8F
18. ??:0: ?? @ 0x7F7C6EB24E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.out
------ [5/50] chunk ran 1 test (total:23.10s - test:23.05s)
[fail] KqpSnapshotIsolation::TConflictWriteOltp [default-linux-x86_64-release-asan] (17.83s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18D724E7
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18D63392
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F2D8DEC3D8F
18. ??:0: ?? @ 0x7F2D8DEC3E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.out
------ [6/50] chunk ran 1 test (total:22.57s - test:22.53s)
[fail] KqpSnapshotIsolation::TConflictWriteOltpNoSink [default-linux-x86_64-release-asan] (16.97s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18D724E7
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18D635BA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F0617BA9D8F
18. ??:0: ?? @ 0x7F0617BA9E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.out
------ [7/50] chunk ran 1 test (total:22.49s - test:22.46s)
[fail] KqpSnapshotIsolation::TReadOnlyOltp [default-linux-x86_64-release-asan] (17.43s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18D81843
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18D64092
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F069910DD8F
18. ??:0: ?? @ 0x7F069910DE3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.out
------ [8/50] chunk ran 1 test (total:23.56s - test:23.53s)
[fail] KqpSnapshotIsolation::TReadOnlyOltpNoSink [default-linux-x86_64-release-asan] (18.78s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18D81843
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18D642BA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F08EB0AED8F
18. ??:0: ?? @ 0x7F08EB0AEE3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.out
------ [9/50] chunk ran 1 test (total:23.00s - test:22.96s)
[fail] KqpSnapshotIsolation::TSimpleOltp [default-linux-x86_64-release-asan] (17.78s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18D6BF57
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18D62D12
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F89D364CD8F
18. ??:0: ?? @ 0x7F89D364CE3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.out
------ FAIL: 11 - FAIL ydb/core/kqp/ut/tx
ydb/core/statistics/aggregator/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:589.10s - test:589.06s)
[fail] AnalyzeColumnshard::AnalyzeRebootColumnShard [default-linux-x86_64-release-asan] (584.43s)
(TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.
ydb/library/actors/testlib/test_runtime.cpp:716:
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
TWithBackTrace::TWithBackTrace<>() at /-S/util/generic/bt_exception.h:16:5
NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) at /-S/ydb/library/actors/testlib/test_runtime.h:0:24
DoDestroy at /-S/util/generic/ptr.h:237:13
operator() at /-S/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.out
------ FAIL: 1 - FAIL ydb/core/statistics/aggregator/ut
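Editor's note: AnalyzeRebootColumnShard does not fail an assertion; GrabEdgeEventRethrow gives up after the test runtime has dispatched 100000 events without observing TEvAnalyzeResponse and rethrows TSchedulingLimitReachedException. A stripped-down model of that guard under assumed plumbing; only the 100000 figure and the exception behavior come from the log, the callable-based interface is an illustrative simplification of the actor test runtime:

#include <cstdint>
#include <stdexcept>

// Toy model of the dispatch guard behind GrabEdgeEventRethrow in
// ydb/library/actors/testlib: pump scheduled events until the wanted edge
// event shows up, but give up once a dispatch limit is exceeded.
struct TSchedulingLimitReached : std::runtime_error {
    using std::runtime_error::runtime_error;
};

template <typename TIsWanted, typename TPumpOne>
void WaitForEdgeEvent(TIsWanted isWanted, TPumpOne pumpOne, std::uint64_t limit = 100000) {
    for (std::uint64_t processed = 0; processed < limit; ++processed) {
        if (isWanted(pumpOne())) {
            return; // edge event observed in time
        }
    }
    throw TSchedulingLimitReached("TestActorRuntime Processed over 100000 events.");
}

int main() {
    try {
        int counter = 0;
        // The wanted event (TEvAnalyzeResponse in the failing test) never arrives:
        WaitForEdgeEvent([](int) { return false; }, [&counter] { return ++counter; });
    } catch (const TSchedulingLimitReached&) {
        return 1; // surfaces as the [fail] verdict above
    }
    return 0;
}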
ydb/core/tx/datashard/ut_incremental_backup [size:medium] nchunks:4
------ [0/4] chunk ran 1 test (total:161.73s - test:161.65s)
[fail] IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [default-linux-x86_64-release-asan] (141.93s)
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS)
Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT
, with diff: (TIM|SUCC)E(OUT|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode) at /-S/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:0:5
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.out
------ FAIL: 1 - FAIL ydb/core/tx/datashard/ut_incremental_backup
ydb/core/tx/tiering/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:34.91s - test:34.88s)
[crashed] ColumnShardTiers::TTLUsage [default-linux-x86_64-release-asan] (0.00s)
Test crashed (return code: -6)
See logs for more info
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.out
------ FAIL: 1 - CRASHED ydb/core/tx/tiering/ut
ydb/services/ydb/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:24.27s - test:24.22s)
[fail] YdbLogStore::AlterLogTable [default-linux-x86_64-release-asan] (4.29s)
assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out
------ FAIL: 1 - FAIL ydb/services/ydb/ut
Total 31 suites:
    17 - GOOD
    9 - FAIL
    5 - TIMEOUT
Total 131 tests:
    96 - GOOD
    23 - FAIL
    4 - NOT_LAUNCHED
    5 - TIMEOUT
    1 - SKIPPED
    2 - CRASHED
Cache efficiency ratio is 98.40% (37143 of 37747). Local: 365 (0.97%), dist: 0 (0.00%), by dynamic uids: 0 (0.00%), avoided: 36778 (97.43%)
Dist cache download: count=0, size=0 bytes, speed=0.0 bytes/s
Disk usage for tools/sdk 3.64 GiB
Additional disk space consumed for build cache 0 bytes
Critical path:
[625681 ms] [TM] [rnd-2734725143485386979 asan default-linux-x86_64 release]: ydb/tests/functional/tpc/medium/py3test [started: 0 (1743156629992), finished: 625681 (1743157255673)]
[ 1115 ms] [TA] [rnd-q642uazenime3p73]: $(BUILD_ROOT)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log} [started: 625716 (1743157255708), finished: 626831 (1743157256823)]
Time from start: 683699.5378417969 ms, time elapsed by graph 626796 ms, time diff 56903.537841796875 ms.
The longest 10 tasks:
[625740 ms] [TM] [rnd-xcu8he865kdgqnf0 asan default-linux-x86_64 release]: ydb/tests/olap/column_family/compression/py3test [started: 1743156629929, finished: 1743157255669]
[625681 ms] [TM] [rnd-2734725143485386979 asan default-linux-x86_64 release]: ydb/tests/functional/tpc/medium/py3test [started: 1743156629992, finished: 1743157255673]
[624614 ms] [TM] [rnd-ip0s7ehkaehssz6v asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 1743156629935, finished: 1743157254549]
[621487 ms] [TM] [rnd-3vidnqw09e871lqn asan default-linux-x86_64 release]: ydb/tests/olap/s3_import/py3test [started: 1743156629864, finished: 1743157251351]
[616037 ms] [TM] [rnd-16129891960037657631 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1743156599193, finished: 1743157215230]
[610829 ms] [TM] [rnd-16096758546993925239 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1743156599188, finished: 1743157210017]
[594214 ms] [TM] [rnd-1391031817487107967 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1743156619336, finished: 1743157213550]
[592012 ms] [TM] [rnd-6255642218176589068 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1743156619441, finished: 1743157211453]
[589559 ms] [TM] [rnd-16820436337035822903 asan default-linux-x86_64 release]: ydb/core/statistics/aggregator/ut/unittest [started: 1743156608960, finished: 1743157198519]
[428264 ms] [TM] [rnd-16321895843852111702 asan default-linux-x86_64 release]: ydb/core/tx/columnshard/ut_rw/unittest [started: 1743156589511, finished: 1743157017775]
Total time by type:
[9945390 ms] [TM] [count: 583, ave time 17058.99 msec]
[ 80360 ms] [prepare:get from local cache] [count: 365, ave time 220.16 msec]
[ 27702 ms] [prepare:AC] [count: 2, ave time 13851.00 msec]
[ 21229 ms] [prepare:put to dist cache] [count: 360, ave time 58.97 msec]
[ 10779 ms] [TA] [count: 21, ave time 513.29 msec]
[ 5803 ms] [prepare:bazel-store] [count: 1, ave time 5803.00 msec]
[ 5724 ms] [prepare:tools] [count: 16, ave time 357.75 msec]
[ 544 ms] [prepare:clean] [count: 3, ave time 181.33 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 9956169 ms (100.00%)
Total run tasks time - 9956169 ms
Configure time - 25.1 s
Statistics overhead 1390 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_2/report.json
Ok
+ echo 0
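Editor's note: the headline numbers of the try_2 report are internally consistent and easy to re-derive: 37143 of 37747 results came from a cache (98.40%), 36778 of those by avoidance (97.43%), and the wall-clock overhead is the difference between time-from-start and graph time. A quick check of that arithmetic:

#include <cstdio>

int main() {
    // Figures from the try_2 report above.
    const double total = 37747, cached = 37143, local = 365, avoided = 36778;
    std::printf("cache efficiency: %.2f%%\n", 100.0 * cached / total);  // 98.40%
    std::printf("local: %.2f%%, avoided: %.2f%%\n",
                100.0 * local / total, 100.0 * avoided / total);        // 0.97%, 97.43%
    std::printf("time diff: %.3f ms\n", 683699.5378417969 - 626796.0);  // 56903.538 ms
    return 0;
}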
+ ./ya make . -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.zjCvduAmFp --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -X --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/ya_evlog.jsonl --junit /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/junit.xml --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring dependencies for platform default-linux-x86_64-release-asan
Configuring dependencies for platform tools
Configuring dependencies for platform test_tool_tc1-global
Configuring tests execution
Configuring local and dist store caches
Configuration done. Preparing for execution
| 1.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut | 2.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut | 2.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut | 3.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap | 4.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes | 5.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large | 5.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut | 5.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut | 6.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical | 5.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/config/ydb-tests-functional-config | 5.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap | 6.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut | 6.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl | 6.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots | 7.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table | 7.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut | 8.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut | 9.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |10.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |11.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |12.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |13.0%| [LD] {BAZEL_UPLOAD, SKIPPED}
$(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |13.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |13.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql |11.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |11.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |11.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |11.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |12.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |12.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |12.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |12.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |12.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |12.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |12.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |12.9%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |13.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |13.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |13.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |13.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ydb-tests-olap |13.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |13.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |14.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |14.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |14.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |14.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |14.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |14.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |14.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |14.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |15.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |15.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |15.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |15.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |15.5%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |15.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |15.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |16.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |16.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |16.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |16.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |16.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |16.6%| PREPARE $(PYTHON) |16.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |16.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |17.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |16.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |16.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |17.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |17.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |18.0%| PREPARE $(OS_SDK_ROOT-sbr:243881345) |18.2%| PREPARE $(CLANG16-1380963495) |18.3%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |18.5%| PREPARE $(GDB) |18.6%| PREPARE $(JDK17-472926544) |18.7%| PREPARE $(JDK_DEFAULT-472926544) |18.8%| PREPARE $(WITH_JDK-sbr:7832760150) |18.9%| PREPARE $(WITH_JDK17-sbr:7832760150) |19.0%| PREPARE $(CLANG_FORMAT-2212207123) |19.2%| PREPARE $(CLANG-2518231432) |19.3%| PREPARE $(CLANG-1922233694) |19.4%| PREPARE $(FLAKE8_PY3-715603131) |19.5%| PREPARE $(YMAKE_PYTHON3-4256832079) |19.6%| PREPARE $(CLANG18-3363451693) |19.9%| PREPARE $(TEST_TOOL_HOST-sbr:8330113388) |20.1%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |20.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |21.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |21.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |22.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |22.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |22.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |22.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |23.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |23.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |23.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |24.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |24.9%| PREPARE $(LLD_ROOT-3808007503) |24.9%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |26.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example |26.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |26.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |26.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |26.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |27.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |27.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |27.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun |27.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |27.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |27.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |27.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |27.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |28.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |28.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |28.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |28.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |28.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |28.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |28.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |28.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |29.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |29.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |29.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |29.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |29.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |29.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |29.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |29.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |29.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |30.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |30.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |30.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |30.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |30.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |30.7%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |30.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |30.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |31.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |31.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut |31.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |31.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |31.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |31.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |31.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut |31.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |30.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |31.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |31.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |32.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |33.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |33.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |33.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |33.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |33.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |34.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |34.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |34.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |34.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |34.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |36.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |36.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |36.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |37.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |37.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |37.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |37.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |37.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |37.7%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |37.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |38.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |38.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |38.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |38.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |38.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |38.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |38.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |38.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |38.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |38.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |39.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |39.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |39.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |39.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |39.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |39.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |39.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |39.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |39.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |40.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |40.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |40.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |40.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |40.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |40.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |40.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |40.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |40.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |41.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |41.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |40.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |41.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |41.4%| 
[LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |41.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |41.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |42.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |42.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |42.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut |42.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |42.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |42.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |43.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |43.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |44.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |44.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |44.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |44.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |44.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |44.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |44.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |44.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |43.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |44.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |44.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |44.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |44.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |45.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |45.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |45.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |45.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |45.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |45.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |46.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |46.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |46.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |47.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |46.1%| 
[LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |46.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |46.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |46.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |46.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |47.0%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |47.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |47.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |47.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |47.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |47.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |47.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |48.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |48.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |48.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |49.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |49.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |49.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |49.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |48.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |48.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |48.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |48.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |49.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |49.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |49.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |50.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |50.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |50.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |50.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |50.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |50.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |51.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |51.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |51.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |51.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |51.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |51.4%| CLEANING SYMRES |51.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |51.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |51.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |52.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |52.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |52.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |52.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |52.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |52.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |52.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |53.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |53.1%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |53.2%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |53.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |53.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |53.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |53.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |53.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |53.7%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |53.9%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |54.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |54.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |54.5%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |54.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |54.7%| [LD] {RESULT} $(B)/ydb/tests/functional/ydb_cli/ydb-tests-functional-ydb_cli |54.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |54.9%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |55.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |55.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |55.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |55.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |55.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |55.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |55.6%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |55.8%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |56.0%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |56.3%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |55.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |55.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |55.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |55.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |55.9%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |56.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |56.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |56.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |56.6%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |56.8%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |56.9%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |57.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |57.2%| [LD] {RESULT} $(B)/ydb/tests/functional/compatibility/ydb-tests-functional-compatibility |57.3%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |57.4%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |57.5%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |57.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |57.7%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |57.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |57.9%| [AR] {RESULT} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |58.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |58.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |58.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |58.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |58.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |58.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |58.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |58.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/spilling/ydb-core-kqp-ut-spilling |59.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |59.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |59.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |58.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |58.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |58.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |58.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/docs/generator/generator |59.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |59.4%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |59.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |59.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |59.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |59.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |60.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |60.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |60.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |60.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |60.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |60.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |61.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |61.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |61.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |61.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |61.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |61.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |62.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |61.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |61.1%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |61.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |61.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |61.6%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |61.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |62.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |62.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool 
|62.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |62.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |62.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_prefix_kmeans/ydb-core-tx-datashard-ut_prefix_kmeans |62.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |62.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |63.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut |63.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |63.3%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |63.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |63.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |63.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |63.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |63.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |63.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |63.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |63.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |64.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |64.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |64.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |64.4%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |64.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |64.6%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |64.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |64.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |65.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |65.3%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |65.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |65.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |65.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |66.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |66.1%| [LD] {RESULT} 
$(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |66.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |63.3%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |63.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |63.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |63.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |63.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |63.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |64.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |64.1%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |64.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |64.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |64.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |64.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |65.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |65.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |65.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |65.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |65.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |65.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |65.7%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |65.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |66.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |66.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |66.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |66.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |66.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |66.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |66.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |66.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |67.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |67.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |67.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence >> TKeyValueTracingTest::ReadHuge |56.2%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |56.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |56.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |56.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |56.5%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |56.5%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/medium/ydb-tests-functional-tpc-medium |56.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest |56.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |56.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |56.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |56.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap >> TKeyValueTracingTest::ReadSmall |55.1%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |55.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |55.3%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |55.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |55.4%| [LD] {RESULT} $(B)/ydb/tests/functional/canonical/ydb-tests-functional-canonical |55.5%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/ut/ydb-core-ymq-actor-ut |55.5%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |55.6%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |55.7%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |55.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |55.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |55.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |55.9%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |56.0%| [LD] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/ydb-tests-olap-ttl_tiering |56.2%| COMPACTING CACHE 889.2GiB |56.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |56.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |56.4%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |56.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |56.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |56.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |56.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |56.7%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |56.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |56.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> TKeyValueTracingTest::WriteSmall >> TKeyValueTracingTest::WriteHuge >> KqpSnapshotIsolation::TConflictWriteOltp |56.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |57.0%| [TM] {asan, 
default-linux-x86_64, release} ydb/services/ydb/ut/unittest |57.0%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |57.1%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |57.2%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |57.2%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |57.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |57.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |57.4%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |57.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |57.6%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |57.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |57.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |57.8%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |57.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |57.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |58.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |58.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |58.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |58.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |58.3%| [LD] {RESULT} $(B)/ydb/tests/olap/docs/generator/generator |58.3%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |58.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reshuffle_kmeans/ydb-core-tx-datashard-ut_reshuffle_kmeans |58.5%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |58.5%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |58.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_transfer_writer/core-tx-replication-service-ut_transfer_writer |58.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |58.7%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |58.8%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |58.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |58.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |59.0%| [LD] {RESULT} $(B)/ydb/tests/fq/multi_plane/ydb-tests-fq-multi_plane |59.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |59.1%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |59.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |59.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |59.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |59.4%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |59.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |59.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |59.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |59.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_build_index/ydb-core-tx-datashard-ut_build_index |59.8%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |59.8%| [LD] {RESULT} 
$(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |59.9%| [LD] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |60.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |60.0%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |60.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |60.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |60.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |60.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |60.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |60.4%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay |60.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |60.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |60.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |60.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |60.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |60.8%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |60.9%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |61.0%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |61.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |61.1%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |61.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/multinode/ydb-tests-functional-sqs-multinode |61.3%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/streaming/ydb-tests-fq-generic-streaming |61.3%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |61.4%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |61.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |61.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |61.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |61.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |61.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |61.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |61.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |61.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |62.0%| [LD] {RESULT} $(B)/ydb/tests/functional/autoconfig/ydb-tests-functional-autoconfig |62.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |62.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure/ydb-core-tx-schemeshard-ut_data_erasure |62.2%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql |62.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |62.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |62.4%| [LD] {RESULT} 
$(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |62.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |62.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |62.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |62.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |62.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |62.8%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |62.9%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |63.0%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |63.0%| [LD] {RESULT} $(B)/ydb/tests/olap/s3_import/ydb-tests-olap-s3_import |63.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |63.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |63.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |63.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |63.4%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |63.4%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |63.5%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_pool_ut/ydb-services-ydb-sdk_sessions_pool_ut |63.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |63.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |63.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |63.8%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |63.8%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |63.9%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |64.0%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |64.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |64.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |64.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |64.2%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |64.3%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |64.4%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |64.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |64.5%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |64.6%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |64.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |64.7%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |64.8%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |64.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |64.9%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |65.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |65.1%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |65.1%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |65.2%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |65.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |65.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |65.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |65.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |65.5%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |65.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |65.7%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |65.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |65.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |65.9%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |65.9%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/common/ydb-tests-functional-sqs-common |66.0%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |66.1%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |66.1%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |66.2%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |66.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |66.3%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |66.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |66.5%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |66.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |66.6%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/fifo/functional-sqs-merge_split_common_table-fifo |66.7%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |66.8%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |66.8%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |66.9%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |67.0%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |67.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |67.1%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |67.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |67.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |67.3%| [LD] {RESULT} $(B)/ydb/tests/fq/s3/ydb-tests-fq-s3 |67.4%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |67.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |67.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |67.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |67.6%| [LD] {RESULT} 
$(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |67.7%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |67.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |67.8%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |67.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |68.0%| [LD] {RESULT} $(B)/ydb/core/tx/locks/ut_range_treap/ydb-core-tx-locks-ut_range_treap |68.0%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |68.1%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut/ydb-core-cms-console-ut |68.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |68.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |68.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |68.4%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |68.5%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |68.5%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |68.6%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |68.7%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |68.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |68.8%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |68.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |68.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |69.0%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |69.1%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |69.1%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |69.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |69.3%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |69.3%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |69.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |69.5%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |69.5%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |69.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |69.7%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |69.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |69.8%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/large/ydb-tests-functional-sqs-large |69.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |70.0%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |70.0%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |70.1%| [LD] {RESULT} $(B)/ydb/tests/fq/mem_alloc/ydb-tests-fq-mem_alloc |70.2%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |70.2%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |70.3%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |70.4%| [LD] {RESULT} 
$(B)/ydb/tests/olap/ydb-tests-olap |70.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |70.5%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |70.6%| [LD] {RESULT} $(B)/ydb/tests/fq/generic/analytics/ydb-tests-fq-generic-analytics |70.6%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |70.7%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |70.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |70.8%| [LD] {RESULT} $(B)/ydb/tests/fq/plans/ydb-tests-fq-plans |70.9%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |71.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |71.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |71.1%| [LD] {RESULT} $(B)/ydb/services/ydb/sdk_sessions_ut/ydb-services-ydb-sdk_sessions_ut |71.2%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |71.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |71.3%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |71.4%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |71.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |71.5%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |71.6%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |71.7%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |71.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |71.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |71.9%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |71.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |72.0%| [LD] {RESULT} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |72.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |72.1%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |72.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |72.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |72.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sample_k/ydb-core-tx-datashard-ut_sample_k |72.4%| [LD] {RESULT} $(B)/ydb/tests/functional/tenants/ydb-tests-functional-tenants |72.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |72.5%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |72.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |72.7%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |72.7%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |72.8%| [LD] {RESULT} $(B)/ydb/tests/postgres_integrations/go-libpq/ydb-tests-postgres_integrations-go-libpq |72.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |72.9%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |73.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |73.1%| 
[LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |73.1%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |73.2%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |73.3%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |73.4%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |73.4%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |73.5%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |73.6%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |73.6%| [LD] {RESULT} $(B)/ydb/tests/stress/log/tests/ydb-tests-stress-log-tests |73.7%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun |73.8%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |73.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_local_kmeans/ydb-core-tx-datashard-ut_local_kmeans |73.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |74.0%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |74.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |74.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |74.2%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |74.2%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |74.3%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |74.4%| [LD] {RESULT} $(B)/ydb/tests/fq/yds/ydb-tests-fq-yds |74.4%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |74.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |74.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |74.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |74.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |74.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |74.8%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |74.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_data_cleanup/ydb-core-tx-datashard-ut_data_cleanup |75.0%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |75.1%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |75.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |75.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |75.3%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |75.3%| [LD] {RESULT} $(B)/ydb/tests/functional/tpc/large/ydb-tests-functional-tpc-large |75.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |75.5%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |75.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |75.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |75.7%| [LD] {RESULT} 
$(B)/ydb/tests/functional/config/ydb-tests-functional-config |75.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |75.8%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |75.9%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |75.9%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |76.0%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |76.1%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |76.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |76.2%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |76.3%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |76.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |76.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |76.5%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |76.5%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |76.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |76.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |76.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |76.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |76.9%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut |77.0%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |77.0%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |77.1%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |77.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |77.2%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |77.3%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |77.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |77.4%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |77.5%| [LD] {RESULT} $(B)/ydb/tests/olap/load/ydb-tests-olap-load |77.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |77.6%| [LD] {RESULT} $(B)/ydb/tests/sql/large/ydb-tests-sql-large |77.7%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |77.8%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |77.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_data_erasure_reboots/ydb-core-tx-schemeshard-ut_data_erasure_reboots |77.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |78.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |78.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.5%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |78.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |78.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |79.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> TKeyValueTracingTest::ReadHuge [FAIL] |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> TKeyValueTracingTest::WriteSmall [FAIL] |80.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> TKeyValueTracingTest::ReadSmall [FAIL] >> TKeyValueTracingTest::WriteHuge [FAIL] |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest 
|81.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/statistics/aggregator/ut/unittest |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |84.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [FAIL]
Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture()+28 (0x1011113C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105CCFD0)
TestOneRead(TBasicString>, TBasicString>)+4828 (0xFD595EC)
NTestSuiteTKeyValueTracingTest::TTestCaseReadSmall::Execute_(NUnitTest::TTestContext&)+318 (0xFD5FDCE)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFD73998)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x105FAF36)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105D3B49)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD72844)
NUnitTest::TTestFactory::Execute()+2438 (0x105D5416)
NUnitTest::RunMain(int, char**)+5213 (0x105F54AD)
??+0 (0x7F3D9D476D90)
__libc_start_main+128 (0x7F3D9D476E40)
_start+41 (0xD6F6029)
|84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |84.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [FAIL]
Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture()+28 (0x1011113C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105CCFD0)
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFD53C1D)
NTestSuiteTKeyValueTracingTest::TTestCaseWriteHuge::Execute_(NUnitTest::TTestContext&)+216 (0xFD5FA58)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFD73998)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x105FAF36)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105D3B49)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD72844)
NUnitTest::TTestFactory::Execute()+2438 (0x105D5416)
NUnitTest::RunMain(int, char**)+5213 (0x105F54AD)
??+0 (0x7F83D54F2D90)
__libc_start_main+128 (0x7F83D54F2E40)
_start+41 (0xD6F6029)
|85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [FAIL]
Test command err: equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1
TBackTrace::Capture()+28 (0x1011113C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105CCFD0)
TestOneRead(TBasicString>, TBasicString>)+4828 (0xFD595EC)
NTestSuiteTKeyValueTracingTest::TTestCaseReadHuge::Execute_(NUnitTest::TTestContext&)+318 (0xFD601BE)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFD73998)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x105FAF36)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105D3B49)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD72844)
NUnitTest::TTestFactory::Execute()+2438 (0x105D5416)
NUnitTest::RunMain(int, char**)+5213 (0x105F54AD)
??+0 (0x7EFF5083AD90)
__libc_start_main+128 (0x7EFF5083AE40)
_start+41 (0xD6F6029)
|85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |85.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest
------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [FAIL]
Test command err: assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1)
TBackTrace::Capture()+28 (0x1011113C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x105CCFD0)
TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&)+4253 (0xFD53C1D)
NTestSuiteTKeyValueTracingTest::TTestCaseWriteSmall::Execute_(NUnitTest::TTestContext&)+216 (0xFD5F748)
std::__y1::__function::__func, void ()>::operator()()+280 (0xFD73998)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x105FAF36)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x105D3B49)
NTestSuiteTKeyValueTracingTest::TCurrentTest::Execute()+1204 (0xFD72844)
NUnitTest::TTestFactory::Execute()+2438 (0x105D5416)
NUnitTest::RunMain(int, char**)+5213 (0x105F54AD)
??+0 (0x7F0C46265D90)
__libc_start_main+128 (0x7F0C46265E40)
_start+41 (0xD6F6029)
|86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow |87.4%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/tiering/ut/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> KqpSinkTx::OlapInvalidateOnError |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest 
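The four TKeyValueTracingTest failures reported above share one root cause: TestOneRead/TestOneWrite in ydb/core/keyvalue/keyvalue_ut_trace.cpp pin the number of traces received by the test's stub Wilson uploader to exactly one, and the KeyValue tablet delivered two (the write variants print the diff "(2 != 1)"). A minimal C++ sketch of the failing check, assuming the library/cpp/testing/unittest macros; TTestEnv and its WilsonUploader member are the fixtures named in the backtraces, their exact types are assumed here:

    // Sketch only: everything not named in the backtraces above is an assumption.
    void TestOneWrite(TString key, TVector<TString>&& values) {
        TTestEnv env;  // hypothetical fixture owning a stub Wilson trace uploader
        // ... issue a single traced KeyValue write of `key`/`values` here ...
        // keyvalue_ut_trace.cpp:103 - fails as "(2 != 1)" when a second span arrives:
        UNIT_ASSERT_VALUES_EQUAL(env.WilsonUploader->Traces.size(), 1);
    }

Since the diff shows an extra trace rather than a missing one, the tracing instrumentation is emitting one more span than the test expects.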
|93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |93.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable >> KqpSnapshotIsolation::TConflictReadWriteOltp |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltpNoSink >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink >> KqpSnapshotIsolation::TSimpleOltp >> KqpSnapshotIsolation::TReadOnlyOltp >> KqpSnapshotIsolation::TConflictWriteOlap |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.9%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOltpNoSink |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOltp [FAIL] Test command err: Trying to start YDB, gRPC: 24845, MsgBus: 7161 2025-03-28T10:22:44.313429Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486803872366815464:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:44.313764Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/000176/r3tmp/tmpTf13fG/pdisk_1.dat 2025-03-28T10:22:44.643671Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24845, node 1 2025-03-28T10:22:44.697751Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:44.697881Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:44.699956Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:22:44.705482Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:22:44.705501Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:22:44.705506Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:22:44.705621Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:7161 TClient is connected to server localhost:7161 WaitRootIsUp 'Root'... 
TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:22:45.162356Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:22:46.774443Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803880956750719:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:46.774611Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803880956750724:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:46.774732Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:46.778383Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:22:46.787085Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486803880956750733:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:22:46.888293Z node 1 :TX_PROXY ERROR: Actor# [1:7486803880956750784:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:22:47.119403Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:22:47.257479Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T10:22:48.123108Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:22:49.294174Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MWRmMTlkMC1hMDMwMTZmYi0xYWNmNmUwNS05NGNhNjIzNA==, ActorId: [1:7486803893841661111:2968], ActorState: ExecuteState, TraceId: 01jqe4d6bp3jv9zk4fad3zby8k, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T10:22:49.313706Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486803872366815464:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:49.313793Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18D724E7
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18D63392
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F96B9494D8F
18. ??:0: ?? @ 0x7F96B9494E3F
19. ??:0: ?? @ 0x16366028
>> YdbLogStore::AlterLogTable [FAIL] >> test_workload.py::TestYdbLogWorkload::test[column] >> test_tpch.py::TestTpchS1::test_tpch[21] >> test_tpch_import.py::TestS3TpchImport::test_import_and_export >> alter_compression.py::TestAlterCompression::test_all_supported_compression >> test_alter_tiering.py::TestAlterTiering::test[many_tables] >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow
Test command err: Trying to start YDB, gRPC: 63924, MsgBus: 2000 2025-03-28T10:22:49.588308Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486803892097699555:2065];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:49.590166Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/00020d/r3tmp/tmpOgok9w/pdisk_1.dat 2025-03-28T10:22:49.962595Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 63924, node 1 2025-03-28T10:22:49.982143Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:49.982288Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:49.983981Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:22:49.999713Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:22:49.999751Z node 1 
:NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:22:49.999759Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:22:49.999897Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:2000 TClient is connected to server localhost:2000 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:22:50.473489Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:22:50.502226Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:22:50.613370Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:22:50.758466Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:22:50.829559Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:22:52.453023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803904982603210:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:52.453163Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:52.707575Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:22:52.737777Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480 2025-03-28T10:22:52.765943Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480 2025-03-28T10:22:52.790936Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480 2025-03-28T10:22:52.815313Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480 2025-03-28T10:22:52.884637Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480 2025-03-28T10:22:52.963025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803904982603730:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:52.963120Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:52.963298Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803904982603735:2459], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:52.967139Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480 2025-03-28T10:22:52.976947Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486803904982603737:2460], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710668 completed, doublechecking } 2025-03-28T10:22:53.066738Z node 1 :TX_PROXY ERROR: Actor# [1:7486803909277571087:3450] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:22:53.829813Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710671:0, at schemeshard: 72057594046644480 2025-03-28T10:22:53.950450Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7486803909277571513:2505];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:22:53.950451Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486803909277571537:2510];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:22:53.950649Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486803909277571537:2510];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:22:53.950752Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7486803909277571513:2505];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:22:53.951044Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486803909277571537:2510];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:22:53.951082Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7486803909277571513:2505];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:22:53.951199Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486803909277571537:2510];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:22:53.951275Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7486803909277571513:2505];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:22:53.951345Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486803909277571537:2510];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:22:53.951421Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7486803909277571513:2505];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:22:53.951477Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486803909277571537:2510];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 
2025-03-28T10:22:53.951534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7486803909277571513:2505];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:22:53.951588Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486803909277571537:2510];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:22:53.951634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7486803909277571513:2505];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:22:53.951695Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037926;self_id=[1:7486803909277571537:2510];tablet_id=72075186224037926;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:22:53.951735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037921;self_id=[1:7486803909277571513:2505];tablet_id=72075186224037921;process=TTxInitSchema::Execute;fline=abstract.c ... RN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T10:22:54.168100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T10:22:54.168278Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T10:22:54.168310Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T10:22:54.168414Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T10:22:54.168465Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T10:22:54.168526Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T10:22:54.168553Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T10:22:54.168589Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T10:22:54.168617Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T10:22:54.169096Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestorePortionFromChunks;id=10; 2025-03-28T10:22:54.169143Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2025-03-28T10:22:54.169348Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2025-03-28T10:22:54.169387Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=SyncMinSnapshotFromChunks; 2025-03-28T10:22:54.169529Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncMinSnapshotFromChunks;id=13; 2025-03-28T10:22:54.169595Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2025-03-28T10:22:54.169772Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2025-03-28T10:22:54.169805Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2025-03-28T10:22:54.169926Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2025-03-28T10:22:54.169953Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037925;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2025-03-28T10:22:54.198894Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T10:22:54.199164Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T10:22:54.205140Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037922;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T10:22:54.205148Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037920;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T10:22:54.211212Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T10:22:54.211563Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037921;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T10:22:54.217351Z node 1 :TX_COLUMNSHARD_TX WARN: 
tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T10:22:54.217378Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T10:22:54.223478Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T10:22:54.223479Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037926;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710671;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710671; 2025-03-28T10:22:54.337610Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;self_id=[1:7486803909277571533:2508];ev=NKikimr::TEvTxProcessing::TEvReadSetAck;tablet_id=72075186224037923;local_tx_no=12;method=execute;tx_info=;fline=primary.h:138;event=ack_tablet_duplication;wait=72075186224037928;receive=72075186224037927; 2025-03-28T10:22:54.337648Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-28T10:22:54.338129Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-28T10:22:54.338388Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=281474976710673;tx_id=281474976710673;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710673; 2025-03-28T10:22:54.403291Z node 1 :TX_COLUMNSHARD_TX WARN: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=281474976710675;tx_id=281474976710675;fline=tx_controller.cpp:211;event=finished_tx;tx_id=281474976710675; query_phases { duration_us: 6200 cpu_time_us: 2192 affected_shards: 1 } query_phases { duration_us: 5685 cpu_time_us: 317 affected_shards: 1 } compilation { duration_us: 45155 cpu_time_us: 42776 } process_cpu_time_us: 613 total_duration_us: 59445 total_cpu_time_us: 45898 AddressSanitizer:DEADLYSIGNAL ================================================================= ==996072==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018b1ca4d bp 0x7ffdb5dc6b60 sp 0x7ffdb5dc69c0 T0) ==996072==The signal is caused by a READ memory access. ==996072==Hint: address points to the zero page. 
2025-03-28T10:22:54.588629Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486803892097699555:2065];send_to=[0:7307199536658146131:7762515];
2025-03-28T10:22:54.588753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
    #0 0x18b1ca4d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31
    #1 0x18b1ca4d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32
    #2 0x18b1ca4d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31
    #3 0x18b1ca4d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10
    #4 0x18b1ca4d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13
    #5 0x18b415d7 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
    #6 0x18b415d7 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
    #7 0x18b415d7 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
    #8 0x18b415d7 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171:12
    #9 0x18b415d7 in std::__y1::__function::__func, void ()>::operator()() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313:10
    #10 0x194936d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
    #11 0x194936d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
    #12 0x194936d5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
    #13 0x19463228 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
    #14 0x18b40483 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
    #15 0x19464af5 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19
    #16 0x1948dc4c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
    #17 0x7f2568a7cd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
    #18 0x7f2568a7ce3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
    #19 0x162b8028 in _start (/home/runner/.ya/build/build_root/all7/00020d/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x162b8028) (BuildId: 42cc14b09b8e1be640cc9d032c9c2bf606ecb0c4)
AddressSanitizer can not provide additional info.
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler>
==996072==ABORTING
|97.1%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log}
|97.1%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ...
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/ut/unittest >> YdbLogStore::AlterLogTable [FAIL] Test command err: 2025-03-28T10:22:53.752114Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486803911661569218:2074];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:53.752264Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/000269/r3tmp/tmpXQuDmI/pdisk_1.dat 2025-03-28T10:22:54.095730Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:22:54.141574Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:54.141652Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:54.144322Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 21813, node 1 2025-03-28T10:22:54.203445Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:22:54.203462Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:22:54.203468Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:22:54.203559Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21523 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:22:54.420563Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
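In the YdbLogStore::AlterLogTable failure that unfolds below, schemeshard rejects ESchemeOpCreateColumnStore with StatusPreconditionFailed ("Column stores are not supported"), and the test nevertheless asserts SUCCESS. The unittest diff notation that follows prints shared characters outside parentheses and (actual|expected) alternatives inside, so "(PRE|SUC)C(ONDITION_FAIL|)E(D|SS)" reads as PRECONDITION_FAILED versus SUCCESS. A hedged sketch of a status guard for configurations where column stores are disabled; `res` stands for the CreateLogStore result the test already holds, and whether skipping is the right policy here is a test-owner decision, not a given:

    // Sketch: tolerate builds where column stores are compiled out or disabled.
    if (res.GetStatus() == EStatus::PRECONDITION_FAILED) {
        return; // "Column stores are not supported" on this configuration; skip the case
    }
    UNIT_ASSERT_VALUES_EQUAL_C(res.GetStatus(), EStatus::SUCCESS, res.GetIssues().ToString());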
2025-03-28T10:22:54.595149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "LogStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_type" Type: "Utf8" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" NotNull: true } Columns { Name: "uid" Type: "Utf8" NotNull: true } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" } Columns { Name: "json_payload" Type: "JsonDocument" } Columns { Name: "request_id" Type: "Utf8" } Columns { Name: "ingested_at" Type: "Timestamp" } Columns { Name: "saved_at" Type: "Timestamp" } KeyColumnNames: "timestamp" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" DefaultCompression { Codec: ColumnCodecLZ4 } } } } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:56650" , at schemeshard: 72057594046644480 2025-03-28T10:22:54.595589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TCreateOlapStore Propose, path: /Root/LogStore, opId: 281474976710658:0, at schemeshard: 72057594046644480 2025-03-28T10:22:54.595620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 281474976710658:1, propose status:StatusPreconditionFailed, reason: Column stores are not supported, at schemeshard: 72057594046644480 2025-03-28T10:22:54.597765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusPreconditionFailed Reason: "Column stores are not supported" TxId: 281474976710658 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2025-03-28T10:22:54.597888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusPreconditionFailed, reason: Column stores are not supported, operation: CREATE COLUMN STORE, path: /Root/LogStore 2025-03-28T10:22:54.599978Z node 1 :TX_PROXY ERROR: Actor# [1:7486803915956537428:2599] txid# 281474976710658, issues: { message: "Column stores are not supported" severity: 1 } assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) TBackTrace::Capture()+28 (0x1C67422C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x1CB312E0) NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&)+8721 (0x1C1A6F71) std::__y1::__function::__func, void ()>::operator()()+280 (0x1C1CFE08) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1CB68306) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1CB37E59) NTestSuiteYdbLogStore::TCurrentTest::Execute()+1204 (0x1C1CEFD4) NUnitTest::TTestFactory::Execute()+2438 (0x1CB39726) NUnitTest::RunMain(int, char**)+5213 (0x1CB6287D) ??+0 (0x7FEC00210D90) __libc_start_main+128 (0x7FEC00210E40) _start+41 (0x1902A029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 12585, MsgBus: 5390 2025-03-28T10:22:44.170954Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486803871417994180:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:44.171026Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/000165/r3tmp/tmprKOLhl/pdisk_1.dat 2025-03-28T10:22:44.478528Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12585, node 1 2025-03-28T10:22:44.549703Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:44.549826Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:44.551802Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:22:44.572309Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:22:44.572339Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:22:44.572349Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:22:44.572485Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:5390 TClient is connected to server localhost:5390 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... 
(TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:22:45.070323Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:22:46.573531Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803880007929433:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:46.573646Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803880007929420:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:46.573949Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:46.578117Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:22:46.588147Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486803880007929449:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:22:46.675460Z node 1 :TX_PROXY ERROR: Actor# [1:7486803880007929500:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:22:46.966444Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:22:47.099273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486803884302896998:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:22:47.099503Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486803884302896998:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:22:47.099812Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486803884302896998:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:22:47.099969Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486803884302896998:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:22:47.100091Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486803884302896998:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:22:47.100224Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486803884302896998:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:22:47.100347Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486803884302896998:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:22:47.100474Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486803884302896998:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:22:47.100631Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486803884302896998:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:22:47.100740Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486803884302896998:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:22:47.100888Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037891;self_id=[1:7486803884302896998:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:22:47.100991Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486803884302896998:2348];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:22:47.101895Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486803884302896992:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:22:47.102027Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486803884302896992:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:22:47.102305Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486803884302896992:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:22:47.102483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486803884302896992:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:22:47.102640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486803884302896992:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:22:47.102834Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486803884302896992:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:22:47.102962Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486803884302896992:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:22:47.103105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486803884302896992:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:22:47.103233Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486803884302896992:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:22:47.103344Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486803884302896992:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:22:47.103475Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037892;self_id=[1:7486803884302896992:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:22:47.103629Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037892;self_id=[1:7486803884302896992:2345];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:22:47.141291Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486803884302896996:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:22:47.141361Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_id=[1:7486803884302896996:2347];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:22:47.141597Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037895;self_i ... 6224038063;self_id=[1:7486803901482772181:3299];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038063;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.262743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[1:7486803901482772210:3309];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038048;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.262923Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038048;self_id=[1:7486803901482772210:3309];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038048;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.263223Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[1:7486803901482772407:3370];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.263380Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[1:7486803901482772407:3370];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.271426Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[1:7486803901482772267:3323];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.271633Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038020;self_id=[1:7486803901482772267:3323];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038020;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.271922Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[1:7486803901482772518:3390];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.272119Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[1:7486803901482772518:3390];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.272946Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038054;self_id=[1:7486803901482771819:3236];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038054;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.273134Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038054;self_id=[1:7486803901482771819:3236];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038054;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.273912Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[1:7486803901482772194:3306];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038035;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.274020Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[1:7486803901482772194:3306];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038035;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.274489Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[1:7486803901482772275:3326];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.274667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038041;self_id=[1:7486803901482772275:3326];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038041;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.274943Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[1:7486803901482772452:3381];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.275068Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038022;self_id=[1:7486803901482772452:3381];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038022;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.275483Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[1:7486803901482772441:3378];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.275650Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038007;self_id=[1:7486803901482772441:3378];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038007;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.275954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[1:7486803901482772372:3358];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.276037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[1:7486803901482772470:3384];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.276061Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038018;self_id=[1:7486803901482772372:3358];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038018;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.276216Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038013;self_id=[1:7486803901482772470:3384];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038013;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.276491Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038032;self_id=[1:7486803901482772346:3352];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.276512Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7486803901482772492:3387];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.276666Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038015;self_id=[1:7486803901482772492:3387];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038015;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.276668Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038032;self_id=[1:7486803901482772346:3352];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038032;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.277037Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[1:7486803901482772315:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.277187Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[1:7486803901482772315:3340];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.277515Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[1:7486803901482772269:3324];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038051;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.277634Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[1:7486803901482772269:3324];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038051;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.278041Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[1:7486803901482772354:3355];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.278211Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038025;self_id=[1:7486803901482772354:3355];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038025;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.279557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[1:7486803901482772388:3363];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.279661Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038017;self_id=[1:7486803901482772388:3363];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038017;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:22:57.280055Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038034;self_id=[1:7486803901482772327:3345];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038034;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at 
ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 , with diff: (ABORT|SUCC)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18D7C798 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18D63E6A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F593DD1CD8F 18. ??:0: ?? @ 0x7F593DD1CE3F 19. ??:0: ?? @ 0x16366028 |97.3%| [TA] $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.4%| [TA] {RESULT} $(B)/ydb/services/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] >> KqpSnapshotIsolation::TSimpleOltp [FAIL] >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL] >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] >> KqpSinkTx::OlapInvalidateOnError [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltp [FAIL] Test command err: Trying to start YDB, gRPC: 30824, MsgBus: 13074 2025-03-28T10:22:53.649864Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486803910765533273:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:53.650252Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/000184/r3tmp/tmpdPOKE4/pdisk_1.dat 2025-03-28T10:22:53.943419Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 30824, node 1 2025-03-28T10:22:54.002582Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:54.002708Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:54.004404Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:22:54.008720Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:22:54.008747Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:22:54.008759Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:22:54.008871Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:13074 TClient is connected to server localhost:13074 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:22:54.434667Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 
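Two distinct failure shapes interleave from this point. TConflictReadWriteOlap above aborts with "Transaction locks invalidated" on /Root/Test plus a tablet lock generation mismatch (18446744073709551615, i.e. uint64 -1, against the expected 0, suggesting the lock no longer existed on that tablet): an optimistic-lock conflict under snapshot isolation that the test treated as fatal. The Oltp variants below never get that far and fail with PRECONDITION_FAILED instead. Lock-invalidation aborts are ordinarily absorbed by client-side retries; a minimal sketch with the public C++ SDK, assuming the in-tree header layout. RetryOperationSync and the EStatus codes are real SDK surface, while the query text and helper name are illustrative:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    // Re-run a short transaction when its optimistic locks are invalidated:
    // the SDK's default retry policy treats EStatus::ABORTED as retryable.
    TStatus UpsertWithRetry(TTableClient& client, const TString& query) {
        return client.RetryOperationSync([&](TSession session) -> TStatus {
            return session.ExecuteDataQuery(
                query,
                TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
            ).GetValueSync();
        });
    }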
2025-03-28T10:22:55.812825Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803919355468538:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:55.812913Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803919355468527:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:55.813023Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:55.816110Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:22:55.822698Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486803919355468541:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:22:55.887574Z node 1 :TX_PROXY ERROR: Actor# [1:7486803919355468592:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:22:56.076545Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:22:56.190615Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T10:22:56.838029Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:22:57.937520Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZGY0MDlkMWEtNjg4ODc5ZTQtZmY1YTNlNjctN2UwZGFiMA==, ActorId: [1:7486803927945411634:2968], ActorState: ExecuteState, TraceId: 01jqe4deste8ccnb3sad2zdv15, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T10:22:58.650028Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486803910765533273:2059];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:58.650119Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18D79E87 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18D63A12 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F3B21795D8F 18. ??:0: ?? @ 0x7F3B21795E3F 19. ??:0: ?? @ 0x16366028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 16476, MsgBus: 19066 2025-03-28T10:22:53.776529Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486803911327120812:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:53.776704Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/0001ad/r3tmp/tmpi20uoo/pdisk_1.dat 2025-03-28T10:22:54.117467Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 16476, node 1 2025-03-28T10:22:54.172705Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:54.173018Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:54.175977Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:22:54.203030Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:22:54.203049Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:22:54.203055Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:22:54.203208Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:19066 TClient is connected to server localhost:19066 
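Every PRECONDITION_FAILED in this batch, above and below, reduces to the same server answer: "SnapshotRW can only be used with olap tables." The fixtures create row-oriented tables and then open a snapshot read-write transaction, which KQP rejects before any isolation logic runs. If snapshot read-write is the behavior under test, the fixture needs a column table. A sketch using YDB's documented STORE = COLUMN clause, assuming column tables are enabled on the server; the table name and schema are illustrative:

    // Column tables require NOT NULL primary-key columns.
    auto scheme = session.ExecuteSchemeQuery(R"(
        CREATE TABLE `/Root/TestOlap` (
            Key   Uint64 NOT NULL,
            Value Utf8,
            PRIMARY KEY (Key)
        ) WITH (STORE = COLUMN);
    )").GetValueSync();
    UNIT_ASSERT_C(scheme.IsSuccess(), scheme.GetIssues().ToString());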
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:22:54.662480Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:22:55.855010Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803919917056061:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:55.855043Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803919917056042:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:55.855160Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:55.857755Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:22:55.864875Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486803919917056071:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:22:55.918669Z node 1 :TX_PROXY ERROR: Actor# [1:7486803919917056122:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:22:56.116354Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:22:56.203202Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T10:22:56.888211Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:22:58.081692Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=Y2E0ZTc4MjQtYmQ0MmI4ZWUtMjU3NWQxZGQtOWQwOWU5N2M=, ActorId: [1:7486803928506999273:2967], ActorState: ExecuteState, TraceId: 01jqe4detqe964rpagfw7a5933, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T10:22:58.776982Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486803911327120812:2067];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:58.777054Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18D6BF57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18D62F3A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7FC6D313FD8F 18. ??:0: ?? @ 0x7FC6D313FE3F 19. ??:0: ?? @ 0x16366028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltp [FAIL] Test command err: Trying to start YDB, gRPC: 12687, MsgBus: 6813 2025-03-28T10:22:54.119492Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486803913998551095:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:54.119565Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/000189/r3tmp/tmpvglP8D/pdisk_1.dat 2025-03-28T10:22:54.415088Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 12687, node 1 2025-03-28T10:22:54.484335Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:22:54.484363Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:22:54.484371Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:22:54.484498Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T10:22:54.496047Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:54.496192Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:54.497824Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:6813 TClient is connected to server localhost:6813 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:22:54.861064Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:22:56.008193Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803922588486348:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:56.008264Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803922588486356:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:56.008302Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:56.011184Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:22:56.017731Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486803922588486362:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:22:56.114213Z node 1 :TX_PROXY ERROR: Actor# [1:7486803922588486413:2333] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:22:56.315106Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:22:56.413409Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T10:22:57.027282Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:22:57.942195Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=MThiZDY4NjMtYTdjNjk5ZWQtNDRiYjIxOGMtZGE4ZWM5OTA=, ActorId: [1:7486803926883462065:2966], ActorState: ExecuteState, TraceId: 01jqe4deqxekq8c4mv1s8h3k1t, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T10:22:59.119428Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486803913998551095:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:59.119483Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18D81843 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18D64092 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7FC0E23A4D8F 18. ??:0: ?? @ 0x7FC0E23A4E3F 19. ??:0: ?? @ 0x16366028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSimpleOltp [FAIL] Test command err: Trying to start YDB, gRPC: 24457, MsgBus: 10381 2025-03-28T10:22:54.020828Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486803913646488666:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:54.020890Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/00018a/r3tmp/tmprsDAiH/pdisk_1.dat 2025-03-28T10:22:54.315116Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 24457, node 1 2025-03-28T10:22:54.373345Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:54.373710Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:54.376398Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:22:54.393933Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:22:54.393963Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:22:54.393971Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:22:54.394107Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:10381 TClient is connected to server localhost:10381 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:22:54.841220Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:22:56.009348Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803922236423911:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:56.009407Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803922236423919:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:56.009461Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:56.012826Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:22:56.024369Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486803922236423929:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:22:56.116065Z node 1 :TX_PROXY ERROR: Actor# [1:7486803922236423982:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:22:56.301317Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:22:56.390849Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T10:22:57.101487Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:22:58.153080Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZWQ3NTFjZS01MmRmOGQ2OS0xZDM2OWYwZC1lMmYxM2JmMg==, ActorId: [1:7486803926531399726:2966], ActorState: ExecuteState, TraceId: 01jqe4dexc8vkxy2jx1n3v8jb3, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T10:22:59.021214Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486803913646488666:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:59.021384Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18D6BF57 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18D62D12 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7FC38680AD8F 18. ??:0: ?? @ 0x7FC38680AE3F 19. ??:0: ?? @ 0x16366028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 14007, MsgBus: 21273 2025-03-28T10:22:53.977522Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486803910200504939:2110];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:53.978489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/0001a4/r3tmp/tmpsh6rH7/pdisk_1.dat 2025-03-28T10:22:54.249406Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:22:54.255866Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:54.255961Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:54.258766Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 14007, node 1 2025-03-28T10:22:54.309580Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:22:54.309617Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:22:54.309630Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:22:54.309735Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:21273 TClient is connected to server localhost:21273 
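
The repeated "with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)" fragments in these failures appear to be the compact string diff emitted by library/cpp/testing/unittest (the RaiseError frames in every stack trace point at its registar.cpp): each (left|right) group holds the expected-versus-actual characters, and text outside the groups is common to both, so this one decodes to PRECONDITION_FAILED vs. SUCCESS, and the later "(BAD_|P)RE(QUES|CONDI)T(|ION_FAILED)" decodes to BAD_REQUEST vs. PRECONDITION_FAILED. A minimal sketch of an assertion that fails with that style of output; the suite and test names are invented for illustration:

    #include <library/cpp/testing/unittest/registar.h>

    Y_UNIT_TEST_SUITE(StatusDiffDemo) {
        Y_UNIT_TEST(ShowsCompactDiff) {
            const TString expected = "SUCCESS";
            const TString actual = "PRECONDITION_FAILED";
            // Fails and prints both values plus a compact inline diff,
            // in the same form as the assertions in the log above.
            UNIT_ASSERT_VALUES_EQUAL_C(actual, expected, "status mismatch");
        }
    }
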
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:22:54.732950Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:22:56.276757Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803923085407448:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:56.276758Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803923085407440:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:56.276867Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:56.279408Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:22:56.285868Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486803923085407454:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:22:56.375886Z node 1 :TX_PROXY ERROR: Actor# [1:7486803923085407505:2335] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:22:56.565677Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:22:56.651145Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T10:22:57.317897Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:22:58.310777Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=NWY3NzdmYTctMzU4MzkyNGYtNjk4MzViLWY0YzVmOGM=, ActorId: [1:7486803931675350592:2966], ActorState: ExecuteState, TraceId: 01jqe4df4zfzydkk0ewz9fffep, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T10:22:58.977181Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486803910200504939:2110];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:58.977252Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18D79E87 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18D63C3A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7FC685AC0D8F 18. ??:0: ?? @ 0x7FC685AC0E3F 19. ??:0: ?? @ 0x16366028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOnlyOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 13981, MsgBus: 13816 2025-03-28T10:22:54.995428Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486803915925608651:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:54.995517Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/000179/r3tmp/tmp6b1PLR/pdisk_1.dat 2025-03-28T10:22:55.201542Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 13981, node 1 2025-03-28T10:22:55.259920Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:22:55.259956Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:22:55.259964Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:22:55.260079Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T10:22:55.320426Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:55.320572Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:55.322127Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:13816 TClient is connected to server localhost:13816 
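
Every OLTP variant in this run fails the same way: the test opens a read-write transaction with snapshot isolation against a row-store table, and the server rejects it with PRECONDITION_FAILED, "SnapshotRW can only be used with olap tables". A hedged sketch of issuing the same kind of statement through the C++ SDK with serializable isolation, which row tables do accept; the header path follows the in-tree SDK layout and is an assumption, not code taken from these tests:

    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    // Upsert one row in a single serializable read-write transaction.
    // `/Root/KV` is the row table referenced elsewhere in this log.
    TStatus UpsertOne(TSession& session) {
        return session.ExecuteDataQuery(
            R"(UPSERT INTO `/Root/KV` (Key, Value) VALUES (1u, "one");)",
            TTxControl::BeginTx(TTxSettings::SerializableRW()).CommitTx()
        ).GetValueSync();
    }
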
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:22:55.638810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:22:56.828388Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803924515543903:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:56.828447Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803924515543912:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:56.828497Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:56.832532Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T10:22:56.840049Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486803924515543917:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T10:22:56.913126Z node 1 :TX_PROXY ERROR: Actor# [1:7486803924515543968:2333] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:22:57.113538Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T10:22:57.249020Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480 2025-03-28T10:22:58.000012Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480 2025-03-28T10:22:59.090273Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZmUyZmQyYjctYTAwM2E3Yi02NzdjZjQ0MC1jZmUxOGMyOQ==, ActorId: [1:7486803933105486971:2966], ActorState: ExecuteState, TraceId: 01jqe4dfv89v2ac6nxw728gvx7, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T10:22:59.995462Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486803915925608651:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:59.995515Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18D81843 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18D642BA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7FF4E0443D8F 18. ??:0: ?? @ 0x7FF4E0443E3F 19. ??:0: ?? @ 0x16366028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOltpNoSink [FAIL] Test command err: Trying to start YDB, gRPC: 16699, MsgBus: 18723 2025-03-28T10:22:55.170798Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486803920882368080:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:55.170864Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/00017a/r3tmp/tmpxyTKRF/pdisk_1.dat 2025-03-28T10:22:55.380014Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:22:55.391734Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:55.391820Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:55.393269Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TServer::EnableGrpc on GrpcPort 16699, node 1 2025-03-28T10:22:55.432427Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:22:55.432450Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:22:55.432458Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:22:55.432544Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration TClient is connected to server localhost:18723 TClient is connected to server localhost:18723 
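
The precondition these failures hit concerns the table type rather than the query: SnapshotRW is accepted only on column-store ("olap") tables. A minimal sketch, not taken from the test sources, of creating such a table through a scheme query; the endpoint, database, table name, and header paths are illustrative assumptions:

    #include <ydb/public/sdk/cpp/client/ydb_driver/driver.h>
    #include <ydb/public/sdk/cpp/client/ydb_table/table.h>
    #include <util/system/yassert.h>

    using namespace NYdb;
    using namespace NYdb::NTable;

    int main() {
        TDriver driver(TDriverConfig()
            .SetEndpoint("grpc://localhost:2136")   // hypothetical endpoint
            .SetDatabase("/Root"));
        TTableClient client(driver);
        auto session = client.CreateSession().GetValueSync().GetSession();

        // STORE = COLUMN selects the column-store engine; primary-key columns
        // of a column table must be declared NOT NULL.
        auto status = session.ExecuteSchemeQuery(R"(
            CREATE TABLE `/Root/KV_olap` (
                Key Uint32 NOT NULL,
                Value String,
                PRIMARY KEY (Key)
            ) WITH (STORE = COLUMN);
        )").GetValueSync();
        Y_ABORT_UNLESS(status.IsSuccess(), "%s", status.GetIssues().ToString().c_str());

        driver.Stop(true);
        return 0;
    }
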
WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:22:55.750571Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:22:57.032381Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803929472303339:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:57.032383Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803929472303334:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:57.032498Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:57.036091Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480 2025-03-28T10:22:57.045132Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486803929472303348:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2025-03-28T10:22:57.101920Z node 1 :TX_PROXY ERROR: Actor# [1:7486803929472303399:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:22:57.347543Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480 2025-03-28T10:22:57.439441Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480 2025-03-28T10:22:58.283330Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480 2025-03-28T10:22:59.340093Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=ZDNlMmNiMTktZDAwMzgzOTUtNzVmYzdjNmMtNjc3NDg5ZTY=, ActorId: [1:7486803938062246405:2967], ActorState: ExecuteState, TraceId: 01jqe4dg6e8453xffn99s118jn, Create QueryResponse for error on request, msg: SnapshotRW can only be used with olap tables. 2025-03-28T10:23:00.170884Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486803920882368080:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:23:00.170970Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables. , with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18D724E7 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18D635BA 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F030A759D8F 18. ??:0: ?? @ 0x7F030A759E3F 19. ??:0: ?? @ 0x16366028 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInvalidateOnError [FAIL] Test command err: Trying to start YDB, gRPC: 8258, MsgBus: 11337 2025-03-28T10:22:50.978125Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486803898206539985:2061];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:50.978186Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/000185/r3tmp/tmpwBZIJp/pdisk_1.dat 2025-03-28T10:22:51.287463Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8258, node 1 2025-03-28T10:22:51.367201Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:22:51.367235Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:22:51.367245Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:22:51.367345Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T10:22:51.372002Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:51.372102Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:51.373779Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:11337 TClient is connected to server localhost:11337 WaitRootIsUp 
'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:22:51.819309Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:22:53.419021Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803911091442544:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:53.419111Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803911091442530:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:53.419520Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:53.423013Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480 2025-03-28T10:22:53.432405Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486803911091442547:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2025-03-28T10:22:53.513076Z node 1 :TX_PROXY ERROR: Actor# [1:7486803911091442598:2334] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2025-03-28T10:22:53.809896Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480 2025-03-28T10:22:53.948127Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486803911091442795:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:22:53.948315Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486803911091442795:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:22:53.948542Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486803911091442795:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:22:53.948658Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486803911091442795:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:22:53.948743Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486803911091442795:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:22:53.948870Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486803911091442795:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:22:53.948959Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486803911091442795:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:22:53.949085Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486803911091442795:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:22:53.949212Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486803911091442795:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:22:53.949312Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486803911091442795:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:22:53.949398Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037889;self_id=[1:7486803911091442795:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:22:53.949478Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037889;self_id=[1:7486803911091442795:2345];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:22:53.952238Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803911091442807:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:22:53.952307Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803911091442807:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:22:53.952572Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803911091442807:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:22:53.952755Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803911091442807:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:22:53.952911Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803911091442807:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:22:53.953038Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803911091442807:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:22:53.953162Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803911091442807:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:22:53.953342Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803911091442807:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:22:53.953491Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803911091442807:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:22:53.953637Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803911091442807:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:22:53.953774Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803911091442807:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:22:53.953983Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037896;self_id=[1:7486803911091442807:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:22:53.988533Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486803911091442800:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:22:53.988605Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:7486803911091442800:2348];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:22:53.988888Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_ ... t_id=72075186224038078;self_id=[1:7486803928271317424:3176];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038078;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.528954Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[1:7486803928271317467:3195];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.529100Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038078;self_id=[1:7486803928271317424:3176];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038078;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.529104Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[1:7486803928271317467:3195];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.530990Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[1:7486803928271317452:3188];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.531087Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038039;self_id=[1:7486803928271317452:3188];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038039;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.532481Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038058;self_id=[1:7486803928271317544:3199];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038058;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.532586Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038058;self_id=[1:7486803928271317544:3199];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038058;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.534013Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[1:7486803928271317465:3194];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.534111Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038028;self_id=[1:7486803928271317465:3194];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038028;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.536062Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038069;self_id=[1:7486803928271317379:3156];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.536173Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038069;self_id=[1:7486803928271317379:3156];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038069;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.538730Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[1:7486803928271317456:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.539063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038042;self_id=[1:7486803928271317456:3190];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038042;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.540682Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[1:7486803928271317373:3153];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038079;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.540787Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038079;self_id=[1:7486803928271317373:3153];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038079;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.542751Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[1:7486803928271317402:3167];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.542879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038073;self_id=[1:7486803928271317402:3167];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038073;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.544827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[1:7486803928271317469:3196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.544974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038016;self_id=[1:7486803928271317469:3196];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038016;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.547883Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[1:7486803928271317450:3187];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.548063Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038045;self_id=[1:7486803928271317450:3187];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038045;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.550413Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038001;self_id=[1:7486803928271317460:3192];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.550535Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224038001;self_id=[1:7486803928271317460:3192];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038001;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.552974Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[1:7486803928271317433:3180];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.553076Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[1:7486803928271317433:3180];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.560656Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[1:7486803928271317447:3186];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.560811Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038033;self_id=[1:7486803928271317447:3186];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038033;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.561149Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[1:7486803928271317439:3183];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.561301Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038075;self_id=[1:7486803928271317439:3183];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038075;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.565273Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[1:7486803928271317745:3200];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.565412Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038023;self_id=[1:7486803928271317745:3200];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038023;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.567842Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[1:7486803928271317445:3185];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.568042Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038057;self_id=[1:7486803928271317445:3185];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038057;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.578071Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[1:7486803928271317437:3182];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:01.578220Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038044;self_id=[1:7486803928271317437:3182];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038044;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; assertion failed at 
ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017 , with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED) 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F 2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x18D46A4E 3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A 4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x18D2569A 5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x18D2CB17 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D2CB17 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D2CB17 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D2CB17 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D2CB17 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8 14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x18D2BCE3 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC 17. ??:0: ?? @ 0x7F316B6C2D8F 18. ??:0: ?? @ 0x7F316B6C2E3F 19. ??:0: ?? @ 0x16366028 >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictWriteOlap [FAIL] Test command err: Trying to start YDB, gRPC: 21540, MsgBus: 63594 2025-03-28T10:22:54.184063Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486803913416998215:2063];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:54.184122Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/000174/r3tmp/tmpMUb166/pdisk_1.dat 2025-03-28T10:22:54.514857Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 21540, node 1 2025-03-28T10:22:54.534767Z node 1 :GRPC_SERVER WARN: SchemeBoardDelete /Root Strong=0 2025-03-28T10:22:54.560315Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:22:54.560336Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:22:54.560349Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:22:54.560477Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T10:22:54.577918Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:54.578101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:54.579817Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected TClient is connected to server localhost:63594 TClient is connected to server localhost:63594 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:22:54.952190Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:22:56.040773Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803922006933472:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:22:56.040861Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803922006933464:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:22:56.041189Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:22:56.044170Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480
2025-03-28T10:22:56.052301Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486803922006933478:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710658 completed, doublechecking }
2025-03-28T10:22:56.150452Z node 1 :TX_PROXY ERROR: Actor# [1:7486803922006933529:2334] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T10:22:56.336832Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
2025-03-28T10:22:56.445574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486803922006933724:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-28T10:22:56.445574Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803922006933739:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-28T10:22:56.445734Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803922006933739:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-28T10:22:56.445945Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803922006933739:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-28T10:22:56.446036Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803922006933739:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner;
2025-03-28T10:22:56.446103Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803922006933739:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId;
2025-03-28T10:22:56.446158Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486803922006933724:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-28T10:22:56.446175Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803922006933739:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup;
2025-03-28T10:22:56.446230Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803922006933739:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer;
2025-03-28T10:22:56.446304Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486803922006933724:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks;
2025-03-28T10:22:56.446314Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037896;self_id=[1:7486803922006933739:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:22:56.446398Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486803922006933724:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:22:56.446425Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803922006933739:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:22:56.446482Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486803922006933724:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:22:56.446557Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803922006933739:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:22:56.446568Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486803922006933724:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:22:56.446636Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803922006933739:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:22:56.446640Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486803922006933724:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:22:56.446735Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037896;self_id=[1:7486803922006933739:2349];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:22:56.446744Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486803922006933724:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:22:56.446827Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486803922006933724:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:22:56.446916Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486803922006933724:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:22:56.446993Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037897;self_id=[1:7486803922006933724:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:22:56.447066Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037897;self_id=[1:7486803922006933724:2346];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:22:56.471602Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037891;self_id=[1:7486803922006933718:2343];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:22:56.471607Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037894;self_id=[1:7486803922006933726:2347];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:22: ... cast::TEvNotifyPlanStep;tablet_id=72075186224038008;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.324830Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[1:7486803934891841228:3218];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.324999Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038038;self_id=[1:7486803934891841228:3218];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038038;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.326879Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[1:7486803934891841183:3188];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.327024Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038047;self_id=[1:7486803934891841183:3188];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038047;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.327364Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7486803934891841207:3204];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.327531Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038012;self_id=[1:7486803934891841207:3204];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038012;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.328932Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[1:7486803934891841219:3212];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.329093Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038031;self_id=[1:7486803934891841219:3212];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038031;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.329371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038067;self_id=[1:7486803934891841162:3174];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.329523Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038067;self_id=[1:7486803934891841162:3174];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038067;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.330459Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[1:7486803934891841190:3193];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.330620Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038010;self_id=[1:7486803934891841190:3193];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038010;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.330938Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[1:7486803934891841156:3170];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.331077Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038009;self_id=[1:7486803934891841156:3170];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038009;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.333021Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[1:7486803934891841142:3164];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038006;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.333179Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038006;self_id=[1:7486803934891841142:3164];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038006;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.334558Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[1:7486803934891841138:3162];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038051;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.334738Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038051;self_id=[1:7486803934891841138:3162];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038051;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.336629Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[1:7486803934891841105:3147];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.336796Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038024;self_id=[1:7486803934891841105:3147];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038024;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.337070Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[1:7486803934891841167:3178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.337229Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038046;self_id=[1:7486803934891841167:3178];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038046;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.339154Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224038014;self_id=[1:7486803934891841277:3226];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038014;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.339326Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038014;self_id=[1:7486803934891841277:3226];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038014;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.341249Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[1:7486803934891841193:3195];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.341417Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038003;self_id=[1:7486803934891841193:3195];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038003;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.344833Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[1:7486803934891841123:3154];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.345014Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038037;self_id=[1:7486803934891841123:3154];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038037;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.345281Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[1:7486803934891841197:3198];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038035;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.345424Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038035;self_id=[1:7486803934891841197:3198];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038035;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.358667Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[1:7486803934891840968:3122];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.359016Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224038060;self_id=[1:7486803934891840968:3122];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224038060;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.525065Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7486803926301902559:2539];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037928;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.525346Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037928;self_id=[1:7486803926301902559:2539];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037928;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.777382Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037974;self_id=[1:7486803926301902280:2491];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=72075186224037974;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled; 2025-03-28T10:23:07.777584Z node 1 :TX_COLUMNSHARD 
WARN: tablet_id=72075186224037974;self_id=[1:7486803926301902280:2491];ev=NKikimr::TEvMediatorTimecast::TEvNotifyPlanStep;tablet_id=72075186224037974;fline=columnshard_impl.cpp:764;event=skip_indexation;reason=disabled;
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED) , with diff: (SUCC|ABORT)E(SS|D)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18D74DF8
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18D637EA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7FB3786AAD8F
18. ??:0: ?? @ 0x7FB3786AAE3F
19. ??:0: ?? @ 0x16366028
|98.1%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log}
|98.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log}
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage
Test command err:
2025-03-28T10:22:55.083993Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
<main>: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests }
2025-03-28T10:22:55.084293Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError;
2025-03-28T10:22:55.084363Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError;
test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/000227/r3tmp/tmpIPgk64/pdisk_1.dat
TServer::EnableGrpc on GrpcPort 24551, node 1
TClient is connected to server localhost:8458
2025-03-28T10:22:55.501481Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480
2025-03-28T10:22:55.531889Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T10:22:55.537313Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe)
2025-03-28T10:22:55.537359Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe)
2025-03-28T10:22:55.537383Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe)
2025-03-28T10:22:55.537537Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration
2025-03-28T10:22:55.572063Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected
2025-03-28T10:22:55.572198Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting
2025-03-28T10:22:55.583377Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected
2025-03-28T10:22:55.684969Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480
Status: 53 TxId: 281474976715657 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 2
2025-03-28T10:22:55.756269Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828672, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvBoot
2025-03-28T10:22:55.757293Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828673, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvRestored
2025-03-28T10:22:55.757538Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:746:2626];fline=columnshard.cpp:102;event=initialize_shard;step=OnActivateExecutor;
2025-03-28T10:22:55.773819Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;self_id=[1:746:2626];fline=columnshard.cpp:120;event=initialize_shard;step=initialize_tiring_finished;
2025-03-28T10:22:55.774081Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Execute at tablet 72075186224037888
2025-03-28T10:22:55.780351Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0;
2025-03-28T10:22:55.780527Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules;
2025-03-28T10:22:55.780739Z node 1 :TX_COLUMNSHARD WARN:
tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:22:55.780818Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:22:55.780910Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:22:55.781001Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:22:55.781096Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:22:55.781181Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:22:55.781270Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:22:55.781359Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:22:55.781453Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:22:55.781534Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;self_id=[1:746:2626];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:22:55.796284Z node 1 :TX_COLUMNSHARD TRACE: StateInit, received event# 268828684, Sender [1:688:2580], Recipient [1:746:2626]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2025-03-28T10:22:55.797776Z node 1 :TX_COLUMNSHARD DEBUG: TxInitSchema.Complete at tablet 72075186224037888 2025-03-28T10:22:55.798000Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:137;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=11;current_normalizer=CLASS_NAME=Granules; 2025-03-28T10:22:55.798058Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=1;type=Granules; 2025-03-28T10:22:55.798224Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T10:22:55.798398Z node 1 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T10:22:55.798457Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2025-03-28T10:22:55.798490Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2025-03-28T10:22:55.798577Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=chunks.cpp:139;normalizer=TChunksNormalizer;message=0 chunks found; 2025-03-28T10:22:55.798626Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2025-03-28T10:22:55.798657Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2025-03-28T10:22:55.798680Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2025-03-28T10:22:55.798865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2025-03-28T10:22:55.798919Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2025-03-28T10:22:55.798949Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2025-03-28T10:22:55.798971Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2025-03-28T10:22:55.799059Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2025-03-28T10:22:55.799105Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2025-03-28T10:22:55.799147Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanInsertionDedup;id=CleanInsertionDedup; 2025-03-28T10:22:55.799168Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=8;type=CleanInsertionDedup; 2025-03-28T10:22:55.799231Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertionDedup;id=8; 2025-03-28T10:22:55.799262Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2025-03-28T10:22:55.799291Z node 1 :TX_COLUMNSHARD 
NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2025-03-28T10:22:55.799343Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2025-03-28T10:22:55.799371Z node 1 :TX_COLUMNSHARD WARN: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestorePortionFromChunks;id=RestorePortionFromChunks; 2025-03-28T10:22:55.799389Z node 1 :TX_COLUMNSHARD NOTICE: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:154;event=normalizer_init;last=0;seq_id=10;type=RestorePortionFromChunks; 2025-03-28T10:22:55.799771Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=50; 2025-03-28T10:22:55.799865Z node 1 :TX_COLUMNSHARD INFO: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=29; 2025-03-28T10:22:55.799952Z node 1 :TX_COLUMNSHARD INFO: tablet_id= ... LUMNSHARD TRACE: StateWork, received event# 2146435073, Sender [1:1315:3116], Recipient [1:746:2626]: NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex 2025-03-28T10:23:16.379212Z node 1 :TX_COLUMNSHARD DEBUG: WriteIndex at tablet 72075186224037888 2025-03-28T10:23:16.379415Z node 1 :TX_COLUMNSHARD DEBUG: TxWriteIndex[31] (CS::GENERAL) apply at tablet 72075186224037888 2025-03-28T10:23:16.381953Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager on execute at tablet 72075186224037888 Save Batch GenStep: 1:21 Blob count: 2 2025-03-28T10:23:16.382073Z node 1 :TX_COLUMNSHARD DEBUG: Index: tables 1 inserted {blob_bytes=3720408;raw_bytes=123937532;count=3;records=105525} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=17463040;raw_bytes=589051848;count=6;records=494475} inactive {blob_bytes=23493144;raw_bytes=787383134;count=15;records=667575} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037888 TEvBlobStorage::TEvPut 
tId=72075186224037888;c=1;:73/0:size=4045;count=18;;1:size=53108;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445360;count=1;;8:size=1445448;count=1;;9:size=1445400;count=1;;10:size=1445920;count=1;;11:size=4137072;count=5;;12:size=1445744;count=1;;13:size=1445928;count=1;;14:size=1445608;count=1;;15:size=1445528;count=1;;16:size=1445360;count=1;;17:size=1168928;count=1;;18:size=1402768;count=1;;19:size=1403096;count=1;;20:size=1402840;count=1;;21:size=1445376;count=1;;22:size=808584;count=1;;23:size=1496088;count=2;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut tId=72075186224037888;c=0;:73/0:size=4114;count=19;;1:size=53108;count=8;;2:size=0;count=0;;3:size=1466448;count=1;;4:size=1479208;count=1;;5:size=1458600;count=1;;6:size=1445408;count=1;;7:size=1445360;count=1;;8:size=1445448;count=1;;9:size=1445400;count=1;;10:size=1445920;count=1;;11:size=4137072;count=5;;12:size=1445744;count=1;;13:size=1445928;count=1;;14:size=1445608;count=1;;15:size=1445528;count=1;;16:size=1445360;count=1;;17:size=1168928;count=1;;18:size=1402768;count=1;;19:size=1403096;count=1;;20:size=1402840;count=1;;21:size=1445376;count=1;;22:size=808584;count=1;;23:size=1496088;count=2;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2025-03-28T10:23:16.393540Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;fline=abstract.cpp:53;event=WriteIndexComplete;type=CS::GENERAL;success=1; 2025-03-28T10:23:16.393595Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;fline=with_appended.cpp:65;portions=25,26,;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c; 2025-03-28T10:23:16.393823Z node 1 :TX_COLUMNSHARD TRACE: 
tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:25;path_id:3;records_count:52716;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:1857384;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-03-28T10:23:16.393982Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/547285.000000s;; 2025-03-28T10:23:16.394070Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;fline=granule.cpp:19;event=upsert_portion;portion=(portion_id:26;path_id:3;records_count:52714;min_schema_snapshot:(plan_step=1500;tx_id=281474976715658;);schema_version:1;level:0;column_size:1857112;index_size:0;meta:((produced=SPLIT_COMPACTED;)););path_id=3; 2025-03-28T10:23:16.394141Z node 1 :TX_COLUMNSHARD TRACE: tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;fline=tiering.cpp:49;tiering_info=__DEFAULT/0.000000s;$$DELETE/599999.000000s;; 2025-03-28T10:23:16.394189Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;fline=manager.cpp:15;event=unlock;process_id=CS::GENERAL::a8ece172-bbe11f0-8dc5ac1b-59f5c81c; 2025-03-28T10:23:16.394250Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;fline=granule.cpp:101;event=OnCompactionFinished;info=(granule:3;path_id:3;size:21186720;portions_count:26;); 2025-03-28T10:23:16.394298Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;tablet_id=72075186224037888;fline=columnshard_impl.cpp:515;event=EnqueueBackgroundActivities;periodic=0; 2025-03-28T10:23:16.394352Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;tablet_id=72075186224037888;fline=columnshard_impl.cpp:785;event=start_indexation_tasks;insert_overload_size=0; 2025-03-28T10:23:16.394423Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;tablet_id=72075186224037888;fline=column_engine_logs.cpp:244;event=StartCleanup;portions_count=1; 2025-03-28T10:23:16.394490Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;tablet_id=72075186224037888;fline=column_engine_logs.cpp:286;event=StartCleanupStop;snapshot=plan_step=0;tx_id=18446744073709551615;;current_snapshot_ts=21000; 2025-03-28T10:23:16.394531Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;tablet_id=72075186224037888;fline=column_engine_logs.cpp:319;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2025-03-28T10:23:16.394585Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1061;background=cleanup;skip_reason=no_changes; 2025-03-28T10:23:16.394622Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1093;background=cleanup;skip_reason=no_changes; 2025-03-28T10:23:16.394729Z node 1 :TX_COLUMNSHARD DEBUG: 
tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;tablet_id=72075186224037888;queue=ttl;external_count=0;fline=granule.cpp:167;event=skip_actualization;waiting=0.600000s;
2025-03-28T10:23:16.394797Z node 1 :TX_COLUMNSHARD DEBUG: tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;tablet_id=72075186224037888;fline=columnshard_impl.cpp:1002;background=ttl;skip_reason=no_changes;
2025-03-28T10:23:16.394969Z node 1 :TX_COLUMNSHARD DEBUG: BlobManager at tablet 72075186224037888 Save Batch GenStep: 1:21 Blob count: 2
VERIFY failed (2025-03-28T10:23:16.395110Z): tablet_id=72075186224037888;task_id=a8ece172-bbe11f0-8dc5ac1b-59f5c81c;verification=CompactionsLimit.Dec() >= 0;fline=ro_controller.cpp:39;
ydb/library/actors/core/log.cpp:754 ~TVerifyFormattedRecordWriter(): requirement false failed
NPrivate::InternalPanicImpl(int, char const*, char const*, int, int, int, TBasicStringBuf>, char const*, unsigned long)+873 (0x18B7DF99)
NPrivate::Panic(NPrivate::TStaticBuf const&, int, char const*, char const*, char const*, ...)+571 (0x18B6C22B)
NActors::TVerifyFormattedRecordWriter::~TVerifyFormattedRecordWriter()+326 (0x19E83CD6)
NKikimr::NYDBTest::NColumnShard::TReadOnlyController::DoOnWriteIndexComplete(NKikimr::NOlap::TColumnEngineChanges const&, NKikimr::NColumnShard::TColumnShard const&)+4577 (0x48517211)
NKikimr::NColumnShard::TTxWriteIndex::Complete(NActors::TActorContext const&)+4797 (0x30555E9D)
NKikimr::NTabletFlatExecutor::TSeat::Complete(NActors::TActorContext const&, bool)+899 (0x1E8660F3)
NKikimr::NTabletFlatExecutor::TLogicRedo::Confirm(unsigned int, NActors::TActorContext const&, NActors::TActorId const&)+3856 (0x1E749AD0)
NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&, NActors::TActorContext const&)+3444 (0x1E58FEC4)
NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+2821 (0x1E52CC45)
NActors::IActor::Receive(TAutoPtr&)+237 (0x19DB54AD)
NActors::TTestActorRuntimeBase::SendInternal(TAutoPtr, unsigned int, bool)+3557 (0x3585FE75)
NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+12602 (0x358586EA)
NActors::TTestActorRuntimeBase::WaitForEdgeEvents(std::__y1::function&)>, TSet, std::__y1::allocator> const&, TDuration)+1076 (0x35862A64)
NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventIf(TSet, std::__y1::allocator> const&, std::__y1::function const&, TDuration)+292 (0x35A2FBE4)
NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEvent(NActors::TActorId const&, TDuration)+419 (0x35A2ED03)
NActors::TEvents::TEvWakeup::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+307 (0x35A26F63)
NActors::TTestActorRuntime::SimulateSleep(TDuration)+1115 (0x35A26B3B)
NKikimr::NTestSuiteColumnShardTiers::TTestCaseTTLUsage::Execute_(NUnitTest::TTestContext&)+4702 (0x1875C99E)
std::__y1::__function::__func, void ()>::operator()()+280 (0x1876E558)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x1902ACB6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x18FFA7E9)
NKikimr::NTestSuiteColumnShardTiers::TCurrentTest::Execute()+1204 (0x1876D504)
NUnitTest::TTestFactory::Execute()+2438 (0x18FFC0B6)
NUnitTest::RunMain(int, char**)+5213 (0x1902522D)
??+0 (0x7F29B2AD0D90)
__libc_start_main+128 (0x7F29B2AD0E40)
_start+41 (0x160E8029)
|98.3%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ...
results_accumulator.log} |98.4%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpStats::SysViewClientLost [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SysViewClientLost [FAIL] Test command err: Trying to start YDB, gRPC: 8198, MsgBus: 5704 2025-03-28T10:22:46.001483Z node 1 :METADATA_PROVIDER WARN: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7486803881830422127:2060];send_to=[0:7307199536658146131:7762515]; 2025-03-28T10:22:46.001692Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/0002c0/r3tmp/tmpS5musF/pdisk_1.dat 2025-03-28T10:22:46.277754Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 8198, node 1 2025-03-28T10:22:46.369573Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:46.369664Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:46.369880Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:22:46.369895Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:22:46.369904Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:22:46.370010Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T10:22:46.371407Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:5704 TClient is connected to server localhost:5704 WaitRootIsUp 'Root'... TClient::Ls request: Root TClient::Ls response: Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } Children { Name: ".sys" PathId: 18446744073709551615 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 0 CreateStep: 0 ParentPathId: 18446744073709551615 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 Pl... (TRUNCATED) WaitRootIsUp 'Root' success. 2025-03-28T10:22:46.762852Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480 waiting... 2025-03-28T10:22:46.792241Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480 waiting... 
2025-03-28T10:22:46.895290Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T10:22:47.048324Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T10:22:47.125568Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T10:22:48.565156Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803890420358499:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:22:48.565293Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:22:48.819431Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480
2025-03-28T10:22:48.848298Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480
2025-03-28T10:22:48.876556Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480
2025-03-28T10:22:48.906810Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480
2025-03-28T10:22:48.932506Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480
2025-03-28T10:22:48.962782Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480
2025-03-28T10:22:49.000084Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803890420359009:2455], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:22:49.000152Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:22:49.000430Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7486803894715326310:2458], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions }
2025-03-28T10:22:49.005151Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710668:3, at schemeshard: 72057594046644480
2025-03-28T10:22:49.017553Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7486803894715326312:2459], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976710668 completed, doublechecking }
2025-03-28T10:22:49.087965Z node 1 :TX_PROXY ERROR: Actor# [1:7486803894715326368:3447] txid# 281474976710669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 15], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
2025-03-28T10:22:49.923374Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480
waiting...
2025-03-28T10:22:51.001552Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486803881830422127:2060];send_to=[0:7307199536658146131:7762515];
2025-03-28T10:22:51.001665Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
2025-03-28T10:23:01.276289Z node 1 :FLAT_TX_SCHEMESHARD WARN: Cannot get console configs
2025-03-28T10:23:01.276319Z node 1 :IMPORT WARN: Table profiles were not loaded
2025-03-28T10:23:09.938406Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743157389927, txId: 281474976710672] shutting down
2025-03-28T10:23:09.993115Z node 1 :RPC_REQUEST WARN: Client lost
2025-03-28T10:23:11.189028Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743157391182, txId: 281474976710674] shutting down
2025-03-28T10:23:12.319522Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743157392313, txId: 281474976710676] shutting down
2025-03-28T10:23:13.436013Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743157393431, txId: 281474976710678] shutting down
2025-03-28T10:23:14.556090Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743157394543, txId: 281474976710680] shutting down
2025-03-28T10:23:15.678315Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743157395673, txId: 281474976710682] shutting down
2025-03-28T10:23:16.810588Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743157396805, txId: 281474976710684] shutting down
2025-03-28T10:23:17.933823Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743157397928, txId: 281474976710686] shutting down
2025-03-28T10:23:19.061032Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743157399055, txId: 281474976710688] shutting down
2025-03-28T10:23:20.214737Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743157400209, txId: 281474976710690] shutting down
2025-03-28T10:23:21.425239Z node 1 :KQP_RESOURCE_MANAGER WARN: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1743157401419, txId: 281474976710692] shutting down
assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1949767B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1995C4FF
2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1903CBE8
3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x1904FBF7
4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1904FBF7
5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1904FBF7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x1904FBF7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x1904FBF7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19993525
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19993525
10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19993525
11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19963078
12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x1904ED7B
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19964945
14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1998DA9C
15. ??:0: ?? @ 0x7F40663DBD8F
16. ??:0: ?? @ 0x7F40663DBE3F
17. ??:0: ?? @ 0x1640D028
|98.5%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log}
|98.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log}
>> test_workload.py::TestYdbLogWorkload::test[column] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[21] [GOOD]
>> test_tpch.py::TestTpchS1::test_tpch[22]
>> test_workload.py::TestYdbLogWorkload::test[column] [FAIL]
|98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/log/tests/py3test >> test_workload.py::TestYdbLogWorkload::test[column] [FAIL]
|98.7%| [TM] {RESULT} ydb/tests/stress/log/tests/py3test
>> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [FAIL]
Test command err:
2025-03-28T10:22:53.253025Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:324:2366], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T10:22:53.253449Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T10:22:53.253522Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/00021f/r3tmp/tmpmUdEmd/pdisk_1.dat 2025-03-28T10:22:53.570194Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:596:2520], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T10:22:53.570271Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T10:22:53.570305Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T10:22:53.570412Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271122432, Sender [1:593:2518], Recipient [1:409:2404]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2025-03-28T10:22:53.570440Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2025-03-28T10:22:53.663218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2025-03-28T10:22:53.663433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T10:22:53.663642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 0 2025-03-28T10:22:53.663850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2025-03-28T10:22:53.663912Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T10:22:53.664008Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T10:22:53.664734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2025-03-28T10:22:53.664867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2025-03-28T10:22:53.664912Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T10:22:53.664940Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T10:22:53.665104Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T10:22:53.665142Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T10:22:53.665244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T10:22:53.665305Z node 1 :FLAT_TX_SCHEMESHARD INFO: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2025-03-28T10:22:53.665335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2025-03-28T10:22:53.665366Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 2 -> 3 2025-03-28T10:22:53.665454Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T10:22:53.665855Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T10:22:53.665898Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T10:22:53.666014Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T10:22:53.666054Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T10:22:53.666114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T10:22:53.666154Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2025-03-28T10:22:53.666182Z node 1 :FLAT_TX_SCHEMESHARD INFO: Change state for txid 1:0 3 -> 128 2025-03-28T10:22:53.666260Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T10:22:53.666636Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T10:22:53.666664Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Activate send for 1:0 2025-03-28T10:22:53.666764Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435072, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2025-03-28T10:22:53.666792Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvProgressOperation 2025-03-28T10:22:53.666826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T10:22:53.666858Z node 1 :FLAT_TX_SCHEMESHARD INFO: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2025-03-28T10:22:53.666925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2025-03-28T10:22:53.666957Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2025-03-28T10:22:53.667000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2025-03-28T10:22:53.670357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2025-03-28T10:22:53.670793Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T10:22:53.670842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2025-03-28T10:22:53.671012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2025-03-28T10:22:53.672154Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877760, Sender [1:601:2525], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046316545 Status: OK ServerId: [1:603:2526] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2025-03-28T10:22:53.672206Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvClientConnected 2025-03-28T10:22:53.672259Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Handle TEvClientConnected, tabletId: 72057594046316545, status: OK, at schemeshard: 72057594046644480 2025-03-28T10:22:53.672394Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 500 TxId: 1 2025-03-28T10:22:53.672813Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:605:2528], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T10:22:53.672861Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T10:22:53.672891Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T10:22:53.672998Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124996, Sender [1:593:2518], Recipient [1:409:2404]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2025-03-28T10:22:53.673027Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2025-03-28T10:22:53.673085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2025-03-28T10:22:53.673116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2025-03-28T10:22:53.673152Z node 1 :FLAT_TX_SCHEMESHARD INFO: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2025-03-28T10:22:53.716093Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 273285138, Sender [1:43:2090], Recipient [1:409:2404]: NKikimr::NConsole::TEvConsole::TEvConfigNotificationRequest { Config { QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } } ItemKinds: 26 ItemKinds: 34 ItemKinds: 52 ItemKinds: 54 ItemKinds: 73 Local: true } 2025-03-28T10:22:53.716214Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got new config: QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2025-03-28T10:22:53.716257Z node 1 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:22:53.716663Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [RootDataErasureManager] Stop 2025-03-28T10:22:53.716737Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } 2025-03-28T10:22:53.755921Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:53.756037Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:53.767461Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:22:53.841818Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269091328, Sender [1:405:2400], Recipient [1:409:2404]: NKikimrTx.TEvProposeTransactionStatus Status: 17 StepId: 
500 TxId: 1 2025-03-28T10:22:53.842500Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269877761, Sender [1:636:2544], Recipient [1:409:2404]: NKikimr::TEvTabletPipe::TEvServerConnected 2025-03-28T10:22:53.842544Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTabletPipe::TEvServerConnected 2025-03-28T10:22:53.842590Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Pipe server connected, at tablet: 72057594046644480 2025-03-28T10:22:53.842758Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 269287424, Sender [1:569:2496], Recipient [1:409:2404]: {TEvPlanStep step# 500 MediatorId# 72057594046382081 TabletID 72057594046644480} 2025-03-28T10:22:53.842790Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvTxProcessing::TEvPlanStep 2025-03-28T10:22:53.842889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: TTxOperationPlanStep Execute, stepId: 500, transactions count in step: 1, at schemeshard: 72057594046644480 2025-03-28T10:22:53.843026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemes ... pient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T10:25:00.750468Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T10:25:00.750491Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T10:25:00.750552Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-28T10:25:00.750582Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-28T10:25:00.750647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 22 shard idx 72057594046644480:7 data size 0 row count 0 2025-03-28T10:25:00.750719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037894 maps to shardIdx: 72057594046644480:7 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 22], pathId map=TableA, is column=0, is olap=0 2025-03-28T10:25:00.750750Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037894 followerId=0, pathId 22: RowCount 0, DataSize 0 2025-03-28T10:25:00.750774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: datashardId 72075186224037894, followerId 0 2025-03-28T10:25:00.750826Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:7 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-03-28T10:25:00.750910Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T10:25:00.761236Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T10:25:00.761288Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T10:25:00.761312Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-28T10:25:00.833862Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-03-28T10:25:00.834314Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
StateWork, received event# 269553162, Sender [1:1558:3199], Recipient [1:409:2404]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186224037895 TableLocalId: 24 Generation: 1 Round: 58 TableStats { DataSize: 54 RowCount: 2 IndexSize: 0 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 1 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 18 Memory: 119576 Storage: 142 } ShardState: 2 UserTablePartOwners: 72075186224037895 NodeId: 1 StartTime: 4950 TableOwnerId: 72057594046644480 FollowerId: 0 2025-03-28T10:25:00.834364Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2025-03-28T10:25:00.834405Z node 1 :FLAT_TX_SCHEMESHARD INFO: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] state 'Ready' dataSize 54 rowCount 2 cpuUsage 0.0018 2025-03-28T10:25:00.834496Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037895 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 24] raw table stats: DataSize: 54 RowCount: 2 IndexSize: 0 InMemSize: 0 LastAccessTime: 5450 LastUpdateTime: 5450 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 1 RowUpdates: 2 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 54 IndexSize: 0 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2025-03-28T10:25:00.834543Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2025-03-28T10:25:00.887612Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T10:25:00.887678Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T10:25:00.887704Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2025-03-28T10:25:00.887762Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will execute TTxStoreStats, queue# 1 2025-03-28T10:25:00.887789Z node 1 :FLAT_TX_SCHEMESHARD TRACE: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2025-03-28T10:25:00.887856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: PersistSingleStats for pathId 24 shard idx 72057594046644480:8 data size 54 row count 2 2025-03-28T10:25:00.887909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037895 maps to shardIdx: 72057594046644480:8 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 24], pathId map=TableA, is column=0, is olap=0 2025-03-28T10:25:00.887943Z node 1 :FLAT_TX_SCHEMESHARD INFO: Add stats from shard with datashardId(TabletID)=72075186224037895 followerId=0, pathId 24: RowCount 2, DataSize 54 2025-03-28T10:25:00.887966Z node 1 :FLAT_TX_SCHEMESHARD TRACE: BuildStatsForCollector: 
datashardId 72075186224037895, followerId 0 2025-03-28T10:25:00.888022Z node 1 :FLAT_TX_SCHEMESHARD TRACE: [BackgroundCompaction] [Update] Skipped shard# 72057594046644480:8 with partCount# 1, rowCount# 2, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046644480 2025-03-28T10:25:00.888112Z node 1 :FLAT_TX_SCHEMESHARD TRACE: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2025-03-28T10:25:00.898408Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 2146435091, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2025-03-28T10:25:00.898464Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvPrivate::TEvPersistTableStats 2025-03-28T10:25:00.898486Z node 1 :FLAT_TX_SCHEMESHARD INFO: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2025-03-28T10:25:00.960192Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:25:00.960264Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:25:00.960332Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:25:00.960353Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:25:00.981455Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T10:25:01.064351Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 1 2025-03-28T10:25:01.137179Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:25:01.137250Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:25:01.137312Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:25:01.137334Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2025-03-28T10:25:01.158297Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037898 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T10:25:01.241504Z node 1 :TX_DATASHARD DEBUG: GetNextActiveOp at 72075186224037899 (dry run) active 0 active planned 0 immediate 0 planned 0 2025-03-28T10:25:01.324775Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:25:01.324848Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2025-03-28T10:25:01.325103Z node 1 :KQP_SESSION WARN: SessionId: ydb://session/3?node_id=1&id=OGM0NjZhZWUtNGU5YWFiZTYtMzg0NjQ2MGItZmE5Zjg4Nzc=, ActorId: [1:2001:3528], ActorState: ExecuteState, TraceId: 01jqe4df1f78d6xg8mj5d260nb, Create QueryResponse for error on request, msg: 2025-03-28T10:25:01.325256Z node 
1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, received event# 271124999, Sender [1:409:2404], Recipient [1:409:2404]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime
2025-03-28T10:25:01.325286Z node 1 :FLAT_TX_SCHEMESHARD TRACE: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime
2025-03-28T10:25:01.325389Z node 1 :KQP_SLOW_LOG WARN: SessionId: ydb://session/3?node_id=1&id=OGM0NjZhZWUtNGU5YWFiZTYtMzg0NjQ2MGItZmE5Zjg4Nzc=, Slow query, duration: 600.000000s, status: GENERIC_ERROR, user: UNAUTHENTICATED, results: 0b, text: "RESTORE `MyCollection`;", parameters: 0b
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS)
Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT , with diff: (TIM|SUCC)E(OUT|SS)
TBackTrace::Capture()+28 (0x18F7423C)
NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+592 (0x19431380)
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode)+3639 (0x48B40E97)
NKikimr::NTestSuiteIncrementalBackup::TTestCaseComplexRestoreBackupCollection::Execute_(NUnitTest::TTestContext&)+26163 (0x18BC4943)
std::__y1::__function::__func, void ()>::operator()()+280 (0x18B80448)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x194683A6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x19437EF9)
NKikimr::NTestSuiteIncrementalBackup::TCurrentTest::Execute()+1204 (0x18B7F2F4)
NUnitTest::TTestFactory::Execute()+2438 (0x194397C6)
NUnitTest::RunMain(int, char**)+5213 (0x1946291D)
??+0 (0x7F3643C29D90)
__libc_start_main+128 (0x7F3643C29E40)
_start+41 (0x1630C029)
|98.8%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log}
|98.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log}
>> test_tpch.py::TestTpchS1::test_tpch[22] [GOOD]
|99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/tpc/medium/py3test >> test_tpch.py::TestTpchS1::test_tpch[22] [GOOD]
|99.0%| [TA] $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log}
|99.1%| [TA] {RESULT} $(B)/ydb/tests/functional/tpc/medium/test-results/py3test/{meta.json ... results_accumulator.log}
>> test_alter_tiering.py::TestAlterTiering::test[many_tables] [GOOD]
>> test_alter_compression.py::TestAlterCompression::test[alter_compression]
>> ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [FAIL]
>> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete
>> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL]
------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [FAIL]
Test command err:
2025-03-28T10:22:51.718167Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TCleanupTablesActor] ActorId: [1:451:2413], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2025-03-28T10:22:51.718437Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2025-03-28T10:22:51.718489Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; test_client.cpp: SetPath # /home/runner/.ya/build/build_root/all7/0001b4/r3tmp/tmpSacob9/pdisk_1.dat 2025-03-28T10:22:52.114913Z node 1 :IMPORT WARN: Table profiles were not loaded TServer::EnableGrpc on GrpcPort 2727, node 1 2025-03-28T10:22:52.345433Z node 1 :NET_CLASSIFIER WARN: distributable config is empty, broken or outdated, will use file: (empty maybe) 2025-03-28T10:22:52.345482Z node 1 :NET_CLASSIFIER WARN: will try to initialize from file: (empty maybe) 2025-03-28T10:22:52.345504Z node 1 :NET_CLASSIFIER WARN: failed to initialize from file: (empty maybe) 2025-03-28T10:22:52.345811Z node 1 :NET_CLASSIFIER ERROR: got bad distributable configuration 2025-03-28T10:22:52.347520Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480 2025-03-28T10:22:52.437953Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:52.438111Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:52.452871Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected TClient is connected to server localhost:30937 2025-03-28T10:22:52.968144Z node 1 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480 2025-03-28T10:22:55.643455Z node 2 :STATISTICS INFO: Subscribed for config changes on node 2 2025-03-28T10:22:55.668991Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:55.669101Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:55.716336Z node 1 :HIVE WARN: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2025-03-28T10:22:55.717928Z node 1 :HIVE WARN: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:22:55.925314Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:22:55.925744Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:22:55.926121Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:22:55.926235Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:22:55.926385Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:22:55.926457Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:22:55.926520Z node 2 :HIVE WARN: HIVE#72075186224037888 
THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:22:55.926584Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:22:55.926651Z node 2 :HIVE WARN: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2025-03-28T10:22:56.073072Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2025-03-28T10:22:56.073171Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2025-03-28T10:22:56.085887Z node 2 :HIVE WARN: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2025-03-28T10:22:56.200441Z node 2 :IMPORT WARN: Table profiles were not loaded 2025-03-28T10:22:56.244016Z node 2 :STATISTICS INFO: [72075186224037894] OnActivateExecutor 2025-03-28T10:22:56.244124Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Execute 2025-03-28T10:22:56.276535Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInitSchema::Complete 2025-03-28T10:22:56.278147Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Execute 2025-03-28T10:22:56.278371Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2025-03-28T10:22:56.278441Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ColumnStatistics: column count# 0 2025-03-28T10:22:56.278497Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2025-03-28T10:22:56.278547Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2025-03-28T10:22:56.278599Z node 2 :STATISTICS DEBUG: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2025-03-28T10:22:56.278647Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxInit::Complete 2025-03-28T10:22:56.279768Z node 2 :STATISTICS INFO: [72075186224037894] Subscribed for config changes 2025-03-28T10:22:56.312634Z node 2 :STATISTICS DEBUG: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2025-03-28T10:22:56.312728Z node 2 :STATISTICS DEBUG: ConnectToSA(), pipe client id: [2:1870:2597], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2025-03-28T10:22:56.318082Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1883:2606] 2025-03-28T10:22:56.324697Z node 2 :STATISTICS DEBUG: [72075186224037894] EvServerConnected, pipe server id = [2:1924:2623] 2025-03-28T10:22:56.324976Z node 2 :STATISTICS DEBUG: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:1924:2623], schemeshard id = 72075186224037897 2025-03-28T10:22:56.328223Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2025-03-28T10:22:56.344173Z node 2 :STATISTICS DEBUG: Table _statistics updater. Describe result: PathErrorUnknown 2025-03-28T10:22:56.344232Z node 2 :STATISTICS NOTICE: Table _statistics updater. Creating table 2025-03-28T10:22:56.344323Z node 2 :STATISTICS DEBUG: Table _statistics updater. Full table path:/Root/Database/.metadata/_statistics 2025-03-28T10:22:56.355654Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897 2025-03-28T10:22:56.360688Z node 2 :STATISTICS DEBUG: Table _statistics updater. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 3 } 2025-03-28T10:22:56.360790Z node 2 :STATISTICS DEBUG: Table _statistics updater. Subscribe on create table tx: 281474976720657 2025-03-28T10:22:56.508069Z node 2 :STATISTICS DEBUG: [72075186224037894] TTxConfigure::Complete 2025-03-28T10:22:56.704365Z node 2 :STATISTICS DEBUG: Table _statistics updater. Request: create. Transaction completed: 281474976720657. Doublechecking... 2025-03-28T10:22:56.760256Z node 2 :STATISTICS DEBUG: Table _statistics updater. Column diff is empty, finishing 2025-03-28T10:22:57.594650Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2238:3073], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:57.594816Z node 1 :KQP_WORKLOAD_SERVICE WARN: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2025-03-28T10:22:57.612649Z node 2 :FLAT_TX_SCHEMESHARD WARN: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897 2025-03-28T10:22:57.730022Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2025-03-28T10:22:57.730275Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2025-03-28T10:22:57.730549Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2025-03-28T10:22:57.730703Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2025-03-28T10:22:57.730824Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2025-03-28T10:22:57.731001Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertionDedup; 2025-03-28T10:22:57.731164Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2025-03-28T10:22:57.731292Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestorePortionFromChunks; 2025-03-28T10:22:57.731417Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2025-03-28T10:22:57.731555Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncMinSnapshotFromChunks; 2025-03-28T10:22:57.731687Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2025-03-28T10:22:57.731823Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;self_id=[2:2325:2848];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2025-03-28T10:22:57.762548Z node 2 :TX_COLUMNSHARD WARN: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2025-03-28T10:22:57.762668Z node 2 :TX_COLUMNSHARD WARN: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;descr ... 28T10:29:39.484676Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T10:29:39.484719Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:39.484743Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T10:29:40.333938Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T10:29:40.333999Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:40.334024Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T10:29:41.091316Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T10:29:41.091393Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:41.091425Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T10:29:41.921133Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T10:29:41.931534Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T10:29:41.931583Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:41.931609Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T10:29:42.736900Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T10:29:42.736954Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:42.736975Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T10:29:43.496840Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T10:29:43.496973Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T10:29:43.507454Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T10:29:43.507499Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:43.507522Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T10:29:44.313422Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T10:29:44.313481Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:44.313504Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 
2025-03-28T10:29:45.107751Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T10:29:45.107816Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:45.107843Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T10:29:45.874775Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T10:29:45.885159Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T10:29:45.885200Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:45.885224Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T10:29:46.797828Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T10:29:46.797891Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:46.797916Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T10:29:47.635317Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T10:29:47.635443Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T10:29:47.645913Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T10:29:47.645957Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:47.645979Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T10:29:48.439824Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T10:29:48.439885Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:48.439909Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T10:29:49.215421Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T10:29:49.215480Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:49.215502Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T10:29:50.031173Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T10:29:50.041487Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T10:29:50.041523Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:50.041547Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T10:29:50.986567Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T10:29:50.986635Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. 
All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:50.986666Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T10:29:51.845542Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T10:29:51.845673Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T10:29:51.856228Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T10:29:51.856271Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:51.856294Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T10:29:52.652404Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T10:29:52.652456Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:52.652478Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T10:29:53.432653Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T10:29:53.432711Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:53.432734Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T10:29:54.208225Z node 2 :STATISTICS DEBUG: [72075186224037894] EvPropagateTimeout 2025-03-28T10:29:54.218608Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T10:29:54.218647Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:54.218670Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T10:29:55.131599Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T10:29:55.131654Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:55.131678Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests. 2025-03-28T10:29:55.937204Z node 2 :STATISTICS DEBUG: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2025-03-28T10:29:55.937329Z node 2 :STATISTICS DEBUG: EvPropagateStatistics, node id = 2 2025-03-28T10:29:55.947851Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze 2025-03-28T10:29:55.947891Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=operationId 2025-03-28T10:29:55.947913Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2025-03-28T10:29:56.763280Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal 2025-03-28T10:29:56.763335Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal tables sent the requests. 
OperationId=operationId
2025-03-28T10:29:56.763359Z node 2 :STATISTICS DEBUG: [72075186224037894] ScheduleNextTraversal. All the force traversal operations sent the requests.
(TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.ydb/library/actors/testlib/test_runtime.cpp:716:
TBackTrace::Capture()+28 (0x18BB873C)
TWithBackTrace::TWithBackTrace<>()+80 (0x187E0630)
NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration)+485 (0x187B4CB5)
NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TTestCaseAnalyzeRebootColumnShard::Execute_(NUnitTest::TTestContext&)+4263 (0x187D1857)
std::__y1::__function::__func, void ()>::operator()()+280 (0x187DC6E8)
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+534 (0x190A3FA6)
NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+505 (0x1907CBB9)
NKikimr::NStat::NTestSuiteAnalyzeColumnshard::TCurrentTest::Execute()+1204 (0x187DB8B4)
NUnitTest::TTestFactory::Execute()+2438 (0x1907E486)
NUnitTest::RunMain(int, char**)+5213 (0x1909E51D)
??+0 (0x7F14CBEF9D90)
__libc_start_main+128 (0x7F14CBEF9E40)
_start+41 (0x16144029)
|99.3%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|99.3%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log}
>> test_tpch_import.py::TestS3TpchImport::test_import_and_export [GOOD]
>> test_alter_compression.py::TestAlterCompression::test[alter_compression] [GOOD]
>> test_tpch_import.py::TestS3TpchImport::test_import_and_export [FAIL]
|99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/s3_import/py3test >> test_tpch_import.py::TestS3TpchImport::test_import_and_export [FAIL]
|99.5%| [TM] {RESULT} ydb/tests/olap/s3_import/py3test
>> test_insert.py::TestInsert::test[read_data_during_bulk_upsert]
>> alter_compression.py::TestAlterCompression::test_all_supported_compression [GOOD]
>> alter_compression.py::TestAlterCompression::test_availability_data
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete
2025-03-28 10:32:52,000 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-28 10:32:52,428 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination:
pid rss ref pdirt
996987 691M 695M 609M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/all7/000208/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu
998299 7.2G 7.3G 6.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/all7/000208/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_de
999614 393M 393M 359M └─ moto_server s3 --port 10483
Test command err:
  File "library/python/pytest/main.py", line 101, in main
    rc = pytest.main(
  File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main
    ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main(
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main
    return wrap_session(config, _main)
  File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session
    session.exitstatus = doit(config, session) or 0
  File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main
    config.hook.pytest_runtestloop(session=session)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop
    item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol
    runtestprotocol(item, nextitem=nextitem)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol
    reports.append(call_and_report(item, "call", log))
  File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report
    call = call_runtest_hook(item, when, **kwds)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook
    return CallInfo.from_call(
  File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call
    result: Optional[TResult] = func()
  File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in <lambda>
    lambda: ihook(item=item, **kwds), when=when, reraise=reraise
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call
    item.runtest()
  File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest
    self.ihook.pytest_pyfunc_call(pyfuncitem=self)
  File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__
    return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec
    return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
  File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall
    res = hook_impl.function(*args)
  File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call
    pyfuncitem.retval = testfunction(**testargs)
  File "ydb/tests/olap/ttl_tiering/ttl_delete_s3.py", line 269, in test_ttl_delete
    self.ydb_client.query("""
  File "ydb/tests/olap/common/ydb_client.py", line 24, in query
    return self.session_pool.execute_with_retries(statement)
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in execute_with_retries
    return retry_operation_sync(wrapped_callee, retry_settings)
  File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync
    for next_opt in opt_generator:
  File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl
    result = YdbRetryOperationFinalResult(callee(*args, **kwargs))
  File "contrib/python/ydb/py3/ydb/query/pool.py", line 200, in wrapped_callee
    return [result_set for result_set in it]
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__
    return self._next()
  File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next
    res = self.wrapper(next(self.it))
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__
    return self._next()
  File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next
    _common.wait(self._state.condition.wait, _response_ready)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait
    _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb)
  File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once
    wait_fn(timeout=timeout)
  File "contrib/tools/python3/Lib/threading.py", line 359, in wait
    gotit = waiter.acquire(True, timeout)
  File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown
    traceback.print_stack(file=sys.stderr)
Traceback (most recent call last):
  File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait
    wait_for(
  File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for
    raise TimeoutError(truncate(message, MAX_MESSAGE_LEN))
yatest.common.process.TimeoutError: ...', '/home/runner/.ya/build/build_root/all7/000208', '--source-root', '/home/runner/.ya/build/build_root/all7/000208/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/all7/000208/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
  File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main
    res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback)
  File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait
    raise ExecutionTimeoutError(self, str(e))
yatest.common.process.ExecutionTimeoutError: (("...', '/home/runner/.ya/build/build_root/all7/000208', '--source-root', '/home/runner/.ya/build/build_root/all7/000208/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/all7/000208/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'ttl_delete_s3.py']' stopped by 600 seconds timeout",), {})
------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/ttl_tiering/py3test >> data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test
2025-03-28 10:32:52,077 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out
2025-03-28 10:32:52,289 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout.
Process tree before termination: pid rss ref pdirt 997002 730M 735M 649M ydb-tests-olap-ttl_tiering --basetemp /home/runner/.ya/build/build_root/all7/0001fa/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-modu 998300 2.7G 2.7G 2.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/all7/0001fa/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_m 999603 485M 486M 451M └─ moto_server s3 --port 65376 Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in 
_multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 171, in test if not self.wait_for( File "ydb/tests/olap/ttl_tiering/base.py", line 68, in wait_for if condition_func(): File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 172, in lambda: get_rows_in_portion(bucket1_path) == self.row_count File "ydb/tests/olap/ttl_tiering/data_migration_when_alter_ttl.py", line 159, in get_rows_in_portion portions_stat = table.get_portion_stat_by_tier() File "ydb/tests/olap/common/column_table_helper.py", line 26, in get_portion_stat_by_tier results = self.ydb_client.query( File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 200, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ..._root/all7/0001fa', '--source-root', '/home/runner/.ya/build/build_root/all7/0001fa/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/all7/0001fa/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', 
'--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("..._root/all7/0001fa', '--source-root', '/home/runner/.ya/build/build_root/all7/0001fa/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/all7/0001fa/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/ttl_tiering', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete', '--test-filter', 'ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering', '--tb', 'short', '--dep-root', 'ydb/tests/olap/ttl_tiering', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'data_migration_when_alter_ttl.py']' stopped by 600 seconds timeout",), {}) |99.7%| [TA] $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TA] {RESULT} $(B)/ydb/tests/olap/ttl_tiering/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] 2025-03-28 10:32:54,105 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-28 10:32:54,284 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 997682 1.7G 1.7G 1.6G ydb-tests-olap-scenario --basetemp /home/runner/.ya/build/build_root/all7/000163/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctest-module 1010552 1.9G 1.9G 1.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/all7/000163/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_ins Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File 
"contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "/home/runner/.ya/build/build_root/all7/000163/environment/arcadia/ydb/tests/olap/scenario/conftest.py", line 88, in test ctx.executable(self, ctx) File "ydb/tests/olap/scenario/test_insert.py", line 86, in scenario_read_data_during_bulk_upsert thread2.join_all() File "ydb/tests/olap/common/thread_helper.py", line 45, in join_all thread.join(timeout=timeout) File "ydb/tests/olap/common/thread_helper.py", line 16, in join super().join(timeout) File "contrib/tools/python3/Lib/threading.py", line 1149, in join self._wait_for_tstate_lock() File "contrib/tools/python3/Lib/threading.py", line 1169, in _wait_for_tstate_lock if lock.acquire(block, timeout): File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...-doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/all7/000163/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/all7/000163', '--source-root', '/home/runner/.ya/build/build_root/all7/000163/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/all7/000163/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_alter_tiering.py::TestAlterTiering::test[many_tables]', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--test-filter', 'test_alter_compression.py::TestAlterCompression::test[alter_compression]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', 
'--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...-doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/all7/000163/ydb/tests/olap/scenario/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/all7/000163', '--source-root', '/home/runner/.ya/build/build_root/all7/000163/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/all7/000163/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/scenario', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'test_alter_tiering.py::TestAlterTiering::test[many_tables]', '--test-filter', 'test_insert.py::TestInsert::test[read_data_during_bulk_upsert]', '--test-filter', 'test_alter_compression.py::TestAlterCompression::test[alter_compression]', '--tb', 'short', '--dep-root', 'ydb/tests/olap/scenario', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) |99.9%| [TM] {RESULT} ydb/tests/olap/scenario/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_availability_data 2025-03-28 10:32:54,085 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2025-03-28 10:32:54,357 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
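The scenario hang above has a different shape: the test is blocked in thread2.join_all() (ydb/tests/olap/common/thread_helper.py in the trace), and threading.Thread.join(timeout) returns silently whether or not the worker finished. An illustrative sketch, under that assumption, of a join helper that turns a still-alive worker into an explicit failure (the class name and message are hypothetical, not the repository's code):

    import threading

    class TestThread(threading.Thread):
        def join(self, timeout=None):
            # Thread.join(timeout) does not raise on timeout by itself,
            # so check is_alive() and fail loudly instead of returning.
            super().join(timeout)
            if timeout is not None and self.is_alive():
                raise TimeoutError("worker %s still alive after %ss" % (self.name, timeout))

    def join_all(threads, timeout=None):
        for thread in threads:
            thread.join(timeout=timeout)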
Process tree before termination: pid rss ref pdirt 997677 771M 778M 693M ydb-tests-olap-column_family-compression --basetemp /home/runner/.ya/build/build_root/all7/000164/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor 998856 6.0G 6.0G 5.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/all7/000164/ydb/tests/olap/column_family/compression/test-results/py3test/testing_ou Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 169, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 318, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 271, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 325, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 350, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 114, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 133, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 222, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 261, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 341, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File 
"contrib/python/pytest/py3/_pytest/runner.py", line 169, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1805, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 513, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 103, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 563, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/olap/column_family/compression/alter_compression.py", line 159, in test_availability_data self.upsert_and_wait_portions(test_table, single_upsert_rows_count, upsert_rows_count) File "ydb/tests/olap/column_family/compression/alter_compression.py", line 47, in upsert_and_wait_portions if not self.wait_for( File "ydb/tests/olap/column_family/compression/base.py", line 48, in wait_for if condition_func(): File "ydb/tests/olap/column_family/compression/alter_compression.py", line 48, in lambda: table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == number_rows_for_insert * count_upsert, plain_or_under_sanitizer(70, 140) File "ydb/tests/olap/common/column_table_helper.py", line 26, in get_portion_stat_by_tier results = self.ydb_client.query( File "ydb/tests/olap/common/ydb_client.py", line 24, in query return self.session_pool.execute_with_retries(statement) File "contrib/python/ydb/py3/ydb/query/pool.py", line 202, in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) File "contrib/python/ydb/py3/ydb/retries.py", line 133, in retry_operation_sync for next_opt in opt_generator: File "contrib/python/ydb/py3/ydb/retries.py", line 94, in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) File "contrib/python/ydb/py3/ydb/query/pool.py", line 200, in wrapped_callee return [result_set for result_set in it] File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ return self._next() File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next res = self.wrapper(next(self.it)) File "contrib/python/grpcio/py3/grpc/_channel.py", line 475, in __next__ return self._next() File "contrib/python/grpcio/py3/grpc/_channel.py", line 872, in _next _common.wait(self._state.condition.wait, _response_ready) File "contrib/python/grpcio/py3/grpc/_common.py", line 150, in wait _wait_once(wait_fn, MAXIMUM_WAIT_TIMEOUT, spin_cb) File "contrib/python/grpcio/py3/grpc/_common.py", line 112, in _wait_once wait_fn(timeout=timeout) File "contrib/tools/python3/Lib/threading.py", line 359, in wait gotit = waiter.acquire(True, timeout) File "library/python/pytest/plugins/ya.py", line 344, in _graceful_shutdown traceback.print_stack(file=sys.stderr) Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 764, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/all7/000164/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/all7/000164', 
'--source-root', '/home/runner/.ya/build/build_root/all7/000164/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/all7/000164/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'alter_compression.py::TestAlterCompression::test_all_supported_compression', '--test-filter', 'alter_compression.py::TestAlterCompression::test_availability_data', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1749, in main res.wait(check_exit_code=False, timeout=run_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...', '--doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/all7/000164/ydb/tests/olap/column_family/compression/test-results/py3test/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/all7/000164', '--source-root', '/home/runner/.ya/build/build_root/all7/000164/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/all7/000164/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/8330113388/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--test-filter', 'alter_compression.py::TestAlterCompression::test_all_supported_compression', '--test-filter', 'alter_compression.py::TestAlterCompression::test_availability_data', '--tb', 'short', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) |99.9%| [TM] {RESULT} ydb/tests/olap/column_family/compression/py3test |99.9%| CLEANING BUILD ROOT ydb/tests/olap/column_family/compression [size:medium] ------ sole chunk ran 2 tests (total:624.63s - test:600.07s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: 
alter_compression.py::TestAlterCompression::test_all_supported_compression (good) duration: 568.13s
alter_compression.py::TestAlterCompression::test_availability_data (timeout) duration: 52.90s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/stderr
[timeout] alter_compression.py::TestAlterCompression::test_availability_data [default-linux-x86_64-release-asan] (52.90s)
Killed by timeout (600 s)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAlterCompression.test_availability_data.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff
------ TIMEOUT: 1 - GOOD, 1 - TIMEOUT ydb/tests/olap/column_family/compression
ydb/tests/olap/s3_import [size:medium]
------ sole chunk ran 1 test (total:526.70s - test:526.64s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 11.2G (11706064K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
997448 44.8M 44.8M 6.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
997650 36.8M 25.2M 12.5M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
997674 598M 601M 516M └─ ydb-tests-olap-s3_import --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct
998775 10.0G 10.0G 9.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/t
1000171 537M 537M 503M └─ moto_server s3 --port 11346
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/stderr
[fail] test_tpch_import.py::TestS3TpchImport::test_import_and_export [default-linux-x86_64-release-asan] (522.16s)
teardown failed: ydb/tests/olap/s3_import/base.py:24: in teardown_class cls.cluster.stop()
ydb/tests/library/harness/kikimr_runner.py:494: in stop raise daemon.SeveralDaemonErrors(saved_exceptions)
E ydb.tests.library.harness.daemon.SeveralDaemonErrors: Daemon failed with message: Bad exit_code..
E Process exit_code = 100.
E Stdout file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export/cluster/node_1/stdout E Stderr file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/s3_import/test-results/py3test/testing_out_stuff/test_tpch_import.py.TestS3TpchImport.test_import_and_export/cluster/node_1/stderr E Stderr content: E E GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option E Current KQP shutdown state: spent 0 seconds, not started yet E warning: address range table at offset 0x10c0 has a premature terminator entry at offset 0x10d0 E E ================================================================= E ==998775==ERROR: LeakSanitizer: detected memory leaks E E Indirect leak of 3792 byte(s) in 1 object(s) allocated from: E #0 0x1d6662cd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 E #1 0x468183df in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:12 E #2 0x468183df in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #3 0x4680a8a9 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #4 0x4680a8a9 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> const&, TBasicString> const&, NYql::TYtGatewayConfig const*, TIntrusivePtr>, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/yt/yql/provider ..[snippet truncated].. __libcpp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 E #3 0x465fb076 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 E #4 0x465fb076 in initialize_dynamic /-S/util/generic/hash_table.h:239:35 E #5 0x465fb076 in initialize_buckets_dynamic /-S/util/generic/hash_table.h:912:17 E #6 0x465fb076 in THashTable> const, TIntrusivePtr>>, TBasicString>, THash>>, TSelect1st, TEqualTo>>, std::__y1::allocator>>>::reserve(unsigned long) /-S/util/generic/hash_table.h:1330:13 E #7 0x465e3f6b in insert_unique >, TIntrusivePtr > > > /-S/util/generic/hash_table.h:679:9 E #8 0x465e3f6b in insert /-S/util/generic/hash.h:153:20 E #9 0x465e3f6b in NYql::NCommon::TSettingDispatcher::TSettingHandlerImpl& NYql::NCommon::TSettingDispatcher::AddSetting(TBasicString> const&, NYql::NCommon::TConfSetting&) /-S/yql/essentials/providers/common/config/yql_dispatch.h:344:23 E #10 0x465c7619 in NYql::TYtConfiguration::TYtConfiguration(NYql::TTypeAnnotationContext&) /-S/yt/yql/providers/yt/common/yql_yt_settings.cpp:466:5 E #11 0x468183f0 in TYtVersionedConfiguration /-S/yt/yql/providers/yt/common/yql_yt_settings.h:372:11 E #12 0x468183f0 in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:16 E #13 0x468183f0 in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #14 0x4680a8a9 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #15 0x4680a8a9 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString [size:medium] ------ sole chunk ran 3 tests (total:622.69s - test:600.08s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: test_alter_compression.py::TestAlterCompression::test[alter_compression] (good) duration: 354.90s test_alter_tiering.py::TestAlterTiering::test[many_tables] (good) duration: 168.47s 
test_insert.py::TestInsert::test[read_data_during_bulk_upsert] (timeout) duration: 94.36s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr [timeout] test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [default-linux-x86_64-release-asan] (94.36s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_insert.py.TestInsert.test.read_data_during_bulk_upsert.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff ------ TIMEOUT: 2 - GOOD, 1 - TIMEOUT ydb/tests/olap/scenario ydb/tests/olap/ttl_tiering [size:medium] nchunks:2 ------ [data_migration_when_alter_ttl.py] chunk ran 1 test (total:615.84s - test:600.05s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test (timeout) duration: 612.14s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [timeout] data_migration_when_alter_ttl.py::TestDataMigrationWhenAlterTtl::test [default-linux-x86_64-release-asan] (612.14s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/data_migration_when_alter_ttl.py.TestDataMigrationWhenAlterTtl.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ [ttl_delete_s3.py] chunk ran 3 tests (total:615.52s - test:600.02s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change (fail) duration: 354.62s ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete (timeout) duration: 256.73s ttl_delete_s3.py::TestDeleteS3Ttl::test_delete_s3_tiering test was not launched inside chunk. Info: Test run has exceeded 8.0G (8388608K) memory limit with 8.5G (8937744K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 996905 44.8M 44.8M 6.4M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 996983 35.0M 23.2M 10.7M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 996987 690M 693M 608M └─ ydb-tests-olap-ttl_tiering --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc 998299 7.4G 7.4G 6.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ 999614 393M 393M 359M └─ moto_server s3 --port 10483 Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/stderr [fail] ttl_delete_s3.py::TestDeleteS3Ttl::test_data_unchanged_after_ttl_change [default-linux-x86_64-release-asan] (354.62s) ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:141: in test_data_unchanged_after_ttl_change data = self.get_aggregated(table_path) ydb/tests/olap/ttl_tiering/ttl_delete_s3.py:27: in get_aggregated answer = self.ydb_client.query(f"SELECT count(*), sum(val), sum(Digest::Fnv32(s)) from `{table_path}`") ydb/tests/olap/common/ydb_client.py:24: in query return self.session_pool.execute_with_retries(statement) contrib/python/ydb/py3/ydb/query/pool.py:202: in execute_with_retries return retry_operation_sync(wrapped_callee, retry_settings) contrib/python/ydb/py3/ydb/retries.py:133: in retry_operation_sync for next_opt in opt_generator: contrib/python/ydb/py3/ydb/retries.py:94: in retry_operation_impl result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) contrib/python/ydb/py3/ydb/query/pool.py:200: in wrapped_callee return [result_set for result_set in it] contrib/python/ydb/py3/ydb/_utilities.py:173: in __next__ return self._next() contrib/python/ydb/py3/ydb/_utilities.py:164: in _next res = self.wrapper(next(self.it)) contrib/python/ydb/py3/ydb/query/session.py:350: in lambda resp: base.wrap_execute_query_response( contrib/python/ydb/py3/ydb/query/base.py:172: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/query/base.py:189: in wrap_execute_query_response issues._process_response(response_pb) contrib/python/ydb/py3/ydb/issues.py:225: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.BadRequest: message: "Table /Root/test_data_unchanged_after_ttl_change/table (shard 72075186224037941) scan failed, reason: cannot build metadata/Snapshot too old: {1743157724000:max}. CS min read snapshot: {1743157725000:max}. 
now: 2025-03-28T10:28:50.005072Z" issue_code: 2017 severity: 1 (server_code: 400010) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_data_unchanged_after_ttl_change.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff [timeout] ttl_delete_s3.py::TestDeleteS3Ttl::test_ttl_delete [default-linux-x86_64-release-asan] (256.73s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff/ttl_delete_s3.py.TestDeleteS3Ttl.test_ttl_delete.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/ttl_tiering/test-results/py3test/testing_out_stuff ------ TIMEOUT: 1 - FAIL, 1 - NOT_LAUNCHED, 2 - TIMEOUT ydb/tests/olap/ttl_tiering ydb/tests/stress/log/tests [size:medium] ------ sole chunk ran 1 test (total:127.03s - test:126.96s) [fail] test_workload.py::TestYdbLogWorkload::test[column] [default-linux-x86_64-release-asan] (123.44s) teardown failed: ydb/tests/stress/log/tests/test_workload.py:41: in teardown_class cls.cluster.stop() ydb/tests/library/harness/kikimr_runner.py:494: in stop raise daemon.SeveralDaemonErrors(saved_exceptions) E ydb.tests.library.harness.daemon.SeveralDaemonErrors: Daemon failed with message: Bad exit_code.. E Process exit_code = 100. E Stdout file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.column/cluster/node_1/stdout E Stderr file name: E /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.column/cluster/node_1/stderr E Stderr content: E E GRpc memory quota was set but disabled due to issues with grpc quoter, to enable it use EnableGRpcMemoryQuota option E Current KQP shutdown state: spent 0 seconds, not started yet E warning: address range table at offset 0x10c0 has a premature terminator entry at offset 0x10d0 E E ================================================================= E ==998476==ERROR: LeakSanitizer: detected memory leaks E E Indirect leak of 15168 byte(s) in 4 object(s) allocated from: E #0 0x1d6662cd in operator new(unsigned long) /-S/contrib/libs/clang18-rt/lib/asan/asan_new_delete.cpp:86:3 E #1 0x468183df in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:12 E #2 0x468183df in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #3 0x4680a8a9 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #4 0x4680a8a9 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> const&, TBasicString> const&, NYql::TYtGatewayConfig const*, TIntrusivePtr>, std::__y1::shared_ptr const&, std::__y1::shared_ptr const&) /-S/yt/yql/providers/yt/pro ..[snippet truncated].. 
pp_allocate /-S/contrib/libs/cxxsupp/libcxx/include/new:289:10 E #3 0x465fb076 in allocate /-S/contrib/libs/cxxsupp/libcxx/include/__memory/allocator.h:118:32 E #4 0x465fb076 in initialize_dynamic /-S/util/generic/hash_table.h:239:35 E #5 0x465fb076 in initialize_buckets_dynamic /-S/util/generic/hash_table.h:912:17 E #6 0x465fb076 in THashTable> const, TIntrusivePtr>>, TBasicString>, THash>>, TSelect1st, TEqualTo>>, std::__y1::allocator>>>::reserve(unsigned long) /-S/util/generic/hash_table.h:1330:13 E #7 0x465e3f6b in insert_unique >, TIntrusivePtr > > > /-S/util/generic/hash_table.h:679:9 E #8 0x465e3f6b in insert /-S/util/generic/hash.h:153:20 E #9 0x465e3f6b in NYql::NCommon::TSettingDispatcher::TSettingHandlerImpl& NYql::NCommon::TSettingDispatcher::AddSetting(TBasicString> const&, NYql::NCommon::TConfSetting&) /-S/yql/essentials/providers/common/config/yql_dispatch.h:344:23 E #10 0x465c7619 in NYql::TYtConfiguration::TYtConfiguration(NYql::TTypeAnnotationContext&) /-S/yt/yql/providers/yt/common/yql_yt_settings.cpp:466:5 E #11 0x468183f0 in TYtVersionedConfiguration /-S/yt/yql/providers/yt/common/yql_yt_settings.h:372:11 E #12 0x468183f0 in MakeIntrusive, NYql::TTypeAnnotationContext &> /-S/util/generic/ptr.h:818:16 E #13 0x468183f0 in NYql::TYtState::TYtState(NYql::TTypeAnnotationContext*) /-S/yt/yql/providers/yt/provider/yql_yt_provider.h:102:25 E #14 0x4680a8a9 in MakeIntrusive, NYql::TTypeAnnotationContext *> /-S/util/generic/ptr.h:818:16 E #15 0x4680a8a9 in NYql::CreateYtNativeState(TIntrusivePtr>, TBasicString> co... Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbLogWorkload.test.column.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/log/tests/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/stress/log/tests ydb/core/keyvalue/ut_trace [size:medium] nchunks:5 ------ [0/5] chunk ran 1 test (total:5.78s - test:5.75s) [fail] TKeyValueTracingTest::ReadHuge [default-linux-x86_64-release-asan] (2.05s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadHuge.out ------ [1/5] chunk ran 1 test (total:5.58s - test:5.54s) [fail] TKeyValueTracingTest::ReadSmall [default-linux-x86_64-release-asan] (2.10s) equal assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:124, void TestOneRead(TString, TString): env.WilsonUploader->Traces.size() == 1 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.ReadSmall.out ------ [2/5] chunk ran 1 test (total:5.69s - test:5.64s) [fail] TKeyValueTracingTest::WriteHuge [default-linux-x86_64-release-asan] (2.09s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteHuge.out ------ [3/5] chunk ran 1 test (total:5.98s - test:5.94s) [fail] TKeyValueTracingTest::WriteSmall [default-linux-x86_64-release-asan] (2.07s) assertion failed at ydb/core/keyvalue/keyvalue_ut_trace.cpp:103, void TestOneWrite(TString, TVector &&): (env.WilsonUploader->Traces.size() == 1) failed: (2 != 1) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12 TestOneWrite(TBasicString>, TVector>, std::__y1::allocator>>>&&) at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:103:5 UnRef at /-S/util/generic/ptr.h:624:13 operator() at /-S/ydb/core/keyvalue/keyvalue_ut_trace.cpp:0:1 operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16 UnRef at /-S/util/generic/ptr.h:624:13 UnRef at /-S/util/generic/ptr.h:624:13 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/keyvalue/ut_trace/test-results/unittest/testing_out_stuff/TKeyValueTracingTest.WriteSmall.out ------ FAIL: 4 - FAIL ydb/core/keyvalue/ut_trace ydb/core/kqp/ut/cost [size:medium] nchunks:50 ------ [0/50] chunk ran 1 test (total:10.60s - test:10.56s) [crashed] KqpCost::OlapWriteRow [default-linux-x86_64-release-asan] (0.00s) Test crashed (return code: 100) ==996072==ERROR: AddressSanitizer: SEGV on unknown address 0x000000000008 (pc 0x000018b1ca4d bp 0x7ffdb5dc6b60 sp 0x7ffdb5dc69c0 T0) ==996072==The signal is caused by a READ memory access. ==996072==Hint: address points to the zero page. 
2025-03-28T10:22:54.588629Z node 1 :METADATA_PROVIDER ERROR: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7486803892097699555:2065];send_to=[0:7307199536658146131:7762515];
2025-03-28T10:22:54.588753Z node 1 :METADATA_PROVIDER ERROR: fline=accessor_snapshot_base.cpp:69;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
#0 0x18b1ca4d in Get::TypeHandler> /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31
#1 0x18b1ca4d in Get /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:1348:32
#2 0x18b1ca4d in _internal_table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1762:31
#3 0x18b1ca4d in table_access /-B/ydb/public/api/protos/ydb_query_stats.pb.h:1766:10
#4 0x18b1ca4d in NKikimr::NKqp::NTestSuiteKqpCost::TTestCaseOlapWriteRow::Execute_(NUnitTest::TTestContext&) /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:636:13
#5 0x18b415d7 in operator() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
#6 0x18b415d7 in __invoke<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150:25
#7 0x18b415d7 in __call<(lambda at /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1) &> /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225:5
#8 0x18b415d7 in o ..[snippet truncated].. 0x194936d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430:12
#11 0x194936d5 in operator() /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989:10
#12 0x194936d5 in TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/utmain.cpp:525:20
#13 0x19463228 in NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool) /-S/library/cpp/testing/unittest/registar.cpp:374:18
#14 0x18b40483 in NKikimr::NKqp::NTestSuiteKqpCost::TCurrentTest::Execute() /-S/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp:93:1
#15 0x19464af5 in NUnitTest::TTestFactory::Execute() /-S/library/cpp/testing/unittest/registar.cpp:495:19
#16 0x1948dc4c in NUnitTest::RunMain(int, char**) /-S/library/cpp/testing/unittest/utmain.cpp:872:44
#17 0x7f2568a7cd8f (/lib/x86_64-linux-gnu/libc.so.6+0x29d8f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
#18 0x7f2568a7ce3f in __libc_start_main (/lib/x86_64-linux-gnu/libc.so.6+0x29e3f) (BuildId: 490fef8403240c91833978d494d39e537409b92e)
#19 0x162b8028 in _start (/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost+0x162b8028) (BuildId: 42cc14b09b8e1be640cc9d032c9c2bf606ecb0c4)
SUMMARY: AddressSanitizer: SEGV /-S/contrib/libs/protobuf/src/google/protobuf/repeated_ptr_field.h:273:31 in Get::TypeHandler>
==996072==ABORTING
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/cost/test-results/unittest/testing_out_stuff/KqpCost.OlapWriteRow.out
------ FAIL: 1 - CRASHED ydb/core/kqp/ut/cost
ydb/core/kqp/ut/query [size:medium] nchunks:50
------ [0/50] chunk ran 1 test (total:43.28s - test:43.24s)
[fail] KqpStats::SysViewClientLost [default-linux-x86_64-release-asan] (39.38s)
assertion failed at ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591, virtual void NKikimr::NKqp::NTestSuiteKqpStats::TTestCaseSysViewClientLost::Execute_(NUnitTest::TTestContext &): (timeoutedCount == 1)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1949767B
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1995C4FF
2. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:591: Execute_ @ 0x1903CBE8
3. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: operator() @ 0x1904FBF7
4. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1904FBF7
5. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18:1) &> @ 0x1904FBF7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x1904FBF7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x1904FBF7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x19993525
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x19993525
10. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x19993525
11. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19963078
12. /tmp//-S/ydb/core/kqp/ut/query/kqp_stats_ut.cpp:18: Execute @ 0x1904ED7B
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19964945
14. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x1998DA9C
15. ??:0: ?? @ 0x7F40663DBD8F
16. ??:0: ?? @ 0x7F40663DBE3F
17. ??:0: ?? @ 0x1640D028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/query/test-results/unittest/testing_out_stuff/KqpStats.SysViewClientLost.out
------ FAIL: 1 - FAIL ydb/core/kqp/ut/query
ydb/core/kqp/ut/tx [size:medium] nchunks:50
------ [0/50] chunk ran 1 test (total:15.71s - test:15.66s)
[fail] KqpSinkTx::OlapInvalidateOnError [default-linux-x86_64-release-asan] (12.18s)
assertion failed at ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182, virtual void NKikimr::NKqp::NTestSuiteKqpSinkTx::TInvalidateOnError::DoExecute(): (result.GetStatus() == EStatus::PRECONDITION_FAILED) failed: (BAD_REQUEST != PRECONDITION_FAILED)
: Error: Bad request. Table: `/Root/KV`., code: 2017
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Key","value":"1"}],"fields":["Key: uint32"]}, code: 2017
, with diff: (BAD_|P)RE(QUES|CONDI)T(|ION_FAILED)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:182: DoExecute @ 0x18D46A4E
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:201: Execute_ @ 0x18D2569A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: operator() @ 0x18D2CB17
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D2CB17
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14:1) &> @ 0x18D2CB17
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D2CB17
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D2CB17
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp:14: Execute @ 0x18D2BCE3
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F316B6C2D8F
18. ??:0: ?? @ 0x7F316B6C2E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSinkTx.OlapInvalidateOnError.out
------ [1/50] chunk ran 1 test (total:17.93s - test:17.89s)
[fail] KqpSnapshotIsolation::TConflictReadWriteOlap [default-linux-x86_64-release-asan] (14.19s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (ABORTED != SUCCESS)
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001
, with diff: (ABORT|SUCC)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:146: DoExecute @ 0x18D7C798
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:172: Execute_ @ 0x18D63E6A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F593DD1CD8F
18. ??:0: ?? @ 0x7F593DD1CE3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOlap.out
------ [10/50] chunk ran 1 test (total:11.37s - test:11.34s)
[fail] KqpSnapshotIsolation::TSimpleOltpNoSink [default-linux-x86_64-release-asan] (8.02s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18D6BF57
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:57: Execute_ @ 0x18D62F3A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7FC6D313FD8F
18. ??:0: ?? @ 0x7FC6D313FE3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltpNoSink.out
------ [2/50] chunk ran 1 test (total:11.56s - test:11.54s)
[fail] KqpSnapshotIsolation::TConflictReadWriteOltp [default-linux-x86_64-release-asan] (8.04s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18D79E87
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:159: Execute_ @ 0x18D63A12
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F3B21795D8F
18. ??:0: ?? @ 0x7F3B21795E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltp.out
------ [3/50] chunk ran 1 test (total:11.79s - test:11.75s)
[fail] KqpSnapshotIsolation::TConflictReadWriteOltpNoSink [default-linux-x86_64-release-asan] (8.29s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictReadWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:131: DoExecute @ 0x18D79E87
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:166: Execute_ @ 0x18D63C3A
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7FC685AC0D8F
18. ??:0: ?? @ 0x7FC685AC0E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictReadWriteOltpNoSink.out
------ [4/50] chunk ran 1 test (total:18.51s - test:18.45s)
[fail] KqpSnapshotIsolation::TConflictWriteOlap [default-linux-x86_64-release-asan] (14.69s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::ABORTED) failed: (SUCCESS != ABORTED)
, with diff: (SUCC|ABORT)E(SS|D)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:92: DoExecute @ 0x18D74DF8
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:118: Execute_ @ 0x18D637EA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7FB3786AAD8F
18. ??:0: ?? @ 0x7FB3786AAE3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOlap.out
------ [5/50] chunk ran 1 test (total:13.30s - test:13.27s)
[fail] KqpSnapshotIsolation::TConflictWriteOltp [default-linux-x86_64-release-asan] (9.58s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18D724E7
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:105: Execute_ @ 0x18D63392
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F96B9494D8F
18. ??:0: ?? @ 0x7F96B9494E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltp.out
------ [6/50] chunk ran 1 test (total:11.17s - test:11.14s)
[fail] KqpSnapshotIsolation::TConflictWriteOltpNoSink [default-linux-x86_64-release-asan] (7.61s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TConflictWrite::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:76: DoExecute @ 0x18D724E7
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:112: Execute_ @ 0x18D635BA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7F030A759D8F
18. ??:0: ?? @ 0x7F030A759E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TConflictWriteOltpNoSink.out
------ [7/50] chunk ran 1 test (total:11.18s - test:11.15s)
[fail] KqpSnapshotIsolation::TReadOnlyOltp [default-linux-x86_64-release-asan] (7.71s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18D81843
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:214: Execute_ @ 0x18D64092
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7FC0E23A4D8F
18. ??:0: ?? @ 0x7FC0E23A4E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltp.out
------ [8/50] chunk ran 1 test (total:10.76s - test:10.73s)
[fail] KqpSnapshotIsolation::TReadOnlyOltpNoSink [default-linux-x86_64-release-asan] (7.44s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TReadOnly::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:185: DoExecute @ 0x18D81843
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:221: Execute_ @ 0x18D642BA
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7FF4E0443D8F
18. ??:0: ?? @ 0x7FF4E0443E3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TReadOnlyOltpNoSink.out
------ [9/50] chunk ran 1 test (total:11.37s - test:11.34s)
[fail] KqpSnapshotIsolation::TSimpleOltp [default-linux-x86_64-release-asan] (7.80s)
assertion failed at ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25, virtual void NKikimr::NKqp::NTestSuiteKqpSnapshotIsolation::TSimple::DoExecute(): (result.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: SnapshotRW can only be used with olap tables.
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
0. /-S/util/system/backtrace.cpp:284: ?? @ 0x191A93DB
1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:47: RaiseError @ 0x1967143F
2. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:25: DoExecute @ 0x18D6BF57
3. /-S/ydb/core/kqp/ut/tx/kqp_sink_common.h:102: Execute @ 0x18CB6C6A
4. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:50: Execute_ @ 0x18D62D12
5. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: operator() @ 0x18D6A0C7
6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:150: __invoke<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:225: __call<(lambda at /-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14:1) &> @ 0x18D6A0C7
8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:171: operator() @ 0x18D6A0C7
9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:313: operator() @ 0x18D6A0C7
10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:430: operator() @ 0x196A8465
11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:989: operator() @ 0x196A8465
12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:525: Run @ 0x196A8465
13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:374: Run @ 0x19677FB8
14. /tmp//-S/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp:14: Execute @ 0x18D69293
15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:495: Execute @ 0x19679885
16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:872: RunMain @ 0x196A29DC
17. ??:0: ?? @ 0x7FC38680AD8F
18. ??:0: ?? @ 0x7FC38680AE3F
19. ??:0: ?? @ 0x16366028
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/tx/test-results/unittest/testing_out_stuff/KqpSnapshotIsolation.TSimpleOltp.out
------ FAIL: 11 - FAIL ydb/core/kqp/ut/tx
ydb/core/statistics/aggregator/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:433.75s - test:433.72s)
[fail] AnalyzeColumnshard::AnalyzeRebootColumnShard [default-linux-x86_64-release-asan] (429.12s)
(TWithBackTrace) ydb/library/actors/testlib/test_runtime.h:579: Exception occured while waiting for NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse: (NActors::TSchedulingLimitReachedException) TestActorRuntime Processed over 100000 events.
ydb/library/actors/testlib/test_runtime.cpp:716:
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
TWithBackTrace::TWithBackTrace<>() at /-S/util/generic/bt_exception.h:16:5
NKikimr::NStat::TEvStatistics::TEvAnalyzeResponse::TPtr NActors::TTestActorRuntimeBase::GrabEdgeEventRethrow(NActors::TActorId const&, TDuration) at /-S/ydb/library/actors/testlib/test_runtime.h:0:24
DoDestroy at /-S/util/generic/ptr.h:237:13
operator() at /-S/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeRebootColumnShard.out
------ FAIL: 1 - FAIL ydb/core/statistics/aggregator/ut
ydb/core/tx/datashard/ut_incremental_backup [size:medium] nchunks:4
------ [0/4] chunk ran 1 test (total:135.63s - test:135.58s)
[fail] IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [default-linux-x86_64-release-asan] (131.12s)
assertion failed at ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:2091, void NKikimr::ExecSQL(Tests::TServer::TPtr, TActorId, const TString &, bool, Ydb::StatusIds::StatusCode): (response.GetYdbStatus() == code) failed: (TIMEOUT != SUCCESS)
Response { QueryIssues { message: "Request timeout 600000ms exceeded" severity: 1 } QueryIssues { message: "Cancelling after 600000ms in ExecuteState" severity: 1 } TxMeta { } } YdbStatus: TIMEOUT
, with diff: (TIM|SUCC)E(OUT|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NKikimr::ExecSQL(TIntrusivePtr>, NActors::TActorId, TBasicString> const&, bool, Ydb::StatusIds_StatusCode) at /-S/ydb/core/tx/datashard/ut_common/datashard_ut_common.cpp:0:5
UnRef at /-S/util/generic/ptr.h:624:13
operator() at /-S/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/testing_out_stuff/IncrementalBackup.ComplexRestoreBackupCollection.WithIncremental.out
------ FAIL: 1 - FAIL ydb/core/tx/datashard/ut_incremental_backup
ydb/core/tx/tiering/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:27.22s - test:27.18s)
[crashed] ColumnShardTiers::TTLUsage [default-linux-x86_64-release-asan] (0.00s)
Test crashed (return code: -6)
See logs for more info
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tiering/ut/test-results/unittest/testing_out_stuff/ColumnShardTiers.TTLUsage.out
------ FAIL: 1 - CRASHED ydb/core/tx/tiering/ut
ydb/services/ydb/ut [size:medium] nchunks:60
------ [0/60] chunk ran 1 test (total:8.16s - test:8.13s)
[fail] YdbLogStore::AlterLogTable [default-linux-x86_64-release-asan] (3.10s)
assertion failed at ydb/services/ydb/ydb_logstore_ut.cpp:435, virtual void NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext &): (res.GetStatus() == EStatus::SUCCESS) failed: (PRECONDITION_FAILED != SUCCESS)
: Error: Column stores are not supported
, with diff: (PRE|SUC)C(ONDITION_FAIL|)E(D|SS)
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:71:12
NTestSuiteYdbLogStore::TTestCaseAlterLogTable::Execute_(NUnitTest::TTestContext&) at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:13
operator() at /-S/ydb/services/ydb/ydb_logstore_ut.cpp:0:1
operator() at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:0:12
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:397:16
UnRef at /-S/util/generic/ptr.h:624:13
UnRef at /-S/util/generic/ptr.h:624:13
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:0
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/services/ydb/ut/test-results/unittest/testing_out_stuff/YdbLogStore.AlterLogTable.out
------ FAIL: 1 - FAIL ydb/services/ydb/ut
Total 14 suites:
	1 - GOOD
	10 - FAIL
	3 - TIMEOUT
Total 34 tests:
	5 - GOOD
	22 - FAIL
	1 - NOT_LAUNCHED
	4 - TIMEOUT
	2 - CRASHED
Cache efficiency ratio is 99.05% (37137 of 37493). Local: 362 (0.97%), dist: 0 (0.00%), by dynamic uids: 0 (0.00%), avoided: 36775 (98.08%)
Dist cache download: count=0, size=0 bytes, speed=0.0 bytes/s
Disk usage for tools/sdk 3.64 GiB
Additional disk space consumed for build cache 0 bytes
Critical path:
[625056 ms] [TM] [rnd-4i8xagr21ako3x1p asan default-linux-x86_64 release]: ydb/tests/olap/column_family/compression/py3test [started: 0 (1743157373627), finished: 625056 (1743157998683)]
Time from start: 649032.0991210938 ms, time elapsed by graph 625056 ms, time diff 23976.09912109375 ms.
The longest 10 tasks:
[625056 ms] [TM] [rnd-4i8xagr21ako3x1p asan default-linux-x86_64 release]: ydb/tests/olap/column_family/compression/py3test [started: 1743157373627, finished: 1743157998683]
[623101 ms] [TM] [rnd-yhlnqjkwke2a673l asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 1743157373636, finished: 1743157996737]
[616186 ms] [TM] [rnd-17604540480418037956 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1743157371713, finished: 1743157987899]
[615852 ms] [TM] [rnd-4997419023243961708 asan default-linux-x86_64 release]: ydb/tests/olap/ttl_tiering/py3test [started: 1743157371688, finished: 1743157987540]
[527130 ms] [TM] [rnd-xsry55d7olvrg29w asan default-linux-x86_64 release]: ydb/tests/olap/s3_import/py3test [started: 1743157373622, finished: 1743157900752]
[434106 ms] [TM] [rnd-10940301224561594811 asan default-linux-x86_64 release]: ydb/core/statistics/aggregator/ut/unittest [started: 1743157367161, finished: 1743157801267]
[136072 ms] [TM] [rnd-18360163306847467278 asan default-linux-x86_64 release]: ydb/core/tx/datashard/ut_incremental_backup/unittest [started: 1743157369080, finished: 1743157505152]
[133726 ms] [TM] [rnd-12365836846253098515 asan default-linux-x86_64 release]: ydb/tests/functional/tpc/medium/py3test [started: 1743157373642, finished: 1743157507368]
[127466 ms] [TM] [rnd-79v4z2sbosfoxkmg asan default-linux-x86_64 release]: ydb/tests/stress/log/tests/py3test [started: 1743157373648, finished: 1743157501114]
[ 43684 ms] [TM] [rnd-3652224695361455342 asan default-linux-x86_64 release]: ydb/core/kqp/ut/query/unittest [started: 1743157364250, finished: 1743157407934]
Total time by type:
[4602570 ms] [TM] [count: 346, ave time 13302.23 msec]
[  68817 ms] [prepare:get from local cache] [count: 362, ave time 190.10 msec]
[  24919 ms] [prepare:AC] [count: 2, ave time 12459.50 msec]
[  20602 ms] [prepare:put to dist cache] [count: 360, ave time 57.23 msec]
[   5974 ms] [prepare:bazel-store] [count: 1, ave time 5974.00 msec]
[   5263 ms] [prepare:tools] [count: 16, ave time 328.94 msec]
[   3216 ms] [TA] [count: 10, ave time 321.60 msec]
[   1593 ms] [prepare:clean] [count: 2, ave time 796.50 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 4605786 ms (100.00%)
Total run tasks time - 4605786 ms
Configure time - 21.2 s
Statistics overhead 1299 ms
Info: Dump junit report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/junit.xml
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_3/report.json
Ok
+ echo 0